From 13ec4b540e0d46c97fd7b089e0b7413da81e0a9f Mon Sep 17 00:00:00 2001 From: Marius Date: Sun, 19 May 2013 20:40:34 +0300 Subject: beta 2013.05.19 19:27 --- tex/context/base/anch-pgr.lua | 1388 +- tex/context/base/anch-pos.lua | 2072 +-- tex/context/base/attr-col.lua | 1076 +- tex/context/base/attr-eff.lua | 222 +- tex/context/base/attr-ini.lua | 334 +- tex/context/base/attr-lay.lua | 506 +- tex/context/base/attr-mkr.lua | 52 +- tex/context/base/attr-neg.lua | 196 +- tex/context/base/back-exp.lua | 4822 +++--- tex/context/base/back-ini.lua | 212 +- tex/context/base/bibl-bib.lua | 1532 +- tex/context/base/bibl-tra.lua | 494 +- tex/context/base/bibl-tst.lua | 42 +- tex/context/base/blob-ini.lua | 388 +- tex/context/base/buff-imp-default.lua | 84 +- tex/context/base/buff-imp-escaped.lua | 28 +- tex/context/base/buff-imp-lua.lua | 416 +- tex/context/base/buff-imp-mp.lua | 234 +- tex/context/base/buff-imp-nested.lua | 160 +- tex/context/base/buff-imp-parsed-xml.lua | 202 +- tex/context/base/buff-imp-tex.lua | 260 +- tex/context/base/buff-imp-xml.lua | 266 +- tex/context/base/buff-ini.lua | 732 +- tex/context/base/buff-par.lua | 368 +- tex/context/base/buff-ver.lua | 1536 +- tex/context/base/catc-ini.lua | 82 +- tex/context/base/char-cjk.lua | 730 +- tex/context/base/char-enc.lua | 372 +- tex/context/base/char-ent.lua | 4514 ++--- tex/context/base/char-ini.lua | 2316 +-- tex/context/base/char-map.lua | 2144 +-- tex/context/base/char-tex.lua | 422 +- tex/context/base/char-utf.lua | 1106 +- tex/context/base/chem-ini.lua | 86 +- tex/context/base/chem-str.lua | 1640 +- tex/context/base/cldf-bas.lua | 358 +- tex/context/base/cldf-com.lua | 72 +- tex/context/base/cldf-ini.lua | 2132 +-- tex/context/base/cldf-int.lua | 442 +- tex/context/base/cldf-prs.lua | 108 +- tex/context/base/cldf-ver.lua | 150 +- tex/context/base/colo-icc.lua | 240 +- tex/context/base/colo-run.lua | 136 +- tex/context/base/cont-new.mkiv | 2 +- tex/context/base/context-version.pdf | Bin 4134 -> 4134 bytes tex/context/base/context.mkiv | 2 +- tex/context/base/core-ctx.lua | 694 +- tex/context/base/core-dat.lua | 538 +- tex/context/base/core-env.lua | 308 +- tex/context/base/core-sys.lua | 202 +- tex/context/base/core-two.lua | 314 +- tex/context/base/core-uti.lua | 588 +- tex/context/base/data-aux.lua | 124 +- tex/context/base/data-bin.lua | 54 +- tex/context/base/data-con.lua | 276 +- tex/context/base/data-crl.lua | 122 +- tex/context/base/data-ctx.lua | 18 +- tex/context/base/data-env.lua | 582 +- tex/context/base/data-exp.lua | 940 +- tex/context/base/data-fil.lua | 226 +- tex/context/base/data-gen.lua | 18 +- tex/context/base/data-ini.lua | 464 +- tex/context/base/data-inp.lua | 50 +- tex/context/base/data-lst.lua | 154 +- tex/context/base/data-lua.lua | 262 +- tex/context/base/data-met.lua | 266 +- tex/context/base/data-out.lua | 36 +- tex/context/base/data-pre.lua | 492 +- tex/context/base/data-sch.lua | 400 +- tex/context/base/data-tex.lua | 366 +- tex/context/base/data-tmf.lua | 146 +- tex/context/base/data-tmp.lua | 840 +- tex/context/base/data-tre.lua | 150 +- tex/context/base/data-use.lua | 202 +- tex/context/base/data-vir.lua | 168 +- tex/context/base/data-zip.lua | 528 +- tex/context/base/file-ini.lua | 74 +- tex/context/base/file-job.lua | 2002 +-- tex/context/base/file-lib.lua | 130 +- tex/context/base/file-mod.lua | 362 +- tex/context/base/file-res.lua | 310 +- tex/context/base/file-syn.lua | 102 +- tex/context/base/font-afk.lua | 400 +- tex/context/base/font-afm.lua | 1942 +-- tex/context/base/font-age.lua | 8230 
+++++----- tex/context/base/font-agl.lua | 1334 +- tex/context/base/font-aux.lua | 330 +- tex/context/base/font-chk.lua | 718 +- tex/context/base/font-cid.lua | 328 +- tex/context/base/font-col.lua | 476 +- tex/context/base/font-con.lua | 2660 +-- tex/context/base/font-ctx.lua | 3638 ++--- tex/context/base/font-def.lua | 898 +- tex/context/base/font-enc.lua | 294 +- tex/context/base/font-enh.lua | 400 +- tex/context/base/font-ext.lua | 1892 +-- tex/context/base/font-fbk.lua | 608 +- tex/context/base/font-gds.lua | 1504 +- tex/context/base/font-hsh.lua | 452 +- tex/context/base/font-ini.lua | 64 +- tex/context/base/font-ldr.lua | 140 +- tex/context/base/font-log.lua | 172 +- tex/context/base/font-lua.lua | 92 +- tex/context/base/font-map.lua | 658 +- tex/context/base/font-mis.lua | 222 +- tex/context/base/font-nod.lua | 868 +- tex/context/base/font-odk.lua | 1808 +-- tex/context/base/font-otb.lua | 1314 +- tex/context/base/font-otc.lua | 666 +- tex/context/base/font-otd.lua | 522 +- tex/context/base/font-otf.lua | 4310 ++--- tex/context/base/font-oth.lua | 102 +- tex/context/base/font-oti.lua | 182 +- tex/context/base/font-otp.lua | 1754 +- tex/context/base/font-ott.lua | 2226 +-- tex/context/base/font-sol.lua | 1768 +- tex/context/base/font-syn.lua | 3448 ++-- tex/context/base/font-tfm.lua | 304 +- tex/context/base/font-trt.lua | 114 +- tex/context/base/font-vf.lua | 410 +- tex/context/base/grph-epd.lua | 50 +- tex/context/base/grph-fil.lua | 142 +- tex/context/base/grph-inc.lua | 3218 ++-- tex/context/base/grph-raw.lua | 84 +- tex/context/base/grph-swf.lua | 188 +- tex/context/base/grph-u3d.lua | 102 +- tex/context/base/grph-wnd.lua | 94 +- tex/context/base/java-ini.lua | 452 +- tex/context/base/l-boolean.lua | 138 +- tex/context/base/l-dir.lua | 940 +- tex/context/base/l-file.lua | 1180 +- tex/context/base/l-function.lua | 22 +- tex/context/base/l-io.lua | 724 +- tex/context/base/l-lpeg.lua | 1704 +- tex/context/base/l-lua.lua | 300 +- tex/context/base/l-math.lua | 68 +- tex/context/base/l-md5.lua | 234 +- tex/context/base/l-number.lua | 414 +- tex/context/base/l-os.lua | 948 +- tex/context/base/l-package.lua | 680 +- tex/context/base/l-pdfview.lua | 286 +- tex/context/base/l-set.lua | 174 +- tex/context/base/l-string.lua | 410 +- tex/context/base/l-table.lua | 2724 ++-- tex/context/base/l-unicode.lua | 1884 +-- tex/context/base/l-url.lua | 688 +- tex/context/base/l-xml.lua | 46 +- tex/context/base/lang-def.lua | 932 +- tex/context/base/lang-frq-de.lua | 24 +- tex/context/base/lang-frq-en.lua | 52 +- tex/context/base/lang-frq-nl.lua | 24 +- tex/context/base/lang-ini.lua | 731 +- tex/context/base/lang-lab.lua | 284 +- tex/context/base/lang-url.lua | 226 +- tex/context/base/lang-wrd.lua | 706 +- tex/context/base/layo-ini.lua | 122 +- tex/context/base/lpdf-ano.lua | 1506 +- tex/context/base/lpdf-enc.lua | 314 +- tex/context/base/lpdf-epa.lua | 452 +- tex/context/base/lpdf-epd.lua | 702 +- tex/context/base/lpdf-fld.lua | 2610 +-- tex/context/base/lpdf-grp.lua | 488 +- tex/context/base/lpdf-ini.lua | 1644 +- tex/context/base/lpdf-mov.lua | 126 +- tex/context/base/lpdf-nod.lua | 272 +- tex/context/base/lpdf-ren.lua | 698 +- tex/context/base/lpdf-swf.lua | 612 +- tex/context/base/lpdf-tag.lua | 626 +- tex/context/base/lpdf-u3d.lua | 976 +- tex/context/base/lpdf-wid.lua | 1290 +- tex/context/base/luat-bwc.lua | 64 +- tex/context/base/luat-cbk.lua | 640 +- tex/context/base/luat-cnf.lua | 394 +- tex/context/base/luat-cod.lua | 362 +- tex/context/base/luat-env.lua | 352 +- 
tex/context/base/luat-exe.lua | 252 +- tex/context/base/luat-fio.lua | 234 +- tex/context/base/luat-fmt.lua | 280 +- tex/context/base/luat-ini.lua | 412 +- tex/context/base/luat-iop.lua | 390 +- tex/context/base/luat-lua.lua | 90 +- tex/context/base/luat-mac.lua | 868 +- tex/context/base/luat-run.lua | 316 +- tex/context/base/luat-sta.lua | 422 +- tex/context/base/luat-sto.lua | 338 +- tex/context/base/lxml-aux.lua | 1622 +- tex/context/base/lxml-css.lua | 316 +- tex/context/base/lxml-ctx.lua | 270 +- tex/context/base/lxml-dir.lua | 228 +- tex/context/base/lxml-ent.lua | 114 +- tex/context/base/lxml-inf.lua | 116 +- tex/context/base/lxml-lpt.lua | 2932 ++-- tex/context/base/lxml-mis.lua | 206 +- tex/context/base/lxml-sor.lua | 318 +- tex/context/base/lxml-tab.lua | 2734 ++-- tex/context/base/lxml-tex.lua | 3372 ++-- tex/context/base/lxml-xml.lua | 890 +- tex/context/base/m-chart.lua | 1832 +-- tex/context/base/m-database.lua | 274 +- tex/context/base/m-markdown.lua | 1648 +- tex/context/base/m-pstricks.lua | 148 +- tex/context/base/m-spreadsheet.lua | 664 +- tex/context/base/m-steps.lua | 454 +- tex/context/base/math-act.lua | 808 +- tex/context/base/math-dim.lua | 480 +- tex/context/base/math-ext.lua | 394 +- tex/context/base/math-fbk.lua | 624 +- tex/context/base/math-frc.lua | 102 +- tex/context/base/math-map.lua | 1368 +- tex/context/base/math-noa.lua | 2384 +-- tex/context/base/math-ren.lua | 138 +- tex/context/base/math-tag.lua | 690 +- tex/context/base/math-ttv.lua | 1602 +- tex/context/base/meta-fun.lua | 114 +- tex/context/base/meta-ini.lua | 330 +- tex/context/base/meta-pdf.lua | 1134 +- tex/context/base/meta-pdh.lua | 1220 +- tex/context/base/meta-tex.lua | 76 +- tex/context/base/mlib-ctx.lua | 356 +- tex/context/base/mlib-pdf.lua | 1060 +- tex/context/base/mlib-pps.lua | 2432 +-- tex/context/base/mlib-run.lua | 1182 +- tex/context/base/mult-aux.lua | 308 +- tex/context/base/mult-chk.lua | 152 +- tex/context/base/mult-fun.lua | 202 +- tex/context/base/mult-ini.lua | 666 +- tex/context/base/mult-low.lua | 694 +- tex/context/base/mult-mps.lua | 230 +- tex/context/base/node-acc.lua | 280 +- tex/context/base/node-aux.lua | 778 +- tex/context/base/node-bck.lua | 322 +- tex/context/base/node-dir.lua | 618 +- tex/context/base/node-ext.lua | 60 +- tex/context/base/node-fin.lua | 2444 +-- tex/context/base/node-fnt.lua | 452 +- tex/context/base/node-ini.lua | 842 +- tex/context/base/node-inj.lua | 1038 +- tex/context/base/node-mig.lua | 276 +- tex/context/base/node-pag.lua | 60 +- tex/context/base/node-pro.lua | 330 +- tex/context/base/node-ref.lua | 1170 +- tex/context/base/node-res.lua | 812 +- tex/context/base/node-rul.lua | 778 +- tex/context/base/node-ser.lua | 572 +- tex/context/base/node-shp.lua | 296 +- tex/context/base/node-snp.lua | 132 +- tex/context/base/node-tex.lua | 82 +- tex/context/base/node-tra.lua | 1058 +- tex/context/base/node-tsk.lua | 804 +- tex/context/base/node-tst.lua | 240 +- tex/context/base/node-typ.lua | 158 +- tex/context/base/pack-obj.lua | 154 +- tex/context/base/pack-rul.lua | 218 +- tex/context/base/page-flt.lua | 578 +- tex/context/base/page-inj.lua | 202 +- tex/context/base/page-ins.lua | 194 +- tex/context/base/page-lin.lua | 580 +- tex/context/base/page-mix.lua | 1390 +- tex/context/base/page-pst.lua | 156 +- tex/context/base/page-str.lua | 464 +- tex/context/base/regi-8859-1.lua | 52 +- tex/context/base/regi-8859-10.lua | 52 +- tex/context/base/regi-8859-11.lua | 52 +- tex/context/base/regi-8859-13.lua | 52 +- tex/context/base/regi-8859-14.lua | 52 
+- tex/context/base/regi-8859-15.lua | 52 +- tex/context/base/regi-8859-16.lua | 52 +- tex/context/base/regi-8859-2.lua | 52 +- tex/context/base/regi-8859-3.lua | 52 +- tex/context/base/regi-8859-4.lua | 52 +- tex/context/base/regi-8859-5.lua | 52 +- tex/context/base/regi-8859-6.lua | 52 +- tex/context/base/regi-8859-7.lua | 52 +- tex/context/base/regi-8859-8.lua | 52 +- tex/context/base/regi-8859-9.lua | 52 +- tex/context/base/regi-cp1250.lua | 52 +- tex/context/base/regi-cp1251.lua | 52 +- tex/context/base/regi-cp1252.lua | 52 +- tex/context/base/regi-cp1253.lua | 52 +- tex/context/base/regi-cp1254.lua | 52 +- tex/context/base/regi-cp1255.lua | 52 +- tex/context/base/regi-cp1256.lua | 52 +- tex/context/base/regi-cp1257.lua | 52 +- tex/context/base/regi-cp1258.lua | 52 +- tex/context/base/regi-demo.lua | 44 +- tex/context/base/regi-ini.lua | 776 +- tex/context/base/s-fonts-coverage.lua | 226 +- tex/context/base/s-fonts-features.lua | 322 +- tex/context/base/s-fonts-goodies.lua | 234 +- tex/context/base/s-fonts-missing.lua | 202 +- tex/context/base/s-fonts-shapes.lua | 656 +- tex/context/base/s-fonts-system.lua | 136 +- tex/context/base/s-fonts-tables.lua | 624 +- tex/context/base/s-fonts-vectors.lua | 208 +- tex/context/base/s-lan-03.mkiv | 2 +- tex/context/base/s-languages-sorting.lua | 118 + tex/context/base/s-languages-sorting.mkiv | 30 + tex/context/base/s-languages-system.lua | 35 + tex/context/base/s-languages-system.mkiv | 30 + tex/context/base/s-math-coverage.lua | 360 +- tex/context/base/s-math-parameters.lua | 270 +- tex/context/base/s-pre-71.lua | 126 +- tex/context/base/scrn-but.lua | 38 +- tex/context/base/scrn-fld.lua | 170 +- tex/context/base/scrn-hlp.lua | 238 +- tex/context/base/scrn-ini.lua | 64 +- tex/context/base/scrn-pag.lua | 54 +- tex/context/base/scrn-ref.lua | 130 +- tex/context/base/scrn-wid.lua | 428 +- tex/context/base/scrp-cjk.lua | 1902 +-- tex/context/base/scrp-eth.lua | 300 +- tex/context/base/scrp-ini.lua | 1268 +- tex/context/base/sort-ini.lua | 1330 +- tex/context/base/sort-lan.lua | 1850 +-- tex/context/base/spac-adj.lua | 116 +- tex/context/base/spac-ali.lua | 268 +- tex/context/base/spac-chr.lua | 400 +- tex/context/base/spac-hor.lua | 62 +- tex/context/base/spac-ver.lua | 2716 ++-- tex/context/base/status-files.pdf | Bin 24757 -> 24738 bytes tex/context/base/status-lua.pdf | Bin 211838 -> 211829 bytes tex/context/base/status-mkiv.lua | 20 + tex/context/base/strc-bkm.lua | 392 +- tex/context/base/strc-blk.lua | 304 +- tex/context/base/strc-con.lua | 18 +- tex/context/base/strc-doc.lua | 1912 +-- tex/context/base/strc-flt.lua | 18 +- tex/context/base/strc-ini.lua | 676 +- tex/context/base/strc-itm.lua | 76 +- tex/context/base/strc-lev.lua | 102 +- tex/context/base/strc-lst.lua | 1690 +- tex/context/base/strc-mar.lua | 1392 +- tex/context/base/strc-not.lua | 894 +- tex/context/base/strc-num.lua | 1298 +- tex/context/base/strc-pag.lua | 626 +- tex/context/base/strc-ref.lua | 4316 ++--- tex/context/base/strc-reg.lua | 1724 +- tex/context/base/strc-rsc.lua | 308 +- tex/context/base/strc-syn.lua | 396 +- tex/context/base/strc-tag.lua | 708 +- tex/context/base/supp-box.lua | 224 +- tex/context/base/supp-ran.lua | 146 +- tex/context/base/symb-ini.lua | 100 +- tex/context/base/syst-aux.lua | 160 +- tex/context/base/syst-con.lua | 124 +- tex/context/base/syst-lua.lua | 246 +- tex/context/base/tabl-tbl.lua | 82 +- tex/context/base/tabl-xtb.lua | 1976 +-- tex/context/base/task-ini.lua | 382 +- tex/context/base/toks-ini.lua | 682 +- tex/context/base/trac-ctx.lua | 
96 +- tex/context/base/trac-deb.lua | 496 +- tex/context/base/trac-exp.lua | 458 +- tex/context/base/trac-fil.lua | 362 +- tex/context/base/trac-inf.lua | 386 +- tex/context/base/trac-jus.lua | 272 +- tex/context/base/trac-lmx.lua | 1464 +- tex/context/base/trac-log.lua | 1632 +- tex/context/base/trac-pro.lua | 416 +- tex/context/base/trac-set.lua | 758 +- tex/context/base/trac-tex.lua | 150 +- tex/context/base/trac-tim.lua | 276 +- tex/context/base/trac-vis.lua | 1852 +-- tex/context/base/trac-xml.lua | 366 +- tex/context/base/type-ini.lua | 152 +- tex/context/base/typo-bld.lua | 370 +- tex/context/base/typo-brk.lua | 604 +- tex/context/base/typo-cap.lua | 662 +- tex/context/base/typo-cln.lua | 204 +- tex/context/base/typo-dig.lua | 324 +- tex/context/base/typo-dir.lua | 926 +- tex/context/base/typo-ini.lua | 22 +- tex/context/base/typo-itc.lua | 512 +- tex/context/base/typo-krn.lua | 670 +- tex/context/base/typo-lan.lua | 144 +- tex/context/base/typo-mar.lua | 1758 +- tex/context/base/typo-pag.lua | 358 +- tex/context/base/typo-par.lua | 362 +- tex/context/base/typo-prc.lua | 250 +- tex/context/base/typo-rep.lua | 256 +- tex/context/base/typo-spa.lua | 458 +- tex/context/base/unic-ini.lua | 38 +- tex/context/base/util-deb.lua | 256 +- tex/context/base/util-dim.lua | 898 +- tex/context/base/util-env.lua | 574 +- tex/context/base/util-fmt.lua | 152 +- tex/context/base/util-jsn.lua | 292 +- tex/context/base/util-lib.lua | 576 +- tex/context/base/util-lua.lua | 702 +- tex/context/base/util-mrg.lua | 456 +- tex/context/base/util-pck.lua | 288 +- tex/context/base/util-prs.lua | 1186 +- tex/context/base/util-ran.lua | 214 +- tex/context/base/util-seq.lua | 660 +- tex/context/base/util-soc.lua | 186 +- tex/context/base/util-sql-imp-client.lua | 512 +- tex/context/base/util-sql-imp-library.lua | 578 +- tex/context/base/util-sql-imp-swiglib.lua | 1010 +- tex/context/base/util-sql-loggers.lua | 554 +- tex/context/base/util-sql-sessions.lua | 698 +- tex/context/base/util-sql-tickets.lua | 1544 +- tex/context/base/util-sql-users.lua | 820 +- tex/context/base/util-sql.lua | 886 +- tex/context/base/util-sta.lua | 684 +- tex/context/base/util-sto.lua | 378 +- tex/context/base/util-str.lua | 1532 +- tex/context/base/util-tab.lua | 986 +- tex/context/base/util-tpl.lua | 348 +- tex/context/base/x-asciimath.lua | 540 +- tex/context/base/x-calcmath.lua | 724 +- tex/context/base/x-cals.lua | 436 +- tex/context/base/x-chemml.lua | 102 +- tex/context/base/x-ct.lua | 330 +- tex/context/base/x-ldx.lua | 682 +- tex/context/base/x-mathml.lua | 1658 +- tex/context/patterns/lang-af.lua | 147 +- tex/context/patterns/lang-agr.lua | 56 +- tex/context/patterns/lang-bg.lua | 5 +- tex/context/patterns/lang-ca.lua | 16 +- tex/context/patterns/lang-cs.lua | 52 +- tex/context/patterns/lang-cy.lua | 76 +- tex/context/patterns/lang-da.lua | 23 +- tex/context/patterns/lang-de.lua | 160 +- tex/context/patterns/lang-deo.lua | 169 +- tex/context/patterns/lang-es.lua | 66 +- tex/context/patterns/lang-et.lua | 48 +- tex/context/patterns/lang-eu.lua | 5 +- tex/context/patterns/lang-fi.lua | 6 +- tex/context/patterns/lang-fr.lua | 16 +- tex/context/patterns/lang-gb.lua | 108 +- tex/context/patterns/lang-hr.lua | 9 +- tex/context/patterns/lang-hu.lua | 734 +- tex/context/patterns/lang-is.lua | 58 +- tex/context/patterns/lang-it.lua | 8 +- tex/context/patterns/lang-la.lua | 11 +- tex/context/patterns/lang-lt.lua | 18 +- tex/context/patterns/lang-lv.lua | 132 +- tex/context/patterns/lang-mn.lua | 8 +- tex/context/patterns/lang-nb.lua | 
369 +- tex/context/patterns/lang-nl.lua | 193 +- tex/context/patterns/lang-nn.lua | 369 +- tex/context/patterns/lang-pl.lua | 50 +- tex/context/patterns/lang-pt.lua | 11 +- tex/context/patterns/lang-ro.lua | 11 +- tex/context/patterns/lang-ru.lua | 108 +- tex/context/patterns/lang-sk.lua | 38 +- tex/context/patterns/lang-sl.lua | 13 +- tex/context/patterns/lang-sr.lua | 47 +- tex/context/patterns/lang-sv.lua | 57 +- tex/context/patterns/lang-th.lua | 71 +- tex/context/patterns/lang-tk.lua | 23 +- tex/context/patterns/lang-tr.lua | 12 +- tex/context/patterns/lang-uk.lua | 66 +- tex/context/patterns/lang-us.lua | 66 +- tex/context/patterns/lang-zh.lua | 6 +- tex/context/patterns/word-th.lua | 482 + tex/generic/context/luatex/luatex-basics-gen.lua | 654 +- tex/generic/context/luatex/luatex-basics-nod.lua | 208 +- tex/generic/context/luatex/luatex-fonts-cbk.lua | 136 +- tex/generic/context/luatex/luatex-fonts-def.lua | 194 +- .../context/luatex/luatex-fonts-demo-vf-1.lua | 76 +- tex/generic/context/luatex/luatex-fonts-enc.lua | 56 +- tex/generic/context/luatex/luatex-fonts-ext.lua | 544 +- tex/generic/context/luatex/luatex-fonts-lua.lua | 66 +- tex/generic/context/luatex/luatex-fonts-merged.lua | 16256 +++++++++---------- tex/generic/context/luatex/luatex-fonts-syn.lua | 204 +- tex/generic/context/luatex/luatex-fonts-tfm.lua | 76 +- tex/generic/context/luatex/luatex-languages.lua | 90 +- tex/generic/context/luatex/luatex-math.lua | 106 +- tex/generic/context/luatex/luatex-mplib.lua | 982 +- tex/generic/context/luatex/luatex-preprocessor.lua | 326 +- tex/generic/context/luatex/luatex-swiglib-test.lua | 50 +- tex/generic/context/luatex/luatex-swiglib.lua | 124 +- 473 files changed, 146095 insertions(+), 142024 deletions(-) create mode 100644 tex/context/base/s-languages-sorting.lua create mode 100644 tex/context/base/s-languages-sorting.mkiv create mode 100644 tex/context/base/s-languages-system.lua create mode 100644 tex/context/base/s-languages-system.mkiv create mode 100644 tex/context/patterns/word-th.lua (limited to 'tex') diff --git a/tex/context/base/anch-pgr.lua b/tex/context/base/anch-pgr.lua index 278448e3a..992b4deff 100644 --- a/tex/context/base/anch-pgr.lua +++ b/tex/context/base/anch-pgr.lua @@ -1,694 +1,694 @@ -if not modules then modules = { } end modules ['anch-pgr'] = { - version = 1.001, - comment = "companion to anch-pgr.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: we need to clean up lists (of previous pages) - -local commands, context = commands, context - -local format = string.format -local abs = math.abs -local concat, sort = table.concat, table.sort -local splitter = lpeg.splitat(":") -local lpegmatch = lpeg.match - -local jobpositions = job.positions -local formatters = string.formatters - -local report_graphics = logs.reporter("graphics") - -local f_b_tag = formatters["b:%s"] -local f_e_tag = formatters["e:%s"] -local f_p_tag = formatters["p:%s"] - -local f_tag_two = formatters["%s:%s"] - -local f_point = formatters["%p"] -local f_pair = formatters["(%p,%p)"] -local f_path = formatters["%--t--cycle"] - -local function regionarea(r) - local rx, ry = r.x, r.y - local rw = rx + r.w - local rh = ry + r.h - local rd = ry - r.d - return { - f_pair(rx, rh - ry), - f_pair(rw, rh - ry), - f_pair(rw, rd - ry), - f_pair(rx, rd - ry), - } -end - --- we can use a 'local t, n' and reuse the table - -local eps = 2 - -local function add(t,x,y,last,direction) 
- local n = #t - if n == 0 then - t[n+1] = { x, y } - else - local tn = t[n] - local lx = tn[1] - local ly = tn[2] - if x == lx and y == ly then - -- quick skip - elseif n == 1 then --- if abs(lx-x) <= eps or abs(ly-y) <= eps then - if abs(lx-x) > eps or abs(ly-y) > eps then - t[n+1] = { x, y } - end - else - local tm = t[n-1] - local px = tm[1] - local py = tm[2] -if (direction == "down" and y > ly) or (direction == "up" and y < ly) then - -- move back from too much hang -else - if abs(lx-px) <= eps and abs(lx-x) <= eps then - if abs(ly-y) > eps then - tn[2] = y - end - elseif abs(ly-py) <= eps and abs(ly-y) <= eps then - if abs(lx-x) > eps then - tn[1] = x - end - elseif not last then - t[n+1] = { x, y } - end -end - end - end -end - --- local function add(t,x,y,last) --- t[#t+1] = { x, y } --- end - -local function finish(t) - local n = #t - if n > 1 then - local first = t[1] - local last = t[n] - if abs(first[1]-last[1]) <= eps and abs(first[2]-last[2]) <= eps then - t[n] = nil - end - end -end - -local function clip(t,ytop,ybot) - local first, last = 1, #t - for i=first,last do - local y = t[i][2] - if ytop < y then - first = i - end - if ybot > y then - last = i - break - end - end - local lp = { } - lp[#lp+1] = { t[first][1], ytop } - for i=first+1,last-1 do - lp[#lp+1] = { t[i][1], t[i][2] } - end - lp[#lp+1] = { t[last][1], ybot } - return lp -end - --- todo: mark regions and free paragraphs in collected - -local function shapes(r,rx,ry,rw,rh,rd,lytop,lybot,rytop,rybot,obeyhang) - -- we assume that we only hang per page and not cross pages - -- which makes sense as hanging is only uses in special cases - -- - -- we can remove data as soon as a page is done so we could - -- remember per page and discard areas after each shipout - local leftshape, rightshape - leftshape = { { rx, rh } } -- spikes get removed so we can start at the edge - rightshape = { { rw, rh } } -- even if we hang next - local paragraphs = r.paragraphs - local extending = false - if paragraphs then - for i=1,#paragraphs do - local p = paragraphs[i] - local ha = p.ha - if obeyhang and ha and ha ~= 0 then - local py = p.y - local ph = p.h - local pd = p.d - local hi = p.hi - local hang = ha * (ph + pd) - local py_ph = py + ph - -- ha < 0 hi < 0 : right top - -- ha < 0 hi > 0 : left top - if ha < 0 then - if hi < 0 then -- right - add(rightshape,rw, py_ph,"up") - add(rightshape,rw + hi,py_ph,"up") - add(rightshape,rw + hi,py_ph + hang,"up") - add(rightshape,rw, py_ph + hang,"up") - else - -- left - add(leftshape,rx,py_ph,"down") - add(leftshape,rx + hi,py_ph,"down") - add(leftshape,rx + hi,py_ph + hang,"down") - add(leftshape,rx,py_ph + hang,"down") - end - else - -- maybe some day - end - extending = true -- false - else -- we need to clip to the next par - local ps = p.ps - if ps then - local py = p.y - local ph = p.h - local pd = p.d - local step = ph + pd - local size = #ps * step - local py_ph = py + ph - add(leftshape,rx,py_ph,"up") - add(rightshape,rw,py_ph,"down") - for i=1,#ps do - local p = ps[i] - local l = p[1] - local w = p[2] - add(leftshape,rx + l, py_ph,"up") - add(rightshape,rx + l + w, py_ph,"down") - py_ph = py_ph - step - add(leftshape,rx + l, py_ph,"up") - add(rightshape,rx + l + w, py_ph,"down") - end - extending = true - elseif extending then - local py = p.y - local ph = p.h - local pd = p.d - local py_ph = py + ph - local py_pd = py - pd - add(leftshape,leftshape[#leftshape][1],py_ph,"up") - add(rightshape,rightshape[#rightshape][1],py_ph,"down") - add(leftshape,rx,py_ph,"up") -- shouldn't 
this be py_pd - add(rightshape,rw,py_ph,"down") -- shouldn't this be py_pd - extending = false - end - end - end - end - -- we can have a simple variant when no paragraphs - if extending then - -- not ok - leftshape[#leftshape][2] = rd - rightshape[#rightshape][2] = rw - else - add(leftshape,rx,rd,"up") - add(rightshape,rw,rd,"down") - end - return clip(leftshape,lytop,lybot), clip(rightshape,rytop,rybot) -end - --- local function shapes(r,rx,ry,rw,rh,rd,lytop,lybot,rytop,rybot,obeyhang) --- local leftshape = { { rx, rh }, { rx, rd } } --- local rightshape = { { rw, rh }, { rw, rd } } --- return clip(leftshape,lytop,lybot), clip(rightshape,rytop,rybot) --- end - -local function singlepart(b,e,r,left,right,obeyhang) - local bx, by = b.x, b.y - local ex, ey = e.x, e.y - local rx, ry = r.x, r.y - local rw = rx + r.w - local rh = ry + r.h - local rd = ry - r.d - if left then - rx = rx + left - rw = rw - right - end - local bh = by + b.h - local bd = by - b.d - local eh = ey + e.h - local ed = ey - e.d - if ex == rx then - -- We probably have a strut at the next line so we force a width - -- although of course it is better to move up. But as we have whitespace - -- (at least visually) injected then it's best to stress the issue. - ex = rw - end - local area - if by == ey then - area = { - f_pair(bx,bh-ry), - f_pair(ex,eh-ry), - f_pair(ex,ed-ry), - f_pair(bx,bd-ry), - } - else - area = { } - local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,ed,bh,eh,obeyhang) - add(area,bx,bh-ry) - for i=1,#rightshapes do - local ri = rightshapes[i] - add(area,ri[1],ri[2]-ry) - end - add(area,ex,eh-ry) - add(area,ex,ed-ry) - for i=#leftshapes,1,-1 do - local li = leftshapes[i] - add(area,li[1],li[2]-ry) - end - add(area,bx,bd-ry) - add(area,bx,bh-ry,true) -- finish last straight line (but no add as we cycle) - finish(area) - for i=1,#area do - local a = area[i] - area[i] = f_pair(a[1],a[2]) - end - end - return { - location = "single", - region = r, - area = area, - } -end - -local function firstpart(b,r,left,right,obeyhang) - local bx, by = b.x, b.y - local rx, ry = r.x, r.y - local rw = rx + r.w - local rh = ry + r.h - local rd = ry - r.d - if left then - rx = rx + left - rw = rw - right - end - local bh = by + b.h - local bd = by - b.d - local area = { } - local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,rd,bh,rd,obeyhang) - add(area,bx,bh-ry) - for i=1,#rightshapes do - local ri = rightshapes[i] - add(area,ri[1],ri[2]-ry) - end - for i=#leftshapes,1,-1 do - local li = leftshapes[i] - add(area,li[1],li[2]-ry) - end - add(area,bx,bd-ry) - add(area,bx,bh-ry,true) -- finish last straight line (but no add as we cycle) - finish(area) - for i=1,#area do - local a = area[i] - area[i] = f_pair(a[1],a[2]) - end - return { - location = "first", - region = r, - area = area, - } -end - -local function middlepart(r,left,right,obeyhang) - local rx, ry = r.x, r.y - local rw = rx + r.w - local rh = ry + r.h - local rd = ry - r.d - if left then - rx = rx + left - rw = rw - right - end - local area = { } - local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,rd,rh,rd,obeyhang) - for i=#leftshapes,1,-1 do - local li = leftshapes[i] - add(area,li[1],li[2]-ry) - end - for i=1,#rightshapes do - local ri = rightshapes[i] - add(area,ri[1],ri[2]-ry) - end - finish(area) - for i=1,#area do - local a = area[i] - area[i] = f_pair(a[1],a[2]) - end - return { - location = "middle", - region = r, - area = area, - } -end - -local function lastpart(e,r,left,right,obeyhang) - local ex, ey = e.x, e.y - local rx, ry = 
r.x, r.y - local rw = rx + r.w - local rh = ry + r.h - local rd = ry - r.d - if left then - rx = rx + left - rw = rw - right - end - local eh = ey + e.h - local ed = ey - e.d - local area = { } - -- two cases: till end and halfway e line - local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,ed,rh,eh,obeyhang) - for i=1,#rightshapes do - local ri = rightshapes[i] - add(area,ri[1],ri[2]-ry) - end - add(area,ex,eh-ry) - add(area,ex,ed-ry) - for i=#leftshapes,1,-1 do - local li = leftshapes[i] - add(area,li[1],li[2]-ry) - end - finish(area) - for i=1,#area do - local a = area[i] - area[i] = f_pair(a[1],a[2]) - end - return { - location = "last", - region = r, - area = area, - } -end - -graphics = graphics or { } -local backgrounds = { } - -graphics.backgrounds = backgrounds - -local function calculatemultipar(tag,obeyhang) - local collected = jobpositions.collected - local b = collected[f_b_tag(tag)] - local e = collected[f_e_tag(tag)] - if not b or not e then - report_graphics("invalid tag %a",tag) - return { } - end - local br = b.r - local er = e.r - if not br or not er then - report_graphics("invalid region for %a",tag) - return { } - end - local btag, bindex = lpegmatch(splitter,br) - local etag, eindex = lpegmatch(splitter,er) - if not bindex or not eindex or btag ~= etag then - report_graphics("invalid indices for %a",tag) - return { } - end - local bindex = tonumber(bindex) - local eindex = tonumber(eindex) - -- Here we compensate for columns (in tables): a table can have a set of column - -- entries and these are shared. We compensate left/right based on the columns - -- x and w but need to take the region into acount where the specification was - -- flushed and not the begin pos's region, because otherwise we get the wrong - -- compensation for assymetrical doublesided layouts. - local left = 0 - local right = 0 - local rc = b.c - if rc then - rc = collected[rc] - if rc then - local tb = collected[rc.r] - if tb then - left = -(tb.x - rc.x) - right = (tb.w - rc.w - left) -- tb.x - rc.x - end - end - end - -- Obeying intermediate changes of left/rightskip makes no sense as it will - -- look bad, so we only look at the begin situation. 
- -- - local bn = b.n - if bn then - local bp = collected[f_p_tag(bn)] - if bp then - left = left + bp.ls - right = right + bp.rs - end - end - -- - if bindex == eindex then - return { - list = { [b.p] = { singlepart(b,e,collected[br],left,right,obeyhang) } }, - bpos = b, - epos = e, - } - else - local list = { - [b.p] = { firstpart(b,collected[br],left,right,obeyhang) }, - } - for i=bindex+1,eindex-1 do - br = f_tag_two(btag,i) - local r = collected[br] - if not r then - report_graphics("invalid middle for %a",br) - else - local p = r.p - local pp = list[p] - if pp then - pp[#pp+1] = middlepart(r,left,right,obeyhang) - else - list[p] = { middlepart(r,left,right,obeyhang) } - end - end - end - local p = e.p - local pp = list[p] - if pp then - pp[#pp+1] = lastpart(e,collected[er],left,right,obeyhang) - else - list[p] = { lastpart(e,collected[er],left,right,obeyhang) } - end - return { - list = list, - bpos = b, - epos = e, - } - end -end - --- local pending = { } -- needs gc --- --- local function register(data,n,anchor) --- local pa = pending[anchor] --- if not pa then --- pa = { } --- pending[anchor] = pa --- end --- for page, pagedata in next, data do --- local pap = pa[page] --- if pap then --- pap[#pap+1] = n --- else --- pa[page] = { n } --- end --- end --- end --- --- function backgrounds.registered(anchor,page) --- local pa = pending[anchor] --- if pa then --- concat(pa,",") --- else --- return "" --- end --- end - -local pbg = { } -- will move to pending - -function backgrounds.calculatemultipar(n) - if not pbg[n] then - pbg[n] = calculatemultipar("pbg",n) or { } - end -end - -local multilocs = { - single = 1, -- maybe 0 - first = 1, - middle = 2, - last = 3, -} - --- if unknown context_abck : input mp-abck.mpiv ; fi ; - -local f_template_a = [[ -path multiregs[], multipars[], multibox ; -string multikind[] ; -numeric multilocs[], nofmultipars ; -nofmultipars := %s ; -multibox := unitsquare xyscaled (%p,%p) ; -numeric par_strut_height, par_strut_depth, par_line_height ; -par_strut_height := %p ; -par_strut_depth := %p ; -par_line_height := %p ; -]] - -local f_template_b = [[ -multilocs[%s] := %s ; -multikind[%s] := "%s" ; -multipars[%s] := (%--t--cycle) shifted - (%p,%p) ; -]] - -local f_template_c = [[ -multiregs[%s] := (%--t--cycle) shifted - %s ; -]] - -local f_template_d = [[ -setbounds currentpicture to multibox ; -]] - -f_template_a = formatters[f_template_a] -f_template_b = formatters[f_template_b] -f_template_c = formatters[f_template_c] -f_template_d = formatters[f_template_d] - -function backgrounds.fetchmultipar(n,anchor,page,obeyhang) - local data = pbg[n] - if not data then - data = calculatemultipar(n,obeyhang) - pbg[n] = data -- can be replaced by register - -- register(data.list,n,anchor) - end - if data then - local list = data.list - if list then - local pagedata = list[page] - if pagedata then - local nofmultipars = #pagedata - -- report_graphics("fetching %a at page %s using anchor %a containing %s multipars",n,page,anchor,nofmultipars) - local a = jobpositions.collected[anchor] - if not a then - report_graphics("missing anchor %a",anchor) - else - local trace = false - local x, y, w, h, d = a.x, a.y, a.w, a.h, a.d - local bpos = data.bpos - local bh, bd = bpos.h, bpos.d - local result = { f_template_a(nofmultipars,w,h+d,bh,bd,bh+bd) } - for i=1,nofmultipars do - local region = pagedata[i] - result[#result+1] = f_template_b( - i, multilocs[region.location], - i, region.location, - i, region.area, x, y-region.region.y) - if trace then - result[#result+1] = 
f_template_c(i, regionarea(region.region), offset) - end - end - data[page] = nil - result[#result+1] = f_template_d() - result = concat(result,"\n") - return result - end - end - end - end - return f_template_a(0,"origin",0,0,0) -end - -backgrounds.point = f_point -backgrounds.pair = f_pair -backgrounds.path = f_path - -function commands.fetchmultipar(n,anchor,page) - context(backgrounds.fetchmultipar(n,anchor,page)) -end - -function commands.fetchmultishape(n,anchor,page) - context(backgrounds.fetchmultipar(n,anchor,page,true)) -end - -local f_template_a = [[ -path posboxes[], posregions[] ; -numeric pospages[] ; -numeric nofposboxes ; -nofposboxes := %s ; -%t ; -]] - -local f_template_b = [[ -pospages[%s] := %s ; -posboxes[%s] := (%p,%p)--(%p,%p)--(%p,%p)--(%p,%p)--cycle ; -posregions[%s] := (%p,%p)--(%p,%p)--(%p,%p)--(%p,%p)--cycle ; -]] - -f_template_a = formatters[f_template_a] -f_template_b = formatters[f_template_b] - -function commands.fetchposboxes(tags,anchor,page) -- no caching (yet) / todo: anchor, page - local collected = jobpositions.collected - if type(tags) == "string" then - tags = utilities.parsers.settings_to_array(tags) - end - local list, nofboxes = { }, 0 - for i=1,#tags do - local tag= tags[i] - local c = collected[tag] - if c then - local r = c.r - if r then - r = collected[r] - if r then - local rx, ry, rw, rh, rd = r.x, r.y, r.w, r.h, r.d - local cx = c.x - rx - local cy = c.y - ry - local cw = cx + c.w - local ch = cy + c.h - local cd = cy - c.d - nofboxes = nofboxes + 1 - list[nofboxes] = f_template_b( - nofboxes,c.p, - nofboxes,cx,ch,cw,ch,cw,cd,cx,cd, - nofboxes,0,rh,rw,rh,rw,rd,0,rd - ) - end - end - else - print("\n missing",tag) - end - end - context(f_template_a(nofboxes,list)) -end - -local doifelse = commands.doifelse - -function commands.doifelsemultipar(n,page,obeyhang) - local data = pbg[n] - if not data then - data = calculatemultipar(n,obeyhang) - pbg[n] = data - end - if page then - doifelse(data and data[page] and true) - else - doifelse(data and next(data) and true) - end -end - -function commands.doifelserangeonpage(first,last,page) - local collected = jobpositions.collected - local f = collected[first] - if not f then - doifelse(false) - return - end - local l = collected[last] - if not l then - doifelse(false) - return - end - doifelse(page >= f.p and page <= l.p) -end +if not modules then modules = { } end modules ['anch-pgr'] = { + version = 1.001, + comment = "companion to anch-pgr.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: we need to clean up lists (of previous pages) + +local commands, context = commands, context + +local format = string.format +local abs = math.abs +local concat, sort = table.concat, table.sort +local splitter = lpeg.splitat(":") +local lpegmatch = lpeg.match + +local jobpositions = job.positions +local formatters = string.formatters + +local report_graphics = logs.reporter("graphics") + +local f_b_tag = formatters["b:%s"] +local f_e_tag = formatters["e:%s"] +local f_p_tag = formatters["p:%s"] + +local f_tag_two = formatters["%s:%s"] + +local f_point = formatters["%p"] +local f_pair = formatters["(%p,%p)"] +local f_path = formatters["%--t--cycle"] + +local function regionarea(r) + local rx, ry = r.x, r.y + local rw = rx + r.w + local rh = ry + r.h + local rd = ry - r.d + return { + f_pair(rx, rh - ry), + f_pair(rw, rh - ry), + f_pair(rw, rd - ry), + f_pair(rx, rd - ry), + } +end + +-- we can 
use a 'local t, n' and reuse the table + +local eps = 2 + +local function add(t,x,y,last,direction) + local n = #t + if n == 0 then + t[n+1] = { x, y } + else + local tn = t[n] + local lx = tn[1] + local ly = tn[2] + if x == lx and y == ly then + -- quick skip + elseif n == 1 then +-- if abs(lx-x) <= eps or abs(ly-y) <= eps then + if abs(lx-x) > eps or abs(ly-y) > eps then + t[n+1] = { x, y } + end + else + local tm = t[n-1] + local px = tm[1] + local py = tm[2] +if (direction == "down" and y > ly) or (direction == "up" and y < ly) then + -- move back from too much hang +else + if abs(lx-px) <= eps and abs(lx-x) <= eps then + if abs(ly-y) > eps then + tn[2] = y + end + elseif abs(ly-py) <= eps and abs(ly-y) <= eps then + if abs(lx-x) > eps then + tn[1] = x + end + elseif not last then + t[n+1] = { x, y } + end +end + end + end +end + +-- local function add(t,x,y,last) +-- t[#t+1] = { x, y } +-- end + +local function finish(t) + local n = #t + if n > 1 then + local first = t[1] + local last = t[n] + if abs(first[1]-last[1]) <= eps and abs(first[2]-last[2]) <= eps then + t[n] = nil + end + end +end + +local function clip(t,ytop,ybot) + local first, last = 1, #t + for i=first,last do + local y = t[i][2] + if ytop < y then + first = i + end + if ybot > y then + last = i + break + end + end + local lp = { } + lp[#lp+1] = { t[first][1], ytop } + for i=first+1,last-1 do + lp[#lp+1] = { t[i][1], t[i][2] } + end + lp[#lp+1] = { t[last][1], ybot } + return lp +end + +-- todo: mark regions and free paragraphs in collected + +local function shapes(r,rx,ry,rw,rh,rd,lytop,lybot,rytop,rybot,obeyhang) + -- we assume that we only hang per page and not cross pages + -- which makes sense as hanging is only uses in special cases + -- + -- we can remove data as soon as a page is done so we could + -- remember per page and discard areas after each shipout + local leftshape, rightshape + leftshape = { { rx, rh } } -- spikes get removed so we can start at the edge + rightshape = { { rw, rh } } -- even if we hang next + local paragraphs = r.paragraphs + local extending = false + if paragraphs then + for i=1,#paragraphs do + local p = paragraphs[i] + local ha = p.ha + if obeyhang and ha and ha ~= 0 then + local py = p.y + local ph = p.h + local pd = p.d + local hi = p.hi + local hang = ha * (ph + pd) + local py_ph = py + ph + -- ha < 0 hi < 0 : right top + -- ha < 0 hi > 0 : left top + if ha < 0 then + if hi < 0 then -- right + add(rightshape,rw, py_ph,"up") + add(rightshape,rw + hi,py_ph,"up") + add(rightshape,rw + hi,py_ph + hang,"up") + add(rightshape,rw, py_ph + hang,"up") + else + -- left + add(leftshape,rx,py_ph,"down") + add(leftshape,rx + hi,py_ph,"down") + add(leftshape,rx + hi,py_ph + hang,"down") + add(leftshape,rx,py_ph + hang,"down") + end + else + -- maybe some day + end + extending = true -- false + else -- we need to clip to the next par + local ps = p.ps + if ps then + local py = p.y + local ph = p.h + local pd = p.d + local step = ph + pd + local size = #ps * step + local py_ph = py + ph + add(leftshape,rx,py_ph,"up") + add(rightshape,rw,py_ph,"down") + for i=1,#ps do + local p = ps[i] + local l = p[1] + local w = p[2] + add(leftshape,rx + l, py_ph,"up") + add(rightshape,rx + l + w, py_ph,"down") + py_ph = py_ph - step + add(leftshape,rx + l, py_ph,"up") + add(rightshape,rx + l + w, py_ph,"down") + end + extending = true + elseif extending then + local py = p.y + local ph = p.h + local pd = p.d + local py_ph = py + ph + local py_pd = py - pd + add(leftshape,leftshape[#leftshape][1],py_ph,"up") + 
add(rightshape,rightshape[#rightshape][1],py_ph,"down") + add(leftshape,rx,py_ph,"up") -- shouldn't this be py_pd + add(rightshape,rw,py_ph,"down") -- shouldn't this be py_pd + extending = false + end + end + end + end + -- we can have a simple variant when no paragraphs + if extending then + -- not ok + leftshape[#leftshape][2] = rd + rightshape[#rightshape][2] = rw + else + add(leftshape,rx,rd,"up") + add(rightshape,rw,rd,"down") + end + return clip(leftshape,lytop,lybot), clip(rightshape,rytop,rybot) +end + +-- local function shapes(r,rx,ry,rw,rh,rd,lytop,lybot,rytop,rybot,obeyhang) +-- local leftshape = { { rx, rh }, { rx, rd } } +-- local rightshape = { { rw, rh }, { rw, rd } } +-- return clip(leftshape,lytop,lybot), clip(rightshape,rytop,rybot) +-- end + +local function singlepart(b,e,r,left,right,obeyhang) + local bx, by = b.x, b.y + local ex, ey = e.x, e.y + local rx, ry = r.x, r.y + local rw = rx + r.w + local rh = ry + r.h + local rd = ry - r.d + if left then + rx = rx + left + rw = rw - right + end + local bh = by + b.h + local bd = by - b.d + local eh = ey + e.h + local ed = ey - e.d + if ex == rx then + -- We probably have a strut at the next line so we force a width + -- although of course it is better to move up. But as we have whitespace + -- (at least visually) injected then it's best to stress the issue. + ex = rw + end + local area + if by == ey then + area = { + f_pair(bx,bh-ry), + f_pair(ex,eh-ry), + f_pair(ex,ed-ry), + f_pair(bx,bd-ry), + } + else + area = { } + local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,ed,bh,eh,obeyhang) + add(area,bx,bh-ry) + for i=1,#rightshapes do + local ri = rightshapes[i] + add(area,ri[1],ri[2]-ry) + end + add(area,ex,eh-ry) + add(area,ex,ed-ry) + for i=#leftshapes,1,-1 do + local li = leftshapes[i] + add(area,li[1],li[2]-ry) + end + add(area,bx,bd-ry) + add(area,bx,bh-ry,true) -- finish last straight line (but no add as we cycle) + finish(area) + for i=1,#area do + local a = area[i] + area[i] = f_pair(a[1],a[2]) + end + end + return { + location = "single", + region = r, + area = area, + } +end + +local function firstpart(b,r,left,right,obeyhang) + local bx, by = b.x, b.y + local rx, ry = r.x, r.y + local rw = rx + r.w + local rh = ry + r.h + local rd = ry - r.d + if left then + rx = rx + left + rw = rw - right + end + local bh = by + b.h + local bd = by - b.d + local area = { } + local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,rd,bh,rd,obeyhang) + add(area,bx,bh-ry) + for i=1,#rightshapes do + local ri = rightshapes[i] + add(area,ri[1],ri[2]-ry) + end + for i=#leftshapes,1,-1 do + local li = leftshapes[i] + add(area,li[1],li[2]-ry) + end + add(area,bx,bd-ry) + add(area,bx,bh-ry,true) -- finish last straight line (but no add as we cycle) + finish(area) + for i=1,#area do + local a = area[i] + area[i] = f_pair(a[1],a[2]) + end + return { + location = "first", + region = r, + area = area, + } +end + +local function middlepart(r,left,right,obeyhang) + local rx, ry = r.x, r.y + local rw = rx + r.w + local rh = ry + r.h + local rd = ry - r.d + if left then + rx = rx + left + rw = rw - right + end + local area = { } + local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,rd,rh,rd,obeyhang) + for i=#leftshapes,1,-1 do + local li = leftshapes[i] + add(area,li[1],li[2]-ry) + end + for i=1,#rightshapes do + local ri = rightshapes[i] + add(area,ri[1],ri[2]-ry) + end + finish(area) + for i=1,#area do + local a = area[i] + area[i] = f_pair(a[1],a[2]) + end + return { + location = "middle", + region = r, + area = area, + } 
+end + +local function lastpart(e,r,left,right,obeyhang) + local ex, ey = e.x, e.y + local rx, ry = r.x, r.y + local rw = rx + r.w + local rh = ry + r.h + local rd = ry - r.d + if left then + rx = rx + left + rw = rw - right + end + local eh = ey + e.h + local ed = ey - e.d + local area = { } + -- two cases: till end and halfway e line + local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,ed,rh,eh,obeyhang) + for i=1,#rightshapes do + local ri = rightshapes[i] + add(area,ri[1],ri[2]-ry) + end + add(area,ex,eh-ry) + add(area,ex,ed-ry) + for i=#leftshapes,1,-1 do + local li = leftshapes[i] + add(area,li[1],li[2]-ry) + end + finish(area) + for i=1,#area do + local a = area[i] + area[i] = f_pair(a[1],a[2]) + end + return { + location = "last", + region = r, + area = area, + } +end + +graphics = graphics or { } +local backgrounds = { } + +graphics.backgrounds = backgrounds + +local function calculatemultipar(tag,obeyhang) + local collected = jobpositions.collected + local b = collected[f_b_tag(tag)] + local e = collected[f_e_tag(tag)] + if not b or not e then + report_graphics("invalid tag %a",tag) + return { } + end + local br = b.r + local er = e.r + if not br or not er then + report_graphics("invalid region for %a",tag) + return { } + end + local btag, bindex = lpegmatch(splitter,br) + local etag, eindex = lpegmatch(splitter,er) + if not bindex or not eindex or btag ~= etag then + report_graphics("invalid indices for %a",tag) + return { } + end + local bindex = tonumber(bindex) + local eindex = tonumber(eindex) + -- Here we compensate for columns (in tables): a table can have a set of column + -- entries and these are shared. We compensate left/right based on the columns + -- x and w but need to take the region into acount where the specification was + -- flushed and not the begin pos's region, because otherwise we get the wrong + -- compensation for assymetrical doublesided layouts. + local left = 0 + local right = 0 + local rc = b.c + if rc then + rc = collected[rc] + if rc then + local tb = collected[rc.r] + if tb then + left = -(tb.x - rc.x) + right = (tb.w - rc.w - left) -- tb.x - rc.x + end + end + end + -- Obeying intermediate changes of left/rightskip makes no sense as it will + -- look bad, so we only look at the begin situation. 
+ -- + local bn = b.n + if bn then + local bp = collected[f_p_tag(bn)] + if bp then + left = left + bp.ls + right = right + bp.rs + end + end + -- + if bindex == eindex then + return { + list = { [b.p] = { singlepart(b,e,collected[br],left,right,obeyhang) } }, + bpos = b, + epos = e, + } + else + local list = { + [b.p] = { firstpart(b,collected[br],left,right,obeyhang) }, + } + for i=bindex+1,eindex-1 do + br = f_tag_two(btag,i) + local r = collected[br] + if not r then + report_graphics("invalid middle for %a",br) + else + local p = r.p + local pp = list[p] + if pp then + pp[#pp+1] = middlepart(r,left,right,obeyhang) + else + list[p] = { middlepart(r,left,right,obeyhang) } + end + end + end + local p = e.p + local pp = list[p] + if pp then + pp[#pp+1] = lastpart(e,collected[er],left,right,obeyhang) + else + list[p] = { lastpart(e,collected[er],left,right,obeyhang) } + end + return { + list = list, + bpos = b, + epos = e, + } + end +end + +-- local pending = { } -- needs gc +-- +-- local function register(data,n,anchor) +-- local pa = pending[anchor] +-- if not pa then +-- pa = { } +-- pending[anchor] = pa +-- end +-- for page, pagedata in next, data do +-- local pap = pa[page] +-- if pap then +-- pap[#pap+1] = n +-- else +-- pa[page] = { n } +-- end +-- end +-- end +-- +-- function backgrounds.registered(anchor,page) +-- local pa = pending[anchor] +-- if pa then +-- concat(pa,",") +-- else +-- return "" +-- end +-- end + +local pbg = { } -- will move to pending + +function backgrounds.calculatemultipar(n) + if not pbg[n] then + pbg[n] = calculatemultipar("pbg",n) or { } + end +end + +local multilocs = { + single = 1, -- maybe 0 + first = 1, + middle = 2, + last = 3, +} + +-- if unknown context_abck : input mp-abck.mpiv ; fi ; + +local f_template_a = [[ +path multiregs[], multipars[], multibox ; +string multikind[] ; +numeric multilocs[], nofmultipars ; +nofmultipars := %s ; +multibox := unitsquare xyscaled (%p,%p) ; +numeric par_strut_height, par_strut_depth, par_line_height ; +par_strut_height := %p ; +par_strut_depth := %p ; +par_line_height := %p ; +]] + +local f_template_b = [[ +multilocs[%s] := %s ; +multikind[%s] := "%s" ; +multipars[%s] := (%--t--cycle) shifted - (%p,%p) ; +]] + +local f_template_c = [[ +multiregs[%s] := (%--t--cycle) shifted - %s ; +]] + +local f_template_d = [[ +setbounds currentpicture to multibox ; +]] + +f_template_a = formatters[f_template_a] +f_template_b = formatters[f_template_b] +f_template_c = formatters[f_template_c] +f_template_d = formatters[f_template_d] + +function backgrounds.fetchmultipar(n,anchor,page,obeyhang) + local data = pbg[n] + if not data then + data = calculatemultipar(n,obeyhang) + pbg[n] = data -- can be replaced by register + -- register(data.list,n,anchor) + end + if data then + local list = data.list + if list then + local pagedata = list[page] + if pagedata then + local nofmultipars = #pagedata + -- report_graphics("fetching %a at page %s using anchor %a containing %s multipars",n,page,anchor,nofmultipars) + local a = jobpositions.collected[anchor] + if not a then + report_graphics("missing anchor %a",anchor) + else + local trace = false + local x, y, w, h, d = a.x, a.y, a.w, a.h, a.d + local bpos = data.bpos + local bh, bd = bpos.h, bpos.d + local result = { f_template_a(nofmultipars,w,h+d,bh,bd,bh+bd) } + for i=1,nofmultipars do + local region = pagedata[i] + result[#result+1] = f_template_b( + i, multilocs[region.location], + i, region.location, + i, region.area, x, y-region.region.y) + if trace then + result[#result+1] = 
f_template_c(i, regionarea(region.region), offset) + end + end + data[page] = nil + result[#result+1] = f_template_d() + result = concat(result,"\n") + return result + end + end + end + end + return f_template_a(0,"origin",0,0,0) +end + +backgrounds.point = f_point +backgrounds.pair = f_pair +backgrounds.path = f_path + +function commands.fetchmultipar(n,anchor,page) + context(backgrounds.fetchmultipar(n,anchor,page)) +end + +function commands.fetchmultishape(n,anchor,page) + context(backgrounds.fetchmultipar(n,anchor,page,true)) +end + +local f_template_a = [[ +path posboxes[], posregions[] ; +numeric pospages[] ; +numeric nofposboxes ; +nofposboxes := %s ; +%t ; +]] + +local f_template_b = [[ +pospages[%s] := %s ; +posboxes[%s] := (%p,%p)--(%p,%p)--(%p,%p)--(%p,%p)--cycle ; +posregions[%s] := (%p,%p)--(%p,%p)--(%p,%p)--(%p,%p)--cycle ; +]] + +f_template_a = formatters[f_template_a] +f_template_b = formatters[f_template_b] + +function commands.fetchposboxes(tags,anchor,page) -- no caching (yet) / todo: anchor, page + local collected = jobpositions.collected + if type(tags) == "string" then + tags = utilities.parsers.settings_to_array(tags) + end + local list, nofboxes = { }, 0 + for i=1,#tags do + local tag= tags[i] + local c = collected[tag] + if c then + local r = c.r + if r then + r = collected[r] + if r then + local rx, ry, rw, rh, rd = r.x, r.y, r.w, r.h, r.d + local cx = c.x - rx + local cy = c.y - ry + local cw = cx + c.w + local ch = cy + c.h + local cd = cy - c.d + nofboxes = nofboxes + 1 + list[nofboxes] = f_template_b( + nofboxes,c.p, + nofboxes,cx,ch,cw,ch,cw,cd,cx,cd, + nofboxes,0,rh,rw,rh,rw,rd,0,rd + ) + end + end + else + print("\n missing",tag) + end + end + context(f_template_a(nofboxes,list)) +end + +local doifelse = commands.doifelse + +function commands.doifelsemultipar(n,page,obeyhang) + local data = pbg[n] + if not data then + data = calculatemultipar(n,obeyhang) + pbg[n] = data + end + if page then + doifelse(data and data[page] and true) + else + doifelse(data and next(data) and true) + end +end + +function commands.doifelserangeonpage(first,last,page) + local collected = jobpositions.collected + local f = collected[first] + if not f then + doifelse(false) + return + end + local l = collected[last] + if not l then + doifelse(false) + return + end + doifelse(page >= f.p and page <= l.p) +end diff --git a/tex/context/base/anch-pos.lua b/tex/context/base/anch-pos.lua index 2697cecf4..c94fd60a0 100644 --- a/tex/context/base/anch-pos.lua +++ b/tex/context/base/anch-pos.lua @@ -1,1036 +1,1036 @@ -if not modules then modules = { } end modules ['anch-pos'] = { - version = 1.001, - comment = "companion to anch-pos.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

We save positional information in the main utility table. Not only
-can we store much more information in Lua but it's also
-more efficient.

---ldx]]-- - --- plus (extra) is obsolete but we will keep it for a while - --- maybe replace texsp by our own converter (stay at the lua end) --- eventually mp will have large numbers so we can use sp there too - -local commands, context = commands, context - -local tostring, next, rawget, setmetatable = tostring, next, rawget, setmetatable -local sort = table.sort -local format, gmatch, match = string.format, string.gmatch, string.match -local rawget = rawget -local lpegmatch = lpeg.match -local insert, remove = table.insert, table.remove -local allocate, mark = utilities.storage.allocate, utilities.storage.mark -local texsp, texcount, texbox, texdimen, texsetcount = tex.sp, tex.count, tex.box, tex.dimen, tex.setcount ------ texsp = string.todimen -- because we cache this is much faster but no rounding - -local pdf = pdf -- h and v are variables - -local setmetatableindex = table.setmetatableindex -local new_latelua = nodes.pool.latelua -local find_tail = node.slide - -local variables = interfaces.variables -local v_text = variables.text -local v_column = variables.column - -local pt = number.dimenfactors.pt -local pts = number.pts -local formatters = string.formatters - -local collected = allocate() -local tobesaved = allocate() - -local jobpositions = { - collected = collected, - tobesaved = tobesaved, -} - -job.positions = jobpositions - -_plib_ = jobpositions -- might go - -local default = { -- not r and paragraphs etc - __index = { - x = 0, -- x position baseline - y = 0, -- y position baseline - w = 0, -- width - h = 0, -- height - d = 0, -- depth - p = 0, -- page - n = 0, -- paragraph - ls = 0, -- leftskip - rs = 0, -- rightskip - hi = 0, -- hangindent - ha = 0, -- hangafter - hs = 0, -- hsize - pi = 0, -- parindent - ps = false, -- parshape - } -} - -local f_b_tag = formatters["b:%s"] -local f_e_tag = formatters["e:%s"] -local f_p_tag = formatters["p:%s"] -local f_w_tag = formatters["w:%s"] - -local f_b_column = formatters["_plib_.b_col(%q)"] -local f_e_column = formatters["_plib_.e_col()"] - -local f_enhance = formatters["_plib_.enhance(%q)"] -local f_region = formatters["region:%s"] - -local f_b_region = formatters["_plib_.b_region(%q)"] -local f_e_region = formatters["_plib_.e_region(%s)"] - -local f_tag_three = formatters["%s:%s:%s"] -local f_tag_two = formatters["%s:%s"] - -local function sorter(a,b) - return a.y > b.y -end - -local nofusedregions = 0 -local nofmissingregions = 0 -local nofregular = 0 - --- todo: register subsets and count them indepently - -local function initializer() - tobesaved = jobpositions.tobesaved - collected = jobpositions.collected - -- enhance regions with paragraphs - for tag, data in next, collected do - local region = data.r - if region then - local r = collected[region] - if r then - local paragraphs = r.paragraphs - if not paragraphs then - r.paragraphs = { data } - else - paragraphs[#paragraphs+1] = data - end - nofusedregions = nofusedregions + 1 - else - nofmissingregions = nofmissingregions + 1 - end - else - nofregular = nofregular + 1 - end - setmetatable(data,default) - end - -- add metatable - -- for tag, data in next, collected do - -- setmetatable(data,default) - -- end - -- sort this data - for tag, data in next, collected do - local region = data.r - if region then - local r = collected[region] - if r then - local paragraphs = r.paragraphs - if paragraphs and #paragraphs > 1 then - sort(paragraphs,sorter) - end - end - end - -- so, we can be sparse and don't need 'or 0' code - end -end - -job.register('job.positions.collected', 
tobesaved, initializer) - -local regions = { } -local nofregions = 0 -local region = nil - -local columns = { } -local nofcolumns = 0 -local column = nil - -local nofpages = nil - --- beware ... we're not sparse here as lua will reserve slots for the nilled - -local function setdim(name,w,h,d,extra) -- will be used when we move to sp allover - local x = pdf.h - local y = pdf.v - if x == 0 then x = nil end - if y == 0 then y = nil end - if w == 0 then w = nil end - if h == 0 then h = nil end - if d == 0 then d = nil end - if extra == "" then extra = nil end - -- todo: sparse - tobesaved[name] = { - p = texcount.realpageno, - x = x, - y = y, - w = w, - h = h, - d = d, - e = extra, - r = region, - c = column, - } -end - -local function setall(name,p,x,y,w,h,d,extra) - if x == 0 then x = nil end - if y == 0 then y = nil end - if w == 0 then w = nil end - if h == 0 then h = nil end - if d == 0 then d = nil end - if extra == "" then extra = nil end - -- todo: sparse - tobesaved[name] = { - p = p, - x = x, - y = y, - w = w, - h = h, - d = d, - e = extra, - r = region, - c = column, - } -end - -local function enhance(data) - if not data then - return nil - end - if data.r == true then -- or "" - data.r = region - end - if data.x == true then - data.x = pdf.h - end - if data.y == true then - data.y = pdf.v - end - if data.p == true then - data.p = texcount.realpageno - end - if data.c == true then - data.c = column - end - if data.w == 0 then - data.w = nil - end - if data.h == 0 then - data.h = nil - end - if data.d == 0 then - data.d = nil - end - return data -end - -local function set(name,index,val) - local data = enhance(val or index) - if val then - container = tobesaved[name] - if not container then - tobesaved[name] = { - [index] = data - } - else - container[index] = data - end - else - tobesaved[name] = data - end -end - -local function get(id,index) - if index then - local container = collected[id] - return container and container[index] - else - return collected[id] - end -end - -jobpositions.setdim = setdim -jobpositions.setall = setall -jobpositions.set = set -jobpositions.get = get - -commands.setpos = setall - --- will become private table (could also become attribute driven but too nasty --- as attributes can bleed e.g. 
in margin stuff) - -function jobpositions.b_col(tag) - tobesaved[tag] = { - r = true, - x = pdf.h, - w = 0, - } - insert(columns,tag) - column = tag -end - -function jobpositions.e_col(tag) - local t = tobesaved[column] - if not t then - -- something's wrong - else - t.w = pdf.h - t.x - t.r = region - end - remove(columns) - column = columns[#columns] -end - -function commands.bcolumn(tag,register) - insert(columns,tag) - column = tag - if register then - context(new_latelua(f_b_column(tag))) - end -end - -function commands.ecolumn(register) - if register then - context(new_latelua(f_e_column())) - end - remove(columns) - column = columns[#columns] -end - --- regions - -function jobpositions.b_region(tag) - local last = tobesaved[tag] - last.x = pdf.h -last.y = pdf.v - last.p = texcount.realpageno - insert(regions,tag) - region = tag -end - -function jobpositions.e_region(correct) - local last = tobesaved[region] -if correct then - last.h = last.y - pdf.v -end - last.y = pdf.v - remove(regions) - region = regions[#regions] -end - -function jobpositions.markregionbox(n,tag,correct) - if not tag or tag == "" then - nofregions = nofregions + 1 - tag = f_region(nofregions) - end - local box = texbox[n] - local w = box.width - local h = box.height - local d = box.depth - tobesaved[tag] = { - p = true, - x = true, - y = pdf.v, -- true, - w = w ~= 0 and w or nil, - h = h ~= 0 and h or nil, - d = d ~= 0 and d or nil, - } - local push = new_latelua(f_b_region(tag)) - local pop = new_latelua(f_e_region(tostring(correct))) -- todo: check if tostring is needed with formatter - -- maybe we should construct a hbox first (needs experimenting) so that we can avoid some at the tex end - local head = box.list - if head then - local tail = find_tail(head) - head.prev = push - push.next = head - pop .prev = tail - tail.next = pop - else -- we can have a simple push/pop - push.next = pop - pop.prev = push - end - box.list = push -end - -function jobpositions.enhance(name) - enhance(tobesaved[name]) -end - -function commands.pos(name,t) - tobesaved[name] = t - context(new_latelua(f_enhance(name))) -end - -local nofparagraphs = 0 - -function commands.parpos() -- todo: relate to localpar (so this is an intermediate variant) - nofparagraphs = nofparagraphs + 1 - texsetcount("global","c_anch_positions_paragraph",nofparagraphs) - local strutbox = texbox.strutbox - local t = { - p = true, - c = true, - r = true, - x = true, - y = true, - h = strutbox.height, - d = strutbox.depth, - hs = tex.hsize, - } - local leftskip = tex.leftskip.width - local rightskip = tex.rightskip.width - local hangindent = tex.hangindent - local hangafter = tex.hangafter - local parindent = tex.parindent - local parshape = tex.parshape - if leftskip ~= 0 then - t.ls = leftskip - end - if rightskip ~= 0 then - t.rs = rightskip - end - if hangindent ~= 0 then - t.hi = hangindent - end - if hangafter ~= 1 and hangafter ~= 0 then -- can not be zero .. so it needs to be 1 if zero - t.ha = hangafter - end - if parindent ~= 0 then - t.pi = parindent - end - if parshape and #parshape > 0 then - t.ps = parshape - end - local tag = f_p_tag(nofparagraphs) - tobesaved[tag] = t - context(new_latelua(f_enhance(tag))) -end - -function commands.posxy(name) -- can node.write be used here? 
- tobesaved[name] = { - p = true, - c = column, - r = true, - x = true, - y = true, - n = nofparagraphs > 0 and nofparagraphs or nil, - } - context(new_latelua(f_enhance(name))) -end - -function commands.poswhd(name,w,h,d) - tobesaved[name] = { - p = true, - c = column, - r = true, - x = true, - y = true, - w = w, - h = h, - d = d, - n = nofparagraphs > 0 and nofparagraphs or nil, - } - context(new_latelua(f_enhance(name))) -end - -function commands.posplus(name,w,h,d,extra) - tobesaved[name] = { - p = true, - c = column, - r = true, - x = true, - y = true, - w = w, - h = h, - d = d, - n = nofparagraphs > 0 and nofparagraphs or nil, - e = extra, - } - context(new_latelua(f_enhance(name))) -end - -function commands.posstrut(name,w,h,d) - local strutbox = texbox.strutbox - tobesaved[name] = { - p = true, - c = column, - r = true, - x = true, - y = true, - h = strutbox.height, - d = strutbox.depth, - n = nofparagraphs > 0 and nofparagraphs or nil, - } - context(new_latelua(f_enhance(name))) -end - -function jobpositions.getreserved(tag,n) - if tag == v_column then - local fulltag = f_tag_three(tag,texcount.realpageno,n or 1) - local data = collected[fulltag] - if data then - return data, fulltag - end - tag = v_text - end - if tag == v_text then - local fulltag = f_tag_two(tag,texcount.realpageno) - return collected[fulltag] or false, fulltag - end - return collected[tag] or false, tag -end - -function jobpositions.copy(target,source) - collected[target] = collected[source] -end - -function jobpositions.replace(id,p,x,y,w,h,d) - collected[id] = { p = p, x = x, y = y, w = w, h = h, d = d } -- c g -end - -function jobpositions.page(id) - local jpi = collected[id] - return jpi and jpi.p -end - -function jobpositions.region(id) - local jpi = collected[id] - return jpi and jpi.r or false -end - -function jobpositions.column(id) - local jpi = collected[id] - return jpi and jpi.c or false -end - -function jobpositions.paragraph(id) - local jpi = collected[id] - return jpi and jpi.n -end - -jobpositions.p = jobpositions.page -jobpositions.r = jobpositions.region -jobpositions.c = jobpositions.column -jobpositions.n = jobpositions.paragraph - -function jobpositions.x(id) - local jpi = collected[id] - return jpi and jpi.x -end - -function jobpositions.y(id) - local jpi = collected[id] - return jpi and jpi.y -end - -function jobpositions.width(id) - local jpi = collected[id] - return jpi and jpi.w -end - -function jobpositions.height(id) - local jpi = collected[id] - return jpi and jpi.h -end - -function jobpositions.depth(id) - local jpi = collected[id] - return jpi and jpi.d -end - -function jobpositions.leftskip(id) - local jpi = collected[id] - return jpi and jpi.ls -end - -function jobpositions.rightskip(id) - local jpi = collected[id] - return jpi and jpi.rs -end - -function jobpositions.hsize(id) - local jpi = collected[id] - return jpi and jpi.hs -end - -function jobpositions.parindent(id) - local jpi = collected[id] - return jpi and jpi.pi -end - -function jobpositions.hangindent(id) - local jpi = collected[id] - return jpi and jpi.hi -end - -function jobpositions.hangafter(id) - local jpi = collected[id] - return jpi and jpi.ha or 1 -end - -function jobpositions.xy(id) - local jpi = collected[id] - if jpi then - return jpi.x, jpi.y - else - return 0, 0 - end -end - -function jobpositions.lowerleft(id) - local jpi = collected[id] - if jpi then - return jpi.x, jpi.y - jpi.d - else - return 0, 0 - end -end - -function jobpositions.lowerright(id) - local jpi = collected[id] - if jpi then - return 
jpi.x + jpi.w, jpi.y - jpi.d - else - return 0, 0 - end -end - -function jobpositions.upperright(id) - local jpi = collected[id] - if jpi then - return jpi.x + jpi.w, jpi.y + jpi.h - else - return 0, 0 - end -end - -function jobpositions.upperleft(id) - local jpi = collected[id] - if jpi then - return jpi.x, jpi.y + jpi.h - else - return 0, 0 - end -end - -function jobpositions.position(id) - local jpi = collected[id] - if jpi then - return jpi.p, jpi.x, jpi.y, jpi.w, jpi.h, jpi.d - else - return 0, 0, 0, 0, 0, 0 - end -end - -function jobpositions.extra(id,n,default) -- assume numbers - local jpi = collected[id] - if jpi then - local e = jpi.e - if e then - local split = jpi.split - if not split then - split = lpegmatch(splitter,jpi.e) - jpi.split = split - end - return texsp(split[n]) or default -- watch the texsp here - end - end - return default -end - -local function overlapping(one,two,overlappingmargin) -- hm, strings so this is wrong .. texsp - one = collected[one] - two = collected[two] - if one and two and one.p == two.p then - if not overlappingmargin then - overlappingmargin = 2 - end - local x_one = one.x - local x_two = two.x - local w_two = two.w - local llx_one = x_one - overlappingmargin - local urx_two = x_two + w_two + overlappingmargin - if llx_one > urx_two then - return false - end - local w_one = one.w - local urx_one = x_one + w_one + overlappingmargin - local llx_two = x_two - overlappingmargin - if urx_one < llx_two then - return false - end - local y_one = one.y - local y_two = two.y - local d_one = one.d - local h_two = two.h - local lly_one = y_one - d_one - overlappingmargin - local ury_two = y_two + h_two + overlappingmargin - if lly_one > ury_two then - return false - end - local h_one = one.h - local d_two = two.d - local ury_one = y_one + h_one + overlappingmargin - local lly_two = y_two - d_two - overlappingmargin - if ury_one < lly_two then - return false - end - return true - end -end - -local function onsamepage(list,page) - for id in gmatch(list,"(, )") do - local jpi = collected[id] - if jpi then - local p = jpi.p - if not p then - return false - elseif not page then - page = p - elseif page ~= p then - return false - end - end - end - return page -end - -jobpositions.overlapping = overlapping -jobpositions.onsamepage = onsamepage - --- interface - -commands.replacepospxywhd = jobpositions.replace -commands.copyposition = jobpositions.copy - -function commands.MPp(id) - local jpi = collected[id] - if jpi then - local p = jpi.p - if p and p ~= true then - context(p) - return - end - end - context('0') -end - -function commands.MPx(id) - local jpi = collected[id] - if jpi then - local x = jpi.x - if x and x ~= true and x ~= 0 then - context("%.5fpt",x*pt) - return - end - end - context('0pt') -end - -function commands.MPy(id) - local jpi = collected[id] - if jpi then - local y = jpi.y - if y and y ~= true and y ~= 0 then - context("%.5fpt",y*pt) - return - end - end - context('0pt') -end - -function commands.MPw(id) - local jpi = collected[id] - if jpi then - local w = jpi.w - if w and w ~= 0 then - context("%.5fpt",w*pt) - return - end - end - context('0pt') -end - -function commands.MPh(id) - local jpi = collected[id] - if jpi then - local h = jpi.h - if h and h ~= 0 then - context("%.5fpt",h*pt) - return - end - end - context('0pt') -end - -function commands.MPd(id) - local jpi = collected[id] - if jpi then - local d = jpi.d - if d and d ~= 0 then - context("%.5fpt",d*pt) - return - end - end - context('0pt') -end - -function commands.MPxy(id) - 
local jpi = collected[id] - if jpi then - context('(%.5fpt,%.5fpt)', - jpi.x*pt, - jpi.y*pt - ) - else - context('(0,0)') - end -end - -function commands.MPll(id) - local jpi = collected[id] - if jpi then - context('(%.5fpt,%.5fpt)', - jpi.x *pt, - (jpi.y-jpi.d)*pt - ) - else - context('(0,0)') -- for mp only - end -end - -function commands.MPlr(id) - local jpi = collected[id] - if jpi then - context('(%.5fpt,%.5fpt)', - (jpi.x + jpi.w)*pt, - (jpi.y - jpi.d)*pt - ) - else - context('(0,0)') -- for mp only - end -end - -function commands.MPur(id) - local jpi = collected[id] - if jpi then - context('(%.5fpt,%.5fpt)', - (jpi.x + jpi.w)*pt, - (jpi.y + jpi.h)*pt - ) - else - context('(0,0)') -- for mp only - end -end - -function commands.MPul(id) - local jpi = collected[id] - if jpi then - context('(%.5fpt,%.5fpt)', - jpi.x *pt, - (jpi.y + jpi.h)*pt - ) - else - context('(0,0)') -- for mp only - end -end - -local function MPpos(id) - local jpi = collected[id] - if jpi then - local p = jpi.p - if p then - context("%s,%.5fpt,%.5fpt,%.5fpt,%.5fpt,%.5fpt", - p, - jpi.x*pt, - jpi.y*pt, - jpi.w*pt, - jpi.h*pt, - jpi.d*pt - ) - return - end - end - context('0,0,0,0,0,0') -- for mp only -end - -commands.MPpos = MPpos - -function commands.MPn(id) - local jpi = collected[id] - if jpi then - local n = jpi.n - if n then - context(n) - return - end - end - context(0) -end - -function commands.MPc(id) - local jpi = collected[id] - if jpi then - local c = jpi.c - if c and p ~= true then - context(c) - return - end - end - context(c) -- number -end - -function commands.MPr(id) - local jpi = collected[id] - if jpi then - local r = jpi.r - if r and p ~= true then - context(r) - return - end - end -end - -local function MPpardata(n) - local t = collected[n] - if not t then - local tag = f_p_tag(n) - t = collected[tag] - end - if t then - context("%.5fpt,%.5fpt,%.5fpt,%.5fpt,%s,%.5fpt", - t.hs*pt, - t.ls*pt, - t.rs*pt, - t.hi*pt, - t.ha, - t.pi*pt - ) - else - context("0,0,0,0,0,0") -- for mp only - end -end - -commands.MPpardata = MPpardata - -function commands.MPposset(id) -- special helper, used in backgrounds - local b = f_b_tag(id) - local e = f_e_tag(id) - local w = f_w_tag(id) - local p = f_p_tag(jobpositions.n(b)) - MPpos(b) context(",") MPpos(e) context(",") MPpos(w) context(",") MPpos(p) context(",") MPpardata(p) -end - -function commands.MPls(id) - local t = collected[id] - if t then - context("%.5fpt",t.ls*pt) - else - context("0pt") - end -end - -function commands.MPrs(id) - local t = collected[id] - if t then - context("%.5fpt",t.rs*pt) - else - context("0pt") - end -end - -local splitter = lpeg.tsplitat(",") - -function commands.MPplus(id,n,default) - local jpi = collected[id] - if jpi then - local e = jpi.e - if e then - local split = jpi.split - if not split then - split = lpegmatch(splitter,jpi.e) - jpi.split = split - end - context(split[n] or default) - return - end - end - context(default) -end - -function commands.MPrest(id,default) - local jpi = collected[id] - context(jpi and jpi.e or default) -end - -function commands.MPxywhd(id) - local t = collected[id] - if t then - context("%.5fpt,%.5fpt,%.5fpt,%.5fpt,%.5fpt", - t.x*pt, - t.y*pt, - t.w*pt, - t.h*pt, - t.d*pt - ) - else - context("0,0,0,0,0") -- for mp only - end -end - -local doif, doifelse = commands.doif, commands.doifelse - -function commands.doifpositionelse(name) - doifelse(collected[name]) -end - -function commands.doifposition(name) - doif(collected[name]) -end - -function commands.doifpositiononpage(name,page) -- probably 
always realpageno - local c = collected[name] - doifelse(c and c.p == page) -end - -function commands.doifoverlappingelse(one,two,overlappingmargin) - doifelse(overlapping(one,two,overlappingmargin)) -end - -function commands.doifpositionsonsamepageelse(list,page) - doifelse(onsamepage(list)) -end - -function commands.doifpositionsonthispageelse(list) - doifelse(onsamepage(list,tostring(tex.count.realpageno))) -end - -function commands.doifelsepositionsused() - doifelse(next(collected)) -end - -commands.markcolumnbox = jobpositions.markcolumnbox -commands.markregionbox = jobpositions.markregionbox - --- statistics (at least for the moment, when testing) - -statistics.register("positions", function() - local total = nofregular + nofusedregions + nofmissingregions - if total > 0 then - return format("%s collected, %s regulars, %s regions, %s unresolved regions", - total, nofregular, nofusedregions, nofmissingregions) - else - return nil - end -end) +if not modules then modules = { } end modules ['anch-pos'] = { + version = 1.001, + comment = "companion to anch-pos.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

We save positional information in the main utility table. Not only +can we store much more information in Lua but it's also +more efficient.

+--ldx]]-- + +-- plus (extra) is obsolete but we will keep it for a while + +-- maybe replace texsp by our own converter (stay at the lua end) +-- eventually mp will have large numbers so we can use sp there too + +local commands, context = commands, context + +local tostring, next, rawget, setmetatable = tostring, next, rawget, setmetatable +local sort = table.sort +local format, gmatch, match = string.format, string.gmatch, string.match +local rawget = rawget +local lpegmatch = lpeg.match +local insert, remove = table.insert, table.remove +local allocate, mark = utilities.storage.allocate, utilities.storage.mark +local texsp, texcount, texbox, texdimen, texsetcount = tex.sp, tex.count, tex.box, tex.dimen, tex.setcount +----- texsp = string.todimen -- because we cache this is much faster but no rounding + +local pdf = pdf -- h and v are variables + +local setmetatableindex = table.setmetatableindex +local new_latelua = nodes.pool.latelua +local find_tail = node.slide + +local variables = interfaces.variables +local v_text = variables.text +local v_column = variables.column + +local pt = number.dimenfactors.pt +local pts = number.pts +local formatters = string.formatters + +local collected = allocate() +local tobesaved = allocate() + +local jobpositions = { + collected = collected, + tobesaved = tobesaved, +} + +job.positions = jobpositions + +_plib_ = jobpositions -- might go + +local default = { -- not r and paragraphs etc + __index = { + x = 0, -- x position baseline + y = 0, -- y position baseline + w = 0, -- width + h = 0, -- height + d = 0, -- depth + p = 0, -- page + n = 0, -- paragraph + ls = 0, -- leftskip + rs = 0, -- rightskip + hi = 0, -- hangindent + ha = 0, -- hangafter + hs = 0, -- hsize + pi = 0, -- parindent + ps = false, -- parshape + } +} + +local f_b_tag = formatters["b:%s"] +local f_e_tag = formatters["e:%s"] +local f_p_tag = formatters["p:%s"] +local f_w_tag = formatters["w:%s"] + +local f_b_column = formatters["_plib_.b_col(%q)"] +local f_e_column = formatters["_plib_.e_col()"] + +local f_enhance = formatters["_plib_.enhance(%q)"] +local f_region = formatters["region:%s"] + +local f_b_region = formatters["_plib_.b_region(%q)"] +local f_e_region = formatters["_plib_.e_region(%s)"] + +local f_tag_three = formatters["%s:%s:%s"] +local f_tag_two = formatters["%s:%s"] + +local function sorter(a,b) + return a.y > b.y +end + +local nofusedregions = 0 +local nofmissingregions = 0 +local nofregular = 0 + +-- todo: register subsets and count them indepently + +local function initializer() + tobesaved = jobpositions.tobesaved + collected = jobpositions.collected + -- enhance regions with paragraphs + for tag, data in next, collected do + local region = data.r + if region then + local r = collected[region] + if r then + local paragraphs = r.paragraphs + if not paragraphs then + r.paragraphs = { data } + else + paragraphs[#paragraphs+1] = data + end + nofusedregions = nofusedregions + 1 + else + nofmissingregions = nofmissingregions + 1 + end + else + nofregular = nofregular + 1 + end + setmetatable(data,default) + end + -- add metatable + -- for tag, data in next, collected do + -- setmetatable(data,default) + -- end + -- sort this data + for tag, data in next, collected do + local region = data.r + if region then + local r = collected[region] + if r then + local paragraphs = r.paragraphs + if paragraphs and #paragraphs > 1 then + sort(paragraphs,sorter) + end + end + end + -- so, we can be sparse and don't need 'or 0' code + end +end + +job.register('job.positions.collected', 
tobesaved, initializer) + +local regions = { } +local nofregions = 0 +local region = nil + +local columns = { } +local nofcolumns = 0 +local column = nil + +local nofpages = nil + +-- beware ... we're not sparse here as lua will reserve slots for the nilled + +local function setdim(name,w,h,d,extra) -- will be used when we move to sp allover + local x = pdf.h + local y = pdf.v + if x == 0 then x = nil end + if y == 0 then y = nil end + if w == 0 then w = nil end + if h == 0 then h = nil end + if d == 0 then d = nil end + if extra == "" then extra = nil end + -- todo: sparse + tobesaved[name] = { + p = texcount.realpageno, + x = x, + y = y, + w = w, + h = h, + d = d, + e = extra, + r = region, + c = column, + } +end + +local function setall(name,p,x,y,w,h,d,extra) + if x == 0 then x = nil end + if y == 0 then y = nil end + if w == 0 then w = nil end + if h == 0 then h = nil end + if d == 0 then d = nil end + if extra == "" then extra = nil end + -- todo: sparse + tobesaved[name] = { + p = p, + x = x, + y = y, + w = w, + h = h, + d = d, + e = extra, + r = region, + c = column, + } +end + +local function enhance(data) + if not data then + return nil + end + if data.r == true then -- or "" + data.r = region + end + if data.x == true then + data.x = pdf.h + end + if data.y == true then + data.y = pdf.v + end + if data.p == true then + data.p = texcount.realpageno + end + if data.c == true then + data.c = column + end + if data.w == 0 then + data.w = nil + end + if data.h == 0 then + data.h = nil + end + if data.d == 0 then + data.d = nil + end + return data +end + +local function set(name,index,val) + local data = enhance(val or index) + if val then + container = tobesaved[name] + if not container then + tobesaved[name] = { + [index] = data + } + else + container[index] = data + end + else + tobesaved[name] = data + end +end + +local function get(id,index) + if index then + local container = collected[id] + return container and container[index] + else + return collected[id] + end +end + +jobpositions.setdim = setdim +jobpositions.setall = setall +jobpositions.set = set +jobpositions.get = get + +commands.setpos = setall + +-- will become private table (could also become attribute driven but too nasty +-- as attributes can bleed e.g. 
in margin stuff) + +function jobpositions.b_col(tag) + tobesaved[tag] = { + r = true, + x = pdf.h, + w = 0, + } + insert(columns,tag) + column = tag +end + +function jobpositions.e_col(tag) + local t = tobesaved[column] + if not t then + -- something's wrong + else + t.w = pdf.h - t.x + t.r = region + end + remove(columns) + column = columns[#columns] +end + +function commands.bcolumn(tag,register) + insert(columns,tag) + column = tag + if register then + context(new_latelua(f_b_column(tag))) + end +end + +function commands.ecolumn(register) + if register then + context(new_latelua(f_e_column())) + end + remove(columns) + column = columns[#columns] +end + +-- regions + +function jobpositions.b_region(tag) + local last = tobesaved[tag] + last.x = pdf.h +last.y = pdf.v + last.p = texcount.realpageno + insert(regions,tag) + region = tag +end + +function jobpositions.e_region(correct) + local last = tobesaved[region] +if correct then + last.h = last.y - pdf.v +end + last.y = pdf.v + remove(regions) + region = regions[#regions] +end + +function jobpositions.markregionbox(n,tag,correct) + if not tag or tag == "" then + nofregions = nofregions + 1 + tag = f_region(nofregions) + end + local box = texbox[n] + local w = box.width + local h = box.height + local d = box.depth + tobesaved[tag] = { + p = true, + x = true, + y = pdf.v, -- true, + w = w ~= 0 and w or nil, + h = h ~= 0 and h or nil, + d = d ~= 0 and d or nil, + } + local push = new_latelua(f_b_region(tag)) + local pop = new_latelua(f_e_region(tostring(correct))) -- todo: check if tostring is needed with formatter + -- maybe we should construct a hbox first (needs experimenting) so that we can avoid some at the tex end + local head = box.list + if head then + local tail = find_tail(head) + head.prev = push + push.next = head + pop .prev = tail + tail.next = pop + else -- we can have a simple push/pop + push.next = pop + pop.prev = push + end + box.list = push +end + +function jobpositions.enhance(name) + enhance(tobesaved[name]) +end + +function commands.pos(name,t) + tobesaved[name] = t + context(new_latelua(f_enhance(name))) +end + +local nofparagraphs = 0 + +function commands.parpos() -- todo: relate to localpar (so this is an intermediate variant) + nofparagraphs = nofparagraphs + 1 + texsetcount("global","c_anch_positions_paragraph",nofparagraphs) + local strutbox = texbox.strutbox + local t = { + p = true, + c = true, + r = true, + x = true, + y = true, + h = strutbox.height, + d = strutbox.depth, + hs = tex.hsize, + } + local leftskip = tex.leftskip.width + local rightskip = tex.rightskip.width + local hangindent = tex.hangindent + local hangafter = tex.hangafter + local parindent = tex.parindent + local parshape = tex.parshape + if leftskip ~= 0 then + t.ls = leftskip + end + if rightskip ~= 0 then + t.rs = rightskip + end + if hangindent ~= 0 then + t.hi = hangindent + end + if hangafter ~= 1 and hangafter ~= 0 then -- can not be zero .. so it needs to be 1 if zero + t.ha = hangafter + end + if parindent ~= 0 then + t.pi = parindent + end + if parshape and #parshape > 0 then + t.ps = parshape + end + local tag = f_p_tag(nofparagraphs) + tobesaved[tag] = t + context(new_latelua(f_enhance(tag))) +end + +function commands.posxy(name) -- can node.write be used here? 
+ tobesaved[name] = { + p = true, + c = column, + r = true, + x = true, + y = true, + n = nofparagraphs > 0 and nofparagraphs or nil, + } + context(new_latelua(f_enhance(name))) +end + +function commands.poswhd(name,w,h,d) + tobesaved[name] = { + p = true, + c = column, + r = true, + x = true, + y = true, + w = w, + h = h, + d = d, + n = nofparagraphs > 0 and nofparagraphs or nil, + } + context(new_latelua(f_enhance(name))) +end + +function commands.posplus(name,w,h,d,extra) + tobesaved[name] = { + p = true, + c = column, + r = true, + x = true, + y = true, + w = w, + h = h, + d = d, + n = nofparagraphs > 0 and nofparagraphs or nil, + e = extra, + } + context(new_latelua(f_enhance(name))) +end + +function commands.posstrut(name,w,h,d) + local strutbox = texbox.strutbox + tobesaved[name] = { + p = true, + c = column, + r = true, + x = true, + y = true, + h = strutbox.height, + d = strutbox.depth, + n = nofparagraphs > 0 and nofparagraphs or nil, + } + context(new_latelua(f_enhance(name))) +end + +function jobpositions.getreserved(tag,n) + if tag == v_column then + local fulltag = f_tag_three(tag,texcount.realpageno,n or 1) + local data = collected[fulltag] + if data then + return data, fulltag + end + tag = v_text + end + if tag == v_text then + local fulltag = f_tag_two(tag,texcount.realpageno) + return collected[fulltag] or false, fulltag + end + return collected[tag] or false, tag +end + +function jobpositions.copy(target,source) + collected[target] = collected[source] +end + +function jobpositions.replace(id,p,x,y,w,h,d) + collected[id] = { p = p, x = x, y = y, w = w, h = h, d = d } -- c g +end + +function jobpositions.page(id) + local jpi = collected[id] + return jpi and jpi.p +end + +function jobpositions.region(id) + local jpi = collected[id] + return jpi and jpi.r or false +end + +function jobpositions.column(id) + local jpi = collected[id] + return jpi and jpi.c or false +end + +function jobpositions.paragraph(id) + local jpi = collected[id] + return jpi and jpi.n +end + +jobpositions.p = jobpositions.page +jobpositions.r = jobpositions.region +jobpositions.c = jobpositions.column +jobpositions.n = jobpositions.paragraph + +function jobpositions.x(id) + local jpi = collected[id] + return jpi and jpi.x +end + +function jobpositions.y(id) + local jpi = collected[id] + return jpi and jpi.y +end + +function jobpositions.width(id) + local jpi = collected[id] + return jpi and jpi.w +end + +function jobpositions.height(id) + local jpi = collected[id] + return jpi and jpi.h +end + +function jobpositions.depth(id) + local jpi = collected[id] + return jpi and jpi.d +end + +function jobpositions.leftskip(id) + local jpi = collected[id] + return jpi and jpi.ls +end + +function jobpositions.rightskip(id) + local jpi = collected[id] + return jpi and jpi.rs +end + +function jobpositions.hsize(id) + local jpi = collected[id] + return jpi and jpi.hs +end + +function jobpositions.parindent(id) + local jpi = collected[id] + return jpi and jpi.pi +end + +function jobpositions.hangindent(id) + local jpi = collected[id] + return jpi and jpi.hi +end + +function jobpositions.hangafter(id) + local jpi = collected[id] + return jpi and jpi.ha or 1 +end + +function jobpositions.xy(id) + local jpi = collected[id] + if jpi then + return jpi.x, jpi.y + else + return 0, 0 + end +end + +function jobpositions.lowerleft(id) + local jpi = collected[id] + if jpi then + return jpi.x, jpi.y - jpi.d + else + return 0, 0 + end +end + +function jobpositions.lowerright(id) + local jpi = collected[id] + if jpi then + return 
jpi.x + jpi.w, jpi.y - jpi.d + else + return 0, 0 + end +end + +function jobpositions.upperright(id) + local jpi = collected[id] + if jpi then + return jpi.x + jpi.w, jpi.y + jpi.h + else + return 0, 0 + end +end + +function jobpositions.upperleft(id) + local jpi = collected[id] + if jpi then + return jpi.x, jpi.y + jpi.h + else + return 0, 0 + end +end + +function jobpositions.position(id) + local jpi = collected[id] + if jpi then + return jpi.p, jpi.x, jpi.y, jpi.w, jpi.h, jpi.d + else + return 0, 0, 0, 0, 0, 0 + end +end + +function jobpositions.extra(id,n,default) -- assume numbers + local jpi = collected[id] + if jpi then + local e = jpi.e + if e then + local split = jpi.split + if not split then + split = lpegmatch(splitter,jpi.e) + jpi.split = split + end + return texsp(split[n]) or default -- watch the texsp here + end + end + return default +end + +local function overlapping(one,two,overlappingmargin) -- hm, strings so this is wrong .. texsp + one = collected[one] + two = collected[two] + if one and two and one.p == two.p then + if not overlappingmargin then + overlappingmargin = 2 + end + local x_one = one.x + local x_two = two.x + local w_two = two.w + local llx_one = x_one - overlappingmargin + local urx_two = x_two + w_two + overlappingmargin + if llx_one > urx_two then + return false + end + local w_one = one.w + local urx_one = x_one + w_one + overlappingmargin + local llx_two = x_two - overlappingmargin + if urx_one < llx_two then + return false + end + local y_one = one.y + local y_two = two.y + local d_one = one.d + local h_two = two.h + local lly_one = y_one - d_one - overlappingmargin + local ury_two = y_two + h_two + overlappingmargin + if lly_one > ury_two then + return false + end + local h_one = one.h + local d_two = two.d + local ury_one = y_one + h_one + overlappingmargin + local lly_two = y_two - d_two - overlappingmargin + if ury_one < lly_two then + return false + end + return true + end +end + +local function onsamepage(list,page) + for id in gmatch(list,"(, )") do + local jpi = collected[id] + if jpi then + local p = jpi.p + if not p then + return false + elseif not page then + page = p + elseif page ~= p then + return false + end + end + end + return page +end + +jobpositions.overlapping = overlapping +jobpositions.onsamepage = onsamepage + +-- interface + +commands.replacepospxywhd = jobpositions.replace +commands.copyposition = jobpositions.copy + +function commands.MPp(id) + local jpi = collected[id] + if jpi then + local p = jpi.p + if p and p ~= true then + context(p) + return + end + end + context('0') +end + +function commands.MPx(id) + local jpi = collected[id] + if jpi then + local x = jpi.x + if x and x ~= true and x ~= 0 then + context("%.5fpt",x*pt) + return + end + end + context('0pt') +end + +function commands.MPy(id) + local jpi = collected[id] + if jpi then + local y = jpi.y + if y and y ~= true and y ~= 0 then + context("%.5fpt",y*pt) + return + end + end + context('0pt') +end + +function commands.MPw(id) + local jpi = collected[id] + if jpi then + local w = jpi.w + if w and w ~= 0 then + context("%.5fpt",w*pt) + return + end + end + context('0pt') +end + +function commands.MPh(id) + local jpi = collected[id] + if jpi then + local h = jpi.h + if h and h ~= 0 then + context("%.5fpt",h*pt) + return + end + end + context('0pt') +end + +function commands.MPd(id) + local jpi = collected[id] + if jpi then + local d = jpi.d + if d and d ~= 0 then + context("%.5fpt",d*pt) + return + end + end + context('0pt') +end + +function commands.MPxy(id) + 
local jpi = collected[id] + if jpi then + context('(%.5fpt,%.5fpt)', + jpi.x*pt, + jpi.y*pt + ) + else + context('(0,0)') + end +end + +function commands.MPll(id) + local jpi = collected[id] + if jpi then + context('(%.5fpt,%.5fpt)', + jpi.x *pt, + (jpi.y-jpi.d)*pt + ) + else + context('(0,0)') -- for mp only + end +end + +function commands.MPlr(id) + local jpi = collected[id] + if jpi then + context('(%.5fpt,%.5fpt)', + (jpi.x + jpi.w)*pt, + (jpi.y - jpi.d)*pt + ) + else + context('(0,0)') -- for mp only + end +end + +function commands.MPur(id) + local jpi = collected[id] + if jpi then + context('(%.5fpt,%.5fpt)', + (jpi.x + jpi.w)*pt, + (jpi.y + jpi.h)*pt + ) + else + context('(0,0)') -- for mp only + end +end + +function commands.MPul(id) + local jpi = collected[id] + if jpi then + context('(%.5fpt,%.5fpt)', + jpi.x *pt, + (jpi.y + jpi.h)*pt + ) + else + context('(0,0)') -- for mp only + end +end + +local function MPpos(id) + local jpi = collected[id] + if jpi then + local p = jpi.p + if p then + context("%s,%.5fpt,%.5fpt,%.5fpt,%.5fpt,%.5fpt", + p, + jpi.x*pt, + jpi.y*pt, + jpi.w*pt, + jpi.h*pt, + jpi.d*pt + ) + return + end + end + context('0,0,0,0,0,0') -- for mp only +end + +commands.MPpos = MPpos + +function commands.MPn(id) + local jpi = collected[id] + if jpi then + local n = jpi.n + if n then + context(n) + return + end + end + context(0) +end + +function commands.MPc(id) + local jpi = collected[id] + if jpi then + local c = jpi.c + if c and p ~= true then + context(c) + return + end + end + context(c) -- number +end + +function commands.MPr(id) + local jpi = collected[id] + if jpi then + local r = jpi.r + if r and p ~= true then + context(r) + return + end + end +end + +local function MPpardata(n) + local t = collected[n] + if not t then + local tag = f_p_tag(n) + t = collected[tag] + end + if t then + context("%.5fpt,%.5fpt,%.5fpt,%.5fpt,%s,%.5fpt", + t.hs*pt, + t.ls*pt, + t.rs*pt, + t.hi*pt, + t.ha, + t.pi*pt + ) + else + context("0,0,0,0,0,0") -- for mp only + end +end + +commands.MPpardata = MPpardata + +function commands.MPposset(id) -- special helper, used in backgrounds + local b = f_b_tag(id) + local e = f_e_tag(id) + local w = f_w_tag(id) + local p = f_p_tag(jobpositions.n(b)) + MPpos(b) context(",") MPpos(e) context(",") MPpos(w) context(",") MPpos(p) context(",") MPpardata(p) +end + +function commands.MPls(id) + local t = collected[id] + if t then + context("%.5fpt",t.ls*pt) + else + context("0pt") + end +end + +function commands.MPrs(id) + local t = collected[id] + if t then + context("%.5fpt",t.rs*pt) + else + context("0pt") + end +end + +local splitter = lpeg.tsplitat(",") + +function commands.MPplus(id,n,default) + local jpi = collected[id] + if jpi then + local e = jpi.e + if e then + local split = jpi.split + if not split then + split = lpegmatch(splitter,jpi.e) + jpi.split = split + end + context(split[n] or default) + return + end + end + context(default) +end + +function commands.MPrest(id,default) + local jpi = collected[id] + context(jpi and jpi.e or default) +end + +function commands.MPxywhd(id) + local t = collected[id] + if t then + context("%.5fpt,%.5fpt,%.5fpt,%.5fpt,%.5fpt", + t.x*pt, + t.y*pt, + t.w*pt, + t.h*pt, + t.d*pt + ) + else + context("0,0,0,0,0") -- for mp only + end +end + +local doif, doifelse = commands.doif, commands.doifelse + +function commands.doifpositionelse(name) + doifelse(collected[name]) +end + +function commands.doifposition(name) + doif(collected[name]) +end + +function commands.doifpositiononpage(name,page) -- probably 
always realpageno + local c = collected[name] + doifelse(c and c.p == page) +end + +function commands.doifoverlappingelse(one,two,overlappingmargin) + doifelse(overlapping(one,two,overlappingmargin)) +end + +function commands.doifpositionsonsamepageelse(list,page) + doifelse(onsamepage(list)) +end + +function commands.doifpositionsonthispageelse(list) + doifelse(onsamepage(list,tostring(tex.count.realpageno))) +end + +function commands.doifelsepositionsused() + doifelse(next(collected)) +end + +commands.markcolumnbox = jobpositions.markcolumnbox +commands.markregionbox = jobpositions.markregionbox + +-- statistics (at least for the moment, when testing) + +statistics.register("positions", function() + local total = nofregular + nofusedregions + nofmissingregions + if total > 0 then + return format("%s collected, %s regulars, %s regions, %s unresolved regions", + total, nofregular, nofusedregions, nofmissingregions) + else + return nil + end +end) diff --git a/tex/context/base/attr-col.lua b/tex/context/base/attr-col.lua index 7c6b7909b..473bf8a74 100644 --- a/tex/context/base/attr-col.lua +++ b/tex/context/base/attr-col.lua @@ -1,538 +1,538 @@ -if not modules then modules = { } end modules ['attr-col'] = { - version = 1.001, - comment = "companion to attr-col.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this module is being reconstructed and code will move to other places --- we can also do the nsnone via a metatable and then also se index 0 - --- list could as well refer to the tables (instead of numbers that --- index into another table) .. depends on what we need - -local type = type -local format = string.format -local concat = table.concat -local min, max, floor = math.min, math.max, math.floor - -local attributes, nodes, utilities, logs, backends, storage = attributes, nodes, utilities, logs, backends, storage -local commands, context, interfaces = commands, context, interfaces -local tex = tex - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex - -local report_attributes = logs.reporter("attributes","colors") -local report_colors = logs.reporter("colors","support") -local report_transparencies = logs.reporter("transparencies","support") - --- todo: document this but first reimplement this as it reflects the early --- days of luatex / mkiv and we have better ways now - --- nb: attributes: color etc is much slower than normal (marks + literals) but ... --- nb. too many "0 g"s - -local states = attributes.states -local tasks = nodes.tasks -local nodeinjections = backends.nodeinjections -local registrations = backends.registrations -local unsetvalue = attributes.unsetvalue - -local registerstorage = storage.register -local formatters = string.formatters - --- We can distinguish between rules and glyphs but it's not worth the trouble. A --- first implementation did that and while it saves a bit for glyphs and rules, it --- costs more resourses for transparencies. So why bother. - --- --- colors --- - --- we can also collapse the two attributes: n, n+1, n+2 and then --- at the tex end add 0, 1, 2, but this is not faster and less --- flexible (since sometimes we freeze color attribute values at --- the lua end of the game) --- --- we also need to store the colorvalues because we need then in mp --- --- This is a compromis between speed and simplicity. 
We used to store the --- values and data in one array, which made in neccessary to store the --- converters that need node constructor into strings and evaluate them --- at runtime (after reading from storage). Think of: --- --- colors.strings = colors.strings or { } --- --- if environment.initex then --- colors.strings[color] = "return colors." .. colorspace .. "(" .. concat({...},",") .. ")" --- end --- --- registerstorage("attributes/colors/data", colors.strings, "attributes.colors.data") -- evaluated --- --- We assume that only processcolors are defined in the format. - -attributes.colors = attributes.colors or { } -local colors = attributes.colors - -local a_color = attributes.private('color') -local a_selector = attributes.private('colormodel') - -colors.data = allocate() -colors.values = colors.values or { } -colors.registered = colors.registered or { } -colors.weightgray = true -colors.attribute = a_color -colors.selector = a_selector -colors.default = 1 -colors.main = nil -colors.triggering = true -colors.supported = true -colors.model = "all" - -local data = colors.data -local values = colors.values -local registered = colors.registered - -local numbers = attributes.numbers -local list = attributes.list - -registerstorage("attributes/colors/values", values, "attributes.colors.values") -registerstorage("attributes/colors/registered", registered, "attributes.colors.registered") - -local f_colors = { - rgb = formatters["r:%s:%s:%s"], - cmyk = formatters["c:%s:%s:%s:%s"], - gray = formatters["s:%s"], - spot = formatters["p:%s:%s:%s:%s"], -} - -local models = { - [interfaces.variables.none] = unsetvalue, - black = unsetvalue, - bw = unsetvalue, - all = 1, - gray = 2, - rgb = 3, - cmyk = 4, -} - -local function rgbtocmyk(r,g,b) -- we could reduce - return 1-r, 1-g, 1-b, 0 -end - -local function cmyktorgb(c,m,y,k) - return 1.0 - min(1.0,c+k), 1.0 - min(1.0,m+k), 1.0 - min(1.0,y+k) -end - -local function rgbtogray(r,g,b) - if colors.weightgray then - return .30*r + .59*g + .11*b - else - return r/3 + g/3 + b/3 - end -end - -local function cmyktogray(c,m,y,k) - return rgbtogray(cmyktorgb(c,m,y,k)) -end - --- not critical so not needed: --- --- local function cmyktogray(c,m,y,k) --- local r, g, b = 1.0 - min(1.0,c+k), 1.0 - min(1.0,m+k), 1.0 - min(1.0,y+k) --- if colors.weightgray then --- return .30*r + .59*g + .11*b --- else --- return r/3 + g/3 + b/3 --- end --- end - --- http://en.wikipedia.org/wiki/HSI_color_space --- http://nl.wikipedia.org/wiki/HSV_(kleurruimte) - -local function hsvtorgb(h,s,v) - -- h = h % 360 - local hd = h/60 - local hf = floor(hd) - local hi = hf % 6 - -- local f = hd - hi - local f = hd - hf - local p = v * (1 - s) - local q = v * (1 - f * s) - local t = v * (1 - (1 - f) * s) - if hi == 0 then - return v, t, p - elseif hi == 1 then - return q, v, p - elseif hi == 2 then - return p, v, t - elseif hi == 3 then - return p, q, v - elseif hi == 4 then - return t, p, v - elseif hi == 5 then - return v, p, q - else - print("error in hsv -> rgb",hi,h,s,v) - end -end - -local function rgbtohsv(r,g,b) - local offset, maximum, other_1, other_2 - if r >= g and r >= b then - offset, maximum, other_1, other_2 = 0, r, g, b - elseif g >= r and g >= b then - offset, maximum, other_1, other_2 = 2, g, b, r - else - offset, maximum, other_1, other_2 = 4, b, r, g - end - if maximum == 0 then - return 0, 0, 0 - end - local minimum = other_1 < other_2 and other_1 or other_2 - if maximum == minimum then - return 0, 0, maximum - end - local delta = maximum - minimum - return (offset + 
(other_1-other_2)/delta)*60, delta/maximum, maximum -end - -local function graytorgb(s) -- unweighted - return 1-s, 1-s, 1-s -end - -local function hsvtogray(h,s,v) - return rgb_to_gray(hsv_to_rgb(h,s,v)) -end - -local function graytohsv(s) - return 0, 0, s -end - -colors.rgbtocmyk = rgbtocmyk -colors.rgbtogray = rgbtogray -colors.cmyktorgb = cmyktorgb -colors.cmyktogray = cmyktogray -colors.rgbtohsv = rgbtohsv -colors.hsvtorgb = hsvtorgb -colors.hsvtogray = hsvtogray -colors.graytohsv = graytohsv - --- we can share some *data by using s, rgb and cmyk hashes, but --- normally the amount of colors is not that large; storing the --- components costs a bit of extra runtime, but we expect to gain --- some back because we have them at hand; the number indicates the --- default color space - -function colors.gray(s) - return { 2, s, s, s, s, 0, 0, 0, 1-s } -end - -function colors.rgb(r,g,b) - local s = rgbtogray(r,g,b) - local c, m, y, k = rgbtocmyk(r,g,b) - return { 3, s, r, g, b, c, m, y, k } -end - -function colors.cmyk(c,m,y,k) - local s = cmyktogray(c,m,y,k) - local r, g, b = cmyktorgb(c,m,y,k) - return { 4, s, r, g, b, c, m, y, k } -end - ---~ function colors.spot(parent,f,d,p) ---~ return { 5, .5, .5, .5, .5, 0, 0, 0, .5, parent, f, d, p } ---~ end - -function colors.spot(parent,f,d,p) - if type(p) == "number" then - local n = list[numbers.color][parent] -- hard coded ref to color number - if n then - local v = values[n] - if v then - -- the via cmyk hack is dirty, but it scales better - local c, m, y, k = p*v[6], p*v[7], p*v[8], p*v[8] - local r, g, b = cmyktorgb(c,m,y,k) - local s = cmyktogray(c,m,y,k) - return { 5, s, r, g, b, c, m, y, k, parent, f, d, p } - end - end - else - -- todo, multitone (maybe p should be a table) - end - return { 5, .5, .5, .5, .5, 0, 0, 0, .5, parent, f, d, p } -end - -local function graycolor(...) graycolor = nodeinjections.graycolor return graycolor(...) end -local function rgbcolor (...) rgbcolor = nodeinjections.rgbcolor return rgbcolor (...) end -local function cmykcolor(...) cmykcolor = nodeinjections.cmykcolor return cmykcolor(...) end -local function spotcolor(...) spotcolor = nodeinjections.spotcolor return spotcolor(...) 
end - -local function extender(colors,key) - if colors.supported and key == "none" then - local d = graycolor(0) - colors.none = d - return d - end -end - -local function reviver(data,n) - if colors.supported then - local v = values[n] - local d - if not v then - local gray = graycolor(0) - d = { gray, gray, gray, gray } - report_attributes("unable to revive color %a",n) - else - local model = colors.forcedmodel(v[1]) - if model == 2 then - local gray= graycolor(v[2]) - d = { gray, gray, gray, gray } - elseif model == 3 then - local gray, rgb, cmyk = graycolor(v[2]), rgbcolor(v[3],v[4],v[5]), cmykcolor(v[6],v[7],v[8],v[9]) - d = { rgb, gray, rgb, cmyk } - elseif model == 4 then - local gray, rgb, cmyk = graycolor(v[2]), rgbcolor(v[3],v[4],v[5]), cmykcolor(v[6],v[7],v[8],v[9]) - d = { cmyk, gray, rgb, cmyk } - elseif model == 5 then - local spot = spotcolor(v[10],v[11],v[12],v[13]) - -- d = { spot, gray, rgb, cmyk } - d = { spot, spot, spot, spot } - end - end - data[n] = d - return d - end -end - -setmetatableindex(colors, extender) -setmetatableindex(colors.data, reviver) - -function colors.filter(n) - return concat(data[n],":",5) -end - -function colors.setmodel(name,weightgray) - colors.model = name - colors.default = models[name] or 1 - colors.weightgray = weightgray ~= false - return colors.default -end - -function colors.register(name, colorspace, ...) -- passing 9 vars is faster (but not called that often) - local stamp = f_colors[colorspace](...) - local color = registered[stamp] - if not color then - color = #values + 1 - values[color] = colors[colorspace](...) - registered[stamp] = color - -- colors.reviver(color) - end - if name then - list[a_color][name] = color -- not grouped, so only global colors - end - return registered[stamp] -end - -function colors.value(id) - return values[id] -end - -attributes.colors.handler = nodes.installattributehandler { - name = "color", - namespace = colors, - initializer = states.initialize, - finalizer = states.finalize, - processor = states.selective, - resolver = function() return colors.main end, -} - -function colors.enable(value) - if value == false or not colors.supported then - tasks.disableaction("shipouts","attributes.colors.handler") - else - tasks.enableaction("shipouts","attributes.colors.handler") - end -end - -function colors.forcesupport(value) -- can move to attr-div - colors.supported = value - report_colors("color is %ssupported",value and "" or "not ") - colors.enable(value) -end - --- transparencies - -local a_transparency = attributes.private('transparency') - -attributes.transparencies = attributes.transparencies or { } -local transparencies = attributes.transparencies -transparencies.registered = transparencies.registered or { } -transparencies.data = allocate() -transparencies.values = transparencies.values or { } -transparencies.triggering = true -transparencies.attribute = a_transparency -transparencies.supported = true - -local registered = transparencies.registered -- we could use a 2 dimensional table instead -local data = transparencies.data -local values = transparencies.values -local f_transparency = formatters["%s:%s"] - -registerstorage("attributes/transparencies/registered", registered, "attributes.transparencies.registered") -registerstorage("attributes/transparencies/values", values, "attributes.transparencies.values") - -local function inject_transparency(...) - inject_transparency = nodeinjections.transparency - return inject_transparency(...) -end - -local function register_transparency(...) 
- register_transparency = registrations.transparency - return register_transparency(...) -end - -function transparencies.register(name,a,t,force) -- name is irrelevant here (can even be nil) - -- Force needed here for metapost converter. We could always force - -- but then we'd end up with transparencies resources even if we - -- would not use transparencies (but define them only). This is - -- somewhat messy. - local stamp = f_transparency(a,t) - local n = registered[stamp] - if not n then - n = #values + 1 - values[n] = { a, t } - registered[stamp] = n - if force then - register_transparency(n,a,t) - end - elseif force and not data[n] then - register_transparency(n,a,t) - end - if name then - list[a_transparency][name] = n -- not grouped, so only global transparencies - end - return registered[stamp] -end - -local function extender(transparencies,key) - if colors.supported and key == "none" then - local d = inject_transparency(0) - transparencies.none = d - return d - end -end - -local function reviver(data,n) - if transparencies.supported then - local v = values[n] - local d - if not v then - d = inject_transparency(0) - else - d = inject_transparency(n) - register_transparency(n,v[1],v[2]) - end - data[n] = d - return d - else - return "" - end -end - -setmetatableindex(transparencies, extender) -setmetatableindex(transparencies.data, reviver) -- register if used - --- check if there is an identity - -function transparencies.value(id) - return values[id] -end - -attributes.transparencies.handler = nodes.installattributehandler { - name = "transparency", - namespace = transparencies, - initializer = states.initialize, - finalizer = states.finalize, - processor = states.process, -} - -function transparencies.enable(value) -- nil is enable - if value == false or not transparencies.supported then - tasks.disableaction("shipouts","attributes.transparencies.handler") - else - tasks.enableaction("shipouts","attributes.transparencies.handler") - end -end - -function transparencies.forcesupport(value) -- can move to attr-div - transparencies.supported = value - report_transparencies("transparency is %ssupported",value and "" or "not ") - transparencies.enable(value) -end - ---- colorintents: overprint / knockout - -attributes.colorintents = attributes.colorintents or { } -local colorintents = attributes.colorintents -colorintents.data = allocate() -- colorintents.data or { } -colorintents.attribute = attributes.private('colorintent') - -colorintents.registered = allocate { - overprint = 1, - knockout = 2, -} - -local data, registered = colorintents.data, colorintents.registered - -local function extender(colorintents,key) - if key == "none" then - local d = data[2] - colorintents.none = d - return d - end -end - -local function reviver(data,n) - if n == 1 then - local d = nodeinjections.overprint() -- called once - data[1] = d - return d - elseif n == 2 then - local d = nodeinjections.knockout() -- called once - data[2] = d - return d - end -end - -setmetatableindex(colorintents, extender) -setmetatableindex(colorintents.data, reviver) - -function colorintents.register(stamp) - return registered[stamp] or registered.overprint -end - -colorintents.handler = nodes.installattributehandler { - name = "colorintent", - namespace = colorintents, - initializer = states.initialize, - finalizer = states.finalize, - processor = states.process, -} - -function colorintents.enable() - tasks.enableaction("shipouts","attributes.colorintents.handler") -end - --- interface - -commands.enablecolor = colors.enable 
-commands.enabletransparency = transparencies.enable -commands.enablecolorintents = colorintents.enable - -function commands.registercolor (...) context(colors .register(...)) end -function commands.registertransparency(...) context(transparencies.register(...)) end -function commands.registercolorintent (...) context(colorintents .register(...)) end +if not modules then modules = { } end modules ['attr-col'] = { + version = 1.001, + comment = "companion to attr-col.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this module is being reconstructed and code will move to other places +-- we can also do the nsnone via a metatable and then also se index 0 + +-- list could as well refer to the tables (instead of numbers that +-- index into another table) .. depends on what we need + +local type = type +local format = string.format +local concat = table.concat +local min, max, floor = math.min, math.max, math.floor + +local attributes, nodes, utilities, logs, backends, storage = attributes, nodes, utilities, logs, backends, storage +local commands, context, interfaces = commands, context, interfaces +local tex = tex + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex + +local report_attributes = logs.reporter("attributes","colors") +local report_colors = logs.reporter("colors","support") +local report_transparencies = logs.reporter("transparencies","support") + +-- todo: document this but first reimplement this as it reflects the early +-- days of luatex / mkiv and we have better ways now + +-- nb: attributes: color etc is much slower than normal (marks + literals) but ... +-- nb. too many "0 g"s + +local states = attributes.states +local tasks = nodes.tasks +local nodeinjections = backends.nodeinjections +local registrations = backends.registrations +local unsetvalue = attributes.unsetvalue + +local registerstorage = storage.register +local formatters = string.formatters + +-- We can distinguish between rules and glyphs but it's not worth the trouble. A +-- first implementation did that and while it saves a bit for glyphs and rules, it +-- costs more resourses for transparencies. So why bother. + +-- +-- colors +-- + +-- we can also collapse the two attributes: n, n+1, n+2 and then +-- at the tex end add 0, 1, 2, but this is not faster and less +-- flexible (since sometimes we freeze color attribute values at +-- the lua end of the game) +-- +-- we also need to store the colorvalues because we need then in mp +-- +-- This is a compromis between speed and simplicity. We used to store the +-- values and data in one array, which made in neccessary to store the +-- converters that need node constructor into strings and evaluate them +-- at runtime (after reading from storage). Think of: +-- +-- colors.strings = colors.strings or { } +-- +-- if environment.initex then +-- colors.strings[color] = "return colors." .. colorspace .. "(" .. concat({...},",") .. ")" +-- end +-- +-- registerstorage("attributes/colors/data", colors.strings, "attributes.colors.data") -- evaluated +-- +-- We assume that only processcolors are defined in the format. 
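To make the compromise described in the comment above concrete, here is a rough, self-contained sketch of the stamp-based registration that the following code implements: one tuple per color that keeps the gray, rgb and cmyk components side by side, deduplicated by a string stamp. This is an editor's illustration only, not part of the patch: it uses plain string.format instead of string.formatters, skips node injections and storage registration, and the local names (register, rgbvalue) are placeholders rather than the module's API.

local values, registered = { }, { }

local function rgbtogray(r,g,b) return .30*r + .59*g + .11*b end  -- weighted gray, as in rgbtogray below
local function rgbtocmyk(r,g,b) return 1-r, 1-g, 1-b, 0 end       -- naive conversion, as in rgbtocmyk below

local function rgbvalue(r,g,b)                                    -- cf. colors.rgb below
    local s          = rgbtogray(r,g,b)
    local c, m, y, k = rgbtocmyk(r,g,b)
    return { 3, s, r, g, b, c, m, y, k }                          -- 3 = default color space is rgb
end

local function register(r,g,b)
    local stamp = string.format("r:%s:%s:%s",r,g,b)               -- dedup key, cf. f_colors.rgb
    local color = registered[stamp]
    if not color then
        color             = #values + 1
        values[color]     = rgbvalue(r,g,b)
        registered[stamp] = color
    end
    return color
end

print(register(1,0,0), register(1,0,0))                           -- same stamp, so both calls yield 1

In the real module the returned index is what colors.register stores per name in list[a_color], and the reviver later turns the stored tuple into backend node injections for the requested color model.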
+ +attributes.colors = attributes.colors or { } +local colors = attributes.colors + +local a_color = attributes.private('color') +local a_selector = attributes.private('colormodel') + +colors.data = allocate() +colors.values = colors.values or { } +colors.registered = colors.registered or { } +colors.weightgray = true +colors.attribute = a_color +colors.selector = a_selector +colors.default = 1 +colors.main = nil +colors.triggering = true +colors.supported = true +colors.model = "all" + +local data = colors.data +local values = colors.values +local registered = colors.registered + +local numbers = attributes.numbers +local list = attributes.list + +registerstorage("attributes/colors/values", values, "attributes.colors.values") +registerstorage("attributes/colors/registered", registered, "attributes.colors.registered") + +local f_colors = { + rgb = formatters["r:%s:%s:%s"], + cmyk = formatters["c:%s:%s:%s:%s"], + gray = formatters["s:%s"], + spot = formatters["p:%s:%s:%s:%s"], +} + +local models = { + [interfaces.variables.none] = unsetvalue, + black = unsetvalue, + bw = unsetvalue, + all = 1, + gray = 2, + rgb = 3, + cmyk = 4, +} + +local function rgbtocmyk(r,g,b) -- we could reduce + return 1-r, 1-g, 1-b, 0 +end + +local function cmyktorgb(c,m,y,k) + return 1.0 - min(1.0,c+k), 1.0 - min(1.0,m+k), 1.0 - min(1.0,y+k) +end + +local function rgbtogray(r,g,b) + if colors.weightgray then + return .30*r + .59*g + .11*b + else + return r/3 + g/3 + b/3 + end +end + +local function cmyktogray(c,m,y,k) + return rgbtogray(cmyktorgb(c,m,y,k)) +end + +-- not critical so not needed: +-- +-- local function cmyktogray(c,m,y,k) +-- local r, g, b = 1.0 - min(1.0,c+k), 1.0 - min(1.0,m+k), 1.0 - min(1.0,y+k) +-- if colors.weightgray then +-- return .30*r + .59*g + .11*b +-- else +-- return r/3 + g/3 + b/3 +-- end +-- end + +-- http://en.wikipedia.org/wiki/HSI_color_space +-- http://nl.wikipedia.org/wiki/HSV_(kleurruimte) + +local function hsvtorgb(h,s,v) + -- h = h % 360 + local hd = h/60 + local hf = floor(hd) + local hi = hf % 6 + -- local f = hd - hi + local f = hd - hf + local p = v * (1 - s) + local q = v * (1 - f * s) + local t = v * (1 - (1 - f) * s) + if hi == 0 then + return v, t, p + elseif hi == 1 then + return q, v, p + elseif hi == 2 then + return p, v, t + elseif hi == 3 then + return p, q, v + elseif hi == 4 then + return t, p, v + elseif hi == 5 then + return v, p, q + else + print("error in hsv -> rgb",hi,h,s,v) + end +end + +local function rgbtohsv(r,g,b) + local offset, maximum, other_1, other_2 + if r >= g and r >= b then + offset, maximum, other_1, other_2 = 0, r, g, b + elseif g >= r and g >= b then + offset, maximum, other_1, other_2 = 2, g, b, r + else + offset, maximum, other_1, other_2 = 4, b, r, g + end + if maximum == 0 then + return 0, 0, 0 + end + local minimum = other_1 < other_2 and other_1 or other_2 + if maximum == minimum then + return 0, 0, maximum + end + local delta = maximum - minimum + return (offset + (other_1-other_2)/delta)*60, delta/maximum, maximum +end + +local function graytorgb(s) -- unweighted + return 1-s, 1-s, 1-s +end + +local function hsvtogray(h,s,v) + return rgb_to_gray(hsv_to_rgb(h,s,v)) +end + +local function graytohsv(s) + return 0, 0, s +end + +colors.rgbtocmyk = rgbtocmyk +colors.rgbtogray = rgbtogray +colors.cmyktorgb = cmyktorgb +colors.cmyktogray = cmyktogray +colors.rgbtohsv = rgbtohsv +colors.hsvtorgb = hsvtorgb +colors.hsvtogray = hsvtogray +colors.graytohsv = graytohsv + +-- we can share some *data by using s, rgb and cmyk hashes, but +-- 
normally the amount of colors is not that large; storing the +-- components costs a bit of extra runtime, but we expect to gain +-- some back because we have them at hand; the number indicates the +-- default color space + +function colors.gray(s) + return { 2, s, s, s, s, 0, 0, 0, 1-s } +end + +function colors.rgb(r,g,b) + local s = rgbtogray(r,g,b) + local c, m, y, k = rgbtocmyk(r,g,b) + return { 3, s, r, g, b, c, m, y, k } +end + +function colors.cmyk(c,m,y,k) + local s = cmyktogray(c,m,y,k) + local r, g, b = cmyktorgb(c,m,y,k) + return { 4, s, r, g, b, c, m, y, k } +end + +--~ function colors.spot(parent,f,d,p) +--~ return { 5, .5, .5, .5, .5, 0, 0, 0, .5, parent, f, d, p } +--~ end + +function colors.spot(parent,f,d,p) + if type(p) == "number" then + local n = list[numbers.color][parent] -- hard coded ref to color number + if n then + local v = values[n] + if v then + -- the via cmyk hack is dirty, but it scales better + local c, m, y, k = p*v[6], p*v[7], p*v[8], p*v[8] + local r, g, b = cmyktorgb(c,m,y,k) + local s = cmyktogray(c,m,y,k) + return { 5, s, r, g, b, c, m, y, k, parent, f, d, p } + end + end + else + -- todo, multitone (maybe p should be a table) + end + return { 5, .5, .5, .5, .5, 0, 0, 0, .5, parent, f, d, p } +end + +local function graycolor(...) graycolor = nodeinjections.graycolor return graycolor(...) end +local function rgbcolor (...) rgbcolor = nodeinjections.rgbcolor return rgbcolor (...) end +local function cmykcolor(...) cmykcolor = nodeinjections.cmykcolor return cmykcolor(...) end +local function spotcolor(...) spotcolor = nodeinjections.spotcolor return spotcolor(...) end + +local function extender(colors,key) + if colors.supported and key == "none" then + local d = graycolor(0) + colors.none = d + return d + end +end + +local function reviver(data,n) + if colors.supported then + local v = values[n] + local d + if not v then + local gray = graycolor(0) + d = { gray, gray, gray, gray } + report_attributes("unable to revive color %a",n) + else + local model = colors.forcedmodel(v[1]) + if model == 2 then + local gray= graycolor(v[2]) + d = { gray, gray, gray, gray } + elseif model == 3 then + local gray, rgb, cmyk = graycolor(v[2]), rgbcolor(v[3],v[4],v[5]), cmykcolor(v[6],v[7],v[8],v[9]) + d = { rgb, gray, rgb, cmyk } + elseif model == 4 then + local gray, rgb, cmyk = graycolor(v[2]), rgbcolor(v[3],v[4],v[5]), cmykcolor(v[6],v[7],v[8],v[9]) + d = { cmyk, gray, rgb, cmyk } + elseif model == 5 then + local spot = spotcolor(v[10],v[11],v[12],v[13]) + -- d = { spot, gray, rgb, cmyk } + d = { spot, spot, spot, spot } + end + end + data[n] = d + return d + end +end + +setmetatableindex(colors, extender) +setmetatableindex(colors.data, reviver) + +function colors.filter(n) + return concat(data[n],":",5) +end + +function colors.setmodel(name,weightgray) + colors.model = name + colors.default = models[name] or 1 + colors.weightgray = weightgray ~= false + return colors.default +end + +function colors.register(name, colorspace, ...) -- passing 9 vars is faster (but not called that often) + local stamp = f_colors[colorspace](...) + local color = registered[stamp] + if not color then + color = #values + 1 + values[color] = colors[colorspace](...) 
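    -- Hedged illustration, not from this patch: a call like
    --
    --   colors.register("darkred","rgb",.5,0,0)
    --
    -- produces the stamp "r:0.5:0:0" and stores { 3, .15, .5, 0, 0, .5, 1, 1, 0 },
    -- that is, model 3 (rgb) plus the gray, rgb and cmyk components computed by
    -- colors.rgb above (with the default weighted gray).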
+ registered[stamp] = color + -- colors.reviver(color) + end + if name then + list[a_color][name] = color -- not grouped, so only global colors + end + return registered[stamp] +end + +function colors.value(id) + return values[id] +end + +attributes.colors.handler = nodes.installattributehandler { + name = "color", + namespace = colors, + initializer = states.initialize, + finalizer = states.finalize, + processor = states.selective, + resolver = function() return colors.main end, +} + +function colors.enable(value) + if value == false or not colors.supported then + tasks.disableaction("shipouts","attributes.colors.handler") + else + tasks.enableaction("shipouts","attributes.colors.handler") + end +end + +function colors.forcesupport(value) -- can move to attr-div + colors.supported = value + report_colors("color is %ssupported",value and "" or "not ") + colors.enable(value) +end + +-- transparencies + +local a_transparency = attributes.private('transparency') + +attributes.transparencies = attributes.transparencies or { } +local transparencies = attributes.transparencies +transparencies.registered = transparencies.registered or { } +transparencies.data = allocate() +transparencies.values = transparencies.values or { } +transparencies.triggering = true +transparencies.attribute = a_transparency +transparencies.supported = true + +local registered = transparencies.registered -- we could use a 2 dimensional table instead +local data = transparencies.data +local values = transparencies.values +local f_transparency = formatters["%s:%s"] + +registerstorage("attributes/transparencies/registered", registered, "attributes.transparencies.registered") +registerstorage("attributes/transparencies/values", values, "attributes.transparencies.values") + +local function inject_transparency(...) + inject_transparency = nodeinjections.transparency + return inject_transparency(...) +end + +local function register_transparency(...) + register_transparency = registrations.transparency + return register_transparency(...) +end + +function transparencies.register(name,a,t,force) -- name is irrelevant here (can even be nil) + -- Force needed here for metapost converter. We could always force + -- but then we'd end up with transparencies resources even if we + -- would not use transparencies (but define them only). This is + -- somewhat messy. 
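    -- Hedged illustration, not from this patch: a call along the lines of
    --
    --   transparencies.register(nil,1,.5,true)
    --
    -- registers a transparency with alternative 1 and value .5 under the stamp "1:0.5"
    -- and, because force is true, also registers the backend resource right away,
    -- which is what the MetaPost converter depends on.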
+ local stamp = f_transparency(a,t) + local n = registered[stamp] + if not n then + n = #values + 1 + values[n] = { a, t } + registered[stamp] = n + if force then + register_transparency(n,a,t) + end + elseif force and not data[n] then + register_transparency(n,a,t) + end + if name then + list[a_transparency][name] = n -- not grouped, so only global transparencies + end + return registered[stamp] +end + +local function extender(transparencies,key) + if colors.supported and key == "none" then + local d = inject_transparency(0) + transparencies.none = d + return d + end +end + +local function reviver(data,n) + if transparencies.supported then + local v = values[n] + local d + if not v then + d = inject_transparency(0) + else + d = inject_transparency(n) + register_transparency(n,v[1],v[2]) + end + data[n] = d + return d + else + return "" + end +end + +setmetatableindex(transparencies, extender) +setmetatableindex(transparencies.data, reviver) -- register if used + +-- check if there is an identity + +function transparencies.value(id) + return values[id] +end + +attributes.transparencies.handler = nodes.installattributehandler { + name = "transparency", + namespace = transparencies, + initializer = states.initialize, + finalizer = states.finalize, + processor = states.process, +} + +function transparencies.enable(value) -- nil is enable + if value == false or not transparencies.supported then + tasks.disableaction("shipouts","attributes.transparencies.handler") + else + tasks.enableaction("shipouts","attributes.transparencies.handler") + end +end + +function transparencies.forcesupport(value) -- can move to attr-div + transparencies.supported = value + report_transparencies("transparency is %ssupported",value and "" or "not ") + transparencies.enable(value) +end + +--- colorintents: overprint / knockout + +attributes.colorintents = attributes.colorintents or { } +local colorintents = attributes.colorintents +colorintents.data = allocate() -- colorintents.data or { } +colorintents.attribute = attributes.private('colorintent') + +colorintents.registered = allocate { + overprint = 1, + knockout = 2, +} + +local data, registered = colorintents.data, colorintents.registered + +local function extender(colorintents,key) + if key == "none" then + local d = data[2] + colorintents.none = d + return d + end +end + +local function reviver(data,n) + if n == 1 then + local d = nodeinjections.overprint() -- called once + data[1] = d + return d + elseif n == 2 then + local d = nodeinjections.knockout() -- called once + data[2] = d + return d + end +end + +setmetatableindex(colorintents, extender) +setmetatableindex(colorintents.data, reviver) + +function colorintents.register(stamp) + return registered[stamp] or registered.overprint +end + +colorintents.handler = nodes.installattributehandler { + name = "colorintent", + namespace = colorintents, + initializer = states.initialize, + finalizer = states.finalize, + processor = states.process, +} + +function colorintents.enable() + tasks.enableaction("shipouts","attributes.colorintents.handler") +end + +-- interface + +commands.enablecolor = colors.enable +commands.enabletransparency = transparencies.enable +commands.enablecolorintents = colorintents.enable + +function commands.registercolor (...) context(colors .register(...)) end +function commands.registertransparency(...) context(transparencies.register(...)) end +function commands.registercolorintent (...) 
context(colorintents .register(...)) end diff --git a/tex/context/base/attr-eff.lua b/tex/context/base/attr-eff.lua index 4dce5419a..b0a987747 100644 --- a/tex/context/base/attr-eff.lua +++ b/tex/context/base/attr-eff.lua @@ -1,111 +1,111 @@ -if not modules then modules = { } end modules ['attr-eff'] = { - version = 1.001, - comment = "companion to attr-eff.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local commands, interfaces = commands, interfaces -local attributes, nodes, backends, utilities = attributes, nodes, backends, utilities -local tex = tex - -local states = attributes.states -local tasks = nodes.tasks -local nodeinjections = backends.nodeinjections -local settexattribute = tex.setattribute -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex -local formatters = string.formatters - -local variables = interfaces.variables -local v_normal = variables.normal - -attributes.effects = attributes.effects or { } -local effects = attributes.effects - -local a_effect = attributes.private('effect') - -effects.data = allocate() -effects.values = effects.values or { } -effects.registered = effects.registered or { } -effects.attribute = a_effect - -local data = effects.data -local registered = effects.registered -local values = effects.values - -local f_stamp = formatters["%s:%s:%s"] - -storage.register("attributes/effects/registered", registered, "attributes.effects.registered") -storage.register("attributes/effects/values", values, "attributes.effects.values") - --- valid effects: normal inner outer both hidden (stretch,rulethickness,effect) - -local function effect(...) effect = nodeinjections.effect return effect(...) 
end - -local function extender(effects,key) - if key == "none" then - local d = effect(0,0,0) - effects.none = d - return d - end -end - -local function reviver(data,n) - local e = values[n] -- we could nil values[n] now but hardly needed - local d = effect(e[1],e[2],e[3]) - data[n] = d - return d -end - -setmetatableindex(effects, extender) -setmetatableindex(effects.data, reviver) - -effects.handler = nodes.installattributehandler { - name = "effect", - namespace = effects, - initializer = states.initialize, - finalizer = states.finalize, - processor = states.process, -} - -local function register(specification) - local alternative, stretch, rulethickness - if specification then - alternative = specification.alternative or v_normal - stretch = specification.stretch or 0 - rulethickness = specification.rulethickness or 0 - else - alternative = v_normal - stretch = 0 - rulethickness = 0 - end - local stamp = f_stamp(alternative,stretch,rulethickness) - local n = registered[stamp] - if not n then - n = #values + 1 - values[n] = { alternative, stretch, rulethickness } - registered[stamp] = n - end - return n -end - -local function enable() - tasks.enableaction("shipouts","attributes.effects.handler") -end - -effects.register = register -effects.enable = enable - --- interface - -local enabled = false - -function commands.triggereffect(specification) - if not enabled then - enable() - enabled = true - end - settexattribute(a_effect,register(specification)) -end +if not modules then modules = { } end modules ['attr-eff'] = { + version = 1.001, + comment = "companion to attr-eff.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local commands, interfaces = commands, interfaces +local attributes, nodes, backends, utilities = attributes, nodes, backends, utilities +local tex = tex + +local states = attributes.states +local tasks = nodes.tasks +local nodeinjections = backends.nodeinjections +local settexattribute = tex.setattribute +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex +local formatters = string.formatters + +local variables = interfaces.variables +local v_normal = variables.normal + +attributes.effects = attributes.effects or { } +local effects = attributes.effects + +local a_effect = attributes.private('effect') + +effects.data = allocate() +effects.values = effects.values or { } +effects.registered = effects.registered or { } +effects.attribute = a_effect + +local data = effects.data +local registered = effects.registered +local values = effects.values + +local f_stamp = formatters["%s:%s:%s"] + +storage.register("attributes/effects/registered", registered, "attributes.effects.registered") +storage.register("attributes/effects/values", values, "attributes.effects.values") + +-- valid effects: normal inner outer both hidden (stretch,rulethickness,effect) + +local function effect(...) effect = nodeinjections.effect return effect(...) 
end + +local function extender(effects,key) + if key == "none" then + local d = effect(0,0,0) + effects.none = d + return d + end +end + +local function reviver(data,n) + local e = values[n] -- we could nil values[n] now but hardly needed + local d = effect(e[1],e[2],e[3]) + data[n] = d + return d +end + +setmetatableindex(effects, extender) +setmetatableindex(effects.data, reviver) + +effects.handler = nodes.installattributehandler { + name = "effect", + namespace = effects, + initializer = states.initialize, + finalizer = states.finalize, + processor = states.process, +} + +local function register(specification) + local alternative, stretch, rulethickness + if specification then + alternative = specification.alternative or v_normal + stretch = specification.stretch or 0 + rulethickness = specification.rulethickness or 0 + else + alternative = v_normal + stretch = 0 + rulethickness = 0 + end + local stamp = f_stamp(alternative,stretch,rulethickness) + local n = registered[stamp] + if not n then + n = #values + 1 + values[n] = { alternative, stretch, rulethickness } + registered[stamp] = n + end + return n +end + +local function enable() + tasks.enableaction("shipouts","attributes.effects.handler") +end + +effects.register = register +effects.enable = enable + +-- interface + +local enabled = false + +function commands.triggereffect(specification) + if not enabled then + enable() + enabled = true + end + settexattribute(a_effect,register(specification)) +end diff --git a/tex/context/base/attr-ini.lua b/tex/context/base/attr-ini.lua index 206a86d79..cb8eecf77 100644 --- a/tex/context/base/attr-ini.lua +++ b/tex/context/base/attr-ini.lua @@ -1,167 +1,167 @@ -if not modules then modules = { } end modules ['attr-ini'] = { - version = 1.001, - comment = "companion to attr-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local commands, context, nodes, storage = commands, context, nodes, storage - -local next, type = next, type - ---[[ldx-- -

We start with a registration system for attributes so that we can use the -symbolic names later on.

---ldx]]-- - -attributes = attributes or { } -local attributes = attributes - -local sharedstorage = storage.shared - -attributes.names = attributes.names or { } -attributes.numbers = attributes.numbers or { } -attributes.list = attributes.list or { } -attributes.states = attributes.states or { } -attributes.handlers = attributes.handlers or { } -attributes.unsetvalue = -0x7FFFFFFF - -local names = attributes.names -local numbers = attributes.numbers -local list = attributes.list - -storage.register("attributes/names", names, "attributes.names") -storage.register("attributes/numbers", numbers, "attributes.numbers") -storage.register("attributes/list", list, "attributes.list") - -function attributes.define(name,number) -- at the tex end - if not numbers[name] then - numbers[name] = number - names[number] = name - list[number] = { } - end -end - ---[[ldx-- -

We reserve this one as we really want it to be always set (faster).

---ldx]]-- - -names[0], numbers["fontdynamic"] = "fontdynamic", 0 - ---[[ldx-- -

We can use the attributes in the range 127-255 (outside user space). These -are only used when no attribute is set at the \TEX\ end which normally -happens in ConTeXt.

---ldx]]-- - -sharedstorage.attributes_last_private = sharedstorage.attributes_last_private or 127 - --- to be considered (so that we can use an array access): --- --- local private = { } attributes.private = private --- --- setmetatable(private, { --- __index = function(t,name) --- local number = sharedstorage.attributes_last_private --- if number < 1023 then -- tex.count.minallocatedattribute - 1 --- number = number + 1 --- sharedstorage.attributes_last_private = number --- end --- numbers[name], names[number], list[number] = number, name, { } --- private[name] = number --- return number --- end, --- __call = function(t,name) --- return t[name] --- end --- } ) - -function attributes.private(name) -- at the lua end (hidden from user) - local number = numbers[name] - if not number then - local last = sharedstorage.attributes_last_private - if last < 1023 then -- tex.count.minallocatedattribute - 1 - last = last + 1 - sharedstorage.attributes_last_private = last - else - report_attribute("no more room for private attributes") - os.exit() - end - number = last - numbers[name], names[number], list[number] = number, name, { } - end - return number -end - --- tracers - -local report_attribute = logs.reporter("attributes") - -local function showlist(what,list) - if list then - local a = list.next - local i = 0 - while a do - local number, value = a.number, a.value - i = i + 1 - report_attribute("%S %2i: attribute %3i, value %4i, name %a",what,i,number,value,names[number]) - a = a.next - end - end -end - -function attributes.showcurrent() - showlist("current",node.current_attr()) -end - -function attributes.ofnode(n) - showlist(n,n.attr) -end - --- interface - -commands.defineattribute = attributes.define -commands.showattributes = attributes.showcurrent - -function commands.getprivateattribute(name) - context(attributes.private(name)) -end - --- rather special - -local store = { } - -function commands.savecurrentattributes(name) - name = name or "" - local n = node.current_attr() - n = n and n.next - local t = { } - while n do - t[n.number] = n.value - n = n.next - end - store[name] = { - attr = t, - font = font.current(), - } -end - -function commands.restorecurrentattributes(name) - name = name or "" - local t = store[name] - if t then - local attr = t.attr - local font = t.font - if attr then - for k, v in next, attr do - tex.attribute[k] = v - end - end - if font then - -- tex.font = font - context.getvalue(fonts.hashes.csnames[font]) -- we don't have a direct way yet (will discuss it with taco) - end - end - -- store[name] = nil -end +if not modules then modules = { } end modules ['attr-ini'] = { + version = 1.001, + comment = "companion to attr-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local commands, context, nodes, storage = commands, context, nodes, storage + +local next, type = next, type + +--[[ldx-- +

We start with a registration system for attributes so that we can use the +symbolic names later on.

+--ldx]]-- + +attributes = attributes or { } +local attributes = attributes + +local sharedstorage = storage.shared + +attributes.names = attributes.names or { } +attributes.numbers = attributes.numbers or { } +attributes.list = attributes.list or { } +attributes.states = attributes.states or { } +attributes.handlers = attributes.handlers or { } +attributes.unsetvalue = -0x7FFFFFFF + +local names = attributes.names +local numbers = attributes.numbers +local list = attributes.list + +storage.register("attributes/names", names, "attributes.names") +storage.register("attributes/numbers", numbers, "attributes.numbers") +storage.register("attributes/list", list, "attributes.list") + +function attributes.define(name,number) -- at the tex end + if not numbers[name] then + numbers[name] = number + names[number] = name + list[number] = { } + end +end + +--[[ldx-- +

We reserve this one as we really want it to be always set (faster).

+--ldx]]-- + +names[0], numbers["fontdynamic"] = "fontdynamic", 0 + +--[[ldx-- +

We can use the attributes in the range 127-255 (outside user space). These +are only used when no attribute is set at the \TEX\ end which normally +happens in ConTeXt.

+--ldx]]-- + +sharedstorage.attributes_last_private = sharedstorage.attributes_last_private or 127 + +-- to be considered (so that we can use an array access): +-- +-- local private = { } attributes.private = private +-- +-- setmetatable(private, { +-- __index = function(t,name) +-- local number = sharedstorage.attributes_last_private +-- if number < 1023 then -- tex.count.minallocatedattribute - 1 +-- number = number + 1 +-- sharedstorage.attributes_last_private = number +-- end +-- numbers[name], names[number], list[number] = number, name, { } +-- private[name] = number +-- return number +-- end, +-- __call = function(t,name) +-- return t[name] +-- end +-- } ) + +function attributes.private(name) -- at the lua end (hidden from user) + local number = numbers[name] + if not number then + local last = sharedstorage.attributes_last_private + if last < 1023 then -- tex.count.minallocatedattribute - 1 + last = last + 1 + sharedstorage.attributes_last_private = last + else + report_attribute("no more room for private attributes") + os.exit() + end + number = last + numbers[name], names[number], list[number] = number, name, { } + end + return number +end + +-- tracers + +local report_attribute = logs.reporter("attributes") + +local function showlist(what,list) + if list then + local a = list.next + local i = 0 + while a do + local number, value = a.number, a.value + i = i + 1 + report_attribute("%S %2i: attribute %3i, value %4i, name %a",what,i,number,value,names[number]) + a = a.next + end + end +end + +function attributes.showcurrent() + showlist("current",node.current_attr()) +end + +function attributes.ofnode(n) + showlist(n,n.attr) +end + +-- interface + +commands.defineattribute = attributes.define +commands.showattributes = attributes.showcurrent + +function commands.getprivateattribute(name) + context(attributes.private(name)) +end + +-- rather special + +local store = { } + +function commands.savecurrentattributes(name) + name = name or "" + local n = node.current_attr() + n = n and n.next + local t = { } + while n do + t[n.number] = n.value + n = n.next + end + store[name] = { + attr = t, + font = font.current(), + } +end + +function commands.restorecurrentattributes(name) + name = name or "" + local t = store[name] + if t then + local attr = t.attr + local font = t.font + if attr then + for k, v in next, attr do + tex.attribute[k] = v + end + end + if font then + -- tex.font = font + context.getvalue(fonts.hashes.csnames[font]) -- we don't have a direct way yet (will discuss it with taco) + end + end + -- store[name] = nil +end diff --git a/tex/context/base/attr-lay.lua b/tex/context/base/attr-lay.lua index 4bcc70b0c..60907bdf3 100644 --- a/tex/context/base/attr-lay.lua +++ b/tex/context/base/attr-lay.lua @@ -1,253 +1,253 @@ -if not modules then modules = { } end modules ['attr-lay'] = { - version = 1.001, - comment = "companion to attr-lay.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- layers (ugly code, due to no grouping and such); currently we use exclusive layers --- but when we need it stacked layers might show up too; the next function based --- approach can be replaced by static (metatable driven) resolvers - --- maybe use backends.registrations here too - -local type = type -local insert, remove = table.insert, table.remove - -local attributes, nodes, utilities, logs, backends = attributes, nodes, utilities, logs, backends -local commands, context, 
interfaces = commands, context, interfaces -local tex = tex - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex -local formatters = string.formatters - -local report_viewerlayers = logs.reporter("viewerlayers") - --- todo: document this but first reimplement this as it reflects the early --- days of luatex / mkiv and we have better ways now - --- nb: attributes: color etc is much slower than normal (marks + literals) but ... --- nb. too many "0 g"s --- nb: more local tables - -attributes.viewerlayers = attributes.viewerlayers or { } -local viewerlayers = attributes.viewerlayers - -local variables = interfaces.variables -local v_local = variables["local"] -local v_global = variables["global"] - -local a_viewerlayer = attributes.private("viewerlayer") - -viewerlayers = viewerlayers or { } -viewerlayers.data = allocate() -viewerlayers.registered = viewerlayers.registered or { } -viewerlayers.values = viewerlayers.values or { } -viewerlayers.scopes = viewerlayers.scopes or { } -viewerlayers.listwise = allocate() -viewerlayers.attribute = a_viewerlayer -viewerlayers.supported = true -viewerlayers.hasorder = true - -local states = attributes.states -local tasks = nodes.tasks -local nodeinjections = backends.nodeinjections -local codeinjections = backends.codeinjections - -local texsetattribute = tex.setattribute -local texgetattribute = tex.getattribute -local texsettokenlist = tex.settoks -local unsetvalue = attributes.unsetvalue - -local nodepool = nodes.pool - -local data = viewerlayers.data -local values = viewerlayers.values -local listwise = viewerlayers.listwise -local registered = viewerlayers.registered -local scopes = viewerlayers.scopes - -local f_stamp = formatters["%s"] - -storage.register("attributes/viewerlayers/registered", registered, "attributes.viewerlayers.registered") -storage.register("attributes/viewerlayers/values", values, "attributes.viewerlayers.values") -storage.register("attributes/viewerlayers/scopes", scopes, "attributes.viewerlayers.scopes") - -local layerstacker = utilities.stacker.new("layers") -- experiment - -layerstacker.mode = "stack" -layerstacker.unset = attributes.unsetvalue - -viewerlayers.resolve_begin = layerstacker.resolve_begin -viewerlayers.resolve_step = layerstacker.resolve_step -viewerlayers.resolve_end = layerstacker.resolve_end - -function commands.cleanuplayers() - layerstacker.clean() - -- todo -end - --- stacked - -local function startlayer(...) startlayer = nodeinjections.startlayer return startlayer(...) end -local function stoplayer (...) stoplayer = nodeinjections.stoplayer return stoplayer (...) end - -local function extender(viewerlayers,key) - if viewerlayers.supported and key == "none" then - local d = stoplayer() - viewerlayers.none = d - return d - end -end - -local function reviver(data,n) - if viewerlayers.supported then - local v = values[n] - if v then - local d = startlayer(v) - data[n] = d - return d - else - report_viewerlayers("error: unknown reference %a",tostring(n)) - end - end -end - -setmetatableindex(viewerlayers,extender) -setmetatableindex(viewerlayers.data,reviver) - --- !!!! TEST CODE !!!! - -layerstacker.start = function(...) local f = nodeinjections.startstackedlayer layerstacker.start = f return f(...) end -layerstacker.stop = function(...) local f = nodeinjections.stopstackedlayer layerstacker.stop = f return f(...) end -layerstacker.change = function(...) local f = nodeinjections.changestackedlayer layerstacker.change = f return f(...) 
end - -local function initializer(...) - return states.initialize(...) -end - -attributes.viewerlayers.handler = nodes.installattributehandler { - name = "viewerlayer", - namespace = viewerlayers, - initializer = initializer, - finalizer = states.finalize, - -- processor = states.stacked, - processor = states.stacker, -} - -local stack, enabled, global = { }, false, false - -function viewerlayers.enable(value) - if value == false or not viewerlayers.supported then - if enabled then - tasks.disableaction("shipouts","attributes.viewerlayers.handler") - end - enabled = false - else - if not enabled then - tasks.enableaction("shipouts","attributes.viewerlayers.handler") - end - enabled = true - end -end - -function viewerlayers.forcesupport(value) - viewerlayers.supported = value - report_viewerlayers("viewerlayers are %ssupported",value and "" or "not ") - viewerlayers.enable(value) -end - -local function register(name,lw) -- if not inimode redefine data[n] in first call - if not enabled then - viewerlayers.enable(true) - end - local stamp = f_stamp(name) - local n = registered[stamp] - if not n then - n = #values + 1 - values[n] = name - registered[stamp] = n - listwise[n] = lw or false -- lw forces a used - end - return registered[stamp] -- == n -end - -viewerlayers.register = register - -function viewerlayers.setfeatures(hasorder) - viewerlayers.hasorder = hasorder -end - -local usestacker = true -- new, experimental - -function viewerlayers.start(name) - local a - if usestacker then - a = layerstacker.push(register(name) or unsetvalue) - else - insert(stack,texgetattribute(a_viewerlayer)) - a = register(name) or unsetvalue - end - if global or scopes[name] == v_global then - scopes[a] = v_global -- messy but we don't know the attributes yet - texsetattribute("global",a_viewerlayer,a) - else - texsetattribute(a_viewerlayer,a) - end - texsettokenlist("currentviewerlayertoks",name) -end - -function viewerlayers.stop() - local a - if usestacker then - a = layerstacker.pop() - else - a = remove(stack) - end - if not a then - -- error - elseif a >= 0 then - if global or scopes[a] == v_global then - texsetattribute("global",a_viewerlayer,a) - else - texsetattribute(a_viewerlayer,a) - end - texsettokenlist("currentviewerlayertoks",values[a] or "") - else - if global or scopes[a] == v_global then - texsetattribute("global",a_viewerlayer,unsetvalue) - else - texsetattribute(a_viewerlayer,unsetvalue) - end - texsettokenlist("currentviewerlayertoks","") - end -end - -function viewerlayers.define(settings) - local tag = settings.tag - if not tag or tag == "" then - -- error - elseif not scopes[tag] then -- prevent duplicates - local title = settings.title - if not title or title == "" then - settings.title = tag - end - scopes[tag] = settings.scope or v_local - codeinjections.defineviewerlayer(settings) - end -end - -commands.defineviewerlayer = viewerlayers.define -commands.startviewerlayer = viewerlayers.start -commands.stopviewerlayer = viewerlayers.stop - -function commands.definedviewerlayer(settings) - viewerlayers.define(settings) - context(register(settings.tag,true)) -- true forces a use -end - -function commands.registeredviewerlayer(name) - context(register(name,true)) -- true forces a use -end +if not modules then modules = { } end modules ['attr-lay'] = { + version = 1.001, + comment = "companion to attr-lay.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- layers (ugly 
code, due to no grouping and such); currently we use exclusive layers +-- but when we need it stacked layers might show up too; the next function based +-- approach can be replaced by static (metatable driven) resolvers + +-- maybe use backends.registrations here too + +local type = type +local insert, remove = table.insert, table.remove + +local attributes, nodes, utilities, logs, backends = attributes, nodes, utilities, logs, backends +local commands, context, interfaces = commands, context, interfaces +local tex = tex + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex +local formatters = string.formatters + +local report_viewerlayers = logs.reporter("viewerlayers") + +-- todo: document this but first reimplement this as it reflects the early +-- days of luatex / mkiv and we have better ways now + +-- nb: attributes: color etc is much slower than normal (marks + literals) but ... +-- nb. too many "0 g"s +-- nb: more local tables + +attributes.viewerlayers = attributes.viewerlayers or { } +local viewerlayers = attributes.viewerlayers + +local variables = interfaces.variables +local v_local = variables["local"] +local v_global = variables["global"] + +local a_viewerlayer = attributes.private("viewerlayer") + +viewerlayers = viewerlayers or { } +viewerlayers.data = allocate() +viewerlayers.registered = viewerlayers.registered or { } +viewerlayers.values = viewerlayers.values or { } +viewerlayers.scopes = viewerlayers.scopes or { } +viewerlayers.listwise = allocate() +viewerlayers.attribute = a_viewerlayer +viewerlayers.supported = true +viewerlayers.hasorder = true + +local states = attributes.states +local tasks = nodes.tasks +local nodeinjections = backends.nodeinjections +local codeinjections = backends.codeinjections + +local texsetattribute = tex.setattribute +local texgetattribute = tex.getattribute +local texsettokenlist = tex.settoks +local unsetvalue = attributes.unsetvalue + +local nodepool = nodes.pool + +local data = viewerlayers.data +local values = viewerlayers.values +local listwise = viewerlayers.listwise +local registered = viewerlayers.registered +local scopes = viewerlayers.scopes + +local f_stamp = formatters["%s"] + +storage.register("attributes/viewerlayers/registered", registered, "attributes.viewerlayers.registered") +storage.register("attributes/viewerlayers/values", values, "attributes.viewerlayers.values") +storage.register("attributes/viewerlayers/scopes", scopes, "attributes.viewerlayers.scopes") + +local layerstacker = utilities.stacker.new("layers") -- experiment + +layerstacker.mode = "stack" +layerstacker.unset = attributes.unsetvalue + +viewerlayers.resolve_begin = layerstacker.resolve_begin +viewerlayers.resolve_step = layerstacker.resolve_step +viewerlayers.resolve_end = layerstacker.resolve_end + +function commands.cleanuplayers() + layerstacker.clean() + -- todo +end + +-- stacked + +local function startlayer(...) startlayer = nodeinjections.startlayer return startlayer(...) end +local function stoplayer (...) stoplayer = nodeinjections.stoplayer return stoplayer (...) 
end + +local function extender(viewerlayers,key) + if viewerlayers.supported and key == "none" then + local d = stoplayer() + viewerlayers.none = d + return d + end +end + +local function reviver(data,n) + if viewerlayers.supported then + local v = values[n] + if v then + local d = startlayer(v) + data[n] = d + return d + else + report_viewerlayers("error: unknown reference %a",tostring(n)) + end + end +end + +setmetatableindex(viewerlayers,extender) +setmetatableindex(viewerlayers.data,reviver) + +-- !!!! TEST CODE !!!! + +layerstacker.start = function(...) local f = nodeinjections.startstackedlayer layerstacker.start = f return f(...) end +layerstacker.stop = function(...) local f = nodeinjections.stopstackedlayer layerstacker.stop = f return f(...) end +layerstacker.change = function(...) local f = nodeinjections.changestackedlayer layerstacker.change = f return f(...) end + +local function initializer(...) + return states.initialize(...) +end + +attributes.viewerlayers.handler = nodes.installattributehandler { + name = "viewerlayer", + namespace = viewerlayers, + initializer = initializer, + finalizer = states.finalize, + -- processor = states.stacked, + processor = states.stacker, +} + +local stack, enabled, global = { }, false, false + +function viewerlayers.enable(value) + if value == false or not viewerlayers.supported then + if enabled then + tasks.disableaction("shipouts","attributes.viewerlayers.handler") + end + enabled = false + else + if not enabled then + tasks.enableaction("shipouts","attributes.viewerlayers.handler") + end + enabled = true + end +end + +function viewerlayers.forcesupport(value) + viewerlayers.supported = value + report_viewerlayers("viewerlayers are %ssupported",value and "" or "not ") + viewerlayers.enable(value) +end + +local function register(name,lw) -- if not inimode redefine data[n] in first call + if not enabled then + viewerlayers.enable(true) + end + local stamp = f_stamp(name) + local n = registered[stamp] + if not n then + n = #values + 1 + values[n] = name + registered[stamp] = n + listwise[n] = lw or false -- lw forces a used + end + return registered[stamp] -- == n +end + +viewerlayers.register = register + +function viewerlayers.setfeatures(hasorder) + viewerlayers.hasorder = hasorder +end + +local usestacker = true -- new, experimental + +function viewerlayers.start(name) + local a + if usestacker then + a = layerstacker.push(register(name) or unsetvalue) + else + insert(stack,texgetattribute(a_viewerlayer)) + a = register(name) or unsetvalue + end + if global or scopes[name] == v_global then + scopes[a] = v_global -- messy but we don't know the attributes yet + texsetattribute("global",a_viewerlayer,a) + else + texsetattribute(a_viewerlayer,a) + end + texsettokenlist("currentviewerlayertoks",name) +end + +function viewerlayers.stop() + local a + if usestacker then + a = layerstacker.pop() + else + a = remove(stack) + end + if not a then + -- error + elseif a >= 0 then + if global or scopes[a] == v_global then + texsetattribute("global",a_viewerlayer,a) + else + texsetattribute(a_viewerlayer,a) + end + texsettokenlist("currentviewerlayertoks",values[a] or "") + else + if global or scopes[a] == v_global then + texsetattribute("global",a_viewerlayer,unsetvalue) + else + texsetattribute(a_viewerlayer,unsetvalue) + end + texsettokenlist("currentviewerlayertoks","") + end +end + +function viewerlayers.define(settings) + local tag = settings.tag + if not tag or tag == "" then + -- error + elseif not scopes[tag] then -- prevent duplicates + local 
title = settings.title + if not title or title == "" then + settings.title = tag + end + scopes[tag] = settings.scope or v_local + codeinjections.defineviewerlayer(settings) + end +end + +commands.defineviewerlayer = viewerlayers.define +commands.startviewerlayer = viewerlayers.start +commands.stopviewerlayer = viewerlayers.stop + +function commands.definedviewerlayer(settings) + viewerlayers.define(settings) + context(register(settings.tag,true)) -- true forces a use +end + +function commands.registeredviewerlayer(name) + context(register(name,true)) -- true forces a use +end diff --git a/tex/context/base/attr-mkr.lua b/tex/context/base/attr-mkr.lua index 976598fa0..87a8e2015 100644 --- a/tex/context/base/attr-mkr.lua +++ b/tex/context/base/attr-mkr.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['attr-mkr'] = { - version = 1.001, - comment = "companion to attr-mkr.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local markers = nodes.markers or { } -nodes.markers = markers - -local cache = { } -local numbers = attributes.numbers -local a_unknown = attributes.private("marker:unknown") - -table.setmetatableindex(cache,function(t,k) - local k = "marker:" .. k - local v = numbers[k] or a_unknown - t[k] = v - return v -end) - -function markers.get(n,name) - local a = cache[name] - return a and n[a] or nil -end +if not modules then modules = { } end modules ['attr-mkr'] = { + version = 1.001, + comment = "companion to attr-mkr.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local markers = nodes.markers or { } +nodes.markers = markers + +local cache = { } +local numbers = attributes.numbers +local a_unknown = attributes.private("marker:unknown") + +table.setmetatableindex(cache,function(t,k) + local k = "marker:" .. 
k + local v = numbers[k] or a_unknown + t[k] = v + return v +end) + +function markers.get(n,name) + local a = cache[name] + return a and n[a] or nil +end diff --git a/tex/context/base/attr-neg.lua b/tex/context/base/attr-neg.lua index c32cec956..d37490f11 100644 --- a/tex/context/base/attr-neg.lua +++ b/tex/context/base/attr-neg.lua @@ -1,98 +1,98 @@ -if not modules then modules = { } end modules ['attr-neg'] = { - version = 1.001, - comment = "companion to attr-neg.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this module is being reconstructed and code will move to other places --- we can also do the nsnone via a metatable and then also se index 0 - -local format = string.format - -local attributes, nodes, utilities, logs, backends = attributes, nodes, utilities, logs, backends -local commands, context, interfaces = commands, context, interfaces -local tex = tex - -local states = attributes.states -local tasks = nodes.tasks -local nodeinjections = backends.nodeinjections -local settexattribute = tex.setattribute -local variables = interfaces.variables -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex - ---- negative / positive - -attributes.negatives = attributes.negatives or { } -local negatives = attributes.negatives - -local a_negative = attributes.private("negative") - -local v_none = interfaces.variables.none - -negatives.data = allocate() -negatives.attribute = a_negative - -negatives.registered = allocate { - [variables.positive] = 1, - [variables.negative] = 2, -} - -local data = negatives.data -local registered = negatives.registered - -local function extender(negatives,key) - if key == "none" then -- v_none then - local d = data[1] - negatives.none = d - return d - end -end - -local function reviver(data,n) - if n == 1 then - local d = nodeinjections.positive() -- called once - data[1] = d - return d - elseif n == 2 then - local d = nodeinjections.negative() -- called once - data[2] = d - return d - end -end - -setmetatableindex(negatives, extender) -setmetatableindex(negatives.data, reviver) - -negatives.handler = nodes.installattributehandler { - name = "negative", - namespace = negatives, - initializer = states.initialize, - finalizer = states.finalize, - processor = states.process, -} - -local function register(stamp) - return registered[stamp] or registered.positive -end - -local function enable() - tasks.enableaction("shipouts","attributes.negatives.handler") -end - -negatives.register = register -negatives.enable = enable - --- interface - -local enabled = false - -function commands.triggernegative(stamp) - if not enabled then - enable() - enabled = true - end - settexattribute(a_negative,register(stamp)) -end +if not modules then modules = { } end modules ['attr-neg'] = { + version = 1.001, + comment = "companion to attr-neg.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this module is being reconstructed and code will move to other places +-- we can also do the nsnone via a metatable and then also se index 0 + +local format = string.format + +local attributes, nodes, utilities, logs, backends = attributes, nodes, utilities, logs, backends +local commands, context, interfaces = commands, context, interfaces +local tex = tex + +local states = attributes.states +local tasks = nodes.tasks 
+local nodeinjections = backends.nodeinjections +local settexattribute = tex.setattribute +local variables = interfaces.variables +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex + +--- negative / positive + +attributes.negatives = attributes.negatives or { } +local negatives = attributes.negatives + +local a_negative = attributes.private("negative") + +local v_none = interfaces.variables.none + +negatives.data = allocate() +negatives.attribute = a_negative + +negatives.registered = allocate { + [variables.positive] = 1, + [variables.negative] = 2, +} + +local data = negatives.data +local registered = negatives.registered + +local function extender(negatives,key) + if key == "none" then -- v_none then + local d = data[1] + negatives.none = d + return d + end +end + +local function reviver(data,n) + if n == 1 then + local d = nodeinjections.positive() -- called once + data[1] = d + return d + elseif n == 2 then + local d = nodeinjections.negative() -- called once + data[2] = d + return d + end +end + +setmetatableindex(negatives, extender) +setmetatableindex(negatives.data, reviver) + +negatives.handler = nodes.installattributehandler { + name = "negative", + namespace = negatives, + initializer = states.initialize, + finalizer = states.finalize, + processor = states.process, +} + +local function register(stamp) + return registered[stamp] or registered.positive +end + +local function enable() + tasks.enableaction("shipouts","attributes.negatives.handler") +end + +negatives.register = register +negatives.enable = enable + +-- interface + +local enabled = false + +function commands.triggernegative(stamp) + if not enabled then + enable() + enabled = true + end + settexattribute(a_negative,register(stamp)) +end diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua index 4d219a18b..4d61e64c7 100644 --- a/tex/context/base/back-exp.lua +++ b/tex/context/base/back-exp.lua @@ -1,2411 +1,2411 @@ -if not modules then modules = { } end modules ['back-exp'] = { - version = 1.001, - comment = "companion to back-exp.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- language -> only mainlanguage, local languages should happen through start/stoplanguage --- tocs/registers -> maybe add a stripper (i.e. just don't flush entries in final tree) --- footnotes -> css 3 --- bodyfont -> in styles.css --- delimited -> left/right string (needs marking) - --- Because we need to look ahead we now always build a tree (this was optional in --- the beginning). The extra overhead in the frontend is neglectable. --- --- We can optimize the code ... currently the overhead is some 10% for xml + html so --- there is no hurry. 
- --- todo: move critital formatters out of functions --- todo: delay loading (apart from basic tag stuff) - -local next, type = next, type -local format, match, concat, rep, sub, gsub, gmatch, find = string.format, string.match, table.concat, string.rep, string.sub, string.gsub, string.gmatch, string.find -local validstring = string.valid -local lpegmatch = lpeg.match -local utfchar, utfbyte, utfvalues = utf.char, utf.byte, utf.values -local insert, remove = table.insert, table.remove -local fromunicode16 = fonts.mappings.fromunicode16 -local sortedhash = table.sortedhash -local formatters = string.formatters - -local trace_export = false trackers.register ("export.trace", function(v) trace_export = v end) -local trace_spacing = false trackers.register ("export.trace.spacing", function(v) trace_spacing = v end) -local less_state = false directives.register("export.lessstate", function(v) less_state = v end) -local show_comment = true directives.register("export.comment", function(v) show_comment = v end) - --- maybe we will also support these: --- --- local css_hyphens = false directives.register("export.css.hyphens", function(v) css_hyphens = v end) --- local css_textalign = false directives.register("export.css.textalign", function(v) css_textalign = v end) --- local css_bodyfontsize = false directives.register("export.css.bodyfontsize", function(v) css_bodyfontsize = v end) --- local css_textwidth = false directives.register("export.css.textwidth", function(v) css_textwidth = v end) - -local report_export = logs.reporter("backend","export") - -local nodes = nodes -local attributes = attributes -local variables = interfaces.variables - -local settings_to_array = utilities.parsers.settings_to_array - -local setmetatableindex = table.setmetatableindex -local tasks = nodes.tasks -local fontchar = fonts.hashes.characters -local fontquads = fonts.hashes.quads -local languagenames = languages.numbers - -local nodecodes = nodes.nodecodes -local skipcodes = nodes.skipcodes -local whatsitcodes = nodes.whatsitcodes -local listcodes = nodes.listcodes - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glyph_code = nodecodes.glyph -local glue_code = nodecodes.glue -local kern_code = nodecodes.kern -local disc_code = nodecodes.disc -local insert_code = nodecodes.insert -local whatsit_code = nodecodes.whatsit -local refximage_code = whatsitcodes.pdfrefximage -local localpar_code = whatsitcodes.localpar - -local userskip_code = skipcodes.userskip -local rightskip_code = skipcodes.rightskip -local parfillskip_code = skipcodes.parfillskip -local spaceskip_code = skipcodes.spaceskip -local xspaceskip_code = skipcodes.xspaceskip - -local line_code = listcodes.line - -local a_characters = attributes.private('characters') -local a_exportstatus = attributes.private('exportstatus') - -local a_tagged = attributes.private('tagged') -local a_taggedpar = attributes.private("taggedpar") -local a_image = attributes.private('image') -local a_reference = attributes.private('reference') - -local a_textblock = attributes.private("textblock") - -local traverse_id = node.traverse_id -local traverse_nodes = node.traverse -local slide_nodelist = node.slide -local texattribute = tex.attribute -local texdimen = tex.dimen -local texcount = tex.count -local locate_node = nodes.locate - -local references = structures.references -local structurestags = structures.tags -local taglist = structurestags.taglist -local properties = structurestags.properties -local userdata = structurestags.userdata -- 
might be combines with taglist -local tagdata = structurestags.data -local tagmetadata = structurestags.metadata -local detailedtag = structurestags.detailedtag - -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming - --- todo: more locals (and optimize) - -local exportversion = "0.30" - -local nofcurrentcontent = 0 -- so we don't free (less garbage collection) -local currentcontent = { } -local currentnesting = nil -local currentattribute = nil -local last = nil -local currentparagraph = nil - -local noftextblocks = 0 - -local attributehash = { } -- to be considered: set the values at the tex end -local hyphencode = 0xAD -local hyphen = utfchar(0xAD) -- todo: also emdash etc -local colonsplitter = lpeg.splitat(":") -local dashsplitter = lpeg.splitat("-") -local threshold = 65536 -local indexing = false -local keephyphens = false - -local finetuning = { } - -local treestack = { } -local nesting = { } -local currentdepth = 0 - -local tree = { data = { }, fulltag == "root" } -- root -local treeroot = tree -local treehash = { } -local extras = { } -local checks = { } -local finalizers = { } -local nofbreaks = 0 -local used = { } -local exporting = false -local restart = false -local specialspaces = { [0x20] = " " } -- for conversion -local somespace = { [0x20] = true, [" "] = true } -- for testing -local entities = { ["&"] = "&", [">"] = ">", ["<"] = "<" } -local attribentities = { ["&"] = "&", [">"] = ">", ["<"] = "<", ['"'] = "quot;" } - -local entityremapper = utf.remapper(entities) - -local alignmapping = { - flushright = "right", - middle = "center", - flushleft = "left", -} - -local numbertoallign = { - [0] = "justify", ["0"] = "justify", [variables.normal ] = "justify", - [1] = "right", ["1"] = "right", [variables.flushright] = "right", - [2] = "center", ["2"] = "center", [variables.middle ] = "center", - [3] = "left", ["3"] = "left", [variables.flushleft ] = "left", -} - -local defaultnature = "mixed" -- "inline" - -setmetatableindex(used, function(t,k) - if k then - local v = { } - t[k] = v - return v - end -end) - -setmetatableindex(specialspaces, function(t,k) - local v = utfchar(k) - t[k] = v - entities[v] = formatters["&#x%X;"](k) - somespace[k] = true - somespace[v] = true - return v -end) - - -local namespaced = { - -- filled on -} - -local namespaces = { - msubsup = "m", - msub = "m", - msup = "m", - mn = "m", - mi = "m", - ms = "m", - mo = "m", - mtext = "m", - mrow = "m", - mfrac = "m", - mroot = "m", - msqrt = "m", - munderover = "m", - munder = "m", - mover = "m", - merror = "m", - math = "m", - mrow = "m", - mtable = "m", - mtr = "m", - mtd = "m", - mfenced = "m", - maction = "m", - mspace = "m", -} - -setmetatableindex(namespaced, function(t,k) - if k then - local namespace = namespaces[k] - local v = namespace and namespace .. ":" .. 
k or k - t[k] = v - return v - end -end) - -local function attribute(key,value) - if value and value ~= "" then - return formatters[' %s="%s"'](key,gsub(value,".",attribentities)) - else - return "" - end -end - --- local P, C, Cc = lpeg.P, lpeg.C, lpeg.Cc --- --- local dash, colon = P("-"), P(":") --- --- local precolon, predash, rest = P((1-colon)^1), P((1-dash )^1), P(1)^1 --- --- local tagsplitter = C(precolon) * colon * C(predash) * dash * C(rest) + --- C(predash) * dash * Cc(nil) * C(rest) - -local listdata = { } - -local function hashlistdata() - local c = structures.lists.collected - for i=1,#c do - local ci = c[i] - local tag = ci.references.tag - if tag then - local m = ci.metadata - listdata[m.kind .. ":" .. m.name .. "-" .. tag] = ci - end - end -end - -local spaces = utilities.strings.newrepeater(" ",-1) - -function structurestags.setattributehash(fulltag,key,value) -- public hash - if type(fulltag) == "number" then - fulltag = taglist[fulltag] - if fulltag then - fulltag = fulltag[#fulltag] - end - end - if fulltag then - local ah = attributehash[fulltag] -- could be metatable magic - if not ah then - ah = { } - attributehash[fulltag] = ah - end - ah[key] = value - end -end - - --- experiment: styles and images --- --- officially we should convert to bp but we round anyway - -local usedstyles = { } - --- /* padding : ; */ --- /* text-justify : inter-word ; */ - -local documenttemplate = [[ -document { - font-size : %s !important ; - max-width : %s !important ; - text-align : %s !important ; - hyphens : %s !important ; -} -]] - -local styletemplate = [[ -%s[detail='%s'] { - font-style : %s ; - font-variant : %s ; - font-weight : %s ; - font-family : %s ; - color : %s ; -}]] - -local function allusedstyles(xmlfile) - local result = { format("/* styles for file %s */",xmlfile) } - -- - local bodyfont = finetuning.bodyfont - local width = finetuning.width - local hyphen = finetuning.hyphen - local align = finetuning.align - -- - if not bodyfont or bodyfont == "" then - bodyfont = "12pt" - elseif type(bodyfont) == "number" then - bodyfont = number.todimen(bodyfont,"pt","%ipt") or "12pt" - end - if not width or width == "" then - width = "50em" - elseif type(width) == "number" then - width = number.todimen(width,"pt","%ipt") or "50em" - end - if hyphen == variables.yes then - hyphen = "manual" - else - hyphen = "inherited" - end - if align then - align = numbertoallign[align] - end - if not align then - align = hyphens and "justify" or "inherited" - end - -- - result[#result+1] = format(documenttemplate,bodyfont,width,align,hyphen) - -- - local colorspecification = xml.css.colorspecification - local fontspecification = xml.css.fontspecification - for element, details in sortedhash(usedstyles) do - for detail, data in sortedhash(details) do - local s = fontspecification(data.style) - local c = colorspecification(data.color) - result[#result+1] = formatters[styletemplate](element,detail, - s.style or "inherit", - s.variant or "inherit", - s.weight or "inherit", - s.family or "inherit", - c or "inherit") - end - end - return concat(result,"\n\n") -end - -local usedimages = { } - -local imagetemplate = [[ -%s[id="%s"] { - display : block ; - background-image : url(%s) ; - background-size : 100%% auto ; - background-repeat : no-repeat ; - width : %s ; - height : %s ; -}]] - -local function allusedimages(xmlfile) - local result = { format("/* images for file %s */",xmlfile) } - for element, details in sortedhash(usedimages) do - for detail, data in sortedhash(details) do - local name = 
data.name - if file.suffix(name) == "pdf" then - -- temp hack .. we will have a remapper - name = file.replacesuffix(name,"svg") - end - result[#result+1] = formatters[imagetemplate](element,detail,name,data.width,data.height) - end - end - return concat(result,"\n\n") -end - -local function uniqueusedimages() - local unique = { } - for element, details in next, usedimages do - for detail, data in next, details do - local name = data.name - if file.suffix(name) == "pdf" then - unique[file.replacesuffix(name,"svg")] = name - else - unique[name] = name - end - end - end - return unique -end - --- - -properties.vspace = { export = "break", nature = "display" } ------------------ = { export = "pagebreak", nature = "display" } - -local function makebreaklist(list) - nofbreaks = nofbreaks + 1 - local t = { } - if list then - for i=1,#list do - t[i] = list[i] - end - end - t[#t+1] = "break-" .. nofbreaks -- maybe no number - return t -end - -local breakattributes = { - type = "collapse" -} - -local function makebreaknode(attributes) -- maybe no fulltag - nofbreaks = nofbreaks + 1 - return { - tg = "break", - fulltag = "break-" .. nofbreaks, - n = nofbreaks, - element = "break", - nature = "display", - attributes = attributes or nil, - -- data = { }, -- not needed - -- attribute = 0, -- not needed - -- parnumber = 0, - } -end - -local fields = { "title", "subtitle", "author", "keywords" } - -local function checkdocument(root) - local data = root.data - if data then - for i=1,#data do - local di = data[i] - if di.content then - -- ok - elseif di.tg == "ignore" then - di.element = "" - checkdocument(di) - else - -- can't happen - end - end - end -end - -function extras.document(result,element,detail,n,fulltag,di) - result[#result+1] = format(" language=%q",languagenames[tex.count.mainlanguagenumber]) - if not less_state then - result[#result+1] = format(" file=%q",tex.jobname) - result[#result+1] = format(" date=%q",os.date()) - result[#result+1] = format(" context=%q",environment.version) - result[#result+1] = format(" version=%q",exportversion) - result[#result+1] = format(" xmlns:m=%q","http://www.w3.org/1998/Math/MathML") - local identity = interactions.general.getidentity() - for i=1,#fields do - local key = fields[i] - local value = identity[key] - if value and value ~= "" then - result[#result+1] = formatters[" %s=%q"](key,value) - end - end - end - checkdocument(di) -end - -local itemgroups = { } - -function structurestags.setitemgroup(current,packed,symbol) - itemgroups[detailedtag("itemgroup",current)] = { - packed = packed, - symbol = symbol, - } -end - -function extras.itemgroup(result,element,detail,n,fulltag,di) - local hash = itemgroups[fulltag] - if hash then - local v = hash.packed - if v then - result[#result+1] = " packed='yes'" - end - local v = hash.symbol - if v then - result[#result+1] = attribute("symbol",v) - end - end -end - -local synonyms = { } - -function structurestags.setsynonym(current,tag) - synonyms[detailedtag("synonym",current)] = tag -end - -function extras.synonym(result,element,detail,n,fulltag,di) - local tag = synonyms[fulltag] - if tag then - result[#result+1] = formatters[" tag='%s'"](tag) - end -end - -local sortings = { } - -function structurestags.setsorting(current,tag) - sortings[detailedtag("sorting",current)] = tag -end - -function extras.sorting(result,element,detail,n,fulltag,di) - local tag = sortings[fulltag] - if tag then - result[#result+1] = formatters[" tag='%s'"](tag) - end -end - -usedstyles.highlight = { } - -function 
structurestags.sethighlight(current,style,color) -- we assume global styles - usedstyles.highlight[current] = { - style = style, -- xml.css.fontspecification(style), - color = color, -- xml.css.colorspec(color), - } -end - -local descriptions = { } -local symbols = { } -local linked = { } - -function structurestags.setdescription(tag,n) - local nd = structures.notes.get(tag,n) -- todo: use listdata instead - if nd then - local references = nd.references - descriptions[references and references.internal] = detailedtag("description",tag) - end -end - -function structurestags.setdescriptionsymbol(tag,n) - local nd = structures.notes.get(tag,n) -- todo: use listdata instead - if nd then - local references = nd.references - symbols[references and references.internal] = detailedtag("descriptionsymbol",tag) - end -end - -function finalizers.descriptions(tree) - local n = 0 - for id, tag in next, descriptions do - local sym = symbols[id] - if sym then - n = n + 1 - linked[tag] = n - linked[sym] = n - end - end -end - -function extras.description(result,element,detail,n,fulltag,di) - local id = linked[fulltag] - if id then - result[#result+1] = formatters[" insert='%s'"](id) -- maybe just fulltag - end -end - -function extras.descriptionsymbol(result,element,detail,n,fulltag,di) - local id = linked[fulltag] - if id then - result[#result+1] = formatters[" insert='%s'"](id) - end -end - -usedimages.image = { } - -function structurestags.setfigure(name,page,width,height) - usedimages.image[detailedtag("image")] = { - name = name, - page = page, - width = number.todimen(width,"cm","%0.3fcm"), - height = number.todimen(height,"cm","%0.3fcm"), - } -end - -function extras.image(result,element,detail,n,fulltag,di) - local data = usedimages.image[fulltag] - if data then - result[#result+1] = attribute("name",data.name) - if tonumber(data.page) > 1 then - result[#result+1] = formatters[" page='%s'"](data.page) - end - result[#result+1] = formatters[" id='%s' width='%s' height='%s'"](fulltag,data.width,data.height) - end -end - -local combinations = { } - -function structurestags.setcombination(nx,ny) - combinations[detailedtag("combination")] = { - nx = nx, - ny = ny, - } -end - -function extras.combination(result,element,detail,n,fulltag,di) - local data = combinations[fulltag] - if data then - result[#result+1] = formatters[" nx='%s' ny='%s'"](data.nx,data.ny) - end -end - --- quite some code deals with exporting references -- - -local evaluators = { } -local specials = { } - -evaluators.inner = function(result,var) - local inner = var.inner - if inner then - result[#result+1] = attribute("location",inner) - end -end - -evaluators.outer = function(result,var) - local file, url = references.checkedfileorurl(var.outer,var.outer) - if url then - result[#result+1] = attribute("url",url) - elseif file then - result[#result+1] = attribute("file",file) - end -end - -evaluators["outer with inner"] = function(result,var) - local file = references.checkedfile(var.f) - if file then - result[#result+1] = attribute("file",file) - end - local inner = var.inner - if inner then - result[#result+1] = attribute("location",inner) - end -end - -evaluators.special = function(result,var) - local handler = specials[var.special] - if handler then - handler(result,var) - end -end - -evaluators["special outer with operation"] = evaluators.special -evaluators["special operation"] = evaluators.special -evaluators["special operation with arguments"] = evaluators.special - -function specials.url(result,var) - local url = 
references.checkedurl(var.operation) - if url then - result[#result+1] = attribute("url",url) - end -end - -function specials.file(result,var) - local file = references.checkedfile(var.operation) - if file then - result[#result+1] = attribute("file",file) - end -end - -function specials.fileorurl(result,var) - local file, url = references.checkedfileorurl(var.operation,var.operation) - if url then - result[#result+1] = attribute("url",url) - elseif file then - result[#result+1] = attribute("file",file) - end -end - -function specials.internal(result,var) - local internal = references.checkedurl(var.operation) - if internal then - result[#result+1] = formatters[" location='aut:%s'"](internal) - end -end - -local referencehash = { } - -local function adddestination(result,references) -- todo: specials -> exporters and then concat - if references then - local reference = references.reference - if reference and reference ~= "" then - local prefix = references.prefix - if prefix and prefix ~= "" then - result[#result+1] = formatters[" prefix='%s'"](prefix) - end - result[#result+1] = formatters[" destination='%s'"](reference) - for i=1,#references do - local r = references[i] - local e = evaluators[r.kind] - if e then - e(result,r) - end - end - end - end -end - -local function addreference(result,references) - if references then - local reference = references.reference - if reference and reference ~= "" then - local prefix = references.prefix - if prefix and prefix ~= "" then - result[#result+1] = formatters[" prefix='%s'"](prefix) - end - result[#result+1] = formatters[" reference='%s'"](reference) - end - local internal = references.internal - if internal and internal ~= "" then - result[#result+1] = formatters[" location='aut:%s'"](internal) - end - end -end - -function extras.link(result,element,detail,n,fulltag,di) - -- for instance in lists a link has nested elements and no own text - local reference = referencehash[fulltag] - if reference then - adddestination(result,structures.references.get(reference)) - return true - else - local data = di.data - if data then - for i=1,#data do - local di = data[i] - if di then - local fulltag = di.fulltag - if fulltag and extras.link(result,element,detail,n,fulltag,di) then - return true - end - end - end - end - end -end - --- no settings, as these are obscure ones - -local automathrows = true directives.register("backend.export.math.autorows", function(v) automathrows = v end) -local automathapply = true directives.register("backend.export.math.autoapply", function(v) automathapply = v end) -local automathnumber = true directives.register("backend.export.math.autonumber", function(v) automathnumber = v end) -local automathstrip = true directives.register("backend.export.math.autostrip", function(v) automathstrip = v end) - -local functions = mathematics.categories.functions - -local function collapse(di,i,data,ndata,detail,element) - local collapsing = di.data - if data then - di.element = element - di.detail = nil - i = i + 1 - while i <= ndata do - local dn = data[i] - if dn.detail == detail then - collapsing[#collapsing+1] = dn.data[1] - dn.skip = "ignore" - i = i + 1 - else - break - end - end - end - return i -end - -local function collapse_mn(di,i,data,ndata) - local collapsing = di.data - if data then - i = i + 1 - while i <= ndata do - local dn = data[i] - local tg = dn.tg - if tg == "mn" then - collapsing[#collapsing+1] = dn.data[1] - dn.skip = "ignore" - i = i + 1 - elseif tg == "mo" then - local d = dn.data[1] - if d == "." 
then - collapsing[#collapsing+1] = d - dn.skip = "ignore" - i = i + 1 - else - break - end - else - break - end - end - end - return i -end - --- maybe delay __i__ till we need it - -local apply_function = { - { - element = "mo", - -- comment = "apply function", - -- data = { utfchar(0x2061) }, - data = { "⁡" }, - nature = "mixed", - } -} - -local functioncontent = { } - -setmetatableindex(functioncontent,function(t,k) - local v = { { content = k } } - t[k] = v - return v -end) - -local function checkmath(root) -- we can provide utf.toentities as an option - local data = root.data - if data then - local ndata = #data - local roottg = root.tg - if roottg == "msubsup" then - local nucleus, superscript, subscript - for i=1,ndata do - local di = data[i] - if not di then - -- weird - elseif di.content then - -- text - elseif not nucleus then - nucleus = i - elseif not superscript then - superscript = i - elseif not subscript then - subscript = i - else - -- error - end - end - if superscript and subscript then - local sup, sub = data[superscript], data[subscript] - data[superscript], data[subscript] = sub, sup - -- sub.__o__, sup.__o__ = subscript, superscript - sub.__i__, sup.__i__ = superscript, subscript - end - elseif roottg == "mfenced" then - local new, n = { }, 0 - local attributes = { } - root.attributes = attributes - for i=1,ndata do - local di = data[i] - if not di then - -- weird - elseif di.content then - n = n + 1 - new[n] = di - else - local tg = di.tg - if tg == "mleft" then - attributes.left = tostring(di.data[1].data[1].content) - elseif tg == "mmiddle" then - attributes.middle = tostring(di.data[1].data[1].content) - elseif tg == "mright" then - attributes.right = tostring(di.data[1].data[1].content) - else - n = n + 1 - di.__i__ = n - new[n] = di - end - end - end - root.data = new - ndata = n - end - if ndata == 0 then - return - elseif ndata == 1 then - local d = data[1] - if not d then - return - elseif d.content then - return - elseif #root.data == 1 then - local tg = d.tg - if automathrows and roottg == "mrow" then - -- maybe just always ! 
check spec first - if tg == "mrow" or tg == "mfenced" or tg == "mfrac" or tg == "mroot" or tg == "msqrt"then - root.skip = "comment" - elseif tg == "mo" then - root.skip = "comment" - end - elseif roottg == "mo" then - if tg == "mo" then - root.skip = "comment" - end - end - end - end - local i = 1 - while i <= ndata do -- -- -- TOO MUCH NESTED CHECKING -- -- -- - local di = data[i] - if di and not di.content then - local tg = di.tg - local detail = di.detail - if tg == "math" then - -- di.element = "mrow" -- when properties - di.skip = "comment" - checkmath(di) - i = i + 1 - elseif tg == "mover" or tg == "munder" or tg == "munderover" then - if detail == "accent" then - di.attributes = { accent = "true" } - di.detail = nil - end - checkmath(di) - i = i + 1 - elseif tg == "mroot" then - if #di.data == 1 then - -- else firefox complains - di.element = "msqrt" - end - checkmath(di) - i = i + 1 - elseif tg == "break" then - di.skip = "comment" - i = i + 1 - elseif tg == "mrow" and detail then - di.detail = nil - checkmath(di) - di = { - element = "maction", - nature = "display", - attributes = { actiontype = detail }, - data = { di }, - n = 0, - } - data[i] = di - i = i + 1 - elseif detail then - -- no checkmath(di) here - local category = tonumber(detail) or 0 - if category == 1 then -- mo - i = collapse(di,i,data,ndata,detail,"mo") - elseif category == 2 then -- mi - i = collapse(di,i,data,ndata,detail,"mi") - elseif category == 3 then -- mn - i = collapse(di,i,data,ndata,detail,"mn") - elseif category == 4 then -- ms - i = collapse(di,i,data,ndata,detail,"ms") - elseif category >= 1000 then - local apply = category >= 2000 - if apply then - category = category - 1000 - end - if tg == "mi" then -- function - if roottg == "mrow" then - root.skip = "comment" - root.element = "function" - end - i = collapse(di,i,data,ndata,detail,"mi") - local tag = functions[category] - if tag then - di.data = functioncontent[tag] - end - if apply then - di.after = apply_function - elseif automathapply then -- make function - local following - if i <= ndata then - -- normally not the case - following = data[i] - else - local parent = di.__p__ -- == root - if parent.tg == "mrow" then - parent = parent.__p__ - end - local index = parent.__i__ - following = parent.data[index+1] - end - if following then - local tg = following.tg - if tg == "mrow" or tg == "mfenced" then -- we need to figure out the right condition - di.after = apply_function - end - end - end - else -- some problem - checkmath(di) - i = i + 1 - end - else - checkmath(di) - i = i + 1 - end - elseif automathnumber and tg == "mn" then - checkmath(di) - i = collapse_mn(di,i,data,ndata) - else - checkmath(di) - i = i + 1 - end - else -- can be string or boolean - if parenttg ~= "mtext" and di == " " then - data[i] = false - end - i = i + 1 - end - end - end -end - -function stripmath(di) - if not di then - -- - elseif di.content then - return di - else - local tg = di.tg - if tg == "mtext" or tg == "ms" then - return di - else - local data = di.data - local ndata = #data - local n = 0 - for i=1,ndata do - local di = data[i] - if di and not di.content then - di = stripmath(di) - end - if di then - local content = di.content - if not content then - n = n + 1 - di.__i__ = n - data[n] = di - elseif content == " " or content == "" then - -- skip - else - n = n + 1 - data[n] = di - end - end - end - for i=ndata,n+1,-1 do - data[i] = nil - end - if #data > 0 then - return di - end - end - end -end - -function checks.math(di) - local hash = 
attributehash[di.fulltag] - local mode = (hash and hash.mode) == "display" and "block" or "inline" - di.attributes = { - display = mode - } - -- can be option if needed: - if mode == "inline" then - di.nature = "mixed" -- else spacing problem (maybe inline) - else - di.nature = "display" - end - if automathstrip then - stripmath(di) - end - checkmath(di) -end - -local a, z, A, Z = 0x61, 0x7A, 0x41, 0x5A - -function extras.mi(result,element,detail,n,fulltag,di) -- check with content - local str = di.data[1].content - if str and sub(str,1,1) ~= "&" then -- hack but good enough (maybe gsub op eerste) - for v in utfvalues(str) do - if (v >= a and v <= z) or (v >= A and v <= Z) then - local a = di.attributes - if a then - a.mathvariant = "normal" - else - di.attributes = { mathvariant = "normal" } - end - end - end - end -end - -function extras.section(result,element,detail,n,fulltag,di) - local data = listdata[fulltag] - if data then - addreference(result,data.references) - return true - else - local data = di.data - if data then - for i=1,#data do - local di = data[i] - if di then - local ft = di.fulltag - if ft and extras.section(result,element,detail,n,ft,di) then - return true - end - end - end - end - end -end - -function extras.float(result,element,detail,n,fulltag,di) - local data = listdata[fulltag] - if data then - addreference(result,data.references) - return true - else - local data = di.data - if data then - for i=1,#data do - local di = data[i] - if di and extras.section(result,element,detail,n,di.fulltag,di) then - return true - end - end - end - end -end - -local tabledata = { } - -function structurestags.settablecell(rows,columns,align) - if align > 0 or rows > 1 or columns > 1 then - tabledata[detailedtag("tablecell")] = { - rows = rows, - columns = columns, - align = align, - } - end -end - -function extras.tablecell(result,element,detail,n,fulltag,di) - local hash = tabledata[fulltag] - if hash then - local v = hash.columns - if v and v > 1 then - result[#result+1] = formatters[" columns='%s'"](v) - end - local v = hash.rows - if v and v > 1 then - result[#result+1] = formatters[" rows='%s'"](v) - end - local v = hash.align - if not v or v == 0 then - -- normal - elseif v == 1 then -- use numbertoalign here - result[#result+1] = " align='flushright'" - elseif v == 2 then - result[#result+1] = " align='middle'" - elseif v == 3 then - result[#result+1] = " align='flushleft'" - end - end -end - -local tabulatedata = { } - -function structurestags.settabulatecell(align) - if align > 0 then - tabulatedata[detailedtag("tabulatecell")] = { - align = align, - } - end -end - -function extras.tabulate(result,element,detail,n,fulltag,di) - local data = di.data - for i=1,#data do - local di = data[i] - if di.tg == "tabulaterow" then - local did = di.data - local content = false - for i=1,#did do - local d = did[i].data - if d and #d > 0 and d[1].content then - content = true - break - end - end - if not content then - di.element = "" -- or simply remove - end - end - end -end - -function extras.tabulatecell(result,element,detail,n,fulltag,di) - local hash = tabulatedata[fulltag] - if hash then - local v = hash.align - if not v or v == 0 then - -- normal - elseif v == 1 then - result[#result+1] = " align='flushleft'" - elseif v == 2 then - result[#result+1] = " align='flushright'" - elseif v == 3 then - result[#result+1] = " align='middle'" - end - end -end - --- flusher - -local linedone = false -- can go ... 
we strip newlines anyway -local inlinedepth = 0 - --- todo: #result -> nofresult - -local function emptytag(result,element,nature,depth,di) -- currently only break but at some point - local a = di.attributes -- we might add detail etc - if a then -- happens seldom - if linedone then - result[#result+1] = formatters["%w<%s"](depth,namespaced[element]) - else - result[#result+1] = formatters["\n%w<%s"](depth,namespaced[element]) - end - for k, v in next, a do - result[#result+1] = formatters[" %s=%q"](k,v) - end - result[#result+1] = "/>\n" - else - if linedone then - result[#result+1] = formatters["%w<%s/>\n"](depth,namespaced[element]) - else - result[#result+1] = formatters["\n%w<%s/>\n"](depth,namespaced[element]) - end - end - linedone = false -end - -local function begintag(result,element,nature,depth,di,skip) - -- if needed we can use a local result with xresult - local detail = di.detail - local n = di.n - local fulltag = di.fulltag - local comment = di.comment - if nature == "inline" then - linedone = false - inlinedepth = inlinedepth + 1 - if show_comment and comment then - result[#result+1] = formatters[""](comment) - end - elseif nature == "mixed" then - if inlinedepth > 0 then - if show_comment and comment then - result[#result+1] = formatters[""](comment) - end - elseif linedone then - result[#result+1] = spaces[depth] - if show_comment and comment then - result[#result+1] = formatters[""](comment) - end - else - result[#result+1] = formatters["\n%w"](depth) - linedone = false - if show_comment and comment then - result[#result+1] = formatters["\n%w"](comment,depth) - end - end - inlinedepth = inlinedepth + 1 - else - if inlinedepth > 0 then - if show_comment and comment then - result[#result+1] = formatters[""](comment) - end - elseif linedone then - result[#result+1] = spaces[depth] - if show_comment and comment then - result[#result+1] = formatters[""](comment) - end - else - result[#result+1] = formatters["\n%w"](depth) -- can introduced extra line in mixed+mixed (filtered later on) - linedone = false - if show_comment and comment then - result[#result+1] = formatters["\n%w"](comment,depth) - end - end - end - if skip == "comment" then - if show_comment then - result[#result+1] = formatters[""](namespaced[element]) - end - elseif skip then - -- ignore - else - result[#result+1] = formatters["<%s"](namespaced[element]) - if detail then - result[#result+1] = formatters[" detail=%q"](detail) - end - if indexing and n then - result[#result+1] = formatters[" n=%q"](n) - end - local extra = extras[element] - if extra then - extra(result,element,detail,n,fulltag,di) - end - local u = userdata[fulltag] - if u then - for k, v in next, u do - result[#result+1] = formatters[" %s=%q"](k,v) - end - end - local a = di.attributes - if a then - for k, v in next, a do - result[#result+1] = formatters[" %s=%q"](k,v) - end - end - result[#result+1] = ">" - end - if inlinedepth > 0 then - elseif nature == "display" then - result[#result+1] = "\n" - linedone = true - end - used[element][detail or ""] = nature -- for template css - local metadata = tagmetadata[fulltag] - if metadata then - if not linedone then - result[#result+1] = "\n" - linedone = true - end - result[#result+1] = formatters["%w\n"](depth) - for k, v in table.sortedpairs(metadata) do - v = entityremapper(v) - result[#result+1] = formatters["%w%s\n"](depth+1,k,v) - end - result[#result+1] = formatters["%w\n"](depth) - end -end - -local function endtag(result,element,nature,depth,skip) - if nature == "display" then - if 
inlinedepth == 0 then - if not linedone then - result[#result+1] = "\n" - end - if skip == "comment" then - if show_comment then - result[#result+1] = formatters["%w\n"](depth,namespaced[element]) - end - elseif skip then - -- ignore - else - result[#result+1] = formatters["%w\n"](depth,namespaced[element]) - end - linedone = true - else - if skip == "comment" then - if show_comment then - result[#result+1] = formatters[""](namespaced[element]) - end - elseif skip then - -- ignore - else - result[#result+1] = formatters[""](namespaced[element]) - end - end - else - inlinedepth = inlinedepth - 1 - if skip == "comment" then - if show_comment then - result[#result+1] = formatters[""](namespaced[element]) - end - elseif skip then - -- ignore - else - result[#result+1] = formatters[""](namespaced[element]) - end - linedone = false - end -end - -local function flushtree(result,data,nature,depth) - depth = depth + 1 - local nofdata = #data - for i=1,nofdata do - local di = data[i] - if not di then -- hm, di can be string - -- whatever - elseif di.content then - -- already has breaks - local content = entityremapper(di.content) - if i == nofdata and sub(content,-1) == "\n" then -- move check - -- can be an end of line in par but can also be the last line - if trace_spacing then - result[#result+1] = formatters["%s"](di.parnumber or 0,sub(content,1,-2)) - else - result[#result+1] = sub(content,1,-2) - end - result[#result+1] = " " - else - if trace_spacing then - result[#result+1] = formatters["%s"](di.parnumber or 0,content) - else - result[#result+1] = content - end - end - linedone = false - elseif not di.collapsed then -- ignore collapsed data (is appended, reconstructed par) - local element = di.element - if not element then - -- skip - elseif element == "break" then -- or element == "pagebreak" - emptytag(result,element,nature,depth,di) - elseif element == "" or di.skip == "ignore" then - -- skip - else - if di.before then - flushtree(result,di.before,nature,depth) - end - local natu = di.nature - local skip = di.skip - if di.breaknode then - emptytag(result,"break","display",depth,di) - end - begintag(result,element,natu,depth,di,skip) - flushtree(result,di.data,natu,depth) - -- if sub(result[#result],-1) == " " and natu ~= "inline" then - -- result[#result] = sub(result[#result],1,-2) - -- end - endtag(result,element,natu,depth,skip) - if di.after then - flushtree(result,di.after,nature,depth) - end - end - end - end -end - -local function breaktree(tree,parent,parentelement) -- also removes double breaks - local data = tree.data - if data then - local nofdata = #data - local prevelement - local prevnature - local prevparnumber - local newdata = { } - local nofnewdata = 0 - for i=1,nofdata do - local di = data[i] - if not di then - -- skip - elseif di.content then - local parnumber = di.parnumber - if prevnature == "inline" and prevparnumber and prevparnumber ~= parnumber then - nofnewdata = nofnewdata + 1 - if trace_spacing then - newdata[nofnewdata] = makebreaknode { type = "a", p = prevparnumber, n = parnumber } - else - newdata[nofnewdata] = makebreaknode() - end - end - prevelement = nil - prevnature = "inline" - prevparnumber = parnumber - nofnewdata = nofnewdata + 1 - newdata[nofnewdata] = di - elseif not di.collapsed then - local element = di.element - if element == "break" then -- or element == "pagebreak" - if prevelement == "break" then - di.element = "" - end - prevelement = element - prevnature = "display" - elseif element == "" or di.skip == "ignore" then - -- skip - else - 
local nature = di.nature - local parnumber = di.parnumber - if prevnature == "inline" and nature == "inline" and prevparnumber and prevparnumber ~= parnumber then - nofnewdata = nofnewdata + 1 - if trace_spacing then - newdata[nofnewdata] = makebreaknode { type = "b", p = prevparnumber, n = parnumber } - else - newdata[nofnewdata] = makebreaknode() - end - end - prevnature = nature - prevparnumber = parnumber - prevelement = element - breaktree(di,tree,element) - end - nofnewdata = nofnewdata + 1 - newdata[nofnewdata] = di - else - local nature = di.nature - local parnumber = di.parnumber - if prevnature == "inline" and nature == "inline" and prevparnumber and prevparnumber ~= parnumber then - nofnewdata = nofnewdata + 1 - if trace_spacing then - newdata[nofnewdata] = makebreaknode { type = "c", p = prevparnumber, n = parnumber } - else - newdata[nofnewdata] = makebreaknode() - end - end - prevnature = nature - prevparnumber = parnumber - nofnewdata = nofnewdata + 1 - newdata[nofnewdata] = di - end - end - tree.data = newdata - end -end - --- also tabulaterow reconstruction .. maybe better as a checker --- i.e cell attribute - -local function collapsetree() - for tag, trees in next, treehash do - local d = trees[1].data - if d then - local nd = #d - if nd > 0 then - for i=2,#trees do - local currenttree = trees[i] - local currentdata = currenttree.data - local currentpar = currenttree.parnumber - local previouspar = trees[i-1].parnumber - currenttree.collapsed = true - -- is the next ok? - if previouspar == 0 or not (di and di.content) then - previouspar = nil -- no need anyway so no further testing needed - end - for j=1,#currentdata do - local cd = currentdata[j] - if not cd or cd == "" then - -- skip - elseif cd.content then - if not currentpar then - -- add space ? - elseif not previouspar then - -- add space ? 
- elseif currentpar ~= previouspar then - nd = nd + 1 - if trace_spacing then - d[nd] = makebreaknode { type = "d", p = previouspar, n = currentpar } - else - d[nd] = makebreaknode() - end - end - previouspar = currentpar - nd = nd + 1 - d[nd] = cd - else - nd = nd + 1 - d[nd] = cd - end - currentdata[j] = false - end - end - end - end - end -end - -local function finalizetree(tree) - for _, finalizer in next, finalizers do - finalizer(tree) - end -end - -local function indextree(tree) - local data = tree.data - if data then - local n, new = 0, { } - for i=1,#data do - local d = data[i] - if not d then - -- skip - elseif d.content then - n = n + 1 - new[n] = d - elseif not d.collapsed then - n = n + 1 - d.__i__ = n - d.__p__ = tree - indextree(d) - new[n] = d - end - end - tree.data = new - end -end - -local function checktree(tree) - local data = tree.data - if data then - for i=1,#data do - local d = data[i] - if type(d) == "table" then - local check = checks[d.tg] - if check then - check(d) - end - checktree(d) - end - end - end -end - --- collector code - -local function push(fulltag,depth) - local tag, n = lpegmatch(dashsplitter,fulltag) - local tg, detail = lpegmatch(colonsplitter,tag) - local element, nature - if detail then - local pd = properties[tag] - local pt = properties[tg] - element = pd and pd.export or pt and pt.export or tg - nature = pd and pd.nature or pt and pt.nature or defaultnature - else - local p = properties[tg] - element = p and p.export or tg - nature = p and p.nature or "inline" - end - local treedata = tree.data - local t = { - tg = tg, - fulltag = fulltag, - detail = detail, - n = tonumber(n), -- more efficient - element = element, - nature = nature, - data = { }, - attribute = currentattribute, - parnumber = currentparagraph, - } - treedata[#treedata+1] = t - currentdepth = currentdepth + 1 - nesting[currentdepth] = fulltag - treestack[currentdepth] = tree - if trace_export then - if detail and detail ~= "" then - report_export("%w<%s trigger=%a paragraph=%a index=%a detail=%a>",currentdepth-1,fulltag,currentattribute or 0,currentparagraph or 0,#treedata,detail) - else - report_export("%w<%s trigger=%a paragraph=%a index=%a>",currentdepth-1,fulltag,currentattribute or 0,currentparagraph or 0,#treedata) - end - end - tree = t - if tg == "break" then - -- no need for this - else - local h = treehash[fulltag] - if h then - h[#h+1] = t - else - treehash[fulltag] = { t } - end - end -end - -local function pop() - local top = nesting[currentdepth] - tree = treestack[currentdepth] - currentdepth = currentdepth - 1 - if trace_export then - if top then - report_export("%w",currentdepth,top) - else - report_export("",top) - end - end -end - -local function continueexport() - if nofcurrentcontent > 0 then - if trace_export then - report_export("%w",currentdepth) - end - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = " " -- pagebreak - end -end - -local function pushentry(current) - if current then - if restart then - continueexport() - restart = false - end - local newdepth = #current - local olddepth = currentdepth - if trace_export then - report_export("%w",currentdepth,olddepth,newdepth,current[newdepth]) - end - if olddepth <= 0 then - for i=1,newdepth do - push(current[i],i) - end - else - local difference - if olddepth < newdepth then - for i=1,olddepth do - if current[i] ~= nesting[i] then - difference = i - break - end - end - else - for i=1,newdepth do - if current[i] ~= nesting[i] then - difference = i - break - end - end - 
end - if difference then - for i=olddepth,difference,-1 do - pop() - end - for i=difference,newdepth do - push(current[i],i) - end - elseif newdepth > olddepth then - for i=olddepth+1,newdepth do - push(current[i],i) - end - elseif newdepth < olddepth then - for i=olddepth,newdepth,-1 do - pop() - end - elseif trace_export then - report_export("%w",currentdepth,newdepth,nesting[newdepth] or "?") - end - end - return olddepth, newdepth - end -end - -local function pushcontent(currentparagraph,newparagraph) - if nofcurrentcontent > 0 then - if currentparagraph then - if currentcontent[nofcurrentcontent] == "\n" then - if trace_export then - report_export("%w",currentdepth) - end - nofcurrentcontent = nofcurrentcontent - 1 - end - end - local content = concat(currentcontent,"",1,nofcurrentcontent) - if content == "" then - -- omit; when currentparagraph we could push, remove spaces, pop - elseif somespace[content] and currentparagraph then - -- omit; when currentparagraph we could push, remove spaces, pop - else - local olddepth, newdepth - local list = taglist[currentattribute] - if list then - olddepth, newdepth = pushentry(list) - end - local td = tree.data - local nd = #td - td[nd+1] = { parnumber = currentparagraph, content = content } - if trace_export then - report_export("%w",currentdepth,#content) - report_export("%w%s",currentdepth,(gsub(content,"\n","\\n"))) - report_export("%w",currentdepth) - end - if olddepth then - for i=newdepth-1,olddepth,-1 do - pop() - end - end - end - nofcurrentcontent = 0 - end - if currentparagraph then - pushentry(makebreaklist(currentnesting)) - if trace_export then - report_export("%w",currentdepth,currentparagraph,newparagraph) - end - end -end - -local function finishexport() - if trace_export then - report_export("%w",currentdepth) - end - if nofcurrentcontent > 0 then - if somespace[currentcontent[nofcurrentcontent]] then - if trace_export then - report_export("%w",currentdepth) - end - nofcurrentcontent = nofcurrentcontent - 1 - end - pushcontent() - end - for i=currentdepth,1,-1 do - pop() - end - currentcontent = { } -- we're nice and do a cleanup - if trace_export then - report_export("%w",currentdepth) - end -end - --- whatsit_code localpar_code - -local function collectresults(head,list) -- is last used (we also have currentattribute) - local p - for n in traverse_nodes(head) do - local id = n.id -- 14: image, 8: literal (mp) - if id == glyph_code then - local at = n[a_tagged] - if not at then - -- we need to tag the pagebody stuff as being valid skippable - -- - -- report_export("skipping character: %C (no attribute)",n.char) - else - -- we could add tonunicodes for ligatures (todo) - local components = n.components - if components then -- we loose data - collectresults(components,nil) - else - local c = n.char - if last ~= at then - local tl = taglist[at] - pushcontent() - currentnesting = tl - currentparagraph = n[a_taggedpar] - currentattribute = at - last = at - pushentry(currentnesting) - if trace_export then - report_export("%w",currentdepth,c,at) - end - -- We need to intercept this here; maybe I will also move this - -- to a regular setter at the tex end. 
- local r = n[a_reference] - if r then - referencehash[tl[#tl]] = r -- fulltag - end - -- - elseif last then - local ap = n[a_taggedpar] - if ap ~= currentparagraph then - pushcontent(currentparagraph,ap) - pushentry(currentnesting) - currentattribute = last - currentparagraph = ap - end - if trace_export then - report_export("%w",currentdepth,c,last) - end - else - if trace_export then - report_export("%w",currentdepth,c,at) - end - end - local s = n[a_exportstatus] - if s then - c = s - end - if c == 0 then - if trace_export then - report_export("%w",currentdepth) - end - elseif c == 0x20 then - local a = n[a_characters] - nofcurrentcontent = nofcurrentcontent + 1 - if a then - if trace_export then - report_export("%w",currentdepth,a) - end - currentcontent[nofcurrentcontent] = specialspaces[a] -- special space - else - currentcontent[nofcurrentcontent] = " " - end - else - local fc = fontchar[n.font] - if fc then - fc = fc and fc[c] - if fc then - local u = fc.tounicode - if u and u ~= "" then - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = utfchar(fromunicode16(u)) - else - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = utfchar(c) - end - else -- weird, happens in hz (we really need to get rid of the pseudo fonts) - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = utfchar(c) - end - else - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = utfchar(c) - end - end - end - end - elseif id == disc_code then -- probably too late - if keephyphens then - local pre = n.pre - if pre and not pre.next and pre.id == glyph_code and pre.char == hyphencode then - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = hyphen - end - end - collectresults(n.replace,nil) - elseif id == glue_code then - -- we need to distinguish between hskips and vskips - local ca = n[a_characters] - if ca == 0 then - -- skip this one ... 
already converted special character (node-acc) - elseif ca then - local a = n[a_tagged] - if a then - local c = specialspaces[ca] - if last ~= a then - local tl = taglist[a] - if trace_export then - report_export("%w",currentdepth,ca,a) - end - pushcontent() - currentnesting = tl - currentparagraph = n[a_taggedpar] - currentattribute = a - last = a - pushentry(currentnesting) - -- no reference check (see above) - elseif last then - local ap = n[a_taggedpar] - if ap ~= currentparagraph then - pushcontent(currentparagraph,ap) - pushentry(currentnesting) - currentattribute = last - currentparagraph = ap - end - if trace_export then - report_export("%w",currentdepth,ca,last) - end - end - -- if somespace[currentcontent[nofcurrentcontent]] then - -- if trace_export then - -- report_export("%w",currentdepth) - -- end - -- nofcurrentcontent = nofcurrentcontent - 1 - -- end - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = c - end - else - local subtype = n.subtype - if subtype == userskip_code then - if n.spec.width > threshold then - if last and not somespace[currentcontent[nofcurrentcontent]] then - local a = n[a_tagged] - if a == last then - if trace_export then - report_export("%w",currentdepth) - end - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = " " - elseif a then - -- e.g LOGOLOGO - if trace_export then - report_export("%w",currentdepth,last,a) - end - pushcontent() - if trace_export then - report_export("%w",currentdepth) - end - last = a - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = " " - currentnesting = taglist[last] - pushentry(currentnesting) - currentattribute = last - end - end - end - elseif subtype == spaceskip_code or subtype == xspaceskip_code then - if not somespace[currentcontent[nofcurrentcontent]] then - local a = n[a_tagged] - if a == last then - if trace_export then - report_export("%w",currentdepth) - end - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = " " - else - if trace_export then - report_export("%w",currentdepth) - end - last = a - pushcontent() - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = " " - currentnesting = taglist[last] - pushentry(currentnesting) - currentattribute = last - end - end - elseif subtype == rightskip_code then - -- a line - if nofcurrentcontent > 0 then - local r = currentcontent[nofcurrentcontent] - if r == hyphen then - if not keephyphens then - nofcurrentcontent = nofcurrentcontent - 1 - end - elseif not somespace[r] then - local a = n[a_tagged] - if a == last then - if trace_export then - report_export("%w",currentdepth) - end - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = " " - else - if trace_export then - report_export("%w",currentdepth) - end - last = a - pushcontent() - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = " " - currentnesting = taglist[last] - pushentry(currentnesting) - currentattribute = last - end - end - end - elseif subtype == parfillskip_code then - -- deal with paragaph endings (crossings) elsewhere and we quit here - -- as we don't want the rightskip space addition - return - end - end - elseif id == hlist_code or id == vlist_code then - local ai = n[a_image] - if ai then - local at = n[a_tagged] - if nofcurrentcontent > 0 then - pushcontent() - pushentry(currentnesting) -- ?? 
- end - pushentry(taglist[at]) -- has an index, todo: flag empty element - if trace_export then - report_export("%w",currentdepth,kern) - end - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = " " - end - elseif a then - -- e.g LOGOLOGO - if trace_export then - report_export("%w",currentdepth,limit,last,a) - end - last = a - pushcontent() - if trace_export then - report_export("%w",currentdepth,kern) - end - nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = " " - currentnesting = taglist[last] - pushentry(currentnesting) - currentattribute = last - end - end - end - end - end - p = n - end -end - -function nodes.handlers.export(head) -- hooks into the page builder - starttiming(treehash) - if trace_export then - report_export("%w",currentdepth) - end - -- continueexport() - restart = true - collectresults(head) - if trace_export then - report_export("%w",currentdepth) - end - stoptiming(treehash) - return head, true -end - -function builders.paragraphs.tag(head) - noftextblocks = noftextblocks + 1 - for n in traverse_id(hlist_code,head) do - local subtype = n.subtype - if subtype == line_code then - n[a_textblock] = noftextblocks - elseif subtype == glue_code or subtype == kern_code then - n[a_textblock] = 0 - end - end - return false -end - --- encoding='utf-8' - -local xmlpreamble = [[ - - - - - - - -]] - -local function wholepreamble() - return format(xmlpreamble,tex.jobname,os.date(),environment.version,exportversion) -end - - -local csspreamble = [[ - -]] - -local function allusedstylesheets(xmlfile,cssfiles,files) - local result = { } - for i=1,#cssfiles do - local cssfile = cssfiles[i] - if type(cssfile) ~= "string" or cssfile == variables.yes or cssfile == "" or cssfile == xmlfile then - cssfile = file.replacesuffix(xmlfile,"css") - else - cssfile = file.addsuffix(cssfile,"css") - end - files[#files+1] = cssfile - report_export("adding css reference '%s'",cssfile) - result[#result+1] = format(csspreamble,cssfile) - end - return concat(result) -end - -local e_template = [[ -%s { - display: %s ; -}]] - -local d_template = [[ -%s[detail=%s] { - display: %s ; -}]] - -local displaymapping = { - inline = "inline", - display = "block", - mixed = "inline", -} - -local function allusedelements(xmlfile) - local result = { format("/* template for file %s */",xmlfile) } - for element, details in sortedhash(used) do - result[#result+1] = format("/* category: %s */",element) - for detail, nature in sortedhash(details) do - local d = displaymapping[nature or "display"] or "block" - if detail == "" then - result[#result+1] = formatters[e_template](element,d) - else - result[#result+1] = formatters[d_template](element,detail,d) - end - end - end - return concat(result,"\n\n") -end - -local function allcontent(tree) - local result = { } - flushtree(result,tree.data,"display",0) -- we need to collect images - result = concat(result) - result = gsub(result,"\n *\n","\n") - result = gsub(result,"\n +([^< ])","\n%1") - return result -end - --- local xhtmlpreamble = [[ --- --- ]] - -local function cleanxhtmltree(xmltree) - if xmltree then - local xmlwrap = xml.wrap - for e in xml.collected(xmltree,"/document") do - e.at["xmlns:xhtml"] = "http://www.w3.org/1999/xhtml" - break - end - -- todo: inject xhtmlpreamble (xmlns should have be enough) - local wrapper = { tg = "a", ns = "xhtml", at = { href = "unknown" } } - for e in xml.collected(xmltree,"link") do - local at = e.at - local href - if at.location then - href = "#" .. 
gsub(at.location,":","_") - elseif at.url then - href = at.url - elseif at.file then - href = at.file - end - if href then - wrapper.at.href = href - xmlwrap(e,wrapper) - end - end - local wrapper = { tg = "a", ns = "xhtml", at = { name = "unknown" } } - for e in xml.collected(xmltree,"!link[@location]") do - local location = e.at.location - if location then - wrapper.at.name = gsub(location,":","_") - xmlwrap(e,wrapper) - end - end - return xmltree - else - return xml.convert("\ninvalid xhtml tree") - end -end - -local cssfile, xhtmlfile = nil, nil - -directives.register("backend.export.css", function(v) cssfile = v end) -directives.register("backend.export.xhtml",function(v) xhtmlfile = v end) - -local function stopexport(v) - starttiming(treehash) - -- - finishexport() - -- - collapsetree(tree) - indextree(tree) - checktree(tree) - breaktree(tree) - finalizetree(tree) - -- - hashlistdata() - -- - if type(v) ~= "string" or v == variables.yes or v == "" then - v = tex.jobname - end - local basename = file.basename(v) - local xmlfile = file.addsuffix(basename,"export") - -- - local imagefilename = file.addsuffix(file.removesuffix(xmlfile) .. "-images","css") - local stylefilename = file.addsuffix(file.removesuffix(xmlfile) .. "-styles","css") - local templatefilename = file.replacesuffix(xmlfile,"template") - local specificationfilename = file.replacesuffix(xmlfile,"specification") - -- - if xhtml and not cssfile then - cssfile = true - end - local cssfiles = { } - if cssfile then - if cssfile == true then - cssfiles = { "export-example.css" } - else - cssfiles = settings_to_array(cssfile or "") - end - insert(cssfiles,1,imagefilename) - insert(cssfiles,1,stylefilename) - end - cssfiles = table.unique(cssfiles) - -- - local result = allcontent(tree) -- also does some housekeeping and data collecting - -- - local files = { - } - local results = concat { - wholepreamble(), - allusedstylesheets(xmlfile,cssfiles,files), -- ads to files - result, - } - -- - files = table.unique(files) - -- - report_export("saving xml data in %a",xmlfile) - io.savedata(xmlfile,results) - -- - report_export("saving css image definitions in %a",imagefilename) - io.savedata(imagefilename,allusedimages(xmlfile)) - -- - report_export("saving css style definitions in %a",stylefilename) - io.savedata(stylefilename,allusedstyles(xmlfile)) - -- - report_export("saving css template in %a",templatefilename) - io.savedata(templatefilename,allusedelements(xmlfile)) - -- - if xhtmlfile then - if type(v) ~= "string" or xhtmlfile == true or xhtmlfile == variables.yes or xhtmlfile == "" or xhtmlfile == xmlfile then - xhtmlfile = file.replacesuffix(xmlfile,"xhtml") - else - xhtmlfile = file.addsuffix(xhtmlfile,"xhtml") - end - files[#files+1] = xhtmlfile - report_export("saving xhtml variant in %a",xhtmlfile) - local xmltree = cleanxhtmltree(xml.convert(results)) - xml.save(xmltree,xhtmlfile) - -- looking at identity is somewhat redundant as we also inherit from interaction - -- at the tex end - local identity = interactions.general.getidentity() - local specification = { - name = file.removesuffix(v), - identifier = os.uuid(), - images = uniqueusedimages(), - root = xhtmlfile, - files = files, - language = languagenames[tex.count.mainlanguagenumber], - title = validstring(finetuning.title) or validstring(identity.title), - subtitle = validstring(finetuning.subtitle) or validstring(identity.subtitle), - author = validstring(finetuning.author) or validstring(identity.author), - firstpage = validstring(finetuning.firstpage), - 
lastpage = validstring(finetuning.lastpage), - } - report_export("saving specification in %a (mtxrun --script epub --make %s)",specificationfilename,specificationfilename) - io.savedata(specificationfilename,table.serialize(specification,true)) - end - stoptiming(treehash) -end - -local appendaction = nodes.tasks.appendaction -local enableaction = nodes.tasks.enableaction - -function commands.setupexport(t) - table.merge(finetuning,t) - keephyphens = finetuning.hyphen == variables.yes -end - -local function startexport(v) - if v and not exporting then - report_export("enabling export to xml") --- not yet known in task-ini - appendaction("shipouts","normalizers", "nodes.handlers.export") --- enableaction("shipouts","nodes.handlers.export") - enableaction("shipouts","nodes.handlers.accessibility") - enableaction("math", "noads.handlers.tags") ---~ appendaction("finalizers","lists","builders.paragraphs.tag") ---~ enableaction("finalizers","builders.paragraphs.tag") - luatex.registerstopactions(function() stopexport(v) end) - exporting = true - end -end - -directives.register("backend.export",startexport) -- maybe .name - -statistics.register("xml exporting time", function() - if exporting then - return format("%s seconds, version %s", statistics.elapsedtime(treehash),exportversion) - end -end) - --- These are called at the tex end: - -commands.settagitemgroup = structurestags.setitemgroup -commands.settagsynonym = structurestags.setsynonym -commands.settagsorting = structurestags.setsorting -commands.settagdescription = structurestags.setdescription -commands.settagdescriptionsymbol = structurestags.setdescriptionsymbol -commands.settaghighlight = structurestags.sethighlight -commands.settagfigure = structurestags.setfigure -commands.settagcombination = structurestags.setcombination -commands.settagtablecell = structurestags.settablecell -commands.settagtabulatecell = structurestags.settabulatecell +if not modules then modules = { } end modules ['back-exp'] = { + version = 1.001, + comment = "companion to back-exp.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- language -> only mainlanguage, local languages should happen through start/stoplanguage +-- tocs/registers -> maybe add a stripper (i.e. just don't flush entries in final tree) +-- footnotes -> css 3 +-- bodyfont -> in styles.css +-- delimited -> left/right string (needs marking) + +-- Because we need to look ahead we now always build a tree (this was optional in +-- the beginning). The extra overhead in the frontend is neglectable. +-- +-- We can optimize the code ... currently the overhead is some 10% for xml + html so +-- there is no hurry. 
+ +-- todo: move critital formatters out of functions +-- todo: delay loading (apart from basic tag stuff) + +local next, type = next, type +local format, match, concat, rep, sub, gsub, gmatch, find = string.format, string.match, table.concat, string.rep, string.sub, string.gsub, string.gmatch, string.find +local validstring = string.valid +local lpegmatch = lpeg.match +local utfchar, utfbyte, utfvalues = utf.char, utf.byte, utf.values +local insert, remove = table.insert, table.remove +local fromunicode16 = fonts.mappings.fromunicode16 +local sortedhash = table.sortedhash +local formatters = string.formatters + +local trace_export = false trackers.register ("export.trace", function(v) trace_export = v end) +local trace_spacing = false trackers.register ("export.trace.spacing", function(v) trace_spacing = v end) +local less_state = false directives.register("export.lessstate", function(v) less_state = v end) +local show_comment = true directives.register("export.comment", function(v) show_comment = v end) + +-- maybe we will also support these: +-- +-- local css_hyphens = false directives.register("export.css.hyphens", function(v) css_hyphens = v end) +-- local css_textalign = false directives.register("export.css.textalign", function(v) css_textalign = v end) +-- local css_bodyfontsize = false directives.register("export.css.bodyfontsize", function(v) css_bodyfontsize = v end) +-- local css_textwidth = false directives.register("export.css.textwidth", function(v) css_textwidth = v end) + +local report_export = logs.reporter("backend","export") + +local nodes = nodes +local attributes = attributes +local variables = interfaces.variables + +local settings_to_array = utilities.parsers.settings_to_array + +local setmetatableindex = table.setmetatableindex +local tasks = nodes.tasks +local fontchar = fonts.hashes.characters +local fontquads = fonts.hashes.quads +local languagenames = languages.numbers + +local nodecodes = nodes.nodecodes +local skipcodes = nodes.skipcodes +local whatsitcodes = nodes.whatsitcodes +local listcodes = nodes.listcodes + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glyph_code = nodecodes.glyph +local glue_code = nodecodes.glue +local kern_code = nodecodes.kern +local disc_code = nodecodes.disc +local insert_code = nodecodes.insert +local whatsit_code = nodecodes.whatsit +local refximage_code = whatsitcodes.pdfrefximage +local localpar_code = whatsitcodes.localpar + +local userskip_code = skipcodes.userskip +local rightskip_code = skipcodes.rightskip +local parfillskip_code = skipcodes.parfillskip +local spaceskip_code = skipcodes.spaceskip +local xspaceskip_code = skipcodes.xspaceskip + +local line_code = listcodes.line + +local a_characters = attributes.private('characters') +local a_exportstatus = attributes.private('exportstatus') + +local a_tagged = attributes.private('tagged') +local a_taggedpar = attributes.private("taggedpar") +local a_image = attributes.private('image') +local a_reference = attributes.private('reference') + +local a_textblock = attributes.private("textblock") + +local traverse_id = node.traverse_id +local traverse_nodes = node.traverse +local slide_nodelist = node.slide +local texattribute = tex.attribute +local texdimen = tex.dimen +local texcount = tex.count +local locate_node = nodes.locate + +local references = structures.references +local structurestags = structures.tags +local taglist = structurestags.taglist +local properties = structurestags.properties +local userdata = structurestags.userdata -- 
might be combined with taglist
+local tagdata = structurestags.data
+local tagmetadata = structurestags.metadata
+local detailedtag = structurestags.detailedtag
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+-- todo: more locals (and optimize)
+
+local exportversion = "0.30"
+
+local nofcurrentcontent = 0 -- so we don't free (less garbage collection)
+local currentcontent = { }
+local currentnesting = nil
+local currentattribute = nil
+local last = nil
+local currentparagraph = nil
+
+local noftextblocks = 0
+
+local attributehash = { } -- to be considered: set the values at the tex end
+local hyphencode = 0xAD
+local hyphen = utfchar(0xAD) -- todo: also emdash etc
+local colonsplitter = lpeg.splitat(":")
+local dashsplitter = lpeg.splitat("-")
+local threshold = 65536
+local indexing = false
+local keephyphens = false
+
+local finetuning = { }
+
+local treestack = { }
+local nesting = { }
+local currentdepth = 0
+
+local tree = { data = { }, fulltag = "root" } -- root
+local treeroot = tree
+local treehash = { }
+local extras = { }
+local checks = { }
+local finalizers = { }
+local nofbreaks = 0
+local used = { }
+local exporting = false
+local restart = false
+local specialspaces = { [0x20] = " " } -- for conversion
+local somespace = { [0x20] = true, [" "] = true } -- for testing
+local entities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;" }
+local attribentities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;", ['"'] = "&quot;" }
+
+local entityremapper = utf.remapper(entities)
+
+local alignmapping = {
+ flushright = "right",
+ middle = "center",
+ flushleft = "left",
+}
+
+local numbertoallign = {
+ [0] = "justify", ["0"] = "justify", [variables.normal ] = "justify",
+ [1] = "right", ["1"] = "right", [variables.flushright] = "right",
+ [2] = "center", ["2"] = "center", [variables.middle ] = "center",
+ [3] = "left", ["3"] = "left", [variables.flushleft ] = "left",
+}
+
+local defaultnature = "mixed" -- "inline"
+
+setmetatableindex(used, function(t,k)
+ if k then
+ local v = { }
+ t[k] = v
+ return v
+ end
+end)
+
+setmetatableindex(specialspaces, function(t,k)
+ local v = utfchar(k)
+ t[k] = v
+ entities[v] = formatters["&#x%X;"](k)
+ somespace[k] = true
+ somespace[v] = true
+ return v
+end)
+
+
+local namespaced = {
+ -- filled on
+}
+
+local namespaces = {
+ msubsup = "m",
+ msub = "m",
+ msup = "m",
+ mn = "m",
+ mi = "m",
+ ms = "m",
+ mo = "m",
+ mtext = "m",
+ mrow = "m",
+ mfrac = "m",
+ mroot = "m",
+ msqrt = "m",
+ munderover = "m",
+ munder = "m",
+ mover = "m",
+ merror = "m",
+ math = "m",
+ mrow = "m",
+ mtable = "m",
+ mtr = "m",
+ mtd = "m",
+ mfenced = "m",
+ maction = "m",
+ mspace = "m",
+}
+
+setmetatableindex(namespaced, function(t,k)
+ if k then
+ local namespace = namespaces[k]
+ local v = namespace and namespace .. ":" .. 
k or k + t[k] = v + return v + end +end) + +local function attribute(key,value) + if value and value ~= "" then + return formatters[' %s="%s"'](key,gsub(value,".",attribentities)) + else + return "" + end +end + +-- local P, C, Cc = lpeg.P, lpeg.C, lpeg.Cc +-- +-- local dash, colon = P("-"), P(":") +-- +-- local precolon, predash, rest = P((1-colon)^1), P((1-dash )^1), P(1)^1 +-- +-- local tagsplitter = C(precolon) * colon * C(predash) * dash * C(rest) + +-- C(predash) * dash * Cc(nil) * C(rest) + +local listdata = { } + +local function hashlistdata() + local c = structures.lists.collected + for i=1,#c do + local ci = c[i] + local tag = ci.references.tag + if tag then + local m = ci.metadata + listdata[m.kind .. ":" .. m.name .. "-" .. tag] = ci + end + end +end + +local spaces = utilities.strings.newrepeater(" ",-1) + +function structurestags.setattributehash(fulltag,key,value) -- public hash + if type(fulltag) == "number" then + fulltag = taglist[fulltag] + if fulltag then + fulltag = fulltag[#fulltag] + end + end + if fulltag then + local ah = attributehash[fulltag] -- could be metatable magic + if not ah then + ah = { } + attributehash[fulltag] = ah + end + ah[key] = value + end +end + + +-- experiment: styles and images +-- +-- officially we should convert to bp but we round anyway + +local usedstyles = { } + +-- /* padding : ; */ +-- /* text-justify : inter-word ; */ + +local documenttemplate = [[ +document { + font-size : %s !important ; + max-width : %s !important ; + text-align : %s !important ; + hyphens : %s !important ; +} +]] + +local styletemplate = [[ +%s[detail='%s'] { + font-style : %s ; + font-variant : %s ; + font-weight : %s ; + font-family : %s ; + color : %s ; +}]] + +local function allusedstyles(xmlfile) + local result = { format("/* styles for file %s */",xmlfile) } + -- + local bodyfont = finetuning.bodyfont + local width = finetuning.width + local hyphen = finetuning.hyphen + local align = finetuning.align + -- + if not bodyfont or bodyfont == "" then + bodyfont = "12pt" + elseif type(bodyfont) == "number" then + bodyfont = number.todimen(bodyfont,"pt","%ipt") or "12pt" + end + if not width or width == "" then + width = "50em" + elseif type(width) == "number" then + width = number.todimen(width,"pt","%ipt") or "50em" + end + if hyphen == variables.yes then + hyphen = "manual" + else + hyphen = "inherited" + end + if align then + align = numbertoallign[align] + end + if not align then + align = hyphens and "justify" or "inherited" + end + -- + result[#result+1] = format(documenttemplate,bodyfont,width,align,hyphen) + -- + local colorspecification = xml.css.colorspecification + local fontspecification = xml.css.fontspecification + for element, details in sortedhash(usedstyles) do + for detail, data in sortedhash(details) do + local s = fontspecification(data.style) + local c = colorspecification(data.color) + result[#result+1] = formatters[styletemplate](element,detail, + s.style or "inherit", + s.variant or "inherit", + s.weight or "inherit", + s.family or "inherit", + c or "inherit") + end + end + return concat(result,"\n\n") +end + +local usedimages = { } + +local imagetemplate = [[ +%s[id="%s"] { + display : block ; + background-image : url(%s) ; + background-size : 100%% auto ; + background-repeat : no-repeat ; + width : %s ; + height : %s ; +}]] + +local function allusedimages(xmlfile) + local result = { format("/* images for file %s */",xmlfile) } + for element, details in sortedhash(usedimages) do + for detail, data in sortedhash(details) do + local name = 
data.name + if file.suffix(name) == "pdf" then + -- temp hack .. we will have a remapper + name = file.replacesuffix(name,"svg") + end + result[#result+1] = formatters[imagetemplate](element,detail,name,data.width,data.height) + end + end + return concat(result,"\n\n") +end + +local function uniqueusedimages() + local unique = { } + for element, details in next, usedimages do + for detail, data in next, details do + local name = data.name + if file.suffix(name) == "pdf" then + unique[file.replacesuffix(name,"svg")] = name + else + unique[name] = name + end + end + end + return unique +end + +-- + +properties.vspace = { export = "break", nature = "display" } +----------------- = { export = "pagebreak", nature = "display" } + +local function makebreaklist(list) + nofbreaks = nofbreaks + 1 + local t = { } + if list then + for i=1,#list do + t[i] = list[i] + end + end + t[#t+1] = "break-" .. nofbreaks -- maybe no number + return t +end + +local breakattributes = { + type = "collapse" +} + +local function makebreaknode(attributes) -- maybe no fulltag + nofbreaks = nofbreaks + 1 + return { + tg = "break", + fulltag = "break-" .. nofbreaks, + n = nofbreaks, + element = "break", + nature = "display", + attributes = attributes or nil, + -- data = { }, -- not needed + -- attribute = 0, -- not needed + -- parnumber = 0, + } +end + +local fields = { "title", "subtitle", "author", "keywords" } + +local function checkdocument(root) + local data = root.data + if data then + for i=1,#data do + local di = data[i] + if di.content then + -- ok + elseif di.tg == "ignore" then + di.element = "" + checkdocument(di) + else + -- can't happen + end + end + end +end + +function extras.document(result,element,detail,n,fulltag,di) + result[#result+1] = format(" language=%q",languagenames[tex.count.mainlanguagenumber]) + if not less_state then + result[#result+1] = format(" file=%q",tex.jobname) + result[#result+1] = format(" date=%q",os.date()) + result[#result+1] = format(" context=%q",environment.version) + result[#result+1] = format(" version=%q",exportversion) + result[#result+1] = format(" xmlns:m=%q","http://www.w3.org/1998/Math/MathML") + local identity = interactions.general.getidentity() + for i=1,#fields do + local key = fields[i] + local value = identity[key] + if value and value ~= "" then + result[#result+1] = formatters[" %s=%q"](key,value) + end + end + end + checkdocument(di) +end + +local itemgroups = { } + +function structurestags.setitemgroup(current,packed,symbol) + itemgroups[detailedtag("itemgroup",current)] = { + packed = packed, + symbol = symbol, + } +end + +function extras.itemgroup(result,element,detail,n,fulltag,di) + local hash = itemgroups[fulltag] + if hash then + local v = hash.packed + if v then + result[#result+1] = " packed='yes'" + end + local v = hash.symbol + if v then + result[#result+1] = attribute("symbol",v) + end + end +end + +local synonyms = { } + +function structurestags.setsynonym(current,tag) + synonyms[detailedtag("synonym",current)] = tag +end + +function extras.synonym(result,element,detail,n,fulltag,di) + local tag = synonyms[fulltag] + if tag then + result[#result+1] = formatters[" tag='%s'"](tag) + end +end + +local sortings = { } + +function structurestags.setsorting(current,tag) + sortings[detailedtag("sorting",current)] = tag +end + +function extras.sorting(result,element,detail,n,fulltag,di) + local tag = sortings[fulltag] + if tag then + result[#result+1] = formatters[" tag='%s'"](tag) + end +end + +usedstyles.highlight = { } + +function 
structurestags.sethighlight(current,style,color) -- we assume global styles + usedstyles.highlight[current] = { + style = style, -- xml.css.fontspecification(style), + color = color, -- xml.css.colorspec(color), + } +end + +local descriptions = { } +local symbols = { } +local linked = { } + +function structurestags.setdescription(tag,n) + local nd = structures.notes.get(tag,n) -- todo: use listdata instead + if nd then + local references = nd.references + descriptions[references and references.internal] = detailedtag("description",tag) + end +end + +function structurestags.setdescriptionsymbol(tag,n) + local nd = structures.notes.get(tag,n) -- todo: use listdata instead + if nd then + local references = nd.references + symbols[references and references.internal] = detailedtag("descriptionsymbol",tag) + end +end + +function finalizers.descriptions(tree) + local n = 0 + for id, tag in next, descriptions do + local sym = symbols[id] + if sym then + n = n + 1 + linked[tag] = n + linked[sym] = n + end + end +end + +function extras.description(result,element,detail,n,fulltag,di) + local id = linked[fulltag] + if id then + result[#result+1] = formatters[" insert='%s'"](id) -- maybe just fulltag + end +end + +function extras.descriptionsymbol(result,element,detail,n,fulltag,di) + local id = linked[fulltag] + if id then + result[#result+1] = formatters[" insert='%s'"](id) + end +end + +usedimages.image = { } + +function structurestags.setfigure(name,page,width,height) + usedimages.image[detailedtag("image")] = { + name = name, + page = page, + width = number.todimen(width,"cm","%0.3fcm"), + height = number.todimen(height,"cm","%0.3fcm"), + } +end + +function extras.image(result,element,detail,n,fulltag,di) + local data = usedimages.image[fulltag] + if data then + result[#result+1] = attribute("name",data.name) + if tonumber(data.page) > 1 then + result[#result+1] = formatters[" page='%s'"](data.page) + end + result[#result+1] = formatters[" id='%s' width='%s' height='%s'"](fulltag,data.width,data.height) + end +end + +local combinations = { } + +function structurestags.setcombination(nx,ny) + combinations[detailedtag("combination")] = { + nx = nx, + ny = ny, + } +end + +function extras.combination(result,element,detail,n,fulltag,di) + local data = combinations[fulltag] + if data then + result[#result+1] = formatters[" nx='%s' ny='%s'"](data.nx,data.ny) + end +end + +-- quite some code deals with exporting references -- + +local evaluators = { } +local specials = { } + +evaluators.inner = function(result,var) + local inner = var.inner + if inner then + result[#result+1] = attribute("location",inner) + end +end + +evaluators.outer = function(result,var) + local file, url = references.checkedfileorurl(var.outer,var.outer) + if url then + result[#result+1] = attribute("url",url) + elseif file then + result[#result+1] = attribute("file",file) + end +end + +evaluators["outer with inner"] = function(result,var) + local file = references.checkedfile(var.f) + if file then + result[#result+1] = attribute("file",file) + end + local inner = var.inner + if inner then + result[#result+1] = attribute("location",inner) + end +end + +evaluators.special = function(result,var) + local handler = specials[var.special] + if handler then + handler(result,var) + end +end + +evaluators["special outer with operation"] = evaluators.special +evaluators["special operation"] = evaluators.special +evaluators["special operation with arguments"] = evaluators.special + +function specials.url(result,var) + local url = 
references.checkedurl(var.operation) + if url then + result[#result+1] = attribute("url",url) + end +end + +function specials.file(result,var) + local file = references.checkedfile(var.operation) + if file then + result[#result+1] = attribute("file",file) + end +end + +function specials.fileorurl(result,var) + local file, url = references.checkedfileorurl(var.operation,var.operation) + if url then + result[#result+1] = attribute("url",url) + elseif file then + result[#result+1] = attribute("file",file) + end +end + +function specials.internal(result,var) + local internal = references.checkedurl(var.operation) + if internal then + result[#result+1] = formatters[" location='aut:%s'"](internal) + end +end + +local referencehash = { } + +local function adddestination(result,references) -- todo: specials -> exporters and then concat + if references then + local reference = references.reference + if reference and reference ~= "" then + local prefix = references.prefix + if prefix and prefix ~= "" then + result[#result+1] = formatters[" prefix='%s'"](prefix) + end + result[#result+1] = formatters[" destination='%s'"](reference) + for i=1,#references do + local r = references[i] + local e = evaluators[r.kind] + if e then + e(result,r) + end + end + end + end +end + +local function addreference(result,references) + if references then + local reference = references.reference + if reference and reference ~= "" then + local prefix = references.prefix + if prefix and prefix ~= "" then + result[#result+1] = formatters[" prefix='%s'"](prefix) + end + result[#result+1] = formatters[" reference='%s'"](reference) + end + local internal = references.internal + if internal and internal ~= "" then + result[#result+1] = formatters[" location='aut:%s'"](internal) + end + end +end + +function extras.link(result,element,detail,n,fulltag,di) + -- for instance in lists a link has nested elements and no own text + local reference = referencehash[fulltag] + if reference then + adddestination(result,structures.references.get(reference)) + return true + else + local data = di.data + if data then + for i=1,#data do + local di = data[i] + if di then + local fulltag = di.fulltag + if fulltag and extras.link(result,element,detail,n,fulltag,di) then + return true + end + end + end + end + end +end + +-- no settings, as these are obscure ones + +local automathrows = true directives.register("backend.export.math.autorows", function(v) automathrows = v end) +local automathapply = true directives.register("backend.export.math.autoapply", function(v) automathapply = v end) +local automathnumber = true directives.register("backend.export.math.autonumber", function(v) automathnumber = v end) +local automathstrip = true directives.register("backend.export.math.autostrip", function(v) automathstrip = v end) + +local functions = mathematics.categories.functions + +local function collapse(di,i,data,ndata,detail,element) + local collapsing = di.data + if data then + di.element = element + di.detail = nil + i = i + 1 + while i <= ndata do + local dn = data[i] + if dn.detail == detail then + collapsing[#collapsing+1] = dn.data[1] + dn.skip = "ignore" + i = i + 1 + else + break + end + end + end + return i +end + +local function collapse_mn(di,i,data,ndata) + local collapsing = di.data + if data then + i = i + 1 + while i <= ndata do + local dn = data[i] + local tg = dn.tg + if tg == "mn" then + collapsing[#collapsing+1] = dn.data[1] + dn.skip = "ignore" + i = i + 1 + elseif tg == "mo" then + local d = dn.data[1] + if d == "." 
then + collapsing[#collapsing+1] = d + dn.skip = "ignore" + i = i + 1 + else + break + end + else + break + end + end + end + return i +end + +-- maybe delay __i__ till we need it + +local apply_function = { + { + element = "mo", + -- comment = "apply function", + -- data = { utfchar(0x2061) }, + data = { "⁡" }, + nature = "mixed", + } +} + +local functioncontent = { } + +setmetatableindex(functioncontent,function(t,k) + local v = { { content = k } } + t[k] = v + return v +end) + +local function checkmath(root) -- we can provide utf.toentities as an option + local data = root.data + if data then + local ndata = #data + local roottg = root.tg + if roottg == "msubsup" then + local nucleus, superscript, subscript + for i=1,ndata do + local di = data[i] + if not di then + -- weird + elseif di.content then + -- text + elseif not nucleus then + nucleus = i + elseif not superscript then + superscript = i + elseif not subscript then + subscript = i + else + -- error + end + end + if superscript and subscript then + local sup, sub = data[superscript], data[subscript] + data[superscript], data[subscript] = sub, sup + -- sub.__o__, sup.__o__ = subscript, superscript + sub.__i__, sup.__i__ = superscript, subscript + end + elseif roottg == "mfenced" then + local new, n = { }, 0 + local attributes = { } + root.attributes = attributes + for i=1,ndata do + local di = data[i] + if not di then + -- weird + elseif di.content then + n = n + 1 + new[n] = di + else + local tg = di.tg + if tg == "mleft" then + attributes.left = tostring(di.data[1].data[1].content) + elseif tg == "mmiddle" then + attributes.middle = tostring(di.data[1].data[1].content) + elseif tg == "mright" then + attributes.right = tostring(di.data[1].data[1].content) + else + n = n + 1 + di.__i__ = n + new[n] = di + end + end + end + root.data = new + ndata = n + end + if ndata == 0 then + return + elseif ndata == 1 then + local d = data[1] + if not d then + return + elseif d.content then + return + elseif #root.data == 1 then + local tg = d.tg + if automathrows and roottg == "mrow" then + -- maybe just always ! 
check spec first + if tg == "mrow" or tg == "mfenced" or tg == "mfrac" or tg == "mroot" or tg == "msqrt"then + root.skip = "comment" + elseif tg == "mo" then + root.skip = "comment" + end + elseif roottg == "mo" then + if tg == "mo" then + root.skip = "comment" + end + end + end + end + local i = 1 + while i <= ndata do -- -- -- TOO MUCH NESTED CHECKING -- -- -- + local di = data[i] + if di and not di.content then + local tg = di.tg + local detail = di.detail + if tg == "math" then + -- di.element = "mrow" -- when properties + di.skip = "comment" + checkmath(di) + i = i + 1 + elseif tg == "mover" or tg == "munder" or tg == "munderover" then + if detail == "accent" then + di.attributes = { accent = "true" } + di.detail = nil + end + checkmath(di) + i = i + 1 + elseif tg == "mroot" then + if #di.data == 1 then + -- else firefox complains + di.element = "msqrt" + end + checkmath(di) + i = i + 1 + elseif tg == "break" then + di.skip = "comment" + i = i + 1 + elseif tg == "mrow" and detail then + di.detail = nil + checkmath(di) + di = { + element = "maction", + nature = "display", + attributes = { actiontype = detail }, + data = { di }, + n = 0, + } + data[i] = di + i = i + 1 + elseif detail then + -- no checkmath(di) here + local category = tonumber(detail) or 0 + if category == 1 then -- mo + i = collapse(di,i,data,ndata,detail,"mo") + elseif category == 2 then -- mi + i = collapse(di,i,data,ndata,detail,"mi") + elseif category == 3 then -- mn + i = collapse(di,i,data,ndata,detail,"mn") + elseif category == 4 then -- ms + i = collapse(di,i,data,ndata,detail,"ms") + elseif category >= 1000 then + local apply = category >= 2000 + if apply then + category = category - 1000 + end + if tg == "mi" then -- function + if roottg == "mrow" then + root.skip = "comment" + root.element = "function" + end + i = collapse(di,i,data,ndata,detail,"mi") + local tag = functions[category] + if tag then + di.data = functioncontent[tag] + end + if apply then + di.after = apply_function + elseif automathapply then -- make function + local following + if i <= ndata then + -- normally not the case + following = data[i] + else + local parent = di.__p__ -- == root + if parent.tg == "mrow" then + parent = parent.__p__ + end + local index = parent.__i__ + following = parent.data[index+1] + end + if following then + local tg = following.tg + if tg == "mrow" or tg == "mfenced" then -- we need to figure out the right condition + di.after = apply_function + end + end + end + else -- some problem + checkmath(di) + i = i + 1 + end + else + checkmath(di) + i = i + 1 + end + elseif automathnumber and tg == "mn" then + checkmath(di) + i = collapse_mn(di,i,data,ndata) + else + checkmath(di) + i = i + 1 + end + else -- can be string or boolean + if parenttg ~= "mtext" and di == " " then + data[i] = false + end + i = i + 1 + end + end + end +end + +function stripmath(di) + if not di then + -- + elseif di.content then + return di + else + local tg = di.tg + if tg == "mtext" or tg == "ms" then + return di + else + local data = di.data + local ndata = #data + local n = 0 + for i=1,ndata do + local di = data[i] + if di and not di.content then + di = stripmath(di) + end + if di then + local content = di.content + if not content then + n = n + 1 + di.__i__ = n + data[n] = di + elseif content == " " or content == "" then + -- skip + else + n = n + 1 + data[n] = di + end + end + end + for i=ndata,n+1,-1 do + data[i] = nil + end + if #data > 0 then + return di + end + end + end +end + +function checks.math(di) + local hash = 
attributehash[di.fulltag] + local mode = (hash and hash.mode) == "display" and "block" or "inline" + di.attributes = { + display = mode + } + -- can be option if needed: + if mode == "inline" then + di.nature = "mixed" -- else spacing problem (maybe inline) + else + di.nature = "display" + end + if automathstrip then + stripmath(di) + end + checkmath(di) +end + +local a, z, A, Z = 0x61, 0x7A, 0x41, 0x5A + +function extras.mi(result,element,detail,n,fulltag,di) -- check with content + local str = di.data[1].content + if str and sub(str,1,1) ~= "&" then -- hack but good enough (maybe gsub op eerste) + for v in utfvalues(str) do + if (v >= a and v <= z) or (v >= A and v <= Z) then + local a = di.attributes + if a then + a.mathvariant = "normal" + else + di.attributes = { mathvariant = "normal" } + end + end + end + end +end + +function extras.section(result,element,detail,n,fulltag,di) + local data = listdata[fulltag] + if data then + addreference(result,data.references) + return true + else + local data = di.data + if data then + for i=1,#data do + local di = data[i] + if di then + local ft = di.fulltag + if ft and extras.section(result,element,detail,n,ft,di) then + return true + end + end + end + end + end +end + +function extras.float(result,element,detail,n,fulltag,di) + local data = listdata[fulltag] + if data then + addreference(result,data.references) + return true + else + local data = di.data + if data then + for i=1,#data do + local di = data[i] + if di and extras.section(result,element,detail,n,di.fulltag,di) then + return true + end + end + end + end +end + +local tabledata = { } + +function structurestags.settablecell(rows,columns,align) + if align > 0 or rows > 1 or columns > 1 then + tabledata[detailedtag("tablecell")] = { + rows = rows, + columns = columns, + align = align, + } + end +end + +function extras.tablecell(result,element,detail,n,fulltag,di) + local hash = tabledata[fulltag] + if hash then + local v = hash.columns + if v and v > 1 then + result[#result+1] = formatters[" columns='%s'"](v) + end + local v = hash.rows + if v and v > 1 then + result[#result+1] = formatters[" rows='%s'"](v) + end + local v = hash.align + if not v or v == 0 then + -- normal + elseif v == 1 then -- use numbertoalign here + result[#result+1] = " align='flushright'" + elseif v == 2 then + result[#result+1] = " align='middle'" + elseif v == 3 then + result[#result+1] = " align='flushleft'" + end + end +end + +local tabulatedata = { } + +function structurestags.settabulatecell(align) + if align > 0 then + tabulatedata[detailedtag("tabulatecell")] = { + align = align, + } + end +end + +function extras.tabulate(result,element,detail,n,fulltag,di) + local data = di.data + for i=1,#data do + local di = data[i] + if di.tg == "tabulaterow" then + local did = di.data + local content = false + for i=1,#did do + local d = did[i].data + if d and #d > 0 and d[1].content then + content = true + break + end + end + if not content then + di.element = "" -- or simply remove + end + end + end +end + +function extras.tabulatecell(result,element,detail,n,fulltag,di) + local hash = tabulatedata[fulltag] + if hash then + local v = hash.align + if not v or v == 0 then + -- normal + elseif v == 1 then + result[#result+1] = " align='flushleft'" + elseif v == 2 then + result[#result+1] = " align='flushright'" + elseif v == 3 then + result[#result+1] = " align='middle'" + end + end +end + +-- flusher + +local linedone = false -- can go ... 
we strip newlines anyway +local inlinedepth = 0 + +-- todo: #result -> nofresult + +local function emptytag(result,element,nature,depth,di) -- currently only break but at some point + local a = di.attributes -- we might add detail etc + if a then -- happens seldom + if linedone then + result[#result+1] = formatters["%w<%s"](depth,namespaced[element]) + else + result[#result+1] = formatters["\n%w<%s"](depth,namespaced[element]) + end + for k, v in next, a do + result[#result+1] = formatters[" %s=%q"](k,v) + end + result[#result+1] = "/>\n" + else + if linedone then + result[#result+1] = formatters["%w<%s/>\n"](depth,namespaced[element]) + else + result[#result+1] = formatters["\n%w<%s/>\n"](depth,namespaced[element]) + end + end + linedone = false +end + +local function begintag(result,element,nature,depth,di,skip) + -- if needed we can use a local result with xresult + local detail = di.detail + local n = di.n + local fulltag = di.fulltag + local comment = di.comment + if nature == "inline" then + linedone = false + inlinedepth = inlinedepth + 1 + if show_comment and comment then + result[#result+1] = formatters[""](comment) + end + elseif nature == "mixed" then + if inlinedepth > 0 then + if show_comment and comment then + result[#result+1] = formatters[""](comment) + end + elseif linedone then + result[#result+1] = spaces[depth] + if show_comment and comment then + result[#result+1] = formatters[""](comment) + end + else + result[#result+1] = formatters["\n%w"](depth) + linedone = false + if show_comment and comment then + result[#result+1] = formatters["\n%w"](comment,depth) + end + end + inlinedepth = inlinedepth + 1 + else + if inlinedepth > 0 then + if show_comment and comment then + result[#result+1] = formatters[""](comment) + end + elseif linedone then + result[#result+1] = spaces[depth] + if show_comment and comment then + result[#result+1] = formatters[""](comment) + end + else + result[#result+1] = formatters["\n%w"](depth) -- can introduced extra line in mixed+mixed (filtered later on) + linedone = false + if show_comment and comment then + result[#result+1] = formatters["\n%w"](comment,depth) + end + end + end + if skip == "comment" then + if show_comment then + result[#result+1] = formatters[""](namespaced[element]) + end + elseif skip then + -- ignore + else + result[#result+1] = formatters["<%s"](namespaced[element]) + if detail then + result[#result+1] = formatters[" detail=%q"](detail) + end + if indexing and n then + result[#result+1] = formatters[" n=%q"](n) + end + local extra = extras[element] + if extra then + extra(result,element,detail,n,fulltag,di) + end + local u = userdata[fulltag] + if u then + for k, v in next, u do + result[#result+1] = formatters[" %s=%q"](k,v) + end + end + local a = di.attributes + if a then + for k, v in next, a do + result[#result+1] = formatters[" %s=%q"](k,v) + end + end + result[#result+1] = ">" + end + if inlinedepth > 0 then + elseif nature == "display" then + result[#result+1] = "\n" + linedone = true + end + used[element][detail or ""] = nature -- for template css + local metadata = tagmetadata[fulltag] + if metadata then + if not linedone then + result[#result+1] = "\n" + linedone = true + end + result[#result+1] = formatters["%w\n"](depth) + for k, v in table.sortedpairs(metadata) do + v = entityremapper(v) + result[#result+1] = formatters["%w%s\n"](depth+1,k,v) + end + result[#result+1] = formatters["%w\n"](depth) + end +end + +local function endtag(result,element,nature,depth,skip) + if nature == "display" then + if 
inlinedepth == 0 then + if not linedone then + result[#result+1] = "\n" + end + if skip == "comment" then + if show_comment then + result[#result+1] = formatters["%w\n"](depth,namespaced[element]) + end + elseif skip then + -- ignore + else + result[#result+1] = formatters["%w\n"](depth,namespaced[element]) + end + linedone = true + else + if skip == "comment" then + if show_comment then + result[#result+1] = formatters[""](namespaced[element]) + end + elseif skip then + -- ignore + else + result[#result+1] = formatters[""](namespaced[element]) + end + end + else + inlinedepth = inlinedepth - 1 + if skip == "comment" then + if show_comment then + result[#result+1] = formatters[""](namespaced[element]) + end + elseif skip then + -- ignore + else + result[#result+1] = formatters[""](namespaced[element]) + end + linedone = false + end +end + +local function flushtree(result,data,nature,depth) + depth = depth + 1 + local nofdata = #data + for i=1,nofdata do + local di = data[i] + if not di then -- hm, di can be string + -- whatever + elseif di.content then + -- already has breaks + local content = entityremapper(di.content) + if i == nofdata and sub(content,-1) == "\n" then -- move check + -- can be an end of line in par but can also be the last line + if trace_spacing then + result[#result+1] = formatters["%s"](di.parnumber or 0,sub(content,1,-2)) + else + result[#result+1] = sub(content,1,-2) + end + result[#result+1] = " " + else + if trace_spacing then + result[#result+1] = formatters["%s"](di.parnumber or 0,content) + else + result[#result+1] = content + end + end + linedone = false + elseif not di.collapsed then -- ignore collapsed data (is appended, reconstructed par) + local element = di.element + if not element then + -- skip + elseif element == "break" then -- or element == "pagebreak" + emptytag(result,element,nature,depth,di) + elseif element == "" or di.skip == "ignore" then + -- skip + else + if di.before then + flushtree(result,di.before,nature,depth) + end + local natu = di.nature + local skip = di.skip + if di.breaknode then + emptytag(result,"break","display",depth,di) + end + begintag(result,element,natu,depth,di,skip) + flushtree(result,di.data,natu,depth) + -- if sub(result[#result],-1) == " " and natu ~= "inline" then + -- result[#result] = sub(result[#result],1,-2) + -- end + endtag(result,element,natu,depth,skip) + if di.after then + flushtree(result,di.after,nature,depth) + end + end + end + end +end + +local function breaktree(tree,parent,parentelement) -- also removes double breaks + local data = tree.data + if data then + local nofdata = #data + local prevelement + local prevnature + local prevparnumber + local newdata = { } + local nofnewdata = 0 + for i=1,nofdata do + local di = data[i] + if not di then + -- skip + elseif di.content then + local parnumber = di.parnumber + if prevnature == "inline" and prevparnumber and prevparnumber ~= parnumber then + nofnewdata = nofnewdata + 1 + if trace_spacing then + newdata[nofnewdata] = makebreaknode { type = "a", p = prevparnumber, n = parnumber } + else + newdata[nofnewdata] = makebreaknode() + end + end + prevelement = nil + prevnature = "inline" + prevparnumber = parnumber + nofnewdata = nofnewdata + 1 + newdata[nofnewdata] = di + elseif not di.collapsed then + local element = di.element + if element == "break" then -- or element == "pagebreak" + if prevelement == "break" then + di.element = "" + end + prevelement = element + prevnature = "display" + elseif element == "" or di.skip == "ignore" then + -- skip + else + 
local nature = di.nature + local parnumber = di.parnumber + if prevnature == "inline" and nature == "inline" and prevparnumber and prevparnumber ~= parnumber then + nofnewdata = nofnewdata + 1 + if trace_spacing then + newdata[nofnewdata] = makebreaknode { type = "b", p = prevparnumber, n = parnumber } + else + newdata[nofnewdata] = makebreaknode() + end + end + prevnature = nature + prevparnumber = parnumber + prevelement = element + breaktree(di,tree,element) + end + nofnewdata = nofnewdata + 1 + newdata[nofnewdata] = di + else + local nature = di.nature + local parnumber = di.parnumber + if prevnature == "inline" and nature == "inline" and prevparnumber and prevparnumber ~= parnumber then + nofnewdata = nofnewdata + 1 + if trace_spacing then + newdata[nofnewdata] = makebreaknode { type = "c", p = prevparnumber, n = parnumber } + else + newdata[nofnewdata] = makebreaknode() + end + end + prevnature = nature + prevparnumber = parnumber + nofnewdata = nofnewdata + 1 + newdata[nofnewdata] = di + end + end + tree.data = newdata + end +end + +-- also tabulaterow reconstruction .. maybe better as a checker +-- i.e cell attribute + +local function collapsetree() + for tag, trees in next, treehash do + local d = trees[1].data + if d then + local nd = #d + if nd > 0 then + for i=2,#trees do + local currenttree = trees[i] + local currentdata = currenttree.data + local currentpar = currenttree.parnumber + local previouspar = trees[i-1].parnumber + currenttree.collapsed = true + -- is the next ok? + if previouspar == 0 or not (di and di.content) then + previouspar = nil -- no need anyway so no further testing needed + end + for j=1,#currentdata do + local cd = currentdata[j] + if not cd or cd == "" then + -- skip + elseif cd.content then + if not currentpar then + -- add space ? + elseif not previouspar then + -- add space ? 
+ elseif currentpar ~= previouspar then + nd = nd + 1 + if trace_spacing then + d[nd] = makebreaknode { type = "d", p = previouspar, n = currentpar } + else + d[nd] = makebreaknode() + end + end + previouspar = currentpar + nd = nd + 1 + d[nd] = cd + else + nd = nd + 1 + d[nd] = cd + end + currentdata[j] = false + end + end + end + end + end +end + +local function finalizetree(tree) + for _, finalizer in next, finalizers do + finalizer(tree) + end +end + +local function indextree(tree) + local data = tree.data + if data then + local n, new = 0, { } + for i=1,#data do + local d = data[i] + if not d then + -- skip + elseif d.content then + n = n + 1 + new[n] = d + elseif not d.collapsed then + n = n + 1 + d.__i__ = n + d.__p__ = tree + indextree(d) + new[n] = d + end + end + tree.data = new + end +end + +local function checktree(tree) + local data = tree.data + if data then + for i=1,#data do + local d = data[i] + if type(d) == "table" then + local check = checks[d.tg] + if check then + check(d) + end + checktree(d) + end + end + end +end + +-- collector code + +local function push(fulltag,depth) + local tag, n = lpegmatch(dashsplitter,fulltag) + local tg, detail = lpegmatch(colonsplitter,tag) + local element, nature + if detail then + local pd = properties[tag] + local pt = properties[tg] + element = pd and pd.export or pt and pt.export or tg + nature = pd and pd.nature or pt and pt.nature or defaultnature + else + local p = properties[tg] + element = p and p.export or tg + nature = p and p.nature or "inline" + end + local treedata = tree.data + local t = { + tg = tg, + fulltag = fulltag, + detail = detail, + n = tonumber(n), -- more efficient + element = element, + nature = nature, + data = { }, + attribute = currentattribute, + parnumber = currentparagraph, + } + treedata[#treedata+1] = t + currentdepth = currentdepth + 1 + nesting[currentdepth] = fulltag + treestack[currentdepth] = tree + if trace_export then + if detail and detail ~= "" then + report_export("%w<%s trigger=%a paragraph=%a index=%a detail=%a>",currentdepth-1,fulltag,currentattribute or 0,currentparagraph or 0,#treedata,detail) + else + report_export("%w<%s trigger=%a paragraph=%a index=%a>",currentdepth-1,fulltag,currentattribute or 0,currentparagraph or 0,#treedata) + end + end + tree = t + if tg == "break" then + -- no need for this + else + local h = treehash[fulltag] + if h then + h[#h+1] = t + else + treehash[fulltag] = { t } + end + end +end + +local function pop() + local top = nesting[currentdepth] + tree = treestack[currentdepth] + currentdepth = currentdepth - 1 + if trace_export then + if top then + report_export("%w",currentdepth,top) + else + report_export("",top) + end + end +end + +local function continueexport() + if nofcurrentcontent > 0 then + if trace_export then + report_export("%w",currentdepth) + end + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = " " -- pagebreak + end +end + +local function pushentry(current) + if current then + if restart then + continueexport() + restart = false + end + local newdepth = #current + local olddepth = currentdepth + if trace_export then + report_export("%w",currentdepth,olddepth,newdepth,current[newdepth]) + end + if olddepth <= 0 then + for i=1,newdepth do + push(current[i],i) + end + else + local difference + if olddepth < newdepth then + for i=1,olddepth do + if current[i] ~= nesting[i] then + difference = i + break + end + end + else + for i=1,newdepth do + if current[i] ~= nesting[i] then + difference = i + break + end + end + 
end + if difference then + for i=olddepth,difference,-1 do + pop() + end + for i=difference,newdepth do + push(current[i],i) + end + elseif newdepth > olddepth then + for i=olddepth+1,newdepth do + push(current[i],i) + end + elseif newdepth < olddepth then + for i=olddepth,newdepth,-1 do + pop() + end + elseif trace_export then + report_export("%w",currentdepth,newdepth,nesting[newdepth] or "?") + end + end + return olddepth, newdepth + end +end + +local function pushcontent(currentparagraph,newparagraph) + if nofcurrentcontent > 0 then + if currentparagraph then + if currentcontent[nofcurrentcontent] == "\n" then + if trace_export then + report_export("%w",currentdepth) + end + nofcurrentcontent = nofcurrentcontent - 1 + end + end + local content = concat(currentcontent,"",1,nofcurrentcontent) + if content == "" then + -- omit; when currentparagraph we could push, remove spaces, pop + elseif somespace[content] and currentparagraph then + -- omit; when currentparagraph we could push, remove spaces, pop + else + local olddepth, newdepth + local list = taglist[currentattribute] + if list then + olddepth, newdepth = pushentry(list) + end + local td = tree.data + local nd = #td + td[nd+1] = { parnumber = currentparagraph, content = content } + if trace_export then + report_export("%w",currentdepth,#content) + report_export("%w%s",currentdepth,(gsub(content,"\n","\\n"))) + report_export("%w",currentdepth) + end + if olddepth then + for i=newdepth-1,olddepth,-1 do + pop() + end + end + end + nofcurrentcontent = 0 + end + if currentparagraph then + pushentry(makebreaklist(currentnesting)) + if trace_export then + report_export("%w",currentdepth,currentparagraph,newparagraph) + end + end +end + +local function finishexport() + if trace_export then + report_export("%w",currentdepth) + end + if nofcurrentcontent > 0 then + if somespace[currentcontent[nofcurrentcontent]] then + if trace_export then + report_export("%w",currentdepth) + end + nofcurrentcontent = nofcurrentcontent - 1 + end + pushcontent() + end + for i=currentdepth,1,-1 do + pop() + end + currentcontent = { } -- we're nice and do a cleanup + if trace_export then + report_export("%w",currentdepth) + end +end + +-- whatsit_code localpar_code + +local function collectresults(head,list) -- is last used (we also have currentattribute) + local p + for n in traverse_nodes(head) do + local id = n.id -- 14: image, 8: literal (mp) + if id == glyph_code then + local at = n[a_tagged] + if not at then + -- we need to tag the pagebody stuff as being valid skippable + -- + -- report_export("skipping character: %C (no attribute)",n.char) + else + -- we could add tonunicodes for ligatures (todo) + local components = n.components + if components then -- we loose data + collectresults(components,nil) + else + local c = n.char + if last ~= at then + local tl = taglist[at] + pushcontent() + currentnesting = tl + currentparagraph = n[a_taggedpar] + currentattribute = at + last = at + pushentry(currentnesting) + if trace_export then + report_export("%w",currentdepth,c,at) + end + -- We need to intercept this here; maybe I will also move this + -- to a regular setter at the tex end. 
+ local r = n[a_reference] + if r then + referencehash[tl[#tl]] = r -- fulltag + end + -- + elseif last then + local ap = n[a_taggedpar] + if ap ~= currentparagraph then + pushcontent(currentparagraph,ap) + pushentry(currentnesting) + currentattribute = last + currentparagraph = ap + end + if trace_export then + report_export("%w",currentdepth,c,last) + end + else + if trace_export then + report_export("%w",currentdepth,c,at) + end + end + local s = n[a_exportstatus] + if s then + c = s + end + if c == 0 then + if trace_export then + report_export("%w",currentdepth) + end + elseif c == 0x20 then + local a = n[a_characters] + nofcurrentcontent = nofcurrentcontent + 1 + if a then + if trace_export then + report_export("%w",currentdepth,a) + end + currentcontent[nofcurrentcontent] = specialspaces[a] -- special space + else + currentcontent[nofcurrentcontent] = " " + end + else + local fc = fontchar[n.font] + if fc then + fc = fc and fc[c] + if fc then + local u = fc.tounicode + if u and u ~= "" then + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = utfchar(fromunicode16(u)) + else + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = utfchar(c) + end + else -- weird, happens in hz (we really need to get rid of the pseudo fonts) + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = utfchar(c) + end + else + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = utfchar(c) + end + end + end + end + elseif id == disc_code then -- probably too late + if keephyphens then + local pre = n.pre + if pre and not pre.next and pre.id == glyph_code and pre.char == hyphencode then + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = hyphen + end + end + collectresults(n.replace,nil) + elseif id == glue_code then + -- we need to distinguish between hskips and vskips + local ca = n[a_characters] + if ca == 0 then + -- skip this one ... 
already converted special character (node-acc) + elseif ca then + local a = n[a_tagged] + if a then + local c = specialspaces[ca] + if last ~= a then + local tl = taglist[a] + if trace_export then + report_export("%w",currentdepth,ca,a) + end + pushcontent() + currentnesting = tl + currentparagraph = n[a_taggedpar] + currentattribute = a + last = a + pushentry(currentnesting) + -- no reference check (see above) + elseif last then + local ap = n[a_taggedpar] + if ap ~= currentparagraph then + pushcontent(currentparagraph,ap) + pushentry(currentnesting) + currentattribute = last + currentparagraph = ap + end + if trace_export then + report_export("%w",currentdepth,ca,last) + end + end + -- if somespace[currentcontent[nofcurrentcontent]] then + -- if trace_export then + -- report_export("%w",currentdepth) + -- end + -- nofcurrentcontent = nofcurrentcontent - 1 + -- end + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = c + end + else + local subtype = n.subtype + if subtype == userskip_code then + if n.spec.width > threshold then + if last and not somespace[currentcontent[nofcurrentcontent]] then + local a = n[a_tagged] + if a == last then + if trace_export then + report_export("%w",currentdepth) + end + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = " " + elseif a then + -- e.g LOGOLOGO + if trace_export then + report_export("%w",currentdepth,last,a) + end + pushcontent() + if trace_export then + report_export("%w",currentdepth) + end + last = a + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = " " + currentnesting = taglist[last] + pushentry(currentnesting) + currentattribute = last + end + end + end + elseif subtype == spaceskip_code or subtype == xspaceskip_code then + if not somespace[currentcontent[nofcurrentcontent]] then + local a = n[a_tagged] + if a == last then + if trace_export then + report_export("%w",currentdepth) + end + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = " " + else + if trace_export then + report_export("%w",currentdepth) + end + last = a + pushcontent() + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = " " + currentnesting = taglist[last] + pushentry(currentnesting) + currentattribute = last + end + end + elseif subtype == rightskip_code then + -- a line + if nofcurrentcontent > 0 then + local r = currentcontent[nofcurrentcontent] + if r == hyphen then + if not keephyphens then + nofcurrentcontent = nofcurrentcontent - 1 + end + elseif not somespace[r] then + local a = n[a_tagged] + if a == last then + if trace_export then + report_export("%w",currentdepth) + end + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = " " + else + if trace_export then + report_export("%w",currentdepth) + end + last = a + pushcontent() + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = " " + currentnesting = taglist[last] + pushentry(currentnesting) + currentattribute = last + end + end + end + elseif subtype == parfillskip_code then + -- deal with paragaph endings (crossings) elsewhere and we quit here + -- as we don't want the rightskip space addition + return + end + end + elseif id == hlist_code or id == vlist_code then + local ai = n[a_image] + if ai then + local at = n[a_tagged] + if nofcurrentcontent > 0 then + pushcontent() + pushentry(currentnesting) -- ?? 
+ end + pushentry(taglist[at]) -- has an index, todo: flag empty element + if trace_export then + report_export("%w",currentdepth,kern) + end + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = " " + end + elseif a then + -- e.g LOGOLOGO + if trace_export then + report_export("%w",currentdepth,limit,last,a) + end + last = a + pushcontent() + if trace_export then + report_export("%w",currentdepth,kern) + end + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = " " + currentnesting = taglist[last] + pushentry(currentnesting) + currentattribute = last + end + end + end + end + end + p = n + end +end + +function nodes.handlers.export(head) -- hooks into the page builder + starttiming(treehash) + if trace_export then + report_export("%w",currentdepth) + end + -- continueexport() + restart = true + collectresults(head) + if trace_export then + report_export("%w",currentdepth) + end + stoptiming(treehash) + return head, true +end + +function builders.paragraphs.tag(head) + noftextblocks = noftextblocks + 1 + for n in traverse_id(hlist_code,head) do + local subtype = n.subtype + if subtype == line_code then + n[a_textblock] = noftextblocks + elseif subtype == glue_code or subtype == kern_code then + n[a_textblock] = 0 + end + end + return false +end + +-- encoding='utf-8' + +local xmlpreamble = [[ + + + + + + + +]] + +local function wholepreamble() + return format(xmlpreamble,tex.jobname,os.date(),environment.version,exportversion) +end + + +local csspreamble = [[ + +]] + +local function allusedstylesheets(xmlfile,cssfiles,files) + local result = { } + for i=1,#cssfiles do + local cssfile = cssfiles[i] + if type(cssfile) ~= "string" or cssfile == variables.yes or cssfile == "" or cssfile == xmlfile then + cssfile = file.replacesuffix(xmlfile,"css") + else + cssfile = file.addsuffix(cssfile,"css") + end + files[#files+1] = cssfile + report_export("adding css reference '%s'",cssfile) + result[#result+1] = format(csspreamble,cssfile) + end + return concat(result) +end + +local e_template = [[ +%s { + display: %s ; +}]] + +local d_template = [[ +%s[detail=%s] { + display: %s ; +}]] + +local displaymapping = { + inline = "inline", + display = "block", + mixed = "inline", +} + +local function allusedelements(xmlfile) + local result = { format("/* template for file %s */",xmlfile) } + for element, details in sortedhash(used) do + result[#result+1] = format("/* category: %s */",element) + for detail, nature in sortedhash(details) do + local d = displaymapping[nature or "display"] or "block" + if detail == "" then + result[#result+1] = formatters[e_template](element,d) + else + result[#result+1] = formatters[d_template](element,detail,d) + end + end + end + return concat(result,"\n\n") +end + +local function allcontent(tree) + local result = { } + flushtree(result,tree.data,"display",0) -- we need to collect images + result = concat(result) + result = gsub(result,"\n *\n","\n") + result = gsub(result,"\n +([^< ])","\n%1") + return result +end + +-- local xhtmlpreamble = [[ +-- +-- ]] + +local function cleanxhtmltree(xmltree) + if xmltree then + local xmlwrap = xml.wrap + for e in xml.collected(xmltree,"/document") do + e.at["xmlns:xhtml"] = "http://www.w3.org/1999/xhtml" + break + end + -- todo: inject xhtmlpreamble (xmlns should have be enough) + local wrapper = { tg = "a", ns = "xhtml", at = { href = "unknown" } } + for e in xml.collected(xmltree,"link") do + local at = e.at + local href + if at.location then + href = "#" .. 
gsub(at.location,":","_") + elseif at.url then + href = at.url + elseif at.file then + href = at.file + end + if href then + wrapper.at.href = href + xmlwrap(e,wrapper) + end + end + local wrapper = { tg = "a", ns = "xhtml", at = { name = "unknown" } } + for e in xml.collected(xmltree,"!link[@location]") do + local location = e.at.location + if location then + wrapper.at.name = gsub(location,":","_") + xmlwrap(e,wrapper) + end + end + return xmltree + else + return xml.convert("\ninvalid xhtml tree") + end +end + +local cssfile, xhtmlfile = nil, nil + +directives.register("backend.export.css", function(v) cssfile = v end) +directives.register("backend.export.xhtml",function(v) xhtmlfile = v end) + +local function stopexport(v) + starttiming(treehash) + -- + finishexport() + -- + collapsetree(tree) + indextree(tree) + checktree(tree) + breaktree(tree) + finalizetree(tree) + -- + hashlistdata() + -- + if type(v) ~= "string" or v == variables.yes or v == "" then + v = tex.jobname + end + local basename = file.basename(v) + local xmlfile = file.addsuffix(basename,"export") + -- + local imagefilename = file.addsuffix(file.removesuffix(xmlfile) .. "-images","css") + local stylefilename = file.addsuffix(file.removesuffix(xmlfile) .. "-styles","css") + local templatefilename = file.replacesuffix(xmlfile,"template") + local specificationfilename = file.replacesuffix(xmlfile,"specification") + -- + if xhtml and not cssfile then + cssfile = true + end + local cssfiles = { } + if cssfile then + if cssfile == true then + cssfiles = { "export-example.css" } + else + cssfiles = settings_to_array(cssfile or "") + end + insert(cssfiles,1,imagefilename) + insert(cssfiles,1,stylefilename) + end + cssfiles = table.unique(cssfiles) + -- + local result = allcontent(tree) -- also does some housekeeping and data collecting + -- + local files = { + } + local results = concat { + wholepreamble(), + allusedstylesheets(xmlfile,cssfiles,files), -- ads to files + result, + } + -- + files = table.unique(files) + -- + report_export("saving xml data in %a",xmlfile) + io.savedata(xmlfile,results) + -- + report_export("saving css image definitions in %a",imagefilename) + io.savedata(imagefilename,allusedimages(xmlfile)) + -- + report_export("saving css style definitions in %a",stylefilename) + io.savedata(stylefilename,allusedstyles(xmlfile)) + -- + report_export("saving css template in %a",templatefilename) + io.savedata(templatefilename,allusedelements(xmlfile)) + -- + if xhtmlfile then + if type(v) ~= "string" or xhtmlfile == true or xhtmlfile == variables.yes or xhtmlfile == "" or xhtmlfile == xmlfile then + xhtmlfile = file.replacesuffix(xmlfile,"xhtml") + else + xhtmlfile = file.addsuffix(xhtmlfile,"xhtml") + end + files[#files+1] = xhtmlfile + report_export("saving xhtml variant in %a",xhtmlfile) + local xmltree = cleanxhtmltree(xml.convert(results)) + xml.save(xmltree,xhtmlfile) + -- looking at identity is somewhat redundant as we also inherit from interaction + -- at the tex end + local identity = interactions.general.getidentity() + local specification = { + name = file.removesuffix(v), + identifier = os.uuid(), + images = uniqueusedimages(), + root = xhtmlfile, + files = files, + language = languagenames[tex.count.mainlanguagenumber], + title = validstring(finetuning.title) or validstring(identity.title), + subtitle = validstring(finetuning.subtitle) or validstring(identity.subtitle), + author = validstring(finetuning.author) or validstring(identity.author), + firstpage = validstring(finetuning.firstpage), + 
lastpage = validstring(finetuning.lastpage), + } + report_export("saving specification in %a (mtxrun --script epub --make %s)",specificationfilename,specificationfilename) + io.savedata(specificationfilename,table.serialize(specification,true)) + end + stoptiming(treehash) +end + +local appendaction = nodes.tasks.appendaction +local enableaction = nodes.tasks.enableaction + +function commands.setupexport(t) + table.merge(finetuning,t) + keephyphens = finetuning.hyphen == variables.yes +end + +local function startexport(v) + if v and not exporting then + report_export("enabling export to xml") +-- not yet known in task-ini + appendaction("shipouts","normalizers", "nodes.handlers.export") +-- enableaction("shipouts","nodes.handlers.export") + enableaction("shipouts","nodes.handlers.accessibility") + enableaction("math", "noads.handlers.tags") +--~ appendaction("finalizers","lists","builders.paragraphs.tag") +--~ enableaction("finalizers","builders.paragraphs.tag") + luatex.registerstopactions(function() stopexport(v) end) + exporting = true + end +end + +directives.register("backend.export",startexport) -- maybe .name + +statistics.register("xml exporting time", function() + if exporting then + return format("%s seconds, version %s", statistics.elapsedtime(treehash),exportversion) + end +end) + +-- These are called at the tex end: + +commands.settagitemgroup = structurestags.setitemgroup +commands.settagsynonym = structurestags.setsynonym +commands.settagsorting = structurestags.setsorting +commands.settagdescription = structurestags.setdescription +commands.settagdescriptionsymbol = structurestags.setdescriptionsymbol +commands.settaghighlight = structurestags.sethighlight +commands.settagfigure = structurestags.setfigure +commands.settagcombination = structurestags.setcombination +commands.settagtablecell = structurestags.settablecell +commands.settagtabulatecell = structurestags.settabulatecell diff --git a/tex/context/base/back-ini.lua b/tex/context/base/back-ini.lua index bdd931abd..1568e3564 100644 --- a/tex/context/base/back-ini.lua +++ b/tex/context/base/back-ini.lua @@ -1,106 +1,106 @@ -if not modules then modules = { } end modules ['back-ini'] = { - version = 1.001, - comment = "companion to back-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next, type = next, type -local format = string.format -local sind, cosd = math.sind, math.cosd - -backends = backends or { } -local backends = backends - -local trace_backend = false trackers.register("backend.initializers", function(v) trace_finalizers = v end) - -local report_backend = logs.reporter("backend","initializing") - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex - -local function nothing() return nil end - -backends.nothing = nothing - -local nodeinjections = { } -local codeinjections = { } -local registrations = { } -local tables = allocate() - -local function donothing(t,k) - t[k] = nothing - return nothing -end - -setmetatableindex(nodeinjections, donothing) -setmetatableindex(codeinjections, donothing) -setmetatableindex(registrations, donothing) - -local defaults = { - nodeinjections = nodeinjections, - codeinjections = codeinjections, - registrations = registrations, - tables = tables, -} - -backends.defaults = defaults - -backends.nodeinjections = { } setmetatableindex(backends.nodeinjections, nodeinjections) -backends.codeinjections = { } 
setmetatableindex(backends.codeinjections, codeinjections) -backends.registrations = { } setmetatableindex(backends.registrations, registrations) -backends.tables = { } setmetatableindex(backends.tables, tables) - -backends.current = "unknown" - -function backends.install(what) - if type(what) == "string" then - local backend = backends[what] - if backend then - if trace_backend then - if backend.comment then - report_backend("initializing backend %a, %a",what,backend.comment) - else - report_backend("initializing backend %a",what) - end - end - backends.current = what - for category, default in next, defaults do - local target, plugin = backends[category], backend[category] - setmetatableindex(plugin, default) - setmetatableindex(target, plugin) - end - elseif trace_backend then - report_backend("no backend named %a",what) - end - end -end - -statistics.register("used backend", function() - local bc = backends.current - if bc ~= "unknown" then - return format("%s (%s)",bc,backends[bc].comment or "no comment") - else - return nil - end -end) - -local comment = { "comment", "" } - -tables.vfspecials = allocate { - red = comment, - green = comment, - blue = comment, - black = comment, - startslant = comment, - stopslant = comment, -} - --- experimental code -- - -function commands.pdfrotation(a) -- somewhat weird here - local s, c = sind(a), cosd(a) - context("%0.6f %0.6f %0.6f %0.6f",c,s,-s,c) -end - +if not modules then modules = { } end modules ['back-ini'] = { + version = 1.001, + comment = "companion to back-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next, type = next, type +local format = string.format +local sind, cosd = math.sind, math.cosd + +backends = backends or { } +local backends = backends + +local trace_backend = false trackers.register("backend.initializers", function(v) trace_finalizers = v end) + +local report_backend = logs.reporter("backend","initializing") + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex + +local function nothing() return nil end + +backends.nothing = nothing + +local nodeinjections = { } +local codeinjections = { } +local registrations = { } +local tables = allocate() + +local function donothing(t,k) + t[k] = nothing + return nothing +end + +setmetatableindex(nodeinjections, donothing) +setmetatableindex(codeinjections, donothing) +setmetatableindex(registrations, donothing) + +local defaults = { + nodeinjections = nodeinjections, + codeinjections = codeinjections, + registrations = registrations, + tables = tables, +} + +backends.defaults = defaults + +backends.nodeinjections = { } setmetatableindex(backends.nodeinjections, nodeinjections) +backends.codeinjections = { } setmetatableindex(backends.codeinjections, codeinjections) +backends.registrations = { } setmetatableindex(backends.registrations, registrations) +backends.tables = { } setmetatableindex(backends.tables, tables) + +backends.current = "unknown" + +function backends.install(what) + if type(what) == "string" then + local backend = backends[what] + if backend then + if trace_backend then + if backend.comment then + report_backend("initializing backend %a, %a",what,backend.comment) + else + report_backend("initializing backend %a",what) + end + end + backends.current = what + for category, default in next, defaults do + local target, plugin = backends[category], backend[category] + setmetatableindex(plugin, 
default) + setmetatableindex(target, plugin) + end + elseif trace_backend then + report_backend("no backend named %a",what) + end + end +end + +statistics.register("used backend", function() + local bc = backends.current + if bc ~= "unknown" then + return format("%s (%s)",bc,backends[bc].comment or "no comment") + else + return nil + end +end) + +local comment = { "comment", "" } + +tables.vfspecials = allocate { + red = comment, + green = comment, + blue = comment, + black = comment, + startslant = comment, + stopslant = comment, +} + +-- experimental code -- + +function commands.pdfrotation(a) -- somewhat weird here + local s, c = sind(a), cosd(a) + context("%0.6f %0.6f %0.6f %0.6f",c,s,-s,c) +end + diff --git a/tex/context/base/bibl-bib.lua b/tex/context/base/bibl-bib.lua index c86a0c0c2..ab38a0b28 100644 --- a/tex/context/base/bibl-bib.lua +++ b/tex/context/base/bibl-bib.lua @@ -1,766 +1,766 @@ -if not modules then modules = { } end modules ['bibl-bib'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

This is a prelude to integrated bibliography support. This file just loads -bibtex files and converts them to xml so that we can access the content -in a convenient way. Actually handling the data takes place elsewhere.

---ldx]]-- - -local lower, format, gsub, concat = string.lower, string.format, string.gsub, table.concat -local next = next -local utfchar = utf.char -local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns -local textoutf = characters and characters.tex.toutf -local variables = interfaces and interfaces.variables -local settings_to_hash = utilities.parsers.settings_to_hash -local finalizers = xml.finalizers.tex -local xmlfilter, xmltext, getid = xml.filter, xml.text, lxml.getid -local formatters = string.formatters - -local P, R, S, C, Cc, Cs, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct - -local trace_bibxml = false trackers.register("publications.bibxml", function(v) trace_bibtex = v end) - -local report_xml = logs.reporter("publications","xml") - -bibtex = bibtex or { } -local bibtex = bibtex - -bibtex.statistics = bibtex.statistics or { } -local bibtexstats = bibtex.statistics - -bibtexstats.nofbytes = 0 -bibtexstats.nofdefinitions = 0 -bibtexstats.nofshortcuts = 0 - -local defaultshortcuts = { - jan = "1", - feb = "2", - mar = "3", - apr = "4", - may = "5", - jun = "6", - jul = "7", - aug = "8", - sep = "9", - oct = "10", - nov = "11", - dec = "12", -} - -local shortcuts = { } -local data = { } -local entries - --- Currently we expand shortcuts and for large ones (like the acknowledgements --- in tugboat.bib this is not that efficient. However, eventually strings get --- hashed again. - -local function do_shortcut(tag,key,value) - bibtexstats.nofshortcuts = bibtexstats.nofshortcuts + 1 - if lower(tag) == "@string" then - shortcuts[key] = value - end -end - -local function do_definition(tag,key,tab) -- maybe check entries here (saves memory) - if not entries or entries[key] then - bibtexstats.nofdefinitions = bibtexstats.nofdefinitions + 1 - local t = { } - for i=1,#tab,2 do - t[tab[i]] = tab[i+1] - end - local p = data[tag] - if not p then - data[tag] = { [key] = t } - else - p[key] = t - end - end -end - -local function resolve(s) - return shortcuts[s] or defaultshortcuts[s] or s -- can be number -end - -local percent = P("%") -local start = P("@") -local comma = P(",") -local hash = P("#") -local escape = P("\\") -local single = P("'") -local double = P('"') -local left = P('{') -local right = P('}') -local both = left + right -local lineending = S("\n\r") -local space = S(" \t\n\r\f") -local spacing = space^0 -local equal = P("=") -local collapsed = (space^1)/ " " - -local function add(a,b) if b then return a..b else return a end end - -local keyword = C((R("az","AZ","09") + S("@_:-"))^1) -- C((1-space)^1) -local s_quoted = ((escape*single) + collapsed + (1-single))^0 -local d_quoted = ((escape*double) + collapsed + (1-double))^0 -local balanced = lpegpatterns.balanced - -local s_value = (single/"") * s_quoted * (single/"") -local d_value = (double/"") * d_quoted * (double/"") -local b_value = (left /"") * balanced * (right /"") -local r_value = keyword/resolve - -local somevalue = s_value + d_value + b_value + r_value -local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0)) - -local assignment = spacing * keyword * spacing * equal * spacing * value * spacing -local shortcut = keyword * spacing * left * spacing * (assignment * comma^0)^0 * spacing * right -local definition = keyword * spacing * left * spacing * keyword * comma * Ct((assignment * comma^0)^0) * spacing * right -local comment = keyword * spacing * left * (1-right)^0 * spacing * right -local forget = percent^1 * (1-lineending)^0 - --- todo \% - -local grammar = (space + forget 
+ shortcut/do_shortcut + definition/do_definition + comment + 1)^0 - -function bibtex.convert(session,content) - statistics.starttiming(bibtex) - data, shortcuts, entries = session.data, session.shortcuts, session.entries - bibtexstats.nofbytes = bibtexstats.nofbytes + #content - session.nofbytes = session.nofbytes + #content - lpegmatch(grammar,content or "") - statistics.stoptiming(bibtex) -end - -function bibtex.load(session,filename) - local filename = resolvers.findfile(filename,"bib") - if filename ~= "" then - local data = io.loaddata(filename) or "" - if data == "" then - report_xml("empty file %a, no conversion to xml",filename) - elseif trace_bibxml then - report_xml("converting file %a to xml",filename) - end - bibtex.convert(session,data) - end -end - -function bibtex.new() - return { - data = { }, - shortcuts = { }, - xml = xml.convert("\n"), - nofbytes = 0, - entries = nil, - loaded = false, - } -end - -local p_escaped = lpegpatterns.xml.escaped - -local ihatethis = { - f = "\\f", - n = "\\n", - r = "\\r", - s = "\\s", - t = "\\t", - v = "\\v", - z = "\\z", -} - -local command = P("\\")/"" * Cc("\\bibtexcommand{") * (R("az","AZ")^1) * Cc("}") -local any = P(1) -local done = P(-1) -local one_l = P("{") / "" -local one_r = P("}") / "" -local two_l = P("{{") / "" -local two_r = P("}}") / "" - -local filter = Cs( - two_l * (command + any - two_r - done)^0 * two_r * done + - one_l * (command + any - one_r - done)^0 * one_r * done + - (command + any )^0 -) - -function bibtex.toxml(session,options) - if session.loaded then - return - else - session.loaded = true - end - -- we can always speed this up if needed - -- format slows down things a bit but who cares - statistics.starttiming(bibtex) - local result, r = { }, 0 - local options = settings_to_hash(options) - local convert = options.convert -- todo: interface - local strip = options.strip -- todo: interface - local entries = session.entries - r = r + 1 ; result[r] = "" - r = r + 1 ; result[r] = "" - for id, categories in next, session.data do - id = lower(gsub(id,"^@","")) - for name, entry in next, categories do - if not entries or entries[name] then - r = r + 1 ; result[r] = formatters[""](lower(name),id) - for key, value in next, entry do - value = gsub(value,"\\(.)",ihatethis) -- this really needs checking - value = lpegmatch(p_escaped,value) - if value ~= "" then - if convert then - value = textoutf(value,true) - end - if strip then - -- as there is no proper namespace in bibtex we need this - -- kind of hackery ... 
bibtex databases are quite unportable - value = lpegmatch(filter,value) or value - end - r = r + 1 ; result[r] = formatters[" %s"](key,value) - end - end - r = r + 1 ; result[r] = "" - end - end - end - r = r + 1 ; result[r] = "" - result = concat(result,"\n") - -- alternatively we could use lxml.convert - session.xml = xml.convert(result, { - resolve_entities = true, - resolve_predefined_entities = true, -- in case we have escaped entities - -- unify_predefined_entities = true, -- & -> & - utfize_entities = true, - } ) - session.data = nil - session.shortcuts = nil - statistics.stoptiming(bibtex) -end - -statistics.register("bibtex load time", function() - local nofbytes = bibtexstats.nofbytes - if nofbytes > 0 then - return format("%s seconds (%s bytes, %s definitions, %s shortcuts)", - statistics.elapsedtime(bibtex),nofbytes,bibtexstats.nofdefinitions,bibtexstats.nofshortcuts) - else - return nil - end -end) - ---~ str = [[ ---~ @COMMENT { CRAP } ---~ @STRING{ hans = "h a n s" } ---~ @STRING{ taco = "t a c o" } ---~ @SOMETHING{ key1, abc = "t a c o" , def = "h a n s" } ---~ @SOMETHING{ key2, abc = hans # taco } ---~ @SOMETHING{ key3, abc = "hans" # taco } ---~ @SOMETHING{ key4, abc = hans # "taco" } ---~ @SOMETHING{ key5, abc = hans # taco # "hans" # "taco"} ---~ @SOMETHING{ key6, abc = {oeps {oeps} oeps} } ---~ ]] - ---~ local session = bibtex.new() ---~ bibtex.convert(session,str) ---~ bibtex.toxml(session) ---~ print(session.nofbytes,statistics.elapsedtime(bibtex)) - ---~ local session = bibtex.new() ---~ bibtex.load(session,"IEEEabrv.bib") ---~ bibtex.load(session,"IEEEfull.bib") ---~ bibtex.load(session,"IEEEexample.bib") ---~ bibtex.toxml(session) ---~ print(session.nofbytes,statistics.elapsedtime(bibtex)) - ---~ local session = bibtex.new() ---~ bibtex.load(session,"gut.bib") ---~ bibtex.load(session,"komoedie.bib") ---~ bibtex.load(session,"texbook1.bib") ---~ bibtex.load(session,"texbook2.bib") ---~ bibtex.load(session,"texbook3.bib") ---~ bibtex.load(session,"texgraph.bib") ---~ bibtex.load(session,"texjourn.bib") ---~ bibtex.load(session,"texnique.bib") ---~ bibtex.load(session,"tugboat.bib") ---~ bibtex.toxml(session) ---~ print(session.nofbytes,statistics.elapsedtime(bibtex)) - ---~ print(table.serialize(session.data)) ---~ print(table.serialize(session.shortcuts)) ---~ print(xml.serialize(session.xml)) - -if not characters then dofile(resolvers.findfile("char-def.lua")) end - -local chardata = characters.data -local concat = table.concat - -local lpeg = lpeg - -local P, Ct, lpegmatch, lpegpatterns = lpeg.P, lpeg.Ct, lpeg.match, lpeg.patterns - -local space, comma = P(" "), P(",") - -local andsplitter = lpeg.tsplitat(space^1 * "and" * space^1) -local commasplitter = lpeg.tsplitat(space^0 * comma * space^0) -local spacesplitter = lpeg.tsplitat(space^1) -local firstcharacter = lpegpatterns.utf8byte - -local function is_upper(str) - local first = lpegmatch(firstcharacter,str) - local okay = chardata[first] - return okay and okay.category == "lu" -end - -local function splitauthors(str) - local authors = lpegmatch(andsplitter,str) - for i=1,#authors do - local firstnames, vons, surnames, initials, juniors, words - local author = authors[i] - local split = lpegmatch(commasplitter,author) - local n = #split - if n == 1 then - --~ First von Last - words = lpegmatch(spacesplitter,author) - firstnames, vons, surnames = { }, { }, { } - local i, n = 1, #words - while i <= n do - local w = words[i] - if is_upper(w) then - firstnames[#firstnames+1], i = w, i + 1 - else - break - end - 
end - while i <= n do - local w = words[i] - if is_upper(w) then - break - else - vons[#vons+1], i = w, i + 1 - end - end - while i <= n do - surnames[#surnames+1], i = words[i], i + 1 - end - elseif n == 2 then - --~ von Last, First - words = lpegmatch(spacesplitter,split[2]) - surnames = lpegmatch(spacesplitter,split[1]) - firstnames, vons = { }, { } - local i, n = 1, #words - while i <= n do - local w = words[i] - if is_upper(w) then - firstnames[#firstnames+1], i = w, i + 1 - else - break - end - end - while i <= n do - vons[#vons+1], i = words[i], i + 1 - end - else - --~ von Last, Jr ,First - firstnames = lpegmatch(spacesplitter,split[1]) - juniors = lpegmatch(spacesplitter,split[2]) - surnames = lpegmatch(spacesplitter,split[3]) - if n > 3 then - -- error - end - end - if #surnames == 0 then - surnames[1] = firstnames[#firstnames] - firstnames[#firstnames] = nil - end - if firstnames then - initials = { } - for i=1,#firstnames do - initials[i] = utfchar(lpegmatch(firstcharacter,firstnames[i])) - end - end - authors[i] = { - original = author, - firstnames = firstnames, - vons = vons, - surnames = surnames, - initials = initials, - juniors = juniors, - } - end - authors.original = str - return authors -end - -local function the_initials(initials,symbol) - local t, symbol = { }, symbol or "." - for i=1,#initials do - t[i] = initials[i] .. symbol - end - return t -end - --- authors - -bibtex.authors = bibtex.authors or { } - -local authors = bibtex.authors - -local defaultsettings = { - firstnamesep = " ", - vonsep = " ", - surnamesep = " ", - juniorsep = " ", - surnamejuniorsep = ", ", - juniorjuniorsep = ", ", - surnamefirstnamesep = ", ", - surnameinitialsep = ", ", - namesep = ", ", - lastnamesep = " and ", - finalnamesep = " and ", -} - -function authors.normal(author,settings) - local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors - local result, settings = { }, settings or defaultsettings - if firstnames and #firstnames > 0 then - result[#result+1] = concat(firstnames," ") - result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep - end - if vons and #vons > 0 then - result[#result+1] = concat(vons," ") - result[#result+1] = settings.vonsep or defaultsettings.vonsep - end - if surnames then - result[#result+1] = concat(surnames," ") - end - if juniors and #juniors > 0 then - result[#result+1] = concat(juniors," ") - result[#result+1] = settings.surnamesep or defaultsettings.surnamesep - end - return concat(result) -end - -function authors.normalshort(author,settings) - local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors - local result, settings = { }, settings or defaultsettings - if firstnames and #firstnames > 0 then - result[#result+1] = concat(firstnames," ") - result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep - end - if vons and #vons > 0 then - result[#result+1] = concat(vons," ") - result[#result+1] = settings.vonsep or defaultsettings.vonsep - end - if surnames then - result[#result+1] = concat(surnames," ") - end - if juniors and #juniors > 0 then - result[#result+1] = concat(juniors," ") - result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep - end - return concat(result) -end - -function authors.inverted(author,settings) - local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors - local result, settings = { }, settings or defaultsettings - 
if vons and #vons > 0 then - result[#result+1] = concat(vons," ") - result[#result+1] = settings.vonsep or defaultsettings.vonsep - end - if surnames then - result[#result+1] = concat(surnames," ") - end - if juniors and #juniors > 0 then - result[#result+1] = settings.juniorjuniorsep or defaultsettings.juniorjuniorsep - result[#result+1] = concat(juniors," ") - end - if firstnames and #firstnames > 0 then - result[#result+1] = settings.surnamefirstnamesep or defaultsettings.surnamefirstnamesep - result[#result+1] = concat(firstnames," ") - end - return concat(result) -end - -function authors.invertedshort(author,settings) - local vons, surnames, initials, juniors = author.vons, author.surnames, author.initials, author.juniors - local result, settings = { }, settings or defaultsettings - if vons and #vons > 0 then - result[#result+1] = concat(vons," ") - result[#result+1] = settings.vonsep or defaultsettings.vonsep - end - if surnames then - result[#result+1] = concat(surnames," ") - end - if juniors and #juniors > 0 then - result[#result+1] = settings.juniorjuniorsep or defaultsettings.juniorjuniorsep - result[#result+1] = concat(juniors," ") - end - if initials and #initials > 0 then - result[#result+1] = settings.surnameinitialsep or defaultsettings.surnameinitialsep - result[#result+1] = concat(the_initials(initials)," ") - end - return concat(result) -end - -local lastconcatsize = 1 - -local function bibtexconcat(t,settings) - local namesep = settings.namesep or defaultsettings.namesep or ", " - local lastnamesep = settings.lastnamesep or defaultsettings.lastnamesep or namesep - local finalnamesep = settings.finalnamesep or defaultsettings.finalnamesep or lastnamesep - local lastconcatsize = #t - if lastconcatsize > 2 then - local s = { } - for i=1,lastconcatsize-2 do - s[i] = t[i] .. namesep - end - s[lastconcatsize-1], s[lastconcatsize] = t[lastconcatsize-1] .. finalnamesep, t[lastconcatsize] - return concat(s) - elseif lastconcatsize > 1 then - return concat(t,lastnamesep) - elseif lastconcatsize > 0 then - return t[1] - else - return "" - end -end - -function authors.concat(author,combiner,what,settings) - if type(combiner) == "string" then - combiner = authors[combiner or "normal"] or authors.normal - end - local split = splitauthors(author) - local setting = settings[what] - local etallimit, etaldisplay, etaltext = 1000, 1000, "" - if setting then - etallimit = settings.etallimit or 1000 - etaldisplay = settings.etaldisplay or etallimit - etalltext = settings.etaltext or "" - end - local max = #split - if max > etallimit and etaldisplay < max then - max = etaldisplay - end - for i=1,max do - split[i] = combiner(split[i],settings) - end - local result = bibtexconcat(split,settings) - if max < #split then - return result - else - return result .. etaltext - end -end - -function authors.short(author,year) - local result = { } - if author then - local authors = splitauthors(author) - for a=1,#authors do - local aa = authors[a] - local initials = aa.initials - for i=1,#initials do - result[#result+1] = initials[i] - end - local surnames = aa.surnames - for s=1,#surnames do - result[#result+1] = utfchar(lpegmatch(firstcharacter,surnames[s])) - end - end - end - if year then - result[#result+1] = year - end - return concat(result) -end - --- We can consider creating a hashtable key -> entry but I wonder if --- pays off. 
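-- A minimal sketch, not from the patch itself, of how the formatting variants
-- above differ. The sample table mimics what splitauthors returns for
-- "Hartmut Harald von der Henkel Hut", one of the commented samples near the
-- end of this file; the expected strings follow from the defaultsettings.

local sample = {
    firstnames = { "Hartmut", "Harald" },
    vons       = { "von", "der" },
    surnames   = { "Henkel", "Hut" },
    initials   = { "H", "H" },
}
print(bibtex.authors.normal  (sample)) -- Hartmut Harald von der Henkel Hut
print(bibtex.authors.inverted(sample)) -- von der Henkel Hut, Hartmut Harald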
- -local function collectauthoryears(id,list) - list = settings_to_hash(list) - id = getid(id) - local found = { } - for e in xml.collected(id,"/bibtex/entry") do - if list[e.at.tag] then - local year = xmlfilter(e,"xml:///field[@name='year']/text()") - local author = xmlfilter(e,"xml:///field[@name='author']/text()") - if author and year then - local a = found[author] - if not a then - a = { } - found[author] = a - end - local y = a[year] - if not y then - y = { } - a[year] = y - end - y[#y+1] = e - end - end - end - -- found = { author = { year_1 = { e1, e2, e3 } } } - local done = { } - for author, years in next, found do - local yrs = { } - for year, entries in next, years do - if subyears then - -- -- add letters to all entries of an author and if so shouldn't - -- -- we tag all years of an author as soon as we do this? - -- if #entries > 1 then - -- for i=1,#years do - -- local entry = years[i] - -- -- years[i] = year .. string.char(i + string.byte("0") - 1) - -- end - -- end - else - yrs[#yrs+1] = year - end - end - done[author] = yrs - end - return done -end - -local method, settings = "normal", { } - -function authors.setsettings(s) - settings = s or settings -end - -if commands then - - local sessions = { } - - function commands.definebibtexsession(name) - sessions[name] = bibtex.new() - end - - function commands.preparebibtexsession(name,xmlname,options) - bibtex.toxml(sessions[name],options) - lxml.register(xmlname,sessions[name].xml) - end - - function commands.registerbibtexfile(name,filename) - bibtex.load(sessions[name],filename) - end - - function commands.registerbibtexentry(name,entry) - local session = sessions[name] - local entries = session.entries - if not entries then - session.entries = { [entry] = true } -- here we can keep more info - else - entries[entry] = true - end - end - - -- commands.bibtexconcat = bibtexconcat - - -- finalizers can be rather dumb as we have just text and no embedded xml - - function finalizers.bibtexconcat(collected,method,what) - if collected then - local author = collected[1].dt[1] or "" - if author ~= "" then - context(authors.concat(author,method,what,settings)) - end - end - end - - function finalizers.bibtexshort(collected) - if collected then - local c = collected[1] - local year = xmlfilter(c,"xml://field[@name='year']/text()") - local author = xmlfilter(c,"xml://field[@name='author']/text()") - context(authors.short(author,year)) - end - end - - -- experiment: - - --~ -- alternative approach: keep data at the tex end - - --~ local function xbibtexconcat(t,sep,finalsep,lastsep) - --~ local n = #t - --~ if n > 0 then - --~ context(t[1]) - --~ if n > 1 then - --~ if n > 2 then - --~ for i=2,n-1 do - --~ context.bibtexpublicationsparameter("sep") - --~ context(t[i]) - --~ end - --~ context.bibtexpublicationsparameter("finalsep") - --~ else - --~ context.bibtexpublicationsparameter("lastsep") - --~ end - --~ context(t[n]) - --~ end - --~ end - --~ end - - -- todo : sort - - -- todo: choose between bibtex or commands namespace - - function bibtex.authorref(id,list) - local result = collectauthoryears(id,list,method,what) - for author, years in next, result do - context(authors.concat(author,method,what,settings)) - end - end - - function bibtex.authoryearref(id,list) - local result = collectauthoryears(id,list,method,what) - for author, years in next, result do - context("%s (%s)",authors.concat(author,method,what,settings),concat(years,", ")) - end - end - - function bibtex.authoryearsref(id,list) - local result = 
collectauthoryears(id,list,method,what) - for author, years in next, result do - context("(%s, %s)",authors.concat(author,method,what,settings),concat(years,", ")) - end - end - - function bibtex.singularorplural(singular,plural) - if lastconcatsize and lastconcatsize > 1 then - context(plural) - else - context(singular) - end - end - -end - - ---~ local function test(sample) ---~ local authors = splitauthors(sample) ---~ print(table.serialize(authors)) ---~ for i=1,#authors do ---~ local author = authors[i] ---~ print(normalauthor (author,settings)) ---~ print(normalshortauthor (author,settings)) ---~ print(invertedauthor (author,settings)) ---~ print(invertedshortauthor(author,settings)) ---~ end ---~ print(concatauthors(sample,settings,normalauthor)) ---~ print(concatauthors(sample,settings,normalshortauthor)) ---~ print(concatauthors(sample,settings,invertedauthor)) ---~ print(concatauthors(sample,settings,invertedshortauthor)) ---~ end - ---~ local sample_a = "Hagen, Hans and Hoekwater, Taco Whoever T. Ex. and Henkel Hut, Hartmut Harald von der" ---~ local sample_b = "Hans Hagen and Taco Whoever T. Ex. Hoekwater and Hartmut Harald von der Henkel Hut" - ---~ test(sample_a) ---~ test(sample_b) +if not modules then modules = { } end modules ['bibl-bib'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

This is a prelude to integrated bibliography support. This file just loads +bibtex files and converts them to xml so that we can access the content +in a convenient way. Actually handling the data takes place elsewhere.

+--ldx]]-- + +local lower, format, gsub, concat = string.lower, string.format, string.gsub, table.concat +local next = next +local utfchar = utf.char +local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns +local textoutf = characters and characters.tex.toutf +local variables = interfaces and interfaces.variables +local settings_to_hash = utilities.parsers.settings_to_hash +local finalizers = xml.finalizers.tex +local xmlfilter, xmltext, getid = xml.filter, xml.text, lxml.getid +local formatters = string.formatters + +local P, R, S, C, Cc, Cs, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct + +local trace_bibxml = false trackers.register("publications.bibxml", function(v) trace_bibtex = v end) + +local report_xml = logs.reporter("publications","xml") + +bibtex = bibtex or { } +local bibtex = bibtex + +bibtex.statistics = bibtex.statistics or { } +local bibtexstats = bibtex.statistics + +bibtexstats.nofbytes = 0 +bibtexstats.nofdefinitions = 0 +bibtexstats.nofshortcuts = 0 + +local defaultshortcuts = { + jan = "1", + feb = "2", + mar = "3", + apr = "4", + may = "5", + jun = "6", + jul = "7", + aug = "8", + sep = "9", + oct = "10", + nov = "11", + dec = "12", +} + +local shortcuts = { } +local data = { } +local entries + +-- Currently we expand shortcuts and for large ones (like the acknowledgements +-- in tugboat.bib this is not that efficient. However, eventually strings get +-- hashed again. + +local function do_shortcut(tag,key,value) + bibtexstats.nofshortcuts = bibtexstats.nofshortcuts + 1 + if lower(tag) == "@string" then + shortcuts[key] = value + end +end + +local function do_definition(tag,key,tab) -- maybe check entries here (saves memory) + if not entries or entries[key] then + bibtexstats.nofdefinitions = bibtexstats.nofdefinitions + 1 + local t = { } + for i=1,#tab,2 do + t[tab[i]] = tab[i+1] + end + local p = data[tag] + if not p then + data[tag] = { [key] = t } + else + p[key] = t + end + end +end + +local function resolve(s) + return shortcuts[s] or defaultshortcuts[s] or s -- can be number +end + +local percent = P("%") +local start = P("@") +local comma = P(",") +local hash = P("#") +local escape = P("\\") +local single = P("'") +local double = P('"') +local left = P('{') +local right = P('}') +local both = left + right +local lineending = S("\n\r") +local space = S(" \t\n\r\f") +local spacing = space^0 +local equal = P("=") +local collapsed = (space^1)/ " " + +local function add(a,b) if b then return a..b else return a end end + +local keyword = C((R("az","AZ","09") + S("@_:-"))^1) -- C((1-space)^1) +local s_quoted = ((escape*single) + collapsed + (1-single))^0 +local d_quoted = ((escape*double) + collapsed + (1-double))^0 +local balanced = lpegpatterns.balanced + +local s_value = (single/"") * s_quoted * (single/"") +local d_value = (double/"") * d_quoted * (double/"") +local b_value = (left /"") * balanced * (right /"") +local r_value = keyword/resolve + +local somevalue = s_value + d_value + b_value + r_value +local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0)) + +local assignment = spacing * keyword * spacing * equal * spacing * value * spacing +local shortcut = keyword * spacing * left * spacing * (assignment * comma^0)^0 * spacing * right +local definition = keyword * spacing * left * spacing * keyword * comma * Ct((assignment * comma^0)^0) * spacing * right +local comment = keyword * spacing * left * (1-right)^0 * spacing * right +local forget = percent^1 * (1-lineending)^0 + +-- todo \% + +local grammar = (space + forget 
+ shortcut/do_shortcut + definition/do_definition + comment + 1)^0 + +function bibtex.convert(session,content) + statistics.starttiming(bibtex) + data, shortcuts, entries = session.data, session.shortcuts, session.entries + bibtexstats.nofbytes = bibtexstats.nofbytes + #content + session.nofbytes = session.nofbytes + #content + lpegmatch(grammar,content or "") + statistics.stoptiming(bibtex) +end + +function bibtex.load(session,filename) + local filename = resolvers.findfile(filename,"bib") + if filename ~= "" then + local data = io.loaddata(filename) or "" + if data == "" then + report_xml("empty file %a, no conversion to xml",filename) + elseif trace_bibxml then + report_xml("converting file %a to xml",filename) + end + bibtex.convert(session,data) + end +end + +function bibtex.new() + return { + data = { }, + shortcuts = { }, + xml = xml.convert("\n"), + nofbytes = 0, + entries = nil, + loaded = false, + } +end + +local p_escaped = lpegpatterns.xml.escaped + +local ihatethis = { + f = "\\f", + n = "\\n", + r = "\\r", + s = "\\s", + t = "\\t", + v = "\\v", + z = "\\z", +} + +local command = P("\\")/"" * Cc("\\bibtexcommand{") * (R("az","AZ")^1) * Cc("}") +local any = P(1) +local done = P(-1) +local one_l = P("{") / "" +local one_r = P("}") / "" +local two_l = P("{{") / "" +local two_r = P("}}") / "" + +local filter = Cs( + two_l * (command + any - two_r - done)^0 * two_r * done + + one_l * (command + any - one_r - done)^0 * one_r * done + + (command + any )^0 +) + +function bibtex.toxml(session,options) + if session.loaded then + return + else + session.loaded = true + end + -- we can always speed this up if needed + -- format slows down things a bit but who cares + statistics.starttiming(bibtex) + local result, r = { }, 0 + local options = settings_to_hash(options) + local convert = options.convert -- todo: interface + local strip = options.strip -- todo: interface + local entries = session.entries + r = r + 1 ; result[r] = "" + r = r + 1 ; result[r] = "" + for id, categories in next, session.data do + id = lower(gsub(id,"^@","")) + for name, entry in next, categories do + if not entries or entries[name] then + r = r + 1 ; result[r] = formatters[""](lower(name),id) + for key, value in next, entry do + value = gsub(value,"\\(.)",ihatethis) -- this really needs checking + value = lpegmatch(p_escaped,value) + if value ~= "" then + if convert then + value = textoutf(value,true) + end + if strip then + -- as there is no proper namespace in bibtex we need this + -- kind of hackery ... 
bibtex databases are quite unportable + value = lpegmatch(filter,value) or value + end + r = r + 1 ; result[r] = formatters[" %s"](key,value) + end + end + r = r + 1 ; result[r] = "" + end + end + end + r = r + 1 ; result[r] = "" + result = concat(result,"\n") + -- alternatively we could use lxml.convert + session.xml = xml.convert(result, { + resolve_entities = true, + resolve_predefined_entities = true, -- in case we have escaped entities + -- unify_predefined_entities = true, -- & -> & + utfize_entities = true, + } ) + session.data = nil + session.shortcuts = nil + statistics.stoptiming(bibtex) +end + +statistics.register("bibtex load time", function() + local nofbytes = bibtexstats.nofbytes + if nofbytes > 0 then + return format("%s seconds (%s bytes, %s definitions, %s shortcuts)", + statistics.elapsedtime(bibtex),nofbytes,bibtexstats.nofdefinitions,bibtexstats.nofshortcuts) + else + return nil + end +end) + +--~ str = [[ +--~ @COMMENT { CRAP } +--~ @STRING{ hans = "h a n s" } +--~ @STRING{ taco = "t a c o" } +--~ @SOMETHING{ key1, abc = "t a c o" , def = "h a n s" } +--~ @SOMETHING{ key2, abc = hans # taco } +--~ @SOMETHING{ key3, abc = "hans" # taco } +--~ @SOMETHING{ key4, abc = hans # "taco" } +--~ @SOMETHING{ key5, abc = hans # taco # "hans" # "taco"} +--~ @SOMETHING{ key6, abc = {oeps {oeps} oeps} } +--~ ]] + +--~ local session = bibtex.new() +--~ bibtex.convert(session,str) +--~ bibtex.toxml(session) +--~ print(session.nofbytes,statistics.elapsedtime(bibtex)) + +--~ local session = bibtex.new() +--~ bibtex.load(session,"IEEEabrv.bib") +--~ bibtex.load(session,"IEEEfull.bib") +--~ bibtex.load(session,"IEEEexample.bib") +--~ bibtex.toxml(session) +--~ print(session.nofbytes,statistics.elapsedtime(bibtex)) + +--~ local session = bibtex.new() +--~ bibtex.load(session,"gut.bib") +--~ bibtex.load(session,"komoedie.bib") +--~ bibtex.load(session,"texbook1.bib") +--~ bibtex.load(session,"texbook2.bib") +--~ bibtex.load(session,"texbook3.bib") +--~ bibtex.load(session,"texgraph.bib") +--~ bibtex.load(session,"texjourn.bib") +--~ bibtex.load(session,"texnique.bib") +--~ bibtex.load(session,"tugboat.bib") +--~ bibtex.toxml(session) +--~ print(session.nofbytes,statistics.elapsedtime(bibtex)) + +--~ print(table.serialize(session.data)) +--~ print(table.serialize(session.shortcuts)) +--~ print(xml.serialize(session.xml)) + +if not characters then dofile(resolvers.findfile("char-def.lua")) end + +local chardata = characters.data +local concat = table.concat + +local lpeg = lpeg + +local P, Ct, lpegmatch, lpegpatterns = lpeg.P, lpeg.Ct, lpeg.match, lpeg.patterns + +local space, comma = P(" "), P(",") + +local andsplitter = lpeg.tsplitat(space^1 * "and" * space^1) +local commasplitter = lpeg.tsplitat(space^0 * comma * space^0) +local spacesplitter = lpeg.tsplitat(space^1) +local firstcharacter = lpegpatterns.utf8byte + +local function is_upper(str) + local first = lpegmatch(firstcharacter,str) + local okay = chardata[first] + return okay and okay.category == "lu" +end + +local function splitauthors(str) + local authors = lpegmatch(andsplitter,str) + for i=1,#authors do + local firstnames, vons, surnames, initials, juniors, words + local author = authors[i] + local split = lpegmatch(commasplitter,author) + local n = #split + if n == 1 then + --~ First von Last + words = lpegmatch(spacesplitter,author) + firstnames, vons, surnames = { }, { }, { } + local i, n = 1, #words + while i <= n do + local w = words[i] + if is_upper(w) then + firstnames[#firstnames+1], i = w, i + 1 + else + break + end + 
end + while i <= n do + local w = words[i] + if is_upper(w) then + break + else + vons[#vons+1], i = w, i + 1 + end + end + while i <= n do + surnames[#surnames+1], i = words[i], i + 1 + end + elseif n == 2 then + --~ von Last, First + words = lpegmatch(spacesplitter,split[2]) + surnames = lpegmatch(spacesplitter,split[1]) + firstnames, vons = { }, { } + local i, n = 1, #words + while i <= n do + local w = words[i] + if is_upper(w) then + firstnames[#firstnames+1], i = w, i + 1 + else + break + end + end + while i <= n do + vons[#vons+1], i = words[i], i + 1 + end + else + --~ von Last, Jr ,First + firstnames = lpegmatch(spacesplitter,split[1]) + juniors = lpegmatch(spacesplitter,split[2]) + surnames = lpegmatch(spacesplitter,split[3]) + if n > 3 then + -- error + end + end + if #surnames == 0 then + surnames[1] = firstnames[#firstnames] + firstnames[#firstnames] = nil + end + if firstnames then + initials = { } + for i=1,#firstnames do + initials[i] = utfchar(lpegmatch(firstcharacter,firstnames[i])) + end + end + authors[i] = { + original = author, + firstnames = firstnames, + vons = vons, + surnames = surnames, + initials = initials, + juniors = juniors, + } + end + authors.original = str + return authors +end + +local function the_initials(initials,symbol) + local t, symbol = { }, symbol or "." + for i=1,#initials do + t[i] = initials[i] .. symbol + end + return t +end + +-- authors + +bibtex.authors = bibtex.authors or { } + +local authors = bibtex.authors + +local defaultsettings = { + firstnamesep = " ", + vonsep = " ", + surnamesep = " ", + juniorsep = " ", + surnamejuniorsep = ", ", + juniorjuniorsep = ", ", + surnamefirstnamesep = ", ", + surnameinitialsep = ", ", + namesep = ", ", + lastnamesep = " and ", + finalnamesep = " and ", +} + +function authors.normal(author,settings) + local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors + local result, settings = { }, settings or defaultsettings + if firstnames and #firstnames > 0 then + result[#result+1] = concat(firstnames," ") + result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep + end + if vons and #vons > 0 then + result[#result+1] = concat(vons," ") + result[#result+1] = settings.vonsep or defaultsettings.vonsep + end + if surnames then + result[#result+1] = concat(surnames," ") + end + if juniors and #juniors > 0 then + result[#result+1] = concat(juniors," ") + result[#result+1] = settings.surnamesep or defaultsettings.surnamesep + end + return concat(result) +end + +function authors.normalshort(author,settings) + local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors + local result, settings = { }, settings or defaultsettings + if firstnames and #firstnames > 0 then + result[#result+1] = concat(firstnames," ") + result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep + end + if vons and #vons > 0 then + result[#result+1] = concat(vons," ") + result[#result+1] = settings.vonsep or defaultsettings.vonsep + end + if surnames then + result[#result+1] = concat(surnames," ") + end + if juniors and #juniors > 0 then + result[#result+1] = concat(juniors," ") + result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep + end + return concat(result) +end + +function authors.inverted(author,settings) + local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors + local result, settings = { }, settings or defaultsettings + 
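    -- Not from the patch: a worked example under defaultsettings. For the
    -- sample author "Hartmut Harald von der Henkel Hut" (see the commented
    -- tests near the end of this file) this function yields
    -- "von der Henkel Hut, Hartmut Harald": vons and surnames first, then the
    -- surnamefirstnamesep (", "), then the first names.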
if vons and #vons > 0 then + result[#result+1] = concat(vons," ") + result[#result+1] = settings.vonsep or defaultsettings.vonsep + end + if surnames then + result[#result+1] = concat(surnames," ") + end + if juniors and #juniors > 0 then + result[#result+1] = settings.juniorjuniorsep or defaultsettings.juniorjuniorsep + result[#result+1] = concat(juniors," ") + end + if firstnames and #firstnames > 0 then + result[#result+1] = settings.surnamefirstnamesep or defaultsettings.surnamefirstnamesep + result[#result+1] = concat(firstnames," ") + end + return concat(result) +end + +function authors.invertedshort(author,settings) + local vons, surnames, initials, juniors = author.vons, author.surnames, author.initials, author.juniors + local result, settings = { }, settings or defaultsettings + if vons and #vons > 0 then + result[#result+1] = concat(vons," ") + result[#result+1] = settings.vonsep or defaultsettings.vonsep + end + if surnames then + result[#result+1] = concat(surnames," ") + end + if juniors and #juniors > 0 then + result[#result+1] = settings.juniorjuniorsep or defaultsettings.juniorjuniorsep + result[#result+1] = concat(juniors," ") + end + if initials and #initials > 0 then + result[#result+1] = settings.surnameinitialsep or defaultsettings.surnameinitialsep + result[#result+1] = concat(the_initials(initials)," ") + end + return concat(result) +end + +local lastconcatsize = 1 + +local function bibtexconcat(t,settings) + local namesep = settings.namesep or defaultsettings.namesep or ", " + local lastnamesep = settings.lastnamesep or defaultsettings.lastnamesep or namesep + local finalnamesep = settings.finalnamesep or defaultsettings.finalnamesep or lastnamesep + local lastconcatsize = #t + if lastconcatsize > 2 then + local s = { } + for i=1,lastconcatsize-2 do + s[i] = t[i] .. namesep + end + s[lastconcatsize-1], s[lastconcatsize] = t[lastconcatsize-1] .. finalnamesep, t[lastconcatsize] + return concat(s) + elseif lastconcatsize > 1 then + return concat(t,lastnamesep) + elseif lastconcatsize > 0 then + return t[1] + else + return "" + end +end + +function authors.concat(author,combiner,what,settings) + if type(combiner) == "string" then + combiner = authors[combiner or "normal"] or authors.normal + end + local split = splitauthors(author) + local setting = settings[what] + local etallimit, etaldisplay, etaltext = 1000, 1000, "" + if setting then + etallimit = settings.etallimit or 1000 + etaldisplay = settings.etaldisplay or etallimit + etalltext = settings.etaltext or "" + end + local max = #split + if max > etallimit and etaldisplay < max then + max = etaldisplay + end + for i=1,max do + split[i] = combiner(split[i],settings) + end + local result = bibtexconcat(split,settings) + if max < #split then + return result + else + return result .. etaltext + end +end + +function authors.short(author,year) + local result = { } + if author then + local authors = splitauthors(author) + for a=1,#authors do + local aa = authors[a] + local initials = aa.initials + for i=1,#initials do + result[#result+1] = initials[i] + end + local surnames = aa.surnames + for s=1,#surnames do + result[#result+1] = utfchar(lpegmatch(firstcharacter,surnames[s])) + end + end + end + if year then + result[#result+1] = year + end + return concat(result) +end + +-- We can consider creating a hashtable key -> entry but I wonder if +-- pays off. 
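-- A minimal sketch, not from the patch itself, of what splitauthors yields for
-- the two canonical BibTeX name forms; the input strings reuse the commented
-- samples near the end of this file.

local a = splitauthors("Hagen, Hans")
-- a[1].surnames   == { "Hagen" }, a[1].firstnames == { "Hans" }, a[1].initials == { "H" }
local b = splitauthors("Hartmut Harald von der Henkel Hut")
-- b[1].firstnames == { "Hartmut", "Harald" }, b[1].vons == { "von", "der" },
-- b[1].surnames   == { "Henkel", "Hut" }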
+ +local function collectauthoryears(id,list) + list = settings_to_hash(list) + id = getid(id) + local found = { } + for e in xml.collected(id,"/bibtex/entry") do + if list[e.at.tag] then + local year = xmlfilter(e,"xml:///field[@name='year']/text()") + local author = xmlfilter(e,"xml:///field[@name='author']/text()") + if author and year then + local a = found[author] + if not a then + a = { } + found[author] = a + end + local y = a[year] + if not y then + y = { } + a[year] = y + end + y[#y+1] = e + end + end + end + -- found = { author = { year_1 = { e1, e2, e3 } } } + local done = { } + for author, years in next, found do + local yrs = { } + for year, entries in next, years do + if subyears then + -- -- add letters to all entries of an author and if so shouldn't + -- -- we tag all years of an author as soon as we do this? + -- if #entries > 1 then + -- for i=1,#years do + -- local entry = years[i] + -- -- years[i] = year .. string.char(i + string.byte("0") - 1) + -- end + -- end + else + yrs[#yrs+1] = year + end + end + done[author] = yrs + end + return done +end + +local method, settings = "normal", { } + +function authors.setsettings(s) + settings = s or settings +end + +if commands then + + local sessions = { } + + function commands.definebibtexsession(name) + sessions[name] = bibtex.new() + end + + function commands.preparebibtexsession(name,xmlname,options) + bibtex.toxml(sessions[name],options) + lxml.register(xmlname,sessions[name].xml) + end + + function commands.registerbibtexfile(name,filename) + bibtex.load(sessions[name],filename) + end + + function commands.registerbibtexentry(name,entry) + local session = sessions[name] + local entries = session.entries + if not entries then + session.entries = { [entry] = true } -- here we can keep more info + else + entries[entry] = true + end + end + + -- commands.bibtexconcat = bibtexconcat + + -- finalizers can be rather dumb as we have just text and no embedded xml + + function finalizers.bibtexconcat(collected,method,what) + if collected then + local author = collected[1].dt[1] or "" + if author ~= "" then + context(authors.concat(author,method,what,settings)) + end + end + end + + function finalizers.bibtexshort(collected) + if collected then + local c = collected[1] + local year = xmlfilter(c,"xml://field[@name='year']/text()") + local author = xmlfilter(c,"xml://field[@name='author']/text()") + context(authors.short(author,year)) + end + end + + -- experiment: + + --~ -- alternative approach: keep data at the tex end + + --~ local function xbibtexconcat(t,sep,finalsep,lastsep) + --~ local n = #t + --~ if n > 0 then + --~ context(t[1]) + --~ if n > 1 then + --~ if n > 2 then + --~ for i=2,n-1 do + --~ context.bibtexpublicationsparameter("sep") + --~ context(t[i]) + --~ end + --~ context.bibtexpublicationsparameter("finalsep") + --~ else + --~ context.bibtexpublicationsparameter("lastsep") + --~ end + --~ context(t[n]) + --~ end + --~ end + --~ end + + -- todo : sort + + -- todo: choose between bibtex or commands namespace + + function bibtex.authorref(id,list) + local result = collectauthoryears(id,list,method,what) + for author, years in next, result do + context(authors.concat(author,method,what,settings)) + end + end + + function bibtex.authoryearref(id,list) + local result = collectauthoryears(id,list,method,what) + for author, years in next, result do + context("%s (%s)",authors.concat(author,method,what,settings),concat(years,", ")) + end + end + + function bibtex.authoryearsref(id,list) + local result = 
collectauthoryears(id,list,method,what) + for author, years in next, result do + context("(%s, %s)",authors.concat(author,method,what,settings),concat(years,", ")) + end + end + + function bibtex.singularorplural(singular,plural) + if lastconcatsize and lastconcatsize > 1 then + context(plural) + else + context(singular) + end + end + +end + + +--~ local function test(sample) +--~ local authors = splitauthors(sample) +--~ print(table.serialize(authors)) +--~ for i=1,#authors do +--~ local author = authors[i] +--~ print(normalauthor (author,settings)) +--~ print(normalshortauthor (author,settings)) +--~ print(invertedauthor (author,settings)) +--~ print(invertedshortauthor(author,settings)) +--~ end +--~ print(concatauthors(sample,settings,normalauthor)) +--~ print(concatauthors(sample,settings,normalshortauthor)) +--~ print(concatauthors(sample,settings,invertedauthor)) +--~ print(concatauthors(sample,settings,invertedshortauthor)) +--~ end + +--~ local sample_a = "Hagen, Hans and Hoekwater, Taco Whoever T. Ex. and Henkel Hut, Hartmut Harald von der" +--~ local sample_b = "Hans Hagen and Taco Whoever T. Ex. Hoekwater and Hartmut Harald von der Henkel Hut" + +--~ test(sample_a) +--~ test(sample_b) diff --git a/tex/context/base/bibl-tra.lua b/tex/context/base/bibl-tra.lua index 6a7016023..7111108e8 100644 --- a/tex/context/base/bibl-tra.lua +++ b/tex/context/base/bibl-tra.lua @@ -1,247 +1,247 @@ -if not modules then modules = { } end modules ['bibl-tra'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -bibtex = bibtex or { } -local bibtex = bibtex - -bibtex.hacks = bibtex.hacks or { } -local hacks = bibtex.hacks - -local match, gmatch, format, concat, sort = string.match, string.gmatch, string.format, table.concat, table.sort -local variables, constants = interfaces.variables, interfaces.constants - -local trace_bibtex = false trackers.register("publications.bibtex", function(v) trace_bibtex = v end) - -local report_tex = logs.reporter("publications","tex") - -local context, structures = context, structures - -local references = structures.references -local sections = structures.sections - -local list, done, alldone, used, registered, ordered = { }, { }, { }, { }, { }, { } -local mode = 0 - -local template = utilities.strings.striplong([[ - \citation{*} - \bibstyle{cont-%s} - \bibdata{%s} -]]) - -local bibtexbin = environment.arguments.mlbibtex and "mlbibcontext" or "bibtex" - -directives.register("publications.usemlbibtex", function(v) - bibtexbin = v and "mlbibcontext" or "bibtex" -end) - -function hacks.process(settings) - local style = settings.style or "" - local database = settings.database or "" - local jobname = tex.jobname - if database ~= "" then - interfaces.showmessage("publications",3) - io.savedata(file.addsuffix(jobname,"aux"),format(template,style,database)) - if trace_bibtex then - report_tex("processing bibtex file %a using %a",jobname,bibtexbin) - end - os.execute(format("%s %q",bibtexbin,jobname)) - -- purge 'm - end -end - -function hacks.register(str) - if trace_bibtex then - report_tex("registering bibtex entry %a",str) - end - registered[#registered+1] = str - ordered[str] = #registered -end - -function hacks.nofregistered() - return #registered -end - -function hacks.reset(m) - mode, list, done = m, { }, { } -end - -function hacks.add(str,listindex) - if not str or mode == 0 then 
- -- skip - elseif mode == 1 then - -- all locals but no duplicates - local sc = sections.currentid() - if done[str] ~= sc then - done[str], alldone[str] = sc, true - list[#list+1] = { str, listindex } - end - elseif mode == 2 then - -- all locals but no preceding - local sc = sections.currentid() - if not alldone[str] and done[str] ~= sc then - done[str], alldone[str] = sc, true - list[#list+1] = { str, listindex } - end - end -end - -local function compare(a,b) -- quite some checking for non-nil - local aa, bb = a and a[1], b and b[1] - if aa and bb then - local oa, ob = ordered[aa], ordered[bb] - return oa and ob and oa < ob - end - return false -end - -function hacks.flush(sortvariant) - if sortvariant == "" or sortvariant == variables.cite or sortvariant == "default" then - -- order is cite order i.e. same as list - else - sort(list,compare) - end - for i=1,#list do - context.doprocessbibtexentry(list[i][1]) - end -end - -function hacks.filterall() - for i=1,#registered do - list[i] = { registered[i], i } - end -end - -function hacks.registerplaced(str) - used[str] = true -end - -function hacks.doifalreadyplaced(str) - commands.doifelse(used[str]) -end - --- we ask for :tag but when we can't find it we go back --- to look for previous definitions, and when not found again --- we look forward - -local function compare(a,b) - local aa, bb = a and a[3], b and b[3] - return aa and bb and aa < bb -end - -function hacks.resolve(prefix,block,reference) -- maybe already feed it split - -- needs checking (the prefix in relation to components) - local subsets - local collected = references.collected - if prefix and prefix ~= "" then - subsets = { collected[prefix] or collected[""] } - else - local components = references.productdata.components - local subset = collected[""] - if subset then - subsets = { subset } - else - subsets = { } - end - for i=1,#components do - local subset = collected[components[i]] - if subset then - subsets[#subsets+1] = subset - end - end - end - if #subsets > 0 then - local result, nofresult, done = { }, 0, { } - block = tonumber(block) - for i=1,#subsets do - local subset = subsets[i] - for rest in gmatch(reference,"[^, ]+") do - local blk, tag, found = block, nil, nil - if block then - tag = blk .. ":" .. rest - found = subset[tag] - if not found then - for i=block-1,1,-1 do - tag = i .. ":" .. rest - found = subset[tag] - if found then - blk = i - break - end - end - end - end - if not found then - blk = "*" - tag = blk .. ":" .. 
rest - found = subset[tag] - end - if found then - local current = tonumber(found.entries and found.entries.text) -- tonumber needed - if current and not done[current] then - nofresult = nofresult + 1 - result[nofresult] = { blk, rest, current } - done[current] = true - end - end - end - end - -- todo: ranges so the interface will change - sort(result,compare) - local first, last, firsti, lasti, firstr, lastr - local collected, nofcollected = { }, 0 - for i=1,nofresult do - local r = result[i] - local current = r[3] - if not first then - first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r - elseif current == last + 1 then - last, lasti, lastr = current, i, r - else - if last > first + 1 then - nofcollected = nofcollected + 1 - collected[nofcollected] = { firstr[1], firstr[2], lastr[1], lastr[2] } - else - nofcollected = nofcollected + 1 - collected[nofcollected] = { firstr[1], firstr[2] } - if last > first then - nofcollected = nofcollected + 1 - collected[nofcollected] = { lastr[1], lastr[2] } - end - end - first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r - end - end - if first and last then - if last > first + 1 then - nofcollected = nofcollected + 1 - collected[nofcollected] = { firstr[1], firstr[2], lastr[1], lastr[2] } - else - nofcollected = nofcollected + 1 - collected[nofcollected] = { firstr[1], firstr[2] } - if last > first then - nofcollected = nofcollected + 1 - collected[nofcollected] = { lastr[1], lastr[2] } - end - end - end - if nofcollected > 0 then - for i=1,nofcollected do - local c = collected[i] - if c[3] then - context.dowithbibtexnumrefrange(#collected,i,prefix,c[1],c[2],c[3],c[4]) - else - context.dowithbibtexnumref(#collected,i,prefix,c[1],c[2]) - end - end - else - context.nobibtexnumref("error 1") - end - else - context.nobibtexnumref("error 2") - end -end +if not modules then modules = { } end modules ['bibl-tra'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +bibtex = bibtex or { } +local bibtex = bibtex + +bibtex.hacks = bibtex.hacks or { } +local hacks = bibtex.hacks + +local match, gmatch, format, concat, sort = string.match, string.gmatch, string.format, table.concat, table.sort +local variables, constants = interfaces.variables, interfaces.constants + +local trace_bibtex = false trackers.register("publications.bibtex", function(v) trace_bibtex = v end) + +local report_tex = logs.reporter("publications","tex") + +local context, structures = context, structures + +local references = structures.references +local sections = structures.sections + +local list, done, alldone, used, registered, ordered = { }, { }, { }, { }, { }, { } +local mode = 0 + +local template = utilities.strings.striplong([[ + \citation{*} + \bibstyle{cont-%s} + \bibdata{%s} +]]) + +local bibtexbin = environment.arguments.mlbibtex and "mlbibcontext" or "bibtex" + +directives.register("publications.usemlbibtex", function(v) + bibtexbin = v and "mlbibcontext" or "bibtex" +end) + +function hacks.process(settings) + local style = settings.style or "" + local database = settings.database or "" + local jobname = tex.jobname + if database ~= "" then + interfaces.showmessage("publications",3) + io.savedata(file.addsuffix(jobname,"aux"),format(template,style,database)) + if trace_bibtex then + report_tex("processing bibtex file %a using %a",jobname,bibtexbin) + end 
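    -- Not from the patch: for a hypothetical style "apa" and database "mybib"
    -- the aux file written above contains
    --   \citation{*}
    --   \bibstyle{cont-apa}
    --   \bibdata{mybib}
    -- which the external bibtex (or mlbibcontext) run below then processes.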
+ os.execute(format("%s %q",bibtexbin,jobname)) + -- purge 'm + end +end + +function hacks.register(str) + if trace_bibtex then + report_tex("registering bibtex entry %a",str) + end + registered[#registered+1] = str + ordered[str] = #registered +end + +function hacks.nofregistered() + return #registered +end + +function hacks.reset(m) + mode, list, done = m, { }, { } +end + +function hacks.add(str,listindex) + if not str or mode == 0 then + -- skip + elseif mode == 1 then + -- all locals but no duplicates + local sc = sections.currentid() + if done[str] ~= sc then + done[str], alldone[str] = sc, true + list[#list+1] = { str, listindex } + end + elseif mode == 2 then + -- all locals but no preceding + local sc = sections.currentid() + if not alldone[str] and done[str] ~= sc then + done[str], alldone[str] = sc, true + list[#list+1] = { str, listindex } + end + end +end + +local function compare(a,b) -- quite some checking for non-nil + local aa, bb = a and a[1], b and b[1] + if aa and bb then + local oa, ob = ordered[aa], ordered[bb] + return oa and ob and oa < ob + end + return false +end + +function hacks.flush(sortvariant) + if sortvariant == "" or sortvariant == variables.cite or sortvariant == "default" then + -- order is cite order i.e. same as list + else + sort(list,compare) + end + for i=1,#list do + context.doprocessbibtexentry(list[i][1]) + end +end + +function hacks.filterall() + for i=1,#registered do + list[i] = { registered[i], i } + end +end + +function hacks.registerplaced(str) + used[str] = true +end + +function hacks.doifalreadyplaced(str) + commands.doifelse(used[str]) +end + +-- we ask for :tag but when we can't find it we go back +-- to look for previous definitions, and when not found again +-- we look forward + +local function compare(a,b) + local aa, bb = a and a[3], b and b[3] + return aa and bb and aa < bb +end + +function hacks.resolve(prefix,block,reference) -- maybe already feed it split + -- needs checking (the prefix in relation to components) + local subsets + local collected = references.collected + if prefix and prefix ~= "" then + subsets = { collected[prefix] or collected[""] } + else + local components = references.productdata.components + local subset = collected[""] + if subset then + subsets = { subset } + else + subsets = { } + end + for i=1,#components do + local subset = collected[components[i]] + if subset then + subsets[#subsets+1] = subset + end + end + end + if #subsets > 0 then + local result, nofresult, done = { }, 0, { } + block = tonumber(block) + for i=1,#subsets do + local subset = subsets[i] + for rest in gmatch(reference,"[^, ]+") do + local blk, tag, found = block, nil, nil + if block then + tag = blk .. ":" .. rest + found = subset[tag] + if not found then + for i=block-1,1,-1 do + tag = i .. ":" .. rest + found = subset[tag] + if found then + blk = i + break + end + end + end + end + if not found then + blk = "*" + tag = blk .. ":" .. 
rest + found = subset[tag] + end + if found then + local current = tonumber(found.entries and found.entries.text) -- tonumber needed + if current and not done[current] then + nofresult = nofresult + 1 + result[nofresult] = { blk, rest, current } + done[current] = true + end + end + end + end + -- todo: ranges so the interface will change + sort(result,compare) + local first, last, firsti, lasti, firstr, lastr + local collected, nofcollected = { }, 0 + for i=1,nofresult do + local r = result[i] + local current = r[3] + if not first then + first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r + elseif current == last + 1 then + last, lasti, lastr = current, i, r + else + if last > first + 1 then + nofcollected = nofcollected + 1 + collected[nofcollected] = { firstr[1], firstr[2], lastr[1], lastr[2] } + else + nofcollected = nofcollected + 1 + collected[nofcollected] = { firstr[1], firstr[2] } + if last > first then + nofcollected = nofcollected + 1 + collected[nofcollected] = { lastr[1], lastr[2] } + end + end + first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r + end + end + if first and last then + if last > first + 1 then + nofcollected = nofcollected + 1 + collected[nofcollected] = { firstr[1], firstr[2], lastr[1], lastr[2] } + else + nofcollected = nofcollected + 1 + collected[nofcollected] = { firstr[1], firstr[2] } + if last > first then + nofcollected = nofcollected + 1 + collected[nofcollected] = { lastr[1], lastr[2] } + end + end + end + if nofcollected > 0 then + for i=1,nofcollected do + local c = collected[i] + if c[3] then + context.dowithbibtexnumrefrange(#collected,i,prefix,c[1],c[2],c[3],c[4]) + else + context.dowithbibtexnumref(#collected,i,prefix,c[1],c[2]) + end + end + else + context.nobibtexnumref("error 1") + end + else + context.nobibtexnumref("error 2") + end +end diff --git a/tex/context/base/bibl-tst.lua b/tex/context/base/bibl-tst.lua index 5ff8f4570..569f583c8 100644 --- a/tex/context/base/bibl-tst.lua +++ b/tex/context/base/bibl-tst.lua @@ -1,21 +1,21 @@ -dofile("bibl-bib.lua") - -local session = bibtex.new() - -bibtex.load(session,"gut.bib") -bibtex.load(session,"komoedie.bib") -bibtex.load(session,"texbook1.bib") -bibtex.load(session,"texbook2.bib") -bibtex.load(session,"texbook3.bib") -bibtex.load(session,"texgraph.bib") -bibtex.load(session,"texjourn.bib") -bibtex.load(session,"texnique.bib") -bibtex.load(session,"tugboat.bib") -print(bibtex.size,statistics.elapsedtime(bibtex)) -bibtex.toxml(session) -print(bibtex.size,statistics.elapsedtime(bibtex)) - ---~ print(table.serialize(session.data)) ---~ print(table.serialize(session.shortcuts)) ---~ print(xml.serialize(session.xml)) - +dofile("bibl-bib.lua") + +local session = bibtex.new() + +bibtex.load(session,"gut.bib") +bibtex.load(session,"komoedie.bib") +bibtex.load(session,"texbook1.bib") +bibtex.load(session,"texbook2.bib") +bibtex.load(session,"texbook3.bib") +bibtex.load(session,"texgraph.bib") +bibtex.load(session,"texjourn.bib") +bibtex.load(session,"texnique.bib") +bibtex.load(session,"tugboat.bib") +print(bibtex.size,statistics.elapsedtime(bibtex)) +bibtex.toxml(session) +print(bibtex.size,statistics.elapsedtime(bibtex)) + +--~ print(table.serialize(session.data)) +--~ print(table.serialize(session.shortcuts)) +--~ print(xml.serialize(session.xml)) + diff --git a/tex/context/base/blob-ini.lua b/tex/context/base/blob-ini.lua index 4debaf94c..148651b21 100644 --- a/tex/context/base/blob-ini.lua +++ b/tex/context/base/blob-ini.lua @@ -1,194 +1,194 @@ -if not 
modules then modules = { } end modules ['blob-ini'] = { - version = 1.001, - comment = "companion to blob-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Experimental ... names and functionality will change ... just a --- place to collect code, so: --- --- DON'T USE THESE FUNCTIONS AS THEY WILL CHANGE! --- --- This module is just a playground. Occasionally we need to typeset --- at the lua and and this is one method. In principle we can construct --- pages this way too which sometimes makes sense in dumb cases. Actually, --- if one only needs this, one does not really need tex, okay maybe the --- parbuilder but that one can be simplified as well then. - --- set fonts, attributes --- rest already done in packers etc --- add local par whatsit (or wait till cleaned up) --- collapse or new pars --- interline spacing etc - --- blob.char --- blob.line --- blob.paragraph --- blob.page - -local type, tostring = type, tostring -local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns - -local report_blobs = logs.reporter("blobs") - -local t_tonodes = typesetters.tonodes -local t_hpack = typesetters.hpack - -local flush_node_list = node.flush_list -local hpack_node_list = node.hpack -local vpack_node_list = node.vpack -local write_node = node.write - -blobs = blobs or { } - -local newline = lpegpatterns.newline -local space = lpegpatterns.spacer -local spacing = newline * space^0 -local content = (space^1)/" " + (1-spacing) - -local ctxtextcapture = lpeg.Ct ( ( -- needs checking (see elsewhere) - space^0 * ( - newline^2 * space^0 * lpeg.Cc("") - + newline * space^0 * lpeg.Cc(" ") - + lpeg.Cs(content^1) - ) -)^0) - -function blobs.new() - return { - list = { }, - } -end - -function blobs.dispose(t) - local list = t.list - for i=1,#list do - local li = list[i] - local pack = li.pack - if pack then - flush_node_list(pack) - li.pack = nil - end - end -end - -function blobs.append(t,str) -- compare concat and link - local typ = type(str) - local dummy = nil - if typ == "number" then - str = tostring(str) - typ = "string" - end - local list = t.list - if typ == "string" then - local pars = lpegmatch(ctxtextcapture,str) - local noflist = #list - for p=1,#pars do - local str = pars[p] - if #str == 0 then - noflist = noflist + 1 - list[noflist] = { head = nil, tail = nil } - else - local l = list[noflist] - if not l then - l = { head = nil, tail = nil } - noflist = noflist + 1 - list[noflist] = l - end - local head, tail = t_tonodes(str,nil,nil) - if head then - if l.head then - l.tail.next = head - head.prev = l.tail - l.tail = tail - else - l.head, l.tail = head, tail - end - end - end - end - end -end - -function blobs.pack(t,how) - local list = t.list - for i=1,#list do - local pack = list[i].pack - if pack then - flush_node_list(node.pack) - end - if how == "vertical" then - -- we need to prepend a local par node - -- list[i].pack = node.vpack(list[i].head,"exactly") - report_blobs("vpack not yet supported") - else - list[i].pack = hpack_node_list(list[i].head,"exactly") - end - end -end - -function blobs.write(t) - local list = t.list - for i=1,#list do - local li = list[i] - local pack = li.pack - if pack then - write_node(pack) - flush_node_list(pack) - li.pack = nil - end - end -end - -function blobs.dimensions(t) - local list = t.list - local first = list and list[1] - if first then - local pack = first.pack - return pack.width, pack.height, pack.depth - else - return 0, 0, 0 
- end -end - --- blob.char --- blob.line: head, tail --- blob.paragraph --- blob.page - ---~ local lineblob = { ---~ type = "line", ---~ head = false, ---~ tail = false, ---~ pack = false, ---~ properties = { }, ---~ end - ---~ local parblob = { ---~ type = "line", ---~ head = false, ---~ tail = false, ---~ pack = false, ---~ properties = { }, ---~ end - --- for the moment here: - -function commands.widthofstring(str) - local l = t_hpack(str) - context(number.todimen(l.width)) - flush_node_list(l) -end - --- less efficient: --- --- function commands.widthof(str) --- local b = blobs.new() --- blobs.append(b,str) --- blobs.pack(b) --- local w = blobs.dimensions(b) --- context(number.todimen(w)) --- blobs.dispose(b) --- end +if not modules then modules = { } end modules ['blob-ini'] = { + version = 1.001, + comment = "companion to blob-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Experimental ... names and functionality will change ... just a +-- place to collect code, so: +-- +-- DON'T USE THESE FUNCTIONS AS THEY WILL CHANGE! +-- +-- This module is just a playground. Occasionally we need to typeset +-- at the lua and and this is one method. In principle we can construct +-- pages this way too which sometimes makes sense in dumb cases. Actually, +-- if one only needs this, one does not really need tex, okay maybe the +-- parbuilder but that one can be simplified as well then. + +-- set fonts, attributes +-- rest already done in packers etc +-- add local par whatsit (or wait till cleaned up) +-- collapse or new pars +-- interline spacing etc + +-- blob.char +-- blob.line +-- blob.paragraph +-- blob.page + +local type, tostring = type, tostring +local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns + +local report_blobs = logs.reporter("blobs") + +local t_tonodes = typesetters.tonodes +local t_hpack = typesetters.hpack + +local flush_node_list = node.flush_list +local hpack_node_list = node.hpack +local vpack_node_list = node.vpack +local write_node = node.write + +blobs = blobs or { } + +local newline = lpegpatterns.newline +local space = lpegpatterns.spacer +local spacing = newline * space^0 +local content = (space^1)/" " + (1-spacing) + +local ctxtextcapture = lpeg.Ct ( ( -- needs checking (see elsewhere) + space^0 * ( + newline^2 * space^0 * lpeg.Cc("") + + newline * space^0 * lpeg.Cc(" ") + + lpeg.Cs(content^1) + ) +)^0) + +function blobs.new() + return { + list = { }, + } +end + +function blobs.dispose(t) + local list = t.list + for i=1,#list do + local li = list[i] + local pack = li.pack + if pack then + flush_node_list(pack) + li.pack = nil + end + end +end + +function blobs.append(t,str) -- compare concat and link + local typ = type(str) + local dummy = nil + if typ == "number" then + str = tostring(str) + typ = "string" + end + local list = t.list + if typ == "string" then + local pars = lpegmatch(ctxtextcapture,str) + local noflist = #list + for p=1,#pars do + local str = pars[p] + if #str == 0 then + noflist = noflist + 1 + list[noflist] = { head = nil, tail = nil } + else + local l = list[noflist] + if not l then + l = { head = nil, tail = nil } + noflist = noflist + 1 + list[noflist] = l + end + local head, tail = t_tonodes(str,nil,nil) + if head then + if l.head then + l.tail.next = head + head.prev = l.tail + l.tail = tail + else + l.head, l.tail = head, tail + end + end + end + end + end +end + +function blobs.pack(t,how) + local list = t.list + for 
i=1,#list do + local pack = list[i].pack + if pack then + flush_node_list(node.pack) + end + if how == "vertical" then + -- we need to prepend a local par node + -- list[i].pack = node.vpack(list[i].head,"exactly") + report_blobs("vpack not yet supported") + else + list[i].pack = hpack_node_list(list[i].head,"exactly") + end + end +end + +function blobs.write(t) + local list = t.list + for i=1,#list do + local li = list[i] + local pack = li.pack + if pack then + write_node(pack) + flush_node_list(pack) + li.pack = nil + end + end +end + +function blobs.dimensions(t) + local list = t.list + local first = list and list[1] + if first then + local pack = first.pack + return pack.width, pack.height, pack.depth + else + return 0, 0, 0 + end +end + +-- blob.char +-- blob.line: head, tail +-- blob.paragraph +-- blob.page + +--~ local lineblob = { +--~ type = "line", +--~ head = false, +--~ tail = false, +--~ pack = false, +--~ properties = { }, +--~ end + +--~ local parblob = { +--~ type = "line", +--~ head = false, +--~ tail = false, +--~ pack = false, +--~ properties = { }, +--~ end + +-- for the moment here: + +function commands.widthofstring(str) + local l = t_hpack(str) + context(number.todimen(l.width)) + flush_node_list(l) +end + +-- less efficient: +-- +-- function commands.widthof(str) +-- local b = blobs.new() +-- blobs.append(b,str) +-- blobs.pack(b) +-- local w = blobs.dimensions(b) +-- context(number.todimen(w)) +-- blobs.dispose(b) +-- end diff --git a/tex/context/base/buff-imp-default.lua b/tex/context/base/buff-imp-default.lua index 72a49d625..fd1634616 100644 --- a/tex/context/base/buff-imp-default.lua +++ b/tex/context/base/buff-imp-default.lua @@ -1,42 +1,42 @@ -if not modules then modules = { } end modules ['buff-imp-default'] = { - version = 1.001, - comment = "companion to buff-imp-default.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local patterns, P, V = lpeg.patterns, lpeg.P, lpeg.V -local makepattern = visualizers.makepattern - -local handler = visualizers.newhandler() - -local grammar = { "visualizer", - - -- basic - - signal = makepattern(handler,"signal", visualizers.signalpattern), - emptyline = makepattern(handler,"emptyline",patterns.emptyline), - beginline = makepattern(handler,"beginline",patterns.beginline), - newline = makepattern(handler,"newline", patterns.newline), - space = makepattern(handler,"space", patterns.space), - default = makepattern(handler,"default", patterns.utf8char), - content = makepattern(handler,"default", patterns.somecontent), -- not too efficient - - -- handy - - -- line = V("newline") * V("emptyline")^0 * V("beginline"), - line = V("newline") * V("emptyline")^0 * V("beginline") + V("emptyline") + V("newline"), - whitespace = (V("space") + V("line"))^1, - optionalwhitespace = (V("space") + V("line"))^0, - - -- used - - pattern = V("line") + V("space") + V("signal") + V("content"), - visualizer = V("pattern")^1 - -} - -local parser = P(grammar) - -visualizers.register("default", { parser = parser, handler = handler, grammar = grammar }) +if not modules then modules = { } end modules ['buff-imp-default'] = { + version = 1.001, + comment = "companion to buff-imp-default.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local patterns, P, V = lpeg.patterns, lpeg.P, lpeg.V +local makepattern = 
visualizers.makepattern + +local handler = visualizers.newhandler() + +local grammar = { "visualizer", + + -- basic + + signal = makepattern(handler,"signal", visualizers.signalpattern), + emptyline = makepattern(handler,"emptyline",patterns.emptyline), + beginline = makepattern(handler,"beginline",patterns.beginline), + newline = makepattern(handler,"newline", patterns.newline), + space = makepattern(handler,"space", patterns.space), + default = makepattern(handler,"default", patterns.utf8char), + content = makepattern(handler,"default", patterns.somecontent), -- not too efficient + + -- handy + + -- line = V("newline") * V("emptyline")^0 * V("beginline"), + line = V("newline") * V("emptyline")^0 * V("beginline") + V("emptyline") + V("newline"), + whitespace = (V("space") + V("line"))^1, + optionalwhitespace = (V("space") + V("line"))^0, + + -- used + + pattern = V("line") + V("space") + V("signal") + V("content"), + visualizer = V("pattern")^1 + +} + +local parser = P(grammar) + +visualizers.register("default", { parser = parser, handler = handler, grammar = grammar }) diff --git a/tex/context/base/buff-imp-escaped.lua b/tex/context/base/buff-imp-escaped.lua index 159921e2a..5a15c736a 100644 --- a/tex/context/base/buff-imp-escaped.lua +++ b/tex/context/base/buff-imp-escaped.lua @@ -1,14 +1,14 @@ -if not modules then modules = { } end modules ['buff-imp-escaped'] = { - version = 1.001, - comment = "companion to buff-imp-escaped.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -visualizers.registerescapepattern("/BTEX/ETEX","/BTEX","/ETEX") - -visualizers.register("escaped", { - parser = visualizers.escapepatterns["/BTEX/ETEX"], - handler = visualizers.newhandler(), -}) +if not modules then modules = { } end modules ['buff-imp-escaped'] = { + version = 1.001, + comment = "companion to buff-imp-escaped.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +visualizers.registerescapepattern("/BTEX/ETEX","/BTEX","/ETEX") + +visualizers.register("escaped", { + parser = visualizers.escapepatterns["/BTEX/ETEX"], + handler = visualizers.newhandler(), +}) diff --git a/tex/context/base/buff-imp-lua.lua b/tex/context/base/buff-imp-lua.lua index 1147666cc..4f47c0de9 100644 --- a/tex/context/base/buff-imp-lua.lua +++ b/tex/context/base/buff-imp-lua.lua @@ -1,208 +1,208 @@ -if not modules then modules = { } end modules ['buff-imp-lua'] = { - version = 1.001, - comment = "companion to buff-imp-lua.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- borrowed from scite --- --- depricated: --- --- gcinfo unpack getfenv setfenv loadlib --- table.maxn table.getn table.setn --- math.log10 math.mod math.modf math.fmod - -local format, tohash = string.format, table.tohash -local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns -local C, Cs, Cg, Cb, Cmt, Carg = lpeg.C, lpeg.Cs, lpeg.Cg, lpeg.Cb, lpeg.Cmt, lpeg.Carg - -local core = tohash { - "and", "break", "do", "else", "elseif", "end", "false", "for", "function", - "if", "in", "local", "nil", "not", "or", "repeat", "return", "then", - "true", "until", "while" -} - -local base = tohash { - "assert", "collectgarbage", "dofile", "error", "loadfile", - "loadstring", "print", "rawget", "rawset", "require", "tonumber", - 
"tostring", "type", "_G", "getmetatable", "ipairs", "next", "pairs", - "pcall", "rawequal", "setmetatable", "xpcall", "module", "select", -} - -local libraries = { - coroutine = tohash { - "create", "resume", "status", "wrap", "yield", "running", - }, - package = tohash{ - "cpath", "loaded", "loadlib", "path", "config", "preload", "seeall", - }, - io = tohash{ - "close", "flush", "input", "lines", "open", "output", "read", "tmpfile", - "type", "write", "stdin", "stdout", "stderr", "popen", - }, - math = tohash{ - "abs", "acos", "asin", "atan", "atan2", "ceil", "cos", "deg", "exp", - "floor ", "ldexp", "log", "max", "min", "pi", "pow", "rad", "random", - "randomseed", "sin", "sqrt", "tan", "cosh", "sinh", "tanh", "huge", - }, - string = tohash{ - "byte", "char", "dump", "find", "len", "lower", "rep", "sub", "upper", - "format", "gfind", "gsub", "gmatch", "match", "reverse", - }, - table = tohash{ - "concat", "foreach", "foreachi", "sort", "insert", "remove", "pack", - "unpack", - }, - os = tohash{ - "clock", "date", "difftime", "execute", "exit", "getenv", "remove", - "rename", "setlocale", "time", "tmpname", - }, - lpeg = tohash{ - "print", "match", "locale", "type", "version", "setmaxstack", - "P", "R", "S", "C", "V", "Cs", "Ct", "Cs", "Cp", "Carg", - "Cg", "Cb", "Cmt", "Cf", "B", - }, - -- bit - -- debug -} - -local context = context -local verbatim = context.verbatim -local makepattern = visualizers.makepattern - -local LuaSnippet = context.LuaSnippet -local startLuaSnippet = context.startLuaSnippet -local stopLuaSnippet = context.stopLuaSnippet - -local LuaSnippetBoundary = verbatim.LuaSnippetBoundary -local LuaSnippetQuote = verbatim.LuaSnippetQuote -local LuaSnippetString = verbatim.LuaSnippetString -local LuaSnippetSpecial = verbatim.LuaSnippetSpecial -local LuaSnippetComment = verbatim.LuaSnippetComment -local LuaSnippetNameCore = verbatim.LuaSnippetNameCore -local LuaSnippetNameBase = verbatim.LuaSnippetNameBase -local LuaSnippetNameLibraries = verbatim.LuaSnippetNameLibraries -local LuaSnippetName = verbatim.LuaSnippetName - -local namespace - -local function visualizename_a(s) - if core[s] then - namespace = nil - LuaSnippetNameCore(s) - elseif base[s] then - namespace = nil - LuaSnippetNameBase(s) - else - namespace = libraries[s] - if namespace then - LuaSnippetNameLibraries(s) - else - LuaSnippetName(s) - end - end -end - -local function visualizename_b(s) - if namespace and namespace[s] then - namespace = nil - LuaSnippetNameLibraries(s) - else - LuaSnippetName(s) - end -end - -local function visualizename_c(s) - LuaSnippetName(s) -end - -local handler = visualizers.newhandler { - startinline = function() LuaSnippet(false,"{") end, - stopinline = function() context("}") end, - startdisplay = function() startLuaSnippet() end, - stopdisplay = function() stopLuaSnippet() end , - boundary = function(s) LuaSnippetBoundary(s) end, - special = function(s) LuaSnippetSpecial(s) end, - comment = function(s) LuaSnippetComment(s) end, - quote = function(s) LuaSnippetQuote(s) end, - string = function(s) LuaSnippetString(s) end, - period = function(s) verbatim(s) end, - name_a = visualizename_a, - name_b = visualizename_b, - name_c = visualizename_c, -} - -local comment = P("--") -local name = (patterns.letter + patterns.underscore) - * (patterns.letter + patterns.underscore + patterns.digit)^0 -local boundary = S('()[]{}') -local special = S("-+/*^%=#") + P("..") - --- The following longstring parser is taken from Roberto's documentation --- that can be found at 
http://www.inf.puc-rio.br/~roberto/lpeg/lpeg.html. - -local equals = P("=")^0 -local open = P("[") * Cg(equals, "init") * P("[") * P("\n")^-1 -- maybe better: patterns.newline^-1 -local close = P("]") * C(equals) * P("]") -local closeeq = Cmt(close * Cb("init"), function(s,i,a,b) return a == b end) -- wrong return value -local longstring = open * Cs((P(1) - closeeq)^0) * close * Carg(1) - -local function long(content,equals,settings) - handler.boundary(format("[%s[",equals or "")) - visualizers.write(content,settings) -- unhandled - handler.boundary(format("]%s]",equals or "")) -end - -local grammar = visualizers.newgrammar("default", { "visualizer", - sstring = - makepattern(handler,"quote",patterns.dquote) - * (V("whitespace") + makepattern(handler,"string",1-patterns.dquote))^0 -- patterns.nodquote - * makepattern(handler,"quote",patterns.dquote), - dstring = - makepattern(handler,"quote",patterns.squote) - * (V("whitespace") + makepattern(handler,"string",1-patterns.squote))^0 -- patterns.nosquote - * makepattern(handler,"quote",patterns.squote), - longstring = - longstring / long, - comment = - makepattern(handler,"comment",comment) - * (V("space") + V("content"))^0, - longcomment = - makepattern(handler,"comment",comment) - * longstring / long, - name = - makepattern(handler,"name_a",name) - * ( V("optionalwhitespace") - * makepattern(handler,"default",patterns.period) - * V("optionalwhitespace") - * makepattern(handler,"name_b",name) - )^-1 - * ( V("optionalwhitespace") - * makepattern(handler,"default",patterns.period) - * V("optionalwhitespace") - * makepattern(handler,"name_c",name) - )^0, - - pattern = - V("longcomment") - + V("comment") - + V("longstring") - + V("dstring") - + V("sstring") - + V("name") - + makepattern(handler,"boundary",boundary) - + makepattern(handler,"special",special) - - + V("space") - + V("line") - + V("default"), - - visualizer = - V("pattern")^1 -} ) - -local parser = P(grammar) - -visualizers.register("lua", { parser = parser, handler = handler, grammar = grammar } ) +if not modules then modules = { } end modules ['buff-imp-lua'] = { + version = 1.001, + comment = "companion to buff-imp-lua.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- borrowed from scite +-- +-- depricated: +-- +-- gcinfo unpack getfenv setfenv loadlib +-- table.maxn table.getn table.setn +-- math.log10 math.mod math.modf math.fmod + +local format, tohash = string.format, table.tohash +local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns +local C, Cs, Cg, Cb, Cmt, Carg = lpeg.C, lpeg.Cs, lpeg.Cg, lpeg.Cb, lpeg.Cmt, lpeg.Carg + +local core = tohash { + "and", "break", "do", "else", "elseif", "end", "false", "for", "function", + "if", "in", "local", "nil", "not", "or", "repeat", "return", "then", + "true", "until", "while" +} + +local base = tohash { + "assert", "collectgarbage", "dofile", "error", "loadfile", + "loadstring", "print", "rawget", "rawset", "require", "tonumber", + "tostring", "type", "_G", "getmetatable", "ipairs", "next", "pairs", + "pcall", "rawequal", "setmetatable", "xpcall", "module", "select", +} + +local libraries = { + coroutine = tohash { + "create", "resume", "status", "wrap", "yield", "running", + }, + package = tohash{ + "cpath", "loaded", "loadlib", "path", "config", "preload", "seeall", + }, + io = tohash{ + "close", "flush", "input", "lines", "open", "output", "read", "tmpfile", + "type", "write", "stdin", "stdout", "stderr", 
"popen", + }, + math = tohash{ + "abs", "acos", "asin", "atan", "atan2", "ceil", "cos", "deg", "exp", + "floor ", "ldexp", "log", "max", "min", "pi", "pow", "rad", "random", + "randomseed", "sin", "sqrt", "tan", "cosh", "sinh", "tanh", "huge", + }, + string = tohash{ + "byte", "char", "dump", "find", "len", "lower", "rep", "sub", "upper", + "format", "gfind", "gsub", "gmatch", "match", "reverse", + }, + table = tohash{ + "concat", "foreach", "foreachi", "sort", "insert", "remove", "pack", + "unpack", + }, + os = tohash{ + "clock", "date", "difftime", "execute", "exit", "getenv", "remove", + "rename", "setlocale", "time", "tmpname", + }, + lpeg = tohash{ + "print", "match", "locale", "type", "version", "setmaxstack", + "P", "R", "S", "C", "V", "Cs", "Ct", "Cs", "Cp", "Carg", + "Cg", "Cb", "Cmt", "Cf", "B", + }, + -- bit + -- debug +} + +local context = context +local verbatim = context.verbatim +local makepattern = visualizers.makepattern + +local LuaSnippet = context.LuaSnippet +local startLuaSnippet = context.startLuaSnippet +local stopLuaSnippet = context.stopLuaSnippet + +local LuaSnippetBoundary = verbatim.LuaSnippetBoundary +local LuaSnippetQuote = verbatim.LuaSnippetQuote +local LuaSnippetString = verbatim.LuaSnippetString +local LuaSnippetSpecial = verbatim.LuaSnippetSpecial +local LuaSnippetComment = verbatim.LuaSnippetComment +local LuaSnippetNameCore = verbatim.LuaSnippetNameCore +local LuaSnippetNameBase = verbatim.LuaSnippetNameBase +local LuaSnippetNameLibraries = verbatim.LuaSnippetNameLibraries +local LuaSnippetName = verbatim.LuaSnippetName + +local namespace + +local function visualizename_a(s) + if core[s] then + namespace = nil + LuaSnippetNameCore(s) + elseif base[s] then + namespace = nil + LuaSnippetNameBase(s) + else + namespace = libraries[s] + if namespace then + LuaSnippetNameLibraries(s) + else + LuaSnippetName(s) + end + end +end + +local function visualizename_b(s) + if namespace and namespace[s] then + namespace = nil + LuaSnippetNameLibraries(s) + else + LuaSnippetName(s) + end +end + +local function visualizename_c(s) + LuaSnippetName(s) +end + +local handler = visualizers.newhandler { + startinline = function() LuaSnippet(false,"{") end, + stopinline = function() context("}") end, + startdisplay = function() startLuaSnippet() end, + stopdisplay = function() stopLuaSnippet() end , + boundary = function(s) LuaSnippetBoundary(s) end, + special = function(s) LuaSnippetSpecial(s) end, + comment = function(s) LuaSnippetComment(s) end, + quote = function(s) LuaSnippetQuote(s) end, + string = function(s) LuaSnippetString(s) end, + period = function(s) verbatim(s) end, + name_a = visualizename_a, + name_b = visualizename_b, + name_c = visualizename_c, +} + +local comment = P("--") +local name = (patterns.letter + patterns.underscore) + * (patterns.letter + patterns.underscore + patterns.digit)^0 +local boundary = S('()[]{}') +local special = S("-+/*^%=#") + P("..") + +-- The following longstring parser is taken from Roberto's documentation +-- that can be found at http://www.inf.puc-rio.br/~roberto/lpeg/lpeg.html. 
+ +local equals = P("=")^0 +local open = P("[") * Cg(equals, "init") * P("[") * P("\n")^-1 -- maybe better: patterns.newline^-1 +local close = P("]") * C(equals) * P("]") +local closeeq = Cmt(close * Cb("init"), function(s,i,a,b) return a == b end) -- wrong return value +local longstring = open * Cs((P(1) - closeeq)^0) * close * Carg(1) + +local function long(content,equals,settings) + handler.boundary(format("[%s[",equals or "")) + visualizers.write(content,settings) -- unhandled + handler.boundary(format("]%s]",equals or "")) +end + +local grammar = visualizers.newgrammar("default", { "visualizer", + sstring = + makepattern(handler,"quote",patterns.dquote) + * (V("whitespace") + makepattern(handler,"string",1-patterns.dquote))^0 -- patterns.nodquote + * makepattern(handler,"quote",patterns.dquote), + dstring = + makepattern(handler,"quote",patterns.squote) + * (V("whitespace") + makepattern(handler,"string",1-patterns.squote))^0 -- patterns.nosquote + * makepattern(handler,"quote",patterns.squote), + longstring = + longstring / long, + comment = + makepattern(handler,"comment",comment) + * (V("space") + V("content"))^0, + longcomment = + makepattern(handler,"comment",comment) + * longstring / long, + name = + makepattern(handler,"name_a",name) + * ( V("optionalwhitespace") + * makepattern(handler,"default",patterns.period) + * V("optionalwhitespace") + * makepattern(handler,"name_b",name) + )^-1 + * ( V("optionalwhitespace") + * makepattern(handler,"default",patterns.period) + * V("optionalwhitespace") + * makepattern(handler,"name_c",name) + )^0, + + pattern = + V("longcomment") + + V("comment") + + V("longstring") + + V("dstring") + + V("sstring") + + V("name") + + makepattern(handler,"boundary",boundary) + + makepattern(handler,"special",special) + + + V("space") + + V("line") + + V("default"), + + visualizer = + V("pattern")^1 +} ) + +local parser = P(grammar) + +visualizers.register("lua", { parser = parser, handler = handler, grammar = grammar } ) diff --git a/tex/context/base/buff-imp-mp.lua b/tex/context/base/buff-imp-mp.lua index 34e3459c6..a535b8c80 100644 --- a/tex/context/base/buff-imp-mp.lua +++ b/tex/context/base/buff-imp-mp.lua @@ -1,117 +1,117 @@ -if not modules then modules = { } end modules ['buff-imp-mp'] = { - version = 1.001, - comment = "companion to buff-imp-mp.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Now that we also use lpeg lexers in scite, we can share the keywords --- so we have moved the keyword lists to mult-mps.lua. Don't confuse the --- scite lexers with the ones we use here. Of course all those lexers --- boil down to doing similar things, but here we need more control over --- the rendering and have a different way of nesting. It is no coincidence --- that the coloring looks similar: both are derived from earlier lexing (in --- texedit, mkii and the c++ scite lexer). --- --- In the meantime we have lpeg based lexers in scite! And, as all this --- lexing boils down to the same principles (associating symbolic rendering --- with ranges of characters) and as the scite lexers do nesting, it makes --- sense at some point to share code. However, keep in mind that the pretty --- printers are also supposed to support invalid code (for educational --- purposes). The scite lexers are more recent and there a different color --- scheme is used. So, we might move away from the traditional coloring. 
- -local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns - -local context = context -local verbatim = context.verbatim -local makepattern = visualizers.makepattern - -local MetapostSnippet = context.MetapostSnippet -local startMetapostSnippet = context.startMetapostSnippet -local stopMetapostSnippet = context.stopMetapostSnippet - -local MetapostSnippetConstructor = verbatim.MetapostSnippetConstructor -local MetapostSnippetBoundary = verbatim.MetapostSnippetBoundary -local MetapostSnippetSpecial = verbatim.MetapostSnippetSpecial -local MetapostSnippetComment = verbatim.MetapostSnippetComment -local MetapostSnippetNamePrimitive = verbatim.MetapostSnippetNamePrimitive -local MetapostSnippetNamePlain = verbatim.MetapostSnippetNamePlain -local MetapostSnippetNameMetafun = verbatim.MetapostSnippetNameMetafun -local MetapostSnippetName = verbatim.MetapostSnippetName - -local primitives, plain, metafun - -local function initialize() - local mps = dofile(resolvers.findfile("mult-mps.lua","tex")) or { - primitives = { }, - plain = { }, - metafun = { }, - } - primitives = table.tohash(mps.primitives) - plain = table.tohash(mps.plain) - metafun = table.tohash(mps.metafun) -end - -local function visualizename(s) - if not primitives then - initialize() - end - if primitives[s] then - MetapostSnippetNamePrimitive(s) - elseif plain[s] then - MetapostSnippetNamePlain(s) - elseif metafun[s] then - MetapostSnippetNameMetafun(s) - else - MetapostSnippetName(s) - end -end - -local handler = visualizers.newhandler { - startinline = function() MetapostSnippet(false,"{") end, - stopinline = function() context("}") end, - startdisplay = function() startMetapostSnippet() end, - stopdisplay = function() stopMetapostSnippet() end , - constructor = function(s) MetapostSnippetConstructor(s) end, - boundary = function(s) MetapostSnippetBoundary(s) end, - special = function(s) MetapostSnippetSpecial(s) end, - comment = function(s) MetapostSnippetComment(s) end, - string = function(s) MetapostSnippetString(s) end, - quote = function(s) MetapostSnippetQuote(s) end, - name = visualizename, -} - -local comment = S("%") -local name = (patterns.letter + S("_"))^1 -local constructor = S("$@#") -local boundary = S('()[]:=<>;"') -local special = S("-+/*|`!?^&%.,") - -local grammar = visualizers.newgrammar("default", { "visualizer", - - comment = makepattern(handler,"comment",comment) - * (V("space") + V("content"))^0, - dstring = makepattern(handler,"quote",patterns.dquote) - * makepattern(handler,"string",patterns.nodquote) - * makepattern(handler,"quote",patterns.dquote), - name = makepattern(handler,"name",name), - constructor = makepattern(handler,"constructor",constructor), - boundary = makepattern(handler,"boundary",boundary), - special = makepattern(handler,"special",special), - - pattern = - V("comment") + V("dstring") + V("name") + V("constructor") + V("boundary") + V("special") - + V("newline") * V("emptyline")^0 * V("beginline") - + V("space") - + V("default"), - - visualizer = - V("pattern")^1 - -} ) - -local parser = P(grammar) - -visualizers.register("mp", { parser = parser, handler = handler, grammar = grammar } ) +if not modules then modules = { } end modules ['buff-imp-mp'] = { + version = 1.001, + comment = "companion to buff-imp-mp.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Now that we also use lpeg lexers in scite, we can share the keywords +-- so we have moved the keyword 
lists to mult-mps.lua. Don't confuse the +-- scite lexers with the ones we use here. Of course all those lexers +-- boil down to doing similar things, but here we need more control over +-- the rendering and have a different way of nesting. It is no coincidence +-- that the coloring looks similar: both are derived from earlier lexing (in +-- texedit, mkii and the c++ scite lexer). +-- +-- In the meantime we have lpeg based lexers in scite! And, as all this +-- lexing boils down to the same principles (associating symbolic rendering +-- with ranges of characters) and as the scite lexers do nesting, it makes +-- sense at some point to share code. However, keep in mind that the pretty +-- printers are also supposed to support invalid code (for educational +-- purposes). The scite lexers are more recent and there a different color +-- scheme is used. So, we might move away from the traditional coloring. + +local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns + +local context = context +local verbatim = context.verbatim +local makepattern = visualizers.makepattern + +local MetapostSnippet = context.MetapostSnippet +local startMetapostSnippet = context.startMetapostSnippet +local stopMetapostSnippet = context.stopMetapostSnippet + +local MetapostSnippetConstructor = verbatim.MetapostSnippetConstructor +local MetapostSnippetBoundary = verbatim.MetapostSnippetBoundary +local MetapostSnippetSpecial = verbatim.MetapostSnippetSpecial +local MetapostSnippetComment = verbatim.MetapostSnippetComment +local MetapostSnippetNamePrimitive = verbatim.MetapostSnippetNamePrimitive +local MetapostSnippetNamePlain = verbatim.MetapostSnippetNamePlain +local MetapostSnippetNameMetafun = verbatim.MetapostSnippetNameMetafun +local MetapostSnippetName = verbatim.MetapostSnippetName + +local primitives, plain, metafun + +local function initialize() + local mps = dofile(resolvers.findfile("mult-mps.lua","tex")) or { + primitives = { }, + plain = { }, + metafun = { }, + } + primitives = table.tohash(mps.primitives) + plain = table.tohash(mps.plain) + metafun = table.tohash(mps.metafun) +end + +local function visualizename(s) + if not primitives then + initialize() + end + if primitives[s] then + MetapostSnippetNamePrimitive(s) + elseif plain[s] then + MetapostSnippetNamePlain(s) + elseif metafun[s] then + MetapostSnippetNameMetafun(s) + else + MetapostSnippetName(s) + end +end + +local handler = visualizers.newhandler { + startinline = function() MetapostSnippet(false,"{") end, + stopinline = function() context("}") end, + startdisplay = function() startMetapostSnippet() end, + stopdisplay = function() stopMetapostSnippet() end , + constructor = function(s) MetapostSnippetConstructor(s) end, + boundary = function(s) MetapostSnippetBoundary(s) end, + special = function(s) MetapostSnippetSpecial(s) end, + comment = function(s) MetapostSnippetComment(s) end, + string = function(s) MetapostSnippetString(s) end, + quote = function(s) MetapostSnippetQuote(s) end, + name = visualizename, +} + +local comment = S("%") +local name = (patterns.letter + S("_"))^1 +local constructor = S("$@#") +local boundary = S('()[]:=<>;"') +local special = S("-+/*|`!?^&%.,") + +local grammar = visualizers.newgrammar("default", { "visualizer", + + comment = makepattern(handler,"comment",comment) + * (V("space") + V("content"))^0, + dstring = makepattern(handler,"quote",patterns.dquote) + * makepattern(handler,"string",patterns.nodquote) + * makepattern(handler,"quote",patterns.dquote), + name = makepattern(handler,"name",name), + 
constructor = makepattern(handler,"constructor",constructor), + boundary = makepattern(handler,"boundary",boundary), + special = makepattern(handler,"special",special), + + pattern = + V("comment") + V("dstring") + V("name") + V("constructor") + V("boundary") + V("special") + + V("newline") * V("emptyline")^0 * V("beginline") + + V("space") + + V("default"), + + visualizer = + V("pattern")^1 + +} ) + +local parser = P(grammar) + +visualizers.register("mp", { parser = parser, handler = handler, grammar = grammar } ) diff --git a/tex/context/base/buff-imp-nested.lua b/tex/context/base/buff-imp-nested.lua index 019cd996d..16b8ac67a 100644 --- a/tex/context/base/buff-imp-nested.lua +++ b/tex/context/base/buff-imp-nested.lua @@ -1,80 +1,80 @@ -if not modules then modules = { } end modules ['buff-imp-nested'] = { - version = 1.001, - comment = "companion to buff-imp-nested.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local lpegmatch, patterns = lpeg.match, lpeg.patterns -local P, V, Carg = lpeg.P, lpeg.V, lpeg.Carg - -local context = context -local verbatim = context.verbatim -local variables = interfaces.variables - -local makepattern = visualizers.makepattern -local getvisualizer = visualizers.getvisualizer - -local nested = nil - -local donestedtypingstart = context.donestedtypingstart -local donestedtypingstop = context.donestedtypingstop - -local v_none = variables.none -local v_slanted = variables.slanted - -local handler = visualizers.newhandler { - initialize = function(settings) - local option = settings and settings.option - if not option or option == "" then - nested = nil - elseif option == v_slanted then - nested = nil - elseif option == v_none then - nested = nil - else - nested = getvisualizer(option,"direct") - end - end, - open = function() - donestedtypingstart() - end, - close = function() - donestedtypingstop() - end, - content = function(s) - if nested then - nested(s) - else - verbatim(s) - end - end, -} - -local open = P("<<") -local close = P(">>") -local rest = (1 - open - close - patterns.space - patterns.newline)^1 - -local grammar = visualizers.newgrammar("default", { - - initialize = patterns.beginofstring * Carg(1) / handler.initialize, - - open = makepattern(handler,"open",open), - close = makepattern(handler,"close",close), - rest = makepattern(handler,"content",rest), - - nested = V("open") * (V("pattern")^0) * V("close"), - pattern = V("line") + V("space") + V("nested") + V("rest"), - - visualizer = V("initialize") * (V("pattern")^1) - -} ) - -local parser = P(grammar) - -visualizers.register("nested", { parser = parser, handler = handler, grammar = grammar } ) - --- lpeg.match(parser,[[<>tf<>tf>>]]) context.par() --- lpeg.match(parser,[[<>sl>>tf>>]]) context.par() --- lpeg.match(parser,[[sl<>tf>>sl]]) context.par() +if not modules then modules = { } end modules ['buff-imp-nested'] = { + version = 1.001, + comment = "companion to buff-imp-nested.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local lpegmatch, patterns = lpeg.match, lpeg.patterns +local P, V, Carg = lpeg.P, lpeg.V, lpeg.Carg + +local context = context +local verbatim = context.verbatim +local variables = interfaces.variables + +local makepattern = visualizers.makepattern +local getvisualizer = visualizers.getvisualizer + +local nested = nil + +local 
donestedtypingstart = context.donestedtypingstart +local donestedtypingstop = context.donestedtypingstop + +local v_none = variables.none +local v_slanted = variables.slanted + +local handler = visualizers.newhandler { + initialize = function(settings) + local option = settings and settings.option + if not option or option == "" then + nested = nil + elseif option == v_slanted then + nested = nil + elseif option == v_none then + nested = nil + else + nested = getvisualizer(option,"direct") + end + end, + open = function() + donestedtypingstart() + end, + close = function() + donestedtypingstop() + end, + content = function(s) + if nested then + nested(s) + else + verbatim(s) + end + end, +} + +local open = P("<<") +local close = P(">>") +local rest = (1 - open - close - patterns.space - patterns.newline)^1 + +local grammar = visualizers.newgrammar("default", { + + initialize = patterns.beginofstring * Carg(1) / handler.initialize, + + open = makepattern(handler,"open",open), + close = makepattern(handler,"close",close), + rest = makepattern(handler,"content",rest), + + nested = V("open") * (V("pattern")^0) * V("close"), + pattern = V("line") + V("space") + V("nested") + V("rest"), + + visualizer = V("initialize") * (V("pattern")^1) + +} ) + +local parser = P(grammar) + +visualizers.register("nested", { parser = parser, handler = handler, grammar = grammar } ) + +-- lpeg.match(parser,[[<>tf<>tf>>]]) context.par() +-- lpeg.match(parser,[[<>sl>>tf>>]]) context.par() +-- lpeg.match(parser,[[sl<>tf>>sl]]) context.par() diff --git a/tex/context/base/buff-imp-parsed-xml.lua b/tex/context/base/buff-imp-parsed-xml.lua index 22611ac8a..ef80fccb4 100644 --- a/tex/context/base/buff-imp-parsed-xml.lua +++ b/tex/context/base/buff-imp-parsed-xml.lua @@ -1,101 +1,101 @@ -if not modules then modules = { } end modules ['buff-imp-parsed-xml'] = { - version = 1.001, - comment = "companion to buff-imp-parsed-xml.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format - -local context = context -local verbatim = context.verbatim - -local write = visualizers.write -local writespace = visualizers.writespace -local writeargument = visualizers.writeargument - -local ParsedXmlSnippetKey = context.ParsedXmlSnippetKey -local ParsedXmlSnippetValue = context.ParsedXmlSnippetValue - -local ParsedXmlSnippetElement = verbatim.ParsedXmlSnippetElement -local ParsedXmlSnippetInstruction = verbatim.ParsedXmlSnippetInstruction -local ParsedXmlSnippetComment = verbatim.ParsedXmlSnippetComment -local ParsedXmlSnippetCdata = verbatim.ParsedXmlSnippetCdata -local ParsedXmlSnippetDoctype = verbatim.ParsedXmlSnippetDoctype - -local startParsedXmlSnippet = context.startParsedXmlSnippet -local stopParsedXmlSnippet = context.stopParsedXmlSnippet - -local parsedxmlhandler = xml.newhandlers { -- todo: treat spaces and tabs - name = "parsedxml", - handle = function(...) - print("error:",...) 
-- we need a handler as fallback, even if not used - end, - functions = { - ["@el@"] = function(e,handler) - local at = e.at - if at and next(at) then - ParsedXmlSnippetElement(format("<%s",e.tg)) - for k, v in next, at do - writespace() - ParsedXmlSnippetKey() - writeargument(k) - verbatim("=") - ParsedXmlSnippetValue() - writeargument(format("%q",k)) - end - ParsedXmlSnippetElement(">") - else - ParsedXmlSnippetElement(format("<%s>",e.tg)) - end - handler.serialize(e.dt,handler) - ParsedXmlSnippetElement(format("",e.tg)) - end, - ["@pi@"] = function(e,handler) - ParsedXmlSnippetInstruction("") - end , - ["@cm@"] = function(e,handler) - ParsedXmlSnippetComment("") - end, - ["@cd@"] = function(e,handler) - ParsedXmlSnippetCdata("") - end, - ["@dt@"] = function(e,handler) - ParsedXmlSnippetDoctype("") - end, - ["@tx@"] = function(s,handler) - write(s) - end, - } -} - -local function parsedxml(root,pattern) - if root then - if pattern then - root = xml.filter(root,pattern) - end - if root then - context.startParsedXmlSnippet() - xml.serialize(root,parsedxmlhandler) - context.stopParsedXmlSnippet() - end - end -end - -local function parser(str,settings) - parsedxml(xml.convert(str),settings and settings.pattern) -end - -visualizers.parsedxml = parsedxml -- for use at the lua end (maybe namespace needed) - -visualizers.register("parsed-xml", { parser = parser } ) - +if not modules then modules = { } end modules ['buff-imp-parsed-xml'] = { + version = 1.001, + comment = "companion to buff-imp-parsed-xml.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format + +local context = context +local verbatim = context.verbatim + +local write = visualizers.write +local writespace = visualizers.writespace +local writeargument = visualizers.writeargument + +local ParsedXmlSnippetKey = context.ParsedXmlSnippetKey +local ParsedXmlSnippetValue = context.ParsedXmlSnippetValue + +local ParsedXmlSnippetElement = verbatim.ParsedXmlSnippetElement +local ParsedXmlSnippetInstruction = verbatim.ParsedXmlSnippetInstruction +local ParsedXmlSnippetComment = verbatim.ParsedXmlSnippetComment +local ParsedXmlSnippetCdata = verbatim.ParsedXmlSnippetCdata +local ParsedXmlSnippetDoctype = verbatim.ParsedXmlSnippetDoctype + +local startParsedXmlSnippet = context.startParsedXmlSnippet +local stopParsedXmlSnippet = context.stopParsedXmlSnippet + +local parsedxmlhandler = xml.newhandlers { -- todo: treat spaces and tabs + name = "parsedxml", + handle = function(...) + print("error:",...) 
-- we need a handler as fallback, even if not used + end, + functions = { + ["@el@"] = function(e,handler) + local at = e.at + if at and next(at) then + ParsedXmlSnippetElement(format("<%s",e.tg)) + for k, v in next, at do + writespace() + ParsedXmlSnippetKey() + writeargument(k) + verbatim("=") + ParsedXmlSnippetValue() + writeargument(format("%q",k)) + end + ParsedXmlSnippetElement(">") + else + ParsedXmlSnippetElement(format("<%s>",e.tg)) + end + handler.serialize(e.dt,handler) + ParsedXmlSnippetElement(format("",e.tg)) + end, + ["@pi@"] = function(e,handler) + ParsedXmlSnippetInstruction("") + end , + ["@cm@"] = function(e,handler) + ParsedXmlSnippetComment("") + end, + ["@cd@"] = function(e,handler) + ParsedXmlSnippetCdata("") + end, + ["@dt@"] = function(e,handler) + ParsedXmlSnippetDoctype("") + end, + ["@tx@"] = function(s,handler) + write(s) + end, + } +} + +local function parsedxml(root,pattern) + if root then + if pattern then + root = xml.filter(root,pattern) + end + if root then + context.startParsedXmlSnippet() + xml.serialize(root,parsedxmlhandler) + context.stopParsedXmlSnippet() + end + end +end + +local function parser(str,settings) + parsedxml(xml.convert(str),settings and settings.pattern) +end + +visualizers.parsedxml = parsedxml -- for use at the lua end (maybe namespace needed) + +visualizers.register("parsed-xml", { parser = parser } ) + diff --git a/tex/context/base/buff-imp-tex.lua b/tex/context/base/buff-imp-tex.lua index 29fd8c0c5..cf7ea9796 100644 --- a/tex/context/base/buff-imp-tex.lua +++ b/tex/context/base/buff-imp-tex.lua @@ -1,130 +1,130 @@ -if not modules then modules = { } end modules ['buff-imp-tex'] = { - version = 1.001, - comment = "companion to v-tex.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns - -local context = context -local verbatim = context.verbatim -local makepattern = visualizers.makepattern -local makenested = visualizers.makenested -local getvisualizer = visualizers.getvisualizer - -local TexSnippet = context.TexSnippet -local startTexSnippet = context.startTexSnippet -local stopTexSnippet = context.stopTexSnippet - -local TexSnippetName = verbatim.TexSnippetName -local TexSnippetGroup = verbatim.TexSnippetGroup -local TexSnippetBoundary = verbatim.TexSnippetBoundary -local TexSnippetSpecial = verbatim.TexSnippetSpecial -local TexSnippetComment = verbatim.TexSnippetComment - -local handler = visualizers.newhandler { - startinline = function() TexSnippet(false,"{") end, - stopinline = function() context("}") end, - startdisplay = function() startTexSnippet() end, - stopdisplay = function() stopTexSnippet() end , - name = function(s) TexSnippetName(s) end, - group = function(s) TexSnippetGroup(s) end, - boundary = function(s) TexSnippetBoundary(s) end, - special = function(s) TexSnippetSpecial(s) end, - comment = function(s) TexSnippetComment(s) end, -} - --- todo: unicode letters in control sequences (slow as we need to test the nature) - -local comment = S("%") -local name = P("\\") * (patterns.letter + S("@!?"))^1 -local escape = P("\\") * (patterns.anything - patterns.newline)^-1 -- else we get \n -local group = S("${}") -local boundary = S('[]()<>#="') -local special = S("/^_-&+'`|") - -local p_comment = makepattern(handler,"comment",comment) - * (V("space") + V("content"))^0 -local p_name = makepattern(handler,"name",name) -local p_escape = 
makepattern(handler,"name",escape) -local p_group = makepattern(handler,"group",group) -local p_boundary = makepattern(handler,"boundary",boundary) -local p_special = makepattern(handler,"special",special) -local p_somespace = V("newline") * V("emptyline")^0 * V("beginline") - + V("space") - ---~ local pattern = visualizers.pattern - -local grammar = visualizers.newgrammar("default", { "visualizer", - - comment = p_comment, - name = p_name, - escape = p_escape, - group = p_group, - boundary = p_boundary, - special = p_special, - somespace = p_somespace, - - pattern = V("comment") - + V("name") + V("escape") + V("group") + V("boundary") + V("special") - + V("newline") * V("emptyline")^0 * V("beginline") - + V("space") - + V("default"), - - visualizer = V("pattern")^1 - -} ) - -local parser = P(grammar) - -visualizers.register("tex", { parser = parser, handler = handler, grammar = grammar } ) - -local function makecommand(handler,how,start,left,right) - local c, l, r, f = P(start), P(left), P(right), how - local n = ( P { l * ((1 - (l + r)) + V(1))^0 * r } + P(1-r) )^0 - if type(how) == "string" then - f = function(s) getvisualizer(how,"direct")(s) end - end - return makepattern(handler,"name",c) - * V("somespace")^0 - * makepattern(handler,"group",l) - * (n/f) - * makepattern(handler,"group",r) -end - -local grammar = visualizers.newgrammar("default", { "visualizer", - - comment = p_comment, - name = p_name, - escape = p_escape, - group = p_group, - boundary = p_boundary, - special = p_special, - somespace = p_somespace, - - mpcode = makenested(handler,"mp","\\startMPcode","\\stopMPcode") - + makenested(handler,"mp","\\startMPgraphic","\\stopMPgraphic") - + makenested(handler,"mp","\\startuseMPgraphic","\\stopuseMPgraphic") - + makenested(handler,"mp","\\startreusableMPgraphic","\\stopreusableMPgraphic") - + makenested(handler,"mp","\\startuniqueMPgraphic","\\stopuniqueMPgraphic") - + makenested(handler,"mp","\\startMPpage","\\stopMPpage"), - - luacode = makenested (handler,"lua","\\startluacode","\\stopluacode") - + makecommand(handler,"lua","\\ctxlua","{","}"), - - pattern = V("comment") - + V("mpcode") + V("luacode") - + V("name") + V("escape") + V("group") + V("boundary") + V("special") - + V("newline") * V("emptyline")^0 * V("beginline") - + V("space") - + V("default"), - - visualizer = V("pattern")^1 - -} ) - -local parser = P(grammar) - -visualizers.register("context", { parser = parser, handler = handler, grammar = grammar } ) +if not modules then modules = { } end modules ['buff-imp-tex'] = { + version = 1.001, + comment = "companion to v-tex.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns + +local context = context +local verbatim = context.verbatim +local makepattern = visualizers.makepattern +local makenested = visualizers.makenested +local getvisualizer = visualizers.getvisualizer + +local TexSnippet = context.TexSnippet +local startTexSnippet = context.startTexSnippet +local stopTexSnippet = context.stopTexSnippet + +local TexSnippetName = verbatim.TexSnippetName +local TexSnippetGroup = verbatim.TexSnippetGroup +local TexSnippetBoundary = verbatim.TexSnippetBoundary +local TexSnippetSpecial = verbatim.TexSnippetSpecial +local TexSnippetComment = verbatim.TexSnippetComment + +local handler = visualizers.newhandler { + startinline = function() TexSnippet(false,"{") end, + stopinline = function() 
context("}") end, + startdisplay = function() startTexSnippet() end, + stopdisplay = function() stopTexSnippet() end , + name = function(s) TexSnippetName(s) end, + group = function(s) TexSnippetGroup(s) end, + boundary = function(s) TexSnippetBoundary(s) end, + special = function(s) TexSnippetSpecial(s) end, + comment = function(s) TexSnippetComment(s) end, +} + +-- todo: unicode letters in control sequences (slow as we need to test the nature) + +local comment = S("%") +local name = P("\\") * (patterns.letter + S("@!?"))^1 +local escape = P("\\") * (patterns.anything - patterns.newline)^-1 -- else we get \n +local group = S("${}") +local boundary = S('[]()<>#="') +local special = S("/^_-&+'`|") + +local p_comment = makepattern(handler,"comment",comment) + * (V("space") + V("content"))^0 +local p_name = makepattern(handler,"name",name) +local p_escape = makepattern(handler,"name",escape) +local p_group = makepattern(handler,"group",group) +local p_boundary = makepattern(handler,"boundary",boundary) +local p_special = makepattern(handler,"special",special) +local p_somespace = V("newline") * V("emptyline")^0 * V("beginline") + + V("space") + +--~ local pattern = visualizers.pattern + +local grammar = visualizers.newgrammar("default", { "visualizer", + + comment = p_comment, + name = p_name, + escape = p_escape, + group = p_group, + boundary = p_boundary, + special = p_special, + somespace = p_somespace, + + pattern = V("comment") + + V("name") + V("escape") + V("group") + V("boundary") + V("special") + + V("newline") * V("emptyline")^0 * V("beginline") + + V("space") + + V("default"), + + visualizer = V("pattern")^1 + +} ) + +local parser = P(grammar) + +visualizers.register("tex", { parser = parser, handler = handler, grammar = grammar } ) + +local function makecommand(handler,how,start,left,right) + local c, l, r, f = P(start), P(left), P(right), how + local n = ( P { l * ((1 - (l + r)) + V(1))^0 * r } + P(1-r) )^0 + if type(how) == "string" then + f = function(s) getvisualizer(how,"direct")(s) end + end + return makepattern(handler,"name",c) + * V("somespace")^0 + * makepattern(handler,"group",l) + * (n/f) + * makepattern(handler,"group",r) +end + +local grammar = visualizers.newgrammar("default", { "visualizer", + + comment = p_comment, + name = p_name, + escape = p_escape, + group = p_group, + boundary = p_boundary, + special = p_special, + somespace = p_somespace, + + mpcode = makenested(handler,"mp","\\startMPcode","\\stopMPcode") + + makenested(handler,"mp","\\startMPgraphic","\\stopMPgraphic") + + makenested(handler,"mp","\\startuseMPgraphic","\\stopuseMPgraphic") + + makenested(handler,"mp","\\startreusableMPgraphic","\\stopreusableMPgraphic") + + makenested(handler,"mp","\\startuniqueMPgraphic","\\stopuniqueMPgraphic") + + makenested(handler,"mp","\\startMPpage","\\stopMPpage"), + + luacode = makenested (handler,"lua","\\startluacode","\\stopluacode") + + makecommand(handler,"lua","\\ctxlua","{","}"), + + pattern = V("comment") + + V("mpcode") + V("luacode") + + V("name") + V("escape") + V("group") + V("boundary") + V("special") + + V("newline") * V("emptyline")^0 * V("beginline") + + V("space") + + V("default"), + + visualizer = V("pattern")^1 + +} ) + +local parser = P(grammar) + +visualizers.register("context", { parser = parser, handler = handler, grammar = grammar } ) diff --git a/tex/context/base/buff-imp-xml.lua b/tex/context/base/buff-imp-xml.lua index 0c48ed3b0..298a98ac0 100644 --- a/tex/context/base/buff-imp-xml.lua +++ b/tex/context/base/buff-imp-xml.lua @@ -1,133 
+1,133 @@ -if not modules then modules = { } end modules ['buff-imp-xml'] = { - version = 1.001, - comment = "companion to v-xml.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns - -local context = context -local verbatim = context.verbatim -local makepattern = visualizers.makepattern - -local XmlSnippet = context.XmlSnippet -local startXmlSnippet = context.startXmlSnippet -local stopXmlSnippet = context.stopXmlSnippet - -local XmlSnippetName = verbatim.XmlSnippetName -local XmlSnippetKey = verbatim.XmlSnippetKey -local XmlSnippetBoundary = verbatim.XmlSnippetBoundary -local XmlSnippetString = verbatim.XmlSnippetString -local XmlSnippetEqual = verbatim.XmlSnippetEqual -local XmlSnippetEntity = verbatim.XmlSnippetEntity -local XmlSnippetComment = verbatim.XmlSnippetComment -local XmlSnippetCdata = verbatim.XmlSnippetCdata - -local handler = visualizers.newhandler { - startinline = function() XmlSnippet(false,"{") end, - stopinline = function() context("}") end, - startdisplay = function() startXmlSnippet() end, - stopdisplay = function() stopXmlSnippet () end, - name = function(s) XmlSnippetName(s) end, - key = function(s) XmlSnippetKey(s) end, - boundary = function(s) XmlSnippetBoundary(s) end, - string = function(s) XmlSnippetString(s) end, - equal = function(s) XmlSnippetEqual(s) end, - entity = function(s) XmlSnippetEntity(s) end, - comment = function(s) XmlSnippetComment(s) end, - cdata = function(s) XmlSnippetCdata(s) end, -} - -local comment = P("--") -local name = (patterns.letter + patterns.digit + S('_-.'))^1 -local entity = P("&") * (1-P(";"))^1 * P(";") -local openbegin = P("<") -local openend = P("") + P(">") -local closeend = P(">") -local opencomment = P("") -local openinstruction = P("") -local opencdata = P("") - -local grammar = visualizers.newgrammar("default", { "visualizer", - sstring = - makepattern(handler,"string",patterns.dquote) - * (V("whitespace") + makepattern(handler,"default",1-patterns.dquote))^0 - * makepattern(handler,"string",patterns.dquote), - dstring = - makepattern(handler,"string",patterns.squote) - * (V("whitespace") + makepattern(handler,"default",1-patterns.squote))^0 - * makepattern(handler,"string",patterns.squote), - entity = - makepattern(handler,"entity",entity), - name = - makepattern(handler,"name",name) - * ( - makepattern(handler,"default",patterns.colon) - * makepattern(handler,"name",name) - )^0, - key = - makepattern(handler,"key",name) - * ( - makepattern(handler,"default",patterns.colon) - * makepattern(handler,"key",name) - )^0, - attributes = ( - V("optionalwhitespace") - * V("key") - * V("optionalwhitespace") - * makepattern(handler,"equal",patterns.equal) - * V("optionalwhitespace") - * (V("dstring") + V("sstring")) - * V("optionalwhitespace") - )^0, - open = - makepattern(handler,"boundary",openbegin) - * V("name") - * V("optionalwhitespace") - * V("attributes") - * makepattern(handler,"boundary",closebegin), - close = - makepattern(handler,"boundary",openend) - * V("name") - * V("optionalwhitespace") - * makepattern(handler,"boundary",closeend), - comment = - makepattern(handler,"boundary",opencomment) - * (V("whitespace") + makepattern(handler,"comment",(1-closecomment)^1))^0 -- slow - * makepattern(handler,"boundary",closecomment), - cdata = - makepattern(handler,"boundary",opencdata) - * (V("whitespace") + 
makepattern(handler,"comment",(1-closecdata)^1))^0 -- slow - * makepattern(handler,"boundary",closecdata), - instruction = - makepattern(handler,"boundary",openinstruction) - * V("name") - * V("optionalwhitespace") - * V("attributes") - * V("optionalwhitespace") - * makepattern(handler,"boundary",closeinstruction), - - pattern = - V("comment") - + V("instruction") - + V("cdata") - + V("close") - + V("open") - + V("entity") - + V("space") - + V("line") - + V("default"), - - visualizer = - V("pattern")^1 -} ) - -local parser = P(grammar) - -visualizers.register("xml", { parser = parser, handler = handler, grammar = grammar } ) +if not modules then modules = { } end modules ['buff-imp-xml'] = { + version = 1.001, + comment = "companion to v-xml.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns + +local context = context +local verbatim = context.verbatim +local makepattern = visualizers.makepattern + +local XmlSnippet = context.XmlSnippet +local startXmlSnippet = context.startXmlSnippet +local stopXmlSnippet = context.stopXmlSnippet + +local XmlSnippetName = verbatim.XmlSnippetName +local XmlSnippetKey = verbatim.XmlSnippetKey +local XmlSnippetBoundary = verbatim.XmlSnippetBoundary +local XmlSnippetString = verbatim.XmlSnippetString +local XmlSnippetEqual = verbatim.XmlSnippetEqual +local XmlSnippetEntity = verbatim.XmlSnippetEntity +local XmlSnippetComment = verbatim.XmlSnippetComment +local XmlSnippetCdata = verbatim.XmlSnippetCdata + +local handler = visualizers.newhandler { + startinline = function() XmlSnippet(false,"{") end, + stopinline = function() context("}") end, + startdisplay = function() startXmlSnippet() end, + stopdisplay = function() stopXmlSnippet () end, + name = function(s) XmlSnippetName(s) end, + key = function(s) XmlSnippetKey(s) end, + boundary = function(s) XmlSnippetBoundary(s) end, + string = function(s) XmlSnippetString(s) end, + equal = function(s) XmlSnippetEqual(s) end, + entity = function(s) XmlSnippetEntity(s) end, + comment = function(s) XmlSnippetComment(s) end, + cdata = function(s) XmlSnippetCdata(s) end, +} + +local comment = P("--") +local name = (patterns.letter + patterns.digit + S('_-.'))^1 +local entity = P("&") * (1-P(";"))^1 * P(";") +local openbegin = P("<") +local openend = P("") + P(">") +local closeend = P(">") +local opencomment = P("") +local openinstruction = P("") +local opencdata = P("") + +local grammar = visualizers.newgrammar("default", { "visualizer", + sstring = + makepattern(handler,"string",patterns.dquote) + * (V("whitespace") + makepattern(handler,"default",1-patterns.dquote))^0 + * makepattern(handler,"string",patterns.dquote), + dstring = + makepattern(handler,"string",patterns.squote) + * (V("whitespace") + makepattern(handler,"default",1-patterns.squote))^0 + * makepattern(handler,"string",patterns.squote), + entity = + makepattern(handler,"entity",entity), + name = + makepattern(handler,"name",name) + * ( + makepattern(handler,"default",patterns.colon) + * makepattern(handler,"name",name) + )^0, + key = + makepattern(handler,"key",name) + * ( + makepattern(handler,"default",patterns.colon) + * makepattern(handler,"key",name) + )^0, + attributes = ( + V("optionalwhitespace") + * V("key") + * V("optionalwhitespace") + * makepattern(handler,"equal",patterns.equal) + * V("optionalwhitespace") + * (V("dstring") + V("sstring")) + * V("optionalwhitespace") + 
)^0, + open = + makepattern(handler,"boundary",openbegin) + * V("name") + * V("optionalwhitespace") + * V("attributes") + * makepattern(handler,"boundary",closebegin), + close = + makepattern(handler,"boundary",openend) + * V("name") + * V("optionalwhitespace") + * makepattern(handler,"boundary",closeend), + comment = + makepattern(handler,"boundary",opencomment) + * (V("whitespace") + makepattern(handler,"comment",(1-closecomment)^1))^0 -- slow + * makepattern(handler,"boundary",closecomment), + cdata = + makepattern(handler,"boundary",opencdata) + * (V("whitespace") + makepattern(handler,"comment",(1-closecdata)^1))^0 -- slow + * makepattern(handler,"boundary",closecdata), + instruction = + makepattern(handler,"boundary",openinstruction) + * V("name") + * V("optionalwhitespace") + * V("attributes") + * V("optionalwhitespace") + * makepattern(handler,"boundary",closeinstruction), + + pattern = + V("comment") + + V("instruction") + + V("cdata") + + V("close") + + V("open") + + V("entity") + + V("space") + + V("line") + + V("default"), + + visualizer = + V("pattern")^1 +} ) + +local parser = P(grammar) + +visualizers.register("xml", { parser = parser, handler = handler, grammar = grammar } ) diff --git a/tex/context/base/buff-ini.lua b/tex/context/base/buff-ini.lua index 475d23efe..358c0f2a7 100644 --- a/tex/context/base/buff-ini.lua +++ b/tex/context/base/buff-ini.lua @@ -1,366 +1,366 @@ -if not modules then modules = { } end modules ['buff-ini'] = { - version = 1.001, - comment = "companion to buff-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local trace_run = false trackers.register("buffers.run", function(v) trace_run = v end) -local trace_grab = false trackers.register("buffers.grab", function(v) trace_grab = v end) -local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end) - -local report_buffers = logs.reporter("buffers","usage") -local report_grabbing = logs.reporter("buffers","grabbing") - -local context, commands = context, commands - -local concat = table.concat -local type, next, load = type, next, load -local sub, format = string.sub, string.format -local splitlines, validstring = string.splitlines, string.valid -local P, Cs, patterns, lpegmatch = lpeg.P, lpeg.Cs, lpeg.patterns, lpeg.match - -local variables = interfaces.variables -local settings_to_array = utilities.parsers.settings_to_array -local formatters = string.formatters - -local v_yes = variables.yes - -local catcodenumbers = catcodes.numbers - -local ctxcatcodes = catcodenumbers.ctxcatcodes -local txtcatcodes = catcodenumbers.txtcatcodes - -buffers = buffers or { } -local buffers = buffers - -local cache = { } - -local function erase(name) - cache[name] = nil -end - -local function assign(name,str,catcodes) - cache[name] = { data = str, catcodes = catcodes } -end - -local function append(name,str) - local buffer = cache[name] - if buffer then - buffer.data = buffer.data .. 
str - else - cache[name] = { data = str } - end -end - -local function exists(name) - return cache[name] -end - -local function getcontent(name) - local buffer = name and cache[name] - return buffer and buffer.data or "" -end - -local function getlines(name) - local buffer = name and cache[name] - return buffer and splitlines(buffer.data) -end - -local function collectcontent(names,separator) -- no print - if type(names) == "string" then - names = settings_to_array(names) - end - local nnames = #names - if nnames == 0 then - return getcontent("") -- default buffer - elseif nnames == 1 then - return getcontent(names[1]) - else - local t, n = { }, 0 - for i=1,nnames do - local c = getcontent(names[i]) - if c ~= "" then - n = n + 1 - t[n] = c - end - end - return concat(t,separator or "\n") -- was \r - end -end - -local function loadcontent(names) -- no print - if type(names) == "string" then - names = settings_to_array(names) - end - local nnames = #names - local ok = false - if nnames == 0 then - ok = load(getcontent("")) -- default buffer - elseif nnames == 1 then - ok = load(getcontent(names[1])) - else - -- lua 5.2 chunked load - local i = 0 - ok = load(function() - while true do - i = i + 1 - if i > nnames then - return nil - end - local c = getcontent(names[i]) - if c == "" then - -- would trigger end of load - else - return c - end - end - end) - end - if ok then - return ok() - elseif nnames == 0 then - report_buffers("invalid lua code in default buffer") - else - report_buffers("invalid lua code in buffer %a",concat(names,",")) - end -end - - -buffers.raw = getcontent -buffers.erase = erase -buffers.assign = assign -buffers.append = append -buffers.exists = exists -buffers.getcontent = getcontent -buffers.getlines = getlines -buffers.collectcontent = collectcontent -buffers.loadcontent = loadcontent - --- the context interface - -commands.erasebuffer = erase -commands.assignbuffer = assign - -local anything = patterns.anything -local alwaysmatched = patterns.alwaysmatched - -local function countnesting(b,e) - local n - local g = P(b) / function() n = n + 1 end - + P(e) / function() n = n - 1 end - + anything - local p = alwaysmatched / function() n = 0 end - * g^0 - * alwaysmatched / function() return n end - return p -end - -local counters = { } -local nesting = 0 -local autoundent = true -local continue = false - --- Beware: the first character of bufferdata has to be discarded as it's there to --- prevent gobbling of newlines in the case of nested buffers. The last one is --- a newlinechar and is removed too. --- --- An \n is unlikely to show up as \r is the endlinechar but \n is more generic --- for us. - --- This fits the way we fetch verbatim: the indentatio before the sentinel --- determines the stripping. - --- str = [[ --- test test test test test test test --- test test test test test test test --- test test test test test test test --- --- test test test test test test test --- test test test test test test test --- test test test test test test test --- ]] - --- local function undent(str) --- local margin = match(str,"[\n\r]( +)[\n\r]*$") or "" --- local indent = #margin --- if indent > 0 then --- local lines = splitlines(str) --- local ok = true --- local pattern = "^" .. 
margin --- for i=1,#lines do --- local l = lines[i] --- if find(l,pattern) then --- lines[i] = sub(l,indent+1) --- else --- ok = false --- break --- end --- end --- if ok then --- return concat(lines,"\n") --- end --- end --- return str --- end - --- how about tabs - -local getmargin = (Cs(P(" ")^1)*P(-1)+1)^1 -local eol = patterns.eol -local whatever = (P(1)-eol)^0 * eol^1 - -local strippers = { } - -local function undent(str) -- new version, needs testing - local margin = lpegmatch(getmargin,str) - if type(margin) ~= "string" then - return str - end - local indent = #margin - if indent == 0 then - return str - end - local stripper = strippers[indent] - if not stripper then - stripper = Cs((P(margin)/"" * whatever + eol^1)^1) - strippers[indent] = stripper - end - return lpegmatch(stripper,str) or str -end - -function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes) -- maybe move \\ to call - local dn = getcontent(name) - if dn == "" then - nesting = 0 - continue = false - end - if trace_grab then - if #bufferdata > 30 then - report_grabbing("%s => |%s..%s|",name,sub(bufferdata,1,10),sub(bufferdata,-10,#bufferdata)) - else - report_grabbing("%s => |%s|",name,bufferdata) - end - end - local counter = counters[begintag] - if not counter then - counter = countnesting(begintag,endtag) - counters[begintag] = counter - end - nesting = nesting + lpegmatch(counter,bufferdata) - local more = nesting > 0 - if more then - dn = dn .. sub(bufferdata,2,-1) .. endtag - nesting = nesting - 1 - continue = true - else - if continue then - dn = dn .. sub(bufferdata,2,-2) -- no \r, \n is more generic - elseif dn == "" then - dn = sub(bufferdata,2,-2) - else - dn = dn .. "\n" .. sub(bufferdata,2,-2) -- no \r, \n is more generic - end - local last = sub(dn,-1) - if last == "\n" or last == "\r" then -- \n is unlikely as \r is the endlinechar - dn = sub(dn,1,-2) - end - if autoundent then - dn = undent(dn) - end - end - assign(name,dn,catcodes) - commands.doifelse(more) -end - --- The optional prefix hack is there for the typesetbuffer feature and --- in mkii we needed that (this hidden feature is used in a manual). - -local function prepared(name,list,prefix) -- list is optional - if not list or list == "" then - list = name - end - if not name or name == "" then - name = list - end - local content = collectcontent(list,nil) or "" - if content == "" then - content = "empty buffer" - end - if prefix then - local name = file.addsuffix(name,"tmp") - return tex.jobname .. "-" .. name, content - else - return name, content - end -end - -local capsule = "\\starttext\n%s\n\\stoptext\n" -local command = "context %s" - -function commands.runbuffer(name,list,encapsulate) - local name, content = prepared(name,list) - if encapsulate then - content = format(capsule,content) - end - local data = io.loaddata(name) - if data ~= content then - if trace_run then - report_buffers("changes in %a, processing forced",name) - end - io.savedata(name,content) - os.execute(format(command,name)) - elseif trace_run then - report_buffers("no changes in %a, not processed",name) - end -end - -function commands.savebuffer(list,name,prefix) -- name is optional - local name, content = prepared(name,list,prefix==v_yes) - io.savedata(name,content) -end - -function commands.getbuffer(name) - local str = getcontent(name) - if str ~= "" then - context.viafile(str,formatters["buffer.%s"](validstring(name,"noname"))) - end -end - -function commands.getbuffermkvi(name) -- rather direct ! 
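-- A minimal standalone sketch of the "lua 5.2 chunked load" technique that
-- loadcontent relies on: load() also accepts a reader function and keeps
-- calling it until it returns nil, concatenating the returned pieces. This
-- runs in plain Lua 5.2+, outside ConTeXt; the pieces are made-up strings.
local pieces = { "local a = 1 ", "local b = 2 ", "return a + b" }
local i = 0
local chunk, err = load(function()
    i = i + 1
    return pieces[i] -- returning nil ends the chunk
end)
print(chunk and chunk() or err) -- prints 3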
- context.viafile(resolvers.macros.preprocessed(getcontent(name)),formatters["buffer.%s.mkiv"](validstring(name,"noname"))) -end - -function commands.gettexbuffer(name) - local buffer = name and cache[name] - if buffer and buffer.data ~= "" then - context.pushcatcodetable() - if buffer.catcodes == txtcatcodes then - context.setcatcodetable(txtcatcodes) - else - context.setcatcodetable(ctxcatcodes) - end - -- context(function() context.viafile(buffer.data) end) - context.getbuffer { name } -- viafile flushes too soon - context.popcatcodetable() - end -end - -commands.getbufferctxlua = loadcontent - -function commands.doifelsebuffer(name) - commands.doifelse(exists(name)) -end - --- This only used for mp buffers and is a kludge. Don't change the --- texprint into texsprint as it fails because "penddef" becomes --- "penddef" then. - --- function commands.feedback(names) --- texprint(ctxcatcodes,splitlines(collectcontent(names))) --- end - -function commands.feedback(names) -- bad name, maybe rename to injectbuffercontent - context.printlines(collectcontent(names)) -end +if not modules then modules = { } end modules ['buff-ini'] = { + version = 1.001, + comment = "companion to buff-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local trace_run = false trackers.register("buffers.run", function(v) trace_run = v end) +local trace_grab = false trackers.register("buffers.grab", function(v) trace_grab = v end) +local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end) + +local report_buffers = logs.reporter("buffers","usage") +local report_grabbing = logs.reporter("buffers","grabbing") + +local context, commands = context, commands + +local concat = table.concat +local type, next, load = type, next, load +local sub, format = string.sub, string.format +local splitlines, validstring = string.splitlines, string.valid +local P, Cs, patterns, lpegmatch = lpeg.P, lpeg.Cs, lpeg.patterns, lpeg.match + +local variables = interfaces.variables +local settings_to_array = utilities.parsers.settings_to_array +local formatters = string.formatters + +local v_yes = variables.yes + +local catcodenumbers = catcodes.numbers + +local ctxcatcodes = catcodenumbers.ctxcatcodes +local txtcatcodes = catcodenumbers.txtcatcodes + +buffers = buffers or { } +local buffers = buffers + +local cache = { } + +local function erase(name) + cache[name] = nil +end + +local function assign(name,str,catcodes) + cache[name] = { data = str, catcodes = catcodes } +end + +local function append(name,str) + local buffer = cache[name] + if buffer then + buffer.data = buffer.data .. 
str + else + cache[name] = { data = str } + end +end + +local function exists(name) + return cache[name] +end + +local function getcontent(name) + local buffer = name and cache[name] + return buffer and buffer.data or "" +end + +local function getlines(name) + local buffer = name and cache[name] + return buffer and splitlines(buffer.data) +end + +local function collectcontent(names,separator) -- no print + if type(names) == "string" then + names = settings_to_array(names) + end + local nnames = #names + if nnames == 0 then + return getcontent("") -- default buffer + elseif nnames == 1 then + return getcontent(names[1]) + else + local t, n = { }, 0 + for i=1,nnames do + local c = getcontent(names[i]) + if c ~= "" then + n = n + 1 + t[n] = c + end + end + return concat(t,separator or "\n") -- was \r + end +end + +local function loadcontent(names) -- no print + if type(names) == "string" then + names = settings_to_array(names) + end + local nnames = #names + local ok = false + if nnames == 0 then + ok = load(getcontent("")) -- default buffer + elseif nnames == 1 then + ok = load(getcontent(names[1])) + else + -- lua 5.2 chunked load + local i = 0 + ok = load(function() + while true do + i = i + 1 + if i > nnames then + return nil + end + local c = getcontent(names[i]) + if c == "" then + -- would trigger end of load + else + return c + end + end + end) + end + if ok then + return ok() + elseif nnames == 0 then + report_buffers("invalid lua code in default buffer") + else + report_buffers("invalid lua code in buffer %a",concat(names,",")) + end +end + + +buffers.raw = getcontent +buffers.erase = erase +buffers.assign = assign +buffers.append = append +buffers.exists = exists +buffers.getcontent = getcontent +buffers.getlines = getlines +buffers.collectcontent = collectcontent +buffers.loadcontent = loadcontent + +-- the context interface + +commands.erasebuffer = erase +commands.assignbuffer = assign + +local anything = patterns.anything +local alwaysmatched = patterns.alwaysmatched + +local function countnesting(b,e) + local n + local g = P(b) / function() n = n + 1 end + + P(e) / function() n = n - 1 end + + anything + local p = alwaysmatched / function() n = 0 end + * g^0 + * alwaysmatched / function() return n end + return p +end + +local counters = { } +local nesting = 0 +local autoundent = true +local continue = false + +-- Beware: the first character of bufferdata has to be discarded as it's there to +-- prevent gobbling of newlines in the case of nested buffers. The last one is +-- a newlinechar and is removed too. +-- +-- An \n is unlikely to show up as \r is the endlinechar but \n is more generic +-- for us. + +-- This fits the way we fetch verbatim: the indentatio before the sentinel +-- determines the stripping. + +-- str = [[ +-- test test test test test test test +-- test test test test test test test +-- test test test test test test test +-- +-- test test test test test test test +-- test test test test test test test +-- test test test test test test test +-- ]] + +-- local function undent(str) +-- local margin = match(str,"[\n\r]( +)[\n\r]*$") or "" +-- local indent = #margin +-- if indent > 0 then +-- local lines = splitlines(str) +-- local ok = true +-- local pattern = "^" .. 
margin +-- for i=1,#lines do +-- local l = lines[i] +-- if find(l,pattern) then +-- lines[i] = sub(l,indent+1) +-- else +-- ok = false +-- break +-- end +-- end +-- if ok then +-- return concat(lines,"\n") +-- end +-- end +-- return str +-- end + +-- how about tabs + +local getmargin = (Cs(P(" ")^1)*P(-1)+1)^1 +local eol = patterns.eol +local whatever = (P(1)-eol)^0 * eol^1 + +local strippers = { } + +local function undent(str) -- new version, needs testing + local margin = lpegmatch(getmargin,str) + if type(margin) ~= "string" then + return str + end + local indent = #margin + if indent == 0 then + return str + end + local stripper = strippers[indent] + if not stripper then + stripper = Cs((P(margin)/"" * whatever + eol^1)^1) + strippers[indent] = stripper + end + return lpegmatch(stripper,str) or str +end + +function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes) -- maybe move \\ to call + local dn = getcontent(name) + if dn == "" then + nesting = 0 + continue = false + end + if trace_grab then + if #bufferdata > 30 then + report_grabbing("%s => |%s..%s|",name,sub(bufferdata,1,10),sub(bufferdata,-10,#bufferdata)) + else + report_grabbing("%s => |%s|",name,bufferdata) + end + end + local counter = counters[begintag] + if not counter then + counter = countnesting(begintag,endtag) + counters[begintag] = counter + end + nesting = nesting + lpegmatch(counter,bufferdata) + local more = nesting > 0 + if more then + dn = dn .. sub(bufferdata,2,-1) .. endtag + nesting = nesting - 1 + continue = true + else + if continue then + dn = dn .. sub(bufferdata,2,-2) -- no \r, \n is more generic + elseif dn == "" then + dn = sub(bufferdata,2,-2) + else + dn = dn .. "\n" .. sub(bufferdata,2,-2) -- no \r, \n is more generic + end + local last = sub(dn,-1) + if last == "\n" or last == "\r" then -- \n is unlikely as \r is the endlinechar + dn = sub(dn,1,-2) + end + if autoundent then + dn = undent(dn) + end + end + assign(name,dn,catcodes) + commands.doifelse(more) +end + +-- The optional prefix hack is there for the typesetbuffer feature and +-- in mkii we needed that (this hidden feature is used in a manual). + +local function prepared(name,list,prefix) -- list is optional + if not list or list == "" then + list = name + end + if not name or name == "" then + name = list + end + local content = collectcontent(list,nil) or "" + if content == "" then + content = "empty buffer" + end + if prefix then + local name = file.addsuffix(name,"tmp") + return tex.jobname .. "-" .. name, content + else + return name, content + end +end + +local capsule = "\\starttext\n%s\n\\stoptext\n" +local command = "context %s" + +function commands.runbuffer(name,list,encapsulate) + local name, content = prepared(name,list) + if encapsulate then + content = format(capsule,content) + end + local data = io.loaddata(name) + if data ~= content then + if trace_run then + report_buffers("changes in %a, processing forced",name) + end + io.savedata(name,content) + os.execute(format(command,name)) + elseif trace_run then + report_buffers("no changes in %a, not processed",name) + end +end + +function commands.savebuffer(list,name,prefix) -- name is optional + local name, content = prepared(name,list,prefix==v_yes) + io.savedata(name,content) +end + +function commands.getbuffer(name) + local str = getcontent(name) + if str ~= "" then + context.viafile(str,formatters["buffer.%s"](validstring(name,"noname"))) + end +end + +function commands.getbuffermkvi(name) -- rather direct ! 
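-- A hedged sketch of the countnesting idea above, written against plain lpeg
-- so it can be tested outside ConTeXt; \startfoo and \stopfoo stand in for
-- the real begin and end tags passed to countnesting.
local lpeg = require("lpeg")
local P = lpeg.P
local n
local g = P("\\startfoo") / function() n = n + 1 end
        + P("\\stopfoo")  / function() n = n - 1 end
        + P(1)
local counter = P(true) / function() n = 0 end    -- reset before scanning
              * g^0
              * P(true) / function() return n end -- report the balance
print(lpeg.match(counter,"\\startfoo x \\startfoo y \\stopfoo")) -- 1 unbalanced start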
+ context.viafile(resolvers.macros.preprocessed(getcontent(name)),formatters["buffer.%s.mkiv"](validstring(name,"noname"))) +end + +function commands.gettexbuffer(name) + local buffer = name and cache[name] + if buffer and buffer.data ~= "" then + context.pushcatcodetable() + if buffer.catcodes == txtcatcodes then + context.setcatcodetable(txtcatcodes) + else + context.setcatcodetable(ctxcatcodes) + end + -- context(function() context.viafile(buffer.data) end) + context.getbuffer { name } -- viafile flushes too soon + context.popcatcodetable() + end +end + +commands.getbufferctxlua = loadcontent + +function commands.doifelsebuffer(name) + commands.doifelse(exists(name)) +end + +-- This only used for mp buffers and is a kludge. Don't change the +-- texprint into texsprint as it fails because "penddef" becomes +-- "penddef" then. + +-- function commands.feedback(names) +-- texprint(ctxcatcodes,splitlines(collectcontent(names))) +-- end + +function commands.feedback(names) -- bad name, maybe rename to injectbuffercontent + context.printlines(collectcontent(names)) +end diff --git a/tex/context/base/buff-par.lua b/tex/context/base/buff-par.lua index 2c1cd40e9..e0d32274f 100644 --- a/tex/context/base/buff-par.lua +++ b/tex/context/base/buff-par.lua @@ -1,184 +1,184 @@ -if not modules then modules = { } end modules ['buff-par'] = { - version = 1.001, - comment = "companion to buff-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local context, commands = context, commands - -local insert, remove, find, gmatch = table.insert, table.remove, string.find, string.gmatch -local strip, format = string.strip, string.format - -local trace_parallel = false trackers.register("buffers.parallel", function(v) trace_parallel = v end) - -local report_parallel = logs.reporter("buffers","parallel") - -local variables = interfaces.variables - -local parallel = buffers.parallel or { } -buffers.parallel = parallel - -local settings_to_array = utilities.parsers.settings_to_array - -local data = { } - -function parallel.define(category,tags) - local tags = settings_to_array(tags) - local entries = { } - data[category] = { - tags = tags, - entries = entries, - } - for i=1,#tags do - entries[tags[i]] = { - lines = { }, - number = 0, - } - end -end - -function parallel.reset(category,tags) - if not tags or tags == "" or tags == variables.all then - tags = table.keys(entries) - else - tags = settings_to_array(tags) - end - for i=1,#tags do - entries[tags[i]] = { - lines = { }, - number = 0, - } - end -end - -function parallel.next(category) - local dc = data[category] - local tags = dc.tags - local entries = dc.entries - for i=1,#tags do - insert(entries[tags[i]].lines, { }) - end -end - -function parallel.save(category,tag,content) - local dc = data[category] - if not dc then - return - end - local entries = dc.entries[tag] - if not entries then - return - end - local lines = entries.lines - if not lines then - return - end - local line = lines[#lines] - if not line then - return - end - -- maybe no strip - -- use lpeg - if find(content,"%s*%[") then - local done = false - for label, content in gmatch(content,"%s*%[(.-)%]%s*([^%[]+)") do - if done then - line = { } - insert(lines,line) - else - done = true - end - if trace_parallel and label ~= "" then - report_parallel("reference found of category %a, tag %a, label %a",category,tag,label) - end - line.label = label - line.content = strip(content) - end 
- else - line.content = strip(content) - line.label = "" - end -end - -function parallel.hassomecontent(category,tags) - local dc = data[category] - if not dc then - return false - end - local entries = dc.entries - if not tags or tags == "" or tags == variables.all then - tags = table.keys(entries) - else - tags = utilities.parsers.settings_to_array(tags) - end - for t=1,#tags do - local tag = tags[t] - local lines = entries[tag].lines - for i=1,#lines do - local content = lines[i].content - if content and content ~= "" then - return true - end - end - end - return false -end - -local save = resolvers.savers.byscheme - -function parallel.place(category,tags,options) - local dc = data[category] - if not dc then - return - end - local entries = dc.entries - local tags = utilities.parsers.settings_to_array(tags) - local options = utilities.parsers.settings_to_hash(options) - local start, n, criterium = options.start, options.n, options.criterium - start, n = start and tonumber(start), n and tonumber(n) - local max = 1 - if n then - max = n - elseif criterium == variables.all then - max = 0 - for t=1,#tags do - local tag = tags[t] - local lines = entries[tag].lines - if #lines > max then - max = #lines - end - end - end - for i=1,max do - for t=1,#tags do - local tag = tags[t] - local entry = entries[tag] - if entry then - local lines = entry.lines - local number = entry.number + 1 - entry.number = number - local line = remove(lines,1) - if line and line.content then - local content = format("\\input{%s}",save("virtual","parallel",line.content)) - context.doflushparallel(tag,1,number,line.label,content) - else - context.doflushparallel(tag,0,number,"","") - end - end - end - end -end - --- interface - -commands.defineparallel = parallel.define -commands.nextparallel = parallel.next -commands.saveparallel = parallel.save -commands.placeparallel = parallel.place -commands.resetparallel = parallel.reset - -function commands.doifelseparallel(category,tags) - commands.doifelse(parallel.hassomecontent(category,tags)) -end +if not modules then modules = { } end modules ['buff-par'] = { + version = 1.001, + comment = "companion to buff-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local context, commands = context, commands + +local insert, remove, find, gmatch = table.insert, table.remove, string.find, string.gmatch +local strip, format = string.strip, string.format + +local trace_parallel = false trackers.register("buffers.parallel", function(v) trace_parallel = v end) + +local report_parallel = logs.reporter("buffers","parallel") + +local variables = interfaces.variables + +local parallel = buffers.parallel or { } +buffers.parallel = parallel + +local settings_to_array = utilities.parsers.settings_to_array + +local data = { } + +function parallel.define(category,tags) + local tags = settings_to_array(tags) + local entries = { } + data[category] = { + tags = tags, + entries = entries, + } + for i=1,#tags do + entries[tags[i]] = { + lines = { }, + number = 0, + } + end +end + +function parallel.reset(category,tags) + if not tags or tags == "" or tags == variables.all then + tags = table.keys(entries) + else + tags = settings_to_array(tags) + end + for i=1,#tags do + entries[tags[i]] = { + lines = { }, + number = 0, + } + end +end + +function parallel.next(category) + local dc = data[category] + local tags = dc.tags + local entries = dc.entries + for i=1,#tags do + 
insert(entries[tags[i]].lines, { }) + end +end + +function parallel.save(category,tag,content) + local dc = data[category] + if not dc then + return + end + local entries = dc.entries[tag] + if not entries then + return + end + local lines = entries.lines + if not lines then + return + end + local line = lines[#lines] + if not line then + return + end + -- maybe no strip + -- use lpeg + if find(content,"%s*%[") then + local done = false + for label, content in gmatch(content,"%s*%[(.-)%]%s*([^%[]+)") do + if done then + line = { } + insert(lines,line) + else + done = true + end + if trace_parallel and label ~= "" then + report_parallel("reference found of category %a, tag %a, label %a",category,tag,label) + end + line.label = label + line.content = strip(content) + end + else + line.content = strip(content) + line.label = "" + end +end + +function parallel.hassomecontent(category,tags) + local dc = data[category] + if not dc then + return false + end + local entries = dc.entries + if not tags or tags == "" or tags == variables.all then + tags = table.keys(entries) + else + tags = utilities.parsers.settings_to_array(tags) + end + for t=1,#tags do + local tag = tags[t] + local lines = entries[tag].lines + for i=1,#lines do + local content = lines[i].content + if content and content ~= "" then + return true + end + end + end + return false +end + +local save = resolvers.savers.byscheme + +function parallel.place(category,tags,options) + local dc = data[category] + if not dc then + return + end + local entries = dc.entries + local tags = utilities.parsers.settings_to_array(tags) + local options = utilities.parsers.settings_to_hash(options) + local start, n, criterium = options.start, options.n, options.criterium + start, n = start and tonumber(start), n and tonumber(n) + local max = 1 + if n then + max = n + elseif criterium == variables.all then + max = 0 + for t=1,#tags do + local tag = tags[t] + local lines = entries[tag].lines + if #lines > max then + max = #lines + end + end + end + for i=1,max do + for t=1,#tags do + local tag = tags[t] + local entry = entries[tag] + if entry then + local lines = entry.lines + local number = entry.number + 1 + entry.number = number + local line = remove(lines,1) + if line and line.content then + local content = format("\\input{%s}",save("virtual","parallel",line.content)) + context.doflushparallel(tag,1,number,line.label,content) + else + context.doflushparallel(tag,0,number,"","") + end + end + end + end +end + +-- interface + +commands.defineparallel = parallel.define +commands.nextparallel = parallel.next +commands.saveparallel = parallel.save +commands.placeparallel = parallel.place +commands.resetparallel = parallel.reset + +function commands.doifelseparallel(category,tags) + commands.doifelse(parallel.hassomecontent(category,tags)) +end diff --git a/tex/context/base/buff-ver.lua b/tex/context/base/buff-ver.lua index e327a59dd..30525b456 100644 --- a/tex/context/base/buff-ver.lua +++ b/tex/context/base/buff-ver.lua @@ -1,768 +1,768 @@ -if not modules then modules = { } end modules ['buff-ver'] = { - version = 1.001, - comment = "companion to buff-ver.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- The default visualizers have reserved names starting with buff-imp-*. Users are --- supposed to use different names for their own variants. 
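-- A hedged usage sketch of the parallel buffer interface defined above; it is
-- only meaningful inside a ConTeXt MkIV run, and the category and tag names
-- are made up. A label between square brackets is stored with the line.
buffers.parallel.define("manual","english,dutch")
buffers.parallel.next("manual")                    -- start a synchronized slot in each stream
buffers.parallel.save("manual","english","[intro] Hello world")
buffers.parallel.save("manual","dutch","[intro] Hallo wereld")
-- each stream now holds { lines = { { label = "intro", content = "..." } }, number = 0 }
-- and parallel.place("manual","english,dutch","criterium=all") flushes the
-- streams pairwise through \doflushparallel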
--- --- todo: skip=auto - -local type, next, rawset, rawget, setmetatable, getmetatable = type, next, rawset, rawget, setmetatable, getmetatable -local format, lower, upper,match, find, sub = string.format, string.lower, string.upper, string.match, string.find, string.sub -local splitlines = string.splitlines -local concat = table.concat -local C, P, R, S, V, Carg, Cc, Cs = lpeg.C, lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Carg, lpeg.Cc, lpeg.Cs -local patterns, lpegmatch, is_lpeg = lpeg.patterns, lpeg.match, lpeg.is_lpeg - -local context, commands = context, commands - -local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end) -local report_visualizers = logs.reporter("buffers","visualizers") - -local allocate = utilities.storage.allocate - -visualizers = visualizers or { } -local specifications = allocate() -visualizers.specifications = specifications - -local tabtospace = utilities.strings.tabtospace -local variables = interfaces.variables -local settings_to_array = utilities.parsers.settings_to_array -local variables = interfaces.variables -local findfile = resolvers.findfile -local addsuffix = file.addsuffix - -local v_auto = variables.auto -local v_yes = variables.yes - --- beware, all macros have an argument: - -local doinlineverbatimnewline = context.doinlineverbatimnewline -local doinlineverbatimbeginline = context.doinlineverbatimbeginline -local doinlineverbatimemptyline = context.doinlineverbatimemptyline -local doinlineverbatimstart = context.doinlineverbatimstart -local doinlineverbatimstop = context.doinlineverbatimstop - -local dodisplayverbatimnewline = context.dodisplayverbatimnewline -local dodisplayverbatimbeginline = context.dodisplayverbatimbeginline -local dodisplayverbatimemptyline = context.dodisplayverbatimemptyline -local dodisplayverbatimstart = context.dodisplayverbatimstart -local dodisplayverbatimstop = context.dodisplayverbatimstop - -local verbatim = context.verbatim -local doverbatimspace = context.doverbatimspace - -local CargOne = Carg(1) - -local function f_emptyline(s,settings) - if settings and settings.nature == "inline" then - doinlineverbatimemptyline() - else - dodisplayverbatimemptyline() - end -end - -local function f_beginline(s,settings) - if settings and settings.nature == "inline" then - doinlineverbatimbeginline() - else - dodisplayverbatimbeginline() - end -end - -local function f_newline(s,settings) - if settings and settings.nature == "inline" then - doinlineverbatimnewline() - else - dodisplayverbatimnewline() - end -end - -local function f_start(s,settings) - if settings and settings.nature == "inline" then - doinlineverbatimstart() - else - dodisplayverbatimstart() - end -end - -local function f_stop(s,settings) - if settings and settings.nature == "inline" then - doinlineverbatimstop() - else - dodisplayverbatimstop() - end -end - -local function f_default(s) -- (s,settings) - verbatim(s) -end - -local function f_space() -- (s,settings) - doverbatimspace() -end - -local function f_signal() -- (s,settings) - -- we use these for special purposes -end - -local signal = "\000" - -visualizers.signal = signal -visualizers.signalpattern = P(signal) - -local functions = { __index = { - emptyline = f_emptyline, - newline = f_newline, - default = f_default, - beginline = f_beginline, - space = f_space, - start = f_start, - stop = f_stop, - signal = f_signal, - } -} - -local handlers = { } - -function visualizers.newhandler(name,data) - local tname, tdata = type(name), type(data) - if tname == "table" 
then -- (data) - setmetatable(name,getmetatable(name) or functions) - return name - elseif tname == "string" then - if tdata == "string" then -- ("name","parent") - local result = { } - setmetatable(result,getmetatable(handlers[data]) or functions) - handlers[name] = result - return result - elseif tdata == "table" then -- ("name",data) - setmetatable(data,getmetatable(data) or functions) - handlers[name] = data - return data - else -- ("name") - local result = { } - setmetatable(result,functions) - handlers[name] = result - return result - end - else -- () - local result = { } - setmetatable(result,functions) - return result - end -end - -function visualizers.newgrammar(name,t) - name = lower(name) - t = t or { } - local g = visualizers.specifications[name] - g = g and g.grammar - if g then - if trace_visualize then - report_visualizers("cloning grammar %a",name) - end - for k,v in next, g do - if not t[k] then - t[k] = v - end - if is_lpeg(v) then - t[name..":"..k] = v - end - end - end - return t -end - -local function getvisualizer(method,nature) - method = lower(method) - local m = specifications[method] or specifications.default - if nature then - if trace_visualize then - report_visualizers("getting visualizer %a with nature %a",method,nature) - end - return m and (m[nature] or m.parser) or nil - else - if trace_visualize then - report_visualizers("getting visualizer %a",method) - end - return m and m.parser or nil - end -end - -local fallback = context.verbatim - -local function makepattern(visualizer,replacement,pattern) - if not pattern then - report_visualizers("error in visualizer %a",replacement) - return patterns.alwaystrue - else - if type(visualizer) == "table" and type(replacement) == "string" then - replacement = visualizer[replacement] or fallback - else - replacement = fallback - end - return (C(pattern) * CargOne) / replacement - end -end - -local function makenested(handler,how,start,stop) - local b, e, f = P(start), P(stop), how - if type(how) == "string" then - f = function(s) getvisualizer(how,"direct")(s) end - end - return makepattern(handler,"name",b) - * ((1-e)^1/f) - * makepattern(handler,"name",e) -end - -visualizers.pattern = makepattern -visualizers.makepattern = makepattern -visualizers.makenested = makenested - -function visualizers.load(name) - name = lower(name) - if rawget(specifications,name) == nil then - name = lower(name) - local texname = findfile(format("buff-imp-%s.mkiv",name)) - local luaname = findfile(format("buff-imp-%s.lua" ,name)) - if texname == "" or luaname == "" then - -- assume a user specific file - luaname = findfile(addsuffix(name,"mkiv")) - texname = findfile(addsuffix(name,"lua" )) - end - if texname == "" or luaname == "" then - if trace_visualize then - report_visualizers("unknown visualizer %a",name) - end - else - if trace_visualize then - report_visualizers("loading visualizer %a",name) - end - lua.registercode(luaname) - context.input(texname) - end - if rawget(specifications,name) == nil then - rawset(specifications,name,false) - end - end -end - -function visualizers.register(name,specification) - name = lower(name) - if trace_visualize then - report_visualizers("registering visualizer %a",name) - end - specifications[name] = specification - local parser, handler = specification.parser, specification.handler - local displayparser = specification.display or parser - local inlineparser = specification.inline or parser - local isparser = is_lpeg(parser) - local start, stop - if isparser then - start = 
makepattern(handler,"start",patterns.alwaysmatched) - stop = makepattern(handler,"stop",patterns.alwaysmatched) - end - if handler then - if isparser then - specification.display = function(content,settings) - if handler.startdisplay then handler.startdisplay(settings) end - lpegmatch(start * displayparser * stop,content,1,settings) - if handler.stopdisplay then handler.stopdisplay(settings) end - end - specification.inline = function(content,settings) - if handler.startinline then handler.startinline(settings) end - lpegmatch(start * inlineparser * stop,content,1,settings) - if handler.stopinline then handler.stopinline(settings) end - end - specification.direct = function(content,settings) - lpegmatch(parser,content,1,settings) - end - elseif parser then - specification.display = function(content,settings) - if handler.startdisplay then handler.startdisplay(settings) end - parser(content,settings) - if handler.stopdisplay then handler.stopdisplay(settings) end - end - specification.inline = function(content,settings) - if handler.startinline then handler.startinline(settings) end - parser(content,settings) - if handler.stopinline then handler.stopinline(settings) end - end - specification.direct = parser - end - elseif isparser then - specification.display = function(content,settings) - lpegmatch(start * displayparser * stop,content,1,settings) - end - specification.inline = function(content,settings) - lpegmatch(start * inlineparser * stop,content,1,settings) - end - specification.direct = function(content,settings) - lpegmatch(parser,content,1,settings) - end - elseif parser then - specification.display = parser - specification.inline = parser - specification.direct = parser - end - return specification -end - -local escapepatterns = allocate() -visualizers.escapepatterns = escapepatterns - -local function texmethod(s) - context.bgroup() - context(s) - context.egroup() -end - -local function texcommand(s) - context[s]() -end - -local function defaultmethod(s,settings) - lpegmatch(getvisualizer("default"),lower(s),1,settings) -end - --- we can consider using a nested instead - -local space_pattern = patterns.space^0 -local name_pattern = R("az","AZ")^1 - --- the hack is needed in order to retain newlines when an escape happens at the --- at the begin of a line; it also ensures proper line numbering; a bit messy - -local function hack(pattern) - return Cs(pattern * Cc(signal)) -end - -local split_processor = typesetters.processors.split -local apply_processor = typesetters.processors.apply - --- todo: { before = b, after = a, processor = p }, ... 
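-- A hedged sketch of how a user visualizer could be assembled with the
-- helpers above, following the same shape as buff-imp-tex: newhandler maps
-- token classes to macros, newgrammar clones the default rules, and register
-- installs the result. MySnippetWord is a hypothetical verbatim macro.
local P, V, patterns = lpeg.P, lpeg.V, lpeg.patterns

local handler = visualizers.newhandler {
    word = function(s) context.verbatim.MySnippetWord(s) end, -- hypothetical macro
}

local grammar = visualizers.newgrammar("default", { "visualizer",
    word       = visualizers.makepattern(handler,"word",patterns.letter^1),
    pattern    = V("word") + V("space") + V("line") + V("default"),
    visualizer = V("pattern")^1,
})

visualizers.register("myverb", { parser = P(grammar), handler = handler, grammar = grammar })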
- -function visualizers.registerescapepattern(name,befores,afters,normalmethod,escapemethod,processors) - local escapepattern = escapepatterns[name] - if not escapepattern then - if type(befores) ~= "table" then befores = { befores } end - if type(afters) ~= "table" then afters = { afters } end - if type(processors) ~= "table" then processors = { processors } end - for i=1,#befores do - local before = befores[i] - local after = afters[i] - local processor = processors[i] - if trace_visualize then - report_visualizers("registering escape pattern, name %a, index %a, before %a, after %a, processor %a", - name,i,before,after,processor or "default") - end - before = P(before) * space_pattern - after = space_pattern * P(after) - local action - if processor then - action = function(s) apply_processor(processor,s) end - else - action = escapemethod or texmethod - end - local ep = (before / "") * ((1 - after)^0 / action) * (after / "") - if escapepattern then - escapepattern = escapepattern + ep - else - escapepattern = ep - end - end - escapepattern = ( - escapepattern - + hack((1 - escapepattern)^1) / (normalmethod or defaultmethod) - )^0 - escapepatterns[name] = escapepattern - end - return escapepattern -end - -function visualizers.registerescapeline(name,befores,normalmethod,escapemethod,processors) - local escapepattern = escapepatterns[name] - if not escapepattern then - if type(befores) ~= "table" then befores = { befores } end - if type(processors) ~= "table" then processors = { processors } end - for i=1,#befores do - local before = befores[i] - local processor = processors[i] - if trace_visualize then - report_visualizers("registering escape line pattern, name %a, before %a, after <>",name,before) - end - before = P(before) * space_pattern - after = space_pattern * P("\n") - local action - if processor then - action = function(s) apply_processor(processor,s) end - else - action = escapemethod or texmethod - end - local ep = (before / "") * ((1 - after)^0 / action) * (space_pattern / "") - if escapepattern then - escapepattern = escapepattern + ep - else - escapepattern = ep - end - end - escapepattern = ( - escapepattern - + hack((1 - escapepattern)^1) / (normalmethod or defaultmethod) - )^0 - escapepatterns[name] = escapepattern - end - return escapepattern -end - -function visualizers.registerescapecommand(name,token,normalmethod,escapecommand,processor) - local escapepattern = escapepatterns[name] - if not escapepattern then - if trace_visualize then - report_visualizers("registering escape token, name %a, token %a",name,token) - end - token = P(token) - local notoken = hack((1 - token)^1) - local cstoken = name_pattern * space_pattern - escapepattern = ( - (token / "") - * (cstoken / (escapecommand or texcommand)) - + (notoken / (normalmethod or defaultmethod)) - )^0 - escapepatterns[name] = escapepattern - end - return escapepattern -end - -local escapedvisualizers = { } - -local function visualize(content,settings) -- maybe also method in settings - if content and content ~= "" then - local method = lower(settings.method or "default") - local m - local e = settings.escape - if e and e ~= "" then - local newname = format("%s : %s",method,e) - local newspec = specifications[newname] - if newspec then - m = newspec - else - local starts, stops, processors = { }, { }, { } - if e == v_yes then - starts[1] = "/BTEX" - stops [1] = "/ETEX" - else - local s = settings_to_array(e,true) - for i=1,#s do - local si = s[i] - local processor, pattern = split_processor(si) - si = processor and 
pattern or si - local start, stop = match(si,"^(.-),(.-)$") - if start then - local n = #starts + 1 - starts[n] = start - stops [n] = stop or "" - processors[n] = processor - end - end - end - local oldvisualizer = specifications[method] or specifications.default - local oldparser = oldvisualizer.direct - local newparser - if starts[1] and stops[1] ~= "" then - newparser = visualizers.registerescapepattern(newname,starts,stops,oldparser,nil,processors) - elseif starts[1] then - newparser = visualizers.registerescapeline(newname,starts,oldparser,nil,processors) - else -- for old times sake: /em - newparser = visualizers.registerescapecommand(newname,e,oldparser,nil,processors) - end - m = visualizers.register(newname, { - parser = newparser, - handler = oldvisualizer.handler, - }) - end - else - m = specifications[method] or specifications.default - end - local nature = settings.nature or "display" - local n = m and m[nature] - if n then - if trace_visualize then - report_visualizers("visualize using method %a and nature %a",method,nature) - end - n(content,settings) - else - if trace_visualize then - report_visualizers("visualize using method %a",method) - end - fallback(content,1,settings) - end - end -end - -visualizers.visualize = visualize -visualizers.getvisualizer = getvisualizer - -local fallbacks = { } table.setmetatableindex(fallbacks,function(t,k) local v = { nature = k } t[k] = v return v end) - -local function checkedsettings(settings,nature) - if not settings then - -- let's avoid dummy tables as much as possible - return fallbacks[nature] - else - if not settings.nature then - settings.nature = nature - end - return settings - end -end - -function visualizers.visualizestring(content,settings) - visualize(content,checkedsettings(settings,"inline")) -end - -function visualizers.visualizefile(name,settings) - visualize(resolvers.loadtexfile(name),checkedsettings(settings,"display")) -end - -function visualizers.visualizebuffer(name,settings) - visualize(buffers.getcontent(name),checkedsettings(settings,"display")) -end - --- -- - -local space = C(patterns.space) * CargOne / f_space -local newline = C(patterns.newline) * CargOne / f_newline -local emptyline = C(patterns.emptyline) * CargOne / f_emptyline -local beginline = C(patterns.beginline) * CargOne / f_beginline -local anything = C(patterns.somecontent) * CargOne / f_default - ------ verbosed = (space + newline * (emptyline^0) * beginline + anything)^0 -local verbosed = (space + newline * (emptyline^0) * beginline + emptyline + newline + anything)^0 - -local function write(s,settings) -- bad name - lpegmatch(verbosed,s,1,settings or false) -end - -visualizers.write = write -visualizers.writenewline = f_newline -visualizers.writeemptyline = f_emptyline -visualizers.writespace = f_space -visualizers.writedefault = f_default - -function visualizers.writeargument(...) - context("{") -- If we didn't have tracing then we could - write(...) -- use a faster print to tex variant for the - context("}") -- { } tokens as they always have ctxcatcodes. 
-end - --- helpers - -local function realign(lines,strip) -- "yes", - local n - if strip == v_yes then - n = math.huge - for i=1, #lines do - local spaces = find(lines[i],"%S") - if not spaces then - -- empty line - elseif spaces == 0 then - n = 0 - break - elseif spaces < n then - n = spaces - end - end - n = n - 1 - else - n = tonumber(strip) - end - if n and n > 0 then - local copy = { } - for i=1,#lines do - copy[i] = sub(lines[i],n+1) - end - return copy - end - return lines -end - -local function getstrip(lines,first,last) - local first, last = first or 1, last or #lines - for i=first,last do - local li = lines[i] - if #li == 0 or find(li,"^%s*$") then - first = first + 1 - else - break - end - end - for i=last,first,-1 do - local li = lines[i] - if #li == 0 or find(li,"^%s*$") then - last = last - 1 - else - break - end - end - return first, last, last - first + 1 -end - -local function getrange(lines,first,last,range) -- 1,3 1,+3 fromhere,tothere - local noflines = #lines - local first, last = first or 1, last or noflines - if last < 0 then - last = noflines + last - end - local range = settings.range - local what = settings_to_array(range) - local r_first, r_last = what[1], what[2] - local f, l = tonumber(r_first), tonumber(r_last) - if r_first then - if f then - if f > first then - first = f - end - else - for i=first,last do - if find(lines[i],r_first) then - first = i + 1 - break - end - end - end - end - if r_last then - if l then - if l < 0 then - l = noflines + l - end - if find(r_last,"^[%+]") then -- 1,+3 - l = first + l - end - if l < last then - last = l - end - else - for i=first,last do - if find(lines[i],r_last) then - last = i - 1 - break - end - end - end - end - return first, last -end - -local tablength = 7 - -local function dotabs(content,settings) - local tab = settings.tab - tab = tab and (tab == v_yes and tablength or tonumber(tab)) - if tab then - return tabtospace(content,tab) - else - return content - end -end - -local function filter(lines,settings) -- todo: inline or display in settings - local strip = settings.strip - if strip and strip ~= "" then - lines = realign(lines,strip) - end - local line, n = 0, 0 - local first, last, m = getstrip(lines) - if range then - first, last = getrange(lines,first,last,range) - first, last = getstrip(lines,first,last) - end - -- \r is \endlinechar but \n would is more generic so this choice is debatable - local content = concat(lines,(settings.nature == "inline" and " ") or "\n",first,last) - return content, m -end - -local getlines = buffers.getlines - --- interface - -function commands.doifelsevisualizer(name) - commands.doifelse(specifications[lower(name)]) -end - -commands.loadvisualizer = visualizers.load - --- local decodecomment = resolvers.macros.decodecomment -- experiment - -function commands.typebuffer(settings) - local lines = getlines(settings.name) - if lines then - local content, m = filter(lines,settings) - if content and content ~= "" then - -- content = decodecomment(content) - content = dotabs(content,settings) - visualize(content,checkedsettings(settings,"display")) - end - end -end - -function commands.processbuffer(settings) - local lines = getlines(settings.name) - if lines then - local content, m = filter(lines,settings) - if content and content ~= "" then - content = dotabs(content,settings) - visualize(content,checkedsettings(settings,"direct")) - end - end -end - --- not really buffers but it's closely related - --- A string.gsub(str,"(\\.-) +$","%1") is faster than an lpeg when there is a --- 
match but slower when there is no match. But anyway, we need a more clever --- parser so we use lpeg. --- --- [[\text ]] [[\text{}]] [[\text \text ]] [[\text \\ \text ]] - ------ strip = Cs((P(" ")^1 * P(-1)/"" + 1)^0) -local strip = Cs((P("\\") * ((1-S("\\ "))^1) * (P(" ")/"") + 1)^0) -- - -function commands.typestring(settings) - local content = settings.data - if content and content ~= "" then - content = #content > 1 and lpegmatch(strip,content) or content -- can be an option, but needed in e.g. tabulate - -- content = decodecomment(content) - -- content = dotabs(content,settings) - visualize(content,checkedsettings(settings,"inline")) - end -end - -function commands.typefile(settings) - local filename = settings.name - local foundname = resolvers.findtexfile(filename) - if foundname and foundname ~= "" then - local str = resolvers.loadtexfile(foundname) - if str and str ~= "" then - local regime = settings.regime - if regime and regime ~= "" then - str = regimes.translate(str,regime) - end - if str and str~= "" then - -- content = decodecomment(content) - local lines = splitlines(str) - local content, m = filter(lines,settings) - if content and content ~= "" then - content = dotabs(content,settings) - visualize(content,checkedsettings(settings,"display")) - end - end - end - end -end +if not modules then modules = { } end modules ['buff-ver'] = { + version = 1.001, + comment = "companion to buff-ver.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- The default visualizers have reserved names starting with buff-imp-*. Users are +-- supposed to use different names for their own variants. +-- +-- todo: skip=auto + +local type, next, rawset, rawget, setmetatable, getmetatable = type, next, rawset, rawget, setmetatable, getmetatable +local format, lower, upper,match, find, sub = string.format, string.lower, string.upper, string.match, string.find, string.sub +local splitlines = string.splitlines +local concat = table.concat +local C, P, R, S, V, Carg, Cc, Cs = lpeg.C, lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Carg, lpeg.Cc, lpeg.Cs +local patterns, lpegmatch, is_lpeg = lpeg.patterns, lpeg.match, lpeg.is_lpeg + +local context, commands = context, commands + +local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end) +local report_visualizers = logs.reporter("buffers","visualizers") + +local allocate = utilities.storage.allocate + +visualizers = visualizers or { } +local specifications = allocate() +visualizers.specifications = specifications + +local tabtospace = utilities.strings.tabtospace +local variables = interfaces.variables +local settings_to_array = utilities.parsers.settings_to_array +local variables = interfaces.variables +local findfile = resolvers.findfile +local addsuffix = file.addsuffix + +local v_auto = variables.auto +local v_yes = variables.yes + +-- beware, all macros have an argument: + +local doinlineverbatimnewline = context.doinlineverbatimnewline +local doinlineverbatimbeginline = context.doinlineverbatimbeginline +local doinlineverbatimemptyline = context.doinlineverbatimemptyline +local doinlineverbatimstart = context.doinlineverbatimstart +local doinlineverbatimstop = context.doinlineverbatimstop + +local dodisplayverbatimnewline = context.dodisplayverbatimnewline +local dodisplayverbatimbeginline = context.dodisplayverbatimbeginline +local dodisplayverbatimemptyline = context.dodisplayverbatimemptyline 
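-- A standalone sketch of the control sequence space stripper used in
-- commands.typestring, runnable with plain lpeg outside ConTeXt; it drops the
-- single space that follows a control word, as in the examples quoted above.
local lpeg = require("lpeg")
local P, S, Cs = lpeg.P, lpeg.S, lpeg.Cs
local strip = Cs((P("\\") * ((1 - S("\\ "))^1) * (P(" ")/"") + 1)^0)
print(lpeg.match(strip,"\\text "))        -- \text     (space after the control word dropped)
print(lpeg.match(strip,"\\text{}"))       -- \text{}   (unchanged, no space follows)
print(lpeg.match(strip,"\\text \\text ")) -- \text\text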
+local dodisplayverbatimstart = context.dodisplayverbatimstart +local dodisplayverbatimstop = context.dodisplayverbatimstop + +local verbatim = context.verbatim +local doverbatimspace = context.doverbatimspace + +local CargOne = Carg(1) + +local function f_emptyline(s,settings) + if settings and settings.nature == "inline" then + doinlineverbatimemptyline() + else + dodisplayverbatimemptyline() + end +end + +local function f_beginline(s,settings) + if settings and settings.nature == "inline" then + doinlineverbatimbeginline() + else + dodisplayverbatimbeginline() + end +end + +local function f_newline(s,settings) + if settings and settings.nature == "inline" then + doinlineverbatimnewline() + else + dodisplayverbatimnewline() + end +end + +local function f_start(s,settings) + if settings and settings.nature == "inline" then + doinlineverbatimstart() + else + dodisplayverbatimstart() + end +end + +local function f_stop(s,settings) + if settings and settings.nature == "inline" then + doinlineverbatimstop() + else + dodisplayverbatimstop() + end +end + +local function f_default(s) -- (s,settings) + verbatim(s) +end + +local function f_space() -- (s,settings) + doverbatimspace() +end + +local function f_signal() -- (s,settings) + -- we use these for special purposes +end + +local signal = "\000" + +visualizers.signal = signal +visualizers.signalpattern = P(signal) + +local functions = { __index = { + emptyline = f_emptyline, + newline = f_newline, + default = f_default, + beginline = f_beginline, + space = f_space, + start = f_start, + stop = f_stop, + signal = f_signal, + } +} + +local handlers = { } + +function visualizers.newhandler(name,data) + local tname, tdata = type(name), type(data) + if tname == "table" then -- (data) + setmetatable(name,getmetatable(name) or functions) + return name + elseif tname == "string" then + if tdata == "string" then -- ("name","parent") + local result = { } + setmetatable(result,getmetatable(handlers[data]) or functions) + handlers[name] = result + return result + elseif tdata == "table" then -- ("name",data) + setmetatable(data,getmetatable(data) or functions) + handlers[name] = data + return data + else -- ("name") + local result = { } + setmetatable(result,functions) + handlers[name] = result + return result + end + else -- () + local result = { } + setmetatable(result,functions) + return result + end +end + +function visualizers.newgrammar(name,t) + name = lower(name) + t = t or { } + local g = visualizers.specifications[name] + g = g and g.grammar + if g then + if trace_visualize then + report_visualizers("cloning grammar %a",name) + end + for k,v in next, g do + if not t[k] then + t[k] = v + end + if is_lpeg(v) then + t[name..":"..k] = v + end + end + end + return t +end + +local function getvisualizer(method,nature) + method = lower(method) + local m = specifications[method] or specifications.default + if nature then + if trace_visualize then + report_visualizers("getting visualizer %a with nature %a",method,nature) + end + return m and (m[nature] or m.parser) or nil + else + if trace_visualize then + report_visualizers("getting visualizer %a",method) + end + return m and m.parser or nil + end +end + +local fallback = context.verbatim + +local function makepattern(visualizer,replacement,pattern) + if not pattern then + report_visualizers("error in visualizer %a",replacement) + return patterns.alwaystrue + else + if type(visualizer) == "table" and type(replacement) == "string" then + replacement = visualizer[replacement] or fallback + else + 
replacement = fallback + end + return (C(pattern) * CargOne) / replacement + end +end + +local function makenested(handler,how,start,stop) + local b, e, f = P(start), P(stop), how + if type(how) == "string" then + f = function(s) getvisualizer(how,"direct")(s) end + end + return makepattern(handler,"name",b) + * ((1-e)^1/f) + * makepattern(handler,"name",e) +end + +visualizers.pattern = makepattern +visualizers.makepattern = makepattern +visualizers.makenested = makenested + +function visualizers.load(name) + name = lower(name) + if rawget(specifications,name) == nil then + name = lower(name) + local texname = findfile(format("buff-imp-%s.mkiv",name)) + local luaname = findfile(format("buff-imp-%s.lua" ,name)) + if texname == "" or luaname == "" then + -- assume a user specific file + luaname = findfile(addsuffix(name,"mkiv")) + texname = findfile(addsuffix(name,"lua" )) + end + if texname == "" or luaname == "" then + if trace_visualize then + report_visualizers("unknown visualizer %a",name) + end + else + if trace_visualize then + report_visualizers("loading visualizer %a",name) + end + lua.registercode(luaname) + context.input(texname) + end + if rawget(specifications,name) == nil then + rawset(specifications,name,false) + end + end +end + +function visualizers.register(name,specification) + name = lower(name) + if trace_visualize then + report_visualizers("registering visualizer %a",name) + end + specifications[name] = specification + local parser, handler = specification.parser, specification.handler + local displayparser = specification.display or parser + local inlineparser = specification.inline or parser + local isparser = is_lpeg(parser) + local start, stop + if isparser then + start = makepattern(handler,"start",patterns.alwaysmatched) + stop = makepattern(handler,"stop",patterns.alwaysmatched) + end + if handler then + if isparser then + specification.display = function(content,settings) + if handler.startdisplay then handler.startdisplay(settings) end + lpegmatch(start * displayparser * stop,content,1,settings) + if handler.stopdisplay then handler.stopdisplay(settings) end + end + specification.inline = function(content,settings) + if handler.startinline then handler.startinline(settings) end + lpegmatch(start * inlineparser * stop,content,1,settings) + if handler.stopinline then handler.stopinline(settings) end + end + specification.direct = function(content,settings) + lpegmatch(parser,content,1,settings) + end + elseif parser then + specification.display = function(content,settings) + if handler.startdisplay then handler.startdisplay(settings) end + parser(content,settings) + if handler.stopdisplay then handler.stopdisplay(settings) end + end + specification.inline = function(content,settings) + if handler.startinline then handler.startinline(settings) end + parser(content,settings) + if handler.stopinline then handler.stopinline(settings) end + end + specification.direct = parser + end + elseif isparser then + specification.display = function(content,settings) + lpegmatch(start * displayparser * stop,content,1,settings) + end + specification.inline = function(content,settings) + lpegmatch(start * inlineparser * stop,content,1,settings) + end + specification.direct = function(content,settings) + lpegmatch(parser,content,1,settings) + end + elseif parser then + specification.display = parser + specification.inline = parser + specification.direct = parser + end + return specification +end + +local escapepatterns = allocate() +visualizers.escapepatterns = escapepatterns + 
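-- A minimal sketch of how the registration machinery above fits together; the
-- name "demo" and its one-character grammar are hypothetical illustrations and
-- not something shipped with the distribution. newhandler supplies the default
-- callbacks (verbatim text, verbatim spaces), makepattern routes every capture
-- through that handler, and register derives the display, inline and direct
-- methods from the parser.

local demohandler = visualizers.newhandler("demo")
local demoparser  = visualizers.makepattern(demohandler,"default",lpeg.P(1))^0

local demospec = visualizers.register("demo", {
    parser  = demoparser,
    handler = demohandler,
})

-- afterwards demospec.display(content,settings) and demospec.inline(content,settings)
-- can be fed content plus a settings table, just like the built-in visualizers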
+local function texmethod(s) + context.bgroup() + context(s) + context.egroup() +end + +local function texcommand(s) + context[s]() +end + +local function defaultmethod(s,settings) + lpegmatch(getvisualizer("default"),lower(s),1,settings) +end + +-- we can consider using a nested instead + +local space_pattern = patterns.space^0 +local name_pattern = R("az","AZ")^1 + +-- the hack is needed in order to retain newlines when an escape happens at the +-- at the begin of a line; it also ensures proper line numbering; a bit messy + +local function hack(pattern) + return Cs(pattern * Cc(signal)) +end + +local split_processor = typesetters.processors.split +local apply_processor = typesetters.processors.apply + +-- todo: { before = b, after = a, processor = p }, ... + +function visualizers.registerescapepattern(name,befores,afters,normalmethod,escapemethod,processors) + local escapepattern = escapepatterns[name] + if not escapepattern then + if type(befores) ~= "table" then befores = { befores } end + if type(afters) ~= "table" then afters = { afters } end + if type(processors) ~= "table" then processors = { processors } end + for i=1,#befores do + local before = befores[i] + local after = afters[i] + local processor = processors[i] + if trace_visualize then + report_visualizers("registering escape pattern, name %a, index %a, before %a, after %a, processor %a", + name,i,before,after,processor or "default") + end + before = P(before) * space_pattern + after = space_pattern * P(after) + local action + if processor then + action = function(s) apply_processor(processor,s) end + else + action = escapemethod or texmethod + end + local ep = (before / "") * ((1 - after)^0 / action) * (after / "") + if escapepattern then + escapepattern = escapepattern + ep + else + escapepattern = ep + end + end + escapepattern = ( + escapepattern + + hack((1 - escapepattern)^1) / (normalmethod or defaultmethod) + )^0 + escapepatterns[name] = escapepattern + end + return escapepattern +end + +function visualizers.registerescapeline(name,befores,normalmethod,escapemethod,processors) + local escapepattern = escapepatterns[name] + if not escapepattern then + if type(befores) ~= "table" then befores = { befores } end + if type(processors) ~= "table" then processors = { processors } end + for i=1,#befores do + local before = befores[i] + local processor = processors[i] + if trace_visualize then + report_visualizers("registering escape line pattern, name %a, before %a, after <>",name,before) + end + before = P(before) * space_pattern + after = space_pattern * P("\n") + local action + if processor then + action = function(s) apply_processor(processor,s) end + else + action = escapemethod or texmethod + end + local ep = (before / "") * ((1 - after)^0 / action) * (space_pattern / "") + if escapepattern then + escapepattern = escapepattern + ep + else + escapepattern = ep + end + end + escapepattern = ( + escapepattern + + hack((1 - escapepattern)^1) / (normalmethod or defaultmethod) + )^0 + escapepatterns[name] = escapepattern + end + return escapepattern +end + +function visualizers.registerescapecommand(name,token,normalmethod,escapecommand,processor) + local escapepattern = escapepatterns[name] + if not escapepattern then + if trace_visualize then + report_visualizers("registering escape token, name %a, token %a",name,token) + end + token = P(token) + local notoken = hack((1 - token)^1) + local cstoken = name_pattern * space_pattern + escapepattern = ( + (token / "") + * (cstoken / (escapecommand or texcommand)) + + (notoken / 
(normalmethod or defaultmethod)) + )^0 + escapepatterns[name] = escapepattern + end + return escapepattern +end + +local escapedvisualizers = { } + +local function visualize(content,settings) -- maybe also method in settings + if content and content ~= "" then + local method = lower(settings.method or "default") + local m + local e = settings.escape + if e and e ~= "" then + local newname = format("%s : %s",method,e) + local newspec = specifications[newname] + if newspec then + m = newspec + else + local starts, stops, processors = { }, { }, { } + if e == v_yes then + starts[1] = "/BTEX" + stops [1] = "/ETEX" + else + local s = settings_to_array(e,true) + for i=1,#s do + local si = s[i] + local processor, pattern = split_processor(si) + si = processor and pattern or si + local start, stop = match(si,"^(.-),(.-)$") + if start then + local n = #starts + 1 + starts[n] = start + stops [n] = stop or "" + processors[n] = processor + end + end + end + local oldvisualizer = specifications[method] or specifications.default + local oldparser = oldvisualizer.direct + local newparser + if starts[1] and stops[1] ~= "" then + newparser = visualizers.registerescapepattern(newname,starts,stops,oldparser,nil,processors) + elseif starts[1] then + newparser = visualizers.registerescapeline(newname,starts,oldparser,nil,processors) + else -- for old times sake: /em + newparser = visualizers.registerescapecommand(newname,e,oldparser,nil,processors) + end + m = visualizers.register(newname, { + parser = newparser, + handler = oldvisualizer.handler, + }) + end + else + m = specifications[method] or specifications.default + end + local nature = settings.nature or "display" + local n = m and m[nature] + if n then + if trace_visualize then + report_visualizers("visualize using method %a and nature %a",method,nature) + end + n(content,settings) + else + if trace_visualize then + report_visualizers("visualize using method %a",method) + end + fallback(content,1,settings) + end + end +end + +visualizers.visualize = visualize +visualizers.getvisualizer = getvisualizer + +local fallbacks = { } table.setmetatableindex(fallbacks,function(t,k) local v = { nature = k } t[k] = v return v end) + +local function checkedsettings(settings,nature) + if not settings then + -- let's avoid dummy tables as much as possible + return fallbacks[nature] + else + if not settings.nature then + settings.nature = nature + end + return settings + end +end + +function visualizers.visualizestring(content,settings) + visualize(content,checkedsettings(settings,"inline")) +end + +function visualizers.visualizefile(name,settings) + visualize(resolvers.loadtexfile(name),checkedsettings(settings,"display")) +end + +function visualizers.visualizebuffer(name,settings) + visualize(buffers.getcontent(name),checkedsettings(settings,"display")) +end + +-- -- + +local space = C(patterns.space) * CargOne / f_space +local newline = C(patterns.newline) * CargOne / f_newline +local emptyline = C(patterns.emptyline) * CargOne / f_emptyline +local beginline = C(patterns.beginline) * CargOne / f_beginline +local anything = C(patterns.somecontent) * CargOne / f_default + +----- verbosed = (space + newline * (emptyline^0) * beginline + anything)^0 +local verbosed = (space + newline * (emptyline^0) * beginline + emptyline + newline + anything)^0 + +local function write(s,settings) -- bad name + lpegmatch(verbosed,s,1,settings or false) +end + +visualizers.write = write +visualizers.writenewline = f_newline +visualizers.writeemptyline = f_emptyline 
+visualizers.writespace = f_space +visualizers.writedefault = f_default + +function visualizers.writeargument(...) + context("{") -- If we didn't have tracing then we could + write(...) -- use a faster print to tex variant for the + context("}") -- { } tokens as they always have ctxcatcodes. +end + +-- helpers + +local function realign(lines,strip) -- "yes", + local n + if strip == v_yes then + n = math.huge + for i=1, #lines do + local spaces = find(lines[i],"%S") + if not spaces then + -- empty line + elseif spaces == 0 then + n = 0 + break + elseif spaces < n then + n = spaces + end + end + n = n - 1 + else + n = tonumber(strip) + end + if n and n > 0 then + local copy = { } + for i=1,#lines do + copy[i] = sub(lines[i],n+1) + end + return copy + end + return lines +end + +local function getstrip(lines,first,last) + local first, last = first or 1, last or #lines + for i=first,last do + local li = lines[i] + if #li == 0 or find(li,"^%s*$") then + first = first + 1 + else + break + end + end + for i=last,first,-1 do + local li = lines[i] + if #li == 0 or find(li,"^%s*$") then + last = last - 1 + else + break + end + end + return first, last, last - first + 1 +end + +local function getrange(lines,first,last,range) -- 1,3 1,+3 fromhere,tothere + local noflines = #lines + local first, last = first or 1, last or noflines + if last < 0 then + last = noflines + last + end + local range = settings.range + local what = settings_to_array(range) + local r_first, r_last = what[1], what[2] + local f, l = tonumber(r_first), tonumber(r_last) + if r_first then + if f then + if f > first then + first = f + end + else + for i=first,last do + if find(lines[i],r_first) then + first = i + 1 + break + end + end + end + end + if r_last then + if l then + if l < 0 then + l = noflines + l + end + if find(r_last,"^[%+]") then -- 1,+3 + l = first + l + end + if l < last then + last = l + end + else + for i=first,last do + if find(lines[i],r_last) then + last = i - 1 + break + end + end + end + end + return first, last +end + +local tablength = 7 + +local function dotabs(content,settings) + local tab = settings.tab + tab = tab and (tab == v_yes and tablength or tonumber(tab)) + if tab then + return tabtospace(content,tab) + else + return content + end +end + +local function filter(lines,settings) -- todo: inline or display in settings + local strip = settings.strip + if strip and strip ~= "" then + lines = realign(lines,strip) + end + local line, n = 0, 0 + local first, last, m = getstrip(lines) + if range then + first, last = getrange(lines,first,last,range) + first, last = getstrip(lines,first,last) + end + -- \r is \endlinechar but \n would is more generic so this choice is debatable + local content = concat(lines,(settings.nature == "inline" and " ") or "\n",first,last) + return content, m +end + +local getlines = buffers.getlines + +-- interface + +function commands.doifelsevisualizer(name) + commands.doifelse(specifications[lower(name)]) +end + +commands.loadvisualizer = visualizers.load + +-- local decodecomment = resolvers.macros.decodecomment -- experiment + +function commands.typebuffer(settings) + local lines = getlines(settings.name) + if lines then + local content, m = filter(lines,settings) + if content and content ~= "" then + -- content = decodecomment(content) + content = dotabs(content,settings) + visualize(content,checkedsettings(settings,"display")) + end + end +end + +function commands.processbuffer(settings) + local lines = getlines(settings.name) + if lines then + local content, m = 
filter(lines,settings) + if content and content ~= "" then + content = dotabs(content,settings) + visualize(content,checkedsettings(settings,"direct")) + end + end +end + +-- not really buffers but it's closely related + +-- A string.gsub(str,"(\\.-) +$","%1") is faster than an lpeg when there is a +-- match but slower when there is no match. But anyway, we need a more clever +-- parser so we use lpeg. +-- +-- [[\text ]] [[\text{}]] [[\text \text ]] [[\text \\ \text ]] + +----- strip = Cs((P(" ")^1 * P(-1)/"" + 1)^0) +local strip = Cs((P("\\") * ((1-S("\\ "))^1) * (P(" ")/"") + 1)^0) -- + +function commands.typestring(settings) + local content = settings.data + if content and content ~= "" then + content = #content > 1 and lpegmatch(strip,content) or content -- can be an option, but needed in e.g. tabulate + -- content = decodecomment(content) + -- content = dotabs(content,settings) + visualize(content,checkedsettings(settings,"inline")) + end +end + +function commands.typefile(settings) + local filename = settings.name + local foundname = resolvers.findtexfile(filename) + if foundname and foundname ~= "" then + local str = resolvers.loadtexfile(foundname) + if str and str ~= "" then + local regime = settings.regime + if regime and regime ~= "" then + str = regimes.translate(str,regime) + end + if str and str~= "" then + -- content = decodecomment(content) + local lines = splitlines(str) + local content, m = filter(lines,settings) + if content and content ~= "" then + content = dotabs(content,settings) + visualize(content,checkedsettings(settings,"display")) + end + end + end + end +end diff --git a/tex/context/base/catc-ini.lua b/tex/context/base/catc-ini.lua index d4f9b65af..61f28d789 100644 --- a/tex/context/base/catc-ini.lua +++ b/tex/context/base/catc-ini.lua @@ -1,41 +1,41 @@ -if not modules then modules = { } end modules ['catc-ini'] = { - version = 1.001, - comment = "companion to catc-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -catcodes = catcodes or { } -catcodes.numbers = catcodes.numbers or { } -catcodes.names = catcodes.names or { } - -storage.register("catcodes/numbers", catcodes.numbers, "catcodes.numbers") -storage.register("catcodes/names", catcodes.names, "catcodes.names") - -local numbers = catcodes.numbers -local names = catcodes.names - --- this only happens at initime - -function catcodes.register(name,number) - numbers[name] = number - local cnn = names[number] - if cnn then - cnn[#cnn+1] = name - else - names[number] = { name } - end - tex[name] = number -- downward compatible -end - --- this only happens at runtime - -for k, v in next, numbers do - tex[k] = v -- downward compatible -end - --- nasty - -table.setmetatableindex(numbers,function(t,k) if type(k) == "number" then t[k] = k return k end end) -table.setmetatableindex(names, function(t,k) if type(k) == "string" then t[k] = k return k end end) +if not modules then modules = { } end modules ['catc-ini'] = { + version = 1.001, + comment = "companion to catc-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +catcodes = catcodes or { } +catcodes.numbers = catcodes.numbers or { } +catcodes.names = catcodes.names or { } + +storage.register("catcodes/numbers", catcodes.numbers, "catcodes.numbers") +storage.register("catcodes/names", catcodes.names, "catcodes.names") + +local 
numbers = catcodes.numbers +local names = catcodes.names + +-- this only happens at initime + +function catcodes.register(name,number) + numbers[name] = number + local cnn = names[number] + if cnn then + cnn[#cnn+1] = name + else + names[number] = { name } + end + tex[name] = number -- downward compatible +end + +-- this only happens at runtime + +for k, v in next, numbers do + tex[k] = v -- downward compatible +end + +-- nasty + +table.setmetatableindex(numbers,function(t,k) if type(k) == "number" then t[k] = k return k end end) +table.setmetatableindex(names, function(t,k) if type(k) == "string" then t[k] = k return k end end) diff --git a/tex/context/base/char-cjk.lua b/tex/context/base/char-cjk.lua index 3d7de1423..f4b3373a2 100644 --- a/tex/context/base/char-cjk.lua +++ b/tex/context/base/char-cjk.lua @@ -1,365 +1,365 @@ -if not modules then modules = { } end modules ['char-cjk'] = { - version = 1.001, - comment = "companion to char-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local setmetatable = setmetatable -local insert = table.insert -local floor = math.floor -local format = string.format -local utfchar = utf.char - -local ranges = characters.ranges -local allocate = utilities.storage.allocate - --- Hangul Syllable - --- The following conversion is taken from unicode.org/reports/tr15/tr15-23.html#Hangul --- but adapted to our needs. - --- local SBase = 0xAC00 --- --- local LBase, LCount = 0x1100, 19 --- local VBase, VCount = 0x1161, 21 --- local TBase, TCount = 0x11A7, 28 --- --- local NCount = VCount * TCount --- local SCount = LCount * NCount --- --- local function decomposed(unicode) --- local SIndex = unicode - SBase --- if SIndex >= 0 and SIndex < SCount then --- local lead_consonant = LBase + floor( SIndex / NCount) --- local medial_vowel = VBase + floor((SIndex % NCount) / TCount) --- local tail_consonant = TBase + SIndex % TCount --- if tail_consonant ~= TBase then --- return lead_consonant, medial_vowel, tail_consonant --- else --- return lead_consonant, medial_vowel --- end --- end --- end --- --- Lua will optimize the inline constants so the next variant is --- 10% faster. In practice this will go unnoticed, but it's also less --- code, so let's do it. Pushing the constant section into the --- function body saves 5%. 
- -local function decomposed(unicode) - local index = unicode - 0xAC00 - if index >= 0 and index < 19 * 21 * 28 then - local lead_consonant = 0x1100 + floor( index / (21 * 28)) - local medial_vowel = 0x1161 + floor((index % (21 * 28)) / 28) - local tail_consonant = 0x11A7 + index % 28 - if tail_consonant ~= 0x11A7 then - return lead_consonant, medial_vowel, tail_consonant - else - return lead_consonant, medial_vowel - end - end -end - -local lead_consonants = { [0] = - "G", "GG", "N", "D", "DD", "R", "M", "B", "BB", - "S", "SS", "", "J", "JJ", "C", "K", "T", "P", "H" -} - -local medial_vowels = { [0] = - "A", "AE", "YA", "YAE", "EO", "E", "YEO", "YE", "O", - "WA", "WAE", "OE", "YO", "U", "WEO", "WE", "WI", - "YU", "EU", "YI", "I" -} - -local tail_consonants = { [0] = - "", "G", "GG", "GS", "N", "NJ", "NH", "D", "L", "LG", "LM", - "LB", "LS", "LT", "LP", "LH", "M", "B", "BS", - "S", "SS", "NG", "J", "C", "K", "T", "P", "H" -} - --- local function description(unicode) --- local index = unicode - 0xAC00 --- if index >= 0 and index < 19 * 21 * 28 then --- local lead_consonant = floor( index / NCount) --- local medial_vowel = floor((index % NCount) / TCount) --- local tail_consonant = index % TCount --- return format( --- "HANGUL SYLLABLE %s%s%s", --- lead_consonants[lead_consonant], --- medial_vowels [medial_vowel ], --- tail_consonants[tail_consonant] --- ) --- end --- end - -local function description(unicode) - local index = unicode - 0xAC00 - if index >= 0 and index < 19 * 21 * 28 then - local lead_consonant = floor( index / (21 * 28)) - local medial_vowel = floor((index % (21 * 28)) / 28) - local tail_consonant = index % 28 - return format( - "HANGUL SYLLABLE %s%s%s", - lead_consonants[lead_consonant], - medial_vowels [medial_vowel ], - tail_consonants[tail_consonant] - ) - end -end - --- so far - --- We have a [lead consonant,medial vowel,tail consonant] where the last one --- is optional. For sort ranges we need the first one but some are collapsed. --- Beware, we map to modern so the font should support it. 
- -local function leadconsonant(unicode) - return - -- unicode < 0xAC00 and nil -- original - -- unicode > 0xD7AF and nil or -- original - unicode >= 0xD558 and 0x314E or -- 하 => ã…Ž - unicode >= 0xD30C and 0x314D or -- 파 => ã… - unicode >= 0xD0C0 and 0x314C or -- 타 => ã…Œ - unicode >= 0xCE74 and 0x314B or -- ì¹´ => ã…‹ - unicode >= 0xCC28 and 0x314A or -- ì°¨ => ã…Š - unicode >= 0xC790 and 0x3148 or -- ìž => ã…ˆ - unicode >= 0xC544 and 0x3147 or -- ì•„ => ã…‡ - unicode >= 0xC0AC and 0x3145 or -- 사 => ã…… - unicode >= 0xBC14 and 0x3142 or -- ë°” => ã…‚ - unicode >= 0xB9C8 and 0x3141 or -- 마 => ã… - unicode >= 0xB77C and 0x3139 or -- ë¼ => ㄹ - unicode >= 0xB2E4 and 0x3137 or -- 다 => ã„· - unicode >= 0xB098 and 0x3134 or -- 나 => ã„´ - unicode >= 0xAC00 and 0x3131 or -- ê°€ => ㄱ - nil -- can't happen -end - -local remapped = { -- this might be merged into char-def.lua - [0x1100] = 0x3131, -- G - [0x1101] = 0x3132, -- GG - [0x1102] = 0x3134, -- N - [0x1103] = 0x3137, -- D - [0x1104] = 0x3138, -- DD - [0x1105] = 0x3139, -- R - -- [0X111A] = 0x3140, -- LH used for last sound - [0x1106] = 0x3141, -- M - [0x1107] = 0x3142, -- B - [0x1108] = 0x3143, -- BB - -- [0x1121] = 0x3144, -- BS used for last sound - [0x1109] = 0x3145, -- S - [0x110A] = 0x3146, -- SS - [0x110B] = 0x3147, -- (IEUNG) no sound but has form - [0x110C] = 0x3148, -- J - [0x110D] = 0x3149, -- JJ - [0x110E] = 0x314A, -- C - [0x110F] = 0x314B, -- K - [0x1110] = 0x314C, -- T - [0x1111] = 0x314D, -- P - [0x1112] = 0x314E, -- H - - [0x1161] = 0x314F, -- A - [0x1162] = 0x3150, -- AE - [0x1163] = 0x3151, -- YA - [0x1164] = 0x3152, -- YAE - [0x1165] = 0x3153, -- EO - [0x1166] = 0x3154, -- E - [0x1167] = 0x3155, -- YEO - [0x1168] = 0x3156, -- YE - [0x1169] = 0x3157, -- O - [0x116A] = 0x3158, -- WA - [0x116B] = 0x3159, -- WAE - [0x116C] = 0x315A, -- OE - [0x116D] = 0x315B, -- YO - [0x116E] = 0x315C, -- U - [0x116F] = 0x315D, -- WEO - [0x1170] = 0x315E, -- WE - [0x1171] = 0x315F, -- WI - [0x1172] = 0x3160, -- YU - [0x1173] = 0x3161, -- EU - [0x1174] = 0x3162, -- YI - [0x1175] = 0x3163, -- I - - [0x11A8] = 0x3131, -- G - [0x11A9] = 0x3132, -- GG - [0x11AA] = 0x3133, -- GS - [0x11AB] = 0x3134, -- N - [0x11AC] = 0x3135, -- NJ - [0x11AD] = 0x3136, -- NH - [0x11AE] = 0x3137, -- D - [0x11AF] = 0x3139, -- L - [0x11B0] = 0x313A, -- LG - [0x11B1] = 0x313B, -- LM - [0x11B2] = 0x313C, -- LB - [0x11B3] = 0x313D, -- LS - [0x11B4] = 0x313E, -- LT - [0x11B5] = 0x313F, -- LP - [0x11B6] = 0x3140, -- LH - [0x11B7] = 0x3141, -- M - [0x11B8] = 0x3142, -- B - [0x11B9] = 0x3144, -- BS - [0x11BA] = 0x3145, -- S - [0x11BB] = 0x3146, -- SS - [0x11BC] = 0x3147, -- NG - [0x11BD] = 0x3148, -- J - [0x11BE] = 0x314A, -- C - [0x11BF] = 0x314B, -- K - [0x11C0] = 0x314C, -- T - [0x11C1] = 0x314D, -- P - [0x11C2] = 0x314E, -- H -} - -characters.hangul = allocate { - decomposed = decomposed, - description = description, - leadconsonant = leadconsonant, - remapped = remapped, -} - --- so far - -local hangul_syllable_basetable = { - category = "lo", - cjkwd = "w", - description = "", - direction = "l", - linebreak = "h2", -} - -local hangul_syllable_metatable = { - __index = function(t,k) - local u = t.unicodeslot - if k == "fscode" or k == "leadconsonant" then - return leadconsonant(u) - elseif k == "decomposed" then - return { decomposed(u) } - elseif k == "specials" then - return { "char", decomposed(u) } - elseif k == "description" then - return description(u) - else - return hangul_syllable_basetable[k] - end - end -} - -function characters.remap_hangul_syllabe(t) - local tt 
= type(t) - if tt == "number" then - return remapped[t] or t - elseif tt == "table" then - local r = { } - for i=1,#t do - local ti = t[i] - r[i] = remapped[ti] or ti - end - return r - else - return t - end -end - -local hangul_syllable_extender = function(k,v) - local t = { - unicodeslot = k, - } - setmetatable(t,hangul_syllable_metatable) - return t -end - -local hangul_syllable_range = { - first = 0xAC00, - last = 0xD7A3, - extender = hangul_syllable_extender, -} - -setmetatable(hangul_syllable_range, hangul_syllable_metatable) - --- CJK Ideograph - -local cjk_ideograph_metatable = { - __index = { - category = "lo", - cjkwd = "w", - description = "", - direction = "l", - linebreak = "id", - } -} - -local cjk_ideograph_extender = function(k,v) - local t = { - -- shcode = shcode, - unicodeslot = k, - } - setmetatable(t,cjk_ideograph_metatable) - return t -end - -local cjk_ideograph_range = { - first = 0x4E00, - last = 0x9FBB, - extender = cjk_ideograph_extender, -} - --- CJK Ideograph Extension A - -local cjk_ideograph_extension_a_metatable = { - __index = { - category = "lo", - cjkwd = "w", - description = "", - direction = "l", - linebreak = "id", - } -} - -local cjk_ideograph_extension_a_extender = function(k,v) - local t = { - -- shcode = shcode, - unicodeslot = k, - } - setmetatable(t,cjk_ideograph_extension_a_metatable) - return t -end - -local cjk_ideograph_extension_a_range = { - first = 0x3400, - last = 0x4DB5, - extender = cjk_ideograph_extension_a_extender, -} - --- CJK Ideograph Extension B - -local cjk_ideograph_extension_b_metatable = { - __index = { - category = "lo", - cjkwd = "w", - description = "", - direction = "l", - linebreak = "id", - } -} - -local cjk_ideograph_extension_b_extender = function(k,v) - local t = { - -- shcode = shcode, - unicodeslot = k, - } - setmetatable(t,cjk_ideograph_extension_b_metatable) - return t -end - -local cjk_ideograph_extension_b_range = { - first = 0x20000, - last = 0x2A6D6, - extender = cjk_ideograph_extension_b_extender, -} - --- Ranges - -insert(ranges, hangul_syllable_range) -insert(ranges, cjk_ideograph_range) -insert(ranges, cjk_ideograph_extension_a_range) -insert(ranges, cjk_ideograph_extension_b_range) +if not modules then modules = { } end modules ['char-cjk'] = { + version = 1.001, + comment = "companion to char-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local setmetatable = setmetatable +local insert = table.insert +local floor = math.floor +local format = string.format +local utfchar = utf.char + +local ranges = characters.ranges +local allocate = utilities.storage.allocate + +-- Hangul Syllable + +-- The following conversion is taken from unicode.org/reports/tr15/tr15-23.html#Hangul +-- but adapted to our needs. 
+ +-- local SBase = 0xAC00 +-- +-- local LBase, LCount = 0x1100, 19 +-- local VBase, VCount = 0x1161, 21 +-- local TBase, TCount = 0x11A7, 28 +-- +-- local NCount = VCount * TCount +-- local SCount = LCount * NCount +-- +-- local function decomposed(unicode) +-- local SIndex = unicode - SBase +-- if SIndex >= 0 and SIndex < SCount then +-- local lead_consonant = LBase + floor( SIndex / NCount) +-- local medial_vowel = VBase + floor((SIndex % NCount) / TCount) +-- local tail_consonant = TBase + SIndex % TCount +-- if tail_consonant ~= TBase then +-- return lead_consonant, medial_vowel, tail_consonant +-- else +-- return lead_consonant, medial_vowel +-- end +-- end +-- end +-- +-- Lua will optimize the inline constants so the next variant is +-- 10% faster. In practice this will go unnoticed, but it's also less +-- code, so let's do it. Pushing the constant section into the +-- function body saves 5%. + +local function decomposed(unicode) + local index = unicode - 0xAC00 + if index >= 0 and index < 19 * 21 * 28 then + local lead_consonant = 0x1100 + floor( index / (21 * 28)) + local medial_vowel = 0x1161 + floor((index % (21 * 28)) / 28) + local tail_consonant = 0x11A7 + index % 28 + if tail_consonant ~= 0x11A7 then + return lead_consonant, medial_vowel, tail_consonant + else + return lead_consonant, medial_vowel + end + end +end + +local lead_consonants = { [0] = + "G", "GG", "N", "D", "DD", "R", "M", "B", "BB", + "S", "SS", "", "J", "JJ", "C", "K", "T", "P", "H" +} + +local medial_vowels = { [0] = + "A", "AE", "YA", "YAE", "EO", "E", "YEO", "YE", "O", + "WA", "WAE", "OE", "YO", "U", "WEO", "WE", "WI", + "YU", "EU", "YI", "I" +} + +local tail_consonants = { [0] = + "", "G", "GG", "GS", "N", "NJ", "NH", "D", "L", "LG", "LM", + "LB", "LS", "LT", "LP", "LH", "M", "B", "BS", + "S", "SS", "NG", "J", "C", "K", "T", "P", "H" +} + +-- local function description(unicode) +-- local index = unicode - 0xAC00 +-- if index >= 0 and index < 19 * 21 * 28 then +-- local lead_consonant = floor( index / NCount) +-- local medial_vowel = floor((index % NCount) / TCount) +-- local tail_consonant = index % TCount +-- return format( +-- "HANGUL SYLLABLE %s%s%s", +-- lead_consonants[lead_consonant], +-- medial_vowels [medial_vowel ], +-- tail_consonants[tail_consonant] +-- ) +-- end +-- end + +local function description(unicode) + local index = unicode - 0xAC00 + if index >= 0 and index < 19 * 21 * 28 then + local lead_consonant = floor( index / (21 * 28)) + local medial_vowel = floor((index % (21 * 28)) / 28) + local tail_consonant = index % 28 + return format( + "HANGUL SYLLABLE %s%s%s", + lead_consonants[lead_consonant], + medial_vowels [medial_vowel ], + tail_consonants[tail_consonant] + ) + end +end + +-- so far + +-- We have a [lead consonant,medial vowel,tail consonant] where the last one +-- is optional. For sort ranges we need the first one but some are collapsed. +-- Beware, we map to modern so the font should support it. 
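-- A small worked example of the two helpers above (illustration only): the
-- syllable U+D55C 한 splits into lead consonant U+1112, medial vowel U+1161
-- and tail consonant U+11AB, while U+AC00 가 has no tail consonant and yields
-- only the first two values.

local d_lead, d_vowel, d_tail = decomposed(0xD55C)   -- 0x1112, 0x1161, 0x11AB
local d_name                  = description(0xD55C)  -- "HANGUL SYLLABLE HAN"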
+ +local function leadconsonant(unicode) + return + -- unicode < 0xAC00 and nil -- original + -- unicode > 0xD7AF and nil or -- original + unicode >= 0xD558 and 0x314E or -- 하 => ã…Ž + unicode >= 0xD30C and 0x314D or -- 파 => ã… + unicode >= 0xD0C0 and 0x314C or -- 타 => ã…Œ + unicode >= 0xCE74 and 0x314B or -- ì¹´ => ã…‹ + unicode >= 0xCC28 and 0x314A or -- ì°¨ => ã…Š + unicode >= 0xC790 and 0x3148 or -- ìž => ã…ˆ + unicode >= 0xC544 and 0x3147 or -- ì•„ => ã…‡ + unicode >= 0xC0AC and 0x3145 or -- 사 => ã…… + unicode >= 0xBC14 and 0x3142 or -- ë°” => ã…‚ + unicode >= 0xB9C8 and 0x3141 or -- 마 => ã… + unicode >= 0xB77C and 0x3139 or -- ë¼ => ㄹ + unicode >= 0xB2E4 and 0x3137 or -- 다 => ã„· + unicode >= 0xB098 and 0x3134 or -- 나 => ã„´ + unicode >= 0xAC00 and 0x3131 or -- ê°€ => ㄱ + nil -- can't happen +end + +local remapped = { -- this might be merged into char-def.lua + [0x1100] = 0x3131, -- G + [0x1101] = 0x3132, -- GG + [0x1102] = 0x3134, -- N + [0x1103] = 0x3137, -- D + [0x1104] = 0x3138, -- DD + [0x1105] = 0x3139, -- R + -- [0X111A] = 0x3140, -- LH used for last sound + [0x1106] = 0x3141, -- M + [0x1107] = 0x3142, -- B + [0x1108] = 0x3143, -- BB + -- [0x1121] = 0x3144, -- BS used for last sound + [0x1109] = 0x3145, -- S + [0x110A] = 0x3146, -- SS + [0x110B] = 0x3147, -- (IEUNG) no sound but has form + [0x110C] = 0x3148, -- J + [0x110D] = 0x3149, -- JJ + [0x110E] = 0x314A, -- C + [0x110F] = 0x314B, -- K + [0x1110] = 0x314C, -- T + [0x1111] = 0x314D, -- P + [0x1112] = 0x314E, -- H + + [0x1161] = 0x314F, -- A + [0x1162] = 0x3150, -- AE + [0x1163] = 0x3151, -- YA + [0x1164] = 0x3152, -- YAE + [0x1165] = 0x3153, -- EO + [0x1166] = 0x3154, -- E + [0x1167] = 0x3155, -- YEO + [0x1168] = 0x3156, -- YE + [0x1169] = 0x3157, -- O + [0x116A] = 0x3158, -- WA + [0x116B] = 0x3159, -- WAE + [0x116C] = 0x315A, -- OE + [0x116D] = 0x315B, -- YO + [0x116E] = 0x315C, -- U + [0x116F] = 0x315D, -- WEO + [0x1170] = 0x315E, -- WE + [0x1171] = 0x315F, -- WI + [0x1172] = 0x3160, -- YU + [0x1173] = 0x3161, -- EU + [0x1174] = 0x3162, -- YI + [0x1175] = 0x3163, -- I + + [0x11A8] = 0x3131, -- G + [0x11A9] = 0x3132, -- GG + [0x11AA] = 0x3133, -- GS + [0x11AB] = 0x3134, -- N + [0x11AC] = 0x3135, -- NJ + [0x11AD] = 0x3136, -- NH + [0x11AE] = 0x3137, -- D + [0x11AF] = 0x3139, -- L + [0x11B0] = 0x313A, -- LG + [0x11B1] = 0x313B, -- LM + [0x11B2] = 0x313C, -- LB + [0x11B3] = 0x313D, -- LS + [0x11B4] = 0x313E, -- LT + [0x11B5] = 0x313F, -- LP + [0x11B6] = 0x3140, -- LH + [0x11B7] = 0x3141, -- M + [0x11B8] = 0x3142, -- B + [0x11B9] = 0x3144, -- BS + [0x11BA] = 0x3145, -- S + [0x11BB] = 0x3146, -- SS + [0x11BC] = 0x3147, -- NG + [0x11BD] = 0x3148, -- J + [0x11BE] = 0x314A, -- C + [0x11BF] = 0x314B, -- K + [0x11C0] = 0x314C, -- T + [0x11C1] = 0x314D, -- P + [0x11C2] = 0x314E, -- H +} + +characters.hangul = allocate { + decomposed = decomposed, + description = description, + leadconsonant = leadconsonant, + remapped = remapped, +} + +-- so far + +local hangul_syllable_basetable = { + category = "lo", + cjkwd = "w", + description = "", + direction = "l", + linebreak = "h2", +} + +local hangul_syllable_metatable = { + __index = function(t,k) + local u = t.unicodeslot + if k == "fscode" or k == "leadconsonant" then + return leadconsonant(u) + elseif k == "decomposed" then + return { decomposed(u) } + elseif k == "specials" then + return { "char", decomposed(u) } + elseif k == "description" then + return description(u) + else + return hangul_syllable_basetable[k] + end + end +} + +function characters.remap_hangul_syllabe(t) + local tt 
= type(t) + if tt == "number" then + return remapped[t] or t + elseif tt == "table" then + local r = { } + for i=1,#t do + local ti = t[i] + r[i] = remapped[ti] or ti + end + return r + else + return t + end +end + +local hangul_syllable_extender = function(k,v) + local t = { + unicodeslot = k, + } + setmetatable(t,hangul_syllable_metatable) + return t +end + +local hangul_syllable_range = { + first = 0xAC00, + last = 0xD7A3, + extender = hangul_syllable_extender, +} + +setmetatable(hangul_syllable_range, hangul_syllable_metatable) + +-- CJK Ideograph + +local cjk_ideograph_metatable = { + __index = { + category = "lo", + cjkwd = "w", + description = "", + direction = "l", + linebreak = "id", + } +} + +local cjk_ideograph_extender = function(k,v) + local t = { + -- shcode = shcode, + unicodeslot = k, + } + setmetatable(t,cjk_ideograph_metatable) + return t +end + +local cjk_ideograph_range = { + first = 0x4E00, + last = 0x9FBB, + extender = cjk_ideograph_extender, +} + +-- CJK Ideograph Extension A + +local cjk_ideograph_extension_a_metatable = { + __index = { + category = "lo", + cjkwd = "w", + description = "", + direction = "l", + linebreak = "id", + } +} + +local cjk_ideograph_extension_a_extender = function(k,v) + local t = { + -- shcode = shcode, + unicodeslot = k, + } + setmetatable(t,cjk_ideograph_extension_a_metatable) + return t +end + +local cjk_ideograph_extension_a_range = { + first = 0x3400, + last = 0x4DB5, + extender = cjk_ideograph_extension_a_extender, +} + +-- CJK Ideograph Extension B + +local cjk_ideograph_extension_b_metatable = { + __index = { + category = "lo", + cjkwd = "w", + description = "", + direction = "l", + linebreak = "id", + } +} + +local cjk_ideograph_extension_b_extender = function(k,v) + local t = { + -- shcode = shcode, + unicodeslot = k, + } + setmetatable(t,cjk_ideograph_extension_b_metatable) + return t +end + +local cjk_ideograph_extension_b_range = { + first = 0x20000, + last = 0x2A6D6, + extender = cjk_ideograph_extension_b_extender, +} + +-- Ranges + +insert(ranges, hangul_syllable_range) +insert(ranges, cjk_ideograph_range) +insert(ranges, cjk_ideograph_extension_a_range) +insert(ranges, cjk_ideograph_extension_b_range) diff --git a/tex/context/base/char-enc.lua b/tex/context/base/char-enc.lua index 048837eec..5f3ecd888 100644 --- a/tex/context/base/char-enc.lua +++ b/tex/context/base/char-enc.lua @@ -1,186 +1,186 @@ -if not modules then modules = { } end modules ['char-enc'] = { - version = 1.001, - comment = "companion to char-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" - -- dataonly = true, -} - --- Thanks to tex4ht for these mappings. 
- -local allocate, setinitializer = utilities.storage.allocate, utilities.storage.setinitializer - -characters = characters or { } -local characters = characters - -characters.synonyms = allocate { -- afm mess - angle = 0x2220, - anticlockwise = 0x21BA, - arrowaxisleft = 0x2190, - arrowaxisright = 0x2192, - arrowparrleftright = 0x21C6, - arrowparrrightleft = 0x21C4, - arrowtailleft = 0x21A2, - arrowtailright = 0x21A3, - arrowtripleleft = 0x21DA, - arrowtripleright = 0x21DB, - axisshort = 0x2212, - because = 0x2235, - between = 0x226C, - check = 0x2713, - circleasteris = 0x229B, - circleequal = 0x2257, - circleminus = 0x229D, - circleR = 0x24C7, - circlering = 0x229A, - circleS = 0x24C8, - clockwise = 0x21BB, - complement = 0x2201, - curlyleft = 0x21AB, - curlyright = 0x21AC, - dblarrowdwn = 0x21CA, - dblarrowheadleft = 0x219E, - dblarrowheadright = 0x21A0, - dblarrowleft = 0x21C7, - dblarrowright = 0x21C9, - dblarrowup = 0x21C8, - defines = 0x225C, - diamond = 0x2662, - diamondsolid = 0x2666, - difference = 0x224F, - dotplus = 0x2214, - downfall = 0x22CE, - equaldotleftright = 0x2252, - equaldotrightleft = 0x2253, - equalorfollows = 0x22DF, - equalorgreater = 0x22DD, - equalorless = 0x22DC, - equalorprecedes = 0x22DE, - equalsdots = 0x2251, - followsorcurly = 0x227D, - followsorequal = 0x227F, - forces = 0x22A9, - forcesbar = 0x22AA, - fork = 0x22D4, - frown = 0x2322, - geomequivalent = 0x224E, - greaterdbleqlless = 0x22Da, - greaterdblequal = 0x2267, - greaterlessequal = 0x22DA, - greaterorapproxeql = 0x227F, - greaterorequalslant= 0x2265, - greaterorless = 0x2277, - greaterorsimilar = 0x2273, - harpoondownleft = 0x21C3, - harpoondownright = 0x21C2, - harpoonleftright = 0x21CC, - harpoonrightleft = 0x21CB, - harpoonupleft = 0x21BF, - harpoonupright = 0x21BE, - intercal = 0x22BA, - intersectiondbl = 0x22D2, - lessdbleqlgreater = 0x22DB, - lessdblequal = 0x2266, - lessequalgreater = 0x22DB, - lessorapproxeql = 0x227E, - lessorequalslant = 0x2264, - lessorgreater = 0x2276, - lessorsimilar = 0x2272, - maltesecross = 0xFFFD, - measuredangle = 0x2221, - muchgreater = 0x22D9, - muchless = 0x22D8, - multimap = 0x22B8, - multiopenleft = 0x22CB, - multiopenright = 0x22CC, - nand = 0x22BC, - orunderscore = 0x22BB, - perpcorrespond = 0x2259, - precedesorcurly = 0x227C, - precedesorequal = 0x227E, - primereverse = 0x2035, - proportional = 0x221D, - revasymptequal = 0x2243, - revsimilar = 0x223D, - rightanglene = 0x231D, - rightanglenw = 0x231C, - rightanglese = 0x231F, - rightanglesw = 0x231E, - ringinequal = 0x2256, - satisfies = 0x22A8, - shiftleft = 0x21B0, - shiftright = 0x21B1, - smile = 0x2323, - sphericalangle = 0x2222, - square = 0x25A1, - squaredot = 0x22A1, - squareimage = 0x228F, - squareminus = 0x229F, - squaremultiply = 0x22A0, - squareoriginal = 0x2290, - squareplus = 0x229E, - squaresmallsolid = 0x25AA, - squaresolid = 0x25A0, - squiggleleftright = 0x21AD, - squiggleright = 0x21DD, - star = 0x22C6, - subsetdbl = 0x22D0, - subsetdblequal = 0x2286, - supersetdbl = 0x22D1, - supersetdblequa = 0x2287, - therefore = 0x2234, - triangle = 0x25B5, - triangledownsld = 0x25BE, - triangleinv = 0x25BF, - triangleleft = 0x25C3, - triangleleftequal = 0x22B4, - triangleleftsld = 0x25C2, - triangleright = 0x25B9, - trianglerightequal = 0x22B5, - trianglerightsld = 0x25B8, - trianglesolid = 0x25B4, - uniondbl = 0x22D3, - uprise = 0x22CF, - Yen = 0x00A5, -} - --- if not characters.enccodes then --- --- local enccodes = { } characters.enccodes = enccodes --- --- for unicode, data in next, characters.data 
do --- local encname = data.adobename or data.contextname --- if encname then --- enccodes[encname] = unicode --- end --- end --- --- for name, unicode in next, characters.synonyms do --- if not enccodes[name] then enccodes[name] = unicode end --- end --- --- --- end --- --- storage.register("characters.enccodes", characters.enccodes, "characters.enccodes") - --- As this table is seldom used, we can delay its definition. Beware, this means --- that table.print would not work on this file unless it is accessed once. This --- why the serializer does a dummy access. - -local enccodes = allocate() characters.enccodes = enccodes - -local function initialize() - for unicode, data in next, characters.data do - local encname = data.adobename or data.contextname - if encname then - enccodes[encname] = unicode - end - end - for name, unicode in next, characters.synonyms do - if not enccodes[name] then enccodes[name] = unicode end - end -end - -setinitializer(enccodes,initialize) +if not modules then modules = { } end modules ['char-enc'] = { + version = 1.001, + comment = "companion to char-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" + -- dataonly = true, +} + +-- Thanks to tex4ht for these mappings. + +local allocate, setinitializer = utilities.storage.allocate, utilities.storage.setinitializer + +characters = characters or { } +local characters = characters + +characters.synonyms = allocate { -- afm mess + angle = 0x2220, + anticlockwise = 0x21BA, + arrowaxisleft = 0x2190, + arrowaxisright = 0x2192, + arrowparrleftright = 0x21C6, + arrowparrrightleft = 0x21C4, + arrowtailleft = 0x21A2, + arrowtailright = 0x21A3, + arrowtripleleft = 0x21DA, + arrowtripleright = 0x21DB, + axisshort = 0x2212, + because = 0x2235, + between = 0x226C, + check = 0x2713, + circleasteris = 0x229B, + circleequal = 0x2257, + circleminus = 0x229D, + circleR = 0x24C7, + circlering = 0x229A, + circleS = 0x24C8, + clockwise = 0x21BB, + complement = 0x2201, + curlyleft = 0x21AB, + curlyright = 0x21AC, + dblarrowdwn = 0x21CA, + dblarrowheadleft = 0x219E, + dblarrowheadright = 0x21A0, + dblarrowleft = 0x21C7, + dblarrowright = 0x21C9, + dblarrowup = 0x21C8, + defines = 0x225C, + diamond = 0x2662, + diamondsolid = 0x2666, + difference = 0x224F, + dotplus = 0x2214, + downfall = 0x22CE, + equaldotleftright = 0x2252, + equaldotrightleft = 0x2253, + equalorfollows = 0x22DF, + equalorgreater = 0x22DD, + equalorless = 0x22DC, + equalorprecedes = 0x22DE, + equalsdots = 0x2251, + followsorcurly = 0x227D, + followsorequal = 0x227F, + forces = 0x22A9, + forcesbar = 0x22AA, + fork = 0x22D4, + frown = 0x2322, + geomequivalent = 0x224E, + greaterdbleqlless = 0x22Da, + greaterdblequal = 0x2267, + greaterlessequal = 0x22DA, + greaterorapproxeql = 0x227F, + greaterorequalslant= 0x2265, + greaterorless = 0x2277, + greaterorsimilar = 0x2273, + harpoondownleft = 0x21C3, + harpoondownright = 0x21C2, + harpoonleftright = 0x21CC, + harpoonrightleft = 0x21CB, + harpoonupleft = 0x21BF, + harpoonupright = 0x21BE, + intercal = 0x22BA, + intersectiondbl = 0x22D2, + lessdbleqlgreater = 0x22DB, + lessdblequal = 0x2266, + lessequalgreater = 0x22DB, + lessorapproxeql = 0x227E, + lessorequalslant = 0x2264, + lessorgreater = 0x2276, + lessorsimilar = 0x2272, + maltesecross = 0xFFFD, + measuredangle = 0x2221, + muchgreater = 0x22D9, + muchless = 0x22D8, + multimap = 0x22B8, + multiopenleft = 0x22CB, + multiopenright = 0x22CC, + nand = 0x22BC, + 
orunderscore = 0x22BB, + perpcorrespond = 0x2259, + precedesorcurly = 0x227C, + precedesorequal = 0x227E, + primereverse = 0x2035, + proportional = 0x221D, + revasymptequal = 0x2243, + revsimilar = 0x223D, + rightanglene = 0x231D, + rightanglenw = 0x231C, + rightanglese = 0x231F, + rightanglesw = 0x231E, + ringinequal = 0x2256, + satisfies = 0x22A8, + shiftleft = 0x21B0, + shiftright = 0x21B1, + smile = 0x2323, + sphericalangle = 0x2222, + square = 0x25A1, + squaredot = 0x22A1, + squareimage = 0x228F, + squareminus = 0x229F, + squaremultiply = 0x22A0, + squareoriginal = 0x2290, + squareplus = 0x229E, + squaresmallsolid = 0x25AA, + squaresolid = 0x25A0, + squiggleleftright = 0x21AD, + squiggleright = 0x21DD, + star = 0x22C6, + subsetdbl = 0x22D0, + subsetdblequal = 0x2286, + supersetdbl = 0x22D1, + supersetdblequa = 0x2287, + therefore = 0x2234, + triangle = 0x25B5, + triangledownsld = 0x25BE, + triangleinv = 0x25BF, + triangleleft = 0x25C3, + triangleleftequal = 0x22B4, + triangleleftsld = 0x25C2, + triangleright = 0x25B9, + trianglerightequal = 0x22B5, + trianglerightsld = 0x25B8, + trianglesolid = 0x25B4, + uniondbl = 0x22D3, + uprise = 0x22CF, + Yen = 0x00A5, +} + +-- if not characters.enccodes then +-- +-- local enccodes = { } characters.enccodes = enccodes +-- +-- for unicode, data in next, characters.data do +-- local encname = data.adobename or data.contextname +-- if encname then +-- enccodes[encname] = unicode +-- end +-- end +-- +-- for name, unicode in next, characters.synonyms do +-- if not enccodes[name] then enccodes[name] = unicode end +-- end +-- +-- +-- end +-- +-- storage.register("characters.enccodes", characters.enccodes, "characters.enccodes") + +-- As this table is seldom used, we can delay its definition. Beware, this means +-- that table.print would not work on this file unless it is accessed once. This +-- why the serializer does a dummy access. 
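-- The deferred definition below relies on utilities.storage.setinitializer; as
-- a rough sketch of the underlying idea (generic Lua, an illustration rather
-- than the actual storage code), a lazily filled table can be built with a
-- metatable whose __index runs the initializer on first access and then
-- retires itself, which is also why a dummy access is needed before printing.

local function lazytable(initialize)
    local t = { }
    setmetatable(t, { __index = function(t,k)
        setmetatable(t,nil) -- make sure we initialize only once
        initialize(t)       -- fill in the actual content
        return rawget(t,k)
    end })
    return t
end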
+ +local enccodes = allocate() characters.enccodes = enccodes + +local function initialize() + for unicode, data in next, characters.data do + local encname = data.adobename or data.contextname + if encname then + enccodes[encname] = unicode + end + end + for name, unicode in next, characters.synonyms do + if not enccodes[name] then enccodes[name] = unicode end + end +end + +setinitializer(enccodes,initialize) diff --git a/tex/context/base/char-ent.lua b/tex/context/base/char-ent.lua index 58ee9472c..b642d887f 100644 --- a/tex/context/base/char-ent.lua +++ b/tex/context/base/char-ent.lua @@ -1,2257 +1,2257 @@ -if not modules then modules = { } end modules ['char-ent'] = { - version = 1.001, - comment = "companion to math-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "derived from the mathml 2.0 specification", - dataonly = true, -} - --- http://www.w3.org/2003/entities/2007/w3centities-f.ent --- http://www.w3.org/2003/entities/2007/htmlmathml-f.ent - -local entities = utilities.storage.allocate { - ["AElig"] = "Æ", -- U+000C6 - ["AMP"] = "&", -- U+00026 - ["Aacgr"] = "Ά", -- U+00386 - ["Aacute"] = "Ã", -- U+000C1 - ["Abreve"] = "Ä‚", -- U+00102 - ["Acirc"] = "Â", -- U+000C2 - ["Acy"] = "Ð", -- U+00410 - ["Afr"] = "ð”„", -- U+1D504 - ["Agr"] = "Α", -- U+00391 - ["Agrave"] = "À", -- U+000C0 - ["Alpha"] = "Α", -- U+00391 - ["Amacr"] = "Ä€", -- U+00100 - ["And"] = "â©“", -- U+02A53 - ["Aogon"] = "Ä„", -- U+00104 - ["Aopf"] = "ð”¸", -- U+1D538 - ["ApplyFunction"] = "â¡", -- U+02061 - ["Aring"] = "Ã…", -- U+000C5 - ["Ascr"] = "ð’œ", -- U+1D49C - ["Assign"] = "≔", -- U+02254 - ["Atilde"] = "Ã", -- U+000C3 - ["Auml"] = "Ä", -- U+000C4 - ["Backslash"] = "∖", -- U+02216 - ["Barv"] = "⫧", -- U+02AE7 - ["Barwed"] = "⌆", -- U+02306 - ["Bcy"] = "Б", -- U+00411 - ["Because"] = "∵", -- U+02235 - ["Bernoullis"] = "ℬ", -- U+0212C - ["Beta"] = "Î’", -- U+00392 - ["Bfr"] = "ð”…", -- U+1D505 - ["Bgr"] = "Î’", -- U+00392 - ["Bopf"] = "ð”¹", -- U+1D539 - ["Breve"] = "˘", -- U+002D8 - ["Bscr"] = "ℬ", -- U+0212C - ["Bumpeq"] = "≎", -- U+0224E - ["CHcy"] = "Ч", -- U+00427 - ["COPY"] = "©", -- U+000A9 - ["Cacute"] = "Ć", -- U+00106 - ["Cap"] = "â‹’", -- U+022D2 - ["CapitalDifferentialD"] = "â……", -- U+02145 - ["Cayleys"] = "â„­", -- U+0212D - ["Ccaron"] = "ÄŒ", -- U+0010C - ["Ccedil"] = "Ç", -- U+000C7 - ["Ccirc"] = "Ĉ", -- U+00108 - ["Cconint"] = "∰", -- U+02230 - ["Cdot"] = "ÄŠ", -- U+0010A - ["Cedilla"] = "¸", -- U+000B8 - ["CenterDot"] = "·", -- U+000B7 - ["Cfr"] = "â„­", -- U+0212D - ["Chi"] = "Χ", -- U+003A7 - ["CircleDot"] = "⊙", -- U+02299 - ["CircleMinus"] = "⊖", -- U+02296 - ["CirclePlus"] = "⊕", -- U+02295 - ["CircleTimes"] = "⊗", -- U+02297 - ["ClockwiseContourIntegral"] = "∲", -- U+02232 - ["CloseCurlyDoubleQuote"] = "â€", -- U+0201D - ["CloseCurlyQuote"] = "’", -- U+02019 - ["Colon"] = "∷", -- U+02237 - ["Colone"] = "â©´", -- U+02A74 - ["Congruent"] = "≡", -- U+02261 - ["Conint"] = "∯", -- U+0222F - ["ContourIntegral"] = "∮", -- U+0222E - ["Copf"] = "â„‚", -- U+02102 - ["Coproduct"] = "âˆ", -- U+02210 - ["CounterClockwiseContourIntegral"] = "∳", -- U+02233 - ["Cross"] = "⨯", -- U+02A2F - ["Cscr"] = "ð’ž", -- U+1D49E - ["Cup"] = "â‹“", -- U+022D3 - ["CupCap"] = "â‰", -- U+0224D - ["DD"] = "â……", -- U+02145 - ["DDotrahd"] = "⤑", -- U+02911 - ["DJcy"] = "Ђ", -- U+00402 - ["DScy"] = "Ð…", -- U+00405 - ["DZcy"] = "Ð", -- U+0040F - ["Dagger"] = "‡", -- U+02021 - ["Darr"] = "↡", -- U+021A1 - ["Dashv"] = "⫤", -- U+02AE4 - ["Dcaron"] = "ÄŽ", -- U+0010E - ["Dcy"] = "Д", -- U+00414 - 
["Del"] = "∇", -- U+02207 - ["Delta"] = "Δ", -- U+00394 - ["Dfr"] = "ð”‡", -- U+1D507 - ["Dgr"] = "Δ", -- U+00394 - ["DiacriticalAcute"] = "´", -- U+000B4 - ["DiacriticalDot"] = "Ë™", -- U+002D9 - ["DiacriticalDoubleAcute"] = "Ë", -- U+002DD - ["DiacriticalGrave"] = "`", -- U+00060 - ["DiacriticalTilde"] = "Ëœ", -- U+002DC - ["Diamond"] = "â‹„", -- U+022C4 - ["DifferentialD"] = "â…†", -- U+02146 - ["Dopf"] = "ð”»", -- U+1D53B - ["Dot"] = "¨", -- U+000A8 - ["DotDot"] = "⃜", -- U+020DC - ["DotEqual"] = "â‰", -- U+02250 - ["DoubleContourIntegral"] = "∯", -- U+0222F - ["DoubleDot"] = "¨", -- U+000A8 - ["DoubleDownArrow"] = "⇓", -- U+021D3 - ["DoubleLeftArrow"] = "â‡", -- U+021D0 - ["DoubleLeftRightArrow"] = "⇔", -- U+021D4 - ["DoubleLeftTee"] = "⫤", -- U+02AE4 - ["DoubleLongLeftArrow"] = "⟸", -- U+027F8 - ["DoubleLongLeftRightArrow"] = "⟺", -- U+027FA - ["DoubleLongRightArrow"] = "⟹", -- U+027F9 - ["DoubleRightArrow"] = "⇒", -- U+021D2 - ["DoubleRightTee"] = "⊨", -- U+022A8 - ["DoubleUpArrow"] = "⇑", -- U+021D1 - ["DoubleUpDownArrow"] = "⇕", -- U+021D5 - ["DoubleVerticalBar"] = "∥", -- U+02225 - ["DownArrow"] = "↓", -- U+02193 - ["DownArrowBar"] = "⤓", -- U+02913 - ["DownArrowUpArrow"] = "⇵", -- U+021F5 - ["DownBreve"] = "Ì‘", -- U+00311 - ["DownLeftRightVector"] = "â¥", -- U+02950 - ["DownLeftTeeVector"] = "⥞", -- U+0295E - ["DownLeftVector"] = "↽", -- U+021BD - ["DownLeftVectorBar"] = "⥖", -- U+02956 - ["DownRightTeeVector"] = "⥟", -- U+0295F - ["DownRightVector"] = "â‡", -- U+021C1 - ["DownRightVectorBar"] = "⥗", -- U+02957 - ["DownTee"] = "⊤", -- U+022A4 - ["DownTeeArrow"] = "↧", -- U+021A7 - ["Downarrow"] = "⇓", -- U+021D3 - ["Dscr"] = "ð’Ÿ", -- U+1D49F - ["Dstrok"] = "Ä", -- U+00110 - ["EEacgr"] = "Ή", -- U+00389 - ["EEgr"] = "Η", -- U+00397 - ["ENG"] = "ÅŠ", -- U+0014A - ["ETH"] = "Ã", -- U+000D0 - ["Eacgr"] = "Έ", -- U+00388 - ["Eacute"] = "É", -- U+000C9 - ["Ecaron"] = "Äš", -- U+0011A - ["Ecirc"] = "Ê", -- U+000CA - ["Ecy"] = "Э", -- U+0042D - ["Edot"] = "Ä–", -- U+00116 - ["Efr"] = "ð”ˆ", -- U+1D508 - ["Egr"] = "Ε", -- U+00395 - ["Egrave"] = "È", -- U+000C8 - ["Element"] = "∈", -- U+02208 - ["Emacr"] = "Ä’", -- U+00112 - ["EmptySmallSquare"] = "â—»", -- U+025FB - ["EmptyVerySmallSquare"] = "â–«", -- U+025AB - ["Eogon"] = "Ę", -- U+00118 - ["Eopf"] = "ð”¼", -- U+1D53C - ["Epsilon"] = "Ε", -- U+00395 - ["Equal"] = "⩵", -- U+02A75 - ["EqualTilde"] = "≂", -- U+02242 - ["Equilibrium"] = "⇌", -- U+021CC - ["Escr"] = "â„°", -- U+02130 - ["Esim"] = "⩳", -- U+02A73 - ["Eta"] = "Η", -- U+00397 - ["Euml"] = "Ë", -- U+000CB - ["Exists"] = "∃", -- U+02203 - ["ExponentialE"] = "â…‡", -- U+02147 - ["Fcy"] = "Ф", -- U+00424 - ["Ffr"] = "ð”‰", -- U+1D509 - ["FilledSmallSquare"] = "â—¼", -- U+025FC - ["FilledVerySmallSquare"] = "â–ª", -- U+025AA - ["Fopf"] = "ð”½", -- U+1D53D - ["ForAll"] = "∀", -- U+02200 - ["Fouriertrf"] = "ℱ", -- U+02131 - ["Fscr"] = "ℱ", -- U+02131 - ["GJcy"] = "Ѓ", -- U+00403 - ["GT"] = ">", -- U+0003E - ["Gamma"] = "Γ", -- U+00393 - ["Gammad"] = "Ïœ", -- U+003DC - ["Gbreve"] = "Äž", -- U+0011E - ["Gcedil"] = "Ä¢", -- U+00122 - ["Gcirc"] = "Äœ", -- U+0011C - ["Gcy"] = "Г", -- U+00413 - ["Gdot"] = "Ä ", -- U+00120 - ["Gfr"] = "ð”Š", -- U+1D50A - ["Gg"] = "â‹™", -- U+022D9 - ["Ggr"] = "Γ", -- U+00393 - ["Gopf"] = "ð”¾", -- U+1D53E - ["GreaterEqual"] = "≥", -- U+02265 - ["GreaterEqualLess"] = "â‹›", -- U+022DB - ["GreaterFullEqual"] = "≧", -- U+02267 - ["GreaterGreater"] = "⪢", -- U+02AA2 - ["GreaterLess"] = "≷", -- U+02277 - ["GreaterSlantEqual"] = "⩾", -- U+02A7E - 
["GreaterTilde"] = "≳", -- U+02273 - ["Gscr"] = "ð’¢", -- U+1D4A2 - ["Gt"] = "≫", -- U+0226B - ["HARDcy"] = "Ъ", -- U+0042A - ["Hacek"] = "ˇ", -- U+002C7 - ["Hat"] = "^", -- U+0005E - ["Hcirc"] = "Ĥ", -- U+00124 - ["Hfr"] = "â„Œ", -- U+0210C - ["HilbertSpace"] = "â„‹", -- U+0210B - ["Hopf"] = "â„", -- U+0210D - ["HorizontalLine"] = "─", -- U+02500 - ["Hscr"] = "â„‹", -- U+0210B - ["Hstrok"] = "Ħ", -- U+00126 - ["HumpDownHump"] = "≎", -- U+0224E - ["HumpEqual"] = "â‰", -- U+0224F - ["IEcy"] = "Е", -- U+00415 - ["IJlig"] = "IJ", -- U+00132 - ["IOcy"] = "Ð", -- U+00401 - ["Iacgr"] = "Ί", -- U+0038A - ["Iacute"] = "Ã", -- U+000CD - ["Icirc"] = "ÃŽ", -- U+000CE - ["Icy"] = "И", -- U+00418 - ["Idigr"] = "Ϊ", -- U+003AA - ["Idot"] = "Ä°", -- U+00130 - ["Ifr"] = "â„‘", -- U+02111 - ["Igr"] = "Ι", -- U+00399 - ["Igrave"] = "ÃŒ", -- U+000CC - ["Im"] = "â„‘", -- U+02111 - ["Imacr"] = "Ī", -- U+0012A - ["ImaginaryI"] = "â…ˆ", -- U+02148 - ["Implies"] = "⇒", -- U+021D2 - ["Int"] = "∬", -- U+0222C - ["Integral"] = "∫", -- U+0222B - ["Intersection"] = "â‹‚", -- U+022C2 - ["InvisibleComma"] = "â£", -- U+02063 - ["InvisibleTimes"] = "â¢", -- U+02062 - ["Iogon"] = "Ä®", -- U+0012E - ["Iopf"] = "ð•€", -- U+1D540 - ["Iota"] = "Ι", -- U+00399 - ["Iscr"] = "â„", -- U+02110 - ["Itilde"] = "Ĩ", -- U+00128 - ["Iukcy"] = "І", -- U+00406 - ["Iuml"] = "Ã", -- U+000CF - ["Jcirc"] = "Ä´", -- U+00134 - ["Jcy"] = "Й", -- U+00419 - ["Jfr"] = "ð”", -- U+1D50D - ["Jopf"] = "ð•", -- U+1D541 - ["Jscr"] = "ð’¥", -- U+1D4A5 - ["Jsercy"] = "Ј", -- U+00408 - ["Jukcy"] = "Є", -- U+00404 - ["KHcy"] = "Ð¥", -- U+00425 - ["KHgr"] = "Χ", -- U+003A7 - ["KJcy"] = "ÐŒ", -- U+0040C - ["Kappa"] = "Κ", -- U+0039A - ["Kcedil"] = "Ķ", -- U+00136 - ["Kcy"] = "К", -- U+0041A - ["Kfr"] = "ð”Ž", -- U+1D50E - ["Kgr"] = "Κ", -- U+0039A - ["Kopf"] = "ð•‚", -- U+1D542 - ["Kscr"] = "ð’¦", -- U+1D4A6 - ["LJcy"] = "Љ", -- U+00409 - ["LT"] = "<", -- U+00026 - ["Lacute"] = "Ĺ", -- U+00139 - ["Lambda"] = "Λ", -- U+0039B - ["Lang"] = "⟪", -- U+027EA - ["Laplacetrf"] = "â„’", -- U+02112 - ["Larr"] = "↞", -- U+0219E - ["Lcaron"] = "Ľ", -- U+0013D - ["Lcedil"] = "Ä»", -- U+0013B - ["Lcy"] = "Л", -- U+0041B - ["LeftAngleBracket"] = "⟨", -- U+027E8 - ["LeftArrow"] = "â†", -- U+02190 - ["LeftArrowBar"] = "⇤", -- U+021E4 - ["LeftArrowRightArrow"] = "⇆", -- U+021C6 - ["LeftCeiling"] = "⌈", -- U+02308 - ["LeftDoubleBracket"] = "⟦", -- U+027E6 - ["LeftDownTeeVector"] = "⥡", -- U+02961 - ["LeftDownVector"] = "⇃", -- U+021C3 - ["LeftDownVectorBar"] = "⥙", -- U+02959 - ["LeftFloor"] = "⌊", -- U+0230A - ["LeftRightArrow"] = "↔", -- U+02194 - ["LeftRightVector"] = "⥎", -- U+0294E - ["LeftTee"] = "⊣", -- U+022A3 - ["LeftTeeArrow"] = "↤", -- U+021A4 - ["LeftTeeVector"] = "⥚", -- U+0295A - ["LeftTriangle"] = "⊲", -- U+022B2 - ["LeftTriangleBar"] = "â§", -- U+029CF - ["LeftTriangleEqual"] = "⊴", -- U+022B4 - ["LeftUpDownVector"] = "⥑", -- U+02951 - ["LeftUpTeeVector"] = "⥠", -- U+02960 - ["LeftUpVector"] = "↿", -- U+021BF - ["LeftUpVectorBar"] = "⥘", -- U+02958 - ["LeftVector"] = "↼", -- U+021BC - ["LeftVectorBar"] = "⥒", -- U+02952 - ["Leftarrow"] = "â‡", -- U+021D0 - ["Leftrightarrow"] = "⇔", -- U+021D4 - ["LessEqualGreater"] = "â‹š", -- U+022DA - ["LessFullEqual"] = "≦", -- U+02266 - ["LessGreater"] = "≶", -- U+02276 - ["LessLess"] = "⪡", -- U+02AA1 - ["LessSlantEqual"] = "⩽", -- U+02A7D - ["LessTilde"] = "≲", -- U+02272 - ["Lfr"] = "ð”", -- U+1D50F - ["Lgr"] = "Λ", -- U+0039B - ["Ll"] = "⋘", -- U+022D8 - ["Lleftarrow"] = "⇚", -- U+021DA - ["Lmidot"] = "Ä¿", -- U+0013F - 
["LongLeftArrow"] = "⟵", -- U+027F5 - ["LongLeftRightArrow"] = "⟷", -- U+027F7 - ["LongRightArrow"] = "⟶", -- U+027F6 - ["Longleftarrow"] = "⟸", -- U+027F8 - ["Longleftrightarrow"] = "⟺", -- U+027FA - ["Longrightarrow"] = "⟹", -- U+027F9 - ["Lopf"] = "ð•ƒ", -- U+1D543 - ["LowerLeftArrow"] = "↙", -- U+02199 - ["LowerRightArrow"] = "↘", -- U+02198 - ["Lscr"] = "â„’", -- U+02112 - ["Lsh"] = "↰", -- U+021B0 - ["Lstrok"] = "Å", -- U+00141 - ["Lt"] = "≪", -- U+0226A - ["Map"] = "⤅", -- U+02905 - ["Mcy"] = "Ðœ", -- U+0041C - ["MediumSpace"] = "âŸ", -- U+0205F - ["Mellintrf"] = "ℳ", -- U+02133 - ["Mfr"] = "ð”", -- U+1D510 - ["Mgr"] = "Îœ", -- U+0039C - ["MinusPlus"] = "∓", -- U+02213 - ["Mopf"] = "ð•„", -- U+1D544 - ["Mscr"] = "ℳ", -- U+02133 - ["Mu"] = "Îœ", -- U+0039C - ["NJcy"] = "Њ", -- U+0040A - ["Nacute"] = "Ń", -- U+00143 - ["Ncaron"] = "Ň", -- U+00147 - ["Ncedil"] = "Å…", -- U+00145 - ["Ncy"] = "Ð", -- U+0041D - ["NegativeMediumSpace"] = "​", -- U+0200B - ["NegativeThickSpace"] = "​", -- U+0200B - ["NegativeThinSpace"] = "​", -- U+0200B - ["NegativeVeryThinSpace"] = "​", -- U+0200B - ["NestedGreaterGreater"] = "≫", -- U+0226B - ["NestedLessLess"] = "≪", -- U+0226A - ["Nfr"] = "ð”‘", -- U+1D511 - ["Ngr"] = "Î", -- U+0039D - ["NoBreak"] = "â ", -- U+02060 - ["NonBreakingSpace"] = " ", -- U+000A0 - ["Nopf"] = "â„•", -- U+02115 - ["Not"] = "⫬", -- U+02AEC - ["NotCongruent"] = "≢", -- U+02262 - ["NotCupCap"] = "≭", -- U+0226D - ["NotDoubleVerticalBar"] = "∦", -- U+02226 - ["NotElement"] = "∉", -- U+02209 - ["NotEqual"] = "≠", -- U+02260 - ["NotEqualTilde"] = "≂̸", -- U+02242 00338 - ["NotExists"] = "∄", -- U+02204 - ["NotGreater"] = "≯", -- U+0226F - ["NotGreaterEqual"] = "≱", -- U+02271 - ["NotGreaterFullEqual"] = "≧̸", -- U+02267 00338 - ["NotGreaterGreater"] = "≫̸", -- U+0226B 00338 - ["NotGreaterLess"] = "≹", -- U+02279 - ["NotGreaterSlantEqual"] = "⩾̸", -- U+02A7E 00338 - ["NotGreaterTilde"] = "≵", -- U+02275 - ["NotHumpDownHump"] = "≎̸", -- U+0224E 00338 - ["NotHumpEqual"] = "â‰Ì¸", -- U+0224F 00338 - ["NotLeftTriangle"] = "⋪", -- U+022EA - ["NotLeftTriangleBar"] = "â§Ì¸", -- U+029CF 00338 - ["NotLeftTriangleEqual"] = "⋬", -- U+022EC - ["NotLess"] = "≮", -- U+0226E - ["NotLessEqual"] = "≰", -- U+02270 - ["NotLessGreater"] = "≸", -- U+02278 - ["NotLessLess"] = "≪̸", -- U+0226A 00338 - ["NotLessSlantEqual"] = "⩽̸", -- U+02A7D 00338 - ["NotLessTilde"] = "≴", -- U+02274 - ["NotNestedGreaterGreater"] = "⪢̸", -- U+02AA2 00338 - ["NotNestedLessLess"] = "⪡̸", -- U+02AA1 00338 - ["NotPrecedes"] = "⊀", -- U+02280 - ["NotPrecedesEqual"] = "⪯̸", -- U+02AAF 00338 - ["NotPrecedesSlantEqual"] = "â‹ ", -- U+022E0 - ["NotReverseElement"] = "∌", -- U+0220C - ["NotRightTriangle"] = "â‹«", -- U+022EB - ["NotRightTriangleBar"] = "â§Ì¸", -- U+029D0 00338 - ["NotRightTriangleEqual"] = "â‹­", -- U+022ED - ["NotSquareSubset"] = "âŠÌ¸", -- U+0228F 00338 - ["NotSquareSubsetEqual"] = "â‹¢", -- U+022E2 - ["NotSquareSuperset"] = "âŠÌ¸", -- U+02290 00338 - ["NotSquareSupersetEqual"] = "â‹£", -- U+022E3 - ["NotSubset"] = "⊂⃒", -- U+02282 020D2 - ["NotSubsetEqual"] = "⊈", -- U+02288 - ["NotSucceeds"] = "âŠ", -- U+02281 - ["NotSucceedsEqual"] = "⪰̸", -- U+02AB0 00338 - ["NotSucceedsSlantEqual"] = "â‹¡", -- U+022E1 - ["NotSucceedsTilde"] = "≿̸", -- U+0227F 00338 - ["NotSuperset"] = "⊃⃒", -- U+02283 020D2 - ["NotSupersetEqual"] = "⊉", -- U+02289 - ["NotTilde"] = "â‰", -- U+02241 - ["NotTildeEqual"] = "≄", -- U+02244 - ["NotTildeFullEqual"] = "≇", -- U+02247 - ["NotTildeTilde"] = "≉", -- U+02249 - ["NotVerticalBar"] = "∤", 
-- U+02224 - ["Nscr"] = "ð’©", -- U+1D4A9 - ["Ntilde"] = "Ñ", -- U+000D1 - ["Nu"] = "Î", -- U+0039D - ["OElig"] = "Å’", -- U+00152 - ["OHacgr"] = "Î", -- U+0038F - ["OHgr"] = "Ω", -- U+003A9 - ["Oacgr"] = "ÎŒ", -- U+0038C - ["Oacute"] = "Ó", -- U+000D3 - ["Ocirc"] = "Ô", -- U+000D4 - ["Ocy"] = "О", -- U+0041E - ["Odblac"] = "Å", -- U+00150 - ["Ofr"] = "ð”’", -- U+1D512 - ["Ogr"] = "Ο", -- U+0039F - ["Ograve"] = "Ã’", -- U+000D2 - ["Omacr"] = "ÅŒ", -- U+0014C - ["Omega"] = "Ω", -- U+003A9 - ["Omicron"] = "Ο", -- U+0039F - ["Oopf"] = "ð•†", -- U+1D546 - ["OpenCurlyDoubleQuote"] = "“", -- U+0201C - ["OpenCurlyQuote"] = "‘", -- U+02018 - ["Or"] = "â©”", -- U+02A54 - ["Oscr"] = "ð’ª", -- U+1D4AA - ["Oslash"] = "Ø", -- U+000D8 - ["Otilde"] = "Õ", -- U+000D5 - ["Otimes"] = "⨷", -- U+02A37 - ["Ouml"] = "Ö", -- U+000D6 - ["OverBar"] = "‾", -- U+0203E - ["OverBrace"] = "âž", -- U+023DE - ["OverBracket"] = "⎴", -- U+023B4 - ["OverParenthesis"] = "âœ", -- U+023DC - ["PHgr"] = "Φ", -- U+003A6 - ["PSgr"] = "Ψ", -- U+003A8 - ["PartialD"] = "∂", -- U+02202 - ["Pcy"] = "П", -- U+0041F - ["Pfr"] = "ð”“", -- U+1D513 - ["Pgr"] = "Π", -- U+003A0 - ["Phi"] = "Φ", -- U+003A6 - ["Pi"] = "Π", -- U+003A0 - ["PlusMinus"] = "±", -- U+000B1 - ["Poincareplane"] = "â„Œ", -- U+0210C - ["Popf"] = "â„™", -- U+02119 - ["Pr"] = "⪻", -- U+02ABB - ["Precedes"] = "≺", -- U+0227A - ["PrecedesEqual"] = "⪯", -- U+02AAF - ["PrecedesSlantEqual"] = "≼", -- U+0227C - ["PrecedesTilde"] = "≾", -- U+0227E - ["Prime"] = "″", -- U+02033 - ["Product"] = "âˆ", -- U+0220F - ["Proportion"] = "∷", -- U+02237 - ["Proportional"] = "âˆ", -- U+0221D - ["Pscr"] = "ð’«", -- U+1D4AB - ["Psi"] = "Ψ", -- U+003A8 - ["QUOT"] = "\"", -- U+00022 - ["Qfr"] = "ð””", -- U+1D514 - ["Qopf"] = "â„š", -- U+0211A - ["Qscr"] = "ð’¬", -- U+1D4AC - ["RBarr"] = "â¤", -- U+02910 - ["REG"] = "®", -- U+000AE - ["Racute"] = "Å”", -- U+00154 - ["Rang"] = "⟫", -- U+027EB - ["Rarr"] = "↠", -- U+021A0 - ["Rarrtl"] = "⤖", -- U+02916 - ["Rcaron"] = "Ř", -- U+00158 - ["Rcedil"] = "Å–", -- U+00156 - ["Rcy"] = "Р", -- U+00420 - ["Re"] = "â„œ", -- U+0211C - ["ReverseElement"] = "∋", -- U+0220B - ["ReverseEquilibrium"] = "⇋", -- U+021CB - ["ReverseUpEquilibrium"] = "⥯", -- U+0296F - ["Rfr"] = "â„œ", -- U+0211C - ["Rgr"] = "Ρ", -- U+003A1 - ["Rho"] = "Ρ", -- U+003A1 - ["RightAngleBracket"] = "⟩", -- U+027E9 - ["RightArrow"] = "→", -- U+02192 - ["RightArrowBar"] = "⇥", -- U+021E5 - ["RightArrowLeftArrow"] = "⇄", -- U+021C4 - ["RightCeiling"] = "⌉", -- U+02309 - ["RightDoubleBracket"] = "⟧", -- U+027E7 - ["RightDownTeeVector"] = "â¥", -- U+0295D - ["RightDownVector"] = "⇂", -- U+021C2 - ["RightDownVectorBar"] = "⥕", -- U+02955 - ["RightFloor"] = "⌋", -- U+0230B - ["RightTee"] = "⊢", -- U+022A2 - ["RightTeeArrow"] = "↦", -- U+021A6 - ["RightTeeVector"] = "⥛", -- U+0295B - ["RightTriangle"] = "⊳", -- U+022B3 - ["RightTriangleBar"] = "â§", -- U+029D0 - ["RightTriangleEqual"] = "⊵", -- U+022B5 - ["RightUpDownVector"] = "â¥", -- U+0294F - ["RightUpTeeVector"] = "⥜", -- U+0295C - ["RightUpVector"] = "↾", -- U+021BE - ["RightUpVectorBar"] = "⥔", -- U+02954 - ["RightVector"] = "⇀", -- U+021C0 - ["RightVectorBar"] = "⥓", -- U+02953 - ["Rightarrow"] = "⇒", -- U+021D2 - ["Ropf"] = "â„", -- U+0211D - ["RoundImplies"] = "⥰", -- U+02970 - ["Rrightarrow"] = "⇛", -- U+021DB - ["Rscr"] = "â„›", -- U+0211B - ["Rsh"] = "↱", -- U+021B1 - ["RuleDelayed"] = "⧴", -- U+029F4 - ["SHCHcy"] = "Щ", -- U+00429 - ["SHcy"] = "Ш", -- U+00428 - ["SOFTcy"] = "Ь", -- U+0042C - ["Sacute"] = "Åš", -- U+0015A - ["Sc"] = 
"⪼", -- U+02ABC - ["Scaron"] = "Å ", -- U+00160 - ["Scedil"] = "Åž", -- U+0015E - ["Scirc"] = "Åœ", -- U+0015C - ["Scy"] = "С", -- U+00421 - ["Sfr"] = "ð”–", -- U+1D516 - ["Sgr"] = "Σ", -- U+003A3 - ["ShortDownArrow"] = "↓", -- U+02193 - ["ShortLeftArrow"] = "â†", -- U+02190 - ["ShortRightArrow"] = "→", -- U+02192 - ["ShortUpArrow"] = "↑", -- U+02191 - ["Sigma"] = "Σ", -- U+003A3 - ["SmallCircle"] = "∘", -- U+02218 - ["Sopf"] = "ð•Š", -- U+1D54A - ["Sqrt"] = "√", -- U+0221A - ["Square"] = "â–¡", -- U+025A1 - ["SquareIntersection"] = "⊓", -- U+02293 - ["SquareSubset"] = "âŠ", -- U+0228F - ["SquareSubsetEqual"] = "⊑", -- U+02291 - ["SquareSuperset"] = "âŠ", -- U+02290 - ["SquareSupersetEqual"] = "⊒", -- U+02292 - ["SquareUnion"] = "⊔", -- U+02294 - ["Sscr"] = "ð’®", -- U+1D4AE - ["Star"] = "⋆", -- U+022C6 - ["Sub"] = "â‹", -- U+022D0 - ["Subset"] = "â‹", -- U+022D0 - ["SubsetEqual"] = "⊆", -- U+02286 - ["Succeeds"] = "≻", -- U+0227B - ["SucceedsEqual"] = "⪰", -- U+02AB0 - ["SucceedsSlantEqual"] = "≽", -- U+0227D - ["SucceedsTilde"] = "≿", -- U+0227F - ["SuchThat"] = "∋", -- U+0220B - ["Sum"] = "∑", -- U+02211 - ["Sup"] = "â‹‘", -- U+022D1 - ["Superset"] = "⊃", -- U+02283 - ["SupersetEqual"] = "⊇", -- U+02287 - ["Supset"] = "â‹‘", -- U+022D1 - ["THORN"] = "Þ", -- U+000DE - ["THgr"] = "Θ", -- U+00398 - ["TRADE"] = "â„¢", -- U+02122 - ["TSHcy"] = "Ћ", -- U+0040B - ["TScy"] = "Ц", -- U+00426 - ["Tab"] = "\9", -- U+00009 - ["Tau"] = "Τ", -- U+003A4 - ["Tcaron"] = "Ť", -- U+00164 - ["Tcedil"] = "Å¢", -- U+00162 - ["Tcy"] = "Т", -- U+00422 - ["Tfr"] = "ð”—", -- U+1D517 - ["Tgr"] = "Τ", -- U+003A4 - ["Therefore"] = "∴", -- U+02234 - ["Theta"] = "Θ", -- U+00398 - ["ThickSpace"] = "âŸâ€Š", -- U+0205F 0200A - ["ThinSpace"] = " ", -- U+02009 - ["Tilde"] = "∼", -- U+0223C - ["TildeEqual"] = "≃", -- U+02243 - ["TildeFullEqual"] = "≅", -- U+02245 - ["TildeTilde"] = "≈", -- U+02248 - ["Topf"] = "ð•‹", -- U+1D54B - ["TripleDot"] = "⃛", -- U+020DB - ["Tscr"] = "ð’¯", -- U+1D4AF - ["Tstrok"] = "Ŧ", -- U+00166 - ["Uacgr"] = "ÎŽ", -- U+0038E - ["Uacute"] = "Ú", -- U+000DA - ["Uarr"] = "↟", -- U+0219F - ["Uarrocir"] = "⥉", -- U+02949 - ["Ubrcy"] = "ÐŽ", -- U+0040E - ["Ubreve"] = "Ŭ", -- U+0016C - ["Ucirc"] = "Û", -- U+000DB - ["Ucy"] = "У", -- U+00423 - ["Udblac"] = "Å°", -- U+00170 - ["Udigr"] = "Ϋ", -- U+003AB - ["Ufr"] = "ð”˜", -- U+1D518 - ["Ugr"] = "Î¥", -- U+003A5 - ["Ugrave"] = "Ù", -- U+000D9 - ["Umacr"] = "Ū", -- U+0016A - -- ["UnderBar"] = "_", -- U+0005F - ["UnderBar"] = "‾", -- U+0203E - ["UnderBrace"] = "âŸ", -- U+023DF - ["UnderBracket"] = "⎵", -- U+023B5 - ["UnderParenthesis"] = "â", -- U+023DD - ["Union"] = "⋃", -- U+022C3 - ["UnionPlus"] = "⊎", -- U+0228E - ["Uogon"] = "Ų", -- U+00172 - ["Uopf"] = "ð•Œ", -- U+1D54C - ["UpArrow"] = "↑", -- U+02191 - ["UpArrowBar"] = "⤒", -- U+02912 - ["UpArrowDownArrow"] = "⇅", -- U+021C5 - ["UpDownArrow"] = "↕", -- U+02195 - ["UpEquilibrium"] = "⥮", -- U+0296E - ["UpTee"] = "⊥", -- U+022A5 - ["UpTeeArrow"] = "↥", -- U+021A5 - ["Uparrow"] = "⇑", -- U+021D1 - ["Updownarrow"] = "⇕", -- U+021D5 - ["UpperLeftArrow"] = "↖", -- U+02196 - ["UpperRightArrow"] = "↗", -- U+02197 - ["Upsi"] = "Ï’", -- U+003D2 - ["Upsilon"] = "Î¥", -- U+003A5 - ["Uring"] = "Å®", -- U+0016E - ["Uscr"] = "ð’°", -- U+1D4B0 - ["Utilde"] = "Ũ", -- U+00168 - ["Uuml"] = "Ãœ", -- U+000DC - ["VDash"] = "⊫", -- U+022AB - ["Vbar"] = "â««", -- U+02AEB - ["Vcy"] = "Ð’", -- U+00412 - ["Vdash"] = "⊩", -- U+022A9 - ["Vdashl"] = "⫦", -- U+02AE6 - ["Vee"] = "â‹", -- U+022C1 - ["Verbar"] = "‖", -- U+02016 - 
["Vert"] = "‖", -- U+02016 - ["VerticalBar"] = "∣", -- U+02223 - ["VerticalLine"] = "|", -- U+0007C - ["VerticalSeparator"] = "â˜", -- U+02758 - ["VerticalTilde"] = "≀", -- U+02240 - ["VeryThinSpace"] = " ", -- U+0200A - ["Vfr"] = "ð”™", -- U+1D519 - ["Vopf"] = "ð•", -- U+1D54D - ["Vscr"] = "ð’±", -- U+1D4B1 - ["Vvdash"] = "⊪", -- U+022AA - ["Wcirc"] = "Å´", -- U+00174 - ["Wedge"] = "â‹€", -- U+022C0 - ["Wfr"] = "ð”š", -- U+1D51A - ["Wopf"] = "ð•Ž", -- U+1D54E - ["Wscr"] = "ð’²", -- U+1D4B2 - ["Xfr"] = "ð”›", -- U+1D51B - ["Xgr"] = "Ξ", -- U+0039E - ["Xi"] = "Ξ", -- U+0039E - ["Xopf"] = "ð•", -- U+1D54F - ["Xscr"] = "ð’³", -- U+1D4B3 - ["YAcy"] = "Я", -- U+0042F - ["YIcy"] = "Ї", -- U+00407 - ["YUcy"] = "Ю", -- U+0042E - ["Yacute"] = "Ã", -- U+000DD - ["Ycirc"] = "Ŷ", -- U+00176 - ["Ycy"] = "Ы", -- U+0042B - ["Yfr"] = "ð”œ", -- U+1D51C - ["Yopf"] = "ð•", -- U+1D550 - ["Yscr"] = "ð’´", -- U+1D4B4 - ["Yuml"] = "Ÿ", -- U+00178 - ["ZHcy"] = "Ж", -- U+00416 - ["Zacute"] = "Ź", -- U+00179 - ["Zcaron"] = "Ž", -- U+0017D - ["Zcy"] = "З", -- U+00417 - ["Zdot"] = "Å»", -- U+0017B - ["ZeroWidthSpace"] = "​", -- U+0200B - ["Zeta"] = "Ζ", -- U+00396 - ["Zfr"] = "ℨ", -- U+02128 - ["Zgr"] = "Ζ", -- U+00396 - ["Zopf"] = "ℤ", -- U+02124 - ["Zscr"] = "ð’µ", -- U+1D4B5 - ["aacgr"] = "ά", -- U+003AC - ["aacute"] = "á", -- U+000E1 - ["abreve"] = "ă", -- U+00103 - ["ac"] = "∾", -- U+0223E - ["acE"] = "∾̳", -- U+0223E 00333 - ["acd"] = "∿", -- U+0223F - ["acirc"] = "â", -- U+000E2 - ["acute"] = "´", -- U+000B4 - ["acy"] = "а", -- U+00430 - ["aelig"] = "æ", -- U+000E6 - ["af"] = "â¡", -- U+02061 - ["afr"] = "ð”ž", -- U+1D51E - ["agr"] = "α", -- U+003B1 - ["agrave"] = "à", -- U+000E0 - ["alefsym"] = "ℵ", -- U+02135 - ["aleph"] = "ℵ", -- U+02135 - ["alpha"] = "α", -- U+003B1 - ["amacr"] = "Ä", -- U+00101 - ["amalg"] = "⨿", -- U+02A3F - ["amp"] = "&", -- U+00026 - ["and"] = "∧", -- U+02227 - ["andand"] = "â©•", -- U+02A55 - ["andd"] = "â©œ", -- U+02A5C - ["andslope"] = "⩘", -- U+02A58 - ["andv"] = "â©š", -- U+02A5A - ["ang"] = "∠", -- U+02220 - ["ange"] = "⦤", -- U+029A4 - ["angle"] = "∠", -- U+02220 - ["angmsd"] = "∡", -- U+02221 - ["angmsdaa"] = "⦨", -- U+029A8 - ["angmsdab"] = "⦩", -- U+029A9 - ["angmsdac"] = "⦪", -- U+029AA - ["angmsdad"] = "⦫", -- U+029AB - ["angmsdae"] = "⦬", -- U+029AC - ["angmsdaf"] = "⦭", -- U+029AD - ["angmsdag"] = "⦮", -- U+029AE - ["angmsdah"] = "⦯", -- U+029AF - ["angrt"] = "∟", -- U+0221F - ["angrtvb"] = "⊾", -- U+022BE - ["angrtvbd"] = "â¦", -- U+0299D - ["angsph"] = "∢", -- U+02222 - ["angst"] = "Ã…", -- U+000C5 - ["angzarr"] = "â¼", -- U+0237C - ["aogon"] = "Ä…", -- U+00105 - ["aopf"] = "ð•’", -- U+1D552 - ["ap"] = "≈", -- U+02248 - ["apE"] = "â©°", -- U+02A70 - ["apacir"] = "⩯", -- U+02A6F - ["ape"] = "≊", -- U+0224A - ["apid"] = "≋", -- U+0224B - ["apos"] = "'", -- U+00027 - ["approx"] = "≈", -- U+02248 - ["approxeq"] = "≊", -- U+0224A - ["aring"] = "Ã¥", -- U+000E5 - ["ascr"] = "ð’¶", -- U+1D4B6 - ["ast"] = "*", -- U+0002A - ["asymp"] = "≈", -- U+02248 - ["asympeq"] = "â‰", -- U+0224D - ["atilde"] = "ã", -- U+000E3 - ["auml"] = "ä", -- U+000E4 - ["awconint"] = "∳", -- U+02233 - ["awint"] = "⨑", -- U+02A11 - ["b.Delta"] = "ðš«", -- U+1D6AB - ["b.Gamma"] = "ðšª", -- U+1D6AA - ["b.Gammad"] = "ðŸŠ", -- U+1D7CA - ["b.Lambda"] = "ðš²", -- U+1D6B2 - ["b.Omega"] = "ð›€", -- U+1D6C0 - ["b.Phi"] = "ðš½", -- U+1D6BD - ["b.Pi"] = "ðš·", -- U+1D6B7 - ["b.Psi"] = "ðš¿", -- U+1D6BF - ["b.Sigma"] = "ðšº", -- U+1D6BA - ["b.Theta"] = "ðš¯", -- U+1D6AF - ["b.Upsi"] = "ðš¼", -- U+1D6BC - ["b.Xi"] = 
"ðšµ", -- U+1D6B5 - ["b.alpha"] = "ð›‚", -- U+1D6C2 - ["b.beta"] = "ð›ƒ", -- U+1D6C3 - ["b.chi"] = "ð›˜", -- U+1D6D8 - ["b.delta"] = "ð›…", -- U+1D6C5 - ["b.epsi"] = "ð›†", -- U+1D6C6 - ["b.epsiv"] = "ð›œ", -- U+1D6DC - ["b.eta"] = "ð›ˆ", -- U+1D6C8 - ["b.gamma"] = "ð›„", -- U+1D6C4 - ["b.gammad"] = "ðŸ‹", -- U+1D7CB - ["b.iota"] = "ð›Š", -- U+1D6CA - ["b.kappa"] = "ð›‹", -- U+1D6CB - ["b.kappav"] = "ð›ž", -- U+1D6DE - ["b.lambda"] = "ð›Œ", -- U+1D6CC - ["b.mu"] = "ð›", -- U+1D6CD - ["b.nu"] = "ð›Ž", -- U+1D6CE - ["b.omega"] = "ð›š", -- U+1D6DA - ["b.phi"] = "ð›—", -- U+1D6D7 - ["b.phiv"] = "ð›Ÿ", -- U+1D6DF - ["b.pi"] = "ð›‘", -- U+1D6D1 - ["b.piv"] = "ð›¡", -- U+1D6E1 - ["b.psi"] = "ð›™", -- U+1D6D9 - ["b.rho"] = "ð›’", -- U+1D6D2 - ["b.rhov"] = "ð› ", -- U+1D6E0 - ["b.sigma"] = "ð›”", -- U+1D6D4 - ["b.sigmav"] = "ð›“", -- U+1D6D3 - ["b.tau"] = "ð›•", -- U+1D6D5 - ["b.thetas"] = "ð›‰", -- U+1D6C9 - ["b.thetav"] = "ð›", -- U+1D6DD - ["b.upsi"] = "ð›–", -- U+1D6D6 - ["b.xi"] = "ð›", -- U+1D6CF - ["b.zeta"] = "ð›‡", -- U+1D6C7 - ["bNot"] = "â«­", -- U+02AED - ["backcong"] = "≌", -- U+0224C - ["backepsilon"] = "϶", -- U+003F6 - ["backprime"] = "‵", -- U+02035 - ["backsim"] = "∽", -- U+0223D - ["backsimeq"] = "â‹", -- U+022CD - ["barvee"] = "⊽", -- U+022BD - ["barwed"] = "⌅", -- U+02305 - ["barwedge"] = "⌅", -- U+02305 - ["bbrk"] = "⎵", -- U+023B5 - ["bbrktbrk"] = "⎶", -- U+023B6 - ["bcong"] = "≌", -- U+0224C - ["bcy"] = "б", -- U+00431 - ["bdquo"] = "„", -- U+0201E - ["becaus"] = "∵", -- U+02235 - ["because"] = "∵", -- U+02235 - ["bemptyv"] = "⦰", -- U+029B0 - ["bepsi"] = "϶", -- U+003F6 - ["bernou"] = "ℬ", -- U+0212C - ["beta"] = "β", -- U+003B2 - ["beth"] = "ℶ", -- U+02136 - ["between"] = "≬", -- U+0226C - ["bfr"] = "ð”Ÿ", -- U+1D51F - ["bgr"] = "β", -- U+003B2 - ["bigcap"] = "â‹‚", -- U+022C2 - ["bigcirc"] = "â—¯", -- U+025EF - ["bigcup"] = "⋃", -- U+022C3 - ["bigodot"] = "⨀", -- U+02A00 - ["bigoplus"] = "â¨", -- U+02A01 - ["bigotimes"] = "⨂", -- U+02A02 - ["bigsqcup"] = "⨆", -- U+02A06 - ["bigstar"] = "★", -- U+02605 - ["bigtriangledown"] = "â–½", -- U+025BD - ["bigtriangleup"] = "â–³", -- U+025B3 - ["biguplus"] = "⨄", -- U+02A04 - ["bigvee"] = "â‹", -- U+022C1 - ["bigwedge"] = "â‹€", -- U+022C0 - ["bkarow"] = "â¤", -- U+0290D - ["blacklozenge"] = "⧫", -- U+029EB - ["blacksquare"] = "â–ª", -- U+025AA - ["blacktriangle"] = "â–´", -- U+025B4 - ["blacktriangledown"] = "â–¾", -- U+025BE - ["blacktriangleleft"] = "â—‚", -- U+025C2 - ["blacktriangleright"] = "â–¸", -- U+025B8 - ["blank"] = "â£", -- U+02423 - ["blk12"] = "â–’", -- U+02592 - ["blk14"] = "â–‘", -- U+02591 - ["blk34"] = "â–“", -- U+02593 - ["block"] = "â–ˆ", -- U+02588 - ["bne"] = "=⃥", -- U+0003D 020E5 - ["bnequiv"] = "≡⃥", -- U+02261 020E5 - ["bnot"] = "âŒ", -- U+02310 - ["bopf"] = "ð•“", -- U+1D553 - ["bot"] = "⊥", -- U+022A5 - ["bottom"] = "⊥", -- U+022A5 - ["bowtie"] = "⋈", -- U+022C8 - ["boxDL"] = "â•—", -- U+02557 - ["boxDR"] = "â•”", -- U+02554 - ["boxDl"] = "â•–", -- U+02556 - ["boxDr"] = "â•“", -- U+02553 - ["boxH"] = "â•", -- U+02550 - ["boxHD"] = "╦", -- U+02566 - ["boxHU"] = "â•©", -- U+02569 - ["boxHd"] = "╤", -- U+02564 - ["boxHu"] = "╧", -- U+02567 - ["boxUL"] = "â•", -- U+0255D - ["boxUR"] = "â•š", -- U+0255A - ["boxUl"] = "â•œ", -- U+0255C - ["boxUr"] = "â•™", -- U+02559 - ["boxV"] = "â•‘", -- U+02551 - ["boxVH"] = "╬", -- U+0256C - ["boxVL"] = "â•£", -- U+02563 - ["boxVR"] = "â• ", -- U+02560 - ["boxVh"] = "â•«", -- U+0256B - ["boxVl"] = "â•¢", -- U+02562 - ["boxVr"] = "â•Ÿ", -- U+0255F - ["boxbox"] = "⧉", -- 
U+029C9 - ["boxdL"] = "â••", -- U+02555 - ["boxdR"] = "â•’", -- U+02552 - ["boxdl"] = "â”", -- U+02510 - ["boxdr"] = "┌", -- U+0250C - ["boxh"] = "─", -- U+02500 - ["boxhD"] = "â•¥", -- U+02565 - ["boxhU"] = "╨", -- U+02568 - ["boxhd"] = "┬", -- U+0252C - ["boxhu"] = "â”´", -- U+02534 - ["boxminus"] = "⊟", -- U+0229F - ["boxplus"] = "⊞", -- U+0229E - ["boxtimes"] = "⊠", -- U+022A0 - ["boxuL"] = "â•›", -- U+0255B - ["boxuR"] = "╘", -- U+02558 - ["boxul"] = "┘", -- U+02518 - ["boxur"] = "â””", -- U+02514 - ["boxv"] = "│", -- U+02502 - ["boxvH"] = "╪", -- U+0256A - ["boxvL"] = "â•¡", -- U+02561 - ["boxvR"] = "â•ž", -- U+0255E - ["boxvh"] = "┼", -- U+0253C - ["boxvl"] = "┤", -- U+02524 - ["boxvr"] = "├", -- U+0251C - ["bprime"] = "‵", -- U+02035 - ["breve"] = "˘", -- U+002D8 - ["brvbar"] = "¦", -- U+000A6 - ["bscr"] = "ð’·", -- U+1D4B7 - ["bsemi"] = "â", -- U+0204F - ["bsim"] = "∽", -- U+0223D - ["bsime"] = "â‹", -- U+022CD - ["bsol"] = "\\", -- U+0005C - ["bsolb"] = "⧅", -- U+029C5 - ["bsolhsub"] = "⟈", -- U+027C8 - ["bull"] = "•", -- U+02022 - ["bullet"] = "•", -- U+02022 - ["bump"] = "≎", -- U+0224E - ["bumpE"] = "⪮", -- U+02AAE - ["bumpe"] = "â‰", -- U+0224F - ["bumpeq"] = "â‰", -- U+0224F - ["cacute"] = "ć", -- U+00107 - ["cap"] = "∩", -- U+02229 - ["capand"] = "â©„", -- U+02A44 - ["capbrcup"] = "⩉", -- U+02A49 - ["capcap"] = "â©‹", -- U+02A4B - ["capcup"] = "⩇", -- U+02A47 - ["capdot"] = "â©€", -- U+02A40 - ["caps"] = "∩︀", -- U+02229 0FE00 - ["caret"] = "â", -- U+02041 - ["caron"] = "ˇ", -- U+002C7 - ["ccaps"] = "â©", -- U+02A4D - ["ccaron"] = "Ä", -- U+0010D - ["ccedil"] = "ç", -- U+000E7 - ["ccirc"] = "ĉ", -- U+00109 - ["ccups"] = "â©Œ", -- U+02A4C - ["ccupssm"] = "â©", -- U+02A50 - ["cdot"] = "Ä‹", -- U+0010B - ["cedil"] = "¸", -- U+000B8 - ["cemptyv"] = "⦲", -- U+029B2 - ["cent"] = "¢", -- U+000A2 - ["centerdot"] = "·", -- U+000B7 - ["cfr"] = "ð” ", -- U+1D520 - ["chcy"] = "ч", -- U+00447 - ["check"] = "✓", -- U+02713 - ["checkmark"] = "✓", -- U+02713 - ["chi"] = "χ", -- U+003C7 - ["cir"] = "â—‹", -- U+025CB - ["cirE"] = "⧃", -- U+029C3 - ["circ"] = "ˆ", -- U+002C6 - ["circeq"] = "≗", -- U+02257 - ["circlearrowleft"] = "↺", -- U+021BA - ["circlearrowright"] = "↻", -- U+021BB - ["circledR"] = "®", -- U+000AE - ["circledS"] = "Ⓢ", -- U+024C8 - ["circledast"] = "⊛", -- U+0229B - ["circledcirc"] = "⊚", -- U+0229A - ["circleddash"] = "âŠ", -- U+0229D - ["cire"] = "≗", -- U+02257 - ["cirfnint"] = "â¨", -- U+02A10 - ["cirmid"] = "⫯", -- U+02AEF - ["cirscir"] = "⧂", -- U+029C2 - ["clubs"] = "♣", -- U+02663 - ["clubsuit"] = "♣", -- U+02663 - ["colon"] = ":", -- U+0003A - ["colone"] = "≔", -- U+02254 - ["coloneq"] = "≔", -- U+02254 - ["comma"] = ",", -- U+0002C - ["commat"] = "@", -- U+00040 - ["comp"] = "âˆ", -- U+02201 - ["compfn"] = "∘", -- U+02218 - ["complement"] = "âˆ", -- U+02201 - ["complexes"] = "â„‚", -- U+02102 - ["cong"] = "≅", -- U+02245 - ["congdot"] = "â©­", -- U+02A6D - ["conint"] = "∮", -- U+0222E - ["copf"] = "ð•”", -- U+1D554 - ["coprod"] = "âˆ", -- U+02210 - ["copy"] = "©", -- U+000A9 - ["copysr"] = "â„—", -- U+02117 - ["crarr"] = "↵", -- U+021B5 - ["cross"] = "✗", -- U+02717 - ["cscr"] = "ð’¸", -- U+1D4B8 - ["csub"] = "â«", -- U+02ACF - ["csube"] = "â«‘", -- U+02AD1 - ["csup"] = "â«", -- U+02AD0 - ["csupe"] = "â«’", -- U+02AD2 - ["ctdot"] = "⋯", -- U+022EF - ["cudarrl"] = "⤸", -- U+02938 - ["cudarrr"] = "⤵", -- U+02935 - ["cuepr"] = "â‹ž", -- U+022DE - ["cuesc"] = "â‹Ÿ", -- U+022DF - ["cularr"] = "↶", -- U+021B6 - ["cularrp"] = "⤽", -- U+0293D - ["cup"] = "∪", -- U+0222A 
- ["cupbrcap"] = "⩈", -- U+02A48 - ["cupcap"] = "⩆", -- U+02A46 - ["cupcup"] = "â©Š", -- U+02A4A - ["cupdot"] = "âŠ", -- U+0228D - ["cupor"] = "â©…", -- U+02A45 - ["cups"] = "∪︀", -- U+0222A 0FE00 - ["curarr"] = "↷", -- U+021B7 - ["curarrm"] = "⤼", -- U+0293C - ["curlyeqprec"] = "â‹ž", -- U+022DE - ["curlyeqsucc"] = "â‹Ÿ", -- U+022DF - ["curlyvee"] = "â‹Ž", -- U+022CE - ["curlywedge"] = "â‹", -- U+022CF - ["curren"] = "¤", -- U+000A4 - ["curvearrowleft"] = "↶", -- U+021B6 - ["curvearrowright"] = "↷", -- U+021B7 - ["cuvee"] = "â‹Ž", -- U+022CE - ["cuwed"] = "â‹", -- U+022CF - ["cwconint"] = "∲", -- U+02232 - ["cwint"] = "∱", -- U+02231 - ["cylcty"] = "⌭", -- U+0232D - ["dArr"] = "⇓", -- U+021D3 - ["dHar"] = "⥥", -- U+02965 - ["dagger"] = "†", -- U+02020 - ["daleth"] = "ℸ", -- U+02138 - ["darr"] = "↓", -- U+02193 - ["dash"] = "â€", -- U+02010 - ["dashv"] = "⊣", -- U+022A3 - ["dbkarow"] = "â¤", -- U+0290F - ["dblac"] = "Ë", -- U+002DD - ["dcaron"] = "Ä", -- U+0010F - ["dcy"] = "д", -- U+00434 - ["dd"] = "â…†", -- U+02146 - ["ddagger"] = "‡", -- U+02021 - ["ddarr"] = "⇊", -- U+021CA - ["ddotseq"] = "â©·", -- U+02A77 - ["deg"] = "°", -- U+000B0 - ["delta"] = "δ", -- U+003B4 - ["demptyv"] = "⦱", -- U+029B1 - ["dfisht"] = "⥿", -- U+0297F - ["dfr"] = "ð”¡", -- U+1D521 - ["dgr"] = "δ", -- U+003B4 - ["dharl"] = "⇃", -- U+021C3 - ["dharr"] = "⇂", -- U+021C2 - ["diam"] = "â‹„", -- U+022C4 - ["diamond"] = "â‹„", -- U+022C4 - ["diamondsuit"] = "♦", -- U+02666 - ["diams"] = "♦", -- U+02666 - ["die"] = "¨", -- U+000A8 - ["digamma"] = "Ï", -- U+003DD - ["disin"] = "⋲", -- U+022F2 - ["div"] = "÷", -- U+000F7 - ["divide"] = "÷", -- U+000F7 - ["divideontimes"] = "⋇", -- U+022C7 - ["divonx"] = "⋇", -- U+022C7 - ["djcy"] = "Ñ’", -- U+00452 - ["dlcorn"] = "⌞", -- U+0231E - ["dlcrop"] = "âŒ", -- U+0230D - ["dollar"] = "$", -- U+00024 - ["dopf"] = "ð••", -- U+1D555 - ["dot"] = "Ë™", -- U+002D9 - ["doteq"] = "â‰", -- U+02250 - ["doteqdot"] = "≑", -- U+02251 - ["dotminus"] = "∸", -- U+02238 - ["dotplus"] = "∔", -- U+02214 - ["dotsquare"] = "⊡", -- U+022A1 - ["doublebarwedge"] = "⌆", -- U+02306 - ["downarrow"] = "↓", -- U+02193 - ["downdownarrows"] = "⇊", -- U+021CA - ["downharpoonleft"] = "⇃", -- U+021C3 - ["downharpoonright"] = "⇂", -- U+021C2 - ["drbkarow"] = "â¤", -- U+02910 - ["drcorn"] = "⌟", -- U+0231F - ["drcrop"] = "⌌", -- U+0230C - ["dscr"] = "ð’¹", -- U+1D4B9 - ["dscy"] = "Ñ•", -- U+00455 - ["dsol"] = "⧶", -- U+029F6 - ["dstrok"] = "Ä‘", -- U+00111 - ["dtdot"] = "⋱", -- U+022F1 - ["dtri"] = "â–¿", -- U+025BF - ["dtrif"] = "â–¾", -- U+025BE - ["duarr"] = "⇵", -- U+021F5 - ["duhar"] = "⥯", -- U+0296F - ["dwangle"] = "⦦", -- U+029A6 - ["dzcy"] = "ÑŸ", -- U+0045F - ["dzigrarr"] = "⟿", -- U+027FF - ["eDDot"] = "â©·", -- U+02A77 - ["eDot"] = "≑", -- U+02251 - ["eacgr"] = "έ", -- U+003AD - ["eacute"] = "é", -- U+000E9 - ["easter"] = "â©®", -- U+02A6E - ["ecaron"] = "Ä›", -- U+0011B - ["ecir"] = "≖", -- U+02256 - ["ecirc"] = "ê", -- U+000EA - ["ecolon"] = "≕", -- U+02255 - ["ecy"] = "Ñ", -- U+0044D - ["edot"] = "Ä—", -- U+00117 - ["ee"] = "â…‡", -- U+02147 - ["eeacgr"] = "ή", -- U+003AE - ["eegr"] = "η", -- U+003B7 - ["efDot"] = "≒", -- U+02252 - ["efr"] = "ð”¢", -- U+1D522 - ["eg"] = "⪚", -- U+02A9A - ["egr"] = "ε", -- U+003B5 - ["egrave"] = "è", -- U+000E8 - ["egs"] = "⪖", -- U+02A96 - ["egsdot"] = "⪘", -- U+02A98 - ["el"] = "⪙", -- U+02A99 - ["elinters"] = "â§", -- U+023E7 - ["ell"] = "â„“", -- U+02113 - ["els"] = "⪕", -- U+02A95 - ["elsdot"] = "⪗", -- U+02A97 - ["emacr"] = "Ä“", -- U+00113 - ["empty"] = "∅", 
-- U+02205 - ["emptyset"] = "∅", -- U+02205 - ["emptyv"] = "∅", -- U+02205 - ["emsp"] = " ", -- U+02003 - ["emsp13"] = " ", -- U+02004 - ["emsp14"] = " ", -- U+02005 - ["eng"] = "Å‹", -- U+0014B - ["ensp"] = " ", -- U+02002 - ["eogon"] = "Ä™", -- U+00119 - ["eopf"] = "ð•–", -- U+1D556 - ["epar"] = "â‹•", -- U+022D5 - ["eparsl"] = "⧣", -- U+029E3 - ["eplus"] = "⩱", -- U+02A71 - ["epsi"] = "ε", -- U+003B5 - ["epsilon"] = "ε", -- U+003B5 - ["epsiv"] = "ϵ", -- U+003F5 - ["eqcirc"] = "≖", -- U+02256 - ["eqcolon"] = "≕", -- U+02255 - ["eqsim"] = "≂", -- U+02242 - ["eqslantgtr"] = "⪖", -- U+02A96 - ["eqslantless"] = "⪕", -- U+02A95 - ["equals"] = "=", -- U+0003D - ["equest"] = "≟", -- U+0225F - ["equiv"] = "≡", -- U+02261 - ["equivDD"] = "⩸", -- U+02A78 - ["eqvparsl"] = "⧥", -- U+029E5 - ["erDot"] = "≓", -- U+02253 - ["erarr"] = "⥱", -- U+02971 - ["escr"] = "ℯ", -- U+0212F - ["esdot"] = "â‰", -- U+02250 - ["esim"] = "≂", -- U+02242 - ["eta"] = "η", -- U+003B7 - ["eth"] = "ð", -- U+000F0 - ["euml"] = "ë", -- U+000EB - ["euro"] = "€", -- U+020AC - ["excl"] = "!", -- U+00021 - ["exist"] = "∃", -- U+02203 - ["expectation"] = "â„°", -- U+02130 - ["exponentiale"] = "â…‡", -- U+02147 - ["fallingdotseq"] = "≒", -- U+02252 - ["fcy"] = "Ñ„", -- U+00444 - ["female"] = "♀", -- U+02640 - ["ffilig"] = "ffi", -- U+0FB03 - ["fflig"] = "ff", -- U+0FB00 - ["ffllig"] = "ffl", -- U+0FB04 - ["ffr"] = "ð”£", -- U+1D523 - ["filig"] = "ï¬", -- U+0FB01 - ["fjlig"] = "fj", -- U+00066 0006A - ["flat"] = "â™­", -- U+0266D - ["fllig"] = "fl", -- U+0FB02 - ["fltns"] = "â–±", -- U+025B1 - ["fnof"] = "Æ’", -- U+00192 - ["fopf"] = "ð•—", -- U+1D557 - ["forall"] = "∀", -- U+02200 - ["fork"] = "â‹”", -- U+022D4 - ["forkv"] = "â«™", -- U+02AD9 - ["fpartint"] = "â¨", -- U+02A0D - ["frac12"] = "½", -- U+000BD - ["frac13"] = "â…“", -- U+02153 - ["frac14"] = "¼", -- U+000BC - ["frac15"] = "â…•", -- U+02155 - ["frac16"] = "â…™", -- U+02159 - ["frac18"] = "â…›", -- U+0215B - ["frac23"] = "â…”", -- U+02154 - ["frac25"] = "â…–", -- U+02156 - ["frac34"] = "¾", -- U+000BE - ["frac35"] = "â…—", -- U+02157 - ["frac38"] = "â…œ", -- U+0215C - ["frac45"] = "â…˜", -- U+02158 - ["frac56"] = "â…š", -- U+0215A - ["frac58"] = "â…", -- U+0215D - ["frac78"] = "â…ž", -- U+0215E - ["frasl"] = "â„", -- U+02044 - ["frown"] = "⌢", -- U+02322 - ["fscr"] = "ð’»", -- U+1D4BB - ["gE"] = "≧", -- U+02267 - ["gEl"] = "⪌", -- U+02A8C - ["gacute"] = "ǵ", -- U+001F5 - ["gamma"] = "γ", -- U+003B3 - ["gammad"] = "Ï", -- U+003DD - ["gap"] = "⪆", -- U+02A86 - ["gbreve"] = "ÄŸ", -- U+0011F - ["gcirc"] = "Ä", -- U+0011D - ["gcy"] = "г", -- U+00433 - ["gdot"] = "Ä¡", -- U+00121 - ["ge"] = "≥", -- U+02265 - ["gel"] = "â‹›", -- U+022DB - ["geq"] = "≥", -- U+02265 - ["geqq"] = "≧", -- U+02267 - ["geqslant"] = "⩾", -- U+02A7E - ["ges"] = "⩾", -- U+02A7E - ["gescc"] = "⪩", -- U+02AA9 - ["gesdot"] = "⪀", -- U+02A80 - ["gesdoto"] = "⪂", -- U+02A82 - ["gesdotol"] = "⪄", -- U+02A84 - ["gesl"] = "⋛︀", -- U+022DB 0FE00 - ["gesles"] = "⪔", -- U+02A94 - ["gfr"] = "ð”¤", -- U+1D524 - ["gg"] = "≫", -- U+0226B - ["ggg"] = "â‹™", -- U+022D9 - ["ggr"] = "γ", -- U+003B3 - ["gimel"] = "â„·", -- U+02137 - ["gjcy"] = "Ñ“", -- U+00453 - ["gl"] = "≷", -- U+02277 - ["glE"] = "⪒", -- U+02A92 - ["gla"] = "⪥", -- U+02AA5 - ["glj"] = "⪤", -- U+02AA4 - ["gnE"] = "≩", -- U+02269 - ["gnap"] = "⪊", -- U+02A8A - ["gnapprox"] = "⪊", -- U+02A8A - ["gne"] = "⪈", -- U+02A88 - ["gneq"] = "⪈", -- U+02A88 - ["gneqq"] = "≩", -- U+02269 - ["gnsim"] = "⋧", -- U+022E7 - ["gopf"] = "ð•˜", -- U+1D558 - ["grave"] = "`", 
-- U+00060 - ["gscr"] = "â„Š", -- U+0210A - ["gsim"] = "≳", -- U+02273 - ["gsime"] = "⪎", -- U+02A8E - ["gsiml"] = "âª", -- U+02A90 - ["gt"] = ">", -- U+0003E - ["gtcc"] = "⪧", -- U+02AA7 - ["gtcir"] = "⩺", -- U+02A7A - ["gtdot"] = "â‹—", -- U+022D7 - ["gtlPar"] = "⦕", -- U+02995 - ["gtquest"] = "⩼", -- U+02A7C - ["gtrapprox"] = "⪆", -- U+02A86 - ["gtrarr"] = "⥸", -- U+02978 - ["gtrdot"] = "â‹—", -- U+022D7 - ["gtreqless"] = "â‹›", -- U+022DB - ["gtreqqless"] = "⪌", -- U+02A8C - ["gtrless"] = "≷", -- U+02277 - ["gtrsim"] = "≳", -- U+02273 - ["gvertneqq"] = "≩︀", -- U+02269 0FE00 - ["gvnE"] = "≩︀", -- U+02269 0FE00 - ["hArr"] = "⇔", -- U+021D4 - ["hairsp"] = " ", -- U+0200A - ["half"] = "½", -- U+000BD - ["hamilt"] = "â„‹", -- U+0210B - ["hardcy"] = "ÑŠ", -- U+0044A - ["harr"] = "↔", -- U+02194 - ["harrcir"] = "⥈", -- U+02948 - ["harrw"] = "↭", -- U+021AD - ["hbar"] = "â„", -- U+0210F - ["hcirc"] = "Ä¥", -- U+00125 - ["hearts"] = "♥", -- U+02665 - ["heartsuit"] = "♥", -- U+02665 - ["hellip"] = "…", -- U+02026 - ["hercon"] = "⊹", -- U+022B9 - ["hfr"] = "ð”¥", -- U+1D525 - ["hksearow"] = "⤥", -- U+02925 - ["hkswarow"] = "⤦", -- U+02926 - ["hoarr"] = "⇿", -- U+021FF - ["homtht"] = "∻", -- U+0223B - ["hookleftarrow"] = "↩", -- U+021A9 - ["hookrightarrow"] = "↪", -- U+021AA - ["hopf"] = "ð•™", -- U+1D559 - ["horbar"] = "―", -- U+02015 - ["hscr"] = "ð’½", -- U+1D4BD - ["hslash"] = "â„", -- U+0210F - ["hstrok"] = "ħ", -- U+00127 - ["hybull"] = "âƒ", -- U+02043 - ["hyphen"] = "â€", -- U+02010 - ["iacgr"] = "ί", -- U+003AF - ["iacute"] = "í", -- U+000ED - ["ic"] = "â£", -- U+02063 - ["icirc"] = "î", -- U+000EE - ["icy"] = "и", -- U+00438 - ["idiagr"] = "Î", -- U+00390 - ["idigr"] = "ÏŠ", -- U+003CA - ["iecy"] = "е", -- U+00435 - ["iexcl"] = "¡", -- U+000A1 - ["iff"] = "⇔", -- U+021D4 - ["ifr"] = "ð”¦", -- U+1D526 - ["igr"] = "ι", -- U+003B9 - ["igrave"] = "ì", -- U+000EC - ["ii"] = "â…ˆ", -- U+02148 - ["iiiint"] = "⨌", -- U+02A0C - ["iiint"] = "∭", -- U+0222D - ["iinfin"] = "⧜", -- U+029DC - ["iiota"] = "â„©", -- U+02129 - ["ijlig"] = "ij", -- U+00133 - ["imacr"] = "Ä«", -- U+0012B - ["image"] = "â„‘", -- U+02111 - ["imagline"] = "â„", -- U+02110 - ["imagpart"] = "â„‘", -- U+02111 - ["imath"] = "ı", -- U+00131 - ["imof"] = "⊷", -- U+022B7 - ["imped"] = "Ƶ", -- U+001B5 - ["in"] = "∈", -- U+02208 - ["incare"] = "â„…", -- U+02105 - ["infin"] = "∞", -- U+0221E - ["infintie"] = "â§", -- U+029DD - ["inodot"] = "ı", -- U+00131 - ["int"] = "∫", -- U+0222B - ["intcal"] = "⊺", -- U+022BA - ["integers"] = "ℤ", -- U+02124 - ["intercal"] = "⊺", -- U+022BA - ["intlarhk"] = "⨗", -- U+02A17 - ["intprod"] = "⨼", -- U+02A3C - ["iocy"] = "Ñ‘", -- U+00451 - ["iogon"] = "į", -- U+0012F - ["iopf"] = "ð•š", -- U+1D55A - ["iota"] = "ι", -- U+003B9 - ["iprod"] = "⨼", -- U+02A3C - ["iquest"] = "¿", -- U+000BF - ["iscr"] = "ð’¾", -- U+1D4BE - ["isin"] = "∈", -- U+02208 - ["isinE"] = "⋹", -- U+022F9 - ["isindot"] = "⋵", -- U+022F5 - ["isins"] = "â‹´", -- U+022F4 - ["isinsv"] = "⋳", -- U+022F3 - ["isinv"] = "∈", -- U+02208 - ["it"] = "â¢", -- U+02062 - ["itilde"] = "Ä©", -- U+00129 - ["iukcy"] = "Ñ–", -- U+00456 - ["iuml"] = "ï", -- U+000EF - ["jcirc"] = "ĵ", -- U+00135 - ["jcy"] = "й", -- U+00439 - ["jfr"] = "ð”§", -- U+1D527 - ["jmath"] = "È·", -- U+00237 - ["jopf"] = "ð•›", -- U+1D55B - ["jscr"] = "ð’¿", -- U+1D4BF - ["jsercy"] = "ј", -- U+00458 - ["jukcy"] = "Ñ”", -- U+00454 - ["kappa"] = "κ", -- U+003BA - ["kappav"] = "Ï°", -- U+003F0 - ["kcedil"] = "Ä·", -- U+00137 - ["kcy"] = "к", -- U+0043A - ["kfr"] = "ð”¨", -- U+1D528 
- ["kgr"] = "κ", -- U+003BA - ["kgreen"] = "ĸ", -- U+00138 - ["khcy"] = "Ñ…", -- U+00445 - ["khgr"] = "χ", -- U+003C7 - ["kjcy"] = "Ñœ", -- U+0045C - ["kopf"] = "ð•œ", -- U+1D55C - ["kscr"] = "ð“€", -- U+1D4C0 - ["lAarr"] = "⇚", -- U+021DA - ["lArr"] = "â‡", -- U+021D0 - ["lAtail"] = "⤛", -- U+0291B - ["lBarr"] = "⤎", -- U+0290E - ["lE"] = "≦", -- U+02266 - ["lEg"] = "⪋", -- U+02A8B - ["lHar"] = "⥢", -- U+02962 - ["lacute"] = "ĺ", -- U+0013A - ["laemptyv"] = "⦴", -- U+029B4 - ["lagran"] = "â„’", -- U+02112 - ["lambda"] = "λ", -- U+003BB - ["lang"] = "⟨", -- U+027E8 - ["langd"] = "⦑", -- U+02991 - ["langle"] = "⟨", -- U+027E8 - ["lap"] = "⪅", -- U+02A85 - ["laquo"] = "«", -- U+000AB - ["larr"] = "â†", -- U+02190 - ["larrb"] = "⇤", -- U+021E4 - ["larrbfs"] = "⤟", -- U+0291F - ["larrfs"] = "â¤", -- U+0291D - ["larrhk"] = "↩", -- U+021A9 - ["larrlp"] = "↫", -- U+021AB - ["larrpl"] = "⤹", -- U+02939 - ["larrsim"] = "⥳", -- U+02973 - ["larrtl"] = "↢", -- U+021A2 - ["lat"] = "⪫", -- U+02AAB - ["latail"] = "⤙", -- U+02919 - ["late"] = "⪭", -- U+02AAD - ["lates"] = "⪭︀", -- U+02AAD 0FE00 - ["lbarr"] = "⤌", -- U+0290C - ["lbbrk"] = "â²", -- U+02772 - ["lbrace"] = "{", -- U+0007B - ["lbrack"] = "[", -- U+0005B - ["lbrke"] = "⦋", -- U+0298B - ["lbrksld"] = "â¦", -- U+0298F - ["lbrkslu"] = "â¦", -- U+0298D - ["lcaron"] = "ľ", -- U+0013E - ["lcedil"] = "ļ", -- U+0013C - ["lceil"] = "⌈", -- U+02308 - ["lcub"] = "{", -- U+0007B - ["lcy"] = "л", -- U+0043B - ["ldca"] = "⤶", -- U+02936 - ["ldquo"] = "“", -- U+0201C - ["ldquor"] = "„", -- U+0201E - ["ldrdhar"] = "⥧", -- U+02967 - ["ldrushar"] = "⥋", -- U+0294B - ["ldsh"] = "↲", -- U+021B2 - ["le"] = "≤", -- U+02264 - ["leftarrow"] = "â†", -- U+02190 - ["leftarrowtail"] = "↢", -- U+021A2 - ["leftharpoondown"] = "↽", -- U+021BD - ["leftharpoonup"] = "↼", -- U+021BC - ["leftleftarrows"] = "⇇", -- U+021C7 - ["leftrightarrow"] = "↔", -- U+02194 - ["leftrightarrows"] = "⇆", -- U+021C6 - ["leftrightharpoons"] = "⇋", -- U+021CB - ["leftrightsquigarrow"] = "↭", -- U+021AD - ["leftthreetimes"] = "â‹‹", -- U+022CB - ["leg"] = "â‹š", -- U+022DA - ["leq"] = "≤", -- U+02264 - ["leqq"] = "≦", -- U+02266 - ["leqslant"] = "⩽", -- U+02A7D - ["les"] = "⩽", -- U+02A7D - ["lescc"] = "⪨", -- U+02AA8 - ["lesdot"] = "â©¿", -- U+02A7F - ["lesdoto"] = "âª", -- U+02A81 - ["lesdotor"] = "⪃", -- U+02A83 - ["lesg"] = "⋚︀", -- U+022DA 0FE00 - ["lesges"] = "⪓", -- U+02A93 - ["lessapprox"] = "⪅", -- U+02A85 - ["lessdot"] = "â‹–", -- U+022D6 - ["lesseqgtr"] = "â‹š", -- U+022DA - ["lesseqqgtr"] = "⪋", -- U+02A8B - ["lessgtr"] = "≶", -- U+02276 - ["lesssim"] = "≲", -- U+02272 - ["lfisht"] = "⥼", -- U+0297C - ["lfloor"] = "⌊", -- U+0230A - ["lfr"] = "ð”©", -- U+1D529 - ["lg"] = "≶", -- U+02276 - ["lgE"] = "⪑", -- U+02A91 - ["lgr"] = "λ", -- U+003BB - ["lhard"] = "↽", -- U+021BD - ["lharu"] = "↼", -- U+021BC - ["lharul"] = "⥪", -- U+0296A - ["lhblk"] = "â–„", -- U+02584 - ["ljcy"] = "Ñ™", -- U+00459 - ["ll"] = "≪", -- U+0226A - ["llarr"] = "⇇", -- U+021C7 - ["llcorner"] = "⌞", -- U+0231E - ["llhard"] = "⥫", -- U+0296B - ["lltri"] = "â—º", -- U+025FA - ["lmidot"] = "Å€", -- U+00140 - ["lmoust"] = "⎰", -- U+023B0 - ["lmoustache"] = "⎰", -- U+023B0 - ["lnE"] = "≨", -- U+02268 - ["lnap"] = "⪉", -- U+02A89 - ["lnapprox"] = "⪉", -- U+02A89 - ["lne"] = "⪇", -- U+02A87 - ["lneq"] = "⪇", -- U+02A87 - ["lneqq"] = "≨", -- U+02268 - ["lnsim"] = "⋦", -- U+022E6 - ["loang"] = "⟬", -- U+027EC - ["loarr"] = "⇽", -- U+021FD - ["lobrk"] = "⟦", -- U+027E6 - ["longleftarrow"] = "⟵", -- U+027F5 - 
["longleftrightarrow"] = "⟷", -- U+027F7 - ["longmapsto"] = "⟼", -- U+027FC - ["longrightarrow"] = "⟶", -- U+027F6 - ["looparrowleft"] = "↫", -- U+021AB - ["looparrowright"] = "↬", -- U+021AC - ["lopar"] = "⦅", -- U+02985 - ["lopf"] = "ð•", -- U+1D55D - ["loplus"] = "⨭", -- U+02A2D - ["lotimes"] = "⨴", -- U+02A34 - ["lowast"] = "∗", -- U+02217 - ["lowbar"] = "_", -- U+0005F - ["loz"] = "â—Š", -- U+025CA - ["lozenge"] = "â—Š", -- U+025CA - ["lozf"] = "⧫", -- U+029EB - ["lpar"] = "(", -- U+00028 - ["lparlt"] = "⦓", -- U+02993 - ["lrarr"] = "⇆", -- U+021C6 - ["lrcorner"] = "⌟", -- U+0231F - ["lrhar"] = "⇋", -- U+021CB - ["lrhard"] = "⥭", -- U+0296D - ["lrm"] = "‎", -- U+0200E - ["lrtri"] = "⊿", -- U+022BF - ["lsaquo"] = "‹", -- U+02039 - ["lscr"] = "ð“", -- U+1D4C1 - ["lsh"] = "↰", -- U+021B0 - ["lsim"] = "≲", -- U+02272 - ["lsime"] = "âª", -- U+02A8D - ["lsimg"] = "âª", -- U+02A8F - ["lsqb"] = "[", -- U+0005B - ["lsquo"] = "‘", -- U+02018 - ["lsquor"] = "‚", -- U+0201A - ["lstrok"] = "Å‚", -- U+00142 - ["lt"] = "<", -- U+00026 - ["ltcc"] = "⪦", -- U+02AA6 - ["ltcir"] = "⩹", -- U+02A79 - ["ltdot"] = "â‹–", -- U+022D6 - ["lthree"] = "â‹‹", -- U+022CB - ["ltimes"] = "⋉", -- U+022C9 - ["ltlarr"] = "⥶", -- U+02976 - ["ltquest"] = "â©»", -- U+02A7B - ["ltrPar"] = "⦖", -- U+02996 - ["ltri"] = "â—ƒ", -- U+025C3 - ["ltrie"] = "⊴", -- U+022B4 - ["ltrif"] = "â—‚", -- U+025C2 - ["lurdshar"] = "⥊", -- U+0294A - ["luruhar"] = "⥦", -- U+02966 - ["lvertneqq"] = "≨︀", -- U+02268 0FE00 - ["lvnE"] = "≨︀", -- U+02268 0FE00 - ["mDDot"] = "∺", -- U+0223A - ["macr"] = "¯", -- U+000AF - ["male"] = "♂", -- U+02642 - ["malt"] = "✠", -- U+02720 - ["maltese"] = "✠", -- U+02720 - ["map"] = "↦", -- U+021A6 - ["mapsto"] = "↦", -- U+021A6 - ["mapstodown"] = "↧", -- U+021A7 - ["mapstoleft"] = "↤", -- U+021A4 - ["mapstoup"] = "↥", -- U+021A5 - ["marker"] = "â–®", -- U+025AE - ["mcomma"] = "⨩", -- U+02A29 - ["mcy"] = "м", -- U+0043C - ["mdash"] = "—", -- U+02014 - ["measuredangle"] = "∡", -- U+02221 - ["mfr"] = "ð”ª", -- U+1D52A - ["mgr"] = "μ", -- U+003BC - ["mho"] = "℧", -- U+02127 - ["micro"] = "µ", -- U+000B5 - ["mid"] = "∣", -- U+02223 - ["midast"] = "*", -- U+0002A - ["midcir"] = "â«°", -- U+02AF0 - ["middot"] = "·", -- U+000B7 - ["minus"] = "−", -- U+02212 - ["minusb"] = "⊟", -- U+0229F - ["minusd"] = "∸", -- U+02238 - ["minusdu"] = "⨪", -- U+02A2A - ["mlcp"] = "â«›", -- U+02ADB - ["mldr"] = "…", -- U+02026 - ["mnplus"] = "∓", -- U+02213 - ["models"] = "⊧", -- U+022A7 - ["mopf"] = "ð•ž", -- U+1D55E - ["mp"] = "∓", -- U+02213 - ["mscr"] = "ð“‚", -- U+1D4C2 - ["mstpos"] = "∾", -- U+0223E - ["mu"] = "μ", -- U+003BC - ["multimap"] = "⊸", -- U+022B8 - ["mumap"] = "⊸", -- U+022B8 - ["nGg"] = "⋙̸", -- U+022D9 00338 - ["nGt"] = "≫⃒", -- U+0226B 020D2 - ["nGtv"] = "≫̸", -- U+0226B 00338 - ["nLeftarrow"] = "â‡", -- U+021CD - ["nLeftrightarrow"] = "⇎", -- U+021CE - ["nLl"] = "⋘̸", -- U+022D8 00338 - ["nLt"] = "≪⃒", -- U+0226A 020D2 - ["nLtv"] = "≪̸", -- U+0226A 00338 - ["nRightarrow"] = "â‡", -- U+021CF - ["nVDash"] = "⊯", -- U+022AF - ["nVdash"] = "⊮", -- U+022AE - ["nabla"] = "∇", -- U+02207 - ["nacute"] = "Å„", -- U+00144 - ["nang"] = "∠⃒", -- U+02220 020D2 - ["nap"] = "≉", -- U+02249 - ["napE"] = "⩰̸", -- U+02A70 00338 - ["napid"] = "≋̸", -- U+0224B 00338 - ["napos"] = "ʼn", -- U+00149 - ["napprox"] = "≉", -- U+02249 - ["natur"] = "â™®", -- U+0266E - ["natural"] = "â™®", -- U+0266E - ["naturals"] = "â„•", -- U+02115 - ["nbsp"] = " ", -- U+000A0 - ["nbump"] = "≎̸", -- U+0224E 00338 - ["nbumpe"] = "â‰Ì¸", -- U+0224F 00338 - 
["ncap"] = "⩃", -- U+02A43 - ["ncaron"] = "ň", -- U+00148 - ["ncedil"] = "ņ", -- U+00146 - ["ncong"] = "≇", -- U+02247 - ["ncongdot"] = "⩭̸", -- U+02A6D 00338 - ["ncup"] = "â©‚", -- U+02A42 - ["ncy"] = "н", -- U+0043D - ["ndash"] = "–", -- U+02013 - ["ne"] = "≠", -- U+02260 - ["neArr"] = "⇗", -- U+021D7 - ["nearhk"] = "⤤", -- U+02924 - ["nearr"] = "↗", -- U+02197 - ["nearrow"] = "↗", -- U+02197 - ["nedot"] = "â‰Ì¸", -- U+02250 00338 - ["nequiv"] = "≢", -- U+02262 - ["nesear"] = "⤨", -- U+02928 - ["nesim"] = "≂̸", -- U+02242 00338 - ["nexist"] = "∄", -- U+02204 - ["nexists"] = "∄", -- U+02204 - ["nfr"] = "ð”«", -- U+1D52B - ["ngE"] = "≧̸", -- U+02267 00338 - ["nge"] = "≱", -- U+02271 - ["ngeq"] = "≱", -- U+02271 - ["ngeqq"] = "≧̸", -- U+02267 00338 - ["ngeqslant"] = "⩾̸", -- U+02A7E 00338 - ["nges"] = "⩾̸", -- U+02A7E 00338 - ["ngr"] = "ν", -- U+003BD - ["ngsim"] = "≵", -- U+02275 - ["ngt"] = "≯", -- U+0226F - ["ngtr"] = "≯", -- U+0226F - ["nhArr"] = "⇎", -- U+021CE - ["nharr"] = "↮", -- U+021AE - ["nhpar"] = "⫲", -- U+02AF2 - ["ni"] = "∋", -- U+0220B - ["nis"] = "⋼", -- U+022FC - ["nisd"] = "⋺", -- U+022FA - ["niv"] = "∋", -- U+0220B - ["njcy"] = "Ñš", -- U+0045A - ["nlArr"] = "â‡", -- U+021CD - ["nlE"] = "≦̸", -- U+02266 00338 - ["nlarr"] = "↚", -- U+0219A - ["nldr"] = "‥", -- U+02025 - ["nle"] = "≰", -- U+02270 - ["nleftarrow"] = "↚", -- U+0219A - ["nleftrightarrow"] = "↮", -- U+021AE - ["nleq"] = "≰", -- U+02270 - ["nleqq"] = "≦̸", -- U+02266 00338 - ["nleqslant"] = "⩽̸", -- U+02A7D 00338 - ["nles"] = "⩽̸", -- U+02A7D 00338 - ["nless"] = "≮", -- U+0226E - ["nlsim"] = "≴", -- U+02274 - ["nlt"] = "≮", -- U+0226E - ["nltri"] = "⋪", -- U+022EA - ["nltrie"] = "⋬", -- U+022EC - ["nmid"] = "∤", -- U+02224 - ["nopf"] = "ð•Ÿ", -- U+1D55F - ["not"] = "¬", -- U+000AC - ["notin"] = "∉", -- U+02209 - ["notinE"] = "⋹̸", -- U+022F9 00338 - ["notindot"] = "⋵̸", -- U+022F5 00338 - ["notinva"] = "∉", -- U+02209 - ["notinvb"] = "â‹·", -- U+022F7 - ["notinvc"] = "⋶", -- U+022F6 - ["notni"] = "∌", -- U+0220C - ["notniva"] = "∌", -- U+0220C - ["notnivb"] = "⋾", -- U+022FE - ["notnivc"] = "⋽", -- U+022FD - ["npar"] = "∦", -- U+02226 - ["nparallel"] = "∦", -- U+02226 - ["nparsl"] = "⫽⃥", -- U+02AFD 020E5 - ["npart"] = "∂̸", -- U+02202 00338 - ["npolint"] = "⨔", -- U+02A14 - ["npr"] = "⊀", -- U+02280 - ["nprcue"] = "â‹ ", -- U+022E0 - ["npre"] = "⪯̸", -- U+02AAF 00338 - ["nprec"] = "⊀", -- U+02280 - ["npreceq"] = "⪯̸", -- U+02AAF 00338 - ["nrArr"] = "â‡", -- U+021CF - ["nrarr"] = "↛", -- U+0219B - ["nrarrc"] = "⤳̸", -- U+02933 00338 - ["nrarrw"] = "â†Ì¸", -- U+0219D 00338 - ["nrightarrow"] = "↛", -- U+0219B - ["nrtri"] = "â‹«", -- U+022EB - ["nrtrie"] = "â‹­", -- U+022ED - ["nsc"] = "âŠ", -- U+02281 - ["nsccue"] = "â‹¡", -- U+022E1 - ["nsce"] = "⪰̸", -- U+02AB0 00338 - ["nscr"] = "ð“ƒ", -- U+1D4C3 - ["nshortmid"] = "∤", -- U+02224 - ["nshortparallel"] = "∦", -- U+02226 - ["nsim"] = "â‰", -- U+02241 - ["nsime"] = "≄", -- U+02244 - ["nsimeq"] = "≄", -- U+02244 - ["nsmid"] = "∤", -- U+02224 - ["nspar"] = "∦", -- U+02226 - ["nsqsube"] = "â‹¢", -- U+022E2 - ["nsqsupe"] = "â‹£", -- U+022E3 - ["nsub"] = "⊄", -- U+02284 - ["nsubE"] = "⫅̸", -- U+02AC5 00338 - ["nsube"] = "⊈", -- U+02288 - ["nsubset"] = "⊂⃒", -- U+02282 020D2 - ["nsubseteq"] = "⊈", -- U+02288 - ["nsubseteqq"] = "⫅̸", -- U+02AC5 00338 - ["nsucc"] = "âŠ", -- U+02281 - ["nsucceq"] = "⪰̸", -- U+02AB0 00338 - ["nsup"] = "⊅", -- U+02285 - ["nsupE"] = "⫆̸", -- U+02AC6 00338 - ["nsupe"] = "⊉", -- U+02289 - ["nsupset"] = "⊃⃒", -- U+02283 020D2 - ["nsupseteq"] = 
"⊉", -- U+02289 - ["nsupseteqq"] = "⫆̸", -- U+02AC6 00338 - ["ntgl"] = "≹", -- U+02279 - ["ntilde"] = "ñ", -- U+000F1 - ["ntlg"] = "≸", -- U+02278 - ["ntriangleleft"] = "⋪", -- U+022EA - ["ntrianglelefteq"] = "⋬", -- U+022EC - ["ntriangleright"] = "â‹«", -- U+022EB - ["ntrianglerighteq"] = "â‹­", -- U+022ED - ["nu"] = "ν", -- U+003BD - ["num"] = "#", -- U+00023 - ["numero"] = "â„–", -- U+02116 - ["numsp"] = " ", -- U+02007 - ["nvDash"] = "⊭", -- U+022AD - ["nvHarr"] = "⤄", -- U+02904 - ["nvap"] = "â‰âƒ’", -- U+0224D 020D2 - ["nvdash"] = "⊬", -- U+022AC - ["nvge"] = "≥⃒", -- U+02265 020D2 - ["nvgt"] = ">⃒", -- U+0003E 020D2 - ["nvinfin"] = "⧞", -- U+029DE - ["nvlArr"] = "⤂", -- U+02902 - ["nvle"] = "≤⃒", -- U+02264 020D2 - ["nvlt"] = "&⃒", -- U+00026 020D2 - ["nvltrie"] = "⊴⃒", -- U+022B4 020D2 - ["nvrArr"] = "⤃", -- U+02903 - ["nvrtrie"] = "⊵⃒", -- U+022B5 020D2 - ["nvsim"] = "∼⃒", -- U+0223C 020D2 - ["nwArr"] = "⇖", -- U+021D6 - ["nwarhk"] = "⤣", -- U+02923 - ["nwarr"] = "↖", -- U+02196 - ["nwarrow"] = "↖", -- U+02196 - ["nwnear"] = "⤧", -- U+02927 - ["oS"] = "Ⓢ", -- U+024C8 - ["oacgr"] = "ÏŒ", -- U+003CC - ["oacute"] = "ó", -- U+000F3 - ["oast"] = "⊛", -- U+0229B - ["ocir"] = "⊚", -- U+0229A - ["ocirc"] = "ô", -- U+000F4 - ["ocy"] = "о", -- U+0043E - ["odash"] = "âŠ", -- U+0229D - ["odblac"] = "Å‘", -- U+00151 - ["odiv"] = "⨸", -- U+02A38 - ["odot"] = "⊙", -- U+02299 - ["odsold"] = "⦼", -- U+029BC - ["oelig"] = "Å“", -- U+00153 - ["ofcir"] = "⦿", -- U+029BF - ["ofr"] = "ð”¬", -- U+1D52C - ["ogon"] = "Ë›", -- U+002DB - ["ogr"] = "ο", -- U+003BF - ["ograve"] = "ò", -- U+000F2 - ["ogt"] = "â§", -- U+029C1 - ["ohacgr"] = "ÏŽ", -- U+003CE - ["ohbar"] = "⦵", -- U+029B5 - ["ohgr"] = "ω", -- U+003C9 - ["ohm"] = "Ω", -- U+003A9 - ["oint"] = "∮", -- U+0222E - ["olarr"] = "↺", -- U+021BA - ["olcir"] = "⦾", -- U+029BE - ["olcross"] = "⦻", -- U+029BB - ["oline"] = "‾", -- U+0203E - ["olt"] = "⧀", -- U+029C0 - ["omacr"] = "Å", -- U+0014D - ["omega"] = "ω", -- U+003C9 - ["omicron"] = "ο", -- U+003BF - ["omid"] = "⦶", -- U+029B6 - ["ominus"] = "⊖", -- U+02296 - ["oopf"] = "ð• ", -- U+1D560 - ["opar"] = "⦷", -- U+029B7 - ["operp"] = "⦹", -- U+029B9 - ["oplus"] = "⊕", -- U+02295 - ["or"] = "∨", -- U+02228 - ["orarr"] = "↻", -- U+021BB - ["ord"] = "â©", -- U+02A5D - ["order"] = "â„´", -- U+02134 - ["orderof"] = "â„´", -- U+02134 - ["ordf"] = "ª", -- U+000AA - ["ordm"] = "º", -- U+000BA - ["origof"] = "⊶", -- U+022B6 - ["oror"] = "â©–", -- U+02A56 - ["orslope"] = "â©—", -- U+02A57 - ["orv"] = "â©›", -- U+02A5B - ["oscr"] = "â„´", -- U+02134 - ["oslash"] = "ø", -- U+000F8 - ["osol"] = "⊘", -- U+02298 - ["otilde"] = "õ", -- U+000F5 - ["otimes"] = "⊗", -- U+02297 - ["otimesas"] = "⨶", -- U+02A36 - ["ouml"] = "ö", -- U+000F6 - ["ovbar"] = "⌽", -- U+0233D - ["par"] = "∥", -- U+02225 - ["para"] = "¶", -- U+000B6 - ["parallel"] = "∥", -- U+02225 - ["parsim"] = "⫳", -- U+02AF3 - ["parsl"] = "⫽", -- U+02AFD - ["part"] = "∂", -- U+02202 - ["pcy"] = "п", -- U+0043F - ["percnt"] = "%", -- U+00025 - ["period"] = ".", -- U+0002E - ["permil"] = "‰", -- U+02030 - ["perp"] = "⊥", -- U+022A5 - ["pertenk"] = "‱", -- U+02031 - ["pfr"] = "ð”­", -- U+1D52D - ["pgr"] = "Ï€", -- U+003C0 - ["phgr"] = "φ", -- U+003C6 - ["phi"] = "φ", -- U+003C6 - ["phiv"] = "Ï•", -- U+003D5 - ["phmmat"] = "ℳ", -- U+02133 - ["phone"] = "☎", -- U+0260E - ["pi"] = "Ï€", -- U+003C0 - ["pitchfork"] = "â‹”", -- U+022D4 - ["piv"] = "Ï–", -- U+003D6 - ["planck"] = "â„", -- U+0210F - ["planckh"] = "â„Ž", -- U+0210E - ["plankv"] = "â„", -- U+0210F - ["plus"] 
= "+", -- U+0002B - ["plusacir"] = "⨣", -- U+02A23 - ["plusb"] = "⊞", -- U+0229E - ["pluscir"] = "⨢", -- U+02A22 - ["plusdo"] = "∔", -- U+02214 - ["plusdu"] = "⨥", -- U+02A25 - ["pluse"] = "⩲", -- U+02A72 - ["plusmn"] = "±", -- U+000B1 - ["plussim"] = "⨦", -- U+02A26 - ["plustwo"] = "⨧", -- U+02A27 - ["pm"] = "±", -- U+000B1 - ["pointint"] = "⨕", -- U+02A15 - ["popf"] = "ð•¡", -- U+1D561 - ["pound"] = "£", -- U+000A3 - ["pr"] = "≺", -- U+0227A - ["prE"] = "⪳", -- U+02AB3 - ["prap"] = "⪷", -- U+02AB7 - ["prcue"] = "≼", -- U+0227C - ["pre"] = "⪯", -- U+02AAF - ["prec"] = "≺", -- U+0227A - ["precapprox"] = "⪷", -- U+02AB7 - ["preccurlyeq"] = "≼", -- U+0227C - ["preceq"] = "⪯", -- U+02AAF - ["precnapprox"] = "⪹", -- U+02AB9 - ["precneqq"] = "⪵", -- U+02AB5 - ["precnsim"] = "⋨", -- U+022E8 - ["precsim"] = "≾", -- U+0227E - ["prime"] = "′", -- U+02032 - ["primes"] = "â„™", -- U+02119 - ["prnE"] = "⪵", -- U+02AB5 - ["prnap"] = "⪹", -- U+02AB9 - ["prnsim"] = "⋨", -- U+022E8 - ["prod"] = "âˆ", -- U+0220F - ["profalar"] = "⌮", -- U+0232E - ["profline"] = "⌒", -- U+02312 - ["profsurf"] = "⌓", -- U+02313 - ["prop"] = "âˆ", -- U+0221D - ["propto"] = "âˆ", -- U+0221D - ["prsim"] = "≾", -- U+0227E - ["prurel"] = "⊰", -- U+022B0 - ["pscr"] = "ð“…", -- U+1D4C5 - ["psgr"] = "ψ", -- U+003C8 - ["psi"] = "ψ", -- U+003C8 - ["puncsp"] = " ", -- U+02008 - ["qfr"] = "ð”®", -- U+1D52E - ["qint"] = "⨌", -- U+02A0C - ["qopf"] = "ð•¢", -- U+1D562 - ["qprime"] = "â—", -- U+02057 - ["qscr"] = "ð“†", -- U+1D4C6 - ["quaternions"] = "â„", -- U+0210D - ["quatint"] = "⨖", -- U+02A16 - ["quest"] = "?", -- U+0003F - ["questeq"] = "≟", -- U+0225F - ["quot"] = "\"", -- U+00022 - ["rAarr"] = "⇛", -- U+021DB - ["rArr"] = "⇒", -- U+021D2 - ["rAtail"] = "⤜", -- U+0291C - ["rBarr"] = "â¤", -- U+0290F - ["rHar"] = "⥤", -- U+02964 - ["race"] = "∽̱", -- U+0223D 00331 - ["racute"] = "Å•", -- U+00155 - ["radic"] = "√", -- U+0221A - ["raemptyv"] = "⦳", -- U+029B3 - ["rang"] = "⟩", -- U+027E9 - ["rangd"] = "⦒", -- U+02992 - ["range"] = "⦥", -- U+029A5 - ["rangle"] = "⟩", -- U+027E9 - ["raquo"] = "»", -- U+000BB - ["rarr"] = "→", -- U+02192 - ["rarrap"] = "⥵", -- U+02975 - ["rarrb"] = "⇥", -- U+021E5 - ["rarrbfs"] = "⤠", -- U+02920 - ["rarrc"] = "⤳", -- U+02933 - ["rarrfs"] = "⤞", -- U+0291E - ["rarrhk"] = "↪", -- U+021AA - ["rarrlp"] = "↬", -- U+021AC - ["rarrpl"] = "⥅", -- U+02945 - ["rarrsim"] = "⥴", -- U+02974 - ["rarrtl"] = "↣", -- U+021A3 - ["rarrw"] = "â†", -- U+0219D - ["ratail"] = "⤚", -- U+0291A - ["ratio"] = "∶", -- U+02236 - ["rationals"] = "â„š", -- U+0211A - ["rbarr"] = "â¤", -- U+0290D - ["rbbrk"] = "â³", -- U+02773 - ["rbrace"] = "}", -- U+0007D - ["rbrack"] = "]", -- U+0005D - ["rbrke"] = "⦌", -- U+0298C - ["rbrksld"] = "⦎", -- U+0298E - ["rbrkslu"] = "â¦", -- U+02990 - ["rcaron"] = "Å™", -- U+00159 - ["rcedil"] = "Å—", -- U+00157 - ["rceil"] = "⌉", -- U+02309 - ["rcub"] = "}", -- U+0007D - ["rcy"] = "Ñ€", -- U+00440 - ["rdca"] = "⤷", -- U+02937 - ["rdldhar"] = "⥩", -- U+02969 - ["rdquo"] = "â€", -- U+0201D - ["rdquor"] = "â€", -- U+0201D - ["rdsh"] = "↳", -- U+021B3 - ["real"] = "â„œ", -- U+0211C - ["realine"] = "â„›", -- U+0211B - ["realpart"] = "â„œ", -- U+0211C - ["reals"] = "â„", -- U+0211D - ["rect"] = "â–­", -- U+025AD - ["reg"] = "®", -- U+000AE - ["rfisht"] = "⥽", -- U+0297D - ["rfloor"] = "⌋", -- U+0230B - ["rfr"] = "ð”¯", -- U+1D52F - ["rgr"] = "Ï", -- U+003C1 - ["rhard"] = "â‡", -- U+021C1 - ["rharu"] = "⇀", -- U+021C0 - ["rharul"] = "⥬", -- U+0296C - ["rho"] = "Ï", -- U+003C1 - ["rhov"] = "ϱ", -- U+003F1 - 
["rightarrow"] = "→", -- U+02192 - ["rightarrowtail"] = "↣", -- U+021A3 - ["rightharpoondown"] = "â‡", -- U+021C1 - ["rightharpoonup"] = "⇀", -- U+021C0 - ["rightleftarrows"] = "⇄", -- U+021C4 - ["rightleftharpoons"] = "⇌", -- U+021CC - ["rightrightarrows"] = "⇉", -- U+021C9 - ["rightsquigarrow"] = "â†", -- U+0219D - ["rightthreetimes"] = "â‹Œ", -- U+022CC - ["ring"] = "Ëš", -- U+002DA - ["risingdotseq"] = "≓", -- U+02253 - ["rlarr"] = "⇄", -- U+021C4 - ["rlhar"] = "⇌", -- U+021CC - ["rlm"] = "â€", -- U+0200F - ["rmoust"] = "⎱", -- U+023B1 - ["rmoustache"] = "⎱", -- U+023B1 - ["rnmid"] = "â«®", -- U+02AEE - ["roang"] = "⟭", -- U+027ED - ["roarr"] = "⇾", -- U+021FE - ["robrk"] = "⟧", -- U+027E7 - ["ropar"] = "⦆", -- U+02986 - ["ropf"] = "ð•£", -- U+1D563 - ["roplus"] = "⨮", -- U+02A2E - ["rotimes"] = "⨵", -- U+02A35 - ["rpar"] = ")", -- U+00029 - ["rpargt"] = "⦔", -- U+02994 - ["rppolint"] = "⨒", -- U+02A12 - ["rrarr"] = "⇉", -- U+021C9 - ["rsaquo"] = "›", -- U+0203A - ["rscr"] = "ð“‡", -- U+1D4C7 - ["rsh"] = "↱", -- U+021B1 - ["rsqb"] = "]", -- U+0005D - ["rsquo"] = "’", -- U+02019 - ["rsquor"] = "’", -- U+02019 - ["rthree"] = "â‹Œ", -- U+022CC - ["rtimes"] = "â‹Š", -- U+022CA - ["rtri"] = "â–¹", -- U+025B9 - ["rtrie"] = "⊵", -- U+022B5 - ["rtrif"] = "â–¸", -- U+025B8 - ["rtriltri"] = "⧎", -- U+029CE - ["ruluhar"] = "⥨", -- U+02968 - ["rx"] = "â„ž", -- U+0211E - ["sacute"] = "Å›", -- U+0015B - ["sbquo"] = "‚", -- U+0201A - ["sc"] = "≻", -- U+0227B - ["scE"] = "⪴", -- U+02AB4 - ["scap"] = "⪸", -- U+02AB8 - ["scaron"] = "Å¡", -- U+00161 - ["sccue"] = "≽", -- U+0227D - ["sce"] = "⪰", -- U+02AB0 - ["scedil"] = "ÅŸ", -- U+0015F - ["scirc"] = "Å", -- U+0015D - ["scnE"] = "⪶", -- U+02AB6 - ["scnap"] = "⪺", -- U+02ABA - ["scnsim"] = "â‹©", -- U+022E9 - ["scpolint"] = "⨓", -- U+02A13 - ["scsim"] = "≿", -- U+0227F - ["scy"] = "Ñ", -- U+00441 - ["sdot"] = "â‹…", -- U+022C5 - ["sdotb"] = "⊡", -- U+022A1 - ["sdote"] = "⩦", -- U+02A66 - ["seArr"] = "⇘", -- U+021D8 - ["searhk"] = "⤥", -- U+02925 - ["searr"] = "↘", -- U+02198 - ["searrow"] = "↘", -- U+02198 - ["sect"] = "§", -- U+000A7 - ["semi"] = ";", -- U+0003B - ["seswar"] = "⤩", -- U+02929 - ["setminus"] = "∖", -- U+02216 - ["setmn"] = "∖", -- U+02216 - ["sext"] = "✶", -- U+02736 - ["sfgr"] = "Ï‚", -- U+003C2 - ["sfr"] = "ð”°", -- U+1D530 - ["sfrown"] = "⌢", -- U+02322 - ["sgr"] = "σ", -- U+003C3 - ["sharp"] = "♯", -- U+0266F - ["shchcy"] = "щ", -- U+00449 - ["shcy"] = "ш", -- U+00448 - ["shortmid"] = "∣", -- U+02223 - ["shortparallel"] = "∥", -- U+02225 - ["shy"] = "­", -- U+000AD - ["sigma"] = "σ", -- U+003C3 - ["sigmaf"] = "Ï‚", -- U+003C2 - ["sigmav"] = "Ï‚", -- U+003C2 - ["sim"] = "∼", -- U+0223C - ["simdot"] = "⩪", -- U+02A6A - ["sime"] = "≃", -- U+02243 - ["simeq"] = "≃", -- U+02243 - ["simg"] = "⪞", -- U+02A9E - ["simgE"] = "⪠", -- U+02AA0 - ["siml"] = "âª", -- U+02A9D - ["simlE"] = "⪟", -- U+02A9F - ["simne"] = "≆", -- U+02246 - ["simplus"] = "⨤", -- U+02A24 - ["simrarr"] = "⥲", -- U+02972 - ["slarr"] = "â†", -- U+02190 - ["smallsetminus"] = "∖", -- U+02216 - ["smashp"] = "⨳", -- U+02A33 - ["smeparsl"] = "⧤", -- U+029E4 - ["smid"] = "∣", -- U+02223 - ["smile"] = "⌣", -- U+02323 - ["smt"] = "⪪", -- U+02AAA - ["smte"] = "⪬", -- U+02AAC - ["smtes"] = "⪬︀", -- U+02AAC 0FE00 - ["softcy"] = "ÑŒ", -- U+0044C - ["sol"] = "/", -- U+0002F - ["solb"] = "⧄", -- U+029C4 - ["solbar"] = "⌿", -- U+0233F - ["sopf"] = "ð•¤", -- U+1D564 - ["spades"] = "â™ ", -- U+02660 - ["spadesuit"] = "â™ ", -- U+02660 - ["spar"] = "∥", -- U+02225 - ["sqcap"] = "⊓", -- 
U+02293 - ["sqcaps"] = "⊓︀", -- U+02293 0FE00 - ["sqcup"] = "⊔", -- U+02294 - ["sqcups"] = "⊔︀", -- U+02294 0FE00 - ["sqsub"] = "âŠ", -- U+0228F - ["sqsube"] = "⊑", -- U+02291 - ["sqsubset"] = "âŠ", -- U+0228F - ["sqsubseteq"] = "⊑", -- U+02291 - ["sqsup"] = "âŠ", -- U+02290 - ["sqsupe"] = "⊒", -- U+02292 - ["sqsupset"] = "âŠ", -- U+02290 - ["sqsupseteq"] = "⊒", -- U+02292 - ["squ"] = "â–¡", -- U+025A1 - ["square"] = "â–¡", -- U+025A1 - ["squarf"] = "â–ª", -- U+025AA - ["squf"] = "â–ª", -- U+025AA - ["srarr"] = "→", -- U+02192 - ["sscr"] = "ð“ˆ", -- U+1D4C8 - ["ssetmn"] = "∖", -- U+02216 - ["ssmile"] = "⌣", -- U+02323 - ["sstarf"] = "⋆", -- U+022C6 - ["star"] = "☆", -- U+02606 - ["starf"] = "★", -- U+02605 - ["straightepsilon"] = "ϵ", -- U+003F5 - ["straightphi"] = "Ï•", -- U+003D5 - ["strns"] = "¯", -- U+000AF - ["sub"] = "⊂", -- U+02282 - ["subE"] = "â«…", -- U+02AC5 - ["subdot"] = "⪽", -- U+02ABD - ["sube"] = "⊆", -- U+02286 - ["subedot"] = "⫃", -- U+02AC3 - ["submult"] = "â«", -- U+02AC1 - ["subnE"] = "â«‹", -- U+02ACB - ["subne"] = "⊊", -- U+0228A - ["subplus"] = "⪿", -- U+02ABF - ["subrarr"] = "⥹", -- U+02979 - ["subset"] = "⊂", -- U+02282 - ["subseteq"] = "⊆", -- U+02286 - ["subseteqq"] = "â«…", -- U+02AC5 - ["subsetneq"] = "⊊", -- U+0228A - ["subsetneqq"] = "â«‹", -- U+02ACB - ["subsim"] = "⫇", -- U+02AC7 - ["subsub"] = "â«•", -- U+02AD5 - ["subsup"] = "â«“", -- U+02AD3 - ["succ"] = "≻", -- U+0227B - ["succapprox"] = "⪸", -- U+02AB8 - ["succcurlyeq"] = "≽", -- U+0227D - ["succeq"] = "⪰", -- U+02AB0 - ["succnapprox"] = "⪺", -- U+02ABA - ["succneqq"] = "⪶", -- U+02AB6 - ["succnsim"] = "â‹©", -- U+022E9 - ["succsim"] = "≿", -- U+0227F - ["sum"] = "∑", -- U+02211 - ["sung"] = "♪", -- U+0266A - ["sup"] = "⊃", -- U+02283 - ["sup1"] = "¹", -- U+000B9 - ["sup2"] = "²", -- U+000B2 - ["sup3"] = "³", -- U+000B3 - ["supE"] = "⫆", -- U+02AC6 - ["supdot"] = "⪾", -- U+02ABE - ["supdsub"] = "⫘", -- U+02AD8 - ["supe"] = "⊇", -- U+02287 - ["supedot"] = "â«„", -- U+02AC4 - ["suphsol"] = "⟉", -- U+027C9 - ["suphsub"] = "â«—", -- U+02AD7 - ["suplarr"] = "⥻", -- U+0297B - ["supmult"] = "â«‚", -- U+02AC2 - ["supnE"] = "â«Œ", -- U+02ACC - ["supne"] = "⊋", -- U+0228B - ["supplus"] = "â«€", -- U+02AC0 - ["supset"] = "⊃", -- U+02283 - ["supseteq"] = "⊇", -- U+02287 - ["supseteqq"] = "⫆", -- U+02AC6 - ["supsetneq"] = "⊋", -- U+0228B - ["supsetneqq"] = "â«Œ", -- U+02ACC - ["supsim"] = "⫈", -- U+02AC8 - ["supsub"] = "â«”", -- U+02AD4 - ["supsup"] = "â«–", -- U+02AD6 - ["swArr"] = "⇙", -- U+021D9 - ["swarhk"] = "⤦", -- U+02926 - ["swarr"] = "↙", -- U+02199 - ["swarrow"] = "↙", -- U+02199 - ["swnwar"] = "⤪", -- U+0292A - ["szlig"] = "ß", -- U+000DF - ["target"] = "⌖", -- U+02316 - ["tau"] = "Ï„", -- U+003C4 - ["tbrk"] = "⎴", -- U+023B4 - ["tcaron"] = "Å¥", -- U+00165 - ["tcedil"] = "Å£", -- U+00163 - ["tcy"] = "Ñ‚", -- U+00442 - ["tdot"] = "⃛", -- U+020DB - ["telrec"] = "⌕", -- U+02315 - ["tfr"] = "ð”±", -- U+1D531 - ["tgr"] = "Ï„", -- U+003C4 - ["there4"] = "∴", -- U+02234 - ["therefore"] = "∴", -- U+02234 - ["theta"] = "θ", -- U+003B8 - ["thetasym"] = "Ï‘", -- U+003D1 - ["thetav"] = "Ï‘", -- U+003D1 - ["thgr"] = "θ", -- U+003B8 - ["thickapprox"] = "≈", -- U+02248 - ["thicksim"] = "∼", -- U+0223C - ["thinsp"] = " ", -- U+02009 - ["thkap"] = "≈", -- U+02248 - ["thksim"] = "∼", -- U+0223C - ["thorn"] = "þ", -- U+000FE - ["tilde"] = "Ëœ", -- U+002DC - ["times"] = "×", -- U+000D7 - ["timesb"] = "⊠", -- U+022A0 - ["timesbar"] = "⨱", -- U+02A31 - ["timesd"] = "⨰", -- U+02A30 - ["tint"] = "∭", -- U+0222D - ["toea"] = 
"⤨", -- U+02928 - ["top"] = "⊤", -- U+022A4 - ["topbot"] = "⌶", -- U+02336 - ["topcir"] = "⫱", -- U+02AF1 - ["topf"] = "ð•¥", -- U+1D565 - ["topfork"] = "â«š", -- U+02ADA - ["tosa"] = "⤩", -- U+02929 - ["tprime"] = "‴", -- U+02034 - ["trade"] = "â„¢", -- U+02122 - ["triangle"] = "â–µ", -- U+025B5 - ["triangledown"] = "â–¿", -- U+025BF - ["triangleleft"] = "â—ƒ", -- U+025C3 - ["trianglelefteq"] = "⊴", -- U+022B4 - ["triangleq"] = "≜", -- U+0225C - ["triangleright"] = "â–¹", -- U+025B9 - ["trianglerighteq"] = "⊵", -- U+022B5 - ["tridot"] = "â—¬", -- U+025EC - ["trie"] = "≜", -- U+0225C - ["triminus"] = "⨺", -- U+02A3A - ["triplus"] = "⨹", -- U+02A39 - ["trisb"] = "â§", -- U+029CD - ["tritime"] = "⨻", -- U+02A3B - ["trpezium"] = "â¢", -- U+023E2 - ["tscr"] = "ð“‰", -- U+1D4C9 - ["tscy"] = "ц", -- U+00446 - ["tshcy"] = "Ñ›", -- U+0045B - ["tstrok"] = "ŧ", -- U+00167 - ["twixt"] = "≬", -- U+0226C - ["twoheadleftarrow"] = "↞", -- U+0219E - ["twoheadrightarrow"] = "↠", -- U+021A0 - ["uArr"] = "⇑", -- U+021D1 - ["uHar"] = "⥣", -- U+02963 - ["uacgr"] = "Ï", -- U+003CD - ["uacute"] = "ú", -- U+000FA - ["uarr"] = "↑", -- U+02191 - ["ubrcy"] = "Ñž", -- U+0045E - ["ubreve"] = "Å­", -- U+0016D - ["ucirc"] = "û", -- U+000FB - ["ucy"] = "у", -- U+00443 - ["udarr"] = "⇅", -- U+021C5 - ["udblac"] = "ű", -- U+00171 - ["udhar"] = "⥮", -- U+0296E - ["udiagr"] = "ΰ", -- U+003B0 - ["udigr"] = "Ï‹", -- U+003CB - ["ufisht"] = "⥾", -- U+0297E - ["ufr"] = "ð”²", -- U+1D532 - ["ugr"] = "Ï…", -- U+003C5 - ["ugrave"] = "ù", -- U+000F9 - ["uharl"] = "↿", -- U+021BF - ["uharr"] = "↾", -- U+021BE - ["uhblk"] = "â–€", -- U+02580 - ["ulcorn"] = "⌜", -- U+0231C - ["ulcorner"] = "⌜", -- U+0231C - ["ulcrop"] = "âŒ", -- U+0230F - ["ultri"] = "â—¸", -- U+025F8 - ["umacr"] = "Å«", -- U+0016B - ["uml"] = "¨", -- U+000A8 - ["uogon"] = "ų", -- U+00173 - ["uopf"] = "ð•¦", -- U+1D566 - ["uparrow"] = "↑", -- U+02191 - ["updownarrow"] = "↕", -- U+02195 - ["upharpoonleft"] = "↿", -- U+021BF - ["upharpoonright"] = "↾", -- U+021BE - ["uplus"] = "⊎", -- U+0228E - ["upsi"] = "Ï…", -- U+003C5 - ["upsih"] = "Ï’", -- U+003D2 - ["upsilon"] = "Ï…", -- U+003C5 - ["upuparrows"] = "⇈", -- U+021C8 - ["urcorn"] = "âŒ", -- U+0231D - ["urcorner"] = "âŒ", -- U+0231D - ["urcrop"] = "⌎", -- U+0230E - ["uring"] = "ů", -- U+0016F - ["urtri"] = "â—¹", -- U+025F9 - ["uscr"] = "ð“Š", -- U+1D4CA - ["utdot"] = "â‹°", -- U+022F0 - ["utilde"] = "Å©", -- U+00169 - ["utri"] = "â–µ", -- U+025B5 - ["utrif"] = "â–´", -- U+025B4 - ["uuarr"] = "⇈", -- U+021C8 - ["uuml"] = "ü", -- U+000FC - ["uwangle"] = "⦧", -- U+029A7 - ["vArr"] = "⇕", -- U+021D5 - ["vBar"] = "⫨", -- U+02AE8 - ["vBarv"] = "â«©", -- U+02AE9 - ["vDash"] = "⊨", -- U+022A8 - ["vangrt"] = "⦜", -- U+0299C - ["varepsilon"] = "ϵ", -- U+003F5 - ["varkappa"] = "Ï°", -- U+003F0 - ["varnothing"] = "∅", -- U+02205 - ["varphi"] = "Ï•", -- U+003D5 - ["varpi"] = "Ï–", -- U+003D6 - ["varpropto"] = "âˆ", -- U+0221D - ["varr"] = "↕", -- U+02195 - ["varrho"] = "ϱ", -- U+003F1 - ["varsigma"] = "Ï‚", -- U+003C2 - ["varsubsetneq"] = "⊊︀", -- U+0228A 0FE00 - ["varsubsetneqq"] = "⫋︀", -- U+02ACB 0FE00 - ["varsupsetneq"] = "⊋︀", -- U+0228B 0FE00 - ["varsupsetneqq"] = "⫌︀", -- U+02ACC 0FE00 - ["vartheta"] = "Ï‘", -- U+003D1 - ["vartriangleleft"] = "⊲", -- U+022B2 - ["vartriangleright"] = "⊳", -- U+022B3 - ["vcy"] = "в", -- U+00432 - ["vdash"] = "⊢", -- U+022A2 - ["vee"] = "∨", -- U+02228 - ["veebar"] = "⊻", -- U+022BB - ["veeeq"] = "≚", -- U+0225A - ["vellip"] = "â‹®", -- U+022EE - ["verbar"] = "|", -- U+0007C - ["vert"] = "|", -- 
U+0007C - ["vfr"] = "ð”³", -- U+1D533 - ["vltri"] = "⊲", -- U+022B2 - ["vnsub"] = "⊂⃒", -- U+02282 020D2 - ["vnsup"] = "⊃⃒", -- U+02283 020D2 - ["vopf"] = "ð•§", -- U+1D567 - ["vprop"] = "âˆ", -- U+0221D - ["vrtri"] = "⊳", -- U+022B3 - ["vscr"] = "ð“‹", -- U+1D4CB - ["vsubnE"] = "⫋︀", -- U+02ACB 0FE00 - ["vsubne"] = "⊊︀", -- U+0228A 0FE00 - ["vsupnE"] = "⫌︀", -- U+02ACC 0FE00 - ["vsupne"] = "⊋︀", -- U+0228B 0FE00 - ["vzigzag"] = "⦚", -- U+0299A - ["wcirc"] = "ŵ", -- U+00175 - ["wedbar"] = "â©Ÿ", -- U+02A5F - ["wedge"] = "∧", -- U+02227 - ["wedgeq"] = "≙", -- U+02259 - ["weierp"] = "℘", -- U+02118 - ["wfr"] = "ð”´", -- U+1D534 - ["wopf"] = "ð•¨", -- U+1D568 - ["wp"] = "℘", -- U+02118 - ["wr"] = "≀", -- U+02240 - ["wreath"] = "≀", -- U+02240 - ["wscr"] = "ð“Œ", -- U+1D4CC - ["xcap"] = "â‹‚", -- U+022C2 - ["xcirc"] = "â—¯", -- U+025EF - ["xcup"] = "⋃", -- U+022C3 - ["xdtri"] = "â–½", -- U+025BD - ["xfr"] = "ð”µ", -- U+1D535 - ["xgr"] = "ξ", -- U+003BE - ["xhArr"] = "⟺", -- U+027FA - ["xharr"] = "⟷", -- U+027F7 - ["xi"] = "ξ", -- U+003BE - ["xlArr"] = "⟸", -- U+027F8 - ["xlarr"] = "⟵", -- U+027F5 - ["xmap"] = "⟼", -- U+027FC - ["xnis"] = "â‹»", -- U+022FB - ["xodot"] = "⨀", -- U+02A00 - ["xopf"] = "ð•©", -- U+1D569 - ["xoplus"] = "â¨", -- U+02A01 - ["xotime"] = "⨂", -- U+02A02 - ["xrArr"] = "⟹", -- U+027F9 - ["xrarr"] = "⟶", -- U+027F6 - ["xscr"] = "ð“", -- U+1D4CD - ["xsqcup"] = "⨆", -- U+02A06 - ["xuplus"] = "⨄", -- U+02A04 - ["xutri"] = "â–³", -- U+025B3 - ["xvee"] = "â‹", -- U+022C1 - ["xwedge"] = "â‹€", -- U+022C0 - ["yacute"] = "ý", -- U+000FD - ["yacy"] = "Ñ", -- U+0044F - ["ycirc"] = "Å·", -- U+00177 - ["ycy"] = "Ñ‹", -- U+0044B - ["yen"] = "Â¥", -- U+000A5 - ["yfr"] = "ð”¶", -- U+1D536 - ["yicy"] = "Ñ—", -- U+00457 - ["yopf"] = "ð•ª", -- U+1D56A - ["yscr"] = "ð“Ž", -- U+1D4CE - ["yucy"] = "ÑŽ", -- U+0044E - ["yuml"] = "ÿ", -- U+000FF - ["zacute"] = "ź", -- U+0017A - ["zcaron"] = "ž", -- U+0017E - ["zcy"] = "з", -- U+00437 - ["zdot"] = "ż", -- U+0017C - ["zeetrf"] = "ℨ", -- U+02128 - ["zeta"] = "ζ", -- U+003B6 - ["zfr"] = "ð”·", -- U+1D537 - ["zgr"] = "ζ", -- U+003B6 - ["zhcy"] = "ж", -- U+00436 - ["zigrarr"] = "â‡", -- U+021DD - ["zopf"] = "ð•«", -- U+1D56B - ["zscr"] = "ð“", -- U+1D4CF - ["zwj"] = "â€", -- U+0200D - ["zwnj"] = "‌", -- U+0200C -} - -characters = characters or { } -characters.entities = entities - -entities.plusminus = "±" -- 0x000B1 -entities.minusplus = "∓" -- 0x02213 -entities.cdots = utf.char(0x02026) -- U+02026 +if not modules then modules = { } end modules ['char-ent'] = { + version = 1.001, + comment = "companion to math-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "derived from the mathml 2.0 specification", + dataonly = true, +} + +-- http://www.w3.org/2003/entities/2007/w3centities-f.ent +-- http://www.w3.org/2003/entities/2007/htmlmathml-f.ent + +local entities = utilities.storage.allocate { + ["AElig"] = "Æ", -- U+000C6 + ["AMP"] = "&", -- U+00026 + ["Aacgr"] = "Ά", -- U+00386 + ["Aacute"] = "Ã", -- U+000C1 + ["Abreve"] = "Ä‚", -- U+00102 + ["Acirc"] = "Â", -- U+000C2 + ["Acy"] = "Ð", -- U+00410 + ["Afr"] = "ð”„", -- U+1D504 + ["Agr"] = "Α", -- U+00391 + ["Agrave"] = "À", -- U+000C0 + ["Alpha"] = "Α", -- U+00391 + ["Amacr"] = "Ä€", -- U+00100 + ["And"] = "â©“", -- U+02A53 + ["Aogon"] = "Ä„", -- U+00104 + ["Aopf"] = "ð”¸", -- U+1D538 + ["ApplyFunction"] = "â¡", -- U+02061 + ["Aring"] = "Ã…", -- U+000C5 + ["Ascr"] = "ð’œ", -- U+1D49C + ["Assign"] = "≔", -- U+02254 + ["Atilde"] = "Ã", -- U+000C3 + ["Auml"] = "Ä", -- U+000C4 + 
["Backslash"] = "∖", -- U+02216 + ["Barv"] = "⫧", -- U+02AE7 + ["Barwed"] = "⌆", -- U+02306 + ["Bcy"] = "Б", -- U+00411 + ["Because"] = "∵", -- U+02235 + ["Bernoullis"] = "ℬ", -- U+0212C + ["Beta"] = "Î’", -- U+00392 + ["Bfr"] = "ð”…", -- U+1D505 + ["Bgr"] = "Î’", -- U+00392 + ["Bopf"] = "ð”¹", -- U+1D539 + ["Breve"] = "˘", -- U+002D8 + ["Bscr"] = "ℬ", -- U+0212C + ["Bumpeq"] = "≎", -- U+0224E + ["CHcy"] = "Ч", -- U+00427 + ["COPY"] = "©", -- U+000A9 + ["Cacute"] = "Ć", -- U+00106 + ["Cap"] = "â‹’", -- U+022D2 + ["CapitalDifferentialD"] = "â……", -- U+02145 + ["Cayleys"] = "â„­", -- U+0212D + ["Ccaron"] = "ÄŒ", -- U+0010C + ["Ccedil"] = "Ç", -- U+000C7 + ["Ccirc"] = "Ĉ", -- U+00108 + ["Cconint"] = "∰", -- U+02230 + ["Cdot"] = "ÄŠ", -- U+0010A + ["Cedilla"] = "¸", -- U+000B8 + ["CenterDot"] = "·", -- U+000B7 + ["Cfr"] = "â„­", -- U+0212D + ["Chi"] = "Χ", -- U+003A7 + ["CircleDot"] = "⊙", -- U+02299 + ["CircleMinus"] = "⊖", -- U+02296 + ["CirclePlus"] = "⊕", -- U+02295 + ["CircleTimes"] = "⊗", -- U+02297 + ["ClockwiseContourIntegral"] = "∲", -- U+02232 + ["CloseCurlyDoubleQuote"] = "â€", -- U+0201D + ["CloseCurlyQuote"] = "’", -- U+02019 + ["Colon"] = "∷", -- U+02237 + ["Colone"] = "â©´", -- U+02A74 + ["Congruent"] = "≡", -- U+02261 + ["Conint"] = "∯", -- U+0222F + ["ContourIntegral"] = "∮", -- U+0222E + ["Copf"] = "â„‚", -- U+02102 + ["Coproduct"] = "âˆ", -- U+02210 + ["CounterClockwiseContourIntegral"] = "∳", -- U+02233 + ["Cross"] = "⨯", -- U+02A2F + ["Cscr"] = "ð’ž", -- U+1D49E + ["Cup"] = "â‹“", -- U+022D3 + ["CupCap"] = "â‰", -- U+0224D + ["DD"] = "â……", -- U+02145 + ["DDotrahd"] = "⤑", -- U+02911 + ["DJcy"] = "Ђ", -- U+00402 + ["DScy"] = "Ð…", -- U+00405 + ["DZcy"] = "Ð", -- U+0040F + ["Dagger"] = "‡", -- U+02021 + ["Darr"] = "↡", -- U+021A1 + ["Dashv"] = "⫤", -- U+02AE4 + ["Dcaron"] = "ÄŽ", -- U+0010E + ["Dcy"] = "Д", -- U+00414 + ["Del"] = "∇", -- U+02207 + ["Delta"] = "Δ", -- U+00394 + ["Dfr"] = "ð”‡", -- U+1D507 + ["Dgr"] = "Δ", -- U+00394 + ["DiacriticalAcute"] = "´", -- U+000B4 + ["DiacriticalDot"] = "Ë™", -- U+002D9 + ["DiacriticalDoubleAcute"] = "Ë", -- U+002DD + ["DiacriticalGrave"] = "`", -- U+00060 + ["DiacriticalTilde"] = "Ëœ", -- U+002DC + ["Diamond"] = "â‹„", -- U+022C4 + ["DifferentialD"] = "â…†", -- U+02146 + ["Dopf"] = "ð”»", -- U+1D53B + ["Dot"] = "¨", -- U+000A8 + ["DotDot"] = "⃜", -- U+020DC + ["DotEqual"] = "â‰", -- U+02250 + ["DoubleContourIntegral"] = "∯", -- U+0222F + ["DoubleDot"] = "¨", -- U+000A8 + ["DoubleDownArrow"] = "⇓", -- U+021D3 + ["DoubleLeftArrow"] = "â‡", -- U+021D0 + ["DoubleLeftRightArrow"] = "⇔", -- U+021D4 + ["DoubleLeftTee"] = "⫤", -- U+02AE4 + ["DoubleLongLeftArrow"] = "⟸", -- U+027F8 + ["DoubleLongLeftRightArrow"] = "⟺", -- U+027FA + ["DoubleLongRightArrow"] = "⟹", -- U+027F9 + ["DoubleRightArrow"] = "⇒", -- U+021D2 + ["DoubleRightTee"] = "⊨", -- U+022A8 + ["DoubleUpArrow"] = "⇑", -- U+021D1 + ["DoubleUpDownArrow"] = "⇕", -- U+021D5 + ["DoubleVerticalBar"] = "∥", -- U+02225 + ["DownArrow"] = "↓", -- U+02193 + ["DownArrowBar"] = "⤓", -- U+02913 + ["DownArrowUpArrow"] = "⇵", -- U+021F5 + ["DownBreve"] = "Ì‘", -- U+00311 + ["DownLeftRightVector"] = "â¥", -- U+02950 + ["DownLeftTeeVector"] = "⥞", -- U+0295E + ["DownLeftVector"] = "↽", -- U+021BD + ["DownLeftVectorBar"] = "⥖", -- U+02956 + ["DownRightTeeVector"] = "⥟", -- U+0295F + ["DownRightVector"] = "â‡", -- U+021C1 + ["DownRightVectorBar"] = "⥗", -- U+02957 + ["DownTee"] = "⊤", -- U+022A4 + ["DownTeeArrow"] = "↧", -- U+021A7 + ["Downarrow"] = "⇓", -- U+021D3 + ["Dscr"] = "ð’Ÿ", -- U+1D49F + 
["Dstrok"] = "Ä", -- U+00110 + ["EEacgr"] = "Ή", -- U+00389 + ["EEgr"] = "Η", -- U+00397 + ["ENG"] = "ÅŠ", -- U+0014A + ["ETH"] = "Ã", -- U+000D0 + ["Eacgr"] = "Έ", -- U+00388 + ["Eacute"] = "É", -- U+000C9 + ["Ecaron"] = "Äš", -- U+0011A + ["Ecirc"] = "Ê", -- U+000CA + ["Ecy"] = "Э", -- U+0042D + ["Edot"] = "Ä–", -- U+00116 + ["Efr"] = "ð”ˆ", -- U+1D508 + ["Egr"] = "Ε", -- U+00395 + ["Egrave"] = "È", -- U+000C8 + ["Element"] = "∈", -- U+02208 + ["Emacr"] = "Ä’", -- U+00112 + ["EmptySmallSquare"] = "â—»", -- U+025FB + ["EmptyVerySmallSquare"] = "â–«", -- U+025AB + ["Eogon"] = "Ę", -- U+00118 + ["Eopf"] = "ð”¼", -- U+1D53C + ["Epsilon"] = "Ε", -- U+00395 + ["Equal"] = "⩵", -- U+02A75 + ["EqualTilde"] = "≂", -- U+02242 + ["Equilibrium"] = "⇌", -- U+021CC + ["Escr"] = "â„°", -- U+02130 + ["Esim"] = "⩳", -- U+02A73 + ["Eta"] = "Η", -- U+00397 + ["Euml"] = "Ë", -- U+000CB + ["Exists"] = "∃", -- U+02203 + ["ExponentialE"] = "â…‡", -- U+02147 + ["Fcy"] = "Ф", -- U+00424 + ["Ffr"] = "ð”‰", -- U+1D509 + ["FilledSmallSquare"] = "â—¼", -- U+025FC + ["FilledVerySmallSquare"] = "â–ª", -- U+025AA + ["Fopf"] = "ð”½", -- U+1D53D + ["ForAll"] = "∀", -- U+02200 + ["Fouriertrf"] = "ℱ", -- U+02131 + ["Fscr"] = "ℱ", -- U+02131 + ["GJcy"] = "Ѓ", -- U+00403 + ["GT"] = ">", -- U+0003E + ["Gamma"] = "Γ", -- U+00393 + ["Gammad"] = "Ïœ", -- U+003DC + ["Gbreve"] = "Äž", -- U+0011E + ["Gcedil"] = "Ä¢", -- U+00122 + ["Gcirc"] = "Äœ", -- U+0011C + ["Gcy"] = "Г", -- U+00413 + ["Gdot"] = "Ä ", -- U+00120 + ["Gfr"] = "ð”Š", -- U+1D50A + ["Gg"] = "â‹™", -- U+022D9 + ["Ggr"] = "Γ", -- U+00393 + ["Gopf"] = "ð”¾", -- U+1D53E + ["GreaterEqual"] = "≥", -- U+02265 + ["GreaterEqualLess"] = "â‹›", -- U+022DB + ["GreaterFullEqual"] = "≧", -- U+02267 + ["GreaterGreater"] = "⪢", -- U+02AA2 + ["GreaterLess"] = "≷", -- U+02277 + ["GreaterSlantEqual"] = "⩾", -- U+02A7E + ["GreaterTilde"] = "≳", -- U+02273 + ["Gscr"] = "ð’¢", -- U+1D4A2 + ["Gt"] = "≫", -- U+0226B + ["HARDcy"] = "Ъ", -- U+0042A + ["Hacek"] = "ˇ", -- U+002C7 + ["Hat"] = "^", -- U+0005E + ["Hcirc"] = "Ĥ", -- U+00124 + ["Hfr"] = "â„Œ", -- U+0210C + ["HilbertSpace"] = "â„‹", -- U+0210B + ["Hopf"] = "â„", -- U+0210D + ["HorizontalLine"] = "─", -- U+02500 + ["Hscr"] = "â„‹", -- U+0210B + ["Hstrok"] = "Ħ", -- U+00126 + ["HumpDownHump"] = "≎", -- U+0224E + ["HumpEqual"] = "â‰", -- U+0224F + ["IEcy"] = "Е", -- U+00415 + ["IJlig"] = "IJ", -- U+00132 + ["IOcy"] = "Ð", -- U+00401 + ["Iacgr"] = "Ί", -- U+0038A + ["Iacute"] = "Ã", -- U+000CD + ["Icirc"] = "ÃŽ", -- U+000CE + ["Icy"] = "И", -- U+00418 + ["Idigr"] = "Ϊ", -- U+003AA + ["Idot"] = "Ä°", -- U+00130 + ["Ifr"] = "â„‘", -- U+02111 + ["Igr"] = "Ι", -- U+00399 + ["Igrave"] = "ÃŒ", -- U+000CC + ["Im"] = "â„‘", -- U+02111 + ["Imacr"] = "Ī", -- U+0012A + ["ImaginaryI"] = "â…ˆ", -- U+02148 + ["Implies"] = "⇒", -- U+021D2 + ["Int"] = "∬", -- U+0222C + ["Integral"] = "∫", -- U+0222B + ["Intersection"] = "â‹‚", -- U+022C2 + ["InvisibleComma"] = "â£", -- U+02063 + ["InvisibleTimes"] = "â¢", -- U+02062 + ["Iogon"] = "Ä®", -- U+0012E + ["Iopf"] = "ð•€", -- U+1D540 + ["Iota"] = "Ι", -- U+00399 + ["Iscr"] = "â„", -- U+02110 + ["Itilde"] = "Ĩ", -- U+00128 + ["Iukcy"] = "І", -- U+00406 + ["Iuml"] = "Ã", -- U+000CF + ["Jcirc"] = "Ä´", -- U+00134 + ["Jcy"] = "Й", -- U+00419 + ["Jfr"] = "ð”", -- U+1D50D + ["Jopf"] = "ð•", -- U+1D541 + ["Jscr"] = "ð’¥", -- U+1D4A5 + ["Jsercy"] = "Ј", -- U+00408 + ["Jukcy"] = "Є", -- U+00404 + ["KHcy"] = "Ð¥", -- U+00425 + ["KHgr"] = "Χ", -- U+003A7 + ["KJcy"] = "ÐŒ", -- U+0040C + ["Kappa"] = "Κ", -- U+0039A + 
["Kcedil"] = "Ķ", -- U+00136 + ["Kcy"] = "К", -- U+0041A + ["Kfr"] = "ð”Ž", -- U+1D50E + ["Kgr"] = "Κ", -- U+0039A + ["Kopf"] = "ð•‚", -- U+1D542 + ["Kscr"] = "ð’¦", -- U+1D4A6 + ["LJcy"] = "Љ", -- U+00409 + ["LT"] = "<", -- U+00026 + ["Lacute"] = "Ĺ", -- U+00139 + ["Lambda"] = "Λ", -- U+0039B + ["Lang"] = "⟪", -- U+027EA + ["Laplacetrf"] = "â„’", -- U+02112 + ["Larr"] = "↞", -- U+0219E + ["Lcaron"] = "Ľ", -- U+0013D + ["Lcedil"] = "Ä»", -- U+0013B + ["Lcy"] = "Л", -- U+0041B + ["LeftAngleBracket"] = "⟨", -- U+027E8 + ["LeftArrow"] = "â†", -- U+02190 + ["LeftArrowBar"] = "⇤", -- U+021E4 + ["LeftArrowRightArrow"] = "⇆", -- U+021C6 + ["LeftCeiling"] = "⌈", -- U+02308 + ["LeftDoubleBracket"] = "⟦", -- U+027E6 + ["LeftDownTeeVector"] = "⥡", -- U+02961 + ["LeftDownVector"] = "⇃", -- U+021C3 + ["LeftDownVectorBar"] = "⥙", -- U+02959 + ["LeftFloor"] = "⌊", -- U+0230A + ["LeftRightArrow"] = "↔", -- U+02194 + ["LeftRightVector"] = "⥎", -- U+0294E + ["LeftTee"] = "⊣", -- U+022A3 + ["LeftTeeArrow"] = "↤", -- U+021A4 + ["LeftTeeVector"] = "⥚", -- U+0295A + ["LeftTriangle"] = "⊲", -- U+022B2 + ["LeftTriangleBar"] = "â§", -- U+029CF + ["LeftTriangleEqual"] = "⊴", -- U+022B4 + ["LeftUpDownVector"] = "⥑", -- U+02951 + ["LeftUpTeeVector"] = "⥠", -- U+02960 + ["LeftUpVector"] = "↿", -- U+021BF + ["LeftUpVectorBar"] = "⥘", -- U+02958 + ["LeftVector"] = "↼", -- U+021BC + ["LeftVectorBar"] = "⥒", -- U+02952 + ["Leftarrow"] = "â‡", -- U+021D0 + ["Leftrightarrow"] = "⇔", -- U+021D4 + ["LessEqualGreater"] = "â‹š", -- U+022DA + ["LessFullEqual"] = "≦", -- U+02266 + ["LessGreater"] = "≶", -- U+02276 + ["LessLess"] = "⪡", -- U+02AA1 + ["LessSlantEqual"] = "⩽", -- U+02A7D + ["LessTilde"] = "≲", -- U+02272 + ["Lfr"] = "ð”", -- U+1D50F + ["Lgr"] = "Λ", -- U+0039B + ["Ll"] = "⋘", -- U+022D8 + ["Lleftarrow"] = "⇚", -- U+021DA + ["Lmidot"] = "Ä¿", -- U+0013F + ["LongLeftArrow"] = "⟵", -- U+027F5 + ["LongLeftRightArrow"] = "⟷", -- U+027F7 + ["LongRightArrow"] = "⟶", -- U+027F6 + ["Longleftarrow"] = "⟸", -- U+027F8 + ["Longleftrightarrow"] = "⟺", -- U+027FA + ["Longrightarrow"] = "⟹", -- U+027F9 + ["Lopf"] = "ð•ƒ", -- U+1D543 + ["LowerLeftArrow"] = "↙", -- U+02199 + ["LowerRightArrow"] = "↘", -- U+02198 + ["Lscr"] = "â„’", -- U+02112 + ["Lsh"] = "↰", -- U+021B0 + ["Lstrok"] = "Å", -- U+00141 + ["Lt"] = "≪", -- U+0226A + ["Map"] = "⤅", -- U+02905 + ["Mcy"] = "Ðœ", -- U+0041C + ["MediumSpace"] = "âŸ", -- U+0205F + ["Mellintrf"] = "ℳ", -- U+02133 + ["Mfr"] = "ð”", -- U+1D510 + ["Mgr"] = "Îœ", -- U+0039C + ["MinusPlus"] = "∓", -- U+02213 + ["Mopf"] = "ð•„", -- U+1D544 + ["Mscr"] = "ℳ", -- U+02133 + ["Mu"] = "Îœ", -- U+0039C + ["NJcy"] = "Њ", -- U+0040A + ["Nacute"] = "Ń", -- U+00143 + ["Ncaron"] = "Ň", -- U+00147 + ["Ncedil"] = "Å…", -- U+00145 + ["Ncy"] = "Ð", -- U+0041D + ["NegativeMediumSpace"] = "​", -- U+0200B + ["NegativeThickSpace"] = "​", -- U+0200B + ["NegativeThinSpace"] = "​", -- U+0200B + ["NegativeVeryThinSpace"] = "​", -- U+0200B + ["NestedGreaterGreater"] = "≫", -- U+0226B + ["NestedLessLess"] = "≪", -- U+0226A + ["Nfr"] = "ð”‘", -- U+1D511 + ["Ngr"] = "Î", -- U+0039D + ["NoBreak"] = "â ", -- U+02060 + ["NonBreakingSpace"] = " ", -- U+000A0 + ["Nopf"] = "â„•", -- U+02115 + ["Not"] = "⫬", -- U+02AEC + ["NotCongruent"] = "≢", -- U+02262 + ["NotCupCap"] = "≭", -- U+0226D + ["NotDoubleVerticalBar"] = "∦", -- U+02226 + ["NotElement"] = "∉", -- U+02209 + ["NotEqual"] = "≠", -- U+02260 + ["NotEqualTilde"] = "≂̸", -- U+02242 00338 + ["NotExists"] = "∄", -- U+02204 + ["NotGreater"] = "≯", -- U+0226F + ["NotGreaterEqual"] 
= "≱", -- U+02271 + ["NotGreaterFullEqual"] = "≧̸", -- U+02267 00338 + ["NotGreaterGreater"] = "≫̸", -- U+0226B 00338 + ["NotGreaterLess"] = "≹", -- U+02279 + ["NotGreaterSlantEqual"] = "⩾̸", -- U+02A7E 00338 + ["NotGreaterTilde"] = "≵", -- U+02275 + ["NotHumpDownHump"] = "≎̸", -- U+0224E 00338 + ["NotHumpEqual"] = "â‰Ì¸", -- U+0224F 00338 + ["NotLeftTriangle"] = "⋪", -- U+022EA + ["NotLeftTriangleBar"] = "â§Ì¸", -- U+029CF 00338 + ["NotLeftTriangleEqual"] = "⋬", -- U+022EC + ["NotLess"] = "≮", -- U+0226E + ["NotLessEqual"] = "≰", -- U+02270 + ["NotLessGreater"] = "≸", -- U+02278 + ["NotLessLess"] = "≪̸", -- U+0226A 00338 + ["NotLessSlantEqual"] = "⩽̸", -- U+02A7D 00338 + ["NotLessTilde"] = "≴", -- U+02274 + ["NotNestedGreaterGreater"] = "⪢̸", -- U+02AA2 00338 + ["NotNestedLessLess"] = "⪡̸", -- U+02AA1 00338 + ["NotPrecedes"] = "⊀", -- U+02280 + ["NotPrecedesEqual"] = "⪯̸", -- U+02AAF 00338 + ["NotPrecedesSlantEqual"] = "â‹ ", -- U+022E0 + ["NotReverseElement"] = "∌", -- U+0220C + ["NotRightTriangle"] = "â‹«", -- U+022EB + ["NotRightTriangleBar"] = "â§Ì¸", -- U+029D0 00338 + ["NotRightTriangleEqual"] = "â‹­", -- U+022ED + ["NotSquareSubset"] = "âŠÌ¸", -- U+0228F 00338 + ["NotSquareSubsetEqual"] = "â‹¢", -- U+022E2 + ["NotSquareSuperset"] = "âŠÌ¸", -- U+02290 00338 + ["NotSquareSupersetEqual"] = "â‹£", -- U+022E3 + ["NotSubset"] = "⊂⃒", -- U+02282 020D2 + ["NotSubsetEqual"] = "⊈", -- U+02288 + ["NotSucceeds"] = "âŠ", -- U+02281 + ["NotSucceedsEqual"] = "⪰̸", -- U+02AB0 00338 + ["NotSucceedsSlantEqual"] = "â‹¡", -- U+022E1 + ["NotSucceedsTilde"] = "≿̸", -- U+0227F 00338 + ["NotSuperset"] = "⊃⃒", -- U+02283 020D2 + ["NotSupersetEqual"] = "⊉", -- U+02289 + ["NotTilde"] = "â‰", -- U+02241 + ["NotTildeEqual"] = "≄", -- U+02244 + ["NotTildeFullEqual"] = "≇", -- U+02247 + ["NotTildeTilde"] = "≉", -- U+02249 + ["NotVerticalBar"] = "∤", -- U+02224 + ["Nscr"] = "ð’©", -- U+1D4A9 + ["Ntilde"] = "Ñ", -- U+000D1 + ["Nu"] = "Î", -- U+0039D + ["OElig"] = "Å’", -- U+00152 + ["OHacgr"] = "Î", -- U+0038F + ["OHgr"] = "Ω", -- U+003A9 + ["Oacgr"] = "ÎŒ", -- U+0038C + ["Oacute"] = "Ó", -- U+000D3 + ["Ocirc"] = "Ô", -- U+000D4 + ["Ocy"] = "О", -- U+0041E + ["Odblac"] = "Å", -- U+00150 + ["Ofr"] = "ð”’", -- U+1D512 + ["Ogr"] = "Ο", -- U+0039F + ["Ograve"] = "Ã’", -- U+000D2 + ["Omacr"] = "ÅŒ", -- U+0014C + ["Omega"] = "Ω", -- U+003A9 + ["Omicron"] = "Ο", -- U+0039F + ["Oopf"] = "ð•†", -- U+1D546 + ["OpenCurlyDoubleQuote"] = "“", -- U+0201C + ["OpenCurlyQuote"] = "‘", -- U+02018 + ["Or"] = "â©”", -- U+02A54 + ["Oscr"] = "ð’ª", -- U+1D4AA + ["Oslash"] = "Ø", -- U+000D8 + ["Otilde"] = "Õ", -- U+000D5 + ["Otimes"] = "⨷", -- U+02A37 + ["Ouml"] = "Ö", -- U+000D6 + ["OverBar"] = "‾", -- U+0203E + ["OverBrace"] = "âž", -- U+023DE + ["OverBracket"] = "⎴", -- U+023B4 + ["OverParenthesis"] = "âœ", -- U+023DC + ["PHgr"] = "Φ", -- U+003A6 + ["PSgr"] = "Ψ", -- U+003A8 + ["PartialD"] = "∂", -- U+02202 + ["Pcy"] = "П", -- U+0041F + ["Pfr"] = "ð”“", -- U+1D513 + ["Pgr"] = "Π", -- U+003A0 + ["Phi"] = "Φ", -- U+003A6 + ["Pi"] = "Π", -- U+003A0 + ["PlusMinus"] = "±", -- U+000B1 + ["Poincareplane"] = "â„Œ", -- U+0210C + ["Popf"] = "â„™", -- U+02119 + ["Pr"] = "⪻", -- U+02ABB + ["Precedes"] = "≺", -- U+0227A + ["PrecedesEqual"] = "⪯", -- U+02AAF + ["PrecedesSlantEqual"] = "≼", -- U+0227C + ["PrecedesTilde"] = "≾", -- U+0227E + ["Prime"] = "″", -- U+02033 + ["Product"] = "âˆ", -- U+0220F + ["Proportion"] = "∷", -- U+02237 + ["Proportional"] = "âˆ", -- U+0221D + ["Pscr"] = "ð’«", -- U+1D4AB + ["Psi"] = "Ψ", -- U+003A8 + ["QUOT"] = "\"", 
-- U+00022 + ["Qfr"] = "ð””", -- U+1D514 + ["Qopf"] = "â„š", -- U+0211A + ["Qscr"] = "ð’¬", -- U+1D4AC + ["RBarr"] = "â¤", -- U+02910 + ["REG"] = "®", -- U+000AE + ["Racute"] = "Å”", -- U+00154 + ["Rang"] = "⟫", -- U+027EB + ["Rarr"] = "↠", -- U+021A0 + ["Rarrtl"] = "⤖", -- U+02916 + ["Rcaron"] = "Ř", -- U+00158 + ["Rcedil"] = "Å–", -- U+00156 + ["Rcy"] = "Р", -- U+00420 + ["Re"] = "â„œ", -- U+0211C + ["ReverseElement"] = "∋", -- U+0220B + ["ReverseEquilibrium"] = "⇋", -- U+021CB + ["ReverseUpEquilibrium"] = "⥯", -- U+0296F + ["Rfr"] = "â„œ", -- U+0211C + ["Rgr"] = "Ρ", -- U+003A1 + ["Rho"] = "Ρ", -- U+003A1 + ["RightAngleBracket"] = "⟩", -- U+027E9 + ["RightArrow"] = "→", -- U+02192 + ["RightArrowBar"] = "⇥", -- U+021E5 + ["RightArrowLeftArrow"] = "⇄", -- U+021C4 + ["RightCeiling"] = "⌉", -- U+02309 + ["RightDoubleBracket"] = "⟧", -- U+027E7 + ["RightDownTeeVector"] = "â¥", -- U+0295D + ["RightDownVector"] = "⇂", -- U+021C2 + ["RightDownVectorBar"] = "⥕", -- U+02955 + ["RightFloor"] = "⌋", -- U+0230B + ["RightTee"] = "⊢", -- U+022A2 + ["RightTeeArrow"] = "↦", -- U+021A6 + ["RightTeeVector"] = "⥛", -- U+0295B + ["RightTriangle"] = "⊳", -- U+022B3 + ["RightTriangleBar"] = "â§", -- U+029D0 + ["RightTriangleEqual"] = "⊵", -- U+022B5 + ["RightUpDownVector"] = "â¥", -- U+0294F + ["RightUpTeeVector"] = "⥜", -- U+0295C + ["RightUpVector"] = "↾", -- U+021BE + ["RightUpVectorBar"] = "⥔", -- U+02954 + ["RightVector"] = "⇀", -- U+021C0 + ["RightVectorBar"] = "⥓", -- U+02953 + ["Rightarrow"] = "⇒", -- U+021D2 + ["Ropf"] = "â„", -- U+0211D + ["RoundImplies"] = "⥰", -- U+02970 + ["Rrightarrow"] = "⇛", -- U+021DB + ["Rscr"] = "â„›", -- U+0211B + ["Rsh"] = "↱", -- U+021B1 + ["RuleDelayed"] = "⧴", -- U+029F4 + ["SHCHcy"] = "Щ", -- U+00429 + ["SHcy"] = "Ш", -- U+00428 + ["SOFTcy"] = "Ь", -- U+0042C + ["Sacute"] = "Åš", -- U+0015A + ["Sc"] = "⪼", -- U+02ABC + ["Scaron"] = "Å ", -- U+00160 + ["Scedil"] = "Åž", -- U+0015E + ["Scirc"] = "Åœ", -- U+0015C + ["Scy"] = "С", -- U+00421 + ["Sfr"] = "ð”–", -- U+1D516 + ["Sgr"] = "Σ", -- U+003A3 + ["ShortDownArrow"] = "↓", -- U+02193 + ["ShortLeftArrow"] = "â†", -- U+02190 + ["ShortRightArrow"] = "→", -- U+02192 + ["ShortUpArrow"] = "↑", -- U+02191 + ["Sigma"] = "Σ", -- U+003A3 + ["SmallCircle"] = "∘", -- U+02218 + ["Sopf"] = "ð•Š", -- U+1D54A + ["Sqrt"] = "√", -- U+0221A + ["Square"] = "â–¡", -- U+025A1 + ["SquareIntersection"] = "⊓", -- U+02293 + ["SquareSubset"] = "âŠ", -- U+0228F + ["SquareSubsetEqual"] = "⊑", -- U+02291 + ["SquareSuperset"] = "âŠ", -- U+02290 + ["SquareSupersetEqual"] = "⊒", -- U+02292 + ["SquareUnion"] = "⊔", -- U+02294 + ["Sscr"] = "ð’®", -- U+1D4AE + ["Star"] = "⋆", -- U+022C6 + ["Sub"] = "â‹", -- U+022D0 + ["Subset"] = "â‹", -- U+022D0 + ["SubsetEqual"] = "⊆", -- U+02286 + ["Succeeds"] = "≻", -- U+0227B + ["SucceedsEqual"] = "⪰", -- U+02AB0 + ["SucceedsSlantEqual"] = "≽", -- U+0227D + ["SucceedsTilde"] = "≿", -- U+0227F + ["SuchThat"] = "∋", -- U+0220B + ["Sum"] = "∑", -- U+02211 + ["Sup"] = "â‹‘", -- U+022D1 + ["Superset"] = "⊃", -- U+02283 + ["SupersetEqual"] = "⊇", -- U+02287 + ["Supset"] = "â‹‘", -- U+022D1 + ["THORN"] = "Þ", -- U+000DE + ["THgr"] = "Θ", -- U+00398 + ["TRADE"] = "â„¢", -- U+02122 + ["TSHcy"] = "Ћ", -- U+0040B + ["TScy"] = "Ц", -- U+00426 + ["Tab"] = "\9", -- U+00009 + ["Tau"] = "Τ", -- U+003A4 + ["Tcaron"] = "Ť", -- U+00164 + ["Tcedil"] = "Å¢", -- U+00162 + ["Tcy"] = "Т", -- U+00422 + ["Tfr"] = "ð”—", -- U+1D517 + ["Tgr"] = "Τ", -- U+003A4 + ["Therefore"] = "∴", -- U+02234 + ["Theta"] = "Θ", -- U+00398 + ["ThickSpace"] = 
"âŸâ€Š", -- U+0205F 0200A + ["ThinSpace"] = " ", -- U+02009 + ["Tilde"] = "∼", -- U+0223C + ["TildeEqual"] = "≃", -- U+02243 + ["TildeFullEqual"] = "≅", -- U+02245 + ["TildeTilde"] = "≈", -- U+02248 + ["Topf"] = "ð•‹", -- U+1D54B + ["TripleDot"] = "⃛", -- U+020DB + ["Tscr"] = "ð’¯", -- U+1D4AF + ["Tstrok"] = "Ŧ", -- U+00166 + ["Uacgr"] = "ÎŽ", -- U+0038E + ["Uacute"] = "Ú", -- U+000DA + ["Uarr"] = "↟", -- U+0219F + ["Uarrocir"] = "⥉", -- U+02949 + ["Ubrcy"] = "ÐŽ", -- U+0040E + ["Ubreve"] = "Ŭ", -- U+0016C + ["Ucirc"] = "Û", -- U+000DB + ["Ucy"] = "У", -- U+00423 + ["Udblac"] = "Å°", -- U+00170 + ["Udigr"] = "Ϋ", -- U+003AB + ["Ufr"] = "ð”˜", -- U+1D518 + ["Ugr"] = "Î¥", -- U+003A5 + ["Ugrave"] = "Ù", -- U+000D9 + ["Umacr"] = "Ū", -- U+0016A + -- ["UnderBar"] = "_", -- U+0005F + ["UnderBar"] = "‾", -- U+0203E + ["UnderBrace"] = "âŸ", -- U+023DF + ["UnderBracket"] = "⎵", -- U+023B5 + ["UnderParenthesis"] = "â", -- U+023DD + ["Union"] = "⋃", -- U+022C3 + ["UnionPlus"] = "⊎", -- U+0228E + ["Uogon"] = "Ų", -- U+00172 + ["Uopf"] = "ð•Œ", -- U+1D54C + ["UpArrow"] = "↑", -- U+02191 + ["UpArrowBar"] = "⤒", -- U+02912 + ["UpArrowDownArrow"] = "⇅", -- U+021C5 + ["UpDownArrow"] = "↕", -- U+02195 + ["UpEquilibrium"] = "⥮", -- U+0296E + ["UpTee"] = "⊥", -- U+022A5 + ["UpTeeArrow"] = "↥", -- U+021A5 + ["Uparrow"] = "⇑", -- U+021D1 + ["Updownarrow"] = "⇕", -- U+021D5 + ["UpperLeftArrow"] = "↖", -- U+02196 + ["UpperRightArrow"] = "↗", -- U+02197 + ["Upsi"] = "Ï’", -- U+003D2 + ["Upsilon"] = "Î¥", -- U+003A5 + ["Uring"] = "Å®", -- U+0016E + ["Uscr"] = "ð’°", -- U+1D4B0 + ["Utilde"] = "Ũ", -- U+00168 + ["Uuml"] = "Ãœ", -- U+000DC + ["VDash"] = "⊫", -- U+022AB + ["Vbar"] = "â««", -- U+02AEB + ["Vcy"] = "Ð’", -- U+00412 + ["Vdash"] = "⊩", -- U+022A9 + ["Vdashl"] = "⫦", -- U+02AE6 + ["Vee"] = "â‹", -- U+022C1 + ["Verbar"] = "‖", -- U+02016 + ["Vert"] = "‖", -- U+02016 + ["VerticalBar"] = "∣", -- U+02223 + ["VerticalLine"] = "|", -- U+0007C + ["VerticalSeparator"] = "â˜", -- U+02758 + ["VerticalTilde"] = "≀", -- U+02240 + ["VeryThinSpace"] = " ", -- U+0200A + ["Vfr"] = "ð”™", -- U+1D519 + ["Vopf"] = "ð•", -- U+1D54D + ["Vscr"] = "ð’±", -- U+1D4B1 + ["Vvdash"] = "⊪", -- U+022AA + ["Wcirc"] = "Å´", -- U+00174 + ["Wedge"] = "â‹€", -- U+022C0 + ["Wfr"] = "ð”š", -- U+1D51A + ["Wopf"] = "ð•Ž", -- U+1D54E + ["Wscr"] = "ð’²", -- U+1D4B2 + ["Xfr"] = "ð”›", -- U+1D51B + ["Xgr"] = "Ξ", -- U+0039E + ["Xi"] = "Ξ", -- U+0039E + ["Xopf"] = "ð•", -- U+1D54F + ["Xscr"] = "ð’³", -- U+1D4B3 + ["YAcy"] = "Я", -- U+0042F + ["YIcy"] = "Ї", -- U+00407 + ["YUcy"] = "Ю", -- U+0042E + ["Yacute"] = "Ã", -- U+000DD + ["Ycirc"] = "Ŷ", -- U+00176 + ["Ycy"] = "Ы", -- U+0042B + ["Yfr"] = "ð”œ", -- U+1D51C + ["Yopf"] = "ð•", -- U+1D550 + ["Yscr"] = "ð’´", -- U+1D4B4 + ["Yuml"] = "Ÿ", -- U+00178 + ["ZHcy"] = "Ж", -- U+00416 + ["Zacute"] = "Ź", -- U+00179 + ["Zcaron"] = "Ž", -- U+0017D + ["Zcy"] = "З", -- U+00417 + ["Zdot"] = "Å»", -- U+0017B + ["ZeroWidthSpace"] = "​", -- U+0200B + ["Zeta"] = "Ζ", -- U+00396 + ["Zfr"] = "ℨ", -- U+02128 + ["Zgr"] = "Ζ", -- U+00396 + ["Zopf"] = "ℤ", -- U+02124 + ["Zscr"] = "ð’µ", -- U+1D4B5 + ["aacgr"] = "ά", -- U+003AC + ["aacute"] = "á", -- U+000E1 + ["abreve"] = "ă", -- U+00103 + ["ac"] = "∾", -- U+0223E + ["acE"] = "∾̳", -- U+0223E 00333 + ["acd"] = "∿", -- U+0223F + ["acirc"] = "â", -- U+000E2 + ["acute"] = "´", -- U+000B4 + ["acy"] = "а", -- U+00430 + ["aelig"] = "æ", -- U+000E6 + ["af"] = "â¡", -- U+02061 + ["afr"] = "ð”ž", -- U+1D51E + ["agr"] = "α", -- U+003B1 + ["agrave"] = "à", -- U+000E0 + ["alefsym"] 
= "ℵ", -- U+02135 + ["aleph"] = "ℵ", -- U+02135 + ["alpha"] = "α", -- U+003B1 + ["amacr"] = "Ä", -- U+00101 + ["amalg"] = "⨿", -- U+02A3F + ["amp"] = "&", -- U+00026 + ["and"] = "∧", -- U+02227 + ["andand"] = "â©•", -- U+02A55 + ["andd"] = "â©œ", -- U+02A5C + ["andslope"] = "⩘", -- U+02A58 + ["andv"] = "â©š", -- U+02A5A + ["ang"] = "∠", -- U+02220 + ["ange"] = "⦤", -- U+029A4 + ["angle"] = "∠", -- U+02220 + ["angmsd"] = "∡", -- U+02221 + ["angmsdaa"] = "⦨", -- U+029A8 + ["angmsdab"] = "⦩", -- U+029A9 + ["angmsdac"] = "⦪", -- U+029AA + ["angmsdad"] = "⦫", -- U+029AB + ["angmsdae"] = "⦬", -- U+029AC + ["angmsdaf"] = "⦭", -- U+029AD + ["angmsdag"] = "⦮", -- U+029AE + ["angmsdah"] = "⦯", -- U+029AF + ["angrt"] = "∟", -- U+0221F + ["angrtvb"] = "⊾", -- U+022BE + ["angrtvbd"] = "â¦", -- U+0299D + ["angsph"] = "∢", -- U+02222 + ["angst"] = "Ã…", -- U+000C5 + ["angzarr"] = "â¼", -- U+0237C + ["aogon"] = "Ä…", -- U+00105 + ["aopf"] = "ð•’", -- U+1D552 + ["ap"] = "≈", -- U+02248 + ["apE"] = "â©°", -- U+02A70 + ["apacir"] = "⩯", -- U+02A6F + ["ape"] = "≊", -- U+0224A + ["apid"] = "≋", -- U+0224B + ["apos"] = "'", -- U+00027 + ["approx"] = "≈", -- U+02248 + ["approxeq"] = "≊", -- U+0224A + ["aring"] = "Ã¥", -- U+000E5 + ["ascr"] = "ð’¶", -- U+1D4B6 + ["ast"] = "*", -- U+0002A + ["asymp"] = "≈", -- U+02248 + ["asympeq"] = "â‰", -- U+0224D + ["atilde"] = "ã", -- U+000E3 + ["auml"] = "ä", -- U+000E4 + ["awconint"] = "∳", -- U+02233 + ["awint"] = "⨑", -- U+02A11 + ["b.Delta"] = "ðš«", -- U+1D6AB + ["b.Gamma"] = "ðšª", -- U+1D6AA + ["b.Gammad"] = "ðŸŠ", -- U+1D7CA + ["b.Lambda"] = "ðš²", -- U+1D6B2 + ["b.Omega"] = "ð›€", -- U+1D6C0 + ["b.Phi"] = "ðš½", -- U+1D6BD + ["b.Pi"] = "ðš·", -- U+1D6B7 + ["b.Psi"] = "ðš¿", -- U+1D6BF + ["b.Sigma"] = "ðšº", -- U+1D6BA + ["b.Theta"] = "ðš¯", -- U+1D6AF + ["b.Upsi"] = "ðš¼", -- U+1D6BC + ["b.Xi"] = "ðšµ", -- U+1D6B5 + ["b.alpha"] = "ð›‚", -- U+1D6C2 + ["b.beta"] = "ð›ƒ", -- U+1D6C3 + ["b.chi"] = "ð›˜", -- U+1D6D8 + ["b.delta"] = "ð›…", -- U+1D6C5 + ["b.epsi"] = "ð›†", -- U+1D6C6 + ["b.epsiv"] = "ð›œ", -- U+1D6DC + ["b.eta"] = "ð›ˆ", -- U+1D6C8 + ["b.gamma"] = "ð›„", -- U+1D6C4 + ["b.gammad"] = "ðŸ‹", -- U+1D7CB + ["b.iota"] = "ð›Š", -- U+1D6CA + ["b.kappa"] = "ð›‹", -- U+1D6CB + ["b.kappav"] = "ð›ž", -- U+1D6DE + ["b.lambda"] = "ð›Œ", -- U+1D6CC + ["b.mu"] = "ð›", -- U+1D6CD + ["b.nu"] = "ð›Ž", -- U+1D6CE + ["b.omega"] = "ð›š", -- U+1D6DA + ["b.phi"] = "ð›—", -- U+1D6D7 + ["b.phiv"] = "ð›Ÿ", -- U+1D6DF + ["b.pi"] = "ð›‘", -- U+1D6D1 + ["b.piv"] = "ð›¡", -- U+1D6E1 + ["b.psi"] = "ð›™", -- U+1D6D9 + ["b.rho"] = "ð›’", -- U+1D6D2 + ["b.rhov"] = "ð› ", -- U+1D6E0 + ["b.sigma"] = "ð›”", -- U+1D6D4 + ["b.sigmav"] = "ð›“", -- U+1D6D3 + ["b.tau"] = "ð›•", -- U+1D6D5 + ["b.thetas"] = "ð›‰", -- U+1D6C9 + ["b.thetav"] = "ð›", -- U+1D6DD + ["b.upsi"] = "ð›–", -- U+1D6D6 + ["b.xi"] = "ð›", -- U+1D6CF + ["b.zeta"] = "ð›‡", -- U+1D6C7 + ["bNot"] = "â«­", -- U+02AED + ["backcong"] = "≌", -- U+0224C + ["backepsilon"] = "϶", -- U+003F6 + ["backprime"] = "‵", -- U+02035 + ["backsim"] = "∽", -- U+0223D + ["backsimeq"] = "â‹", -- U+022CD + ["barvee"] = "⊽", -- U+022BD + ["barwed"] = "⌅", -- U+02305 + ["barwedge"] = "⌅", -- U+02305 + ["bbrk"] = "⎵", -- U+023B5 + ["bbrktbrk"] = "⎶", -- U+023B6 + ["bcong"] = "≌", -- U+0224C + ["bcy"] = "б", -- U+00431 + ["bdquo"] = "„", -- U+0201E + ["becaus"] = "∵", -- U+02235 + ["because"] = "∵", -- U+02235 + ["bemptyv"] = "⦰", -- U+029B0 + ["bepsi"] = "϶", -- U+003F6 + ["bernou"] = "ℬ", -- U+0212C + ["beta"] = "β", -- U+003B2 + ["beth"] = "ℶ", -- U+02136 
+ ["between"] = "≬", -- U+0226C + ["bfr"] = "ð”Ÿ", -- U+1D51F + ["bgr"] = "β", -- U+003B2 + ["bigcap"] = "â‹‚", -- U+022C2 + ["bigcirc"] = "â—¯", -- U+025EF + ["bigcup"] = "⋃", -- U+022C3 + ["bigodot"] = "⨀", -- U+02A00 + ["bigoplus"] = "â¨", -- U+02A01 + ["bigotimes"] = "⨂", -- U+02A02 + ["bigsqcup"] = "⨆", -- U+02A06 + ["bigstar"] = "★", -- U+02605 + ["bigtriangledown"] = "â–½", -- U+025BD + ["bigtriangleup"] = "â–³", -- U+025B3 + ["biguplus"] = "⨄", -- U+02A04 + ["bigvee"] = "â‹", -- U+022C1 + ["bigwedge"] = "â‹€", -- U+022C0 + ["bkarow"] = "â¤", -- U+0290D + ["blacklozenge"] = "⧫", -- U+029EB + ["blacksquare"] = "â–ª", -- U+025AA + ["blacktriangle"] = "â–´", -- U+025B4 + ["blacktriangledown"] = "â–¾", -- U+025BE + ["blacktriangleleft"] = "â—‚", -- U+025C2 + ["blacktriangleright"] = "â–¸", -- U+025B8 + ["blank"] = "â£", -- U+02423 + ["blk12"] = "â–’", -- U+02592 + ["blk14"] = "â–‘", -- U+02591 + ["blk34"] = "â–“", -- U+02593 + ["block"] = "â–ˆ", -- U+02588 + ["bne"] = "=⃥", -- U+0003D 020E5 + ["bnequiv"] = "≡⃥", -- U+02261 020E5 + ["bnot"] = "âŒ", -- U+02310 + ["bopf"] = "ð•“", -- U+1D553 + ["bot"] = "⊥", -- U+022A5 + ["bottom"] = "⊥", -- U+022A5 + ["bowtie"] = "⋈", -- U+022C8 + ["boxDL"] = "â•—", -- U+02557 + ["boxDR"] = "â•”", -- U+02554 + ["boxDl"] = "â•–", -- U+02556 + ["boxDr"] = "â•“", -- U+02553 + ["boxH"] = "â•", -- U+02550 + ["boxHD"] = "╦", -- U+02566 + ["boxHU"] = "â•©", -- U+02569 + ["boxHd"] = "╤", -- U+02564 + ["boxHu"] = "╧", -- U+02567 + ["boxUL"] = "â•", -- U+0255D + ["boxUR"] = "â•š", -- U+0255A + ["boxUl"] = "â•œ", -- U+0255C + ["boxUr"] = "â•™", -- U+02559 + ["boxV"] = "â•‘", -- U+02551 + ["boxVH"] = "╬", -- U+0256C + ["boxVL"] = "â•£", -- U+02563 + ["boxVR"] = "â• ", -- U+02560 + ["boxVh"] = "â•«", -- U+0256B + ["boxVl"] = "â•¢", -- U+02562 + ["boxVr"] = "â•Ÿ", -- U+0255F + ["boxbox"] = "⧉", -- U+029C9 + ["boxdL"] = "â••", -- U+02555 + ["boxdR"] = "â•’", -- U+02552 + ["boxdl"] = "â”", -- U+02510 + ["boxdr"] = "┌", -- U+0250C + ["boxh"] = "─", -- U+02500 + ["boxhD"] = "â•¥", -- U+02565 + ["boxhU"] = "╨", -- U+02568 + ["boxhd"] = "┬", -- U+0252C + ["boxhu"] = "â”´", -- U+02534 + ["boxminus"] = "⊟", -- U+0229F + ["boxplus"] = "⊞", -- U+0229E + ["boxtimes"] = "⊠", -- U+022A0 + ["boxuL"] = "â•›", -- U+0255B + ["boxuR"] = "╘", -- U+02558 + ["boxul"] = "┘", -- U+02518 + ["boxur"] = "â””", -- U+02514 + ["boxv"] = "│", -- U+02502 + ["boxvH"] = "╪", -- U+0256A + ["boxvL"] = "â•¡", -- U+02561 + ["boxvR"] = "â•ž", -- U+0255E + ["boxvh"] = "┼", -- U+0253C + ["boxvl"] = "┤", -- U+02524 + ["boxvr"] = "├", -- U+0251C + ["bprime"] = "‵", -- U+02035 + ["breve"] = "˘", -- U+002D8 + ["brvbar"] = "¦", -- U+000A6 + ["bscr"] = "ð’·", -- U+1D4B7 + ["bsemi"] = "â", -- U+0204F + ["bsim"] = "∽", -- U+0223D + ["bsime"] = "â‹", -- U+022CD + ["bsol"] = "\\", -- U+0005C + ["bsolb"] = "⧅", -- U+029C5 + ["bsolhsub"] = "⟈", -- U+027C8 + ["bull"] = "•", -- U+02022 + ["bullet"] = "•", -- U+02022 + ["bump"] = "≎", -- U+0224E + ["bumpE"] = "⪮", -- U+02AAE + ["bumpe"] = "â‰", -- U+0224F + ["bumpeq"] = "â‰", -- U+0224F + ["cacute"] = "ć", -- U+00107 + ["cap"] = "∩", -- U+02229 + ["capand"] = "â©„", -- U+02A44 + ["capbrcup"] = "⩉", -- U+02A49 + ["capcap"] = "â©‹", -- U+02A4B + ["capcup"] = "⩇", -- U+02A47 + ["capdot"] = "â©€", -- U+02A40 + ["caps"] = "∩︀", -- U+02229 0FE00 + ["caret"] = "â", -- U+02041 + ["caron"] = "ˇ", -- U+002C7 + ["ccaps"] = "â©", -- U+02A4D + ["ccaron"] = "Ä", -- U+0010D + ["ccedil"] = "ç", -- U+000E7 + ["ccirc"] = "ĉ", -- U+00109 + ["ccups"] = "â©Œ", -- U+02A4C + ["ccupssm"] = "â©", 
-- U+02A50 + ["cdot"] = "Ä‹", -- U+0010B + ["cedil"] = "¸", -- U+000B8 + ["cemptyv"] = "⦲", -- U+029B2 + ["cent"] = "¢", -- U+000A2 + ["centerdot"] = "·", -- U+000B7 + ["cfr"] = "ð” ", -- U+1D520 + ["chcy"] = "ч", -- U+00447 + ["check"] = "✓", -- U+02713 + ["checkmark"] = "✓", -- U+02713 + ["chi"] = "χ", -- U+003C7 + ["cir"] = "â—‹", -- U+025CB + ["cirE"] = "⧃", -- U+029C3 + ["circ"] = "ˆ", -- U+002C6 + ["circeq"] = "≗", -- U+02257 + ["circlearrowleft"] = "↺", -- U+021BA + ["circlearrowright"] = "↻", -- U+021BB + ["circledR"] = "®", -- U+000AE + ["circledS"] = "Ⓢ", -- U+024C8 + ["circledast"] = "⊛", -- U+0229B + ["circledcirc"] = "⊚", -- U+0229A + ["circleddash"] = "âŠ", -- U+0229D + ["cire"] = "≗", -- U+02257 + ["cirfnint"] = "â¨", -- U+02A10 + ["cirmid"] = "⫯", -- U+02AEF + ["cirscir"] = "⧂", -- U+029C2 + ["clubs"] = "♣", -- U+02663 + ["clubsuit"] = "♣", -- U+02663 + ["colon"] = ":", -- U+0003A + ["colone"] = "≔", -- U+02254 + ["coloneq"] = "≔", -- U+02254 + ["comma"] = ",", -- U+0002C + ["commat"] = "@", -- U+00040 + ["comp"] = "âˆ", -- U+02201 + ["compfn"] = "∘", -- U+02218 + ["complement"] = "âˆ", -- U+02201 + ["complexes"] = "â„‚", -- U+02102 + ["cong"] = "≅", -- U+02245 + ["congdot"] = "â©­", -- U+02A6D + ["conint"] = "∮", -- U+0222E + ["copf"] = "ð•”", -- U+1D554 + ["coprod"] = "âˆ", -- U+02210 + ["copy"] = "©", -- U+000A9 + ["copysr"] = "â„—", -- U+02117 + ["crarr"] = "↵", -- U+021B5 + ["cross"] = "✗", -- U+02717 + ["cscr"] = "ð’¸", -- U+1D4B8 + ["csub"] = "â«", -- U+02ACF + ["csube"] = "â«‘", -- U+02AD1 + ["csup"] = "â«", -- U+02AD0 + ["csupe"] = "â«’", -- U+02AD2 + ["ctdot"] = "⋯", -- U+022EF + ["cudarrl"] = "⤸", -- U+02938 + ["cudarrr"] = "⤵", -- U+02935 + ["cuepr"] = "â‹ž", -- U+022DE + ["cuesc"] = "â‹Ÿ", -- U+022DF + ["cularr"] = "↶", -- U+021B6 + ["cularrp"] = "⤽", -- U+0293D + ["cup"] = "∪", -- U+0222A + ["cupbrcap"] = "⩈", -- U+02A48 + ["cupcap"] = "⩆", -- U+02A46 + ["cupcup"] = "â©Š", -- U+02A4A + ["cupdot"] = "âŠ", -- U+0228D + ["cupor"] = "â©…", -- U+02A45 + ["cups"] = "∪︀", -- U+0222A 0FE00 + ["curarr"] = "↷", -- U+021B7 + ["curarrm"] = "⤼", -- U+0293C + ["curlyeqprec"] = "â‹ž", -- U+022DE + ["curlyeqsucc"] = "â‹Ÿ", -- U+022DF + ["curlyvee"] = "â‹Ž", -- U+022CE + ["curlywedge"] = "â‹", -- U+022CF + ["curren"] = "¤", -- U+000A4 + ["curvearrowleft"] = "↶", -- U+021B6 + ["curvearrowright"] = "↷", -- U+021B7 + ["cuvee"] = "â‹Ž", -- U+022CE + ["cuwed"] = "â‹", -- U+022CF + ["cwconint"] = "∲", -- U+02232 + ["cwint"] = "∱", -- U+02231 + ["cylcty"] = "⌭", -- U+0232D + ["dArr"] = "⇓", -- U+021D3 + ["dHar"] = "⥥", -- U+02965 + ["dagger"] = "†", -- U+02020 + ["daleth"] = "ℸ", -- U+02138 + ["darr"] = "↓", -- U+02193 + ["dash"] = "â€", -- U+02010 + ["dashv"] = "⊣", -- U+022A3 + ["dbkarow"] = "â¤", -- U+0290F + ["dblac"] = "Ë", -- U+002DD + ["dcaron"] = "Ä", -- U+0010F + ["dcy"] = "д", -- U+00434 + ["dd"] = "â…†", -- U+02146 + ["ddagger"] = "‡", -- U+02021 + ["ddarr"] = "⇊", -- U+021CA + ["ddotseq"] = "â©·", -- U+02A77 + ["deg"] = "°", -- U+000B0 + ["delta"] = "δ", -- U+003B4 + ["demptyv"] = "⦱", -- U+029B1 + ["dfisht"] = "⥿", -- U+0297F + ["dfr"] = "ð”¡", -- U+1D521 + ["dgr"] = "δ", -- U+003B4 + ["dharl"] = "⇃", -- U+021C3 + ["dharr"] = "⇂", -- U+021C2 + ["diam"] = "â‹„", -- U+022C4 + ["diamond"] = "â‹„", -- U+022C4 + ["diamondsuit"] = "♦", -- U+02666 + ["diams"] = "♦", -- U+02666 + ["die"] = "¨", -- U+000A8 + ["digamma"] = "Ï", -- U+003DD + ["disin"] = "⋲", -- U+022F2 + ["div"] = "÷", -- U+000F7 + ["divide"] = "÷", -- U+000F7 + ["divideontimes"] = "⋇", -- U+022C7 + ["divonx"] = 
"⋇", -- U+022C7 + ["djcy"] = "Ñ’", -- U+00452 + ["dlcorn"] = "⌞", -- U+0231E + ["dlcrop"] = "âŒ", -- U+0230D + ["dollar"] = "$", -- U+00024 + ["dopf"] = "ð••", -- U+1D555 + ["dot"] = "Ë™", -- U+002D9 + ["doteq"] = "â‰", -- U+02250 + ["doteqdot"] = "≑", -- U+02251 + ["dotminus"] = "∸", -- U+02238 + ["dotplus"] = "∔", -- U+02214 + ["dotsquare"] = "⊡", -- U+022A1 + ["doublebarwedge"] = "⌆", -- U+02306 + ["downarrow"] = "↓", -- U+02193 + ["downdownarrows"] = "⇊", -- U+021CA + ["downharpoonleft"] = "⇃", -- U+021C3 + ["downharpoonright"] = "⇂", -- U+021C2 + ["drbkarow"] = "â¤", -- U+02910 + ["drcorn"] = "⌟", -- U+0231F + ["drcrop"] = "⌌", -- U+0230C + ["dscr"] = "ð’¹", -- U+1D4B9 + ["dscy"] = "Ñ•", -- U+00455 + ["dsol"] = "⧶", -- U+029F6 + ["dstrok"] = "Ä‘", -- U+00111 + ["dtdot"] = "⋱", -- U+022F1 + ["dtri"] = "â–¿", -- U+025BF + ["dtrif"] = "â–¾", -- U+025BE + ["duarr"] = "⇵", -- U+021F5 + ["duhar"] = "⥯", -- U+0296F + ["dwangle"] = "⦦", -- U+029A6 + ["dzcy"] = "ÑŸ", -- U+0045F + ["dzigrarr"] = "⟿", -- U+027FF + ["eDDot"] = "â©·", -- U+02A77 + ["eDot"] = "≑", -- U+02251 + ["eacgr"] = "έ", -- U+003AD + ["eacute"] = "é", -- U+000E9 + ["easter"] = "â©®", -- U+02A6E + ["ecaron"] = "Ä›", -- U+0011B + ["ecir"] = "≖", -- U+02256 + ["ecirc"] = "ê", -- U+000EA + ["ecolon"] = "≕", -- U+02255 + ["ecy"] = "Ñ", -- U+0044D + ["edot"] = "Ä—", -- U+00117 + ["ee"] = "â…‡", -- U+02147 + ["eeacgr"] = "ή", -- U+003AE + ["eegr"] = "η", -- U+003B7 + ["efDot"] = "≒", -- U+02252 + ["efr"] = "ð”¢", -- U+1D522 + ["eg"] = "⪚", -- U+02A9A + ["egr"] = "ε", -- U+003B5 + ["egrave"] = "è", -- U+000E8 + ["egs"] = "⪖", -- U+02A96 + ["egsdot"] = "⪘", -- U+02A98 + ["el"] = "⪙", -- U+02A99 + ["elinters"] = "â§", -- U+023E7 + ["ell"] = "â„“", -- U+02113 + ["els"] = "⪕", -- U+02A95 + ["elsdot"] = "⪗", -- U+02A97 + ["emacr"] = "Ä“", -- U+00113 + ["empty"] = "∅", -- U+02205 + ["emptyset"] = "∅", -- U+02205 + ["emptyv"] = "∅", -- U+02205 + ["emsp"] = " ", -- U+02003 + ["emsp13"] = " ", -- U+02004 + ["emsp14"] = " ", -- U+02005 + ["eng"] = "Å‹", -- U+0014B + ["ensp"] = " ", -- U+02002 + ["eogon"] = "Ä™", -- U+00119 + ["eopf"] = "ð•–", -- U+1D556 + ["epar"] = "â‹•", -- U+022D5 + ["eparsl"] = "⧣", -- U+029E3 + ["eplus"] = "⩱", -- U+02A71 + ["epsi"] = "ε", -- U+003B5 + ["epsilon"] = "ε", -- U+003B5 + ["epsiv"] = "ϵ", -- U+003F5 + ["eqcirc"] = "≖", -- U+02256 + ["eqcolon"] = "≕", -- U+02255 + ["eqsim"] = "≂", -- U+02242 + ["eqslantgtr"] = "⪖", -- U+02A96 + ["eqslantless"] = "⪕", -- U+02A95 + ["equals"] = "=", -- U+0003D + ["equest"] = "≟", -- U+0225F + ["equiv"] = "≡", -- U+02261 + ["equivDD"] = "⩸", -- U+02A78 + ["eqvparsl"] = "⧥", -- U+029E5 + ["erDot"] = "≓", -- U+02253 + ["erarr"] = "⥱", -- U+02971 + ["escr"] = "ℯ", -- U+0212F + ["esdot"] = "â‰", -- U+02250 + ["esim"] = "≂", -- U+02242 + ["eta"] = "η", -- U+003B7 + ["eth"] = "ð", -- U+000F0 + ["euml"] = "ë", -- U+000EB + ["euro"] = "€", -- U+020AC + ["excl"] = "!", -- U+00021 + ["exist"] = "∃", -- U+02203 + ["expectation"] = "â„°", -- U+02130 + ["exponentiale"] = "â…‡", -- U+02147 + ["fallingdotseq"] = "≒", -- U+02252 + ["fcy"] = "Ñ„", -- U+00444 + ["female"] = "♀", -- U+02640 + ["ffilig"] = "ffi", -- U+0FB03 + ["fflig"] = "ff", -- U+0FB00 + ["ffllig"] = "ffl", -- U+0FB04 + ["ffr"] = "ð”£", -- U+1D523 + ["filig"] = "ï¬", -- U+0FB01 + ["fjlig"] = "fj", -- U+00066 0006A + ["flat"] = "â™­", -- U+0266D + ["fllig"] = "fl", -- U+0FB02 + ["fltns"] = "â–±", -- U+025B1 + ["fnof"] = "Æ’", -- U+00192 + ["fopf"] = "ð•—", -- U+1D557 + ["forall"] = "∀", -- U+02200 + ["fork"] = "â‹”", -- U+022D4 + 
["forkv"] = "â«™", -- U+02AD9 + ["fpartint"] = "â¨", -- U+02A0D + ["frac12"] = "½", -- U+000BD + ["frac13"] = "â…“", -- U+02153 + ["frac14"] = "¼", -- U+000BC + ["frac15"] = "â…•", -- U+02155 + ["frac16"] = "â…™", -- U+02159 + ["frac18"] = "â…›", -- U+0215B + ["frac23"] = "â…”", -- U+02154 + ["frac25"] = "â…–", -- U+02156 + ["frac34"] = "¾", -- U+000BE + ["frac35"] = "â…—", -- U+02157 + ["frac38"] = "â…œ", -- U+0215C + ["frac45"] = "â…˜", -- U+02158 + ["frac56"] = "â…š", -- U+0215A + ["frac58"] = "â…", -- U+0215D + ["frac78"] = "â…ž", -- U+0215E + ["frasl"] = "â„", -- U+02044 + ["frown"] = "⌢", -- U+02322 + ["fscr"] = "ð’»", -- U+1D4BB + ["gE"] = "≧", -- U+02267 + ["gEl"] = "⪌", -- U+02A8C + ["gacute"] = "ǵ", -- U+001F5 + ["gamma"] = "γ", -- U+003B3 + ["gammad"] = "Ï", -- U+003DD + ["gap"] = "⪆", -- U+02A86 + ["gbreve"] = "ÄŸ", -- U+0011F + ["gcirc"] = "Ä", -- U+0011D + ["gcy"] = "г", -- U+00433 + ["gdot"] = "Ä¡", -- U+00121 + ["ge"] = "≥", -- U+02265 + ["gel"] = "â‹›", -- U+022DB + ["geq"] = "≥", -- U+02265 + ["geqq"] = "≧", -- U+02267 + ["geqslant"] = "⩾", -- U+02A7E + ["ges"] = "⩾", -- U+02A7E + ["gescc"] = "⪩", -- U+02AA9 + ["gesdot"] = "⪀", -- U+02A80 + ["gesdoto"] = "⪂", -- U+02A82 + ["gesdotol"] = "⪄", -- U+02A84 + ["gesl"] = "⋛︀", -- U+022DB 0FE00 + ["gesles"] = "⪔", -- U+02A94 + ["gfr"] = "ð”¤", -- U+1D524 + ["gg"] = "≫", -- U+0226B + ["ggg"] = "â‹™", -- U+022D9 + ["ggr"] = "γ", -- U+003B3 + ["gimel"] = "â„·", -- U+02137 + ["gjcy"] = "Ñ“", -- U+00453 + ["gl"] = "≷", -- U+02277 + ["glE"] = "⪒", -- U+02A92 + ["gla"] = "⪥", -- U+02AA5 + ["glj"] = "⪤", -- U+02AA4 + ["gnE"] = "≩", -- U+02269 + ["gnap"] = "⪊", -- U+02A8A + ["gnapprox"] = "⪊", -- U+02A8A + ["gne"] = "⪈", -- U+02A88 + ["gneq"] = "⪈", -- U+02A88 + ["gneqq"] = "≩", -- U+02269 + ["gnsim"] = "⋧", -- U+022E7 + ["gopf"] = "ð•˜", -- U+1D558 + ["grave"] = "`", -- U+00060 + ["gscr"] = "â„Š", -- U+0210A + ["gsim"] = "≳", -- U+02273 + ["gsime"] = "⪎", -- U+02A8E + ["gsiml"] = "âª", -- U+02A90 + ["gt"] = ">", -- U+0003E + ["gtcc"] = "⪧", -- U+02AA7 + ["gtcir"] = "⩺", -- U+02A7A + ["gtdot"] = "â‹—", -- U+022D7 + ["gtlPar"] = "⦕", -- U+02995 + ["gtquest"] = "⩼", -- U+02A7C + ["gtrapprox"] = "⪆", -- U+02A86 + ["gtrarr"] = "⥸", -- U+02978 + ["gtrdot"] = "â‹—", -- U+022D7 + ["gtreqless"] = "â‹›", -- U+022DB + ["gtreqqless"] = "⪌", -- U+02A8C + ["gtrless"] = "≷", -- U+02277 + ["gtrsim"] = "≳", -- U+02273 + ["gvertneqq"] = "≩︀", -- U+02269 0FE00 + ["gvnE"] = "≩︀", -- U+02269 0FE00 + ["hArr"] = "⇔", -- U+021D4 + ["hairsp"] = " ", -- U+0200A + ["half"] = "½", -- U+000BD + ["hamilt"] = "â„‹", -- U+0210B + ["hardcy"] = "ÑŠ", -- U+0044A + ["harr"] = "↔", -- U+02194 + ["harrcir"] = "⥈", -- U+02948 + ["harrw"] = "↭", -- U+021AD + ["hbar"] = "â„", -- U+0210F + ["hcirc"] = "Ä¥", -- U+00125 + ["hearts"] = "♥", -- U+02665 + ["heartsuit"] = "♥", -- U+02665 + ["hellip"] = "…", -- U+02026 + ["hercon"] = "⊹", -- U+022B9 + ["hfr"] = "ð”¥", -- U+1D525 + ["hksearow"] = "⤥", -- U+02925 + ["hkswarow"] = "⤦", -- U+02926 + ["hoarr"] = "⇿", -- U+021FF + ["homtht"] = "∻", -- U+0223B + ["hookleftarrow"] = "↩", -- U+021A9 + ["hookrightarrow"] = "↪", -- U+021AA + ["hopf"] = "ð•™", -- U+1D559 + ["horbar"] = "―", -- U+02015 + ["hscr"] = "ð’½", -- U+1D4BD + ["hslash"] = "â„", -- U+0210F + ["hstrok"] = "ħ", -- U+00127 + ["hybull"] = "âƒ", -- U+02043 + ["hyphen"] = "â€", -- U+02010 + ["iacgr"] = "ί", -- U+003AF + ["iacute"] = "í", -- U+000ED + ["ic"] = "â£", -- U+02063 + ["icirc"] = "î", -- U+000EE + ["icy"] = "и", -- U+00438 + ["idiagr"] = "Î", -- U+00390 + ["idigr"] = 
"ÏŠ", -- U+003CA + ["iecy"] = "е", -- U+00435 + ["iexcl"] = "¡", -- U+000A1 + ["iff"] = "⇔", -- U+021D4 + ["ifr"] = "ð”¦", -- U+1D526 + ["igr"] = "ι", -- U+003B9 + ["igrave"] = "ì", -- U+000EC + ["ii"] = "â…ˆ", -- U+02148 + ["iiiint"] = "⨌", -- U+02A0C + ["iiint"] = "∭", -- U+0222D + ["iinfin"] = "⧜", -- U+029DC + ["iiota"] = "â„©", -- U+02129 + ["ijlig"] = "ij", -- U+00133 + ["imacr"] = "Ä«", -- U+0012B + ["image"] = "â„‘", -- U+02111 + ["imagline"] = "â„", -- U+02110 + ["imagpart"] = "â„‘", -- U+02111 + ["imath"] = "ı", -- U+00131 + ["imof"] = "⊷", -- U+022B7 + ["imped"] = "Ƶ", -- U+001B5 + ["in"] = "∈", -- U+02208 + ["incare"] = "â„…", -- U+02105 + ["infin"] = "∞", -- U+0221E + ["infintie"] = "â§", -- U+029DD + ["inodot"] = "ı", -- U+00131 + ["int"] = "∫", -- U+0222B + ["intcal"] = "⊺", -- U+022BA + ["integers"] = "ℤ", -- U+02124 + ["intercal"] = "⊺", -- U+022BA + ["intlarhk"] = "⨗", -- U+02A17 + ["intprod"] = "⨼", -- U+02A3C + ["iocy"] = "Ñ‘", -- U+00451 + ["iogon"] = "į", -- U+0012F + ["iopf"] = "ð•š", -- U+1D55A + ["iota"] = "ι", -- U+003B9 + ["iprod"] = "⨼", -- U+02A3C + ["iquest"] = "¿", -- U+000BF + ["iscr"] = "ð’¾", -- U+1D4BE + ["isin"] = "∈", -- U+02208 + ["isinE"] = "⋹", -- U+022F9 + ["isindot"] = "⋵", -- U+022F5 + ["isins"] = "â‹´", -- U+022F4 + ["isinsv"] = "⋳", -- U+022F3 + ["isinv"] = "∈", -- U+02208 + ["it"] = "â¢", -- U+02062 + ["itilde"] = "Ä©", -- U+00129 + ["iukcy"] = "Ñ–", -- U+00456 + ["iuml"] = "ï", -- U+000EF + ["jcirc"] = "ĵ", -- U+00135 + ["jcy"] = "й", -- U+00439 + ["jfr"] = "ð”§", -- U+1D527 + ["jmath"] = "È·", -- U+00237 + ["jopf"] = "ð•›", -- U+1D55B + ["jscr"] = "ð’¿", -- U+1D4BF + ["jsercy"] = "ј", -- U+00458 + ["jukcy"] = "Ñ”", -- U+00454 + ["kappa"] = "κ", -- U+003BA + ["kappav"] = "Ï°", -- U+003F0 + ["kcedil"] = "Ä·", -- U+00137 + ["kcy"] = "к", -- U+0043A + ["kfr"] = "ð”¨", -- U+1D528 + ["kgr"] = "κ", -- U+003BA + ["kgreen"] = "ĸ", -- U+00138 + ["khcy"] = "Ñ…", -- U+00445 + ["khgr"] = "χ", -- U+003C7 + ["kjcy"] = "Ñœ", -- U+0045C + ["kopf"] = "ð•œ", -- U+1D55C + ["kscr"] = "ð“€", -- U+1D4C0 + ["lAarr"] = "⇚", -- U+021DA + ["lArr"] = "â‡", -- U+021D0 + ["lAtail"] = "⤛", -- U+0291B + ["lBarr"] = "⤎", -- U+0290E + ["lE"] = "≦", -- U+02266 + ["lEg"] = "⪋", -- U+02A8B + ["lHar"] = "⥢", -- U+02962 + ["lacute"] = "ĺ", -- U+0013A + ["laemptyv"] = "⦴", -- U+029B4 + ["lagran"] = "â„’", -- U+02112 + ["lambda"] = "λ", -- U+003BB + ["lang"] = "⟨", -- U+027E8 + ["langd"] = "⦑", -- U+02991 + ["langle"] = "⟨", -- U+027E8 + ["lap"] = "⪅", -- U+02A85 + ["laquo"] = "«", -- U+000AB + ["larr"] = "â†", -- U+02190 + ["larrb"] = "⇤", -- U+021E4 + ["larrbfs"] = "⤟", -- U+0291F + ["larrfs"] = "â¤", -- U+0291D + ["larrhk"] = "↩", -- U+021A9 + ["larrlp"] = "↫", -- U+021AB + ["larrpl"] = "⤹", -- U+02939 + ["larrsim"] = "⥳", -- U+02973 + ["larrtl"] = "↢", -- U+021A2 + ["lat"] = "⪫", -- U+02AAB + ["latail"] = "⤙", -- U+02919 + ["late"] = "⪭", -- U+02AAD + ["lates"] = "⪭︀", -- U+02AAD 0FE00 + ["lbarr"] = "⤌", -- U+0290C + ["lbbrk"] = "â²", -- U+02772 + ["lbrace"] = "{", -- U+0007B + ["lbrack"] = "[", -- U+0005B + ["lbrke"] = "⦋", -- U+0298B + ["lbrksld"] = "â¦", -- U+0298F + ["lbrkslu"] = "â¦", -- U+0298D + ["lcaron"] = "ľ", -- U+0013E + ["lcedil"] = "ļ", -- U+0013C + ["lceil"] = "⌈", -- U+02308 + ["lcub"] = "{", -- U+0007B + ["lcy"] = "л", -- U+0043B + ["ldca"] = "⤶", -- U+02936 + ["ldquo"] = "“", -- U+0201C + ["ldquor"] = "„", -- U+0201E + ["ldrdhar"] = "⥧", -- U+02967 + ["ldrushar"] = "⥋", -- U+0294B + ["ldsh"] = "↲", -- U+021B2 + ["le"] = "≤", -- U+02264 + ["leftarrow"] = "â†", 
-- U+02190 + ["leftarrowtail"] = "↢", -- U+021A2 + ["leftharpoondown"] = "↽", -- U+021BD + ["leftharpoonup"] = "↼", -- U+021BC + ["leftleftarrows"] = "⇇", -- U+021C7 + ["leftrightarrow"] = "↔", -- U+02194 + ["leftrightarrows"] = "⇆", -- U+021C6 + ["leftrightharpoons"] = "⇋", -- U+021CB + ["leftrightsquigarrow"] = "↭", -- U+021AD + ["leftthreetimes"] = "â‹‹", -- U+022CB + ["leg"] = "â‹š", -- U+022DA + ["leq"] = "≤", -- U+02264 + ["leqq"] = "≦", -- U+02266 + ["leqslant"] = "⩽", -- U+02A7D + ["les"] = "⩽", -- U+02A7D + ["lescc"] = "⪨", -- U+02AA8 + ["lesdot"] = "â©¿", -- U+02A7F + ["lesdoto"] = "âª", -- U+02A81 + ["lesdotor"] = "⪃", -- U+02A83 + ["lesg"] = "⋚︀", -- U+022DA 0FE00 + ["lesges"] = "⪓", -- U+02A93 + ["lessapprox"] = "⪅", -- U+02A85 + ["lessdot"] = "â‹–", -- U+022D6 + ["lesseqgtr"] = "â‹š", -- U+022DA + ["lesseqqgtr"] = "⪋", -- U+02A8B + ["lessgtr"] = "≶", -- U+02276 + ["lesssim"] = "≲", -- U+02272 + ["lfisht"] = "⥼", -- U+0297C + ["lfloor"] = "⌊", -- U+0230A + ["lfr"] = "ð”©", -- U+1D529 + ["lg"] = "≶", -- U+02276 + ["lgE"] = "⪑", -- U+02A91 + ["lgr"] = "λ", -- U+003BB + ["lhard"] = "↽", -- U+021BD + ["lharu"] = "↼", -- U+021BC + ["lharul"] = "⥪", -- U+0296A + ["lhblk"] = "â–„", -- U+02584 + ["ljcy"] = "Ñ™", -- U+00459 + ["ll"] = "≪", -- U+0226A + ["llarr"] = "⇇", -- U+021C7 + ["llcorner"] = "⌞", -- U+0231E + ["llhard"] = "⥫", -- U+0296B + ["lltri"] = "â—º", -- U+025FA + ["lmidot"] = "Å€", -- U+00140 + ["lmoust"] = "⎰", -- U+023B0 + ["lmoustache"] = "⎰", -- U+023B0 + ["lnE"] = "≨", -- U+02268 + ["lnap"] = "⪉", -- U+02A89 + ["lnapprox"] = "⪉", -- U+02A89 + ["lne"] = "⪇", -- U+02A87 + ["lneq"] = "⪇", -- U+02A87 + ["lneqq"] = "≨", -- U+02268 + ["lnsim"] = "⋦", -- U+022E6 + ["loang"] = "⟬", -- U+027EC + ["loarr"] = "⇽", -- U+021FD + ["lobrk"] = "⟦", -- U+027E6 + ["longleftarrow"] = "⟵", -- U+027F5 + ["longleftrightarrow"] = "⟷", -- U+027F7 + ["longmapsto"] = "⟼", -- U+027FC + ["longrightarrow"] = "⟶", -- U+027F6 + ["looparrowleft"] = "↫", -- U+021AB + ["looparrowright"] = "↬", -- U+021AC + ["lopar"] = "⦅", -- U+02985 + ["lopf"] = "ð•", -- U+1D55D + ["loplus"] = "⨭", -- U+02A2D + ["lotimes"] = "⨴", -- U+02A34 + ["lowast"] = "∗", -- U+02217 + ["lowbar"] = "_", -- U+0005F + ["loz"] = "â—Š", -- U+025CA + ["lozenge"] = "â—Š", -- U+025CA + ["lozf"] = "⧫", -- U+029EB + ["lpar"] = "(", -- U+00028 + ["lparlt"] = "⦓", -- U+02993 + ["lrarr"] = "⇆", -- U+021C6 + ["lrcorner"] = "⌟", -- U+0231F + ["lrhar"] = "⇋", -- U+021CB + ["lrhard"] = "⥭", -- U+0296D + ["lrm"] = "‎", -- U+0200E + ["lrtri"] = "⊿", -- U+022BF + ["lsaquo"] = "‹", -- U+02039 + ["lscr"] = "ð“", -- U+1D4C1 + ["lsh"] = "↰", -- U+021B0 + ["lsim"] = "≲", -- U+02272 + ["lsime"] = "âª", -- U+02A8D + ["lsimg"] = "âª", -- U+02A8F + ["lsqb"] = "[", -- U+0005B + ["lsquo"] = "‘", -- U+02018 + ["lsquor"] = "‚", -- U+0201A + ["lstrok"] = "Å‚", -- U+00142 + ["lt"] = "<", -- U+00026 + ["ltcc"] = "⪦", -- U+02AA6 + ["ltcir"] = "⩹", -- U+02A79 + ["ltdot"] = "â‹–", -- U+022D6 + ["lthree"] = "â‹‹", -- U+022CB + ["ltimes"] = "⋉", -- U+022C9 + ["ltlarr"] = "⥶", -- U+02976 + ["ltquest"] = "â©»", -- U+02A7B + ["ltrPar"] = "⦖", -- U+02996 + ["ltri"] = "â—ƒ", -- U+025C3 + ["ltrie"] = "⊴", -- U+022B4 + ["ltrif"] = "â—‚", -- U+025C2 + ["lurdshar"] = "⥊", -- U+0294A + ["luruhar"] = "⥦", -- U+02966 + ["lvertneqq"] = "≨︀", -- U+02268 0FE00 + ["lvnE"] = "≨︀", -- U+02268 0FE00 + ["mDDot"] = "∺", -- U+0223A + ["macr"] = "¯", -- U+000AF + ["male"] = "♂", -- U+02642 + ["malt"] = "✠", -- U+02720 + ["maltese"] = "✠", -- U+02720 + ["map"] = "↦", -- U+021A6 + ["mapsto"] = 
"↦", -- U+021A6 + ["mapstodown"] = "↧", -- U+021A7 + ["mapstoleft"] = "↤", -- U+021A4 + ["mapstoup"] = "↥", -- U+021A5 + ["marker"] = "â–®", -- U+025AE + ["mcomma"] = "⨩", -- U+02A29 + ["mcy"] = "м", -- U+0043C + ["mdash"] = "—", -- U+02014 + ["measuredangle"] = "∡", -- U+02221 + ["mfr"] = "ð”ª", -- U+1D52A + ["mgr"] = "μ", -- U+003BC + ["mho"] = "℧", -- U+02127 + ["micro"] = "µ", -- U+000B5 + ["mid"] = "∣", -- U+02223 + ["midast"] = "*", -- U+0002A + ["midcir"] = "â«°", -- U+02AF0 + ["middot"] = "·", -- U+000B7 + ["minus"] = "−", -- U+02212 + ["minusb"] = "⊟", -- U+0229F + ["minusd"] = "∸", -- U+02238 + ["minusdu"] = "⨪", -- U+02A2A + ["mlcp"] = "â«›", -- U+02ADB + ["mldr"] = "…", -- U+02026 + ["mnplus"] = "∓", -- U+02213 + ["models"] = "⊧", -- U+022A7 + ["mopf"] = "ð•ž", -- U+1D55E + ["mp"] = "∓", -- U+02213 + ["mscr"] = "ð“‚", -- U+1D4C2 + ["mstpos"] = "∾", -- U+0223E + ["mu"] = "μ", -- U+003BC + ["multimap"] = "⊸", -- U+022B8 + ["mumap"] = "⊸", -- U+022B8 + ["nGg"] = "⋙̸", -- U+022D9 00338 + ["nGt"] = "≫⃒", -- U+0226B 020D2 + ["nGtv"] = "≫̸", -- U+0226B 00338 + ["nLeftarrow"] = "â‡", -- U+021CD + ["nLeftrightarrow"] = "⇎", -- U+021CE + ["nLl"] = "⋘̸", -- U+022D8 00338 + ["nLt"] = "≪⃒", -- U+0226A 020D2 + ["nLtv"] = "≪̸", -- U+0226A 00338 + ["nRightarrow"] = "â‡", -- U+021CF + ["nVDash"] = "⊯", -- U+022AF + ["nVdash"] = "⊮", -- U+022AE + ["nabla"] = "∇", -- U+02207 + ["nacute"] = "Å„", -- U+00144 + ["nang"] = "∠⃒", -- U+02220 020D2 + ["nap"] = "≉", -- U+02249 + ["napE"] = "⩰̸", -- U+02A70 00338 + ["napid"] = "≋̸", -- U+0224B 00338 + ["napos"] = "ʼn", -- U+00149 + ["napprox"] = "≉", -- U+02249 + ["natur"] = "â™®", -- U+0266E + ["natural"] = "â™®", -- U+0266E + ["naturals"] = "â„•", -- U+02115 + ["nbsp"] = " ", -- U+000A0 + ["nbump"] = "≎̸", -- U+0224E 00338 + ["nbumpe"] = "â‰Ì¸", -- U+0224F 00338 + ["ncap"] = "⩃", -- U+02A43 + ["ncaron"] = "ň", -- U+00148 + ["ncedil"] = "ņ", -- U+00146 + ["ncong"] = "≇", -- U+02247 + ["ncongdot"] = "⩭̸", -- U+02A6D 00338 + ["ncup"] = "â©‚", -- U+02A42 + ["ncy"] = "н", -- U+0043D + ["ndash"] = "–", -- U+02013 + ["ne"] = "≠", -- U+02260 + ["neArr"] = "⇗", -- U+021D7 + ["nearhk"] = "⤤", -- U+02924 + ["nearr"] = "↗", -- U+02197 + ["nearrow"] = "↗", -- U+02197 + ["nedot"] = "â‰Ì¸", -- U+02250 00338 + ["nequiv"] = "≢", -- U+02262 + ["nesear"] = "⤨", -- U+02928 + ["nesim"] = "≂̸", -- U+02242 00338 + ["nexist"] = "∄", -- U+02204 + ["nexists"] = "∄", -- U+02204 + ["nfr"] = "ð”«", -- U+1D52B + ["ngE"] = "≧̸", -- U+02267 00338 + ["nge"] = "≱", -- U+02271 + ["ngeq"] = "≱", -- U+02271 + ["ngeqq"] = "≧̸", -- U+02267 00338 + ["ngeqslant"] = "⩾̸", -- U+02A7E 00338 + ["nges"] = "⩾̸", -- U+02A7E 00338 + ["ngr"] = "ν", -- U+003BD + ["ngsim"] = "≵", -- U+02275 + ["ngt"] = "≯", -- U+0226F + ["ngtr"] = "≯", -- U+0226F + ["nhArr"] = "⇎", -- U+021CE + ["nharr"] = "↮", -- U+021AE + ["nhpar"] = "⫲", -- U+02AF2 + ["ni"] = "∋", -- U+0220B + ["nis"] = "⋼", -- U+022FC + ["nisd"] = "⋺", -- U+022FA + ["niv"] = "∋", -- U+0220B + ["njcy"] = "Ñš", -- U+0045A + ["nlArr"] = "â‡", -- U+021CD + ["nlE"] = "≦̸", -- U+02266 00338 + ["nlarr"] = "↚", -- U+0219A + ["nldr"] = "‥", -- U+02025 + ["nle"] = "≰", -- U+02270 + ["nleftarrow"] = "↚", -- U+0219A + ["nleftrightarrow"] = "↮", -- U+021AE + ["nleq"] = "≰", -- U+02270 + ["nleqq"] = "≦̸", -- U+02266 00338 + ["nleqslant"] = "⩽̸", -- U+02A7D 00338 + ["nles"] = "⩽̸", -- U+02A7D 00338 + ["nless"] = "≮", -- U+0226E + ["nlsim"] = "≴", -- U+02274 + ["nlt"] = "≮", -- U+0226E + ["nltri"] = "⋪", -- U+022EA + ["nltrie"] = "⋬", -- U+022EC + ["nmid"] = "∤", -- 
U+02224 + ["nopf"] = "ð•Ÿ", -- U+1D55F + ["not"] = "¬", -- U+000AC + ["notin"] = "∉", -- U+02209 + ["notinE"] = "⋹̸", -- U+022F9 00338 + ["notindot"] = "⋵̸", -- U+022F5 00338 + ["notinva"] = "∉", -- U+02209 + ["notinvb"] = "â‹·", -- U+022F7 + ["notinvc"] = "⋶", -- U+022F6 + ["notni"] = "∌", -- U+0220C + ["notniva"] = "∌", -- U+0220C + ["notnivb"] = "⋾", -- U+022FE + ["notnivc"] = "⋽", -- U+022FD + ["npar"] = "∦", -- U+02226 + ["nparallel"] = "∦", -- U+02226 + ["nparsl"] = "⫽⃥", -- U+02AFD 020E5 + ["npart"] = "∂̸", -- U+02202 00338 + ["npolint"] = "⨔", -- U+02A14 + ["npr"] = "⊀", -- U+02280 + ["nprcue"] = "â‹ ", -- U+022E0 + ["npre"] = "⪯̸", -- U+02AAF 00338 + ["nprec"] = "⊀", -- U+02280 + ["npreceq"] = "⪯̸", -- U+02AAF 00338 + ["nrArr"] = "â‡", -- U+021CF + ["nrarr"] = "↛", -- U+0219B + ["nrarrc"] = "⤳̸", -- U+02933 00338 + ["nrarrw"] = "â†Ì¸", -- U+0219D 00338 + ["nrightarrow"] = "↛", -- U+0219B + ["nrtri"] = "â‹«", -- U+022EB + ["nrtrie"] = "â‹­", -- U+022ED + ["nsc"] = "âŠ", -- U+02281 + ["nsccue"] = "â‹¡", -- U+022E1 + ["nsce"] = "⪰̸", -- U+02AB0 00338 + ["nscr"] = "ð“ƒ", -- U+1D4C3 + ["nshortmid"] = "∤", -- U+02224 + ["nshortparallel"] = "∦", -- U+02226 + ["nsim"] = "â‰", -- U+02241 + ["nsime"] = "≄", -- U+02244 + ["nsimeq"] = "≄", -- U+02244 + ["nsmid"] = "∤", -- U+02224 + ["nspar"] = "∦", -- U+02226 + ["nsqsube"] = "â‹¢", -- U+022E2 + ["nsqsupe"] = "â‹£", -- U+022E3 + ["nsub"] = "⊄", -- U+02284 + ["nsubE"] = "⫅̸", -- U+02AC5 00338 + ["nsube"] = "⊈", -- U+02288 + ["nsubset"] = "⊂⃒", -- U+02282 020D2 + ["nsubseteq"] = "⊈", -- U+02288 + ["nsubseteqq"] = "⫅̸", -- U+02AC5 00338 + ["nsucc"] = "âŠ", -- U+02281 + ["nsucceq"] = "⪰̸", -- U+02AB0 00338 + ["nsup"] = "⊅", -- U+02285 + ["nsupE"] = "⫆̸", -- U+02AC6 00338 + ["nsupe"] = "⊉", -- U+02289 + ["nsupset"] = "⊃⃒", -- U+02283 020D2 + ["nsupseteq"] = "⊉", -- U+02289 + ["nsupseteqq"] = "⫆̸", -- U+02AC6 00338 + ["ntgl"] = "≹", -- U+02279 + ["ntilde"] = "ñ", -- U+000F1 + ["ntlg"] = "≸", -- U+02278 + ["ntriangleleft"] = "⋪", -- U+022EA + ["ntrianglelefteq"] = "⋬", -- U+022EC + ["ntriangleright"] = "â‹«", -- U+022EB + ["ntrianglerighteq"] = "â‹­", -- U+022ED + ["nu"] = "ν", -- U+003BD + ["num"] = "#", -- U+00023 + ["numero"] = "â„–", -- U+02116 + ["numsp"] = " ", -- U+02007 + ["nvDash"] = "⊭", -- U+022AD + ["nvHarr"] = "⤄", -- U+02904 + ["nvap"] = "â‰âƒ’", -- U+0224D 020D2 + ["nvdash"] = "⊬", -- U+022AC + ["nvge"] = "≥⃒", -- U+02265 020D2 + ["nvgt"] = ">⃒", -- U+0003E 020D2 + ["nvinfin"] = "⧞", -- U+029DE + ["nvlArr"] = "⤂", -- U+02902 + ["nvle"] = "≤⃒", -- U+02264 020D2 + ["nvlt"] = "&⃒", -- U+00026 020D2 + ["nvltrie"] = "⊴⃒", -- U+022B4 020D2 + ["nvrArr"] = "⤃", -- U+02903 + ["nvrtrie"] = "⊵⃒", -- U+022B5 020D2 + ["nvsim"] = "∼⃒", -- U+0223C 020D2 + ["nwArr"] = "⇖", -- U+021D6 + ["nwarhk"] = "⤣", -- U+02923 + ["nwarr"] = "↖", -- U+02196 + ["nwarrow"] = "↖", -- U+02196 + ["nwnear"] = "⤧", -- U+02927 + ["oS"] = "Ⓢ", -- U+024C8 + ["oacgr"] = "ÏŒ", -- U+003CC + ["oacute"] = "ó", -- U+000F3 + ["oast"] = "⊛", -- U+0229B + ["ocir"] = "⊚", -- U+0229A + ["ocirc"] = "ô", -- U+000F4 + ["ocy"] = "о", -- U+0043E + ["odash"] = "âŠ", -- U+0229D + ["odblac"] = "Å‘", -- U+00151 + ["odiv"] = "⨸", -- U+02A38 + ["odot"] = "⊙", -- U+02299 + ["odsold"] = "⦼", -- U+029BC + ["oelig"] = "Å“", -- U+00153 + ["ofcir"] = "⦿", -- U+029BF + ["ofr"] = "ð”¬", -- U+1D52C + ["ogon"] = "Ë›", -- U+002DB + ["ogr"] = "ο", -- U+003BF + ["ograve"] = "ò", -- U+000F2 + ["ogt"] = "â§", -- U+029C1 + ["ohacgr"] = "ÏŽ", -- U+003CE + ["ohbar"] = "⦵", -- U+029B5 + ["ohgr"] = "ω", -- U+003C9 + 
["ohm"] = "Ω", -- U+003A9 + ["oint"] = "∮", -- U+0222E + ["olarr"] = "↺", -- U+021BA + ["olcir"] = "⦾", -- U+029BE + ["olcross"] = "⦻", -- U+029BB + ["oline"] = "‾", -- U+0203E + ["olt"] = "⧀", -- U+029C0 + ["omacr"] = "Å", -- U+0014D + ["omega"] = "ω", -- U+003C9 + ["omicron"] = "ο", -- U+003BF + ["omid"] = "⦶", -- U+029B6 + ["ominus"] = "⊖", -- U+02296 + ["oopf"] = "ð• ", -- U+1D560 + ["opar"] = "⦷", -- U+029B7 + ["operp"] = "⦹", -- U+029B9 + ["oplus"] = "⊕", -- U+02295 + ["or"] = "∨", -- U+02228 + ["orarr"] = "↻", -- U+021BB + ["ord"] = "â©", -- U+02A5D + ["order"] = "â„´", -- U+02134 + ["orderof"] = "â„´", -- U+02134 + ["ordf"] = "ª", -- U+000AA + ["ordm"] = "º", -- U+000BA + ["origof"] = "⊶", -- U+022B6 + ["oror"] = "â©–", -- U+02A56 + ["orslope"] = "â©—", -- U+02A57 + ["orv"] = "â©›", -- U+02A5B + ["oscr"] = "â„´", -- U+02134 + ["oslash"] = "ø", -- U+000F8 + ["osol"] = "⊘", -- U+02298 + ["otilde"] = "õ", -- U+000F5 + ["otimes"] = "⊗", -- U+02297 + ["otimesas"] = "⨶", -- U+02A36 + ["ouml"] = "ö", -- U+000F6 + ["ovbar"] = "⌽", -- U+0233D + ["par"] = "∥", -- U+02225 + ["para"] = "¶", -- U+000B6 + ["parallel"] = "∥", -- U+02225 + ["parsim"] = "⫳", -- U+02AF3 + ["parsl"] = "⫽", -- U+02AFD + ["part"] = "∂", -- U+02202 + ["pcy"] = "п", -- U+0043F + ["percnt"] = "%", -- U+00025 + ["period"] = ".", -- U+0002E + ["permil"] = "‰", -- U+02030 + ["perp"] = "⊥", -- U+022A5 + ["pertenk"] = "‱", -- U+02031 + ["pfr"] = "ð”­", -- U+1D52D + ["pgr"] = "Ï€", -- U+003C0 + ["phgr"] = "φ", -- U+003C6 + ["phi"] = "φ", -- U+003C6 + ["phiv"] = "Ï•", -- U+003D5 + ["phmmat"] = "ℳ", -- U+02133 + ["phone"] = "☎", -- U+0260E + ["pi"] = "Ï€", -- U+003C0 + ["pitchfork"] = "â‹”", -- U+022D4 + ["piv"] = "Ï–", -- U+003D6 + ["planck"] = "â„", -- U+0210F + ["planckh"] = "â„Ž", -- U+0210E + ["plankv"] = "â„", -- U+0210F + ["plus"] = "+", -- U+0002B + ["plusacir"] = "⨣", -- U+02A23 + ["plusb"] = "⊞", -- U+0229E + ["pluscir"] = "⨢", -- U+02A22 + ["plusdo"] = "∔", -- U+02214 + ["plusdu"] = "⨥", -- U+02A25 + ["pluse"] = "⩲", -- U+02A72 + ["plusmn"] = "±", -- U+000B1 + ["plussim"] = "⨦", -- U+02A26 + ["plustwo"] = "⨧", -- U+02A27 + ["pm"] = "±", -- U+000B1 + ["pointint"] = "⨕", -- U+02A15 + ["popf"] = "ð•¡", -- U+1D561 + ["pound"] = "£", -- U+000A3 + ["pr"] = "≺", -- U+0227A + ["prE"] = "⪳", -- U+02AB3 + ["prap"] = "⪷", -- U+02AB7 + ["prcue"] = "≼", -- U+0227C + ["pre"] = "⪯", -- U+02AAF + ["prec"] = "≺", -- U+0227A + ["precapprox"] = "⪷", -- U+02AB7 + ["preccurlyeq"] = "≼", -- U+0227C + ["preceq"] = "⪯", -- U+02AAF + ["precnapprox"] = "⪹", -- U+02AB9 + ["precneqq"] = "⪵", -- U+02AB5 + ["precnsim"] = "⋨", -- U+022E8 + ["precsim"] = "≾", -- U+0227E + ["prime"] = "′", -- U+02032 + ["primes"] = "â„™", -- U+02119 + ["prnE"] = "⪵", -- U+02AB5 + ["prnap"] = "⪹", -- U+02AB9 + ["prnsim"] = "⋨", -- U+022E8 + ["prod"] = "âˆ", -- U+0220F + ["profalar"] = "⌮", -- U+0232E + ["profline"] = "⌒", -- U+02312 + ["profsurf"] = "⌓", -- U+02313 + ["prop"] = "âˆ", -- U+0221D + ["propto"] = "âˆ", -- U+0221D + ["prsim"] = "≾", -- U+0227E + ["prurel"] = "⊰", -- U+022B0 + ["pscr"] = "ð“…", -- U+1D4C5 + ["psgr"] = "ψ", -- U+003C8 + ["psi"] = "ψ", -- U+003C8 + ["puncsp"] = " ", -- U+02008 + ["qfr"] = "ð”®", -- U+1D52E + ["qint"] = "⨌", -- U+02A0C + ["qopf"] = "ð•¢", -- U+1D562 + ["qprime"] = "â—", -- U+02057 + ["qscr"] = "ð“†", -- U+1D4C6 + ["quaternions"] = "â„", -- U+0210D + ["quatint"] = "⨖", -- U+02A16 + ["quest"] = "?", -- U+0003F + ["questeq"] = "≟", -- U+0225F + ["quot"] = "\"", -- U+00022 + ["rAarr"] = "⇛", -- U+021DB + ["rArr"] = "⇒", -- U+021D2 
+ ["rAtail"] = "⤜", -- U+0291C + ["rBarr"] = "â¤", -- U+0290F + ["rHar"] = "⥤", -- U+02964 + ["race"] = "∽̱", -- U+0223D 00331 + ["racute"] = "Å•", -- U+00155 + ["radic"] = "√", -- U+0221A + ["raemptyv"] = "⦳", -- U+029B3 + ["rang"] = "⟩", -- U+027E9 + ["rangd"] = "⦒", -- U+02992 + ["range"] = "⦥", -- U+029A5 + ["rangle"] = "⟩", -- U+027E9 + ["raquo"] = "»", -- U+000BB + ["rarr"] = "→", -- U+02192 + ["rarrap"] = "⥵", -- U+02975 + ["rarrb"] = "⇥", -- U+021E5 + ["rarrbfs"] = "⤠", -- U+02920 + ["rarrc"] = "⤳", -- U+02933 + ["rarrfs"] = "⤞", -- U+0291E + ["rarrhk"] = "↪", -- U+021AA + ["rarrlp"] = "↬", -- U+021AC + ["rarrpl"] = "⥅", -- U+02945 + ["rarrsim"] = "⥴", -- U+02974 + ["rarrtl"] = "↣", -- U+021A3 + ["rarrw"] = "â†", -- U+0219D + ["ratail"] = "⤚", -- U+0291A + ["ratio"] = "∶", -- U+02236 + ["rationals"] = "â„š", -- U+0211A + ["rbarr"] = "â¤", -- U+0290D + ["rbbrk"] = "â³", -- U+02773 + ["rbrace"] = "}", -- U+0007D + ["rbrack"] = "]", -- U+0005D + ["rbrke"] = "⦌", -- U+0298C + ["rbrksld"] = "⦎", -- U+0298E + ["rbrkslu"] = "â¦", -- U+02990 + ["rcaron"] = "Å™", -- U+00159 + ["rcedil"] = "Å—", -- U+00157 + ["rceil"] = "⌉", -- U+02309 + ["rcub"] = "}", -- U+0007D + ["rcy"] = "Ñ€", -- U+00440 + ["rdca"] = "⤷", -- U+02937 + ["rdldhar"] = "⥩", -- U+02969 + ["rdquo"] = "â€", -- U+0201D + ["rdquor"] = "â€", -- U+0201D + ["rdsh"] = "↳", -- U+021B3 + ["real"] = "â„œ", -- U+0211C + ["realine"] = "â„›", -- U+0211B + ["realpart"] = "â„œ", -- U+0211C + ["reals"] = "â„", -- U+0211D + ["rect"] = "â–­", -- U+025AD + ["reg"] = "®", -- U+000AE + ["rfisht"] = "⥽", -- U+0297D + ["rfloor"] = "⌋", -- U+0230B + ["rfr"] = "ð”¯", -- U+1D52F + ["rgr"] = "Ï", -- U+003C1 + ["rhard"] = "â‡", -- U+021C1 + ["rharu"] = "⇀", -- U+021C0 + ["rharul"] = "⥬", -- U+0296C + ["rho"] = "Ï", -- U+003C1 + ["rhov"] = "ϱ", -- U+003F1 + ["rightarrow"] = "→", -- U+02192 + ["rightarrowtail"] = "↣", -- U+021A3 + ["rightharpoondown"] = "â‡", -- U+021C1 + ["rightharpoonup"] = "⇀", -- U+021C0 + ["rightleftarrows"] = "⇄", -- U+021C4 + ["rightleftharpoons"] = "⇌", -- U+021CC + ["rightrightarrows"] = "⇉", -- U+021C9 + ["rightsquigarrow"] = "â†", -- U+0219D + ["rightthreetimes"] = "â‹Œ", -- U+022CC + ["ring"] = "Ëš", -- U+002DA + ["risingdotseq"] = "≓", -- U+02253 + ["rlarr"] = "⇄", -- U+021C4 + ["rlhar"] = "⇌", -- U+021CC + ["rlm"] = "â€", -- U+0200F + ["rmoust"] = "⎱", -- U+023B1 + ["rmoustache"] = "⎱", -- U+023B1 + ["rnmid"] = "â«®", -- U+02AEE + ["roang"] = "⟭", -- U+027ED + ["roarr"] = "⇾", -- U+021FE + ["robrk"] = "⟧", -- U+027E7 + ["ropar"] = "⦆", -- U+02986 + ["ropf"] = "ð•£", -- U+1D563 + ["roplus"] = "⨮", -- U+02A2E + ["rotimes"] = "⨵", -- U+02A35 + ["rpar"] = ")", -- U+00029 + ["rpargt"] = "⦔", -- U+02994 + ["rppolint"] = "⨒", -- U+02A12 + ["rrarr"] = "⇉", -- U+021C9 + ["rsaquo"] = "›", -- U+0203A + ["rscr"] = "ð“‡", -- U+1D4C7 + ["rsh"] = "↱", -- U+021B1 + ["rsqb"] = "]", -- U+0005D + ["rsquo"] = "’", -- U+02019 + ["rsquor"] = "’", -- U+02019 + ["rthree"] = "â‹Œ", -- U+022CC + ["rtimes"] = "â‹Š", -- U+022CA + ["rtri"] = "â–¹", -- U+025B9 + ["rtrie"] = "⊵", -- U+022B5 + ["rtrif"] = "â–¸", -- U+025B8 + ["rtriltri"] = "⧎", -- U+029CE + ["ruluhar"] = "⥨", -- U+02968 + ["rx"] = "â„ž", -- U+0211E + ["sacute"] = "Å›", -- U+0015B + ["sbquo"] = "‚", -- U+0201A + ["sc"] = "≻", -- U+0227B + ["scE"] = "⪴", -- U+02AB4 + ["scap"] = "⪸", -- U+02AB8 + ["scaron"] = "Å¡", -- U+00161 + ["sccue"] = "≽", -- U+0227D + ["sce"] = "⪰", -- U+02AB0 + ["scedil"] = "ÅŸ", -- U+0015F + ["scirc"] = "Å", -- U+0015D + ["scnE"] = "⪶", -- U+02AB6 + ["scnap"] = "⪺", 
-- U+02ABA + ["scnsim"] = "â‹©", -- U+022E9 + ["scpolint"] = "⨓", -- U+02A13 + ["scsim"] = "≿", -- U+0227F + ["scy"] = "Ñ", -- U+00441 + ["sdot"] = "â‹…", -- U+022C5 + ["sdotb"] = "⊡", -- U+022A1 + ["sdote"] = "⩦", -- U+02A66 + ["seArr"] = "⇘", -- U+021D8 + ["searhk"] = "⤥", -- U+02925 + ["searr"] = "↘", -- U+02198 + ["searrow"] = "↘", -- U+02198 + ["sect"] = "§", -- U+000A7 + ["semi"] = ";", -- U+0003B + ["seswar"] = "⤩", -- U+02929 + ["setminus"] = "∖", -- U+02216 + ["setmn"] = "∖", -- U+02216 + ["sext"] = "✶", -- U+02736 + ["sfgr"] = "Ï‚", -- U+003C2 + ["sfr"] = "ð”°", -- U+1D530 + ["sfrown"] = "⌢", -- U+02322 + ["sgr"] = "σ", -- U+003C3 + ["sharp"] = "♯", -- U+0266F + ["shchcy"] = "щ", -- U+00449 + ["shcy"] = "ш", -- U+00448 + ["shortmid"] = "∣", -- U+02223 + ["shortparallel"] = "∥", -- U+02225 + ["shy"] = "­", -- U+000AD + ["sigma"] = "σ", -- U+003C3 + ["sigmaf"] = "Ï‚", -- U+003C2 + ["sigmav"] = "Ï‚", -- U+003C2 + ["sim"] = "∼", -- U+0223C + ["simdot"] = "⩪", -- U+02A6A + ["sime"] = "≃", -- U+02243 + ["simeq"] = "≃", -- U+02243 + ["simg"] = "⪞", -- U+02A9E + ["simgE"] = "⪠", -- U+02AA0 + ["siml"] = "âª", -- U+02A9D + ["simlE"] = "⪟", -- U+02A9F + ["simne"] = "≆", -- U+02246 + ["simplus"] = "⨤", -- U+02A24 + ["simrarr"] = "⥲", -- U+02972 + ["slarr"] = "â†", -- U+02190 + ["smallsetminus"] = "∖", -- U+02216 + ["smashp"] = "⨳", -- U+02A33 + ["smeparsl"] = "⧤", -- U+029E4 + ["smid"] = "∣", -- U+02223 + ["smile"] = "⌣", -- U+02323 + ["smt"] = "⪪", -- U+02AAA + ["smte"] = "⪬", -- U+02AAC + ["smtes"] = "⪬︀", -- U+02AAC 0FE00 + ["softcy"] = "ÑŒ", -- U+0044C + ["sol"] = "/", -- U+0002F + ["solb"] = "⧄", -- U+029C4 + ["solbar"] = "⌿", -- U+0233F + ["sopf"] = "ð•¤", -- U+1D564 + ["spades"] = "â™ ", -- U+02660 + ["spadesuit"] = "â™ ", -- U+02660 + ["spar"] = "∥", -- U+02225 + ["sqcap"] = "⊓", -- U+02293 + ["sqcaps"] = "⊓︀", -- U+02293 0FE00 + ["sqcup"] = "⊔", -- U+02294 + ["sqcups"] = "⊔︀", -- U+02294 0FE00 + ["sqsub"] = "âŠ", -- U+0228F + ["sqsube"] = "⊑", -- U+02291 + ["sqsubset"] = "âŠ", -- U+0228F + ["sqsubseteq"] = "⊑", -- U+02291 + ["sqsup"] = "âŠ", -- U+02290 + ["sqsupe"] = "⊒", -- U+02292 + ["sqsupset"] = "âŠ", -- U+02290 + ["sqsupseteq"] = "⊒", -- U+02292 + ["squ"] = "â–¡", -- U+025A1 + ["square"] = "â–¡", -- U+025A1 + ["squarf"] = "â–ª", -- U+025AA + ["squf"] = "â–ª", -- U+025AA + ["srarr"] = "→", -- U+02192 + ["sscr"] = "ð“ˆ", -- U+1D4C8 + ["ssetmn"] = "∖", -- U+02216 + ["ssmile"] = "⌣", -- U+02323 + ["sstarf"] = "⋆", -- U+022C6 + ["star"] = "☆", -- U+02606 + ["starf"] = "★", -- U+02605 + ["straightepsilon"] = "ϵ", -- U+003F5 + ["straightphi"] = "Ï•", -- U+003D5 + ["strns"] = "¯", -- U+000AF + ["sub"] = "⊂", -- U+02282 + ["subE"] = "â«…", -- U+02AC5 + ["subdot"] = "⪽", -- U+02ABD + ["sube"] = "⊆", -- U+02286 + ["subedot"] = "⫃", -- U+02AC3 + ["submult"] = "â«", -- U+02AC1 + ["subnE"] = "â«‹", -- U+02ACB + ["subne"] = "⊊", -- U+0228A + ["subplus"] = "⪿", -- U+02ABF + ["subrarr"] = "⥹", -- U+02979 + ["subset"] = "⊂", -- U+02282 + ["subseteq"] = "⊆", -- U+02286 + ["subseteqq"] = "â«…", -- U+02AC5 + ["subsetneq"] = "⊊", -- U+0228A + ["subsetneqq"] = "â«‹", -- U+02ACB + ["subsim"] = "⫇", -- U+02AC7 + ["subsub"] = "â«•", -- U+02AD5 + ["subsup"] = "â«“", -- U+02AD3 + ["succ"] = "≻", -- U+0227B + ["succapprox"] = "⪸", -- U+02AB8 + ["succcurlyeq"] = "≽", -- U+0227D + ["succeq"] = "⪰", -- U+02AB0 + ["succnapprox"] = "⪺", -- U+02ABA + ["succneqq"] = "⪶", -- U+02AB6 + ["succnsim"] = "â‹©", -- U+022E9 + ["succsim"] = "≿", -- U+0227F + ["sum"] = "∑", -- U+02211 + ["sung"] = "♪", -- U+0266A + ["sup"] 
= "⊃", -- U+02283 + ["sup1"] = "¹", -- U+000B9 + ["sup2"] = "²", -- U+000B2 + ["sup3"] = "³", -- U+000B3 + ["supE"] = "⫆", -- U+02AC6 + ["supdot"] = "⪾", -- U+02ABE + ["supdsub"] = "⫘", -- U+02AD8 + ["supe"] = "⊇", -- U+02287 + ["supedot"] = "â«„", -- U+02AC4 + ["suphsol"] = "⟉", -- U+027C9 + ["suphsub"] = "â«—", -- U+02AD7 + ["suplarr"] = "⥻", -- U+0297B + ["supmult"] = "â«‚", -- U+02AC2 + ["supnE"] = "â«Œ", -- U+02ACC + ["supne"] = "⊋", -- U+0228B + ["supplus"] = "â«€", -- U+02AC0 + ["supset"] = "⊃", -- U+02283 + ["supseteq"] = "⊇", -- U+02287 + ["supseteqq"] = "⫆", -- U+02AC6 + ["supsetneq"] = "⊋", -- U+0228B + ["supsetneqq"] = "â«Œ", -- U+02ACC + ["supsim"] = "⫈", -- U+02AC8 + ["supsub"] = "â«”", -- U+02AD4 + ["supsup"] = "â«–", -- U+02AD6 + ["swArr"] = "⇙", -- U+021D9 + ["swarhk"] = "⤦", -- U+02926 + ["swarr"] = "↙", -- U+02199 + ["swarrow"] = "↙", -- U+02199 + ["swnwar"] = "⤪", -- U+0292A + ["szlig"] = "ß", -- U+000DF + ["target"] = "⌖", -- U+02316 + ["tau"] = "Ï„", -- U+003C4 + ["tbrk"] = "⎴", -- U+023B4 + ["tcaron"] = "Å¥", -- U+00165 + ["tcedil"] = "Å£", -- U+00163 + ["tcy"] = "Ñ‚", -- U+00442 + ["tdot"] = "⃛", -- U+020DB + ["telrec"] = "⌕", -- U+02315 + ["tfr"] = "ð”±", -- U+1D531 + ["tgr"] = "Ï„", -- U+003C4 + ["there4"] = "∴", -- U+02234 + ["therefore"] = "∴", -- U+02234 + ["theta"] = "θ", -- U+003B8 + ["thetasym"] = "Ï‘", -- U+003D1 + ["thetav"] = "Ï‘", -- U+003D1 + ["thgr"] = "θ", -- U+003B8 + ["thickapprox"] = "≈", -- U+02248 + ["thicksim"] = "∼", -- U+0223C + ["thinsp"] = " ", -- U+02009 + ["thkap"] = "≈", -- U+02248 + ["thksim"] = "∼", -- U+0223C + ["thorn"] = "þ", -- U+000FE + ["tilde"] = "Ëœ", -- U+002DC + ["times"] = "×", -- U+000D7 + ["timesb"] = "⊠", -- U+022A0 + ["timesbar"] = "⨱", -- U+02A31 + ["timesd"] = "⨰", -- U+02A30 + ["tint"] = "∭", -- U+0222D + ["toea"] = "⤨", -- U+02928 + ["top"] = "⊤", -- U+022A4 + ["topbot"] = "⌶", -- U+02336 + ["topcir"] = "⫱", -- U+02AF1 + ["topf"] = "ð•¥", -- U+1D565 + ["topfork"] = "â«š", -- U+02ADA + ["tosa"] = "⤩", -- U+02929 + ["tprime"] = "‴", -- U+02034 + ["trade"] = "â„¢", -- U+02122 + ["triangle"] = "â–µ", -- U+025B5 + ["triangledown"] = "â–¿", -- U+025BF + ["triangleleft"] = "â—ƒ", -- U+025C3 + ["trianglelefteq"] = "⊴", -- U+022B4 + ["triangleq"] = "≜", -- U+0225C + ["triangleright"] = "â–¹", -- U+025B9 + ["trianglerighteq"] = "⊵", -- U+022B5 + ["tridot"] = "â—¬", -- U+025EC + ["trie"] = "≜", -- U+0225C + ["triminus"] = "⨺", -- U+02A3A + ["triplus"] = "⨹", -- U+02A39 + ["trisb"] = "â§", -- U+029CD + ["tritime"] = "⨻", -- U+02A3B + ["trpezium"] = "â¢", -- U+023E2 + ["tscr"] = "ð“‰", -- U+1D4C9 + ["tscy"] = "ц", -- U+00446 + ["tshcy"] = "Ñ›", -- U+0045B + ["tstrok"] = "ŧ", -- U+00167 + ["twixt"] = "≬", -- U+0226C + ["twoheadleftarrow"] = "↞", -- U+0219E + ["twoheadrightarrow"] = "↠", -- U+021A0 + ["uArr"] = "⇑", -- U+021D1 + ["uHar"] = "⥣", -- U+02963 + ["uacgr"] = "Ï", -- U+003CD + ["uacute"] = "ú", -- U+000FA + ["uarr"] = "↑", -- U+02191 + ["ubrcy"] = "Ñž", -- U+0045E + ["ubreve"] = "Å­", -- U+0016D + ["ucirc"] = "û", -- U+000FB + ["ucy"] = "у", -- U+00443 + ["udarr"] = "⇅", -- U+021C5 + ["udblac"] = "ű", -- U+00171 + ["udhar"] = "⥮", -- U+0296E + ["udiagr"] = "ΰ", -- U+003B0 + ["udigr"] = "Ï‹", -- U+003CB + ["ufisht"] = "⥾", -- U+0297E + ["ufr"] = "ð”²", -- U+1D532 + ["ugr"] = "Ï…", -- U+003C5 + ["ugrave"] = "ù", -- U+000F9 + ["uharl"] = "↿", -- U+021BF + ["uharr"] = "↾", -- U+021BE + ["uhblk"] = "â–€", -- U+02580 + ["ulcorn"] = "⌜", -- U+0231C + ["ulcorner"] = "⌜", -- U+0231C + ["ulcrop"] = "âŒ", -- U+0230F + ["ultri"] = 
"â—¸", -- U+025F8 + ["umacr"] = "Å«", -- U+0016B + ["uml"] = "¨", -- U+000A8 + ["uogon"] = "ų", -- U+00173 + ["uopf"] = "ð•¦", -- U+1D566 + ["uparrow"] = "↑", -- U+02191 + ["updownarrow"] = "↕", -- U+02195 + ["upharpoonleft"] = "↿", -- U+021BF + ["upharpoonright"] = "↾", -- U+021BE + ["uplus"] = "⊎", -- U+0228E + ["upsi"] = "Ï…", -- U+003C5 + ["upsih"] = "Ï’", -- U+003D2 + ["upsilon"] = "Ï…", -- U+003C5 + ["upuparrows"] = "⇈", -- U+021C8 + ["urcorn"] = "âŒ", -- U+0231D + ["urcorner"] = "âŒ", -- U+0231D + ["urcrop"] = "⌎", -- U+0230E + ["uring"] = "ů", -- U+0016F + ["urtri"] = "â—¹", -- U+025F9 + ["uscr"] = "ð“Š", -- U+1D4CA + ["utdot"] = "â‹°", -- U+022F0 + ["utilde"] = "Å©", -- U+00169 + ["utri"] = "â–µ", -- U+025B5 + ["utrif"] = "â–´", -- U+025B4 + ["uuarr"] = "⇈", -- U+021C8 + ["uuml"] = "ü", -- U+000FC + ["uwangle"] = "⦧", -- U+029A7 + ["vArr"] = "⇕", -- U+021D5 + ["vBar"] = "⫨", -- U+02AE8 + ["vBarv"] = "â«©", -- U+02AE9 + ["vDash"] = "⊨", -- U+022A8 + ["vangrt"] = "⦜", -- U+0299C + ["varepsilon"] = "ϵ", -- U+003F5 + ["varkappa"] = "Ï°", -- U+003F0 + ["varnothing"] = "∅", -- U+02205 + ["varphi"] = "Ï•", -- U+003D5 + ["varpi"] = "Ï–", -- U+003D6 + ["varpropto"] = "âˆ", -- U+0221D + ["varr"] = "↕", -- U+02195 + ["varrho"] = "ϱ", -- U+003F1 + ["varsigma"] = "Ï‚", -- U+003C2 + ["varsubsetneq"] = "⊊︀", -- U+0228A 0FE00 + ["varsubsetneqq"] = "⫋︀", -- U+02ACB 0FE00 + ["varsupsetneq"] = "⊋︀", -- U+0228B 0FE00 + ["varsupsetneqq"] = "⫌︀", -- U+02ACC 0FE00 + ["vartheta"] = "Ï‘", -- U+003D1 + ["vartriangleleft"] = "⊲", -- U+022B2 + ["vartriangleright"] = "⊳", -- U+022B3 + ["vcy"] = "в", -- U+00432 + ["vdash"] = "⊢", -- U+022A2 + ["vee"] = "∨", -- U+02228 + ["veebar"] = "⊻", -- U+022BB + ["veeeq"] = "≚", -- U+0225A + ["vellip"] = "â‹®", -- U+022EE + ["verbar"] = "|", -- U+0007C + ["vert"] = "|", -- U+0007C + ["vfr"] = "ð”³", -- U+1D533 + ["vltri"] = "⊲", -- U+022B2 + ["vnsub"] = "⊂⃒", -- U+02282 020D2 + ["vnsup"] = "⊃⃒", -- U+02283 020D2 + ["vopf"] = "ð•§", -- U+1D567 + ["vprop"] = "âˆ", -- U+0221D + ["vrtri"] = "⊳", -- U+022B3 + ["vscr"] = "ð“‹", -- U+1D4CB + ["vsubnE"] = "⫋︀", -- U+02ACB 0FE00 + ["vsubne"] = "⊊︀", -- U+0228A 0FE00 + ["vsupnE"] = "⫌︀", -- U+02ACC 0FE00 + ["vsupne"] = "⊋︀", -- U+0228B 0FE00 + ["vzigzag"] = "⦚", -- U+0299A + ["wcirc"] = "ŵ", -- U+00175 + ["wedbar"] = "â©Ÿ", -- U+02A5F + ["wedge"] = "∧", -- U+02227 + ["wedgeq"] = "≙", -- U+02259 + ["weierp"] = "℘", -- U+02118 + ["wfr"] = "ð”´", -- U+1D534 + ["wopf"] = "ð•¨", -- U+1D568 + ["wp"] = "℘", -- U+02118 + ["wr"] = "≀", -- U+02240 + ["wreath"] = "≀", -- U+02240 + ["wscr"] = "ð“Œ", -- U+1D4CC + ["xcap"] = "â‹‚", -- U+022C2 + ["xcirc"] = "â—¯", -- U+025EF + ["xcup"] = "⋃", -- U+022C3 + ["xdtri"] = "â–½", -- U+025BD + ["xfr"] = "ð”µ", -- U+1D535 + ["xgr"] = "ξ", -- U+003BE + ["xhArr"] = "⟺", -- U+027FA + ["xharr"] = "⟷", -- U+027F7 + ["xi"] = "ξ", -- U+003BE + ["xlArr"] = "⟸", -- U+027F8 + ["xlarr"] = "⟵", -- U+027F5 + ["xmap"] = "⟼", -- U+027FC + ["xnis"] = "â‹»", -- U+022FB + ["xodot"] = "⨀", -- U+02A00 + ["xopf"] = "ð•©", -- U+1D569 + ["xoplus"] = "â¨", -- U+02A01 + ["xotime"] = "⨂", -- U+02A02 + ["xrArr"] = "⟹", -- U+027F9 + ["xrarr"] = "⟶", -- U+027F6 + ["xscr"] = "ð“", -- U+1D4CD + ["xsqcup"] = "⨆", -- U+02A06 + ["xuplus"] = "⨄", -- U+02A04 + ["xutri"] = "â–³", -- U+025B3 + ["xvee"] = "â‹", -- U+022C1 + ["xwedge"] = "â‹€", -- U+022C0 + ["yacute"] = "ý", -- U+000FD + ["yacy"] = "Ñ", -- U+0044F + ["ycirc"] = "Å·", -- U+00177 + ["ycy"] = "Ñ‹", -- U+0044B + ["yen"] = "Â¥", -- U+000A5 + ["yfr"] = "ð”¶", -- U+1D536 + ["yicy"] = 
"Ñ—", -- U+00457 + ["yopf"] = "ð•ª", -- U+1D56A + ["yscr"] = "ð“Ž", -- U+1D4CE + ["yucy"] = "ÑŽ", -- U+0044E + ["yuml"] = "ÿ", -- U+000FF + ["zacute"] = "ź", -- U+0017A + ["zcaron"] = "ž", -- U+0017E + ["zcy"] = "з", -- U+00437 + ["zdot"] = "ż", -- U+0017C + ["zeetrf"] = "ℨ", -- U+02128 + ["zeta"] = "ζ", -- U+003B6 + ["zfr"] = "ð”·", -- U+1D537 + ["zgr"] = "ζ", -- U+003B6 + ["zhcy"] = "ж", -- U+00436 + ["zigrarr"] = "â‡", -- U+021DD + ["zopf"] = "ð•«", -- U+1D56B + ["zscr"] = "ð“", -- U+1D4CF + ["zwj"] = "â€", -- U+0200D + ["zwnj"] = "‌", -- U+0200C +} + +characters = characters or { } +characters.entities = entities + +entities.plusminus = "±" -- 0x000B1 +entities.minusplus = "∓" -- 0x02213 +entities.cdots = utf.char(0x02026) -- U+02026 diff --git a/tex/context/base/char-ini.lua b/tex/context/base/char-ini.lua index b75f5eda7..c5e4da8c4 100644 --- a/tex/context/base/char-ini.lua +++ b/tex/context/base/char-ini.lua @@ -1,1158 +1,1158 @@ -if not modules then modules = { } end modules ['char-ini'] = { - version = 1.001, - comment = "companion to char-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: make two files, one for format generation, one for format use - --- we can remove the tag range starting at 0xE0000 (special applications) - -local utfchar, utfbyte, utfvalues, ustring = utf.char, utf.byte, utf.values, utf.ustring -local concat, unpack, tohash = table.concat, table.unpack, table.tohash -local next, tonumber, type, rawget, rawset = next, tonumber, type, rawget, rawset -local format, lower, gsub, match, gmatch = string.format, string.lower, string.gsub, string.match, string.match, string.gmatch -local P, R, Cs, lpegmatch, patterns = lpeg.P, lpeg.R, lpeg.Cs, lpeg.match, lpeg.patterns - -local utf8byte = patterns.utf8byte -local utf8char = patterns.utf8char - -local allocate = utilities.storage.allocate -local mark = utilities.storage.mark - -local setmetatableindex = table.setmetatableindex - -local trace_defining = false trackers.register("characters.defining", function(v) characters_defining = v end) - -local report_defining = logs.reporter("characters") - ---[[ldx-- -

This module implements some methods and creates additional datastructures -from the big character table that we use for all kind of purposes: -char-def.lua.
-
We assume that at this point characters.data is already -loaded!

---ldx]]-- - -characters = characters or { } -local characters = characters -local data = characters.data - -if data then - mark(data) -- why does this fail -else - report_defining("fatal error: 'char-def.lua' is not loaded") - os.exit() -end - ---[[ldx-- -

This converts a string (if given) into a number.

---ldx]]-- - -local pattern = (P("0x") + P("U+")) * ((R("09","AF")^1 * P(-1)) / function(s) return tonumber(s,16) end) - -patterns.chartonumber = pattern - -local function chartonumber(k) - if type(k) == "string" then - local u = lpegmatch(pattern,k) - if u then - return utfbyte(u) - else - return utfbyte(k) or 0 - end - else - return k or 0 - end -end - -local function charfromnumber(k) - if type(k) == "number" then - return utfchar(k) or "" - else - local u = lpegmatch(pattern,k) - if u then - return utfchar(u) - else - return k - end - end -end - ---~ print(chartonumber(97), chartonumber("a"), chartonumber("0x61"), chartonumber("U+61")) - -characters.tonumber = chartonumber -characters.fromnumber = charfromnumber - -local private = { - description = "PRIVATE SLOT", -} - -local ranges = allocate() -characters.ranges = ranges - -setmetatableindex(data, function(t,k) - local tk = type(k) - if tk == "string" then - k = lpegmatch(pattern,k) or utfbyte(k) - if k then - local v = rawget(t,k) - if v then - return v - else - tk = "number" -- fall through to range - end - else - return private - end - end - if tk == "number" and k < 0xF0000 then - for r=1,#ranges do - local rr = ranges[r] - if k >= rr.first and k <= rr.last then - local extender = rr.extender - if extender then - local v = extender(k,v) - t[k] = v - return v - end - end - end - end - return private -- handy for when we loop over characters in fonts and check for a property -end) - -local blocks = allocate { - ["aegeannumbers"] = { first = 0x10100, last = 0x1013F, description = "Aegean Numbers" }, - ["alchemicalsymbols"] = { first = 0x1F700, last = 0x1F77F, description = "Alchemical Symbols" }, - ["alphabeticpresentationforms"] = { first = 0x0FB00, last = 0x0FB4F, otf="latn", description = "Alphabetic Presentation Forms" }, - ["ancientgreekmusicalnotation"] = { first = 0x1D200, last = 0x1D24F, otf="grek", description = "Ancient Greek Musical Notation" }, - ["ancientgreeknumbers"] = { first = 0x10140, last = 0x1018F, otf="grek", description = "Ancient Greek Numbers" }, - ["ancientsymbols"] = { first = 0x10190, last = 0x101CF, otf="grek", description = "Ancient Symbols" }, - ["arabic"] = { first = 0x00600, last = 0x006FF, otf="arab", description = "Arabic" }, - ["arabicextendeda"] = { first = 0x008A0, last = 0x008FF, description = "Arabic Extended-A" }, - ["arabicmathematicalalphabeticsymbols"] = { first = 0x1EE00, last = 0x1EEFF, description = "Arabic Mathematical Alphabetic Symbols" }, - ["arabicpresentationformsa"] = { first = 0x0FB50, last = 0x0FDFF, otf="arab", description = "Arabic Presentation Forms-A" }, - ["arabicpresentationformsb"] = { first = 0x0FE70, last = 0x0FEFF, otf="arab", description = "Arabic Presentation Forms-B" }, - ["arabicsupplement"] = { first = 0x00750, last = 0x0077F, otf="arab", description = "Arabic Supplement" }, - ["armenian"] = { first = 0x00530, last = 0x0058F, otf="armn", description = "Armenian" }, - ["arrows"] = { first = 0x02190, last = 0x021FF, description = "Arrows" }, - ["avestan"] = { first = 0x10B00, last = 0x10B3F, description = "Avestan" }, - ["balinese"] = { first = 0x01B00, last = 0x01B7F, otf="bali", description = "Balinese" }, - ["bamum"] = { first = 0x0A6A0, last = 0x0A6FF, description = "Bamum" }, - ["bamumsupplement"] = { first = 0x16800, last = 0x16A3F, description = "Bamum Supplement" }, - ["basiclatin"] = { first = 0x00000, last = 0x0007F, otf="latn", description = "Basic Latin" }, - ["batak"] = { first = 0x01BC0, last = 0x01BFF, description = "Batak" }, - ["bengali"] = { first 
= 0x00980, last = 0x009FF, otf="beng", description = "Bengali" }, - ["blockelements"] = { first = 0x02580, last = 0x0259F, otf="bopo", description = "Block Elements" }, - ["bopomofo"] = { first = 0x03100, last = 0x0312F, otf="bopo", description = "Bopomofo" }, - ["bopomofoextended"] = { first = 0x031A0, last = 0x031BF, otf="bopo", description = "Bopomofo Extended" }, - ["boxdrawing"] = { first = 0x02500, last = 0x0257F, description = "Box Drawing" }, - ["brahmi"] = { first = 0x11000, last = 0x1107F, description = "Brahmi" }, - ["braillepatterns"] = { first = 0x02800, last = 0x028FF, otf="brai", description = "Braille Patterns" }, - ["buginese"] = { first = 0x01A00, last = 0x01A1F, otf="bugi", description = "Buginese" }, - ["buhid"] = { first = 0x01740, last = 0x0175F, otf="buhd", description = "Buhid" }, - ["byzantinemusicalsymbols"] = { first = 0x1D000, last = 0x1D0FF, otf="byzm", description = "Byzantine Musical Symbols" }, - ["commonindicnumberforms"] = { first = 0x0A830, last = 0x0A83F, description = "Common Indic Number Forms" }, - ["carian"] = { first = 0x102A0, last = 0x102DF, description = "Carian" }, - ["cham"] = { first = 0x0AA00, last = 0x0AA5F, description = "Cham" }, - ["cherokee"] = { first = 0x013A0, last = 0x013FF, otf="cher", description = "Cherokee" }, - ["cjkcompatibility"] = { first = 0x03300, last = 0x033FF, otf="hang", description = "CJK Compatibility" }, - ["cjkcompatibilityforms"] = { first = 0x0FE30, last = 0x0FE4F, otf="hang", description = "CJK Compatibility Forms" }, - ["cjkcompatibilityideographs"] = { first = 0x0F900, last = 0x0FAFF, otf="hang", description = "CJK Compatibility Ideographs" }, - ["cjkcompatibilityideographssupplement"] = { first = 0x2F800, last = 0x2FA1F, otf="hang", description = "CJK Compatibility Ideographs Supplement" }, - ["cjkradicalssupplement"] = { first = 0x02E80, last = 0x02EFF, otf="hang", description = "CJK Radicals Supplement" }, - ["cjkstrokes"] = { first = 0x031C0, last = 0x031EF, otf="hang", description = "CJK Strokes" }, - ["cjksymbolsandpunctuation"] = { first = 0x03000, last = 0x0303F, otf="hang", description = "CJK Symbols and Punctuation" }, - ["cjkunifiedideographs"] = { first = 0x04E00, last = 0x09FFF, otf="hang", description = "CJK Unified Ideographs" }, - ["cjkunifiedideographsextensiona"] = { first = 0x03400, last = 0x04DBF, otf="hang", description = "CJK Unified Ideographs Extension A" }, - ["cjkunifiedideographsextensionb"] = { first = 0x20000, last = 0x2A6DF, otf="hang", description = "CJK Unified Ideographs Extension B" }, - ["combiningdiacriticalmarks"] = { first = 0x00300, last = 0x0036F, description = "Combining Diacritical Marks" }, - ["combiningdiacriticalmarksforsymbols"] = { first = 0x020D0, last = 0x020FF, description = "Combining Diacritical Marks for Symbols" }, - ["combiningdiacriticalmarkssupplement"] = { first = 0x01DC0, last = 0x01DFF, description = "Combining Diacritical Marks Supplement" }, - ["combininghalfmarks"] = { first = 0x0FE20, last = 0x0FE2F, description = "Combining Half Marks" }, - ["controlpictures"] = { first = 0x02400, last = 0x0243F, description = "Control Pictures" }, - ["coptic"] = { first = 0x02C80, last = 0x02CFF, otf="copt", description = "Coptic" }, - ["countingrodnumerals"] = { first = 0x1D360, last = 0x1D37F, description = "Counting Rod Numerals" }, - ["cuneiform"] = { first = 0x12000, last = 0x123FF, otf="xsux", description = "Cuneiform" }, - ["cuneiformnumbersandpunctuation"] = { first = 0x12400, last = 0x1247F, otf="xsux", description = "Cuneiform Numbers and Punctuation" 
}, - ["currencysymbols"] = { first = 0x020A0, last = 0x020CF, description = "Currency Symbols" }, - ["cypriotsyllabary"] = { first = 0x10800, last = 0x1083F, otf="cprt", description = "Cypriot Syllabary" }, - ["cyrillic"] = { first = 0x00400, last = 0x004FF, otf="cyrl", description = "Cyrillic" }, - ["cyrillicextendeda"] = { first = 0x02DE0, last = 0x02DFF, otf="cyrl", description = "Cyrillic Extended-A" }, - ["cyrillicextendedb"] = { first = 0x0A640, last = 0x0A69F, otf="cyrl", description = "Cyrillic Extended-B" }, - ["cyrillicsupplement"] = { first = 0x00500, last = 0x0052F, otf="cyrl", description = "Cyrillic Supplement" }, - ["deseret"] = { first = 0x10400, last = 0x1044F, otf="dsrt", description = "Deseret" }, - ["devanagari"] = { first = 0x00900, last = 0x0097F, otf="deva", description = "Devanagari" }, - ["devanagariextended"] = { first = 0x0A8E0, last = 0x0A8FF, description = "Devanagari Extended" }, - ["dingbats"] = { first = 0x02700, last = 0x027BF, description = "Dingbats" }, - ["dominotiles"] = { first = 0x1F030, last = 0x1F09F, description = "Domino Tiles" }, - ["egyptianhieroglyphs"] = { first = 0x13000, last = 0x1342F, description = "Egyptian Hieroglyphs" }, - ["emoticons"] = { first = 0x1F600, last = 0x1F64F, description = "Emoticons" }, - ["enclosedalphanumericsupplement"] = { first = 0x1F100, last = 0x1F1FF, description = "Enclosed Alphanumeric Supplement" }, - ["enclosedalphanumerics"] = { first = 0x02460, last = 0x024FF, description = "Enclosed Alphanumerics" }, - ["enclosedcjklettersandmonths"] = { first = 0x03200, last = 0x032FF, description = "Enclosed CJK Letters and Months" }, - ["enclosedideographicsupplement"] = { first = 0x1F200, last = 0x1F2FF, description = "Enclosed Ideographic Supplement" }, - ["ethiopic"] = { first = 0x01200, last = 0x0137F, otf="ethi", description = "Ethiopic" }, - ["ethiopicextended"] = { first = 0x02D80, last = 0x02DDF, otf="ethi", description = "Ethiopic Extended" }, - ["ethiopicextendeda"] = { first = 0x0AB00, last = 0x0AB2F, description = "Ethiopic Extended-A" }, - ["ethiopicsupplement"] = { first = 0x01380, last = 0x0139F, otf="ethi", description = "Ethiopic Supplement" }, - ["generalpunctuation"] = { first = 0x02000, last = 0x0206F, description = "General Punctuation" }, - ["geometricshapes"] = { first = 0x025A0, last = 0x025FF, description = "Geometric Shapes" }, - ["georgian"] = { first = 0x010A0, last = 0x010FF, otf="geor", description = "Georgian" }, - ["georgiansupplement"] = { first = 0x02D00, last = 0x02D2F, otf="geor", description = "Georgian Supplement" }, - ["glagolitic"] = { first = 0x02C00, last = 0x02C5F, otf="glag", description = "Glagolitic" }, - ["gothic"] = { first = 0x10330, last = 0x1034F, otf="goth", description = "Gothic" }, - ["greekandcoptic"] = { first = 0x00370, last = 0x003FF, otf="grek", description = "Greek and Coptic" }, - ["greekextended"] = { first = 0x01F00, last = 0x01FFF, otf="grek", description = "Greek Extended" }, - ["gujarati"] = { first = 0x00A80, last = 0x00AFF, otf="gujr", description = "Gujarati" }, - ["gurmukhi"] = { first = 0x00A00, last = 0x00A7F, otf="guru", description = "Gurmukhi" }, - ["halfwidthandfullwidthforms"] = { first = 0x0FF00, last = 0x0FFEF, description = "Halfwidth and Fullwidth Forms" }, - ["hangulcompatibilityjamo"] = { first = 0x03130, last = 0x0318F, otf="jamo", description = "Hangul Compatibility Jamo" }, - ["hanguljamo"] = { first = 0x01100, last = 0x011FF, otf="jamo", description = "Hangul Jamo" }, - ["hanguljamoextendeda"] = { first = 0x0A960, last = 0x0A97F, 
description = "Hangul Jamo Extended-A" }, - ["hanguljamoextendedb"] = { first = 0x0D7B0, last = 0x0D7FF, description = "Hangul Jamo Extended-B" }, - ["hangulsyllables"] = { first = 0x0AC00, last = 0x0D7AF, otf="hang", description = "Hangul Syllables" }, - ["hanunoo"] = { first = 0x01720, last = 0x0173F, otf="hano", description = "Hanunoo" }, - ["hebrew"] = { first = 0x00590, last = 0x005FF, otf="hebr", description = "Hebrew" }, - ["highprivateusesurrogates"] = { first = 0x0DB80, last = 0x0DBFF, description = "High Private Use Surrogates" }, - ["highsurrogates"] = { first = 0x0D800, last = 0x0DB7F, description = "High Surrogates" }, - ["hiragana"] = { first = 0x03040, last = 0x0309F, otf="kana", description = "Hiragana" }, - ["ideographicdescriptioncharacters"] = { first = 0x02FF0, last = 0x02FFF, description = "Ideographic Description Characters" }, - ["imperialaramaic"] = { first = 0x10840, last = 0x1085F, description = "Imperial Aramaic" }, - ["inscriptionalpahlavi"] = { first = 0x10B60, last = 0x10B7F, description = "Inscriptional Pahlavi" }, - ["inscriptionalparthian"] = { first = 0x10B40, last = 0x10B5F, description = "Inscriptional Parthian" }, - ["ipaextensions"] = { first = 0x00250, last = 0x002AF, description = "IPA Extensions" }, - ["javanese"] = { first = 0x0A980, last = 0x0A9DF, description = "Javanese" }, - ["kaithi"] = { first = 0x11080, last = 0x110CF, description = "Kaithi" }, - ["kanasupplement"] = { first = 0x1B000, last = 0x1B0FF, description = "Kana Supplement" }, - ["kanbun"] = { first = 0x03190, last = 0x0319F, description = "Kanbun" }, - ["kangxiradicals"] = { first = 0x02F00, last = 0x02FDF, description = "Kangxi Radicals" }, - ["kannada"] = { first = 0x00C80, last = 0x00CFF, otf="knda", description = "Kannada" }, - ["katakana"] = { first = 0x030A0, last = 0x030FF, otf="kana", description = "Katakana" }, - ["katakanaphoneticextensions"] = { first = 0x031F0, last = 0x031FF, otf="kana", description = "Katakana Phonetic Extensions" }, - ["kayahli"] = { first = 0x0A900, last = 0x0A92F, description = "Kayah Li" }, - ["kharoshthi"] = { first = 0x10A00, last = 0x10A5F, otf="khar", description = "Kharoshthi" }, - ["khmer"] = { first = 0x01780, last = 0x017FF, otf="khmr", description = "Khmer" }, - ["khmersymbols"] = { first = 0x019E0, last = 0x019FF, otf="khmr", description = "Khmer Symbols" }, - ["lao"] = { first = 0x00E80, last = 0x00EFF, otf="lao", description = "Lao" }, - ["latinextendeda"] = { first = 0x00100, last = 0x0017F, otf="latn", description = "Latin Extended-A" }, - ["latinextendedadditional"] = { first = 0x01E00, last = 0x01EFF, otf="latn", description = "Latin Extended Additional" }, - ["latinextendedb"] = { first = 0x00180, last = 0x0024F, otf="latn", description = "Latin Extended-B" }, - ["latinextendedc"] = { first = 0x02C60, last = 0x02C7F, otf="latn", description = "Latin Extended-C" }, - ["latinextendedd"] = { first = 0x0A720, last = 0x0A7FF, otf="latn", description = "Latin Extended-D" }, - ["latinsupplement"] = { first = 0x00080, last = 0x000FF, otf="latn", description = "Latin-1 Supplement" }, - ["lepcha"] = { first = 0x01C00, last = 0x01C4F, description = "Lepcha" }, - ["letterlikesymbols"] = { first = 0x02100, last = 0x0214F, description = "Letterlike Symbols" }, - ["limbu"] = { first = 0x01900, last = 0x0194F, otf="limb", description = "Limbu" }, - ["linearbideograms"] = { first = 0x10080, last = 0x100FF, otf="linb", description = "Linear B Ideograms" }, - ["linearbsyllabary"] = { first = 0x10000, last = 0x1007F, otf="linb", description = "Linear 
B Syllabary" }, - ["lisu"] = { first = 0x0A4D0, last = 0x0A4FF, description = "Lisu" }, - ["lowsurrogates"] = { first = 0x0DC00, last = 0x0DFFF, description = "Low Surrogates" }, - ["lycian"] = { first = 0x10280, last = 0x1029F, description = "Lycian" }, - ["lydian"] = { first = 0x10920, last = 0x1093F, description = "Lydian" }, - ["mahjongtiles"] = { first = 0x1F000, last = 0x1F02F, description = "Mahjong Tiles" }, - ["malayalam"] = { first = 0x00D00, last = 0x00D7F, otf="mlym", description = "Malayalam" }, - ["mandiac"] = { first = 0x00840, last = 0x0085F, otf="mand", description = "Mandaic" }, - ["mathematicalalphanumericsymbols"] = { first = 0x1D400, last = 0x1D7FF, description = "Mathematical Alphanumeric Symbols" }, - ["mathematicaloperators"] = { first = 0x02200, last = 0x022FF, description = "Mathematical Operators" }, - ["meeteimayek"] = { first = 0x0ABC0, last = 0x0ABFF, description = "Meetei Mayek" }, - ["meeteimayekextensions"] = { first = 0x0AAE0, last = 0x0AAFF, description = "Meetei Mayek Extensions" }, - ["meroiticcursive"] = { first = 0x109A0, last = 0x109FF, description = "Meroitic Cursive" }, - ["meroitichieroglyphs"] = { first = 0x10980, last = 0x1099F, description = "Meroitic Hieroglyphs" }, - ["miao"] = { first = 0x16F00, last = 0x16F9F, description = "Miao" }, - ["miscellaneousmathematicalsymbolsa"] = { first = 0x027C0, last = 0x027EF, description = "Miscellaneous Mathematical Symbols-A" }, - ["miscellaneousmathematicalsymbolsb"] = { first = 0x02980, last = 0x029FF, description = "Miscellaneous Mathematical Symbols-B" }, - ["miscellaneoussymbols"] = { first = 0x02600, last = 0x026FF, description = "Miscellaneous Symbols" }, - ["miscellaneoussymbolsandarrows"] = { first = 0x02B00, last = 0x02BFF, description = "Miscellaneous Symbols and Arrows" }, - ["miscellaneoussymbolsandpictographs"] = { first = 0x1F300, last = 0x1F5FF, description = "Miscellaneous Symbols And Pictographs" }, - ["miscellaneoustechnical"] = { first = 0x02300, last = 0x023FF, description = "Miscellaneous Technical" }, - ["modifiertoneletters"] = { first = 0x0A700, last = 0x0A71F, description = "Modifier Tone Letters" }, - ["mongolian"] = { first = 0x01800, last = 0x018AF, otf="mong", description = "Mongolian" }, - ["musicalsymbols"] = { first = 0x1D100, last = 0x1D1FF, otf="musc", description = "Musical Symbols" }, - ["myanmar"] = { first = 0x01000, last = 0x0109F, otf="mymr", description = "Myanmar" }, - ["myanmarextendeda"] = { first = 0x0AA60, last = 0x0AA7F, description = "Myanmar Extended-A" }, - ["newtailue"] = { first = 0x01980, last = 0x019DF, description = "New Tai Lue" }, - ["nko"] = { first = 0x007C0, last = 0x007FF, otf="nko", description = "NKo" }, - ["numberforms"] = { first = 0x02150, last = 0x0218F, description = "Number Forms" }, - ["ogham"] = { first = 0x01680, last = 0x0169F, otf="ogam", description = "Ogham" }, - ["olchiki"] = { first = 0x01C50, last = 0x01C7F, description = "Ol Chiki" }, - ["olditalic"] = { first = 0x10300, last = 0x1032F, otf="ital", description = "Old Italic" }, - ["oldpersian"] = { first = 0x103A0, last = 0x103DF, otf="xpeo", description = "Old Persian" }, - ["oldsoutharabian"] = { first = 0x10A60, last = 0x10A7F, description = "Old South Arabian" }, - ["odlturkic"] = { first = 0x10C00, last = 0x10C4F, description = "Old Turkic" }, - ["opticalcharacterrecognition"] = { first = 0x02440, last = 0x0245F, description = "Optical Character Recognition" }, - ["oriya"] = { first = 0x00B00, last = 0x00B7F, otf="orya", description = "Oriya" }, - ["osmanya"] = { first = 
0x10480, last = 0x104AF, otf="osma", description = "Osmanya" }, - ["phagspa"] = { first = 0x0A840, last = 0x0A87F, otf="phag", description = "Phags-pa" }, - ["phaistosdisc"] = { first = 0x101D0, last = 0x101FF, description = "Phaistos Disc" }, - ["phoenician"] = { first = 0x10900, last = 0x1091F, otf="phnx", description = "Phoenician" }, - ["phoneticextensions"] = { first = 0x01D00, last = 0x01D7F, description = "Phonetic Extensions" }, - ["phoneticextensionssupplement"] = { first = 0x01D80, last = 0x01DBF, description = "Phonetic Extensions Supplement" }, - ["playingcards"] = { first = 0x1F0A0, last = 0x1F0FF, description = "Playing Cards" }, - ["privateusearea"] = { first = 0x0E000, last = 0x0F8FF, description = "Private Use Area" }, - ["rejang"] = { first = 0x0A930, last = 0x0A95F, description = "Rejang" }, - ["ruminumeralsymbols"] = { first = 0x10E60, last = 0x10E7F, description = "Rumi Numeral Symbols" }, - ["runic"] = { first = 0x016A0, last = 0x016FF, otf="runr", description = "Runic" }, - ["samaritan"] = { first = 0x00800, last = 0x0083F, description = "Samaritan" }, - ["saurashtra"] = { first = 0x0A880, last = 0x0A8DF, description = "Saurashtra" }, - ["sharada"] = { first = 0x11180, last = 0x111DF, description = "Sharada" }, - ["shavian"] = { first = 0x10450, last = 0x1047F, otf="shaw", description = "Shavian" }, - ["sinhala"] = { first = 0x00D80, last = 0x00DFF, otf="sinh", description = "Sinhala" }, - ["smallformvariants"] = { first = 0x0FE50, last = 0x0FE6F, description = "Small Form Variants" }, - ["sorasompeng"] = { first = 0x110D0, last = 0x110FF, description = "Sora Sompeng" }, - ["spacingmodifierletters"] = { first = 0x002B0, last = 0x002FF, description = "Spacing Modifier Letters" }, - ["specials"] = { first = 0x0FFF0, last = 0x0FFFF, description = "Specials" }, - ["sundanese"] = { first = 0x01B80, last = 0x01BBF, description = "Sundanese" }, - ["sundanesesupplement"] = { first = 0x01CC0, last = 0x01CCF, description = "Sundanese Supplement" }, - ["superscriptsandsubscripts"] = { first = 0x02070, last = 0x0209F, description = "Superscripts and Subscripts" }, - ["supplementalarrowsa"] = { first = 0x027F0, last = 0x027FF, description = "Supplemental Arrows-A" }, - ["supplementalarrowsb"] = { first = 0x02900, last = 0x0297F, description = "Supplemental Arrows-B" }, - ["supplementalmathematicaloperators"] = { first = 0x02A00, last = 0x02AFF, description = "Supplemental Mathematical Operators" }, - ["supplementalpunctuation"] = { first = 0x02E00, last = 0x02E7F, description = "Supplemental Punctuation" }, - ["supplementaryprivateuseareaa"] = { first = 0xF0000, last = 0xFFFFF, description = "Supplementary Private Use Area-A" }, - ["supplementaryprivateuseareab"] = { first = 0x100000,last = 0x10FFFF, description = "Supplementary Private Use Area-B" }, - ["sylotinagri"] = { first = 0x0A800, last = 0x0A82F, otf="sylo", description = "Syloti Nagri" }, - ["syriac"] = { first = 0x00700, last = 0x0074F, otf="syrc", description = "Syriac" }, - ["tagalog"] = { first = 0x01700, last = 0x0171F, otf="tglg", description = "Tagalog" }, - ["tagbanwa"] = { first = 0x01760, last = 0x0177F, otf="tagb", description = "Tagbanwa" }, - ["tags"] = { first = 0xE0000, last = 0xE007F, description = "Tags" }, - ["taile"] = { first = 0x01950, last = 0x0197F, otf="tale", description = "Tai Le" }, - ["taitham"] = { first = 0x01A20, last = 0x01AAF, description = "Tai Tham" }, - ["taiviet"] = { first = 0x0AA80, last = 0x0AADF, description = "Tai Viet" }, - ["taixuanjingsymbols"] = { first = 0x1D300, last = 
0x1D35F, description = "Tai Xuan Jing Symbols" }, - ["takri"] = { first = 0x11680, last = 0x116CF, description = "Takri" }, - ["tamil"] = { first = 0x00B80, last = 0x00BFF, otf="taml", description = "Tamil" }, - ["telugu"] = { first = 0x00C00, last = 0x00C7F, otf="telu", description = "Telugu" }, - ["thaana"] = { first = 0x00780, last = 0x007BF, otf="thaa", description = "Thaana" }, - ["thai"] = { first = 0x00E00, last = 0x00E7F, otf="thai", description = "Thai" }, - ["tibetan"] = { first = 0x00F00, last = 0x00FFF, otf="tibt", description = "Tibetan" }, - ["tifinagh"] = { first = 0x02D30, last = 0x02D7F, otf="tfng", description = "Tifinagh" }, - ["transportandmapsymbols"] = { first = 0x1F680, last = 0x1F6FF, description = "Transport And Map Symbols" }, - ["ugaritic"] = { first = 0x10380, last = 0x1039F, otf="ugar", description = "Ugaritic" }, - ["unifiedcanadianaboriginalsyllabics"] = { first = 0x01400, last = 0x0167F, otf="cans", description = "Unified Canadian Aboriginal Syllabics" }, - ["unifiedcanadianaboriginalsyllabicsextended"] = { first = 0x018B0, last = 0x018FF, description = "Unified Canadian Aboriginal Syllabics Extended" }, - ["vai"] = { first = 0x0A500, last = 0x0A63F, description = "Vai" }, - ["variationselectors"] = { first = 0x0FE00, last = 0x0FE0F, description = "Variation Selectors" }, - ["variationselectorssupplement"] = { first = 0xE0100, last = 0xE01EF, description = "Variation Selectors Supplement" }, - ["vedicextensions"] = { first = 0x01CD0, last = 0x01CFF, description = "Vedic Extensions" }, - ["verticalforms"] = { first = 0x0FE10, last = 0x0FE1F, description = "Vertical Forms" }, - ["yijinghexagramsymbols"] = { first = 0x04DC0, last = 0x04DFF, otf="yi", description = "Yijing Hexagram Symbols" }, - ["yiradicals"] = { first = 0x0A490, last = 0x0A4CF, otf="yi", description = "Yi Radicals" }, - ["yisyllables"] = { first = 0x0A000, last = 0x0A48F, otf="yi", description = "Yi Syllables" }, -} - -characters.blocks = blocks - -function characters.blockrange(name) - local b = blocks[name] - if b then - return b.first, b.last - else - return 0, 0 - end -end - -setmetatableindex(blocks, function(t,k) -- we could use an intermediate table if called often - return k and rawget(t,lower(gsub(k,"[^a-zA-Z]",""))) -end) - -local otfscripts = utilities.storage.allocate() -characters.otfscripts = otfscripts - -setmetatableindex(otfscripts,function(t,unicode) - for k, v in next, blocks do - local first, last = v.first, v.last - if unicode >= first and unicode <= last then - local script = v.otf or "dflt" - for u=first,last do - t[u] = script - end - return script - end - end - -- pretty slow when we're here - t[unicode] = "dflt" - return "dflt" -end) - -function characters.getrange(name) -- used in font fallback definitions (name or range) - local range = blocks[name] - if range then - return range.first, range.last, range.description - end - name = gsub(name,'"',"0x") -- goodie: tex hex notation - local start, stop = match(name,"^(.-)[%-%:](.-)$") - if start and stop then - start, stop = tonumber(start,16) or tonumber(start), tonumber(stop,16) or tonumber(stop) - if start and stop then - return start, stop, nil - end - end - local slot = tonumber(name,16) or tonumber(name) - return slot, slot, nil -end - -local categorytags = allocate { - lu = "Letter Uppercase", - ll = "Letter Lowercase", - lt = "Letter Titlecase", - lm = "Letter Modifier", - lo = "Letter Other", - mn = "Mark Nonspacing", - mc = "Mark Spacing Combining", - me = "Mark Enclosing", - nd = "Number Decimal Digit", - nl = 
"Number Letter", - no = "Number Other", - pc = "Punctuation Connector", - pd = "Punctuation Dash", - ps = "Punctuation Open", - pe = "Punctuation Close", - pi = "Punctuation Initial Quote", - pf = "Punctuation Final Quote", - po = "Punctuation Other", - sm = "Symbol Math", - sc = "Symbol Currency", - sk = "Symbol Modifier", - so = "Symbol Other", - zs = "Separator Space", - zl = "Separator Line", - zp = "Separator Paragraph", - cc = "Other Control", - cf = "Other Format", - cs = "Other Surrogate", - co = "Other Private Use", - cn = "Other Not Assigned", -} - -characters.categorytags = categorytags - ---~ special : cf (softhyphen) zs (emspace) ---~ characters: ll lm lo lt lu mn nl no pc pd pe pf pi po ps sc sk sm so - -local is_character = allocate ( tohash { - "lu","ll","lt","lm","lo", - "nd","nl","no", - "mn", - "nl","no", - "pc","pd","ps","pe","pi","pf","po", - "sm","sc","sk","so" -} ) - -local is_letter = allocate ( tohash { - "ll","lm","lo","lt","lu" -} ) - -local is_command = allocate ( tohash { - "cf","zs" -} ) - -local is_spacing = allocate ( tohash { - "zs", "zl","zp", -} ) - -local is_mark = allocate ( tohash { - "mn", "ms", -} ) - --- to be redone: store checked characters - -characters.is_character = is_character -characters.is_letter = is_letter -characters.is_command = is_command -characters.is_spacing = is_spacing -characters.is_mark = is_mark - -local mt = { -- yes or no ? - __index = function(t,k) - if type(k) == "number" then - local c = data[k].category - return c and rawget(t,c) - else - -- avoid auto conversion in data.characters lookups - end - end -} - -setmetatableindex(characters.is_character, mt) -setmetatableindex(characters.is_letter, mt) -setmetatableindex(characters.is_command, mt) -setmetatableindex(characters.is_spacing, mt) - --- linebreak: todo: hash --- --- normative : BK CR LF CM SG GL CB SP ZW NL WJ JL JV JT H2 H3 --- informative : XX OP CL QU NS EX SY IS PR PO NU AL ID IN HY BB BA SA AI B2 new:CP - --- east asian width: --- --- N A H W F Na - -characters.bidi = allocate { - l = "Left-to-Right", - lre = "Left-to-Right Embedding", - lro = "Left-to-Right Override", - r = "Right-to-Left", - al = "Right-to-Left Arabic", - rle = "Right-to-Left Embedding", - rlo = "Right-to-Left Override", - pdf = "Pop Directional Format", - en = "European Number", - es = "European Number Separator", - et = "European Number Terminator", - an = "Arabic Number", - cs = "Common Number Separator", - nsm = "Non-Spacing Mark", - bn = "Boundary Neutral", - b = "Paragraph Separator", - s = "Segment Separator", - ws = "Whitespace", - on = "Other Neutrals", -} - ---[[ldx-- -

At this point we assume that the big data table is loaded. From this -table we derive a few more.

---ldx]]-- - -if not characters.fallbacks then - - characters.fallbacks = { } -- not than many - - local fallbacks = characters.fallbacks - - for k, d in next, data do - local specials = d.specials - if specials and specials[1] == "compat" and specials[2] == 0x0020 then - local s = specials[3] - if s then - fallbacks[k] = s - fallbacks[s] = k - end - end - end - -end - -if storage then - storage.register("characters/fallbacks", characters.fallbacks, "characters.fallbacks") -- accents and such -end - -characters.directions = { } - -setmetatableindex(characters.directions,function(t,k) - local d = data[k] - if d then - local v = d.direction - if v then - t[k] = v - return v - end - end - t[k] = false -- maybe 'l' - return v -end) - ---[[ldx-- -

Next comes a whole series of helper methods. These are (will be) part -of the official API.

---ldx]]-- - --- we could make them virtual: characters.contextnames[n] - -function characters.contextname(n) return data[n].contextname or "" end -function characters.adobename (n) return data[n].adobename or "" end -function characters.description(n) return data[n].description or "" end --------- characters.category (n) return data[n].category or "" end - -function characters.category(n,verbose) - local c = data[n].category - if not c then - return "" - elseif verbose then - return categorytags[c] - else - return c - end -end - --- -- some day we will make a table .. not that many calls to utfchar --- --- local utfchar = utf.char --- local utfbyte = utf.byte --- local utfbytes = { } --- local utfchars = { } --- --- table.setmetatableindex(utfbytes,function(t,k) local v = utfchar(k) t[k] = v return v end) --- table.setmetatableindex(utfchars,function(t,k) local v = utfbyte(k) t[k] = v return v end) - -local function toutfstring(s) - if type(s) == "table" then - return utfchar(unpack(s)) -- concat { utfchar( unpack(s) ) } - else - return utfchar(s) - end -end - -utf.tostring = toutfstring - -local categories = allocate() characters.categories = categories -- lazy table - -setmetatableindex(categories, function(t,u) if u then local c = data[u] c = c and c.category or u t[u] = c return c end end) - -local lccodes = allocate() characters.lccodes = lccodes -- lazy table -local uccodes = allocate() characters.uccodes = uccodes -- lazy table -local shcodes = allocate() characters.shcodes = shcodes -- lazy table -local fscodes = allocate() characters.fscodes = fscodes -- lazy table - -setmetatableindex(lccodes, function(t,u) if u then local c = data[u] c = c and c.lccode or (type(u) == "string" and utfbyte(u)) or u t[u] = c return c end end) -setmetatableindex(uccodes, function(t,u) if u then local c = data[u] c = c and c.uccode or (type(u) == "string" and utfbyte(u)) or u t[u] = c return c end end) -setmetatableindex(shcodes, function(t,u) if u then local c = data[u] c = c and c.shcode or (type(u) == "string" and utfbyte(u)) or u t[u] = c return c end end) -setmetatableindex(fscodes, function(t,u) if u then local c = data[u] c = c and c.fscode or (type(u) == "string" and utfbyte(u)) or u t[u] = c return c end end) - -local lcchars = allocate() characters.lcchars = lcchars -- lazy table -local ucchars = allocate() characters.ucchars = ucchars -- lazy table -local shchars = allocate() characters.shchars = shchars -- lazy table -local fschars = allocate() characters.fschars = fschars -- lazy table - -setmetatableindex(lcchars, function(t,u) if u then local c = data[u] c = c and c.lccode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end) -setmetatableindex(ucchars, function(t,u) if u then local c = data[u] c = c and c.uccode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end) -setmetatableindex(shchars, function(t,u) if u then local c = data[u] c = c and c.shcode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end) -setmetatableindex(fschars, function(t,u) if u then local c = data[u] c = c and c.fscode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end) - -local decomposed = allocate() characters.decomposed = decomposed -- lazy table -local specials = allocate() characters.specials = specials -- lazy table - -setmetatableindex(decomposed, function(t,u) -- either a table or false - if u then - local c = data[u] - local s = 
c and c.decomposed or false -- could fall back to specials - t[u] = s - return s - end -end) - -setmetatableindex(specials, function(t,u) -- either a table or false - if u then - local c = data[u] - local s = c and c.specials or false - t[u] = s - return s - end -end) - -local specialchars = allocate() characters.specialchars = specialchars -- lazy table -local descriptions = allocate() characters.descriptions = descriptions -- lazy table - -setmetatableindex(specialchars, function(t,u) - if u then - local c = data[u] - local s = c and c.specials - if s then - local tt, ttn = { }, 0 - for i=2,#s do - local si = s[i] - local c = data[si] - if is_letter[c.category] then - ttn = ttn + 1 - tt[ttn] = utfchar(si) - end - end - c = concat(tt) - t[u] = c - return c - else - if type(u) == "number" then - u = utfchar(u) - end - t[u] = u - return u - end - end -end) - -setmetatableindex(descriptions, function(t,k) - -- 0.05 - 0.10 sec - for u, c in next, data do - local d = c.description - if d then - d = gsub(d," ","") - d = lower(d) - t[d] = u - end - end - local d = rawget(t,k) - if not d then - t[k] = k - end - return d -end) - -function characters.unicodechar(asked) - local n = tonumber(asked) - if n then - return n - elseif type(asked) == "string" then - return descriptions[asked] or descriptions[gsub(asked," ","")] - end -end - --- function characters.lower(str) --- local new, n = { }, 0 --- for u in utfvalues(str) do --- n = n + 1 --- new[n] = lcchars[u] --- end --- return concat(new) --- end --- --- function characters.upper(str) --- local new, n = { }, 0 --- for u in utfvalues(str) do --- n = n + 1 --- new[n] = ucchars[u] --- end --- return concat(new) --- end --- --- function characters.shaped(str) --- local new, n = { }, 0 --- for u in utfvalues(str) do --- n = n + 1 --- new[n] = shchars[u] --- end --- return concat(new) --- end - ------ tolower = Cs((utf8byte/lcchars)^0) ------ toupper = Cs((utf8byte/ucchars)^0) ------ toshape = Cs((utf8byte/shchars)^0) - -local tolower = Cs((utf8char/lcchars)^0) -local toupper = Cs((utf8char/ucchars)^0) -local toshape = Cs((utf8char/shchars)^0) - -patterns.tolower = tolower -patterns.toupper = toupper -patterns.toshape = toshape - -function characters.lower (str) return lpegmatch(tolower,str) end -function characters.upper (str) return lpegmatch(toupper,str) end -function characters.shaped(str) return lpegmatch(toshape,str) end - -function characters.lettered(str,spacing) - local new, n = { }, 0 - if spacing then - local done = false - for u in utfvalues(str) do - local c = data[u].category - if is_letter[c] then - if done and n > 1 then - n = n + 1 - new[n] = " " - done = false - end - n = n + 1 - new[n] = utfchar(u) - elseif spacing and is_spacing[c] then - done = true - end - end - else - for u in utfvalues(str) do - if is_letter[data[u].category] then - n = n + 1 - new[n] = utfchar(u) - end - end - end - return concat(new) -end - ---[[ldx-- -

Requesting lower and uppercase codes:

---ldx]]-- - -function characters.uccode(n) return uccodes[n] end -- obsolete -function characters.lccode(n) return lccodes[n] end -- obsolete - -function characters.safechar(n) - local c = data[n] - if c and c.contextname then - return "\\" .. c.contextname - else - return utfchar(n) - end -end - -function characters.shape(n) - local shcode = shcodes[n] - if not shcode then - return n, nil - elseif type(shcode) == "table" then - return shcode[1], shcode[#shcode] - else - return shcode, nil - end -end - --- -- some day we might go this route, but it does not really save that much --- -- so not now (we can generate a lot using mtx-unicode that operates on the --- -- database) --- --- -- category cjkwd direction linebreak --- --- -- adobename comment contextcommand contextname description fallback lccode --- -- mathclass mathfiller mathname mathspec mathstretch mathsymbol mirror --- -- range shcode specials uccode uccodes unicodeslot --- --- local data = { --- ['one']={ --- common = { --- category="cc", --- direction="bn", --- linebreak="cm", --- }, --- vector = { --- [0x0000] = { --- description="NULL", --- group='one', --- unicodeslot=0x0000, --- }, --- { --- description="START OF HEADING", --- group='one', --- unicodeslot=0x0001, --- }, --- } --- } --- } --- --- local chardata, groupdata = { }, { } --- --- for group, gdata in next, data do --- local common, vector = { __index = gdata.common }, gdata.vector --- for character, cdata in next, vector do --- chardata[character] = cdata --- setmetatable(cdata,common) --- end --- groupdata[group] = gdata --- end - ---~ characters.data, characters.groups = chardata, groupdata - ---~ [0xF0000]={ ---~ category="co", ---~ cjkwd="a", ---~ description="", ---~ direction="l", ---~ unicodeslot=0xF0000, ---~ }, ---~ [0xFFFFD]={ ---~ category="co", ---~ cjkwd="a", ---~ description="", ---~ direction="l", ---~ unicodeslot=0xFFFFD, ---~ }, ---~ [0x100000]={ ---~ category="co", ---~ cjkwd="a", ---~ description="", ---~ direction="l", ---~ unicodeslot=0x100000, ---~ }, ---~ [0x10FFFD]={ ---~ category="co", ---~ cjkwd="a", ---~ description="", ---~ direction="l", ---~ unicodeslot=0x10FFFD, ---~ }, - -if not characters.superscripts then - - local superscripts = allocate() characters.superscripts = superscripts - local subscripts = allocate() characters.subscripts = subscripts - - -- skipping U+02120 (service mark) U+02122 (trademark) - - for k, v in next, data do - local specials = v.specials - if specials then - local what = specials[1] - if what == "super" then - if #specials == 2 then - superscripts[k] = specials[2] - else - report_defining("ignoring %s %a, char %c, description %a","superscript",ustring(k),k,v.description) - end - elseif what == "sub" then - if #specials == 2 then - subscripts[k] = specials[2] - else - report_defining("ignoring %s %a, char %c, description %a","subscript",ustring(k),k,v.description) - end - end - end - end - - -- print(table.serialize(superscripts, "superscripts", { hexify = true })) - -- print(table.serialize(subscripts, "subscripts", { hexify = true })) - - if storage then - storage.register("characters/superscripts", superscripts, "characters.superscripts") - storage.register("characters/subscripts", subscripts, "characters.subscripts") - end - -end - --- for the moment only a few - -local tracedchars = utilities.strings.tracers - -tracedchars[0x00] = "[signal]" -tracedchars[0x20] = "[space]" - --- the following code will move to char-tex.lua - --- tex - -if not tex or not context or not commands then return characters end 
- -local tex = tex -local texsetlccode = tex.setlccode -local texsetuccode = tex.setuccode -local texsetsfcode = tex.setsfcode -local texsetcatcode = tex.setcatcode - -local contextsprint = context.sprint -local ctxcatcodes = catcodes.numbers.ctxcatcodes - ---[[ldx-- -

Instead of using a file to define the named glyphs, we -use the table. After all, we have this information available anyway.

---ldx]]-- - -function commands.makeactive(n,name) -- - contextsprint(ctxcatcodes,format("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name)) - -- context("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name) -end - -function commands.utfchar(c,n) - if n then - -- contextsprint(c,charfromnumber(n)) - contextsprint(c,utfchar(n)) - else - -- contextsprint(charfromnumber(c)) - contextsprint(utfchar(c)) - end -end - -function commands.safechar(n) - local c = data[n] - if c and c.contextname then - contextsprint("\\" .. c.contextname) -- context[c.contextname]() - else - contextsprint(utfchar(n)) - end -end - -tex.uprint = commands.utfchar - -local forbidden = tohash { -- at least now - 0x00A0, - 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x200B, 0x200C, 0x200D, - 0x202F, - 0x205F, - -- 0xFEFF, -} - -function characters.define(tobelettered, tobeactivated) -- catcodetables - - if trace_defining then - report_defining("defining active character commands") - end - - local activated, a = { }, 0 - - for u, chr in next, data do -- these will be commands - local fallback = chr.fallback - if fallback then - contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}") - a = a + 1 - activated[a] = u - else - local contextname = chr.contextname - if contextname then - local category = chr.category - if is_character[category] then - if chr.unicodeslot < 128 then - if is_letter[category] then - contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s - else - contextsprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u)) -- has no s - end - else - contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s - end - elseif is_command[category] and not forbidden[u] then - contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}") - a = a + 1 - activated[a] = u - end - end - end - end - - if tobelettered then -- shared - local saved = tex.catcodetable - for i=1,#tobelettered do - tex.catcodetable = tobelettered[i] - if trace_defining then - report_defining("defining letters (global, shared)") - end - for u, chr in next, data do - if not chr.fallback and is_letter[chr.category] and u >= 128 and u <= 65536 then - texsetcatcode(u,11) - end - local range = chr.range - if range then - for i=1,range.first,range.last do - texsetcatcode(i,11) - end - end - end - texsetcatcode(0x200C,11) -- non-joiner - texsetcatcode(0x200D,11) -- joiner - end - tex.catcodetable = saved - end - - local nofactivated = #tobeactivated - if tobeactivated and nofactivated > 0 then - for i=1,nofactivated do - local u = activated[i] - if u then - report_defining("character %U is active in set %a, containing %a",u,data[u].description,tobeactivated) - end - end - local saved = tex.catcodetable - for i=1,#tobeactivated do - local vector = tobeactivated[i] - if trace_defining then - report_defining("defining %a active characters in vector %a",nofactivated,vector) - end - tex.catcodetable = vector - for i=1,nofactivated do - local u = activated[i] - if u then - texsetcatcode(u,13) - end - end - end - tex.catcodetable = saved - end - -end - ---[[ldx-- -

Setting the lccodes is also done in a loop over the data table.

---ldx]]-- - -local sfmode = "unset" -- unset, traditional, normal - -function characters.setcodes() - if trace_defining then - report_defining("defining lc and uc codes") - end - local traditional = sfstate == "traditional" or sfstate == "unset" - for code, chr in next, data do - local cc = chr.category - if is_letter[cc] then - local range = chr.range - if range then - for i=range.first,range.last do - texsetcatcode(i,11) -- letter - texsetlccode(i,i,i) -- self self - end - else - local lc, uc = chr.lccode, chr.uccode - if not lc then - chr.lccode, lc = code, code - elseif type(lc) == "table" then - lc = code - end - if not uc then - chr.uccode, uc = code, code - elseif type(uc) == "table" then - uc = code - end - texsetcatcode(code,11) -- letter - texsetlccode(code,lc,uc) - if traditional and cc == "lu" then - texsetsfcode(code,999) - end - end - elseif is_mark[cc] then - texsetlccode(code,code,code) -- for hyphenation - end - end - if traditional then - sfstate = "traditional" - end -end - --- If this is something that is not documentwide and used a lot, then we --- need a more clever approach (trivial but not now). - -local function setuppersfcodes(v,n) - if sfstate ~= "unset" then - report_defining("setting uppercase sf codes to %a",n) - for code, chr in next, data do - if chr.category == "lu" then - texsetsfcode(code,n) - end - end - end - sfstate = v -end - -directives.register("characters.spaceafteruppercase",function(v) - if v == "traditional" then - setuppersfcodes(v,999) - elseif v == "normal" then - setuppersfcodes(v,1000) - end -end) - --- xml - -characters.activeoffset = 0x10000 -- there will be remapped in that byte range - -function commands.remapentity(chr,slot) - contextsprint(format("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr)) -end - --- xml.entities = xml.entities or { } --- --- storage.register("xml/entities",xml.entities,"xml.entities") -- this will move to lxml --- --- function characters.setmkiventities() --- local entities = xml.entities --- entities.lt = "<" --- entities.amp = "&" --- entities.gt = ">" --- end --- --- function characters.setmkiientities() --- local entities = xml.entities --- entities.lt = utfchar(characters.activeoffset + utfbyte("<")) --- entities.amp = utfchar(characters.activeoffset + utfbyte("&")) --- entities.gt = utfchar(characters.activeoffset + utfbyte(">")) --- end - +if not modules then modules = { } end modules ['char-ini'] = { + version = 1.001, + comment = "companion to char-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: make two files, one for format generation, one for format use + +-- we can remove the tag range starting at 0xE0000 (special applications) + +local utfchar, utfbyte, utfvalues, ustring = utf.char, utf.byte, utf.values, utf.ustring +local concat, unpack, tohash = table.concat, table.unpack, table.tohash +local next, tonumber, type, rawget, rawset = next, tonumber, type, rawget, rawset +local format, lower, gsub, match, gmatch = string.format, string.lower, string.gsub, string.match, string.match, string.gmatch +local P, R, Cs, lpegmatch, patterns = lpeg.P, lpeg.R, lpeg.Cs, lpeg.match, lpeg.patterns + +local utf8byte = patterns.utf8byte +local utf8char = patterns.utf8char + +local allocate = utilities.storage.allocate +local mark = utilities.storage.mark + +local setmetatableindex = table.setmetatableindex + +local trace_defining = false 
trackers.register("characters.defining", function(v) characters_defining = v end) + +local report_defining = logs.reporter("characters") + +--[[ldx-- +

This module implements some methods and creates additional data structures +from the big character table that we use for all kinds of purposes: +char-def.lua.
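As an aside (not part of the patch): once char-ini.lua and its companion char-def.lua are loaded in a ConTeXt run, the raw table and the lazily derived structures can be consulted directly. A minimal sketch, with the slot and field names taken from the definitions shown in this diff; treat the exact values as assumptions about char-def.lua:

-- illustrative only; assumes a ConTeXt Lua end where characters.data is loaded
local chr = characters.data[0x00C5]            -- LATIN CAPITAL LETTER A WITH RING ABOVE
print(chr.description, chr.category)           -- description string and "lu"
print(characters.lccodes[0x00C5])              -- lazily filled lowercase code (0x00E5)
print(characters.blocks.latinsupplement.otf)   -- block entries carry an OpenType script tag ("latn")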

+ +

We assume that at this point characters.data is already +loaded!

+--ldx]]-- + +characters = characters or { } +local characters = characters +local data = characters.data + +if data then + mark(data) -- why does this fail +else + report_defining("fatal error: 'char-def.lua' is not loaded") + os.exit() +end + +--[[ldx-- +

This converts a string (if given) into a number.
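Not part of the patch, but a small usage sketch of the two converters, assuming the module is loaded in a ConTeXt run:

-- chartonumber/charfromnumber accept characters, hex notation and slot numbers
print(characters.tonumber("a"))      -- 97
print(characters.tonumber(97))       -- 97 (numbers pass through unchanged)
print(characters.fromnumber(97))     -- "a"
print(characters.fromnumber("U+61")) -- "a" (the 0x/U+ forms go through the lpeg pattern)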

+--ldx]]-- + +local pattern = (P("0x") + P("U+")) * ((R("09","AF")^1 * P(-1)) / function(s) return tonumber(s,16) end) + +patterns.chartonumber = pattern + +local function chartonumber(k) + if type(k) == "string" then + local u = lpegmatch(pattern,k) + if u then + return utfbyte(u) + else + return utfbyte(k) or 0 + end + else + return k or 0 + end +end + +local function charfromnumber(k) + if type(k) == "number" then + return utfchar(k) or "" + else + local u = lpegmatch(pattern,k) + if u then + return utfchar(u) + else + return k + end + end +end + +--~ print(chartonumber(97), chartonumber("a"), chartonumber("0x61"), chartonumber("U+61")) + +characters.tonumber = chartonumber +characters.fromnumber = charfromnumber + +local private = { + description = "PRIVATE SLOT", +} + +local ranges = allocate() +characters.ranges = ranges + +setmetatableindex(data, function(t,k) + local tk = type(k) + if tk == "string" then + k = lpegmatch(pattern,k) or utfbyte(k) + if k then + local v = rawget(t,k) + if v then + return v + else + tk = "number" -- fall through to range + end + else + return private + end + end + if tk == "number" and k < 0xF0000 then + for r=1,#ranges do + local rr = ranges[r] + if k >= rr.first and k <= rr.last then + local extender = rr.extender + if extender then + local v = extender(k,v) + t[k] = v + return v + end + end + end + end + return private -- handy for when we loop over characters in fonts and check for a property +end) + +local blocks = allocate { + ["aegeannumbers"] = { first = 0x10100, last = 0x1013F, description = "Aegean Numbers" }, + ["alchemicalsymbols"] = { first = 0x1F700, last = 0x1F77F, description = "Alchemical Symbols" }, + ["alphabeticpresentationforms"] = { first = 0x0FB00, last = 0x0FB4F, otf="latn", description = "Alphabetic Presentation Forms" }, + ["ancientgreekmusicalnotation"] = { first = 0x1D200, last = 0x1D24F, otf="grek", description = "Ancient Greek Musical Notation" }, + ["ancientgreeknumbers"] = { first = 0x10140, last = 0x1018F, otf="grek", description = "Ancient Greek Numbers" }, + ["ancientsymbols"] = { first = 0x10190, last = 0x101CF, otf="grek", description = "Ancient Symbols" }, + ["arabic"] = { first = 0x00600, last = 0x006FF, otf="arab", description = "Arabic" }, + ["arabicextendeda"] = { first = 0x008A0, last = 0x008FF, description = "Arabic Extended-A" }, + ["arabicmathematicalalphabeticsymbols"] = { first = 0x1EE00, last = 0x1EEFF, description = "Arabic Mathematical Alphabetic Symbols" }, + ["arabicpresentationformsa"] = { first = 0x0FB50, last = 0x0FDFF, otf="arab", description = "Arabic Presentation Forms-A" }, + ["arabicpresentationformsb"] = { first = 0x0FE70, last = 0x0FEFF, otf="arab", description = "Arabic Presentation Forms-B" }, + ["arabicsupplement"] = { first = 0x00750, last = 0x0077F, otf="arab", description = "Arabic Supplement" }, + ["armenian"] = { first = 0x00530, last = 0x0058F, otf="armn", description = "Armenian" }, + ["arrows"] = { first = 0x02190, last = 0x021FF, description = "Arrows" }, + ["avestan"] = { first = 0x10B00, last = 0x10B3F, description = "Avestan" }, + ["balinese"] = { first = 0x01B00, last = 0x01B7F, otf="bali", description = "Balinese" }, + ["bamum"] = { first = 0x0A6A0, last = 0x0A6FF, description = "Bamum" }, + ["bamumsupplement"] = { first = 0x16800, last = 0x16A3F, description = "Bamum Supplement" }, + ["basiclatin"] = { first = 0x00000, last = 0x0007F, otf="latn", description = "Basic Latin" }, + ["batak"] = { first = 0x01BC0, last = 0x01BFF, description = "Batak" }, + ["bengali"] = { first 
= 0x00980, last = 0x009FF, otf="beng", description = "Bengali" }, + ["blockelements"] = { first = 0x02580, last = 0x0259F, otf="bopo", description = "Block Elements" }, + ["bopomofo"] = { first = 0x03100, last = 0x0312F, otf="bopo", description = "Bopomofo" }, + ["bopomofoextended"] = { first = 0x031A0, last = 0x031BF, otf="bopo", description = "Bopomofo Extended" }, + ["boxdrawing"] = { first = 0x02500, last = 0x0257F, description = "Box Drawing" }, + ["brahmi"] = { first = 0x11000, last = 0x1107F, description = "Brahmi" }, + ["braillepatterns"] = { first = 0x02800, last = 0x028FF, otf="brai", description = "Braille Patterns" }, + ["buginese"] = { first = 0x01A00, last = 0x01A1F, otf="bugi", description = "Buginese" }, + ["buhid"] = { first = 0x01740, last = 0x0175F, otf="buhd", description = "Buhid" }, + ["byzantinemusicalsymbols"] = { first = 0x1D000, last = 0x1D0FF, otf="byzm", description = "Byzantine Musical Symbols" }, + ["commonindicnumberforms"] = { first = 0x0A830, last = 0x0A83F, description = "Common Indic Number Forms" }, + ["carian"] = { first = 0x102A0, last = 0x102DF, description = "Carian" }, + ["cham"] = { first = 0x0AA00, last = 0x0AA5F, description = "Cham" }, + ["cherokee"] = { first = 0x013A0, last = 0x013FF, otf="cher", description = "Cherokee" }, + ["cjkcompatibility"] = { first = 0x03300, last = 0x033FF, otf="hang", description = "CJK Compatibility" }, + ["cjkcompatibilityforms"] = { first = 0x0FE30, last = 0x0FE4F, otf="hang", description = "CJK Compatibility Forms" }, + ["cjkcompatibilityideographs"] = { first = 0x0F900, last = 0x0FAFF, otf="hang", description = "CJK Compatibility Ideographs" }, + ["cjkcompatibilityideographssupplement"] = { first = 0x2F800, last = 0x2FA1F, otf="hang", description = "CJK Compatibility Ideographs Supplement" }, + ["cjkradicalssupplement"] = { first = 0x02E80, last = 0x02EFF, otf="hang", description = "CJK Radicals Supplement" }, + ["cjkstrokes"] = { first = 0x031C0, last = 0x031EF, otf="hang", description = "CJK Strokes" }, + ["cjksymbolsandpunctuation"] = { first = 0x03000, last = 0x0303F, otf="hang", description = "CJK Symbols and Punctuation" }, + ["cjkunifiedideographs"] = { first = 0x04E00, last = 0x09FFF, otf="hang", description = "CJK Unified Ideographs" }, + ["cjkunifiedideographsextensiona"] = { first = 0x03400, last = 0x04DBF, otf="hang", description = "CJK Unified Ideographs Extension A" }, + ["cjkunifiedideographsextensionb"] = { first = 0x20000, last = 0x2A6DF, otf="hang", description = "CJK Unified Ideographs Extension B" }, + ["combiningdiacriticalmarks"] = { first = 0x00300, last = 0x0036F, description = "Combining Diacritical Marks" }, + ["combiningdiacriticalmarksforsymbols"] = { first = 0x020D0, last = 0x020FF, description = "Combining Diacritical Marks for Symbols" }, + ["combiningdiacriticalmarkssupplement"] = { first = 0x01DC0, last = 0x01DFF, description = "Combining Diacritical Marks Supplement" }, + ["combininghalfmarks"] = { first = 0x0FE20, last = 0x0FE2F, description = "Combining Half Marks" }, + ["controlpictures"] = { first = 0x02400, last = 0x0243F, description = "Control Pictures" }, + ["coptic"] = { first = 0x02C80, last = 0x02CFF, otf="copt", description = "Coptic" }, + ["countingrodnumerals"] = { first = 0x1D360, last = 0x1D37F, description = "Counting Rod Numerals" }, + ["cuneiform"] = { first = 0x12000, last = 0x123FF, otf="xsux", description = "Cuneiform" }, + ["cuneiformnumbersandpunctuation"] = { first = 0x12400, last = 0x1247F, otf="xsux", description = "Cuneiform Numbers and Punctuation" 
}, + ["currencysymbols"] = { first = 0x020A0, last = 0x020CF, description = "Currency Symbols" }, + ["cypriotsyllabary"] = { first = 0x10800, last = 0x1083F, otf="cprt", description = "Cypriot Syllabary" }, + ["cyrillic"] = { first = 0x00400, last = 0x004FF, otf="cyrl", description = "Cyrillic" }, + ["cyrillicextendeda"] = { first = 0x02DE0, last = 0x02DFF, otf="cyrl", description = "Cyrillic Extended-A" }, + ["cyrillicextendedb"] = { first = 0x0A640, last = 0x0A69F, otf="cyrl", description = "Cyrillic Extended-B" }, + ["cyrillicsupplement"] = { first = 0x00500, last = 0x0052F, otf="cyrl", description = "Cyrillic Supplement" }, + ["deseret"] = { first = 0x10400, last = 0x1044F, otf="dsrt", description = "Deseret" }, + ["devanagari"] = { first = 0x00900, last = 0x0097F, otf="deva", description = "Devanagari" }, + ["devanagariextended"] = { first = 0x0A8E0, last = 0x0A8FF, description = "Devanagari Extended" }, + ["dingbats"] = { first = 0x02700, last = 0x027BF, description = "Dingbats" }, + ["dominotiles"] = { first = 0x1F030, last = 0x1F09F, description = "Domino Tiles" }, + ["egyptianhieroglyphs"] = { first = 0x13000, last = 0x1342F, description = "Egyptian Hieroglyphs" }, + ["emoticons"] = { first = 0x1F600, last = 0x1F64F, description = "Emoticons" }, + ["enclosedalphanumericsupplement"] = { first = 0x1F100, last = 0x1F1FF, description = "Enclosed Alphanumeric Supplement" }, + ["enclosedalphanumerics"] = { first = 0x02460, last = 0x024FF, description = "Enclosed Alphanumerics" }, + ["enclosedcjklettersandmonths"] = { first = 0x03200, last = 0x032FF, description = "Enclosed CJK Letters and Months" }, + ["enclosedideographicsupplement"] = { first = 0x1F200, last = 0x1F2FF, description = "Enclosed Ideographic Supplement" }, + ["ethiopic"] = { first = 0x01200, last = 0x0137F, otf="ethi", description = "Ethiopic" }, + ["ethiopicextended"] = { first = 0x02D80, last = 0x02DDF, otf="ethi", description = "Ethiopic Extended" }, + ["ethiopicextendeda"] = { first = 0x0AB00, last = 0x0AB2F, description = "Ethiopic Extended-A" }, + ["ethiopicsupplement"] = { first = 0x01380, last = 0x0139F, otf="ethi", description = "Ethiopic Supplement" }, + ["generalpunctuation"] = { first = 0x02000, last = 0x0206F, description = "General Punctuation" }, + ["geometricshapes"] = { first = 0x025A0, last = 0x025FF, description = "Geometric Shapes" }, + ["georgian"] = { first = 0x010A0, last = 0x010FF, otf="geor", description = "Georgian" }, + ["georgiansupplement"] = { first = 0x02D00, last = 0x02D2F, otf="geor", description = "Georgian Supplement" }, + ["glagolitic"] = { first = 0x02C00, last = 0x02C5F, otf="glag", description = "Glagolitic" }, + ["gothic"] = { first = 0x10330, last = 0x1034F, otf="goth", description = "Gothic" }, + ["greekandcoptic"] = { first = 0x00370, last = 0x003FF, otf="grek", description = "Greek and Coptic" }, + ["greekextended"] = { first = 0x01F00, last = 0x01FFF, otf="grek", description = "Greek Extended" }, + ["gujarati"] = { first = 0x00A80, last = 0x00AFF, otf="gujr", description = "Gujarati" }, + ["gurmukhi"] = { first = 0x00A00, last = 0x00A7F, otf="guru", description = "Gurmukhi" }, + ["halfwidthandfullwidthforms"] = { first = 0x0FF00, last = 0x0FFEF, description = "Halfwidth and Fullwidth Forms" }, + ["hangulcompatibilityjamo"] = { first = 0x03130, last = 0x0318F, otf="jamo", description = "Hangul Compatibility Jamo" }, + ["hanguljamo"] = { first = 0x01100, last = 0x011FF, otf="jamo", description = "Hangul Jamo" }, + ["hanguljamoextendeda"] = { first = 0x0A960, last = 0x0A97F, 
description = "Hangul Jamo Extended-A" }, + ["hanguljamoextendedb"] = { first = 0x0D7B0, last = 0x0D7FF, description = "Hangul Jamo Extended-B" }, + ["hangulsyllables"] = { first = 0x0AC00, last = 0x0D7AF, otf="hang", description = "Hangul Syllables" }, + ["hanunoo"] = { first = 0x01720, last = 0x0173F, otf="hano", description = "Hanunoo" }, + ["hebrew"] = { first = 0x00590, last = 0x005FF, otf="hebr", description = "Hebrew" }, + ["highprivateusesurrogates"] = { first = 0x0DB80, last = 0x0DBFF, description = "High Private Use Surrogates" }, + ["highsurrogates"] = { first = 0x0D800, last = 0x0DB7F, description = "High Surrogates" }, + ["hiragana"] = { first = 0x03040, last = 0x0309F, otf="kana", description = "Hiragana" }, + ["ideographicdescriptioncharacters"] = { first = 0x02FF0, last = 0x02FFF, description = "Ideographic Description Characters" }, + ["imperialaramaic"] = { first = 0x10840, last = 0x1085F, description = "Imperial Aramaic" }, + ["inscriptionalpahlavi"] = { first = 0x10B60, last = 0x10B7F, description = "Inscriptional Pahlavi" }, + ["inscriptionalparthian"] = { first = 0x10B40, last = 0x10B5F, description = "Inscriptional Parthian" }, + ["ipaextensions"] = { first = 0x00250, last = 0x002AF, description = "IPA Extensions" }, + ["javanese"] = { first = 0x0A980, last = 0x0A9DF, description = "Javanese" }, + ["kaithi"] = { first = 0x11080, last = 0x110CF, description = "Kaithi" }, + ["kanasupplement"] = { first = 0x1B000, last = 0x1B0FF, description = "Kana Supplement" }, + ["kanbun"] = { first = 0x03190, last = 0x0319F, description = "Kanbun" }, + ["kangxiradicals"] = { first = 0x02F00, last = 0x02FDF, description = "Kangxi Radicals" }, + ["kannada"] = { first = 0x00C80, last = 0x00CFF, otf="knda", description = "Kannada" }, + ["katakana"] = { first = 0x030A0, last = 0x030FF, otf="kana", description = "Katakana" }, + ["katakanaphoneticextensions"] = { first = 0x031F0, last = 0x031FF, otf="kana", description = "Katakana Phonetic Extensions" }, + ["kayahli"] = { first = 0x0A900, last = 0x0A92F, description = "Kayah Li" }, + ["kharoshthi"] = { first = 0x10A00, last = 0x10A5F, otf="khar", description = "Kharoshthi" }, + ["khmer"] = { first = 0x01780, last = 0x017FF, otf="khmr", description = "Khmer" }, + ["khmersymbols"] = { first = 0x019E0, last = 0x019FF, otf="khmr", description = "Khmer Symbols" }, + ["lao"] = { first = 0x00E80, last = 0x00EFF, otf="lao", description = "Lao" }, + ["latinextendeda"] = { first = 0x00100, last = 0x0017F, otf="latn", description = "Latin Extended-A" }, + ["latinextendedadditional"] = { first = 0x01E00, last = 0x01EFF, otf="latn", description = "Latin Extended Additional" }, + ["latinextendedb"] = { first = 0x00180, last = 0x0024F, otf="latn", description = "Latin Extended-B" }, + ["latinextendedc"] = { first = 0x02C60, last = 0x02C7F, otf="latn", description = "Latin Extended-C" }, + ["latinextendedd"] = { first = 0x0A720, last = 0x0A7FF, otf="latn", description = "Latin Extended-D" }, + ["latinsupplement"] = { first = 0x00080, last = 0x000FF, otf="latn", description = "Latin-1 Supplement" }, + ["lepcha"] = { first = 0x01C00, last = 0x01C4F, description = "Lepcha" }, + ["letterlikesymbols"] = { first = 0x02100, last = 0x0214F, description = "Letterlike Symbols" }, + ["limbu"] = { first = 0x01900, last = 0x0194F, otf="limb", description = "Limbu" }, + ["linearbideograms"] = { first = 0x10080, last = 0x100FF, otf="linb", description = "Linear B Ideograms" }, + ["linearbsyllabary"] = { first = 0x10000, last = 0x1007F, otf="linb", description = "Linear 
B Syllabary" }, + ["lisu"] = { first = 0x0A4D0, last = 0x0A4FF, description = "Lisu" }, + ["lowsurrogates"] = { first = 0x0DC00, last = 0x0DFFF, description = "Low Surrogates" }, + ["lycian"] = { first = 0x10280, last = 0x1029F, description = "Lycian" }, + ["lydian"] = { first = 0x10920, last = 0x1093F, description = "Lydian" }, + ["mahjongtiles"] = { first = 0x1F000, last = 0x1F02F, description = "Mahjong Tiles" }, + ["malayalam"] = { first = 0x00D00, last = 0x00D7F, otf="mlym", description = "Malayalam" }, + ["mandiac"] = { first = 0x00840, last = 0x0085F, otf="mand", description = "Mandaic" }, + ["mathematicalalphanumericsymbols"] = { first = 0x1D400, last = 0x1D7FF, description = "Mathematical Alphanumeric Symbols" }, + ["mathematicaloperators"] = { first = 0x02200, last = 0x022FF, description = "Mathematical Operators" }, + ["meeteimayek"] = { first = 0x0ABC0, last = 0x0ABFF, description = "Meetei Mayek" }, + ["meeteimayekextensions"] = { first = 0x0AAE0, last = 0x0AAFF, description = "Meetei Mayek Extensions" }, + ["meroiticcursive"] = { first = 0x109A0, last = 0x109FF, description = "Meroitic Cursive" }, + ["meroitichieroglyphs"] = { first = 0x10980, last = 0x1099F, description = "Meroitic Hieroglyphs" }, + ["miao"] = { first = 0x16F00, last = 0x16F9F, description = "Miao" }, + ["miscellaneousmathematicalsymbolsa"] = { first = 0x027C0, last = 0x027EF, description = "Miscellaneous Mathematical Symbols-A" }, + ["miscellaneousmathematicalsymbolsb"] = { first = 0x02980, last = 0x029FF, description = "Miscellaneous Mathematical Symbols-B" }, + ["miscellaneoussymbols"] = { first = 0x02600, last = 0x026FF, description = "Miscellaneous Symbols" }, + ["miscellaneoussymbolsandarrows"] = { first = 0x02B00, last = 0x02BFF, description = "Miscellaneous Symbols and Arrows" }, + ["miscellaneoussymbolsandpictographs"] = { first = 0x1F300, last = 0x1F5FF, description = "Miscellaneous Symbols And Pictographs" }, + ["miscellaneoustechnical"] = { first = 0x02300, last = 0x023FF, description = "Miscellaneous Technical" }, + ["modifiertoneletters"] = { first = 0x0A700, last = 0x0A71F, description = "Modifier Tone Letters" }, + ["mongolian"] = { first = 0x01800, last = 0x018AF, otf="mong", description = "Mongolian" }, + ["musicalsymbols"] = { first = 0x1D100, last = 0x1D1FF, otf="musc", description = "Musical Symbols" }, + ["myanmar"] = { first = 0x01000, last = 0x0109F, otf="mymr", description = "Myanmar" }, + ["myanmarextendeda"] = { first = 0x0AA60, last = 0x0AA7F, description = "Myanmar Extended-A" }, + ["newtailue"] = { first = 0x01980, last = 0x019DF, description = "New Tai Lue" }, + ["nko"] = { first = 0x007C0, last = 0x007FF, otf="nko", description = "NKo" }, + ["numberforms"] = { first = 0x02150, last = 0x0218F, description = "Number Forms" }, + ["ogham"] = { first = 0x01680, last = 0x0169F, otf="ogam", description = "Ogham" }, + ["olchiki"] = { first = 0x01C50, last = 0x01C7F, description = "Ol Chiki" }, + ["olditalic"] = { first = 0x10300, last = 0x1032F, otf="ital", description = "Old Italic" }, + ["oldpersian"] = { first = 0x103A0, last = 0x103DF, otf="xpeo", description = "Old Persian" }, + ["oldsoutharabian"] = { first = 0x10A60, last = 0x10A7F, description = "Old South Arabian" }, + ["odlturkic"] = { first = 0x10C00, last = 0x10C4F, description = "Old Turkic" }, + ["opticalcharacterrecognition"] = { first = 0x02440, last = 0x0245F, description = "Optical Character Recognition" }, + ["oriya"] = { first = 0x00B00, last = 0x00B7F, otf="orya", description = "Oriya" }, + ["osmanya"] = { first = 
0x10480, last = 0x104AF, otf="osma", description = "Osmanya" }, + ["phagspa"] = { first = 0x0A840, last = 0x0A87F, otf="phag", description = "Phags-pa" }, + ["phaistosdisc"] = { first = 0x101D0, last = 0x101FF, description = "Phaistos Disc" }, + ["phoenician"] = { first = 0x10900, last = 0x1091F, otf="phnx", description = "Phoenician" }, + ["phoneticextensions"] = { first = 0x01D00, last = 0x01D7F, description = "Phonetic Extensions" }, + ["phoneticextensionssupplement"] = { first = 0x01D80, last = 0x01DBF, description = "Phonetic Extensions Supplement" }, + ["playingcards"] = { first = 0x1F0A0, last = 0x1F0FF, description = "Playing Cards" }, + ["privateusearea"] = { first = 0x0E000, last = 0x0F8FF, description = "Private Use Area" }, + ["rejang"] = { first = 0x0A930, last = 0x0A95F, description = "Rejang" }, + ["ruminumeralsymbols"] = { first = 0x10E60, last = 0x10E7F, description = "Rumi Numeral Symbols" }, + ["runic"] = { first = 0x016A0, last = 0x016FF, otf="runr", description = "Runic" }, + ["samaritan"] = { first = 0x00800, last = 0x0083F, description = "Samaritan" }, + ["saurashtra"] = { first = 0x0A880, last = 0x0A8DF, description = "Saurashtra" }, + ["sharada"] = { first = 0x11180, last = 0x111DF, description = "Sharada" }, + ["shavian"] = { first = 0x10450, last = 0x1047F, otf="shaw", description = "Shavian" }, + ["sinhala"] = { first = 0x00D80, last = 0x00DFF, otf="sinh", description = "Sinhala" }, + ["smallformvariants"] = { first = 0x0FE50, last = 0x0FE6F, description = "Small Form Variants" }, + ["sorasompeng"] = { first = 0x110D0, last = 0x110FF, description = "Sora Sompeng" }, + ["spacingmodifierletters"] = { first = 0x002B0, last = 0x002FF, description = "Spacing Modifier Letters" }, + ["specials"] = { first = 0x0FFF0, last = 0x0FFFF, description = "Specials" }, + ["sundanese"] = { first = 0x01B80, last = 0x01BBF, description = "Sundanese" }, + ["sundanesesupplement"] = { first = 0x01CC0, last = 0x01CCF, description = "Sundanese Supplement" }, + ["superscriptsandsubscripts"] = { first = 0x02070, last = 0x0209F, description = "Superscripts and Subscripts" }, + ["supplementalarrowsa"] = { first = 0x027F0, last = 0x027FF, description = "Supplemental Arrows-A" }, + ["supplementalarrowsb"] = { first = 0x02900, last = 0x0297F, description = "Supplemental Arrows-B" }, + ["supplementalmathematicaloperators"] = { first = 0x02A00, last = 0x02AFF, description = "Supplemental Mathematical Operators" }, + ["supplementalpunctuation"] = { first = 0x02E00, last = 0x02E7F, description = "Supplemental Punctuation" }, + ["supplementaryprivateuseareaa"] = { first = 0xF0000, last = 0xFFFFF, description = "Supplementary Private Use Area-A" }, + ["supplementaryprivateuseareab"] = { first = 0x100000,last = 0x10FFFF, description = "Supplementary Private Use Area-B" }, + ["sylotinagri"] = { first = 0x0A800, last = 0x0A82F, otf="sylo", description = "Syloti Nagri" }, + ["syriac"] = { first = 0x00700, last = 0x0074F, otf="syrc", description = "Syriac" }, + ["tagalog"] = { first = 0x01700, last = 0x0171F, otf="tglg", description = "Tagalog" }, + ["tagbanwa"] = { first = 0x01760, last = 0x0177F, otf="tagb", description = "Tagbanwa" }, + ["tags"] = { first = 0xE0000, last = 0xE007F, description = "Tags" }, + ["taile"] = { first = 0x01950, last = 0x0197F, otf="tale", description = "Tai Le" }, + ["taitham"] = { first = 0x01A20, last = 0x01AAF, description = "Tai Tham" }, + ["taiviet"] = { first = 0x0AA80, last = 0x0AADF, description = "Tai Viet" }, + ["taixuanjingsymbols"] = { first = 0x1D300, last = 
0x1D35F, description = "Tai Xuan Jing Symbols" }, + ["takri"] = { first = 0x11680, last = 0x116CF, description = "Takri" }, + ["tamil"] = { first = 0x00B80, last = 0x00BFF, otf="taml", description = "Tamil" }, + ["telugu"] = { first = 0x00C00, last = 0x00C7F, otf="telu", description = "Telugu" }, + ["thaana"] = { first = 0x00780, last = 0x007BF, otf="thaa", description = "Thaana" }, + ["thai"] = { first = 0x00E00, last = 0x00E7F, otf="thai", description = "Thai" }, + ["tibetan"] = { first = 0x00F00, last = 0x00FFF, otf="tibt", description = "Tibetan" }, + ["tifinagh"] = { first = 0x02D30, last = 0x02D7F, otf="tfng", description = "Tifinagh" }, + ["transportandmapsymbols"] = { first = 0x1F680, last = 0x1F6FF, description = "Transport And Map Symbols" }, + ["ugaritic"] = { first = 0x10380, last = 0x1039F, otf="ugar", description = "Ugaritic" }, + ["unifiedcanadianaboriginalsyllabics"] = { first = 0x01400, last = 0x0167F, otf="cans", description = "Unified Canadian Aboriginal Syllabics" }, + ["unifiedcanadianaboriginalsyllabicsextended"] = { first = 0x018B0, last = 0x018FF, description = "Unified Canadian Aboriginal Syllabics Extended" }, + ["vai"] = { first = 0x0A500, last = 0x0A63F, description = "Vai" }, + ["variationselectors"] = { first = 0x0FE00, last = 0x0FE0F, description = "Variation Selectors" }, + ["variationselectorssupplement"] = { first = 0xE0100, last = 0xE01EF, description = "Variation Selectors Supplement" }, + ["vedicextensions"] = { first = 0x01CD0, last = 0x01CFF, description = "Vedic Extensions" }, + ["verticalforms"] = { first = 0x0FE10, last = 0x0FE1F, description = "Vertical Forms" }, + ["yijinghexagramsymbols"] = { first = 0x04DC0, last = 0x04DFF, otf="yi", description = "Yijing Hexagram Symbols" }, + ["yiradicals"] = { first = 0x0A490, last = 0x0A4CF, otf="yi", description = "Yi Radicals" }, + ["yisyllables"] = { first = 0x0A000, last = 0x0A48F, otf="yi", description = "Yi Syllables" }, +} + +characters.blocks = blocks + +function characters.blockrange(name) + local b = blocks[name] + if b then + return b.first, b.last + else + return 0, 0 + end +end + +setmetatableindex(blocks, function(t,k) -- we could use an intermediate table if called often + return k and rawget(t,lower(gsub(k,"[^a-zA-Z]",""))) +end) + +local otfscripts = utilities.storage.allocate() +characters.otfscripts = otfscripts + +setmetatableindex(otfscripts,function(t,unicode) + for k, v in next, blocks do + local first, last = v.first, v.last + if unicode >= first and unicode <= last then + local script = v.otf or "dflt" + for u=first,last do + t[u] = script + end + return script + end + end + -- pretty slow when we're here + t[unicode] = "dflt" + return "dflt" +end) + +function characters.getrange(name) -- used in font fallback definitions (name or range) + local range = blocks[name] + if range then + return range.first, range.last, range.description + end + name = gsub(name,'"',"0x") -- goodie: tex hex notation + local start, stop = match(name,"^(.-)[%-%:](.-)$") + if start and stop then + start, stop = tonumber(start,16) or tonumber(start), tonumber(stop,16) or tonumber(stop) + if start and stop then + return start, stop, nil + end + end + local slot = tonumber(name,16) or tonumber(name) + return slot, slot, nil +end + +local categorytags = allocate { + lu = "Letter Uppercase", + ll = "Letter Lowercase", + lt = "Letter Titlecase", + lm = "Letter Modifier", + lo = "Letter Other", + mn = "Mark Nonspacing", + mc = "Mark Spacing Combining", + me = "Mark Enclosing", + nd = "Number Decimal Digit", + nl = 
"Number Letter", + no = "Number Other", + pc = "Punctuation Connector", + pd = "Punctuation Dash", + ps = "Punctuation Open", + pe = "Punctuation Close", + pi = "Punctuation Initial Quote", + pf = "Punctuation Final Quote", + po = "Punctuation Other", + sm = "Symbol Math", + sc = "Symbol Currency", + sk = "Symbol Modifier", + so = "Symbol Other", + zs = "Separator Space", + zl = "Separator Line", + zp = "Separator Paragraph", + cc = "Other Control", + cf = "Other Format", + cs = "Other Surrogate", + co = "Other Private Use", + cn = "Other Not Assigned", +} + +characters.categorytags = categorytags + +--~ special : cf (softhyphen) zs (emspace) +--~ characters: ll lm lo lt lu mn nl no pc pd pe pf pi po ps sc sk sm so + +local is_character = allocate ( tohash { + "lu","ll","lt","lm","lo", + "nd","nl","no", + "mn", + "nl","no", + "pc","pd","ps","pe","pi","pf","po", + "sm","sc","sk","so" +} ) + +local is_letter = allocate ( tohash { + "ll","lm","lo","lt","lu" +} ) + +local is_command = allocate ( tohash { + "cf","zs" +} ) + +local is_spacing = allocate ( tohash { + "zs", "zl","zp", +} ) + +local is_mark = allocate ( tohash { + "mn", "ms", +} ) + +-- to be redone: store checked characters + +characters.is_character = is_character +characters.is_letter = is_letter +characters.is_command = is_command +characters.is_spacing = is_spacing +characters.is_mark = is_mark + +local mt = { -- yes or no ? + __index = function(t,k) + if type(k) == "number" then + local c = data[k].category + return c and rawget(t,c) + else + -- avoid auto conversion in data.characters lookups + end + end +} + +setmetatableindex(characters.is_character, mt) +setmetatableindex(characters.is_letter, mt) +setmetatableindex(characters.is_command, mt) +setmetatableindex(characters.is_spacing, mt) + +-- linebreak: todo: hash +-- +-- normative : BK CR LF CM SG GL CB SP ZW NL WJ JL JV JT H2 H3 +-- informative : XX OP CL QU NS EX SY IS PR PO NU AL ID IN HY BB BA SA AI B2 new:CP + +-- east asian width: +-- +-- N A H W F Na + +characters.bidi = allocate { + l = "Left-to-Right", + lre = "Left-to-Right Embedding", + lro = "Left-to-Right Override", + r = "Right-to-Left", + al = "Right-to-Left Arabic", + rle = "Right-to-Left Embedding", + rlo = "Right-to-Left Override", + pdf = "Pop Directional Format", + en = "European Number", + es = "European Number Separator", + et = "European Number Terminator", + an = "Arabic Number", + cs = "Common Number Separator", + nsm = "Non-Spacing Mark", + bn = "Boundary Neutral", + b = "Paragraph Separator", + s = "Segment Separator", + ws = "Whitespace", + on = "Other Neutrals", +} + +--[[ldx-- +

At this point we assume that the big data table is loaded. From this +table we derive a few more.
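A sketch of the kind of pair the fallback loop records, assuming char-def.lua lists U+00A8 (DIAERESIS) with specials { "compat", 0x0020, 0x0308 }, matching Unicode's compatibility decomposition:

-- the loop maps the spacing accent to its combining counterpart and back
print(characters.fallbacks[0x00A8]) -- 0x0308, COMBINING DIAERESIS
print(characters.fallbacks[0x0308]) -- 0x00A8, the spacing DIAERESIS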

+--ldx]]-- + +if not characters.fallbacks then + + characters.fallbacks = { } -- not than many + + local fallbacks = characters.fallbacks + + for k, d in next, data do + local specials = d.specials + if specials and specials[1] == "compat" and specials[2] == 0x0020 then + local s = specials[3] + if s then + fallbacks[k] = s + fallbacks[s] = k + end + end + end + +end + +if storage then + storage.register("characters/fallbacks", characters.fallbacks, "characters.fallbacks") -- accents and such +end + +characters.directions = { } + +setmetatableindex(characters.directions,function(t,k) + local d = data[k] + if d then + local v = d.direction + if v then + t[k] = v + return v + end + end + t[k] = false -- maybe 'l' + return v +end) + +--[[ldx-- +

Next comes a whole series of helper methods. These are (will be) part +of the official API.
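For instance (illustrative, not part of the patch; the contextname value depends on what char-def.lua defines):

print(characters.description(0x0041))    -- "LATIN CAPITAL LETTER A"
print(characters.category(0x0041))       -- "lu"
print(characters.category(0x0041,true))  -- "Letter Uppercase" (verbose mode uses categorytags)
print(characters.contextname(0x00E9))    -- e.g. "eacute", or "" when no name is defined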

+--ldx]]-- + +-- we could make them virtual: characters.contextnames[n] + +function characters.contextname(n) return data[n].contextname or "" end +function characters.adobename (n) return data[n].adobename or "" end +function characters.description(n) return data[n].description or "" end +-------- characters.category (n) return data[n].category or "" end + +function characters.category(n,verbose) + local c = data[n].category + if not c then + return "" + elseif verbose then + return categorytags[c] + else + return c + end +end + +-- -- some day we will make a table .. not that many calls to utfchar +-- +-- local utfchar = utf.char +-- local utfbyte = utf.byte +-- local utfbytes = { } +-- local utfchars = { } +-- +-- table.setmetatableindex(utfbytes,function(t,k) local v = utfchar(k) t[k] = v return v end) +-- table.setmetatableindex(utfchars,function(t,k) local v = utfbyte(k) t[k] = v return v end) + +local function toutfstring(s) + if type(s) == "table" then + return utfchar(unpack(s)) -- concat { utfchar( unpack(s) ) } + else + return utfchar(s) + end +end + +utf.tostring = toutfstring + +local categories = allocate() characters.categories = categories -- lazy table + +setmetatableindex(categories, function(t,u) if u then local c = data[u] c = c and c.category or u t[u] = c return c end end) + +local lccodes = allocate() characters.lccodes = lccodes -- lazy table +local uccodes = allocate() characters.uccodes = uccodes -- lazy table +local shcodes = allocate() characters.shcodes = shcodes -- lazy table +local fscodes = allocate() characters.fscodes = fscodes -- lazy table + +setmetatableindex(lccodes, function(t,u) if u then local c = data[u] c = c and c.lccode or (type(u) == "string" and utfbyte(u)) or u t[u] = c return c end end) +setmetatableindex(uccodes, function(t,u) if u then local c = data[u] c = c and c.uccode or (type(u) == "string" and utfbyte(u)) or u t[u] = c return c end end) +setmetatableindex(shcodes, function(t,u) if u then local c = data[u] c = c and c.shcode or (type(u) == "string" and utfbyte(u)) or u t[u] = c return c end end) +setmetatableindex(fscodes, function(t,u) if u then local c = data[u] c = c and c.fscode or (type(u) == "string" and utfbyte(u)) or u t[u] = c return c end end) + +local lcchars = allocate() characters.lcchars = lcchars -- lazy table +local ucchars = allocate() characters.ucchars = ucchars -- lazy table +local shchars = allocate() characters.shchars = shchars -- lazy table +local fschars = allocate() characters.fschars = fschars -- lazy table + +setmetatableindex(lcchars, function(t,u) if u then local c = data[u] c = c and c.lccode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end) +setmetatableindex(ucchars, function(t,u) if u then local c = data[u] c = c and c.uccode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end) +setmetatableindex(shchars, function(t,u) if u then local c = data[u] c = c and c.shcode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end) +setmetatableindex(fschars, function(t,u) if u then local c = data[u] c = c and c.fscode c = c and toutfstring(c) or (type(u) == "number" and utfchar(u)) or u t[u] = c return c end end) + +local decomposed = allocate() characters.decomposed = decomposed -- lazy table +local specials = allocate() characters.specials = specials -- lazy table + +setmetatableindex(decomposed, function(t,u) -- either a table or false + if u then + local c = data[u] + local s = 
c and c.decomposed or false -- could fall back to specials + t[u] = s + return s + end +end) + +setmetatableindex(specials, function(t,u) -- either a table or false + if u then + local c = data[u] + local s = c and c.specials or false + t[u] = s + return s + end +end) + +local specialchars = allocate() characters.specialchars = specialchars -- lazy table +local descriptions = allocate() characters.descriptions = descriptions -- lazy table + +setmetatableindex(specialchars, function(t,u) + if u then + local c = data[u] + local s = c and c.specials + if s then + local tt, ttn = { }, 0 + for i=2,#s do + local si = s[i] + local c = data[si] + if is_letter[c.category] then + ttn = ttn + 1 + tt[ttn] = utfchar(si) + end + end + c = concat(tt) + t[u] = c + return c + else + if type(u) == "number" then + u = utfchar(u) + end + t[u] = u + return u + end + end +end) + +setmetatableindex(descriptions, function(t,k) + -- 0.05 - 0.10 sec + for u, c in next, data do + local d = c.description + if d then + d = gsub(d," ","") + d = lower(d) + t[d] = u + end + end + local d = rawget(t,k) + if not d then + t[k] = k + end + return d +end) + +function characters.unicodechar(asked) + local n = tonumber(asked) + if n then + return n + elseif type(asked) == "string" then + return descriptions[asked] or descriptions[gsub(asked," ","")] + end +end + +-- function characters.lower(str) +-- local new, n = { }, 0 +-- for u in utfvalues(str) do +-- n = n + 1 +-- new[n] = lcchars[u] +-- end +-- return concat(new) +-- end +-- +-- function characters.upper(str) +-- local new, n = { }, 0 +-- for u in utfvalues(str) do +-- n = n + 1 +-- new[n] = ucchars[u] +-- end +-- return concat(new) +-- end +-- +-- function characters.shaped(str) +-- local new, n = { }, 0 +-- for u in utfvalues(str) do +-- n = n + 1 +-- new[n] = shchars[u] +-- end +-- return concat(new) +-- end + +----- tolower = Cs((utf8byte/lcchars)^0) +----- toupper = Cs((utf8byte/ucchars)^0) +----- toshape = Cs((utf8byte/shchars)^0) + +local tolower = Cs((utf8char/lcchars)^0) +local toupper = Cs((utf8char/ucchars)^0) +local toshape = Cs((utf8char/shchars)^0) + +patterns.tolower = tolower +patterns.toupper = toupper +patterns.toshape = toshape + +function characters.lower (str) return lpegmatch(tolower,str) end +function characters.upper (str) return lpegmatch(toupper,str) end +function characters.shaped(str) return lpegmatch(toshape,str) end + +function characters.lettered(str,spacing) + local new, n = { }, 0 + if spacing then + local done = false + for u in utfvalues(str) do + local c = data[u].category + if is_letter[c] then + if done and n > 1 then + n = n + 1 + new[n] = " " + done = false + end + n = n + 1 + new[n] = utfchar(u) + elseif spacing and is_spacing[c] then + done = true + end + end + else + for u in utfvalues(str) do + if is_letter[data[u].category] then + n = n + 1 + new[n] = utfchar(u) + end + end + end + return concat(new) +end + +--[[ldx-- +

Requesting lower and uppercase codes:
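A small sketch of the case helpers, assuming the data table provides the usual case mappings:

print(characters.uccodes[0x0061])   -- 0x0041
print(characters.lccodes[0x0041])   -- 0x0061
print(characters.upper("context"))  -- "CONTEXT", via the lpeg pattern over ucchars
print(characters.lower("ÀÉ"))       -- "àé"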

+--ldx]]-- + +function characters.uccode(n) return uccodes[n] end -- obsolete +function characters.lccode(n) return lccodes[n] end -- obsolete + +function characters.safechar(n) + local c = data[n] + if c and c.contextname then + return "\\" .. c.contextname + else + return utfchar(n) + end +end + +function characters.shape(n) + local shcode = shcodes[n] + if not shcode then + return n, nil + elseif type(shcode) == "table" then + return shcode[1], shcode[#shcode] + else + return shcode, nil + end +end + +-- -- some day we might go this route, but it does not really save that much +-- -- so not now (we can generate a lot using mtx-unicode that operates on the +-- -- database) +-- +-- -- category cjkwd direction linebreak +-- +-- -- adobename comment contextcommand contextname description fallback lccode +-- -- mathclass mathfiller mathname mathspec mathstretch mathsymbol mirror +-- -- range shcode specials uccode uccodes unicodeslot +-- +-- local data = { +-- ['one']={ +-- common = { +-- category="cc", +-- direction="bn", +-- linebreak="cm", +-- }, +-- vector = { +-- [0x0000] = { +-- description="NULL", +-- group='one', +-- unicodeslot=0x0000, +-- }, +-- { +-- description="START OF HEADING", +-- group='one', +-- unicodeslot=0x0001, +-- }, +-- } +-- } +-- } +-- +-- local chardata, groupdata = { }, { } +-- +-- for group, gdata in next, data do +-- local common, vector = { __index = gdata.common }, gdata.vector +-- for character, cdata in next, vector do +-- chardata[character] = cdata +-- setmetatable(cdata,common) +-- end +-- groupdata[group] = gdata +-- end + +--~ characters.data, characters.groups = chardata, groupdata + +--~ [0xF0000]={ +--~ category="co", +--~ cjkwd="a", +--~ description="", +--~ direction="l", +--~ unicodeslot=0xF0000, +--~ }, +--~ [0xFFFFD]={ +--~ category="co", +--~ cjkwd="a", +--~ description="", +--~ direction="l", +--~ unicodeslot=0xFFFFD, +--~ }, +--~ [0x100000]={ +--~ category="co", +--~ cjkwd="a", +--~ description="", +--~ direction="l", +--~ unicodeslot=0x100000, +--~ }, +--~ [0x10FFFD]={ +--~ category="co", +--~ cjkwd="a", +--~ description="", +--~ direction="l", +--~ unicodeslot=0x10FFFD, +--~ }, + +if not characters.superscripts then + + local superscripts = allocate() characters.superscripts = superscripts + local subscripts = allocate() characters.subscripts = subscripts + + -- skipping U+02120 (service mark) U+02122 (trademark) + + for k, v in next, data do + local specials = v.specials + if specials then + local what = specials[1] + if what == "super" then + if #specials == 2 then + superscripts[k] = specials[2] + else + report_defining("ignoring %s %a, char %c, description %a","superscript",ustring(k),k,v.description) + end + elseif what == "sub" then + if #specials == 2 then + subscripts[k] = specials[2] + else + report_defining("ignoring %s %a, char %c, description %a","subscript",ustring(k),k,v.description) + end + end + end + end + + -- print(table.serialize(superscripts, "superscripts", { hexify = true })) + -- print(table.serialize(subscripts, "subscripts", { hexify = true })) + + if storage then + storage.register("characters/superscripts", superscripts, "characters.superscripts") + storage.register("characters/subscripts", subscripts, "characters.subscripts") + end + +end + +-- for the moment only a few + +local tracedchars = utilities.strings.tracers + +tracedchars[0x00] = "[signal]" +tracedchars[0x20] = "[space]" + +-- the following code will move to char-tex.lua + +-- tex + +if not tex or not context or not commands then return characters end 
+ +local tex = tex +local texsetlccode = tex.setlccode +local texsetuccode = tex.setuccode +local texsetsfcode = tex.setsfcode +local texsetcatcode = tex.setcatcode + +local contextsprint = context.sprint +local ctxcatcodes = catcodes.numbers.ctxcatcodes + +--[[ldx-- +

Instead of using a file to define the named glyphs, we +use the table. After all, we have this information available anyway.
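As a side note (not part of the patch): commands.makeactive simply pushes TeX code that makes a slot active and forwards it to a named macro. A hedged sketch, where \myspace is a made-up command name:

-- only meaningful inside a ConTeXt run; \myspace is hypothetical
commands.makeactive(0x00A0, "myspace")
-- roughly amounts to: \catcode160=13 \unexpanded\def <U+00A0 char>{\myspace}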

+--ldx]]-- + +function commands.makeactive(n,name) -- + contextsprint(ctxcatcodes,format("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name)) + -- context("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name) +end + +function commands.utfchar(c,n) + if n then + -- contextsprint(c,charfromnumber(n)) + contextsprint(c,utfchar(n)) + else + -- contextsprint(charfromnumber(c)) + contextsprint(utfchar(c)) + end +end + +function commands.safechar(n) + local c = data[n] + if c and c.contextname then + contextsprint("\\" .. c.contextname) -- context[c.contextname]() + else + contextsprint(utfchar(n)) + end +end + +tex.uprint = commands.utfchar + +local forbidden = tohash { -- at least now + 0x00A0, + 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x200B, 0x200C, 0x200D, + 0x202F, + 0x205F, + -- 0xFEFF, +} + +function characters.define(tobelettered, tobeactivated) -- catcodetables + + if trace_defining then + report_defining("defining active character commands") + end + + local activated, a = { }, 0 + + for u, chr in next, data do -- these will be commands + local fallback = chr.fallback + if fallback then + contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}") + a = a + 1 + activated[a] = u + else + local contextname = chr.contextname + if contextname then + local category = chr.category + if is_character[category] then + if chr.unicodeslot < 128 then + if is_letter[category] then + contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s + else + contextsprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u)) -- has no s + end + else + contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s + end + elseif is_command[category] and not forbidden[u] then + contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}") + a = a + 1 + activated[a] = u + end + end + end + end + + if tobelettered then -- shared + local saved = tex.catcodetable + for i=1,#tobelettered do + tex.catcodetable = tobelettered[i] + if trace_defining then + report_defining("defining letters (global, shared)") + end + for u, chr in next, data do + if not chr.fallback and is_letter[chr.category] and u >= 128 and u <= 65536 then + texsetcatcode(u,11) + end + local range = chr.range + if range then + for i=1,range.first,range.last do + texsetcatcode(i,11) + end + end + end + texsetcatcode(0x200C,11) -- non-joiner + texsetcatcode(0x200D,11) -- joiner + end + tex.catcodetable = saved + end + + local nofactivated = #tobeactivated + if tobeactivated and nofactivated > 0 then + for i=1,nofactivated do + local u = activated[i] + if u then + report_defining("character %U is active in set %a, containing %a",u,data[u].description,tobeactivated) + end + end + local saved = tex.catcodetable + for i=1,#tobeactivated do + local vector = tobeactivated[i] + if trace_defining then + report_defining("defining %a active characters in vector %a",nofactivated,vector) + end + tex.catcodetable = vector + for i=1,nofactivated do + local u = activated[i] + if u then + texsetcatcode(u,13) + end + end + end + tex.catcodetable = saved + end + +end + +--[[ldx-- +

Setting the lccodes is also done in a loop over the data table.
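Per slot the loop boils down to a pair of primitive calls (a sketch; the codes for U+00E9 are assumed to follow the usual char-def.lua entry):

-- what characters.setcodes() does for one lowercase letter, here U+00E9
tex.setcatcode(0x00E9, 11)             -- category code: letter
tex.setlccode(0x00E9, 0x00E9, 0x00C9)  -- sets \lccode and \uccode in one call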

+--ldx]]-- + +local sfmode = "unset" -- unset, traditional, normal + +function characters.setcodes() + if trace_defining then + report_defining("defining lc and uc codes") + end + local traditional = sfstate == "traditional" or sfstate == "unset" + for code, chr in next, data do + local cc = chr.category + if is_letter[cc] then + local range = chr.range + if range then + for i=range.first,range.last do + texsetcatcode(i,11) -- letter + texsetlccode(i,i,i) -- self self + end + else + local lc, uc = chr.lccode, chr.uccode + if not lc then + chr.lccode, lc = code, code + elseif type(lc) == "table" then + lc = code + end + if not uc then + chr.uccode, uc = code, code + elseif type(uc) == "table" then + uc = code + end + texsetcatcode(code,11) -- letter + texsetlccode(code,lc,uc) + if traditional and cc == "lu" then + texsetsfcode(code,999) + end + end + elseif is_mark[cc] then + texsetlccode(code,code,code) -- for hyphenation + end + end + if traditional then + sfstate = "traditional" + end +end + +-- If this is something that is not documentwide and used a lot, then we +-- need a more clever approach (trivial but not now). + +local function setuppersfcodes(v,n) + if sfstate ~= "unset" then + report_defining("setting uppercase sf codes to %a",n) + for code, chr in next, data do + if chr.category == "lu" then + texsetsfcode(code,n) + end + end + end + sfstate = v +end + +directives.register("characters.spaceafteruppercase",function(v) + if v == "traditional" then + setuppersfcodes(v,999) + elseif v == "normal" then + setuppersfcodes(v,1000) + end +end) + +-- xml + +characters.activeoffset = 0x10000 -- there will be remapped in that byte range + +function commands.remapentity(chr,slot) + contextsprint(format("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr)) +end + +-- xml.entities = xml.entities or { } +-- +-- storage.register("xml/entities",xml.entities,"xml.entities") -- this will move to lxml +-- +-- function characters.setmkiventities() +-- local entities = xml.entities +-- entities.lt = "<" +-- entities.amp = "&" +-- entities.gt = ">" +-- end +-- +-- function characters.setmkiientities() +-- local entities = xml.entities +-- entities.lt = utfchar(characters.activeoffset + utfbyte("<")) +-- entities.amp = utfchar(characters.activeoffset + utfbyte("&")) +-- entities.gt = utfchar(characters.activeoffset + utfbyte(">")) +-- end + diff --git a/tex/context/base/char-map.lua b/tex/context/base/char-map.lua index 749da5289..e0e275169 100644 --- a/tex/context/base/char-map.lua +++ b/tex/context/base/char-map.lua @@ -1,1072 +1,1072 @@ -if not modules then modules = { } end modules ['char-map'] = { - version = 1.001, - comment = "companion to char-ini.mkiv", - author = "Hans Hagen & Arthur Reutenauer", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", - dataonly = true, -} - --- not yet used - -characters = characters or { } - -characters.casemap={ - [0x0049]={ - ["az"]={ - ["not_before_dot"]={ - ["lower"]={ 0x0131 }, - ["title"]={ 0x0049 }, - ["upper"]={ 0x0049 }, - }, - }, - ["lt"]={ - ["more_above"]={ - ["lower"]={ 0x0069, 0x0307 }, - ["title"]={ 0x0049 }, - ["upper"]={ 0x0049 }, - }, - }, - ["tr"]={ - ["not_before_dot"]={ - ["lower"]={ 0x0131 }, - ["title"]={ 0x0049 }, - ["upper"]={ 0x0049 }, - }, - }, - }, - [0x004A]={ - ["lt"]={ - ["more_above"]={ - ["lower"]={ 0x006A, 0x0307 }, - ["title"]={ 0x004A }, - ["upper"]={ 0x004A }, - }, - }, - }, - [0x0069]={ - ["az"]={ - ["all"]={ - ["lower"]={ 0x0069 }, - ["title"]={ 0x0130 }, - 
["upper"]={ 0x0130 }, - }, - }, - ["tr"]={ - ["all"]={ - ["lower"]={ 0x0069 }, - ["title"]={ 0x0130 }, - ["upper"]={ 0x0130 }, - }, - }, - }, - [0x00CC]={ - ["lt"]={ - ["all"]={ - ["lower"]={ 0x0069, 0x0307, 0x0300 }, - ["title"]={ 0x00CC }, - ["upper"]={ 0x00CC }, - }, - }, - }, - [0x00CD]={ - ["lt"]={ - ["all"]={ - ["lower"]={ 0x0069, 0x0307, 0x0301 }, - ["title"]={ 0x00CD }, - ["upper"]={ 0x00CD }, - }, - }, - }, - [0x00DF]={ - [""]={ - ["all"]={ - ["lower"]={ 0x00DF }, - ["title"]={ 0x0053, 0x0073 }, - ["upper"]={ 0x0053, 0x0053 }, - }, - }, - }, - [0x0128]={ - ["lt"]={ - ["all"]={ - ["lower"]={ 0x0069, 0x0307, 0x0303 }, - ["title"]={ 0x0128 }, - ["upper"]={ 0x0128 }, - }, - }, - }, - [0x012E]={ - ["lt"]={ - ["more_above"]={ - ["lower"]={ 0x012F, 0x0307 }, - ["title"]={ 0x012E }, - ["upper"]={ 0x012E }, - }, - }, - }, - [0x0130]={ - [""]={ - ["all"]={ - ["lower"]={ 0x0069, 0x0307 }, - ["title"]={ 0x0130 }, - ["upper"]={ 0x0130 }, - }, - }, - ["az"]={ - ["all"]={ - ["lower"]={ 0x0069 }, - ["title"]={ 0x0130 }, - ["upper"]={ 0x0130 }, - }, - }, - ["tr"]={ - ["all"]={ - ["lower"]={ 0x0069 }, - ["title"]={ 0x0130 }, - ["upper"]={ 0x0130 }, - }, - }, - }, - [0x0149]={ - [""]={ - ["all"]={ - ["lower"]={ 0x0149 }, - ["title"]={ 0x02BC, 0x004E }, - ["upper"]={ 0x02BC, 0x004E }, - }, - }, - }, - [0x01F0]={ - [""]={ - ["all"]={ - ["lower"]={ 0x01F0 }, - ["title"]={ 0x004A, 0x030C }, - ["upper"]={ 0x004A, 0x030C }, - }, - }, - }, - [0x0307]={ - ["az"]={ - ["after_i"]={ - ["lower"]={}, - ["title"]={ 0x0307 }, - ["upper"]={ 0x0307 }, - }, - }, - ["lt"]={ - ["after_soft_dotted"]={ - ["lower"]={ 0x0307 }, - ["title"]={}, - ["upper"]={}, - }, - }, - ["tr"]={ - ["after_i"]={ - ["lower"]={}, - ["title"]={ 0x0307 }, - ["upper"]={ 0x0307 }, - }, - }, - }, - [0x0390]={ - [""]={ - ["all"]={ - ["lower"]={ 0x0390 }, - ["title"]={ 0x0399, 0x0308, 0x0301 }, - ["upper"]={ 0x0399, 0x0308, 0x0301 }, - }, - }, - }, - [0x03A3]={ - ["final_sigma"]={ - ["all"]={ - ["lower"]={ 0x03C2 }, - ["title"]={ 0x03A3 }, - ["upper"]={ 0x03A3 }, - }, - }, - }, - [0x03B0]={ - [""]={ - ["all"]={ - ["lower"]={ 0x03B0 }, - ["title"]={ 0x03A5, 0x0308, 0x0301 }, - ["upper"]={ 0x03A5, 0x0308, 0x0301 }, - }, - }, - }, - [0x0587]={ - [""]={ - ["all"]={ - ["lower"]={ 0x0587 }, - ["title"]={ 0x0535, 0x0582 }, - ["upper"]={ 0x0535, 0x0552 }, - }, - }, - }, - [0x1E96]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1E96 }, - ["title"]={ 0x0048, 0x0331 }, - ["upper"]={ 0x0048, 0x0331 }, - }, - }, - }, - [0x1E97]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1E97 }, - ["title"]={ 0x0054, 0x0308 }, - ["upper"]={ 0x0054, 0x0308 }, - }, - }, - }, - [0x1E98]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1E98 }, - ["title"]={ 0x0057, 0x030A }, - ["upper"]={ 0x0057, 0x030A }, - }, - }, - }, - [0x1E99]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1E99 }, - ["title"]={ 0x0059, 0x030A }, - ["upper"]={ 0x0059, 0x030A }, - }, - }, - }, - [0x1E9A]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1E9A }, - ["title"]={ 0x0041, 0x02BE }, - ["upper"]={ 0x0041, 0x02BE }, - }, - }, - }, - [0x1F50]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F50 }, - ["title"]={ 0x03A5, 0x0313 }, - ["upper"]={ 0x03A5, 0x0313 }, - }, - }, - }, - [0x1F52]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F52 }, - ["title"]={ 0x03A5, 0x0313, 0x0300 }, - ["upper"]={ 0x03A5, 0x0313, 0x0300 }, - }, - }, - }, - [0x1F54]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F54 }, - ["title"]={ 0x03A5, 0x0313, 0x0301 }, - ["upper"]={ 0x03A5, 0x0313, 0x0301 }, - }, - }, - }, - [0x1F56]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F56 }, - ["title"]={ 0x03A5, 
0x0313, 0x0342 }, - ["upper"]={ 0x03A5, 0x0313, 0x0342 }, - }, - }, - }, - [0x1F80]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F80 }, - ["title"]={ 0x1F88 }, - ["upper"]={ 0x1F08, 0x0399 }, - }, - }, - }, - [0x1F81]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F81 }, - ["title"]={ 0x1F89 }, - ["upper"]={ 0x1F09, 0x0399 }, - }, - }, - }, - [0x1F82]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F82 }, - ["title"]={ 0x1F8A }, - ["upper"]={ 0x1F0A, 0x0399 }, - }, - }, - }, - [0x1F83]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F83 }, - ["title"]={ 0x1F8B }, - ["upper"]={ 0x1F0B, 0x0399 }, - }, - }, - }, - [0x1F84]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F84 }, - ["title"]={ 0x1F8C }, - ["upper"]={ 0x1F0C, 0x0399 }, - }, - }, - }, - [0x1F85]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F85 }, - ["title"]={ 0x1F8D }, - ["upper"]={ 0x1F0D, 0x0399 }, - }, - }, - }, - [0x1F86]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F86 }, - ["title"]={ 0x1F8E }, - ["upper"]={ 0x1F0E, 0x0399 }, - }, - }, - }, - [0x1F87]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F87 }, - ["title"]={ 0x1F8F }, - ["upper"]={ 0x1F0F, 0x0399 }, - }, - }, - }, - [0x1F88]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F80 }, - ["title"]={ 0x1F88 }, - ["upper"]={ 0x1F08, 0x0399 }, - }, - }, - }, - [0x1F89]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F81 }, - ["title"]={ 0x1F89 }, - ["upper"]={ 0x1F09, 0x0399 }, - }, - }, - }, - [0x1F8A]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F82 }, - ["title"]={ 0x1F8A }, - ["upper"]={ 0x1F0A, 0x0399 }, - }, - }, - }, - [0x1F8B]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F83 }, - ["title"]={ 0x1F8B }, - ["upper"]={ 0x1F0B, 0x0399 }, - }, - }, - }, - [0x1F8C]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F84 }, - ["title"]={ 0x1F8C }, - ["upper"]={ 0x1F0C, 0x0399 }, - }, - }, - }, - [0x1F8D]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F85 }, - ["title"]={ 0x1F8D }, - ["upper"]={ 0x1F0D, 0x0399 }, - }, - }, - }, - [0x1F8E]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F86 }, - ["title"]={ 0x1F8E }, - ["upper"]={ 0x1F0E, 0x0399 }, - }, - }, - }, - [0x1F8F]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F87 }, - ["title"]={ 0x1F8F }, - ["upper"]={ 0x1F0F, 0x0399 }, - }, - }, - }, - [0x1F90]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F90 }, - ["title"]={ 0x1F98 }, - ["upper"]={ 0x1F28, 0x0399 }, - }, - }, - }, - [0x1F91]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F91 }, - ["title"]={ 0x1F99 }, - ["upper"]={ 0x1F29, 0x0399 }, - }, - }, - }, - [0x1F92]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F92 }, - ["title"]={ 0x1F9A }, - ["upper"]={ 0x1F2A, 0x0399 }, - }, - }, - }, - [0x1F93]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F93 }, - ["title"]={ 0x1F9B }, - ["upper"]={ 0x1F2B, 0x0399 }, - }, - }, - }, - [0x1F94]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F94 }, - ["title"]={ 0x1F9C }, - ["upper"]={ 0x1F2C, 0x0399 }, - }, - }, - }, - [0x1F95]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F95 }, - ["title"]={ 0x1F9D }, - ["upper"]={ 0x1F2D, 0x0399 }, - }, - }, - }, - [0x1F96]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F96 }, - ["title"]={ 0x1F9E }, - ["upper"]={ 0x1F2E, 0x0399 }, - }, - }, - }, - [0x1F97]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F97 }, - ["title"]={ 0x1F9F }, - ["upper"]={ 0x1F2F, 0x0399 }, - }, - }, - }, - [0x1F98]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F90 }, - ["title"]={ 0x1F98 }, - ["upper"]={ 0x1F28, 0x0399 }, - }, - }, - }, - [0x1F99]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F91 }, - ["title"]={ 0x1F99 }, - ["upper"]={ 0x1F29, 0x0399 }, - }, - }, - }, - [0x1F9A]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F92 }, - ["title"]={ 0x1F9A }, - ["upper"]={ 0x1F2A, 0x0399 }, - }, - }, - 
}, - [0x1F9B]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F93 }, - ["title"]={ 0x1F9B }, - ["upper"]={ 0x1F2B, 0x0399 }, - }, - }, - }, - [0x1F9C]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F94 }, - ["title"]={ 0x1F9C }, - ["upper"]={ 0x1F2C, 0x0399 }, - }, - }, - }, - [0x1F9D]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F95 }, - ["title"]={ 0x1F9D }, - ["upper"]={ 0x1F2D, 0x0399 }, - }, - }, - }, - [0x1F9E]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F96 }, - ["title"]={ 0x1F9E }, - ["upper"]={ 0x1F2E, 0x0399 }, - }, - }, - }, - [0x1F9F]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1F97 }, - ["title"]={ 0x1F9F }, - ["upper"]={ 0x1F2F, 0x0399 }, - }, - }, - }, - [0x1FA0]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA0 }, - ["title"]={ 0x1FA8 }, - ["upper"]={ 0x1F68, 0x0399 }, - }, - }, - }, - [0x1FA1]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA1 }, - ["title"]={ 0x1FA9 }, - ["upper"]={ 0x1F69, 0x0399 }, - }, - }, - }, - [0x1FA2]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA2 }, - ["title"]={ 0x1FAA }, - ["upper"]={ 0x1F6A, 0x0399 }, - }, - }, - }, - [0x1FA3]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA3 }, - ["title"]={ 0x1FAB }, - ["upper"]={ 0x1F6B, 0x0399 }, - }, - }, - }, - [0x1FA4]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA4 }, - ["title"]={ 0x1FAC }, - ["upper"]={ 0x1F6C, 0x0399 }, - }, - }, - }, - [0x1FA5]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA5 }, - ["title"]={ 0x1FAD }, - ["upper"]={ 0x1F6D, 0x0399 }, - }, - }, - }, - [0x1FA6]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA6 }, - ["title"]={ 0x1FAE }, - ["upper"]={ 0x1F6E, 0x0399 }, - }, - }, - }, - [0x1FA7]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA7 }, - ["title"]={ 0x1FAF }, - ["upper"]={ 0x1F6F, 0x0399 }, - }, - }, - }, - [0x1FA8]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA0 }, - ["title"]={ 0x1FA8 }, - ["upper"]={ 0x1F68, 0x0399 }, - }, - }, - }, - [0x1FA9]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA1 }, - ["title"]={ 0x1FA9 }, - ["upper"]={ 0x1F69, 0x0399 }, - }, - }, - }, - [0x1FAA]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA2 }, - ["title"]={ 0x1FAA }, - ["upper"]={ 0x1F6A, 0x0399 }, - }, - }, - }, - [0x1FAB]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA3 }, - ["title"]={ 0x1FAB }, - ["upper"]={ 0x1F6B, 0x0399 }, - }, - }, - }, - [0x1FAC]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA4 }, - ["title"]={ 0x1FAC }, - ["upper"]={ 0x1F6C, 0x0399 }, - }, - }, - }, - [0x1FAD]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA5 }, - ["title"]={ 0x1FAD }, - ["upper"]={ 0x1F6D, 0x0399 }, - }, - }, - }, - [0x1FAE]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA6 }, - ["title"]={ 0x1FAE }, - ["upper"]={ 0x1F6E, 0x0399 }, - }, - }, - }, - [0x1FAF]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FA7 }, - ["title"]={ 0x1FAF }, - ["upper"]={ 0x1F6F, 0x0399 }, - }, - }, - }, - [0x1FB2]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FB2 }, - ["title"]={ 0x1FBA, 0x0345 }, - ["upper"]={ 0x1FBA, 0x0399 }, - }, - }, - }, - [0x1FB3]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FB3 }, - ["title"]={ 0x1FBC }, - ["upper"]={ 0x0391, 0x0399 }, - }, - }, - }, - [0x1FB4]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FB4 }, - ["title"]={ 0x0386, 0x0345 }, - ["upper"]={ 0x0386, 0x0399 }, - }, - }, - }, - [0x1FB6]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FB6 }, - ["title"]={ 0x0391, 0x0342 }, - ["upper"]={ 0x0391, 0x0342 }, - }, - }, - }, - [0x1FB7]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FB7 }, - ["title"]={ 0x0391, 0x0342, 0x0345 }, - ["upper"]={ 0x0391, 0x0342, 0x0399 }, - }, - }, - }, - [0x1FBC]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FB3 }, - ["title"]={ 0x1FBC }, - ["upper"]={ 0x0391, 0x0399 }, - }, - }, - }, - [0x1FC2]={ - 
[""]={ - ["all"]={ - ["lower"]={ 0x1FC2 }, - ["title"]={ 0x1FCA, 0x0345 }, - ["upper"]={ 0x1FCA, 0x0399 }, - }, - }, - }, - [0x1FC3]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FC3 }, - ["title"]={ 0x1FCC }, - ["upper"]={ 0x0397, 0x0399 }, - }, - }, - }, - [0x1FC4]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FC4 }, - ["title"]={ 0x0389, 0x0345 }, - ["upper"]={ 0x0389, 0x0399 }, - }, - }, - }, - [0x1FC6]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FC6 }, - ["title"]={ 0x0397, 0x0342 }, - ["upper"]={ 0x0397, 0x0342 }, - }, - }, - }, - [0x1FC7]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FC7 }, - ["title"]={ 0x0397, 0x0342, 0x0345 }, - ["upper"]={ 0x0397, 0x0342, 0x0399 }, - }, - }, - }, - [0x1FCC]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FC3 }, - ["title"]={ 0x1FCC }, - ["upper"]={ 0x0397, 0x0399 }, - }, - }, - }, - [0x1FD2]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FD2 }, - ["title"]={ 0x0399, 0x0308, 0x0300 }, - ["upper"]={ 0x0399, 0x0308, 0x0300 }, - }, - }, - }, - [0x1FD3]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FD3 }, - ["title"]={ 0x0399, 0x0308, 0x0301 }, - ["upper"]={ 0x0399, 0x0308, 0x0301 }, - }, - }, - }, - [0x1FD6]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FD6 }, - ["title"]={ 0x0399, 0x0342 }, - ["upper"]={ 0x0399, 0x0342 }, - }, - }, - }, - [0x1FD7]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FD7 }, - ["title"]={ 0x0399, 0x0308, 0x0342 }, - ["upper"]={ 0x0399, 0x0308, 0x0342 }, - }, - }, - }, - [0x1FE2]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FE2 }, - ["title"]={ 0x03A5, 0x0308, 0x0300 }, - ["upper"]={ 0x03A5, 0x0308, 0x0300 }, - }, - }, - }, - [0x1FE3]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FE3 }, - ["title"]={ 0x03A5, 0x0308, 0x0301 }, - ["upper"]={ 0x03A5, 0x0308, 0x0301 }, - }, - }, - }, - [0x1FE4]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FE4 }, - ["title"]={ 0x03A1, 0x0313 }, - ["upper"]={ 0x03A1, 0x0313 }, - }, - }, - }, - [0x1FE6]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FE6 }, - ["title"]={ 0x03A5, 0x0342 }, - ["upper"]={ 0x03A5, 0x0342 }, - }, - }, - }, - [0x1FE7]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FE7 }, - ["title"]={ 0x03A5, 0x0308, 0x0342 }, - ["upper"]={ 0x03A5, 0x0308, 0x0342 }, - }, - }, - }, - [0x1FF2]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FF2 }, - ["title"]={ 0x1FFA, 0x0345 }, - ["upper"]={ 0x1FFA, 0x0399 }, - }, - }, - }, - [0x1FF3]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FF3 }, - ["title"]={ 0x1FFC }, - ["upper"]={ 0x03A9, 0x0399 }, - }, - }, - }, - [0x1FF4]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FF4 }, - ["title"]={ 0x038F, 0x0345 }, - ["upper"]={ 0x038F, 0x0399 }, - }, - }, - }, - [0x1FF6]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FF6 }, - ["title"]={ 0x03A9, 0x0342 }, - ["upper"]={ 0x03A9, 0x0342 }, - }, - }, - }, - [0x1FF7]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FF7 }, - ["title"]={ 0x03A9, 0x0342, 0x0345 }, - ["upper"]={ 0x03A9, 0x0342, 0x0399 }, - }, - }, - }, - [0x1FFC]={ - [""]={ - ["all"]={ - ["lower"]={ 0x1FF3 }, - ["title"]={ 0x1FFC }, - ["upper"]={ 0x03A9, 0x0399 }, - }, - }, - }, - [0xFB00]={ - [""]={ - ["all"]={ - ["lower"]={ 0xFB00 }, - ["title"]={ 0x0046, 0x0066 }, - ["upper"]={ 0x0046, 0x0046 }, - }, - }, - }, - [0xFB01]={ - [""]={ - ["all"]={ - ["lower"]={ 0xFB01 }, - ["title"]={ 0x0046, 0x0069 }, - ["upper"]={ 0x0046, 0x0049 }, - }, - }, - }, - [0xFB02]={ - [""]={ - ["all"]={ - ["lower"]={ 0xFB02 }, - ["title"]={ 0x0046, 0x006C }, - ["upper"]={ 0x0046, 0x004C }, - }, - }, - }, - [0xFB03]={ - [""]={ - ["all"]={ - ["lower"]={ 0xFB03 }, - ["title"]={ 0x0046, 0x0066, 0x0069 }, - ["upper"]={ 0x0046, 0x0046, 0x0049 }, - }, - }, - }, - [0xFB04]={ - [""]={ - 
["all"]={ - ["lower"]={ 0xFB04 }, - ["title"]={ 0x0046, 0x0066, 0x006C }, - ["upper"]={ 0x0046, 0x0046, 0x004C }, - }, - }, - }, - [0xFB05]={ - [""]={ - ["all"]={ - ["lower"]={ 0xFB05 }, - ["title"]={ 0x0053, 0x0074 }, - ["upper"]={ 0x0053, 0x0054 }, - }, - }, - }, - [0xFB06]={ - [""]={ - ["all"]={ - ["lower"]={ 0xFB06 }, - ["title"]={ 0x0053, 0x0074 }, - ["upper"]={ 0x0053, 0x0054 }, - }, - }, - }, - [0xFB13]={ - [""]={ - ["all"]={ - ["lower"]={ 0xFB13 }, - ["title"]={ 0x0544, 0x0576 }, - ["upper"]={ 0x0544, 0x0546 }, - }, - }, - }, - [0xFB14]={ - [""]={ - ["all"]={ - ["lower"]={ 0xFB14 }, - ["title"]={ 0x0544, 0x0565 }, - ["upper"]={ 0x0544, 0x0535 }, - }, - }, - }, - [0xFB15]={ - [""]={ - ["all"]={ - ["lower"]={ 0xFB15 }, - ["title"]={ 0x0544, 0x056B }, - ["upper"]={ 0x0544, 0x053B }, - }, - }, - }, - [0xFB16]={ - [""]={ - ["all"]={ - ["lower"]={ 0xFB16 }, - ["title"]={ 0x054E, 0x0576 }, - ["upper"]={ 0x054E, 0x0546 }, - }, - }, - }, - [0xFB17]={ - [""]={ - ["all"]={ - ["lower"]={ 0xFB17 }, - ["title"]={ 0x0544, 0x056D }, - ["upper"]={ 0x0544, 0x053D }, - }, - }, - }, -} +if not modules then modules = { } end modules ['char-map'] = { + version = 1.001, + comment = "companion to char-ini.mkiv", + author = "Hans Hagen & Arthur Reutenauer", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", + dataonly = true, +} + +-- not yet used + +characters = characters or { } + +characters.casemap={ + [0x0049]={ + ["az"]={ + ["not_before_dot"]={ + ["lower"]={ 0x0131 }, + ["title"]={ 0x0049 }, + ["upper"]={ 0x0049 }, + }, + }, + ["lt"]={ + ["more_above"]={ + ["lower"]={ 0x0069, 0x0307 }, + ["title"]={ 0x0049 }, + ["upper"]={ 0x0049 }, + }, + }, + ["tr"]={ + ["not_before_dot"]={ + ["lower"]={ 0x0131 }, + ["title"]={ 0x0049 }, + ["upper"]={ 0x0049 }, + }, + }, + }, + [0x004A]={ + ["lt"]={ + ["more_above"]={ + ["lower"]={ 0x006A, 0x0307 }, + ["title"]={ 0x004A }, + ["upper"]={ 0x004A }, + }, + }, + }, + [0x0069]={ + ["az"]={ + ["all"]={ + ["lower"]={ 0x0069 }, + ["title"]={ 0x0130 }, + ["upper"]={ 0x0130 }, + }, + }, + ["tr"]={ + ["all"]={ + ["lower"]={ 0x0069 }, + ["title"]={ 0x0130 }, + ["upper"]={ 0x0130 }, + }, + }, + }, + [0x00CC]={ + ["lt"]={ + ["all"]={ + ["lower"]={ 0x0069, 0x0307, 0x0300 }, + ["title"]={ 0x00CC }, + ["upper"]={ 0x00CC }, + }, + }, + }, + [0x00CD]={ + ["lt"]={ + ["all"]={ + ["lower"]={ 0x0069, 0x0307, 0x0301 }, + ["title"]={ 0x00CD }, + ["upper"]={ 0x00CD }, + }, + }, + }, + [0x00DF]={ + [""]={ + ["all"]={ + ["lower"]={ 0x00DF }, + ["title"]={ 0x0053, 0x0073 }, + ["upper"]={ 0x0053, 0x0053 }, + }, + }, + }, + [0x0128]={ + ["lt"]={ + ["all"]={ + ["lower"]={ 0x0069, 0x0307, 0x0303 }, + ["title"]={ 0x0128 }, + ["upper"]={ 0x0128 }, + }, + }, + }, + [0x012E]={ + ["lt"]={ + ["more_above"]={ + ["lower"]={ 0x012F, 0x0307 }, + ["title"]={ 0x012E }, + ["upper"]={ 0x012E }, + }, + }, + }, + [0x0130]={ + [""]={ + ["all"]={ + ["lower"]={ 0x0069, 0x0307 }, + ["title"]={ 0x0130 }, + ["upper"]={ 0x0130 }, + }, + }, + ["az"]={ + ["all"]={ + ["lower"]={ 0x0069 }, + ["title"]={ 0x0130 }, + ["upper"]={ 0x0130 }, + }, + }, + ["tr"]={ + ["all"]={ + ["lower"]={ 0x0069 }, + ["title"]={ 0x0130 }, + ["upper"]={ 0x0130 }, + }, + }, + }, + [0x0149]={ + [""]={ + ["all"]={ + ["lower"]={ 0x0149 }, + ["title"]={ 0x02BC, 0x004E }, + ["upper"]={ 0x02BC, 0x004E }, + }, + }, + }, + [0x01F0]={ + [""]={ + ["all"]={ + ["lower"]={ 0x01F0 }, + ["title"]={ 0x004A, 0x030C }, + ["upper"]={ 0x004A, 0x030C }, + }, + }, + }, + [0x0307]={ + ["az"]={ + ["after_i"]={ + 
["lower"]={}, + ["title"]={ 0x0307 }, + ["upper"]={ 0x0307 }, + }, + }, + ["lt"]={ + ["after_soft_dotted"]={ + ["lower"]={ 0x0307 }, + ["title"]={}, + ["upper"]={}, + }, + }, + ["tr"]={ + ["after_i"]={ + ["lower"]={}, + ["title"]={ 0x0307 }, + ["upper"]={ 0x0307 }, + }, + }, + }, + [0x0390]={ + [""]={ + ["all"]={ + ["lower"]={ 0x0390 }, + ["title"]={ 0x0399, 0x0308, 0x0301 }, + ["upper"]={ 0x0399, 0x0308, 0x0301 }, + }, + }, + }, + [0x03A3]={ + ["final_sigma"]={ + ["all"]={ + ["lower"]={ 0x03C2 }, + ["title"]={ 0x03A3 }, + ["upper"]={ 0x03A3 }, + }, + }, + }, + [0x03B0]={ + [""]={ + ["all"]={ + ["lower"]={ 0x03B0 }, + ["title"]={ 0x03A5, 0x0308, 0x0301 }, + ["upper"]={ 0x03A5, 0x0308, 0x0301 }, + }, + }, + }, + [0x0587]={ + [""]={ + ["all"]={ + ["lower"]={ 0x0587 }, + ["title"]={ 0x0535, 0x0582 }, + ["upper"]={ 0x0535, 0x0552 }, + }, + }, + }, + [0x1E96]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1E96 }, + ["title"]={ 0x0048, 0x0331 }, + ["upper"]={ 0x0048, 0x0331 }, + }, + }, + }, + [0x1E97]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1E97 }, + ["title"]={ 0x0054, 0x0308 }, + ["upper"]={ 0x0054, 0x0308 }, + }, + }, + }, + [0x1E98]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1E98 }, + ["title"]={ 0x0057, 0x030A }, + ["upper"]={ 0x0057, 0x030A }, + }, + }, + }, + [0x1E99]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1E99 }, + ["title"]={ 0x0059, 0x030A }, + ["upper"]={ 0x0059, 0x030A }, + }, + }, + }, + [0x1E9A]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1E9A }, + ["title"]={ 0x0041, 0x02BE }, + ["upper"]={ 0x0041, 0x02BE }, + }, + }, + }, + [0x1F50]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F50 }, + ["title"]={ 0x03A5, 0x0313 }, + ["upper"]={ 0x03A5, 0x0313 }, + }, + }, + }, + [0x1F52]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F52 }, + ["title"]={ 0x03A5, 0x0313, 0x0300 }, + ["upper"]={ 0x03A5, 0x0313, 0x0300 }, + }, + }, + }, + [0x1F54]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F54 }, + ["title"]={ 0x03A5, 0x0313, 0x0301 }, + ["upper"]={ 0x03A5, 0x0313, 0x0301 }, + }, + }, + }, + [0x1F56]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F56 }, + ["title"]={ 0x03A5, 0x0313, 0x0342 }, + ["upper"]={ 0x03A5, 0x0313, 0x0342 }, + }, + }, + }, + [0x1F80]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F80 }, + ["title"]={ 0x1F88 }, + ["upper"]={ 0x1F08, 0x0399 }, + }, + }, + }, + [0x1F81]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F81 }, + ["title"]={ 0x1F89 }, + ["upper"]={ 0x1F09, 0x0399 }, + }, + }, + }, + [0x1F82]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F82 }, + ["title"]={ 0x1F8A }, + ["upper"]={ 0x1F0A, 0x0399 }, + }, + }, + }, + [0x1F83]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F83 }, + ["title"]={ 0x1F8B }, + ["upper"]={ 0x1F0B, 0x0399 }, + }, + }, + }, + [0x1F84]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F84 }, + ["title"]={ 0x1F8C }, + ["upper"]={ 0x1F0C, 0x0399 }, + }, + }, + }, + [0x1F85]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F85 }, + ["title"]={ 0x1F8D }, + ["upper"]={ 0x1F0D, 0x0399 }, + }, + }, + }, + [0x1F86]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F86 }, + ["title"]={ 0x1F8E }, + ["upper"]={ 0x1F0E, 0x0399 }, + }, + }, + }, + [0x1F87]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F87 }, + ["title"]={ 0x1F8F }, + ["upper"]={ 0x1F0F, 0x0399 }, + }, + }, + }, + [0x1F88]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F80 }, + ["title"]={ 0x1F88 }, + ["upper"]={ 0x1F08, 0x0399 }, + }, + }, + }, + [0x1F89]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F81 }, + ["title"]={ 0x1F89 }, + ["upper"]={ 0x1F09, 0x0399 }, + }, + }, + }, + [0x1F8A]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F82 }, + ["title"]={ 0x1F8A }, + ["upper"]={ 0x1F0A, 0x0399 }, + }, + }, + 
}, + [0x1F8B]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F83 }, + ["title"]={ 0x1F8B }, + ["upper"]={ 0x1F0B, 0x0399 }, + }, + }, + }, + [0x1F8C]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F84 }, + ["title"]={ 0x1F8C }, + ["upper"]={ 0x1F0C, 0x0399 }, + }, + }, + }, + [0x1F8D]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F85 }, + ["title"]={ 0x1F8D }, + ["upper"]={ 0x1F0D, 0x0399 }, + }, + }, + }, + [0x1F8E]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F86 }, + ["title"]={ 0x1F8E }, + ["upper"]={ 0x1F0E, 0x0399 }, + }, + }, + }, + [0x1F8F]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F87 }, + ["title"]={ 0x1F8F }, + ["upper"]={ 0x1F0F, 0x0399 }, + }, + }, + }, + [0x1F90]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F90 }, + ["title"]={ 0x1F98 }, + ["upper"]={ 0x1F28, 0x0399 }, + }, + }, + }, + [0x1F91]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F91 }, + ["title"]={ 0x1F99 }, + ["upper"]={ 0x1F29, 0x0399 }, + }, + }, + }, + [0x1F92]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F92 }, + ["title"]={ 0x1F9A }, + ["upper"]={ 0x1F2A, 0x0399 }, + }, + }, + }, + [0x1F93]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F93 }, + ["title"]={ 0x1F9B }, + ["upper"]={ 0x1F2B, 0x0399 }, + }, + }, + }, + [0x1F94]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F94 }, + ["title"]={ 0x1F9C }, + ["upper"]={ 0x1F2C, 0x0399 }, + }, + }, + }, + [0x1F95]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F95 }, + ["title"]={ 0x1F9D }, + ["upper"]={ 0x1F2D, 0x0399 }, + }, + }, + }, + [0x1F96]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F96 }, + ["title"]={ 0x1F9E }, + ["upper"]={ 0x1F2E, 0x0399 }, + }, + }, + }, + [0x1F97]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F97 }, + ["title"]={ 0x1F9F }, + ["upper"]={ 0x1F2F, 0x0399 }, + }, + }, + }, + [0x1F98]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F90 }, + ["title"]={ 0x1F98 }, + ["upper"]={ 0x1F28, 0x0399 }, + }, + }, + }, + [0x1F99]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F91 }, + ["title"]={ 0x1F99 }, + ["upper"]={ 0x1F29, 0x0399 }, + }, + }, + }, + [0x1F9A]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F92 }, + ["title"]={ 0x1F9A }, + ["upper"]={ 0x1F2A, 0x0399 }, + }, + }, + }, + [0x1F9B]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F93 }, + ["title"]={ 0x1F9B }, + ["upper"]={ 0x1F2B, 0x0399 }, + }, + }, + }, + [0x1F9C]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F94 }, + ["title"]={ 0x1F9C }, + ["upper"]={ 0x1F2C, 0x0399 }, + }, + }, + }, + [0x1F9D]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F95 }, + ["title"]={ 0x1F9D }, + ["upper"]={ 0x1F2D, 0x0399 }, + }, + }, + }, + [0x1F9E]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F96 }, + ["title"]={ 0x1F9E }, + ["upper"]={ 0x1F2E, 0x0399 }, + }, + }, + }, + [0x1F9F]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1F97 }, + ["title"]={ 0x1F9F }, + ["upper"]={ 0x1F2F, 0x0399 }, + }, + }, + }, + [0x1FA0]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA0 }, + ["title"]={ 0x1FA8 }, + ["upper"]={ 0x1F68, 0x0399 }, + }, + }, + }, + [0x1FA1]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA1 }, + ["title"]={ 0x1FA9 }, + ["upper"]={ 0x1F69, 0x0399 }, + }, + }, + }, + [0x1FA2]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA2 }, + ["title"]={ 0x1FAA }, + ["upper"]={ 0x1F6A, 0x0399 }, + }, + }, + }, + [0x1FA3]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA3 }, + ["title"]={ 0x1FAB }, + ["upper"]={ 0x1F6B, 0x0399 }, + }, + }, + }, + [0x1FA4]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA4 }, + ["title"]={ 0x1FAC }, + ["upper"]={ 0x1F6C, 0x0399 }, + }, + }, + }, + [0x1FA5]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA5 }, + ["title"]={ 0x1FAD }, + ["upper"]={ 0x1F6D, 0x0399 }, + }, + }, + }, + [0x1FA6]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA6 }, + 
["title"]={ 0x1FAE }, + ["upper"]={ 0x1F6E, 0x0399 }, + }, + }, + }, + [0x1FA7]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA7 }, + ["title"]={ 0x1FAF }, + ["upper"]={ 0x1F6F, 0x0399 }, + }, + }, + }, + [0x1FA8]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA0 }, + ["title"]={ 0x1FA8 }, + ["upper"]={ 0x1F68, 0x0399 }, + }, + }, + }, + [0x1FA9]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA1 }, + ["title"]={ 0x1FA9 }, + ["upper"]={ 0x1F69, 0x0399 }, + }, + }, + }, + [0x1FAA]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA2 }, + ["title"]={ 0x1FAA }, + ["upper"]={ 0x1F6A, 0x0399 }, + }, + }, + }, + [0x1FAB]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA3 }, + ["title"]={ 0x1FAB }, + ["upper"]={ 0x1F6B, 0x0399 }, + }, + }, + }, + [0x1FAC]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA4 }, + ["title"]={ 0x1FAC }, + ["upper"]={ 0x1F6C, 0x0399 }, + }, + }, + }, + [0x1FAD]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA5 }, + ["title"]={ 0x1FAD }, + ["upper"]={ 0x1F6D, 0x0399 }, + }, + }, + }, + [0x1FAE]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA6 }, + ["title"]={ 0x1FAE }, + ["upper"]={ 0x1F6E, 0x0399 }, + }, + }, + }, + [0x1FAF]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FA7 }, + ["title"]={ 0x1FAF }, + ["upper"]={ 0x1F6F, 0x0399 }, + }, + }, + }, + [0x1FB2]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FB2 }, + ["title"]={ 0x1FBA, 0x0345 }, + ["upper"]={ 0x1FBA, 0x0399 }, + }, + }, + }, + [0x1FB3]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FB3 }, + ["title"]={ 0x1FBC }, + ["upper"]={ 0x0391, 0x0399 }, + }, + }, + }, + [0x1FB4]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FB4 }, + ["title"]={ 0x0386, 0x0345 }, + ["upper"]={ 0x0386, 0x0399 }, + }, + }, + }, + [0x1FB6]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FB6 }, + ["title"]={ 0x0391, 0x0342 }, + ["upper"]={ 0x0391, 0x0342 }, + }, + }, + }, + [0x1FB7]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FB7 }, + ["title"]={ 0x0391, 0x0342, 0x0345 }, + ["upper"]={ 0x0391, 0x0342, 0x0399 }, + }, + }, + }, + [0x1FBC]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FB3 }, + ["title"]={ 0x1FBC }, + ["upper"]={ 0x0391, 0x0399 }, + }, + }, + }, + [0x1FC2]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FC2 }, + ["title"]={ 0x1FCA, 0x0345 }, + ["upper"]={ 0x1FCA, 0x0399 }, + }, + }, + }, + [0x1FC3]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FC3 }, + ["title"]={ 0x1FCC }, + ["upper"]={ 0x0397, 0x0399 }, + }, + }, + }, + [0x1FC4]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FC4 }, + ["title"]={ 0x0389, 0x0345 }, + ["upper"]={ 0x0389, 0x0399 }, + }, + }, + }, + [0x1FC6]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FC6 }, + ["title"]={ 0x0397, 0x0342 }, + ["upper"]={ 0x0397, 0x0342 }, + }, + }, + }, + [0x1FC7]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FC7 }, + ["title"]={ 0x0397, 0x0342, 0x0345 }, + ["upper"]={ 0x0397, 0x0342, 0x0399 }, + }, + }, + }, + [0x1FCC]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FC3 }, + ["title"]={ 0x1FCC }, + ["upper"]={ 0x0397, 0x0399 }, + }, + }, + }, + [0x1FD2]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FD2 }, + ["title"]={ 0x0399, 0x0308, 0x0300 }, + ["upper"]={ 0x0399, 0x0308, 0x0300 }, + }, + }, + }, + [0x1FD3]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FD3 }, + ["title"]={ 0x0399, 0x0308, 0x0301 }, + ["upper"]={ 0x0399, 0x0308, 0x0301 }, + }, + }, + }, + [0x1FD6]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FD6 }, + ["title"]={ 0x0399, 0x0342 }, + ["upper"]={ 0x0399, 0x0342 }, + }, + }, + }, + [0x1FD7]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FD7 }, + ["title"]={ 0x0399, 0x0308, 0x0342 }, + ["upper"]={ 0x0399, 0x0308, 0x0342 }, + }, + }, + }, + [0x1FE2]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FE2 }, + ["title"]={ 0x03A5, 
0x0308, 0x0300 }, + ["upper"]={ 0x03A5, 0x0308, 0x0300 }, + }, + }, + }, + [0x1FE3]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FE3 }, + ["title"]={ 0x03A5, 0x0308, 0x0301 }, + ["upper"]={ 0x03A5, 0x0308, 0x0301 }, + }, + }, + }, + [0x1FE4]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FE4 }, + ["title"]={ 0x03A1, 0x0313 }, + ["upper"]={ 0x03A1, 0x0313 }, + }, + }, + }, + [0x1FE6]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FE6 }, + ["title"]={ 0x03A5, 0x0342 }, + ["upper"]={ 0x03A5, 0x0342 }, + }, + }, + }, + [0x1FE7]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FE7 }, + ["title"]={ 0x03A5, 0x0308, 0x0342 }, + ["upper"]={ 0x03A5, 0x0308, 0x0342 }, + }, + }, + }, + [0x1FF2]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FF2 }, + ["title"]={ 0x1FFA, 0x0345 }, + ["upper"]={ 0x1FFA, 0x0399 }, + }, + }, + }, + [0x1FF3]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FF3 }, + ["title"]={ 0x1FFC }, + ["upper"]={ 0x03A9, 0x0399 }, + }, + }, + }, + [0x1FF4]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FF4 }, + ["title"]={ 0x038F, 0x0345 }, + ["upper"]={ 0x038F, 0x0399 }, + }, + }, + }, + [0x1FF6]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FF6 }, + ["title"]={ 0x03A9, 0x0342 }, + ["upper"]={ 0x03A9, 0x0342 }, + }, + }, + }, + [0x1FF7]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FF7 }, + ["title"]={ 0x03A9, 0x0342, 0x0345 }, + ["upper"]={ 0x03A9, 0x0342, 0x0399 }, + }, + }, + }, + [0x1FFC]={ + [""]={ + ["all"]={ + ["lower"]={ 0x1FF3 }, + ["title"]={ 0x1FFC }, + ["upper"]={ 0x03A9, 0x0399 }, + }, + }, + }, + [0xFB00]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB00 }, + ["title"]={ 0x0046, 0x0066 }, + ["upper"]={ 0x0046, 0x0046 }, + }, + }, + }, + [0xFB01]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB01 }, + ["title"]={ 0x0046, 0x0069 }, + ["upper"]={ 0x0046, 0x0049 }, + }, + }, + }, + [0xFB02]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB02 }, + ["title"]={ 0x0046, 0x006C }, + ["upper"]={ 0x0046, 0x004C }, + }, + }, + }, + [0xFB03]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB03 }, + ["title"]={ 0x0046, 0x0066, 0x0069 }, + ["upper"]={ 0x0046, 0x0046, 0x0049 }, + }, + }, + }, + [0xFB04]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB04 }, + ["title"]={ 0x0046, 0x0066, 0x006C }, + ["upper"]={ 0x0046, 0x0046, 0x004C }, + }, + }, + }, + [0xFB05]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB05 }, + ["title"]={ 0x0053, 0x0074 }, + ["upper"]={ 0x0053, 0x0054 }, + }, + }, + }, + [0xFB06]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB06 }, + ["title"]={ 0x0053, 0x0074 }, + ["upper"]={ 0x0053, 0x0054 }, + }, + }, + }, + [0xFB13]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB13 }, + ["title"]={ 0x0544, 0x0576 }, + ["upper"]={ 0x0544, 0x0546 }, + }, + }, + }, + [0xFB14]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB14 }, + ["title"]={ 0x0544, 0x0565 }, + ["upper"]={ 0x0544, 0x0535 }, + }, + }, + }, + [0xFB15]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB15 }, + ["title"]={ 0x0544, 0x056B }, + ["upper"]={ 0x0544, 0x053B }, + }, + }, + }, + [0xFB16]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB16 }, + ["title"]={ 0x054E, 0x0576 }, + ["upper"]={ 0x054E, 0x0546 }, + }, + }, + }, + [0xFB17]={ + [""]={ + ["all"]={ + ["lower"]={ 0xFB17 }, + ["title"]={ 0x0544, 0x056D }, + ["upper"]={ 0x0544, 0x053D }, + }, + }, + }, +} diff --git a/tex/context/base/char-tex.lua b/tex/context/base/char-tex.lua index 91aa387b9..c470eb6c4 100644 --- a/tex/context/base/char-tex.lua +++ b/tex/context/base/char-tex.lua @@ -1,211 +1,211 @@ -if not modules then modules = { } end modules ['char-tex'] = { - version = 1.001, - comment = "companion to char-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE 
/ ConTeXt Development Team", - license = "see context related readme files" -} - -local lpeg = lpeg - -local find = string.find -local P, C, R, S, Cs, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc -local U, lpegmatch = lpeg.patterns.utf8, lpeg.match - -local allocate, mark = utilities.storage.allocate, utilities.storage.mark - -characters = characters or { } -local characters = characters -characters.tex = characters.tex or { } - -local accentmapping = allocate { - ['"'] = { [""] = "¨", - A = "Ä", a = "ä", - E = "Ë", e = "ë", - I = "Ã", i = "ï", ["ı"] = "ï", - O = "Ö", o = "ö", - U = "Ãœ", u = "ü", - Y = "Ÿ", y = "ÿ", - }, - ["'"] = { [""] = "´", - A = "Ã", a = "á", - C = "Ć", c = "ć", - E = "É", e = "é", - I = "Ã", i = "í", ["ı"] = "í", - L = "Ĺ", l = "ĺ", - N = "Ń", n = "Å„", - O = "Ó", o = "ó", - R = "Å”", r = "Å•", - S = "Åš", s = "Å›", - U = "Ú", u = "ú", - Y = "Ã", y = "ý", - Z = "Ź", z = "ź", - }, - ["."] = { [""] = "Ë™", - C = "ÄŠ", c = "Ä‹", - E = "Ä–", e = "Ä—", - G = "Ä ", g = "Ä¡", - I = "Ä°", i = "i", ["ı"] = "i", - Z = "Å»", z = "ż", - }, - ["="] = { [""] = "¯", - A = "Ä€", a = "Ä", - E = "Ä’", e = "Ä“", - I = "Ī", i = "Ä«", ["ı"] = "Ä«", - O = "ÅŒ", o = "Å", - U = "Ū", u = "Å«", - }, - ["H"] = { [""] = "Ë", - O = "Å", o = "Å‘", - U = "Å°", u = "ű", - }, - ["^"] = { [""] = "ˆ", - A = "Â", a = "â", - C = "Ĉ", c = "ĉ", - E = "Ê", e = "ê", - G = "Äœ", g = "Ä", - H = "Ĥ", h = "Ä¥", - I = "ÃŽ", i = "î", ["ı"] = "î", - J = "Ä´", j = "ĵ", - O = "Ô", o = "ô", - S = "Åœ", s = "Å", - U = "Û", u = "û", - W = "Å´", w = "ŵ", - Y = "Ŷ", y = "Å·", - }, - ["`"] = { [""] = "`", - A = "À", a = "à", - E = "È", e = "è", - I = "ÃŒ", i = "ì", ["ı"] = "ì", - O = "Ã’", o = "ò", - U = "Ù", u = "ù", - Y = "Ỳ", y = "ỳ", - }, - ["c"] = { [""] = "¸", - C = "Ç", c = "ç", - K = "Ķ", k = "Ä·", - L = "Ä»", l = "ļ", - N = "Å…", n = "ņ", - R = "Å–", r = "Å—", - S = "Åž", s = "ÅŸ", - T = "Å¢", t = "Å£", - }, - ["k"] = { [""] = "Ë›", - A = "Ä„", a = "Ä…", - E = "Ę", e = "Ä™", - I = "Ä®", i = "į", - U = "Ų", u = "ų", - }, - ["r"] = { [""] = "Ëš", - A = "Ã…", a = "Ã¥", - U = "Å®", u = "ů", - }, - ["u"] = { [""] = "˘", - A = "Ä‚", a = "ă", - E = "Ä”", e = "Ä•", - G = "Äž", g = "ÄŸ", - I = "Ĭ", i = "Ä­", ["ı"] = "Ä­", - O = "ÅŽ", o = "Å", - U = "Ŭ", u = "Å­", - }, - ["v"] = { [""] = "ˇ", - C = "ÄŒ", c = "Ä", - D = "ÄŽ", d = "Ä", - E = "Äš", e = "Ä›", - L = "Ľ", l = "ľ", - N = "Ň", n = "ň", - R = "Ř", r = "Å™", - S = "Å ", s = "Å¡", - T = "Ť", t = "Å¥", - Z = "Ž", z = "ž", - }, - ["~"] = { [""] = "Ëœ", - A = "Ã", a = "ã", - I = "Ĩ", i = "Ä©", ["ı"] = "Ä©", - N = "Ñ", n = "ñ", - O = "Õ", o = "õ", - U = "Ũ", u = "Å©", - }, -} - -characters.tex.accentmapping = accentmapping - -local accent_map = allocate { -- incomplete - ['~'] = "̃" , -- ̃ Ẽ - ['"'] = "̈" , -- ̈ Ë - ["`"] = "Ì€" , -- Ì€ È - ["'"] = "Ì" , -- Ì Ã‰ - ["^"] = "Ì‚" , -- Ì‚ Ê - -- Ì„ Ä’ - -- ̆ Ä” - -- ̇ Ä– - -- ̉ Ẻ - -- ÌŒ Äš - -- Ì È„ - -- Ì‘ Ȇ - -- Ì£ Ẹ - -- ̧ Ȩ - -- ̨ Ę - -- Ì­ Ḙ - -- Ì° Ḛ -} - -local accents = table.concat(table.keys(accent_map)) - -local function remap_accents(a,c,braced) - local m = accent_map[a] - if m then - return c .. m - elseif braced then - return "\\" .. a .. "{" .. c .. "}" - else - return "\\" .. a .. c - end -end - -local command_map = allocate { - ["i"] = "ı" -} - -local function remap_commands(c) - local m = command_map[c] - if m then - return m - else - return "\\" .. 
c - end -end - -local accents = (P('\\') * C(S(accents)) * (P("{") * C(U) * P("}" * Cc(true)) + C(U) * Cc(false))) / remap_accents -local commands = (P('\\') * C(R("az","AZ")^1)) / remap_commands - -local convert_accents = Cs((accents + P(1))^0) -local convert_commands = Cs((commands + P(1))^0) - -local no_l = P("{") / "" -local no_r = P("}") / "" - -local convert_accents_strip = Cs((no_l * accents * no_r + accents + P(1))^0) -local convert_commands_strip = Cs((no_l * commands * no_r + commands + P(1))^0) - -function characters.tex.toutf(str,strip) - if not find(str,"\\") then -- we can start at the found position - return str - elseif strip then - return lpegmatch(convert_accents_strip,lpegmatch(convert_commands_strip,str)) - else - return lpegmatch(convert_accents, lpegmatch(convert_commands, str)) - end -end - ---~ print(characters.tex.toutf([[\"{e}]]),true) ---~ print(characters.tex.toutf([[{\"{e}}]],true)) - -function characters.tex.defineaccents() - for accent, group in next, accentmapping do - context.dodefineaccentcommand(accent) - for character, mapping in next, group do - context.dodefineaccent(accent,character,mapping) - end - end -end +if not modules then modules = { } end modules ['char-tex'] = { + version = 1.001, + comment = "companion to char-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local lpeg = lpeg + +local find = string.find +local P, C, R, S, Cs, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc +local U, lpegmatch = lpeg.patterns.utf8, lpeg.match + +local allocate, mark = utilities.storage.allocate, utilities.storage.mark + +characters = characters or { } +local characters = characters +characters.tex = characters.tex or { } + +local accentmapping = allocate { + ['"'] = { [""] = "¨", + A = "Ä", a = "ä", + E = "Ë", e = "ë", + I = "Ã", i = "ï", ["ı"] = "ï", + O = "Ö", o = "ö", + U = "Ãœ", u = "ü", + Y = "Ÿ", y = "ÿ", + }, + ["'"] = { [""] = "´", + A = "Ã", a = "á", + C = "Ć", c = "ć", + E = "É", e = "é", + I = "Ã", i = "í", ["ı"] = "í", + L = "Ĺ", l = "ĺ", + N = "Ń", n = "Å„", + O = "Ó", o = "ó", + R = "Å”", r = "Å•", + S = "Åš", s = "Å›", + U = "Ú", u = "ú", + Y = "Ã", y = "ý", + Z = "Ź", z = "ź", + }, + ["."] = { [""] = "Ë™", + C = "ÄŠ", c = "Ä‹", + E = "Ä–", e = "Ä—", + G = "Ä ", g = "Ä¡", + I = "Ä°", i = "i", ["ı"] = "i", + Z = "Å»", z = "ż", + }, + ["="] = { [""] = "¯", + A = "Ä€", a = "Ä", + E = "Ä’", e = "Ä“", + I = "Ī", i = "Ä«", ["ı"] = "Ä«", + O = "ÅŒ", o = "Å", + U = "Ū", u = "Å«", + }, + ["H"] = { [""] = "Ë", + O = "Å", o = "Å‘", + U = "Å°", u = "ű", + }, + ["^"] = { [""] = "ˆ", + A = "Â", a = "â", + C = "Ĉ", c = "ĉ", + E = "Ê", e = "ê", + G = "Äœ", g = "Ä", + H = "Ĥ", h = "Ä¥", + I = "ÃŽ", i = "î", ["ı"] = "î", + J = "Ä´", j = "ĵ", + O = "Ô", o = "ô", + S = "Åœ", s = "Å", + U = "Û", u = "û", + W = "Å´", w = "ŵ", + Y = "Ŷ", y = "Å·", + }, + ["`"] = { [""] = "`", + A = "À", a = "à", + E = "È", e = "è", + I = "ÃŒ", i = "ì", ["ı"] = "ì", + O = "Ã’", o = "ò", + U = "Ù", u = "ù", + Y = "Ỳ", y = "ỳ", + }, + ["c"] = { [""] = "¸", + C = "Ç", c = "ç", + K = "Ķ", k = "Ä·", + L = "Ä»", l = "ļ", + N = "Å…", n = "ņ", + R = "Å–", r = "Å—", + S = "Åž", s = "ÅŸ", + T = "Å¢", t = "Å£", + }, + ["k"] = { [""] = "Ë›", + A = "Ä„", a = "Ä…", + E = "Ę", e = "Ä™", + I = "Ä®", i = "į", + U = "Ų", u = "ų", + }, + ["r"] = { [""] = "Ëš", + A = "Ã…", a = "Ã¥", + U = "Å®", u = "ů", + }, + ["u"] = { [""] = "˘", + A = "Ä‚", a = "ă", + E = "Ä”", e = "Ä•", + G = 
"Äž", g = "ÄŸ", + I = "Ĭ", i = "Ä­", ["ı"] = "Ä­", + O = "ÅŽ", o = "Å", + U = "Ŭ", u = "Å­", + }, + ["v"] = { [""] = "ˇ", + C = "ÄŒ", c = "Ä", + D = "ÄŽ", d = "Ä", + E = "Äš", e = "Ä›", + L = "Ľ", l = "ľ", + N = "Ň", n = "ň", + R = "Ř", r = "Å™", + S = "Å ", s = "Å¡", + T = "Ť", t = "Å¥", + Z = "Ž", z = "ž", + }, + ["~"] = { [""] = "Ëœ", + A = "Ã", a = "ã", + I = "Ĩ", i = "Ä©", ["ı"] = "Ä©", + N = "Ñ", n = "ñ", + O = "Õ", o = "õ", + U = "Ũ", u = "Å©", + }, +} + +characters.tex.accentmapping = accentmapping + +local accent_map = allocate { -- incomplete + ['~'] = "̃" , -- ̃ Ẽ + ['"'] = "̈" , -- ̈ Ë + ["`"] = "Ì€" , -- Ì€ È + ["'"] = "Ì" , -- Ì Ã‰ + ["^"] = "Ì‚" , -- Ì‚ Ê + -- Ì„ Ä’ + -- ̆ Ä” + -- ̇ Ä– + -- ̉ Ẻ + -- ÌŒ Äš + -- Ì È„ + -- Ì‘ Ȇ + -- Ì£ Ẹ + -- ̧ Ȩ + -- ̨ Ę + -- Ì­ Ḙ + -- Ì° Ḛ +} + +local accents = table.concat(table.keys(accent_map)) + +local function remap_accents(a,c,braced) + local m = accent_map[a] + if m then + return c .. m + elseif braced then + return "\\" .. a .. "{" .. c .. "}" + else + return "\\" .. a .. c + end +end + +local command_map = allocate { + ["i"] = "ı" +} + +local function remap_commands(c) + local m = command_map[c] + if m then + return m + else + return "\\" .. c + end +end + +local accents = (P('\\') * C(S(accents)) * (P("{") * C(U) * P("}" * Cc(true)) + C(U) * Cc(false))) / remap_accents +local commands = (P('\\') * C(R("az","AZ")^1)) / remap_commands + +local convert_accents = Cs((accents + P(1))^0) +local convert_commands = Cs((commands + P(1))^0) + +local no_l = P("{") / "" +local no_r = P("}") / "" + +local convert_accents_strip = Cs((no_l * accents * no_r + accents + P(1))^0) +local convert_commands_strip = Cs((no_l * commands * no_r + commands + P(1))^0) + +function characters.tex.toutf(str,strip) + if not find(str,"\\") then -- we can start at the found position + return str + elseif strip then + return lpegmatch(convert_accents_strip,lpegmatch(convert_commands_strip,str)) + else + return lpegmatch(convert_accents, lpegmatch(convert_commands, str)) + end +end + +--~ print(characters.tex.toutf([[\"{e}]]),true) +--~ print(characters.tex.toutf([[{\"{e}}]],true)) + +function characters.tex.defineaccents() + for accent, group in next, accentmapping do + context.dodefineaccentcommand(accent) + for character, mapping in next, group do + context.dodefineaccent(accent,character,mapping) + end + end +end diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua index d0e40e664..424018b62 100644 --- a/tex/context/base/char-utf.lua +++ b/tex/context/base/char-utf.lua @@ -1,553 +1,553 @@ -if not modules then modules = { } end modules ['char-utf'] = { - version = 1.001, - comment = "companion to char-utf.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

When a sequence of characters enters the application, it may -be neccessary to collapse subsequences into their composed variant.

- -

This module implements methods for collapsing and expanding -sequences. We also provide means to deal with characters that are -special to as well as 8-bit characters that need to end up -in special kinds of output (for instance ).

- -

We implement these manipulations as filters. One can run multiple filters -over a string.

---ldx]]-- - -local concat, gmatch, gsub, find = table.concat, string.gmatch, string.gsub, string.find -local utfchar, utfbyte, utfcharacters, utfvalues = utf.char, utf.byte, utf.characters, utf.values -local allocate = utilities.storage.allocate -local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns - -local charfromnumber = characters.fromnumber - --- todo: trackers --- graphemes: basic symbols - -characters = characters or { } -local characters = characters - -characters.graphemes = allocate() -local graphemes = characters.graphemes - -characters.combined = allocate() -local combined = characters.combined - -characters.decomposed = allocate() -local decomposed = characters.decomposed - -characters.mathpairs = allocate() -local mathpairs = characters.mathpairs - -characters.filters = allocate() -local filters = characters.filters - -filters.utf = filters.utf or { } -local utffilters = characters.filters.utf - --- is characters.combined cached? - ---[[ldx-- -

It only makes sense to collapse at runtime, since we don't expect -source code to depend on collapsing.

---ldx]]-- - --- for the moment, will be entries in char-def.lua - -local decomposed = allocate { - ["IJ"] = "IJ", - ["ij"] = "ij", - ["Ö‡"] = "Õ¥Ö‚", - ["ff"] = "ff", - ["ï¬"] = "fi", - ["fl"] = "fl", - ["ffi"] = "ffi", - ["ffl"] = "ffl", - ["ſt"] = "Å¿t", - ["st"] = "st", - ["ﬓ"] = "Õ´Õ¶", - ["ﬔ"] = "Õ´Õ¥", - ["ﬕ"] = "Õ´Õ«", - ["ﬖ"] = "Õ¾Õ¶", - ["ﬗ"] = "Õ´Õ­", -} - -characters.decomposed = decomposed - -local function initialize() -- maybe only 'mn' - local data = characters.data - for unicode, v in next, data do - -- using vs and first testing for length is faster (.02->.01 s) - local vs = v.specials - local vc = vs and #vs == 3 and vs[1] - if vc == "char" then - local one, two = vs[2], vs[3] - if data[two].category == "mn" then - local cgf = combined[one] - if not cgf then - cgf = { [two] = unicode } - combined[one] = cgf - else - cgf[two] = unicode - end - end - local first, second, combination = utfchar(one), utfchar(two), utfchar(unicode) - local cgf = graphemes[first] - if not cgf then - cgf = { [second] = combination } - graphemes[first] = cgf - else - cgf[second] = combination - end - if v.mathclass or v.mathspec then - local mps = mathpairs[two] - if not mps then - mps = { [one] = unicode } - mathpairs[two] = mps - else - mps[one] = unicode -- here unicode - end - local mps = mathpairs[second] - if not mps then - mps = { [first] = combination } - mathpairs[second] = mps - else - mps[first] = combination - end - end - -- elseif vc == "compat" then - -- else - -- local description = v.description - -- if find(description,"LIGATURE") then - -- if vs then - -- local t = { } - -- for i=2,#vs do - -- t[#t+1] = utfchar(vs[i]) - -- end - -- decomposed[utfchar(unicode)] = concat(t) - -- else - -- local vs = v.shcode - -- if vs then - -- local t = { } - -- for i=1,#vs do - -- t[i] = utfchar(vs[i]) - -- end - -- decomposed[utfchar(unicode)] = concat(t) - -- end - -- end - -- end - end - end - initialize = false - characters.initialize = function() end -- when used outside tex -end - -characters.initialize = initialize - --- utffilters.addgrapheme(utfchar(318),'l','\string~') --- utffilters.addgrapheme('c','a','b') - -function utffilters.addgrapheme(result,first,second) -- can be U+ 0x string or utf or number - local result = charfromnumber(result) - local first = charfromnumber(first) - local second = charfromnumber(second) - if not graphemes[first] then - graphemes[first] = { [second] = result } - else - graphemes[first][second] = result - end -end - ---[[ldx-- -

In order to deal with 8-bit output, we need to find a way to -go from to 8-bit. This is handled in the - engine itself.

- -

This leaves us problems with characters that are specific to - like {}, $ and alike.

- -

We can remap some chars that tex input files are sensitive for to -a private area (while writing to a utility file) and revert then -to their original slot when we read in such a file. Instead of -reverting, we can (when we resolve characters to glyphs) map them -to their right glyph there.

- -

For this purpose we can use the private planes 0x0F0000 and -0x100000.

---ldx]]-- - -local low = allocate({ }) -local high = allocate({ }) -local escapes = allocate({ }) -local special = "~#$%^&_{}\\|" - -local private = { - low = low, - high = high, - escapes = escapes, -} - -utffilters.private = private - -local tohigh = lpeg.replacer(low) -- frozen, only for basic tex -local tolow = lpeg.replacer(high) -- frozen, only for basic tex - -lpegpatterns.utftohigh = tohigh -lpegpatterns.utftolow = tolow - -function utffilters.harden(str) - return lpegmatch(tohigh,str) -end - -function utffilters.soften(str) - return lpegmatch(tolow,str) -end - -local function set(ch) - local cb - if type(ch) == "number" then - cb, ch = ch, utfchar(ch) - else - cb = utfbyte(ch) - end - if cb < 256 then - escapes[ch] = "\\" .. ch - low[ch] = utfchar(0x0F0000 + cb) - if ch == "%" then - ch = "%%" -- nasty, but we need this as in replacements (also in lpeg) % is interpreted - end - high[utfchar(0x0F0000 + cb)] = ch - end -end - -private.set = set - --- function private.escape (str) return gsub(str,"(.)", escapes) end --- function private.replace(str) return utfgsub(str,"(.)", low ) end --- function private.revert (str) return utfgsub(str,"(.)", high ) end - -private.escape = utf.remapper(escapes) -private.replace = utf.remapper(low) -private.revert = utf.remapper(high) - -for ch in gmatch(special,".") do set(ch) end - ---[[ldx-- -

We get a more efficient variant of this when we integrate -replacements in collapser. This more or less renders the previous -private code redundant. The following code is equivalent but the -first snippet uses the relocated dollars.

- - -[󰀤x󰀤] [$x$] - - -

The next variant has lazy token collecting, on a 140 page mk.tex this saves -about .25 seconds, which is understandable because we have no graphmes and -not collecting tokens is not only faster but also saves garbage collecting. -

---ldx]]-- - --- lpeg variant is not faster --- --- I might use the combined loop at some point for the filter --- some day. - --- function utffilters.collapse(str) -- not really tested (we could preallocate a table) --- if str and str ~= "" then --- local nstr = #str --- if nstr > 1 then --- if initialize then -- saves a call --- initialize() --- end --- local tokens, t, first, done, n = { }, 0, false, false, 0 --- for second in utfcharacters(str) do --- local dec = decomposed[second] --- if dec then --- if not done then --- if n > 0 then --- for s in utfcharacters(str) do --- if n == 1 then --- break --- else --- t = t + 1 --- tokens[t] = s --- n = n - 1 --- end --- end --- end --- done = true --- elseif first then --- t = t + 1 --- tokens[t] = first --- end --- t = t + 1 --- tokens[t] = dec --- first = false --- elseif done then --- local crs = high[second] --- if crs then --- if first then --- t = t + 1 --- tokens[t] = first --- end --- first = crs --- else --- local cgf = graphemes[first] --- if cgf and cgf[second] then --- first = cgf[second] --- elseif first then --- t = t + 1 --- tokens[t] = first --- first = second --- else --- first = second --- end --- end --- else --- local crs = high[second] --- if crs then --- for s in utfcharacters(str) do --- if n == 1 then --- break --- else --- t = t + 1 --- tokens[t] = s --- n = n - 1 --- end --- end --- if first then --- t = t + 1 --- tokens[t] = first --- end --- first = crs --- done = true --- else --- local cgf = graphemes[first] --- if cgf and cgf[second] then --- for s in utfcharacters(str) do --- if n == 1 then --- break --- else --- t = t + 1 --- tokens[t] = s --- n = n - 1 --- end --- end --- first = cgf[second] --- done = true --- else --- first = second --- n = n + 1 --- end --- end --- end --- end --- if done then --- if first then --- t = t + 1 --- tokens[t] = first --- end --- return concat(tokens) -- seldom called --- end --- elseif nstr > 0 then --- return high[str] or str --- end --- end --- return str --- end - -local skippable = table.tohash { "mkiv", "mkvi" } -local filesuffix = file.suffix - --- we could reuse tokens but it's seldom populated anyway - -function utffilters.collapse(str,filename) -- not really tested (we could preallocate a table) - if skippable[filesuffix(filename)] then - return str - end - if str and str ~= "" then - local nstr = #str - if nstr > 1 then - if initialize then -- saves a call - initialize() - end - local tokens, t, first, done, n = { }, 0, false, false, 0 - for second in utfcharacters(str) do - if done then - local crs = high[second] - if crs then - if first then - t = t + 1 - tokens[t] = first - end - first = crs - else - local cgf = graphemes[first] - if cgf and cgf[second] then - first = cgf[second] - elseif first then - t = t + 1 - tokens[t] = first - first = second - else - first = second - end - end - else - local crs = high[second] - if crs then - for s in utfcharacters(str) do - if n == 1 then - break - else - t = t + 1 - tokens[t] = s - n = n - 1 - end - end - if first then - t = t + 1 - tokens[t] = first - end - first = crs - done = true - else - local cgf = graphemes[first] - if cgf and cgf[second] then - for s in utfcharacters(str) do - if n == 1 then - break - else - t = t + 1 - tokens[t] = s - n = n - 1 - end - end - first = cgf[second] - done = true - else - first = second - n = n + 1 - end - end - end - end - if done then - if first then - t = t + 1 - tokens[t] = first - end - return concat(tokens) -- seldom called - end - elseif nstr > 0 then - return high[str] or str 
- end - end - return str -end - -function utffilters.decompose(str) - if str and str ~= "" then - local nstr = #str - if nstr > 1 then - -- if initialize then -- saves a call - -- initialize() - -- end - local tokens, t, done, n = { }, 0, false, 0 - for s in utfcharacters(str) do - local dec = decomposed[s] - if dec then - if not done then - if n > 0 then - for s in utfcharacters(str) do - if n == 1 then - break - else - t = t + 1 - tokens[t] = s - n = n - 1 - end - end - end - done = true - end - t = t + 1 - tokens[t] = dec - elseif done then - t = t + 1 - tokens[t] = s - else - n = n + 1 - end - end - if done then - return concat(tokens) -- seldom called - end - end - end - return str -end - -local sequencers = utilities.sequencers - -if sequencers then - - local textfileactions = resolvers.openers.helpers.textfileactions - - sequencers.appendaction (textfileactions,"system","characters.filters.utf.collapse") - sequencers.disableaction(textfileactions,"characters.filters.utf.collapse") - - sequencers.appendaction (textfileactions,"system","characters.filters.utf.decompose") - sequencers.disableaction(textfileactions,"characters.filters.utf.decompose") - - function characters.filters.utf.enable() - sequencers.enableaction(textfileactions,"characters.filters.utf.collapse") - sequencers.enableaction(textfileactions,"characters.filters.utf.decompose") - end - - directives.register("filters.utf.collapse", function(v) - sequencers[v and "enableaction" or "disableaction"](textfileactions,"characters.filters.utf.collapse") - end) - - directives.register("filters.utf.decompose", function(v) - sequencers[v and "enableaction" or "disableaction"](textfileactions,"characters.filters.utf.decompose") - end) - -end - ---[[ldx-- -

Next we implement some commands that are used in the user interface.

---ldx]]-- - --- commands = commands or { } --- --- function commands.uchar(first,second) --- context(utfchar(first*256+second)) --- end - ---[[ldx-- -

A few helpers (used to be luat-uni).

---ldx]]-- - --- obsolete: --- --- function utf.split(str) --- local t, n = { }, 0 --- for snippet in utfcharacters(str) do --- n = n + 1 --- t[n+1] = snippet --- end --- return t --- end --- --- function utf.each(str,fnc) --- for snippet in utfcharacters(str) do --- fnc(snippet) --- end --- end +if not modules then modules = { } end modules ['char-utf'] = { + version = 1.001, + comment = "companion to char-utf.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

When a sequence of characters enters the application, it may +be neccessary to collapse subsequences into their composed variant.

+ +

This module implements methods for collapsing and expanding +sequences. We also provide means to deal with characters that are +special to as well as 8-bit characters that need to end up +in special kinds of output (for instance ).

+ +

We implement these manipulations as filters. One can run multiple filters +over a string.

+--ldx]]-- + +local concat, gmatch, gsub, find = table.concat, string.gmatch, string.gsub, string.find +local utfchar, utfbyte, utfcharacters, utfvalues = utf.char, utf.byte, utf.characters, utf.values +local allocate = utilities.storage.allocate +local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns + +local charfromnumber = characters.fromnumber + +-- todo: trackers +-- graphemes: basic symbols + +characters = characters or { } +local characters = characters + +characters.graphemes = allocate() +local graphemes = characters.graphemes + +characters.combined = allocate() +local combined = characters.combined + +characters.decomposed = allocate() +local decomposed = characters.decomposed + +characters.mathpairs = allocate() +local mathpairs = characters.mathpairs + +characters.filters = allocate() +local filters = characters.filters + +filters.utf = filters.utf or { } +local utffilters = characters.filters.utf + +-- is characters.combined cached? + +--[[ldx-- +

It only makes sense to collapse at runtime, since we don't expect +source code to depend on collapsing.

+--ldx]]-- + +-- for the moment, will be entries in char-def.lua + +local decomposed = allocate { + ["IJ"] = "IJ", + ["ij"] = "ij", + ["Ö‡"] = "Õ¥Ö‚", + ["ff"] = "ff", + ["ï¬"] = "fi", + ["fl"] = "fl", + ["ffi"] = "ffi", + ["ffl"] = "ffl", + ["ſt"] = "Å¿t", + ["st"] = "st", + ["ﬓ"] = "Õ´Õ¶", + ["ﬔ"] = "Õ´Õ¥", + ["ﬕ"] = "Õ´Õ«", + ["ﬖ"] = "Õ¾Õ¶", + ["ﬗ"] = "Õ´Õ­", +} + +characters.decomposed = decomposed + +local function initialize() -- maybe only 'mn' + local data = characters.data + for unicode, v in next, data do + -- using vs and first testing for length is faster (.02->.01 s) + local vs = v.specials + local vc = vs and #vs == 3 and vs[1] + if vc == "char" then + local one, two = vs[2], vs[3] + if data[two].category == "mn" then + local cgf = combined[one] + if not cgf then + cgf = { [two] = unicode } + combined[one] = cgf + else + cgf[two] = unicode + end + end + local first, second, combination = utfchar(one), utfchar(two), utfchar(unicode) + local cgf = graphemes[first] + if not cgf then + cgf = { [second] = combination } + graphemes[first] = cgf + else + cgf[second] = combination + end + if v.mathclass or v.mathspec then + local mps = mathpairs[two] + if not mps then + mps = { [one] = unicode } + mathpairs[two] = mps + else + mps[one] = unicode -- here unicode + end + local mps = mathpairs[second] + if not mps then + mps = { [first] = combination } + mathpairs[second] = mps + else + mps[first] = combination + end + end + -- elseif vc == "compat" then + -- else + -- local description = v.description + -- if find(description,"LIGATURE") then + -- if vs then + -- local t = { } + -- for i=2,#vs do + -- t[#t+1] = utfchar(vs[i]) + -- end + -- decomposed[utfchar(unicode)] = concat(t) + -- else + -- local vs = v.shcode + -- if vs then + -- local t = { } + -- for i=1,#vs do + -- t[i] = utfchar(vs[i]) + -- end + -- decomposed[utfchar(unicode)] = concat(t) + -- end + -- end + -- end + end + end + initialize = false + characters.initialize = function() end -- when used outside tex +end + +characters.initialize = initialize + +-- utffilters.addgrapheme(utfchar(318),'l','\string~') +-- utffilters.addgrapheme('c','a','b') + +function utffilters.addgrapheme(result,first,second) -- can be U+ 0x string or utf or number + local result = charfromnumber(result) + local first = charfromnumber(first) + local second = charfromnumber(second) + if not graphemes[first] then + graphemes[first] = { [second] = result } + else + graphemes[first][second] = result + end +end + +--[[ldx-- +

In order to deal with 8-bit output, we need to find a way to +go from to 8-bit. This is handled in the + engine itself.

+ +

This leaves us problems with characters that are specific to + like {}, $ and alike.

+ +

We can remap some chars that tex input files are sensitive for to +a private area (while writing to a utility file) and revert then +to their original slot when we read in such a file. Instead of +reverting, we can (when we resolve characters to glyphs) map them +to their right glyph there.

+ +

For this purpose we can use the private planes 0x0F0000 and +0x100000.

+--ldx]]-- + +local low = allocate({ }) +local high = allocate({ }) +local escapes = allocate({ }) +local special = "~#$%^&_{}\\|" + +local private = { + low = low, + high = high, + escapes = escapes, +} + +utffilters.private = private + +local tohigh = lpeg.replacer(low) -- frozen, only for basic tex +local tolow = lpeg.replacer(high) -- frozen, only for basic tex + +lpegpatterns.utftohigh = tohigh +lpegpatterns.utftolow = tolow + +function utffilters.harden(str) + return lpegmatch(tohigh,str) +end + +function utffilters.soften(str) + return lpegmatch(tolow,str) +end + +local function set(ch) + local cb + if type(ch) == "number" then + cb, ch = ch, utfchar(ch) + else + cb = utfbyte(ch) + end + if cb < 256 then + escapes[ch] = "\\" .. ch + low[ch] = utfchar(0x0F0000 + cb) + if ch == "%" then + ch = "%%" -- nasty, but we need this as in replacements (also in lpeg) % is interpreted + end + high[utfchar(0x0F0000 + cb)] = ch + end +end + +private.set = set + +-- function private.escape (str) return gsub(str,"(.)", escapes) end +-- function private.replace(str) return utfgsub(str,"(.)", low ) end +-- function private.revert (str) return utfgsub(str,"(.)", high ) end + +private.escape = utf.remapper(escapes) +private.replace = utf.remapper(low) +private.revert = utf.remapper(high) + +for ch in gmatch(special,".") do set(ch) end + +--[[ldx-- +

We get a more efficient variant of this when we integrate the
+replacements in the collapser. This more or less renders the previous
+private code redundant. The following code is equivalent but the
+first snippet uses the relocated dollars.

+ + +[󰀤x󰀤] [$x$] + + +

The next variant has lazy token collecting: on a 140 page mk.tex this saves
+about .25 seconds, which is understandable because we have no graphemes and
+not collecting tokens is not only faster but also saves garbage collection.

+--ldx]]-- + +-- lpeg variant is not faster +-- +-- I might use the combined loop at some point for the filter +-- some day. + +-- function utffilters.collapse(str) -- not really tested (we could preallocate a table) +-- if str and str ~= "" then +-- local nstr = #str +-- if nstr > 1 then +-- if initialize then -- saves a call +-- initialize() +-- end +-- local tokens, t, first, done, n = { }, 0, false, false, 0 +-- for second in utfcharacters(str) do +-- local dec = decomposed[second] +-- if dec then +-- if not done then +-- if n > 0 then +-- for s in utfcharacters(str) do +-- if n == 1 then +-- break +-- else +-- t = t + 1 +-- tokens[t] = s +-- n = n - 1 +-- end +-- end +-- end +-- done = true +-- elseif first then +-- t = t + 1 +-- tokens[t] = first +-- end +-- t = t + 1 +-- tokens[t] = dec +-- first = false +-- elseif done then +-- local crs = high[second] +-- if crs then +-- if first then +-- t = t + 1 +-- tokens[t] = first +-- end +-- first = crs +-- else +-- local cgf = graphemes[first] +-- if cgf and cgf[second] then +-- first = cgf[second] +-- elseif first then +-- t = t + 1 +-- tokens[t] = first +-- first = second +-- else +-- first = second +-- end +-- end +-- else +-- local crs = high[second] +-- if crs then +-- for s in utfcharacters(str) do +-- if n == 1 then +-- break +-- else +-- t = t + 1 +-- tokens[t] = s +-- n = n - 1 +-- end +-- end +-- if first then +-- t = t + 1 +-- tokens[t] = first +-- end +-- first = crs +-- done = true +-- else +-- local cgf = graphemes[first] +-- if cgf and cgf[second] then +-- for s in utfcharacters(str) do +-- if n == 1 then +-- break +-- else +-- t = t + 1 +-- tokens[t] = s +-- n = n - 1 +-- end +-- end +-- first = cgf[second] +-- done = true +-- else +-- first = second +-- n = n + 1 +-- end +-- end +-- end +-- end +-- if done then +-- if first then +-- t = t + 1 +-- tokens[t] = first +-- end +-- return concat(tokens) -- seldom called +-- end +-- elseif nstr > 0 then +-- return high[str] or str +-- end +-- end +-- return str +-- end + +local skippable = table.tohash { "mkiv", "mkvi" } +local filesuffix = file.suffix + +-- we could reuse tokens but it's seldom populated anyway + +function utffilters.collapse(str,filename) -- not really tested (we could preallocate a table) + if skippable[filesuffix(filename)] then + return str + end + if str and str ~= "" then + local nstr = #str + if nstr > 1 then + if initialize then -- saves a call + initialize() + end + local tokens, t, first, done, n = { }, 0, false, false, 0 + for second in utfcharacters(str) do + if done then + local crs = high[second] + if crs then + if first then + t = t + 1 + tokens[t] = first + end + first = crs + else + local cgf = graphemes[first] + if cgf and cgf[second] then + first = cgf[second] + elseif first then + t = t + 1 + tokens[t] = first + first = second + else + first = second + end + end + else + local crs = high[second] + if crs then + for s in utfcharacters(str) do + if n == 1 then + break + else + t = t + 1 + tokens[t] = s + n = n - 1 + end + end + if first then + t = t + 1 + tokens[t] = first + end + first = crs + done = true + else + local cgf = graphemes[first] + if cgf and cgf[second] then + for s in utfcharacters(str) do + if n == 1 then + break + else + t = t + 1 + tokens[t] = s + n = n - 1 + end + end + first = cgf[second] + done = true + else + first = second + n = n + 1 + end + end + end + end + if done then + if first then + t = t + 1 + tokens[t] = first + end + return concat(tokens) -- seldom called + end + elseif nstr > 0 then + return high[str] or str 
+ end + end + return str +end + +function utffilters.decompose(str) + if str and str ~= "" then + local nstr = #str + if nstr > 1 then + -- if initialize then -- saves a call + -- initialize() + -- end + local tokens, t, done, n = { }, 0, false, 0 + for s in utfcharacters(str) do + local dec = decomposed[s] + if dec then + if not done then + if n > 0 then + for s in utfcharacters(str) do + if n == 1 then + break + else + t = t + 1 + tokens[t] = s + n = n - 1 + end + end + end + done = true + end + t = t + 1 + tokens[t] = dec + elseif done then + t = t + 1 + tokens[t] = s + else + n = n + 1 + end + end + if done then + return concat(tokens) -- seldom called + end + end + end + return str +end + +local sequencers = utilities.sequencers + +if sequencers then + + local textfileactions = resolvers.openers.helpers.textfileactions + + sequencers.appendaction (textfileactions,"system","characters.filters.utf.collapse") + sequencers.disableaction(textfileactions,"characters.filters.utf.collapse") + + sequencers.appendaction (textfileactions,"system","characters.filters.utf.decompose") + sequencers.disableaction(textfileactions,"characters.filters.utf.decompose") + + function characters.filters.utf.enable() + sequencers.enableaction(textfileactions,"characters.filters.utf.collapse") + sequencers.enableaction(textfileactions,"characters.filters.utf.decompose") + end + + directives.register("filters.utf.collapse", function(v) + sequencers[v and "enableaction" or "disableaction"](textfileactions,"characters.filters.utf.collapse") + end) + + directives.register("filters.utf.decompose", function(v) + sequencers[v and "enableaction" or "disableaction"](textfileactions,"characters.filters.utf.decompose") + end) + +end + +--[[ldx-- +
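To illustrate the two filters defined above (a sketch, not part of the patch; it assumes the character data is loaded so that initialize can build the grapheme tables, and the made-up name demo.tex is only there to get past the mkiv/mkvi suffix check):

local collapsed = utffilters.collapse("e" .. utf.char(0x0301) .. "lan","demo.tex")
-- the combining acute is folded into its base character, giving "élan"

local expanded = utffilters.decompose("ﬁnal ﬂow")
-- the ﬁ and ﬂ ligatures are replaced by their parts, giving "final flow"

-- in a real run the filters are not called directly but enabled on the file readers,
-- for instance with characters.filters.utf.enable() or with the filters.utf.collapse
-- and filters.utf.decompose directives registered above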

Next we implement some commands that are used in the user interface.

+--ldx]]-- + +-- commands = commands or { } +-- +-- function commands.uchar(first,second) +-- context(utfchar(first*256+second)) +-- end + +--[[ldx-- +

A few helpers (used to be luat-uni).

+--ldx]]-- + +-- obsolete: +-- +-- function utf.split(str) +-- local t, n = { }, 0 +-- for snippet in utfcharacters(str) do +-- n = n + 1 +-- t[n+1] = snippet +-- end +-- return t +-- end +-- +-- function utf.each(str,fnc) +-- for snippet in utfcharacters(str) do +-- fnc(snippet) +-- end +-- end diff --git a/tex/context/base/chem-ini.lua b/tex/context/base/chem-ini.lua index 10db1a1e4..e694a92de 100644 --- a/tex/context/base/chem-ini.lua +++ b/tex/context/base/chem-ini.lua @@ -1,43 +1,43 @@ -if not modules then modules = { } end modules ['chem-ini'] = { - version = 1.001, - comment = "companion to chem-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format -local lpegmatch, patterns = lpeg.match, lpeg.patterns - -local trace_molecules = false trackers.register("chemistry.molecules", function(v) trace_molecules = v end) - -local report_chemistry = logs.reporter("chemistry") - -local context = context -local cpatterns = patterns.context - -chemistry = chemistry or { } -local chemistry = chemistry - ---[[ -

The next code started out as an adaptation of code from Wolfgang Schuster as
-posted on the mailing list. The current version supports nested braces and
-unbraced integers as scripts.

-]]-- - -local moleculeparser = cpatterns.scripted -chemistry.moleculeparser = moleculeparser - -function chemistry.molecule(str) - return lpegmatch(moleculeparser,str) -end - -function commands.molecule(str) - if trace_molecules then - local rep = lpegmatch(moleculeparser,str) - report_chemistry("molecule %a becomes %a",str,rep) - context(rep) - else - context(lpegmatch(moleculeparser,str)) - end -end +if not modules then modules = { } end modules ['chem-ini'] = { + version = 1.001, + comment = "companion to chem-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format +local lpegmatch, patterns = lpeg.match, lpeg.patterns + +local trace_molecules = false trackers.register("chemistry.molecules", function(v) trace_molecules = v end) + +local report_chemistry = logs.reporter("chemistry") + +local context = context +local cpatterns = patterns.context + +chemistry = chemistry or { } +local chemistry = chemistry + +--[[ +

The next code started out as an adaptation of code from Wolfgang Schuster as
+posted on the mailing list. The current version supports nested braces and
+unbraced integers as scripts.
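As a usage sketch for the parser set up below (illustrative, not part of the patch): the exact markup that comes out depends on cpatterns.scripted, so the calls are what matter here rather than the precise result.

local m = chemistry.molecule("H_2SO_4^{2-}") -- scripts can be unbraced integers or braced groups
context(m)                                   -- commands.molecule does the same and adds optional tracing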

+]]-- + +local moleculeparser = cpatterns.scripted +chemistry.moleculeparser = moleculeparser + +function chemistry.molecule(str) + return lpegmatch(moleculeparser,str) +end + +function commands.molecule(str) + if trace_molecules then + local rep = lpegmatch(moleculeparser,str) + report_chemistry("molecule %a becomes %a",str,rep) + context(rep) + else + context(lpegmatch(moleculeparser,str)) + end +end diff --git a/tex/context/base/chem-str.lua b/tex/context/base/chem-str.lua index dfcf0a3e1..679314e2d 100644 --- a/tex/context/base/chem-str.lua +++ b/tex/context/base/chem-str.lua @@ -1,820 +1,820 @@ -if not modules then modules = { } end modules ['chem-str'] = { - version = 1.001, - comment = "companion to chem-str.mkiv", - author = "Hans Hagen and Alan Braslau", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- The original \PPCHTEX\ code was written in pure \TEX\, although later we made --- the move from \PICTEX\ to \METAPOST\. The current implementation is a mix between --- \TEX\, \LUA\ and \METAPOST. Although the first objective is to get a compatible --- but better implementation, later versions might provide more. --- --- Well, the later version has arrived as Alan took it upon him to make the code --- deviate even further from the original implementation. The original (early \MKII) --- variant operated within the boundaries of \PICTEX\ and as it supported MetaPost as --- alternative output. As a consequence it still used a stepwise graphic construction --- approach. As we used \TEX\ for parsing, the syntax was more rigid than it is now. --- This new variant uses a more mathematical and metapostisch approach. In the process --- more rendering variants have been added and alignment has been automated. As a result --- the current user interface is slightly different from the old one but hopefully users --- will like the added value. 
- --- directive_strictorder: one might set this to off when associated texts are disordered too - -local trace_structure = false trackers .register("chemistry.structure", function(v) trace_structure = v end) -local trace_metapost = false trackers .register("chemistry.metapost", function(v) trace_metapost = v end) -local trace_boundingbox = false trackers .register("chemistry.boundingbox", function(v) trace_boundingbox = v end) -local trace_textstack = false trackers .register("chemistry.textstack", function(v) trace_textstack = v end) -local directive_strictorder = true directives.register("chemistry.strictorder", function(v) directive_strictorder = v end) -local directive_strictindex = false directives.register("chemistry.strictindex", function(v) directive_strictindex = v end) - -local report_chemistry = logs.reporter("chemistry") - -local format, gmatch, match, lower, gsub = string.format, string.gmatch, string.match, string.lower, string.gsub -local concat, insert, remove, unique, sorted = table.concat, table.insert, table.remove, table.unique, table.sorted -local processor_tostring = typesetters and typesetters.processors.tostring -local settings_to_array = utilities.parsers.settings_to_array -local settings_to_array_with_repeat = utilities.parsers.settings_to_array_with_repeat -local formatters = string.formatters - -local lpegmatch = lpeg.match -local P, R, S, C, Cs, Ct, Cc, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cmt - -local variables = interfaces and interfaces.variables -local context = context -local formatters = string.formatters -local texcount = tex.count - -local v_default = variables.default -local v_small = variables.small -local v_medium = variables.medium -local v_big = variables.big -local v_normal = variables.normal -local v_fit = variables.fit -local v_on = variables.on -local v_none = variables.none - -local mpnamedcolor = attributes.colors.mpnamedcolor -local topoints = number.topoints -local todimen = string.todimen - -chemistry = chemistry or { } -local chemistry = chemistry - -chemistry.instance = "chemistry" -chemistry.format = "metafun" -chemistry.structures = 0 - -local common_keys = { - b = "line", - r = "line", - sb = "line", - sr = "line", - rd = "line", - rh = "line", - rb = "line", - rbd = "line", - cc = "line", - ccd = "line", - line = "line", - dash = "line", - arrow = "line", - c = "fixed", - cd = "fixed", - z = "text", - zt = "text", - zlt = "text", - zrt = "text", - rz = "text", - rt = "text", - lrt = "text", - rrt = "text", - label = "text", - zln = "number", - zrn = "number", - rn = "number", - lrn = "number", - rrn = "number", - zn = "number", - number = "number", - mov = "transform", - mark = "transform", - move = "transform", - diff = "transform", - off = "transform", - adj = "transform", - sub = "transform", -} - -local front_keys = { - bb = "line", - eb = "line", - rr = "line", - lr = "line", - lsr = "line", - rsr = "line", - lrd = "line", - rrd = "line", - lrh = "line", - rrh = "line", - lrbd = "line", - rrbd = "line", - lrb = "line", - rrb = "line", - lrz = "text", - rrz = "text", - lsub = "transform", - rsub = "transform", -} - -local one_keys = { - db = "line", - tb = "line", - bb = "line", - dr = "line", - hb = "line", - bd = "line", - bw = "line", - oe = "line", - sd = "line", - rdb = "line", - ldb = "line", - ldd = "line", - rdd = "line", - ep = "line", - es = "line", - ed = "line", - et = "line", - cz = "text", - rot = "transform", - dir = "transform", - rm = "transform", - mir = "transform", -} - -local 
ring_keys = { - db = "line", - br = "line", - lr = "line", - rr = "line", - lsr = "line", - rsr = "line", - lrd = "line", - rrd = "line", - lrb = "line", - rrb = "line", - lrh = "line", - rrh = "line", - lrbd = "line", - rrbd = "line", - dr = "line", - eb = "line", - er = "line", - ed = "line", - au = "line", - ad = "line", - s = "line", - ss = "line", - mid = "line", - mids = "line", - midz = "text", - lrz = "text", - rrz = "text", - crz = "text", - rot = "transform", - mir = "transform", - adj = "transform", - lsub = "transform", - rsub = "transform", - rm = "transform", -} - --- table.setmetatableindex(front_keys,common_keys) --- table.setmetatableindex(one_keys,common_keys) --- table.setmetatableindex(ring_keys,common_keys) - --- or (faster but not needed here): - -front_keys = table.merged(front_keys,common_keys) -one_keys = table.merged(one_keys,common_keys) -ring_keys = table.merged(ring_keys,common_keys) - -local syntax = { - carbon = { max = 4, keys = one_keys, }, - alkyl = { max = 4, keys = one_keys, }, - newmanstagger = { max = 6, keys = one_keys, }, - newmaneclipsed = { max = 6, keys = one_keys, }, - one = { max = 8, keys = one_keys, }, - three = { max = 3, keys = ring_keys, }, - four = { max = 4, keys = ring_keys, }, - five = { max = 5, keys = ring_keys, }, - six = { max = 6, keys = ring_keys, }, - seven = { max = 7, keys = ring_keys, }, - eight = { max = 8, keys = ring_keys, }, - nine = { max = 9, keys = ring_keys, }, - fivefront = { max = 5, keys = front_keys, }, - sixfront = { max = 6, keys = front_keys, }, - chair = { max = 6, keys = front_keys, }, - boat = { max = 6, keys = front_keys, }, - pb = { direct = 'chem_pb;' }, - pe = { direct = 'chem_pe;' }, - save = { direct = 'chem_save;' }, - restore = { direct = 'chem_restore;' }, - chem = { direct = formatters['chem_symbol("\\chemicaltext{%s}");'], arguments = 1 }, - space = { direct = 'chem_symbol("\\chemicalsymbol[space]");' }, - plus = { direct = 'chem_symbol("\\chemicalsymbol[plus]");' }, - minus = { direct = 'chem_symbol("\\chemicalsymbol[minus]");' }, - gives = { direct = formatters['chem_symbol("\\chemicalsymbol[gives]{%s}{%s}");'], arguments = 2 }, - equilibrium = { direct = formatters['chem_symbol("\\chemicalsymbol[equilibrium]{%s}{%s}");'], arguments = 2 }, - mesomeric = { direct = formatters['chem_symbol("\\chemicalsymbol[mesomeric]{%s}{%s}");'], arguments = 2 }, - opencomplex = { direct = 'chem_symbol("\\chemicalsymbol[opencomplex]");' }, - closecomplex = { direct = 'chem_symbol("\\chemicalsymbol[closecomplex]");' }, - reset = { direct = 'chem_reset;' }, - mp = { direct = formatters['%s'], arguments = 1 }, -- backdoor MP code - dangerous! -} - -chemistry.definitions = chemistry.definitions or { } -local definitions = chemistry.definitions - -storage.register("chemistry/definitions",definitions,"chemistry.definitions") - -function chemistry.undefine(name) - definitions[lower(name)] = nil -end - -function chemistry.define(name,spec,text) - name = lower(name) - local dn = definitions[name] - if not dn then - dn = { } - definitions[name] = dn - end - dn[#dn+1] = { - spec = settings_to_array_with_repeat(spec,true), - text = settings_to_array_with_repeat(text,true), - } -end - -local metacode, variant, keys, max, txt, pstack, sstack, align -local molecule = chemistry.molecule -- or use lpegmatch(chemistry.moleculeparser,...) 
- -local function fetch(txt) - local st = stack[txt] - local t = st.text[st.n] - while not t and txt > 1 do - txt = txt - 1 - st = stack[txt] - t = st.text[st.n] - end - if t then - if trace_textstack then - report_chemistry("fetching from stack %a, slot %a, data %a",txt,st.n,t) - end - st.n = st.n + 1 - end - return txt, t -end - -local remapper = { - ["+"] = "p", - ["-"] = "m", -} - -local dchrs = R("09") -local sign = S("+-") -local digit = dchrs / tonumber -local amount = (sign^-1 * (dchrs^0 * P('.'))^-1 * dchrs^1) / tonumber -local single = digit -local range = digit * P("..") * digit -local set = Ct(digit^2) -local colon = P(":") -local equal = P("=") -local other = 1 - digit - colon - equal -local remapped = sign / remapper -local operation = Cs(other^1) -local special = (colon * C(other^1)) + Cc("") -local text = (equal * C(P(1)^0)) + Cc(false) - -local pattern = - (amount + Cc(1)) - * (remapped + Cc("")) - * Cs(operation/lower) - * Cs(special/lower) * ( - range * Cc(false) * text + - Cc(false) * Cc(false) * set * text + - single * Cc(false) * Cc(false) * text + - Cc(false) * Cc(false) * Cc(false) * text - ) - --- local n, operation, index, upto, set, text = lpegmatch(pattern,"RZ1357") - --- print(lpegmatch(pattern,"RZ=x")) -- 1 RZ false false false x --- print(lpegmatch(pattern,"RZ1=x")) -- 1 RZ 1 false false x --- print(lpegmatch(pattern,"RZ1..3=x")) -- 1 RZ 1 3 false x --- print(lpegmatch(pattern,"RZ13=x")) -- 1 RZ false false table x - -local f_initialize = 'if unknown context_chem : input mp-chem.mpiv ; fi ;' -local f_start_structure = formatters['chem_start_structure(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);'] -local f_set_trace_bounds = formatters['chem_trace_boundingbox := %l ;'] -local f_stop_structure = 'chem_stop_structure;' -local f_start_component = 'chem_start_component;' -local f_stop_component = 'chem_stop_component;' -local f_line = formatters['chem_%s%s(%s,%s,%s,%s,%s);'] -local f_set = formatters['chem_set(%s);'] -local f_number = formatters['chem_%s%s(%s,%s,"\\chemicaltext{%s}");'] -local f_text = f_number -local f_empty_normal = formatters['chem_%s(%s,%s,"");'] -local f_empty_center = formatters['chem_c%s(%s,%s,"");'] -local f_transform = formatters['chem_%s(%s,%s,%s);'] - -local prepareMPvariable = commands and commands.prepareMPvariable - -local function process(level,spec,text,n,rulethickness,rulecolor,offset,default_variant) - insert(stack,{ spec = spec, text = text, n = n }) - local txt = #stack - local m = #metacode - local saved_rulethickness = rulethickness - local saved_rulecolor = rulecolor - local saved_align = align - local current_variant = default_variant or "six" - for i=1,#spec do - local step = spec[i] - local s = lower(step) - local n = current_variant .. ":" .. s - local d = definitions[n] - if not d then - n = s - d = definitions[n] - end - if d then - if trace_structure then - report_chemistry("level %a, step %a, definition %a, snippets %a",level,step,n,#d) - end - for i=1,#d do - local di = d[i] - current_variant = process(level+1,di.spec,di.text,1,rulethickness,rulecolor,offset,current_variant) -- offset? 
- end - else - local factor, osign, operation, special, index, upto, set, text = lpegmatch(pattern,step) - if trace_structure then - local set = set and concat(set," ") or "-" - report_chemistry("level %a, step %a, factor %a, osign %a, operation %a, special %a, index %a, upto %a, set %a, text %a", - level,step,factor,osign,operation,special,index,upto,set,text) - end - if operation == "rulecolor" then - local t = text - if not t then - txt, t = fetch(txt) - end - if t == v_default or t == v_normal or t == "" then - rulecolor = saved_rulecolor - elseif t then - rulecolor = mpnamedcolor(t) - end - elseif operation == "rulethickness" then - local t = text - if not t then - txt, t = fetch(txt) - end - if t == v_default or t == v_normal or t == t_medium or t == "" then - rulethickness = saved_rulethickness - elseif t == v_small then - rulethickness = topoints(1/1.2 * todimen(saved_rulethickness)) - elseif t == v_big then - rulethickness = topoints(1.2 * todimen(saved_rulethickness)) - elseif t then - -- rulethickness = topoints(todimen(t)) -- mp can't handle sp - rulethickness = topoints(tonumber(t) * todimen(saved_rulethickness)) - end - elseif operation == "symalign" then - local t = text - if not t then - txt, t = fetch(txt) - end - if t == v_default or t == v_normal then - align = saved_align - elseif t and t ~= "" then - align = "." .. t - end - elseif operation == "pb" then - insert(pstack,variant) - m = m + 1 ; metacode[m] = syntax.pb.direct - if keys[special] == "text" and index then - if keys["c"..special] == "text" then -- can be option: auto ... - m = m + 1 ; metacode[m] = f_empty_center(special,variant,index) - else - m = m + 1 ; metacode[m] = f_empty_normal(special,variant,index) - end - end - elseif operation == "pe" then - variant = remove(pstack) - local ss = syntax[variant] - keys, max = ss.keys, ss.max - m = m + 1 ; metacode[m] = syntax.pe.direct - m = m + 1 ; metacode[m] = f_set(variant) - current_variant = variant - elseif operation == "save" then - insert(sstack,variant) - m = m + 1 ; metacode[m] = syntax.save.direct - elseif operation == "restore" then - variant = remove(sstack) - local ss = syntax[variant] - keys, max = ss.keys, ss.max - m = m + 1 ; metacode[m] = syntax.restore.direct - m = m + 1 ; metacode[m] = f_set(variant) - current_variant = variant - elseif operation then - local ss = syntax[operation] - local what = keys[operation] - local ns = 0 - if set then - local sv = syntax[current_variant] - local ms = sv and sv.max - set = unique(set) - ns = #set - if directive_strictorder then - if what == "line" then - set = sorted(set) - end - if directive_strictindex and ms then - for i=ns,1,-1 do - local si = set[i] - if si > ms then - report_chemistry("level %a, operation %a, max nofsteps %a, ignoring %a",level,operation,ms,si) - set[i] = nil - ns = ns - 1 - else - break - end - end - end - else - if directive_strictindex and ms then - local t, nt = { }, 0 - for i=1,ns do - local si = set[i] - if si > ms then - report_chemistry("level %a, operation %a, max nofsteps %a, ignoring %a",level,operation,ms,si) - set[i] = nil - else - nt = nt + 1 - t[nt] = si - end - end - ns = nt - set = t - end - end - end - if ss then - local ds = ss.direct - if ds then - local sa = ss.arguments - if sa == 1 then - local one ; txt, one = fetch(txt) - m = m + 1 ; metacode[m] = ds(one or "") - elseif sa == 2 then - local one ; txt, one = fetch(txt) - local two ; txt, two = fetch(txt) - m = m + 1 ; metacode[m] = ds(one or "",two or "") - else - m = m + 1 ; metacode[m] = ds - end - elseif 
ss.keys then - variant, keys, max = s, ss.keys, ss.max - m = m + 1 ; metacode[m] = f_set(variant) - current_variant = variant - end - elseif what == "line" then - local s = osign - if s ~= "" then - s = "." .. s - end - if set then - -- condense consecutive numbers in a set to a range - local sf, st = set[1] - for i=1,ns do - if i > 1 and set[i] ~= set[i-1]+1 then - m = m + 1 ; metacode[m] = f_line(operation,s,variant,sf,st,rulethickness,rulecolor) - sf = set[i] - end - st = set[i] - end - m = m + 1 ; metacode[m] = f_line(operation,s,variant,sf,st,rulethickness,rulecolor) - elseif upto then - m = m + 1 ; metacode[m] = f_line(operation,s,variant,index,upto,rulethickness,rulecolor) - elseif index then - m = m + 1 ; metacode[m] = f_line(operation,s,variant,index,index,rulethickness,rulecolor) - else - m = m + 1 ; metacode[m] = f_line(operation,s,variant,1,max,rulethickness,rulecolor) - end - elseif what == "number" then - if set then - for i=1,ns do - local si = set[i] - m = m + 1 ; metacode[m] = f_number(operation,align,variant,si,si) - end - elseif upto then - for i=index,upto do - local si = set[i] - m = m + 1 ; metacode[m] = f_number(operation,align,variant,si,si) - end - elseif index then - m = m + 1 ; metacode[m] = f_number(operation,align,variant,index,index) - else - for i=1,max do - m = m + 1 ; metacode[m] = f_number(operation,align,variant,i,i) - end - end - elseif what == "text" then - if set then - for i=1,ns do - local si = set[i] - local t = text - if not t then txt, t = fetch(txt) end - if t then - t = molecule(processor_tostring(t)) - m = m + 1 ; metacode[m] = f_text(operation,align,variant,si,t) - end - end - elseif upto then - for i=index,upto do - local t = text - if not t then txt, t = fetch(txt) end - if t then - t = molecule(processor_tostring(t)) - m = m + 1 ; metacode[m] = f_text(operation,align,variant,i,t) - end - end - elseif index == 0 then - local t = text - if not t then txt, t = fetch(txt) end - if t then - t = molecule(processor_tostring(t)) - m = m + 1 ; metacode[m] = f_text(operation,align,variant,index,t) - end - elseif index then - local t = text - if not t then txt, t = fetch(txt) end - if t then - t = molecule(processor_tostring(t)) - m = m + 1 ; metacode[m] = f_text(operation,align,variant,index,t) - end - else - for i=1,max do - local t = text - if not t then txt, t = fetch(txt) end - if t then - t = molecule(processor_tostring(t)) - m = m + 1 ; metacode[m] = f_text(operation,align,variant,i,t) - end - end - end - elseif what == "transform" then - if osign == "m" then - factor = -factor - end - if set then - for i=1,ns do - local si = set[i] - m = m + 1 ; metacode[m] = f_transform(operation,variant,si,factor) - end - elseif upto then - for i=index,upto do - m = m + 1 ; metacode[m] = f_transform(operation,variant,i,factor) - end - else - m = m + 1 ; metacode[m] = f_transform(operation,variant,index or 1,factor) - end - elseif what == "fixed" then - m = m + 1 ; metacode[m] = f_transform(operation,variant,rulethickness,rulecolor) - elseif trace_structure then - report_chemistry("level %a, ignoring undefined operation %s",level,operation) - end - end - end - end - remove(stack) - return current_variant -end - --- the size related values are somewhat special but we want to be --- compatible --- --- rulethickness in points - -local function checked(d,factor,unit,scale) - if d == v_none then - return 0 - end - local n = tonumber(d) - if not n then - -- assume dimen - elseif n >= 10 or n <= -10 then - return factor * unit * n / 1000 - else - return factor * 
unit * n - end - local n = todimen(d) - if n then - return scale * n - else - return v_fit - end -end - -local function calculated(height,bottom,top,factor,unit,scale) - local scaled = 0 - if height == v_none then - -- this always wins - height = "0pt" - bottom = "0pt" - top = "0pt" - elseif height == v_fit then - height = "true" - bottom = bottom == v_fit and "true" or topoints(checked(bottom,factor,unit,scale)) - top = top == v_fit and "true" or topoints(checked(top, factor,unit,scale)) - else - height = checked(height,factor,unit,scale) - if bottom == v_fit then - if top == v_fit then - bottom = height / 2 - top = bottom - else - top = checked(top,factor,unit,scale) - bottom = height - top - end - elseif top == v_fit then - bottom = checked(bottom,factor,unit,scale) - top = height - bottom - else - bottom = checked(bottom,factor,unit,scale) - top = checked(top, factor,unit,scale) - local ratio = height / (bottom+top) - bottom = bottom * ratio - top = top * ratio - end - scaled = height - top = topoints(top) - bottom = topoints(bottom) - height = topoints(height) - end - return height, bottom, top, scaled -end - -function chemistry.start(settings) - -- - local width = settings.width or v_fit - local height = settings.height or v_fit - local unit = settings.unit or 655360 - local factor = settings.factor or 3 - local rulethickness = settings.rulethickness or 65536 - local rulecolor = settings.rulecolor or "" - local axiscolor = settings.framecolor or "" - local scale = settings.scale or "normal" - local rotation = settings.rotation or 0 - local offset = settings.offset or 0 - local left = settings.left or v_fit - local right = settings.right or v_fit - local top = settings.top or v_fit - local bottom = settings.bottom or v_fit - -- - align = settings.symalign or "auto" - if trace_structure then - report_chemistry("unit %p, factor %s, symalign %s",unit,factor,align) - end - if align ~= "" then - align = "." .. 
align - end - if trace_structure then - report_chemistry("%s scale %a, rotation %a, width %s, height %s, left %s, right %s, top %s, bottom %s","asked",scale,rotation,width,height,left,right,top,bottom) - end - if scale == v_small then - scale = 1/1.2 - elseif scale == v_normal or scale == v_medium or scale == 0 then - scale = 1 - elseif scale == v_big then - scale = 1.2 - else - scale = tonumber(scale) - if not scale or scale == 0 then - scale = 1 - elseif scale >= 10 then - scale = scale / 1000 - elseif scale < .01 then - scale = .01 - end - end - -- - unit = scale * unit - -- - local sp_width = 0 - local sp_height = 0 - -- - width, left, right, sp_width = calculated(width, left, right,factor,unit,scale) - height, bottom, top, sp_height = calculated(height,bottom,top, factor,unit,scale) - -- - if width ~= "true" and height ~= "true" and texcount["@@trialtypesetting"] ~= 0 then - if trace_structure then - report_chemistry("skipping trial run") - end - context.hrule(sp_width,sp_height,0) -- maybe depth - return - end - -- - chemistry.structures = chemistry.structures + 1 - -- - rotation = tonumber(rotation) or 0 - -- - metacode = { } - -- - if trace_structure then - report_chemistry("%s scale %a, rotation %a, width %s, height %s, left %s, right %s, top %s, bottom %s","used",scale,rotation,width,height,left,right,top,bottom) - end - metacode[#metacode+1] = f_start_structure( - chemistry.structures, - left, right, top, bottom, - rotation, topoints(unit), factor, topoints(offset), - tostring(settings.axis == v_on), topoints(rulethickness), tostring(axiscolor) - ) - metacode[#metacode+1] = f_set_trace_bounds(trace_boundingbox) ; - -- - variant, keys, stack, pstack, sstack = "one", { }, { }, { }, { } -end - -function chemistry.stop() - if metacode then - metacode[#metacode+1] = f_stop_structure - local mpcode = concat(metacode,"\n") - if trace_metapost then - report_chemistry("metapost code:\n%s", mpcode) - end - if metapost.instance(chemistry.instance) then - f_initialize = nil - end - metapost.graphic { - instance = chemistry.instance, - format = chemistry.format, - data = mpcode, - definitions = f_initialize, - } - t_initialize = "" - metacode = nil - end -end - -function chemistry.component(spec,text,settings) - if metacode then - rulethickness, rulecolor, offset = settings.rulethickness, settings.rulecolor - local spec = settings_to_array_with_repeat(spec,true) -- no lower? - local text = settings_to_array_with_repeat(text,true) - -- inspect(spec) - metacode[#metacode+1] = f_start_component - process(1,spec,text,1,rulethickness,rulecolor) -- offset? - metacode[#metacode+1] = f_stop_component - end -end - -statistics.register("chemical formulas", function() - if chemistry.structures > 0 then - return format("%s chemical structure formulas",chemistry.structures) -- no timing needed, part of metapost - end -end) - --- interfaces - -commands.undefinechemical = chemistry.undefine -commands.definechemical = chemistry.define -commands.startchemical = chemistry.start -commands.stopchemical = chemistry.stop -commands.chemicalcomponent = chemistry.component - --- todo: top / bottom --- maybe add "=" for double and "≡" for triple? 
- -local inline = { - ["single"] = "\\chemicalsinglebond", ["-"] = "\\chemicalsinglebond", - ["double"] = "\\chemicaldoublebond", ["--"] = "\\chemicaldoublebond", - ["triple"] = "\\chemicaltriplebond", ["---"] = "\\chemicaltriplebond", - ["gives"] = "\\chemicalgives", ["->"] = "\\chemicalgives", - ["equilibrium"] = "\\chemicalequilibrium", ["<->"] = "\\chemicalequilibrium", - ["mesomeric"] = "\\chemicalmesomeric", ["<>"] = "\\chemicalmesomeric", - ["plus"] = "\\chemicalplus", ["+"] = "\\chemicalplus", - ["minus"] = "\\chemicalminus", - ["space"] = "\\chemicalspace", -} - -function commands.inlinechemical(spec) - local spec = settings_to_array_with_repeat(spec,true) - for i=1,#spec do - local s = spec[i] - local inl = inline[lower(s)] - if inl then - context(inl) -- could be a fast context.sprint - else - context.chemicalinline(molecule(s)) - end - end -end +if not modules then modules = { } end modules ['chem-str'] = { + version = 1.001, + comment = "companion to chem-str.mkiv", + author = "Hans Hagen and Alan Braslau", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- The original \PPCHTEX\ code was written in pure \TEX\, although later we made +-- the move from \PICTEX\ to \METAPOST\. The current implementation is a mix between +-- \TEX\, \LUA\ and \METAPOST. Although the first objective is to get a compatible +-- but better implementation, later versions might provide more. +-- +-- Well, the later version has arrived as Alan took it upon him to make the code +-- deviate even further from the original implementation. The original (early \MKII) +-- variant operated within the boundaries of \PICTEX\ and as it supported MetaPost as +-- alternative output. As a consequence it still used a stepwise graphic construction +-- approach. As we used \TEX\ for parsing, the syntax was more rigid than it is now. +-- This new variant uses a more mathematical and metapostisch approach. In the process +-- more rendering variants have been added and alignment has been automated. As a result +-- the current user interface is slightly different from the old one but hopefully users +-- will like the added value. 
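Before the implementation that follows, a rough sketch of how this Lua layer is driven (not part of the patch; the spec and settings are made up, and in practice they come from the \startchemical ... \stopchemical interface at the TeX end):

-- a named snippet that can later be used as a step inside a component spec
chemistry.define("demoring","six,b,r,z1=N","")

-- the command wrappers registered at the end of this file boil down to calls like:
--
--   chemistry.start { width = "fit", height = "fit", scale = "normal", axis = "on" }
--   chemistry.component("demoring,z4=C","",{ rulethickness = "0.6pt", rulecolor = "" })
--   chemistry.stop()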
+ +-- directive_strictorder: one might set this to off when associated texts are disordered too + +local trace_structure = false trackers .register("chemistry.structure", function(v) trace_structure = v end) +local trace_metapost = false trackers .register("chemistry.metapost", function(v) trace_metapost = v end) +local trace_boundingbox = false trackers .register("chemistry.boundingbox", function(v) trace_boundingbox = v end) +local trace_textstack = false trackers .register("chemistry.textstack", function(v) trace_textstack = v end) +local directive_strictorder = true directives.register("chemistry.strictorder", function(v) directive_strictorder = v end) +local directive_strictindex = false directives.register("chemistry.strictindex", function(v) directive_strictindex = v end) + +local report_chemistry = logs.reporter("chemistry") + +local format, gmatch, match, lower, gsub = string.format, string.gmatch, string.match, string.lower, string.gsub +local concat, insert, remove, unique, sorted = table.concat, table.insert, table.remove, table.unique, table.sorted +local processor_tostring = typesetters and typesetters.processors.tostring +local settings_to_array = utilities.parsers.settings_to_array +local settings_to_array_with_repeat = utilities.parsers.settings_to_array_with_repeat +local formatters = string.formatters + +local lpegmatch = lpeg.match +local P, R, S, C, Cs, Ct, Cc, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cmt + +local variables = interfaces and interfaces.variables +local context = context +local formatters = string.formatters +local texcount = tex.count + +local v_default = variables.default +local v_small = variables.small +local v_medium = variables.medium +local v_big = variables.big +local v_normal = variables.normal +local v_fit = variables.fit +local v_on = variables.on +local v_none = variables.none + +local mpnamedcolor = attributes.colors.mpnamedcolor +local topoints = number.topoints +local todimen = string.todimen + +chemistry = chemistry or { } +local chemistry = chemistry + +chemistry.instance = "chemistry" +chemistry.format = "metafun" +chemistry.structures = 0 + +local common_keys = { + b = "line", + r = "line", + sb = "line", + sr = "line", + rd = "line", + rh = "line", + rb = "line", + rbd = "line", + cc = "line", + ccd = "line", + line = "line", + dash = "line", + arrow = "line", + c = "fixed", + cd = "fixed", + z = "text", + zt = "text", + zlt = "text", + zrt = "text", + rz = "text", + rt = "text", + lrt = "text", + rrt = "text", + label = "text", + zln = "number", + zrn = "number", + rn = "number", + lrn = "number", + rrn = "number", + zn = "number", + number = "number", + mov = "transform", + mark = "transform", + move = "transform", + diff = "transform", + off = "transform", + adj = "transform", + sub = "transform", +} + +local front_keys = { + bb = "line", + eb = "line", + rr = "line", + lr = "line", + lsr = "line", + rsr = "line", + lrd = "line", + rrd = "line", + lrh = "line", + rrh = "line", + lrbd = "line", + rrbd = "line", + lrb = "line", + rrb = "line", + lrz = "text", + rrz = "text", + lsub = "transform", + rsub = "transform", +} + +local one_keys = { + db = "line", + tb = "line", + bb = "line", + dr = "line", + hb = "line", + bd = "line", + bw = "line", + oe = "line", + sd = "line", + rdb = "line", + ldb = "line", + ldd = "line", + rdd = "line", + ep = "line", + es = "line", + ed = "line", + et = "line", + cz = "text", + rot = "transform", + dir = "transform", + rm = "transform", + mir = "transform", +} + +local 
ring_keys = { + db = "line", + br = "line", + lr = "line", + rr = "line", + lsr = "line", + rsr = "line", + lrd = "line", + rrd = "line", + lrb = "line", + rrb = "line", + lrh = "line", + rrh = "line", + lrbd = "line", + rrbd = "line", + dr = "line", + eb = "line", + er = "line", + ed = "line", + au = "line", + ad = "line", + s = "line", + ss = "line", + mid = "line", + mids = "line", + midz = "text", + lrz = "text", + rrz = "text", + crz = "text", + rot = "transform", + mir = "transform", + adj = "transform", + lsub = "transform", + rsub = "transform", + rm = "transform", +} + +-- table.setmetatableindex(front_keys,common_keys) +-- table.setmetatableindex(one_keys,common_keys) +-- table.setmetatableindex(ring_keys,common_keys) + +-- or (faster but not needed here): + +front_keys = table.merged(front_keys,common_keys) +one_keys = table.merged(one_keys,common_keys) +ring_keys = table.merged(ring_keys,common_keys) + +local syntax = { + carbon = { max = 4, keys = one_keys, }, + alkyl = { max = 4, keys = one_keys, }, + newmanstagger = { max = 6, keys = one_keys, }, + newmaneclipsed = { max = 6, keys = one_keys, }, + one = { max = 8, keys = one_keys, }, + three = { max = 3, keys = ring_keys, }, + four = { max = 4, keys = ring_keys, }, + five = { max = 5, keys = ring_keys, }, + six = { max = 6, keys = ring_keys, }, + seven = { max = 7, keys = ring_keys, }, + eight = { max = 8, keys = ring_keys, }, + nine = { max = 9, keys = ring_keys, }, + fivefront = { max = 5, keys = front_keys, }, + sixfront = { max = 6, keys = front_keys, }, + chair = { max = 6, keys = front_keys, }, + boat = { max = 6, keys = front_keys, }, + pb = { direct = 'chem_pb;' }, + pe = { direct = 'chem_pe;' }, + save = { direct = 'chem_save;' }, + restore = { direct = 'chem_restore;' }, + chem = { direct = formatters['chem_symbol("\\chemicaltext{%s}");'], arguments = 1 }, + space = { direct = 'chem_symbol("\\chemicalsymbol[space]");' }, + plus = { direct = 'chem_symbol("\\chemicalsymbol[plus]");' }, + minus = { direct = 'chem_symbol("\\chemicalsymbol[minus]");' }, + gives = { direct = formatters['chem_symbol("\\chemicalsymbol[gives]{%s}{%s}");'], arguments = 2 }, + equilibrium = { direct = formatters['chem_symbol("\\chemicalsymbol[equilibrium]{%s}{%s}");'], arguments = 2 }, + mesomeric = { direct = formatters['chem_symbol("\\chemicalsymbol[mesomeric]{%s}{%s}");'], arguments = 2 }, + opencomplex = { direct = 'chem_symbol("\\chemicalsymbol[opencomplex]");' }, + closecomplex = { direct = 'chem_symbol("\\chemicalsymbol[closecomplex]");' }, + reset = { direct = 'chem_reset;' }, + mp = { direct = formatters['%s'], arguments = 1 }, -- backdoor MP code - dangerous! +} + +chemistry.definitions = chemistry.definitions or { } +local definitions = chemistry.definitions + +storage.register("chemistry/definitions",definitions,"chemistry.definitions") + +function chemistry.undefine(name) + definitions[lower(name)] = nil +end + +function chemistry.define(name,spec,text) + name = lower(name) + local dn = definitions[name] + if not dn then + dn = { } + definitions[name] = dn + end + dn[#dn+1] = { + spec = settings_to_array_with_repeat(spec,true), + text = settings_to_array_with_repeat(text,true), + } +end + +local metacode, variant, keys, max, txt, pstack, sstack, align +local molecule = chemistry.molecule -- or use lpegmatch(chemistry.moleculeparser,...) 
+ +local function fetch(txt) + local st = stack[txt] + local t = st.text[st.n] + while not t and txt > 1 do + txt = txt - 1 + st = stack[txt] + t = st.text[st.n] + end + if t then + if trace_textstack then + report_chemistry("fetching from stack %a, slot %a, data %a",txt,st.n,t) + end + st.n = st.n + 1 + end + return txt, t +end + +local remapper = { + ["+"] = "p", + ["-"] = "m", +} + +local dchrs = R("09") +local sign = S("+-") +local digit = dchrs / tonumber +local amount = (sign^-1 * (dchrs^0 * P('.'))^-1 * dchrs^1) / tonumber +local single = digit +local range = digit * P("..") * digit +local set = Ct(digit^2) +local colon = P(":") +local equal = P("=") +local other = 1 - digit - colon - equal +local remapped = sign / remapper +local operation = Cs(other^1) +local special = (colon * C(other^1)) + Cc("") +local text = (equal * C(P(1)^0)) + Cc(false) + +local pattern = + (amount + Cc(1)) + * (remapped + Cc("")) + * Cs(operation/lower) + * Cs(special/lower) * ( + range * Cc(false) * text + + Cc(false) * Cc(false) * set * text + + single * Cc(false) * Cc(false) * text + + Cc(false) * Cc(false) * Cc(false) * text + ) + +-- local n, operation, index, upto, set, text = lpegmatch(pattern,"RZ1357") + +-- print(lpegmatch(pattern,"RZ=x")) -- 1 RZ false false false x +-- print(lpegmatch(pattern,"RZ1=x")) -- 1 RZ 1 false false x +-- print(lpegmatch(pattern,"RZ1..3=x")) -- 1 RZ 1 3 false x +-- print(lpegmatch(pattern,"RZ13=x")) -- 1 RZ false false table x + +local f_initialize = 'if unknown context_chem : input mp-chem.mpiv ; fi ;' +local f_start_structure = formatters['chem_start_structure(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);'] +local f_set_trace_bounds = formatters['chem_trace_boundingbox := %l ;'] +local f_stop_structure = 'chem_stop_structure;' +local f_start_component = 'chem_start_component;' +local f_stop_component = 'chem_stop_component;' +local f_line = formatters['chem_%s%s(%s,%s,%s,%s,%s);'] +local f_set = formatters['chem_set(%s);'] +local f_number = formatters['chem_%s%s(%s,%s,"\\chemicaltext{%s}");'] +local f_text = f_number +local f_empty_normal = formatters['chem_%s(%s,%s,"");'] +local f_empty_center = formatters['chem_c%s(%s,%s,"");'] +local f_transform = formatters['chem_%s(%s,%s,%s);'] + +local prepareMPvariable = commands and commands.prepareMPvariable + +local function process(level,spec,text,n,rulethickness,rulecolor,offset,default_variant) + insert(stack,{ spec = spec, text = text, n = n }) + local txt = #stack + local m = #metacode + local saved_rulethickness = rulethickness + local saved_rulecolor = rulecolor + local saved_align = align + local current_variant = default_variant or "six" + for i=1,#spec do + local step = spec[i] + local s = lower(step) + local n = current_variant .. ":" .. s + local d = definitions[n] + if not d then + n = s + d = definitions[n] + end + if d then + if trace_structure then + report_chemistry("level %a, step %a, definition %a, snippets %a",level,step,n,#d) + end + for i=1,#d do + local di = d[i] + current_variant = process(level+1,di.spec,di.text,1,rulethickness,rulecolor,offset,current_variant) -- offset? 
+ end + else + local factor, osign, operation, special, index, upto, set, text = lpegmatch(pattern,step) + if trace_structure then + local set = set and concat(set," ") or "-" + report_chemistry("level %a, step %a, factor %a, osign %a, operation %a, special %a, index %a, upto %a, set %a, text %a", + level,step,factor,osign,operation,special,index,upto,set,text) + end + if operation == "rulecolor" then + local t = text + if not t then + txt, t = fetch(txt) + end + if t == v_default or t == v_normal or t == "" then + rulecolor = saved_rulecolor + elseif t then + rulecolor = mpnamedcolor(t) + end + elseif operation == "rulethickness" then + local t = text + if not t then + txt, t = fetch(txt) + end + if t == v_default or t == v_normal or t == t_medium or t == "" then + rulethickness = saved_rulethickness + elseif t == v_small then + rulethickness = topoints(1/1.2 * todimen(saved_rulethickness)) + elseif t == v_big then + rulethickness = topoints(1.2 * todimen(saved_rulethickness)) + elseif t then + -- rulethickness = topoints(todimen(t)) -- mp can't handle sp + rulethickness = topoints(tonumber(t) * todimen(saved_rulethickness)) + end + elseif operation == "symalign" then + local t = text + if not t then + txt, t = fetch(txt) + end + if t == v_default or t == v_normal then + align = saved_align + elseif t and t ~= "" then + align = "." .. t + end + elseif operation == "pb" then + insert(pstack,variant) + m = m + 1 ; metacode[m] = syntax.pb.direct + if keys[special] == "text" and index then + if keys["c"..special] == "text" then -- can be option: auto ... + m = m + 1 ; metacode[m] = f_empty_center(special,variant,index) + else + m = m + 1 ; metacode[m] = f_empty_normal(special,variant,index) + end + end + elseif operation == "pe" then + variant = remove(pstack) + local ss = syntax[variant] + keys, max = ss.keys, ss.max + m = m + 1 ; metacode[m] = syntax.pe.direct + m = m + 1 ; metacode[m] = f_set(variant) + current_variant = variant + elseif operation == "save" then + insert(sstack,variant) + m = m + 1 ; metacode[m] = syntax.save.direct + elseif operation == "restore" then + variant = remove(sstack) + local ss = syntax[variant] + keys, max = ss.keys, ss.max + m = m + 1 ; metacode[m] = syntax.restore.direct + m = m + 1 ; metacode[m] = f_set(variant) + current_variant = variant + elseif operation then + local ss = syntax[operation] + local what = keys[operation] + local ns = 0 + if set then + local sv = syntax[current_variant] + local ms = sv and sv.max + set = unique(set) + ns = #set + if directive_strictorder then + if what == "line" then + set = sorted(set) + end + if directive_strictindex and ms then + for i=ns,1,-1 do + local si = set[i] + if si > ms then + report_chemistry("level %a, operation %a, max nofsteps %a, ignoring %a",level,operation,ms,si) + set[i] = nil + ns = ns - 1 + else + break + end + end + end + else + if directive_strictindex and ms then + local t, nt = { }, 0 + for i=1,ns do + local si = set[i] + if si > ms then + report_chemistry("level %a, operation %a, max nofsteps %a, ignoring %a",level,operation,ms,si) + set[i] = nil + else + nt = nt + 1 + t[nt] = si + end + end + ns = nt + set = t + end + end + end + if ss then + local ds = ss.direct + if ds then + local sa = ss.arguments + if sa == 1 then + local one ; txt, one = fetch(txt) + m = m + 1 ; metacode[m] = ds(one or "") + elseif sa == 2 then + local one ; txt, one = fetch(txt) + local two ; txt, two = fetch(txt) + m = m + 1 ; metacode[m] = ds(one or "",two or "") + else + m = m + 1 ; metacode[m] = ds + end + elseif 
ss.keys then + variant, keys, max = s, ss.keys, ss.max + m = m + 1 ; metacode[m] = f_set(variant) + current_variant = variant + end + elseif what == "line" then + local s = osign + if s ~= "" then + s = "." .. s + end + if set then + -- condense consecutive numbers in a set to a range + local sf, st = set[1] + for i=1,ns do + if i > 1 and set[i] ~= set[i-1]+1 then + m = m + 1 ; metacode[m] = f_line(operation,s,variant,sf,st,rulethickness,rulecolor) + sf = set[i] + end + st = set[i] + end + m = m + 1 ; metacode[m] = f_line(operation,s,variant,sf,st,rulethickness,rulecolor) + elseif upto then + m = m + 1 ; metacode[m] = f_line(operation,s,variant,index,upto,rulethickness,rulecolor) + elseif index then + m = m + 1 ; metacode[m] = f_line(operation,s,variant,index,index,rulethickness,rulecolor) + else + m = m + 1 ; metacode[m] = f_line(operation,s,variant,1,max,rulethickness,rulecolor) + end + elseif what == "number" then + if set then + for i=1,ns do + local si = set[i] + m = m + 1 ; metacode[m] = f_number(operation,align,variant,si,si) + end + elseif upto then + for i=index,upto do + local si = set[i] + m = m + 1 ; metacode[m] = f_number(operation,align,variant,si,si) + end + elseif index then + m = m + 1 ; metacode[m] = f_number(operation,align,variant,index,index) + else + for i=1,max do + m = m + 1 ; metacode[m] = f_number(operation,align,variant,i,i) + end + end + elseif what == "text" then + if set then + for i=1,ns do + local si = set[i] + local t = text + if not t then txt, t = fetch(txt) end + if t then + t = molecule(processor_tostring(t)) + m = m + 1 ; metacode[m] = f_text(operation,align,variant,si,t) + end + end + elseif upto then + for i=index,upto do + local t = text + if not t then txt, t = fetch(txt) end + if t then + t = molecule(processor_tostring(t)) + m = m + 1 ; metacode[m] = f_text(operation,align,variant,i,t) + end + end + elseif index == 0 then + local t = text + if not t then txt, t = fetch(txt) end + if t then + t = molecule(processor_tostring(t)) + m = m + 1 ; metacode[m] = f_text(operation,align,variant,index,t) + end + elseif index then + local t = text + if not t then txt, t = fetch(txt) end + if t then + t = molecule(processor_tostring(t)) + m = m + 1 ; metacode[m] = f_text(operation,align,variant,index,t) + end + else + for i=1,max do + local t = text + if not t then txt, t = fetch(txt) end + if t then + t = molecule(processor_tostring(t)) + m = m + 1 ; metacode[m] = f_text(operation,align,variant,i,t) + end + end + end + elseif what == "transform" then + if osign == "m" then + factor = -factor + end + if set then + for i=1,ns do + local si = set[i] + m = m + 1 ; metacode[m] = f_transform(operation,variant,si,factor) + end + elseif upto then + for i=index,upto do + m = m + 1 ; metacode[m] = f_transform(operation,variant,i,factor) + end + else + m = m + 1 ; metacode[m] = f_transform(operation,variant,index or 1,factor) + end + elseif what == "fixed" then + m = m + 1 ; metacode[m] = f_transform(operation,variant,rulethickness,rulecolor) + elseif trace_structure then + report_chemistry("level %a, ignoring undefined operation %s",level,operation) + end + end + end + end + remove(stack) + return current_variant +end + +-- the size related values are somewhat special but we want to be +-- compatible +-- +-- rulethickness in points + +local function checked(d,factor,unit,scale) + if d == v_none then + return 0 + end + local n = tonumber(d) + if not n then + -- assume dimen + elseif n >= 10 or n <= -10 then + return factor * unit * n / 1000 + else + return factor * 
unit * n + end + local n = todimen(d) + if n then + return scale * n + else + return v_fit + end +end + +local function calculated(height,bottom,top,factor,unit,scale) + local scaled = 0 + if height == v_none then + -- this always wins + height = "0pt" + bottom = "0pt" + top = "0pt" + elseif height == v_fit then + height = "true" + bottom = bottom == v_fit and "true" or topoints(checked(bottom,factor,unit,scale)) + top = top == v_fit and "true" or topoints(checked(top, factor,unit,scale)) + else + height = checked(height,factor,unit,scale) + if bottom == v_fit then + if top == v_fit then + bottom = height / 2 + top = bottom + else + top = checked(top,factor,unit,scale) + bottom = height - top + end + elseif top == v_fit then + bottom = checked(bottom,factor,unit,scale) + top = height - bottom + else + bottom = checked(bottom,factor,unit,scale) + top = checked(top, factor,unit,scale) + local ratio = height / (bottom+top) + bottom = bottom * ratio + top = top * ratio + end + scaled = height + top = topoints(top) + bottom = topoints(bottom) + height = topoints(height) + end + return height, bottom, top, scaled +end + +function chemistry.start(settings) + -- + local width = settings.width or v_fit + local height = settings.height or v_fit + local unit = settings.unit or 655360 + local factor = settings.factor or 3 + local rulethickness = settings.rulethickness or 65536 + local rulecolor = settings.rulecolor or "" + local axiscolor = settings.framecolor or "" + local scale = settings.scale or "normal" + local rotation = settings.rotation or 0 + local offset = settings.offset or 0 + local left = settings.left or v_fit + local right = settings.right or v_fit + local top = settings.top or v_fit + local bottom = settings.bottom or v_fit + -- + align = settings.symalign or "auto" + if trace_structure then + report_chemistry("unit %p, factor %s, symalign %s",unit,factor,align) + end + if align ~= "" then + align = "." .. 
align + end + if trace_structure then + report_chemistry("%s scale %a, rotation %a, width %s, height %s, left %s, right %s, top %s, bottom %s","asked",scale,rotation,width,height,left,right,top,bottom) + end + if scale == v_small then + scale = 1/1.2 + elseif scale == v_normal or scale == v_medium or scale == 0 then + scale = 1 + elseif scale == v_big then + scale = 1.2 + else + scale = tonumber(scale) + if not scale or scale == 0 then + scale = 1 + elseif scale >= 10 then + scale = scale / 1000 + elseif scale < .01 then + scale = .01 + end + end + -- + unit = scale * unit + -- + local sp_width = 0 + local sp_height = 0 + -- + width, left, right, sp_width = calculated(width, left, right,factor,unit,scale) + height, bottom, top, sp_height = calculated(height,bottom,top, factor,unit,scale) + -- + if width ~= "true" and height ~= "true" and texcount["@@trialtypesetting"] ~= 0 then + if trace_structure then + report_chemistry("skipping trial run") + end + context.hrule(sp_width,sp_height,0) -- maybe depth + return + end + -- + chemistry.structures = chemistry.structures + 1 + -- + rotation = tonumber(rotation) or 0 + -- + metacode = { } + -- + if trace_structure then + report_chemistry("%s scale %a, rotation %a, width %s, height %s, left %s, right %s, top %s, bottom %s","used",scale,rotation,width,height,left,right,top,bottom) + end + metacode[#metacode+1] = f_start_structure( + chemistry.structures, + left, right, top, bottom, + rotation, topoints(unit), factor, topoints(offset), + tostring(settings.axis == v_on), topoints(rulethickness), tostring(axiscolor) + ) + metacode[#metacode+1] = f_set_trace_bounds(trace_boundingbox) ; + -- + variant, keys, stack, pstack, sstack = "one", { }, { }, { }, { } +end + +function chemistry.stop() + if metacode then + metacode[#metacode+1] = f_stop_structure + local mpcode = concat(metacode,"\n") + if trace_metapost then + report_chemistry("metapost code:\n%s", mpcode) + end + if metapost.instance(chemistry.instance) then + f_initialize = nil + end + metapost.graphic { + instance = chemistry.instance, + format = chemistry.format, + data = mpcode, + definitions = f_initialize, + } + t_initialize = "" + metacode = nil + end +end + +function chemistry.component(spec,text,settings) + if metacode then + rulethickness, rulecolor, offset = settings.rulethickness, settings.rulecolor + local spec = settings_to_array_with_repeat(spec,true) -- no lower? + local text = settings_to_array_with_repeat(text,true) + -- inspect(spec) + metacode[#metacode+1] = f_start_component + process(1,spec,text,1,rulethickness,rulecolor) -- offset? + metacode[#metacode+1] = f_stop_component + end +end + +statistics.register("chemical formulas", function() + if chemistry.structures > 0 then + return format("%s chemical structure formulas",chemistry.structures) -- no timing needed, part of metapost + end +end) + +-- interfaces + +commands.undefinechemical = chemistry.undefine +commands.definechemical = chemistry.define +commands.startchemical = chemistry.start +commands.stopchemical = chemistry.stop +commands.chemicalcomponent = chemistry.component + +-- todo: top / bottom +-- maybe add "=" for double and "≡" for triple? 
+ +local inline = { + ["single"] = "\\chemicalsinglebond", ["-"] = "\\chemicalsinglebond", + ["double"] = "\\chemicaldoublebond", ["--"] = "\\chemicaldoublebond", + ["triple"] = "\\chemicaltriplebond", ["---"] = "\\chemicaltriplebond", + ["gives"] = "\\chemicalgives", ["->"] = "\\chemicalgives", + ["equilibrium"] = "\\chemicalequilibrium", ["<->"] = "\\chemicalequilibrium", + ["mesomeric"] = "\\chemicalmesomeric", ["<>"] = "\\chemicalmesomeric", + ["plus"] = "\\chemicalplus", ["+"] = "\\chemicalplus", + ["minus"] = "\\chemicalminus", + ["space"] = "\\chemicalspace", +} + +function commands.inlinechemical(spec) + local spec = settings_to_array_with_repeat(spec,true) + for i=1,#spec do + local s = spec[i] + local inl = inline[lower(s)] + if inl then + context(inl) -- could be a fast context.sprint + else + context.chemicalinline(molecule(s)) + end + end +end diff --git a/tex/context/base/cldf-bas.lua b/tex/context/base/cldf-bas.lua index 6adeb2272..6e887f2ba 100644 --- a/tex/context/base/cldf-bas.lua +++ b/tex/context/base/cldf-bas.lua @@ -1,179 +1,179 @@ -if not modules then modules = { } end modules ['cldf-bas'] = { - version = 1.001, - comment = "companion to cldf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- -- speedtest needed: --- --- local flush, writer = context.getlogger() --- --- trackers.register("context.trace",function(v) --- flush, writer = context.getlogger() --- end) --- --- function context.bgroup() --- flush(ctxcatcodes,"{") --- end --- --- function context.egroup() --- flush(ctxcatcodes,"}") --- end - --- maybe use context.generics - -local type = type -local format = string.format -local utfchar = utf.char -local concat = table.concat - -local context = context -local generics = context.generics -local variables = interfaces.variables - -local nodepool = nodes.pool -local new_rule = nodepool.rule -local new_glyph = nodepool.glyph - -local current_font = font.current -local texcount = tex.count - -function context.char(k) -- used as escape too, so don't change to utf - if type(k) == "table" then - local n = #k - if n == 1 then - context([[\char%s\relax]],k[1]) - elseif n > 0 then - context([[\char%s\relax]],concat(k,[[\relax\char]])) - end - elseif k then - context([[\char%s\relax]],k) - end -end - -function context.utfchar(k) - context(utfchar(k)) -end - --- plain variants - -function context.chardef(cs,u) - context([[\chardef\%s=%s\relax]],k) -end - -function context.par() - context([[\par]]) -- no need to add {} there -end - -function context.bgroup() - context("{") -end - -function context.egroup() - context("}") -end - -function context.space() - context("\\space") -- no " " as that gets intercepted -end - -function context.hrule(w,h,d,dir) - if type(w) == "table" then - context(new_rule(w.width,w.height,w.depth,w.dir)) - else - context(new_rule(w,h,d,dir)) - end -end - -function context.glyph(id,k) - if id then - if not k then - id, k = current_font(), id - end - context(new_glyph(id,k)) - end -end - -context.vrule = context.hrule - ---~ local hbox, bgroup, egroup = context.hbox, context.bgroup, context.egroup - ---~ function context.hbox(a,...) ---~ if type(a) == "table" then ---~ local s = { } ---~ if a.width then ---~ s[#s+1] = "to " .. a.width -- todo: check for number ---~ elseif a.spread then ---~ s[#s+1] = "spread " .. 
a.spread -- todo: check for number ---~ end ---~ -- todo: dir, attr etc ---~ hbox(false,table.concat(s," ")) ---~ bgroup() ---~ context(string.format(...)) ---~ egroup() ---~ else ---~ hbox(a,...) ---~ end ---~ end - --- not yet used ... but will get variant at the tex end as well - -function context.sethboxregister(n) context([[\setbox %s\hbox]],n) end -function context.setvboxregister(n) context([[\setbox %s\vbox]],n) end - -function context.starthboxregister(n) - if type(n) == "number" then - context([[\setbox%s\hbox{]],n) - else - context([[\setbox\%s\hbox{]],n) - end -end - -function context.startvboxregister(n) - if type(n) == "number" then - context([[\setbox%s\vbox{]],n) - else - context([[\setbox\%s\vbox{]],n) - end -end - -context.stophboxregister = context.egroup -context.stopvboxregister = context.egroup - -function context.flushboxregister(n) - if type(n) == "number" then - context([[\box%s ]],n) - else - context([[\box\%s]],n) - end -end - -function context.beginvbox() - context([[\vbox{]]) -- we can do \bvbox ... \evbox (less tokens) -end - -function context.beginhbox() - context([[\hbox{]]) -- todo: use fast one -end - -context.endvbox = context.egroup -context.endhbox = context.egroup - -local function allocate(name,what,cmd) - local a = format("c_syst_last_allocated_%s",what) - local n = texcount[a] + 1 - if n <= texcount.c_syst_max_allocated_register then - texcount[a] = n - end - context("\\global\\expandafter\\%sdef\\csname %s\\endcsname %s\\relax",cmd or what,name,n) - return n -end - -function context.newdimen (name) return allocate(name,"dimen") end -function context.newskip (name) return allocate(name,"skip") end -function context.newcount (name) return allocate(name,"count") end -function context.newmuskip(name) return allocate(name,"muskip") end -function context.newtoks (name) return allocate(name,"toks") end -function context.newbox (name) return allocate(name,"box","mathchar") end +if not modules then modules = { } end modules ['cldf-bas'] = { + version = 1.001, + comment = "companion to cldf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- -- speedtest needed: +-- +-- local flush, writer = context.getlogger() +-- +-- trackers.register("context.trace",function(v) +-- flush, writer = context.getlogger() +-- end) +-- +-- function context.bgroup() +-- flush(ctxcatcodes,"{") +-- end +-- +-- function context.egroup() +-- flush(ctxcatcodes,"}") +-- end + +-- maybe use context.generics + +local type = type +local format = string.format +local utfchar = utf.char +local concat = table.concat + +local context = context +local generics = context.generics +local variables = interfaces.variables + +local nodepool = nodes.pool +local new_rule = nodepool.rule +local new_glyph = nodepool.glyph + +local current_font = font.current +local texcount = tex.count + +function context.char(k) -- used as escape too, so don't change to utf + if type(k) == "table" then + local n = #k + if n == 1 then + context([[\char%s\relax]],k[1]) + elseif n > 0 then + context([[\char%s\relax]],concat(k,[[\relax\char]])) + end + elseif k then + context([[\char%s\relax]],k) + end +end + +function context.utfchar(k) + context(utfchar(k)) +end + +-- plain variants + +function context.chardef(cs,u) + context([[\chardef\%s=%s\relax]],k) +end + +function context.par() + context([[\par]]) -- no need to add {} there +end + +function context.bgroup() + context("{") +end + +function 
context.egroup() + context("}") +end + +function context.space() + context("\\space") -- no " " as that gets intercepted +end + +function context.hrule(w,h,d,dir) + if type(w) == "table" then + context(new_rule(w.width,w.height,w.depth,w.dir)) + else + context(new_rule(w,h,d,dir)) + end +end + +function context.glyph(id,k) + if id then + if not k then + id, k = current_font(), id + end + context(new_glyph(id,k)) + end +end + +context.vrule = context.hrule + +--~ local hbox, bgroup, egroup = context.hbox, context.bgroup, context.egroup + +--~ function context.hbox(a,...) +--~ if type(a) == "table" then +--~ local s = { } +--~ if a.width then +--~ s[#s+1] = "to " .. a.width -- todo: check for number +--~ elseif a.spread then +--~ s[#s+1] = "spread " .. a.spread -- todo: check for number +--~ end +--~ -- todo: dir, attr etc +--~ hbox(false,table.concat(s," ")) +--~ bgroup() +--~ context(string.format(...)) +--~ egroup() +--~ else +--~ hbox(a,...) +--~ end +--~ end + +-- not yet used ... but will get variant at the tex end as well + +function context.sethboxregister(n) context([[\setbox %s\hbox]],n) end +function context.setvboxregister(n) context([[\setbox %s\vbox]],n) end + +function context.starthboxregister(n) + if type(n) == "number" then + context([[\setbox%s\hbox{]],n) + else + context([[\setbox\%s\hbox{]],n) + end +end + +function context.startvboxregister(n) + if type(n) == "number" then + context([[\setbox%s\vbox{]],n) + else + context([[\setbox\%s\vbox{]],n) + end +end + +context.stophboxregister = context.egroup +context.stopvboxregister = context.egroup + +function context.flushboxregister(n) + if type(n) == "number" then + context([[\box%s ]],n) + else + context([[\box\%s]],n) + end +end + +function context.beginvbox() + context([[\vbox{]]) -- we can do \bvbox ... \evbox (less tokens) +end + +function context.beginhbox() + context([[\hbox{]]) -- todo: use fast one +end + +context.endvbox = context.egroup +context.endhbox = context.egroup + +local function allocate(name,what,cmd) + local a = format("c_syst_last_allocated_%s",what) + local n = texcount[a] + 1 + if n <= texcount.c_syst_max_allocated_register then + texcount[a] = n + end + context("\\global\\expandafter\\%sdef\\csname %s\\endcsname %s\\relax",cmd or what,name,n) + return n +end + +function context.newdimen (name) return allocate(name,"dimen") end +function context.newskip (name) return allocate(name,"skip") end +function context.newcount (name) return allocate(name,"count") end +function context.newmuskip(name) return allocate(name,"muskip") end +function context.newtoks (name) return allocate(name,"toks") end +function context.newbox (name) return allocate(name,"box","mathchar") end diff --git a/tex/context/base/cldf-com.lua b/tex/context/base/cldf-com.lua index fa0dbed3e..5046343c9 100644 --- a/tex/context/base/cldf-com.lua +++ b/tex/context/base/cldf-com.lua @@ -1,36 +1,36 @@ -if not modules then modules = { } end modules ['cldf-com'] = { - version = 1.001, - comment = "companion to cldf-com.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local tostring = tostring -local context = context -local generics = context.generics -- needs documentation -local variables = interfaces.variables - -generics.starttabulate = "starttabulate" -- "start" .. variables.tabulate -- todo: e!start -generics.stoptabulate = "stoptabulate" -- "stop" .. 
variables.tabulate -- todo: e!stop - -local NC, NR = context.NC, context.NR - -local function tabulaterow(how,...) - for i=1,select("#",...) do - local ti = tostring(select(i,...)) - NC() - if how then - context[how](ti) - else - context(ti) - end - end - NC() - NR() -end - -function context.tabulaterow (...) tabulaterow(false, ...) end -function context.tabulaterowbold(...) tabulaterow("bold",...) end -function context.tabulaterowtype(...) tabulaterow("type",...) end -function context.tabulaterowtyp (...) tabulaterow("typ", ...) end +if not modules then modules = { } end modules ['cldf-com'] = { + version = 1.001, + comment = "companion to cldf-com.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local tostring = tostring +local context = context +local generics = context.generics -- needs documentation +local variables = interfaces.variables + +generics.starttabulate = "starttabulate" -- "start" .. variables.tabulate -- todo: e!start +generics.stoptabulate = "stoptabulate" -- "stop" .. variables.tabulate -- todo: e!stop + +local NC, NR = context.NC, context.NR + +local function tabulaterow(how,...) + for i=1,select("#",...) do + local ti = tostring(select(i,...)) + NC() + if how then + context[how](ti) + else + context(ti) + end + end + NC() + NR() +end + +function context.tabulaterow (...) tabulaterow(false, ...) end +function context.tabulaterowbold(...) tabulaterow("bold",...) end +function context.tabulaterowtype(...) tabulaterow("type",...) end +function context.tabulaterowtyp (...) tabulaterow("typ", ...) end diff --git a/tex/context/base/cldf-ini.lua b/tex/context/base/cldf-ini.lua index 4a7d9f025..c61a5b523 100644 --- a/tex/context/base/cldf-ini.lua +++ b/tex/context/base/cldf-ini.lua @@ -1,1066 +1,1066 @@ -if not modules then modules = { } end modules ['cldf-ini'] = { - version = 1.001, - comment = "companion to cldf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This started as an experiment: generating context code at the lua end. After all --- it is surprisingly simple to implement due to metatables. I was wondering if --- there was a more natural way to deal with commands at the lua end. Of course it's --- a bit slower but often more readable when mixed with lua code. It can also be handy --- when generating documents from databases or when constructing large tables or so. --- --- maybe optional checking against interface --- currently no coroutine trickery --- we could always use prtcatcodes (context.a_b_c) but then we loose protection --- tflush needs checking ... 
sort of weird that it's not a table --- __flushlines is an experiment and rather ugly so it will go away --- --- tex.print == line with endlinechar appended - --- todo: context("%bold{total: }%s",total) --- todo: context.documentvariable("title") - -local tex = tex - -context = context or { } -local context = context - -local format, gsub, validstring = string.format, string.gsub, string.valid -local next, type, tostring, tonumber, setmetatable, unpack, select = next, type, tostring, tonumber, setmetatable, unpack, select -local insert, remove, concat = table.insert, table.remove, table.concat -local lpegmatch, lpegC, lpegS, lpegP, lpegCc, patterns = lpeg.match, lpeg.C, lpeg.S, lpeg.P, lpeg.Cc, lpeg.patterns -local formatters = string.formatters -- using formatteds is slower in this case - -local texsprint = tex.sprint -local textprint = tex.tprint -local texprint = tex.print -local texwrite = tex.write -local texcount = tex.count - -local isnode = node.is_node -- after 0.65 just node.type -local writenode = node.write -local copynodelist = node.copy_list - -local catcodenumbers = catcodes.numbers - -local ctxcatcodes = catcodenumbers.ctxcatcodes -local prtcatcodes = catcodenumbers.prtcatcodes -local texcatcodes = catcodenumbers.texcatcodes -local txtcatcodes = catcodenumbers.txtcatcodes -local vrbcatcodes = catcodenumbers.vrbcatcodes -local xmlcatcodes = catcodenumbers.xmlcatcodes - -local flush = texsprint -local flushdirect = texprint -local flushraw = texwrite - -local report_context = logs.reporter("cld","tex") -local report_cld = logs.reporter("cld","stack") - -local processlines = true -- experiments.register("context.processlines", function(v) processlines = v end) - --- for tracing it's easier to have two stacks - -local _stack_f_, _n_f_ = { }, 0 -local _stack_n_, _n_n_ = { }, 0 - -local function _store_f_(ti) - _n_f_ = _n_f_ + 1 - _stack_f_[_n_f_] = ti - return _n_f_ -end - -local function _store_n_(ti) - _n_n_ = _n_n_ + 1 - _stack_n_[_n_n_] = ti - return _n_n_ -end - -local function _flush_f_(n) - local sn = _stack_f_[n] - if not sn then - report_cld("data with id %a cannot be found on stack",n) - else - local tn = type(sn) - if tn == "function" then - if not sn() and texcount["@@trialtypesetting"] == 0 then -- @@trialtypesetting is private! - _stack_f_[n] = nil - else - -- keep, beware, that way the stack can grow - end - else - if texcount["@@trialtypesetting"] == 0 then -- @@trialtypesetting is private! - writenode(sn) - _stack_f_[n] = nil - else - writenode(copynodelist(sn)) - -- keep, beware, that way the stack can grow - end - end - end -end - -local function _flush_n_(n) - local sn = _stack_n_[n] - if not sn then - report_cld("data with id %a cannot be found on stack",n) - elseif texcount["@@trialtypesetting"] == 0 then -- @@trialtypesetting is private! - writenode(sn) - _stack_n_[n] = nil - else - writenode(copynodelist(sn)) - -- keep, beware, that way the stack can grow - end -end - -function context.restart() - _stack_f_, _n_f_ = { }, 0 - _stack_n_, _n_n_ = { }, 0 -end - -context._stack_f_ = _stack_f_ -context._store_f_ = _store_f_ -context._flush_f_ = _flush_f_ _cldf_ = _flush_f_ - -context._stack_n_ = _stack_n_ -context._store_n_ = _store_n_ -context._flush_n_ = _flush_n_ _cldn_ = _flush_n_ - --- Should we keep the catcodes with the function? 
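
The header comment at the top of this file describes the central trick: unknown keys on the context table are resolved through a metatable __index and become functions that flush the corresponding TeX command. A stripped-down, self-contained sketch of that mechanism, for illustration only (print stands in for tex.sprint; catcodes, tables, functions and nodes are ignored):

local proxy = setmetatable({ }, {
    __index = function(t,k)
        local f = function(...)
            local s = { "\\" .. k }
            for i=1,select("#",...) do
                s[#s+1] = "{" .. tostring(select(i,...)) .. "}"
            end
            print(table.concat(s)) -- the real code flushes through tex.sprint
        end
        t[k] = f -- cache the accessor so later calls bypass __index
        return f
    end,
})

proxy.bold("important") -- prints: \bold{important}
proxy.par()             -- prints: \par
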
- -local catcodestack = { } -local currentcatcodes = ctxcatcodes -local contentcatcodes = ctxcatcodes - -local catcodes = { - ctx = ctxcatcodes, ctxcatcodes = ctxcatcodes, context = ctxcatcodes, - prt = prtcatcodes, prtcatcodes = prtcatcodes, protect = prtcatcodes, - tex = texcatcodes, texcatcodes = texcatcodes, plain = texcatcodes, - txt = txtcatcodes, txtcatcodes = txtcatcodes, text = txtcatcodes, - vrb = vrbcatcodes, vrbcatcodes = vrbcatcodes, verbatim = vrbcatcodes, - xml = xmlcatcodes, xmlcatcodes = xmlcatcodes, -} - -local function pushcatcodes(c) - insert(catcodestack,currentcatcodes) - currentcatcodes = (c and catcodes[c] or tonumber(c)) or currentcatcodes - contentcatcodes = currentcatcodes -end - -local function popcatcodes() - currentcatcodes = remove(catcodestack) or currentcatcodes - contentcatcodes = currentcatcodes -end - -context.pushcatcodes = pushcatcodes -context.popcatcodes = popcatcodes - --- -- -- - -local newline = patterns.newline -local space = patterns.spacer -local spacing = newline * space^0 -local content = lpegC((1-spacing)^1) -- texsprint -local emptyline = space^0 * newline^2 -- texprint("") -local endofline = space^0 * newline * space^0 -- texsprint(" ") -local simpleline = endofline * lpegP(-1) -- - -local verbose = lpegC((1-space-newline)^1) -local beginstripper = (lpegS(" \t")^1 * newline^1) / "" -local endstripper = beginstripper * lpegP(-1) - -local justaspace = space * lpegCc("") -local justanewline = newline * lpegCc("") - -local function n_content(s) - flush(contentcatcodes,s) -end - -local function n_verbose(s) - flush(vrbcatcodes,s) -end - -local function n_endofline() - flush(currentcatcodes," \r") -end - -local function n_emptyline() - flushdirect(currentcatcodes,"\r") -end - -local function n_simpleline() - flush(currentcatcodes," \r") -end - -local n_exception = "" - --- better a table specification - -function context.newtexthandler(specification) -- can also be used for verbose - specification = specification or { } - -- - local s_catcodes = specification.catcodes - -- - local f_before = specification.before - local f_after = specification.after - -- - local f_endofline = specification.endofline or n_endofline - local f_emptyline = specification.emptyline or n_emptyline - local f_simpleline = specification.simpleline or n_simpleline - local f_content = specification.content or n_content - local f_space = specification.space - -- - local p_exception = specification.exception - -- - if s_catcodes then - f_content = function(s) - flush(s_catcodes,s) - end - end - -- - local pattern - if f_space then - if p_exception then - local content = lpegC((1-spacing-p_exception)^1) - pattern = - ( - justaspace / f_space - + justanewline / f_endofline - + p_exception - + content / f_content - )^0 - else - local content = lpegC((1-space-endofline)^1) - pattern = - ( - justaspace / f_space - + justanewline / f_endofline - + content / f_content - )^0 - end - else - if p_exception then - local content = lpegC((1-spacing-p_exception)^1) - pattern = - simpleline / f_simpleline - + - ( - emptyline / f_emptyline - + endofline / f_endofline - + p_exception - + content / f_content - )^0 - else - local content = lpegC((1-spacing)^1) - pattern = - simpleline / f_simpleline - + - ( - emptyline / f_emptyline - + endofline / f_endofline - + content / f_content - )^0 - end - end - -- - if f_before then - pattern = (P(true) / f_before) * pattern - end - -- - if f_after then - pattern = pattern * (P(true) / f_after) - end - -- - return function(str) return 
lpegmatch(pattern,str) end, pattern -end - -function context.newverbosehandler(specification) -- a special variant for e.g. cdata in lxml-tex - specification = specification or { } - -- - local f_line = specification.line or function() flushdirect("\r") end - local f_space = specification.space or function() flush(" ") end - local f_content = specification.content or n_verbose - local f_before = specification.before - local f_after = specification.after - -- - local pattern = - justanewline / f_line -- so we get call{} - + verbose / f_content - + justaspace / f_space -- so we get call{} - -- - if specification.strip then - pattern = beginstripper^0 * (endstripper + pattern)^0 - else - pattern = pattern^0 - end - -- - if f_before then - pattern = (lpegP(true) / f_before) * pattern - end - -- - if f_after then - pattern = pattern * (lpegP(true) / f_after) - end - -- - return function(str) return lpegmatch(pattern,str) end, pattern -end - -local flushlines = context.newtexthandler { - content = n_content, - endofline = n_endofline, - emptyline = n_emptyline, - simpleline = n_simpleline, -} - -context.__flushlines = flushlines -- maybe context.helpers.flushtexlines -context.__flush = flush -context.__flushdirect = flushdirect - --- The next variant is only used in rare cases (buffer to mp): - -local printlines_ctx = ( - (newline) / function() texprint("") end + - (1-newline)^1 / function(s) texprint(ctxcatcodes,s) end * newline^-1 -)^0 - -local printlines_raw = ( - (newline) / function() texprint("") end + - (1-newline)^1 / function(s) texprint(s) end * newline^-1 -)^0 - -function context.printlines(str,raw) -- todo: see if via file is useable - if raw then - lpegmatch(printlines_raw,str) - else - lpegmatch(printlines_ctx,str) - end -end - --- This is the most reliable way to deal with nested buffers and other --- catcode sensitive data. - -local methodhandler = resolvers.methodhandler - -function context.viafile(data,tag) - if data and data ~= "" then - local filename = resolvers.savers.byscheme("virtual",validstring(tag,"viafile"),data) - -- context.startregime { "utf" } - context.input(filename) - -- context.stopregime() - end -end - --- -- -- "{" .. ti .. "}" is somewhat slower in a cld-mkiv run than "{",ti,"}" - -local containseol = patterns.containseol - -local function writer(parent,command,first,...) -- already optimized before call - local t = { first, ... } - flush(currentcatcodes,command) -- todo: ctx|prt|texcatcodes - local direct = false - for i=1,#t do - local ti = t[i] - local typ = type(ti) - if direct then - if typ == "string" or typ == "number" then - flush(currentcatcodes,ti) - else -- node.write - report_context("error: invalid use of direct in %a, only strings and numbers can be flushed directly, not %a",command,typ) - end - direct = false - elseif ti == nil then - -- nothing - elseif ti == "" then - flush(currentcatcodes,"{}") - elseif typ == "string" then - -- is processelines seen ? 
- if processlines and lpegmatch(containseol,ti) then - flush(currentcatcodes,"{") - local flushlines = parent.__flushlines or flushlines - flushlines(ti) - flush(currentcatcodes,"}") - elseif currentcatcodes == contentcatcodes then - flush(currentcatcodes,"{",ti,"}") - else - flush(currentcatcodes,"{") - flush(contentcatcodes,ti) - flush(currentcatcodes,"}") - end - elseif typ == "number" then - -- numbers never have funny catcodes - flush(currentcatcodes,"{",ti,"}") - elseif typ == "table" then - local tn = #ti - if tn == 0 then - local done = false - for k, v in next, ti do - if done then - if v == "" then - flush(currentcatcodes,",",k,'=') - else - flush(currentcatcodes,",",k,"={",v,"}") - end - else - if v == "" then - flush(currentcatcodes,"[",k,"=") - else - flush(currentcatcodes,"[",k,"={",v,"}") - end - done = true - end - end - if done then - flush(currentcatcodes,"]") - else - flush(currentcatcodes,"[]") - end - elseif tn == 1 then -- some 20% faster than the next loop - local tj = ti[1] - if type(tj) == "function" then - flush(currentcatcodes,"[\\cldf{",_store_f_(tj),"}]") - else - flush(currentcatcodes,"[",tj,"]") - end - else -- is concat really faster than flushes here? probably needed anyway (print artifacts) - for j=1,tn do - local tj = ti[j] - if type(tj) == "function" then - ti[j] = "\\cldf{" .. _store_f_(tj) .. "}" - end - end - flush(currentcatcodes,"[",concat(ti,","),"]") - end - elseif typ == "function" then - flush(currentcatcodes,"{\\cldf{",_store_f_(ti),"}}") -- todo: ctx|prt|texcatcodes - elseif typ == "boolean" then - if ti then - flushdirect(currentcatcodes,"\r") - else - direct = true - end - elseif typ == "thread" then - report_context("coroutines not supported as we cannot yield across boundaries") - elseif isnode(ti) then -- slow - flush(currentcatcodes,"{\\cldn{",_store_n_(ti),"}}") - else - report_context("error: %a gets a weird argument %a",command,ti) - end - end -end - -local generics = { } context.generics = generics - -local function indexer(parent,k) - if type(k) == "string" then - local c = "\\" .. tostring(generics[k] or k) - local f = function(first,...) - if first == nil then - flush(currentcatcodes,c) - else - return writer(parent,c,first,...) - end - end - parent[k] = f - return f - else - return context -- catch - end -end - --- Potential optimization: after the first call we know if there will be an --- argument. Of course there is the side effect that for instance abuse like --- context.NC(str) fails as well as optional arguments. So, we don't do this --- in practice. We just keep the next trick commented. The gain on some --- 100000 calls is not that large: 0.100 => 0.95 which is neglectable. --- --- local function constructor(parent,k,c,first,...) --- if first == nil then --- local f = function() --- flush(currentcatcodes,c) --- end --- parent[k] = f --- return f() --- else --- local f = function(...) --- return writer(parent,c,...) --- end --- parent[k] = f --- return f(first,...) --- end --- end --- --- local function indexer(parent,k) --- local c = "\\" .. tostring(generics[k] or k) --- local f = function(...) --- return constructor(parent,k,c,...) --- end --- parent[k] = f --- return f --- end - --- only for internal usage: - -function context.constructcsonly(k) -- not much faster than the next but more mem efficient - local c = "\\" .. tostring(generics[k] or k) - rawset(context, k, function() - flush(prtcatcodes,c) - end) -end - -function context.constructcs(k) - local c = "\\" .. 
tostring(generics[k] or k) - rawset(context, k, function(first,...) - if first == nil then - flush(prtcatcodes,c) - else - return writer(context,c,first,...) - end - end) -end - -local function caller(parent,f,a,...) - if not parent then - -- so we don't need to test in the calling (slower but often no issue) - elseif f ~= nil then - local typ = type(f) - if typ == "string" then - if a then - flush(contentcatcodes,formatters[f](a,...)) -- was currentcatcodes - elseif processlines and lpegmatch(containseol,f) then - local flushlines = parent.__flushlines or flushlines - flushlines(f) - else - flush(contentcatcodes,f) - end - elseif typ == "number" then - if a then - flush(currentcatcodes,f,a,...) - else - flush(currentcatcodes,f) - end - elseif typ == "function" then - -- ignored: a ... - flush(currentcatcodes,"{\\cldf{",_store_f_(f),"}}") -- todo: ctx|prt|texcatcodes - elseif typ == "boolean" then - if f then - if a ~= nil then - local flushlines = parent.__flushlines or flushlines - flushlines(a) - else - flushdirect(currentcatcodes,"\n") -- no \r, else issues with \startlines ... use context.par() otherwise - end - else - if a ~= nil then - -- no command, same as context(a,...) - writer(parent,"",a,...) - else - -- ignored - end - end - elseif typ == "thread" then - report_context("coroutines not supported as we cannot yield across boundaries") - elseif isnode(f) then -- slow - -- writenode(f) - flush(currentcatcodes,"\\cldn{",_store_n_(f),"}") - else - report_context("error: %a gets a weird argument %a","context",f) - end - end -end - -local defaultcaller = caller - -setmetatable(context, { __index = indexer, __call = caller } ) - --- now we tweak unprotect and protect - -function context.unprotect() - -- at the lua end - insert(catcodestack,currentcatcodes) - currentcatcodes = prtcatcodes - contentcatcodes = currentcatcodes - -- at the tex end - flush("\\unprotect") -end - -function context.protect() - -- at the tex end - flush("\\protect") - -- at the lua end - currentcatcodes = remove(catcodestack) or currentcatcodes - contentcatcodes = currentcatcodes -end - -function context.sprint(...) -- takes catcodes as first argument - flush(...) -end - -function context.fprint(catcodes,fmt,first,...) - if type(catcodes) == "number" then - if first then - flush(catcodes,formatters[fmt](first,...)) - else - flush(catcodes,fmt) - end - else - if fmt then - flush(formatters[catcodes](fmt,first,...)) - else - flush(catcodes) - end - end -end - -function tex.fprint(fmt,first,...) -- goodie - if first then - flush(currentcatcodes,formatters[fmt](first,...)) - else - flush(currentcatcodes,fmt) - end -end - --- logging - -local trace_stack = { } - -local normalflush = flush -local normalflushdirect = flushdirect -local normalflushraw = flushraw -local normalwriter = writer -local currenttrace = nil -local nofwriters = 0 -local nofflushes = 0 - -local visualizer = lpeg.replacer { - { "\n","<>" }, - { "\r","<>" }, -} - -statistics.register("traced context", function() - if nofwriters > 0 or nofflushes > 0 then - return format("writers: %s, flushes: %s, maxstack: %s",nofwriters,nofflushes,_n_f_) - end -end) - -local tracedwriter = function(parent,...) -- also catcodes ? - nofwriters = nofwriters + 1 - local savedflush = flush - local savedflushdirect = flushdirect -- unlikely to be used here - local t, n = { "w : - : " }, 1 - local traced = function(normal,catcodes,...) -- todo: check for catcodes - local s = concat({...}) - s = lpegmatch(visualizer,s) - n = n + 1 - t[n] = s - normal(catcodes,...) 
- end - flush = function(...) traced(normalflush, ...) end - flushdirect = function(...) traced(normalflushdirect,...) end - normalwriter(parent,...) - flush = savedflush - flushdirect = savedflushdirect - currenttrace(concat(t)) -end - --- we could reuse collapsed - -local traced = function(normal,one,two,...) - nofflushes = nofflushes + 1 - if two then - -- only catcodes if 'one' is number - normal(one,two,...) - local catcodes = type(one) == "number" and one - local arguments = catcodes and { two, ... } or { one, two, ... } - local collapsed, c = { formatters["f : %s : "](catcodes or '-') }, 1 - for i=1,#arguments do - local argument = arguments[i] - local argtype = type(argument) - c = c + 1 - if argtype == "string" then - collapsed[c] = lpegmatch(visualizer,argument) - elseif argtype == "number" then - collapsed[c] = argument - else - collapsed[c] = formatters["<<%S>>"](argument) - end - end - currenttrace(concat(collapsed)) - else - -- no catcodes - normal(one) - local argtype = type(one) - if argtype == "string" then - currenttrace(formatters["f : - : %s"](lpegmatch(visualizer,one))) - elseif argtype == "number" then - currenttrace(formatters["f : - : %s"](one)) - else - currenttrace(formatters["f : - : <<%S>>"](one)) - end - end -end - -local tracedflush = function(...) traced(normalflush, ...) end -local tracedflushdirect = function(...) traced(normalflushdirect,...) end - -local function pushlogger(trace) - trace = trace or report_context - insert(trace_stack,currenttrace) - currenttrace = trace - -- - flush = tracedflush - flushdirect = tracedflushdirect - writer = tracedwriter - -- - context.__flush = flush - context.__flushdirect = flushdirect - -- - return flush, writer, flushdirect -end - -local function poplogger() - currenttrace = remove(trace_stack) - if not currenttrace then - flush = normalflush - flushdirect = normalflushdirect - writer = normalwriter - -- - context.__flush = flush - context.__flushdirect = flushdirect - end - return flush, writer, flushdirect -end - -local function settracing(v) - if v then - return pushlogger(report_context) - else - return poplogger() - end -end - --- todo: share flushers so that we can define in other files - -trackers.register("context.trace",settracing) - -context.pushlogger = pushlogger -context.poplogger = poplogger -context.settracing = settracing - --- -- untested, no time now: --- --- local tracestack, tracestacktop = { }, false --- --- function context.pushtracing(v) --- insert(tracestack,tracestacktop) --- if type(v) == "function" then --- pushlogger(v) --- v = true --- else --- pushlogger() --- end --- tracestacktop = v --- settracing(v) --- end --- --- function context.poptracing() --- poplogger() --- tracestacktop = remove(tracestack) or false --- settracing(tracestacktop) --- end - -function context.getlogger() - return flush, writer, flush_direct -end - -local trace_cld = false trackers.register("context.files", function(v) trace_cld = v end) - -function context.runfile(filename) - local foundname = resolvers.findtexfile(file.addsuffix(filename,"cld")) or "" - if foundname ~= "" then - local ok = dofile(foundname) - if type(ok) == "function" then - if trace_cld then - report_context("begin of file %a (function call)",foundname) - end - ok() - if trace_cld then - report_context("end of file %a (function call)",foundname) - end - elseif ok then - report_context("file %a is processed and returns true",foundname) - else - report_context("file %a is processed and returns nothing",foundname) - end - else - 
report_context("unknown file %a",filename) - end -end - --- some functions - -function context.direct(first,...) - if first ~= nil then - return writer(context,"",first,...) - end -end - --- context.delayed (todo: lines) - -local delayed = { } context.delayed = delayed -- maybe also store them - -local function indexer(parent,k) - local f = function(...) - local a = { ... } - return function() - return context[k](unpack(a)) - end - end - parent[k] = f - return f -end - -local function caller(parent,...) -- todo: nodes - local a = { ... } - return function() - return context(unpack(a)) - end -end - --- local function indexer(parent,k) --- local f = function(a,...) --- if not a then --- return function() --- return context[k]() --- end --- elseif select("#",...) == 0 then --- return function() --- return context[k](a) --- end --- elseif a then --- local t = { ... } --- return function() --- return context[k](a,unpack(t)) --- end --- end --- end --- parent[k] = f --- return f --- end --- --- local function caller(parent,a,...) -- todo: nodes --- if not a then --- return function() --- return context() --- end --- elseif select("#",...) == 0 then --- return function() --- return context(a) --- end --- elseif a then --- local t = { ... } --- return function() --- return context(a,unpack(t)) --- end --- end --- end - -setmetatable(delayed, { __index = indexer, __call = caller } ) - --- context.nested (todo: lines) - -local nested = { } context.nested = nested - -local function indexer(parent,k) - local f = function(...) - local t, savedflush, n = { }, flush, 0 - flush = function(c,f,s,...) -- catcodes are ignored - n = n + 1 - t[n] = s and concat{f,s,...} or f -- optimized for #args == 1 - end - context[k](...) - flush = savedflush - return concat(t) - end - parent[k] = f - return f -end - -local function caller(parent,...) - local t, savedflush, n = { }, flush, 0 - flush = function(c,f,s,...) -- catcodes are ignored - n = n + 1 - t[n] = s and concat{f,s,...} or f -- optimized for #args == 1 - end - context(...) - flush = savedflush - return concat(t) -end - -setmetatable(nested, { __index = indexer, __call = caller } ) - --- verbatim - -local verbatim = { } context.verbatim = verbatim - -local function indexer(parent,k) - local command = context[k] - local f = function(...) - local savedcatcodes = contentcatcodes - contentcatcodes = vrbcatcodes - command(...) - contentcatcodes = savedcatcodes - end - parent[k] = f - return f -end - -local function caller(parent,...) - local savedcatcodes = contentcatcodes - contentcatcodes = vrbcatcodes - defaultcaller(parent,...) - contentcatcodes = savedcatcodes -end - -setmetatable(verbatim, { __index = indexer, __call = caller } ) - --- formatted - -local formatted = { } context.formatted = formatted - --- local function indexer(parent,k) --- local command = context[k] --- local f = function(fmt,...) --- command(formatters[fmt](...)) --- end --- parent[k] = f --- return f --- end - -local function indexer(parent,k) - if type(k) == "string" then - local c = "\\" .. tostring(generics[k] or k) - local f = function(first,second,...) - if first == nil then - flush(currentcatcodes,c) - elseif second then - return writer(parent,c,formatters[first](second,...)) - else - return writer(parent,c,first) - end - end - parent[k] = f - return f - else - return context -- catch - end -end - --- local function caller(parent,...) --- context.fprint(...) --- end - -local function caller(parent,catcodes,fmt,first,...) 
- if type(catcodes) == "number" then - if first then - flush(catcodes,formatters[fmt](first,...)) - else - flush(catcodes,fmt) - end - else - if fmt then - flush(formatters[catcodes](fmt,first,...)) - else - flush(catcodes) - end - end -end - -setmetatable(formatted, { __index = indexer, __call = caller } ) - --- metafun (this will move to another file) - -local metafun = { } context.metafun = metafun - -local mpdrawing = "\\MPdrawing" - -local function caller(parent,f,a,...) - if not parent then - -- skip - elseif f then - local typ = type(f) - if typ == "string" then - if a then - flush(currentcatcodes,mpdrawing,"{",formatters[f](a,...),"}") - else - flush(currentcatcodes,mpdrawing,"{",f,"}") - end - elseif typ == "number" then - if a then - flush(currentcatcodes,mpdrawing,"{",f,a,...,"}") - else - flush(currentcatcodes,mpdrawing,"{",f,"}") - end - elseif typ == "function" then - -- ignored: a ... - flush(currentcatcodes,mpdrawing,"{\\cldf{",store_(f),"}}") - elseif typ == "boolean" then - -- ignored: a ... - if f then - flush(currentcatcodes,mpdrawing,"{^^M}") - else - report_context("warning: %a gets argument 'false' which is currently unsupported","metafun") - end - else - report_context("error: %a gets a weird argument %a","metafun",tostring(f)) - end - end -end - -setmetatable(metafun, { __call = caller } ) - -function metafun.start() - context.resetMPdrawing() -end - -function metafun.stop() - context.MPdrawingdonetrue() - context.getMPdrawing() -end - -function metafun.color(name) - return formatters[ [[\MPcolor{%s}]] ](name) -end - --- metafun.delayed - -local delayed = { } metafun.delayed = delayed - -local function indexer(parent,k) - local f = function(...) - local a = { ... } - return function() - return metafun[k](unpack(a)) - end - end - parent[k] = f - return f -end - - -local function caller(parent,...) - local a = { ... } - return function() - return metafun(unpack(a)) - end -end - -setmetatable(delayed, { __index = indexer, __call = caller } ) - --- helpers: - -function context.concat(...) - context(concat(...)) -end +if not modules then modules = { } end modules ['cldf-ini'] = { + version = 1.001, + comment = "companion to cldf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This started as an experiment: generating context code at the lua end. After all +-- it is surprisingly simple to implement due to metatables. I was wondering if +-- there was a more natural way to deal with commands at the lua end. Of course it's +-- a bit slower but often more readable when mixed with lua code. It can also be handy +-- when generating documents from databases or when constructing large tables or so. +-- +-- maybe optional checking against interface +-- currently no coroutine trickery +-- we could always use prtcatcodes (context.a_b_c) but then we loose protection +-- tflush needs checking ... 
sort of weird that it's not a table +-- __flushlines is an experiment and rather ugly so it will go away +-- +-- tex.print == line with endlinechar appended + +-- todo: context("%bold{total: }%s",total) +-- todo: context.documentvariable("title") + +local tex = tex + +context = context or { } +local context = context + +local format, gsub, validstring = string.format, string.gsub, string.valid +local next, type, tostring, tonumber, setmetatable, unpack, select = next, type, tostring, tonumber, setmetatable, unpack, select +local insert, remove, concat = table.insert, table.remove, table.concat +local lpegmatch, lpegC, lpegS, lpegP, lpegCc, patterns = lpeg.match, lpeg.C, lpeg.S, lpeg.P, lpeg.Cc, lpeg.patterns +local formatters = string.formatters -- using formatteds is slower in this case + +local texsprint = tex.sprint +local textprint = tex.tprint +local texprint = tex.print +local texwrite = tex.write +local texcount = tex.count + +local isnode = node.is_node -- after 0.65 just node.type +local writenode = node.write +local copynodelist = node.copy_list + +local catcodenumbers = catcodes.numbers + +local ctxcatcodes = catcodenumbers.ctxcatcodes +local prtcatcodes = catcodenumbers.prtcatcodes +local texcatcodes = catcodenumbers.texcatcodes +local txtcatcodes = catcodenumbers.txtcatcodes +local vrbcatcodes = catcodenumbers.vrbcatcodes +local xmlcatcodes = catcodenumbers.xmlcatcodes + +local flush = texsprint +local flushdirect = texprint +local flushraw = texwrite + +local report_context = logs.reporter("cld","tex") +local report_cld = logs.reporter("cld","stack") + +local processlines = true -- experiments.register("context.processlines", function(v) processlines = v end) + +-- for tracing it's easier to have two stacks + +local _stack_f_, _n_f_ = { }, 0 +local _stack_n_, _n_n_ = { }, 0 + +local function _store_f_(ti) + _n_f_ = _n_f_ + 1 + _stack_f_[_n_f_] = ti + return _n_f_ +end + +local function _store_n_(ti) + _n_n_ = _n_n_ + 1 + _stack_n_[_n_n_] = ti + return _n_n_ +end + +local function _flush_f_(n) + local sn = _stack_f_[n] + if not sn then + report_cld("data with id %a cannot be found on stack",n) + else + local tn = type(sn) + if tn == "function" then + if not sn() and texcount["@@trialtypesetting"] == 0 then -- @@trialtypesetting is private! + _stack_f_[n] = nil + else + -- keep, beware, that way the stack can grow + end + else + if texcount["@@trialtypesetting"] == 0 then -- @@trialtypesetting is private! + writenode(sn) + _stack_f_[n] = nil + else + writenode(copynodelist(sn)) + -- keep, beware, that way the stack can grow + end + end + end +end + +local function _flush_n_(n) + local sn = _stack_n_[n] + if not sn then + report_cld("data with id %a cannot be found on stack",n) + elseif texcount["@@trialtypesetting"] == 0 then -- @@trialtypesetting is private! + writenode(sn) + _stack_n_[n] = nil + else + writenode(copynodelist(sn)) + -- keep, beware, that way the stack can grow + end +end + +function context.restart() + _stack_f_, _n_f_ = { }, 0 + _stack_n_, _n_n_ = { }, 0 +end + +context._stack_f_ = _stack_f_ +context._store_f_ = _store_f_ +context._flush_f_ = _flush_f_ _cldf_ = _flush_f_ + +context._stack_n_ = _stack_n_ +context._store_n_ = _store_n_ +context._flush_n_ = _flush_n_ _cldn_ = _flush_n_ + +-- Should we keep the catcodes with the function? 
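
For reference while reading the writer a bit further down: it serializes each Lua argument into TeX input, with strings and numbers braced, hash tables turned into [key={value}] option lists and array tables into [a,b,...] lists. Roughly, and ignoring catcodes, booleans, functions and nodes (illustration only, not part of the patch):

-- context.bold("important")                --> \bold{important}
-- context.setupcolors({ state = "start" }) --> \setupcolors[state={start}]
-- context.usemodule({ "chart" })           --> \usemodule[chart]
-- context.dorecurse(3,"x")                 --> \dorecurse{3}{x}
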
+ +local catcodestack = { } +local currentcatcodes = ctxcatcodes +local contentcatcodes = ctxcatcodes + +local catcodes = { + ctx = ctxcatcodes, ctxcatcodes = ctxcatcodes, context = ctxcatcodes, + prt = prtcatcodes, prtcatcodes = prtcatcodes, protect = prtcatcodes, + tex = texcatcodes, texcatcodes = texcatcodes, plain = texcatcodes, + txt = txtcatcodes, txtcatcodes = txtcatcodes, text = txtcatcodes, + vrb = vrbcatcodes, vrbcatcodes = vrbcatcodes, verbatim = vrbcatcodes, + xml = xmlcatcodes, xmlcatcodes = xmlcatcodes, +} + +local function pushcatcodes(c) + insert(catcodestack,currentcatcodes) + currentcatcodes = (c and catcodes[c] or tonumber(c)) or currentcatcodes + contentcatcodes = currentcatcodes +end + +local function popcatcodes() + currentcatcodes = remove(catcodestack) or currentcatcodes + contentcatcodes = currentcatcodes +end + +context.pushcatcodes = pushcatcodes +context.popcatcodes = popcatcodes + +-- -- -- + +local newline = patterns.newline +local space = patterns.spacer +local spacing = newline * space^0 +local content = lpegC((1-spacing)^1) -- texsprint +local emptyline = space^0 * newline^2 -- texprint("") +local endofline = space^0 * newline * space^0 -- texsprint(" ") +local simpleline = endofline * lpegP(-1) -- + +local verbose = lpegC((1-space-newline)^1) +local beginstripper = (lpegS(" \t")^1 * newline^1) / "" +local endstripper = beginstripper * lpegP(-1) + +local justaspace = space * lpegCc("") +local justanewline = newline * lpegCc("") + +local function n_content(s) + flush(contentcatcodes,s) +end + +local function n_verbose(s) + flush(vrbcatcodes,s) +end + +local function n_endofline() + flush(currentcatcodes," \r") +end + +local function n_emptyline() + flushdirect(currentcatcodes,"\r") +end + +local function n_simpleline() + flush(currentcatcodes," \r") +end + +local n_exception = "" + +-- better a table specification + +function context.newtexthandler(specification) -- can also be used for verbose + specification = specification or { } + -- + local s_catcodes = specification.catcodes + -- + local f_before = specification.before + local f_after = specification.after + -- + local f_endofline = specification.endofline or n_endofline + local f_emptyline = specification.emptyline or n_emptyline + local f_simpleline = specification.simpleline or n_simpleline + local f_content = specification.content or n_content + local f_space = specification.space + -- + local p_exception = specification.exception + -- + if s_catcodes then + f_content = function(s) + flush(s_catcodes,s) + end + end + -- + local pattern + if f_space then + if p_exception then + local content = lpegC((1-spacing-p_exception)^1) + pattern = + ( + justaspace / f_space + + justanewline / f_endofline + + p_exception + + content / f_content + )^0 + else + local content = lpegC((1-space-endofline)^1) + pattern = + ( + justaspace / f_space + + justanewline / f_endofline + + content / f_content + )^0 + end + else + if p_exception then + local content = lpegC((1-spacing-p_exception)^1) + pattern = + simpleline / f_simpleline + + + ( + emptyline / f_emptyline + + endofline / f_endofline + + p_exception + + content / f_content + )^0 + else + local content = lpegC((1-spacing)^1) + pattern = + simpleline / f_simpleline + + + ( + emptyline / f_emptyline + + endofline / f_endofline + + content / f_content + )^0 + end + end + -- + if f_before then + pattern = (P(true) / f_before) * pattern + end + -- + if f_after then + pattern = pattern * (P(true) / f_after) + end + -- + return function(str) return 
lpegmatch(pattern,str) end, pattern +end + +function context.newverbosehandler(specification) -- a special variant for e.g. cdata in lxml-tex + specification = specification or { } + -- + local f_line = specification.line or function() flushdirect("\r") end + local f_space = specification.space or function() flush(" ") end + local f_content = specification.content or n_verbose + local f_before = specification.before + local f_after = specification.after + -- + local pattern = + justanewline / f_line -- so we get call{} + + verbose / f_content + + justaspace / f_space -- so we get call{} + -- + if specification.strip then + pattern = beginstripper^0 * (endstripper + pattern)^0 + else + pattern = pattern^0 + end + -- + if f_before then + pattern = (lpegP(true) / f_before) * pattern + end + -- + if f_after then + pattern = pattern * (lpegP(true) / f_after) + end + -- + return function(str) return lpegmatch(pattern,str) end, pattern +end + +local flushlines = context.newtexthandler { + content = n_content, + endofline = n_endofline, + emptyline = n_emptyline, + simpleline = n_simpleline, +} + +context.__flushlines = flushlines -- maybe context.helpers.flushtexlines +context.__flush = flush +context.__flushdirect = flushdirect + +-- The next variant is only used in rare cases (buffer to mp): + +local printlines_ctx = ( + (newline) / function() texprint("") end + + (1-newline)^1 / function(s) texprint(ctxcatcodes,s) end * newline^-1 +)^0 + +local printlines_raw = ( + (newline) / function() texprint("") end + + (1-newline)^1 / function(s) texprint(s) end * newline^-1 +)^0 + +function context.printlines(str,raw) -- todo: see if via file is useable + if raw then + lpegmatch(printlines_raw,str) + else + lpegmatch(printlines_ctx,str) + end +end + +-- This is the most reliable way to deal with nested buffers and other +-- catcode sensitive data. + +local methodhandler = resolvers.methodhandler + +function context.viafile(data,tag) + if data and data ~= "" then + local filename = resolvers.savers.byscheme("virtual",validstring(tag,"viafile"),data) + -- context.startregime { "utf" } + context.input(filename) + -- context.stopregime() + end +end + +-- -- -- "{" .. ti .. "}" is somewhat slower in a cld-mkiv run than "{",ti,"}" + +local containseol = patterns.containseol + +local function writer(parent,command,first,...) -- already optimized before call + local t = { first, ... } + flush(currentcatcodes,command) -- todo: ctx|prt|texcatcodes + local direct = false + for i=1,#t do + local ti = t[i] + local typ = type(ti) + if direct then + if typ == "string" or typ == "number" then + flush(currentcatcodes,ti) + else -- node.write + report_context("error: invalid use of direct in %a, only strings and numbers can be flushed directly, not %a",command,typ) + end + direct = false + elseif ti == nil then + -- nothing + elseif ti == "" then + flush(currentcatcodes,"{}") + elseif typ == "string" then + -- is processelines seen ? 
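-- (editorial note, not part of the patch) when a string argument contains a
-- newline, the branch below braces it and feeds it through the line handler,
-- so a call like context.framed("first\nsecond") is flushed roughly as
-- \framed{ first <endofline> second } rather than as one raw chunk with an
-- embedded newline; single-line strings take the simpler elseif branches below.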
+ if processlines and lpegmatch(containseol,ti) then + flush(currentcatcodes,"{") + local flushlines = parent.__flushlines or flushlines + flushlines(ti) + flush(currentcatcodes,"}") + elseif currentcatcodes == contentcatcodes then + flush(currentcatcodes,"{",ti,"}") + else + flush(currentcatcodes,"{") + flush(contentcatcodes,ti) + flush(currentcatcodes,"}") + end + elseif typ == "number" then + -- numbers never have funny catcodes + flush(currentcatcodes,"{",ti,"}") + elseif typ == "table" then + local tn = #ti + if tn == 0 then + local done = false + for k, v in next, ti do + if done then + if v == "" then + flush(currentcatcodes,",",k,'=') + else + flush(currentcatcodes,",",k,"={",v,"}") + end + else + if v == "" then + flush(currentcatcodes,"[",k,"=") + else + flush(currentcatcodes,"[",k,"={",v,"}") + end + done = true + end + end + if done then + flush(currentcatcodes,"]") + else + flush(currentcatcodes,"[]") + end + elseif tn == 1 then -- some 20% faster than the next loop + local tj = ti[1] + if type(tj) == "function" then + flush(currentcatcodes,"[\\cldf{",_store_f_(tj),"}]") + else + flush(currentcatcodes,"[",tj,"]") + end + else -- is concat really faster than flushes here? probably needed anyway (print artifacts) + for j=1,tn do + local tj = ti[j] + if type(tj) == "function" then + ti[j] = "\\cldf{" .. _store_f_(tj) .. "}" + end + end + flush(currentcatcodes,"[",concat(ti,","),"]") + end + elseif typ == "function" then + flush(currentcatcodes,"{\\cldf{",_store_f_(ti),"}}") -- todo: ctx|prt|texcatcodes + elseif typ == "boolean" then + if ti then + flushdirect(currentcatcodes,"\r") + else + direct = true + end + elseif typ == "thread" then + report_context("coroutines not supported as we cannot yield across boundaries") + elseif isnode(ti) then -- slow + flush(currentcatcodes,"{\\cldn{",_store_n_(ti),"}}") + else + report_context("error: %a gets a weird argument %a",command,ti) + end + end +end + +local generics = { } context.generics = generics + +local function indexer(parent,k) + if type(k) == "string" then + local c = "\\" .. tostring(generics[k] or k) + local f = function(first,...) + if first == nil then + flush(currentcatcodes,c) + else + return writer(parent,c,first,...) + end + end + parent[k] = f + return f + else + return context -- catch + end +end + +-- Potential optimization: after the first call we know if there will be an +-- argument. Of course there is the side effect that for instance abuse like +-- context.NC(str) fails as well as optional arguments. So, we don't do this +-- in practice. We just keep the next trick commented. The gain on some +-- 100000 calls is not that large: 0.100 => 0.95 which is neglectable. +-- +-- local function constructor(parent,k,c,first,...) +-- if first == nil then +-- local f = function() +-- flush(currentcatcodes,c) +-- end +-- parent[k] = f +-- return f() +-- else +-- local f = function(...) +-- return writer(parent,c,...) +-- end +-- parent[k] = f +-- return f(first,...) +-- end +-- end +-- +-- local function indexer(parent,k) +-- local c = "\\" .. tostring(generics[k] or k) +-- local f = function(...) +-- return constructor(parent,k,c,...) +-- end +-- parent[k] = f +-- return f +-- end + +-- only for internal usage: + +function context.constructcsonly(k) -- not much faster than the next but more mem efficient + local c = "\\" .. tostring(generics[k] or k) + rawset(context, k, function() + flush(prtcatcodes,c) + end) +end + +function context.constructcs(k) + local c = "\\" .. 
tostring(generics[k] or k) + rawset(context, k, function(first,...) + if first == nil then + flush(prtcatcodes,c) + else + return writer(context,c,first,...) + end + end) +end + +local function caller(parent,f,a,...) + if not parent then + -- so we don't need to test in the calling (slower but often no issue) + elseif f ~= nil then + local typ = type(f) + if typ == "string" then + if a then + flush(contentcatcodes,formatters[f](a,...)) -- was currentcatcodes + elseif processlines and lpegmatch(containseol,f) then + local flushlines = parent.__flushlines or flushlines + flushlines(f) + else + flush(contentcatcodes,f) + end + elseif typ == "number" then + if a then + flush(currentcatcodes,f,a,...) + else + flush(currentcatcodes,f) + end + elseif typ == "function" then + -- ignored: a ... + flush(currentcatcodes,"{\\cldf{",_store_f_(f),"}}") -- todo: ctx|prt|texcatcodes + elseif typ == "boolean" then + if f then + if a ~= nil then + local flushlines = parent.__flushlines or flushlines + flushlines(a) + else + flushdirect(currentcatcodes,"\n") -- no \r, else issues with \startlines ... use context.par() otherwise + end + else + if a ~= nil then + -- no command, same as context(a,...) + writer(parent,"",a,...) + else + -- ignored + end + end + elseif typ == "thread" then + report_context("coroutines not supported as we cannot yield across boundaries") + elseif isnode(f) then -- slow + -- writenode(f) + flush(currentcatcodes,"\\cldn{",_store_n_(f),"}") + else + report_context("error: %a gets a weird argument %a","context",f) + end + end +end + +local defaultcaller = caller + +setmetatable(context, { __index = indexer, __call = caller } ) + +-- now we tweak unprotect and protect + +function context.unprotect() + -- at the lua end + insert(catcodestack,currentcatcodes) + currentcatcodes = prtcatcodes + contentcatcodes = currentcatcodes + -- at the tex end + flush("\\unprotect") +end + +function context.protect() + -- at the tex end + flush("\\protect") + -- at the lua end + currentcatcodes = remove(catcodestack) or currentcatcodes + contentcatcodes = currentcatcodes +end + +function context.sprint(...) -- takes catcodes as first argument + flush(...) +end + +function context.fprint(catcodes,fmt,first,...) + if type(catcodes) == "number" then + if first then + flush(catcodes,formatters[fmt](first,...)) + else + flush(catcodes,fmt) + end + else + if fmt then + flush(formatters[catcodes](fmt,first,...)) + else + flush(catcodes) + end + end +end + +function tex.fprint(fmt,first,...) -- goodie + if first then + flush(currentcatcodes,formatters[fmt](first,...)) + else + flush(currentcatcodes,fmt) + end +end + +-- logging + +local trace_stack = { } + +local normalflush = flush +local normalflushdirect = flushdirect +local normalflushraw = flushraw +local normalwriter = writer +local currenttrace = nil +local nofwriters = 0 +local nofflushes = 0 + +local visualizer = lpeg.replacer { + { "\n","<>" }, + { "\r","<>" }, +} + +statistics.register("traced context", function() + if nofwriters > 0 or nofflushes > 0 then + return format("writers: %s, flushes: %s, maxstack: %s",nofwriters,nofflushes,_n_f_) + end +end) + +local tracedwriter = function(parent,...) -- also catcodes ? + nofwriters = nofwriters + 1 + local savedflush = flush + local savedflushdirect = flushdirect -- unlikely to be used here + local t, n = { "w : - : " }, 1 + local traced = function(normal,catcodes,...) -- todo: check for catcodes + local s = concat({...}) + s = lpegmatch(visualizer,s) + n = n + 1 + t[n] = s + normal(catcodes,...) 
+ end + flush = function(...) traced(normalflush, ...) end + flushdirect = function(...) traced(normalflushdirect,...) end + normalwriter(parent,...) + flush = savedflush + flushdirect = savedflushdirect + currenttrace(concat(t)) +end + +-- we could reuse collapsed + +local traced = function(normal,one,two,...) + nofflushes = nofflushes + 1 + if two then + -- only catcodes if 'one' is number + normal(one,two,...) + local catcodes = type(one) == "number" and one + local arguments = catcodes and { two, ... } or { one, two, ... } + local collapsed, c = { formatters["f : %s : "](catcodes or '-') }, 1 + for i=1,#arguments do + local argument = arguments[i] + local argtype = type(argument) + c = c + 1 + if argtype == "string" then + collapsed[c] = lpegmatch(visualizer,argument) + elseif argtype == "number" then + collapsed[c] = argument + else + collapsed[c] = formatters["<<%S>>"](argument) + end + end + currenttrace(concat(collapsed)) + else + -- no catcodes + normal(one) + local argtype = type(one) + if argtype == "string" then + currenttrace(formatters["f : - : %s"](lpegmatch(visualizer,one))) + elseif argtype == "number" then + currenttrace(formatters["f : - : %s"](one)) + else + currenttrace(formatters["f : - : <<%S>>"](one)) + end + end +end + +local tracedflush = function(...) traced(normalflush, ...) end +local tracedflushdirect = function(...) traced(normalflushdirect,...) end + +local function pushlogger(trace) + trace = trace or report_context + insert(trace_stack,currenttrace) + currenttrace = trace + -- + flush = tracedflush + flushdirect = tracedflushdirect + writer = tracedwriter + -- + context.__flush = flush + context.__flushdirect = flushdirect + -- + return flush, writer, flushdirect +end + +local function poplogger() + currenttrace = remove(trace_stack) + if not currenttrace then + flush = normalflush + flushdirect = normalflushdirect + writer = normalwriter + -- + context.__flush = flush + context.__flushdirect = flushdirect + end + return flush, writer, flushdirect +end + +local function settracing(v) + if v then + return pushlogger(report_context) + else + return poplogger() + end +end + +-- todo: share flushers so that we can define in other files + +trackers.register("context.trace",settracing) + +context.pushlogger = pushlogger +context.poplogger = poplogger +context.settracing = settracing + +-- -- untested, no time now: +-- +-- local tracestack, tracestacktop = { }, false +-- +-- function context.pushtracing(v) +-- insert(tracestack,tracestacktop) +-- if type(v) == "function" then +-- pushlogger(v) +-- v = true +-- else +-- pushlogger() +-- end +-- tracestacktop = v +-- settracing(v) +-- end +-- +-- function context.poptracing() +-- poplogger() +-- tracestacktop = remove(tracestack) or false +-- settracing(tracestacktop) +-- end + +function context.getlogger() + return flush, writer, flush_direct +end + +local trace_cld = false trackers.register("context.files", function(v) trace_cld = v end) + +function context.runfile(filename) + local foundname = resolvers.findtexfile(file.addsuffix(filename,"cld")) or "" + if foundname ~= "" then + local ok = dofile(foundname) + if type(ok) == "function" then + if trace_cld then + report_context("begin of file %a (function call)",foundname) + end + ok() + if trace_cld then + report_context("end of file %a (function call)",foundname) + end + elseif ok then + report_context("file %a is processed and returns true",foundname) + else + report_context("file %a is processed and returns nothing",foundname) + end + else + 
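-- Illustrative sketch, not part of this patch: the logger stack defined above can
-- be exercised directly; here plain print is used as the tracing target.
--
--   context.pushlogger(print)   -- flushes now also go through the tracer
--   context("traced text")      -- print receives a line like "f : <catcodes> : traced text"
--   context.poplogger()         -- back to the normal flushers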
report_context("unknown file %a",filename) + end +end + +-- some functions + +function context.direct(first,...) + if first ~= nil then + return writer(context,"",first,...) + end +end + +-- context.delayed (todo: lines) + +local delayed = { } context.delayed = delayed -- maybe also store them + +local function indexer(parent,k) + local f = function(...) + local a = { ... } + return function() + return context[k](unpack(a)) + end + end + parent[k] = f + return f +end + +local function caller(parent,...) -- todo: nodes + local a = { ... } + return function() + return context(unpack(a)) + end +end + +-- local function indexer(parent,k) +-- local f = function(a,...) +-- if not a then +-- return function() +-- return context[k]() +-- end +-- elseif select("#",...) == 0 then +-- return function() +-- return context[k](a) +-- end +-- elseif a then +-- local t = { ... } +-- return function() +-- return context[k](a,unpack(t)) +-- end +-- end +-- end +-- parent[k] = f +-- return f +-- end +-- +-- local function caller(parent,a,...) -- todo: nodes +-- if not a then +-- return function() +-- return context() +-- end +-- elseif select("#",...) == 0 then +-- return function() +-- return context(a) +-- end +-- elseif a then +-- local t = { ... } +-- return function() +-- return context(a,unpack(t)) +-- end +-- end +-- end + +setmetatable(delayed, { __index = indexer, __call = caller } ) + +-- context.nested (todo: lines) + +local nested = { } context.nested = nested + +local function indexer(parent,k) + local f = function(...) + local t, savedflush, n = { }, flush, 0 + flush = function(c,f,s,...) -- catcodes are ignored + n = n + 1 + t[n] = s and concat{f,s,...} or f -- optimized for #args == 1 + end + context[k](...) + flush = savedflush + return concat(t) + end + parent[k] = f + return f +end + +local function caller(parent,...) + local t, savedflush, n = { }, flush, 0 + flush = function(c,f,s,...) -- catcodes are ignored + n = n + 1 + t[n] = s and concat{f,s,...} or f -- optimized for #args == 1 + end + context(...) + flush = savedflush + return concat(t) +end + +setmetatable(nested, { __index = indexer, __call = caller } ) + +-- verbatim + +local verbatim = { } context.verbatim = verbatim + +local function indexer(parent,k) + local command = context[k] + local f = function(...) + local savedcatcodes = contentcatcodes + contentcatcodes = vrbcatcodes + command(...) + contentcatcodes = savedcatcodes + end + parent[k] = f + return f +end + +local function caller(parent,...) + local savedcatcodes = contentcatcodes + contentcatcodes = vrbcatcodes + defaultcaller(parent,...) + contentcatcodes = savedcatcodes +end + +setmetatable(verbatim, { __index = indexer, __call = caller } ) + +-- formatted + +local formatted = { } context.formatted = formatted + +-- local function indexer(parent,k) +-- local command = context[k] +-- local f = function(fmt,...) +-- command(formatters[fmt](...)) +-- end +-- parent[k] = f +-- return f +-- end + +local function indexer(parent,k) + if type(k) == "string" then + local c = "\\" .. tostring(generics[k] or k) + local f = function(first,second,...) + if first == nil then + flush(currentcatcodes,c) + elseif second then + return writer(parent,c,formatters[first](second,...)) + else + return writer(parent,c,first) + end + end + parent[k] = f + return f + else + return context -- catch + end +end + +-- local function caller(parent,...) +-- context.fprint(...) +-- end + +local function caller(parent,catcodes,fmt,first,...) 
+ if type(catcodes) == "number" then + if first then + flush(catcodes,formatters[fmt](first,...)) + else + flush(catcodes,fmt) + end + else + if fmt then + flush(formatters[catcodes](fmt,first,...)) + else + flush(catcodes) + end + end +end + +setmetatable(formatted, { __index = indexer, __call = caller } ) + +-- metafun (this will move to another file) + +local metafun = { } context.metafun = metafun + +local mpdrawing = "\\MPdrawing" + +local function caller(parent,f,a,...) + if not parent then + -- skip + elseif f then + local typ = type(f) + if typ == "string" then + if a then + flush(currentcatcodes,mpdrawing,"{",formatters[f](a,...),"}") + else + flush(currentcatcodes,mpdrawing,"{",f,"}") + end + elseif typ == "number" then + if a then + flush(currentcatcodes,mpdrawing,"{",f,a,...,"}") + else + flush(currentcatcodes,mpdrawing,"{",f,"}") + end + elseif typ == "function" then + -- ignored: a ... + flush(currentcatcodes,mpdrawing,"{\\cldf{",store_(f),"}}") + elseif typ == "boolean" then + -- ignored: a ... + if f then + flush(currentcatcodes,mpdrawing,"{^^M}") + else + report_context("warning: %a gets argument 'false' which is currently unsupported","metafun") + end + else + report_context("error: %a gets a weird argument %a","metafun",tostring(f)) + end + end +end + +setmetatable(metafun, { __call = caller } ) + +function metafun.start() + context.resetMPdrawing() +end + +function metafun.stop() + context.MPdrawingdonetrue() + context.getMPdrawing() +end + +function metafun.color(name) + return formatters[ [[\MPcolor{%s}]] ](name) +end + +-- metafun.delayed + +local delayed = { } metafun.delayed = delayed + +local function indexer(parent,k) + local f = function(...) + local a = { ... } + return function() + return metafun[k](unpack(a)) + end + end + parent[k] = f + return f +end + + +local function caller(parent,...) + local a = { ... } + return function() + return metafun(unpack(a)) + end +end + +setmetatable(delayed, { __index = indexer, __call = caller } ) + +-- helpers: + +function context.concat(...) + context(concat(...)) +end diff --git a/tex/context/base/cldf-int.lua b/tex/context/base/cldf-int.lua index 6cbfd666f..6ead8e841 100644 --- a/tex/context/base/cldf-int.lua +++ b/tex/context/base/cldf-int.lua @@ -1,221 +1,221 @@ -if not modules then modules = { } end modules ['cldf-int'] = { - version = 1.001, - comment = "companion to mult-clm.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- another experiment --- needs upgrading --- needs checking --- todo: multilingual - -local format, insert, remove, concat = string.format, table.insert, table.remove, table.concat -local unpack = unpack or table.unpack - -local catcodenumbers = catcodes.numbers - -local ctxcatcodes = catcodenumbers.ctxcatcodes -local vrbcatcodes = catcodenumbers.vrbcatcodes - -local contextsprint = context.sprint - -local trace_define = false trackers.register("context.define", function(v) trace_define = v end) - -interfaces = interfaces or { } - -_clmh_ = utilities.parsers.settings_to_array -_clma_ = utilities.parsers.settings_to_array - -local starters, stoppers, macros, stack = { }, { }, { }, { } - -local checkers = { - [0] = "", - "\\dosingleempty", - "\\dodoubleempty", - "\\dotripleempty", - "\\doquadrupleempty", - "\\doquintupleempty", - "\\dosixtupleempty", -} - -function _clmm_(name,...) - macros[name](...) -end - -function _clmb_(name,...) 
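-- Illustrative sketch, not part of this patch, of the metafun feeder defined
-- above; the MetaPost code in the string is only an example.
--
--   metafun.start()                                -- \resetMPdrawing
--   metafun("draw fullcircle scaled %s ;","3cm")   -- \MPdrawing{draw fullcircle scaled 3cm ;}
--   metafun.stop()                                 -- \MPdrawingdonetrue \getMPdrawing
--   metafun.color("red")                           -- returns the string \MPcolor{red}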
- local sn = stack[name] - insert(sn,{...}) - starters[name](...) -end - -function _clme_(name) - local sn = stack[name] - local sv = remove(sn) - if sv then - stoppers[name](unpack(sv)) - else - -- nesting error - end -end - -_clmn_ = tonumber - -local estart = interfaces.elements.start -local estop = interfaces.elements.stop - -function interfaces.definecommand(name,specification) -- name is optional - if type(name) == "table" then - specification = name - name = specification.name - end - if name and specification then - local arguments = specification.arguments - local na = (arguments and #arguments) or 0 - local environment = specification.environment - if na == 0 then - if environment then - contextsprint(ctxcatcodes,"\\setuvalue{",estart,name,"}{\\ctxlua{_clmb_('",name,"')}}") - contextsprint(ctxcatcodes,"\\setuvalue{",estop, name,"}{\\ctxlua{_clme_('",name,"')}}") - end - if not environment or environment == "both" then - contextsprint(ctxcatcodes,"\\setuvalue{", name,"}{\\ctxlua{_clmm_('",name,"')}}") - end - else - -- we could flush immediate but tracing is bad then - stack[name] = { } - local opt, done = 0, false - local snippets = { } -- we can reuse it - local mkivdo = "\\mkivdo" .. name -- maybe clddo - snippets[#snippets+1] = "\\def" - snippets[#snippets+1] = mkivdo - for i=1,na do - local a = arguments[i] - local variant = a[1] - if variant == "option" then - snippets[#snippets+1] = "[#" - snippets[#snippets+1] = i - snippets[#snippets+1] = "]" - if not done then - opt = opt + 1 - end - else - done = true -- no more optional checking after this - snippets[#snippets+1] = "#" - snippets[#snippets+1] = i - end - end - if environment then - snippets[#snippets+1] = "{\\ctxlua{_clmb_('" - snippets[#snippets+1] = name - snippets[#snippets+1] = "'" - else - snippets[#snippets+1] = "{\\ctxlua{_clmm_('" - snippets[#snippets+1] = name - snippets[#snippets+1] = "'" - end - for i=1,na do - local a = arguments[i] - local variant = a[2] - if variant == "list" then - snippets[#snippets+1] = ",_clma_([[#" - snippets[#snippets+1] = i - snippets[#snippets+1] = "]])" - elseif variant == "hash" then - snippets[#snippets+1] = ",_clmh_([[#" - snippets[#snippets+1] = i - snippets[#snippets+1] = "]])" - elseif variant == "number" then - snippets[#snippets+1] = ",_clmn_([[#" - snippets[#snippets+1] = i - snippets[#snippets+1] = "]])" - else - snippets[#snippets+1] = ",[[#" - snippets[#snippets+1] = i - snippets[#snippets+1] = "]]" - end - end - snippets[#snippets+1] = ")}}" - contextsprint(ctxcatcodes,unpack(snippets)) - if environment then - -- needs checking - contextsprint(ctxcatcodes,"\\setuvalue{",estart,name,"}{",checkers[opt],mkivdo,"}") - contextsprint(ctxcatcodes,"\\setuvalue{",estop, name,"}{\\ctxlua{_clme_('",name,"')}}") - end - if not environment or environment == "both" then - contextsprint(ctxcatcodes,"\\setuvalue{", name,"}{",checkers[opt],mkivdo,"}") - end - end - if environment then - starters[name] = specification.starter - stoppers[name] = specification.stopper - else - macros[name] = specification.macro - end - end -end - -function interfaces.tolist(t) - local r = { } - for i=1,#t do - r[i] = t[i] - end - local n = #r - for k,v in table.sortedhash(t) do - if type(k) ~= "number" then - n = n + 1 - r[n] = k .. "=" .. 
v - end - end - return concat(r,", ") -end - ---~ \startluacode ---~ function test(opt_1, opt_2, arg_1) ---~ context.startnarrower() ---~ context("options 1: %s",interfaces.tolist(opt_1)) ---~ context.par() ---~ context("options 2: %s",interfaces.tolist(opt_2)) ---~ context.par() ---~ context("argument 1: %s",arg_1) ---~ context.stopnarrower() ---~ end - ---~ interfaces.definecommand { ---~ name = "test", ---~ arguments = { ---~ { "option", "list" }, ---~ { "option", "hash" }, ---~ { "content", "string" }, ---~ }, ---~ macro = test, ---~ } ---~ \stopluacode - ---~ test: \test[1][a=3]{whatever} - ---~ \startluacode ---~ local function startmore(opt_1) ---~ context.startnarrower() ---~ context("start more, options: %s",interfaces.tolist(opt_1)) ---~ context.startnarrower() ---~ end - ---~ local function stopmore(opt_1) ---~ context.stopnarrower() ---~ context("stop more, options: %s",interfaces.tolist(opt_1)) ---~ context.stopnarrower() ---~ end - ---~ interfaces.definecommand ( "more", { ---~ environment = true, ---~ arguments = { ---~ { "option", "list" }, ---~ }, ---~ starter = startmore, ---~ stopper = stopmore, ---~ } ) ---~ \stopluacode - ---~ more: \startmore[1] one \startmore[2] two \stopmore one \stopmore +if not modules then modules = { } end modules ['cldf-int'] = { + version = 1.001, + comment = "companion to mult-clm.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- another experiment +-- needs upgrading +-- needs checking +-- todo: multilingual + +local format, insert, remove, concat = string.format, table.insert, table.remove, table.concat +local unpack = unpack or table.unpack + +local catcodenumbers = catcodes.numbers + +local ctxcatcodes = catcodenumbers.ctxcatcodes +local vrbcatcodes = catcodenumbers.vrbcatcodes + +local contextsprint = context.sprint + +local trace_define = false trackers.register("context.define", function(v) trace_define = v end) + +interfaces = interfaces or { } + +_clmh_ = utilities.parsers.settings_to_array +_clma_ = utilities.parsers.settings_to_array + +local starters, stoppers, macros, stack = { }, { }, { }, { } + +local checkers = { + [0] = "", + "\\dosingleempty", + "\\dodoubleempty", + "\\dotripleempty", + "\\doquadrupleempty", + "\\doquintupleempty", + "\\dosixtupleempty", +} + +function _clmm_(name,...) + macros[name](...) +end + +function _clmb_(name,...) + local sn = stack[name] + insert(sn,{...}) + starters[name](...) 
+end + +function _clme_(name) + local sn = stack[name] + local sv = remove(sn) + if sv then + stoppers[name](unpack(sv)) + else + -- nesting error + end +end + +_clmn_ = tonumber + +local estart = interfaces.elements.start +local estop = interfaces.elements.stop + +function interfaces.definecommand(name,specification) -- name is optional + if type(name) == "table" then + specification = name + name = specification.name + end + if name and specification then + local arguments = specification.arguments + local na = (arguments and #arguments) or 0 + local environment = specification.environment + if na == 0 then + if environment then + contextsprint(ctxcatcodes,"\\setuvalue{",estart,name,"}{\\ctxlua{_clmb_('",name,"')}}") + contextsprint(ctxcatcodes,"\\setuvalue{",estop, name,"}{\\ctxlua{_clme_('",name,"')}}") + end + if not environment or environment == "both" then + contextsprint(ctxcatcodes,"\\setuvalue{", name,"}{\\ctxlua{_clmm_('",name,"')}}") + end + else + -- we could flush immediate but tracing is bad then + stack[name] = { } + local opt, done = 0, false + local snippets = { } -- we can reuse it + local mkivdo = "\\mkivdo" .. name -- maybe clddo + snippets[#snippets+1] = "\\def" + snippets[#snippets+1] = mkivdo + for i=1,na do + local a = arguments[i] + local variant = a[1] + if variant == "option" then + snippets[#snippets+1] = "[#" + snippets[#snippets+1] = i + snippets[#snippets+1] = "]" + if not done then + opt = opt + 1 + end + else + done = true -- no more optional checking after this + snippets[#snippets+1] = "#" + snippets[#snippets+1] = i + end + end + if environment then + snippets[#snippets+1] = "{\\ctxlua{_clmb_('" + snippets[#snippets+1] = name + snippets[#snippets+1] = "'" + else + snippets[#snippets+1] = "{\\ctxlua{_clmm_('" + snippets[#snippets+1] = name + snippets[#snippets+1] = "'" + end + for i=1,na do + local a = arguments[i] + local variant = a[2] + if variant == "list" then + snippets[#snippets+1] = ",_clma_([[#" + snippets[#snippets+1] = i + snippets[#snippets+1] = "]])" + elseif variant == "hash" then + snippets[#snippets+1] = ",_clmh_([[#" + snippets[#snippets+1] = i + snippets[#snippets+1] = "]])" + elseif variant == "number" then + snippets[#snippets+1] = ",_clmn_([[#" + snippets[#snippets+1] = i + snippets[#snippets+1] = "]])" + else + snippets[#snippets+1] = ",[[#" + snippets[#snippets+1] = i + snippets[#snippets+1] = "]]" + end + end + snippets[#snippets+1] = ")}}" + contextsprint(ctxcatcodes,unpack(snippets)) + if environment then + -- needs checking + contextsprint(ctxcatcodes,"\\setuvalue{",estart,name,"}{",checkers[opt],mkivdo,"}") + contextsprint(ctxcatcodes,"\\setuvalue{",estop, name,"}{\\ctxlua{_clme_('",name,"')}}") + end + if not environment or environment == "both" then + contextsprint(ctxcatcodes,"\\setuvalue{", name,"}{",checkers[opt],mkivdo,"}") + end + end + if environment then + starters[name] = specification.starter + stoppers[name] = specification.stopper + else + macros[name] = specification.macro + end + end +end + +function interfaces.tolist(t) + local r = { } + for i=1,#t do + r[i] = t[i] + end + local n = #r + for k,v in table.sortedhash(t) do + if type(k) ~= "number" then + n = n + 1 + r[n] = k .. "=" .. 
v + end + end + return concat(r,", ") +end + +--~ \startluacode +--~ function test(opt_1, opt_2, arg_1) +--~ context.startnarrower() +--~ context("options 1: %s",interfaces.tolist(opt_1)) +--~ context.par() +--~ context("options 2: %s",interfaces.tolist(opt_2)) +--~ context.par() +--~ context("argument 1: %s",arg_1) +--~ context.stopnarrower() +--~ end + +--~ interfaces.definecommand { +--~ name = "test", +--~ arguments = { +--~ { "option", "list" }, +--~ { "option", "hash" }, +--~ { "content", "string" }, +--~ }, +--~ macro = test, +--~ } +--~ \stopluacode + +--~ test: \test[1][a=3]{whatever} + +--~ \startluacode +--~ local function startmore(opt_1) +--~ context.startnarrower() +--~ context("start more, options: %s",interfaces.tolist(opt_1)) +--~ context.startnarrower() +--~ end + +--~ local function stopmore(opt_1) +--~ context.stopnarrower() +--~ context("stop more, options: %s",interfaces.tolist(opt_1)) +--~ context.stopnarrower() +--~ end + +--~ interfaces.definecommand ( "more", { +--~ environment = true, +--~ arguments = { +--~ { "option", "list" }, +--~ }, +--~ starter = startmore, +--~ stopper = stopmore, +--~ } ) +--~ \stopluacode + +--~ more: \startmore[1] one \startmore[2] two \stopmore one \stopmore diff --git a/tex/context/base/cldf-prs.lua b/tex/context/base/cldf-prs.lua index 9fbdba0c8..7715e8695 100644 --- a/tex/context/base/cldf-prs.lua +++ b/tex/context/base/cldf-prs.lua @@ -1,54 +1,54 @@ -if not modules then modules = { } end modules ['cldf-bas'] = { - version = 1.001, - comment = "companion to cldf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local lpegmatch, patterns = lpeg.match, lpeg.patterns -local P, R, V, Cc, Cs = lpeg.P, lpeg.R, lpeg.V, lpeg.Cc, lpeg.Cs -local format = string.format - -local cpatterns = patterns.context or { } -patterns.context = cpatterns - -local backslash = P("\\") -local csname = backslash * P(1) * (1-backslash)^0 -local sign = P("+") / "\\textplus " - + P("-") / "\\textminus " -local leftbrace = P("{") -local rightbrace = P("}") -local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace } -local subscript = P("_") -local superscript = P("^") -local utf8char = patterns.utf8char -local cardinal = patterns.cardinal - --- local scripts = P { "start", --- start = V("csname") + V("lowfirst") + V("highfirst"), --- csname = csname, --- content = Cs(V("csname") + nested + sign^-1 * (cardinal + utf8char)), --- lowfirst = subscript * ( Cc("\\lohi{%s}{%s}") * V("content") * superscript + Cc("\\low{%s}" ) ) * V("content") / format, --- highfirst = superscript * ( Cc("\\hilo{%s}{%s}") * V("content") * subscript + Cc("\\high{%s}") ) * V("content") / format, --- } - -local scripts = P { "start", - start = V("csname") + V("lowfirst") + V("highfirst"), - csname = csname, - content = Cs(V("csname") + nested + sign^-1 * (cardinal + utf8char)), - lowfirst = (subscript /"") * ( Cc("\\lohi{") * V("content") * Cc("}{") * (superscript/"") + Cc("\\low{" ) ) * V("content") * Cc("}"), - highfirst = (superscript/"") * ( Cc("\\hilo{") * V("content") * Cc("}{") * (subscript /"") + Cc("\\high{") ) * V("content") * Cc("}"), - } - -local scripted = Cs((csname + scripts + utf8char)^0) - -cpatterns.scripts = scripts -cpatterns.csname = csname -cpatterns.scripted = scripted -cpatterns.nested = nested - --- inspect(scripted) --- print(lpegmatch(scripted,"10^-3_x")) --- print(lpegmatch(scripted,"10^-a")) - +if not modules then modules = { } end 
modules ['cldf-bas'] = { + version = 1.001, + comment = "companion to cldf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local lpegmatch, patterns = lpeg.match, lpeg.patterns +local P, R, V, Cc, Cs = lpeg.P, lpeg.R, lpeg.V, lpeg.Cc, lpeg.Cs +local format = string.format + +local cpatterns = patterns.context or { } +patterns.context = cpatterns + +local backslash = P("\\") +local csname = backslash * P(1) * (1-backslash)^0 +local sign = P("+") / "\\textplus " + + P("-") / "\\textminus " +local leftbrace = P("{") +local rightbrace = P("}") +local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace } +local subscript = P("_") +local superscript = P("^") +local utf8char = patterns.utf8char +local cardinal = patterns.cardinal + +-- local scripts = P { "start", +-- start = V("csname") + V("lowfirst") + V("highfirst"), +-- csname = csname, +-- content = Cs(V("csname") + nested + sign^-1 * (cardinal + utf8char)), +-- lowfirst = subscript * ( Cc("\\lohi{%s}{%s}") * V("content") * superscript + Cc("\\low{%s}" ) ) * V("content") / format, +-- highfirst = superscript * ( Cc("\\hilo{%s}{%s}") * V("content") * subscript + Cc("\\high{%s}") ) * V("content") / format, +-- } + +local scripts = P { "start", + start = V("csname") + V("lowfirst") + V("highfirst"), + csname = csname, + content = Cs(V("csname") + nested + sign^-1 * (cardinal + utf8char)), + lowfirst = (subscript /"") * ( Cc("\\lohi{") * V("content") * Cc("}{") * (superscript/"") + Cc("\\low{" ) ) * V("content") * Cc("}"), + highfirst = (superscript/"") * ( Cc("\\hilo{") * V("content") * Cc("}{") * (subscript /"") + Cc("\\high{") ) * V("content") * Cc("}"), + } + +local scripted = Cs((csname + scripts + utf8char)^0) + +cpatterns.scripts = scripts +cpatterns.csname = csname +cpatterns.scripted = scripted +cpatterns.nested = nested + +-- inspect(scripted) +-- print(lpegmatch(scripted,"10^-3_x")) +-- print(lpegmatch(scripted,"10^-a")) + diff --git a/tex/context/base/cldf-ver.lua b/tex/context/base/cldf-ver.lua index b48fd253a..601c98e89 100644 --- a/tex/context/base/cldf-ver.lua +++ b/tex/context/base/cldf-ver.lua @@ -1,75 +1,75 @@ -if not modules then modules = { } end modules ['cldf-ver'] = { - version = 1.001, - comment = "companion to cldf-ver.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- We have better verbatim: context.verbatim so that needs to be looked --- into. We can also directly store in buffers although this variant works --- better when used mixed with other code (synchronization issue). - -local concat, tohandle = table.concat, table.tohandle -local find, splitlines = string.find, string.splitlines -local tostring, type = tostring, type - -local context = context - -local function flush(...) - context(concat{...,"\r"}) -- was \n -end - -local function t_tocontext(...) - context.starttyping { "typing" } -- else [1] is intercepted - context.pushcatcodes("verbatim") - tohandle(flush,...) -- ok? - context.stoptyping() - context.popcatcodes() -end - -local function s_tocontext(...) 
-- we need to catch {\} - context.type() - context("{") - context.pushcatcodes("verbatim") - context(concat({...}," ")) - context.popcatcodes() - context("}") -end - -local function b_tocontext(b) - s_tocontext(tostring(b)) -end - -table .tocontext = t_tocontext -string .tocontext = s_tocontext -boolean.tocontext = b_tocontext - -function context.tocontext(first,...) - local t = type(first) - if t == "string" then - s_tocontext(first,...) - elseif t == "table" then - t_tocontext(first,...) - elseif t == "boolean" then - b_tocontext(first,...) - end -end - -function context.tobuffer(name,str) - context.startbuffer { name } - context.pushcatcodes("verbatim") - local lines = (type(str) == "string" and find(str,"[\n\r]") and splitlines(str)) or str - for i=1,#lines do - context(lines[i] .. " ") - end - context.stopbuffer() - context.popcatcodes() -end - -function context.tolines(str) - local lines = type(str) == "string" and splitlines(str) or str - for i=1,#lines do - context(lines[i] .. " ") - end -end +if not modules then modules = { } end modules ['cldf-ver'] = { + version = 1.001, + comment = "companion to cldf-ver.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- We have better verbatim: context.verbatim so that needs to be looked +-- into. We can also directly store in buffers although this variant works +-- better when used mixed with other code (synchronization issue). + +local concat, tohandle = table.concat, table.tohandle +local find, splitlines = string.find, string.splitlines +local tostring, type = tostring, type + +local context = context + +local function flush(...) + context(concat{...,"\r"}) -- was \n +end + +local function t_tocontext(...) + context.starttyping { "typing" } -- else [1] is intercepted + context.pushcatcodes("verbatim") + tohandle(flush,...) -- ok? + context.stoptyping() + context.popcatcodes() +end + +local function s_tocontext(...) -- we need to catch {\} + context.type() + context("{") + context.pushcatcodes("verbatim") + context(concat({...}," ")) + context.popcatcodes() + context("}") +end + +local function b_tocontext(b) + s_tocontext(tostring(b)) +end + +table .tocontext = t_tocontext +string .tocontext = s_tocontext +boolean.tocontext = b_tocontext + +function context.tocontext(first,...) + local t = type(first) + if t == "string" then + s_tocontext(first,...) + elseif t == "table" then + t_tocontext(first,...) + elseif t == "boolean" then + b_tocontext(first,...) + end +end + +function context.tobuffer(name,str) + context.startbuffer { name } + context.pushcatcodes("verbatim") + local lines = (type(str) == "string" and find(str,"[\n\r]") and splitlines(str)) or str + for i=1,#lines do + context(lines[i] .. " ") + end + context.stopbuffer() + context.popcatcodes() +end + +function context.tolines(str) + local lines = type(str) == "string" and splitlines(str) or str + for i=1,#lines do + context(lines[i] .. 
" ") + end +end diff --git a/tex/context/base/colo-icc.lua b/tex/context/base/colo-icc.lua index f7ed561c1..7880e0778 100644 --- a/tex/context/base/colo-icc.lua +++ b/tex/context/base/colo-icc.lua @@ -1,120 +1,120 @@ -if not modules then modules = { } end modules ['colo-icc'] = { - version = 1.000, - comment = "companion to colo-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local char, byte, gsub, match, format, strip = string.char, string.byte, string.gsub, string.match, string.format, string.strip -local readstring, readnumber = io.readstring, io.readnumber -local formatters = string.formatters - -local colors = attributes and attributes.colors or { } -- when used in mtxrun - -local report_colors = logs.reporter("colors","icc") - -local R, Cs, lpegmatch = lpeg.R, lpeg.Cs, lpeg.match - -local invalid = R(char(0)..char(31)) -local cleaned = invalid^0 * Cs((1-invalid)^0) - -function colors.iccprofile(filename,verbose) - local fullname = resolvers.findfile(filename,"icc") or "" - if fullname == "" then - local locate = resolvers.finders.byscheme -- not in mtxrun - if locate then - fullname = locate("loc",filename) - end - end - if fullname == "" then - report_colors("profile %a cannot be found",filename) - return nil, false - end - local f = io.open(fullname,"rb") - if not f then - report_colors("profile %a cannot be loaded",fullname) - return nil, false - end - local header = { - size = readnumber(f,4), - cmmtype = readnumber(f,4), - version = readnumber(f,4), - deviceclass = strip(readstring(f,4)), - colorspace = strip(readstring(f,4)), - connectionspace = strip(readstring(f,4)), - datetime = { - year = readnumber(f,2), - month = readnumber(f,2), - day = readnumber(f,2), - hour = readnumber(f,2), - minutes = readnumber(f,2), - seconds = readnumber(f,2), - }, - filesignature = strip(readstring(f,4)), - platformsignature = strip(readstring(f,4)), - options = readnumber(f,4), - devicemanufacturer = strip(readstring(f,4)), - devicemodel = strip(readstring(f,4)), - deviceattributes = readnumber(f,4), - renderingintent = readnumber(f,4), - illuminantxyz = { - x = readnumber(f,4), - y = readnumber(f,4), - z = readnumber(f,4), - }, - profilecreator = readnumber(f,4), - id = strip(readstring(f,16)), - } - local tags = { } - for i=1,readnumber(f,128,4) do - tags[readstring(f,4)] = { - offset = readnumber(f,4), - length = readnumber(f,4), - } - end - local o = header.options - header.options = - o == 0 and "embedded" or - o == 1 and "dependent" or "unknown" - local d = header.deviceattributes - header.deviceattributes = { - [number.hasbit(d,1) and "transparency" or "reflective"] = true, - [number.hasbit(d,2) and "mate" or "glossy" ] = true, - [number.hasbit(d,3) and "negative" or "positive" ] = true, - [number.hasbit(d,4) and "bw" or "color" ] = true, - } - local r = header.renderingintent - header.renderingintent = - r == 0 and "perceptual" or - r == 1 and "relative" or - r == 2 and "saturation" or - r == 3 and "absolute" or "unknown" - for tag, spec in next, tags do - if tag then - local offset, length = spec.offset, spec.length - local variant = readstring(f,offset,4) - if variant == "text" or variant == "desc" then - local str = readstring(f,length-4) - tags[tag] = { - data = str, - cleaned = lpegmatch(cleaned,str), - } - else - if verbose then - report_colors("ignoring tag %a or type %a in profile %a",tag,variant,fullname) - end - tags[tag] = nil - end - end - end - f:close() 
- local profile = { - filename = filename, - fullname = fullname, - header = header, - tags = tags, - } - report_colors("profile %a loaded",fullname) - return profile, true -end +if not modules then modules = { } end modules ['colo-icc'] = { + version = 1.000, + comment = "companion to colo-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local char, byte, gsub, match, format, strip = string.char, string.byte, string.gsub, string.match, string.format, string.strip +local readstring, readnumber = io.readstring, io.readnumber +local formatters = string.formatters + +local colors = attributes and attributes.colors or { } -- when used in mtxrun + +local report_colors = logs.reporter("colors","icc") + +local R, Cs, lpegmatch = lpeg.R, lpeg.Cs, lpeg.match + +local invalid = R(char(0)..char(31)) +local cleaned = invalid^0 * Cs((1-invalid)^0) + +function colors.iccprofile(filename,verbose) + local fullname = resolvers.findfile(filename,"icc") or "" + if fullname == "" then + local locate = resolvers.finders.byscheme -- not in mtxrun + if locate then + fullname = locate("loc",filename) + end + end + if fullname == "" then + report_colors("profile %a cannot be found",filename) + return nil, false + end + local f = io.open(fullname,"rb") + if not f then + report_colors("profile %a cannot be loaded",fullname) + return nil, false + end + local header = { + size = readnumber(f,4), + cmmtype = readnumber(f,4), + version = readnumber(f,4), + deviceclass = strip(readstring(f,4)), + colorspace = strip(readstring(f,4)), + connectionspace = strip(readstring(f,4)), + datetime = { + year = readnumber(f,2), + month = readnumber(f,2), + day = readnumber(f,2), + hour = readnumber(f,2), + minutes = readnumber(f,2), + seconds = readnumber(f,2), + }, + filesignature = strip(readstring(f,4)), + platformsignature = strip(readstring(f,4)), + options = readnumber(f,4), + devicemanufacturer = strip(readstring(f,4)), + devicemodel = strip(readstring(f,4)), + deviceattributes = readnumber(f,4), + renderingintent = readnumber(f,4), + illuminantxyz = { + x = readnumber(f,4), + y = readnumber(f,4), + z = readnumber(f,4), + }, + profilecreator = readnumber(f,4), + id = strip(readstring(f,16)), + } + local tags = { } + for i=1,readnumber(f,128,4) do + tags[readstring(f,4)] = { + offset = readnumber(f,4), + length = readnumber(f,4), + } + end + local o = header.options + header.options = + o == 0 and "embedded" or + o == 1 and "dependent" or "unknown" + local d = header.deviceattributes + header.deviceattributes = { + [number.hasbit(d,1) and "transparency" or "reflective"] = true, + [number.hasbit(d,2) and "mate" or "glossy" ] = true, + [number.hasbit(d,3) and "negative" or "positive" ] = true, + [number.hasbit(d,4) and "bw" or "color" ] = true, + } + local r = header.renderingintent + header.renderingintent = + r == 0 and "perceptual" or + r == 1 and "relative" or + r == 2 and "saturation" or + r == 3 and "absolute" or "unknown" + for tag, spec in next, tags do + if tag then + local offset, length = spec.offset, spec.length + local variant = readstring(f,offset,4) + if variant == "text" or variant == "desc" then + local str = readstring(f,length-4) + tags[tag] = { + data = str, + cleaned = lpegmatch(cleaned,str), + } + else + if verbose then + report_colors("ignoring tag %a or type %a in profile %a",tag,variant,fullname) + end + tags[tag] = nil + end + end + end + f:close() + local profile = { + filename = 
filename, + fullname = fullname, + header = header, + tags = tags, + } + report_colors("profile %a loaded",fullname) + return profile, true +end diff --git a/tex/context/base/colo-run.lua b/tex/context/base/colo-run.lua index 27f7c6b12..c7ff0b159 100644 --- a/tex/context/base/colo-run.lua +++ b/tex/context/base/colo-run.lua @@ -1,68 +1,68 @@ -if not modules then modules = { } end modules ['colo-run'] = { - version = 1.000, - comment = "companion to colo-run.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- For historic reasons the core has a couple of tracing features. Nowadays --- these would end up in modules. - -local colors, commands, context, utilities = colors, commands, context, utilities - -local colors= attributes.colors - -function commands.showcolorset(name) - local set = colors.setlist(name) - context.starttabulate { "|l|l|l|l|l|l|l|" } - for i=1,#set do - local s = set[i] - local r = { width = "4em", height = "max", depth = "max", color = s } - context.NC() - context.setcolormodel { "gray" } - context.blackrule(r) - context.NC() - context.blackrule(r) - context.NC() - context.grayvalue(s) - context.NC() - context.colorvalue(s) - context.NC() - context(s) - context.NC() - context.NR() - end - context.stoptabulate() -end - -function commands.showcolorcomponents(list) - local set = utilities.parsers.settings_to_array(list) - context.starttabulate { "|lT|lT|lT|lT|" } - context.NC() - context("color") - context.NC() - context("name") - context.NC() - context("transparency") - context.NC() - context("specification ") - context.NC() - context.NR() - context.TB() - for i=1,#set do - local s = set[i] - context.NC() - context.showcolorbar { s } - context.NC() - context(s) - context.NC() - context.transparencycomponents(s) - context.NC() - context.colorcomponents(s) - context.NC() - context.NR() - end - context.stoptabulate() -end - +if not modules then modules = { } end modules ['colo-run'] = { + version = 1.000, + comment = "companion to colo-run.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- For historic reasons the core has a couple of tracing features. Nowadays +-- these would end up in modules. 
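-- Illustrative sketch, not part of this patch, of the two tracing commands
-- defined below; "demo" is a made-up color set and the color names are assumed
-- to be defined elsewhere in the document.
--
--   context.starttext()
--   commands.showcolorset("demo")
--   commands.showcolorcomponents("red,green,blue")
--   context.stoptext()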
+ +local colors, commands, context, utilities = colors, commands, context, utilities + +local colors= attributes.colors + +function commands.showcolorset(name) + local set = colors.setlist(name) + context.starttabulate { "|l|l|l|l|l|l|l|" } + for i=1,#set do + local s = set[i] + local r = { width = "4em", height = "max", depth = "max", color = s } + context.NC() + context.setcolormodel { "gray" } + context.blackrule(r) + context.NC() + context.blackrule(r) + context.NC() + context.grayvalue(s) + context.NC() + context.colorvalue(s) + context.NC() + context(s) + context.NC() + context.NR() + end + context.stoptabulate() +end + +function commands.showcolorcomponents(list) + local set = utilities.parsers.settings_to_array(list) + context.starttabulate { "|lT|lT|lT|lT|" } + context.NC() + context("color") + context.NC() + context("name") + context.NC() + context("transparency") + context.NC() + context("specification ") + context.NC() + context.NR() + context.TB() + for i=1,#set do + local s = set[i] + context.NC() + context.showcolorbar { s } + context.NC() + context(s) + context.NC() + context.transparencycomponents(s) + context.NC() + context.colorcomponents(s) + context.NC() + context.NR() + end + context.stoptabulate() +end + diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv index 8dcacd5a9..098763903 100644 --- a/tex/context/base/cont-new.mkiv +++ b/tex/context/base/cont-new.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2013.05.18 12:41} +\newcontextversion{2013.05.19 19:27} %D This file is loaded at runtime, thereby providing an excellent place for %D hacks, patches, extensions and new features. diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf index 2e30d9d38..fbe0f4a22 100644 Binary files a/tex/context/base/context-version.pdf and b/tex/context/base/context-version.pdf differ diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv index c10c70e6f..e9e0006bd 100644 --- a/tex/context/base/context.mkiv +++ b/tex/context/base/context.mkiv @@ -25,7 +25,7 @@ %D up and the dependencies are more consistent. \edef\contextformat {\jobname} -\edef\contextversion{2013.05.18 12:41} +\edef\contextversion{2013.05.19 19:27} \edef\contextkind {beta} %D For those who want to use this: diff --git a/tex/context/base/core-ctx.lua b/tex/context/base/core-ctx.lua index 18978a530..d5cdc3143 100644 --- a/tex/context/base/core-ctx.lua +++ b/tex/context/base/core-ctx.lua @@ -1,347 +1,347 @@ -if not modules then modules = { } end modules ['core-ctx'] = { - version = 1.001, - comment = "companion to core-ctx.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ -Job control files aka ctx files are rather old and date from the mkii times. -They were handled in texexec and mtx-context and deals with modes, modules, -environments and preprocessing in projects where one such file drives the -processing of lots of files without the need to provide command line -arguments. - -In mkiv this concept was of course supported as well. The first implementation -of mtx-context took much of the approach of texexec, but by now we have gotten -rid of the option file (for passing modes, modules and environments), the stubs -(for directly processing cld and xml) as well as the preprocessing component -of the ctx files. 
Special helper features, like typesetting listings, were -already moved to the extras (a direct side effect of the ability to pass along -command line arguments.) All this made mtx-context more simple than its ancestor -texexec. - -Because some of the modes might affect the mtx-context end, the ctx file is -still loaded there but only for getting the modes. The file is loaded again -during the run but as loading and basic processing takes less than a -millisecond it's not that much of a burden. ---]] - --- the ctxrunner tabel might either become private or move to the job namespace --- which also affects the loading order - -local trace_prepfiles = false trackers.register("system.prepfiles", function(v) trace_prepfiles = v end) - -local gsub, find, match, validstring = string.gsub, string.find, string.match, string.valid -local concat = table.concat -local xmltext = xml.text - -local report_prepfiles = logs.reporter("system","prepfiles") - -commands = commands or { } -local commands = commands - -ctxrunner = ctxrunner or { } - -ctxrunner.prepfiles = utilities.storage.allocate() - -local function dontpreparefile(t,k) - return k -- we only store when we have a prepper -end - -table.setmetatableindex(ctxrunner.prepfiles,dontpreparefile) - -local function filtered(str,method) -- in resolvers? - str = tostring(str) - if method == 'name' then str = file.nameonly(str) - elseif method == 'path' then str = file.dirname(str) - elseif method == 'suffix' then str = file.suffix(str) - elseif method == 'nosuffix' then str = file.removesuffix(str) - elseif method == 'nopath' then str = file.basename(str) - elseif method == 'base' then str = file.basename(str) --- elseif method == 'full' then --- elseif method == 'complete' then --- elseif method == 'expand' then -- str = file.expandpath(str) - end - return (gsub(str,"\\","/")) -end - --- local function substitute(e,str) --- local attributes = e.at --- if str and attributes then --- if attributes['method'] then --- str = filtered(str,attributes['method']) --- end --- if str == "" and attributes['default'] then --- str = attributes['default'] --- end --- end --- return str --- end - -local function substitute(str) - return str -end - -local function justtext(str) - str = xml.unescaped(tostring(str)) - str = xml.cleansed(str) - str = gsub(str,"\\+",'/') - str = gsub(str,"%s+",' ') - return str -end - -function ctxrunner.load(ctxname) - - local xmldata = xml.load(ctxname) - - local jobname = tex.jobname -- todo - - local variables = { job = jobname } - local commands = { } - local flags = { } - local paths = { } -- todo - local treatments = { } - local suffix = "prep" - - xml.include(xmldata,'ctx:include','name', {'.', file.dirname(ctxname), "..", "../.." }) - - for e in xml.collected(xmldata,"/ctx:job/ctx:flags/ctx:flag") do - local flag = xmltext(e) - local key, value = match(flag,"^(.-)=(.+)$") - if key and value then - environment.setargument(key,value) - else - environment.setargument(flag,true) - end - end - - -- add to document.options.ctxfile[...] 
- - local ctxfile = document.options.ctxfile - - local modes = ctxfile.modes - local modules = ctxfile.modules - local environments = ctxfile.environments - - for e in xml.collected(xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:mode") do - modes[#modes+1] = xmltext(e) - end - - for e in xml.collected(xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:module") do - modules[#modules+1] = xmltext(e) - end - - for e in xml.collected(xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:environment") do - environments[#environments+1] = xmltext(e) - end - - for e in xml.collected(xmldata,"ctx:message") do - report_prepfiles("ctx comment: %s", xmltext(e)) - end - - for r, d, k in xml.elements(xmldata,"ctx:value[@name='job']") do - d[k] = variables['job'] or "" - end - - for e in xml.collected(xmldata,"/ctx:job/ctx:preprocess/ctx:processors/ctx:processor") do - local name = e.at and e.at['name'] or "unknown" - local suffix = e.at and e.at['suffix'] or "prep" - for r, d, k in xml.elements(command,"ctx:old") do - d[k] = "%old%" - end - for r, d, k in xml.elements(e,"ctx:new") do - d[k] = "%new%" - end - for r, d, k in xml.elements(e,"ctx:value") do - local tag = d[k].at['name'] - if tag then - d[k] = "%" .. tag .. "%" - end - end - local runner = xml.textonly(e) - if runner and runner ~= "" then - commands[name] = { - suffix = suffix, - runner = runner, - } - end - end - - local suffix = xml.filter(xmldata,"xml:///ctx:job/ctx:preprocess/attribute('suffix')") or suffix - local runlocal = xml.filter(xmldata,"xml:///ctx:job/ctx:preprocess/ctx:processors/attribute('local')") - - runlocal = toboolean(runlocal) - - -- todo: only collect, then plug into file handler - - local inputfile = validstring(environment.arguments.input) or jobname - - variables.old = inputfile - - for files in xml.collected(xmldata,"/ctx:job/ctx:preprocess/ctx:files") do - for pattern in xml.collected(files,"ctx:file") do - local preprocessor = pattern.at['processor'] or "" - for r, d, k in xml.elements(pattern,"/ctx:old") do - d[k] = jobname - end - for r, d, k in xml.elements(pattern,"/ctx:value[@name='old'") do - d[k] = jobname - end - pattern =justtext(xml.tostring(pattern)) - if preprocessor and preprocessor ~= "" and pattern and pattern ~= "" then - local noftreatments = #treatments + 1 - local findpattern = string.topattern(pattern) - local preprocessors = utilities.parsers.settings_to_array(preprocessor) - treatments[noftreatments] = { - pattern = findpattern, - preprocessors = preprocessors, - } - report_prepfiles("step %s, pattern %a, preprocessor: %a",noftreatments,findpattern,preprocessors) - end - end - end - - local function needstreatment(oldfile) - for i=1,#treatments do - local treatment = treatments[i] - local pattern = treatment.pattern - if find(oldfile,pattern) then - return treatment - end - end - end - - local preparefile = #treatments > 0 and function(prepfiles,filename) - - local treatment = needstreatment(filename) - local oldfile = filename - local newfile = false - if treatment then - local preprocessors = treatment.preprocessors - local runners = { } - for i=1,#preprocessors do - local preprocessor = preprocessors[i] - local command = commands[preprocessor] - if command then - local runner = command.runner - local suffix = command.suffix - local result = filename .. "." .. 
suffix - if runlocal then - result = file.basename(result) - end - variables.old = oldfile - variables.new = result - runner = utilities.templates.replace(runner,variables) - if runner and runner ~= "" then - runners[#runners+1] = runner - oldfile = result - if runlocal then - oldfile = file.basename(oldfile) - end - newfile = oldfile - end - end - end - if not newfile then - newfile = filename - elseif file.needsupdating(filename,newfile) then - for i=1,#runners do - report_prepfiles("step %i: %s",i,runners[i]) - end - -- - for i=1,#runners do - local command = runners[i] - report_prepfiles("command: %s",command) - local result = os.spawn(command) or 0 - -- if result > 0 then - -- report_prepfiles("error, return code: %s",result) - -- end - end - if lfs.isfile(newfile) then - file.syncmtimes(filename,newfile) - report_prepfiles("%a is converted to %a",filename,newfile) - else - report_prepfiles("%a is not converted to %a",filename,newfile) - newfile = filename - end - elseif lfs.isfile(newfile) then - report_prepfiles("%a is already converted to %a",filename,newfile) - end - else - newfile = filename - end - prepfiles[filename] = newfile - -- in case we ask twice (with the prepped name) ... todo: avoid this mess - prepfiles[newfile] = newfile - return newfile - end - - table.setmetatableindex(ctxrunner.prepfiles,preparefile or dontpreparefile) - - -- we need to deal with the input filename as it has already be resolved - -end - --- print("\n") --- document = { --- options = { --- ctxfile = { --- modes = { }, --- modules = { }, --- environments = { }, --- } --- } --- } --- environment.arguments.input = "test.tex" --- ctxrunner.load("x-ldx.ctx") - -local function resolve(name) -- used a few times later on - return ctxrunner.prepfiles[file.collapsepath(name)] or false -end - -local processfile = commands.processfile -local doifinputfileelse = commands.doifinputfileelse - -function commands.processfile(name,maxreadlevel) -- overloaded - local prepname = resolve(name) - if prepname then - return processfile(prepname,0) - end - return processfile(name,maxreadlevel) -end - -function commands.doifinputfileelse(name,depth) - local prepname = resolve(name) - if prepname then - return doifinputfileelse(prepname,0) - end - return doifinputfileelse(name,depth) -end - -function commands.preparedfile(name) - return resolve(name) or name -end - -function commands.getctxfile() - local ctxfile = document.arguments.ctx or "" - if ctxfile ~= "" then - ctxrunner.load(ctxfile) -- do we need to locate it? - end -end - -function ctxrunner.resolve(name) -- used a few times later on - local collapsedname = file.collapsepath(name,".") - return ctxrunner.prepfiles[collapsedname] or collapsedname -end - --- ctxrunner.load("t:/sources/core-ctx.ctx") - --- context(ctxrunner.prepfiles["one-a.xml"]) context.par() --- context(ctxrunner.prepfiles["one-b.xml"]) context.par() --- context(ctxrunner.prepfiles["two-c.xml"]) context.par() --- context(ctxrunner.prepfiles["two-d.xml"]) context.par() --- context(ctxrunner.prepfiles["all-x.xml"]) context.par() - --- inspect(ctxrunner.prepfiles) +if not modules then modules = { } end modules ['core-ctx'] = { + version = 1.001, + comment = "companion to core-ctx.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ +Job control files aka ctx files are rather old and date from the mkii times. 
+They were handled in texexec and mtx-context and deals with modes, modules, +environments and preprocessing in projects where one such file drives the +processing of lots of files without the need to provide command line +arguments. + +In mkiv this concept was of course supported as well. The first implementation +of mtx-context took much of the approach of texexec, but by now we have gotten +rid of the option file (for passing modes, modules and environments), the stubs +(for directly processing cld and xml) as well as the preprocessing component +of the ctx files. Special helper features, like typesetting listings, were +already moved to the extras (a direct side effect of the ability to pass along +command line arguments.) All this made mtx-context more simple than its ancestor +texexec. + +Because some of the modes might affect the mtx-context end, the ctx file is +still loaded there but only for getting the modes. The file is loaded again +during the run but as loading and basic processing takes less than a +millisecond it's not that much of a burden. +--]] + +-- the ctxrunner tabel might either become private or move to the job namespace +-- which also affects the loading order + +local trace_prepfiles = false trackers.register("system.prepfiles", function(v) trace_prepfiles = v end) + +local gsub, find, match, validstring = string.gsub, string.find, string.match, string.valid +local concat = table.concat +local xmltext = xml.text + +local report_prepfiles = logs.reporter("system","prepfiles") + +commands = commands or { } +local commands = commands + +ctxrunner = ctxrunner or { } + +ctxrunner.prepfiles = utilities.storage.allocate() + +local function dontpreparefile(t,k) + return k -- we only store when we have a prepper +end + +table.setmetatableindex(ctxrunner.prepfiles,dontpreparefile) + +local function filtered(str,method) -- in resolvers? + str = tostring(str) + if method == 'name' then str = file.nameonly(str) + elseif method == 'path' then str = file.dirname(str) + elseif method == 'suffix' then str = file.suffix(str) + elseif method == 'nosuffix' then str = file.removesuffix(str) + elseif method == 'nopath' then str = file.basename(str) + elseif method == 'base' then str = file.basename(str) +-- elseif method == 'full' then +-- elseif method == 'complete' then +-- elseif method == 'expand' then -- str = file.expandpath(str) + end + return (gsub(str,"\\","/")) +end + +-- local function substitute(e,str) +-- local attributes = e.at +-- if str and attributes then +-- if attributes['method'] then +-- str = filtered(str,attributes['method']) +-- end +-- if str == "" and attributes['default'] then +-- str = attributes['default'] +-- end +-- end +-- return str +-- end + +local function substitute(str) + return str +end + +local function justtext(str) + str = xml.unescaped(tostring(str)) + str = xml.cleansed(str) + str = gsub(str,"\\+",'/') + str = gsub(str,"%s+",' ') + return str +end + +function ctxrunner.load(ctxname) + + local xmldata = xml.load(ctxname) + + local jobname = tex.jobname -- todo + + local variables = { job = jobname } + local commands = { } + local flags = { } + local paths = { } -- todo + local treatments = { } + local suffix = "prep" + + xml.include(xmldata,'ctx:include','name', {'.', file.dirname(ctxname), "..", "../.." 
}) + + for e in xml.collected(xmldata,"/ctx:job/ctx:flags/ctx:flag") do + local flag = xmltext(e) + local key, value = match(flag,"^(.-)=(.+)$") + if key and value then + environment.setargument(key,value) + else + environment.setargument(flag,true) + end + end + + -- add to document.options.ctxfile[...] + + local ctxfile = document.options.ctxfile + + local modes = ctxfile.modes + local modules = ctxfile.modules + local environments = ctxfile.environments + + for e in xml.collected(xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:mode") do + modes[#modes+1] = xmltext(e) + end + + for e in xml.collected(xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:module") do + modules[#modules+1] = xmltext(e) + end + + for e in xml.collected(xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:environment") do + environments[#environments+1] = xmltext(e) + end + + for e in xml.collected(xmldata,"ctx:message") do + report_prepfiles("ctx comment: %s", xmltext(e)) + end + + for r, d, k in xml.elements(xmldata,"ctx:value[@name='job']") do + d[k] = variables['job'] or "" + end + + for e in xml.collected(xmldata,"/ctx:job/ctx:preprocess/ctx:processors/ctx:processor") do + local name = e.at and e.at['name'] or "unknown" + local suffix = e.at and e.at['suffix'] or "prep" + for r, d, k in xml.elements(command,"ctx:old") do + d[k] = "%old%" + end + for r, d, k in xml.elements(e,"ctx:new") do + d[k] = "%new%" + end + for r, d, k in xml.elements(e,"ctx:value") do + local tag = d[k].at['name'] + if tag then + d[k] = "%" .. tag .. "%" + end + end + local runner = xml.textonly(e) + if runner and runner ~= "" then + commands[name] = { + suffix = suffix, + runner = runner, + } + end + end + + local suffix = xml.filter(xmldata,"xml:///ctx:job/ctx:preprocess/attribute('suffix')") or suffix + local runlocal = xml.filter(xmldata,"xml:///ctx:job/ctx:preprocess/ctx:processors/attribute('local')") + + runlocal = toboolean(runlocal) + + -- todo: only collect, then plug into file handler + + local inputfile = validstring(environment.arguments.input) or jobname + + variables.old = inputfile + + for files in xml.collected(xmldata,"/ctx:job/ctx:preprocess/ctx:files") do + for pattern in xml.collected(files,"ctx:file") do + local preprocessor = pattern.at['processor'] or "" + for r, d, k in xml.elements(pattern,"/ctx:old") do + d[k] = jobname + end + for r, d, k in xml.elements(pattern,"/ctx:value[@name='old'") do + d[k] = jobname + end + pattern =justtext(xml.tostring(pattern)) + if preprocessor and preprocessor ~= "" and pattern and pattern ~= "" then + local noftreatments = #treatments + 1 + local findpattern = string.topattern(pattern) + local preprocessors = utilities.parsers.settings_to_array(preprocessor) + treatments[noftreatments] = { + pattern = findpattern, + preprocessors = preprocessors, + } + report_prepfiles("step %s, pattern %a, preprocessor: %a",noftreatments,findpattern,preprocessors) + end + end + end + + local function needstreatment(oldfile) + for i=1,#treatments do + local treatment = treatments[i] + local pattern = treatment.pattern + if find(oldfile,pattern) then + return treatment + end + end + end + + local preparefile = #treatments > 0 and function(prepfiles,filename) + + local treatment = needstreatment(filename) + local oldfile = filename + local newfile = false + if treatment then + local preprocessors = treatment.preprocessors + local runners = { } + for i=1,#preprocessors do + local preprocessor = preprocessors[i] + local command = commands[preprocessor] + if command then + local runner = command.runner + 
local suffix = command.suffix + local result = filename .. "." .. suffix + if runlocal then + result = file.basename(result) + end + variables.old = oldfile + variables.new = result + runner = utilities.templates.replace(runner,variables) + if runner and runner ~= "" then + runners[#runners+1] = runner + oldfile = result + if runlocal then + oldfile = file.basename(oldfile) + end + newfile = oldfile + end + end + end + if not newfile then + newfile = filename + elseif file.needsupdating(filename,newfile) then + for i=1,#runners do + report_prepfiles("step %i: %s",i,runners[i]) + end + -- + for i=1,#runners do + local command = runners[i] + report_prepfiles("command: %s",command) + local result = os.spawn(command) or 0 + -- if result > 0 then + -- report_prepfiles("error, return code: %s",result) + -- end + end + if lfs.isfile(newfile) then + file.syncmtimes(filename,newfile) + report_prepfiles("%a is converted to %a",filename,newfile) + else + report_prepfiles("%a is not converted to %a",filename,newfile) + newfile = filename + end + elseif lfs.isfile(newfile) then + report_prepfiles("%a is already converted to %a",filename,newfile) + end + else + newfile = filename + end + prepfiles[filename] = newfile + -- in case we ask twice (with the prepped name) ... todo: avoid this mess + prepfiles[newfile] = newfile + return newfile + end + + table.setmetatableindex(ctxrunner.prepfiles,preparefile or dontpreparefile) + + -- we need to deal with the input filename as it has already be resolved + +end + +-- print("\n") +-- document = { +-- options = { +-- ctxfile = { +-- modes = { }, +-- modules = { }, +-- environments = { }, +-- } +-- } +-- } +-- environment.arguments.input = "test.tex" +-- ctxrunner.load("x-ldx.ctx") + +local function resolve(name) -- used a few times later on + return ctxrunner.prepfiles[file.collapsepath(name)] or false +end + +local processfile = commands.processfile +local doifinputfileelse = commands.doifinputfileelse + +function commands.processfile(name,maxreadlevel) -- overloaded + local prepname = resolve(name) + if prepname then + return processfile(prepname,0) + end + return processfile(name,maxreadlevel) +end + +function commands.doifinputfileelse(name,depth) + local prepname = resolve(name) + if prepname then + return doifinputfileelse(prepname,0) + end + return doifinputfileelse(name,depth) +end + +function commands.preparedfile(name) + return resolve(name) or name +end + +function commands.getctxfile() + local ctxfile = document.arguments.ctx or "" + if ctxfile ~= "" then + ctxrunner.load(ctxfile) -- do we need to locate it? 
+ end +end + +function ctxrunner.resolve(name) -- used a few times later on + local collapsedname = file.collapsepath(name,".") + return ctxrunner.prepfiles[collapsedname] or collapsedname +end + +-- ctxrunner.load("t:/sources/core-ctx.ctx") + +-- context(ctxrunner.prepfiles["one-a.xml"]) context.par() +-- context(ctxrunner.prepfiles["one-b.xml"]) context.par() +-- context(ctxrunner.prepfiles["two-c.xml"]) context.par() +-- context(ctxrunner.prepfiles["two-d.xml"]) context.par() +-- context(ctxrunner.prepfiles["all-x.xml"]) context.par() + +-- inspect(ctxrunner.prepfiles) diff --git a/tex/context/base/core-dat.lua b/tex/context/base/core-dat.lua index 826d3a675..80e0f60f7 100644 --- a/tex/context/base/core-dat.lua +++ b/tex/context/base/core-dat.lua @@ -1,269 +1,269 @@ -if not modules then modules = { } end modules ['core-dat'] = { - version = 1.001, - comment = "companion to core-dat.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

This module provides a (multipass) container for arbitrary data. It -replaces the twopass data mechanism.

---ldx]]-- - -local tonumber, tostring, type = tonumber, tostring, type - -local context, commands = context, commands - -local trace_datasets = false trackers.register("job.datasets" , function(v) trace_datasets = v end) -local trace_pagestates = false trackers.register("job.pagestates", function(v) trace_pagestates = v end) - -local report_dataset = logs.reporter("dataset") -local report_pagestate = logs.reporter("pagestate") - -local allocate = utilities.storage.allocate -local settings_to_hash = utilities.parsers.settings_to_hash -local texcount = tex.count -local formatters = string.formatters -local v_yes = interfaces.variables.yes - -local new_latelua = nodes.pool.latelua - -local collected = allocate() -local tobesaved = allocate() - -local datasets = { - collected = collected, - tobesaved = tobesaved, -} - -job.datasets = datasets - -local function initializer() - collected = datasets.collected - tobesaved = datasets.tobesaved -end - -job.register('job.datasets.collected', tobesaved, initializer, nil) - -local sets = { } - -table.setmetatableindex(tobesaved, function(t,k) - local v = { } - t[k] = v - return v -end) - -table.setmetatableindex(sets, function(t,k) - local v = { - index = 0, - order = 0, - } - t[k] = v - return v -end) - -local function setdata(settings) - local name = settings.name - local tag = settings.tag - local data = settings.data - local list = tobesaved[name] - if settings.convert and type(data) == "string" then - data = settings_to_hash(data) - end - if type(data) ~= "table" then - data = { data = settings.data } - end - if not tag then - tag = #list + 1 - else - tag = tonumber(tag) or tag -- autonumber saves keys - end - list[tag] = data - if settings.delay == v_yes then - local set = sets[name] - local index = set.index + 1 - set.index = index - data.index = index - data.order = index - data.realpage = texcount.realpageno - if trace_datasets then - report_dataset("action %a, name %a, tag %a, index %a","assign delayed",name,tag,index) - end - elseif trace_datasets then - report_dataset("action %a, name %a, tag %a","assign immediate",name,tag) - end - return name, tag, data -end - -datasets.setdata = setdata - -function datasets.extend(name,tag) - local set = sets[name] - local order = set.order + 1 - local realpage = texcount.realpageno - set.order = order - local t = tobesaved[name][tag] - t.realpage = realpage - t.order = order - if trace_datasets then - report_dataset("action %a, name %a, tag %a, page %a, index %a","flush by order",name,tag,t.index or 0,order,realpage) - end -end - -function datasets.getdata(name,tag,key,default) - local t = collected[name] - if t == nil then - if trace_datasets then - report_dataset("error: unknown dataset, name %a",name) - end - elseif type(t) ~= "table" then - return t - else - t = t[tag] or t[tonumber(tag)] - if not t then - if trace_datasets then - report_dataset("error: unknown dataset, name %a, tag %a",name,tag) - end - elseif key then - return t[key] or default - else - return t - end - end - return default -end - -function commands.setdataset(settings) - settings.convert = true - local name, tag = setdata(settings) - if settings.delay ~= v_yes then - -- - elseif type(tag) == "number" then - context(new_latelua(formatters["job.datasets.extend(%q,%i)"](name,tag))) - else - context(new_latelua(formatters["job.datasets.extend(%q,%q)"](name,tag))) - end -end - -function commands.datasetvariable(name,tag,key) - local t = collected[name] - if t == nil then - if trace_datasets then - report_dataset("error: unknown 
dataset, name %a, tag %a, not passed to tex",name) -- no tag - end - elseif type(t) ~= "table" then - context(tostring(t)) - else - t = t and (t[tag] or t[tonumber(tag)]) - if not t then - if trace_datasets then - report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name,tag) - end - elseif type(t) == "table" then - local s = t[key] - if type(s) ~= "table" then - context(tostring(s)) - elseif trace_datasets then - report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name,tag) - end - end - end -end - ---[[ldx-- -

We also provide an efficient variant for page states.

---ldx]]-- - -local collected = allocate() -local tobesaved = allocate() - -local pagestates = { - collected = collected, - tobesaved = tobesaved, -} - -job.pagestates = pagestates - -local function initializer() - collected = pagestates.collected - tobesaved = pagestates.tobesaved -end - -job.register('job.pagestates.collected', tobesaved, initializer, nil) - -table.setmetatableindex(tobesaved, function(t,k) - local v = { } - t[k] = v - return v -end) - -local function setstate(settings) - local name = settings.name - local tag = settings.tag - local list = tobesaved[name] - if not tag then - tag = #list + 1 - else - tag = tonumber(tag) or tag -- autonumber saves keys - end - local realpage = texcount.realpageno - local data = realpage - list[tag] = data - if trace_pagestates then - report_pagestate("action %a, name %a, tag %a, preset %a","set",name,tag,realpage) - end - return name, tag, data -end - -pagestates.setstate = setstate - -function pagestates.extend(name,tag) - local realpage = texcount.realpageno - if trace_pagestates then - report_pagestate("action %a, name %a, tag %a, preset %a","synchronize",name,tag,realpage) - end - tobesaved[name][tag] = realpage -end - -function pagestates.realpage(name,tag,default) - local t = collected[name] - if t then - t = t[tag] or t[tonumber(tag)] - if t then - return tonumber(t or default) - elseif trace_pagestates then - report_pagestate("error: unknown dataset, name %a, tag %a",name,tag) - end - elseif trace_pagestates then - report_pagestate("error: unknown dataset, name %a, tag %a",name) -- nil - end - return default -end - -function commands.setpagestate(settings) - local name, tag, data = setstate(settings) - if type(tag) == "number" then - context(new_latelua(formatters["job.pagestates.extend(%q,%i)"](name,tag))) - else - context(new_latelua(formatters["job.pagestates.extend(%q,%q)"](name,tag))) - end -end - -function commands.pagestaterealpage(name,tag) - local t = collected[name] - t = t and (t[tag] or t[tonumber(tag)]) - if t then - context(t) - end -end - -function commands.setpagestaterealpageno(name,tag) - local t = collected[name] - t = t and (t[tag] or t[tonumber(tag)]) - if t then - texcount.realpagestateno = t - else - texcount.realpagestateno = texcount.realpageno - end -end +if not modules then modules = { } end modules ['core-dat'] = { + version = 1.001, + comment = "companion to core-dat.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

This module provides a (multipass) container for arbitrary data. It +replaces the twopass data mechanism.

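A minimal usage sketch of the dataset interface defined below; the set name "mydata", tag "first", and key "status" are invented for illustration:

job.datasets.setdata {
    name = "mydata",
    tag  = "first",
    data = { status = "ok" },
}
-- collected data stems from the previous run, so a default is useful:
local status = job.datasets.getdata("mydata", "first", "status", "unknown")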
+--ldx]]-- + +local tonumber, tostring, type = tonumber, tostring, type + +local context, commands = context, commands + +local trace_datasets = false trackers.register("job.datasets" , function(v) trace_datasets = v end) +local trace_pagestates = false trackers.register("job.pagestates", function(v) trace_pagestates = v end) + +local report_dataset = logs.reporter("dataset") +local report_pagestate = logs.reporter("pagestate") + +local allocate = utilities.storage.allocate +local settings_to_hash = utilities.parsers.settings_to_hash +local texcount = tex.count +local formatters = string.formatters +local v_yes = interfaces.variables.yes + +local new_latelua = nodes.pool.latelua + +local collected = allocate() +local tobesaved = allocate() + +local datasets = { + collected = collected, + tobesaved = tobesaved, +} + +job.datasets = datasets + +local function initializer() + collected = datasets.collected + tobesaved = datasets.tobesaved +end + +job.register('job.datasets.collected', tobesaved, initializer, nil) + +local sets = { } + +table.setmetatableindex(tobesaved, function(t,k) + local v = { } + t[k] = v + return v +end) + +table.setmetatableindex(sets, function(t,k) + local v = { + index = 0, + order = 0, + } + t[k] = v + return v +end) + +local function setdata(settings) + local name = settings.name + local tag = settings.tag + local data = settings.data + local list = tobesaved[name] + if settings.convert and type(data) == "string" then + data = settings_to_hash(data) + end + if type(data) ~= "table" then + data = { data = settings.data } + end + if not tag then + tag = #list + 1 + else + tag = tonumber(tag) or tag -- autonumber saves keys + end + list[tag] = data + if settings.delay == v_yes then + local set = sets[name] + local index = set.index + 1 + set.index = index + data.index = index + data.order = index + data.realpage = texcount.realpageno + if trace_datasets then + report_dataset("action %a, name %a, tag %a, index %a","assign delayed",name,tag,index) + end + elseif trace_datasets then + report_dataset("action %a, name %a, tag %a","assign immediate",name,tag) + end + return name, tag, data +end + +datasets.setdata = setdata + +function datasets.extend(name,tag) + local set = sets[name] + local order = set.order + 1 + local realpage = texcount.realpageno + set.order = order + local t = tobesaved[name][tag] + t.realpage = realpage + t.order = order + if trace_datasets then + report_dataset("action %a, name %a, tag %a, page %a, index %a","flush by order",name,tag,t.index or 0,order,realpage) + end +end + +function datasets.getdata(name,tag,key,default) + local t = collected[name] + if t == nil then + if trace_datasets then + report_dataset("error: unknown dataset, name %a",name) + end + elseif type(t) ~= "table" then + return t + else + t = t[tag] or t[tonumber(tag)] + if not t then + if trace_datasets then + report_dataset("error: unknown dataset, name %a, tag %a",name,tag) + end + elseif key then + return t[key] or default + else + return t + end + end + return default +end + +function commands.setdataset(settings) + settings.convert = true + local name, tag = setdata(settings) + if settings.delay ~= v_yes then + -- + elseif type(tag) == "number" then + context(new_latelua(formatters["job.datasets.extend(%q,%i)"](name,tag))) + else + context(new_latelua(formatters["job.datasets.extend(%q,%q)"](name,tag))) + end +end + +function commands.datasetvariable(name,tag,key) + local t = collected[name] + if t == nil then + if trace_datasets then + report_dataset("error: unknown 
dataset, name %a, tag %a, not passed to tex",name) -- no tag + end + elseif type(t) ~= "table" then + context(tostring(t)) + else + t = t and (t[tag] or t[tonumber(tag)]) + if not t then + if trace_datasets then + report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name,tag) + end + elseif type(t) == "table" then + local s = t[key] + if type(s) ~= "table" then + context(tostring(s)) + elseif trace_datasets then + report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name,tag) + end + end + end +end + +--[[ldx-- +

We also provide an efficient variant for page states.

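A minimal sketch of the page state variant defined below; the state name "visited" and tag "intro" are invented:

job.pagestates.setstate { name = "visited", tag = "intro" }
-- the recorded real page number only becomes available in a next run, hence the default:
local page = job.pagestates.realpage("visited", "intro", 0)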
+--ldx]]-- + +local collected = allocate() +local tobesaved = allocate() + +local pagestates = { + collected = collected, + tobesaved = tobesaved, +} + +job.pagestates = pagestates + +local function initializer() + collected = pagestates.collected + tobesaved = pagestates.tobesaved +end + +job.register('job.pagestates.collected', tobesaved, initializer, nil) + +table.setmetatableindex(tobesaved, function(t,k) + local v = { } + t[k] = v + return v +end) + +local function setstate(settings) + local name = settings.name + local tag = settings.tag + local list = tobesaved[name] + if not tag then + tag = #list + 1 + else + tag = tonumber(tag) or tag -- autonumber saves keys + end + local realpage = texcount.realpageno + local data = realpage + list[tag] = data + if trace_pagestates then + report_pagestate("action %a, name %a, tag %a, preset %a","set",name,tag,realpage) + end + return name, tag, data +end + +pagestates.setstate = setstate + +function pagestates.extend(name,tag) + local realpage = texcount.realpageno + if trace_pagestates then + report_pagestate("action %a, name %a, tag %a, preset %a","synchronize",name,tag,realpage) + end + tobesaved[name][tag] = realpage +end + +function pagestates.realpage(name,tag,default) + local t = collected[name] + if t then + t = t[tag] or t[tonumber(tag)] + if t then + return tonumber(t or default) + elseif trace_pagestates then + report_pagestate("error: unknown dataset, name %a, tag %a",name,tag) + end + elseif trace_pagestates then + report_pagestate("error: unknown dataset, name %a, tag %a",name) -- nil + end + return default +end + +function commands.setpagestate(settings) + local name, tag, data = setstate(settings) + if type(tag) == "number" then + context(new_latelua(formatters["job.pagestates.extend(%q,%i)"](name,tag))) + else + context(new_latelua(formatters["job.pagestates.extend(%q,%q)"](name,tag))) + end +end + +function commands.pagestaterealpage(name,tag) + local t = collected[name] + t = t and (t[tag] or t[tonumber(tag)]) + if t then + context(t) + end +end + +function commands.setpagestaterealpageno(name,tag) + local t = collected[name] + t = t and (t[tag] or t[tonumber(tag)]) + if t then + texcount.realpagestateno = t + else + texcount.realpagestateno = texcount.realpageno + end +end diff --git a/tex/context/base/core-env.lua b/tex/context/base/core-env.lua index 025192d4b..c85a5e144 100644 --- a/tex/context/base/core-env.lua +++ b/tex/context/base/core-env.lua @@ -1,154 +1,154 @@ -if not modules then modules = { } end modules ['core-env'] = { - version = 1.001, - comment = "companion to core-env.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- maybe this will move to the context name space although the --- plurals are unlikely to clash with future tex primitives --- --- if tex.modes['xxxx'] then .... else .... 
end - -local P, C, S, Cc, lpegmatch, patterns = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc, lpeg.match, lpeg.patterns - -local csname_id = token.csname_id -local create = token.create -local texcount = tex.count -local texsetcount = tex.setcount - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex - -local undefined = csname_id("*undefined*crap*") -local iftrue = create("iftrue")[2] -- inefficient hack - -tex.modes = allocate { } -tex.systemmodes = allocate { } -tex.constants = allocate { } -tex.conditionals = allocate { } -tex.ifs = allocate { } - -local modes = { } -local systemmodes = { } - -setmetatableindex(tex.modes, function(t,k) - local m = modes[k] - if m then - return m() - else - local n = "mode>" .. k - if csname_id(n) == undefined then - return false - else - modes[k] = function() return texcount[n] >= 1 end - return texcount[n] >= 1 - end - end -end) - -setmetatableindex(tex.systemmodes, function(t,k) - local m = systemmodes[k] - if m then - return m() - else - local n = "mode>*" .. k - if csname_id(n) == undefined then - return false - else - systemmodes[k] = function() return texcount[n] >= 1 end - return texcount[n] >= 1 - end - end -end) - -setmetatableindex(tex.constants, function(t,k) - return csname_id(k) ~= undefined and texcount[k] or 0 -end) - -setmetatableindex(tex.conditionals, function(t,k) -- 0 == true - return csname_id(k) ~= undefined and texcount[k] == 0 -end) - -setmetatableindex(tex.ifs, function(t,k) - -- k = "if" .. k -- better not - return csname_id(k) ~= undefined and create(k)[2] == iftrue -- inefficient, this create, we need a helper -end) - --- todo : global - --- not possible as we let at the tex end to zerocount and plusone --- --- function tex.settrue(name,glob) --- if glob then --- texsetcount("global",name,0) --- else --- texcount[name] = 0 --- end --- end --- --- function tex.setfalse(name,glob) --- if glob then --- texsetcount("global",name,1) --- else --- texcount[name] = 1 --- end --- end - ----- arg = P("{") * C(patterns.nested) * P("}") + Cc("") - -local sep = S("), ") -local str = C((1-sep)^1) -local tag = P("(") * C((1-S(")" ))^1) * P(")") -local arg = P("(") * C((1-S("){"))^1) * P("{") * C((1-P("}"))^0) * P("}") * P(")") - -local pattern = ( - P("lua") * tag / context.luasetup - + P("xml") * arg / context.setupwithargument -- or xmlw as xmlsetup has swapped arguments - + (P("tex") * tag + str) / context.texsetup - + sep^1 -)^1 - -function commands.autosetups(str) - lpegmatch(pattern,str) -end - --- new (inefficient) - -local lookuptoken = token.lookup - -local dimencode = lookuptoken("scratchdimen" )[1] -local countcode = lookuptoken("scratchcounter")[1] -local tokencode = lookuptoken("scratchtoken" )[1] -local skipcode = lookuptoken("scratchskip" )[1] - -local types = { - [dimencode] = "dimen", - [countcode] = "count", - [tokencode] = "token", - [skipcode ] = "skip", -} - -function tex.isdimen(name) - return lookuptoken(name)[1] == dimencode -end - -function tex.iscount(name) - return lookuptoken(name)[1] == countcode -end - -function tex.istoken(name) - return lookuptoken(name)[1] == tokencode -end - -function tex.isskip(name) - return lookuptoken(name)[1] == skipcode -end - -function tex.type(name) - return types[lookuptoken(name)[1]] or "macro" -end - --- inspect(tex.isdimen("xxxxxxxxxxxxxxx")) --- inspect(tex.isdimen("textwidth")) +if not modules then modules = { } end modules ['core-env'] = { + version = 1.001, + comment = "companion to core-env.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt 
NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- maybe this will move to the context name space although the +-- plurals are unlikely to clash with future tex primitives +-- +-- if tex.modes['xxxx'] then .... else .... end + +local P, C, S, Cc, lpegmatch, patterns = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc, lpeg.match, lpeg.patterns + +local csname_id = token.csname_id +local create = token.create +local texcount = tex.count +local texsetcount = tex.setcount + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex + +local undefined = csname_id("*undefined*crap*") +local iftrue = create("iftrue")[2] -- inefficient hack + +tex.modes = allocate { } +tex.systemmodes = allocate { } +tex.constants = allocate { } +tex.conditionals = allocate { } +tex.ifs = allocate { } + +local modes = { } +local systemmodes = { } + +setmetatableindex(tex.modes, function(t,k) + local m = modes[k] + if m then + return m() + else + local n = "mode>" .. k + if csname_id(n) == undefined then + return false + else + modes[k] = function() return texcount[n] >= 1 end + return texcount[n] >= 1 + end + end +end) + +setmetatableindex(tex.systemmodes, function(t,k) + local m = systemmodes[k] + if m then + return m() + else + local n = "mode>*" .. k + if csname_id(n) == undefined then + return false + else + systemmodes[k] = function() return texcount[n] >= 1 end + return texcount[n] >= 1 + end + end +end) + +setmetatableindex(tex.constants, function(t,k) + return csname_id(k) ~= undefined and texcount[k] or 0 +end) + +setmetatableindex(tex.conditionals, function(t,k) -- 0 == true + return csname_id(k) ~= undefined and texcount[k] == 0 +end) + +setmetatableindex(tex.ifs, function(t,k) + -- k = "if" .. 
k -- better not + return csname_id(k) ~= undefined and create(k)[2] == iftrue -- inefficient, this create, we need a helper +end) + +-- todo : global + +-- not possible as we let at the tex end to zerocount and plusone +-- +-- function tex.settrue(name,glob) +-- if glob then +-- texsetcount("global",name,0) +-- else +-- texcount[name] = 0 +-- end +-- end +-- +-- function tex.setfalse(name,glob) +-- if glob then +-- texsetcount("global",name,1) +-- else +-- texcount[name] = 1 +-- end +-- end + +---- arg = P("{") * C(patterns.nested) * P("}") + Cc("") + +local sep = S("), ") +local str = C((1-sep)^1) +local tag = P("(") * C((1-S(")" ))^1) * P(")") +local arg = P("(") * C((1-S("){"))^1) * P("{") * C((1-P("}"))^0) * P("}") * P(")") + +local pattern = ( + P("lua") * tag / context.luasetup + + P("xml") * arg / context.setupwithargument -- or xmlw as xmlsetup has swapped arguments + + (P("tex") * tag + str) / context.texsetup + + sep^1 +)^1 + +function commands.autosetups(str) + lpegmatch(pattern,str) +end + +-- new (inefficient) + +local lookuptoken = token.lookup + +local dimencode = lookuptoken("scratchdimen" )[1] +local countcode = lookuptoken("scratchcounter")[1] +local tokencode = lookuptoken("scratchtoken" )[1] +local skipcode = lookuptoken("scratchskip" )[1] + +local types = { + [dimencode] = "dimen", + [countcode] = "count", + [tokencode] = "token", + [skipcode ] = "skip", +} + +function tex.isdimen(name) + return lookuptoken(name)[1] == dimencode +end + +function tex.iscount(name) + return lookuptoken(name)[1] == countcode +end + +function tex.istoken(name) + return lookuptoken(name)[1] == tokencode +end + +function tex.isskip(name) + return lookuptoken(name)[1] == skipcode +end + +function tex.type(name) + return types[lookuptoken(name)[1]] or "macro" +end + +-- inspect(tex.isdimen("xxxxxxxxxxxxxxx")) +-- inspect(tex.isdimen("textwidth")) diff --git a/tex/context/base/core-sys.lua b/tex/context/base/core-sys.lua index 009ec16ea..2c76dac5f 100644 --- a/tex/context/base/core-sys.lua +++ b/tex/context/base/core-sys.lua @@ -1,101 +1,101 @@ -if not modules then modules = { } end modules ['core-sys'] = { - version = 1.001, - comment = "companion to core-sys.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local lower, format, gsub = string.lower, string.format, string.gsub -local suffixonly, basename, removesuffix = file.suffix, file.basename, file.removesuffix - -local environment = environment - -local report_files = logs.reporter("system","files") - --- function commands.updatefilenames(jobname,fulljobname,inputfilename,outputfilename) --- -- --- environment.jobname = jobname --- -- --- local jobfilename = gsub(fulljobname or jobname or inputfilename or tex.jobname or "","%./","") --- -- --- environment.jobfilename = jobfilename --- environment.jobfilesuffix = lower(suffixonly(environment.jobfilename)) --- -- --- local inputfilename = gsub(inputfilename or "","%./","") --- environment.inputfilename = inputfilename --- environment.inputfilebarename = removesuffix(basename(inputfilename)) --- -- --- local inputfilerealsuffix = suffixonly(inputfilename) --- environment.inputfilerealsuffix = inputfilerealsuffix --- -- --- local inputfilesuffix = inputfilerealsuffix == "" and "tex" or lower(inputfilerealsuffix) --- environment.inputfilesuffix = inputfilesuffix --- -- --- local outputfilename = outputfilename or environment.inputfilebarename or "" --- environment.outputfilename = 
outputfilename --- -- --- local runpath = resolvers.cleanpath(lfs.currentdir()) --- environment.runpath = runpath --- -- --- statistics.register("running on path", function() --- return environment.runpath --- end) --- -- --- statistics.register("job file properties", function() --- return format("jobname %a, input %a, suffix %a",jobfilename,inputfilename,inputfilesuffix) --- end) --- -- --- end - -function environment.initializefilenames() -- commands.updatefilenames(jobname,fulljobname,input,result) - - local arguments = environment.arguments - - local jobname = arguments.jobname or tex.jobname - local fulljobname = arguments.fulljobname or jobname - local inputfilename = arguments.input or fulljobname - local outputfilename = arguments.result or removesuffix(jobname) - - local inputfilename = suffixonly(inputfilename) == "tex" and removesuffix(inputfilename) or inputfilename or "" - - local filename = fulljobname - local suffix = suffixonly(filename) - - local filename = ctxrunner.resolve(filename) -- in case we're prepped - - local jobfilename = jobname or inputfilename or tex.jobname or "" - local inputfilename = inputfilename or "" - - local jobfilebase = basename(jobfilename) - local inputfilebase = basename(inputfilename) - - -- jobfilename = gsub(jobfilename, "^./","") - -- inputfilename = gsub(inputfilename,"^./","") - - environment.jobfilename = jobfilebase - environment.jobfilesuffix = lower(suffixonly(jobfilebase)) - - environment.inputfilename = inputfilename -- so here we keep e.g. ./ or explicit paths - environment.inputfilebarename = removesuffix(inputfilebase) - environment.inputfilesuffix = lower(suffixonly(inputfilebase)) - - environment.outputfilename = outputfilename or environment.inputfilebarename or "" - - environment.filename = filename - environment.suffix = suffix - - report_files("jobname %a, input %a, result %a",jobfilename,inputfilename,outputfilename) - - function environment.initializefilenames() end -end - -statistics.register("result saved in file", function() - -- suffix will be fetched from backend - local outputfilename = environment.outputfilename or environment.jobname or tex.jobname or "" - if tex.pdfoutput > 0 then - return format("%s.%s, compresslevel %s, objectcompreslevel %s",outputfilename,"pdf",tex.pdfcompresslevel, tex.pdfobjcompresslevel) - else - return format("%s.%s",outputfilename,"dvi") -- hard to imagine - end -end) +if not modules then modules = { } end modules ['core-sys'] = { + version = 1.001, + comment = "companion to core-sys.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local lower, format, gsub = string.lower, string.format, string.gsub +local suffixonly, basename, removesuffix = file.suffix, file.basename, file.removesuffix + +local environment = environment + +local report_files = logs.reporter("system","files") + +-- function commands.updatefilenames(jobname,fulljobname,inputfilename,outputfilename) +-- -- +-- environment.jobname = jobname +-- -- +-- local jobfilename = gsub(fulljobname or jobname or inputfilename or tex.jobname or "","%./","") +-- -- +-- environment.jobfilename = jobfilename +-- environment.jobfilesuffix = lower(suffixonly(environment.jobfilename)) +-- -- +-- local inputfilename = gsub(inputfilename or "","%./","") +-- environment.inputfilename = inputfilename +-- environment.inputfilebarename = removesuffix(basename(inputfilename)) +-- -- +-- local inputfilerealsuffix = 
suffixonly(inputfilename) +-- environment.inputfilerealsuffix = inputfilerealsuffix +-- -- +-- local inputfilesuffix = inputfilerealsuffix == "" and "tex" or lower(inputfilerealsuffix) +-- environment.inputfilesuffix = inputfilesuffix +-- -- +-- local outputfilename = outputfilename or environment.inputfilebarename or "" +-- environment.outputfilename = outputfilename +-- -- +-- local runpath = resolvers.cleanpath(lfs.currentdir()) +-- environment.runpath = runpath +-- -- +-- statistics.register("running on path", function() +-- return environment.runpath +-- end) +-- -- +-- statistics.register("job file properties", function() +-- return format("jobname %a, input %a, suffix %a",jobfilename,inputfilename,inputfilesuffix) +-- end) +-- -- +-- end + +function environment.initializefilenames() -- commands.updatefilenames(jobname,fulljobname,input,result) + + local arguments = environment.arguments + + local jobname = arguments.jobname or tex.jobname + local fulljobname = arguments.fulljobname or jobname + local inputfilename = arguments.input or fulljobname + local outputfilename = arguments.result or removesuffix(jobname) + + local inputfilename = suffixonly(inputfilename) == "tex" and removesuffix(inputfilename) or inputfilename or "" + + local filename = fulljobname + local suffix = suffixonly(filename) + + local filename = ctxrunner.resolve(filename) -- in case we're prepped + + local jobfilename = jobname or inputfilename or tex.jobname or "" + local inputfilename = inputfilename or "" + + local jobfilebase = basename(jobfilename) + local inputfilebase = basename(inputfilename) + + -- jobfilename = gsub(jobfilename, "^./","") + -- inputfilename = gsub(inputfilename,"^./","") + + environment.jobfilename = jobfilebase + environment.jobfilesuffix = lower(suffixonly(jobfilebase)) + + environment.inputfilename = inputfilename -- so here we keep e.g. ./ or explicit paths + environment.inputfilebarename = removesuffix(inputfilebase) + environment.inputfilesuffix = lower(suffixonly(inputfilebase)) + + environment.outputfilename = outputfilename or environment.inputfilebarename or "" + + environment.filename = filename + environment.suffix = suffix + + report_files("jobname %a, input %a, result %a",jobfilename,inputfilename,outputfilename) + + function environment.initializefilenames() end +end + +statistics.register("result saved in file", function() + -- suffix will be fetched from backend + local outputfilename = environment.outputfilename or environment.jobname or tex.jobname or "" + if tex.pdfoutput > 0 then + return format("%s.%s, compresslevel %s, objectcompreslevel %s",outputfilename,"pdf",tex.pdfcompresslevel, tex.pdfobjcompresslevel) + else + return format("%s.%s",outputfilename,"dvi") -- hard to imagine + end +end) diff --git a/tex/context/base/core-two.lua b/tex/context/base/core-two.lua index d6e006e04..734ad8e31 100644 --- a/tex/context/base/core-two.lua +++ b/tex/context/base/core-two.lua @@ -1,157 +1,157 @@ -if not modules then modules = { } end modules ['core-two'] = { - version = 1.001, - comment = "companion to core-two.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local remove, concat = table.remove, table.concat -local allocate = utilities.storage.allocate - ---[[ldx-- -

We save multi-pass information in the main utility table. This is a -bit of a mess because we support old and new methods.

---ldx]]-- - -local collected = allocate() -local tobesaved = allocate() - -local jobpasses = { - collected = collected, - tobesaved = tobesaved, -} - -job.passes = jobpasses - -local function initializer() - collected = jobpasses.collected - tobesaved = jobpasses.tobesaved -end - -job.register('job.passes.collected', tobesaved, initializer, nil) - -local function allocate(id) - local p = tobesaved[id] - if not p then - p = { } - tobesaved[id] = p - end - return p -end - -jobpasses.define = allocate - -function jobpasses.save(id,str) - local jti = allocate(id) - jti[#jti+1] = str -end - -function jobpasses.savetagged(id,tag,str) - local jti = allocate(id) - jti[tag] = str -end - -function jobpasses.getdata(id,index,default) - local jti = collected[id] - local value = jit and jti[index] - return value ~= "" and value or default or "" -end - -function jobpasses.getfield(id,index,tag,default) - local jti = collected[id] - jti = jti and jti[index] - local value = jti and jti[tag] - return value ~= "" and value or default or "" -end - -function jobpasses.getcollected(id) - return collected[id] or { } -end - -function jobpasses.gettobesaved(id) - return allocate(id) -end - -local function get(id) - local jti = collected[id] - if jti and #jti > 0 then - return remove(jti,1) - end -end - -local function first(id) - local jti = collected[id] - if jti and #jti > 0 then - return jti[1] - end -end - -local function last(id) - local jti = collected[id] - if jti and #jti > 0 then - return jti[#jti] - end -end - -local function find(id,n) - local jti = collected[id] - if jti and jti[n] then - return jti[n] - end -end - -local function count(id) - local jti = collected[id] - return jti and #jti or 0 -end - -local function list(id) - local jti = collected[id] - if jti then - return concat(jti,',') - end -end - -local function inlist(id,str) - local jti = collected[id] - if jti then - for _, v in next, jti do - if v == str then - return true - end - end - end - return false -end - -local check = first - --- - -jobpasses.get = get -jobpasses.first = first -jobpasses.last = last -jobpasses.find = find -jobpasses.list = list -jobpasses.count = count -jobpasses.check = check -jobpasses.inlist = inlist - --- interface - -function commands.gettwopassdata (id) local r = get (id) if r then context(r) end end -function commands.getfirsttwopassdata(id) local r = first(id) if r then context(r) end end -function commands.getlasttwopassdata (id) local r = last (id) if r then context(r) end end -function commands.findtwopassdata (id,n) local r = find (id,n) if r then context(r) end end -function commands.gettwopassdatalist (id) local r = list (id) if r then context(r) end end -function commands.counttwopassdata (id) local r = count(id) if r then context(r) end end -function commands.checktwopassdata (id) local r = check(id) if r then context(r) end end - -commands.definetwopasslist = jobpasses.define -commands.savetwopassdata = jobpasses.save -commands.savetaggedtwopassdata = jobpasses.savetagged - -function commands.doifelseintwopassdata(id,str) - commands.doifelse(inlist(id,str)) -end +if not modules then modules = { } end modules ['core-two'] = { + version = 1.001, + comment = "companion to core-two.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local remove, concat = table.remove, table.concat +local allocate = utilities.storage.allocate + +--[[ldx-- +

We save multi-pass information in the main utility table. This is a +bit of a mess because we support old and new methods.

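A minimal sketch of the two-pass list interface defined below, assuming an invented list name "mylist":

job.passes.save("mylist", "some value")
-- in a following run the collected list can be consulted:
local first = job.passes.first("mylist")
local all   = job.passes.getcollected("mylist")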
+--ldx]]-- + +local collected = allocate() +local tobesaved = allocate() + +local jobpasses = { + collected = collected, + tobesaved = tobesaved, +} + +job.passes = jobpasses + +local function initializer() + collected = jobpasses.collected + tobesaved = jobpasses.tobesaved +end + +job.register('job.passes.collected', tobesaved, initializer, nil) + +local function allocate(id) + local p = tobesaved[id] + if not p then + p = { } + tobesaved[id] = p + end + return p +end + +jobpasses.define = allocate + +function jobpasses.save(id,str) + local jti = allocate(id) + jti[#jti+1] = str +end + +function jobpasses.savetagged(id,tag,str) + local jti = allocate(id) + jti[tag] = str +end + +function jobpasses.getdata(id,index,default) + local jti = collected[id] + local value = jit and jti[index] + return value ~= "" and value or default or "" +end + +function jobpasses.getfield(id,index,tag,default) + local jti = collected[id] + jti = jti and jti[index] + local value = jti and jti[tag] + return value ~= "" and value or default or "" +end + +function jobpasses.getcollected(id) + return collected[id] or { } +end + +function jobpasses.gettobesaved(id) + return allocate(id) +end + +local function get(id) + local jti = collected[id] + if jti and #jti > 0 then + return remove(jti,1) + end +end + +local function first(id) + local jti = collected[id] + if jti and #jti > 0 then + return jti[1] + end +end + +local function last(id) + local jti = collected[id] + if jti and #jti > 0 then + return jti[#jti] + end +end + +local function find(id,n) + local jti = collected[id] + if jti and jti[n] then + return jti[n] + end +end + +local function count(id) + local jti = collected[id] + return jti and #jti or 0 +end + +local function list(id) + local jti = collected[id] + if jti then + return concat(jti,',') + end +end + +local function inlist(id,str) + local jti = collected[id] + if jti then + for _, v in next, jti do + if v == str then + return true + end + end + end + return false +end + +local check = first + +-- + +jobpasses.get = get +jobpasses.first = first +jobpasses.last = last +jobpasses.find = find +jobpasses.list = list +jobpasses.count = count +jobpasses.check = check +jobpasses.inlist = inlist + +-- interface + +function commands.gettwopassdata (id) local r = get (id) if r then context(r) end end +function commands.getfirsttwopassdata(id) local r = first(id) if r then context(r) end end +function commands.getlasttwopassdata (id) local r = last (id) if r then context(r) end end +function commands.findtwopassdata (id,n) local r = find (id,n) if r then context(r) end end +function commands.gettwopassdatalist (id) local r = list (id) if r then context(r) end end +function commands.counttwopassdata (id) local r = count(id) if r then context(r) end end +function commands.checktwopassdata (id) local r = check(id) if r then context(r) end end + +commands.definetwopasslist = jobpasses.define +commands.savetwopassdata = jobpasses.save +commands.savetaggedtwopassdata = jobpasses.savetagged + +function commands.doifelseintwopassdata(id,str) + commands.doifelse(inlist(id,str)) +end diff --git a/tex/context/base/core-uti.lua b/tex/context/base/core-uti.lua index 96ccdca48..f5003a132 100644 --- a/tex/context/base/core-uti.lua +++ b/tex/context/base/core-uti.lua @@ -1,294 +1,294 @@ -if not modules then modules = { } end modules ['core-uti'] = { - version = 1.001, - comment = "companion to core-uti.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = 
"see context related readme files" -} - --- todo: keep track of changes here (hm, track access, and only true when --- accessed and changed) - ---[[ldx-- -

A utility file has always been part of ConTeXt and with -the move to LuaTeX we also moved a lot of multi-pass info -to a Lua table. Instead of loading a TeX based -utility file under different setups, we now load a table once. This -saves much runtime but at the cost of more memory usage.

---ldx]]-- - -local format, match = string.format, string.match -local next, type, tostring = next, type, tostring -local concat = table.concat -local texcount = tex.count - -local definetable = utilities.tables.definetable -local accesstable = utilities.tables.accesstable -local migratetable = utilities.tables.migratetable -local serialize = table.serialize -local packers = utilities.packers -local allocate = utilities.storage.allocate -local mark = utilities.storage.mark - -local report_passes = logs.reporter("job","passes") - -job = job or { } -local job = job - -job.version = 1.22 -- make sure we don't have old lua 5.1 hash leftovers -job.packversion = 1.02 -- make sure we don't have old lua 5.1 hash leftovers - --- some day we will implement loading of other jobs and then we need --- job.jobs - ---[[ldx-- -

Variables are saved using Lua in the previously defined table and passed -onto TeX using the following method. Of course one can also -directly access the variable using a Lua call.

---ldx]]-- - -local savelist, comment = { }, { } - -function job.comment(key,value) - comment[key] = value -end - -job.comment("version",job.version) - -local enabled = true - -directives.register("job.save",function(v) enabled = v end) - -function job.disablesave() -- can be command - enabled = false -end - -function job.initialize(loadname,savename) - job.load(loadname) -- has to come after structure is defined ! - luatex.registerstopactions(function() - if enabled and not status.lasterrorstring or status.lasterrorstring == "" then - job.save(savename) - end - end) -end - -function job.register(collected, tobesaved, initializer, finalizer) - savelist[#savelist+1] = { collected, tobesaved, initializer, finalizer } -end - --- as an example we implement variables - -local tobesaved, collected, checksums = allocate(), allocate(), allocate() - -local jobvariables = { - collected = collected, - tobesaved = tobesaved, - checksums = checksums, -} - -job.variables = jobvariables - -if not checksums.old then checksums.old = md5.HEX("old") end -- used in experiment -if not checksums.new then checksums.new = md5.HEX("new") end -- used in experiment - -job.register('job.variables.checksums', checksums) - -local rmethod, rvalue - -local function initializer() - tobesaved = jobvariables.tobesaved - collected = jobvariables.collected - checksums = jobvariables.checksums - rvalue = collected.randomseed - if not rvalue then - rvalue = math.random() - math.setrandomseedi(rvalue,"initialize") - rmethod = "initialized" - else - math.setrandomseedi(rvalue,"previous run") - rmethod = "resumed" - end - tobesaved.randomseed = rvalue - for cs, value in next, collected do - context.setxvalue(cs,value) - end -end - -job.register('job.variables.collected', tobesaved, initializer) - -function jobvariables.save(cs,value) - tobesaved[cs] = value -end - -local packlist = { - "numbers", - "metadata", - "sectiondata", - "prefixdata", - "numberdata", - "pagedata", - "directives", - "specification", - "processors", -- might become key under directives or metadata --- "references", -- we need to rename of them as only one packs (not structures.lists.references) -} - -local jobpacker = packers.new(packlist,job.packversion) -- jump number when changs in hash - -job.pack = true --- job.pack = false - -directives.register("job.pack",function(v) pack = v end) - -local _save_, _load_, _others_ = { }, { }, { } -- registers timing - -function job.save(filename) -- we could return a table but it can get pretty large - statistics.starttiming(_save_) - local f = io.open(filename,'w') - if f then - f:write("local utilitydata = { }\n\n") - f:write(serialize(comment,"utilitydata.comment",true,true),"\n\n") - for l=1,#savelist do - local list = savelist[l] - local target = format("utilitydata.%s",list[1]) - local data = list[2] - local finalizer = list[4] - if type(finalizer) == "function" then - finalizer() - end - if job.pack then - packers.pack(data,jobpacker,true) - end - local definer, name = definetable(target,true,true) -- no first and no last - f:write(definer,"\n\n",serialize(data,name,true,true),"\n\n") - end - if job.pack then - packers.strip(jobpacker) - f:write(serialize(jobpacker,"utilitydata.job.packed",true,true),"\n\n") - end - f:write("return utilitydata") - f:close() - end - statistics.stoptiming(_save_) -end - -local function load(filename) - if lfs.isfile(filename) then - local okay, data = pcall(dofile,filename) - if okay and type(data) == "table" then - local jobversion = job.version - local datacomment = 
data.comment - local dataversion = datacomment and datacomment.version or "?" - if dataversion ~= jobversion then - report_passes("version mismatch: %s <> %s",dataversion,jobversion) - else - return data - end - else - os.remove(filename) -- probably a bad file - report_passes("removing stale job data file %a, restart job",filename) - os.exit(true) -- trigger second run - end - end -end - -function job.load(filename) - statistics.starttiming(_load_) - local utilitydata = load(filename) - if utilitydata then - local jobpacker = utilitydata.job.packed - for l=1,#savelist do - local list = savelist[l] - local target = list[1] - local initializer = list[3] - local result = accesstable(target,utilitydata) - local done = packers.unpack(result,jobpacker,true) - if done then - migratetable(target,mark(result)) - if type(initializer) == "function" then - initializer(result) - end - else - report_passes("pack version mismatch") - end - end - end - statistics.stoptiming(_load_) -end - -function job.loadother(filename) - statistics.starttiming(_load_) - _others_[#_others_+1] = file.nameonly(filename) - local utilitydata = load(filename) - if utilitydata then - local jobpacker = utilitydata.job.packed - local unpacked = { } - for l=1,#savelist do - local list = savelist[l] - local target = list[1] - local result = accesstable(target,utilitydata) - local done = packers.unpack(result,jobpacker,true) - if done then - migratetable(target,result,unpacked) - end - end - unpacked.job.packed = nil -- nicer in inspecting - return unpacked - end - statistics.stoptiming(_load_) -end - --- eventually this will end up in strc-ini - -statistics.register("startup time", function() - return statistics.elapsedseconds(statistics,"including runtime option file processing") -end) - -statistics.register("jobdata time",function() - if enabled then - if #_others_ > 0 then - return format("%s seconds saving, %s seconds loading, other files: %s",statistics.elapsedtime(_save_),statistics.elapsedtime(_load_),concat(_others_," ")) - else - return format("%s seconds saving, %s seconds loading",statistics.elapsedtime(_save_),statistics.elapsedtime(_load_)) - end - else - if #_others_ > 0 then - return format("nothing saved, %s seconds loading, other files: %s",statistics.elapsedtime(_load_),concat(_others_," ")) - else - return format("nothing saved, %s seconds loading",statistics.elapsedtime(_load_)) - end - end -end) - -statistics.register("callbacks", function() - local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0 - local pages = texcount['realpageno'] - 1 - if pages > 1 then - return format("direct: %s, indirect: %s, total: %s (%i per page)", total-indirect, indirect, total, total/pages) - else - return format("direct: %s, indirect: %s, total: %s", total-indirect, indirect, total) - end -end) - -statistics.register("randomizer", function() - if rmethod and rvalue then - return format("%s with value %s",rmethod,rvalue) - end -end) - -function statistics.formatruntime(runtime) - if not environment.initex then -- else error when testing as not counters yet - local shipped = texcount['nofshipouts'] - local pages = texcount['realpageno'] - if pages > shipped then - pages = shipped - end - if shipped > 0 or pages > 0 then - local persecond = shipped / runtime - if pages == 0 then pages = shipped end - return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second",runtime,pages,shipped,persecond) - else - return format("%s seconds",runtime) - end - end -end +if not modules then modules = 
{ } end modules ['core-uti'] = { + version = 1.001, + comment = "companion to core-uti.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: keep track of changes here (hm, track access, and only true when +-- accessed and changed) + +--[[ldx-- +

A utility file has always been part of ConTeXt and with +the move to LuaTeX we also moved a lot of multi-pass info +to a Lua table. Instead of loading a TeX based +utility file under different setups, we now load a table once. This +saves much runtime but at the cost of more memory usage.

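A minimal sketch of the registration pattern that the modules in this commit use to hook their multipass data into the shared utility table; the "job.demo" namespace is invented:

local collected = utilities.storage.allocate()
local tobesaved = utilities.storage.allocate()
job.demo = { collected = collected, tobesaved = tobesaved }
local function initializer()
    -- refresh the local references after the utility file has been loaded
    collected = job.demo.collected
    tobesaved = job.demo.tobesaved
end
job.register("job.demo.collected", tobesaved, initializer, nil)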
+--ldx]]-- + +local format, match = string.format, string.match +local next, type, tostring = next, type, tostring +local concat = table.concat +local texcount = tex.count + +local definetable = utilities.tables.definetable +local accesstable = utilities.tables.accesstable +local migratetable = utilities.tables.migratetable +local serialize = table.serialize +local packers = utilities.packers +local allocate = utilities.storage.allocate +local mark = utilities.storage.mark + +local report_passes = logs.reporter("job","passes") + +job = job or { } +local job = job + +job.version = 1.22 -- make sure we don't have old lua 5.1 hash leftovers +job.packversion = 1.02 -- make sure we don't have old lua 5.1 hash leftovers + +-- some day we will implement loading of other jobs and then we need +-- job.jobs + +--[[ldx-- +

Variables are saved using Lua in the previously defined table and passed +onto TeX using the following method. Of course one can also +directly access the variable using a Lua call.

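A minimal sketch of this variable mechanism; the macro name "MyTotal" is invented. On the next run the collected value is pushed back to TeX via setxvalue, so \MyTotal then expands to it:

job.variables.save("MyTotal", 123)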
+--ldx]]-- + +local savelist, comment = { }, { } + +function job.comment(key,value) + comment[key] = value +end + +job.comment("version",job.version) + +local enabled = true + +directives.register("job.save",function(v) enabled = v end) + +function job.disablesave() -- can be command + enabled = false +end + +function job.initialize(loadname,savename) + job.load(loadname) -- has to come after structure is defined ! + luatex.registerstopactions(function() + if enabled and not status.lasterrorstring or status.lasterrorstring == "" then + job.save(savename) + end + end) +end + +function job.register(collected, tobesaved, initializer, finalizer) + savelist[#savelist+1] = { collected, tobesaved, initializer, finalizer } +end + +-- as an example we implement variables + +local tobesaved, collected, checksums = allocate(), allocate(), allocate() + +local jobvariables = { + collected = collected, + tobesaved = tobesaved, + checksums = checksums, +} + +job.variables = jobvariables + +if not checksums.old then checksums.old = md5.HEX("old") end -- used in experiment +if not checksums.new then checksums.new = md5.HEX("new") end -- used in experiment + +job.register('job.variables.checksums', checksums) + +local rmethod, rvalue + +local function initializer() + tobesaved = jobvariables.tobesaved + collected = jobvariables.collected + checksums = jobvariables.checksums + rvalue = collected.randomseed + if not rvalue then + rvalue = math.random() + math.setrandomseedi(rvalue,"initialize") + rmethod = "initialized" + else + math.setrandomseedi(rvalue,"previous run") + rmethod = "resumed" + end + tobesaved.randomseed = rvalue + for cs, value in next, collected do + context.setxvalue(cs,value) + end +end + +job.register('job.variables.collected', tobesaved, initializer) + +function jobvariables.save(cs,value) + tobesaved[cs] = value +end + +local packlist = { + "numbers", + "metadata", + "sectiondata", + "prefixdata", + "numberdata", + "pagedata", + "directives", + "specification", + "processors", -- might become key under directives or metadata +-- "references", -- we need to rename of them as only one packs (not structures.lists.references) +} + +local jobpacker = packers.new(packlist,job.packversion) -- jump number when changs in hash + +job.pack = true +-- job.pack = false + +directives.register("job.pack",function(v) pack = v end) + +local _save_, _load_, _others_ = { }, { }, { } -- registers timing + +function job.save(filename) -- we could return a table but it can get pretty large + statistics.starttiming(_save_) + local f = io.open(filename,'w') + if f then + f:write("local utilitydata = { }\n\n") + f:write(serialize(comment,"utilitydata.comment",true,true),"\n\n") + for l=1,#savelist do + local list = savelist[l] + local target = format("utilitydata.%s",list[1]) + local data = list[2] + local finalizer = list[4] + if type(finalizer) == "function" then + finalizer() + end + if job.pack then + packers.pack(data,jobpacker,true) + end + local definer, name = definetable(target,true,true) -- no first and no last + f:write(definer,"\n\n",serialize(data,name,true,true),"\n\n") + end + if job.pack then + packers.strip(jobpacker) + f:write(serialize(jobpacker,"utilitydata.job.packed",true,true),"\n\n") + end + f:write("return utilitydata") + f:close() + end + statistics.stoptiming(_save_) +end + +local function load(filename) + if lfs.isfile(filename) then + local okay, data = pcall(dofile,filename) + if okay and type(data) == "table" then + local jobversion = job.version + local datacomment = 
data.comment + local dataversion = datacomment and datacomment.version or "?" + if dataversion ~= jobversion then + report_passes("version mismatch: %s <> %s",dataversion,jobversion) + else + return data + end + else + os.remove(filename) -- probably a bad file + report_passes("removing stale job data file %a, restart job",filename) + os.exit(true) -- trigger second run + end + end +end + +function job.load(filename) + statistics.starttiming(_load_) + local utilitydata = load(filename) + if utilitydata then + local jobpacker = utilitydata.job.packed + for l=1,#savelist do + local list = savelist[l] + local target = list[1] + local initializer = list[3] + local result = accesstable(target,utilitydata) + local done = packers.unpack(result,jobpacker,true) + if done then + migratetable(target,mark(result)) + if type(initializer) == "function" then + initializer(result) + end + else + report_passes("pack version mismatch") + end + end + end + statistics.stoptiming(_load_) +end + +function job.loadother(filename) + statistics.starttiming(_load_) + _others_[#_others_+1] = file.nameonly(filename) + local utilitydata = load(filename) + if utilitydata then + local jobpacker = utilitydata.job.packed + local unpacked = { } + for l=1,#savelist do + local list = savelist[l] + local target = list[1] + local result = accesstable(target,utilitydata) + local done = packers.unpack(result,jobpacker,true) + if done then + migratetable(target,result,unpacked) + end + end + unpacked.job.packed = nil -- nicer in inspecting + return unpacked + end + statistics.stoptiming(_load_) +end + +-- eventually this will end up in strc-ini + +statistics.register("startup time", function() + return statistics.elapsedseconds(statistics,"including runtime option file processing") +end) + +statistics.register("jobdata time",function() + if enabled then + if #_others_ > 0 then + return format("%s seconds saving, %s seconds loading, other files: %s",statistics.elapsedtime(_save_),statistics.elapsedtime(_load_),concat(_others_," ")) + else + return format("%s seconds saving, %s seconds loading",statistics.elapsedtime(_save_),statistics.elapsedtime(_load_)) + end + else + if #_others_ > 0 then + return format("nothing saved, %s seconds loading, other files: %s",statistics.elapsedtime(_load_),concat(_others_," ")) + else + return format("nothing saved, %s seconds loading",statistics.elapsedtime(_load_)) + end + end +end) + +statistics.register("callbacks", function() + local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0 + local pages = texcount['realpageno'] - 1 + if pages > 1 then + return format("direct: %s, indirect: %s, total: %s (%i per page)", total-indirect, indirect, total, total/pages) + else + return format("direct: %s, indirect: %s, total: %s", total-indirect, indirect, total) + end +end) + +statistics.register("randomizer", function() + if rmethod and rvalue then + return format("%s with value %s",rmethod,rvalue) + end +end) + +function statistics.formatruntime(runtime) + if not environment.initex then -- else error when testing as not counters yet + local shipped = texcount['nofshipouts'] + local pages = texcount['realpageno'] + if pages > shipped then + pages = shipped + end + if shipped > 0 or pages > 0 then + local persecond = shipped / runtime + if pages == 0 then pages = shipped end + return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second",runtime,pages,shipped,persecond) + else + return format("%s seconds",runtime) + end + end +end diff --git 
a/tex/context/base/data-aux.lua b/tex/context/base/data-aux.lua index b969e6070..805d289b2 100644 --- a/tex/context/base/data-aux.lua +++ b/tex/context/base/data-aux.lua @@ -1,62 +1,62 @@ -if not modules then modules = { } end modules ['data-aux'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local find = string.find -local type, next = type, next - -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) - -local resolvers = resolvers - -local report_scripts = logs.reporter("resolvers","scripts") - -function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per se a suffix - local scriptpath = "scripts/context/lua" - newname = file.addsuffix(newname,"lua") - local oldscript = resolvers.cleanpath(oldname) - if trace_locating then - report_scripts("to be replaced old script %a", oldscript) - end - local newscripts = resolvers.findfiles(newname) or { } - if #newscripts == 0 then - if trace_locating then - report_scripts("unable to locate new script") - end - else - for i=1,#newscripts do - local newscript = resolvers.cleanpath(newscripts[i]) - if trace_locating then - report_scripts("checking new script %a", newscript) - end - if oldscript == newscript then - if trace_locating then - report_scripts("old and new script are the same") - end - elseif not find(newscript,scriptpath) then - if trace_locating then - report_scripts("new script should come from %a",scriptpath) - end - elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then - if trace_locating then - report_scripts("invalid new script name") - end - else - local newdata = io.loaddata(newscript) - if newdata then - if trace_locating then - report_scripts("old script content replaced by new content") - end - io.savedata(oldscript,newdata) - break - elseif trace_locating then - report_scripts("unable to load new script") - end - end - end - end -end +if not modules then modules = { } end modules ['data-aux'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local find = string.find +local type, next = type, next + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) + +local resolvers = resolvers + +local report_scripts = logs.reporter("resolvers","scripts") + +function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per se a suffix + local scriptpath = "scripts/context/lua" + newname = file.addsuffix(newname,"lua") + local oldscript = resolvers.cleanpath(oldname) + if trace_locating then + report_scripts("to be replaced old script %a", oldscript) + end + local newscripts = resolvers.findfiles(newname) or { } + if #newscripts == 0 then + if trace_locating then + report_scripts("unable to locate new script") + end + else + for i=1,#newscripts do + local newscript = resolvers.cleanpath(newscripts[i]) + if trace_locating then + report_scripts("checking new script %a", newscript) + end + if oldscript == newscript then + if trace_locating then + report_scripts("old and new script are the same") + end + elseif not find(newscript,scriptpath) then + if trace_locating then + report_scripts("new script should come from 
%a",scriptpath) + end + elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then + if trace_locating then + report_scripts("invalid new script name") + end + else + local newdata = io.loaddata(newscript) + if newdata then + if trace_locating then + report_scripts("old script content replaced by new content") + end + io.savedata(oldscript,newdata) + break + elseif trace_locating then + report_scripts("unable to load new script") + end + end + end + end +end diff --git a/tex/context/base/data-bin.lua b/tex/context/base/data-bin.lua index 1d1e8b749..341d844fe 100644 --- a/tex/context/base/data-bin.lua +++ b/tex/context/base/data-bin.lua @@ -1,27 +1,27 @@ -if not modules then modules = { } end modules ['data-bin'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local resolvers = resolvers -local methodhandler = resolvers.methodhandler - -function resolvers.findbinfile(filename,filetype) - return methodhandler('finders',filename,filetype) -end - -function resolvers.openbinfile(filename) - return methodhandler('loaders',filename) -- a bit weird: load -end - -function resolvers.loadbinfile(filename,filetype) - local fname = methodhandler('finders',filename,filetype) - if fname and fname ~= "" then - return resolvers.openbinfile(fname) -- a bit weird: open - else - return resolvers.loaders.notfound() - end -end +if not modules then modules = { } end modules ['data-bin'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local resolvers = resolvers +local methodhandler = resolvers.methodhandler + +function resolvers.findbinfile(filename,filetype) + return methodhandler('finders',filename,filetype) +end + +function resolvers.openbinfile(filename) + return methodhandler('loaders',filename) -- a bit weird: load +end + +function resolvers.loadbinfile(filename,filetype) + local fname = methodhandler('finders',filename,filetype) + if fname and fname ~= "" then + return resolvers.openbinfile(fname) -- a bit weird: open + else + return resolvers.loaders.notfound() + end +end diff --git a/tex/context/base/data-con.lua b/tex/context/base/data-con.lua index 240538df2..9b893df9c 100644 --- a/tex/context/base/data-con.lua +++ b/tex/context/base/data-con.lua @@ -1,138 +1,138 @@ -if not modules then modules = { } end modules ['data-con'] = { - version = 1.100, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, lower, gsub = string.format, string.lower, string.gsub - -local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end) -local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end) - ---[[ldx-- -

Once we found ourselves defining similar cache constructs -several times, containers were introduced. Containers are used -to collect tables in memory and reuse them when possible based -on (unique) hashes (to be provided by the calling function).

- -

Caching to disk is disabled by default. Version numbers are -stored in the saved table which makes it possible to change the -table structures without bothering about the disk cache.

- -

Examples of usage can be found in the font related code.

---ldx]]-- - -containers = containers or { } -local containers = containers -containers.usecache = true - -local report_containers = logs.reporter("resolvers","containers") - -local allocated = { } - -local mt = { - __index = function(t,k) - if k == "writable" then - local writable = caches.getwritablepath(t.category,t.subcategory) or { "." } - t.writable = writable - return writable - elseif k == "readables" then - local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." } - t.readables = readables - return readables - end - end, - __storage__ = true -} - -function containers.define(category, subcategory, version, enabled) - if category and subcategory then - local c = allocated[category] - if not c then - c = { } - allocated[category] = c - end - local s = c[subcategory] - if not s then - s = { - category = category, - subcategory = subcategory, - storage = { }, - enabled = enabled, - version = version or math.pi, -- after all, this is TeX - trace = false, - -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." }, - -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." }, - } - setmetatable(s,mt) - c[subcategory] = s - end - return s - end -end - -function containers.is_usable(container,name) - return container.enabled and caches and caches.is_writable(container.writable, name) -end - -function containers.is_valid(container,name) - if name and name ~= "" then - local storage = container.storage[name] - return storage and storage.cache_version == container.version - else - return false - end -end - -function containers.read(container,name) - local storage = container.storage - local stored = storage[name] - if not stored and container.enabled and caches and containers.usecache then - stored = caches.loaddata(container.readables,name) - if stored and stored.cache_version == container.version then - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","load",container.subcategory,name) - end - else - stored = nil - end - storage[name] = stored - elseif stored then - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","reuse",container.subcategory,name) - end - end - return stored -end - -function containers.write(container, name, data) - if data then - data.cache_version = container.version - if container.enabled and caches then - local unique, shared = data.unique, data.shared - data.unique, data.shared = nil, nil - caches.savedata(container.writable, name, data) - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","save",container.subcategory,name) - end - data.unique, data.shared = unique, shared - end - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","store",container.subcategory,name) - end - container.storage[name] = data - end - return data -end - -function containers.content(container,name) - return container.storage[name] -end - -function containers.cleanname(name) - -- return (gsub(lower(name),"[^%w]+","-")) - return (gsub(lower(name),"[^%w\128-\255]+","-")) -- more utf friendly -end +if not modules then modules = { } end modules ['data-con'] = { + version = 1.100, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, lower, gsub = string.format, 
string.lower, string.gsub + +local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) +local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end) +local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end) + +--[[ldx-- +

Once we found ourselves defining similar cache constructs +several times, containers were introduced. Containers are used +to collect tables in memory and reuse them when possible based +on (unique) hashes (to be provided by the calling function).

+ +

Caching to disk is disabled by default. Version numbers are +stored in the saved table which makes it possible to change the +table structures without bothering about the disk cache.

+ +

Examples of usage can be found in the font related code.
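
For illustration only (this sketch is not part of the file; the names "demo",
"parsed", "democache" and "getparsed" are made up), a typical define/read/write
cycle looks roughly like this:

local democache = containers.define("demo", "parsed", 1.001, true)

local function getparsed(hash, producer)
    local data = containers.read(democache, hash)
    if not data then
        data = producer()                             -- some expensive computation returning a table
        data = containers.write(democache, hash, data)
    end
    return data
end

The version number passed to define is compared against the cache_version field
of stored data, so bumping it invalidates previously cached entries.
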

+--ldx]]-- + +containers = containers or { } +local containers = containers +containers.usecache = true + +local report_containers = logs.reporter("resolvers","containers") + +local allocated = { } + +local mt = { + __index = function(t,k) + if k == "writable" then + local writable = caches.getwritablepath(t.category,t.subcategory) or { "." } + t.writable = writable + return writable + elseif k == "readables" then + local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." } + t.readables = readables + return readables + end + end, + __storage__ = true +} + +function containers.define(category, subcategory, version, enabled) + if category and subcategory then + local c = allocated[category] + if not c then + c = { } + allocated[category] = c + end + local s = c[subcategory] + if not s then + s = { + category = category, + subcategory = subcategory, + storage = { }, + enabled = enabled, + version = version or math.pi, -- after all, this is TeX + trace = false, + -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." }, + -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." }, + } + setmetatable(s,mt) + c[subcategory] = s + end + return s + end +end + +function containers.is_usable(container,name) + return container.enabled and caches and caches.is_writable(container.writable, name) +end + +function containers.is_valid(container,name) + if name and name ~= "" then + local storage = container.storage[name] + return storage and storage.cache_version == container.version + else + return false + end +end + +function containers.read(container,name) + local storage = container.storage + local stored = storage[name] + if not stored and container.enabled and caches and containers.usecache then + stored = caches.loaddata(container.readables,name) + if stored and stored.cache_version == container.version then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","load",container.subcategory,name) + end + else + stored = nil + end + storage[name] = stored + elseif stored then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","reuse",container.subcategory,name) + end + end + return stored +end + +function containers.write(container, name, data) + if data then + data.cache_version = container.version + if container.enabled and caches then + local unique, shared = data.unique, data.shared + data.unique, data.shared = nil, nil + caches.savedata(container.writable, name, data) + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","save",container.subcategory,name) + end + data.unique, data.shared = unique, shared + end + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","store",container.subcategory,name) + end + container.storage[name] = data + end + return data +end + +function containers.content(container,name) + return container.storage[name] +end + +function containers.cleanname(name) + -- return (gsub(lower(name),"[^%w]+","-")) + return (gsub(lower(name),"[^%w\128-\255]+","-")) -- more utf friendly +end diff --git a/tex/context/base/data-crl.lua b/tex/context/base/data-crl.lua index 445bd5b0a..303c0fa9f 100644 --- a/tex/context/base/data-crl.lua +++ b/tex/context/base/data-crl.lua @@ -1,61 +1,61 @@ -if not modules then modules = { } end modules ['data-crl'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - 
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this one is replaced by data-sch.lua -- - -local gsub = string.gsub - -local resolvers = resolvers - -local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders - -resolvers.curl = resolvers.curl or { } -local curl = resolvers.curl - -local cached = { } - -local function runcurl(specification) - local original = specification.original - -- local scheme = specification.scheme - local cleanname = gsub(original,"[^%a%d%.]+","-") - local cachename = caches.setfirstwritablefile(cleanname,"curl") - if not cached[original] then - if not io.exists(cachename) then - cached[original] = cachename - local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original - os.spawn(command) - end - if io.exists(cachename) then - cached[original] = cachename - else - cached[original] = "" - end - end - return cached[original] -end - --- old code: we could be cleaner using specification (see schemes) - -local function finder(specification,filetype) - return resolvers.methodhandler("finders",runcurl(specification),filetype) -end - -local opener = openers.file -local loader = loaders.file - -local function install(scheme) - finders[scheme] = finder - openers[scheme] = opener - loaders[scheme] = loader -end - -resolvers.curl.install = install - -install('http') -install('https') -install('ftp') +if not modules then modules = { } end modules ['data-crl'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this one is replaced by data-sch.lua -- + +local gsub = string.gsub + +local resolvers = resolvers + +local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders + +resolvers.curl = resolvers.curl or { } +local curl = resolvers.curl + +local cached = { } + +local function runcurl(specification) + local original = specification.original + -- local scheme = specification.scheme + local cleanname = gsub(original,"[^%a%d%.]+","-") + local cachename = caches.setfirstwritablefile(cleanname,"curl") + if not cached[original] then + if not io.exists(cachename) then + cached[original] = cachename + local command = "curl --silent --create-dirs --output " .. cachename .. " " .. 
original + os.spawn(command) + end + if io.exists(cachename) then + cached[original] = cachename + else + cached[original] = "" + end + end + return cached[original] +end + +-- old code: we could be cleaner using specification (see schemes) + +local function finder(specification,filetype) + return resolvers.methodhandler("finders",runcurl(specification),filetype) +end + +local opener = openers.file +local loader = loaders.file + +local function install(scheme) + finders[scheme] = finder + openers[scheme] = opener + loaders[scheme] = loader +end + +resolvers.curl.install = install + +install('http') +install('https') +install('ftp') diff --git a/tex/context/base/data-ctx.lua b/tex/context/base/data-ctx.lua index 345e9c741..c3fc1e62f 100644 --- a/tex/context/base/data-ctx.lua +++ b/tex/context/base/data-ctx.lua @@ -1,9 +1,9 @@ -if not modules then modules = { } end modules ['data-ctx'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- empty +if not modules then modules = { } end modules ['data-ctx'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- empty diff --git a/tex/context/base/data-env.lua b/tex/context/base/data-env.lua index 2ee25120e..8aba977a3 100644 --- a/tex/context/base/data-env.lua +++ b/tex/context/base/data-env.lua @@ -1,291 +1,291 @@ -if not modules then modules = { } end modules ['data-env'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - -local lower, gsub = string.lower, string.gsub - -local resolvers = resolvers - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex -local suffixonly = file.suffixonly - -local formats = allocate() -local suffixes = allocate() -local dangerous = allocate() -local suffixmap = allocate() - -resolvers.formats = formats -resolvers.suffixes = suffixes -resolvers.dangerous = dangerous -resolvers.suffixmap = suffixmap - -local luasuffixes = utilities.lua.suffixes - -local relations = allocate { -- todo: handlers also here - core = { - ofm = { -- will become obsolete - names = { "ofm", "omega font metric", "omega font metrics" }, - variable = 'OFMFONTS', - suffixes = { 'ofm', 'tfm' }, - }, - ovf = { -- will become obsolete - names = { "ovf", "omega virtual font", "omega virtual fonts" }, - variable = 'OVFFONTS', - suffixes = { 'ovf', 'vf' }, - }, - tfm = { - names = { "tfm", "tex font metric", "tex font metrics" }, - variable = 'TFMFONTS', - suffixes = { 'tfm' }, - }, - vf = { - names = { "vf", "virtual font", "virtual fonts" }, - variable = 'VFFONTS', - suffixes = { 'vf' }, - }, - otf = { - names = { "otf", "opentype", "opentype font", "opentype fonts"}, - variable = 'OPENTYPEFONTS', - suffixes = { 'otf' }, - }, - ttf = { - names = { "ttf", "truetype", "truetype font", "truetype fonts", "truetype collection", "truetype collections", "truetype dictionary", "truetype dictionaries" }, - variable = 'TTFONTS', - suffixes = { 'ttf', 'ttc', 'dfont' }, - }, - afm = { - names = { "afm", "adobe font metric", "adobe font metrics" }, - variable = "AFMFONTS", - suffixes = { "afm" }, - }, - pfb = { - 
names = { "pfb", "type1", "type 1", "type1 font", "type 1 font", "type1 fonts", "type 1 fonts" }, - variable = 'T1FONTS', - suffixes = { 'pfb', 'pfa' }, - }, - fea = { - names = { "fea", "font feature", "font features", "font feature file", "font feature files" }, - variable = 'FONTFEATURES', - suffixes = { 'fea' }, - }, - cid = { - names = { "cid", "cid map", "cid maps", "cid file", "cid files" }, - variable = 'FONTCIDMAPS', - suffixes = { 'cid', 'cidmap' }, - }, - fmt = { - names = { "fmt", "format", "tex format" }, - variable = 'TEXFORMATS', - suffixes = { 'fmt' }, - }, - mem = { -- will become obsolete - names = { 'mem', "metapost format" }, - variable = 'MPMEMS', - suffixes = { 'mem' }, - }, - mp = { - names = { "mp" }, - variable = 'MPINPUTS', - suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' }, - }, - tex = { - names = { "tex" }, - variable = 'TEXINPUTS', - suffixes = { 'tex', "mkvi", "mkiv", "mkii" }, - }, - icc = { - names = { "icc", "icc profile", "icc profiles" }, - variable = 'ICCPROFILES', - suffixes = { 'icc' }, - }, - texmfscripts = { - names = { "texmfscript", "texmfscripts", "script", "scripts" }, - variable = 'TEXMFSCRIPTS', - suffixes = { 'rb', 'pl', 'py' }, - }, - lua = { - names = { "lua" }, - variable = 'LUAINPUTS', - suffixes = { luasuffixes.lua, luasuffixes.luc, luasuffixes.tma, luasuffixes.tmc }, - }, - lib = { - names = { "lib" }, - variable = 'CLUAINPUTS', - suffixes = os.libsuffix and { os.libsuffix } or { 'dll', 'so' }, - }, - bib = { - names = { 'bib' }, - suffixes = { 'bib' }, - }, - bst = { - names = { 'bst' }, - suffixes = { 'bst' }, - }, - fontconfig = { - names = { 'fontconfig', 'fontconfig file', 'fontconfig files' }, - variable = 'FONTCONFIG_PATH', - }, - }, - obsolete = { - enc = { - names = { "enc", "enc files", "enc file", "encoding files", "encoding file" }, - variable = 'ENCFONTS', - suffixes = { 'enc' }, - }, - map = { - names = { "map", "map files", "map file" }, - variable = 'TEXFONTMAPS', - suffixes = { 'map' }, - }, - lig = { - names = { "lig files", "lig file", "ligature file", "ligature files" }, - variable = 'LIGFONTS', - suffixes = { 'lig' }, - }, - opl = { - names = { "opl" }, - variable = 'OPLFONTS', - suffixes = { 'opl' }, - }, - ovp = { - names = { "ovp" }, - variable = 'OVPFONTS', - suffixes = { 'ovp' }, - }, - }, - kpse = { -- subset - base = { - names = { 'base', "metafont format" }, - variable = 'MFBASES', - suffixes = { 'base', 'bas' }, - }, - cmap = { - names = { 'cmap', 'cmap files', 'cmap file' }, - variable = 'CMAPFONTS', - suffixes = { 'cmap' }, - }, - cnf = { - names = { 'cnf' }, - suffixes = { 'cnf' }, - }, - web = { - names = { 'web' }, - suffixes = { 'web', 'ch' } - }, - cweb = { - names = { 'cweb' }, - suffixes = { 'w', 'web', 'ch' }, - }, - gf = { - names = { 'gf' }, - suffixes = { 'gf' }, - }, - mf = { - names = { 'mf' }, - variable = 'MFINPUTS', - suffixes = { 'mf' }, - }, - mft = { - names = { 'mft' }, - suffixes = { 'mft' }, - }, - pk = { - names = { 'pk' }, - suffixes = { 'pk' }, - }, - }, -} - -resolvers.relations = relations - --- formats: maps a format onto a variable - -function resolvers.updaterelations() - for category, categories in next, relations do - for name, relation in next, categories do - local rn = relation.names - local rv = relation.variable - local rs = relation.suffixes - if rn and rv then - for i=1,#rn do - local rni = lower(gsub(rn[i]," ","")) - formats[rni] = rv - if rs then - suffixes[rni] = rs - for i=1,#rs do - local rsi = rs[i] - suffixmap[rsi] = rni - end - end - end - end - if rs then - end - 
end - end -end - -resolvers.updaterelations() -- push this in the metatable -> newindex - -local function simplified(t,k) - return k and rawget(t,lower(gsub(k," ",""))) or nil -end - -setmetatableindex(formats, simplified) -setmetatableindex(suffixes, simplified) -setmetatableindex(suffixmap, simplified) - --- A few accessors, mostly for command line tool. - -function resolvers.suffixofformat(str) - local s = suffixes[str] - return s and s[1] or "" -end - -function resolvers.suffixofformat(str) - return suffixes[str] or { } -end - -for name, format in next, formats do - dangerous[name] = true -- still needed ? -end - --- because vf searching is somewhat dangerous, we want to prevent --- too liberal searching esp because we do a lookup on the current --- path anyway; only tex (or any) is safe - -dangerous.tex = nil - ---~ print(table.serialize(dangerous)) - --- more helpers - -function resolvers.formatofvariable(str) - return formats[str] or '' -end - -function resolvers.formatofsuffix(str) -- of file - return suffixmap[suffixonly(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc) -end - -function resolvers.variableofformat(str) - return formats[str] or '' -end - -function resolvers.variableofformatorsuffix(str) - local v = formats[str] - if v then - return v - end - v = suffixmap[suffixonly(str)] - if v then - return formats[v] - end - return '' -end - +if not modules then modules = { } end modules ['data-env'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +local lower, gsub = string.lower, string.gsub + +local resolvers = resolvers + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex +local suffixonly = file.suffixonly + +local formats = allocate() +local suffixes = allocate() +local dangerous = allocate() +local suffixmap = allocate() + +resolvers.formats = formats +resolvers.suffixes = suffixes +resolvers.dangerous = dangerous +resolvers.suffixmap = suffixmap + +local luasuffixes = utilities.lua.suffixes + +local relations = allocate { -- todo: handlers also here + core = { + ofm = { -- will become obsolete + names = { "ofm", "omega font metric", "omega font metrics" }, + variable = 'OFMFONTS', + suffixes = { 'ofm', 'tfm' }, + }, + ovf = { -- will become obsolete + names = { "ovf", "omega virtual font", "omega virtual fonts" }, + variable = 'OVFFONTS', + suffixes = { 'ovf', 'vf' }, + }, + tfm = { + names = { "tfm", "tex font metric", "tex font metrics" }, + variable = 'TFMFONTS', + suffixes = { 'tfm' }, + }, + vf = { + names = { "vf", "virtual font", "virtual fonts" }, + variable = 'VFFONTS', + suffixes = { 'vf' }, + }, + otf = { + names = { "otf", "opentype", "opentype font", "opentype fonts"}, + variable = 'OPENTYPEFONTS', + suffixes = { 'otf' }, + }, + ttf = { + names = { "ttf", "truetype", "truetype font", "truetype fonts", "truetype collection", "truetype collections", "truetype dictionary", "truetype dictionaries" }, + variable = 'TTFONTS', + suffixes = { 'ttf', 'ttc', 'dfont' }, + }, + afm = { + names = { "afm", "adobe font metric", "adobe font metrics" }, + variable = "AFMFONTS", + suffixes = { "afm" }, + }, + pfb = { + names = { "pfb", "type1", "type 1", "type1 font", "type 1 font", "type1 fonts", "type 1 fonts" }, + variable = 'T1FONTS', + suffixes = { 'pfb', 'pfa' }, + }, + fea = { + names = { "fea", "font feature", "font features", "font feature 
file", "font feature files" }, + variable = 'FONTFEATURES', + suffixes = { 'fea' }, + }, + cid = { + names = { "cid", "cid map", "cid maps", "cid file", "cid files" }, + variable = 'FONTCIDMAPS', + suffixes = { 'cid', 'cidmap' }, + }, + fmt = { + names = { "fmt", "format", "tex format" }, + variable = 'TEXFORMATS', + suffixes = { 'fmt' }, + }, + mem = { -- will become obsolete + names = { 'mem', "metapost format" }, + variable = 'MPMEMS', + suffixes = { 'mem' }, + }, + mp = { + names = { "mp" }, + variable = 'MPINPUTS', + suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' }, + }, + tex = { + names = { "tex" }, + variable = 'TEXINPUTS', + suffixes = { 'tex', "mkvi", "mkiv", "mkii" }, + }, + icc = { + names = { "icc", "icc profile", "icc profiles" }, + variable = 'ICCPROFILES', + suffixes = { 'icc' }, + }, + texmfscripts = { + names = { "texmfscript", "texmfscripts", "script", "scripts" }, + variable = 'TEXMFSCRIPTS', + suffixes = { 'rb', 'pl', 'py' }, + }, + lua = { + names = { "lua" }, + variable = 'LUAINPUTS', + suffixes = { luasuffixes.lua, luasuffixes.luc, luasuffixes.tma, luasuffixes.tmc }, + }, + lib = { + names = { "lib" }, + variable = 'CLUAINPUTS', + suffixes = os.libsuffix and { os.libsuffix } or { 'dll', 'so' }, + }, + bib = { + names = { 'bib' }, + suffixes = { 'bib' }, + }, + bst = { + names = { 'bst' }, + suffixes = { 'bst' }, + }, + fontconfig = { + names = { 'fontconfig', 'fontconfig file', 'fontconfig files' }, + variable = 'FONTCONFIG_PATH', + }, + }, + obsolete = { + enc = { + names = { "enc", "enc files", "enc file", "encoding files", "encoding file" }, + variable = 'ENCFONTS', + suffixes = { 'enc' }, + }, + map = { + names = { "map", "map files", "map file" }, + variable = 'TEXFONTMAPS', + suffixes = { 'map' }, + }, + lig = { + names = { "lig files", "lig file", "ligature file", "ligature files" }, + variable = 'LIGFONTS', + suffixes = { 'lig' }, + }, + opl = { + names = { "opl" }, + variable = 'OPLFONTS', + suffixes = { 'opl' }, + }, + ovp = { + names = { "ovp" }, + variable = 'OVPFONTS', + suffixes = { 'ovp' }, + }, + }, + kpse = { -- subset + base = { + names = { 'base', "metafont format" }, + variable = 'MFBASES', + suffixes = { 'base', 'bas' }, + }, + cmap = { + names = { 'cmap', 'cmap files', 'cmap file' }, + variable = 'CMAPFONTS', + suffixes = { 'cmap' }, + }, + cnf = { + names = { 'cnf' }, + suffixes = { 'cnf' }, + }, + web = { + names = { 'web' }, + suffixes = { 'web', 'ch' } + }, + cweb = { + names = { 'cweb' }, + suffixes = { 'w', 'web', 'ch' }, + }, + gf = { + names = { 'gf' }, + suffixes = { 'gf' }, + }, + mf = { + names = { 'mf' }, + variable = 'MFINPUTS', + suffixes = { 'mf' }, + }, + mft = { + names = { 'mft' }, + suffixes = { 'mft' }, + }, + pk = { + names = { 'pk' }, + suffixes = { 'pk' }, + }, + }, +} + +resolvers.relations = relations + +-- formats: maps a format onto a variable + +function resolvers.updaterelations() + for category, categories in next, relations do + for name, relation in next, categories do + local rn = relation.names + local rv = relation.variable + local rs = relation.suffixes + if rn and rv then + for i=1,#rn do + local rni = lower(gsub(rn[i]," ","")) + formats[rni] = rv + if rs then + suffixes[rni] = rs + for i=1,#rs do + local rsi = rs[i] + suffixmap[rsi] = rni + end + end + end + end + if rs then + end + end + end +end + +resolvers.updaterelations() -- push this in the metatable -> newindex + +local function simplified(t,k) + return k and rawget(t,lower(gsub(k," ",""))) or nil +end + +setmetatableindex(formats, simplified) 
+setmetatableindex(suffixes, simplified) +setmetatableindex(suffixmap, simplified) + +-- A few accessors, mostly for command line tool. + +function resolvers.suffixofformat(str) + local s = suffixes[str] + return s and s[1] or "" +end + +function resolvers.suffixofformat(str) + return suffixes[str] or { } +end + +for name, format in next, formats do + dangerous[name] = true -- still needed ? +end + +-- because vf searching is somewhat dangerous, we want to prevent +-- too liberal searching esp because we do a lookup on the current +-- path anyway; only tex (or any) is safe + +dangerous.tex = nil + +--~ print(table.serialize(dangerous)) + +-- more helpers + +function resolvers.formatofvariable(str) + return formats[str] or '' +end + +function resolvers.formatofsuffix(str) -- of file + return suffixmap[suffixonly(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc) +end + +function resolvers.variableofformat(str) + return formats[str] or '' +end + +function resolvers.variableofformatorsuffix(str) + local v = formats[str] + if v then + return v + end + v = suffixmap[suffixonly(str)] + if v then + return formats[v] + end + return '' +end + diff --git a/tex/context/base/data-exp.lua b/tex/context/base/data-exp.lua index 8a2fd0320..1bf620a09 100644 --- a/tex/context/base/data-exp.lua +++ b/tex/context/base/data-exp.lua @@ -1,470 +1,470 @@ -if not modules then modules = { } end modules ['data-exp'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - -local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub -local concat, sort = table.concat, table.sort -local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns -local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S -local type, next = type, next - -local ostype = os.type -local collapsepath = file.collapsepath - -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end) - -local report_expansions = logs.reporter("resolvers","expansions") - -local resolvers = resolvers - --- As this bit of code is somewhat special it gets its own module. After --- all, when working on the main resolver code, I don't want to scroll --- past this every time. See data-obs.lua for the gsub variant. - -local function f_first(a,b) - local t, n = { }, 0 - for s in gmatch(b,"[^,]+") do - n = n + 1 ; t[n] = a .. s - end - return concat(t,",") -end - -local function f_second(a,b) - local t, n = { }, 0 - for s in gmatch(a,"[^,]+") do - n = n + 1 ; t[n] = s .. b - end - return concat(t,",") -end - --- kpsewhich --expand-braces '{a,b}{c,d}' --- ac:bc:ad:bd - --- old {a,b}{c,d} => ac ad bc bd --- --- local function f_both(a,b) --- local t, n = { }, 0 --- for sa in gmatch(a,"[^,]+") do --- for sb in gmatch(b,"[^,]+") do --- n = n + 1 ; t[n] = sa .. sb --- end --- end --- return concat(t,",") --- end --- --- new {a,b}{c,d} => ac bc ad bd - -local function f_both(a,b) - local t, n = { }, 0 - for sb in gmatch(b,"[^,]+") do -- and not sa - for sa in gmatch(a,"[^,]+") do -- sb - n = n + 1 ; t[n] = sa .. 
sb - end - end - return concat(t,",") -end - -local left = P("{") -local right = P("}") -local var = P((1 - S("{}" ))^0) -local set = P((1 - S("{},"))^0) -local other = P(1) - -local l_first = Cs( ( Cc("{") * (C(set) * left * C(var) * right / f_first) * Cc("}") + other )^0 ) -local l_second = Cs( ( Cc("{") * (left * C(var) * right * C(set) / f_second) * Cc("}") + other )^0 ) -local l_both = Cs( ( Cc("{") * (left * C(var) * right * left * C(var) * right / f_both) * Cc("}") + other )^0 ) -local l_rest = Cs( ( left * var * (left/"") * var * (right/"") * var * right + other )^0 ) - -local stripper_1 = lpeg.stripper ("{}@") -local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, } - -local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise). - if trace_expansions then - report_expansions("expanding variable %a",str) - end - local t, ok, done = newlist or { }, false, false - local n = #t - str = lpegmatch(replacer_1,str) - repeat - local old = str - repeat - local old = str - str = lpegmatch(l_first, str) - until old == str - repeat - local old = str - str = lpegmatch(l_second,str) - until old == str - repeat - local old = str - str = lpegmatch(l_both, str) - until old == str - repeat - local old = str - str = lpegmatch(l_rest, str) - until old == str - until old == str -- or not find(str,"{") - str = lpegmatch(stripper_1,str) - if validate then - for s in gmatch(str,"[^,]+") do - s = validate(s) - if s then - n = n + 1 - t[n] = s - end - end - else - for s in gmatch(str,"[^,]+") do - n = n + 1 - t[n] = s - end - end - if trace_expansions then - for k=1,#t do - report_expansions("% 4i: %s",k,t[k]) - end - end - return t -end - --- We could make the previous one public. - -local function validate(s) - s = collapsepath(s) -- already keeps the trailing / and // - return s ~= "" and not find(s,"^!*unset/*$") and s -end - -resolvers.validatedpath = validate -- keeps the trailing // - -function resolvers.expandedpathfromlist(pathlist) - local newlist = { } - for k=1,#pathlist do - splitpathexpr(pathlist[k],newlist,validate) - end - return newlist -end - --- {a,b,c,d} --- a,b,c/{p,q,r},d --- a,b,c/{p,q,r}/d/{x,y,z}// --- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} --- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} --- a{b,c}{d,e}f --- {a,b,c,d} --- {a,b,c/{p,q,r},d} --- {a,b,c/{p,q,r}/d/{x,y,z}//} --- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}} --- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}} --- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c} - -local cleanup = lpeg.replacer { - { "!" 
, "" }, - { "\\" , "/" }, -} - -function resolvers.cleanpath(str) -- tricky, maybe only simple paths - local doslashes = (P("\\")/"/" + 1)^0 - local donegation = (P("!") /"" )^0 - local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "") - if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then - if trace_expansions then - report_expansions("no home dir set, ignoring dependent paths") - end - function resolvers.cleanpath(str) - if not str or find(str,"~") then - return "" -- special case - else - return lpegmatch(cleanup,str) - end - end - else - local dohome = ((P("~")+P("$HOME"))/homedir)^0 - local cleanup = Cs(donegation * dohome * doslashes) - function resolvers.cleanpath(str) - return str and lpegmatch(cleanup,str) or "" - end - end - return resolvers.cleanpath(str) -end - --- print(resolvers.cleanpath("")) --- print(resolvers.cleanpath("!")) --- print(resolvers.cleanpath("~")) --- print(resolvers.cleanpath("~/test")) --- print(resolvers.cleanpath("!~/test")) --- print(resolvers.cleanpath("~/test~test")) - --- This one strips quotes and funny tokens. - -local expandhome = P("~") / "$HOME" -- environment.homedir or "home:" - -local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/"" -local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/"" -local dostring = (expandhome + 1 )^0 - -local stripper = Cs( - lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer -) - -function resolvers.checkedvariable(str) -- assumes str is a string - return type(str) == "string" and lpegmatch(stripper,str) or str -end - --- The path splitter: - --- A config (optionally) has the paths split in tables. Internally --- we join them and split them after the expansion has taken place. This --- is more convenient. - -local cache = { } - ------ splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add , -local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do : - -local backslashswapper = lpeg.replacer("\\","/") - -local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification } - if str then - local found = cache[str] - if not found then - if str == "" then - found = { } - else - local split = lpegmatch(splitter,lpegmatch(backslashswapper,str)) -- can be combined - found = { } - local noffound = 0 - for i=1,#split do - local s = split[i] - if not find(s,"^{*unset}*") then - noffound = noffound + 1 - found[noffound] = s - end - end - if trace_expansions then - report_expansions("splitting path specification %a",str) - for k=1,noffound do - report_expansions("% 4i: %s",k,found[k]) - end - end - cache[str] = found - end - end - return found - end -end - -resolvers.splitconfigurationpath = splitconfigurationpath - -function resolvers.splitpath(str) - if type(str) == 'table' then - return str - else - return splitconfigurationpath(str) - end -end - -function resolvers.joinpath(str) - if type(str) == 'table' then - return file.joinpath(str) - else - return str - end -end - --- The next function scans directories and returns a hash where the --- entries are either strings or tables. - --- starting with . or .. 
etc or funny char - ---~ local l_forbidden = S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t") ---~ local l_confusing = P(" ") ---~ local l_character = lpegpatterns.utf8 ---~ local l_dangerous = P(".") - ---~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * P(-1) ---~ ----- l_normal = l_normal * Cc(true) + Cc(false) - ---~ local function test(str) ---~ print(str,lpegmatch(l_normal,str)) ---~ end ---~ test("ãƒ’ãƒ©ã‚®ãƒŽæ˜Žæœ Pro W3") ---~ test("..ãƒ’ãƒ©ã‚®ãƒŽæ˜Žæœ Pro W3") ---~ test(":ãƒ’ãƒ©ã‚®ãƒŽæ˜Žæœ Pro W3;") ---~ test("ãƒ’ãƒ©ã‚®ãƒŽæ˜Žæœ /Pro W3;") ---~ test("ãƒ’ãƒ©ã‚®ãƒŽæ˜Žæœ Pro W3") - --- a lot of this caching can be stripped away when we have ssd's everywhere --- --- we could cache all the (sub)paths here if needed - -local attributes, directory = lfs.attributes, lfs.dir - -local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) -local timer = { } -local scanned = { } -local nofscans = 0 -local scancache = { } - -local function scan(files,spec,path,n,m,r) - local full = (path == "" and spec) or (spec .. path .. '/') - local dirs = { } - local nofdirs = 0 - for name in directory(full) do - if not lpegmatch(weird,name) then - local mode = attributes(full..name,'mode') - if mode == 'file' then - n = n + 1 - local f = files[name] - if f then - if type(f) == 'string' then - files[name] = { f, path } - else - f[#f+1] = path - end - else -- probably unique anyway - files[name] = path - local lower = lower(name) - if name ~= lower then - files["remap:"..lower] = name - r = r + 1 - end - end - elseif mode == 'directory' then - m = m + 1 - nofdirs = nofdirs + 1 - if path ~= "" then - dirs[nofdirs] = path..'/'..name - else - dirs[nofdirs] = name - end - end - end - end - if nofdirs > 0 then - sort(dirs) - for i=1,nofdirs do - files, n, m, r = scan(files,spec,dirs[i],n,m,r) - end - end - scancache[sub(full,1,-2)] = files - return files, n, m, r -end - -local fullcache = { } - -function resolvers.scanfiles(path,branch,usecache) - statistics.starttiming(timer) - local realpath = resolvers.resolve(path) -- no shortcut - if usecache then - local files = fullcache[realpath] - if files then - if trace_locating then - report_expansions("using caches scan of path %a, branch %a",path,branch or path) - end - return files - end - end - if trace_locating then - report_expansions("scanning path %a, branch %a",path,branch or path) - end - local files, n, m, r = scan({ },realpath .. '/',"",0,0,0) - files.__path__ = path -- can be selfautoparent:texmf-whatever - files.__files__ = n - files.__directories__ = m - files.__remappings__ = r - if trace_locating then - report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r) - end - if usecache then - scanned[#scanned+1] = realpath - fullcache[realpath] = files - end - nofscans = nofscans + 1 - statistics.stoptiming(timer) - return files -end - -local function simplescan(files,spec,path) -- first match only, no map and such - local full = (path == "" and spec) or (spec .. path .. 
'/') - local dirs = { } - local nofdirs = 0 - for name in directory(full) do - if not lpegmatch(weird,name) then - local mode = attributes(full..name,'mode') - if mode == 'file' then - if not files[name] then - -- only first match - files[name] = path - end - elseif mode == 'directory' then - nofdirs = nofdirs + 1 - if path ~= "" then - dirs[nofdirs] = path..'/'..name - else - dirs[nofdirs] = name - end - end - end - end - if nofdirs > 0 then - sort(dirs) - for i=1,nofdirs do - files = simplescan(files,spec,dirs[i]) - end - end - return files -end - -local simplecache = { } -local nofsharedscans = 0 - -function resolvers.simplescanfiles(path,branch,usecache) - statistics.starttiming(timer) - local realpath = resolvers.resolve(path) -- no shortcut - if usecache then - local files = simplecache[realpath] - if not files then - files = scancache[realpath] - if files then - nofsharedscans = nofsharedscans + 1 - end - end - if files then - if trace_locating then - report_expansions("using caches scan of path %a, branch %a",path,branch or path) - end - return files - end - end - if trace_locating then - report_expansions("scanning path %a, branch %a",path,branch or path) - end - local files = simplescan({ },realpath .. '/',"") - if trace_locating then - report_expansions("%s files found",table.count(files)) - end - if usecache then - scanned[#scanned+1] = realpath - simplecache[realpath] = files - end - nofscans = nofscans + 1 - statistics.stoptiming(timer) - return files -end - -function resolvers.scandata() - table.sort(scanned) - return { - n = nofscans, - shared = nofsharedscans, - time = statistics.elapsedtime(timer), - paths = scanned, - } -end - ---~ print(table.serialize(resolvers.scanfiles("t:/sources"))) +if not modules then modules = { } end modules ['data-exp'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub +local concat, sort = table.concat, table.sort +local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns +local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S +local type, next = type, next + +local ostype = os.type +local collapsepath = file.collapsepath + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) +local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end) + +local report_expansions = logs.reporter("resolvers","expansions") + +local resolvers = resolvers + +-- As this bit of code is somewhat special it gets its own module. After +-- all, when working on the main resolver code, I don't want to scroll +-- past this every time. See data-obs.lua for the gsub variant. + +local function f_first(a,b) + local t, n = { }, 0 + for s in gmatch(b,"[^,]+") do + n = n + 1 ; t[n] = a .. s + end + return concat(t,",") +end + +local function f_second(a,b) + local t, n = { }, 0 + for s in gmatch(a,"[^,]+") do + n = n + 1 ; t[n] = s .. b + end + return concat(t,",") +end + +-- kpsewhich --expand-braces '{a,b}{c,d}' +-- ac:bc:ad:bd + +-- old {a,b}{c,d} => ac ad bc bd +-- +-- local function f_both(a,b) +-- local t, n = { }, 0 +-- for sa in gmatch(a,"[^,]+") do +-- for sb in gmatch(b,"[^,]+") do +-- n = n + 1 ; t[n] = sa .. 
sb +-- end +-- end +-- return concat(t,",") +-- end +-- +-- new {a,b}{c,d} => ac bc ad bd + +local function f_both(a,b) + local t, n = { }, 0 + for sb in gmatch(b,"[^,]+") do -- and not sa + for sa in gmatch(a,"[^,]+") do -- sb + n = n + 1 ; t[n] = sa .. sb + end + end + return concat(t,",") +end + +local left = P("{") +local right = P("}") +local var = P((1 - S("{}" ))^0) +local set = P((1 - S("{},"))^0) +local other = P(1) + +local l_first = Cs( ( Cc("{") * (C(set) * left * C(var) * right / f_first) * Cc("}") + other )^0 ) +local l_second = Cs( ( Cc("{") * (left * C(var) * right * C(set) / f_second) * Cc("}") + other )^0 ) +local l_both = Cs( ( Cc("{") * (left * C(var) * right * left * C(var) * right / f_both) * Cc("}") + other )^0 ) +local l_rest = Cs( ( left * var * (left/"") * var * (right/"") * var * right + other )^0 ) + +local stripper_1 = lpeg.stripper ("{}@") +local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, } + +local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise). + if trace_expansions then + report_expansions("expanding variable %a",str) + end + local t, ok, done = newlist or { }, false, false + local n = #t + str = lpegmatch(replacer_1,str) + repeat + local old = str + repeat + local old = str + str = lpegmatch(l_first, str) + until old == str + repeat + local old = str + str = lpegmatch(l_second,str) + until old == str + repeat + local old = str + str = lpegmatch(l_both, str) + until old == str + repeat + local old = str + str = lpegmatch(l_rest, str) + until old == str + until old == str -- or not find(str,"{") + str = lpegmatch(stripper_1,str) + if validate then + for s in gmatch(str,"[^,]+") do + s = validate(s) + if s then + n = n + 1 + t[n] = s + end + end + else + for s in gmatch(str,"[^,]+") do + n = n + 1 + t[n] = s + end + end + if trace_expansions then + for k=1,#t do + report_expansions("% 4i: %s",k,t[k]) + end + end + return t +end + +-- We could make the previous one public. + +local function validate(s) + s = collapsepath(s) -- already keeps the trailing / and // + return s ~= "" and not find(s,"^!*unset/*$") and s +end + +resolvers.validatedpath = validate -- keeps the trailing // + +function resolvers.expandedpathfromlist(pathlist) + local newlist = { } + for k=1,#pathlist do + splitpathexpr(pathlist[k],newlist,validate) + end + return newlist +end + +-- {a,b,c,d} +-- a,b,c/{p,q,r},d +-- a,b,c/{p,q,r}/d/{x,y,z}// +-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} +-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} +-- a{b,c}{d,e}f +-- {a,b,c,d} +-- {a,b,c/{p,q,r},d} +-- {a,b,c/{p,q,r}/d/{x,y,z}//} +-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}} +-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}} +-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c} + +local cleanup = lpeg.replacer { + { "!" 
, "" }, + { "\\" , "/" }, +} + +function resolvers.cleanpath(str) -- tricky, maybe only simple paths + local doslashes = (P("\\")/"/" + 1)^0 + local donegation = (P("!") /"" )^0 + local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "") + if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then + if trace_expansions then + report_expansions("no home dir set, ignoring dependent paths") + end + function resolvers.cleanpath(str) + if not str or find(str,"~") then + return "" -- special case + else + return lpegmatch(cleanup,str) + end + end + else + local dohome = ((P("~")+P("$HOME"))/homedir)^0 + local cleanup = Cs(donegation * dohome * doslashes) + function resolvers.cleanpath(str) + return str and lpegmatch(cleanup,str) or "" + end + end + return resolvers.cleanpath(str) +end + +-- print(resolvers.cleanpath("")) +-- print(resolvers.cleanpath("!")) +-- print(resolvers.cleanpath("~")) +-- print(resolvers.cleanpath("~/test")) +-- print(resolvers.cleanpath("!~/test")) +-- print(resolvers.cleanpath("~/test~test")) + +-- This one strips quotes and funny tokens. + +local expandhome = P("~") / "$HOME" -- environment.homedir or "home:" + +local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/"" +local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/"" +local dostring = (expandhome + 1 )^0 + +local stripper = Cs( + lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer +) + +function resolvers.checkedvariable(str) -- assumes str is a string + return type(str) == "string" and lpegmatch(stripper,str) or str +end + +-- The path splitter: + +-- A config (optionally) has the paths split in tables. Internally +-- we join them and split them after the expansion has taken place. This +-- is more convenient. + +local cache = { } + +----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add , +local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do : + +local backslashswapper = lpeg.replacer("\\","/") + +local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification } + if str then + local found = cache[str] + if not found then + if str == "" then + found = { } + else + local split = lpegmatch(splitter,lpegmatch(backslashswapper,str)) -- can be combined + found = { } + local noffound = 0 + for i=1,#split do + local s = split[i] + if not find(s,"^{*unset}*") then + noffound = noffound + 1 + found[noffound] = s + end + end + if trace_expansions then + report_expansions("splitting path specification %a",str) + for k=1,noffound do + report_expansions("% 4i: %s",k,found[k]) + end + end + cache[str] = found + end + end + return found + end +end + +resolvers.splitconfigurationpath = splitconfigurationpath + +function resolvers.splitpath(str) + if type(str) == 'table' then + return str + else + return splitconfigurationpath(str) + end +end + +function resolvers.joinpath(str) + if type(str) == 'table' then + return file.joinpath(str) + else + return str + end +end + +-- The next function scans directories and returns a hash where the +-- entries are either strings or tables. + +-- starting with . or .. 
etc or funny char + +--~ local l_forbidden = S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t") +--~ local l_confusing = P(" ") +--~ local l_character = lpegpatterns.utf8 +--~ local l_dangerous = P(".") + +--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * P(-1) +--~ ----- l_normal = l_normal * Cc(true) + Cc(false) + +--~ local function test(str) +--~ print(str,lpegmatch(l_normal,str)) +--~ end +--~ test("ãƒ’ãƒ©ã‚®ãƒŽæ˜Žæœ Pro W3") +--~ test("..ãƒ’ãƒ©ã‚®ãƒŽæ˜Žæœ Pro W3") +--~ test(":ãƒ’ãƒ©ã‚®ãƒŽæ˜Žæœ Pro W3;") +--~ test("ãƒ’ãƒ©ã‚®ãƒŽæ˜Žæœ /Pro W3;") +--~ test("ãƒ’ãƒ©ã‚®ãƒŽæ˜Žæœ Pro W3") + +-- a lot of this caching can be stripped away when we have ssd's everywhere +-- +-- we could cache all the (sub)paths here if needed + +local attributes, directory = lfs.attributes, lfs.dir + +local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) +local timer = { } +local scanned = { } +local nofscans = 0 +local scancache = { } + +local function scan(files,spec,path,n,m,r) + local full = (path == "" and spec) or (spec .. path .. '/') + local dirs = { } + local nofdirs = 0 + for name in directory(full) do + if not lpegmatch(weird,name) then + local mode = attributes(full..name,'mode') + if mode == 'file' then + n = n + 1 + local f = files[name] + if f then + if type(f) == 'string' then + files[name] = { f, path } + else + f[#f+1] = path + end + else -- probably unique anyway + files[name] = path + local lower = lower(name) + if name ~= lower then + files["remap:"..lower] = name + r = r + 1 + end + end + elseif mode == 'directory' then + m = m + 1 + nofdirs = nofdirs + 1 + if path ~= "" then + dirs[nofdirs] = path..'/'..name + else + dirs[nofdirs] = name + end + end + end + end + if nofdirs > 0 then + sort(dirs) + for i=1,nofdirs do + files, n, m, r = scan(files,spec,dirs[i],n,m,r) + end + end + scancache[sub(full,1,-2)] = files + return files, n, m, r +end + +local fullcache = { } + +function resolvers.scanfiles(path,branch,usecache) + statistics.starttiming(timer) + local realpath = resolvers.resolve(path) -- no shortcut + if usecache then + local files = fullcache[realpath] + if files then + if trace_locating then + report_expansions("using caches scan of path %a, branch %a",path,branch or path) + end + return files + end + end + if trace_locating then + report_expansions("scanning path %a, branch %a",path,branch or path) + end + local files, n, m, r = scan({ },realpath .. '/',"",0,0,0) + files.__path__ = path -- can be selfautoparent:texmf-whatever + files.__files__ = n + files.__directories__ = m + files.__remappings__ = r + if trace_locating then + report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r) + end + if usecache then + scanned[#scanned+1] = realpath + fullcache[realpath] = files + end + nofscans = nofscans + 1 + statistics.stoptiming(timer) + return files +end + +local function simplescan(files,spec,path) -- first match only, no map and such + local full = (path == "" and spec) or (spec .. path .. 
'/') + local dirs = { } + local nofdirs = 0 + for name in directory(full) do + if not lpegmatch(weird,name) then + local mode = attributes(full..name,'mode') + if mode == 'file' then + if not files[name] then + -- only first match + files[name] = path + end + elseif mode == 'directory' then + nofdirs = nofdirs + 1 + if path ~= "" then + dirs[nofdirs] = path..'/'..name + else + dirs[nofdirs] = name + end + end + end + end + if nofdirs > 0 then + sort(dirs) + for i=1,nofdirs do + files = simplescan(files,spec,dirs[i]) + end + end + return files +end + +local simplecache = { } +local nofsharedscans = 0 + +function resolvers.simplescanfiles(path,branch,usecache) + statistics.starttiming(timer) + local realpath = resolvers.resolve(path) -- no shortcut + if usecache then + local files = simplecache[realpath] + if not files then + files = scancache[realpath] + if files then + nofsharedscans = nofsharedscans + 1 + end + end + if files then + if trace_locating then + report_expansions("using caches scan of path %a, branch %a",path,branch or path) + end + return files + end + end + if trace_locating then + report_expansions("scanning path %a, branch %a",path,branch or path) + end + local files = simplescan({ },realpath .. '/',"") + if trace_locating then + report_expansions("%s files found",table.count(files)) + end + if usecache then + scanned[#scanned+1] = realpath + simplecache[realpath] = files + end + nofscans = nofscans + 1 + statistics.stoptiming(timer) + return files +end + +function resolvers.scandata() + table.sort(scanned) + return { + n = nofscans, + shared = nofsharedscans, + time = statistics.elapsedtime(timer), + paths = scanned, + } +end + +--~ print(table.serialize(resolvers.scanfiles("t:/sources"))) diff --git a/tex/context/base/data-fil.lua b/tex/context/base/data-fil.lua index 09129e03c..5ef2612e9 100644 --- a/tex/context/base/data-fil.lua +++ b/tex/context/base/data-fil.lua @@ -1,113 +1,113 @@ -if not modules then modules = { } end modules ['data-fil'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) - -local report_files = logs.reporter("resolvers","files") - -local resolvers = resolvers - -local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers -local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators - -local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check - -function locators.file(specification) - local name = specification.filename - local realname = resolvers.resolve(name) -- no shortcut - if realname and realname ~= '' and lfs.isdir(realname) then - if trace_locating then - report_files("file locator %a found as %a",name,realname) - end - resolvers.appendhash('file',name,true) -- cache - elseif trace_locating then - report_files("file locator %a not found",name) - end -end - -function hashers.file(specification) - local name = specification.filename - local content = caches.loadcontent(name,'files') - resolvers.registerfilehash(name,content,content==nil) -end - -function generators.file(specification) - local path = specification.filename - local content = resolvers.scanfiles(path,false,true) -- scan once ---~ inspect(content) - 
resolvers.registerfilehash(path,content,true) -end - -concatinators.file = file.join - -function finders.file(specification,filetype) - local filename = specification.filename - local foundname = resolvers.findfile(filename,filetype) - if foundname and foundname ~= "" then - if trace_locating then - report_files("file finder: %a found",filename) - end - return foundname - else - if trace_locating then - report_files("file finder: %a not found",filename) - end - return finders.notfound() - end -end - --- The default textopener will be overloaded later on. - -function openers.helpers.textopener(tag,filename,f) - return { - reader = function() return f:read () end, - close = function() logs.show_close(filename) return f:close() end, - } -end - -function openers.file(specification,filetype) - local filename = specification.filename - if filename and filename ~= "" then - local f = io.open(filename,"r") - if f then - if trace_locating then - report_files("file opener: %a opened",filename) - end - return openers.helpers.textopener("file",filename,f) - end - end - if trace_locating then - report_files("file opener: %a not found",filename) - end - return openers.notfound() -end - -function loaders.file(specification,filetype) - local filename = specification.filename - if filename and filename ~= "" then - local f = io.open(filename,"rb") - if f then - logs.show_load(filename) - if trace_locating then - report_files("file loader: %a loaded",filename) - end - local s = f:read("*a") -- io.readall(f) is faster but we never have large files here - if checkgarbage then - checkgarbage(#s) - end - f:close() - if s then - return true, s, #s - end - end - end - if trace_locating then - report_files("file loader: %a not found",filename) - end - return loaders.notfound() -end +if not modules then modules = { } end modules ['data-fil'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) + +local report_files = logs.reporter("resolvers","files") + +local resolvers = resolvers + +local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers +local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators + +local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check + +function locators.file(specification) + local name = specification.filename + local realname = resolvers.resolve(name) -- no shortcut + if realname and realname ~= '' and lfs.isdir(realname) then + if trace_locating then + report_files("file locator %a found as %a",name,realname) + end + resolvers.appendhash('file',name,true) -- cache + elseif trace_locating then + report_files("file locator %a not found",name) + end +end + +function hashers.file(specification) + local name = specification.filename + local content = caches.loadcontent(name,'files') + resolvers.registerfilehash(name,content,content==nil) +end + +function generators.file(specification) + local path = specification.filename + local content = resolvers.scanfiles(path,false,true) -- scan once +--~ inspect(content) + resolvers.registerfilehash(path,content,true) +end + +concatinators.file = file.join + +function finders.file(specification,filetype) + local filename 
= specification.filename + local foundname = resolvers.findfile(filename,filetype) + if foundname and foundname ~= "" then + if trace_locating then + report_files("file finder: %a found",filename) + end + return foundname + else + if trace_locating then + report_files("file finder: %a not found",filename) + end + return finders.notfound() + end +end + +-- The default textopener will be overloaded later on. + +function openers.helpers.textopener(tag,filename,f) + return { + reader = function() return f:read () end, + close = function() logs.show_close(filename) return f:close() end, + } +end + +function openers.file(specification,filetype) + local filename = specification.filename + if filename and filename ~= "" then + local f = io.open(filename,"r") + if f then + if trace_locating then + report_files("file opener: %a opened",filename) + end + return openers.helpers.textopener("file",filename,f) + end + end + if trace_locating then + report_files("file opener: %a not found",filename) + end + return openers.notfound() +end + +function loaders.file(specification,filetype) + local filename = specification.filename + if filename and filename ~= "" then + local f = io.open(filename,"rb") + if f then + logs.show_load(filename) + if trace_locating then + report_files("file loader: %a loaded",filename) + end + local s = f:read("*a") -- io.readall(f) is faster but we never have large files here + if checkgarbage then + checkgarbage(#s) + end + f:close() + if s then + return true, s, #s + end + end + end + if trace_locating then + report_files("file loader: %a not found",filename) + end + return loaders.notfound() +end diff --git a/tex/context/base/data-gen.lua b/tex/context/base/data-gen.lua index 5a0755831..c1861fea1 100644 --- a/tex/context/base/data-gen.lua +++ b/tex/context/base/data-gen.lua @@ -1,9 +1,9 @@ -if not modules then modules = { } end modules ['data-gen'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- move generators here +if not modules then modules = { } end modules ['data-gen'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- move generators here diff --git a/tex/context/base/data-ini.lua b/tex/context/base/data-ini.lua index 201c6a2d7..16f61a8c4 100644 --- a/tex/context/base/data-ini.lua +++ b/tex/context/base/data-ini.lua @@ -1,232 +1,232 @@ -if not modules then modules = { } end modules ['data-ini'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - -local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char -local next, type = next, type - -local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join - -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end) -local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end) - -local report_initialization = logs.reporter("resolvers","initialization") - -local ostype, 
osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv - --- The code here used to be part of a data-res but for convenience --- we now split it over multiple files. As this file is now the --- starting point we introduce resolvers here. - -resolvers = resolvers or { } -local resolvers = resolvers - --- We don't want the kpse library to kick in. Also, we want to be able to --- execute programs. Control over execution is implemented later. - -texconfig.kpse_init = false -texconfig.shell_escape = 't' - -if not (environment and environment.default_texmfcnf) and kpse and kpse.default_texmfcnf then - local default_texmfcnf = kpse.default_texmfcnf() - -- looks more like context: - default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:") - default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTODIR","selfautodir:") - default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOPARENT","selfautoparent:") - default_texmfcnf = gsub(default_texmfcnf,"$HOME","home:") - -- - environment.default_texmfcnf = default_texmfcnf -end - -kpse = { original = kpse } - -setmetatable(kpse, { - __index = function(kp,name) - report_initialization("fatal error: kpse library is accessed (key: %s)",name) - os.exit() - end -} ) - --- First we check a couple of environment variables. Some might be --- set already but we need then later on. We start with the system --- font path. - -do - - local osfontdir = osgetenv("OSFONTDIR") - - if osfontdir and osfontdir ~= "" then - -- ok - elseif osname == "windows" then - ossetenv("OSFONTDIR","c:/windows/fonts//") - elseif osname == "macosx" then - ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//") - end - -end - --- Next comes the user's home path. We need this as later on we have --- to replace ~ with its value. - -do - - local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or '' - - if not homedir or homedir == "" then - homedir = char(127) -- we need a value, later we wil trigger on it - end - - homedir = file.collapsepath(homedir) - - ossetenv("HOME", homedir) -- can be used in unix cnf files - ossetenv("USERPROFILE",homedir) -- can be used in windows cnf files - - environment.homedir = homedir - -end - --- The following code sets the name of the own binary and its --- path. This is fallback code as we have os.selfdir now. 
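The do block below only falls back to scanning PATH when neither the original arguments nor os.selfdir yield a usable directory; condensed to its core idea it amounts to the following sketch (findbinarypath is an invented name, the real code additionally chdir's into each candidate so that symlinked entries resolve to their targets, and file.replacesuffix, file.join, io.pathseparator, os.binsuffix and lfs.isfile are assumed to be available as they are in this module).

local function findbinarypath(binary)
    if os.binsuffix and os.binsuffix ~= "" then
        binary = file.replacesuffix(binary,os.binsuffix) -- e.g. add ".exe" on windows
    end
    local path = os.getenv("PATH")
    if path then
        for p in string.gmatch(path,"[^" .. io.pathseparator .. "]+") do
            if lfs.isfile(file.join(p,binary)) then
                return p -- first path entry that actually holds the binary
            end
        end
    end
    return "." -- same last resort as the fallback below
end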
- -do - - local args = environment.originalarguments or arg -- this needs a cleanup - - if not environment.ownmain then - environment.ownmain = status and string.match(string.lower(status.banner),"this is ([%a]+)") or "luatex" - end - - local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex" - local ownpath = environment.ownpath or os.selfdir - - ownbin = file.collapsepath(ownbin) - ownpath = file.collapsepath(ownpath) - - if not ownpath or ownpath == "" or ownpath == "unset" then - ownpath = args[-1] or arg[-1] - ownpath = ownpath and filedirname(gsub(ownpath,"\\","/")) - if not ownpath or ownpath == "" then - ownpath = args[-0] or arg[-0] - ownpath = ownpath and filedirname(gsub(ownpath,"\\","/")) - end - local binary = ownbin - if not ownpath or ownpath == "" then - ownpath = ownpath and filedirname(binary) - end - if not ownpath or ownpath == "" then - if os.binsuffix ~= "" then - binary = file.replacesuffix(binary,os.binsuffix) - end - local path = osgetenv("PATH") - if path then - for p in gmatch(path,"[^"..io.pathseparator.."]+") do - local b = filejoin(p,binary) - if lfs.isfile(b) then - -- we assume that after changing to the path the currentdir function - -- resolves to the real location and use this side effect here; this - -- trick is needed because on the mac installations use symlinks in the - -- path instead of real locations - local olddir = lfs.currentdir() - if lfs.chdir(p) then - local pp = lfs.currentdir() - if trace_locating and p ~= pp then - report_initialization("following symlink %a to %a",p,pp) - end - ownpath = pp - lfs.chdir(olddir) - else - if trace_locating then - report_initialization("unable to check path %a",p) - end - ownpath = p - end - break - end - end - end - end - if not ownpath or ownpath == "" then - ownpath = "." - report_initialization("forcing fallback to ownpath %a",ownpath) - elseif trace_locating then - report_initialization("using ownpath %a",ownpath) - end - end - - environment.ownbin = ownbin - environment.ownpath = ownpath - -end - -resolvers.ownpath = environment.ownpath - -function resolvers.getownpath() - return environment.ownpath -end - --- The self variables permit us to use only a few (or even no) --- environment variables. - -do - - local ownpath = environment.ownpath or dir.current() - - if ownpath then - ossetenv('SELFAUTOLOC', file.collapsepath(ownpath)) - ossetenv('SELFAUTODIR', file.collapsepath(ownpath .. "/..")) - ossetenv('SELFAUTOPARENT', file.collapsepath(ownpath .. 
"/../..")) - else - report_initialization("error: unable to locate ownpath") - os.exit() - end - -end - --- The running os: - --- todo: check is context sits here os.platform is more trustworthy --- that the bin check as mtx-update runs from another path - -local texos = environment.texos or osgetenv("TEXOS") -local texmfos = environment.texmfos or osgetenv('SELFAUTODIR') - -if not texos or texos == "" then - texos = file.basename(texmfos) -end - -ossetenv('TEXMFOS', texmfos) -- full bin path -ossetenv('TEXOS', texos) -- partial bin parent -ossetenv('SELFAUTOSYSTEM',os.platform) -- bonus - -environment.texos = texos -environment.texmfos = texmfos - --- The current root: - -local texroot = environment.texroot or osgetenv("TEXROOT") - -if not texroot or texroot == "" then - texroot = osgetenv('SELFAUTOPARENT') - ossetenv('TEXROOT',texroot) -end - -environment.texroot = file.collapsepath(texroot) - -if profiler then - directives.register("system.profile",function() - profiler.start("luatex-profile.log") - end) -end - --- a forward definition - -if not resolvers.resolve then - function resolvers.resolve (s) return s end - function resolvers.unresolve(s) return s end - function resolvers.repath (s) return s end -end +if not modules then modules = { } end modules ['data-ini'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char +local next, type = next, type + +local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) +local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end) +local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end) + +local report_initialization = logs.reporter("resolvers","initialization") + +local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv + +-- The code here used to be part of a data-res but for convenience +-- we now split it over multiple files. As this file is now the +-- starting point we introduce resolvers here. + +resolvers = resolvers or { } +local resolvers = resolvers + +-- We don't want the kpse library to kick in. Also, we want to be able to +-- execute programs. Control over execution is implemented later. + +texconfig.kpse_init = false +texconfig.shell_escape = 't' + +if not (environment and environment.default_texmfcnf) and kpse and kpse.default_texmfcnf then + local default_texmfcnf = kpse.default_texmfcnf() + -- looks more like context: + default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:") + default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTODIR","selfautodir:") + default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOPARENT","selfautoparent:") + default_texmfcnf = gsub(default_texmfcnf,"$HOME","home:") + -- + environment.default_texmfcnf = default_texmfcnf +end + +kpse = { original = kpse } + +setmetatable(kpse, { + __index = function(kp,name) + report_initialization("fatal error: kpse library is accessed (key: %s)",name) + os.exit() + end +} ) + +-- First we check a couple of environment variables. Some might be +-- set already but we need then later on. We start with the system +-- font path. 
+ +do + + local osfontdir = osgetenv("OSFONTDIR") + + if osfontdir and osfontdir ~= "" then + -- ok + elseif osname == "windows" then + ossetenv("OSFONTDIR","c:/windows/fonts//") + elseif osname == "macosx" then + ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//") + end + +end + +-- Next comes the user's home path. We need this as later on we have +-- to replace ~ with its value. + +do + + local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or '' + + if not homedir or homedir == "" then + homedir = char(127) -- we need a value, later we wil trigger on it + end + + homedir = file.collapsepath(homedir) + + ossetenv("HOME", homedir) -- can be used in unix cnf files + ossetenv("USERPROFILE",homedir) -- can be used in windows cnf files + + environment.homedir = homedir + +end + +-- The following code sets the name of the own binary and its +-- path. This is fallback code as we have os.selfdir now. + +do + + local args = environment.originalarguments or arg -- this needs a cleanup + + if not environment.ownmain then + environment.ownmain = status and string.match(string.lower(status.banner),"this is ([%a]+)") or "luatex" + end + + local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex" + local ownpath = environment.ownpath or os.selfdir + + ownbin = file.collapsepath(ownbin) + ownpath = file.collapsepath(ownpath) + + if not ownpath or ownpath == "" or ownpath == "unset" then + ownpath = args[-1] or arg[-1] + ownpath = ownpath and filedirname(gsub(ownpath,"\\","/")) + if not ownpath or ownpath == "" then + ownpath = args[-0] or arg[-0] + ownpath = ownpath and filedirname(gsub(ownpath,"\\","/")) + end + local binary = ownbin + if not ownpath or ownpath == "" then + ownpath = ownpath and filedirname(binary) + end + if not ownpath or ownpath == "" then + if os.binsuffix ~= "" then + binary = file.replacesuffix(binary,os.binsuffix) + end + local path = osgetenv("PATH") + if path then + for p in gmatch(path,"[^"..io.pathseparator.."]+") do + local b = filejoin(p,binary) + if lfs.isfile(b) then + -- we assume that after changing to the path the currentdir function + -- resolves to the real location and use this side effect here; this + -- trick is needed because on the mac installations use symlinks in the + -- path instead of real locations + local olddir = lfs.currentdir() + if lfs.chdir(p) then + local pp = lfs.currentdir() + if trace_locating and p ~= pp then + report_initialization("following symlink %a to %a",p,pp) + end + ownpath = pp + lfs.chdir(olddir) + else + if trace_locating then + report_initialization("unable to check path %a",p) + end + ownpath = p + end + break + end + end + end + end + if not ownpath or ownpath == "" then + ownpath = "." + report_initialization("forcing fallback to ownpath %a",ownpath) + elseif trace_locating then + report_initialization("using ownpath %a",ownpath) + end + end + + environment.ownbin = ownbin + environment.ownpath = ownpath + +end + +resolvers.ownpath = environment.ownpath + +function resolvers.getownpath() + return environment.ownpath +end + +-- The self variables permit us to use only a few (or even no) +-- environment variables. + +do + + local ownpath = environment.ownpath or dir.current() + + if ownpath then + ossetenv('SELFAUTOLOC', file.collapsepath(ownpath)) + ossetenv('SELFAUTODIR', file.collapsepath(ownpath .. "/..")) + ossetenv('SELFAUTOPARENT', file.collapsepath(ownpath .. 
"/../..")) + else + report_initialization("error: unable to locate ownpath") + os.exit() + end + +end + +-- The running os: + +-- todo: check is context sits here os.platform is more trustworthy +-- that the bin check as mtx-update runs from another path + +local texos = environment.texos or osgetenv("TEXOS") +local texmfos = environment.texmfos or osgetenv('SELFAUTODIR') + +if not texos or texos == "" then + texos = file.basename(texmfos) +end + +ossetenv('TEXMFOS', texmfos) -- full bin path +ossetenv('TEXOS', texos) -- partial bin parent +ossetenv('SELFAUTOSYSTEM',os.platform) -- bonus + +environment.texos = texos +environment.texmfos = texmfos + +-- The current root: + +local texroot = environment.texroot or osgetenv("TEXROOT") + +if not texroot or texroot == "" then + texroot = osgetenv('SELFAUTOPARENT') + ossetenv('TEXROOT',texroot) +end + +environment.texroot = file.collapsepath(texroot) + +if profiler then + directives.register("system.profile",function() + profiler.start("luatex-profile.log") + end) +end + +-- a forward definition + +if not resolvers.resolve then + function resolvers.resolve (s) return s end + function resolvers.unresolve(s) return s end + function resolvers.repath (s) return s end +end diff --git a/tex/context/base/data-inp.lua b/tex/context/base/data-inp.lua index 97fb8904b..2fed75dab 100644 --- a/tex/context/base/data-inp.lua +++ b/tex/context/base/data-inp.lua @@ -1,25 +1,25 @@ -if not modules then modules = { } end modules ['data-inp'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local allocate = utilities.storage.allocate -local resolvers = resolvers - -local methodhandler = resolvers.methodhandler -local registermethod = resolvers.registermethod - -local finders = allocate { helpers = { }, notfound = function() end } -local openers = allocate { helpers = { }, notfound = function() end } -local loaders = allocate { helpers = { }, notfound = function() return false, nil, 0 end } - -registermethod("finders", finders, "uri") -registermethod("openers", openers, "uri") -registermethod("loaders", loaders, "uri") - -resolvers.finders = finders -resolvers.openers = openers -resolvers.loaders = loaders +if not modules then modules = { } end modules ['data-inp'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local allocate = utilities.storage.allocate +local resolvers = resolvers + +local methodhandler = resolvers.methodhandler +local registermethod = resolvers.registermethod + +local finders = allocate { helpers = { }, notfound = function() end } +local openers = allocate { helpers = { }, notfound = function() end } +local loaders = allocate { helpers = { }, notfound = function() return false, nil, 0 end } + +registermethod("finders", finders, "uri") +registermethod("openers", openers, "uri") +registermethod("loaders", loaders, "uri") + +resolvers.finders = finders +resolvers.openers = openers +resolvers.loaders = loaders diff --git a/tex/context/base/data-lst.lua b/tex/context/base/data-lst.lua index 8996fa251..d830c4f1f 100644 --- a/tex/context/base/data-lst.lua +++ b/tex/context/base/data-lst.lua @@ -1,77 +1,77 @@ -if not modules then modules = { } end modules ['data-lst'] = { - version = 1.001, - comment = "companion 
to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- used in mtxrun, can be loaded later .. todo - -local find, concat, upper, format = string.find, table.concat, string.upper, string.format -local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs - -resolvers.listers = resolvers.listers or { } - -local resolvers = resolvers - -local report_lists = logs.reporter("resolvers","lists") - -local function tabstr(str) - if type(str) == 'table' then - return concat(str," | ") - else - return str - end -end - -function resolvers.listers.variables(pattern) - local instance = resolvers.instance - local environment = instance.environment - local variables = instance.variables - local expansions = instance.expansions - local pattern = upper(pattern or "") - local configured = { } - local order = instance.order - for i=1,#order do - for k, v in next, order[i] do - if v ~= nil and configured[k] == nil then - configured[k] = v - end - end - end - local env = fastcopy(environment) - local var = fastcopy(variables) - local exp = fastcopy(expansions) - for key, value in sortedpairs(configured) do - if key ~= "" and (pattern == "" or find(upper(key),pattern)) then - report_lists(key) - report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset") - report_lists(" var: %s",tabstr(configured[key]) or "unset") - report_lists(" exp: %s",tabstr(expansions[key]) or "unset") - report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset") - end - end - instance.environment = fastcopy(env) - instance.variables = fastcopy(var) - instance.expansions = fastcopy(exp) -end - -local report_resolved = logs.reporter("system","resolved") - -function resolvers.listers.configurations() - local configurations = resolvers.instance.specification - for i=1,#configurations do - report_resolved("file : %s",resolvers.resolve(configurations[i])) - end - report_resolved("") - local list = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec)) - for i=1,#list do - local li = resolvers.resolve(list[i]) - if lfs.isdir(li) then - report_resolved("path - %s",li) - else - report_resolved("path + %s",li) - end - end -end +if not modules then modules = { } end modules ['data-lst'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- used in mtxrun, can be loaded later .. 
todo + +local find, concat, upper, format = string.find, table.concat, string.upper, string.format +local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs + +resolvers.listers = resolvers.listers or { } + +local resolvers = resolvers + +local report_lists = logs.reporter("resolvers","lists") + +local function tabstr(str) + if type(str) == 'table' then + return concat(str," | ") + else + return str + end +end + +function resolvers.listers.variables(pattern) + local instance = resolvers.instance + local environment = instance.environment + local variables = instance.variables + local expansions = instance.expansions + local pattern = upper(pattern or "") + local configured = { } + local order = instance.order + for i=1,#order do + for k, v in next, order[i] do + if v ~= nil and configured[k] == nil then + configured[k] = v + end + end + end + local env = fastcopy(environment) + local var = fastcopy(variables) + local exp = fastcopy(expansions) + for key, value in sortedpairs(configured) do + if key ~= "" and (pattern == "" or find(upper(key),pattern)) then + report_lists(key) + report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset") + report_lists(" var: %s",tabstr(configured[key]) or "unset") + report_lists(" exp: %s",tabstr(expansions[key]) or "unset") + report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset") + end + end + instance.environment = fastcopy(env) + instance.variables = fastcopy(var) + instance.expansions = fastcopy(exp) +end + +local report_resolved = logs.reporter("system","resolved") + +function resolvers.listers.configurations() + local configurations = resolvers.instance.specification + for i=1,#configurations do + report_resolved("file : %s",resolvers.resolve(configurations[i])) + end + report_resolved("") + local list = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec)) + for i=1,#list do + local li = resolvers.resolve(list[i]) + if lfs.isdir(li) then + report_resolved("path - %s",li) + else + report_resolved("path + %s",li) + end + end +end diff --git a/tex/context/base/data-lua.lua b/tex/context/base/data-lua.lua index 0e7c81181..cacffcaf8 100644 --- a/tex/context/base/data-lua.lua +++ b/tex/context/base/data-lua.lua @@ -1,131 +1,131 @@ -if not modules then modules = { } end modules ['data-lua'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This is now a plug in into l-lua (as we also use the extra paths elsewhere). 
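The two methods defined in this module become steps in the helpers.sequence list further down; assuming l-package consults package.helpers.sequence and package.helpers.methods on each require, a project specific step could be hooked in along the same lines (the method name, search location and position are invented):

local helpers = package.helpers
helpers.methods["project lua file"] = function(name)
    local fullname = "./projectlibs/" .. name .. ".lua" -- invented location
    if lfs.isfile(fullname) then
        return loadfile(fullname)
    end
end
table.insert(helpers.sequence,#helpers.sequence,"project lua file") -- just before "not loaded"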
- -local resolvers, package = resolvers, package - -local gsub = string.gsub -local concat = table.concat -local addsuffix = file.addsuffix - -local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match - -local luasuffixes = { 'tex', 'lua' } -local libsuffixes = { 'lib' } -local luaformats = { 'TEXINPUTS', 'LUAINPUTS' } -local libformats = { 'CLUAINPUTS' } -local helpers = package.helpers or { } -local methods = helpers.methods or { } - -trackers.register("resolvers.libraries", function(v) helpers.trace = v end) -trackers.register("resolvers.locating", function(v) helpers.trace = v end) - -helpers.report = logs.reporter("resolvers","libraries") - -helpers.sequence = { - "already loaded", - "preload table", - "lua variable format", - "lib variable format", - "lua extra list", - "lib extra list", - "path specification", - "cpath specification", - "all in one fallback", - "not loaded", -} - -local pattern = Cs(P("!")^0 / "" * (P("/") * P(-1) / "/" + P("/")^1 / "/" + 1)^0) - -function helpers.cleanpath(path) -- hm, don't we have a helper for this? - return resolvers.resolve(lpegmatch(pattern,path)) -end - -local loadedaslib = helpers.loadedaslib -local getextraluapaths = package.extraluapaths -local getextralibpaths = package.extralibpaths -local registerpath = helpers.registerpath -local lualibfile = helpers.lualibfile - -local luaformatpaths -local libformatpaths - -local function getluaformatpaths() - if not luaformatpaths then - luaformatpaths = { } - for i=1,#luaformats do - registerpath("lua format","lua",luaformatpaths,resolvers.expandedpathlistfromvariable(luaformats[i])) - end - end - return luaformatpaths -end - -local function getlibformatpaths() - if not libformatpaths then - libformatpaths = { } - for i=1,#libformats do - registerpath("lib format","lib",libformatpaths,resolvers.expandedpathlistfromvariable(libformats[i])) - end - end - return libformatpaths -end - -local function loadedbyformat(name,rawname,suffixes,islib,what) - local trace = helpers.trace - local report = helpers.report - for i=1,#suffixes do -- so we use findfile and not a lookup loop - local format = suffixes[i] - local resolved = resolvers.findfile(name,format) or "" - if trace then - report("%s format, identifying %a using format %a",what,name,format) - end - if resolved ~= "" then - if trace then - report("%s format, %a found on %a",what,name,resolved) - end - if islib then - return loadedaslib(resolved,rawname) - else - return loadfile(resolved) - end - end - end -end - -helpers.loadedbyformat = loadedbyformat - --- print(lualibfile("bar")) --- print(lualibfile("foo.bar")) --- print(lualibfile("crap/foo...bar")) --- print(lualibfile("crap//foo.bar")) --- print(lualibfile("crap/../foo.bar")) --- print(lualibfile("crap/.././foo.bar")) - --- alternatively we could split in path and base and temporary set the libpath to path - --- we could build a list of relevant paths but for tracing it's better to have the --- whole lot (ok, we could skip the duplicates) - -methods["lua variable format"] = function(name) - if helpers.trace then - helpers.report("%s format, checking %s paths","lua",#getluaformatpaths()) -- call triggers building - end - return loadedbyformat(addsuffix(lualibfile(name),"lua"),name,luasuffixes,false,"lua") -end - -methods["lib variable format"] = function(name) - if helpers.trace then - helpers.report("%s format, checking %s paths","lib",#getlibformatpaths()) -- call triggers building - end - return loadedbyformat(addsuffix(lualibfile(name),os.libsuffix),name,libsuffixes,true,"lib") -end 
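Taken together, the lua and lib variable format methods above boil down to a findfile loop over the declared suffixes; a rough usage sketch of the lua branch follows (the module name is invented; lualibfile, addsuffix, luasuffixes, loadedaslib and resolvers.findfile are the locals and calls used in this file):

local function loadbyluaformat(name) -- invented wrapper around the method above
    local filename = addsuffix(lualibfile(name),"lua")
    for i=1,#luasuffixes do -- { 'tex', 'lua' }
        local resolved = resolvers.findfile(filename,luasuffixes[i]) or ""
        if resolved ~= "" then
            return loadfile(resolved) -- the lib branch returns loadedaslib(resolved,name) instead
        end
    end
end

local chunk = loadbyluaformat("my-extras") -- hypothetical module name, nil when nothing is found
if chunk then chunk() end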
- --- package.extraclibpath(environment.ownpath) - -resolvers.loadlualib = require -- hm +if not modules then modules = { } end modules ['data-lua'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This is now a plug in into l-lua (as we also use the extra paths elsewhere). + +local resolvers, package = resolvers, package + +local gsub = string.gsub +local concat = table.concat +local addsuffix = file.addsuffix + +local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match + +local luasuffixes = { 'tex', 'lua' } +local libsuffixes = { 'lib' } +local luaformats = { 'TEXINPUTS', 'LUAINPUTS' } +local libformats = { 'CLUAINPUTS' } +local helpers = package.helpers or { } +local methods = helpers.methods or { } + +trackers.register("resolvers.libraries", function(v) helpers.trace = v end) +trackers.register("resolvers.locating", function(v) helpers.trace = v end) + +helpers.report = logs.reporter("resolvers","libraries") + +helpers.sequence = { + "already loaded", + "preload table", + "lua variable format", + "lib variable format", + "lua extra list", + "lib extra list", + "path specification", + "cpath specification", + "all in one fallback", + "not loaded", +} + +local pattern = Cs(P("!")^0 / "" * (P("/") * P(-1) / "/" + P("/")^1 / "/" + 1)^0) + +function helpers.cleanpath(path) -- hm, don't we have a helper for this? + return resolvers.resolve(lpegmatch(pattern,path)) +end + +local loadedaslib = helpers.loadedaslib +local getextraluapaths = package.extraluapaths +local getextralibpaths = package.extralibpaths +local registerpath = helpers.registerpath +local lualibfile = helpers.lualibfile + +local luaformatpaths +local libformatpaths + +local function getluaformatpaths() + if not luaformatpaths then + luaformatpaths = { } + for i=1,#luaformats do + registerpath("lua format","lua",luaformatpaths,resolvers.expandedpathlistfromvariable(luaformats[i])) + end + end + return luaformatpaths +end + +local function getlibformatpaths() + if not libformatpaths then + libformatpaths = { } + for i=1,#libformats do + registerpath("lib format","lib",libformatpaths,resolvers.expandedpathlistfromvariable(libformats[i])) + end + end + return libformatpaths +end + +local function loadedbyformat(name,rawname,suffixes,islib,what) + local trace = helpers.trace + local report = helpers.report + for i=1,#suffixes do -- so we use findfile and not a lookup loop + local format = suffixes[i] + local resolved = resolvers.findfile(name,format) or "" + if trace then + report("%s format, identifying %a using format %a",what,name,format) + end + if resolved ~= "" then + if trace then + report("%s format, %a found on %a",what,name,resolved) + end + if islib then + return loadedaslib(resolved,rawname) + else + return loadfile(resolved) + end + end + end +end + +helpers.loadedbyformat = loadedbyformat + +-- print(lualibfile("bar")) +-- print(lualibfile("foo.bar")) +-- print(lualibfile("crap/foo...bar")) +-- print(lualibfile("crap//foo.bar")) +-- print(lualibfile("crap/../foo.bar")) +-- print(lualibfile("crap/.././foo.bar")) + +-- alternatively we could split in path and base and temporary set the libpath to path + +-- we could build a list of relevant paths but for tracing it's better to have the +-- whole lot (ok, we could skip the duplicates) + +methods["lua variable format"] = function(name) + if helpers.trace then + helpers.report("%s format, 
checking %s paths","lua",#getluaformatpaths()) -- call triggers building + end + return loadedbyformat(addsuffix(lualibfile(name),"lua"),name,luasuffixes,false,"lua") +end + +methods["lib variable format"] = function(name) + if helpers.trace then + helpers.report("%s format, checking %s paths","lib",#getlibformatpaths()) -- call triggers building + end + return loadedbyformat(addsuffix(lualibfile(name),os.libsuffix),name,libsuffixes,true,"lib") +end + +-- package.extraclibpath(environment.ownpath) + +resolvers.loadlualib = require -- hm diff --git a/tex/context/base/data-met.lua b/tex/context/base/data-met.lua index 96da70bfd..7f97fbced 100644 --- a/tex/context/base/data-met.lua +++ b/tex/context/base/data-met.lua @@ -1,133 +1,133 @@ -if not modules then modules = { } end modules ['data-met'] = { - version = 1.100, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local find, format = string.find, string.format -local sequenced = table.sequenced -local addurlscheme, urlhashed = url.addscheme, url.hashed -local getcurrentdir = lfs.currentdir - -local trace_locating = false -local trace_methods = false - -trackers.register("resolvers.locating", function(v) trace_methods = v end) -trackers.register("resolvers.methods", function(v) trace_methods = v end) - ---~ trace_methods = true - -local report_methods = logs.reporter("resolvers","methods") - -local allocate = utilities.storage.allocate - -local resolvers = resolvers - -local registered = { } - -local function splitmethod(filename) -- todo: filetype in specification - if not filename then - return { scheme = "unknown", original = filename } - end - if type(filename) == "table" then - return filename -- already split - end - filename = file.collapsepath(filename,".") -- hm, we should keep ./ in some cases - --- filename = gsub(filename,"^%./",getcurrentdir().."/") -- we will merge dir.expandname and collapse some day - - if not find(filename,"://") then - return { scheme = "file", path = filename, original = filename, filename = filename } - end - local specification = url.hashed(filename) - if not specification.scheme or specification.scheme == "" then - return { scheme = "file", path = filename, original = filename, filename = filename } - else - return specification - end -end - -resolvers.splitmethod = splitmethod -- bad name but ok - --- the second argument is always analyzed (saves time later on) and the original --- gets passed as original but also as argument - -local function methodhandler(what,first,...) -- filename can be nil or false - local method = registered[what] - if method then - local how, namespace = method.how, method.namespace - if how == "uri" or how == "url" then - local specification = splitmethod(first) - local scheme = specification.scheme - local resolver = namespace and namespace[scheme] - if resolver then - if trace_methods then - report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,scheme,first) - end - return resolver(specification,...) - else - resolver = namespace.default or namespace.file - if resolver then - if trace_methods then - report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"default",first) - end - return resolver(specification,...) 
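-- example of the dispatch above (the url and filename are invented, the handler names come from the
-- surrounding modules): resolvers.methodhandler("finders","http://example.org/foo.tex","tex") is split
-- by splitmethod into { scheme = "http", ... } and routed to finders.http when such a handler exists,
-- otherwise it ends up in namespace.default or namespace.file as done here; a bare "foo.tex" gets
-- scheme "file" and therefore reaches finders.file directly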
- elseif trace_methods then - report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"unset") - end - end - elseif how == "tag" then - local resolver = namespace and namespace[first] - if resolver then - if trace_methods then - report_methods("resolving, method %a, how %a, tag %a",what,how,first) - end - return resolver(...) - else - resolver = namespace.default or namespace.file - if resolver then - if trace_methods then - report_methods("resolving, method %a, how %a, tag %a",what,how,"default") - end - return resolver(...) - elseif trace_methods then - report_methods("resolving, method %a, how %a, tag %a",what,how,"unset") - end - end - end - else - report_methods("resolving, invalid method %a") - end -end - -resolvers.methodhandler = methodhandler - -function resolvers.registermethod(name,namespace,how) - registered[name] = { how = how or "tag", namespace = namespace } - namespace["byscheme"] = function(scheme,filename,...) - if scheme == "file" then - return methodhandler(name,filename,...) - else - return methodhandler(name,addurlscheme(filename,scheme),...) - end - end -end - -local concatinators = allocate { notfound = file.join } -- concatinate paths -local locators = allocate { notfound = function() end } -- locate databases -local hashers = allocate { notfound = function() end } -- load databases -local generators = allocate { notfound = function() end } -- generate databases - -resolvers.concatinators = concatinators -resolvers.locators = locators -resolvers.hashers = hashers -resolvers.generators = generators - -local registermethod = resolvers.registermethod - -registermethod("concatinators",concatinators,"tag") -registermethod("locators", locators, "uri") -registermethod("hashers", hashers, "uri") -registermethod("generators", generators, "uri") +if not modules then modules = { } end modules ['data-met'] = { + version = 1.100, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local find, format = string.find, string.format +local sequenced = table.sequenced +local addurlscheme, urlhashed = url.addscheme, url.hashed +local getcurrentdir = lfs.currentdir + +local trace_locating = false +local trace_methods = false + +trackers.register("resolvers.locating", function(v) trace_methods = v end) +trackers.register("resolvers.methods", function(v) trace_methods = v end) + +--~ trace_methods = true + +local report_methods = logs.reporter("resolvers","methods") + +local allocate = utilities.storage.allocate + +local resolvers = resolvers + +local registered = { } + +local function splitmethod(filename) -- todo: filetype in specification + if not filename then + return { scheme = "unknown", original = filename } + end + if type(filename) == "table" then + return filename -- already split + end + filename = file.collapsepath(filename,".") -- hm, we should keep ./ in some cases + +-- filename = gsub(filename,"^%./",getcurrentdir().."/") -- we will merge dir.expandname and collapse some day + + if not find(filename,"://") then + return { scheme = "file", path = filename, original = filename, filename = filename } + end + local specification = url.hashed(filename) + if not specification.scheme or specification.scheme == "" then + return { scheme = "file", path = filename, original = filename, filename = filename } + else + return specification + end +end + +resolvers.splitmethod = splitmethod -- bad name but ok + +-- 
the second argument is always analyzed (saves time later on) and the original +-- gets passed as original but also as argument + +local function methodhandler(what,first,...) -- filename can be nil or false + local method = registered[what] + if method then + local how, namespace = method.how, method.namespace + if how == "uri" or how == "url" then + local specification = splitmethod(first) + local scheme = specification.scheme + local resolver = namespace and namespace[scheme] + if resolver then + if trace_methods then + report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,scheme,first) + end + return resolver(specification,...) + else + resolver = namespace.default or namespace.file + if resolver then + if trace_methods then + report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"default",first) + end + return resolver(specification,...) + elseif trace_methods then + report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"unset") + end + end + elseif how == "tag" then + local resolver = namespace and namespace[first] + if resolver then + if trace_methods then + report_methods("resolving, method %a, how %a, tag %a",what,how,first) + end + return resolver(...) + else + resolver = namespace.default or namespace.file + if resolver then + if trace_methods then + report_methods("resolving, method %a, how %a, tag %a",what,how,"default") + end + return resolver(...) + elseif trace_methods then + report_methods("resolving, method %a, how %a, tag %a",what,how,"unset") + end + end + end + else + report_methods("resolving, invalid method %a") + end +end + +resolvers.methodhandler = methodhandler + +function resolvers.registermethod(name,namespace,how) + registered[name] = { how = how or "tag", namespace = namespace } + namespace["byscheme"] = function(scheme,filename,...) + if scheme == "file" then + return methodhandler(name,filename,...) + else + return methodhandler(name,addurlscheme(filename,scheme),...) 
+ end + end +end + +local concatinators = allocate { notfound = file.join } -- concatinate paths +local locators = allocate { notfound = function() end } -- locate databases +local hashers = allocate { notfound = function() end } -- load databases +local generators = allocate { notfound = function() end } -- generate databases + +resolvers.concatinators = concatinators +resolvers.locators = locators +resolvers.hashers = hashers +resolvers.generators = generators + +local registermethod = resolvers.registermethod + +registermethod("concatinators",concatinators,"tag") +registermethod("locators", locators, "uri") +registermethod("hashers", hashers, "uri") +registermethod("generators", generators, "uri") diff --git a/tex/context/base/data-out.lua b/tex/context/base/data-out.lua index 11304c2ce..c427fa4b3 100644 --- a/tex/context/base/data-out.lua +++ b/tex/context/base/data-out.lua @@ -1,18 +1,18 @@ -if not modules then modules = { } end modules ['data-out'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local allocate = utilities.storage.allocate -local resolvers = resolvers - -local registermethod = resolvers.registermethod - -local savers = allocate { helpers = { } } - -resolvers.savers = savers - -registermethod("savers", savers, "uri") +if not modules then modules = { } end modules ['data-out'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local allocate = utilities.storage.allocate +local resolvers = resolvers + +local registermethod = resolvers.registermethod + +local savers = allocate { helpers = { } } + +resolvers.savers = savers + +registermethod("savers", savers, "uri") diff --git a/tex/context/base/data-pre.lua b/tex/context/base/data-pre.lua index f2f5bddc4..e8499c237 100644 --- a/tex/context/base/data-pre.lua +++ b/tex/context/base/data-pre.lua @@ -1,246 +1,246 @@ -if not modules then modules = { } end modules ['data-pre'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- It could be interesting to hook the resolver in the file --- opener so that unresolved prefixes travel around and we --- get more abstraction. - --- As we use this beforehand we will move this up in the chain --- of loading. - ---~ print(resolvers.resolve("abc env:tmp file:cont-en.tex path:cont-en.tex full:cont-en.tex rel:zapf/one/p-chars.tex")) - -local resolvers = resolvers -local prefixes = utilities.storage.allocate() -resolvers.prefixes = prefixes - -local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion -local getenv = resolvers.getenv -- we can probably also use resolvers.expansion -local P, S, R, C, Cs, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.match -local joinpath, basename, dirname = file.join, file.basename, file.dirname -local getmetatable, rawset, type = getmetatable, rawset, type - --- getenv = function(...) return resolvers.getenv(...) 
end -- needs checking (definitions changes later on) - -prefixes.environment = function(str) - return cleanpath(expansion(str)) -end - -prefixes.relative = function(str,n) -- lfs.isfile - if io.exists(str) then - -- nothing - elseif io.exists("./" .. str) then - str = "./" .. str - else - local p = "../" - for i=1,n or 2 do - if io.exists(p .. str) then - str = p .. str - break - else - p = p .. "../" - end - end - end - return cleanpath(str) -end - -prefixes.auto = function(str) - local fullname = prefixes.relative(str) - if not lfs.isfile(fullname) then - fullname = prefixes.locate(str) - end - return fullname -end - -prefixes.locate = function(str) - local fullname = findgivenfile(str) or "" - return cleanpath((fullname ~= "" and fullname) or str) -end - -prefixes.filename = function(str) - local fullname = findgivenfile(str) or "" - return cleanpath(basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here -end - -prefixes.pathname = function(str) - local fullname = findgivenfile(str) or "" - return cleanpath(dirname((fullname ~= "" and fullname) or str)) -end - -prefixes.selfautoloc = function(str) - return cleanpath(joinpath(getenv('SELFAUTOLOC'),str)) -end - -prefixes.selfautoparent = function(str) - return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str)) -end - -prefixes.selfautodir = function(str) - return cleanpath(joinpath(getenv('SELFAUTODIR'),str)) -end - -prefixes.home = function(str) - return cleanpath(joinpath(getenv('HOME'),str)) -end - -local function toppath() - local inputstack = resolvers.inputstack -- dependency, actually the code should move but it's - if not inputstack then -- more convenient to keep it here - return "." - end - local pathname = dirname(inputstack[#inputstack] or "") - if pathname == "" then - return "." - else - return pathname - end -end - -resolvers.toppath = toppath - -prefixes.toppath = function(str) - return cleanpath(joinpath(toppath(),str)) -end - -prefixes.env = prefixes.environment -prefixes.rel = prefixes.relative -prefixes.loc = prefixes.locate -prefixes.kpse = prefixes.locate -prefixes.full = prefixes.locate -prefixes.file = prefixes.filename -prefixes.path = prefixes.pathname - -function resolvers.allprefixes(separator) - local all = table.sortedkeys(prefixes) - if separator then - for i=1,#all do - all[i] = all[i] .. ":" - end - end - return all -end - -local function _resolve_(method,target) - local action = prefixes[method] - if action then - return action(target) - else - return method .. ":" .. target - end -end - -local resolved, abstract = { }, { } - -function resolvers.resetresolve(str) - resolved, abstract = { }, { } -end - --- todo: use an lpeg (see data-lua for !! 
/ stripper) - --- local function resolve(str) -- use schemes, this one is then for the commandline only --- if type(str) == "table" then --- local t = { } --- for i=1,#str do --- t[i] = resolve(str[i]) --- end --- return t --- else --- local res = resolved[str] --- if not res then --- res = gsub(str,"([a-z][a-z]+):([^ \"\';,]*)",_resolve_) -- home:xx;selfautoparent:xx; etc (comma added) --- resolved[str] = res --- abstract[res] = str --- end --- return res --- end --- end - --- home:xx;selfautoparent:xx; - -local pattern = Cs((C(R("az")^2) * P(":") * C((1-S(" \"\';,"))^1) / _resolve_ + P(1))^0) - -local prefix = C(R("az")^2) * P(":") -local target = C((1-S(" \"\';,"))^1) -local notarget = (#S(";,") + P(-1)) * Cc("") - -local pattern = Cs(((prefix * (target + notarget)) / _resolve_ + P(1))^0) - -local function resolve(str) -- use schemes, this one is then for the commandline only - if type(str) == "table" then - local t = { } - for i=1,#str do - t[i] = resolve(str[i]) - end - return t - else - local res = resolved[str] - if not res then - res = lpegmatch(pattern,str) - resolved[str] = res - abstract[res] = str - end - return res - end -end - -local function unresolve(str) - return abstract[str] or str -end - -resolvers.resolve = resolve -resolvers.unresolve = unresolve - -if type(os.uname) == "function" then - - for k, v in next, os.uname() do - if not prefixes[k] then - prefixes[k] = function() return v end - end - end - -end - -if os.type == "unix" then - - -- We need to distringuish between a prefix and something else : so we - -- have a special repath variant for linux. Also, when a new prefix is - -- defined, we need to remake the matcher. - - local pattern - - local function makepattern(t,k,v) - if t then - rawset(t,k,v) - end - local colon = P(":") - for k, v in table.sortedpairs(prefixes) do - if p then - p = P(k) + p - else - p = P(k) - end - end - pattern = Cs((p * colon + colon/";" + P(1))^0) - end - - makepattern() - - getmetatable(prefixes).__newindex = makepattern - - function resolvers.repath(str) - return lpegmatch(pattern,str) - end - -else -- already the default: - - function resolvers.repath(str) - return str - end - -end +if not modules then modules = { } end modules ['data-pre'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- It could be interesting to hook the resolver in the file +-- opener so that unresolved prefixes travel around and we +-- get more abstraction. + +-- As we use this beforehand we will move this up in the chain +-- of loading. + +--~ print(resolvers.resolve("abc env:tmp file:cont-en.tex path:cont-en.tex full:cont-en.tex rel:zapf/one/p-chars.tex")) + +local resolvers = resolvers +local prefixes = utilities.storage.allocate() +resolvers.prefixes = prefixes + +local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion +local getenv = resolvers.getenv -- we can probably also use resolvers.expansion +local P, S, R, C, Cs, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.match +local joinpath, basename, dirname = file.join, file.basename, file.dirname +local getmetatable, rawset, type = getmetatable, rawset, type + +-- getenv = function(...) return resolvers.getenv(...) 
end -- needs checking (definitions changes later on) + +prefixes.environment = function(str) + return cleanpath(expansion(str)) +end + +prefixes.relative = function(str,n) -- lfs.isfile + if io.exists(str) then + -- nothing + elseif io.exists("./" .. str) then + str = "./" .. str + else + local p = "../" + for i=1,n or 2 do + if io.exists(p .. str) then + str = p .. str + break + else + p = p .. "../" + end + end + end + return cleanpath(str) +end + +prefixes.auto = function(str) + local fullname = prefixes.relative(str) + if not lfs.isfile(fullname) then + fullname = prefixes.locate(str) + end + return fullname +end + +prefixes.locate = function(str) + local fullname = findgivenfile(str) or "" + return cleanpath((fullname ~= "" and fullname) or str) +end + +prefixes.filename = function(str) + local fullname = findgivenfile(str) or "" + return cleanpath(basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here +end + +prefixes.pathname = function(str) + local fullname = findgivenfile(str) or "" + return cleanpath(dirname((fullname ~= "" and fullname) or str)) +end + +prefixes.selfautoloc = function(str) + return cleanpath(joinpath(getenv('SELFAUTOLOC'),str)) +end + +prefixes.selfautoparent = function(str) + return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str)) +end + +prefixes.selfautodir = function(str) + return cleanpath(joinpath(getenv('SELFAUTODIR'),str)) +end + +prefixes.home = function(str) + return cleanpath(joinpath(getenv('HOME'),str)) +end + +local function toppath() + local inputstack = resolvers.inputstack -- dependency, actually the code should move but it's + if not inputstack then -- more convenient to keep it here + return "." + end + local pathname = dirname(inputstack[#inputstack] or "") + if pathname == "" then + return "." + else + return pathname + end +end + +resolvers.toppath = toppath + +prefixes.toppath = function(str) + return cleanpath(joinpath(toppath(),str)) +end + +prefixes.env = prefixes.environment +prefixes.rel = prefixes.relative +prefixes.loc = prefixes.locate +prefixes.kpse = prefixes.locate +prefixes.full = prefixes.locate +prefixes.file = prefixes.filename +prefixes.path = prefixes.pathname + +function resolvers.allprefixes(separator) + local all = table.sortedkeys(prefixes) + if separator then + for i=1,#all do + all[i] = all[i] .. ":" + end + end + return all +end + +local function _resolve_(method,target) + local action = prefixes[method] + if action then + return action(target) + else + return method .. ":" .. target + end +end + +local resolved, abstract = { }, { } + +function resolvers.resetresolve(str) + resolved, abstract = { }, { } +end + +-- todo: use an lpeg (see data-lua for !! 
/ stripper) + +-- local function resolve(str) -- use schemes, this one is then for the commandline only +-- if type(str) == "table" then +-- local t = { } +-- for i=1,#str do +-- t[i] = resolve(str[i]) +-- end +-- return t +-- else +-- local res = resolved[str] +-- if not res then +-- res = gsub(str,"([a-z][a-z]+):([^ \"\';,]*)",_resolve_) -- home:xx;selfautoparent:xx; etc (comma added) +-- resolved[str] = res +-- abstract[res] = str +-- end +-- return res +-- end +-- end + +-- home:xx;selfautoparent:xx; + +local pattern = Cs((C(R("az")^2) * P(":") * C((1-S(" \"\';,"))^1) / _resolve_ + P(1))^0) + +local prefix = C(R("az")^2) * P(":") +local target = C((1-S(" \"\';,"))^1) +local notarget = (#S(";,") + P(-1)) * Cc("") + +local pattern = Cs(((prefix * (target + notarget)) / _resolve_ + P(1))^0) + +local function resolve(str) -- use schemes, this one is then for the commandline only + if type(str) == "table" then + local t = { } + for i=1,#str do + t[i] = resolve(str[i]) + end + return t + else + local res = resolved[str] + if not res then + res = lpegmatch(pattern,str) + resolved[str] = res + abstract[res] = str + end + return res + end +end + +local function unresolve(str) + return abstract[str] or str +end + +resolvers.resolve = resolve +resolvers.unresolve = unresolve + +if type(os.uname) == "function" then + + for k, v in next, os.uname() do + if not prefixes[k] then + prefixes[k] = function() return v end + end + end + +end + +if os.type == "unix" then + + -- We need to distringuish between a prefix and something else : so we + -- have a special repath variant for linux. Also, when a new prefix is + -- defined, we need to remake the matcher. + + local pattern + + local function makepattern(t,k,v) + if t then + rawset(t,k,v) + end + local colon = P(":") + for k, v in table.sortedpairs(prefixes) do + if p then + p = P(k) + p + else + p = P(k) + end + end + pattern = Cs((p * colon + colon/";" + P(1))^0) + end + + makepattern() + + getmetatable(prefixes).__newindex = makepattern + + function resolvers.repath(str) + return lpegmatch(pattern,str) + end + +else -- already the default: + + function resolvers.repath(str) + return str + end + +end diff --git a/tex/context/base/data-sch.lua b/tex/context/base/data-sch.lua index 41b941c5a..16bade8db 100644 --- a/tex/context/base/data-sch.lua +++ b/tex/context/base/data-sch.lua @@ -1,200 +1,200 @@ -if not modules then modules = { } end modules ['data-sch'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local load = load -local gsub, concat, format = string.gsub, table.concat, string.format -local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders - -local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end) -local report_schemes = logs.reporter("resolvers","schemes") - -local http = require("socket.http") -local ltn12 = require("ltn12") - -local resolvers = resolvers -local schemes = resolvers.schemes or { } -resolvers.schemes = schemes - -local cleaners = { } -schemes.cleaners = cleaners - -local threshold = 24 * 60 * 60 - -directives.register("schemes.threshold", function(v) threshold = tonumber(v) or threshold end) - -function cleaners.none(specification) - return specification.original -end - -function cleaners.strip(specification) - return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep 
periods -end - -function cleaners.md5(specification) - return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path)) -end - -local cleaner = cleaners.strip - -directives.register("schemes.cleanmethod", function(v) cleaner = cleaners[v] or cleaners.strip end) - -function resolvers.schemes.cleanname(specification) - local hash = cleaner(specification) - if trace_schemes then - report_schemes("hashing %a to %a",specification.original,hash) - end - return hash -end - -local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { } - -local function runcurl(name,cachename) -- we use sockets instead or the curl library when possible - local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name - os.spawn(command) -end - -local function fetch(specification) - local original = specification.original - local scheme = specification.scheme - local cleanname = schemes.cleanname(specification) - local cachename = caches.setfirstwritablefile(cleanname,"schemes") - if not cached[original] then - statistics.starttiming(schemes) - if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > (thresholds[protocol] or threshold)) then - cached[original] = cachename - local handler = handlers[scheme] - if handler then - if trace_schemes then - report_schemes("fetching %a, protocol %a, method %a",original,scheme,"built-in") - end - logs.flush() - handler(specification,cachename) - else - if trace_schemes then - report_schemes("fetching %a, protocol %a, method %a",original,scheme,"curl") - end - logs.flush() - runcurl(original,cachename) - end - end - if io.exists(cachename) then - cached[original] = cachename - if trace_schemes then - report_schemes("using cached %a, protocol %a, cachename %a",original,scheme,cachename) - end - else - cached[original] = "" - if trace_schemes then - report_schemes("using missing %a, protocol %a",original,scheme) - end - end - loaded[scheme] = loaded[scheme] + 1 - statistics.stoptiming(schemes) - else - if trace_schemes then - report_schemes("reusing %a, protocol %a",original,scheme) - end - reused[scheme] = reused[scheme] + 1 - end - return cached[original] -end - -local function finder(specification,filetype) - return resolvers.methodhandler("finders",fetch(specification),filetype) -end - -local opener = openers.file -local loader = loaders.file - -local function install(scheme,handler,newthreshold) - handlers [scheme] = handler - loaded [scheme] = 0 - reused [scheme] = 0 - finders [scheme] = finder - openers [scheme] = opener - loaders [scheme] = loader - thresholds[scheme] = newthreshold or threshold -end - -schemes.install = install - -local function http_handler(specification,cachename) - local tempname = cachename .. ".tmp" - local f = io.open(tempname,"wb") - local status, message = http.request { - url = specification.original, - sink = ltn12.sink.file(f) - } - if not status then - os.remove(tempname) - else - os.remove(cachename) - os.rename(tempname,cachename) - end - return cachename -end - -install('http',http_handler) -install('https') -- see pod -install('ftp') - -statistics.register("scheme handling time", function() - local l, r, nl, nr = { }, { }, 0, 0 - for k, v in table.sortedhash(loaded) do - if v > 0 then - nl = nl + 1 - l[nl] = k .. ":" .. v - end - end - for k, v in table.sortedhash(reused) do - if v > 0 then - nr = nr + 1 - r[nr] = k .. ":" .. 
v - end - end - local n = nl + nr - if n > 0 then - l = nl > 0 and concat(l) or "none" - r = nr > 0 and concat(r) or "none" - return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s", - statistics.elapsedtime(schemes), n, threshold, l, r) - else - return nil - end -end) - --- We provide a few more helpers: - ------ http = require("socket.http") -local httprequest = http.request -local toquery = url.toquery - --- local function httprequest(url) --- return os.resultof(format("curl --silent %q", url)) --- end - -local function fetchstring(url,data) - local q = data and toquery(data) - if q then - url = url .. "?" .. q - end - local reply = httprequest(url) - return reply -- just one argument -end - -schemes.fetchstring = fetchstring - -function schemes.fetchtable(url,data) - local reply = fetchstring(url,data) - if reply then - local s = load("return " .. reply) - if s then - return s() - end - end -end +if not modules then modules = { } end modules ['data-sch'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local load = load +local gsub, concat, format = string.gsub, table.concat, string.format +local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders + +local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end) +local report_schemes = logs.reporter("resolvers","schemes") + +local http = require("socket.http") +local ltn12 = require("ltn12") + +local resolvers = resolvers +local schemes = resolvers.schemes or { } +resolvers.schemes = schemes + +local cleaners = { } +schemes.cleaners = cleaners + +local threshold = 24 * 60 * 60 + +directives.register("schemes.threshold", function(v) threshold = tonumber(v) or threshold end) + +function cleaners.none(specification) + return specification.original +end + +function cleaners.strip(specification) + return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods +end + +function cleaners.md5(specification) + return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path)) +end + +local cleaner = cleaners.strip + +directives.register("schemes.cleanmethod", function(v) cleaner = cleaners[v] or cleaners.strip end) + +function resolvers.schemes.cleanname(specification) + local hash = cleaner(specification) + if trace_schemes then + report_schemes("hashing %a to %a",specification.original,hash) + end + return hash +end + +local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { } + +local function runcurl(name,cachename) -- we use sockets instead or the curl library when possible + local command = "curl --silent --create-dirs --output " .. cachename .. " " .. 
name + os.spawn(command) +end + +local function fetch(specification) + local original = specification.original + local scheme = specification.scheme + local cleanname = schemes.cleanname(specification) + local cachename = caches.setfirstwritablefile(cleanname,"schemes") + if not cached[original] then + statistics.starttiming(schemes) + if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > (thresholds[protocol] or threshold)) then + cached[original] = cachename + local handler = handlers[scheme] + if handler then + if trace_schemes then + report_schemes("fetching %a, protocol %a, method %a",original,scheme,"built-in") + end + logs.flush() + handler(specification,cachename) + else + if trace_schemes then + report_schemes("fetching %a, protocol %a, method %a",original,scheme,"curl") + end + logs.flush() + runcurl(original,cachename) + end + end + if io.exists(cachename) then + cached[original] = cachename + if trace_schemes then + report_schemes("using cached %a, protocol %a, cachename %a",original,scheme,cachename) + end + else + cached[original] = "" + if trace_schemes then + report_schemes("using missing %a, protocol %a",original,scheme) + end + end + loaded[scheme] = loaded[scheme] + 1 + statistics.stoptiming(schemes) + else + if trace_schemes then + report_schemes("reusing %a, protocol %a",original,scheme) + end + reused[scheme] = reused[scheme] + 1 + end + return cached[original] +end + +local function finder(specification,filetype) + return resolvers.methodhandler("finders",fetch(specification),filetype) +end + +local opener = openers.file +local loader = loaders.file + +local function install(scheme,handler,newthreshold) + handlers [scheme] = handler + loaded [scheme] = 0 + reused [scheme] = 0 + finders [scheme] = finder + openers [scheme] = opener + loaders [scheme] = loader + thresholds[scheme] = newthreshold or threshold +end + +schemes.install = install + +local function http_handler(specification,cachename) + local tempname = cachename .. ".tmp" + local f = io.open(tempname,"wb") + local status, message = http.request { + url = specification.original, + sink = ltn12.sink.file(f) + } + if not status then + os.remove(tempname) + else + os.remove(cachename) + os.rename(tempname,cachename) + end + return cachename +end + +install('http',http_handler) +install('https') -- see pod +install('ftp') + +statistics.register("scheme handling time", function() + local l, r, nl, nr = { }, { }, 0, 0 + for k, v in table.sortedhash(loaded) do + if v > 0 then + nl = nl + 1 + l[nl] = k .. ":" .. v + end + end + for k, v in table.sortedhash(reused) do + if v > 0 then + nr = nr + 1 + r[nr] = k .. ":" .. v + end + end + local n = nl + nr + if n > 0 then + l = nl > 0 and concat(l) or "none" + r = nr > 0 and concat(r) or "none" + return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s", + statistics.elapsedtime(schemes), n, threshold, l, r) + else + return nil + end +end) + +-- We provide a few more helpers: + +----- http = require("socket.http") +local httprequest = http.request +local toquery = url.toquery + +-- local function httprequest(url) +-- return os.resultof(format("curl --silent %q", url)) +-- end + +local function fetchstring(url,data) + local q = data and toquery(data) + if q then + url = url .. "?" .. 
q + end + local reply = httprequest(url) + return reply -- just one argument +end + +schemes.fetchstring = fetchstring + +function schemes.fetchtable(url,data) + local reply = fetchstring(url,data) + if reply then + local s = load("return " .. reply) + if s then + return s() + end + end +end diff --git a/tex/context/base/data-tex.lua b/tex/context/base/data-tex.lua index f5c986d77..18e318f43 100644 --- a/tex/context/base/data-tex.lua +++ b/tex/context/base/data-tex.lua @@ -1,183 +1,183 @@ -if not modules then modules = { } end modules ['data-tex'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local char = string.char -local insert, remove = table.insert, table.remove - -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) - -local report_tex = logs.reporter("resolvers","tex") - -local resolvers = resolvers - -local sequencers = utilities.sequencers -local methodhandler = resolvers.methodhandler -local splitlines = string.splitlines -local utffiletype = utf.filetype - --- local fileprocessor = nil --- local lineprocessor = nil - -local textfileactions = sequencers.new { - arguments = "str,filename,coding", - returnvalues = "str", - results = "str", -} - -local textlineactions = sequencers.new { - arguments = "str,filename,linenumber,noflines,coding", - returnvalues = "str", - results = "str", -} - -local helpers = resolvers.openers.helpers -local appendgroup = sequencers.appendgroup - -helpers.textfileactions = textfileactions -helpers.textlineactions = textlineactions - -appendgroup(textfileactions,"before") -- user -appendgroup(textfileactions,"system") -- private -appendgroup(textfileactions,"after" ) -- user - -appendgroup(textlineactions,"before") -- user -appendgroup(textlineactions,"system") -- private -appendgroup(textlineactions,"after" ) -- user - -local ctrl_d = char( 4) -- unix -local ctrl_z = char(26) -- windows - -resolvers.inputstack = resolvers.inputstack or { } - -local inputstack = resolvers.inputstack - -function helpers.textopener(tag,filename,filehandle,coding) - local lines - local t_filehandle = type(filehandle) - if not filehandle then - lines = io.loaddata(filename) - elseif t_filehandle == "string" then - lines = filehandle - elseif t_filehandle == "table" then - lines = filehandle - else - lines = filehandle:read("*a") -- io.readall(filehandle) ... 
but never that large files anyway - -- lines = io.readall(filehandle) - filehandle:close() - end - if type(lines) == "string" then - local coding = coding or utffiletype(lines) -- so we can signal no regime - if trace_locating then - report_tex("%a opener: %a opened using method %a",tag,filename,coding) - end - if coding == "utf-16-be" then - lines = utf.utf16_to_utf8_be(lines) - elseif coding == "utf-16-le" then - lines = utf.utf16_to_utf8_le(lines) - elseif coding == "utf-32-be" then - lines = utf.utf32_to_utf8_be(lines) - elseif coding == "utf-32-le" then - lines = utf.utf32_to_utf8_le(lines) - else -- utf8 or unknown (could be a mkvi file) - local runner = textfileactions.runner - if runner then - lines = runner(lines,filename,coding) or lines - end - lines = splitlines(lines) - end - elseif trace_locating then - report_tex("%a opener: %a opened",tag,filename) - end - local noflines = #lines - if lines[noflines] == "" then -- maybe some special check is needed - lines[noflines] = nil - end - logs.show_open(filename) - insert(inputstack,filename) - return { - filename = filename, - noflines = noflines, - currentline = 0, - close = function() - if trace_locating then - report_tex("%a closer: %a closed",tag,filename) - end - logs.show_close(filename) - remove(inputstack) - t = nil - end, - reader = function(self) - self = self or t - local currentline, noflines = self.currentline, self.noflines - if currentline >= noflines then - return nil - else - currentline = currentline + 1 - self.currentline = currentline - local content = lines[currentline] - if not content then - return nil - elseif content == "" then - return "" - -- elseif content == ctrl_d or ctrl_z then - -- return nil -- we need this as \endinput does not work in prints - else - local runner = textlineactions.runner - if runner then - return runner(content,filename,currentline,noflines,coding) or content - else - return content - end - end - end - end - } -end - -function resolvers.findtexfile(filename,filetype) - return methodhandler('finders',filename,filetype) -end - -function resolvers.opentexfile(filename) - return methodhandler('openers',filename) -end - -function resolvers.openfile(filename) - local fullname = methodhandler('finders',filename) - return fullname and fullname ~= "" and methodhandler('openers',fullname) or nil -end - -function resolvers.loadtexfile(filename,filetype) - -- todo: optionally apply filters - local ok, data, size = resolvers.loadbinfile(filename, filetype) - return data or "" -end - -resolvers.texdatablob = resolvers.loadtexfile - -local function installhandler(namespace,what,where,func) - if not func then - where, func = "after", where - end - if where == "before" or where == "after" then - sequencers.appendaction(namespace,where,func) - else - report_tex("installing input %a handlers in %a is not possible",what,tostring(where)) - end -end - -function resolvers.installinputlinehandler(...) installhandler(helpers.textlineactions,"line",...) end -function resolvers.installinputfilehandler(...) installhandler(helpers.textfileactions,"file",...) 
end - --- local basename = file.basename --- resolvers.installinputlinehandler(function(str,filename,linenumber,noflines) --- report_tex("[lc] file %a, line %a of %a, length %a",basename(filename),linenumber,noflines,#str) --- end) --- resolvers.installinputfilehandler(function(str,filename) --- report_tex("[fc] file %a, length %a",basename(filename),#str) --- end) +if not modules then modules = { } end modules ['data-tex'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local char = string.char +local insert, remove = table.insert, table.remove + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) + +local report_tex = logs.reporter("resolvers","tex") + +local resolvers = resolvers + +local sequencers = utilities.sequencers +local methodhandler = resolvers.methodhandler +local splitlines = string.splitlines +local utffiletype = utf.filetype + +-- local fileprocessor = nil +-- local lineprocessor = nil + +local textfileactions = sequencers.new { + arguments = "str,filename,coding", + returnvalues = "str", + results = "str", +} + +local textlineactions = sequencers.new { + arguments = "str,filename,linenumber,noflines,coding", + returnvalues = "str", + results = "str", +} + +local helpers = resolvers.openers.helpers +local appendgroup = sequencers.appendgroup + +helpers.textfileactions = textfileactions +helpers.textlineactions = textlineactions + +appendgroup(textfileactions,"before") -- user +appendgroup(textfileactions,"system") -- private +appendgroup(textfileactions,"after" ) -- user + +appendgroup(textlineactions,"before") -- user +appendgroup(textlineactions,"system") -- private +appendgroup(textlineactions,"after" ) -- user + +local ctrl_d = char( 4) -- unix +local ctrl_z = char(26) -- windows + +resolvers.inputstack = resolvers.inputstack or { } + +local inputstack = resolvers.inputstack + +function helpers.textopener(tag,filename,filehandle,coding) + local lines + local t_filehandle = type(filehandle) + if not filehandle then + lines = io.loaddata(filename) + elseif t_filehandle == "string" then + lines = filehandle + elseif t_filehandle == "table" then + lines = filehandle + else + lines = filehandle:read("*a") -- io.readall(filehandle) ... 
but never that large files anyway + -- lines = io.readall(filehandle) + filehandle:close() + end + if type(lines) == "string" then + local coding = coding or utffiletype(lines) -- so we can signal no regime + if trace_locating then + report_tex("%a opener: %a opened using method %a",tag,filename,coding) + end + if coding == "utf-16-be" then + lines = utf.utf16_to_utf8_be(lines) + elseif coding == "utf-16-le" then + lines = utf.utf16_to_utf8_le(lines) + elseif coding == "utf-32-be" then + lines = utf.utf32_to_utf8_be(lines) + elseif coding == "utf-32-le" then + lines = utf.utf32_to_utf8_le(lines) + else -- utf8 or unknown (could be a mkvi file) + local runner = textfileactions.runner + if runner then + lines = runner(lines,filename,coding) or lines + end + lines = splitlines(lines) + end + elseif trace_locating then + report_tex("%a opener: %a opened",tag,filename) + end + local noflines = #lines + if lines[noflines] == "" then -- maybe some special check is needed + lines[noflines] = nil + end + logs.show_open(filename) + insert(inputstack,filename) + return { + filename = filename, + noflines = noflines, + currentline = 0, + close = function() + if trace_locating then + report_tex("%a closer: %a closed",tag,filename) + end + logs.show_close(filename) + remove(inputstack) + t = nil + end, + reader = function(self) + self = self or t + local currentline, noflines = self.currentline, self.noflines + if currentline >= noflines then + return nil + else + currentline = currentline + 1 + self.currentline = currentline + local content = lines[currentline] + if not content then + return nil + elseif content == "" then + return "" + -- elseif content == ctrl_d or ctrl_z then + -- return nil -- we need this as \endinput does not work in prints + else + local runner = textlineactions.runner + if runner then + return runner(content,filename,currentline,noflines,coding) or content + else + return content + end + end + end + end + } +end + +function resolvers.findtexfile(filename,filetype) + return methodhandler('finders',filename,filetype) +end + +function resolvers.opentexfile(filename) + return methodhandler('openers',filename) +end + +function resolvers.openfile(filename) + local fullname = methodhandler('finders',filename) + return fullname and fullname ~= "" and methodhandler('openers',fullname) or nil +end + +function resolvers.loadtexfile(filename,filetype) + -- todo: optionally apply filters + local ok, data, size = resolvers.loadbinfile(filename, filetype) + return data or "" +end + +resolvers.texdatablob = resolvers.loadtexfile + +local function installhandler(namespace,what,where,func) + if not func then + where, func = "after", where + end + if where == "before" or where == "after" then + sequencers.appendaction(namespace,where,func) + else + report_tex("installing input %a handlers in %a is not possible",what,tostring(where)) + end +end + +function resolvers.installinputlinehandler(...) installhandler(helpers.textlineactions,"line",...) end +function resolvers.installinputfilehandler(...) installhandler(helpers.textfileactions,"file",...) 
end + +-- local basename = file.basename +-- resolvers.installinputlinehandler(function(str,filename,linenumber,noflines) +-- report_tex("[lc] file %a, line %a of %a, length %a",basename(filename),linenumber,noflines,#str) +-- end) +-- resolvers.installinputfilehandler(function(str,filename) +-- report_tex("[fc] file %a, length %a",basename(filename),#str) +-- end) diff --git a/tex/context/base/data-tmf.lua b/tex/context/base/data-tmf.lua index c52225193..8300c3560 100644 --- a/tex/context/base/data-tmf.lua +++ b/tex/context/base/data-tmf.lua @@ -1,73 +1,73 @@ -if not modules then modules = { } end modules ['data-tmf'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local resolvers = resolvers - -local report_tds = logs.reporter("resolvers","tds") - --- = << --- ? ?? --- < += --- > =+ - -function resolvers.load_tree(tree,resolve) - if type(tree) == "string" and tree ~= "" then - - local getenv, setenv = resolvers.getenv, resolvers.setenv - - -- later might listen to the raw osenv var as well - local texos = "texmf-" .. os.platform - - local oldroot = environment.texroot - local newroot = file.collapsepath(tree) - - local newtree = file.join(newroot,texos) - local newpath = file.join(newtree,"bin") - - if not lfs.isdir(newtree) then - report_tds("no %a under tree %a",texos,tree) - os.exit() - end - if not lfs.isdir(newpath) then - report_tds("no '%s/bin' under tree %a",texos,tree) - os.exit() - end - - local texmfos = newtree - - environment.texroot = newroot - environment.texos = texos - environment.texmfos = texmfos - - -- Beware, we need to obey the relocatable autoparent so we - -- set TEXMFCNF to its raw value. This is somewhat tricky when - -- we run a mkii job from within. Therefore, in mtxrun, there - -- is a resolve applied when we're in mkii/kpse mode or when - -- --resolve is passed to mtxrun. Maybe we should also set the - -- local AUTOPARENT etc. although these are alwasy set new. - - if resolve then - -- resolvers.luacnfspec = resolvers.joinpath(resolvers.resolve(resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec)))) - resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec) - end - - setenv('SELFAUTOPARENT', newroot) - setenv('SELFAUTODIR', newtree) - setenv('SELFAUTOLOC', newpath) - setenv('TEXROOT', newroot) - setenv('TEXOS', texos) - setenv('TEXMFOS', texmfos) - setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved - setenv('PATH', newpath .. io.pathseparator .. getenv('PATH')) - - report_tds("changing from root %a to %a",oldroot,newroot) - report_tds("prepending %a to PATH",newpath) - report_tds("setting TEXMFCNF to %a",resolvers.luacnfspec) - report_tds() - end -end +if not modules then modules = { } end modules ['data-tmf'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local resolvers = resolvers + +local report_tds = logs.reporter("resolvers","tds") + +-- = << +-- ? ?? +-- < += +-- > =+ + +function resolvers.load_tree(tree,resolve) + if type(tree) == "string" and tree ~= "" then + + local getenv, setenv = resolvers.getenv, resolvers.setenv + + -- later might listen to the raw osenv var as well + local texos = "texmf-" .. 
os.platform + + local oldroot = environment.texroot + local newroot = file.collapsepath(tree) + + local newtree = file.join(newroot,texos) + local newpath = file.join(newtree,"bin") + + if not lfs.isdir(newtree) then + report_tds("no %a under tree %a",texos,tree) + os.exit() + end + if not lfs.isdir(newpath) then + report_tds("no '%s/bin' under tree %a",texos,tree) + os.exit() + end + + local texmfos = newtree + + environment.texroot = newroot + environment.texos = texos + environment.texmfos = texmfos + + -- Beware, we need to obey the relocatable autoparent so we + -- set TEXMFCNF to its raw value. This is somewhat tricky when + -- we run a mkii job from within. Therefore, in mtxrun, there + -- is a resolve applied when we're in mkii/kpse mode or when + -- --resolve is passed to mtxrun. Maybe we should also set the + -- local AUTOPARENT etc. although these are alwasy set new. + + if resolve then + -- resolvers.luacnfspec = resolvers.joinpath(resolvers.resolve(resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec)))) + resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec) + end + + setenv('SELFAUTOPARENT', newroot) + setenv('SELFAUTODIR', newtree) + setenv('SELFAUTOLOC', newpath) + setenv('TEXROOT', newroot) + setenv('TEXOS', texos) + setenv('TEXMFOS', texmfos) + setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved + setenv('PATH', newpath .. io.pathseparator .. getenv('PATH')) + + report_tds("changing from root %a to %a",oldroot,newroot) + report_tds("prepending %a to PATH",newpath) + report_tds("setting TEXMFCNF to %a",resolvers.luacnfspec) + report_tds() + end +end diff --git a/tex/context/base/data-tmp.lua b/tex/context/base/data-tmp.lua index 5025a8a0a..2f12ecfb9 100644 --- a/tex/context/base/data-tmp.lua +++ b/tex/context/base/data-tmp.lua @@ -1,420 +1,420 @@ -if not modules then modules = { } end modules ['data-tmp'] = { - version = 1.100, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

-<p>This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. Best is to set the
-following variable. When not set, the usual paths will be
-checked. Personally I prefer the (users) temporary path.</p>
-
-<code>
-TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
-</code>
-
-<p>Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.</p>

---ldx]]-- - -local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat -local concat, serialize, serializetofile = table.concat, table.serialize, table.tofile -local mkdirs, isdir, isfile = dir.mkdirs, lfs.isdir, lfs.isfile -local addsuffix, is_writable, is_readable = file.addsuffix, file.is_writable, file.is_readable -local formatters = string.formatters - -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) - -local report_caches = logs.reporter("resolvers","caches") -local report_resolvers = logs.reporter("resolvers","caching") - -local resolvers = resolvers - --- intermezzo - -local directive_cleanup = false directives.register("system.compile.cleanup", function(v) directive_cleanup = v end) -local directive_strip = false directives.register("system.compile.strip", function(v) directive_strip = v end) - -local compile = utilities.lua.compile - -function utilities.lua.compile(luafile,lucfile,cleanup,strip) - if cleanup == nil then cleanup = directive_cleanup end - if strip == nil then strip = directive_strip end - return compile(luafile,lucfile,cleanup,strip) -end - --- end of intermezzo - -caches = caches or { } -local caches = caches - -local luasuffixes = utilities.lua.suffixes - -caches.base = caches.base or "luatex-cache" -caches.more = caches.more or "context" -caches.direct = false -- true is faster but may need huge amounts of memory -caches.tree = false -caches.force = true -caches.ask = false -caches.relocate = false -caches.defaults = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" } - -local writable, readables, usedreadables = nil, { }, { } - --- we could use a metatable for writable and readable but not yet - -local function identify() - -- Combining the loops makes it messy. First we check the format cache path - -- and when the last component is not present we try to create it. - local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE") - if texmfcaches then - for k=1,#texmfcaches do - local cachepath = texmfcaches[k] - if cachepath ~= "" then - cachepath = resolvers.resolve(cachepath) - cachepath = resolvers.cleanpath(cachepath) - cachepath = file.collapsepath(cachepath) - local valid = isdir(cachepath) - if valid then - if is_readable(cachepath) then - readables[#readables+1] = cachepath - if not writable and is_writable(cachepath) then - writable = cachepath - end - end - elseif not writable and caches.force then - local cacheparent = file.dirname(cachepath) - if is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths) - if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then - mkdirs(cachepath) - if isdir(cachepath) and is_writable(cachepath) then - report_caches("path %a created",cachepath) - writable = cachepath - readables[#readables+1] = cachepath - end - end - end - end - end - end - end - -- As a last resort we check some temporary paths but this time we don't - -- create them. 
- local texmfcaches = caches.defaults - if texmfcaches then - for k=1,#texmfcaches do - local cachepath = texmfcaches[k] - cachepath = resolvers.expansion(cachepath) -- was getenv - if cachepath ~= "" then - cachepath = resolvers.resolve(cachepath) - cachepath = resolvers.cleanpath(cachepath) - local valid = isdir(cachepath) - if valid and is_readable(cachepath) then - if not writable and is_writable(cachepath) then - readables[#readables+1] = cachepath - writable = cachepath - break - end - end - end - end - end - -- Some extra checking. If we have no writable or readable path then we simply - -- quit. - if not writable then - report_caches("fatal error: there is no valid writable cache path defined") - os.exit() - elseif #readables == 0 then - report_caches("fatal error: there is no valid readable cache path defined") - os.exit() - end - -- why here - writable = dir.expandname(resolvers.cleanpath(writable)) -- just in case - -- moved here - local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree - if tree then - caches.tree = tree - writable = mkdirs(writable,base,more,tree) - for i=1,#readables do - readables[i] = file.join(readables[i],base,more,tree) - end - else - writable = mkdirs(writable,base,more) - for i=1,#readables do - readables[i] = file.join(readables[i],base,more) - end - end - -- end - if trace_cache then - for i=1,#readables do - report_caches("using readable path %a (order %s)",readables[i],i) - end - report_caches("using writable path %a",writable) - end - identify = function() - return writable, readables - end - return writable, readables -end - -function caches.usedpaths(separator) - local writable, readables = identify() - if #readables > 1 then - local result = { } - local done = { } - for i=1,#readables do - local readable = readables[i] - if readable == writable then - done[readable] = true - result[#result+1] = formatters["readable+writable: %a"](readable) - elseif usedreadables[i] then - done[readable] = true - result[#result+1] = formatters["readable: %a"](readable) - end - end - if not done[writable] then - result[#result+1] = formatters["writable: %a"](writable) - end - return concat(result,separator or " | ") - else - return writable or "?" - end -end - -function caches.configfiles() - return concat(resolvers.instance.specification,";") -end - -function caches.hashed(tree) - tree = gsub(tree,"[\\/]+$","") - tree = lower(tree) - local hash = md5.hex(tree) - if trace_cache or trace_locating then - report_caches("hashing tree %a, hash %a",tree,hash) - end - return hash -end - -function caches.treehash() - local tree = caches.configfiles() - if not tree or tree == "" then - return false - else - return caches.hashed(tree) - end -end - -local r_cache, w_cache = { }, { } -- normally w in in r but who cares - -local function getreadablepaths(...) - local tags = { ... } - local hash = concat(tags,"/") - local done = r_cache[hash] - if not done then - local writable, readables = identify() -- exit if not found - if #tags > 0 then - done = { } - for i=1,#readables do - done[i] = file.join(readables[i],...) - end - else - done = readables - end - r_cache[hash] = done - end - return done -end - -local function getwritablepath(...) - local tags = { ... } - local hash = concat(tags,"/") - local done = w_cache[hash] - if not done then - local writable, readables = identify() -- exit if not found - if #tags > 0 then - done = mkdirs(writable,...) 
- else - done = writable - end - w_cache[hash] = done - end - return done -end - -caches.getreadablepaths = getreadablepaths -caches.getwritablepath = getwritablepath - -function caches.getfirstreadablefile(filename,...) - local rd = getreadablepaths(...) - for i=1,#rd do - local path = rd[i] - local fullname = file.join(path,filename) - if is_readable(fullname) then - usedreadables[i] = true - return fullname, path - end - end - return caches.setfirstwritablefile(filename,...) -end - -function caches.setfirstwritablefile(filename,...) - local wr = getwritablepath(...) - local fullname = file.join(wr,filename) - return fullname, wr -end - -function caches.define(category,subcategory) -- for old times sake - return function() - return getwritablepath(category,subcategory) - end -end - -function caches.setluanames(path,name) - return format("%s/%s.%s",path,name,luasuffixes.tma), format("%s/%s.%s",path,name,luasuffixes.tmc) -end - -function caches.loaddata(readables,name) - if type(readables) == "string" then - readables = { readables } - end - for i=1,#readables do - local path = readables[i] - local tmaname, tmcname = caches.setluanames(path,name) - local loader = false - if isfile(tmcname) then - loader = loadfile(tmcname) - end - if not loader and isfile(tmaname) then - -- in case we have a different engine - utilities.lua.compile(tmaname,tmcname) - if isfile(tmcname) then - loader = loadfile(tmcname) - end - if not loader then - loader = loadfile(tmaname) - end - end - if loader then - loader = loader() - collectgarbage("step") - return loader - end - end - return false -end - -function caches.is_writable(filepath,filename) - local tmaname, tmcname = caches.setluanames(filepath,filename) - return is_writable(tmaname) -end - -local saveoptions = { compact = true } - --- add some point we will only use the internal bytecode compiler and --- then we can flag success in the tma so that it can trigger a compile --- if the other engine - -function caches.savedata(filepath,filename,data,raw) - local tmaname, tmcname = caches.setluanames(filepath,filename) - local reduce, simplify = true, true - if raw then - reduce, simplify = false, false - end - data.cache_uuid = os.uuid() - if caches.direct then - file.savedata(tmaname,serialize(data,true,saveoptions)) - else - serializetofile(tmaname,data,true,saveoptions) - end - utilities.lua.compile(tmaname,tmcname) -end - --- moved from data-res: - -local content_state = { } - -function caches.contentstate() - return content_state or { } -end - -function caches.loadcontent(cachename,dataname) - local name = caches.hashed(cachename) - local full, path = caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees") - local filename = file.join(path,name) - local blob = loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua)) - if blob then - local data = blob() - if data and data.content then - if data.type == dataname then - if data.version == resolvers.cacheversion then - content_state[#content_state+1] = data.uuid - if trace_locating then - report_resolvers("loading %a for %a from %a",dataname,cachename,filename) - end - return data.content - else - report_resolvers("skipping %a for %a from %a (version mismatch)",dataname,cachename,filename) - end - else - report_resolvers("skipping %a for %a from %a (datatype mismatch)",dataname,cachename,filename) - end - elseif trace_locating then - report_resolvers("skipping %a for %a from %a (no content)",dataname,cachename,filename) - end - elseif trace_locating then - 
report_resolvers("skipping %a for %a from %a (invalid file)",dataname,cachename,filename) - end -end - -function caches.collapsecontent(content) - for k, v in next, content do - if type(v) == "table" and #v == 1 then - content[k] = v[1] - end - end -end - -function caches.savecontent(cachename,dataname,content) - local name = caches.hashed(cachename) - local full, path = caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees") - local filename = file.join(path,name) -- is full - local luaname = addsuffix(filename,luasuffixes.lua) - local lucname = addsuffix(filename,luasuffixes.luc) - if trace_locating then - report_resolvers("preparing %a for %a",dataname,cachename) - end - local data = { - type = dataname, - root = cachename, - version = resolvers.cacheversion, - date = os.date("%Y-%m-%d"), - time = os.date("%H:%M:%S"), - content = content, - uuid = os.uuid(), - } - local ok = io.savedata(luaname,serialize(data,true)) - if ok then - if trace_locating then - report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname) - end - if utilities.lua.compile(luaname,lucname) then - if trace_locating then - report_resolvers("%a compiled to %a",dataname,lucname) - end - return true - else - if trace_locating then - report_resolvers("compiling failed for %a, deleting file %a",dataname,lucname) - end - os.remove(lucname) - end - elseif trace_locating then - report_resolvers("unable to save %a in %a (access error)",dataname,luaname) - end -end +if not modules then modules = { } end modules ['data-tmp'] = { + version = 1.100, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

+<p>This module deals with caching data. It sets up the paths and
+implements loaders and savers for tables. Best is to set the
+following variable. When not set, the usual paths will be
+checked. Personally I prefer the (users) temporary path.</p>
+
+<code>
+TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
+</code>
+
+<p>Currently we do no locking when we write files. This is no real
+problem because most caching involves fonts and the chance of them
+being written at the same time is small. We also need to extend
+luatools with a recache feature.</p>

+--ldx]]-- + +local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat +local concat, serialize, serializetofile = table.concat, table.serialize, table.tofile +local mkdirs, isdir, isfile = dir.mkdirs, lfs.isdir, lfs.isfile +local addsuffix, is_writable, is_readable = file.addsuffix, file.is_writable, file.is_readable +local formatters = string.formatters + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) +local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) + +local report_caches = logs.reporter("resolvers","caches") +local report_resolvers = logs.reporter("resolvers","caching") + +local resolvers = resolvers + +-- intermezzo + +local directive_cleanup = false directives.register("system.compile.cleanup", function(v) directive_cleanup = v end) +local directive_strip = false directives.register("system.compile.strip", function(v) directive_strip = v end) + +local compile = utilities.lua.compile + +function utilities.lua.compile(luafile,lucfile,cleanup,strip) + if cleanup == nil then cleanup = directive_cleanup end + if strip == nil then strip = directive_strip end + return compile(luafile,lucfile,cleanup,strip) +end + +-- end of intermezzo + +caches = caches or { } +local caches = caches + +local luasuffixes = utilities.lua.suffixes + +caches.base = caches.base or "luatex-cache" +caches.more = caches.more or "context" +caches.direct = false -- true is faster but may need huge amounts of memory +caches.tree = false +caches.force = true +caches.ask = false +caches.relocate = false +caches.defaults = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" } + +local writable, readables, usedreadables = nil, { }, { } + +-- we could use a metatable for writable and readable but not yet + +local function identify() + -- Combining the loops makes it messy. First we check the format cache path + -- and when the last component is not present we try to create it. + local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE") + if texmfcaches then + for k=1,#texmfcaches do + local cachepath = texmfcaches[k] + if cachepath ~= "" then + cachepath = resolvers.resolve(cachepath) + cachepath = resolvers.cleanpath(cachepath) + cachepath = file.collapsepath(cachepath) + local valid = isdir(cachepath) + if valid then + if is_readable(cachepath) then + readables[#readables+1] = cachepath + if not writable and is_writable(cachepath) then + writable = cachepath + end + end + elseif not writable and caches.force then + local cacheparent = file.dirname(cachepath) + if is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths) + if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then + mkdirs(cachepath) + if isdir(cachepath) and is_writable(cachepath) then + report_caches("path %a created",cachepath) + writable = cachepath + readables[#readables+1] = cachepath + end + end + end + end + end + end + end + -- As a last resort we check some temporary paths but this time we don't + -- create them. 
+ local texmfcaches = caches.defaults + if texmfcaches then + for k=1,#texmfcaches do + local cachepath = texmfcaches[k] + cachepath = resolvers.expansion(cachepath) -- was getenv + if cachepath ~= "" then + cachepath = resolvers.resolve(cachepath) + cachepath = resolvers.cleanpath(cachepath) + local valid = isdir(cachepath) + if valid and is_readable(cachepath) then + if not writable and is_writable(cachepath) then + readables[#readables+1] = cachepath + writable = cachepath + break + end + end + end + end + end + -- Some extra checking. If we have no writable or readable path then we simply + -- quit. + if not writable then + report_caches("fatal error: there is no valid writable cache path defined") + os.exit() + elseif #readables == 0 then + report_caches("fatal error: there is no valid readable cache path defined") + os.exit() + end + -- why here + writable = dir.expandname(resolvers.cleanpath(writable)) -- just in case + -- moved here + local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree + if tree then + caches.tree = tree + writable = mkdirs(writable,base,more,tree) + for i=1,#readables do + readables[i] = file.join(readables[i],base,more,tree) + end + else + writable = mkdirs(writable,base,more) + for i=1,#readables do + readables[i] = file.join(readables[i],base,more) + end + end + -- end + if trace_cache then + for i=1,#readables do + report_caches("using readable path %a (order %s)",readables[i],i) + end + report_caches("using writable path %a",writable) + end + identify = function() + return writable, readables + end + return writable, readables +end + +function caches.usedpaths(separator) + local writable, readables = identify() + if #readables > 1 then + local result = { } + local done = { } + for i=1,#readables do + local readable = readables[i] + if readable == writable then + done[readable] = true + result[#result+1] = formatters["readable+writable: %a"](readable) + elseif usedreadables[i] then + done[readable] = true + result[#result+1] = formatters["readable: %a"](readable) + end + end + if not done[writable] then + result[#result+1] = formatters["writable: %a"](writable) + end + return concat(result,separator or " | ") + else + return writable or "?" + end +end + +function caches.configfiles() + return concat(resolvers.instance.specification,";") +end + +function caches.hashed(tree) + tree = gsub(tree,"[\\/]+$","") + tree = lower(tree) + local hash = md5.hex(tree) + if trace_cache or trace_locating then + report_caches("hashing tree %a, hash %a",tree,hash) + end + return hash +end + +function caches.treehash() + local tree = caches.configfiles() + if not tree or tree == "" then + return false + else + return caches.hashed(tree) + end +end + +local r_cache, w_cache = { }, { } -- normally w in in r but who cares + +local function getreadablepaths(...) + local tags = { ... } + local hash = concat(tags,"/") + local done = r_cache[hash] + if not done then + local writable, readables = identify() -- exit if not found + if #tags > 0 then + done = { } + for i=1,#readables do + done[i] = file.join(readables[i],...) + end + else + done = readables + end + r_cache[hash] = done + end + return done +end + +local function getwritablepath(...) + local tags = { ... } + local hash = concat(tags,"/") + local done = w_cache[hash] + if not done then + local writable, readables = identify() -- exit if not found + if #tags > 0 then + done = mkdirs(writable,...) 
+ else + done = writable + end + w_cache[hash] = done + end + return done +end + +caches.getreadablepaths = getreadablepaths +caches.getwritablepath = getwritablepath + +function caches.getfirstreadablefile(filename,...) + local rd = getreadablepaths(...) + for i=1,#rd do + local path = rd[i] + local fullname = file.join(path,filename) + if is_readable(fullname) then + usedreadables[i] = true + return fullname, path + end + end + return caches.setfirstwritablefile(filename,...) +end + +function caches.setfirstwritablefile(filename,...) + local wr = getwritablepath(...) + local fullname = file.join(wr,filename) + return fullname, wr +end + +function caches.define(category,subcategory) -- for old times sake + return function() + return getwritablepath(category,subcategory) + end +end + +function caches.setluanames(path,name) + return format("%s/%s.%s",path,name,luasuffixes.tma), format("%s/%s.%s",path,name,luasuffixes.tmc) +end + +function caches.loaddata(readables,name) + if type(readables) == "string" then + readables = { readables } + end + for i=1,#readables do + local path = readables[i] + local tmaname, tmcname = caches.setluanames(path,name) + local loader = false + if isfile(tmcname) then + loader = loadfile(tmcname) + end + if not loader and isfile(tmaname) then + -- in case we have a different engine + utilities.lua.compile(tmaname,tmcname) + if isfile(tmcname) then + loader = loadfile(tmcname) + end + if not loader then + loader = loadfile(tmaname) + end + end + if loader then + loader = loader() + collectgarbage("step") + return loader + end + end + return false +end + +function caches.is_writable(filepath,filename) + local tmaname, tmcname = caches.setluanames(filepath,filename) + return is_writable(tmaname) +end + +local saveoptions = { compact = true } + +-- add some point we will only use the internal bytecode compiler and +-- then we can flag success in the tma so that it can trigger a compile +-- if the other engine + +function caches.savedata(filepath,filename,data,raw) + local tmaname, tmcname = caches.setluanames(filepath,filename) + local reduce, simplify = true, true + if raw then + reduce, simplify = false, false + end + data.cache_uuid = os.uuid() + if caches.direct then + file.savedata(tmaname,serialize(data,true,saveoptions)) + else + serializetofile(tmaname,data,true,saveoptions) + end + utilities.lua.compile(tmaname,tmcname) +end + +-- moved from data-res: + +local content_state = { } + +function caches.contentstate() + return content_state or { } +end + +function caches.loadcontent(cachename,dataname) + local name = caches.hashed(cachename) + local full, path = caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees") + local filename = file.join(path,name) + local blob = loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua)) + if blob then + local data = blob() + if data and data.content then + if data.type == dataname then + if data.version == resolvers.cacheversion then + content_state[#content_state+1] = data.uuid + if trace_locating then + report_resolvers("loading %a for %a from %a",dataname,cachename,filename) + end + return data.content + else + report_resolvers("skipping %a for %a from %a (version mismatch)",dataname,cachename,filename) + end + else + report_resolvers("skipping %a for %a from %a (datatype mismatch)",dataname,cachename,filename) + end + elseif trace_locating then + report_resolvers("skipping %a for %a from %a (no content)",dataname,cachename,filename) + end + elseif trace_locating then + 
report_resolvers("skipping %a for %a from %a (invalid file)",dataname,cachename,filename) + end +end + +function caches.collapsecontent(content) + for k, v in next, content do + if type(v) == "table" and #v == 1 then + content[k] = v[1] + end + end +end + +function caches.savecontent(cachename,dataname,content) + local name = caches.hashed(cachename) + local full, path = caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees") + local filename = file.join(path,name) -- is full + local luaname = addsuffix(filename,luasuffixes.lua) + local lucname = addsuffix(filename,luasuffixes.luc) + if trace_locating then + report_resolvers("preparing %a for %a",dataname,cachename) + end + local data = { + type = dataname, + root = cachename, + version = resolvers.cacheversion, + date = os.date("%Y-%m-%d"), + time = os.date("%H:%M:%S"), + content = content, + uuid = os.uuid(), + } + local ok = io.savedata(luaname,serialize(data,true)) + if ok then + if trace_locating then + report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname) + end + if utilities.lua.compile(luaname,lucname) then + if trace_locating then + report_resolvers("%a compiled to %a",dataname,lucname) + end + return true + else + if trace_locating then + report_resolvers("compiling failed for %a, deleting file %a",dataname,lucname) + end + os.remove(lucname) + end + elseif trace_locating then + report_resolvers("unable to save %a in %a (access error)",dataname,luaname) + end +end diff --git a/tex/context/base/data-tre.lua b/tex/context/base/data-tre.lua index 0a8b00d9b..5fe8fc4f2 100644 --- a/tex/context/base/data-tre.lua +++ b/tex/context/base/data-tre.lua @@ -1,75 +1,75 @@ -if not modules then modules = { } end modules ['data-tre'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- \input tree://oeps1/**/oeps.tex - -local find, gsub, format = string.find, string.gsub, string.format - -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) - -local report_trees = logs.reporter("resolvers","trees") - -local resolvers = resolvers - -local done, found, notfound = { }, { }, resolvers.finders.notfound - -function resolvers.finders.tree(specification) - local spec = specification.filename - local fnd = found[spec] - if fnd == nil then - if spec ~= "" then - local path, name = file.dirname(spec), file.basename(spec) - if path == "" then path = "." end - local hash = done[path] - if not hash then - local pattern = path .. "/*" -- we will use the proper splitter - hash = dir.glob(pattern) - done[path] = hash - end - local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. 
"$" - for k=1,#hash do - local v = hash[k] - if find(v,pattern) then - found[spec] = v - return v - end - end - end - fnd = notfound() -- false - found[spec] = fnd - end - return fnd -end - -function resolvers.locators.tree(specification) - local name = specification.filename - local realname = resolvers.resolve(name) -- no shortcut - if realname and realname ~= '' and lfs.isdir(realname) then - if trace_locating then - report_trees("locator %a found",realname) - end - resolvers.appendhash('tree',name,false) -- don't cache - elseif trace_locating then - report_trees("locator %a not found",name) - end -end - -function resolvers.hashers.tree(specification) - local name = specification.filename - if trace_locating then - report_trees("analysing %a",name) - end - resolvers.methodhandler("hashers",name) - - resolvers.generators.file(specification) -end - -resolvers.concatinators.tree = resolvers.concatinators.file -resolvers.generators.tree = resolvers.generators.file -resolvers.openers.tree = resolvers.openers.file -resolvers.loaders.tree = resolvers.loaders.file +if not modules then modules = { } end modules ['data-tre'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- \input tree://oeps1/**/oeps.tex + +local find, gsub, format = string.find, string.gsub, string.format + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) + +local report_trees = logs.reporter("resolvers","trees") + +local resolvers = resolvers + +local done, found, notfound = { }, { }, resolvers.finders.notfound + +function resolvers.finders.tree(specification) + local spec = specification.filename + local fnd = found[spec] + if fnd == nil then + if spec ~= "" then + local path, name = file.dirname(spec), file.basename(spec) + if path == "" then path = "." end + local hash = done[path] + if not hash then + local pattern = path .. "/*" -- we will use the proper splitter + hash = dir.glob(pattern) + done[path] = hash + end + local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. 
"$" + for k=1,#hash do + local v = hash[k] + if find(v,pattern) then + found[spec] = v + return v + end + end + end + fnd = notfound() -- false + found[spec] = fnd + end + return fnd +end + +function resolvers.locators.tree(specification) + local name = specification.filename + local realname = resolvers.resolve(name) -- no shortcut + if realname and realname ~= '' and lfs.isdir(realname) then + if trace_locating then + report_trees("locator %a found",realname) + end + resolvers.appendhash('tree',name,false) -- don't cache + elseif trace_locating then + report_trees("locator %a not found",name) + end +end + +function resolvers.hashers.tree(specification) + local name = specification.filename + if trace_locating then + report_trees("analysing %a",name) + end + resolvers.methodhandler("hashers",name) + + resolvers.generators.file(specification) +end + +resolvers.concatinators.tree = resolvers.concatinators.file +resolvers.generators.tree = resolvers.generators.file +resolvers.openers.tree = resolvers.openers.file +resolvers.loaders.tree = resolvers.loaders.file diff --git a/tex/context/base/data-use.lua b/tex/context/base/data-use.lua index 9c15263bb..f03b19c7d 100644 --- a/tex/context/base/data-use.lua +++ b/tex/context/base/data-use.lua @@ -1,101 +1,101 @@ -if not modules then modules = { } end modules ['data-use'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find - -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) - -local report_mounts = logs.reporter("resolvers","mounts") - -local resolvers = resolvers - --- we will make a better format, maybe something xml or just text or lua - -resolvers.automounted = resolvers.automounted or { } - -function resolvers.automount(usecache) - local mountpaths = resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT')) - if (not mountpaths or #mountpaths == 0) and usecache then - mountpaths = caches.getreadablepaths("mount") - end - if mountpaths and #mountpaths > 0 then - statistics.starttiming(resolvers.instance) - for k=1,#mountpaths do - local root = mountpaths[k] - local f = io.open(root.."/url.tmi") - if f then - for line in f:lines() do - if line then - if find(line,"^[%%#%-]") then -- or %W - -- skip - elseif find(line,"^zip://") then - if trace_locating then - report_mounts("mounting %a",line) - end - table.insert(resolvers.automounted,line) - resolvers.usezipfile(line) - end - end - end - f:close() - end - end - statistics.stoptiming(resolvers.instance) - end -end - --- status info - -statistics.register("used config file", function() return caches.configfiles() end) -statistics.register("used cache path", function() return caches.usedpaths() end) - --- experiment (code will move) - -function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname == formatname - local enginebanner = status.list().banner - if formatbanner and enginebanner and sourcefile then - local luvname = file.replacesuffix(texname,"luv") -- utilities.lua.suffixes.luv - local luvdata = { - enginebanner = enginebanner, - formatbanner = formatbanner, - sourcehash = md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"), - sourcefile = sourcefile, - } - io.savedata(luvname,table.serialize(luvdata,true)) - end -end - --- todo: check this at 
startup and return (say) 999 as signal that the run --- was aborted due to a wrong format in which case mtx-context can trigger --- a remake - -function statistics.checkfmtstatus(texname) - local enginebanner = status.list().banner - if enginebanner and texname then - local luvname = file.replacesuffix(texname,"luv") -- utilities.lua.suffixes.luv - if lfs.isfile(luvname) then - local luv = dofile(luvname) - if luv and luv.sourcefile then - local sourcehash = md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown") - local luvbanner = luv.enginebanner or "?" - if luvbanner ~= enginebanner then - return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner) - end - local luvhash = luv.sourcehash or "?" - if luvhash ~= sourcehash then - return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash) - end - else - return "invalid status file" - end - else - return "missing status file" - end - end - return true -end +if not modules then modules = { } end modules ['data-use'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) + +local report_mounts = logs.reporter("resolvers","mounts") + +local resolvers = resolvers + +-- we will make a better format, maybe something xml or just text or lua + +resolvers.automounted = resolvers.automounted or { } + +function resolvers.automount(usecache) + local mountpaths = resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT')) + if (not mountpaths or #mountpaths == 0) and usecache then + mountpaths = caches.getreadablepaths("mount") + end + if mountpaths and #mountpaths > 0 then + statistics.starttiming(resolvers.instance) + for k=1,#mountpaths do + local root = mountpaths[k] + local f = io.open(root.."/url.tmi") + if f then + for line in f:lines() do + if line then + if find(line,"^[%%#%-]") then -- or %W + -- skip + elseif find(line,"^zip://") then + if trace_locating then + report_mounts("mounting %a",line) + end + table.insert(resolvers.automounted,line) + resolvers.usezipfile(line) + end + end + end + f:close() + end + end + statistics.stoptiming(resolvers.instance) + end +end + +-- status info + +statistics.register("used config file", function() return caches.configfiles() end) +statistics.register("used cache path", function() return caches.usedpaths() end) + +-- experiment (code will move) + +function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname == formatname + local enginebanner = status.list().banner + if formatbanner and enginebanner and sourcefile then + local luvname = file.replacesuffix(texname,"luv") -- utilities.lua.suffixes.luv + local luvdata = { + enginebanner = enginebanner, + formatbanner = formatbanner, + sourcehash = md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"), + sourcefile = sourcefile, + } + io.savedata(luvname,table.serialize(luvdata,true)) + end +end + +-- todo: check this at startup and return (say) 999 as signal that the run +-- was aborted due to a wrong format in which case mtx-context can trigger +-- a remake + +function statistics.checkfmtstatus(texname) + local enginebanner = status.list().banner + if enginebanner and texname then + local luvname = 
file.replacesuffix(texname,"luv") -- utilities.lua.suffixes.luv + if lfs.isfile(luvname) then + local luv = dofile(luvname) + if luv and luv.sourcefile then + local sourcehash = md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown") + local luvbanner = luv.enginebanner or "?" + if luvbanner ~= enginebanner then + return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner) + end + local luvhash = luv.sourcehash or "?" + if luvhash ~= sourcehash then + return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash) + end + else + return "invalid status file" + end + else + return "missing status file" + end + end + return true +end diff --git a/tex/context/base/data-vir.lua b/tex/context/base/data-vir.lua index e5bf35fa7..fe8c30bfb 100644 --- a/tex/context/base/data-vir.lua +++ b/tex/context/base/data-vir.lua @@ -1,84 +1,84 @@ -if not modules then modules = { } end modules ['data-vir'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, validstrings = string.format, string.valid - -local trace_virtual = false -local report_virtual = logs.reporter("resolvers","virtual") - -trackers.register("resolvers.locating", function(v) trace_virtual = v end) -trackers.register("resolvers.virtual", function(v) trace_virtual = v end) - -local resolvers = resolvers - -local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers - -local data = { } -local n = 0 -- hm, number can be query -local template = "virtual://%s.%s" - -function savers.virtual(specification,content) - n = n + 1 -- one number for all namespaces - local path = specification.path - local filename = format(template,path ~= "" and path or "virtualfile",n) - if trace_virtual then - report_virtual("saver: file %a saved",filename) - end - data[filename] = content - return filename -end - -function finders.virtual(specification) - local original = specification.original - local d = data[original] - if d then - if trace_virtual then - report_virtual("finder: file %a found",original) - end - return original - else - if trace_virtual then - report_virtual("finder: unknown file %a",original) - end - return finders.notfound() - end -end - -function openers.virtual(specification) - local original = specification.original - local d = data[original] - if d then - if trace_virtual then - report_virtual("opener: file %a opened",original) - end - data[original] = nil -- when we comment this we can have error messages - -- With utf-8 we signal that no regime is to be applied! 
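-            -- (i.e. the fourth argument of textopener is the coding: with "utf-8" the
-            -- stored string is used as-is and no input regime translation is applied)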
- return openers.helpers.textopener("virtual",original,d,"utf-8") - else - if trace_virtual then - report_virtual("opener: file %a not found",original) - end - return openers.notfound() - end -end - -function loaders.virtual(specification) - local original = specification.original - local d = data[original] - if d then - if trace_virtual then - report_virtual("loader: file %a loaded",original) - end - data[original] = nil - return true, d, #d - end - if trace_virtual then - report_virtual("loader: file %a not loaded",original) - end - return loaders.notfound() -end +if not modules then modules = { } end modules ['data-vir'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, validstrings = string.format, string.valid + +local trace_virtual = false +local report_virtual = logs.reporter("resolvers","virtual") + +trackers.register("resolvers.locating", function(v) trace_virtual = v end) +trackers.register("resolvers.virtual", function(v) trace_virtual = v end) + +local resolvers = resolvers + +local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers + +local data = { } +local n = 0 -- hm, number can be query +local template = "virtual://%s.%s" + +function savers.virtual(specification,content) + n = n + 1 -- one number for all namespaces + local path = specification.path + local filename = format(template,path ~= "" and path or "virtualfile",n) + if trace_virtual then + report_virtual("saver: file %a saved",filename) + end + data[filename] = content + return filename +end + +function finders.virtual(specification) + local original = specification.original + local d = data[original] + if d then + if trace_virtual then + report_virtual("finder: file %a found",original) + end + return original + else + if trace_virtual then + report_virtual("finder: unknown file %a",original) + end + return finders.notfound() + end +end + +function openers.virtual(specification) + local original = specification.original + local d = data[original] + if d then + if trace_virtual then + report_virtual("opener: file %a opened",original) + end + data[original] = nil -- when we comment this we can have error messages + -- With utf-8 we signal that no regime is to be applied! + return openers.helpers.textopener("virtual",original,d,"utf-8") + else + if trace_virtual then + report_virtual("opener: file %a not found",original) + end + return openers.notfound() + end +end + +function loaders.virtual(specification) + local original = specification.original + local d = data[original] + if d then + if trace_virtual then + report_virtual("loader: file %a loaded",original) + end + data[original] = nil + return true, d, #d + end + if trace_virtual then + report_virtual("loader: file %a not loaded",original) + end + return loaders.notfound() +end diff --git a/tex/context/base/data-zip.lua b/tex/context/base/data-zip.lua index 5db69670c..62eeb1f38 100644 --- a/tex/context/base/data-zip.lua +++ b/tex/context/base/data-zip.lua @@ -1,264 +1,264 @@ -if not modules then modules = { } end modules ['data-zip'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- partly redone .. 
needs testing - -local format, find, match = string.format, string.find, string.match - -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) - -local report_zip = logs.reporter("resolvers","zip") - ---[[ldx-- -

We use a url syntax for accessing the zip file itself and files in it:

- - -zip:///oeps.zip?name=bla/bla.tex -zip:///oeps.zip?tree=tex/texmf-local -zip:///texmf.zip?tree=/tex/texmf -zip:///texmf.zip?tree=/tex/texmf-local -zip:///texmf-mine.zip?tree=/tex/texmf-projects - ---ldx]]-- - -local resolvers = resolvers - -zip = zip or { } -local zip = zip - -zip.archives = zip.archives or { } -local archives = zip.archives - -zip.registeredfiles = zip.registeredfiles or { } -local registeredfiles = zip.registeredfiles - -local limited = false - -directives.register("system.inputmode", function(v) - if not limited then - local i_limiter = io.i_limiter(v) - if i_limiter then - zip.open = i_limiter.protect(zip.open) - limited = true - end - end -end) - -local function validzip(str) -- todo: use url splitter - if not find(str,"^zip://") then - return "zip:///" .. str - else - return str - end -end - -function zip.openarchive(name) - if not name or name == "" then - return nil - else - local arch = archives[name] - if not arch then - local full = resolvers.findfile(name) or "" - arch = (full ~= "" and zip.open(full)) or false - archives[name] = arch - end - return arch - end -end - -function zip.closearchive(name) - if not name or (name == "" and archives[name]) then - zip.close(archives[name]) - archives[name] = nil - end -end - -function resolvers.locators.zip(specification) - local archive = specification.filename - local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in to be initialized tree - if trace_locating then - if zipfile then - report_zip("locator: archive %a found",archive) - else - report_zip("locator: archive %a not found",archive) - end - end -end - -function resolvers.hashers.zip(specification) - local archive = specification.filename - if trace_locating then - report_zip("loading file %a",archive) - end - resolvers.usezipfile(specification.original) -end - -function resolvers.concatinators.zip(zipfile,path,name) -- ok ? 
- if not path or path == "" then - return format('%s?name=%s',zipfile,name) - else - return format('%s?name=%s/%s',zipfile,path,name) - end -end - -function resolvers.finders.zip(specification) - local original = specification.original - local archive = specification.filename - if archive then - local query = url.query(specification.query) - local queryname = query.name - if queryname then - local zfile = zip.openarchive(archive) - if zfile then - if trace_locating then - report_zip("finder: archive %a found",archive) - end - local dfile = zfile:open(queryname) - if dfile then - dfile = zfile:close() - if trace_locating then - report_zip("finder: file %a found",queryname) - end - return specification.original - elseif trace_locating then - report_zip("finder: file %a not found",queryname) - end - elseif trace_locating then - report_zip("finder: unknown archive %a",archive) - end - end - end - if trace_locating then - report_zip("finder: %a not found",original) - end - return resolvers.finders.notfound() -end - -function resolvers.openers.zip(specification) - local original = specification.original - local archive = specification.filename - if archive then - local query = url.query(specification.query) - local queryname = query.name - if queryname then - local zfile = zip.openarchive(archive) - if zfile then - if trace_locating then - report_zip("opener; archive %a opened",archive) - end - local dfile = zfile:open(queryname) - if dfile then - if trace_locating then - report_zip("opener: file %a found",queryname) - end - return resolvers.openers.helpers.textopener('zip',original,dfile) - elseif trace_locating then - report_zip("opener: file %a not found",queryname) - end - elseif trace_locating then - report_zip("opener: unknown archive %a",archive) - end - end - end - if trace_locating then - report_zip("opener: %a not found",original) - end - return resolvers.openers.notfound() -end - -function resolvers.loaders.zip(specification) - local original = specification.original - local archive = specification.filename - if archive then - local query = url.query(specification.query) - local queryname = query.name - if queryname then - local zfile = zip.openarchive(archive) - if zfile then - if trace_locating then - report_zip("loader: archive %a opened",archive) - end - local dfile = zfile:open(queryname) - if dfile then - logs.show_load(original) - if trace_locating then - report_zip("loader; file %a loaded",original) - end - local s = dfile:read("*all") - dfile:close() - return true, s, #s - elseif trace_locating then - report_zip("loader: file %a not found",queryname) - end - elseif trace_locating then - report_zip("loader; unknown archive %a",archive) - end - end - end - if trace_locating then - report_zip("loader: %a not found",original) - end - return resolvers.openers.notfound() -end - --- zip:///somefile.zip --- zip:///somefile.zip?tree=texmf-local -> mount - -function resolvers.usezipfile(archive) - local specification = resolvers.splitmethod(archive) -- to be sure - local archive = specification.filename - if archive and not registeredfiles[archive] then - local z = zip.openarchive(archive) - if z then - local instance = resolvers.instance - local tree = url.query(specification.query).tree or "" - if trace_locating then - report_zip("registering: archive %a",archive) - end - statistics.starttiming(instance) - resolvers.prependhash('zip',archive) - resolvers.extendtexmfvariable(archive) -- resets hashes too - registeredfiles[archive] = z - instance.files[archive] = 
resolvers.registerzipfile(z,tree) - statistics.stoptiming(instance) - elseif trace_locating then - report_zip("registering: unknown archive %a",archive) - end - elseif trace_locating then - report_zip("registering: archive %a not found",archive) - end -end - -function resolvers.registerzipfile(z,tree) - local files, filter = { }, "" - if tree == "" then - filter = "^(.+)/(.-)$" - else - filter = format("^%s/(.+)/(.-)$",tree) - end - if trace_locating then - report_zip("registering: using filter %a",filter) - end - local register, n = resolvers.registerfile, 0 - for i in z:files() do - local path, name = match(i.filename,filter) - if path then - if name and name ~= '' then - register(files, name, path) - n = n + 1 - else - -- directory - end - else - register(files, i.filename, '') - n = n + 1 - end - end - report_zip("registering: %s files registered",n) - return files -end +if not modules then modules = { } end modules ['data-zip'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- partly redone .. needs testing + +local format, find, match = string.format, string.find, string.match + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) + +local report_zip = logs.reporter("resolvers","zip") + +--[[ldx-- +

We use a url syntax for accessing the zip file itself and files in it:

+ + +zip:///oeps.zip?name=bla/bla.tex +zip:///oeps.zip?tree=tex/texmf-local +zip:///texmf.zip?tree=/tex/texmf +zip:///texmf.zip?tree=/tex/texmf-local +zip:///texmf-mine.zip?tree=/tex/texmf-projects + +--ldx]]-- + +local resolvers = resolvers + +zip = zip or { } +local zip = zip + +zip.archives = zip.archives or { } +local archives = zip.archives + +zip.registeredfiles = zip.registeredfiles or { } +local registeredfiles = zip.registeredfiles + +local limited = false + +directives.register("system.inputmode", function(v) + if not limited then + local i_limiter = io.i_limiter(v) + if i_limiter then + zip.open = i_limiter.protect(zip.open) + limited = true + end + end +end) + +local function validzip(str) -- todo: use url splitter + if not find(str,"^zip://") then + return "zip:///" .. str + else + return str + end +end + +function zip.openarchive(name) + if not name or name == "" then + return nil + else + local arch = archives[name] + if not arch then + local full = resolvers.findfile(name) or "" + arch = (full ~= "" and zip.open(full)) or false + archives[name] = arch + end + return arch + end +end + +function zip.closearchive(name) + if not name or (name == "" and archives[name]) then + zip.close(archives[name]) + archives[name] = nil + end +end + +function resolvers.locators.zip(specification) + local archive = specification.filename + local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in to be initialized tree + if trace_locating then + if zipfile then + report_zip("locator: archive %a found",archive) + else + report_zip("locator: archive %a not found",archive) + end + end +end + +function resolvers.hashers.zip(specification) + local archive = specification.filename + if trace_locating then + report_zip("loading file %a",archive) + end + resolvers.usezipfile(specification.original) +end + +function resolvers.concatinators.zip(zipfile,path,name) -- ok ? 
+ if not path or path == "" then + return format('%s?name=%s',zipfile,name) + else + return format('%s?name=%s/%s',zipfile,path,name) + end +end + +function resolvers.finders.zip(specification) + local original = specification.original + local archive = specification.filename + if archive then + local query = url.query(specification.query) + local queryname = query.name + if queryname then + local zfile = zip.openarchive(archive) + if zfile then + if trace_locating then + report_zip("finder: archive %a found",archive) + end + local dfile = zfile:open(queryname) + if dfile then + dfile = zfile:close() + if trace_locating then + report_zip("finder: file %a found",queryname) + end + return specification.original + elseif trace_locating then + report_zip("finder: file %a not found",queryname) + end + elseif trace_locating then + report_zip("finder: unknown archive %a",archive) + end + end + end + if trace_locating then + report_zip("finder: %a not found",original) + end + return resolvers.finders.notfound() +end + +function resolvers.openers.zip(specification) + local original = specification.original + local archive = specification.filename + if archive then + local query = url.query(specification.query) + local queryname = query.name + if queryname then + local zfile = zip.openarchive(archive) + if zfile then + if trace_locating then + report_zip("opener; archive %a opened",archive) + end + local dfile = zfile:open(queryname) + if dfile then + if trace_locating then + report_zip("opener: file %a found",queryname) + end + return resolvers.openers.helpers.textopener('zip',original,dfile) + elseif trace_locating then + report_zip("opener: file %a not found",queryname) + end + elseif trace_locating then + report_zip("opener: unknown archive %a",archive) + end + end + end + if trace_locating then + report_zip("opener: %a not found",original) + end + return resolvers.openers.notfound() +end + +function resolvers.loaders.zip(specification) + local original = specification.original + local archive = specification.filename + if archive then + local query = url.query(specification.query) + local queryname = query.name + if queryname then + local zfile = zip.openarchive(archive) + if zfile then + if trace_locating then + report_zip("loader: archive %a opened",archive) + end + local dfile = zfile:open(queryname) + if dfile then + logs.show_load(original) + if trace_locating then + report_zip("loader; file %a loaded",original) + end + local s = dfile:read("*all") + dfile:close() + return true, s, #s + elseif trace_locating then + report_zip("loader: file %a not found",queryname) + end + elseif trace_locating then + report_zip("loader; unknown archive %a",archive) + end + end + end + if trace_locating then + report_zip("loader: %a not found",original) + end + return resolvers.openers.notfound() +end + +-- zip:///somefile.zip +-- zip:///somefile.zip?tree=texmf-local -> mount + +function resolvers.usezipfile(archive) + local specification = resolvers.splitmethod(archive) -- to be sure + local archive = specification.filename + if archive and not registeredfiles[archive] then + local z = zip.openarchive(archive) + if z then + local instance = resolvers.instance + local tree = url.query(specification.query).tree or "" + if trace_locating then + report_zip("registering: archive %a",archive) + end + statistics.starttiming(instance) + resolvers.prependhash('zip',archive) + resolvers.extendtexmfvariable(archive) -- resets hashes too + registeredfiles[archive] = z + instance.files[archive] = 
resolvers.registerzipfile(z,tree) + statistics.stoptiming(instance) + elseif trace_locating then + report_zip("registering: unknown archive %a",archive) + end + elseif trace_locating then + report_zip("registering: archive %a not found",archive) + end +end + +function resolvers.registerzipfile(z,tree) + local files, filter = { }, "" + if tree == "" then + filter = "^(.+)/(.-)$" + else + filter = format("^%s/(.+)/(.-)$",tree) + end + if trace_locating then + report_zip("registering: using filter %a",filter) + end + local register, n = resolvers.registerfile, 0 + for i in z:files() do + local path, name = match(i.filename,filter) + if path then + if name and name ~= '' then + register(files, name, path) + n = n + 1 + else + -- directory + end + else + register(files, i.filename, '') + n = n + 1 + end + end + report_zip("registering: %s files registered",n) + return files +end diff --git a/tex/context/base/file-ini.lua b/tex/context/base/file-ini.lua index 1872ed3d3..fe4515c84 100644 --- a/tex/context/base/file-ini.lua +++ b/tex/context/base/file-ini.lua @@ -1,37 +1,37 @@ -if not modules then modules = { } end modules ['file-ini'] = { - version = 1.001, - comment = "companion to file-ini.mkvi", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

It's more convenient to manipulate filenames (paths) in Lua than in
-TeX. These methods have counterparts at the TeX end.

---ldx]]-- - -resolvers.jobs = resolvers.jobs or { } - -local texcount = tex.count -local setvalue = context.setvalue - -function commands.splitfilename(fullname) - local t = file.nametotable(fullname) - local path = t.path - texcount.splitoffkind = (path == "" and 0) or (path == '.' and 1) or 2 - setvalue("splitofffull",fullname) - setvalue("splitoffpath",path) - setvalue("splitoffname",t.name) - setvalue("splitoffbase",t.base) - setvalue("splitofftype",t.suffix) -end - -function commands.doifparentfileelse(n) - commands.doifelse(n == environment.jobname or n == environment.jobname .. '.tex' or n == environment.outputfilename) -end - -function commands.doiffileexistelse(name) - local foundname = resolvers.findtexfile(name) - commands.doifelse(foundname and foundname ~= "") -end +if not modules then modules = { } end modules ['file-ini'] = { + version = 1.001, + comment = "companion to file-ini.mkvi", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

It's more convenient to manipulate filenames (paths) in Lua than in
+TeX. These methods have counterparts at the TeX end.
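+
+As a minimal illustration (not part of this patch, merely assuming the command
+defined below): calling
+
+  commands.splitfilename("tex/context/base/file-ini.mkvi")
+
+from the Lua end sets the macros \splitofffull, \splitoffpath, \splitoffname,
+\splitoffbase and \splitofftype to the corresponding parts of the given name.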

+--ldx]]-- + +resolvers.jobs = resolvers.jobs or { } + +local texcount = tex.count +local setvalue = context.setvalue + +function commands.splitfilename(fullname) + local t = file.nametotable(fullname) + local path = t.path + texcount.splitoffkind = (path == "" and 0) or (path == '.' and 1) or 2 + setvalue("splitofffull",fullname) + setvalue("splitoffpath",path) + setvalue("splitoffname",t.name) + setvalue("splitoffbase",t.base) + setvalue("splitofftype",t.suffix) +end + +function commands.doifparentfileelse(n) + commands.doifelse(n == environment.jobname or n == environment.jobname .. '.tex' or n == environment.outputfilename) +end + +function commands.doiffileexistelse(name) + local foundname = resolvers.findtexfile(name) + commands.doifelse(foundname and foundname ~= "") +end diff --git a/tex/context/base/file-job.lua b/tex/context/base/file-job.lua index 288a690d2..9a88cefb4 100644 --- a/tex/context/base/file-job.lua +++ b/tex/context/base/file-job.lua @@ -1,1001 +1,1001 @@ -if not modules then modules = { } end modules ['file-job'] = { - version = 1.001, - comment = "companion to file-job.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- in retrospect dealing it's not that bad to deal with the nesting --- and push/poppign at the tex end - -local gsub, match, find = string.gsub, string.match, string.find -local insert, remove, concat = table.insert, table.remove, table.concat -local validstring = string.valid -local sortedhash = table.sortedhash -local formatters = string.formatters - -local commands, resolvers, context = commands, resolvers, context - -local trace_jobfiles = false trackers.register("system.jobfiles", function(v) trace_jobfiles = v end) - -local report_jobfiles = logs.reporter("system","jobfiles") - -local texsetcount = tex.setcount -local elements = interfaces.elements -local constants = interfaces.constants -local variables = interfaces.variables -local logsnewline = logs.newline -local logspushtarget = logs.pushtarget -local logspoptarget = logs.poptarget -local settings_to_array = utilities.parsers.settings_to_array -local allocate = utilities.storage.allocate - -local nameonly = file.nameonly -local suffixonly = file.suffix -local basename = file.basename -local addsuffix = file.addsuffix -local removesuffix = file.removesuffix -local dirname = file.dirname -local joinpath = file.join -local is_qualified_path = file.is_qualified_path - -local cleanpath = resolvers.cleanpath -local inputstack = resolvers.inputstack - -local v_outer = variables.outer -local v_text = variables.text -local v_project = variables.project -local v_environment = variables.environment -local v_product = variables.product -local v_component = variables.component -local c_prefix = variables.prefix - --- main code .. there is some overlap .. here we have loc:// - -local function findctxfile(name) -- loc ? any ? 
- if is_qualified_path(name) then -- maybe when no suffix do some test for tex - return name - elseif not url.hasscheme(name) then - return resolvers.finders.byscheme("loc",name) or "" - else - return resolvers.findtexfile(name) or "" - end -end - -resolvers.findctxfile = findctxfile - -function commands.processfile(name) - name = findctxfile(name) - if name ~= "" then - context.input(name) - end -end - -function commands.doifinputfileelse(name) - commands.doifelse(findctxfile(name) ~= "") -end - -function commands.locatefilepath(name) - context(dirname(findctxfile(name))) -end - -function commands.usepath(paths) - resolvers.registerextrapath(paths) -end - -function commands.usesubpath(subpaths) - resolvers.registerextrapath(nil,subpaths) -end - -function commands.allinputpaths() - context(concat(resolvers.instance.extra_paths or { },",")) -end - -function commands.setdocumentfilenames() - environment.initializefilenames() -end - -function commands.usezipfile(name,tree) - if tree and tree ~= "" then - resolvers.usezipfile(formatters["zip:///%s?tree=%s"](name,tree)) - else - resolvers.usezipfile(formatters["zip:///%s"](name)) - end -end - -local report_system = logs.reporter("system") - --- moved from tex to lua: - -local texpatterns = { "%s.mkvi", "%s.mkiv", "%s.tex" } -local luapatterns = { "%s" .. utilities.lua.suffixes.luc, "%s.lua" } -local cldpatterns = { "%s.cld" } -local xmlpatterns = { "%s.xml" } - -local uselibrary = commands.uselibrary -local input = context.input - --- status --- --- these need to be synced with input stream: - -local processstack = { } -local processedfile = "" -local processedfiles = { } - -function commands.processedfile() - context(processedfile) -end - -function commands.processedfiles() - context(concat(processedfiles,",")) -end - -function commands.dostarttextfile(name) - insert(processstack,name) - processedfile = name - insert(processedfiles,name) -end - -function commands.dostoptextfile() - processedfile = remove(processstack) or "" -end - -local function startprocessing(name,notext) - if not notext then - -- report_system("begin file %a at line %a",name,status.linenumber or 0) - context.dostarttextfile(name) - end -end - -local function stopprocessing(notext) - if not notext then - context.dostoptextfile() - -- report_system("end file %a at line %a",name,status.linenumber or 0) - end -end - --- - -local action = function(name,foundname) input(foundname) end -local failure = function(name,foundname) report_jobfiles("unknown %s file %a","tex",name) end - -local function usetexfile(name,onlyonce,notext) - startprocessing(name,notext) - uselibrary { - name = name, - patterns = texpatterns, - action = action, - failure = failure, - onlyonce = onlyonce, - } - stopprocessing(notext) -end - -local action = function(name,foundname) dofile(foundname) end -local failure = function(name,foundname) report_jobfiles("unknown %s file %a","lua",name) end - -local function useluafile(name,onlyonce,notext) - uselibrary { - name = name, - patterns = luapatterns, - action = action, - failure = failure, - onlyonce = onlyonce, - } -end - -local action = function(name,foundname) dofile(foundname) end -local failure = function(name,foundname) report_jobfiles("unknown %s file %a","cld",name) end - -local function usecldfile(name,onlyonce,notext) - startprocessing(name,notext) - uselibrary { - name = name, - patterns = cldpatterns, - action = action, - failure = failure, - onlyonce = onlyonce, - } - stopprocessing(notext) -end - -local action = function(name,foundname) 
context.xmlprocess(foundname,"main","") end -local failure = function(name,foundname) report_jobfiles("unknown %s file %a","xml",name) end - -local function usexmlfile(name,onlyonce,notext) - startprocessing(name,notext) - uselibrary { - name = name, - patterns = xmlpatterns, - action = action, - failure = failure, - onlyonce = onlyonce, - } - stopprocessing(notext) -end - -commands.usetexfile = usetexfile -commands.useluafile = useluafile -commands.usecldfile = usecldfile -commands.usexmlfile = usexmlfile - -local suffixes = { - mkvi = usetexfile, - mkiv = usetexfile, - tex = usetexfile, - luc = useluafile, - lua = useluafile, - cld = usecldfile, - xml = usexmlfile, - [""] = usetexfile, -} - -local function useanyfile(name,onlyonce) - local s = suffixes[file.suffix(name)] - if s then - s(removesuffix(name),onlyonce) - else - usetexfile(name,onlyonce) -- e.g. ctx file ---~ resolvers.readfilename(name) - end -end - -commands.useanyfile = useanyfile - -function resolvers.jobs.usefile(name,onlyonce,notext) - local s = suffixes[file.suffix(name)] - if s then - s(removesuffix(name),onlyonce,notext) - end -end - --- document structure - -local textlevel = 0 -- inaccessible for user, we need to define counter textlevel at the tex end - -local function dummyfunction() end - -local function startstoperror() - report_system("invalid \\%s%s ... \\%s%s structure",elements.start,v_text,elements.stop,v_text) - startstoperror = dummyfunction -end - -local function starttext() - if textlevel == 0 then - if trace_jobfiles then - report_jobfiles("starting text") - end - -- registerfileinfo[begin]jobfilename - context.dostarttext() - end - textlevel = textlevel + 1 - texsetcount("global","textlevel",textlevel) -end - -local function stoptext() - if textlevel == 0 then - startstoperror() - elseif textlevel > 0 then - textlevel = textlevel - 1 - end - texsetcount("global","textlevel",textlevel) - if textlevel <= 0 then - if trace_jobfiles then - report_jobfiles("stopping text") - end - context.dostoptext() - -- registerfileinfo[end]jobfilename - context.finalend() - commands.stoptext = dummyfunction - end -end - -commands.starttext = starttext -commands.stoptext = stoptext - -function commands.forcequitjob(reason) - if reason then - report_system("forcing quit: %s",reason) - else - report_system("forcing quit") - end - context.batchmode() - while textlevel >= 0 do - context.stoptext() - end -end - -function commands.forceendjob() - report_system([[don't use \end to finish a document]]) - context.stoptext() -end - -function commands.autostarttext() - if textlevel == 0 then - report_system([[auto \starttext ... \stoptext]]) - end - context.starttext() -end - -commands.autostoptext = stoptext - --- project structure - -function commands.processfilemany(name) - useanyfile(name,false) -end - -function commands.processfileonce(name) - useanyfile(name,true) -end - -function commands.processfilenone(name) - -- skip file -end - --- - -local typestack = { } -local pathstack = { } - -local currenttype = v_text -local currentpath = "." 
- -local tree = { type = "text", name = "", branches = { } } -local treestack = { } -local top = tree.branches -local root = tree - -local project_stack = { } -local product_stack = { } -local component_stack = { } -local environment_stack = { } - -local stacks = { - [v_project ] = project_stack, - [v_product ] = product_stack, - [v_component ] = component_stack, - [v_environment] = environment_stack, -} - --- - -local report_structures = logs.reporter("system","structure") -local report_structure = logs.reporter("used structure") - -local function pushtree(what,name) - local t = { } - top[#top+1] = { type = what, name = name, branches = t } - insert(treestack,top) - top = t -end - -local function poptree() - top = remove(treestack) - -- inspect(top) -end - -local function log_tree(top,depth) - report_structure("%s%s: %s",depth,top.type,top.name) - local branches = top.branches - if #branches > 0 then - depth = depth .. " " - for i=1,#branches do - log_tree(branches[i],depth) - end - end -end - -luatex.registerstopactions(function() - logspushtarget("logfile") - logsnewline() - report_structures("start used structure") - logsnewline() - root.name = environment.jobname - log_tree(root,"") - logsnewline() - report_structures("stop used structure") - logsnewline() - logspoptarget() -end) - -job.structure = job.structure or { } -job.structure.collected = job.structure.collected or { } -job.structure.tobesaved = root -job.structure.components = { } - -local function initialize() - local function collect(root,result) - local branches = root.branches - if branches then - for i=1,#branches do - local branch = branches[i] - if branch.type == "component" then - result[#result+1] = branch.name - end - collect(branch,result) - end - end - return result - end - job.structure.components = collect(job.structure.collected,{}) -end - -job.register('job.structure.collected',root,initialize) - --- component: small unit, either or not components itself --- product : combination of components - -local context_processfilemany = context.processfilemany -local context_processfileonce = context.processfileonce -local context_processfilenone = context.processfilenone - -local processors = utilities.storage.allocate { - -- [v_outer] = { - -- [v_text] = { "many", context_processfilemany }, - -- [v_project] = { "once", context_processfileonce }, - -- [v_environment] = { "once", context_processfileonce }, - -- [v_product] = { "once", context_processfileonce }, - -- [v_component] = { "many", context_processfilemany }, - -- }, - [v_text] = { - [v_text] = { "many", context_processfilemany }, - [v_project] = { "once", context_processfileonce }, -- dubious - [v_environment] = { "once", context_processfileonce }, - [v_product] = { "many", context_processfilemany }, -- dubious - [v_component] = { "many", context_processfilemany }, - }, - [v_project] = { - [v_text] = { "many", context_processfilemany }, - [v_project] = { "none", context_processfilenone }, - [v_environment] = { "once", context_processfileonce }, - [v_product] = { "none", context_processfilenone }, - [v_component] = { "none", context_processfilenone }, - }, - [v_environment] = { - [v_text] = { "many", context_processfilemany }, - [v_project] = { "none", context_processfilenone }, - [v_environment] = { "once", context_processfileonce }, - [v_product] = { "none", context_processfilenone }, - [v_component] = { "none", context_processfilenone }, - }, - [v_product] = { - [v_text] = { "many", context_processfilemany }, - [v_project] = { "once", context_processfileonce 
}, - [v_environment] = { "once", context_processfileonce }, - [v_product] = { "many", context_processfilemany }, - [v_component] = { "many", context_processfilemany }, - }, - [v_component] = { - [v_text] = { "many", context_processfilemany }, - [v_project] = { "once", context_processfileonce }, - [v_environment] = { "once", context_processfileonce }, - [v_product] = { "none", context_processfilenone }, - [v_component] = { "many", context_processfilemany }, - } -} - -local start = { - [v_text] = nil, - [v_project] = nil, - [v_environment] = context.startreadingfile, - [v_product] = context.starttext, - [v_component] = context.starttext, -} - -local stop = { - [v_text] = nil, - [v_project] = nil, - [v_environment] = context.stopreadingfile, - [v_product] = context.stoptext, - [v_component] = context.stoptext, -} - -resolvers.jobs.processors = processors - -local function topofstack(what) - local stack = stacks[what] - return stack and stack[#stack] or environment.jobname -end - -local function productcomponent() -- only when in product - local product = product_stack[#product_stack] - if product and product ~= "" then - local component = component_stack[1] - if component and component ~= "" then - return component - end - end -end - -local function justacomponent() - local product = product_stack[#product_stack] - if not product or product == "" then - local component = component_stack[1] - if component and component ~= "" then - return component - end - end -end - -resolvers.jobs.productcomponent = productcomponent -resolvers.jobs.justacomponent = justacomponent - -function resolvers.jobs.currentproject () return topofstack(v_project ) end -function resolvers.jobs.currentproduct () return topofstack(v_product ) end -function resolvers.jobs.currentcomponent () return topofstack(v_component ) end -function resolvers.jobs.currentenvironment() return topofstack(v_environment) end - -local done = { } -local tolerant = false -- too messy, mkii user with the wrong sructure should adapt - -local function process(what,name) - local depth = #typestack - local process - -- - name = resolvers.resolve(name) - -- --- if not tolerant then - -- okay, would be best but not compatible with mkii - process = processors[currenttype][what] --- elseif depth == 0 then --- -- could be a component, product or (brr) project --- if trace_jobfiles then --- report_jobfiles("%s : %s > %s (case 1)",depth,currenttype,v_outer) --- end --- process = processors[v_outer][what] --- elseif depth == 1 and typestack[1] == v_text then --- -- we're still not doing a component or product --- if trace_jobfiles then --- report_jobfiles("%s : %s > %s (case 2)",depth,currenttype,v_outer) --- end --- process = processors[v_outer][what] --- else --- process = processors[currenttype][what] --- end - if process then - local method = process[1] - if method == "none" then - if trace_jobfiles then - report_jobfiles("%s : %s : %s %s %a in %s %a",depth,method,"ignoring",what,name,currenttype,topofstack(currenttype)) - end - elseif method == "once" and done[name] then - if trace_jobfiles then - report_jobfiles("%s : %s : %s %s %a in %s %a",depth,method,"skipping",what,name,currenttype,topofstack(currenttype)) - end - else - -- keep in mind that we also handle "once" at the file level - -- so there is a double catch - done[name] = true - local before = start[what] - local after = stop [what] - if trace_jobfiles then - report_jobfiles("%s : %s : %s %s %a in %s %a",depth,method,"processing",what,name,currenttype,topofstack(currenttype)) - end - if 
before then - before() - end - process[2](name) - if after then - after() - end - end - else - if trace_jobfiles then - report_jobfiles("%s : %s : %s %s %a in %s %a",depth,"none","ignoring",what,name,currenttype,topofstack(currenttype)) - end - end -end - -function commands.useproject (name) process(v_project, name) end -function commands.useenvironment(name) process(v_environment,name) end -function commands.useproduct (name) process(v_product, name) end -function commands.usecomponent (name) process(v_component, name) end - --- todo: setsystemmode to currenttype --- todo: make start/stop commands at the tex end - -local start = { - [v_project] = context.startprojectindeed, - [v_product] = context.startproductindeed, - [v_component] = context.startcomponentindeed, - [v_environment] = context.startenvironmentindeed, -} - -local stop = { - [v_project] = context.stopprojectindeed, - [v_product] = context.stopproductindeed, - [v_component] = context.stopcomponentindeed, - [v_environment] = context.stopenvironmentindeed, -} - -local function gotonextlevel(what,name) -- todo: something with suffix name - insert(stacks[what],name) - insert(typestack,currenttype) - insert(pathstack,currentpath) - currenttype = what - currentpath = dirname(name) - pushtree(what,name) - if start[what] then - start[what]() - end -end - -local function gotopreviouslevel(what) - if stop[what] then - stop[what]() - end - poptree() - currentpath = remove(pathstack) or "." - currenttype = remove(typestack) or v_text - remove(stacks[what]) -- not currenttype ... weak recovery - -- context.endinput() -- does not work - context.signalendofinput(what) -end - -local function autoname(name) - if name == "*" then - name = nameonly(inputstack[#inputstack] or name) - end - return name -end - -function commands.startproject (name) gotonextlevel(v_project, autoname(name)) end -function commands.startproduct (name) gotonextlevel(v_product, autoname(name)) end -function commands.startcomponent (name) gotonextlevel(v_component, autoname(name)) end -function commands.startenvironment(name) gotonextlevel(v_environment,autoname(name)) end - -function commands.stopproject () gotopreviouslevel(v_project ) end -function commands.stopproduct () gotopreviouslevel(v_product ) end -function commands.stopcomponent () gotopreviouslevel(v_component ) end -function commands.stopenvironment() gotopreviouslevel(v_environment) end - -function commands.currentproject () context(topofstack(v_project )) end -function commands.currentproduct () context(topofstack(v_product )) end -function commands.currentcomponent () context(topofstack(v_component )) end -function commands.currentenvironment() context(topofstack(v_environment)) end - --- -- -- this will move -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --- --- --- --- nee --- standaard --- --- --- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - -local report_examodes = logs.reporter("system","examodes") - -local function convertexamodes(str) - local x = xml.convert(str) - for e in xml.collected(x,"exa:variable") do - local label = e.at and e.at.label - if label and label ~= "" then - local data = xml.text(e) - local mode = match(label,"^mode:(.+)$") - if mode then - context.enablemode { formatters["%s:%s"](mode,data) } - end - context.setvariable("exa:variables",label,(gsub(data,"([{}])","\\%1"))) - end - end -end - -function commands.loadexamodes(filename) - if not filename or filename == "" then - filename = removesuffix(tex.jobname) - end - filename = 
resolvers.findfile(addsuffix(filename,'ctm')) or "" - if filename ~= "" then - report_examodes("loading %a",filename) -- todo: message system - convertexamodes(io.loaddata(filename)) - else - report_examodes("no mode file %a",filename) -- todo: message system - end -end - --- changed in mtx-context --- code moved from luat-ini - --- todo: locals when mtx-context is changed - -document = document or { - arguments = allocate(), - files = allocate(), - variables = allocate(), -- for templates - options = { - commandline = { - environments = allocate(), - modules = allocate(), - modes = allocate(), - }, - ctxfile = { - environments = allocate(), - modules = allocate(), - modes = allocate(), - }, - }, -} - -function document.setargument(key,value) - document.arguments[key] = value -end - -function document.setdefaultargument(key,default) - local v = document.arguments[key] - if v == nil or v == "" then - document.arguments[key] = default - end -end - -function document.setfilename(i,name) - if name then - document.files[tonumber(i)] = name - else - document.files[#document.files+1] = tostring(i) - end -end - -function document.getargument(key,default) -- commands - local v = document.arguments[key] - if type(v) == "boolean" then - v = (v and "yes") or "no" - document.arguments[key] = v - end - context(v or default or "") -end - -function document.getfilename(i) -- commands - context(document.files[tonumber(i)] or "") -end - -function commands.getcommandline() -- has to happen at the tex end in order to expand - - -- the document[arguments|files] tables are copies - - local arguments = document.arguments - local files = document.files - local options = document.options - - for k, v in next, environment.arguments do - k = gsub(k,"^c:","") -- already done, but better be safe than sorry - if arguments[k] == nil then - arguments[k] = v - end - end - - -- in the new mtx=context approach we always pass a stub file so we need to - -- to trick the files table which actually only has one entry in a tex job - - if arguments.timing then - context.usemodule("timing") - end - - if arguments.batchmode then - context.batchmode(false) - end - - if arguments.nonstopmode then - context.nonstopmode(false) - end - - if arguments.nostatistics then - directives.enable("system.nostatistics") - end - - if arguments.paranoid then - context.setvalue("maxreadlevel",1) - end - - if validstring(arguments.path) then - context.usepath { arguments.path } - end - - local inputfile = validstring(arguments.input) - - if inputfile and dirname(inputfile) == "." 
and lfs.isfile(inputfile) then - -- nicer in checks - inputfile = basename(inputfile) - end - - local kindofrun = arguments.kindofrun - local currentrun = arguments.maxnofruns - local maxnofruns = arguments.currentrun - - context.setupsystem { - [constants.directory] = validstring(arguments.setuppath), - [constants.inputfile] = inputfile, - [constants.file] = validstring(arguments.result), - [constants.random] = validstring(arguments.randomseed), - -- old: - [constants.n] = validstring(kindofrun), - [constants.m] = validstring(currentrun), - } - - environment.kindofrun = tonumber(kindofrun) or 0 - environment.maxnofruns = tonumber(maxnofruns) or 0 - environment.currentrun = tonumber(currentrun) or 0 - - if validstring(arguments.arguments) then - context.setupenv { arguments.arguments } - end - - if arguments.once then - directives.enable("system.runonce") - end - - if arguments.noarrange then - context.setuparranging { variables.disable } - end - - -- - - local commandline = options.commandline - - commandline.environments = table.append(commandline.environments,settings_to_array(validstring(arguments.environment))) - commandline.modules = table.append(commandline.modules, settings_to_array(validstring(arguments.usemodule))) - commandline.modes = table.append(commandline.modes, settings_to_array(validstring(arguments.mode))) - - -- - - if #files == 0 then - local list = settings_to_array(validstring(arguments.files)) - if list and #list > 0 then - files = list - end - end - - if #files == 0 then - files = { validstring(arguments.input) } - end - - -- - - document.arguments = arguments - document.files = files - -end - --- commandline wins over ctxfile - -local function apply(list,action) - if list then - for i=1,#list do - action { list[i] } - end - end -end - -function commands.setdocumentmodes() -- was setup: *runtime:modes - apply(document.options.ctxfile .modes,context.enablemode) - apply(document.options.commandline.modes,context.enablemode) -end - -function commands.setdocumentmodules() -- was setup: *runtime:modules - apply(document.options.ctxfile .modules,context.usemodule) - apply(document.options.commandline.modules,context.usemodule) -end - -function commands.setdocumentenvironments() -- was setup: *runtime:environments - apply(document.options.ctxfile .environments,context.environment) - apply(document.options.commandline.environments,context.environment) -end - -local report_files = logs.reporter("system","files") -local report_options = logs.reporter("system","options") -local report_file = logs.reporter("used file") -local report_option = logs.reporter("used option") - -luatex.registerstopactions(function() - local foundintrees = resolvers.instance.foundintrees - if #foundintrees > 0 then - logspushtarget("logfile") - logsnewline() - report_files("start used files") - logsnewline() - for i=1,#foundintrees do - report_file("%4i: % T",i,foundintrees[i]) - end - logsnewline() - report_files("stop used files") - logsnewline() - logspoptarget() - end -end) - -luatex.registerstopactions(function() - local files = document.files -- or environment.files - local arguments = document.arguments -- or environment.arguments - -- - logspushtarget("logfile") - logsnewline() - report_options("start commandline options") - logsnewline() - for argument, value in sortedhash(arguments) do - report_option("%s=%A",argument,value) - end - logsnewline() - report_options("stop commandline options") - logsnewline() - report_options("start commandline files") - logsnewline() - for i=1,#files do - 
report_file("% 4i: %s",i,files[i]) - end - logsnewline() - report_options("stop commandline files") - logsnewline() - logspoptarget() -end) - -if environment.initex then - - local report_storage = logs.reporter("system","storage") - local report_table = logs.reporter("stored table") - local report_module = logs.reporter("stored module") - local report_attribute = logs.reporter("stored attribute") - local report_catcodetable = logs.reporter("stored catcodetable") - local report_corenamespace = logs.reporter("stored corenamespace") - - luatex.registerstopactions(function() - logspushtarget("logfile") - logsnewline() - report_storage("start stored tables") - logsnewline() - for k,v in sortedhash(storage.data) do - report_table("%03i %s",k,v[1]) - end - logsnewline() - report_storage("stop stored tables") - logsnewline() - report_storage("start stored modules") - logsnewline() - for k,v in sortedhash(lua.bytedata) do - report_module("%03i %s %s",k,v[2],v[1]) - end - logsnewline() - report_storage("stop stored modules") - logsnewline() - report_storage("start stored attributes") - logsnewline() - for k,v in sortedhash(attributes.names) do - report_attribute("%03i %s",k,v) - end - logsnewline() - report_storage("stop stored attributes") - logsnewline() - report_storage("start stored catcodetables") - logsnewline() - for k,v in sortedhash(catcodes.names) do - report_catcodetable("%03i % t",k,v) - end - logsnewline() - report_storage("stop stored catcodetables") - logsnewline() - report_storage("start stored corenamespaces") - for k,v in sortedhash(interfaces.corenamespaces) do - report_corenamespace("%03i %s",k,v) - end - logsnewline() - report_storage("stop stored corenamespaces") - logsnewline() - logspoptarget() - end) - -end - -function commands.doifelsecontinuewithfile(inpname,basetoo) - local inpnamefull = addsuffix(inpname,"tex") - local inpfilefull = addsuffix(environment.inputfilename,"tex") - local continue = inpnamefull == inpfilefull - if basetoo and not continue then - continue = inpnamefull == basename(inpfilefull) - end - if continue then - report_system("continuing input file %a",inpname) - end - commands.doifelse(continue) -end +if not modules then modules = { } end modules ['file-job'] = { + version = 1.001, + comment = "companion to file-job.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- in retrospect dealing it's not that bad to deal with the nesting +-- and push/poppign at the tex end + +local gsub, match, find = string.gsub, string.match, string.find +local insert, remove, concat = table.insert, table.remove, table.concat +local validstring = string.valid +local sortedhash = table.sortedhash +local formatters = string.formatters + +local commands, resolvers, context = commands, resolvers, context + +local trace_jobfiles = false trackers.register("system.jobfiles", function(v) trace_jobfiles = v end) + +local report_jobfiles = logs.reporter("system","jobfiles") + +local texsetcount = tex.setcount +local elements = interfaces.elements +local constants = interfaces.constants +local variables = interfaces.variables +local logsnewline = logs.newline +local logspushtarget = logs.pushtarget +local logspoptarget = logs.poptarget +local settings_to_array = utilities.parsers.settings_to_array +local allocate = utilities.storage.allocate + +local nameonly = file.nameonly +local suffixonly = file.suffix +local basename = file.basename +local addsuffix = file.addsuffix 
+local removesuffix = file.removesuffix +local dirname = file.dirname +local joinpath = file.join +local is_qualified_path = file.is_qualified_path + +local cleanpath = resolvers.cleanpath +local inputstack = resolvers.inputstack + +local v_outer = variables.outer +local v_text = variables.text +local v_project = variables.project +local v_environment = variables.environment +local v_product = variables.product +local v_component = variables.component +local c_prefix = variables.prefix + +-- main code .. there is some overlap .. here we have loc:// + +local function findctxfile(name) -- loc ? any ? + if is_qualified_path(name) then -- maybe when no suffix do some test for tex + return name + elseif not url.hasscheme(name) then + return resolvers.finders.byscheme("loc",name) or "" + else + return resolvers.findtexfile(name) or "" + end +end + +resolvers.findctxfile = findctxfile + +function commands.processfile(name) + name = findctxfile(name) + if name ~= "" then + context.input(name) + end +end + +function commands.doifinputfileelse(name) + commands.doifelse(findctxfile(name) ~= "") +end + +function commands.locatefilepath(name) + context(dirname(findctxfile(name))) +end + +function commands.usepath(paths) + resolvers.registerextrapath(paths) +end + +function commands.usesubpath(subpaths) + resolvers.registerextrapath(nil,subpaths) +end + +function commands.allinputpaths() + context(concat(resolvers.instance.extra_paths or { },",")) +end + +function commands.setdocumentfilenames() + environment.initializefilenames() +end + +function commands.usezipfile(name,tree) + if tree and tree ~= "" then + resolvers.usezipfile(formatters["zip:///%s?tree=%s"](name,tree)) + else + resolvers.usezipfile(formatters["zip:///%s"](name)) + end +end + +local report_system = logs.reporter("system") + +-- moved from tex to lua: + +local texpatterns = { "%s.mkvi", "%s.mkiv", "%s.tex" } +local luapatterns = { "%s" .. 
utilities.lua.suffixes.luc, "%s.lua" } +local cldpatterns = { "%s.cld" } +local xmlpatterns = { "%s.xml" } + +local uselibrary = commands.uselibrary +local input = context.input + +-- status +-- +-- these need to be synced with input stream: + +local processstack = { } +local processedfile = "" +local processedfiles = { } + +function commands.processedfile() + context(processedfile) +end + +function commands.processedfiles() + context(concat(processedfiles,",")) +end + +function commands.dostarttextfile(name) + insert(processstack,name) + processedfile = name + insert(processedfiles,name) +end + +function commands.dostoptextfile() + processedfile = remove(processstack) or "" +end + +local function startprocessing(name,notext) + if not notext then + -- report_system("begin file %a at line %a",name,status.linenumber or 0) + context.dostarttextfile(name) + end +end + +local function stopprocessing(notext) + if not notext then + context.dostoptextfile() + -- report_system("end file %a at line %a",name,status.linenumber or 0) + end +end + +-- + +local action = function(name,foundname) input(foundname) end +local failure = function(name,foundname) report_jobfiles("unknown %s file %a","tex",name) end + +local function usetexfile(name,onlyonce,notext) + startprocessing(name,notext) + uselibrary { + name = name, + patterns = texpatterns, + action = action, + failure = failure, + onlyonce = onlyonce, + } + stopprocessing(notext) +end + +local action = function(name,foundname) dofile(foundname) end +local failure = function(name,foundname) report_jobfiles("unknown %s file %a","lua",name) end + +local function useluafile(name,onlyonce,notext) + uselibrary { + name = name, + patterns = luapatterns, + action = action, + failure = failure, + onlyonce = onlyonce, + } +end + +local action = function(name,foundname) dofile(foundname) end +local failure = function(name,foundname) report_jobfiles("unknown %s file %a","cld",name) end + +local function usecldfile(name,onlyonce,notext) + startprocessing(name,notext) + uselibrary { + name = name, + patterns = cldpatterns, + action = action, + failure = failure, + onlyonce = onlyonce, + } + stopprocessing(notext) +end + +local action = function(name,foundname) context.xmlprocess(foundname,"main","") end +local failure = function(name,foundname) report_jobfiles("unknown %s file %a","xml",name) end + +local function usexmlfile(name,onlyonce,notext) + startprocessing(name,notext) + uselibrary { + name = name, + patterns = xmlpatterns, + action = action, + failure = failure, + onlyonce = onlyonce, + } + stopprocessing(notext) +end + +commands.usetexfile = usetexfile +commands.useluafile = useluafile +commands.usecldfile = usecldfile +commands.usexmlfile = usexmlfile + +local suffixes = { + mkvi = usetexfile, + mkiv = usetexfile, + tex = usetexfile, + luc = useluafile, + lua = useluafile, + cld = usecldfile, + xml = usexmlfile, + [""] = usetexfile, +} + +local function useanyfile(name,onlyonce) + local s = suffixes[file.suffix(name)] + if s then + s(removesuffix(name),onlyonce) + else + usetexfile(name,onlyonce) -- e.g. 
ctx file +--~ resolvers.readfilename(name) + end +end + +commands.useanyfile = useanyfile + +function resolvers.jobs.usefile(name,onlyonce,notext) + local s = suffixes[file.suffix(name)] + if s then + s(removesuffix(name),onlyonce,notext) + end +end + +-- document structure + +local textlevel = 0 -- inaccessible for user, we need to define counter textlevel at the tex end + +local function dummyfunction() end + +local function startstoperror() + report_system("invalid \\%s%s ... \\%s%s structure",elements.start,v_text,elements.stop,v_text) + startstoperror = dummyfunction +end + +local function starttext() + if textlevel == 0 then + if trace_jobfiles then + report_jobfiles("starting text") + end + -- registerfileinfo[begin]jobfilename + context.dostarttext() + end + textlevel = textlevel + 1 + texsetcount("global","textlevel",textlevel) +end + +local function stoptext() + if textlevel == 0 then + startstoperror() + elseif textlevel > 0 then + textlevel = textlevel - 1 + end + texsetcount("global","textlevel",textlevel) + if textlevel <= 0 then + if trace_jobfiles then + report_jobfiles("stopping text") + end + context.dostoptext() + -- registerfileinfo[end]jobfilename + context.finalend() + commands.stoptext = dummyfunction + end +end + +commands.starttext = starttext +commands.stoptext = stoptext + +function commands.forcequitjob(reason) + if reason then + report_system("forcing quit: %s",reason) + else + report_system("forcing quit") + end + context.batchmode() + while textlevel >= 0 do + context.stoptext() + end +end + +function commands.forceendjob() + report_system([[don't use \end to finish a document]]) + context.stoptext() +end + +function commands.autostarttext() + if textlevel == 0 then + report_system([[auto \starttext ... \stoptext]]) + end + context.starttext() +end + +commands.autostoptext = stoptext + +-- project structure + +function commands.processfilemany(name) + useanyfile(name,false) +end + +function commands.processfileonce(name) + useanyfile(name,true) +end + +function commands.processfilenone(name) + -- skip file +end + +-- + +local typestack = { } +local pathstack = { } + +local currenttype = v_text +local currentpath = "." + +local tree = { type = "text", name = "", branches = { } } +local treestack = { } +local top = tree.branches +local root = tree + +local project_stack = { } +local product_stack = { } +local component_stack = { } +local environment_stack = { } + +local stacks = { + [v_project ] = project_stack, + [v_product ] = product_stack, + [v_component ] = component_stack, + [v_environment] = environment_stack, +} + +-- + +local report_structures = logs.reporter("system","structure") +local report_structure = logs.reporter("used structure") + +local function pushtree(what,name) + local t = { } + top[#top+1] = { type = what, name = name, branches = t } + insert(treestack,top) + top = t +end + +local function poptree() + top = remove(treestack) + -- inspect(top) +end + +local function log_tree(top,depth) + report_structure("%s%s: %s",depth,top.type,top.name) + local branches = top.branches + if #branches > 0 then + depth = depth .. 
" " + for i=1,#branches do + log_tree(branches[i],depth) + end + end +end + +luatex.registerstopactions(function() + logspushtarget("logfile") + logsnewline() + report_structures("start used structure") + logsnewline() + root.name = environment.jobname + log_tree(root,"") + logsnewline() + report_structures("stop used structure") + logsnewline() + logspoptarget() +end) + +job.structure = job.structure or { } +job.structure.collected = job.structure.collected or { } +job.structure.tobesaved = root +job.structure.components = { } + +local function initialize() + local function collect(root,result) + local branches = root.branches + if branches then + for i=1,#branches do + local branch = branches[i] + if branch.type == "component" then + result[#result+1] = branch.name + end + collect(branch,result) + end + end + return result + end + job.structure.components = collect(job.structure.collected,{}) +end + +job.register('job.structure.collected',root,initialize) + +-- component: small unit, either or not components itself +-- product : combination of components + +local context_processfilemany = context.processfilemany +local context_processfileonce = context.processfileonce +local context_processfilenone = context.processfilenone + +local processors = utilities.storage.allocate { + -- [v_outer] = { + -- [v_text] = { "many", context_processfilemany }, + -- [v_project] = { "once", context_processfileonce }, + -- [v_environment] = { "once", context_processfileonce }, + -- [v_product] = { "once", context_processfileonce }, + -- [v_component] = { "many", context_processfilemany }, + -- }, + [v_text] = { + [v_text] = { "many", context_processfilemany }, + [v_project] = { "once", context_processfileonce }, -- dubious + [v_environment] = { "once", context_processfileonce }, + [v_product] = { "many", context_processfilemany }, -- dubious + [v_component] = { "many", context_processfilemany }, + }, + [v_project] = { + [v_text] = { "many", context_processfilemany }, + [v_project] = { "none", context_processfilenone }, + [v_environment] = { "once", context_processfileonce }, + [v_product] = { "none", context_processfilenone }, + [v_component] = { "none", context_processfilenone }, + }, + [v_environment] = { + [v_text] = { "many", context_processfilemany }, + [v_project] = { "none", context_processfilenone }, + [v_environment] = { "once", context_processfileonce }, + [v_product] = { "none", context_processfilenone }, + [v_component] = { "none", context_processfilenone }, + }, + [v_product] = { + [v_text] = { "many", context_processfilemany }, + [v_project] = { "once", context_processfileonce }, + [v_environment] = { "once", context_processfileonce }, + [v_product] = { "many", context_processfilemany }, + [v_component] = { "many", context_processfilemany }, + }, + [v_component] = { + [v_text] = { "many", context_processfilemany }, + [v_project] = { "once", context_processfileonce }, + [v_environment] = { "once", context_processfileonce }, + [v_product] = { "none", context_processfilenone }, + [v_component] = { "many", context_processfilemany }, + } +} + +local start = { + [v_text] = nil, + [v_project] = nil, + [v_environment] = context.startreadingfile, + [v_product] = context.starttext, + [v_component] = context.starttext, +} + +local stop = { + [v_text] = nil, + [v_project] = nil, + [v_environment] = context.stopreadingfile, + [v_product] = context.stoptext, + [v_component] = context.stoptext, +} + +resolvers.jobs.processors = processors + +local function topofstack(what) + local stack = stacks[what] + 
return stack and stack[#stack] or environment.jobname +end + +local function productcomponent() -- only when in product + local product = product_stack[#product_stack] + if product and product ~= "" then + local component = component_stack[1] + if component and component ~= "" then + return component + end + end +end + +local function justacomponent() + local product = product_stack[#product_stack] + if not product or product == "" then + local component = component_stack[1] + if component and component ~= "" then + return component + end + end +end + +resolvers.jobs.productcomponent = productcomponent +resolvers.jobs.justacomponent = justacomponent + +function resolvers.jobs.currentproject () return topofstack(v_project ) end +function resolvers.jobs.currentproduct () return topofstack(v_product ) end +function resolvers.jobs.currentcomponent () return topofstack(v_component ) end +function resolvers.jobs.currentenvironment() return topofstack(v_environment) end + +local done = { } +local tolerant = false -- too messy, mkii user with the wrong sructure should adapt + +local function process(what,name) + local depth = #typestack + local process + -- + name = resolvers.resolve(name) + -- +-- if not tolerant then + -- okay, would be best but not compatible with mkii + process = processors[currenttype][what] +-- elseif depth == 0 then +-- -- could be a component, product or (brr) project +-- if trace_jobfiles then +-- report_jobfiles("%s : %s > %s (case 1)",depth,currenttype,v_outer) +-- end +-- process = processors[v_outer][what] +-- elseif depth == 1 and typestack[1] == v_text then +-- -- we're still not doing a component or product +-- if trace_jobfiles then +-- report_jobfiles("%s : %s > %s (case 2)",depth,currenttype,v_outer) +-- end +-- process = processors[v_outer][what] +-- else +-- process = processors[currenttype][what] +-- end + if process then + local method = process[1] + if method == "none" then + if trace_jobfiles then + report_jobfiles("%s : %s : %s %s %a in %s %a",depth,method,"ignoring",what,name,currenttype,topofstack(currenttype)) + end + elseif method == "once" and done[name] then + if trace_jobfiles then + report_jobfiles("%s : %s : %s %s %a in %s %a",depth,method,"skipping",what,name,currenttype,topofstack(currenttype)) + end + else + -- keep in mind that we also handle "once" at the file level + -- so there is a double catch + done[name] = true + local before = start[what] + local after = stop [what] + if trace_jobfiles then + report_jobfiles("%s : %s : %s %s %a in %s %a",depth,method,"processing",what,name,currenttype,topofstack(currenttype)) + end + if before then + before() + end + process[2](name) + if after then + after() + end + end + else + if trace_jobfiles then + report_jobfiles("%s : %s : %s %s %a in %s %a",depth,"none","ignoring",what,name,currenttype,topofstack(currenttype)) + end + end +end + +function commands.useproject (name) process(v_project, name) end +function commands.useenvironment(name) process(v_environment,name) end +function commands.useproduct (name) process(v_product, name) end +function commands.usecomponent (name) process(v_component, name) end + +-- todo: setsystemmode to currenttype +-- todo: make start/stop commands at the tex end + +local start = { + [v_project] = context.startprojectindeed, + [v_product] = context.startproductindeed, + [v_component] = context.startcomponentindeed, + [v_environment] = context.startenvironmentindeed, +} + +local stop = { + [v_project] = context.stopprojectindeed, + [v_product] = 
context.stopproductindeed, + [v_component] = context.stopcomponentindeed, + [v_environment] = context.stopenvironmentindeed, +} + +local function gotonextlevel(what,name) -- todo: something with suffix name + insert(stacks[what],name) + insert(typestack,currenttype) + insert(pathstack,currentpath) + currenttype = what + currentpath = dirname(name) + pushtree(what,name) + if start[what] then + start[what]() + end +end + +local function gotopreviouslevel(what) + if stop[what] then + stop[what]() + end + poptree() + currentpath = remove(pathstack) or "." + currenttype = remove(typestack) or v_text + remove(stacks[what]) -- not currenttype ... weak recovery + -- context.endinput() -- does not work + context.signalendofinput(what) +end + +local function autoname(name) + if name == "*" then + name = nameonly(inputstack[#inputstack] or name) + end + return name +end + +function commands.startproject (name) gotonextlevel(v_project, autoname(name)) end +function commands.startproduct (name) gotonextlevel(v_product, autoname(name)) end +function commands.startcomponent (name) gotonextlevel(v_component, autoname(name)) end +function commands.startenvironment(name) gotonextlevel(v_environment,autoname(name)) end + +function commands.stopproject () gotopreviouslevel(v_project ) end +function commands.stopproduct () gotopreviouslevel(v_product ) end +function commands.stopcomponent () gotopreviouslevel(v_component ) end +function commands.stopenvironment() gotopreviouslevel(v_environment) end + +function commands.currentproject () context(topofstack(v_project )) end +function commands.currentproduct () context(topofstack(v_product )) end +function commands.currentcomponent () context(topofstack(v_component )) end +function commands.currentenvironment() context(topofstack(v_environment)) end + +-- -- -- this will move -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- +-- +-- +-- +-- nee +-- standaard +-- +-- +-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + +local report_examodes = logs.reporter("system","examodes") + +local function convertexamodes(str) + local x = xml.convert(str) + for e in xml.collected(x,"exa:variable") do + local label = e.at and e.at.label + if label and label ~= "" then + local data = xml.text(e) + local mode = match(label,"^mode:(.+)$") + if mode then + context.enablemode { formatters["%s:%s"](mode,data) } + end + context.setvariable("exa:variables",label,(gsub(data,"([{}])","\\%1"))) + end + end +end + +function commands.loadexamodes(filename) + if not filename or filename == "" then + filename = removesuffix(tex.jobname) + end + filename = resolvers.findfile(addsuffix(filename,'ctm')) or "" + if filename ~= "" then + report_examodes("loading %a",filename) -- todo: message system + convertexamodes(io.loaddata(filename)) + else + report_examodes("no mode file %a",filename) -- todo: message system + end +end + +-- changed in mtx-context +-- code moved from luat-ini + +-- todo: locals when mtx-context is changed + +document = document or { + arguments = allocate(), + files = allocate(), + variables = allocate(), -- for templates + options = { + commandline = { + environments = allocate(), + modules = allocate(), + modes = allocate(), + }, + ctxfile = { + environments = allocate(), + modules = allocate(), + modes = allocate(), + }, + }, +} + +function document.setargument(key,value) + document.arguments[key] = value +end + +function document.setdefaultargument(key,default) + local v = document.arguments[key] + if v == nil or v == "" 
then + document.arguments[key] = default + end +end + +function document.setfilename(i,name) + if name then + document.files[tonumber(i)] = name + else + document.files[#document.files+1] = tostring(i) + end +end + +function document.getargument(key,default) -- commands + local v = document.arguments[key] + if type(v) == "boolean" then + v = (v and "yes") or "no" + document.arguments[key] = v + end + context(v or default or "") +end + +function document.getfilename(i) -- commands + context(document.files[tonumber(i)] or "") +end + +function commands.getcommandline() -- has to happen at the tex end in order to expand + + -- the document[arguments|files] tables are copies + + local arguments = document.arguments + local files = document.files + local options = document.options + + for k, v in next, environment.arguments do + k = gsub(k,"^c:","") -- already done, but better be safe than sorry + if arguments[k] == nil then + arguments[k] = v + end + end + + -- in the new mtx=context approach we always pass a stub file so we need to + -- to trick the files table which actually only has one entry in a tex job + + if arguments.timing then + context.usemodule("timing") + end + + if arguments.batchmode then + context.batchmode(false) + end + + if arguments.nonstopmode then + context.nonstopmode(false) + end + + if arguments.nostatistics then + directives.enable("system.nostatistics") + end + + if arguments.paranoid then + context.setvalue("maxreadlevel",1) + end + + if validstring(arguments.path) then + context.usepath { arguments.path } + end + + local inputfile = validstring(arguments.input) + + if inputfile and dirname(inputfile) == "." and lfs.isfile(inputfile) then + -- nicer in checks + inputfile = basename(inputfile) + end + + local kindofrun = arguments.kindofrun + local currentrun = arguments.maxnofruns + local maxnofruns = arguments.currentrun + + context.setupsystem { + [constants.directory] = validstring(arguments.setuppath), + [constants.inputfile] = inputfile, + [constants.file] = validstring(arguments.result), + [constants.random] = validstring(arguments.randomseed), + -- old: + [constants.n] = validstring(kindofrun), + [constants.m] = validstring(currentrun), + } + + environment.kindofrun = tonumber(kindofrun) or 0 + environment.maxnofruns = tonumber(maxnofruns) or 0 + environment.currentrun = tonumber(currentrun) or 0 + + if validstring(arguments.arguments) then + context.setupenv { arguments.arguments } + end + + if arguments.once then + directives.enable("system.runonce") + end + + if arguments.noarrange then + context.setuparranging { variables.disable } + end + + -- + + local commandline = options.commandline + + commandline.environments = table.append(commandline.environments,settings_to_array(validstring(arguments.environment))) + commandline.modules = table.append(commandline.modules, settings_to_array(validstring(arguments.usemodule))) + commandline.modes = table.append(commandline.modes, settings_to_array(validstring(arguments.mode))) + + -- + + if #files == 0 then + local list = settings_to_array(validstring(arguments.files)) + if list and #list > 0 then + files = list + end + end + + if #files == 0 then + files = { validstring(arguments.input) } + end + + -- + + document.arguments = arguments + document.files = files + +end + +-- commandline wins over ctxfile + +local function apply(list,action) + if list then + for i=1,#list do + action { list[i] } + end + end +end + +function commands.setdocumentmodes() -- was setup: *runtime:modes + apply(document.options.ctxfile 
.modes,context.enablemode) + apply(document.options.commandline.modes,context.enablemode) +end + +function commands.setdocumentmodules() -- was setup: *runtime:modules + apply(document.options.ctxfile .modules,context.usemodule) + apply(document.options.commandline.modules,context.usemodule) +end + +function commands.setdocumentenvironments() -- was setup: *runtime:environments + apply(document.options.ctxfile .environments,context.environment) + apply(document.options.commandline.environments,context.environment) +end + +local report_files = logs.reporter("system","files") +local report_options = logs.reporter("system","options") +local report_file = logs.reporter("used file") +local report_option = logs.reporter("used option") + +luatex.registerstopactions(function() + local foundintrees = resolvers.instance.foundintrees + if #foundintrees > 0 then + logspushtarget("logfile") + logsnewline() + report_files("start used files") + logsnewline() + for i=1,#foundintrees do + report_file("%4i: % T",i,foundintrees[i]) + end + logsnewline() + report_files("stop used files") + logsnewline() + logspoptarget() + end +end) + +luatex.registerstopactions(function() + local files = document.files -- or environment.files + local arguments = document.arguments -- or environment.arguments + -- + logspushtarget("logfile") + logsnewline() + report_options("start commandline options") + logsnewline() + for argument, value in sortedhash(arguments) do + report_option("%s=%A",argument,value) + end + logsnewline() + report_options("stop commandline options") + logsnewline() + report_options("start commandline files") + logsnewline() + for i=1,#files do + report_file("% 4i: %s",i,files[i]) + end + logsnewline() + report_options("stop commandline files") + logsnewline() + logspoptarget() +end) + +if environment.initex then + + local report_storage = logs.reporter("system","storage") + local report_table = logs.reporter("stored table") + local report_module = logs.reporter("stored module") + local report_attribute = logs.reporter("stored attribute") + local report_catcodetable = logs.reporter("stored catcodetable") + local report_corenamespace = logs.reporter("stored corenamespace") + + luatex.registerstopactions(function() + logspushtarget("logfile") + logsnewline() + report_storage("start stored tables") + logsnewline() + for k,v in sortedhash(storage.data) do + report_table("%03i %s",k,v[1]) + end + logsnewline() + report_storage("stop stored tables") + logsnewline() + report_storage("start stored modules") + logsnewline() + for k,v in sortedhash(lua.bytedata) do + report_module("%03i %s %s",k,v[2],v[1]) + end + logsnewline() + report_storage("stop stored modules") + logsnewline() + report_storage("start stored attributes") + logsnewline() + for k,v in sortedhash(attributes.names) do + report_attribute("%03i %s",k,v) + end + logsnewline() + report_storage("stop stored attributes") + logsnewline() + report_storage("start stored catcodetables") + logsnewline() + for k,v in sortedhash(catcodes.names) do + report_catcodetable("%03i % t",k,v) + end + logsnewline() + report_storage("stop stored catcodetables") + logsnewline() + report_storage("start stored corenamespaces") + for k,v in sortedhash(interfaces.corenamespaces) do + report_corenamespace("%03i %s",k,v) + end + logsnewline() + report_storage("stop stored corenamespaces") + logsnewline() + logspoptarget() + end) + +end + +function commands.doifelsecontinuewithfile(inpname,basetoo) + local inpnamefull = addsuffix(inpname,"tex") + local inpfilefull = 
addsuffix(environment.inputfilename,"tex") + local continue = inpnamefull == inpfilefull + if basetoo and not continue then + continue = inpnamefull == basename(inpfilefull) + end + if continue then + report_system("continuing input file %a",inpname) + end + commands.doifelse(continue) +end diff --git a/tex/context/base/file-lib.lua b/tex/context/base/file-lib.lua index 3311321c5..5f1a3a1bb 100644 --- a/tex/context/base/file-lib.lua +++ b/tex/context/base/file-lib.lua @@ -1,65 +1,65 @@ -if not modules then modules = { } end modules ['file-lib'] = { - version = 1.001, - comment = "companion to file-lib.mkvi", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: check all usage of truefilename at the tex end and remove --- files there (and replace definitions by full names) - -local format = string.format - -local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end) -local report_files = logs.reporter("files","readfile") - -local loaded = { } -local defaultpatterns = { "%s" } - -local function defaultaction(name,foundname) - report_files("asked name %a, found name %a",name,foundname) -end - -local function defaultfailure(name) - report_files("asked name %a, not found",name) -end - -function commands.uselibrary(specification) -- todo; reporter - local name = specification.name - if name and name ~= "" then - local patterns = specification.patterns or defaultpatterns - local action = specification.action or defaultaction - local failure = specification.failure or defaultfailure - local onlyonce = specification.onlyonce - local files = utilities.parsers.settings_to_array(name) - local truename = environment.truefilename - local done = false - for i=1,#files do - local filename = files[i] - if not loaded[filename] then - if onlyonce then - loaded[filename] = true -- todo: base this on return value - end - for i=1,#patterns do - local somename = format(patterns[i],filename) - if truename then - somename = truename(somename) - end - local foundname = resolvers.getreadfilename("any",".",somename) or "" - if foundname ~= "" then - action(name,foundname) - done = true - break - end - end - if done then - break - end - end - end - if failure and not done then - failure(name) - end - end -end +if not modules then modules = { } end modules ['file-lib'] = { + version = 1.001, + comment = "companion to file-lib.mkvi", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: check all usage of truefilename at the tex end and remove +-- files there (and replace definitions by full names) + +local format = string.format + +local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end) +local report_files = logs.reporter("files","readfile") + +local loaded = { } +local defaultpatterns = { "%s" } + +local function defaultaction(name,foundname) + report_files("asked name %a, found name %a",name,foundname) +end + +local function defaultfailure(name) + report_files("asked name %a, not found",name) +end + +function commands.uselibrary(specification) -- todo; reporter + local name = specification.name + if name and name ~= "" then + local patterns = specification.patterns or defaultpatterns + local action = specification.action or defaultaction + local failure = specification.failure or defaultfailure + local onlyonce = 
specification.onlyonce + local files = utilities.parsers.settings_to_array(name) + local truename = environment.truefilename + local done = false + for i=1,#files do + local filename = files[i] + if not loaded[filename] then + if onlyonce then + loaded[filename] = true -- todo: base this on return value + end + for i=1,#patterns do + local somename = format(patterns[i],filename) + if truename then + somename = truename(somename) + end + local foundname = resolvers.getreadfilename("any",".",somename) or "" + if foundname ~= "" then + action(name,foundname) + done = true + break + end + end + if done then + break + end + end + end + if failure and not done then + failure(name) + end + end +end diff --git a/tex/context/base/file-mod.lua b/tex/context/base/file-mod.lua index 3659d3089..fd31955f8 100644 --- a/tex/context/base/file-mod.lua +++ b/tex/context/base/file-mod.lua @@ -1,181 +1,181 @@ -if not modules then modules = { } end modules ['file-mod'] = { - version = 1.001, - comment = "companion to file-mod.mkvi", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This module will be redone! For instance, the prefixes will move to data-* --- as they arr sort of generic along with home:// etc/. - --- context is not defined yet! todo! (we need to load tupp-fil after cld) --- todo: move startreadingfile to lua and push regime there - ---[[ldx-- -

It's more convenient to manipulate filenames (paths) in - than in . These methods have counterparts -at the side.

---ldx]]-- - -local format, concat, tonumber = string.format, table.concat, tonumber - -local trace_modules = false trackers.register("modules.loading", function(v) trace_modules = v end) - -local report_modules = logs.reporter("resolvers","modules") - -commands = commands or { } -local commands = commands - -local findbyscheme = resolvers.finders.byscheme -- use different one -local iterator = utilities.parsers.iterator - --- modules can have a specific suffix or can specify one - -local prefixes = { "m", "p", "s", "x", "v", "t" } -local suffixes = { "mkvi", "mkiv", "tex", "cld", "lua" } -- order might change and how about cld -local modstatus = { } - -local function usemodule(name,hasscheme) - local foundname - if hasscheme then - -- no auto suffix as http will return a home page or error page - -- so we only add one if missing - local fullname = file.addsuffix(name,"tex") - if trace_modules then - report_modules("checking url %a",fullname) - end - foundname = resolvers.findtexfile(fullname) or "" - elseif file.suffix(name) ~= "" then - if trace_modules then - report_modules("checking file %a",name) - end - foundname = findbyscheme("any",name) or "" - else - for i=1,#suffixes do - local fullname = file.addsuffix(name,suffixes[i]) - if trace_modules then - report_modules("checking file %a",fullname) - end - foundname = findbyscheme("any",fullname) or "" - if foundname ~= "" then - break - end - end - end - if foundname ~= "" then - if trace_modules then - report_modules("loading file %a",foundname) - end - context.startreadingfile() - resolvers.jobs.usefile(foundname,true) -- once, notext - -- context.input(foundname) - context.stopreadingfile() - return true - else - return false - end -end - -function commands.usemodules(prefix,askedname,truename) - local truename = truename or environment.truefilename(askedname) - local hasprefix = prefix and prefix ~= "" - local hashname = ((hasprefix and prefix) or "*") .. "-" .. truename - local status = modstatus[hashname] - if status == 0 then - -- not found - elseif status == 1 then - status = status + 1 - else - if trace_modules then - report_modules("locating, prefix %a, askedname %a, truename %a",prefix,askedname,truename) - end - local hasscheme = url.hasscheme(truename) - if hasscheme then - -- no prefix and suffix done - if usemodule(truename,true) then - status = 1 - else - status = 0 - end - elseif hasprefix then - if usemodule(prefix .. "-" .. truename) then - status = 1 - else - status = 0 - end - else - for i=1,#prefixes do - -- todo: reconstruct name i.e. basename - local thename = prefixes[i] .. "-" .. 
truename - if usemodule(thename) then - status = 1 - break - end - end - if status then - -- ok, don't change - elseif usemodule(truename) then - status = 1 - else - status = 0 - end - end - end - if status == 0 then - report_modules("%a is not found",askedname) - elseif status == 1 then - report_modules("%a is loaded",trace_modules and truename or askedname) - else - report_modules("%a is already loaded",trace_modules and truename or askedname) - end - modstatus[hashname] = status -end - -statistics.register("loaded tex modules", function() - if next(modstatus) then - local t, f, nt, nf = { }, { }, 0, 0 - for k, v in table.sortedhash(modstatus) do - k = file.basename(k) - if v == 0 then - nf = nf + 1 - f[nf] = k - else - nt = nt + 1 - t[nt] = k - end - end - if nf == 0 then - return format("%s requested, all found (%s)",nt,concat(t," ")) - elseif nt == 0 then - return format("%s requested, all missing (%s)",nf,concat(f," ")) - else - return format("%s requested, %s found (%s), %s missing (%s)",nt+nf,nt,concat(t," "),nf,concat(f," ")) - end - else - return nil - end -end) - --- moved from syst-lua.lua: - -local splitter = lpeg.tsplitter(lpeg.S(". "),tonumber) - -function commands.doifolderversionelse(one,two) -- one >= two - if not two then - one, two = environment.version, one - elseif one == "" then - one = environment.version - end - one = lpeg.match(splitter,one) - two = lpeg.match(splitter,two) - one = (one[1] or 0) * 10000 + (one[2] or 0) * 100 + (one[3] or 0) - two = (two[1] or 0) * 10000 + (two[2] or 0) * 100 + (two[3] or 0) - commands.doifelse(one>=two) -end - -function commands.useluamodule(list) - for filename in iterator(list) do - environment.loadluafile(filename) - end -end +if not modules then modules = { } end modules ['file-mod'] = { + version = 1.001, + comment = "companion to file-mod.mkvi", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This module will be redone! For instance, the prefixes will move to data-* +-- as they arr sort of generic along with home:// etc/. + +-- context is not defined yet! todo! (we need to load tupp-fil after cld) +-- todo: move startreadingfile to lua and push regime there + +--[[ldx-- +

It's more convenient to manipulate filenames (paths) in + than in . These methods have counterparts +at the side.

+--ldx]]-- + +local format, concat, tonumber = string.format, table.concat, tonumber + +local trace_modules = false trackers.register("modules.loading", function(v) trace_modules = v end) + +local report_modules = logs.reporter("resolvers","modules") + +commands = commands or { } +local commands = commands + +local findbyscheme = resolvers.finders.byscheme -- use different one +local iterator = utilities.parsers.iterator + +-- modules can have a specific suffix or can specify one + +local prefixes = { "m", "p", "s", "x", "v", "t" } +local suffixes = { "mkvi", "mkiv", "tex", "cld", "lua" } -- order might change and how about cld +local modstatus = { } + +local function usemodule(name,hasscheme) + local foundname + if hasscheme then + -- no auto suffix as http will return a home page or error page + -- so we only add one if missing + local fullname = file.addsuffix(name,"tex") + if trace_modules then + report_modules("checking url %a",fullname) + end + foundname = resolvers.findtexfile(fullname) or "" + elseif file.suffix(name) ~= "" then + if trace_modules then + report_modules("checking file %a",name) + end + foundname = findbyscheme("any",name) or "" + else + for i=1,#suffixes do + local fullname = file.addsuffix(name,suffixes[i]) + if trace_modules then + report_modules("checking file %a",fullname) + end + foundname = findbyscheme("any",fullname) or "" + if foundname ~= "" then + break + end + end + end + if foundname ~= "" then + if trace_modules then + report_modules("loading file %a",foundname) + end + context.startreadingfile() + resolvers.jobs.usefile(foundname,true) -- once, notext + -- context.input(foundname) + context.stopreadingfile() + return true + else + return false + end +end + +function commands.usemodules(prefix,askedname,truename) + local truename = truename or environment.truefilename(askedname) + local hasprefix = prefix and prefix ~= "" + local hashname = ((hasprefix and prefix) or "*") .. "-" .. truename + local status = modstatus[hashname] + if status == 0 then + -- not found + elseif status == 1 then + status = status + 1 + else + if trace_modules then + report_modules("locating, prefix %a, askedname %a, truename %a",prefix,askedname,truename) + end + local hasscheme = url.hasscheme(truename) + if hasscheme then + -- no prefix and suffix done + if usemodule(truename,true) then + status = 1 + else + status = 0 + end + elseif hasprefix then + if usemodule(prefix .. "-" .. truename) then + status = 1 + else + status = 0 + end + else + for i=1,#prefixes do + -- todo: reconstruct name i.e. basename + local thename = prefixes[i] .. "-" .. 
truename + if usemodule(thename) then + status = 1 + break + end + end + if status then + -- ok, don't change + elseif usemodule(truename) then + status = 1 + else + status = 0 + end + end + end + if status == 0 then + report_modules("%a is not found",askedname) + elseif status == 1 then + report_modules("%a is loaded",trace_modules and truename or askedname) + else + report_modules("%a is already loaded",trace_modules and truename or askedname) + end + modstatus[hashname] = status +end + +statistics.register("loaded tex modules", function() + if next(modstatus) then + local t, f, nt, nf = { }, { }, 0, 0 + for k, v in table.sortedhash(modstatus) do + k = file.basename(k) + if v == 0 then + nf = nf + 1 + f[nf] = k + else + nt = nt + 1 + t[nt] = k + end + end + if nf == 0 then + return format("%s requested, all found (%s)",nt,concat(t," ")) + elseif nt == 0 then + return format("%s requested, all missing (%s)",nf,concat(f," ")) + else + return format("%s requested, %s found (%s), %s missing (%s)",nt+nf,nt,concat(t," "),nf,concat(f," ")) + end + else + return nil + end +end) + +-- moved from syst-lua.lua: + +local splitter = lpeg.tsplitter(lpeg.S(". "),tonumber) + +function commands.doifolderversionelse(one,two) -- one >= two + if not two then + one, two = environment.version, one + elseif one == "" then + one = environment.version + end + one = lpeg.match(splitter,one) + two = lpeg.match(splitter,two) + one = (one[1] or 0) * 10000 + (one[2] or 0) * 100 + (one[3] or 0) + two = (two[1] or 0) * 10000 + (two[2] or 0) * 100 + (two[3] or 0) + commands.doifelse(one>=two) +end + +function commands.useluamodule(list) + for filename in iterator(list) do + environment.loadluafile(filename) + end +end diff --git a/tex/context/base/file-res.lua b/tex/context/base/file-res.lua index 8e65ba4c7..6eb8667d6 100644 --- a/tex/context/base/file-res.lua +++ b/tex/context/base/file-res.lua @@ -1,155 +1,155 @@ -if not modules then modules = { } end modules ['file-res'] = { - version = 1.001, - comment = "companion to supp-fil.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format -local isfile = lfs.isfile -local is_qualified_path = file.is_qualified_path -local hasscheme = url.hasscheme - -local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end) -local report_files = logs.reporter("files","readfile") - -resolvers.maxreadlevel = 2 - -directives.register("resolvers.maxreadlevel", function(v) resolvers.maxreadlevel = tonumber(v) or resolvers.maxreadlevel end) - -local finders, loaders, openers = resolvers.finders, resolvers.loaders, resolvers.openers - -local found = { } -- can best be done in the resolver itself - -local function readfilename(specification,backtrack,treetoo) - local name = specification.filename - local fnd = name and found[name] - if not fnd then - local names - local suffix = file.suffix(name) - if suffix ~= "" then - names = { name } - else - local defaultsuffixes = resolvers.defaultsuffixes - names = { } - for i=1,#defaultsuffixes do - names[i] = name .. "." .. 
defaultsuffixes[i] - end - if trace_files then - report_files("locating: %s, using default suffixes: %a",name,defaultsuffixes) - end - end - for i=1,#names do - local fname = names[i] - if isfile(fname) then - if trace_files then - report_files("found local: %s",name) - end - fnd = fname - break - end - end - if not fnd and backtrack then - for i=1,#names do - local fname = names[i] - for i=1,backtrack,1 do - fname = "../" .. fname - if isfile(fname) then - if trace_files then - report_files("found by backtracking: %s",fname) - end - fnd = fname - break - elseif trace_files then - report_files("not found by backtracking: %s",fname) - end - end - if fnd then - break - end - end - end - if not fnd then - local paths = resolvers.instance.extra_paths - if paths then - for i=1,#paths do - for i=1,#names do - local fname = paths[i] .. "/" .. names[i] - if isfile(fname) then - if trace_files then - report_files("found on extra path: %s",fname) - end - fnd = fname - break - end - end - if fnd then - break - end - end - end - end - if not fnd and treetoo then - fnd = resolvers.findtexfile(name) or "" - if trace_files then - if fnd ~= "" then - report_files("found by tree lookup: %s",fnd) - else - report_files("not found by tree lookup: %s",name) - end - end - end - found[name] = fnd - elseif trace_files then - if fnd ~= "" then - report_files("already found: %s",fnd) - else - report_files("already not found: %s",name) - end - end - return fnd or "" -end - ---~ resolvers.readfilename = readfilename -- bonus use getreadfilename instead - -function finders.job(specification) return readfilename(specification,false, false) end -- current path, no backtracking -function finders.loc(specification) return readfilename(specification,resolvers.maxreadlevel,false) end -- current path, backtracking -function finders.sys(specification) return readfilename(specification,false, true ) end -- current path, obeys tex search -function finders.fix(specification) return readfilename(specification,resolvers.maxreadlevel,false) end -- specified path, backtracking -function finders.set(specification) return readfilename(specification,false, false) end -- specified path, no backtracking -function finders.any(specification) return readfilename(specification,resolvers.maxreadlevel,true ) end -- loc job sys - -openers.job = openers.file loaders.job = loaders.file -- default anyway -openers.loc = openers.file loaders.loc = loaders.file -openers.sys = openers.file loaders.sys = loaders.file -openers.fix = openers.file loaders.fix = loaders.file -openers.set = openers.file loaders.set = loaders.file -openers.any = openers.file loaders.any = loaders.file - -function getreadfilename(scheme,path,name) -- better do a split and then pass table - local fullname - if hasscheme(name) or is_qualified_path(name) then - fullname = name - else - fullname = ((path == "") and format("%s:///%s",scheme,name)) or format("%s:///%s/%s",scheme,path,name) - end ---~ print(">>>",fullname) - return resolvers.findtexfile(fullname) or "" -- can be more direct -end - -resolvers.getreadfilename = getreadfilename - -function commands.getreadfilename(scheme,path,name) - context(getreadfilename(scheme,path,name)) -end - --- a name belonging to the run but also honoring qualified - -function commands.locfilename(name) - context(getreadfilename("loc",".",name)) -end - -function commands.doiflocfileelse(name) - commands.doifelse(isfile(getreadfilename("loc",".",name))) -end +if not modules then modules = { } end modules ['file-res'] = { + version = 1.001, + 
comment = "companion to supp-fil.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format +local isfile = lfs.isfile +local is_qualified_path = file.is_qualified_path +local hasscheme = url.hasscheme + +local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end) +local report_files = logs.reporter("files","readfile") + +resolvers.maxreadlevel = 2 + +directives.register("resolvers.maxreadlevel", function(v) resolvers.maxreadlevel = tonumber(v) or resolvers.maxreadlevel end) + +local finders, loaders, openers = resolvers.finders, resolvers.loaders, resolvers.openers + +local found = { } -- can best be done in the resolver itself + +local function readfilename(specification,backtrack,treetoo) + local name = specification.filename + local fnd = name and found[name] + if not fnd then + local names + local suffix = file.suffix(name) + if suffix ~= "" then + names = { name } + else + local defaultsuffixes = resolvers.defaultsuffixes + names = { } + for i=1,#defaultsuffixes do + names[i] = name .. "." .. defaultsuffixes[i] + end + if trace_files then + report_files("locating: %s, using default suffixes: %a",name,defaultsuffixes) + end + end + for i=1,#names do + local fname = names[i] + if isfile(fname) then + if trace_files then + report_files("found local: %s",name) + end + fnd = fname + break + end + end + if not fnd and backtrack then + for i=1,#names do + local fname = names[i] + for i=1,backtrack,1 do + fname = "../" .. fname + if isfile(fname) then + if trace_files then + report_files("found by backtracking: %s",fname) + end + fnd = fname + break + elseif trace_files then + report_files("not found by backtracking: %s",fname) + end + end + if fnd then + break + end + end + end + if not fnd then + local paths = resolvers.instance.extra_paths + if paths then + for i=1,#paths do + for i=1,#names do + local fname = paths[i] .. "/" .. 
names[i] + if isfile(fname) then + if trace_files then + report_files("found on extra path: %s",fname) + end + fnd = fname + break + end + end + if fnd then + break + end + end + end + end + if not fnd and treetoo then + fnd = resolvers.findtexfile(name) or "" + if trace_files then + if fnd ~= "" then + report_files("found by tree lookup: %s",fnd) + else + report_files("not found by tree lookup: %s",name) + end + end + end + found[name] = fnd + elseif trace_files then + if fnd ~= "" then + report_files("already found: %s",fnd) + else + report_files("already not found: %s",name) + end + end + return fnd or "" +end + +--~ resolvers.readfilename = readfilename -- bonus use getreadfilename instead + +function finders.job(specification) return readfilename(specification,false, false) end -- current path, no backtracking +function finders.loc(specification) return readfilename(specification,resolvers.maxreadlevel,false) end -- current path, backtracking +function finders.sys(specification) return readfilename(specification,false, true ) end -- current path, obeys tex search +function finders.fix(specification) return readfilename(specification,resolvers.maxreadlevel,false) end -- specified path, backtracking +function finders.set(specification) return readfilename(specification,false, false) end -- specified path, no backtracking +function finders.any(specification) return readfilename(specification,resolvers.maxreadlevel,true ) end -- loc job sys + +openers.job = openers.file loaders.job = loaders.file -- default anyway +openers.loc = openers.file loaders.loc = loaders.file +openers.sys = openers.file loaders.sys = loaders.file +openers.fix = openers.file loaders.fix = loaders.file +openers.set = openers.file loaders.set = loaders.file +openers.any = openers.file loaders.any = loaders.file + +function getreadfilename(scheme,path,name) -- better do a split and then pass table + local fullname + if hasscheme(name) or is_qualified_path(name) then + fullname = name + else + fullname = ((path == "") and format("%s:///%s",scheme,name)) or format("%s:///%s/%s",scheme,path,name) + end +--~ print(">>>",fullname) + return resolvers.findtexfile(fullname) or "" -- can be more direct +end + +resolvers.getreadfilename = getreadfilename + +function commands.getreadfilename(scheme,path,name) + context(getreadfilename(scheme,path,name)) +end + +-- a name belonging to the run but also honoring qualified + +function commands.locfilename(name) + context(getreadfilename("loc",".",name)) +end + +function commands.doiflocfileelse(name) + commands.doifelse(isfile(getreadfilename("loc",".",name))) +end diff --git a/tex/context/base/file-syn.lua b/tex/context/base/file-syn.lua index 8d913bb37..60dcb462d 100644 --- a/tex/context/base/file-syn.lua +++ b/tex/context/base/file-syn.lua @@ -1,51 +1,51 @@ -if not modules then modules = { } end modules ['file-syn'] = { - version = 1.001, - comment = "companion to file-syn.mkvi", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local report_files = logs.reporter("files") - -environment.filesynonyms = environment.filesynonyms or { } -local filesynonyms = environment.filesynonyms - -local settings_to_array = utilities.parsers.settings_to_array -local findfile = resolvers.findfile - -storage.register("environment/filesynonyms", filesynonyms, "environment.filesynonyms") - -local function truefilename(name) - local realname = filesynonyms[name] or name - if realname ~= name 
then - return truefilename(realname) - else - return realname - end -end - -environment.truefilename = truefilename - -function commands.truefilename(name) - context(truefilename(name)) -end - -function commands.definefilesynonym(name,realname) - local synonym = filesynonyms[name] - if synonym then - interfaces.showmessage("files",1,{ name or "?", realname or "?", synonym or "?" }) - end - filesynonyms[name] = realname -end - -function commands.definefilefallback(name,alternatives) - local names = settings_to_array(alternatives) - for i=1,#names do - local realname = findfile(names[i]) - if realname ~= "" then - filesynonyms[name] = realname - break - end - end -end +if not modules then modules = { } end modules ['file-syn'] = { + version = 1.001, + comment = "companion to file-syn.mkvi", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local report_files = logs.reporter("files") + +environment.filesynonyms = environment.filesynonyms or { } +local filesynonyms = environment.filesynonyms + +local settings_to_array = utilities.parsers.settings_to_array +local findfile = resolvers.findfile + +storage.register("environment/filesynonyms", filesynonyms, "environment.filesynonyms") + +local function truefilename(name) + local realname = filesynonyms[name] or name + if realname ~= name then + return truefilename(realname) + else + return realname + end +end + +environment.truefilename = truefilename + +function commands.truefilename(name) + context(truefilename(name)) +end + +function commands.definefilesynonym(name,realname) + local synonym = filesynonyms[name] + if synonym then + interfaces.showmessage("files",1,{ name or "?", realname or "?", synonym or "?" }) + end + filesynonyms[name] = realname +end + +function commands.definefilefallback(name,alternatives) + local names = settings_to_array(alternatives) + for i=1,#names do + local realname = findfile(names[i]) + if realname ~= "" then + filesynonyms[name] = realname + break + end + end +end diff --git a/tex/context/base/font-afk.lua b/tex/context/base/font-afk.lua index 8b65b0631..bd8d3276b 100644 --- a/tex/context/base/font-afk.lua +++ b/tex/context/base/font-afk.lua @@ -1,200 +1,200 @@ -if not modules then modules = { } end modules ['font-afk'] = { - version = 1.001, - comment = "companion to font-afm.lua", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", - dataonly = true, -} - ---[[ldx-- -

For ligatures, only characters with a code smaller than 128 make sense, -anything larger is encoding dependent. An interesting complication is that a -character can be in an encoding twice but is hashed once.

---ldx]]-- - -local allocate = utilities.storage.allocate - -fonts.handlers.afm.helpdata = { - ligatures = allocate { -- okay, nowadays we could parse the name but type 1 fonts - ['f'] = { -- don't have that many ligatures anyway - { 'f', 'ff' }, - { 'i', 'fi' }, - { 'l', 'fl' }, - }, - ['ff'] = { - { 'i', 'ffi' } - }, - ['fi'] = { - { 'i', 'fii' } - }, - ['fl'] = { - { 'i', 'fli' } - }, - ['s'] = { - { 't', 'st' } - }, - ['i'] = { - { 'j', 'ij' } - }, - }, - texligatures = allocate { - -- ['space'] = { - -- { 'L', 'Lslash' }, - -- { 'l', 'lslash' } - -- }, - -- ['question'] = { - -- { 'quoteleft', 'questiondown' } - -- }, - -- ['exclam'] = { - -- { 'quoteleft', 'exclamdown' } - -- }, - ['quoteleft'] = { - { 'quoteleft', 'quotedblleft' } - }, - ['quoteright'] = { - { 'quoteright', 'quotedblright' } - }, - ['hyphen'] = { - { 'hyphen', 'endash' } - }, - ['endash'] = { - { 'hyphen', 'emdash' } - } - }, - leftkerned = allocate { - AEligature = "A", aeligature = "a", - OEligature = "O", oeligature = "o", - IJligature = "I", ijligature = "i", - AE = "A", ae = "a", - OE = "O", oe = "o", - IJ = "I", ij = "i", - Ssharp = "S", ssharp = "s", - }, - rightkerned = allocate { - AEligature = "E", aeligature = "e", - OEligature = "E", oeligature = "e", - IJligature = "J", ijligature = "j", - AE = "E", ae = "e", - OE = "E", oe = "e", - IJ = "J", ij = "j", - Ssharp = "S", ssharp = "s", - }, - bothkerned = allocate { - Acircumflex = "A", acircumflex = "a", - Ccircumflex = "C", ccircumflex = "c", - Ecircumflex = "E", ecircumflex = "e", - Gcircumflex = "G", gcircumflex = "g", - Hcircumflex = "H", hcircumflex = "h", - Icircumflex = "I", icircumflex = "i", - Jcircumflex = "J", jcircumflex = "j", - Ocircumflex = "O", ocircumflex = "o", - Scircumflex = "S", scircumflex = "s", - Ucircumflex = "U", ucircumflex = "u", - Wcircumflex = "W", wcircumflex = "w", - Ycircumflex = "Y", ycircumflex = "y", - - Agrave = "A", agrave = "a", - Egrave = "E", egrave = "e", - Igrave = "I", igrave = "i", - Ograve = "O", ograve = "o", - Ugrave = "U", ugrave = "u", - Ygrave = "Y", ygrave = "y", - - Atilde = "A", atilde = "a", - Itilde = "I", itilde = "i", - Otilde = "O", otilde = "o", - Utilde = "U", utilde = "u", - Ntilde = "N", ntilde = "n", - - Adiaeresis = "A", adiaeresis = "a", Adieresis = "A", adieresis = "a", - Ediaeresis = "E", ediaeresis = "e", Edieresis = "E", edieresis = "e", - Idiaeresis = "I", idiaeresis = "i", Idieresis = "I", idieresis = "i", - Odiaeresis = "O", odiaeresis = "o", Odieresis = "O", odieresis = "o", - Udiaeresis = "U", udiaeresis = "u", Udieresis = "U", udieresis = "u", - Ydiaeresis = "Y", ydiaeresis = "y", Ydieresis = "Y", ydieresis = "y", - - Aacute = "A", aacute = "a", - Cacute = "C", cacute = "c", - Eacute = "E", eacute = "e", - Iacute = "I", iacute = "i", - Lacute = "L", lacute = "l", - Nacute = "N", nacute = "n", - Oacute = "O", oacute = "o", - Racute = "R", racute = "r", - Sacute = "S", sacute = "s", - Uacute = "U", uacute = "u", - Yacute = "Y", yacute = "y", - Zacute = "Z", zacute = "z", - - Dstroke = "D", dstroke = "d", - Hstroke = "H", hstroke = "h", - Tstroke = "T", tstroke = "t", - - Cdotaccent = "C", cdotaccent = "c", - Edotaccent = "E", edotaccent = "e", - Gdotaccent = "G", gdotaccent = "g", - Idotaccent = "I", idotaccent = "i", - Zdotaccent = "Z", zdotaccent = "z", - - Amacron = "A", amacron = "a", - Emacron = "E", emacron = "e", - Imacron = "I", imacron = "i", - Omacron = "O", omacron = "o", - Umacron = "U", umacron = "u", - - Ccedilla = "C", ccedilla = "c", - Kcedilla = "K", kcedilla = "k", 
- Lcedilla = "L", lcedilla = "l", - Ncedilla = "N", ncedilla = "n", - Rcedilla = "R", rcedilla = "r", - Scedilla = "S", scedilla = "s", - Tcedilla = "T", tcedilla = "t", - - Ohungarumlaut = "O", ohungarumlaut = "o", - Uhungarumlaut = "U", uhungarumlaut = "u", - - Aogonek = "A", aogonek = "a", - Eogonek = "E", eogonek = "e", - Iogonek = "I", iogonek = "i", - Uogonek = "U", uogonek = "u", - - Aring = "A", aring = "a", - Uring = "U", uring = "u", - - Abreve = "A", abreve = "a", - Ebreve = "E", ebreve = "e", - Gbreve = "G", gbreve = "g", - Ibreve = "I", ibreve = "i", - Obreve = "O", obreve = "o", - Ubreve = "U", ubreve = "u", - - Ccaron = "C", ccaron = "c", - Dcaron = "D", dcaron = "d", - Ecaron = "E", ecaron = "e", - Lcaron = "L", lcaron = "l", - Ncaron = "N", ncaron = "n", - Rcaron = "R", rcaron = "r", - Scaron = "S", scaron = "s", - Tcaron = "T", tcaron = "t", - Zcaron = "Z", zcaron = "z", - - dotlessI = "I", dotlessi = "i", - dotlessJ = "J", dotlessj = "j", - - AEligature = "AE", aeligature = "ae", AE = "AE", ae = "ae", - OEligature = "OE", oeligature = "oe", OE = "OE", oe = "oe", - IJligature = "IJ", ijligature = "ij", IJ = "IJ", ij = "ij", - - Lstroke = "L", lstroke = "l", Lslash = "L", lslash = "l", - Ostroke = "O", ostroke = "o", Oslash = "O", oslash = "o", - - Ssharp = "SS", ssharp = "ss", - - Aumlaut = "A", aumlaut = "a", - Eumlaut = "E", eumlaut = "e", - Iumlaut = "I", iumlaut = "i", - Oumlaut = "O", oumlaut = "o", - Uumlaut = "U", uumlaut = "u", - } -} +if not modules then modules = { } end modules ['font-afk'] = { + version = 1.001, + comment = "companion to font-afm.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", + dataonly = true, +} + +--[[ldx-- +

For ligatures, only characters with a code smaller than 128 make sense; +anything larger is encoding dependent. An interesting complication is that a +character can occur in an encoding twice but is hashed only once.
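As a minimal sketch (an editorial illustration, not part of the module), the
ligatures table defined below can be resolved name-wise: look up the current
glyph name, then scan its pairs for the next glyph name.

   -- hypothetical helper, only meant to show how the table is laid out
   local function resolveligature(ligatures,first,second)
       local set = ligatures[first]
       if set then
           for i=1,#set do
               local pair = set[i]
               if pair[1] == second then
                   return pair[2] -- the resulting ligature name
               end
           end
       end
   end

   -- resolveligature(fonts.handlers.afm.helpdata.ligatures,"f","i") --> "fi"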

+--ldx]]-- + +local allocate = utilities.storage.allocate + +fonts.handlers.afm.helpdata = { + ligatures = allocate { -- okay, nowadays we could parse the name but type 1 fonts + ['f'] = { -- don't have that many ligatures anyway + { 'f', 'ff' }, + { 'i', 'fi' }, + { 'l', 'fl' }, + }, + ['ff'] = { + { 'i', 'ffi' } + }, + ['fi'] = { + { 'i', 'fii' } + }, + ['fl'] = { + { 'i', 'fli' } + }, + ['s'] = { + { 't', 'st' } + }, + ['i'] = { + { 'j', 'ij' } + }, + }, + texligatures = allocate { + -- ['space'] = { + -- { 'L', 'Lslash' }, + -- { 'l', 'lslash' } + -- }, + -- ['question'] = { + -- { 'quoteleft', 'questiondown' } + -- }, + -- ['exclam'] = { + -- { 'quoteleft', 'exclamdown' } + -- }, + ['quoteleft'] = { + { 'quoteleft', 'quotedblleft' } + }, + ['quoteright'] = { + { 'quoteright', 'quotedblright' } + }, + ['hyphen'] = { + { 'hyphen', 'endash' } + }, + ['endash'] = { + { 'hyphen', 'emdash' } + } + }, + leftkerned = allocate { + AEligature = "A", aeligature = "a", + OEligature = "O", oeligature = "o", + IJligature = "I", ijligature = "i", + AE = "A", ae = "a", + OE = "O", oe = "o", + IJ = "I", ij = "i", + Ssharp = "S", ssharp = "s", + }, + rightkerned = allocate { + AEligature = "E", aeligature = "e", + OEligature = "E", oeligature = "e", + IJligature = "J", ijligature = "j", + AE = "E", ae = "e", + OE = "E", oe = "e", + IJ = "J", ij = "j", + Ssharp = "S", ssharp = "s", + }, + bothkerned = allocate { + Acircumflex = "A", acircumflex = "a", + Ccircumflex = "C", ccircumflex = "c", + Ecircumflex = "E", ecircumflex = "e", + Gcircumflex = "G", gcircumflex = "g", + Hcircumflex = "H", hcircumflex = "h", + Icircumflex = "I", icircumflex = "i", + Jcircumflex = "J", jcircumflex = "j", + Ocircumflex = "O", ocircumflex = "o", + Scircumflex = "S", scircumflex = "s", + Ucircumflex = "U", ucircumflex = "u", + Wcircumflex = "W", wcircumflex = "w", + Ycircumflex = "Y", ycircumflex = "y", + + Agrave = "A", agrave = "a", + Egrave = "E", egrave = "e", + Igrave = "I", igrave = "i", + Ograve = "O", ograve = "o", + Ugrave = "U", ugrave = "u", + Ygrave = "Y", ygrave = "y", + + Atilde = "A", atilde = "a", + Itilde = "I", itilde = "i", + Otilde = "O", otilde = "o", + Utilde = "U", utilde = "u", + Ntilde = "N", ntilde = "n", + + Adiaeresis = "A", adiaeresis = "a", Adieresis = "A", adieresis = "a", + Ediaeresis = "E", ediaeresis = "e", Edieresis = "E", edieresis = "e", + Idiaeresis = "I", idiaeresis = "i", Idieresis = "I", idieresis = "i", + Odiaeresis = "O", odiaeresis = "o", Odieresis = "O", odieresis = "o", + Udiaeresis = "U", udiaeresis = "u", Udieresis = "U", udieresis = "u", + Ydiaeresis = "Y", ydiaeresis = "y", Ydieresis = "Y", ydieresis = "y", + + Aacute = "A", aacute = "a", + Cacute = "C", cacute = "c", + Eacute = "E", eacute = "e", + Iacute = "I", iacute = "i", + Lacute = "L", lacute = "l", + Nacute = "N", nacute = "n", + Oacute = "O", oacute = "o", + Racute = "R", racute = "r", + Sacute = "S", sacute = "s", + Uacute = "U", uacute = "u", + Yacute = "Y", yacute = "y", + Zacute = "Z", zacute = "z", + + Dstroke = "D", dstroke = "d", + Hstroke = "H", hstroke = "h", + Tstroke = "T", tstroke = "t", + + Cdotaccent = "C", cdotaccent = "c", + Edotaccent = "E", edotaccent = "e", + Gdotaccent = "G", gdotaccent = "g", + Idotaccent = "I", idotaccent = "i", + Zdotaccent = "Z", zdotaccent = "z", + + Amacron = "A", amacron = "a", + Emacron = "E", emacron = "e", + Imacron = "I", imacron = "i", + Omacron = "O", omacron = "o", + Umacron = "U", umacron = "u", + + Ccedilla = "C", ccedilla = "c", + Kcedilla = "K", kcedilla = "k", 
+ Lcedilla = "L", lcedilla = "l", + Ncedilla = "N", ncedilla = "n", + Rcedilla = "R", rcedilla = "r", + Scedilla = "S", scedilla = "s", + Tcedilla = "T", tcedilla = "t", + + Ohungarumlaut = "O", ohungarumlaut = "o", + Uhungarumlaut = "U", uhungarumlaut = "u", + + Aogonek = "A", aogonek = "a", + Eogonek = "E", eogonek = "e", + Iogonek = "I", iogonek = "i", + Uogonek = "U", uogonek = "u", + + Aring = "A", aring = "a", + Uring = "U", uring = "u", + + Abreve = "A", abreve = "a", + Ebreve = "E", ebreve = "e", + Gbreve = "G", gbreve = "g", + Ibreve = "I", ibreve = "i", + Obreve = "O", obreve = "o", + Ubreve = "U", ubreve = "u", + + Ccaron = "C", ccaron = "c", + Dcaron = "D", dcaron = "d", + Ecaron = "E", ecaron = "e", + Lcaron = "L", lcaron = "l", + Ncaron = "N", ncaron = "n", + Rcaron = "R", rcaron = "r", + Scaron = "S", scaron = "s", + Tcaron = "T", tcaron = "t", + Zcaron = "Z", zcaron = "z", + + dotlessI = "I", dotlessi = "i", + dotlessJ = "J", dotlessj = "j", + + AEligature = "AE", aeligature = "ae", AE = "AE", ae = "ae", + OEligature = "OE", oeligature = "oe", OE = "OE", oe = "oe", + IJligature = "IJ", ijligature = "ij", IJ = "IJ", ij = "ij", + + Lstroke = "L", lstroke = "l", Lslash = "L", lslash = "l", + Ostroke = "O", ostroke = "o", Oslash = "O", oslash = "o", + + Ssharp = "SS", ssharp = "ss", + + Aumlaut = "A", aumlaut = "a", + Eumlaut = "E", eumlaut = "e", + Iumlaut = "I", iumlaut = "i", + Oumlaut = "O", oumlaut = "o", + Uumlaut = "U", uumlaut = "u", + } +} diff --git a/tex/context/base/font-afm.lua b/tex/context/base/font-afm.lua index cb0c2438f..58408457e 100644 --- a/tex/context/base/font-afm.lua +++ b/tex/context/base/font-afm.lua @@ -1,971 +1,971 @@ -if not modules then modules = { } end modules ['font-afm'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

Some code may look a bit obscure, but this has to do with the -fact that we also use this code for testing, and much code evolved -in the transition from tfm to afm to otf.

- -

The following code still has traces of intermediate font support -where we handle font encodings. Eventually font encoding will go -away.

---ldx]]-- - -local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers - -local next, type, tonumber = next, type, tonumber -local format, match, gmatch, lower, gsub, strip = string.format, string.match, string.gmatch, string.lower, string.gsub, string.strip -local abs = math.abs -local P, S, C, R, lpegmatch, patterns = lpeg.P, lpeg.S, lpeg.C, lpeg.R, lpeg.match, lpeg.patterns -local derivetable = table.derive - -local trace_features = false trackers.register("afm.features", function(v) trace_features = v end) -local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end) -local trace_loading = false trackers.register("afm.loading", function(v) trace_loading = v end) -local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) - -local report_afm = logs.reporter("fonts","afm loading") - -local findbinfile = resolvers.findbinfile - -local definers = fonts.definers -local readers = fonts.readers -local constructors = fonts.constructors - -local afm = constructors.newhandler("afm") -local pfb = constructors.newhandler("pfb") - -local afmfeatures = constructors.newfeatures("afm") -local registerafmfeature = afmfeatures.register - -afm.version = 1.410 -- incrementing this number one up will force a re-cache -afm.cache = containers.define("fonts", "afm", afm.version, true) -afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*) - -afm.helpdata = { } -- set later on so no local for this -afm.syncspace = true -- when true, nicer stretch values -afm.addligatures = true -- best leave this set to true -afm.addtexligatures = true -- best leave this set to true -afm.addkerns = true -- best leave this set to true - -local function setmode(tfmdata,value) - if value then - tfmdata.properties.mode = lower(value) - end -end - -registerafmfeature { - name = "mode", - description = "mode", - initializers = { - base = setmode, - node = setmode, - } -} - ---[[ldx-- -

We start with the basic reader, which we give a name similar to the -built-in tfm and vf readers.

---ldx]]-- - ---~ Comment FONTIDENTIFIER LMMATHSYMBOLS10 ---~ Comment CODINGSCHEME TEX MATH SYMBOLS ---~ Comment DESIGNSIZE 10.0 pt ---~ Comment CHECKSUM O 4261307036 ---~ Comment SPACE 0 plus 0 minus 0 ---~ Comment QUAD 1000 ---~ Comment EXTRASPACE 0 ---~ Comment NUM 676.508 393.732 443.731 ---~ Comment DENOM 685.951 344.841 ---~ Comment SUP 412.892 362.892 288.889 ---~ Comment SUB 150 247.217 ---~ Comment SUPDROP 386.108 ---~ Comment SUBDROP 50 ---~ Comment DELIM 2390 1010 ---~ Comment AXISHEIGHT 250 - -local comment = P("Comment") -local spacing = patterns.spacer -- S(" \t")^1 -local lineend = patterns.newline -- S("\n\r") -local words = C((1 - lineend)^1) -local number = C((R("09") + S("."))^1) / tonumber * spacing^0 -local data = lpeg.Carg(1) - -local pattern = ( -- needs testing ... not used anyway as we no longer need math afm's - comment * spacing * - ( - data * ( - ("CODINGSCHEME" * spacing * words ) / function(fd,a) end + - ("DESIGNSIZE" * spacing * number * words ) / function(fd,a) fd[ 1] = a end + - ("CHECKSUM" * spacing * number * words ) / function(fd,a) fd[ 2] = a end + - ("SPACE" * spacing * number * "plus" * number * "minus" * number) / function(fd,a,b,c) fd[ 3], fd[ 4], fd[ 5] = a, b, c end + - ("QUAD" * spacing * number ) / function(fd,a) fd[ 6] = a end + - ("EXTRASPACE" * spacing * number ) / function(fd,a) fd[ 7] = a end + - ("NUM" * spacing * number * number * number ) / function(fd,a,b,c) fd[ 8], fd[ 9], fd[10] = a, b, c end + - ("DENOM" * spacing * number * number ) / function(fd,a,b ) fd[11], fd[12] = a, b end + - ("SUP" * spacing * number * number * number ) / function(fd,a,b,c) fd[13], fd[14], fd[15] = a, b, c end + - ("SUB" * spacing * number * number ) / function(fd,a,b) fd[16], fd[17] = a, b end + - ("SUPDROP" * spacing * number ) / function(fd,a) fd[18] = a end + - ("SUBDROP" * spacing * number ) / function(fd,a) fd[19] = a end + - ("DELIM" * spacing * number * number ) / function(fd,a,b) fd[20], fd[21] = a, b end + - ("AXISHEIGHT" * spacing * number ) / function(fd,a) fd[22] = a end - ) - + (1-lineend)^0 - ) - + (1-comment)^1 -)^0 - -local function scan_comment(str) - local fd = { } - lpegmatch(pattern,str,1,fd) - return fd -end - --- On a rainy day I will rewrite this in lpeg ... or we can use the (slower) fontloader --- as in now supports afm/pfb loading but it's not too bad to have different methods --- for testing approaches. 
- -local keys = { } - -function keys.FontName (data,line) data.metadata.fontname = strip (line) -- get rid of spaces - data.metadata.fullname = strip (line) end -function keys.ItalicAngle (data,line) data.metadata.italicangle = tonumber (line) end -function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch = toboolean(line,true) end -function keys.CharWidth (data,line) data.metadata.charwidth = tonumber (line) end -function keys.XHeight (data,line) data.metadata.xheight = tonumber (line) end -function keys.Descender (data,line) data.metadata.descender = tonumber (line) end -function keys.Ascender (data,line) data.metadata.ascender = tonumber (line) end -function keys.Comment (data,line) - -- Comment DesignSize 12 (pts) - -- Comment TFM designsize: 12 (in points) - line = lower(line) - local designsize = match(line,"designsize[^%d]*(%d+)") - if designsize then data.metadata.designsize = tonumber(designsize) end -end - -local function get_charmetrics(data,charmetrics,vector) - local characters = data.characters - local chr, ind = { }, 0 - for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do - if k == 'C' then - v = tonumber(v) - if v < 0 then - ind = ind + 1 -- ? - else - ind = v - end - chr = { - index = ind - } - elseif k == 'WX' then - chr.width = tonumber(v) - elseif k == 'N' then - characters[v] = chr - elseif k == 'B' then - local llx, lly, urx, ury = match(v,"^ *(.-) +(.-) +(.-) +(.-)$") - chr.boundingbox = { tonumber(llx), tonumber(lly), tonumber(urx), tonumber(ury) } - elseif k == 'L' then - local plus, becomes = match(v,"^(.-) +(.-)$") - local ligatures = chr.ligatures - if ligatures then - ligatures[plus] = becomes - else - chr.ligatures = { [plus] = becomes } - end - end - end -end - -local function get_kernpairs(data,kernpairs) - local characters = data.characters - for one, two, value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do - local chr = characters[one] - if chr then - local kerns = chr.kerns - if kerns then - kerns[two] = tonumber(value) - else - chr.kerns = { [two] = tonumber(value) } - end - end - end -end - -local function get_variables(data,fontmetrics) - for key, rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do - local keyhandler = keys[key] - if keyhandler then - keyhandler(data,rest) - end - end -end - -local function get_indexes(data,pfbname) - data.resources.filename = resolvers.unresolve(pfbname) -- no shortcut - local pfbblob = fontloader.open(pfbname) - if pfbblob then - local characters = data.characters - local pfbdata = fontloader.to_table(pfbblob) - if pfbdata then - local glyphs = pfbdata.glyphs - if glyphs then - if trace_loading then - report_afm("getting index data from %a",pfbname) - end - for index, glyph in next, glyphs do - local name = glyph.name - if name then - local char = characters[name] - if char then - if trace_indexing then - report_afm("glyph %a has index %a",name,index) - end - char.index = index - end - end - end - elseif trace_loading then - report_afm("no glyph data in pfb file %a",pfbname) - end - elseif trace_loading then - report_afm("no data in pfb file %a",pfbname) - end - fontloader.close(pfbblob) - elseif trace_loading then - report_afm("invalid pfb file %a",pfbname) - end -end - -local function readafm(filename) - local ok, afmblob, size = resolvers.loadbinfile(filename) -- has logging - if ok and afmblob then - local data = { - resources = { - filename = resolvers.unresolve(filename), - version = afm.version, - creator = "context mkiv", - }, - properties = { - hasitalics = false, - }, - goodies = { - }, - 
metadata = { - filename = file.removesuffix(file.basename(filename)) - }, - characters = { - -- a temporary store - }, - descriptions = { - -- the final store - }, - } - afmblob = gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics", function(charmetrics) - if trace_loading then - report_afm("loading char metrics") - end - get_charmetrics(data,charmetrics,vector) - return "" - end) - afmblob = gsub(afmblob,"StartKernPairs(.-)EndKernPairs", function(kernpairs) - if trace_loading then - report_afm("loading kern pairs") - end - get_kernpairs(data,kernpairs) - return "" - end) - afmblob = gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics", function(version,fontmetrics) - if trace_loading then - report_afm("loading variables") - end - data.afmversion = version - get_variables(data,fontmetrics) - data.fontdimens = scan_comment(fontmetrics) -- todo: all lpeg, no time now - return "" - end) - return data - else - if trace_loading then - report_afm("no valid afm file %a",filename) - end - return nil - end -end - ---[[ldx-- -

We cache files. Caching is taken care of in the loader. We cheat a bit -by adding ligatures and kern information to the afm derived data. That -way we can set them faster when defining a font.

---ldx]]-- - -local addkerns, addligatures, addtexligatures, unify, normalize -- we will implement these later - -function afm.load(filename) - -- hm, for some reasons not resolved yet - filename = resolvers.findfile(filename,'afm') or "" - if filename ~= "" then - local name = file.removesuffix(file.basename(filename)) - local data = containers.read(afm.cache,name) - local attr = lfs.attributes(filename) - local size, time = attr.size or 0, attr.modification or 0 - -- - local pfbfile = file.replacesuffix(name,"pfb") - local pfbname = resolvers.findfile(pfbfile,"pfb") or "" - if pfbname == "" then - pfbname = resolvers.findfile(file.basename(pfbfile),"pfb") or "" - end - local pfbsize, pfbtime = 0, 0 - if pfbname ~= "" then - local attr = lfs.attributes(pfbname) - pfbsize = attr.size or 0 - pfbtime = attr.modification or 0 - end - if not data or data.size ~= size or data.time ~= time or data.pfbsize ~= pfbsize or data.pfbtime ~= pfbtime then - report_afm("reading %a",filename) - data = readafm(filename) - if data then - if pfbname ~= "" then - get_indexes(data,pfbname) - elseif trace_loading then - report_afm("no pfb file for %a",filename) - end - report_afm("unifying %a",filename) - unify(data,filename) - if afm.addligatures then - report_afm("add ligatures") - addligatures(data) - end - if afm.addtexligatures then - report_afm("add tex ligatures") - addtexligatures(data) - end - if afm.addkerns then - report_afm("add extra kerns") - addkerns(data) - end - normalize(data) - report_afm("add tounicode data") - fonts.mappings.addtounicode(data,filename) - data.size = size - data.time = time - data.pfbsize = pfbsize - data.pfbtime = pfbtime - report_afm("saving %a in cache",name) - data = containers.write(afm.cache, name, data) - data = containers.read(afm.cache,name) - end - end - return data - else - return nil - end -end - -local uparser = fonts.mappings.makenameparser() - -unify = function(data, filename) - local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context - local unicodes, names = { }, { } - local private = constructors.privateoffset - local descriptions = data.descriptions - for name, blob in next, data.characters do - local code = unicodevector[name] -- or characters.name_to_unicode[name] - if not code then - code = lpegmatch(uparser,name) - if not code then - code = private - private = private + 1 - report_afm("assigning private slot %U for unknown glyph name %a",code,name) - end - end - local index = blob.index - unicodes[name] = code - names[name] = index - blob.name = name - descriptions[code] = { - boundingbox = blob.boundingbox, - width = blob.width, - kerns = blob.kerns, - index = index, - name = name, - } - end - for unicode, description in next, descriptions do - local kerns = description.kerns - if kerns then - local krn = { } - for name, kern in next, kerns do - local unicode = unicodes[name] - if unicode then - krn[unicode] = kern - else - print(unicode,name) - end - end - description.kerns = krn - end - end - data.characters = nil - local resources = data.resources - local filename = resources.filename or file.removesuffix(file.basename(filename)) - resources.filename = resolvers.unresolve(filename) -- no shortcut - resources.unicodes = unicodes -- name to unicode - resources.marks = { } -- todo - resources.names = names -- name to index - resources.private = private -end - -normalize = function(data) -end - ---[[ldx-- -

These helpers extend the basic table with extra ligatures, texligatures -and extra kerns. This saves quite some lookups later.

---ldx]]-- - -local addthem = function(rawdata,ligatures) - if ligatures then - local descriptions = rawdata.descriptions - local resources = rawdata.resources - local unicodes = resources.unicodes - local names = resources.names - for ligname, ligdata in next, ligatures do - local one = descriptions[unicodes[ligname]] - if one then - for _, pair in next, ligdata do - local two, three = unicodes[pair[1]], unicodes[pair[2]] - if two and three then - local ol = one.ligatures - if ol then - if not ol[two] then - ol[two] = three - end - else - one.ligatures = { [two] = three } - end - end - end - end - end - end -end - -addligatures = function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end -addtexligatures = function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end - ---[[ldx-- -

We keep the extra kerns in separate kerning tables so that we can use -them selectively.

---ldx]]-- - --- This is rather old code (from the beginning when we had only tfm). If --- we unify the afm data (now we have names all over the place) then --- we can use shcodes but there will be many more looping then. But we --- could get rid of the tables in char-cmp then. Als, in the generic version --- we don't use the character database. (Ok, we can have a context specific --- variant). - -addkerns = function(rawdata) -- using shcodes is not robust here - local descriptions = rawdata.descriptions - local resources = rawdata.resources - local unicodes = resources.unicodes - local function do_it_left(what) - if what then - for unicode, description in next, descriptions do - local kerns = description.kerns - if kerns then - local extrakerns - for complex, simple in next, what do - complex = unicodes[complex] - simple = unicodes[simple] - if complex and simple then - local ks = kerns[simple] - if ks and not kerns[complex] then - if extrakerns then - extrakerns[complex] = ks - else - extrakerns = { [complex] = ks } - end - end - end - end - if extrakerns then - description.extrakerns = extrakerns - end - end - end - end - end - local function do_it_copy(what) - if what then - for complex, simple in next, what do - complex = unicodes[complex] - simple = unicodes[simple] - if complex and simple then - local complexdescription = descriptions[complex] - if complexdescription then -- optional - local simpledescription = descriptions[complex] - if simpledescription then - local extrakerns - local kerns = simpledescription.kerns - if kerns then - for unicode, kern in next, kerns do - if extrakerns then - extrakerns[unicode] = kern - else - extrakerns = { [unicode] = kern } - end - end - end - local extrakerns = simpledescription.extrakerns - if extrakerns then - for unicode, kern in next, extrakerns do - if extrakerns then - extrakerns[unicode] = kern - else - extrakerns = { [unicode] = kern } - end - end - end - if extrakerns then - complexdescription.extrakerns = extrakerns - end - end - end - end - end - end - end - -- add complex with values of simplified when present - do_it_left(afm.helpdata.leftkerned) - do_it_left(afm.helpdata.bothkerned) - -- copy kerns from simple char to complex char unless set - do_it_copy(afm.helpdata.bothkerned) - do_it_copy(afm.helpdata.rightkerned) -end - ---[[ldx-- -

The copying routine looks messy (and is indeed a bit messy).

---ldx]]-- - -local function adddimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name - if data then - for unicode, description in next, data.descriptions do - local bb = description.boundingbox - if bb then - local ht, dp = bb[4], -bb[2] - if ht == 0 or ht < 0 then - -- no need to set it and no negative heights, nil == 0 - else - description.height = ht - end - if dp == 0 or dp < 0 then - -- no negative depths and no negative depths, nil == 0 - else - description.depth = dp - end - end - end - end -end - -local function copytotfm(data) - if data and data.descriptions then - local metadata = data.metadata - local resources = data.resources - local properties = derivetable(data.properties) - local descriptions = derivetable(data.descriptions) - local goodies = derivetable(data.goodies) - local characters = { } - local parameters = { } - local unicodes = resources.unicodes - -- - for unicode, description in next, data.descriptions do -- use parent table - characters[unicode] = { } - end - -- - local filename = constructors.checkedfilename(resources) - local fontname = metadata.fontname or metadata.fullname - local fullname = metadata.fullname or metadata.fontname - local endash = unicodes['space'] - local emdash = unicodes['emdash'] - local spacer = "space" - local spaceunits = 500 - -- - local monospaced = metadata.isfixedpitch - local charwidth = metadata.charwidth - local italicangle = metadata.italicangle - local charxheight = metadata.xheight and metadata.xheight > 0 and metadata.xheight - properties.monospaced = monospaced - parameters.italicangle = italicangle - parameters.charwidth = charwidth - parameters.charxheight = charxheight - -- same as otf - if properties.monospaced then - if descriptions[endash] then - spaceunits, spacer = descriptions[endash].width, "space" - end - if not spaceunits and descriptions[emdash] then - spaceunits, spacer = descriptions[emdash].width, "emdash" - end - if not spaceunits and charwidth then - spaceunits, spacer = charwidth, "charwidth" - end - else - if descriptions[endash] then - spaceunits, spacer = descriptions[endash].width, "space" - end - if not spaceunits and charwidth then - spaceunits, spacer = charwidth, "charwidth" - end - end - spaceunits = tonumber(spaceunits) - if spaceunits < 200 then - -- todo: warning - end - -- - parameters.slant = 0 - parameters.space = spaceunits - parameters.space_stretch = 500 - parameters.space_shrink = 333 - parameters.x_height = 400 - parameters.quad = 1000 - -- - if italicangle then - parameters.italicangle = italicangle - parameters.italicfactor = math.cos(math.rad(90+italicangle)) - parameters.slant = - math.round(math.tan(italicangle*math.pi/180)) - end - if monospaced then - parameters.space_stretch = 0 - parameters.space_shrink = 0 - elseif afm.syncspace then - parameters.space_stretch = spaceunits/2 - parameters.space_shrink = spaceunits/3 - end - parameters.extra_space = parameters.space_shrink - if charxheight then - parameters.x_height = charxheight - else - -- same as otf - local x = unicodes['x'] - if x then - local x = descriptions[x] - if x then - parameters.x_height = x.height - end - end - -- - end - local fd = data.fontdimens - if fd and fd[8] and fd[9] and fd[10] then -- math - for k,v in next, fd do - parameters[k] = v - end - end - -- - parameters.designsize = (metadata.designsize or 10)*65536 - parameters.ascender = abs(metadata.ascender or 0) - parameters.descender = abs(metadata.descender or 0) - parameters.units = 1000 - -- - properties.spacer = 
spacer - properties.encodingbytes = 2 - properties.format = fonts.formats[filename] or "type1" - properties.filename = filename - properties.fontname = fontname - properties.fullname = fullname - properties.psname = fullname - properties.name = filename or fullname or fontname - -- - if next(characters) then - return { - characters = characters, - descriptions = descriptions, - parameters = parameters, - resources = resources, - properties = properties, - goodies = goodies, - } - end - end - return nil -end - ---[[ldx-- -

Originally we had features kind of hard coded for -afm files, but since I expect to support more font formats, I decided -to treat this font format like any other and handle features in a -more configurable way.

---ldx]]-- - -function afm.setfeatures(tfmdata,features) - local okay = constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) - if okay then - return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) - else - return { } -- will become false - end -end - -local function checkfeatures(specification) -end - -local function afmtotfm(specification) - local afmname = specification.filename or specification.name - if specification.forced == "afm" or specification.format == "afm" then -- move this one up - if trace_loading then - report_afm("forcing afm format for %a",afmname) - end - else - local tfmname = findbinfile(afmname,"ofm") or "" - if tfmname ~= "" then - if trace_loading then - report_afm("fallback from afm to tfm for %a",afmname) - end - return -- just that - end - end - if afmname ~= "" then - -- weird, isn't this already done then? - local features = constructors.checkedfeatures("afm",specification.features.normal) - specification.features.normal = features - constructors.hashinstance(specification,true) -- also weird here - -- - specification = definers.resolve(specification) -- new, was forgotten - local cache_id = specification.hash - local tfmdata = containers.read(constructors.cache, cache_id) -- cache with features applied - if not tfmdata then - local rawdata = afm.load(afmname) - if rawdata and next(rawdata) then - adddimensions(rawdata) - tfmdata = copytotfm(rawdata) - if tfmdata and next(tfmdata) then - local shared = tfmdata.shared - if not shared then - shared = { } - tfmdata.shared = shared - end - shared.rawdata = rawdata - shared.features = features - shared.processes = afm.setfeatures(tfmdata,features) - end - elseif trace_loading then - report_afm("no (valid) afm file found with name %a",afmname) - end - tfmdata = containers.write(constructors.cache,cache_id,tfmdata) - end - return tfmdata - end -end - ---[[ldx-- -

As soon as we could intercept the tfm reader, I implemented an -afm reader. Since traditional TeX could use pk -fonts with tfm companions, the following method could also handle -those cases, but now that we can handle afm directly we no longer -need this feature.

---ldx]]-- - -local function read_from_afm(specification) - local tfmdata = afmtotfm(specification) - if tfmdata then - tfmdata.properties.name = specification.name - tfmdata = constructors.scale(tfmdata, specification) - local allfeatures = tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm) - fonts.loggers.register(tfmdata,'afm',specification) - end - return tfmdata -end - ---[[ldx-- -

Here comes the implementation of a few features. We only implement -those that make sense for this format.

---ldx]]-- - -local function prepareligatures(tfmdata,ligatures,value) - if value then - local descriptions = tfmdata.descriptions - for unicode, character in next, tfmdata.characters do - local description = descriptions[unicode] - local dligatures = description.ligatures - if dligatures then - local cligatures = character.ligatures - if not cligatures then - cligatures = { } - character.ligatures = cligatures - end - for unicode, ligature in next, dligatures do - cligatures[unicode] = { - char = ligature, - type = 0 - } - end - end - end - end -end - -local function preparekerns(tfmdata,kerns,value) - if value then - local rawdata = tfmdata.shared.rawdata - local resources = rawdata.resources - local unicodes = resources.unicodes - local descriptions = tfmdata.descriptions - for u, chr in next, tfmdata.characters do - local d = descriptions[u] - local newkerns = d[kerns] - if newkerns then - local kerns = chr.kerns - if not kerns then - kerns = { } - chr.kerns = kerns - end - for k,v in next, newkerns do - local uk = unicodes[k] - if uk then - kerns[uk] = v - end - end - end - end - end -end - -local list = { - -- [0x0022] = 0x201D, - [0x0027] = 0x2019, - -- [0x0060] = 0x2018, -} - -local function texreplacements(tfmdata,value) - local descriptions = tfmdata.descriptions - local characters = tfmdata.characters - for k, v in next, list do - characters [k] = characters [v] -- we forget about kerns - descriptions[k] = descriptions[v] -- we forget about kerns - end -end - -local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures', value) end -local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end -local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns', value) end -local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns', value) end - -registerafmfeature { - name = "liga", - description = "traditional ligatures", - initializers = { - base = ligatures, - node = ligatures, - } -} - -registerafmfeature { - name = "kern", - description = "intercharacter kerning", - initializers = { - base = kerns, - node = kerns, - } -} - -registerafmfeature { - name = "extrakerns", - description = "additional intercharacter kerning", - initializers = { - base = extrakerns, - node = extrakerns, - } -} - -registerafmfeature { - name = 'tlig', - description = 'tex ligatures', - initializers = { - base = texligatures, - node = texligatures, - } -} - -registerafmfeature { - name = 'trep', - description = 'tex replacements', - initializers = { - base = texreplacements, - node = texreplacements, - } -} - --- readers - -local check_tfm = readers.check_tfm - -fonts.formats.afm = "type1" -fonts.formats.pfb = "type1" - -local function check_afm(specification,fullname) - local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure - if foundname == "" then - foundname = fonts.names.getfilename(fullname,"afm") or "" - end - if foundname == "" and afm.autoprefixed then - local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.* - if encoding and shortname and fonts.encodings.known[encoding] then - shortname = findbinfile(shortname,'afm') or "" -- just to be sure - if shortname ~= "" then - foundname = shortname - if trace_defining then - report_afm("stripping encoding prefix from filename %a",afmname) - end - end - end - end - if foundname ~= "" then - specification.filename = foundname - specification.format = "afm" - return read_from_afm(specification) - end -end - -function 
readers.afm(specification,method) - local fullname, tfmdata = specification.filename or "", nil - if fullname == "" then - local forced = specification.forced or "" - if forced ~= "" then - tfmdata = check_afm(specification,specification.name .. "." .. forced) - end - if not tfmdata then - method = method or definers.method or "afm or tfm" - if method == "tfm" then - tfmdata = check_tfm(specification,specification.name) - elseif method == "afm" then - tfmdata = check_afm(specification,specification.name) - elseif method == "tfm or afm" then - tfmdata = check_tfm(specification,specification.name) or check_afm(specification,specification.name) - else -- method == "afm or tfm" or method == "" then - tfmdata = check_afm(specification,specification.name) or check_tfm(specification,specification.name) - end - end - else - tfmdata = check_afm(specification,fullname) - end - return tfmdata -end - -function readers.pfb(specification,method) -- only called when forced - local original = specification.specification - if trace_defining then - report_afm("using afm reader for %a",original) - end - specification.specification = gsub(original,"%.pfb",".afm") - specification.forced = "afm" - return readers.afm(specification,method) -end +if not modules then modules = { } end modules ['font-afm'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

Some code may look a bit obscure, but this has to do with the +fact that we also use this code for testing, and much code evolved +in the transition from tfm to afm to otf.

+ +

The following code still has traces of intermediate font support +where we handle font encodings. Eventually font encoding will go +away.

+--ldx]]-- + +local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers + +local next, type, tonumber = next, type, tonumber +local format, match, gmatch, lower, gsub, strip = string.format, string.match, string.gmatch, string.lower, string.gsub, string.strip +local abs = math.abs +local P, S, C, R, lpegmatch, patterns = lpeg.P, lpeg.S, lpeg.C, lpeg.R, lpeg.match, lpeg.patterns +local derivetable = table.derive + +local trace_features = false trackers.register("afm.features", function(v) trace_features = v end) +local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end) +local trace_loading = false trackers.register("afm.loading", function(v) trace_loading = v end) +local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) + +local report_afm = logs.reporter("fonts","afm loading") + +local findbinfile = resolvers.findbinfile + +local definers = fonts.definers +local readers = fonts.readers +local constructors = fonts.constructors + +local afm = constructors.newhandler("afm") +local pfb = constructors.newhandler("pfb") + +local afmfeatures = constructors.newfeatures("afm") +local registerafmfeature = afmfeatures.register + +afm.version = 1.410 -- incrementing this number one up will force a re-cache +afm.cache = containers.define("fonts", "afm", afm.version, true) +afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*) + +afm.helpdata = { } -- set later on so no local for this +afm.syncspace = true -- when true, nicer stretch values +afm.addligatures = true -- best leave this set to true +afm.addtexligatures = true -- best leave this set to true +afm.addkerns = true -- best leave this set to true + +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode = lower(value) + end +end + +registerafmfeature { + name = "mode", + description = "mode", + initializers = { + base = setmode, + node = setmode, + } +} + +--[[ldx-- +

We start with the basic reader, which we give a name similar to the +built-in tfm and vf readers.
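As an illustration only (an editorial sketch, assuming the local scanner
defined below), feeding a few such Comment lines to the fontdimen parser
puts the design size in slot 1 and the quad in slot 6 of the result:

   -- hypothetical usage of the comment scanner
   -- local fd = scan_comment("Comment DESIGNSIZE 10.0 pt\nComment QUAD 1000\n")
   -- print(fd[1],fd[6]) --> 10  1000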

+--ldx]]-- + +--~ Comment FONTIDENTIFIER LMMATHSYMBOLS10 +--~ Comment CODINGSCHEME TEX MATH SYMBOLS +--~ Comment DESIGNSIZE 10.0 pt +--~ Comment CHECKSUM O 4261307036 +--~ Comment SPACE 0 plus 0 minus 0 +--~ Comment QUAD 1000 +--~ Comment EXTRASPACE 0 +--~ Comment NUM 676.508 393.732 443.731 +--~ Comment DENOM 685.951 344.841 +--~ Comment SUP 412.892 362.892 288.889 +--~ Comment SUB 150 247.217 +--~ Comment SUPDROP 386.108 +--~ Comment SUBDROP 50 +--~ Comment DELIM 2390 1010 +--~ Comment AXISHEIGHT 250 + +local comment = P("Comment") +local spacing = patterns.spacer -- S(" \t")^1 +local lineend = patterns.newline -- S("\n\r") +local words = C((1 - lineend)^1) +local number = C((R("09") + S("."))^1) / tonumber * spacing^0 +local data = lpeg.Carg(1) + +local pattern = ( -- needs testing ... not used anyway as we no longer need math afm's + comment * spacing * + ( + data * ( + ("CODINGSCHEME" * spacing * words ) / function(fd,a) end + + ("DESIGNSIZE" * spacing * number * words ) / function(fd,a) fd[ 1] = a end + + ("CHECKSUM" * spacing * number * words ) / function(fd,a) fd[ 2] = a end + + ("SPACE" * spacing * number * "plus" * number * "minus" * number) / function(fd,a,b,c) fd[ 3], fd[ 4], fd[ 5] = a, b, c end + + ("QUAD" * spacing * number ) / function(fd,a) fd[ 6] = a end + + ("EXTRASPACE" * spacing * number ) / function(fd,a) fd[ 7] = a end + + ("NUM" * spacing * number * number * number ) / function(fd,a,b,c) fd[ 8], fd[ 9], fd[10] = a, b, c end + + ("DENOM" * spacing * number * number ) / function(fd,a,b ) fd[11], fd[12] = a, b end + + ("SUP" * spacing * number * number * number ) / function(fd,a,b,c) fd[13], fd[14], fd[15] = a, b, c end + + ("SUB" * spacing * number * number ) / function(fd,a,b) fd[16], fd[17] = a, b end + + ("SUPDROP" * spacing * number ) / function(fd,a) fd[18] = a end + + ("SUBDROP" * spacing * number ) / function(fd,a) fd[19] = a end + + ("DELIM" * spacing * number * number ) / function(fd,a,b) fd[20], fd[21] = a, b end + + ("AXISHEIGHT" * spacing * number ) / function(fd,a) fd[22] = a end + ) + + (1-lineend)^0 + ) + + (1-comment)^1 +)^0 + +local function scan_comment(str) + local fd = { } + lpegmatch(pattern,str,1,fd) + return fd +end + +-- On a rainy day I will rewrite this in lpeg ... or we can use the (slower) fontloader +-- as in now supports afm/pfb loading but it's not too bad to have different methods +-- for testing approaches. 
+ +local keys = { } + +function keys.FontName (data,line) data.metadata.fontname = strip (line) -- get rid of spaces + data.metadata.fullname = strip (line) end +function keys.ItalicAngle (data,line) data.metadata.italicangle = tonumber (line) end +function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch = toboolean(line,true) end +function keys.CharWidth (data,line) data.metadata.charwidth = tonumber (line) end +function keys.XHeight (data,line) data.metadata.xheight = tonumber (line) end +function keys.Descender (data,line) data.metadata.descender = tonumber (line) end +function keys.Ascender (data,line) data.metadata.ascender = tonumber (line) end +function keys.Comment (data,line) + -- Comment DesignSize 12 (pts) + -- Comment TFM designsize: 12 (in points) + line = lower(line) + local designsize = match(line,"designsize[^%d]*(%d+)") + if designsize then data.metadata.designsize = tonumber(designsize) end +end + +local function get_charmetrics(data,charmetrics,vector) + local characters = data.characters + local chr, ind = { }, 0 + for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do + if k == 'C' then + v = tonumber(v) + if v < 0 then + ind = ind + 1 -- ? + else + ind = v + end + chr = { + index = ind + } + elseif k == 'WX' then + chr.width = tonumber(v) + elseif k == 'N' then + characters[v] = chr + elseif k == 'B' then + local llx, lly, urx, ury = match(v,"^ *(.-) +(.-) +(.-) +(.-)$") + chr.boundingbox = { tonumber(llx), tonumber(lly), tonumber(urx), tonumber(ury) } + elseif k == 'L' then + local plus, becomes = match(v,"^(.-) +(.-)$") + local ligatures = chr.ligatures + if ligatures then + ligatures[plus] = becomes + else + chr.ligatures = { [plus] = becomes } + end + end + end +end + +local function get_kernpairs(data,kernpairs) + local characters = data.characters + for one, two, value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do + local chr = characters[one] + if chr then + local kerns = chr.kerns + if kerns then + kerns[two] = tonumber(value) + else + chr.kerns = { [two] = tonumber(value) } + end + end + end +end + +local function get_variables(data,fontmetrics) + for key, rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do + local keyhandler = keys[key] + if keyhandler then + keyhandler(data,rest) + end + end +end + +local function get_indexes(data,pfbname) + data.resources.filename = resolvers.unresolve(pfbname) -- no shortcut + local pfbblob = fontloader.open(pfbname) + if pfbblob then + local characters = data.characters + local pfbdata = fontloader.to_table(pfbblob) + if pfbdata then + local glyphs = pfbdata.glyphs + if glyphs then + if trace_loading then + report_afm("getting index data from %a",pfbname) + end + for index, glyph in next, glyphs do + local name = glyph.name + if name then + local char = characters[name] + if char then + if trace_indexing then + report_afm("glyph %a has index %a",name,index) + end + char.index = index + end + end + end + elseif trace_loading then + report_afm("no glyph data in pfb file %a",pfbname) + end + elseif trace_loading then + report_afm("no data in pfb file %a",pfbname) + end + fontloader.close(pfbblob) + elseif trace_loading then + report_afm("invalid pfb file %a",pfbname) + end +end + +local function readafm(filename) + local ok, afmblob, size = resolvers.loadbinfile(filename) -- has logging + if ok and afmblob then + local data = { + resources = { + filename = resolvers.unresolve(filename), + version = afm.version, + creator = "context mkiv", + }, + properties = { + hasitalics = false, + }, + goodies = { + }, + 
metadata = { + filename = file.removesuffix(file.basename(filename)) + }, + characters = { + -- a temporary store + }, + descriptions = { + -- the final store + }, + } + afmblob = gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics", function(charmetrics) + if trace_loading then + report_afm("loading char metrics") + end + get_charmetrics(data,charmetrics,vector) + return "" + end) + afmblob = gsub(afmblob,"StartKernPairs(.-)EndKernPairs", function(kernpairs) + if trace_loading then + report_afm("loading kern pairs") + end + get_kernpairs(data,kernpairs) + return "" + end) + afmblob = gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics", function(version,fontmetrics) + if trace_loading then + report_afm("loading variables") + end + data.afmversion = version + get_variables(data,fontmetrics) + data.fontdimens = scan_comment(fontmetrics) -- todo: all lpeg, no time now + return "" + end) + return data + else + if trace_loading then + report_afm("no valid afm file %a",filename) + end + return nil + end +end + +--[[ldx-- +

We cache files. Caching is taken care of in the loader. We cheat a bit +by adding ligatures and kern information to the afm derived data. That +way we can set them faster when defining a font.
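A hedged sketch of the invalidation test used in afm.load below: the cached
table is only reused when the size and modification time of both the afm file
and its pfb companion still match what was stored with it.

   -- illustrative helper (hypothetical name), mirroring the test in afm.load
   local function cacheisvalid(data,size,time,pfbsize,pfbtime)
       return data ~= nil
          and data.size    == size
          and data.time    == time
          and data.pfbsize == pfbsize
          and data.pfbtime == pfbtime
   end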

+--ldx]]-- + +local addkerns, addligatures, addtexligatures, unify, normalize -- we will implement these later + +function afm.load(filename) + -- hm, for some reasons not resolved yet + filename = resolvers.findfile(filename,'afm') or "" + if filename ~= "" then + local name = file.removesuffix(file.basename(filename)) + local data = containers.read(afm.cache,name) + local attr = lfs.attributes(filename) + local size, time = attr.size or 0, attr.modification or 0 + -- + local pfbfile = file.replacesuffix(name,"pfb") + local pfbname = resolvers.findfile(pfbfile,"pfb") or "" + if pfbname == "" then + pfbname = resolvers.findfile(file.basename(pfbfile),"pfb") or "" + end + local pfbsize, pfbtime = 0, 0 + if pfbname ~= "" then + local attr = lfs.attributes(pfbname) + pfbsize = attr.size or 0 + pfbtime = attr.modification or 0 + end + if not data or data.size ~= size or data.time ~= time or data.pfbsize ~= pfbsize or data.pfbtime ~= pfbtime then + report_afm("reading %a",filename) + data = readafm(filename) + if data then + if pfbname ~= "" then + get_indexes(data,pfbname) + elseif trace_loading then + report_afm("no pfb file for %a",filename) + end + report_afm("unifying %a",filename) + unify(data,filename) + if afm.addligatures then + report_afm("add ligatures") + addligatures(data) + end + if afm.addtexligatures then + report_afm("add tex ligatures") + addtexligatures(data) + end + if afm.addkerns then + report_afm("add extra kerns") + addkerns(data) + end + normalize(data) + report_afm("add tounicode data") + fonts.mappings.addtounicode(data,filename) + data.size = size + data.time = time + data.pfbsize = pfbsize + data.pfbtime = pfbtime + report_afm("saving %a in cache",name) + data = containers.write(afm.cache, name, data) + data = containers.read(afm.cache,name) + end + end + return data + else + return nil + end +end + +local uparser = fonts.mappings.makenameparser() + +unify = function(data, filename) + local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context + local unicodes, names = { }, { } + local private = constructors.privateoffset + local descriptions = data.descriptions + for name, blob in next, data.characters do + local code = unicodevector[name] -- or characters.name_to_unicode[name] + if not code then + code = lpegmatch(uparser,name) + if not code then + code = private + private = private + 1 + report_afm("assigning private slot %U for unknown glyph name %a",code,name) + end + end + local index = blob.index + unicodes[name] = code + names[name] = index + blob.name = name + descriptions[code] = { + boundingbox = blob.boundingbox, + width = blob.width, + kerns = blob.kerns, + index = index, + name = name, + } + end + for unicode, description in next, descriptions do + local kerns = description.kerns + if kerns then + local krn = { } + for name, kern in next, kerns do + local unicode = unicodes[name] + if unicode then + krn[unicode] = kern + else + print(unicode,name) + end + end + description.kerns = krn + end + end + data.characters = nil + local resources = data.resources + local filename = resources.filename or file.removesuffix(file.basename(filename)) + resources.filename = resolvers.unresolve(filename) -- no shortcut + resources.unicodes = unicodes -- name to unicode + resources.marks = { } -- todo + resources.names = names -- name to index + resources.private = private +end + +normalize = function(data) +end + +--[[ldx-- +

These helpers extend the basic table with extra ligatures, texligatures +and extra kerns. This saves quite some lookups later.
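For instance (made-up numbers, editorial illustration), a helpdata entry like
['f'] = { { 'i', 'fi' } } ends up, after unification, as a ligatures table on
the description of f, keyed by the unicode of i and pointing to U+FB01:

   -- hypothetical shape of a description once addligatures has run
   local description_of_f = {
       name      = "f",
       width     = 333,                   -- made-up value
       ligatures = { [0x0069] = 0xFB01 }, -- 'i' maps to the fi ligature
   }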

+--ldx]]-- + +local addthem = function(rawdata,ligatures) + if ligatures then + local descriptions = rawdata.descriptions + local resources = rawdata.resources + local unicodes = resources.unicodes + local names = resources.names + for ligname, ligdata in next, ligatures do + local one = descriptions[unicodes[ligname]] + if one then + for _, pair in next, ligdata do + local two, three = unicodes[pair[1]], unicodes[pair[2]] + if two and three then + local ol = one.ligatures + if ol then + if not ol[two] then + ol[two] = three + end + else + one.ligatures = { [two] = three } + end + end + end + end + end + end +end + +addligatures = function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end +addtexligatures = function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end + +--[[ldx-- +

We keep the extra kerns in separate kerning tables so that we can use +them selectively.
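A simplified sketch of the rule applied for the left and both sets below: when
a glyph already kerns with a base character such as A, it gets the same amount
as an extra kern with the composed Agrave, unless a kern with Agrave exists.

   -- minimal illustration of the derivation (hypothetical function name)
   local function deriveextrakern(kerns,extrakerns,simple,complex)
       local k = kerns[simple]
       if k and not kerns[complex] then
           extrakerns[complex] = k
       end
       return extrakerns
   end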

+--ldx]]-- + +-- This is rather old code (from the beginning when we had only tfm). If +-- we unify the afm data (now we have names all over the place) then +-- we can use shcodes but there will be many more looping then. But we +-- could get rid of the tables in char-cmp then. Als, in the generic version +-- we don't use the character database. (Ok, we can have a context specific +-- variant). + +addkerns = function(rawdata) -- using shcodes is not robust here + local descriptions = rawdata.descriptions + local resources = rawdata.resources + local unicodes = resources.unicodes + local function do_it_left(what) + if what then + for unicode, description in next, descriptions do + local kerns = description.kerns + if kerns then + local extrakerns + for complex, simple in next, what do + complex = unicodes[complex] + simple = unicodes[simple] + if complex and simple then + local ks = kerns[simple] + if ks and not kerns[complex] then + if extrakerns then + extrakerns[complex] = ks + else + extrakerns = { [complex] = ks } + end + end + end + end + if extrakerns then + description.extrakerns = extrakerns + end + end + end + end + end + local function do_it_copy(what) + if what then + for complex, simple in next, what do + complex = unicodes[complex] + simple = unicodes[simple] + if complex and simple then + local complexdescription = descriptions[complex] + if complexdescription then -- optional + local simpledescription = descriptions[complex] + if simpledescription then + local extrakerns + local kerns = simpledescription.kerns + if kerns then + for unicode, kern in next, kerns do + if extrakerns then + extrakerns[unicode] = kern + else + extrakerns = { [unicode] = kern } + end + end + end + local extrakerns = simpledescription.extrakerns + if extrakerns then + for unicode, kern in next, extrakerns do + if extrakerns then + extrakerns[unicode] = kern + else + extrakerns = { [unicode] = kern } + end + end + end + if extrakerns then + complexdescription.extrakerns = extrakerns + end + end + end + end + end + end + end + -- add complex with values of simplified when present + do_it_left(afm.helpdata.leftkerned) + do_it_left(afm.helpdata.bothkerned) + -- copy kerns from simple char to complex char unless set + do_it_copy(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.rightkerned) +end + +--[[ldx-- +

The copying routine looks messy (and is indeed a bit messy).
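As a numeric illustration of the space handling in the copier below (a sketch
using the defaults from this file): with spaceunits of 500 and syncspace
enabled, the stretch becomes 250, the shrink about 167, and extra_space
follows the shrink.

   -- sketch of the space parameter derivation used in copytotfm
   local function spaceparameters(spaceunits,monospaced,syncspace)
       local stretch, shrink = 500, 333 -- the hard defaults
       if monospaced then
           stretch, shrink = 0, 0
       elseif syncspace then
           stretch, shrink = spaceunits/2, spaceunits/3
       end
       return spaceunits, stretch, shrink, shrink -- space, stretch, shrink, extra
   end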

+--ldx]]-- + +local function adddimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name + if data then + for unicode, description in next, data.descriptions do + local bb = description.boundingbox + if bb then + local ht, dp = bb[4], -bb[2] + if ht == 0 or ht < 0 then + -- no need to set it and no negative heights, nil == 0 + else + description.height = ht + end + if dp == 0 or dp < 0 then + -- no negative depths and no negative depths, nil == 0 + else + description.depth = dp + end + end + end + end +end + +local function copytotfm(data) + if data and data.descriptions then + local metadata = data.metadata + local resources = data.resources + local properties = derivetable(data.properties) + local descriptions = derivetable(data.descriptions) + local goodies = derivetable(data.goodies) + local characters = { } + local parameters = { } + local unicodes = resources.unicodes + -- + for unicode, description in next, data.descriptions do -- use parent table + characters[unicode] = { } + end + -- + local filename = constructors.checkedfilename(resources) + local fontname = metadata.fontname or metadata.fullname + local fullname = metadata.fullname or metadata.fontname + local endash = unicodes['space'] + local emdash = unicodes['emdash'] + local spacer = "space" + local spaceunits = 500 + -- + local monospaced = metadata.isfixedpitch + local charwidth = metadata.charwidth + local italicangle = metadata.italicangle + local charxheight = metadata.xheight and metadata.xheight > 0 and metadata.xheight + properties.monospaced = monospaced + parameters.italicangle = italicangle + parameters.charwidth = charwidth + parameters.charxheight = charxheight + -- same as otf + if properties.monospaced then + if descriptions[endash] then + spaceunits, spacer = descriptions[endash].width, "space" + end + if not spaceunits and descriptions[emdash] then + spaceunits, spacer = descriptions[emdash].width, "emdash" + end + if not spaceunits and charwidth then + spaceunits, spacer = charwidth, "charwidth" + end + else + if descriptions[endash] then + spaceunits, spacer = descriptions[endash].width, "space" + end + if not spaceunits and charwidth then + spaceunits, spacer = charwidth, "charwidth" + end + end + spaceunits = tonumber(spaceunits) + if spaceunits < 200 then + -- todo: warning + end + -- + parameters.slant = 0 + parameters.space = spaceunits + parameters.space_stretch = 500 + parameters.space_shrink = 333 + parameters.x_height = 400 + parameters.quad = 1000 + -- + if italicangle then + parameters.italicangle = italicangle + parameters.italicfactor = math.cos(math.rad(90+italicangle)) + parameters.slant = - math.round(math.tan(italicangle*math.pi/180)) + end + if monospaced then + parameters.space_stretch = 0 + parameters.space_shrink = 0 + elseif afm.syncspace then + parameters.space_stretch = spaceunits/2 + parameters.space_shrink = spaceunits/3 + end + parameters.extra_space = parameters.space_shrink + if charxheight then + parameters.x_height = charxheight + else + -- same as otf + local x = unicodes['x'] + if x then + local x = descriptions[x] + if x then + parameters.x_height = x.height + end + end + -- + end + local fd = data.fontdimens + if fd and fd[8] and fd[9] and fd[10] then -- math + for k,v in next, fd do + parameters[k] = v + end + end + -- + parameters.designsize = (metadata.designsize or 10)*65536 + parameters.ascender = abs(metadata.ascender or 0) + parameters.descender = abs(metadata.descender or 0) + parameters.units = 1000 + -- + properties.spacer = 
spacer + properties.encodingbytes = 2 + properties.format = fonts.formats[filename] or "type1" + properties.filename = filename + properties.fontname = fontname + properties.fullname = fullname + properties.psname = fullname + properties.name = filename or fullname or fontname + -- + if next(characters) then + return { + characters = characters, + descriptions = descriptions, + parameters = parameters, + resources = resources, + properties = properties, + goodies = goodies, + } + end + end + return nil +end + +--[[ldx-- +

Originally we had features kind of hard coded for +afm files, but since I expect to support more font formats, I decided +to treat this font format like any other and handle features in a +more configurable way.
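A feature is hooked in via the registration helper; the example below is a
hypothetical registration (the feature name and setter are made up) that only
shows the calling convention, mirroring the mode feature earlier in this file.

   local function setdemo(tfmdata,value)
       if value then
           tfmdata.properties.demo = value
       end
   end

   registerafmfeature {
       name         = "demo",
       description  = "made-up feature, for illustration only",
       initializers = {
           base = setdemo,
           node = setdemo,
       }
   }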

+--ldx]]-- + +function afm.setfeatures(tfmdata,features) + local okay = constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) + if okay then + return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) + else + return { } -- will become false + end +end + +local function checkfeatures(specification) +end + +local function afmtotfm(specification) + local afmname = specification.filename or specification.name + if specification.forced == "afm" or specification.format == "afm" then -- move this one up + if trace_loading then + report_afm("forcing afm format for %a",afmname) + end + else + local tfmname = findbinfile(afmname,"ofm") or "" + if tfmname ~= "" then + if trace_loading then + report_afm("fallback from afm to tfm for %a",afmname) + end + return -- just that + end + end + if afmname ~= "" then + -- weird, isn't this already done then? + local features = constructors.checkedfeatures("afm",specification.features.normal) + specification.features.normal = features + constructors.hashinstance(specification,true) -- also weird here + -- + specification = definers.resolve(specification) -- new, was forgotten + local cache_id = specification.hash + local tfmdata = containers.read(constructors.cache, cache_id) -- cache with features applied + if not tfmdata then + local rawdata = afm.load(afmname) + if rawdata and next(rawdata) then + adddimensions(rawdata) + tfmdata = copytotfm(rawdata) + if tfmdata and next(tfmdata) then + local shared = tfmdata.shared + if not shared then + shared = { } + tfmdata.shared = shared + end + shared.rawdata = rawdata + shared.features = features + shared.processes = afm.setfeatures(tfmdata,features) + end + elseif trace_loading then + report_afm("no (valid) afm file found with name %a",afmname) + end + tfmdata = containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata + end +end + +--[[ldx-- +

As soon as we could intercept the tfm reader, I implemented an afm reader. Since traditional TeX could use such fonts with companions, the following method could also handle those cases, but now that we can handle these formats directly we no longer need this feature.

+--ldx]]--
+
+local function read_from_afm(specification)
+    local tfmdata = afmtotfm(specification)
+    if tfmdata then
+        tfmdata.properties.name = specification.name
+        tfmdata = constructors.scale(tfmdata, specification)
+        local allfeatures = tfmdata.shared.features or specification.features.normal
+        constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm)
+        fonts.loggers.register(tfmdata,'afm',specification)
+    end
+    return tfmdata
+end
+
+--[[ldx--
+

Here comes the implementation of a few features. We only implement those that make sense for this format.
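For illustration only (not part of the patch), a minimal standalone sketch of what the 'trep' replacement defined below boils down to; the toy tables are hypothetical stand-ins for tfmdata.characters and tfmdata.descriptions:

-- replace the glyph in an ascii slot by its typographic counterpart
local characters   = { [0x0027] = { width = 300 }, [0x2019] = { width = 250 } }
local descriptions = { [0x0027] = { },             [0x2019] = { }             }
local replacements = { [0x0027] = 0x2019 } -- straight quote becomes right single quote
for ascii, unicode in next, replacements do
    characters  [ascii] = characters  [unicode] -- kerns of the original slot are lost
    descriptions[ascii] = descriptions[unicode]
end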

+--ldx]]-- + +local function prepareligatures(tfmdata,ligatures,value) + if value then + local descriptions = tfmdata.descriptions + for unicode, character in next, tfmdata.characters do + local description = descriptions[unicode] + local dligatures = description.ligatures + if dligatures then + local cligatures = character.ligatures + if not cligatures then + cligatures = { } + character.ligatures = cligatures + end + for unicode, ligature in next, dligatures do + cligatures[unicode] = { + char = ligature, + type = 0 + } + end + end + end + end +end + +local function preparekerns(tfmdata,kerns,value) + if value then + local rawdata = tfmdata.shared.rawdata + local resources = rawdata.resources + local unicodes = resources.unicodes + local descriptions = tfmdata.descriptions + for u, chr in next, tfmdata.characters do + local d = descriptions[u] + local newkerns = d[kerns] + if newkerns then + local kerns = chr.kerns + if not kerns then + kerns = { } + chr.kerns = kerns + end + for k,v in next, newkerns do + local uk = unicodes[k] + if uk then + kerns[uk] = v + end + end + end + end + end +end + +local list = { + -- [0x0022] = 0x201D, + [0x0027] = 0x2019, + -- [0x0060] = 0x2018, +} + +local function texreplacements(tfmdata,value) + local descriptions = tfmdata.descriptions + local characters = tfmdata.characters + for k, v in next, list do + characters [k] = characters [v] -- we forget about kerns + descriptions[k] = descriptions[v] -- we forget about kerns + end +end + +local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures', value) end +local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end +local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns', value) end +local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns', value) end + +registerafmfeature { + name = "liga", + description = "traditional ligatures", + initializers = { + base = ligatures, + node = ligatures, + } +} + +registerafmfeature { + name = "kern", + description = "intercharacter kerning", + initializers = { + base = kerns, + node = kerns, + } +} + +registerafmfeature { + name = "extrakerns", + description = "additional intercharacter kerning", + initializers = { + base = extrakerns, + node = extrakerns, + } +} + +registerafmfeature { + name = 'tlig', + description = 'tex ligatures', + initializers = { + base = texligatures, + node = texligatures, + } +} + +registerafmfeature { + name = 'trep', + description = 'tex replacements', + initializers = { + base = texreplacements, + node = texreplacements, + } +} + +-- readers + +local check_tfm = readers.check_tfm + +fonts.formats.afm = "type1" +fonts.formats.pfb = "type1" + +local function check_afm(specification,fullname) + local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure + if foundname == "" then + foundname = fonts.names.getfilename(fullname,"afm") or "" + end + if foundname == "" and afm.autoprefixed then + local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.* + if encoding and shortname and fonts.encodings.known[encoding] then + shortname = findbinfile(shortname,'afm') or "" -- just to be sure + if shortname ~= "" then + foundname = shortname + if trace_defining then + report_afm("stripping encoding prefix from filename %a",afmname) + end + end + end + end + if foundname ~= "" then + specification.filename = foundname + specification.format = "afm" + return read_from_afm(specification) + end +end + +function 
readers.afm(specification,method) + local fullname, tfmdata = specification.filename or "", nil + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + tfmdata = check_afm(specification,specification.name .. "." .. forced) + end + if not tfmdata then + method = method or definers.method or "afm or tfm" + if method == "tfm" then + tfmdata = check_tfm(specification,specification.name) + elseif method == "afm" then + tfmdata = check_afm(specification,specification.name) + elseif method == "tfm or afm" then + tfmdata = check_tfm(specification,specification.name) or check_afm(specification,specification.name) + else -- method == "afm or tfm" or method == "" then + tfmdata = check_afm(specification,specification.name) or check_tfm(specification,specification.name) + end + end + else + tfmdata = check_afm(specification,fullname) + end + return tfmdata +end + +function readers.pfb(specification,method) -- only called when forced + local original = specification.specification + if trace_defining then + report_afm("using afm reader for %a",original) + end + specification.specification = gsub(original,"%.pfb",".afm") + specification.forced = "afm" + return readers.afm(specification,method) +end diff --git a/tex/context/base/font-age.lua b/tex/context/base/font-age.lua index bb6883a74..b7632b55f 100644 --- a/tex/context/base/font-age.lua +++ b/tex/context/base/font-age.lua @@ -1,4115 +1,4115 @@ -if not modules then modules = { } end modules ['font-age'] = { - version = 1.001, - comment = "companion to luatex-fonts.lua", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "derived from http://www.adobe.com/devnet/opentype/archives/glyphlist.txt", - original = "Adobe Glyph List, version 2.0, September 20, 2002", - dataonly = true, -} - -if context then - logs.report("fatal error","this module is not for context") - os.exit() -end - -return { -- generated: inspect(fonts.encodings.agl.unicodes) - ["A"]=65, - ["AE"]=198, - ["AEacute"]=508, - ["AEmacron"]=482, - ["Aacute"]=193, - ["Abreve"]=258, - ["Abreveacute"]=7854, - ["Abrevecyrillic"]=1232, - ["Abrevedotbelow"]=7862, - ["Abrevegrave"]=7856, - ["Abrevehookabove"]=7858, - ["Abrevetilde"]=7860, - ["Acaron"]=461, - ["Acircle"]=9398, - ["Acircumflex"]=194, - ["Acircumflexacute"]=7844, - ["Acircumflexdotbelow"]=7852, - ["Acircumflexgrave"]=7846, - ["Acircumflexhookabove"]=7848, - ["Acircumflextilde"]=7850, - ["Acyrillic"]=1040, - ["Adblgrave"]=512, - ["Adieresis"]=196, - ["Adieresiscyrillic"]=1234, - ["Adieresismacron"]=478, - ["Adotbelow"]=7840, - ["Adotmacron"]=480, - ["Agrave"]=192, - ["Ahookabove"]=7842, - ["Aiecyrillic"]=1236, - ["Ainvertedbreve"]=514, - ["Alpha"]=913, - ["Alphatonos"]=902, - ["Amacron"]=256, - ["Amonospace"]=65313, - ["Aogonek"]=260, - ["Aring"]=197, - ["Aringacute"]=506, - ["Aringbelow"]=7680, - ["Atilde"]=195, - ["Aybarmenian"]=1329, - ["B"]=66, - ["Bcircle"]=9399, - ["Bdotaccent"]=7682, - ["Bdotbelow"]=7684, - ["Becyrillic"]=1041, - ["Benarmenian"]=1330, - ["Beta"]=914, - ["Bhook"]=385, - ["Blinebelow"]=7686, - ["Bmonospace"]=65314, - ["Btopbar"]=386, - ["C"]=67, - ["Caarmenian"]=1342, - ["Cacute"]=262, - ["Ccaron"]=268, - ["Ccedilla"]=199, - ["Ccedillaacute"]=7688, - ["Ccircle"]=9400, - ["Ccircumflex"]=264, - ["Cdot"]=266, - ["Cdotaccent"]=266, - ["Chaarmenian"]=1353, - ["Cheabkhasiancyrillic"]=1212, - ["Checyrillic"]=1063, - ["Chedescenderabkhasiancyrillic"]=1214, - ["Chedescendercyrillic"]=1206, - ["Chedieresiscyrillic"]=1268, - ["Cheharmenian"]=1347, - 
["Chekhakassiancyrillic"]=1227, - ["Cheverticalstrokecyrillic"]=1208, - ["Chi"]=935, - ["Chook"]=391, - ["Cmonospace"]=65315, - ["Coarmenian"]=1361, - ["D"]=68, - ["DZ"]=497, - ["DZcaron"]=452, - ["Daarmenian"]=1332, - ["Dafrican"]=393, - ["Dcaron"]=270, - ["Dcedilla"]=7696, - ["Dcircle"]=9401, - ["Dcircumflexbelow"]=7698, - ["Dcroat"]=272, - ["Ddotaccent"]=7690, - ["Ddotbelow"]=7692, - ["Decyrillic"]=1044, - ["Deicoptic"]=1006, - ["Delta"]=8710, - ["Deltagreek"]=916, - ["Dhook"]=394, - ["Digammagreek"]=988, - ["Djecyrillic"]=1026, - ["Dlinebelow"]=7694, - ["Dmonospace"]=65316, - ["Dslash"]=272, - ["Dtopbar"]=395, - ["Dz"]=498, - ["Dzcaron"]=453, - ["Dzeabkhasiancyrillic"]=1248, - ["Dzecyrillic"]=1029, - ["Dzhecyrillic"]=1039, - ["E"]=69, - ["Eacute"]=201, - ["Ebreve"]=276, - ["Ecaron"]=282, - ["Ecedillabreve"]=7708, - ["Echarmenian"]=1333, - ["Ecircle"]=9402, - ["Ecircumflex"]=202, - ["Ecircumflexacute"]=7870, - ["Ecircumflexbelow"]=7704, - ["Ecircumflexdotbelow"]=7878, - ["Ecircumflexgrave"]=7872, - ["Ecircumflexhookabove"]=7874, - ["Ecircumflextilde"]=7876, - ["Ecyrillic"]=1028, - ["Edblgrave"]=516, - ["Edieresis"]=203, - ["Edot"]=278, - ["Edotaccent"]=278, - ["Edotbelow"]=7864, - ["Efcyrillic"]=1060, - ["Egrave"]=200, - ["Eharmenian"]=1335, - ["Ehookabove"]=7866, - ["Eightroman"]=8551, - ["Einvertedbreve"]=518, - ["Eiotifiedcyrillic"]=1124, - ["Elcyrillic"]=1051, - ["Elevenroman"]=8554, - ["Emacron"]=274, - ["Emacronacute"]=7702, - ["Emacrongrave"]=7700, - ["Emcyrillic"]=1052, - ["Emonospace"]=65317, - ["Encyrillic"]=1053, - ["Endescendercyrillic"]=1186, - ["Eng"]=330, - ["Enghecyrillic"]=1188, - ["Enhookcyrillic"]=1223, - ["Eogonek"]=280, - ["Eopen"]=400, - ["Epsilon"]=917, - ["Epsilontonos"]=904, - ["Ercyrillic"]=1056, - ["Ereversed"]=398, - ["Ereversedcyrillic"]=1069, - ["Escyrillic"]=1057, - ["Esdescendercyrillic"]=1194, - ["Esh"]=425, - ["Eta"]=919, - ["Etarmenian"]=1336, - ["Etatonos"]=905, - ["Eth"]=208, - ["Etilde"]=7868, - ["Etildebelow"]=7706, - ["Euro"]=8364, - ["Ezh"]=439, - ["Ezhcaron"]=494, - ["Ezhreversed"]=440, - ["F"]=70, - ["Fcircle"]=9403, - ["Fdotaccent"]=7710, - ["Feharmenian"]=1366, - ["Feicoptic"]=996, - ["Fhook"]=401, - ["Fitacyrillic"]=1138, - ["Fiveroman"]=8548, - ["Fmonospace"]=65318, - ["Fourroman"]=8547, - ["G"]=71, - ["GBsquare"]=13191, - ["Gacute"]=500, - ["Gamma"]=915, - ["Gammaafrican"]=404, - ["Gangiacoptic"]=1002, - ["Gbreve"]=286, - ["Gcaron"]=486, - ["Gcedilla"]=290, - ["Gcircle"]=9404, - ["Gcircumflex"]=284, - ["Gcommaaccent"]=290, - ["Gdot"]=288, - ["Gdotaccent"]=288, - ["Gecyrillic"]=1043, - ["Ghadarmenian"]=1346, - ["Ghemiddlehookcyrillic"]=1172, - ["Ghestrokecyrillic"]=1170, - ["Gheupturncyrillic"]=1168, - ["Ghook"]=403, - ["Gimarmenian"]=1331, - ["Gjecyrillic"]=1027, - ["Gmacron"]=7712, - ["Gmonospace"]=65319, - ["Gsmallhook"]=667, - ["Gstroke"]=484, - ["H"]=72, - ["H18533"]=9679, - ["H18543"]=9642, - ["H18551"]=9643, - ["H22073"]=9633, - ["HPsquare"]=13259, - ["Haabkhasiancyrillic"]=1192, - ["Hadescendercyrillic"]=1202, - ["Hardsigncyrillic"]=1066, - ["Hbar"]=294, - ["Hbrevebelow"]=7722, - ["Hcedilla"]=7720, - ["Hcircle"]=9405, - ["Hcircumflex"]=292, - ["Hdieresis"]=7718, - ["Hdotaccent"]=7714, - ["Hdotbelow"]=7716, - ["Hmonospace"]=65320, - ["Hoarmenian"]=1344, - ["Horicoptic"]=1000, - ["Hzsquare"]=13200, - ["I"]=73, - ["IAcyrillic"]=1071, - ["IJ"]=306, - ["IUcyrillic"]=1070, - ["Iacute"]=205, - ["Ibreve"]=300, - ["Icaron"]=463, - ["Icircle"]=9406, - ["Icircumflex"]=206, - ["Icyrillic"]=1030, - ["Idblgrave"]=520, - ["Idieresis"]=207, - 
["Idieresisacute"]=7726, - ["Idieresiscyrillic"]=1252, - ["Idot"]=304, - ["Idotaccent"]=304, - ["Idotbelow"]=7882, - ["Iebrevecyrillic"]=1238, - ["Iecyrillic"]=1045, - ["Ifraktur"]=8465, - ["Igrave"]=204, - ["Ihookabove"]=7880, - ["Iicyrillic"]=1048, - ["Iinvertedbreve"]=522, - ["Iishortcyrillic"]=1049, - ["Imacron"]=298, - ["Imacroncyrillic"]=1250, - ["Imonospace"]=65321, - ["Iniarmenian"]=1339, - ["Iocyrillic"]=1025, - ["Iogonek"]=302, - ["Iota"]=921, - ["Iotaafrican"]=406, - ["Iotadieresis"]=938, - ["Iotatonos"]=906, - ["Istroke"]=407, - ["Itilde"]=296, - ["Itildebelow"]=7724, - ["Izhitsacyrillic"]=1140, - ["Izhitsadblgravecyrillic"]=1142, - ["J"]=74, - ["Jaarmenian"]=1345, - ["Jcircle"]=9407, - ["Jcircumflex"]=308, - ["Jecyrillic"]=1032, - ["Jheharmenian"]=1355, - ["Jmonospace"]=65322, - ["K"]=75, - ["KBsquare"]=13189, - ["KKsquare"]=13261, - ["Kabashkircyrillic"]=1184, - ["Kacute"]=7728, - ["Kacyrillic"]=1050, - ["Kadescendercyrillic"]=1178, - ["Kahookcyrillic"]=1219, - ["Kappa"]=922, - ["Kastrokecyrillic"]=1182, - ["Kaverticalstrokecyrillic"]=1180, - ["Kcaron"]=488, - ["Kcedilla"]=310, - ["Kcircle"]=9408, - ["Kcommaaccent"]=310, - ["Kdotbelow"]=7730, - ["Keharmenian"]=1364, - ["Kenarmenian"]=1343, - ["Khacyrillic"]=1061, - ["Kheicoptic"]=998, - ["Khook"]=408, - ["Kjecyrillic"]=1036, - ["Klinebelow"]=7732, - ["Kmonospace"]=65323, - ["Koppacyrillic"]=1152, - ["Koppagreek"]=990, - ["Ksicyrillic"]=1134, - ["L"]=76, - ["LJ"]=455, - ["Lacute"]=313, - ["Lambda"]=923, - ["Lcaron"]=317, - ["Lcedilla"]=315, - ["Lcircle"]=9409, - ["Lcircumflexbelow"]=7740, - ["Lcommaaccent"]=315, - ["Ldot"]=319, - ["Ldotaccent"]=319, - ["Ldotbelow"]=7734, - ["Ldotbelowmacron"]=7736, - ["Liwnarmenian"]=1340, - ["Lj"]=456, - ["Ljecyrillic"]=1033, - ["Llinebelow"]=7738, - ["Lmonospace"]=65324, - ["Lslash"]=321, - ["M"]=77, - ["MBsquare"]=13190, - ["Macute"]=7742, - ["Mcircle"]=9410, - ["Mdotaccent"]=7744, - ["Mdotbelow"]=7746, - ["Menarmenian"]=1348, - ["Mmonospace"]=65325, - ["Mturned"]=412, - ["Mu"]=924, - ["N"]=78, - ["NJ"]=458, - ["Nacute"]=323, - ["Ncaron"]=327, - ["Ncedilla"]=325, - ["Ncircle"]=9411, - ["Ncircumflexbelow"]=7754, - ["Ncommaaccent"]=325, - ["Ndotaccent"]=7748, - ["Ndotbelow"]=7750, - ["Nhookleft"]=413, - ["Nineroman"]=8552, - ["Nj"]=459, - ["Njecyrillic"]=1034, - ["Nlinebelow"]=7752, - ["Nmonospace"]=65326, - ["Nowarmenian"]=1350, - ["Ntilde"]=209, - ["Nu"]=925, - ["O"]=79, - ["OE"]=338, - ["Oacute"]=211, - ["Obarredcyrillic"]=1256, - ["Obarreddieresiscyrillic"]=1258, - ["Obreve"]=334, - ["Ocaron"]=465, - ["Ocenteredtilde"]=415, - ["Ocircle"]=9412, - ["Ocircumflex"]=212, - ["Ocircumflexacute"]=7888, - ["Ocircumflexdotbelow"]=7896, - ["Ocircumflexgrave"]=7890, - ["Ocircumflexhookabove"]=7892, - ["Ocircumflextilde"]=7894, - ["Ocyrillic"]=1054, - ["Odblacute"]=336, - ["Odblgrave"]=524, - ["Odieresis"]=214, - ["Odieresiscyrillic"]=1254, - ["Odotbelow"]=7884, - ["Ograve"]=210, - ["Oharmenian"]=1365, - ["Ohm"]=8486, - ["Ohookabove"]=7886, - ["Ohorn"]=416, - ["Ohornacute"]=7898, - ["Ohorndotbelow"]=7906, - ["Ohorngrave"]=7900, - ["Ohornhookabove"]=7902, - ["Ohorntilde"]=7904, - ["Ohungarumlaut"]=336, - ["Oi"]=418, - ["Oinvertedbreve"]=526, - ["Omacron"]=332, - ["Omacronacute"]=7762, - ["Omacrongrave"]=7760, - ["Omega"]=8486, - ["Omegacyrillic"]=1120, - ["Omegagreek"]=937, - ["Omegaroundcyrillic"]=1146, - ["Omegatitlocyrillic"]=1148, - ["Omegatonos"]=911, - ["Omicron"]=927, - ["Omicrontonos"]=908, - ["Omonospace"]=65327, - ["Oneroman"]=8544, - ["Oogonek"]=490, - ["Oogonekmacron"]=492, - ["Oopen"]=390, 
- ["Oslash"]=216, - ["Oslashacute"]=510, - ["Ostrokeacute"]=510, - ["Otcyrillic"]=1150, - ["Otilde"]=213, - ["Otildeacute"]=7756, - ["Otildedieresis"]=7758, - ["P"]=80, - ["Pacute"]=7764, - ["Pcircle"]=9413, - ["Pdotaccent"]=7766, - ["Pecyrillic"]=1055, - ["Peharmenian"]=1354, - ["Pemiddlehookcyrillic"]=1190, - ["Phi"]=934, - ["Phook"]=420, - ["Pi"]=928, - ["Piwrarmenian"]=1363, - ["Pmonospace"]=65328, - ["Psi"]=936, - ["Psicyrillic"]=1136, - ["Q"]=81, - ["Qcircle"]=9414, - ["Qmonospace"]=65329, - ["R"]=82, - ["Raarmenian"]=1356, - ["Racute"]=340, - ["Rcaron"]=344, - ["Rcedilla"]=342, - ["Rcircle"]=9415, - ["Rcommaaccent"]=342, - ["Rdblgrave"]=528, - ["Rdotaccent"]=7768, - ["Rdotbelow"]=7770, - ["Rdotbelowmacron"]=7772, - ["Reharmenian"]=1360, - ["Rfraktur"]=8476, - ["Rho"]=929, - ["Rinvertedbreve"]=530, - ["Rlinebelow"]=7774, - ["Rmonospace"]=65330, - ["Rsmallinverted"]=641, - ["Rsmallinvertedsuperior"]=694, - ["S"]=83, - ["SF010000"]=9484, - ["SF020000"]=9492, - ["SF030000"]=9488, - ["SF040000"]=9496, - ["SF050000"]=9532, - ["SF060000"]=9516, - ["SF070000"]=9524, - ["SF080000"]=9500, - ["SF090000"]=9508, - ["SF10000"]=9484, - ["SF100000"]=9472, - ["SF110000"]=9474, - ["SF190000"]=9569, - ["SF20000"]=9492, - ["SF200000"]=9570, - ["SF210000"]=9558, - ["SF220000"]=9557, - ["SF230000"]=9571, - ["SF240000"]=9553, - ["SF250000"]=9559, - ["SF260000"]=9565, - ["SF270000"]=9564, - ["SF280000"]=9563, - ["SF30000"]=9488, - ["SF360000"]=9566, - ["SF370000"]=9567, - ["SF380000"]=9562, - ["SF390000"]=9556, - ["SF40000"]=9496, - ["SF400000"]=9577, - ["SF410000"]=9574, - ["SF420000"]=9568, - ["SF430000"]=9552, - ["SF440000"]=9580, - ["SF450000"]=9575, - ["SF460000"]=9576, - ["SF470000"]=9572, - ["SF480000"]=9573, - ["SF490000"]=9561, - ["SF50000"]=9532, - ["SF500000"]=9560, - ["SF510000"]=9554, - ["SF520000"]=9555, - ["SF530000"]=9579, - ["SF540000"]=9578, - ["SF60000"]=9516, - ["SF70000"]=9524, - ["SF80000"]=9500, - ["SF90000"]=9508, - ["Sacute"]=346, - ["Sacutedotaccent"]=7780, - ["Sampigreek"]=992, - ["Scaron"]=352, - ["Scarondotaccent"]=7782, - ["Scedilla"]=350, - ["Schwa"]=399, - ["Schwacyrillic"]=1240, - ["Schwadieresiscyrillic"]=1242, - ["Scircle"]=9416, - ["Scircumflex"]=348, - ["Scommaaccent"]=536, - ["Sdotaccent"]=7776, - ["Sdotbelow"]=7778, - ["Sdotbelowdotaccent"]=7784, - ["Seharmenian"]=1357, - ["Sevenroman"]=8550, - ["Shaarmenian"]=1351, - ["Shacyrillic"]=1064, - ["Shchacyrillic"]=1065, - ["Sheicoptic"]=994, - ["Shhacyrillic"]=1210, - ["Shimacoptic"]=1004, - ["Sigma"]=931, - ["Sixroman"]=8549, - ["Smonospace"]=65331, - ["Softsigncyrillic"]=1068, - ["Stigmagreek"]=986, - ["T"]=84, - ["Tau"]=932, - ["Tbar"]=358, - ["Tcaron"]=356, - ["Tcedilla"]=354, - ["Tcircle"]=9417, - ["Tcircumflexbelow"]=7792, - ["Tcommaaccent"]=354, - ["Tdotaccent"]=7786, - ["Tdotbelow"]=7788, - ["Tecyrillic"]=1058, - ["Tedescendercyrillic"]=1196, - ["Tenroman"]=8553, - ["Tetsecyrillic"]=1204, - ["Theta"]=920, - ["Thook"]=428, - ["Thorn"]=222, - ["Threeroman"]=8546, - ["Tiwnarmenian"]=1359, - ["Tlinebelow"]=7790, - ["Tmonospace"]=65332, - ["Toarmenian"]=1337, - ["Tonefive"]=444, - ["Tonesix"]=388, - ["Tonetwo"]=423, - ["Tretroflexhook"]=430, - ["Tsecyrillic"]=1062, - ["Tshecyrillic"]=1035, - ["Twelveroman"]=8555, - ["Tworoman"]=8545, - ["U"]=85, - ["Uacute"]=218, - ["Ubreve"]=364, - ["Ucaron"]=467, - ["Ucircle"]=9418, - ["Ucircumflex"]=219, - ["Ucircumflexbelow"]=7798, - ["Ucyrillic"]=1059, - ["Udblacute"]=368, - ["Udblgrave"]=532, - ["Udieresis"]=220, - ["Udieresisacute"]=471, - ["Udieresisbelow"]=7794, - 
["Udieresiscaron"]=473, - ["Udieresiscyrillic"]=1264, - ["Udieresisgrave"]=475, - ["Udieresismacron"]=469, - ["Udotbelow"]=7908, - ["Ugrave"]=217, - ["Uhookabove"]=7910, - ["Uhorn"]=431, - ["Uhornacute"]=7912, - ["Uhorndotbelow"]=7920, - ["Uhorngrave"]=7914, - ["Uhornhookabove"]=7916, - ["Uhorntilde"]=7918, - ["Uhungarumlaut"]=368, - ["Uhungarumlautcyrillic"]=1266, - ["Uinvertedbreve"]=534, - ["Ukcyrillic"]=1144, - ["Umacron"]=362, - ["Umacroncyrillic"]=1262, - ["Umacrondieresis"]=7802, - ["Umonospace"]=65333, - ["Uogonek"]=370, - ["Upsilon"]=933, - ["Upsilon1"]=978, - ["Upsilonacutehooksymbolgreek"]=979, - ["Upsilonafrican"]=433, - ["Upsilondieresis"]=939, - ["Upsilondieresishooksymbolgreek"]=980, - ["Upsilonhooksymbol"]=978, - ["Upsilontonos"]=910, - ["Uring"]=366, - ["Ushortcyrillic"]=1038, - ["Ustraightcyrillic"]=1198, - ["Ustraightstrokecyrillic"]=1200, - ["Utilde"]=360, - ["Utildeacute"]=7800, - ["Utildebelow"]=7796, - ["V"]=86, - ["Vcircle"]=9419, - ["Vdotbelow"]=7806, - ["Vecyrillic"]=1042, - ["Vewarmenian"]=1358, - ["Vhook"]=434, - ["Vmonospace"]=65334, - ["Voarmenian"]=1352, - ["Vtilde"]=7804, - ["W"]=87, - ["Wacute"]=7810, - ["Wcircle"]=9420, - ["Wcircumflex"]=372, - ["Wdieresis"]=7812, - ["Wdotaccent"]=7814, - ["Wdotbelow"]=7816, - ["Wgrave"]=7808, - ["Wmonospace"]=65335, - ["X"]=88, - ["Xcircle"]=9421, - ["Xdieresis"]=7820, - ["Xdotaccent"]=7818, - ["Xeharmenian"]=1341, - ["Xi"]=926, - ["Xmonospace"]=65336, - ["Y"]=89, - ["Yacute"]=221, - ["Yatcyrillic"]=1122, - ["Ycircle"]=9422, - ["Ycircumflex"]=374, - ["Ydieresis"]=376, - ["Ydotaccent"]=7822, - ["Ydotbelow"]=7924, - ["Yericyrillic"]=1067, - ["Yerudieresiscyrillic"]=1272, - ["Ygrave"]=7922, - ["Yhook"]=435, - ["Yhookabove"]=7926, - ["Yiarmenian"]=1349, - ["Yicyrillic"]=1031, - ["Yiwnarmenian"]=1362, - ["Ymonospace"]=65337, - ["Ytilde"]=7928, - ["Yusbigcyrillic"]=1130, - ["Yusbigiotifiedcyrillic"]=1132, - ["Yuslittlecyrillic"]=1126, - ["Yuslittleiotifiedcyrillic"]=1128, - ["Z"]=90, - ["Zaarmenian"]=1334, - ["Zacute"]=377, - ["Zcaron"]=381, - ["Zcircle"]=9423, - ["Zcircumflex"]=7824, - ["Zdot"]=379, - ["Zdotaccent"]=379, - ["Zdotbelow"]=7826, - ["Zecyrillic"]=1047, - ["Zedescendercyrillic"]=1176, - ["Zedieresiscyrillic"]=1246, - ["Zeta"]=918, - ["Zhearmenian"]=1338, - ["Zhebrevecyrillic"]=1217, - ["Zhecyrillic"]=1046, - ["Zhedescendercyrillic"]=1174, - ["Zhedieresiscyrillic"]=1244, - ["Zlinebelow"]=7828, - ["Zmonospace"]=65338, - ["Zstroke"]=437, - ["a"]=97, - ["aabengali"]=2438, - ["aacute"]=225, - ["aadeva"]=2310, - ["aagujarati"]=2694, - ["aagurmukhi"]=2566, - ["aamatragurmukhi"]=2622, - ["aarusquare"]=13059, - ["aavowelsignbengali"]=2494, - ["aavowelsigndeva"]=2366, - ["aavowelsigngujarati"]=2750, - ["abbreviationmarkarmenian"]=1375, - ["abbreviationsigndeva"]=2416, - ["abengali"]=2437, - ["abopomofo"]=12570, - ["abreve"]=259, - ["abreveacute"]=7855, - ["abrevecyrillic"]=1233, - ["abrevedotbelow"]=7863, - ["abrevegrave"]=7857, - ["abrevehookabove"]=7859, - ["abrevetilde"]=7861, - ["acaron"]=462, - ["acircle"]=9424, - ["acircumflex"]=226, - ["acircumflexacute"]=7845, - ["acircumflexdotbelow"]=7853, - ["acircumflexgrave"]=7847, - ["acircumflexhookabove"]=7849, - ["acircumflextilde"]=7851, - ["acute"]=180, - ["acutebelowcmb"]=791, - ["acutecmb"]=769, - ["acutecomb"]=769, - ["acutedeva"]=2388, - ["acutelowmod"]=719, - ["acutetonecmb"]=833, - ["acyrillic"]=1072, - ["adblgrave"]=513, - ["addakgurmukhi"]=2673, - ["adeva"]=2309, - ["adieresis"]=228, - ["adieresiscyrillic"]=1235, - ["adieresismacron"]=479, - ["adotbelow"]=7841, - 
["adotmacron"]=481, - ["ae"]=230, - ["aeacute"]=509, - ["aekorean"]=12624, - ["aemacron"]=483, - ["afii00208"]=8213, - ["afii08941"]=8356, - ["afii10017"]=1040, - ["afii10018"]=1041, - ["afii10019"]=1042, - ["afii10020"]=1043, - ["afii10021"]=1044, - ["afii10022"]=1045, - ["afii10023"]=1025, - ["afii10024"]=1046, - ["afii10025"]=1047, - ["afii10026"]=1048, - ["afii10027"]=1049, - ["afii10028"]=1050, - ["afii10029"]=1051, - ["afii10030"]=1052, - ["afii10031"]=1053, - ["afii10032"]=1054, - ["afii10033"]=1055, - ["afii10034"]=1056, - ["afii10035"]=1057, - ["afii10036"]=1058, - ["afii10037"]=1059, - ["afii10038"]=1060, - ["afii10039"]=1061, - ["afii10040"]=1062, - ["afii10041"]=1063, - ["afii10042"]=1064, - ["afii10043"]=1065, - ["afii10044"]=1066, - ["afii10045"]=1067, - ["afii10046"]=1068, - ["afii10047"]=1069, - ["afii10048"]=1070, - ["afii10049"]=1071, - ["afii10050"]=1168, - ["afii10051"]=1026, - ["afii10052"]=1027, - ["afii10053"]=1028, - ["afii10054"]=1029, - ["afii10055"]=1030, - ["afii10056"]=1031, - ["afii10057"]=1032, - ["afii10058"]=1033, - ["afii10059"]=1034, - ["afii10060"]=1035, - ["afii10061"]=1036, - ["afii10062"]=1038, - ["afii10065"]=1072, - ["afii10066"]=1073, - ["afii10067"]=1074, - ["afii10068"]=1075, - ["afii10069"]=1076, - ["afii10070"]=1077, - ["afii10071"]=1105, - ["afii10072"]=1078, - ["afii10073"]=1079, - ["afii10074"]=1080, - ["afii10075"]=1081, - ["afii10076"]=1082, - ["afii10077"]=1083, - ["afii10078"]=1084, - ["afii10079"]=1085, - ["afii10080"]=1086, - ["afii10081"]=1087, - ["afii10082"]=1088, - ["afii10083"]=1089, - ["afii10084"]=1090, - ["afii10085"]=1091, - ["afii10086"]=1092, - ["afii10087"]=1093, - ["afii10088"]=1094, - ["afii10089"]=1095, - ["afii10090"]=1096, - ["afii10091"]=1097, - ["afii10092"]=1098, - ["afii10093"]=1099, - ["afii10094"]=1100, - ["afii10095"]=1101, - ["afii10096"]=1102, - ["afii10097"]=1103, - ["afii10098"]=1169, - ["afii10099"]=1106, - ["afii10100"]=1107, - ["afii10101"]=1108, - ["afii10102"]=1109, - ["afii10103"]=1110, - ["afii10104"]=1111, - ["afii10105"]=1112, - ["afii10106"]=1113, - ["afii10107"]=1114, - ["afii10108"]=1115, - ["afii10109"]=1116, - ["afii10110"]=1118, - ["afii10145"]=1039, - ["afii10146"]=1122, - ["afii10147"]=1138, - ["afii10148"]=1140, - ["afii10193"]=1119, - ["afii10194"]=1123, - ["afii10195"]=1139, - ["afii10196"]=1141, - ["afii10846"]=1241, - ["afii208"]=8213, - ["afii299"]=8206, - ["afii300"]=8207, - ["afii301"]=8205, - ["afii57381"]=1642, - ["afii57388"]=1548, - ["afii57392"]=1632, - ["afii57393"]=1633, - ["afii57394"]=1634, - ["afii57395"]=1635, - ["afii57396"]=1636, - ["afii57397"]=1637, - ["afii57398"]=1638, - ["afii57399"]=1639, - ["afii57400"]=1640, - ["afii57401"]=1641, - ["afii57403"]=1563, - ["afii57407"]=1567, - ["afii57409"]=1569, - ["afii57410"]=1570, - ["afii57411"]=1571, - ["afii57412"]=1572, - ["afii57413"]=1573, - ["afii57414"]=1574, - ["afii57415"]=1575, - ["afii57416"]=1576, - ["afii57417"]=1577, - ["afii57418"]=1578, - ["afii57419"]=1579, - ["afii57420"]=1580, - ["afii57421"]=1581, - ["afii57422"]=1582, - ["afii57423"]=1583, - ["afii57424"]=1584, - ["afii57425"]=1585, - ["afii57426"]=1586, - ["afii57427"]=1587, - ["afii57428"]=1588, - ["afii57429"]=1589, - ["afii57430"]=1590, - ["afii57431"]=1591, - ["afii57432"]=1592, - ["afii57433"]=1593, - ["afii57434"]=1594, - ["afii57440"]=1600, - ["afii57441"]=1601, - ["afii57442"]=1602, - ["afii57443"]=1603, - ["afii57444"]=1604, - ["afii57445"]=1605, - ["afii57446"]=1606, - ["afii57448"]=1608, - ["afii57449"]=1609, - ["afii57450"]=1610, - 
["afii57451"]=1611, - ["afii57452"]=1612, - ["afii57453"]=1613, - ["afii57454"]=1614, - ["afii57455"]=1615, - ["afii57456"]=1616, - ["afii57457"]=1617, - ["afii57458"]=1618, - ["afii57470"]=1607, - ["afii57505"]=1700, - ["afii57506"]=1662, - ["afii57507"]=1670, - ["afii57508"]=1688, - ["afii57509"]=1711, - ["afii57511"]=1657, - ["afii57512"]=1672, - ["afii57513"]=1681, - ["afii57514"]=1722, - ["afii57519"]=1746, - ["afii57534"]=1749, - ["afii57636"]=8362, - ["afii57645"]=1470, - ["afii57658"]=1475, - ["afii57664"]=1488, - ["afii57665"]=1489, - ["afii57666"]=1490, - ["afii57667"]=1491, - ["afii57668"]=1492, - ["afii57669"]=1493, - ["afii57670"]=1494, - ["afii57671"]=1495, - ["afii57672"]=1496, - ["afii57673"]=1497, - ["afii57674"]=1498, - ["afii57675"]=1499, - ["afii57676"]=1500, - ["afii57677"]=1501, - ["afii57678"]=1502, - ["afii57679"]=1503, - ["afii57680"]=1504, - ["afii57681"]=1505, - ["afii57682"]=1506, - ["afii57683"]=1507, - ["afii57684"]=1508, - ["afii57685"]=1509, - ["afii57686"]=1510, - ["afii57687"]=1511, - ["afii57688"]=1512, - ["afii57689"]=1513, - ["afii57690"]=1514, - ["afii57694"]=64298, - ["afii57695"]=64299, - ["afii57700"]=64331, - ["afii57705"]=64287, - ["afii57716"]=1520, - ["afii57717"]=1521, - ["afii57718"]=1522, - ["afii57723"]=64309, - ["afii57793"]=1460, - ["afii57794"]=1461, - ["afii57795"]=1462, - ["afii57796"]=1467, - ["afii57797"]=1464, - ["afii57798"]=1463, - ["afii57799"]=1456, - ["afii57800"]=1458, - ["afii57801"]=1457, - ["afii57802"]=1459, - ["afii57803"]=1474, - ["afii57804"]=1473, - ["afii57806"]=1465, - ["afii57807"]=1468, - ["afii57839"]=1469, - ["afii57841"]=1471, - ["afii57842"]=1472, - ["afii57929"]=700, - ["afii61248"]=8453, - ["afii61289"]=8467, - ["afii61352"]=8470, - ["afii61573"]=8236, - ["afii61574"]=8237, - ["afii61575"]=8238, - ["afii61664"]=8204, - ["afii63167"]=1645, - ["afii64937"]=701, - ["agrave"]=224, - ["agujarati"]=2693, - ["agurmukhi"]=2565, - ["ahiragana"]=12354, - ["ahookabove"]=7843, - ["aibengali"]=2448, - ["aibopomofo"]=12574, - ["aideva"]=2320, - ["aiecyrillic"]=1237, - ["aigujarati"]=2704, - ["aigurmukhi"]=2576, - ["aimatragurmukhi"]=2632, - ["ainarabic"]=1593, - ["ainfinalarabic"]=65226, - ["aininitialarabic"]=65227, - ["ainmedialarabic"]=65228, - ["ainvertedbreve"]=515, - ["aivowelsignbengali"]=2504, - ["aivowelsigndeva"]=2376, - ["aivowelsigngujarati"]=2760, - ["akatakana"]=12450, - ["akatakanahalfwidth"]=65393, - ["akorean"]=12623, - ["alef"]=1488, - ["alefarabic"]=1575, - ["alefdageshhebrew"]=64304, - ["aleffinalarabic"]=65166, - ["alefhamzaabovearabic"]=1571, - ["alefhamzaabovefinalarabic"]=65156, - ["alefhamzabelowarabic"]=1573, - ["alefhamzabelowfinalarabic"]=65160, - ["alefhebrew"]=1488, - ["aleflamedhebrew"]=64335, - ["alefmaddaabovearabic"]=1570, - ["alefmaddaabovefinalarabic"]=65154, - ["alefmaksuraarabic"]=1609, - ["alefmaksurafinalarabic"]=65264, - ["alefmaksurainitialarabic"]=65267, - ["alefmaksuramedialarabic"]=65268, - ["alefpatahhebrew"]=64302, - ["alefqamatshebrew"]=64303, - ["aleph"]=8501, - ["allequal"]=8780, - ["alpha"]=945, - ["alphatonos"]=940, - ["amacron"]=257, - ["amonospace"]=65345, - ["ampersand"]=38, - ["ampersandmonospace"]=65286, - ["amsquare"]=13250, - ["anbopomofo"]=12578, - ["angbopomofo"]=12580, - ["angkhankhuthai"]=3674, - ["angle"]=8736, - ["anglebracketleft"]=12296, - ["anglebracketleftvertical"]=65087, - ["anglebracketright"]=12297, - ["anglebracketrightvertical"]=65088, - ["angleleft"]=9001, - ["angleright"]=9002, - ["angstrom"]=8491, - ["anoteleia"]=903, - ["anudattadeva"]=2386, - 
["anusvarabengali"]=2434, - ["anusvaradeva"]=2306, - ["anusvaragujarati"]=2690, - ["aogonek"]=261, - ["apaatosquare"]=13056, - ["aparen"]=9372, - ["apostrophearmenian"]=1370, - ["apostrophemod"]=700, - ["apple"]=63743, - ["approaches"]=8784, - ["approxequal"]=8776, - ["approxequalorimage"]=8786, - ["approximatelyequal"]=8773, - ["araeaekorean"]=12686, - ["araeakorean"]=12685, - ["arc"]=8978, - ["arighthalfring"]=7834, - ["aring"]=229, - ["aringacute"]=507, - ["aringbelow"]=7681, - ["arrowboth"]=8596, - ["arrowdashdown"]=8675, - ["arrowdashleft"]=8672, - ["arrowdashright"]=8674, - ["arrowdashup"]=8673, - ["arrowdblboth"]=8660, - ["arrowdbldown"]=8659, - ["arrowdblleft"]=8656, - ["arrowdblright"]=8658, - ["arrowdblup"]=8657, - ["arrowdown"]=8595, - ["arrowdownleft"]=8601, - ["arrowdownright"]=8600, - ["arrowdownwhite"]=8681, - ["arrowheaddownmod"]=709, - ["arrowheadleftmod"]=706, - ["arrowheadrightmod"]=707, - ["arrowheadupmod"]=708, - ["arrowleft"]=8592, - ["arrowleftdbl"]=8656, - ["arrowleftdblstroke"]=8653, - ["arrowleftoverright"]=8646, - ["arrowleftwhite"]=8678, - ["arrowright"]=8594, - ["arrowrightdblstroke"]=8655, - ["arrowrightheavy"]=10142, - ["arrowrightoverleft"]=8644, - ["arrowrightwhite"]=8680, - ["arrowtableft"]=8676, - ["arrowtabright"]=8677, - ["arrowup"]=8593, - ["arrowupdn"]=8597, - ["arrowupdnbse"]=8616, - ["arrowupdownbase"]=8616, - ["arrowupleft"]=8598, - ["arrowupleftofdown"]=8645, - ["arrowupright"]=8599, - ["arrowupwhite"]=8679, - ["asciicircum"]=94, - ["asciicircummonospace"]=65342, - ["asciitilde"]=126, - ["asciitildemonospace"]=65374, - ["ascript"]=593, - ["ascriptturned"]=594, - ["asmallhiragana"]=12353, - ["asmallkatakana"]=12449, - ["asmallkatakanahalfwidth"]=65383, - ["asterisk"]=42, - ["asteriskaltonearabic"]=1645, - ["asteriskarabic"]=1645, - ["asteriskmath"]=8727, - ["asteriskmonospace"]=65290, - ["asterisksmall"]=65121, - ["asterism"]=8258, - ["asymptoticallyequal"]=8771, - ["at"]=64, - ["atilde"]=227, - ["atmonospace"]=65312, - ["atsmall"]=65131, - ["aturned"]=592, - ["aubengali"]=2452, - ["aubopomofo"]=12576, - ["audeva"]=2324, - ["augujarati"]=2708, - ["augurmukhi"]=2580, - ["aulengthmarkbengali"]=2519, - ["aumatragurmukhi"]=2636, - ["auvowelsignbengali"]=2508, - ["auvowelsigndeva"]=2380, - ["auvowelsigngujarati"]=2764, - ["avagrahadeva"]=2365, - ["aybarmenian"]=1377, - ["ayin"]=1506, - ["ayinaltonehebrew"]=64288, - ["ayinhebrew"]=1506, - ["b"]=98, - ["babengali"]=2476, - ["backslash"]=92, - ["backslashmonospace"]=65340, - ["badeva"]=2348, - ["bagujarati"]=2732, - ["bagurmukhi"]=2604, - ["bahiragana"]=12400, - ["bahtthai"]=3647, - ["bakatakana"]=12496, - ["bar"]=124, - ["barmonospace"]=65372, - ["bbopomofo"]=12549, - ["bcircle"]=9425, - ["bdotaccent"]=7683, - ["bdotbelow"]=7685, - ["beamedsixteenthnotes"]=9836, - ["because"]=8757, - ["becyrillic"]=1073, - ["beharabic"]=1576, - ["behfinalarabic"]=65168, - ["behinitialarabic"]=65169, - ["behiragana"]=12409, - ["behmedialarabic"]=65170, - ["behmeeminitialarabic"]=64671, - ["behmeemisolatedarabic"]=64520, - ["behnoonfinalarabic"]=64621, - ["bekatakana"]=12505, - ["benarmenian"]=1378, - ["bet"]=1489, - ["beta"]=946, - ["betasymbolgreek"]=976, - ["betdagesh"]=64305, - ["betdageshhebrew"]=64305, - ["bethebrew"]=1489, - ["betrafehebrew"]=64332, - ["bhabengali"]=2477, - ["bhadeva"]=2349, - ["bhagujarati"]=2733, - ["bhagurmukhi"]=2605, - ["bhook"]=595, - ["bihiragana"]=12403, - ["bikatakana"]=12499, - ["bilabialclick"]=664, - ["bindigurmukhi"]=2562, - ["birusquare"]=13105, - ["blackcircle"]=9679, - 
["blackdiamond"]=9670, - ["blackdownpointingtriangle"]=9660, - ["blackleftpointingpointer"]=9668, - ["blackleftpointingtriangle"]=9664, - ["blacklenticularbracketleft"]=12304, - ["blacklenticularbracketleftvertical"]=65083, - ["blacklenticularbracketright"]=12305, - ["blacklenticularbracketrightvertical"]=65084, - ["blacklowerlefttriangle"]=9699, - ["blacklowerrighttriangle"]=9698, - ["blackrectangle"]=9644, - ["blackrightpointingpointer"]=9658, - ["blackrightpointingtriangle"]=9654, - ["blacksmallsquare"]=9642, - ["blacksmilingface"]=9787, - ["blacksquare"]=9632, - ["blackstar"]=9733, - ["blackupperlefttriangle"]=9700, - ["blackupperrighttriangle"]=9701, - ["blackuppointingsmalltriangle"]=9652, - ["blackuppointingtriangle"]=9650, - ["blank"]=9251, - ["blinebelow"]=7687, - ["block"]=9608, - ["bmonospace"]=65346, - ["bobaimaithai"]=3610, - ["bohiragana"]=12412, - ["bokatakana"]=12508, - ["bparen"]=9373, - ["bqsquare"]=13251, - ["braceleft"]=123, - ["braceleftmonospace"]=65371, - ["braceleftsmall"]=65115, - ["braceleftvertical"]=65079, - ["braceright"]=125, - ["bracerightmonospace"]=65373, - ["bracerightsmall"]=65116, - ["bracerightvertical"]=65080, - ["bracketleft"]=91, - ["bracketleftmonospace"]=65339, - ["bracketright"]=93, - ["bracketrightmonospace"]=65341, - ["breve"]=728, - ["brevebelowcmb"]=814, - ["brevecmb"]=774, - ["breveinvertedbelowcmb"]=815, - ["breveinvertedcmb"]=785, - ["breveinverteddoublecmb"]=865, - ["bridgebelowcmb"]=810, - ["bridgeinvertedbelowcmb"]=826, - ["brokenbar"]=166, - ["bstroke"]=384, - ["btopbar"]=387, - ["buhiragana"]=12406, - ["bukatakana"]=12502, - ["bullet"]=8226, - ["bulletinverse"]=9688, - ["bulletoperator"]=8729, - ["bullseye"]=9678, - ["c"]=99, - ["caarmenian"]=1390, - ["cabengali"]=2458, - ["cacute"]=263, - ["cadeva"]=2330, - ["cagujarati"]=2714, - ["cagurmukhi"]=2586, - ["calsquare"]=13192, - ["candrabindubengali"]=2433, - ["candrabinducmb"]=784, - ["candrabindudeva"]=2305, - ["candrabindugujarati"]=2689, - ["capslock"]=8682, - ["careof"]=8453, - ["caron"]=711, - ["caronbelowcmb"]=812, - ["caroncmb"]=780, - ["carriagereturn"]=8629, - ["cbopomofo"]=12568, - ["ccaron"]=269, - ["ccedilla"]=231, - ["ccedillaacute"]=7689, - ["ccircle"]=9426, - ["ccircumflex"]=265, - ["ccurl"]=597, - ["cdot"]=267, - ["cdotaccent"]=267, - ["cdsquare"]=13253, - ["cedilla"]=184, - ["cedillacmb"]=807, - ["cent"]=162, - ["centigrade"]=8451, - ["centmonospace"]=65504, - ["chaarmenian"]=1401, - ["chabengali"]=2459, - ["chadeva"]=2331, - ["chagujarati"]=2715, - ["chagurmukhi"]=2587, - ["chbopomofo"]=12564, - ["cheabkhasiancyrillic"]=1213, - ["checkmark"]=10003, - ["checyrillic"]=1095, - ["chedescenderabkhasiancyrillic"]=1215, - ["chedescendercyrillic"]=1207, - ["chedieresiscyrillic"]=1269, - ["cheharmenian"]=1395, - ["chekhakassiancyrillic"]=1228, - ["cheverticalstrokecyrillic"]=1209, - ["chi"]=967, - ["chieuchacirclekorean"]=12919, - ["chieuchaparenkorean"]=12823, - ["chieuchcirclekorean"]=12905, - ["chieuchkorean"]=12618, - ["chieuchparenkorean"]=12809, - ["chochangthai"]=3594, - ["chochanthai"]=3592, - ["chochingthai"]=3593, - ["chochoethai"]=3596, - ["chook"]=392, - ["cieucacirclekorean"]=12918, - ["cieucaparenkorean"]=12822, - ["cieuccirclekorean"]=12904, - ["cieuckorean"]=12616, - ["cieucparenkorean"]=12808, - ["cieucuparenkorean"]=12828, - ["circle"]=9675, - ["circlemultiply"]=8855, - ["circleot"]=8857, - ["circleplus"]=8853, - ["circlepostalmark"]=12342, - ["circlewithlefthalfblack"]=9680, - ["circlewithrighthalfblack"]=9681, - ["circumflex"]=710, - 
["circumflexbelowcmb"]=813, - ["circumflexcmb"]=770, - ["clear"]=8999, - ["clickalveolar"]=450, - ["clickdental"]=448, - ["clicklateral"]=449, - ["clickretroflex"]=451, - ["club"]=9827, - ["clubsuitblack"]=9827, - ["clubsuitwhite"]=9831, - ["cmcubedsquare"]=13220, - ["cmonospace"]=65347, - ["cmsquaredsquare"]=13216, - ["coarmenian"]=1409, - ["colon"]=58, - ["colonmonetary"]=8353, - ["colonmonospace"]=65306, - ["colonsign"]=8353, - ["colonsmall"]=65109, - ["colontriangularhalfmod"]=721, - ["colontriangularmod"]=720, - ["comma"]=44, - ["commaabovecmb"]=787, - ["commaaboverightcmb"]=789, - ["commaarabic"]=1548, - ["commaarmenian"]=1373, - ["commamonospace"]=65292, - ["commareversedabovecmb"]=788, - ["commareversedmod"]=701, - ["commasmall"]=65104, - ["commaturnedabovecmb"]=786, - ["commaturnedmod"]=699, - ["compass"]=9788, - ["congruent"]=8773, - ["contourintegral"]=8750, - ["control"]=8963, - ["controlACK"]=6, - ["controlBEL"]=7, - ["controlBS"]=8, - ["controlCAN"]=24, - ["controlCR"]=13, - ["controlDC1"]=17, - ["controlDC2"]=18, - ["controlDC3"]=19, - ["controlDC4"]=20, - ["controlDEL"]=127, - ["controlDLE"]=16, - ["controlEM"]=25, - ["controlENQ"]=5, - ["controlEOT"]=4, - ["controlESC"]=27, - ["controlETB"]=23, - ["controlETX"]=3, - ["controlFF"]=12, - ["controlFS"]=28, - ["controlGS"]=29, - ["controlHT"]=9, - ["controlLF"]=10, - ["controlNAK"]=21, - ["controlRS"]=30, - ["controlSI"]=15, - ["controlSO"]=14, - ["controlSOT"]=2, - ["controlSTX"]=1, - ["controlSUB"]=26, - ["controlSYN"]=22, - ["controlUS"]=31, - ["controlVT"]=11, - ["copyright"]=169, - ["cornerbracketleft"]=12300, - ["cornerbracketlefthalfwidth"]=65378, - ["cornerbracketleftvertical"]=65089, - ["cornerbracketright"]=12301, - ["cornerbracketrighthalfwidth"]=65379, - ["cornerbracketrightvertical"]=65090, - ["corporationsquare"]=13183, - ["cosquare"]=13255, - ["coverkgsquare"]=13254, - ["cparen"]=9374, - ["cruzeiro"]=8354, - ["cstretched"]=663, - ["curlyand"]=8911, - ["curlyor"]=8910, - ["currency"]=164, - ["d"]=100, - ["daarmenian"]=1380, - ["dabengali"]=2470, - ["dadarabic"]=1590, - ["dadeva"]=2342, - ["dadfinalarabic"]=65214, - ["dadinitialarabic"]=65215, - ["dadmedialarabic"]=65216, - ["dagesh"]=1468, - ["dageshhebrew"]=1468, - ["dagger"]=8224, - ["daggerdbl"]=8225, - ["dagujarati"]=2726, - ["dagurmukhi"]=2598, - ["dahiragana"]=12384, - ["dakatakana"]=12480, - ["dalarabic"]=1583, - ["dalet"]=1491, - ["daletdagesh"]=64307, - ["daletdageshhebrew"]=64307, - ["dalethatafpatah"]=1491, - ["dalethatafpatahhebrew"]=1491, - ["dalethatafsegol"]=1491, - ["dalethatafsegolhebrew"]=1491, - ["dalethebrew"]=1491, - ["dalethiriq"]=1491, - ["dalethiriqhebrew"]=1491, - ["daletholam"]=1491, - ["daletholamhebrew"]=1491, - ["daletpatah"]=1491, - ["daletpatahhebrew"]=1491, - ["daletqamats"]=1491, - ["daletqamatshebrew"]=1491, - ["daletqubuts"]=1491, - ["daletqubutshebrew"]=1491, - ["daletsegol"]=1491, - ["daletsegolhebrew"]=1491, - ["daletsheva"]=1491, - ["daletshevahebrew"]=1491, - ["dalettsere"]=1491, - ["dalettserehebrew"]=1491, - ["dalfinalarabic"]=65194, - ["dammaarabic"]=1615, - ["dammalowarabic"]=1615, - ["dammatanaltonearabic"]=1612, - ["dammatanarabic"]=1612, - ["danda"]=2404, - ["dargahebrew"]=1447, - ["dargalefthebrew"]=1447, - ["dasiapneumatacyrilliccmb"]=1157, - ["dblanglebracketleft"]=12298, - ["dblanglebracketleftvertical"]=65085, - ["dblanglebracketright"]=12299, - ["dblanglebracketrightvertical"]=65086, - ["dblarchinvertedbelowcmb"]=811, - ["dblarrowleft"]=8660, - ["dblarrowright"]=8658, - ["dbldanda"]=2405, - ["dblgravecmb"]=783, 
- ["dblintegral"]=8748, - ["dbllowline"]=8215, - ["dbllowlinecmb"]=819, - ["dbloverlinecmb"]=831, - ["dblprimemod"]=698, - ["dblverticalbar"]=8214, - ["dblverticallineabovecmb"]=782, - ["dbopomofo"]=12553, - ["dbsquare"]=13256, - ["dcaron"]=271, - ["dcedilla"]=7697, - ["dcircle"]=9427, - ["dcircumflexbelow"]=7699, - ["dcroat"]=273, - ["ddabengali"]=2465, - ["ddadeva"]=2337, - ["ddagujarati"]=2721, - ["ddagurmukhi"]=2593, - ["ddalarabic"]=1672, - ["ddalfinalarabic"]=64393, - ["dddhadeva"]=2396, - ["ddhabengali"]=2466, - ["ddhadeva"]=2338, - ["ddhagujarati"]=2722, - ["ddhagurmukhi"]=2594, - ["ddotaccent"]=7691, - ["ddotbelow"]=7693, - ["decimalseparatorarabic"]=1643, - ["decimalseparatorpersian"]=1643, - ["decyrillic"]=1076, - ["degree"]=176, - ["dehihebrew"]=1453, - ["dehiragana"]=12391, - ["deicoptic"]=1007, - ["dekatakana"]=12487, - ["deleteleft"]=9003, - ["deleteright"]=8998, - ["delta"]=948, - ["deltaturned"]=397, - ["denominatorminusonenumeratorbengali"]=2552, - ["dezh"]=676, - ["dhabengali"]=2471, - ["dhadeva"]=2343, - ["dhagujarati"]=2727, - ["dhagurmukhi"]=2599, - ["dhook"]=599, - ["dialytikatonos"]=901, - ["dialytikatonoscmb"]=836, - ["diamond"]=9830, - ["diamondsuitwhite"]=9826, - ["dieresis"]=168, - ["dieresisbelowcmb"]=804, - ["dieresiscmb"]=776, - ["dieresistonos"]=901, - ["dihiragana"]=12386, - ["dikatakana"]=12482, - ["dittomark"]=12291, - ["divide"]=247, - ["divides"]=8739, - ["divisionslash"]=8725, - ["djecyrillic"]=1106, - ["dkshade"]=9619, - ["dlinebelow"]=7695, - ["dlsquare"]=13207, - ["dmacron"]=273, - ["dmonospace"]=65348, - ["dnblock"]=9604, - ["dochadathai"]=3598, - ["dodekthai"]=3604, - ["dohiragana"]=12393, - ["dokatakana"]=12489, - ["dollar"]=36, - ["dollarmonospace"]=65284, - ["dollarsmall"]=65129, - ["dong"]=8363, - ["dorusquare"]=13094, - ["dotaccent"]=729, - ["dotaccentcmb"]=775, - ["dotbelowcmb"]=803, - ["dotbelowcomb"]=803, - ["dotkatakana"]=12539, - ["dotlessi"]=305, - ["dotlessjstrokehook"]=644, - ["dotmath"]=8901, - ["dottedcircle"]=9676, - ["doubleyodpatah"]=64287, - ["doubleyodpatahhebrew"]=64287, - ["downtackbelowcmb"]=798, - ["downtackmod"]=725, - ["dparen"]=9375, - ["dtail"]=598, - ["dtopbar"]=396, - ["duhiragana"]=12389, - ["dukatakana"]=12485, - ["dz"]=499, - ["dzaltone"]=675, - ["dzcaron"]=454, - ["dzcurl"]=677, - ["dzeabkhasiancyrillic"]=1249, - ["dzecyrillic"]=1109, - ["dzhecyrillic"]=1119, - ["e"]=101, - ["eacute"]=233, - ["earth"]=9793, - ["ebengali"]=2447, - ["ebopomofo"]=12572, - ["ebreve"]=277, - ["ecandradeva"]=2317, - ["ecandragujarati"]=2701, - ["ecandravowelsigndeva"]=2373, - ["ecandravowelsigngujarati"]=2757, - ["ecaron"]=283, - ["ecedillabreve"]=7709, - ["echarmenian"]=1381, - ["echyiwnarmenian"]=1415, - ["ecircle"]=9428, - ["ecircumflex"]=234, - ["ecircumflexacute"]=7871, - ["ecircumflexbelow"]=7705, - ["ecircumflexdotbelow"]=7879, - ["ecircumflexgrave"]=7873, - ["ecircumflexhookabove"]=7875, - ["ecircumflextilde"]=7877, - ["ecyrillic"]=1108, - ["edblgrave"]=517, - ["edeva"]=2319, - ["edieresis"]=235, - ["edot"]=279, - ["edotaccent"]=279, - ["edotbelow"]=7865, - ["eegurmukhi"]=2575, - ["eematragurmukhi"]=2631, - ["efcyrillic"]=1092, - ["egrave"]=232, - ["egujarati"]=2703, - ["eharmenian"]=1383, - ["ehbopomofo"]=12573, - ["ehiragana"]=12360, - ["ehookabove"]=7867, - ["eibopomofo"]=12575, - ["eight"]=56, - ["eightarabic"]=1640, - ["eightbengali"]=2542, - ["eightcircle"]=9319, - ["eightcircleinversesansserif"]=10129, - ["eightdeva"]=2414, - ["eighteencircle"]=9329, - ["eighteenparen"]=9349, - ["eighteenperiod"]=9369, - 
["eightgujarati"]=2798, - ["eightgurmukhi"]=2670, - ["eighthackarabic"]=1640, - ["eighthangzhou"]=12328, - ["eighthnotebeamed"]=9835, - ["eightideographicparen"]=12839, - ["eightinferior"]=8328, - ["eightmonospace"]=65304, - ["eightparen"]=9339, - ["eightperiod"]=9359, - ["eightpersian"]=1784, - ["eightroman"]=8567, - ["eightsuperior"]=8312, - ["eightthai"]=3672, - ["einvertedbreve"]=519, - ["eiotifiedcyrillic"]=1125, - ["ekatakana"]=12456, - ["ekatakanahalfwidth"]=65396, - ["ekonkargurmukhi"]=2676, - ["ekorean"]=12628, - ["elcyrillic"]=1083, - ["element"]=8712, - ["elevencircle"]=9322, - ["elevenparen"]=9342, - ["elevenperiod"]=9362, - ["elevenroman"]=8570, - ["ellipsis"]=8230, - ["ellipsisvertical"]=8942, - ["emacron"]=275, - ["emacronacute"]=7703, - ["emacrongrave"]=7701, - ["emcyrillic"]=1084, - ["emdash"]=8212, - ["emdashvertical"]=65073, - ["emonospace"]=65349, - ["emphasismarkarmenian"]=1371, - ["emptyset"]=8709, - ["enbopomofo"]=12579, - ["encyrillic"]=1085, - ["endash"]=8211, - ["endashvertical"]=65074, - ["endescendercyrillic"]=1187, - ["eng"]=331, - ["engbopomofo"]=12581, - ["enghecyrillic"]=1189, - ["enhookcyrillic"]=1224, - ["enspace"]=8194, - ["eogonek"]=281, - ["eokorean"]=12627, - ["eopen"]=603, - ["eopenclosed"]=666, - ["eopenreversed"]=604, - ["eopenreversedclosed"]=606, - ["eopenreversedhook"]=605, - ["eparen"]=9376, - ["epsilon"]=949, - ["epsilontonos"]=941, - ["equal"]=61, - ["equalmonospace"]=65309, - ["equalsmall"]=65126, - ["equalsuperior"]=8316, - ["equivalence"]=8801, - ["erbopomofo"]=12582, - ["ercyrillic"]=1088, - ["ereversed"]=600, - ["ereversedcyrillic"]=1101, - ["escyrillic"]=1089, - ["esdescendercyrillic"]=1195, - ["esh"]=643, - ["eshcurl"]=646, - ["eshortdeva"]=2318, - ["eshortvowelsigndeva"]=2374, - ["eshreversedloop"]=426, - ["eshsquatreversed"]=645, - ["esmallhiragana"]=12359, - ["esmallkatakana"]=12455, - ["esmallkatakanahalfwidth"]=65386, - ["estimated"]=8494, - ["eta"]=951, - ["etarmenian"]=1384, - ["etatonos"]=942, - ["eth"]=240, - ["etilde"]=7869, - ["etildebelow"]=7707, - ["etnahtafoukhhebrew"]=1425, - ["etnahtafoukhlefthebrew"]=1425, - ["etnahtahebrew"]=1425, - ["etnahtalefthebrew"]=1425, - ["eturned"]=477, - ["eukorean"]=12641, - ["euro"]=8364, - ["evowelsignbengali"]=2503, - ["evowelsigndeva"]=2375, - ["evowelsigngujarati"]=2759, - ["exclam"]=33, - ["exclamarmenian"]=1372, - ["exclamdbl"]=8252, - ["exclamdown"]=161, - ["exclammonospace"]=65281, - ["existential"]=8707, - ["ezh"]=658, - ["ezhcaron"]=495, - ["ezhcurl"]=659, - ["ezhreversed"]=441, - ["ezhtail"]=442, - ["f"]=102, - ["fadeva"]=2398, - ["fagurmukhi"]=2654, - ["fahrenheit"]=8457, - ["fathaarabic"]=1614, - ["fathalowarabic"]=1614, - ["fathatanarabic"]=1611, - ["fbopomofo"]=12552, - ["fcircle"]=9429, - ["fdotaccent"]=7711, - ["feharabic"]=1601, - ["feharmenian"]=1414, - ["fehfinalarabic"]=65234, - ["fehinitialarabic"]=65235, - ["fehmedialarabic"]=65236, - ["feicoptic"]=997, - ["female"]=9792, - ["ff"]=64256, - ["ffi"]=64259, - ["ffl"]=64260, - ["fi"]=64257, - ["fifteencircle"]=9326, - ["fifteenparen"]=9346, - ["fifteenperiod"]=9366, - ["figuredash"]=8210, - ["filledbox"]=9632, - ["filledrect"]=9644, - ["finalkaf"]=1498, - ["finalkafdagesh"]=64314, - ["finalkafdageshhebrew"]=64314, - ["finalkafhebrew"]=1498, - ["finalkafqamats"]=1498, - ["finalkafqamatshebrew"]=1498, - ["finalkafsheva"]=1498, - ["finalkafshevahebrew"]=1498, - ["finalmem"]=1501, - ["finalmemhebrew"]=1501, - ["finalnun"]=1503, - ["finalnunhebrew"]=1503, - ["finalpe"]=1507, - ["finalpehebrew"]=1507, - ["finaltsadi"]=1509, - 
["finaltsadihebrew"]=1509, - ["firsttonechinese"]=713, - ["fisheye"]=9673, - ["fitacyrillic"]=1139, - ["five"]=53, - ["fivearabic"]=1637, - ["fivebengali"]=2539, - ["fivecircle"]=9316, - ["fivecircleinversesansserif"]=10126, - ["fivedeva"]=2411, - ["fiveeighths"]=8541, - ["fivegujarati"]=2795, - ["fivegurmukhi"]=2667, - ["fivehackarabic"]=1637, - ["fivehangzhou"]=12325, - ["fiveideographicparen"]=12836, - ["fiveinferior"]=8325, - ["fivemonospace"]=65301, - ["fiveparen"]=9336, - ["fiveperiod"]=9356, - ["fivepersian"]=1781, - ["fiveroman"]=8564, - ["fivesuperior"]=8309, - ["fivethai"]=3669, - ["fl"]=64258, - ["florin"]=402, - ["fmonospace"]=65350, - ["fmsquare"]=13209, - ["fofanthai"]=3615, - ["fofathai"]=3613, - ["fongmanthai"]=3663, - ["forall"]=8704, - ["four"]=52, - ["fourarabic"]=1636, - ["fourbengali"]=2538, - ["fourcircle"]=9315, - ["fourcircleinversesansserif"]=10125, - ["fourdeva"]=2410, - ["fourgujarati"]=2794, - ["fourgurmukhi"]=2666, - ["fourhackarabic"]=1636, - ["fourhangzhou"]=12324, - ["fourideographicparen"]=12835, - ["fourinferior"]=8324, - ["fourmonospace"]=65300, - ["fournumeratorbengali"]=2551, - ["fourparen"]=9335, - ["fourperiod"]=9355, - ["fourpersian"]=1780, - ["fourroman"]=8563, - ["foursuperior"]=8308, - ["fourteencircle"]=9325, - ["fourteenparen"]=9345, - ["fourteenperiod"]=9365, - ["fourthai"]=3668, - ["fourthtonechinese"]=715, - ["fparen"]=9377, - ["fraction"]=8260, - ["franc"]=8355, - ["g"]=103, - ["gabengali"]=2455, - ["gacute"]=501, - ["gadeva"]=2327, - ["gafarabic"]=1711, - ["gaffinalarabic"]=64403, - ["gafinitialarabic"]=64404, - ["gafmedialarabic"]=64405, - ["gagujarati"]=2711, - ["gagurmukhi"]=2583, - ["gahiragana"]=12364, - ["gakatakana"]=12460, - ["gamma"]=947, - ["gammalatinsmall"]=611, - ["gammasuperior"]=736, - ["gangiacoptic"]=1003, - ["gbopomofo"]=12557, - ["gbreve"]=287, - ["gcaron"]=487, - ["gcedilla"]=291, - ["gcircle"]=9430, - ["gcircumflex"]=285, - ["gcommaaccent"]=291, - ["gdot"]=289, - ["gdotaccent"]=289, - ["gecyrillic"]=1075, - ["gehiragana"]=12370, - ["gekatakana"]=12466, - ["geometricallyequal"]=8785, - ["gereshaccenthebrew"]=1436, - ["gereshhebrew"]=1523, - ["gereshmuqdamhebrew"]=1437, - ["germandbls"]=223, - ["gershayimaccenthebrew"]=1438, - ["gershayimhebrew"]=1524, - ["getamark"]=12307, - ["ghabengali"]=2456, - ["ghadarmenian"]=1394, - ["ghadeva"]=2328, - ["ghagujarati"]=2712, - ["ghagurmukhi"]=2584, - ["ghainarabic"]=1594, - ["ghainfinalarabic"]=65230, - ["ghaininitialarabic"]=65231, - ["ghainmedialarabic"]=65232, - ["ghemiddlehookcyrillic"]=1173, - ["ghestrokecyrillic"]=1171, - ["gheupturncyrillic"]=1169, - ["ghhadeva"]=2394, - ["ghhagurmukhi"]=2650, - ["ghook"]=608, - ["ghzsquare"]=13203, - ["gihiragana"]=12366, - ["gikatakana"]=12462, - ["gimarmenian"]=1379, - ["gimel"]=1490, - ["gimeldagesh"]=64306, - ["gimeldageshhebrew"]=64306, - ["gimelhebrew"]=1490, - ["gjecyrillic"]=1107, - ["glottalinvertedstroke"]=446, - ["glottalstop"]=660, - ["glottalstopinverted"]=662, - ["glottalstopmod"]=704, - ["glottalstopreversed"]=661, - ["glottalstopreversedmod"]=705, - ["glottalstopreversedsuperior"]=740, - ["glottalstopstroke"]=673, - ["glottalstopstrokereversed"]=674, - ["gmacron"]=7713, - ["gmonospace"]=65351, - ["gohiragana"]=12372, - ["gokatakana"]=12468, - ["gparen"]=9378, - ["gpasquare"]=13228, - ["gradient"]=8711, - ["grave"]=96, - ["gravebelowcmb"]=790, - ["gravecmb"]=768, - ["gravecomb"]=768, - ["gravedeva"]=2387, - ["gravelowmod"]=718, - ["gravemonospace"]=65344, - ["gravetonecmb"]=832, - ["greater"]=62, - ["greaterequal"]=8805, - 
["greaterequalorless"]=8923, - ["greatermonospace"]=65310, - ["greaterorequivalent"]=8819, - ["greaterorless"]=8823, - ["greateroverequal"]=8807, - ["greatersmall"]=65125, - ["gscript"]=609, - ["gstroke"]=485, - ["guhiragana"]=12368, - ["guillemotleft"]=171, - ["guillemotright"]=187, - ["guilsinglleft"]=8249, - ["guilsinglright"]=8250, - ["gukatakana"]=12464, - ["guramusquare"]=13080, - ["gysquare"]=13257, - ["h"]=104, - ["haabkhasiancyrillic"]=1193, - ["haaltonearabic"]=1729, - ["habengali"]=2489, - ["hadescendercyrillic"]=1203, - ["hadeva"]=2361, - ["hagujarati"]=2745, - ["hagurmukhi"]=2617, - ["haharabic"]=1581, - ["hahfinalarabic"]=65186, - ["hahinitialarabic"]=65187, - ["hahiragana"]=12399, - ["hahmedialarabic"]=65188, - ["haitusquare"]=13098, - ["hakatakana"]=12495, - ["hakatakanahalfwidth"]=65418, - ["halantgurmukhi"]=2637, - ["hamzaarabic"]=1569, - ["hamzadammaarabic"]=1569, - ["hamzadammatanarabic"]=1569, - ["hamzafathaarabic"]=1569, - ["hamzafathatanarabic"]=1569, - ["hamzalowarabic"]=1569, - ["hamzalowkasraarabic"]=1569, - ["hamzalowkasratanarabic"]=1569, - ["hamzasukunarabic"]=1569, - ["hangulfiller"]=12644, - ["hardsigncyrillic"]=1098, - ["harpoonleftbarbup"]=8636, - ["harpoonrightbarbup"]=8640, - ["hasquare"]=13258, - ["hatafpatah"]=1458, - ["hatafpatah16"]=1458, - ["hatafpatah23"]=1458, - ["hatafpatah2f"]=1458, - ["hatafpatahhebrew"]=1458, - ["hatafpatahnarrowhebrew"]=1458, - ["hatafpatahquarterhebrew"]=1458, - ["hatafpatahwidehebrew"]=1458, - ["hatafqamats"]=1459, - ["hatafqamats1b"]=1459, - ["hatafqamats28"]=1459, - ["hatafqamats34"]=1459, - ["hatafqamatshebrew"]=1459, - ["hatafqamatsnarrowhebrew"]=1459, - ["hatafqamatsquarterhebrew"]=1459, - ["hatafqamatswidehebrew"]=1459, - ["hatafsegol"]=1457, - ["hatafsegol17"]=1457, - ["hatafsegol24"]=1457, - ["hatafsegol30"]=1457, - ["hatafsegolhebrew"]=1457, - ["hatafsegolnarrowhebrew"]=1457, - ["hatafsegolquarterhebrew"]=1457, - ["hatafsegolwidehebrew"]=1457, - ["hbar"]=295, - ["hbopomofo"]=12559, - ["hbrevebelow"]=7723, - ["hcedilla"]=7721, - ["hcircle"]=9431, - ["hcircumflex"]=293, - ["hdieresis"]=7719, - ["hdotaccent"]=7715, - ["hdotbelow"]=7717, - ["he"]=1492, - ["heart"]=9829, - ["heartsuitblack"]=9829, - ["heartsuitwhite"]=9825, - ["hedagesh"]=64308, - ["hedageshhebrew"]=64308, - ["hehaltonearabic"]=1729, - ["heharabic"]=1607, - ["hehebrew"]=1492, - ["hehfinalaltonearabic"]=64423, - ["hehfinalalttwoarabic"]=65258, - ["hehfinalarabic"]=65258, - ["hehhamzaabovefinalarabic"]=64421, - ["hehhamzaaboveisolatedarabic"]=64420, - ["hehinitialaltonearabic"]=64424, - ["hehinitialarabic"]=65259, - ["hehiragana"]=12408, - ["hehmedialaltonearabic"]=64425, - ["hehmedialarabic"]=65260, - ["heiseierasquare"]=13179, - ["hekatakana"]=12504, - ["hekatakanahalfwidth"]=65421, - ["hekutaarusquare"]=13110, - ["henghook"]=615, - ["herutusquare"]=13113, - ["het"]=1495, - ["hethebrew"]=1495, - ["hhook"]=614, - ["hhooksuperior"]=689, - ["hieuhacirclekorean"]=12923, - ["hieuhaparenkorean"]=12827, - ["hieuhcirclekorean"]=12909, - ["hieuhkorean"]=12622, - ["hieuhparenkorean"]=12813, - ["hihiragana"]=12402, - ["hikatakana"]=12498, - ["hikatakanahalfwidth"]=65419, - ["hiriq"]=1460, - ["hiriq14"]=1460, - ["hiriq21"]=1460, - ["hiriq2d"]=1460, - ["hiriqhebrew"]=1460, - ["hiriqnarrowhebrew"]=1460, - ["hiriqquarterhebrew"]=1460, - ["hiriqwidehebrew"]=1460, - ["hlinebelow"]=7830, - ["hmonospace"]=65352, - ["hoarmenian"]=1392, - ["hohipthai"]=3627, - ["hohiragana"]=12411, - ["hokatakana"]=12507, - ["hokatakanahalfwidth"]=65422, - ["holam"]=1465, - ["holam19"]=1465, 
- ["holam26"]=1465, - ["holam32"]=1465, - ["holamhebrew"]=1465, - ["holamnarrowhebrew"]=1465, - ["holamquarterhebrew"]=1465, - ["holamwidehebrew"]=1465, - ["honokhukthai"]=3630, - ["hookabovecomb"]=777, - ["hookcmb"]=777, - ["hookpalatalizedbelowcmb"]=801, - ["hookretroflexbelowcmb"]=802, - ["hoonsquare"]=13122, - ["horicoptic"]=1001, - ["horizontalbar"]=8213, - ["horncmb"]=795, - ["hotsprings"]=9832, - ["house"]=8962, - ["hparen"]=9379, - ["hsuperior"]=688, - ["hturned"]=613, - ["huhiragana"]=12405, - ["huiitosquare"]=13107, - ["hukatakana"]=12501, - ["hukatakanahalfwidth"]=65420, - ["hungarumlaut"]=733, - ["hungarumlautcmb"]=779, - ["hv"]=405, - ["hyphen"]=45, - ["hyphenmonospace"]=65293, - ["hyphensmall"]=65123, - ["hyphentwo"]=8208, - ["i"]=105, - ["iacute"]=237, - ["iacyrillic"]=1103, - ["ibengali"]=2439, - ["ibopomofo"]=12583, - ["ibreve"]=301, - ["icaron"]=464, - ["icircle"]=9432, - ["icircumflex"]=238, - ["icyrillic"]=1110, - ["idblgrave"]=521, - ["ideographearthcircle"]=12943, - ["ideographfirecircle"]=12939, - ["ideographicallianceparen"]=12863, - ["ideographiccallparen"]=12858, - ["ideographiccentrecircle"]=12965, - ["ideographicclose"]=12294, - ["ideographiccomma"]=12289, - ["ideographiccommaleft"]=65380, - ["ideographiccongratulationparen"]=12855, - ["ideographiccorrectcircle"]=12963, - ["ideographicearthparen"]=12847, - ["ideographicenterpriseparen"]=12861, - ["ideographicexcellentcircle"]=12957, - ["ideographicfestivalparen"]=12864, - ["ideographicfinancialcircle"]=12950, - ["ideographicfinancialparen"]=12854, - ["ideographicfireparen"]=12843, - ["ideographichaveparen"]=12850, - ["ideographichighcircle"]=12964, - ["ideographiciterationmark"]=12293, - ["ideographiclaborcircle"]=12952, - ["ideographiclaborparen"]=12856, - ["ideographicleftcircle"]=12967, - ["ideographiclowcircle"]=12966, - ["ideographicmedicinecircle"]=12969, - ["ideographicmetalparen"]=12846, - ["ideographicmoonparen"]=12842, - ["ideographicnameparen"]=12852, - ["ideographicperiod"]=12290, - ["ideographicprintcircle"]=12958, - ["ideographicreachparen"]=12867, - ["ideographicrepresentparen"]=12857, - ["ideographicresourceparen"]=12862, - ["ideographicrightcircle"]=12968, - ["ideographicsecretcircle"]=12953, - ["ideographicselfparen"]=12866, - ["ideographicsocietyparen"]=12851, - ["ideographicspace"]=12288, - ["ideographicspecialparen"]=12853, - ["ideographicstockparen"]=12849, - ["ideographicstudyparen"]=12859, - ["ideographicsunparen"]=12848, - ["ideographicsuperviseparen"]=12860, - ["ideographicwaterparen"]=12844, - ["ideographicwoodparen"]=12845, - ["ideographiczero"]=12295, - ["ideographmetalcircle"]=12942, - ["ideographmooncircle"]=12938, - ["ideographnamecircle"]=12948, - ["ideographsuncircle"]=12944, - ["ideographwatercircle"]=12940, - ["ideographwoodcircle"]=12941, - ["ideva"]=2311, - ["idieresis"]=239, - ["idieresisacute"]=7727, - ["idieresiscyrillic"]=1253, - ["idotbelow"]=7883, - ["iebrevecyrillic"]=1239, - ["iecyrillic"]=1077, - ["ieungacirclekorean"]=12917, - ["ieungaparenkorean"]=12821, - ["ieungcirclekorean"]=12903, - ["ieungkorean"]=12615, - ["ieungparenkorean"]=12807, - ["igrave"]=236, - ["igujarati"]=2695, - ["igurmukhi"]=2567, - ["ihiragana"]=12356, - ["ihookabove"]=7881, - ["iibengali"]=2440, - ["iicyrillic"]=1080, - ["iideva"]=2312, - ["iigujarati"]=2696, - ["iigurmukhi"]=2568, - ["iimatragurmukhi"]=2624, - ["iinvertedbreve"]=523, - ["iishortcyrillic"]=1081, - ["iivowelsignbengali"]=2496, - ["iivowelsigndeva"]=2368, - ["iivowelsigngujarati"]=2752, - ["ij"]=307, - ["ikatakana"]=12452, - 
["ikatakanahalfwidth"]=65394, - ["ikorean"]=12643, - ["ilde"]=732, - ["iluyhebrew"]=1452, - ["imacron"]=299, - ["imacroncyrillic"]=1251, - ["imageorapproximatelyequal"]=8787, - ["imatragurmukhi"]=2623, - ["imonospace"]=65353, - ["increment"]=8710, - ["infinity"]=8734, - ["iniarmenian"]=1387, - ["integral"]=8747, - ["integralbottom"]=8993, - ["integralbt"]=8993, - ["integraltop"]=8992, - ["integraltp"]=8992, - ["intersection"]=8745, - ["intisquare"]=13061, - ["invbullet"]=9688, - ["invcircle"]=9689, - ["invsmileface"]=9787, - ["iocyrillic"]=1105, - ["iogonek"]=303, - ["iota"]=953, - ["iotadieresis"]=970, - ["iotadieresistonos"]=912, - ["iotalatin"]=617, - ["iotatonos"]=943, - ["iparen"]=9380, - ["irigurmukhi"]=2674, - ["ismallhiragana"]=12355, - ["ismallkatakana"]=12451, - ["ismallkatakanahalfwidth"]=65384, - ["issharbengali"]=2554, - ["istroke"]=616, - ["iterationhiragana"]=12445, - ["iterationkatakana"]=12541, - ["itilde"]=297, - ["itildebelow"]=7725, - ["iubopomofo"]=12585, - ["iucyrillic"]=1102, - ["ivowelsignbengali"]=2495, - ["ivowelsigndeva"]=2367, - ["ivowelsigngujarati"]=2751, - ["izhitsacyrillic"]=1141, - ["izhitsadblgravecyrillic"]=1143, - ["j"]=106, - ["jaarmenian"]=1393, - ["jabengali"]=2460, - ["jadeva"]=2332, - ["jagujarati"]=2716, - ["jagurmukhi"]=2588, - ["jbopomofo"]=12560, - ["jcaron"]=496, - ["jcircle"]=9433, - ["jcircumflex"]=309, - ["jcrossedtail"]=669, - ["jdotlessstroke"]=607, - ["jecyrillic"]=1112, - ["jeemarabic"]=1580, - ["jeemfinalarabic"]=65182, - ["jeeminitialarabic"]=65183, - ["jeemmedialarabic"]=65184, - ["jeharabic"]=1688, - ["jehfinalarabic"]=64395, - ["jhabengali"]=2461, - ["jhadeva"]=2333, - ["jhagujarati"]=2717, - ["jhagurmukhi"]=2589, - ["jheharmenian"]=1403, - ["jis"]=12292, - ["jmonospace"]=65354, - ["jparen"]=9381, - ["jsuperior"]=690, - ["k"]=107, - ["kabashkircyrillic"]=1185, - ["kabengali"]=2453, - ["kacute"]=7729, - ["kacyrillic"]=1082, - ["kadescendercyrillic"]=1179, - ["kadeva"]=2325, - ["kaf"]=1499, - ["kafarabic"]=1603, - ["kafdagesh"]=64315, - ["kafdageshhebrew"]=64315, - ["kaffinalarabic"]=65242, - ["kafhebrew"]=1499, - ["kafinitialarabic"]=65243, - ["kafmedialarabic"]=65244, - ["kafrafehebrew"]=64333, - ["kagujarati"]=2709, - ["kagurmukhi"]=2581, - ["kahiragana"]=12363, - ["kahookcyrillic"]=1220, - ["kakatakana"]=12459, - ["kakatakanahalfwidth"]=65398, - ["kappa"]=954, - ["kappasymbolgreek"]=1008, - ["kapyeounmieumkorean"]=12657, - ["kapyeounphieuphkorean"]=12676, - ["kapyeounpieupkorean"]=12664, - ["kapyeounssangpieupkorean"]=12665, - ["karoriisquare"]=13069, - ["kashidaautoarabic"]=1600, - ["kashidaautonosidebearingarabic"]=1600, - ["kasmallkatakana"]=12533, - ["kasquare"]=13188, - ["kasraarabic"]=1616, - ["kasratanarabic"]=1613, - ["kastrokecyrillic"]=1183, - ["katahiraprolongmarkhalfwidth"]=65392, - ["kaverticalstrokecyrillic"]=1181, - ["kbopomofo"]=12558, - ["kcalsquare"]=13193, - ["kcaron"]=489, - ["kcedilla"]=311, - ["kcircle"]=9434, - ["kcommaaccent"]=311, - ["kdotbelow"]=7731, - ["keharmenian"]=1412, - ["kehiragana"]=12369, - ["kekatakana"]=12465, - ["kekatakanahalfwidth"]=65401, - ["kenarmenian"]=1391, - ["kesmallkatakana"]=12534, - ["kgreenlandic"]=312, - ["khabengali"]=2454, - ["khacyrillic"]=1093, - ["khadeva"]=2326, - ["khagujarati"]=2710, - ["khagurmukhi"]=2582, - ["khaharabic"]=1582, - ["khahfinalarabic"]=65190, - ["khahinitialarabic"]=65191, - ["khahmedialarabic"]=65192, - ["kheicoptic"]=999, - ["khhadeva"]=2393, - ["khhagurmukhi"]=2649, - ["khieukhacirclekorean"]=12920, - ["khieukhaparenkorean"]=12824, - 
["khieukhcirclekorean"]=12906, - ["khieukhkorean"]=12619, - ["khieukhparenkorean"]=12810, - ["khokhaithai"]=3586, - ["khokhonthai"]=3589, - ["khokhuatthai"]=3587, - ["khokhwaithai"]=3588, - ["khomutthai"]=3675, - ["khook"]=409, - ["khorakhangthai"]=3590, - ["khzsquare"]=13201, - ["kihiragana"]=12365, - ["kikatakana"]=12461, - ["kikatakanahalfwidth"]=65399, - ["kiroguramusquare"]=13077, - ["kiromeetorusquare"]=13078, - ["kirosquare"]=13076, - ["kiyeokacirclekorean"]=12910, - ["kiyeokaparenkorean"]=12814, - ["kiyeokcirclekorean"]=12896, - ["kiyeokkorean"]=12593, - ["kiyeokparenkorean"]=12800, - ["kiyeoksioskorean"]=12595, - ["kjecyrillic"]=1116, - ["klinebelow"]=7733, - ["klsquare"]=13208, - ["kmcubedsquare"]=13222, - ["kmonospace"]=65355, - ["kmsquaredsquare"]=13218, - ["kohiragana"]=12371, - ["kohmsquare"]=13248, - ["kokaithai"]=3585, - ["kokatakana"]=12467, - ["kokatakanahalfwidth"]=65402, - ["kooposquare"]=13086, - ["koppacyrillic"]=1153, - ["koreanstandardsymbol"]=12927, - ["koroniscmb"]=835, - ["kparen"]=9382, - ["kpasquare"]=13226, - ["ksicyrillic"]=1135, - ["ktsquare"]=13263, - ["kturned"]=670, - ["kuhiragana"]=12367, - ["kukatakana"]=12463, - ["kukatakanahalfwidth"]=65400, - ["kvsquare"]=13240, - ["kwsquare"]=13246, - ["l"]=108, - ["labengali"]=2482, - ["lacute"]=314, - ["ladeva"]=2354, - ["lagujarati"]=2738, - ["lagurmukhi"]=2610, - ["lakkhangyaothai"]=3653, - ["lamaleffinalarabic"]=65276, - ["lamalefhamzaabovefinalarabic"]=65272, - ["lamalefhamzaaboveisolatedarabic"]=65271, - ["lamalefhamzabelowfinalarabic"]=65274, - ["lamalefhamzabelowisolatedarabic"]=65273, - ["lamalefisolatedarabic"]=65275, - ["lamalefmaddaabovefinalarabic"]=65270, - ["lamalefmaddaaboveisolatedarabic"]=65269, - ["lamarabic"]=1604, - ["lambda"]=955, - ["lambdastroke"]=411, - ["lamed"]=1500, - ["lameddagesh"]=64316, - ["lameddageshhebrew"]=64316, - ["lamedhebrew"]=1500, - ["lamedholam"]=1500, - ["lamedholamdagesh"]=1500, - ["lamedholamdageshhebrew"]=1500, - ["lamedholamhebrew"]=1500, - ["lamfinalarabic"]=65246, - ["lamhahinitialarabic"]=64714, - ["laminitialarabic"]=65247, - ["lamjeeminitialarabic"]=64713, - ["lamkhahinitialarabic"]=64715, - ["lamlamhehisolatedarabic"]=65010, - ["lammedialarabic"]=65248, - ["lammeemhahinitialarabic"]=64904, - ["lammeeminitialarabic"]=64716, - ["lammeemjeeminitialarabic"]=65247, - ["lammeemkhahinitialarabic"]=65247, - ["largecircle"]=9711, - ["lbar"]=410, - ["lbelt"]=620, - ["lbopomofo"]=12556, - ["lcaron"]=318, - ["lcedilla"]=316, - ["lcircle"]=9435, - ["lcircumflexbelow"]=7741, - ["lcommaaccent"]=316, - ["ldot"]=320, - ["ldotaccent"]=320, - ["ldotbelow"]=7735, - ["ldotbelowmacron"]=7737, - ["leftangleabovecmb"]=794, - ["lefttackbelowcmb"]=792, - ["less"]=60, - ["lessequal"]=8804, - ["lessequalorgreater"]=8922, - ["lessmonospace"]=65308, - ["lessorequivalent"]=8818, - ["lessorgreater"]=8822, - ["lessoverequal"]=8806, - ["lesssmall"]=65124, - ["lezh"]=622, - ["lfblock"]=9612, - ["lhookretroflex"]=621, - ["lira"]=8356, - ["liwnarmenian"]=1388, - ["lj"]=457, - ["ljecyrillic"]=1113, - ["lladeva"]=2355, - ["llagujarati"]=2739, - ["llinebelow"]=7739, - ["llladeva"]=2356, - ["llvocalicbengali"]=2529, - ["llvocalicdeva"]=2401, - ["llvocalicvowelsignbengali"]=2531, - ["llvocalicvowelsigndeva"]=2403, - ["lmiddletilde"]=619, - ["lmonospace"]=65356, - ["lmsquare"]=13264, - ["lochulathai"]=3628, - ["logicaland"]=8743, - ["logicalnot"]=172, - ["logicalnotreversed"]=8976, - ["logicalor"]=8744, - ["lolingthai"]=3621, - ["longs"]=383, - ["lowlinecenterline"]=65102, - ["lowlinecmb"]=818, - 
["lowlinedashed"]=65101, - ["lozenge"]=9674, - ["lparen"]=9383, - ["lslash"]=322, - ["lsquare"]=8467, - ["ltshade"]=9617, - ["luthai"]=3622, - ["lvocalicbengali"]=2444, - ["lvocalicdeva"]=2316, - ["lvocalicvowelsignbengali"]=2530, - ["lvocalicvowelsigndeva"]=2402, - ["lxsquare"]=13267, - ["m"]=109, - ["mabengali"]=2478, - ["macron"]=175, - ["macronbelowcmb"]=817, - ["macroncmb"]=772, - ["macronlowmod"]=717, - ["macronmonospace"]=65507, - ["macute"]=7743, - ["madeva"]=2350, - ["magujarati"]=2734, - ["magurmukhi"]=2606, - ["mahapakhhebrew"]=1444, - ["mahapakhlefthebrew"]=1444, - ["mahiragana"]=12414, - ["maichattawathai"]=3659, - ["maiekthai"]=3656, - ["maihanakatthai"]=3633, - ["maitaikhuthai"]=3655, - ["maithothai"]=3657, - ["maitrithai"]=3658, - ["maiyamokthai"]=3654, - ["makatakana"]=12510, - ["makatakanahalfwidth"]=65423, - ["male"]=9794, - ["mansyonsquare"]=13127, - ["maqafhebrew"]=1470, - ["mars"]=9794, - ["masoracirclehebrew"]=1455, - ["masquare"]=13187, - ["mbopomofo"]=12551, - ["mbsquare"]=13268, - ["mcircle"]=9436, - ["mcubedsquare"]=13221, - ["mdotaccent"]=7745, - ["mdotbelow"]=7747, - ["meemarabic"]=1605, - ["meemfinalarabic"]=65250, - ["meeminitialarabic"]=65251, - ["meemmedialarabic"]=65252, - ["meemmeeminitialarabic"]=64721, - ["meemmeemisolatedarabic"]=64584, - ["meetorusquare"]=13133, - ["mehiragana"]=12417, - ["meizierasquare"]=13182, - ["mekatakana"]=12513, - ["mekatakanahalfwidth"]=65426, - ["mem"]=1502, - ["memdagesh"]=64318, - ["memdageshhebrew"]=64318, - ["memhebrew"]=1502, - ["menarmenian"]=1396, - ["merkhahebrew"]=1445, - ["merkhakefulahebrew"]=1446, - ["merkhakefulalefthebrew"]=1446, - ["merkhalefthebrew"]=1445, - ["mhook"]=625, - ["mhzsquare"]=13202, - ["middledotkatakanahalfwidth"]=65381, - ["middot"]=183, - ["mieumacirclekorean"]=12914, - ["mieumaparenkorean"]=12818, - ["mieumcirclekorean"]=12900, - ["mieumkorean"]=12609, - ["mieumpansioskorean"]=12656, - ["mieumparenkorean"]=12804, - ["mieumpieupkorean"]=12654, - ["mieumsioskorean"]=12655, - ["mihiragana"]=12415, - ["mikatakana"]=12511, - ["mikatakanahalfwidth"]=65424, - ["minus"]=8722, - ["minusbelowcmb"]=800, - ["minuscircle"]=8854, - ["minusmod"]=727, - ["minusplus"]=8723, - ["minute"]=8242, - ["miribaarusquare"]=13130, - ["mirisquare"]=13129, - ["mlonglegturned"]=624, - ["mlsquare"]=13206, - ["mmcubedsquare"]=13219, - ["mmonospace"]=65357, - ["mmsquaredsquare"]=13215, - ["mohiragana"]=12418, - ["mohmsquare"]=13249, - ["mokatakana"]=12514, - ["mokatakanahalfwidth"]=65427, - ["molsquare"]=13270, - ["momathai"]=3617, - ["moverssquare"]=13223, - ["moverssquaredsquare"]=13224, - ["mparen"]=9384, - ["mpasquare"]=13227, - ["mssquare"]=13235, - ["mturned"]=623, - ["mu"]=181, - ["mu1"]=181, - ["muasquare"]=13186, - ["muchgreater"]=8811, - ["muchless"]=8810, - ["mufsquare"]=13196, - ["mugreek"]=956, - ["mugsquare"]=13197, - ["muhiragana"]=12416, - ["mukatakana"]=12512, - ["mukatakanahalfwidth"]=65425, - ["mulsquare"]=13205, - ["multiply"]=215, - ["mumsquare"]=13211, - ["munahhebrew"]=1443, - ["munahlefthebrew"]=1443, - ["musicalnote"]=9834, - ["musicalnotedbl"]=9835, - ["musicflatsign"]=9837, - ["musicsharpsign"]=9839, - ["mussquare"]=13234, - ["muvsquare"]=13238, - ["muwsquare"]=13244, - ["mvmegasquare"]=13241, - ["mvsquare"]=13239, - ["mwmegasquare"]=13247, - ["mwsquare"]=13245, - ["n"]=110, - ["nabengali"]=2472, - ["nabla"]=8711, - ["nacute"]=324, - ["nadeva"]=2344, - ["nagujarati"]=2728, - ["nagurmukhi"]=2600, - ["nahiragana"]=12394, - ["nakatakana"]=12490, - ["nakatakanahalfwidth"]=65413, - ["napostrophe"]=329, 
- ["nasquare"]=13185, - ["nbopomofo"]=12555, - ["nbspace"]=160, - ["ncaron"]=328, - ["ncedilla"]=326, - ["ncircle"]=9437, - ["ncircumflexbelow"]=7755, - ["ncommaaccent"]=326, - ["ndotaccent"]=7749, - ["ndotbelow"]=7751, - ["nehiragana"]=12397, - ["nekatakana"]=12493, - ["nekatakanahalfwidth"]=65416, - ["newsheqelsign"]=8362, - ["nfsquare"]=13195, - ["ngabengali"]=2457, - ["ngadeva"]=2329, - ["ngagujarati"]=2713, - ["ngagurmukhi"]=2585, - ["ngonguthai"]=3591, - ["nhiragana"]=12435, - ["nhookleft"]=626, - ["nhookretroflex"]=627, - ["nieunacirclekorean"]=12911, - ["nieunaparenkorean"]=12815, - ["nieuncieuckorean"]=12597, - ["nieuncirclekorean"]=12897, - ["nieunhieuhkorean"]=12598, - ["nieunkorean"]=12596, - ["nieunpansioskorean"]=12648, - ["nieunparenkorean"]=12801, - ["nieunsioskorean"]=12647, - ["nieuntikeutkorean"]=12646, - ["nihiragana"]=12395, - ["nikatakana"]=12491, - ["nikatakanahalfwidth"]=65414, - ["nikhahitthai"]=3661, - ["nine"]=57, - ["ninearabic"]=1641, - ["ninebengali"]=2543, - ["ninecircle"]=9320, - ["ninecircleinversesansserif"]=10130, - ["ninedeva"]=2415, - ["ninegujarati"]=2799, - ["ninegurmukhi"]=2671, - ["ninehackarabic"]=1641, - ["ninehangzhou"]=12329, - ["nineideographicparen"]=12840, - ["nineinferior"]=8329, - ["ninemonospace"]=65305, - ["nineparen"]=9340, - ["nineperiod"]=9360, - ["ninepersian"]=1785, - ["nineroman"]=8568, - ["ninesuperior"]=8313, - ["nineteencircle"]=9330, - ["nineteenparen"]=9350, - ["nineteenperiod"]=9370, - ["ninethai"]=3673, - ["nj"]=460, - ["njecyrillic"]=1114, - ["nkatakana"]=12531, - ["nkatakanahalfwidth"]=65437, - ["nlegrightlong"]=414, - ["nlinebelow"]=7753, - ["nmonospace"]=65358, - ["nmsquare"]=13210, - ["nnabengali"]=2467, - ["nnadeva"]=2339, - ["nnagujarati"]=2723, - ["nnagurmukhi"]=2595, - ["nnnadeva"]=2345, - ["nohiragana"]=12398, - ["nokatakana"]=12494, - ["nokatakanahalfwidth"]=65417, - ["nonbreakingspace"]=160, - ["nonenthai"]=3603, - ["nonuthai"]=3609, - ["noonarabic"]=1606, - ["noonfinalarabic"]=65254, - ["noonghunnaarabic"]=1722, - ["noonghunnafinalarabic"]=64415, - ["noonhehinitialarabic"]=65255, - ["nooninitialarabic"]=65255, - ["noonjeeminitialarabic"]=64722, - ["noonjeemisolatedarabic"]=64587, - ["noonmedialarabic"]=65256, - ["noonmeeminitialarabic"]=64725, - ["noonmeemisolatedarabic"]=64590, - ["noonnoonfinalarabic"]=64653, - ["notcontains"]=8716, - ["notelement"]=8713, - ["notelementof"]=8713, - ["notequal"]=8800, - ["notgreater"]=8815, - ["notgreaternorequal"]=8817, - ["notgreaternorless"]=8825, - ["notidentical"]=8802, - ["notless"]=8814, - ["notlessnorequal"]=8816, - ["notparallel"]=8742, - ["notprecedes"]=8832, - ["notsubset"]=8836, - ["notsucceeds"]=8833, - ["notsuperset"]=8837, - ["nowarmenian"]=1398, - ["nparen"]=9385, - ["nssquare"]=13233, - ["nsuperior"]=8319, - ["ntilde"]=241, - ["nu"]=957, - ["nuhiragana"]=12396, - ["nukatakana"]=12492, - ["nukatakanahalfwidth"]=65415, - ["nuktabengali"]=2492, - ["nuktadeva"]=2364, - ["nuktagujarati"]=2748, - ["nuktagurmukhi"]=2620, - ["numbersign"]=35, - ["numbersignmonospace"]=65283, - ["numbersignsmall"]=65119, - ["numeralsigngreek"]=884, - ["numeralsignlowergreek"]=885, - ["numero"]=8470, - ["nun"]=1504, - ["nundagesh"]=64320, - ["nundageshhebrew"]=64320, - ["nunhebrew"]=1504, - ["nvsquare"]=13237, - ["nwsquare"]=13243, - ["nyabengali"]=2462, - ["nyadeva"]=2334, - ["nyagujarati"]=2718, - ["nyagurmukhi"]=2590, - ["o"]=111, - ["oacute"]=243, - ["oangthai"]=3629, - ["obarred"]=629, - ["obarredcyrillic"]=1257, - ["obarreddieresiscyrillic"]=1259, - ["obengali"]=2451, - 
["obopomofo"]=12571, - ["obreve"]=335, - ["ocandradeva"]=2321, - ["ocandragujarati"]=2705, - ["ocandravowelsigndeva"]=2377, - ["ocandravowelsigngujarati"]=2761, - ["ocaron"]=466, - ["ocircle"]=9438, - ["ocircumflex"]=244, - ["ocircumflexacute"]=7889, - ["ocircumflexdotbelow"]=7897, - ["ocircumflexgrave"]=7891, - ["ocircumflexhookabove"]=7893, - ["ocircumflextilde"]=7895, - ["ocyrillic"]=1086, - ["odblacute"]=337, - ["odblgrave"]=525, - ["odeva"]=2323, - ["odieresis"]=246, - ["odieresiscyrillic"]=1255, - ["odotbelow"]=7885, - ["oe"]=339, - ["oekorean"]=12634, - ["ogonek"]=731, - ["ogonekcmb"]=808, - ["ograve"]=242, - ["ogujarati"]=2707, - ["oharmenian"]=1413, - ["ohiragana"]=12362, - ["ohookabove"]=7887, - ["ohorn"]=417, - ["ohornacute"]=7899, - ["ohorndotbelow"]=7907, - ["ohorngrave"]=7901, - ["ohornhookabove"]=7903, - ["ohorntilde"]=7905, - ["ohungarumlaut"]=337, - ["oi"]=419, - ["oinvertedbreve"]=527, - ["okatakana"]=12458, - ["okatakanahalfwidth"]=65397, - ["okorean"]=12631, - ["olehebrew"]=1451, - ["omacron"]=333, - ["omacronacute"]=7763, - ["omacrongrave"]=7761, - ["omdeva"]=2384, - ["omega"]=969, - ["omega1"]=982, - ["omegacyrillic"]=1121, - ["omegalatinclosed"]=631, - ["omegaroundcyrillic"]=1147, - ["omegatitlocyrillic"]=1149, - ["omegatonos"]=974, - ["omgujarati"]=2768, - ["omicron"]=959, - ["omicrontonos"]=972, - ["omonospace"]=65359, - ["one"]=49, - ["onearabic"]=1633, - ["onebengali"]=2535, - ["onecircle"]=9312, - ["onecircleinversesansserif"]=10122, - ["onedeva"]=2407, - ["onedotenleader"]=8228, - ["oneeighth"]=8539, - ["onegujarati"]=2791, - ["onegurmukhi"]=2663, - ["onehackarabic"]=1633, - ["onehalf"]=189, - ["onehangzhou"]=12321, - ["oneideographicparen"]=12832, - ["oneinferior"]=8321, - ["onemonospace"]=65297, - ["onenumeratorbengali"]=2548, - ["oneparen"]=9332, - ["oneperiod"]=9352, - ["onepersian"]=1777, - ["onequarter"]=188, - ["oneroman"]=8560, - ["onesuperior"]=185, - ["onethai"]=3665, - ["onethird"]=8531, - ["oogonek"]=491, - ["oogonekmacron"]=493, - ["oogurmukhi"]=2579, - ["oomatragurmukhi"]=2635, - ["oopen"]=596, - ["oparen"]=9386, - ["openbullet"]=9702, - ["option"]=8997, - ["ordfeminine"]=170, - ["ordmasculine"]=186, - ["orthogonal"]=8735, - ["oshortdeva"]=2322, - ["oshortvowelsigndeva"]=2378, - ["oslash"]=248, - ["oslashacute"]=511, - ["osmallhiragana"]=12361, - ["osmallkatakana"]=12457, - ["osmallkatakanahalfwidth"]=65387, - ["ostrokeacute"]=511, - ["otcyrillic"]=1151, - ["otilde"]=245, - ["otildeacute"]=7757, - ["otildedieresis"]=7759, - ["oubopomofo"]=12577, - ["overline"]=8254, - ["overlinecenterline"]=65098, - ["overlinecmb"]=773, - ["overlinedashed"]=65097, - ["overlinedblwavy"]=65100, - ["overlinewavy"]=65099, - ["overscore"]=175, - ["ovowelsignbengali"]=2507, - ["ovowelsigndeva"]=2379, - ["ovowelsigngujarati"]=2763, - ["p"]=112, - ["paampssquare"]=13184, - ["paasentosquare"]=13099, - ["pabengali"]=2474, - ["pacute"]=7765, - ["padeva"]=2346, - ["pagedown"]=8671, - ["pageup"]=8670, - ["pagujarati"]=2730, - ["pagurmukhi"]=2602, - ["pahiragana"]=12401, - ["paiyannoithai"]=3631, - ["pakatakana"]=12497, - ["palatalizationcyrilliccmb"]=1156, - ["palochkacyrillic"]=1216, - ["pansioskorean"]=12671, - ["paragraph"]=182, - ["parallel"]=8741, - ["parenleft"]=40, - ["parenleftaltonearabic"]=64830, - ["parenleftinferior"]=8333, - ["parenleftmonospace"]=65288, - ["parenleftsmall"]=65113, - ["parenleftsuperior"]=8317, - ["parenleftvertical"]=65077, - ["parenright"]=41, - ["parenrightaltonearabic"]=64831, - ["parenrightinferior"]=8334, - ["parenrightmonospace"]=65289, - 
["parenrightsmall"]=65114, - ["parenrightsuperior"]=8318, - ["parenrightvertical"]=65078, - ["partialdiff"]=8706, - ["paseqhebrew"]=1472, - ["pashtahebrew"]=1433, - ["pasquare"]=13225, - ["patah"]=1463, - ["patah11"]=1463, - ["patah1d"]=1463, - ["patah2a"]=1463, - ["patahhebrew"]=1463, - ["patahnarrowhebrew"]=1463, - ["patahquarterhebrew"]=1463, - ["patahwidehebrew"]=1463, - ["pazerhebrew"]=1441, - ["pbopomofo"]=12550, - ["pcircle"]=9439, - ["pdotaccent"]=7767, - ["pe"]=1508, - ["pecyrillic"]=1087, - ["pedagesh"]=64324, - ["pedageshhebrew"]=64324, - ["peezisquare"]=13115, - ["pefinaldageshhebrew"]=64323, - ["peharabic"]=1662, - ["peharmenian"]=1402, - ["pehebrew"]=1508, - ["pehfinalarabic"]=64343, - ["pehinitialarabic"]=64344, - ["pehiragana"]=12410, - ["pehmedialarabic"]=64345, - ["pekatakana"]=12506, - ["pemiddlehookcyrillic"]=1191, - ["perafehebrew"]=64334, - ["percent"]=37, - ["percentarabic"]=1642, - ["percentmonospace"]=65285, - ["percentsmall"]=65130, - ["period"]=46, - ["periodarmenian"]=1417, - ["periodcentered"]=183, - ["periodhalfwidth"]=65377, - ["periodmonospace"]=65294, - ["periodsmall"]=65106, - ["perispomenigreekcmb"]=834, - ["perpendicular"]=8869, - ["perthousand"]=8240, - ["peseta"]=8359, - ["pfsquare"]=13194, - ["phabengali"]=2475, - ["phadeva"]=2347, - ["phagujarati"]=2731, - ["phagurmukhi"]=2603, - ["phi"]=966, - ["phi1"]=981, - ["phieuphacirclekorean"]=12922, - ["phieuphaparenkorean"]=12826, - ["phieuphcirclekorean"]=12908, - ["phieuphkorean"]=12621, - ["phieuphparenkorean"]=12812, - ["philatin"]=632, - ["phinthuthai"]=3642, - ["phisymbolgreek"]=981, - ["phook"]=421, - ["phophanthai"]=3614, - ["phophungthai"]=3612, - ["phosamphaothai"]=3616, - ["pi"]=960, - ["pieupacirclekorean"]=12915, - ["pieupaparenkorean"]=12819, - ["pieupcieuckorean"]=12662, - ["pieupcirclekorean"]=12901, - ["pieupkiyeokkorean"]=12658, - ["pieupkorean"]=12610, - ["pieupparenkorean"]=12805, - ["pieupsioskiyeokkorean"]=12660, - ["pieupsioskorean"]=12612, - ["pieupsiostikeutkorean"]=12661, - ["pieupthieuthkorean"]=12663, - ["pieuptikeutkorean"]=12659, - ["pihiragana"]=12404, - ["pikatakana"]=12500, - ["pisymbolgreek"]=982, - ["piwrarmenian"]=1411, - ["plus"]=43, - ["plusbelowcmb"]=799, - ["pluscircle"]=8853, - ["plusminus"]=177, - ["plusmod"]=726, - ["plusmonospace"]=65291, - ["plussmall"]=65122, - ["plussuperior"]=8314, - ["pmonospace"]=65360, - ["pmsquare"]=13272, - ["pohiragana"]=12413, - ["pointingindexdownwhite"]=9759, - ["pointingindexleftwhite"]=9756, - ["pointingindexrightwhite"]=9758, - ["pointingindexupwhite"]=9757, - ["pokatakana"]=12509, - ["poplathai"]=3611, - ["postalmark"]=12306, - ["postalmarkface"]=12320, - ["pparen"]=9387, - ["precedes"]=8826, - ["prescription"]=8478, - ["primemod"]=697, - ["primereversed"]=8245, - ["product"]=8719, - ["projective"]=8965, - ["prolongedkana"]=12540, - ["propellor"]=8984, - ["propersubset"]=8834, - ["propersuperset"]=8835, - ["proportion"]=8759, - ["proportional"]=8733, - ["psi"]=968, - ["psicyrillic"]=1137, - ["psilipneumatacyrilliccmb"]=1158, - ["pssquare"]=13232, - ["puhiragana"]=12407, - ["pukatakana"]=12503, - ["pvsquare"]=13236, - ["pwsquare"]=13242, - ["q"]=113, - ["qadeva"]=2392, - ["qadmahebrew"]=1448, - ["qafarabic"]=1602, - ["qaffinalarabic"]=65238, - ["qafinitialarabic"]=65239, - ["qafmedialarabic"]=65240, - ["qamats"]=1464, - ["qamats10"]=1464, - ["qamats1a"]=1464, - ["qamats1c"]=1464, - ["qamats27"]=1464, - ["qamats29"]=1464, - ["qamats33"]=1464, - ["qamatsde"]=1464, - ["qamatshebrew"]=1464, - ["qamatsnarrowhebrew"]=1464, - 
["qamatsqatanhebrew"]=1464, - ["qamatsqatannarrowhebrew"]=1464, - ["qamatsqatanquarterhebrew"]=1464, - ["qamatsqatanwidehebrew"]=1464, - ["qamatsquarterhebrew"]=1464, - ["qamatswidehebrew"]=1464, - ["qarneyparahebrew"]=1439, - ["qbopomofo"]=12561, - ["qcircle"]=9440, - ["qhook"]=672, - ["qmonospace"]=65361, - ["qof"]=1511, - ["qofdagesh"]=64327, - ["qofdageshhebrew"]=64327, - ["qofhatafpatah"]=1511, - ["qofhatafpatahhebrew"]=1511, - ["qofhatafsegol"]=1511, - ["qofhatafsegolhebrew"]=1511, - ["qofhebrew"]=1511, - ["qofhiriq"]=1511, - ["qofhiriqhebrew"]=1511, - ["qofholam"]=1511, - ["qofholamhebrew"]=1511, - ["qofpatah"]=1511, - ["qofpatahhebrew"]=1511, - ["qofqamats"]=1511, - ["qofqamatshebrew"]=1511, - ["qofqubuts"]=1511, - ["qofqubutshebrew"]=1511, - ["qofsegol"]=1511, - ["qofsegolhebrew"]=1511, - ["qofsheva"]=1511, - ["qofshevahebrew"]=1511, - ["qoftsere"]=1511, - ["qoftserehebrew"]=1511, - ["qparen"]=9388, - ["quarternote"]=9833, - ["qubuts"]=1467, - ["qubuts18"]=1467, - ["qubuts25"]=1467, - ["qubuts31"]=1467, - ["qubutshebrew"]=1467, - ["qubutsnarrowhebrew"]=1467, - ["qubutsquarterhebrew"]=1467, - ["qubutswidehebrew"]=1467, - ["question"]=63, - ["questionarabic"]=1567, - ["questionarmenian"]=1374, - ["questiondown"]=191, - ["questiongreek"]=894, - ["questionmonospace"]=65311, - ["quotedbl"]=34, - ["quotedblbase"]=8222, - ["quotedblleft"]=8220, - ["quotedblmonospace"]=65282, - ["quotedblprime"]=12318, - ["quotedblprimereversed"]=12317, - ["quotedblright"]=8221, - ["quoteleft"]=8216, - ["quoteleftreversed"]=8219, - ["quotereversed"]=8219, - ["quoteright"]=8217, - ["quoterightn"]=329, - ["quotesinglbase"]=8218, - ["quotesingle"]=39, - ["quotesinglemonospace"]=65287, - ["r"]=114, - ["raarmenian"]=1404, - ["rabengali"]=2480, - ["racute"]=341, - ["radeva"]=2352, - ["radical"]=8730, - ["radoverssquare"]=13230, - ["radoverssquaredsquare"]=13231, - ["radsquare"]=13229, - ["rafe"]=1471, - ["rafehebrew"]=1471, - ["ragujarati"]=2736, - ["ragurmukhi"]=2608, - ["rahiragana"]=12425, - ["rakatakana"]=12521, - ["rakatakanahalfwidth"]=65431, - ["ralowerdiagonalbengali"]=2545, - ["ramiddlediagonalbengali"]=2544, - ["ramshorn"]=612, - ["ratio"]=8758, - ["rbopomofo"]=12566, - ["rcaron"]=345, - ["rcedilla"]=343, - ["rcircle"]=9441, - ["rcommaaccent"]=343, - ["rdblgrave"]=529, - ["rdotaccent"]=7769, - ["rdotbelow"]=7771, - ["rdotbelowmacron"]=7773, - ["referencemark"]=8251, - ["reflexsubset"]=8838, - ["reflexsuperset"]=8839, - ["registered"]=174, - ["reharabic"]=1585, - ["reharmenian"]=1408, - ["rehfinalarabic"]=65198, - ["rehiragana"]=12428, - ["rehyehaleflamarabic"]=1585, - ["rekatakana"]=12524, - ["rekatakanahalfwidth"]=65434, - ["resh"]=1512, - ["reshdageshhebrew"]=64328, - ["reshhatafpatah"]=1512, - ["reshhatafpatahhebrew"]=1512, - ["reshhatafsegol"]=1512, - ["reshhatafsegolhebrew"]=1512, - ["reshhebrew"]=1512, - ["reshhiriq"]=1512, - ["reshhiriqhebrew"]=1512, - ["reshholam"]=1512, - ["reshholamhebrew"]=1512, - ["reshpatah"]=1512, - ["reshpatahhebrew"]=1512, - ["reshqamats"]=1512, - ["reshqamatshebrew"]=1512, - ["reshqubuts"]=1512, - ["reshqubutshebrew"]=1512, - ["reshsegol"]=1512, - ["reshsegolhebrew"]=1512, - ["reshsheva"]=1512, - ["reshshevahebrew"]=1512, - ["reshtsere"]=1512, - ["reshtserehebrew"]=1512, - ["reversedtilde"]=8765, - ["reviahebrew"]=1431, - ["reviamugrashhebrew"]=1431, - ["revlogicalnot"]=8976, - ["rfishhook"]=638, - ["rfishhookreversed"]=639, - ["rhabengali"]=2525, - ["rhadeva"]=2397, - ["rho"]=961, - ["rhook"]=637, - ["rhookturned"]=635, - ["rhookturnedsuperior"]=693, - 
["rhosymbolgreek"]=1009, - ["rhotichookmod"]=734, - ["rieulacirclekorean"]=12913, - ["rieulaparenkorean"]=12817, - ["rieulcirclekorean"]=12899, - ["rieulhieuhkorean"]=12608, - ["rieulkiyeokkorean"]=12602, - ["rieulkiyeoksioskorean"]=12649, - ["rieulkorean"]=12601, - ["rieulmieumkorean"]=12603, - ["rieulpansioskorean"]=12652, - ["rieulparenkorean"]=12803, - ["rieulphieuphkorean"]=12607, - ["rieulpieupkorean"]=12604, - ["rieulpieupsioskorean"]=12651, - ["rieulsioskorean"]=12605, - ["rieulthieuthkorean"]=12606, - ["rieultikeutkorean"]=12650, - ["rieulyeorinhieuhkorean"]=12653, - ["rightangle"]=8735, - ["righttackbelowcmb"]=793, - ["righttriangle"]=8895, - ["rihiragana"]=12426, - ["rikatakana"]=12522, - ["rikatakanahalfwidth"]=65432, - ["ring"]=730, - ["ringbelowcmb"]=805, - ["ringcmb"]=778, - ["ringhalfleft"]=703, - ["ringhalfleftarmenian"]=1369, - ["ringhalfleftbelowcmb"]=796, - ["ringhalfleftcentered"]=723, - ["ringhalfright"]=702, - ["ringhalfrightbelowcmb"]=825, - ["ringhalfrightcentered"]=722, - ["rinvertedbreve"]=531, - ["rittorusquare"]=13137, - ["rlinebelow"]=7775, - ["rlongleg"]=636, - ["rlonglegturned"]=634, - ["rmonospace"]=65362, - ["rohiragana"]=12429, - ["rokatakana"]=12525, - ["rokatakanahalfwidth"]=65435, - ["roruathai"]=3619, - ["rparen"]=9389, - ["rrabengali"]=2524, - ["rradeva"]=2353, - ["rragurmukhi"]=2652, - ["rreharabic"]=1681, - ["rrehfinalarabic"]=64397, - ["rrvocalicbengali"]=2528, - ["rrvocalicdeva"]=2400, - ["rrvocalicgujarati"]=2784, - ["rrvocalicvowelsignbengali"]=2500, - ["rrvocalicvowelsigndeva"]=2372, - ["rrvocalicvowelsigngujarati"]=2756, - ["rtblock"]=9616, - ["rturned"]=633, - ["rturnedsuperior"]=692, - ["ruhiragana"]=12427, - ["rukatakana"]=12523, - ["rukatakanahalfwidth"]=65433, - ["rupeemarkbengali"]=2546, - ["rupeesignbengali"]=2547, - ["ruthai"]=3620, - ["rvocalicbengali"]=2443, - ["rvocalicdeva"]=2315, - ["rvocalicgujarati"]=2699, - ["rvocalicvowelsignbengali"]=2499, - ["rvocalicvowelsigndeva"]=2371, - ["rvocalicvowelsigngujarati"]=2755, - ["s"]=115, - ["sabengali"]=2488, - ["sacute"]=347, - ["sacutedotaccent"]=7781, - ["sadarabic"]=1589, - ["sadeva"]=2360, - ["sadfinalarabic"]=65210, - ["sadinitialarabic"]=65211, - ["sadmedialarabic"]=65212, - ["sagujarati"]=2744, - ["sagurmukhi"]=2616, - ["sahiragana"]=12373, - ["sakatakana"]=12469, - ["sakatakanahalfwidth"]=65403, - ["sallallahoualayhewasallamarabic"]=65018, - ["samekh"]=1505, - ["samekhdagesh"]=64321, - ["samekhdageshhebrew"]=64321, - ["samekhhebrew"]=1505, - ["saraaathai"]=3634, - ["saraaethai"]=3649, - ["saraaimaimalaithai"]=3652, - ["saraaimaimuanthai"]=3651, - ["saraamthai"]=3635, - ["saraathai"]=3632, - ["saraethai"]=3648, - ["saraiithai"]=3637, - ["saraithai"]=3636, - ["saraothai"]=3650, - ["saraueethai"]=3639, - ["sarauethai"]=3638, - ["sarauthai"]=3640, - ["sarauuthai"]=3641, - ["sbopomofo"]=12569, - ["scaron"]=353, - ["scarondotaccent"]=7783, - ["scedilla"]=351, - ["schwa"]=601, - ["schwacyrillic"]=1241, - ["schwadieresiscyrillic"]=1243, - ["schwahook"]=602, - ["scircle"]=9442, - ["scircumflex"]=349, - ["scommaaccent"]=537, - ["sdotaccent"]=7777, - ["sdotbelow"]=7779, - ["sdotbelowdotaccent"]=7785, - ["seagullbelowcmb"]=828, - ["second"]=8243, - ["secondtonechinese"]=714, - ["section"]=167, - ["seenarabic"]=1587, - ["seenfinalarabic"]=65202, - ["seeninitialarabic"]=65203, - ["seenmedialarabic"]=65204, - ["segol"]=1462, - ["segol13"]=1462, - ["segol1f"]=1462, - ["segol2c"]=1462, - ["segolhebrew"]=1462, - ["segolnarrowhebrew"]=1462, - ["segolquarterhebrew"]=1462, - ["segoltahebrew"]=1426, - 
["segolwidehebrew"]=1462, - ["seharmenian"]=1405, - ["sehiragana"]=12379, - ["sekatakana"]=12475, - ["sekatakanahalfwidth"]=65406, - ["semicolon"]=59, - ["semicolonarabic"]=1563, - ["semicolonmonospace"]=65307, - ["semicolonsmall"]=65108, - ["semivoicedmarkkana"]=12444, - ["semivoicedmarkkanahalfwidth"]=65439, - ["sentisquare"]=13090, - ["sentosquare"]=13091, - ["seven"]=55, - ["sevenarabic"]=1639, - ["sevenbengali"]=2541, - ["sevencircle"]=9318, - ["sevencircleinversesansserif"]=10128, - ["sevendeva"]=2413, - ["seveneighths"]=8542, - ["sevengujarati"]=2797, - ["sevengurmukhi"]=2669, - ["sevenhackarabic"]=1639, - ["sevenhangzhou"]=12327, - ["sevenideographicparen"]=12838, - ["seveninferior"]=8327, - ["sevenmonospace"]=65303, - ["sevenparen"]=9338, - ["sevenperiod"]=9358, - ["sevenpersian"]=1783, - ["sevenroman"]=8566, - ["sevensuperior"]=8311, - ["seventeencircle"]=9328, - ["seventeenparen"]=9348, - ["seventeenperiod"]=9368, - ["seventhai"]=3671, - ["sfthyphen"]=173, - ["shaarmenian"]=1399, - ["shabengali"]=2486, - ["shacyrillic"]=1096, - ["shaddaarabic"]=1617, - ["shaddadammaarabic"]=64609, - ["shaddadammatanarabic"]=64606, - ["shaddafathaarabic"]=64608, - ["shaddafathatanarabic"]=1617, - ["shaddakasraarabic"]=64610, - ["shaddakasratanarabic"]=64607, - ["shade"]=9618, - ["shadedark"]=9619, - ["shadelight"]=9617, - ["shademedium"]=9618, - ["shadeva"]=2358, - ["shagujarati"]=2742, - ["shagurmukhi"]=2614, - ["shalshelethebrew"]=1427, - ["shbopomofo"]=12565, - ["shchacyrillic"]=1097, - ["sheenarabic"]=1588, - ["sheenfinalarabic"]=65206, - ["sheeninitialarabic"]=65207, - ["sheenmedialarabic"]=65208, - ["sheicoptic"]=995, - ["sheqel"]=8362, - ["sheqelhebrew"]=8362, - ["sheva"]=1456, - ["sheva115"]=1456, - ["sheva15"]=1456, - ["sheva22"]=1456, - ["sheva2e"]=1456, - ["shevahebrew"]=1456, - ["shevanarrowhebrew"]=1456, - ["shevaquarterhebrew"]=1456, - ["shevawidehebrew"]=1456, - ["shhacyrillic"]=1211, - ["shimacoptic"]=1005, - ["shin"]=1513, - ["shindagesh"]=64329, - ["shindageshhebrew"]=64329, - ["shindageshshindot"]=64300, - ["shindageshshindothebrew"]=64300, - ["shindageshsindot"]=64301, - ["shindageshsindothebrew"]=64301, - ["shindothebrew"]=1473, - ["shinhebrew"]=1513, - ["shinshindot"]=64298, - ["shinshindothebrew"]=64298, - ["shinsindot"]=64299, - ["shinsindothebrew"]=64299, - ["shook"]=642, - ["sigma"]=963, - ["sigma1"]=962, - ["sigmafinal"]=962, - ["sigmalunatesymbolgreek"]=1010, - ["sihiragana"]=12375, - ["sikatakana"]=12471, - ["sikatakanahalfwidth"]=65404, - ["siluqhebrew"]=1469, - ["siluqlefthebrew"]=1469, - ["similar"]=8764, - ["sindothebrew"]=1474, - ["siosacirclekorean"]=12916, - ["siosaparenkorean"]=12820, - ["sioscieuckorean"]=12670, - ["sioscirclekorean"]=12902, - ["sioskiyeokkorean"]=12666, - ["sioskorean"]=12613, - ["siosnieunkorean"]=12667, - ["siosparenkorean"]=12806, - ["siospieupkorean"]=12669, - ["siostikeutkorean"]=12668, - ["six"]=54, - ["sixarabic"]=1638, - ["sixbengali"]=2540, - ["sixcircle"]=9317, - ["sixcircleinversesansserif"]=10127, - ["sixdeva"]=2412, - ["sixgujarati"]=2796, - ["sixgurmukhi"]=2668, - ["sixhackarabic"]=1638, - ["sixhangzhou"]=12326, - ["sixideographicparen"]=12837, - ["sixinferior"]=8326, - ["sixmonospace"]=65302, - ["sixparen"]=9337, - ["sixperiod"]=9357, - ["sixpersian"]=1782, - ["sixroman"]=8565, - ["sixsuperior"]=8310, - ["sixteencircle"]=9327, - ["sixteencurrencydenominatorbengali"]=2553, - ["sixteenparen"]=9347, - ["sixteenperiod"]=9367, - ["sixthai"]=3670, - ["slash"]=47, - ["slashmonospace"]=65295, - ["slong"]=383, - 
["slongdotaccent"]=7835, - ["smileface"]=9786, - ["smonospace"]=65363, - ["sofpasuqhebrew"]=1475, - ["softhyphen"]=173, - ["softsigncyrillic"]=1100, - ["sohiragana"]=12381, - ["sokatakana"]=12477, - ["sokatakanahalfwidth"]=65407, - ["soliduslongoverlaycmb"]=824, - ["solidusshortoverlaycmb"]=823, - ["sorusithai"]=3625, - ["sosalathai"]=3624, - ["sosothai"]=3595, - ["sosuathai"]=3626, - ["space"]=32, - ["spacehackarabic"]=32, - ["spade"]=9824, - ["spadesuitblack"]=9824, - ["spadesuitwhite"]=9828, - ["sparen"]=9390, - ["squarebelowcmb"]=827, - ["squarecc"]=13252, - ["squarecm"]=13213, - ["squarediagonalcrosshatchfill"]=9641, - ["squarehorizontalfill"]=9636, - ["squarekg"]=13199, - ["squarekm"]=13214, - ["squarekmcapital"]=13262, - ["squareln"]=13265, - ["squarelog"]=13266, - ["squaremg"]=13198, - ["squaremil"]=13269, - ["squaremm"]=13212, - ["squaremsquared"]=13217, - ["squareorthogonalcrosshatchfill"]=9638, - ["squareupperlefttolowerrightfill"]=9639, - ["squareupperrighttolowerleftfill"]=9640, - ["squareverticalfill"]=9637, - ["squarewhitewithsmallblack"]=9635, - ["srsquare"]=13275, - ["ssabengali"]=2487, - ["ssadeva"]=2359, - ["ssagujarati"]=2743, - ["ssangcieuckorean"]=12617, - ["ssanghieuhkorean"]=12677, - ["ssangieungkorean"]=12672, - ["ssangkiyeokkorean"]=12594, - ["ssangnieunkorean"]=12645, - ["ssangpieupkorean"]=12611, - ["ssangsioskorean"]=12614, - ["ssangtikeutkorean"]=12600, - ["sterling"]=163, - ["sterlingmonospace"]=65505, - ["strokelongoverlaycmb"]=822, - ["strokeshortoverlaycmb"]=821, - ["subset"]=8834, - ["subsetnotequal"]=8842, - ["subsetorequal"]=8838, - ["succeeds"]=8827, - ["suchthat"]=8715, - ["suhiragana"]=12377, - ["sukatakana"]=12473, - ["sukatakanahalfwidth"]=65405, - ["sukunarabic"]=1618, - ["summation"]=8721, - ["sun"]=9788, - ["superset"]=8835, - ["supersetnotequal"]=8843, - ["supersetorequal"]=8839, - ["svsquare"]=13276, - ["syouwaerasquare"]=13180, - ["t"]=116, - ["tabengali"]=2468, - ["tackdown"]=8868, - ["tackleft"]=8867, - ["tadeva"]=2340, - ["tagujarati"]=2724, - ["tagurmukhi"]=2596, - ["taharabic"]=1591, - ["tahfinalarabic"]=65218, - ["tahinitialarabic"]=65219, - ["tahiragana"]=12383, - ["tahmedialarabic"]=65220, - ["taisyouerasquare"]=13181, - ["takatakana"]=12479, - ["takatakanahalfwidth"]=65408, - ["tatweelarabic"]=1600, - ["tau"]=964, - ["tav"]=1514, - ["tavdages"]=64330, - ["tavdagesh"]=64330, - ["tavdageshhebrew"]=64330, - ["tavhebrew"]=1514, - ["tbar"]=359, - ["tbopomofo"]=12554, - ["tcaron"]=357, - ["tccurl"]=680, - ["tcedilla"]=355, - ["tcheharabic"]=1670, - ["tchehfinalarabic"]=64379, - ["tchehinitialarabic"]=64380, - ["tchehmedialarabic"]=64381, - ["tchehmeeminitialarabic"]=64380, - ["tcircle"]=9443, - ["tcircumflexbelow"]=7793, - ["tcommaaccent"]=355, - ["tdieresis"]=7831, - ["tdotaccent"]=7787, - ["tdotbelow"]=7789, - ["tecyrillic"]=1090, - ["tedescendercyrillic"]=1197, - ["teharabic"]=1578, - ["tehfinalarabic"]=65174, - ["tehhahinitialarabic"]=64674, - ["tehhahisolatedarabic"]=64524, - ["tehinitialarabic"]=65175, - ["tehiragana"]=12390, - ["tehjeeminitialarabic"]=64673, - ["tehjeemisolatedarabic"]=64523, - ["tehmarbutaarabic"]=1577, - ["tehmarbutafinalarabic"]=65172, - ["tehmedialarabic"]=65176, - ["tehmeeminitialarabic"]=64676, - ["tehmeemisolatedarabic"]=64526, - ["tehnoonfinalarabic"]=64627, - ["tekatakana"]=12486, - ["tekatakanahalfwidth"]=65411, - ["telephone"]=8481, - ["telephoneblack"]=9742, - ["telishagedolahebrew"]=1440, - ["telishaqetanahebrew"]=1449, - ["tencircle"]=9321, - ["tenideographicparen"]=12841, - ["tenparen"]=9341, - 
["tenperiod"]=9361, - ["tenroman"]=8569, - ["tesh"]=679, - ["tet"]=1496, - ["tetdagesh"]=64312, - ["tetdageshhebrew"]=64312, - ["tethebrew"]=1496, - ["tetsecyrillic"]=1205, - ["tevirhebrew"]=1435, - ["tevirlefthebrew"]=1435, - ["thabengali"]=2469, - ["thadeva"]=2341, - ["thagujarati"]=2725, - ["thagurmukhi"]=2597, - ["thalarabic"]=1584, - ["thalfinalarabic"]=65196, - ["thanthakhatthai"]=3660, - ["theharabic"]=1579, - ["thehfinalarabic"]=65178, - ["thehinitialarabic"]=65179, - ["thehmedialarabic"]=65180, - ["thereexists"]=8707, - ["therefore"]=8756, - ["theta"]=952, - ["theta1"]=977, - ["thetasymbolgreek"]=977, - ["thieuthacirclekorean"]=12921, - ["thieuthaparenkorean"]=12825, - ["thieuthcirclekorean"]=12907, - ["thieuthkorean"]=12620, - ["thieuthparenkorean"]=12811, - ["thirteencircle"]=9324, - ["thirteenparen"]=9344, - ["thirteenperiod"]=9364, - ["thonangmonthothai"]=3601, - ["thook"]=429, - ["thophuthaothai"]=3602, - ["thorn"]=254, - ["thothahanthai"]=3607, - ["thothanthai"]=3600, - ["thothongthai"]=3608, - ["thothungthai"]=3606, - ["thousandcyrillic"]=1154, - ["thousandsseparatorarabic"]=1644, - ["thousandsseparatorpersian"]=1644, - ["three"]=51, - ["threearabic"]=1635, - ["threebengali"]=2537, - ["threecircle"]=9314, - ["threecircleinversesansserif"]=10124, - ["threedeva"]=2409, - ["threeeighths"]=8540, - ["threegujarati"]=2793, - ["threegurmukhi"]=2665, - ["threehackarabic"]=1635, - ["threehangzhou"]=12323, - ["threeideographicparen"]=12834, - ["threeinferior"]=8323, - ["threemonospace"]=65299, - ["threenumeratorbengali"]=2550, - ["threeparen"]=9334, - ["threeperiod"]=9354, - ["threepersian"]=1779, - ["threequarters"]=190, - ["threeroman"]=8562, - ["threesuperior"]=179, - ["threethai"]=3667, - ["thzsquare"]=13204, - ["tihiragana"]=12385, - ["tikatakana"]=12481, - ["tikatakanahalfwidth"]=65409, - ["tikeutacirclekorean"]=12912, - ["tikeutaparenkorean"]=12816, - ["tikeutcirclekorean"]=12898, - ["tikeutkorean"]=12599, - ["tikeutparenkorean"]=12802, - ["tilde"]=732, - ["tildebelowcmb"]=816, - ["tildecmb"]=771, - ["tildecomb"]=771, - ["tildedoublecmb"]=864, - ["tildeoperator"]=8764, - ["tildeoverlaycmb"]=820, - ["tildeverticalcmb"]=830, - ["timescircle"]=8855, - ["tipehahebrew"]=1430, - ["tipehalefthebrew"]=1430, - ["tippigurmukhi"]=2672, - ["titlocyrilliccmb"]=1155, - ["tiwnarmenian"]=1407, - ["tlinebelow"]=7791, - ["tmonospace"]=65364, - ["toarmenian"]=1385, - ["tohiragana"]=12392, - ["tokatakana"]=12488, - ["tokatakanahalfwidth"]=65412, - ["tonebarextrahighmod"]=741, - ["tonebarextralowmod"]=745, - ["tonebarhighmod"]=742, - ["tonebarlowmod"]=744, - ["tonebarmidmod"]=743, - ["tonefive"]=445, - ["tonesix"]=389, - ["tonetwo"]=424, - ["tonos"]=900, - ["tonsquare"]=13095, - ["topatakthai"]=3599, - ["tortoiseshellbracketleft"]=12308, - ["tortoiseshellbracketleftsmall"]=65117, - ["tortoiseshellbracketleftvertical"]=65081, - ["tortoiseshellbracketright"]=12309, - ["tortoiseshellbracketrightsmall"]=65118, - ["tortoiseshellbracketrightvertical"]=65082, - ["totaothai"]=3605, - ["tpalatalhook"]=427, - ["tparen"]=9391, - ["trademark"]=8482, - ["tretroflexhook"]=648, - ["triagdn"]=9660, - ["triaglf"]=9668, - ["triagrt"]=9658, - ["triagup"]=9650, - ["ts"]=678, - ["tsadi"]=1510, - ["tsadidagesh"]=64326, - ["tsadidageshhebrew"]=64326, - ["tsadihebrew"]=1510, - ["tsecyrillic"]=1094, - ["tsere"]=1461, - ["tsere12"]=1461, - ["tsere1e"]=1461, - ["tsere2b"]=1461, - ["tserehebrew"]=1461, - ["tserenarrowhebrew"]=1461, - ["tserequarterhebrew"]=1461, - ["tserewidehebrew"]=1461, - ["tshecyrillic"]=1115, - 
["ttabengali"]=2463, - ["ttadeva"]=2335, - ["ttagujarati"]=2719, - ["ttagurmukhi"]=2591, - ["tteharabic"]=1657, - ["ttehfinalarabic"]=64359, - ["ttehinitialarabic"]=64360, - ["ttehmedialarabic"]=64361, - ["tthabengali"]=2464, - ["tthadeva"]=2336, - ["tthagujarati"]=2720, - ["tthagurmukhi"]=2592, - ["tturned"]=647, - ["tuhiragana"]=12388, - ["tukatakana"]=12484, - ["tukatakanahalfwidth"]=65410, - ["tusmallhiragana"]=12387, - ["tusmallkatakana"]=12483, - ["tusmallkatakanahalfwidth"]=65391, - ["twelvecircle"]=9323, - ["twelveparen"]=9343, - ["twelveperiod"]=9363, - ["twelveroman"]=8571, - ["twentycircle"]=9331, - ["twentyparen"]=9351, - ["twentyperiod"]=9371, - ["two"]=50, - ["twoarabic"]=1634, - ["twobengali"]=2536, - ["twocircle"]=9313, - ["twocircleinversesansserif"]=10123, - ["twodeva"]=2408, - ["twodotenleader"]=8229, - ["twodotleader"]=8229, - ["twodotleadervertical"]=65072, - ["twogujarati"]=2792, - ["twogurmukhi"]=2664, - ["twohackarabic"]=1634, - ["twohangzhou"]=12322, - ["twoideographicparen"]=12833, - ["twoinferior"]=8322, - ["twomonospace"]=65298, - ["twonumeratorbengali"]=2549, - ["twoparen"]=9333, - ["twoperiod"]=9353, - ["twopersian"]=1778, - ["tworoman"]=8561, - ["twostroke"]=443, - ["twosuperior"]=178, - ["twothai"]=3666, - ["twothirds"]=8532, - ["u"]=117, - ["uacute"]=250, - ["ubar"]=649, - ["ubengali"]=2441, - ["ubopomofo"]=12584, - ["ubreve"]=365, - ["ucaron"]=468, - ["ucircle"]=9444, - ["ucircumflex"]=251, - ["ucircumflexbelow"]=7799, - ["ucyrillic"]=1091, - ["udattadeva"]=2385, - ["udblacute"]=369, - ["udblgrave"]=533, - ["udeva"]=2313, - ["udieresis"]=252, - ["udieresisacute"]=472, - ["udieresisbelow"]=7795, - ["udieresiscaron"]=474, - ["udieresiscyrillic"]=1265, - ["udieresisgrave"]=476, - ["udieresismacron"]=470, - ["udotbelow"]=7909, - ["ugrave"]=249, - ["ugujarati"]=2697, - ["ugurmukhi"]=2569, - ["uhiragana"]=12358, - ["uhookabove"]=7911, - ["uhorn"]=432, - ["uhornacute"]=7913, - ["uhorndotbelow"]=7921, - ["uhorngrave"]=7915, - ["uhornhookabove"]=7917, - ["uhorntilde"]=7919, - ["uhungarumlaut"]=369, - ["uhungarumlautcyrillic"]=1267, - ["uinvertedbreve"]=535, - ["ukatakana"]=12454, - ["ukatakanahalfwidth"]=65395, - ["ukcyrillic"]=1145, - ["ukorean"]=12636, - ["umacron"]=363, - ["umacroncyrillic"]=1263, - ["umacrondieresis"]=7803, - ["umatragurmukhi"]=2625, - ["umonospace"]=65365, - ["underscore"]=95, - ["underscoredbl"]=8215, - ["underscoremonospace"]=65343, - ["underscorevertical"]=65075, - ["underscorewavy"]=65103, - ["union"]=8746, - ["universal"]=8704, - ["uogonek"]=371, - ["uparen"]=9392, - ["upblock"]=9600, - ["upperdothebrew"]=1476, - ["upsilon"]=965, - ["upsilondieresis"]=971, - ["upsilondieresistonos"]=944, - ["upsilonlatin"]=650, - ["upsilontonos"]=973, - ["uptackbelowcmb"]=797, - ["uptackmod"]=724, - ["uragurmukhi"]=2675, - ["uring"]=367, - ["ushortcyrillic"]=1118, - ["usmallhiragana"]=12357, - ["usmallkatakana"]=12453, - ["usmallkatakanahalfwidth"]=65385, - ["ustraightcyrillic"]=1199, - ["ustraightstrokecyrillic"]=1201, - ["utilde"]=361, - ["utildeacute"]=7801, - ["utildebelow"]=7797, - ["uubengali"]=2442, - ["uudeva"]=2314, - ["uugujarati"]=2698, - ["uugurmukhi"]=2570, - ["uumatragurmukhi"]=2626, - ["uuvowelsignbengali"]=2498, - ["uuvowelsigndeva"]=2370, - ["uuvowelsigngujarati"]=2754, - ["uvowelsignbengali"]=2497, - ["uvowelsigndeva"]=2369, - ["uvowelsigngujarati"]=2753, - ["v"]=118, - ["vadeva"]=2357, - ["vagujarati"]=2741, - ["vagurmukhi"]=2613, - ["vakatakana"]=12535, - ["vav"]=1493, - ["vavdagesh"]=64309, - ["vavdagesh65"]=64309, - 
["vavdageshhebrew"]=64309, - ["vavhebrew"]=1493, - ["vavholam"]=64331, - ["vavholamhebrew"]=64331, - ["vavvavhebrew"]=1520, - ["vavyodhebrew"]=1521, - ["vcircle"]=9445, - ["vdotbelow"]=7807, - ["vecyrillic"]=1074, - ["veharabic"]=1700, - ["vehfinalarabic"]=64363, - ["vehinitialarabic"]=64364, - ["vehmedialarabic"]=64365, - ["vekatakana"]=12537, - ["venus"]=9792, - ["verticalbar"]=124, - ["verticallineabovecmb"]=781, - ["verticallinebelowcmb"]=809, - ["verticallinelowmod"]=716, - ["verticallinemod"]=712, - ["vewarmenian"]=1406, - ["vhook"]=651, - ["vikatakana"]=12536, - ["viramabengali"]=2509, - ["viramadeva"]=2381, - ["viramagujarati"]=2765, - ["visargabengali"]=2435, - ["visargadeva"]=2307, - ["visargagujarati"]=2691, - ["vmonospace"]=65366, - ["voarmenian"]=1400, - ["voicediterationhiragana"]=12446, - ["voicediterationkatakana"]=12542, - ["voicedmarkkana"]=12443, - ["voicedmarkkanahalfwidth"]=65438, - ["vokatakana"]=12538, - ["vparen"]=9393, - ["vtilde"]=7805, - ["vturned"]=652, - ["vuhiragana"]=12436, - ["vukatakana"]=12532, - ["w"]=119, - ["wacute"]=7811, - ["waekorean"]=12633, - ["wahiragana"]=12431, - ["wakatakana"]=12527, - ["wakatakanahalfwidth"]=65436, - ["wakorean"]=12632, - ["wasmallhiragana"]=12430, - ["wasmallkatakana"]=12526, - ["wattosquare"]=13143, - ["wavedash"]=12316, - ["wavyunderscorevertical"]=65076, - ["wawarabic"]=1608, - ["wawfinalarabic"]=65262, - ["wawhamzaabovearabic"]=1572, - ["wawhamzaabovefinalarabic"]=65158, - ["wbsquare"]=13277, - ["wcircle"]=9446, - ["wcircumflex"]=373, - ["wdieresis"]=7813, - ["wdotaccent"]=7815, - ["wdotbelow"]=7817, - ["wehiragana"]=12433, - ["weierstrass"]=8472, - ["wekatakana"]=12529, - ["wekorean"]=12638, - ["weokorean"]=12637, - ["wgrave"]=7809, - ["whitebullet"]=9702, - ["whitecircle"]=9675, - ["whitecircleinverse"]=9689, - ["whitecornerbracketleft"]=12302, - ["whitecornerbracketleftvertical"]=65091, - ["whitecornerbracketright"]=12303, - ["whitecornerbracketrightvertical"]=65092, - ["whitediamond"]=9671, - ["whitediamondcontainingblacksmalldiamond"]=9672, - ["whitedownpointingsmalltriangle"]=9663, - ["whitedownpointingtriangle"]=9661, - ["whiteleftpointingsmalltriangle"]=9667, - ["whiteleftpointingtriangle"]=9665, - ["whitelenticularbracketleft"]=12310, - ["whitelenticularbracketright"]=12311, - ["whiterightpointingsmalltriangle"]=9657, - ["whiterightpointingtriangle"]=9655, - ["whitesmallsquare"]=9643, - ["whitesmilingface"]=9786, - ["whitesquare"]=9633, - ["whitestar"]=9734, - ["whitetelephone"]=9743, - ["whitetortoiseshellbracketleft"]=12312, - ["whitetortoiseshellbracketright"]=12313, - ["whiteuppointingsmalltriangle"]=9653, - ["whiteuppointingtriangle"]=9651, - ["wihiragana"]=12432, - ["wikatakana"]=12528, - ["wikorean"]=12639, - ["wmonospace"]=65367, - ["wohiragana"]=12434, - ["wokatakana"]=12530, - ["wokatakanahalfwidth"]=65382, - ["won"]=8361, - ["wonmonospace"]=65510, - ["wowaenthai"]=3623, - ["wparen"]=9394, - ["wring"]=7832, - ["wsuperior"]=695, - ["wturned"]=653, - ["wynn"]=447, - ["x"]=120, - ["xabovecmb"]=829, - ["xbopomofo"]=12562, - ["xcircle"]=9447, - ["xdieresis"]=7821, - ["xdotaccent"]=7819, - ["xeharmenian"]=1389, - ["xi"]=958, - ["xmonospace"]=65368, - ["xparen"]=9395, - ["xsuperior"]=739, - ["y"]=121, - ["yaadosquare"]=13134, - ["yabengali"]=2479, - ["yacute"]=253, - ["yadeva"]=2351, - ["yaekorean"]=12626, - ["yagujarati"]=2735, - ["yagurmukhi"]=2607, - ["yahiragana"]=12420, - ["yakatakana"]=12516, - ["yakatakanahalfwidth"]=65428, - ["yakorean"]=12625, - ["yamakkanthai"]=3662, - ["yasmallhiragana"]=12419, - 
["yasmallkatakana"]=12515, - ["yasmallkatakanahalfwidth"]=65388, - ["yatcyrillic"]=1123, - ["ycircle"]=9448, - ["ycircumflex"]=375, - ["ydieresis"]=255, - ["ydotaccent"]=7823, - ["ydotbelow"]=7925, - ["yeharabic"]=1610, - ["yehbarreearabic"]=1746, - ["yehbarreefinalarabic"]=64431, - ["yehfinalarabic"]=65266, - ["yehhamzaabovearabic"]=1574, - ["yehhamzaabovefinalarabic"]=65162, - ["yehhamzaaboveinitialarabic"]=65163, - ["yehhamzaabovemedialarabic"]=65164, - ["yehinitialarabic"]=65267, - ["yehmedialarabic"]=65268, - ["yehmeeminitialarabic"]=64733, - ["yehmeemisolatedarabic"]=64600, - ["yehnoonfinalarabic"]=64660, - ["yehthreedotsbelowarabic"]=1745, - ["yekorean"]=12630, - ["yen"]=165, - ["yenmonospace"]=65509, - ["yeokorean"]=12629, - ["yeorinhieuhkorean"]=12678, - ["yerahbenyomohebrew"]=1450, - ["yerahbenyomolefthebrew"]=1450, - ["yericyrillic"]=1099, - ["yerudieresiscyrillic"]=1273, - ["yesieungkorean"]=12673, - ["yesieungpansioskorean"]=12675, - ["yesieungsioskorean"]=12674, - ["yetivhebrew"]=1434, - ["ygrave"]=7923, - ["yhook"]=436, - ["yhookabove"]=7927, - ["yiarmenian"]=1397, - ["yicyrillic"]=1111, - ["yikorean"]=12642, - ["yinyang"]=9775, - ["yiwnarmenian"]=1410, - ["ymonospace"]=65369, - ["yod"]=1497, - ["yoddagesh"]=64313, - ["yoddageshhebrew"]=64313, - ["yodhebrew"]=1497, - ["yodyodhebrew"]=1522, - ["yodyodpatahhebrew"]=64287, - ["yohiragana"]=12424, - ["yoikorean"]=12681, - ["yokatakana"]=12520, - ["yokatakanahalfwidth"]=65430, - ["yokorean"]=12635, - ["yosmallhiragana"]=12423, - ["yosmallkatakana"]=12519, - ["yosmallkatakanahalfwidth"]=65390, - ["yotgreek"]=1011, - ["yoyaekorean"]=12680, - ["yoyakorean"]=12679, - ["yoyakthai"]=3618, - ["yoyingthai"]=3597, - ["yparen"]=9396, - ["ypogegrammeni"]=890, - ["ypogegrammenigreekcmb"]=837, - ["yr"]=422, - ["yring"]=7833, - ["ysuperior"]=696, - ["ytilde"]=7929, - ["yturned"]=654, - ["yuhiragana"]=12422, - ["yuikorean"]=12684, - ["yukatakana"]=12518, - ["yukatakanahalfwidth"]=65429, - ["yukorean"]=12640, - ["yusbigcyrillic"]=1131, - ["yusbigiotifiedcyrillic"]=1133, - ["yuslittlecyrillic"]=1127, - ["yuslittleiotifiedcyrillic"]=1129, - ["yusmallhiragana"]=12421, - ["yusmallkatakana"]=12517, - ["yusmallkatakanahalfwidth"]=65389, - ["yuyekorean"]=12683, - ["yuyeokorean"]=12682, - ["yyabengali"]=2527, - ["yyadeva"]=2399, - ["z"]=122, - ["zaarmenian"]=1382, - ["zacute"]=378, - ["zadeva"]=2395, - ["zagurmukhi"]=2651, - ["zaharabic"]=1592, - ["zahfinalarabic"]=65222, - ["zahinitialarabic"]=65223, - ["zahiragana"]=12374, - ["zahmedialarabic"]=65224, - ["zainarabic"]=1586, - ["zainfinalarabic"]=65200, - ["zakatakana"]=12470, - ["zaqefgadolhebrew"]=1429, - ["zaqefqatanhebrew"]=1428, - ["zarqahebrew"]=1432, - ["zayin"]=1494, - ["zayindagesh"]=64310, - ["zayindageshhebrew"]=64310, - ["zayinhebrew"]=1494, - ["zbopomofo"]=12567, - ["zcaron"]=382, - ["zcircle"]=9449, - ["zcircumflex"]=7825, - ["zcurl"]=657, - ["zdot"]=380, - ["zdotaccent"]=380, - ["zdotbelow"]=7827, - ["zecyrillic"]=1079, - ["zedescendercyrillic"]=1177, - ["zedieresiscyrillic"]=1247, - ["zehiragana"]=12380, - ["zekatakana"]=12476, - ["zero"]=48, - ["zeroarabic"]=1632, - ["zerobengali"]=2534, - ["zerodeva"]=2406, - ["zerogujarati"]=2790, - ["zerogurmukhi"]=2662, - ["zerohackarabic"]=1632, - ["zeroinferior"]=8320, - ["zeromonospace"]=65296, - ["zeropersian"]=1776, - ["zerosuperior"]=8304, - ["zerothai"]=3664, - ["zerowidthjoiner"]=65279, - ["zerowidthnonjoiner"]=8204, - ["zerowidthspace"]=8203, - ["zeta"]=950, - ["zhbopomofo"]=12563, - ["zhearmenian"]=1386, - ["zhebrevecyrillic"]=1218, - 
["zhecyrillic"]=1078, - ["zhedescendercyrillic"]=1175, - ["zhedieresiscyrillic"]=1245, - ["zihiragana"]=12376, - ["zikatakana"]=12472, - ["zinorhebrew"]=1454, - ["zlinebelow"]=7829, - ["zmonospace"]=65370, - ["zohiragana"]=12382, - ["zokatakana"]=12478, - ["zparen"]=9397, - ["zretroflexhook"]=656, - ["zstroke"]=438, - ["zuhiragana"]=12378, - ["zukatakana"]=12474, -} +if not modules then modules = { } end modules ['font-age'] = { + version = 1.001, + comment = "companion to luatex-fonts.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "derived from http://www.adobe.com/devnet/opentype/archives/glyphlist.txt", + original = "Adobe Glyph List, version 2.0, September 20, 2002", + dataonly = true, +} + +if context then + logs.report("fatal error","this module is not for context") + os.exit() +end + +return { -- generated: inspect(fonts.encodings.agl.unicodes) + ["A"]=65, + ["AE"]=198, + ["AEacute"]=508, + ["AEmacron"]=482, + ["Aacute"]=193, + ["Abreve"]=258, + ["Abreveacute"]=7854, + ["Abrevecyrillic"]=1232, + ["Abrevedotbelow"]=7862, + ["Abrevegrave"]=7856, + ["Abrevehookabove"]=7858, + ["Abrevetilde"]=7860, + ["Acaron"]=461, + ["Acircle"]=9398, + ["Acircumflex"]=194, + ["Acircumflexacute"]=7844, + ["Acircumflexdotbelow"]=7852, + ["Acircumflexgrave"]=7846, + ["Acircumflexhookabove"]=7848, + ["Acircumflextilde"]=7850, + ["Acyrillic"]=1040, + ["Adblgrave"]=512, + ["Adieresis"]=196, + ["Adieresiscyrillic"]=1234, + ["Adieresismacron"]=478, + ["Adotbelow"]=7840, + ["Adotmacron"]=480, + ["Agrave"]=192, + ["Ahookabove"]=7842, + ["Aiecyrillic"]=1236, + ["Ainvertedbreve"]=514, + ["Alpha"]=913, + ["Alphatonos"]=902, + ["Amacron"]=256, + ["Amonospace"]=65313, + ["Aogonek"]=260, + ["Aring"]=197, + ["Aringacute"]=506, + ["Aringbelow"]=7680, + ["Atilde"]=195, + ["Aybarmenian"]=1329, + ["B"]=66, + ["Bcircle"]=9399, + ["Bdotaccent"]=7682, + ["Bdotbelow"]=7684, + ["Becyrillic"]=1041, + ["Benarmenian"]=1330, + ["Beta"]=914, + ["Bhook"]=385, + ["Blinebelow"]=7686, + ["Bmonospace"]=65314, + ["Btopbar"]=386, + ["C"]=67, + ["Caarmenian"]=1342, + ["Cacute"]=262, + ["Ccaron"]=268, + ["Ccedilla"]=199, + ["Ccedillaacute"]=7688, + ["Ccircle"]=9400, + ["Ccircumflex"]=264, + ["Cdot"]=266, + ["Cdotaccent"]=266, + ["Chaarmenian"]=1353, + ["Cheabkhasiancyrillic"]=1212, + ["Checyrillic"]=1063, + ["Chedescenderabkhasiancyrillic"]=1214, + ["Chedescendercyrillic"]=1206, + ["Chedieresiscyrillic"]=1268, + ["Cheharmenian"]=1347, + ["Chekhakassiancyrillic"]=1227, + ["Cheverticalstrokecyrillic"]=1208, + ["Chi"]=935, + ["Chook"]=391, + ["Cmonospace"]=65315, + ["Coarmenian"]=1361, + ["D"]=68, + ["DZ"]=497, + ["DZcaron"]=452, + ["Daarmenian"]=1332, + ["Dafrican"]=393, + ["Dcaron"]=270, + ["Dcedilla"]=7696, + ["Dcircle"]=9401, + ["Dcircumflexbelow"]=7698, + ["Dcroat"]=272, + ["Ddotaccent"]=7690, + ["Ddotbelow"]=7692, + ["Decyrillic"]=1044, + ["Deicoptic"]=1006, + ["Delta"]=8710, + ["Deltagreek"]=916, + ["Dhook"]=394, + ["Digammagreek"]=988, + ["Djecyrillic"]=1026, + ["Dlinebelow"]=7694, + ["Dmonospace"]=65316, + ["Dslash"]=272, + ["Dtopbar"]=395, + ["Dz"]=498, + ["Dzcaron"]=453, + ["Dzeabkhasiancyrillic"]=1248, + ["Dzecyrillic"]=1029, + ["Dzhecyrillic"]=1039, + ["E"]=69, + ["Eacute"]=201, + ["Ebreve"]=276, + ["Ecaron"]=282, + ["Ecedillabreve"]=7708, + ["Echarmenian"]=1333, + ["Ecircle"]=9402, + ["Ecircumflex"]=202, + ["Ecircumflexacute"]=7870, + ["Ecircumflexbelow"]=7704, + ["Ecircumflexdotbelow"]=7878, + ["Ecircumflexgrave"]=7872, + ["Ecircumflexhookabove"]=7874, + ["Ecircumflextilde"]=7876, + ["Ecyrillic"]=1028, + 
["Edblgrave"]=516, + ["Edieresis"]=203, + ["Edot"]=278, + ["Edotaccent"]=278, + ["Edotbelow"]=7864, + ["Efcyrillic"]=1060, + ["Egrave"]=200, + ["Eharmenian"]=1335, + ["Ehookabove"]=7866, + ["Eightroman"]=8551, + ["Einvertedbreve"]=518, + ["Eiotifiedcyrillic"]=1124, + ["Elcyrillic"]=1051, + ["Elevenroman"]=8554, + ["Emacron"]=274, + ["Emacronacute"]=7702, + ["Emacrongrave"]=7700, + ["Emcyrillic"]=1052, + ["Emonospace"]=65317, + ["Encyrillic"]=1053, + ["Endescendercyrillic"]=1186, + ["Eng"]=330, + ["Enghecyrillic"]=1188, + ["Enhookcyrillic"]=1223, + ["Eogonek"]=280, + ["Eopen"]=400, + ["Epsilon"]=917, + ["Epsilontonos"]=904, + ["Ercyrillic"]=1056, + ["Ereversed"]=398, + ["Ereversedcyrillic"]=1069, + ["Escyrillic"]=1057, + ["Esdescendercyrillic"]=1194, + ["Esh"]=425, + ["Eta"]=919, + ["Etarmenian"]=1336, + ["Etatonos"]=905, + ["Eth"]=208, + ["Etilde"]=7868, + ["Etildebelow"]=7706, + ["Euro"]=8364, + ["Ezh"]=439, + ["Ezhcaron"]=494, + ["Ezhreversed"]=440, + ["F"]=70, + ["Fcircle"]=9403, + ["Fdotaccent"]=7710, + ["Feharmenian"]=1366, + ["Feicoptic"]=996, + ["Fhook"]=401, + ["Fitacyrillic"]=1138, + ["Fiveroman"]=8548, + ["Fmonospace"]=65318, + ["Fourroman"]=8547, + ["G"]=71, + ["GBsquare"]=13191, + ["Gacute"]=500, + ["Gamma"]=915, + ["Gammaafrican"]=404, + ["Gangiacoptic"]=1002, + ["Gbreve"]=286, + ["Gcaron"]=486, + ["Gcedilla"]=290, + ["Gcircle"]=9404, + ["Gcircumflex"]=284, + ["Gcommaaccent"]=290, + ["Gdot"]=288, + ["Gdotaccent"]=288, + ["Gecyrillic"]=1043, + ["Ghadarmenian"]=1346, + ["Ghemiddlehookcyrillic"]=1172, + ["Ghestrokecyrillic"]=1170, + ["Gheupturncyrillic"]=1168, + ["Ghook"]=403, + ["Gimarmenian"]=1331, + ["Gjecyrillic"]=1027, + ["Gmacron"]=7712, + ["Gmonospace"]=65319, + ["Gsmallhook"]=667, + ["Gstroke"]=484, + ["H"]=72, + ["H18533"]=9679, + ["H18543"]=9642, + ["H18551"]=9643, + ["H22073"]=9633, + ["HPsquare"]=13259, + ["Haabkhasiancyrillic"]=1192, + ["Hadescendercyrillic"]=1202, + ["Hardsigncyrillic"]=1066, + ["Hbar"]=294, + ["Hbrevebelow"]=7722, + ["Hcedilla"]=7720, + ["Hcircle"]=9405, + ["Hcircumflex"]=292, + ["Hdieresis"]=7718, + ["Hdotaccent"]=7714, + ["Hdotbelow"]=7716, + ["Hmonospace"]=65320, + ["Hoarmenian"]=1344, + ["Horicoptic"]=1000, + ["Hzsquare"]=13200, + ["I"]=73, + ["IAcyrillic"]=1071, + ["IJ"]=306, + ["IUcyrillic"]=1070, + ["Iacute"]=205, + ["Ibreve"]=300, + ["Icaron"]=463, + ["Icircle"]=9406, + ["Icircumflex"]=206, + ["Icyrillic"]=1030, + ["Idblgrave"]=520, + ["Idieresis"]=207, + ["Idieresisacute"]=7726, + ["Idieresiscyrillic"]=1252, + ["Idot"]=304, + ["Idotaccent"]=304, + ["Idotbelow"]=7882, + ["Iebrevecyrillic"]=1238, + ["Iecyrillic"]=1045, + ["Ifraktur"]=8465, + ["Igrave"]=204, + ["Ihookabove"]=7880, + ["Iicyrillic"]=1048, + ["Iinvertedbreve"]=522, + ["Iishortcyrillic"]=1049, + ["Imacron"]=298, + ["Imacroncyrillic"]=1250, + ["Imonospace"]=65321, + ["Iniarmenian"]=1339, + ["Iocyrillic"]=1025, + ["Iogonek"]=302, + ["Iota"]=921, + ["Iotaafrican"]=406, + ["Iotadieresis"]=938, + ["Iotatonos"]=906, + ["Istroke"]=407, + ["Itilde"]=296, + ["Itildebelow"]=7724, + ["Izhitsacyrillic"]=1140, + ["Izhitsadblgravecyrillic"]=1142, + ["J"]=74, + ["Jaarmenian"]=1345, + ["Jcircle"]=9407, + ["Jcircumflex"]=308, + ["Jecyrillic"]=1032, + ["Jheharmenian"]=1355, + ["Jmonospace"]=65322, + ["K"]=75, + ["KBsquare"]=13189, + ["KKsquare"]=13261, + ["Kabashkircyrillic"]=1184, + ["Kacute"]=7728, + ["Kacyrillic"]=1050, + ["Kadescendercyrillic"]=1178, + ["Kahookcyrillic"]=1219, + ["Kappa"]=922, + ["Kastrokecyrillic"]=1182, + ["Kaverticalstrokecyrillic"]=1180, + ["Kcaron"]=488, + 
["Kcedilla"]=310, + ["Kcircle"]=9408, + ["Kcommaaccent"]=310, + ["Kdotbelow"]=7730, + ["Keharmenian"]=1364, + ["Kenarmenian"]=1343, + ["Khacyrillic"]=1061, + ["Kheicoptic"]=998, + ["Khook"]=408, + ["Kjecyrillic"]=1036, + ["Klinebelow"]=7732, + ["Kmonospace"]=65323, + ["Koppacyrillic"]=1152, + ["Koppagreek"]=990, + ["Ksicyrillic"]=1134, + ["L"]=76, + ["LJ"]=455, + ["Lacute"]=313, + ["Lambda"]=923, + ["Lcaron"]=317, + ["Lcedilla"]=315, + ["Lcircle"]=9409, + ["Lcircumflexbelow"]=7740, + ["Lcommaaccent"]=315, + ["Ldot"]=319, + ["Ldotaccent"]=319, + ["Ldotbelow"]=7734, + ["Ldotbelowmacron"]=7736, + ["Liwnarmenian"]=1340, + ["Lj"]=456, + ["Ljecyrillic"]=1033, + ["Llinebelow"]=7738, + ["Lmonospace"]=65324, + ["Lslash"]=321, + ["M"]=77, + ["MBsquare"]=13190, + ["Macute"]=7742, + ["Mcircle"]=9410, + ["Mdotaccent"]=7744, + ["Mdotbelow"]=7746, + ["Menarmenian"]=1348, + ["Mmonospace"]=65325, + ["Mturned"]=412, + ["Mu"]=924, + ["N"]=78, + ["NJ"]=458, + ["Nacute"]=323, + ["Ncaron"]=327, + ["Ncedilla"]=325, + ["Ncircle"]=9411, + ["Ncircumflexbelow"]=7754, + ["Ncommaaccent"]=325, + ["Ndotaccent"]=7748, + ["Ndotbelow"]=7750, + ["Nhookleft"]=413, + ["Nineroman"]=8552, + ["Nj"]=459, + ["Njecyrillic"]=1034, + ["Nlinebelow"]=7752, + ["Nmonospace"]=65326, + ["Nowarmenian"]=1350, + ["Ntilde"]=209, + ["Nu"]=925, + ["O"]=79, + ["OE"]=338, + ["Oacute"]=211, + ["Obarredcyrillic"]=1256, + ["Obarreddieresiscyrillic"]=1258, + ["Obreve"]=334, + ["Ocaron"]=465, + ["Ocenteredtilde"]=415, + ["Ocircle"]=9412, + ["Ocircumflex"]=212, + ["Ocircumflexacute"]=7888, + ["Ocircumflexdotbelow"]=7896, + ["Ocircumflexgrave"]=7890, + ["Ocircumflexhookabove"]=7892, + ["Ocircumflextilde"]=7894, + ["Ocyrillic"]=1054, + ["Odblacute"]=336, + ["Odblgrave"]=524, + ["Odieresis"]=214, + ["Odieresiscyrillic"]=1254, + ["Odotbelow"]=7884, + ["Ograve"]=210, + ["Oharmenian"]=1365, + ["Ohm"]=8486, + ["Ohookabove"]=7886, + ["Ohorn"]=416, + ["Ohornacute"]=7898, + ["Ohorndotbelow"]=7906, + ["Ohorngrave"]=7900, + ["Ohornhookabove"]=7902, + ["Ohorntilde"]=7904, + ["Ohungarumlaut"]=336, + ["Oi"]=418, + ["Oinvertedbreve"]=526, + ["Omacron"]=332, + ["Omacronacute"]=7762, + ["Omacrongrave"]=7760, + ["Omega"]=8486, + ["Omegacyrillic"]=1120, + ["Omegagreek"]=937, + ["Omegaroundcyrillic"]=1146, + ["Omegatitlocyrillic"]=1148, + ["Omegatonos"]=911, + ["Omicron"]=927, + ["Omicrontonos"]=908, + ["Omonospace"]=65327, + ["Oneroman"]=8544, + ["Oogonek"]=490, + ["Oogonekmacron"]=492, + ["Oopen"]=390, + ["Oslash"]=216, + ["Oslashacute"]=510, + ["Ostrokeacute"]=510, + ["Otcyrillic"]=1150, + ["Otilde"]=213, + ["Otildeacute"]=7756, + ["Otildedieresis"]=7758, + ["P"]=80, + ["Pacute"]=7764, + ["Pcircle"]=9413, + ["Pdotaccent"]=7766, + ["Pecyrillic"]=1055, + ["Peharmenian"]=1354, + ["Pemiddlehookcyrillic"]=1190, + ["Phi"]=934, + ["Phook"]=420, + ["Pi"]=928, + ["Piwrarmenian"]=1363, + ["Pmonospace"]=65328, + ["Psi"]=936, + ["Psicyrillic"]=1136, + ["Q"]=81, + ["Qcircle"]=9414, + ["Qmonospace"]=65329, + ["R"]=82, + ["Raarmenian"]=1356, + ["Racute"]=340, + ["Rcaron"]=344, + ["Rcedilla"]=342, + ["Rcircle"]=9415, + ["Rcommaaccent"]=342, + ["Rdblgrave"]=528, + ["Rdotaccent"]=7768, + ["Rdotbelow"]=7770, + ["Rdotbelowmacron"]=7772, + ["Reharmenian"]=1360, + ["Rfraktur"]=8476, + ["Rho"]=929, + ["Rinvertedbreve"]=530, + ["Rlinebelow"]=7774, + ["Rmonospace"]=65330, + ["Rsmallinverted"]=641, + ["Rsmallinvertedsuperior"]=694, + ["S"]=83, + ["SF010000"]=9484, + ["SF020000"]=9492, + ["SF030000"]=9488, + ["SF040000"]=9496, + ["SF050000"]=9532, + ["SF060000"]=9516, + ["SF070000"]=9524, + 
["SF080000"]=9500, + ["SF090000"]=9508, + ["SF10000"]=9484, + ["SF100000"]=9472, + ["SF110000"]=9474, + ["SF190000"]=9569, + ["SF20000"]=9492, + ["SF200000"]=9570, + ["SF210000"]=9558, + ["SF220000"]=9557, + ["SF230000"]=9571, + ["SF240000"]=9553, + ["SF250000"]=9559, + ["SF260000"]=9565, + ["SF270000"]=9564, + ["SF280000"]=9563, + ["SF30000"]=9488, + ["SF360000"]=9566, + ["SF370000"]=9567, + ["SF380000"]=9562, + ["SF390000"]=9556, + ["SF40000"]=9496, + ["SF400000"]=9577, + ["SF410000"]=9574, + ["SF420000"]=9568, + ["SF430000"]=9552, + ["SF440000"]=9580, + ["SF450000"]=9575, + ["SF460000"]=9576, + ["SF470000"]=9572, + ["SF480000"]=9573, + ["SF490000"]=9561, + ["SF50000"]=9532, + ["SF500000"]=9560, + ["SF510000"]=9554, + ["SF520000"]=9555, + ["SF530000"]=9579, + ["SF540000"]=9578, + ["SF60000"]=9516, + ["SF70000"]=9524, + ["SF80000"]=9500, + ["SF90000"]=9508, + ["Sacute"]=346, + ["Sacutedotaccent"]=7780, + ["Sampigreek"]=992, + ["Scaron"]=352, + ["Scarondotaccent"]=7782, + ["Scedilla"]=350, + ["Schwa"]=399, + ["Schwacyrillic"]=1240, + ["Schwadieresiscyrillic"]=1242, + ["Scircle"]=9416, + ["Scircumflex"]=348, + ["Scommaaccent"]=536, + ["Sdotaccent"]=7776, + ["Sdotbelow"]=7778, + ["Sdotbelowdotaccent"]=7784, + ["Seharmenian"]=1357, + ["Sevenroman"]=8550, + ["Shaarmenian"]=1351, + ["Shacyrillic"]=1064, + ["Shchacyrillic"]=1065, + ["Sheicoptic"]=994, + ["Shhacyrillic"]=1210, + ["Shimacoptic"]=1004, + ["Sigma"]=931, + ["Sixroman"]=8549, + ["Smonospace"]=65331, + ["Softsigncyrillic"]=1068, + ["Stigmagreek"]=986, + ["T"]=84, + ["Tau"]=932, + ["Tbar"]=358, + ["Tcaron"]=356, + ["Tcedilla"]=354, + ["Tcircle"]=9417, + ["Tcircumflexbelow"]=7792, + ["Tcommaaccent"]=354, + ["Tdotaccent"]=7786, + ["Tdotbelow"]=7788, + ["Tecyrillic"]=1058, + ["Tedescendercyrillic"]=1196, + ["Tenroman"]=8553, + ["Tetsecyrillic"]=1204, + ["Theta"]=920, + ["Thook"]=428, + ["Thorn"]=222, + ["Threeroman"]=8546, + ["Tiwnarmenian"]=1359, + ["Tlinebelow"]=7790, + ["Tmonospace"]=65332, + ["Toarmenian"]=1337, + ["Tonefive"]=444, + ["Tonesix"]=388, + ["Tonetwo"]=423, + ["Tretroflexhook"]=430, + ["Tsecyrillic"]=1062, + ["Tshecyrillic"]=1035, + ["Twelveroman"]=8555, + ["Tworoman"]=8545, + ["U"]=85, + ["Uacute"]=218, + ["Ubreve"]=364, + ["Ucaron"]=467, + ["Ucircle"]=9418, + ["Ucircumflex"]=219, + ["Ucircumflexbelow"]=7798, + ["Ucyrillic"]=1059, + ["Udblacute"]=368, + ["Udblgrave"]=532, + ["Udieresis"]=220, + ["Udieresisacute"]=471, + ["Udieresisbelow"]=7794, + ["Udieresiscaron"]=473, + ["Udieresiscyrillic"]=1264, + ["Udieresisgrave"]=475, + ["Udieresismacron"]=469, + ["Udotbelow"]=7908, + ["Ugrave"]=217, + ["Uhookabove"]=7910, + ["Uhorn"]=431, + ["Uhornacute"]=7912, + ["Uhorndotbelow"]=7920, + ["Uhorngrave"]=7914, + ["Uhornhookabove"]=7916, + ["Uhorntilde"]=7918, + ["Uhungarumlaut"]=368, + ["Uhungarumlautcyrillic"]=1266, + ["Uinvertedbreve"]=534, + ["Ukcyrillic"]=1144, + ["Umacron"]=362, + ["Umacroncyrillic"]=1262, + ["Umacrondieresis"]=7802, + ["Umonospace"]=65333, + ["Uogonek"]=370, + ["Upsilon"]=933, + ["Upsilon1"]=978, + ["Upsilonacutehooksymbolgreek"]=979, + ["Upsilonafrican"]=433, + ["Upsilondieresis"]=939, + ["Upsilondieresishooksymbolgreek"]=980, + ["Upsilonhooksymbol"]=978, + ["Upsilontonos"]=910, + ["Uring"]=366, + ["Ushortcyrillic"]=1038, + ["Ustraightcyrillic"]=1198, + ["Ustraightstrokecyrillic"]=1200, + ["Utilde"]=360, + ["Utildeacute"]=7800, + ["Utildebelow"]=7796, + ["V"]=86, + ["Vcircle"]=9419, + ["Vdotbelow"]=7806, + ["Vecyrillic"]=1042, + ["Vewarmenian"]=1358, + ["Vhook"]=434, + ["Vmonospace"]=65334, + 
["Voarmenian"]=1352, + ["Vtilde"]=7804, + ["W"]=87, + ["Wacute"]=7810, + ["Wcircle"]=9420, + ["Wcircumflex"]=372, + ["Wdieresis"]=7812, + ["Wdotaccent"]=7814, + ["Wdotbelow"]=7816, + ["Wgrave"]=7808, + ["Wmonospace"]=65335, + ["X"]=88, + ["Xcircle"]=9421, + ["Xdieresis"]=7820, + ["Xdotaccent"]=7818, + ["Xeharmenian"]=1341, + ["Xi"]=926, + ["Xmonospace"]=65336, + ["Y"]=89, + ["Yacute"]=221, + ["Yatcyrillic"]=1122, + ["Ycircle"]=9422, + ["Ycircumflex"]=374, + ["Ydieresis"]=376, + ["Ydotaccent"]=7822, + ["Ydotbelow"]=7924, + ["Yericyrillic"]=1067, + ["Yerudieresiscyrillic"]=1272, + ["Ygrave"]=7922, + ["Yhook"]=435, + ["Yhookabove"]=7926, + ["Yiarmenian"]=1349, + ["Yicyrillic"]=1031, + ["Yiwnarmenian"]=1362, + ["Ymonospace"]=65337, + ["Ytilde"]=7928, + ["Yusbigcyrillic"]=1130, + ["Yusbigiotifiedcyrillic"]=1132, + ["Yuslittlecyrillic"]=1126, + ["Yuslittleiotifiedcyrillic"]=1128, + ["Z"]=90, + ["Zaarmenian"]=1334, + ["Zacute"]=377, + ["Zcaron"]=381, + ["Zcircle"]=9423, + ["Zcircumflex"]=7824, + ["Zdot"]=379, + ["Zdotaccent"]=379, + ["Zdotbelow"]=7826, + ["Zecyrillic"]=1047, + ["Zedescendercyrillic"]=1176, + ["Zedieresiscyrillic"]=1246, + ["Zeta"]=918, + ["Zhearmenian"]=1338, + ["Zhebrevecyrillic"]=1217, + ["Zhecyrillic"]=1046, + ["Zhedescendercyrillic"]=1174, + ["Zhedieresiscyrillic"]=1244, + ["Zlinebelow"]=7828, + ["Zmonospace"]=65338, + ["Zstroke"]=437, + ["a"]=97, + ["aabengali"]=2438, + ["aacute"]=225, + ["aadeva"]=2310, + ["aagujarati"]=2694, + ["aagurmukhi"]=2566, + ["aamatragurmukhi"]=2622, + ["aarusquare"]=13059, + ["aavowelsignbengali"]=2494, + ["aavowelsigndeva"]=2366, + ["aavowelsigngujarati"]=2750, + ["abbreviationmarkarmenian"]=1375, + ["abbreviationsigndeva"]=2416, + ["abengali"]=2437, + ["abopomofo"]=12570, + ["abreve"]=259, + ["abreveacute"]=7855, + ["abrevecyrillic"]=1233, + ["abrevedotbelow"]=7863, + ["abrevegrave"]=7857, + ["abrevehookabove"]=7859, + ["abrevetilde"]=7861, + ["acaron"]=462, + ["acircle"]=9424, + ["acircumflex"]=226, + ["acircumflexacute"]=7845, + ["acircumflexdotbelow"]=7853, + ["acircumflexgrave"]=7847, + ["acircumflexhookabove"]=7849, + ["acircumflextilde"]=7851, + ["acute"]=180, + ["acutebelowcmb"]=791, + ["acutecmb"]=769, + ["acutecomb"]=769, + ["acutedeva"]=2388, + ["acutelowmod"]=719, + ["acutetonecmb"]=833, + ["acyrillic"]=1072, + ["adblgrave"]=513, + ["addakgurmukhi"]=2673, + ["adeva"]=2309, + ["adieresis"]=228, + ["adieresiscyrillic"]=1235, + ["adieresismacron"]=479, + ["adotbelow"]=7841, + ["adotmacron"]=481, + ["ae"]=230, + ["aeacute"]=509, + ["aekorean"]=12624, + ["aemacron"]=483, + ["afii00208"]=8213, + ["afii08941"]=8356, + ["afii10017"]=1040, + ["afii10018"]=1041, + ["afii10019"]=1042, + ["afii10020"]=1043, + ["afii10021"]=1044, + ["afii10022"]=1045, + ["afii10023"]=1025, + ["afii10024"]=1046, + ["afii10025"]=1047, + ["afii10026"]=1048, + ["afii10027"]=1049, + ["afii10028"]=1050, + ["afii10029"]=1051, + ["afii10030"]=1052, + ["afii10031"]=1053, + ["afii10032"]=1054, + ["afii10033"]=1055, + ["afii10034"]=1056, + ["afii10035"]=1057, + ["afii10036"]=1058, + ["afii10037"]=1059, + ["afii10038"]=1060, + ["afii10039"]=1061, + ["afii10040"]=1062, + ["afii10041"]=1063, + ["afii10042"]=1064, + ["afii10043"]=1065, + ["afii10044"]=1066, + ["afii10045"]=1067, + ["afii10046"]=1068, + ["afii10047"]=1069, + ["afii10048"]=1070, + ["afii10049"]=1071, + ["afii10050"]=1168, + ["afii10051"]=1026, + ["afii10052"]=1027, + ["afii10053"]=1028, + ["afii10054"]=1029, + ["afii10055"]=1030, + ["afii10056"]=1031, + ["afii10057"]=1032, + ["afii10058"]=1033, + 
["afii10059"]=1034, + ["afii10060"]=1035, + ["afii10061"]=1036, + ["afii10062"]=1038, + ["afii10065"]=1072, + ["afii10066"]=1073, + ["afii10067"]=1074, + ["afii10068"]=1075, + ["afii10069"]=1076, + ["afii10070"]=1077, + ["afii10071"]=1105, + ["afii10072"]=1078, + ["afii10073"]=1079, + ["afii10074"]=1080, + ["afii10075"]=1081, + ["afii10076"]=1082, + ["afii10077"]=1083, + ["afii10078"]=1084, + ["afii10079"]=1085, + ["afii10080"]=1086, + ["afii10081"]=1087, + ["afii10082"]=1088, + ["afii10083"]=1089, + ["afii10084"]=1090, + ["afii10085"]=1091, + ["afii10086"]=1092, + ["afii10087"]=1093, + ["afii10088"]=1094, + ["afii10089"]=1095, + ["afii10090"]=1096, + ["afii10091"]=1097, + ["afii10092"]=1098, + ["afii10093"]=1099, + ["afii10094"]=1100, + ["afii10095"]=1101, + ["afii10096"]=1102, + ["afii10097"]=1103, + ["afii10098"]=1169, + ["afii10099"]=1106, + ["afii10100"]=1107, + ["afii10101"]=1108, + ["afii10102"]=1109, + ["afii10103"]=1110, + ["afii10104"]=1111, + ["afii10105"]=1112, + ["afii10106"]=1113, + ["afii10107"]=1114, + ["afii10108"]=1115, + ["afii10109"]=1116, + ["afii10110"]=1118, + ["afii10145"]=1039, + ["afii10146"]=1122, + ["afii10147"]=1138, + ["afii10148"]=1140, + ["afii10193"]=1119, + ["afii10194"]=1123, + ["afii10195"]=1139, + ["afii10196"]=1141, + ["afii10846"]=1241, + ["afii208"]=8213, + ["afii299"]=8206, + ["afii300"]=8207, + ["afii301"]=8205, + ["afii57381"]=1642, + ["afii57388"]=1548, + ["afii57392"]=1632, + ["afii57393"]=1633, + ["afii57394"]=1634, + ["afii57395"]=1635, + ["afii57396"]=1636, + ["afii57397"]=1637, + ["afii57398"]=1638, + ["afii57399"]=1639, + ["afii57400"]=1640, + ["afii57401"]=1641, + ["afii57403"]=1563, + ["afii57407"]=1567, + ["afii57409"]=1569, + ["afii57410"]=1570, + ["afii57411"]=1571, + ["afii57412"]=1572, + ["afii57413"]=1573, + ["afii57414"]=1574, + ["afii57415"]=1575, + ["afii57416"]=1576, + ["afii57417"]=1577, + ["afii57418"]=1578, + ["afii57419"]=1579, + ["afii57420"]=1580, + ["afii57421"]=1581, + ["afii57422"]=1582, + ["afii57423"]=1583, + ["afii57424"]=1584, + ["afii57425"]=1585, + ["afii57426"]=1586, + ["afii57427"]=1587, + ["afii57428"]=1588, + ["afii57429"]=1589, + ["afii57430"]=1590, + ["afii57431"]=1591, + ["afii57432"]=1592, + ["afii57433"]=1593, + ["afii57434"]=1594, + ["afii57440"]=1600, + ["afii57441"]=1601, + ["afii57442"]=1602, + ["afii57443"]=1603, + ["afii57444"]=1604, + ["afii57445"]=1605, + ["afii57446"]=1606, + ["afii57448"]=1608, + ["afii57449"]=1609, + ["afii57450"]=1610, + ["afii57451"]=1611, + ["afii57452"]=1612, + ["afii57453"]=1613, + ["afii57454"]=1614, + ["afii57455"]=1615, + ["afii57456"]=1616, + ["afii57457"]=1617, + ["afii57458"]=1618, + ["afii57470"]=1607, + ["afii57505"]=1700, + ["afii57506"]=1662, + ["afii57507"]=1670, + ["afii57508"]=1688, + ["afii57509"]=1711, + ["afii57511"]=1657, + ["afii57512"]=1672, + ["afii57513"]=1681, + ["afii57514"]=1722, + ["afii57519"]=1746, + ["afii57534"]=1749, + ["afii57636"]=8362, + ["afii57645"]=1470, + ["afii57658"]=1475, + ["afii57664"]=1488, + ["afii57665"]=1489, + ["afii57666"]=1490, + ["afii57667"]=1491, + ["afii57668"]=1492, + ["afii57669"]=1493, + ["afii57670"]=1494, + ["afii57671"]=1495, + ["afii57672"]=1496, + ["afii57673"]=1497, + ["afii57674"]=1498, + ["afii57675"]=1499, + ["afii57676"]=1500, + ["afii57677"]=1501, + ["afii57678"]=1502, + ["afii57679"]=1503, + ["afii57680"]=1504, + ["afii57681"]=1505, + ["afii57682"]=1506, + ["afii57683"]=1507, + ["afii57684"]=1508, + ["afii57685"]=1509, + ["afii57686"]=1510, + ["afii57687"]=1511, + ["afii57688"]=1512, + ["afii57689"]=1513, 
+ ["afii57690"]=1514, + ["afii57694"]=64298, + ["afii57695"]=64299, + ["afii57700"]=64331, + ["afii57705"]=64287, + ["afii57716"]=1520, + ["afii57717"]=1521, + ["afii57718"]=1522, + ["afii57723"]=64309, + ["afii57793"]=1460, + ["afii57794"]=1461, + ["afii57795"]=1462, + ["afii57796"]=1467, + ["afii57797"]=1464, + ["afii57798"]=1463, + ["afii57799"]=1456, + ["afii57800"]=1458, + ["afii57801"]=1457, + ["afii57802"]=1459, + ["afii57803"]=1474, + ["afii57804"]=1473, + ["afii57806"]=1465, + ["afii57807"]=1468, + ["afii57839"]=1469, + ["afii57841"]=1471, + ["afii57842"]=1472, + ["afii57929"]=700, + ["afii61248"]=8453, + ["afii61289"]=8467, + ["afii61352"]=8470, + ["afii61573"]=8236, + ["afii61574"]=8237, + ["afii61575"]=8238, + ["afii61664"]=8204, + ["afii63167"]=1645, + ["afii64937"]=701, + ["agrave"]=224, + ["agujarati"]=2693, + ["agurmukhi"]=2565, + ["ahiragana"]=12354, + ["ahookabove"]=7843, + ["aibengali"]=2448, + ["aibopomofo"]=12574, + ["aideva"]=2320, + ["aiecyrillic"]=1237, + ["aigujarati"]=2704, + ["aigurmukhi"]=2576, + ["aimatragurmukhi"]=2632, + ["ainarabic"]=1593, + ["ainfinalarabic"]=65226, + ["aininitialarabic"]=65227, + ["ainmedialarabic"]=65228, + ["ainvertedbreve"]=515, + ["aivowelsignbengali"]=2504, + ["aivowelsigndeva"]=2376, + ["aivowelsigngujarati"]=2760, + ["akatakana"]=12450, + ["akatakanahalfwidth"]=65393, + ["akorean"]=12623, + ["alef"]=1488, + ["alefarabic"]=1575, + ["alefdageshhebrew"]=64304, + ["aleffinalarabic"]=65166, + ["alefhamzaabovearabic"]=1571, + ["alefhamzaabovefinalarabic"]=65156, + ["alefhamzabelowarabic"]=1573, + ["alefhamzabelowfinalarabic"]=65160, + ["alefhebrew"]=1488, + ["aleflamedhebrew"]=64335, + ["alefmaddaabovearabic"]=1570, + ["alefmaddaabovefinalarabic"]=65154, + ["alefmaksuraarabic"]=1609, + ["alefmaksurafinalarabic"]=65264, + ["alefmaksurainitialarabic"]=65267, + ["alefmaksuramedialarabic"]=65268, + ["alefpatahhebrew"]=64302, + ["alefqamatshebrew"]=64303, + ["aleph"]=8501, + ["allequal"]=8780, + ["alpha"]=945, + ["alphatonos"]=940, + ["amacron"]=257, + ["amonospace"]=65345, + ["ampersand"]=38, + ["ampersandmonospace"]=65286, + ["amsquare"]=13250, + ["anbopomofo"]=12578, + ["angbopomofo"]=12580, + ["angkhankhuthai"]=3674, + ["angle"]=8736, + ["anglebracketleft"]=12296, + ["anglebracketleftvertical"]=65087, + ["anglebracketright"]=12297, + ["anglebracketrightvertical"]=65088, + ["angleleft"]=9001, + ["angleright"]=9002, + ["angstrom"]=8491, + ["anoteleia"]=903, + ["anudattadeva"]=2386, + ["anusvarabengali"]=2434, + ["anusvaradeva"]=2306, + ["anusvaragujarati"]=2690, + ["aogonek"]=261, + ["apaatosquare"]=13056, + ["aparen"]=9372, + ["apostrophearmenian"]=1370, + ["apostrophemod"]=700, + ["apple"]=63743, + ["approaches"]=8784, + ["approxequal"]=8776, + ["approxequalorimage"]=8786, + ["approximatelyequal"]=8773, + ["araeaekorean"]=12686, + ["araeakorean"]=12685, + ["arc"]=8978, + ["arighthalfring"]=7834, + ["aring"]=229, + ["aringacute"]=507, + ["aringbelow"]=7681, + ["arrowboth"]=8596, + ["arrowdashdown"]=8675, + ["arrowdashleft"]=8672, + ["arrowdashright"]=8674, + ["arrowdashup"]=8673, + ["arrowdblboth"]=8660, + ["arrowdbldown"]=8659, + ["arrowdblleft"]=8656, + ["arrowdblright"]=8658, + ["arrowdblup"]=8657, + ["arrowdown"]=8595, + ["arrowdownleft"]=8601, + ["arrowdownright"]=8600, + ["arrowdownwhite"]=8681, + ["arrowheaddownmod"]=709, + ["arrowheadleftmod"]=706, + ["arrowheadrightmod"]=707, + ["arrowheadupmod"]=708, + ["arrowleft"]=8592, + ["arrowleftdbl"]=8656, + ["arrowleftdblstroke"]=8653, + ["arrowleftoverright"]=8646, + 
["arrowleftwhite"]=8678, + ["arrowright"]=8594, + ["arrowrightdblstroke"]=8655, + ["arrowrightheavy"]=10142, + ["arrowrightoverleft"]=8644, + ["arrowrightwhite"]=8680, + ["arrowtableft"]=8676, + ["arrowtabright"]=8677, + ["arrowup"]=8593, + ["arrowupdn"]=8597, + ["arrowupdnbse"]=8616, + ["arrowupdownbase"]=8616, + ["arrowupleft"]=8598, + ["arrowupleftofdown"]=8645, + ["arrowupright"]=8599, + ["arrowupwhite"]=8679, + ["asciicircum"]=94, + ["asciicircummonospace"]=65342, + ["asciitilde"]=126, + ["asciitildemonospace"]=65374, + ["ascript"]=593, + ["ascriptturned"]=594, + ["asmallhiragana"]=12353, + ["asmallkatakana"]=12449, + ["asmallkatakanahalfwidth"]=65383, + ["asterisk"]=42, + ["asteriskaltonearabic"]=1645, + ["asteriskarabic"]=1645, + ["asteriskmath"]=8727, + ["asteriskmonospace"]=65290, + ["asterisksmall"]=65121, + ["asterism"]=8258, + ["asymptoticallyequal"]=8771, + ["at"]=64, + ["atilde"]=227, + ["atmonospace"]=65312, + ["atsmall"]=65131, + ["aturned"]=592, + ["aubengali"]=2452, + ["aubopomofo"]=12576, + ["audeva"]=2324, + ["augujarati"]=2708, + ["augurmukhi"]=2580, + ["aulengthmarkbengali"]=2519, + ["aumatragurmukhi"]=2636, + ["auvowelsignbengali"]=2508, + ["auvowelsigndeva"]=2380, + ["auvowelsigngujarati"]=2764, + ["avagrahadeva"]=2365, + ["aybarmenian"]=1377, + ["ayin"]=1506, + ["ayinaltonehebrew"]=64288, + ["ayinhebrew"]=1506, + ["b"]=98, + ["babengali"]=2476, + ["backslash"]=92, + ["backslashmonospace"]=65340, + ["badeva"]=2348, + ["bagujarati"]=2732, + ["bagurmukhi"]=2604, + ["bahiragana"]=12400, + ["bahtthai"]=3647, + ["bakatakana"]=12496, + ["bar"]=124, + ["barmonospace"]=65372, + ["bbopomofo"]=12549, + ["bcircle"]=9425, + ["bdotaccent"]=7683, + ["bdotbelow"]=7685, + ["beamedsixteenthnotes"]=9836, + ["because"]=8757, + ["becyrillic"]=1073, + ["beharabic"]=1576, + ["behfinalarabic"]=65168, + ["behinitialarabic"]=65169, + ["behiragana"]=12409, + ["behmedialarabic"]=65170, + ["behmeeminitialarabic"]=64671, + ["behmeemisolatedarabic"]=64520, + ["behnoonfinalarabic"]=64621, + ["bekatakana"]=12505, + ["benarmenian"]=1378, + ["bet"]=1489, + ["beta"]=946, + ["betasymbolgreek"]=976, + ["betdagesh"]=64305, + ["betdageshhebrew"]=64305, + ["bethebrew"]=1489, + ["betrafehebrew"]=64332, + ["bhabengali"]=2477, + ["bhadeva"]=2349, + ["bhagujarati"]=2733, + ["bhagurmukhi"]=2605, + ["bhook"]=595, + ["bihiragana"]=12403, + ["bikatakana"]=12499, + ["bilabialclick"]=664, + ["bindigurmukhi"]=2562, + ["birusquare"]=13105, + ["blackcircle"]=9679, + ["blackdiamond"]=9670, + ["blackdownpointingtriangle"]=9660, + ["blackleftpointingpointer"]=9668, + ["blackleftpointingtriangle"]=9664, + ["blacklenticularbracketleft"]=12304, + ["blacklenticularbracketleftvertical"]=65083, + ["blacklenticularbracketright"]=12305, + ["blacklenticularbracketrightvertical"]=65084, + ["blacklowerlefttriangle"]=9699, + ["blacklowerrighttriangle"]=9698, + ["blackrectangle"]=9644, + ["blackrightpointingpointer"]=9658, + ["blackrightpointingtriangle"]=9654, + ["blacksmallsquare"]=9642, + ["blacksmilingface"]=9787, + ["blacksquare"]=9632, + ["blackstar"]=9733, + ["blackupperlefttriangle"]=9700, + ["blackupperrighttriangle"]=9701, + ["blackuppointingsmalltriangle"]=9652, + ["blackuppointingtriangle"]=9650, + ["blank"]=9251, + ["blinebelow"]=7687, + ["block"]=9608, + ["bmonospace"]=65346, + ["bobaimaithai"]=3610, + ["bohiragana"]=12412, + ["bokatakana"]=12508, + ["bparen"]=9373, + ["bqsquare"]=13251, + ["braceleft"]=123, + ["braceleftmonospace"]=65371, + ["braceleftsmall"]=65115, + ["braceleftvertical"]=65079, + ["braceright"]=125, 
+ ["bracerightmonospace"]=65373, + ["bracerightsmall"]=65116, + ["bracerightvertical"]=65080, + ["bracketleft"]=91, + ["bracketleftmonospace"]=65339, + ["bracketright"]=93, + ["bracketrightmonospace"]=65341, + ["breve"]=728, + ["brevebelowcmb"]=814, + ["brevecmb"]=774, + ["breveinvertedbelowcmb"]=815, + ["breveinvertedcmb"]=785, + ["breveinverteddoublecmb"]=865, + ["bridgebelowcmb"]=810, + ["bridgeinvertedbelowcmb"]=826, + ["brokenbar"]=166, + ["bstroke"]=384, + ["btopbar"]=387, + ["buhiragana"]=12406, + ["bukatakana"]=12502, + ["bullet"]=8226, + ["bulletinverse"]=9688, + ["bulletoperator"]=8729, + ["bullseye"]=9678, + ["c"]=99, + ["caarmenian"]=1390, + ["cabengali"]=2458, + ["cacute"]=263, + ["cadeva"]=2330, + ["cagujarati"]=2714, + ["cagurmukhi"]=2586, + ["calsquare"]=13192, + ["candrabindubengali"]=2433, + ["candrabinducmb"]=784, + ["candrabindudeva"]=2305, + ["candrabindugujarati"]=2689, + ["capslock"]=8682, + ["careof"]=8453, + ["caron"]=711, + ["caronbelowcmb"]=812, + ["caroncmb"]=780, + ["carriagereturn"]=8629, + ["cbopomofo"]=12568, + ["ccaron"]=269, + ["ccedilla"]=231, + ["ccedillaacute"]=7689, + ["ccircle"]=9426, + ["ccircumflex"]=265, + ["ccurl"]=597, + ["cdot"]=267, + ["cdotaccent"]=267, + ["cdsquare"]=13253, + ["cedilla"]=184, + ["cedillacmb"]=807, + ["cent"]=162, + ["centigrade"]=8451, + ["centmonospace"]=65504, + ["chaarmenian"]=1401, + ["chabengali"]=2459, + ["chadeva"]=2331, + ["chagujarati"]=2715, + ["chagurmukhi"]=2587, + ["chbopomofo"]=12564, + ["cheabkhasiancyrillic"]=1213, + ["checkmark"]=10003, + ["checyrillic"]=1095, + ["chedescenderabkhasiancyrillic"]=1215, + ["chedescendercyrillic"]=1207, + ["chedieresiscyrillic"]=1269, + ["cheharmenian"]=1395, + ["chekhakassiancyrillic"]=1228, + ["cheverticalstrokecyrillic"]=1209, + ["chi"]=967, + ["chieuchacirclekorean"]=12919, + ["chieuchaparenkorean"]=12823, + ["chieuchcirclekorean"]=12905, + ["chieuchkorean"]=12618, + ["chieuchparenkorean"]=12809, + ["chochangthai"]=3594, + ["chochanthai"]=3592, + ["chochingthai"]=3593, + ["chochoethai"]=3596, + ["chook"]=392, + ["cieucacirclekorean"]=12918, + ["cieucaparenkorean"]=12822, + ["cieuccirclekorean"]=12904, + ["cieuckorean"]=12616, + ["cieucparenkorean"]=12808, + ["cieucuparenkorean"]=12828, + ["circle"]=9675, + ["circlemultiply"]=8855, + ["circleot"]=8857, + ["circleplus"]=8853, + ["circlepostalmark"]=12342, + ["circlewithlefthalfblack"]=9680, + ["circlewithrighthalfblack"]=9681, + ["circumflex"]=710, + ["circumflexbelowcmb"]=813, + ["circumflexcmb"]=770, + ["clear"]=8999, + ["clickalveolar"]=450, + ["clickdental"]=448, + ["clicklateral"]=449, + ["clickretroflex"]=451, + ["club"]=9827, + ["clubsuitblack"]=9827, + ["clubsuitwhite"]=9831, + ["cmcubedsquare"]=13220, + ["cmonospace"]=65347, + ["cmsquaredsquare"]=13216, + ["coarmenian"]=1409, + ["colon"]=58, + ["colonmonetary"]=8353, + ["colonmonospace"]=65306, + ["colonsign"]=8353, + ["colonsmall"]=65109, + ["colontriangularhalfmod"]=721, + ["colontriangularmod"]=720, + ["comma"]=44, + ["commaabovecmb"]=787, + ["commaaboverightcmb"]=789, + ["commaarabic"]=1548, + ["commaarmenian"]=1373, + ["commamonospace"]=65292, + ["commareversedabovecmb"]=788, + ["commareversedmod"]=701, + ["commasmall"]=65104, + ["commaturnedabovecmb"]=786, + ["commaturnedmod"]=699, + ["compass"]=9788, + ["congruent"]=8773, + ["contourintegral"]=8750, + ["control"]=8963, + ["controlACK"]=6, + ["controlBEL"]=7, + ["controlBS"]=8, + ["controlCAN"]=24, + ["controlCR"]=13, + ["controlDC1"]=17, + ["controlDC2"]=18, + ["controlDC3"]=19, + ["controlDC4"]=20, + 
["controlDEL"]=127, + ["controlDLE"]=16, + ["controlEM"]=25, + ["controlENQ"]=5, + ["controlEOT"]=4, + ["controlESC"]=27, + ["controlETB"]=23, + ["controlETX"]=3, + ["controlFF"]=12, + ["controlFS"]=28, + ["controlGS"]=29, + ["controlHT"]=9, + ["controlLF"]=10, + ["controlNAK"]=21, + ["controlRS"]=30, + ["controlSI"]=15, + ["controlSO"]=14, + ["controlSOT"]=2, + ["controlSTX"]=1, + ["controlSUB"]=26, + ["controlSYN"]=22, + ["controlUS"]=31, + ["controlVT"]=11, + ["copyright"]=169, + ["cornerbracketleft"]=12300, + ["cornerbracketlefthalfwidth"]=65378, + ["cornerbracketleftvertical"]=65089, + ["cornerbracketright"]=12301, + ["cornerbracketrighthalfwidth"]=65379, + ["cornerbracketrightvertical"]=65090, + ["corporationsquare"]=13183, + ["cosquare"]=13255, + ["coverkgsquare"]=13254, + ["cparen"]=9374, + ["cruzeiro"]=8354, + ["cstretched"]=663, + ["curlyand"]=8911, + ["curlyor"]=8910, + ["currency"]=164, + ["d"]=100, + ["daarmenian"]=1380, + ["dabengali"]=2470, + ["dadarabic"]=1590, + ["dadeva"]=2342, + ["dadfinalarabic"]=65214, + ["dadinitialarabic"]=65215, + ["dadmedialarabic"]=65216, + ["dagesh"]=1468, + ["dageshhebrew"]=1468, + ["dagger"]=8224, + ["daggerdbl"]=8225, + ["dagujarati"]=2726, + ["dagurmukhi"]=2598, + ["dahiragana"]=12384, + ["dakatakana"]=12480, + ["dalarabic"]=1583, + ["dalet"]=1491, + ["daletdagesh"]=64307, + ["daletdageshhebrew"]=64307, + ["dalethatafpatah"]=1491, + ["dalethatafpatahhebrew"]=1491, + ["dalethatafsegol"]=1491, + ["dalethatafsegolhebrew"]=1491, + ["dalethebrew"]=1491, + ["dalethiriq"]=1491, + ["dalethiriqhebrew"]=1491, + ["daletholam"]=1491, + ["daletholamhebrew"]=1491, + ["daletpatah"]=1491, + ["daletpatahhebrew"]=1491, + ["daletqamats"]=1491, + ["daletqamatshebrew"]=1491, + ["daletqubuts"]=1491, + ["daletqubutshebrew"]=1491, + ["daletsegol"]=1491, + ["daletsegolhebrew"]=1491, + ["daletsheva"]=1491, + ["daletshevahebrew"]=1491, + ["dalettsere"]=1491, + ["dalettserehebrew"]=1491, + ["dalfinalarabic"]=65194, + ["dammaarabic"]=1615, + ["dammalowarabic"]=1615, + ["dammatanaltonearabic"]=1612, + ["dammatanarabic"]=1612, + ["danda"]=2404, + ["dargahebrew"]=1447, + ["dargalefthebrew"]=1447, + ["dasiapneumatacyrilliccmb"]=1157, + ["dblanglebracketleft"]=12298, + ["dblanglebracketleftvertical"]=65085, + ["dblanglebracketright"]=12299, + ["dblanglebracketrightvertical"]=65086, + ["dblarchinvertedbelowcmb"]=811, + ["dblarrowleft"]=8660, + ["dblarrowright"]=8658, + ["dbldanda"]=2405, + ["dblgravecmb"]=783, + ["dblintegral"]=8748, + ["dbllowline"]=8215, + ["dbllowlinecmb"]=819, + ["dbloverlinecmb"]=831, + ["dblprimemod"]=698, + ["dblverticalbar"]=8214, + ["dblverticallineabovecmb"]=782, + ["dbopomofo"]=12553, + ["dbsquare"]=13256, + ["dcaron"]=271, + ["dcedilla"]=7697, + ["dcircle"]=9427, + ["dcircumflexbelow"]=7699, + ["dcroat"]=273, + ["ddabengali"]=2465, + ["ddadeva"]=2337, + ["ddagujarati"]=2721, + ["ddagurmukhi"]=2593, + ["ddalarabic"]=1672, + ["ddalfinalarabic"]=64393, + ["dddhadeva"]=2396, + ["ddhabengali"]=2466, + ["ddhadeva"]=2338, + ["ddhagujarati"]=2722, + ["ddhagurmukhi"]=2594, + ["ddotaccent"]=7691, + ["ddotbelow"]=7693, + ["decimalseparatorarabic"]=1643, + ["decimalseparatorpersian"]=1643, + ["decyrillic"]=1076, + ["degree"]=176, + ["dehihebrew"]=1453, + ["dehiragana"]=12391, + ["deicoptic"]=1007, + ["dekatakana"]=12487, + ["deleteleft"]=9003, + ["deleteright"]=8998, + ["delta"]=948, + ["deltaturned"]=397, + ["denominatorminusonenumeratorbengali"]=2552, + ["dezh"]=676, + ["dhabengali"]=2471, + ["dhadeva"]=2343, + ["dhagujarati"]=2727, + ["dhagurmukhi"]=2599, + 
["dhook"]=599, + ["dialytikatonos"]=901, + ["dialytikatonoscmb"]=836, + ["diamond"]=9830, + ["diamondsuitwhite"]=9826, + ["dieresis"]=168, + ["dieresisbelowcmb"]=804, + ["dieresiscmb"]=776, + ["dieresistonos"]=901, + ["dihiragana"]=12386, + ["dikatakana"]=12482, + ["dittomark"]=12291, + ["divide"]=247, + ["divides"]=8739, + ["divisionslash"]=8725, + ["djecyrillic"]=1106, + ["dkshade"]=9619, + ["dlinebelow"]=7695, + ["dlsquare"]=13207, + ["dmacron"]=273, + ["dmonospace"]=65348, + ["dnblock"]=9604, + ["dochadathai"]=3598, + ["dodekthai"]=3604, + ["dohiragana"]=12393, + ["dokatakana"]=12489, + ["dollar"]=36, + ["dollarmonospace"]=65284, + ["dollarsmall"]=65129, + ["dong"]=8363, + ["dorusquare"]=13094, + ["dotaccent"]=729, + ["dotaccentcmb"]=775, + ["dotbelowcmb"]=803, + ["dotbelowcomb"]=803, + ["dotkatakana"]=12539, + ["dotlessi"]=305, + ["dotlessjstrokehook"]=644, + ["dotmath"]=8901, + ["dottedcircle"]=9676, + ["doubleyodpatah"]=64287, + ["doubleyodpatahhebrew"]=64287, + ["downtackbelowcmb"]=798, + ["downtackmod"]=725, + ["dparen"]=9375, + ["dtail"]=598, + ["dtopbar"]=396, + ["duhiragana"]=12389, + ["dukatakana"]=12485, + ["dz"]=499, + ["dzaltone"]=675, + ["dzcaron"]=454, + ["dzcurl"]=677, + ["dzeabkhasiancyrillic"]=1249, + ["dzecyrillic"]=1109, + ["dzhecyrillic"]=1119, + ["e"]=101, + ["eacute"]=233, + ["earth"]=9793, + ["ebengali"]=2447, + ["ebopomofo"]=12572, + ["ebreve"]=277, + ["ecandradeva"]=2317, + ["ecandragujarati"]=2701, + ["ecandravowelsigndeva"]=2373, + ["ecandravowelsigngujarati"]=2757, + ["ecaron"]=283, + ["ecedillabreve"]=7709, + ["echarmenian"]=1381, + ["echyiwnarmenian"]=1415, + ["ecircle"]=9428, + ["ecircumflex"]=234, + ["ecircumflexacute"]=7871, + ["ecircumflexbelow"]=7705, + ["ecircumflexdotbelow"]=7879, + ["ecircumflexgrave"]=7873, + ["ecircumflexhookabove"]=7875, + ["ecircumflextilde"]=7877, + ["ecyrillic"]=1108, + ["edblgrave"]=517, + ["edeva"]=2319, + ["edieresis"]=235, + ["edot"]=279, + ["edotaccent"]=279, + ["edotbelow"]=7865, + ["eegurmukhi"]=2575, + ["eematragurmukhi"]=2631, + ["efcyrillic"]=1092, + ["egrave"]=232, + ["egujarati"]=2703, + ["eharmenian"]=1383, + ["ehbopomofo"]=12573, + ["ehiragana"]=12360, + ["ehookabove"]=7867, + ["eibopomofo"]=12575, + ["eight"]=56, + ["eightarabic"]=1640, + ["eightbengali"]=2542, + ["eightcircle"]=9319, + ["eightcircleinversesansserif"]=10129, + ["eightdeva"]=2414, + ["eighteencircle"]=9329, + ["eighteenparen"]=9349, + ["eighteenperiod"]=9369, + ["eightgujarati"]=2798, + ["eightgurmukhi"]=2670, + ["eighthackarabic"]=1640, + ["eighthangzhou"]=12328, + ["eighthnotebeamed"]=9835, + ["eightideographicparen"]=12839, + ["eightinferior"]=8328, + ["eightmonospace"]=65304, + ["eightparen"]=9339, + ["eightperiod"]=9359, + ["eightpersian"]=1784, + ["eightroman"]=8567, + ["eightsuperior"]=8312, + ["eightthai"]=3672, + ["einvertedbreve"]=519, + ["eiotifiedcyrillic"]=1125, + ["ekatakana"]=12456, + ["ekatakanahalfwidth"]=65396, + ["ekonkargurmukhi"]=2676, + ["ekorean"]=12628, + ["elcyrillic"]=1083, + ["element"]=8712, + ["elevencircle"]=9322, + ["elevenparen"]=9342, + ["elevenperiod"]=9362, + ["elevenroman"]=8570, + ["ellipsis"]=8230, + ["ellipsisvertical"]=8942, + ["emacron"]=275, + ["emacronacute"]=7703, + ["emacrongrave"]=7701, + ["emcyrillic"]=1084, + ["emdash"]=8212, + ["emdashvertical"]=65073, + ["emonospace"]=65349, + ["emphasismarkarmenian"]=1371, + ["emptyset"]=8709, + ["enbopomofo"]=12579, + ["encyrillic"]=1085, + ["endash"]=8211, + ["endashvertical"]=65074, + ["endescendercyrillic"]=1187, + ["eng"]=331, + ["engbopomofo"]=12581, + 
["enghecyrillic"]=1189, + ["enhookcyrillic"]=1224, + ["enspace"]=8194, + ["eogonek"]=281, + ["eokorean"]=12627, + ["eopen"]=603, + ["eopenclosed"]=666, + ["eopenreversed"]=604, + ["eopenreversedclosed"]=606, + ["eopenreversedhook"]=605, + ["eparen"]=9376, + ["epsilon"]=949, + ["epsilontonos"]=941, + ["equal"]=61, + ["equalmonospace"]=65309, + ["equalsmall"]=65126, + ["equalsuperior"]=8316, + ["equivalence"]=8801, + ["erbopomofo"]=12582, + ["ercyrillic"]=1088, + ["ereversed"]=600, + ["ereversedcyrillic"]=1101, + ["escyrillic"]=1089, + ["esdescendercyrillic"]=1195, + ["esh"]=643, + ["eshcurl"]=646, + ["eshortdeva"]=2318, + ["eshortvowelsigndeva"]=2374, + ["eshreversedloop"]=426, + ["eshsquatreversed"]=645, + ["esmallhiragana"]=12359, + ["esmallkatakana"]=12455, + ["esmallkatakanahalfwidth"]=65386, + ["estimated"]=8494, + ["eta"]=951, + ["etarmenian"]=1384, + ["etatonos"]=942, + ["eth"]=240, + ["etilde"]=7869, + ["etildebelow"]=7707, + ["etnahtafoukhhebrew"]=1425, + ["etnahtafoukhlefthebrew"]=1425, + ["etnahtahebrew"]=1425, + ["etnahtalefthebrew"]=1425, + ["eturned"]=477, + ["eukorean"]=12641, + ["euro"]=8364, + ["evowelsignbengali"]=2503, + ["evowelsigndeva"]=2375, + ["evowelsigngujarati"]=2759, + ["exclam"]=33, + ["exclamarmenian"]=1372, + ["exclamdbl"]=8252, + ["exclamdown"]=161, + ["exclammonospace"]=65281, + ["existential"]=8707, + ["ezh"]=658, + ["ezhcaron"]=495, + ["ezhcurl"]=659, + ["ezhreversed"]=441, + ["ezhtail"]=442, + ["f"]=102, + ["fadeva"]=2398, + ["fagurmukhi"]=2654, + ["fahrenheit"]=8457, + ["fathaarabic"]=1614, + ["fathalowarabic"]=1614, + ["fathatanarabic"]=1611, + ["fbopomofo"]=12552, + ["fcircle"]=9429, + ["fdotaccent"]=7711, + ["feharabic"]=1601, + ["feharmenian"]=1414, + ["fehfinalarabic"]=65234, + ["fehinitialarabic"]=65235, + ["fehmedialarabic"]=65236, + ["feicoptic"]=997, + ["female"]=9792, + ["ff"]=64256, + ["ffi"]=64259, + ["ffl"]=64260, + ["fi"]=64257, + ["fifteencircle"]=9326, + ["fifteenparen"]=9346, + ["fifteenperiod"]=9366, + ["figuredash"]=8210, + ["filledbox"]=9632, + ["filledrect"]=9644, + ["finalkaf"]=1498, + ["finalkafdagesh"]=64314, + ["finalkafdageshhebrew"]=64314, + ["finalkafhebrew"]=1498, + ["finalkafqamats"]=1498, + ["finalkafqamatshebrew"]=1498, + ["finalkafsheva"]=1498, + ["finalkafshevahebrew"]=1498, + ["finalmem"]=1501, + ["finalmemhebrew"]=1501, + ["finalnun"]=1503, + ["finalnunhebrew"]=1503, + ["finalpe"]=1507, + ["finalpehebrew"]=1507, + ["finaltsadi"]=1509, + ["finaltsadihebrew"]=1509, + ["firsttonechinese"]=713, + ["fisheye"]=9673, + ["fitacyrillic"]=1139, + ["five"]=53, + ["fivearabic"]=1637, + ["fivebengali"]=2539, + ["fivecircle"]=9316, + ["fivecircleinversesansserif"]=10126, + ["fivedeva"]=2411, + ["fiveeighths"]=8541, + ["fivegujarati"]=2795, + ["fivegurmukhi"]=2667, + ["fivehackarabic"]=1637, + ["fivehangzhou"]=12325, + ["fiveideographicparen"]=12836, + ["fiveinferior"]=8325, + ["fivemonospace"]=65301, + ["fiveparen"]=9336, + ["fiveperiod"]=9356, + ["fivepersian"]=1781, + ["fiveroman"]=8564, + ["fivesuperior"]=8309, + ["fivethai"]=3669, + ["fl"]=64258, + ["florin"]=402, + ["fmonospace"]=65350, + ["fmsquare"]=13209, + ["fofanthai"]=3615, + ["fofathai"]=3613, + ["fongmanthai"]=3663, + ["forall"]=8704, + ["four"]=52, + ["fourarabic"]=1636, + ["fourbengali"]=2538, + ["fourcircle"]=9315, + ["fourcircleinversesansserif"]=10125, + ["fourdeva"]=2410, + ["fourgujarati"]=2794, + ["fourgurmukhi"]=2666, + ["fourhackarabic"]=1636, + ["fourhangzhou"]=12324, + ["fourideographicparen"]=12835, + ["fourinferior"]=8324, + ["fourmonospace"]=65300, + 
["fournumeratorbengali"]=2551, + ["fourparen"]=9335, + ["fourperiod"]=9355, + ["fourpersian"]=1780, + ["fourroman"]=8563, + ["foursuperior"]=8308, + ["fourteencircle"]=9325, + ["fourteenparen"]=9345, + ["fourteenperiod"]=9365, + ["fourthai"]=3668, + ["fourthtonechinese"]=715, + ["fparen"]=9377, + ["fraction"]=8260, + ["franc"]=8355, + ["g"]=103, + ["gabengali"]=2455, + ["gacute"]=501, + ["gadeva"]=2327, + ["gafarabic"]=1711, + ["gaffinalarabic"]=64403, + ["gafinitialarabic"]=64404, + ["gafmedialarabic"]=64405, + ["gagujarati"]=2711, + ["gagurmukhi"]=2583, + ["gahiragana"]=12364, + ["gakatakana"]=12460, + ["gamma"]=947, + ["gammalatinsmall"]=611, + ["gammasuperior"]=736, + ["gangiacoptic"]=1003, + ["gbopomofo"]=12557, + ["gbreve"]=287, + ["gcaron"]=487, + ["gcedilla"]=291, + ["gcircle"]=9430, + ["gcircumflex"]=285, + ["gcommaaccent"]=291, + ["gdot"]=289, + ["gdotaccent"]=289, + ["gecyrillic"]=1075, + ["gehiragana"]=12370, + ["gekatakana"]=12466, + ["geometricallyequal"]=8785, + ["gereshaccenthebrew"]=1436, + ["gereshhebrew"]=1523, + ["gereshmuqdamhebrew"]=1437, + ["germandbls"]=223, + ["gershayimaccenthebrew"]=1438, + ["gershayimhebrew"]=1524, + ["getamark"]=12307, + ["ghabengali"]=2456, + ["ghadarmenian"]=1394, + ["ghadeva"]=2328, + ["ghagujarati"]=2712, + ["ghagurmukhi"]=2584, + ["ghainarabic"]=1594, + ["ghainfinalarabic"]=65230, + ["ghaininitialarabic"]=65231, + ["ghainmedialarabic"]=65232, + ["ghemiddlehookcyrillic"]=1173, + ["ghestrokecyrillic"]=1171, + ["gheupturncyrillic"]=1169, + ["ghhadeva"]=2394, + ["ghhagurmukhi"]=2650, + ["ghook"]=608, + ["ghzsquare"]=13203, + ["gihiragana"]=12366, + ["gikatakana"]=12462, + ["gimarmenian"]=1379, + ["gimel"]=1490, + ["gimeldagesh"]=64306, + ["gimeldageshhebrew"]=64306, + ["gimelhebrew"]=1490, + ["gjecyrillic"]=1107, + ["glottalinvertedstroke"]=446, + ["glottalstop"]=660, + ["glottalstopinverted"]=662, + ["glottalstopmod"]=704, + ["glottalstopreversed"]=661, + ["glottalstopreversedmod"]=705, + ["glottalstopreversedsuperior"]=740, + ["glottalstopstroke"]=673, + ["glottalstopstrokereversed"]=674, + ["gmacron"]=7713, + ["gmonospace"]=65351, + ["gohiragana"]=12372, + ["gokatakana"]=12468, + ["gparen"]=9378, + ["gpasquare"]=13228, + ["gradient"]=8711, + ["grave"]=96, + ["gravebelowcmb"]=790, + ["gravecmb"]=768, + ["gravecomb"]=768, + ["gravedeva"]=2387, + ["gravelowmod"]=718, + ["gravemonospace"]=65344, + ["gravetonecmb"]=832, + ["greater"]=62, + ["greaterequal"]=8805, + ["greaterequalorless"]=8923, + ["greatermonospace"]=65310, + ["greaterorequivalent"]=8819, + ["greaterorless"]=8823, + ["greateroverequal"]=8807, + ["greatersmall"]=65125, + ["gscript"]=609, + ["gstroke"]=485, + ["guhiragana"]=12368, + ["guillemotleft"]=171, + ["guillemotright"]=187, + ["guilsinglleft"]=8249, + ["guilsinglright"]=8250, + ["gukatakana"]=12464, + ["guramusquare"]=13080, + ["gysquare"]=13257, + ["h"]=104, + ["haabkhasiancyrillic"]=1193, + ["haaltonearabic"]=1729, + ["habengali"]=2489, + ["hadescendercyrillic"]=1203, + ["hadeva"]=2361, + ["hagujarati"]=2745, + ["hagurmukhi"]=2617, + ["haharabic"]=1581, + ["hahfinalarabic"]=65186, + ["hahinitialarabic"]=65187, + ["hahiragana"]=12399, + ["hahmedialarabic"]=65188, + ["haitusquare"]=13098, + ["hakatakana"]=12495, + ["hakatakanahalfwidth"]=65418, + ["halantgurmukhi"]=2637, + ["hamzaarabic"]=1569, + ["hamzadammaarabic"]=1569, + ["hamzadammatanarabic"]=1569, + ["hamzafathaarabic"]=1569, + ["hamzafathatanarabic"]=1569, + ["hamzalowarabic"]=1569, + ["hamzalowkasraarabic"]=1569, + ["hamzalowkasratanarabic"]=1569, + 
["hamzasukunarabic"]=1569, + ["hangulfiller"]=12644, + ["hardsigncyrillic"]=1098, + ["harpoonleftbarbup"]=8636, + ["harpoonrightbarbup"]=8640, + ["hasquare"]=13258, + ["hatafpatah"]=1458, + ["hatafpatah16"]=1458, + ["hatafpatah23"]=1458, + ["hatafpatah2f"]=1458, + ["hatafpatahhebrew"]=1458, + ["hatafpatahnarrowhebrew"]=1458, + ["hatafpatahquarterhebrew"]=1458, + ["hatafpatahwidehebrew"]=1458, + ["hatafqamats"]=1459, + ["hatafqamats1b"]=1459, + ["hatafqamats28"]=1459, + ["hatafqamats34"]=1459, + ["hatafqamatshebrew"]=1459, + ["hatafqamatsnarrowhebrew"]=1459, + ["hatafqamatsquarterhebrew"]=1459, + ["hatafqamatswidehebrew"]=1459, + ["hatafsegol"]=1457, + ["hatafsegol17"]=1457, + ["hatafsegol24"]=1457, + ["hatafsegol30"]=1457, + ["hatafsegolhebrew"]=1457, + ["hatafsegolnarrowhebrew"]=1457, + ["hatafsegolquarterhebrew"]=1457, + ["hatafsegolwidehebrew"]=1457, + ["hbar"]=295, + ["hbopomofo"]=12559, + ["hbrevebelow"]=7723, + ["hcedilla"]=7721, + ["hcircle"]=9431, + ["hcircumflex"]=293, + ["hdieresis"]=7719, + ["hdotaccent"]=7715, + ["hdotbelow"]=7717, + ["he"]=1492, + ["heart"]=9829, + ["heartsuitblack"]=9829, + ["heartsuitwhite"]=9825, + ["hedagesh"]=64308, + ["hedageshhebrew"]=64308, + ["hehaltonearabic"]=1729, + ["heharabic"]=1607, + ["hehebrew"]=1492, + ["hehfinalaltonearabic"]=64423, + ["hehfinalalttwoarabic"]=65258, + ["hehfinalarabic"]=65258, + ["hehhamzaabovefinalarabic"]=64421, + ["hehhamzaaboveisolatedarabic"]=64420, + ["hehinitialaltonearabic"]=64424, + ["hehinitialarabic"]=65259, + ["hehiragana"]=12408, + ["hehmedialaltonearabic"]=64425, + ["hehmedialarabic"]=65260, + ["heiseierasquare"]=13179, + ["hekatakana"]=12504, + ["hekatakanahalfwidth"]=65421, + ["hekutaarusquare"]=13110, + ["henghook"]=615, + ["herutusquare"]=13113, + ["het"]=1495, + ["hethebrew"]=1495, + ["hhook"]=614, + ["hhooksuperior"]=689, + ["hieuhacirclekorean"]=12923, + ["hieuhaparenkorean"]=12827, + ["hieuhcirclekorean"]=12909, + ["hieuhkorean"]=12622, + ["hieuhparenkorean"]=12813, + ["hihiragana"]=12402, + ["hikatakana"]=12498, + ["hikatakanahalfwidth"]=65419, + ["hiriq"]=1460, + ["hiriq14"]=1460, + ["hiriq21"]=1460, + ["hiriq2d"]=1460, + ["hiriqhebrew"]=1460, + ["hiriqnarrowhebrew"]=1460, + ["hiriqquarterhebrew"]=1460, + ["hiriqwidehebrew"]=1460, + ["hlinebelow"]=7830, + ["hmonospace"]=65352, + ["hoarmenian"]=1392, + ["hohipthai"]=3627, + ["hohiragana"]=12411, + ["hokatakana"]=12507, + ["hokatakanahalfwidth"]=65422, + ["holam"]=1465, + ["holam19"]=1465, + ["holam26"]=1465, + ["holam32"]=1465, + ["holamhebrew"]=1465, + ["holamnarrowhebrew"]=1465, + ["holamquarterhebrew"]=1465, + ["holamwidehebrew"]=1465, + ["honokhukthai"]=3630, + ["hookabovecomb"]=777, + ["hookcmb"]=777, + ["hookpalatalizedbelowcmb"]=801, + ["hookretroflexbelowcmb"]=802, + ["hoonsquare"]=13122, + ["horicoptic"]=1001, + ["horizontalbar"]=8213, + ["horncmb"]=795, + ["hotsprings"]=9832, + ["house"]=8962, + ["hparen"]=9379, + ["hsuperior"]=688, + ["hturned"]=613, + ["huhiragana"]=12405, + ["huiitosquare"]=13107, + ["hukatakana"]=12501, + ["hukatakanahalfwidth"]=65420, + ["hungarumlaut"]=733, + ["hungarumlautcmb"]=779, + ["hv"]=405, + ["hyphen"]=45, + ["hyphenmonospace"]=65293, + ["hyphensmall"]=65123, + ["hyphentwo"]=8208, + ["i"]=105, + ["iacute"]=237, + ["iacyrillic"]=1103, + ["ibengali"]=2439, + ["ibopomofo"]=12583, + ["ibreve"]=301, + ["icaron"]=464, + ["icircle"]=9432, + ["icircumflex"]=238, + ["icyrillic"]=1110, + ["idblgrave"]=521, + ["ideographearthcircle"]=12943, + ["ideographfirecircle"]=12939, + ["ideographicallianceparen"]=12863, + 
["ideographiccallparen"]=12858, + ["ideographiccentrecircle"]=12965, + ["ideographicclose"]=12294, + ["ideographiccomma"]=12289, + ["ideographiccommaleft"]=65380, + ["ideographiccongratulationparen"]=12855, + ["ideographiccorrectcircle"]=12963, + ["ideographicearthparen"]=12847, + ["ideographicenterpriseparen"]=12861, + ["ideographicexcellentcircle"]=12957, + ["ideographicfestivalparen"]=12864, + ["ideographicfinancialcircle"]=12950, + ["ideographicfinancialparen"]=12854, + ["ideographicfireparen"]=12843, + ["ideographichaveparen"]=12850, + ["ideographichighcircle"]=12964, + ["ideographiciterationmark"]=12293, + ["ideographiclaborcircle"]=12952, + ["ideographiclaborparen"]=12856, + ["ideographicleftcircle"]=12967, + ["ideographiclowcircle"]=12966, + ["ideographicmedicinecircle"]=12969, + ["ideographicmetalparen"]=12846, + ["ideographicmoonparen"]=12842, + ["ideographicnameparen"]=12852, + ["ideographicperiod"]=12290, + ["ideographicprintcircle"]=12958, + ["ideographicreachparen"]=12867, + ["ideographicrepresentparen"]=12857, + ["ideographicresourceparen"]=12862, + ["ideographicrightcircle"]=12968, + ["ideographicsecretcircle"]=12953, + ["ideographicselfparen"]=12866, + ["ideographicsocietyparen"]=12851, + ["ideographicspace"]=12288, + ["ideographicspecialparen"]=12853, + ["ideographicstockparen"]=12849, + ["ideographicstudyparen"]=12859, + ["ideographicsunparen"]=12848, + ["ideographicsuperviseparen"]=12860, + ["ideographicwaterparen"]=12844, + ["ideographicwoodparen"]=12845, + ["ideographiczero"]=12295, + ["ideographmetalcircle"]=12942, + ["ideographmooncircle"]=12938, + ["ideographnamecircle"]=12948, + ["ideographsuncircle"]=12944, + ["ideographwatercircle"]=12940, + ["ideographwoodcircle"]=12941, + ["ideva"]=2311, + ["idieresis"]=239, + ["idieresisacute"]=7727, + ["idieresiscyrillic"]=1253, + ["idotbelow"]=7883, + ["iebrevecyrillic"]=1239, + ["iecyrillic"]=1077, + ["ieungacirclekorean"]=12917, + ["ieungaparenkorean"]=12821, + ["ieungcirclekorean"]=12903, + ["ieungkorean"]=12615, + ["ieungparenkorean"]=12807, + ["igrave"]=236, + ["igujarati"]=2695, + ["igurmukhi"]=2567, + ["ihiragana"]=12356, + ["ihookabove"]=7881, + ["iibengali"]=2440, + ["iicyrillic"]=1080, + ["iideva"]=2312, + ["iigujarati"]=2696, + ["iigurmukhi"]=2568, + ["iimatragurmukhi"]=2624, + ["iinvertedbreve"]=523, + ["iishortcyrillic"]=1081, + ["iivowelsignbengali"]=2496, + ["iivowelsigndeva"]=2368, + ["iivowelsigngujarati"]=2752, + ["ij"]=307, + ["ikatakana"]=12452, + ["ikatakanahalfwidth"]=65394, + ["ikorean"]=12643, + ["ilde"]=732, + ["iluyhebrew"]=1452, + ["imacron"]=299, + ["imacroncyrillic"]=1251, + ["imageorapproximatelyequal"]=8787, + ["imatragurmukhi"]=2623, + ["imonospace"]=65353, + ["increment"]=8710, + ["infinity"]=8734, + ["iniarmenian"]=1387, + ["integral"]=8747, + ["integralbottom"]=8993, + ["integralbt"]=8993, + ["integraltop"]=8992, + ["integraltp"]=8992, + ["intersection"]=8745, + ["intisquare"]=13061, + ["invbullet"]=9688, + ["invcircle"]=9689, + ["invsmileface"]=9787, + ["iocyrillic"]=1105, + ["iogonek"]=303, + ["iota"]=953, + ["iotadieresis"]=970, + ["iotadieresistonos"]=912, + ["iotalatin"]=617, + ["iotatonos"]=943, + ["iparen"]=9380, + ["irigurmukhi"]=2674, + ["ismallhiragana"]=12355, + ["ismallkatakana"]=12451, + ["ismallkatakanahalfwidth"]=65384, + ["issharbengali"]=2554, + ["istroke"]=616, + ["iterationhiragana"]=12445, + ["iterationkatakana"]=12541, + ["itilde"]=297, + ["itildebelow"]=7725, + ["iubopomofo"]=12585, + ["iucyrillic"]=1102, + ["ivowelsignbengali"]=2495, + ["ivowelsigndeva"]=2367, + 
["ivowelsigngujarati"]=2751, + ["izhitsacyrillic"]=1141, + ["izhitsadblgravecyrillic"]=1143, + ["j"]=106, + ["jaarmenian"]=1393, + ["jabengali"]=2460, + ["jadeva"]=2332, + ["jagujarati"]=2716, + ["jagurmukhi"]=2588, + ["jbopomofo"]=12560, + ["jcaron"]=496, + ["jcircle"]=9433, + ["jcircumflex"]=309, + ["jcrossedtail"]=669, + ["jdotlessstroke"]=607, + ["jecyrillic"]=1112, + ["jeemarabic"]=1580, + ["jeemfinalarabic"]=65182, + ["jeeminitialarabic"]=65183, + ["jeemmedialarabic"]=65184, + ["jeharabic"]=1688, + ["jehfinalarabic"]=64395, + ["jhabengali"]=2461, + ["jhadeva"]=2333, + ["jhagujarati"]=2717, + ["jhagurmukhi"]=2589, + ["jheharmenian"]=1403, + ["jis"]=12292, + ["jmonospace"]=65354, + ["jparen"]=9381, + ["jsuperior"]=690, + ["k"]=107, + ["kabashkircyrillic"]=1185, + ["kabengali"]=2453, + ["kacute"]=7729, + ["kacyrillic"]=1082, + ["kadescendercyrillic"]=1179, + ["kadeva"]=2325, + ["kaf"]=1499, + ["kafarabic"]=1603, + ["kafdagesh"]=64315, + ["kafdageshhebrew"]=64315, + ["kaffinalarabic"]=65242, + ["kafhebrew"]=1499, + ["kafinitialarabic"]=65243, + ["kafmedialarabic"]=65244, + ["kafrafehebrew"]=64333, + ["kagujarati"]=2709, + ["kagurmukhi"]=2581, + ["kahiragana"]=12363, + ["kahookcyrillic"]=1220, + ["kakatakana"]=12459, + ["kakatakanahalfwidth"]=65398, + ["kappa"]=954, + ["kappasymbolgreek"]=1008, + ["kapyeounmieumkorean"]=12657, + ["kapyeounphieuphkorean"]=12676, + ["kapyeounpieupkorean"]=12664, + ["kapyeounssangpieupkorean"]=12665, + ["karoriisquare"]=13069, + ["kashidaautoarabic"]=1600, + ["kashidaautonosidebearingarabic"]=1600, + ["kasmallkatakana"]=12533, + ["kasquare"]=13188, + ["kasraarabic"]=1616, + ["kasratanarabic"]=1613, + ["kastrokecyrillic"]=1183, + ["katahiraprolongmarkhalfwidth"]=65392, + ["kaverticalstrokecyrillic"]=1181, + ["kbopomofo"]=12558, + ["kcalsquare"]=13193, + ["kcaron"]=489, + ["kcedilla"]=311, + ["kcircle"]=9434, + ["kcommaaccent"]=311, + ["kdotbelow"]=7731, + ["keharmenian"]=1412, + ["kehiragana"]=12369, + ["kekatakana"]=12465, + ["kekatakanahalfwidth"]=65401, + ["kenarmenian"]=1391, + ["kesmallkatakana"]=12534, + ["kgreenlandic"]=312, + ["khabengali"]=2454, + ["khacyrillic"]=1093, + ["khadeva"]=2326, + ["khagujarati"]=2710, + ["khagurmukhi"]=2582, + ["khaharabic"]=1582, + ["khahfinalarabic"]=65190, + ["khahinitialarabic"]=65191, + ["khahmedialarabic"]=65192, + ["kheicoptic"]=999, + ["khhadeva"]=2393, + ["khhagurmukhi"]=2649, + ["khieukhacirclekorean"]=12920, + ["khieukhaparenkorean"]=12824, + ["khieukhcirclekorean"]=12906, + ["khieukhkorean"]=12619, + ["khieukhparenkorean"]=12810, + ["khokhaithai"]=3586, + ["khokhonthai"]=3589, + ["khokhuatthai"]=3587, + ["khokhwaithai"]=3588, + ["khomutthai"]=3675, + ["khook"]=409, + ["khorakhangthai"]=3590, + ["khzsquare"]=13201, + ["kihiragana"]=12365, + ["kikatakana"]=12461, + ["kikatakanahalfwidth"]=65399, + ["kiroguramusquare"]=13077, + ["kiromeetorusquare"]=13078, + ["kirosquare"]=13076, + ["kiyeokacirclekorean"]=12910, + ["kiyeokaparenkorean"]=12814, + ["kiyeokcirclekorean"]=12896, + ["kiyeokkorean"]=12593, + ["kiyeokparenkorean"]=12800, + ["kiyeoksioskorean"]=12595, + ["kjecyrillic"]=1116, + ["klinebelow"]=7733, + ["klsquare"]=13208, + ["kmcubedsquare"]=13222, + ["kmonospace"]=65355, + ["kmsquaredsquare"]=13218, + ["kohiragana"]=12371, + ["kohmsquare"]=13248, + ["kokaithai"]=3585, + ["kokatakana"]=12467, + ["kokatakanahalfwidth"]=65402, + ["kooposquare"]=13086, + ["koppacyrillic"]=1153, + ["koreanstandardsymbol"]=12927, + ["koroniscmb"]=835, + ["kparen"]=9382, + ["kpasquare"]=13226, + ["ksicyrillic"]=1135, + 
["ktsquare"]=13263, + ["kturned"]=670, + ["kuhiragana"]=12367, + ["kukatakana"]=12463, + ["kukatakanahalfwidth"]=65400, + ["kvsquare"]=13240, + ["kwsquare"]=13246, + ["l"]=108, + ["labengali"]=2482, + ["lacute"]=314, + ["ladeva"]=2354, + ["lagujarati"]=2738, + ["lagurmukhi"]=2610, + ["lakkhangyaothai"]=3653, + ["lamaleffinalarabic"]=65276, + ["lamalefhamzaabovefinalarabic"]=65272, + ["lamalefhamzaaboveisolatedarabic"]=65271, + ["lamalefhamzabelowfinalarabic"]=65274, + ["lamalefhamzabelowisolatedarabic"]=65273, + ["lamalefisolatedarabic"]=65275, + ["lamalefmaddaabovefinalarabic"]=65270, + ["lamalefmaddaaboveisolatedarabic"]=65269, + ["lamarabic"]=1604, + ["lambda"]=955, + ["lambdastroke"]=411, + ["lamed"]=1500, + ["lameddagesh"]=64316, + ["lameddageshhebrew"]=64316, + ["lamedhebrew"]=1500, + ["lamedholam"]=1500, + ["lamedholamdagesh"]=1500, + ["lamedholamdageshhebrew"]=1500, + ["lamedholamhebrew"]=1500, + ["lamfinalarabic"]=65246, + ["lamhahinitialarabic"]=64714, + ["laminitialarabic"]=65247, + ["lamjeeminitialarabic"]=64713, + ["lamkhahinitialarabic"]=64715, + ["lamlamhehisolatedarabic"]=65010, + ["lammedialarabic"]=65248, + ["lammeemhahinitialarabic"]=64904, + ["lammeeminitialarabic"]=64716, + ["lammeemjeeminitialarabic"]=65247, + ["lammeemkhahinitialarabic"]=65247, + ["largecircle"]=9711, + ["lbar"]=410, + ["lbelt"]=620, + ["lbopomofo"]=12556, + ["lcaron"]=318, + ["lcedilla"]=316, + ["lcircle"]=9435, + ["lcircumflexbelow"]=7741, + ["lcommaaccent"]=316, + ["ldot"]=320, + ["ldotaccent"]=320, + ["ldotbelow"]=7735, + ["ldotbelowmacron"]=7737, + ["leftangleabovecmb"]=794, + ["lefttackbelowcmb"]=792, + ["less"]=60, + ["lessequal"]=8804, + ["lessequalorgreater"]=8922, + ["lessmonospace"]=65308, + ["lessorequivalent"]=8818, + ["lessorgreater"]=8822, + ["lessoverequal"]=8806, + ["lesssmall"]=65124, + ["lezh"]=622, + ["lfblock"]=9612, + ["lhookretroflex"]=621, + ["lira"]=8356, + ["liwnarmenian"]=1388, + ["lj"]=457, + ["ljecyrillic"]=1113, + ["lladeva"]=2355, + ["llagujarati"]=2739, + ["llinebelow"]=7739, + ["llladeva"]=2356, + ["llvocalicbengali"]=2529, + ["llvocalicdeva"]=2401, + ["llvocalicvowelsignbengali"]=2531, + ["llvocalicvowelsigndeva"]=2403, + ["lmiddletilde"]=619, + ["lmonospace"]=65356, + ["lmsquare"]=13264, + ["lochulathai"]=3628, + ["logicaland"]=8743, + ["logicalnot"]=172, + ["logicalnotreversed"]=8976, + ["logicalor"]=8744, + ["lolingthai"]=3621, + ["longs"]=383, + ["lowlinecenterline"]=65102, + ["lowlinecmb"]=818, + ["lowlinedashed"]=65101, + ["lozenge"]=9674, + ["lparen"]=9383, + ["lslash"]=322, + ["lsquare"]=8467, + ["ltshade"]=9617, + ["luthai"]=3622, + ["lvocalicbengali"]=2444, + ["lvocalicdeva"]=2316, + ["lvocalicvowelsignbengali"]=2530, + ["lvocalicvowelsigndeva"]=2402, + ["lxsquare"]=13267, + ["m"]=109, + ["mabengali"]=2478, + ["macron"]=175, + ["macronbelowcmb"]=817, + ["macroncmb"]=772, + ["macronlowmod"]=717, + ["macronmonospace"]=65507, + ["macute"]=7743, + ["madeva"]=2350, + ["magujarati"]=2734, + ["magurmukhi"]=2606, + ["mahapakhhebrew"]=1444, + ["mahapakhlefthebrew"]=1444, + ["mahiragana"]=12414, + ["maichattawathai"]=3659, + ["maiekthai"]=3656, + ["maihanakatthai"]=3633, + ["maitaikhuthai"]=3655, + ["maithothai"]=3657, + ["maitrithai"]=3658, + ["maiyamokthai"]=3654, + ["makatakana"]=12510, + ["makatakanahalfwidth"]=65423, + ["male"]=9794, + ["mansyonsquare"]=13127, + ["maqafhebrew"]=1470, + ["mars"]=9794, + ["masoracirclehebrew"]=1455, + ["masquare"]=13187, + ["mbopomofo"]=12551, + ["mbsquare"]=13268, + ["mcircle"]=9436, + ["mcubedsquare"]=13221, + 
["mdotaccent"]=7745, + ["mdotbelow"]=7747, + ["meemarabic"]=1605, + ["meemfinalarabic"]=65250, + ["meeminitialarabic"]=65251, + ["meemmedialarabic"]=65252, + ["meemmeeminitialarabic"]=64721, + ["meemmeemisolatedarabic"]=64584, + ["meetorusquare"]=13133, + ["mehiragana"]=12417, + ["meizierasquare"]=13182, + ["mekatakana"]=12513, + ["mekatakanahalfwidth"]=65426, + ["mem"]=1502, + ["memdagesh"]=64318, + ["memdageshhebrew"]=64318, + ["memhebrew"]=1502, + ["menarmenian"]=1396, + ["merkhahebrew"]=1445, + ["merkhakefulahebrew"]=1446, + ["merkhakefulalefthebrew"]=1446, + ["merkhalefthebrew"]=1445, + ["mhook"]=625, + ["mhzsquare"]=13202, + ["middledotkatakanahalfwidth"]=65381, + ["middot"]=183, + ["mieumacirclekorean"]=12914, + ["mieumaparenkorean"]=12818, + ["mieumcirclekorean"]=12900, + ["mieumkorean"]=12609, + ["mieumpansioskorean"]=12656, + ["mieumparenkorean"]=12804, + ["mieumpieupkorean"]=12654, + ["mieumsioskorean"]=12655, + ["mihiragana"]=12415, + ["mikatakana"]=12511, + ["mikatakanahalfwidth"]=65424, + ["minus"]=8722, + ["minusbelowcmb"]=800, + ["minuscircle"]=8854, + ["minusmod"]=727, + ["minusplus"]=8723, + ["minute"]=8242, + ["miribaarusquare"]=13130, + ["mirisquare"]=13129, + ["mlonglegturned"]=624, + ["mlsquare"]=13206, + ["mmcubedsquare"]=13219, + ["mmonospace"]=65357, + ["mmsquaredsquare"]=13215, + ["mohiragana"]=12418, + ["mohmsquare"]=13249, + ["mokatakana"]=12514, + ["mokatakanahalfwidth"]=65427, + ["molsquare"]=13270, + ["momathai"]=3617, + ["moverssquare"]=13223, + ["moverssquaredsquare"]=13224, + ["mparen"]=9384, + ["mpasquare"]=13227, + ["mssquare"]=13235, + ["mturned"]=623, + ["mu"]=181, + ["mu1"]=181, + ["muasquare"]=13186, + ["muchgreater"]=8811, + ["muchless"]=8810, + ["mufsquare"]=13196, + ["mugreek"]=956, + ["mugsquare"]=13197, + ["muhiragana"]=12416, + ["mukatakana"]=12512, + ["mukatakanahalfwidth"]=65425, + ["mulsquare"]=13205, + ["multiply"]=215, + ["mumsquare"]=13211, + ["munahhebrew"]=1443, + ["munahlefthebrew"]=1443, + ["musicalnote"]=9834, + ["musicalnotedbl"]=9835, + ["musicflatsign"]=9837, + ["musicsharpsign"]=9839, + ["mussquare"]=13234, + ["muvsquare"]=13238, + ["muwsquare"]=13244, + ["mvmegasquare"]=13241, + ["mvsquare"]=13239, + ["mwmegasquare"]=13247, + ["mwsquare"]=13245, + ["n"]=110, + ["nabengali"]=2472, + ["nabla"]=8711, + ["nacute"]=324, + ["nadeva"]=2344, + ["nagujarati"]=2728, + ["nagurmukhi"]=2600, + ["nahiragana"]=12394, + ["nakatakana"]=12490, + ["nakatakanahalfwidth"]=65413, + ["napostrophe"]=329, + ["nasquare"]=13185, + ["nbopomofo"]=12555, + ["nbspace"]=160, + ["ncaron"]=328, + ["ncedilla"]=326, + ["ncircle"]=9437, + ["ncircumflexbelow"]=7755, + ["ncommaaccent"]=326, + ["ndotaccent"]=7749, + ["ndotbelow"]=7751, + ["nehiragana"]=12397, + ["nekatakana"]=12493, + ["nekatakanahalfwidth"]=65416, + ["newsheqelsign"]=8362, + ["nfsquare"]=13195, + ["ngabengali"]=2457, + ["ngadeva"]=2329, + ["ngagujarati"]=2713, + ["ngagurmukhi"]=2585, + ["ngonguthai"]=3591, + ["nhiragana"]=12435, + ["nhookleft"]=626, + ["nhookretroflex"]=627, + ["nieunacirclekorean"]=12911, + ["nieunaparenkorean"]=12815, + ["nieuncieuckorean"]=12597, + ["nieuncirclekorean"]=12897, + ["nieunhieuhkorean"]=12598, + ["nieunkorean"]=12596, + ["nieunpansioskorean"]=12648, + ["nieunparenkorean"]=12801, + ["nieunsioskorean"]=12647, + ["nieuntikeutkorean"]=12646, + ["nihiragana"]=12395, + ["nikatakana"]=12491, + ["nikatakanahalfwidth"]=65414, + ["nikhahitthai"]=3661, + ["nine"]=57, + ["ninearabic"]=1641, + ["ninebengali"]=2543, + ["ninecircle"]=9320, + ["ninecircleinversesansserif"]=10130, + 
["ninedeva"]=2415, + ["ninegujarati"]=2799, + ["ninegurmukhi"]=2671, + ["ninehackarabic"]=1641, + ["ninehangzhou"]=12329, + ["nineideographicparen"]=12840, + ["nineinferior"]=8329, + ["ninemonospace"]=65305, + ["nineparen"]=9340, + ["nineperiod"]=9360, + ["ninepersian"]=1785, + ["nineroman"]=8568, + ["ninesuperior"]=8313, + ["nineteencircle"]=9330, + ["nineteenparen"]=9350, + ["nineteenperiod"]=9370, + ["ninethai"]=3673, + ["nj"]=460, + ["njecyrillic"]=1114, + ["nkatakana"]=12531, + ["nkatakanahalfwidth"]=65437, + ["nlegrightlong"]=414, + ["nlinebelow"]=7753, + ["nmonospace"]=65358, + ["nmsquare"]=13210, + ["nnabengali"]=2467, + ["nnadeva"]=2339, + ["nnagujarati"]=2723, + ["nnagurmukhi"]=2595, + ["nnnadeva"]=2345, + ["nohiragana"]=12398, + ["nokatakana"]=12494, + ["nokatakanahalfwidth"]=65417, + ["nonbreakingspace"]=160, + ["nonenthai"]=3603, + ["nonuthai"]=3609, + ["noonarabic"]=1606, + ["noonfinalarabic"]=65254, + ["noonghunnaarabic"]=1722, + ["noonghunnafinalarabic"]=64415, + ["noonhehinitialarabic"]=65255, + ["nooninitialarabic"]=65255, + ["noonjeeminitialarabic"]=64722, + ["noonjeemisolatedarabic"]=64587, + ["noonmedialarabic"]=65256, + ["noonmeeminitialarabic"]=64725, + ["noonmeemisolatedarabic"]=64590, + ["noonnoonfinalarabic"]=64653, + ["notcontains"]=8716, + ["notelement"]=8713, + ["notelementof"]=8713, + ["notequal"]=8800, + ["notgreater"]=8815, + ["notgreaternorequal"]=8817, + ["notgreaternorless"]=8825, + ["notidentical"]=8802, + ["notless"]=8814, + ["notlessnorequal"]=8816, + ["notparallel"]=8742, + ["notprecedes"]=8832, + ["notsubset"]=8836, + ["notsucceeds"]=8833, + ["notsuperset"]=8837, + ["nowarmenian"]=1398, + ["nparen"]=9385, + ["nssquare"]=13233, + ["nsuperior"]=8319, + ["ntilde"]=241, + ["nu"]=957, + ["nuhiragana"]=12396, + ["nukatakana"]=12492, + ["nukatakanahalfwidth"]=65415, + ["nuktabengali"]=2492, + ["nuktadeva"]=2364, + ["nuktagujarati"]=2748, + ["nuktagurmukhi"]=2620, + ["numbersign"]=35, + ["numbersignmonospace"]=65283, + ["numbersignsmall"]=65119, + ["numeralsigngreek"]=884, + ["numeralsignlowergreek"]=885, + ["numero"]=8470, + ["nun"]=1504, + ["nundagesh"]=64320, + ["nundageshhebrew"]=64320, + ["nunhebrew"]=1504, + ["nvsquare"]=13237, + ["nwsquare"]=13243, + ["nyabengali"]=2462, + ["nyadeva"]=2334, + ["nyagujarati"]=2718, + ["nyagurmukhi"]=2590, + ["o"]=111, + ["oacute"]=243, + ["oangthai"]=3629, + ["obarred"]=629, + ["obarredcyrillic"]=1257, + ["obarreddieresiscyrillic"]=1259, + ["obengali"]=2451, + ["obopomofo"]=12571, + ["obreve"]=335, + ["ocandradeva"]=2321, + ["ocandragujarati"]=2705, + ["ocandravowelsigndeva"]=2377, + ["ocandravowelsigngujarati"]=2761, + ["ocaron"]=466, + ["ocircle"]=9438, + ["ocircumflex"]=244, + ["ocircumflexacute"]=7889, + ["ocircumflexdotbelow"]=7897, + ["ocircumflexgrave"]=7891, + ["ocircumflexhookabove"]=7893, + ["ocircumflextilde"]=7895, + ["ocyrillic"]=1086, + ["odblacute"]=337, + ["odblgrave"]=525, + ["odeva"]=2323, + ["odieresis"]=246, + ["odieresiscyrillic"]=1255, + ["odotbelow"]=7885, + ["oe"]=339, + ["oekorean"]=12634, + ["ogonek"]=731, + ["ogonekcmb"]=808, + ["ograve"]=242, + ["ogujarati"]=2707, + ["oharmenian"]=1413, + ["ohiragana"]=12362, + ["ohookabove"]=7887, + ["ohorn"]=417, + ["ohornacute"]=7899, + ["ohorndotbelow"]=7907, + ["ohorngrave"]=7901, + ["ohornhookabove"]=7903, + ["ohorntilde"]=7905, + ["ohungarumlaut"]=337, + ["oi"]=419, + ["oinvertedbreve"]=527, + ["okatakana"]=12458, + ["okatakanahalfwidth"]=65397, + ["okorean"]=12631, + ["olehebrew"]=1451, + ["omacron"]=333, + ["omacronacute"]=7763, + 
["omacrongrave"]=7761, + ["omdeva"]=2384, + ["omega"]=969, + ["omega1"]=982, + ["omegacyrillic"]=1121, + ["omegalatinclosed"]=631, + ["omegaroundcyrillic"]=1147, + ["omegatitlocyrillic"]=1149, + ["omegatonos"]=974, + ["omgujarati"]=2768, + ["omicron"]=959, + ["omicrontonos"]=972, + ["omonospace"]=65359, + ["one"]=49, + ["onearabic"]=1633, + ["onebengali"]=2535, + ["onecircle"]=9312, + ["onecircleinversesansserif"]=10122, + ["onedeva"]=2407, + ["onedotenleader"]=8228, + ["oneeighth"]=8539, + ["onegujarati"]=2791, + ["onegurmukhi"]=2663, + ["onehackarabic"]=1633, + ["onehalf"]=189, + ["onehangzhou"]=12321, + ["oneideographicparen"]=12832, + ["oneinferior"]=8321, + ["onemonospace"]=65297, + ["onenumeratorbengali"]=2548, + ["oneparen"]=9332, + ["oneperiod"]=9352, + ["onepersian"]=1777, + ["onequarter"]=188, + ["oneroman"]=8560, + ["onesuperior"]=185, + ["onethai"]=3665, + ["onethird"]=8531, + ["oogonek"]=491, + ["oogonekmacron"]=493, + ["oogurmukhi"]=2579, + ["oomatragurmukhi"]=2635, + ["oopen"]=596, + ["oparen"]=9386, + ["openbullet"]=9702, + ["option"]=8997, + ["ordfeminine"]=170, + ["ordmasculine"]=186, + ["orthogonal"]=8735, + ["oshortdeva"]=2322, + ["oshortvowelsigndeva"]=2378, + ["oslash"]=248, + ["oslashacute"]=511, + ["osmallhiragana"]=12361, + ["osmallkatakana"]=12457, + ["osmallkatakanahalfwidth"]=65387, + ["ostrokeacute"]=511, + ["otcyrillic"]=1151, + ["otilde"]=245, + ["otildeacute"]=7757, + ["otildedieresis"]=7759, + ["oubopomofo"]=12577, + ["overline"]=8254, + ["overlinecenterline"]=65098, + ["overlinecmb"]=773, + ["overlinedashed"]=65097, + ["overlinedblwavy"]=65100, + ["overlinewavy"]=65099, + ["overscore"]=175, + ["ovowelsignbengali"]=2507, + ["ovowelsigndeva"]=2379, + ["ovowelsigngujarati"]=2763, + ["p"]=112, + ["paampssquare"]=13184, + ["paasentosquare"]=13099, + ["pabengali"]=2474, + ["pacute"]=7765, + ["padeva"]=2346, + ["pagedown"]=8671, + ["pageup"]=8670, + ["pagujarati"]=2730, + ["pagurmukhi"]=2602, + ["pahiragana"]=12401, + ["paiyannoithai"]=3631, + ["pakatakana"]=12497, + ["palatalizationcyrilliccmb"]=1156, + ["palochkacyrillic"]=1216, + ["pansioskorean"]=12671, + ["paragraph"]=182, + ["parallel"]=8741, + ["parenleft"]=40, + ["parenleftaltonearabic"]=64830, + ["parenleftinferior"]=8333, + ["parenleftmonospace"]=65288, + ["parenleftsmall"]=65113, + ["parenleftsuperior"]=8317, + ["parenleftvertical"]=65077, + ["parenright"]=41, + ["parenrightaltonearabic"]=64831, + ["parenrightinferior"]=8334, + ["parenrightmonospace"]=65289, + ["parenrightsmall"]=65114, + ["parenrightsuperior"]=8318, + ["parenrightvertical"]=65078, + ["partialdiff"]=8706, + ["paseqhebrew"]=1472, + ["pashtahebrew"]=1433, + ["pasquare"]=13225, + ["patah"]=1463, + ["patah11"]=1463, + ["patah1d"]=1463, + ["patah2a"]=1463, + ["patahhebrew"]=1463, + ["patahnarrowhebrew"]=1463, + ["patahquarterhebrew"]=1463, + ["patahwidehebrew"]=1463, + ["pazerhebrew"]=1441, + ["pbopomofo"]=12550, + ["pcircle"]=9439, + ["pdotaccent"]=7767, + ["pe"]=1508, + ["pecyrillic"]=1087, + ["pedagesh"]=64324, + ["pedageshhebrew"]=64324, + ["peezisquare"]=13115, + ["pefinaldageshhebrew"]=64323, + ["peharabic"]=1662, + ["peharmenian"]=1402, + ["pehebrew"]=1508, + ["pehfinalarabic"]=64343, + ["pehinitialarabic"]=64344, + ["pehiragana"]=12410, + ["pehmedialarabic"]=64345, + ["pekatakana"]=12506, + ["pemiddlehookcyrillic"]=1191, + ["perafehebrew"]=64334, + ["percent"]=37, + ["percentarabic"]=1642, + ["percentmonospace"]=65285, + ["percentsmall"]=65130, + ["period"]=46, + ["periodarmenian"]=1417, + ["periodcentered"]=183, + 
["periodhalfwidth"]=65377, + ["periodmonospace"]=65294, + ["periodsmall"]=65106, + ["perispomenigreekcmb"]=834, + ["perpendicular"]=8869, + ["perthousand"]=8240, + ["peseta"]=8359, + ["pfsquare"]=13194, + ["phabengali"]=2475, + ["phadeva"]=2347, + ["phagujarati"]=2731, + ["phagurmukhi"]=2603, + ["phi"]=966, + ["phi1"]=981, + ["phieuphacirclekorean"]=12922, + ["phieuphaparenkorean"]=12826, + ["phieuphcirclekorean"]=12908, + ["phieuphkorean"]=12621, + ["phieuphparenkorean"]=12812, + ["philatin"]=632, + ["phinthuthai"]=3642, + ["phisymbolgreek"]=981, + ["phook"]=421, + ["phophanthai"]=3614, + ["phophungthai"]=3612, + ["phosamphaothai"]=3616, + ["pi"]=960, + ["pieupacirclekorean"]=12915, + ["pieupaparenkorean"]=12819, + ["pieupcieuckorean"]=12662, + ["pieupcirclekorean"]=12901, + ["pieupkiyeokkorean"]=12658, + ["pieupkorean"]=12610, + ["pieupparenkorean"]=12805, + ["pieupsioskiyeokkorean"]=12660, + ["pieupsioskorean"]=12612, + ["pieupsiostikeutkorean"]=12661, + ["pieupthieuthkorean"]=12663, + ["pieuptikeutkorean"]=12659, + ["pihiragana"]=12404, + ["pikatakana"]=12500, + ["pisymbolgreek"]=982, + ["piwrarmenian"]=1411, + ["plus"]=43, + ["plusbelowcmb"]=799, + ["pluscircle"]=8853, + ["plusminus"]=177, + ["plusmod"]=726, + ["plusmonospace"]=65291, + ["plussmall"]=65122, + ["plussuperior"]=8314, + ["pmonospace"]=65360, + ["pmsquare"]=13272, + ["pohiragana"]=12413, + ["pointingindexdownwhite"]=9759, + ["pointingindexleftwhite"]=9756, + ["pointingindexrightwhite"]=9758, + ["pointingindexupwhite"]=9757, + ["pokatakana"]=12509, + ["poplathai"]=3611, + ["postalmark"]=12306, + ["postalmarkface"]=12320, + ["pparen"]=9387, + ["precedes"]=8826, + ["prescription"]=8478, + ["primemod"]=697, + ["primereversed"]=8245, + ["product"]=8719, + ["projective"]=8965, + ["prolongedkana"]=12540, + ["propellor"]=8984, + ["propersubset"]=8834, + ["propersuperset"]=8835, + ["proportion"]=8759, + ["proportional"]=8733, + ["psi"]=968, + ["psicyrillic"]=1137, + ["psilipneumatacyrilliccmb"]=1158, + ["pssquare"]=13232, + ["puhiragana"]=12407, + ["pukatakana"]=12503, + ["pvsquare"]=13236, + ["pwsquare"]=13242, + ["q"]=113, + ["qadeva"]=2392, + ["qadmahebrew"]=1448, + ["qafarabic"]=1602, + ["qaffinalarabic"]=65238, + ["qafinitialarabic"]=65239, + ["qafmedialarabic"]=65240, + ["qamats"]=1464, + ["qamats10"]=1464, + ["qamats1a"]=1464, + ["qamats1c"]=1464, + ["qamats27"]=1464, + ["qamats29"]=1464, + ["qamats33"]=1464, + ["qamatsde"]=1464, + ["qamatshebrew"]=1464, + ["qamatsnarrowhebrew"]=1464, + ["qamatsqatanhebrew"]=1464, + ["qamatsqatannarrowhebrew"]=1464, + ["qamatsqatanquarterhebrew"]=1464, + ["qamatsqatanwidehebrew"]=1464, + ["qamatsquarterhebrew"]=1464, + ["qamatswidehebrew"]=1464, + ["qarneyparahebrew"]=1439, + ["qbopomofo"]=12561, + ["qcircle"]=9440, + ["qhook"]=672, + ["qmonospace"]=65361, + ["qof"]=1511, + ["qofdagesh"]=64327, + ["qofdageshhebrew"]=64327, + ["qofhatafpatah"]=1511, + ["qofhatafpatahhebrew"]=1511, + ["qofhatafsegol"]=1511, + ["qofhatafsegolhebrew"]=1511, + ["qofhebrew"]=1511, + ["qofhiriq"]=1511, + ["qofhiriqhebrew"]=1511, + ["qofholam"]=1511, + ["qofholamhebrew"]=1511, + ["qofpatah"]=1511, + ["qofpatahhebrew"]=1511, + ["qofqamats"]=1511, + ["qofqamatshebrew"]=1511, + ["qofqubuts"]=1511, + ["qofqubutshebrew"]=1511, + ["qofsegol"]=1511, + ["qofsegolhebrew"]=1511, + ["qofsheva"]=1511, + ["qofshevahebrew"]=1511, + ["qoftsere"]=1511, + ["qoftserehebrew"]=1511, + ["qparen"]=9388, + ["quarternote"]=9833, + ["qubuts"]=1467, + ["qubuts18"]=1467, + ["qubuts25"]=1467, + ["qubuts31"]=1467, + ["qubutshebrew"]=1467, + 
["qubutsnarrowhebrew"]=1467, + ["qubutsquarterhebrew"]=1467, + ["qubutswidehebrew"]=1467, + ["question"]=63, + ["questionarabic"]=1567, + ["questionarmenian"]=1374, + ["questiondown"]=191, + ["questiongreek"]=894, + ["questionmonospace"]=65311, + ["quotedbl"]=34, + ["quotedblbase"]=8222, + ["quotedblleft"]=8220, + ["quotedblmonospace"]=65282, + ["quotedblprime"]=12318, + ["quotedblprimereversed"]=12317, + ["quotedblright"]=8221, + ["quoteleft"]=8216, + ["quoteleftreversed"]=8219, + ["quotereversed"]=8219, + ["quoteright"]=8217, + ["quoterightn"]=329, + ["quotesinglbase"]=8218, + ["quotesingle"]=39, + ["quotesinglemonospace"]=65287, + ["r"]=114, + ["raarmenian"]=1404, + ["rabengali"]=2480, + ["racute"]=341, + ["radeva"]=2352, + ["radical"]=8730, + ["radoverssquare"]=13230, + ["radoverssquaredsquare"]=13231, + ["radsquare"]=13229, + ["rafe"]=1471, + ["rafehebrew"]=1471, + ["ragujarati"]=2736, + ["ragurmukhi"]=2608, + ["rahiragana"]=12425, + ["rakatakana"]=12521, + ["rakatakanahalfwidth"]=65431, + ["ralowerdiagonalbengali"]=2545, + ["ramiddlediagonalbengali"]=2544, + ["ramshorn"]=612, + ["ratio"]=8758, + ["rbopomofo"]=12566, + ["rcaron"]=345, + ["rcedilla"]=343, + ["rcircle"]=9441, + ["rcommaaccent"]=343, + ["rdblgrave"]=529, + ["rdotaccent"]=7769, + ["rdotbelow"]=7771, + ["rdotbelowmacron"]=7773, + ["referencemark"]=8251, + ["reflexsubset"]=8838, + ["reflexsuperset"]=8839, + ["registered"]=174, + ["reharabic"]=1585, + ["reharmenian"]=1408, + ["rehfinalarabic"]=65198, + ["rehiragana"]=12428, + ["rehyehaleflamarabic"]=1585, + ["rekatakana"]=12524, + ["rekatakanahalfwidth"]=65434, + ["resh"]=1512, + ["reshdageshhebrew"]=64328, + ["reshhatafpatah"]=1512, + ["reshhatafpatahhebrew"]=1512, + ["reshhatafsegol"]=1512, + ["reshhatafsegolhebrew"]=1512, + ["reshhebrew"]=1512, + ["reshhiriq"]=1512, + ["reshhiriqhebrew"]=1512, + ["reshholam"]=1512, + ["reshholamhebrew"]=1512, + ["reshpatah"]=1512, + ["reshpatahhebrew"]=1512, + ["reshqamats"]=1512, + ["reshqamatshebrew"]=1512, + ["reshqubuts"]=1512, + ["reshqubutshebrew"]=1512, + ["reshsegol"]=1512, + ["reshsegolhebrew"]=1512, + ["reshsheva"]=1512, + ["reshshevahebrew"]=1512, + ["reshtsere"]=1512, + ["reshtserehebrew"]=1512, + ["reversedtilde"]=8765, + ["reviahebrew"]=1431, + ["reviamugrashhebrew"]=1431, + ["revlogicalnot"]=8976, + ["rfishhook"]=638, + ["rfishhookreversed"]=639, + ["rhabengali"]=2525, + ["rhadeva"]=2397, + ["rho"]=961, + ["rhook"]=637, + ["rhookturned"]=635, + ["rhookturnedsuperior"]=693, + ["rhosymbolgreek"]=1009, + ["rhotichookmod"]=734, + ["rieulacirclekorean"]=12913, + ["rieulaparenkorean"]=12817, + ["rieulcirclekorean"]=12899, + ["rieulhieuhkorean"]=12608, + ["rieulkiyeokkorean"]=12602, + ["rieulkiyeoksioskorean"]=12649, + ["rieulkorean"]=12601, + ["rieulmieumkorean"]=12603, + ["rieulpansioskorean"]=12652, + ["rieulparenkorean"]=12803, + ["rieulphieuphkorean"]=12607, + ["rieulpieupkorean"]=12604, + ["rieulpieupsioskorean"]=12651, + ["rieulsioskorean"]=12605, + ["rieulthieuthkorean"]=12606, + ["rieultikeutkorean"]=12650, + ["rieulyeorinhieuhkorean"]=12653, + ["rightangle"]=8735, + ["righttackbelowcmb"]=793, + ["righttriangle"]=8895, + ["rihiragana"]=12426, + ["rikatakana"]=12522, + ["rikatakanahalfwidth"]=65432, + ["ring"]=730, + ["ringbelowcmb"]=805, + ["ringcmb"]=778, + ["ringhalfleft"]=703, + ["ringhalfleftarmenian"]=1369, + ["ringhalfleftbelowcmb"]=796, + ["ringhalfleftcentered"]=723, + ["ringhalfright"]=702, + ["ringhalfrightbelowcmb"]=825, + ["ringhalfrightcentered"]=722, + ["rinvertedbreve"]=531, + ["rittorusquare"]=13137, + 
["rlinebelow"]=7775, + ["rlongleg"]=636, + ["rlonglegturned"]=634, + ["rmonospace"]=65362, + ["rohiragana"]=12429, + ["rokatakana"]=12525, + ["rokatakanahalfwidth"]=65435, + ["roruathai"]=3619, + ["rparen"]=9389, + ["rrabengali"]=2524, + ["rradeva"]=2353, + ["rragurmukhi"]=2652, + ["rreharabic"]=1681, + ["rrehfinalarabic"]=64397, + ["rrvocalicbengali"]=2528, + ["rrvocalicdeva"]=2400, + ["rrvocalicgujarati"]=2784, + ["rrvocalicvowelsignbengali"]=2500, + ["rrvocalicvowelsigndeva"]=2372, + ["rrvocalicvowelsigngujarati"]=2756, + ["rtblock"]=9616, + ["rturned"]=633, + ["rturnedsuperior"]=692, + ["ruhiragana"]=12427, + ["rukatakana"]=12523, + ["rukatakanahalfwidth"]=65433, + ["rupeemarkbengali"]=2546, + ["rupeesignbengali"]=2547, + ["ruthai"]=3620, + ["rvocalicbengali"]=2443, + ["rvocalicdeva"]=2315, + ["rvocalicgujarati"]=2699, + ["rvocalicvowelsignbengali"]=2499, + ["rvocalicvowelsigndeva"]=2371, + ["rvocalicvowelsigngujarati"]=2755, + ["s"]=115, + ["sabengali"]=2488, + ["sacute"]=347, + ["sacutedotaccent"]=7781, + ["sadarabic"]=1589, + ["sadeva"]=2360, + ["sadfinalarabic"]=65210, + ["sadinitialarabic"]=65211, + ["sadmedialarabic"]=65212, + ["sagujarati"]=2744, + ["sagurmukhi"]=2616, + ["sahiragana"]=12373, + ["sakatakana"]=12469, + ["sakatakanahalfwidth"]=65403, + ["sallallahoualayhewasallamarabic"]=65018, + ["samekh"]=1505, + ["samekhdagesh"]=64321, + ["samekhdageshhebrew"]=64321, + ["samekhhebrew"]=1505, + ["saraaathai"]=3634, + ["saraaethai"]=3649, + ["saraaimaimalaithai"]=3652, + ["saraaimaimuanthai"]=3651, + ["saraamthai"]=3635, + ["saraathai"]=3632, + ["saraethai"]=3648, + ["saraiithai"]=3637, + ["saraithai"]=3636, + ["saraothai"]=3650, + ["saraueethai"]=3639, + ["sarauethai"]=3638, + ["sarauthai"]=3640, + ["sarauuthai"]=3641, + ["sbopomofo"]=12569, + ["scaron"]=353, + ["scarondotaccent"]=7783, + ["scedilla"]=351, + ["schwa"]=601, + ["schwacyrillic"]=1241, + ["schwadieresiscyrillic"]=1243, + ["schwahook"]=602, + ["scircle"]=9442, + ["scircumflex"]=349, + ["scommaaccent"]=537, + ["sdotaccent"]=7777, + ["sdotbelow"]=7779, + ["sdotbelowdotaccent"]=7785, + ["seagullbelowcmb"]=828, + ["second"]=8243, + ["secondtonechinese"]=714, + ["section"]=167, + ["seenarabic"]=1587, + ["seenfinalarabic"]=65202, + ["seeninitialarabic"]=65203, + ["seenmedialarabic"]=65204, + ["segol"]=1462, + ["segol13"]=1462, + ["segol1f"]=1462, + ["segol2c"]=1462, + ["segolhebrew"]=1462, + ["segolnarrowhebrew"]=1462, + ["segolquarterhebrew"]=1462, + ["segoltahebrew"]=1426, + ["segolwidehebrew"]=1462, + ["seharmenian"]=1405, + ["sehiragana"]=12379, + ["sekatakana"]=12475, + ["sekatakanahalfwidth"]=65406, + ["semicolon"]=59, + ["semicolonarabic"]=1563, + ["semicolonmonospace"]=65307, + ["semicolonsmall"]=65108, + ["semivoicedmarkkana"]=12444, + ["semivoicedmarkkanahalfwidth"]=65439, + ["sentisquare"]=13090, + ["sentosquare"]=13091, + ["seven"]=55, + ["sevenarabic"]=1639, + ["sevenbengali"]=2541, + ["sevencircle"]=9318, + ["sevencircleinversesansserif"]=10128, + ["sevendeva"]=2413, + ["seveneighths"]=8542, + ["sevengujarati"]=2797, + ["sevengurmukhi"]=2669, + ["sevenhackarabic"]=1639, + ["sevenhangzhou"]=12327, + ["sevenideographicparen"]=12838, + ["seveninferior"]=8327, + ["sevenmonospace"]=65303, + ["sevenparen"]=9338, + ["sevenperiod"]=9358, + ["sevenpersian"]=1783, + ["sevenroman"]=8566, + ["sevensuperior"]=8311, + ["seventeencircle"]=9328, + ["seventeenparen"]=9348, + ["seventeenperiod"]=9368, + ["seventhai"]=3671, + ["sfthyphen"]=173, + ["shaarmenian"]=1399, + ["shabengali"]=2486, + ["shacyrillic"]=1096, + 
["shaddaarabic"]=1617, + ["shaddadammaarabic"]=64609, + ["shaddadammatanarabic"]=64606, + ["shaddafathaarabic"]=64608, + ["shaddafathatanarabic"]=1617, + ["shaddakasraarabic"]=64610, + ["shaddakasratanarabic"]=64607, + ["shade"]=9618, + ["shadedark"]=9619, + ["shadelight"]=9617, + ["shademedium"]=9618, + ["shadeva"]=2358, + ["shagujarati"]=2742, + ["shagurmukhi"]=2614, + ["shalshelethebrew"]=1427, + ["shbopomofo"]=12565, + ["shchacyrillic"]=1097, + ["sheenarabic"]=1588, + ["sheenfinalarabic"]=65206, + ["sheeninitialarabic"]=65207, + ["sheenmedialarabic"]=65208, + ["sheicoptic"]=995, + ["sheqel"]=8362, + ["sheqelhebrew"]=8362, + ["sheva"]=1456, + ["sheva115"]=1456, + ["sheva15"]=1456, + ["sheva22"]=1456, + ["sheva2e"]=1456, + ["shevahebrew"]=1456, + ["shevanarrowhebrew"]=1456, + ["shevaquarterhebrew"]=1456, + ["shevawidehebrew"]=1456, + ["shhacyrillic"]=1211, + ["shimacoptic"]=1005, + ["shin"]=1513, + ["shindagesh"]=64329, + ["shindageshhebrew"]=64329, + ["shindageshshindot"]=64300, + ["shindageshshindothebrew"]=64300, + ["shindageshsindot"]=64301, + ["shindageshsindothebrew"]=64301, + ["shindothebrew"]=1473, + ["shinhebrew"]=1513, + ["shinshindot"]=64298, + ["shinshindothebrew"]=64298, + ["shinsindot"]=64299, + ["shinsindothebrew"]=64299, + ["shook"]=642, + ["sigma"]=963, + ["sigma1"]=962, + ["sigmafinal"]=962, + ["sigmalunatesymbolgreek"]=1010, + ["sihiragana"]=12375, + ["sikatakana"]=12471, + ["sikatakanahalfwidth"]=65404, + ["siluqhebrew"]=1469, + ["siluqlefthebrew"]=1469, + ["similar"]=8764, + ["sindothebrew"]=1474, + ["siosacirclekorean"]=12916, + ["siosaparenkorean"]=12820, + ["sioscieuckorean"]=12670, + ["sioscirclekorean"]=12902, + ["sioskiyeokkorean"]=12666, + ["sioskorean"]=12613, + ["siosnieunkorean"]=12667, + ["siosparenkorean"]=12806, + ["siospieupkorean"]=12669, + ["siostikeutkorean"]=12668, + ["six"]=54, + ["sixarabic"]=1638, + ["sixbengali"]=2540, + ["sixcircle"]=9317, + ["sixcircleinversesansserif"]=10127, + ["sixdeva"]=2412, + ["sixgujarati"]=2796, + ["sixgurmukhi"]=2668, + ["sixhackarabic"]=1638, + ["sixhangzhou"]=12326, + ["sixideographicparen"]=12837, + ["sixinferior"]=8326, + ["sixmonospace"]=65302, + ["sixparen"]=9337, + ["sixperiod"]=9357, + ["sixpersian"]=1782, + ["sixroman"]=8565, + ["sixsuperior"]=8310, + ["sixteencircle"]=9327, + ["sixteencurrencydenominatorbengali"]=2553, + ["sixteenparen"]=9347, + ["sixteenperiod"]=9367, + ["sixthai"]=3670, + ["slash"]=47, + ["slashmonospace"]=65295, + ["slong"]=383, + ["slongdotaccent"]=7835, + ["smileface"]=9786, + ["smonospace"]=65363, + ["sofpasuqhebrew"]=1475, + ["softhyphen"]=173, + ["softsigncyrillic"]=1100, + ["sohiragana"]=12381, + ["sokatakana"]=12477, + ["sokatakanahalfwidth"]=65407, + ["soliduslongoverlaycmb"]=824, + ["solidusshortoverlaycmb"]=823, + ["sorusithai"]=3625, + ["sosalathai"]=3624, + ["sosothai"]=3595, + ["sosuathai"]=3626, + ["space"]=32, + ["spacehackarabic"]=32, + ["spade"]=9824, + ["spadesuitblack"]=9824, + ["spadesuitwhite"]=9828, + ["sparen"]=9390, + ["squarebelowcmb"]=827, + ["squarecc"]=13252, + ["squarecm"]=13213, + ["squarediagonalcrosshatchfill"]=9641, + ["squarehorizontalfill"]=9636, + ["squarekg"]=13199, + ["squarekm"]=13214, + ["squarekmcapital"]=13262, + ["squareln"]=13265, + ["squarelog"]=13266, + ["squaremg"]=13198, + ["squaremil"]=13269, + ["squaremm"]=13212, + ["squaremsquared"]=13217, + ["squareorthogonalcrosshatchfill"]=9638, + ["squareupperlefttolowerrightfill"]=9639, + ["squareupperrighttolowerleftfill"]=9640, + ["squareverticalfill"]=9637, + ["squarewhitewithsmallblack"]=9635, + 
["srsquare"]=13275, + ["ssabengali"]=2487, + ["ssadeva"]=2359, + ["ssagujarati"]=2743, + ["ssangcieuckorean"]=12617, + ["ssanghieuhkorean"]=12677, + ["ssangieungkorean"]=12672, + ["ssangkiyeokkorean"]=12594, + ["ssangnieunkorean"]=12645, + ["ssangpieupkorean"]=12611, + ["ssangsioskorean"]=12614, + ["ssangtikeutkorean"]=12600, + ["sterling"]=163, + ["sterlingmonospace"]=65505, + ["strokelongoverlaycmb"]=822, + ["strokeshortoverlaycmb"]=821, + ["subset"]=8834, + ["subsetnotequal"]=8842, + ["subsetorequal"]=8838, + ["succeeds"]=8827, + ["suchthat"]=8715, + ["suhiragana"]=12377, + ["sukatakana"]=12473, + ["sukatakanahalfwidth"]=65405, + ["sukunarabic"]=1618, + ["summation"]=8721, + ["sun"]=9788, + ["superset"]=8835, + ["supersetnotequal"]=8843, + ["supersetorequal"]=8839, + ["svsquare"]=13276, + ["syouwaerasquare"]=13180, + ["t"]=116, + ["tabengali"]=2468, + ["tackdown"]=8868, + ["tackleft"]=8867, + ["tadeva"]=2340, + ["tagujarati"]=2724, + ["tagurmukhi"]=2596, + ["taharabic"]=1591, + ["tahfinalarabic"]=65218, + ["tahinitialarabic"]=65219, + ["tahiragana"]=12383, + ["tahmedialarabic"]=65220, + ["taisyouerasquare"]=13181, + ["takatakana"]=12479, + ["takatakanahalfwidth"]=65408, + ["tatweelarabic"]=1600, + ["tau"]=964, + ["tav"]=1514, + ["tavdages"]=64330, + ["tavdagesh"]=64330, + ["tavdageshhebrew"]=64330, + ["tavhebrew"]=1514, + ["tbar"]=359, + ["tbopomofo"]=12554, + ["tcaron"]=357, + ["tccurl"]=680, + ["tcedilla"]=355, + ["tcheharabic"]=1670, + ["tchehfinalarabic"]=64379, + ["tchehinitialarabic"]=64380, + ["tchehmedialarabic"]=64381, + ["tchehmeeminitialarabic"]=64380, + ["tcircle"]=9443, + ["tcircumflexbelow"]=7793, + ["tcommaaccent"]=355, + ["tdieresis"]=7831, + ["tdotaccent"]=7787, + ["tdotbelow"]=7789, + ["tecyrillic"]=1090, + ["tedescendercyrillic"]=1197, + ["teharabic"]=1578, + ["tehfinalarabic"]=65174, + ["tehhahinitialarabic"]=64674, + ["tehhahisolatedarabic"]=64524, + ["tehinitialarabic"]=65175, + ["tehiragana"]=12390, + ["tehjeeminitialarabic"]=64673, + ["tehjeemisolatedarabic"]=64523, + ["tehmarbutaarabic"]=1577, + ["tehmarbutafinalarabic"]=65172, + ["tehmedialarabic"]=65176, + ["tehmeeminitialarabic"]=64676, + ["tehmeemisolatedarabic"]=64526, + ["tehnoonfinalarabic"]=64627, + ["tekatakana"]=12486, + ["tekatakanahalfwidth"]=65411, + ["telephone"]=8481, + ["telephoneblack"]=9742, + ["telishagedolahebrew"]=1440, + ["telishaqetanahebrew"]=1449, + ["tencircle"]=9321, + ["tenideographicparen"]=12841, + ["tenparen"]=9341, + ["tenperiod"]=9361, + ["tenroman"]=8569, + ["tesh"]=679, + ["tet"]=1496, + ["tetdagesh"]=64312, + ["tetdageshhebrew"]=64312, + ["tethebrew"]=1496, + ["tetsecyrillic"]=1205, + ["tevirhebrew"]=1435, + ["tevirlefthebrew"]=1435, + ["thabengali"]=2469, + ["thadeva"]=2341, + ["thagujarati"]=2725, + ["thagurmukhi"]=2597, + ["thalarabic"]=1584, + ["thalfinalarabic"]=65196, + ["thanthakhatthai"]=3660, + ["theharabic"]=1579, + ["thehfinalarabic"]=65178, + ["thehinitialarabic"]=65179, + ["thehmedialarabic"]=65180, + ["thereexists"]=8707, + ["therefore"]=8756, + ["theta"]=952, + ["theta1"]=977, + ["thetasymbolgreek"]=977, + ["thieuthacirclekorean"]=12921, + ["thieuthaparenkorean"]=12825, + ["thieuthcirclekorean"]=12907, + ["thieuthkorean"]=12620, + ["thieuthparenkorean"]=12811, + ["thirteencircle"]=9324, + ["thirteenparen"]=9344, + ["thirteenperiod"]=9364, + ["thonangmonthothai"]=3601, + ["thook"]=429, + ["thophuthaothai"]=3602, + ["thorn"]=254, + ["thothahanthai"]=3607, + ["thothanthai"]=3600, + ["thothongthai"]=3608, + ["thothungthai"]=3606, + ["thousandcyrillic"]=1154, + 
["thousandsseparatorarabic"]=1644, + ["thousandsseparatorpersian"]=1644, + ["three"]=51, + ["threearabic"]=1635, + ["threebengali"]=2537, + ["threecircle"]=9314, + ["threecircleinversesansserif"]=10124, + ["threedeva"]=2409, + ["threeeighths"]=8540, + ["threegujarati"]=2793, + ["threegurmukhi"]=2665, + ["threehackarabic"]=1635, + ["threehangzhou"]=12323, + ["threeideographicparen"]=12834, + ["threeinferior"]=8323, + ["threemonospace"]=65299, + ["threenumeratorbengali"]=2550, + ["threeparen"]=9334, + ["threeperiod"]=9354, + ["threepersian"]=1779, + ["threequarters"]=190, + ["threeroman"]=8562, + ["threesuperior"]=179, + ["threethai"]=3667, + ["thzsquare"]=13204, + ["tihiragana"]=12385, + ["tikatakana"]=12481, + ["tikatakanahalfwidth"]=65409, + ["tikeutacirclekorean"]=12912, + ["tikeutaparenkorean"]=12816, + ["tikeutcirclekorean"]=12898, + ["tikeutkorean"]=12599, + ["tikeutparenkorean"]=12802, + ["tilde"]=732, + ["tildebelowcmb"]=816, + ["tildecmb"]=771, + ["tildecomb"]=771, + ["tildedoublecmb"]=864, + ["tildeoperator"]=8764, + ["tildeoverlaycmb"]=820, + ["tildeverticalcmb"]=830, + ["timescircle"]=8855, + ["tipehahebrew"]=1430, + ["tipehalefthebrew"]=1430, + ["tippigurmukhi"]=2672, + ["titlocyrilliccmb"]=1155, + ["tiwnarmenian"]=1407, + ["tlinebelow"]=7791, + ["tmonospace"]=65364, + ["toarmenian"]=1385, + ["tohiragana"]=12392, + ["tokatakana"]=12488, + ["tokatakanahalfwidth"]=65412, + ["tonebarextrahighmod"]=741, + ["tonebarextralowmod"]=745, + ["tonebarhighmod"]=742, + ["tonebarlowmod"]=744, + ["tonebarmidmod"]=743, + ["tonefive"]=445, + ["tonesix"]=389, + ["tonetwo"]=424, + ["tonos"]=900, + ["tonsquare"]=13095, + ["topatakthai"]=3599, + ["tortoiseshellbracketleft"]=12308, + ["tortoiseshellbracketleftsmall"]=65117, + ["tortoiseshellbracketleftvertical"]=65081, + ["tortoiseshellbracketright"]=12309, + ["tortoiseshellbracketrightsmall"]=65118, + ["tortoiseshellbracketrightvertical"]=65082, + ["totaothai"]=3605, + ["tpalatalhook"]=427, + ["tparen"]=9391, + ["trademark"]=8482, + ["tretroflexhook"]=648, + ["triagdn"]=9660, + ["triaglf"]=9668, + ["triagrt"]=9658, + ["triagup"]=9650, + ["ts"]=678, + ["tsadi"]=1510, + ["tsadidagesh"]=64326, + ["tsadidageshhebrew"]=64326, + ["tsadihebrew"]=1510, + ["tsecyrillic"]=1094, + ["tsere"]=1461, + ["tsere12"]=1461, + ["tsere1e"]=1461, + ["tsere2b"]=1461, + ["tserehebrew"]=1461, + ["tserenarrowhebrew"]=1461, + ["tserequarterhebrew"]=1461, + ["tserewidehebrew"]=1461, + ["tshecyrillic"]=1115, + ["ttabengali"]=2463, + ["ttadeva"]=2335, + ["ttagujarati"]=2719, + ["ttagurmukhi"]=2591, + ["tteharabic"]=1657, + ["ttehfinalarabic"]=64359, + ["ttehinitialarabic"]=64360, + ["ttehmedialarabic"]=64361, + ["tthabengali"]=2464, + ["tthadeva"]=2336, + ["tthagujarati"]=2720, + ["tthagurmukhi"]=2592, + ["tturned"]=647, + ["tuhiragana"]=12388, + ["tukatakana"]=12484, + ["tukatakanahalfwidth"]=65410, + ["tusmallhiragana"]=12387, + ["tusmallkatakana"]=12483, + ["tusmallkatakanahalfwidth"]=65391, + ["twelvecircle"]=9323, + ["twelveparen"]=9343, + ["twelveperiod"]=9363, + ["twelveroman"]=8571, + ["twentycircle"]=9331, + ["twentyparen"]=9351, + ["twentyperiod"]=9371, + ["two"]=50, + ["twoarabic"]=1634, + ["twobengali"]=2536, + ["twocircle"]=9313, + ["twocircleinversesansserif"]=10123, + ["twodeva"]=2408, + ["twodotenleader"]=8229, + ["twodotleader"]=8229, + ["twodotleadervertical"]=65072, + ["twogujarati"]=2792, + ["twogurmukhi"]=2664, + ["twohackarabic"]=1634, + ["twohangzhou"]=12322, + ["twoideographicparen"]=12833, + ["twoinferior"]=8322, + ["twomonospace"]=65298, + 
["twonumeratorbengali"]=2549, + ["twoparen"]=9333, + ["twoperiod"]=9353, + ["twopersian"]=1778, + ["tworoman"]=8561, + ["twostroke"]=443, + ["twosuperior"]=178, + ["twothai"]=3666, + ["twothirds"]=8532, + ["u"]=117, + ["uacute"]=250, + ["ubar"]=649, + ["ubengali"]=2441, + ["ubopomofo"]=12584, + ["ubreve"]=365, + ["ucaron"]=468, + ["ucircle"]=9444, + ["ucircumflex"]=251, + ["ucircumflexbelow"]=7799, + ["ucyrillic"]=1091, + ["udattadeva"]=2385, + ["udblacute"]=369, + ["udblgrave"]=533, + ["udeva"]=2313, + ["udieresis"]=252, + ["udieresisacute"]=472, + ["udieresisbelow"]=7795, + ["udieresiscaron"]=474, + ["udieresiscyrillic"]=1265, + ["udieresisgrave"]=476, + ["udieresismacron"]=470, + ["udotbelow"]=7909, + ["ugrave"]=249, + ["ugujarati"]=2697, + ["ugurmukhi"]=2569, + ["uhiragana"]=12358, + ["uhookabove"]=7911, + ["uhorn"]=432, + ["uhornacute"]=7913, + ["uhorndotbelow"]=7921, + ["uhorngrave"]=7915, + ["uhornhookabove"]=7917, + ["uhorntilde"]=7919, + ["uhungarumlaut"]=369, + ["uhungarumlautcyrillic"]=1267, + ["uinvertedbreve"]=535, + ["ukatakana"]=12454, + ["ukatakanahalfwidth"]=65395, + ["ukcyrillic"]=1145, + ["ukorean"]=12636, + ["umacron"]=363, + ["umacroncyrillic"]=1263, + ["umacrondieresis"]=7803, + ["umatragurmukhi"]=2625, + ["umonospace"]=65365, + ["underscore"]=95, + ["underscoredbl"]=8215, + ["underscoremonospace"]=65343, + ["underscorevertical"]=65075, + ["underscorewavy"]=65103, + ["union"]=8746, + ["universal"]=8704, + ["uogonek"]=371, + ["uparen"]=9392, + ["upblock"]=9600, + ["upperdothebrew"]=1476, + ["upsilon"]=965, + ["upsilondieresis"]=971, + ["upsilondieresistonos"]=944, + ["upsilonlatin"]=650, + ["upsilontonos"]=973, + ["uptackbelowcmb"]=797, + ["uptackmod"]=724, + ["uragurmukhi"]=2675, + ["uring"]=367, + ["ushortcyrillic"]=1118, + ["usmallhiragana"]=12357, + ["usmallkatakana"]=12453, + ["usmallkatakanahalfwidth"]=65385, + ["ustraightcyrillic"]=1199, + ["ustraightstrokecyrillic"]=1201, + ["utilde"]=361, + ["utildeacute"]=7801, + ["utildebelow"]=7797, + ["uubengali"]=2442, + ["uudeva"]=2314, + ["uugujarati"]=2698, + ["uugurmukhi"]=2570, + ["uumatragurmukhi"]=2626, + ["uuvowelsignbengali"]=2498, + ["uuvowelsigndeva"]=2370, + ["uuvowelsigngujarati"]=2754, + ["uvowelsignbengali"]=2497, + ["uvowelsigndeva"]=2369, + ["uvowelsigngujarati"]=2753, + ["v"]=118, + ["vadeva"]=2357, + ["vagujarati"]=2741, + ["vagurmukhi"]=2613, + ["vakatakana"]=12535, + ["vav"]=1493, + ["vavdagesh"]=64309, + ["vavdagesh65"]=64309, + ["vavdageshhebrew"]=64309, + ["vavhebrew"]=1493, + ["vavholam"]=64331, + ["vavholamhebrew"]=64331, + ["vavvavhebrew"]=1520, + ["vavyodhebrew"]=1521, + ["vcircle"]=9445, + ["vdotbelow"]=7807, + ["vecyrillic"]=1074, + ["veharabic"]=1700, + ["vehfinalarabic"]=64363, + ["vehinitialarabic"]=64364, + ["vehmedialarabic"]=64365, + ["vekatakana"]=12537, + ["venus"]=9792, + ["verticalbar"]=124, + ["verticallineabovecmb"]=781, + ["verticallinebelowcmb"]=809, + ["verticallinelowmod"]=716, + ["verticallinemod"]=712, + ["vewarmenian"]=1406, + ["vhook"]=651, + ["vikatakana"]=12536, + ["viramabengali"]=2509, + ["viramadeva"]=2381, + ["viramagujarati"]=2765, + ["visargabengali"]=2435, + ["visargadeva"]=2307, + ["visargagujarati"]=2691, + ["vmonospace"]=65366, + ["voarmenian"]=1400, + ["voicediterationhiragana"]=12446, + ["voicediterationkatakana"]=12542, + ["voicedmarkkana"]=12443, + ["voicedmarkkanahalfwidth"]=65438, + ["vokatakana"]=12538, + ["vparen"]=9393, + ["vtilde"]=7805, + ["vturned"]=652, + ["vuhiragana"]=12436, + ["vukatakana"]=12532, + ["w"]=119, + ["wacute"]=7811, + 
["waekorean"]=12633, + ["wahiragana"]=12431, + ["wakatakana"]=12527, + ["wakatakanahalfwidth"]=65436, + ["wakorean"]=12632, + ["wasmallhiragana"]=12430, + ["wasmallkatakana"]=12526, + ["wattosquare"]=13143, + ["wavedash"]=12316, + ["wavyunderscorevertical"]=65076, + ["wawarabic"]=1608, + ["wawfinalarabic"]=65262, + ["wawhamzaabovearabic"]=1572, + ["wawhamzaabovefinalarabic"]=65158, + ["wbsquare"]=13277, + ["wcircle"]=9446, + ["wcircumflex"]=373, + ["wdieresis"]=7813, + ["wdotaccent"]=7815, + ["wdotbelow"]=7817, + ["wehiragana"]=12433, + ["weierstrass"]=8472, + ["wekatakana"]=12529, + ["wekorean"]=12638, + ["weokorean"]=12637, + ["wgrave"]=7809, + ["whitebullet"]=9702, + ["whitecircle"]=9675, + ["whitecircleinverse"]=9689, + ["whitecornerbracketleft"]=12302, + ["whitecornerbracketleftvertical"]=65091, + ["whitecornerbracketright"]=12303, + ["whitecornerbracketrightvertical"]=65092, + ["whitediamond"]=9671, + ["whitediamondcontainingblacksmalldiamond"]=9672, + ["whitedownpointingsmalltriangle"]=9663, + ["whitedownpointingtriangle"]=9661, + ["whiteleftpointingsmalltriangle"]=9667, + ["whiteleftpointingtriangle"]=9665, + ["whitelenticularbracketleft"]=12310, + ["whitelenticularbracketright"]=12311, + ["whiterightpointingsmalltriangle"]=9657, + ["whiterightpointingtriangle"]=9655, + ["whitesmallsquare"]=9643, + ["whitesmilingface"]=9786, + ["whitesquare"]=9633, + ["whitestar"]=9734, + ["whitetelephone"]=9743, + ["whitetortoiseshellbracketleft"]=12312, + ["whitetortoiseshellbracketright"]=12313, + ["whiteuppointingsmalltriangle"]=9653, + ["whiteuppointingtriangle"]=9651, + ["wihiragana"]=12432, + ["wikatakana"]=12528, + ["wikorean"]=12639, + ["wmonospace"]=65367, + ["wohiragana"]=12434, + ["wokatakana"]=12530, + ["wokatakanahalfwidth"]=65382, + ["won"]=8361, + ["wonmonospace"]=65510, + ["wowaenthai"]=3623, + ["wparen"]=9394, + ["wring"]=7832, + ["wsuperior"]=695, + ["wturned"]=653, + ["wynn"]=447, + ["x"]=120, + ["xabovecmb"]=829, + ["xbopomofo"]=12562, + ["xcircle"]=9447, + ["xdieresis"]=7821, + ["xdotaccent"]=7819, + ["xeharmenian"]=1389, + ["xi"]=958, + ["xmonospace"]=65368, + ["xparen"]=9395, + ["xsuperior"]=739, + ["y"]=121, + ["yaadosquare"]=13134, + ["yabengali"]=2479, + ["yacute"]=253, + ["yadeva"]=2351, + ["yaekorean"]=12626, + ["yagujarati"]=2735, + ["yagurmukhi"]=2607, + ["yahiragana"]=12420, + ["yakatakana"]=12516, + ["yakatakanahalfwidth"]=65428, + ["yakorean"]=12625, + ["yamakkanthai"]=3662, + ["yasmallhiragana"]=12419, + ["yasmallkatakana"]=12515, + ["yasmallkatakanahalfwidth"]=65388, + ["yatcyrillic"]=1123, + ["ycircle"]=9448, + ["ycircumflex"]=375, + ["ydieresis"]=255, + ["ydotaccent"]=7823, + ["ydotbelow"]=7925, + ["yeharabic"]=1610, + ["yehbarreearabic"]=1746, + ["yehbarreefinalarabic"]=64431, + ["yehfinalarabic"]=65266, + ["yehhamzaabovearabic"]=1574, + ["yehhamzaabovefinalarabic"]=65162, + ["yehhamzaaboveinitialarabic"]=65163, + ["yehhamzaabovemedialarabic"]=65164, + ["yehinitialarabic"]=65267, + ["yehmedialarabic"]=65268, + ["yehmeeminitialarabic"]=64733, + ["yehmeemisolatedarabic"]=64600, + ["yehnoonfinalarabic"]=64660, + ["yehthreedotsbelowarabic"]=1745, + ["yekorean"]=12630, + ["yen"]=165, + ["yenmonospace"]=65509, + ["yeokorean"]=12629, + ["yeorinhieuhkorean"]=12678, + ["yerahbenyomohebrew"]=1450, + ["yerahbenyomolefthebrew"]=1450, + ["yericyrillic"]=1099, + ["yerudieresiscyrillic"]=1273, + ["yesieungkorean"]=12673, + ["yesieungpansioskorean"]=12675, + ["yesieungsioskorean"]=12674, + ["yetivhebrew"]=1434, + ["ygrave"]=7923, + ["yhook"]=436, + ["yhookabove"]=7927, + 
["yiarmenian"]=1397, + ["yicyrillic"]=1111, + ["yikorean"]=12642, + ["yinyang"]=9775, + ["yiwnarmenian"]=1410, + ["ymonospace"]=65369, + ["yod"]=1497, + ["yoddagesh"]=64313, + ["yoddageshhebrew"]=64313, + ["yodhebrew"]=1497, + ["yodyodhebrew"]=1522, + ["yodyodpatahhebrew"]=64287, + ["yohiragana"]=12424, + ["yoikorean"]=12681, + ["yokatakana"]=12520, + ["yokatakanahalfwidth"]=65430, + ["yokorean"]=12635, + ["yosmallhiragana"]=12423, + ["yosmallkatakana"]=12519, + ["yosmallkatakanahalfwidth"]=65390, + ["yotgreek"]=1011, + ["yoyaekorean"]=12680, + ["yoyakorean"]=12679, + ["yoyakthai"]=3618, + ["yoyingthai"]=3597, + ["yparen"]=9396, + ["ypogegrammeni"]=890, + ["ypogegrammenigreekcmb"]=837, + ["yr"]=422, + ["yring"]=7833, + ["ysuperior"]=696, + ["ytilde"]=7929, + ["yturned"]=654, + ["yuhiragana"]=12422, + ["yuikorean"]=12684, + ["yukatakana"]=12518, + ["yukatakanahalfwidth"]=65429, + ["yukorean"]=12640, + ["yusbigcyrillic"]=1131, + ["yusbigiotifiedcyrillic"]=1133, + ["yuslittlecyrillic"]=1127, + ["yuslittleiotifiedcyrillic"]=1129, + ["yusmallhiragana"]=12421, + ["yusmallkatakana"]=12517, + ["yusmallkatakanahalfwidth"]=65389, + ["yuyekorean"]=12683, + ["yuyeokorean"]=12682, + ["yyabengali"]=2527, + ["yyadeva"]=2399, + ["z"]=122, + ["zaarmenian"]=1382, + ["zacute"]=378, + ["zadeva"]=2395, + ["zagurmukhi"]=2651, + ["zaharabic"]=1592, + ["zahfinalarabic"]=65222, + ["zahinitialarabic"]=65223, + ["zahiragana"]=12374, + ["zahmedialarabic"]=65224, + ["zainarabic"]=1586, + ["zainfinalarabic"]=65200, + ["zakatakana"]=12470, + ["zaqefgadolhebrew"]=1429, + ["zaqefqatanhebrew"]=1428, + ["zarqahebrew"]=1432, + ["zayin"]=1494, + ["zayindagesh"]=64310, + ["zayindageshhebrew"]=64310, + ["zayinhebrew"]=1494, + ["zbopomofo"]=12567, + ["zcaron"]=382, + ["zcircle"]=9449, + ["zcircumflex"]=7825, + ["zcurl"]=657, + ["zdot"]=380, + ["zdotaccent"]=380, + ["zdotbelow"]=7827, + ["zecyrillic"]=1079, + ["zedescendercyrillic"]=1177, + ["zedieresiscyrillic"]=1247, + ["zehiragana"]=12380, + ["zekatakana"]=12476, + ["zero"]=48, + ["zeroarabic"]=1632, + ["zerobengali"]=2534, + ["zerodeva"]=2406, + ["zerogujarati"]=2790, + ["zerogurmukhi"]=2662, + ["zerohackarabic"]=1632, + ["zeroinferior"]=8320, + ["zeromonospace"]=65296, + ["zeropersian"]=1776, + ["zerosuperior"]=8304, + ["zerothai"]=3664, + ["zerowidthjoiner"]=65279, + ["zerowidthnonjoiner"]=8204, + ["zerowidthspace"]=8203, + ["zeta"]=950, + ["zhbopomofo"]=12563, + ["zhearmenian"]=1386, + ["zhebrevecyrillic"]=1218, + ["zhecyrillic"]=1078, + ["zhedescendercyrillic"]=1175, + ["zhedieresiscyrillic"]=1245, + ["zihiragana"]=12376, + ["zikatakana"]=12472, + ["zinorhebrew"]=1454, + ["zlinebelow"]=7829, + ["zmonospace"]=65370, + ["zohiragana"]=12382, + ["zokatakana"]=12478, + ["zparen"]=9397, + ["zretroflexhook"]=656, + ["zstroke"]=438, + ["zuhiragana"]=12378, + ["zukatakana"]=12474, +} diff --git a/tex/context/base/font-agl.lua b/tex/context/base/font-agl.lua index 5ee34b028..19121c358 100644 --- a/tex/context/base/font-agl.lua +++ b/tex/context/base/font-agl.lua @@ -1,667 +1,667 @@ -if not modules then modules = { } end modules ['font-agl'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "derived from http://www.adobe.com/devnet/opentype/archives/glyphlist.txt", - original = "Adobe Glyph List, version 2.0, September 20, 2002", -} - -local allocate = utilities.storage.allocate - -local names = allocate { - -- filled from char-def.lua -} -local unicodes = allocate { - -- filled from char-def.lua -} - 
-local synonyms = { - Acyrillic = 0x0410, - Becyrillic = 0x0411, - Cdot = 0x010A, - Checyrillic = 0x0427, - Decyrillic = 0x0414, - Djecyrillic = 0x0402, - Dzecyrillic = 0x0405, - Dzhecyrillic = 0x040F, - Ecyrillic = 0x0404, - Edot = 0x0116, - Efcyrillic = 0x0424, - Elcyrillic = 0x041B, - Emcyrillic = 0x041C, - Encyrillic = 0x041D, - Ercyrillic = 0x0420, - Ereversedcyrillic = 0x042D, - Escyrillic = 0x0421, - Fitacyrillic = 0x0472, - Gcedilla = 0x0122, - Gdot = 0x0120, - Gecyrillic = 0x0413, - Gheupturncyrillic = 0x0490, - Gjecyrillic = 0x0403, - Hardsigncyrillic = 0x042A, - IAcyrillic = 0x042F, - IUcyrillic = 0x042E, - Icyrillic = 0x0406, - Idot = 0x0130, - Iecyrillic = 0x0415, - Iicyrillic = 0x0418, - Iishortcyrillic = 0x0419, - Iocyrillic = 0x0401, - Izhitsacyrillic = 0x0474, - Jecyrillic = 0x0408, - Kacyrillic = 0x041A, - Kcedilla = 0x0136, - Khacyrillic = 0x0425, - Kjecyrillic = 0x040C, - Lcedilla = 0x013B, - Ljecyrillic = 0x0409, - Ncedilla = 0x0145, - Njecyrillic = 0x040A, - Ocyrillic = 0x041E, - Odblacute = 0x0150, - Ohm = 0x2126, - Pecyrillic = 0x041F, - Rcedilla = 0x0156, - Shacyrillic = 0x0428, - Shchacyrillic = 0x0429, - Softsigncyrillic = 0x042C, - Tcedilla = 0x0162, - Tecyrillic = 0x0422, - Tsecyrillic = 0x0426, - Tshecyrillic = 0x040B, - Ucyrillic = 0x0423, - Udblacute = 0x0170, - Ushortcyrillic = 0x040E, - Vecyrillic = 0x0412, - Yatcyrillic = 0x0462, - Yericyrillic = 0x042B, - Yicyrillic = 0x0407, - Zdot = 0x017B, - Zecyrillic = 0x0417, - Zhecyrillic = 0x0416, - acutecmb = 0x0301, - acyrillic = 0x0430, - afii00208 = 0x2015, - afii08941 = 0x20A4, - afii57694 = 0xFB2A, - afii57695 = 0xFB2B, - afii57700 = 0xFB4B, - afii57705 = 0xFB1F, - afii57723 = 0xFB35, - alef = 0x05D0, - alefmaksurainitialarabic = 0xFEF3, - alefmaksuramedialarabic = 0xFEF4, - approximatelyequal = 0x2245, - asteriskaltonearabic = 0x066D, - ayin = 0x05E2, - bet = 0x05D1, - betdagesh = 0xFB31, - blackdownpointingtriangle = 0x25BC, - blackleftpointingpointer = 0x25C4, - blackrectangle = 0x25AC, - blackrightpointingpointer = 0x25BA, - blacksmilingface = 0x263B, - blacksquare = 0x25A0, - blackuppointingtriangle = 0x25B2, - bulletinverse = 0x25D8, - cdot = 0x010B, - compass = 0x263C, - dagesh = 0x05BC, - dalet = 0x05D3, - daletdagesh = 0xFB33, - dalethatafpatah = 0x05D3, - dalethatafpatahhebrew = 0x05D3, - dalethatafsegol = 0x05D3, - dalethatafsegolhebrew = 0x05D3, - dalethebrew = 0x05D3, - dalethiriq = 0x05D3, - dalethiriqhebrew = 0x05D3, - daletholam = 0x05D3, - daletholamhebrew = 0x05D3, - daletpatah = 0x05D3, - daletpatahhebrew = 0x05D3, - daletqamats = 0x05D3, - daletqamatshebrew = 0x05D3, - daletqubuts = 0x05D3, - daletqubutshebrew = 0x05D3, - daletsegol = 0x05D3, - daletsegolhebrew = 0x05D3, - daletsheva = 0x05D3, - daletshevahebrew = 0x05D3, - dalettsere = 0x05D3, - dammaarabic = 0x064F, - dammatanaltonearabic = 0x064C, - dargahebrew = 0x05A7, - dbllowline = 0x2017, - decimalseparatorarabic = 0x066B, - dialytikatonos = 0x0385, - dotbelowcmb = 0x0323, - doubleyodpatah = 0xFB1F, - doubleyodpatahhebrew = 0xFB1F, - edot = 0x0117, - eightarabic = 0x0668, - eighthnotebeamed = 0x266B, - etnahtafoukhhebrew = 0x0591, - etnahtafoukhlefthebrew = 0x0591, - etnahtahebrew = 0x0591, - fathaarabic = 0x064E, - finalkaf = 0x05DA, - finalkafdagesh = 0xFB3A, - finalkafhebrew = 0x05DA, - finalkafqamats = 0x05DA, - finalkafqamatshebrew = 0x05DA, - finalkafsheva = 0x05DA, - finalmem = 0x05DD, - finalnun = 0x05DF, - finalpe = 0x05E3, - finaltsadi = 0x05E5, - fivearabic = 0x0665, - forall = 0x2200, - fourarabic = 0x0664, - gcedilla 
= 0x0123, - gdot = 0x0121, - gimel = 0x05D2, - gimeldagesh = 0xFB32, - gravecmb = 0x0300, - haaltonearabic = 0x06C1, - hamzaarabic = 0x0621, - hamzadammaarabic = 0x0621, - hamzadammatanarabic = 0x0621, - hamzafathaarabic = 0x0621, - hamzafathatanarabic = 0x0621, - hamzalowarabic = 0x0621, - hamzalowkasraarabic = 0x0621, - hamzalowkasratanarabic = 0x0621, - hatafpatah = 0x05B2, - hatafpatah16 = 0x05B2, - hatafpatah23 = 0x05B2, - hatafpatah2f = 0x05B2, - hatafpatahhebrew = 0x05B2, - hatafpatahnarrowhebrew = 0x05B2, - hatafpatahquarterhebrew = 0x05B2, - hatafqamats = 0x05B3, - hatafqamats1b = 0x05B3, - hatafqamats28 = 0x05B3, - hatafqamats34 = 0x05B3, - hatafqamatshebrew = 0x05B3, - hatafqamatsnarrowhebrew = 0x05B3, - hatafqamatsquarterhebrew = 0x05B3, - hatafsegol = 0x05B1, - hatafsegol17 = 0x05B1, - hatafsegol24 = 0x05B1, - hatafsegol30 = 0x05B1, - hatafsegolhebrew = 0x05B1, - hatafsegolnarrowhebrew = 0x05B1, - hatafsegolquarterhebrew = 0x05B1, - he = 0x05D4, - hedagesh = 0xFB34, - hehfinalalttwoarabic = 0xFEEA, - het = 0x05D7, - hiriq = 0x05B4, - hiriq14 = 0x05B4, - hiriq21 = 0x05B4, - hiriq2d = 0x05B4, - hiriqhebrew = 0x05B4, - hiriqnarrowhebrew = 0x05B4, - hiriqquarterhebrew = 0x05B4, - holam = 0x05B9, - holam19 = 0x05B9, - holam26 = 0x05B9, - holam32 = 0x05B9, - holamhebrew = 0x05B9, - holamnarrowhebrew = 0x05B9, - holamquarterhebrew = 0x05B9, - ilde = 0x02DC, - integralbottom = 0x2321, - integraltop = 0x2320, - kaf = 0x05DB, - kafdagesh = 0xFB3B, - kashidaautoarabic = 0x0640, - kashidaautonosidebearingarabic = 0x0640, - kcedilla = 0x0137, - lamed = 0x05DC, - lameddagesh = 0xFB3C, - lamedhebrew = 0x05DC, - lamedholam = 0x05DC, - lamedholamdagesh = 0x05DC, - lamedholamdageshhebrew = 0x05DC, - laminitialarabic = 0xFEDF, - lammeemjeeminitialarabic = 0xFEDF, - lcedilla = 0x013C, - logicalnotreversed = 0x2310, - mahapakhhebrew = 0x05A4, - mem = 0x05DE, - memdagesh = 0xFB3E, - merkhahebrew = 0x05A5, - merkhakefulahebrew = 0x05A6, - middot = 0x00B7, - munahhebrew = 0x05A3, - nbspace = 0x00A0, - ncedilla = 0x0146, - newsheqelsign = 0x20AA, - ninearabic = 0x0669, - noonhehinitialarabic = 0xFEE7, - nun = 0x05E0, - nundagesh = 0xFB40, - odblacute = 0x0151, - onearabic = 0x0661, - overscore = 0x00AF, - patah = 0x05B7, - patah11 = 0x05B7, - patah1d = 0x05B7, - patah2a = 0x05B7, - patahhebrew = 0x05B7, - patahnarrowhebrew = 0x05B7, - patahquarterhebrew = 0x05B7, - pe = 0x05E4, - pedagesh = 0xFB44, - qamats = 0x05B8, - qamats10 = 0x05B8, - qamats1a = 0x05B8, - qamats1c = 0x05B8, - qamats27 = 0x05B8, - qamats29 = 0x05B8, - qamats33 = 0x05B8, - qamatsde = 0x05B8, - qamatshebrew = 0x05B8, - qamatsnarrowhebrew = 0x05B8, - qamatsqatanhebrew = 0x05B8, - qamatsqatannarrowhebrew = 0x05B8, - qamatsqatanquarterhebrew = 0x05B8, - qamatsqatanwidehebrew = 0x05B8, - qamatsquarterhebrew = 0x05B8, - qof = 0x05E7, - qofdagesh = 0xFB47, - qofhatafpatah = 0x05E7, - qofhatafpatahhebrew = 0x05E7, - qofhatafsegol = 0x05E7, - qofhatafsegolhebrew = 0x05E7, - qofhebrew = 0x05E7, - qofhiriq = 0x05E7, - qofhiriqhebrew = 0x05E7, - qofholam = 0x05E7, - qofholamhebrew = 0x05E7, - qofpatah = 0x05E7, - qofpatahhebrew = 0x05E7, - qofqamats = 0x05E7, - qofqamatshebrew = 0x05E7, - qofqubuts = 0x05E7, - qofqubutshebrew = 0x05E7, - qofsegol = 0x05E7, - qofsegolhebrew = 0x05E7, - qofsheva = 0x05E7, - qofshevahebrew = 0x05E7, - qoftsere = 0x05E7, - qubuts = 0x05BB, - qubuts18 = 0x05BB, - qubuts25 = 0x05BB, - qubuts31 = 0x05BB, - qubutshebrew = 0x05BB, - qubutsnarrowhebrew = 0x05BB, - qubutsquarterhebrew = 0x05BB, - quoteleftreversed = 
0x201B, - rafe = 0x05BF, - rcedilla = 0x0157, - reharabic = 0x0631, - resh = 0x05E8, - reshhatafpatah = 0x05E8, - reshhatafpatahhebrew = 0x05E8, - reshhatafsegol = 0x05E8, - reshhatafsegolhebrew = 0x05E8, - reshhebrew = 0x05E8, - reshhiriq = 0x05E8, - reshhiriqhebrew = 0x05E8, - reshholam = 0x05E8, - reshholamhebrew = 0x05E8, - reshpatah = 0x05E8, - reshpatahhebrew = 0x05E8, - reshqamats = 0x05E8, - reshqamatshebrew = 0x05E8, - reshqubuts = 0x05E8, - reshqubutshebrew = 0x05E8, - reshsegol = 0x05E8, - reshsegolhebrew = 0x05E8, - reshsheva = 0x05E8, - reshshevahebrew = 0x05E8, - reshtsere = 0x05E8, - reviahebrew = 0x0597, - samekh = 0x05E1, - samekhdagesh = 0xFB41, - segol = 0x05B6, - segol13 = 0x05B6, - segol1f = 0x05B6, - segol2c = 0x05B6, - segolhebrew = 0x05B6, - segolnarrowhebrew = 0x05B6, - segolquarterhebrew = 0x05B6, - sevenarabic = 0x0667, - sfthyphen = 0x00AD, - shaddaarabic = 0x0651, - sheqel = 0x20AA, - sheva = 0x05B0, - sheva115 = 0x05B0, - sheva15 = 0x05B0, - sheva22 = 0x05B0, - sheva2e = 0x05B0, - shevahebrew = 0x05B0, - shevanarrowhebrew = 0x05B0, - shevaquarterhebrew = 0x05B0, - shin = 0x05E9, - shindagesh = 0xFB49, - shindageshshindot = 0xFB2C, - shindageshsindot = 0xFB2D, - shinshindot = 0xFB2A, - shinsindot = 0xFB2B, - siluqhebrew = 0x05BD, - sixarabic = 0x0666, - tav = 0x05EA, - tavdages = 0xFB4A, - tavdagesh = 0xFB4A, - tcedilla = 0x0163, - tchehinitialarabic = 0xFB7C, - tet = 0x05D8, - tetdagesh = 0xFB38, - tevirhebrew = 0x059B, - thousandsseparatorarabic = 0x066C, - threearabic = 0x0663, - tildecmb = 0x0303, - tipehahebrew = 0x0596, - tsadi = 0x05E6, - tsadidagesh = 0xFB46, - tsere = 0x05B5, - tsere12 = 0x05B5, - tsere1e = 0x05B5, - tsere2b = 0x05B5, - tserehebrew = 0x05B5, - tserenarrowhebrew = 0x05B5, - tserequarterhebrew = 0x05B5, - twoarabic = 0x0662, - udblacute = 0x0171, - vav = 0x05D5, - vavdagesh = 0xFB35, - vavdagesh65 = 0xFB35, - vavholam = 0xFB4B, - yerahbenyomohebrew = 0x05AA, - yod = 0x05D9, - yoddagesh = 0xFB39, - zayin = 0x05D6, - zayindagesh = 0xFB36, - zdot = 0x017C, - zeroarabic = 0x0660, -} - -local extras = allocate { -- private extensions - Dcroat = 0x0110, - Delta = 0x2206, - Euro = 0x20AC, - H18533 = 0x25CF, - H18543 = 0x25AA, - H18551 = 0x25AB, - H22073 = 0x25A1, - Ldot = 0x013F, - Oslashacute = 0x01FE, - SF10000 = 0x250C, - SF20000 = 0x2514, - SF30000 = 0x2510, - SF40000 = 0x2518, - SF50000 = 0x253C, - SF60000 = 0x252C, - SF70000 = 0x2534, - SF80000 = 0x251C, - SF90000 = 0x2524, - Upsilon1 = 0x03D2, - afii10066 = 0x0431, - afii10067 = 0x0432, - afii10068 = 0x0433, - afii10069 = 0x0434, - afii10070 = 0x0435, - afii10071 = 0x0451, - afii10072 = 0x0436, - afii10073 = 0x0437, - afii10074 = 0x0438, - afii10075 = 0x0439, - afii10076 = 0x043A, - afii10077 = 0x043B, - afii10078 = 0x043C, - afii10079 = 0x043D, - afii10080 = 0x043E, - afii10081 = 0x043F, - afii10082 = 0x0440, - afii10083 = 0x0441, - afii10084 = 0x0442, - afii10085 = 0x0443, - afii10086 = 0x0444, - afii10087 = 0x0445, - afii10088 = 0x0446, - afii10089 = 0x0447, - afii10090 = 0x0448, - afii10091 = 0x0449, - afii10092 = 0x044A, - afii10093 = 0x044B, - afii10094 = 0x044C, - afii10095 = 0x044D, - afii10096 = 0x044E, - afii10097 = 0x044F, - afii10098 = 0x0491, - afii10099 = 0x0452, - afii10100 = 0x0453, - afii10101 = 0x0454, - afii10102 = 0x0455, - afii10103 = 0x0456, - afii10104 = 0x0457, - afii10105 = 0x0458, - afii10106 = 0x0459, - afii10107 = 0x045A, - afii10108 = 0x045B, - afii10109 = 0x045C, - afii10110 = 0x045E, - afii10193 = 0x045F, - afii10194 = 0x0463, - afii10195 = 0x0473, - 
afii10196 = 0x0475, - afii10846 = 0x04D9, - afii208 = 0x2015, - afii57381 = 0x066A, - afii57388 = 0x060C, - afii57392 = 0x0660, - afii57393 = 0x0661, - afii57394 = 0x0662, - afii57395 = 0x0663, - afii57396 = 0x0664, - afii57397 = 0x0665, - afii57398 = 0x0666, - afii57399 = 0x0667, - afii57400 = 0x0668, - afii57401 = 0x0669, - afii57403 = 0x061B, - afii57407 = 0x061F, - afii57409 = 0x0621, - afii57410 = 0x0622, - afii57411 = 0x0623, - afii57412 = 0x0624, - afii57413 = 0x0625, - afii57414 = 0x0626, - afii57415 = 0x0627, - afii57416 = 0x0628, - afii57417 = 0x0629, - afii57418 = 0x062A, - afii57419 = 0x062B, - afii57420 = 0x062C, - afii57421 = 0x062D, - afii57422 = 0x062E, - afii57423 = 0x062F, - afii57424 = 0x0630, - afii57425 = 0x0631, - afii57426 = 0x0632, - afii57427 = 0x0633, - afii57428 = 0x0634, - afii57429 = 0x0635, - afii57430 = 0x0636, - afii57431 = 0x0637, - afii57432 = 0x0638, - afii57433 = 0x0639, - afii57434 = 0x063A, - afii57440 = 0x0640, - afii57441 = 0x0641, - afii57442 = 0x0642, - afii57443 = 0x0643, - afii57444 = 0x0644, - afii57445 = 0x0645, - afii57446 = 0x0646, - afii57448 = 0x0648, - afii57449 = 0x0649, - afii57450 = 0x064A, - afii57451 = 0x064B, - afii57452 = 0x064C, - afii57453 = 0x064D, - afii57454 = 0x064E, - afii57455 = 0x064F, - afii57456 = 0x0650, - afii57457 = 0x0651, - afii57458 = 0x0652, - afii57470 = 0x0647, - afii57505 = 0x06A4, - afii57506 = 0x067E, - afii57507 = 0x0686, - afii57508 = 0x0698, - afii57509 = 0x06AF, - afii57511 = 0x0679, - afii57512 = 0x0688, - afii57513 = 0x0691, - afii57514 = 0x06BA, - afii57519 = 0x06D2, - afii57636 = 0x20AA, - afii57645 = 0x05BE, - afii57658 = 0x05C3, - afii57664 = 0x05D0, - afii57665 = 0x05D1, - afii57666 = 0x05D2, - afii57667 = 0x05D3, - afii57668 = 0x05D4, - afii57669 = 0x05D5, - afii57670 = 0x05D6, - afii57671 = 0x05D7, - afii57672 = 0x05D8, - afii57673 = 0x05D9, - afii57674 = 0x05DA, - afii57675 = 0x05DB, - afii57676 = 0x05DC, - afii57677 = 0x05DD, - afii57678 = 0x05DE, - afii57679 = 0x05DF, - afii57680 = 0x05E0, - afii57681 = 0x05E1, - afii57682 = 0x05E2, - afii57683 = 0x05E3, - afii57684 = 0x05E4, - afii57685 = 0x05E5, - afii57686 = 0x05E6, - afii57687 = 0x05E7, - afii57688 = 0x05E8, - afii57689 = 0x05E9, - afii57690 = 0x05EA, - afii57716 = 0x05F0, - afii57717 = 0x05F1, - afii57718 = 0x05F2, - afii57793 = 0x05B4, - afii57794 = 0x05B5, - afii57795 = 0x05B6, - afii57796 = 0x05BB, - afii57797 = 0x05B8, - afii57798 = 0x05B7, - afii57799 = 0x05B0, - afii57800 = 0x05B2, - afii57801 = 0x05B1, - afii57802 = 0x05B3, - afii57803 = 0x05C2, - afii57804 = 0x05C1, - afii57806 = 0x05B9, - afii57807 = 0x05BC, - afii57839 = 0x05BD, - afii57841 = 0x05BF, - afii57842 = 0x05C0, - afii57929 = 0x02BC, - afii61248 = 0x2105, - afii61289 = 0x2113, - afii61352 = 0x2116, - afii61664 = 0x200C, - afii63167 = 0x066D, - afii64937 = 0x02BD, - arrowdblboth = 0x21D4, - arrowdblleft = 0x21D0, - arrowdblright = 0x21D2, - arrowupdnbse = 0x21A8, - bar = 0x007C, - circle = 0x25CB, - circlemultiply = 0x2297, - circleplus = 0x2295, - club = 0x2663, - colonmonetary = 0x20A1, - dcroat = 0x0111, - dkshade = 0x2593, - existential = 0x2203, - female = 0x2640, - gradient = 0x2207, - heart = 0x2665, - hookabovecomb = 0x0309, - invcircle = 0x25D9, - ldot = 0x0140, - longs = 0x017F, - ltshade = 0x2591, - male = 0x2642, - mu = 0x00B5, - napostrophe = 0x0149, - notelement = 0x2209, - omega1 = 0x03D6, - openbullet = 0x25E6, - orthogonal = 0x221F, - oslashacute = 0x01FF, - phi1 = 0x03D5, - propersubset = 0x2282, - propersuperset = 0x2283, - reflexsubset = 0x2286, - 
reflexsuperset = 0x2287, - shade = 0x2592, - sigma1 = 0x03C2, - similar = 0x223C, - smileface = 0x263A, - spacehackarabic = 0x0020, - spade = 0x2660, - theta1 = 0x03D1, - twodotenleader = 0x2025, -} - -for u, c in next, characters.data do - local a = c.adobename - if a then - unicodes[a] = u - names [u] = a - end -end - -for a, u in next, extras do - unicodes[a] = u - if not names[u] then - names[u] = a - end -end - -for s, u in next, synonyms do - unicodes[s] = u - if not names[u] then - names[u] = s - end -end - --- We load this table only when needed. We could use a loading mechanism --- return the table but there are no more vectors like this so why bother. - -fonts.encodings = fonts.encodings or { } - -fonts.encodings.agl = { - names = names, -- unicode -> name - unicodes = unicodes, -- name -> unicode - synonyms = synonyms, -- merged into the other two - extras = extras, -- merged into the other two -} +if not modules then modules = { } end modules ['font-agl'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "derived from http://www.adobe.com/devnet/opentype/archives/glyphlist.txt", + original = "Adobe Glyph List, version 2.0, September 20, 2002", +} + +local allocate = utilities.storage.allocate + +local names = allocate { + -- filled from char-def.lua +} +local unicodes = allocate { + -- filled from char-def.lua +} + +local synonyms = { + Acyrillic = 0x0410, + Becyrillic = 0x0411, + Cdot = 0x010A, + Checyrillic = 0x0427, + Decyrillic = 0x0414, + Djecyrillic = 0x0402, + Dzecyrillic = 0x0405, + Dzhecyrillic = 0x040F, + Ecyrillic = 0x0404, + Edot = 0x0116, + Efcyrillic = 0x0424, + Elcyrillic = 0x041B, + Emcyrillic = 0x041C, + Encyrillic = 0x041D, + Ercyrillic = 0x0420, + Ereversedcyrillic = 0x042D, + Escyrillic = 0x0421, + Fitacyrillic = 0x0472, + Gcedilla = 0x0122, + Gdot = 0x0120, + Gecyrillic = 0x0413, + Gheupturncyrillic = 0x0490, + Gjecyrillic = 0x0403, + Hardsigncyrillic = 0x042A, + IAcyrillic = 0x042F, + IUcyrillic = 0x042E, + Icyrillic = 0x0406, + Idot = 0x0130, + Iecyrillic = 0x0415, + Iicyrillic = 0x0418, + Iishortcyrillic = 0x0419, + Iocyrillic = 0x0401, + Izhitsacyrillic = 0x0474, + Jecyrillic = 0x0408, + Kacyrillic = 0x041A, + Kcedilla = 0x0136, + Khacyrillic = 0x0425, + Kjecyrillic = 0x040C, + Lcedilla = 0x013B, + Ljecyrillic = 0x0409, + Ncedilla = 0x0145, + Njecyrillic = 0x040A, + Ocyrillic = 0x041E, + Odblacute = 0x0150, + Ohm = 0x2126, + Pecyrillic = 0x041F, + Rcedilla = 0x0156, + Shacyrillic = 0x0428, + Shchacyrillic = 0x0429, + Softsigncyrillic = 0x042C, + Tcedilla = 0x0162, + Tecyrillic = 0x0422, + Tsecyrillic = 0x0426, + Tshecyrillic = 0x040B, + Ucyrillic = 0x0423, + Udblacute = 0x0170, + Ushortcyrillic = 0x040E, + Vecyrillic = 0x0412, + Yatcyrillic = 0x0462, + Yericyrillic = 0x042B, + Yicyrillic = 0x0407, + Zdot = 0x017B, + Zecyrillic = 0x0417, + Zhecyrillic = 0x0416, + acutecmb = 0x0301, + acyrillic = 0x0430, + afii00208 = 0x2015, + afii08941 = 0x20A4, + afii57694 = 0xFB2A, + afii57695 = 0xFB2B, + afii57700 = 0xFB4B, + afii57705 = 0xFB1F, + afii57723 = 0xFB35, + alef = 0x05D0, + alefmaksurainitialarabic = 0xFEF3, + alefmaksuramedialarabic = 0xFEF4, + approximatelyequal = 0x2245, + asteriskaltonearabic = 0x066D, + ayin = 0x05E2, + bet = 0x05D1, + betdagesh = 0xFB31, + blackdownpointingtriangle = 0x25BC, + blackleftpointingpointer = 0x25C4, + blackrectangle = 0x25AC, + blackrightpointingpointer = 0x25BA, + blacksmilingface = 0x263B, + blacksquare = 0x25A0, + blackuppointingtriangle = 
0x25B2, + bulletinverse = 0x25D8, + cdot = 0x010B, + compass = 0x263C, + dagesh = 0x05BC, + dalet = 0x05D3, + daletdagesh = 0xFB33, + dalethatafpatah = 0x05D3, + dalethatafpatahhebrew = 0x05D3, + dalethatafsegol = 0x05D3, + dalethatafsegolhebrew = 0x05D3, + dalethebrew = 0x05D3, + dalethiriq = 0x05D3, + dalethiriqhebrew = 0x05D3, + daletholam = 0x05D3, + daletholamhebrew = 0x05D3, + daletpatah = 0x05D3, + daletpatahhebrew = 0x05D3, + daletqamats = 0x05D3, + daletqamatshebrew = 0x05D3, + daletqubuts = 0x05D3, + daletqubutshebrew = 0x05D3, + daletsegol = 0x05D3, + daletsegolhebrew = 0x05D3, + daletsheva = 0x05D3, + daletshevahebrew = 0x05D3, + dalettsere = 0x05D3, + dammaarabic = 0x064F, + dammatanaltonearabic = 0x064C, + dargahebrew = 0x05A7, + dbllowline = 0x2017, + decimalseparatorarabic = 0x066B, + dialytikatonos = 0x0385, + dotbelowcmb = 0x0323, + doubleyodpatah = 0xFB1F, + doubleyodpatahhebrew = 0xFB1F, + edot = 0x0117, + eightarabic = 0x0668, + eighthnotebeamed = 0x266B, + etnahtafoukhhebrew = 0x0591, + etnahtafoukhlefthebrew = 0x0591, + etnahtahebrew = 0x0591, + fathaarabic = 0x064E, + finalkaf = 0x05DA, + finalkafdagesh = 0xFB3A, + finalkafhebrew = 0x05DA, + finalkafqamats = 0x05DA, + finalkafqamatshebrew = 0x05DA, + finalkafsheva = 0x05DA, + finalmem = 0x05DD, + finalnun = 0x05DF, + finalpe = 0x05E3, + finaltsadi = 0x05E5, + fivearabic = 0x0665, + forall = 0x2200, + fourarabic = 0x0664, + gcedilla = 0x0123, + gdot = 0x0121, + gimel = 0x05D2, + gimeldagesh = 0xFB32, + gravecmb = 0x0300, + haaltonearabic = 0x06C1, + hamzaarabic = 0x0621, + hamzadammaarabic = 0x0621, + hamzadammatanarabic = 0x0621, + hamzafathaarabic = 0x0621, + hamzafathatanarabic = 0x0621, + hamzalowarabic = 0x0621, + hamzalowkasraarabic = 0x0621, + hamzalowkasratanarabic = 0x0621, + hatafpatah = 0x05B2, + hatafpatah16 = 0x05B2, + hatafpatah23 = 0x05B2, + hatafpatah2f = 0x05B2, + hatafpatahhebrew = 0x05B2, + hatafpatahnarrowhebrew = 0x05B2, + hatafpatahquarterhebrew = 0x05B2, + hatafqamats = 0x05B3, + hatafqamats1b = 0x05B3, + hatafqamats28 = 0x05B3, + hatafqamats34 = 0x05B3, + hatafqamatshebrew = 0x05B3, + hatafqamatsnarrowhebrew = 0x05B3, + hatafqamatsquarterhebrew = 0x05B3, + hatafsegol = 0x05B1, + hatafsegol17 = 0x05B1, + hatafsegol24 = 0x05B1, + hatafsegol30 = 0x05B1, + hatafsegolhebrew = 0x05B1, + hatafsegolnarrowhebrew = 0x05B1, + hatafsegolquarterhebrew = 0x05B1, + he = 0x05D4, + hedagesh = 0xFB34, + hehfinalalttwoarabic = 0xFEEA, + het = 0x05D7, + hiriq = 0x05B4, + hiriq14 = 0x05B4, + hiriq21 = 0x05B4, + hiriq2d = 0x05B4, + hiriqhebrew = 0x05B4, + hiriqnarrowhebrew = 0x05B4, + hiriqquarterhebrew = 0x05B4, + holam = 0x05B9, + holam19 = 0x05B9, + holam26 = 0x05B9, + holam32 = 0x05B9, + holamhebrew = 0x05B9, + holamnarrowhebrew = 0x05B9, + holamquarterhebrew = 0x05B9, + ilde = 0x02DC, + integralbottom = 0x2321, + integraltop = 0x2320, + kaf = 0x05DB, + kafdagesh = 0xFB3B, + kashidaautoarabic = 0x0640, + kashidaautonosidebearingarabic = 0x0640, + kcedilla = 0x0137, + lamed = 0x05DC, + lameddagesh = 0xFB3C, + lamedhebrew = 0x05DC, + lamedholam = 0x05DC, + lamedholamdagesh = 0x05DC, + lamedholamdageshhebrew = 0x05DC, + laminitialarabic = 0xFEDF, + lammeemjeeminitialarabic = 0xFEDF, + lcedilla = 0x013C, + logicalnotreversed = 0x2310, + mahapakhhebrew = 0x05A4, + mem = 0x05DE, + memdagesh = 0xFB3E, + merkhahebrew = 0x05A5, + merkhakefulahebrew = 0x05A6, + middot = 0x00B7, + munahhebrew = 0x05A3, + nbspace = 0x00A0, + ncedilla = 0x0146, + newsheqelsign = 0x20AA, + ninearabic = 0x0669, + noonhehinitialarabic = 
0xFEE7, + nun = 0x05E0, + nundagesh = 0xFB40, + odblacute = 0x0151, + onearabic = 0x0661, + overscore = 0x00AF, + patah = 0x05B7, + patah11 = 0x05B7, + patah1d = 0x05B7, + patah2a = 0x05B7, + patahhebrew = 0x05B7, + patahnarrowhebrew = 0x05B7, + patahquarterhebrew = 0x05B7, + pe = 0x05E4, + pedagesh = 0xFB44, + qamats = 0x05B8, + qamats10 = 0x05B8, + qamats1a = 0x05B8, + qamats1c = 0x05B8, + qamats27 = 0x05B8, + qamats29 = 0x05B8, + qamats33 = 0x05B8, + qamatsde = 0x05B8, + qamatshebrew = 0x05B8, + qamatsnarrowhebrew = 0x05B8, + qamatsqatanhebrew = 0x05B8, + qamatsqatannarrowhebrew = 0x05B8, + qamatsqatanquarterhebrew = 0x05B8, + qamatsqatanwidehebrew = 0x05B8, + qamatsquarterhebrew = 0x05B8, + qof = 0x05E7, + qofdagesh = 0xFB47, + qofhatafpatah = 0x05E7, + qofhatafpatahhebrew = 0x05E7, + qofhatafsegol = 0x05E7, + qofhatafsegolhebrew = 0x05E7, + qofhebrew = 0x05E7, + qofhiriq = 0x05E7, + qofhiriqhebrew = 0x05E7, + qofholam = 0x05E7, + qofholamhebrew = 0x05E7, + qofpatah = 0x05E7, + qofpatahhebrew = 0x05E7, + qofqamats = 0x05E7, + qofqamatshebrew = 0x05E7, + qofqubuts = 0x05E7, + qofqubutshebrew = 0x05E7, + qofsegol = 0x05E7, + qofsegolhebrew = 0x05E7, + qofsheva = 0x05E7, + qofshevahebrew = 0x05E7, + qoftsere = 0x05E7, + qubuts = 0x05BB, + qubuts18 = 0x05BB, + qubuts25 = 0x05BB, + qubuts31 = 0x05BB, + qubutshebrew = 0x05BB, + qubutsnarrowhebrew = 0x05BB, + qubutsquarterhebrew = 0x05BB, + quoteleftreversed = 0x201B, + rafe = 0x05BF, + rcedilla = 0x0157, + reharabic = 0x0631, + resh = 0x05E8, + reshhatafpatah = 0x05E8, + reshhatafpatahhebrew = 0x05E8, + reshhatafsegol = 0x05E8, + reshhatafsegolhebrew = 0x05E8, + reshhebrew = 0x05E8, + reshhiriq = 0x05E8, + reshhiriqhebrew = 0x05E8, + reshholam = 0x05E8, + reshholamhebrew = 0x05E8, + reshpatah = 0x05E8, + reshpatahhebrew = 0x05E8, + reshqamats = 0x05E8, + reshqamatshebrew = 0x05E8, + reshqubuts = 0x05E8, + reshqubutshebrew = 0x05E8, + reshsegol = 0x05E8, + reshsegolhebrew = 0x05E8, + reshsheva = 0x05E8, + reshshevahebrew = 0x05E8, + reshtsere = 0x05E8, + reviahebrew = 0x0597, + samekh = 0x05E1, + samekhdagesh = 0xFB41, + segol = 0x05B6, + segol13 = 0x05B6, + segol1f = 0x05B6, + segol2c = 0x05B6, + segolhebrew = 0x05B6, + segolnarrowhebrew = 0x05B6, + segolquarterhebrew = 0x05B6, + sevenarabic = 0x0667, + sfthyphen = 0x00AD, + shaddaarabic = 0x0651, + sheqel = 0x20AA, + sheva = 0x05B0, + sheva115 = 0x05B0, + sheva15 = 0x05B0, + sheva22 = 0x05B0, + sheva2e = 0x05B0, + shevahebrew = 0x05B0, + shevanarrowhebrew = 0x05B0, + shevaquarterhebrew = 0x05B0, + shin = 0x05E9, + shindagesh = 0xFB49, + shindageshshindot = 0xFB2C, + shindageshsindot = 0xFB2D, + shinshindot = 0xFB2A, + shinsindot = 0xFB2B, + siluqhebrew = 0x05BD, + sixarabic = 0x0666, + tav = 0x05EA, + tavdages = 0xFB4A, + tavdagesh = 0xFB4A, + tcedilla = 0x0163, + tchehinitialarabic = 0xFB7C, + tet = 0x05D8, + tetdagesh = 0xFB38, + tevirhebrew = 0x059B, + thousandsseparatorarabic = 0x066C, + threearabic = 0x0663, + tildecmb = 0x0303, + tipehahebrew = 0x0596, + tsadi = 0x05E6, + tsadidagesh = 0xFB46, + tsere = 0x05B5, + tsere12 = 0x05B5, + tsere1e = 0x05B5, + tsere2b = 0x05B5, + tserehebrew = 0x05B5, + tserenarrowhebrew = 0x05B5, + tserequarterhebrew = 0x05B5, + twoarabic = 0x0662, + udblacute = 0x0171, + vav = 0x05D5, + vavdagesh = 0xFB35, + vavdagesh65 = 0xFB35, + vavholam = 0xFB4B, + yerahbenyomohebrew = 0x05AA, + yod = 0x05D9, + yoddagesh = 0xFB39, + zayin = 0x05D6, + zayindagesh = 0xFB36, + zdot = 0x017C, + zeroarabic = 0x0660, +} + +local extras = allocate { -- private extensions + 
Dcroat = 0x0110, + Delta = 0x2206, + Euro = 0x20AC, + H18533 = 0x25CF, + H18543 = 0x25AA, + H18551 = 0x25AB, + H22073 = 0x25A1, + Ldot = 0x013F, + Oslashacute = 0x01FE, + SF10000 = 0x250C, + SF20000 = 0x2514, + SF30000 = 0x2510, + SF40000 = 0x2518, + SF50000 = 0x253C, + SF60000 = 0x252C, + SF70000 = 0x2534, + SF80000 = 0x251C, + SF90000 = 0x2524, + Upsilon1 = 0x03D2, + afii10066 = 0x0431, + afii10067 = 0x0432, + afii10068 = 0x0433, + afii10069 = 0x0434, + afii10070 = 0x0435, + afii10071 = 0x0451, + afii10072 = 0x0436, + afii10073 = 0x0437, + afii10074 = 0x0438, + afii10075 = 0x0439, + afii10076 = 0x043A, + afii10077 = 0x043B, + afii10078 = 0x043C, + afii10079 = 0x043D, + afii10080 = 0x043E, + afii10081 = 0x043F, + afii10082 = 0x0440, + afii10083 = 0x0441, + afii10084 = 0x0442, + afii10085 = 0x0443, + afii10086 = 0x0444, + afii10087 = 0x0445, + afii10088 = 0x0446, + afii10089 = 0x0447, + afii10090 = 0x0448, + afii10091 = 0x0449, + afii10092 = 0x044A, + afii10093 = 0x044B, + afii10094 = 0x044C, + afii10095 = 0x044D, + afii10096 = 0x044E, + afii10097 = 0x044F, + afii10098 = 0x0491, + afii10099 = 0x0452, + afii10100 = 0x0453, + afii10101 = 0x0454, + afii10102 = 0x0455, + afii10103 = 0x0456, + afii10104 = 0x0457, + afii10105 = 0x0458, + afii10106 = 0x0459, + afii10107 = 0x045A, + afii10108 = 0x045B, + afii10109 = 0x045C, + afii10110 = 0x045E, + afii10193 = 0x045F, + afii10194 = 0x0463, + afii10195 = 0x0473, + afii10196 = 0x0475, + afii10846 = 0x04D9, + afii208 = 0x2015, + afii57381 = 0x066A, + afii57388 = 0x060C, + afii57392 = 0x0660, + afii57393 = 0x0661, + afii57394 = 0x0662, + afii57395 = 0x0663, + afii57396 = 0x0664, + afii57397 = 0x0665, + afii57398 = 0x0666, + afii57399 = 0x0667, + afii57400 = 0x0668, + afii57401 = 0x0669, + afii57403 = 0x061B, + afii57407 = 0x061F, + afii57409 = 0x0621, + afii57410 = 0x0622, + afii57411 = 0x0623, + afii57412 = 0x0624, + afii57413 = 0x0625, + afii57414 = 0x0626, + afii57415 = 0x0627, + afii57416 = 0x0628, + afii57417 = 0x0629, + afii57418 = 0x062A, + afii57419 = 0x062B, + afii57420 = 0x062C, + afii57421 = 0x062D, + afii57422 = 0x062E, + afii57423 = 0x062F, + afii57424 = 0x0630, + afii57425 = 0x0631, + afii57426 = 0x0632, + afii57427 = 0x0633, + afii57428 = 0x0634, + afii57429 = 0x0635, + afii57430 = 0x0636, + afii57431 = 0x0637, + afii57432 = 0x0638, + afii57433 = 0x0639, + afii57434 = 0x063A, + afii57440 = 0x0640, + afii57441 = 0x0641, + afii57442 = 0x0642, + afii57443 = 0x0643, + afii57444 = 0x0644, + afii57445 = 0x0645, + afii57446 = 0x0646, + afii57448 = 0x0648, + afii57449 = 0x0649, + afii57450 = 0x064A, + afii57451 = 0x064B, + afii57452 = 0x064C, + afii57453 = 0x064D, + afii57454 = 0x064E, + afii57455 = 0x064F, + afii57456 = 0x0650, + afii57457 = 0x0651, + afii57458 = 0x0652, + afii57470 = 0x0647, + afii57505 = 0x06A4, + afii57506 = 0x067E, + afii57507 = 0x0686, + afii57508 = 0x0698, + afii57509 = 0x06AF, + afii57511 = 0x0679, + afii57512 = 0x0688, + afii57513 = 0x0691, + afii57514 = 0x06BA, + afii57519 = 0x06D2, + afii57636 = 0x20AA, + afii57645 = 0x05BE, + afii57658 = 0x05C3, + afii57664 = 0x05D0, + afii57665 = 0x05D1, + afii57666 = 0x05D2, + afii57667 = 0x05D3, + afii57668 = 0x05D4, + afii57669 = 0x05D5, + afii57670 = 0x05D6, + afii57671 = 0x05D7, + afii57672 = 0x05D8, + afii57673 = 0x05D9, + afii57674 = 0x05DA, + afii57675 = 0x05DB, + afii57676 = 0x05DC, + afii57677 = 0x05DD, + afii57678 = 0x05DE, + afii57679 = 0x05DF, + afii57680 = 0x05E0, + afii57681 = 0x05E1, + afii57682 = 0x05E2, + afii57683 = 0x05E3, + afii57684 = 0x05E4, + afii57685 = 
0x05E5, + afii57686 = 0x05E6, + afii57687 = 0x05E7, + afii57688 = 0x05E8, + afii57689 = 0x05E9, + afii57690 = 0x05EA, + afii57716 = 0x05F0, + afii57717 = 0x05F1, + afii57718 = 0x05F2, + afii57793 = 0x05B4, + afii57794 = 0x05B5, + afii57795 = 0x05B6, + afii57796 = 0x05BB, + afii57797 = 0x05B8, + afii57798 = 0x05B7, + afii57799 = 0x05B0, + afii57800 = 0x05B2, + afii57801 = 0x05B1, + afii57802 = 0x05B3, + afii57803 = 0x05C2, + afii57804 = 0x05C1, + afii57806 = 0x05B9, + afii57807 = 0x05BC, + afii57839 = 0x05BD, + afii57841 = 0x05BF, + afii57842 = 0x05C0, + afii57929 = 0x02BC, + afii61248 = 0x2105, + afii61289 = 0x2113, + afii61352 = 0x2116, + afii61664 = 0x200C, + afii63167 = 0x066D, + afii64937 = 0x02BD, + arrowdblboth = 0x21D4, + arrowdblleft = 0x21D0, + arrowdblright = 0x21D2, + arrowupdnbse = 0x21A8, + bar = 0x007C, + circle = 0x25CB, + circlemultiply = 0x2297, + circleplus = 0x2295, + club = 0x2663, + colonmonetary = 0x20A1, + dcroat = 0x0111, + dkshade = 0x2593, + existential = 0x2203, + female = 0x2640, + gradient = 0x2207, + heart = 0x2665, + hookabovecomb = 0x0309, + invcircle = 0x25D9, + ldot = 0x0140, + longs = 0x017F, + ltshade = 0x2591, + male = 0x2642, + mu = 0x00B5, + napostrophe = 0x0149, + notelement = 0x2209, + omega1 = 0x03D6, + openbullet = 0x25E6, + orthogonal = 0x221F, + oslashacute = 0x01FF, + phi1 = 0x03D5, + propersubset = 0x2282, + propersuperset = 0x2283, + reflexsubset = 0x2286, + reflexsuperset = 0x2287, + shade = 0x2592, + sigma1 = 0x03C2, + similar = 0x223C, + smileface = 0x263A, + spacehackarabic = 0x0020, + spade = 0x2660, + theta1 = 0x03D1, + twodotenleader = 0x2025, +} + +for u, c in next, characters.data do + local a = c.adobename + if a then + unicodes[a] = u + names [u] = a + end +end + +for a, u in next, extras do + unicodes[a] = u + if not names[u] then + names[u] = a + end +end + +for s, u in next, synonyms do + unicodes[s] = u + if not names[u] then + names[u] = s + end +end + +-- We load this table only when needed. We could use a loading mechanism +-- return the table but there are no more vectors like this so why bother. 
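A minimal lookup sketch for the agl table that gets packaged just below (assuming this module has been loaded; the glyph names come from the vectors above, everything else is illustrative):

    local agl = fonts.encodings.agl
    -- name -> unicode, filled from the adobe names, the extras and the synonyms
    print(agl.unicodes["odblacute"]) -- 337 (0x0151), via the synonyms vector
    print(agl.unicodes["Dcroat"])    -- 272 (0x0110), via the extras vector
    -- unicode -> name, only set when the slot was not already taken
    print(agl.names[0x20AC])         -- typically "Euro", depending on what claimed the slot first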
+ +fonts.encodings = fonts.encodings or { } + +fonts.encodings.agl = { + names = names, -- unicode -> name + unicodes = unicodes, -- name -> unicode + synonyms = synonyms, -- merged into the other two + extras = extras, -- merged into the other two +} diff --git a/tex/context/base/font-aux.lua b/tex/context/base/font-aux.lua index 2a605d224..e50b69881 100644 --- a/tex/context/base/font-aux.lua +++ b/tex/context/base/font-aux.lua @@ -1,165 +1,165 @@ -if not modules then modules = { } end modules ['font-aux'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local tonumber, type = tonumber, type ------ wrap, yield = coroutine.wrap, coroutine.yield - -local fonts, font = fonts, font - -local iterators = { } -fonts.iterators = iterators - -local currentfont = font.current -local identifiers = fonts.hashes.identifiers -local sortedkeys = table.sortedkeys - --- for unicode, character in fonts.iterators.characters () do print(unicode) end --- for unicode, description in fonts.iterators.descriptions() do print(unicode) end --- for index, glyph in fonts.iterators.glyphs () do print(index ) end - -local function dummy() end - -local function checkeddata(data) -- beware, nullfont is the fallback in identifiers - local t = type(data) - if t == "table" then - return data - elseif t ~= "number" then - data = currentfont() - end - return identifiers[data] -- has nullfont as fallback -end - -local function getindices(data) - data = checkeddata(data) - local indices = { } - local characters = data.characters - if characters then - for unicode, character in next, characters do - indices[character.index or unicode] = unicode - end - end - return indices -end - --- function iterators.characters(data) --- data = checkeddata(data) --- local characters = data.characters --- if characters then --- local collected = sortedkeys(characters) --- return wrap(function() --- for c=1,#collected do --- local cc = collected[c] --- local dc = characters[cc] --- if dc then --- yield(cc,dc) --- end --- end --- end) --- else --- return wrap(function() end) --- end --- end - --- function iterators.descriptions(data) --- data = checkeddata(data) --- local characters = data.characters --- local descriptions = data.descriptions --- if characters and descriptions then --- local collected = sortedkeys(characters) --- return wrap(function() --- for c=1,#collected do --- local cc = collected[c] --- local dc = descriptions[cc] --- if dc then --- yield(cc,dc) --- end --- end --- end) --- else --- return wrap(function() end) --- end --- end - --- function iterators.glyphs(data) --- data = checkeddata(data) --- local descriptions = data.descriptions --- if descriptions then --- local indices = getindices(data) --- local collected = sortedkeys(indices) --- return wrap(function() --- for c=1,#collected do --- local cc = collected[c] --- local dc = descriptions[indices[cc]] --- if dc then --- yield(cc,dc) --- end --- end --- end) --- else --- return wrap(function() end) --- end --- end - -function iterators.characters(data) - data = checkeddata(data) - local characters = data.characters - if characters then - local collected = sortedkeys(characters) - local n, i = #collected, 0 - return function() - i = i + 1 - if i <= n then - local cc = collected[i] - local dc = characters[cc] - return cc, dc or { } - end - end - else - return dummy - end -end - -function 
iterators.descriptions(data) - data = checkeddata(data) - local characters = data.characters - local descriptions = data.descriptions - if characters and descriptions then - local collected = sortedkeys(characters) - local n, i = #collected, 0 - return function() - i = i + 1 - if i <= n then - local cc = collected[i] - local dc = descriptions[cc] - return cc, dc or { } - end - end - else - return dummy - end -end - -function iterators.glyphs(data) - data = checkeddata(data) - local descriptions = data.descriptions - if descriptions then - local indices = getindices(data) - local collected = sortedkeys(indices) - local n, i = #collected, 0 - return function() - i = i + 1 - if i <= n then - local cc = collected[i] - local dc = descriptions[indices[cc]] - return cc, dc or { } - end - end - else - return dummy - end -end +if not modules then modules = { } end modules ['font-aux'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local tonumber, type = tonumber, type +----- wrap, yield = coroutine.wrap, coroutine.yield + +local fonts, font = fonts, font + +local iterators = { } +fonts.iterators = iterators + +local currentfont = font.current +local identifiers = fonts.hashes.identifiers +local sortedkeys = table.sortedkeys + +-- for unicode, character in fonts.iterators.characters () do print(unicode) end +-- for unicode, description in fonts.iterators.descriptions() do print(unicode) end +-- for index, glyph in fonts.iterators.glyphs () do print(index ) end + +local function dummy() end + +local function checkeddata(data) -- beware, nullfont is the fallback in identifiers + local t = type(data) + if t == "table" then + return data + elseif t ~= "number" then + data = currentfont() + end + return identifiers[data] -- has nullfont as fallback +end + +local function getindices(data) + data = checkeddata(data) + local indices = { } + local characters = data.characters + if characters then + for unicode, character in next, characters do + indices[character.index or unicode] = unicode + end + end + return indices +end + +-- function iterators.characters(data) +-- data = checkeddata(data) +-- local characters = data.characters +-- if characters then +-- local collected = sortedkeys(characters) +-- return wrap(function() +-- for c=1,#collected do +-- local cc = collected[c] +-- local dc = characters[cc] +-- if dc then +-- yield(cc,dc) +-- end +-- end +-- end) +-- else +-- return wrap(function() end) +-- end +-- end + +-- function iterators.descriptions(data) +-- data = checkeddata(data) +-- local characters = data.characters +-- local descriptions = data.descriptions +-- if characters and descriptions then +-- local collected = sortedkeys(characters) +-- return wrap(function() +-- for c=1,#collected do +-- local cc = collected[c] +-- local dc = descriptions[cc] +-- if dc then +-- yield(cc,dc) +-- end +-- end +-- end) +-- else +-- return wrap(function() end) +-- end +-- end + +-- function iterators.glyphs(data) +-- data = checkeddata(data) +-- local descriptions = data.descriptions +-- if descriptions then +-- local indices = getindices(data) +-- local collected = sortedkeys(indices) +-- return wrap(function() +-- for c=1,#collected do +-- local cc = collected[c] +-- local dc = descriptions[indices[cc]] +-- if dc then +-- yield(cc,dc) +-- end +-- end +-- end) +-- else +-- return wrap(function() end) +-- end +-- end + +function 
iterators.characters(data) + data = checkeddata(data) + local characters = data.characters + if characters then + local collected = sortedkeys(characters) + local n, i = #collected, 0 + return function() + i = i + 1 + if i <= n then + local cc = collected[i] + local dc = characters[cc] + return cc, dc or { } + end + end + else + return dummy + end +end + +function iterators.descriptions(data) + data = checkeddata(data) + local characters = data.characters + local descriptions = data.descriptions + if characters and descriptions then + local collected = sortedkeys(characters) + local n, i = #collected, 0 + return function() + i = i + 1 + if i <= n then + local cc = collected[i] + local dc = descriptions[cc] + return cc, dc or { } + end + end + else + return dummy + end +end + +function iterators.glyphs(data) + data = checkeddata(data) + local descriptions = data.descriptions + if descriptions then + local indices = getindices(data) + local collected = sortedkeys(indices) + local n, i = #collected, 0 + return function() + i = i + 1 + if i <= n then + local cc = collected[i] + local dc = descriptions[indices[cc]] + return cc, dc or { } + end + end + else + return dummy + end +end diff --git a/tex/context/base/font-chk.lua b/tex/context/base/font-chk.lua index 1b89366fd..9e420744a 100644 --- a/tex/context/base/font-chk.lua +++ b/tex/context/base/font-chk.lua @@ -1,359 +1,359 @@ -if not modules then modules = { } end modules ['font-chk'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- possible optimization: delayed initialization of vectors --- move to the nodes namespace - -local format = string.format -local bpfactor = number.dimenfactors.bp - -local report_fonts = logs.reporter("fonts","checking") - -local fonts = fonts - -fonts.checkers = fonts.checkers or { } -local checkers = fonts.checkers - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers -local fontcharacters = fonthashes.characters - -local addprivate = fonts.helpers.addprivate -local hasprivate = fonts.helpers.hasprivate -local getprivatenode = fonts.helpers.getprivatenode - -local otffeatures = fonts.constructors.newfeatures("otf") -local registerotffeature = otffeatures.register - -local is_character = characters.is_character -local chardata = characters.data - -local tasks = nodes.tasks -local enableaction = tasks.enableaction -local disableaction = tasks.disableaction - -local glyph_code = nodes.nodecodes.glyph -local traverse_id = node.traverse_id -local remove_node = nodes.remove -local insert_node_after = node.insert_after - --- maybe in fonts namespace --- deletion can be option - -local action = false - --- to tfmdata.properties ? 
- -local function onetimemessage(font,char,message) -- char == false returns table - local tfmdata = fontdata[font] - local shared = tfmdata.shared - local messages = shared.messages - if not messages then - messages = { } - shared.messages = messages - end - local category = messages[message] - if not category then - category = { } - messages[message] = category - end - if char == false then - return table.sortedkeys(category) - elseif not category[char] then - report_fonts("char %U in font %a with id %a: %s",char,tfmdata.properties.fullname,font,message) - category[char] = true - end -end - -fonts.loggers.onetimemessage = onetimemessage - -local mapping = { -- this is just an experiment to illustrate some principles elsewhere - lu = "placeholder uppercase red", - ll = "placeholder lowercase red", - lt = "placeholder uppercase red", - lm = "placeholder lowercase red", - lo = "placeholder lowercase red", - mn = "placeholder mark green", - mc = "placeholder mark green", - me = "placeholder mark green", - nd = "placeholder lowercase blue", - nl = "placeholder lowercase blue", - no = "placeholder lowercase blue", - pc = "placeholder punctuation cyan", - pd = "placeholder punctuation cyan", - ps = "placeholder punctuation cyan", - pe = "placeholder punctuation cyan", - pi = "placeholder punctuation cyan", - pf = "placeholder punctuation cyan", - po = "placeholder punctuation cyan", - sm = "placeholder lowercase magenta", - sc = "placeholder lowercase yellow", - sk = "placeholder lowercase yellow", - so = "placeholder lowercase yellow", -} - -table.setmetatableindex(mapping,function(t,k) v = "placeholder unknown gray" t[k] = v return v end) - -local fakes = { - { - name = "lowercase", - code = ".025 -.175 m .425 -.175 l .425 .525 l .025 .525 l .025 -.175 l .025 0 l .425 0 l .025 -.175 m h S", - width = .45, - height = .55, - depth = .20, - }, - { - name = "uppercase", - code = ".025 -.225 m .625 -.225 l .625 .675 l .025 .675 l .025 -.225 l .025 0 l .625 0 l .025 -.225 m h S", - width = .65, - height = .70, - depth = .25, - }, - { - name = "mark", - code = ".025 .475 m .125 .475 l .125 .675 l .025 .675 l .025 .475 l h B", - width = .15, - height = .70, - depth = -.50, - }, - { - name = "punctuation", - code = ".025 -.175 m .125 -.175 l .125 .525 l .025 .525 l .025 -.175 l h B", - width = .15, - height = .55, - depth = .20, - }, - { - name = "unknown", - code = ".025 0 m .425 0 l .425 .175 l .025 .175 l .025 0 l h B", - width = .45, - height = .20, - depth = 0, - }, -} - -local variants = { - { tag = "gray", r = .6, g = .6, b = .6 }, - { tag = "red", r = .6, g = 0, b = 0 }, - { tag = "green", r = 0, g = .6, b = 0 }, - { tag = "blue", r = 0, g = 0, b = .6 }, - { tag = "cyan", r = 0, g = .6, b = .6 }, - { tag = "magenta", r = .6, g = 0, b = .6 }, - { tag = "yellow", r = .6, g = .6, b = 0 }, -} - -local package = "q %0.6f 0 0 %0.6f 0 0 cm %s %s %s rg %s %s %s RG 10 M 1 j 1 J 0.05 w %s Q" - -local cache = { } -- saves some tables but not that impressive - -local function addmissingsymbols(tfmdata) -- we can have an alternative with rules - local characters = tfmdata.characters - local size = tfmdata.parameters.size - local privates = tfmdata.properties.privates - local scale = size * bpfactor - for i=1,#variants do - local v = variants[i] - local tag, r, g, b = v.tag, v.r, v.g, v.b - for i =1, #fakes do - local fake = fakes[i] - local name = fake.name - local privatename = format("placeholder %s %s",name,tag) - if not hasprivate(tfmdata,privatename) then - local hash = 
format("%s_%s_%s_%s_%s_%s",name,tag,r,g,b,size) - local char = cache[hash] - if not char then - char = { - width = size*fake.width, - height = size*fake.height, - depth = size*fake.depth, - -- bah .. low level pdf ... should be a rule or plugged in - commands = { { "special", "pdf: " .. format(package,scale,scale,r,g,b,r,g,b,fake.code) } } - } - cache[hash] = char - end - addprivate(tfmdata, privatename, char) - end - end - end -end - -registerotffeature { - name = "missing", - description = "missing symbols", - manipulators = { - base = addmissingsymbols, - node = addmissingsymbols, - } -} - -fonts.loggers.add_placeholders = function(id) addmissingsymbols(fontdata[id or true]) end -fonts.loggers.category_to_placeholder = mapping - -function commands.getplaceholderchar(name) - local id = font.current() - addmissingsymbols(fontdata[id]) - context(fonts.helpers.getprivatenode(fontdata[id],name)) -end - -function checkers.missing(head) - local lastfont, characters, found = nil, nil, nil - for n in traverse_id(glyph_code,head) do -- faster than while loop so we delay removal - local font = n.font - local char = n.char - if font ~= lastfont then - characters = fontcharacters[font] - end - if not characters[char] and is_character[chardata[char].category] then - if action == "remove" then - onetimemessage(font,char,"missing (will be deleted)") - elseif action == "replace" then - onetimemessage(font,char,"missing (will be flagged)") - else - onetimemessage(font,char,"missing") - end - if not found then - found = { n } - else - found[#found+1] = n - end - end - end - if not found then - -- all well - elseif action == "remove" then - for i=1,#found do - head = remove_node(head,found[i],true) - end - elseif action == "replace" then - for i=1,#found do - local n = found[i] - local font = n.font - local char = n.char - local tfmdata = fontdata[font] - local properties = tfmdata.properties - local privates = properties.privates - local category = chardata[char].category - local fakechar = mapping[category] - local p = privates and privates[fakechar] - if not p then - addmissingsymbols(tfmdata) - p = properties.privates[fakechar] - end - if properties.lateprivates then -- .frozen - -- bad, we don't have them at the tex end - local fake = getprivatenode(tfmdata,fakechar) - insert_node_after(head,n,fake) - head = remove_node(head,n,true) - else - -- good, we have \definefontfeature[default][default][missing=yes] - n.char = p - end - end - else - -- maye write a report to the log - end - return head, false -end - -local relevant = { "missing (will be deleted)", "missing (will be flagged)", "missing" } - -function checkers.getmissing(id) - if id then - local list = checkers.getmissing(font.current()) - if list then - local _, list = next(checkers.getmissing(font.current())) - return list - else - return { } - end - else - local t = { } - for id, d in next, fontdata do - local shared = d.shared - local messages = shared.messages - if messages then - local tf = t[d.properties.filename] or { } - for i=1,#relevant do - local tm = messages[relevant[i]] - if tm then - tf = table.merged(tf,tm) - end - end - if next(tf) then - t[d.properties.filename] = tf - end - end - end - for k, v in next, t do - t[k] = table.sortedkeys(v) - end - return t - end -end - -local tracked = false - -trackers.register("fonts.missing", function(v) - if v then - enableaction("processors","fonts.checkers.missing") - tracked = true - else - disableaction("processors","fonts.checkers.missing") - end - if v == "replace" then - 
otffeatures.defaults.missing = true - end - action = v -end) - -function commands.checkcharactersinfont() - enableaction("processors","fonts.checkers.missing") - tracked = true -end - -function commands.removemissingcharacters() - enableaction("processors","fonts.checkers.missing") - action = "remove" - tracked = true -end - -function commands.replacemissingcharacters() - enableaction("processors","fonts.checkers.missing") - action = "replace" - otffeatures.defaults.missing = true - tracked = true -end - -local report_characters = logs.reporter("fonts","characters") -local report_character = logs.reporter("missing") - -local logsnewline = logs.newline -local logspushtarget = logs.pushtarget -local logspoptarget = logs.poptarget - -luatex.registerstopactions(function() - if tracked then - local collected = checkers.getmissing() - if next(collected) then - logspushtarget("logfile") - for filename, list in table.sortedhash(collected) do - logsnewline() - report_characters("start missing characters: %s",filename) - logsnewline() - for i=1,#list do - local u = list[i] - report_character("%U %c %s",u,u,chardata[u].description) - end - logsnewline() - report_characters("stop missing characters") - logsnewline() - end - logspoptarget() - end - end -end) +if not modules then modules = { } end modules ['font-chk'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- possible optimization: delayed initialization of vectors +-- move to the nodes namespace + +local format = string.format +local bpfactor = number.dimenfactors.bp + +local report_fonts = logs.reporter("fonts","checking") + +local fonts = fonts + +fonts.checkers = fonts.checkers or { } +local checkers = fonts.checkers + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local fontcharacters = fonthashes.characters + +local addprivate = fonts.helpers.addprivate +local hasprivate = fonts.helpers.hasprivate +local getprivatenode = fonts.helpers.getprivatenode + +local otffeatures = fonts.constructors.newfeatures("otf") +local registerotffeature = otffeatures.register + +local is_character = characters.is_character +local chardata = characters.data + +local tasks = nodes.tasks +local enableaction = tasks.enableaction +local disableaction = tasks.disableaction + +local glyph_code = nodes.nodecodes.glyph +local traverse_id = node.traverse_id +local remove_node = nodes.remove +local insert_node_after = node.insert_after + +-- maybe in fonts namespace +-- deletion can be option + +local action = false + +-- to tfmdata.properties ? 
+ +local function onetimemessage(font,char,message) -- char == false returns table + local tfmdata = fontdata[font] + local shared = tfmdata.shared + local messages = shared.messages + if not messages then + messages = { } + shared.messages = messages + end + local category = messages[message] + if not category then + category = { } + messages[message] = category + end + if char == false then + return table.sortedkeys(category) + elseif not category[char] then + report_fonts("char %U in font %a with id %a: %s",char,tfmdata.properties.fullname,font,message) + category[char] = true + end +end + +fonts.loggers.onetimemessage = onetimemessage + +local mapping = { -- this is just an experiment to illustrate some principles elsewhere + lu = "placeholder uppercase red", + ll = "placeholder lowercase red", + lt = "placeholder uppercase red", + lm = "placeholder lowercase red", + lo = "placeholder lowercase red", + mn = "placeholder mark green", + mc = "placeholder mark green", + me = "placeholder mark green", + nd = "placeholder lowercase blue", + nl = "placeholder lowercase blue", + no = "placeholder lowercase blue", + pc = "placeholder punctuation cyan", + pd = "placeholder punctuation cyan", + ps = "placeholder punctuation cyan", + pe = "placeholder punctuation cyan", + pi = "placeholder punctuation cyan", + pf = "placeholder punctuation cyan", + po = "placeholder punctuation cyan", + sm = "placeholder lowercase magenta", + sc = "placeholder lowercase yellow", + sk = "placeholder lowercase yellow", + so = "placeholder lowercase yellow", +} + +table.setmetatableindex(mapping,function(t,k) v = "placeholder unknown gray" t[k] = v return v end) + +local fakes = { + { + name = "lowercase", + code = ".025 -.175 m .425 -.175 l .425 .525 l .025 .525 l .025 -.175 l .025 0 l .425 0 l .025 -.175 m h S", + width = .45, + height = .55, + depth = .20, + }, + { + name = "uppercase", + code = ".025 -.225 m .625 -.225 l .625 .675 l .025 .675 l .025 -.225 l .025 0 l .625 0 l .025 -.225 m h S", + width = .65, + height = .70, + depth = .25, + }, + { + name = "mark", + code = ".025 .475 m .125 .475 l .125 .675 l .025 .675 l .025 .475 l h B", + width = .15, + height = .70, + depth = -.50, + }, + { + name = "punctuation", + code = ".025 -.175 m .125 -.175 l .125 .525 l .025 .525 l .025 -.175 l h B", + width = .15, + height = .55, + depth = .20, + }, + { + name = "unknown", + code = ".025 0 m .425 0 l .425 .175 l .025 .175 l .025 0 l h B", + width = .45, + height = .20, + depth = 0, + }, +} + +local variants = { + { tag = "gray", r = .6, g = .6, b = .6 }, + { tag = "red", r = .6, g = 0, b = 0 }, + { tag = "green", r = 0, g = .6, b = 0 }, + { tag = "blue", r = 0, g = 0, b = .6 }, + { tag = "cyan", r = 0, g = .6, b = .6 }, + { tag = "magenta", r = .6, g = 0, b = .6 }, + { tag = "yellow", r = .6, g = .6, b = 0 }, +} + +local package = "q %0.6f 0 0 %0.6f 0 0 cm %s %s %s rg %s %s %s RG 10 M 1 j 1 J 0.05 w %s Q" + +local cache = { } -- saves some tables but not that impressive + +local function addmissingsymbols(tfmdata) -- we can have an alternative with rules + local characters = tfmdata.characters + local size = tfmdata.parameters.size + local privates = tfmdata.properties.privates + local scale = size * bpfactor + for i=1,#variants do + local v = variants[i] + local tag, r, g, b = v.tag, v.r, v.g, v.b + for i =1, #fakes do + local fake = fakes[i] + local name = fake.name + local privatename = format("placeholder %s %s",name,tag) + if not hasprivate(tfmdata,privatename) then + local hash = 
format("%s_%s_%s_%s_%s_%s",name,tag,r,g,b,size) + local char = cache[hash] + if not char then + char = { + width = size*fake.width, + height = size*fake.height, + depth = size*fake.depth, + -- bah .. low level pdf ... should be a rule or plugged in + commands = { { "special", "pdf: " .. format(package,scale,scale,r,g,b,r,g,b,fake.code) } } + } + cache[hash] = char + end + addprivate(tfmdata, privatename, char) + end + end + end +end + +registerotffeature { + name = "missing", + description = "missing symbols", + manipulators = { + base = addmissingsymbols, + node = addmissingsymbols, + } +} + +fonts.loggers.add_placeholders = function(id) addmissingsymbols(fontdata[id or true]) end +fonts.loggers.category_to_placeholder = mapping + +function commands.getplaceholderchar(name) + local id = font.current() + addmissingsymbols(fontdata[id]) + context(fonts.helpers.getprivatenode(fontdata[id],name)) +end + +function checkers.missing(head) + local lastfont, characters, found = nil, nil, nil + for n in traverse_id(glyph_code,head) do -- faster than while loop so we delay removal + local font = n.font + local char = n.char + if font ~= lastfont then + characters = fontcharacters[font] + end + if not characters[char] and is_character[chardata[char].category] then + if action == "remove" then + onetimemessage(font,char,"missing (will be deleted)") + elseif action == "replace" then + onetimemessage(font,char,"missing (will be flagged)") + else + onetimemessage(font,char,"missing") + end + if not found then + found = { n } + else + found[#found+1] = n + end + end + end + if not found then + -- all well + elseif action == "remove" then + for i=1,#found do + head = remove_node(head,found[i],true) + end + elseif action == "replace" then + for i=1,#found do + local n = found[i] + local font = n.font + local char = n.char + local tfmdata = fontdata[font] + local properties = tfmdata.properties + local privates = properties.privates + local category = chardata[char].category + local fakechar = mapping[category] + local p = privates and privates[fakechar] + if not p then + addmissingsymbols(tfmdata) + p = properties.privates[fakechar] + end + if properties.lateprivates then -- .frozen + -- bad, we don't have them at the tex end + local fake = getprivatenode(tfmdata,fakechar) + insert_node_after(head,n,fake) + head = remove_node(head,n,true) + else + -- good, we have \definefontfeature[default][default][missing=yes] + n.char = p + end + end + else + -- maye write a report to the log + end + return head, false +end + +local relevant = { "missing (will be deleted)", "missing (will be flagged)", "missing" } + +function checkers.getmissing(id) + if id then + local list = checkers.getmissing(font.current()) + if list then + local _, list = next(checkers.getmissing(font.current())) + return list + else + return { } + end + else + local t = { } + for id, d in next, fontdata do + local shared = d.shared + local messages = shared.messages + if messages then + local tf = t[d.properties.filename] or { } + for i=1,#relevant do + local tm = messages[relevant[i]] + if tm then + tf = table.merged(tf,tm) + end + end + if next(tf) then + t[d.properties.filename] = tf + end + end + end + for k, v in next, t do + t[k] = table.sortedkeys(v) + end + return t + end +end + +local tracked = false + +trackers.register("fonts.missing", function(v) + if v then + enableaction("processors","fonts.checkers.missing") + tracked = true + else + disableaction("processors","fonts.checkers.missing") + end + if v == "replace" then + 
otffeatures.defaults.missing = true + end + action = v +end) + +function commands.checkcharactersinfont() + enableaction("processors","fonts.checkers.missing") + tracked = true +end + +function commands.removemissingcharacters() + enableaction("processors","fonts.checkers.missing") + action = "remove" + tracked = true +end + +function commands.replacemissingcharacters() + enableaction("processors","fonts.checkers.missing") + action = "replace" + otffeatures.defaults.missing = true + tracked = true +end + +local report_characters = logs.reporter("fonts","characters") +local report_character = logs.reporter("missing") + +local logsnewline = logs.newline +local logspushtarget = logs.pushtarget +local logspoptarget = logs.poptarget + +luatex.registerstopactions(function() + if tracked then + local collected = checkers.getmissing() + if next(collected) then + logspushtarget("logfile") + for filename, list in table.sortedhash(collected) do + logsnewline() + report_characters("start missing characters: %s",filename) + logsnewline() + for i=1,#list do + local u = list[i] + report_character("%U %c %s",u,u,chardata[u].description) + end + logsnewline() + report_characters("stop missing characters") + logsnewline() + end + logspoptarget() + end + end +end) diff --git a/tex/context/base/font-cid.lua b/tex/context/base/font-cid.lua index e4b565313..b588493b7 100644 --- a/tex/context/base/font-cid.lua +++ b/tex/context/base/font-cid.lua @@ -1,164 +1,164 @@ -if not modules then modules = { } end modules ['font-cid'] = { - version = 1.001, - comment = "companion to font-otf.lua (cidmaps)", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, match, lower = string.format, string.match, string.lower -local tonumber = tonumber -local P, S, R, C, V, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.match - -local fonts, logs, trackers = fonts, logs, trackers - -local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) - -local report_otf = logs.reporter("fonts","otf loading") - -local cid = { } -fonts.cid = cid - -local cidmap = { } -local cidmax = 10 - --- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap --- --- 18964 18964 (leader) --- 0 /.notdef --- 1..95 0020 --- 99 3000 - -local number = C(R("09","af","AF")^1) -local space = S(" \n\r\t") -local spaces = space^0 -local period = P(".") -local periods = period * period -local name = P("/") * C((1-space)^1) - -local unicodes, names = { }, { } -- we could use Carg now - -local function do_one(a,b) - unicodes[tonumber(a)] = tonumber(b,16) -end - -local function do_range(a,b,c) - c = tonumber(c,16) - for i=tonumber(a),tonumber(b) do - unicodes[i] = c - c = c + 1 - end -end - -local function do_name(a,b) - names[tonumber(a)] = b -end - -local grammar = P { "start", - start = number * spaces * number * V("series"), - series = (spaces * (V("one") + V("range") + V("named")))^1, - one = (number * spaces * number) / do_one, - range = (number * periods * number * spaces * number) / do_range, - named = (number * spaces * name) / do_name -} - -local function loadcidfile(filename) - local data = io.loaddata(filename) - if data then - unicodes, names = { }, { } - lpegmatch(grammar,data) - local supplement, registry, ordering = match(filename,"^(.-)%-(.-)%-()%.(.-)$") - return { - supplement = supplement, - registry = registry, - ordering = ordering, - filename = filename, - 
unicodes = unicodes, - names = names - } - end -end - -cid.loadfile = loadcidfile -- we use the frozen variant -local template = "%s-%s-%s.cidmap" - -local function locate(registry,ordering,supplement) - local filename = format(template,registry,ordering,supplement) - local hashname = lower(filename) - local found = cidmap[hashname] - if not found then - if trace_loading then - report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename) - end - local fullname = resolvers.findfile(filename,'cid') or "" - if fullname ~= "" then - found = loadcidfile(fullname) - if found then - if trace_loading then - report_otf("using cidmap file %a",filename) - end - cidmap[hashname] = found - found.usedname = file.basename(filename) - end - end - end - return found -end - --- cf Arthur R. we can safely scan upwards since cids are downward compatible - -function cid.getmap(specification) - if not specification then - report_otf("invalid cidinfo specification, table expected") - return - end - local registry = specification.registry - local ordering = specification.ordering - local supplement = specification.supplement - -- check for already loaded file - local filename = format(registry,ordering,supplement) - local found = cidmap[lower(filename)] - if found then - return found - end - if trace_loading then - report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement) - end - found = locate(registry,ordering,supplement) - if not found then - local supnum = tonumber(supplement) - local cidnum = nil - -- next highest (alternatively we could start high) - if supnum < cidmax then - for s=supnum+1,cidmax do - local c = locate(registry,ordering,s) - if c then - found, cidnum = c, s - break - end - end - end - -- next lowest (least worse fit) - if not found and supnum > 0 then - for s=supnum-1,0,-1 do - local c = locate(registry,ordering,s) - if c then - found, cidnum = c, s - break - end - end - end - -- prevent further lookups -- somewhat tricky - registry = lower(registry) - ordering = lower(ordering) - if found and cidnum > 0 then - for s=0,cidnum-1 do - local filename = format(template,registry,ordering,s) - if not cidmap[filename] then - cidmap[filename] = found - end - end - end - end - return found -end +if not modules then modules = { } end modules ['font-cid'] = { + version = 1.001, + comment = "companion to font-otf.lua (cidmaps)", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, match, lower = string.format, string.match, string.lower +local tonumber = tonumber +local P, S, R, C, V, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.match + +local fonts, logs, trackers = fonts, logs, trackers + +local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) + +local report_otf = logs.reporter("fonts","otf loading") + +local cid = { } +fonts.cid = cid + +local cidmap = { } +local cidmax = 10 + +-- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap +-- +-- 18964 18964 (leader) +-- 0 /.notdef +-- 1..95 0020 +-- 99 3000 + +local number = C(R("09","af","AF")^1) +local space = S(" \n\r\t") +local spaces = space^0 +local period = P(".") +local periods = period * period +local name = P("/") * C((1-space)^1) + +local unicodes, names = { }, { } -- we could use Carg now + +local function do_one(a,b) + unicodes[tonumber(a)] = 
tonumber(b,16) +end + +local function do_range(a,b,c) + c = tonumber(c,16) + for i=tonumber(a),tonumber(b) do + unicodes[i] = c + c = c + 1 + end +end + +local function do_name(a,b) + names[tonumber(a)] = b +end + +local grammar = P { "start", + start = number * spaces * number * V("series"), + series = (spaces * (V("one") + V("range") + V("named")))^1, + one = (number * spaces * number) / do_one, + range = (number * periods * number * spaces * number) / do_range, + named = (number * spaces * name) / do_name +} + +local function loadcidfile(filename) + local data = io.loaddata(filename) + if data then + unicodes, names = { }, { } + lpegmatch(grammar,data) + local supplement, registry, ordering = match(filename,"^(.-)%-(.-)%-()%.(.-)$") + return { + supplement = supplement, + registry = registry, + ordering = ordering, + filename = filename, + unicodes = unicodes, + names = names + } + end +end + +cid.loadfile = loadcidfile -- we use the frozen variant +local template = "%s-%s-%s.cidmap" + +local function locate(registry,ordering,supplement) + local filename = format(template,registry,ordering,supplement) + local hashname = lower(filename) + local found = cidmap[hashname] + if not found then + if trace_loading then + report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename) + end + local fullname = resolvers.findfile(filename,'cid') or "" + if fullname ~= "" then + found = loadcidfile(fullname) + if found then + if trace_loading then + report_otf("using cidmap file %a",filename) + end + cidmap[hashname] = found + found.usedname = file.basename(filename) + end + end + end + return found +end + +-- cf Arthur R. we can safely scan upwards since cids are downward compatible + +function cid.getmap(specification) + if not specification then + report_otf("invalid cidinfo specification, table expected") + return + end + local registry = specification.registry + local ordering = specification.ordering + local supplement = specification.supplement + -- check for already loaded file + local filename = format(registry,ordering,supplement) + local found = cidmap[lower(filename)] + if found then + return found + end + if trace_loading then + report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement) + end + found = locate(registry,ordering,supplement) + if not found then + local supnum = tonumber(supplement) + local cidnum = nil + -- next highest (alternatively we could start high) + if supnum < cidmax then + for s=supnum+1,cidmax do + local c = locate(registry,ordering,s) + if c then + found, cidnum = c, s + break + end + end + end + -- next lowest (least worse fit) + if not found and supnum > 0 then + for s=supnum-1,0,-1 do + local c = locate(registry,ordering,s) + if c then + found, cidnum = c, s + break + end + end + end + -- prevent further lookups -- somewhat tricky + registry = lower(registry) + ordering = lower(ordering) + if found and cidnum > 0 then + for s=0,cidnum-1 do + local filename = format(template,registry,ordering,s) + if not cidmap[filename] then + cidmap[filename] = found + end + end + end + end + return found +end diff --git a/tex/context/base/font-col.lua b/tex/context/base/font-col.lua index 20c99c9b4..b8b221fc4 100644 --- a/tex/context/base/font-col.lua +++ b/tex/context/base/font-col.lua @@ -1,238 +1,238 @@ -if not modules then modules = { } end modules ['font-col'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", 
- copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- possible optimization: delayed initialization of vectors - -local context, commands, trackers, logs = context, commands, trackers, logs -local node, nodes, fonts, characters = node, nodes, fonts, characters -local file, lpeg, table, string = file, lpeg, table, string - -local type, next, toboolean = type, next, toboolean -local gmatch = string.gmatch -local fastcopy = table.fastcopy ------ P, Cc, lpegmatch = lpeg.P, lpeg.Cc, lpeg.match - -local traverse_id = node.traverse_id -local settings_to_hash = utilities.parsers.settings_to_hash - -local trace_collecting = false trackers.register("fonts.collecting", function(v) trace_collecting = v end) - -local report_fonts = logs.reporter("fonts","collections") - -local collections = fonts.collections or { } -fonts.collections = collections - -local definitions = collections.definitions or { } -collections.definitions = definitions - -local vectors = collections.vectors or { } -collections.vectors = vectors - -local fontdata = fonts.hashes.identifiers -local glyph_code = nodes.nodecodes.glyph -local currentfont = font.current - -local fontpatternhassize = fonts.helpers.fontpatternhassize - -local list = { } -local current = 0 -local enabled = false - --- maybe also a copy - -function collections.reset(name,font) - if font and font ~= "" then - local d = definitions[name] - if d then - d[font] = nil - if not next(d) then - definitions[name] = nil - end - end - else - definitions[name] = nil - end -end - -function collections.define(name,font,ranges,details) - -- todo: details -> method=force|conditional rscale= - -- todo: remap=name - local d = definitions[name] - if not d then - d = { } - definitions[name] = d - end - if name and trace_collecting then - report_fonts("extending collection %a using %a",name,font) - end - details = settings_to_hash(details) - -- todo, combine per font start/stop as arrays - for s in gmatch(ranges,"[^, ]+") do - local start, stop, description = characters.getrange(s) - if start and stop then - if trace_collecting then - if description then - report_fonts("using range %a, slots %U - %U, description %a)",s,start,stop,description) - end - for i=1,#d do - local di = d[i] - if (start >= di.start and start <= di.stop) or (stop >= di.start and stop <= di.stop) then - report_fonts("overlapping ranges %U - %U and %U - %U",start,stop,di.start,di.stop) - end - end - end - details.font, details.start, details.stop = font, start, stop - d[#d+1] = fastcopy(details) - end - end -end - --- todo: provide a lua variant (like with definefont) - -function collections.registermain(name) - local last = currentfont() - if trace_collecting then - report_fonts("registering font %a with name %a",last,name) - end - list[#list+1] = last -end - -function collections.clonevector(name) - statistics.starttiming(fonts) - local d = definitions[name] - local t = { } - if trace_collecting then - report_fonts("processing collection %a",name) - end - for i=1,#d do - local f = d[i] - local id = list[i] - local start, stop = f.start, f.stop - if trace_collecting then - report_fonts("remapping font %a to %a for range %U - %U",current,id,start,stop) - end - local check = toboolean(f.check or "false",true) - local force = toboolean(f.force or "true",true) - local remap = f.remap or nil - -- check: when true, only set when present in font - -- force: when false, then not set when already set - local oldchars = fontdata[current].characters - local 
newchars = fontdata[id].characters - if check then - for i=start,stop do - if newchars[i] and (force or (not t[i] and not oldchars[i])) then - if remap then - t[i] = { id, remap[i] } - else - t[i] = id - end - end - end - else - for i=start,stop do - if force or (not t[i] and not oldchars[i]) then - if remap then - t[i] = { id, remap[i] } - else - t[i] = id - end - end - end - end - end - vectors[current] = t - if trace_collecting then - report_fonts("activating collection %a for font %a",name,current) - end - if not enabled then - nodes.tasks.enableaction("processors","fonts.collections.process") - enabled = true - end - statistics.stoptiming(fonts) -end - --- we already have this parser --- --- local spec = (P("sa") + P("at") + P("scaled") + P("at") + P("mo")) * P(" ")^1 * (1-P(" "))^1 * P(" ")^0 * -1 --- local okay = ((1-spec)^1 * spec * Cc(true)) + Cc(false) --- --- if lpegmatch(okay,name) then - -function collections.prepare(name) - current = currentfont() - if vectors[current] then - return - end - local d = definitions[name] - if d then - if trace_collecting then - local filename = file.basename(fontdata[current].properties.filename or "?") - report_fonts("applying collection %a to %a, file %a",name,current,filename) - end - list = { } - context.pushcatcodes("prt") -- context.unprotect() - context.font_fallbacks_start_cloning() - for i=1,#d do - local f = d[i] - local name = f.font - local scale = f.rscale or 1 - if fontpatternhassize(name) then - context.font_fallbacks_clone_unique(name,scale) - else - context.font_fallbacks_clone_inherited(name,scale) - end - context.font_fallbacks_register_main(name) - end - context.font_fallbacks_prepare_clone_vectors(name) - context.font_fallbacks_stop_cloning() - context.popcatcodes() -- context.protect() - elseif trace_collecting then - local filename = file.basename(fontdata[current].properties.filename or "?") - report_fonts("error while applying collection %a to %a, file %a",name,current,filename) - end -end - -function collections.report(message) - if trace_collecting then - report_fonts("tex: %s",message) - end -end - -function collections.process(head) -- this way we keep feature processing - local done = false - for n in traverse_id(glyph_code,head) do - local v = vectors[n.font] - if v then - local id = v[n.char] - if id then - if type(id) == "table" then - local newid, newchar = id[1], id[2] - if trace_collecting then - report_fonts("remapping character %a in font %a to character %a in font %a",n.char,n.font,newchar,newid) - end - n.font, n.char = newid, newchar - else - if trace_collecting then - report_fonts("remapping font %a to %a for character %a",n.font,id,n.char) - end - n.font = id - end - end - end - end - return head, done -end - --- interface - -commands.fontcollectiondefine = collections.define -commands.fontcollectionreset = collections.reset -commands.fontcollectionprepare = collections.prepare -commands.fontcollectionreport = collections.report -commands.fontcollectionregister = collections.registermain -commands.fontcollectionclone = collections.clonevector +if not modules then modules = { } end modules ['font-col'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- possible optimization: delayed initialization of vectors + +local context, commands, trackers, logs = context, commands, trackers, logs +local node, nodes, fonts, characters = node, 
nodes, fonts, characters +local file, lpeg, table, string = file, lpeg, table, string + +local type, next, toboolean = type, next, toboolean +local gmatch = string.gmatch +local fastcopy = table.fastcopy +----- P, Cc, lpegmatch = lpeg.P, lpeg.Cc, lpeg.match + +local traverse_id = node.traverse_id +local settings_to_hash = utilities.parsers.settings_to_hash + +local trace_collecting = false trackers.register("fonts.collecting", function(v) trace_collecting = v end) + +local report_fonts = logs.reporter("fonts","collections") + +local collections = fonts.collections or { } +fonts.collections = collections + +local definitions = collections.definitions or { } +collections.definitions = definitions + +local vectors = collections.vectors or { } +collections.vectors = vectors + +local fontdata = fonts.hashes.identifiers +local glyph_code = nodes.nodecodes.glyph +local currentfont = font.current + +local fontpatternhassize = fonts.helpers.fontpatternhassize + +local list = { } +local current = 0 +local enabled = false + +-- maybe also a copy + +function collections.reset(name,font) + if font and font ~= "" then + local d = definitions[name] + if d then + d[font] = nil + if not next(d) then + definitions[name] = nil + end + end + else + definitions[name] = nil + end +end + +function collections.define(name,font,ranges,details) + -- todo: details -> method=force|conditional rscale= + -- todo: remap=name + local d = definitions[name] + if not d then + d = { } + definitions[name] = d + end + if name and trace_collecting then + report_fonts("extending collection %a using %a",name,font) + end + details = settings_to_hash(details) + -- todo, combine per font start/stop as arrays + for s in gmatch(ranges,"[^, ]+") do + local start, stop, description = characters.getrange(s) + if start and stop then + if trace_collecting then + if description then + report_fonts("using range %a, slots %U - %U, description %a)",s,start,stop,description) + end + for i=1,#d do + local di = d[i] + if (start >= di.start and start <= di.stop) or (stop >= di.start and stop <= di.stop) then + report_fonts("overlapping ranges %U - %U and %U - %U",start,stop,di.start,di.stop) + end + end + end + details.font, details.start, details.stop = font, start, stop + d[#d+1] = fastcopy(details) + end + end +end + +-- todo: provide a lua variant (like with definefont) + +function collections.registermain(name) + local last = currentfont() + if trace_collecting then + report_fonts("registering font %a with name %a",last,name) + end + list[#list+1] = last +end + +function collections.clonevector(name) + statistics.starttiming(fonts) + local d = definitions[name] + local t = { } + if trace_collecting then + report_fonts("processing collection %a",name) + end + for i=1,#d do + local f = d[i] + local id = list[i] + local start, stop = f.start, f.stop + if trace_collecting then + report_fonts("remapping font %a to %a for range %U - %U",current,id,start,stop) + end + local check = toboolean(f.check or "false",true) + local force = toboolean(f.force or "true",true) + local remap = f.remap or nil + -- check: when true, only set when present in font + -- force: when false, then not set when already set + local oldchars = fontdata[current].characters + local newchars = fontdata[id].characters + if check then + for i=start,stop do + if newchars[i] and (force or (not t[i] and not oldchars[i])) then + if remap then + t[i] = { id, remap[i] } + else + t[i] = id + end + end + end + else + for i=start,stop do + if force or (not t[i] and not oldchars[i]) then 
+ if remap then + t[i] = { id, remap[i] } + else + t[i] = id + end + end + end + end + end + vectors[current] = t + if trace_collecting then + report_fonts("activating collection %a for font %a",name,current) + end + if not enabled then + nodes.tasks.enableaction("processors","fonts.collections.process") + enabled = true + end + statistics.stoptiming(fonts) +end + +-- we already have this parser +-- +-- local spec = (P("sa") + P("at") + P("scaled") + P("at") + P("mo")) * P(" ")^1 * (1-P(" "))^1 * P(" ")^0 * -1 +-- local okay = ((1-spec)^1 * spec * Cc(true)) + Cc(false) +-- +-- if lpegmatch(okay,name) then + +function collections.prepare(name) + current = currentfont() + if vectors[current] then + return + end + local d = definitions[name] + if d then + if trace_collecting then + local filename = file.basename(fontdata[current].properties.filename or "?") + report_fonts("applying collection %a to %a, file %a",name,current,filename) + end + list = { } + context.pushcatcodes("prt") -- context.unprotect() + context.font_fallbacks_start_cloning() + for i=1,#d do + local f = d[i] + local name = f.font + local scale = f.rscale or 1 + if fontpatternhassize(name) then + context.font_fallbacks_clone_unique(name,scale) + else + context.font_fallbacks_clone_inherited(name,scale) + end + context.font_fallbacks_register_main(name) + end + context.font_fallbacks_prepare_clone_vectors(name) + context.font_fallbacks_stop_cloning() + context.popcatcodes() -- context.protect() + elseif trace_collecting then + local filename = file.basename(fontdata[current].properties.filename or "?") + report_fonts("error while applying collection %a to %a, file %a",name,current,filename) + end +end + +function collections.report(message) + if trace_collecting then + report_fonts("tex: %s",message) + end +end + +function collections.process(head) -- this way we keep feature processing + local done = false + for n in traverse_id(glyph_code,head) do + local v = vectors[n.font] + if v then + local id = v[n.char] + if id then + if type(id) == "table" then + local newid, newchar = id[1], id[2] + if trace_collecting then + report_fonts("remapping character %a in font %a to character %a in font %a",n.char,n.font,newchar,newid) + end + n.font, n.char = newid, newchar + else + if trace_collecting then + report_fonts("remapping font %a to %a for character %a",n.font,id,n.char) + end + n.font = id + end + end + end + end + return head, done +end + +-- interface + +commands.fontcollectiondefine = collections.define +commands.fontcollectionreset = collections.reset +commands.fontcollectionprepare = collections.prepare +commands.fontcollectionreport = collections.report +commands.fontcollectionregister = collections.registermain +commands.fontcollectionclone = collections.clonevector diff --git a/tex/context/base/font-con.lua b/tex/context/base/font-con.lua index 790d4877a..e441ebefe 100644 --- a/tex/context/base/font-con.lua +++ b/tex/context/base/font-con.lua @@ -1,1330 +1,1330 @@ -if not modules then modules = { } end modules ['font-con'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- some names of table entries will be changed (no _) - -local next, tostring, rawget = next, tostring, rawget -local format, match, lower, gsub = string.format, string.match, string.lower, string.gsub -local utfbyte = utf.byte -local sort, insert, concat, sortedkeys, serialize, 
fastcopy = table.sort, table.insert, table.concat, table.sortedkeys, table.serialize, table.fastcopy -local derivetable = table.derive - -local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) -local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end) - -local report_defining = logs.reporter("fonts","defining") - --- watch out: no negative depths and negative eights permitted in regular fonts - ---[[ldx-- -

Here we only implement a few helper functions.

---ldx]]-- - -local fonts = fonts -local constructors = fonts.constructors or { } -fonts.constructors = constructors -local handlers = fonts.handlers or { } -- can have preloaded tables -fonts.handlers = handlers - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex - --- will be directives - -constructors.dontembed = allocate() -constructors.autocleanup = true -constructors.namemode = "fullpath" -- will be a function - -constructors.version = 1.01 -constructors.cache = containers.define("fonts", "constructors", constructors.version, false) - -constructors.privateoffset = 0xF0000 -- 0x10FFFF - --- Some experimental helpers (handy for tracing): --- --- todo: extra: --- --- extra_space => space.extra --- space => space.width --- space_stretch => space.stretch --- space_shrink => space.shrink - --- We do keep the x-height, extra_space, space_shrink and space_stretch --- around as these are low level official names. - -constructors.keys = { - properties = { - encodingbytes = "number", - embedding = "number", - cidinfo = { - }, - format = "string", - fontname = "string", - fullname = "string", - filename = "filename", - psname = "string", - name = "string", - virtualized = "boolean", - hasitalics = "boolean", - autoitalicamount = "basepoints", - nostackmath = "boolean", - noglyphnames = "boolean", - mode = "string", - hasmath = "boolean", - mathitalics = "boolean", - textitalics = "boolean", - finalized = "boolean", - }, - parameters = { - mathsize = "number", - scriptpercentage = "float", - scriptscriptpercentage = "float", - units = "cardinal", - designsize = "scaledpoints", - expansion = { - stretch = "integerscale", -- might become float - shrink = "integerscale", -- might become float - step = "integerscale", -- might become float - auto = "boolean", - }, - protrusion = { - auto = "boolean", - }, - slantfactor = "float", - extendfactor = "float", - factor = "float", - hfactor = "float", - vfactor = "float", - size = "scaledpoints", - units = "scaledpoints", - scaledpoints = "scaledpoints", - slantperpoint = "scaledpoints", - spacing = { - width = "scaledpoints", - stretch = "scaledpoints", - shrink = "scaledpoints", - extra = "scaledpoints", - }, - xheight = "scaledpoints", - quad = "scaledpoints", - ascender = "scaledpoints", - descender = "scaledpoints", - synonyms = { - space = "spacing.width", - spacestretch = "spacing.stretch", - spaceshrink = "spacing.shrink", - extraspace = "spacing.extra", - x_height = "xheight", - space_stretch = "spacing.stretch", - space_shrink = "spacing.shrink", - extra_space = "spacing.extra", - em = "quad", - ex = "xheight", - slant = "slantperpoint", - }, - }, - description = { - width = "basepoints", - height = "basepoints", - depth = "basepoints", - boundingbox = { }, - }, - character = { - width = "scaledpoints", - height = "scaledpoints", - depth = "scaledpoints", - italic = "scaledpoints", - }, -} - --- This might become an interface: - -local designsizes = allocate() -constructors.designsizes = designsizes -local loadedfonts = allocate() -constructors.loadedfonts = loadedfonts - ---[[ldx-- -

We need to normalize the scale factor (in scaled points). This has to do with the fact that TeX uses a negative multiple of 1000 as a signal for a font scaled based on the design size.
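As a minimal sketch (not part of the patch; the 11pt design size is only an example value), this is what that convention means in terms of the constructors.scaled helper defined just below:

    local designsize = 11 * 65536                        -- 11pt expressed in scaled points
    print(constructors.scaled( 10 * 65536, designsize))  -- positive sizes pass through: 655360 (10pt)
    print(constructors.scaled(-1200, designsize))        -- negative: 1200/1000 of the design size, i.e. 13.2pt in sp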

---ldx]]-- - -local factors = { - pt = 65536.0, - bp = 65781.8, -} - -function constructors.setfactor(f) - constructors.factor = factors[f or 'pt'] or factors.pt -end - -constructors.setfactor() - -function constructors.scaled(scaledpoints, designsize) -- handles designsize in sp as well - if scaledpoints < 0 then - if designsize then - local factor = constructors.factor - if designsize > factor then -- or just 1000 / when? mp? - return (- scaledpoints/1000) * designsize -- sp's - else - return (- scaledpoints/1000) * designsize * factor - end - else - return (- scaledpoints/1000) * 10 * factor - end - else - return scaledpoints - end -end - ---[[ldx-- -

Beware, the boundingbox is passed as reference so we may not overwrite it in the process; numbers are of course copies. Here 65536 equals 1pt. (Due to excessive memory usage in CJK fonts, we no longer pass the boundingbox.)

---ldx]]-- - --- The scaler is only used for otf and afm and virtual fonts. If --- a virtual font has italic correction make sure to set the --- hasitalics flag. Some more flags will be added in --- the future. - ---[[ldx-- -

The reason why the scaler was originally split is that for a while we experimented with a Lua helper function. However, in practice the calls are too slow to make this profitable and the Lua-based variant was just faster. A day wasted, but an experience richer.

---ldx]]-- - --- we can get rid of the tfm instance when we have fast access to the --- scaled character dimensions at the tex end, e.g. a fontobject.width --- actually we already have some of that now as virtual keys in glyphs --- --- flushing the kern and ligature tables from memory saves a lot (only --- base mode) but it complicates vf building where the new characters --- demand this data .. solution: functions that access them - -function constructors.cleanuptable(tfmdata) - if constructors.autocleanup and tfmdata.properties.virtualized then - for k, v in next, tfmdata.characters do - if v.commands then v.commands = nil end - -- if v.kerns then v.kerns = nil end - end - end -end - --- experimental, sharing kerns (unscaled and scaled) saves memory --- local sharedkerns, basekerns = constructors.check_base_kerns(tfmdata) --- loop over descriptions (afm and otf have descriptions, tfm not) --- there is no need (yet) to assign a value to chr.tonunicode - --- constructors.prepare_base_kerns(tfmdata) -- optimalization - --- we have target.name=metricfile and target.fullname=RealName and target.filename=diskfilename --- when collapsing fonts, luatex looks as both target.name and target.fullname as ttc files --- can have multiple subfonts - -function constructors.calculatescale(tfmdata,scaledpoints) - local parameters = tfmdata.parameters - if scaledpoints < 0 then - scaledpoints = (- scaledpoints/1000) * (tfmdata.designsize or parameters.designsize) -- already in sp - end - return scaledpoints, scaledpoints / (parameters.units or 1000) -- delta -end - -local unscaled = { - ScriptPercentScaleDown = true, - ScriptScriptPercentScaleDown = true, - RadicalDegreeBottomRaisePercent = true -} - -function constructors.assignmathparameters(target,original) -- simple variant, not used in context - -- when a tfm file is loaded, it has already been scaled - -- and it never enters the scaled so this is otf only and - -- even then we do some extra in the context math plugins - local mathparameters = original.mathparameters - if mathparameters and next(mathparameters) then - local targetparameters = target.parameters - local targetproperties = target.properties - local targetmathparameters = { } - local factor = targetproperties.math_is_scaled and 1 or targetparameters.factor - for name, value in next, mathparameters do - if unscaled[name] then - targetmathparameters[name] = value - else - targetmathparameters[name] = value * factor - end - end - if not targetmathparameters.FractionDelimiterSize then - targetmathparameters.FractionDelimiterSize = 1.01 * targetparameters.size - end - if not mathparameters.FractionDelimiterDisplayStyleSize then - targetmathparameters.FractionDelimiterDisplayStyleSize = 2.40 * targetparameters.size - end - target.mathparameters = targetmathparameters - end -end - -function constructors.beforecopyingcharacters(target,original) - -- can be used for additional tweaking -end - -function constructors.aftercopyingcharacters(target,original) - -- can be used for additional tweaking -end - -function constructors.enhanceparameters(parameters) - local xheight = parameters.x_height - local quad = parameters.quad - local space = parameters.space - local stretch = parameters.space_stretch - local shrink = parameters.space_shrink - local extra = parameters.extra_space - local slant = parameters.slant - parameters.xheight = xheight - parameters.spacestretch = stretch - parameters.spaceshrink = shrink - parameters.extraspace = extra - parameters.em = quad - parameters.ex = xheight - 
parameters.slantperpoint = slant - parameters.spacing = { - width = space, - stretch = stretch, - shrink = shrink, - extra = extra, - } -end - -function constructors.scale(tfmdata,specification) - local target = { } -- the new table - -- - if tonumber(specification) then - specification = { size = specification } - end - -- - local scaledpoints = specification.size - local relativeid = specification.relativeid - -- - local properties = tfmdata.properties or { } - local goodies = tfmdata.goodies or { } - local resources = tfmdata.resources or { } - local descriptions = tfmdata.descriptions or { } -- bad news if empty - local characters = tfmdata.characters or { } -- bad news if empty - local changed = tfmdata.changed or { } -- for base mode - local shared = tfmdata.shared or { } - local parameters = tfmdata.parameters or { } - local mathparameters = tfmdata.mathparameters or { } - -- - local targetcharacters = { } - local targetdescriptions = derivetable(descriptions) - local targetparameters = derivetable(parameters) - local targetproperties = derivetable(properties) - local targetgoodies = goodies -- we need to loop so no metatable - target.characters = targetcharacters - target.descriptions = targetdescriptions - target.parameters = targetparameters - -- target.mathparameters = targetmathparameters -- happens elsewhere - target.properties = targetproperties - target.goodies = targetgoodies - target.shared = shared - target.resources = resources - target.unscaled = tfmdata -- the original unscaled one - -- - -- specification.mathsize : 1=text 2=script 3=scriptscript - -- specification.textsize : natural (text)size - -- parameters.mathsize : 1=text 2=script 3=scriptscript >1000 enforced size (feature value other than yes) - -- - local mathsize = tonumber(specification.mathsize) or 0 - local textsize = tonumber(specification.textsize) or scaledpoints - local forcedsize = tonumber(parameters.mathsize ) or 0 - local extrafactor = tonumber(specification.factor ) or 1 - if (mathsize == 2 or forcedsize == 2) and parameters.scriptpercentage then - scaledpoints = parameters.scriptpercentage * textsize / 100 - elseif (mathsize == 3 or forcedsize == 3) and parameters.scriptscriptpercentage then - scaledpoints = parameters.scriptscriptpercentage * textsize / 100 - elseif forcedsize > 1000 then -- safeguard - scaledpoints = forcedsize - end - targetparameters.mathsize = mathsize -- context specific - targetparameters.textsize = textsize -- context specific - targetparameters.forcedsize = forcedsize -- context specific - targetparameters.extrafactor = extrafactor -- context specific - -- - local tounicode = resources.tounicode - local defaultwidth = resources.defaultwidth or 0 - local defaultheight = resources.defaultheight or 0 - local defaultdepth = resources.defaultdepth or 0 - local units = parameters.units or 1000 - -- - if target.fonts then - target.fonts = fastcopy(target.fonts) -- maybe we virtualize more afterwards - end - -- - -- boundary keys are no longer needed as we now have a string 'right_boundary' - -- that can be used in relevant tables (kerns and ligatures) ... 
not that I ever - -- used them - -- - -- boundarychar_label = 0, -- not needed - -- boundarychar = 65536, -- there is now a string 'right_boundary' - -- false_boundarychar = 65536, -- produces invalid tfm in luatex - -- - targetproperties.language = properties.language or "dflt" -- inherited - targetproperties.script = properties.script or "dflt" -- inherited - targetproperties.mode = properties.mode or "base" -- inherited - -- - local askedscaledpoints = scaledpoints - local scaledpoints, delta = constructors.calculatescale(tfmdata,scaledpoints) -- no shortcut, dan be redefined - -- - local hdelta = delta - local vdelta = delta - -- - target.designsize = parameters.designsize -- not really needed so it muight become obsolete - target.units_per_em = units -- just a trigger for the backend (does luatex use this? if not it will go) - -- - local direction = properties.direction or tfmdata.direction or 0 -- pointless, as we don't use omf fonts at all - target.direction = direction - properties.direction = direction - -- - target.size = scaledpoints - -- - target.encodingbytes = properties.encodingbytes or 1 - target.embedding = properties.embedding or "subset" - target.tounicode = 1 - target.cidinfo = properties.cidinfo - target.format = properties.format - -- - local fontname = properties.fontname or tfmdata.fontname -- for the moment we fall back on - local fullname = properties.fullname or tfmdata.fullname -- names in the tfmdata although - local filename = properties.filename or tfmdata.filename -- that is not the right place to - local psname = properties.psname or tfmdata.psname -- pass them - local name = properties.name or tfmdata.name - -- - if not psname or psname == "" then - -- name used in pdf file as well as for selecting subfont in ttc/dfont - psname = fontname or (fullname and fonts.names.cleanname(fullname)) - end - target.fontname = fontname - target.fullname = fullname - target.filename = filename - target.psname = psname - target.name = name - -- - -- inspect(properties) - -- - properties.fontname = fontname - properties.fullname = fullname - properties.filename = filename - properties.psname = psname - properties.name = name - -- expansion (hz) - local expansion = parameters.expansion - if expansion then - target.stretch = expansion.stretch - target.shrink = expansion.shrink - target.step = expansion.step - target.auto_expand = expansion.auto - end - -- protrusion - local protrusion = parameters.protrusion - if protrusion then - target.auto_protrude = protrusion.auto - end - -- widening - local extendfactor = parameters.extendfactor or 0 - if extendfactor ~= 0 and extendfactor ~= 1 then - hdelta = hdelta * extendfactor - target.extend = extendfactor * 1000 -- extent ? - else - target.extend = 1000 -- extent ? 
- end - -- slanting - local slantfactor = parameters.slantfactor or 0 - if slantfactor ~= 0 then - target.slant = slantfactor * 1000 - else - target.slant = 0 - end - -- - targetparameters.factor = delta - targetparameters.hfactor = hdelta - targetparameters.vfactor = vdelta - targetparameters.size = scaledpoints - targetparameters.units = units - targetparameters.scaledpoints = askedscaledpoints - -- - local isvirtual = properties.virtualized or tfmdata.type == "virtual" - local hasquality = target.auto_expand or target.auto_protrude - local hasitalics = properties.hasitalics - local autoitalicamount = properties.autoitalicamount - local stackmath = not properties.nostackmath - local nonames = properties.noglyphnames - local nodemode = properties.mode == "node" - -- - if changed and not next(changed) then - changed = false - end - -- - target.type = isvirtual and "virtual" or "real" - -- - target.postprocessors = tfmdata.postprocessors - -- - local targetslant = (parameters.slant or parameters[1] or 0) - local targetspace = (parameters.space or parameters[2] or 0)*hdelta - local targetspace_stretch = (parameters.space_stretch or parameters[3] or 0)*hdelta - local targetspace_shrink = (parameters.space_shrink or parameters[4] or 0)*hdelta - local targetx_height = (parameters.x_height or parameters[5] or 0)*vdelta - local targetquad = (parameters.quad or parameters[6] or 0)*hdelta - local targetextra_space = (parameters.extra_space or parameters[7] or 0)*hdelta - -- - targetparameters.slant = targetslant -- slantperpoint - targetparameters.space = targetspace - targetparameters.space_stretch = targetspace_stretch - targetparameters.space_shrink = targetspace_shrink - targetparameters.x_height = targetx_height - targetparameters.quad = targetquad - targetparameters.extra_space = targetextra_space - -- - local ascender = parameters.ascender - if ascender then - targetparameters.ascender = delta * ascender - end - local descender = parameters.descender - if descender then - targetparameters.descender = delta * descender - end - -- - constructors.enhanceparameters(targetparameters) -- official copies for us - -- - local protrusionfactor = (targetquad ~= 0 and 1000/targetquad) or 0 - local scaledwidth = defaultwidth * hdelta - local scaledheight = defaultheight * vdelta - local scaleddepth = defaultdepth * vdelta - -- - local hasmath = (properties.hasmath or next(mathparameters)) and true - -- - if hasmath then - constructors.assignmathparameters(target,tfmdata) -- does scaling and whatever is needed - properties.hasmath = true - target.nomath = false - target.MathConstants = target.mathparameters - else - properties.hasmath = false - target.nomath = true - target.mathparameters = nil -- nop - end - -- - local italickey = "italic" - local useitalics = true -- something context - -- - -- some context specific trickery (this will move to a plugin) - -- - if hasmath then - -- the latest luatex can deal with it itself so we now disable this - -- mechanism here - -- - -- if properties.mathitalics then - -- italickey = "italic_correction" - -- if trace_defining then - -- report_defining("math italics disabled for font %a, fullname %a, filename %a",name,fullname,filename) - -- end - -- end - autoitalicamount = false -- new - elseif properties.textitalics then - italickey = "italic_correction" - useitalics = false - if properties.delaytextitalics then - autoitalicamount = false - end - end - -- - -- end of context specific trickery - -- - if trace_defining then - report_defining("defining tfm, name %a, 
fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", - name,fullname,filename,hdelta,vdelta, - hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled") - end - -- - constructors.beforecopyingcharacters(target,tfmdata) - -- - local sharedkerns = { } - -- - -- we can have a dumb mode (basemode without math etc) that skips most - -- - for unicode, character in next, characters do - local chr, description, index, touni - if changed then - -- basemode hack (we try to catch missing tounicodes, e.g. needed for ssty in math cambria) - local c = changed[unicode] - if c then - description = descriptions[c] or descriptions[unicode] or character - character = characters[c] or character - index = description.index or c - if tounicode then - touni = tounicode[index] -- nb: index! - if not touni then -- goodie - local d = descriptions[unicode] or characters[unicode] - local i = d.index or unicode - touni = tounicode[i] -- nb: index! - end - end - else - description = descriptions[unicode] or character - index = description.index or unicode - if tounicode then - touni = tounicode[index] -- nb: index! - end - end - else - description = descriptions[unicode] or character - index = description.index or unicode - if tounicode then - touni = tounicode[index] -- nb: index! - end - end - local width = description.width - local height = description.height - local depth = description.depth - if width then width = hdelta*width else width = scaledwidth end - if height then height = vdelta*height else height = scaledheight end - -- if depth then depth = vdelta*depth else depth = scaleddepth end - if depth and depth ~= 0 then - depth = delta*depth - if nonames then - chr = { - index = index, - height = height, - depth = depth, - width = width, - } - else - chr = { - name = description.name, - index = index, - height = height, - depth = depth, - width = width, - } - end - else - -- this saves a little bit of memory time and memory, esp for big cjk fonts - if nonames then - chr = { - index = index, - height = height, - width = width, - } - else - chr = { - name = description.name, - index = index, - height = height, - width = width, - } - end - end - if touni then - chr.tounicode = touni - end - if hasquality then - -- we could move these calculations elsewhere (saves calculations) - local ve = character.expansion_factor - if ve then - chr.expansion_factor = ve*1000 -- expansionfactor, hm, can happen elsewhere - end - local vl = character.left_protruding - if vl then - chr.left_protruding = protrusionfactor*width*vl - end - local vr = character.right_protruding - if vr then - chr.right_protruding = protrusionfactor*width*vr - end - end - -- - if autoitalicamount then - local vi = description.italic - if not vi then - local vi = description.boundingbox[3] - description.width + autoitalicamount - if vi > 0 then -- < 0 indicates no overshoot or a very small auto italic - chr[italickey] = vi*hdelta - end - elseif vi ~= 0 then - chr[italickey] = vi*hdelta - end - elseif hasitalics then - local vi = description.italic - if vi and vi ~= 0 then - chr[italickey] = vi*hdelta - end - end - -- to be tested - if hasmath then - -- todo, just operate on descriptions.math - local vn = character.next - if vn then - chr.next = vn - else - local vv = character.vert_variants - if vv then - local t = { } - for i=1,#vv do - local vvi = vv[i] - t[i] = { - ["start"] = (vvi["start"] or 0)*vdelta, - ["end"] = (vvi["end"] or 0)*vdelta, - ["advance"] = (vvi["advance"] or 0)*vdelta, - ["extender"] = 
vvi["extender"], - ["glyph"] = vvi["glyph"], - } - end - chr.vert_variants = t - else - local hv = character.horiz_variants - if hv then - local t = { } - for i=1,#hv do - local hvi = hv[i] - t[i] = { - ["start"] = (hvi["start"] or 0)*hdelta, - ["end"] = (hvi["end"] or 0)*hdelta, - ["advance"] = (hvi["advance"] or 0)*hdelta, - ["extender"] = hvi["extender"], - ["glyph"] = hvi["glyph"], - } - end - chr.horiz_variants = t - end - end - end - local va = character.top_accent - if va then - chr.top_accent = vdelta*va - end - if stackmath then - local mk = character.mathkerns -- not in math ? - if mk then - local kerns = { } - local v = mk.top_right if v then local k = { } for i=1,#v do local vi = v[i] - k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } - end kerns.top_right = k end - local v = mk.top_left if v then local k = { } for i=1,#v do local vi = v[i] - k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } - end kerns.top_left = k end - local v = mk.bottom_left if v then local k = { } for i=1,#v do local vi = v[i] - k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } - end kerns.bottom_left = k end - local v = mk.bottom_right if v then local k = { } for i=1,#v do local vi = v[i] - k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } - end kerns.bottom_right = k end - chr.mathkern = kerns -- singular -> should be patched in luatex ! - end - end - end - if not nodemode then - local vk = character.kerns - if vk then - local s = sharedkerns[vk] - if not s then - s = { } - for k,v in next, vk do s[k] = v*hdelta end - sharedkerns[vk] = s - end - chr.kerns = s - end - local vl = character.ligatures - if vl then - if true then - chr.ligatures = vl -- shared - else - local tt = { } - for i,l in next, vl do - tt[i] = l - end - chr.ligatures = tt - end - end - end - if isvirtual then - local vc = character.commands - if vc then - -- we assume non scaled commands here - -- tricky .. 
we need to scale pseudo math glyphs too - -- which is why we deal with rules too - local ok = false - for i=1,#vc do - local key = vc[i][1] - if key == "right" or key == "down" then - ok = true - break - end - end - if ok then - local tt = { } - for i=1,#vc do - local ivc = vc[i] - local key = ivc[1] - if key == "right" then - tt[i] = { key, ivc[2]*hdelta } - elseif key == "down" then - tt[i] = { key, ivc[2]*vdelta } - elseif key == "rule" then - tt[i] = { key, ivc[2]*vdelta, ivc[3]*hdelta } - else -- not comment - tt[i] = ivc -- shared since in cache and untouched - end - end - chr.commands = tt - else - chr.commands = vc - end - chr.index = nil - end - end - targetcharacters[unicode] = chr - end - -- - constructors.aftercopyingcharacters(target,tfmdata) - -- - return target -end - -function constructors.finalize(tfmdata) - if tfmdata.properties and tfmdata.properties.finalized then - return - end - -- - if not tfmdata.characters then - return nil - end - -- - if not tfmdata.goodies then - tfmdata.goodies = { } -- context specific - end - -- - local parameters = tfmdata.parameters - if not parameters then - return nil - end - -- - if not parameters.expansion then - parameters.expansion = { - stretch = tfmdata.stretch or 0, - shrink = tfmdata.shrink or 0, - step = tfmdata.step or 0, - auto = tfmdata.auto_expand or false, - } - end - -- - if not parameters.protrusion then - parameters.protrusion = { - auto = auto_protrude - } - end - -- - if not parameters.size then - parameters.size = tfmdata.size - end - -- - if not parameters.extendfactor then - parameters.extendfactor = tfmdata.extend or 0 - end - -- - if not parameters.slantfactor then - parameters.slantfactor = tfmdata.slant or 0 - end - -- - if not parameters.designsize then - parameters.designsize = tfmdata.designsize or 655360 - end - -- - if not parameters.units then - parameters.units = tfmdata.units_per_em or 1000 - end - -- - if not tfmdata.descriptions then - local descriptions = { } -- yes or no - setmetatableindex(descriptions, function(t,k) local v = { } t[k] = v return v end) - tfmdata.descriptions = descriptions - end - -- - local properties = tfmdata.properties - if not properties then - properties = { } - tfmdata.properties = properties - end - -- - if not properties.virtualized then - properties.virtualized = tfmdata.type == "virtual" - end - -- - if not tfmdata.properties then - tfmdata.properties = { - fontname = tfmdata.fontname, - filename = tfmdata.filename, - fullname = tfmdata.fullname, - name = tfmdata.name, - psname = tfmdata.psname, - -- - encodingbytes = tfmdata.encodingbytes or 1, - embedding = tfmdata.embedding or "subset", - tounicode = tfmdata.tounicode or 1, - cidinfo = tfmdata.cidinfo or nil, - format = tfmdata.format or "type1", - direction = tfmdata.direction or 0, - } - end - if not tfmdata.resources then - tfmdata.resources = { } - end - if not tfmdata.shared then - tfmdata.shared = { } - end - -- - -- tfmdata.fonts - -- tfmdata.unscaled - -- - if not properties.hasmath then - properties.hasmath = not tfmdata.nomath - end - -- - tfmdata.MathConstants = nil - tfmdata.postprocessors = nil - -- - tfmdata.fontname = nil - tfmdata.filename = nil - tfmdata.fullname = nil - tfmdata.name = nil -- most tricky part - tfmdata.psname = nil - -- - tfmdata.encodingbytes = nil - tfmdata.embedding = nil - tfmdata.tounicode = nil - tfmdata.cidinfo = nil - tfmdata.format = nil - tfmdata.direction = nil - tfmdata.type = nil - tfmdata.nomath = nil - tfmdata.designsize = nil - -- - tfmdata.size = nil - tfmdata.stretch 
= nil - tfmdata.shrink = nil - tfmdata.step = nil - tfmdata.auto_expand = nil - tfmdata.auto_protrude = nil - tfmdata.extend = nil - tfmdata.slant = nil - tfmdata.units_per_em = nil - -- - properties.finalized = true - -- - return tfmdata -end - ---[[ldx-- -

A unique hash value is generated by:
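As a hedged illustration (the feature names are made up), the hashfeatures and hashmethods.normal helpers below turn a specification into a sorted key=value string per feature category:

    local spec = { features = { normal = { mode = "node", liga = true, kern = true } } }
    print(constructors.hashfeatures(spec))   -- normal:kern=true+liga=true+mode=node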

---ldx]]-- - -local hashmethods = { } -constructors.hashmethods = hashmethods - -function constructors.hashfeatures(specification) -- will be overloaded - local features = specification.features - if features then - local t, tn = { }, 0 - for category, list in next, features do - if next(list) then - local hasher = hashmethods[category] - if hasher then - local hash = hasher(list) - if hash then - tn = tn + 1 - t[tn] = category .. ":" .. hash - end - end - end - end - if tn > 0 then - return concat(t," & ") - end - end - return "unknown" -end - -hashmethods.normal = function(list) - local s = { } - local n = 0 - for k, v in next, list do - if not k then - -- no need to add to hash - elseif k == "number" or k == "features" then - -- no need to add to hash (maybe we need a skip list) - else - n = n + 1 - s[n] = k - end - end - if n > 0 then - sort(s) - for i=1,n do - local k = s[i] - s[i] = k .. '=' .. tostring(list[k]) - end - return concat(s,"+") - end -end - ---[[ldx-- -

In principle we can share tfm tables when we are in node mode for a font, but then we need to define a font switch as an id/attr switch, which is no fun, so in that case users can best use dynamic features ... so, we will not use that speedup. Okay, when we get rid of base mode we can optimize even further by sharing, but then we lose our testcases for MkIV.
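Right below, hashinstance combines that feature hash with the requested size; a small sketch with made-up values:

    local spec = { size = 655360, features = { normal = { mode = "base", liga = true } } }
    print(constructors.hashinstance(spec))   -- normal:liga=true+mode=base @ 655360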

---ldx]]-- - -function constructors.hashinstance(specification,force) - local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks - if force or not hash then - hash = constructors.hashfeatures(specification) - specification.hash = hash - end - if size < 1000 and designsizes[hash] then - size = math.round(constructors.scaled(size,designsizes[hash])) - specification.size = size - end - -- local mathsize = specification.mathsize or 0 - -- if mathsize > 0 then - -- local textsize = specification.textsize - -- if fallbacks then - -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ] @ ' .. fallbacks - -- else - -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ]' - -- end - -- else - if fallbacks then - return hash .. ' @ ' .. tostring(size) .. ' @ ' .. fallbacks - else - return hash .. ' @ ' .. tostring(size) - end - -- end -end - -function constructors.setname(tfmdata,specification) -- todo: get specification from tfmdata - if constructors.namemode == "specification" then - -- not to be used in context ! - local specname = specification.specification - if specname then - tfmdata.properties.name = specname - if trace_defining then - report_otf("overloaded fontname %a",specname) - end - end - end -end - -function constructors.checkedfilename(data) - local foundfilename = data.foundfilename - if not foundfilename then - local askedfilename = data.filename or "" - if askedfilename ~= "" then - askedfilename = resolvers.resolve(askedfilename) -- no shortcut - foundfilename = resolvers.findbinfile(askedfilename,"") or "" - if foundfilename == "" then - report_defining("source file %a is not found",askedfilename) - foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or "" - if foundfilename ~= "" then - report_defining("using source file %a due to cache mismatch",foundfilename) - end - end - end - data.foundfilename = foundfilename - end - return foundfilename -end - -local formats = allocate() -fonts.formats = formats - -setmetatableindex(formats, function(t,k) - local l = lower(k) - if rawget(t,k) then - t[k] = l - return l - end - return rawget(t,file.suffix(l)) -end) - -local locations = { } - -local function setindeed(mode,target,group,name,action,position) - local t = target[mode] - if not t then - report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode) - os.exit() - elseif position then - -- todo: remove existing - insert(t, position, { name = name, action = action }) - else - for i=1,#t do - local ti = t[i] - if ti.name == name then - ti.action = action - return - end - end - insert(t, { name = name, action = action }) - end -end - -local function set(group,name,target,source) - target = target[group] - if not target then - report_defining("fatal target error in setting feature %a, group %a",name,group) - os.exit() - end - local source = source[group] - if not source then - report_defining("fatal source error in setting feature %a, group %a",name,group) - os.exit() - end - local node = source.node - local base = source.base - local position = source.position - if node then - setindeed("node",target,group,name,node,position) - end - if base then - setindeed("base",target,group,name,base,position) - end -end - -local function register(where,specification) - local name = specification.name - if name and name ~= "" then - local default = specification.default - local description = 
specification.description - local initializers = specification.initializers - local processors = specification.processors - local manipulators = specification.manipulators - local modechecker = specification.modechecker - if default then - where.defaults[name] = default - end - if description and description ~= "" then - where.descriptions[name] = description - end - if initializers then - set('initializers',name,where,specification) - end - if processors then - set('processors', name,where,specification) - end - if manipulators then - set('manipulators',name,where,specification) - end - if modechecker then - where.modechecker = modechecker - end - end -end - -constructors.registerfeature = register - -function constructors.getfeatureaction(what,where,mode,name) - what = handlers[what].features - if what then - where = what[where] - if where then - mode = where[mode] - if mode then - for i=1,#mode do - local m = mode[i] - if m.name == name then - return m.action - end - end - end - end - end -end - -function constructors.newhandler(what) -- could be a metatable newindex - local handler = handlers[what] - if not handler then - handler = { } - handlers[what] = handler - end - return handler -end - -function constructors.newfeatures(what) -- could be a metatable newindex - local handler = handlers[what] - local features = handler.features - if not features then - local tables = handler.tables -- can be preloaded - local statistics = handler.statistics -- can be preloaded - features = allocate { - defaults = { }, - descriptions = tables and tables.features or { }, - used = statistics and statistics.usedfeatures or { }, - initializers = { base = { }, node = { } }, - processors = { base = { }, node = { } }, - manipulators = { base = { }, node = { } }, - } - features.register = function(specification) return register(features,specification) end - handler.features = features -- will also become hidden - end - return features -end - ---[[ldx-- -

We need to check for default features. For this we provide a helper function.
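A usage sketch for the checkedfeatures helper defined below (assuming the otf handler has registered its features, as font-otf.lua does): the caller's table is copied and every registered default that was not set explicitly is merged in:

    local features = constructors.checkedfeatures("otf", { liga = false })
    -- features.liga stays false; all other registered otf defaults are filled in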

---ldx]]-- - -function constructors.checkedfeatures(what,features) - local defaults = handlers[what].features.defaults - if features and next(features) then - features = fastcopy(features) -- can be inherited (mt) but then no loops possible - for key, value in next, defaults do - if features[key] == nil then - features[key] = value - end - end - return features - else - return fastcopy(defaults) -- we can change features in place - end -end - --- before scaling - -function constructors.initializefeatures(what,tfmdata,features,trace,report) - if features and next(features) then - local properties = tfmdata.properties or { } -- brrr - local whathandler = handlers[what] - local whatfeatures = whathandler.features - local whatinitializers = whatfeatures.initializers - local whatmodechecker = whatfeatures.modechecker - -- properties.mode can be enforces (for instance in font-otd) - local mode = properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base" - properties.mode = mode -- also status - features.mode = mode -- both properties.mode or features.mode can be changed - -- - local done = { } - while true do - local redo = false - local initializers = whatfeatures.initializers[mode] - if initializers then - for i=1,#initializers do - local step = initializers[i] - local feature = step.name --- we could intercept mode here .. needs a rewrite of this whole loop then but it's cleaner that way - local value = features[feature] - if not value then - -- disabled - elseif done[feature] then - -- already done - else - local action = step.action - if trace then - report("initializing feature %a to %a for mode %a for font %a",feature, - value,mode,tfmdata.properties.fullname) - end - action(tfmdata,value,features) -- can set mode (e.g. 
goodies) so it can trigger a restart - if mode ~= properties.mode or mode ~= features.mode then - if whatmodechecker then - properties.mode = whatmodechecker(tfmdata,features,properties.mode) -- force checking - features.mode = properties.mode - end - if mode ~= properties.mode then - mode = properties.mode - redo = true - end - end - done[feature] = true - end - if redo then - break - end - end - if not redo then - break - end - else - break - end - end - properties.mode = mode -- to be sure - return true - else - return false - end -end - --- while typesetting - -function constructors.collectprocessors(what,tfmdata,features,trace,report) - local processes, nofprocesses = { }, 0 - if features and next(features) then - local properties = tfmdata.properties - local whathandler = handlers[what] - local whatfeatures = whathandler.features - local whatprocessors = whatfeatures.processors - local processors = whatprocessors[properties.mode] - if processors then - for i=1,#processors do - local step = processors[i] - local feature = step.name - if features[feature] then - local action = step.action - if trace then - report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) - end - if action then - nofprocesses = nofprocesses + 1 - processes[nofprocesses] = action - end - end - end - elseif trace then - report("no feature processors for mode %a for font %a",mode,tfmdata.properties.fullname) - end - end - return processes -end - --- after scaling - -function constructors.applymanipulators(what,tfmdata,features,trace,report) - if features and next(features) then - local properties = tfmdata.properties - local whathandler = handlers[what] - local whatfeatures = whathandler.features - local whatmanipulators = whatfeatures.manipulators - local manipulators = whatmanipulators[properties.mode] - if manipulators then - for i=1,#manipulators do - local step = manipulators[i] - local feature = step.name - local value = features[feature] - if value then - local action = step.action - if trace then - report("applying feature manipulator %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) - end - if action then - action(tfmdata,feature,value) - end - end - end - end - end -end +if not modules then modules = { } end modules ['font-con'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- some names of table entries will be changed (no _) + +local next, tostring, rawget = next, tostring, rawget +local format, match, lower, gsub = string.format, string.match, string.lower, string.gsub +local utfbyte = utf.byte +local sort, insert, concat, sortedkeys, serialize, fastcopy = table.sort, table.insert, table.concat, table.sortedkeys, table.serialize, table.fastcopy +local derivetable = table.derive + +local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) +local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end) + +local report_defining = logs.reporter("fonts","defining") + +-- watch out: no negative depths and negative eights permitted in regular fonts + +--[[ldx-- +

Here we only implement a few helper functions.

+--ldx]]-- + +local fonts = fonts +local constructors = fonts.constructors or { } +fonts.constructors = constructors +local handlers = fonts.handlers or { } -- can have preloaded tables +fonts.handlers = handlers + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex + +-- will be directives + +constructors.dontembed = allocate() +constructors.autocleanup = true +constructors.namemode = "fullpath" -- will be a function + +constructors.version = 1.01 +constructors.cache = containers.define("fonts", "constructors", constructors.version, false) + +constructors.privateoffset = 0xF0000 -- 0x10FFFF + +-- Some experimental helpers (handy for tracing): +-- +-- todo: extra: +-- +-- extra_space => space.extra +-- space => space.width +-- space_stretch => space.stretch +-- space_shrink => space.shrink + +-- We do keep the x-height, extra_space, space_shrink and space_stretch +-- around as these are low level official names. + +constructors.keys = { + properties = { + encodingbytes = "number", + embedding = "number", + cidinfo = { + }, + format = "string", + fontname = "string", + fullname = "string", + filename = "filename", + psname = "string", + name = "string", + virtualized = "boolean", + hasitalics = "boolean", + autoitalicamount = "basepoints", + nostackmath = "boolean", + noglyphnames = "boolean", + mode = "string", + hasmath = "boolean", + mathitalics = "boolean", + textitalics = "boolean", + finalized = "boolean", + }, + parameters = { + mathsize = "number", + scriptpercentage = "float", + scriptscriptpercentage = "float", + units = "cardinal", + designsize = "scaledpoints", + expansion = { + stretch = "integerscale", -- might become float + shrink = "integerscale", -- might become float + step = "integerscale", -- might become float + auto = "boolean", + }, + protrusion = { + auto = "boolean", + }, + slantfactor = "float", + extendfactor = "float", + factor = "float", + hfactor = "float", + vfactor = "float", + size = "scaledpoints", + units = "scaledpoints", + scaledpoints = "scaledpoints", + slantperpoint = "scaledpoints", + spacing = { + width = "scaledpoints", + stretch = "scaledpoints", + shrink = "scaledpoints", + extra = "scaledpoints", + }, + xheight = "scaledpoints", + quad = "scaledpoints", + ascender = "scaledpoints", + descender = "scaledpoints", + synonyms = { + space = "spacing.width", + spacestretch = "spacing.stretch", + spaceshrink = "spacing.shrink", + extraspace = "spacing.extra", + x_height = "xheight", + space_stretch = "spacing.stretch", + space_shrink = "spacing.shrink", + extra_space = "spacing.extra", + em = "quad", + ex = "xheight", + slant = "slantperpoint", + }, + }, + description = { + width = "basepoints", + height = "basepoints", + depth = "basepoints", + boundingbox = { }, + }, + character = { + width = "scaledpoints", + height = "scaledpoints", + depth = "scaledpoints", + italic = "scaledpoints", + }, +} + +-- This might become an interface: + +local designsizes = allocate() +constructors.designsizes = designsizes +local loadedfonts = allocate() +constructors.loadedfonts = loadedfonts + +--[[ldx-- +

We need to normalize the scale factor (in scaled points). This has to do with the fact that TeX uses a negative multiple of 1000 as a signal for a font scaled based on the design size.

+--ldx]]-- + +local factors = { + pt = 65536.0, + bp = 65781.8, +} + +function constructors.setfactor(f) + constructors.factor = factors[f or 'pt'] or factors.pt +end + +constructors.setfactor() + +function constructors.scaled(scaledpoints, designsize) -- handles designsize in sp as well + if scaledpoints < 0 then + if designsize then + local factor = constructors.factor + if designsize > factor then -- or just 1000 / when? mp? + return (- scaledpoints/1000) * designsize -- sp's + else + return (- scaledpoints/1000) * designsize * factor + end + else + return (- scaledpoints/1000) * 10 * factor + end + else + return scaledpoints + end +end + +--[[ldx-- +

Beware, the boundingbox is passed as reference so we may not overwrite it in the process; numbers are of course copies. Here 65536 equals 1pt. (Due to excessive memory usage in CJK fonts, we no longer pass the boundingbox.)

+--ldx]]-- + +-- The scaler is only used for otf and afm and virtual fonts. If +-- a virtual font has italic correction make sure to set the +-- hasitalics flag. Some more flags will be added in +-- the future. + +--[[ldx-- +

The reason why the scaler was originally split is that for a while we experimented with a Lua helper function. However, in practice the calls are too slow to make this profitable and the Lua-based variant was just faster. A day wasted, but an experience richer.

+--ldx]]-- + +-- we can get rid of the tfm instance when we have fast access to the +-- scaled character dimensions at the tex end, e.g. a fontobject.width +-- actually we already have some of that now as virtual keys in glyphs +-- +-- flushing the kern and ligature tables from memory saves a lot (only +-- base mode) but it complicates vf building where the new characters +-- demand this data .. solution: functions that access them + +function constructors.cleanuptable(tfmdata) + if constructors.autocleanup and tfmdata.properties.virtualized then + for k, v in next, tfmdata.characters do + if v.commands then v.commands = nil end + -- if v.kerns then v.kerns = nil end + end + end +end + +-- experimental, sharing kerns (unscaled and scaled) saves memory +-- local sharedkerns, basekerns = constructors.check_base_kerns(tfmdata) +-- loop over descriptions (afm and otf have descriptions, tfm not) +-- there is no need (yet) to assign a value to chr.tonunicode + +-- constructors.prepare_base_kerns(tfmdata) -- optimalization + +-- we have target.name=metricfile and target.fullname=RealName and target.filename=diskfilename +-- when collapsing fonts, luatex looks as both target.name and target.fullname as ttc files +-- can have multiple subfonts + +function constructors.calculatescale(tfmdata,scaledpoints) + local parameters = tfmdata.parameters + if scaledpoints < 0 then + scaledpoints = (- scaledpoints/1000) * (tfmdata.designsize or parameters.designsize) -- already in sp + end + return scaledpoints, scaledpoints / (parameters.units or 1000) -- delta +end + +local unscaled = { + ScriptPercentScaleDown = true, + ScriptScriptPercentScaleDown = true, + RadicalDegreeBottomRaisePercent = true +} + +function constructors.assignmathparameters(target,original) -- simple variant, not used in context + -- when a tfm file is loaded, it has already been scaled + -- and it never enters the scaled so this is otf only and + -- even then we do some extra in the context math plugins + local mathparameters = original.mathparameters + if mathparameters and next(mathparameters) then + local targetparameters = target.parameters + local targetproperties = target.properties + local targetmathparameters = { } + local factor = targetproperties.math_is_scaled and 1 or targetparameters.factor + for name, value in next, mathparameters do + if unscaled[name] then + targetmathparameters[name] = value + else + targetmathparameters[name] = value * factor + end + end + if not targetmathparameters.FractionDelimiterSize then + targetmathparameters.FractionDelimiterSize = 1.01 * targetparameters.size + end + if not mathparameters.FractionDelimiterDisplayStyleSize then + targetmathparameters.FractionDelimiterDisplayStyleSize = 2.40 * targetparameters.size + end + target.mathparameters = targetmathparameters + end +end + +function constructors.beforecopyingcharacters(target,original) + -- can be used for additional tweaking +end + +function constructors.aftercopyingcharacters(target,original) + -- can be used for additional tweaking +end + +function constructors.enhanceparameters(parameters) + local xheight = parameters.x_height + local quad = parameters.quad + local space = parameters.space + local stretch = parameters.space_stretch + local shrink = parameters.space_shrink + local extra = parameters.extra_space + local slant = parameters.slant + parameters.xheight = xheight + parameters.spacestretch = stretch + parameters.spaceshrink = shrink + parameters.extraspace = extra + parameters.em = quad + parameters.ex = xheight + 
parameters.slantperpoint = slant + parameters.spacing = { + width = space, + stretch = stretch, + shrink = shrink, + extra = extra, + } +end + +function constructors.scale(tfmdata,specification) + local target = { } -- the new table + -- + if tonumber(specification) then + specification = { size = specification } + end + -- + local scaledpoints = specification.size + local relativeid = specification.relativeid + -- + local properties = tfmdata.properties or { } + local goodies = tfmdata.goodies or { } + local resources = tfmdata.resources or { } + local descriptions = tfmdata.descriptions or { } -- bad news if empty + local characters = tfmdata.characters or { } -- bad news if empty + local changed = tfmdata.changed or { } -- for base mode + local shared = tfmdata.shared or { } + local parameters = tfmdata.parameters or { } + local mathparameters = tfmdata.mathparameters or { } + -- + local targetcharacters = { } + local targetdescriptions = derivetable(descriptions) + local targetparameters = derivetable(parameters) + local targetproperties = derivetable(properties) + local targetgoodies = goodies -- we need to loop so no metatable + target.characters = targetcharacters + target.descriptions = targetdescriptions + target.parameters = targetparameters + -- target.mathparameters = targetmathparameters -- happens elsewhere + target.properties = targetproperties + target.goodies = targetgoodies + target.shared = shared + target.resources = resources + target.unscaled = tfmdata -- the original unscaled one + -- + -- specification.mathsize : 1=text 2=script 3=scriptscript + -- specification.textsize : natural (text)size + -- parameters.mathsize : 1=text 2=script 3=scriptscript >1000 enforced size (feature value other than yes) + -- + local mathsize = tonumber(specification.mathsize) or 0 + local textsize = tonumber(specification.textsize) or scaledpoints + local forcedsize = tonumber(parameters.mathsize ) or 0 + local extrafactor = tonumber(specification.factor ) or 1 + if (mathsize == 2 or forcedsize == 2) and parameters.scriptpercentage then + scaledpoints = parameters.scriptpercentage * textsize / 100 + elseif (mathsize == 3 or forcedsize == 3) and parameters.scriptscriptpercentage then + scaledpoints = parameters.scriptscriptpercentage * textsize / 100 + elseif forcedsize > 1000 then -- safeguard + scaledpoints = forcedsize + end + targetparameters.mathsize = mathsize -- context specific + targetparameters.textsize = textsize -- context specific + targetparameters.forcedsize = forcedsize -- context specific + targetparameters.extrafactor = extrafactor -- context specific + -- + local tounicode = resources.tounicode + local defaultwidth = resources.defaultwidth or 0 + local defaultheight = resources.defaultheight or 0 + local defaultdepth = resources.defaultdepth or 0 + local units = parameters.units or 1000 + -- + if target.fonts then + target.fonts = fastcopy(target.fonts) -- maybe we virtualize more afterwards + end + -- + -- boundary keys are no longer needed as we now have a string 'right_boundary' + -- that can be used in relevant tables (kerns and ligatures) ... 
not that I ever + -- used them + -- + -- boundarychar_label = 0, -- not needed + -- boundarychar = 65536, -- there is now a string 'right_boundary' + -- false_boundarychar = 65536, -- produces invalid tfm in luatex + -- + targetproperties.language = properties.language or "dflt" -- inherited + targetproperties.script = properties.script or "dflt" -- inherited + targetproperties.mode = properties.mode or "base" -- inherited + -- + local askedscaledpoints = scaledpoints + local scaledpoints, delta = constructors.calculatescale(tfmdata,scaledpoints) -- no shortcut, dan be redefined + -- + local hdelta = delta + local vdelta = delta + -- + target.designsize = parameters.designsize -- not really needed so it muight become obsolete + target.units_per_em = units -- just a trigger for the backend (does luatex use this? if not it will go) + -- + local direction = properties.direction or tfmdata.direction or 0 -- pointless, as we don't use omf fonts at all + target.direction = direction + properties.direction = direction + -- + target.size = scaledpoints + -- + target.encodingbytes = properties.encodingbytes or 1 + target.embedding = properties.embedding or "subset" + target.tounicode = 1 + target.cidinfo = properties.cidinfo + target.format = properties.format + -- + local fontname = properties.fontname or tfmdata.fontname -- for the moment we fall back on + local fullname = properties.fullname or tfmdata.fullname -- names in the tfmdata although + local filename = properties.filename or tfmdata.filename -- that is not the right place to + local psname = properties.psname or tfmdata.psname -- pass them + local name = properties.name or tfmdata.name + -- + if not psname or psname == "" then + -- name used in pdf file as well as for selecting subfont in ttc/dfont + psname = fontname or (fullname and fonts.names.cleanname(fullname)) + end + target.fontname = fontname + target.fullname = fullname + target.filename = filename + target.psname = psname + target.name = name + -- + -- inspect(properties) + -- + properties.fontname = fontname + properties.fullname = fullname + properties.filename = filename + properties.psname = psname + properties.name = name + -- expansion (hz) + local expansion = parameters.expansion + if expansion then + target.stretch = expansion.stretch + target.shrink = expansion.shrink + target.step = expansion.step + target.auto_expand = expansion.auto + end + -- protrusion + local protrusion = parameters.protrusion + if protrusion then + target.auto_protrude = protrusion.auto + end + -- widening + local extendfactor = parameters.extendfactor or 0 + if extendfactor ~= 0 and extendfactor ~= 1 then + hdelta = hdelta * extendfactor + target.extend = extendfactor * 1000 -- extent ? + else + target.extend = 1000 -- extent ? 
+ end + -- slanting + local slantfactor = parameters.slantfactor or 0 + if slantfactor ~= 0 then + target.slant = slantfactor * 1000 + else + target.slant = 0 + end + -- + targetparameters.factor = delta + targetparameters.hfactor = hdelta + targetparameters.vfactor = vdelta + targetparameters.size = scaledpoints + targetparameters.units = units + targetparameters.scaledpoints = askedscaledpoints + -- + local isvirtual = properties.virtualized or tfmdata.type == "virtual" + local hasquality = target.auto_expand or target.auto_protrude + local hasitalics = properties.hasitalics + local autoitalicamount = properties.autoitalicamount + local stackmath = not properties.nostackmath + local nonames = properties.noglyphnames + local nodemode = properties.mode == "node" + -- + if changed and not next(changed) then + changed = false + end + -- + target.type = isvirtual and "virtual" or "real" + -- + target.postprocessors = tfmdata.postprocessors + -- + local targetslant = (parameters.slant or parameters[1] or 0) + local targetspace = (parameters.space or parameters[2] or 0)*hdelta + local targetspace_stretch = (parameters.space_stretch or parameters[3] or 0)*hdelta + local targetspace_shrink = (parameters.space_shrink or parameters[4] or 0)*hdelta + local targetx_height = (parameters.x_height or parameters[5] or 0)*vdelta + local targetquad = (parameters.quad or parameters[6] or 0)*hdelta + local targetextra_space = (parameters.extra_space or parameters[7] or 0)*hdelta + -- + targetparameters.slant = targetslant -- slantperpoint + targetparameters.space = targetspace + targetparameters.space_stretch = targetspace_stretch + targetparameters.space_shrink = targetspace_shrink + targetparameters.x_height = targetx_height + targetparameters.quad = targetquad + targetparameters.extra_space = targetextra_space + -- + local ascender = parameters.ascender + if ascender then + targetparameters.ascender = delta * ascender + end + local descender = parameters.descender + if descender then + targetparameters.descender = delta * descender + end + -- + constructors.enhanceparameters(targetparameters) -- official copies for us + -- + local protrusionfactor = (targetquad ~= 0 and 1000/targetquad) or 0 + local scaledwidth = defaultwidth * hdelta + local scaledheight = defaultheight * vdelta + local scaleddepth = defaultdepth * vdelta + -- + local hasmath = (properties.hasmath or next(mathparameters)) and true + -- + if hasmath then + constructors.assignmathparameters(target,tfmdata) -- does scaling and whatever is needed + properties.hasmath = true + target.nomath = false + target.MathConstants = target.mathparameters + else + properties.hasmath = false + target.nomath = true + target.mathparameters = nil -- nop + end + -- + local italickey = "italic" + local useitalics = true -- something context + -- + -- some context specific trickery (this will move to a plugin) + -- + if hasmath then + -- the latest luatex can deal with it itself so we now disable this + -- mechanism here + -- + -- if properties.mathitalics then + -- italickey = "italic_correction" + -- if trace_defining then + -- report_defining("math italics disabled for font %a, fullname %a, filename %a",name,fullname,filename) + -- end + -- end + autoitalicamount = false -- new + elseif properties.textitalics then + italickey = "italic_correction" + useitalics = false + if properties.delaytextitalics then + autoitalicamount = false + end + end + -- + -- end of context specific trickery + -- + if trace_defining then + report_defining("defining tfm, name %a, 
fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", + name,fullname,filename,hdelta,vdelta, + hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled") + end + -- + constructors.beforecopyingcharacters(target,tfmdata) + -- + local sharedkerns = { } + -- + -- we can have a dumb mode (basemode without math etc) that skips most + -- + for unicode, character in next, characters do + local chr, description, index, touni + if changed then + -- basemode hack (we try to catch missing tounicodes, e.g. needed for ssty in math cambria) + local c = changed[unicode] + if c then + description = descriptions[c] or descriptions[unicode] or character + character = characters[c] or character + index = description.index or c + if tounicode then + touni = tounicode[index] -- nb: index! + if not touni then -- goodie + local d = descriptions[unicode] or characters[unicode] + local i = d.index or unicode + touni = tounicode[i] -- nb: index! + end + end + else + description = descriptions[unicode] or character + index = description.index or unicode + if tounicode then + touni = tounicode[index] -- nb: index! + end + end + else + description = descriptions[unicode] or character + index = description.index or unicode + if tounicode then + touni = tounicode[index] -- nb: index! + end + end + local width = description.width + local height = description.height + local depth = description.depth + if width then width = hdelta*width else width = scaledwidth end + if height then height = vdelta*height else height = scaledheight end + -- if depth then depth = vdelta*depth else depth = scaleddepth end + if depth and depth ~= 0 then + depth = delta*depth + if nonames then + chr = { + index = index, + height = height, + depth = depth, + width = width, + } + else + chr = { + name = description.name, + index = index, + height = height, + depth = depth, + width = width, + } + end + else + -- this saves a little bit of memory time and memory, esp for big cjk fonts + if nonames then + chr = { + index = index, + height = height, + width = width, + } + else + chr = { + name = description.name, + index = index, + height = height, + width = width, + } + end + end + if touni then + chr.tounicode = touni + end + if hasquality then + -- we could move these calculations elsewhere (saves calculations) + local ve = character.expansion_factor + if ve then + chr.expansion_factor = ve*1000 -- expansionfactor, hm, can happen elsewhere + end + local vl = character.left_protruding + if vl then + chr.left_protruding = protrusionfactor*width*vl + end + local vr = character.right_protruding + if vr then + chr.right_protruding = protrusionfactor*width*vr + end + end + -- + if autoitalicamount then + local vi = description.italic + if not vi then + local vi = description.boundingbox[3] - description.width + autoitalicamount + if vi > 0 then -- < 0 indicates no overshoot or a very small auto italic + chr[italickey] = vi*hdelta + end + elseif vi ~= 0 then + chr[italickey] = vi*hdelta + end + elseif hasitalics then + local vi = description.italic + if vi and vi ~= 0 then + chr[italickey] = vi*hdelta + end + end + -- to be tested + if hasmath then + -- todo, just operate on descriptions.math + local vn = character.next + if vn then + chr.next = vn + else + local vv = character.vert_variants + if vv then + local t = { } + for i=1,#vv do + local vvi = vv[i] + t[i] = { + ["start"] = (vvi["start"] or 0)*vdelta, + ["end"] = (vvi["end"] or 0)*vdelta, + ["advance"] = (vvi["advance"] or 0)*vdelta, + ["extender"] = 
vvi["extender"], + ["glyph"] = vvi["glyph"], + } + end + chr.vert_variants = t + else + local hv = character.horiz_variants + if hv then + local t = { } + for i=1,#hv do + local hvi = hv[i] + t[i] = { + ["start"] = (hvi["start"] or 0)*hdelta, + ["end"] = (hvi["end"] or 0)*hdelta, + ["advance"] = (hvi["advance"] or 0)*hdelta, + ["extender"] = hvi["extender"], + ["glyph"] = hvi["glyph"], + } + end + chr.horiz_variants = t + end + end + end + local va = character.top_accent + if va then + chr.top_accent = vdelta*va + end + if stackmath then + local mk = character.mathkerns -- not in math ? + if mk then + local kerns = { } + local v = mk.top_right if v then local k = { } for i=1,#v do local vi = v[i] + k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } + end kerns.top_right = k end + local v = mk.top_left if v then local k = { } for i=1,#v do local vi = v[i] + k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } + end kerns.top_left = k end + local v = mk.bottom_left if v then local k = { } for i=1,#v do local vi = v[i] + k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } + end kerns.bottom_left = k end + local v = mk.bottom_right if v then local k = { } for i=1,#v do local vi = v[i] + k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } + end kerns.bottom_right = k end + chr.mathkern = kerns -- singular -> should be patched in luatex ! + end + end + end + if not nodemode then + local vk = character.kerns + if vk then + local s = sharedkerns[vk] + if not s then + s = { } + for k,v in next, vk do s[k] = v*hdelta end + sharedkerns[vk] = s + end + chr.kerns = s + end + local vl = character.ligatures + if vl then + if true then + chr.ligatures = vl -- shared + else + local tt = { } + for i,l in next, vl do + tt[i] = l + end + chr.ligatures = tt + end + end + end + if isvirtual then + local vc = character.commands + if vc then + -- we assume non scaled commands here + -- tricky .. 
we need to scale pseudo math glyphs too + -- which is why we deal with rules too + local ok = false + for i=1,#vc do + local key = vc[i][1] + if key == "right" or key == "down" then + ok = true + break + end + end + if ok then + local tt = { } + for i=1,#vc do + local ivc = vc[i] + local key = ivc[1] + if key == "right" then + tt[i] = { key, ivc[2]*hdelta } + elseif key == "down" then + tt[i] = { key, ivc[2]*vdelta } + elseif key == "rule" then + tt[i] = { key, ivc[2]*vdelta, ivc[3]*hdelta } + else -- not comment + tt[i] = ivc -- shared since in cache and untouched + end + end + chr.commands = tt + else + chr.commands = vc + end + chr.index = nil + end + end + targetcharacters[unicode] = chr + end + -- + constructors.aftercopyingcharacters(target,tfmdata) + -- + return target +end + +function constructors.finalize(tfmdata) + if tfmdata.properties and tfmdata.properties.finalized then + return + end + -- + if not tfmdata.characters then + return nil + end + -- + if not tfmdata.goodies then + tfmdata.goodies = { } -- context specific + end + -- + local parameters = tfmdata.parameters + if not parameters then + return nil + end + -- + if not parameters.expansion then + parameters.expansion = { + stretch = tfmdata.stretch or 0, + shrink = tfmdata.shrink or 0, + step = tfmdata.step or 0, + auto = tfmdata.auto_expand or false, + } + end + -- + if not parameters.protrusion then + parameters.protrusion = { + auto = auto_protrude + } + end + -- + if not parameters.size then + parameters.size = tfmdata.size + end + -- + if not parameters.extendfactor then + parameters.extendfactor = tfmdata.extend or 0 + end + -- + if not parameters.slantfactor then + parameters.slantfactor = tfmdata.slant or 0 + end + -- + if not parameters.designsize then + parameters.designsize = tfmdata.designsize or 655360 + end + -- + if not parameters.units then + parameters.units = tfmdata.units_per_em or 1000 + end + -- + if not tfmdata.descriptions then + local descriptions = { } -- yes or no + setmetatableindex(descriptions, function(t,k) local v = { } t[k] = v return v end) + tfmdata.descriptions = descriptions + end + -- + local properties = tfmdata.properties + if not properties then + properties = { } + tfmdata.properties = properties + end + -- + if not properties.virtualized then + properties.virtualized = tfmdata.type == "virtual" + end + -- + if not tfmdata.properties then + tfmdata.properties = { + fontname = tfmdata.fontname, + filename = tfmdata.filename, + fullname = tfmdata.fullname, + name = tfmdata.name, + psname = tfmdata.psname, + -- + encodingbytes = tfmdata.encodingbytes or 1, + embedding = tfmdata.embedding or "subset", + tounicode = tfmdata.tounicode or 1, + cidinfo = tfmdata.cidinfo or nil, + format = tfmdata.format or "type1", + direction = tfmdata.direction or 0, + } + end + if not tfmdata.resources then + tfmdata.resources = { } + end + if not tfmdata.shared then + tfmdata.shared = { } + end + -- + -- tfmdata.fonts + -- tfmdata.unscaled + -- + if not properties.hasmath then + properties.hasmath = not tfmdata.nomath + end + -- + tfmdata.MathConstants = nil + tfmdata.postprocessors = nil + -- + tfmdata.fontname = nil + tfmdata.filename = nil + tfmdata.fullname = nil + tfmdata.name = nil -- most tricky part + tfmdata.psname = nil + -- + tfmdata.encodingbytes = nil + tfmdata.embedding = nil + tfmdata.tounicode = nil + tfmdata.cidinfo = nil + tfmdata.format = nil + tfmdata.direction = nil + tfmdata.type = nil + tfmdata.nomath = nil + tfmdata.designsize = nil + -- + tfmdata.size = nil + tfmdata.stretch 
= nil + tfmdata.shrink = nil + tfmdata.step = nil + tfmdata.auto_expand = nil + tfmdata.auto_protrude = nil + tfmdata.extend = nil + tfmdata.slant = nil + tfmdata.units_per_em = nil + -- + properties.finalized = true + -- + return tfmdata +end + +--[[ldx-- +
+<p>A unique hash value is generated by:</p>
+--ldx]]-- + +local hashmethods = { } +constructors.hashmethods = hashmethods + +function constructors.hashfeatures(specification) -- will be overloaded + local features = specification.features + if features then + local t, tn = { }, 0 + for category, list in next, features do + if next(list) then + local hasher = hashmethods[category] + if hasher then + local hash = hasher(list) + if hash then + tn = tn + 1 + t[tn] = category .. ":" .. hash + end + end + end + end + if tn > 0 then + return concat(t," & ") + end + end + return "unknown" +end + +hashmethods.normal = function(list) + local s = { } + local n = 0 + for k, v in next, list do + if not k then + -- no need to add to hash + elseif k == "number" or k == "features" then + -- no need to add to hash (maybe we need a skip list) + else + n = n + 1 + s[n] = k + end + end + if n > 0 then + sort(s) + for i=1,n do + local k = s[i] + s[i] = k .. '=' .. tostring(list[k]) + end + return concat(s,"+") + end +end + +--[[ldx-- +
+<p>In principle we can share tfm tables when we are in node mode for a font, but then
+we need to define a font switch as an id/attr switch, which is no fun, so in that
+case users can best use dynamic features ... so, we will not use that speedup. Okay,
+when we get rid of base mode we can optimize even further by sharing, but then we
+lose our testcases for <l n='mkiv'/>.</p>
+--ldx]]-- + +function constructors.hashinstance(specification,force) + local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks + if force or not hash then + hash = constructors.hashfeatures(specification) + specification.hash = hash + end + if size < 1000 and designsizes[hash] then + size = math.round(constructors.scaled(size,designsizes[hash])) + specification.size = size + end + -- local mathsize = specification.mathsize or 0 + -- if mathsize > 0 then + -- local textsize = specification.textsize + -- if fallbacks then + -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ] @ ' .. fallbacks + -- else + -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ]' + -- end + -- else + if fallbacks then + return hash .. ' @ ' .. tostring(size) .. ' @ ' .. fallbacks + else + return hash .. ' @ ' .. tostring(size) + end + -- end +end + +function constructors.setname(tfmdata,specification) -- todo: get specification from tfmdata + if constructors.namemode == "specification" then + -- not to be used in context ! + local specname = specification.specification + if specname then + tfmdata.properties.name = specname + if trace_defining then + report_otf("overloaded fontname %a",specname) + end + end + end +end + +function constructors.checkedfilename(data) + local foundfilename = data.foundfilename + if not foundfilename then + local askedfilename = data.filename or "" + if askedfilename ~= "" then + askedfilename = resolvers.resolve(askedfilename) -- no shortcut + foundfilename = resolvers.findbinfile(askedfilename,"") or "" + if foundfilename == "" then + report_defining("source file %a is not found",askedfilename) + foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or "" + if foundfilename ~= "" then + report_defining("using source file %a due to cache mismatch",foundfilename) + end + end + end + data.foundfilename = foundfilename + end + return foundfilename +end + +local formats = allocate() +fonts.formats = formats + +setmetatableindex(formats, function(t,k) + local l = lower(k) + if rawget(t,k) then + t[k] = l + return l + end + return rawget(t,file.suffix(l)) +end) + +local locations = { } + +local function setindeed(mode,target,group,name,action,position) + local t = target[mode] + if not t then + report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode) + os.exit() + elseif position then + -- todo: remove existing + insert(t, position, { name = name, action = action }) + else + for i=1,#t do + local ti = t[i] + if ti.name == name then + ti.action = action + return + end + end + insert(t, { name = name, action = action }) + end +end + +local function set(group,name,target,source) + target = target[group] + if not target then + report_defining("fatal target error in setting feature %a, group %a",name,group) + os.exit() + end + local source = source[group] + if not source then + report_defining("fatal source error in setting feature %a, group %a",name,group) + os.exit() + end + local node = source.node + local base = source.base + local position = source.position + if node then + setindeed("node",target,group,name,node,position) + end + if base then + setindeed("base",target,group,name,base,position) + end +end + +local function register(where,specification) + local name = specification.name + if name and name ~= "" then + local default = specification.default + local description = 
specification.description + local initializers = specification.initializers + local processors = specification.processors + local manipulators = specification.manipulators + local modechecker = specification.modechecker + if default then + where.defaults[name] = default + end + if description and description ~= "" then + where.descriptions[name] = description + end + if initializers then + set('initializers',name,where,specification) + end + if processors then + set('processors', name,where,specification) + end + if manipulators then + set('manipulators',name,where,specification) + end + if modechecker then + where.modechecker = modechecker + end + end +end + +constructors.registerfeature = register + +function constructors.getfeatureaction(what,where,mode,name) + what = handlers[what].features + if what then + where = what[where] + if where then + mode = where[mode] + if mode then + for i=1,#mode do + local m = mode[i] + if m.name == name then + return m.action + end + end + end + end + end +end + +function constructors.newhandler(what) -- could be a metatable newindex + local handler = handlers[what] + if not handler then + handler = { } + handlers[what] = handler + end + return handler +end + +function constructors.newfeatures(what) -- could be a metatable newindex + local handler = handlers[what] + local features = handler.features + if not features then + local tables = handler.tables -- can be preloaded + local statistics = handler.statistics -- can be preloaded + features = allocate { + defaults = { }, + descriptions = tables and tables.features or { }, + used = statistics and statistics.usedfeatures or { }, + initializers = { base = { }, node = { } }, + processors = { base = { }, node = { } }, + manipulators = { base = { }, node = { } }, + } + features.register = function(specification) return register(features,specification) end + handler.features = features -- will also become hidden + end + return features +end + +--[[ldx-- +
+<p>We need to check for default features. For this we provide
+a helper function.</p>
+--ldx]]-- + +function constructors.checkedfeatures(what,features) + local defaults = handlers[what].features.defaults + if features and next(features) then + features = fastcopy(features) -- can be inherited (mt) but then no loops possible + for key, value in next, defaults do + if features[key] == nil then + features[key] = value + end + end + return features + else + return fastcopy(defaults) -- we can change features in place + end +end + +-- before scaling + +function constructors.initializefeatures(what,tfmdata,features,trace,report) + if features and next(features) then + local properties = tfmdata.properties or { } -- brrr + local whathandler = handlers[what] + local whatfeatures = whathandler.features + local whatinitializers = whatfeatures.initializers + local whatmodechecker = whatfeatures.modechecker + -- properties.mode can be enforces (for instance in font-otd) + local mode = properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base" + properties.mode = mode -- also status + features.mode = mode -- both properties.mode or features.mode can be changed + -- + local done = { } + while true do + local redo = false + local initializers = whatfeatures.initializers[mode] + if initializers then + for i=1,#initializers do + local step = initializers[i] + local feature = step.name +-- we could intercept mode here .. needs a rewrite of this whole loop then but it's cleaner that way + local value = features[feature] + if not value then + -- disabled + elseif done[feature] then + -- already done + else + local action = step.action + if trace then + report("initializing feature %a to %a for mode %a for font %a",feature, + value,mode,tfmdata.properties.fullname) + end + action(tfmdata,value,features) -- can set mode (e.g. 
goodies) so it can trigger a restart + if mode ~= properties.mode or mode ~= features.mode then + if whatmodechecker then + properties.mode = whatmodechecker(tfmdata,features,properties.mode) -- force checking + features.mode = properties.mode + end + if mode ~= properties.mode then + mode = properties.mode + redo = true + end + end + done[feature] = true + end + if redo then + break + end + end + if not redo then + break + end + else + break + end + end + properties.mode = mode -- to be sure + return true + else + return false + end +end + +-- while typesetting + +function constructors.collectprocessors(what,tfmdata,features,trace,report) + local processes, nofprocesses = { }, 0 + if features and next(features) then + local properties = tfmdata.properties + local whathandler = handlers[what] + local whatfeatures = whathandler.features + local whatprocessors = whatfeatures.processors + local processors = whatprocessors[properties.mode] + if processors then + for i=1,#processors do + local step = processors[i] + local feature = step.name + if features[feature] then + local action = step.action + if trace then + report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) + end + if action then + nofprocesses = nofprocesses + 1 + processes[nofprocesses] = action + end + end + end + elseif trace then + report("no feature processors for mode %a for font %a",mode,tfmdata.properties.fullname) + end + end + return processes +end + +-- after scaling + +function constructors.applymanipulators(what,tfmdata,features,trace,report) + if features and next(features) then + local properties = tfmdata.properties + local whathandler = handlers[what] + local whatfeatures = whathandler.features + local whatmanipulators = whatfeatures.manipulators + local manipulators = whatmanipulators[properties.mode] + if manipulators then + for i=1,#manipulators do + local step = manipulators[i] + local feature = step.name + local value = features[feature] + if value then + local action = step.action + if trace then + report("applying feature manipulator %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) + end + if action then + action(tfmdata,feature,value) + end + end + end + end + end +end diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua index 2583c6520..965542f0a 100644 --- a/tex/context/base/font-ctx.lua +++ b/tex/context/base/font-ctx.lua @@ -1,1819 +1,1819 @@ -if not modules then modules = { } end modules ['font-ctx'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- At some point I will clean up the code here so that at the tex end --- the table interface is used. --- --- Todo: make a proper 'next id' mechanism (register etc) or wait till 'true' --- in virtual fonts indices is implemented. 
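font-ctx.lua plugs into exactly the registration API that font-con.lua set up above (register/newfeatures, with initializers run before scaling, processors collected while typesetting, and manipulators applied after scaling). As a recap, a minimal hedged sketch of what such a registration looks like; the feature name, description, and actions are invented placeholders, only the table shape follows the register function above:

    -- invented feature registered against the otf handler; the empty functions
    -- stand in for real actions (compare registerotffeature { name = "mode", ... } below)
    local registerotffeature = fonts.handlers.otf.features.register
    registerotffeature {
        name         = "demo",              -- invented
        description  = "demo feature",      -- invented
        default      = false,
        initializers = {                    -- run before scaling
            base = function(tfmdata,value,features) end,
            node = function(tfmdata,value,features) end,
        },
        manipulators = {                    -- run after scaling
            base = function(tfmdata,feature,value) end,
            node = function(tfmdata,feature,value) end,
        },
    }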
- -local context, commands = context, commands - -local texcount, texsetcount = tex.count, tex.setcount -local format, gmatch, match, find, lower, gsub, byte = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub, string.byte -local concat, serialize, sort, fastcopy, mergedtable = table.concat, table.serialize, table.sort, table.fastcopy, table.merged -local sortedhash, sortedkeys, sequenced = table.sortedhash, table.sortedkeys, table.sequenced -local settings_to_hash, hash_to_string = utilities.parsers.settings_to_hash, utilities.parsers.hash_to_string -local formatcolumns = utilities.formatters.formatcolumns -local mergehashes = utilities.parsers.mergehashes -local formatters = string.formatters - -local tostring, next, type, rawget, tonumber = tostring, next, type, rawget, tonumber -local utfchar, utfbyte = utf.char, utf.byte -local round = math.round - -local P, S, C, Cc, Cf, Cg, Ct, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Ct, lpeg.match - -local trace_features = false trackers.register("fonts.features", function(v) trace_features = v end) -local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) -local trace_designsize = false trackers.register("fonts.designsize", function(v) trace_designsize = v end) -local trace_usage = false trackers.register("fonts.usage", function(v) trace_usage = v end) -local trace_mapfiles = false trackers.register("fonts.mapfiles", function(v) trace_mapfiles = v end) -local trace_automode = false trackers.register("fonts.automode", function(v) trace_automode = v end) - -local report_features = logs.reporter("fonts","features") -local report_cummulative = logs.reporter("fonts","cummulative") -local report_defining = logs.reporter("fonts","defining") -local report_status = logs.reporter("fonts","status") -local report_mapfiles = logs.reporter("fonts","mapfiles") - -local setmetatableindex = table.setmetatableindex - -local fonts = fonts -local handlers = fonts.handlers -local otf = handlers.otf -- brrr -local names = fonts.names -local definers = fonts.definers -local specifiers = fonts.specifiers -local constructors = fonts.constructors -local loggers = fonts.loggers -local fontgoodies = fonts.goodies -local helpers = fonts.helpers -local hashes = fonts.hashes -local currentfont = font.current -local texattribute = tex.attribute -local texdimen = tex.dimen - -local fontdata = hashes.identifiers -local characters = hashes.chardata -local descriptions = hashes.descriptions -local properties = hashes.properties -local resources = hashes.resources -local csnames = hashes.csnames -local marks = hashes.markdata -local lastmathids = hashes.lastmathids - -local designsizefilename = fontgoodies.designsizes.filename - -local otffeatures = otf.features -local otftables = otf.tables - -local registerotffeature = otffeatures.register -local baseprocessors = otffeatures.processors.base -local baseinitializers = otffeatures.initializers.base - -local sequencers = utilities.sequencers -local appendgroup = sequencers.appendgroup -local appendaction = sequencers.appendaction - -specifiers.contextsetups = specifiers.contextsetups or { } -specifiers.contextnumbers = specifiers.contextnumbers or { } -specifiers.contextmerged = specifiers.contextmerged or { } -specifiers.synonyms = specifiers.synonyms or { } - -local setups = specifiers.contextsetups -local numbers = specifiers.contextnumbers -local merged = specifiers.contextmerged -local synonyms = specifiers.synonyms - 
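The feature-set bookkeeping that follows revolves around the pairing of the setups and numbers tables just aliased: setups maps a featureset name onto its definition, numbers maps the allocated number back to the name, so a set can be addressed either way. A minimal sketch of the allocation pattern kept by definecontext and presetcontext further down (the featureset name and value are invented):

    -- stand-ins for the shared tables aliased above
    local setups, numbers = { }, { }
    -- invariant: numbers[n] == name and setups[name].number == n
    local name = "smallcaps"        -- invented featureset name
    local t    = { smcp = "yes" }   -- invented feature values
    local n    = #numbers + 1
    numbers[n] = name
    t.number   = n
    setups[name] = t
    assert(setups[numbers[n]].number == n)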
-storage.register("fonts/setups" , setups , "fonts.specifiers.contextsetups" ) -storage.register("fonts/numbers", numbers, "fonts.specifiers.contextnumbers") -storage.register("fonts/merged", merged, "fonts.specifiers.contextmerged") -storage.register("fonts/synonyms", synonyms, "fonts.specifiers.synonyms") - --- inspect(setups) - -if environment.initex then - setmetatableindex(setups,function(t,k) - return type(k) == "number" and rawget(t,numbers[k]) or nil - end) -else - setmetatableindex(setups,function(t,k) - local v = type(k) == "number" and rawget(t,numbers[k]) - if v then - t[k] = v - return v - end - end) -end - --- this will move elsewhere ... - -utilities.strings.formatters.add(formatters,"font:name", [["'"..file.basename(%s.properties.name).."'"]]) -utilities.strings.formatters.add(formatters,"font:features",[["'"..table.sequenced(%s," ",true).."'"]]) - --- ... like font-sfm or so - -constructors.resolvevirtualtoo = true -- context specific (due to resolver) - -local limited = false - -directives.register("system.inputmode", function(v) - if not limited then - local i_limiter = io.i_limiter(v) - if i_limiter then - fontloader.open = i_limiter.protect(fontloader.open) - fontloader.info = i_limiter.protect(fontloader.info) - limited = true - end - end -end) - -function definers.resetnullfont() - -- resetting is needed because tikz misuses nullfont - local parameters = fonts.nulldata.parameters - -- - parameters.slant = 0 -- 1 - parameters.space = 0 -- 2 - parameters.space_stretch = 0 -- 3 - parameters.space_shrink = 0 -- 4 - parameters.x_height = 0 -- 5 - parameters.quad = 0 -- 6 - parameters.extra_space = 0 -- 7 - -- - constructors.enhanceparameters(parameters) -- official copies for us - -- - definers.resetnullfont = function() end -end - -commands.resetnullfont = definers.resetnullfont - --- this cannot be a feature initializer as there is no auto namespace --- so we never enter the loop then; we can store the defaults in the tma --- file (features.gpos.mkmk = 1 etc) - -local needsnodemode = { - gpos_mark2mark = true, - gpos_mark2base = true, - gpos_mark2ligature = true, -} - -otftables.scripts.auto = "automatic fallback to latn when no dflt present" - --- setmetatableindex(otffeatures.descriptions,otftables.features) - -local privatefeatures = { - tlig = true, - trep = true, - anum = true, -} - -local function checkedscript(tfmdata,resources,features) - local latn = false - local script = false - for g, list in next, resources.features do - for f, scripts in next, list do - if privatefeatures[f] then - -- skip - elseif scripts.dflt then - script = "dflt" - break - elseif scripts.latn then - latn = true - end - end - end - if not script then - script = latn and "latn" or "dflt" - end - if trace_automode then - report_defining("auto script mode, using script %a in font %!font:name!",script,tfmdata) - end - features.script = script - return script -end - -local function checkedmode(tfmdata,resources,features) - local sequences = resources.sequences - if sequences and #sequences > 0 then - local script = features.script or "dflt" - local language = features.language or "dflt" - for feature, value in next, features do - if value then - local found = false - for i=1,#sequences do - local sequence = sequences[i] - local features = sequence.features - if features then - local scripts = features[feature] - if scripts then - local languages = scripts[script] - if languages and languages[language] then - if found then - -- more than one lookup - if trace_automode then - 
report_defining("forcing mode %a, font %!font:name!, feature %a, script %a, language %a, %s", - "node",tfmdata,feature,script,language,"multiple lookups") - end - features.mode = "node" - return "node" - elseif needsnodemode[sequence.type] then - if trace_automode then - report_defining("forcing mode %a, font %!font:name!, feature %a, script %a, language %a, %s", - "node",tfmdata,feature,script,language,"no base support") - end - features.mode = "node" - return "node" - else - -- at least one lookup - found = true - end - end - end - end - end - end - end - end - features.mode = "base" -- new, or is this wrong? - return "base" -end - -definers.checkedscript = checkedscript -definers.checkedmode = checkedmode - -local function modechecker(tfmdata,features,mode) -- we cannot adapt features as they are shared! - if trace_features then - report_features("fontname %!font:name!, features %!font:features!",tfmdata,features) - end - local rawdata = tfmdata.shared.rawdata - local resources = rawdata and rawdata.resources - local script = features.script - if resources then - if script == "auto" then - script = checkedscript(tfmdata,resources,features) - end - if mode == "auto" then - mode = checkedmode(tfmdata,resources,features) - end - else - report_features("missing resources for font %!font:name!",tfmdata) - end - return mode -end - -registerotffeature { - -- we only set the checker and leave other settings of the mode - -- feature as they are - name = "mode", - modechecker = modechecker, -} - --- -- default = true anyway --- --- local normalinitializer = constructors.getfeatureaction("otf","initializers","node","analyze") --- --- local function analyzeinitializer(tfmdata,value,features) -- attr --- if value == "auto" and features then --- value = features.init or features.medi or features.fina or features.isol or false --- end --- return normalinitializer(tfmdata,value,features) --- end --- --- registerotffeature { --- name = "analyze", --- initializers = { --- node = analyzeinitializer, --- }, --- } - -local beforecopyingcharacters = sequencers.new { - name = "beforecopyingcharacters", - arguments = "target,original", -} - -appendgroup(beforecopyingcharacters,"before") -- user -appendgroup(beforecopyingcharacters,"system") -- private -appendgroup(beforecopyingcharacters,"after" ) -- user - -function constructors.beforecopyingcharacters(original,target) - local runner = beforecopyingcharacters.runner - if runner then - runner(original,target) - end -end - -local aftercopyingcharacters = sequencers.new { - name = "aftercopyingcharacters", - arguments = "target,original", -} - -appendgroup(aftercopyingcharacters,"before") -- user -appendgroup(aftercopyingcharacters,"system") -- private -appendgroup(aftercopyingcharacters,"after" ) -- user - -function constructors.aftercopyingcharacters(original,target) - local runner = aftercopyingcharacters.runner - if runner then - runner(original,target) - end -end - ---[[ldx-- -
-<p>So far we haven't really dealt with features (or whatever we want
-to pass along with the font definition). We distinguish the following
-situations:</p>
- - -name:xetex like specs -name@virtual font spec -name*context specification - ---ldx]]-- - --- currently fonts are scaled while constructing the font, so we --- have to do scaling of commands in the vf at that point using e.g. --- "local scale = g.parameters.factor or 1" after all, we need to --- work with copies anyway and scaling needs to be done at some point; --- however, when virtual tricks are used as feature (makes more --- sense) we scale the commands in fonts.constructors.scale (and set the --- factor there) - -local loadfont = definers.loadfont - -function definers.loadfont(specification,size,id) -- overloads the one in font-def - local variants = definers.methods.variants - local virtualfeatures = specification.features.virtual - if virtualfeatures and virtualfeatures.preset then - local variant = variants[virtualfeatures.preset] - if variant then - return variant(specification,size,id) - end - else - local tfmdata = loadfont(specification,size,id) - -- constructors.checkvirtualid(tfmdata,id) - return tfmdata - end -end - -local function predefined(specification) - local variants = definers.methods.variants - local detail = specification.detail - if detail ~= "" and variants[detail] then - specification.features.virtual = { preset = detail } - end - return specification -end - -definers.registersplit("@", predefined,"virtual") - -local normalize_features = otffeatures.normalize -- should be general - -local function definecontext(name,t) -- can be shared - local number = setups[name] and setups[name].number or 0 -- hm, numbers[name] - if number == 0 then - number = #numbers + 1 - numbers[number] = name - end - t.number = number - setups[name] = t - return number, t -end - -local function presetcontext(name,parent,features) -- will go to con and shared - if features == "" and find(parent,"=") then - features = parent - parent = "" - end - if not features or features == "" then - features = { } - elseif type(features) == "string" then - features = normalize_features(settings_to_hash(features)) - else - features = normalize_features(features) - end - -- todo: synonyms, and not otf bound - if parent ~= "" then - for p in gmatch(parent,"[^, ]+") do - local s = setups[p] - if s then - for k,v in next, s do - if features[k] == nil then - features[k] = v - end - end - else - -- just ignore an undefined one .. i.e. we can refer to not yet defined - end - end - end - -- these are auto set so in order to prevent redundant definitions - -- we need to preset them (we hash the features and adding a default - -- setting during initialization may result in a different hash) - -- - -- for k,v in next, triggers do - -- if features[v] == nil then -- not false ! - -- local vv = default_features[v] - -- if vv then features[v] = vv end - -- end - -- end - -- - for feature,value in next, features do - if value == nil then -- not false ! - local default = default_features[feature] - if default ~= nil then - features[feature] = default - end - end - end - -- sparse 'm so that we get a better hash and less test (experimental - -- optimization) - local t = { } -- can we avoid t ? 
- for k,v in next, features do --- if v then t[k] = v end - t[k] = v - end - -- needed for dynamic features - -- maybe number should always be renewed as we can redefine features - local number = setups[name] and setups[name].number or 0 -- hm, numbers[name] - if number == 0 then - number = #numbers + 1 - numbers[number] = name - end - t.number = number - setups[name] = t - return number, t -end - -local function contextnumber(name) -- will be replaced - local t = setups[name] - if not t then - return 0 - elseif t.auto then - local lng = tonumber(tex.language) - local tag = name .. ":" .. lng - local s = setups[tag] - if s then - return s.number or 0 - else - local script, language = languages.association(lng) - if t.script ~= script or t.language ~= language then - local s = fastcopy(t) - local n = #numbers + 1 - setups[tag] = s - numbers[n] = tag - s.number = n - s.script = script - s.language = language - return n - else - setups[tag] = t - return t.number or 0 - end - end - else - return t.number or 0 - end -end - -local function mergecontext(currentnumber,extraname,option) -- number string number (used in scrp-ini - local extra = setups[extraname] - if extra then - local current = setups[numbers[currentnumber]] - local mergedfeatures, mergedname = { }, nil - if option < 0 then - if current then - for k, v in next, current do - if not extra[k] then - mergedfeatures[k] = v - end - end - end - mergedname = currentnumber .. "-" .. extraname - else - if current then - for k, v in next, current do - mergedfeatures[k] = v - end - end - for k, v in next, extra do - mergedfeatures[k] = v - end - mergedname = currentnumber .. "+" .. extraname - end - local number = #numbers + 1 - mergedfeatures.number = number - numbers[number] = mergedname - merged[number] = option - setups[mergedname] = mergedfeatures - return number -- contextnumber(mergedname) - else - return currentnumber - end -end - -local extrasets = { } - -setmetatableindex(extrasets,function(t,k) - local v = mergehashes(setups,k) - t[k] = v - return v -end) - -local function mergecontextfeatures(currentname,extraname,how,mergedname) -- string string - local extra = setups[extraname] or extrasets[extraname] - if extra then - local current = setups[currentname] - local mergedfeatures = { } - if how == "+" then - if current then - for k, v in next, current do - mergedfeatures[k] = v - end - end - for k, v in next, extra do - mergedfeatures[k] = v - end - elseif how == "-" then - if current then - for k, v in next, current do - mergedfeatures[k] = v - end - end - for k, v in next, extra do - -- only boolean features - if v == true then - mergedfeatures[k] = false - end - end - else -- = - for k, v in next, extra do - mergedfeatures[k] = v - end - end - local number = #numbers + 1 - mergedfeatures.number = number - numbers[number] = mergedname - merged[number] = option - setups[mergedname] = mergedfeatures - return number - else - return numbers[currentname] or 0 - end -end - -local function registercontext(fontnumber,extraname,option) - local extra = setups[extraname] - if extra then - local mergedfeatures, mergedname = { }, nil - if option < 0 then - mergedname = fontnumber .. "-" .. extraname - else - mergedname = fontnumber .. "+" .. 
extraname - end - for k, v in next, extra do - mergedfeatures[k] = v - end - local number = #numbers + 1 - mergedfeatures.number = number - numbers[number] = mergedname - merged[number] = option - setups[mergedname] = mergedfeatures - return number -- contextnumber(mergedname) - else - return 0 - end -end - -local function registercontextfeature(mergedname,extraname,how) - local extra = setups[extraname] - if extra then - local mergedfeatures = { } - for k, v in next, extra do - mergedfeatures[k] = v - end - local number = #numbers + 1 - mergedfeatures.number = number - numbers[number] = mergedname - merged[number] = how == "=" and 1 or 2 -- 1=replace, 2=combine - setups[mergedname] = mergedfeatures - return number -- contextnumber(mergedname) - else - return 0 - end -end - -specifiers.presetcontext = presetcontext -specifiers.contextnumber = contextnumber -specifiers.mergecontext = mergecontext -specifiers.registercontext = registercontext -specifiers.definecontext = definecontext - --- we extend the hasher: - -constructors.hashmethods.virtual = function(list) - local s = { } - local n = 0 - for k, v in next, list do - n = n + 1 - s[n] = k -- no checking on k - end - if n > 0 then - sort(s) - for i=1,n do - local k = s[i] - s[i] = k .. '=' .. tostring(list[k]) - end - return concat(s,"+") - end -end - --- end of redefine - --- local withcache = { } -- concat might be less efficient than nested tables --- --- local function withset(name,what) --- local zero = texattribute[0] --- local hash = zero .. "+" .. name .. "*" .. what --- local done = withcache[hash] --- if not done then --- done = mergecontext(zero,name,what) --- withcache[hash] = done --- end --- texattribute[0] = done --- end --- --- local function withfnt(name,what,font) --- local font = font or currentfont() --- local hash = font .. "*" .. name .. "*" .. what --- local done = withcache[hash] --- if not done then --- done = registercontext(font,name,what) --- withcache[hash] = done --- end --- texattribute[0] = done --- end - -function specifiers.showcontext(name) - return setups[name] or setups[numbers[name]] or setups[numbers[tonumber(name)]] or { } -end - --- we need a copy as we will add (fontclass) goodies to the features and --- that is bad for a shared table - --- local function splitcontext(features) -- presetcontext creates dummy here --- return fastcopy(setups[features] or (presetcontext(features,"","") and setups[features])) --- end - -local function splitcontext(features) -- presetcontext creates dummy here - local sf = setups[features] - if not sf then - local n -- number - if find(features,",") then - -- let's assume a combination which is not yet defined but just specified (as in math) - n, sf = presetcontext(features,features,"") - else - -- we've run into an unknown feature and or a direct spec so we create a dummy - n, sf = presetcontext(features,"","") - end - end - return fastcopy(sf) -end - --- local splitter = lpeg.splitat("=") --- --- local function splitcontext(features) --- local setup = setups[features] --- if setup then --- return setup --- elseif find(features,",") then --- -- This is not that efficient but handy anyway for quick and dirty tests --- -- beware, due to the way of caching setups you can get the wrong results --- -- when components change. A safeguard is to nil the cache. 
--- local merge = nil --- for feature in gmatch(features,"[^, ]+") do --- if find(feature,"=") then --- local k, v = lpegmatch(splitter,feature) --- if k and v then --- if not merge then --- merge = { k = v } --- else --- merge[k] = v --- end --- end --- else --- local s = setups[feature] --- if not s then --- -- skip --- elseif not merge then --- merge = s --- else --- for k, v in next, s do --- merge[k] = v --- end --- end --- end --- end --- setup = merge and presetcontext(features,"",merge) and setups[features] --- -- actually we have to nil setups[features] in order to permit redefinitions --- setups[features] = nil --- end --- return setup or (presetcontext(features,"","") and setups[features]) -- creates dummy --- end - -specifiers.splitcontext = splitcontext - -function specifiers.contexttostring(name,kind,separator,yes,no,strict,omit) -- not used - return hash_to_string(mergedtable(handlers[kind].features.defaults or {},setups[name] or {}),separator,yes,no,strict,omit) -end - -local function starred(features) -- no longer fallbacks here - local detail = features.detail - if detail and detail ~= "" then - features.features.normal = splitcontext(detail) - else - features.features.normal = { } - end - return features -end - -definers.registersplit('*',starred,"featureset") - --- sort of xetex mode, but without [] and / as we have file: and name: etc - -local space = P(" ") -local separator = S(";,") -local equal = P("=") -local spaces = space^0 -local sometext = C((1-equal-space-separator)^1) -local truevalue = P("+") * spaces * sometext * Cc(true) -- "yes" -local falsevalue = P("-") * spaces * sometext * Cc(false) -- "no" -local keyvalue = sometext * spaces * equal * spaces * sometext -local somevalue = sometext * spaces * Cc(true) -- "yes" -local pattern = Cf(Ct("") * (space + separator + Cg(keyvalue + falsevalue + truevalue + somevalue))^0, rawset) - -local function colonized(specification) - specification.features.normal = normalize_features(lpegmatch(pattern,specification.detail)) - return specification -end - -definers.registersplit(":",colonized,"direct") - --- define (two steps) - -local space = P(" ") -local spaces = space^0 -local leftparent = (P"(") -local rightparent = (P")") -local value = C((leftparent * (1-rightparent)^0 * rightparent + (1-space))^1) -local dimension = C((space/"" + P(1))^1) -local rest = C(P(1)^0) -local scale_none = Cc(0) -local scale_at = P("at") * Cc(1) * spaces * dimension -- value -local scale_sa = P("sa") * Cc(2) * spaces * dimension -- value -local scale_mo = P("mo") * Cc(3) * spaces * dimension -- value -local scale_scaled = P("scaled") * Cc(4) * spaces * dimension -- value - -local sizepattern = spaces * (scale_at + scale_sa + scale_mo + scale_scaled + scale_none) -local splitpattern = spaces * value * spaces * rest - -function helpers.splitfontpattern(str) - local name, size = lpegmatch(splitpattern,str) - local kind, size = lpegmatch(sizepattern,size) - return name, kind, size -end - -function helpers.fontpatternhassize(str) - local name, size = lpegmatch(splitpattern,str) - local kind, size = lpegmatch(sizepattern,size) - return size or false -end - -local specification -- still needed as local ? - -local getspecification = definers.getspecification - --- we can make helper macros which saves parsing (but normaly not --- that many calls, e.g. 
in mk a couple of 100 and in metafun 3500) - -local setdefaultfontname = context.fntsetdefname -local setsomefontname = context.fntsetsomename -local setemptyfontsize = context.fntsetnopsize -local setsomefontsize = context.fntsetsomesize -local letvaluerelax = context.letvaluerelax - -function commands.definefont_one(str) - statistics.starttiming(fonts) - if trace_defining then - report_defining("memory usage before: %s",statistics.memused()) - report_defining("start stage one: %s",str) - end - local fullname, size = lpegmatch(splitpattern,str) - local lookup, name, sub, method, detail = getspecification(fullname) - if not name then - report_defining("strange definition %a",str) - setdefaultfontname() - elseif name == "unknown" then - setdefaultfontname() - else - setsomefontname(name) - end - -- we can also use a count for the size - if size and size ~= "" then - local mode, size = lpegmatch(sizepattern,size) - if size and mode then - texcount.scaledfontmode = mode - setsomefontsize(size) - else - texcount.scaledfontmode = 0 - setemptyfontsize() - end - elseif true then - -- so we don't need to check in tex - texcount.scaledfontmode = 2 - setemptyfontsize() - else - texcount.scaledfontmode = 0 - setemptyfontsize() - end - specification = definers.makespecification(str,lookup,name,sub,method,detail,size) - if trace_defining then - report_defining("stop stage one") - end -end - -local n = 0 - --- we can also move rscale to here (more consistent) --- the argument list will become a table - -local function nice_cs(cs) - return (gsub(cs,".->", "")) -end - -function commands.definefont_two(global,cs,str,size,inheritancemode,classfeatures,fontfeatures,classfallbacks,fontfallbacks, - mathsize,textsize,relativeid,classgoodies,goodies,classdesignsize,fontdesignsize) - if trace_defining then - report_defining("start stage two: %s (size %s)",str,size) - end - -- name is now resolved and size is scaled cf sa/mo - local lookup, name, sub, method, detail = getspecification(str or "") - -- new (todo: inheritancemode) - local designsize = fontdesignsize ~= "" and fontdesignsize or classdesignsize or "" - local designname = designsizefilename(name,designsize,size) - if designname and designname ~= "" then - if trace_defining or trace_designsize then - report_defining("remapping name %a, specification %a, size %a, designsize %a",name,designsize,size,designname) - end - -- we don't catch detail here - local o_lookup, o_name, o_sub, o_method, o_detail = getspecification(designname) - if o_lookup and o_lookup ~= "" then lookup = o_lookup end - if o_method and o_method ~= "" then method = o_method end - if o_detail and o_detail ~= "" then detail = o_detail end - name = o_name - sub = o_sub - end - -- so far - -- some settings can have been overloaded - if lookup and lookup ~= "" then - specification.lookup = lookup - end - if relativeid and relativeid ~= "" then -- experimental hook - local id = tonumber(relativeid) or 0 - specification.relativeid = id > 0 and id - end - specification.name = name - specification.size = size - specification.sub = (sub and sub ~= "" and sub) or specification.sub - specification.mathsize = mathsize - specification.textsize = textsize - specification.goodies = goodies - specification.cs = cs - specification.global = global - if detail and detail ~= "" then - specification.method = method or "*" - specification.detail = detail - elseif specification.detail and specification.detail ~= "" then - -- already set - elseif inheritancemode == 0 then - -- nothing - elseif inheritancemode 
== 1 then - -- fontonly - if fontfeatures and fontfeatures ~= "" then - specification.method = "*" - specification.detail = fontfeatures - end - if fontfallbacks and fontfallbacks ~= "" then - specification.fallbacks = fontfallbacks - end - elseif inheritancemode == 2 then - -- classonly - if classfeatures and classfeatures ~= "" then - specification.method = "*" - specification.detail = classfeatures - end - if classfallbacks and classfallbacks ~= "" then - specification.fallbacks = classfallbacks - end - elseif inheritancemode == 3 then - -- fontfirst - if fontfeatures and fontfeatures ~= "" then - specification.method = "*" - specification.detail = fontfeatures - elseif classfeatures and classfeatures ~= "" then - specification.method = "*" - specification.detail = classfeatures - end - if fontfallbacks and fontfallbacks ~= "" then - specification.fallbacks = fontfallbacks - elseif classfallbacks and classfallbacks ~= "" then - specification.fallbacks = classfallbacks - end - elseif inheritancemode == 4 then - -- classfirst - if classfeatures and classfeatures ~= "" then - specification.method = "*" - specification.detail = classfeatures - elseif fontfeatures and fontfeatures ~= "" then - specification.method = "*" - specification.detail = fontfeatures - end - if classfallbacks and classfallbacks ~= "" then - specification.fallbacks = classfallbacks - elseif fontfallbacks and fontfallbacks ~= "" then - specification.fallbacks = fontfallbacks - end - end - local tfmdata = definers.read(specification,size) -- id not yet known (size in spec?) - -- - local lastfontid = 0 - if not tfmdata then - report_defining("unable to define %a as %a",name,nice_cs(cs)) - lastfontid = -1 - letvaluerelax(cs) -- otherwise the current definition takes the previous one - elseif type(tfmdata) == "number" then - if trace_defining then - report_defining("reusing %s, id %a, target %a, features %a / %a, fallbacks %a / %a, goodies %a / %a, designsize %a / %a", - name,tfmdata,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks,classgoodies,goodies,classdesignsize,fontdesignsize) - end - csnames[tfmdata] = specification.cs - tex.definefont(global,cs,tfmdata) - -- resolved (when designsize is used): - setsomefontsize((fontdata[tfmdata].parameters.size or 0) .. "sp") - lastfontid = tfmdata - else - -- setting the extra characters will move elsewhere - local characters = tfmdata.characters - local parameters = tfmdata.parameters - -- we use char0 as signal; cf the spec pdf can handle this (no char in slot) - characters[0] = nil - -- characters[0x00A0] = { width = parameters.space } - -- characters[0x2007] = { width = characters[0x0030] and characters[0x0030].width or parameters.space } -- figure - -- characters[0x2008] = { width = characters[0x002E] and characters[0x002E].width or parameters.space } -- period - -- - local id = font.define(tfmdata) - csnames[id] = specification.cs - tfmdata.properties.id = id - definers.register(tfmdata,id) -- to be sure, normally already done - tex.definefont(global,cs,id) - constructors.cleanuptable(tfmdata) - constructors.finalize(tfmdata) - if trace_defining then - report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a", - name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks) - end - -- resolved (when designsize is used): - setsomefontsize((tfmdata.parameters.size or 655360) .. 
"sp") - lastfontid = id - end - if trace_defining then - report_defining("memory usage after: %s",statistics.memused()) - report_defining("stop stage two") - end - -- - texsetcount("global","lastfontid",lastfontid) - if not mathsize then - -- forget about it - elseif mathsize == 0 then - lastmathids[1] = lastfontid - else - lastmathids[mathsize] = lastfontid - end - -- - statistics.stoptiming(fonts) -end - -function definers.define(specification) - -- - local name = specification.name - if not name or name == "" then - return -1 - else - statistics.starttiming(fonts) - -- - -- following calls expect a few properties to be set: - -- - local lookup, name, sub, method, detail = getspecification(name or "") - -- - specification.name = (name ~= "" and name) or specification.name - -- - specification.lookup = specification.lookup or (lookup ~= "" and lookup) or "file" - specification.size = specification.size or 655260 - specification.sub = specification.sub or (sub ~= "" and sub) or "" - specification.method = specification.method or (method ~= "" and method) or "*" - specification.detail = specification.detail or (detail ~= "" and detail) or "" - -- - if type(specification.size) == "string" then - specification.size = tex.sp(specification.size) or 655260 - end - -- - specification.specification = "" -- not used - specification.resolved = "" - specification.forced = "" - specification.features = { } -- via detail, maybe some day - -- - -- we don't care about mathsize textsize goodies fallbacks - -- - local cs = specification.cs - if cs == "" then - cs = nil - specification.cs = nil - specification.global = false - elseif specification.global == nil then - specification.global = false - end - -- - local tfmdata = definers.read(specification,specification.size) - if not tfmdata then - return -1, nil - elseif type(tfmdata) == "number" then - if cs then - tex.definefont(specification.global,cs,tfmdata) - csnames[tfmdata] = cs - end - return tfmdata, fontdata[tfmdata] - else - local id = font.define(tfmdata) - tfmdata.properties.id = id - definers.register(tfmdata,id) - if cs then - tex.definefont(specification.global,cs,id) - csnames[id] = cs - end - constructors.cleanuptable(tfmdata) - constructors.finalize(tfmdata) - return id, tfmdata - end - statistics.stoptiming(fonts) - end -end - --- local id, cs = fonts.definers.internal { } --- local id, cs = fonts.definers.internal { number = 2 } --- local id, cs = fonts.definers.internal { name = "dejavusans" } - -local n = 0 - -function definers.internal(specification,cs) - specification = specification or { } - local name = specification.name - local size = specification.size and number.todimen(specification.size) or texdimen.bodyfontsize - local number = tonumber(specification.number) - local id = nil - if number then - id = number - elseif name and name ~= "" then - local cs = cs or specification.cs - if not cs then - n = n + 1 -- beware ... there can be many and they are often used once - -- cs = formatters["internal font %s"](n) - cs = "internal font " .. n - else - specification.cs = cs - end - id = definers.define { - name = name, - size = size, - cs = cs, - } - end - if not id then - id = currentfont() - end - return id, csnames[id] -end - -local enable_auto_r_scale = false - -experiments.register("fonts.autorscale", function(v) - enable_auto_r_scale = v -end) - --- Not ok, we can best use a database for this. The problem is that we --- have delayed definitions and so we never know what style is taken --- as start. 
- -local calculatescale = constructors.calculatescale - -function constructors.calculatescale(tfmdata,scaledpoints,relativeid) - local scaledpoints, delta = calculatescale(tfmdata,scaledpoints) - -- if enable_auto_r_scale and relativeid then -- for the moment this is rather context specific - -- local relativedata = fontdata[relativeid] - -- local rfmdata = relativedata and relativedata.unscaled and relativedata.unscaled - -- local id_x_height = rfmdata and rfmdata.parameters and rfmdata.parameters.x_height - -- local tf_x_height = tfmdata and tfmdata.parameters and tfmdata.parameters.x_height - -- if id_x_height and tf_x_height then - -- local rscale = id_x_height/tf_x_height - -- delta = rscale * delta - -- scaledpoints = rscale * scaledpoints - -- end - -- end - return scaledpoints, delta -end - --- We overload the (generic) resolver: - -local resolvers = definers.resolvers -local hashfeatures = constructors.hashfeatures - -function definers.resolve(specification) -- overload function in font-con.lua - if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash - local r = resolvers[specification.lookup] - if r then - r(specification) - end - end - if specification.forced == "" then - specification.forced = nil - else - specification.forced = specification.forced - end - -- goodies are a context specific thing and not always defined - -- as feature, so we need to make sure we add them here before - -- hashing because otherwise we get funny goodies applied - local goodies = specification.goodies - if goodies and goodies ~= "" then - -- this adapts the features table so it has best be a copy - local normal = specification.features.normal - if not normal then - specification.features.normal = { goodies = goodies } - elseif not normal.goodies then - local g = normal.goodies - if g and g ~= "" then - normal.goodies = formatters["%s,%s"](g,goodies) - else - normal.goodies = goodies - end - end - end - -- so far for goodie hacks - specification.hash = lower(specification.name .. ' @ ' .. hashfeatures(specification)) - if specification.sub and specification.sub ~= "" then - specification.hash = specification.sub .. ' @ ' .. specification.hash - end - return specification -end - - --- soon to be obsolete: - -local mappings = fonts.mappings - -local loaded = { -- prevent loading (happens in cont-sys files) - ["original-base.map" ] = true, - ["original-ams-base.map" ] = true, - ["original-ams-euler.map"] = true, - ["original-public-lm.map"] = true, -} - -function mappings.loadfile(name) - name = file.addsuffix(name,"map") - if not loaded[name] then - if trace_mapfiles then - report_mapfiles("loading map file %a",name) - end - pdf.mapfile(name) - loaded[name] = true - end -end - -local loaded = { -- prevent double loading -} - -function mappings.loadline(how,line) - if line then - how = how .. " " .. line - elseif how == "" then - how = "= " .. line - end - if not loaded[how] then - if trace_mapfiles then - report_mapfiles("processing map line %a",line) - end - pdf.mapline(how) - loaded[how] = true - end -end - -function mappings.reset() - pdf.mapfile("") -end - -mappings.reset() -- resets the default file - --- we need an 'do after the banner hook' - --- => commands - -local function nametoslot(name) - local t = type(name) - if t == "string" then - return resources[true].unicodes[name] - elseif t == "number" then - return n - end -end - -helpers.nametoslot = nametoslot - --- this will change ... 
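As a quick illustration of the nametoslot helper just defined: it resolves a glyph name to its slot in the current font via that font's resources.unicodes table, and commands.fontchar further down uses it to typeset a named glyph. A hedged usage sketch (the glyph name is an arbitrary example):

    -- typeset a glyph by name in the current font, if the font provides it
    local slot = fonts.helpers.nametoslot("fi")   -- e.g. the fi ligature
    if slot then
        context.char(slot)
    end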
- -function loggers.reportdefinedfonts() - if trace_usage then - local t, tn = { }, 0 - for id, data in sortedhash(fontdata) do - local properties = data.properties or { } - local parameters = data.parameters or { } - tn = tn + 1 - t[tn] = { - format("%03i",id or 0), - format("%09i",parameters.size or 0), - properties.type or "real", - properties.format or "unknown", - properties.name or "", - properties.psname or "", - properties.fullname or "", - } - report_status("%s: % t",properties.name,sortedkeys(data)) - end - formatcolumns(t," ") - report_status() - report_status("defined fonts:") - report_status() - for k=1,tn do - report_status(t[k]) - end - end -end - -luatex.registerstopactions(loggers.reportdefinedfonts) - -function loggers.reportusedfeatures() - -- numbers, setups, merged - if trace_usage then - local t, n = { }, #numbers - for i=1,n do - local name = numbers[i] - local setup = setups[name] - local n = setup.number - setup.number = nil -- we have no reason to show this - t[i] = { i, name, sequenced(setup,false,true) } -- simple mode - setup.number = n -- restore it (normally not needed as we're done anyway) - end - formatcolumns(t," ") - report_status() - report_status("defined featuresets:") - report_status() - for k=1,n do - report_status(t[k]) - end - end -end - -luatex.registerstopactions(loggers.reportusedfeatures) - -statistics.register("fonts load time", function() - return statistics.elapsedseconds(fonts) -end) - --- experimental mechanism for Mojca: --- --- fonts.definetypeface { --- name = "mainbodyfont-light", --- preset = "antykwapoltawskiego-light", --- } --- --- fonts.definetypeface { --- name = "mojcasfavourite", --- preset = "antykwapoltawskiego", --- normalweight = "light", --- boldweight = "bold", --- width = "condensed", --- } - -local Shapes = { - serif = "Serif", - sans = "Sans", - mono = "Mono", -} - -function fonts.definetypeface(name,t) - if type(name) == "table" then - -- {name=abc,k=v,...} - t = name - elseif t then - if type(t) == "string" then - -- "abc", "k=v,..." - t = settings_to_hash(name) - else - -- "abc", {k=v,...} - end - t.name = t.name or name - else - -- "name=abc,k=v,..." 
- t = settings_to_hash(name) - end - local p = t.preset and fonts.typefaces[t.preset] or { } - local name = t.name or "unknowntypeface" - local shortcut = t.shortcut or p.shortcut or "rm" - local size = t.size or p.size or "default" - local shape = t.shape or p.shape or "serif" - local fontname = t.fontname or p.fontname or "unknown" - local normalweight = t.normalweight or t.weight or p.normalweight or p.weight or "normal" - local boldweight = t.boldweight or t.weight or p.boldweight or p.weight or "normal" - local normalwidth = t.normalwidth or t.width or p.normalwidth or p.width or "normal" - local boldwidth = t.boldwidth or t.width or p.boldwidth or p.width or "normal" - Shape = Shapes[shape] or "Serif" - context.startfontclass { name } - context.definefontsynonym( { format("%s", Shape) }, { format("spec:%s-%s-regular-%s", fontname, normalweight, normalwidth) } ) - context.definefontsynonym( { format("%sBold", Shape) }, { format("spec:%s-%s-regular-%s", fontname, boldweight, boldwidth ) } ) - context.definefontsynonym( { format("%sBoldItalic", Shape) }, { format("spec:%s-%s-italic-%s", fontname, boldweight, boldwidth ) } ) - context.definefontsynonym( { format("%sItalic", Shape) }, { format("spec:%s-%s-italic-%s", fontname, normalweight, normalwidth) } ) - context.stopfontclass() - local settings = sequenced({ features= t.features },",") - context.dofastdefinetypeface(name, shortcut, shape, size, settings) -end - -function fonts.current() -- todo: also handle name - return fontdata[currentfont()] or fontdata[0] -end - -function fonts.currentid() - return currentfont() or 0 -end - --- interfaces - -function commands.fontchar(n) - n = nametoslot(n) - if n then - context.char(n) - end -end - -function commands.doifelsecurrentfonthasfeature(name) -- can be made faster with a supportedfeatures hash - local f = fontdata[currentfont()] - f = f and f.shared - f = f and f.rawdata - f = f and f.resources - f = f and f.features - commands.doifelse(f and (f.gpos[name] or f.gsub[name])) -end - -local p, f = 1, formatters["%0.1fpt"] -- normally this value is changed only once - -local stripper = lpeg.patterns.stripzeros - -function commands.nbfs(amount,precision) - if precision ~= p then - p = precision - f = formatters["%0." .. p .. 
"fpt"] - end - context(lpegmatch(stripper,f(amount/65536))) -end - -function commands.featureattribute(tag) - context(contextnumber(tag)) -end - -function commands.setfontfeature(tag) - texattribute[0] = contextnumber(tag) -end - -function commands.resetfontfeature() - texattribute[0] = 0 -end - --- function commands.addfs(tag) withset(tag, 1) end --- function commands.subfs(tag) withset(tag,-1) end --- function commands.addff(tag) withfnt(tag, 2) end -- on top of font features --- function commands.subff(tag) withfnt(tag,-2) end -- on top of font features - -function commands.cleanfontname (name) context(names.cleanname(name)) end - -function commands.fontlookupinitialize (name) names.lookup(name) end -function commands.fontlookupnoffound () context(names.noflookups()) end -function commands.fontlookupgetkeyofindex(key,index) context(names.getlookupkey(key,index)) end -function commands.fontlookupgetkey (key) context(names.getlookupkey(key)) end - --- this might move to a runtime module: - -function commands.showchardata(n) - local tfmdata = fontdata[currentfont()] - if tfmdata then - if type(n) == "string" then - n = utfbyte(n) - end - local chr = tfmdata.characters[n] - if chr then - report_status("%s @ %s => %U => %c => %s",tfmdata.properties.fullname,tfmdata.parameters.size,n,n,serialize(chr,false)) - end - end -end - -function commands.showfontparameters(tfmdata) - -- this will become more clever - local tfmdata = tfmdata or fontdata[currentfont()] - if tfmdata then - local parameters = tfmdata.parameters - local mathparameters = tfmdata.mathparameters - local properties = tfmdata.properties - local hasparameters = parameters and next(parameters) - local hasmathparameters = mathparameters and next(mathparameters) - if hasparameters then - report_status("%s @ %s => text parameters => %s",properties.fullname,parameters.size,serialize(parameters,false)) - end - if hasmathparameters then - report_status("%s @ %s => math parameters => %s",properties.fullname,parameters.size,serialize(mathparameters,false)) - end - if not hasparameters and not hasmathparameters then - report_status("%s @ %s => no text parameters and/or math parameters",properties.fullname,parameters.size) - end - end -end - --- for the moment here, this will become a chain of extras that is --- hooked into the ctx registration (or scaler or ...) 
- -local dimenfactors = number.dimenfactors - -function helpers.dimenfactor(unit,tfmdata) -- could be a method of a font instance - if unit == "ex" then - return (tfmdata and tfmdata.parameters.x_height) or 655360 - elseif unit == "em" then - return (tfmdata and tfmdata.parameters.em_width) or 655360 - else - local du = dimenfactors[unit] - return du and 1/du or tonumber(unit) or 1 - end -end - -local function digitwidth(font) -- max(quad/2,wd(0..9)) - local tfmdata = fontdata[font] - local parameters = tfmdata.parameters - local width = parameters.digitwidth - if not width then - width = round(parameters.quad/2) -- maybe tex.scale - local characters = tfmdata.characters - for i=48,57 do - local wd = round(characters[i].width) - if wd > width then - width = wd - end - end - parameters.digitwidth = width - end - return width -end - -helpers.getdigitwidth = digitwidth -helpers.setdigitwidth = digitwidth - --- - -function helpers.getparameters(tfmdata) - local p = { } - local m = p - local parameters = tfmdata.parameters - while true do - for k, v in next, parameters do - m[k] = v - end - parameters = getmetatable(parameters) - parameters = parameters and parameters.__index - if type(parameters) == "table" then - m = { } - p.metatable = m - else - break - end - end - return p -end - -if environment.initex then - - local function names(t) - local nt = #t - if nt > 0 then - local n = { } - for i=1,nt do - n[i] = t[i].name - end - return concat(n," ") - else - return "-" - end - end - - statistics.register("font processing", function() - local l = { } - for what, handler in table.sortedpairs(handlers) do - local features = handler.features - if features then - l[#l+1] = format("[%s (base initializers: %s) (base processors: %s) (base manipulators: %s) (node initializers: %s) (node processors: %s) (node manipulators: %s)]", - what, - names(features.initializers.base), - names(features.processors .base), - names(features.manipulators.base), - names(features.initializers.node), - names(features.processors .node), - names(features.manipulators.node) - ) - end - end - return concat(l, " | ") - end) - -end - --- redefinition - -local quads = hashes.quads -local xheights = hashes.xheights - -setmetatableindex(number.dimenfactors, function(t,k) - if k == "ex" then - return xheigths[currentfont()] - elseif k == "em" then - return quads[currentfont()] - elseif k == "%" then - return dimen.hsize/100 - else - -- error("wrong dimension: " .. (s or "?")) -- better a message - return false - end -end) - ---[[ldx-- -

Before a font is passed to TeX we scale it. Here we also need to scale virtual characters.

---ldx]]-- - -function constructors.checkvirtualids(tfmdata) - -- begin of experiment: we can use { "slot", 0, number } in virtual fonts - local fonts = tfmdata.fonts - local selfid = font.nextid() - if fonts and #fonts > 0 then - for i=1,#fonts do - if fonts[i][2] == 0 then - fonts[i][2] = selfid - end - end - else - -- tfmdata.fonts = { "id", selfid } -- conflicts with other next id's (vf math), too late anyway - end - -- end of experiment -end - --- function constructors.getvirtualid(tfmdata) --- -- since we don't know the id yet, we use 0 as signal --- local tf = tfmdata.fonts --- if not tf then --- local properties = tfmdata.properties --- if properties then --- properties.virtualized = true --- else --- tfmdata.properties = { virtualized = true } --- end --- tf = { } --- tfmdata.fonts = tf --- end --- local ntf = #tf + 1 --- tf[ntf] = { id = 0 } --- return ntf --- end --- --- function constructors.checkvirtualid(tfmdata, id) -- will go --- local properties = tfmdata.properties --- if tfmdata and tfmdata.type == "virtual" or (properties and properties.virtualized) then --- local vfonts = tfmdata.fonts --- if not vffonts or #vfonts == 0 then --- if properties then --- properties.virtualized = false --- end --- tfmdata.fonts = nil --- else --- for f=1,#vfonts do --- local fnt = vfonts[f] --- if fnt.id and fnt.id == 0 then --- fnt.id = id --- end --- end --- end --- end --- end - -function commands.setfontofid(id) - context.getvalue(csnames[id]) -end - --- more interfacing: - -commands.definefontfeature = presetcontext - -local cache = { } - -local hows = { - ["+"] = "add", - ["-"] = "subtract", - ["="] = "replace", -} - -function commands.feature(how,parent,name,font) - if not how then - if trace_features and texattribute[0] ~= 0 then - report_cummulative("font %!font:name!, reset",fontdata[font or true]) - end - texattribute[0] = 0 - elseif how == true then - local hash = "feature > " .. parent - local done = cache[hash] - if trace_features and done then - report_cummulative("font %!font:name!, revive %a : %!font:features!",fontdata[font or true],parent,setups[numbers[done]]) - end - texattribute[0] = done or 0 - else - local full = parent .. how .. name - local hash = "feature > " .. full - local done = cache[hash] - if not done then - local n = setups[full] - if n then - -- already defined - else - n = mergecontextfeatures(parent,name,how,full) - end - done = registercontextfeature(hash,full,how) - cache[hash] = done - if trace_features then - report_cummulative("font %!font:name!, %s %a : %!font:features!",fontdata[font or true],hows[how],full,setups[numbers[done]]) - end - end - texattribute[0] = done - end -end - -function commands.featurelist(...) 
- context(fonts.specifiers.contexttostring(...)) -end - -function commands.registerlanguagefeatures() - local specifications = languages.data.specifications - for i=1,#specifications do - local specification = specifications[i] - local language = specification.opentype - if language then - local script = specification.opentypescript or specification.script - if script then - local context = specification.context - if type(context) == "table" then - for i=1,#context do - definecontext(context[i], { language = language, script = script}) - end - elseif type(context) == "string" then - definecontext(context, { language = language, script = script}) - end - end - end - end -end - --- a fontkern plug: - -local copy_node = node.copy -local kern = nodes.pool.register(nodes.pool.kern()) - -node.set_attribute(kern,attributes.private('fontkern'),1) -- we can have several, attributes are shared - -nodes.injections.installnewkern(function(k) - local c = copy_node(kern) - c.kern = k - return c -end) - -directives.register("nodes.injections.fontkern", function(v) kern.subtype = v and 0 or 1 end) - --- here - -local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end) - -local otffeatures = fonts.constructors.newfeatures("otf") -local registerotffeature = otffeatures.register - -local analyzers = fonts.analyzers -local methods = analyzers.methods - -local unsetvalue = attributes.unsetvalue - -local traverse_by_id = node.traverse_id - -local a_color = attributes.private('color') -local a_colormodel = attributes.private('colormodel') -local a_state = attributes.private('state') -local m_color = attributes.list[a_color] or { } - -local glyph_code = nodes.nodecodes.glyph - -local states = analyzers.states - -local names = { - [states.init] = "font:1", - [states.medi] = "font:2", - [states.fina] = "font:3", - [states.isol] = "font:4", - [states.mark] = "font:5", - [states.rest] = "font:6", - [states.rphf] = "font:1", - [states.half] = "font:2", - [states.pref] = "font:3", - [states.blwf] = "font:4", - [states.pstf] = "font:5", -} - -local function markstates(head) - if head then - local model = head[a_colormodel] or 1 - for glyph in traverse_by_id(glyph_code,head) do - local a = glyph[a_state] - if a then - local name = names[a] - if name then - local color = m_color[name] - if color then - glyph[a_colormodel] = model - glyph[a_color] = color - end - end - end - end - end -end - -local function analyzeprocessor(head,font,attr) - local tfmdata = fontdata[font] - local script, language = otf.scriptandlanguage(tfmdata,attr) - local action = methods[script] - if not action then - return head, false - end - if type(action) == "function" then - local head, done = action(head,font,attr) - if done and trace_analyzing then - markstates(head) - end - return head, done - end - action = action[language] - if action then - local head, done = action(head,font,attr) - if done and trace_analyzing then - markstates(head) - end - return head, done - else - return head, false - end -end - -registerotffeature { -- adapts - name = "analyze", - processors = { - node = analyzeprocessor, - } -} - -function methods.nocolor(head,font,attr) - for n in traverse_by_id(glyph_code,head) do - if not font or n.font == font then - n[a_color] = unsetvalue - end - end - return head, true -end +if not modules then modules = { } end modules ['font-ctx'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt 
Development Team", + license = "see context related readme files" +} + +-- At some point I will clean up the code here so that at the tex end +-- the table interface is used. +-- +-- Todo: make a proper 'next id' mechanism (register etc) or wait till 'true' +-- in virtual fonts indices is implemented. + +local context, commands = context, commands + +local texcount, texsetcount = tex.count, tex.setcount +local format, gmatch, match, find, lower, gsub, byte = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub, string.byte +local concat, serialize, sort, fastcopy, mergedtable = table.concat, table.serialize, table.sort, table.fastcopy, table.merged +local sortedhash, sortedkeys, sequenced = table.sortedhash, table.sortedkeys, table.sequenced +local settings_to_hash, hash_to_string = utilities.parsers.settings_to_hash, utilities.parsers.hash_to_string +local formatcolumns = utilities.formatters.formatcolumns +local mergehashes = utilities.parsers.mergehashes +local formatters = string.formatters + +local tostring, next, type, rawget, tonumber = tostring, next, type, rawget, tonumber +local utfchar, utfbyte = utf.char, utf.byte +local round = math.round + +local P, S, C, Cc, Cf, Cg, Ct, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Ct, lpeg.match + +local trace_features = false trackers.register("fonts.features", function(v) trace_features = v end) +local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) +local trace_designsize = false trackers.register("fonts.designsize", function(v) trace_designsize = v end) +local trace_usage = false trackers.register("fonts.usage", function(v) trace_usage = v end) +local trace_mapfiles = false trackers.register("fonts.mapfiles", function(v) trace_mapfiles = v end) +local trace_automode = false trackers.register("fonts.automode", function(v) trace_automode = v end) + +local report_features = logs.reporter("fonts","features") +local report_cummulative = logs.reporter("fonts","cummulative") +local report_defining = logs.reporter("fonts","defining") +local report_status = logs.reporter("fonts","status") +local report_mapfiles = logs.reporter("fonts","mapfiles") + +local setmetatableindex = table.setmetatableindex + +local fonts = fonts +local handlers = fonts.handlers +local otf = handlers.otf -- brrr +local names = fonts.names +local definers = fonts.definers +local specifiers = fonts.specifiers +local constructors = fonts.constructors +local loggers = fonts.loggers +local fontgoodies = fonts.goodies +local helpers = fonts.helpers +local hashes = fonts.hashes +local currentfont = font.current +local texattribute = tex.attribute +local texdimen = tex.dimen + +local fontdata = hashes.identifiers +local characters = hashes.chardata +local descriptions = hashes.descriptions +local properties = hashes.properties +local resources = hashes.resources +local csnames = hashes.csnames +local marks = hashes.markdata +local lastmathids = hashes.lastmathids + +local designsizefilename = fontgoodies.designsizes.filename + +local otffeatures = otf.features +local otftables = otf.tables + +local registerotffeature = otffeatures.register +local baseprocessors = otffeatures.processors.base +local baseinitializers = otffeatures.initializers.base + +local sequencers = utilities.sequencers +local appendgroup = sequencers.appendgroup +local appendaction = sequencers.appendaction + +specifiers.contextsetups = specifiers.contextsetups or { } +specifiers.contextnumbers = 
specifiers.contextnumbers or { } +specifiers.contextmerged = specifiers.contextmerged or { } +specifiers.synonyms = specifiers.synonyms or { } + +local setups = specifiers.contextsetups +local numbers = specifiers.contextnumbers +local merged = specifiers.contextmerged +local synonyms = specifiers.synonyms + +storage.register("fonts/setups" , setups , "fonts.specifiers.contextsetups" ) +storage.register("fonts/numbers", numbers, "fonts.specifiers.contextnumbers") +storage.register("fonts/merged", merged, "fonts.specifiers.contextmerged") +storage.register("fonts/synonyms", synonyms, "fonts.specifiers.synonyms") + +-- inspect(setups) + +if environment.initex then + setmetatableindex(setups,function(t,k) + return type(k) == "number" and rawget(t,numbers[k]) or nil + end) +else + setmetatableindex(setups,function(t,k) + local v = type(k) == "number" and rawget(t,numbers[k]) + if v then + t[k] = v + return v + end + end) +end + +-- this will move elsewhere ... + +utilities.strings.formatters.add(formatters,"font:name", [["'"..file.basename(%s.properties.name).."'"]]) +utilities.strings.formatters.add(formatters,"font:features",[["'"..table.sequenced(%s," ",true).."'"]]) + +-- ... like font-sfm or so + +constructors.resolvevirtualtoo = true -- context specific (due to resolver) + +local limited = false + +directives.register("system.inputmode", function(v) + if not limited then + local i_limiter = io.i_limiter(v) + if i_limiter then + fontloader.open = i_limiter.protect(fontloader.open) + fontloader.info = i_limiter.protect(fontloader.info) + limited = true + end + end +end) + +function definers.resetnullfont() + -- resetting is needed because tikz misuses nullfont + local parameters = fonts.nulldata.parameters + -- + parameters.slant = 0 -- 1 + parameters.space = 0 -- 2 + parameters.space_stretch = 0 -- 3 + parameters.space_shrink = 0 -- 4 + parameters.x_height = 0 -- 5 + parameters.quad = 0 -- 6 + parameters.extra_space = 0 -- 7 + -- + constructors.enhanceparameters(parameters) -- official copies for us + -- + definers.resetnullfont = function() end +end + +commands.resetnullfont = definers.resetnullfont + +-- this cannot be a feature initializer as there is no auto namespace +-- so we never enter the loop then; we can store the defaults in the tma +-- file (features.gpos.mkmk = 1 etc) + +local needsnodemode = { + gpos_mark2mark = true, + gpos_mark2base = true, + gpos_mark2ligature = true, +} + +otftables.scripts.auto = "automatic fallback to latn when no dflt present" + +-- setmetatableindex(otffeatures.descriptions,otftables.features) + +local privatefeatures = { + tlig = true, + trep = true, + anum = true, +} + +local function checkedscript(tfmdata,resources,features) + local latn = false + local script = false + for g, list in next, resources.features do + for f, scripts in next, list do + if privatefeatures[f] then + -- skip + elseif scripts.dflt then + script = "dflt" + break + elseif scripts.latn then + latn = true + end + end + end + if not script then + script = latn and "latn" or "dflt" + end + if trace_automode then + report_defining("auto script mode, using script %a in font %!font:name!",script,tfmdata) + end + features.script = script + return script +end + +local function checkedmode(tfmdata,resources,features) + local sequences = resources.sequences + if sequences and #sequences > 0 then + local script = features.script or "dflt" + local language = features.language or "dflt" + for feature, value in next, features do + if value then + local found = false + for i=1,#sequences 
do + local sequence = sequences[i] + local features = sequence.features + if features then + local scripts = features[feature] + if scripts then + local languages = scripts[script] + if languages and languages[language] then + if found then + -- more than one lookup + if trace_automode then + report_defining("forcing mode %a, font %!font:name!, feature %a, script %a, language %a, %s", + "node",tfmdata,feature,script,language,"multiple lookups") + end + features.mode = "node" + return "node" + elseif needsnodemode[sequence.type] then + if trace_automode then + report_defining("forcing mode %a, font %!font:name!, feature %a, script %a, language %a, %s", + "node",tfmdata,feature,script,language,"no base support") + end + features.mode = "node" + return "node" + else + -- at least one lookup + found = true + end + end + end + end + end + end + end + end + features.mode = "base" -- new, or is this wrong? + return "base" +end + +definers.checkedscript = checkedscript +definers.checkedmode = checkedmode + +local function modechecker(tfmdata,features,mode) -- we cannot adapt features as they are shared! + if trace_features then + report_features("fontname %!font:name!, features %!font:features!",tfmdata,features) + end + local rawdata = tfmdata.shared.rawdata + local resources = rawdata and rawdata.resources + local script = features.script + if resources then + if script == "auto" then + script = checkedscript(tfmdata,resources,features) + end + if mode == "auto" then + mode = checkedmode(tfmdata,resources,features) + end + else + report_features("missing resources for font %!font:name!",tfmdata) + end + return mode +end + +registerotffeature { + -- we only set the checker and leave other settings of the mode + -- feature as they are + name = "mode", + modechecker = modechecker, +} + +-- -- default = true anyway +-- +-- local normalinitializer = constructors.getfeatureaction("otf","initializers","node","analyze") +-- +-- local function analyzeinitializer(tfmdata,value,features) -- attr +-- if value == "auto" and features then +-- value = features.init or features.medi or features.fina or features.isol or false +-- end +-- return normalinitializer(tfmdata,value,features) +-- end +-- +-- registerotffeature { +-- name = "analyze", +-- initializers = { +-- node = analyzeinitializer, +-- }, +-- } + +local beforecopyingcharacters = sequencers.new { + name = "beforecopyingcharacters", + arguments = "target,original", +} + +appendgroup(beforecopyingcharacters,"before") -- user +appendgroup(beforecopyingcharacters,"system") -- private +appendgroup(beforecopyingcharacters,"after" ) -- user + +function constructors.beforecopyingcharacters(original,target) + local runner = beforecopyingcharacters.runner + if runner then + runner(original,target) + end +end + +local aftercopyingcharacters = sequencers.new { + name = "aftercopyingcharacters", + arguments = "target,original", +} + +appendgroup(aftercopyingcharacters,"before") -- user +appendgroup(aftercopyingcharacters,"system") -- private +appendgroup(aftercopyingcharacters,"after" ) -- user + +function constructors.aftercopyingcharacters(original,target) + local runner = aftercopyingcharacters.runner + if runner then + runner(original,target) + end +end + +--[[ldx-- +

So far we haven't really dealt with features (or whatever we want to pass along with the font definition). We distinguish the following situations:


+ + +name:xetex like specs +name@virtual font spec +name*context specification + +--ldx]]-- + +-- currently fonts are scaled while constructing the font, so we +-- have to do scaling of commands in the vf at that point using e.g. +-- "local scale = g.parameters.factor or 1" after all, we need to +-- work with copies anyway and scaling needs to be done at some point; +-- however, when virtual tricks are used as feature (makes more +-- sense) we scale the commands in fonts.constructors.scale (and set the +-- factor there) + +local loadfont = definers.loadfont + +function definers.loadfont(specification,size,id) -- overloads the one in font-def + local variants = definers.methods.variants + local virtualfeatures = specification.features.virtual + if virtualfeatures and virtualfeatures.preset then + local variant = variants[virtualfeatures.preset] + if variant then + return variant(specification,size,id) + end + else + local tfmdata = loadfont(specification,size,id) + -- constructors.checkvirtualid(tfmdata,id) + return tfmdata + end +end + +local function predefined(specification) + local variants = definers.methods.variants + local detail = specification.detail + if detail ~= "" and variants[detail] then + specification.features.virtual = { preset = detail } + end + return specification +end + +definers.registersplit("@", predefined,"virtual") + +local normalize_features = otffeatures.normalize -- should be general + +local function definecontext(name,t) -- can be shared + local number = setups[name] and setups[name].number or 0 -- hm, numbers[name] + if number == 0 then + number = #numbers + 1 + numbers[number] = name + end + t.number = number + setups[name] = t + return number, t +end + +local function presetcontext(name,parent,features) -- will go to con and shared + if features == "" and find(parent,"=") then + features = parent + parent = "" + end + if not features or features == "" then + features = { } + elseif type(features) == "string" then + features = normalize_features(settings_to_hash(features)) + else + features = normalize_features(features) + end + -- todo: synonyms, and not otf bound + if parent ~= "" then + for p in gmatch(parent,"[^, ]+") do + local s = setups[p] + if s then + for k,v in next, s do + if features[k] == nil then + features[k] = v + end + end + else + -- just ignore an undefined one .. i.e. we can refer to not yet defined + end + end + end + -- these are auto set so in order to prevent redundant definitions + -- we need to preset them (we hash the features and adding a default + -- setting during initialization may result in a different hash) + -- + -- for k,v in next, triggers do + -- if features[v] == nil then -- not false ! + -- local vv = default_features[v] + -- if vv then features[v] = vv end + -- end + -- end + -- + for feature,value in next, features do + if value == nil then -- not false ! + local default = default_features[feature] + if default ~= nil then + features[feature] = default + end + end + end + -- sparse 'm so that we get a better hash and less test (experimental + -- optimization) + local t = { } -- can we avoid t ? 
+ for k,v in next, features do +-- if v then t[k] = v end + t[k] = v + end + -- needed for dynamic features + -- maybe number should always be renewed as we can redefine features + local number = setups[name] and setups[name].number or 0 -- hm, numbers[name] + if number == 0 then + number = #numbers + 1 + numbers[number] = name + end + t.number = number + setups[name] = t + return number, t +end + +local function contextnumber(name) -- will be replaced + local t = setups[name] + if not t then + return 0 + elseif t.auto then + local lng = tonumber(tex.language) + local tag = name .. ":" .. lng + local s = setups[tag] + if s then + return s.number or 0 + else + local script, language = languages.association(lng) + if t.script ~= script or t.language ~= language then + local s = fastcopy(t) + local n = #numbers + 1 + setups[tag] = s + numbers[n] = tag + s.number = n + s.script = script + s.language = language + return n + else + setups[tag] = t + return t.number or 0 + end + end + else + return t.number or 0 + end +end + +local function mergecontext(currentnumber,extraname,option) -- number string number (used in scrp-ini + local extra = setups[extraname] + if extra then + local current = setups[numbers[currentnumber]] + local mergedfeatures, mergedname = { }, nil + if option < 0 then + if current then + for k, v in next, current do + if not extra[k] then + mergedfeatures[k] = v + end + end + end + mergedname = currentnumber .. "-" .. extraname + else + if current then + for k, v in next, current do + mergedfeatures[k] = v + end + end + for k, v in next, extra do + mergedfeatures[k] = v + end + mergedname = currentnumber .. "+" .. extraname + end + local number = #numbers + 1 + mergedfeatures.number = number + numbers[number] = mergedname + merged[number] = option + setups[mergedname] = mergedfeatures + return number -- contextnumber(mergedname) + else + return currentnumber + end +end + +local extrasets = { } + +setmetatableindex(extrasets,function(t,k) + local v = mergehashes(setups,k) + t[k] = v + return v +end) + +local function mergecontextfeatures(currentname,extraname,how,mergedname) -- string string + local extra = setups[extraname] or extrasets[extraname] + if extra then + local current = setups[currentname] + local mergedfeatures = { } + if how == "+" then + if current then + for k, v in next, current do + mergedfeatures[k] = v + end + end + for k, v in next, extra do + mergedfeatures[k] = v + end + elseif how == "-" then + if current then + for k, v in next, current do + mergedfeatures[k] = v + end + end + for k, v in next, extra do + -- only boolean features + if v == true then + mergedfeatures[k] = false + end + end + else -- = + for k, v in next, extra do + mergedfeatures[k] = v + end + end + local number = #numbers + 1 + mergedfeatures.number = number + numbers[number] = mergedname + merged[number] = option + setups[mergedname] = mergedfeatures + return number + else + return numbers[currentname] or 0 + end +end + +local function registercontext(fontnumber,extraname,option) + local extra = setups[extraname] + if extra then + local mergedfeatures, mergedname = { }, nil + if option < 0 then + mergedname = fontnumber .. "-" .. extraname + else + mergedname = fontnumber .. "+" .. 
extraname + end + for k, v in next, extra do + mergedfeatures[k] = v + end + local number = #numbers + 1 + mergedfeatures.number = number + numbers[number] = mergedname + merged[number] = option + setups[mergedname] = mergedfeatures + return number -- contextnumber(mergedname) + else + return 0 + end +end + +local function registercontextfeature(mergedname,extraname,how) + local extra = setups[extraname] + if extra then + local mergedfeatures = { } + for k, v in next, extra do + mergedfeatures[k] = v + end + local number = #numbers + 1 + mergedfeatures.number = number + numbers[number] = mergedname + merged[number] = how == "=" and 1 or 2 -- 1=replace, 2=combine + setups[mergedname] = mergedfeatures + return number -- contextnumber(mergedname) + else + return 0 + end +end + +specifiers.presetcontext = presetcontext +specifiers.contextnumber = contextnumber +specifiers.mergecontext = mergecontext +specifiers.registercontext = registercontext +specifiers.definecontext = definecontext + +-- we extend the hasher: + +constructors.hashmethods.virtual = function(list) + local s = { } + local n = 0 + for k, v in next, list do + n = n + 1 + s[n] = k -- no checking on k + end + if n > 0 then + sort(s) + for i=1,n do + local k = s[i] + s[i] = k .. '=' .. tostring(list[k]) + end + return concat(s,"+") + end +end + +-- end of redefine + +-- local withcache = { } -- concat might be less efficient than nested tables +-- +-- local function withset(name,what) +-- local zero = texattribute[0] +-- local hash = zero .. "+" .. name .. "*" .. what +-- local done = withcache[hash] +-- if not done then +-- done = mergecontext(zero,name,what) +-- withcache[hash] = done +-- end +-- texattribute[0] = done +-- end +-- +-- local function withfnt(name,what,font) +-- local font = font or currentfont() +-- local hash = font .. "*" .. name .. "*" .. what +-- local done = withcache[hash] +-- if not done then +-- done = registercontext(font,name,what) +-- withcache[hash] = done +-- end +-- texattribute[0] = done +-- end + +function specifiers.showcontext(name) + return setups[name] or setups[numbers[name]] or setups[numbers[tonumber(name)]] or { } +end + +-- we need a copy as we will add (fontclass) goodies to the features and +-- that is bad for a shared table + +-- local function splitcontext(features) -- presetcontext creates dummy here +-- return fastcopy(setups[features] or (presetcontext(features,"","") and setups[features])) +-- end + +local function splitcontext(features) -- presetcontext creates dummy here + local sf = setups[features] + if not sf then + local n -- number + if find(features,",") then + -- let's assume a combination which is not yet defined but just specified (as in math) + n, sf = presetcontext(features,features,"") + else + -- we've run into an unknown feature and or a direct spec so we create a dummy + n, sf = presetcontext(features,"","") + end + end + return fastcopy(sf) +end + +-- local splitter = lpeg.splitat("=") +-- +-- local function splitcontext(features) +-- local setup = setups[features] +-- if setup then +-- return setup +-- elseif find(features,",") then +-- -- This is not that efficient but handy anyway for quick and dirty tests +-- -- beware, due to the way of caching setups you can get the wrong results +-- -- when components change. A safeguard is to nil the cache. 
+-- local merge = nil +-- for feature in gmatch(features,"[^, ]+") do +-- if find(feature,"=") then +-- local k, v = lpegmatch(splitter,feature) +-- if k and v then +-- if not merge then +-- merge = { k = v } +-- else +-- merge[k] = v +-- end +-- end +-- else +-- local s = setups[feature] +-- if not s then +-- -- skip +-- elseif not merge then +-- merge = s +-- else +-- for k, v in next, s do +-- merge[k] = v +-- end +-- end +-- end +-- end +-- setup = merge and presetcontext(features,"",merge) and setups[features] +-- -- actually we have to nil setups[features] in order to permit redefinitions +-- setups[features] = nil +-- end +-- return setup or (presetcontext(features,"","") and setups[features]) -- creates dummy +-- end + +specifiers.splitcontext = splitcontext + +function specifiers.contexttostring(name,kind,separator,yes,no,strict,omit) -- not used + return hash_to_string(mergedtable(handlers[kind].features.defaults or {},setups[name] or {}),separator,yes,no,strict,omit) +end + +local function starred(features) -- no longer fallbacks here + local detail = features.detail + if detail and detail ~= "" then + features.features.normal = splitcontext(detail) + else + features.features.normal = { } + end + return features +end + +definers.registersplit('*',starred,"featureset") + +-- sort of xetex mode, but without [] and / as we have file: and name: etc + +local space = P(" ") +local separator = S(";,") +local equal = P("=") +local spaces = space^0 +local sometext = C((1-equal-space-separator)^1) +local truevalue = P("+") * spaces * sometext * Cc(true) -- "yes" +local falsevalue = P("-") * spaces * sometext * Cc(false) -- "no" +local keyvalue = sometext * spaces * equal * spaces * sometext +local somevalue = sometext * spaces * Cc(true) -- "yes" +local pattern = Cf(Ct("") * (space + separator + Cg(keyvalue + falsevalue + truevalue + somevalue))^0, rawset) + +local function colonized(specification) + specification.features.normal = normalize_features(lpegmatch(pattern,specification.detail)) + return specification +end + +definers.registersplit(":",colonized,"direct") + +-- define (two steps) + +local space = P(" ") +local spaces = space^0 +local leftparent = (P"(") +local rightparent = (P")") +local value = C((leftparent * (1-rightparent)^0 * rightparent + (1-space))^1) +local dimension = C((space/"" + P(1))^1) +local rest = C(P(1)^0) +local scale_none = Cc(0) +local scale_at = P("at") * Cc(1) * spaces * dimension -- value +local scale_sa = P("sa") * Cc(2) * spaces * dimension -- value +local scale_mo = P("mo") * Cc(3) * spaces * dimension -- value +local scale_scaled = P("scaled") * Cc(4) * spaces * dimension -- value + +local sizepattern = spaces * (scale_at + scale_sa + scale_mo + scale_scaled + scale_none) +local splitpattern = spaces * value * spaces * rest + +function helpers.splitfontpattern(str) + local name, size = lpegmatch(splitpattern,str) + local kind, size = lpegmatch(sizepattern,size) + return name, kind, size +end + +function helpers.fontpatternhassize(str) + local name, size = lpegmatch(splitpattern,str) + local kind, size = lpegmatch(sizepattern,size) + return size or false +end + +local specification -- still needed as local ? + +local getspecification = definers.getspecification + +-- we can make helper macros which saves parsing (but normaly not +-- that many calls, e.g. 
in mk a couple of 100 and in metafun 3500) + +local setdefaultfontname = context.fntsetdefname +local setsomefontname = context.fntsetsomename +local setemptyfontsize = context.fntsetnopsize +local setsomefontsize = context.fntsetsomesize +local letvaluerelax = context.letvaluerelax + +function commands.definefont_one(str) + statistics.starttiming(fonts) + if trace_defining then + report_defining("memory usage before: %s",statistics.memused()) + report_defining("start stage one: %s",str) + end + local fullname, size = lpegmatch(splitpattern,str) + local lookup, name, sub, method, detail = getspecification(fullname) + if not name then + report_defining("strange definition %a",str) + setdefaultfontname() + elseif name == "unknown" then + setdefaultfontname() + else + setsomefontname(name) + end + -- we can also use a count for the size + if size and size ~= "" then + local mode, size = lpegmatch(sizepattern,size) + if size and mode then + texcount.scaledfontmode = mode + setsomefontsize(size) + else + texcount.scaledfontmode = 0 + setemptyfontsize() + end + elseif true then + -- so we don't need to check in tex + texcount.scaledfontmode = 2 + setemptyfontsize() + else + texcount.scaledfontmode = 0 + setemptyfontsize() + end + specification = definers.makespecification(str,lookup,name,sub,method,detail,size) + if trace_defining then + report_defining("stop stage one") + end +end + +local n = 0 + +-- we can also move rscale to here (more consistent) +-- the argument list will become a table + +local function nice_cs(cs) + return (gsub(cs,".->", "")) +end + +function commands.definefont_two(global,cs,str,size,inheritancemode,classfeatures,fontfeatures,classfallbacks,fontfallbacks, + mathsize,textsize,relativeid,classgoodies,goodies,classdesignsize,fontdesignsize) + if trace_defining then + report_defining("start stage two: %s (size %s)",str,size) + end + -- name is now resolved and size is scaled cf sa/mo + local lookup, name, sub, method, detail = getspecification(str or "") + -- new (todo: inheritancemode) + local designsize = fontdesignsize ~= "" and fontdesignsize or classdesignsize or "" + local designname = designsizefilename(name,designsize,size) + if designname and designname ~= "" then + if trace_defining or trace_designsize then + report_defining("remapping name %a, specification %a, size %a, designsize %a",name,designsize,size,designname) + end + -- we don't catch detail here + local o_lookup, o_name, o_sub, o_method, o_detail = getspecification(designname) + if o_lookup and o_lookup ~= "" then lookup = o_lookup end + if o_method and o_method ~= "" then method = o_method end + if o_detail and o_detail ~= "" then detail = o_detail end + name = o_name + sub = o_sub + end + -- so far + -- some settings can have been overloaded + if lookup and lookup ~= "" then + specification.lookup = lookup + end + if relativeid and relativeid ~= "" then -- experimental hook + local id = tonumber(relativeid) or 0 + specification.relativeid = id > 0 and id + end + specification.name = name + specification.size = size + specification.sub = (sub and sub ~= "" and sub) or specification.sub + specification.mathsize = mathsize + specification.textsize = textsize + specification.goodies = goodies + specification.cs = cs + specification.global = global + if detail and detail ~= "" then + specification.method = method or "*" + specification.detail = detail + elseif specification.detail and specification.detail ~= "" then + -- already set + elseif inheritancemode == 0 then + -- nothing + elseif inheritancemode 
== 1 then + -- fontonly + if fontfeatures and fontfeatures ~= "" then + specification.method = "*" + specification.detail = fontfeatures + end + if fontfallbacks and fontfallbacks ~= "" then + specification.fallbacks = fontfallbacks + end + elseif inheritancemode == 2 then + -- classonly + if classfeatures and classfeatures ~= "" then + specification.method = "*" + specification.detail = classfeatures + end + if classfallbacks and classfallbacks ~= "" then + specification.fallbacks = classfallbacks + end + elseif inheritancemode == 3 then + -- fontfirst + if fontfeatures and fontfeatures ~= "" then + specification.method = "*" + specification.detail = fontfeatures + elseif classfeatures and classfeatures ~= "" then + specification.method = "*" + specification.detail = classfeatures + end + if fontfallbacks and fontfallbacks ~= "" then + specification.fallbacks = fontfallbacks + elseif classfallbacks and classfallbacks ~= "" then + specification.fallbacks = classfallbacks + end + elseif inheritancemode == 4 then + -- classfirst + if classfeatures and classfeatures ~= "" then + specification.method = "*" + specification.detail = classfeatures + elseif fontfeatures and fontfeatures ~= "" then + specification.method = "*" + specification.detail = fontfeatures + end + if classfallbacks and classfallbacks ~= "" then + specification.fallbacks = classfallbacks + elseif fontfallbacks and fontfallbacks ~= "" then + specification.fallbacks = fontfallbacks + end + end + local tfmdata = definers.read(specification,size) -- id not yet known (size in spec?) + -- + local lastfontid = 0 + if not tfmdata then + report_defining("unable to define %a as %a",name,nice_cs(cs)) + lastfontid = -1 + letvaluerelax(cs) -- otherwise the current definition takes the previous one + elseif type(tfmdata) == "number" then + if trace_defining then + report_defining("reusing %s, id %a, target %a, features %a / %a, fallbacks %a / %a, goodies %a / %a, designsize %a / %a", + name,tfmdata,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks,classgoodies,goodies,classdesignsize,fontdesignsize) + end + csnames[tfmdata] = specification.cs + tex.definefont(global,cs,tfmdata) + -- resolved (when designsize is used): + setsomefontsize((fontdata[tfmdata].parameters.size or 0) .. "sp") + lastfontid = tfmdata + else + -- setting the extra characters will move elsewhere + local characters = tfmdata.characters + local parameters = tfmdata.parameters + -- we use char0 as signal; cf the spec pdf can handle this (no char in slot) + characters[0] = nil + -- characters[0x00A0] = { width = parameters.space } + -- characters[0x2007] = { width = characters[0x0030] and characters[0x0030].width or parameters.space } -- figure + -- characters[0x2008] = { width = characters[0x002E] and characters[0x002E].width or parameters.space } -- period + -- + local id = font.define(tfmdata) + csnames[id] = specification.cs + tfmdata.properties.id = id + definers.register(tfmdata,id) -- to be sure, normally already done + tex.definefont(global,cs,id) + constructors.cleanuptable(tfmdata) + constructors.finalize(tfmdata) + if trace_defining then + report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a", + name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks) + end + -- resolved (when designsize is used): + setsomefontsize((tfmdata.parameters.size or 655360) .. 
"sp") + lastfontid = id + end + if trace_defining then + report_defining("memory usage after: %s",statistics.memused()) + report_defining("stop stage two") + end + -- + texsetcount("global","lastfontid",lastfontid) + if not mathsize then + -- forget about it + elseif mathsize == 0 then + lastmathids[1] = lastfontid + else + lastmathids[mathsize] = lastfontid + end + -- + statistics.stoptiming(fonts) +end + +function definers.define(specification) + -- + local name = specification.name + if not name or name == "" then + return -1 + else + statistics.starttiming(fonts) + -- + -- following calls expect a few properties to be set: + -- + local lookup, name, sub, method, detail = getspecification(name or "") + -- + specification.name = (name ~= "" and name) or specification.name + -- + specification.lookup = specification.lookup or (lookup ~= "" and lookup) or "file" + specification.size = specification.size or 655260 + specification.sub = specification.sub or (sub ~= "" and sub) or "" + specification.method = specification.method or (method ~= "" and method) or "*" + specification.detail = specification.detail or (detail ~= "" and detail) or "" + -- + if type(specification.size) == "string" then + specification.size = tex.sp(specification.size) or 655260 + end + -- + specification.specification = "" -- not used + specification.resolved = "" + specification.forced = "" + specification.features = { } -- via detail, maybe some day + -- + -- we don't care about mathsize textsize goodies fallbacks + -- + local cs = specification.cs + if cs == "" then + cs = nil + specification.cs = nil + specification.global = false + elseif specification.global == nil then + specification.global = false + end + -- + local tfmdata = definers.read(specification,specification.size) + if not tfmdata then + return -1, nil + elseif type(tfmdata) == "number" then + if cs then + tex.definefont(specification.global,cs,tfmdata) + csnames[tfmdata] = cs + end + return tfmdata, fontdata[tfmdata] + else + local id = font.define(tfmdata) + tfmdata.properties.id = id + definers.register(tfmdata,id) + if cs then + tex.definefont(specification.global,cs,id) + csnames[id] = cs + end + constructors.cleanuptable(tfmdata) + constructors.finalize(tfmdata) + return id, tfmdata + end + statistics.stoptiming(fonts) + end +end + +-- local id, cs = fonts.definers.internal { } +-- local id, cs = fonts.definers.internal { number = 2 } +-- local id, cs = fonts.definers.internal { name = "dejavusans" } + +local n = 0 + +function definers.internal(specification,cs) + specification = specification or { } + local name = specification.name + local size = specification.size and number.todimen(specification.size) or texdimen.bodyfontsize + local number = tonumber(specification.number) + local id = nil + if number then + id = number + elseif name and name ~= "" then + local cs = cs or specification.cs + if not cs then + n = n + 1 -- beware ... there can be many and they are often used once + -- cs = formatters["internal font %s"](n) + cs = "internal font " .. n + else + specification.cs = cs + end + id = definers.define { + name = name, + size = size, + cs = cs, + } + end + if not id then + id = currentfont() + end + return id, csnames[id] +end + +local enable_auto_r_scale = false + +experiments.register("fonts.autorscale", function(v) + enable_auto_r_scale = v +end) + +-- Not ok, we can best use a database for this. The problem is that we +-- have delayed definitions and so we never know what style is taken +-- as start. 
+ +local calculatescale = constructors.calculatescale + +function constructors.calculatescale(tfmdata,scaledpoints,relativeid) + local scaledpoints, delta = calculatescale(tfmdata,scaledpoints) + -- if enable_auto_r_scale and relativeid then -- for the moment this is rather context specific + -- local relativedata = fontdata[relativeid] + -- local rfmdata = relativedata and relativedata.unscaled and relativedata.unscaled + -- local id_x_height = rfmdata and rfmdata.parameters and rfmdata.parameters.x_height + -- local tf_x_height = tfmdata and tfmdata.parameters and tfmdata.parameters.x_height + -- if id_x_height and tf_x_height then + -- local rscale = id_x_height/tf_x_height + -- delta = rscale * delta + -- scaledpoints = rscale * scaledpoints + -- end + -- end + return scaledpoints, delta +end + +-- We overload the (generic) resolver: + +local resolvers = definers.resolvers +local hashfeatures = constructors.hashfeatures + +function definers.resolve(specification) -- overload function in font-con.lua + if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash + local r = resolvers[specification.lookup] + if r then + r(specification) + end + end + if specification.forced == "" then + specification.forced = nil + else + specification.forced = specification.forced + end + -- goodies are a context specific thing and not always defined + -- as feature, so we need to make sure we add them here before + -- hashing because otherwise we get funny goodies applied + local goodies = specification.goodies + if goodies and goodies ~= "" then + -- this adapts the features table so it has best be a copy + local normal = specification.features.normal + if not normal then + specification.features.normal = { goodies = goodies } + elseif not normal.goodies then + local g = normal.goodies + if g and g ~= "" then + normal.goodies = formatters["%s,%s"](g,goodies) + else + normal.goodies = goodies + end + end + end + -- so far for goodie hacks + specification.hash = lower(specification.name .. ' @ ' .. hashfeatures(specification)) + if specification.sub and specification.sub ~= "" then + specification.hash = specification.sub .. ' @ ' .. specification.hash + end + return specification +end + + +-- soon to be obsolete: + +local mappings = fonts.mappings + +local loaded = { -- prevent loading (happens in cont-sys files) + ["original-base.map" ] = true, + ["original-ams-base.map" ] = true, + ["original-ams-euler.map"] = true, + ["original-public-lm.map"] = true, +} + +function mappings.loadfile(name) + name = file.addsuffix(name,"map") + if not loaded[name] then + if trace_mapfiles then + report_mapfiles("loading map file %a",name) + end + pdf.mapfile(name) + loaded[name] = true + end +end + +local loaded = { -- prevent double loading +} + +function mappings.loadline(how,line) + if line then + how = how .. " " .. line + elseif how == "" then + how = "= " .. line + end + if not loaded[how] then + if trace_mapfiles then + report_mapfiles("processing map line %a",line) + end + pdf.mapline(how) + loaded[how] = true + end +end + +function mappings.reset() + pdf.mapfile("") +end + +mappings.reset() -- resets the default file + +-- we need an 'do after the banner hook' + +-- => commands + +local function nametoslot(name) + local t = type(name) + if t == "string" then + return resources[true].unicodes[name] + elseif t == "number" then + return n + end +end + +helpers.nametoslot = nametoslot + +-- this will change ... 
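-- [editorial sketch, not part of this patch] nametoslot above resolves a glyph
-- name to its slot (unicode) in the current font. A hedged usage example,
-- assuming the current font actually carries a glyph named "fi":

local function typesetnamedglyph(glyphname)
    local slot = helpers.nametoslot(glyphname) -- e.g. helpers.nametoslot("fi")
    if slot then
        context.char(slot) -- cf. commands.fontchar further down
    end
end

-- typesetnamedglyph is an illustrative helper only; commands.fontchar below
-- provides the same behaviour at the TeX end.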
+ +function loggers.reportdefinedfonts() + if trace_usage then + local t, tn = { }, 0 + for id, data in sortedhash(fontdata) do + local properties = data.properties or { } + local parameters = data.parameters or { } + tn = tn + 1 + t[tn] = { + format("%03i",id or 0), + format("%09i",parameters.size or 0), + properties.type or "real", + properties.format or "unknown", + properties.name or "", + properties.psname or "", + properties.fullname or "", + } + report_status("%s: % t",properties.name,sortedkeys(data)) + end + formatcolumns(t," ") + report_status() + report_status("defined fonts:") + report_status() + for k=1,tn do + report_status(t[k]) + end + end +end + +luatex.registerstopactions(loggers.reportdefinedfonts) + +function loggers.reportusedfeatures() + -- numbers, setups, merged + if trace_usage then + local t, n = { }, #numbers + for i=1,n do + local name = numbers[i] + local setup = setups[name] + local n = setup.number + setup.number = nil -- we have no reason to show this + t[i] = { i, name, sequenced(setup,false,true) } -- simple mode + setup.number = n -- restore it (normally not needed as we're done anyway) + end + formatcolumns(t," ") + report_status() + report_status("defined featuresets:") + report_status() + for k=1,n do + report_status(t[k]) + end + end +end + +luatex.registerstopactions(loggers.reportusedfeatures) + +statistics.register("fonts load time", function() + return statistics.elapsedseconds(fonts) +end) + +-- experimental mechanism for Mojca: +-- +-- fonts.definetypeface { +-- name = "mainbodyfont-light", +-- preset = "antykwapoltawskiego-light", +-- } +-- +-- fonts.definetypeface { +-- name = "mojcasfavourite", +-- preset = "antykwapoltawskiego", +-- normalweight = "light", +-- boldweight = "bold", +-- width = "condensed", +-- } + +local Shapes = { + serif = "Serif", + sans = "Sans", + mono = "Mono", +} + +function fonts.definetypeface(name,t) + if type(name) == "table" then + -- {name=abc,k=v,...} + t = name + elseif t then + if type(t) == "string" then + -- "abc", "k=v,..." + t = settings_to_hash(name) + else + -- "abc", {k=v,...} + end + t.name = t.name or name + else + -- "name=abc,k=v,..." 
+ t = settings_to_hash(name) + end + local p = t.preset and fonts.typefaces[t.preset] or { } + local name = t.name or "unknowntypeface" + local shortcut = t.shortcut or p.shortcut or "rm" + local size = t.size or p.size or "default" + local shape = t.shape or p.shape or "serif" + local fontname = t.fontname or p.fontname or "unknown" + local normalweight = t.normalweight or t.weight or p.normalweight or p.weight or "normal" + local boldweight = t.boldweight or t.weight or p.boldweight or p.weight or "normal" + local normalwidth = t.normalwidth or t.width or p.normalwidth or p.width or "normal" + local boldwidth = t.boldwidth or t.width or p.boldwidth or p.width or "normal" + Shape = Shapes[shape] or "Serif" + context.startfontclass { name } + context.definefontsynonym( { format("%s", Shape) }, { format("spec:%s-%s-regular-%s", fontname, normalweight, normalwidth) } ) + context.definefontsynonym( { format("%sBold", Shape) }, { format("spec:%s-%s-regular-%s", fontname, boldweight, boldwidth ) } ) + context.definefontsynonym( { format("%sBoldItalic", Shape) }, { format("spec:%s-%s-italic-%s", fontname, boldweight, boldwidth ) } ) + context.definefontsynonym( { format("%sItalic", Shape) }, { format("spec:%s-%s-italic-%s", fontname, normalweight, normalwidth) } ) + context.stopfontclass() + local settings = sequenced({ features= t.features },",") + context.dofastdefinetypeface(name, shortcut, shape, size, settings) +end + +function fonts.current() -- todo: also handle name + return fontdata[currentfont()] or fontdata[0] +end + +function fonts.currentid() + return currentfont() or 0 +end + +-- interfaces + +function commands.fontchar(n) + n = nametoslot(n) + if n then + context.char(n) + end +end + +function commands.doifelsecurrentfonthasfeature(name) -- can be made faster with a supportedfeatures hash + local f = fontdata[currentfont()] + f = f and f.shared + f = f and f.rawdata + f = f and f.resources + f = f and f.features + commands.doifelse(f and (f.gpos[name] or f.gsub[name])) +end + +local p, f = 1, formatters["%0.1fpt"] -- normally this value is changed only once + +local stripper = lpeg.patterns.stripzeros + +function commands.nbfs(amount,precision) + if precision ~= p then + p = precision + f = formatters["%0." .. p .. 
"fpt"] + end + context(lpegmatch(stripper,f(amount/65536))) +end + +function commands.featureattribute(tag) + context(contextnumber(tag)) +end + +function commands.setfontfeature(tag) + texattribute[0] = contextnumber(tag) +end + +function commands.resetfontfeature() + texattribute[0] = 0 +end + +-- function commands.addfs(tag) withset(tag, 1) end +-- function commands.subfs(tag) withset(tag,-1) end +-- function commands.addff(tag) withfnt(tag, 2) end -- on top of font features +-- function commands.subff(tag) withfnt(tag,-2) end -- on top of font features + +function commands.cleanfontname (name) context(names.cleanname(name)) end + +function commands.fontlookupinitialize (name) names.lookup(name) end +function commands.fontlookupnoffound () context(names.noflookups()) end +function commands.fontlookupgetkeyofindex(key,index) context(names.getlookupkey(key,index)) end +function commands.fontlookupgetkey (key) context(names.getlookupkey(key)) end + +-- this might move to a runtime module: + +function commands.showchardata(n) + local tfmdata = fontdata[currentfont()] + if tfmdata then + if type(n) == "string" then + n = utfbyte(n) + end + local chr = tfmdata.characters[n] + if chr then + report_status("%s @ %s => %U => %c => %s",tfmdata.properties.fullname,tfmdata.parameters.size,n,n,serialize(chr,false)) + end + end +end + +function commands.showfontparameters(tfmdata) + -- this will become more clever + local tfmdata = tfmdata or fontdata[currentfont()] + if tfmdata then + local parameters = tfmdata.parameters + local mathparameters = tfmdata.mathparameters + local properties = tfmdata.properties + local hasparameters = parameters and next(parameters) + local hasmathparameters = mathparameters and next(mathparameters) + if hasparameters then + report_status("%s @ %s => text parameters => %s",properties.fullname,parameters.size,serialize(parameters,false)) + end + if hasmathparameters then + report_status("%s @ %s => math parameters => %s",properties.fullname,parameters.size,serialize(mathparameters,false)) + end + if not hasparameters and not hasmathparameters then + report_status("%s @ %s => no text parameters and/or math parameters",properties.fullname,parameters.size) + end + end +end + +-- for the moment here, this will become a chain of extras that is +-- hooked into the ctx registration (or scaler or ...) 
+ +local dimenfactors = number.dimenfactors + +function helpers.dimenfactor(unit,tfmdata) -- could be a method of a font instance + if unit == "ex" then + return (tfmdata and tfmdata.parameters.x_height) or 655360 + elseif unit == "em" then + return (tfmdata and tfmdata.parameters.em_width) or 655360 + else + local du = dimenfactors[unit] + return du and 1/du or tonumber(unit) or 1 + end +end + +local function digitwidth(font) -- max(quad/2,wd(0..9)) + local tfmdata = fontdata[font] + local parameters = tfmdata.parameters + local width = parameters.digitwidth + if not width then + width = round(parameters.quad/2) -- maybe tex.scale + local characters = tfmdata.characters + for i=48,57 do + local wd = round(characters[i].width) + if wd > width then + width = wd + end + end + parameters.digitwidth = width + end + return width +end + +helpers.getdigitwidth = digitwidth +helpers.setdigitwidth = digitwidth + +-- + +function helpers.getparameters(tfmdata) + local p = { } + local m = p + local parameters = tfmdata.parameters + while true do + for k, v in next, parameters do + m[k] = v + end + parameters = getmetatable(parameters) + parameters = parameters and parameters.__index + if type(parameters) == "table" then + m = { } + p.metatable = m + else + break + end + end + return p +end + +if environment.initex then + + local function names(t) + local nt = #t + if nt > 0 then + local n = { } + for i=1,nt do + n[i] = t[i].name + end + return concat(n," ") + else + return "-" + end + end + + statistics.register("font processing", function() + local l = { } + for what, handler in table.sortedpairs(handlers) do + local features = handler.features + if features then + l[#l+1] = format("[%s (base initializers: %s) (base processors: %s) (base manipulators: %s) (node initializers: %s) (node processors: %s) (node manipulators: %s)]", + what, + names(features.initializers.base), + names(features.processors .base), + names(features.manipulators.base), + names(features.initializers.node), + names(features.processors .node), + names(features.manipulators.node) + ) + end + end + return concat(l, " | ") + end) + +end + +-- redefinition + +local quads = hashes.quads +local xheights = hashes.xheights + +setmetatableindex(number.dimenfactors, function(t,k) + if k == "ex" then + return xheigths[currentfont()] + elseif k == "em" then + return quads[currentfont()] + elseif k == "%" then + return dimen.hsize/100 + else + -- error("wrong dimension: " .. (s or "?")) -- better a message + return false + end +end) + +--[[ldx-- +

Before a font is passed to TeX we scale it. Here we also need +to scale virtual characters.
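As an illustration only (made-up data, not part of the patch): constructors.checkvirtualids below walks tfmdata.fonts and replaces a 0 in the second slot of an entry with the id the font is about to get, so that the { "slot", 0, number } self-reference mentioned in its comment keeps working:

-- hedged sketch; the entry layout is assumed from the check on fonts[i][2]
local tfmdata = { fonts = { { "id", 0 }, { "id", 38 } } }
fonts.constructors.checkvirtualids(tfmdata)
-- tfmdata.fonts[1][2] now holds font.nextid(); the second entry is untouched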

+--ldx]]-- + +function constructors.checkvirtualids(tfmdata) + -- begin of experiment: we can use { "slot", 0, number } in virtual fonts + local fonts = tfmdata.fonts + local selfid = font.nextid() + if fonts and #fonts > 0 then + for i=1,#fonts do + if fonts[i][2] == 0 then + fonts[i][2] = selfid + end + end + else + -- tfmdata.fonts = { "id", selfid } -- conflicts with other next id's (vf math), too late anyway + end + -- end of experiment +end + +-- function constructors.getvirtualid(tfmdata) +-- -- since we don't know the id yet, we use 0 as signal +-- local tf = tfmdata.fonts +-- if not tf then +-- local properties = tfmdata.properties +-- if properties then +-- properties.virtualized = true +-- else +-- tfmdata.properties = { virtualized = true } +-- end +-- tf = { } +-- tfmdata.fonts = tf +-- end +-- local ntf = #tf + 1 +-- tf[ntf] = { id = 0 } +-- return ntf +-- end +-- +-- function constructors.checkvirtualid(tfmdata, id) -- will go +-- local properties = tfmdata.properties +-- if tfmdata and tfmdata.type == "virtual" or (properties and properties.virtualized) then +-- local vfonts = tfmdata.fonts +-- if not vffonts or #vfonts == 0 then +-- if properties then +-- properties.virtualized = false +-- end +-- tfmdata.fonts = nil +-- else +-- for f=1,#vfonts do +-- local fnt = vfonts[f] +-- if fnt.id and fnt.id == 0 then +-- fnt.id = id +-- end +-- end +-- end +-- end +-- end + +function commands.setfontofid(id) + context.getvalue(csnames[id]) +end + +-- more interfacing: + +commands.definefontfeature = presetcontext + +local cache = { } + +local hows = { + ["+"] = "add", + ["-"] = "subtract", + ["="] = "replace", +} + +function commands.feature(how,parent,name,font) + if not how then + if trace_features and texattribute[0] ~= 0 then + report_cummulative("font %!font:name!, reset",fontdata[font or true]) + end + texattribute[0] = 0 + elseif how == true then + local hash = "feature > " .. parent + local done = cache[hash] + if trace_features and done then + report_cummulative("font %!font:name!, revive %a : %!font:features!",fontdata[font or true],parent,setups[numbers[done]]) + end + texattribute[0] = done or 0 + else + local full = parent .. how .. name + local hash = "feature > " .. full + local done = cache[hash] + if not done then + local n = setups[full] + if n then + -- already defined + else + n = mergecontextfeatures(parent,name,how,full) + end + done = registercontextfeature(hash,full,how) + cache[hash] = done + if trace_features then + report_cummulative("font %!font:name!, %s %a : %!font:features!",fontdata[font or true],hows[how],full,setups[numbers[done]]) + end + end + texattribute[0] = done + end +end + +function commands.featurelist(...) 
+ context(fonts.specifiers.contexttostring(...)) +end + +function commands.registerlanguagefeatures() + local specifications = languages.data.specifications + for i=1,#specifications do + local specification = specifications[i] + local language = specification.opentype + if language then + local script = specification.opentypescript or specification.script + if script then + local context = specification.context + if type(context) == "table" then + for i=1,#context do + definecontext(context[i], { language = language, script = script}) + end + elseif type(context) == "string" then + definecontext(context, { language = language, script = script}) + end + end + end + end +end + +-- a fontkern plug: + +local copy_node = node.copy +local kern = nodes.pool.register(nodes.pool.kern()) + +node.set_attribute(kern,attributes.private('fontkern'),1) -- we can have several, attributes are shared + +nodes.injections.installnewkern(function(k) + local c = copy_node(kern) + c.kern = k + return c +end) + +directives.register("nodes.injections.fontkern", function(v) kern.subtype = v and 0 or 1 end) + +-- here + +local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end) + +local otffeatures = fonts.constructors.newfeatures("otf") +local registerotffeature = otffeatures.register + +local analyzers = fonts.analyzers +local methods = analyzers.methods + +local unsetvalue = attributes.unsetvalue + +local traverse_by_id = node.traverse_id + +local a_color = attributes.private('color') +local a_colormodel = attributes.private('colormodel') +local a_state = attributes.private('state') +local m_color = attributes.list[a_color] or { } + +local glyph_code = nodes.nodecodes.glyph + +local states = analyzers.states + +local names = { + [states.init] = "font:1", + [states.medi] = "font:2", + [states.fina] = "font:3", + [states.isol] = "font:4", + [states.mark] = "font:5", + [states.rest] = "font:6", + [states.rphf] = "font:1", + [states.half] = "font:2", + [states.pref] = "font:3", + [states.blwf] = "font:4", + [states.pstf] = "font:5", +} + +local function markstates(head) + if head then + local model = head[a_colormodel] or 1 + for glyph in traverse_by_id(glyph_code,head) do + local a = glyph[a_state] + if a then + local name = names[a] + if name then + local color = m_color[name] + if color then + glyph[a_colormodel] = model + glyph[a_color] = color + end + end + end + end + end +end + +local function analyzeprocessor(head,font,attr) + local tfmdata = fontdata[font] + local script, language = otf.scriptandlanguage(tfmdata,attr) + local action = methods[script] + if not action then + return head, false + end + if type(action) == "function" then + local head, done = action(head,font,attr) + if done and trace_analyzing then + markstates(head) + end + return head, done + end + action = action[language] + if action then + local head, done = action(head,font,attr) + if done and trace_analyzing then + markstates(head) + end + return head, done + else + return head, false + end +end + +registerotffeature { -- adapts + name = "analyze", + processors = { + node = analyzeprocessor, + } +} + +function methods.nocolor(head,font,attr) + for n in traverse_by_id(glyph_code,head) do + if not font or n.font == font then + n[a_color] = unsetvalue + end + end + return head, true +end diff --git a/tex/context/base/font-def.lua b/tex/context/base/font-def.lua index bee02e8dc..7e01c5620 100644 --- a/tex/context/base/font-def.lua +++ b/tex/context/base/font-def.lua @@ -1,449 +1,449 @@ -if not 
modules then modules = { } end modules ['font-def'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- We can overload some of the definers.functions so we don't local them. - -local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub -local tostring, next = tostring, next -local lpegmatch = lpeg.match - -local allocate = utilities.storage.allocate - -local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end) -local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end) - -trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading") -trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*") - -local report_defining = logs.reporter("fonts","defining") - ---[[ldx-- -

Here we deal with defining fonts. We do so by intercepting the -default loader that only handles TFM.

---ldx]]-- - -local fonts = fonts -local fontdata = fonts.hashes.identifiers -local readers = fonts.readers -local definers = fonts.definers -local specifiers = fonts.specifiers -local constructors = fonts.constructors -local fontgoodies = fonts.goodies - -readers.sequence = allocate { 'otf', 'ttf', 'afm', 'tfm', 'lua' } -- dfont ttc - -local variants = allocate() -specifiers.variants = variants - -definers.methods = definers.methods or { } - -local internalized = allocate() -- internal tex numbers (private) -local lastdefined = nil -- we don't want this one to end up in s-tra-02 - -local loadedfonts = constructors.loadedfonts -local designsizes = constructors.designsizes - --- not in generic (some day I'll make two defs, one for context, one for generic) - -local resolvefile = fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end - ---[[ldx-- -

We hardly gain anything when we cache the final (pre scaled) -TFM table. But it can be handy for debugging, so we no -longer carry this code along. Also, we now have quite some reference -to other tables so we would end up with lots of catches.

---ldx]]-- - ---[[ldx-- -

We can prefix a font specification by name: or -file:. The first case will result in a lookup in the -synonym table.

- - -[ name: | file: ] identifier [ separator [ specification ] ] - - -

The following function splits the font specification into components -and prepares a table that will move along as we proceed.

---ldx]]-- - --- beware, we discard additional specs --- --- method:name method:name(sub) method:name(sub)*spec method:name*spec --- name name(sub) name(sub)*spec name*spec --- name@spec*oeps - -local splitter, splitspecifiers = nil, "" -- not so nice - -local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc - -local left = P("(") -local right = P(")") -local colon = P(":") -local space = P(" ") - -definers.defaultlookup = "file" - -local prefixpattern = P(false) - -local function addspecifier(symbol) - splitspecifiers = splitspecifiers .. symbol - local method = S(splitspecifiers) - local lookup = C(prefixpattern) * colon - local sub = left * C(P(1-left-right-method)^1) * right - local specification = C(method) * C(P(1)^1) - local name = C((1-sub-specification)^1) - splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc(""))) -end - -local function addlookup(str,default) - prefixpattern = prefixpattern + P(str) -end - -definers.addlookup = addlookup - -addlookup("file") -addlookup("name") -addlookup("spec") - -local function getspecification(str) - return lpegmatch(splitter,str) -end - -definers.getspecification = getspecification - -function definers.registersplit(symbol,action,verbosename) - addspecifier(symbol) - variants[symbol] = action - if verbosename then - variants[verbosename] = action - end -end - -local function makespecification(specification,lookup,name,sub,method,detail,size) - size = size or 655360 - if not lookup or lookup == "" then - lookup = definers.defaultlookup - end - if trace_defining then - report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a", - specification, lookup, name, sub, method, detail) - end - local t = { - lookup = lookup, -- forced type - specification = specification, -- full specification - size = size, -- size in scaled points or -1000*n - name = name, -- font or filename - sub = sub, -- subfont (eg in ttc) - method = method, -- specification method - detail = detail, -- specification - resolved = "", -- resolved font name - forced = "", -- forced loader - features = { }, -- preprocessed features - } - return t -end - - -definers.makespecification = makespecification - -function definers.analyze(specification, size) - -- can be optimized with locals - local lookup, name, sub, method, detail = getspecification(specification or "") - return makespecification(specification, lookup, name, sub, method, detail, size) -end - ---[[ldx-- -

We can resolve the filename using the next function:

---ldx]]-- - -definers.resolvers = definers.resolvers or { } -local resolvers = definers.resolvers - --- todo: reporter - -function resolvers.file(specification) - local name = resolvefile(specification.name) -- catch for renames - local suffix = file.suffix(name) - if fonts.formats[suffix] then - specification.forced = suffix - specification.name = file.removesuffix(name) - else - specification.name = name -- can be resolved - end -end - -function resolvers.name(specification) - local resolve = fonts.names.resolve - if resolve then - local resolved, sub = resolve(specification.name,specification.sub,specification) -- we pass specification for overloaded versions - if resolved then - specification.resolved = resolved - specification.sub = sub - local suffix = file.suffix(resolved) - if fonts.formats[suffix] then - specification.forced = suffix - specification.name = file.removesuffix(resolved) - else - specification.name = resolved - end - end - else - resolvers.file(specification) - end -end - -function resolvers.spec(specification) - local resolvespec = fonts.names.resolvespec - if resolvespec then - local resolved, sub = resolvespec(specification.name,specification.sub,specification) -- we pass specification for overloaded versions - if resolved then - specification.resolved = resolved - specification.sub = sub - specification.forced = file.suffix(resolved) - specification.name = file.removesuffix(resolved) - end - else - resolvers.name(specification) - end -end - -function definers.resolve(specification) - if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash - local r = resolvers[specification.lookup] - if r then - r(specification) - end - end - if specification.forced == "" then - specification.forced = nil - else - specification.forced = specification.forced - end - specification.hash = lower(specification.name .. ' @ ' .. constructors.hashfeatures(specification)) - if specification.sub and specification.sub ~= "" then - specification.hash = specification.sub .. ' @ ' .. specification.hash - end - return specification -end - ---[[ldx-- -

The main read function either uses a forced reader (as determined by -a lookup) or tries to resolve the name using the list of readers.

- -

We need to cache when possible. We do cache raw tfm data (from TFM, AFM or OTF). After that we can cache based -on specification (name) and size, that is, TeX only needs a number -for an already loaded font. However, it may make sense to cache fonts -before they're scaled as well (store TFM's with applied methods -and features). However, there may be a relation between the size and -features (esp in virtual fonts) so let's not do that now.

- -

Watch out, here we do load a font, but we don't prepare the -specification yet.

---ldx]]-- - --- very experimental: - -function definers.applypostprocessors(tfmdata) - local postprocessors = tfmdata.postprocessors - if postprocessors then - local properties = tfmdata.properties - for i=1,#postprocessors do - local extrahash = postprocessors[i](tfmdata) -- after scaling etc - if type(extrahash) == "string" and extrahash ~= "" then - -- e.g. a reencoding needs this - extrahash = gsub(lower(extrahash),"[^a-z]","-") - properties.fullname = format("%s-%s",properties.fullname,extrahash) - end - end - end - return tfmdata -end - --- function definers.applypostprocessors(tfmdata) --- return tfmdata --- end - -local function checkembedding(tfmdata) - local properties = tfmdata.properties - local embedding - if directive_embedall then - embedding = "full" - elseif properties and properties.filename and constructors.dontembed[properties.filename] then - embedding = "no" - else - embedding = "subset" - end - if properties then - properties.embedding = embedding - else - tfmdata.properties = { embedding = embedding } - end - tfmdata.embedding = embedding -end - -function definers.loadfont(specification) - local hash = constructors.hashinstance(specification) - local tfmdata = loadedfonts[hash] -- hashes by size ! - if not tfmdata then - local forced = specification.forced or "" - if forced ~= "" then - local reader = readers[lower(forced)] - tfmdata = reader and reader(specification) - if not tfmdata then - report_defining("forced type %a of %a not found",forced,specification.name) - end - else - local sequence = readers.sequence -- can be overloaded so only a shortcut here - for s=1,#sequence do - local reader = sequence[s] - if readers[reader] then -- we skip not loaded readers - if trace_defining then - report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename) - end - tfmdata = readers[reader](specification) - if tfmdata then - break - else - specification.filename = nil - end - end - end - end - if tfmdata then - tfmdata = definers.applypostprocessors(tfmdata) - checkembedding(tfmdata) -- todo: general postprocessor - loadedfonts[hash] = tfmdata - designsizes[specification.hash] = tfmdata.parameters.designsize - end - end - if not tfmdata then - report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup) - end - return tfmdata -end - -function constructors.checkvirtualids() - -- dummy in plain version -end - -function constructors.readanddefine(name,size) -- no id -- maybe a dummy first - local specification = definers.analyze(name,size) - local method = specification.method - if method and variants[method] then - specification = variants[method](specification) - end - specification = definers.resolve(specification) - local hash = constructors.hashinstance(specification) - local id = definers.registered(hash) - if not id then - local tfmdata = definers.loadfont(specification) - if tfmdata then - tfmdata.properties.hash = hash - constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference - id = font.define(tfmdata) - definers.register(tfmdata,id) - else - id = 0 -- signal - end - end - return fontdata[id], id -end - ---[[ldx-- -

So far the specifiers. Now comes the real definer. Here we cache -based on id's. Here we also intercept the virtual font handler. Since -it evolved stepwise I may rewrite this bit (combine code).

In the previously defined reader (the one resulting in a TFM -table) we cached the (scaled) instances. Here we cache them again, but -this time based on id. We could combine this in one cache but this does -not gain much. By the way, passing id's back to TeX in the callback was -introduced later in the development.

---ldx]]-- - -function definers.current() -- or maybe current - return lastdefined -end - -function definers.registered(hash) - local id = internalized[hash] - return id, id and fontdata[id] -end - -function definers.register(tfmdata,id) - if tfmdata and id then - local hash = tfmdata.properties.hash - if not hash then - report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?") - elseif not internalized[hash] then - internalized[hash] = id - if trace_defining then - report_defining("registering font, id %s, hash %a",id,hash) - end - fontdata[id] = tfmdata - end - end -end - -function definers.read(specification,size,id) -- id can be optional, name can already be table - statistics.starttiming(fonts) - if type(specification) == "string" then - specification = definers.analyze(specification,size) - end - local method = specification.method - if method and variants[method] then - specification = variants[method](specification) - end - specification = definers.resolve(specification) - local hash = constructors.hashinstance(specification) - local tfmdata = definers.registered(hash) -- id - if tfmdata then - if trace_defining then - report_defining("already hashed: %s",hash) - end - else - tfmdata = definers.loadfont(specification) -- can be overloaded - if tfmdata then - if trace_defining then - report_defining("loaded and hashed: %s",hash) - end - tfmdata.properties.hash = hash - if id then - definers.register(tfmdata,id) - end - else - if trace_defining then - report_defining("not loaded and hashed: %s",hash) - end - end - end - lastdefined = tfmdata or id -- todo ! ! ! ! ! - if not tfmdata then -- or id? - report_defining( "unknown font %a, loading aborted",specification.name) - elseif trace_defining and type(tfmdata) == "table" then - local properties = tfmdata.properties or { } - local parameters = tfmdata.parameters or { } - report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a", - properties.format, id, properties.name, parameters.size, properties.encodingbytes, - properties.encodingname, properties.fullname, file.basename(properties.filename)) - end - statistics.stoptiming(fonts) - return tfmdata -end - -function font.getfont(id) - return fontdata[id] -- otherwise issues -end - ---[[ldx-- -

We overload the TFM reader.

---ldx]]-- - -callbacks.register('define_font', definers.read, "definition of fonts (tfmdata preparation)") +if not modules then modules = { } end modules ['font-def'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- We can overload some of the definers.functions so we don't local them. + +local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub +local tostring, next = tostring, next +local lpegmatch = lpeg.match + +local allocate = utilities.storage.allocate + +local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end) +local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end) + +trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading") +trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*") + +local report_defining = logs.reporter("fonts","defining") + +--[[ldx-- +

Here we deal with defining fonts. We do so by intercepting the +default loader that only handles TFM.

+--ldx]]-- + +local fonts = fonts +local fontdata = fonts.hashes.identifiers +local readers = fonts.readers +local definers = fonts.definers +local specifiers = fonts.specifiers +local constructors = fonts.constructors +local fontgoodies = fonts.goodies + +readers.sequence = allocate { 'otf', 'ttf', 'afm', 'tfm', 'lua' } -- dfont ttc + +local variants = allocate() +specifiers.variants = variants + +definers.methods = definers.methods or { } + +local internalized = allocate() -- internal tex numbers (private) +local lastdefined = nil -- we don't want this one to end up in s-tra-02 + +local loadedfonts = constructors.loadedfonts +local designsizes = constructors.designsizes + +-- not in generic (some day I'll make two defs, one for context, one for generic) + +local resolvefile = fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end + +--[[ldx-- +

We hardly gain anything when we cache the final (pre scaled) +TFM table. But it can be handy for debugging, so we no +longer carry this code along. Also, we now have quite some reference +to other tables so we would end up with lots of catches.

+--ldx]]-- + +--[[ldx-- +

We can prefix a font specification by name: or +file:. The first case will result in a lookup in the +synonym table.

+ + +[ name: | file: ] identifier [ separator [ specification ] ] + + +

The following function splits the font specification into components +and prepares a table that will move along as we proceed.
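As an illustration (not part of the module), this is roughly what the splitter delivers for a prefixed specification; the font name and size are made-up values, and the "*" specifier is assumed to have been registered elsewhere in the ConTeXt code via definers.registersplit:

-- hypothetical input; the fields correspond to makespecification above
local spec = fonts.definers.analyze("file:lmroman10-regular*default", 10*65536)
-- spec.lookup == "file"               (forced lookup method)
-- spec.name   == "lmroman10-regular"  (file or font name)
-- spec.method == "*", spec.detail == "default"
-- spec.size   == 655360               (10pt in scaled points)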

+--ldx]]-- + +-- beware, we discard additional specs +-- +-- method:name method:name(sub) method:name(sub)*spec method:name*spec +-- name name(sub) name(sub)*spec name*spec +-- name@spec*oeps + +local splitter, splitspecifiers = nil, "" -- not so nice + +local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc + +local left = P("(") +local right = P(")") +local colon = P(":") +local space = P(" ") + +definers.defaultlookup = "file" + +local prefixpattern = P(false) + +local function addspecifier(symbol) + splitspecifiers = splitspecifiers .. symbol + local method = S(splitspecifiers) + local lookup = C(prefixpattern) * colon + local sub = left * C(P(1-left-right-method)^1) * right + local specification = C(method) * C(P(1)^1) + local name = C((1-sub-specification)^1) + splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc(""))) +end + +local function addlookup(str,default) + prefixpattern = prefixpattern + P(str) +end + +definers.addlookup = addlookup + +addlookup("file") +addlookup("name") +addlookup("spec") + +local function getspecification(str) + return lpegmatch(splitter,str) +end + +definers.getspecification = getspecification + +function definers.registersplit(symbol,action,verbosename) + addspecifier(symbol) + variants[symbol] = action + if verbosename then + variants[verbosename] = action + end +end + +local function makespecification(specification,lookup,name,sub,method,detail,size) + size = size or 655360 + if not lookup or lookup == "" then + lookup = definers.defaultlookup + end + if trace_defining then + report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a", + specification, lookup, name, sub, method, detail) + end + local t = { + lookup = lookup, -- forced type + specification = specification, -- full specification + size = size, -- size in scaled points or -1000*n + name = name, -- font or filename + sub = sub, -- subfont (eg in ttc) + method = method, -- specification method + detail = detail, -- specification + resolved = "", -- resolved font name + forced = "", -- forced loader + features = { }, -- preprocessed features + } + return t +end + + +definers.makespecification = makespecification + +function definers.analyze(specification, size) + -- can be optimized with locals + local lookup, name, sub, method, detail = getspecification(specification or "") + return makespecification(specification, lookup, name, sub, method, detail, size) +end + +--[[ldx-- +

We can resolve the filename using the next function:

+--ldx]]-- + +definers.resolvers = definers.resolvers or { } +local resolvers = definers.resolvers + +-- todo: reporter + +function resolvers.file(specification) + local name = resolvefile(specification.name) -- catch for renames + local suffix = file.suffix(name) + if fonts.formats[suffix] then + specification.forced = suffix + specification.name = file.removesuffix(name) + else + specification.name = name -- can be resolved + end +end + +function resolvers.name(specification) + local resolve = fonts.names.resolve + if resolve then + local resolved, sub = resolve(specification.name,specification.sub,specification) -- we pass specification for overloaded versions + if resolved then + specification.resolved = resolved + specification.sub = sub + local suffix = file.suffix(resolved) + if fonts.formats[suffix] then + specification.forced = suffix + specification.name = file.removesuffix(resolved) + else + specification.name = resolved + end + end + else + resolvers.file(specification) + end +end + +function resolvers.spec(specification) + local resolvespec = fonts.names.resolvespec + if resolvespec then + local resolved, sub = resolvespec(specification.name,specification.sub,specification) -- we pass specification for overloaded versions + if resolved then + specification.resolved = resolved + specification.sub = sub + specification.forced = file.suffix(resolved) + specification.name = file.removesuffix(resolved) + end + else + resolvers.name(specification) + end +end + +function definers.resolve(specification) + if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash + local r = resolvers[specification.lookup] + if r then + r(specification) + end + end + if specification.forced == "" then + specification.forced = nil + else + specification.forced = specification.forced + end + specification.hash = lower(specification.name .. ' @ ' .. constructors.hashfeatures(specification)) + if specification.sub and specification.sub ~= "" then + specification.hash = specification.sub .. ' @ ' .. specification.hash + end + return specification +end + +--[[ldx-- +

The main read function either uses a forced reader (as determined by +a lookup) or tries to resolve the name using the list of readers.

+ +

We need to cache when possible. We do cache raw tfm data (from TFM, AFM or OTF). After that we can cache based +on specification (name) and size, that is, TeX only needs a number +for an already loaded font. However, it may make sense to cache fonts +before they're scaled as well (store TFM's with applied methods +and features). However, there may be a relation between the size and +features (esp in virtual fonts) so let's not do that now.
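For illustration only (the name and size are hypothetical): once a specification is resolved, constructors.hashinstance keys the loadedfonts cache, and constructors.readanddefine below wraps loading, registering and defining in one call, so a repeated identical request is served from the cache:

-- hedged sketch: define a font from Lua and get back its tfmdata and id;
-- a second identical call returns the already internalized instance
local tfmdata, id = fonts.constructors.readanddefine("file:lmroman10-regular*default", 10*65536)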

+ +

Watch out, here we do load a font, but we don't prepare the +specification yet.

+--ldx]]-- + +-- very experimental: + +function definers.applypostprocessors(tfmdata) + local postprocessors = tfmdata.postprocessors + if postprocessors then + local properties = tfmdata.properties + for i=1,#postprocessors do + local extrahash = postprocessors[i](tfmdata) -- after scaling etc + if type(extrahash) == "string" and extrahash ~= "" then + -- e.g. a reencoding needs this + extrahash = gsub(lower(extrahash),"[^a-z]","-") + properties.fullname = format("%s-%s",properties.fullname,extrahash) + end + end + end + return tfmdata +end + +-- function definers.applypostprocessors(tfmdata) +-- return tfmdata +-- end + +local function checkembedding(tfmdata) + local properties = tfmdata.properties + local embedding + if directive_embedall then + embedding = "full" + elseif properties and properties.filename and constructors.dontembed[properties.filename] then + embedding = "no" + else + embedding = "subset" + end + if properties then + properties.embedding = embedding + else + tfmdata.properties = { embedding = embedding } + end + tfmdata.embedding = embedding +end + +function definers.loadfont(specification) + local hash = constructors.hashinstance(specification) + local tfmdata = loadedfonts[hash] -- hashes by size ! + if not tfmdata then + local forced = specification.forced or "" + if forced ~= "" then + local reader = readers[lower(forced)] + tfmdata = reader and reader(specification) + if not tfmdata then + report_defining("forced type %a of %a not found",forced,specification.name) + end + else + local sequence = readers.sequence -- can be overloaded so only a shortcut here + for s=1,#sequence do + local reader = sequence[s] + if readers[reader] then -- we skip not loaded readers + if trace_defining then + report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename) + end + tfmdata = readers[reader](specification) + if tfmdata then + break + else + specification.filename = nil + end + end + end + end + if tfmdata then + tfmdata = definers.applypostprocessors(tfmdata) + checkembedding(tfmdata) -- todo: general postprocessor + loadedfonts[hash] = tfmdata + designsizes[specification.hash] = tfmdata.parameters.designsize + end + end + if not tfmdata then + report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup) + end + return tfmdata +end + +function constructors.checkvirtualids() + -- dummy in plain version +end + +function constructors.readanddefine(name,size) -- no id -- maybe a dummy first + local specification = definers.analyze(name,size) + local method = specification.method + if method and variants[method] then + specification = variants[method](specification) + end + specification = definers.resolve(specification) + local hash = constructors.hashinstance(specification) + local id = definers.registered(hash) + if not id then + local tfmdata = definers.loadfont(specification) + if tfmdata then + tfmdata.properties.hash = hash + constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference + id = font.define(tfmdata) + definers.register(tfmdata,id) + else + id = 0 -- signal + end + end + return fontdata[id], id +end + +--[[ldx-- +

So far the specifiers. Now comes the real definer. Here we cache +based on id's. Here we also intercept the virtual font handler. Since +it evolved stepwise I may rewrite this bit (combine code).

In the previously defined reader (the one resulting in a TFM +table) we cached the (scaled) instances. Here we cache them again, but +this time based on id. We could combine this in one cache but this does +not gain much. By the way, passing id's back to TeX in the callback was +introduced later in the development.

+--ldx]]-- + +function definers.current() -- or maybe current + return lastdefined +end + +function definers.registered(hash) + local id = internalized[hash] + return id, id and fontdata[id] +end + +function definers.register(tfmdata,id) + if tfmdata and id then + local hash = tfmdata.properties.hash + if not hash then + report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?") + elseif not internalized[hash] then + internalized[hash] = id + if trace_defining then + report_defining("registering font, id %s, hash %a",id,hash) + end + fontdata[id] = tfmdata + end + end +end + +function definers.read(specification,size,id) -- id can be optional, name can already be table + statistics.starttiming(fonts) + if type(specification) == "string" then + specification = definers.analyze(specification,size) + end + local method = specification.method + if method and variants[method] then + specification = variants[method](specification) + end + specification = definers.resolve(specification) + local hash = constructors.hashinstance(specification) + local tfmdata = definers.registered(hash) -- id + if tfmdata then + if trace_defining then + report_defining("already hashed: %s",hash) + end + else + tfmdata = definers.loadfont(specification) -- can be overloaded + if tfmdata then + if trace_defining then + report_defining("loaded and hashed: %s",hash) + end + tfmdata.properties.hash = hash + if id then + definers.register(tfmdata,id) + end + else + if trace_defining then + report_defining("not loaded and hashed: %s",hash) + end + end + end + lastdefined = tfmdata or id -- todo ! ! ! ! ! + if not tfmdata then -- or id? + report_defining( "unknown font %a, loading aborted",specification.name) + elseif trace_defining and type(tfmdata) == "table" then + local properties = tfmdata.properties or { } + local parameters = tfmdata.parameters or { } + report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a", + properties.format, id, properties.name, parameters.size, properties.encodingbytes, + properties.encodingname, properties.fullname, file.basename(properties.filename)) + end + statistics.stoptiming(fonts) + return tfmdata +end + +function font.getfont(id) + return fontdata[id] -- otherwise issues +end + +--[[ldx-- +

We overload the TFM reader.

+--ldx]]-- + +callbacks.register('define_font', definers.read, "definition of fonts (tfmdata preparation)") diff --git a/tex/context/base/font-enc.lua b/tex/context/base/font-enc.lua index 5305f0736..9795e0948 100644 --- a/tex/context/base/font-enc.lua +++ b/tex/context/base/font-enc.lua @@ -1,147 +1,147 @@ -if not modules then modules = { } end modules ['font-enc'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this module is obsolete - -local match, gmatch, gsub = string.match, string.gmatch, string.gsub - -local setmetatableindex = table.setmetatableindex - ---[[ldx-- -

Because encodings are going to disappear, we don't bother defining -them in tables. But we may do so some day, for consistency.

---ldx]]-- - -local report_encoding = logs.reporter("fonts","encoding") - -local encodings = { } -fonts.encodings = encodings - -encodings.version = 1.03 -encodings.cache = containers.define("fonts", "enc", fonts.encodings.version, true) -encodings.known = utilities.storage.allocate { -- sort of obsolete - texnansi = true, - ec = true, - qx = true, - t5 = true, - t2a = true, - t2b = true, - t2c = true, - unicode = true, -} - -function encodings.is_known(encoding) - return containers.is_valid(encodings.cache,encoding) -end - ---[[ldx-- -

An encoding file looks like this:

- - -/TeXnANSIEncoding [ -/.notdef -/Euro -... -/ydieresis -] def - - -

Beware! The generic encoding files don't always apply to the ones that -ship with fonts. This has to do with the fact that names follow (slightly) -different standards. However, the fonts this applies to (for instance -Latin Modern or Gyre) come in OpenType variants too, so these -will be used.

---ldx]]-- - -local enccodes = characters.enccodes or { } - -function encodings.load(filename) - local name = file.removesuffix(filename) - local data = containers.read(encodings.cache,name) - if data then - return data - end - if name == "unicode" then - data = encodings.make_unicode_vector() -- special case, no tex file for this - end - if data then - return data - end - local vector, tag, hash, unicodes = { }, "", { }, { } - local foundname = resolvers.findfile(filename,'enc') - if foundname and foundname ~= "" then - local ok, encoding, size = resolvers.loadbinfile(foundname) - if ok and encoding then - encoding = gsub(encoding,"%%(.-)\n","") - local tag, vec = match(encoding,"/(%w+)%s*%[(.*)%]%s*def") - local i = 0 - for ch in gmatch(vec,"/([%a%d%.]+)") do - if ch ~= ".notdef" then - vector[i] = ch - if not hash[ch] then - hash[ch] = i - else - -- duplicate, play safe for tex ligs and take first - end - if enccodes[ch] then - unicodes[enccodes[ch]] = i - end - end - i = i + 1 - end - end - end - local data = { - name = name, - tag = tag, - vector = vector, - hash = hash, - unicodes = unicodes - } - return containers.write(encodings.cache, name, data) -end - ---[[ldx-- -

There is no unicode encoding but for practical purposes we define -one.

---ldx]]-- - --- maybe make this a function: - -function encodings.make_unicode_vector() - local vector, hash = { }, { } - for code, v in next, characters.data do - local name = v.adobename - if name then - vector[code] = name - hash[name] = code - else - vector[code] = '.notdef' - end - end - for name, code in next, characters.synonyms do - vector[code], hash[name] = name, code - end - return containers.write(encodings.cache, 'unicode', { name='unicode', tag='unicode', vector=vector, hash=hash }) -end - -if not encodings.agl then - - -- We delay delay loading this rather big vector that is only needed when a - -- font is loaded for caching. Once we're further along the route we can also - -- delay it in the generic version (which doesn't use this file). - - encodings.agl = { } - - setmetatableindex(encodings.agl, function(t,k) - report_encoding("loading (extended) adobe glyph list") - dofile(resolvers.findfile("font-agl.lua")) - return rawget(encodings.agl,k) - end) - -end +if not modules then modules = { } end modules ['font-enc'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this module is obsolete + +local match, gmatch, gsub = string.match, string.gmatch, string.gsub + +local setmetatableindex = table.setmetatableindex + +--[[ldx-- +

Because encodings are going to disappear, we don't bother defining +them in tables. But we may do so some day, for consistency.

+--ldx]]-- + +local report_encoding = logs.reporter("fonts","encoding") + +local encodings = { } +fonts.encodings = encodings + +encodings.version = 1.03 +encodings.cache = containers.define("fonts", "enc", fonts.encodings.version, true) +encodings.known = utilities.storage.allocate { -- sort of obsolete + texnansi = true, + ec = true, + qx = true, + t5 = true, + t2a = true, + t2b = true, + t2c = true, + unicode = true, +} + +function encodings.is_known(encoding) + return containers.is_valid(encodings.cache,encoding) +end + +--[[ldx-- +

An encoding file looks like this:

+ + +/TeXnANSIEncoding [ +/.notdef +/Euro +... +/ydieresis +] def + + +

Beware! The generic encoding files don't always apply to the ones that +ship with fonts. This has to do with the fact that names follow (slightly) +different standards. However, the fonts this applies to (for instance +Latin Modern or Gyre) come in OpenType variants too, so these +will be used.
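As a rough sketch of what the loader below returns (slot numbers follow the sample vector above; the file name is a hypothetical example):

local enc = fonts.encodings.load("texnansi.enc")
-- enc.vector[1]        == "Euro"   (slot to glyph name; slot 0 is .notdef and stays unset)
-- enc.hash["Euro"]     == 1        (glyph name back to slot)
-- enc.unicodes[0x20AC] == 1        (only filled when characters.enccodes knows the name)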

+--ldx]]-- + +local enccodes = characters.enccodes or { } + +function encodings.load(filename) + local name = file.removesuffix(filename) + local data = containers.read(encodings.cache,name) + if data then + return data + end + if name == "unicode" then + data = encodings.make_unicode_vector() -- special case, no tex file for this + end + if data then + return data + end + local vector, tag, hash, unicodes = { }, "", { }, { } + local foundname = resolvers.findfile(filename,'enc') + if foundname and foundname ~= "" then + local ok, encoding, size = resolvers.loadbinfile(foundname) + if ok and encoding then + encoding = gsub(encoding,"%%(.-)\n","") + local tag, vec = match(encoding,"/(%w+)%s*%[(.*)%]%s*def") + local i = 0 + for ch in gmatch(vec,"/([%a%d%.]+)") do + if ch ~= ".notdef" then + vector[i] = ch + if not hash[ch] then + hash[ch] = i + else + -- duplicate, play safe for tex ligs and take first + end + if enccodes[ch] then + unicodes[enccodes[ch]] = i + end + end + i = i + 1 + end + end + end + local data = { + name = name, + tag = tag, + vector = vector, + hash = hash, + unicodes = unicodes + } + return containers.write(encodings.cache, name, data) +end + +--[[ldx-- +

There is no unicode encoding but for practical purposes we define +one.

+--ldx]]-- + +-- maybe make this a function: + +function encodings.make_unicode_vector() + local vector, hash = { }, { } + for code, v in next, characters.data do + local name = v.adobename + if name then + vector[code] = name + hash[name] = code + else + vector[code] = '.notdef' + end + end + for name, code in next, characters.synonyms do + vector[code], hash[name] = name, code + end + return containers.write(encodings.cache, 'unicode', { name='unicode', tag='unicode', vector=vector, hash=hash }) +end + +if not encodings.agl then + + -- We delay delay loading this rather big vector that is only needed when a + -- font is loaded for caching. Once we're further along the route we can also + -- delay it in the generic version (which doesn't use this file). + + encodings.agl = { } + + setmetatableindex(encodings.agl, function(t,k) + report_encoding("loading (extended) adobe glyph list") + dofile(resolvers.findfile("font-agl.lua")) + return rawget(encodings.agl,k) + end) + +end diff --git a/tex/context/base/font-enh.lua b/tex/context/base/font-enh.lua index 2bf0741f5..cb152083d 100644 --- a/tex/context/base/font-enh.lua +++ b/tex/context/base/font-enh.lua @@ -1,200 +1,200 @@ -if not modules then modules = { } end modules ['font-enh'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next = next - -local trace_unicoding = false - -trackers.register("fonts.defining", function(v) trace_unicoding = v end) -trackers.register("fonts.unicoding", function(v) trace_unicoding = v end) - -local report_unicoding = logs.reporter("fonts","unicoding") - -local fonts = fonts -local constructors = fonts.constructors - -local tfmfeatures = constructors.newfeatures("tfm") -local registertfmfeature = tfmfeatures.register - -local afmfeatures = fonts.constructors.newfeatures("afm") -local registerafmfeature = afmfeatures.register - -local otffeatures = fonts.constructors.newfeatures("otf") -local registerotffeature = otffeatures.register - --- -- these will become goodies (when needed at all) --- --- local fontencodings = fonts.encodings --- fontencodings.remappings = fontencodings.remappings or { } --- --- local function reencode(tfmdata,encoding) --- if encoding and fontencodings.known[encoding] then --- local data = fontencodings.load(encoding) --- if data then --- tfmdata.properties.encoding = encoding --- local characters = tfmdata.characters --- local original = { } --- local vector = data.vector --- for unicode, character in next, characters do --- character.name = vector[unicode] --- character.index = unicode, character --- original[unicode] = character --- end --- for newcode, oldcode in next, data.unicodes do --- if newcode ~= oldcode then --- if trace_unicoding then --- report_unicoding("reencoding %U to %U",oldcode,newcode) --- end --- characters[newcode] = original[oldcode] --- end --- end --- end --- end --- end --- --- registertfmfeature { --- name = "reencode", --- description = "reencode", --- manipulators = { --- base = reencode, --- node = reencode, --- } --- } --- --- local function remap(tfmdata,remapping) --- local vector = remapping and fontencodings.remappings[remapping] --- if vector then --- local characters, original = tfmdata.characters, { } --- for k, v in next, characters do --- original[k], characters[k] = v, nil --- end --- for k,v in next, vector do --- if k ~= v then --- if trace_unicoding then --- 
report_unicoding("remapping %U to %U",k,v) --- end --- local c = original[k] --- characters[v] = c --- c.index = k --- end --- end --- local properties = tfmdata.properties --- if not properties then --- properties = { } --- tfmdata.properties = properties --- else --- properties.encodingbytes = 2 --- properties.format = properties.format or 'type1' --- end --- end --- end --- --- registertfmfeature { --- name = "remap", --- description = "remap", --- manipulators = { --- base = remap, --- node = remap, --- } --- } - --- \definefontfeature[dingbats][goodies=dingbats,unicoding=yes] - --- we only add and don't replace --- we could also add kerns but we asssume symbols --- todo: complain if not basemode - --- remapping = { --- tounicode = true, --- unicodes = { --- a1 = 0x2701, - -local tosixteen = fonts.mappings.tounicode16 - -local function initializeunicoding(tfmdata) - local goodies = tfmdata.goodies - local newcoding = nil - local tounicode = false - for i=1,#goodies do - local remapping = goodies[i].remapping - if remapping and remapping.unicodes then - newcoding = remapping.unicodes -- names to unicodes - tounicode = remapping.tounicode - end - end - if newcoding then - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - local oldcoding = tfmdata.resources.unicodes - local tounicodes = tfmdata.resources.tounicode -- index to unicode - local originals = { } - for name, newcode in next, newcoding do - local oldcode = oldcoding[name] - if characters[newcode] and not originals[newcode] then - originals[newcode] = { - character = characters [newcode], - description = descriptions[newcode], - } - end - if oldcode then - local original = originals[oldcode] - if original then - characters [newcode] = original.character - descriptions[newcode] = original.description - else - characters [newcode] = characters [oldcode] - descriptions[newcode] = descriptions[oldcode] - end - else - oldcoding[name] = newcode - end - if tounicode then - local description = descriptions[newcode] - if description then - local index = description.index - if not tounicodes[index] then - tounicodes[index] = tosixteen(newcode) -- shared (we could have a metatable) - end - end - end - if trace_unicoding then - if oldcode then - report_unicoding("aliasing glyph %a from %U to %U",name,oldcode,newcode) - else - report_unicoding("aliasing glyph %a to %U",name,newcode) - end - end - end - end -end - -registerafmfeature { - name = "unicoding", - description = "adapt unicode table", - initializers = { - base = initializeunicoding, - node = initializeunicoding, - }, - -- manipulators = { - -- base = finalizeunicoding, - -- node = finalizeunicoding, - -- } -} - -registerotffeature { - name = "unicoding", - description = "adapt unicode table", - initializers = { - base = initializeunicoding, - node = initializeunicoding, - }, - -- manipulators = { - -- base = finalizeunicoding, - -- node = finalizeunicoding, - -- } -} +if not modules then modules = { } end modules ['font-enh'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next = next + +local trace_unicoding = false + +trackers.register("fonts.defining", function(v) trace_unicoding = v end) +trackers.register("fonts.unicoding", function(v) trace_unicoding = v end) + +local report_unicoding = logs.reporter("fonts","unicoding") + +local fonts = fonts +local constructors = 
fonts.constructors + +local tfmfeatures = constructors.newfeatures("tfm") +local registertfmfeature = tfmfeatures.register + +local afmfeatures = fonts.constructors.newfeatures("afm") +local registerafmfeature = afmfeatures.register + +local otffeatures = fonts.constructors.newfeatures("otf") +local registerotffeature = otffeatures.register + +-- -- these will become goodies (when needed at all) +-- +-- local fontencodings = fonts.encodings +-- fontencodings.remappings = fontencodings.remappings or { } +-- +-- local function reencode(tfmdata,encoding) +-- if encoding and fontencodings.known[encoding] then +-- local data = fontencodings.load(encoding) +-- if data then +-- tfmdata.properties.encoding = encoding +-- local characters = tfmdata.characters +-- local original = { } +-- local vector = data.vector +-- for unicode, character in next, characters do +-- character.name = vector[unicode] +-- character.index = unicode, character +-- original[unicode] = character +-- end +-- for newcode, oldcode in next, data.unicodes do +-- if newcode ~= oldcode then +-- if trace_unicoding then +-- report_unicoding("reencoding %U to %U",oldcode,newcode) +-- end +-- characters[newcode] = original[oldcode] +-- end +-- end +-- end +-- end +-- end +-- +-- registertfmfeature { +-- name = "reencode", +-- description = "reencode", +-- manipulators = { +-- base = reencode, +-- node = reencode, +-- } +-- } +-- +-- local function remap(tfmdata,remapping) +-- local vector = remapping and fontencodings.remappings[remapping] +-- if vector then +-- local characters, original = tfmdata.characters, { } +-- for k, v in next, characters do +-- original[k], characters[k] = v, nil +-- end +-- for k,v in next, vector do +-- if k ~= v then +-- if trace_unicoding then +-- report_unicoding("remapping %U to %U",k,v) +-- end +-- local c = original[k] +-- characters[v] = c +-- c.index = k +-- end +-- end +-- local properties = tfmdata.properties +-- if not properties then +-- properties = { } +-- tfmdata.properties = properties +-- else +-- properties.encodingbytes = 2 +-- properties.format = properties.format or 'type1' +-- end +-- end +-- end +-- +-- registertfmfeature { +-- name = "remap", +-- description = "remap", +-- manipulators = { +-- base = remap, +-- node = remap, +-- } +-- } + +-- \definefontfeature[dingbats][goodies=dingbats,unicoding=yes] + +-- we only add and don't replace +-- we could also add kerns but we asssume symbols +-- todo: complain if not basemode + +-- remapping = { +-- tounicode = true, +-- unicodes = { +-- a1 = 0x2701, + +local tosixteen = fonts.mappings.tounicode16 + +local function initializeunicoding(tfmdata) + local goodies = tfmdata.goodies + local newcoding = nil + local tounicode = false + for i=1,#goodies do + local remapping = goodies[i].remapping + if remapping and remapping.unicodes then + newcoding = remapping.unicodes -- names to unicodes + tounicode = remapping.tounicode + end + end + if newcoding then + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local oldcoding = tfmdata.resources.unicodes + local tounicodes = tfmdata.resources.tounicode -- index to unicode + local originals = { } + for name, newcode in next, newcoding do + local oldcode = oldcoding[name] + if characters[newcode] and not originals[newcode] then + originals[newcode] = { + character = characters [newcode], + description = descriptions[newcode], + } + end + if oldcode then + local original = originals[oldcode] + if original then + characters [newcode] = original.character + 
descriptions[newcode] = original.description + else + characters [newcode] = characters [oldcode] + descriptions[newcode] = descriptions[oldcode] + end + else + oldcoding[name] = newcode + end + if tounicode then + local description = descriptions[newcode] + if description then + local index = description.index + if not tounicodes[index] then + tounicodes[index] = tosixteen(newcode) -- shared (we could have a metatable) + end + end + end + if trace_unicoding then + if oldcode then + report_unicoding("aliasing glyph %a from %U to %U",name,oldcode,newcode) + else + report_unicoding("aliasing glyph %a to %U",name,newcode) + end + end + end + end +end + +registerafmfeature { + name = "unicoding", + description = "adapt unicode table", + initializers = { + base = initializeunicoding, + node = initializeunicoding, + }, + -- manipulators = { + -- base = finalizeunicoding, + -- node = finalizeunicoding, + -- } +} + +registerotffeature { + name = "unicoding", + description = "adapt unicode table", + initializers = { + base = initializeunicoding, + node = initializeunicoding, + }, + -- manipulators = { + -- base = finalizeunicoding, + -- node = finalizeunicoding, + -- } +} diff --git a/tex/context/base/font-ext.lua b/tex/context/base/font-ext.lua index 89d5927d4..d2bc21837 100644 --- a/tex/context/base/font-ext.lua +++ b/tex/context/base/font-ext.lua @@ -1,946 +1,946 @@ -if not modules then modules = { } end modules ['font-ext'] = { - version = 1.001, - comment = "companion to font-ini.mkiv and hand-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next, type, byte = next, type, string.byte -local gmatch, concat, format = string.gmatch, table.concat, string.format -local utfchar = utf.char - -local commands, context = commands, context -local fonts, utilities = fonts, utilities - -local trace_protrusion = false trackers.register("fonts.protrusion", function(v) trace_protrusion = v end) -local trace_expansion = false trackers.register("fonts.expansion", function(v) trace_expansion = v end) - -local report_expansions = logs.reporter("fonts","expansions") -local report_protrusions = logs.reporter("fonts","protrusions") - --- todo: byte(..) => 0xHHHH - ---[[ldx-- -

When we implement functions that deal with features, most of them -will depend on the font format. Here we define the few that are kind -of neutral.

---ldx]]-- - -local handlers = fonts.handlers -local hashes = fonts.hashes -local otf = handlers.otf - -local registerotffeature = handlers.otf.features.register -local registerafmfeature = handlers.afm.features.register - -local fontdata = hashes.identifiers - -local allocate = utilities.storage.allocate -local settings_to_array = utilities.parsers.settings_to_array -local getparameters = utilities.parsers.getparameters - -local setmetatableindex = table.setmetatableindex - --- -- -- -- -- -- --- shared --- -- -- -- -- -- - -local function get_class_and_vector(tfmdata,value,where) -- "expansions" - local g_where = tfmdata.goodies and tfmdata.goodies[where] - local f_where = fonts[where] - local g_classes = g_where and g_where.classes - local f_classes = f_where and f_where.classes - local class = (g_classes and g_classes[value]) or (f_classes and f_classes[value]) - if class then - local class_vector = class.vector - local g_vectors = g_where and g_where.vectors - local f_vectors = f_where and f_where.vectors - local vector = (g_vectors and g_vectors[class_vector]) or (f_vectors and f_vectors[class_vector]) - return class, vector - end -end - --- -- -- -- -- -- --- expansion (hz) --- -- -- -- -- -- - -local expansions = fonts.expansions or allocate() - -fonts.expansions = expansions - -local classes = expansions.classes or allocate() -local vectors = expansions.vectors or allocate() - -expansions.classes = classes -expansions.vectors = vectors - --- beware, pdftex itself uses percentages * 10 - -classes.preset = { stretch = 2, shrink = 2, step = .5, factor = 1 } - -function commands.setupfontexpansion(class,settings) - getparameters(classes,class,'preset',settings) -end - -classes['quality'] = { - stretch = 2, shrink = 2, step = .5, vector = 'default', factor = 1 -} - -vectors['default'] = { - [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7, - [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7, - [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7, - [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7, - [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7, - [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7, - [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7, - [byte('w')] = 0.7, [byte('z')] = 0.7, - [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7, -} - -vectors['quality'] = vectors['default'] -- metatable ? 
- -local function initializeexpansion(tfmdata,value) - if value then - local class, vector = get_class_and_vector(tfmdata,value,"expansions") - if class then - if vector then - local stretch = class.stretch or 0 - local shrink = class.shrink or 0 - local step = class.step or 0 - local factor = class.factor or 1 - if trace_expansion then - report_expansions("setting class %a, vector %a, factor %a, stretch %a, shrink %a, step %a", - value,class.vector,factor,stretch,shrink,step) - end - tfmdata.parameters.expansion = { - stretch = 10 * stretch, - shrink = 10 * shrink, - step = 10 * step, - factor = factor, - auto = true, - } - local data = characters and characters.data - for i, chr in next, tfmdata.characters do - local v = vector[i] - if data and not v then -- we could move the data test outside (needed for plain) - local d = data[i] - if d then - local s = d.shcode - if not s then - -- sorry - elseif type(s) == "table" then - v = ((vector[s[1]] or 0) + (vector[s[#s]] or 0)) / 2 - else - v = vector[s] or 0 - end - end - end - if v and v ~= 0 then - chr.expansion_factor = v*factor - else -- can be option - chr.expansion_factor = factor - end - end - elseif trace_expansion then - report_expansions("unknown vector %a in class %a",class.vector,value) - end - elseif trace_expansion then - report_expansions("unknown class %a",value) - end - end -end - -registerotffeature { - name = "expansion", - description = "apply hz optimization", - initializers = { - base = initializeexpansion, - node = initializeexpansion, - } -} - -registerafmfeature { - name = "expansion", - description = "apply hz optimization", - initializers = { - base = initializeexpansion, - node = initializeexpansion, - } -} - -fonts.goodies.register("expansions", function(...) return fonts.goodies.report("expansions", trace_expansion, ...) end) - -local report_opbd = logs.reporter("fonts","otf opbd") - --- -- -- -- -- -- --- protrusion --- -- -- -- -- -- - -fonts.protrusions = allocate() -local protrusions = fonts.protrusions - -protrusions.classes = allocate() -protrusions.vectors = allocate() - -local classes = protrusions.classes -local vectors = protrusions.vectors - --- the values need to be revisioned - -classes.preset = { factor = 1, left = 1, right = 1 } - -function commands.setupfontprotrusion(class,settings) - getparameters(classes,class,'preset',settings) -end - -classes['pure'] = { - vector = 'pure', factor = 1 -} -classes['punctuation'] = { - vector = 'punctuation', factor = 1 -} -classes['alpha'] = { - vector = 'alpha', factor = 1 -} -classes['quality'] = { - vector = 'quality', factor = 1 -} - -vectors['pure'] = { - - [0x002C] = { 0, 1 }, -- comma - [0x002E] = { 0, 1 }, -- period - [0x003A] = { 0, 1 }, -- colon - [0x003B] = { 0, 1 }, -- semicolon - [0x002D] = { 0, 1 }, -- hyphen - [0x00AD] = { 0, 1 }, -- also hyphen - [0x2013] = { 0, 0.50 }, -- endash - [0x2014] = { 0, 0.33 }, -- emdash - [0x3001] = { 0, 1 }, -- ideographic comma 〠- [0x3002] = { 0, 1 }, -- ideographic full stop 。 - [0x060C] = { 0, 1 }, -- arabic comma ، - [0x061B] = { 0, 1 }, -- arabic semicolon ؛ - [0x06D4] = { 0, 1 }, -- arabic full stop ۔ - -} - -vectors['punctuation'] = { - - [0x003F] = { 0, 0.20 }, -- ? - [0x00BF] = { 0, 0.20 }, -- ¿ - [0x0021] = { 0, 0.20 }, -- ! 
- [0x00A1] = { 0, 0.20 }, -- ¡ - [0x0028] = { 0.05, 0 }, -- ( - [0x0029] = { 0, 0.05 }, -- ) - [0x005B] = { 0.05, 0 }, -- [ - [0x005D] = { 0, 0.05 }, -- ] - [0x002C] = { 0, 0.70 }, -- comma - [0x002E] = { 0, 0.70 }, -- period - [0x003A] = { 0, 0.50 }, -- colon - [0x003B] = { 0, 0.50 }, -- semicolon - [0x002D] = { 0, 0.70 }, -- hyphen - [0x00AD] = { 0, 0.70 }, -- also hyphen - [0x2013] = { 0, 0.30 }, -- endash - [0x2014] = { 0, 0.20 }, -- emdash - [0x060C] = { 0, 0.70 }, -- arabic comma - [0x061B] = { 0, 0.50 }, -- arabic semicolon - [0x06D4] = { 0, 0.70 }, -- arabic full stop - [0x061F] = { 0, 0.20 }, -- ؟ - - -- todo: left and right quotes: .5 double, .7 single - - [0x2039] = { 0.70, 0.70 }, -- left single guillemet ‹ - [0x203A] = { 0.70, 0.70 }, -- right single guillemet › - [0x00AB] = { 0.50, 0.50 }, -- left guillemet « - [0x00BB] = { 0.50, 0.50 }, -- right guillemet » - - [0x2018] = { 0.70, 0.70 }, -- left single quotation mark ‘ - [0x2019] = { 0, 0.70 }, -- right single quotation mark ’ - [0x201A] = { 0.70, 0 }, -- single low-9 quotation mark , - [0x201B] = { 0.70, 0 }, -- single high-reversed-9 quotation mark ‛ - [0x201C] = { 0.50, 0.50 }, -- left double quotation mark “ - [0x201D] = { 0, 0.50 }, -- right double quotation mark †- [0x201E] = { 0.50, 0 }, -- double low-9 quotation mark „ - [0x201F] = { 0.50, 0 }, -- double high-reversed-9 quotation mark ‟ - -} - -vectors['alpha'] = { - - [byte("A")] = { .05, .05 }, - [byte("F")] = { 0, .05 }, - [byte("J")] = { .05, 0 }, - [byte("K")] = { 0, .05 }, - [byte("L")] = { 0, .05 }, - [byte("T")] = { .05, .05 }, - [byte("V")] = { .05, .05 }, - [byte("W")] = { .05, .05 }, - [byte("X")] = { .05, .05 }, - [byte("Y")] = { .05, .05 }, - - [byte("k")] = { 0, .05 }, - [byte("r")] = { 0, .05 }, - [byte("t")] = { 0, .05 }, - [byte("v")] = { .05, .05 }, - [byte("w")] = { .05, .05 }, - [byte("x")] = { .05, .05 }, - [byte("y")] = { .05, .05 }, - -} - -vectors['quality'] = table.merged( - vectors['punctuation'], - vectors['alpha'] -) - --- As this is experimental code, users should not depend on it. The --- implications are still discussed on the ConTeXt Dev List and we're --- not sure yet what exactly the spec is (the next code is tested with --- a gyre font patched by / fea file made by Khaled Hosny). The double --- trick should not be needed it proper hanging punctuation is used in --- which case values < 1 can be used. 
--- --- preferred (in context, usine vectors): --- --- \definefontfeature[whatever][default][mode=node,protrusion=quality] --- --- using lfbd and rtbd, with possibibility to enable only one side : --- --- \definefontfeature[whocares][default][mode=node,protrusion=yes, opbd=yes,script=latn] --- \definefontfeature[whocares][default][mode=node,protrusion=right,opbd=yes,script=latn] --- --- idem, using multiplier --- --- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn] --- \definefontfeature[whocares][default][mode=node,protrusion=double,opbd=yes,script=latn] --- --- idem, using named feature file (less frozen): --- --- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn,featurefile=texgyrepagella-regularxx.fea] - -classes['double'] = { -- for testing opbd - factor = 2, left = 1, right = 1, -} - -local function map_opbd_onto_protrusion(tfmdata,value,opbd) - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - local properties = tfmdata.properties - local rawdata = tfmdata.shared.rawdata - local lookuphash = rawdata.lookuphash - local script = properties.script - local language = properties.language - local done, factor, left, right = false, 1, 1, 1 - local class = classes[value] - if class then - factor = class.factor or 1 - left = class.left or 1 - right = class.right or 1 - else - factor = tonumber(value) or 1 - end - if opbd ~= "right" then - local validlookups, lookuplist = otf.collectlookups(rawdata,"lfbd",script,language) - if validlookups then - for i=1,#lookuplist do - local lookup = lookuplist[i] - local data = lookuphash[lookup] - if data then - if trace_protrusion then - report_protrusions("setting left using lfbd lookup %a",lookup) - end - for k, v in next, data do - -- local p = - v[3] / descriptions[k].width-- or 1 ~= 0 too but the same - local p = - (v[1] / 1000) * factor * left - characters[k].left_protruding = p - if trace_protrusion then - report_protrusions("lfbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v) - end - end - done = true - end - end - end - end - if opbd ~= "left" then - local validlookups, lookuplist = otf.collectlookups(rawdata,"rtbd",script,language) - if validlookups then - for i=1,#lookuplist do - local lookup = lookuplist[i] - local data = lookuphash[lookup] - if data then - if trace_protrusion then - report_protrusions("setting right using rtbd lookup %a",lookup) - end - for k, v in next, data do - -- local p = v[3] / descriptions[k].width -- or 3 - local p = (v[1] / 1000) * factor * right - characters[k].right_protruding = p - if trace_protrusion then - report_protrusions("rtbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v) - end - end - end - done = true - end - end - end - local parameters = tfmdata.parameters - local protrusion = tfmdata.protrusion - if not protrusion then - parameters.protrusion = { - auto = true - } - else - protrusion.auto = true - end -end - --- The opbd test is just there because it was discussed on the --- context development list. However, the mentioned fxlbi.otf font --- only has some kerns for digits. So, consider this feature not --- supported till we have a proper test font. 
- -local function initializeprotrusion(tfmdata,value) - if value then - local opbd = tfmdata.shared.features.opbd - if opbd then - -- possible values: left right both yes no (experimental) - map_opbd_onto_protrusion(tfmdata,value,opbd) - else - local class, vector = get_class_and_vector(tfmdata,value,"protrusions") - if class then - if vector then - local factor = class.factor or 1 - local left = class.left or 1 - local right = class.right or 1 - if trace_protrusion then - report_protrusions("setting class %a, vector %a, factor %a, left %a, right %a", - value,class.vector,factor,left,right) - end - local data = characters.data - local emwidth = tfmdata.parameters.quad - tfmdata.parameters.protrusion = { - factor = factor, - left = left, - right = right, - auto = true, - } - for i, chr in next, tfmdata.characters do - local v, pl, pr = vector[i], nil, nil - if v then - pl, pr = v[1], v[2] - else - local d = data[i] - if d then - local s = d.shcode - if not s then - -- sorry - elseif type(s) == "table" then - local vl, vr = vector[s[1]], vector[s[#s]] - if vl then pl = vl[1] end - if vr then pr = vr[2] end - else - v = vector[s] - if v then - pl, pr = v[1], v[2] - end - end - end - end - if pl and pl ~= 0 then - chr.left_protruding = left *pl*factor - end - if pr and pr ~= 0 then - chr.right_protruding = right*pr*factor - end - end - elseif trace_protrusion then - report_protrusions("unknown vector %a in class %a",class.vector,value) - end - elseif trace_protrusion then - report_protrusions("unknown class %a",value) - end - end - end -end - -registerotffeature { - name = "protrusion", - description = "shift characters into the left and or right margin", - initializers = { - base = initializeprotrusion, - node = initializeprotrusion, - } -} - -registerafmfeature { - name = "protrusion", - description = "shift characters into the left and or right margin", - initializers = { - base = initializeprotrusion, - node = initializeprotrusion, - } -} - -fonts.goodies.register("protrusions", function(...) return fonts.goodies.report("protrusions", trace_protrusion, ...) 
end) - --- -- -- - -local function initializenostackmath(tfmdata,value) - tfmdata.properties.nostackmath = value and true -end - -registerotffeature { - name = "nostackmath", - description = "disable math stacking mechanism", - initializers = { - base = initializenostackmath, - node = initializenostackmath, - } -} - -local function initializeitlc(tfmdata,value) -- hm, always value - if value then - -- the magic 40 and it formula come from Dohyun Kim but we might need another guess - local parameters = tfmdata.parameters - local italicangle = parameters.italicangle - if italicangle and italicangle ~= 0 then - local properties = tfmdata.properties - local factor = tonumber(value) or 1 - properties.hasitalics = true - properties.autoitalicamount = factor * (parameters.uwidth or 40)/2 - end - end -end - -registerotffeature { - name = "itlc", - description = "italic correction", - initializers = { - base = initializeitlc, - node = initializeitlc, - } -} - -registerafmfeature { - name = "itlc", - description = "italic correction", - initializers = { - base = initializeitlc, - node = initializeitlc, - } -} - -local function initializetextitalics(tfmdata,value) -- yes no delay - local delay = value == "delay" - tfmdata.properties.textitalics = delay and true or value - tfmdata.properties.delaytextitalics = delay -end - -registerotffeature { - name = "textitalics", - description = "use alternative text italic correction", - initializers = { - base = initializetextitalics, - node = initializetextitalics, - } -} - -registerafmfeature { - name = "textitalics", - description = "use alternative text italic correction", - initializers = { - base = initializetextitalics, - node = initializetextitalics, - } -} - --- slanting - -local function initializeslant(tfmdata,value) - value = tonumber(value) - if not value then - value = 0 - elseif value > 1 then - value = 1 - elseif value < -1 then - value = -1 - end - tfmdata.parameters.slantfactor = value -end - -registerotffeature { - name = "slant", - description = "slant glyphs", - initializers = { - base = initializeslant, - node = initializeslant, - } -} - -registerafmfeature { - name = "slant", - description = "slant glyphs", - initializers = { - base = initializeslant, - node = initializeslant, - } -} - -local function initializeextend(tfmdata,value) - value = tonumber(value) - if not value then - value = 0 - elseif value > 10 then - value = 10 - elseif value < -10 then - value = -10 - end - tfmdata.parameters.extendfactor = value -end - -registerotffeature { - name = "extend", - description = "scale glyphs horizontally", - initializers = { - base = initializeextend, - node = initializeextend, - } -} - -registerafmfeature { - name = "extend", - description = "scale glyphs horizontally", - initializers = { - base = initializeextend, - node = initializeextend, - } -} - --- For Wolfgang Schuster: --- --- \definefontfeature[thisway][default][script=hang,language=zhs,dimensions={2,2,2}] --- \definedfont[file:kozminpr6nregular*thisway] --- --- For the moment we don't mess with the descriptions. 
- -local function manipulatedimensions(tfmdata,key,value) - if type(value) == "string" and value ~= "" then - local characters = tfmdata.characters - local parameters = tfmdata.parameters - local emwidth = parameters.quad - local exheight = parameters.xheight - local spec = settings_to_array(value) - local width = (spec[1] or 0) * emwidth - local height = (spec[2] or 0) * exheight - local depth = (spec[3] or 0) * exheight - if width > 0 then - local resources = tfmdata.resources - local additions = { } - local private = resources.private - for unicode, old_c in next, characters do - local oldwidth = old_c.width - if oldwidth ~= width then - -- Defining the tables in one step is more efficient - -- than adding fields later. - private = private + 1 - local new_c - local commands = { - { "right", (width - oldwidth) / 2 }, - { "slot", 1, private }, - } - if height > 0 then - if depth > 0 then - new_c = { - width = width, - height = height, - depth = depth, - commands = commands, - } - else - new_c = { - width = width, - height = height, - commands = commands, - } - end - else - if depth > 0 then - new_c = { - width = width, - depth = depth, - commands = commands, - } - else - new_c = { - width = width, - commands = commands, - } - end - end - setmetatableindex(new_c,old_c) - characters[unicode] = new_c - additions[private] = old_c - end - end - for k, v in next, additions do - characters[k] = v - end - resources.private = private - elseif height > 0 and depth > 0 then - for unicode, old_c in next, characters do - old_c.height = height - old_c.depth = depth - end - elseif height > 0 then - for unicode, old_c in next, characters do - old_c.height = height - end - elseif depth > 0 then - for unicode, old_c in next, characters do - old_c.depth = depth - end - end - end -end - -registerotffeature { - name = "dimensions", - description = "force dimensions", - manipulators = { - base = manipulatedimensions, - node = manipulatedimensions, - } -} - --- for zhichu chen (see mailing list archive): we might add a few more variants --- in due time --- --- \definefontfeature[boxed][default][boundingbox=yes] % paleblue --- --- maybe: --- --- \definecolor[DummyColor][s=.75,t=.5,a=1] {\DummyColor test} \nopdfcompression --- --- local gray = { "special", "pdf: /Tr1 gs .75 g" } --- local black = { "special", "pdf: /Tr0 gs 0 g" } - -local push = { "push" } -local pop = { "pop" } -local gray = { "special", "pdf: .75 g" } -local black = { "special", "pdf: 0 g" } - -local downcache = { } -- handy for huge cjk fonts -local rulecache = { } -- handy for huge cjk fonts - -setmetatableindex(downcache,function(t,d) - local v = { "down", d } - t[d] = v - return v -end) - -setmetatableindex(rulecache,function(t,h) - local v = { } - t[h] = v - setmetatableindex(v,function(t,w) - local v = { "rule", h, w } - t[w] = v - return v - end) - return v -end) - -local function showboundingbox(tfmdata,key,value) - if value then - local vfspecials = backends.pdf.tables.vfspecials - local gray = vfspecials and (vfspecials.rulecolors[value] or vfspecials.rulecolors.palegray) or gray - local characters = tfmdata.characters - local resources = tfmdata.resources - local additions = { } - local private = resources.private - for unicode, old_c in next, characters do - private = private + 1 - local width = old_c.width or 0 - local height = old_c.height or 0 - local depth = old_c.depth or 0 - local new_c - if depth == 0 then - new_c = { - width = width, - height = height, - commands = { - push, - gray, - rulecache[height][width], - black, - 
pop, - { "slot", 1, private }, - } - } - else - new_c = { - width = width, - height = height, - depth = depth, - commands = { - push, - downcache[depth], - gray, - rulecache[height+depth][width], - black, - pop, - { "slot", 1, private }, - } - } - end - setmetatableindex(new_c,old_c) - characters[unicode] = new_c - additions[private] = old_c - end - for k, v in next, additions do - characters[k] = v - end - resources.private = private - end -end - -registerotffeature { - name = "boundingbox", - description = "show boundingbox", - manipulators = { - base = showboundingbox, - node = showboundingbox, - } -} - --- -- historic stuff, move from font-ota (handled differently, typo-rep) --- --- local delete_node = nodes.delete --- local fontdata = fonts.hashes.identifiers --- --- local nodecodes = nodes.nodecodes --- local glyph_code = nodecodes.glyph --- --- local strippables = allocate() --- fonts.strippables = strippables --- --- strippables.joiners = table.tohash { --- 0x200C, -- zwnj --- 0x200D, -- zwj --- } --- --- strippables.all = table.tohash { --- 0x000AD, 0x017B4, 0x017B5, 0x0200B, 0x0200C, 0x0200D, 0x0200E, 0x0200F, 0x0202A, 0x0202B, --- 0x0202C, 0x0202D, 0x0202E, 0x02060, 0x02061, 0x02062, 0x02063, 0x0206A, 0x0206B, 0x0206C, --- 0x0206D, 0x0206E, 0x0206F, 0x0FEFF, 0x1D173, 0x1D174, 0x1D175, 0x1D176, 0x1D177, 0x1D178, --- 0x1D179, 0x1D17A, 0xE0001, 0xE0020, 0xE0021, 0xE0022, 0xE0023, 0xE0024, 0xE0025, 0xE0026, --- 0xE0027, 0xE0028, 0xE0029, 0xE002A, 0xE002B, 0xE002C, 0xE002D, 0xE002E, 0xE002F, 0xE0030, --- 0xE0031, 0xE0032, 0xE0033, 0xE0034, 0xE0035, 0xE0036, 0xE0037, 0xE0038, 0xE0039, 0xE003A, --- 0xE003B, 0xE003C, 0xE003D, 0xE003E, 0xE003F, 0xE0040, 0xE0041, 0xE0042, 0xE0043, 0xE0044, --- 0xE0045, 0xE0046, 0xE0047, 0xE0048, 0xE0049, 0xE004A, 0xE004B, 0xE004C, 0xE004D, 0xE004E, --- 0xE004F, 0xE0050, 0xE0051, 0xE0052, 0xE0053, 0xE0054, 0xE0055, 0xE0056, 0xE0057, 0xE0058, --- 0xE0059, 0xE005A, 0xE005B, 0xE005C, 0xE005D, 0xE005E, 0xE005F, 0xE0060, 0xE0061, 0xE0062, --- 0xE0063, 0xE0064, 0xE0065, 0xE0066, 0xE0067, 0xE0068, 0xE0069, 0xE006A, 0xE006B, 0xE006C, --- 0xE006D, 0xE006E, 0xE006F, 0xE0070, 0xE0071, 0xE0072, 0xE0073, 0xE0074, 0xE0075, 0xE0076, --- 0xE0077, 0xE0078, 0xE0079, 0xE007A, 0xE007B, 0xE007C, 0xE007D, 0xE007E, 0xE007F, --- } --- --- strippables[true] = strippables.joiners --- --- local function processformatters(head,font) --- local subset = fontdata[font].shared.features.formatters --- local vector = subset and strippables[subset] --- if vector then --- local current, done = head, false --- while current do --- if current.id == glyph_code and current.subtype<256 and current.font == font then --- local char = current.char --- if vector[char] then --- head, current = delete_node(head,current) --- done = true --- else --- current = current.next --- end --- else --- current = current.next --- end --- end --- return head, done --- else --- return head, false --- end --- end --- --- registerotffeature { --- name = "formatters", --- description = "hide formatting characters", --- methods = { --- base = processformatters, --- node = processformatters, --- } --- } - --- a handy helper (might change or be moved to another namespace) - -local new_special = nodes.pool.special -local new_glyph = nodes.pool.glyph -local hpack_node = node.hpack - -function fonts.helpers.addprivate(tfmdata,name,characterdata) - local properties = tfmdata.properties - local privates = properties.privates - local lastprivate = properties.lastprivate - if lastprivate then - lastprivate = lastprivate + 1 - 
else - lastprivate = 0xE000 - end - if not privates then - privates = { } - properties.privates = privates - end - if name then - privates[name] = lastprivate - end - properties.lastprivate = lastprivate - tfmdata.characters[lastprivate] = characterdata - if properties.finalized then - properties.lateprivates = true - end - return lastprivate -end - -function fonts.helpers.getprivatenode(tfmdata,name) - local properties = tfmdata.properties - local privates = properties and properties.privates - if privates then - local p = privates[name] - if p then - local char = tfmdata.characters[p] - local commands = char.commands - if commands then - local fake = hpack_node(new_special(commands[1][2])) - fake.width = char.width - fake.height = char.height - fake.depth = char.depth - return fake - else - -- todo: set current attribibutes - return new_glyph(properties.id,p) - end - end - end -end - -function fonts.helpers.hasprivate(tfmdata,name) - local properties = tfmdata.properties - local privates = properties and properties.privates - return privates and privates[name] or false -end - -function commands.getprivatechar(name) - context(fonts.helpers.getprivatenode(fontdata[font.current()],name)) -end +if not modules then modules = { } end modules ['font-ext'] = { + version = 1.001, + comment = "companion to font-ini.mkiv and hand-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next, type, byte = next, type, string.byte +local gmatch, concat, format = string.gmatch, table.concat, string.format +local utfchar = utf.char + +local commands, context = commands, context +local fonts, utilities = fonts, utilities + +local trace_protrusion = false trackers.register("fonts.protrusion", function(v) trace_protrusion = v end) +local trace_expansion = false trackers.register("fonts.expansion", function(v) trace_expansion = v end) + +local report_expansions = logs.reporter("fonts","expansions") +local report_protrusions = logs.reporter("fonts","protrusions") + +-- todo: byte(..) => 0xHHHH + +--[[ldx-- +

When we implement functions that deal with features, most of them +will depend on the font format. Here we define the few that are kind +of neutral.

+--ldx]]-- + +local handlers = fonts.handlers +local hashes = fonts.hashes +local otf = handlers.otf + +local registerotffeature = handlers.otf.features.register +local registerafmfeature = handlers.afm.features.register + +local fontdata = hashes.identifiers + +local allocate = utilities.storage.allocate +local settings_to_array = utilities.parsers.settings_to_array +local getparameters = utilities.parsers.getparameters + +local setmetatableindex = table.setmetatableindex + +-- -- -- -- -- -- +-- shared +-- -- -- -- -- -- + +local function get_class_and_vector(tfmdata,value,where) -- "expansions" + local g_where = tfmdata.goodies and tfmdata.goodies[where] + local f_where = fonts[where] + local g_classes = g_where and g_where.classes + local f_classes = f_where and f_where.classes + local class = (g_classes and g_classes[value]) or (f_classes and f_classes[value]) + if class then + local class_vector = class.vector + local g_vectors = g_where and g_where.vectors + local f_vectors = f_where and f_where.vectors + local vector = (g_vectors and g_vectors[class_vector]) or (f_vectors and f_vectors[class_vector]) + return class, vector + end +end + +-- -- -- -- -- -- +-- expansion (hz) +-- -- -- -- -- -- + +local expansions = fonts.expansions or allocate() + +fonts.expansions = expansions + +local classes = expansions.classes or allocate() +local vectors = expansions.vectors or allocate() + +expansions.classes = classes +expansions.vectors = vectors + +-- beware, pdftex itself uses percentages * 10 + +classes.preset = { stretch = 2, shrink = 2, step = .5, factor = 1 } + +function commands.setupfontexpansion(class,settings) + getparameters(classes,class,'preset',settings) +end + +classes['quality'] = { + stretch = 2, shrink = 2, step = .5, vector = 'default', factor = 1 +} + +vectors['default'] = { + [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7, + [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7, + [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7, + [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7, + [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7, + [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7, + [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7, + [byte('w')] = 0.7, [byte('z')] = 0.7, + [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7, +} + +vectors['quality'] = vectors['default'] -- metatable ? 
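-- A minimal sketch, for illustration only: registering a custom expansion
-- class from the Lua end, mirroring the 'quality' class above. The class
-- name 'gentle', the vector name and all values are invented examples.

fonts.expansions.classes['gentle'] = {
    stretch = 1, shrink = 1, step = .25, vector = 'gentle', factor = 1,
}

fonts.expansions.vectors['gentle'] = {
    [string.byte('o')] = 0.7, -- round shapes can take some expansion
    [string.byte('l')] = 0.3, -- narrow shapes take less
}

-- after which something like
--
--   \definefontfeature[gentlehz][default][expansion=gentle]
--
-- would resolve the class and vector through get_class_and_vector.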
+ +local function initializeexpansion(tfmdata,value) + if value then + local class, vector = get_class_and_vector(tfmdata,value,"expansions") + if class then + if vector then + local stretch = class.stretch or 0 + local shrink = class.shrink or 0 + local step = class.step or 0 + local factor = class.factor or 1 + if trace_expansion then + report_expansions("setting class %a, vector %a, factor %a, stretch %a, shrink %a, step %a", + value,class.vector,factor,stretch,shrink,step) + end + tfmdata.parameters.expansion = { + stretch = 10 * stretch, + shrink = 10 * shrink, + step = 10 * step, + factor = factor, + auto = true, + } + local data = characters and characters.data + for i, chr in next, tfmdata.characters do + local v = vector[i] + if data and not v then -- we could move the data test outside (needed for plain) + local d = data[i] + if d then + local s = d.shcode + if not s then + -- sorry + elseif type(s) == "table" then + v = ((vector[s[1]] or 0) + (vector[s[#s]] or 0)) / 2 + else + v = vector[s] or 0 + end + end + end + if v and v ~= 0 then + chr.expansion_factor = v*factor + else -- can be option + chr.expansion_factor = factor + end + end + elseif trace_expansion then + report_expansions("unknown vector %a in class %a",class.vector,value) + end + elseif trace_expansion then + report_expansions("unknown class %a",value) + end + end +end + +registerotffeature { + name = "expansion", + description = "apply hz optimization", + initializers = { + base = initializeexpansion, + node = initializeexpansion, + } +} + +registerafmfeature { + name = "expansion", + description = "apply hz optimization", + initializers = { + base = initializeexpansion, + node = initializeexpansion, + } +} + +fonts.goodies.register("expansions", function(...) return fonts.goodies.report("expansions", trace_expansion, ...) end) + +local report_opbd = logs.reporter("fonts","otf opbd") + +-- -- -- -- -- -- +-- protrusion +-- -- -- -- -- -- + +fonts.protrusions = allocate() +local protrusions = fonts.protrusions + +protrusions.classes = allocate() +protrusions.vectors = allocate() + +local classes = protrusions.classes +local vectors = protrusions.vectors + +-- the values need to be revisioned + +classes.preset = { factor = 1, left = 1, right = 1 } + +function commands.setupfontprotrusion(class,settings) + getparameters(classes,class,'preset',settings) +end + +classes['pure'] = { + vector = 'pure', factor = 1 +} +classes['punctuation'] = { + vector = 'punctuation', factor = 1 +} +classes['alpha'] = { + vector = 'alpha', factor = 1 +} +classes['quality'] = { + vector = 'quality', factor = 1 +} + +vectors['pure'] = { + + [0x002C] = { 0, 1 }, -- comma + [0x002E] = { 0, 1 }, -- period + [0x003A] = { 0, 1 }, -- colon + [0x003B] = { 0, 1 }, -- semicolon + [0x002D] = { 0, 1 }, -- hyphen + [0x00AD] = { 0, 1 }, -- also hyphen + [0x2013] = { 0, 0.50 }, -- endash + [0x2014] = { 0, 0.33 }, -- emdash + [0x3001] = { 0, 1 }, -- ideographic comma 〠+ [0x3002] = { 0, 1 }, -- ideographic full stop 。 + [0x060C] = { 0, 1 }, -- arabic comma ، + [0x061B] = { 0, 1 }, -- arabic semicolon ؛ + [0x06D4] = { 0, 1 }, -- arabic full stop ۔ + +} + +vectors['punctuation'] = { + + [0x003F] = { 0, 0.20 }, -- ? + [0x00BF] = { 0, 0.20 }, -- ¿ + [0x0021] = { 0, 0.20 }, -- ! 
+ [0x00A1] = { 0, 0.20 }, -- ¡ + [0x0028] = { 0.05, 0 }, -- ( + [0x0029] = { 0, 0.05 }, -- ) + [0x005B] = { 0.05, 0 }, -- [ + [0x005D] = { 0, 0.05 }, -- ] + [0x002C] = { 0, 0.70 }, -- comma + [0x002E] = { 0, 0.70 }, -- period + [0x003A] = { 0, 0.50 }, -- colon + [0x003B] = { 0, 0.50 }, -- semicolon + [0x002D] = { 0, 0.70 }, -- hyphen + [0x00AD] = { 0, 0.70 }, -- also hyphen + [0x2013] = { 0, 0.30 }, -- endash + [0x2014] = { 0, 0.20 }, -- emdash + [0x060C] = { 0, 0.70 }, -- arabic comma + [0x061B] = { 0, 0.50 }, -- arabic semicolon + [0x06D4] = { 0, 0.70 }, -- arabic full stop + [0x061F] = { 0, 0.20 }, -- ؟ + + -- todo: left and right quotes: .5 double, .7 single + + [0x2039] = { 0.70, 0.70 }, -- left single guillemet ‹ + [0x203A] = { 0.70, 0.70 }, -- right single guillemet › + [0x00AB] = { 0.50, 0.50 }, -- left guillemet « + [0x00BB] = { 0.50, 0.50 }, -- right guillemet » + + [0x2018] = { 0.70, 0.70 }, -- left single quotation mark ‘ + [0x2019] = { 0, 0.70 }, -- right single quotation mark ’ + [0x201A] = { 0.70, 0 }, -- single low-9 quotation mark , + [0x201B] = { 0.70, 0 }, -- single high-reversed-9 quotation mark ‛ + [0x201C] = { 0.50, 0.50 }, -- left double quotation mark “ + [0x201D] = { 0, 0.50 }, -- right double quotation mark †+ [0x201E] = { 0.50, 0 }, -- double low-9 quotation mark „ + [0x201F] = { 0.50, 0 }, -- double high-reversed-9 quotation mark ‟ + +} + +vectors['alpha'] = { + + [byte("A")] = { .05, .05 }, + [byte("F")] = { 0, .05 }, + [byte("J")] = { .05, 0 }, + [byte("K")] = { 0, .05 }, + [byte("L")] = { 0, .05 }, + [byte("T")] = { .05, .05 }, + [byte("V")] = { .05, .05 }, + [byte("W")] = { .05, .05 }, + [byte("X")] = { .05, .05 }, + [byte("Y")] = { .05, .05 }, + + [byte("k")] = { 0, .05 }, + [byte("r")] = { 0, .05 }, + [byte("t")] = { 0, .05 }, + [byte("v")] = { .05, .05 }, + [byte("w")] = { .05, .05 }, + [byte("x")] = { .05, .05 }, + [byte("y")] = { .05, .05 }, + +} + +vectors['quality'] = table.merged( + vectors['punctuation'], + vectors['alpha'] +) + +-- As this is experimental code, users should not depend on it. The +-- implications are still discussed on the ConTeXt Dev List and we're +-- not sure yet what exactly the spec is (the next code is tested with +-- a gyre font patched by / fea file made by Khaled Hosny). The double +-- trick should not be needed it proper hanging punctuation is used in +-- which case values < 1 can be used. 
+-- +-- preferred (in context, usine vectors): +-- +-- \definefontfeature[whatever][default][mode=node,protrusion=quality] +-- +-- using lfbd and rtbd, with possibibility to enable only one side : +-- +-- \definefontfeature[whocares][default][mode=node,protrusion=yes, opbd=yes,script=latn] +-- \definefontfeature[whocares][default][mode=node,protrusion=right,opbd=yes,script=latn] +-- +-- idem, using multiplier +-- +-- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn] +-- \definefontfeature[whocares][default][mode=node,protrusion=double,opbd=yes,script=latn] +-- +-- idem, using named feature file (less frozen): +-- +-- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn,featurefile=texgyrepagella-regularxx.fea] + +classes['double'] = { -- for testing opbd + factor = 2, left = 1, right = 1, +} + +local function map_opbd_onto_protrusion(tfmdata,value,opbd) + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local properties = tfmdata.properties + local rawdata = tfmdata.shared.rawdata + local lookuphash = rawdata.lookuphash + local script = properties.script + local language = properties.language + local done, factor, left, right = false, 1, 1, 1 + local class = classes[value] + if class then + factor = class.factor or 1 + left = class.left or 1 + right = class.right or 1 + else + factor = tonumber(value) or 1 + end + if opbd ~= "right" then + local validlookups, lookuplist = otf.collectlookups(rawdata,"lfbd",script,language) + if validlookups then + for i=1,#lookuplist do + local lookup = lookuplist[i] + local data = lookuphash[lookup] + if data then + if trace_protrusion then + report_protrusions("setting left using lfbd lookup %a",lookup) + end + for k, v in next, data do + -- local p = - v[3] / descriptions[k].width-- or 1 ~= 0 too but the same + local p = - (v[1] / 1000) * factor * left + characters[k].left_protruding = p + if trace_protrusion then + report_protrusions("lfbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v) + end + end + done = true + end + end + end + end + if opbd ~= "left" then + local validlookups, lookuplist = otf.collectlookups(rawdata,"rtbd",script,language) + if validlookups then + for i=1,#lookuplist do + local lookup = lookuplist[i] + local data = lookuphash[lookup] + if data then + if trace_protrusion then + report_protrusions("setting right using rtbd lookup %a",lookup) + end + for k, v in next, data do + -- local p = v[3] / descriptions[k].width -- or 3 + local p = (v[1] / 1000) * factor * right + characters[k].right_protruding = p + if trace_protrusion then + report_protrusions("rtbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v) + end + end + end + done = true + end + end + end + local parameters = tfmdata.parameters + local protrusion = tfmdata.protrusion + if not protrusion then + parameters.protrusion = { + auto = true + } + else + protrusion.auto = true + end +end + +-- The opbd test is just there because it was discussed on the +-- context development list. However, the mentioned fxlbi.otf font +-- only has some kerns for digits. So, consider this feature not +-- supported till we have a proper test font. 
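-- A minimal sketch, for illustration only: a user-defined protrusion class
-- that reuses the built-in 'punctuation' vector with a stronger factor. The
-- class name 'strong' and the factor value are invented examples.

fonts.protrusions.classes['strong'] = {
    vector = 'punctuation', factor = 1.5,
}

-- something like
--
--   \definefontfeature[strongpro][default][mode=node,protrusion=strong]
--
-- would then pick it up via get_class_and_vector.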
+ +local function initializeprotrusion(tfmdata,value) + if value then + local opbd = tfmdata.shared.features.opbd + if opbd then + -- possible values: left right both yes no (experimental) + map_opbd_onto_protrusion(tfmdata,value,opbd) + else + local class, vector = get_class_and_vector(tfmdata,value,"protrusions") + if class then + if vector then + local factor = class.factor or 1 + local left = class.left or 1 + local right = class.right or 1 + if trace_protrusion then + report_protrusions("setting class %a, vector %a, factor %a, left %a, right %a", + value,class.vector,factor,left,right) + end + local data = characters.data + local emwidth = tfmdata.parameters.quad + tfmdata.parameters.protrusion = { + factor = factor, + left = left, + right = right, + auto = true, + } + for i, chr in next, tfmdata.characters do + local v, pl, pr = vector[i], nil, nil + if v then + pl, pr = v[1], v[2] + else + local d = data[i] + if d then + local s = d.shcode + if not s then + -- sorry + elseif type(s) == "table" then + local vl, vr = vector[s[1]], vector[s[#s]] + if vl then pl = vl[1] end + if vr then pr = vr[2] end + else + v = vector[s] + if v then + pl, pr = v[1], v[2] + end + end + end + end + if pl and pl ~= 0 then + chr.left_protruding = left *pl*factor + end + if pr and pr ~= 0 then + chr.right_protruding = right*pr*factor + end + end + elseif trace_protrusion then + report_protrusions("unknown vector %a in class %a",class.vector,value) + end + elseif trace_protrusion then + report_protrusions("unknown class %a",value) + end + end + end +end + +registerotffeature { + name = "protrusion", + description = "shift characters into the left and or right margin", + initializers = { + base = initializeprotrusion, + node = initializeprotrusion, + } +} + +registerafmfeature { + name = "protrusion", + description = "shift characters into the left and or right margin", + initializers = { + base = initializeprotrusion, + node = initializeprotrusion, + } +} + +fonts.goodies.register("protrusions", function(...) return fonts.goodies.report("protrusions", trace_protrusion, ...) 
end) + +-- -- -- + +local function initializenostackmath(tfmdata,value) + tfmdata.properties.nostackmath = value and true +end + +registerotffeature { + name = "nostackmath", + description = "disable math stacking mechanism", + initializers = { + base = initializenostackmath, + node = initializenostackmath, + } +} + +local function initializeitlc(tfmdata,value) -- hm, always value + if value then + -- the magic 40 and it formula come from Dohyun Kim but we might need another guess + local parameters = tfmdata.parameters + local italicangle = parameters.italicangle + if italicangle and italicangle ~= 0 then + local properties = tfmdata.properties + local factor = tonumber(value) or 1 + properties.hasitalics = true + properties.autoitalicamount = factor * (parameters.uwidth or 40)/2 + end + end +end + +registerotffeature { + name = "itlc", + description = "italic correction", + initializers = { + base = initializeitlc, + node = initializeitlc, + } +} + +registerafmfeature { + name = "itlc", + description = "italic correction", + initializers = { + base = initializeitlc, + node = initializeitlc, + } +} + +local function initializetextitalics(tfmdata,value) -- yes no delay + local delay = value == "delay" + tfmdata.properties.textitalics = delay and true or value + tfmdata.properties.delaytextitalics = delay +end + +registerotffeature { + name = "textitalics", + description = "use alternative text italic correction", + initializers = { + base = initializetextitalics, + node = initializetextitalics, + } +} + +registerafmfeature { + name = "textitalics", + description = "use alternative text italic correction", + initializers = { + base = initializetextitalics, + node = initializetextitalics, + } +} + +-- slanting + +local function initializeslant(tfmdata,value) + value = tonumber(value) + if not value then + value = 0 + elseif value > 1 then + value = 1 + elseif value < -1 then + value = -1 + end + tfmdata.parameters.slantfactor = value +end + +registerotffeature { + name = "slant", + description = "slant glyphs", + initializers = { + base = initializeslant, + node = initializeslant, + } +} + +registerafmfeature { + name = "slant", + description = "slant glyphs", + initializers = { + base = initializeslant, + node = initializeslant, + } +} + +local function initializeextend(tfmdata,value) + value = tonumber(value) + if not value then + value = 0 + elseif value > 10 then + value = 10 + elseif value < -10 then + value = -10 + end + tfmdata.parameters.extendfactor = value +end + +registerotffeature { + name = "extend", + description = "scale glyphs horizontally", + initializers = { + base = initializeextend, + node = initializeextend, + } +} + +registerafmfeature { + name = "extend", + description = "scale glyphs horizontally", + initializers = { + base = initializeextend, + node = initializeextend, + } +} + +-- For Wolfgang Schuster: +-- +-- \definefontfeature[thisway][default][script=hang,language=zhs,dimensions={2,2,2}] +-- \definedfont[file:kozminpr6nregular*thisway] +-- +-- For the moment we don't mess with the descriptions. 
+ +local function manipulatedimensions(tfmdata,key,value) + if type(value) == "string" and value ~= "" then + local characters = tfmdata.characters + local parameters = tfmdata.parameters + local emwidth = parameters.quad + local exheight = parameters.xheight + local spec = settings_to_array(value) + local width = (spec[1] or 0) * emwidth + local height = (spec[2] or 0) * exheight + local depth = (spec[3] or 0) * exheight + if width > 0 then + local resources = tfmdata.resources + local additions = { } + local private = resources.private + for unicode, old_c in next, characters do + local oldwidth = old_c.width + if oldwidth ~= width then + -- Defining the tables in one step is more efficient + -- than adding fields later. + private = private + 1 + local new_c + local commands = { + { "right", (width - oldwidth) / 2 }, + { "slot", 1, private }, + } + if height > 0 then + if depth > 0 then + new_c = { + width = width, + height = height, + depth = depth, + commands = commands, + } + else + new_c = { + width = width, + height = height, + commands = commands, + } + end + else + if depth > 0 then + new_c = { + width = width, + depth = depth, + commands = commands, + } + else + new_c = { + width = width, + commands = commands, + } + end + end + setmetatableindex(new_c,old_c) + characters[unicode] = new_c + additions[private] = old_c + end + end + for k, v in next, additions do + characters[k] = v + end + resources.private = private + elseif height > 0 and depth > 0 then + for unicode, old_c in next, characters do + old_c.height = height + old_c.depth = depth + end + elseif height > 0 then + for unicode, old_c in next, characters do + old_c.height = height + end + elseif depth > 0 then + for unicode, old_c in next, characters do + old_c.depth = depth + end + end + end +end + +registerotffeature { + name = "dimensions", + description = "force dimensions", + manipulators = { + base = manipulatedimensions, + node = manipulatedimensions, + } +} + +-- for zhichu chen (see mailing list archive): we might add a few more variants +-- in due time +-- +-- \definefontfeature[boxed][default][boundingbox=yes] % paleblue +-- +-- maybe: +-- +-- \definecolor[DummyColor][s=.75,t=.5,a=1] {\DummyColor test} \nopdfcompression +-- +-- local gray = { "special", "pdf: /Tr1 gs .75 g" } +-- local black = { "special", "pdf: /Tr0 gs 0 g" } + +local push = { "push" } +local pop = { "pop" } +local gray = { "special", "pdf: .75 g" } +local black = { "special", "pdf: 0 g" } + +local downcache = { } -- handy for huge cjk fonts +local rulecache = { } -- handy for huge cjk fonts + +setmetatableindex(downcache,function(t,d) + local v = { "down", d } + t[d] = v + return v +end) + +setmetatableindex(rulecache,function(t,h) + local v = { } + t[h] = v + setmetatableindex(v,function(t,w) + local v = { "rule", h, w } + t[w] = v + return v + end) + return v +end) + +local function showboundingbox(tfmdata,key,value) + if value then + local vfspecials = backends.pdf.tables.vfspecials + local gray = vfspecials and (vfspecials.rulecolors[value] or vfspecials.rulecolors.palegray) or gray + local characters = tfmdata.characters + local resources = tfmdata.resources + local additions = { } + local private = resources.private + for unicode, old_c in next, characters do + private = private + 1 + local width = old_c.width or 0 + local height = old_c.height or 0 + local depth = old_c.depth or 0 + local new_c + if depth == 0 then + new_c = { + width = width, + height = height, + commands = { + push, + gray, + rulecache[height][width], + black, + 
pop, + { "slot", 1, private }, + } + } + else + new_c = { + width = width, + height = height, + depth = depth, + commands = { + push, + downcache[depth], + gray, + rulecache[height+depth][width], + black, + pop, + { "slot", 1, private }, + } + } + end + setmetatableindex(new_c,old_c) + characters[unicode] = new_c + additions[private] = old_c + end + for k, v in next, additions do + characters[k] = v + end + resources.private = private + end +end + +registerotffeature { + name = "boundingbox", + description = "show boundingbox", + manipulators = { + base = showboundingbox, + node = showboundingbox, + } +} + +-- -- historic stuff, move from font-ota (handled differently, typo-rep) +-- +-- local delete_node = nodes.delete +-- local fontdata = fonts.hashes.identifiers +-- +-- local nodecodes = nodes.nodecodes +-- local glyph_code = nodecodes.glyph +-- +-- local strippables = allocate() +-- fonts.strippables = strippables +-- +-- strippables.joiners = table.tohash { +-- 0x200C, -- zwnj +-- 0x200D, -- zwj +-- } +-- +-- strippables.all = table.tohash { +-- 0x000AD, 0x017B4, 0x017B5, 0x0200B, 0x0200C, 0x0200D, 0x0200E, 0x0200F, 0x0202A, 0x0202B, +-- 0x0202C, 0x0202D, 0x0202E, 0x02060, 0x02061, 0x02062, 0x02063, 0x0206A, 0x0206B, 0x0206C, +-- 0x0206D, 0x0206E, 0x0206F, 0x0FEFF, 0x1D173, 0x1D174, 0x1D175, 0x1D176, 0x1D177, 0x1D178, +-- 0x1D179, 0x1D17A, 0xE0001, 0xE0020, 0xE0021, 0xE0022, 0xE0023, 0xE0024, 0xE0025, 0xE0026, +-- 0xE0027, 0xE0028, 0xE0029, 0xE002A, 0xE002B, 0xE002C, 0xE002D, 0xE002E, 0xE002F, 0xE0030, +-- 0xE0031, 0xE0032, 0xE0033, 0xE0034, 0xE0035, 0xE0036, 0xE0037, 0xE0038, 0xE0039, 0xE003A, +-- 0xE003B, 0xE003C, 0xE003D, 0xE003E, 0xE003F, 0xE0040, 0xE0041, 0xE0042, 0xE0043, 0xE0044, +-- 0xE0045, 0xE0046, 0xE0047, 0xE0048, 0xE0049, 0xE004A, 0xE004B, 0xE004C, 0xE004D, 0xE004E, +-- 0xE004F, 0xE0050, 0xE0051, 0xE0052, 0xE0053, 0xE0054, 0xE0055, 0xE0056, 0xE0057, 0xE0058, +-- 0xE0059, 0xE005A, 0xE005B, 0xE005C, 0xE005D, 0xE005E, 0xE005F, 0xE0060, 0xE0061, 0xE0062, +-- 0xE0063, 0xE0064, 0xE0065, 0xE0066, 0xE0067, 0xE0068, 0xE0069, 0xE006A, 0xE006B, 0xE006C, +-- 0xE006D, 0xE006E, 0xE006F, 0xE0070, 0xE0071, 0xE0072, 0xE0073, 0xE0074, 0xE0075, 0xE0076, +-- 0xE0077, 0xE0078, 0xE0079, 0xE007A, 0xE007B, 0xE007C, 0xE007D, 0xE007E, 0xE007F, +-- } +-- +-- strippables[true] = strippables.joiners +-- +-- local function processformatters(head,font) +-- local subset = fontdata[font].shared.features.formatters +-- local vector = subset and strippables[subset] +-- if vector then +-- local current, done = head, false +-- while current do +-- if current.id == glyph_code and current.subtype<256 and current.font == font then +-- local char = current.char +-- if vector[char] then +-- head, current = delete_node(head,current) +-- done = true +-- else +-- current = current.next +-- end +-- else +-- current = current.next +-- end +-- end +-- return head, done +-- else +-- return head, false +-- end +-- end +-- +-- registerotffeature { +-- name = "formatters", +-- description = "hide formatting characters", +-- methods = { +-- base = processformatters, +-- node = processformatters, +-- } +-- } + +-- a handy helper (might change or be moved to another namespace) + +local new_special = nodes.pool.special +local new_glyph = nodes.pool.glyph +local hpack_node = node.hpack + +function fonts.helpers.addprivate(tfmdata,name,characterdata) + local properties = tfmdata.properties + local privates = properties.privates + local lastprivate = properties.lastprivate + if lastprivate then + lastprivate = lastprivate + 1 + 
else + lastprivate = 0xE000 + end + if not privates then + privates = { } + properties.privates = privates + end + if name then + privates[name] = lastprivate + end + properties.lastprivate = lastprivate + tfmdata.characters[lastprivate] = characterdata + if properties.finalized then + properties.lateprivates = true + end + return lastprivate +end + +function fonts.helpers.getprivatenode(tfmdata,name) + local properties = tfmdata.properties + local privates = properties and properties.privates + if privates then + local p = privates[name] + if p then + local char = tfmdata.characters[p] + local commands = char.commands + if commands then + local fake = hpack_node(new_special(commands[1][2])) + fake.width = char.width + fake.height = char.height + fake.depth = char.depth + return fake + else + -- todo: set current attribibutes + return new_glyph(properties.id,p) + end + end + end +end + +function fonts.helpers.hasprivate(tfmdata,name) + local properties = tfmdata.properties + local privates = properties and properties.privates + return privates and privates[name] or false +end + +function commands.getprivatechar(name) + context(fonts.helpers.getprivatenode(fontdata[font.current()],name)) +end diff --git a/tex/context/base/font-fbk.lua b/tex/context/base/font-fbk.lua index 48e2167e6..32e5d16de 100644 --- a/tex/context/base/font-fbk.lua +++ b/tex/context/base/font-fbk.lua @@ -1,304 +1,304 @@ -if not modules then modules = { } end modules ['font-fbk'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local cos, tan, rad, format = math.cos, math.tan, math.rad, string.format -local utfbyte, utfchar = utf.byte, utf.char - ---[[ldx-- -

This is very experimental code!

---ldx]]-- - -local trace_combining_visualize = false trackers.register("fonts.composing.visualize", function(v) trace_combining_visualize = v end) -local trace_combining_define = false trackers.register("fonts.composing.define", function(v) trace_combining_define = v end) - -trackers.register("fonts.combining", "fonts.composing.define") -- for old times sake (and manuals) -trackers.register("fonts.combining.all", "fonts.composing.*") -- for old times sake (and manuals) - -local report_combining = logs.reporter("fonts","combining") - -local force_combining = false -- just for demo purposes (see mk) - -local allocate = utilities.storage.allocate - -local fonts = fonts -local handlers = fonts.handlers -local constructors = fonts.constructors - -local registerotffeature = handlers.otf.features.register -local registerafmfeature = handlers.afm.features.register - -local unicodecharacters = characters.data -local unicodefallbacks = characters.fallbacks - -local vf = handlers.vf -local commands = vf.combiner.commands -local push = vf.predefined.push -local pop = vf.predefined.pop - -local force_composed = false -local cache = { } -- we could make these weak -local fraction = 0.15 -- 30 units for lucida - -local function composecharacters(tfmdata) - -- this assumes that slot 1 is self, there will be a proper self some day - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - local parameters = tfmdata.parameters - local properties = tfmdata.properties - local Xdesc = descriptions[utfbyte("X")] - local xdesc = descriptions[utfbyte("x")] - if Xdesc and xdesc then - local scale = parameters.factor or 1 - local deltaxheight = scale * (Xdesc.boundingbox[4] - xdesc.boundingbox[4]) - local extraxheight = fraction * deltaxheight -- maybe use compose value - local italicfactor = parameters.italicfactor or 0 - local vfspecials = backends.tables.vfspecials --brr - local red, green, blue, black - if trace_combining_visualize then - red = vfspecials.red - green = vfspecials.green - blue = vfspecials.blue - black = vfspecials.black - end - local compose = fonts.goodies.getcompositions(tfmdata) - if compose and trace_combining_visualize then - report_combining("using compose information from goodies file") - end - local done = false - for i, c in next, unicodecharacters do -- loop over all characters ... 
not that efficient but a specials hash takes memory - if force_combining or not characters[i] then - local s = c.specials - if s and s[1] == 'char' then - local chr = s[2] - local charschr = characters[chr] - if charschr then - local cc = c.category - if cc == 'll' or cc == 'lu' or cc == 'lt' then -- characters.is_letter[cc] - local acc = s[3] - local t = { } - for k, v in next, charschr do - if k ~= "commands" then - t[k] = v - end - end - local charsacc = characters[acc] - --~ local ca = charsacc.category - --~ if ca == "mn" then - --~ -- mark nonspacing - --~ elseif ca == "ms" then - --~ -- mark spacing combining - --~ elseif ca == "me" then - --~ -- mark enclosing - --~ else - if not charsacc then -- fallback accents - acc = unicodefallbacks[acc] - charsacc = acc and characters[acc] - end - local chr_t = cache[chr] - if not chr_t then - chr_t = {"slot", 1, chr} - cache[chr] = chr_t - end - if charsacc then - if trace_combining_define then - report_combining("composed %C, base %C, accent %C",i,chr,acc) - end - local acc_t = cache[acc] - if not acc_t then - acc_t = {"slot", 1, acc} - cache[acc] = acc_t - end - local cb = descriptions[chr].boundingbox - local ab = descriptions[acc].boundingbox - -- todo: adapt height - if cb and ab then - local c_llx, c_lly, c_urx, c_ury = scale*cb[1], scale*cb[2], scale*cb[3], scale*cb[4] - local a_llx, a_lly, a_urx, a_ury = scale*ab[1], scale*ab[2], scale*ab[3], scale*ab[4] - local done = false - if compose then - local i_compose = compose[i] - local i_anchored = i_compose and i_compose.anchored - if i_anchored then - local c_compose = compose[chr] - local a_compose = compose[acc] - local c_anchors = c_compose and c_compose.anchors - local a_anchors = a_compose and a_compose.anchors - if c_anchors and a_anchors then - local c_anchor = c_anchors[i_anchored] - local a_anchor = a_anchors[i_anchored] - if c_anchor and a_anchor then - local cx = c_anchor.x or 0 - local cy = c_anchor.y or 0 - local ax = a_anchor.x or 0 - local ay = a_anchor.y or 0 - local dx = cx - ax - local dy = cy - ay - if trace_combining_define then - report_combining("building %C from %C and %C",i,chr,acc) - report_combining(" boundingbox:") - report_combining(" chr: %3i %3i %3i %3i",unpack(cb)) - report_combining(" acc: %3i %3i %3i %3i",unpack(ab)) - report_combining(" anchors:") - report_combining(" chr: %3i %3i",cx,cy) - report_combining(" acc: %3i %3i",ax,ay) - report_combining(" delta:") - report_combining(" %s: %3i %3i",i_anchored,dx,dy) - end - if trace_combining_visualize then - t.commands = { push, {"right", scale*dx}, {"down",-scale*dy}, green, acc_t, black, pop, chr_t } - -- t.commands = { - -- push, {"right", scale*cx}, {"down", -scale*cy}, red, {"rule",10000,10000,10000}, pop, - -- push, {"right", scale*ax}, {"down", -scale*ay}, blue, {"rule",10000,10000,10000}, pop, - -- push, {"right", scale*dx}, {"down", -scale*dy}, green, acc_t, black, pop, chr_t - -- } - else - t.commands = { push, {"right", scale*dx}, {"down",-scale*dy}, acc_t, pop, chr_t } - end - done = true - end - end - end - end - if not done then - -- can be sped up for scale == 1 - local dx = (c_urx - a_urx - a_llx + c_llx)/2 - local dd = (c_urx - c_llx)*italicfactor - if a_ury < 0 then - if trace_combining_visualize then - t.commands = { push, {"right", dx-dd}, red, acc_t, black, pop, chr_t } - else - t.commands = { push, {"right", dx-dd}, acc_t, pop, chr_t } - end - elseif c_ury > a_lly then -- messy test - local dy - if compose then - -- experimental: we could use sx but all that testing - -- takes time and 
code - dy = compose[i] - if dy then - dy = dy.dy - end - if not dy then - dy = compose[acc] - if dy then - dy = dy and dy.dy - end - end - if not dy then - dy = compose.dy - end - if not dy then - dy = - deltaxheight + extraxheight - elseif dy > -1.5 and dy < 1.5 then - -- we assume a fraction of (percentage) - dy = - dy * deltaxheight - else - -- we assume fontunits (value smaller than 2 make no sense) - dy = - dy * scale - end - else - dy = - deltaxheight + extraxheight - end - if trace_combining_visualize then - t.commands = { push, {"right", dx+dd}, {"down", dy}, green, acc_t, black, pop, chr_t } - else - t.commands = { push, {"right", dx+dd}, {"down", dy}, acc_t, pop, chr_t } - end - else - if trace_combining_visualize then - t.commands = { push, {"right", dx+dd}, blue, acc_t, black, pop, chr_t } - else - t.commands = { push, {"right", dx+dd}, acc_t, pop, chr_t } - end - end - end - else - t.commands = { chr_t } -- else index mess - end - else - if trace_combining_define then - report_combining("%C becomes simplfied %C",i,chr) - end - t.commands = { chr_t } -- else index mess - end - done = true - characters[i] = t - local d = { } - for k, v in next, descriptions[chr] do - d[k] = v - end - descriptions[i] = d - end - end - end - end - end - if done then - properties.virtualized = true - end - end -end - -registerotffeature { - name = "compose", - description = "additional composed characters", - manipulators = { - base = composecharacters, - node = composecharacters, - } -} - -registerafmfeature { - name = "compose", - description = "additional composed characters", - manipulators = { - base = composecharacters, - node = composecharacters, - } -} - -vf.helpers.composecharacters = composecharacters - --- This installs the builder into the regular virtual font builder, --- which only makes sense as demo. - -commands["compose.trace.enable"] = function() - trace_combining_visualize = true -end - -commands["compose.trace.disable"] = function() - trace_combining_visualize = false -end - -commands["compose.force.enable"] = function() - force_combining = true -end - -commands["compose.force.disable"] = function() - force_combining = false -end - -commands["compose.trace.set"] = function(g,v) - if v[2] == nil then - trace_combining_visualize = true - else - trace_combining_visualize = v[2] - end -end - -commands["compose.apply"] = function(g,v) - composecharacters(g) -end - --- vf builder - --- {'special', 'pdf: q ' .. s .. ' 0 0 '.. s .. ' 0 0 cm'}, --- {'special', 'pdf: q 1 0 0 1 ' .. -w .. ' ' .. -h .. ' cm'}, --- {'special', 'pdf: /Fm\XX\space Do'}, --- {'special', 'pdf: Q'}, --- {'special', 'pdf: Q'}, +if not modules then modules = { } end modules ['font-fbk'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local cos, tan, rad, format = math.cos, math.tan, math.rad, string.format +local utfbyte, utfchar = utf.byte, utf.char + +--[[ldx-- +

<p>This is very experimental code!</p>

+--ldx]]-- + +local trace_combining_visualize = false trackers.register("fonts.composing.visualize", function(v) trace_combining_visualize = v end) +local trace_combining_define = false trackers.register("fonts.composing.define", function(v) trace_combining_define = v end) + +trackers.register("fonts.combining", "fonts.composing.define") -- for old times sake (and manuals) +trackers.register("fonts.combining.all", "fonts.composing.*") -- for old times sake (and manuals) + +local report_combining = logs.reporter("fonts","combining") + +local force_combining = false -- just for demo purposes (see mk) + +local allocate = utilities.storage.allocate + +local fonts = fonts +local handlers = fonts.handlers +local constructors = fonts.constructors + +local registerotffeature = handlers.otf.features.register +local registerafmfeature = handlers.afm.features.register + +local unicodecharacters = characters.data +local unicodefallbacks = characters.fallbacks + +local vf = handlers.vf +local commands = vf.combiner.commands +local push = vf.predefined.push +local pop = vf.predefined.pop + +local force_composed = false +local cache = { } -- we could make these weak +local fraction = 0.15 -- 30 units for lucida + +local function composecharacters(tfmdata) + -- this assumes that slot 1 is self, there will be a proper self some day + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local parameters = tfmdata.parameters + local properties = tfmdata.properties + local Xdesc = descriptions[utfbyte("X")] + local xdesc = descriptions[utfbyte("x")] + if Xdesc and xdesc then + local scale = parameters.factor or 1 + local deltaxheight = scale * (Xdesc.boundingbox[4] - xdesc.boundingbox[4]) + local extraxheight = fraction * deltaxheight -- maybe use compose value + local italicfactor = parameters.italicfactor or 0 + local vfspecials = backends.tables.vfspecials --brr + local red, green, blue, black + if trace_combining_visualize then + red = vfspecials.red + green = vfspecials.green + blue = vfspecials.blue + black = vfspecials.black + end + local compose = fonts.goodies.getcompositions(tfmdata) + if compose and trace_combining_visualize then + report_combining("using compose information from goodies file") + end + local done = false + for i, c in next, unicodecharacters do -- loop over all characters ... 
not that efficient but a specials hash takes memory + if force_combining or not characters[i] then + local s = c.specials + if s and s[1] == 'char' then + local chr = s[2] + local charschr = characters[chr] + if charschr then + local cc = c.category + if cc == 'll' or cc == 'lu' or cc == 'lt' then -- characters.is_letter[cc] + local acc = s[3] + local t = { } + for k, v in next, charschr do + if k ~= "commands" then + t[k] = v + end + end + local charsacc = characters[acc] + --~ local ca = charsacc.category + --~ if ca == "mn" then + --~ -- mark nonspacing + --~ elseif ca == "ms" then + --~ -- mark spacing combining + --~ elseif ca == "me" then + --~ -- mark enclosing + --~ else + if not charsacc then -- fallback accents + acc = unicodefallbacks[acc] + charsacc = acc and characters[acc] + end + local chr_t = cache[chr] + if not chr_t then + chr_t = {"slot", 1, chr} + cache[chr] = chr_t + end + if charsacc then + if trace_combining_define then + report_combining("composed %C, base %C, accent %C",i,chr,acc) + end + local acc_t = cache[acc] + if not acc_t then + acc_t = {"slot", 1, acc} + cache[acc] = acc_t + end + local cb = descriptions[chr].boundingbox + local ab = descriptions[acc].boundingbox + -- todo: adapt height + if cb and ab then + local c_llx, c_lly, c_urx, c_ury = scale*cb[1], scale*cb[2], scale*cb[3], scale*cb[4] + local a_llx, a_lly, a_urx, a_ury = scale*ab[1], scale*ab[2], scale*ab[3], scale*ab[4] + local done = false + if compose then + local i_compose = compose[i] + local i_anchored = i_compose and i_compose.anchored + if i_anchored then + local c_compose = compose[chr] + local a_compose = compose[acc] + local c_anchors = c_compose and c_compose.anchors + local a_anchors = a_compose and a_compose.anchors + if c_anchors and a_anchors then + local c_anchor = c_anchors[i_anchored] + local a_anchor = a_anchors[i_anchored] + if c_anchor and a_anchor then + local cx = c_anchor.x or 0 + local cy = c_anchor.y or 0 + local ax = a_anchor.x or 0 + local ay = a_anchor.y or 0 + local dx = cx - ax + local dy = cy - ay + if trace_combining_define then + report_combining("building %C from %C and %C",i,chr,acc) + report_combining(" boundingbox:") + report_combining(" chr: %3i %3i %3i %3i",unpack(cb)) + report_combining(" acc: %3i %3i %3i %3i",unpack(ab)) + report_combining(" anchors:") + report_combining(" chr: %3i %3i",cx,cy) + report_combining(" acc: %3i %3i",ax,ay) + report_combining(" delta:") + report_combining(" %s: %3i %3i",i_anchored,dx,dy) + end + if trace_combining_visualize then + t.commands = { push, {"right", scale*dx}, {"down",-scale*dy}, green, acc_t, black, pop, chr_t } + -- t.commands = { + -- push, {"right", scale*cx}, {"down", -scale*cy}, red, {"rule",10000,10000,10000}, pop, + -- push, {"right", scale*ax}, {"down", -scale*ay}, blue, {"rule",10000,10000,10000}, pop, + -- push, {"right", scale*dx}, {"down", -scale*dy}, green, acc_t, black, pop, chr_t + -- } + else + t.commands = { push, {"right", scale*dx}, {"down",-scale*dy}, acc_t, pop, chr_t } + end + done = true + end + end + end + end + if not done then + -- can be sped up for scale == 1 + local dx = (c_urx - a_urx - a_llx + c_llx)/2 + local dd = (c_urx - c_llx)*italicfactor + if a_ury < 0 then + if trace_combining_visualize then + t.commands = { push, {"right", dx-dd}, red, acc_t, black, pop, chr_t } + else + t.commands = { push, {"right", dx-dd}, acc_t, pop, chr_t } + end + elseif c_ury > a_lly then -- messy test + local dy + if compose then + -- experimental: we could use sx but all that testing + -- takes time and 
code + dy = compose[i] + if dy then + dy = dy.dy + end + if not dy then + dy = compose[acc] + if dy then + dy = dy and dy.dy + end + end + if not dy then + dy = compose.dy + end + if not dy then + dy = - deltaxheight + extraxheight + elseif dy > -1.5 and dy < 1.5 then + -- we assume a fraction of (percentage) + dy = - dy * deltaxheight + else + -- we assume fontunits (value smaller than 2 make no sense) + dy = - dy * scale + end + else + dy = - deltaxheight + extraxheight + end + if trace_combining_visualize then + t.commands = { push, {"right", dx+dd}, {"down", dy}, green, acc_t, black, pop, chr_t } + else + t.commands = { push, {"right", dx+dd}, {"down", dy}, acc_t, pop, chr_t } + end + else + if trace_combining_visualize then + t.commands = { push, {"right", dx+dd}, blue, acc_t, black, pop, chr_t } + else + t.commands = { push, {"right", dx+dd}, acc_t, pop, chr_t } + end + end + end + else + t.commands = { chr_t } -- else index mess + end + else + if trace_combining_define then + report_combining("%C becomes simplfied %C",i,chr) + end + t.commands = { chr_t } -- else index mess + end + done = true + characters[i] = t + local d = { } + for k, v in next, descriptions[chr] do + d[k] = v + end + descriptions[i] = d + end + end + end + end + end + if done then + properties.virtualized = true + end + end +end + +registerotffeature { + name = "compose", + description = "additional composed characters", + manipulators = { + base = composecharacters, + node = composecharacters, + } +} + +registerafmfeature { + name = "compose", + description = "additional composed characters", + manipulators = { + base = composecharacters, + node = composecharacters, + } +} + +vf.helpers.composecharacters = composecharacters + +-- This installs the builder into the regular virtual font builder, +-- which only makes sense as demo. + +commands["compose.trace.enable"] = function() + trace_combining_visualize = true +end + +commands["compose.trace.disable"] = function() + trace_combining_visualize = false +end + +commands["compose.force.enable"] = function() + force_combining = true +end + +commands["compose.force.disable"] = function() + force_combining = false +end + +commands["compose.trace.set"] = function(g,v) + if v[2] == nil then + trace_combining_visualize = true + else + trace_combining_visualize = v[2] + end +end + +commands["compose.apply"] = function(g,v) + composecharacters(g) +end + +-- vf builder + +-- {'special', 'pdf: q ' .. s .. ' 0 0 '.. s .. ' 0 0 cm'}, +-- {'special', 'pdf: q 1 0 0 1 ' .. -w .. ' ' .. -h .. 
' cm'}, +-- {'special', 'pdf: /Fm\XX\space Do'}, +-- {'special', 'pdf: Q'}, +-- {'special', 'pdf: Q'}, diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua index 6332f40b0..4eb57bfa4 100644 --- a/tex/context/base/font-gds.lua +++ b/tex/context/base/font-gds.lua @@ -1,752 +1,752 @@ -if not modules then modules = { } end modules ['font-gds'] = { - version = 1.000, - comment = "companion to font-gds.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- depends on ctx - -local type, next, tonumber = type, next, tonumber -local gmatch, format, lower, find, splitup = string.gmatch, string.format, string.lower, string.find, string.splitup -local texsp = tex.sp - -local fonts, nodes, attributes, node = fonts, nodes, attributes, node - -local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end) -local report_goodies = logs.reporter("fonts","goodies") - -local allocate = utilities.storage.allocate - -local otf = fonts.handlers.otf -local afm = fonts.handlers.afm -local tfm = fonts.handlers.tfm - -local registerotffeature = otf.features.register -local registerafmfeature = afm.features.register -local registertfmfeature = tfm.features.register - -local fontgoodies = fonts.goodies or { } -fonts.goodies = fontgoodies - -local typefaces = fonts.typefaces or allocate() -fonts.typefaces = typefaces - -local data = fontgoodies.data or allocate() -fontgoodies.data = data - -local list = fontgoodies.list or { } -fontgoodies.list = list -- no allocate as we want to see what is there - -local addotffeature = otf.enhancers.addfeature - -local findfile = resolvers.findfile - -function fontgoodies.report(what,trace,goodies) - if trace_goodies or trace then - local whatever = goodies[what] - if whatever then - report_goodies("goodie %a found in %a",what,goodies.name) - end - end -end - -local function loadgoodies(filename) -- maybe a merge is better - local goodies = data[filename] -- we assume no suffix is given - if goodies ~= nil then - -- found or tagged unfound - elseif type(filename) == "string" then - local fullname = findfile(file.addsuffix(filename,"lfg")) or "" -- prefered suffix - if fullname == "" then - fullname = findfile(file.addsuffix(filename,"lua")) or "" -- fallback suffix - end - if fullname == "" then - report_goodies("goodie file '%s.lfg' is not found",filename) - data[filename] = false -- signal for not found - else - goodies = dofile(fullname) or false - if not goodies then - report_goodies("goodie file %a is invalid",fullname) - return nil - elseif trace_goodies then - report_goodies("goodie file %a is loaded",fullname) - end - goodies.name = goodies.name or "no name" - for name, fnc in next, list do - fnc(goodies) - end - goodies.initialized = true - data[filename] = goodies - end - end - return goodies -end - -function fontgoodies.register(name,fnc) -- will be a proper sequencer - list[name] = fnc -end - -fontgoodies.load = loadgoodies - --- register goodies file - -local function setgoodies(tfmdata,value) - local goodies = tfmdata.goodies - if not goodies then -- actually an error - goodies = { } - tfmdata.goodies = goodies - end - for filename in gmatch(value,"[^, ]+") do - -- we need to check for duplicates - local ok = loadgoodies(filename) - if ok then - if trace_goodies then - report_goodies("assigning goodie %a",filename) - end - goodies[#goodies+1] = ok - end - end -end - --- this will be split into good-* 
files and this file might become good-ini.lua - --- featuresets - -local function flattenedfeatures(t,tt) - -- first set value dominates - local tt = tt or { } - for i=1,#t do - local ti = t[i] - if type(ti) == "table" then - flattenedfeatures(ti,tt) - elseif tt[ti] == nil then - tt[ti] = true - end - end - for k, v in next, t do - if type(k) ~= "number" then -- not tonumber(k) - if type(v) == "table" then - flattenedfeatures(v,tt) - elseif tt[k] == nil then - tt[k] = v - end - end - end - return tt -end - --- fonts.features.flattened = flattenedfeatures - -local function prepare_features(goodies,name,set) - if set then - local ff = flattenedfeatures(set) - local fullname = goodies.name .. "::" .. name - local n, s = fonts.specifiers.presetcontext(fullname,"",ff) - goodies.featuresets[name] = s -- set - if trace_goodies then - report_goodies("feature set %a gets number %a and name %a",name,n,fullname) - end - return n - end -end - -fontgoodies.prepare_features = prepare_features - -local function initialize(goodies,tfmdata) - local featuresets = goodies.featuresets - local goodiesname = goodies.name - if featuresets then - if trace_goodies then - report_goodies("checking featuresets in %a",goodies.name) - end - for name, set in next, featuresets do - prepare_features(goodies,name,set) - end - end -end - -fontgoodies.register("featureset",initialize) - -local function setfeatureset(tfmdata,set,features) - local goodies = tfmdata.goodies -- shared ? - if goodies then - local properties = tfmdata.properties - local what - for i=1,#goodies do - -- last one wins - local g = goodies[i] - what = g.featuresets and g.featuresets[set] or what - end - if what then - for feature, value in next, what do - if features[feature] == nil then - features[feature] = value - end - end - properties.mode = what.mode or properties.mode - end - end -end - --- postprocessors (we could hash processor and share code) - -function fontgoodies.registerpostprocessor(tfmdata,f,prepend) - local postprocessors = tfmdata.postprocessors - if not postprocessors then - tfmdata.postprocessors = { f } - elseif prepend then - table.insert(postprocessors,f,1) - else - table.insert(postprocessors,f) - end -end - -local function setpostprocessor(tfmdata,processor) - local goodies = tfmdata.goodies - if goodies and type(processor) == "string" then - local found = { } - local asked = utilities.parsers.settings_to_array(processor) - for i=1,#goodies do - local g = goodies[i] - local p = g.postprocessors - if p then - for i=1,#asked do - local a = asked[i] - local f = p[a] - if type(f) == "function" then - found[a] = f - end - end - end - end - local postprocessors = tfmdata.postprocessors or { } - for i=1,#asked do - local a = asked[i] - local f = found[a] - if f then - postprocessors[#postprocessors+1] = f - end - end - if #postprocessors > 0 then - tfmdata.postprocessors = postprocessors - end - end -end - --- colorschemes - -local colorschemes = fontgoodies.colorschemes or allocate { } -fontgoodies.colorschemes = colorschemes -colorschemes.data = colorschemes.data or { } - -local function setcolorscheme(tfmdata,scheme) - if type(scheme) == "string" then - local goodies = tfmdata.goodies - -- todo : check for already defined in shared - if goodies then - local what - for i=1,#goodies do - -- last one counts - local g = goodies[i] - what = g.colorschemes and g.colorschemes[scheme] or what - end - if type(what) == "table" then - -- this is font bound but we can share them if needed - -- just as we could hash the conversions (per font) - 
local hash = tfmdata.resources.unicodes - local reverse = { } - local characters = tfmdata.characters - for i=1,#what do - local w = what[i] - for j=1,#w do - local name = w[j] - if name == "*" then - -- inefficient but only used for tracing anyway - for _, unicode in next, hash do - reverse[unicode] = i - end - elseif type(name) == "number" then - reverse[name] = i - elseif find(name,":") then - local start, stop = splitup(name,":") - start = tonumber(start) - stop = tonumber(stop) - if start and stop then - -- limited usage: we only deal with non reassigned - -- maybe some day I'll also support the ones with a - -- tounicode in this range - for unicode=start,stop do - if characters[unicode] then - reverse[unicode] = i - end - end - end - else - local unicode = hash[name] - if unicode then - reverse[unicode] = i - end - end - end - end - tfmdata.properties.colorscheme = reverse - return - end - end - end - tfmdata.properties.colorscheme = false -end - -local fontdata = fonts.hashes.identifiers -local setnodecolor = nodes.tracers.colors.set -local traverse_id = node.traverse_id -local a_colorscheme = attributes.private('colorscheme') -local glyph = node.id("glyph") - -function colorschemes.coloring(head) - local lastfont, lastscheme - local done = false - for n in traverse_id(glyph,head) do - local a = n[a_colorscheme] - if a then - local f = n.font - if f ~= lastfont then - lastscheme, lastfont = fontdata[f].properties.colorscheme, f - end - if lastscheme then - local sc = lastscheme[n.char] - if sc then - done = true - setnodecolor(n,"colorscheme:"..a..":"..sc) -- slow - end - end - end - end - return head, done -end - -function colorschemes.enable() - nodes.tasks.appendaction("processors","fonts","fonts.goodies.colorschemes.coloring") - function colorschemes.enable() end -end - -local function setextrafeatures(tfmdata) - local goodies = tfmdata.goodies - if goodies then - for i=1,#goodies do - local g = goodies[i] - local f = g.features - if f then - for feature, specification in next, f do - addotffeature(tfmdata.shared.rawdata,feature,specification) - registerotffeature { - name = feature, - description = format("extra: %s",feature) - } - end - end - end - end -end - --- installation (collected to keep the overview) -- also for type 1 - -registerotffeature { - name = "goodies", - description = "goodies on top of built in features", - initializers = { - position = 1, - base = setgoodies, - node = setgoodies, - } -} - -registerotffeature { - name = "extrafeatures", - description = "extra features", - default = true, - initializers = { - position = 2, - base = setextrafeatures, - node = setextrafeatures, - } -} - -registerotffeature { - name = "featureset", - description = "goodie feature set", - initializers = { - position = 3, - base = setfeatureset, - node = setfeatureset, - } -} - -registerotffeature { - name = "colorscheme", - description = "goodie color scheme", - initializers = { - base = setcolorscheme, - node = setcolorscheme, - } -} - -registerotffeature { - name = "postprocessor", - description = "goodie postprocessor", - initializers = { - base = setpostprocessor, - node = setpostprocessor, - } -} - --- afm - -registerafmfeature { - name = "goodies", - description = "goodies on top of built in features", - initializers = { - position = 1, - base = setgoodies, - node = setgoodies, - } -} - --- tfm - -registertfmfeature { - name = "goodies", - description = "goodies on top of built in features", - initializers = { - position = 1, - base = setgoodies, - node = setgoodies, - } -} 
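-- A minimal sketch, not part of the patch above: the goodies loader in this
-- hunk expects an "lfg" file that simply returns a Lua table. The file name
-- and the set and scheme names below are invented for illustration; the keys
-- (name, featuresets, colorschemes) and the accepted colorscheme entries
-- ("*", glyph names, slot numbers, "start:stop" ranges) follow the code above.

-- demo.lfg (hypothetical)

return {
    name = "demo",
    featuresets = {
        -- array entries are flattened to feature = true, hash entries keep
        -- their value (see flattenedfeatures above)
        oldstyle = { "onum", liga = "yes" },
    },
    colorschemes = {
        default = {
            [1] = { "*" },                          -- everything gets color 1
            [2] = { "a", 0x0041, "0x00C0:0x00FF" }, -- glyph name, slot, range
        },
    },
}

-- Such a file would be loaded with fontgoodies.load("demo") or, at the TeX
-- end, with \loadfontgoodies[demo]; the "featureset" and "colorscheme" font
-- features registered above then pick up these definitions.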
- --- experiment, we have to load the definitions immediately as they precede --- the definition so they need to be initialized in the typescript - -local function initialize(goodies) - local mathgoodies = goodies.mathematics - if mathgoodies then - local virtuals = mathgoodies.virtuals - local mapfiles = mathgoodies.mapfiles - local maplines = mathgoodies.maplines - if virtuals then - for name, specification in next, virtuals do - -- beware, they are all constructed - mathematics.makefont(name,specification,goodies) - end - end - if mapfiles then - for i=1,#mapfiles do - fonts.mappings.loadfile(mapfiles[i]) -- todo: backend function - end - end - if maplines then - for i=1,#maplines do - fonts.mappings.loadline(maplines[i]) -- todo: backend function - end - end - end -end - -fontgoodies.register("mathematics", initialize) - --- the following takes care of explicit file specifications --- --- files = { --- name = "antykwapoltawskiego", --- list = { --- ["AntPoltLtCond-Regular.otf"] = { --- -- name = "antykwapoltawskiego", --- style = "regular", --- weight = "light", --- width = "condensed", --- }, --- }, --- } - --- math italics - --- it would be nice to have a \noitalics\font option - -local function initialize(tfmdata) - local goodies = tfmdata.goodies - if goodies then - local shared = tfmdata.shared - for i=1,#goodies do - local mathgoodies = goodies[i].mathematics - local mathitalics = mathgoodies and mathgoodies.italics - if mathitalics then - local properties = tfmdata.properties - mathitalics = mathitalics[file.nameonly(properties.name)] or mathitalics - if mathitalics then - if trace_goodies then - report_goodies("loading mathitalics for font %a",properties.name) - end - local corrections = mathitalics.corrections - local defaultfactor = mathitalics.defaultfactor - local disableengine = mathitalics.disableengine - properties.hasitalics = true - properties.mathitalic_defaultfactor = defaultfactor -- we inherit outer one anyway (name will change) - if properties.mathitalics == nil then - properties.mathitalics = disableengine - end - if corrections then - -- As we want to set italic_correction (the context one) we need a - -- postprocessor instead of messing with the (unscaled) descriptions. 
- fontgoodies.registerpostprocessor(tfmdata, function(tfmdata) -- this is another tfmdata (a copy) - -- better make a helper so that we have less code being defined - local properties = tfmdata.properties - local parameters = tfmdata.parameters - local characters = tfmdata.characters - properties.hasitalics = true - properties.mathitalic_defaultfactor = defaultfactor - properties.mathitalic_defaultvalue = defaultfactor * parameters.quad - if properties.mathitalics == nil then - properties.mathitalics = disableengine - end - if trace_goodies then - report_goodies("assigning mathitalics for font %a",properties.name) - end - local mathitalics = properties.mathitalics - local quad = parameters.quad - local hfactor = parameters.hfactor - for k, v in next, corrections do - local c = characters[k] - if v > -1 and v < 1 then - v = v * quad - else - v = v * hfactor - end - c.italic_correction = v -- for context - if mathitalics then - c.italic = v -- for tex - else - c.italic = nil - end - end - end) - end - return -- maybe not as these can accumulate - end - end - end - end -end - -registerotffeature { - name = "mathitalics", - description = "additional math italic corrections", - -- default = true, - initializers = { - base = initialize, - node = initialize, - } -} - --- fontgoodies.register("mathitalics", initialize) - --- files - -local function initialize(goodies) - local files = goodies.files - if files then - fonts.names.register(files) - end -end - -fontgoodies.register("files", initialize) - --- some day we will have a define command and then we can also do some --- proper tracing --- --- fonts.typefaces["antykwapoltawskiego-condensed"] = { --- shortcut = "rm", --- shape = "serif", --- fontname = "antykwapoltawskiego", --- normalweight = "light", --- boldweight = "medium", --- width = "condensed", --- size = "default", --- features = "default", --- } - -local function initialize(goodies) - local typefaces = goodies.typefaces - if typefaces then - local ft = fonts.typefaces - for k, v in next, typefaces do - ft[k] = v - end - end -end - -fontgoodies.register("typefaces", initialize) - -local compositions = { } - -function fontgoodies.getcompositions(tfmdata) - return compositions[file.nameonly(tfmdata.properties.filename or "")] -end - -local function initialize(goodies) - local gc = goodies.compositions - if gc then - for k, v in next, gc do - compositions[k] = v - end - end -end - -fontgoodies.register("compositions", initialize) - --- extra treatments (on top of defaults): \loadfontgoodies[mytreatments] - -local treatmentdata = fonts.treatments.data - -local function initialize(goodies) - local treatments = goodies.treatments - if treatments then - for name, data in next, treatments do - treatmentdata[name] = data -- always wins - end - end -end - -fontgoodies.register("treatments", initialize) - -local filenames = fontgoodies.filenames or allocate() -fontgoodies.filenames = filenames - -local filedata = filenames.data or allocate() -filenames.data = filedata - -local function initialize(goodies) -- design sizes are registered global - local fn = goodies.filenames - if fn then - for usedname, alternativenames in next, fn do - filedata[usedname] = alternativenames - end - end -end - -fontgoodies.register("filenames", initialize) - -function fontgoodies.filenames.resolve(name) - local fd = filedata[name] - if fd and findfile(name) == "" then - for i=1,#fd do - local fn = fd[i] - if findfile(fn) ~= "" then - return fn - end - end - else - -- no lookup, just use the regular mechanism - end - 
return name -end - -local designsizes = fontgoodies.designsizes or allocate() -fontgoodies.designsizes = designsizes - -local designdata = designsizes.data or allocate() -designsizes.data = designdata - -local function initialize(goodies) -- design sizes are registered global - local gd = goodies.designsizes - if gd then - for name, data in next, gd do - local ranges = { } - for size, file in next, data do - if size ~= "default" then - ranges[#ranges+1] = { texsp(size), file } -- also lower(file) - end - end - table.sort(ranges,function(a,b) return a[1] < b[1] end) - designdata[lower(name)] = { -- overloads, doesn't merge! - default = data.default, - ranges = ranges, - } - end - end -end - -fontgoodies.register("designsizes", initialize) - -function fontgoodies.designsizes.register(name,size,specification) - local d = designdata[name] - if not d then - d = { - ranges = { }, - default = nil, -- so we have no default set - } - designdata[name] = d - end - if size == "default" then - d.default = specification - else - if type(size) == "string" then - size = texsp(size) - end - local ranges = d.ranges - ranges[#ranges+1] = { size, specification } - end -end - -function fontgoodies.designsizes.filename(name,spec,size) -- returns nil of no match - if spec and spec ~= "" then - local data = designdata[lower(name)] - if data then - if spec == "default" then - return data.default - elseif spec == "auto" then - local ranges = data.ranges - if ranges then - for i=1,#ranges do - local r = ranges[i] - if r[1] >= size then -- todo: rounding so maybe size - 100 - return r[2] - end - end - end - return data.default or (ranges and ranges[#ranges][2]) - end - end - end -end - --- The following file (husayni.lfg) is the experimental setup that we used --- for Idris font. For the moment we don't store this in the cache and quite --- probably these files sit in one of the paths: --- --- tex/context/fonts/goodies --- tex/fonts/goodies/context --- tex/fonts/data/foundry/collection --- --- see lfg files in distribution - --- interface - -commands.loadfontgoodies = fontgoodies.load -commands.enablefontcolorschemes = colorschemes.enable - --- weird place ... 
depends on math - -local function finalize(tfmdata,feature,value) - mathematics.overloaddimensions(tfmdata,tfmdata,value) -end - -registerotffeature { - name = "mathdimensions", - description = "manipulate math dimensions", - -- default = true, - manipulators = { - base = finalize, - node = finalize, - } -} +if not modules then modules = { } end modules ['font-gds'] = { + version = 1.000, + comment = "companion to font-gds.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- depends on ctx + +local type, next, tonumber = type, next, tonumber +local gmatch, format, lower, find, splitup = string.gmatch, string.format, string.lower, string.find, string.splitup +local texsp = tex.sp + +local fonts, nodes, attributes, node = fonts, nodes, attributes, node + +local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end) +local report_goodies = logs.reporter("fonts","goodies") + +local allocate = utilities.storage.allocate + +local otf = fonts.handlers.otf +local afm = fonts.handlers.afm +local tfm = fonts.handlers.tfm + +local registerotffeature = otf.features.register +local registerafmfeature = afm.features.register +local registertfmfeature = tfm.features.register + +local fontgoodies = fonts.goodies or { } +fonts.goodies = fontgoodies + +local typefaces = fonts.typefaces or allocate() +fonts.typefaces = typefaces + +local data = fontgoodies.data or allocate() +fontgoodies.data = data + +local list = fontgoodies.list or { } +fontgoodies.list = list -- no allocate as we want to see what is there + +local addotffeature = otf.enhancers.addfeature + +local findfile = resolvers.findfile + +function fontgoodies.report(what,trace,goodies) + if trace_goodies or trace then + local whatever = goodies[what] + if whatever then + report_goodies("goodie %a found in %a",what,goodies.name) + end + end +end + +local function loadgoodies(filename) -- maybe a merge is better + local goodies = data[filename] -- we assume no suffix is given + if goodies ~= nil then + -- found or tagged unfound + elseif type(filename) == "string" then + local fullname = findfile(file.addsuffix(filename,"lfg")) or "" -- prefered suffix + if fullname == "" then + fullname = findfile(file.addsuffix(filename,"lua")) or "" -- fallback suffix + end + if fullname == "" then + report_goodies("goodie file '%s.lfg' is not found",filename) + data[filename] = false -- signal for not found + else + goodies = dofile(fullname) or false + if not goodies then + report_goodies("goodie file %a is invalid",fullname) + return nil + elseif trace_goodies then + report_goodies("goodie file %a is loaded",fullname) + end + goodies.name = goodies.name or "no name" + for name, fnc in next, list do + fnc(goodies) + end + goodies.initialized = true + data[filename] = goodies + end + end + return goodies +end + +function fontgoodies.register(name,fnc) -- will be a proper sequencer + list[name] = fnc +end + +fontgoodies.load = loadgoodies + +-- register goodies file + +local function setgoodies(tfmdata,value) + local goodies = tfmdata.goodies + if not goodies then -- actually an error + goodies = { } + tfmdata.goodies = goodies + end + for filename in gmatch(value,"[^, ]+") do + -- we need to check for duplicates + local ok = loadgoodies(filename) + if ok then + if trace_goodies then + report_goodies("assigning goodie %a",filename) + end + goodies[#goodies+1] = ok + end + end +end + +-- this will be split into 
good-* files and this file might become good-ini.lua + +-- featuresets + +local function flattenedfeatures(t,tt) + -- first set value dominates + local tt = tt or { } + for i=1,#t do + local ti = t[i] + if type(ti) == "table" then + flattenedfeatures(ti,tt) + elseif tt[ti] == nil then + tt[ti] = true + end + end + for k, v in next, t do + if type(k) ~= "number" then -- not tonumber(k) + if type(v) == "table" then + flattenedfeatures(v,tt) + elseif tt[k] == nil then + tt[k] = v + end + end + end + return tt +end + +-- fonts.features.flattened = flattenedfeatures + +local function prepare_features(goodies,name,set) + if set then + local ff = flattenedfeatures(set) + local fullname = goodies.name .. "::" .. name + local n, s = fonts.specifiers.presetcontext(fullname,"",ff) + goodies.featuresets[name] = s -- set + if trace_goodies then + report_goodies("feature set %a gets number %a and name %a",name,n,fullname) + end + return n + end +end + +fontgoodies.prepare_features = prepare_features + +local function initialize(goodies,tfmdata) + local featuresets = goodies.featuresets + local goodiesname = goodies.name + if featuresets then + if trace_goodies then + report_goodies("checking featuresets in %a",goodies.name) + end + for name, set in next, featuresets do + prepare_features(goodies,name,set) + end + end +end + +fontgoodies.register("featureset",initialize) + +local function setfeatureset(tfmdata,set,features) + local goodies = tfmdata.goodies -- shared ? + if goodies then + local properties = tfmdata.properties + local what + for i=1,#goodies do + -- last one wins + local g = goodies[i] + what = g.featuresets and g.featuresets[set] or what + end + if what then + for feature, value in next, what do + if features[feature] == nil then + features[feature] = value + end + end + properties.mode = what.mode or properties.mode + end + end +end + +-- postprocessors (we could hash processor and share code) + +function fontgoodies.registerpostprocessor(tfmdata,f,prepend) + local postprocessors = tfmdata.postprocessors + if not postprocessors then + tfmdata.postprocessors = { f } + elseif prepend then + table.insert(postprocessors,f,1) + else + table.insert(postprocessors,f) + end +end + +local function setpostprocessor(tfmdata,processor) + local goodies = tfmdata.goodies + if goodies and type(processor) == "string" then + local found = { } + local asked = utilities.parsers.settings_to_array(processor) + for i=1,#goodies do + local g = goodies[i] + local p = g.postprocessors + if p then + for i=1,#asked do + local a = asked[i] + local f = p[a] + if type(f) == "function" then + found[a] = f + end + end + end + end + local postprocessors = tfmdata.postprocessors or { } + for i=1,#asked do + local a = asked[i] + local f = found[a] + if f then + postprocessors[#postprocessors+1] = f + end + end + if #postprocessors > 0 then + tfmdata.postprocessors = postprocessors + end + end +end + +-- colorschemes + +local colorschemes = fontgoodies.colorschemes or allocate { } +fontgoodies.colorschemes = colorschemes +colorschemes.data = colorschemes.data or { } + +local function setcolorscheme(tfmdata,scheme) + if type(scheme) == "string" then + local goodies = tfmdata.goodies + -- todo : check for already defined in shared + if goodies then + local what + for i=1,#goodies do + -- last one counts + local g = goodies[i] + what = g.colorschemes and g.colorschemes[scheme] or what + end + if type(what) == "table" then + -- this is font bound but we can share them if needed + -- just as we could hash the conversions (per 
font) + local hash = tfmdata.resources.unicodes + local reverse = { } + local characters = tfmdata.characters + for i=1,#what do + local w = what[i] + for j=1,#w do + local name = w[j] + if name == "*" then + -- inefficient but only used for tracing anyway + for _, unicode in next, hash do + reverse[unicode] = i + end + elseif type(name) == "number" then + reverse[name] = i + elseif find(name,":") then + local start, stop = splitup(name,":") + start = tonumber(start) + stop = tonumber(stop) + if start and stop then + -- limited usage: we only deal with non reassigned + -- maybe some day I'll also support the ones with a + -- tounicode in this range + for unicode=start,stop do + if characters[unicode] then + reverse[unicode] = i + end + end + end + else + local unicode = hash[name] + if unicode then + reverse[unicode] = i + end + end + end + end + tfmdata.properties.colorscheme = reverse + return + end + end + end + tfmdata.properties.colorscheme = false +end + +local fontdata = fonts.hashes.identifiers +local setnodecolor = nodes.tracers.colors.set +local traverse_id = node.traverse_id +local a_colorscheme = attributes.private('colorscheme') +local glyph = node.id("glyph") + +function colorschemes.coloring(head) + local lastfont, lastscheme + local done = false + for n in traverse_id(glyph,head) do + local a = n[a_colorscheme] + if a then + local f = n.font + if f ~= lastfont then + lastscheme, lastfont = fontdata[f].properties.colorscheme, f + end + if lastscheme then + local sc = lastscheme[n.char] + if sc then + done = true + setnodecolor(n,"colorscheme:"..a..":"..sc) -- slow + end + end + end + end + return head, done +end + +function colorschemes.enable() + nodes.tasks.appendaction("processors","fonts","fonts.goodies.colorschemes.coloring") + function colorschemes.enable() end +end + +local function setextrafeatures(tfmdata) + local goodies = tfmdata.goodies + if goodies then + for i=1,#goodies do + local g = goodies[i] + local f = g.features + if f then + for feature, specification in next, f do + addotffeature(tfmdata.shared.rawdata,feature,specification) + registerotffeature { + name = feature, + description = format("extra: %s",feature) + } + end + end + end + end +end + +-- installation (collected to keep the overview) -- also for type 1 + +registerotffeature { + name = "goodies", + description = "goodies on top of built in features", + initializers = { + position = 1, + base = setgoodies, + node = setgoodies, + } +} + +registerotffeature { + name = "extrafeatures", + description = "extra features", + default = true, + initializers = { + position = 2, + base = setextrafeatures, + node = setextrafeatures, + } +} + +registerotffeature { + name = "featureset", + description = "goodie feature set", + initializers = { + position = 3, + base = setfeatureset, + node = setfeatureset, + } +} + +registerotffeature { + name = "colorscheme", + description = "goodie color scheme", + initializers = { + base = setcolorscheme, + node = setcolorscheme, + } +} + +registerotffeature { + name = "postprocessor", + description = "goodie postprocessor", + initializers = { + base = setpostprocessor, + node = setpostprocessor, + } +} + +-- afm + +registerafmfeature { + name = "goodies", + description = "goodies on top of built in features", + initializers = { + position = 1, + base = setgoodies, + node = setgoodies, + } +} + +-- tfm + +registertfmfeature { + name = "goodies", + description = "goodies on top of built in features", + initializers = { + position = 1, + base = setgoodies, + node = 
setgoodies, + } +} + +-- experiment, we have to load the definitions immediately as they precede +-- the definition so they need to be initialized in the typescript + +local function initialize(goodies) + local mathgoodies = goodies.mathematics + if mathgoodies then + local virtuals = mathgoodies.virtuals + local mapfiles = mathgoodies.mapfiles + local maplines = mathgoodies.maplines + if virtuals then + for name, specification in next, virtuals do + -- beware, they are all constructed + mathematics.makefont(name,specification,goodies) + end + end + if mapfiles then + for i=1,#mapfiles do + fonts.mappings.loadfile(mapfiles[i]) -- todo: backend function + end + end + if maplines then + for i=1,#maplines do + fonts.mappings.loadline(maplines[i]) -- todo: backend function + end + end + end +end + +fontgoodies.register("mathematics", initialize) + +-- the following takes care of explicit file specifications +-- +-- files = { +-- name = "antykwapoltawskiego", +-- list = { +-- ["AntPoltLtCond-Regular.otf"] = { +-- -- name = "antykwapoltawskiego", +-- style = "regular", +-- weight = "light", +-- width = "condensed", +-- }, +-- }, +-- } + +-- math italics + +-- it would be nice to have a \noitalics\font option + +local function initialize(tfmdata) + local goodies = tfmdata.goodies + if goodies then + local shared = tfmdata.shared + for i=1,#goodies do + local mathgoodies = goodies[i].mathematics + local mathitalics = mathgoodies and mathgoodies.italics + if mathitalics then + local properties = tfmdata.properties + mathitalics = mathitalics[file.nameonly(properties.name)] or mathitalics + if mathitalics then + if trace_goodies then + report_goodies("loading mathitalics for font %a",properties.name) + end + local corrections = mathitalics.corrections + local defaultfactor = mathitalics.defaultfactor + local disableengine = mathitalics.disableengine + properties.hasitalics = true + properties.mathitalic_defaultfactor = defaultfactor -- we inherit outer one anyway (name will change) + if properties.mathitalics == nil then + properties.mathitalics = disableengine + end + if corrections then + -- As we want to set italic_correction (the context one) we need a + -- postprocessor instead of messing with the (unscaled) descriptions. 
+ fontgoodies.registerpostprocessor(tfmdata, function(tfmdata) -- this is another tfmdata (a copy) + -- better make a helper so that we have less code being defined + local properties = tfmdata.properties + local parameters = tfmdata.parameters + local characters = tfmdata.characters + properties.hasitalics = true + properties.mathitalic_defaultfactor = defaultfactor + properties.mathitalic_defaultvalue = defaultfactor * parameters.quad + if properties.mathitalics == nil then + properties.mathitalics = disableengine + end + if trace_goodies then + report_goodies("assigning mathitalics for font %a",properties.name) + end + local mathitalics = properties.mathitalics + local quad = parameters.quad + local hfactor = parameters.hfactor + for k, v in next, corrections do + local c = characters[k] + if v > -1 and v < 1 then + v = v * quad + else + v = v * hfactor + end + c.italic_correction = v -- for context + if mathitalics then + c.italic = v -- for tex + else + c.italic = nil + end + end + end) + end + return -- maybe not as these can accumulate + end + end + end + end +end + +registerotffeature { + name = "mathitalics", + description = "additional math italic corrections", + -- default = true, + initializers = { + base = initialize, + node = initialize, + } +} + +-- fontgoodies.register("mathitalics", initialize) + +-- files + +local function initialize(goodies) + local files = goodies.files + if files then + fonts.names.register(files) + end +end + +fontgoodies.register("files", initialize) + +-- some day we will have a define command and then we can also do some +-- proper tracing +-- +-- fonts.typefaces["antykwapoltawskiego-condensed"] = { +-- shortcut = "rm", +-- shape = "serif", +-- fontname = "antykwapoltawskiego", +-- normalweight = "light", +-- boldweight = "medium", +-- width = "condensed", +-- size = "default", +-- features = "default", +-- } + +local function initialize(goodies) + local typefaces = goodies.typefaces + if typefaces then + local ft = fonts.typefaces + for k, v in next, typefaces do + ft[k] = v + end + end +end + +fontgoodies.register("typefaces", initialize) + +local compositions = { } + +function fontgoodies.getcompositions(tfmdata) + return compositions[file.nameonly(tfmdata.properties.filename or "")] +end + +local function initialize(goodies) + local gc = goodies.compositions + if gc then + for k, v in next, gc do + compositions[k] = v + end + end +end + +fontgoodies.register("compositions", initialize) + +-- extra treatments (on top of defaults): \loadfontgoodies[mytreatments] + +local treatmentdata = fonts.treatments.data + +local function initialize(goodies) + local treatments = goodies.treatments + if treatments then + for name, data in next, treatments do + treatmentdata[name] = data -- always wins + end + end +end + +fontgoodies.register("treatments", initialize) + +local filenames = fontgoodies.filenames or allocate() +fontgoodies.filenames = filenames + +local filedata = filenames.data or allocate() +filenames.data = filedata + +local function initialize(goodies) -- design sizes are registered global + local fn = goodies.filenames + if fn then + for usedname, alternativenames in next, fn do + filedata[usedname] = alternativenames + end + end +end + +fontgoodies.register("filenames", initialize) + +function fontgoodies.filenames.resolve(name) + local fd = filedata[name] + if fd and findfile(name) == "" then + for i=1,#fd do + local fn = fd[i] + if findfile(fn) ~= "" then + return fn + end + end + else + -- no lookup, just use the regular mechanism + end + 
return name +end + +local designsizes = fontgoodies.designsizes or allocate() +fontgoodies.designsizes = designsizes + +local designdata = designsizes.data or allocate() +designsizes.data = designdata + +local function initialize(goodies) -- design sizes are registered global + local gd = goodies.designsizes + if gd then + for name, data in next, gd do + local ranges = { } + for size, file in next, data do + if size ~= "default" then + ranges[#ranges+1] = { texsp(size), file } -- also lower(file) + end + end + table.sort(ranges,function(a,b) return a[1] < b[1] end) + designdata[lower(name)] = { -- overloads, doesn't merge! + default = data.default, + ranges = ranges, + } + end + end +end + +fontgoodies.register("designsizes", initialize) + +function fontgoodies.designsizes.register(name,size,specification) + local d = designdata[name] + if not d then + d = { + ranges = { }, + default = nil, -- so we have no default set + } + designdata[name] = d + end + if size == "default" then + d.default = specification + else + if type(size) == "string" then + size = texsp(size) + end + local ranges = d.ranges + ranges[#ranges+1] = { size, specification } + end +end + +function fontgoodies.designsizes.filename(name,spec,size) -- returns nil of no match + if spec and spec ~= "" then + local data = designdata[lower(name)] + if data then + if spec == "default" then + return data.default + elseif spec == "auto" then + local ranges = data.ranges + if ranges then + for i=1,#ranges do + local r = ranges[i] + if r[1] >= size then -- todo: rounding so maybe size - 100 + return r[2] + end + end + end + return data.default or (ranges and ranges[#ranges][2]) + end + end + end +end + +-- The following file (husayni.lfg) is the experimental setup that we used +-- for Idris font. For the moment we don't store this in the cache and quite +-- probably these files sit in one of the paths: +-- +-- tex/context/fonts/goodies +-- tex/fonts/goodies/context +-- tex/fonts/data/foundry/collection +-- +-- see lfg files in distribution + +-- interface + +commands.loadfontgoodies = fontgoodies.load +commands.enablefontcolorschemes = colorschemes.enable + +-- weird place ... depends on math + +local function finalize(tfmdata,feature,value) + mathematics.overloaddimensions(tfmdata,tfmdata,value) +end + +registerotffeature { + name = "mathdimensions", + description = "manipulate math dimensions", + -- default = true, + manipulators = { + base = finalize, + node = finalize, + } +} diff --git a/tex/context/base/font-hsh.lua b/tex/context/base/font-hsh.lua index f5c80d705..c11f9a721 100644 --- a/tex/context/base/font-hsh.lua +++ b/tex/context/base/font-hsh.lua @@ -1,226 +1,226 @@ -if not modules then modules = { } end modules ['font-hsh'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local setmetatableindex = table.setmetatableindex -local currentfont = font.current -local allocate = utilities.storage.allocate - -local fonts = fonts -local hashes = fonts.hashes or allocate() -fonts.hashes = hashes - --- todo: autoallocate ... just create on the fly .. 
use constructors.keys (problem: plurals) - -local identifiers = hashes.identifiers or allocate() -local characters = hashes.characters or allocate() -- chardata -local descriptions = hashes.descriptions or allocate() -local parameters = hashes.parameters or allocate() -local properties = hashes.properties or allocate() -local resources = hashes.resources or allocate() -local spacings = hashes.spacings or allocate() -local spaces = hashes.spaces or allocate() -local quads = hashes.quads or allocate() -- maybe also spacedata -local xheights = hashes.xheights or allocate() -local csnames = hashes.csnames or allocate() -- namedata -local marks = hashes.marks or allocate() -local italics = hashes.italics or allocate() -local lastmathids = hashes.lastmathids or allocate() -local dynamics = hashes.dynamics or allocate() - -hashes.characters = characters -hashes.descriptions = descriptions -hashes.parameters = parameters -hashes.properties = properties -hashes.resources = resources -hashes.spacings = spacings -hashes.spaces = spaces -hashes.quads = quads hashes.emwidths = quads -hashes.xheights = xheights hashes.exheights = xheights -hashes.csnames = csnames -hashes.marks = marks -hashes.italics = italics -hashes.lastmathids = lastmathids -hashes.dynamics = dynamics - -local nulldata = allocate { - name = "nullfont", - characters = { }, - descriptions = { }, - properties = { }, - parameters = { -- lmromanregular @ 12pt - slantperpoint = 0, - spacing = { - width = 256377, - stretch = 128188, - shrink = 85459, - extra = 85459, - }, - quad = 786432, - xheight = 338952, - -- compatibility: - slant = 0, -- 1 - space = 256377, -- 2 - space_stretch = 128188, -- 3 - space_shrink = 85459, -- 4 - x_height = 338952, -- 5 - quad = 786432, -- 6 - extra_space = 85459, -- 7 - }, -} - -fonts.nulldata = nulldata - -fonts.constructors.enhanceparameters(nulldata.parameters) -- official copies for us - -setmetatableindex(identifiers, function(t,k) - return k == true and identifiers[currentfont()] or nulldata -end) - -setmetatableindex(characters, function(t,k) - if k == true then - return characters[currentfont()] - else - local characters = identifiers[k].characters - t[k] = characters - return characters - end -end) - -setmetatableindex(descriptions, function(t,k) - if k == true then - return descriptions[currentfont()] - else - local descriptions = identifiers[k].descriptions - t[k] = descriptions - return descriptions - end -end) - -setmetatableindex(parameters, function(t,k) - if k == true then - return parameters[currentfont()] - else - local parameters = identifiers[k].parameters - t[k] = parameters - return parameters - end -end) - -setmetatableindex(properties, function(t,k) - if k == true then - return properties[currentfont()] - else - local properties = identifiers[k].properties - t[k] = properties - return properties - end -end) - -setmetatableindex(resources, function(t,k) - if k == true then - return resources[currentfont()] - else - local shared = identifiers[k].shared - local rawdata = shared and shared.rawdata - local resources = rawdata and rawdata.resources - t[k] = resources or false -- better than resolving each time - return resources - end -end) - -setmetatableindex(quads, function(t,k) - if k == true then - return quads[currentfont()] - else - local parameters = parameters[k] - local quad = parameters and parameters.quad or 0 - t[k] = quad - return quad - end -end) - -local nospacing = { - width = 0, - stretch = 0, - shrink = 0, - extra = 0, -} - -setmetatableindex(spacings, function(t,k) - 
if k == true then - return spacings[currentfont()] - else - local parameters = parameters[k] - local spacing = parameters and parameters.spacing or nospacing - t[k] = spacing - return spacing - end -end) - -setmetatableindex(spaces, function(t,k) - if k == true then - return spaces[currentfont()] - else - local space = spacings[k].width - t[k] = space - return space - end -end) - -setmetatableindex(marks, function(t,k) - if k == true then - return marks[currentfont()] - else - local resources = identifiers[k].resources or { } - local marks = resources.marks or { } - t[k] = marks - return marks - end -end) - -setmetatableindex(xheights, function(t,k) - if k == true then - return xheights[currentfont()] - else - local parameters = parameters[k] - local xheight = parameters and parameters.xheight or 0 - t[k] = xheight - return xheight - end -end) - -setmetatableindex(italics, function(t,k) -- is test ! - if k == true then - return italics[currentfont()] - else - local properties = identifiers[k].properties - local hasitalics = properties and properties.hasitalics - if hasitalics then - hasitalics = characters[k] -- convenient return - else - hasitalics = false - end - t[k] = hasitalics - return hasitalics - end -end) - -setmetatableindex(dynamics, function(t,k) - if k == true then - return dynamics[currentfont()] - else - local shared = identifiers[k].shared - local dynamics = shared and shared.dynamics or false - t[k] = dynamics - return dynamics - end -end) - -function font.getfont(id) - return identifiers[id] -end +if not modules then modules = { } end modules ['font-hsh'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local setmetatableindex = table.setmetatableindex +local currentfont = font.current +local allocate = utilities.storage.allocate + +local fonts = fonts +local hashes = fonts.hashes or allocate() +fonts.hashes = hashes + +-- todo: autoallocate ... just create on the fly .. 
use constructors.keys (problem: plurals) + +local identifiers = hashes.identifiers or allocate() +local characters = hashes.characters or allocate() -- chardata +local descriptions = hashes.descriptions or allocate() +local parameters = hashes.parameters or allocate() +local properties = hashes.properties or allocate() +local resources = hashes.resources or allocate() +local spacings = hashes.spacings or allocate() +local spaces = hashes.spaces or allocate() +local quads = hashes.quads or allocate() -- maybe also spacedata +local xheights = hashes.xheights or allocate() +local csnames = hashes.csnames or allocate() -- namedata +local marks = hashes.marks or allocate() +local italics = hashes.italics or allocate() +local lastmathids = hashes.lastmathids or allocate() +local dynamics = hashes.dynamics or allocate() + +hashes.characters = characters +hashes.descriptions = descriptions +hashes.parameters = parameters +hashes.properties = properties +hashes.resources = resources +hashes.spacings = spacings +hashes.spaces = spaces +hashes.quads = quads hashes.emwidths = quads +hashes.xheights = xheights hashes.exheights = xheights +hashes.csnames = csnames +hashes.marks = marks +hashes.italics = italics +hashes.lastmathids = lastmathids +hashes.dynamics = dynamics + +local nulldata = allocate { + name = "nullfont", + characters = { }, + descriptions = { }, + properties = { }, + parameters = { -- lmromanregular @ 12pt + slantperpoint = 0, + spacing = { + width = 256377, + stretch = 128188, + shrink = 85459, + extra = 85459, + }, + quad = 786432, + xheight = 338952, + -- compatibility: + slant = 0, -- 1 + space = 256377, -- 2 + space_stretch = 128188, -- 3 + space_shrink = 85459, -- 4 + x_height = 338952, -- 5 + quad = 786432, -- 6 + extra_space = 85459, -- 7 + }, +} + +fonts.nulldata = nulldata + +fonts.constructors.enhanceparameters(nulldata.parameters) -- official copies for us + +setmetatableindex(identifiers, function(t,k) + return k == true and identifiers[currentfont()] or nulldata +end) + +setmetatableindex(characters, function(t,k) + if k == true then + return characters[currentfont()] + else + local characters = identifiers[k].characters + t[k] = characters + return characters + end +end) + +setmetatableindex(descriptions, function(t,k) + if k == true then + return descriptions[currentfont()] + else + local descriptions = identifiers[k].descriptions + t[k] = descriptions + return descriptions + end +end) + +setmetatableindex(parameters, function(t,k) + if k == true then + return parameters[currentfont()] + else + local parameters = identifiers[k].parameters + t[k] = parameters + return parameters + end +end) + +setmetatableindex(properties, function(t,k) + if k == true then + return properties[currentfont()] + else + local properties = identifiers[k].properties + t[k] = properties + return properties + end +end) + +setmetatableindex(resources, function(t,k) + if k == true then + return resources[currentfont()] + else + local shared = identifiers[k].shared + local rawdata = shared and shared.rawdata + local resources = rawdata and rawdata.resources + t[k] = resources or false -- better than resolving each time + return resources + end +end) + +setmetatableindex(quads, function(t,k) + if k == true then + return quads[currentfont()] + else + local parameters = parameters[k] + local quad = parameters and parameters.quad or 0 + t[k] = quad + return quad + end +end) + +local nospacing = { + width = 0, + stretch = 0, + shrink = 0, + extra = 0, +} + +setmetatableindex(spacings, function(t,k) + 
if k == true then + return spacings[currentfont()] + else + local parameters = parameters[k] + local spacing = parameters and parameters.spacing or nospacing + t[k] = spacing + return spacing + end +end) + +setmetatableindex(spaces, function(t,k) + if k == true then + return spaces[currentfont()] + else + local space = spacings[k].width + t[k] = space + return space + end +end) + +setmetatableindex(marks, function(t,k) + if k == true then + return marks[currentfont()] + else + local resources = identifiers[k].resources or { } + local marks = resources.marks or { } + t[k] = marks + return marks + end +end) + +setmetatableindex(xheights, function(t,k) + if k == true then + return xheights[currentfont()] + else + local parameters = parameters[k] + local xheight = parameters and parameters.xheight or 0 + t[k] = xheight + return xheight + end +end) + +setmetatableindex(italics, function(t,k) -- is test ! + if k == true then + return italics[currentfont()] + else + local properties = identifiers[k].properties + local hasitalics = properties and properties.hasitalics + if hasitalics then + hasitalics = characters[k] -- convenient return + else + hasitalics = false + end + t[k] = hasitalics + return hasitalics + end +end) + +setmetatableindex(dynamics, function(t,k) + if k == true then + return dynamics[currentfont()] + else + local shared = identifiers[k].shared + local dynamics = shared and shared.dynamics or false + t[k] = dynamics + return dynamics + end +end) + +function font.getfont(id) + return identifiers[id] +end diff --git a/tex/context/base/font-ini.lua b/tex/context/base/font-ini.lua index 884b22474..e902eca03 100644 --- a/tex/context/base/font-ini.lua +++ b/tex/context/base/font-ini.lua @@ -1,32 +1,32 @@ -if not modules then modules = { } end modules ['font-ini'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

-<p>Not much is happening here.</p>

---ldx]]-- - -local allocate = utilities.storage.allocate - -local report_defining = logs.reporter("fonts","defining") - -fonts = fonts or { } -local fonts = fonts - -fonts.hashes = { identifiers = allocate() } - -fonts.tables = fonts.tables or { } -fonts.helpers = fonts.helpers or { } -fonts.tracers = fonts.tracers or { } -- for the moment till we have move to moduledata -fonts.specifiers = fonts.specifiers or { } -- in format ! - -fonts.analyzers = { } -- not needed here -fonts.readers = { } -fonts.definers = { methods = { } } -fonts.loggers = { register = function() end } - -fontloader.totable = fontloader.to_table +if not modules then modules = { } end modules ['font-ini'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

+<p>Not much is happening here.</p>
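As a side note: font-ini.lua only allocates the bare fonts tables; the lazy behaviour of fonts.hashes comes from the font-hsh.lua hunk earlier in this patch, where each hash gets a table.setmetatableindex handler so that per-font data is computed on first access and cached, and indexing with true resolves against the current font. A minimal stand-alone sketch of that pattern in plain Lua (names and sample data below are illustrative, not the module's API):

    -- stand-in for table.setmetatableindex: call f(t,k) on a miss
    local function setmetatableindex(t, f)
        return setmetatable(t, { __index = f })
    end

    local currentid   = 7  -- pretend font.current() returns this id
    local identifiers = { [7] = { parameters = { xheight = 338952 } } }

    local xheights
    xheights = setmetatableindex({ }, function(t, k)
        if k == true then
            return xheights[currentid]             -- true means: the current font
        else
            local parameters = identifiers[k] and identifiers[k].parameters
            local xheight    = parameters and parameters.xheight or 0
            t[k] = xheight                         -- cache: computed only once per font
            return xheight
        end
    end)

    print(xheights[7], xheights[true])             -- 338952   338952

The cache-on-first-access approach keeps the tables empty until a font id is actually asked for, which is why font-hsh.lua can allocate all of them up front at almost no cost.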

+--ldx]]-- + +local allocate = utilities.storage.allocate + +local report_defining = logs.reporter("fonts","defining") + +fonts = fonts or { } +local fonts = fonts + +fonts.hashes = { identifiers = allocate() } + +fonts.tables = fonts.tables or { } +fonts.helpers = fonts.helpers or { } +fonts.tracers = fonts.tracers or { } -- for the moment till we have move to moduledata +fonts.specifiers = fonts.specifiers or { } -- in format ! + +fonts.analyzers = { } -- not needed here +fonts.readers = { } +fonts.definers = { methods = { } } +fonts.loggers = { register = function() end } + +fontloader.totable = fontloader.to_table diff --git a/tex/context/base/font-ldr.lua b/tex/context/base/font-ldr.lua index 175b4d0cc..46cd396f8 100644 --- a/tex/context/base/font-ldr.lua +++ b/tex/context/base/font-ldr.lua @@ -1,70 +1,70 @@ -if not modules then modules = { } end modules ['font-ldr'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This module provides an experimental replacement for fontloader.to_table --- but is not used that much. - -local fields = fontloader.fields - -if fields then - - local glyphfields - - local function get_glyphs(r) - local t = { } - local g = r.glyphs - for i=1,r.glyphmax-1 do - local gi = g[i] - if gi then - if not glyphfields then - glyphfields = fields(gi) - end - local h = { } - for i=1,#glyphfields do - local s = glyphfields[i] - h[s] = gi[s] - end - t[i] = h - end - end - return t - end - - local function to_table(r) - local f = fields(r) - if f then - local t = { } - for i=1,#f do - local fi = f[i] - local ri = r[fi] - if not ri then - -- skip - elseif fi == "glyphs" then - t.glyphs = get_glyphs(r) - elseif fi == "subfonts" then - t[fi] = ri - ri.glyphs = get_glyphs(ri) - else - t[fi] = r[fi] - end - end - return t - end - end - - -- currently glyphs, subfont-glyphs and the main table are userdata - - function fonts.to_table(raw) - return to_table(raw) - end - -else - - fonts.to_table = fontloader.to_table - -end +if not modules then modules = { } end modules ['font-ldr'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This module provides an experimental replacement for fontloader.to_table +-- but is not used that much. 
+ +local fields = fontloader.fields + +if fields then + + local glyphfields + + local function get_glyphs(r) + local t = { } + local g = r.glyphs + for i=1,r.glyphmax-1 do + local gi = g[i] + if gi then + if not glyphfields then + glyphfields = fields(gi) + end + local h = { } + for i=1,#glyphfields do + local s = glyphfields[i] + h[s] = gi[s] + end + t[i] = h + end + end + return t + end + + local function to_table(r) + local f = fields(r) + if f then + local t = { } + for i=1,#f do + local fi = f[i] + local ri = r[fi] + if not ri then + -- skip + elseif fi == "glyphs" then + t.glyphs = get_glyphs(r) + elseif fi == "subfonts" then + t[fi] = ri + ri.glyphs = get_glyphs(ri) + else + t[fi] = r[fi] + end + end + return t + end + end + + -- currently glyphs, subfont-glyphs and the main table are userdata + + function fonts.to_table(raw) + return to_table(raw) + end + +else + + fonts.to_table = fontloader.to_table + +end diff --git a/tex/context/base/font-log.lua b/tex/context/base/font-log.lua index 41da75378..3a2a1c5de 100644 --- a/tex/context/base/font-log.lua +++ b/tex/context/base/font-log.lua @@ -1,86 +1,86 @@ -if not modules then modules = { } end modules ['font-log'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next, format, lower, concat = next, string.format, string.lower, table.concat - -local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) -local report_defining = logs.reporter("fonts","defining") - -local basename = file.basename - -local fonts = fonts -local loggers = { } -fonts.loggers = loggers -local usedfonts = utilities.storage.allocate() ------ loadedfonts = utilities.storage.allocate() - ---[[ldx-- -

-<p>The following functions are used for reporting about the fonts
-used. The message itself is not that useful in regular runs but since
-we now have several readers it may be handy to know what reader is
-used for which font.</p>

---ldx]]-- - -function loggers.onetimemessage(font,char,message,reporter) - local tfmdata = fonts.hashes.identifiers[font] - local shared = tfmdata.shared - local messages = shared.messages - if not messages then - messages = { } - shared.messages = messages - end - local category = messages[message] - if not category then - category = { } - messages[message] = category - end - if not category[char] then - if not reporter then - reporter = report_defining - end - reporter("char %U in font %a with id %s: %s",char,tfmdata.properties.fullname,font,message) - category[char] = true - end -end - -function loggers.register(tfmdata,source,specification) -- save file name in spec here ! ! ! ! ! ! - if tfmdata and specification and specification.specification then - local name = lower(specification.name) - if trace_defining and not usedfonts[name] then - report_defining("registering %a as %a, used %a",file.basename(specification.name),source,file.basename(specification.filename)) - end - specification.source = source - -- loadedfonts[lower(specification.specification)] = specification - usedfonts[lower(specification.filename or specification.name)] = source - end -end - -function loggers.format(name) -- should be avoided - return usedfonts[name] or "unknown" -end - -statistics.register("loaded fonts", function() - if next(usedfonts) then - local t, n = { }, 0 - local treatmentdata = fonts.treatments.data - for name, used in table.sortedhash(usedfonts) do - n = n + 1 - local base = basename(name) - if complete then - t[n] = format("%s -> %s",used,base) - else - t[n] = base - end - local treatment = treatmentdata[base] - if treatment and treatment.comment then - t[n] = format("%s (%s)",t[n],treatment.comment) - end - end - return n > 0 and format("%s files: %s",n,concat(t,", ")) or "none" - end -end) +if not modules then modules = { } end modules ['font-log'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next, format, lower, concat = next, string.format, string.lower, table.concat + +local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) +local report_defining = logs.reporter("fonts","defining") + +local basename = file.basename + +local fonts = fonts +local loggers = { } +fonts.loggers = loggers +local usedfonts = utilities.storage.allocate() +----- loadedfonts = utilities.storage.allocate() + +--[[ldx-- +

+<p>The following functions are used for reporting about the fonts
+used. The message itself is not that useful in regular runs but since
+we now have several readers it may be handy to know what reader is
+used for which font.</p>
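The onetimemessage helper added below reports a given message for a given character at most once per font, by remembering what has already been reported in the font's shared table. A minimal sketch of that dedup pattern in plain Lua (illustrative names only):

    local shared = { }                      -- stands in for tfmdata.shared

    local function onetimemessage(char, message, report)
        local messages = shared.messages
        if not messages then
            messages = { }
            shared.messages = messages
        end
        local category = messages[message]
        if not category then
            category = { }
            messages[message] = category
        end
        if not category[char] then
            category[char] = true
            report(char, message)           -- reached only once per (message, char)
        end
    end

    onetimemessage(0x0153, "missing in font", print)  -- reported
    onetimemessage(0x0153, "missing in font", print)  -- silent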

+--ldx]]-- + +function loggers.onetimemessage(font,char,message,reporter) + local tfmdata = fonts.hashes.identifiers[font] + local shared = tfmdata.shared + local messages = shared.messages + if not messages then + messages = { } + shared.messages = messages + end + local category = messages[message] + if not category then + category = { } + messages[message] = category + end + if not category[char] then + if not reporter then + reporter = report_defining + end + reporter("char %U in font %a with id %s: %s",char,tfmdata.properties.fullname,font,message) + category[char] = true + end +end + +function loggers.register(tfmdata,source,specification) -- save file name in spec here ! ! ! ! ! ! + if tfmdata and specification and specification.specification then + local name = lower(specification.name) + if trace_defining and not usedfonts[name] then + report_defining("registering %a as %a, used %a",file.basename(specification.name),source,file.basename(specification.filename)) + end + specification.source = source + -- loadedfonts[lower(specification.specification)] = specification + usedfonts[lower(specification.filename or specification.name)] = source + end +end + +function loggers.format(name) -- should be avoided + return usedfonts[name] or "unknown" +end + +statistics.register("loaded fonts", function() + if next(usedfonts) then + local t, n = { }, 0 + local treatmentdata = fonts.treatments.data + for name, used in table.sortedhash(usedfonts) do + n = n + 1 + local base = basename(name) + if complete then + t[n] = format("%s -> %s",used,base) + else + t[n] = base + end + local treatment = treatmentdata[base] + if treatment and treatment.comment then + t[n] = format("%s (%s)",t[n],treatment.comment) + end + end + return n > 0 and format("%s files: %s",n,concat(t,", ")) or "none" + end +end) diff --git a/tex/context/base/font-lua.lua b/tex/context/base/font-lua.lua index 6fbbcf17e..27b40e5b8 100644 --- a/tex/context/base/font-lua.lua +++ b/tex/context/base/font-lua.lua @@ -1,46 +1,46 @@ -if not modules then modules = { } end modules ['font-lua'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) - -local report_lua = logs.reporter("fonts","lua loading") - -local fonts = fonts -local readers = fonts.readers -fonts.formats.lua = "lua" - --- we could add support for features here - -local function check_lua(specification,fullname) - -- standard tex file lookup - local fullname = resolvers.findfile(fullname) or "" - if fullname ~= "" then - local loader = loadfile(fullname) - loader = loader and loader() - return loader and loader(specification) - end -end - -readers.check_lua = check_lua - -function readers.lua(specification) - local original = specification.specification - if trace_defining then - report_lua("using lua reader for %a",original) - end - local fullname = specification.filename or "" - if fullname == "" then - local forced = specification.forced or "" - if forced ~= "" then - fullname = specification.name .. "." .. 
forced - else - fullname = specification.name - end - end - return check_lua(specification,fullname) -end +if not modules then modules = { } end modules ['font-lua'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) + +local report_lua = logs.reporter("fonts","lua loading") + +local fonts = fonts +local readers = fonts.readers +fonts.formats.lua = "lua" + +-- we could add support for features here + +local function check_lua(specification,fullname) + -- standard tex file lookup + local fullname = resolvers.findfile(fullname) or "" + if fullname ~= "" then + local loader = loadfile(fullname) + loader = loader and loader() + return loader and loader(specification) + end +end + +readers.check_lua = check_lua + +function readers.lua(specification) + local original = specification.specification + if trace_defining then + report_lua("using lua reader for %a",original) + end + local fullname = specification.filename or "" + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + fullname = specification.name .. "." .. forced + else + fullname = specification.name + end + end + return check_lua(specification,fullname) +end diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua index 6988b9b9e..864b43c24 100644 --- a/tex/context/base/font-map.lua +++ b/tex/context/base/font-map.lua @@ -1,329 +1,329 @@ -if not modules then modules = { } end modules ['font-map'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local tonumber = tonumber - -local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower -local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.match -local utfbyte = utf.byte -local floor = math.floor - -local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end) -local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_unimapping = v end) - -local report_fonts = logs.reporter("fonts","loading") -- not otf only - -local fonts = fonts or { } -local mappings = fonts.mappings or { } -fonts.mappings = mappings - ---[[ldx-- -

-<p>Eventually this code will disappear because map files are kind
-of obsolete. Some code may move to runtime or auxiliary modules.</p>

-

-<p>The name-to-unicode related code will stay of course.</p>

---ldx]]-- - -local function loadlumtable(filename) -- will move to font goodies - local lumname = file.replacesuffix(file.basename(filename),"lum") - local lumfile = resolvers.findfile(lumname,"map") or "" - if lumfile ~= "" and lfs.isfile(lumfile) then - if trace_loading or trace_mapping then - report_fonts("loading map table %a",lumfile) - end - lumunic = dofile(lumfile) - return lumunic, lumfile - end -end - -local hex = R("AF","09") -local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end -local hexsix = (hex*hex*hex*hex*hex*hex) / function(s) return tonumber(s,16) end -local dec = (R("09")^1) / tonumber -local period = P(".") -local unicode = P("uni") * (hexfour * (period + P(-1)) * Cc(false) + Ct(hexfour^1) * Cc(true)) -local ucode = P("u") * (hexsix * (period + P(-1)) * Cc(false) + Ct(hexsix ^1) * Cc(true)) -local index = P("index") * dec * Cc(false) - -local parser = unicode + ucode + index - -local parsers = { } - -local function makenameparser(str) - if not str or str == "" then - return parser - else - local p = parsers[str] - if not p then - p = P(str) * period * dec * Cc(false) - parsers[str] = p - end - return p - end -end - --- local parser = makenameparser("Japan1") --- local parser = makenameparser() --- local function test(str) --- local b, a = lpegmatch(parser,str) --- print((a and table.serialize(b)) or b) --- end --- test("a.sc") --- test("a") --- test("uni1234") --- test("uni1234.xx") --- test("uni12349876") --- test("u123400987600") --- test("index1234") --- test("Japan1.123") - -local function tounicode16(unicode,name) - if unicode < 0x10000 then - return format("%04X",unicode) - elseif unicode < 0x1FFFFFFFFF then - return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) - else - report_fonts("can't convert %a in %a into tounicode",unicode,name) - end -end - -local function tounicode16sequence(unicodes,name) - local t = { } - for l=1,#unicodes do - local unicode = unicodes[l] - if unicode < 0x10000 then - t[l] = format("%04X",unicode) - elseif unicode < 0x1FFFFFFFFF then - t[l] = format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) - else - report_fonts ("can't convert %a in %a into tounicode",unicode,name) - end - end - return concat(t) -end - -local function fromunicode16(str) - if #str == 4 then - return tonumber(str,16) - else - local l, r = match(str,"(....)(....)") - return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00 - end -end - --- Slightly slower: --- --- local p = C(4) * (C(4)^-1) / function(l,r) --- if r then --- return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00 --- else --- return tonumber(l,16) --- end --- end --- --- local function fromunicode16(str) --- return lpegmatch(p,str) --- end - --- This is quite a bit faster but at the cost of some memory but if we --- do this we will also use it elsewhere so let's not follow this route --- now. I might use this method in the plain variant (no caching there) --- but then I need a flag that distinguishes between code branches. 
--- --- local cache = { } --- --- function mappings.tounicode16(unicode) --- local s = cache[unicode] --- if not s then --- if unicode < 0x10000 then --- s = format("%04X",unicode) --- else --- s = format("%04X%04X",unicode/0x400+0xD800,unicode%0x400+0xDC00) --- end --- cache[unicode] = s --- end --- return s --- end - -mappings.loadlumtable = loadlumtable -mappings.makenameparser = makenameparser -mappings.tounicode16 = tounicode16 -mappings.tounicode16sequence = tounicode16sequence -mappings.fromunicode16 = fromunicode16 - -local separator = S("_.") -local other = C((1 - separator)^1) -local ligsplitter = Ct(other * (separator * other)^0) - ---~ print(table.serialize(lpegmatch(ligsplitter,"this"))) ---~ print(table.serialize(lpegmatch(ligsplitter,"this.that"))) ---~ print(table.serialize(lpegmatch(ligsplitter,"japan1.123"))) ---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more"))) ---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that"))) - -function mappings.addtounicode(data,filename) - local resources = data.resources - local properties = data.properties - local descriptions = data.descriptions - local unicodes = resources.unicodes - if not unicodes then - return - end - -- we need to move this code - unicodes['space'] = unicodes['space'] or 32 - unicodes['hyphen'] = unicodes['hyphen'] or 45 - unicodes['zwj'] = unicodes['zwj'] or 0x200D - unicodes['zwnj'] = unicodes['zwnj'] or 0x200C - -- the tounicode mapping is sparse and only needed for alternatives - local private = fonts.constructors.privateoffset - local unknown = format("%04X",utfbyte("?")) - local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context - local tounicode = { } - local originals = { } - resources.tounicode = tounicode - resources.originals = originals - local lumunic, uparser, oparser - local cidinfo, cidnames, cidcodes, usedmap - if false then -- will become an option - lumunic = loadlumtable(filename) - lumunic = lumunic and lumunic.tounicode - end - -- - cidinfo = properties.cidinfo - usedmap = cidinfo and fonts.cid.getmap(cidinfo) - -- - if usedmap then - oparser = usedmap and makenameparser(cidinfo.ordering) - cidnames = usedmap.names - cidcodes = usedmap.unicodes - end - uparser = makenameparser() - local ns, nl = 0, 0 - for unic, glyph in next, descriptions do - local index = glyph.index - local name = glyph.name - if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then - local unicode = lumunic and lumunic[name] or unicodevector[name] - if unicode then - originals[index] = unicode - tounicode[index] = tounicode16(unicode,name) - ns = ns + 1 - end - -- cidmap heuristics, beware, there is no guarantee for a match unless - -- the chain resolves - if (not unicode) and usedmap then - local foundindex = lpegmatch(oparser,name) - if foundindex then - unicode = cidcodes[foundindex] -- name to number - if unicode then - originals[index] = unicode - tounicode[index] = tounicode16(unicode,name) - ns = ns + 1 - else - local reference = cidnames[foundindex] -- number to name - if reference then - local foundindex = lpegmatch(oparser,reference) - if foundindex then - unicode = cidcodes[foundindex] - if unicode then - originals[index] = unicode - tounicode[index] = tounicode16(unicode,name) - ns = ns + 1 - end - end - if not unicode or unicode == "" then - local foundcodes, multiple = lpegmatch(uparser,reference) - if foundcodes then - originals[index] = foundcodes - if multiple then - tounicode[index] = 
tounicode16sequence(foundcodes) - nl = nl + 1 - unicode = true - else - tounicode[index] = tounicode16(foundcodes,name) - ns = ns + 1 - unicode = foundcodes - end - end - end - end - end - end - end - -- a.whatever or a_b_c.whatever or a_b_c (no numbers) - if not unicode or unicode == "" then - local split = lpegmatch(ligsplitter,name) - local nplit = split and #split or 0 - if nplit >= 2 then - local t, n = { }, 0 - for l=1,nplit do - local base = split[l] - local u = unicodes[base] or unicodevector[base] - if not u then - break - elseif type(u) == "table" then - n = n + 1 - t[n] = u[1] - else - n = n + 1 - t[n] = u - end - end - if n == 0 then -- done then - -- nothing - elseif n == 1 then - originals[index] = t[1] - tounicode[index] = tounicode16(t[1],name) - else - originals[index] = t - tounicode[index] = tounicode16sequence(t) - end - nl = nl + 1 - unicode = true - else - -- skip: already checked and we don't want privates here - end - end - -- last resort (we might need to catch private here as well) - if not unicode or unicode == "" then - local foundcodes, multiple = lpegmatch(uparser,name) - if foundcodes then - if multiple then - originals[index] = foundcodes - tounicode[index] = tounicode16sequence(foundcodes,name) - nl = nl + 1 - unicode = true - else - originals[index] = foundcodes - tounicode[index] = tounicode16(foundcodes,name) - ns = ns + 1 - unicode = foundcodes - end - end - end - -- if not unicode then - -- originals[index] = 0xFFFD - -- tounicode[index] = "FFFD" - -- end - end - end - if trace_mapping then - for unic, glyph in table.sortedhash(descriptions) do - local name = glyph.name - local index = glyph.index - local toun = tounicode[index] - if toun then - report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun) - else - report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) - end - end - end - if trace_loading and (ns > 0 or nl > 0) then - report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) - end -end +if not modules then modules = { } end modules ['font-map'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local tonumber = tonumber + +local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower +local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.match +local utfbyte = utf.byte +local floor = math.floor + +local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end) +local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_unimapping = v end) + +local report_fonts = logs.reporter("fonts","loading") -- not otf only + +local fonts = fonts or { } +local mappings = fonts.mappings or { } +fonts.mappings = mappings + +--[[ldx-- +

+<p>Eventually this code will disappear because map files are kind
+of obsolete. Some code may move to runtime or auxiliary modules.</p>

+

+<p>The name-to-unicode related code will stay of course.</p>
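The tounicode16 and tounicode16sequence helpers below build the hex strings that end up in a PDF ToUnicode CMap; code points above 0xFFFF are written as two 16-bit values. For comparison, this is the standard UTF-16 surrogate-pair encoding in plain Lua (a sketch only; the helpers in this hunk use their own arithmetic):

    local floor, format = math.floor, string.format

    local function tounicodehex(u)
        if u < 0x10000 then
            return format("%04X", u)
        else
            local v  = u - 0x10000
            local hi = 0xD800 + floor(v / 0x400)   -- high surrogate
            local lo = 0xDC00 + v % 0x400          -- low surrogate
            return format("%04X%04X", hi, lo)
        end
    end

    print(tounicodehex(0x0041))   -- 0041
    print(tounicodehex(0x1D49E))  -- D835DC9E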

+--ldx]]-- + +local function loadlumtable(filename) -- will move to font goodies + local lumname = file.replacesuffix(file.basename(filename),"lum") + local lumfile = resolvers.findfile(lumname,"map") or "" + if lumfile ~= "" and lfs.isfile(lumfile) then + if trace_loading or trace_mapping then + report_fonts("loading map table %a",lumfile) + end + lumunic = dofile(lumfile) + return lumunic, lumfile + end +end + +local hex = R("AF","09") +local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end +local hexsix = (hex*hex*hex*hex*hex*hex) / function(s) return tonumber(s,16) end +local dec = (R("09")^1) / tonumber +local period = P(".") +local unicode = P("uni") * (hexfour * (period + P(-1)) * Cc(false) + Ct(hexfour^1) * Cc(true)) +local ucode = P("u") * (hexsix * (period + P(-1)) * Cc(false) + Ct(hexsix ^1) * Cc(true)) +local index = P("index") * dec * Cc(false) + +local parser = unicode + ucode + index + +local parsers = { } + +local function makenameparser(str) + if not str or str == "" then + return parser + else + local p = parsers[str] + if not p then + p = P(str) * period * dec * Cc(false) + parsers[str] = p + end + return p + end +end + +-- local parser = makenameparser("Japan1") +-- local parser = makenameparser() +-- local function test(str) +-- local b, a = lpegmatch(parser,str) +-- print((a and table.serialize(b)) or b) +-- end +-- test("a.sc") +-- test("a") +-- test("uni1234") +-- test("uni1234.xx") +-- test("uni12349876") +-- test("u123400987600") +-- test("index1234") +-- test("Japan1.123") + +local function tounicode16(unicode,name) + if unicode < 0x10000 then + return format("%04X",unicode) + elseif unicode < 0x1FFFFFFFFF then + return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end +end + +local function tounicode16sequence(unicodes,name) + local t = { } + for l=1,#unicodes do + local unicode = unicodes[l] + if unicode < 0x10000 then + t[l] = format("%04X",unicode) + elseif unicode < 0x1FFFFFFFFF then + t[l] = format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",unicode,name) + end + end + return concat(t) +end + +local function fromunicode16(str) + if #str == 4 then + return tonumber(str,16) + else + local l, r = match(str,"(....)(....)") + return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00 + end +end + +-- Slightly slower: +-- +-- local p = C(4) * (C(4)^-1) / function(l,r) +-- if r then +-- return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00 +-- else +-- return tonumber(l,16) +-- end +-- end +-- +-- local function fromunicode16(str) +-- return lpegmatch(p,str) +-- end + +-- This is quite a bit faster but at the cost of some memory but if we +-- do this we will also use it elsewhere so let's not follow this route +-- now. I might use this method in the plain variant (no caching there) +-- but then I need a flag that distinguishes between code branches. 
+-- +-- local cache = { } +-- +-- function mappings.tounicode16(unicode) +-- local s = cache[unicode] +-- if not s then +-- if unicode < 0x10000 then +-- s = format("%04X",unicode) +-- else +-- s = format("%04X%04X",unicode/0x400+0xD800,unicode%0x400+0xDC00) +-- end +-- cache[unicode] = s +-- end +-- return s +-- end + +mappings.loadlumtable = loadlumtable +mappings.makenameparser = makenameparser +mappings.tounicode16 = tounicode16 +mappings.tounicode16sequence = tounicode16sequence +mappings.fromunicode16 = fromunicode16 + +local separator = S("_.") +local other = C((1 - separator)^1) +local ligsplitter = Ct(other * (separator * other)^0) + +--~ print(table.serialize(lpegmatch(ligsplitter,"this"))) +--~ print(table.serialize(lpegmatch(ligsplitter,"this.that"))) +--~ print(table.serialize(lpegmatch(ligsplitter,"japan1.123"))) +--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more"))) +--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that"))) + +function mappings.addtounicode(data,filename) + local resources = data.resources + local properties = data.properties + local descriptions = data.descriptions + local unicodes = resources.unicodes + if not unicodes then + return + end + -- we need to move this code + unicodes['space'] = unicodes['space'] or 32 + unicodes['hyphen'] = unicodes['hyphen'] or 45 + unicodes['zwj'] = unicodes['zwj'] or 0x200D + unicodes['zwnj'] = unicodes['zwnj'] or 0x200C + -- the tounicode mapping is sparse and only needed for alternatives + local private = fonts.constructors.privateoffset + local unknown = format("%04X",utfbyte("?")) + local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context + local tounicode = { } + local originals = { } + resources.tounicode = tounicode + resources.originals = originals + local lumunic, uparser, oparser + local cidinfo, cidnames, cidcodes, usedmap + if false then -- will become an option + lumunic = loadlumtable(filename) + lumunic = lumunic and lumunic.tounicode + end + -- + cidinfo = properties.cidinfo + usedmap = cidinfo and fonts.cid.getmap(cidinfo) + -- + if usedmap then + oparser = usedmap and makenameparser(cidinfo.ordering) + cidnames = usedmap.names + cidcodes = usedmap.unicodes + end + uparser = makenameparser() + local ns, nl = 0, 0 + for unic, glyph in next, descriptions do + local index = glyph.index + local name = glyph.name + if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then + local unicode = lumunic and lumunic[name] or unicodevector[name] + if unicode then + originals[index] = unicode + tounicode[index] = tounicode16(unicode,name) + ns = ns + 1 + end + -- cidmap heuristics, beware, there is no guarantee for a match unless + -- the chain resolves + if (not unicode) and usedmap then + local foundindex = lpegmatch(oparser,name) + if foundindex then + unicode = cidcodes[foundindex] -- name to number + if unicode then + originals[index] = unicode + tounicode[index] = tounicode16(unicode,name) + ns = ns + 1 + else + local reference = cidnames[foundindex] -- number to name + if reference then + local foundindex = lpegmatch(oparser,reference) + if foundindex then + unicode = cidcodes[foundindex] + if unicode then + originals[index] = unicode + tounicode[index] = tounicode16(unicode,name) + ns = ns + 1 + end + end + if not unicode or unicode == "" then + local foundcodes, multiple = lpegmatch(uparser,reference) + if foundcodes then + originals[index] = foundcodes + if multiple then + tounicode[index] = 
tounicode16sequence(foundcodes) + nl = nl + 1 + unicode = true + else + tounicode[index] = tounicode16(foundcodes,name) + ns = ns + 1 + unicode = foundcodes + end + end + end + end + end + end + end + -- a.whatever or a_b_c.whatever or a_b_c (no numbers) + if not unicode or unicode == "" then + local split = lpegmatch(ligsplitter,name) + local nplit = split and #split or 0 + if nplit >= 2 then + local t, n = { }, 0 + for l=1,nplit do + local base = split[l] + local u = unicodes[base] or unicodevector[base] + if not u then + break + elseif type(u) == "table" then + n = n + 1 + t[n] = u[1] + else + n = n + 1 + t[n] = u + end + end + if n == 0 then -- done then + -- nothing + elseif n == 1 then + originals[index] = t[1] + tounicode[index] = tounicode16(t[1],name) + else + originals[index] = t + tounicode[index] = tounicode16sequence(t) + end + nl = nl + 1 + unicode = true + else + -- skip: already checked and we don't want privates here + end + end + -- last resort (we might need to catch private here as well) + if not unicode or unicode == "" then + local foundcodes, multiple = lpegmatch(uparser,name) + if foundcodes then + if multiple then + originals[index] = foundcodes + tounicode[index] = tounicode16sequence(foundcodes,name) + nl = nl + 1 + unicode = true + else + originals[index] = foundcodes + tounicode[index] = tounicode16(foundcodes,name) + ns = ns + 1 + unicode = foundcodes + end + end + end + -- if not unicode then + -- originals[index] = 0xFFFD + -- tounicode[index] = "FFFD" + -- end + end + end + if trace_mapping then + for unic, glyph in table.sortedhash(descriptions) do + local name = glyph.name + local index = glyph.index + local toun = tounicode[index] + if toun then + report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun) + else + report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) + end + end + end + if trace_loading and (ns > 0 or nl > 0) then + report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) + end +end diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua index 83df65341..1915f7a82 100644 --- a/tex/context/base/font-mis.lua +++ b/tex/context/base/font-mis.lua @@ -1,111 +1,111 @@ -if not modules then modules = { } end modules ['font-mis'] = { - version = 1.001, - comment = "companion to mtx-fonts", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next = next -local lower, strip = string.lower, string.strip - --- also used in other scripts so we need to check some tables: - -fonts = fonts or { } - -fonts.helpers = fonts.helpers or { } -local helpers = fonts.helpers - -fonts.handlers = fonts.handlers or { } -local handlers = fonts.handlers - -handlers.otf = handlers.otf or { } -local otf = handlers.otf - -otf.version = otf.version or 2.743 -otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true) - -function otf.loadcached(filename,format,sub) - -- no recache when version mismatch - local name = file.basename(file.removesuffix(filename)) - if sub == "" then sub = false end - local hash = name - if sub then - hash = hash .. "-" .. 
sub - end - hash = containers.cleanname(hash) - local data = containers.read(otf.cache, hash) - if data and not data.verbose then - otf.enhancers.unpack(data) - return data - else - return nil - end -end - -local featuregroups = { "gsub", "gpos" } - -function fonts.helpers.getfeatures(name,t,script,language) -- maybe per font type - local t = lower(t or (name and file.suffix(name)) or "") - if t == "otf" or t == "ttf" or t == "ttc" or t == "dfont" then - local filename = resolvers.findfile(name,t) or "" - if filename ~= "" then - local data = otf.loadcached(filename) - if data and data.resources and data.resources.features then - return data.resources.features - else - local ff = fontloader.open(filename) - if ff then - local data = fontloader.to_table(ff) - fontloader.close(ff) - local features = { } - for k=1,#featuregroups do - local what = featuregroups[k] - local dw = data[what] - if dw then - local f = { } - features[what] = f - for i=1,#dw do - local d = dw[i] - local dfeatures = d.features - if dfeatures then - for i=1,#dfeatures do - local df = dfeatures[i] - local tag = strip(lower(df.tag)) - local ft = f[tag] if not ft then ft = {} f[tag] = ft end - local dfscripts = df.scripts - for i=1,#dfscripts do - local ds = dfscripts[i] - local scri = strip(lower(ds.script)) - local fts = ft[scri] if not fts then fts = {} ft[scri] = fts end - local dslangs = ds.langs - for i=1,#dslangs do - local lang = dslangs[i] - lang = strip(lower(lang)) - if scri == script then - if lang == language then - fts[lang] = 'sl' - else - fts[lang] = 's' - end - else - if lang == language then - fts[lang] = 'l' - else - fts[lang] = true - end - end - end - end - end - end - end - end - end - return features - end - end - end - end - return nil, nil -end +if not modules then modules = { } end modules ['font-mis'] = { + version = 1.001, + comment = "companion to mtx-fonts", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next = next +local lower, strip = string.lower, string.strip + +-- also used in other scripts so we need to check some tables: + +fonts = fonts or { } + +fonts.helpers = fonts.helpers or { } +local helpers = fonts.helpers + +fonts.handlers = fonts.handlers or { } +local handlers = fonts.handlers + +handlers.otf = handlers.otf or { } +local otf = handlers.otf + +otf.version = otf.version or 2.743 +otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true) + +function otf.loadcached(filename,format,sub) + -- no recache when version mismatch + local name = file.basename(file.removesuffix(filename)) + if sub == "" then sub = false end + local hash = name + if sub then + hash = hash .. "-" .. 
sub + end + hash = containers.cleanname(hash) + local data = containers.read(otf.cache, hash) + if data and not data.verbose then + otf.enhancers.unpack(data) + return data + else + return nil + end +end + +local featuregroups = { "gsub", "gpos" } + +function fonts.helpers.getfeatures(name,t,script,language) -- maybe per font type + local t = lower(t or (name and file.suffix(name)) or "") + if t == "otf" or t == "ttf" or t == "ttc" or t == "dfont" then + local filename = resolvers.findfile(name,t) or "" + if filename ~= "" then + local data = otf.loadcached(filename) + if data and data.resources and data.resources.features then + return data.resources.features + else + local ff = fontloader.open(filename) + if ff then + local data = fontloader.to_table(ff) + fontloader.close(ff) + local features = { } + for k=1,#featuregroups do + local what = featuregroups[k] + local dw = data[what] + if dw then + local f = { } + features[what] = f + for i=1,#dw do + local d = dw[i] + local dfeatures = d.features + if dfeatures then + for i=1,#dfeatures do + local df = dfeatures[i] + local tag = strip(lower(df.tag)) + local ft = f[tag] if not ft then ft = {} f[tag] = ft end + local dfscripts = df.scripts + for i=1,#dfscripts do + local ds = dfscripts[i] + local scri = strip(lower(ds.script)) + local fts = ft[scri] if not fts then fts = {} ft[scri] = fts end + local dslangs = ds.langs + for i=1,#dslangs do + local lang = dslangs[i] + lang = strip(lower(lang)) + if scri == script then + if lang == language then + fts[lang] = 'sl' + else + fts[lang] = 's' + end + else + if lang == language then + fts[lang] = 'l' + else + fts[lang] = true + end + end + end + end + end + end + end + end + end + return features + end + end + end + end + return nil, nil +end diff --git a/tex/context/base/font-nod.lua b/tex/context/base/font-nod.lua index f99130279..7c93e294c 100644 --- a/tex/context/base/font-nod.lua +++ b/tex/context/base/font-nod.lua @@ -1,434 +1,434 @@ -if not modules then modules = { } end modules ['font-nod'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

-<p>This is rather experimental. We need more control and some of this
-might become a runtime module instead. This module will be cleaned up!</p>

---ldx]]-- - -local tonumber, tostring = tonumber, tostring -local utfchar = utf.char -local concat = table.concat -local match, gmatch, concat, rep = string.match, string.gmatch, table.concat, string.rep - -local report_nodes = logs.reporter("fonts","tracing") - -fonts = fonts or { } -nodes = nodes or { } - -local fonts, nodes, node, context = fonts, nodes, node, context - -local tracers = nodes.tracers or { } -nodes.tracers = tracers - -local tasks = nodes.tasks or { } -nodes.tasks = tasks - -local handlers = nodes.handlers or { } -nodes.handlers = handlers - -local injections = nodes.injections or { } -nodes.injections = injections - -local char_tracers = tracers.characters or { } -tracers.characters = char_tracers - -local step_tracers = tracers.steppers or { } -tracers.steppers = step_tracers - -local copy_node_list = node.copy_list -local hpack_node_list = node.hpack -local free_node_list = node.flush_list -local traverse_nodes = node.traverse - -local nodecodes = nodes.nodecodes -local whatcodes = nodes.whatcodes - -local glyph_code = nodecodes.glyph -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local disc_code = nodecodes.disc -local glue_code = nodecodes.glue -local kern_code = nodecodes.kern -local rule_code = nodecodes.rule -local whatsit_code = nodecodes.whatsit -local spec_code = nodecodes.glue_spec - -local localpar_code = whatcodes.localpar -local dir_code = whatcodes.dir - -local nodepool = nodes.pool -local new_glyph = nodepool.glyph - -local formatters = string.formatters -local formatter = string.formatter - -local hashes = fonts.hashes - -local fontidentifiers = hashes.identifiers -local fontdescriptions = hashes.descriptions -local fontcharacters = hashes.characters -local fontproperties = hashes.properties -local fontparameters = hashes.parameters - -function char_tracers.collect(head,list,tag,n) - n = n or 0 - local ok, fn = false, nil - while head do - local id = head.id - if id == glyph_code then - local f = head.font - if f ~= fn then - ok, fn = false, f - end - local c = head.char - local i = fontidentifiers[f].indices[c] or 0 - if not ok then - ok = true - n = n + 1 - list[n] = list[n] or { } - list[n][tag] = { } - end - local l = list[n][tag] - l[#l+1] = { c, f, i } - elseif id == disc_code then - -- skip - else - ok = false - end - head = head.next - end -end - -function char_tracers.equal(ta, tb) - if #ta ~= #tb then - return false - else - for i=1,#ta do - local a, b = ta[i], tb[i] - if a[1] ~= b[1] or a[2] ~= b[2] or a[3] ~= b[3] then - return false - end - end - end - return true -end - -function char_tracers.string(t) - local tt = { } - for i=1,#t do - tt[i] = utfchar(t[i][1]) - end - return concat(tt,"") -end - -local f_unicode = formatters["%U"] - -function char_tracers.unicodes(t,decimal) - local tt = { } - for i=1,#t do - local n = t[i][1] - if n == 0 then - tt[i] = "-" - elseif decimal then - tt[i] = n - else - tt[i] = f_unicode(n) - end - end - return concat(tt," ") -end - -function char_tracers.indices(t,decimal) - local tt = { } - for i=1,#t do - local n = t[i][3] - if n == 0 then - tt[i] = "-" - elseif decimal then - tt[i] = n - else - tt[i] = f_unicode(n) - end - end - return concat(tt," ") -end - -function char_tracers.start() - local npc = handlers.characters - local list = { } - function handlers.characters(head) - local n = #list - char_tracers.collect(head,list,'before',n) - local h, d = npc(head) - char_tracers.collect(head,list,'after',n) - if #list > n then - list[#list+1] = { } - end - return h, d - end - 
function char_tracers.stop() - tracers.list['characters'] = list - local variables = { - ['title'] = 'ConTeXt Character Processing Information', - ['color-background-one'] = lmx.get('color-background-yellow'), - ['color-background-two'] = lmx.get('color-background-purple'), - } - lmx.show('context-characters.lmx',variables) - handlers.characters = npc - tasks.restart("processors", "characters") - end - tasks.restart("processors", "characters") -end - -local stack = { } - -function tracers.start(tag) - stack[#stack+1] = tag - local tracer = tracers[tag] - if tracer and tracer.start then - tracer.start() - end -end -function tracers.stop() - local tracer = stack[#stack] - if tracer and tracer.stop then - tracer.stop() - end - stack[#stack] = nil -end - --- experimental - -local collection, collecting, messages = { }, false, { } - -function step_tracers.start() - collecting = true -end - -function step_tracers.stop() - collecting = false -end - -function step_tracers.reset() - for i=1,#collection do - local c = collection[i] - if c then - free_node_list(c) - end - end - collection, messages = { }, { } -end - -function step_tracers.nofsteps() - return context(#collection) -end - -function step_tracers.glyphs(n,i) - local c = collection[i] - if c then - tex.box[n] = hpack_node_list(copy_node_list(c)) - end -end - -function step_tracers.features() - -- we cannot use first_glyph here as it only finds characters with subtype < 256 - local f = collection[1] - while f do - if f.id == glyph_code then - local tfmdata, t = fontidentifiers[f.font], { } - for feature, value in table.sortedhash(tfmdata.shared.features) do - if feature == "number" or feature == "features" then - -- private - elseif type(value) == "boolean" then - if value then - t[#t+1] = formatters["%s=yes"](feature) - else - -- skip - end - else - t[#t+1] = formatters["%s=%s"](feature,value) - end - end - if #t > 0 then - context(concat(t,", ")) - else - context("no features") - end - return - end - f = f.next - end -end - -function tracers.fontchar(font,char) - local n = new_glyph() - n.font, n.char, n.subtype = font, char, 256 - context(n) -end - -function step_tracers.font(command) - local c = collection[1] - while c do - local id = c.id - if id == glyph_code then - local font = c.font - local name = file.basename(fontproperties[font].filename or "unknown") - local size = fontparameters[font].size or 0 - if command then - context[command](font,name,size) -- size in sp - else - context("[%s: %s @ %p]",font,name,size) - end - return - else - c = c.next - end - end -end - -function step_tracers.codes(i,command) - local c = collection[i] - while c do - local id = c.id - if id == glyph_code then - if command then - local f, c = c.font,c.char - local d = fontdescriptions[f] - local d = d and d[c] - context[command](f,c,d and d.class or "") - else - context("[%s:U+%04X]",c.font,c.char) - end - elseif id == whatsit_code and (c.subtype == localpar_code or c.subtype == dir_code) then - context("[%s]",c.dir) - else - context("[%s]",nodecodes[id]) - end - c = c.next - end -end - -function step_tracers.messages(i,command,split) - local list = messages[i] -- or { "no messages" } - if list then - for i=1,#list do - local l = list[i] - if not command then - context("(%s)",l) - elseif split then - local a, b = match(l,"^(.-)%s*:%s*(.*)$") - context[command](a or l or "",b or "") - else - context[command](l) - end - end - end -end - --- hooks into the node list processor (see otf) - -function step_tracers.check(head) - if collecting then - 
step_tracers.reset() - local n = copy_node_list(head) - injections.handler(n,nil,"trace",true) - handlers.protectglyphs(n) -- can be option - collection[1] = n - end -end - -function step_tracers.register(head) - if collecting then - local nc = #collection+1 - if messages[nc] then - local n = copy_node_list(head) - injections.handler(n,nil,"trace",true) - handlers.protectglyphs(n) -- can be option - collection[nc] = n - end - end -end - -function step_tracers.message(str,...) - str = formatter(str,...) - if collecting then - local n = #collection + 1 - local m = messages[n] - if not m then m = { } messages[n] = m end - m[#m+1] = str - end - return str -- saves an intermediate var in the caller -end - --- - -local threshold = 65536 - -local function toutf(list,result,nofresult,stopcriterium) - if list then - for n in traverse_nodes(list) do - local id = n.id - if id == glyph_code then - local components = n.components - if components then - result, nofresult = toutf(components,result,nofresult) - else - local c = n.char - local fc = fontcharacters[n.font] - if fc then - local u = fc[c].tounicode - if u then - for s in gmatch(u,"....") do - nofresult = nofresult + 1 - result[nofresult] = utfchar(tonumber(s,16)) - end - else - nofresult = nofresult + 1 - result[nofresult] = utfchar(c) - end - else - nofresult = nofresult + 1 - result[nofresult] = utfchar(c) - end - end - elseif id == disc_code then - result, nofresult = toutf(n.replace,result,nofresult) -- needed? - elseif id == hlist_code or id == vlist_code then - -- if nofresult > 0 and result[nofresult] ~= " " then - -- nofresult = nofresult + 1 - -- result[nofresult] = " " - -- end - result, nofresult = toutf(n.list,result,nofresult) - elseif id == glue_code then - if nofresult > 0 and result[nofresult] ~= " " then - nofresult = nofresult + 1 - result[nofresult] = " " - end - elseif id == kern_code and n.kern > threshold then - if nofresult > 0 and result[nofresult] ~= " " then - nofresult = nofresult + 1 - result[nofresult] = " " - end - end - if n == stopcriterium then - break - end - end - end - if nofresult > 0 and result[nofresult] == " " then - result[nofresult] = nil - nofresult = nofresult - 1 - end - return result, nofresult -end - -function nodes.toutf(list,stopcriterium) - local result, nofresult = toutf(list,{},0,stopcriterium) - return concat(result) -end +if not modules then modules = { } end modules ['font-nod'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

+<p>This is rather experimental. We need more control and some of this
+might become a runtime module instead. This module will be cleaned up!</p>
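One of the more generally useful pieces in this hunk is nodes.toutf, which reconstructs readable text from a node list by splitting each glyph's tounicode string into four-digit hex groups and turning each group back into a character. The splitting step can be sketched on its own (plain Lua 5.3+ for utf8.char; the module uses its own utf.char):

    local function fromtounicode(s)
        local t = { }
        for hex in s:gmatch("....") do        -- one 16-bit value per four hex digits
            t[#t+1] = utf8.char(tonumber(hex, 16))
        end
        return table.concat(t)
    end

    print(fromtounicode("006600660069"))      -- "ffi", e.g. the tounicode of an ffi ligature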

+--ldx]]-- + +local tonumber, tostring = tonumber, tostring +local utfchar = utf.char +local concat = table.concat +local match, gmatch, concat, rep = string.match, string.gmatch, table.concat, string.rep + +local report_nodes = logs.reporter("fonts","tracing") + +fonts = fonts or { } +nodes = nodes or { } + +local fonts, nodes, node, context = fonts, nodes, node, context + +local tracers = nodes.tracers or { } +nodes.tracers = tracers + +local tasks = nodes.tasks or { } +nodes.tasks = tasks + +local handlers = nodes.handlers or { } +nodes.handlers = handlers + +local injections = nodes.injections or { } +nodes.injections = injections + +local char_tracers = tracers.characters or { } +tracers.characters = char_tracers + +local step_tracers = tracers.steppers or { } +tracers.steppers = step_tracers + +local copy_node_list = node.copy_list +local hpack_node_list = node.hpack +local free_node_list = node.flush_list +local traverse_nodes = node.traverse + +local nodecodes = nodes.nodecodes +local whatcodes = nodes.whatcodes + +local glyph_code = nodecodes.glyph +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local disc_code = nodecodes.disc +local glue_code = nodecodes.glue +local kern_code = nodecodes.kern +local rule_code = nodecodes.rule +local whatsit_code = nodecodes.whatsit +local spec_code = nodecodes.glue_spec + +local localpar_code = whatcodes.localpar +local dir_code = whatcodes.dir + +local nodepool = nodes.pool +local new_glyph = nodepool.glyph + +local formatters = string.formatters +local formatter = string.formatter + +local hashes = fonts.hashes + +local fontidentifiers = hashes.identifiers +local fontdescriptions = hashes.descriptions +local fontcharacters = hashes.characters +local fontproperties = hashes.properties +local fontparameters = hashes.parameters + +function char_tracers.collect(head,list,tag,n) + n = n or 0 + local ok, fn = false, nil + while head do + local id = head.id + if id == glyph_code then + local f = head.font + if f ~= fn then + ok, fn = false, f + end + local c = head.char + local i = fontidentifiers[f].indices[c] or 0 + if not ok then + ok = true + n = n + 1 + list[n] = list[n] or { } + list[n][tag] = { } + end + local l = list[n][tag] + l[#l+1] = { c, f, i } + elseif id == disc_code then + -- skip + else + ok = false + end + head = head.next + end +end + +function char_tracers.equal(ta, tb) + if #ta ~= #tb then + return false + else + for i=1,#ta do + local a, b = ta[i], tb[i] + if a[1] ~= b[1] or a[2] ~= b[2] or a[3] ~= b[3] then + return false + end + end + end + return true +end + +function char_tracers.string(t) + local tt = { } + for i=1,#t do + tt[i] = utfchar(t[i][1]) + end + return concat(tt,"") +end + +local f_unicode = formatters["%U"] + +function char_tracers.unicodes(t,decimal) + local tt = { } + for i=1,#t do + local n = t[i][1] + if n == 0 then + tt[i] = "-" + elseif decimal then + tt[i] = n + else + tt[i] = f_unicode(n) + end + end + return concat(tt," ") +end + +function char_tracers.indices(t,decimal) + local tt = { } + for i=1,#t do + local n = t[i][3] + if n == 0 then + tt[i] = "-" + elseif decimal then + tt[i] = n + else + tt[i] = f_unicode(n) + end + end + return concat(tt," ") +end + +function char_tracers.start() + local npc = handlers.characters + local list = { } + function handlers.characters(head) + local n = #list + char_tracers.collect(head,list,'before',n) + local h, d = npc(head) + char_tracers.collect(head,list,'after',n) + if #list > n then + list[#list+1] = { } + end + return h, d + end + 
function char_tracers.stop() + tracers.list['characters'] = list + local variables = { + ['title'] = 'ConTeXt Character Processing Information', + ['color-background-one'] = lmx.get('color-background-yellow'), + ['color-background-two'] = lmx.get('color-background-purple'), + } + lmx.show('context-characters.lmx',variables) + handlers.characters = npc + tasks.restart("processors", "characters") + end + tasks.restart("processors", "characters") +end + +local stack = { } + +function tracers.start(tag) + stack[#stack+1] = tag + local tracer = tracers[tag] + if tracer and tracer.start then + tracer.start() + end +end +function tracers.stop() + local tracer = stack[#stack] + if tracer and tracer.stop then + tracer.stop() + end + stack[#stack] = nil +end + +-- experimental + +local collection, collecting, messages = { }, false, { } + +function step_tracers.start() + collecting = true +end + +function step_tracers.stop() + collecting = false +end + +function step_tracers.reset() + for i=1,#collection do + local c = collection[i] + if c then + free_node_list(c) + end + end + collection, messages = { }, { } +end + +function step_tracers.nofsteps() + return context(#collection) +end + +function step_tracers.glyphs(n,i) + local c = collection[i] + if c then + tex.box[n] = hpack_node_list(copy_node_list(c)) + end +end + +function step_tracers.features() + -- we cannot use first_glyph here as it only finds characters with subtype < 256 + local f = collection[1] + while f do + if f.id == glyph_code then + local tfmdata, t = fontidentifiers[f.font], { } + for feature, value in table.sortedhash(tfmdata.shared.features) do + if feature == "number" or feature == "features" then + -- private + elseif type(value) == "boolean" then + if value then + t[#t+1] = formatters["%s=yes"](feature) + else + -- skip + end + else + t[#t+1] = formatters["%s=%s"](feature,value) + end + end + if #t > 0 then + context(concat(t,", ")) + else + context("no features") + end + return + end + f = f.next + end +end + +function tracers.fontchar(font,char) + local n = new_glyph() + n.font, n.char, n.subtype = font, char, 256 + context(n) +end + +function step_tracers.font(command) + local c = collection[1] + while c do + local id = c.id + if id == glyph_code then + local font = c.font + local name = file.basename(fontproperties[font].filename or "unknown") + local size = fontparameters[font].size or 0 + if command then + context[command](font,name,size) -- size in sp + else + context("[%s: %s @ %p]",font,name,size) + end + return + else + c = c.next + end + end +end + +function step_tracers.codes(i,command) + local c = collection[i] + while c do + local id = c.id + if id == glyph_code then + if command then + local f, c = c.font,c.char + local d = fontdescriptions[f] + local d = d and d[c] + context[command](f,c,d and d.class or "") + else + context("[%s:U+%04X]",c.font,c.char) + end + elseif id == whatsit_code and (c.subtype == localpar_code or c.subtype == dir_code) then + context("[%s]",c.dir) + else + context("[%s]",nodecodes[id]) + end + c = c.next + end +end + +function step_tracers.messages(i,command,split) + local list = messages[i] -- or { "no messages" } + if list then + for i=1,#list do + local l = list[i] + if not command then + context("(%s)",l) + elseif split then + local a, b = match(l,"^(.-)%s*:%s*(.*)$") + context[command](a or l or "",b or "") + else + context[command](l) + end + end + end +end + +-- hooks into the node list processor (see otf) + +function step_tracers.check(head) + if collecting then + 
step_tracers.reset() + local n = copy_node_list(head) + injections.handler(n,nil,"trace",true) + handlers.protectglyphs(n) -- can be option + collection[1] = n + end +end + +function step_tracers.register(head) + if collecting then + local nc = #collection+1 + if messages[nc] then + local n = copy_node_list(head) + injections.handler(n,nil,"trace",true) + handlers.protectglyphs(n) -- can be option + collection[nc] = n + end + end +end + +function step_tracers.message(str,...) + str = formatter(str,...) + if collecting then + local n = #collection + 1 + local m = messages[n] + if not m then m = { } messages[n] = m end + m[#m+1] = str + end + return str -- saves an intermediate var in the caller +end + +-- + +local threshold = 65536 + +local function toutf(list,result,nofresult,stopcriterium) + if list then + for n in traverse_nodes(list) do + local id = n.id + if id == glyph_code then + local components = n.components + if components then + result, nofresult = toutf(components,result,nofresult) + else + local c = n.char + local fc = fontcharacters[n.font] + if fc then + local u = fc[c].tounicode + if u then + for s in gmatch(u,"....") do + nofresult = nofresult + 1 + result[nofresult] = utfchar(tonumber(s,16)) + end + else + nofresult = nofresult + 1 + result[nofresult] = utfchar(c) + end + else + nofresult = nofresult + 1 + result[nofresult] = utfchar(c) + end + end + elseif id == disc_code then + result, nofresult = toutf(n.replace,result,nofresult) -- needed? + elseif id == hlist_code or id == vlist_code then + -- if nofresult > 0 and result[nofresult] ~= " " then + -- nofresult = nofresult + 1 + -- result[nofresult] = " " + -- end + result, nofresult = toutf(n.list,result,nofresult) + elseif id == glue_code then + if nofresult > 0 and result[nofresult] ~= " " then + nofresult = nofresult + 1 + result[nofresult] = " " + end + elseif id == kern_code and n.kern > threshold then + if nofresult > 0 and result[nofresult] ~= " " then + nofresult = nofresult + 1 + result[nofresult] = " " + end + end + if n == stopcriterium then + break + end + end + end + if nofresult > 0 and result[nofresult] == " " then + result[nofresult] = nil + nofresult = nofresult - 1 + end + return result, nofresult +end + +function nodes.toutf(list,stopcriterium) + local result, nofresult = toutf(list,{},0,stopcriterium) + return concat(result) +end diff --git a/tex/context/base/font-odk.lua b/tex/context/base/font-odk.lua index c34efc120..3ed562348 100644 --- a/tex/context/base/font-odk.lua +++ b/tex/context/base/font-odk.lua @@ -1,904 +1,904 @@ --- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --- We keep the original around for a while so that we can check it -- --- when the above code does it wrong (data tables are not included). 
-- --- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - --- author : Kai Eigner, TAT Zetwerk --- copyright : TAT Zetwerk --- comment : see font-odv.lua for current implementation - --- local state = attributes.private('state') --- local sylnr = attributes.private('syllabe') --- --- local function install_dev(tfmdata) --- local features = tfmdata.resources.features --- local sequences = tfmdata.resources.sequences --- --- local insertpos = 1 --- for s=1,#sequences do -- classify chars --- for k in pairs(basic_shaping_forms) do --- if sequences[s].features and ( sequences[s].features[k] or sequences[s].features.locl ) then insertpos = s + 1 end --- end --- end --- --- features.gsub["dev2_reorder_matras"] = { ["dev2"] = { ["dflt"] = true } } --- features.gsub["dev2_reorder_reph"] = { ["dev2"] = { ["dflt"] = true } } --- features.gsub["dev2_reorder_pre_base_reordering_consonants"] = { ["dev2"] = { ["dflt"] = true } } --- features.gsub["remove_joiners"] = { ["deva"] = { ["dflt"] = true }, ["dev2"] = { ["dflt"] = true } } --- --- local sequence_dev2_reorder_matras = { --- chain = 0, --- features = { dev2_reorder_matras = { dev2 = { dflt = true } } }, --- flags = { false, false, false, false }, --- name = "dev2_reorder_matras", --- subtables = { "dev2_reorder_matras" }, --- type = "dev2_reorder_matras", --- } --- local sequence_dev2_reorder_reph = { --- chain = 0, --- features = { dev2_reorder_reph = { dev2 = { dflt = true } } }, --- flags = { false, false, false, false }, --- name = "dev2_reorder_reph", --- subtables = { "dev2_reorder_reph" }, --- type = "dev2_reorder_reph", --- } --- local sequence_dev2_reorder_pre_base_reordering_consonants = { --- chain = 0, --- features = { dev2_reorder_pre_base_reordering_consonants = { dev2 = { dflt = true } } }, --- flags = { false, false, false, false }, --- name = "dev2_reorder_pre_base_reordering_consonants", --- subtables = { "dev2_reorder_pre_base_reordering_consonants" }, --- type = "dev2_reorder_pre_base_reordering_consonants", --- } --- local sequence_remove_joiners = { --- chain = 0, --- features = { remove_joiners = { deva = { dflt = true }, dev2 = { dflt = true } } }, --- flags = { false, false, false, false }, --- name = "remove_joiners", --- subtables = { "remove_joiners" }, --- type = "remove_joiners", --- } --- table.insert(sequences, insertpos, sequence_dev2_reorder_pre_base_reordering_consonants) --- table.insert(sequences, insertpos, sequence_dev2_reorder_reph) --- table.insert(sequences, insertpos, sequence_dev2_reorder_matras) --- table.insert(sequences, insertpos, sequence_remove_joiners) --- end --- --- local function deva_reorder(head,start,stop,font,attr) --- local tfmdata = fontdata[font] --- local lookuphash = tfmdata.resources.lookuphash --- local sequences = tfmdata.resources.sequences --- --- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features --- --- local sharedfeatures = tfmdata.shared.features --- sharedfeatures["remove_joiners"] = true --- local datasets = otf.dataset(tfmdata,font,attr) --- --- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true } --- --- local current, n, base, firstcons, lastcons, basefound = start, start.next, nil, nil, nil, false --- local reph, vattu = false, false --- for s=1,#sequences do --- local dataset = datasets[s] --- featurevalue = dataset and dataset[1] --- if featurevalue and dataset[4] == "rphf" then reph = true end --- if featurevalue and dataset[4] == "blwf" then vattu = true end --- end --- if 
ra[start.char] and halant[n.char] and reph then -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants --- if n == stop then return head, stop end --- if zwj[n.next.char] then --- current = start --- else --- current = n.next --- set_attribute(start,state,5) -- rphf --- end --- end --- --- if nbsp[current.char] then --Stand Alone cluster --- if current == stop then --- stop = stop.prev --- head = node.remove(head, current) --- node.free(current) --- return head, stop --- else --- base, firstcons, lastcons = current, current, current --- current = current.next --- if current ~= stop then --- if nukta[current.char] then current = current.next end --- if zwj[current.char] then --- if current ~= stop and current.next ~= stop and halant[current.next.char] then --- current = current.next --- local tmp = current.next.next --- local changestop = current.next == stop --- local tempcurrent = node.copy(current.next) --- tempcurrent.next = node.copy(current) --- tempcurrent.next.prev = tempcurrent --- set_attribute(tempcurrent,state,8) --blwf --- tempcurrent = nodes.handlers.characters(tempcurrent) --- unset_attribute(tempcurrent,state) --- if current.next.char == tempcurrent.char then --- node.flush_list(tempcurrent) --- local n = node.copy(current) --- current.char = dotted_circle --- head = node.insert_after(head, current, n) --- else --- current.char = tempcurrent.char -- (assumes that result of blwf consists of one node) --- local freenode = current.next --- current.next = tmp --- tmp.prev = current --- node.free(freenode) --- node.flush_list(tempcurrent) --- if changestop then stop = current end --- end --- end --- end --- end --- end --- end --- --- while not basefound do -- find base consonant --- if consonant[current.char] then --- set_attribute(current, state, 6) -- half --- if not firstcons then firstcons = current end --- lastcons = current --- if not base then --- base = current --- else --check whether consonant has below-base (or post-base) form --- local baseform = true --- for s=1,#sequences do --- local sequence = sequences[s] --- local dataset = datasets[s] --- featurevalue = dataset and dataset[1] --- if featurevalue and dataset[4] == "blwf" then --- local subtables = sequence.subtables --- for i=1,#subtables do --- local lookupname = subtables[i] --- local lookupcache = lookuphash[lookupname] --- if lookupcache then --- local lookupmatch = lookupcache[current.char] --- if lookupmatch then --- set_attribute(current, state, 8) -- blwf --- baseform = false --- end --- end --- end --- end --- end --- if baseform then base = current end --- end --- end --- basefound = current == stop --- current = current.next --- end --- if base ~= lastcons then -- if base consonant is not last one then move halant from base consonant to last one --- n = base.next --- if nukta[n.char] then n = n.next end --- if halant[n.char] then --- if lastcons ~= stop then --- local ln = lastcons.next --- if nukta[ln.char] then lastcons = ln end --- end --- local np, nn, ln = n.prev, n.next, lastcons.next --- np.next = n.next --- nn.prev = n.prev --- lastcons.next = n --- if ln then ln.prev = n end --- n.next = ln --- n.prev = lastcons --- if lastcons == stop then stop = n end --- end --- end --- --- n = start.next --- if ra[start.char] and halant[n.char] and not ( n ~= stop and ( zwj[n.next.char] or zwnj[n.next.char] ) ) then -- if syllable starts with Ra + H then move this combination so that it follows either: the post-base 'matra' (if any) or the base 
consonant --- local matra = base --- if base ~= stop and dependent_vowel[base.next.char] then matra = base.next end --- local sp, nn, mn = start.prev, n.next, matra.next --- if sp then sp.next = nn end --- nn.prev = sp --- matra.next = start --- start.prev = matra --- n.next = mn --- if mn then mn.prev = n end --- if head == start then head = nn end --- start = nn --- if matra == stop then stop = n end --- end --- --- local current = start --- while current ~= stop do --- if halant[current.next.char] and current.next ~= stop and zwnj[current.next.next.char] then unset_attribute(current, state) end --- current = current.next --- end --- --- if has_attribute(base, state) and base ~= stop and halant[base.next.char] and not ( base.next ~= stop and zwj[base.next.next.char] ) then unset_attribute(base, state) end --- --- local current, allreordered, moved = start, false, { [base] = true } --- local a, b, p, bn = base, base, base, base.next --- if base ~= stop and nukta[bn.char] then a, b, p = bn, bn, bn end --- while not allreordered do --- local c, n, l = current, current.next, nil --current is always consonant --- if c ~= stop and nukta[n.char] then c = n n = n.next end --- if c ~= stop and halant[n.char] then c = n n = n.next end --- while c ~= stop and dependent_vowel[n.char] do c = n n = n.next end --- if c ~= stop and vowel_modifier[n.char] then c = n n = n.next end --- if c ~= stop and stress_tone_mark[n.char] then c = n n = n.next end --- local bp, cn = firstcons.prev, current.next --- while cn ~= c.next do -- move pre-base matras... --- if pre_mark[cn.char] then --- if bp then bp.next = cn end --- cn.prev.next = cn.next --- if cn.next then cn.next.prev = cn.prev end --- if cn == stop then stop = cn.prev end --- cn.prev = bp --- cn.next = firstcons --- firstcons.prev = cn --- if firstcons == start then --- if head == start then head = cn end --- start = cn --- end --- break --- end --- cn = cn.next --- end --- allreordered = c == stop --- current = c.next --- end --- --- if reph or vattu then --- local current, cns = start, nil --- while current ~= stop do --- local c, n = current, current.next --- if ra[current.char] and halant[n.char] then --- c, n = n, n.next --- local b, bn = base, base --- while bn ~= stop do --- if dependent_vowel[bn.next.char] then b = bn.next end --- bn = bn.next --- end --- if has_attribute(current,state,attribute) == 5 then -- position Reph (Ra + H) after post-base 'matra' (if any) since these become marks on the 'matra', not on the base glyph --- if b ~= current then --- if current == start then --- if head == start then head = n end --- start = n --- end --- if b == stop then stop = c end --- if current.prev then current.prev.next = n end --- if n then n.prev = current.prev end --- c.next = b.next --- if b.next then b.next.prev = c end --- b.next = current --- current.prev = b --- end --- elseif cns and cns.next ~= current then -- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants) --- local cp, cnsn = current.prev, cns.next --- if cp then cp.next = n end --- if n then n.prev = cp end --- cns.next = current --- current.prev = cns --- c.next = cnsn --- if cnsn then cnsn.prev = c end --- if c == stop then stop = cp break end --- current = n.prev --- end --- elseif consonant[current.char] or nbsp[current.char] then --- cns = current --- if halant[cns.next.char] then cns = cns.next end --- end --- current = current.next --- end --- end --- --- if nbsp[base.char] then --- head 
= node.remove(head, base) --- node.free(base) --- end --- --- return head, stop --- end --- --- function dev2_reorder_matras(start,kind,lookupname,replacement) --- local current = start --- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --- if halant[current.char] and not has_attribute(current, state) then --- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end --- local sn = start.next --- start.next.prev = start.prev --- if start.prev then start.prev.next = start.next end --- if current.next then current.next.prev = start end --- start.next = current.next --- current.next = start --- start.prev = current --- start = sn --- break --- end --- current = current.next --- end --- return start, true --- end --- --- function dev2_reorder_reph(start,kind,lookupname,replacement) --- local current, sn = start.next, nil --- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 2 --- if halant[current.char] and not has_attribute(current, state) then --- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end --- sn = start.next --- start.next.prev = start.prev --- if start.prev then start.prev.next = start.next end --- if current.next then current.next.prev = start end --- start.next = current.next --- current.next = start --- start.prev = current --- start = sn --- break --- end --- current = current.next --- end --- if not sn then --- current = start.next --- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 4 --- if has_attribute(current, state) == 9 then --post-base --- sn = start.next --- start.next.prev = start.prev --- if start.prev then start.prev.next = start.next end --- start.prev = current.prev --- current.prev.next = start --- start.next = current --- current.prev = start --- start = sn --- break --- end --- current = current.next --- end --- end --- if not sn then --- current = start.next --- local c = nil --- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 5 --- if not c and ( above_mark[current.char] or below_mark[current.char] or post_mark[current.char] ) and ReorderClass[current.char] ~= "after subscript" then c = current end --- current = current.next --- end --- if c then --- sn = start.next --- start.next.prev = start.prev --- if start.prev then start.prev.next = start.next end --- start.prev = c.prev --- c.prev.next = start --- start.next = c --- c.prev = start --- start = sn --- end --- end --- if not sn then --- current = start --- while current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) do --step 6 --- current = current.next --- end --- if start ~= current then --- sn = 
start.next --- start.next.prev = start.prev --- if start.prev then start.prev.next = start.next end --- if current.next then current.next.prev = start end --- start.next = current.next --- current.next = start --- start.prev = current --- start = sn --- end --- end --- return start, true --- end --- --- function dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement) --- local current, sn = start, nil --- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --- if halant[current.char] and not has_attribute(current, state) then --- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end --- sn = start.next --- start.next.prev = start.prev --- if start.prev then start.prev.next = start.next end --- if current.next then current.next.prev = start end --- start.next = current.next --- current.next = start --- start.prev = current --- start = sn --- break --- end --- current = current.next --- end --- if not sn then --- current = start.next --- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --- if not consonant[current.char] and has_attribute(current, state) then --main --- sn = start.next --- start.next.prev = start.prev --- if start.prev then start.prev.next = start.next end --- start.prev = current.prev --- current.prev.next = start --- start.next = current --- current.prev = start --- start = sn --- break --- end --- current = current.next --- end --- end --- return start, true --- end --- --- function remove_joiners(start,kind,lookupname,replacement) --- local stop = start.next --- while stop and stop.id == glyph and stop.subtype<256 and stop.font == start.font and (zwj[stop.char] or zwnj[stop.char]) do stop = stop.next end --- if stop then stop.prev.next = nil stop.prev = start.prev end --- if start.prev then start.prev.next = stop end --- node.flush_list(start) --- return stop, true --- end --- --- local function dev2_reorder(head,start,stop,font,attr) --- local tfmdata = fontdata[font] --- local lookuphash = tfmdata.resources.lookuphash --- local sequences = tfmdata.resources.sequences --- --- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features --- --- local sharedfeatures = tfmdata.shared.features --- sharedfeatures["dev2_reorder_matras"] = true --- sharedfeatures["dev2_reorder_reph"] = true --- sharedfeatures["dev2_reorder_pre_base_reordering_consonants"] = true --- sharedfeatures["remove_joiners"] = true --- local datasets = otf.dataset(tfmdata,font,attr) --- --- local reph, pre_base_reordering_consonants = false, nil --- local halfpos, basepos, subpos, postpos = nil, nil, nil, nil --- local locl = { } --- --- for s=1,#sequences do -- classify chars --- local sequence = sequences[s] --- local dataset = datasets[s] --- featurevalue = dataset and dataset[1] --- if featurevalue and dataset[4] then --- local subtables = sequence.subtables --- for i=1,#subtables do --- local lookupname = subtables[i] --- local lookupcache = lookuphash[lookupname] --- if lookupcache then --- if dataset[4] == "rphf" then --- if dataset[3] ~= 0 then --rphf is result of of chain --- else --- reph = 
lookupcache[0x0930] and lookupcache[0x0930][0x094D] and lookupcache[0x0930][0x094D]["ligature"] --- end --- end --- if dataset[4] == "pref" and not pre_base_reordering_consonants then --- for k, v in pairs(lookupcache[0x094D]) do --- pre_base_reordering_consonants[k] = v and v["ligature"] --ToDo: reph might also be result of chain --- end --- end --- local current = start --- while current ~= stop.next do --- if dataset[4] == "locl" then locl[current] = lookupcache[current.char] end --ToDo: locl might also be result of chain --- if current ~= stop then --- local c, n = locl[current] or current.char, locl[current.next] or current.next.char --- if dataset[4] == "rphf" and lookupcache[c] and lookupcache[c][n] then --above-base: rphf Consonant + Halant --- if current.next ~= stop and ( zwj[current.next.next.char] or zwnj[current.next.next.char] ) then --ZWJ and ZWNJ prevent creation of reph --- current = current.next --- elseif current == start then --- set_attribute(current,state,5) --- end --- current = current.next --- end --- if dataset[4] == "half" and lookupcache[c] and lookupcache[c][n] then --half forms: half Consonant + Halant --- if current.next ~= stop and zwnj[current.next.next.char] then --ZWNJ prevent creation of half --- current = current.next --- else --- set_attribute(current,state,6) --- if not halfpos then halfpos = current end --- end --- current = current.next --- end --- if dataset[4] == "pref" and lookupcache[c] and lookupcache[c][n] then --pre-base: pref Halant + Consonant --- set_attribute(current,state,7) --- set_attribute(current.next,state,7) --- current = current.next --- end --- if dataset[4] == "blwf" and lookupcache[c] and lookupcache[c][n] then --below-base: blwf Halant + Consonant --- set_attribute(current,state,8) --- set_attribute(current.next,state,8) --- current = current.next --- subpos = current --- end --- if dataset[4] == "pstf" and lookupcache[c] and lookupcache[c][n] then --post-base: pstf Halant + Consonant --- set_attribute(current,state,9) --- set_attribute(current.next,state,9) --- current = current.next --- postpos = current --- end --- end --- current = current.next --- end --- end --- end --- end --- end --- --- lookuphash["dev2_reorder_matras"] = pre_mark --- lookuphash["dev2_reorder_reph"] = { [reph] = true } --- lookuphash["dev2_reorder_pre_base_reordering_consonants"] = pre_base_reordering_consonants or { } --- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true } --- --- local current, base, firstcons = start, nil, nil --- if has_attribute(start,state) == 5 then current = start.next.next end -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants --- --- if current ~= stop.next and nbsp[current.char] then --Stand Alone cluster --- if current == stop then --- stop = stop.prev --- head = node.remove(head, current) --- node.free(current) --- return head, stop --- else --- base = current --- current = current.next --- if current ~= stop then --- if nukta[current.char] then current = current.next end --- if zwj[current.char] then --- if current ~= stop and current.next ~= stop and halant[current.next.char] then --- current = current.next --- local tmp = current.next.next --- local changestop = current.next == stop --- current.next.next = nil --- set_attribute(current,state,7) --pref --- current = nodes.handlers.characters(current) --- set_attribute(current,state,8) --blwf --- current = nodes.handlers.characters(current) --- set_attribute(current,state,9) --pstf --- current = 
nodes.handlers.characters(current) --- unset_attribute(current,state) --- if halant[current.char] then --- current.next.next = tmp --- local nc = node.copy(current) --- current.char = dotted_circle --- head = node.insert_after(head, current, nc) --- else --- current.next = tmp -- (assumes that result of pref, blwf, or pstf consists of one node) --- if changestop then stop = current end --- end --- end --- end --- end --- end --- else --not Stand Alone cluster --- while current ~= stop.next do -- find base consonant --- if consonant[current.char] and not ( current ~= stop and halant[current.next.char] and current.next ~= stop and zwj[current.next.next.char] ) then --- if not firstcons then firstcons = current end --- if not ( has_attribute(current, state) == 7 or has_attribute(current, state) == 8 or has_attribute(current, state) == 9 ) then base = current end --check whether consonant has below-base or post-base form or is pre-base reordering Ra --- end --- current = current.next --- end --- if not base then --- base = firstcons --- end --- end --- --- if not base then --- if has_attribute(start, state) == 5 then unset_attribute(start, state) end --- return head, stop --- else --- if has_attribute(base, state) then unset_attribute(base, state) end --- basepos = base --- end --- if not halfpos then halfpos = base end --- if not subpos then subpos = base end --- if not postpos then postpos = subpos or base end --- --- --Matra characters are classified and reordered by which consonant in a conjunct they have affinity for --- local moved = { } --- current = start --- while current ~= stop.next do --- local char, target, cn = locl[current] or current.char, nil, current.next --- if not moved[current] and dependent_vowel[char] then --- if pre_mark[char] then -- Before first half form in the syllable --- moved[current] = true --- if current.prev then current.prev.next = current.next end --- if current.next then current.next.prev = current.prev end --- if current == stop then stop = current.prev end --- if halfpos == start then --- if head == start then head = current end --- start = current --- end --- if halfpos.prev then halfpos.prev.next = current end --- current.prev = halfpos.prev --- halfpos.prev = current --- current.next = halfpos --- halfpos = current --- elseif above_mark[char] then -- After main consonant --- target = basepos --- if subpos == basepos then subpos = current end --- if postpos == basepos then postpos = current end --- basepos = current --- elseif below_mark[char] then -- After subjoined consonants --- target = subpos --- if postpos == subpos then postpos = current end --- subpos = current --- elseif post_mark[char] then -- After post-form consonant --- target = postpos --- postpos = current --- end --- if ( above_mark[char] or below_mark[char] or post_mark[char] ) and current.prev ~= target then --- if current.prev then current.prev.next = current.next end --- if current.next then current.next.prev = current.prev end --- if current == stop then stop = current.prev end --- if target.next then target.next.prev = current end --- current.next = target.next --- target.next = current --- current.prev = target --- end --- end --- current = cn --- end --- --- --Reorder marks to canonical order: Adjacent nukta and halant or nukta and vedic sign are always repositioned if necessary, so that the nukta is first. 
--- local current, c = start, nil --- while current ~= stop do --- if halant[current.char] or stress_tone_mark[current.char] then --- if not c then c = current end --- else --- c = nil --- end --- if c and nukta[current.next.char] then --- if head == c then head = current.next end --- if stop == current.next then stop = current end --- if c.prev then c.prev.next = current.next end --- current.next.prev = c.prev --- current.next = current.next.next --- if current.next.next then current.next.next.prev = current end --- c.prev = current.next --- current.next.next = c --- end --- if stop == current then break end --- current = current.next --- end --- --- if nbsp[base.char] then --- head = node.remove(head, base) --- node.free(base) --- end --- --- return head, stop --- end --- --- function fonts.analyzers.methods.deva(head,font,attr) --- local orighead = head --- local current, start, done = head, true, false --- while current do --- if current.id == glyph and current.subtype<256 and current.font == font then --- done = true --- local syllablestart, syllableend = current, nil --- --- local c = current --Checking Stand Alone cluster (this behavior is copied from dev2) --- if ra[c.char] and c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] and c.next.next and c.next.next.id == glyph and c.next.next.subtype<256 and c.next.next.font == font then c = c.next.next end --- if nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or --- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and --- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] ) --- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)] --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end --- local n = c.next --- if n and n.id == glyph and n.subtype<256 and n.font == font then --- local ni = n.next --- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end --- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end --- end --- while c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] do c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end --- current = c.next --- syllableend = c --- if syllablestart ~= syllableend then --- head, current = deva_reorder(head, syllablestart,syllableend,font,attr) --- current = current.next --- end --- elseif consonant[current.char] then -- syllable containing consonant --- 
prevc = true --- while prevc do --- prevc = false --- local n = current.next --- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end --- if n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] then --- local n = n.next --- if n and n.id == glyph and n.subtype<256 and n.font == font and ( zwj[n.char] or zwnj[n.char] ) then n = n.next end --- if n and n.id == glyph and n.subtype<256 and n.font == font and consonant[n.char] then --- prevc = true --- current = n --- end --- end --- end --- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and nukta[current.next.char] then current = current.next end -- nukta (not specified in Microsft Devanagari OpenType specification) --- syllableend = current --- current = current.next --- if current and current.id == glyph and current.subtype<256 and current.font == font and halant[current.char] then -- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H --- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end --- syllableend = current --- current = current.next --- else -- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM] --- if current and current.id == glyph and current.subtype<256 and current.font == font and dependent_vowel[current.char] then --- syllableend = current --- current = current.next --- end --- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then --- syllableend = current --- current = current.next --- end --- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then --- syllableend = current --- current = current.next --- end --- end --- if syllablestart ~= syllableend then --- head, current = deva_reorder(head,syllablestart,syllableend,font,attr) --- current = current.next --- end --- elseif current.id == glyph and current.subtype<256 and current.font == font and independent_vowel[current.char] then -- syllable without consonants: VO + [VM] + [SM] --- syllableend = current --- current = current.next --- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then --- syllableend = current --- current = current.next --- end --- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then --- syllableend = current --- current = current.next --- end --- else -- Syntax error --- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then --- local n = node.copy(current) --- if pre_mark[current.char] then --- n.char = dotted_circle --- else --- current.char = dotted_circle --- end --- head, current = node.insert_after(head, current, n) --- end --- current = current.next --- end --- else --- current = current.next --- end --- start = false --- end --- --- return head, done --- end --- --- function fonts.analyzers.methods.dev2(head,font,attr) --- local current, start, done, syl_nr = head, true, false, 0 --- while current do --- local syllablestart, syllableend = nil, nil --- if current.id == glyph and current.subtype<256 and current.font == font then --- syllablestart = current --- done = true --- local c, n = current, 
current.next --- if ra[current.char] and n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] and n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font then c = n.next end --- if independent_vowel[c.char] then --Vowel-based syllable: [Ra+H]+V+[N]+[<[]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)] --- n = c.next --- local ni, nii = nil, nil --- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end --- if n and n.id == glyph and n.subtype<256 and n.font == font then local ni = n.next end --- if ni and ni.id == glyph and ni.subtype<256 and ni.font == font and ni.next and ni.next.id == glyph and ni.next.subtype<256 and ni.next.font == font then --- nii = ni.next --- if zwj[ni.char] and consonant[nii.char] then --- c = nii --- elseif (zwj[ni.char] or zwnj[ni.char]) and halant[nii.char] and nii.next and nii.next.id == glyph and nii.next.subtype<256 and nii.next.font == font and consonant[nii.next.char] then --- c = nii.next --- end --- end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end --- current = c --- syllableend = c --- elseif nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or --- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and --- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] ) --- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)] --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end --- n = c.next --- if n and n.id == glyph and n.subtype<256 and n.font == font then --- local ni = n.next --- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end --- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end --- end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and 
stress_tone_mark[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end --- current = c --- syllableend = c --- elseif consonant[current.char] then --Consonant syllable: {C+[N]+]|+H>} + C+[N]+[A] + [< H+[] | {M}+[N]+[H]>]+[SM]+[(VD)] --- c = current --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end --- n = c --- while n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( halant[n.next.char] or zwnj[n.next.char] or zwj[n.next.char] ) do --- if halant[n.next.char] then --- n = n.next --- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( zwnj[n.next.char] or zwj[n.next.char] ) then n = n.next end --- else --- if n.next.next and n.next.next.id == glyph and n.next.next.subtype<256 and n.next.next.font == font and halant[n.next.next.char] then n = n.next.next end --- end --- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and consonant[n.next.char] then --- n = n.next --- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and nukta[n.next.char] then n = n.next end --- c = n --- else --- break --- end --- end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and anudatta[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then --- c = c.next --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and ( zwnj[c.next.char] or zwj[c.next.char] ) then c = c.next end --- else --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end --- end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end --- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end --- current = c --- syllableend = c --- end --- end --- --- if syllableend then --- syl_nr = syl_nr + 1 --- c = syllablestart --- while c ~= syllableend.next do --- set_attribute(c,sylnr,syl_nr) --- c = c.next --- end --- end --- if syllableend and syllablestart ~= syllableend then --- head, current = dev2_reorder(head,syllablestart,syllableend,font,attr) --- end --- --- if not syllableend and not has_attribute(current, state) and current.id == glyph and current.subtype<256 and current.font == font then -- Syntax error --- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then --- local n = node.copy(current) --- if pre_mark[current.char] then --- n.char = dotted_circle --- else --- current.char = dotted_circle --- end --- head, current = node.insert_after(head, current, n) --- end --- end --- --- start = false --- current = current.next --- end --- --- return head, done --- end --- --- function 
otf.handlers.dev2_reorder_matras(start,kind,lookupname,replacement) --- return dev2_reorder_matras(start,kind,lookupname,replacement) --- end --- --- function otf.handlers.dev2_reorder_reph(start,kind,lookupname,replacement) --- return dev2_reorder_reph(start,kind,lookupname,replacement) --- end --- --- function otf.handlers.dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement) --- return dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement) --- end --- --- function otf.handlers.remove_joiners(start,kind,lookupname,replacement) --- return remove_joiners(start,kind,lookupname,replacement) --- end +-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- +-- We keep the original around for a while so that we can check it -- +-- when the above code does it wrong (data tables are not included). -- +-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + +-- author : Kai Eigner, TAT Zetwerk +-- copyright : TAT Zetwerk +-- comment : see font-odv.lua for current implementation + +-- local state = attributes.private('state') +-- local sylnr = attributes.private('syllabe') +-- +-- local function install_dev(tfmdata) +-- local features = tfmdata.resources.features +-- local sequences = tfmdata.resources.sequences +-- +-- local insertpos = 1 +-- for s=1,#sequences do -- classify chars +-- for k in pairs(basic_shaping_forms) do +-- if sequences[s].features and ( sequences[s].features[k] or sequences[s].features.locl ) then insertpos = s + 1 end +-- end +-- end +-- +-- features.gsub["dev2_reorder_matras"] = { ["dev2"] = { ["dflt"] = true } } +-- features.gsub["dev2_reorder_reph"] = { ["dev2"] = { ["dflt"] = true } } +-- features.gsub["dev2_reorder_pre_base_reordering_consonants"] = { ["dev2"] = { ["dflt"] = true } } +-- features.gsub["remove_joiners"] = { ["deva"] = { ["dflt"] = true }, ["dev2"] = { ["dflt"] = true } } +-- +-- local sequence_dev2_reorder_matras = { +-- chain = 0, +-- features = { dev2_reorder_matras = { dev2 = { dflt = true } } }, +-- flags = { false, false, false, false }, +-- name = "dev2_reorder_matras", +-- subtables = { "dev2_reorder_matras" }, +-- type = "dev2_reorder_matras", +-- } +-- local sequence_dev2_reorder_reph = { +-- chain = 0, +-- features = { dev2_reorder_reph = { dev2 = { dflt = true } } }, +-- flags = { false, false, false, false }, +-- name = "dev2_reorder_reph", +-- subtables = { "dev2_reorder_reph" }, +-- type = "dev2_reorder_reph", +-- } +-- local sequence_dev2_reorder_pre_base_reordering_consonants = { +-- chain = 0, +-- features = { dev2_reorder_pre_base_reordering_consonants = { dev2 = { dflt = true } } }, +-- flags = { false, false, false, false }, +-- name = "dev2_reorder_pre_base_reordering_consonants", +-- subtables = { "dev2_reorder_pre_base_reordering_consonants" }, +-- type = "dev2_reorder_pre_base_reordering_consonants", +-- } +-- local sequence_remove_joiners = { +-- chain = 0, +-- features = { remove_joiners = { deva = { dflt = true }, dev2 = { dflt = true } } }, +-- flags = { false, false, false, false }, +-- name = "remove_joiners", +-- subtables = { "remove_joiners" }, +-- type = "remove_joiners", +-- } +-- table.insert(sequences, insertpos, sequence_dev2_reorder_pre_base_reordering_consonants) +-- table.insert(sequences, insertpos, sequence_dev2_reorder_reph) +-- table.insert(sequences, insertpos, sequence_dev2_reorder_matras) +-- table.insert(sequences, insertpos, sequence_remove_joiners) +-- end +-- +-- local function deva_reorder(head,start,stop,font,attr) +-- 
local tfmdata = fontdata[font] +-- local lookuphash = tfmdata.resources.lookuphash +-- local sequences = tfmdata.resources.sequences +-- +-- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features +-- +-- local sharedfeatures = tfmdata.shared.features +-- sharedfeatures["remove_joiners"] = true +-- local datasets = otf.dataset(tfmdata,font,attr) +-- +-- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true } +-- +-- local current, n, base, firstcons, lastcons, basefound = start, start.next, nil, nil, nil, false +-- local reph, vattu = false, false +-- for s=1,#sequences do +-- local dataset = datasets[s] +-- featurevalue = dataset and dataset[1] +-- if featurevalue and dataset[4] == "rphf" then reph = true end +-- if featurevalue and dataset[4] == "blwf" then vattu = true end +-- end +-- if ra[start.char] and halant[n.char] and reph then -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants +-- if n == stop then return head, stop end +-- if zwj[n.next.char] then +-- current = start +-- else +-- current = n.next +-- set_attribute(start,state,5) -- rphf +-- end +-- end +-- +-- if nbsp[current.char] then --Stand Alone cluster +-- if current == stop then +-- stop = stop.prev +-- head = node.remove(head, current) +-- node.free(current) +-- return head, stop +-- else +-- base, firstcons, lastcons = current, current, current +-- current = current.next +-- if current ~= stop then +-- if nukta[current.char] then current = current.next end +-- if zwj[current.char] then +-- if current ~= stop and current.next ~= stop and halant[current.next.char] then +-- current = current.next +-- local tmp = current.next.next +-- local changestop = current.next == stop +-- local tempcurrent = node.copy(current.next) +-- tempcurrent.next = node.copy(current) +-- tempcurrent.next.prev = tempcurrent +-- set_attribute(tempcurrent,state,8) --blwf +-- tempcurrent = nodes.handlers.characters(tempcurrent) +-- unset_attribute(tempcurrent,state) +-- if current.next.char == tempcurrent.char then +-- node.flush_list(tempcurrent) +-- local n = node.copy(current) +-- current.char = dotted_circle +-- head = node.insert_after(head, current, n) +-- else +-- current.char = tempcurrent.char -- (assumes that result of blwf consists of one node) +-- local freenode = current.next +-- current.next = tmp +-- tmp.prev = current +-- node.free(freenode) +-- node.flush_list(tempcurrent) +-- if changestop then stop = current end +-- end +-- end +-- end +-- end +-- end +-- end +-- +-- while not basefound do -- find base consonant +-- if consonant[current.char] then +-- set_attribute(current, state, 6) -- half +-- if not firstcons then firstcons = current end +-- lastcons = current +-- if not base then +-- base = current +-- else --check whether consonant has below-base (or post-base) form +-- local baseform = true +-- for s=1,#sequences do +-- local sequence = sequences[s] +-- local dataset = datasets[s] +-- featurevalue = dataset and dataset[1] +-- if featurevalue and dataset[4] == "blwf" then +-- local subtables = sequence.subtables +-- for i=1,#subtables do +-- local lookupname = subtables[i] +-- local lookupcache = lookuphash[lookupname] +-- if lookupcache then +-- local lookupmatch = lookupcache[current.char] +-- if lookupmatch then +-- set_attribute(current, state, 8) -- blwf +-- baseform = false +-- end +-- end +-- end +-- end +-- end +-- if baseform then base = current end +-- end +-- end +-- basefound = current == stop +-- 
current = current.next +-- end +-- if base ~= lastcons then -- if base consonant is not last one then move halant from base consonant to last one +-- n = base.next +-- if nukta[n.char] then n = n.next end +-- if halant[n.char] then +-- if lastcons ~= stop then +-- local ln = lastcons.next +-- if nukta[ln.char] then lastcons = ln end +-- end +-- local np, nn, ln = n.prev, n.next, lastcons.next +-- np.next = n.next +-- nn.prev = n.prev +-- lastcons.next = n +-- if ln then ln.prev = n end +-- n.next = ln +-- n.prev = lastcons +-- if lastcons == stop then stop = n end +-- end +-- end +-- +-- n = start.next +-- if ra[start.char] and halant[n.char] and not ( n ~= stop and ( zwj[n.next.char] or zwnj[n.next.char] ) ) then -- if syllable starts with Ra + H then move this combination so that it follows either: the post-base 'matra' (if any) or the base consonant +-- local matra = base +-- if base ~= stop and dependent_vowel[base.next.char] then matra = base.next end +-- local sp, nn, mn = start.prev, n.next, matra.next +-- if sp then sp.next = nn end +-- nn.prev = sp +-- matra.next = start +-- start.prev = matra +-- n.next = mn +-- if mn then mn.prev = n end +-- if head == start then head = nn end +-- start = nn +-- if matra == stop then stop = n end +-- end +-- +-- local current = start +-- while current ~= stop do +-- if halant[current.next.char] and current.next ~= stop and zwnj[current.next.next.char] then unset_attribute(current, state) end +-- current = current.next +-- end +-- +-- if has_attribute(base, state) and base ~= stop and halant[base.next.char] and not ( base.next ~= stop and zwj[base.next.next.char] ) then unset_attribute(base, state) end +-- +-- local current, allreordered, moved = start, false, { [base] = true } +-- local a, b, p, bn = base, base, base, base.next +-- if base ~= stop and nukta[bn.char] then a, b, p = bn, bn, bn end +-- while not allreordered do +-- local c, n, l = current, current.next, nil --current is always consonant +-- if c ~= stop and nukta[n.char] then c = n n = n.next end +-- if c ~= stop and halant[n.char] then c = n n = n.next end +-- while c ~= stop and dependent_vowel[n.char] do c = n n = n.next end +-- if c ~= stop and vowel_modifier[n.char] then c = n n = n.next end +-- if c ~= stop and stress_tone_mark[n.char] then c = n n = n.next end +-- local bp, cn = firstcons.prev, current.next +-- while cn ~= c.next do -- move pre-base matras... 
+-- if pre_mark[cn.char] then +-- if bp then bp.next = cn end +-- cn.prev.next = cn.next +-- if cn.next then cn.next.prev = cn.prev end +-- if cn == stop then stop = cn.prev end +-- cn.prev = bp +-- cn.next = firstcons +-- firstcons.prev = cn +-- if firstcons == start then +-- if head == start then head = cn end +-- start = cn +-- end +-- break +-- end +-- cn = cn.next +-- end +-- allreordered = c == stop +-- current = c.next +-- end +-- +-- if reph or vattu then +-- local current, cns = start, nil +-- while current ~= stop do +-- local c, n = current, current.next +-- if ra[current.char] and halant[n.char] then +-- c, n = n, n.next +-- local b, bn = base, base +-- while bn ~= stop do +-- if dependent_vowel[bn.next.char] then b = bn.next end +-- bn = bn.next +-- end +-- if has_attribute(current,state,attribute) == 5 then -- position Reph (Ra + H) after post-base 'matra' (if any) since these become marks on the 'matra', not on the base glyph +-- if b ~= current then +-- if current == start then +-- if head == start then head = n end +-- start = n +-- end +-- if b == stop then stop = c end +-- if current.prev then current.prev.next = n end +-- if n then n.prev = current.prev end +-- c.next = b.next +-- if b.next then b.next.prev = c end +-- b.next = current +-- current.prev = b +-- end +-- elseif cns and cns.next ~= current then -- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants) +-- local cp, cnsn = current.prev, cns.next +-- if cp then cp.next = n end +-- if n then n.prev = cp end +-- cns.next = current +-- current.prev = cns +-- c.next = cnsn +-- if cnsn then cnsn.prev = c end +-- if c == stop then stop = cp break end +-- current = n.prev +-- end +-- elseif consonant[current.char] or nbsp[current.char] then +-- cns = current +-- if halant[cns.next.char] then cns = cns.next end +-- end +-- current = current.next +-- end +-- end +-- +-- if nbsp[base.char] then +-- head = node.remove(head, base) +-- node.free(base) +-- end +-- +-- return head, stop +-- end +-- +-- function dev2_reorder_matras(start,kind,lookupname,replacement) +-- local current = start +-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do +-- if halant[current.char] and not has_attribute(current, state) then +-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end +-- local sn = start.next +-- start.next.prev = start.prev +-- if start.prev then start.prev.next = start.next end +-- if current.next then current.next.prev = start end +-- start.next = current.next +-- current.next = start +-- start.prev = current +-- start = sn +-- break +-- end +-- current = current.next +-- end +-- return start, true +-- end +-- +-- function dev2_reorder_reph(start,kind,lookupname,replacement) +-- local current, sn = start.next, nil +-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 2 +-- if halant[current.char] and not has_attribute(current, state) then +-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == 
has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end +-- sn = start.next +-- start.next.prev = start.prev +-- if start.prev then start.prev.next = start.next end +-- if current.next then current.next.prev = start end +-- start.next = current.next +-- current.next = start +-- start.prev = current +-- start = sn +-- break +-- end +-- current = current.next +-- end +-- if not sn then +-- current = start.next +-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 4 +-- if has_attribute(current, state) == 9 then --post-base +-- sn = start.next +-- start.next.prev = start.prev +-- if start.prev then start.prev.next = start.next end +-- start.prev = current.prev +-- current.prev.next = start +-- start.next = current +-- current.prev = start +-- start = sn +-- break +-- end +-- current = current.next +-- end +-- end +-- if not sn then +-- current = start.next +-- local c = nil +-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 5 +-- if not c and ( above_mark[current.char] or below_mark[current.char] or post_mark[current.char] ) and ReorderClass[current.char] ~= "after subscript" then c = current end +-- current = current.next +-- end +-- if c then +-- sn = start.next +-- start.next.prev = start.prev +-- if start.prev then start.prev.next = start.next end +-- start.prev = c.prev +-- c.prev.next = start +-- start.next = c +-- c.prev = start +-- start = sn +-- end +-- end +-- if not sn then +-- current = start +-- while current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) do --step 6 +-- current = current.next +-- end +-- if start ~= current then +-- sn = start.next +-- start.next.prev = start.prev +-- if start.prev then start.prev.next = start.next end +-- if current.next then current.next.prev = start end +-- start.next = current.next +-- current.next = start +-- start.prev = current +-- start = sn +-- end +-- end +-- return start, true +-- end +-- +-- function dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement) +-- local current, sn = start, nil +-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do +-- if halant[current.char] and not has_attribute(current, state) then +-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end +-- sn = start.next +-- start.next.prev = start.prev +-- if start.prev then start.prev.next = start.next end +-- if current.next then current.next.prev = start end +-- start.next = current.next +-- current.next = start +-- start.prev = current +-- start = sn +-- break +-- end +-- current = current.next +-- end +-- if not sn then +-- current = start.next +-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do +-- if not consonant[current.char] and has_attribute(current, state) then --main +-- sn = 
start.next +-- start.next.prev = start.prev +-- if start.prev then start.prev.next = start.next end +-- start.prev = current.prev +-- current.prev.next = start +-- start.next = current +-- current.prev = start +-- start = sn +-- break +-- end +-- current = current.next +-- end +-- end +-- return start, true +-- end +-- +-- function remove_joiners(start,kind,lookupname,replacement) +-- local stop = start.next +-- while stop and stop.id == glyph and stop.subtype<256 and stop.font == start.font and (zwj[stop.char] or zwnj[stop.char]) do stop = stop.next end +-- if stop then stop.prev.next = nil stop.prev = start.prev end +-- if start.prev then start.prev.next = stop end +-- node.flush_list(start) +-- return stop, true +-- end +-- +-- local function dev2_reorder(head,start,stop,font,attr) +-- local tfmdata = fontdata[font] +-- local lookuphash = tfmdata.resources.lookuphash +-- local sequences = tfmdata.resources.sequences +-- +-- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features +-- +-- local sharedfeatures = tfmdata.shared.features +-- sharedfeatures["dev2_reorder_matras"] = true +-- sharedfeatures["dev2_reorder_reph"] = true +-- sharedfeatures["dev2_reorder_pre_base_reordering_consonants"] = true +-- sharedfeatures["remove_joiners"] = true +-- local datasets = otf.dataset(tfmdata,font,attr) +-- +-- local reph, pre_base_reordering_consonants = false, nil +-- local halfpos, basepos, subpos, postpos = nil, nil, nil, nil +-- local locl = { } +-- +-- for s=1,#sequences do -- classify chars +-- local sequence = sequences[s] +-- local dataset = datasets[s] +-- featurevalue = dataset and dataset[1] +-- if featurevalue and dataset[4] then +-- local subtables = sequence.subtables +-- for i=1,#subtables do +-- local lookupname = subtables[i] +-- local lookupcache = lookuphash[lookupname] +-- if lookupcache then +-- if dataset[4] == "rphf" then +-- if dataset[3] ~= 0 then --rphf is result of of chain +-- else +-- reph = lookupcache[0x0930] and lookupcache[0x0930][0x094D] and lookupcache[0x0930][0x094D]["ligature"] +-- end +-- end +-- if dataset[4] == "pref" and not pre_base_reordering_consonants then +-- for k, v in pairs(lookupcache[0x094D]) do +-- pre_base_reordering_consonants[k] = v and v["ligature"] --ToDo: reph might also be result of chain +-- end +-- end +-- local current = start +-- while current ~= stop.next do +-- if dataset[4] == "locl" then locl[current] = lookupcache[current.char] end --ToDo: locl might also be result of chain +-- if current ~= stop then +-- local c, n = locl[current] or current.char, locl[current.next] or current.next.char +-- if dataset[4] == "rphf" and lookupcache[c] and lookupcache[c][n] then --above-base: rphf Consonant + Halant +-- if current.next ~= stop and ( zwj[current.next.next.char] or zwnj[current.next.next.char] ) then --ZWJ and ZWNJ prevent creation of reph +-- current = current.next +-- elseif current == start then +-- set_attribute(current,state,5) +-- end +-- current = current.next +-- end +-- if dataset[4] == "half" and lookupcache[c] and lookupcache[c][n] then --half forms: half Consonant + Halant +-- if current.next ~= stop and zwnj[current.next.next.char] then --ZWNJ prevent creation of half +-- current = current.next +-- else +-- set_attribute(current,state,6) +-- if not halfpos then halfpos = current end +-- end +-- current = current.next +-- end +-- if dataset[4] == "pref" and lookupcache[c] and lookupcache[c][n] then --pre-base: pref Halant + Consonant +-- set_attribute(current,state,7) +-- 
set_attribute(current.next,state,7) +-- current = current.next +-- end +-- if dataset[4] == "blwf" and lookupcache[c] and lookupcache[c][n] then --below-base: blwf Halant + Consonant +-- set_attribute(current,state,8) +-- set_attribute(current.next,state,8) +-- current = current.next +-- subpos = current +-- end +-- if dataset[4] == "pstf" and lookupcache[c] and lookupcache[c][n] then --post-base: pstf Halant + Consonant +-- set_attribute(current,state,9) +-- set_attribute(current.next,state,9) +-- current = current.next +-- postpos = current +-- end +-- end +-- current = current.next +-- end +-- end +-- end +-- end +-- end +-- +-- lookuphash["dev2_reorder_matras"] = pre_mark +-- lookuphash["dev2_reorder_reph"] = { [reph] = true } +-- lookuphash["dev2_reorder_pre_base_reordering_consonants"] = pre_base_reordering_consonants or { } +-- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true } +-- +-- local current, base, firstcons = start, nil, nil +-- if has_attribute(start,state) == 5 then current = start.next.next end -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants +-- +-- if current ~= stop.next and nbsp[current.char] then --Stand Alone cluster +-- if current == stop then +-- stop = stop.prev +-- head = node.remove(head, current) +-- node.free(current) +-- return head, stop +-- else +-- base = current +-- current = current.next +-- if current ~= stop then +-- if nukta[current.char] then current = current.next end +-- if zwj[current.char] then +-- if current ~= stop and current.next ~= stop and halant[current.next.char] then +-- current = current.next +-- local tmp = current.next.next +-- local changestop = current.next == stop +-- current.next.next = nil +-- set_attribute(current,state,7) --pref +-- current = nodes.handlers.characters(current) +-- set_attribute(current,state,8) --blwf +-- current = nodes.handlers.characters(current) +-- set_attribute(current,state,9) --pstf +-- current = nodes.handlers.characters(current) +-- unset_attribute(current,state) +-- if halant[current.char] then +-- current.next.next = tmp +-- local nc = node.copy(current) +-- current.char = dotted_circle +-- head = node.insert_after(head, current, nc) +-- else +-- current.next = tmp -- (assumes that result of pref, blwf, or pstf consists of one node) +-- if changestop then stop = current end +-- end +-- end +-- end +-- end +-- end +-- else --not Stand Alone cluster +-- while current ~= stop.next do -- find base consonant +-- if consonant[current.char] and not ( current ~= stop and halant[current.next.char] and current.next ~= stop and zwj[current.next.next.char] ) then +-- if not firstcons then firstcons = current end +-- if not ( has_attribute(current, state) == 7 or has_attribute(current, state) == 8 or has_attribute(current, state) == 9 ) then base = current end --check whether consonant has below-base or post-base form or is pre-base reordering Ra +-- end +-- current = current.next +-- end +-- if not base then +-- base = firstcons +-- end +-- end +-- +-- if not base then +-- if has_attribute(start, state) == 5 then unset_attribute(start, state) end +-- return head, stop +-- else +-- if has_attribute(base, state) then unset_attribute(base, state) end +-- basepos = base +-- end +-- if not halfpos then halfpos = base end +-- if not subpos then subpos = base end +-- if not postpos then postpos = subpos or base end +-- +-- --Matra characters are classified and reordered by which consonant in a conjunct they have affinity for +-- local 
moved = { } +-- current = start +-- while current ~= stop.next do +-- local char, target, cn = locl[current] or current.char, nil, current.next +-- if not moved[current] and dependent_vowel[char] then +-- if pre_mark[char] then -- Before first half form in the syllable +-- moved[current] = true +-- if current.prev then current.prev.next = current.next end +-- if current.next then current.next.prev = current.prev end +-- if current == stop then stop = current.prev end +-- if halfpos == start then +-- if head == start then head = current end +-- start = current +-- end +-- if halfpos.prev then halfpos.prev.next = current end +-- current.prev = halfpos.prev +-- halfpos.prev = current +-- current.next = halfpos +-- halfpos = current +-- elseif above_mark[char] then -- After main consonant +-- target = basepos +-- if subpos == basepos then subpos = current end +-- if postpos == basepos then postpos = current end +-- basepos = current +-- elseif below_mark[char] then -- After subjoined consonants +-- target = subpos +-- if postpos == subpos then postpos = current end +-- subpos = current +-- elseif post_mark[char] then -- After post-form consonant +-- target = postpos +-- postpos = current +-- end +-- if ( above_mark[char] or below_mark[char] or post_mark[char] ) and current.prev ~= target then +-- if current.prev then current.prev.next = current.next end +-- if current.next then current.next.prev = current.prev end +-- if current == stop then stop = current.prev end +-- if target.next then target.next.prev = current end +-- current.next = target.next +-- target.next = current +-- current.prev = target +-- end +-- end +-- current = cn +-- end +-- +-- --Reorder marks to canonical order: Adjacent nukta and halant or nukta and vedic sign are always repositioned if necessary, so that the nukta is first. 
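-- editorial sketch (not part of the upstream patch): the canonical-order rule stated in
-- the comment above boils down to relinking two adjacent glyph nodes in the doubly
-- linked node list so that the nukta ends up in front of the halant or vedic sign. A
-- minimal, hypothetical illustration follows; swap_with_next is not a real ConTeXt
-- helper, but the prev/next/char fields and the halant/nukta lookup tables are the ones
-- already used in this file.

local function swap_with_next(head,a) -- move a.next in front of a
    local b = a.next
    if not b then
        return head
    end
    local p, n = a.prev, b.next
    if p then p.next = b else head = b end -- b becomes head when a was the head
    b.prev = p
    b.next = a
    a.prev = b
    a.next = n
    if n then n.prev = a end
    return head
end

-- usage sketch: if halant[a.char] and a.next and nukta[a.next.char] then
--     head = swap_with_next(head,a)
-- end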
+-- local current, c = start, nil +-- while current ~= stop do +-- if halant[current.char] or stress_tone_mark[current.char] then +-- if not c then c = current end +-- else +-- c = nil +-- end +-- if c and nukta[current.next.char] then +-- if head == c then head = current.next end +-- if stop == current.next then stop = current end +-- if c.prev then c.prev.next = current.next end +-- current.next.prev = c.prev +-- current.next = current.next.next +-- if current.next.next then current.next.next.prev = current end +-- c.prev = current.next +-- current.next.next = c +-- end +-- if stop == current then break end +-- current = current.next +-- end +-- +-- if nbsp[base.char] then +-- head = node.remove(head, base) +-- node.free(base) +-- end +-- +-- return head, stop +-- end +-- +-- function fonts.analyzers.methods.deva(head,font,attr) +-- local orighead = head +-- local current, start, done = head, true, false +-- while current do +-- if current.id == glyph and current.subtype<256 and current.font == font then +-- done = true +-- local syllablestart, syllableend = current, nil +-- +-- local c = current --Checking Stand Alone cluster (this behavior is copied from dev2) +-- if ra[c.char] and c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] and c.next.next and c.next.next.id == glyph and c.next.next.subtype<256 and c.next.next.font == font then c = c.next.next end +-- if nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or +-- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and +-- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] ) +-- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)] +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end +-- local n = c.next +-- if n and n.id == glyph and n.subtype<256 and n.font == font then +-- local ni = n.next +-- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end +-- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end +-- end +-- while c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] do c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end +-- current = c.next +-- syllableend = c +-- if syllablestart ~= syllableend then +-- head, current = deva_reorder(head, syllablestart,syllableend,font,attr) +-- current = current.next +-- end +-- elseif consonant[current.char] then -- syllable containing consonant +-- 
prevc = true +-- while prevc do +-- prevc = false +-- local n = current.next +-- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end +-- if n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] then +-- local n = n.next +-- if n and n.id == glyph and n.subtype<256 and n.font == font and ( zwj[n.char] or zwnj[n.char] ) then n = n.next end +-- if n and n.id == glyph and n.subtype<256 and n.font == font and consonant[n.char] then +-- prevc = true +-- current = n +-- end +-- end +-- end +-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and nukta[current.next.char] then current = current.next end -- nukta (not specified in Microsft Devanagari OpenType specification) +-- syllableend = current +-- current = current.next +-- if current and current.id == glyph and current.subtype<256 and current.font == font and halant[current.char] then -- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H +-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end +-- syllableend = current +-- current = current.next +-- else -- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM] +-- if current and current.id == glyph and current.subtype<256 and current.font == font and dependent_vowel[current.char] then +-- syllableend = current +-- current = current.next +-- end +-- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then +-- syllableend = current +-- current = current.next +-- end +-- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then +-- syllableend = current +-- current = current.next +-- end +-- end +-- if syllablestart ~= syllableend then +-- head, current = deva_reorder(head,syllablestart,syllableend,font,attr) +-- current = current.next +-- end +-- elseif current.id == glyph and current.subtype<256 and current.font == font and independent_vowel[current.char] then -- syllable without consonants: VO + [VM] + [SM] +-- syllableend = current +-- current = current.next +-- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then +-- syllableend = current +-- current = current.next +-- end +-- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then +-- syllableend = current +-- current = current.next +-- end +-- else -- Syntax error +-- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then +-- local n = node.copy(current) +-- if pre_mark[current.char] then +-- n.char = dotted_circle +-- else +-- current.char = dotted_circle +-- end +-- head, current = node.insert_after(head, current, n) +-- end +-- current = current.next +-- end +-- else +-- current = current.next +-- end +-- start = false +-- end +-- +-- return head, done +-- end +-- +-- function fonts.analyzers.methods.dev2(head,font,attr) +-- local current, start, done, syl_nr = head, true, false, 0 +-- while current do +-- local syllablestart, syllableend = nil, nil +-- if current.id == glyph and current.subtype<256 and current.font == font then +-- syllablestart = current +-- done = true +-- local c, n = current, 
current.next +-- if ra[current.char] and n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] and n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font then c = n.next end +-- if independent_vowel[c.char] then --Vowel-based syllable: [Ra+H]+V+[N]+[<[]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)] +-- n = c.next +-- local ni, nii = nil, nil +-- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end +-- if n and n.id == glyph and n.subtype<256 and n.font == font then local ni = n.next end +-- if ni and ni.id == glyph and ni.subtype<256 and ni.font == font and ni.next and ni.next.id == glyph and ni.next.subtype<256 and ni.next.font == font then +-- nii = ni.next +-- if zwj[ni.char] and consonant[nii.char] then +-- c = nii +-- elseif (zwj[ni.char] or zwnj[ni.char]) and halant[nii.char] and nii.next and nii.next.id == glyph and nii.next.subtype<256 and nii.next.font == font and consonant[nii.next.char] then +-- c = nii.next +-- end +-- end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end +-- current = c +-- syllableend = c +-- elseif nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or +-- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and +-- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] ) +-- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)] +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end +-- n = c.next +-- if n and n.id == glyph and n.subtype<256 and n.font == font then +-- local ni = n.next +-- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end +-- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end +-- end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and 
stress_tone_mark[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end +-- current = c +-- syllableend = c +-- elseif consonant[current.char] then --Consonant syllable: {C+[N]+]|+H>} + C+[N]+[A] + [< H+[] | {M}+[N]+[H]>]+[SM]+[(VD)] +-- c = current +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end +-- n = c +-- while n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( halant[n.next.char] or zwnj[n.next.char] or zwj[n.next.char] ) do +-- if halant[n.next.char] then +-- n = n.next +-- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( zwnj[n.next.char] or zwj[n.next.char] ) then n = n.next end +-- else +-- if n.next.next and n.next.next.id == glyph and n.next.next.subtype<256 and n.next.next.font == font and halant[n.next.next.char] then n = n.next.next end +-- end +-- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and consonant[n.next.char] then +-- n = n.next +-- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and nukta[n.next.char] then n = n.next end +-- c = n +-- else +-- break +-- end +-- end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and anudatta[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then +-- c = c.next +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and ( zwnj[c.next.char] or zwj[c.next.char] ) then c = c.next end +-- else +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end +-- end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end +-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end +-- current = c +-- syllableend = c +-- end +-- end +-- +-- if syllableend then +-- syl_nr = syl_nr + 1 +-- c = syllablestart +-- while c ~= syllableend.next do +-- set_attribute(c,sylnr,syl_nr) +-- c = c.next +-- end +-- end +-- if syllableend and syllablestart ~= syllableend then +-- head, current = dev2_reorder(head,syllablestart,syllableend,font,attr) +-- end +-- +-- if not syllableend and not has_attribute(current, state) and current.id == glyph and current.subtype<256 and current.font == font then -- Syntax error +-- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then +-- local n = node.copy(current) +-- if pre_mark[current.char] then +-- n.char = dotted_circle +-- else +-- current.char = dotted_circle +-- end +-- head, current = node.insert_after(head, current, n) +-- end +-- end +-- +-- start = false +-- current = current.next +-- end +-- +-- return head, done +-- end +-- +-- function 
otf.handlers.dev2_reorder_matras(start,kind,lookupname,replacement) +-- return dev2_reorder_matras(start,kind,lookupname,replacement) +-- end +-- +-- function otf.handlers.dev2_reorder_reph(start,kind,lookupname,replacement) +-- return dev2_reorder_reph(start,kind,lookupname,replacement) +-- end +-- +-- function otf.handlers.dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement) +-- return dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement) +-- end +-- +-- function otf.handlers.remove_joiners(start,kind,lookupname,replacement) +-- return remove_joiners(start,kind,lookupname,replacement) +-- end diff --git a/tex/context/base/font-otb.lua b/tex/context/base/font-otb.lua index 2a7b821ea..75bda383e 100644 --- a/tex/context/base/font-otb.lua +++ b/tex/context/base/font-otb.lua @@ -1,657 +1,657 @@ -if not modules then modules = { } end modules ['font-otb'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} -local concat = table.concat -local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip -local type, next, tonumber, tostring = type, next, tonumber, tostring -local lpegmatch = lpeg.match -local utfchar = utf.char - -local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end) -local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end) -local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end) -local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end) -local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end) -local trace_ligatures_detail = false trackers.register("otf.ligatures.detail", function(v) trace_ligatures_detail = v end) -local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end) -local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end) - -local report_prepare = logs.reporter("fonts","otf prepare") - -local fonts = fonts -local otf = fonts.handlers.otf - -local otffeatures = otf.features -local registerotffeature = otffeatures.register - -otf.defaultbasealternate = "none" -- first last - -local wildcard = "*" -local default = "dflt" - -local formatters = string.formatters -local f_unicode = formatters["%U"] -local f_uniname = formatters["%U (%s)"] -local f_unilist = formatters["% t (% t)"] - -local function gref(descriptions,n) - if type(n) == "number" then - local name = descriptions[n].name - if name then - return f_uniname(n,name) - else - return f_unicode(n) - end - elseif n then - local num, nam = { }, { } - for i=2,#n do - local ni = n[i] - if tonumber(ni) then -- first is likely a key - local di = descriptions[ni] - num[i] = f_unicode(ni) - nam[i] = di and di.name or "-" - end - end - return f_unilist(num,nam) - else - return "" - end -end - -local function cref(feature,lookupname) - if lookupname then - return formatters["feature %a, lookup %a"](feature,lookupname) - else - return formatters["feature %a"](feature) - end -end - -local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment) - report_prepare("%s: base alternate %s => %s (%S => 
%S)", - cref(feature,lookupname), - gref(descriptions,unicode), - replacement and gref(descriptions,replacement), - value, - comment) -end - -local function report_substitution(feature,lookupname,descriptions,unicode,substitution) - report_prepare("%s: base substitution %s => %S", - cref(feature,lookupname), - gref(descriptions,unicode), - gref(descriptions,substitution)) -end - -local function report_ligature(feature,lookupname,descriptions,unicode,ligature) - report_prepare("%s: base ligature %s => %S", - cref(feature,lookupname), - gref(descriptions,ligature), - gref(descriptions,unicode)) -end - -local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value) - report_prepare("%s: base kern %s + %s => %S", - cref(feature,lookupname), - gref(descriptions,unicode), - gref(descriptions,otherunicode), - value) -end - -local basemethods = { } -local basemethod = "" - -local function applybasemethod(what,...) - local m = basemethods[basemethod][what] - if m then - return m(...) - end -end - --- We need to make sure that luatex sees the difference between --- base fonts that have different glyphs in the same slots in fonts --- that have the same fullname (or filename). LuaTeX will merge fonts --- eventually (and subset later on). If needed we can use a more --- verbose name as long as we don't use <()<>[]{}/%> and the length --- is < 128. - -local basehash, basehashes, applied = { }, 1, { } - -local function registerbasehash(tfmdata) - local properties = tfmdata.properties - local hash = concat(applied," ") - local base = basehash[hash] - if not base then - basehashes = basehashes + 1 - base = basehashes - basehash[hash] = base - end - properties.basehash = base - properties.fullname = properties.fullname .. "-" .. base - -- report_prepare("fullname base hash '%a, featureset %a",tfmdata.properties.fullname,hash) - applied = { } -end - -local function registerbasefeature(feature,value) - applied[#applied+1] = feature .. "=" .. tostring(value) -end - --- The original basemode ligature builder used the names of components --- and did some expression juggling to get the chain right. The current --- variant starts with unicodes but still uses names to make the chain. --- This is needed because we have to create intermediates when needed --- but use predefined snippets when available. To some extend the --- current builder is more stupid but I don't worry that much about it --- as ligatures are rather predicatable. --- --- Personally I think that an ff + i == ffi rule as used in for instance --- latin modern is pretty weird as no sane person will key that in and --- expect a glyph for that ligature plus the following character. Anyhow, --- as we need to deal with this, we do, but no guarantes are given. --- --- latin modern dejavu --- --- f+f 102 102 102 102 --- f+i 102 105 102 105 --- f+l 102 108 102 108 --- f+f+i 102 102 105 --- f+f+l 102 102 108 102 102 108 --- ff+i 64256 105 64256 105 --- ff+l 64256 108 --- --- As you can see here, latin modern is less complete than dejavu but --- in practice one will not notice it. --- --- The while loop is needed because we need to resolve for instance --- pseudo names like hyphen_hyphen to endash so in practice we end --- up with a bit too many definitions but the overhead is neglectable. --- --- Todo: if changed[first] or changed[second] then ... 
end - -local trace = false - -local function finalize_ligatures(tfmdata,ligatures) - local nofligatures = #ligatures - if nofligatures > 0 then - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - local resources = tfmdata.resources - local unicodes = resources.unicodes - local private = resources.private - local alldone = false - while not alldone do - local done = 0 - for i=1,nofligatures do - local ligature = ligatures[i] - if ligature then - local unicode, lookupdata = ligature[1], ligature[2] - if trace then - trace_ligatures_detail("building % a into %a",lookupdata,unicode) - end - local size = #lookupdata - local firstcode = lookupdata[1] -- [2] - local firstdata = characters[firstcode] - local okay = false - if firstdata then - local firstname = "ctx_" .. firstcode - for i=1,size-1 do -- for i=2,size-1 do - local firstdata = characters[firstcode] - if not firstdata then - firstcode = private - if trace then - trace_ligatures_detail("defining %a as %a",firstname,firstcode) - end - unicodes[firstname] = firstcode - firstdata = { intermediate = true, ligatures = { } } - characters[firstcode] = firstdata - descriptions[firstcode] = { name = firstname } - private = private + 1 - end - local target - local secondcode = lookupdata[i+1] - local secondname = firstname .. "_" .. secondcode - if i == size - 1 then - target = unicode - if not unicodes[secondname] then - unicodes[secondname] = unicode -- map final ligature onto intermediates - end - okay = true - else - target = unicodes[secondname] - if not target then - break - end - end - if trace then - trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target) - end - local firstligs = firstdata.ligatures - if firstligs then - firstligs[secondcode] = { char = target } - else - firstdata.ligatures = { [secondcode] = { char = target } } - end - firstcode = target - firstname = secondname - end - end - if okay then - ligatures[i] = false - done = done + 1 - end - end - end - alldone = done == 0 - end - if trace then - for k, v in next, characters do - if v.ligatures then table.print(v,k) end - end - end - tfmdata.resources.private = private - end -end - -local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - local resources = tfmdata.resources - local changed = tfmdata.changed - local unicodes = resources.unicodes - local lookuphash = resources.lookuphash - local lookuptypes = resources.lookuptypes - - local ligatures = { } - local alternate = tonumber(value) - local defaultalt = otf.defaultbasealternate - - local trace_singles = trace_baseinit and trace_singles - local trace_alternatives = trace_baseinit and trace_alternatives - local trace_ligatures = trace_baseinit and trace_ligatures - - local actions = { - substitution = function(lookupdata,lookupname,description,unicode) - if trace_singles then - report_substitution(feature,lookupname,descriptions,unicode,lookupdata) - end - changed[unicode] = lookupdata - end, - alternate = function(lookupdata,lookupname,description,unicode) - local replacement = lookupdata[alternate] - if replacement then - changed[unicode] = replacement - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") - end - elseif defaultalt == "first" then - replacement = lookupdata[1] - changed[unicode] = replacement - if trace_alternatives then - 
report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - elseif defaultalt == "last" then - replacement = lookupdata[#data] - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - else - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") - end - end - end, - ligature = function(lookupdata,lookupname,description,unicode) - if trace_ligatures then - report_ligature(feature,lookupname,descriptions,unicode,lookupdata) - end - ligatures[#ligatures+1] = { unicode, lookupdata } - end, - } - - for unicode, character in next, characters do - local description = descriptions[unicode] - local lookups = description.slookups - if lookups then - for l=1,#lookuplist do - local lookupname = lookuplist[l] - local lookupdata = lookups[lookupname] - if lookupdata then - local lookuptype = lookuptypes[lookupname] - local action = actions[lookuptype] - if action then - action(lookupdata,lookupname,description,unicode) - end - end - end - end - local lookups = description.mlookups - if lookups then - for l=1,#lookuplist do - local lookupname = lookuplist[l] - local lookuplist = lookups[lookupname] - if lookuplist then - local lookuptype = lookuptypes[lookupname] - local action = actions[lookuptype] - if action then - for i=1,#lookuplist do - action(lookuplist[i],lookupname,description,unicode) - end - end - end - end - end - end - - finalize_ligatures(tfmdata,ligatures) -end - -local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - local resources = tfmdata.resources - local unicodes = resources.unicodes - local sharedkerns = { } - local traceindeed = trace_baseinit and trace_kerns - for unicode, character in next, characters do - local description = descriptions[unicode] - local rawkerns = description.kerns -- shared - if rawkerns then - local s = sharedkerns[rawkerns] - if s == false then - -- skip - elseif s then - character.kerns = s - else - local newkerns = character.kerns - local done = false - for l=1,#lookuplist do - local lookup = lookuplist[l] - local kerns = rawkerns[lookup] - if kerns then - for otherunicode, value in next, kerns do - if value == 0 then - -- maybe no 0 test here - elseif not newkerns then - newkerns = { [otherunicode] = value } - done = true - if traceindeed then - report_kern(feature,lookup,descriptions,unicode,otherunicode,value) - end - elseif not newkerns[otherunicode] then -- first wins - newkerns[otherunicode] = value - done = true - if traceindeed then - report_kern(feature,lookup,descriptions,unicode,otherunicode,value) - end - end - end - end - end - if done then - sharedkerns[rawkerns] = newkerns - character.kerns = newkerns -- no empty assignments - else - sharedkerns[rawkerns] = false - end - end - end - end -end - -basemethods.independent = { - preparesubstitutions = preparesubstitutions, - preparepositionings = preparepositionings, -} - -local function makefake(tfmdata,name,present) - local resources = tfmdata.resources - local private = resources.private - local character = { intermediate = true, ligatures = { } } - resources.unicodes[name] = private - tfmdata.characters[private] = character - tfmdata.descriptions[private] = { name = name } - resources.private = private + 1 - present[name] = private - return character -end - -local function 
make_1(present,tree,name) - for k, v in next, tree do - if k == "ligature" then - present[name] = v - else - make_1(present,v,name .. "_" .. k) - end - end -end - -local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname) - for k, v in next, tree do - if k == "ligature" then - local character = characters[preceding] - if not character then - if trace_baseinit then - report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding) - end - character = makefake(tfmdata,name,present) - end - local ligatures = character.ligatures - if ligatures then - ligatures[unicode] = { char = v } - else - character.ligatures = { [unicode] = { char = v } } - end - if done then - local d = done[lookupname] - if not d then - done[lookupname] = { "dummy", v } - else - d[#d+1] = v - end - end - else - local code = present[name] or unicode - local name = name .. "_" .. k - make_2(present,tfmdata,characters,v,name,code,k,done,lookupname) - end - end -end - -local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - local resources = tfmdata.resources - local changed = tfmdata.changed - local lookuphash = resources.lookuphash - local lookuptypes = resources.lookuptypes - - local ligatures = { } - local alternate = tonumber(value) - local defaultalt = otf.defaultbasealternate - - local trace_singles = trace_baseinit and trace_singles - local trace_alternatives = trace_baseinit and trace_alternatives - local trace_ligatures = trace_baseinit and trace_ligatures - - for l=1,#lookuplist do - local lookupname = lookuplist[l] - local lookupdata = lookuphash[lookupname] - local lookuptype = lookuptypes[lookupname] - for unicode, data in next, lookupdata do - if lookuptype == "substitution" then - if trace_singles then - report_substitution(feature,lookupname,descriptions,unicode,data) - end - changed[unicode] = data - elseif lookuptype == "alternate" then - local replacement = data[alternate] - if replacement then - changed[unicode] = replacement - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") - end - elseif defaultalt == "first" then - replacement = data[1] - changed[unicode] = replacement - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - elseif defaultalt == "last" then - replacement = data[#data] - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - else - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") - end - end - elseif lookuptype == "ligature" then - ligatures[#ligatures+1] = { unicode, data, lookupname } - if trace_ligatures then - report_ligature(feature,lookupname,descriptions,unicode,data) - end - end - end - end - - local nofligatures = #ligatures - - if nofligatures > 0 then - - local characters = tfmdata.characters - local present = { } - local done = trace_baseinit and trace_ligatures and { } - - for i=1,nofligatures do - local ligature = ligatures[i] - local unicode, tree = ligature[1], ligature[2] - make_1(present,tree,"ctx_"..unicode) - end - - for i=1,nofligatures do - local ligature = ligatures[i] - local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3] - 
make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname) - end - - end - -end - -local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - local resources = tfmdata.resources - local lookuphash = resources.lookuphash - local traceindeed = trace_baseinit and trace_kerns - - -- check out this sharedkerns trickery - - for l=1,#lookuplist do - local lookupname = lookuplist[l] - local lookupdata = lookuphash[lookupname] - for unicode, data in next, lookupdata do - local character = characters[unicode] - local kerns = character.kerns - if not kerns then - kerns = { } - character.kerns = kerns - end - if traceindeed then - for otherunicode, kern in next, data do - if not kerns[otherunicode] and kern ~= 0 then - kerns[otherunicode] = kern - report_kern(feature,lookup,descriptions,unicode,otherunicode,kern) - end - end - else - for otherunicode, kern in next, data do - if not kerns[otherunicode] and kern ~= 0 then - kerns[otherunicode] = kern - end - end - end - end - end - -end - -local function initializehashes(tfmdata) - nodeinitializers.features(tfmdata) -end - -basemethods.shared = { - initializehashes = initializehashes, - preparesubstitutions = preparesubstitutions, - preparepositionings = preparepositionings, -} - -basemethod = "independent" - -local function featuresinitializer(tfmdata,value) - if true then -- value then - local t = trace_preparing and os.clock() - local features = tfmdata.shared.features - if features then - applybasemethod("initializehashes",tfmdata) - local collectlookups = otf.collectlookups - local rawdata = tfmdata.shared.rawdata - local properties = tfmdata.properties - local script = properties.script - local language = properties.language - local basesubstitutions = rawdata.resources.features.gsub - local basepositionings = rawdata.resources.features.gpos - if basesubstitutions then - for feature, data in next, basesubstitutions do - local value = features[feature] - if value then - local validlookups, lookuplist = collectlookups(rawdata,feature,script,language) - if validlookups then - applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) - registerbasefeature(feature,value) - end - end - end - end - if basepositionings then - for feature, data in next, basepositionings do - local value = features[feature] - if value then - local validlookups, lookuplist = collectlookups(rawdata,feature,script,language) - if validlookups then - applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist) - registerbasefeature(feature,value) - end - end - end - end - registerbasehash(tfmdata) - end - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname) - end - end -end - -registerotffeature { - name = "features", - description = "features", - default = true, - initializers = { - -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1 - base = featuresinitializer, - } -} - --- independent : collect lookups independently (takes more runtime ... neglectable) --- shared : shares lookups with node mode (takes more memory unless also a node mode variant is used ... 
noticeable) - -directives.register("fonts.otf.loader.basemethod", function(v) - if basemethods[v] then - basemethod = v - end -end) +if not modules then modules = { } end modules ['font-otb'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} +local concat = table.concat +local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip +local type, next, tonumber, tostring = type, next, tonumber, tostring +local lpegmatch = lpeg.match +local utfchar = utf.char + +local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end) +local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end) +local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end) +local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end) +local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end) +local trace_ligatures_detail = false trackers.register("otf.ligatures.detail", function(v) trace_ligatures_detail = v end) +local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end) +local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end) + +local report_prepare = logs.reporter("fonts","otf prepare") + +local fonts = fonts +local otf = fonts.handlers.otf + +local otffeatures = otf.features +local registerotffeature = otffeatures.register + +otf.defaultbasealternate = "none" -- first last + +local wildcard = "*" +local default = "dflt" + +local formatters = string.formatters +local f_unicode = formatters["%U"] +local f_uniname = formatters["%U (%s)"] +local f_unilist = formatters["% t (% t)"] + +local function gref(descriptions,n) + if type(n) == "number" then + local name = descriptions[n].name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num, nam = { }, { } + for i=2,#n do + local ni = n[i] + if tonumber(ni) then -- first is likely a key + local di = descriptions[ni] + num[i] = f_unicode(ni) + nam[i] = di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end + +local function cref(feature,lookupname) + if lookupname then + return formatters["feature %a, lookup %a"](feature,lookupname) + else + return formatters["feature %a"](feature) + end +end + +local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment) + report_prepare("%s: base alternate %s => %s (%S => %S)", + cref(feature,lookupname), + gref(descriptions,unicode), + replacement and gref(descriptions,replacement), + value, + comment) +end + +local function report_substitution(feature,lookupname,descriptions,unicode,substitution) + report_prepare("%s: base substitution %s => %S", + cref(feature,lookupname), + gref(descriptions,unicode), + gref(descriptions,substitution)) +end + +local function report_ligature(feature,lookupname,descriptions,unicode,ligature) + report_prepare("%s: base ligature %s => %S", + cref(feature,lookupname), + gref(descriptions,ligature), + gref(descriptions,unicode)) +end + +local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value) + report_prepare("%s: base 
kern %s + %s => %S", + cref(feature,lookupname), + gref(descriptions,unicode), + gref(descriptions,otherunicode), + value) +end + +local basemethods = { } +local basemethod = "" + +local function applybasemethod(what,...) + local m = basemethods[basemethod][what] + if m then + return m(...) + end +end + +-- We need to make sure that luatex sees the difference between +-- base fonts that have different glyphs in the same slots in fonts +-- that have the same fullname (or filename). LuaTeX will merge fonts +-- eventually (and subset later on). If needed we can use a more +-- verbose name as long as we don't use <()<>[]{}/%> and the length +-- is < 128. + +local basehash, basehashes, applied = { }, 1, { } + +local function registerbasehash(tfmdata) + local properties = tfmdata.properties + local hash = concat(applied," ") + local base = basehash[hash] + if not base then + basehashes = basehashes + 1 + base = basehashes + basehash[hash] = base + end + properties.basehash = base + properties.fullname = properties.fullname .. "-" .. base + -- report_prepare("fullname base hash '%a, featureset %a",tfmdata.properties.fullname,hash) + applied = { } +end + +local function registerbasefeature(feature,value) + applied[#applied+1] = feature .. "=" .. tostring(value) +end + +-- The original basemode ligature builder used the names of components +-- and did some expression juggling to get the chain right. The current +-- variant starts with unicodes but still uses names to make the chain. +-- This is needed because we have to create intermediates when needed +-- but use predefined snippets when available. To some extend the +-- current builder is more stupid but I don't worry that much about it +-- as ligatures are rather predicatable. +-- +-- Personally I think that an ff + i == ffi rule as used in for instance +-- latin modern is pretty weird as no sane person will key that in and +-- expect a glyph for that ligature plus the following character. Anyhow, +-- as we need to deal with this, we do, but no guarantes are given. +-- +-- latin modern dejavu +-- +-- f+f 102 102 102 102 +-- f+i 102 105 102 105 +-- f+l 102 108 102 108 +-- f+f+i 102 102 105 +-- f+f+l 102 102 108 102 102 108 +-- ff+i 64256 105 64256 105 +-- ff+l 64256 108 +-- +-- As you can see here, latin modern is less complete than dejavu but +-- in practice one will not notice it. +-- +-- The while loop is needed because we need to resolve for instance +-- pseudo names like hyphen_hyphen to endash so in practice we end +-- up with a bit too many definitions but the overhead is neglectable. +-- +-- Todo: if changed[first] or changed[second] then ... end + +local trace = false + +local function finalize_ligatures(tfmdata,ligatures) + local nofligatures = #ligatures + if nofligatures > 0 then + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local resources = tfmdata.resources + local unicodes = resources.unicodes + local private = resources.private + local alldone = false + while not alldone do + local done = 0 + for i=1,nofligatures do + local ligature = ligatures[i] + if ligature then + local unicode, lookupdata = ligature[1], ligature[2] + if trace then + trace_ligatures_detail("building % a into %a",lookupdata,unicode) + end + local size = #lookupdata + local firstcode = lookupdata[1] -- [2] + local firstdata = characters[firstcode] + local okay = false + if firstdata then + local firstname = "ctx_" .. 
firstcode + for i=1,size-1 do -- for i=2,size-1 do + local firstdata = characters[firstcode] + if not firstdata then + firstcode = private + if trace then + trace_ligatures_detail("defining %a as %a",firstname,firstcode) + end + unicodes[firstname] = firstcode + firstdata = { intermediate = true, ligatures = { } } + characters[firstcode] = firstdata + descriptions[firstcode] = { name = firstname } + private = private + 1 + end + local target + local secondcode = lookupdata[i+1] + local secondname = firstname .. "_" .. secondcode + if i == size - 1 then + target = unicode + if not unicodes[secondname] then + unicodes[secondname] = unicode -- map final ligature onto intermediates + end + okay = true + else + target = unicodes[secondname] + if not target then + break + end + end + if trace then + trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target) + end + local firstligs = firstdata.ligatures + if firstligs then + firstligs[secondcode] = { char = target } + else + firstdata.ligatures = { [secondcode] = { char = target } } + end + firstcode = target + firstname = secondname + end + end + if okay then + ligatures[i] = false + done = done + 1 + end + end + end + alldone = done == 0 + end + if trace then + for k, v in next, characters do + if v.ligatures then table.print(v,k) end + end + end + tfmdata.resources.private = private + end +end + +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local resources = tfmdata.resources + local changed = tfmdata.changed + local unicodes = resources.unicodes + local lookuphash = resources.lookuphash + local lookuptypes = resources.lookuptypes + + local ligatures = { } + local alternate = tonumber(value) + local defaultalt = otf.defaultbasealternate + + local trace_singles = trace_baseinit and trace_singles + local trace_alternatives = trace_baseinit and trace_alternatives + local trace_ligatures = trace_baseinit and trace_ligatures + + local actions = { + substitution = function(lookupdata,lookupname,description,unicode) + if trace_singles then + report_substitution(feature,lookupname,descriptions,unicode,lookupdata) + end + changed[unicode] = lookupdata + end, + alternate = function(lookupdata,lookupname,description,unicode) + local replacement = lookupdata[alternate] + if replacement then + changed[unicode] = replacement + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt == "first" then + replacement = lookupdata[1] + changed[unicode] = replacement + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt == "last" then + replacement = lookupdata[#data] + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + end, + ligature = function(lookupdata,lookupname,description,unicode) + if trace_ligatures then + report_ligature(feature,lookupname,descriptions,unicode,lookupdata) + end + ligatures[#ligatures+1] = { unicode, lookupdata } + end, + } + + for unicode, character in next, characters do + local description = descriptions[unicode] + local lookups = description.slookups + if lookups then + for l=1,#lookuplist do 
+ local lookupname = lookuplist[l] + local lookupdata = lookups[lookupname] + if lookupdata then + local lookuptype = lookuptypes[lookupname] + local action = actions[lookuptype] + if action then + action(lookupdata,lookupname,description,unicode) + end + end + end + end + local lookups = description.mlookups + if lookups then + for l=1,#lookuplist do + local lookupname = lookuplist[l] + local lookuplist = lookups[lookupname] + if lookuplist then + local lookuptype = lookuptypes[lookupname] + local action = actions[lookuptype] + if action then + for i=1,#lookuplist do + action(lookuplist[i],lookupname,description,unicode) + end + end + end + end + end + end + + finalize_ligatures(tfmdata,ligatures) +end + +local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local resources = tfmdata.resources + local unicodes = resources.unicodes + local sharedkerns = { } + local traceindeed = trace_baseinit and trace_kerns + for unicode, character in next, characters do + local description = descriptions[unicode] + local rawkerns = description.kerns -- shared + if rawkerns then + local s = sharedkerns[rawkerns] + if s == false then + -- skip + elseif s then + character.kerns = s + else + local newkerns = character.kerns + local done = false + for l=1,#lookuplist do + local lookup = lookuplist[l] + local kerns = rawkerns[lookup] + if kerns then + for otherunicode, value in next, kerns do + if value == 0 then + -- maybe no 0 test here + elseif not newkerns then + newkerns = { [otherunicode] = value } + done = true + if traceindeed then + report_kern(feature,lookup,descriptions,unicode,otherunicode,value) + end + elseif not newkerns[otherunicode] then -- first wins + newkerns[otherunicode] = value + done = true + if traceindeed then + report_kern(feature,lookup,descriptions,unicode,otherunicode,value) + end + end + end + end + end + if done then + sharedkerns[rawkerns] = newkerns + character.kerns = newkerns -- no empty assignments + else + sharedkerns[rawkerns] = false + end + end + end + end +end + +basemethods.independent = { + preparesubstitutions = preparesubstitutions, + preparepositionings = preparepositionings, +} + +local function makefake(tfmdata,name,present) + local resources = tfmdata.resources + local private = resources.private + local character = { intermediate = true, ligatures = { } } + resources.unicodes[name] = private + tfmdata.characters[private] = character + tfmdata.descriptions[private] = { name = name } + resources.private = private + 1 + present[name] = private + return character +end + +local function make_1(present,tree,name) + for k, v in next, tree do + if k == "ligature" then + present[name] = v + else + make_1(present,v,name .. "_" .. 
k) + end + end +end + +local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname) + for k, v in next, tree do + if k == "ligature" then + local character = characters[preceding] + if not character then + if trace_baseinit then + report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding) + end + character = makefake(tfmdata,name,present) + end + local ligatures = character.ligatures + if ligatures then + ligatures[unicode] = { char = v } + else + character.ligatures = { [unicode] = { char = v } } + end + if done then + local d = done[lookupname] + if not d then + done[lookupname] = { "dummy", v } + else + d[#d+1] = v + end + end + else + local code = present[name] or unicode + local name = name .. "_" .. k + make_2(present,tfmdata,characters,v,name,code,k,done,lookupname) + end + end +end + +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local resources = tfmdata.resources + local changed = tfmdata.changed + local lookuphash = resources.lookuphash + local lookuptypes = resources.lookuptypes + + local ligatures = { } + local alternate = tonumber(value) + local defaultalt = otf.defaultbasealternate + + local trace_singles = trace_baseinit and trace_singles + local trace_alternatives = trace_baseinit and trace_alternatives + local trace_ligatures = trace_baseinit and trace_ligatures + + for l=1,#lookuplist do + local lookupname = lookuplist[l] + local lookupdata = lookuphash[lookupname] + local lookuptype = lookuptypes[lookupname] + for unicode, data in next, lookupdata do + if lookuptype == "substitution" then + if trace_singles then + report_substitution(feature,lookupname,descriptions,unicode,data) + end + changed[unicode] = data + elseif lookuptype == "alternate" then + local replacement = data[alternate] + if replacement then + changed[unicode] = replacement + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt == "first" then + replacement = data[1] + changed[unicode] = replacement + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt == "last" then + replacement = data[#data] + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + elseif lookuptype == "ligature" then + ligatures[#ligatures+1] = { unicode, data, lookupname } + if trace_ligatures then + report_ligature(feature,lookupname,descriptions,unicode,data) + end + end + end + end + + local nofligatures = #ligatures + + if nofligatures > 0 then + + local characters = tfmdata.characters + local present = { } + local done = trace_baseinit and trace_ligatures and { } + + for i=1,nofligatures do + local ligature = ligatures[i] + local unicode, tree = ligature[1], ligature[2] + make_1(present,tree,"ctx_"..unicode) + end + + for i=1,nofligatures do + local ligature = ligatures[i] + local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3] + make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname) + end + + end + +end + +local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) + local characters = 
tfmdata.characters + local descriptions = tfmdata.descriptions + local resources = tfmdata.resources + local lookuphash = resources.lookuphash + local traceindeed = trace_baseinit and trace_kerns + + -- check out this sharedkerns trickery + + for l=1,#lookuplist do + local lookupname = lookuplist[l] + local lookupdata = lookuphash[lookupname] + for unicode, data in next, lookupdata do + local character = characters[unicode] + local kerns = character.kerns + if not kerns then + kerns = { } + character.kerns = kerns + end + if traceindeed then + for otherunicode, kern in next, data do + if not kerns[otherunicode] and kern ~= 0 then + kerns[otherunicode] = kern + report_kern(feature,lookup,descriptions,unicode,otherunicode,kern) + end + end + else + for otherunicode, kern in next, data do + if not kerns[otherunicode] and kern ~= 0 then + kerns[otherunicode] = kern + end + end + end + end + end + +end + +local function initializehashes(tfmdata) + nodeinitializers.features(tfmdata) +end + +basemethods.shared = { + initializehashes = initializehashes, + preparesubstitutions = preparesubstitutions, + preparepositionings = preparepositionings, +} + +basemethod = "independent" + +local function featuresinitializer(tfmdata,value) + if true then -- value then + local t = trace_preparing and os.clock() + local features = tfmdata.shared.features + if features then + applybasemethod("initializehashes",tfmdata) + local collectlookups = otf.collectlookups + local rawdata = tfmdata.shared.rawdata + local properties = tfmdata.properties + local script = properties.script + local language = properties.language + local basesubstitutions = rawdata.resources.features.gsub + local basepositionings = rawdata.resources.features.gpos + if basesubstitutions then + for feature, data in next, basesubstitutions do + local value = features[feature] + if value then + local validlookups, lookuplist = collectlookups(rawdata,feature,script,language) + if validlookups then + applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) + registerbasefeature(feature,value) + end + end + end + end + if basepositionings then + for feature, data in next, basepositionings do + local value = features[feature] + if value then + local validlookups, lookuplist = collectlookups(rawdata,feature,script,language) + if validlookups then + applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist) + registerbasefeature(feature,value) + end + end + end + end + registerbasehash(tfmdata) + end + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname) + end + end +end + +registerotffeature { + name = "features", + description = "features", + default = true, + initializers = { + -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1 + base = featuresinitializer, + } +} + +-- independent : collect lookups independently (takes more runtime ... neglectable) +-- shared : shares lookups with node mode (takes more memory unless also a node mode variant is used ... 
noticeable) + +directives.register("fonts.otf.loader.basemethod", function(v) + if basemethods[v] then + basemethod = v + end +end) diff --git a/tex/context/base/font-otc.lua b/tex/context/base/font-otc.lua index a87dcadf8..0ea900008 100644 --- a/tex/context/base/font-otc.lua +++ b/tex/context/base/font-otc.lua @@ -1,333 +1,333 @@ -if not modules then modules = { } end modules ['font-otc'] = { - version = 1.001, - comment = "companion to font-otf.lua (context)", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, insert = string.format, table.insert -local type, next = type, next -local lpegmatch = lpeg.match - --- we assume that the other otf stuff is loaded already - -local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) -local report_otf = logs.reporter("fonts","otf loading") - -local fonts = fonts -local otf = fonts.handlers.otf -local registerotffeature = otf.features.register -local setmetatableindex = table.setmetatableindex - --- In the userdata interface we can not longer tweak the loaded font as --- conveniently as before. For instance, instead of pushing extra data in --- in the table using the original structure, we now have to operate on --- the mkiv representation. And as the fontloader interface is modelled --- after fontforge we cannot change that one too much either. - -local types = { - substitution = "gsub_single", - ligature = "gsub_ligature", - alternate = "gsub_alternate", -} - -setmetatableindex(types, function(t,k) t[k] = k return k end) -- "key" - -local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } } -local noflags = { } - -local function addfeature(data,feature,specifications) - local descriptions = data.descriptions - local resources = data.resources - local lookups = resources.lookups - local gsubfeatures = resources.features.gsub - if gsubfeatures and gsubfeatures[feature] then - -- already present - else - local sequences = resources.sequences - local fontfeatures = resources.features - local unicodes = resources.unicodes - local lookuptypes = resources.lookuptypes - local splitter = lpeg.splitter(" ",unicodes) - local done = 0 - local skip = 0 - if not specifications[1] then - -- so we accept a one entry specification - specifications = { specifications } - end - -- subtables are tables themselves but we also accept flattened singular subtables - for s=1,#specifications do - local specification = specifications[s] - local valid = specification.valid - if not valid or valid(data,specification,feature) then - local initialize = specification.initialize - if initialize then - -- when false is returned we initialize only once - specification.initialize = initialize(specification) and initialize or nil - end - local askedfeatures = specification.features or everywhere - local subtables = specification.subtables or { specification.data } or { } - local featuretype = types[specification.type or "substitution"] - local featureflags = specification.flags or noflags - local added = false - local featurename = format("ctx_%s_%s",feature,s) - local st = { } - for t=1,#subtables do - local list = subtables[t] - local full = format("%s_%s",featurename,t) - st[t] = full - if featuretype == "gsub_ligature" then - lookuptypes[full] = "ligature" - for code, ligature in next, list do - local unicode = tonumber(code) or unicodes[code] - local description = descriptions[unicode] - if description then 
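The directive registered just above only accepts values that actually exist in the basemethods table ("independent" or "shared"), so an unknown name silently keeps the current method. A hedged usage sketch, assuming the usual ConTeXt setter interface where a directive can be enabled with a key=value string:

-- sketch: select the variant that shares its lookup hashes with node mode
directives.enable("fonts.otf.loader.basemethod=shared")

On the TeX end the same switch would normally be made with \enabledirectives[fonts.otf.loader.basemethod=shared].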
- local slookups = description.slookups - if type(ligature) == "string" then - ligature = { lpegmatch(splitter,ligature) } - end - local present = true - for i=1,#ligature do - if not descriptions[ligature[i]] then - present = false - break - end - end - if present then - if slookups then - slookups[full] = ligature - else - description.slookups = { [full] = ligature } - end - done, added = done + 1, true - else - skip = skip + 1 - end - end - end - elseif featuretype == "gsub_single" then - lookuptypes[full] = "substitution" - for code, replacement in next, list do - local unicode = tonumber(code) or unicodes[code] - local description = descriptions[unicode] - if description then - local slookups = description.slookups - replacement = tonumber(replacement) or unicodes[replacement] - if descriptions[replacement] then - if slookups then - slookups[full] = replacement - else - description.slookups = { [full] = replacement } - end - done, added = done + 1, true - end - end - end - end - end - if added then - -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... } - for k, v in next, askedfeatures do - if v[1] then - askedfeatures[k] = table.tohash(v) - end - end - sequences[#sequences+1] = { - chain = 0, - features = { [feature] = askedfeatures }, - flags = featureflags, - name = featurename, - subtables = st, - type = featuretype, - } - -- register in metadata (merge as there can be a few) - if not gsubfeatures then - gsubfeatures = { } - fontfeatures.gsub = gsubfeatures - end - local k = gsubfeatures[feature] - if not k then - k = { } - gsubfeatures[feature] = k - end - for script, languages in next, askedfeatures do - local kk = k[script] - if not kk then - kk = { } - k[script] = kk - end - for language, value in next, languages do - kk[language] = value - end - end - end - end - end - if trace_loading then - report_otf("registering feature %a, affected glyphs %a, skipped glyphs %a",feature,done,skip) - end - end -end - -otf.enhancers.addfeature = addfeature - -local extrafeatures = { } - -function otf.addfeature(name,specification) - extrafeatures[name] = specification -end - -local function enhance(data,filename,raw) - for feature, specification in next, extrafeatures do - addfeature(data,feature,specification) - end -end - -otf.enhancers.register("check extra features",enhance) - --- tlig -- - -local tlig = { - endash = "hyphen hyphen", - emdash = "hyphen hyphen hyphen", - -- quotedblleft = "quoteleft quoteleft", - -- quotedblright = "quoteright quoteright", - -- quotedblleft = "grave grave", - -- quotedblright = "quotesingle quotesingle", - -- quotedblbase = "comma comma", -} - -local tlig_specification = { - type = "ligature", - features = everywhere, - data = tlig, - flags = noflags, -} - -otf.addfeature("tlig",tlig_specification) - -registerotffeature { - name = 'tlig', - description = 'tex ligatures', -} - --- trep - -local trep = { - -- [0x0022] = 0x201D, - [0x0027] = 0x2019, - -- [0x0060] = 0x2018, -} - -local trep_specification = { - type = "substitution", - features = everywhere, - data = trep, - flags = noflags, -} - -otf.addfeature("trep",trep_specification) - -registerotffeature { - name = 'trep', - description = 'tex replacements', -} - --- tcom - -if characters.combined then - - local tcom = { } - - local function initialize() - characters.initialize() - for first, seconds in next, characters.combined do - for second, combination in next, seconds do - tcom[combination] = { first, second } - end - end - -- return false - end - - local tcom_specification = { - 
type = "ligature", - features = everywhere, - data = tcom, - flags = noflags, - initialize = initialize, - } - - otf.addfeature("tcom",tcom_specification) - - registerotffeature { - name = 'tcom', - description = 'tex combinations', - } - -end - --- anum - -local anum_arabic = { - [0x0030] = 0x0660, - [0x0031] = 0x0661, - [0x0032] = 0x0662, - [0x0033] = 0x0663, - [0x0034] = 0x0664, - [0x0035] = 0x0665, - [0x0036] = 0x0666, - [0x0037] = 0x0667, - [0x0038] = 0x0668, - [0x0039] = 0x0669, -} - -local anum_persian = { - [0x0030] = 0x06F0, - [0x0031] = 0x06F1, - [0x0032] = 0x06F2, - [0x0033] = 0x06F3, - [0x0034] = 0x06F4, - [0x0035] = 0x06F5, - [0x0036] = 0x06F6, - [0x0037] = 0x06F7, - [0x0038] = 0x06F8, - [0x0039] = 0x06F9, -} - -local function valid(data) - local features = data.resources.features - if features then - for k, v in next, features do - for k, v in next, v do - if v.arab then - return true - end - end - end - end -end - -local anum_specification = { - { - type = "substitution", - features = { arab = { urd = true, dflt = true } }, - data = anum_arabic, - flags = noflags, -- { }, - valid = valid, - }, - { - type = "substitution", - features = { arab = { urd = true } }, - data = anum_persian, - flags = noflags, -- { }, - valid = valid, - }, -} - -otf.addfeature("anum",anum_specification) -- todo: only when there is already an arab script feature - -registerotffeature { - name = 'anum', - description = 'arabic digits', -} +if not modules then modules = { } end modules ['font-otc'] = { + version = 1.001, + comment = "companion to font-otf.lua (context)", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, insert = string.format, table.insert +local type, next = type, next +local lpegmatch = lpeg.match + +-- we assume that the other otf stuff is loaded already + +local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) +local report_otf = logs.reporter("fonts","otf loading") + +local fonts = fonts +local otf = fonts.handlers.otf +local registerotffeature = otf.features.register +local setmetatableindex = table.setmetatableindex + +-- In the userdata interface we can not longer tweak the loaded font as +-- conveniently as before. For instance, instead of pushing extra data in +-- in the table using the original structure, we now have to operate on +-- the mkiv representation. And as the fontloader interface is modelled +-- after fontforge we cannot change that one too much either. 
+ +local types = { + substitution = "gsub_single", + ligature = "gsub_ligature", + alternate = "gsub_alternate", +} + +setmetatableindex(types, function(t,k) t[k] = k return k end) -- "key" + +local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } } +local noflags = { } + +local function addfeature(data,feature,specifications) + local descriptions = data.descriptions + local resources = data.resources + local lookups = resources.lookups + local gsubfeatures = resources.features.gsub + if gsubfeatures and gsubfeatures[feature] then + -- already present + else + local sequences = resources.sequences + local fontfeatures = resources.features + local unicodes = resources.unicodes + local lookuptypes = resources.lookuptypes + local splitter = lpeg.splitter(" ",unicodes) + local done = 0 + local skip = 0 + if not specifications[1] then + -- so we accept a one entry specification + specifications = { specifications } + end + -- subtables are tables themselves but we also accept flattened singular subtables + for s=1,#specifications do + local specification = specifications[s] + local valid = specification.valid + if not valid or valid(data,specification,feature) then + local initialize = specification.initialize + if initialize then + -- when false is returned we initialize only once + specification.initialize = initialize(specification) and initialize or nil + end + local askedfeatures = specification.features or everywhere + local subtables = specification.subtables or { specification.data } or { } + local featuretype = types[specification.type or "substitution"] + local featureflags = specification.flags or noflags + local added = false + local featurename = format("ctx_%s_%s",feature,s) + local st = { } + for t=1,#subtables do + local list = subtables[t] + local full = format("%s_%s",featurename,t) + st[t] = full + if featuretype == "gsub_ligature" then + lookuptypes[full] = "ligature" + for code, ligature in next, list do + local unicode = tonumber(code) or unicodes[code] + local description = descriptions[unicode] + if description then + local slookups = description.slookups + if type(ligature) == "string" then + ligature = { lpegmatch(splitter,ligature) } + end + local present = true + for i=1,#ligature do + if not descriptions[ligature[i]] then + present = false + break + end + end + if present then + if slookups then + slookups[full] = ligature + else + description.slookups = { [full] = ligature } + end + done, added = done + 1, true + else + skip = skip + 1 + end + end + end + elseif featuretype == "gsub_single" then + lookuptypes[full] = "substitution" + for code, replacement in next, list do + local unicode = tonumber(code) or unicodes[code] + local description = descriptions[unicode] + if description then + local slookups = description.slookups + replacement = tonumber(replacement) or unicodes[replacement] + if descriptions[replacement] then + if slookups then + slookups[full] = replacement + else + description.slookups = { [full] = replacement } + end + done, added = done + 1, true + end + end + end + end + end + if added then + -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... 
} + for k, v in next, askedfeatures do + if v[1] then + askedfeatures[k] = table.tohash(v) + end + end + sequences[#sequences+1] = { + chain = 0, + features = { [feature] = askedfeatures }, + flags = featureflags, + name = featurename, + subtables = st, + type = featuretype, + } + -- register in metadata (merge as there can be a few) + if not gsubfeatures then + gsubfeatures = { } + fontfeatures.gsub = gsubfeatures + end + local k = gsubfeatures[feature] + if not k then + k = { } + gsubfeatures[feature] = k + end + for script, languages in next, askedfeatures do + local kk = k[script] + if not kk then + kk = { } + k[script] = kk + end + for language, value in next, languages do + kk[language] = value + end + end + end + end + end + if trace_loading then + report_otf("registering feature %a, affected glyphs %a, skipped glyphs %a",feature,done,skip) + end + end +end + +otf.enhancers.addfeature = addfeature + +local extrafeatures = { } + +function otf.addfeature(name,specification) + extrafeatures[name] = specification +end + +local function enhance(data,filename,raw) + for feature, specification in next, extrafeatures do + addfeature(data,feature,specification) + end +end + +otf.enhancers.register("check extra features",enhance) + +-- tlig -- + +local tlig = { + endash = "hyphen hyphen", + emdash = "hyphen hyphen hyphen", + -- quotedblleft = "quoteleft quoteleft", + -- quotedblright = "quoteright quoteright", + -- quotedblleft = "grave grave", + -- quotedblright = "quotesingle quotesingle", + -- quotedblbase = "comma comma", +} + +local tlig_specification = { + type = "ligature", + features = everywhere, + data = tlig, + flags = noflags, +} + +otf.addfeature("tlig",tlig_specification) + +registerotffeature { + name = 'tlig', + description = 'tex ligatures', +} + +-- trep + +local trep = { + -- [0x0022] = 0x201D, + [0x0027] = 0x2019, + -- [0x0060] = 0x2018, +} + +local trep_specification = { + type = "substitution", + features = everywhere, + data = trep, + flags = noflags, +} + +otf.addfeature("trep",trep_specification) + +registerotffeature { + name = 'trep', + description = 'tex replacements', +} + +-- tcom + +if characters.combined then + + local tcom = { } + + local function initialize() + characters.initialize() + for first, seconds in next, characters.combined do + for second, combination in next, seconds do + tcom[combination] = { first, second } + end + end + -- return false + end + + local tcom_specification = { + type = "ligature", + features = everywhere, + data = tcom, + flags = noflags, + initialize = initialize, + } + + otf.addfeature("tcom",tcom_specification) + + registerotffeature { + name = 'tcom', + description = 'tex combinations', + } + +end + +-- anum + +local anum_arabic = { + [0x0030] = 0x0660, + [0x0031] = 0x0661, + [0x0032] = 0x0662, + [0x0033] = 0x0663, + [0x0034] = 0x0664, + [0x0035] = 0x0665, + [0x0036] = 0x0666, + [0x0037] = 0x0667, + [0x0038] = 0x0668, + [0x0039] = 0x0669, +} + +local anum_persian = { + [0x0030] = 0x06F0, + [0x0031] = 0x06F1, + [0x0032] = 0x06F2, + [0x0033] = 0x06F3, + [0x0034] = 0x06F4, + [0x0035] = 0x06F5, + [0x0036] = 0x06F6, + [0x0037] = 0x06F7, + [0x0038] = 0x06F8, + [0x0039] = 0x06F9, +} + +local function valid(data) + local features = data.resources.features + if features then + for k, v in next, features do + for k, v in next, v do + if v.arab then + return true + end + end + end + end +end + +local anum_specification = { + { + type = "substitution", + features = { arab = { urd = true, dflt = true } }, + data = anum_arabic, + flags = 
noflags, -- { }, + valid = valid, + }, + { + type = "substitution", + features = { arab = { urd = true } }, + data = anum_persian, + flags = noflags, -- { }, + valid = valid, + }, +} + +otf.addfeature("anum",anum_specification) -- todo: only when there is already an arab script feature + +registerotffeature { + name = 'anum', + description = 'arabic digits', +} diff --git a/tex/context/base/font-otd.lua b/tex/context/base/font-otd.lua index a9d093d6d..12e2da55f 100644 --- a/tex/context/base/font-otd.lua +++ b/tex/context/base/font-otd.lua @@ -1,261 +1,261 @@ -if not modules then modules = { } end modules ['font-otd'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local type = type -local match = string.match -local sequenced = table.sequenced - -local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end) -local trace_applied = false trackers.register("otf.applied", function(v) trace_applied = v end) - -local report_otf = logs.reporter("fonts","otf loading") -local report_process = logs.reporter("fonts","otf process") - -local allocate = utilities.storage.allocate - -local fonts = fonts -local otf = fonts.handlers.otf -local hashes = fonts.hashes -local definers = fonts.definers -local constructors = fonts.constructors -local specifiers = fonts.specifiers - -local fontidentifiers = hashes.identifiers -local fontresources = hashes.resources -local fontproperties = hashes.properties -local fontdynamics = hashes.dynamics - -local contextsetups = specifiers.contextsetups -local contextnumbers = specifiers.contextnumbers -local contextmerged = specifiers.contextmerged - -local setmetatableindex = table.setmetatableindex - -local otffeatures = fonts.constructors.newfeatures("otf") -local registerotffeature = otffeatures.register - -local a_to_script = { } -local a_to_language = { } - --- we can have a scripts hash in fonts.hashes - -function otf.setdynamics(font,attribute) - -- local features = contextsetups[contextnumbers[attribute]] -- can be moved to caller - local features = contextsetups[attribute] - if features then - local dynamics = fontdynamics[font] - dynamic = contextmerged[attribute] or 0 - local script, language - if dynamic == 2 then -- merge - language = features.language or fontproperties[font].language or "dflt" - script = features.script or fontproperties[font].script or "dflt" - else -- if dynamic == 1 then -- replace - language = features.language or "dflt" - script = features.script or "dflt" - end - if script == "auto" then - -- checkedscript and resources are defined later so we cannot shortcut them -- todo: make installer - script = definers.checkedscript(fontidentifiers[font],fontresources[font],features) - end - local ds = dynamics[script] -- can be metatable magic (less testing) - if not ds then - ds = { } - dynamics[script] = ds - end - local dsl = ds[language] - if not dsl then - dsl = { } - ds[language] = dsl - end - local dsla = dsl[attribute] - if not dsla then - local tfmdata = fontidentifiers[font] - a_to_script [attribute] = script - a_to_language[attribute] = language - -- we need to save some values .. 
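The tlig, trep, tcom and anum definitions above all pass through the same otf.addfeature interface: a specification (or a list of them) carrying a type, a features table that says for which script/language combinations it applies, and a data table. A minimal hedged sketch of a user-defined feature built along the same lines (the feature name "demo" and its data are purely illustrative):

local everywhere = { ["*"] = { ["*"] = true } } -- all scripts and languages

fonts.handlers.otf.addfeature("demo", {
    type     = "substitution",
    features = everywhere,
    data     = {
        [0x0027] = 0x2019, -- apostrophe to right single quote, as in trep
    },
})

Once registered, such a feature is requested like any built-in one, for instance via \definefontfeature[default][default][demo=yes].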
quite messy - local properties = tfmdata.properties - local shared = tfmdata.shared - local s_script = properties.script - local s_language = properties.language - local s_mode = properties.mode - local s_features = shared.features - properties.mode = "node" - properties.language = language - properties.script = script - properties.dynamics = true -- handy for tracing - shared.features = { } - -- end of save - local set = constructors.checkedfeatures("otf",features) - set.mode = "node" -- really needed - dsla = otf.setfeatures(tfmdata,set) - if trace_dynamics then - report_otf("setting dynamics %s: attribute %a, script %a, language %a, set %a",contextnumbers[attribute],attribute,script,language,set) - end - -- we need to restore some values - properties.script = s_script - properties.language = s_language - properties.mode = s_mode - shared.features = s_features - -- end of restore - dynamics[script][language][attribute] = dsla -- cache - elseif trace_dynamics then - -- report_otf("using dynamics %s: attribute %a, script %a, language %a",contextnumbers[attribute],attribute,script,language) - end - return dsla - end -end - -function otf.scriptandlanguage(tfmdata,attr) - local properties = tfmdata.properties - if attr and attr > 0 then - return a_to_script[attr] or properties.script or "dflt", a_to_language[attr] or properties.language or "dflt" - else - return properties.script or "dflt", properties.language or "dflt" - end -end - --- we reimplement the dataset resolver - -local autofeatures = fonts.analyzers.features -- was: constants - -local resolved = { } -- we only resolve a font,script,language,attribute pair once -local wildcard = "*" -local default = "dflt" - --- what about analyze in local and not in font - -local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr,dynamic) - local features = sequence.features - if features then - for kind, scripts in next, features do - local e_e - local a_e = a_enabled and a_enabled[kind] -- the value (location) - if a_e ~= nil then - e_e = a_e - else - e_e = s_enabled and s_enabled[kind] -- the value (font) - end - if e_e then - local languages = scripts[script] or scripts[wildcard] - if languages then - -- local valid, what = false - local valid = false - -- not languages[language] or languages[default] or languages[wildcard] because we want tracing - -- only first attribute match check, so we assume simple fina's - -- default can become a font feature itself - if languages[language] then - valid = e_e -- was true - -- what = language - -- elseif languages[default] then - -- valid = true - -- what = default - elseif languages[wildcard] then - valid = e_e -- was true - -- what = wildcard - end - if valid then - local attribute = autofeatures[kind] or false - -- if a_e and dynamic < 0 then - -- valid = false - -- end - -- if trace_applied then - -- local typ, action = match(sequence.type,"(.*)_(.*)") -- brrr - -- report_process( - -- "%s font: %03i, dynamic: %03i, kind: %s, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s", - -- (valid and "+") or "-",font,attr or 0,kind,script,language,what,typ,action,sequence.name) - -- end - if trace_applied then - report_process( - "font %s, dynamic %a (%a), feature %a, script %a, language %a, lookup %a, value %a", - font,attr or 0,dynamic,kind,script,language,sequence.name,valid) - end - return { valid, attribute, sequence.chain or 0, kind, sequence } - end - end - end - end - return false -- { valid, attribute, chain, "generic", sequence } -- false anyway, could be 
flag instead of table - else - return false -- { false, false, chain, false, sequence } -- indirect lookup, part of chain (todo: make this a separate table) - end -end - --- there is some fuzzy language/script state stuff in properties (temporary) - -function otf.dataset(tfmdata,font,attr) -- attr only when explicit (as in special parbuilder) - - local script, language, s_enabled, a_enabled, dynamic - - if attr and attr ~= 0 then - dynamic = contextmerged[attr] or 0 - -- local features = contextsetups[contextnumbers[attr]] -- could be a direct list - local features = contextsetups[attr] - a_enabled = features -- location based - if dynamic == 1 then -- or dynamic == -1 then - -- replace - language = features.language or "dflt" - script = features.script or "dflt" - elseif dynamic == 2 then -- or dynamic == -2 then - -- merge - local properties = tfmdata.properties - s_enabled = tfmdata.shared.features -- font based - language = features.language or properties.language or "dflt" - script = features.script or properties.script or "dflt" - else - -- error - local properties = tfmdata.properties - language = properties.language or "dflt" - script = properties.script or "dflt" - end - else - local properties = tfmdata.properties - language = properties.language or "dflt" - script = properties.script or "dflt" - s_enabled = tfmdata.shared.features -- can be made local to the resolver - dynamic = 0 - end - - local res = resolved[font] - if not res then - res = { } - resolved[font] = res - end - local rs = res[script] - if not rs then - rs = { } - res[script] = rs - end - local rl = rs[language] - if not rl then - rl = { } - rs[language] = rl - end - local ra = rl[attr] - if ra == nil then -- attr can be false - ra = { - -- indexed but we can also add specific data by key in: - } - rl[attr] = ra - local sequences = tfmdata.resources.sequences --- setmetatableindex(ra, function(t,k) --- if type(k) == "number" then --- local v = initialize(sequences[k],script,language,s_enabled,a_enabled,font,attr,dynamic) --- t[k] = v or false --- return v --- end --- end) -for s=1,#sequences do - local v = initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic) - if v then - ra[#ra+1] = v - end -end - end - return ra - -end +if not modules then modules = { } end modules ['font-otd'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local type = type +local match = string.match +local sequenced = table.sequenced + +local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end) +local trace_applied = false trackers.register("otf.applied", function(v) trace_applied = v end) + +local report_otf = logs.reporter("fonts","otf loading") +local report_process = logs.reporter("fonts","otf process") + +local allocate = utilities.storage.allocate + +local fonts = fonts +local otf = fonts.handlers.otf +local hashes = fonts.hashes +local definers = fonts.definers +local constructors = fonts.constructors +local specifiers = fonts.specifiers + +local fontidentifiers = hashes.identifiers +local fontresources = hashes.resources +local fontproperties = hashes.properties +local fontdynamics = hashes.dynamics + +local contextsetups = specifiers.contextsetups +local contextnumbers = specifiers.contextnumbers +local contextmerged = specifiers.contextmerged + +local setmetatableindex = table.setmetatableindex + 
+local otffeatures = fonts.constructors.newfeatures("otf") +local registerotffeature = otffeatures.register + +local a_to_script = { } +local a_to_language = { } + +-- we can have a scripts hash in fonts.hashes + +function otf.setdynamics(font,attribute) + -- local features = contextsetups[contextnumbers[attribute]] -- can be moved to caller + local features = contextsetups[attribute] + if features then + local dynamics = fontdynamics[font] + dynamic = contextmerged[attribute] or 0 + local script, language + if dynamic == 2 then -- merge + language = features.language or fontproperties[font].language or "dflt" + script = features.script or fontproperties[font].script or "dflt" + else -- if dynamic == 1 then -- replace + language = features.language or "dflt" + script = features.script or "dflt" + end + if script == "auto" then + -- checkedscript and resources are defined later so we cannot shortcut them -- todo: make installer + script = definers.checkedscript(fontidentifiers[font],fontresources[font],features) + end + local ds = dynamics[script] -- can be metatable magic (less testing) + if not ds then + ds = { } + dynamics[script] = ds + end + local dsl = ds[language] + if not dsl then + dsl = { } + ds[language] = dsl + end + local dsla = dsl[attribute] + if not dsla then + local tfmdata = fontidentifiers[font] + a_to_script [attribute] = script + a_to_language[attribute] = language + -- we need to save some values .. quite messy + local properties = tfmdata.properties + local shared = tfmdata.shared + local s_script = properties.script + local s_language = properties.language + local s_mode = properties.mode + local s_features = shared.features + properties.mode = "node" + properties.language = language + properties.script = script + properties.dynamics = true -- handy for tracing + shared.features = { } + -- end of save + local set = constructors.checkedfeatures("otf",features) + set.mode = "node" -- really needed + dsla = otf.setfeatures(tfmdata,set) + if trace_dynamics then + report_otf("setting dynamics %s: attribute %a, script %a, language %a, set %a",contextnumbers[attribute],attribute,script,language,set) + end + -- we need to restore some values + properties.script = s_script + properties.language = s_language + properties.mode = s_mode + shared.features = s_features + -- end of restore + dynamics[script][language][attribute] = dsla -- cache + elseif trace_dynamics then + -- report_otf("using dynamics %s: attribute %a, script %a, language %a",contextnumbers[attribute],attribute,script,language) + end + return dsla + end +end + +function otf.scriptandlanguage(tfmdata,attr) + local properties = tfmdata.properties + if attr and attr > 0 then + return a_to_script[attr] or properties.script or "dflt", a_to_language[attr] or properties.language or "dflt" + else + return properties.script or "dflt", properties.language or "dflt" + end +end + +-- we reimplement the dataset resolver + +local autofeatures = fonts.analyzers.features -- was: constants + +local resolved = { } -- we only resolve a font,script,language,attribute pair once +local wildcard = "*" +local default = "dflt" + +-- what about analyze in local and not in font + +local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr,dynamic) + local features = sequence.features + if features then + for kind, scripts in next, features do + local e_e + local a_e = a_enabled and a_enabled[kind] -- the value (location) + if a_e ~= nil then + e_e = a_e + else + e_e = s_enabled and s_enabled[kind] -- the value (font) 
+ end + if e_e then + local languages = scripts[script] or scripts[wildcard] + if languages then + -- local valid, what = false + local valid = false + -- not languages[language] or languages[default] or languages[wildcard] because we want tracing + -- only first attribute match check, so we assume simple fina's + -- default can become a font feature itself + if languages[language] then + valid = e_e -- was true + -- what = language + -- elseif languages[default] then + -- valid = true + -- what = default + elseif languages[wildcard] then + valid = e_e -- was true + -- what = wildcard + end + if valid then + local attribute = autofeatures[kind] or false + -- if a_e and dynamic < 0 then + -- valid = false + -- end + -- if trace_applied then + -- local typ, action = match(sequence.type,"(.*)_(.*)") -- brrr + -- report_process( + -- "%s font: %03i, dynamic: %03i, kind: %s, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s", + -- (valid and "+") or "-",font,attr or 0,kind,script,language,what,typ,action,sequence.name) + -- end + if trace_applied then + report_process( + "font %s, dynamic %a (%a), feature %a, script %a, language %a, lookup %a, value %a", + font,attr or 0,dynamic,kind,script,language,sequence.name,valid) + end + return { valid, attribute, sequence.chain or 0, kind, sequence } + end + end + end + end + return false -- { valid, attribute, chain, "generic", sequence } -- false anyway, could be flag instead of table + else + return false -- { false, false, chain, false, sequence } -- indirect lookup, part of chain (todo: make this a separate table) + end +end + +-- there is some fuzzy language/script state stuff in properties (temporary) + +function otf.dataset(tfmdata,font,attr) -- attr only when explicit (as in special parbuilder) + + local script, language, s_enabled, a_enabled, dynamic + + if attr and attr ~= 0 then + dynamic = contextmerged[attr] or 0 + -- local features = contextsetups[contextnumbers[attr]] -- could be a direct list + local features = contextsetups[attr] + a_enabled = features -- location based + if dynamic == 1 then -- or dynamic == -1 then + -- replace + language = features.language or "dflt" + script = features.script or "dflt" + elseif dynamic == 2 then -- or dynamic == -2 then + -- merge + local properties = tfmdata.properties + s_enabled = tfmdata.shared.features -- font based + language = features.language or properties.language or "dflt" + script = features.script or properties.script or "dflt" + else + -- error + local properties = tfmdata.properties + language = properties.language or "dflt" + script = properties.script or "dflt" + end + else + local properties = tfmdata.properties + language = properties.language or "dflt" + script = properties.script or "dflt" + s_enabled = tfmdata.shared.features -- can be made local to the resolver + dynamic = 0 + end + + local res = resolved[font] + if not res then + res = { } + resolved[font] = res + end + local rs = res[script] + if not rs then + rs = { } + res[script] = rs + end + local rl = rs[language] + if not rl then + rl = { } + rs[language] = rl + end + local ra = rl[attr] + if ra == nil then -- attr can be false + ra = { + -- indexed but we can also add specific data by key in: + } + rl[attr] = ra + local sequences = tfmdata.resources.sequences +-- setmetatableindex(ra, function(t,k) +-- if type(k) == "number" then +-- local v = initialize(sequences[k],script,language,s_enabled,a_enabled,font,attr,dynamic) +-- t[k] = v or false +-- return v +-- end +-- end) +for s=1,#sequences do + 
local v = initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic) + if v then + ra[#ra+1] = v + end +end + end + return ra + +end diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua index c1f2f14fc..737dc9927 100644 --- a/tex/context/base/font-otf.lua +++ b/tex/context/base/font-otf.lua @@ -1,2155 +1,2155 @@ -if not modules then modules = { } end modules ['font-otf'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- langs -> languages enz --- anchor_classes vs kernclasses --- modification/creationtime in subfont is runtime dus zinloos --- to_table -> totable --- ascent descent - --- more checking against low level calls of functions - -local utfbyte = utf.byte -local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip -local type, next, tonumber, tostring = type, next, tonumber, tostring -local abs = math.abs -local getn = table.getn -local lpegmatch = lpeg.match -local reversed, concat, remove = table.reversed, table.concat, table.remove -local ioflush = io.flush -local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive -local formatters = string.formatters - -local allocate = utilities.storage.allocate -local registertracker = trackers.register -local registerdirective = directives.register -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming -local elapsedtime = statistics.elapsedtime -local findbinfile = resolvers.findbinfile - -local trace_private = false registertracker("otf.private", function(v) trace_private = v end) -local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end) -local trace_features = false registertracker("otf.features", function(v) trace_features = v end) -local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end) -local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end) -local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end) -local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end) - -local report_otf = logs.reporter("fonts","otf loading") - -local fonts = fonts -local otf = fonts.handlers.otf - -otf.glists = { "gsub", "gpos" } - -otf.version = 2.743 -- beware: also sync font-mis.lua -otf.cache = containers.define("fonts", "otf", otf.version, true) - -local fontdata = fonts.hashes.identifiers -local chardata = characters and characters.data -- not used - -local otffeatures = fonts.constructors.newfeatures("otf") -local registerotffeature = otffeatures.register - -local enhancers = allocate() -otf.enhancers = enhancers -local patches = { } -enhancers.patches = patches - -local definers = fonts.definers -local readers = fonts.readers -local constructors = fonts.constructors - -local forceload = false -local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M) -local usemetatables = false -- .4 slower on mk but 30 M less mem so we might change the default -- will be directive -local packdata = true -local syncspace = true -local forcenotdef = false -local includesubfonts = false - -local wildcard = "*" -local default = "dflt" - -local fontloaderfields = fontloader.fields -local mainfields = nil 
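The otf.dataset resolver above caches its result in a four-level table indexed by font, script, language and attribute, creating each level on demand; only the innermost array of initialized sequence entries is ever rebuilt. A hedged, stand-alone sketch of that on-demand nesting pattern (names are illustrative only, not part of the loader):

local cache = { }

local function resolvedslot(font,script,language,attr)
    local f = cache[font]  if not f then f = { } cache[font]  = f end
    local s = f[script]    if not s then s = { } f[script]    = s end
    local l = s[language]  if not l then l = { } s[language]  = l end
    local a = l[attr]      if a == nil then a = { } l[attr] = a end -- attr may be false, hence the nil test
    return a
end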
-local glyphfields = nil -- not used yet - -registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end) -registerdirective("fonts.otf.loader.force", function(v) forceload = v end) -registerdirective("fonts.otf.loader.usemetatables", function(v) usemetatables = v end) -registerdirective("fonts.otf.loader.pack", function(v) packdata = v end) -registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end) -registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end) - -local function load_featurefile(raw,featurefile) - if featurefile and featurefile ~= "" then - if trace_loading then - report_otf("using featurefile %a", featurefile) - end - fontloader.apply_featurefile(raw, featurefile) - end -end - -local function showfeatureorder(rawdata,filename) - local sequences = rawdata.resources.sequences - if sequences and #sequences > 0 then - if trace_loading then - report_otf("font %a has %s sequences",filename,#sequences) - report_otf(" ") - end - for nos=1,#sequences do - local sequence = sequences[nos] - local typ = sequence.type or "no-type" - local name = sequence.name or "no-name" - local subtables = sequence.subtables or { "no-subtables" } - local features = sequence.features - if trace_loading then - report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables) - end - if features then - for feature, scripts in next, features do - local tt = { } - if type(scripts) == "table" then - for script, languages in next, scripts do - local ttt = { } - for language, _ in next, languages do - ttt[#ttt+1] = language - end - tt[#tt+1] = formatters["[%s: % t]"](script,ttt) - end - if trace_loading then - report_otf(" %s: % t",feature,tt) - end - else - if trace_loading then - report_otf(" %s: %S",feature,scripts) - end - end - end - end - end - if trace_loading then - report_otf("\n") - end - elseif trace_loading then - report_otf("font %a has no sequences",filename) - end -end - ---[[ldx-- -

<p>We start with a lot of tables and related functions.</p>

---ldx]]-- - -local valid_fields = table.tohash { - -- "anchor_classes", - "ascent", - -- "cache_version", - "cidinfo", - "copyright", - -- "creationtime", - "descent", - "design_range_bottom", - "design_range_top", - "design_size", - "encodingchanged", - "extrema_bound", - "familyname", - "fontname", - "fontname", - "fontstyle_id", - "fontstyle_name", - "fullname", - -- "glyphs", - "hasvmetrics", - -- "head_optimized_for_cleartype", - "horiz_base", - "issans", - "isserif", - "italicangle", - -- "kerns", - -- "lookups", - "macstyle", - -- "modificationtime", - "onlybitmaps", - "origname", - "os2_version", - "pfminfo", - -- "private", - "serifcheck", - "sfd_version", - -- "size", - "strokedfont", - "strokewidth", - -- "subfonts", - "table_version", - -- "tables", - -- "ttf_tab_saved", - "ttf_tables", - "uni_interp", - "uniqueid", - "units_per_em", - "upos", - "use_typo_metrics", - "uwidth", - -- "validation_state", - "version", - "vert_base", - "weight", - "weight_width_slope_only", - -- "xuid", -} - -local ordered_enhancers = { - "prepare tables", - - "prepare glyphs", - "prepare lookups", - - "analyze glyphs", - "analyze math", - - "prepare tounicode", -- maybe merge with prepare - - "reorganize lookups", - "reorganize mark classes", - "reorganize anchor classes", - - "reorganize glyph kerns", - "reorganize glyph lookups", - "reorganize glyph anchors", - - "merge kern classes", - - "reorganize features", - "reorganize subtables", - - "check glyphs", - "check metadata", - "check extra features", -- after metadata - - "check encoding", -- moved - "add duplicates", - - "cleanup tables", -} - ---[[ldx-- -

<p>Here we go.</p>

---ldx]]-- - -local actions = allocate() -local before = allocate() -local after = allocate() - -patches.before = before -patches.after = after - -local function enhance(name,data,filename,raw) - local enhancer = actions[name] - if enhancer then - if trace_loading then - report_otf("apply enhancement %a to file %a",name,filename) - ioflush() - end - enhancer(data,filename,raw) - else - -- no message as we can have private ones - end -end - -function enhancers.apply(data,filename,raw) - local basename = file.basename(lower(filename)) - if trace_loading then - report_otf("%s enhancing file %a","start",filename) - end - ioflush() -- we want instant messages - for e=1,#ordered_enhancers do - local enhancer = ordered_enhancers[e] - local b = before[enhancer] - if b then - for pattern, action in next, b do - if find(basename,pattern) then - action(data,filename,raw) - end - end - end - enhance(enhancer,data,filename,raw) - local a = after[enhancer] - if a then - for pattern, action in next, a do - if find(basename,pattern) then - action(data,filename,raw) - end - end - end - ioflush() -- we want instant messages - end - if trace_loading then - report_otf("%s enhancing file %a","stop",filename) - end - ioflush() -- we want instant messages -end - --- patches.register("before","migrate metadata","cambria",function() end) - -function patches.register(what,where,pattern,action) - local pw = patches[what] - if pw then - local ww = pw[where] - if ww then - ww[pattern] = action - else - pw[where] = { [pattern] = action} - end - end -end - -function patches.report(fmt,...) - if trace_loading then - report_otf("patching: %s",formatters[fmt](...)) - end -end - -function enhancers.register(what,action) -- only already registered can be overloaded - actions[what] = action -end - -function otf.load(filename,format,sub,featurefile) - local base = file.basename(file.removesuffix(filename)) - local name = file.removesuffix(base) - local attr = lfs.attributes(filename) - local size = attr and attr.size or 0 - local time = attr and attr.modification or 0 - if featurefile then - name = name .. "@" .. file.removesuffix(file.basename(featurefile)) - end - if sub == "" then - sub = false - end - local hash = name - if sub then - hash = hash .. "-" .. 
sub - end - hash = containers.cleanname(hash) - local featurefiles - if featurefile then - featurefiles = { } - for s in gmatch(featurefile,"[^,]+") do - local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" - if name == "" then - report_otf("loading error, no featurefile %a",s) - else - local attr = lfs.attributes(name) - featurefiles[#featurefiles+1] = { - name = name, - size = attr and attr.size or 0, - time = attr and attr.modification or 0, - } - end - end - if #featurefiles == 0 then - featurefiles = nil - end - end - local data = containers.read(otf.cache,hash) - local reload = not data or data.size ~= size or data.time ~= time - if forceload then - report_otf("forced reload of %a due to hard coded flag",filename) - reload = true - end - if not reload then - local featuredata = data.featuredata - if featurefiles then - if not featuredata or #featuredata ~= #featurefiles then - reload = true - else - for i=1,#featurefiles do - local fi, fd = featurefiles[i], featuredata[i] - if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then - reload = true - break - end - end - end - elseif featuredata then - reload = true - end - if reload then - report_otf("loading: forced reload due to changed featurefile specification %a",featurefile) - end - end - if reload then - report_otf("loading %a, hash %a",filename,hash) - local fontdata, messages - if sub then - fontdata, messages = fontloader.open(filename,sub) - else - fontdata, messages = fontloader.open(filename) - end - if fontdata then - mainfields = mainfields or (fontloaderfields and fontloaderfields(fontdata)) - end - if trace_loading and messages and #messages > 0 then - if type(messages) == "string" then - report_otf("warning: %s",messages) - else - for m=1,#messages do - report_otf("warning: %S",messages[m]) - end - end - else - report_otf("loading done") - end - if fontdata then - if featurefiles then - for i=1,#featurefiles do - load_featurefile(fontdata,featurefiles[i].name) - end - end - local unicodes = { - -- names to unicodes - } - local splitter = lpeg.splitter(" ",unicodes) - data = { - size = size, - time = time, - format = format, - featuredata = featurefiles, - resources = { - filename = resolvers.unresolve(filename), -- no shortcut - version = otf.version, - creator = "context mkiv", - unicodes = unicodes, - indices = { - -- index to unicodes - }, - duplicates = { - -- alternative unicodes - }, - variants = { - -- alternative unicodes (variants) - }, - lookuptypes = { - }, - }, - metadata = { - -- raw metadata, not to be used - }, - properties = { - -- normalized metadata - }, - descriptions = { - }, - goodies = { - }, - helpers = { - tounicodelist = splitter, - tounicodetable = lpeg.Ct(splitter), - }, - } - starttiming(data) - report_otf("file size: %s", size) - enhancers.apply(data,filename,fontdata) - local packtime = { } - if packdata then - if cleanup > 0 then - collectgarbage("collect") - end - starttiming(packtime) - enhance("pack",data,filename,nil) - stoptiming(packtime) - end - report_otf("saving %a in cache",filename) - data = containers.write(otf.cache, hash, data) - if cleanup > 1 then - collectgarbage("collect") - end - stoptiming(data) - if elapsedtime then -- not in generic - report_otf("preprocessing and caching time %s, packtime %s", - elapsedtime(data),packdata and elapsedtime(packtime) or 0) - end - fontloader.close(fontdata) -- free memory - if cleanup > 3 then - collectgarbage("collect") - end - data = containers.read(otf.cache, hash) -- this frees the old table and load 
the sparse one - if cleanup > 2 then - collectgarbage("collect") - end - else - data = nil - report_otf("loading failed due to read error") - end - end - if data then - if trace_defining then - report_otf("loading from cache using hash %a",hash) - end - enhance("unpack",data,filename,nil,false) - enhance("add dimensions",data,filename,nil,false) - if trace_sequences then - showfeatureorder(data,filename) - end - end - return data -end - -local mt = { - __index = function(t,k) -- maybe set it - if k == "height" then - local ht = t.boundingbox[4] - return ht < 0 and 0 or ht - elseif k == "depth" then - local dp = -t.boundingbox[2] - return dp < 0 and 0 or dp - elseif k == "width" then - return 0 - elseif k == "name" then -- or maybe uni* - return forcenotdef and ".notdef" - end - end -} - -actions["prepare tables"] = function(data,filename,raw) - data.properties.hasitalics = false -end - -actions["add dimensions"] = function(data,filename) - -- todo: forget about the width if it's the defaultwidth (saves mem) - -- we could also build the marks hash here (instead of storing it) - if data then - local descriptions = data.descriptions - local resources = data.resources - local defaultwidth = resources.defaultwidth or 0 - local defaultheight = resources.defaultheight or 0 - local defaultdepth = resources.defaultdepth or 0 - local basename = trace_markwidth and file.basename(filename) - if usemetatables then - for _, d in next, descriptions do - local wd = d.width - if not wd then - d.width = defaultwidth - elseif trace_markwidth and wd ~= 0 and d.class == "mark" then - report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) - -- d.width = -wd - end - setmetatable(d,mt) - end - else - for _, d in next, descriptions do - local bb, wd = d.boundingbox, d.width - if not wd then - d.width = defaultwidth - elseif trace_markwidth and wd ~= 0 and d.class == "mark" then - report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) - -- d.width = -wd - end - -- if forcenotdef and not d.name then - -- d.name = ".notdef" - -- end - if bb then - local ht, dp = bb[4], -bb[2] - if ht == 0 or ht < 0 then - -- not set - else - d.height = ht - end - if dp == 0 or dp < 0 then - -- not set - else - d.depth = dp - end - end - end - end - end -end - -local function somecopy(old) -- fast one - if old then - local new = { } - if type(old) == "table" then - for k, v in next, old do - if k == "glyphs" then - -- skip - elseif type(v) == "table" then - new[k] = somecopy(v) - else - new[k] = v - end - end - else - for i=1,#mainfields do - local k = mainfields[i] - local v = old[k] - if k == "glyphs" then - -- skip - elseif type(v) == "table" then - new[k] = somecopy(v) - else - new[k] = v - end - end - end - return new - else - return { } - end -end - --- not setting hasitalics and class (when nil) during table cronstruction can save some mem - -actions["prepare glyphs"] = function(data,filename,raw) - local rawglyphs = raw.glyphs - local rawsubfonts = raw.subfonts - local rawcidinfo = raw.cidinfo - local criterium = constructors.privateoffset - local private = criterium - local resources = data.resources - local metadata = data.metadata - local properties = data.properties - local descriptions = data.descriptions - local unicodes = resources.unicodes -- name to unicode - local indices = resources.indices -- index to unicode - local duplicates = resources.duplicates - local variants = resources.variants - - if rawsubfonts then - - metadata.subfonts = includesubfonts and { } - properties.cidinfo 
= rawcidinfo - - if rawcidinfo.registry then - local cidmap = fonts.cid.getmap(rawcidinfo) - if cidmap then - rawcidinfo.usedname = cidmap.usedname - local nofnames, nofunicodes = 0, 0 - local cidunicodes, cidnames = cidmap.unicodes, cidmap.names - for cidindex=1,#rawsubfonts do - local subfont = rawsubfonts[cidindex] - local cidglyphs = subfont.glyphs - if includesubfonts then - metadata.subfonts[cidindex] = somecopy(subfont) - end - for index=0,subfont.glyphcnt-1 do -- we could take the previous glyphcnt instead of 0 - local glyph = cidglyphs[index] - if glyph then - local unicode = glyph.unicode - local name = glyph.name or cidnames[index] - if not unicode or unicode == -1 or unicode >= criterium then - unicode = cidunicodes[index] - end - if unicode and descriptions[unicode] then - report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode) - unicode = -1 - end - if not unicode or unicode == -1 or unicode >= criterium then - if not name then - name = format("u%06X",private) - end - unicode = private - unicodes[name] = private - if trace_private then - report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) - end - private = private + 1 - nofnames = nofnames + 1 - else - if not name then - name = format("u%06X",unicode) - end - unicodes[name] = unicode - nofunicodes = nofunicodes + 1 - end - indices[index] = unicode -- each index is unique (at least now) - - local description = { - -- width = glyph.width, - boundingbox = glyph.boundingbox, - name = glyph.name or name or "unknown", -- uniXXXX - cidindex = cidindex, - index = index, - glyph = glyph, - } - - descriptions[unicode] = description - else - -- report_otf("potential problem: glyph %U is used but empty",index) - end - end - end - if trace_loading then - report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames) - end - elseif trace_loading then - report_otf("unable to remap cid font, missing cid file for %a",filename) - end - elseif trace_loading then - report_otf("font %a has no glyphs",filename) - end - - else - - for index=0,raw.glyphcnt-1 do -- not raw.glyphmax-1 (as that will crash) - local glyph = rawglyphs[index] - if glyph then - local unicode = glyph.unicode - local name = glyph.name - if not unicode or unicode == -1 or unicode >= criterium then - unicode = private - unicodes[name] = private - if trace_private then - report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) - end - private = private + 1 - else - unicodes[name] = unicode - end - indices[index] = unicode - if not name then - name = format("u%06X",unicode) - end - descriptions[unicode] = { - -- width = glyph.width, - boundingbox = glyph.boundingbox, - name = name, - index = index, - glyph = glyph, - } - local altuni = glyph.altuni - if altuni then - local d - for i=1,#altuni do - local a = altuni[i] - local u = a.unicode - local v = a.variant - if v then - -- tricky: no addition to d? 
needs checking but in practice such dups are either very simple - -- shapes or e.g cjk with not that many features - local vv = variants[v] - if vv then - vv[u] = unicode - else -- xits-math has some: - vv = { [u] = unicode } - variants[v] = vv - end - elseif d then - d[#d+1] = u - else - d = { u } - end - end - if d then - duplicates[unicode] = d - end - end - else - report_otf("potential problem: glyph %U is used but empty",index) - end - end - - end - - resources.private = private - -end - --- the next one is still messy but will get better when we have --- flattened map/enc tables in the font loader - -actions["check encoding"] = function(data,filename,raw) - local descriptions = data.descriptions - local resources = data.resources - local properties = data.properties - local unicodes = resources.unicodes -- name to unicode - local indices = resources.indices -- index to unicodes - local duplicates = resources.duplicates - - -- begin of messy (not needed when cidmap) - - local mapdata = raw.map or { } - local unicodetoindex = mapdata and mapdata.map or { } - -- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "") - local encname = lower(data.enc_name or mapdata.enc_name or "") - local criterium = 0xFFFF -- for instance cambria has a lot of mess up there - - -- end of messy - - if find(encname,"unicode") then -- unicodebmp, unicodefull, ... - if trace_loading then - report_otf("checking embedded unicode map %a",encname) - end - for unicode, index in next, unicodetoindex do -- altuni already covers this - if unicode <= criterium and not descriptions[unicode] then - local parent = indices[index] -- why nil? - if not parent then - report_otf("weird, unicode %U points to nowhere with index %H",unicode,index) - else - local parentdescription = descriptions[parent] - if parentdescription then - local altuni = parentdescription.altuni - if not altuni then - altuni = { { unicode = parent } } - parentdescription.altuni = altuni - duplicates[parent] = { unicode } - else - local done = false - for i=1,#altuni do - if altuni[i].unicode == parent then - done = true - break - end - end - if not done then - -- let's assume simple cjk reuse - altuni[#altuni+1] = { unicode = parent } - table.insert(duplicates[parent],unicode) - end - end - if trace_loading then - report_otf("weird, unicode %U points to nowhere with index %H",unicode,index) - end - else - report_otf("weird, unicode %U points to %U with index %H",unicode,index) - end - end - end - end - elseif properties.cidinfo then - report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname) - else - report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever") - end - - if mapdata then - mapdata.map = { } -- clear some memory - end -end - --- for the moment we assume that a font with lookups will not use --- altuni so we stick to kerns only - -actions["add duplicates"] = function(data,filename,raw) - local descriptions = data.descriptions - local resources = data.resources - local properties = data.properties - local unicodes = resources.unicodes -- name to unicode - local indices = resources.indices -- index to unicodes - local duplicates = resources.duplicates - - for unicode, d in next, duplicates do - for i=1,#d do - local u = d[i] - if not descriptions[u] then - local description = descriptions[unicode] - local duplicate = table.copy(description) -- else packing problem - duplicate.comment = format("copy of U+%05X", unicode) - descriptions[u] = duplicate - local n = 
0 - for _, description in next, descriptions do - if kerns then - local kerns = description.kerns - for _, k in next, kerns do - local ku = k[unicode] - if ku then - k[u] = ku - n = n + 1 - end - end - end - -- todo: lookups etc - end - if trace_loading then - report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n) - end - end - end - end -end - --- class : nil base mark ligature component (maybe we don't need it in description) --- boundingbox: split into ht/dp takes more memory (larger tables and less sharing) - -actions["analyze glyphs"] = function(data,filename,raw) -- maybe integrate this in the previous - local descriptions = data.descriptions - local resources = data.resources - local metadata = data.metadata - local properties = data.properties - local hasitalics = false - local widths = { } - local marks = { } -- always present (saves checking) - for unicode, description in next, descriptions do - local glyph = description.glyph - local italic = glyph.italic_correction - if not italic then - -- skip - elseif italic == 0 then - -- skip - else - description.italic = italic - hasitalics = true - end - local width = glyph.width - widths[width] = (widths[width] or 0) + 1 - local class = glyph.class - if class then - if class == "mark" then - marks[unicode] = true - end - description.class = class - end - end - -- flag italic - properties.hasitalics = hasitalics - -- flag marks - resources.marks = marks - -- share most common width for cjk fonts - local wd, most = 0, 1 - for k,v in next, widths do - if v > most then - wd, most = k, v - end - end - if most > 1000 then -- maybe 500 - if trace_loading then - report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most) - end - for unicode, description in next, descriptions do - if description.width == wd then - -- description.width = nil - else - description.width = description.glyph.width - end - end - resources.defaultwidth = wd - else - for unicode, description in next, descriptions do - description.width = description.glyph.width - end - end -end - -actions["reorganize mark classes"] = function(data,filename,raw) - local mark_classes = raw.mark_classes - if mark_classes then - local resources = data.resources - local unicodes = resources.unicodes - local markclasses = { } - resources.markclasses = markclasses -- reversed - for name, class in next, mark_classes do - local t = { } - for s in gmatch(class,"[^ ]+") do - t[unicodes[s]] = true - end - markclasses[name] = t - end - end -end - -actions["reorganize features"] = function(data,filename,raw) -- combine with other - local features = { } - data.resources.features = features - for k, what in next, otf.glists do - local dw = raw[what] - if dw then - local f = { } - features[what] = f - for i=1,#dw do - local d= dw[i] - local dfeatures = d.features - if dfeatures then - for i=1,#dfeatures do - local df = dfeatures[i] - local tag = strip(lower(df.tag)) - local ft = f[tag] - if not ft then - ft = { } - f[tag] = ft - end - local dscripts = df.scripts - for i=1,#dscripts do - local d = dscripts[i] - local languages = d.langs - local script = strip(lower(d.script)) - local fts = ft[script] if not fts then fts = {} ft[script] = fts end - for i=1,#languages do - fts[strip(lower(languages[i]))] = true - end - end - end - end - end - end - end -end - -actions["reorganize anchor classes"] = function(data,filename,raw) - local resources = data.resources - local anchor_to_lookup = { } - local lookup_to_anchor = { } - resources.anchor_to_lookup = 
anchor_to_lookup - resources.lookup_to_anchor = lookup_to_anchor - local classes = raw.anchor_classes -- anchor classes not in final table - if classes then - for c=1,#classes do - local class = classes[c] - local anchor = class.name - local lookups = class.lookup - if type(lookups) ~= "table" then - lookups = { lookups } - end - local a = anchor_to_lookup[anchor] - if not a then - a = { } - anchor_to_lookup[anchor] = a - end - for l=1,#lookups do - local lookup = lookups[l] - local l = lookup_to_anchor[lookup] - if l then - l[anchor] = true - else - l = { [anchor] = true } - lookup_to_anchor[lookup] = l - end - a[lookup] = true - end - end - end -end - -actions["prepare tounicode"] = function(data,filename,raw) - fonts.mappings.addtounicode(data,filename) -end - -local g_directions = { - gsub_contextchain = 1, - gpos_contextchain = 1, - -- gsub_context = 1, - -- gpos_context = 1, - gsub_reversecontextchain = -1, - gpos_reversecontextchain = -1, -} - --- Research by Khaled Hosny has demonstrated that the font loader merges --- regular and AAT features and that these can interfere (especially because --- we dropped checking for valid features elsewhere. So, we just check for --- the special flag and drop the feature if such a tag is found. - -local function supported(features) - for i=1,#features do - if features[i].ismac then - return false - end - end - return true -end - -actions["reorganize subtables"] = function(data,filename,raw) - local resources = data.resources - local sequences = { } - local lookups = { } - local chainedfeatures = { } - resources.sequences = sequences - resources.lookups = lookups - for _, what in next, otf.glists do - local dw = raw[what] - if dw then - for k=1,#dw do - local gk = dw[k] - local features = gk.features --- if features and supported(features) then - if not features or supported(features) then -- not always features ! 
- local typ = gk.type - local chain = g_directions[typ] or 0 - local subtables = gk.subtables - if subtables then - local t = { } - for s=1,#subtables do - t[s] = subtables[s].name - end - subtables = t - end - local flags, markclass = gk.flags, nil - if flags then - local t = { -- forcing false packs nicer - (flags.ignorecombiningmarks and "mark") or false, - (flags.ignoreligatures and "ligature") or false, - (flags.ignorebaseglyphs and "base") or false, - flags.r2l or false, - } - markclass = flags.mark_class - if markclass then - markclass = resources.markclasses[markclass] - end - flags = t - end - -- - local name = gk.name - -- - if not name then - -- in fact an error - report_otf("skipping weird lookup number %s",k) - elseif features then - -- scripts, tag, ismac - local f = { } - for i=1,#features do - local df = features[i] - local tag = strip(lower(df.tag)) - local ft = f[tag] if not ft then ft = {} f[tag] = ft end - local dscripts = df.scripts - for i=1,#dscripts do - local d = dscripts[i] - local languages = d.langs - local script = strip(lower(d.script)) - local fts = ft[script] if not fts then fts = {} ft[script] = fts end - for i=1,#languages do - fts[strip(lower(languages[i]))] = true - end - end - end - sequences[#sequences+1] = { - type = typ, - chain = chain, - flags = flags, - name = name, - subtables = subtables, - markclass = markclass, - features = f, - } - else - lookups[name] = { - type = typ, - chain = chain, - flags = flags, - subtables = subtables, - markclass = markclass, - } - end - end - end - end - end -end - --- test this: --- --- for _, what in next, otf.glists do --- raw[what] = nil --- end - -actions["prepare lookups"] = function(data,filename,raw) - local lookups = raw.lookups - if lookups then - data.lookups = lookups - end -end - --- The reverse handler does a bit redundant splitting but it's seldom --- seen so we don't bother too much. We could store the replacement --- in the current list (value instead of true) but it makes other code --- uglier. Maybe some day. 
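The t_uncover and t_hashed helpers defined next turn space-separated cover strings into glyph lists and then into sets for fast membership tests. A minimal standalone sketch of the idea, with a plain string splitter standing in for the name-to-unicode lpeg splitter (data.helpers.tounicodetable) that the loader really uses:

    -- illustrative only: split a cover string, cache the result, hash into sets
    local cache = { }

    local function split(cover)                -- stand-in for the lpeg splitter
        local t = { }
        for s in string.gmatch(cover,"%S+") do
            t[#t+1] = s
        end
        return t
    end

    local function t_uncover(covers)           -- list of cover strings -> list of lists
        local result = { }
        for n=1,#covers do
            local cover     = covers[n]
            local uncovered = cache[cover]
            if not uncovered then
                uncovered    = split(cover)
                cache[cover] = uncovered       -- identical strings share one table
            end
            result[n] = uncovered
        end
        return result
    end

    local function t_hashed(t)                 -- lists -> sets
        local ht = { }
        for i=1,#t do
            local ti, tih = t[i], { }
            for j=1,#ti do
                tih[ti[j]] = true
            end
            ht[i] = tih
        end
        return ht
    end

    local hashed = t_hashed(t_uncover { "f f_i", "a e o" })
    -- hashed[1] -> { f = true, f_i = true }
    -- hashed[2] -> { a = true, e = true, o = true }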
- -local function t_uncover(splitter,cache,covers) - local result = { } - for n=1,#covers do - local cover = covers[n] - local uncovered = cache[cover] - if not uncovered then - uncovered = lpegmatch(splitter,cover) - cache[cover] = uncovered - end - result[n] = uncovered - end - return result -end - -local function s_uncover(splitter,cache,cover) - if cover == "" then - return nil - else - local uncovered = cache[cover] - if not uncovered then - uncovered = lpegmatch(splitter,cover) --- for i=1,#uncovered do --- uncovered[i] = { [uncovered[i]] = true } --- end - cache[cover] = uncovered - end - return { uncovered } - end -end - -local function t_hashed(t,cache) - if t then - local ht = { } - for i=1,#t do - local ti = t[i] - local tih = cache[ti] - if not tih then - tih = { } - for i=1,#ti do - tih[ti[i]] = true - end - cache[ti] = tih - end - ht[i] = tih - end - return ht - else - return nil - end -end - --- local s_hashed = t_hashed - -local function s_hashed(t,cache) - if t then - local ht = { } - local tf = t[1] - for i=1,#tf do - ht[i] = { [tf[i]] = true } - end - return ht - else - return nil - end -end - -local function r_uncover(splitter,cache,cover,replacements) - if cover == "" then - return nil - else - -- we always have current as { } even in the case of one - local uncovered = cover[1] - local replaced = cache[replacements] - if not replaced then - replaced = lpegmatch(splitter,replacements) - cache[replacements] = replaced - end - local nu, nr = #uncovered, #replaced - local r = { } - if nu == nr then - for i=1,nu do - r[uncovered[i]] = replaced[i] - end - end - return r - end -end - -actions["reorganize lookups"] = function(data,filename,raw) -- we could check for "" and n == 0 - -- we prefer the before lookups in a normal order - if data.lookups then - local splitter = data.helpers.tounicodetable - local t_u_cache = { } - local s_u_cache = t_u_cache -- string keys - local t_h_cache = { } - local s_h_cache = t_h_cache -- table keys (so we could use one cache) - local r_u_cache = { } -- maybe shared - for _, lookup in next, data.lookups do - local rules = lookup.rules - if rules then - local format = lookup.format - if format == "class" then - local before_class = lookup.before_class - if before_class then - before_class = t_uncover(splitter,t_u_cache,reversed(before_class)) - end - local current_class = lookup.current_class - if current_class then - current_class = t_uncover(splitter,t_u_cache,current_class) - end - local after_class = lookup.after_class - if after_class then - after_class = t_uncover(splitter,t_u_cache,after_class) - end - for i=1,#rules do - local rule = rules[i] - local class = rule.class - local before = class.before - if before then - for i=1,#before do - before[i] = before_class[before[i]] or { } - end - rule.before = t_hashed(before,t_h_cache) - end - local current = class.current - local lookups = rule.lookups - if current then - for i=1,#current do - current[i] = current_class[current[i]] or { } - -- let's not be sparse - if lookups and not lookups[i] then - lookups[i] = "" -- (was: false) e.g. 
we can have two lookups and one replacement - end - -- end of fix - end - rule.current = t_hashed(current,t_h_cache) - end - local after = class.after - if after then - for i=1,#after do - after[i] = after_class[after[i]] or { } - end - rule.after = t_hashed(after,t_h_cache) - end - rule.class = nil - end - lookup.before_class = nil - lookup.current_class = nil - lookup.after_class = nil - lookup.format = "coverage" - elseif format == "coverage" then - for i=1,#rules do - local rule = rules[i] - local coverage = rule.coverage - if coverage then - local before = coverage.before - if before then - before = t_uncover(splitter,t_u_cache,reversed(before)) - rule.before = t_hashed(before,t_h_cache) - end - local current = coverage.current - if current then - current = t_uncover(splitter,t_u_cache,current) - -- let's not be sparse - local lookups = rule.lookups - if lookups then - for i=1,#current do - if not lookups[i] then - lookups[i] = "" -- fix sparse array - end - end - end - -- - rule.current = t_hashed(current,t_h_cache) - end - local after = coverage.after - if after then - after = t_uncover(splitter,t_u_cache,after) - rule.after = t_hashed(after,t_h_cache) - end - rule.coverage = nil - end - end - elseif format == "reversecoverage" then -- special case, single substitution only - for i=1,#rules do - local rule = rules[i] - local reversecoverage = rule.reversecoverage - if reversecoverage then - local before = reversecoverage.before - if before then - before = t_uncover(splitter,t_u_cache,reversed(before)) - rule.before = t_hashed(before,t_h_cache) - end - local current = reversecoverage.current - if current then - current = t_uncover(splitter,t_u_cache,current) - rule.current = t_hashed(current,t_h_cache) - end - local after = reversecoverage.after - if after then - after = t_uncover(splitter,t_u_cache,after) - rule.after = t_hashed(after,t_h_cache) - end - local replacements = reversecoverage.replacements - if replacements then - rule.replacements = r_uncover(splitter,r_u_cache,current,replacements) - end - rule.reversecoverage = nil - end - end - elseif format == "glyphs" then - -- I could store these more efficient (as not we use a nested tables for before, - -- after and current but this features happens so seldom that I don't bother - -- about it right now. 
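For orientation, this is roughly what a single contextual rule looks like once this action is done with it: the cover strings are gone and before/current/after have become arrays of sets keyed by unicode. The slots and lookup name below are invented; only the shape follows from the conversion shown here:

    local rule = {
        before  = { { [0x66] = true } },                     -- one set per matched position
        current = { { [0x66] = true }, { [0x69] = true } },
        lookups = { "ls_l_21", "" },                         -- "" pads a sparse slot
    }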
- for i=1,#rules do - local rule = rules[i] - local glyphs = rule.glyphs - if glyphs then - local fore = glyphs.fore - if fore and fore ~= "" then - fore = s_uncover(splitter,s_u_cache,fore) - rule.before = s_hashed(fore,s_h_cache) - end - local back = glyphs.back - if back then - back = s_uncover(splitter,s_u_cache,back) - rule.after = s_hashed(back,s_h_cache) - end - local names = glyphs.names - if names then - names = s_uncover(splitter,s_u_cache,names) - rule.current = s_hashed(names,s_h_cache) - end - rule.glyphs = nil - end - end - end - end - end - end -end - -local function check_variants(unicode,the_variants,splitter,unicodes) - local variants = the_variants.variants - if variants then -- use splitter - local glyphs = lpegmatch(splitter,variants) - local done = { [unicode] = true } - local n = 0 - for i=1,#glyphs do - local g = glyphs[i] - if done[g] then - report_otf("skipping cyclic reference %U in math variant %U",g,unicode) - else - if n == 0 then - n = 1 - variants = { g } - else - n = n + 1 - variants[n] = g - end - done[g] = true - end - end - if n == 0 then - variants = nil - end - end - local parts = the_variants.parts - if parts then - local p = #parts - if p > 0 then - for i=1,p do - local pi = parts[i] - pi.glyph = unicodes[pi.component] or 0 - pi.component = nil - end - else - parts = nil - end - end - local italic_correction = the_variants.italic_correction - if italic_correction and italic_correction == 0 then - italic_correction = nil - end - return variants, parts, italic_correction -end - -actions["analyze math"] = function(data,filename,raw) - if raw.math then - data.metadata.math = raw.math - local unicodes = data.resources.unicodes - local splitter = data.helpers.tounicodetable - for unicode, description in next, data.descriptions do - local glyph = description.glyph - local mathkerns = glyph.mathkern -- singular - local horiz_variants = glyph.horiz_variants - local vert_variants = glyph.vert_variants - local top_accent = glyph.top_accent - if mathkerns or horiz_variants or vert_variants or top_accent then - local math = { } - if top_accent then - math.top_accent = top_accent - end - if mathkerns then - for k, v in next, mathkerns do - if not next(v) then - mathkerns[k] = nil - else - for k, v in next, v do - if v == 0 then - k[v] = nil -- height / kern can be zero - end - end - end - end - math.kerns = mathkerns - end - if horiz_variants then - math.horiz_variants, math.horiz_parts, math.horiz_italic_correction = check_variants(unicode,horiz_variants,splitter,unicodes) - end - if vert_variants then - math.vert_variants, math.vert_parts, math.vert_italic_correction = check_variants(unicode,vert_variants,splitter,unicodes) - end - local italic_correction = description.italic - if italic_correction and italic_correction ~= 0 then - math.italic_correction = italic_correction - end - description.math = math - end - end - end -end - -actions["reorganize glyph kerns"] = function(data,filename,raw) - local descriptions = data.descriptions - local resources = data.resources - local unicodes = resources.unicodes - for unicode, description in next, descriptions do - local kerns = description.glyph.kerns - if kerns then - local newkerns = { } - for k, kern in next, kerns do - local name = kern.char - local offset = kern.off - local lookup = kern.lookup - if name and offset and lookup then - local unicode = unicodes[name] - if unicode then - if type(lookup) == "table" then - for l=1,#lookup do - local lookup = lookup[l] - local lookupkerns = newkerns[lookup] - if 
lookupkerns then - lookupkerns[unicode] = offset - else - newkerns[lookup] = { [unicode] = offset } - end - end - else - local lookupkerns = newkerns[lookup] - if lookupkerns then - lookupkerns[unicode] = offset - else - newkerns[lookup] = { [unicode] = offset } - end - end - elseif trace_loading then - report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode) - end - end - end - description.kerns = newkerns - end - end -end - -actions["merge kern classes"] = function(data,filename,raw) - local gposlist = raw.gpos - if gposlist then - local descriptions = data.descriptions - local resources = data.resources - local unicodes = resources.unicodes - local splitter = data.helpers.tounicodetable - for gp=1,#gposlist do - local gpos = gposlist[gp] - local subtables = gpos.subtables - if subtables then - for s=1,#subtables do - local subtable = subtables[s] - local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes - if kernclass then -- the next one is quite slow - local split = { } -- saves time - for k=1,#kernclass do - local kcl = kernclass[k] - local firsts = kcl.firsts - local seconds = kcl.seconds - local offsets = kcl.offsets - local lookups = kcl.lookup -- singular - if type(lookups) ~= "table" then - lookups = { lookups } - end - -- if offsets[1] == nil then - -- offsets[1] = "" - -- end - -- we can check the max in the loop - -- local maxseconds = getn(seconds) - for n, s in next, firsts do - split[s] = split[s] or lpegmatch(splitter,s) - end - local maxseconds = 0 - for n, s in next, seconds do - if n > maxseconds then - maxseconds = n - end - split[s] = split[s] or lpegmatch(splitter,s) - end - for l=1,#lookups do - local lookup = lookups[l] - for fk=1,#firsts do -- maxfirsts ? - local fv = firsts[fk] - local splt = split[fv] - if splt then - local extrakerns = { } - local baseoffset = (fk-1) * maxseconds - for sk=2,maxseconds do -- will become 1 based in future luatex - local sv = seconds[sk] - -- for sk, sv in next, seconds do - local splt = split[sv] - if splt then -- redundant test - local offset = offsets[baseoffset + sk] - if offset then - for i=1,#splt do - extrakerns[splt[i]] = offset - end - end - end - end - for i=1,#splt do - local first_unicode = splt[i] - local description = descriptions[first_unicode] - if description then - local kerns = description.kerns - if not kerns then - kerns = { } -- unicode indexed ! - description.kerns = kerns - end - local lookupkerns = kerns[lookup] - if not lookupkerns then - lookupkerns = { } - kerns[lookup] = lookupkerns - end - for second_unicode, kern in next, extrakerns do - lookupkerns[second_unicode] = kern - end - elseif trace_loading then - report_otf("no glyph data for %U", first_unicode) - end - end - end - end - end - end - subtable.kernclass = { } - end - end - end - end - end -end - -actions["check glyphs"] = function(data,filename,raw) - for unicode, description in next, data.descriptions do - description.glyph = nil - end -end - --- future versions will remove _ - -actions["check metadata"] = function(data,filename,raw) - local metadata = data.metadata - for _, k in next, mainfields do - if valid_fields[k] then - local v = raw[k] - if not metadata[k] then - metadata[k] = v - end - end - end - -- metadata.pfminfo = raw.pfminfo -- not already done? 
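Looking back at the kern-class merging above: offsets is a flat, row-major matrix of #firsts by maxseconds entries, so the kern between first class fk and second class sk is found at offsets[(fk-1)*maxseconds + sk]. A toy example (the class strings and values are invented, only the indexing comes from the code):

    local firsts     = { "A V W", "T Y" }        -- two first classes
    local seconds    = { nil, "o e c", "a" }     -- second class 1 is the implicit rest class
    local maxseconds = 3
    local offsets    = { 0, -80, -60,            -- row for first class 1
                         0, -70, -50 }           -- row for first class 2

    local fk, sk = 2, 3                          -- "T Y" against "a"
    print(offsets[(fk-1)*maxseconds + sk])       -- -50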
- local ttftables = metadata.ttf_tables - if ttftables then - for i=1,#ttftables do - ttftables[i].data = "deleted" - end - end -end - -actions["cleanup tables"] = function(data,filename,raw) - data.resources.indices = nil -- not needed - data.helpers = nil -end - --- kern: ttf has a table with kerns --- --- Weird, as maxfirst and maxseconds can have holes, first seems to be indexed, but --- seconds can start at 2 .. this need to be fixed as getn as well as # are sort of --- unpredictable alternatively we could force an [1] if not set (maybe I will do that --- anyway). - --- we can share { } as it is never set - ---- ligatures have an extra specification.char entry that we don't use - -actions["reorganize glyph lookups"] = function(data,filename,raw) - local resources = data.resources - local unicodes = resources.unicodes - local descriptions = data.descriptions - local splitter = data.helpers.tounicodelist - - local lookuptypes = resources.lookuptypes - - for unicode, description in next, descriptions do - local lookups = description.glyph.lookups - if lookups then - for tag, lookuplist in next, lookups do - for l=1,#lookuplist do - local lookup = lookuplist[l] - local specification = lookup.specification - local lookuptype = lookup.type - local lt = lookuptypes[tag] - if not lt then - lookuptypes[tag] = lookuptype - elseif lt ~= lookuptype then - report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype) - end - if lookuptype == "ligature" then - lookuplist[l] = { lpegmatch(splitter,specification.components) } - elseif lookuptype == "alternate" then - lookuplist[l] = { lpegmatch(splitter,specification.components) } - elseif lookuptype == "substitution" then - lookuplist[l] = unicodes[specification.variant] - elseif lookuptype == "multiple" then - lookuplist[l] = { lpegmatch(splitter,specification.components) } - elseif lookuptype == "position" then - lookuplist[l] = { - specification.x or 0, - specification.y or 0, - specification.h or 0, - specification.v or 0 - } - elseif lookuptype == "pair" then - local one = specification.offsets[1] - local two = specification.offsets[2] - local paired = unicodes[specification.paired] - if one then - if two then - lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } } - else - lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } } - end - else - if two then - lookuplist[l] = { paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { } - else - lookuplist[l] = { paired } - end - end - end - end - end - local slookups, mlookups - for tag, lookuplist in next, lookups do - if #lookuplist == 1 then - if slookups then - slookups[tag] = lookuplist[1] - else - slookups = { [tag] = lookuplist[1] } - end - else - if mlookups then - mlookups[tag] = lookuplist - else - mlookups = { [tag] = lookuplist } - end - end - end - if slookups then - description.slookups = slookups - end - if mlookups then - description.mlookups = mlookups - end - end - end - -end - -actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we replace inplace we safe entries - local descriptions = data.descriptions - for unicode, description in next, descriptions do - local anchors = description.glyph.anchors - if anchors then - for class, data in next, anchors do - if class == "baselig" then - for tag, specification in next, data do - for i=1,#specification do - local si = specification[i] - specification[i] = { si.x 
or 0, si.y or 0 } - end - end - else - for tag, specification in next, data do - data[tag] = { specification.x or 0, specification.y or 0 } - end - end - end - description.anchors = anchors - end - end -end - --- modes: node, base, none - -function otf.setfeatures(tfmdata,features) - local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) - if okay then - return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf) - else - return { } -- will become false - end -end - --- the first version made a top/mid/not extensible table, now we just --- pass on the variants data and deal with it in the tfm scaler (there --- is no longer an extensible table anyway) --- --- we cannot share descriptions as virtual fonts might extend them (ok, --- we could use a cache with a hash --- --- we already assing an empty tabel to characters as we can add for --- instance protruding info and loop over characters; one is not supposed --- to change descriptions and if one does so one should make a copy! - -local function copytotfm(data,cache_id) - if data then - local metadata = data.metadata - local resources = data.resources - local properties = derivetable(data.properties) - local descriptions = derivetable(data.descriptions) - local goodies = derivetable(data.goodies) - local characters = { } - local parameters = { } - local mathparameters = { } - -- - local pfminfo = metadata.pfminfo or { } - local resources = data.resources - local unicodes = resources.unicodes - -- local mode = data.mode or "base" - local spaceunits = 500 - local spacer = "space" - local designsize = metadata.designsize or metadata.design_size or 100 - local mathspecs = metadata.math - -- - if designsize == 0 then - designsize = 100 - end - if mathspecs then - for name, value in next, mathspecs do - mathparameters[name] = value - end - end - for unicode, _ in next, data.descriptions do -- use parent table - characters[unicode] = { } - end - if mathspecs then - -- we could move this to the scaler but not that much is saved - -- and this is cleaner - for unicode, character in next, characters do - local d = descriptions[unicode] - local m = d.math - if m then - -- watch out: luatex uses horiz_variants for the parts - local variants = m.horiz_variants - local parts = m.horiz_parts - -- local done = { [unicode] = true } - if variants then - local c = character - for i=1,#variants do - local un = variants[i] - -- if done[un] then - -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode) - -- else - c.next = un - c = characters[un] - -- done[un] = true - -- end - end -- c is now last in chain - c.horiz_variants = parts - elseif parts then - character.horiz_variants = parts - end - local variants = m.vert_variants - local parts = m.vert_parts - -- local done = { [unicode] = true } - if variants then - local c = character - for i=1,#variants do - local un = variants[i] - -- if done[un] then - -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode) - -- else - c.next = un - c = characters[un] - -- done[un] = true - -- end - end -- c is now last in chain - c.vert_variants = parts - elseif parts then - character.vert_variants = parts - end - local italic_correction = m.vert_italic_correction - if italic_correction then - character.vert_italic_correction = italic_correction -- was c. 
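The variant handling just above links a base character to its growing variants through the next field and stores the extensible parts on the last link of the chain. With invented slot numbers the resulting characters entries look roughly like this (the size and connector fields of the parts are left out here):

    local characters = {
        [0x28]    = { next = 0xF0000 },          -- base glyph points to first variant
        [0xF0000] = { next = 0xF0001 },          -- each variant points to the next one
        [0xF0001] = {                            -- last in the chain carries the parts
            vert_variants = {
                { glyph = 0xF0002 },             -- bottom piece
                { glyph = 0xF0003 },             -- extender
                { glyph = 0xF0004 },             -- top piece
            },
        },
    }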
- end - local top_accent = m.top_accent - if top_accent then - character.top_accent = top_accent - end - local kerns = m.kerns - if kerns then - character.mathkerns = kerns - end - end - end - end - -- end math - local monospaced = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced") - local charwidth = pfminfo.avgwidth -- or unset - local italicangle = metadata.italicangle - local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight - properties.monospaced = monospaced - parameters.italicangle = italicangle - parameters.charwidth = charwidth - parameters.charxheight = charxheight - -- - local space = 0x0020 -- unicodes['space'], unicodes['emdash'] - local emdash = 0x2014 -- unicodes['space'], unicodes['emdash'] - if monospaced then - if descriptions[space] then - spaceunits, spacer = descriptions[space].width, "space" - end - if not spaceunits and descriptions[emdash] then - spaceunits, spacer = descriptions[emdash].width, "emdash" - end - if not spaceunits and charwidth then - spaceunits, spacer = charwidth, "charwidth" - end - else - if descriptions[space] then - spaceunits, spacer = descriptions[space].width, "space" - end - if not spaceunits and descriptions[emdash] then - spaceunits, spacer = descriptions[emdash].width/2, "emdash/2" - end - if not spaceunits and charwidth then - spaceunits, spacer = charwidth, "charwidth" - end - end - spaceunits = tonumber(spaceunits) or 500 -- brrr - -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?) - local filename = constructors.checkedfilename(resources) - local fontname = metadata.fontname - local fullname = metadata.fullname or fontname - local units = metadata.units_per_em or 1000 - -- - if units == 0 then -- catch bugs in fonts - units = 1000 - metadata.units_per_em = 1000 - end - -- - parameters.slant = 0 - parameters.space = spaceunits -- 3.333 (cmr10) - parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10) - parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10) - parameters.x_height = 2*units/5 -- 400 - parameters.quad = units -- 1000 - if spaceunits < 2*units/5 then - -- todo: warning - end - if italicangle then - parameters.italicangle = italicangle - parameters.italicfactor = math.cos(math.rad(90+italicangle)) - parameters.slant = - math.round(math.tan(italicangle*math.pi/180)) - end - if monospaced then - parameters.space_stretch = 0 - parameters.space_shrink = 0 - elseif syncspace then -- - parameters.space_stretch = spaceunits/2 - parameters.space_shrink = spaceunits/3 - end - parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10) - if charxheight then - parameters.x_height = charxheight - else - local x = 0x78 -- unicodes['x'] - if x then - local x = descriptions[x] - if x then - parameters.x_height = x.height - end - end - end - -- - parameters.designsize = (designsize/10)*65536 - parameters.ascender = abs(metadata.ascent or 0) - parameters.descender = abs(metadata.descent or 0) - parameters.units = units - -- - properties.space = spacer - properties.encodingbytes = 2 - properties.format = data.format or fonts.formats[filename] or "opentype" - properties.noglyphnames = true - properties.filename = filename - properties.fontname = fontname - properties.fullname = fullname - properties.psname = fontname or fullname - properties.name = filename or fullname - -- - -- properties.name = specification.name - -- properties.sub = specification.sub - return { - characters = characters, - 
descriptions = descriptions, - parameters = parameters, - mathparameters = mathparameters, - resources = resources, - properties = properties, - goodies = goodies, - } - end -end - -local function otftotfm(specification) - local cache_id = specification.hash - local tfmdata = containers.read(constructors.cache,cache_id) - if not tfmdata then - local name = specification.name - local sub = specification.sub - local filename = specification.filename - local format = specification.format - local features = specification.features.normal - local rawdata = otf.load(filename,format,sub,features and features.featurefile) - if rawdata and next(rawdata) then - rawdata.lookuphash = { } - tfmdata = copytotfm(rawdata,cache_id) - if tfmdata and next(tfmdata) then - -- at this moment no characters are assigned yet, only empty slots - local features = constructors.checkedfeatures("otf",features) - local shared = tfmdata.shared - if not shared then - shared = { } - tfmdata.shared = shared - end - shared.rawdata = rawdata - -- shared.features = features -- default - shared.dynamics = { } - -- shared.processes = { } - tfmdata.changed = { } - shared.features = features - shared.processes = otf.setfeatures(tfmdata,features) - end - end - containers.write(constructors.cache,cache_id,tfmdata) - end - return tfmdata -end - -local function read_from_otf(specification) - local tfmdata = otftotfm(specification) - if tfmdata then - -- this late ? .. needs checking - tfmdata.properties.name = specification.name - tfmdata.properties.sub = specification.sub - -- - tfmdata = constructors.scale(tfmdata,specification) - local allfeatures = tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) - constructors.setname(tfmdata,specification) -- only otf? 
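For reference, the space-related parameters that copytotfm sets up boil down to the following; units, spaceunits and the two flags are example values, the formulas are the ones used above:

    local units, spaceunits     = 1000, 500
    local monospaced, syncspace = false, true

    local parameters = {
        slant         = 0,
        space         = spaceunits,
        space_stretch = units/2,
        space_shrink  = units/3,
        x_height      = 2*units/5,
        quad          = units,
    }
    if monospaced then
        parameters.space_stretch = 0
        parameters.space_shrink  = 0
    elseif syncspace then
        parameters.space_stretch = spaceunits/2
        parameters.space_shrink  = spaceunits/3
    end
    parameters.extra_space = parameters.space_shrink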
- fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) - end - return tfmdata -end - -local function checkmathsize(tfmdata,mathsize) - local mathdata = tfmdata.shared.rawdata.metadata.math - local mathsize = tonumber(mathsize) - if mathdata then -- we cannot use mathparameters as luatex will complain - local parameters = tfmdata.parameters - parameters.scriptpercentage = mathdata.ScriptPercentScaleDown - parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown - parameters.mathsize = mathsize - end -end - -registerotffeature { - name = "mathsize", - description = "apply mathsize specified in the font", - initializers = { - base = checkmathsize, - node = checkmathsize, - } -} - --- helpers - -function otf.collectlookups(rawdata,kind,script,language) - local sequences = rawdata.resources.sequences - if sequences then - local featuremap, featurelist = { }, { } - for s=1,#sequences do - local sequence = sequences[s] - local features = sequence.features - features = features and features[kind] - features = features and (features[script] or features[default] or features[wildcard]) - features = features and (features[language] or features[default] or features[wildcard]) - if features then - local subtables = sequence.subtables - if subtables then - for s=1,#subtables do - local ss = subtables[s] - if not featuremap[s] then - featuremap[ss] = true - featurelist[#featurelist+1] = ss - end - end - end - end - end - if #featurelist > 0 then - return featuremap, featurelist - end - end - return nil, nil -end - --- readers - -local function check_otf(forced,specification,suffix,what) - local name = specification.name - if forced then - name = file.addsuffix(name,suffix,true) - end - local fullname = findbinfile(name,suffix) or "" - if fullname == "" then - fullname = fonts.names.getfilename(name,suffix) or "" - end - if fullname ~= "" then - specification.filename = fullname - specification.format = what - return read_from_otf(specification) - end -end - -local function opentypereader(specification,suffix,what) - local forced = specification.forced or "" - if forced == "otf" then - return check_otf(true,specification,forced,"opentype") - elseif forced == "ttf" or forced == "ttc" or forced == "dfont" then - return check_otf(true,specification,forced,"truetype") - else - return check_otf(false,specification,suffix,what) - end -end - -readers.opentype = opentypereader - -local formats = fonts.formats - -formats.otf = "opentype" -formats.ttf = "truetype" -formats.ttc = "truetype" -formats.dfont = "truetype" - -function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end -function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end -function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end -function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end - --- this will be overloaded - -function otf.scriptandlanguage(tfmdata,attr) - local properties = tfmdata.properties - return properties.script or "dflt", properties.language or "dflt" -end +if not modules then modules = { } end modules ['font-otf'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- langs -> languages enz +-- anchor_classes vs kernclasses +-- modification/creationtime in subfont is 
runtime dus zinloos +-- to_table -> totable +-- ascent descent + +-- more checking against low level calls of functions + +local utfbyte = utf.byte +local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip +local type, next, tonumber, tostring = type, next, tonumber, tostring +local abs = math.abs +local getn = table.getn +local lpegmatch = lpeg.match +local reversed, concat, remove = table.reversed, table.concat, table.remove +local ioflush = io.flush +local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive +local formatters = string.formatters + +local allocate = utilities.storage.allocate +local registertracker = trackers.register +local registerdirective = directives.register +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming +local elapsedtime = statistics.elapsedtime +local findbinfile = resolvers.findbinfile + +local trace_private = false registertracker("otf.private", function(v) trace_private = v end) +local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end) +local trace_features = false registertracker("otf.features", function(v) trace_features = v end) +local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end) +local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end) +local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end) +local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end) + +local report_otf = logs.reporter("fonts","otf loading") + +local fonts = fonts +local otf = fonts.handlers.otf + +otf.glists = { "gsub", "gpos" } + +otf.version = 2.743 -- beware: also sync font-mis.lua +otf.cache = containers.define("fonts", "otf", otf.version, true) + +local fontdata = fonts.hashes.identifiers +local chardata = characters and characters.data -- not used + +local otffeatures = fonts.constructors.newfeatures("otf") +local registerotffeature = otffeatures.register + +local enhancers = allocate() +otf.enhancers = enhancers +local patches = { } +enhancers.patches = patches + +local definers = fonts.definers +local readers = fonts.readers +local constructors = fonts.constructors + +local forceload = false +local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M) +local usemetatables = false -- .4 slower on mk but 30 M less mem so we might change the default -- will be directive +local packdata = true +local syncspace = true +local forcenotdef = false +local includesubfonts = false + +local wildcard = "*" +local default = "dflt" + +local fontloaderfields = fontloader.fields +local mainfields = nil +local glyphfields = nil -- not used yet + +registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end) +registerdirective("fonts.otf.loader.force", function(v) forceload = v end) +registerdirective("fonts.otf.loader.usemetatables", function(v) usemetatables = v end) +registerdirective("fonts.otf.loader.pack", function(v) packdata = v end) +registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end) +registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end) + +local function load_featurefile(raw,featurefile) + if featurefile and featurefile ~= "" then + if trace_loading then + report_otf("using featurefile %a", featurefile) + end + 
fontloader.apply_featurefile(raw, featurefile) + end +end + +local function showfeatureorder(rawdata,filename) + local sequences = rawdata.resources.sequences + if sequences and #sequences > 0 then + if trace_loading then + report_otf("font %a has %s sequences",filename,#sequences) + report_otf(" ") + end + for nos=1,#sequences do + local sequence = sequences[nos] + local typ = sequence.type or "no-type" + local name = sequence.name or "no-name" + local subtables = sequence.subtables or { "no-subtables" } + local features = sequence.features + if trace_loading then + report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables) + end + if features then + for feature, scripts in next, features do + local tt = { } + if type(scripts) == "table" then + for script, languages in next, scripts do + local ttt = { } + for language, _ in next, languages do + ttt[#ttt+1] = language + end + tt[#tt+1] = formatters["[%s: % t]"](script,ttt) + end + if trace_loading then + report_otf(" %s: % t",feature,tt) + end + else + if trace_loading then + report_otf(" %s: %S",feature,scripts) + end + end + end + end + end + if trace_loading then + report_otf("\n") + end + elseif trace_loading then + report_otf("font %a has no sequences",filename) + end +end + +--[[ldx-- +

+<p>We start with a lot of tables and related functions.</p>
+--ldx]]-- + +local valid_fields = table.tohash { + -- "anchor_classes", + "ascent", + -- "cache_version", + "cidinfo", + "copyright", + -- "creationtime", + "descent", + "design_range_bottom", + "design_range_top", + "design_size", + "encodingchanged", + "extrema_bound", + "familyname", + "fontname", + "fontname", + "fontstyle_id", + "fontstyle_name", + "fullname", + -- "glyphs", + "hasvmetrics", + -- "head_optimized_for_cleartype", + "horiz_base", + "issans", + "isserif", + "italicangle", + -- "kerns", + -- "lookups", + "macstyle", + -- "modificationtime", + "onlybitmaps", + "origname", + "os2_version", + "pfminfo", + -- "private", + "serifcheck", + "sfd_version", + -- "size", + "strokedfont", + "strokewidth", + -- "subfonts", + "table_version", + -- "tables", + -- "ttf_tab_saved", + "ttf_tables", + "uni_interp", + "uniqueid", + "units_per_em", + "upos", + "use_typo_metrics", + "uwidth", + -- "validation_state", + "version", + "vert_base", + "weight", + "weight_width_slope_only", + -- "xuid", +} + +local ordered_enhancers = { + "prepare tables", + + "prepare glyphs", + "prepare lookups", + + "analyze glyphs", + "analyze math", + + "prepare tounicode", -- maybe merge with prepare + + "reorganize lookups", + "reorganize mark classes", + "reorganize anchor classes", + + "reorganize glyph kerns", + "reorganize glyph lookups", + "reorganize glyph anchors", + + "merge kern classes", + + "reorganize features", + "reorganize subtables", + + "check glyphs", + "check metadata", + "check extra features", -- after metadata + + "check encoding", -- moved + "add duplicates", + + "cleanup tables", +} + +--[[ldx-- +

+<p>Here we go.</p>
+--ldx]]-- + +local actions = allocate() +local before = allocate() +local after = allocate() + +patches.before = before +patches.after = after + +local function enhance(name,data,filename,raw) + local enhancer = actions[name] + if enhancer then + if trace_loading then + report_otf("apply enhancement %a to file %a",name,filename) + ioflush() + end + enhancer(data,filename,raw) + else + -- no message as we can have private ones + end +end + +function enhancers.apply(data,filename,raw) + local basename = file.basename(lower(filename)) + if trace_loading then + report_otf("%s enhancing file %a","start",filename) + end + ioflush() -- we want instant messages + for e=1,#ordered_enhancers do + local enhancer = ordered_enhancers[e] + local b = before[enhancer] + if b then + for pattern, action in next, b do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end + enhance(enhancer,data,filename,raw) + local a = after[enhancer] + if a then + for pattern, action in next, a do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end + ioflush() -- we want instant messages + end + if trace_loading then + report_otf("%s enhancing file %a","stop",filename) + end + ioflush() -- we want instant messages +end + +-- patches.register("before","migrate metadata","cambria",function() end) + +function patches.register(what,where,pattern,action) + local pw = patches[what] + if pw then + local ww = pw[where] + if ww then + ww[pattern] = action + else + pw[where] = { [pattern] = action} + end + end +end + +function patches.report(fmt,...) + if trace_loading then + report_otf("patching: %s",formatters[fmt](...)) + end +end + +function enhancers.register(what,action) -- only already registered can be overloaded + actions[what] = action +end + +function otf.load(filename,format,sub,featurefile) + local base = file.basename(file.removesuffix(filename)) + local name = file.removesuffix(base) + local attr = lfs.attributes(filename) + local size = attr and attr.size or 0 + local time = attr and attr.modification or 0 + if featurefile then + name = name .. "@" .. file.removesuffix(file.basename(featurefile)) + end + if sub == "" then + sub = false + end + local hash = name + if sub then + hash = hash .. "-" .. 
sub + end + hash = containers.cleanname(hash) + local featurefiles + if featurefile then + featurefiles = { } + for s in gmatch(featurefile,"[^,]+") do + local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" + if name == "" then + report_otf("loading error, no featurefile %a",s) + else + local attr = lfs.attributes(name) + featurefiles[#featurefiles+1] = { + name = name, + size = attr and attr.size or 0, + time = attr and attr.modification or 0, + } + end + end + if #featurefiles == 0 then + featurefiles = nil + end + end + local data = containers.read(otf.cache,hash) + local reload = not data or data.size ~= size or data.time ~= time + if forceload then + report_otf("forced reload of %a due to hard coded flag",filename) + reload = true + end + if not reload then + local featuredata = data.featuredata + if featurefiles then + if not featuredata or #featuredata ~= #featurefiles then + reload = true + else + for i=1,#featurefiles do + local fi, fd = featurefiles[i], featuredata[i] + if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then + reload = true + break + end + end + end + elseif featuredata then + reload = true + end + if reload then + report_otf("loading: forced reload due to changed featurefile specification %a",featurefile) + end + end + if reload then + report_otf("loading %a, hash %a",filename,hash) + local fontdata, messages + if sub then + fontdata, messages = fontloader.open(filename,sub) + else + fontdata, messages = fontloader.open(filename) + end + if fontdata then + mainfields = mainfields or (fontloaderfields and fontloaderfields(fontdata)) + end + if trace_loading and messages and #messages > 0 then + if type(messages) == "string" then + report_otf("warning: %s",messages) + else + for m=1,#messages do + report_otf("warning: %S",messages[m]) + end + end + else + report_otf("loading done") + end + if fontdata then + if featurefiles then + for i=1,#featurefiles do + load_featurefile(fontdata,featurefiles[i].name) + end + end + local unicodes = { + -- names to unicodes + } + local splitter = lpeg.splitter(" ",unicodes) + data = { + size = size, + time = time, + format = format, + featuredata = featurefiles, + resources = { + filename = resolvers.unresolve(filename), -- no shortcut + version = otf.version, + creator = "context mkiv", + unicodes = unicodes, + indices = { + -- index to unicodes + }, + duplicates = { + -- alternative unicodes + }, + variants = { + -- alternative unicodes (variants) + }, + lookuptypes = { + }, + }, + metadata = { + -- raw metadata, not to be used + }, + properties = { + -- normalized metadata + }, + descriptions = { + }, + goodies = { + }, + helpers = { + tounicodelist = splitter, + tounicodetable = lpeg.Ct(splitter), + }, + } + starttiming(data) + report_otf("file size: %s", size) + enhancers.apply(data,filename,fontdata) + local packtime = { } + if packdata then + if cleanup > 0 then + collectgarbage("collect") + end + starttiming(packtime) + enhance("pack",data,filename,nil) + stoptiming(packtime) + end + report_otf("saving %a in cache",filename) + data = containers.write(otf.cache, hash, data) + if cleanup > 1 then + collectgarbage("collect") + end + stoptiming(data) + if elapsedtime then -- not in generic + report_otf("preprocessing and caching time %s, packtime %s", + elapsedtime(data),packdata and elapsedtime(packtime) or 0) + end + fontloader.close(fontdata) -- free memory + if cleanup > 3 then + collectgarbage("collect") + end + data = containers.read(otf.cache, hash) -- this frees the old table and load 
the sparse one + if cleanup > 2 then + collectgarbage("collect") + end + else + data = nil + report_otf("loading failed due to read error") + end + end + if data then + if trace_defining then + report_otf("loading from cache using hash %a",hash) + end + enhance("unpack",data,filename,nil,false) + enhance("add dimensions",data,filename,nil,false) + if trace_sequences then + showfeatureorder(data,filename) + end + end + return data +end + +local mt = { + __index = function(t,k) -- maybe set it + if k == "height" then + local ht = t.boundingbox[4] + return ht < 0 and 0 or ht + elseif k == "depth" then + local dp = -t.boundingbox[2] + return dp < 0 and 0 or dp + elseif k == "width" then + return 0 + elseif k == "name" then -- or maybe uni* + return forcenotdef and ".notdef" + end + end +} + +actions["prepare tables"] = function(data,filename,raw) + data.properties.hasitalics = false +end + +actions["add dimensions"] = function(data,filename) + -- todo: forget about the width if it's the defaultwidth (saves mem) + -- we could also build the marks hash here (instead of storing it) + if data then + local descriptions = data.descriptions + local resources = data.resources + local defaultwidth = resources.defaultwidth or 0 + local defaultheight = resources.defaultheight or 0 + local defaultdepth = resources.defaultdepth or 0 + local basename = trace_markwidth and file.basename(filename) + if usemetatables then + for _, d in next, descriptions do + local wd = d.width + if not wd then + d.width = defaultwidth + elseif trace_markwidth and wd ~= 0 and d.class == "mark" then + report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) + -- d.width = -wd + end + setmetatable(d,mt) + end + else + for _, d in next, descriptions do + local bb, wd = d.boundingbox, d.width + if not wd then + d.width = defaultwidth + elseif trace_markwidth and wd ~= 0 and d.class == "mark" then + report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) + -- d.width = -wd + end + -- if forcenotdef and not d.name then + -- d.name = ".notdef" + -- end + if bb then + local ht, dp = bb[4], -bb[2] + if ht == 0 or ht < 0 then + -- not set + else + d.height = ht + end + if dp == 0 or dp < 0 then + -- not set + else + d.depth = dp + end + end + end + end + end +end + +local function somecopy(old) -- fast one + if old then + local new = { } + if type(old) == "table" then + for k, v in next, old do + if k == "glyphs" then + -- skip + elseif type(v) == "table" then + new[k] = somecopy(v) + else + new[k] = v + end + end + else + for i=1,#mainfields do + local k = mainfields[i] + local v = old[k] + if k == "glyphs" then + -- skip + elseif type(v) == "table" then + new[k] = somecopy(v) + else + new[k] = v + end + end + end + return new + else + return { } + end +end + +-- not setting hasitalics and class (when nil) during table cronstruction can save some mem + +actions["prepare glyphs"] = function(data,filename,raw) + local rawglyphs = raw.glyphs + local rawsubfonts = raw.subfonts + local rawcidinfo = raw.cidinfo + local criterium = constructors.privateoffset + local private = criterium + local resources = data.resources + local metadata = data.metadata + local properties = data.properties + local descriptions = data.descriptions + local unicodes = resources.unicodes -- name to unicode + local indices = resources.indices -- index to unicode + local duplicates = resources.duplicates + local variants = resources.variants + + if rawsubfonts then + + metadata.subfonts = includesubfonts and { } + properties.cidinfo 
= rawcidinfo + + if rawcidinfo.registry then + local cidmap = fonts.cid.getmap(rawcidinfo) + if cidmap then + rawcidinfo.usedname = cidmap.usedname + local nofnames, nofunicodes = 0, 0 + local cidunicodes, cidnames = cidmap.unicodes, cidmap.names + for cidindex=1,#rawsubfonts do + local subfont = rawsubfonts[cidindex] + local cidglyphs = subfont.glyphs + if includesubfonts then + metadata.subfonts[cidindex] = somecopy(subfont) + end + for index=0,subfont.glyphcnt-1 do -- we could take the previous glyphcnt instead of 0 + local glyph = cidglyphs[index] + if glyph then + local unicode = glyph.unicode + local name = glyph.name or cidnames[index] + if not unicode or unicode == -1 or unicode >= criterium then + unicode = cidunicodes[index] + end + if unicode and descriptions[unicode] then + report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode) + unicode = -1 + end + if not unicode or unicode == -1 or unicode >= criterium then + if not name then + name = format("u%06X",private) + end + unicode = private + unicodes[name] = private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private = private + 1 + nofnames = nofnames + 1 + else + if not name then + name = format("u%06X",unicode) + end + unicodes[name] = unicode + nofunicodes = nofunicodes + 1 + end + indices[index] = unicode -- each index is unique (at least now) + + local description = { + -- width = glyph.width, + boundingbox = glyph.boundingbox, + name = glyph.name or name or "unknown", -- uniXXXX + cidindex = cidindex, + index = index, + glyph = glyph, + } + + descriptions[unicode] = description + else + -- report_otf("potential problem: glyph %U is used but empty",index) + end + end + end + if trace_loading then + report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames) + end + elseif trace_loading then + report_otf("unable to remap cid font, missing cid file for %a",filename) + end + elseif trace_loading then + report_otf("font %a has no glyphs",filename) + end + + else + + for index=0,raw.glyphcnt-1 do -- not raw.glyphmax-1 (as that will crash) + local glyph = rawglyphs[index] + if glyph then + local unicode = glyph.unicode + local name = glyph.name + if not unicode or unicode == -1 or unicode >= criterium then + unicode = private + unicodes[name] = private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private = private + 1 + else + unicodes[name] = unicode + end + indices[index] = unicode + if not name then + name = format("u%06X",unicode) + end + descriptions[unicode] = { + -- width = glyph.width, + boundingbox = glyph.boundingbox, + name = name, + index = index, + glyph = glyph, + } + local altuni = glyph.altuni + if altuni then + local d + for i=1,#altuni do + local a = altuni[i] + local u = a.unicode + local v = a.variant + if v then + -- tricky: no addition to d? 
needs checking but in practice such dups are either very simple + -- shapes or e.g cjk with not that many features + local vv = variants[v] + if vv then + vv[u] = unicode + else -- xits-math has some: + vv = { [u] = unicode } + variants[v] = vv + end + elseif d then + d[#d+1] = u + else + d = { u } + end + end + if d then + duplicates[unicode] = d + end + end + else + report_otf("potential problem: glyph %U is used but empty",index) + end + end + + end + + resources.private = private + +end + +-- the next one is still messy but will get better when we have +-- flattened map/enc tables in the font loader + +actions["check encoding"] = function(data,filename,raw) + local descriptions = data.descriptions + local resources = data.resources + local properties = data.properties + local unicodes = resources.unicodes -- name to unicode + local indices = resources.indices -- index to unicodes + local duplicates = resources.duplicates + + -- begin of messy (not needed when cidmap) + + local mapdata = raw.map or { } + local unicodetoindex = mapdata and mapdata.map or { } + -- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "") + local encname = lower(data.enc_name or mapdata.enc_name or "") + local criterium = 0xFFFF -- for instance cambria has a lot of mess up there + + -- end of messy + + if find(encname,"unicode") then -- unicodebmp, unicodefull, ... + if trace_loading then + report_otf("checking embedded unicode map %a",encname) + end + for unicode, index in next, unicodetoindex do -- altuni already covers this + if unicode <= criterium and not descriptions[unicode] then + local parent = indices[index] -- why nil? + if not parent then + report_otf("weird, unicode %U points to nowhere with index %H",unicode,index) + else + local parentdescription = descriptions[parent] + if parentdescription then + local altuni = parentdescription.altuni + if not altuni then + altuni = { { unicode = parent } } + parentdescription.altuni = altuni + duplicates[parent] = { unicode } + else + local done = false + for i=1,#altuni do + if altuni[i].unicode == parent then + done = true + break + end + end + if not done then + -- let's assume simple cjk reuse + altuni[#altuni+1] = { unicode = parent } + table.insert(duplicates[parent],unicode) + end + end + if trace_loading then + report_otf("weird, unicode %U points to nowhere with index %H",unicode,index) + end + else + report_otf("weird, unicode %U points to %U with index %H",unicode,index) + end + end + end + end + elseif properties.cidinfo then + report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname) + else + report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever") + end + + if mapdata then + mapdata.map = { } -- clear some memory + end +end + +-- for the moment we assume that a font with lookups will not use +-- altuni so we stick to kerns only + +actions["add duplicates"] = function(data,filename,raw) + local descriptions = data.descriptions + local resources = data.resources + local properties = data.properties + local unicodes = resources.unicodes -- name to unicode + local indices = resources.indices -- index to unicodes + local duplicates = resources.duplicates + + for unicode, d in next, duplicates do + for i=1,#d do + local u = d[i] + if not descriptions[u] then + local description = descriptions[unicode] + local duplicate = table.copy(description) -- else packing problem + duplicate.comment = format("copy of U+%05X", unicode) + descriptions[u] = duplicate + local n = 
0 + for _, description in next, descriptions do + if kerns then + local kerns = description.kerns + for _, k in next, kerns do + local ku = k[unicode] + if ku then + k[u] = ku + n = n + 1 + end + end + end + -- todo: lookups etc + end + if trace_loading then + report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n) + end + end + end + end +end + +-- class : nil base mark ligature component (maybe we don't need it in description) +-- boundingbox: split into ht/dp takes more memory (larger tables and less sharing) + +actions["analyze glyphs"] = function(data,filename,raw) -- maybe integrate this in the previous + local descriptions = data.descriptions + local resources = data.resources + local metadata = data.metadata + local properties = data.properties + local hasitalics = false + local widths = { } + local marks = { } -- always present (saves checking) + for unicode, description in next, descriptions do + local glyph = description.glyph + local italic = glyph.italic_correction + if not italic then + -- skip + elseif italic == 0 then + -- skip + else + description.italic = italic + hasitalics = true + end + local width = glyph.width + widths[width] = (widths[width] or 0) + 1 + local class = glyph.class + if class then + if class == "mark" then + marks[unicode] = true + end + description.class = class + end + end + -- flag italic + properties.hasitalics = hasitalics + -- flag marks + resources.marks = marks + -- share most common width for cjk fonts + local wd, most = 0, 1 + for k,v in next, widths do + if v > most then + wd, most = k, v + end + end + if most > 1000 then -- maybe 500 + if trace_loading then + report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most) + end + for unicode, description in next, descriptions do + if description.width == wd then + -- description.width = nil + else + description.width = description.glyph.width + end + end + resources.defaultwidth = wd + else + for unicode, description in next, descriptions do + description.width = description.glyph.width + end + end +end + +actions["reorganize mark classes"] = function(data,filename,raw) + local mark_classes = raw.mark_classes + if mark_classes then + local resources = data.resources + local unicodes = resources.unicodes + local markclasses = { } + resources.markclasses = markclasses -- reversed + for name, class in next, mark_classes do + local t = { } + for s in gmatch(class,"[^ ]+") do + t[unicodes[s]] = true + end + markclasses[name] = t + end + end +end + +actions["reorganize features"] = function(data,filename,raw) -- combine with other + local features = { } + data.resources.features = features + for k, what in next, otf.glists do + local dw = raw[what] + if dw then + local f = { } + features[what] = f + for i=1,#dw do + local d= dw[i] + local dfeatures = d.features + if dfeatures then + for i=1,#dfeatures do + local df = dfeatures[i] + local tag = strip(lower(df.tag)) + local ft = f[tag] + if not ft then + ft = { } + f[tag] = ft + end + local dscripts = df.scripts + for i=1,#dscripts do + local d = dscripts[i] + local languages = d.langs + local script = strip(lower(d.script)) + local fts = ft[script] if not fts then fts = {} ft[script] = fts end + for i=1,#languages do + fts[strip(lower(languages[i]))] = true + end + end + end + end + end + end + end +end + +actions["reorganize anchor classes"] = function(data,filename,raw) + local resources = data.resources + local anchor_to_lookup = { } + local lookup_to_anchor = { } + resources.anchor_to_lookup = 
anchor_to_lookup + resources.lookup_to_anchor = lookup_to_anchor + local classes = raw.anchor_classes -- anchor classes not in final table + if classes then + for c=1,#classes do + local class = classes[c] + local anchor = class.name + local lookups = class.lookup + if type(lookups) ~= "table" then + lookups = { lookups } + end + local a = anchor_to_lookup[anchor] + if not a then + a = { } + anchor_to_lookup[anchor] = a + end + for l=1,#lookups do + local lookup = lookups[l] + local l = lookup_to_anchor[lookup] + if l then + l[anchor] = true + else + l = { [anchor] = true } + lookup_to_anchor[lookup] = l + end + a[lookup] = true + end + end + end +end + +actions["prepare tounicode"] = function(data,filename,raw) + fonts.mappings.addtounicode(data,filename) +end + +local g_directions = { + gsub_contextchain = 1, + gpos_contextchain = 1, + -- gsub_context = 1, + -- gpos_context = 1, + gsub_reversecontextchain = -1, + gpos_reversecontextchain = -1, +} + +-- Research by Khaled Hosny has demonstrated that the font loader merges +-- regular and AAT features and that these can interfere (especially because +-- we dropped checking for valid features elsewhere. So, we just check for +-- the special flag and drop the feature if such a tag is found. + +local function supported(features) + for i=1,#features do + if features[i].ismac then + return false + end + end + return true +end + +actions["reorganize subtables"] = function(data,filename,raw) + local resources = data.resources + local sequences = { } + local lookups = { } + local chainedfeatures = { } + resources.sequences = sequences + resources.lookups = lookups + for _, what in next, otf.glists do + local dw = raw[what] + if dw then + for k=1,#dw do + local gk = dw[k] + local features = gk.features +-- if features and supported(features) then + if not features or supported(features) then -- not always features ! 
+ local typ = gk.type + local chain = g_directions[typ] or 0 + local subtables = gk.subtables + if subtables then + local t = { } + for s=1,#subtables do + t[s] = subtables[s].name + end + subtables = t + end + local flags, markclass = gk.flags, nil + if flags then + local t = { -- forcing false packs nicer + (flags.ignorecombiningmarks and "mark") or false, + (flags.ignoreligatures and "ligature") or false, + (flags.ignorebaseglyphs and "base") or false, + flags.r2l or false, + } + markclass = flags.mark_class + if markclass then + markclass = resources.markclasses[markclass] + end + flags = t + end + -- + local name = gk.name + -- + if not name then + -- in fact an error + report_otf("skipping weird lookup number %s",k) + elseif features then + -- scripts, tag, ismac + local f = { } + for i=1,#features do + local df = features[i] + local tag = strip(lower(df.tag)) + local ft = f[tag] if not ft then ft = {} f[tag] = ft end + local dscripts = df.scripts + for i=1,#dscripts do + local d = dscripts[i] + local languages = d.langs + local script = strip(lower(d.script)) + local fts = ft[script] if not fts then fts = {} ft[script] = fts end + for i=1,#languages do + fts[strip(lower(languages[i]))] = true + end + end + end + sequences[#sequences+1] = { + type = typ, + chain = chain, + flags = flags, + name = name, + subtables = subtables, + markclass = markclass, + features = f, + } + else + lookups[name] = { + type = typ, + chain = chain, + flags = flags, + subtables = subtables, + markclass = markclass, + } + end + end + end + end + end +end + +-- test this: +-- +-- for _, what in next, otf.glists do +-- raw[what] = nil +-- end + +actions["prepare lookups"] = function(data,filename,raw) + local lookups = raw.lookups + if lookups then + data.lookups = lookups + end +end + +-- The reverse handler does a bit redundant splitting but it's seldom +-- seen so we don't bother too much. We could store the replacement +-- in the current list (value instead of true) but it makes other code +-- uglier. Maybe some day. 
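-- [editor's note, illustrative sketch only, not part of the upstream patch]
-- The uncover/hash helpers defined next split space separated coverage
-- strings into arrays and then into hash sets, caching both steps so that
-- identical covers are processed only once. A minimal standalone sketch of
-- the splitting-plus-hashing idea, using a plain gmatch splitter instead of
-- the lpeg based tounicodetable helper, so the entries stay glyph name
-- strings here rather than unicodes (all sketch_* names are hypothetical):

local sketch_cache = { } -- mirrors the role of the t_u_cache / s_u_cache tables

local function sketch_uncover(cover)
    -- split "a b c" once and remember the result for identical covers
    local u = sketch_cache[cover]
    if not u then
        u = { }
        for name in string.gmatch(cover,"[^ ]+") do
            u[#u+1] = name
        end
        sketch_cache[cover] = u
    end
    return u
end

local function sketch_hashed(list)
    -- turn the array form into a set for fast membership tests
    local h = { }
    for i=1,#list do
        h[list[i]] = true
    end
    return h
end

-- usage: sketch_hashed(sketch_uncover("f f.alt f.smcp")).f == true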
+ +local function t_uncover(splitter,cache,covers) + local result = { } + for n=1,#covers do + local cover = covers[n] + local uncovered = cache[cover] + if not uncovered then + uncovered = lpegmatch(splitter,cover) + cache[cover] = uncovered + end + result[n] = uncovered + end + return result +end + +local function s_uncover(splitter,cache,cover) + if cover == "" then + return nil + else + local uncovered = cache[cover] + if not uncovered then + uncovered = lpegmatch(splitter,cover) +-- for i=1,#uncovered do +-- uncovered[i] = { [uncovered[i]] = true } +-- end + cache[cover] = uncovered + end + return { uncovered } + end +end + +local function t_hashed(t,cache) + if t then + local ht = { } + for i=1,#t do + local ti = t[i] + local tih = cache[ti] + if not tih then + tih = { } + for i=1,#ti do + tih[ti[i]] = true + end + cache[ti] = tih + end + ht[i] = tih + end + return ht + else + return nil + end +end + +-- local s_hashed = t_hashed + +local function s_hashed(t,cache) + if t then + local ht = { } + local tf = t[1] + for i=1,#tf do + ht[i] = { [tf[i]] = true } + end + return ht + else + return nil + end +end + +local function r_uncover(splitter,cache,cover,replacements) + if cover == "" then + return nil + else + -- we always have current as { } even in the case of one + local uncovered = cover[1] + local replaced = cache[replacements] + if not replaced then + replaced = lpegmatch(splitter,replacements) + cache[replacements] = replaced + end + local nu, nr = #uncovered, #replaced + local r = { } + if nu == nr then + for i=1,nu do + r[uncovered[i]] = replaced[i] + end + end + return r + end +end + +actions["reorganize lookups"] = function(data,filename,raw) -- we could check for "" and n == 0 + -- we prefer the before lookups in a normal order + if data.lookups then + local splitter = data.helpers.tounicodetable + local t_u_cache = { } + local s_u_cache = t_u_cache -- string keys + local t_h_cache = { } + local s_h_cache = t_h_cache -- table keys (so we could use one cache) + local r_u_cache = { } -- maybe shared + for _, lookup in next, data.lookups do + local rules = lookup.rules + if rules then + local format = lookup.format + if format == "class" then + local before_class = lookup.before_class + if before_class then + before_class = t_uncover(splitter,t_u_cache,reversed(before_class)) + end + local current_class = lookup.current_class + if current_class then + current_class = t_uncover(splitter,t_u_cache,current_class) + end + local after_class = lookup.after_class + if after_class then + after_class = t_uncover(splitter,t_u_cache,after_class) + end + for i=1,#rules do + local rule = rules[i] + local class = rule.class + local before = class.before + if before then + for i=1,#before do + before[i] = before_class[before[i]] or { } + end + rule.before = t_hashed(before,t_h_cache) + end + local current = class.current + local lookups = rule.lookups + if current then + for i=1,#current do + current[i] = current_class[current[i]] or { } + -- let's not be sparse + if lookups and not lookups[i] then + lookups[i] = "" -- (was: false) e.g. 
we can have two lookups and one replacement + end + -- end of fix + end + rule.current = t_hashed(current,t_h_cache) + end + local after = class.after + if after then + for i=1,#after do + after[i] = after_class[after[i]] or { } + end + rule.after = t_hashed(after,t_h_cache) + end + rule.class = nil + end + lookup.before_class = nil + lookup.current_class = nil + lookup.after_class = nil + lookup.format = "coverage" + elseif format == "coverage" then + for i=1,#rules do + local rule = rules[i] + local coverage = rule.coverage + if coverage then + local before = coverage.before + if before then + before = t_uncover(splitter,t_u_cache,reversed(before)) + rule.before = t_hashed(before,t_h_cache) + end + local current = coverage.current + if current then + current = t_uncover(splitter,t_u_cache,current) + -- let's not be sparse + local lookups = rule.lookups + if lookups then + for i=1,#current do + if not lookups[i] then + lookups[i] = "" -- fix sparse array + end + end + end + -- + rule.current = t_hashed(current,t_h_cache) + end + local after = coverage.after + if after then + after = t_uncover(splitter,t_u_cache,after) + rule.after = t_hashed(after,t_h_cache) + end + rule.coverage = nil + end + end + elseif format == "reversecoverage" then -- special case, single substitution only + for i=1,#rules do + local rule = rules[i] + local reversecoverage = rule.reversecoverage + if reversecoverage then + local before = reversecoverage.before + if before then + before = t_uncover(splitter,t_u_cache,reversed(before)) + rule.before = t_hashed(before,t_h_cache) + end + local current = reversecoverage.current + if current then + current = t_uncover(splitter,t_u_cache,current) + rule.current = t_hashed(current,t_h_cache) + end + local after = reversecoverage.after + if after then + after = t_uncover(splitter,t_u_cache,after) + rule.after = t_hashed(after,t_h_cache) + end + local replacements = reversecoverage.replacements + if replacements then + rule.replacements = r_uncover(splitter,r_u_cache,current,replacements) + end + rule.reversecoverage = nil + end + end + elseif format == "glyphs" then + -- I could store these more efficient (as not we use a nested tables for before, + -- after and current but this features happens so seldom that I don't bother + -- about it right now. 
+ for i=1,#rules do + local rule = rules[i] + local glyphs = rule.glyphs + if glyphs then + local fore = glyphs.fore + if fore and fore ~= "" then + fore = s_uncover(splitter,s_u_cache,fore) + rule.before = s_hashed(fore,s_h_cache) + end + local back = glyphs.back + if back then + back = s_uncover(splitter,s_u_cache,back) + rule.after = s_hashed(back,s_h_cache) + end + local names = glyphs.names + if names then + names = s_uncover(splitter,s_u_cache,names) + rule.current = s_hashed(names,s_h_cache) + end + rule.glyphs = nil + end + end + end + end + end + end +end + +local function check_variants(unicode,the_variants,splitter,unicodes) + local variants = the_variants.variants + if variants then -- use splitter + local glyphs = lpegmatch(splitter,variants) + local done = { [unicode] = true } + local n = 0 + for i=1,#glyphs do + local g = glyphs[i] + if done[g] then + report_otf("skipping cyclic reference %U in math variant %U",g,unicode) + else + if n == 0 then + n = 1 + variants = { g } + else + n = n + 1 + variants[n] = g + end + done[g] = true + end + end + if n == 0 then + variants = nil + end + end + local parts = the_variants.parts + if parts then + local p = #parts + if p > 0 then + for i=1,p do + local pi = parts[i] + pi.glyph = unicodes[pi.component] or 0 + pi.component = nil + end + else + parts = nil + end + end + local italic_correction = the_variants.italic_correction + if italic_correction and italic_correction == 0 then + italic_correction = nil + end + return variants, parts, italic_correction +end + +actions["analyze math"] = function(data,filename,raw) + if raw.math then + data.metadata.math = raw.math + local unicodes = data.resources.unicodes + local splitter = data.helpers.tounicodetable + for unicode, description in next, data.descriptions do + local glyph = description.glyph + local mathkerns = glyph.mathkern -- singular + local horiz_variants = glyph.horiz_variants + local vert_variants = glyph.vert_variants + local top_accent = glyph.top_accent + if mathkerns or horiz_variants or vert_variants or top_accent then + local math = { } + if top_accent then + math.top_accent = top_accent + end + if mathkerns then + for k, v in next, mathkerns do + if not next(v) then + mathkerns[k] = nil + else + for k, v in next, v do + if v == 0 then + k[v] = nil -- height / kern can be zero + end + end + end + end + math.kerns = mathkerns + end + if horiz_variants then + math.horiz_variants, math.horiz_parts, math.horiz_italic_correction = check_variants(unicode,horiz_variants,splitter,unicodes) + end + if vert_variants then + math.vert_variants, math.vert_parts, math.vert_italic_correction = check_variants(unicode,vert_variants,splitter,unicodes) + end + local italic_correction = description.italic + if italic_correction and italic_correction ~= 0 then + math.italic_correction = italic_correction + end + description.math = math + end + end + end +end + +actions["reorganize glyph kerns"] = function(data,filename,raw) + local descriptions = data.descriptions + local resources = data.resources + local unicodes = resources.unicodes + for unicode, description in next, descriptions do + local kerns = description.glyph.kerns + if kerns then + local newkerns = { } + for k, kern in next, kerns do + local name = kern.char + local offset = kern.off + local lookup = kern.lookup + if name and offset and lookup then + local unicode = unicodes[name] + if unicode then + if type(lookup) == "table" then + for l=1,#lookup do + local lookup = lookup[l] + local lookupkerns = newkerns[lookup] + if 
lookupkerns then + lookupkerns[unicode] = offset + else + newkerns[lookup] = { [unicode] = offset } + end + end + else + local lookupkerns = newkerns[lookup] + if lookupkerns then + lookupkerns[unicode] = offset + else + newkerns[lookup] = { [unicode] = offset } + end + end + elseif trace_loading then + report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode) + end + end + end + description.kerns = newkerns + end + end +end + +actions["merge kern classes"] = function(data,filename,raw) + local gposlist = raw.gpos + if gposlist then + local descriptions = data.descriptions + local resources = data.resources + local unicodes = resources.unicodes + local splitter = data.helpers.tounicodetable + for gp=1,#gposlist do + local gpos = gposlist[gp] + local subtables = gpos.subtables + if subtables then + for s=1,#subtables do + local subtable = subtables[s] + local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes + if kernclass then -- the next one is quite slow + local split = { } -- saves time + for k=1,#kernclass do + local kcl = kernclass[k] + local firsts = kcl.firsts + local seconds = kcl.seconds + local offsets = kcl.offsets + local lookups = kcl.lookup -- singular + if type(lookups) ~= "table" then + lookups = { lookups } + end + -- if offsets[1] == nil then + -- offsets[1] = "" + -- end + -- we can check the max in the loop + -- local maxseconds = getn(seconds) + for n, s in next, firsts do + split[s] = split[s] or lpegmatch(splitter,s) + end + local maxseconds = 0 + for n, s in next, seconds do + if n > maxseconds then + maxseconds = n + end + split[s] = split[s] or lpegmatch(splitter,s) + end + for l=1,#lookups do + local lookup = lookups[l] + for fk=1,#firsts do -- maxfirsts ? + local fv = firsts[fk] + local splt = split[fv] + if splt then + local extrakerns = { } + local baseoffset = (fk-1) * maxseconds + for sk=2,maxseconds do -- will become 1 based in future luatex + local sv = seconds[sk] + -- for sk, sv in next, seconds do + local splt = split[sv] + if splt then -- redundant test + local offset = offsets[baseoffset + sk] + if offset then + for i=1,#splt do + extrakerns[splt[i]] = offset + end + end + end + end + for i=1,#splt do + local first_unicode = splt[i] + local description = descriptions[first_unicode] + if description then + local kerns = description.kerns + if not kerns then + kerns = { } -- unicode indexed ! + description.kerns = kerns + end + local lookupkerns = kerns[lookup] + if not lookupkerns then + lookupkerns = { } + kerns[lookup] = lookupkerns + end + for second_unicode, kern in next, extrakerns do + lookupkerns[second_unicode] = kern + end + elseif trace_loading then + report_otf("no glyph data for %U", first_unicode) + end + end + end + end + end + end + subtable.kernclass = { } + end + end + end + end + end +end + +actions["check glyphs"] = function(data,filename,raw) + for unicode, description in next, data.descriptions do + description.glyph = nil + end +end + +-- future versions will remove _ + +actions["check metadata"] = function(data,filename,raw) + local metadata = data.metadata + for _, k in next, mainfields do + if valid_fields[k] then + local v = raw[k] + if not metadata[k] then + metadata[k] = v + end + end + end + -- metadata.pfminfo = raw.pfminfo -- not already done? 
+ local ttftables = metadata.ttf_tables + if ttftables then + for i=1,#ttftables do + ttftables[i].data = "deleted" + end + end +end + +actions["cleanup tables"] = function(data,filename,raw) + data.resources.indices = nil -- not needed + data.helpers = nil +end + +-- kern: ttf has a table with kerns +-- +-- Weird, as maxfirst and maxseconds can have holes, first seems to be indexed, but +-- seconds can start at 2 .. this need to be fixed as getn as well as # are sort of +-- unpredictable alternatively we could force an [1] if not set (maybe I will do that +-- anyway). + +-- we can share { } as it is never set + +--- ligatures have an extra specification.char entry that we don't use + +actions["reorganize glyph lookups"] = function(data,filename,raw) + local resources = data.resources + local unicodes = resources.unicodes + local descriptions = data.descriptions + local splitter = data.helpers.tounicodelist + + local lookuptypes = resources.lookuptypes + + for unicode, description in next, descriptions do + local lookups = description.glyph.lookups + if lookups then + for tag, lookuplist in next, lookups do + for l=1,#lookuplist do + local lookup = lookuplist[l] + local specification = lookup.specification + local lookuptype = lookup.type + local lt = lookuptypes[tag] + if not lt then + lookuptypes[tag] = lookuptype + elseif lt ~= lookuptype then + report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype) + end + if lookuptype == "ligature" then + lookuplist[l] = { lpegmatch(splitter,specification.components) } + elseif lookuptype == "alternate" then + lookuplist[l] = { lpegmatch(splitter,specification.components) } + elseif lookuptype == "substitution" then + lookuplist[l] = unicodes[specification.variant] + elseif lookuptype == "multiple" then + lookuplist[l] = { lpegmatch(splitter,specification.components) } + elseif lookuptype == "position" then + lookuplist[l] = { + specification.x or 0, + specification.y or 0, + specification.h or 0, + specification.v or 0 + } + elseif lookuptype == "pair" then + local one = specification.offsets[1] + local two = specification.offsets[2] + local paired = unicodes[specification.paired] + if one then + if two then + lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } } + else + lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } } + end + else + if two then + lookuplist[l] = { paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { } + else + lookuplist[l] = { paired } + end + end + end + end + end + local slookups, mlookups + for tag, lookuplist in next, lookups do + if #lookuplist == 1 then + if slookups then + slookups[tag] = lookuplist[1] + else + slookups = { [tag] = lookuplist[1] } + end + else + if mlookups then + mlookups[tag] = lookuplist + else + mlookups = { [tag] = lookuplist } + end + end + end + if slookups then + description.slookups = slookups + end + if mlookups then + description.mlookups = mlookups + end + end + end + +end + +actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we replace inplace we safe entries + local descriptions = data.descriptions + for unicode, description in next, descriptions do + local anchors = description.glyph.anchors + if anchors then + for class, data in next, anchors do + if class == "baselig" then + for tag, specification in next, data do + for i=1,#specification do + local si = specification[i] + specification[i] = { si.x 
or 0, si.y or 0 } + end + end + else + for tag, specification in next, data do + data[tag] = { specification.x or 0, specification.y or 0 } + end + end + end + description.anchors = anchors + end + end +end + +-- modes: node, base, none + +function otf.setfeatures(tfmdata,features) + local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) + if okay then + return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf) + else + return { } -- will become false + end +end + +-- the first version made a top/mid/not extensible table, now we just +-- pass on the variants data and deal with it in the tfm scaler (there +-- is no longer an extensible table anyway) +-- +-- we cannot share descriptions as virtual fonts might extend them (ok, +-- we could use a cache with a hash +-- +-- we already assing an empty tabel to characters as we can add for +-- instance protruding info and loop over characters; one is not supposed +-- to change descriptions and if one does so one should make a copy! + +local function copytotfm(data,cache_id) + if data then + local metadata = data.metadata + local resources = data.resources + local properties = derivetable(data.properties) + local descriptions = derivetable(data.descriptions) + local goodies = derivetable(data.goodies) + local characters = { } + local parameters = { } + local mathparameters = { } + -- + local pfminfo = metadata.pfminfo or { } + local resources = data.resources + local unicodes = resources.unicodes + -- local mode = data.mode or "base" + local spaceunits = 500 + local spacer = "space" + local designsize = metadata.designsize or metadata.design_size or 100 + local mathspecs = metadata.math + -- + if designsize == 0 then + designsize = 100 + end + if mathspecs then + for name, value in next, mathspecs do + mathparameters[name] = value + end + end + for unicode, _ in next, data.descriptions do -- use parent table + characters[unicode] = { } + end + if mathspecs then + -- we could move this to the scaler but not that much is saved + -- and this is cleaner + for unicode, character in next, characters do + local d = descriptions[unicode] + local m = d.math + if m then + -- watch out: luatex uses horiz_variants for the parts + local variants = m.horiz_variants + local parts = m.horiz_parts + -- local done = { [unicode] = true } + if variants then + local c = character + for i=1,#variants do + local un = variants[i] + -- if done[un] then + -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode) + -- else + c.next = un + c = characters[un] + -- done[un] = true + -- end + end -- c is now last in chain + c.horiz_variants = parts + elseif parts then + character.horiz_variants = parts + end + local variants = m.vert_variants + local parts = m.vert_parts + -- local done = { [unicode] = true } + if variants then + local c = character + for i=1,#variants do + local un = variants[i] + -- if done[un] then + -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode) + -- else + c.next = un + c = characters[un] + -- done[un] = true + -- end + end -- c is now last in chain + c.vert_variants = parts + elseif parts then + character.vert_variants = parts + end + local italic_correction = m.vert_italic_correction + if italic_correction then + character.vert_italic_correction = italic_correction -- was c. 
+ end + local top_accent = m.top_accent + if top_accent then + character.top_accent = top_accent + end + local kerns = m.kerns + if kerns then + character.mathkerns = kerns + end + end + end + end + -- end math + local monospaced = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced") + local charwidth = pfminfo.avgwidth -- or unset + local italicangle = metadata.italicangle + local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight + properties.monospaced = monospaced + parameters.italicangle = italicangle + parameters.charwidth = charwidth + parameters.charxheight = charxheight + -- + local space = 0x0020 -- unicodes['space'], unicodes['emdash'] + local emdash = 0x2014 -- unicodes['space'], unicodes['emdash'] + if monospaced then + if descriptions[space] then + spaceunits, spacer = descriptions[space].width, "space" + end + if not spaceunits and descriptions[emdash] then + spaceunits, spacer = descriptions[emdash].width, "emdash" + end + if not spaceunits and charwidth then + spaceunits, spacer = charwidth, "charwidth" + end + else + if descriptions[space] then + spaceunits, spacer = descriptions[space].width, "space" + end + if not spaceunits and descriptions[emdash] then + spaceunits, spacer = descriptions[emdash].width/2, "emdash/2" + end + if not spaceunits and charwidth then + spaceunits, spacer = charwidth, "charwidth" + end + end + spaceunits = tonumber(spaceunits) or 500 -- brrr + -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?) + local filename = constructors.checkedfilename(resources) + local fontname = metadata.fontname + local fullname = metadata.fullname or fontname + local units = metadata.units_per_em or 1000 + -- + if units == 0 then -- catch bugs in fonts + units = 1000 + metadata.units_per_em = 1000 + end + -- + parameters.slant = 0 + parameters.space = spaceunits -- 3.333 (cmr10) + parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10) + parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10) + parameters.x_height = 2*units/5 -- 400 + parameters.quad = units -- 1000 + if spaceunits < 2*units/5 then + -- todo: warning + end + if italicangle then + parameters.italicangle = italicangle + parameters.italicfactor = math.cos(math.rad(90+italicangle)) + parameters.slant = - math.round(math.tan(italicangle*math.pi/180)) + end + if monospaced then + parameters.space_stretch = 0 + parameters.space_shrink = 0 + elseif syncspace then -- + parameters.space_stretch = spaceunits/2 + parameters.space_shrink = spaceunits/3 + end + parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10) + if charxheight then + parameters.x_height = charxheight + else + local x = 0x78 -- unicodes['x'] + if x then + local x = descriptions[x] + if x then + parameters.x_height = x.height + end + end + end + -- + parameters.designsize = (designsize/10)*65536 + parameters.ascender = abs(metadata.ascent or 0) + parameters.descender = abs(metadata.descent or 0) + parameters.units = units + -- + properties.space = spacer + properties.encodingbytes = 2 + properties.format = data.format or fonts.formats[filename] or "opentype" + properties.noglyphnames = true + properties.filename = filename + properties.fontname = fontname + properties.fullname = fullname + properties.psname = fontname or fullname + properties.name = filename or fullname + -- + -- properties.name = specification.name + -- properties.sub = specification.sub + return { + characters = characters, + 
descriptions = descriptions, + parameters = parameters, + mathparameters = mathparameters, + resources = resources, + properties = properties, + goodies = goodies, + } + end +end + +local function otftotfm(specification) + local cache_id = specification.hash + local tfmdata = containers.read(constructors.cache,cache_id) + if not tfmdata then + local name = specification.name + local sub = specification.sub + local filename = specification.filename + local format = specification.format + local features = specification.features.normal + local rawdata = otf.load(filename,format,sub,features and features.featurefile) + if rawdata and next(rawdata) then + rawdata.lookuphash = { } + tfmdata = copytotfm(rawdata,cache_id) + if tfmdata and next(tfmdata) then + -- at this moment no characters are assigned yet, only empty slots + local features = constructors.checkedfeatures("otf",features) + local shared = tfmdata.shared + if not shared then + shared = { } + tfmdata.shared = shared + end + shared.rawdata = rawdata + -- shared.features = features -- default + shared.dynamics = { } + -- shared.processes = { } + tfmdata.changed = { } + shared.features = features + shared.processes = otf.setfeatures(tfmdata,features) + end + end + containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata +end + +local function read_from_otf(specification) + local tfmdata = otftotfm(specification) + if tfmdata then + -- this late ? .. needs checking + tfmdata.properties.name = specification.name + tfmdata.properties.sub = specification.sub + -- + tfmdata = constructors.scale(tfmdata,specification) + local allfeatures = tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) + constructors.setname(tfmdata,specification) -- only otf? 
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) + end + return tfmdata +end + +local function checkmathsize(tfmdata,mathsize) + local mathdata = tfmdata.shared.rawdata.metadata.math + local mathsize = tonumber(mathsize) + if mathdata then -- we cannot use mathparameters as luatex will complain + local parameters = tfmdata.parameters + parameters.scriptpercentage = mathdata.ScriptPercentScaleDown + parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown + parameters.mathsize = mathsize + end +end + +registerotffeature { + name = "mathsize", + description = "apply mathsize specified in the font", + initializers = { + base = checkmathsize, + node = checkmathsize, + } +} + +-- helpers + +function otf.collectlookups(rawdata,kind,script,language) + local sequences = rawdata.resources.sequences + if sequences then + local featuremap, featurelist = { }, { } + for s=1,#sequences do + local sequence = sequences[s] + local features = sequence.features + features = features and features[kind] + features = features and (features[script] or features[default] or features[wildcard]) + features = features and (features[language] or features[default] or features[wildcard]) + if features then + local subtables = sequence.subtables + if subtables then + for s=1,#subtables do + local ss = subtables[s] + if not featuremap[s] then + featuremap[ss] = true + featurelist[#featurelist+1] = ss + end + end + end + end + end + if #featurelist > 0 then + return featuremap, featurelist + end + end + return nil, nil +end + +-- readers + +local function check_otf(forced,specification,suffix,what) + local name = specification.name + if forced then + name = file.addsuffix(name,suffix,true) + end + local fullname = findbinfile(name,suffix) or "" + if fullname == "" then + fullname = fonts.names.getfilename(name,suffix) or "" + end + if fullname ~= "" then + specification.filename = fullname + specification.format = what + return read_from_otf(specification) + end +end + +local function opentypereader(specification,suffix,what) + local forced = specification.forced or "" + if forced == "otf" then + return check_otf(true,specification,forced,"opentype") + elseif forced == "ttf" or forced == "ttc" or forced == "dfont" then + return check_otf(true,specification,forced,"truetype") + else + return check_otf(false,specification,suffix,what) + end +end + +readers.opentype = opentypereader + +local formats = fonts.formats + +formats.otf = "opentype" +formats.ttf = "truetype" +formats.ttc = "truetype" +formats.dfont = "truetype" + +function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end +function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end +function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end +function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end + +-- this will be overloaded + +function otf.scriptandlanguage(tfmdata,attr) + local properties = tfmdata.properties + return properties.script or "dflt", properties.language or "dflt" +end diff --git a/tex/context/base/font-oth.lua b/tex/context/base/font-oth.lua index 59dca31d9..5e2e567da 100644 --- a/tex/context/base/font-oth.lua +++ b/tex/context/base/font-oth.lua @@ -1,51 +1,51 @@ -if not modules then modules = { } end modules ['font-oth'] = { - version = 1.001, - comment = "companion to font-oth.lua (helpers)", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - 
copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local fonts = fonts -local otf = fonts.handlers.otf - --- todo: use nodemode data is available - -function otf.getalternate(tfmdata,k,kind,value) -- just initialize nodemode and use that (larger mem print) - if value then - local description = tfmdata.descriptions[k] - if description then - local slookups = description.slookups -- we assume only slookups (we can always extend) - if slookups then - local shared = tfmdata.shared - local rawdata = shared and shared.rawdata - if rawdata then - local lookuptypes = rawdata.resources.lookuptypes - if lookuptypes then - local properties = tfmdata.properties - -- we could cache these - local validlookups, lookuplist = otf.collectlookups(rawdata,kind,properties.script,properties.language) - if validlookups then - local choice = tonumber(value) or 1 -- no random here (yet) - for l=1,#lookuplist do - local lookup = lookuplist[l] - local found = slookups[lookup] - if found then - local lookuptype = lookuptypes[lookup] - if lookuptype == "substitution" then - return found - elseif lookuptype == "alternate" then - return found[choice] or found[#found] - else - -- ignore - end - end - end - end - end - end - end - end - end - return k -end +if not modules then modules = { } end modules ['font-oth'] = { + version = 1.001, + comment = "companion to font-oth.lua (helpers)", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local fonts = fonts +local otf = fonts.handlers.otf + +-- todo: use nodemode data is available + +function otf.getalternate(tfmdata,k,kind,value) -- just initialize nodemode and use that (larger mem print) + if value then + local description = tfmdata.descriptions[k] + if description then + local slookups = description.slookups -- we assume only slookups (we can always extend) + if slookups then + local shared = tfmdata.shared + local rawdata = shared and shared.rawdata + if rawdata then + local lookuptypes = rawdata.resources.lookuptypes + if lookuptypes then + local properties = tfmdata.properties + -- we could cache these + local validlookups, lookuplist = otf.collectlookups(rawdata,kind,properties.script,properties.language) + if validlookups then + local choice = tonumber(value) or 1 -- no random here (yet) + for l=1,#lookuplist do + local lookup = lookuplist[l] + local found = slookups[lookup] + if found then + local lookuptype = lookuptypes[lookup] + if lookuptype == "substitution" then + return found + elseif lookuptype == "alternate" then + return found[choice] or found[#found] + else + -- ignore + end + end + end + end + end + end + end + end + end + return k +end diff --git a/tex/context/base/font-oti.lua b/tex/context/base/font-oti.lua index 06c2a42fa..e33b57a6f 100644 --- a/tex/context/base/font-oti.lua +++ b/tex/context/base/font-oti.lua @@ -1,91 +1,91 @@ -if not modules then modules = { } end modules ['font-oti'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local lower = string.lower - -local fonts = fonts -local constructors = fonts.constructors - -local otf = constructors.newhandler("otf") -local otffeatures = constructors.newfeatures("otf") -local otftables = otf.tables -local registerotffeature = otffeatures.register - -local allocate = 
utilities.storage.allocate - -registerotffeature { - name = "features", - description = "initialization of feature handler", - default = true, -} - --- these are later hooked into node and base initializaters - -local function setmode(tfmdata,value) - if value then - tfmdata.properties.mode = lower(value) - end -end - -local function setlanguage(tfmdata,value) - if value then - local cleanvalue = lower(value) - local languages = otftables and otftables.languages - local properties = tfmdata.properties - if not languages then - properties.language = cleanvalue - elseif languages[value] then - properties.language = cleanvalue - else - properties.language = "dflt" - end - end -end - -local function setscript(tfmdata,value) - if value then - local cleanvalue = lower(value) - local scripts = otftables and otftables.scripts - local properties = tfmdata.properties - if not scripts then - properties.script = cleanvalue - elseif scripts[value] then - properties.script = cleanvalue - else - properties.script = "dflt" - end - end -end - -registerotffeature { - name = "mode", - description = "mode", - initializers = { - base = setmode, - node = setmode, - } -} - -registerotffeature { - name = "language", - description = "language", - initializers = { - base = setlanguage, - node = setlanguage, - } -} - -registerotffeature { - name = "script", - description = "script", - initializers = { - base = setscript, - node = setscript, - } -} - +if not modules then modules = { } end modules ['font-oti'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local lower = string.lower + +local fonts = fonts +local constructors = fonts.constructors + +local otf = constructors.newhandler("otf") +local otffeatures = constructors.newfeatures("otf") +local otftables = otf.tables +local registerotffeature = otffeatures.register + +local allocate = utilities.storage.allocate + +registerotffeature { + name = "features", + description = "initialization of feature handler", + default = true, +} + +-- these are later hooked into node and base initializaters + +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode = lower(value) + end +end + +local function setlanguage(tfmdata,value) + if value then + local cleanvalue = lower(value) + local languages = otftables and otftables.languages + local properties = tfmdata.properties + if not languages then + properties.language = cleanvalue + elseif languages[value] then + properties.language = cleanvalue + else + properties.language = "dflt" + end + end +end + +local function setscript(tfmdata,value) + if value then + local cleanvalue = lower(value) + local scripts = otftables and otftables.scripts + local properties = tfmdata.properties + if not scripts then + properties.script = cleanvalue + elseif scripts[value] then + properties.script = cleanvalue + else + properties.script = "dflt" + end + end +end + +registerotffeature { + name = "mode", + description = "mode", + initializers = { + base = setmode, + node = setmode, + } +} + +registerotffeature { + name = "language", + description = "language", + initializers = { + base = setlanguage, + node = setlanguage, + } +} + +registerotffeature { + name = "script", + description = "script", + initializers = { + base = setscript, + node = setscript, + } +} + diff --git a/tex/context/base/font-otp.lua b/tex/context/base/font-otp.lua index 
217bb7535..f0c2edd86 100644 --- a/tex/context/base/font-otp.lua +++ b/tex/context/base/font-otp.lua @@ -1,877 +1,877 @@ -if not modules then modules = { } end modules ['font-otp'] = { - version = 1.001, - comment = "companion to font-otf.lua (packing)", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: pack math (but not that much to share) --- --- pitfall 5.2: hashed tables can suddenly become indexed with nil slots - -local next, type = next, type -local sort, concat = table.sort, table.concat -local sortedhash = table.sortedhash - -local trace_packing = false trackers.register("otf.packing", function(v) trace_packing = v end) -local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) - -local report_otf = logs.reporter("fonts","otf loading") - --- also used in other scripts so we need to check some tables: - -fonts = fonts or { } - -local handlers = fonts.handlers or { } -fonts.handlers = handlers - -local otf = handlers.otf or { } -handlers.otf = otf - -local enhancers = otf.enhancers or { } -otf.enhancers = enhancers - -local glists = otf.glists or { "gsub", "gpos" } -otf.glists = glists - -local criterium = 1 -local threshold = 0 - -local function tabstr_normal(t) - local s = { } - local n = 0 - for k, v in next, t do - n = n + 1 - if type(v) == "table" then - s[n] = k .. ">" .. tabstr_normal(v) - elseif v == true then - s[n] = k .. "+" -- "=true" - elseif v then - s[n] = k .. "=" .. v - else - s[n] = k .. "-" -- "=false" - end - end - if n == 0 then - return "" - elseif n == 1 then - return s[1] - else - sort(s) -- costly but needed (occasional wrong hit otherwise) - return concat(s,",") - end -end - -local function tabstr_flat(t) - local s = { } - local n = 0 - for k, v in next, t do - n = n + 1 - s[n] = k .. "=" .. v - end - if n == 0 then - return "" - elseif n == 1 then - return s[1] - else - sort(s) -- costly but needed (occasional wrong hit otherwise) - return concat(s,",") - end -end - -local function tabstr_mixed(t) -- indexed - local s = { } - local n = #t - if n == 0 then - return "" - elseif n == 1 then - local k = t[1] - if k == true then - return "++" -- we need to distinguish from "true" - elseif k == false then - return "--" -- we need to distinguish from "false" - else - return tostring(k) -- number or string - end - else - for i=1,n do - local k = t[i] - if k == true then - s[i] = "++" -- we need to distinguish from "true" - elseif k == false then - s[i] = "--" -- we need to distinguish from "false" - else - s[i] = k -- number or string - end - end - return concat(s,",") - end -end - -local function tabstr_boolean(t) - local s = { } - local n = 0 - for k, v in next, t do - n = n + 1 - if v then - s[n] = k .. "+" - else - s[n] = k .. 
"-" - end - end - if n == 0 then - return "" - elseif n == 1 then - return s[1] - else - sort(s) -- costly but needed (occasional wrong hit otherwise) - return concat(s,",") - end -end - --- tabstr_boolean_x = tabstr_boolean - --- tabstr_boolean = function(t) --- local a = tabstr_normal(t) --- local b = tabstr_boolean_x(t) --- print(a) --- print(b) --- return b --- end - -local function packdata(data) - if data then - -- stripdata(data) - local h, t, c = { }, { }, { } - local hh, tt, cc = { }, { }, { } - local nt, ntt = 0, 0 - local function pack_normal(v) - local tag = tabstr_normal(v) - local ht = h[tag] - if ht then - c[ht] = c[ht] + 1 - return ht - else - nt = nt + 1 - t[nt] = v - h[tag] = nt - c[nt] = 1 - return nt - end - end - local function pack_flat(v) - local tag = tabstr_flat(v) - local ht = h[tag] - if ht then - c[ht] = c[ht] + 1 - return ht - else - nt = nt + 1 - t[nt] = v - h[tag] = nt - c[nt] = 1 - return nt - end - end - local function pack_boolean(v) - local tag = tabstr_boolean(v) - local ht = h[tag] - if ht then - c[ht] = c[ht] + 1 - return ht - else - nt = nt + 1 - t[nt] = v - h[tag] = nt - c[nt] = 1 - return nt - end - end - local function pack_indexed(v) - local tag = concat(v," ") - local ht = h[tag] - if ht then - c[ht] = c[ht] + 1 - return ht - else - nt = nt + 1 - t[nt] = v - h[tag] = nt - c[nt] = 1 - return nt - end - end - local function pack_mixed(v) - local tag = tabstr_mixed(v) - local ht = h[tag] - if ht then - c[ht] = c[ht] + 1 - return ht - else - nt = nt + 1 - t[nt] = v - h[tag] = nt - c[nt] = 1 - return nt - end - end - local function pack_final(v) - -- v == number - if c[v] <= criterium then - return t[v] - else - -- compact hash - local hv = hh[v] - if hv then - return hv - else - ntt = ntt + 1 - tt[ntt] = t[v] - hh[v] = ntt - cc[ntt] = c[v] - return ntt - end - end - end - local function success(stage,pass) - if nt == 0 then - if trace_loading or trace_packing then - report_otf("pack quality: nothing to pack") - end - return false - elseif nt >= threshold then - local one, two, rest = 0, 0, 0 - if pass == 1 then - for k,v in next, c do - if v == 1 then - one = one + 1 - elseif v == 2 then - two = two + 1 - else - rest = rest + 1 - end - end - else - for k,v in next, cc do - if v > 20 then - rest = rest + 1 - elseif v > 10 then - two = two + 1 - else - one = one + 1 - end - end - data.tables = tt - end - if trace_loading or trace_packing then - report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)", stage, pass, one+two+rest, one, two, rest, criterium) - end - return true - else - if trace_loading or trace_packing then - report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)", stage, pass, nt, threshold) - end - return false - end - end - local function packers(pass) - if pass == 1 then - return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed - else - return pack_final, pack_final, pack_final, pack_final, pack_final - end - end - local resources = data.resources - local lookuptypes = resources.lookuptypes - for pass=1,2 do - if trace_packing then - report_otf("start packing: stage 1, pass %s",pass) - end - local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass) - for unicode, description in next, data.descriptions do - local boundingbox = description.boundingbox - if boundingbox then - description.boundingbox = pack_indexed(boundingbox) - end - local slookups = description.slookups - if slookups then - for tag, slookup in next, 
slookups do - local what = lookuptypes[tag] - if what == "pair" then - local t = slookup[2] if t then slookup[2] = pack_indexed(t) end - local t = slookup[3] if t then slookup[3] = pack_indexed(t) end - elseif what ~= "substitution" then - slookups[tag] = pack_indexed(slookup) -- true is new - end - end - end - local mlookups = description.mlookups - if mlookups then - for tag, mlookup in next, mlookups do - local what = lookuptypes[tag] - if what == "pair" then - for i=1,#mlookup do - local lookup = mlookup[i] - local t = lookup[2] if t then lookup[2] = pack_indexed(t) end - local t = lookup[3] if t then lookup[3] = pack_indexed(t) end - end - elseif what ~= "substitution" then - for i=1,#mlookup do - mlookup[i] = pack_indexed(mlookup[i]) -- true is new - end - end - end - end - local kerns = description.kerns - if kerns then - for tag, kern in next, kerns do - kerns[tag] = pack_flat(kern) - end - end - local math = description.math - if math then - local kerns = math.kerns - if kerns then - for tag, kern in next, kerns do - kerns[tag] = pack_normal(kern) - end - end - end - local anchors = description.anchors - if anchors then - for what, anchor in next, anchors do - if what == "baselig" then - for _, a in next, anchor do - for k=1,#a do - a[k] = pack_indexed(a[k]) - end - end - else - for k, v in next, anchor do - anchor[k] = pack_indexed(v) - end - end - end - end - local altuni = description.altuni - if altuni then - for i=1,#altuni do - altuni[i] = pack_flat(altuni[i]) - end - end - end - local lookups = data.lookups - if lookups then - for _, lookup in next, lookups do - local rules = lookup.rules - if rules then - for i=1,#rules do - local rule = rules[i] - local r = rule.before if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end - local r = rule.after if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end - local r = rule.current if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end - local r = rule.replacements if r then rule.replacements = pack_flat (r) end -- can have holes - local r = rule.lookups if r then rule.lookups = pack_indexed(r) end -- can have "" - -- local r = rule.lookups if r then rule.lookups = pack_flat(r) end -- can have holes (already taken care of some cases) - end - end - end - end - local anchor_to_lookup = resources.anchor_to_lookup - if anchor_to_lookup then - for anchor, lookup in next, anchor_to_lookup do - anchor_to_lookup[anchor] = pack_normal(lookup) - end - end - local lookup_to_anchor = resources.lookup_to_anchor - if lookup_to_anchor then - for lookup, anchor in next, lookup_to_anchor do - lookup_to_anchor[lookup] = pack_normal(anchor) - end - end - local sequences = resources.sequences - if sequences then - for feature, sequence in next, sequences do - local flags = sequence.flags - if flags then - sequence.flags = pack_normal(flags) - end - local subtables = sequence.subtables - if subtables then - sequence.subtables = pack_normal(subtables) - end - local features = sequence.features - if features then - for script, feature in next, features do - features[script] = pack_normal(feature) - end - end - end - end - local lookups = resources.lookups - if lookups then - for name, lookup in next, lookups do - local flags = lookup.flags - if flags then - lookup.flags = pack_normal(flags) - end - local subtables = lookup.subtables - if subtables then - lookup.subtables = pack_normal(subtables) - end - end - end - local features = resources.features - if features then - for _, what in next, glists do - local list = features[what] - if list 
then - for feature, spec in next, list do - list[feature] = pack_normal(spec) - end - end - end - end - if not success(1,pass) then - return - end - end - if nt > 0 then - for pass=1,2 do - if trace_packing then - report_otf("start packing: stage 2, pass %s",pass) - end - local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass) - for unicode, description in next, data.descriptions do - local kerns = description.kerns - if kerns then - description.kerns = pack_normal(kerns) - end - local math = description.math - if math then - local kerns = math.kerns - if kerns then - math.kerns = pack_normal(kerns) - end - end - local anchors = description.anchors - if anchors then - description.anchors = pack_normal(anchors) - end - local mlookups = description.mlookups - if mlookups then - for tag, mlookup in next, mlookups do - mlookups[tag] = pack_normal(mlookup) - end - end - local altuni = description.altuni - if altuni then - description.altuni = pack_normal(altuni) - end - end - local lookups = data.lookups - if lookups then - for _, lookup in next, lookups do - local rules = lookup.rules - if rules then - for i=1,#rules do -- was next loop - local rule = rules[i] - local r = rule.before if r then rule.before = pack_normal(r) end - local r = rule.after if r then rule.after = pack_normal(r) end - local r = rule.current if r then rule.current = pack_normal(r) end - end - end - end - end - local sequences = resources.sequences - if sequences then - for feature, sequence in next, sequences do - sequence.features = pack_normal(sequence.features) - end - end - if not success(2,pass) then - -- return - end - end - - for pass=1,2 do - local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass) - for unicode, description in next, data.descriptions do - local slookups = description.slookups - if slookups then - description.slookups = pack_normal(slookups) - end - local mlookups = description.mlookups - if mlookups then - description.mlookups = pack_normal(mlookups) - end - end - end - - end - end -end - -local unpacked_mt = { - __index = - function(t,k) - t[k] = false - return k -- next time true - end -} - -local function unpackdata(data) - if data then - local tables = data.tables - if tables then - local resources = data.resources - local lookuptypes = resources.lookuptypes - local unpacked = { } - setmetatable(unpacked,unpacked_mt) - for unicode, description in next, data.descriptions do - local tv = tables[description.boundingbox] - if tv then - description.boundingbox = tv - end - local slookups = description.slookups - if slookups then - local tv = tables[slookups] - if tv then - description.slookups = tv - slookups = unpacked[tv] - end - if slookups then - for tag, lookup in next, slookups do - local what = lookuptypes[tag] - if what == "pair" then - local tv = tables[lookup[2]] - if tv then - lookup[2] = tv - end - local tv = tables[lookup[3]] - if tv then - lookup[3] = tv - end - elseif what ~= "substitution" then - local tv = tables[lookup] - if tv then - slookups[tag] = tv - end - end - end - end - end - local mlookups = description.mlookups - if mlookups then - local tv = tables[mlookups] - if tv then - description.mlookups = tv - mlookups = unpacked[tv] - end - if mlookups then - for tag, list in next, mlookups do - local tv = tables[list] - if tv then - mlookups[tag] = tv - list = unpacked[tv] - end - if list then - local what = lookuptypes[tag] - if what == "pair" then - for i=1,#list do - local lookup = list[i] - local tv = 
tables[lookup[2]] - if tv then - lookup[2] = tv - end - local tv = tables[lookup[3]] - if tv then - lookup[3] = tv - end - end - elseif what ~= "substitution" then - for i=1,#list do - local tv = tables[list[i]] - if tv then - list[i] = tv - end - end - end - end - end - end - end - local kerns = description.kerns - if kerns then - local tm = tables[kerns] - if tm then - description.kerns = tm - kerns = unpacked[tm] - end - if kerns then - for k, kern in next, kerns do - local tv = tables[kern] - if tv then - kerns[k] = tv - end - end - end - end - local math = description.math - if math then - local kerns = math.kerns - if kerns then - local tm = tables[kerns] - if tm then - math.kerns = tm - kerns = unpacked[tm] - end - if kerns then - for k, kern in next, kerns do - local tv = tables[kern] - if tv then - kerns[k] = tv - end - end - end - end - end - local anchors = description.anchors - if anchors then - local ta = tables[anchors] - if ta then - description.anchors = ta - anchors = unpacked[ta] - end - if anchors then - for tag, anchor in next, anchors do - if tag == "baselig" then - for _, list in next, anchor do - for i=1,#list do - local tv = tables[list[i]] - if tv then - list[i] = tv - end - end - end - else - for a, data in next, anchor do - local tv = tables[data] - if tv then - anchor[a] = tv - end - end - end - end - end - end - local altuni = description.altuni - if altuni then - local altuni = tables[altuni] - if altuni then - description.altuni = altuni - for i=1,#altuni do - local tv = tables[altuni[i]] - if tv then - altuni[i] = tv - end - end - end - end - end - local lookups = data.lookups - if lookups then - for _, lookup in next, lookups do - local rules = lookup.rules - if rules then - for i=1,#rules do -- was next loop - local rule = rules[i] - local before = rule.before - if before then - local tv = tables[before] - if tv then - rule.before = tv - before = unpacked[tv] - end - if before then - for i=1,#before do - local tv = tables[before[i]] - if tv then - before[i] = tv - end - end - end - end - local after = rule.after - if after then - local tv = tables[after] - if tv then - rule.after = tv - after = unpacked[tv] - end - if after then - for i=1,#after do - local tv = tables[after[i]] - if tv then - after[i] = tv - end - end - end - end - local current = rule.current - if current then - local tv = tables[current] - if tv then - rule.current = tv - current = unpacked[tv] - end - if current then - for i=1,#current do - local tv = tables[current[i]] - if tv then - current[i] = tv - end - end - end - end - local replacements = rule.replacements - if replacements then - local tv = tables[replacements] - if tv then - rule.replacements = tv - end - end - local fore = rule.fore - if fore then - local tv = tables[fore] - if tv then - rule.fore = tv - end - end - local back = rule.back - if back then - local tv = tables[back] - if tv then - rule.back = tv - end - end - local names = rule.names - if names then - local tv = tables[names] - if tv then - rule.names = tv - end - end - local lookups = rule.lookups - if lookups then - local tv = tables[lookups] - if tv then - rule.lookups = tv - end - end - end - end - end - end - local anchor_to_lookup = resources.anchor_to_lookup - if anchor_to_lookup then - for anchor, lookup in next, anchor_to_lookup do - local tv = tables[lookup] - if tv then - anchor_to_lookup[anchor] = tv - end - end - end - local lookup_to_anchor = resources.lookup_to_anchor - if lookup_to_anchor then - for lookup, anchor in next, lookup_to_anchor do - 
local tv = tables[anchor] - if tv then - lookup_to_anchor[lookup] = tv - end - end - end - local ls = resources.sequences - if ls then - for _, feature in next, ls do - local flags = feature.flags - if flags then - local tv = tables[flags] - if tv then - feature.flags = tv - end - end - local subtables = feature.subtables - if subtables then - local tv = tables[subtables] - if tv then - feature.subtables = tv - end - end - local features = feature.features - if features then - local tv = tables[features] - if tv then - feature.features = tv - features = unpacked[tv] - end - if features then - for script, data in next, features do - local tv = tables[data] - if tv then - features[script] = tv - end - end - end - end - end - end - local lookups = resources.lookups - if lookups then - for _, lookup in next, lookups do - local flags = lookup.flags - if flags then - local tv = tables[flags] - if tv then - lookup.flags = tv - end - end - local subtables = lookup.subtables - if subtables then - local tv = tables[subtables] - if tv then - lookup.subtables = tv - end - end - end - end - local features = resources.features - if features then - for _, what in next, glists do - local feature = features[what] - if feature then - for tag, spec in next, feature do - local tv = tables[spec] - if tv then - feature[tag] = tv - end - end - end - end - end - data.tables = nil - end - end -end - -if otf.enhancers.register then - - otf.enhancers.register( "pack", packdata) - otf.enhancers.register("unpack",unpackdata) - --- todo: directive - -end - -otf.enhancers.unpack = unpackdata -- used elsewhere +if not modules then modules = { } end modules ['font-otp'] = { + version = 1.001, + comment = "companion to font-otf.lua (packing)", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: pack math (but not that much to share) +-- +-- pitfall 5.2: hashed tables can suddenly become indexed with nil slots + +local next, type = next, type +local sort, concat = table.sort, table.concat +local sortedhash = table.sortedhash + +local trace_packing = false trackers.register("otf.packing", function(v) trace_packing = v end) +local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) + +local report_otf = logs.reporter("fonts","otf loading") + +-- also used in other scripts so we need to check some tables: + +fonts = fonts or { } + +local handlers = fonts.handlers or { } +fonts.handlers = handlers + +local otf = handlers.otf or { } +handlers.otf = otf + +local enhancers = otf.enhancers or { } +otf.enhancers = enhancers + +local glists = otf.glists or { "gsub", "gpos" } +otf.glists = glists + +local criterium = 1 +local threshold = 0 + +local function tabstr_normal(t) + local s = { } + local n = 0 + for k, v in next, t do + n = n + 1 + if type(v) == "table" then + s[n] = k .. ">" .. tabstr_normal(v) + elseif v == true then + s[n] = k .. "+" -- "=true" + elseif v then + s[n] = k .. "=" .. v + else + s[n] = k .. "-" -- "=false" + end + end + if n == 0 then + return "" + elseif n == 1 then + return s[1] + else + sort(s) -- costly but needed (occasional wrong hit otherwise) + return concat(s,",") + end +end + +local function tabstr_flat(t) + local s = { } + local n = 0 + for k, v in next, t do + n = n + 1 + s[n] = k .. "=" .. 
v + end + if n == 0 then + return "" + elseif n == 1 then + return s[1] + else + sort(s) -- costly but needed (occasional wrong hit otherwise) + return concat(s,",") + end +end + +local function tabstr_mixed(t) -- indexed + local s = { } + local n = #t + if n == 0 then + return "" + elseif n == 1 then + local k = t[1] + if k == true then + return "++" -- we need to distinguish from "true" + elseif k == false then + return "--" -- we need to distinguish from "false" + else + return tostring(k) -- number or string + end + else + for i=1,n do + local k = t[i] + if k == true then + s[i] = "++" -- we need to distinguish from "true" + elseif k == false then + s[i] = "--" -- we need to distinguish from "false" + else + s[i] = k -- number or string + end + end + return concat(s,",") + end +end + +local function tabstr_boolean(t) + local s = { } + local n = 0 + for k, v in next, t do + n = n + 1 + if v then + s[n] = k .. "+" + else + s[n] = k .. "-" + end + end + if n == 0 then + return "" + elseif n == 1 then + return s[1] + else + sort(s) -- costly but needed (occasional wrong hit otherwise) + return concat(s,",") + end +end + +-- tabstr_boolean_x = tabstr_boolean + +-- tabstr_boolean = function(t) +-- local a = tabstr_normal(t) +-- local b = tabstr_boolean_x(t) +-- print(a) +-- print(b) +-- return b +-- end + +local function packdata(data) + if data then + -- stripdata(data) + local h, t, c = { }, { }, { } + local hh, tt, cc = { }, { }, { } + local nt, ntt = 0, 0 + local function pack_normal(v) + local tag = tabstr_normal(v) + local ht = h[tag] + if ht then + c[ht] = c[ht] + 1 + return ht + else + nt = nt + 1 + t[nt] = v + h[tag] = nt + c[nt] = 1 + return nt + end + end + local function pack_flat(v) + local tag = tabstr_flat(v) + local ht = h[tag] + if ht then + c[ht] = c[ht] + 1 + return ht + else + nt = nt + 1 + t[nt] = v + h[tag] = nt + c[nt] = 1 + return nt + end + end + local function pack_boolean(v) + local tag = tabstr_boolean(v) + local ht = h[tag] + if ht then + c[ht] = c[ht] + 1 + return ht + else + nt = nt + 1 + t[nt] = v + h[tag] = nt + c[nt] = 1 + return nt + end + end + local function pack_indexed(v) + local tag = concat(v," ") + local ht = h[tag] + if ht then + c[ht] = c[ht] + 1 + return ht + else + nt = nt + 1 + t[nt] = v + h[tag] = nt + c[nt] = 1 + return nt + end + end + local function pack_mixed(v) + local tag = tabstr_mixed(v) + local ht = h[tag] + if ht then + c[ht] = c[ht] + 1 + return ht + else + nt = nt + 1 + t[nt] = v + h[tag] = nt + c[nt] = 1 + return nt + end + end + local function pack_final(v) + -- v == number + if c[v] <= criterium then + return t[v] + else + -- compact hash + local hv = hh[v] + if hv then + return hv + else + ntt = ntt + 1 + tt[ntt] = t[v] + hh[v] = ntt + cc[ntt] = c[v] + return ntt + end + end + end + local function success(stage,pass) + if nt == 0 then + if trace_loading or trace_packing then + report_otf("pack quality: nothing to pack") + end + return false + elseif nt >= threshold then + local one, two, rest = 0, 0, 0 + if pass == 1 then + for k,v in next, c do + if v == 1 then + one = one + 1 + elseif v == 2 then + two = two + 1 + else + rest = rest + 1 + end + end + else + for k,v in next, cc do + if v > 20 then + rest = rest + 1 + elseif v > 10 then + two = two + 1 + else + one = one + 1 + end + end + data.tables = tt + end + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)", stage, pass, one+two+rest, one, two, rest, criterium) + end + return true + 
else + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)", stage, pass, nt, threshold) + end + return false + end + end + local function packers(pass) + if pass == 1 then + return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed + else + return pack_final, pack_final, pack_final, pack_final, pack_final + end + end + local resources = data.resources + local lookuptypes = resources.lookuptypes + for pass=1,2 do + if trace_packing then + report_otf("start packing: stage 1, pass %s",pass) + end + local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass) + for unicode, description in next, data.descriptions do + local boundingbox = description.boundingbox + if boundingbox then + description.boundingbox = pack_indexed(boundingbox) + end + local slookups = description.slookups + if slookups then + for tag, slookup in next, slookups do + local what = lookuptypes[tag] + if what == "pair" then + local t = slookup[2] if t then slookup[2] = pack_indexed(t) end + local t = slookup[3] if t then slookup[3] = pack_indexed(t) end + elseif what ~= "substitution" then + slookups[tag] = pack_indexed(slookup) -- true is new + end + end + end + local mlookups = description.mlookups + if mlookups then + for tag, mlookup in next, mlookups do + local what = lookuptypes[tag] + if what == "pair" then + for i=1,#mlookup do + local lookup = mlookup[i] + local t = lookup[2] if t then lookup[2] = pack_indexed(t) end + local t = lookup[3] if t then lookup[3] = pack_indexed(t) end + end + elseif what ~= "substitution" then + for i=1,#mlookup do + mlookup[i] = pack_indexed(mlookup[i]) -- true is new + end + end + end + end + local kerns = description.kerns + if kerns then + for tag, kern in next, kerns do + kerns[tag] = pack_flat(kern) + end + end + local math = description.math + if math then + local kerns = math.kerns + if kerns then + for tag, kern in next, kerns do + kerns[tag] = pack_normal(kern) + end + end + end + local anchors = description.anchors + if anchors then + for what, anchor in next, anchors do + if what == "baselig" then + for _, a in next, anchor do + for k=1,#a do + a[k] = pack_indexed(a[k]) + end + end + else + for k, v in next, anchor do + anchor[k] = pack_indexed(v) + end + end + end + end + local altuni = description.altuni + if altuni then + for i=1,#altuni do + altuni[i] = pack_flat(altuni[i]) + end + end + end + local lookups = data.lookups + if lookups then + for _, lookup in next, lookups do + local rules = lookup.rules + if rules then + for i=1,#rules do + local rule = rules[i] + local r = rule.before if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end + local r = rule.after if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end + local r = rule.current if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end + local r = rule.replacements if r then rule.replacements = pack_flat (r) end -- can have holes + local r = rule.lookups if r then rule.lookups = pack_indexed(r) end -- can have "" + -- local r = rule.lookups if r then rule.lookups = pack_flat(r) end -- can have holes (already taken care of some cases) + end + end + end + end + local anchor_to_lookup = resources.anchor_to_lookup + if anchor_to_lookup then + for anchor, lookup in next, anchor_to_lookup do + anchor_to_lookup[anchor] = pack_normal(lookup) + end + end + local lookup_to_anchor = resources.lookup_to_anchor + if lookup_to_anchor then + for lookup, anchor in next, lookup_to_anchor do + 
lookup_to_anchor[lookup] = pack_normal(anchor) + end + end + local sequences = resources.sequences + if sequences then + for feature, sequence in next, sequences do + local flags = sequence.flags + if flags then + sequence.flags = pack_normal(flags) + end + local subtables = sequence.subtables + if subtables then + sequence.subtables = pack_normal(subtables) + end + local features = sequence.features + if features then + for script, feature in next, features do + features[script] = pack_normal(feature) + end + end + end + end + local lookups = resources.lookups + if lookups then + for name, lookup in next, lookups do + local flags = lookup.flags + if flags then + lookup.flags = pack_normal(flags) + end + local subtables = lookup.subtables + if subtables then + lookup.subtables = pack_normal(subtables) + end + end + end + local features = resources.features + if features then + for _, what in next, glists do + local list = features[what] + if list then + for feature, spec in next, list do + list[feature] = pack_normal(spec) + end + end + end + end + if not success(1,pass) then + return + end + end + if nt > 0 then + for pass=1,2 do + if trace_packing then + report_otf("start packing: stage 2, pass %s",pass) + end + local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass) + for unicode, description in next, data.descriptions do + local kerns = description.kerns + if kerns then + description.kerns = pack_normal(kerns) + end + local math = description.math + if math then + local kerns = math.kerns + if kerns then + math.kerns = pack_normal(kerns) + end + end + local anchors = description.anchors + if anchors then + description.anchors = pack_normal(anchors) + end + local mlookups = description.mlookups + if mlookups then + for tag, mlookup in next, mlookups do + mlookups[tag] = pack_normal(mlookup) + end + end + local altuni = description.altuni + if altuni then + description.altuni = pack_normal(altuni) + end + end + local lookups = data.lookups + if lookups then + for _, lookup in next, lookups do + local rules = lookup.rules + if rules then + for i=1,#rules do -- was next loop + local rule = rules[i] + local r = rule.before if r then rule.before = pack_normal(r) end + local r = rule.after if r then rule.after = pack_normal(r) end + local r = rule.current if r then rule.current = pack_normal(r) end + end + end + end + end + local sequences = resources.sequences + if sequences then + for feature, sequence in next, sequences do + sequence.features = pack_normal(sequence.features) + end + end + if not success(2,pass) then + -- return + end + end + + for pass=1,2 do + local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass) + for unicode, description in next, data.descriptions do + local slookups = description.slookups + if slookups then + description.slookups = pack_normal(slookups) + end + local mlookups = description.mlookups + if mlookups then + description.mlookups = pack_normal(mlookups) + end + end + end + + end + end +end + +local unpacked_mt = { + __index = + function(t,k) + t[k] = false + return k -- next time true + end +} + +local function unpackdata(data) + if data then + local tables = data.tables + if tables then + local resources = data.resources + local lookuptypes = resources.lookuptypes + local unpacked = { } + setmetatable(unpacked,unpacked_mt) + for unicode, description in next, data.descriptions do + local tv = tables[description.boundingbox] + if tv then + description.boundingbox = tv + end + local slookups = 
description.slookups + if slookups then + local tv = tables[slookups] + if tv then + description.slookups = tv + slookups = unpacked[tv] + end + if slookups then + for tag, lookup in next, slookups do + local what = lookuptypes[tag] + if what == "pair" then + local tv = tables[lookup[2]] + if tv then + lookup[2] = tv + end + local tv = tables[lookup[3]] + if tv then + lookup[3] = tv + end + elseif what ~= "substitution" then + local tv = tables[lookup] + if tv then + slookups[tag] = tv + end + end + end + end + end + local mlookups = description.mlookups + if mlookups then + local tv = tables[mlookups] + if tv then + description.mlookups = tv + mlookups = unpacked[tv] + end + if mlookups then + for tag, list in next, mlookups do + local tv = tables[list] + if tv then + mlookups[tag] = tv + list = unpacked[tv] + end + if list then + local what = lookuptypes[tag] + if what == "pair" then + for i=1,#list do + local lookup = list[i] + local tv = tables[lookup[2]] + if tv then + lookup[2] = tv + end + local tv = tables[lookup[3]] + if tv then + lookup[3] = tv + end + end + elseif what ~= "substitution" then + for i=1,#list do + local tv = tables[list[i]] + if tv then + list[i] = tv + end + end + end + end + end + end + end + local kerns = description.kerns + if kerns then + local tm = tables[kerns] + if tm then + description.kerns = tm + kerns = unpacked[tm] + end + if kerns then + for k, kern in next, kerns do + local tv = tables[kern] + if tv then + kerns[k] = tv + end + end + end + end + local math = description.math + if math then + local kerns = math.kerns + if kerns then + local tm = tables[kerns] + if tm then + math.kerns = tm + kerns = unpacked[tm] + end + if kerns then + for k, kern in next, kerns do + local tv = tables[kern] + if tv then + kerns[k] = tv + end + end + end + end + end + local anchors = description.anchors + if anchors then + local ta = tables[anchors] + if ta then + description.anchors = ta + anchors = unpacked[ta] + end + if anchors then + for tag, anchor in next, anchors do + if tag == "baselig" then + for _, list in next, anchor do + for i=1,#list do + local tv = tables[list[i]] + if tv then + list[i] = tv + end + end + end + else + for a, data in next, anchor do + local tv = tables[data] + if tv then + anchor[a] = tv + end + end + end + end + end + end + local altuni = description.altuni + if altuni then + local altuni = tables[altuni] + if altuni then + description.altuni = altuni + for i=1,#altuni do + local tv = tables[altuni[i]] + if tv then + altuni[i] = tv + end + end + end + end + end + local lookups = data.lookups + if lookups then + for _, lookup in next, lookups do + local rules = lookup.rules + if rules then + for i=1,#rules do -- was next loop + local rule = rules[i] + local before = rule.before + if before then + local tv = tables[before] + if tv then + rule.before = tv + before = unpacked[tv] + end + if before then + for i=1,#before do + local tv = tables[before[i]] + if tv then + before[i] = tv + end + end + end + end + local after = rule.after + if after then + local tv = tables[after] + if tv then + rule.after = tv + after = unpacked[tv] + end + if after then + for i=1,#after do + local tv = tables[after[i]] + if tv then + after[i] = tv + end + end + end + end + local current = rule.current + if current then + local tv = tables[current] + if tv then + rule.current = tv + current = unpacked[tv] + end + if current then + for i=1,#current do + local tv = tables[current[i]] + if tv then + current[i] = tv + end + end + end + end + local replacements = 
rule.replacements + if replacements then + local tv = tables[replacements] + if tv then + rule.replacements = tv + end + end + local fore = rule.fore + if fore then + local tv = tables[fore] + if tv then + rule.fore = tv + end + end + local back = rule.back + if back then + local tv = tables[back] + if tv then + rule.back = tv + end + end + local names = rule.names + if names then + local tv = tables[names] + if tv then + rule.names = tv + end + end + local lookups = rule.lookups + if lookups then + local tv = tables[lookups] + if tv then + rule.lookups = tv + end + end + end + end + end + end + local anchor_to_lookup = resources.anchor_to_lookup + if anchor_to_lookup then + for anchor, lookup in next, anchor_to_lookup do + local tv = tables[lookup] + if tv then + anchor_to_lookup[anchor] = tv + end + end + end + local lookup_to_anchor = resources.lookup_to_anchor + if lookup_to_anchor then + for lookup, anchor in next, lookup_to_anchor do + local tv = tables[anchor] + if tv then + lookup_to_anchor[lookup] = tv + end + end + end + local ls = resources.sequences + if ls then + for _, feature in next, ls do + local flags = feature.flags + if flags then + local tv = tables[flags] + if tv then + feature.flags = tv + end + end + local subtables = feature.subtables + if subtables then + local tv = tables[subtables] + if tv then + feature.subtables = tv + end + end + local features = feature.features + if features then + local tv = tables[features] + if tv then + feature.features = tv + features = unpacked[tv] + end + if features then + for script, data in next, features do + local tv = tables[data] + if tv then + features[script] = tv + end + end + end + end + end + end + local lookups = resources.lookups + if lookups then + for _, lookup in next, lookups do + local flags = lookup.flags + if flags then + local tv = tables[flags] + if tv then + lookup.flags = tv + end + end + local subtables = lookup.subtables + if subtables then + local tv = tables[subtables] + if tv then + lookup.subtables = tv + end + end + end + end + local features = resources.features + if features then + for _, what in next, glists do + local feature = features[what] + if feature then + for tag, spec in next, feature do + local tv = tables[spec] + if tv then + feature[tag] = tv + end + end + end + end + end + data.tables = nil + end + end +end + +if otf.enhancers.register then + + otf.enhancers.register( "pack", packdata) + otf.enhancers.register("unpack",unpackdata) + +-- todo: directive + +end + +otf.enhancers.unpack = unpackdata -- used elsewhere diff --git a/tex/context/base/font-ott.lua b/tex/context/base/font-ott.lua index e3aacd0d1..3b171c4a4 100644 --- a/tex/context/base/font-ott.lua +++ b/tex/context/base/font-ott.lua @@ -1,1113 +1,1113 @@ -if not modules then modules = { } end modules ['font-ott'] = { - version = 1.001, - comment = "companion to font-otf.lua (tables)", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", - -- dataonly = true, -} - -local type, next, tonumber, tostring, rawget, rawset = type, next, tonumber, tostring, rawget, rawset -local gsub, lower, format, match = string.gsub, string.lower, string.format, string.match -local is_boolean = string.is_boolean - -local setmetatableindex = table.setmetatableindex -local setmetatablenewindex = table.setmetatablenewindex -local allocate = utilities.storage.allocate - -local fonts = fonts -local otf = fonts.handlers.otf -local otffeatures = otf.features 
-local registerotffeature = otffeatures.register - -local tables = otf.tables or { } -otf.tables = tables - -local statistics = otf.statistics or { } -otf.statistics = statistics - -local scripts = allocate { - ['arab'] = 'arabic', - ['armn'] = 'armenian', - ['bali'] = 'balinese', - ['beng'] = 'bengali', - ['bopo'] = 'bopomofo', - ['brai'] = 'braille', - ['bugi'] = 'buginese', - ['buhd'] = 'buhid', - ['byzm'] = 'byzantine music', - ['cans'] = 'canadian syllabics', - ['cher'] = 'cherokee', - ['copt'] = 'coptic', - ['cprt'] = 'cypriot syllabary', - ['cyrl'] = 'cyrillic', - ['deva'] = 'devanagari', - ['dsrt'] = 'deseret', - ['ethi'] = 'ethiopic', - ['geor'] = 'georgian', - ['glag'] = 'glagolitic', - ['goth'] = 'gothic', - ['grek'] = 'greek', - ['gujr'] = 'gujarati', - ['guru'] = 'gurmukhi', - ['hang'] = 'hangul', - ['hani'] = 'cjk ideographic', - ['hano'] = 'hanunoo', - ['hebr'] = 'hebrew', - ['ital'] = 'old italic', - ['jamo'] = 'hangul jamo', - ['java'] = 'javanese', - ['kana'] = 'hiragana and katakana', - ['khar'] = 'kharosthi', - ['khmr'] = 'khmer', - ['knda'] = 'kannada', - ['lao' ] = 'lao', - ['latn'] = 'latin', - ['limb'] = 'limbu', - ['linb'] = 'linear b', - ['math'] = 'mathematical alphanumeric symbols', - ['mlym'] = 'malayalam', - ['mong'] = 'mongolian', - ['musc'] = 'musical symbols', - ['mymr'] = 'myanmar', - ['nko' ] = "n'ko", - ['ogam'] = 'ogham', - ['orya'] = 'oriya', - ['osma'] = 'osmanya', - ['phag'] = 'phags-pa', - ['phnx'] = 'phoenician', - ['runr'] = 'runic', - ['shaw'] = 'shavian', - ['sinh'] = 'sinhala', - ['sylo'] = 'syloti nagri', - ['syrc'] = 'syriac', - ['tagb'] = 'tagbanwa', - ['tale'] = 'tai le', - ['talu'] = 'tai lu', - ['taml'] = 'tamil', - ['telu'] = 'telugu', - ['tfng'] = 'tifinagh', - ['tglg'] = 'tagalog', - ['thaa'] = 'thaana', - ['thai'] = 'thai', - ['tibt'] = 'tibetan', - ['ugar'] = 'ugaritic cuneiform', - ['xpeo'] = 'old persian cuneiform', - ['xsux'] = 'sumero-akkadian cuneiform', - ['yi' ] = 'yi', -} - -local languages = allocate { - ['aba'] = 'abaza', - ['abk'] = 'abkhazian', - ['ady'] = 'adyghe', - ['afk'] = 'afrikaans', - ['afr'] = 'afar', - ['agw'] = 'agaw', - ['als'] = 'alsatian', - ['alt'] = 'altai', - ['amh'] = 'amharic', - ['ara'] = 'arabic', - ['ari'] = 'aari', - ['ark'] = 'arakanese', - ['asm'] = 'assamese', - ['ath'] = 'athapaskan', - ['avr'] = 'avar', - ['awa'] = 'awadhi', - ['aym'] = 'aymara', - ['aze'] = 'azeri', - ['bad'] = 'badaga', - ['bag'] = 'baghelkhandi', - ['bal'] = 'balkar', - ['bau'] = 'baule', - ['bbr'] = 'berber', - ['bch'] = 'bench', - ['bcr'] = 'bible cree', - ['bel'] = 'belarussian', - ['bem'] = 'bemba', - ['ben'] = 'bengali', - ['bgr'] = 'bulgarian', - ['bhi'] = 'bhili', - ['bho'] = 'bhojpuri', - ['bik'] = 'bikol', - ['bil'] = 'bilen', - ['bkf'] = 'blackfoot', - ['bli'] = 'balochi', - ['bln'] = 'balante', - ['blt'] = 'balti', - ['bmb'] = 'bambara', - ['bml'] = 'bamileke', - ['bos'] = 'bosnian', - ['bre'] = 'breton', - ['brh'] = 'brahui', - ['bri'] = 'braj bhasha', - ['brm'] = 'burmese', - ['bsh'] = 'bashkir', - ['bti'] = 'beti', - ['cat'] = 'catalan', - ['ceb'] = 'cebuano', - ['che'] = 'chechen', - ['chg'] = 'chaha gurage', - ['chh'] = 'chattisgarhi', - ['chi'] = 'chichewa', - ['chk'] = 'chukchi', - ['chp'] = 'chipewyan', - ['chr'] = 'cherokee', - ['chu'] = 'chuvash', - ['cmr'] = 'comorian', - ['cop'] = 'coptic', - ['cos'] = 'corsican', - ['cre'] = 'cree', - ['crr'] = 'carrier', - ['crt'] = 'crimean tatar', - ['csl'] = 'church slavonic', - ['csy'] = 'czech', - ['dan'] = 'danish', - ['dar'] = 'dargwa', - ['dcr'] = 'woods 
cree', - ['deu'] = 'german', - ['dgr'] = 'dogri', - ['div'] = 'divehi', - ['djr'] = 'djerma', - ['dng'] = 'dangme', - ['dnk'] = 'dinka', - ['dri'] = 'dari', - ['dun'] = 'dungan', - ['dzn'] = 'dzongkha', - ['ebi'] = 'ebira', - ['ecr'] = 'eastern cree', - ['edo'] = 'edo', - ['efi'] = 'efik', - ['ell'] = 'greek', - ['eng'] = 'english', - ['erz'] = 'erzya', - ['esp'] = 'spanish', - ['eti'] = 'estonian', - ['euq'] = 'basque', - ['evk'] = 'evenki', - ['evn'] = 'even', - ['ewe'] = 'ewe', - ['fan'] = 'french antillean', - ['far'] = 'farsi', - ['fin'] = 'finnish', - ['fji'] = 'fijian', - ['fle'] = 'flemish', - ['fne'] = 'forest nenets', - ['fon'] = 'fon', - ['fos'] = 'faroese', - ['fra'] = 'french', - ['fri'] = 'frisian', - ['frl'] = 'friulian', - ['fta'] = 'futa', - ['ful'] = 'fulani', - ['gad'] = 'ga', - ['gae'] = 'gaelic', - ['gag'] = 'gagauz', - ['gal'] = 'galician', - ['gar'] = 'garshuni', - ['gaw'] = 'garhwali', - ['gez'] = "ge'ez", - ['gil'] = 'gilyak', - ['gmz'] = 'gumuz', - ['gon'] = 'gondi', - ['grn'] = 'greenlandic', - ['gro'] = 'garo', - ['gua'] = 'guarani', - ['guj'] = 'gujarati', - ['hai'] = 'haitian', - ['hal'] = 'halam', - ['har'] = 'harauti', - ['hau'] = 'hausa', - ['haw'] = 'hawaiin', - ['hbn'] = 'hammer-banna', - ['hil'] = 'hiligaynon', - ['hin'] = 'hindi', - ['hma'] = 'high mari', - ['hnd'] = 'hindko', - ['ho'] = 'ho', - ['hri'] = 'harari', - ['hrv'] = 'croatian', - ['hun'] = 'hungarian', - ['hye'] = 'armenian', - ['ibo'] = 'igbo', - ['ijo'] = 'ijo', - ['ilo'] = 'ilokano', - ['ind'] = 'indonesian', - ['ing'] = 'ingush', - ['inu'] = 'inuktitut', - ['iri'] = 'irish', - ['irt'] = 'irish traditional', - ['isl'] = 'icelandic', - ['ism'] = 'inari sami', - ['ita'] = 'italian', - ['iwr'] = 'hebrew', - ['jan'] = 'japanese', - ['jav'] = 'javanese', - ['jii'] = 'yiddish', - ['jud'] = 'judezmo', - ['jul'] = 'jula', - ['kab'] = 'kabardian', - ['kac'] = 'kachchi', - ['kal'] = 'kalenjin', - ['kan'] = 'kannada', - ['kar'] = 'karachay', - ['kat'] = 'georgian', - ['kaz'] = 'kazakh', - ['keb'] = 'kebena', - ['kge'] = 'khutsuri georgian', - ['kha'] = 'khakass', - ['khk'] = 'khanty-kazim', - ['khm'] = 'khmer', - ['khs'] = 'khanty-shurishkar', - ['khv'] = 'khanty-vakhi', - ['khw'] = 'khowar', - ['kik'] = 'kikuyu', - ['kir'] = 'kirghiz', - ['kis'] = 'kisii', - ['kkn'] = 'kokni', - ['klm'] = 'kalmyk', - ['kmb'] = 'kamba', - ['kmn'] = 'kumaoni', - ['kmo'] = 'komo', - ['kms'] = 'komso', - ['knr'] = 'kanuri', - ['kod'] = 'kodagu', - ['koh'] = 'korean old hangul', - ['kok'] = 'konkani', - ['kon'] = 'kikongo', - ['kop'] = 'komi-permyak', - ['kor'] = 'korean', - ['koz'] = 'komi-zyrian', - ['kpl'] = 'kpelle', - ['kri'] = 'krio', - ['krk'] = 'karakalpak', - ['krl'] = 'karelian', - ['krm'] = 'karaim', - ['krn'] = 'karen', - ['krt'] = 'koorete', - ['ksh'] = 'kashmiri', - ['ksi'] = 'khasi', - ['ksm'] = 'kildin sami', - ['kui'] = 'kui', - ['kul'] = 'kulvi', - ['kum'] = 'kumyk', - ['kur'] = 'kurdish', - ['kuu'] = 'kurukh', - ['kuy'] = 'kuy', - ['kyk'] = 'koryak', - ['lad'] = 'ladin', - ['lah'] = 'lahuli', - ['lak'] = 'lak', - ['lam'] = 'lambani', - ['lao'] = 'lao', - ['lat'] = 'latin', - ['laz'] = 'laz', - ['lcr'] = 'l-cree', - ['ldk'] = 'ladakhi', - ['lez'] = 'lezgi', - ['lin'] = 'lingala', - ['lma'] = 'low mari', - ['lmb'] = 'limbu', - ['lmw'] = 'lomwe', - ['lsb'] = 'lower sorbian', - ['lsm'] = 'lule sami', - ['lth'] = 'lithuanian', - ['ltz'] = 'luxembourgish', - ['lub'] = 'luba', - ['lug'] = 'luganda', - ['luh'] = 'luhya', - ['luo'] = 'luo', - ['lvi'] = 'latvian', - ['maj'] = 'majang', - ['mak'] = 'makua', - 
['mal'] = 'malayalam traditional', - ['man'] = 'mansi', - ['map'] = 'mapudungun', - ['mar'] = 'marathi', - ['maw'] = 'marwari', - ['mbn'] = 'mbundu', - ['mch'] = 'manchu', - ['mcr'] = 'moose cree', - ['mde'] = 'mende', - ['men'] = "me'en", - ['miz'] = 'mizo', - ['mkd'] = 'macedonian', - ['mle'] = 'male', - ['mlg'] = 'malagasy', - ['mln'] = 'malinke', - ['mlr'] = 'malayalam reformed', - ['mly'] = 'malay', - ['mnd'] = 'mandinka', - ['mng'] = 'mongolian', - ['mni'] = 'manipuri', - ['mnk'] = 'maninka', - ['mnx'] = 'manx gaelic', - ['moh'] = 'mohawk', - ['mok'] = 'moksha', - ['mol'] = 'moldavian', - ['mon'] = 'mon', - ['mor'] = 'moroccan', - ['mri'] = 'maori', - ['mth'] = 'maithili', - ['mts'] = 'maltese', - ['mun'] = 'mundari', - ['nag'] = 'naga-assamese', - ['nan'] = 'nanai', - ['nas'] = 'naskapi', - ['ncr'] = 'n-cree', - ['ndb'] = 'ndebele', - ['ndg'] = 'ndonga', - ['nep'] = 'nepali', - ['new'] = 'newari', - ['ngr'] = 'nagari', - ['nhc'] = 'norway house cree', - ['nis'] = 'nisi', - ['niu'] = 'niuean', - ['nkl'] = 'nkole', - ['nko'] = "n'ko", - ['nld'] = 'dutch', - ['nog'] = 'nogai', - ['nor'] = 'norwegian', - ['nsm'] = 'northern sami', - ['nta'] = 'northern tai', - ['nto'] = 'esperanto', - ['nyn'] = 'nynorsk', - ['oci'] = 'occitan', - ['ocr'] = 'oji-cree', - ['ojb'] = 'ojibway', - ['ori'] = 'oriya', - ['oro'] = 'oromo', - ['oss'] = 'ossetian', - ['paa'] = 'palestinian aramaic', - ['pal'] = 'pali', - ['pan'] = 'punjabi', - ['pap'] = 'palpa', - ['pas'] = 'pashto', - ['pgr'] = 'polytonic greek', - ['pil'] = 'pilipino', - ['plg'] = 'palaung', - ['plk'] = 'polish', - ['pro'] = 'provencal', - ['ptg'] = 'portuguese', - ['qin'] = 'chin', - ['raj'] = 'rajasthani', - ['rbu'] = 'russian buriat', - ['rcr'] = 'r-cree', - ['ria'] = 'riang', - ['rms'] = 'rhaeto-romanic', - ['rom'] = 'romanian', - ['roy'] = 'romany', - ['rsy'] = 'rusyn', - ['rua'] = 'ruanda', - ['rus'] = 'russian', - ['sad'] = 'sadri', - ['san'] = 'sanskrit', - ['sat'] = 'santali', - ['say'] = 'sayisi', - ['sek'] = 'sekota', - ['sel'] = 'selkup', - ['sgo'] = 'sango', - ['shn'] = 'shan', - ['sib'] = 'sibe', - ['sid'] = 'sidamo', - ['sig'] = 'silte gurage', - ['sks'] = 'skolt sami', - ['sky'] = 'slovak', - ['sla'] = 'slavey', - ['slv'] = 'slovenian', - ['sml'] = 'somali', - ['smo'] = 'samoan', - ['sna'] = 'sena', - ['snd'] = 'sindhi', - ['snh'] = 'sinhalese', - ['snk'] = 'soninke', - ['sog'] = 'sodo gurage', - ['sot'] = 'sotho', - ['sqi'] = 'albanian', - ['srb'] = 'serbian', - ['srk'] = 'saraiki', - ['srr'] = 'serer', - ['ssl'] = 'south slavey', - ['ssm'] = 'southern sami', - ['sur'] = 'suri', - ['sva'] = 'svan', - ['sve'] = 'swedish', - ['swa'] = 'swadaya aramaic', - ['swk'] = 'swahili', - ['swz'] = 'swazi', - ['sxt'] = 'sutu', - ['syr'] = 'syriac', - ['tab'] = 'tabasaran', - ['taj'] = 'tajiki', - ['tam'] = 'tamil', - ['tat'] = 'tatar', - ['tcr'] = 'th-cree', - ['tel'] = 'telugu', - ['tgn'] = 'tongan', - ['tgr'] = 'tigre', - ['tgy'] = 'tigrinya', - ['tha'] = 'thai', - ['tht'] = 'tahitian', - ['tib'] = 'tibetan', - ['tkm'] = 'turkmen', - ['tmn'] = 'temne', - ['tna'] = 'tswana', - ['tne'] = 'tundra nenets', - ['tng'] = 'tonga', - ['tod'] = 'todo', - ['trk'] = 'turkish', - ['tsg'] = 'tsonga', - ['tua'] = 'turoyo aramaic', - ['tul'] = 'tulu', - ['tuv'] = 'tuvin', - ['twi'] = 'twi', - ['udm'] = 'udmurt', - ['ukr'] = 'ukrainian', - ['urd'] = 'urdu', - ['usb'] = 'upper sorbian', - ['uyg'] = 'uyghur', - ['uzb'] = 'uzbek', - ['ven'] = 'venda', - ['vit'] = 'vietnamese', - ['wa' ] = 'wa', - ['wag'] = 'wagdi', - ['wcr'] = 'west-cree', - ['wel'] = 
'welsh', - ['wlf'] = 'wolof', - ['xbd'] = 'tai lue', - ['xhs'] = 'xhosa', - ['yak'] = 'yakut', - ['yba'] = 'yoruba', - ['ycr'] = 'y-cree', - ['yic'] = 'yi classic', - ['yim'] = 'yi modern', - ['zhh'] = 'chinese hong kong', - ['zhp'] = 'chinese phonetic', - ['zhs'] = 'chinese simplified', - ['zht'] = 'chinese traditional', - ['znd'] = 'zande', - ['zul'] = 'zulu' -} - -local features = allocate { - ['aalt'] = 'access all alternates', - ['abvf'] = 'above-base forms', - ['abvm'] = 'above-base mark positioning', - ['abvs'] = 'above-base substitutions', - ['afrc'] = 'alternative fractions', - ['akhn'] = 'akhands', - ['blwf'] = 'below-base forms', - ['blwm'] = 'below-base mark positioning', - ['blws'] = 'below-base substitutions', - ['c2pc'] = 'petite capitals from capitals', - ['c2sc'] = 'small capitals from capitals', - ['calt'] = 'contextual alternates', - ['case'] = 'case-sensitive forms', - ['ccmp'] = 'glyph composition/decomposition', - ['cjct'] = 'conjunct forms', - ['clig'] = 'contextual ligatures', - ['cpsp'] = 'capital spacing', - ['cswh'] = 'contextual swash', - ['curs'] = 'cursive positioning', - ['dflt'] = 'default processing', - ['dist'] = 'distances', - ['dlig'] = 'discretionary ligatures', - ['dnom'] = 'denominators', - ['dtls'] = 'dotless forms', -- math - ['expt'] = 'expert forms', - ['falt'] = 'final glyph alternates', - ['fin2'] = 'terminal forms #2', - ['fin3'] = 'terminal forms #3', - ['fina'] = 'terminal forms', - ['flac'] = 'flattened accents over capitals', -- math - ['frac'] = 'fractions', - ['fwid'] = 'full width', - ['half'] = 'half forms', - ['haln'] = 'halant forms', - ['halt'] = 'alternate half width', - ['hist'] = 'historical forms', - ['hkna'] = 'horizontal kana alternates', - ['hlig'] = 'historical ligatures', - ['hngl'] = 'hangul', - ['hojo'] = 'hojo kanji forms', - ['hwid'] = 'half width', - ['init'] = 'initial forms', - ['isol'] = 'isolated forms', - ['ital'] = 'italics', - ['jalt'] = 'justification alternatives', - ['jp04'] = 'jis2004 forms', - ['jp78'] = 'jis78 forms', - ['jp83'] = 'jis83 forms', - ['jp90'] = 'jis90 forms', - ['kern'] = 'kerning', - ['lfbd'] = 'left bounds', - ['liga'] = 'standard ligatures', - ['ljmo'] = 'leading jamo forms', - ['lnum'] = 'lining figures', - ['locl'] = 'localized forms', - ['mark'] = 'mark positioning', - ['med2'] = 'medial forms #2', - ['medi'] = 'medial forms', - ['mgrk'] = 'mathematical greek', - ['mkmk'] = 'mark to mark positioning', - ['mset'] = 'mark positioning via substitution', - ['nalt'] = 'alternate annotation forms', - ['nlck'] = 'nlc kanji forms', - ['nukt'] = 'nukta forms', - ['numr'] = 'numerators', - ['onum'] = 'old style figures', - ['opbd'] = 'optical bounds', - ['ordn'] = 'ordinals', - ['ornm'] = 'ornaments', - ['palt'] = 'proportional alternate width', - ['pcap'] = 'petite capitals', - ['pnum'] = 'proportional figures', - ['pref'] = 'pre-base forms', - ['pres'] = 'pre-base substitutions', - ['pstf'] = 'post-base forms', - ['psts'] = 'post-base substitutions', - ['pwid'] = 'proportional widths', - ['qwid'] = 'quarter widths', - ['rand'] = 'randomize', - ['rkrf'] = 'rakar forms', - ['rlig'] = 'required ligatures', - ['rphf'] = 'reph form', - ['rtbd'] = 'right bounds', - ['rtla'] = 'right-to-left alternates', - ['rtlm'] = 'right to left math', -- math - ['ruby'] = 'ruby notation forms', - ['salt'] = 'stylistic alternates', - ['sinf'] = 'scientific inferiors', - ['size'] = 'optical size', - ['smcp'] = 'small capitals', - ['smpl'] = 'simplified forms', - -- ['ss01'] = 'stylistic set 1', - -- ['ss02'] = 
'stylistic set 2', - -- ['ss03'] = 'stylistic set 3', - -- ['ss04'] = 'stylistic set 4', - -- ['ss05'] = 'stylistic set 5', - -- ['ss06'] = 'stylistic set 6', - -- ['ss07'] = 'stylistic set 7', - -- ['ss08'] = 'stylistic set 8', - -- ['ss09'] = 'stylistic set 9', - -- ['ss10'] = 'stylistic set 10', - -- ['ss11'] = 'stylistic set 11', - -- ['ss12'] = 'stylistic set 12', - -- ['ss13'] = 'stylistic set 13', - -- ['ss14'] = 'stylistic set 14', - -- ['ss15'] = 'stylistic set 15', - -- ['ss16'] = 'stylistic set 16', - -- ['ss17'] = 'stylistic set 17', - -- ['ss18'] = 'stylistic set 18', - -- ['ss19'] = 'stylistic set 19', - -- ['ss20'] = 'stylistic set 20', - ['ssty'] = 'script style', -- math - ['subs'] = 'subscript', - ['sups'] = 'superscript', - ['swsh'] = 'swash', - ['titl'] = 'titling', - ['tjmo'] = 'trailing jamo forms', - ['tnam'] = 'traditional name forms', - ['tnum'] = 'tabular figures', - ['trad'] = 'traditional forms', - ['twid'] = 'third widths', - ['unic'] = 'unicase', - ['valt'] = 'alternate vertical metrics', - ['vatu'] = 'vattu variants', - ['vert'] = 'vertical writing', - ['vhal'] = 'alternate vertical half metrics', - ['vjmo'] = 'vowel jamo forms', - ['vkna'] = 'vertical kana alternates', - ['vkrn'] = 'vertical kerning', - ['vpal'] = 'proportional alternate vertical metrics', - ['vrt2'] = 'vertical rotation', - ['zero'] = 'slashed zero', - - ['trep'] = 'traditional tex replacements', - ['tlig'] = 'traditional tex ligatures', - - ['ss..'] = 'stylistic set ..', - ['cv..'] = 'character variant ..', - ['js..'] = 'justification ..', - - ["dv.."] = "devanagari ..", -} - -local baselines = allocate { - ['hang'] = 'hanging baseline', - ['icfb'] = 'ideographic character face bottom edge baseline', - ['icft'] = 'ideographic character face tope edige baseline', - ['ideo'] = 'ideographic em-box bottom edge baseline', - ['idtp'] = 'ideographic em-box top edge baseline', - ['math'] = 'mathmatical centered baseline', - ['romn'] = 'roman baseline' -} - -tables.scripts = scripts -tables.languages = languages -tables.features = features -tables.baselines = baselines - -local acceptscripts = true directives.register("otf.acceptscripts", function(v) acceptscripts = v end) -local acceptlanguages = true directives.register("otf.acceptlanguages", function(v) acceptlanguages = v end) - -local report_checks = logs.reporter("fonts","checks") - --- hm, we overload the metatables - -if otffeatures.features then - for k, v in next, otffeatures.features do - features[k] = v - end - otffeatures.features = features -end - -local function swapped(h) - local r = { } - for k, v in next, h do - r[gsub(v,"[^a-z0-9]","")] = k -- is already lower - end - return r -end - -local verbosescripts = allocate(swapped(scripts )) -local verboselanguages = allocate(swapped(languages)) -local verbosefeatures = allocate(swapped(features )) -local verbosebaselines = allocate(swapped(baselines)) - --- lets forget about trailing spaces - -local function resolve(t,k) - if k then - k = gsub(lower(k),"[^a-z0-9]","") - local v = rawget(t,k) - if v then - return v - end - end -end - -setmetatableindex(verbosescripts, resolve) -setmetatableindex(verboselanguages, resolve) -setmetatableindex(verbosefeatures, resolve) -setmetatableindex(verbosebaselines, resolve) - --- We could optimize the next lookups by using an extra metatable and storing --- already found values but in practice there are not that many lookups so --- it's never a bottleneck. 
- -setmetatableindex(scripts, function(t,k) - if k then - k = lower(k) - if k == "dflt" then - return k - end - local v = rawget(t,k) - if v then - return v - end - k = gsub(k," ","") - v = rawget(t,v) - if v then - return v - elseif acceptscripts then - report_checks("registering extra script %a",k) - rawset(t,k,k) - return k - end - end - return "dflt" -end) - -setmetatableindex(languages, function(t,k) - if k then - k = lower(k) - if k == "dflt" then - return k - end - local v = rawget(t,k) - if v then - return v - end - k = gsub(k," ","") - v = rawget(t,v) - if v then - return v - elseif acceptlanguages then - report_checks("registering extra language %a",k) - rawset(t,k,k) - return k - end - end - return "dflt" -end) - -setmetatablenewindex(languages, "ignore") -setmetatablenewindex(baselines, "ignore") -setmetatablenewindex(baselines, "ignore") - -local function resolve(t,k) - if k then - k = lower(k) - local v = rawget(t,k) - if v then - return v - end - k = gsub(k," ","") - local v = rawget(t,k) - if v then - return v - end - local tag, dd = match(k,"(..)(%d+)") - if tag and dd then - local v = rawget(t,tag) - if v then - return v -- return format(v,tonumber(dd)) -- old way - else - local v = rawget(t,tag.."..") -- nicer in overview - if v then - return (gsub(v,"%.%.",tonumber(dd))) -- new way - end - end - end - end - return k -- "dflt" -end - -setmetatableindex(features, resolve) - -local function assign(t,k,v) - if k and v then - v = lower(v) - rawset(t,k,v) -- rawset ? - -- rawset(features,gsub(v,"[^a-z0-9]",""),k) -- why ? old code - end -end - -setmetatablenewindex(features, assign) - -local checkers = { - rand = function(v) - return v == true and "random" or v - end -} - --- Keep this: --- --- function otf.features.normalize(features) --- if features then --- local h = { } --- for k, v in next, features do --- k = lower(k) --- if k == "language" then --- v = gsub(lower(v),"[^a-z0-9]","") --- h.language = rawget(verboselanguages,v) or (languages[v] and v) or "dflt" -- auto adds --- elseif k == "script" then --- v = gsub(lower(v),"[^a-z0-9]","") --- h.script = rawget(verbosescripts,v) or (scripts[v] and v) or "dflt" -- auto adds --- else --- if type(v) == "string" then --- local b = is_boolean(v) --- if type(b) == "nil" then --- v = tonumber(v) or lower(v) --- else --- v = b --- end --- end --- if not rawget(features,k) then --- k = rawget(verbosefeatures,k) or k --- end --- local c = checkers[k] --- h[k] = c and c(v) or v --- end --- end --- return h --- end --- end - --- inspect(fonts.handlers.otf.statistics.usedfeatures) - -if not storage then - return -end - -local usedfeatures = statistics.usedfeatures or { } -statistics.usedfeatures = usedfeatures - -table.setmetatableindex(usedfeatures, function(t,k) if k then local v = { } t[k] = v return v end end) -- table.autotable - -storage.register("fonts/otf/usedfeatures", usedfeatures, "fonts.handlers.otf.statistics.usedfeatures" ) - -function otf.features.normalize(features) - if features then - local h = { } - for key, value in next, features do - local k = lower(key) - if k == "language" then - local v = gsub(lower(value),"[^a-z0-9]","") - h.language = rawget(verboselanguages,v) or (languages[v] and v) or "dflt" -- auto adds - elseif k == "script" then - local v = gsub(lower(value),"[^a-z0-9]","") - h.script = rawget(verbosescripts,v) or (scripts[v] and v) or "dflt" -- auto adds - else - local uk = usedfeatures[key] - local uv = uk[value] - if uv then - -- report_checks("feature value %a first seen at %a",value,key) - else - 
if type(value) == "string" then - local b = is_boolean(value) - if type(b) == "nil" then - uv = tonumber(value) or lower(value) - else - uv = b - end - else - uv = v - end - if not rawget(features,k) then - k = rawget(verbosefeatures,k) or k - end - local c = checkers[k] - if c then - uv = c(uv) or vc - end - uk[value] = uv - end - h[k] = uv - end - end - return h - end -end - ---~ table.print(otf.features.normalize({ language = "dutch", liga = "yes", ss99 = true, aalt = 3, abcd = "yes" } )) - --- When I feel the need ... - ---~ tables.aat = { ---~ [ 0] = { ---~ name = "allTypographicFeaturesType", ---~ [ 0] = "allTypeFeaturesOnSelector", ---~ [ 1] = "allTypeFeaturesOffSelector", ---~ }, ---~ [ 1] = { ---~ name = "ligaturesType", ---~ [0 ] = "requiredLigaturesOnSelector", ---~ [1 ] = "requiredLigaturesOffSelector", ---~ [2 ] = "commonLigaturesOnSelector", ---~ [3 ] = "commonLigaturesOffSelector", ---~ [4 ] = "rareLigaturesOnSelector", ---~ [5 ] = "rareLigaturesOffSelector", ---~ [6 ] = "logosOnSelector ", ---~ [7 ] = "logosOffSelector ", ---~ [8 ] = "rebusPicturesOnSelector", ---~ [9 ] = "rebusPicturesOffSelector", ---~ [10] = "diphthongLigaturesOnSelector", ---~ [11] = "diphthongLigaturesOffSelector", ---~ [12] = "squaredLigaturesOnSelector", ---~ [13] = "squaredLigaturesOffSelector", ---~ [14] = "abbrevSquaredLigaturesOnSelector", ---~ [15] = "abbrevSquaredLigaturesOffSelector", ---~ }, ---~ [ 2] = { ---~ name = "cursiveConnectionType", ---~ [ 0] = "unconnectedSelector", ---~ [ 1] = "partiallyConnectedSelector", ---~ [ 2] = "cursiveSelector ", ---~ }, ---~ [ 3] = { ---~ name = "letterCaseType", ---~ [ 0] = "upperAndLowerCaseSelector", ---~ [ 1] = "allCapsSelector ", ---~ [ 2] = "allLowerCaseSelector", ---~ [ 3] = "smallCapsSelector ", ---~ [ 4] = "initialCapsSelector", ---~ [ 5] = "initialCapsAndSmallCapsSelector", ---~ }, ---~ [ 4] = { ---~ name = "verticalSubstitutionType", ---~ [ 0] = "substituteVerticalFormsOnSelector", ---~ [ 1] = "substituteVerticalFormsOffSelector", ---~ }, ---~ [ 5] = { ---~ name = "linguisticRearrangementType", ---~ [ 0] = "linguisticRearrangementOnSelector", ---~ [ 1] = "linguisticRearrangementOffSelector", ---~ }, ---~ [ 6] = { ---~ name = "numberSpacingType", ---~ [ 0] = "monospacedNumbersSelector", ---~ [ 1] = "proportionalNumbersSelector", ---~ }, ---~ [ 7] = { ---~ name = "appleReserved1Type", ---~ }, ---~ [ 8] = { ---~ name = "smartSwashType", ---~ [ 0] = "wordInitialSwashesOnSelector", ---~ [ 1] = "wordInitialSwashesOffSelector", ---~ [ 2] = "wordFinalSwashesOnSelector", ---~ [ 3] = "wordFinalSwashesOffSelector", ---~ [ 4] = "lineInitialSwashesOnSelector", ---~ [ 5] = "lineInitialSwashesOffSelector", ---~ [ 6] = "lineFinalSwashesOnSelector", ---~ [ 7] = "lineFinalSwashesOffSelector", ---~ [ 8] = "nonFinalSwashesOnSelector", ---~ [ 9] = "nonFinalSwashesOffSelector", ---~ }, ---~ [ 9] = { ---~ name = "diacriticsType", ---~ [ 0] = "showDiacriticsSelector", ---~ [ 1] = "hideDiacriticsSelector", ---~ [ 2] = "decomposeDiacriticsSelector", ---~ }, ---~ [10] = { ---~ name = "verticalPositionType", ---~ [ 0] = "normalPositionSelector", ---~ [ 1] = "superiorsSelector ", ---~ [ 2] = "inferiorsSelector ", ---~ [ 3] = "ordinalsSelector ", ---~ }, ---~ [11] = { ---~ name = "fractionsType", ---~ [ 0] = "noFractionsSelector", ---~ [ 1] = "verticalFractionsSelector", ---~ [ 2] = "diagonalFractionsSelector", ---~ }, ---~ [12] = { ---~ name = "appleReserved2Type", ---~ }, ---~ [13] = { ---~ name = "overlappingCharactersType", ---~ [ 0] = "preventOverlapOnSelector", ---~ [ 
1] = "preventOverlapOffSelector", ---~ }, ---~ [14] = { ---~ name = "typographicExtrasType", ---~ [0 ] = "hyphensToEmDashOnSelector", ---~ [1 ] = "hyphensToEmDashOffSelector", ---~ [2 ] = "hyphenToEnDashOnSelector", ---~ [3 ] = "hyphenToEnDashOffSelector", ---~ [4 ] = "unslashedZeroOnSelector", ---~ [5 ] = "unslashedZeroOffSelector", ---~ [6 ] = "formInterrobangOnSelector", ---~ [7 ] = "formInterrobangOffSelector", ---~ [8 ] = "smartQuotesOnSelector", ---~ [9 ] = "smartQuotesOffSelector", ---~ [10] = "periodsToEllipsisOnSelector", ---~ [11] = "periodsToEllipsisOffSelector", ---~ }, ---~ [15] = { ---~ name = "mathematicalExtrasType", ---~ [ 0] = "hyphenToMinusOnSelector", ---~ [ 1] = "hyphenToMinusOffSelector", ---~ [ 2] = "asteriskToMultiplyOnSelector", ---~ [ 3] = "asteriskToMultiplyOffSelector", ---~ [ 4] = "slashToDivideOnSelector", ---~ [ 5] = "slashToDivideOffSelector", ---~ [ 6] = "inequalityLigaturesOnSelector", ---~ [ 7] = "inequalityLigaturesOffSelector", ---~ [ 8] = "exponentsOnSelector", ---~ [ 9] = "exponentsOffSelector", ---~ }, ---~ [16] = { ---~ name = "ornamentSetsType", ---~ [ 0] = "noOrnamentsSelector", ---~ [ 1] = "dingbatsSelector ", ---~ [ 2] = "piCharactersSelector", ---~ [ 3] = "fleuronsSelector ", ---~ [ 4] = "decorativeBordersSelector", ---~ [ 5] = "internationalSymbolsSelector", ---~ [ 6] = "mathSymbolsSelector", ---~ }, ---~ [17] = { ---~ name = "characterAlternativesType", ---~ [ 0] = "noAlternatesSelector", ---~ }, ---~ [18] = { ---~ name = "designComplexityType", ---~ [ 0] = "designLevel1Selector", ---~ [ 1] = "designLevel2Selector", ---~ [ 2] = "designLevel3Selector", ---~ [ 3] = "designLevel4Selector", ---~ [ 4] = "designLevel5Selector", ---~ }, ---~ [19] = { ---~ name = "styleOptionsType", ---~ [ 0] = "noStyleOptionsSelector", ---~ [ 1] = "displayTextSelector", ---~ [ 2] = "engravedTextSelector", ---~ [ 3] = "illuminatedCapsSelector", ---~ [ 4] = "titlingCapsSelector", ---~ [ 5] = "tallCapsSelector ", ---~ }, ---~ [20] = { ---~ name = "characterShapeType", ---~ [0 ] = "traditionalCharactersSelector", ---~ [1 ] = "simplifiedCharactersSelector", ---~ [2 ] = "jis1978CharactersSelector", ---~ [3 ] = "jis1983CharactersSelector", ---~ [4 ] = "jis1990CharactersSelector", ---~ [5 ] = "traditionalAltOneSelector", ---~ [6 ] = "traditionalAltTwoSelector", ---~ [7 ] = "traditionalAltThreeSelector", ---~ [8 ] = "traditionalAltFourSelector", ---~ [9 ] = "traditionalAltFiveSelector", ---~ [10] = "expertCharactersSelector", ---~ }, ---~ [21] = { ---~ name = "numberCaseType", ---~ [ 0] = "lowerCaseNumbersSelector", ---~ [ 1] = "upperCaseNumbersSelector", ---~ }, ---~ [22] = { ---~ name = "textSpacingType", ---~ [ 0] = "proportionalTextSelector", ---~ [ 1] = "monospacedTextSelector", ---~ [ 2] = "halfWidthTextSelector", ---~ [ 3] = "normallySpacedTextSelector", ---~ }, ---~ [23] = { ---~ name = "transliterationType", ---~ [ 0] = "noTransliterationSelector", ---~ [ 1] = "hanjaToHangulSelector", ---~ [ 2] = "hiraganaToKatakanaSelector", ---~ [ 3] = "katakanaToHiraganaSelector", ---~ [ 4] = "kanaToRomanizationSelector", ---~ [ 5] = "romanizationToHiraganaSelector", ---~ [ 6] = "romanizationToKatakanaSelector", ---~ [ 7] = "hanjaToHangulAltOneSelector", ---~ [ 8] = "hanjaToHangulAltTwoSelector", ---~ [ 9] = "hanjaToHangulAltThreeSelector", ---~ }, ---~ [24] = { ---~ name = "annotationType", ---~ [ 0] = "noAnnotationSelector", ---~ [ 1] = "boxAnnotationSelector", ---~ [ 2] = "roundedBoxAnnotationSelector", ---~ [ 3] = "circleAnnotationSelector", ---~ [ 4] = 
"invertedCircleAnnotationSelector", ---~ [ 5] = "parenthesisAnnotationSelector", ---~ [ 6] = "periodAnnotationSelector", ---~ [ 7] = "romanNumeralAnnotationSelector", ---~ [ 8] = "diamondAnnotationSelector", ---~ }, ---~ [25] = { ---~ name = "kanaSpacingType", ---~ [ 0] = "fullWidthKanaSelector", ---~ [ 1] = "proportionalKanaSelector", ---~ }, ---~ [26] = { ---~ name = "ideographicSpacingType", ---~ [ 0] = "fullWidthIdeographsSelector", ---~ [ 1] = "proportionalIdeographsSelector", ---~ }, ---~ [103] = { ---~ name = "cjkRomanSpacingType", ---~ [ 0] = "halfWidthCJKRomanSelector", ---~ [ 1] = "proportionalCJKRomanSelector", ---~ [ 2] = "defaultCJKRomanSelector", ---~ [ 3] = "fullWidthCJKRomanSelector", ---~ }, ---~ } +if not modules then modules = { } end modules ['font-ott'] = { + version = 1.001, + comment = "companion to font-otf.lua (tables)", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", + -- dataonly = true, +} + +local type, next, tonumber, tostring, rawget, rawset = type, next, tonumber, tostring, rawget, rawset +local gsub, lower, format, match = string.gsub, string.lower, string.format, string.match +local is_boolean = string.is_boolean + +local setmetatableindex = table.setmetatableindex +local setmetatablenewindex = table.setmetatablenewindex +local allocate = utilities.storage.allocate + +local fonts = fonts +local otf = fonts.handlers.otf +local otffeatures = otf.features +local registerotffeature = otffeatures.register + +local tables = otf.tables or { } +otf.tables = tables + +local statistics = otf.statistics or { } +otf.statistics = statistics + +local scripts = allocate { + ['arab'] = 'arabic', + ['armn'] = 'armenian', + ['bali'] = 'balinese', + ['beng'] = 'bengali', + ['bopo'] = 'bopomofo', + ['brai'] = 'braille', + ['bugi'] = 'buginese', + ['buhd'] = 'buhid', + ['byzm'] = 'byzantine music', + ['cans'] = 'canadian syllabics', + ['cher'] = 'cherokee', + ['copt'] = 'coptic', + ['cprt'] = 'cypriot syllabary', + ['cyrl'] = 'cyrillic', + ['deva'] = 'devanagari', + ['dsrt'] = 'deseret', + ['ethi'] = 'ethiopic', + ['geor'] = 'georgian', + ['glag'] = 'glagolitic', + ['goth'] = 'gothic', + ['grek'] = 'greek', + ['gujr'] = 'gujarati', + ['guru'] = 'gurmukhi', + ['hang'] = 'hangul', + ['hani'] = 'cjk ideographic', + ['hano'] = 'hanunoo', + ['hebr'] = 'hebrew', + ['ital'] = 'old italic', + ['jamo'] = 'hangul jamo', + ['java'] = 'javanese', + ['kana'] = 'hiragana and katakana', + ['khar'] = 'kharosthi', + ['khmr'] = 'khmer', + ['knda'] = 'kannada', + ['lao' ] = 'lao', + ['latn'] = 'latin', + ['limb'] = 'limbu', + ['linb'] = 'linear b', + ['math'] = 'mathematical alphanumeric symbols', + ['mlym'] = 'malayalam', + ['mong'] = 'mongolian', + ['musc'] = 'musical symbols', + ['mymr'] = 'myanmar', + ['nko' ] = "n'ko", + ['ogam'] = 'ogham', + ['orya'] = 'oriya', + ['osma'] = 'osmanya', + ['phag'] = 'phags-pa', + ['phnx'] = 'phoenician', + ['runr'] = 'runic', + ['shaw'] = 'shavian', + ['sinh'] = 'sinhala', + ['sylo'] = 'syloti nagri', + ['syrc'] = 'syriac', + ['tagb'] = 'tagbanwa', + ['tale'] = 'tai le', + ['talu'] = 'tai lu', + ['taml'] = 'tamil', + ['telu'] = 'telugu', + ['tfng'] = 'tifinagh', + ['tglg'] = 'tagalog', + ['thaa'] = 'thaana', + ['thai'] = 'thai', + ['tibt'] = 'tibetan', + ['ugar'] = 'ugaritic cuneiform', + ['xpeo'] = 'old persian cuneiform', + ['xsux'] = 'sumero-akkadian cuneiform', + ['yi' ] = 'yi', +} + +local languages = allocate { + ['aba'] = 'abaza', + ['abk'] = 
'abkhazian', + ['ady'] = 'adyghe', + ['afk'] = 'afrikaans', + ['afr'] = 'afar', + ['agw'] = 'agaw', + ['als'] = 'alsatian', + ['alt'] = 'altai', + ['amh'] = 'amharic', + ['ara'] = 'arabic', + ['ari'] = 'aari', + ['ark'] = 'arakanese', + ['asm'] = 'assamese', + ['ath'] = 'athapaskan', + ['avr'] = 'avar', + ['awa'] = 'awadhi', + ['aym'] = 'aymara', + ['aze'] = 'azeri', + ['bad'] = 'badaga', + ['bag'] = 'baghelkhandi', + ['bal'] = 'balkar', + ['bau'] = 'baule', + ['bbr'] = 'berber', + ['bch'] = 'bench', + ['bcr'] = 'bible cree', + ['bel'] = 'belarussian', + ['bem'] = 'bemba', + ['ben'] = 'bengali', + ['bgr'] = 'bulgarian', + ['bhi'] = 'bhili', + ['bho'] = 'bhojpuri', + ['bik'] = 'bikol', + ['bil'] = 'bilen', + ['bkf'] = 'blackfoot', + ['bli'] = 'balochi', + ['bln'] = 'balante', + ['blt'] = 'balti', + ['bmb'] = 'bambara', + ['bml'] = 'bamileke', + ['bos'] = 'bosnian', + ['bre'] = 'breton', + ['brh'] = 'brahui', + ['bri'] = 'braj bhasha', + ['brm'] = 'burmese', + ['bsh'] = 'bashkir', + ['bti'] = 'beti', + ['cat'] = 'catalan', + ['ceb'] = 'cebuano', + ['che'] = 'chechen', + ['chg'] = 'chaha gurage', + ['chh'] = 'chattisgarhi', + ['chi'] = 'chichewa', + ['chk'] = 'chukchi', + ['chp'] = 'chipewyan', + ['chr'] = 'cherokee', + ['chu'] = 'chuvash', + ['cmr'] = 'comorian', + ['cop'] = 'coptic', + ['cos'] = 'corsican', + ['cre'] = 'cree', + ['crr'] = 'carrier', + ['crt'] = 'crimean tatar', + ['csl'] = 'church slavonic', + ['csy'] = 'czech', + ['dan'] = 'danish', + ['dar'] = 'dargwa', + ['dcr'] = 'woods cree', + ['deu'] = 'german', + ['dgr'] = 'dogri', + ['div'] = 'divehi', + ['djr'] = 'djerma', + ['dng'] = 'dangme', + ['dnk'] = 'dinka', + ['dri'] = 'dari', + ['dun'] = 'dungan', + ['dzn'] = 'dzongkha', + ['ebi'] = 'ebira', + ['ecr'] = 'eastern cree', + ['edo'] = 'edo', + ['efi'] = 'efik', + ['ell'] = 'greek', + ['eng'] = 'english', + ['erz'] = 'erzya', + ['esp'] = 'spanish', + ['eti'] = 'estonian', + ['euq'] = 'basque', + ['evk'] = 'evenki', + ['evn'] = 'even', + ['ewe'] = 'ewe', + ['fan'] = 'french antillean', + ['far'] = 'farsi', + ['fin'] = 'finnish', + ['fji'] = 'fijian', + ['fle'] = 'flemish', + ['fne'] = 'forest nenets', + ['fon'] = 'fon', + ['fos'] = 'faroese', + ['fra'] = 'french', + ['fri'] = 'frisian', + ['frl'] = 'friulian', + ['fta'] = 'futa', + ['ful'] = 'fulani', + ['gad'] = 'ga', + ['gae'] = 'gaelic', + ['gag'] = 'gagauz', + ['gal'] = 'galician', + ['gar'] = 'garshuni', + ['gaw'] = 'garhwali', + ['gez'] = "ge'ez", + ['gil'] = 'gilyak', + ['gmz'] = 'gumuz', + ['gon'] = 'gondi', + ['grn'] = 'greenlandic', + ['gro'] = 'garo', + ['gua'] = 'guarani', + ['guj'] = 'gujarati', + ['hai'] = 'haitian', + ['hal'] = 'halam', + ['har'] = 'harauti', + ['hau'] = 'hausa', + ['haw'] = 'hawaiin', + ['hbn'] = 'hammer-banna', + ['hil'] = 'hiligaynon', + ['hin'] = 'hindi', + ['hma'] = 'high mari', + ['hnd'] = 'hindko', + ['ho'] = 'ho', + ['hri'] = 'harari', + ['hrv'] = 'croatian', + ['hun'] = 'hungarian', + ['hye'] = 'armenian', + ['ibo'] = 'igbo', + ['ijo'] = 'ijo', + ['ilo'] = 'ilokano', + ['ind'] = 'indonesian', + ['ing'] = 'ingush', + ['inu'] = 'inuktitut', + ['iri'] = 'irish', + ['irt'] = 'irish traditional', + ['isl'] = 'icelandic', + ['ism'] = 'inari sami', + ['ita'] = 'italian', + ['iwr'] = 'hebrew', + ['jan'] = 'japanese', + ['jav'] = 'javanese', + ['jii'] = 'yiddish', + ['jud'] = 'judezmo', + ['jul'] = 'jula', + ['kab'] = 'kabardian', + ['kac'] = 'kachchi', + ['kal'] = 'kalenjin', + ['kan'] = 'kannada', + ['kar'] = 'karachay', + ['kat'] = 'georgian', + ['kaz'] = 'kazakh', + ['keb'] = 'kebena', + 
['kge'] = 'khutsuri georgian', + ['kha'] = 'khakass', + ['khk'] = 'khanty-kazim', + ['khm'] = 'khmer', + ['khs'] = 'khanty-shurishkar', + ['khv'] = 'khanty-vakhi', + ['khw'] = 'khowar', + ['kik'] = 'kikuyu', + ['kir'] = 'kirghiz', + ['kis'] = 'kisii', + ['kkn'] = 'kokni', + ['klm'] = 'kalmyk', + ['kmb'] = 'kamba', + ['kmn'] = 'kumaoni', + ['kmo'] = 'komo', + ['kms'] = 'komso', + ['knr'] = 'kanuri', + ['kod'] = 'kodagu', + ['koh'] = 'korean old hangul', + ['kok'] = 'konkani', + ['kon'] = 'kikongo', + ['kop'] = 'komi-permyak', + ['kor'] = 'korean', + ['koz'] = 'komi-zyrian', + ['kpl'] = 'kpelle', + ['kri'] = 'krio', + ['krk'] = 'karakalpak', + ['krl'] = 'karelian', + ['krm'] = 'karaim', + ['krn'] = 'karen', + ['krt'] = 'koorete', + ['ksh'] = 'kashmiri', + ['ksi'] = 'khasi', + ['ksm'] = 'kildin sami', + ['kui'] = 'kui', + ['kul'] = 'kulvi', + ['kum'] = 'kumyk', + ['kur'] = 'kurdish', + ['kuu'] = 'kurukh', + ['kuy'] = 'kuy', + ['kyk'] = 'koryak', + ['lad'] = 'ladin', + ['lah'] = 'lahuli', + ['lak'] = 'lak', + ['lam'] = 'lambani', + ['lao'] = 'lao', + ['lat'] = 'latin', + ['laz'] = 'laz', + ['lcr'] = 'l-cree', + ['ldk'] = 'ladakhi', + ['lez'] = 'lezgi', + ['lin'] = 'lingala', + ['lma'] = 'low mari', + ['lmb'] = 'limbu', + ['lmw'] = 'lomwe', + ['lsb'] = 'lower sorbian', + ['lsm'] = 'lule sami', + ['lth'] = 'lithuanian', + ['ltz'] = 'luxembourgish', + ['lub'] = 'luba', + ['lug'] = 'luganda', + ['luh'] = 'luhya', + ['luo'] = 'luo', + ['lvi'] = 'latvian', + ['maj'] = 'majang', + ['mak'] = 'makua', + ['mal'] = 'malayalam traditional', + ['man'] = 'mansi', + ['map'] = 'mapudungun', + ['mar'] = 'marathi', + ['maw'] = 'marwari', + ['mbn'] = 'mbundu', + ['mch'] = 'manchu', + ['mcr'] = 'moose cree', + ['mde'] = 'mende', + ['men'] = "me'en", + ['miz'] = 'mizo', + ['mkd'] = 'macedonian', + ['mle'] = 'male', + ['mlg'] = 'malagasy', + ['mln'] = 'malinke', + ['mlr'] = 'malayalam reformed', + ['mly'] = 'malay', + ['mnd'] = 'mandinka', + ['mng'] = 'mongolian', + ['mni'] = 'manipuri', + ['mnk'] = 'maninka', + ['mnx'] = 'manx gaelic', + ['moh'] = 'mohawk', + ['mok'] = 'moksha', + ['mol'] = 'moldavian', + ['mon'] = 'mon', + ['mor'] = 'moroccan', + ['mri'] = 'maori', + ['mth'] = 'maithili', + ['mts'] = 'maltese', + ['mun'] = 'mundari', + ['nag'] = 'naga-assamese', + ['nan'] = 'nanai', + ['nas'] = 'naskapi', + ['ncr'] = 'n-cree', + ['ndb'] = 'ndebele', + ['ndg'] = 'ndonga', + ['nep'] = 'nepali', + ['new'] = 'newari', + ['ngr'] = 'nagari', + ['nhc'] = 'norway house cree', + ['nis'] = 'nisi', + ['niu'] = 'niuean', + ['nkl'] = 'nkole', + ['nko'] = "n'ko", + ['nld'] = 'dutch', + ['nog'] = 'nogai', + ['nor'] = 'norwegian', + ['nsm'] = 'northern sami', + ['nta'] = 'northern tai', + ['nto'] = 'esperanto', + ['nyn'] = 'nynorsk', + ['oci'] = 'occitan', + ['ocr'] = 'oji-cree', + ['ojb'] = 'ojibway', + ['ori'] = 'oriya', + ['oro'] = 'oromo', + ['oss'] = 'ossetian', + ['paa'] = 'palestinian aramaic', + ['pal'] = 'pali', + ['pan'] = 'punjabi', + ['pap'] = 'palpa', + ['pas'] = 'pashto', + ['pgr'] = 'polytonic greek', + ['pil'] = 'pilipino', + ['plg'] = 'palaung', + ['plk'] = 'polish', + ['pro'] = 'provencal', + ['ptg'] = 'portuguese', + ['qin'] = 'chin', + ['raj'] = 'rajasthani', + ['rbu'] = 'russian buriat', + ['rcr'] = 'r-cree', + ['ria'] = 'riang', + ['rms'] = 'rhaeto-romanic', + ['rom'] = 'romanian', + ['roy'] = 'romany', + ['rsy'] = 'rusyn', + ['rua'] = 'ruanda', + ['rus'] = 'russian', + ['sad'] = 'sadri', + ['san'] = 'sanskrit', + ['sat'] = 'santali', + ['say'] = 'sayisi', + ['sek'] = 'sekota', + ['sel'] = 'selkup', + 
['sgo'] = 'sango', + ['shn'] = 'shan', + ['sib'] = 'sibe', + ['sid'] = 'sidamo', + ['sig'] = 'silte gurage', + ['sks'] = 'skolt sami', + ['sky'] = 'slovak', + ['sla'] = 'slavey', + ['slv'] = 'slovenian', + ['sml'] = 'somali', + ['smo'] = 'samoan', + ['sna'] = 'sena', + ['snd'] = 'sindhi', + ['snh'] = 'sinhalese', + ['snk'] = 'soninke', + ['sog'] = 'sodo gurage', + ['sot'] = 'sotho', + ['sqi'] = 'albanian', + ['srb'] = 'serbian', + ['srk'] = 'saraiki', + ['srr'] = 'serer', + ['ssl'] = 'south slavey', + ['ssm'] = 'southern sami', + ['sur'] = 'suri', + ['sva'] = 'svan', + ['sve'] = 'swedish', + ['swa'] = 'swadaya aramaic', + ['swk'] = 'swahili', + ['swz'] = 'swazi', + ['sxt'] = 'sutu', + ['syr'] = 'syriac', + ['tab'] = 'tabasaran', + ['taj'] = 'tajiki', + ['tam'] = 'tamil', + ['tat'] = 'tatar', + ['tcr'] = 'th-cree', + ['tel'] = 'telugu', + ['tgn'] = 'tongan', + ['tgr'] = 'tigre', + ['tgy'] = 'tigrinya', + ['tha'] = 'thai', + ['tht'] = 'tahitian', + ['tib'] = 'tibetan', + ['tkm'] = 'turkmen', + ['tmn'] = 'temne', + ['tna'] = 'tswana', + ['tne'] = 'tundra nenets', + ['tng'] = 'tonga', + ['tod'] = 'todo', + ['trk'] = 'turkish', + ['tsg'] = 'tsonga', + ['tua'] = 'turoyo aramaic', + ['tul'] = 'tulu', + ['tuv'] = 'tuvin', + ['twi'] = 'twi', + ['udm'] = 'udmurt', + ['ukr'] = 'ukrainian', + ['urd'] = 'urdu', + ['usb'] = 'upper sorbian', + ['uyg'] = 'uyghur', + ['uzb'] = 'uzbek', + ['ven'] = 'venda', + ['vit'] = 'vietnamese', + ['wa' ] = 'wa', + ['wag'] = 'wagdi', + ['wcr'] = 'west-cree', + ['wel'] = 'welsh', + ['wlf'] = 'wolof', + ['xbd'] = 'tai lue', + ['xhs'] = 'xhosa', + ['yak'] = 'yakut', + ['yba'] = 'yoruba', + ['ycr'] = 'y-cree', + ['yic'] = 'yi classic', + ['yim'] = 'yi modern', + ['zhh'] = 'chinese hong kong', + ['zhp'] = 'chinese phonetic', + ['zhs'] = 'chinese simplified', + ['zht'] = 'chinese traditional', + ['znd'] = 'zande', + ['zul'] = 'zulu' +} + +local features = allocate { + ['aalt'] = 'access all alternates', + ['abvf'] = 'above-base forms', + ['abvm'] = 'above-base mark positioning', + ['abvs'] = 'above-base substitutions', + ['afrc'] = 'alternative fractions', + ['akhn'] = 'akhands', + ['blwf'] = 'below-base forms', + ['blwm'] = 'below-base mark positioning', + ['blws'] = 'below-base substitutions', + ['c2pc'] = 'petite capitals from capitals', + ['c2sc'] = 'small capitals from capitals', + ['calt'] = 'contextual alternates', + ['case'] = 'case-sensitive forms', + ['ccmp'] = 'glyph composition/decomposition', + ['cjct'] = 'conjunct forms', + ['clig'] = 'contextual ligatures', + ['cpsp'] = 'capital spacing', + ['cswh'] = 'contextual swash', + ['curs'] = 'cursive positioning', + ['dflt'] = 'default processing', + ['dist'] = 'distances', + ['dlig'] = 'discretionary ligatures', + ['dnom'] = 'denominators', + ['dtls'] = 'dotless forms', -- math + ['expt'] = 'expert forms', + ['falt'] = 'final glyph alternates', + ['fin2'] = 'terminal forms #2', + ['fin3'] = 'terminal forms #3', + ['fina'] = 'terminal forms', + ['flac'] = 'flattened accents over capitals', -- math + ['frac'] = 'fractions', + ['fwid'] = 'full width', + ['half'] = 'half forms', + ['haln'] = 'halant forms', + ['halt'] = 'alternate half width', + ['hist'] = 'historical forms', + ['hkna'] = 'horizontal kana alternates', + ['hlig'] = 'historical ligatures', + ['hngl'] = 'hangul', + ['hojo'] = 'hojo kanji forms', + ['hwid'] = 'half width', + ['init'] = 'initial forms', + ['isol'] = 'isolated forms', + ['ital'] = 'italics', + ['jalt'] = 'justification alternatives', + ['jp04'] = 'jis2004 forms', + ['jp78'] = 'jis78 forms', + 
['jp83'] = 'jis83 forms', + ['jp90'] = 'jis90 forms', + ['kern'] = 'kerning', + ['lfbd'] = 'left bounds', + ['liga'] = 'standard ligatures', + ['ljmo'] = 'leading jamo forms', + ['lnum'] = 'lining figures', + ['locl'] = 'localized forms', + ['mark'] = 'mark positioning', + ['med2'] = 'medial forms #2', + ['medi'] = 'medial forms', + ['mgrk'] = 'mathematical greek', + ['mkmk'] = 'mark to mark positioning', + ['mset'] = 'mark positioning via substitution', + ['nalt'] = 'alternate annotation forms', + ['nlck'] = 'nlc kanji forms', + ['nukt'] = 'nukta forms', + ['numr'] = 'numerators', + ['onum'] = 'old style figures', + ['opbd'] = 'optical bounds', + ['ordn'] = 'ordinals', + ['ornm'] = 'ornaments', + ['palt'] = 'proportional alternate width', + ['pcap'] = 'petite capitals', + ['pnum'] = 'proportional figures', + ['pref'] = 'pre-base forms', + ['pres'] = 'pre-base substitutions', + ['pstf'] = 'post-base forms', + ['psts'] = 'post-base substitutions', + ['pwid'] = 'proportional widths', + ['qwid'] = 'quarter widths', + ['rand'] = 'randomize', + ['rkrf'] = 'rakar forms', + ['rlig'] = 'required ligatures', + ['rphf'] = 'reph form', + ['rtbd'] = 'right bounds', + ['rtla'] = 'right-to-left alternates', + ['rtlm'] = 'right to left math', -- math + ['ruby'] = 'ruby notation forms', + ['salt'] = 'stylistic alternates', + ['sinf'] = 'scientific inferiors', + ['size'] = 'optical size', + ['smcp'] = 'small capitals', + ['smpl'] = 'simplified forms', + -- ['ss01'] = 'stylistic set 1', + -- ['ss02'] = 'stylistic set 2', + -- ['ss03'] = 'stylistic set 3', + -- ['ss04'] = 'stylistic set 4', + -- ['ss05'] = 'stylistic set 5', + -- ['ss06'] = 'stylistic set 6', + -- ['ss07'] = 'stylistic set 7', + -- ['ss08'] = 'stylistic set 8', + -- ['ss09'] = 'stylistic set 9', + -- ['ss10'] = 'stylistic set 10', + -- ['ss11'] = 'stylistic set 11', + -- ['ss12'] = 'stylistic set 12', + -- ['ss13'] = 'stylistic set 13', + -- ['ss14'] = 'stylistic set 14', + -- ['ss15'] = 'stylistic set 15', + -- ['ss16'] = 'stylistic set 16', + -- ['ss17'] = 'stylistic set 17', + -- ['ss18'] = 'stylistic set 18', + -- ['ss19'] = 'stylistic set 19', + -- ['ss20'] = 'stylistic set 20', + ['ssty'] = 'script style', -- math + ['subs'] = 'subscript', + ['sups'] = 'superscript', + ['swsh'] = 'swash', + ['titl'] = 'titling', + ['tjmo'] = 'trailing jamo forms', + ['tnam'] = 'traditional name forms', + ['tnum'] = 'tabular figures', + ['trad'] = 'traditional forms', + ['twid'] = 'third widths', + ['unic'] = 'unicase', + ['valt'] = 'alternate vertical metrics', + ['vatu'] = 'vattu variants', + ['vert'] = 'vertical writing', + ['vhal'] = 'alternate vertical half metrics', + ['vjmo'] = 'vowel jamo forms', + ['vkna'] = 'vertical kana alternates', + ['vkrn'] = 'vertical kerning', + ['vpal'] = 'proportional alternate vertical metrics', + ['vrt2'] = 'vertical rotation', + ['zero'] = 'slashed zero', + + ['trep'] = 'traditional tex replacements', + ['tlig'] = 'traditional tex ligatures', + + ['ss..'] = 'stylistic set ..', + ['cv..'] = 'character variant ..', + ['js..'] = 'justification ..', + + ["dv.."] = "devanagari ..", +} + +local baselines = allocate { + ['hang'] = 'hanging baseline', + ['icfb'] = 'ideographic character face bottom edge baseline', + ['icft'] = 'ideographic character face tope edige baseline', + ['ideo'] = 'ideographic em-box bottom edge baseline', + ['idtp'] = 'ideographic em-box top edge baseline', + ['math'] = 'mathmatical centered baseline', + ['romn'] = 'roman baseline' +} + +tables.scripts = scripts +tables.languages = languages 
+tables.features = features +tables.baselines = baselines + +local acceptscripts = true directives.register("otf.acceptscripts", function(v) acceptscripts = v end) +local acceptlanguages = true directives.register("otf.acceptlanguages", function(v) acceptlanguages = v end) + +local report_checks = logs.reporter("fonts","checks") + +-- hm, we overload the metatables + +if otffeatures.features then + for k, v in next, otffeatures.features do + features[k] = v + end + otffeatures.features = features +end + +local function swapped(h) + local r = { } + for k, v in next, h do + r[gsub(v,"[^a-z0-9]","")] = k -- is already lower + end + return r +end + +local verbosescripts = allocate(swapped(scripts )) +local verboselanguages = allocate(swapped(languages)) +local verbosefeatures = allocate(swapped(features )) +local verbosebaselines = allocate(swapped(baselines)) + +-- lets forget about trailing spaces + +local function resolve(t,k) + if k then + k = gsub(lower(k),"[^a-z0-9]","") + local v = rawget(t,k) + if v then + return v + end + end +end + +setmetatableindex(verbosescripts, resolve) +setmetatableindex(verboselanguages, resolve) +setmetatableindex(verbosefeatures, resolve) +setmetatableindex(verbosebaselines, resolve) + +-- We could optimize the next lookups by using an extra metatable and storing +-- already found values but in practice there are not that many lookups so +-- it's never a bottleneck. + +setmetatableindex(scripts, function(t,k) + if k then + k = lower(k) + if k == "dflt" then + return k + end + local v = rawget(t,k) + if v then + return v + end + k = gsub(k," ","") + v = rawget(t,v) + if v then + return v + elseif acceptscripts then + report_checks("registering extra script %a",k) + rawset(t,k,k) + return k + end + end + return "dflt" +end) + +setmetatableindex(languages, function(t,k) + if k then + k = lower(k) + if k == "dflt" then + return k + end + local v = rawget(t,k) + if v then + return v + end + k = gsub(k," ","") + v = rawget(t,v) + if v then + return v + elseif acceptlanguages then + report_checks("registering extra language %a",k) + rawset(t,k,k) + return k + end + end + return "dflt" +end) + +setmetatablenewindex(languages, "ignore") +setmetatablenewindex(baselines, "ignore") +setmetatablenewindex(baselines, "ignore") + +local function resolve(t,k) + if k then + k = lower(k) + local v = rawget(t,k) + if v then + return v + end + k = gsub(k," ","") + local v = rawget(t,k) + if v then + return v + end + local tag, dd = match(k,"(..)(%d+)") + if tag and dd then + local v = rawget(t,tag) + if v then + return v -- return format(v,tonumber(dd)) -- old way + else + local v = rawget(t,tag.."..") -- nicer in overview + if v then + return (gsub(v,"%.%.",tonumber(dd))) -- new way + end + end + end + end + return k -- "dflt" +end + +setmetatableindex(features, resolve) + +local function assign(t,k,v) + if k and v then + v = lower(v) + rawset(t,k,v) -- rawset ? + -- rawset(features,gsub(v,"[^a-z0-9]",""),k) -- why ? 
old code + end +end + +setmetatablenewindex(features, assign) + +local checkers = { + rand = function(v) + return v == true and "random" or v + end +} + +-- Keep this: +-- +-- function otf.features.normalize(features) +-- if features then +-- local h = { } +-- for k, v in next, features do +-- k = lower(k) +-- if k == "language" then +-- v = gsub(lower(v),"[^a-z0-9]","") +-- h.language = rawget(verboselanguages,v) or (languages[v] and v) or "dflt" -- auto adds +-- elseif k == "script" then +-- v = gsub(lower(v),"[^a-z0-9]","") +-- h.script = rawget(verbosescripts,v) or (scripts[v] and v) or "dflt" -- auto adds +-- else +-- if type(v) == "string" then +-- local b = is_boolean(v) +-- if type(b) == "nil" then +-- v = tonumber(v) or lower(v) +-- else +-- v = b +-- end +-- end +-- if not rawget(features,k) then +-- k = rawget(verbosefeatures,k) or k +-- end +-- local c = checkers[k] +-- h[k] = c and c(v) or v +-- end +-- end +-- return h +-- end +-- end + +-- inspect(fonts.handlers.otf.statistics.usedfeatures) + +if not storage then + return +end + +local usedfeatures = statistics.usedfeatures or { } +statistics.usedfeatures = usedfeatures + +table.setmetatableindex(usedfeatures, function(t,k) if k then local v = { } t[k] = v return v end end) -- table.autotable + +storage.register("fonts/otf/usedfeatures", usedfeatures, "fonts.handlers.otf.statistics.usedfeatures" ) + +function otf.features.normalize(features) + if features then + local h = { } + for key, value in next, features do + local k = lower(key) + if k == "language" then + local v = gsub(lower(value),"[^a-z0-9]","") + h.language = rawget(verboselanguages,v) or (languages[v] and v) or "dflt" -- auto adds + elseif k == "script" then + local v = gsub(lower(value),"[^a-z0-9]","") + h.script = rawget(verbosescripts,v) or (scripts[v] and v) or "dflt" -- auto adds + else + local uk = usedfeatures[key] + local uv = uk[value] + if uv then + -- report_checks("feature value %a first seen at %a",value,key) + else + if type(value) == "string" then + local b = is_boolean(value) + if type(b) == "nil" then + uv = tonumber(value) or lower(value) + else + uv = b + end + else + uv = value + end + if not rawget(features,k) then + k = rawget(verbosefeatures,k) or k + end + local c = checkers[k] + if c then + uv = c(uv) or uv + end + uk[value] = uv + end + h[k] = uv + end + end + return h + end +end + +--~ table.print(otf.features.normalize({ language = "dutch", liga = "yes", ss99 = true, aalt = 3, abcd = "yes" } )) + +-- When I feel the need ... 
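-- A minimal usage sketch (illustrative only, assuming the module above is
-- loaded in a ConTeXt run): the normalizer resolves verbose script, language
-- and feature names to their OpenType tags and coerces string values into
-- booleans or numbers where possible.

local normalize = fonts.handlers.otf.features.normalize

inspect(normalize {
    language      = "dutch",   -- verbose name, resolves to "nld"
    script        = "latin",   -- verbose name, resolves to "latn"
    smallcapitals = "yes",     -- verbose name plus boolean-like string
    aalt          = "3",       -- numeric string
})

-- should yield, roughly: { language = "nld", script = "latn", smcp = true, aalt = 3 }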
+ +--~ tables.aat = { +--~ [ 0] = { +--~ name = "allTypographicFeaturesType", +--~ [ 0] = "allTypeFeaturesOnSelector", +--~ [ 1] = "allTypeFeaturesOffSelector", +--~ }, +--~ [ 1] = { +--~ name = "ligaturesType", +--~ [0 ] = "requiredLigaturesOnSelector", +--~ [1 ] = "requiredLigaturesOffSelector", +--~ [2 ] = "commonLigaturesOnSelector", +--~ [3 ] = "commonLigaturesOffSelector", +--~ [4 ] = "rareLigaturesOnSelector", +--~ [5 ] = "rareLigaturesOffSelector", +--~ [6 ] = "logosOnSelector ", +--~ [7 ] = "logosOffSelector ", +--~ [8 ] = "rebusPicturesOnSelector", +--~ [9 ] = "rebusPicturesOffSelector", +--~ [10] = "diphthongLigaturesOnSelector", +--~ [11] = "diphthongLigaturesOffSelector", +--~ [12] = "squaredLigaturesOnSelector", +--~ [13] = "squaredLigaturesOffSelector", +--~ [14] = "abbrevSquaredLigaturesOnSelector", +--~ [15] = "abbrevSquaredLigaturesOffSelector", +--~ }, +--~ [ 2] = { +--~ name = "cursiveConnectionType", +--~ [ 0] = "unconnectedSelector", +--~ [ 1] = "partiallyConnectedSelector", +--~ [ 2] = "cursiveSelector ", +--~ }, +--~ [ 3] = { +--~ name = "letterCaseType", +--~ [ 0] = "upperAndLowerCaseSelector", +--~ [ 1] = "allCapsSelector ", +--~ [ 2] = "allLowerCaseSelector", +--~ [ 3] = "smallCapsSelector ", +--~ [ 4] = "initialCapsSelector", +--~ [ 5] = "initialCapsAndSmallCapsSelector", +--~ }, +--~ [ 4] = { +--~ name = "verticalSubstitutionType", +--~ [ 0] = "substituteVerticalFormsOnSelector", +--~ [ 1] = "substituteVerticalFormsOffSelector", +--~ }, +--~ [ 5] = { +--~ name = "linguisticRearrangementType", +--~ [ 0] = "linguisticRearrangementOnSelector", +--~ [ 1] = "linguisticRearrangementOffSelector", +--~ }, +--~ [ 6] = { +--~ name = "numberSpacingType", +--~ [ 0] = "monospacedNumbersSelector", +--~ [ 1] = "proportionalNumbersSelector", +--~ }, +--~ [ 7] = { +--~ name = "appleReserved1Type", +--~ }, +--~ [ 8] = { +--~ name = "smartSwashType", +--~ [ 0] = "wordInitialSwashesOnSelector", +--~ [ 1] = "wordInitialSwashesOffSelector", +--~ [ 2] = "wordFinalSwashesOnSelector", +--~ [ 3] = "wordFinalSwashesOffSelector", +--~ [ 4] = "lineInitialSwashesOnSelector", +--~ [ 5] = "lineInitialSwashesOffSelector", +--~ [ 6] = "lineFinalSwashesOnSelector", +--~ [ 7] = "lineFinalSwashesOffSelector", +--~ [ 8] = "nonFinalSwashesOnSelector", +--~ [ 9] = "nonFinalSwashesOffSelector", +--~ }, +--~ [ 9] = { +--~ name = "diacriticsType", +--~ [ 0] = "showDiacriticsSelector", +--~ [ 1] = "hideDiacriticsSelector", +--~ [ 2] = "decomposeDiacriticsSelector", +--~ }, +--~ [10] = { +--~ name = "verticalPositionType", +--~ [ 0] = "normalPositionSelector", +--~ [ 1] = "superiorsSelector ", +--~ [ 2] = "inferiorsSelector ", +--~ [ 3] = "ordinalsSelector ", +--~ }, +--~ [11] = { +--~ name = "fractionsType", +--~ [ 0] = "noFractionsSelector", +--~ [ 1] = "verticalFractionsSelector", +--~ [ 2] = "diagonalFractionsSelector", +--~ }, +--~ [12] = { +--~ name = "appleReserved2Type", +--~ }, +--~ [13] = { +--~ name = "overlappingCharactersType", +--~ [ 0] = "preventOverlapOnSelector", +--~ [ 1] = "preventOverlapOffSelector", +--~ }, +--~ [14] = { +--~ name = "typographicExtrasType", +--~ [0 ] = "hyphensToEmDashOnSelector", +--~ [1 ] = "hyphensToEmDashOffSelector", +--~ [2 ] = "hyphenToEnDashOnSelector", +--~ [3 ] = "hyphenToEnDashOffSelector", +--~ [4 ] = "unslashedZeroOnSelector", +--~ [5 ] = "unslashedZeroOffSelector", +--~ [6 ] = "formInterrobangOnSelector", +--~ [7 ] = "formInterrobangOffSelector", +--~ [8 ] = "smartQuotesOnSelector", +--~ [9 ] = "smartQuotesOffSelector", +--~ [10] = 
"periodsToEllipsisOnSelector", +--~ [11] = "periodsToEllipsisOffSelector", +--~ }, +--~ [15] = { +--~ name = "mathematicalExtrasType", +--~ [ 0] = "hyphenToMinusOnSelector", +--~ [ 1] = "hyphenToMinusOffSelector", +--~ [ 2] = "asteriskToMultiplyOnSelector", +--~ [ 3] = "asteriskToMultiplyOffSelector", +--~ [ 4] = "slashToDivideOnSelector", +--~ [ 5] = "slashToDivideOffSelector", +--~ [ 6] = "inequalityLigaturesOnSelector", +--~ [ 7] = "inequalityLigaturesOffSelector", +--~ [ 8] = "exponentsOnSelector", +--~ [ 9] = "exponentsOffSelector", +--~ }, +--~ [16] = { +--~ name = "ornamentSetsType", +--~ [ 0] = "noOrnamentsSelector", +--~ [ 1] = "dingbatsSelector ", +--~ [ 2] = "piCharactersSelector", +--~ [ 3] = "fleuronsSelector ", +--~ [ 4] = "decorativeBordersSelector", +--~ [ 5] = "internationalSymbolsSelector", +--~ [ 6] = "mathSymbolsSelector", +--~ }, +--~ [17] = { +--~ name = "characterAlternativesType", +--~ [ 0] = "noAlternatesSelector", +--~ }, +--~ [18] = { +--~ name = "designComplexityType", +--~ [ 0] = "designLevel1Selector", +--~ [ 1] = "designLevel2Selector", +--~ [ 2] = "designLevel3Selector", +--~ [ 3] = "designLevel4Selector", +--~ [ 4] = "designLevel5Selector", +--~ }, +--~ [19] = { +--~ name = "styleOptionsType", +--~ [ 0] = "noStyleOptionsSelector", +--~ [ 1] = "displayTextSelector", +--~ [ 2] = "engravedTextSelector", +--~ [ 3] = "illuminatedCapsSelector", +--~ [ 4] = "titlingCapsSelector", +--~ [ 5] = "tallCapsSelector ", +--~ }, +--~ [20] = { +--~ name = "characterShapeType", +--~ [0 ] = "traditionalCharactersSelector", +--~ [1 ] = "simplifiedCharactersSelector", +--~ [2 ] = "jis1978CharactersSelector", +--~ [3 ] = "jis1983CharactersSelector", +--~ [4 ] = "jis1990CharactersSelector", +--~ [5 ] = "traditionalAltOneSelector", +--~ [6 ] = "traditionalAltTwoSelector", +--~ [7 ] = "traditionalAltThreeSelector", +--~ [8 ] = "traditionalAltFourSelector", +--~ [9 ] = "traditionalAltFiveSelector", +--~ [10] = "expertCharactersSelector", +--~ }, +--~ [21] = { +--~ name = "numberCaseType", +--~ [ 0] = "lowerCaseNumbersSelector", +--~ [ 1] = "upperCaseNumbersSelector", +--~ }, +--~ [22] = { +--~ name = "textSpacingType", +--~ [ 0] = "proportionalTextSelector", +--~ [ 1] = "monospacedTextSelector", +--~ [ 2] = "halfWidthTextSelector", +--~ [ 3] = "normallySpacedTextSelector", +--~ }, +--~ [23] = { +--~ name = "transliterationType", +--~ [ 0] = "noTransliterationSelector", +--~ [ 1] = "hanjaToHangulSelector", +--~ [ 2] = "hiraganaToKatakanaSelector", +--~ [ 3] = "katakanaToHiraganaSelector", +--~ [ 4] = "kanaToRomanizationSelector", +--~ [ 5] = "romanizationToHiraganaSelector", +--~ [ 6] = "romanizationToKatakanaSelector", +--~ [ 7] = "hanjaToHangulAltOneSelector", +--~ [ 8] = "hanjaToHangulAltTwoSelector", +--~ [ 9] = "hanjaToHangulAltThreeSelector", +--~ }, +--~ [24] = { +--~ name = "annotationType", +--~ [ 0] = "noAnnotationSelector", +--~ [ 1] = "boxAnnotationSelector", +--~ [ 2] = "roundedBoxAnnotationSelector", +--~ [ 3] = "circleAnnotationSelector", +--~ [ 4] = "invertedCircleAnnotationSelector", +--~ [ 5] = "parenthesisAnnotationSelector", +--~ [ 6] = "periodAnnotationSelector", +--~ [ 7] = "romanNumeralAnnotationSelector", +--~ [ 8] = "diamondAnnotationSelector", +--~ }, +--~ [25] = { +--~ name = "kanaSpacingType", +--~ [ 0] = "fullWidthKanaSelector", +--~ [ 1] = "proportionalKanaSelector", +--~ }, +--~ [26] = { +--~ name = "ideographicSpacingType", +--~ [ 0] = "fullWidthIdeographsSelector", +--~ [ 1] = "proportionalIdeographsSelector", +--~ }, +--~ [103] = { +--~ name = 
"cjkRomanSpacingType", +--~ [ 0] = "halfWidthCJKRomanSelector", +--~ [ 1] = "proportionalCJKRomanSelector", +--~ [ 2] = "defaultCJKRomanSelector", +--~ [ 3] = "fullWidthCJKRomanSelector", +--~ }, +--~ } diff --git a/tex/context/base/font-sol.lua b/tex/context/base/font-sol.lua index db2dd24c2..b37ab8869 100644 --- a/tex/context/base/font-sol.lua +++ b/tex/context/base/font-sol.lua @@ -1,884 +1,884 @@ -if not modules then modules = { } end modules ['font-sol'] = { -- this was: node-spl - version = 1.001, - comment = "companion to font-sol.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This module is dedicated to the oriental tex project and for --- the moment is too experimental to be publicly supported. --- --- We could cache solutions: say that we store the featureset and --- all 'words' -> replacement ... so we create a large solution --- database (per font) --- --- This module can be optimized by using a dedicated dynamics handler --- but I'll only do that when the rest of the code is stable. --- --- Todo: bind setups to paragraph. - -local gmatch, concat, format, remove = string.gmatch, table.concat, string.format, table.remove -local next, tostring, tonumber = next, tostring, tonumber -local insert, remove = table.insert, table.remove -local utfchar = utf.char -local random = math.random - -local utilities, logs, statistics, fonts, trackers = utilities, logs, statistics, fonts, trackers -local interfaces, commands, attributes = interfaces, commands, attributes -local nodes, node, tex = nodes, node, tex - -local trace_split = false trackers.register("builders.paragraphs.solutions.splitters.splitter", function(v) trace_split = v end) -local trace_optimize = false trackers.register("builders.paragraphs.solutions.splitters.optimizer", function(v) trace_optimize = v end) -local trace_colors = false trackers.register("builders.paragraphs.solutions.splitters.colors", function(v) trace_colors = v end) -local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end) - -local report_solutions = logs.reporter("fonts","solutions") -local report_splitters = logs.reporter("fonts","splitters") -local report_optimizers = logs.reporter("fonts","optimizers") - -local variables = interfaces.variables - -local v_normal = variables.normal -local v_reverse = variables.reverse -local v_preroll = variables.preroll -local v_random = variables.random -local v_split = variables.split - -local settings_to_array = utilities.parsers.settings_to_array -local settings_to_hash = utilities.parsers.settings_to_hash - -local find_node_tail = node.tail or node.slide -local free_node = node.free -local free_nodelist = node.flush_list -local copy_nodelist = node.copy_list -local traverse_nodes = node.traverse -local traverse_ids = node.traverse_id -local protect_glyphs = nodes.handlers.protectglyphs or node.protect_glyphs -local hpack_nodes = node.hpack -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after -local repack_hlist = nodes.repackhlist -local nodes_to_utf = nodes.listtoutf - -local setnodecolor = nodes.tracers.colors.set - -local nodecodes = nodes.nodecodes -local whatsitcodes = nodes.whatsitcodes -local kerncodes = nodes.kerncodes - -local glyph_code = nodecodes.glyph -local disc_code = nodecodes.disc -local kern_code = nodecodes.kern -local hlist_code = nodecodes.hlist -local whatsit_code = nodecodes.whatsit - -local 
fontkern_code = kerncodes.fontkern - -local localpar_code = whatsitcodes.localpar -local dir_code = whatsitcodes.dir -local userdefined_code = whatsitcodes.userdefined - -local nodepool = nodes.pool -local tasks = nodes.tasks -local usernodeids = nodepool.userids - -local new_textdir = nodepool.textdir -local new_usernumber = nodepool.usernumber -local new_glue = nodepool.glue -local new_leftskip = nodepool.leftskip - -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming -local process_characters = nodes.handlers.characters -local inject_kerns = nodes.injections.handler - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers -local setfontdynamics = fonthashes.setdynamics -local fontprocesses = fonthashes.processes - -local texsetattribute = tex.setattribute -local unsetvalue = attributes.unsetvalue - -local parbuilders = builders.paragraphs -parbuilders.solutions = parbuilders.solutions or { } -local parsolutions = parbuilders.solutions -parsolutions.splitters = parsolutions.splitters or { } -local splitters = parsolutions.splitters - -local solutions = { } -- attribute sets -local registered = { } -- backmapping -splitters.registered = registered - -local a_split = attributes.private('splitter') - -local preroll = true -local criterium = 0 -local randomseed = nil -local optimize = nil -- set later -local variant = v_normal -local splitwords = true - -local cache = { } -local variants = { } -local max_less = 0 -local max_more = 0 - -local stack = { } - -local dummy = { - attribute = unsetvalue, - randomseed = 0, - criterium = 0, - preroll = false, - optimize = nil, - splitwords = false, - variant = v_normal, -} - -local function checksettings(r,settings) - local s = r.settings - local method = settings_to_hash(settings.method or "") - local optimize, preroll, splitwords - for k, v in next, method do - if k == v_preroll then - preroll = true - elseif k == v_split then - splitwords = true - elseif variants[k] then - variant = k - optimize = variants[k] -- last one wins - end - end - r.randomseed = tonumber(settings.randomseed) or s.randomseed or r.randomseed or 0 - r.criterium = tonumber(settings.criterium ) or s.criterium or r.criterium or 0 - r.preroll = preroll or false - r.splitwords = splitwords or false - r.optimize = optimize or s.optimize or r.optimize or variants[v_normal] -end - -local function pushsplitter(name,settings) - local r = name and registered[name] - if r then - if settings then - checksettings(r,settings) - end - else - r = dummy - end - insert(stack,r) - -- brr - randomseed = r.randomseed or 0 - criterium = r.criterium or 0 - preroll = r.preroll or false - optimize = r.optimize or nil - splitwords = r.splitwords or nil - -- - texsetattribute(a_split,r.attribute) - return #stack -end - -local function popsplitter() - remove(stack) - local n = #stack - local r = stack[n] or dummy - -- - randomseed = r.randomseed or 0 - criterium = r.criterium or 0 - preroll = r.preroll or false - optimize = r.optimize or nil - -- - texsetattribute(a_split,r.attribute) - return n -end - -local contextsetups = fonts.specifiers.contextsetups - -local function convert(featuresets,name,list) - if list then - local numbers = { } - local nofnumbers = 0 - for i=1,#list do - local feature = list[i] - local fs = featuresets[feature] - local fn = fs and fs.number - if not fn then - -- fall back on global features - fs = contextsetups[feature] - fn = fs and fs.number - end - if fn then - nofnumbers = nofnumbers + 1 - numbers[nofnumbers] = fn - if 
trace_goodies or trace_optimize then - report_solutions("solution %a of %a uses feature %a with number %s",i,name,feature,fn) - end - else - report_solutions("solution %a of %a has an invalid feature reference %a",i,name,feature) - end - end - return nofnumbers > 0 and numbers - end -end - -local function initialize(goodies) - local solutions = goodies.solutions - if solutions then - local featuresets = goodies.featuresets - local goodiesname = goodies.name - if trace_goodies or trace_optimize then - report_solutions("checking solutions in %a",goodiesname) - end - for name, set in next, solutions do - set.less = convert(featuresets,name,set.less) - set.more = convert(featuresets,name,set.more) - end - end -end - -fonts.goodies.register("solutions",initialize) - -function splitters.define(name,settings) - local goodies = settings.goodies - local solution = settings.solution - local less = settings.less - local more = settings.more - local less_set, more_set - local l = less and settings_to_array(less) - local m = more and settings_to_array(more) - if goodies then - goodies = fonts.goodies.load(goodies) -- also in tfmdata - if goodies then - local featuresets = goodies.featuresets - local solution = solution and goodies.solutions[solution] - if l and #l > 0 then - less_set = convert(featuresets,name,less) -- take from settings - else - less_set = solution and solution.less -- take from goodies - end - if m and #m > 0 then - more_set = convert(featuresets,name,more) -- take from settings - else - more_set = solution and solution.more -- take from goodies - end - end - else - if l then - local n = #less_set - for i=1,#l do - local ss = contextsetups[l[i]] - if ss then - n = n + 1 - less_set[n] = ss.number - end - end - end - if m then - local n = #more_set - for i=1,#m do - local ss = contextsetups[m[i]] - if ss then - n = n + 1 - more_set[n] = ss.number - end - end - end - end - if trace_optimize then - report_solutions("defining solutions %a, less %a, more %a",name,concat(less_set or {}," "),concat(more_set or {}," ")) - end - local nofsolutions = #solutions + 1 - local t = { - solution = solution, - less = less_set or { }, - more = more_set or { }, - settings = settings, -- for tracing - attribute = nofsolutions, - } - solutions[nofsolutions] = t - registered[name] = t - return nofsolutions -end - -local nofwords, noftries, nofadapted, nofkept, nofparagraphs = 0, 0, 0, 0, 0 - -local splitter_one = usernodeids["splitters.one"] -local splitter_two = usernodeids["splitters.two"] - -local a_word = attributes.private('word') -local a_fontkern = attributes.private('fontkern') - -local encapsulate = false - -directives.register("builders.paragraphs.solutions.splitters.encapsulate", function(v) - encapsulate = v -end) - -function splitters.split(head) - -- quite fast - local current, done, rlmode, start, stop, attribute = head, false, false, nil, nil, 0 - cache, max_less, max_more = { }, 0, 0 - local function flush() -- we can move this - local font = start.font - local last = stop.next - local list = last and copy_nodelist(start,last) or copy_nodelist(start) - local n = #cache + 1 - if encapsulate then - local user_one = new_usernumber(splitter_one,n) - local user_two = new_usernumber(splitter_two,n) - head, start = insert_node_before(head,start,user_one) - insert_node_after(head,stop,user_two) - else - local current = start - while true do - current[a_word] = n - if current == stop then - break - else - current = current.next - end - end - end - if rlmode == "TRT" or rlmode == "+TRT" then - local 
dirnode = new_textdir("+TRT") - list.prev = dirnode - dirnode.next = list - list = dirnode - end - local c = { - original = list, - attribute = attribute, - direction = rlmode, - font = font - } - if trace_split then - report_splitters("cached %4i: font %a, attribute %a, direction %a, word %a", - n, font, attribute, nodes_to_utf(list,true), rlmode and "r2l" or "l2r") - end - cache[n] = c - local solution = solutions[attribute] - local l, m = #solution.less, #solution.more - if l > max_less then max_less = l end - if m > max_more then max_more = m end - start, stop, done = nil, nil, true - end - while current do -- also nextid - local next = current.next - local id = current.id - if id == glyph_code then - if current.subtype < 256 then - local a = current[a_split] - if not a then - start, stop = nil, nil - elseif not start then - start, stop, attribute = current, current, a - elseif a ~= attribute then - start, stop = nil, nil - else - stop = current - end - end - elseif id == disc_code then - if splitwords then - if start then - flush() - end - elseif start and next and next.id == glyph_code and next.subtype < 256 then - -- beware: we can cross future lines - stop = next - else - start, stop = nil, nil - end - elseif id == whatsit_code then - if start then - flush() - end - local subtype = current.subtype - if subtype == dir_code or subtype == localpar_code then - rlmode = current.dir - end - else - if start then - flush() - end - end - current = next - end - if start then - flush() - end - nofparagraphs = nofparagraphs + 1 - nofwords = nofwords + #cache - return head, done -end - -local function collect_words(list) -- can be made faster for attributes - local words, w, word = { }, 0, nil - if encapsulate then - for current in traverse_ids(whatsit_code,list) do - if current.subtype == userdefined_code then -- hm - local user_id = current.user_id - if user_id == splitter_one then - word = { current.value, current, current } - w = w + 1 - words[w] = word - elseif user_id == splitter_two then - if word then - word[3] = current - else - -- something is wrong - end - end - end - end - else - local current, first, last, index = list, nil, nil, nil - while current do - -- todo: disc and kern - local id = current.id - if id == glyph_code or id == disc_code then - local a = current[a_word] - if a then - if a == index then - -- same word - last = current - elseif index then - w = w + 1 - words[w] = { index, first, last } - first = current - last = current - index = a - elseif first then - last = current - index = a - else - first = current - last = current - index = a - end - elseif index then - if first then - w = w + 1 - words[w] = { index, first, last } - end - index = nil - first = nil - elseif trace_split then - if id == disc_code then - report_splitters("skipped: disc node") - else - report_splitters("skipped: %C",current.char) - end - end - elseif id == kern_code and (current.subtype == fontkern_code or current[a_fontkern]) then - if first then - last = current - else - first = current - last = current - end - elseif index then - w = w + 1 - words[w] = { index, first, last } - index = nil - first = nil - if id == disc_node then - if trace_split then - report_splitters("skipped: disc node") - end - end - end - current = current.next - end - if index then - w = w + 1 - words[w] = { index, first, last } - end - if trace_split then - for i=1,#words do - local w = words[i] - local n, f, l = w[1], w[2], w[3] - local c = cache[n] - if c then - report_splitters("found %4i: word %a, cached 
%a",n,nodes_to_utf(f,true,true,l),nodes_to_utf(c.original,true)) - else - report_splitters("found %4i: word %a, not in cache",n,nodes_to_utf(f,true,true,l)) - end - end - end - end - return words, list -- check for empty (elsewhere) -end - --- we could avoid a hpack but hpack is not that slow - -local function doit(word,list,best,width,badness,line,set,listdir) - local changed = 0 - local n = word[1] - local found = cache[n] - if found then - local h, t - if encapsulate then - h = word[2].next -- head of current word - t = word[3].prev -- tail of current word - else - h = word[2] - t = word[3] - end - if splitwords then - -- there are no lines crossed in a word - else - local ok = false - local c = h - while c do - if c == t then - ok = true - break - else - c = c.next - end - end - if not ok then - report_solutions("skipping hyphenated word (for now)") - -- todo: mark in words as skipped, saves a bit runtime - return false, changed - end - end - local original, attribute, direction = found.original, found.attribute, found.direction - local solution = solutions[attribute] - local features = solution and solution[set] - if features then - local featurenumber = features[best] -- not ok probably - if featurenumber then - noftries = noftries + 1 - local first = copy_nodelist(original) - if not trace_colors then - for n in traverse_nodes(first) do -- maybe fast force so no attr needed - n[0] = featurenumber -- this forces dynamics - end - elseif set == "less" then - for n in traverse_nodes(first) do - setnodecolor(n,"font:isol") -- yellow - n[0] = featurenumber - end - else - for n in traverse_nodes(first) do - setnodecolor(n,"font:medi") -- green - n[0] = featurenumber - end - end - local font = found.font - local setdynamics = setfontdynamics[font] - if setdynamics then - local processes = setdynamics(font,featurenumber) - for i=1,#processes do -- often more than 1 - first = processes[i](first,font,featurenumber) - end - else - report_solutions("fatal error, no dynamics for font %a",font) - end - first = inject_kerns(first) - if first.id == whatsit_code then - local temp = first - first = first.next - free_node(temp) - end - local last = find_node_tail(first) - -- replace [u]h->t by [u]first->last - local prev = h.prev - local next = t.next - prev.next = first - first.prev = prev - if next then - last.next = next - next.prev = last - end - -- check new pack - local temp, b = repack_hlist(list,width,'exactly',listdir) - if b > badness then - if trace_optimize then - report_optimizers("line %a, badness before %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"quit") - end - -- remove last insert - prev.next = h - h.prev = prev - if next then - t.next = next - next.prev = t - else - t.next = nil - end - last.next = nil - free_nodelist(first) - else - if trace_optimize then - report_optimizers("line %a, badness before: %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"continue") - end - -- free old h->t - t.next = nil - free_nodelist(h) -- somhow fails - if not encapsulate then - word[2] = first - word[3] = last - end - changed, badness = changed + 1, b - end - if b <= criterium then - return true, changed - end - end - end - end - return false, changed -end - --- We repeat some code but adding yet another layer of indirectness is not --- making things better. 
- -variants[v_normal] = function(words,list,best,width,badness,line,set,listdir) - local changed = 0 - for i=1,#words do - local done, c = doit(words[i],list,best,width,badness,line,set,listdir) - changed = changed + c - if done then - break - end - end - if changed > 0 then - nofadapted = nofadapted + 1 - -- todo: get rid of pack when ok because we already have packed and we only need the last b - local list, b = repack_hlist(list,width,'exactly',listdir) - return list, true, changed, b -- badness - else - nofkept = nofkept + 1 - return list, false, 0, badness - end -end - -variants[v_reverse] = function(words,list,best,width,badness,line,set,listdir) - local changed = 0 - for i=#words,1,-1 do - local done, c = doit(words[i],list,best,width,badness,line,set,listdir) - changed = changed + c - if done then - break - end - end - if changed > 0 then - nofadapted = nofadapted + 1 - -- todo: get rid of pack when ok because we already have packed and we only need the last b - local list, b = repack_hlist(list,width,'exactly',listdir) - return list, true, changed, b -- badness - else - nofkept = nofkept + 1 - return list, false, 0, badness - end -end - -variants[v_random] = function(words,list,best,width,badness,line,set,listdir) - local changed = 0 - while #words > 0 do - local done, c = doit(remove(words,random(1,#words)),list,best,width,badness,line,set,listdir) - changed = changed + c - if done then - break - end - end - if changed > 0 then - nofadapted = nofadapted + 1 - -- todo: get rid of pack when ok because we already have packed and we only need the last b - local list, b = repack_hlist(list,width,'exactly',listdir) - return list, true, changed, b -- badness - else - nofkept = nofkept + 1 - return list, false, 0, badness - end -end - -local function show_quality(current,what,line) - local set = current.glue_set - local sign = current.glue_sign - local order = current.glue_order - local amount = set * ((sign == 2 and -1) or 1) - report_optimizers("line %a, category %a, amount %a, set %a, sign %a, how %a, order %a",line,what,amount,set,sign,how,order) -end - -function splitters.optimize(head) - if not optimize then - report_optimizers("no optimizer set") - return - end - local nc = #cache - if nc == 0 then - return - end - starttiming(splitters) - local listdir = nil -- todo ! ! ! - if randomseed then - math.setrandomseedi(randomseed) - randomseed = nil - end - local line = 0 - local tex_hbadness, tex_hfuzz = tex.hbadness, tex.hfuzz - tex.hbadness, tex.hfuzz = 10000, number.maxdimen - if trace_optimize then - report_optimizers("preroll %a, variant %a, criterium %a, cache size %a",preroll,variant,criterium,nc) - end - for current in traverse_ids(hlist_code,head) do - -- report_splitters("before: [%s] => %s",current.dir,nodes.tosequence(current.list,nil)) - line = line + 1 - local sign, dir, list, width = current.glue_sign, current.dir, current.list, current.width - if not encapsulate and list.id == glyph_code then - -- nasty .. we always assume a prev being there .. 
future luatex will always have a leftskip set - -- current.list, list = insert_node_before(list,list,new_glue(0)) - current.list, list = insert_node_before(list,list,new_leftskip(0)) - end - local temp, badness = repack_hlist(list,width,'exactly',dir) -- it would be nice if the badness was stored in the node - if badness > 0 then - if sign == 0 then - if trace_optimize then - report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"okay","okay") - end - else - local set, max - if sign == 1 then - if trace_optimize then - report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"underfull","trying more") - end - set, max = "more", max_more - else - if trace_optimize then - report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"overfull","trying less") - end - set, max = "less", max_less - end - -- we can keep the best variants - local lastbest, lastbadness = nil, badness - if preroll then - local bb, base - for i=1,max do - if base then - free_nodelist(base) - end - base = copy_nodelist(list) - local words = collect_words(base) -- beware: words is adapted - for j=i,max do - local temp, done, changes, b = optimize(words,base,j,width,badness,line,set,dir) - base = temp - if trace_optimize then - report_optimizers("line %a, alternative %a.%a, changes %a, badness %a",line,i,j,changes,b) - end - bb = b - if b <= criterium then - break - end - -- if done then - -- break - -- end - end - if bb and bb > criterium then -- needs checking - if not lastbest then - lastbest, lastbadness = i, bb - elseif bb > lastbadness then - lastbest, lastbadness = i, bb - end - else - break - end - end - free_nodelist(base) - end - local words = collect_words(list) - for best=lastbest or 1,max do - local temp, done, changes, b = optimize(words,list,best,width,badness,line,set,dir) - current.list = temp - if trace_optimize then - report_optimizers("line %a, alternative %a, changes %a, badness %a",line,best,changes,b) - end - if done then - if b <= criterium then -- was == 0 - protect_glyphs(list) - break - end - end - end - end - else - if trace_optimize then - report_optimizers("line %a, verdict %a",line,"not bad enough") - end - end - -- we pack inside the outer hpack and that way keep the original wd/ht/dp as bonus - current.list = hpack_nodes(current.list,width,'exactly',listdir) - -- report_splitters("after: [%s] => %s",temp.dir,nodes.tosequence(temp.list,nil)) - end - for i=1,nc do - local ci = cache[i] - free_nodelist(ci.original) - end - cache = { } - tex.hbadness, tex.hfuzz = tex_hbadness, tex_hfuzz - stoptiming(splitters) -end - -statistics.register("optimizer statistics", function() - if nofwords > 0 then - local elapsed = statistics.elapsedtime(splitters) - local average = noftries/elapsed - return format("%s words identified in %s paragraphs, %s words retried, %s lines tried, %0.3f seconds used, %s adapted, %0.1f lines per second", - nofwords,nofparagraphs,noftries,nofadapted+nofkept,elapsed,nofadapted,average) - end -end) - --- we could use a stack - -local enableaction = tasks.enableaction -local disableaction = tasks.disableaction - -local function enable() - enableaction("processors", "builders.paragraphs.solutions.splitters.split") - enableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize") -end - -local function disable() - disableaction("processors", "builders.paragraphs.solutions.splitters.split") - disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize") -end - -function 
splitters.start(name,settings) - if pushsplitter(name,settings) == 1 then - enable() - end -end - -function splitters.stop() - if popsplitter() == 0 then - disable() - end -end - -function splitters.set(name,settings) - if #stack > 0 then - stack = { } - else - enable() - end - pushsplitter(name,settings) -- sets attribute etc -end - -function splitters.reset() - if #stack > 0 then - stack = { } - popsplitter() -- resets attribute etc - disable() - end -end - --- interface - -commands.definefontsolution = splitters.define -commands.startfontsolution = splitters.start -commands.stopfontsolution = splitters.stop -commands.setfontsolution = splitters.set -commands.resetfontsolution = splitters.reset +if not modules then modules = { } end modules ['font-sol'] = { -- this was: node-spl + version = 1.001, + comment = "companion to font-sol.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This module is dedicated to the oriental tex project and for +-- the moment is too experimental to be publicly supported. +-- +-- We could cache solutions: say that we store the featureset and +-- all 'words' -> replacement ... so we create a large solution +-- database (per font) +-- +-- This module can be optimized by using a dedicated dynamics handler +-- but I'll only do that when the rest of the code is stable. +-- +-- Todo: bind setups to paragraph. + +local gmatch, concat, format, remove = string.gmatch, table.concat, string.format, table.remove +local next, tostring, tonumber = next, tostring, tonumber +local insert, remove = table.insert, table.remove +local utfchar = utf.char +local random = math.random + +local utilities, logs, statistics, fonts, trackers = utilities, logs, statistics, fonts, trackers +local interfaces, commands, attributes = interfaces, commands, attributes +local nodes, node, tex = nodes, node, tex + +local trace_split = false trackers.register("builders.paragraphs.solutions.splitters.splitter", function(v) trace_split = v end) +local trace_optimize = false trackers.register("builders.paragraphs.solutions.splitters.optimizer", function(v) trace_optimize = v end) +local trace_colors = false trackers.register("builders.paragraphs.solutions.splitters.colors", function(v) trace_colors = v end) +local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end) + +local report_solutions = logs.reporter("fonts","solutions") +local report_splitters = logs.reporter("fonts","splitters") +local report_optimizers = logs.reporter("fonts","optimizers") + +local variables = interfaces.variables + +local v_normal = variables.normal +local v_reverse = variables.reverse +local v_preroll = variables.preroll +local v_random = variables.random +local v_split = variables.split + +local settings_to_array = utilities.parsers.settings_to_array +local settings_to_hash = utilities.parsers.settings_to_hash + +local find_node_tail = node.tail or node.slide +local free_node = node.free +local free_nodelist = node.flush_list +local copy_nodelist = node.copy_list +local traverse_nodes = node.traverse +local traverse_ids = node.traverse_id +local protect_glyphs = nodes.handlers.protectglyphs or node.protect_glyphs +local hpack_nodes = node.hpack +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after +local repack_hlist = nodes.repackhlist +local nodes_to_utf = nodes.listtoutf + +local setnodecolor = nodes.tracers.colors.set + 
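-- A minimal usage sketch of the solution interface this module provides
-- (splitters.define / splitters.start / splitters.stop); the solution name
-- "demo", the goodies file "whatever" and the featureset names "compressed"
-- and "expanded" are hypothetical and would have to exist in that goodies file.

local splitters = builders.paragraphs.solutions.splitters

splitters.define("demo", {
    goodies = "whatever",    -- hypothetical goodies (lfg) file
    less    = "compressed",  -- featureset tried on overfull lines
    more    = "expanded",    -- featureset tried on underfull lines
})

splitters.start("demo", { method = "preroll", criterium = 100 }) -- 100 is an arbitrary badness threshold
-- ... typeset a paragraph ...
splitters.stop()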
+local nodecodes = nodes.nodecodes +local whatsitcodes = nodes.whatsitcodes +local kerncodes = nodes.kerncodes + +local glyph_code = nodecodes.glyph +local disc_code = nodecodes.disc +local kern_code = nodecodes.kern +local hlist_code = nodecodes.hlist +local whatsit_code = nodecodes.whatsit + +local fontkern_code = kerncodes.fontkern + +local localpar_code = whatsitcodes.localpar +local dir_code = whatsitcodes.dir +local userdefined_code = whatsitcodes.userdefined + +local nodepool = nodes.pool +local tasks = nodes.tasks +local usernodeids = nodepool.userids + +local new_textdir = nodepool.textdir +local new_usernumber = nodepool.usernumber +local new_glue = nodepool.glue +local new_leftskip = nodepool.leftskip + +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming +local process_characters = nodes.handlers.characters +local inject_kerns = nodes.injections.handler + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local setfontdynamics = fonthashes.setdynamics +local fontprocesses = fonthashes.processes + +local texsetattribute = tex.setattribute +local unsetvalue = attributes.unsetvalue + +local parbuilders = builders.paragraphs +parbuilders.solutions = parbuilders.solutions or { } +local parsolutions = parbuilders.solutions +parsolutions.splitters = parsolutions.splitters or { } +local splitters = parsolutions.splitters + +local solutions = { } -- attribute sets +local registered = { } -- backmapping +splitters.registered = registered + +local a_split = attributes.private('splitter') + +local preroll = true +local criterium = 0 +local randomseed = nil +local optimize = nil -- set later +local variant = v_normal +local splitwords = true + +local cache = { } +local variants = { } +local max_less = 0 +local max_more = 0 + +local stack = { } + +local dummy = { + attribute = unsetvalue, + randomseed = 0, + criterium = 0, + preroll = false, + optimize = nil, + splitwords = false, + variant = v_normal, +} + +local function checksettings(r,settings) + local s = r.settings + local method = settings_to_hash(settings.method or "") + local optimize, preroll, splitwords + for k, v in next, method do + if k == v_preroll then + preroll = true + elseif k == v_split then + splitwords = true + elseif variants[k] then + variant = k + optimize = variants[k] -- last one wins + end + end + r.randomseed = tonumber(settings.randomseed) or s.randomseed or r.randomseed or 0 + r.criterium = tonumber(settings.criterium ) or s.criterium or r.criterium or 0 + r.preroll = preroll or false + r.splitwords = splitwords or false + r.optimize = optimize or s.optimize or r.optimize or variants[v_normal] +end + +local function pushsplitter(name,settings) + local r = name and registered[name] + if r then + if settings then + checksettings(r,settings) + end + else + r = dummy + end + insert(stack,r) + -- brr + randomseed = r.randomseed or 0 + criterium = r.criterium or 0 + preroll = r.preroll or false + optimize = r.optimize or nil + splitwords = r.splitwords or nil + -- + texsetattribute(a_split,r.attribute) + return #stack +end + +local function popsplitter() + remove(stack) + local n = #stack + local r = stack[n] or dummy + -- + randomseed = r.randomseed or 0 + criterium = r.criterium or 0 + preroll = r.preroll or false + optimize = r.optimize or nil + -- + texsetattribute(a_split,r.attribute) + return n +end + +local contextsetups = fonts.specifiers.contextsetups + +local function convert(featuresets,name,list) + if list then + local numbers = { } + local 
nofnumbers = 0 + for i=1,#list do + local feature = list[i] + local fs = featuresets[feature] + local fn = fs and fs.number + if not fn then + -- fall back on global features + fs = contextsetups[feature] + fn = fs and fs.number + end + if fn then + nofnumbers = nofnumbers + 1 + numbers[nofnumbers] = fn + if trace_goodies or trace_optimize then + report_solutions("solution %a of %a uses feature %a with number %s",i,name,feature,fn) + end + else + report_solutions("solution %a of %a has an invalid feature reference %a",i,name,feature) + end + end + return nofnumbers > 0 and numbers + end +end + +local function initialize(goodies) + local solutions = goodies.solutions + if solutions then + local featuresets = goodies.featuresets + local goodiesname = goodies.name + if trace_goodies or trace_optimize then + report_solutions("checking solutions in %a",goodiesname) + end + for name, set in next, solutions do + set.less = convert(featuresets,name,set.less) + set.more = convert(featuresets,name,set.more) + end + end +end + +fonts.goodies.register("solutions",initialize) + +function splitters.define(name,settings) + local goodies = settings.goodies + local solution = settings.solution + local less = settings.less + local more = settings.more + local less_set, more_set + local l = less and settings_to_array(less) + local m = more and settings_to_array(more) + if goodies then + goodies = fonts.goodies.load(goodies) -- also in tfmdata + if goodies then + local featuresets = goodies.featuresets + local solution = solution and goodies.solutions[solution] + if l and #l > 0 then + less_set = convert(featuresets,name,less) -- take from settings + else + less_set = solution and solution.less -- take from goodies + end + if m and #m > 0 then + more_set = convert(featuresets,name,more) -- take from settings + else + more_set = solution and solution.more -- take from goodies + end + end + else + if l then + local n = #less_set + for i=1,#l do + local ss = contextsetups[l[i]] + if ss then + n = n + 1 + less_set[n] = ss.number + end + end + end + if m then + local n = #more_set + for i=1,#m do + local ss = contextsetups[m[i]] + if ss then + n = n + 1 + more_set[n] = ss.number + end + end + end + end + if trace_optimize then + report_solutions("defining solutions %a, less %a, more %a",name,concat(less_set or {}," "),concat(more_set or {}," ")) + end + local nofsolutions = #solutions + 1 + local t = { + solution = solution, + less = less_set or { }, + more = more_set or { }, + settings = settings, -- for tracing + attribute = nofsolutions, + } + solutions[nofsolutions] = t + registered[name] = t + return nofsolutions +end + +local nofwords, noftries, nofadapted, nofkept, nofparagraphs = 0, 0, 0, 0, 0 + +local splitter_one = usernodeids["splitters.one"] +local splitter_two = usernodeids["splitters.two"] + +local a_word = attributes.private('word') +local a_fontkern = attributes.private('fontkern') + +local encapsulate = false + +directives.register("builders.paragraphs.solutions.splitters.encapsulate", function(v) + encapsulate = v +end) + +function splitters.split(head) + -- quite fast + local current, done, rlmode, start, stop, attribute = head, false, false, nil, nil, 0 + cache, max_less, max_more = { }, 0, 0 + local function flush() -- we can move this + local font = start.font + local last = stop.next + local list = last and copy_nodelist(start,last) or copy_nodelist(start) + local n = #cache + 1 + if encapsulate then + local user_one = new_usernumber(splitter_one,n) + local user_two = 
new_usernumber(splitter_two,n) + head, start = insert_node_before(head,start,user_one) + insert_node_after(head,stop,user_two) + else + local current = start + while true do + current[a_word] = n + if current == stop then + break + else + current = current.next + end + end + end + if rlmode == "TRT" or rlmode == "+TRT" then + local dirnode = new_textdir("+TRT") + list.prev = dirnode + dirnode.next = list + list = dirnode + end + local c = { + original = list, + attribute = attribute, + direction = rlmode, + font = font + } + if trace_split then + report_splitters("cached %4i: font %a, attribute %a, direction %a, word %a", + n, font, attribute, nodes_to_utf(list,true), rlmode and "r2l" or "l2r") + end + cache[n] = c + local solution = solutions[attribute] + local l, m = #solution.less, #solution.more + if l > max_less then max_less = l end + if m > max_more then max_more = m end + start, stop, done = nil, nil, true + end + while current do -- also nextid + local next = current.next + local id = current.id + if id == glyph_code then + if current.subtype < 256 then + local a = current[a_split] + if not a then + start, stop = nil, nil + elseif not start then + start, stop, attribute = current, current, a + elseif a ~= attribute then + start, stop = nil, nil + else + stop = current + end + end + elseif id == disc_code then + if splitwords then + if start then + flush() + end + elseif start and next and next.id == glyph_code and next.subtype < 256 then + -- beware: we can cross future lines + stop = next + else + start, stop = nil, nil + end + elseif id == whatsit_code then + if start then + flush() + end + local subtype = current.subtype + if subtype == dir_code or subtype == localpar_code then + rlmode = current.dir + end + else + if start then + flush() + end + end + current = next + end + if start then + flush() + end + nofparagraphs = nofparagraphs + 1 + nofwords = nofwords + #cache + return head, done +end + +local function collect_words(list) -- can be made faster for attributes + local words, w, word = { }, 0, nil + if encapsulate then + for current in traverse_ids(whatsit_code,list) do + if current.subtype == userdefined_code then -- hm + local user_id = current.user_id + if user_id == splitter_one then + word = { current.value, current, current } + w = w + 1 + words[w] = word + elseif user_id == splitter_two then + if word then + word[3] = current + else + -- something is wrong + end + end + end + end + else + local current, first, last, index = list, nil, nil, nil + while current do + -- todo: disc and kern + local id = current.id + if id == glyph_code or id == disc_code then + local a = current[a_word] + if a then + if a == index then + -- same word + last = current + elseif index then + w = w + 1 + words[w] = { index, first, last } + first = current + last = current + index = a + elseif first then + last = current + index = a + else + first = current + last = current + index = a + end + elseif index then + if first then + w = w + 1 + words[w] = { index, first, last } + end + index = nil + first = nil + elseif trace_split then + if id == disc_code then + report_splitters("skipped: disc node") + else + report_splitters("skipped: %C",current.char) + end + end + elseif id == kern_code and (current.subtype == fontkern_code or current[a_fontkern]) then + if first then + last = current + else + first = current + last = current + end + elseif index then + w = w + 1 + words[w] = { index, first, last } + index = nil + first = nil + if id == disc_node then + if trace_split then + 
report_splitters("skipped: disc node") + end + end + end + current = current.next + end + if index then + w = w + 1 + words[w] = { index, first, last } + end + if trace_split then + for i=1,#words do + local w = words[i] + local n, f, l = w[1], w[2], w[3] + local c = cache[n] + if c then + report_splitters("found %4i: word %a, cached %a",n,nodes_to_utf(f,true,true,l),nodes_to_utf(c.original,true)) + else + report_splitters("found %4i: word %a, not in cache",n,nodes_to_utf(f,true,true,l)) + end + end + end + end + return words, list -- check for empty (elsewhere) +end + +-- we could avoid a hpack but hpack is not that slow + +local function doit(word,list,best,width,badness,line,set,listdir) + local changed = 0 + local n = word[1] + local found = cache[n] + if found then + local h, t + if encapsulate then + h = word[2].next -- head of current word + t = word[3].prev -- tail of current word + else + h = word[2] + t = word[3] + end + if splitwords then + -- there are no lines crossed in a word + else + local ok = false + local c = h + while c do + if c == t then + ok = true + break + else + c = c.next + end + end + if not ok then + report_solutions("skipping hyphenated word (for now)") + -- todo: mark in words as skipped, saves a bit runtime + return false, changed + end + end + local original, attribute, direction = found.original, found.attribute, found.direction + local solution = solutions[attribute] + local features = solution and solution[set] + if features then + local featurenumber = features[best] -- not ok probably + if featurenumber then + noftries = noftries + 1 + local first = copy_nodelist(original) + if not trace_colors then + for n in traverse_nodes(first) do -- maybe fast force so no attr needed + n[0] = featurenumber -- this forces dynamics + end + elseif set == "less" then + for n in traverse_nodes(first) do + setnodecolor(n,"font:isol") -- yellow + n[0] = featurenumber + end + else + for n in traverse_nodes(first) do + setnodecolor(n,"font:medi") -- green + n[0] = featurenumber + end + end + local font = found.font + local setdynamics = setfontdynamics[font] + if setdynamics then + local processes = setdynamics(font,featurenumber) + for i=1,#processes do -- often more than 1 + first = processes[i](first,font,featurenumber) + end + else + report_solutions("fatal error, no dynamics for font %a",font) + end + first = inject_kerns(first) + if first.id == whatsit_code then + local temp = first + first = first.next + free_node(temp) + end + local last = find_node_tail(first) + -- replace [u]h->t by [u]first->last + local prev = h.prev + local next = t.next + prev.next = first + first.prev = prev + if next then + last.next = next + next.prev = last + end + -- check new pack + local temp, b = repack_hlist(list,width,'exactly',listdir) + if b > badness then + if trace_optimize then + report_optimizers("line %a, badness before %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"quit") + end + -- remove last insert + prev.next = h + h.prev = prev + if next then + t.next = next + next.prev = t + else + t.next = nil + end + last.next = nil + free_nodelist(first) + else + if trace_optimize then + report_optimizers("line %a, badness before: %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"continue") + end + -- free old h->t + t.next = nil + free_nodelist(h) -- somhow fails + if not encapsulate then + word[2] = first + word[3] = last + end + changed, badness = changed + 1, b + end + if b <= criterium then + return true, changed + end + end + end + end + 
return false, changed +end + +-- We repeat some code but adding yet another layer of indirectness is not +-- making things better. + +variants[v_normal] = function(words,list,best,width,badness,line,set,listdir) + local changed = 0 + for i=1,#words do + local done, c = doit(words[i],list,best,width,badness,line,set,listdir) + changed = changed + c + if done then + break + end + end + if changed > 0 then + nofadapted = nofadapted + 1 + -- todo: get rid of pack when ok because we already have packed and we only need the last b + local list, b = repack_hlist(list,width,'exactly',listdir) + return list, true, changed, b -- badness + else + nofkept = nofkept + 1 + return list, false, 0, badness + end +end + +variants[v_reverse] = function(words,list,best,width,badness,line,set,listdir) + local changed = 0 + for i=#words,1,-1 do + local done, c = doit(words[i],list,best,width,badness,line,set,listdir) + changed = changed + c + if done then + break + end + end + if changed > 0 then + nofadapted = nofadapted + 1 + -- todo: get rid of pack when ok because we already have packed and we only need the last b + local list, b = repack_hlist(list,width,'exactly',listdir) + return list, true, changed, b -- badness + else + nofkept = nofkept + 1 + return list, false, 0, badness + end +end + +variants[v_random] = function(words,list,best,width,badness,line,set,listdir) + local changed = 0 + while #words > 0 do + local done, c = doit(remove(words,random(1,#words)),list,best,width,badness,line,set,listdir) + changed = changed + c + if done then + break + end + end + if changed > 0 then + nofadapted = nofadapted + 1 + -- todo: get rid of pack when ok because we already have packed and we only need the last b + local list, b = repack_hlist(list,width,'exactly',listdir) + return list, true, changed, b -- badness + else + nofkept = nofkept + 1 + return list, false, 0, badness + end +end + +local function show_quality(current,what,line) + local set = current.glue_set + local sign = current.glue_sign + local order = current.glue_order + local amount = set * ((sign == 2 and -1) or 1) + report_optimizers("line %a, category %a, amount %a, set %a, sign %a, how %a, order %a",line,what,amount,set,sign,how,order) +end + +function splitters.optimize(head) + if not optimize then + report_optimizers("no optimizer set") + return + end + local nc = #cache + if nc == 0 then + return + end + starttiming(splitters) + local listdir = nil -- todo ! ! ! + if randomseed then + math.setrandomseedi(randomseed) + randomseed = nil + end + local line = 0 + local tex_hbadness, tex_hfuzz = tex.hbadness, tex.hfuzz + tex.hbadness, tex.hfuzz = 10000, number.maxdimen + if trace_optimize then + report_optimizers("preroll %a, variant %a, criterium %a, cache size %a",preroll,variant,criterium,nc) + end + for current in traverse_ids(hlist_code,head) do + -- report_splitters("before: [%s] => %s",current.dir,nodes.tosequence(current.list,nil)) + line = line + 1 + local sign, dir, list, width = current.glue_sign, current.dir, current.list, current.width + if not encapsulate and list.id == glyph_code then + -- nasty .. we always assume a prev being there .. 
future luatex will always have a leftskip set + -- current.list, list = insert_node_before(list,list,new_glue(0)) + current.list, list = insert_node_before(list,list,new_leftskip(0)) + end + local temp, badness = repack_hlist(list,width,'exactly',dir) -- it would be nice if the badness was stored in the node + if badness > 0 then + if sign == 0 then + if trace_optimize then + report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"okay","okay") + end + else + local set, max + if sign == 1 then + if trace_optimize then + report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"underfull","trying more") + end + set, max = "more", max_more + else + if trace_optimize then + report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"overfull","trying less") + end + set, max = "less", max_less + end + -- we can keep the best variants + local lastbest, lastbadness = nil, badness + if preroll then + local bb, base + for i=1,max do + if base then + free_nodelist(base) + end + base = copy_nodelist(list) + local words = collect_words(base) -- beware: words is adapted + for j=i,max do + local temp, done, changes, b = optimize(words,base,j,width,badness,line,set,dir) + base = temp + if trace_optimize then + report_optimizers("line %a, alternative %a.%a, changes %a, badness %a",line,i,j,changes,b) + end + bb = b + if b <= criterium then + break + end + -- if done then + -- break + -- end + end + if bb and bb > criterium then -- needs checking + if not lastbest then + lastbest, lastbadness = i, bb + elseif bb > lastbadness then + lastbest, lastbadness = i, bb + end + else + break + end + end + free_nodelist(base) + end + local words = collect_words(list) + for best=lastbest or 1,max do + local temp, done, changes, b = optimize(words,list,best,width,badness,line,set,dir) + current.list = temp + if trace_optimize then + report_optimizers("line %a, alternative %a, changes %a, badness %a",line,best,changes,b) + end + if done then + if b <= criterium then -- was == 0 + protect_glyphs(list) + break + end + end + end + end + else + if trace_optimize then + report_optimizers("line %a, verdict %a",line,"not bad enough") + end + end + -- we pack inside the outer hpack and that way keep the original wd/ht/dp as bonus + current.list = hpack_nodes(current.list,width,'exactly',listdir) + -- report_splitters("after: [%s] => %s",temp.dir,nodes.tosequence(temp.list,nil)) + end + for i=1,nc do + local ci = cache[i] + free_nodelist(ci.original) + end + cache = { } + tex.hbadness, tex.hfuzz = tex_hbadness, tex_hfuzz + stoptiming(splitters) +end + +statistics.register("optimizer statistics", function() + if nofwords > 0 then + local elapsed = statistics.elapsedtime(splitters) + local average = noftries/elapsed + return format("%s words identified in %s paragraphs, %s words retried, %s lines tried, %0.3f seconds used, %s adapted, %0.1f lines per second", + nofwords,nofparagraphs,noftries,nofadapted+nofkept,elapsed,nofadapted,average) + end +end) + +-- we could use a stack + +local enableaction = tasks.enableaction +local disableaction = tasks.disableaction + +local function enable() + enableaction("processors", "builders.paragraphs.solutions.splitters.split") + enableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize") +end + +local function disable() + disableaction("processors", "builders.paragraphs.solutions.splitters.split") + disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize") +end + +function 
splitters.start(name,settings) + if pushsplitter(name,settings) == 1 then + enable() + end +end + +function splitters.stop() + if popsplitter() == 0 then + disable() + end +end + +function splitters.set(name,settings) + if #stack > 0 then + stack = { } + else + enable() + end + pushsplitter(name,settings) -- sets attribute etc +end + +function splitters.reset() + if #stack > 0 then + stack = { } + popsplitter() -- resets attribute etc + disable() + end +end + +-- interface + +commands.definefontsolution = splitters.define +commands.startfontsolution = splitters.start +commands.stopfontsolution = splitters.stop +commands.setfontsolution = splitters.set +commands.resetfontsolution = splitters.reset diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua index 27176dade..dd6c47a88 100644 --- a/tex/context/base/font-syn.lua +++ b/tex/context/base/font-syn.lua @@ -1,1724 +1,1724 @@ -if not modules then modules = { } end modules ['font-syn'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: subs in lookups requests - -local next, tonumber, type, tostring = next, tonumber, type, tostring -local sub, gsub, lower, match, find, lower, upper = string.sub, string.gsub, string.lower, string.match, string.find, string.lower, string.upper -local find, gmatch = string.find, string.gmatch -local concat, sort, format = table.concat, table.sort, string.format -local serialize = table.serialize -local lpegmatch = lpeg.match -local unpack = unpack or table.unpack -local formatters = string.formatters - -local allocate = utilities.storage.allocate -local sparse = utilities.storage.sparse - -local removesuffix = file.removesuffix -local splitbase = file.splitbase -local splitname = file.splitname -local basename = file.basename -local nameonly = file.nameonly -local pathpart = file.pathpart -local filejoin = file.join -local is_qualified_path = file.is_qualified_path -local exists = io.exists - -local findfile = resolvers.findfile -local cleanpath = resolvers.cleanpath -local resolveresolved = resolvers.resolve - -local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end) -local trace_warnings = false trackers.register("fonts.warnings", function(v) trace_warnings = v end) -local trace_specifications = false trackers.register("fonts.specifications", function(v) trace_specifications = v end) - -local report_names = logs.reporter("fonts","names") - ---[[ldx-- -
<p>This module implements a name to filename resolver. Names are resolved -using a table that has keys filtered from the font related files.</p>
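-- A minimal usage sketch of the resolver described above (an editorial
-- illustration, not part of the patch); it assumes the font database has
-- already been generated, and "myserif" is a made-up name.
local report = logs.reporter("fonts","example")
local filename, rawname = fonts.names.resolve("myserif")
if filename then
    report("myserif resolves to %s",filename) -- rawname is only set for subfonts (ttc/dfont)
end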
---ldx]]-- - -fonts = fonts or { } -- also used elsewhere - -local names = font.names or allocate { } -fonts.names = names - -local filters = names.filters or { } -names.filters = filters - -names.data = names.data or allocate { } - -names.version = 1.110 -names.basename = "names" -names.saved = false -names.loaded = false -names.be_clever = true -names.enabled = true -names.cache = containers.define("fonts","data",names.version,true) - -local autoreload = true - -directives.register("fonts.autoreload", function(v) autoreload = toboolean(v) end) - ---[[ldx-- -
<p>A few helpers.</p>
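-- A worked sketch (illustration only, not part of the patch) of what the
-- specification helpers defined in the next chunk produce; the string
-- "myserif-bold-oblique" is made up.
local name, weight, style, width, variant = fonts.names.splitspec("myserif-bold-oblique")
-- name    : "myserif"
-- weight  : "bold"
-- style   : "italic"   (oblique is normalized to italic)
-- width   : "normal"   (missing fields fall back to normal)
-- variant : "normal"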
---ldx]]-- - -local P, C, Cc, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs - --- what to do with 'thin' - -local weights = Cs ( -- not extra - P("demibold") - + P("semibold") - + P("mediumbold") - + P("ultrabold") - + P("extrabold") - + P("ultralight") - + P("bold") - + P("demi") - + P("semi") - + P("light") - + P("medium") - + P("heavy") - + P("ultra") - + P("black") - + P("bol") -- / "bold" - + P("regular") / "normal" -) - -local normalized_weights = sparse { - regular = "normal", -} - -local styles = Cs ( - P("reverseoblique") / "reverseitalic" - + P("regular") / "normal" - + P("italic") - + P("oblique") / "italic" - + P("slanted") - + P("roman") / "normal" - + P("ital") / "italic" - + P("ita") / "italic" -) - -local normalized_styles = sparse { - reverseoblique = "reverseitalic", - regular = "normal", - oblique = "italic", -} - -local widths = Cs( - P("condensed") - + P("thin") - + P("expanded") - + P("cond") / "condensed" - + P("normal") - + P("book") / "normal" -) - -local normalized_widths = sparse() - -local variants = Cs( -- fax casual - P("smallcaps") - + P("oldstyle") - + P("caps") / "smallcaps" -) - -local normalized_variants = sparse() - -names.knownweights = { - "black", - "bold", - "demi", - "demibold", - "extrabold", - "heavy", - "light", - "medium", - "mediumbold", - "normal", - "regular", - "semi", - "semibold", - "ultra", - "ultrabold", - "ultralight", -} - -names.knownstyles = { - "italic", - "normal", - "oblique", - "regular", - "reverseitalic", - "reverseoblique", - "roman", - "slanted", -} - -names.knownwidths = { - "book", - "condensed", - "expanded", - "normal", - "thin", -} - -names.knownvariants = { - "normal", - "oldstyle", - "smallcaps", -} - -local any = P(1) - -local analyzed_table - -local analyzer = Cs ( - ( - weights / function(s) analyzed_table[1] = s return "" end - + styles / function(s) analyzed_table[2] = s return "" end - + widths / function(s) analyzed_table[3] = s return "" end - + variants / function(s) analyzed_table[4] = s return "" end - + any - )^0 -) - -local splitter = lpeg.splitat("-") - -function names.splitspec(askedname) - local name, weight, style, width, variant = lpegmatch(splitter,askedname) - weight = weight and lpegmatch(weights, weight) or weight - style = style and lpegmatch(styles, style) or style - width = width and lpegmatch(widths, width) or width - variant = variant and lpegmatch(variants,variant) or variant - if trace_names then - report_names("requested name %a split in name %a, weight %a, style %a, width %a and variant %a", - askedname,name,weight,style,width,variant) - end - if not weight or not weight or not width or not variant then - weight, style, width, variant = weight or "normal", style or "normal", width or "normal", variant or "normal" - if trace_names then - report_names("request %a normalized to '%s-%s-%s-%s-%s'", - askedname,name,weight,style,width,variant) - end - end - return name or askedname, weight, style, width, variant -end - -local function analyzespec(somename) - if somename then - analyzed_table = { } - local name = lpegmatch(analyzer,somename) - return name, analyzed_table[1], analyzed_table[2], analyzed_table[3], analyzed_table[4] - end -end - ---[[ldx-- -
<p>It would make sense to implement the filters in the related modules, -but to keep the overview, we define them here.</p>
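-- A sketch (illustration only, not part of the patch) of the filter contract
-- used in the next chunk: the scanner calls filters[lower(suffix)](filename)
-- and expects either a table with fields such as fontname, fullname and
-- familyname, or nil plus an error message; "somefont.otf" is a made-up file.
local result, message = filters.otf("somefont.otf")
if not result then
    print("skipped: " .. (message or "unknown error"))
end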
---ldx]]-- - -filters.otf = fontloader.info -filters.ttf = fontloader.info -filters.ttc = fontloader.info -filters.dfont = fontloader.info - -function fontloader.fullinfo(...) -- check with taco what we get / could get - local ff = fontloader.open(...) - if ff then - local d = ff and fontloader.to_table(ff) - d.glyphs, d.subfonts, d.gpos, d.gsub, d.lookups = nil, nil, nil, nil, nil - fontloader.close(ff) - return d - else - return nil, "error in loading font" - end -end - -filters.otf = fontloader.fullinfo - -function filters.afm(name) - -- we could parse the afm file as well, and then report an error but - -- it's not worth the trouble - local pfbname = findfile(removesuffix(name)..".pfb","pfb") or "" - if pfbname == "" then - pfbname = findfile(nameonly(name)..".pfb","pfb") or "" - end - if pfbname ~= "" then - local f = io.open(name) - if f then - local hash = { } - for line in f:lines() do - local key, value = match(line,"^(.+)%s+(.+)%s*$") - if key and #key > 0 then - hash[lower(key)] = value - end - if find(line,"StartCharMetrics") then - break - end - end - f:close() - return hash - end - end - return nil, "no matching pfb file" -end - -function filters.pfb(name) - return fontloader.info(name) -end - ---[[ldx-- -
<p>The scanner loops over the filters using the information stored in -the file databases. Watch how we check not only for the names, but also -for combination with the weight of a font.</p>
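-- A worked illustration (not part of the patch) of the per-format hashes the
-- scanner eventually builds, as implemented in collecthashes further down;
-- all names are made up.
local i = 1 -- index of the specification in data.specifications
local example_mappings_otf  = { myserifbd   = i } -- keyed on fullname / fontname
local example_fallbacks_otf = { myserifbold = i,  -- familyname .. weight
                                myserif     = i } -- bare familyname
-- a request for "MySerif Bold" (cleaned to "myserifbold") therefore still
-- resolves via the fallback table, even though no font carries that fullname.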
---ldx]]-- - -filters.list = { - "otf", "ttf", "ttc", "dfont", "afm", - -- "ttc", "otf", "ttf", "dfont", "afm", -} - -names.fontconfigfile = "fonts.conf" -- a bit weird format, bonus feature -names.osfontdirvariable = "OSFONTDIR" -- the official way, in minimals etc - -filters.paths = { } -filters.names = { } - -function names.getpaths(trace) - local hash, result, r = { }, { }, 0 - local function collect(t,where) - for i=1,#t do - local v = cleanpath(t[i]) - v = gsub(v,"/+$","") -- not needed any more - local key = lower(v) - report_names("%a specifies path %a",where,v) - if not hash[key] then - r = r + 1 - result[r] = v - hash[key] = true - end - end - end - local path = names.osfontdirvariable or "" - if path ~= "" then - collect(resolvers.expandedpathlist(path),path) - end - if xml then - local confname = resolvers.expansion("FONTCONFIG_FILE") or "" - if confname == "" then - confname = names.fontconfigfile or "" - end - if confname ~= "" then - -- first look in the tex tree - local name = findfile(confname,"fontconfig files") or "" - if name == "" then - -- after all, fontconfig is a unix thing - name = filejoin("/etc",confname) - if not lfs.isfile(name) then - name = "" -- force quit - end - end - if name ~= "" and lfs.isfile(name) then - if trace_names then - report_names("%s fontconfig file %a","loading",name) - end - local xmldata = xml.load(name) - -- begin of untested mess - xml.include(xmldata,"include","",true,function(incname) - if not is_qualified_path(incname) then - local path = pathpart(name) -- main name - if path ~= "" then - incname = filejoin(path,incname) - end - end - if lfs.isfile(incname) then - if trace_names then - report_names("%s fontconfig file %a","merging included",incname) - end - return io.loaddata(incname) - elseif trace_names then - report_names("%s fontconfig file: %a","ignoring included",incname) - end - end) - -- end of untested mess - local fontdirs = xml.collect_texts(xmldata,"dir",true) - if trace_names then - report_names("%s dirs found in fontconfig",#fontdirs) - end - collect(fontdirs,"fontconfig file") - end - end - end - function names.getpaths() - return result - end - return result -end - -local function cleanname(name) - return (gsub(lower(name),"[^%a%d]","")) -end - -local function cleanfilename(fullname,defaultsuffix) - local path, name, suffix = splitname(fullname) - name = gsub(lower(name),"[^%a%d]","") - if suffix and suffix ~= "" then - return name .. ".".. suffix - elseif defaultsuffix and defaultsuffix ~= "" then - return name .. ".".. defaultsuffix - else - return name - end -end - -names.cleanname = cleanname -names.cleanfilename = cleanfilename - -local function check_names(result) - local names = result.names - if names then - for i=1,#names do - local name = names[i] - if name.lang == "English (US)" then - return name.names - end - end - end -end - -local function walk_tree(pathlist,suffix,identify) - if pathlist then - for i=1,#pathlist do - local path = pathlist[i] - path = cleanpath(path .. "/") - path = gsub(path,"/+","/") - local pattern = path .. "**." .. 
suffix -- ** forces recurse - report_names("globbing path %a",pattern) - local t = dir.glob(pattern) - sort(t,sorter) - for j=1,#t do - local completename = t[j] - identify(completename,basename(completename),suffix,completename) - end - end - end -end - -local function check_name(data,result,filename,modification,suffix,subfont) - -- shortcuts - local specifications = data.specifications - -- prepare - local names = check_names(result) - -- fetch - local familyname = names and names.preffamilyname or result.familyname - local fullname = names and names.fullname or result.fullname - local fontname = result.fontname - local subfamily = names and names.subfamily - local modifiers = names and names.prefmodifiers - local weight = names and names.weight or result.weight - local italicangle = tonumber(result.italicangle) - local subfont = subfont or nil - local rawname = fullname or fontname or familyname - -- normalize - familyname = familyname and cleanname(familyname) - fullname = fullname and cleanname(fullname) - fontname = fontname and cleanname(fontname) - subfamily = subfamily and cleanname(subfamily) - modifiers = modifiers and cleanname(modifiers) - weight = weight and cleanname(weight) - italicangle = italicangle == 0 and nil - -- analyze - local a_name, a_weight, a_style, a_width, a_variant = analyzespec(fullname or fontname or familyname) - -- check - local width = a_width - local variant = a_variant - local style = modifiers and gsub(modifiers,"[^%a]","") - if not style and italicangle then - style = "italic" - end - if not variant or variant == "" then - variant = "normal" - end - if not weight or weight == "" then - weight = a_weight - end - if not style or style == "" then - style = a_style - end - if not familyname then - familyname = a_name - end - fontname = fontname or fullname or familyname or basename(filename) - fullname = fullname or fontname - familyname = familyname or fontname - specifications[#specifications + 1] = { - filename = filename, -- unresolved - format = lower(suffix), - subfont = subfont, - rawname = rawname, - familyname = familyname, - fullname = fullname, - fontname = fontname, - subfamily = subfamily, - modifiers = modifiers, - weight = weight, - style = style, - width = width, - variant = variant, - minsize = result.design_range_bottom or 0, - maxsize = result.design_range_top or 0, - designsize = result.design_size or 0, - modification = modification or 0, - } -end - -local function cleanupkeywords() - local data = names.data - local specifications = names.data.specifications - if specifications then - local weights = { } - local styles = { } - local widths = { } - local variants = { } - for i=1,#specifications do - local s = specifications[i] - -- fix (sofar styles are taken from the name, and widths from the specification) - local _, b_weight, b_style, b_width, b_variant = analyzespec(s.weight) - local _, c_weight, c_style, c_width, c_variant = analyzespec(s.style) - local _, d_weight, d_style, d_width, d_variant = analyzespec(s.width) - local _, e_weight, e_style, e_width, e_variant = analyzespec(s.variant) - local _, f_weight, f_style, f_width, f_variant = analyzespec(s.fullname or "") - local weight = b_weight or c_weight or d_weight or e_weight or f_weight or "normal" - local style = b_style or c_style or d_style or e_style or f_style or "normal" - local width = b_width or c_width or d_width or e_width or f_width or "normal" - local variant = b_variant or c_variant or d_variant or e_variant or f_variant or "normal" - if not weight or weight == 
"" then weight = "normal" end - if not style or style == "" then style = "normal" end - if not width or width == "" then width = "normal" end - if not variant or variant == "" then variant = "normal" end - weights [weight ] = (weights [weight ] or 0) + 1 - styles [style ] = (styles [style ] or 0) + 1 - widths [width ] = (widths [width ] or 0) + 1 - variants[variant] = (variants[variant] or 0) + 1 - if weight ~= s.weight then - s.fontweight = s.weight - end - s.weight, s.style, s.width, s.variant = weight, style, width, variant - end - local stats = data.statistics - stats.used_weights, stats.used_styles, stats.used_widths, stats.used_variants = weights, styles, widths, variants - end -end - -local function collectstatistics() - local data = names.data - local specifications = data.specifications - if specifications then - local weights = { } - local styles = { } - local widths = { } - local variants = { } - for i=1,#specifications do - local s = specifications[i] - local weight = s.weight - local style = s.style - local width = s.width - local variant = s.variant - if weight then weights [weight ] = (weights [weight ] or 0) + 1 end - if style then styles [style ] = (styles [style ] or 0) + 1 end - if width then widths [width ] = (widths [width ] or 0) + 1 end - if variant then variants[variant] = (variants[variant] or 0) + 1 end - end - local stats = data.statistics - stats.weights = weights - stats.styles = styles - stats.widths = widths - stats.variants = variants - stats.fonts = #specifications - end -end - -local function collecthashes() - local data = names.data - local mappings = data.mappings - local fallbacks = data.fallbacks - local specifications = data.specifications - local nofmappings = 0 - local noffallbacks = 0 - if specifications then - -- maybe multiple passes - for index=1,#specifications do - local s = specifications[index] - local format, fullname, fontname, familyname, weight, subfamily = s.format, s.fullname, s.fontname, s.familyname, s.weight, s.subfamily - local mf, ff = mappings[format], fallbacks[format] - if fullname and not mf[fullname] then - mf[fullname], nofmappings = index, nofmappings + 1 - end - if fontname and not mf[fontname] then - mf[fontname], nofmappings = index, nofmappings + 1 - end - if familyname and weight and weight ~= sub(familyname,#familyname-#weight+1,#familyname) then - local madename = familyname .. weight - if not mf[madename] and not ff[madename] then - ff[madename], noffallbacks = index, noffallbacks + 1 - end - end - if familyname and subfamily and subfamily ~= sub(familyname,#familyname-#subfamily+1,#familyname) then - local extraname = familyname .. 
subfamily - if not mf[extraname] and not ff[extraname] then - ff[extraname], noffallbacks = index, noffallbacks + 1 - end - end - if familyname and not mf[familyname] and not ff[familyname] then - ff[familyname], noffallbacks = index, noffallbacks + 1 - end - end - end - return nofmappings, noffallbacks -end - -local function collectfamilies() - local data = names.data - local specifications = data.specifications - local families = data.families - for index=1,#specifications do - local familyname = specifications[index].familyname - local family = families[familyname] - if not family then - families[familyname] = { index } - else - family[#family+1] = index - end - end -end - -local function checkduplicate(where) -- fails on "Romantik" but that's a border case anyway - local data = names.data - local mapping = data[where] - local specifications = data.specifications - local loaded = { } - if specifications and mapping then - for _, m in next, mapping do - for k, v in next, m do - local s = specifications[v] - local hash = formatters["%s-%s-%s-%s-%s"](s.familyname,s.weight or "*",s.style or "*",s.width or "*",s.variant or "*") - local h = loaded[hash] - if h then - local ok = true - local fn = s.filename - for i=1,#h do - local hn = s.filename - if h[i] == fn then - ok = false - break - end - end - if ok then - h[#h+1] = fn - end - else - loaded[hash] = { s.filename } - end - end - end - end - local n = 0 - for k, v in table.sortedhash(loaded) do - local nv = #v - if nv > 1 then - if trace_warnings then - report_names("lookup %a clashes with %a",k,v) - end - n = n + nv - end - end - report_names("%a double lookups in %a",n,where) -end - -local function checkduplicates() - checkduplicate("mappings") - checkduplicate("fallbacks") -end - -local sorter = function(a,b) - return a > b -- to be checked -end - -local function sorthashes() - local data = names.data - local list = filters.list - local mappings = data.mappings - local fallbacks = data.fallbacks - local sorted_mappings = { } - local sorted_fallbacks = { } - data.sorted_mappings = sorted_mappings - data.sorted_fallbacks = sorted_fallbacks - for i=1,#list do - local l = list[i] - sorted_mappings [l] = table.keys(mappings[l]) - sorted_fallbacks[l] = table.keys(fallbacks[l]) - sort(sorted_mappings [l],sorter) - sort(sorted_fallbacks[l],sorter) - end - data.sorted_families = table.keys(data.families) - sort(data.sorted_families,sorter) -end - -local function unpackreferences() - local data = names.data - local specifications = data.specifications - if specifications then - for k, v in next, data.families do - for i=1,#v do - v[i] = specifications[v[i]] - end - end - local mappings = data.mappings - if mappings then - for _, m in next, mappings do - for k, v in next, m do - m[k] = specifications[v] - end - end - end - local fallbacks = data.fallbacks - if fallbacks then - for _, f in next, fallbacks do - for k, v in next, f do - f[k] = specifications[v] - end - end - end - end -end - -local function analyzefiles(olddata) - if not trace_warnings then - report_names("warnings are disabled (tracker 'fonts.warnings')") - end - local data = names.data - local done = { } - local totalnofread = 0 - local totalnofskipped = 0 - local totalnofduplicates = 0 - local nofread = 0 - local nofskipped = 0 - local nofduplicates = 0 - local skip_paths = filters.paths - local skip_names = filters.names - local specifications = data.specifications - local oldindices = olddata and olddata.indices or { } - local oldspecifications = olddata and 
olddata.specifications or { } - local oldrejected = olddata and olddata.rejected or { } - local treatmentdata = fonts.treatments.data - local function identify(completename,name,suffix,storedname) - local pathpart, basepart = splitbase(completename) - nofread = nofread + 1 - local treatment = treatmentdata[completename] or treatmentdata[basepart] - if treatment and treatment.ignored then - if trace_names then - report_names("%s font %a is ignored, reason %a",suffix,completename,treatment.comment or "unknown") - end - nofskipped = nofskipped + 1 - elseif done[name] then - -- already done (avoid otf afm clash) - if trace_names then - report_names("%s font %a already done",suffix,completename) - end - nofduplicates = nofduplicates + 1 - nofskipped = nofskipped + 1 - elseif not exists(completename) then - -- weird error - if trace_names then - report_names("%s font %a does not really exist",suffix,completename) - end - nofskipped = nofskipped + 1 - elseif not is_qualified_path(completename) and findfile(completename,suffix) == "" then - -- not locatable by backend anyway - if trace_names then - report_names("%s font %a cannot be found by backend",suffix,completename) - end - nofskipped = nofskipped + 1 - else - if #skip_paths > 0 then - for i=1,#skip_paths do - if find(pathpart,skip_paths[i]) then - if trace_names then - report_names("rejecting path of %s font %a",suffix,completename) - end - nofskipped = nofskipped + 1 - return - end - end - end - if #skip_names > 0 then - for i=1,#skip_paths do - if find(basepart,skip_names[i]) then - done[name] = true - if trace_names then - report_names("rejecting name of %s font %a",suffix,completename) - end - nofskipped = nofskipped + 1 - return - end - end - end - if trace_names then - report_names("identifying %s font %a",suffix,completename) - end - local result = nil - local modification = lfs.attributes(completename,"modification") - if olddata and modification and modification > 0 then - local oldindex = oldindices[storedname] -- index into specifications - if oldindex then - local oldspecification = oldspecifications[oldindex] - if oldspecification and oldspecification.filename == storedname then -- double check for out of sync - local oldmodification = oldspecification.modification - if oldmodification == modification then - result = oldspecification - specifications[#specifications + 1] = result - else - end - else - end - elseif oldrejected[storedname] == modification then - result = false - end - end - if result == nil then - local result, message = filters[lower(suffix)](completename) - if result then - if result[1] then - for r=1,#result do - local ok = check_name(data,result[r],storedname,modification,suffix,r-1) -- subfonts start at zero - -- if not ok then - -- nofskipped = nofskipped + 1 - -- end - end - else - local ok = check_name(data,result,storedname,modification,suffix) - -- if not ok then - -- nofskipped = nofskipped + 1 - -- end - end - if trace_warnings and message and message ~= "" then - report_names("warning when identifying %s font %a, %s",suffix,completename,message) - end - elseif trace_warnings then - nofskipped = nofskipped + 1 - report_names("error when identifying %s font %a, %s",suffix,completename,message or "unknown") - end - end - done[name] = true - end - logs.flush() -- a bit overkill for each font, maybe not needed here - end - local function traverse(what, method) - local list = filters.list - for n=1,#list do - local suffix = list[n] - local t = os.gettimeofday() -- use elapser - nofread, nofskipped, 
nofduplicates = 0, 0, 0 - suffix = lower(suffix) - report_names("identifying %s font files with suffix %a",what,suffix) - method(suffix) - suffix = upper(suffix) - report_names("identifying %s font files with suffix %a",what,suffix) - method(suffix) - totalnofread, totalnofskipped, totalnofduplicates = totalnofread + nofread, totalnofskipped + nofskipped, totalnofduplicates + nofduplicates - local elapsed = os.gettimeofday() - t - report_names("%s %s files identified, %s skipped, %s duplicates, %s hash entries added, runtime %0.3f seconds",nofread,what,nofskipped,nofduplicates,nofread-nofskipped,elapsed) - end - logs.flush() - end - -- problem .. this will not take care of duplicates - local function withtree(suffix) - resolvers.dowithfilesintree(".*%." .. suffix .. "$", function(method,root,path,name) - if method == "file" or method == "tree" then - local completename = root .."/" .. path .. "/" .. name - completename = resolveresolved(completename) -- no shortcut - identify(completename,name,suffix,name) - return true - end - end, function(blobtype,blobpath,pattern) - blobpath = resolveresolved(blobpath) -- no shortcut - report_names("scanning path %a for %s files",blobpath,suffix) - end, function(blobtype,blobpath,pattern,total,checked,done) - blobpath = resolveresolved(blobpath) -- no shortcut - report_names("%s entries found, %s %s files checked, %s okay",total,checked,suffix,done) - end) - end - local function withlsr(suffix) -- all trees - -- we do this only for a stupid names run, not used for context itself, - -- using the vars is too clumsy so we just stick to a full scan instead - local pathlist = resolvers.splitpath(resolvers.showpath("ls-R") or "") - walk_tree(pathlist,suffix,identify) - end - local function withsystem(suffix) -- OSFONTDIR cum suis - walk_tree(names.getpaths(trace),suffix,identify) - end - traverse("tree",withtree) -- TEXTREE only - if texconfig.kpse_init then - traverse("lsr", withlsr) - else - traverse("system", withsystem) - end - data.statistics.readfiles = totalnofread - data.statistics.skippedfiles = totalnofskipped - data.statistics.duplicatefiles = totalnofduplicates -end - -local function addfilenames() - local data = names.data - local specifications = data.specifications - local indices = { } - local files = { } - for i=1,#specifications do - local fullname = specifications[i].filename - files[cleanfilename(fullname)] = fullname - indices[fullname] = i - end - data.files = files - data.indices = indices -end - -local function rejectclashes() -- just to be sure, so no explicit afm will be found then - local specifications = names.data.specifications - local used = { } - local okay = { } - local rejected = { } -- only keep modification - local o = 0 - for i=1,#specifications do - local s = specifications[i] - local f = s.fontname - if f then - local fnd = used[f] - local fnm = s.filename - if fnd then - if trace_warnings then - report_names("fontname %a clashes, %a rejected in favor of %a",f,fnm,fnd) - end - rejected[f] = s.modification - else - used[f] = fnm - o = o + 1 - okay[o] = s - end - else - o = o + 1 - okay[o] = s - end - end - local d = #specifications - #okay - if d > 0 then - report_names("%s files rejected due to clashes",d) - end - names.data.specifications = okay - names.data.rejected = rejected -end - -local function resetdata() - local mappings = { } - local fallbacks = { } - for _, k in next, filters.list do - mappings [k] = { } - fallbacks[k] = { } - end - names.data = { - version = names.version, - mappings = mappings, - fallbacks 
= fallbacks, - specifications = { }, - families = { }, - statistics = { }, - names = { }, - indices = { }, - rejected = { }, - datastate = resolvers.datastate(), - } -end - -function names.identify(force) - local starttime = os.gettimeofday() -- use elapser - resetdata() - analyzefiles(not force and names.readdata(names.basename)) - rejectclashes() - collectfamilies() - collectstatistics() - cleanupkeywords() - collecthashes() - checkduplicates() - addfilenames() - -- sorthashes() -- will be resorted when saved - report_names("total scan time %0.3f seconds",os.gettimeofday()-starttime) -end - -function names.is_permitted(name) - return containers.is_usable(names.cache, name) -end -function names.writedata(name,data) - containers.write(names.cache,name,data) -end -function names.readdata(name) - return containers.read(names.cache,name) -end - -function names.load(reload,force) - if not names.loaded then - if reload then - if names.is_permitted(names.basename) then - names.identify(force) - names.writedata(names.basename,names.data) - else - report_names("unable to access database cache") - end - names.saved = true - end - local data = names.readdata(names.basename) - names.data = data - if not names.saved then - if not data or not next(data) or not data.specifications or not next(data.specifications) then - names.load(true) - end - names.saved = true - end - if not data then - report_names("accessing the data table failed") - else - unpackreferences() - sorthashes() - end - names.loaded = true - end -end - -local function list_them(mapping,sorted,pattern,t,all) - if mapping[pattern] then - t[pattern] = mapping[pattern] - else - for k=1,#sorted do - local v = sorted[k] - if not t[v] and find(v,pattern) then - t[v] = mapping[v] - if not all then - return - end - end - end - end -end - -function names.list(pattern,reload,all) -- here? - names.load() -- todo reload - if names.loaded then - local t = { } - local data = names.data - if data then - local list = filters.list - local mappings = data.mappings - local sorted_mappings = data.sorted_mappings - local fallbacks = data.fallbacks - local sorted_fallbacks = data.sorted_fallbacks - for i=1,#list do - local format = list[i] - list_them(mappings[format],sorted_mappings[format],pattern,t,all) - if next(t) and not all then - return t - end - list_them(fallbacks[format],sorted_fallbacks[format],pattern,t,all) - if next(t) and not all then - return t - end - end - end - return t - end -end - -local reloaded = false - -local function is_reloaded() - if not reloaded then - local data = names.data - if autoreload then - local c_status = serialize(resolvers.datastate()) - local f_status = serialize(data.datastate) - if c_status == f_status then - if trace_names then - report_names("font database has matching configuration and file hashes") - end - return - else - report_names("font database has mismatching configuration and file hashes") - end - else - report_names("font database is regenerated (controlled by directive 'fonts.autoreload')") - end - names.loaded = false - reloaded = true - logs.flush() - names.load(true) - end -end - ---[[ldx-- -
<p>The resolver also checks if the cached names are loaded. Being clever -here is for testing purposes only (it deals with names prefixed by an -encoding name).</p>
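-- A sketch (not part of the patch) of the lookup order implemented by
-- foundname in the next chunk; every step walks the filter list (otf, ttf,
-- ttc, dfont, afm) and the first hit wins:
--   1. exact match in the mappings
--   2. fuzzy match against the sorted mappings (condensed name as pattern)
--   3. exact match in the fallbacks
--   4. fuzzy match against the fallbacks
-- The public entry point wrapping this; "MySerif" is a made-up name:
local specification = fonts.names.resolvedspecification("MySerif")
if specification then
    print(specification.filename, specification.format)
end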
---ldx]]-- - -local function fuzzy(mapping,sorted,name,sub) - local condensed = gsub(name,"[^%a%d]","") - for k=1,#sorted do - local v = sorted[k] - if find(v,condensed) then - return mapping[v], v - end - end -end - --- we could cache a lookup .. maybe some day ... (only when auto loaded!) - -local function foundname(name,sub) -- sub is not used currently - local data = names.data - local mappings = data.mappings - local sorted_mappings = data.sorted_mappings - local fallbacks = data.fallbacks - local sorted_fallbacks = data.sorted_fallbacks - local list = filters.list - -- dilemma: we lookup in the order otf ttf ttc ... afm but now an otf fallback - -- can come after an afm match ... well, one should provide nice names anyway - -- and having two lists is not an option - for i=1,#list do - local l = list[i] - local found = mappings[l][name] - if found then - if trace_names then - report_names("resolved via direct name match: %a",name) - end - return found - end - end - for i=1,#list do - local l = list[i] - local found, fname = fuzzy(mappings[l],sorted_mappings[l],name,sub) - if found then - if trace_names then - report_names("resolved via fuzzy name match: %a onto %a",name,fname) - end - return found - end - end - for i=1,#list do - local l = list[i] - local found = fallbacks[l][name] - if found then - if trace_names then - report_names("resolved via direct fallback match: %a",name) - end - return found - end - end - for i=1,#list do - local l = list[i] - local found, fname = fuzzy(sorted_mappings[l],sorted_fallbacks[l],name,sub) - if found then - if trace_names then - report_names("resolved via fuzzy fallback match: %a onto %a",name,fname) - end - return found - end - end - if trace_names then - report_names("font with name %a cannot be found",name) - end -end - -function names.resolvedspecification(askedname,sub) - if askedname and askedname ~= "" and names.enabled then - askedname = cleanname(askedname) - names.load() - local found = foundname(askedname,sub) - if not found and is_reloaded() then - found = foundname(askedname,sub) - end - return found - end -end - -function names.resolve(askedname,sub) - local found = names.resolvedspecification(askedname,sub) - if found then - return found.filename, found.subfont and found.rawname - end -end - --- function names.getfilename(askedname,suffix) -- last resort, strip funny chars --- names.load() --- local files = names.data.files --- askedname = files and files[cleanfilename(askedname,suffix)] or "" --- if askedname == "" then --- return "" --- else -- never entered --- return resolvers.findbinfile(askedname,suffix) or "" --- end --- end - -function names.getfilename(askedname,suffix) -- last resort, strip funny chars - names.load() - local files = names.data.files - local cleanname = cleanfilename(askedname,suffix) - local found = files and files[cleanname] or "" - if found == "" and is_reloaded() then - files = names.data.files - found = files and files[cleanname] or "" - end - if found and found ~= "" then - return resolvers.findbinfile(found,suffix) or "" -- we still need to locate it - end -end - --- specified search - -local function s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family) - if family then - for i=1,#family do - local f = family[i] - if f and weight == f.weight and style == f.style and width == f.width and variant == f.variant then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end -local function 
m_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,families,sorted,strictname) - for i=1,#sorted do - local k = sorted[i] - local family = families[k] - for i=1,#family do - local f = family[i] - if not done[f] and weight == f.weight and style == f.style and width == f.width and variant == f.variant and find(f.fontname,strictname) then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end - -local function s_collect_weight_style_width(found,done,all,weight,style,width,family) - if family then - for i=1,#family do - local f = family[i] - if f and weight == f.weight and style == f.style and width == f.width then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end -local function m_collect_weight_style_width(found,done,all,weight,style,width,families,sorted,strictname) - for i=1,#sorted do - local k = sorted[i] - local family = families[k] - for i=1,#family do - local f = family[i] - if not done[f] and weight == f.weight and style == f.style and width == f.width and find(f.fontname,strictname) then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end - -local function s_collect_weight_style(found,done,all,weight,style,family) - if family then - for i=1,#family do local f = family[i] - if f and weight == f.weight and style == f.style then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end -local function m_collect_weight_style(found,done,all,weight,style,families,sorted,strictname) - for i=1,#sorted do - local k = sorted[i] - local family = families[k] - for i=1,#family do - local f = family[i] - if not done[f] and weight == f.weight and style == f.style and find(f.fontname,strictname) then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end - -local function s_collect_style_width(found,done,all,style,width,family) - if family then - for i=1,#family do local f = family[i] - if f and style == f.style and width == f.width then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end -local function m_collect_style_width(found,done,all,style,width,families,sorted,strictname) - for i=1,#sorted do - local k = sorted[i] - local family = families[k] - for i=1,#family do - local f = family[i] - if not done[f] and style == f.style and width == f.width and find(f.fontname,strictname) then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end - -local function s_collect_weight(found,done,all,weight,family) - if family then - for i=1,#family do local f = family[i] - if f and weight == f.weight then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end -local function m_collect_weight(found,done,all,weight,families,sorted,strictname) - for i=1,#sorted do - local k = sorted[i] - local family = families[k] - for i=1,#family do - local f = family[i] - if not done[f] and weight == f.weight and find(f.fontname,strictname) then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end - -local function s_collect_style(found,done,all,style,family) - if family then - for i=1,#family do local f = family[i] - if f and style == f.style then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end -local function m_collect_style(found,done,all,style,families,sorted,strictname) - for i=1,#sorted do - local k = sorted[i] - local family = families[k] - 
for i=1,#family do - local f = family[i] - if not done[f] and style == f.style and find(f.fontname,strictname) then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end - -local function s_collect_width(found,done,all,width,family) - if family then - for i=1,#family do local f = family[i] - if f and width == f.width then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end -local function m_collect_width(found,done,all,width,families,sorted,strictname) - for i=1,#sorted do - local k = sorted[i] - local family = families[k] - for i=1,#family do - local f = family[i] - if not done[f] and width == f.width and find(f.fontname,strictname) then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end - -local function s_collect(found,done,all,family) - if family then - for i=1,#family do local f = family[i] - if f then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end -local function m_collect(found,done,all,families,sorted,strictname) - for i=1,#sorted do - local k = sorted[i] - local family = families[k] - for i=1,#family do - local f = family[i] - if not done[f] and find(f.fontname,strictname) then - found[#found+1], done[f] = f, true - if not all then return end - end - end - end -end - -local function collect(stage,found,done,name,weight,style,width,variant,all) - local data = names.data - local families = data.families - local sorted = data.sorted_families - local strictname = "^".. name -- to be checked - local family = families[name] - if trace_names then - report_names("resolving name %a, weight %a, style %a, width %a, variant %a",name,weight,style,width,variant) - end - if weight and weight ~= "" then - if style and style ~= "" then - if width and width ~= "" then - if variant and variant ~= "" then - if trace_names then - report_names("resolving stage %s, name %a, weight %a, style %a, width %a, variant %a",stage,name,weight,style,width,variant) - end - s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family) - m_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,families,sorted,strictname) - else - if trace_names then - report_names("resolving stage %s, name %a, weight %a, style %a, width %a",stage,name,weight,style,width) - end - s_collect_weight_style_width(found,done,all,weight,style,width,family) - m_collect_weight_style_width(found,done,all,weight,style,width,families,sorted,strictname) - end - else - if trace_names then - report_names("resolving stage %s, name %a, weight %a, style %a",stage,name,weight,style) - end - s_collect_weight_style(found,done,all,weight,style,family) - m_collect_weight_style(found,done,all,weight,style,families,sorted,strictname) - end - else - if trace_names then - report_names("resolving stage %s, name %a, weight %a",stage,name,weight) - end - s_collect_weight(found,done,all,weight,family) - m_collect_weight(found,done,all,weight,families,sorted,strictname) - end - elseif style and style ~= "" then - if width and width ~= "" then - if trace_names then - report_names("resolving stage %s, name %a, style %a, width %a",stage,name,style,width) - end - s_collect_style_width(found,done,all,style,width,family) - m_collect_style_width(found,done,all,style,width,families,sorted,strictname) - else - if trace_names then - report_names("resolving stage %s, name %a, style %a",stage,name,style) - end - s_collect_style(found,done,all,style,family) - 
m_collect_style(found,done,all,style,families,sorted,strictname) - end - elseif width and width ~= "" then - if trace_names then - report_names("resolving stage %s, name %a, width %a",stage,name,width) - end - s_collect_width(found,done,all,width,family) - m_collect_width(found,done,all,width,families,sorted,strictname) - else - if trace_names then - report_names("resolving stage %s, name %a",stage,name) - end - s_collect(found,done,all,family) - m_collect(found,done,all,families,sorted,strictname) - end -end - -local function heuristic(name,weight,style,width,variant,all) -- todo: fallbacks - local found, done = { }, { } ---~ print(name,weight,style,width,variant) - weight, style, width, variant = weight or "normal", style or "normal", width or "normal", variant or "normal" - name = cleanname(name) - collect(1,found,done,name,weight,style,width,variant,all) - -- still needed ? - if #found == 0 and variant ~= "normal" then -- not weight - variant = "normal" - collect(4,found,done,name,weight,style,width,variant,all) - end - if #found == 0 and width ~= "normal" then - width = "normal" - collect(2,found,done,name,weight,style,width,variant,all) - end - if #found == 0 and weight ~= "normal" then -- not style - weight = "normal" - collect(3,found,done,name,weight,style,width,variant,all) - end - if #found == 0 and style ~= "normal" then -- not weight - style = "normal" - collect(4,found,done,name,weight,style,width,variant,all) - end - -- - local nf = #found - if trace_names then - if nf then - local t = { } - for i=1,nf do - t[i] = formatters["%a"](found[i].fontname) - end - report_names("name %a resolved to %s instances: % t",name,nf,t) - else - report_names("name %a unresolved",name) - end - end - if all then - return nf > 0 and found - else - return found[1] - end -end - -function names.specification(askedname,weight,style,width,variant,reload,all) - if askedname and askedname ~= "" and names.enabled then - askedname = cleanname(askedname) -- or cleanname - names.load(reload) - local found = heuristic(askedname,weight,style,width,variant,all) - if not found and is_reloaded() then - found = heuristic(askedname,weight,style,width,variant,all) - if not filename then - found = foundname(askedname) -- old method - end - end - return found - end -end - -function names.collect(askedname,weight,style,width,variant,reload,all) - if askedname and askedname ~= "" and names.enabled then - askedname = cleanname(askedname) -- or cleanname - names.load(reload) - local list = heuristic(askedname,weight,style,width,variant,true) - if not list or #list == 0 and is_reloaded() then - list = heuristic(askedname,weight,style,width,variant,true) - end - return list - end -end - -function names.collectspec(askedname,reload,all) - local name, weight, style, width, variant = names.splitspec(askedname) - return names.collect(name,weight,style,width,variant,reload,all) -end - -function names.resolvespec(askedname,sub) -- redefined later - local found = names.specification(names.splitspec(askedname)) - if found then - return found.filename, found.subfont and found.rawname - end -end - -function names.collectfiles(askedname,reload) -- no all - if askedname and askedname ~= "" and names.enabled then - askedname = cleanname(askedname) -- or cleanname - names.load(reload) - local list = { } - local specifications = names.data.specifications - for i=1,#specifications do - local s = specifications[i] - if find(cleanname(basename(s.filename)),askedname) then - list[#list+1] = s - end - end - return list - end -end - --- 
todo: --- --- blacklisted = { --- ["cmr10.ttf"] = "completely messed up", --- } - -function names.exists(name) - local found = false - local list = filters.list - for k=1,#list do - local v = list[k] - found = (findfile(name,v) or "") ~= "" - if found then - return found - end - end - return (findfile(name,"tfm") or "") ~= "" or (names.resolve(name) or "") ~= "" -end - -local lastlookups, lastpattern = { }, "" - -function names.lookup(pattern,name,reload) -- todo: find - if lastpattern ~= pattern then - names.load(reload) - local specifications = names.data.specifications - local families = names.data.families - local lookups = specifications - if name then - lookups = families[name] - elseif not find(pattern,"=") then - lookups = families[pattern] - end - if trace_names then - report_names("starting with %s lookups for %a",#lookups,pattern) - end - if lookups then - for key, value in gmatch(pattern,"([^=,]+)=([^=,]+)") do - local t, n = { }, 0 - if find(value,"*") then - value = string.topattern(value) - for i=1,#lookups do - local s = lookups[i] - if find(s[key],value) then - n = n + 1 - t[n] = lookups[i] - end - end - else - for i=1,#lookups do - local s = lookups[i] - if s[key] == value then - n = n + 1 - t[n] = lookups[i] - end - end - end - if trace_names then - report_names("%s matches for key %a with value %a",#t,key,value) - end - lookups = t - end - end - lastpattern = pattern - lastlookups = lookups or { } - end - return #lastlookups -end - -function names.getlookupkey(key,n) - local l = lastlookups[n or 1] - return (l and l[key]) or "" -end - -function names.noflookups() - return #lastlookups -end - -function names.getlookups(pattern,name,reload) - if pattern then - names.lookup(pattern,name,reload) - end - return lastlookups -end - --- The following is new ... watch the overload! 
- -local specifications = allocate() -names.specifications = specifications - --- files = { --- name = "antykwapoltawskiego", --- list = { --- ["AntPoltLtCond-Regular.otf"] = { --- -- name = "antykwapoltawskiego", --- style = "regular", --- weight = "light", --- width = "condensed", --- }, --- }, --- } - -function names.register(files) - if files then - local list, commonname = files.list, files.name - if list then - local n, m = 0, 0 - for filename, filespec in next, list do - local name = lower(filespec.name or commonname) - if name and name ~= "" then - local style = normalized_styles [lower(filespec.style or "normal")] - local width = normalized_widths [lower(filespec.width or "normal")] - local weight = normalized_weights [lower(filespec.weight or "normal")] - local variant = normalized_variants[lower(filespec.variant or "normal")] - local weights = specifications[name ] if not weights then weights = { } specifications[name ] = weights end - local styles = weights [weight] if not styles then styles = { } weights [weight] = styles end - local widths = styles [style ] if not widths then widths = { } styles [style ] = widths end - local variants = widths [width ] if not variants then variants = { } widths [width ] = variants end - variants[variant] = filename - n = n + 1 - else - m = m + 1 - end - end - if trace_specifications then - report_names("%s filenames registered, %s filenames rejected",n,m) - end - end - end -end - -function names.registered(name,weight,style,width,variant) - local ok = specifications[name] - ok = ok and (ok[(weight and weight ~= "" and weight ) or "normal"] or ok.normal) - ok = ok and (ok[(style and style ~= "" and style ) or "normal"] or ok.normal) - ok = ok and (ok[(width and width ~= "" and width ) or "normal"] or ok.normal) - ok = ok and (ok[(variant and variant ~= "" and variant) or "normal"] or ok.normal) - -- - -- todo: same fallbacks as with database - -- - if ok then - return { - filename = ok, - subname = "", - -- rawname = nil, - } - end -end - -function names.resolvespec(askedname,sub) -- overloads previous definition - local name, weight, style, width, variant = names.splitspec(askedname) - if trace_specifications then - report_names("resolving specification: %a to name=%s, weight=%s, style=%s, width=%s, variant=%s",askedname,name,weight,style,width,variant) - end - local found = names.registered(name,weight,style,width,variant) - if found and found.filename then - if trace_specifications then - report_names("resolved by registered names: %a to %s",askedname,found.filename) - end - return found.filename, found.subname, found.rawname - else - found = names.specification(name,weight,style,width,variant) - if found and found.filename then - if trace_specifications then - report_names("resolved by font database: %a to %s",askedname,found.filename) - end - return found.filename, found.subfont and found.rawname - end - end - if trace_specifications then - report_names("unresolved: %s",askedname) - end -end +if not modules then modules = { } end modules ['font-syn'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: subs in lookups requests + +local next, tonumber, type, tostring = next, tonumber, type, tostring +local sub, gsub, lower, match, find, lower, upper = string.sub, string.gsub, string.lower, string.match, string.find, string.lower, string.upper +local find, gmatch = 
string.find, string.gmatch +local concat, sort, format = table.concat, table.sort, string.format +local serialize = table.serialize +local lpegmatch = lpeg.match +local unpack = unpack or table.unpack +local formatters = string.formatters + +local allocate = utilities.storage.allocate +local sparse = utilities.storage.sparse + +local removesuffix = file.removesuffix +local splitbase = file.splitbase +local splitname = file.splitname +local basename = file.basename +local nameonly = file.nameonly +local pathpart = file.pathpart +local filejoin = file.join +local is_qualified_path = file.is_qualified_path +local exists = io.exists + +local findfile = resolvers.findfile +local cleanpath = resolvers.cleanpath +local resolveresolved = resolvers.resolve + +local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end) +local trace_warnings = false trackers.register("fonts.warnings", function(v) trace_warnings = v end) +local trace_specifications = false trackers.register("fonts.specifications", function(v) trace_specifications = v end) + +local report_names = logs.reporter("fonts","names") + +--[[ldx-- +

+<p>This module implements a name to filename resolver. Names are resolved
+using a table that has keys filtered from the font related files.</p>
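A minimal usage sketch (the font name below is illustrative; what actually
resolves depends on the local database):

    -- resolve a cleaned-up name to the file that provides it
    local filename = names.resolve("dejavuserif")
    -- filename is then something like "DejaVuSerif.ttf", or nil when unknown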

+--ldx]]-- + +fonts = fonts or { } -- also used elsewhere + +local names = font.names or allocate { } +fonts.names = names + +local filters = names.filters or { } +names.filters = filters + +names.data = names.data or allocate { } + +names.version = 1.110 +names.basename = "names" +names.saved = false +names.loaded = false +names.be_clever = true +names.enabled = true +names.cache = containers.define("fonts","data",names.version,true) + +local autoreload = true + +directives.register("fonts.autoreload", function(v) autoreload = toboolean(v) end) + +--[[ldx-- +

+<p>A few helpers.</p>
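One of these helpers, names.splitspec, splits a dashed specification into its
parts; a small sketch (the name "cambria" is only an example):

    local name, weight, style, width, variant = names.splitspec("cambria-bold-italic")
    -- name = "cambria", weight = "bold", style = "italic",
    -- width and variant fall back to "normal"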

+--ldx]]-- + +local P, C, Cc, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs + +-- what to do with 'thin' + +local weights = Cs ( -- not extra + P("demibold") + + P("semibold") + + P("mediumbold") + + P("ultrabold") + + P("extrabold") + + P("ultralight") + + P("bold") + + P("demi") + + P("semi") + + P("light") + + P("medium") + + P("heavy") + + P("ultra") + + P("black") + + P("bol") -- / "bold" + + P("regular") / "normal" +) + +local normalized_weights = sparse { + regular = "normal", +} + +local styles = Cs ( + P("reverseoblique") / "reverseitalic" + + P("regular") / "normal" + + P("italic") + + P("oblique") / "italic" + + P("slanted") + + P("roman") / "normal" + + P("ital") / "italic" + + P("ita") / "italic" +) + +local normalized_styles = sparse { + reverseoblique = "reverseitalic", + regular = "normal", + oblique = "italic", +} + +local widths = Cs( + P("condensed") + + P("thin") + + P("expanded") + + P("cond") / "condensed" + + P("normal") + + P("book") / "normal" +) + +local normalized_widths = sparse() + +local variants = Cs( -- fax casual + P("smallcaps") + + P("oldstyle") + + P("caps") / "smallcaps" +) + +local normalized_variants = sparse() + +names.knownweights = { + "black", + "bold", + "demi", + "demibold", + "extrabold", + "heavy", + "light", + "medium", + "mediumbold", + "normal", + "regular", + "semi", + "semibold", + "ultra", + "ultrabold", + "ultralight", +} + +names.knownstyles = { + "italic", + "normal", + "oblique", + "regular", + "reverseitalic", + "reverseoblique", + "roman", + "slanted", +} + +names.knownwidths = { + "book", + "condensed", + "expanded", + "normal", + "thin", +} + +names.knownvariants = { + "normal", + "oldstyle", + "smallcaps", +} + +local any = P(1) + +local analyzed_table + +local analyzer = Cs ( + ( + weights / function(s) analyzed_table[1] = s return "" end + + styles / function(s) analyzed_table[2] = s return "" end + + widths / function(s) analyzed_table[3] = s return "" end + + variants / function(s) analyzed_table[4] = s return "" end + + any + )^0 +) + +local splitter = lpeg.splitat("-") + +function names.splitspec(askedname) + local name, weight, style, width, variant = lpegmatch(splitter,askedname) + weight = weight and lpegmatch(weights, weight) or weight + style = style and lpegmatch(styles, style) or style + width = width and lpegmatch(widths, width) or width + variant = variant and lpegmatch(variants,variant) or variant + if trace_names then + report_names("requested name %a split in name %a, weight %a, style %a, width %a and variant %a", + askedname,name,weight,style,width,variant) + end + if not weight or not weight or not width or not variant then + weight, style, width, variant = weight or "normal", style or "normal", width or "normal", variant or "normal" + if trace_names then + report_names("request %a normalized to '%s-%s-%s-%s-%s'", + askedname,name,weight,style,width,variant) + end + end + return name or askedname, weight, style, width, variant +end + +local function analyzespec(somename) + if somename then + analyzed_table = { } + local name = lpegmatch(analyzer,somename) + return name, analyzed_table[1], analyzed_table[2], analyzed_table[3], analyzed_table[4] + end +end + +--[[ldx-- +

+<p>It would make sense to implement the filters in the related modules,
+but to keep the overview, we define them here.</p>

+--ldx]]-- + +filters.otf = fontloader.info +filters.ttf = fontloader.info +filters.ttc = fontloader.info +filters.dfont = fontloader.info + +function fontloader.fullinfo(...) -- check with taco what we get / could get + local ff = fontloader.open(...) + if ff then + local d = ff and fontloader.to_table(ff) + d.glyphs, d.subfonts, d.gpos, d.gsub, d.lookups = nil, nil, nil, nil, nil + fontloader.close(ff) + return d + else + return nil, "error in loading font" + end +end + +filters.otf = fontloader.fullinfo + +function filters.afm(name) + -- we could parse the afm file as well, and then report an error but + -- it's not worth the trouble + local pfbname = findfile(removesuffix(name)..".pfb","pfb") or "" + if pfbname == "" then + pfbname = findfile(nameonly(name)..".pfb","pfb") or "" + end + if pfbname ~= "" then + local f = io.open(name) + if f then + local hash = { } + for line in f:lines() do + local key, value = match(line,"^(.+)%s+(.+)%s*$") + if key and #key > 0 then + hash[lower(key)] = value + end + if find(line,"StartCharMetrics") then + break + end + end + f:close() + return hash + end + end + return nil, "no matching pfb file" +end + +function filters.pfb(name) + return fontloader.info(name) +end + +--[[ldx-- +

+<p>The scanner loops over the filters using the information stored in
+the file databases. Watch how we check not only for the names, but also
+for combination with the weight of a font.</p>
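The weight check boils down to extra hash keys; a rough sketch of the idea
(the family and weight shown are illustrative):

    -- besides the plain family name, a combined key serves as a fallback,
    -- so asking for family plus weight still finds a file
    local familyname, weight = "lucidabright", "demibold"
    local madename = familyname .. weight -- "lucidabrightdemibold"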

+--ldx]]-- + +filters.list = { + "otf", "ttf", "ttc", "dfont", "afm", + -- "ttc", "otf", "ttf", "dfont", "afm", +} + +names.fontconfigfile = "fonts.conf" -- a bit weird format, bonus feature +names.osfontdirvariable = "OSFONTDIR" -- the official way, in minimals etc + +filters.paths = { } +filters.names = { } + +function names.getpaths(trace) + local hash, result, r = { }, { }, 0 + local function collect(t,where) + for i=1,#t do + local v = cleanpath(t[i]) + v = gsub(v,"/+$","") -- not needed any more + local key = lower(v) + report_names("%a specifies path %a",where,v) + if not hash[key] then + r = r + 1 + result[r] = v + hash[key] = true + end + end + end + local path = names.osfontdirvariable or "" + if path ~= "" then + collect(resolvers.expandedpathlist(path),path) + end + if xml then + local confname = resolvers.expansion("FONTCONFIG_FILE") or "" + if confname == "" then + confname = names.fontconfigfile or "" + end + if confname ~= "" then + -- first look in the tex tree + local name = findfile(confname,"fontconfig files") or "" + if name == "" then + -- after all, fontconfig is a unix thing + name = filejoin("/etc",confname) + if not lfs.isfile(name) then + name = "" -- force quit + end + end + if name ~= "" and lfs.isfile(name) then + if trace_names then + report_names("%s fontconfig file %a","loading",name) + end + local xmldata = xml.load(name) + -- begin of untested mess + xml.include(xmldata,"include","",true,function(incname) + if not is_qualified_path(incname) then + local path = pathpart(name) -- main name + if path ~= "" then + incname = filejoin(path,incname) + end + end + if lfs.isfile(incname) then + if trace_names then + report_names("%s fontconfig file %a","merging included",incname) + end + return io.loaddata(incname) + elseif trace_names then + report_names("%s fontconfig file: %a","ignoring included",incname) + end + end) + -- end of untested mess + local fontdirs = xml.collect_texts(xmldata,"dir",true) + if trace_names then + report_names("%s dirs found in fontconfig",#fontdirs) + end + collect(fontdirs,"fontconfig file") + end + end + end + function names.getpaths() + return result + end + return result +end + +local function cleanname(name) + return (gsub(lower(name),"[^%a%d]","")) +end + +local function cleanfilename(fullname,defaultsuffix) + local path, name, suffix = splitname(fullname) + name = gsub(lower(name),"[^%a%d]","") + if suffix and suffix ~= "" then + return name .. ".".. suffix + elseif defaultsuffix and defaultsuffix ~= "" then + return name .. ".".. defaultsuffix + else + return name + end +end + +names.cleanname = cleanname +names.cleanfilename = cleanfilename + +local function check_names(result) + local names = result.names + if names then + for i=1,#names do + local name = names[i] + if name.lang == "English (US)" then + return name.names + end + end + end +end + +local function walk_tree(pathlist,suffix,identify) + if pathlist then + for i=1,#pathlist do + local path = pathlist[i] + path = cleanpath(path .. "/") + path = gsub(path,"/+","/") + local pattern = path .. "**." .. 
suffix -- ** forces recurse + report_names("globbing path %a",pattern) + local t = dir.glob(pattern) + sort(t,sorter) + for j=1,#t do + local completename = t[j] + identify(completename,basename(completename),suffix,completename) + end + end + end +end + +local function check_name(data,result,filename,modification,suffix,subfont) + -- shortcuts + local specifications = data.specifications + -- prepare + local names = check_names(result) + -- fetch + local familyname = names and names.preffamilyname or result.familyname + local fullname = names and names.fullname or result.fullname + local fontname = result.fontname + local subfamily = names and names.subfamily + local modifiers = names and names.prefmodifiers + local weight = names and names.weight or result.weight + local italicangle = tonumber(result.italicangle) + local subfont = subfont or nil + local rawname = fullname or fontname or familyname + -- normalize + familyname = familyname and cleanname(familyname) + fullname = fullname and cleanname(fullname) + fontname = fontname and cleanname(fontname) + subfamily = subfamily and cleanname(subfamily) + modifiers = modifiers and cleanname(modifiers) + weight = weight and cleanname(weight) + italicangle = italicangle == 0 and nil + -- analyze + local a_name, a_weight, a_style, a_width, a_variant = analyzespec(fullname or fontname or familyname) + -- check + local width = a_width + local variant = a_variant + local style = modifiers and gsub(modifiers,"[^%a]","") + if not style and italicangle then + style = "italic" + end + if not variant or variant == "" then + variant = "normal" + end + if not weight or weight == "" then + weight = a_weight + end + if not style or style == "" then + style = a_style + end + if not familyname then + familyname = a_name + end + fontname = fontname or fullname or familyname or basename(filename) + fullname = fullname or fontname + familyname = familyname or fontname + specifications[#specifications + 1] = { + filename = filename, -- unresolved + format = lower(suffix), + subfont = subfont, + rawname = rawname, + familyname = familyname, + fullname = fullname, + fontname = fontname, + subfamily = subfamily, + modifiers = modifiers, + weight = weight, + style = style, + width = width, + variant = variant, + minsize = result.design_range_bottom or 0, + maxsize = result.design_range_top or 0, + designsize = result.design_size or 0, + modification = modification or 0, + } +end + +local function cleanupkeywords() + local data = names.data + local specifications = names.data.specifications + if specifications then + local weights = { } + local styles = { } + local widths = { } + local variants = { } + for i=1,#specifications do + local s = specifications[i] + -- fix (sofar styles are taken from the name, and widths from the specification) + local _, b_weight, b_style, b_width, b_variant = analyzespec(s.weight) + local _, c_weight, c_style, c_width, c_variant = analyzespec(s.style) + local _, d_weight, d_style, d_width, d_variant = analyzespec(s.width) + local _, e_weight, e_style, e_width, e_variant = analyzespec(s.variant) + local _, f_weight, f_style, f_width, f_variant = analyzespec(s.fullname or "") + local weight = b_weight or c_weight or d_weight or e_weight or f_weight or "normal" + local style = b_style or c_style or d_style or e_style or f_style or "normal" + local width = b_width or c_width or d_width or e_width or f_width or "normal" + local variant = b_variant or c_variant or d_variant or e_variant or f_variant or "normal" + if not weight or weight == 
"" then weight = "normal" end + if not style or style == "" then style = "normal" end + if not width or width == "" then width = "normal" end + if not variant or variant == "" then variant = "normal" end + weights [weight ] = (weights [weight ] or 0) + 1 + styles [style ] = (styles [style ] or 0) + 1 + widths [width ] = (widths [width ] or 0) + 1 + variants[variant] = (variants[variant] or 0) + 1 + if weight ~= s.weight then + s.fontweight = s.weight + end + s.weight, s.style, s.width, s.variant = weight, style, width, variant + end + local stats = data.statistics + stats.used_weights, stats.used_styles, stats.used_widths, stats.used_variants = weights, styles, widths, variants + end +end + +local function collectstatistics() + local data = names.data + local specifications = data.specifications + if specifications then + local weights = { } + local styles = { } + local widths = { } + local variants = { } + for i=1,#specifications do + local s = specifications[i] + local weight = s.weight + local style = s.style + local width = s.width + local variant = s.variant + if weight then weights [weight ] = (weights [weight ] or 0) + 1 end + if style then styles [style ] = (styles [style ] or 0) + 1 end + if width then widths [width ] = (widths [width ] or 0) + 1 end + if variant then variants[variant] = (variants[variant] or 0) + 1 end + end + local stats = data.statistics + stats.weights = weights + stats.styles = styles + stats.widths = widths + stats.variants = variants + stats.fonts = #specifications + end +end + +local function collecthashes() + local data = names.data + local mappings = data.mappings + local fallbacks = data.fallbacks + local specifications = data.specifications + local nofmappings = 0 + local noffallbacks = 0 + if specifications then + -- maybe multiple passes + for index=1,#specifications do + local s = specifications[index] + local format, fullname, fontname, familyname, weight, subfamily = s.format, s.fullname, s.fontname, s.familyname, s.weight, s.subfamily + local mf, ff = mappings[format], fallbacks[format] + if fullname and not mf[fullname] then + mf[fullname], nofmappings = index, nofmappings + 1 + end + if fontname and not mf[fontname] then + mf[fontname], nofmappings = index, nofmappings + 1 + end + if familyname and weight and weight ~= sub(familyname,#familyname-#weight+1,#familyname) then + local madename = familyname .. weight + if not mf[madename] and not ff[madename] then + ff[madename], noffallbacks = index, noffallbacks + 1 + end + end + if familyname and subfamily and subfamily ~= sub(familyname,#familyname-#subfamily+1,#familyname) then + local extraname = familyname .. 
subfamily + if not mf[extraname] and not ff[extraname] then + ff[extraname], noffallbacks = index, noffallbacks + 1 + end + end + if familyname and not mf[familyname] and not ff[familyname] then + ff[familyname], noffallbacks = index, noffallbacks + 1 + end + end + end + return nofmappings, noffallbacks +end + +local function collectfamilies() + local data = names.data + local specifications = data.specifications + local families = data.families + for index=1,#specifications do + local familyname = specifications[index].familyname + local family = families[familyname] + if not family then + families[familyname] = { index } + else + family[#family+1] = index + end + end +end + +local function checkduplicate(where) -- fails on "Romantik" but that's a border case anyway + local data = names.data + local mapping = data[where] + local specifications = data.specifications + local loaded = { } + if specifications and mapping then + for _, m in next, mapping do + for k, v in next, m do + local s = specifications[v] + local hash = formatters["%s-%s-%s-%s-%s"](s.familyname,s.weight or "*",s.style or "*",s.width or "*",s.variant or "*") + local h = loaded[hash] + if h then + local ok = true + local fn = s.filename + for i=1,#h do + local hn = s.filename + if h[i] == fn then + ok = false + break + end + end + if ok then + h[#h+1] = fn + end + else + loaded[hash] = { s.filename } + end + end + end + end + local n = 0 + for k, v in table.sortedhash(loaded) do + local nv = #v + if nv > 1 then + if trace_warnings then + report_names("lookup %a clashes with %a",k,v) + end + n = n + nv + end + end + report_names("%a double lookups in %a",n,where) +end + +local function checkduplicates() + checkduplicate("mappings") + checkduplicate("fallbacks") +end + +local sorter = function(a,b) + return a > b -- to be checked +end + +local function sorthashes() + local data = names.data + local list = filters.list + local mappings = data.mappings + local fallbacks = data.fallbacks + local sorted_mappings = { } + local sorted_fallbacks = { } + data.sorted_mappings = sorted_mappings + data.sorted_fallbacks = sorted_fallbacks + for i=1,#list do + local l = list[i] + sorted_mappings [l] = table.keys(mappings[l]) + sorted_fallbacks[l] = table.keys(fallbacks[l]) + sort(sorted_mappings [l],sorter) + sort(sorted_fallbacks[l],sorter) + end + data.sorted_families = table.keys(data.families) + sort(data.sorted_families,sorter) +end + +local function unpackreferences() + local data = names.data + local specifications = data.specifications + if specifications then + for k, v in next, data.families do + for i=1,#v do + v[i] = specifications[v[i]] + end + end + local mappings = data.mappings + if mappings then + for _, m in next, mappings do + for k, v in next, m do + m[k] = specifications[v] + end + end + end + local fallbacks = data.fallbacks + if fallbacks then + for _, f in next, fallbacks do + for k, v in next, f do + f[k] = specifications[v] + end + end + end + end +end + +local function analyzefiles(olddata) + if not trace_warnings then + report_names("warnings are disabled (tracker 'fonts.warnings')") + end + local data = names.data + local done = { } + local totalnofread = 0 + local totalnofskipped = 0 + local totalnofduplicates = 0 + local nofread = 0 + local nofskipped = 0 + local nofduplicates = 0 + local skip_paths = filters.paths + local skip_names = filters.names + local specifications = data.specifications + local oldindices = olddata and olddata.indices or { } + local oldspecifications = olddata and 
olddata.specifications or { } + local oldrejected = olddata and olddata.rejected or { } + local treatmentdata = fonts.treatments.data + local function identify(completename,name,suffix,storedname) + local pathpart, basepart = splitbase(completename) + nofread = nofread + 1 + local treatment = treatmentdata[completename] or treatmentdata[basepart] + if treatment and treatment.ignored then + if trace_names then + report_names("%s font %a is ignored, reason %a",suffix,completename,treatment.comment or "unknown") + end + nofskipped = nofskipped + 1 + elseif done[name] then + -- already done (avoid otf afm clash) + if trace_names then + report_names("%s font %a already done",suffix,completename) + end + nofduplicates = nofduplicates + 1 + nofskipped = nofskipped + 1 + elseif not exists(completename) then + -- weird error + if trace_names then + report_names("%s font %a does not really exist",suffix,completename) + end + nofskipped = nofskipped + 1 + elseif not is_qualified_path(completename) and findfile(completename,suffix) == "" then + -- not locatable by backend anyway + if trace_names then + report_names("%s font %a cannot be found by backend",suffix,completename) + end + nofskipped = nofskipped + 1 + else + if #skip_paths > 0 then + for i=1,#skip_paths do + if find(pathpart,skip_paths[i]) then + if trace_names then + report_names("rejecting path of %s font %a",suffix,completename) + end + nofskipped = nofskipped + 1 + return + end + end + end + if #skip_names > 0 then + for i=1,#skip_paths do + if find(basepart,skip_names[i]) then + done[name] = true + if trace_names then + report_names("rejecting name of %s font %a",suffix,completename) + end + nofskipped = nofskipped + 1 + return + end + end + end + if trace_names then + report_names("identifying %s font %a",suffix,completename) + end + local result = nil + local modification = lfs.attributes(completename,"modification") + if olddata and modification and modification > 0 then + local oldindex = oldindices[storedname] -- index into specifications + if oldindex then + local oldspecification = oldspecifications[oldindex] + if oldspecification and oldspecification.filename == storedname then -- double check for out of sync + local oldmodification = oldspecification.modification + if oldmodification == modification then + result = oldspecification + specifications[#specifications + 1] = result + else + end + else + end + elseif oldrejected[storedname] == modification then + result = false + end + end + if result == nil then + local result, message = filters[lower(suffix)](completename) + if result then + if result[1] then + for r=1,#result do + local ok = check_name(data,result[r],storedname,modification,suffix,r-1) -- subfonts start at zero + -- if not ok then + -- nofskipped = nofskipped + 1 + -- end + end + else + local ok = check_name(data,result,storedname,modification,suffix) + -- if not ok then + -- nofskipped = nofskipped + 1 + -- end + end + if trace_warnings and message and message ~= "" then + report_names("warning when identifying %s font %a, %s",suffix,completename,message) + end + elseif trace_warnings then + nofskipped = nofskipped + 1 + report_names("error when identifying %s font %a, %s",suffix,completename,message or "unknown") + end + end + done[name] = true + end + logs.flush() -- a bit overkill for each font, maybe not needed here + end + local function traverse(what, method) + local list = filters.list + for n=1,#list do + local suffix = list[n] + local t = os.gettimeofday() -- use elapser + nofread, nofskipped, 
nofduplicates = 0, 0, 0 + suffix = lower(suffix) + report_names("identifying %s font files with suffix %a",what,suffix) + method(suffix) + suffix = upper(suffix) + report_names("identifying %s font files with suffix %a",what,suffix) + method(suffix) + totalnofread, totalnofskipped, totalnofduplicates = totalnofread + nofread, totalnofskipped + nofskipped, totalnofduplicates + nofduplicates + local elapsed = os.gettimeofday() - t + report_names("%s %s files identified, %s skipped, %s duplicates, %s hash entries added, runtime %0.3f seconds",nofread,what,nofskipped,nofduplicates,nofread-nofskipped,elapsed) + end + logs.flush() + end + -- problem .. this will not take care of duplicates + local function withtree(suffix) + resolvers.dowithfilesintree(".*%." .. suffix .. "$", function(method,root,path,name) + if method == "file" or method == "tree" then + local completename = root .."/" .. path .. "/" .. name + completename = resolveresolved(completename) -- no shortcut + identify(completename,name,suffix,name) + return true + end + end, function(blobtype,blobpath,pattern) + blobpath = resolveresolved(blobpath) -- no shortcut + report_names("scanning path %a for %s files",blobpath,suffix) + end, function(blobtype,blobpath,pattern,total,checked,done) + blobpath = resolveresolved(blobpath) -- no shortcut + report_names("%s entries found, %s %s files checked, %s okay",total,checked,suffix,done) + end) + end + local function withlsr(suffix) -- all trees + -- we do this only for a stupid names run, not used for context itself, + -- using the vars is too clumsy so we just stick to a full scan instead + local pathlist = resolvers.splitpath(resolvers.showpath("ls-R") or "") + walk_tree(pathlist,suffix,identify) + end + local function withsystem(suffix) -- OSFONTDIR cum suis + walk_tree(names.getpaths(trace),suffix,identify) + end + traverse("tree",withtree) -- TEXTREE only + if texconfig.kpse_init then + traverse("lsr", withlsr) + else + traverse("system", withsystem) + end + data.statistics.readfiles = totalnofread + data.statistics.skippedfiles = totalnofskipped + data.statistics.duplicatefiles = totalnofduplicates +end + +local function addfilenames() + local data = names.data + local specifications = data.specifications + local indices = { } + local files = { } + for i=1,#specifications do + local fullname = specifications[i].filename + files[cleanfilename(fullname)] = fullname + indices[fullname] = i + end + data.files = files + data.indices = indices +end + +local function rejectclashes() -- just to be sure, so no explicit afm will be found then + local specifications = names.data.specifications + local used = { } + local okay = { } + local rejected = { } -- only keep modification + local o = 0 + for i=1,#specifications do + local s = specifications[i] + local f = s.fontname + if f then + local fnd = used[f] + local fnm = s.filename + if fnd then + if trace_warnings then + report_names("fontname %a clashes, %a rejected in favor of %a",f,fnm,fnd) + end + rejected[f] = s.modification + else + used[f] = fnm + o = o + 1 + okay[o] = s + end + else + o = o + 1 + okay[o] = s + end + end + local d = #specifications - #okay + if d > 0 then + report_names("%s files rejected due to clashes",d) + end + names.data.specifications = okay + names.data.rejected = rejected +end + +local function resetdata() + local mappings = { } + local fallbacks = { } + for _, k in next, filters.list do + mappings [k] = { } + fallbacks[k] = { } + end + names.data = { + version = names.version, + mappings = mappings, + fallbacks 
= fallbacks, + specifications = { }, + families = { }, + statistics = { }, + names = { }, + indices = { }, + rejected = { }, + datastate = resolvers.datastate(), + } +end + +function names.identify(force) + local starttime = os.gettimeofday() -- use elapser + resetdata() + analyzefiles(not force and names.readdata(names.basename)) + rejectclashes() + collectfamilies() + collectstatistics() + cleanupkeywords() + collecthashes() + checkduplicates() + addfilenames() + -- sorthashes() -- will be resorted when saved + report_names("total scan time %0.3f seconds",os.gettimeofday()-starttime) +end + +function names.is_permitted(name) + return containers.is_usable(names.cache, name) +end +function names.writedata(name,data) + containers.write(names.cache,name,data) +end +function names.readdata(name) + return containers.read(names.cache,name) +end + +function names.load(reload,force) + if not names.loaded then + if reload then + if names.is_permitted(names.basename) then + names.identify(force) + names.writedata(names.basename,names.data) + else + report_names("unable to access database cache") + end + names.saved = true + end + local data = names.readdata(names.basename) + names.data = data + if not names.saved then + if not data or not next(data) or not data.specifications or not next(data.specifications) then + names.load(true) + end + names.saved = true + end + if not data then + report_names("accessing the data table failed") + else + unpackreferences() + sorthashes() + end + names.loaded = true + end +end + +local function list_them(mapping,sorted,pattern,t,all) + if mapping[pattern] then + t[pattern] = mapping[pattern] + else + for k=1,#sorted do + local v = sorted[k] + if not t[v] and find(v,pattern) then + t[v] = mapping[v] + if not all then + return + end + end + end + end +end + +function names.list(pattern,reload,all) -- here? + names.load() -- todo reload + if names.loaded then + local t = { } + local data = names.data + if data then + local list = filters.list + local mappings = data.mappings + local sorted_mappings = data.sorted_mappings + local fallbacks = data.fallbacks + local sorted_fallbacks = data.sorted_fallbacks + for i=1,#list do + local format = list[i] + list_them(mappings[format],sorted_mappings[format],pattern,t,all) + if next(t) and not all then + return t + end + list_them(fallbacks[format],sorted_fallbacks[format],pattern,t,all) + if next(t) and not all then + return t + end + end + end + return t + end +end + +local reloaded = false + +local function is_reloaded() + if not reloaded then + local data = names.data + if autoreload then + local c_status = serialize(resolvers.datastate()) + local f_status = serialize(data.datastate) + if c_status == f_status then + if trace_names then + report_names("font database has matching configuration and file hashes") + end + return + else + report_names("font database has mismatching configuration and file hashes") + end + else + report_names("font database is regenerated (controlled by directive 'fonts.autoreload')") + end + names.loaded = false + reloaded = true + logs.flush() + names.load(true) + end +end + +--[[ldx-- +

+<p>The resolver also checks if the cached names are loaded. Being clever
+here is for testing purposes only (it deals with names prefixed by an
+encoding name).</p>
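Lookups run on normalized names; a small sketch of that normalization (the
input string is illustrative):

    -- cleanname lowercases and strips everything that is not a letter or digit
    local condensed = names.cleanname("TeX Gyre Pagella") -- "texgyrepagella"
    -- the fuzzy lookup then scans the sorted keys with find(key,condensed)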

+--ldx]]-- + +local function fuzzy(mapping,sorted,name,sub) + local condensed = gsub(name,"[^%a%d]","") + for k=1,#sorted do + local v = sorted[k] + if find(v,condensed) then + return mapping[v], v + end + end +end + +-- we could cache a lookup .. maybe some day ... (only when auto loaded!) + +local function foundname(name,sub) -- sub is not used currently + local data = names.data + local mappings = data.mappings + local sorted_mappings = data.sorted_mappings + local fallbacks = data.fallbacks + local sorted_fallbacks = data.sorted_fallbacks + local list = filters.list + -- dilemma: we lookup in the order otf ttf ttc ... afm but now an otf fallback + -- can come after an afm match ... well, one should provide nice names anyway + -- and having two lists is not an option + for i=1,#list do + local l = list[i] + local found = mappings[l][name] + if found then + if trace_names then + report_names("resolved via direct name match: %a",name) + end + return found + end + end + for i=1,#list do + local l = list[i] + local found, fname = fuzzy(mappings[l],sorted_mappings[l],name,sub) + if found then + if trace_names then + report_names("resolved via fuzzy name match: %a onto %a",name,fname) + end + return found + end + end + for i=1,#list do + local l = list[i] + local found = fallbacks[l][name] + if found then + if trace_names then + report_names("resolved via direct fallback match: %a",name) + end + return found + end + end + for i=1,#list do + local l = list[i] + local found, fname = fuzzy(sorted_mappings[l],sorted_fallbacks[l],name,sub) + if found then + if trace_names then + report_names("resolved via fuzzy fallback match: %a onto %a",name,fname) + end + return found + end + end + if trace_names then + report_names("font with name %a cannot be found",name) + end +end + +function names.resolvedspecification(askedname,sub) + if askedname and askedname ~= "" and names.enabled then + askedname = cleanname(askedname) + names.load() + local found = foundname(askedname,sub) + if not found and is_reloaded() then + found = foundname(askedname,sub) + end + return found + end +end + +function names.resolve(askedname,sub) + local found = names.resolvedspecification(askedname,sub) + if found then + return found.filename, found.subfont and found.rawname + end +end + +-- function names.getfilename(askedname,suffix) -- last resort, strip funny chars +-- names.load() +-- local files = names.data.files +-- askedname = files and files[cleanfilename(askedname,suffix)] or "" +-- if askedname == "" then +-- return "" +-- else -- never entered +-- return resolvers.findbinfile(askedname,suffix) or "" +-- end +-- end + +function names.getfilename(askedname,suffix) -- last resort, strip funny chars + names.load() + local files = names.data.files + local cleanname = cleanfilename(askedname,suffix) + local found = files and files[cleanname] or "" + if found == "" and is_reloaded() then + files = names.data.files + found = files and files[cleanname] or "" + end + if found and found ~= "" then + return resolvers.findbinfile(found,suffix) or "" -- we still need to locate it + end +end + +-- specified search + +local function s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family) + if family then + for i=1,#family do + local f = family[i] + if f and weight == f.weight and style == f.style and width == f.width and variant == f.variant then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end +local function 
m_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,families,sorted,strictname) + for i=1,#sorted do + local k = sorted[i] + local family = families[k] + for i=1,#family do + local f = family[i] + if not done[f] and weight == f.weight and style == f.style and width == f.width and variant == f.variant and find(f.fontname,strictname) then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end + +local function s_collect_weight_style_width(found,done,all,weight,style,width,family) + if family then + for i=1,#family do + local f = family[i] + if f and weight == f.weight and style == f.style and width == f.width then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end +local function m_collect_weight_style_width(found,done,all,weight,style,width,families,sorted,strictname) + for i=1,#sorted do + local k = sorted[i] + local family = families[k] + for i=1,#family do + local f = family[i] + if not done[f] and weight == f.weight and style == f.style and width == f.width and find(f.fontname,strictname) then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end + +local function s_collect_weight_style(found,done,all,weight,style,family) + if family then + for i=1,#family do local f = family[i] + if f and weight == f.weight and style == f.style then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end +local function m_collect_weight_style(found,done,all,weight,style,families,sorted,strictname) + for i=1,#sorted do + local k = sorted[i] + local family = families[k] + for i=1,#family do + local f = family[i] + if not done[f] and weight == f.weight and style == f.style and find(f.fontname,strictname) then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end + +local function s_collect_style_width(found,done,all,style,width,family) + if family then + for i=1,#family do local f = family[i] + if f and style == f.style and width == f.width then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end +local function m_collect_style_width(found,done,all,style,width,families,sorted,strictname) + for i=1,#sorted do + local k = sorted[i] + local family = families[k] + for i=1,#family do + local f = family[i] + if not done[f] and style == f.style and width == f.width and find(f.fontname,strictname) then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end + +local function s_collect_weight(found,done,all,weight,family) + if family then + for i=1,#family do local f = family[i] + if f and weight == f.weight then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end +local function m_collect_weight(found,done,all,weight,families,sorted,strictname) + for i=1,#sorted do + local k = sorted[i] + local family = families[k] + for i=1,#family do + local f = family[i] + if not done[f] and weight == f.weight and find(f.fontname,strictname) then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end + +local function s_collect_style(found,done,all,style,family) + if family then + for i=1,#family do local f = family[i] + if f and style == f.style then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end +local function m_collect_style(found,done,all,style,families,sorted,strictname) + for i=1,#sorted do + local k = sorted[i] + local family = families[k] + 
for i=1,#family do + local f = family[i] + if not done[f] and style == f.style and find(f.fontname,strictname) then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end + +local function s_collect_width(found,done,all,width,family) + if family then + for i=1,#family do local f = family[i] + if f and width == f.width then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end +local function m_collect_width(found,done,all,width,families,sorted,strictname) + for i=1,#sorted do + local k = sorted[i] + local family = families[k] + for i=1,#family do + local f = family[i] + if not done[f] and width == f.width and find(f.fontname,strictname) then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end + +local function s_collect(found,done,all,family) + if family then + for i=1,#family do local f = family[i] + if f then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end +local function m_collect(found,done,all,families,sorted,strictname) + for i=1,#sorted do + local k = sorted[i] + local family = families[k] + for i=1,#family do + local f = family[i] + if not done[f] and find(f.fontname,strictname) then + found[#found+1], done[f] = f, true + if not all then return end + end + end + end +end + +local function collect(stage,found,done,name,weight,style,width,variant,all) + local data = names.data + local families = data.families + local sorted = data.sorted_families + local strictname = "^".. name -- to be checked + local family = families[name] + if trace_names then + report_names("resolving name %a, weight %a, style %a, width %a, variant %a",name,weight,style,width,variant) + end + if weight and weight ~= "" then + if style and style ~= "" then + if width and width ~= "" then + if variant and variant ~= "" then + if trace_names then + report_names("resolving stage %s, name %a, weight %a, style %a, width %a, variant %a",stage,name,weight,style,width,variant) + end + s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family) + m_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,families,sorted,strictname) + else + if trace_names then + report_names("resolving stage %s, name %a, weight %a, style %a, width %a",stage,name,weight,style,width) + end + s_collect_weight_style_width(found,done,all,weight,style,width,family) + m_collect_weight_style_width(found,done,all,weight,style,width,families,sorted,strictname) + end + else + if trace_names then + report_names("resolving stage %s, name %a, weight %a, style %a",stage,name,weight,style) + end + s_collect_weight_style(found,done,all,weight,style,family) + m_collect_weight_style(found,done,all,weight,style,families,sorted,strictname) + end + else + if trace_names then + report_names("resolving stage %s, name %a, weight %a",stage,name,weight) + end + s_collect_weight(found,done,all,weight,family) + m_collect_weight(found,done,all,weight,families,sorted,strictname) + end + elseif style and style ~= "" then + if width and width ~= "" then + if trace_names then + report_names("resolving stage %s, name %a, style %a, width %a",stage,name,style,width) + end + s_collect_style_width(found,done,all,style,width,family) + m_collect_style_width(found,done,all,style,width,families,sorted,strictname) + else + if trace_names then + report_names("resolving stage %s, name %a, style %a",stage,name,style) + end + s_collect_style(found,done,all,style,family) + 
m_collect_style(found,done,all,style,families,sorted,strictname) + end + elseif width and width ~= "" then + if trace_names then + report_names("resolving stage %s, name %a, width %a",stage,name,width) + end + s_collect_width(found,done,all,width,family) + m_collect_width(found,done,all,width,families,sorted,strictname) + else + if trace_names then + report_names("resolving stage %s, name %a",stage,name) + end + s_collect(found,done,all,family) + m_collect(found,done,all,families,sorted,strictname) + end +end + +local function heuristic(name,weight,style,width,variant,all) -- todo: fallbacks + local found, done = { }, { } +--~ print(name,weight,style,width,variant) + weight, style, width, variant = weight or "normal", style or "normal", width or "normal", variant or "normal" + name = cleanname(name) + collect(1,found,done,name,weight,style,width,variant,all) + -- still needed ? + if #found == 0 and variant ~= "normal" then -- not weight + variant = "normal" + collect(4,found,done,name,weight,style,width,variant,all) + end + if #found == 0 and width ~= "normal" then + width = "normal" + collect(2,found,done,name,weight,style,width,variant,all) + end + if #found == 0 and weight ~= "normal" then -- not style + weight = "normal" + collect(3,found,done,name,weight,style,width,variant,all) + end + if #found == 0 and style ~= "normal" then -- not weight + style = "normal" + collect(4,found,done,name,weight,style,width,variant,all) + end + -- + local nf = #found + if trace_names then + if nf then + local t = { } + for i=1,nf do + t[i] = formatters["%a"](found[i].fontname) + end + report_names("name %a resolved to %s instances: % t",name,nf,t) + else + report_names("name %a unresolved",name) + end + end + if all then + return nf > 0 and found + else + return found[1] + end +end + +function names.specification(askedname,weight,style,width,variant,reload,all) + if askedname and askedname ~= "" and names.enabled then + askedname = cleanname(askedname) -- or cleanname + names.load(reload) + local found = heuristic(askedname,weight,style,width,variant,all) + if not found and is_reloaded() then + found = heuristic(askedname,weight,style,width,variant,all) + if not filename then + found = foundname(askedname) -- old method + end + end + return found + end +end + +function names.collect(askedname,weight,style,width,variant,reload,all) + if askedname and askedname ~= "" and names.enabled then + askedname = cleanname(askedname) -- or cleanname + names.load(reload) + local list = heuristic(askedname,weight,style,width,variant,true) + if not list or #list == 0 and is_reloaded() then + list = heuristic(askedname,weight,style,width,variant,true) + end + return list + end +end + +function names.collectspec(askedname,reload,all) + local name, weight, style, width, variant = names.splitspec(askedname) + return names.collect(name,weight,style,width,variant,reload,all) +end + +function names.resolvespec(askedname,sub) -- redefined later + local found = names.specification(names.splitspec(askedname)) + if found then + return found.filename, found.subfont and found.rawname + end +end + +function names.collectfiles(askedname,reload) -- no all + if askedname and askedname ~= "" and names.enabled then + askedname = cleanname(askedname) -- or cleanname + names.load(reload) + local list = { } + local specifications = names.data.specifications + for i=1,#specifications do + local s = specifications[i] + if find(cleanname(basename(s.filename)),askedname) then + list[#list+1] = s + end + end + return list + end +end + +-- 
todo: +-- +-- blacklisted = { +-- ["cmr10.ttf"] = "completely messed up", +-- } + +function names.exists(name) + local found = false + local list = filters.list + for k=1,#list do + local v = list[k] + found = (findfile(name,v) or "") ~= "" + if found then + return found + end + end + return (findfile(name,"tfm") or "") ~= "" or (names.resolve(name) or "") ~= "" +end + +local lastlookups, lastpattern = { }, "" + +function names.lookup(pattern,name,reload) -- todo: find + if lastpattern ~= pattern then + names.load(reload) + local specifications = names.data.specifications + local families = names.data.families + local lookups = specifications + if name then + lookups = families[name] + elseif not find(pattern,"=") then + lookups = families[pattern] + end + if trace_names then + report_names("starting with %s lookups for %a",#lookups,pattern) + end + if lookups then + for key, value in gmatch(pattern,"([^=,]+)=([^=,]+)") do + local t, n = { }, 0 + if find(value,"*") then + value = string.topattern(value) + for i=1,#lookups do + local s = lookups[i] + if find(s[key],value) then + n = n + 1 + t[n] = lookups[i] + end + end + else + for i=1,#lookups do + local s = lookups[i] + if s[key] == value then + n = n + 1 + t[n] = lookups[i] + end + end + end + if trace_names then + report_names("%s matches for key %a with value %a",#t,key,value) + end + lookups = t + end + end + lastpattern = pattern + lastlookups = lookups or { } + end + return #lastlookups +end + +function names.getlookupkey(key,n) + local l = lastlookups[n or 1] + return (l and l[key]) or "" +end + +function names.noflookups() + return #lastlookups +end + +function names.getlookups(pattern,name,reload) + if pattern then + names.lookup(pattern,name,reload) + end + return lastlookups +end + +-- The following is new ... watch the overload! 
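A usage sketch of this registration interface (the values mirror the commented
table below; the filename only resolves when such a spec has been registered):

    names.register {
        name = "antykwapoltawskiego",
        list = {
            ["AntPoltLtCond-Regular.otf"] = {
                style  = "regular",
                weight = "light",
                width  = "condensed",
            },
        },
    }
    local found = names.registered("antykwapoltawskiego","light","normal","condensed")
    -- found.filename == "AntPoltLtCond-Regular.otf" when the lookup succeeds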
+ +local specifications = allocate() +names.specifications = specifications + +-- files = { +-- name = "antykwapoltawskiego", +-- list = { +-- ["AntPoltLtCond-Regular.otf"] = { +-- -- name = "antykwapoltawskiego", +-- style = "regular", +-- weight = "light", +-- width = "condensed", +-- }, +-- }, +-- } + +function names.register(files) + if files then + local list, commonname = files.list, files.name + if list then + local n, m = 0, 0 + for filename, filespec in next, list do + local name = lower(filespec.name or commonname) + if name and name ~= "" then + local style = normalized_styles [lower(filespec.style or "normal")] + local width = normalized_widths [lower(filespec.width or "normal")] + local weight = normalized_weights [lower(filespec.weight or "normal")] + local variant = normalized_variants[lower(filespec.variant or "normal")] + local weights = specifications[name ] if not weights then weights = { } specifications[name ] = weights end + local styles = weights [weight] if not styles then styles = { } weights [weight] = styles end + local widths = styles [style ] if not widths then widths = { } styles [style ] = widths end + local variants = widths [width ] if not variants then variants = { } widths [width ] = variants end + variants[variant] = filename + n = n + 1 + else + m = m + 1 + end + end + if trace_specifications then + report_names("%s filenames registered, %s filenames rejected",n,m) + end + end + end +end + +function names.registered(name,weight,style,width,variant) + local ok = specifications[name] + ok = ok and (ok[(weight and weight ~= "" and weight ) or "normal"] or ok.normal) + ok = ok and (ok[(style and style ~= "" and style ) or "normal"] or ok.normal) + ok = ok and (ok[(width and width ~= "" and width ) or "normal"] or ok.normal) + ok = ok and (ok[(variant and variant ~= "" and variant) or "normal"] or ok.normal) + -- + -- todo: same fallbacks as with database + -- + if ok then + return { + filename = ok, + subname = "", + -- rawname = nil, + } + end +end + +function names.resolvespec(askedname,sub) -- overloads previous definition + local name, weight, style, width, variant = names.splitspec(askedname) + if trace_specifications then + report_names("resolving specification: %a to name=%s, weight=%s, style=%s, width=%s, variant=%s",askedname,name,weight,style,width,variant) + end + local found = names.registered(name,weight,style,width,variant) + if found and found.filename then + if trace_specifications then + report_names("resolved by registered names: %a to %s",askedname,found.filename) + end + return found.filename, found.subname, found.rawname + else + found = names.specification(name,weight,style,width,variant) + if found and found.filename then + if trace_specifications then + report_names("resolved by font database: %a to %s",askedname,found.filename) + end + return found.filename, found.subfont and found.rawname + end + end + if trace_specifications then + report_names("unresolved: %s",askedname) + end +end diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua index 316b947a3..316e11a65 100644 --- a/tex/context/base/font-tfm.lua +++ b/tex/context/base/font-tfm.lua @@ -1,152 +1,152 @@ -if not modules then modules = { } end modules ['font-tfm'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next = next -local match = string.match - -local trace_defining = 
false trackers.register("fonts.defining", function(v) trace_defining = v end) -local trace_features = false trackers.register("tfm.features", function(v) trace_features = v end) - -local report_defining = logs.reporter("fonts","defining") -local report_tfm = logs.reporter("fonts","tfm loading") - -local findbinfile = resolvers.findbinfile - -local fonts = fonts -local handlers = fonts.handlers -local readers = fonts.readers -local constructors = fonts.constructors -local encodings = fonts.encodings - -local tfm = constructors.newhandler("tfm") - -local tfmfeatures = constructors.newfeatures("tfm") -local registertfmfeature = tfmfeatures.register - -constructors.resolvevirtualtoo = false -- wil be set in font-ctx.lua - -fonts.formats.tfm = "type1" -- we need to have at least a value here - ---[[ldx-- -

The next function encapsulates the standard loader as -supplied by TeX.

---ldx]]-- - --- this might change: not scaling and then apply features and do scaling in the --- usual way with dummy descriptions but on the other hand .. we no longer use --- tfm so why bother - --- ofm directive blocks local path search unless set; btw, in context we --- don't support ofm files anyway as this format is obsolete - -function tfm.setfeatures(tfmdata,features) - local okay = constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm) - if okay then - return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm) - else - return { } -- will become false - end -end - -local function read_from_tfm(specification) - local filename = specification.filename - local size = specification.size - if trace_defining then - report_defining("loading tfm file %a at size %s",filename,size) - end - local tfmdata = font.read_tfm(filename,size) -- not cached, fast enough - if tfmdata then - local features = specification.features and specification.features.normal or { } - local resources = tfmdata.resources or { } - local properties = tfmdata.properties or { } - local parameters = tfmdata.parameters or { } - local shared = tfmdata.shared or { } - properties.name = tfmdata.name - properties.fontname = tfmdata.fontname - properties.psname = tfmdata.psname - properties.filename = specification.filename - parameters.size = size - shared.rawdata = { } - shared.features = features - shared.processes = next(features) and tfm.setfeatures(tfmdata,features) or nil - -- - tfmdata.properties = properties - tfmdata.resources = resources - tfmdata.parameters = parameters - tfmdata.shared = shared - -- - parameters.slant = parameters.slant or parameters[1] or 0 - parameters.space = parameters.space or parameters[2] or 0 - parameters.space_stretch = parameters.space_stretch or parameters[3] or 0 - parameters.space_shrink = parameters.space_shrink or parameters[4] or 0 - parameters.x_height = parameters.x_height or parameters[5] or 0 - parameters.quad = parameters.quad or parameters[6] or 0 - parameters.extra_space = parameters.extra_space or parameters[7] or 0 - -- - constructors.enhanceparameters(parameters) -- official copies for us - -- - if constructors.resolvevirtualtoo then - fonts.loggers.register(tfmdata,file.suffix(filename),specification) -- strange, why here - local vfname = findbinfile(specification.name, 'ovf') - if vfname and vfname ~= "" then - local vfdata = font.read_vf(vfname,size) -- not cached, fast enough - if vfdata then - local chars = tfmdata.characters - for k,v in next, vfdata.characters do - chars[k].commands = v.commands - end - properties.virtualized = true - tfmdata.fonts = vfdata.fonts - end - end - end - -- - local allfeatures = tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) - if not features.encoding then - local encoding, filename = match(properties.filename,"^(.-)%-(.*)$") -- context: encoding-name.* - if filename and encoding and encodings.known[encoding] then - features.encoding = encoding - end - end - -- - return tfmdata - end -end - -local function check_tfm(specification,fullname) -- we could split up like afm/otf - local foundname = findbinfile(fullname, 'tfm') or "" - if foundname == "" then - foundname = findbinfile(fullname, 'ofm') or "" -- not needed in context - end - if foundname == "" then - foundname = fonts.names.getfilename(fullname,"tfm") or "" - end - if foundname ~= "" then - specification.filename = 
foundname - specification.format = "ofm" - return read_from_tfm(specification) - elseif trace_defining then - report_defining("loading tfm with name %a fails",specification.name) - end -end - -readers.check_tfm = check_tfm - -function readers.tfm(specification) - local fullname = specification.filename or "" - if fullname == "" then - local forced = specification.forced or "" - if forced ~= "" then - fullname = specification.name .. "." .. forced - else - fullname = specification.name - end - end - return check_tfm(specification,fullname) -end +if not modules then modules = { } end modules ['font-tfm'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next = next +local match = string.match + +local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) +local trace_features = false trackers.register("tfm.features", function(v) trace_features = v end) + +local report_defining = logs.reporter("fonts","defining") +local report_tfm = logs.reporter("fonts","tfm loading") + +local findbinfile = resolvers.findbinfile + +local fonts = fonts +local handlers = fonts.handlers +local readers = fonts.readers +local constructors = fonts.constructors +local encodings = fonts.encodings + +local tfm = constructors.newhandler("tfm") + +local tfmfeatures = constructors.newfeatures("tfm") +local registertfmfeature = tfmfeatures.register + +constructors.resolvevirtualtoo = false -- wil be set in font-ctx.lua + +fonts.formats.tfm = "type1" -- we need to have at least a value here + +--[[ldx-- +

The next function encapsulates the standard loader as +supplied by TeX.

+--ldx]]-- + +-- this might change: not scaling and then apply features and do scaling in the +-- usual way with dummy descriptions but on the other hand .. we no longer use +-- tfm so why bother + +-- ofm directive blocks local path search unless set; btw, in context we +-- don't support ofm files anyway as this format is obsolete + +function tfm.setfeatures(tfmdata,features) + local okay = constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm) + if okay then + return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm) + else + return { } -- will become false + end +end + +local function read_from_tfm(specification) + local filename = specification.filename + local size = specification.size + if trace_defining then + report_defining("loading tfm file %a at size %s",filename,size) + end + local tfmdata = font.read_tfm(filename,size) -- not cached, fast enough + if tfmdata then + local features = specification.features and specification.features.normal or { } + local resources = tfmdata.resources or { } + local properties = tfmdata.properties or { } + local parameters = tfmdata.parameters or { } + local shared = tfmdata.shared or { } + properties.name = tfmdata.name + properties.fontname = tfmdata.fontname + properties.psname = tfmdata.psname + properties.filename = specification.filename + parameters.size = size + shared.rawdata = { } + shared.features = features + shared.processes = next(features) and tfm.setfeatures(tfmdata,features) or nil + -- + tfmdata.properties = properties + tfmdata.resources = resources + tfmdata.parameters = parameters + tfmdata.shared = shared + -- + parameters.slant = parameters.slant or parameters[1] or 0 + parameters.space = parameters.space or parameters[2] or 0 + parameters.space_stretch = parameters.space_stretch or parameters[3] or 0 + parameters.space_shrink = parameters.space_shrink or parameters[4] or 0 + parameters.x_height = parameters.x_height or parameters[5] or 0 + parameters.quad = parameters.quad or parameters[6] or 0 + parameters.extra_space = parameters.extra_space or parameters[7] or 0 + -- + constructors.enhanceparameters(parameters) -- official copies for us + -- + if constructors.resolvevirtualtoo then + fonts.loggers.register(tfmdata,file.suffix(filename),specification) -- strange, why here + local vfname = findbinfile(specification.name, 'ovf') + if vfname and vfname ~= "" then + local vfdata = font.read_vf(vfname,size) -- not cached, fast enough + if vfdata then + local chars = tfmdata.characters + for k,v in next, vfdata.characters do + chars[k].commands = v.commands + end + properties.virtualized = true + tfmdata.fonts = vfdata.fonts + end + end + end + -- + local allfeatures = tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) + if not features.encoding then + local encoding, filename = match(properties.filename,"^(.-)%-(.*)$") -- context: encoding-name.* + if filename and encoding and encodings.known[encoding] then + features.encoding = encoding + end + end + -- + return tfmdata + end +end + +local function check_tfm(specification,fullname) -- we could split up like afm/otf + local foundname = findbinfile(fullname, 'tfm') or "" + if foundname == "" then + foundname = findbinfile(fullname, 'ofm') or "" -- not needed in context + end + if foundname == "" then + foundname = fonts.names.getfilename(fullname,"tfm") or "" + end + if foundname ~= "" then + specification.filename = 
foundname + specification.format = "ofm" + return read_from_tfm(specification) + elseif trace_defining then + report_defining("loading tfm with name %a fails",specification.name) + end +end + +readers.check_tfm = check_tfm + +function readers.tfm(specification) + local fullname = specification.filename or "" + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + fullname = specification.name .. "." .. forced + else + fullname = specification.name + end + end + return check_tfm(specification,fullname) +end diff --git a/tex/context/base/font-trt.lua b/tex/context/base/font-trt.lua index 6fc8028d1..d382e62d7 100644 --- a/tex/context/base/font-trt.lua +++ b/tex/context/base/font-trt.lua @@ -1,57 +1,57 @@ -if not modules then modules = { } end modules ['font-trt'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local rawget, dofile, next = rawget, dofile, next - ---[[ldx-- -

We provide a simple treatment mechanism (mostly because I want to demonstrate -something in a manual). It's one of the few places where an lfg file gets loaded -outside the goodies manager.

---ldx]]-- - -local treatments = utilities.storage.allocate() -fonts.treatments = treatments -local treatmentdata = { } -treatments.data = treatmentdata -treatments.filename = "treatments.lfg" - --- function treatments.load(name) --- local filename = resolvers.findfile(name) --- if filename and filename ~= "" then --- local goodies = dofile(filename) --- if goodies then --- local treatments = goodies.treatments --- if treatments then --- for name, data in next, treatments do --- treatmentdata[name] = data -- always wins --- end --- end --- end --- end --- end - -table.setmetatableindex(treatmentdata,function(t,k) - local files = resolvers.findfiles(treatments.filename) - if files then - for i=1,#files do - local goodies = dofile(files[i]) - if goodies then - local treatments = goodies.treatments - if treatments then - for name, data in next, treatments do - if not rawget(t,name) then - t[name] = data - end - end - end - end - end - end - table.setmetatableindex(treatmentdata,nil) - return treatmentdata[k] -end) +if not modules then modules = { } end modules ['font-trt'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local rawget, dofile, next = rawget, dofile, next + +--[[ldx-- +

We provide a simple treatment mechanism (mostly because I want to demonstrate +something in a manual). It's one of the few places where an lfg file gets loaded +outside the goodies manager.

+--ldx]]-- + +local treatments = utilities.storage.allocate() +fonts.treatments = treatments +local treatmentdata = { } +treatments.data = treatmentdata +treatments.filename = "treatments.lfg" + +-- function treatments.load(name) +-- local filename = resolvers.findfile(name) +-- if filename and filename ~= "" then +-- local goodies = dofile(filename) +-- if goodies then +-- local treatments = goodies.treatments +-- if treatments then +-- for name, data in next, treatments do +-- treatmentdata[name] = data -- always wins +-- end +-- end +-- end +-- end +-- end + +table.setmetatableindex(treatmentdata,function(t,k) + local files = resolvers.findfiles(treatments.filename) + if files then + for i=1,#files do + local goodies = dofile(files[i]) + if goodies then + local treatments = goodies.treatments + if treatments then + for name, data in next, treatments do + if not rawget(t,name) then + t[name] = data + end + end + end + end + end + end + table.setmetatableindex(treatmentdata,nil) + return treatmentdata[k] +end) diff --git a/tex/context/base/font-vf.lua b/tex/context/base/font-vf.lua index 1fe6dd71c..bc6ed400e 100644 --- a/tex/context/base/font-vf.lua +++ b/tex/context/base/font-vf.lua @@ -1,205 +1,205 @@ -if not modules then modules = { } end modules ['font-vf'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

This is very experimental code! Not yet adapted to recent changes. This will change.

---ldx]]-- - --- present in the backend but unspecified: --- --- vf.rule vf.special vf.right vf.push vf.down vf.char vf.node vf.fontid vf.pop vf.image vf.nop - -local next = next - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex -local fastcopy = table.fastcopy - -local fonts = fonts -local constructors = fonts.constructors -local vf = constructors.newhandler("vf") - --- general code - -function vf.find(name) - name = file.removesuffix(file.basename(name)) - if constructors.resolvevirtualtoo then - local format = fonts.loggers.format(name) - if format == 'tfm' or format == 'ofm' then - if trace_defining then - report_defining("locating vf for %a",name) - end - return findbinfile(name,"ovf") - else - if trace_defining then - report_defining("vf for %a is already taken care of",name) - end - return nil -- "" - end - else - if trace_defining then - report_defining("locating vf for %a",name) - end - return findbinfile(name,"ovf") - end -end - ---[[ldx-- -

We overload the reader.

---ldx]]-- - -callbacks.register('find_vf_file', vf.find, "locating virtual fonts, insofar needed") -- not that relevant any more - --- specific code (will move to other module) - -local definers = fonts.definers -local methods = definers.methods - -local variants = allocate() -local combinations = { } -local combiner = { } -local whatever = allocate() -local helpers = allocate() -local predefined = allocate { - dummy = { "comment" }, - push = { "push" }, - pop = { "pop" }, -} - -methods.variants = variants -- todo .. wrong namespace -vf.combinations = combinations -vf.combiner = combiner -vf.whatever = whatever -vf.helpers = helpers -vf.predefined = predefined - -setmetatableindex(whatever, function(t,k) local v = { } t[k] = v return v end) - -local function checkparameters(g,f) - if f and g and not g.parameters and #g.fonts > 0 then - local p = { } - for k,v in next, f.parameters do - p[k] = v - end - g.parameters = p - setmetatable(p, getmetatable(f.parameters)) - end -end - -function methods.install(tag, rules) - vf.combinations[tag] = rules - variants[tag] = function(specification) - return vf.combine(specification,tag) - end -end - -local function combine_load(g,name) - return constructors.readanddefine(name or g.specification.name,g.specification.size) -end - -local function combine_assign(g, name, from, to, start, force) - local f, id = combine_load(g,name) - if f and id then - -- optimize for whole range, then just g = f - if not from then from, to = 0, 0xFF00 end - if not to then to = from end - if not start then start = from end - local fc, gc = f.characters, g.characters - local fd, gd = f.descriptions, g.descriptions - local hn = #g.fonts+1 - g.fonts[hn] = { id = id } -- no need to be sparse - for i=from,to do - if fc[i] and (force or not gc[i]) then - gc[i] = fastcopy(fc[i],true) -- can be optimized - gc[i].commands = { { 'slot', hn, start } } - gd[i] = fd[i] - end - start = start + 1 - end - checkparameters(g,f) - end -end - -local function combine_process(g,list) - if list then - for _,v in next, list do - (combiner.commands[v[1]] or nop)(g,v) - end - end -end - -local function combine_names(g,name,force) - local f, id = constructors.readanddefine(name,g.specification.size) - if f and id then - local fc, gc = f.characters, g.characters - local fd, gd = f.descriptions, g.descriptions - g.fonts[#g.fonts+1] = { id = id } -- no need to be sparse - local hn = #g.fonts - for k, v in next, fc do - if force or not gc[k] then - gc[k] = fastcopy(v,true) - gc[k].commands = { { 'slot', hn, k } } - gd[i] = fd[i] - end - end - checkparameters(g,f) - end -end - -local combine_feature = function(g,v) - local key, value = v[2], v[3] - if key then - if value == nil then - value = true - end - local specification = g.specification - if specification then - local normalfeatures = specification.features.normal - if normalfeatures then - normalfeatures[key] = value -- otf? 
- end - end - end -end - ---~ combiner.load = combine_load ---~ combiner.assign = combine_assign ---~ combiner.process = combine_process ---~ combiner.names = combine_names ---~ combiner.feature = combine_feature - -combiner.commands = allocate { - ["initialize"] = function(g,v) combine_assign (g,g.properties.name) end, - ["include-method"] = function(g,v) combine_process (g,combinations[v[2]]) end, -- name - -- ["copy-parameters"] = function(g,v) combine_parameters(g,v[2]) end, -- name - ["copy-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],true) end, -- name, from-start, from-end, to-start - ["copy-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],true) end, -- name, from, to - ["fallback-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],false) end, -- name, from-start, from-end, to-start - ["fallback-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],false) end, -- name, from, to - ["copy-names"] = function(g,v) combine_names (g,v[2],true) end, - ["fallback-names"] = function(g,v) combine_names (g,v[2],false) end, - ["feature"] = combine_feature, -} - -function vf.combine(specification,tag) - local g = { - name = specification.name, - properties = { - virtualized = true, - }, - fonts = { - }, - characters = { - }, - descriptions = { - }, - specification = fastcopy(specification), - } - combine_process(g,combinations[tag]) - return g -end +if not modules then modules = { } end modules ['font-vf'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

This is very experimental code! Not yet adapted to recent changes. This will change.

+--ldx]]-- + +-- present in the backend but unspecified: +-- +-- vf.rule vf.special vf.right vf.push vf.down vf.char vf.node vf.fontid vf.pop vf.image vf.nop + +local next = next + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex +local fastcopy = table.fastcopy + +local fonts = fonts +local constructors = fonts.constructors +local vf = constructors.newhandler("vf") + +-- general code + +function vf.find(name) + name = file.removesuffix(file.basename(name)) + if constructors.resolvevirtualtoo then + local format = fonts.loggers.format(name) + if format == 'tfm' or format == 'ofm' then + if trace_defining then + report_defining("locating vf for %a",name) + end + return findbinfile(name,"ovf") + else + if trace_defining then + report_defining("vf for %a is already taken care of",name) + end + return nil -- "" + end + else + if trace_defining then + report_defining("locating vf for %a",name) + end + return findbinfile(name,"ovf") + end +end + +--[[ldx-- +

We overload the reader.

+--ldx]]-- + +callbacks.register('find_vf_file', vf.find, "locating virtual fonts, insofar needed") -- not that relevant any more + +-- specific code (will move to other module) + +local definers = fonts.definers +local methods = definers.methods + +local variants = allocate() +local combinations = { } +local combiner = { } +local whatever = allocate() +local helpers = allocate() +local predefined = allocate { + dummy = { "comment" }, + push = { "push" }, + pop = { "pop" }, +} + +methods.variants = variants -- todo .. wrong namespace +vf.combinations = combinations +vf.combiner = combiner +vf.whatever = whatever +vf.helpers = helpers +vf.predefined = predefined + +setmetatableindex(whatever, function(t,k) local v = { } t[k] = v return v end) + +local function checkparameters(g,f) + if f and g and not g.parameters and #g.fonts > 0 then + local p = { } + for k,v in next, f.parameters do + p[k] = v + end + g.parameters = p + setmetatable(p, getmetatable(f.parameters)) + end +end + +function methods.install(tag, rules) + vf.combinations[tag] = rules + variants[tag] = function(specification) + return vf.combine(specification,tag) + end +end + +local function combine_load(g,name) + return constructors.readanddefine(name or g.specification.name,g.specification.size) +end + +local function combine_assign(g, name, from, to, start, force) + local f, id = combine_load(g,name) + if f and id then + -- optimize for whole range, then just g = f + if not from then from, to = 0, 0xFF00 end + if not to then to = from end + if not start then start = from end + local fc, gc = f.characters, g.characters + local fd, gd = f.descriptions, g.descriptions + local hn = #g.fonts+1 + g.fonts[hn] = { id = id } -- no need to be sparse + for i=from,to do + if fc[i] and (force or not gc[i]) then + gc[i] = fastcopy(fc[i],true) -- can be optimized + gc[i].commands = { { 'slot', hn, start } } + gd[i] = fd[i] + end + start = start + 1 + end + checkparameters(g,f) + end +end + +local function combine_process(g,list) + if list then + for _,v in next, list do + (combiner.commands[v[1]] or nop)(g,v) + end + end +end + +local function combine_names(g,name,force) + local f, id = constructors.readanddefine(name,g.specification.size) + if f and id then + local fc, gc = f.characters, g.characters + local fd, gd = f.descriptions, g.descriptions + g.fonts[#g.fonts+1] = { id = id } -- no need to be sparse + local hn = #g.fonts + for k, v in next, fc do + if force or not gc[k] then + gc[k] = fastcopy(v,true) + gc[k].commands = { { 'slot', hn, k } } + gd[i] = fd[i] + end + end + checkparameters(g,f) + end +end + +local combine_feature = function(g,v) + local key, value = v[2], v[3] + if key then + if value == nil then + value = true + end + local specification = g.specification + if specification then + local normalfeatures = specification.features.normal + if normalfeatures then + normalfeatures[key] = value -- otf? 
+ end + end + end +end + +--~ combiner.load = combine_load +--~ combiner.assign = combine_assign +--~ combiner.process = combine_process +--~ combiner.names = combine_names +--~ combiner.feature = combine_feature + +combiner.commands = allocate { + ["initialize"] = function(g,v) combine_assign (g,g.properties.name) end, + ["include-method"] = function(g,v) combine_process (g,combinations[v[2]]) end, -- name + -- ["copy-parameters"] = function(g,v) combine_parameters(g,v[2]) end, -- name + ["copy-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],true) end, -- name, from-start, from-end, to-start + ["copy-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],true) end, -- name, from, to + ["fallback-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],false) end, -- name, from-start, from-end, to-start + ["fallback-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],false) end, -- name, from, to + ["copy-names"] = function(g,v) combine_names (g,v[2],true) end, + ["fallback-names"] = function(g,v) combine_names (g,v[2],false) end, + ["feature"] = combine_feature, +} + +function vf.combine(specification,tag) + local g = { + name = specification.name, + properties = { + virtualized = true, + }, + fonts = { + }, + characters = { + }, + descriptions = { + }, + specification = fastcopy(specification), + } + combine_process(g,combinations[tag]) + return g +end diff --git a/tex/context/base/grph-epd.lua b/tex/context/base/grph-epd.lua index 4f9d46097..49022e464 100644 --- a/tex/context/base/grph-epd.lua +++ b/tex/context/base/grph-epd.lua @@ -1,25 +1,25 @@ -if not modules then modules = { } end modules ['grph-epd'] = { - version = 1.001, - comment = "companion to grph-epd.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local variables = interfaces.variables -local settings_to_hash = utilities.parsers.settings_to_hash - --- todo: page, name, file, url - -local codeinjections = backends.codeinjections - -function figures.mergegoodies(optionlist) - local options = settings_to_hash(optionlist) - local all = options[variables.all] or options[variables.yes] - if all or options[variables.reference] then - codeinjections.mergereferences() - end - if all or options[variables.layer] then - codeinjections.mergeviewerlayers() - end -end +if not modules then modules = { } end modules ['grph-epd'] = { + version = 1.001, + comment = "companion to grph-epd.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local variables = interfaces.variables +local settings_to_hash = utilities.parsers.settings_to_hash + +-- todo: page, name, file, url + +local codeinjections = backends.codeinjections + +function figures.mergegoodies(optionlist) + local options = settings_to_hash(optionlist) + local all = options[variables.all] or options[variables.yes] + if all or options[variables.reference] then + codeinjections.mergereferences() + end + if all or options[variables.layer] then + codeinjections.mergeviewerlayers() + end +end diff --git a/tex/context/base/grph-fil.lua b/tex/context/base/grph-fil.lua index 3449f1779..9ee90b07a 100644 --- a/tex/context/base/grph-fil.lua +++ b/tex/context/base/grph-fil.lua @@ -1,71 +1,71 @@ -if not modules then modules = { } end modules ['grph-fil'] = { - version = 1.001, - comment = "companion to grph-fig.mkiv", - author = "Hans Hagen, 
PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local type = type - -local trace_run = false trackers.register("graphic.runfile",function(v) trace_run = v end) -local report_run = logs.reporter("graphics","run") - --- Historically running files is part of graphics processing, so this is why it --- sits here but is part of the job namespace. - -local allocate = utilities.storage.allocate - -local collected = allocate() -local tobesaved = allocate() - -local jobfiles = { - collected = collected, - tobesaved = tobesaved, - forcerun = false, -- maybe a directive some day -} - -job.files = jobfiles - -local function initializer() - tobesaved = jobfiles.tobesaved - collected = jobfiles.collected -end - -job.register('job.files.collected', tobesaved, initializer) - -function jobfiles.run(name,action) - local oldchecksum = collected[name] - local newchecksum = file.checksum(name) - if jobfiles.forcerun or not oldchecksum or oldchecksum ~= newchecksum then - if trace_run then - report_run("processing file, changes in %a, processing forced",name) - end - local ta = type(action) - if ta == "function" then - action(name) - elseif ta == "string" and action ~= "" then - os.execute(action) - else - report_run("processing file, no action given for processing %a",name) - end - elseif trace_run then - report_run("processing file, no changes in %a, not processed",name) - end - tobesaved[name] = newchecksum -end - --- - -function jobfiles.context(name,options) - if type(name) == "table" then - local result = { } - for i=1,#name do - result[#result+1] = jobfiles.context(name[i],options) - end - return result - else - jobfiles.run(name,"context ".. (options or "") .. " " .. name) - return file.replacesuffix(name,"pdf") - end -end +if not modules then modules = { } end modules ['grph-fil'] = { + version = 1.001, + comment = "companion to grph-fig.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local type = type + +local trace_run = false trackers.register("graphic.runfile",function(v) trace_run = v end) +local report_run = logs.reporter("graphics","run") + +-- Historically running files is part of graphics processing, so this is why it +-- sits here but is part of the job namespace. 
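-- A minimal usage sketch of the run-when-changed mechanism shown above; it is
-- not part of the patch and assumes the ConTeXt Lua environment. The filenames
-- and external commands are made up for illustration.

local jobfiles = job.files

-- the action only runs when the checksum of the file differs from the one
-- remembered from the previous run (or when jobfiles.forcerun is set)
jobfiles.run("diagram.mp","mpost diagram.mp")

-- the action can also be a Lua function that receives the filename
jobfiles.run("data.csv",function(name)
    -- regenerate whatever depends on 'name' here
end)

-- run "context" on a file (only when it changed) and get the resulting pdf name
local pdfname = jobfiles.context("extra-chapter.tex","--once")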
+ +local allocate = utilities.storage.allocate + +local collected = allocate() +local tobesaved = allocate() + +local jobfiles = { + collected = collected, + tobesaved = tobesaved, + forcerun = false, -- maybe a directive some day +} + +job.files = jobfiles + +local function initializer() + tobesaved = jobfiles.tobesaved + collected = jobfiles.collected +end + +job.register('job.files.collected', tobesaved, initializer) + +function jobfiles.run(name,action) + local oldchecksum = collected[name] + local newchecksum = file.checksum(name) + if jobfiles.forcerun or not oldchecksum or oldchecksum ~= newchecksum then + if trace_run then + report_run("processing file, changes in %a, processing forced",name) + end + local ta = type(action) + if ta == "function" then + action(name) + elseif ta == "string" and action ~= "" then + os.execute(action) + else + report_run("processing file, no action given for processing %a",name) + end + elseif trace_run then + report_run("processing file, no changes in %a, not processed",name) + end + tobesaved[name] = newchecksum +end + +-- + +function jobfiles.context(name,options) + if type(name) == "table" then + local result = { } + for i=1,#name do + result[#result+1] = jobfiles.context(name[i],options) + end + return result + else + jobfiles.run(name,"context ".. (options or "") .. " " .. name) + return file.replacesuffix(name,"pdf") + end +end diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua index 9603419ae..ae4d5642d 100644 --- a/tex/context/base/grph-inc.lua +++ b/tex/context/base/grph-inc.lua @@ -1,1609 +1,1609 @@ -if not modules then modules = { } end modules ['grph-inc'] = { - version = 1.001, - comment = "companion to grph-inc.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: empty filename or only suffix always false (not found) --- lowercase types --- mps tex tmp svg --- partly qualified --- dimensions --- use metatables --- figures.boxnumber can go as we now can use names --- avoid push --- move some to command namespace - ---[[ -The ConTeXt figure inclusion mechanisms are among the oldest code -in ConTeXt and evolved into a complex whole. One reason is that we -deal with backend in an abstract way. What complicates matters is -that we deal with internal graphics as well: TeX code, MetaPost code, -etc. Later on figure databases were introduced, which resulted in -a plug in model for locating images. On top of that runs a conversion -mechanism (with caching) and resource logging. - -Porting that to Lua is not that trivial because quite some -status information is kept between al these stages. Of course, image -reuse also has some price, and so I decided to implement the graphics -inclusion in several layers: detection, loading, inclusion, etc. - -Object sharing and scaling can happen at each stage, depending on the -way the resource is dealt with. - -The TeX-Lua mix is suboptimal. This has to do with the fact that we cannot -run TeX code from within Lua. Some more functionality will move to Lua. 
-]]-- - -local format, lower, find, match, gsub, gmatch = string.format, string.lower, string.find, string.match, string.gsub, string.gmatch -local texbox = tex.box -local contains = table.contains -local concat, insert, remove = table.concat, table.insert, table.remove -local todimen = string.todimen -local collapsepath = file.collapsepath -local formatters = string.formatters -local longtostring = string.longtostring -local expandfilename = dir.expandname - -local P, lpegmatch = lpeg.P, lpeg.match - -local settings_to_array = utilities.parsers.settings_to_array -local settings_to_hash = utilities.parsers.settings_to_hash -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex -local replacetemplate = utilities.templates.replace - -local variables = interfaces.variables -local codeinjections = backends.codeinjections -local nodeinjections = backends.nodeinjections - -local trace_figures = false trackers.register("graphics.locating", function(v) trace_figures = v end) -local trace_bases = false trackers.register("graphics.bases", function(v) trace_bases = v end) -local trace_programs = false trackers.register("graphics.programs", function(v) trace_programs = v end) -local trace_conversion = false trackers.register("graphics.conversion", function(v) trace_conversion = v end) -local trace_inclusion = false trackers.register("graphics.inclusion", function(v) trace_inclusion = v end) - -local report_inclusion = logs.reporter("graphics","inclusion") - -local context, img = context, img - -local f_hash_part = formatters["%s->%s->%s"] -local f_hash_full = formatters["%s->%s->%s->%s->%s->%s->%s"] - -local v_yes = variables.yes -local v_low = variables.low -local v_medium = variables.medium -local v_high = variables.high -local v_global = variables["global"] -local v_local = variables["local"] -local v_default = variables.default - -local maxdimen = 2^30-1 - -function img.check(figure) - if figure then - local width = figure.width - local height = figure.height - if height > width then - if height > maxdimen then - figure.height = maxdimen - figure.width = width * maxdimen/height - report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"height") - end - elseif width > maxdimen then - figure.width = maxdimen - figure.height = height * maxdimen/width - report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"width") - end - return figure - end -end - ---- some extra img functions --- can become luat-img.lua - -local imgkeys = img.keys() - -function img.totable(imgtable) - local result = { } - for k=1,#imgkeys do - local key = imgkeys[k] - result[key] = imgtable[key] - end - return result -end - -function img.serialize(i,...) - return table.serialize(img.totable(i),...) -end - -function img.print(i,...) - return table.print(img.totable(i),...) 
-end - -function img.clone(i,data) - i.width = data.width or i.width - i.height = data.height or i.height - -- attr etc - return i -end - -local validsizes = table.tohash(img.boxes()) -local validtypes = table.tohash(img.types()) - -function img.checksize(size) - if size then - size = gsub(size,"box","") - return validsizes[size] and size or "crop" - else - return "crop" - end -end - -local indexed = { } - -function img.ofindex(n) - return indexed[n] -end - ---- we can consider an grph-ini file - -figures = figures or { } -local figures = figures - -figures.boxnumber = figures.boxnumber or 0 -figures.defaultsearch = true -figures.defaultwidth = 0 -figures.defaultheight = 0 -figures.defaultdepth = 0 -figures.nofprocessed = 0 -figures.preferquality = true -- quality over location - -local figures_loaded = allocate() figures.loaded = figures_loaded -local figures_used = allocate() figures.used = figures_used -local figures_found = allocate() figures.found = figures_found -local figures_suffixes = allocate() figures.suffixes = figures_suffixes -local figures_patterns = allocate() figures.patterns = figures_patterns -local figures_resources = allocate() figures.resources = figures_resources - -local existers = allocate() figures.existers = existers -local checkers = allocate() figures.checkers = checkers -local includers = allocate() figures.includers = includers -local converters = allocate() figures.converters = converters -local identifiers = allocate() figures.identifiers = identifiers -local programs = allocate() figures.programs = programs - -local defaultformat = "pdf" -local defaultprefix = "m_k_i_v_" - -figures.localpaths = allocate { - ".", "..", "../.." -} - -figures.cachepaths = allocate { - prefix = "", - path = ".", - subpath = ".", -} - -local figure_paths = allocate(table.copy(figures.localpaths)) -figures.paths = figure_paths - -local figures_order = allocate { - "pdf", "mps", "jpg", "png", "jp2", "jbig", "svg", "eps", "tif", "gif", "mov", "buffer", "tex", "cld", "auto", -} - -local figures_formats = allocate { -- magic and order will move here - ["pdf"] = { list = { "pdf" } }, - ["mps"] = { patterns = { "mps", "%d+" } }, - ["jpg"] = { list = { "jpg", "jpeg" } }, - ["png"] = { list = { "png" } }, - ["jp2"] = { list = { "jp2" } }, - ["jbig"] = { list = { "jbig", "jbig2", "jb2" } }, - ["svg"] = { list = { "svg", "svgz" } }, - ["eps"] = { list = { "eps", "ai" } }, - ["gif"] = { list = { "gif" } }, - ["tif"] = { list = { "tif", "tiff" } }, - ["mov"] = { list = { "mov", "flv", "mp4" } }, -- "avi" is not supported - ["buffer"] = { list = { "tmp", "buffer", "buf" } }, - ["tex"] = { list = { "tex" } }, - ["cld"] = { list = { "cld" } }, - ["auto"] = { list = { "auto" } }, -} - -local figures_magics = allocate { - { format = "png", pattern = P("\137PNG\013\010\026\010") }, -- 89 50 4E 47 0D 0A 1A 0A, - { format = "jpg", pattern = P("\255\216\255") }, -- FF D8 FF - { format = "jp2", pattern = P("\000\000\000\012\106\080\032\032\013\010"), }, -- 00 00 00 0C 6A 50 20 20 0D 0A }, - { format = "gif", pattern = P("GIF") }, - { format = "pdf", pattern = (1 - P("%PDF"))^0 * P("%PDF") }, -} - -figures.formats = figures_formats -- frozen -figures.magics = figures_magics -- frozen -figures.order = figures_order -- frozen - --- We can set the order but only indirectly so that we can check for support. 
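-- A minimal usage sketch of the format tables above; it is not part of the
-- patch and uses figures.setorder and figures.registersuffix as defined below.
-- The "jpeg2000" suffix is made up for illustration.

-- prefer bitmap formats over pdf when a bare name like "cow" is resolved;
-- entries that are not known formats are silently dropped from the list
figures.setorder("png,jpg,pdf")

-- treat files with the suffix "jpeg2000" as jp2 graphics
figures.registersuffix("jpeg2000","jp2")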
- -function figures.setorder(list) -- can be table or string - if type(list) == "string" then - list = settings_to_array(list) - end - if list and #list > 0 then - figures_order = allocate() - figures.order = figures_order - local done = { } -- just to be sure in case the list is generated - for i=1,#list do - local l = lower(list[i]) - if figures_formats[l] and not done[l] then - figures_order[#figures_order+1] = l - done[l] = true - end - end - report_inclusion("lookup order % a",figures_order) - else - -- invalid list - end -end - -function figures.guess(filename) - local f = io.open(filename,'rb') - if f then - local str = f:read(100) - f:close() - if str then - for i=1,#figures_magics do - local pattern = figures_magics[i] - if lpegmatch(pattern.pattern,str) then - local format = pattern.format - if trace_figures then - report_inclusion("file %a has format %a",filename,format) - end - return format - end - end - end - end -end - -local function setlookups() -- tobe redone .. just set locals - figures_suffixes = allocate() - figures_patterns = allocate() - for _, format in next, figures_order do - local data = figures_formats[format] - local list = data.list - if list then - for i=1,#list do - figures_suffixes[list[i]] = format -- hash - end - else - figures_suffixes[format] = format - end - local patterns = data.patterns - if patterns then - for i=1,#patterns do - figures_patterns[#figures_patterns+1] = { patterns[i], format } -- array - end - end - end - figures.suffixes = figures_suffixes - figures.patterns = figures_patterns -end - -setlookups() - -figures.setlookups = setlookups - -function figures.registerresource(t) - local n = #figures_resources + 1 - figures_resources[n] = t - return n -end - -local function register(tag,target,what) - local data = figures_formats[target] -- resolver etc - if not data then - data = { } - figures_formats[target] = data - end - local d = data[tag] -- list or pattern - if d and not contains(d,what) then - d[#d+1] = what -- suffix or patternspec - else - data[tag] = { what } - end - if not contains(figures_order,target) then - figures_order[#figures_order+1] = target - end - setlookups() -end - -function figures.registersuffix (suffix, target) register('list', target,suffix ) end -function figures.registerpattern(pattern,target) register('pattern',target,pattern) end - -local last_locationset = last_locationset or nil -local last_pathlist = last_pathlist or nil - -function figures.setpaths(locationset,pathlist) - if last_locationset == locationset and last_pathlist == pathlist then - -- this function can be called each graphic so we provide this optimization - return - end - local t, h = figure_paths, settings_to_hash(locationset) - if last_locationset ~= locationset then - -- change == reset (actually, a 'reset' would indeed reset - if h[v_local] then - t = table.fastcopy(figures.localpaths or { }) - else - t = { } - end - figures.defaultsearch = h[v_default] - last_locationset = locationset - end - if h[v_global] then - local list = settings_to_array(pathlist) - for i=1,#list do - local s = list[i] - if not contains(t,s) then - t[#t+1] = s - end - end - end - figure_paths = t - last_pathlist = pathlist - figures.paths = figure_paths - if trace_figures then - report_inclusion("using locations %a",last_locationset) - report_inclusion("using paths % a",figure_paths) - end -end - --- check conversions and handle it here - -function figures.hash(data) - local status = data and data.status - return (status and status.hash or tostring(status.private)) 
or "nohash" -- the -end - --- interfacing to tex - -local function new() -- we could use metatables status -> used -> request but it needs testing - local request = { - name = false, - label = false, - format = false, - page = false, - width = false, - height = false, - preview = false, - ["repeat"] = false, - controls = false, - display = false, - mask = false, - conversion = false, - resolution = false, - cache = false, - prefix = false, - size = false, - } - local used = { - fullname = false, - format = false, - name = false, - path = false, - suffix = false, - width = false, - height = false, - } - local status = { - status = 0, - converted = false, - cached = false, - fullname = false, - format = false, - } - -- this needs checking because we might check for nil, the test case - -- is getfiguredimensions which then should return ~= 0 - -- setmetatableindex(status, used) - -- setmetatableindex(used, request) - return { - request = request, - used = used, - status = status, - } -end - --- use table.insert|remove - -local lastfiguredata = nil -- will be topofstack or last so no { } (else problems with getfiguredimensions) -local callstack = { } - -function figures.initialize(request) - local figuredata = new() - if request then - -- request.width/height are strings and are only used when no natural dimensions - -- can be determined; at some point the handlers might set them to numbers instead - local w = tonumber(request.width) or 0 - local h = tonumber(request.height) or 0 - request.width = w > 0 and w or nil - request.height = h > 0 and h or nil - -- - request.page = math.max(tonumber(request.page) or 1,1) - request.size = img.checksize(request.size) - request.object = request.object == v_yes - request["repeat"] = request["repeat"] == v_yes - request.preview = request.preview == v_yes - request.cache = request.cache ~= "" and request.cache - request.prefix = request.prefix ~= "" and request.prefix - request.format = request.format ~= "" and request.format - table.merge(figuredata.request,request) - end - return figuredata -end - -function figures.push(request) - statistics.starttiming(figures) - local figuredata = figures.initialize(request) - insert(callstack,figuredata) - lastfiguredata = figuredata - return figuredata -end - -function figures.pop() - lastfiguredata = remove(callstack) or lastfiguredata - statistics.stoptiming(figures) -end - -function figures.current() - return callstack[#callstack] or lastfiguredata -end - -local function get(category,tag,default) - local value = lastfiguredata and lastfiguredata[category] - value = value and value[tag] - if not value or value == "" or value == true then - return default or "" - else - return value - end -end - -figures.get = get - -function commands.figurevariable(category,tag,default) - context(get(category,tag,default)) -end - -function commands.figurestatus (tag,default) context(get("status", tag,default)) end -function commands.figurerequest(tag,default) context(get("request",tag,default)) end -function commands.figureused (tag,default) context(get("used", tag,default)) end - -function commands.figurefilepath() context(file.dirname (get("used","fullname"))) end -function commands.figurefilename() context(file.nameonly(get("used","fullname"))) end -function commands.figurefiletype() context(file.extname (get("used","fullname"))) end - --- todo: local path or cache path - -local function forbiddenname(filename) - if not filename or filename == "" then - return false - end - local expandedfullname = collapsepath(filename,true) - 
local expandedinputname = collapsepath(file.addsuffix(environment.jobfilename,environment.jobfilesuffix),true) - if expandedfullname == expandedinputname then - report_inclusion("skipping graphic with same name as input filename %a, enforce suffix",expandedinputname) - return true - end - local expandedoutputname = collapsepath(codeinjections.getoutputfilename(),true) - if expandedfullname == expandedoutputname then - report_inclusion("skipping graphic with same name as output filename %a, enforce suffix",expandedoutputname) - return true - end -end - -local function register(askedname,specification) - if not specification then - specification = { } - elseif forbiddenname(specification.fullname) then - specification = { } - else - local format = specification.format - if format then - local conversion = specification.conversion - local resolution = specification.resolution - if conversion == "" then - conversion = nil - end - if resolution == "" then - resolution = nil - end - local newformat = conversion - if not newformat or newformat == "" then - newformat = defaultformat - end - if trace_conversion then - report_inclusion("checking conversion of %a, fullname %a, old format %a, new format %a, conversion %a, resolution %a", - askedname,specification.fullname,format,newformat,conversion or "default",resolution or "default") - end - -- quick hack - local converter = (newformat ~= format or resolution) and converters[format] - if converter then - if converter[newformat] then - converter = converter[newformat] - else - newformat = defaultformat - if converter[newformat] then - converter = converter[newformat] - else - converter = nil - newformat = defaultformat - end - end - elseif trace_conversion then - report_inclusion("no converter for %a to %a",format,newformat) - end - if converter then - local oldname = specification.fullname - local newpath = file.dirname(oldname) - local oldbase = file.basename(oldname) - -- - -- problem: we can have weird filenames, like a.b.c (no suffix) and a.b.c.gif - -- so we cannot safely remove a suffix (unless we do that for known suffixes) - -- - -- local newbase = file.removesuffix(oldbase) -- assumes a known suffix - -- - -- so we now have (also see *): - -- - local newbase = oldbase - -- - local fc = specification.cache or figures.cachepaths.path - if fc and fc ~= "" and fc ~= "." then - newpath = fc - else - newbase = defaultprefix .. newbase - end - if not file.is_writable(newpath) then - if trace_conversion then - report_inclusion("path %a is not writable, forcing conversion path %a",newpath,".") - end - newpath = "." - end - local subpath = specification.subpath or figures.cachepaths.subpath - if subpath and subpath ~= "" and subpath ~= "." then - newpath = newpath .. "/" .. subpath - end - local prefix = specification.prefix or figures.cachepaths.prefix - if prefix and prefix ~= "" then - newbase = prefix .. newbase - end - if resolution and resolution ~= "" then -- the order might change - newbase = newbase .. "_" .. resolution - end - -- - -- see *, we had: - -- - -- local newbase = file.addsuffix(newbase,newformat) - -- - -- but now have (result of Aditya's web image testing): - -- - -- as a side effect we can now have multiple fetches with different - -- original figures_formats, not that it matters much (apart from older conversions - -- sticking around) - -- - local newbase = newbase .. "." .. 
newformat - -- - local newname = file.join(newpath,newbase) - dir.makedirs(newpath) - oldname = collapsepath(oldname) - newname = collapsepath(newname) - local oldtime = lfs.attributes(oldname,'modification') or 0 - local newtime = lfs.attributes(newname,'modification') or 0 - if newtime == 0 or oldtime > newtime then - if trace_conversion then - report_inclusion("converting %a (%a) from %a to %a",askedname,oldname,format,newformat) - end - converter(oldname,newname,resolution or "") - else - if trace_conversion then - report_inclusion("no need to convert %a (%a) from %a to %a",askedname,oldname,format,newformat) - end - end - if io.exists(newname) and io.size(newname) > 0 then - specification.foundname = oldname - specification.fullname = newname - specification.prefix = prefix - specification.subpath = subpath - specification.converted = true - format = newformat - if not figures_suffixes[format] then - -- maybe the new format is lowres.png (saves entry in suffixes) - -- so let's do thsi extra check - local suffix = file.suffix(newformat) - if figures_suffixes[suffix] then - if trace_figures then - report_inclusion("using suffix %a as format for %a",suffix,format) - end - format = suffix - end - end - elseif io.exists(oldname) then - specification.fullname = oldname -- was newname - specification.converted = false - end - end - end - local found = figures_suffixes[format] -- validtypes[format] - if not found then - specification.found = false - if trace_figures then - report_inclusion("format %a is not supported",format) - end - else - specification.found = true - if trace_figures then - if validtypes[format] then -- format? - report_inclusion("format %a natively supported by backend",format) - else - report_inclusion("format %a supported by output file format",format) - end - end - end - end - specification.foundname = specification.foundname or specification.fullname - local askedhash = f_hash_part(askedname,specification.conversion or "default",specification.resolution or "default") - figures_found[askedhash] = specification - return specification -end - -local resolve_too = false -- true - -local internalschemes = { - file = true, -} - -local function locate(request) -- name, format, cache - -- not resolvers.cleanpath(request.name) as it fails on a!b.pdf and b~c.pdf - -- todo: more restricted cleanpath - local askedname = request.name - local askedhash = f_hash_part(askedname,request.conversion or "default",request.resolution or "default") - local foundname = figures_found[askedhash] - if foundname then - return foundname - end - -- - local askedcache = request.cache - local askedconversion = request.conversion - local askedresolution = request.resolution - -- - if request.format == "" or request.format == "unknown" then - request.format = nil - end - -- protocol check - local hashed = url.hashed(askedname) - if not hashed then - -- go on - elseif internalschemes[hashed.scheme] then - local path = hashed.path - if path and path ~= "" then - askedname = path - end - else - local foundname = resolvers.findbinfile(askedname) - if not foundname or not lfs.isfile(foundname) then -- foundname can be dummy - if trace_figures then - report_inclusion("unknown url %a",askedname) - end - -- url not found - return register(askedname) - end - local askedformat = request.format or file.suffix(askedname) or "" - local guessedformat = figures.guess(foundname) - if askedformat ~= guessedformat then - if trace_figures then - report_inclusion("url %a has unknown format",askedname) - end - -- url found, 
but wrong format - return register(askedname) - else - if trace_figures then - report_inclusion("url %a is resolved to %a",askedname,foundname) - end - return register(askedname, { - askedname = askedname, - fullname = foundname, - format = askedformat, - cache = askedcache, - conversion = askedconversion, - resolution = askedresolution, - }) - end - end - -- we could use the hashed data instead - local askedpath= file.is_rootbased_path(askedname) - local askedbase = file.basename(askedname) - local askedformat = request.format or file.suffix(askedname) or "" - if askedformat ~= "" then - askedformat = lower(askedformat) - if trace_figures then - report_inclusion("forcing format %a",askedformat) - end - local format = figures_suffixes[askedformat] - if not format then - for i=1,#figures_patterns do - local pattern = figures_patterns[i] - if find(askedformat,pattern[1]) then - format = pattern[2] - break - end - end - end - if format then - local foundname, quitscanning, forcedformat = figures.exists(askedname,format,resolve_too) -- not askedformat - if foundname then - return register(askedname, { - askedname = askedname, - fullname = foundname, -- askedname, - format = forcedformat or format, - cache = askedcache, - -- foundname = foundname, -- no - conversion = askedconversion, - resolution = askedresolution, - }) - elseif quitscanning then - return register(askedname) - end - elseif trace_figures then - report_inclusion("unknown format %a",askedformat) - end - if askedpath then - -- path and type given, todo: strip pieces of path - local foundname, quitscanning, forcedformat = figures.exists(askedname,askedformat,resolve_too) - if foundname then - return register(askedname, { - askedname = askedname, - fullname = foundname, -- askedname, - format = forcedformat or askedformat, - cache = askedcache, - conversion = askedconversion, - resolution = askedresolution, - }) - end - else - -- type given - for i=1,#figure_paths do - local path = figure_paths[i] - local check = path .. "/" .. 
askedname - -- we pass 'true' as it can be an url as well, as the type - -- is given we don't waste much time - local foundname, quitscanning, forcedformat = figures.exists(check,askedformat,resolve_too) - if foundname then - return register(check, { - askedname = askedname, - fullname = check, - format = askedformat, - cache = askedcache, - conversion = askedconversion, - resolution = askedresolution, - }) - end - end - if figures.defaultsearch then - local check = resolvers.findfile(askedname) - if check and check ~= "" then - return register(askedname, { - askedname = askedname, - fullname = check, - format = askedformat, - cache = askedcache, - conversion = askedconversion, - resolution = askedresolution, - }) - end - end - end - elseif askedpath then - if trace_figures then - report_inclusion("using rootbased path") - end - for i=1,#figures_order do - local format = figures_order[i] - local list = figures_formats[format].list or { format } - for j=1,#list do - local suffix = list[j] - local check = file.addsuffix(askedname,suffix) - local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too) - if foundname then - return register(askedname, { - askedname = askedname, - fullname = foundname, -- check, - format = forcedformat or format, - cache = askedcache, - conversion = askedconversion, - resolution = askedresolution, - }) - end - end - end - else - if figures.preferquality then - if trace_figures then - report_inclusion("unknown format, quality preferred") - end - for j=1,#figures_order do - local format = figures_order[j] - local list = figures_formats[format].list or { format } - for k=1,#list do - local suffix = list[k] - -- local name = file.replacesuffix(askedbase,suffix) - local name = file.replacesuffix(askedname,suffix) - for i=1,#figure_paths do - local path = figure_paths[i] - local check = path .. "/" .. name - local isfile = url.hashed(check).scheme == "file" - if not isfile then - if trace_figures then - report_inclusion("warning: skipping path %a",path) - end - else - local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too) -- true) - if foundname then - return register(askedname, { - askedname = askedname, - fullname = foundname, -- check - format = forcedformat or format, - cache = askedcache, - conversion = askedconversion, - resolution = askedresolution, - }) - end - end - end - end - end - else -- 'location' - if trace_figures then - report_inclusion("unknown format, using path strategy") - end - for i=1,#figure_paths do - local path = figure_paths[i] - for j=1,#figures_order do - local format = figures_order[j] - local list = figures_formats[format].list or { format } - for k=1,#list do - local suffix = list[k] - local check = path .. "/" .. 
file.replacesuffix(askedbase,suffix) - local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too) - if foundname then - return register(askedname, { - askedname = askedname, - fullname = foudname, -- check, - format = forcedformat or format, - cache = askedcache, - conversion = askedconversion, - resolution = askedresolution, - }) - end - end - end - end - end - if figures.defaultsearch then - if trace_figures then - report_inclusion("using default tex path") - end - for j=1,#figures_order do - local format = figures_order[j] - local list = figures_formats[format].list or { format } - for k=1,#list do - local suffix = list[k] - local check = resolvers.findfile(file.replacesuffix(askedname,suffix)) - if check and check ~= "" then - return register(askedname, { - askedname = askedname, - fullname = check, - format = format, - cache = askedcache, - conversion = askedconversion, - resolution = askedresolution, - }) - end - end - end - end - end - return register(askedname, { -- these two are needed for hashing 'found' - conversion = askedconversion, - resolution = askedresolution, - }) -end - --- -- -- plugins -- -- -- - -function identifiers.default(data) - local dr, du, ds = data.request, data.used, data.status - local l = locate(dr) - local foundname = l.foundname - local fullname = l.fullname or foundname - if fullname then - du.format = l.format or false - du.fullname = fullname -- can be cached - ds.fullname = foundname -- original - ds.format = l.format - ds.status = (l.found and 10) or 0 - end - return data -end - -function figures.identify(data) - data = data or callstack[#callstack] or lastfiguredata - local list = identifiers.list -- defined at the end - for i=1,#list do - local identifier = list[i] - data = identifier(data) - if data.status.status > 0 then - break - end - end - return data -end - -function figures.exists(askedname,format,resolve) - return (existers[format] or existers.generic)(askedname,resolve) -end - -function figures.check(data) - data = data or callstack[#callstack] or lastfiguredata - return (checkers[data.status.format] or checkers.generic)(data) -end - -function figures.include(data) - data = data or callstack[#callstack] or lastfiguredata - return (includers[data.status.format] or includers.generic)(data) -end - -function figures.scale(data) -- will become lua code - context.doscalefigure() - return data -end - -function figures.done(data) - figures.nofprocessed = figures.nofprocessed + 1 - data = data or callstack[#callstack] or lastfiguredata - local dr, du, ds, nr = data.request, data.used, data.status, figures.boxnumber - local box = texbox[nr] - ds.width = box.width - ds.height = box.height - ds.xscale = ds.width /(du.width or 1) - ds.yscale = ds.height/(du.height or 1) - ds.page = ds.page or du.page or dr.page -- sort of redundant but can be limited - return data -end - -function figures.dummy(data) - data = data or callstack[#callstack] or lastfiguredata - local dr, du, nr = data.request, data.used, figures.boxnumber - local box = node.hpack(node.new("hlist")) -- we need to set the dir (luatex 0.60 buglet) - du.width = du.width or figures.defaultwidth - du.height = du.height or figures.defaultheight - du.depth = du.depth or figures.defaultdepth - -- box.dir = "TLT" - box.width = du.width - box.height = du.height - box.depth = du.depth - texbox[nr] = box -- hm, should be global (to be checked for consistency) -end - --- -- -- generic -- -- -- - -function existers.generic(askedname,resolve) - -- not findbinfile - local 
result - if lfs.isfile(askedname) then - result = askedname - elseif resolve then - result = resolvers.findbinfile(askedname) or "" - if result == "" then result = false end - end - if trace_figures then - if result then - report_inclusion("%a resolved to %a",askedname,result) - else - report_inclusion("%a cannot be resolved",askedname) - end - end - return result -end - -function checkers.generic(data) - local dr, du, ds = data.request, data.used, data.status - local name = du.fullname or "unknown generic" - local page = du.page or dr.page - local size = dr.size or "crop" - local color = dr.color or "natural" - local mask = dr.mask or "none" - local conversion = dr.conversion - local resolution = dr.resolution - if not conversion or conversion == "" then - conversion = "unknown" - end - if not resolution or resolution == "" then - resolution = "unknown" - end - local hash = f_hash_full(name,page,size,color,conversion,resolution,mask) - local figure = figures_loaded[hash] - if figure == nil then - figure = img.new { - filename = name, - page = page, - pagebox = dr.size, - -- visiblefilename = "", -- this prohibits the full filename ending up in the file - } - codeinjections.setfigurecolorspace(data,figure) - codeinjections.setfiguremask(data,figure) - figure = figure and img.check(img.scan(figure)) or false - local f, d = codeinjections.setfigurealternative(data,figure) - figure, data = f or figure, d or data - figures_loaded[hash] = figure - if trace_conversion then - report_inclusion("new graphic, using hash %a",hash) - end - else - if trace_conversion then - report_inclusion("existing graphic, using hash %a",hash) - end - end - if figure then - du.width = figure.width - du.height = figure.height - du.pages = figure.pages - du.depth = figure.depth or 0 - du.colordepth = figure.colordepth or 0 - du.xresolution = figure.xres or 0 - du.yresolution = figure.yres or 0 - du.xsize = figure.xsize or 0 - du.ysize = figure.ysize or 0 - ds.private = figure - ds.hash = hash - end - return data -end - -function includers.generic(data) - local dr, du, ds = data.request, data.used, data.status - -- here we set the 'natural dimensions' - dr.width = du.width - dr.height = du.height - local hash = figures.hash(data) - local figure = figures_used[hash] - -- figures.registerresource { - -- filename = du.fullname, - -- width = dr.width, - -- height = dr.height, - -- } - if figure == nil then - figure = ds.private - if figure then - figure = img.copy(figure) - figure = figure and img.clone(figure,data.request) or false - end - figures_used[hash] = figure - end - if figure then - local nr = figures.boxnumber - -- it looks like we have a leak in attributes here .. todo - local box = node.hpack(img.node(figure)) -- img.node(figure) not longer valid - indexed[figure.index] = figure - box.width, box.height, box.depth = figure.width, figure.height, 0 -- new, hm, tricky, we need to do that in tex (yet) - texbox[nr] = box - ds.objectnumber = figure.objnum - context.relocateexternalfigure() - end - return data -end - --- -- -- nongeneric -- -- -- - -local function checkers_nongeneric(data,command) -- todo: macros and context.* - local dr, du, ds = data.request, data.used, data.status - local name = du.fullname or "unknown nongeneric" - local hash = name - if dr.object then - -- hm, bugged ... 
waiting for an xform interface - if not job.objects.get("FIG::"..hash) then - if type(command) == "function" then - command() - end - context.dosetfigureobject(hash) - end - context.doboxfigureobject(hash) - elseif type(command) == "function" then - command() - end - return data -end - -local function includers_nongeneric(data) - return data -end - -checkers.nongeneric = checkers_nongeneric -includers.nongeneric = includers_nongeneric - --- -- -- mov -- -- -- - -function checkers.mov(data) - local dr, du, ds = data.request, data.used, data.status - local width = todimen(dr.width or figures.defaultwidth) - local height = todimen(dr.height or figures.defaultheight) - local foundname = du.fullname - dr.width, dr.height = width, height - du.width, du.height, du.foundname = width, height, foundname - if trace_inclusion then - report_inclusion("including movie %a, width %p, height %p",foundname,width,height) - end - -- we need to push the node.write in between ... we could make a shared helper for this - context.startfoundexternalfigure(width .. "sp",height .. "sp") - context(function() - nodeinjections.insertmovie { - width = width, - height = height, - factor = number.dimenfactors.bp, - ["repeat"] = dr["repeat"], - controls = dr.controls, - preview = dr.preview, - label = dr.label, - foundname = foundname, - } - end) - context.stopfoundexternalfigure() - return data -end - -includers.mov = includers.nongeneric - --- -- -- mps -- -- -- - -internalschemes.mprun = true - -local function internal(askedname) - local spec, mprun, mpnum = match(lower(askedname),"mprun([:%.]?)(.-)%.(%d+)") - if spec ~= "" then - return mprun, mpnum - else - return "", mpnum - end -end - -function existers.mps(askedname) - local mprun, mpnum = internal(askedname) - if mpnum then - return askedname - else - return existers.generic(askedname) - end -end - -function checkers.mps(data) - local mprun, mpnum = internal(data.used.fullname) - if mpnum then - return checkers_nongeneric(data,function() context.docheckfiguremprun(mprun,mpnum) end) - else - return checkers_nongeneric(data,function() context.docheckfiguremps(data.used.fullname) end) - end -end - -includers.mps = includers.nongeneric - --- -- -- tex -- -- -- - -function existers.tex(askedname) - askedname = resolvers.findfile(askedname) - return askedname ~= "" and askedname or false -end - -function checkers.tex(data) - return checkers_nongeneric(data,function() context.docheckfiguretex(data.used.fullname) end) -end - -includers.tex = includers.nongeneric - --- -- -- buffer -- -- -- - -function existers.buffer(askedname) - local name = file.nameonly(askedname) - local okay = buffers.exists(name) - return okay and name, true -- always quit scanning -end - -function checkers.buffer(data) - return checkers_nongeneric(data,function() context.docheckfigurebuffer(file.nameonly(data.used.fullname)) end) -end - -includers.buffers = includers.nongeneric - --- -- -- auto -- -- -- - -function existers.auto(askedname) - local name = gsub(askedname, ".auto$", "") - local format = figures.guess(name) - if format then - report_inclusion("format guess %a for %a",format,name) - else - report_inclusion("format guess for %a is not possible",name) - end - return format and name, true, format -end - -checkers.auto = checkers.generic -includers.auto = includers.generic - --- -- -- cld -- -- -- - -existers.cld = existers.tex - -function checkers.cld(data) - return checkers_nongeneric(data,function() context.docheckfigurecld(data.used.fullname) end) -end - -includers.cld = 
includers.nongeneric - --- -- -- converters -- -- -- - -local function makeoptions(options) - local to = type(options) - return (to == "table" and concat(options," ")) or (to == "string" and options) or "" -end - --- programs.makeoptions = makeoptions - -local function runprogram(binary,argument,variables) - local binary = match(binary,"[%S]+") -- to be sure - if type(argument) == "table" then - argument = concat(argument," ") -- for old times sake - end - if not os.which(binary) then - report_inclusion("program %a is not installed, not running command: %s",binary,command) - elseif not argument or argument == "" then - report_inclusion("nothing to run, unknown program %a",binary) - else - local command = format([["%s" %s]],binary,replacetemplate(longtostring(argument),variables)) - if trace_conversion or trace_programs then - report_inclusion("running command: %s",command) - end - os.spawn(command) - end -end - -programs.run = runprogram - --- -- -- eps & pdf -- -- -- --- --- \externalfigure[cow.eps] --- \externalfigure[cow.pdf][conversion=stripped] - -local epsconverter = converters.eps or { } -converters.eps = epsconverter -converters.ps = epsconverter - -local epstopdf = { - resolutions = { - [v_low] = "screen", - [v_medium] = "ebook", - [v_high] = "prepress", - }, - command = os.type == "windows" and "gswin32c" or "gs", - -- -dProcessDSCComments=false - argument = [[ - -q - -sDEVICE=pdfwrite - -dNOPAUSE - -dNOCACHE - -dBATCH - -dAutoRotatePages=/None - -dPDFSETTINGS=/%presets% - -dEPSCrop - -sOutputFile=%newname% - %oldname% - -c quit - ]], -} - -programs.epstopdf = epstopdf -programs.gs = epstopdf - -function epsconverter.pdf(oldname,newname,resolution) -- the resolution interface might change - local epstopdf = programs.epstopdf -- can be changed - local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high - runprogram(epstopdf.command, epstopdf.argument, { - newname = newname, - oldname = oldname, - presets = presets, - } ) -end - -epsconverter.default = epsconverter.pdf - -local pdfconverter = converters.pdf or { } -converters.pdf = pdfconverter - -programs.pdftoeps = { - command = "pdftops", - argument = [[-eps "%oldname%" "%newname%]], -} - -pdfconverter.stripped = function(oldname,newname) - local pdftoeps = programs.pdftoeps -- can be changed - local epstopdf = programs.epstopdf -- can be changed - local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high - local tmpname = newname .. 
".tmp" - runprogram(pdftoeps.command, pdftoeps.argument, { oldname = oldname, newname = tmpname, presets = presets }) - runprogram(epstopdf.command, epstopdf.argument, { oldname = tmpname, newname = newname, presets = presets }) - os.remove(tmpname) -end - -figures.registersuffix("stripped","pdf") - --- -- -- svg -- -- -- - -local svgconverter = { } -converters.svg = svgconverter -converters.svgz = svgconverter - --- inkscape on windows only works with complete paths - -programs.inkscape = { - command = "inkscape", - pdfargument = [[ - "%oldname%" - --export-dpi=600 - -A - "%newname%" - ]], - pngargument = [[ - "%oldname%" - --export-dpi=600 - --export-png="%newname%" - ]], -} - -function svgconverter.pdf(oldname,newname) - local inkscape = programs.inkscape -- can be changed - runprogram(inkscape.command, inkscape.pdfargument, { - newname = expandfilename(newname), - oldname = expandfilename(oldname), - } ) -end - -function svgconverter.png(oldname,newname) - local inkscape = programs.inkscape - runprogram(inkscape.command, inkscape.pngargument, { - newname = expandfilename(newname), - oldname = expandfilename(oldname), - } ) -end - -svgconverter.default = svgconverter.pdf - --- -- -- gif -- -- -- --- -- -- tif -- -- -- - -local gifconverter = converters.gif or { } -local tifconverter = converters.tif or { } -local bmpconverter = converters.bmp or { } - -converters.gif = gifconverter -converters.tif = tifconverter -converters.bmp = bmpconverter - -programs.convert = { - command = "gm", -- graphicmagick - argument = [[convert "%oldname%" "%newname%"]], -} - -local function converter(oldname,newname) - local convert = programs.convert - runprogram(convert.command, convert.argument, { - newname = newname, - oldname = oldname, - } ) -end - -tifconverter.pdf = converter -gifconverter.pdf = converter -bmpconverter.pdf = converter - -gifconverter.default = converter -tifconverter.default = converter -bmpconverter.default = converter - --- todo: lowres - --- -- -- bases -- -- -- - -local bases = allocate() -figures.bases = bases - -local bases_list = nil -- index => { basename, fullname, xmlroot } -local bases_used = nil -- [basename] => { basename, fullname, xmlroot } -- pointer to list -local bases_found = nil -local bases_enabled = false - -local function reset() - bases_list = allocate() - bases_used = allocate() - bases_found = allocate() - bases_enabled = false - bases.list = bases_list - bases.used = bases_used - bases.found = bases_found -end - -reset() - -function bases.use(basename) - if basename == "reset" then - reset() - else - basename = file.addsuffix(basename,"xml") - if not bases_used[basename] then - local t = { basename, nil, nil } - bases_used[basename] = t - bases_list[#bases_list+1] = t - if not bases_enabled then - bases_enabled = true - xml.registerns("rlx","http://www.pragma-ade.com/schemas/rlx") -- we should be able to do this per xml file - end - if trace_bases then - report_inclusion("registering base %a",basename) - end - end - end -end - -local function bases_find(basename,askedlabel) - if trace_bases then - report_inclusion("checking for %a in base %a",askedlabel,basename) - end - basename = file.addsuffix(basename,"xml") - local t = bases_found[askedlabel] - if t == nil then - local base = bases_used[basename] - local page = 0 - if base[2] == nil then - -- no yet located - for i=1,#figure_paths do - local path = figure_paths[i] - local xmlfile = path .. "/" .. 
basename - if io.exists(xmlfile) then - base[2] = xmlfile - base[3] = xml.load(xmlfile) - if trace_bases then - report_inclusion("base %a loaded",xmlfile) - end - break - end - end - end - t = false - if base[2] and base[3] then -- rlx:library - for e in xml.collected(base[3],"/(*:library|figurelibrary)/*:figure/*:label") do - page = page + 1 - if xml.text(e) == askedlabel then - t = { - base = file.replacesuffix(base[2],"pdf"), - format = "pdf", - name = xml.text(e,"../*:file"), -- to be checked - page = page, - } - bases_found[askedlabel] = t - if trace_bases then - report_inclusion("figure %a found in base %a",askedlabel,base[2]) - end - return t - end - end - if trace_bases and not t then - report_inclusion("figure %a not found in base %a",askedlabel,base[2]) - end - end - end - return t -end - --- we can access sequential or by name - -local function bases_locate(askedlabel) - for i=1,#bases_list do - local entry = bases_list[i] - local t = bases_find(entry[1],askedlabel) - if t then - return t - end - end - return false -end - -function identifiers.base(data) - if bases_enabled then - local dr, du, ds = data.request, data.used, data.status - local fbl = bases_locate(dr.name or dr.label) - if fbl then - du.page = fbl.page - du.format = fbl.format - du.fullname = fbl.base - ds.fullname = fbl.name - ds.format = fbl.format - ds.page = fbl.page - ds.status = 10 - end - end - return data -end - -bases.locate = bases_locate -bases.find = bases_find - -identifiers.list = { - identifiers.base, - identifiers.default -} - --- tracing - -statistics.register("graphics processing time", function() - local nofprocessed = figures.nofprocessed - if nofprocessed > 0 then - return format("%s seconds including tex, %s processed images", statistics.elapsedtime(figures),nofprocessed) - else - return nil - end -end) - --- helper - -function figures.applyratio(width,height,w,h) -- width and height are strings and w and h are numbers - if not width or width == "" then - if not height or height == "" then - return figures.defaultwidth, figures.defaultheight - else - height = todimen(height) - if w and h then - return height * w/h, height - else - return figures.defaultwidth, height - end - end - else - width = todimen(width) - if not height or height == "" then - if w and h then - return width, width * h/w - else - return width, figures.defaultheight - end - else - return width, todimen(height) - end - end -end - --- example of simple plugins: --- --- figures.converters.png = { --- png = function(oldname,newname,resolution) --- local command = string.format('gm convert -depth 1 "%s" "%s"',oldname,newname) --- logs.report(string.format("running command %s",command)) --- os.execute(command) --- end, --- } - --- local fig = figures.push { name = pdffile } --- figures.identify() --- figures.check() --- local nofpages = fig.used.pages --- figures.pop() - --- interfacing - -commands.setfigurelookuporder = figures.setorder +if not modules then modules = { } end modules ['grph-inc'] = { + version = 1.001, + comment = "companion to grph-inc.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: empty filename or only suffix always false (not found) +-- lowercase types +-- mps tex tmp svg +-- partly qualified +-- dimensions +-- use metatables +-- figures.boxnumber can go as we now can use names +-- avoid push +-- move some to command namespace + +--[[ +The ConTeXt figure inclusion mechanisms are among 
the oldest code +in ConTeXt and evolved into a complex whole. One reason is that we +deal with backend in an abstract way. What complicates matters is +that we deal with internal graphics as well: TeX code, MetaPost code, +etc. Later on figure databases were introduced, which resulted in +a plug in model for locating images. On top of that runs a conversion +mechanism (with caching) and resource logging. + +Porting that to Lua is not that trivial because quite some +status information is kept between al these stages. Of course, image +reuse also has some price, and so I decided to implement the graphics +inclusion in several layers: detection, loading, inclusion, etc. + +Object sharing and scaling can happen at each stage, depending on the +way the resource is dealt with. + +The TeX-Lua mix is suboptimal. This has to do with the fact that we cannot +run TeX code from within Lua. Some more functionality will move to Lua. +]]-- + +local format, lower, find, match, gsub, gmatch = string.format, string.lower, string.find, string.match, string.gsub, string.gmatch +local texbox = tex.box +local contains = table.contains +local concat, insert, remove = table.concat, table.insert, table.remove +local todimen = string.todimen +local collapsepath = file.collapsepath +local formatters = string.formatters +local longtostring = string.longtostring +local expandfilename = dir.expandname + +local P, lpegmatch = lpeg.P, lpeg.match + +local settings_to_array = utilities.parsers.settings_to_array +local settings_to_hash = utilities.parsers.settings_to_hash +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex +local replacetemplate = utilities.templates.replace + +local variables = interfaces.variables +local codeinjections = backends.codeinjections +local nodeinjections = backends.nodeinjections + +local trace_figures = false trackers.register("graphics.locating", function(v) trace_figures = v end) +local trace_bases = false trackers.register("graphics.bases", function(v) trace_bases = v end) +local trace_programs = false trackers.register("graphics.programs", function(v) trace_programs = v end) +local trace_conversion = false trackers.register("graphics.conversion", function(v) trace_conversion = v end) +local trace_inclusion = false trackers.register("graphics.inclusion", function(v) trace_inclusion = v end) + +local report_inclusion = logs.reporter("graphics","inclusion") + +local context, img = context, img + +local f_hash_part = formatters["%s->%s->%s"] +local f_hash_full = formatters["%s->%s->%s->%s->%s->%s->%s"] + +local v_yes = variables.yes +local v_low = variables.low +local v_medium = variables.medium +local v_high = variables.high +local v_global = variables["global"] +local v_local = variables["local"] +local v_default = variables.default + +local maxdimen = 2^30-1 + +function img.check(figure) + if figure then + local width = figure.width + local height = figure.height + if height > width then + if height > maxdimen then + figure.height = maxdimen + figure.width = width * maxdimen/height + report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"height") + end + elseif width > maxdimen then + figure.width = maxdimen + figure.height = height * maxdimen/width + report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"width") + end + return figure + end +end + +--- some extra img functions --- can become luat-img.lua + +local imgkeys = img.keys() + +function img.totable(imgtable) + local result = { } + for k=1,#imgkeys 
do + local key = imgkeys[k] + result[key] = imgtable[key] + end + return result +end + +function img.serialize(i,...) + return table.serialize(img.totable(i),...) +end + +function img.print(i,...) + return table.print(img.totable(i),...) +end + +function img.clone(i,data) + i.width = data.width or i.width + i.height = data.height or i.height + -- attr etc + return i +end + +local validsizes = table.tohash(img.boxes()) +local validtypes = table.tohash(img.types()) + +function img.checksize(size) + if size then + size = gsub(size,"box","") + return validsizes[size] and size or "crop" + else + return "crop" + end +end + +local indexed = { } + +function img.ofindex(n) + return indexed[n] +end + +--- we can consider an grph-ini file + +figures = figures or { } +local figures = figures + +figures.boxnumber = figures.boxnumber or 0 +figures.defaultsearch = true +figures.defaultwidth = 0 +figures.defaultheight = 0 +figures.defaultdepth = 0 +figures.nofprocessed = 0 +figures.preferquality = true -- quality over location + +local figures_loaded = allocate() figures.loaded = figures_loaded +local figures_used = allocate() figures.used = figures_used +local figures_found = allocate() figures.found = figures_found +local figures_suffixes = allocate() figures.suffixes = figures_suffixes +local figures_patterns = allocate() figures.patterns = figures_patterns +local figures_resources = allocate() figures.resources = figures_resources + +local existers = allocate() figures.existers = existers +local checkers = allocate() figures.checkers = checkers +local includers = allocate() figures.includers = includers +local converters = allocate() figures.converters = converters +local identifiers = allocate() figures.identifiers = identifiers +local programs = allocate() figures.programs = programs + +local defaultformat = "pdf" +local defaultprefix = "m_k_i_v_" + +figures.localpaths = allocate { + ".", "..", "../.." 
+} + +figures.cachepaths = allocate { + prefix = "", + path = ".", + subpath = ".", +} + +local figure_paths = allocate(table.copy(figures.localpaths)) +figures.paths = figure_paths + +local figures_order = allocate { + "pdf", "mps", "jpg", "png", "jp2", "jbig", "svg", "eps", "tif", "gif", "mov", "buffer", "tex", "cld", "auto", +} + +local figures_formats = allocate { -- magic and order will move here + ["pdf"] = { list = { "pdf" } }, + ["mps"] = { patterns = { "mps", "%d+" } }, + ["jpg"] = { list = { "jpg", "jpeg" } }, + ["png"] = { list = { "png" } }, + ["jp2"] = { list = { "jp2" } }, + ["jbig"] = { list = { "jbig", "jbig2", "jb2" } }, + ["svg"] = { list = { "svg", "svgz" } }, + ["eps"] = { list = { "eps", "ai" } }, + ["gif"] = { list = { "gif" } }, + ["tif"] = { list = { "tif", "tiff" } }, + ["mov"] = { list = { "mov", "flv", "mp4" } }, -- "avi" is not supported + ["buffer"] = { list = { "tmp", "buffer", "buf" } }, + ["tex"] = { list = { "tex" } }, + ["cld"] = { list = { "cld" } }, + ["auto"] = { list = { "auto" } }, +} + +local figures_magics = allocate { + { format = "png", pattern = P("\137PNG\013\010\026\010") }, -- 89 50 4E 47 0D 0A 1A 0A, + { format = "jpg", pattern = P("\255\216\255") }, -- FF D8 FF + { format = "jp2", pattern = P("\000\000\000\012\106\080\032\032\013\010"), }, -- 00 00 00 0C 6A 50 20 20 0D 0A }, + { format = "gif", pattern = P("GIF") }, + { format = "pdf", pattern = (1 - P("%PDF"))^0 * P("%PDF") }, +} + +figures.formats = figures_formats -- frozen +figures.magics = figures_magics -- frozen +figures.order = figures_order -- frozen + +-- We can set the order but only indirectly so that we can check for support. + +function figures.setorder(list) -- can be table or string + if type(list) == "string" then + list = settings_to_array(list) + end + if list and #list > 0 then + figures_order = allocate() + figures.order = figures_order + local done = { } -- just to be sure in case the list is generated + for i=1,#list do + local l = lower(list[i]) + if figures_formats[l] and not done[l] then + figures_order[#figures_order+1] = l + done[l] = true + end + end + report_inclusion("lookup order % a",figures_order) + else + -- invalid list + end +end + +function figures.guess(filename) + local f = io.open(filename,'rb') + if f then + local str = f:read(100) + f:close() + if str then + for i=1,#figures_magics do + local pattern = figures_magics[i] + if lpegmatch(pattern.pattern,str) then + local format = pattern.format + if trace_figures then + report_inclusion("file %a has format %a",filename,format) + end + return format + end + end + end + end +end + +local function setlookups() -- tobe redone .. 
just set locals + figures_suffixes = allocate() + figures_patterns = allocate() + for _, format in next, figures_order do + local data = figures_formats[format] + local list = data.list + if list then + for i=1,#list do + figures_suffixes[list[i]] = format -- hash + end + else + figures_suffixes[format] = format + end + local patterns = data.patterns + if patterns then + for i=1,#patterns do + figures_patterns[#figures_patterns+1] = { patterns[i], format } -- array + end + end + end + figures.suffixes = figures_suffixes + figures.patterns = figures_patterns +end + +setlookups() + +figures.setlookups = setlookups + +function figures.registerresource(t) + local n = #figures_resources + 1 + figures_resources[n] = t + return n +end + +local function register(tag,target,what) + local data = figures_formats[target] -- resolver etc + if not data then + data = { } + figures_formats[target] = data + end + local d = data[tag] -- list or pattern + if d and not contains(d,what) then + d[#d+1] = what -- suffix or patternspec + else + data[tag] = { what } + end + if not contains(figures_order,target) then + figures_order[#figures_order+1] = target + end + setlookups() +end + +function figures.registersuffix (suffix, target) register('list', target,suffix ) end +function figures.registerpattern(pattern,target) register('pattern',target,pattern) end + +local last_locationset = last_locationset or nil +local last_pathlist = last_pathlist or nil + +function figures.setpaths(locationset,pathlist) + if last_locationset == locationset and last_pathlist == pathlist then + -- this function can be called each graphic so we provide this optimization + return + end + local t, h = figure_paths, settings_to_hash(locationset) + if last_locationset ~= locationset then + -- change == reset (actually, a 'reset' would indeed reset + if h[v_local] then + t = table.fastcopy(figures.localpaths or { }) + else + t = { } + end + figures.defaultsearch = h[v_default] + last_locationset = locationset + end + if h[v_global] then + local list = settings_to_array(pathlist) + for i=1,#list do + local s = list[i] + if not contains(t,s) then + t[#t+1] = s + end + end + end + figure_paths = t + last_pathlist = pathlist + figures.paths = figure_paths + if trace_figures then + report_inclusion("using locations %a",last_locationset) + report_inclusion("using paths % a",figure_paths) + end +end + +-- check conversions and handle it here + +function figures.hash(data) + local status = data and data.status + return (status and status.hash or tostring(status.private)) or "nohash" -- the +end + +-- interfacing to tex + +local function new() -- we could use metatables status -> used -> request but it needs testing + local request = { + name = false, + label = false, + format = false, + page = false, + width = false, + height = false, + preview = false, + ["repeat"] = false, + controls = false, + display = false, + mask = false, + conversion = false, + resolution = false, + cache = false, + prefix = false, + size = false, + } + local used = { + fullname = false, + format = false, + name = false, + path = false, + suffix = false, + width = false, + height = false, + } + local status = { + status = 0, + converted = false, + cached = false, + fullname = false, + format = false, + } + -- this needs checking because we might check for nil, the test case + -- is getfiguredimensions which then should return ~= 0 + -- setmetatableindex(status, used) + -- setmetatableindex(used, request) + return { + request = request, + used = used, + status = status, + } 
+end + +-- use table.insert|remove + +local lastfiguredata = nil -- will be topofstack or last so no { } (else problems with getfiguredimensions) +local callstack = { } + +function figures.initialize(request) + local figuredata = new() + if request then + -- request.width/height are strings and are only used when no natural dimensions + -- can be determined; at some point the handlers might set them to numbers instead + local w = tonumber(request.width) or 0 + local h = tonumber(request.height) or 0 + request.width = w > 0 and w or nil + request.height = h > 0 and h or nil + -- + request.page = math.max(tonumber(request.page) or 1,1) + request.size = img.checksize(request.size) + request.object = request.object == v_yes + request["repeat"] = request["repeat"] == v_yes + request.preview = request.preview == v_yes + request.cache = request.cache ~= "" and request.cache + request.prefix = request.prefix ~= "" and request.prefix + request.format = request.format ~= "" and request.format + table.merge(figuredata.request,request) + end + return figuredata +end + +function figures.push(request) + statistics.starttiming(figures) + local figuredata = figures.initialize(request) + insert(callstack,figuredata) + lastfiguredata = figuredata + return figuredata +end + +function figures.pop() + lastfiguredata = remove(callstack) or lastfiguredata + statistics.stoptiming(figures) +end + +function figures.current() + return callstack[#callstack] or lastfiguredata +end + +local function get(category,tag,default) + local value = lastfiguredata and lastfiguredata[category] + value = value and value[tag] + if not value or value == "" or value == true then + return default or "" + else + return value + end +end + +figures.get = get + +function commands.figurevariable(category,tag,default) + context(get(category,tag,default)) +end + +function commands.figurestatus (tag,default) context(get("status", tag,default)) end +function commands.figurerequest(tag,default) context(get("request",tag,default)) end +function commands.figureused (tag,default) context(get("used", tag,default)) end + +function commands.figurefilepath() context(file.dirname (get("used","fullname"))) end +function commands.figurefilename() context(file.nameonly(get("used","fullname"))) end +function commands.figurefiletype() context(file.extname (get("used","fullname"))) end + +-- todo: local path or cache path + +local function forbiddenname(filename) + if not filename or filename == "" then + return false + end + local expandedfullname = collapsepath(filename,true) + local expandedinputname = collapsepath(file.addsuffix(environment.jobfilename,environment.jobfilesuffix),true) + if expandedfullname == expandedinputname then + report_inclusion("skipping graphic with same name as input filename %a, enforce suffix",expandedinputname) + return true + end + local expandedoutputname = collapsepath(codeinjections.getoutputfilename(),true) + if expandedfullname == expandedoutputname then + report_inclusion("skipping graphic with same name as output filename %a, enforce suffix",expandedoutputname) + return true + end +end + +local function register(askedname,specification) + if not specification then + specification = { } + elseif forbiddenname(specification.fullname) then + specification = { } + else + local format = specification.format + if format then + local conversion = specification.conversion + local resolution = specification.resolution + if conversion == "" then + conversion = nil + end + if resolution == "" then + resolution = nil + end + 
local newformat = conversion + if not newformat or newformat == "" then + newformat = defaultformat + end + if trace_conversion then + report_inclusion("checking conversion of %a, fullname %a, old format %a, new format %a, conversion %a, resolution %a", + askedname,specification.fullname,format,newformat,conversion or "default",resolution or "default") + end + -- quick hack + local converter = (newformat ~= format or resolution) and converters[format] + if converter then + if converter[newformat] then + converter = converter[newformat] + else + newformat = defaultformat + if converter[newformat] then + converter = converter[newformat] + else + converter = nil + newformat = defaultformat + end + end + elseif trace_conversion then + report_inclusion("no converter for %a to %a",format,newformat) + end + if converter then + local oldname = specification.fullname + local newpath = file.dirname(oldname) + local oldbase = file.basename(oldname) + -- + -- problem: we can have weird filenames, like a.b.c (no suffix) and a.b.c.gif + -- so we cannot safely remove a suffix (unless we do that for known suffixes) + -- + -- local newbase = file.removesuffix(oldbase) -- assumes a known suffix + -- + -- so we now have (also see *): + -- + local newbase = oldbase + -- + local fc = specification.cache or figures.cachepaths.path + if fc and fc ~= "" and fc ~= "." then + newpath = fc + else + newbase = defaultprefix .. newbase + end + if not file.is_writable(newpath) then + if trace_conversion then + report_inclusion("path %a is not writable, forcing conversion path %a",newpath,".") + end + newpath = "." + end + local subpath = specification.subpath or figures.cachepaths.subpath + if subpath and subpath ~= "" and subpath ~= "." then + newpath = newpath .. "/" .. subpath + end + local prefix = specification.prefix or figures.cachepaths.prefix + if prefix and prefix ~= "" then + newbase = prefix .. newbase + end + if resolution and resolution ~= "" then -- the order might change + newbase = newbase .. "_" .. resolution + end + -- + -- see *, we had: + -- + -- local newbase = file.addsuffix(newbase,newformat) + -- + -- but now have (result of Aditya's web image testing): + -- + -- as a side effect we can now have multiple fetches with different + -- original figures_formats, not that it matters much (apart from older conversions + -- sticking around) + -- + local newbase = newbase .. "." .. 
newformat + -- + local newname = file.join(newpath,newbase) + dir.makedirs(newpath) + oldname = collapsepath(oldname) + newname = collapsepath(newname) + local oldtime = lfs.attributes(oldname,'modification') or 0 + local newtime = lfs.attributes(newname,'modification') or 0 + if newtime == 0 or oldtime > newtime then + if trace_conversion then + report_inclusion("converting %a (%a) from %a to %a",askedname,oldname,format,newformat) + end + converter(oldname,newname,resolution or "") + else + if trace_conversion then + report_inclusion("no need to convert %a (%a) from %a to %a",askedname,oldname,format,newformat) + end + end + if io.exists(newname) and io.size(newname) > 0 then + specification.foundname = oldname + specification.fullname = newname + specification.prefix = prefix + specification.subpath = subpath + specification.converted = true + format = newformat + if not figures_suffixes[format] then + -- maybe the new format is lowres.png (saves entry in suffixes) + -- so let's do thsi extra check + local suffix = file.suffix(newformat) + if figures_suffixes[suffix] then + if trace_figures then + report_inclusion("using suffix %a as format for %a",suffix,format) + end + format = suffix + end + end + elseif io.exists(oldname) then + specification.fullname = oldname -- was newname + specification.converted = false + end + end + end + local found = figures_suffixes[format] -- validtypes[format] + if not found then + specification.found = false + if trace_figures then + report_inclusion("format %a is not supported",format) + end + else + specification.found = true + if trace_figures then + if validtypes[format] then -- format? + report_inclusion("format %a natively supported by backend",format) + else + report_inclusion("format %a supported by output file format",format) + end + end + end + end + specification.foundname = specification.foundname or specification.fullname + local askedhash = f_hash_part(askedname,specification.conversion or "default",specification.resolution or "default") + figures_found[askedhash] = specification + return specification +end + +local resolve_too = false -- true + +local internalschemes = { + file = true, +} + +local function locate(request) -- name, format, cache + -- not resolvers.cleanpath(request.name) as it fails on a!b.pdf and b~c.pdf + -- todo: more restricted cleanpath + local askedname = request.name + local askedhash = f_hash_part(askedname,request.conversion or "default",request.resolution or "default") + local foundname = figures_found[askedhash] + if foundname then + return foundname + end + -- + local askedcache = request.cache + local askedconversion = request.conversion + local askedresolution = request.resolution + -- + if request.format == "" or request.format == "unknown" then + request.format = nil + end + -- protocol check + local hashed = url.hashed(askedname) + if not hashed then + -- go on + elseif internalschemes[hashed.scheme] then + local path = hashed.path + if path and path ~= "" then + askedname = path + end + else + local foundname = resolvers.findbinfile(askedname) + if not foundname or not lfs.isfile(foundname) then -- foundname can be dummy + if trace_figures then + report_inclusion("unknown url %a",askedname) + end + -- url not found + return register(askedname) + end + local askedformat = request.format or file.suffix(askedname) or "" + local guessedformat = figures.guess(foundname) + if askedformat ~= guessedformat then + if trace_figures then + report_inclusion("url %a has unknown format",askedname) + end + -- url found, 
but wrong format + return register(askedname) + else + if trace_figures then + report_inclusion("url %a is resolved to %a",askedname,foundname) + end + return register(askedname, { + askedname = askedname, + fullname = foundname, + format = askedformat, + cache = askedcache, + conversion = askedconversion, + resolution = askedresolution, + }) + end + end + -- we could use the hashed data instead + local askedpath= file.is_rootbased_path(askedname) + local askedbase = file.basename(askedname) + local askedformat = request.format or file.suffix(askedname) or "" + if askedformat ~= "" then + askedformat = lower(askedformat) + if trace_figures then + report_inclusion("forcing format %a",askedformat) + end + local format = figures_suffixes[askedformat] + if not format then + for i=1,#figures_patterns do + local pattern = figures_patterns[i] + if find(askedformat,pattern[1]) then + format = pattern[2] + break + end + end + end + if format then + local foundname, quitscanning, forcedformat = figures.exists(askedname,format,resolve_too) -- not askedformat + if foundname then + return register(askedname, { + askedname = askedname, + fullname = foundname, -- askedname, + format = forcedformat or format, + cache = askedcache, + -- foundname = foundname, -- no + conversion = askedconversion, + resolution = askedresolution, + }) + elseif quitscanning then + return register(askedname) + end + elseif trace_figures then + report_inclusion("unknown format %a",askedformat) + end + if askedpath then + -- path and type given, todo: strip pieces of path + local foundname, quitscanning, forcedformat = figures.exists(askedname,askedformat,resolve_too) + if foundname then + return register(askedname, { + askedname = askedname, + fullname = foundname, -- askedname, + format = forcedformat or askedformat, + cache = askedcache, + conversion = askedconversion, + resolution = askedresolution, + }) + end + else + -- type given + for i=1,#figure_paths do + local path = figure_paths[i] + local check = path .. "/" .. 
askedname + -- we pass 'true' as it can be an url as well, as the type + -- is given we don't waste much time + local foundname, quitscanning, forcedformat = figures.exists(check,askedformat,resolve_too) + if foundname then + return register(check, { + askedname = askedname, + fullname = check, + format = askedformat, + cache = askedcache, + conversion = askedconversion, + resolution = askedresolution, + }) + end + end + if figures.defaultsearch then + local check = resolvers.findfile(askedname) + if check and check ~= "" then + return register(askedname, { + askedname = askedname, + fullname = check, + format = askedformat, + cache = askedcache, + conversion = askedconversion, + resolution = askedresolution, + }) + end + end + end + elseif askedpath then + if trace_figures then + report_inclusion("using rootbased path") + end + for i=1,#figures_order do + local format = figures_order[i] + local list = figures_formats[format].list or { format } + for j=1,#list do + local suffix = list[j] + local check = file.addsuffix(askedname,suffix) + local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too) + if foundname then + return register(askedname, { + askedname = askedname, + fullname = foundname, -- check, + format = forcedformat or format, + cache = askedcache, + conversion = askedconversion, + resolution = askedresolution, + }) + end + end + end + else + if figures.preferquality then + if trace_figures then + report_inclusion("unknown format, quality preferred") + end + for j=1,#figures_order do + local format = figures_order[j] + local list = figures_formats[format].list or { format } + for k=1,#list do + local suffix = list[k] + -- local name = file.replacesuffix(askedbase,suffix) + local name = file.replacesuffix(askedname,suffix) + for i=1,#figure_paths do + local path = figure_paths[i] + local check = path .. "/" .. name + local isfile = url.hashed(check).scheme == "file" + if not isfile then + if trace_figures then + report_inclusion("warning: skipping path %a",path) + end + else + local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too) -- true) + if foundname then + return register(askedname, { + askedname = askedname, + fullname = foundname, -- check + format = forcedformat or format, + cache = askedcache, + conversion = askedconversion, + resolution = askedresolution, + }) + end + end + end + end + end + else -- 'location' + if trace_figures then + report_inclusion("unknown format, using path strategy") + end + for i=1,#figure_paths do + local path = figure_paths[i] + for j=1,#figures_order do + local format = figures_order[j] + local list = figures_formats[format].list or { format } + for k=1,#list do + local suffix = list[k] + local check = path .. "/" .. 
file.replacesuffix(askedbase,suffix) + local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too) + if foundname then + return register(askedname, { + askedname = askedname, + fullname = foudname, -- check, + format = forcedformat or format, + cache = askedcache, + conversion = askedconversion, + resolution = askedresolution, + }) + end + end + end + end + end + if figures.defaultsearch then + if trace_figures then + report_inclusion("using default tex path") + end + for j=1,#figures_order do + local format = figures_order[j] + local list = figures_formats[format].list or { format } + for k=1,#list do + local suffix = list[k] + local check = resolvers.findfile(file.replacesuffix(askedname,suffix)) + if check and check ~= "" then + return register(askedname, { + askedname = askedname, + fullname = check, + format = format, + cache = askedcache, + conversion = askedconversion, + resolution = askedresolution, + }) + end + end + end + end + end + return register(askedname, { -- these two are needed for hashing 'found' + conversion = askedconversion, + resolution = askedresolution, + }) +end + +-- -- -- plugins -- -- -- + +function identifiers.default(data) + local dr, du, ds = data.request, data.used, data.status + local l = locate(dr) + local foundname = l.foundname + local fullname = l.fullname or foundname + if fullname then + du.format = l.format or false + du.fullname = fullname -- can be cached + ds.fullname = foundname -- original + ds.format = l.format + ds.status = (l.found and 10) or 0 + end + return data +end + +function figures.identify(data) + data = data or callstack[#callstack] or lastfiguredata + local list = identifiers.list -- defined at the end + for i=1,#list do + local identifier = list[i] + data = identifier(data) + if data.status.status > 0 then + break + end + end + return data +end + +function figures.exists(askedname,format,resolve) + return (existers[format] or existers.generic)(askedname,resolve) +end + +function figures.check(data) + data = data or callstack[#callstack] or lastfiguredata + return (checkers[data.status.format] or checkers.generic)(data) +end + +function figures.include(data) + data = data or callstack[#callstack] or lastfiguredata + return (includers[data.status.format] or includers.generic)(data) +end + +function figures.scale(data) -- will become lua code + context.doscalefigure() + return data +end + +function figures.done(data) + figures.nofprocessed = figures.nofprocessed + 1 + data = data or callstack[#callstack] or lastfiguredata + local dr, du, ds, nr = data.request, data.used, data.status, figures.boxnumber + local box = texbox[nr] + ds.width = box.width + ds.height = box.height + ds.xscale = ds.width /(du.width or 1) + ds.yscale = ds.height/(du.height or 1) + ds.page = ds.page or du.page or dr.page -- sort of redundant but can be limited + return data +end + +function figures.dummy(data) + data = data or callstack[#callstack] or lastfiguredata + local dr, du, nr = data.request, data.used, figures.boxnumber + local box = node.hpack(node.new("hlist")) -- we need to set the dir (luatex 0.60 buglet) + du.width = du.width or figures.defaultwidth + du.height = du.height or figures.defaultheight + du.depth = du.depth or figures.defaultdepth + -- box.dir = "TLT" + box.width = du.width + box.height = du.height + box.depth = du.depth + texbox[nr] = box -- hm, should be global (to be checked for consistency) +end + +-- -- -- generic -- -- -- + +function existers.generic(askedname,resolve) + -- not findbinfile + local 
result + if lfs.isfile(askedname) then + result = askedname + elseif resolve then + result = resolvers.findbinfile(askedname) or "" + if result == "" then result = false end + end + if trace_figures then + if result then + report_inclusion("%a resolved to %a",askedname,result) + else + report_inclusion("%a cannot be resolved",askedname) + end + end + return result +end + +function checkers.generic(data) + local dr, du, ds = data.request, data.used, data.status + local name = du.fullname or "unknown generic" + local page = du.page or dr.page + local size = dr.size or "crop" + local color = dr.color or "natural" + local mask = dr.mask or "none" + local conversion = dr.conversion + local resolution = dr.resolution + if not conversion or conversion == "" then + conversion = "unknown" + end + if not resolution or resolution == "" then + resolution = "unknown" + end + local hash = f_hash_full(name,page,size,color,conversion,resolution,mask) + local figure = figures_loaded[hash] + if figure == nil then + figure = img.new { + filename = name, + page = page, + pagebox = dr.size, + -- visiblefilename = "", -- this prohibits the full filename ending up in the file + } + codeinjections.setfigurecolorspace(data,figure) + codeinjections.setfiguremask(data,figure) + figure = figure and img.check(img.scan(figure)) or false + local f, d = codeinjections.setfigurealternative(data,figure) + figure, data = f or figure, d or data + figures_loaded[hash] = figure + if trace_conversion then + report_inclusion("new graphic, using hash %a",hash) + end + else + if trace_conversion then + report_inclusion("existing graphic, using hash %a",hash) + end + end + if figure then + du.width = figure.width + du.height = figure.height + du.pages = figure.pages + du.depth = figure.depth or 0 + du.colordepth = figure.colordepth or 0 + du.xresolution = figure.xres or 0 + du.yresolution = figure.yres or 0 + du.xsize = figure.xsize or 0 + du.ysize = figure.ysize or 0 + ds.private = figure + ds.hash = hash + end + return data +end + +function includers.generic(data) + local dr, du, ds = data.request, data.used, data.status + -- here we set the 'natural dimensions' + dr.width = du.width + dr.height = du.height + local hash = figures.hash(data) + local figure = figures_used[hash] + -- figures.registerresource { + -- filename = du.fullname, + -- width = dr.width, + -- height = dr.height, + -- } + if figure == nil then + figure = ds.private + if figure then + figure = img.copy(figure) + figure = figure and img.clone(figure,data.request) or false + end + figures_used[hash] = figure + end + if figure then + local nr = figures.boxnumber + -- it looks like we have a leak in attributes here .. todo + local box = node.hpack(img.node(figure)) -- img.node(figure) not longer valid + indexed[figure.index] = figure + box.width, box.height, box.depth = figure.width, figure.height, 0 -- new, hm, tricky, we need to do that in tex (yet) + texbox[nr] = box + ds.objectnumber = figure.objnum + context.relocateexternalfigure() + end + return data +end + +-- -- -- nongeneric -- -- -- + +local function checkers_nongeneric(data,command) -- todo: macros and context.* + local dr, du, ds = data.request, data.used, data.status + local name = du.fullname or "unknown nongeneric" + local hash = name + if dr.object then + -- hm, bugged ... 
waiting for an xform interface + if not job.objects.get("FIG::"..hash) then + if type(command) == "function" then + command() + end + context.dosetfigureobject(hash) + end + context.doboxfigureobject(hash) + elseif type(command) == "function" then + command() + end + return data +end + +local function includers_nongeneric(data) + return data +end + +checkers.nongeneric = checkers_nongeneric +includers.nongeneric = includers_nongeneric + +-- -- -- mov -- -- -- + +function checkers.mov(data) + local dr, du, ds = data.request, data.used, data.status + local width = todimen(dr.width or figures.defaultwidth) + local height = todimen(dr.height or figures.defaultheight) + local foundname = du.fullname + dr.width, dr.height = width, height + du.width, du.height, du.foundname = width, height, foundname + if trace_inclusion then + report_inclusion("including movie %a, width %p, height %p",foundname,width,height) + end + -- we need to push the node.write in between ... we could make a shared helper for this + context.startfoundexternalfigure(width .. "sp",height .. "sp") + context(function() + nodeinjections.insertmovie { + width = width, + height = height, + factor = number.dimenfactors.bp, + ["repeat"] = dr["repeat"], + controls = dr.controls, + preview = dr.preview, + label = dr.label, + foundname = foundname, + } + end) + context.stopfoundexternalfigure() + return data +end + +includers.mov = includers.nongeneric + +-- -- -- mps -- -- -- + +internalschemes.mprun = true + +local function internal(askedname) + local spec, mprun, mpnum = match(lower(askedname),"mprun([:%.]?)(.-)%.(%d+)") + if spec ~= "" then + return mprun, mpnum + else + return "", mpnum + end +end + +function existers.mps(askedname) + local mprun, mpnum = internal(askedname) + if mpnum then + return askedname + else + return existers.generic(askedname) + end +end + +function checkers.mps(data) + local mprun, mpnum = internal(data.used.fullname) + if mpnum then + return checkers_nongeneric(data,function() context.docheckfiguremprun(mprun,mpnum) end) + else + return checkers_nongeneric(data,function() context.docheckfiguremps(data.used.fullname) end) + end +end + +includers.mps = includers.nongeneric + +-- -- -- tex -- -- -- + +function existers.tex(askedname) + askedname = resolvers.findfile(askedname) + return askedname ~= "" and askedname or false +end + +function checkers.tex(data) + return checkers_nongeneric(data,function() context.docheckfiguretex(data.used.fullname) end) +end + +includers.tex = includers.nongeneric + +-- -- -- buffer -- -- -- + +function existers.buffer(askedname) + local name = file.nameonly(askedname) + local okay = buffers.exists(name) + return okay and name, true -- always quit scanning +end + +function checkers.buffer(data) + return checkers_nongeneric(data,function() context.docheckfigurebuffer(file.nameonly(data.used.fullname)) end) +end + +includers.buffers = includers.nongeneric + +-- -- -- auto -- -- -- + +function existers.auto(askedname) + local name = gsub(askedname, ".auto$", "") + local format = figures.guess(name) + if format then + report_inclusion("format guess %a for %a",format,name) + else + report_inclusion("format guess for %a is not possible",name) + end + return format and name, true, format +end + +checkers.auto = checkers.generic +includers.auto = includers.generic + +-- -- -- cld -- -- -- + +existers.cld = existers.tex + +function checkers.cld(data) + return checkers_nongeneric(data,function() context.docheckfigurecld(data.used.fullname) end) +end + +includers.cld = 
includers.nongeneric + +-- -- -- converters -- -- -- + +local function makeoptions(options) + local to = type(options) + return (to == "table" and concat(options," ")) or (to == "string" and options) or "" +end + +-- programs.makeoptions = makeoptions + +local function runprogram(binary,argument,variables) + local binary = match(binary,"[%S]+") -- to be sure + if type(argument) == "table" then + argument = concat(argument," ") -- for old times sake + end + if not os.which(binary) then + report_inclusion("program %a is not installed, not running command: %s",binary,command) + elseif not argument or argument == "" then + report_inclusion("nothing to run, unknown program %a",binary) + else + local command = format([["%s" %s]],binary,replacetemplate(longtostring(argument),variables)) + if trace_conversion or trace_programs then + report_inclusion("running command: %s",command) + end + os.spawn(command) + end +end + +programs.run = runprogram + +-- -- -- eps & pdf -- -- -- +-- +-- \externalfigure[cow.eps] +-- \externalfigure[cow.pdf][conversion=stripped] + +local epsconverter = converters.eps or { } +converters.eps = epsconverter +converters.ps = epsconverter + +local epstopdf = { + resolutions = { + [v_low] = "screen", + [v_medium] = "ebook", + [v_high] = "prepress", + }, + command = os.type == "windows" and "gswin32c" or "gs", + -- -dProcessDSCComments=false + argument = [[ + -q + -sDEVICE=pdfwrite + -dNOPAUSE + -dNOCACHE + -dBATCH + -dAutoRotatePages=/None + -dPDFSETTINGS=/%presets% + -dEPSCrop + -sOutputFile=%newname% + %oldname% + -c quit + ]], +} + +programs.epstopdf = epstopdf +programs.gs = epstopdf + +function epsconverter.pdf(oldname,newname,resolution) -- the resolution interface might change + local epstopdf = programs.epstopdf -- can be changed + local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high + runprogram(epstopdf.command, epstopdf.argument, { + newname = newname, + oldname = oldname, + presets = presets, + } ) +end + +epsconverter.default = epsconverter.pdf + +local pdfconverter = converters.pdf or { } +converters.pdf = pdfconverter + +programs.pdftoeps = { + command = "pdftops", + argument = [[-eps "%oldname%" "%newname%]], +} + +pdfconverter.stripped = function(oldname,newname) + local pdftoeps = programs.pdftoeps -- can be changed + local epstopdf = programs.epstopdf -- can be changed + local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high + local tmpname = newname .. 
".tmp" + runprogram(pdftoeps.command, pdftoeps.argument, { oldname = oldname, newname = tmpname, presets = presets }) + runprogram(epstopdf.command, epstopdf.argument, { oldname = tmpname, newname = newname, presets = presets }) + os.remove(tmpname) +end + +figures.registersuffix("stripped","pdf") + +-- -- -- svg -- -- -- + +local svgconverter = { } +converters.svg = svgconverter +converters.svgz = svgconverter + +-- inkscape on windows only works with complete paths + +programs.inkscape = { + command = "inkscape", + pdfargument = [[ + "%oldname%" + --export-dpi=600 + -A + "%newname%" + ]], + pngargument = [[ + "%oldname%" + --export-dpi=600 + --export-png="%newname%" + ]], +} + +function svgconverter.pdf(oldname,newname) + local inkscape = programs.inkscape -- can be changed + runprogram(inkscape.command, inkscape.pdfargument, { + newname = expandfilename(newname), + oldname = expandfilename(oldname), + } ) +end + +function svgconverter.png(oldname,newname) + local inkscape = programs.inkscape + runprogram(inkscape.command, inkscape.pngargument, { + newname = expandfilename(newname), + oldname = expandfilename(oldname), + } ) +end + +svgconverter.default = svgconverter.pdf + +-- -- -- gif -- -- -- +-- -- -- tif -- -- -- + +local gifconverter = converters.gif or { } +local tifconverter = converters.tif or { } +local bmpconverter = converters.bmp or { } + +converters.gif = gifconverter +converters.tif = tifconverter +converters.bmp = bmpconverter + +programs.convert = { + command = "gm", -- graphicmagick + argument = [[convert "%oldname%" "%newname%"]], +} + +local function converter(oldname,newname) + local convert = programs.convert + runprogram(convert.command, convert.argument, { + newname = newname, + oldname = oldname, + } ) +end + +tifconverter.pdf = converter +gifconverter.pdf = converter +bmpconverter.pdf = converter + +gifconverter.default = converter +tifconverter.default = converter +bmpconverter.default = converter + +-- todo: lowres + +-- -- -- bases -- -- -- + +local bases = allocate() +figures.bases = bases + +local bases_list = nil -- index => { basename, fullname, xmlroot } +local bases_used = nil -- [basename] => { basename, fullname, xmlroot } -- pointer to list +local bases_found = nil +local bases_enabled = false + +local function reset() + bases_list = allocate() + bases_used = allocate() + bases_found = allocate() + bases_enabled = false + bases.list = bases_list + bases.used = bases_used + bases.found = bases_found +end + +reset() + +function bases.use(basename) + if basename == "reset" then + reset() + else + basename = file.addsuffix(basename,"xml") + if not bases_used[basename] then + local t = { basename, nil, nil } + bases_used[basename] = t + bases_list[#bases_list+1] = t + if not bases_enabled then + bases_enabled = true + xml.registerns("rlx","http://www.pragma-ade.com/schemas/rlx") -- we should be able to do this per xml file + end + if trace_bases then + report_inclusion("registering base %a",basename) + end + end + end +end + +local function bases_find(basename,askedlabel) + if trace_bases then + report_inclusion("checking for %a in base %a",askedlabel,basename) + end + basename = file.addsuffix(basename,"xml") + local t = bases_found[askedlabel] + if t == nil then + local base = bases_used[basename] + local page = 0 + if base[2] == nil then + -- no yet located + for i=1,#figure_paths do + local path = figure_paths[i] + local xmlfile = path .. "/" .. 
basename + if io.exists(xmlfile) then + base[2] = xmlfile + base[3] = xml.load(xmlfile) + if trace_bases then + report_inclusion("base %a loaded",xmlfile) + end + break + end + end + end + t = false + if base[2] and base[3] then -- rlx:library + for e in xml.collected(base[3],"/(*:library|figurelibrary)/*:figure/*:label") do + page = page + 1 + if xml.text(e) == askedlabel then + t = { + base = file.replacesuffix(base[2],"pdf"), + format = "pdf", + name = xml.text(e,"../*:file"), -- to be checked + page = page, + } + bases_found[askedlabel] = t + if trace_bases then + report_inclusion("figure %a found in base %a",askedlabel,base[2]) + end + return t + end + end + if trace_bases and not t then + report_inclusion("figure %a not found in base %a",askedlabel,base[2]) + end + end + end + return t +end + +-- we can access sequential or by name + +local function bases_locate(askedlabel) + for i=1,#bases_list do + local entry = bases_list[i] + local t = bases_find(entry[1],askedlabel) + if t then + return t + end + end + return false +end + +function identifiers.base(data) + if bases_enabled then + local dr, du, ds = data.request, data.used, data.status + local fbl = bases_locate(dr.name or dr.label) + if fbl then + du.page = fbl.page + du.format = fbl.format + du.fullname = fbl.base + ds.fullname = fbl.name + ds.format = fbl.format + ds.page = fbl.page + ds.status = 10 + end + end + return data +end + +bases.locate = bases_locate +bases.find = bases_find + +identifiers.list = { + identifiers.base, + identifiers.default +} + +-- tracing + +statistics.register("graphics processing time", function() + local nofprocessed = figures.nofprocessed + if nofprocessed > 0 then + return format("%s seconds including tex, %s processed images", statistics.elapsedtime(figures),nofprocessed) + else + return nil + end +end) + +-- helper + +function figures.applyratio(width,height,w,h) -- width and height are strings and w and h are numbers + if not width or width == "" then + if not height or height == "" then + return figures.defaultwidth, figures.defaultheight + else + height = todimen(height) + if w and h then + return height * w/h, height + else + return figures.defaultwidth, height + end + end + else + width = todimen(width) + if not height or height == "" then + if w and h then + return width, width * h/w + else + return width, figures.defaultheight + end + else + return width, todimen(height) + end + end +end + +-- example of simple plugins: +-- +-- figures.converters.png = { +-- png = function(oldname,newname,resolution) +-- local command = string.format('gm convert -depth 1 "%s" "%s"',oldname,newname) +-- logs.report(string.format("running command %s",command)) +-- os.execute(command) +-- end, +-- } + +-- local fig = figures.push { name = pdffile } +-- figures.identify() +-- figures.check() +-- local nofpages = fig.used.pages +-- figures.pop() + +-- interfacing + +commands.setfigurelookuporder = figures.setorder diff --git a/tex/context/base/grph-raw.lua b/tex/context/base/grph-raw.lua index 4c5b031ea..e2ffb689f 100644 --- a/tex/context/base/grph-raw.lua +++ b/tex/context/base/grph-raw.lua @@ -1,42 +1,42 @@ -if not modules then modules = { } end modules ['grph-raw'] = { - version = 1.001, - comment = "companion to grph-raw.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This module is for Mojca, who wanted something like this for --- her gnuplot project. 
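-- Sketch (not part of the patch): figures.applyratio, defined above, keeps the natural
-- aspect ratio when only one dimension is requested; a hypothetical 400 x 300 graphic.
local w1, h1 = figures.applyratio("4cm","",400,300) -- h1 = 4cm * 300/400 = 3cm (in scaled points)
local w2, h2 = figures.applyratio("","",400,300)    -- both empty: figures.defaultwidth/defaultheight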
It's somewhat premliminary code but it --- works ok for that purpose. - -local tonumber = tonumber - -local report_bitmap = logs.reporter("graphics","bitmaps") - -local context = context -local texsp = tex.sp - -function figures.bitmapimage(t) - local data = t.data - local xresolution = tonumber(t.xresolution) - local yresolution = tonumber(t.yresolution) - if data and xresolution and yresolution then - local width, height = t.width or "", t.height or "" - local n = backends.nodeinjections.injectbitmap { - xresolution = xresolution, - yresolution = yresolution, - width = width ~= "" and texsp(width) or nil, - height = height ~= "" and texsp(height) or nil, - data = data, - colorspace = t.colorspace, - } - if n then - context.hbox(n) - else - report_bitmap("format no supported by backend") - end - else - report_bitmap("invalid specification") - end -end +if not modules then modules = { } end modules ['grph-raw'] = { + version = 1.001, + comment = "companion to grph-raw.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This module is for Mojca, who wanted something like this for +-- her gnuplot project. It's somewhat premliminary code but it +-- works ok for that purpose. + +local tonumber = tonumber + +local report_bitmap = logs.reporter("graphics","bitmaps") + +local context = context +local texsp = tex.sp + +function figures.bitmapimage(t) + local data = t.data + local xresolution = tonumber(t.xresolution) + local yresolution = tonumber(t.yresolution) + if data and xresolution and yresolution then + local width, height = t.width or "", t.height or "" + local n = backends.nodeinjections.injectbitmap { + xresolution = xresolution, + yresolution = yresolution, + width = width ~= "" and texsp(width) or nil, + height = height ~= "" and texsp(height) or nil, + data = data, + colorspace = t.colorspace, + } + if n then + context.hbox(n) + else + report_bitmap("format no supported by backend") + end + else + report_bitmap("invalid specification") + end +end diff --git a/tex/context/base/grph-swf.lua b/tex/context/base/grph-swf.lua index 8c28b76af..58136f7fc 100644 --- a/tex/context/base/grph-swf.lua +++ b/tex/context/base/grph-swf.lua @@ -1,94 +1,94 @@ -if not modules then modules = { } end modules ['grph-swf'] = { - version = 1.001, - comment = "companion to grph-inc.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- maybe: backends.codeinjections.insertswf - -local sub, format, match, byte = string.sub, string.format, string.match, string.byte -local concat = table.concat -local floor = math.floor -local tonumber = tonumber - -local readstring = io.readstring -local readnumber = io.readnumber -local tobitstring = number.tobitstring -local todimen = number.todimen -local nodeinjections = backends.nodeinjections -local figures = figures -local context = context - -local function getheader(name) - local f = io.open(name,"rb") - if not f then - return - end - local signature = readstring(f,3) -- F=uncompressed, C=compressed (zlib) - local version = readnumber(f,1) - local filelength = readnumber(f,-4) - local compressed = sub(signature,1,1) == "C" - local buffer - if compressed then - buffer = zlib.decompress(f:read('*a')) - else - buffer = f:read(20) -- ('*a') - end - f:close() - buffer = { match(buffer,"(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)") } - for i=1,9 do - 
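-- Sketch (not part of the patch): the Lua call behind the bitmap interface above.
-- "bitmapdata" is a hypothetical string holding the pixel payload in whatever form the
-- backend's injectbitmap expects; width/height are optional and passed through tex.sp.
figures.bitmapimage {
    xresolution = 2,
    yresolution = 2,
    colorspace  = "rgb",
    width       = "1cm",
    height      = "1cm",
    data        = bitmapdata,
}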
buffer[i] = tobitstring(byte(buffer[i])) - end - local framebits = concat(buffer,"",1,9) - local n = tonumber(sub(framebits,1,5),2) - local frame = { } -- xmin xmax ymin ymax - local xmin = tonumber(sub(framebits,6, 5 + n),2) - local xmax = tonumber(sub(framebits,6 + 1*n,5 + 2*n),2) - local ymin = tonumber(sub(framebits,6 + 2*n,5 + 3*n),2) - local ymax = tonumber(sub(framebits,6 + 3*n,5 + 4*n),2) - return { - filename = name, - version = version, - filelength = filelength, - framerate = tonumber(byte(buffer[10]) * 256 + byte(buffer[11])), - framecount = tonumber(byte(buffer[12]) * 256 + byte(buffer[13])), - -- framebits = framebits, - compressed = compressed, - width = floor((xmax - xmin) / 20), - height = floor((ymax - ymin) / 20), - rectangle = { - xmin = xmin, - xmax = xmax, - ymin = ymin, - ymax = ymax, - } - } -end - -function figures.checkers.swf(data) - local dr, du, ds = data.request, data.used, data.status - local foundname = du.fullname - local header = getheader(foundname) - local width, height = figures.applyratio(dr.width,dr.height,header.width,header.height) - dr.width, dr.height = width, height - du.width, du.height, du.foundname = width, height, foundname - context.startfoundexternalfigure(todimen(width),todimen(height)) - nodeinjections.insertswf { - foundname = foundname, - width = width, - height = height, - -- factor = number.dimenfactors.bp, - display = dr.display, - controls = dr.controls, - -- label = dr.label, - resources = dr.resources, - } - context.stopfoundexternalfigure() - return data -end - -figures.includers.swf = figures.includers.nongeneric - -figures.registersuffix("swf","swf") +if not modules then modules = { } end modules ['grph-swf'] = { + version = 1.001, + comment = "companion to grph-inc.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- maybe: backends.codeinjections.insertswf + +local sub, format, match, byte = string.sub, string.format, string.match, string.byte +local concat = table.concat +local floor = math.floor +local tonumber = tonumber + +local readstring = io.readstring +local readnumber = io.readnumber +local tobitstring = number.tobitstring +local todimen = number.todimen +local nodeinjections = backends.nodeinjections +local figures = figures +local context = context + +local function getheader(name) + local f = io.open(name,"rb") + if not f then + return + end + local signature = readstring(f,3) -- F=uncompressed, C=compressed (zlib) + local version = readnumber(f,1) + local filelength = readnumber(f,-4) + local compressed = sub(signature,1,1) == "C" + local buffer + if compressed then + buffer = zlib.decompress(f:read('*a')) + else + buffer = f:read(20) -- ('*a') + end + f:close() + buffer = { match(buffer,"(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)") } + for i=1,9 do + buffer[i] = tobitstring(byte(buffer[i])) + end + local framebits = concat(buffer,"",1,9) + local n = tonumber(sub(framebits,1,5),2) + local frame = { } -- xmin xmax ymin ymax + local xmin = tonumber(sub(framebits,6, 5 + n),2) + local xmax = tonumber(sub(framebits,6 + 1*n,5 + 2*n),2) + local ymin = tonumber(sub(framebits,6 + 2*n,5 + 3*n),2) + local ymax = tonumber(sub(framebits,6 + 3*n,5 + 4*n),2) + return { + filename = name, + version = version, + filelength = filelength, + framerate = tonumber(byte(buffer[10]) * 256 + byte(buffer[11])), + framecount = tonumber(byte(buffer[12]) * 256 + byte(buffer[13])), + -- framebits = framebits, + compressed = 
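-- Sketch (not part of the patch): the swf stage rectangle is stored in twips (1/20th of
-- a pixel), which is why the local getheader above divides by 20; "demo.swf" is a
-- hypothetical file name.
local header = getheader("demo.swf")
if header then
    print(header.width, header.height, header.framecount, header.compressed)
end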
compressed, + width = floor((xmax - xmin) / 20), + height = floor((ymax - ymin) / 20), + rectangle = { + xmin = xmin, + xmax = xmax, + ymin = ymin, + ymax = ymax, + } + } +end + +function figures.checkers.swf(data) + local dr, du, ds = data.request, data.used, data.status + local foundname = du.fullname + local header = getheader(foundname) + local width, height = figures.applyratio(dr.width,dr.height,header.width,header.height) + dr.width, dr.height = width, height + du.width, du.height, du.foundname = width, height, foundname + context.startfoundexternalfigure(todimen(width),todimen(height)) + nodeinjections.insertswf { + foundname = foundname, + width = width, + height = height, + -- factor = number.dimenfactors.bp, + display = dr.display, + controls = dr.controls, + -- label = dr.label, + resources = dr.resources, + } + context.stopfoundexternalfigure() + return data +end + +figures.includers.swf = figures.includers.nongeneric + +figures.registersuffix("swf","swf") diff --git a/tex/context/base/grph-u3d.lua b/tex/context/base/grph-u3d.lua index 6961c5503..d141dc080 100644 --- a/tex/context/base/grph-u3d.lua +++ b/tex/context/base/grph-u3d.lua @@ -1,51 +1,51 @@ -if not modules then modules = { } end modules ['grph-u3d'] = { - version = 1.001, - comment = "companion to grph-inc.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- see lpdf-u3d.lua for comment - --- maybe: backends.codeinjections.insertu3d - -local trace_inclusion = false trackers.register("figures.inclusion", function(v) trace_inclusion = v end) - -local report_u3d = logs.reporter("graphics","u3d") - -local figures = figures -local context = context -local nodeinjections = backends.nodeinjections -local todimen = string.todimen - -function figures.checkers.u3d(data) - local dr, du, ds = data.request, data.used, data.status - local width = todimen(dr.width or figures.defaultwidth) - local height = todimen(dr.height or figures.defaultheight) - local foundname = du.fullname - dr.width, dr.height = width, height - du.width, du.height, du.foundname = width, height, foundname - if trace_inclusion then - report_u3d("including u3d %a, width %p, height %p",foundname,width,height) - end - context.startfoundexternalfigure(width .. "sp",height .. 
"sp") - context(function() - nodeinjections.insertu3d { - foundname = foundname, - width = width, - height = height, - factor = number.dimenfactors.bp, - display = dr.display, - controls = dr.controls, - label = dr.label, - } - end) - context.stopfoundexternalfigure() - return data -end - -figures.includers.u3d = figures.includers.nongeneric - -figures.registersuffix("u3d","u3d") -figures.registersuffix("prc","u3d") +if not modules then modules = { } end modules ['grph-u3d'] = { + version = 1.001, + comment = "companion to grph-inc.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- see lpdf-u3d.lua for comment + +-- maybe: backends.codeinjections.insertu3d + +local trace_inclusion = false trackers.register("figures.inclusion", function(v) trace_inclusion = v end) + +local report_u3d = logs.reporter("graphics","u3d") + +local figures = figures +local context = context +local nodeinjections = backends.nodeinjections +local todimen = string.todimen + +function figures.checkers.u3d(data) + local dr, du, ds = data.request, data.used, data.status + local width = todimen(dr.width or figures.defaultwidth) + local height = todimen(dr.height or figures.defaultheight) + local foundname = du.fullname + dr.width, dr.height = width, height + du.width, du.height, du.foundname = width, height, foundname + if trace_inclusion then + report_u3d("including u3d %a, width %p, height %p",foundname,width,height) + end + context.startfoundexternalfigure(width .. "sp",height .. "sp") + context(function() + nodeinjections.insertu3d { + foundname = foundname, + width = width, + height = height, + factor = number.dimenfactors.bp, + display = dr.display, + controls = dr.controls, + label = dr.label, + } + end) + context.stopfoundexternalfigure() + return data +end + +figures.includers.u3d = figures.includers.nongeneric + +figures.registersuffix("u3d","u3d") +figures.registersuffix("prc","u3d") diff --git a/tex/context/base/grph-wnd.lua b/tex/context/base/grph-wnd.lua index ebb9b1169..8b005b123 100644 --- a/tex/context/base/grph-wnd.lua +++ b/tex/context/base/grph-wnd.lua @@ -1,47 +1,47 @@ -if not modules then modules = { } end modules ['grph-wnd'] = { - version = 1.001, - comment = "companion to grph-inc.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Thanks to Luigi Scarso for making graphic magic work in luatex. --- --- \externalfigure[hacker.jpeg][width=4cm,conversion=gray.jpg] - -local converters, suffixes = figures.converters, figures.suffixes - -local trace_conversion = false trackers.register("figures.conversion", function(v) trace_conversion = v end) - -local report_wand = logs.reporter("graphics","wand") - -local function togray(oldname,newname) - if lfs.isfile(oldname) then - require("gmwand") - if trace_conversion then - report_wand("converting %a to %a using gmwand",oldname,newname) - end - gmwand.InitializeMagick("./") -- What does this path do? - local wand = gmwand.NewMagickWand() - gmwand.MagickReadImage(wand,oldname) - gmwand.MagickSetImageColorspace(wand,gmwand.GRAYColorspace) - gmwand.MagickWriteImages(wand,newname,1) - gmwand.DestroyMagickWand(wand) - else - report_wand("unable to convert %a to %a using gmwand",oldname,newname) - end -end - -local formats = { "png", "jpg", "gif" } - -for i=1,#formats do - local oldformat = formats[i] - local newformat = "gray." .. 
oldformat - if trace_conversion then - report_wand("installing converter for %a to %a",oldformat,newformat) - end - converters[oldformat] = converters[oldformat] or { } - converters[oldformat][newformat] = togray - suffixes [newformat] = oldformat -end +if not modules then modules = { } end modules ['grph-wnd'] = { + version = 1.001, + comment = "companion to grph-inc.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Thanks to Luigi Scarso for making graphic magic work in luatex. +-- +-- \externalfigure[hacker.jpeg][width=4cm,conversion=gray.jpg] + +local converters, suffixes = figures.converters, figures.suffixes + +local trace_conversion = false trackers.register("figures.conversion", function(v) trace_conversion = v end) + +local report_wand = logs.reporter("graphics","wand") + +local function togray(oldname,newname) + if lfs.isfile(oldname) then + require("gmwand") + if trace_conversion then + report_wand("converting %a to %a using gmwand",oldname,newname) + end + gmwand.InitializeMagick("./") -- What does this path do? + local wand = gmwand.NewMagickWand() + gmwand.MagickReadImage(wand,oldname) + gmwand.MagickSetImageColorspace(wand,gmwand.GRAYColorspace) + gmwand.MagickWriteImages(wand,newname,1) + gmwand.DestroyMagickWand(wand) + else + report_wand("unable to convert %a to %a using gmwand",oldname,newname) + end +end + +local formats = { "png", "jpg", "gif" } + +for i=1,#formats do + local oldformat = formats[i] + local newformat = "gray." .. oldformat + if trace_conversion then + report_wand("installing converter for %a to %a",oldformat,newformat) + end + converters[oldformat] = converters[oldformat] or { } + converters[oldformat][newformat] = togray + suffixes [newformat] = oldformat +end diff --git a/tex/context/base/java-ini.lua b/tex/context/base/java-ini.lua index 321e4e24d..3f1fbd6cf 100644 --- a/tex/context/base/java-ini.lua +++ b/tex/context/base/java-ini.lua @@ -1,226 +1,226 @@ -if not modules then modules = { } end modules ['java-ini'] = { - version = 1.001, - comment = "companion to java-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format -local concat = table.concat -local lpegmatch, P, S, C, Carg, Cc = lpeg.match, lpeg.P, lpeg.S, lpeg.C, lpeg.Carg, lpeg.Cc - -local allocate = utilities.storage.allocate -local settings_to_array = utilities.parsers.settings_to_array -local variables = interfaces.variables -local formatters = string.formatters - --- todo: don't flush scripts if no JS key - -local trace_javascript = false trackers.register("backends.javascript", function(v) trace_javascript = v end) - -local report_javascripts = logs.reporter ("interactions","javascripts") -local status_javascripts = logs.messenger("interactions","javascripts") - -interactions.javascripts = interactions.javascripts or { } -local javascripts = interactions.javascripts - -javascripts.codes = allocate() -javascripts.preambles = allocate() -javascripts.functions = allocate() - -local codes, preambles, functions = javascripts.codes, javascripts.preambles, javascripts.functions - -local preambled = { } - -local function storefunction(s,preamble) - if trace_javascript then - report_javascripts("found function %a",s) - end - functions[s] = preamble -end - -local uses = P("uses") -local used = P("used") -local left = P("{") -local right = 
P("}") -local space = S(" \r\n") -local spaces = space^0 -local braced = left * C((1-right-space)^1) * right -local unbraced = C((1-space)^1) -local name = spaces * (braced + unbraced) * spaces -local any = P(1) -local script = C(any^1) -local funct = P("function") -local leftp = P("(") -local rightp = P(")") -local fname = spaces * funct * spaces * (C((1-space-left-leftp)^1) * Carg(1) / storefunction) * spaces * leftp - -local parsecode = name * ((uses * name) + Cc("")) * spaces * script -local parsepreamble = name * ((used * name) + Cc("")) * spaces * script -local parsefunctions = (fname + any)^0 - -function javascripts.storecode(str) - local name, uses, script = lpegmatch(parsecode,str) - if name and name ~= "" then - codes[name] = { uses, script } - end -end - -function javascripts.storepreamble(str) -- now later - local name, used, script = lpegmatch(parsepreamble,str) - if name and name ~= "" and not preambled[name] then - local n = #preambles + 1 - preambles[n] = { name, used, script } - preambled[name] = n - if trace_javascript then - report_javascripts("stored preamble %a, state %a, order %a",name,used,n) - end - lpegmatch(parsefunctions,script,1,n) - end -end - -function javascripts.setpreamble(name,script) -- now later - if name and name ~= "" and not preambled[name] then - local n = #preambles + 1 - preambles[n] = { name, "now", script } - preambled[name] = n - if trace_javascript then - report_javascripts("adapted preamble %a, state %a, order %a",name,"now",n) - end - lpegmatch(parsefunctions,script,1,n) - end -end - -function javascripts.addtopreamble(name,script) - if name and name ~= "" then - local p = preambled[name] - if p then - preambles[p] = { "now", preambles[p] .. " ;\n" .. script } - if trace_javascript then - report_javascripts("extended preamble %a, state %a, order %a",name,"now",p) - end - else - local n = #preambles + 1 - preambles[n] = { name, "now", script } - preambled[name] = n - if trace_javascript then - report_javascripts("stored preamble %a, state %a, order %a",name,"now",n) - end - lpegmatch(parsefunctions,script,1,n) - end - end -end - -function javascripts.usepreamblenow(name) -- now later - if name and name ~= "" and name ~= variables.reset then -- todo: reset - local names = settings_to_array(name) - for i=1,#names do - local somename = names[i] - if not preambled[somename] then - preambles[preambled[somename]][2] = "now" - if trace_javascript then - report_javascripts("used preamble %a, state %a, order %a",somename,"now","auto") - end - end - end - end -end - -local splitter = lpeg.tsplitat(lpeg.patterns.commaspacer) - -local used, reported = false, { } -- we can cache more - -function javascripts.code(name,arguments) - local c = codes[name] - if c then - local u, code = c[1], c[2] - if u ~= "" then - local p = preambled[u] - if p then - preambles[p][2] = "now" - if trace_javascript and not reported[name] then - reported[name] = true - report_javascripts("used code %a, preamble %a",name,u) - end - elseif trace_javascript and not reported[name] then - reported[name] = true - report_javascripts("used code %a",name) - end - elseif trace_javascript and not reported[name] then - reported[name] = true - report_javascripts("used code %a",name) - end - used = true - return code - end - local f = functions[name] - if f then - used = true - if trace_javascript and not reported[name] then - reported[name] = true - report_javascripts("used function %a",name) - end - preambles[f][2] = "now" -- automatically tag preambles that define the function (as later) - 
if arguments then - local args = lpegmatch(splitter,arguments) - for i=1,#args do -- can be a helper - args[i] = formatters["%q"](args[i]) - end - return formatters["%s(%s)"](name,concat(args,",")) - else - return formatters["%s()"](name) - end - end -end - -function javascripts.flushpreambles() - local t = { } --- if used then -- we want to be able to enforce inclusion - for i=1,#preambles do - local preamble = preambles[i] - if preamble[2] == "now" then - if trace_javascript then - report_javascripts("flushed preamble %a",preamble[1]) - end - t[#t+1] = { preamble[1], preamble[3] } - end - end --- end - return t -end - -local patterns = { "java-imp-%s.mkiv", "java-imp-%s.tex", "java-%s.mkiv", "java-%s.tex" } - -local function action(name,foundname) - context.startnointerference() - context.startreadingfile() - context.input(foundname) - status_javascripts("loaded: library %a",name) - context.stopreadingfile() - context.stopnointerference() -end - -local function failure(name) - report_javascripts("unknown library %a",name) -end - -function javascripts.usescripts(name) - if name ~= variables.reset then -- reset is obsolete - commands.uselibrary { - name = name, - patterns = patterns, - action = action, - failure = failure, - onlyonce = true, - } - end -end - --- interface - -commands.storejavascriptcode = interactions.javascripts.storecode -commands.storejavascriptpreamble = interactions.javascripts.storepreamble -commands.addtojavascriptpreamble = interactions.javascripts.addtopreamble -commands.usejavascriptpreamble = interactions.javascripts.usepreamblenow -commands.usejavascriptscripts = interactions.javascripts.usescripts +if not modules then modules = { } end modules ['java-ini'] = { + version = 1.001, + comment = "companion to java-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format +local concat = table.concat +local lpegmatch, P, S, C, Carg, Cc = lpeg.match, lpeg.P, lpeg.S, lpeg.C, lpeg.Carg, lpeg.Cc + +local allocate = utilities.storage.allocate +local settings_to_array = utilities.parsers.settings_to_array +local variables = interfaces.variables +local formatters = string.formatters + +-- todo: don't flush scripts if no JS key + +local trace_javascript = false trackers.register("backends.javascript", function(v) trace_javascript = v end) + +local report_javascripts = logs.reporter ("interactions","javascripts") +local status_javascripts = logs.messenger("interactions","javascripts") + +interactions.javascripts = interactions.javascripts or { } +local javascripts = interactions.javascripts + +javascripts.codes = allocate() +javascripts.preambles = allocate() +javascripts.functions = allocate() + +local codes, preambles, functions = javascripts.codes, javascripts.preambles, javascripts.functions + +local preambled = { } + +local function storefunction(s,preamble) + if trace_javascript then + report_javascripts("found function %a",s) + end + functions[s] = preamble +end + +local uses = P("uses") +local used = P("used") +local left = P("{") +local right = P("}") +local space = S(" \r\n") +local spaces = space^0 +local braced = left * C((1-right-space)^1) * right +local unbraced = C((1-space)^1) +local name = spaces * (braced + unbraced) * spaces +local any = P(1) +local script = C(any^1) +local funct = P("function") +local leftp = P("(") +local rightp = P(")") +local fname = spaces * funct * spaces * (C((1-space-left-leftp)^1) * Carg(1) / 
storefunction) * spaces * leftp + +local parsecode = name * ((uses * name) + Cc("")) * spaces * script +local parsepreamble = name * ((used * name) + Cc("")) * spaces * script +local parsefunctions = (fname + any)^0 + +function javascripts.storecode(str) + local name, uses, script = lpegmatch(parsecode,str) + if name and name ~= "" then + codes[name] = { uses, script } + end +end + +function javascripts.storepreamble(str) -- now later + local name, used, script = lpegmatch(parsepreamble,str) + if name and name ~= "" and not preambled[name] then + local n = #preambles + 1 + preambles[n] = { name, used, script } + preambled[name] = n + if trace_javascript then + report_javascripts("stored preamble %a, state %a, order %a",name,used,n) + end + lpegmatch(parsefunctions,script,1,n) + end +end + +function javascripts.setpreamble(name,script) -- now later + if name and name ~= "" and not preambled[name] then + local n = #preambles + 1 + preambles[n] = { name, "now", script } + preambled[name] = n + if trace_javascript then + report_javascripts("adapted preamble %a, state %a, order %a",name,"now",n) + end + lpegmatch(parsefunctions,script,1,n) + end +end + +function javascripts.addtopreamble(name,script) + if name and name ~= "" then + local p = preambled[name] + if p then + preambles[p] = { "now", preambles[p] .. " ;\n" .. script } + if trace_javascript then + report_javascripts("extended preamble %a, state %a, order %a",name,"now",p) + end + else + local n = #preambles + 1 + preambles[n] = { name, "now", script } + preambled[name] = n + if trace_javascript then + report_javascripts("stored preamble %a, state %a, order %a",name,"now",n) + end + lpegmatch(parsefunctions,script,1,n) + end + end +end + +function javascripts.usepreamblenow(name) -- now later + if name and name ~= "" and name ~= variables.reset then -- todo: reset + local names = settings_to_array(name) + for i=1,#names do + local somename = names[i] + if not preambled[somename] then + preambles[preambled[somename]][2] = "now" + if trace_javascript then + report_javascripts("used preamble %a, state %a, order %a",somename,"now","auto") + end + end + end + end +end + +local splitter = lpeg.tsplitat(lpeg.patterns.commaspacer) + +local used, reported = false, { } -- we can cache more + +function javascripts.code(name,arguments) + local c = codes[name] + if c then + local u, code = c[1], c[2] + if u ~= "" then + local p = preambled[u] + if p then + preambles[p][2] = "now" + if trace_javascript and not reported[name] then + reported[name] = true + report_javascripts("used code %a, preamble %a",name,u) + end + elseif trace_javascript and not reported[name] then + reported[name] = true + report_javascripts("used code %a",name) + end + elseif trace_javascript and not reported[name] then + reported[name] = true + report_javascripts("used code %a",name) + end + used = true + return code + end + local f = functions[name] + if f then + used = true + if trace_javascript and not reported[name] then + reported[name] = true + report_javascripts("used function %a",name) + end + preambles[f][2] = "now" -- automatically tag preambles that define the function (as later) + if arguments then + local args = lpegmatch(splitter,arguments) + for i=1,#args do -- can be a helper + args[i] = formatters["%q"](args[i]) + end + return formatters["%s(%s)"](name,concat(args,",")) + else + return formatters["%s()"](name) + end + end +end + +function javascripts.flushpreambles() + local t = { } +-- if used then -- we want to be able to enforce inclusion + for 
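-- Sketch (not part of the patch): the preamble/code interface is normally fed from the
-- TeX end; the names and the script body here are made up, just to show the
-- "{name} [uses|used {othername}] script" format that the lpeg parsers above expect.
javascripts.storepreamble([[{mylib} used later function Hello(s) { app.alert(s); }]])
javascripts.storecode([[{greet} uses {mylib} Hello("hi")]])
local js = javascripts.code("greet") -- returns 'Hello("hi")' and tags the mylib preamble as "now"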
i=1,#preambles do + local preamble = preambles[i] + if preamble[2] == "now" then + if trace_javascript then + report_javascripts("flushed preamble %a",preamble[1]) + end + t[#t+1] = { preamble[1], preamble[3] } + end + end +-- end + return t +end + +local patterns = { "java-imp-%s.mkiv", "java-imp-%s.tex", "java-%s.mkiv", "java-%s.tex" } + +local function action(name,foundname) + context.startnointerference() + context.startreadingfile() + context.input(foundname) + status_javascripts("loaded: library %a",name) + context.stopreadingfile() + context.stopnointerference() +end + +local function failure(name) + report_javascripts("unknown library %a",name) +end + +function javascripts.usescripts(name) + if name ~= variables.reset then -- reset is obsolete + commands.uselibrary { + name = name, + patterns = patterns, + action = action, + failure = failure, + onlyonce = true, + } + end +end + +-- interface + +commands.storejavascriptcode = interactions.javascripts.storecode +commands.storejavascriptpreamble = interactions.javascripts.storepreamble +commands.addtojavascriptpreamble = interactions.javascripts.addtopreamble +commands.usejavascriptpreamble = interactions.javascripts.usepreamblenow +commands.usejavascriptscripts = interactions.javascripts.usescripts diff --git a/tex/context/base/l-boolean.lua b/tex/context/base/l-boolean.lua index f087f1a4c..ddac9b8a0 100644 --- a/tex/context/base/l-boolean.lua +++ b/tex/context/base/l-boolean.lua @@ -1,69 +1,69 @@ -if not modules then modules = { } end modules ['l-boolean'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local type, tonumber = type, tonumber - -boolean = boolean or { } -local boolean = boolean - -function boolean.tonumber(b) - if b then return 1 else return 0 end -- test and return or return -end - -function toboolean(str,tolerant) -- global - if str == nil then - return false - elseif str == false then - return false - elseif str == true then - return true - elseif str == "true" then - return true - elseif str == "false" then - return false - elseif not tolerant then - return false - elseif str == 0 then - return false - elseif (tonumber(str) or 0) > 0 then - return true - else - return str == "yes" or str == "on" or str == "t" - end -end - -string.toboolean = toboolean - -function string.booleanstring(str) - if str == "0" then - return false - elseif str == "1" then - return true - elseif str == "" then - return false - elseif str == "false" then - return false - elseif str == "true" then - return true - elseif (tonumber(str) or 0) > 0 then - return true - else - return str == "yes" or str == "on" or str == "t" - end -end - -function string.is_boolean(str,default) - if type(str) == "string" then - if str == "true" or str == "yes" or str == "on" or str == "t" then - return true - elseif str == "false" or str == "no" or str == "off" or str == "f" then - return false - end - end - return default -end +if not modules then modules = { } end modules ['l-boolean'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local type, tonumber = type, tonumber + +boolean = boolean or { } +local boolean = boolean + +function boolean.tonumber(b) + if b then return 1 else return 0 end -- test and return or 
return +end + +function toboolean(str,tolerant) -- global + if str == nil then + return false + elseif str == false then + return false + elseif str == true then + return true + elseif str == "true" then + return true + elseif str == "false" then + return false + elseif not tolerant then + return false + elseif str == 0 then + return false + elseif (tonumber(str) or 0) > 0 then + return true + else + return str == "yes" or str == "on" or str == "t" + end +end + +string.toboolean = toboolean + +function string.booleanstring(str) + if str == "0" then + return false + elseif str == "1" then + return true + elseif str == "" then + return false + elseif str == "false" then + return false + elseif str == "true" then + return true + elseif (tonumber(str) or 0) > 0 then + return true + else + return str == "yes" or str == "on" or str == "t" + end +end + +function string.is_boolean(str,default) + if type(str) == "string" then + if str == "true" or str == "yes" or str == "on" or str == "t" then + return true + elseif str == "false" or str == "no" or str == "off" or str == "f" then + return false + end + end + return default +end diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua index 3d0576eeb..a58e5302e 100644 --- a/tex/context/base/l-dir.lua +++ b/tex/context/base/l-dir.lua @@ -1,470 +1,470 @@ -if not modules then modules = { } end modules ['l-dir'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- dir.expandname will be merged with cleanpath and collapsepath - -local type, select = type, select -local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub -local concat, insert, remove, unpack = table.concat, table.insert, table.remove, table.unpack -local lpegmatch = lpeg.match - -local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V - -dir = dir or { } -local dir = dir -local lfs = lfs - -local attributes = lfs.attributes -local walkdir = lfs.dir -local isdir = lfs.isdir -local isfile = lfs.isfile -local currentdir = lfs.currentdir -local chdir = lfs.chdir - --- in case we load outside luatex - -if not isdir then - function isdir(name) - local a = attributes(name) - return a and a.mode == "directory" - end - lfs.isdir = isdir -end - -if not isfile then - function isfile(name) - local a = attributes(name) - return a and a.mode == "file" - end - lfs.isfile = isfile -end - --- handy - -function dir.current() - return (gsub(currentdir(),"\\","/")) -end - --- optimizing for no find (*) does not save time - ---~ local function globpattern(path,patt,recurse,action) -- fails in recent luatex due to some change in lfs ---~ local ok, scanner ---~ if path == "/" then ---~ ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe ---~ else ---~ ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe ---~ end ---~ if ok and type(scanner) == "function" then ---~ if not find(path,"/$") then path = path .. '/' end ---~ for name in scanner do ---~ local full = path .. 
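-- Sketch (not part of the patch): the boolean helpers above in action.
print(toboolean("true"))              -- true
print(toboolean("yes"))               -- false (strict mode only accepts true/false)
print(toboolean("yes",true))          -- true  (tolerant mode also accepts yes/on/t/numbers)
print(string.is_boolean("off"))       -- false
print(string.is_boolean("maybe",true))-- true: unrecognized string, so the default is returned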
name ---~ local mode = attributes(full,'mode') ---~ if mode == 'file' then ---~ if find(full,patt) then ---~ action(full) ---~ end ---~ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then ---~ globpattern(full,patt,recurse,action) ---~ end ---~ end ---~ end ---~ end - -local lfsisdir = isdir - -local function isdir(path) - path = gsub(path,"[/\\]+$","") - return lfsisdir(path) -end - -lfs.isdir = isdir - -local function globpattern(path,patt,recurse,action) - if path == "/" then - path = path .. "." - elseif not find(path,"/$") then - path = path .. '/' - end - if isdir(path) then -- lfs.isdir does not like trailing / - for name in walkdir(path) do -- lfs.dir accepts trailing / - local full = path .. name - local mode = attributes(full,'mode') - if mode == 'file' then - if find(full,patt) then - action(full) - end - elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then - globpattern(full,patt,recurse,action) - end - end - end -end - -dir.globpattern = globpattern - -local function collectpattern(path,patt,recurse,result) - local ok, scanner - result = result or { } - if path == "/" then - ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe - else - ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe - end - if ok and type(scanner) == "function" then - if not find(path,"/$") then path = path .. '/' end - for name in scanner, first do - local full = path .. name - local attr = attributes(full) - local mode = attr.mode - if mode == 'file' then - if find(full,patt) then - result[name] = attr - end - elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then - attr.list = collectpattern(full,patt,recurse) - result[name] = attr - end - end - end - return result -end - -dir.collectpattern = collectpattern - -local pattern = Ct { - [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3), - [2] = C(((1-S("*?/"))^0 * P("/"))^0), - [3] = C(P(1)^0) -} - -local filter = Cs ( ( - P("**") / ".*" + - P("*") / "[^/]*" + - P("?") / "[^/]" + - P(".") / "%%." + - P("+") / "%%+" + - P("-") / "%%-" + - P(1) -)^0 ) - -local function glob(str,t) - if type(t) == "function" then - if type(str) == "table" then - for s=1,#str do - glob(str[s],t) - end - elseif isfile(str) then - t(str) - else - local split = lpegmatch(pattern,str) -- we could use the file splitter - if split then - local root, path, base = split[1], split[2], split[3] - local recurse = find(base,"%*%*") - local start = root .. path - local result = lpegmatch(filter,start .. base) - globpattern(start,result,recurse,t) - end - end - else - if type(str) == "table" then - local t = t or { } - for s=1,#str do - glob(str[s],t) - end - return t - elseif isfile(str) then - if t then - t[#t+1] = str - return t - else - return { str } - end - else - local split = lpegmatch(pattern,str) -- we could use the file splitter - if split then - local t = t or { } - local action = action or function(name) t[#t+1] = name end - local root, path, base = split[1], split[2], split[3] - local recurse = find(base,"%*%*") - local start = root .. path - local result = lpegmatch(filter,start .. 
base) - globpattern(start,result,recurse,action) - return t - else - return { } - end - end - end -end - -dir.glob = glob - ---~ list = dir.glob("**/*.tif") ---~ list = dir.glob("/**/*.tif") ---~ list = dir.glob("./**/*.tif") ---~ list = dir.glob("oeps/**/*.tif") ---~ list = dir.glob("/oeps/**/*.tif") - -local function globfiles(path,recurse,func,files) -- func == pattern or function - if type(func) == "string" then - local s = func - func = function(name) return find(name,s) end - end - files = files or { } - local noffiles = #files - for name in walkdir(path) do - if find(name,"^%.") then - --- skip - else - local mode = attributes(name,'mode') - if mode == "directory" then - if recurse then - globfiles(path .. "/" .. name,recurse,func,files) - end - elseif mode == "file" then - if not func or func(name) then - noffiles = noffiles + 1 - files[noffiles] = path .. "/" .. name - end - end - end - end - return files -end - -dir.globfiles = globfiles - --- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex") --- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex") --- t = dir.glob("c:/data/develop/context/texmf/**/*.tex") --- t = dir.glob("f:/minimal/tex/**/*") --- print(dir.ls("f:/minimal/tex/**/*")) --- print(dir.ls("*.tex")) - -function dir.ls(pattern) - return concat(glob(pattern),"\n") -end - ---~ mkdirs("temp") ---~ mkdirs("a/b/c") ---~ mkdirs(".","/a/b/c") ---~ mkdirs("a","b","c") - -local make_indeed = true -- false - -local onwindows = os.type == "windows" or find(os.getenv("PATH"),";") - -if onwindows then - - function dir.mkdirs(...) - local str, pth = "", "" - for i=1,select("#",...) do - local s = select(i,...) - if s == "" then - -- skip - elseif str == "" then - str = s - else - str = str .. "/" .. s - end - end - local first, middle, last - local drive = false - first, middle, last = match(str,"^(//)(//*)(.*)$") - if first then - -- empty network path == local path - else - first, last = match(str,"^(//)/*(.-)$") - if first then - middle, last = match(str,"([^/]+)/+(.-)$") - if middle then - pth = "//" .. middle - else - pth = "//" .. last - last = "" - end - else - first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$") - if first then - pth, drive = first .. middle, true - else - middle, last = match(str,"^(/*)(.-)$") - if not middle then - last = str - end - end - end - end - for s in gmatch(last,"[^/]+") do - if pth == "" then - pth = s - elseif drive then - pth, drive = pth .. s, false - else - pth = pth .. "/" .. s - end - if make_indeed and not isdir(pth) then - lfs.mkdir(pth) - end - end - return pth, (isdir(pth) == true) - end - - --~ print(dir.mkdirs("","","a","c")) - --~ print(dir.mkdirs("a")) - --~ print(dir.mkdirs("a:")) - --~ print(dir.mkdirs("a:/b/c")) - --~ print(dir.mkdirs("a:b/c")) - --~ print(dir.mkdirs("a:/bbb/c")) - --~ print(dir.mkdirs("/a/b/c")) - --~ print(dir.mkdirs("/aaa/b/c")) - --~ print(dir.mkdirs("//a/b/c")) - --~ print(dir.mkdirs("///a/b/c")) - --~ print(dir.mkdirs("a/bbb//ccc/")) - -else - - function dir.mkdirs(...) - local str, pth = "", "" - for i=1,select("#",...) do - local s = select(i,...) - if s and s ~= "" then -- we catch nil and false - if str ~= "" then - str = str .. "/" .. s - else - str = s - end - end - end - str = gsub(str,"/+","/") - if find(str,"^/") then - pth = "/" - for s in gmatch(str,"[^/]+") do - local first = (pth == "/") - if first then - pth = pth .. s - else - pth = pth .. "/" .. s - end - if make_indeed and not first and not isdir(pth) then - lfs.mkdir(pth) - end - end - else - pth = "." 
- for s in gmatch(str,"[^/]+") do - pth = pth .. "/" .. s - if make_indeed and not isdir(pth) then - lfs.mkdir(pth) - end - end - end - return pth, (isdir(pth) == true) - end - - --~ print(dir.mkdirs("","","a","c")) - --~ print(dir.mkdirs("a")) - --~ print(dir.mkdirs("/a/b/c")) - --~ print(dir.mkdirs("/aaa/b/c")) - --~ print(dir.mkdirs("//a/b/c")) - --~ print(dir.mkdirs("///a/b/c")) - --~ print(dir.mkdirs("a/bbb//ccc/")) - -end - -dir.makedirs = dir.mkdirs - --- we can only define it here as it uses dir.current - -if onwindows then - - function dir.expandname(str) -- will be merged with cleanpath and collapsepath - local first, nothing, last = match(str,"^(//)(//*)(.*)$") - if first then - first = dir.current() .. "/" -- dir.current sanitizes - end - if not first then - first, last = match(str,"^(//)/*(.*)$") - end - if not first then - first, last = match(str,"^([a-zA-Z]:)(.*)$") - if first and not find(last,"^/") then - local d = currentdir() - if chdir(first) then - first = dir.current() - end - chdir(d) - end - end - if not first then - first, last = dir.current(), str - end - last = gsub(last,"//","/") - last = gsub(last,"/%./","/") - last = gsub(last,"^/*","") - first = gsub(first,"/*$","") - if last == "" or last == "." then - return first - else - return first .. "/" .. last - end - end - -else - - function dir.expandname(str) -- will be merged with cleanpath and collapsepath - if not find(str,"^/") then - str = currentdir() .. "/" .. str - end - str = gsub(str,"//","/") - str = gsub(str,"/%./","/") - str = gsub(str,"(.)/%.$","%1") - return str - end - -end - -file.expandname = dir.expandname -- for convenience - -local stack = { } - -function dir.push(newdir) - insert(stack,currentdir()) - if newdir and newdir ~= "" then - chdir(newdir) - end -end - -function dir.pop() - local d = remove(stack) - if d then - chdir(d) - end - return d -end - -local function found(...) -- can have nil entries - for i=1,select("#",...) do - local path = select(i,...) - local kind = type(path) - if kind == "string" then - if isdir(path) then - return path - end - elseif kind == "table" then - -- here we asume no holes, i.e. 
an indexed table - local path = found(unpack(path)) - if path then - return path - end - end - end - -- return nil -- if we want print("crappath") to show something -end - -dir.found = found +if not modules then modules = { } end modules ['l-dir'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- dir.expandname will be merged with cleanpath and collapsepath + +local type, select = type, select +local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local concat, insert, remove, unpack = table.concat, table.insert, table.remove, table.unpack +local lpegmatch = lpeg.match + +local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V + +dir = dir or { } +local dir = dir +local lfs = lfs + +local attributes = lfs.attributes +local walkdir = lfs.dir +local isdir = lfs.isdir +local isfile = lfs.isfile +local currentdir = lfs.currentdir +local chdir = lfs.chdir + +-- in case we load outside luatex + +if not isdir then + function isdir(name) + local a = attributes(name) + return a and a.mode == "directory" + end + lfs.isdir = isdir +end + +if not isfile then + function isfile(name) + local a = attributes(name) + return a and a.mode == "file" + end + lfs.isfile = isfile +end + +-- handy + +function dir.current() + return (gsub(currentdir(),"\\","/")) +end + +-- optimizing for no find (*) does not save time + +--~ local function globpattern(path,patt,recurse,action) -- fails in recent luatex due to some change in lfs +--~ local ok, scanner +--~ if path == "/" then +--~ ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe +--~ else +--~ ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe +--~ end +--~ if ok and type(scanner) == "function" then +--~ if not find(path,"/$") then path = path .. '/' end +--~ for name in scanner do +--~ local full = path .. name +--~ local mode = attributes(full,'mode') +--~ if mode == 'file' then +--~ if find(full,patt) then +--~ action(full) +--~ end +--~ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then +--~ globpattern(full,patt,recurse,action) +--~ end +--~ end +--~ end +--~ end + +local lfsisdir = isdir + +local function isdir(path) + path = gsub(path,"[/\\]+$","") + return lfsisdir(path) +end + +lfs.isdir = isdir + +local function globpattern(path,patt,recurse,action) + if path == "/" then + path = path .. "." + elseif not find(path,"/$") then + path = path .. '/' + end + if isdir(path) then -- lfs.isdir does not like trailing / + for name in walkdir(path) do -- lfs.dir accepts trailing / + local full = path .. 
name + local mode = attributes(full,'mode') + if mode == 'file' then + if find(full,patt) then + action(full) + end + elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then + globpattern(full,patt,recurse,action) + end + end + end +end + +dir.globpattern = globpattern + +local function collectpattern(path,patt,recurse,result) + local ok, scanner + result = result or { } + if path == "/" then + ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe + else + ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe + end + if ok and type(scanner) == "function" then + if not find(path,"/$") then path = path .. '/' end + for name in scanner, first do + local full = path .. name + local attr = attributes(full) + local mode = attr.mode + if mode == 'file' then + if find(full,patt) then + result[name] = attr + end + elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then + attr.list = collectpattern(full,patt,recurse) + result[name] = attr + end + end + end + return result +end + +dir.collectpattern = collectpattern + +local pattern = Ct { + [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3), + [2] = C(((1-S("*?/"))^0 * P("/"))^0), + [3] = C(P(1)^0) +} + +local filter = Cs ( ( + P("**") / ".*" + + P("*") / "[^/]*" + + P("?") / "[^/]" + + P(".") / "%%." + + P("+") / "%%+" + + P("-") / "%%-" + + P(1) +)^0 ) + +local function glob(str,t) + if type(t) == "function" then + if type(str) == "table" then + for s=1,#str do + glob(str[s],t) + end + elseif isfile(str) then + t(str) + else + local split = lpegmatch(pattern,str) -- we could use the file splitter + if split then + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + globpattern(start,result,recurse,t) + end + end + else + if type(str) == "table" then + local t = t or { } + for s=1,#str do + glob(str[s],t) + end + return t + elseif isfile(str) then + if t then + t[#t+1] = str + return t + else + return { str } + end + else + local split = lpegmatch(pattern,str) -- we could use the file splitter + if split then + local t = t or { } + local action = action or function(name) t[#t+1] = name end + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + globpattern(start,result,recurse,action) + return t + else + return { } + end + end + end +end + +dir.glob = glob + +--~ list = dir.glob("**/*.tif") +--~ list = dir.glob("/**/*.tif") +--~ list = dir.glob("./**/*.tif") +--~ list = dir.glob("oeps/**/*.tif") +--~ list = dir.glob("/oeps/**/*.tif") + +local function globfiles(path,recurse,func,files) -- func == pattern or function + if type(func) == "string" then + local s = func + func = function(name) return find(name,s) end + end + files = files or { } + local noffiles = #files + for name in walkdir(path) do + if find(name,"^%.") then + --- skip + else + local mode = attributes(name,'mode') + if mode == "directory" then + if recurse then + globfiles(path .. "/" .. name,recurse,func,files) + end + elseif mode == "file" then + if not func or func(name) then + noffiles = noffiles + 1 + files[noffiles] = path .. "/" .. 
name + end + end + end + end + return files +end + +dir.globfiles = globfiles + +-- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex") +-- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex") +-- t = dir.glob("c:/data/develop/context/texmf/**/*.tex") +-- t = dir.glob("f:/minimal/tex/**/*") +-- print(dir.ls("f:/minimal/tex/**/*")) +-- print(dir.ls("*.tex")) + +function dir.ls(pattern) + return concat(glob(pattern),"\n") +end + +--~ mkdirs("temp") +--~ mkdirs("a/b/c") +--~ mkdirs(".","/a/b/c") +--~ mkdirs("a","b","c") + +local make_indeed = true -- false + +local onwindows = os.type == "windows" or find(os.getenv("PATH"),";") + +if onwindows then + + function dir.mkdirs(...) + local str, pth = "", "" + for i=1,select("#",...) do + local s = select(i,...) + if s == "" then + -- skip + elseif str == "" then + str = s + else + str = str .. "/" .. s + end + end + local first, middle, last + local drive = false + first, middle, last = match(str,"^(//)(//*)(.*)$") + if first then + -- empty network path == local path + else + first, last = match(str,"^(//)/*(.-)$") + if first then + middle, last = match(str,"([^/]+)/+(.-)$") + if middle then + pth = "//" .. middle + else + pth = "//" .. last + last = "" + end + else + first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$") + if first then + pth, drive = first .. middle, true + else + middle, last = match(str,"^(/*)(.-)$") + if not middle then + last = str + end + end + end + end + for s in gmatch(last,"[^/]+") do + if pth == "" then + pth = s + elseif drive then + pth, drive = pth .. s, false + else + pth = pth .. "/" .. s + end + if make_indeed and not isdir(pth) then + lfs.mkdir(pth) + end + end + return pth, (isdir(pth) == true) + end + + --~ print(dir.mkdirs("","","a","c")) + --~ print(dir.mkdirs("a")) + --~ print(dir.mkdirs("a:")) + --~ print(dir.mkdirs("a:/b/c")) + --~ print(dir.mkdirs("a:b/c")) + --~ print(dir.mkdirs("a:/bbb/c")) + --~ print(dir.mkdirs("/a/b/c")) + --~ print(dir.mkdirs("/aaa/b/c")) + --~ print(dir.mkdirs("//a/b/c")) + --~ print(dir.mkdirs("///a/b/c")) + --~ print(dir.mkdirs("a/bbb//ccc/")) + +else + + function dir.mkdirs(...) + local str, pth = "", "" + for i=1,select("#",...) do + local s = select(i,...) + if s and s ~= "" then -- we catch nil and false + if str ~= "" then + str = str .. "/" .. s + else + str = s + end + end + end + str = gsub(str,"/+","/") + if find(str,"^/") then + pth = "/" + for s in gmatch(str,"[^/]+") do + local first = (pth == "/") + if first then + pth = pth .. s + else + pth = pth .. "/" .. s + end + if make_indeed and not first and not isdir(pth) then + lfs.mkdir(pth) + end + end + else + pth = "." + for s in gmatch(str,"[^/]+") do + pth = pth .. "/" .. s + if make_indeed and not isdir(pth) then + lfs.mkdir(pth) + end + end + end + return pth, (isdir(pth) == true) + end + + --~ print(dir.mkdirs("","","a","c")) + --~ print(dir.mkdirs("a")) + --~ print(dir.mkdirs("/a/b/c")) + --~ print(dir.mkdirs("/aaa/b/c")) + --~ print(dir.mkdirs("//a/b/c")) + --~ print(dir.mkdirs("///a/b/c")) + --~ print(dir.mkdirs("a/bbb//ccc/")) + +end + +dir.makedirs = dir.mkdirs + +-- we can only define it here as it uses dir.current + +if onwindows then + + function dir.expandname(str) -- will be merged with cleanpath and collapsepath + local first, nothing, last = match(str,"^(//)(//*)(.*)$") + if first then + first = dir.current() .. 
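-- Sketch (not part of the patch): besides collecting a table (see the commented dir.glob
-- examples above), glob accepts a callback, and globfiles takes a Lua pattern or a filter
-- function; the paths are hypothetical.
dir.glob("texmf/**/*.lua", function(name) print(name) end)
local luafiles = dir.globfiles("texmf", true, "%.lua$")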
"/" -- dir.current sanitizes + end + if not first then + first, last = match(str,"^(//)/*(.*)$") + end + if not first then + first, last = match(str,"^([a-zA-Z]:)(.*)$") + if first and not find(last,"^/") then + local d = currentdir() + if chdir(first) then + first = dir.current() + end + chdir(d) + end + end + if not first then + first, last = dir.current(), str + end + last = gsub(last,"//","/") + last = gsub(last,"/%./","/") + last = gsub(last,"^/*","") + first = gsub(first,"/*$","") + if last == "" or last == "." then + return first + else + return first .. "/" .. last + end + end + +else + + function dir.expandname(str) -- will be merged with cleanpath and collapsepath + if not find(str,"^/") then + str = currentdir() .. "/" .. str + end + str = gsub(str,"//","/") + str = gsub(str,"/%./","/") + str = gsub(str,"(.)/%.$","%1") + return str + end + +end + +file.expandname = dir.expandname -- for convenience + +local stack = { } + +function dir.push(newdir) + insert(stack,currentdir()) + if newdir and newdir ~= "" then + chdir(newdir) + end +end + +function dir.pop() + local d = remove(stack) + if d then + chdir(d) + end + return d +end + +local function found(...) -- can have nil entries + for i=1,select("#",...) do + local path = select(i,...) + local kind = type(path) + if kind == "string" then + if isdir(path) then + return path + end + elseif kind == "table" then + -- here we asume no holes, i.e. an indexed table + local path = found(unpack(path)) + if path then + return path + end + end + end + -- return nil -- if we want print("crappath") to show something +end + +dir.found = found diff --git a/tex/context/base/l-file.lua b/tex/context/base/l-file.lua index f25490749..2e47a3d1f 100644 --- a/tex/context/base/l-file.lua +++ b/tex/context/base/l-file.lua @@ -1,590 +1,590 @@ -if not modules then modules = { } end modules ['l-file'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- needs a cleanup - -file = file or { } -local file = file - -if not lfs then - lfs = optionalrequire("lfs") -end - -if not lfs then - - lfs = { - getcurrentdir = function() - return "." 
- end, - attributes = function() - return nil - end, - isfile = function(name) - local f = io.open(name,'rb') - if f then - f:close() - return true - end - end, - isdir = function(name) - print("you need to load lfs") - return false - end - } - -elseif not lfs.isfile then - - local attributes = lfs.attributes - - function lfs.isdir(name) - return attributes(name,"mode") == "directory" - end - - function lfs.isfile(name) - return attributes(name,"mode") == "file" - end - - -- function lfs.isdir(name) - -- local a = attributes(name) - -- return a and a.mode == "directory" - -- end - - -- function lfs.isfile(name) - -- local a = attributes(name) - -- return a and a.mode == "file" - -- end - -end - -local insert, concat = table.insert, table.concat -local match, find, gmatch = string.match, string.find, string.gmatch -local lpegmatch = lpeg.match -local getcurrentdir, attributes = lfs.currentdir, lfs.attributes -local checkedsplit = string.checkedsplit - --- local patterns = file.patterns or { } --- file.patterns = patterns - -local P, R, S, C, Cs, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc, lpeg.Ct - -local colon = P(":") -local period = P(".") -local periods = P("..") -local fwslash = P("/") -local bwslash = P("\\") -local slashes = S("\\/") -local noperiod = 1-period -local noslashes = 1-slashes -local name = noperiod^1 -local suffix = period/"" * (1-period-slashes)^1 * -1 - ------ pattern = C((noslashes^0 * slashes^1)^1) -local pattern = C((1 - (slashes^1 * noslashes^1 * -1))^1) * P(1) -- there must be a more efficient way - -local function pathpart(name,default) - return name and lpegmatch(pattern,name) or default or "" -end - -local pattern = (noslashes^0 * slashes)^1 * C(noslashes^1) * -1 - -local function basename(name) - return name and lpegmatch(pattern,name) or name -end - --- print(pathpart("file")) --- print(pathpart("dir/file")) --- print(pathpart("/dir/file")) --- print(basename("file")) --- print(basename("dir/file")) --- print(basename("/dir/file")) - -local pattern = (noslashes^0 * slashes^1)^0 * Cs((1-suffix)^1) * suffix^0 - -local function nameonly(name) - return name and lpegmatch(pattern,name) or name -end - -local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1 - -local function suffixonly(name) - return name and lpegmatch(pattern,name) or "" -end - -file.pathpart = pathpart -file.basename = basename -file.nameonly = nameonly -file.suffixonly = suffixonly -file.suffix = suffixonly - -file.dirname = pathpart -- obsolete -file.extname = suffixonly -- obsolete - --- actually these are schemes - -local drive = C(R("az","AZ")) * colon -local path = C((noslashes^0 * slashes)^0) -local suffix = period * C(P(1-period)^0 * P(-1)) -local base = C((1-suffix)^0) -local rest = C(P(1)^0) - -drive = drive + Cc("") -path = path + Cc("") -base = base + Cc("") -suffix = suffix + Cc("") - -local pattern_a = drive * path * base * suffix -local pattern_b = path * base * suffix -local pattern_c = C(drive * path) * C(base * suffix) -- trick: two extra captures -local pattern_d = path * rest - -function file.splitname(str,splitdrive) - if not str then - -- error - elseif splitdrive then - return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix - else - return lpegmatch(pattern_b,str) -- returns path, base, suffix - end -end - -function file.splitbase(str) - if str then - return lpegmatch(pattern_d,str) -- returns path, base+suffix (path has / appended, might change at some point) - else - return "", str -- assume no 
path - end -end - ----- stripslash = C((1 - P("/")^1*P(-1))^0) - -function file.nametotable(str,splitdrive) - if str then - local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str) - -- if path ~= "" then - -- path = lpegmatch(stripslash,path) -- unfortunate hack, maybe this becomes default - -- end - if splitdrive then - return { - path = path, - drive = drive, - subpath = subpath, - name = name, - base = base, - suffix = suffix, - } - else - return { - path = path, - name = name, - base = base, - suffix = suffix, - } - end - end -end - --- print(file.splitname("file")) --- print(file.splitname("dir/file")) --- print(file.splitname("/dir/file")) --- print(file.splitname("file")) --- print(file.splitname("dir/file")) --- print(file.splitname("/dir/file")) - --- inspect(file.nametotable("file.ext")) --- inspect(file.nametotable("dir/file.ext")) --- inspect(file.nametotable("/dir/file.ext")) --- inspect(file.nametotable("file.ext")) --- inspect(file.nametotable("dir/file.ext")) --- inspect(file.nametotable("/dir/file.ext")) - ------ pattern = Cs(((period * noperiod^1 * -1) / "" + 1)^1) -local pattern = Cs(((period * (1-period-slashes)^1 * -1) / "" + 1)^1) - -function file.removesuffix(name) - return name and lpegmatch(pattern,name) -end - --- local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1 --- --- function file.addsuffix(name, suffix) --- local p = lpegmatch(pattern,name) --- if p then --- return name --- else --- return name .. "." .. suffix --- end --- end - -local suffix = period/"" * (1-period-slashes)^1 * -1 -local pattern = Cs((noslashes^0 * slashes^1)^0 * ((1-suffix)^1)) * Cs(suffix) - -function file.addsuffix(filename,suffix,criterium) - if not filename or not suffix or suffix == "" then - return filename - elseif criterium == true then - return filename .. "." .. suffix - elseif not criterium then - local n, s = lpegmatch(pattern,filename) - if not s or s == "" then - return filename .. "." .. suffix - else - return filename - end - else - local n, s = lpegmatch(pattern,filename) - if s and s ~= "" then - local t = type(criterium) - if t == "table" then - -- keep if in criterium - for i=1,#criterium do - if s == criterium[i] then - return filename - end - end - elseif t == "string" then - -- keep if criterium - if s == criterium then - return filename - end - end - end - return (n or filename) .. "." .. suffix - end -end - --- print("1 " .. file.addsuffix("name","new") .. " -> name.new") --- print("2 " .. file.addsuffix("name.old","new") .. " -> name.old") --- print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new") --- print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new") --- print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old") --- print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new") --- print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new") --- print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old") - -local suffix = period * (1-period-slashes)^1 * -1 -local pattern = Cs((1-suffix)^0) - -function file.replacesuffix(name,suffix) - if name and suffix and suffix ~= "" then - return lpegmatch(pattern,name) .. "." .. 
suffix - else - return name - end -end - --- - -local reslasher = lpeg.replacer(P("\\"),"/") - -function file.reslash(str) - return str and lpegmatch(reslasher,str) -end - --- We should be able to use: --- --- local writable = P(1) * P("w") * Cc(true) --- --- function file.is_writable(name) --- local a = attributes(name) or attributes(pathpart(name,".")) --- return a and lpegmatch(writable,a.permissions) or false --- end --- --- But after some testing Taco and I came up with the more robust --- variant: - -function file.is_writable(name) - if not name then - -- error - elseif lfs.isdir(name) then - name = name .. "/m_t_x_t_e_s_t.tmp" - local f = io.open(name,"wb") - if f then - f:close() - os.remove(name) - return true - end - elseif lfs.isfile(name) then - local f = io.open(name,"ab") - if f then - f:close() - return true - end - else - local f = io.open(name,"ab") - if f then - f:close() - os.remove(name) - return true - end - end - return false -end - -local readable = P("r") * Cc(true) - -function file.is_readable(name) - if name then - local a = attributes(name) - return a and lpegmatch(readable,a.permissions) or false - else - return false - end -end - -file.isreadable = file.is_readable -- depricated -file.iswritable = file.is_writable -- depricated - -function file.size(name) - if name then - local a = attributes(name) - return a and a.size or 0 - else - return 0 - end -end - -function file.splitpath(str,separator) -- string .. reslash is a bonus (we could do a direct split) - return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator) -end - -function file.joinpath(tab,separator) -- table - return tab and concat(tab,separator or io.pathseparator) -- can have trailing // -end - -local stripper = Cs(P(fwslash)^0/"" * reslasher) -local isnetwork = fwslash * fwslash * (1-fwslash) + (1-fwslash-colon)^1 * colon -local isroot = fwslash^1 * -1 -local hasroot = fwslash^1 - -local deslasher = lpeg.replacer(S("\\/")^1,"/") - --- If we have a network or prefix then there is a change that we end up with two --- // in the middle ... we could prevent this if we (1) expand prefixes: and (2) --- split and rebuild as url. Of course we could assume no network paths (which --- makes sense) adn assume either mapped drives (windows) or mounts (unix) but --- then we still have to deal with urls ... anyhow, multiple // are never a real --- problem but just ugly. - -function file.join(...) - local lst = { ... } - local one = lst[1] - if lpegmatch(isnetwork,one) then - local two = lpegmatch(deslasher,concat(lst,"/",2)) - return one .. "/" .. two - elseif lpegmatch(isroot,one) then - local two = lpegmatch(deslasher,concat(lst,"/",2)) - if lpegmatch(hasroot,two) then - return two - else - return "/" .. two - end - elseif one == "" then - return lpegmatch(stripper,concat(lst,"/",2)) - else - return lpegmatch(deslasher,concat(lst,"/")) - end -end - --- print(file.join("c:/whatever","name")) --- print(file.join("//","/y")) --- print(file.join("/","/y")) --- print(file.join("","/y")) --- print(file.join("/x/","/y")) --- print(file.join("x/","/y")) --- print(file.join("http://","/y")) --- print(file.join("http://a","/y")) --- print(file.join("http:///a","/y")) --- print(file.join("//nas-1","/y")) - --- The previous one fails on "a.b/c" so Taco came up with a split based --- variant. After some skyping we got it sort of compatible with the old --- one. After that the anchoring to currentdir was added in a better way. --- Of course there are some optimizations too. 
Finally we had to deal with --- windows drive prefixes and things like sys://. Eventually gsubs and --- finds were replaced by lpegs. - -local drivespec = R("az","AZ")^1 * colon -local anchors = fwslash + drivespec -local untouched = periods + (1-period)^1 * P(-1) -local splitstarter = (Cs(drivespec * (bwslash/"/" + fwslash)^0) + Cc(false)) * Ct(lpeg.splitat(S("/\\")^1)) -local absolute = fwslash - -function file.collapsepath(str,anchor) -- anchor: false|nil, true, "." - if not str then - return - end - if anchor == true and not lpegmatch(anchors,str) then - str = getcurrentdir() .. "/" .. str - end - if str == "" or str =="." then - return "." - elseif lpegmatch(untouched,str) then - return lpegmatch(reslasher,str) - end - local starter, oldelements = lpegmatch(splitstarter,str) - local newelements = { } - local i = #oldelements - while i > 0 do - local element = oldelements[i] - if element == '.' then - -- do nothing - elseif element == '..' then - local n = i - 1 - while n > 0 do - local element = oldelements[n] - if element ~= '..' and element ~= '.' then - oldelements[n] = '.' - break - else - n = n - 1 - end - end - if n < 1 then - insert(newelements,1,'..') - end - elseif element ~= "" then - insert(newelements,1,element) - end - i = i - 1 - end - if #newelements == 0 then - return starter or "." - elseif starter then - return starter .. concat(newelements, '/') - elseif lpegmatch(absolute,str) then - return "/" .. concat(newelements,'/') - else - newelements = concat(newelements, '/') - if anchor == "." and find(str,"^%./") then - return "./" .. newelements - else - return newelements - end - end -end - --- local function test(str,...) --- print(string.format("%-20s %-15s %-30s %-20s",str,file.collapsepath(str),file.collapsepath(str,true),file.collapsepath(str,"."))) --- end --- test("a/b.c/d") test("b.c/d") test("b.c/..") --- test("/") test("c:/..") test("sys://..") --- test("") test("./") test(".") test("..") test("./..") test("../..") --- test("a") test("./a") test("/a") test("a/../..") --- test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..") --- test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..") --- test("./a") - -local validchars = R("az","09","AZ","--","..") -local pattern_a = lpeg.replacer(1-validchars) -local pattern_a = Cs((validchars + P(1)/"-")^1) -local whatever = P("-")^0 / "" -local pattern_b = Cs(whatever * (1 - whatever * -1)^1) - -function file.robustname(str,strict) - if str then - str = lpegmatch(pattern_a,str) or str - if strict then - return lpegmatch(pattern_b,str) or str -- two step is cleaner (less backtracking) - else - return str - end - end -end - -file.readdata = io.loaddata -file.savedata = io.savedata - -function file.copy(oldname,newname) - if oldname and newname then - local data = io.loaddata(oldname) - if data and data ~= "" then - file.savedata(newname,data) - end - end -end - --- also rewrite previous - -local letter = R("az","AZ") + S("_-+") -local separator = P("://") - -local qualified = period^0 * fwslash - + letter * colon - + letter^1 * separator - + letter^1 * fwslash -local rootbased = fwslash - + letter * colon - -lpeg.patterns.qualified = qualified -lpeg.patterns.rootbased = rootbased - --- ./name ../name /name c: :// name/name - -function file.is_qualified_path(filename) - return filename and lpegmatch(qualified,filename) ~= nil -end - -function file.is_rootbased_path(filename) - return filename and lpegmatch(rootbased,filename) ~= nil -end - --- function test(t) for k, v in next, t do print(v, 
"=>", file.splitname(v)) end end --- --- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" } --- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" } --- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" } --- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" } - --- -- maybe: --- --- if os.type == "windows" then --- local currentdir = getcurrentdir --- function getcurrentdir() --- return lpegmatch(reslasher,currentdir()) --- end --- end - --- for myself: - -function file.strip(name,dir) - if name then - local b, a = match(name,"^(.-)" .. dir .. "(.*)$") - return a ~= "" and a or name - end -end - --- local debuglist = { --- "pathpart", "basename", "nameonly", "suffixonly", "suffix", "dirname", "extname", --- "addsuffix", "removesuffix", "replacesuffix", "join", --- "strip","collapsepath", "joinpath", "splitpath", --- } - --- for i=1,#debuglist do --- local name = debuglist[i] --- local f = file[name] --- file[name] = function(...) --- print(name,f(...)) --- return f(...) --- end --- end - --- a goodie: a dumb version of mkdirs: - -function lfs.mkdirs(path) - local full - for sub in gmatch(path,"([^\\/]+)") do - if full then - full = full .. "/" .. sub - else - full = sub - end - if not lfs.isdir(full) then - lfs.mkdir(full) - end - end -end +if not modules then modules = { } end modules ['l-file'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- needs a cleanup + +file = file or { } +local file = file + +if not lfs then + lfs = optionalrequire("lfs") +end + +if not lfs then + + lfs = { + getcurrentdir = function() + return "." 
+ end, + attributes = function() + return nil + end, + isfile = function(name) + local f = io.open(name,'rb') + if f then + f:close() + return true + end + end, + isdir = function(name) + print("you need to load lfs") + return false + end + } + +elseif not lfs.isfile then + + local attributes = lfs.attributes + + function lfs.isdir(name) + return attributes(name,"mode") == "directory" + end + + function lfs.isfile(name) + return attributes(name,"mode") == "file" + end + + -- function lfs.isdir(name) + -- local a = attributes(name) + -- return a and a.mode == "directory" + -- end + + -- function lfs.isfile(name) + -- local a = attributes(name) + -- return a and a.mode == "file" + -- end + +end + +local insert, concat = table.insert, table.concat +local match, find, gmatch = string.match, string.find, string.gmatch +local lpegmatch = lpeg.match +local getcurrentdir, attributes = lfs.currentdir, lfs.attributes +local checkedsplit = string.checkedsplit + +-- local patterns = file.patterns or { } +-- file.patterns = patterns + +local P, R, S, C, Cs, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc, lpeg.Ct + +local colon = P(":") +local period = P(".") +local periods = P("..") +local fwslash = P("/") +local bwslash = P("\\") +local slashes = S("\\/") +local noperiod = 1-period +local noslashes = 1-slashes +local name = noperiod^1 +local suffix = period/"" * (1-period-slashes)^1 * -1 + +----- pattern = C((noslashes^0 * slashes^1)^1) +local pattern = C((1 - (slashes^1 * noslashes^1 * -1))^1) * P(1) -- there must be a more efficient way + +local function pathpart(name,default) + return name and lpegmatch(pattern,name) or default or "" +end + +local pattern = (noslashes^0 * slashes)^1 * C(noslashes^1) * -1 + +local function basename(name) + return name and lpegmatch(pattern,name) or name +end + +-- print(pathpart("file")) +-- print(pathpart("dir/file")) +-- print(pathpart("/dir/file")) +-- print(basename("file")) +-- print(basename("dir/file")) +-- print(basename("/dir/file")) + +local pattern = (noslashes^0 * slashes^1)^0 * Cs((1-suffix)^1) * suffix^0 + +local function nameonly(name) + return name and lpegmatch(pattern,name) or name +end + +local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1 + +local function suffixonly(name) + return name and lpegmatch(pattern,name) or "" +end + +file.pathpart = pathpart +file.basename = basename +file.nameonly = nameonly +file.suffixonly = suffixonly +file.suffix = suffixonly + +file.dirname = pathpart -- obsolete +file.extname = suffixonly -- obsolete + +-- actually these are schemes + +local drive = C(R("az","AZ")) * colon +local path = C((noslashes^0 * slashes)^0) +local suffix = period * C(P(1-period)^0 * P(-1)) +local base = C((1-suffix)^0) +local rest = C(P(1)^0) + +drive = drive + Cc("") +path = path + Cc("") +base = base + Cc("") +suffix = suffix + Cc("") + +local pattern_a = drive * path * base * suffix +local pattern_b = path * base * suffix +local pattern_c = C(drive * path) * C(base * suffix) -- trick: two extra captures +local pattern_d = path * rest + +function file.splitname(str,splitdrive) + if not str then + -- error + elseif splitdrive then + return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix + else + return lpegmatch(pattern_b,str) -- returns path, base, suffix + end +end + +function file.splitbase(str) + if str then + return lpegmatch(pattern_d,str) -- returns path, base+suffix (path has / appended, might change at some point) + else + return "", str -- assume no 
path + end +end + +---- stripslash = C((1 - P("/")^1*P(-1))^0) + +function file.nametotable(str,splitdrive) + if str then + local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str) + -- if path ~= "" then + -- path = lpegmatch(stripslash,path) -- unfortunate hack, maybe this becomes default + -- end + if splitdrive then + return { + path = path, + drive = drive, + subpath = subpath, + name = name, + base = base, + suffix = suffix, + } + else + return { + path = path, + name = name, + base = base, + suffix = suffix, + } + end + end +end + +-- print(file.splitname("file")) +-- print(file.splitname("dir/file")) +-- print(file.splitname("/dir/file")) +-- print(file.splitname("file")) +-- print(file.splitname("dir/file")) +-- print(file.splitname("/dir/file")) + +-- inspect(file.nametotable("file.ext")) +-- inspect(file.nametotable("dir/file.ext")) +-- inspect(file.nametotable("/dir/file.ext")) +-- inspect(file.nametotable("file.ext")) +-- inspect(file.nametotable("dir/file.ext")) +-- inspect(file.nametotable("/dir/file.ext")) + +----- pattern = Cs(((period * noperiod^1 * -1) / "" + 1)^1) +local pattern = Cs(((period * (1-period-slashes)^1 * -1) / "" + 1)^1) + +function file.removesuffix(name) + return name and lpegmatch(pattern,name) +end + +-- local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1 +-- +-- function file.addsuffix(name, suffix) +-- local p = lpegmatch(pattern,name) +-- if p then +-- return name +-- else +-- return name .. "." .. suffix +-- end +-- end + +local suffix = period/"" * (1-period-slashes)^1 * -1 +local pattern = Cs((noslashes^0 * slashes^1)^0 * ((1-suffix)^1)) * Cs(suffix) + +function file.addsuffix(filename,suffix,criterium) + if not filename or not suffix or suffix == "" then + return filename + elseif criterium == true then + return filename .. "." .. suffix + elseif not criterium then + local n, s = lpegmatch(pattern,filename) + if not s or s == "" then + return filename .. "." .. suffix + else + return filename + end + else + local n, s = lpegmatch(pattern,filename) + if s and s ~= "" then + local t = type(criterium) + if t == "table" then + -- keep if in criterium + for i=1,#criterium do + if s == criterium[i] then + return filename + end + end + elseif t == "string" then + -- keep if criterium + if s == criterium then + return filename + end + end + end + return (n or filename) .. "." .. suffix + end +end + +-- print("1 " .. file.addsuffix("name","new") .. " -> name.new") +-- print("2 " .. file.addsuffix("name.old","new") .. " -> name.old") +-- print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new") +-- print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new") +-- print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old") +-- print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new") +-- print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new") +-- print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old") + +local suffix = period * (1-period-slashes)^1 * -1 +local pattern = Cs((1-suffix)^0) + +function file.replacesuffix(name,suffix) + if name and suffix and suffix ~= "" then + return lpegmatch(pattern,name) .. "." .. 
suffix + else + return name + end +end + +-- + +local reslasher = lpeg.replacer(P("\\"),"/") + +function file.reslash(str) + return str and lpegmatch(reslasher,str) +end + +-- We should be able to use: +-- +-- local writable = P(1) * P("w") * Cc(true) +-- +-- function file.is_writable(name) +-- local a = attributes(name) or attributes(pathpart(name,".")) +-- return a and lpegmatch(writable,a.permissions) or false +-- end +-- +-- But after some testing Taco and I came up with the more robust +-- variant: + +function file.is_writable(name) + if not name then + -- error + elseif lfs.isdir(name) then + name = name .. "/m_t_x_t_e_s_t.tmp" + local f = io.open(name,"wb") + if f then + f:close() + os.remove(name) + return true + end + elseif lfs.isfile(name) then + local f = io.open(name,"ab") + if f then + f:close() + return true + end + else + local f = io.open(name,"ab") + if f then + f:close() + os.remove(name) + return true + end + end + return false +end + +local readable = P("r") * Cc(true) + +function file.is_readable(name) + if name then + local a = attributes(name) + return a and lpegmatch(readable,a.permissions) or false + else + return false + end +end + +file.isreadable = file.is_readable -- depricated +file.iswritable = file.is_writable -- depricated + +function file.size(name) + if name then + local a = attributes(name) + return a and a.size or 0 + else + return 0 + end +end + +function file.splitpath(str,separator) -- string .. reslash is a bonus (we could do a direct split) + return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator) +end + +function file.joinpath(tab,separator) -- table + return tab and concat(tab,separator or io.pathseparator) -- can have trailing // +end + +local stripper = Cs(P(fwslash)^0/"" * reslasher) +local isnetwork = fwslash * fwslash * (1-fwslash) + (1-fwslash-colon)^1 * colon +local isroot = fwslash^1 * -1 +local hasroot = fwslash^1 + +local deslasher = lpeg.replacer(S("\\/")^1,"/") + +-- If we have a network or prefix then there is a change that we end up with two +-- // in the middle ... we could prevent this if we (1) expand prefixes: and (2) +-- split and rebuild as url. Of course we could assume no network paths (which +-- makes sense) adn assume either mapped drives (windows) or mounts (unix) but +-- then we still have to deal with urls ... anyhow, multiple // are never a real +-- problem but just ugly. + +function file.join(...) + local lst = { ... } + local one = lst[1] + if lpegmatch(isnetwork,one) then + local two = lpegmatch(deslasher,concat(lst,"/",2)) + return one .. "/" .. two + elseif lpegmatch(isroot,one) then + local two = lpegmatch(deslasher,concat(lst,"/",2)) + if lpegmatch(hasroot,two) then + return two + else + return "/" .. two + end + elseif one == "" then + return lpegmatch(stripper,concat(lst,"/",2)) + else + return lpegmatch(deslasher,concat(lst,"/")) + end +end + +-- print(file.join("c:/whatever","name")) +-- print(file.join("//","/y")) +-- print(file.join("/","/y")) +-- print(file.join("","/y")) +-- print(file.join("/x/","/y")) +-- print(file.join("x/","/y")) +-- print(file.join("http://","/y")) +-- print(file.join("http://a","/y")) +-- print(file.join("http:///a","/y")) +-- print(file.join("//nas-1","/y")) + +-- The previous one fails on "a.b/c" so Taco came up with a split based +-- variant. After some skyping we got it sort of compatible with the old +-- one. After that the anchoring to currentdir was added in a better way. +-- Of course there are some optimizations too. 
Finally we had to deal with +-- windows drive prefixes and things like sys://. Eventually gsubs and +-- finds were replaced by lpegs. + +local drivespec = R("az","AZ")^1 * colon +local anchors = fwslash + drivespec +local untouched = periods + (1-period)^1 * P(-1) +local splitstarter = (Cs(drivespec * (bwslash/"/" + fwslash)^0) + Cc(false)) * Ct(lpeg.splitat(S("/\\")^1)) +local absolute = fwslash + +function file.collapsepath(str,anchor) -- anchor: false|nil, true, "." + if not str then + return + end + if anchor == true and not lpegmatch(anchors,str) then + str = getcurrentdir() .. "/" .. str + end + if str == "" or str =="." then + return "." + elseif lpegmatch(untouched,str) then + return lpegmatch(reslasher,str) + end + local starter, oldelements = lpegmatch(splitstarter,str) + local newelements = { } + local i = #oldelements + while i > 0 do + local element = oldelements[i] + if element == '.' then + -- do nothing + elseif element == '..' then + local n = i - 1 + while n > 0 do + local element = oldelements[n] + if element ~= '..' and element ~= '.' then + oldelements[n] = '.' + break + else + n = n - 1 + end + end + if n < 1 then + insert(newelements,1,'..') + end + elseif element ~= "" then + insert(newelements,1,element) + end + i = i - 1 + end + if #newelements == 0 then + return starter or "." + elseif starter then + return starter .. concat(newelements, '/') + elseif lpegmatch(absolute,str) then + return "/" .. concat(newelements,'/') + else + newelements = concat(newelements, '/') + if anchor == "." and find(str,"^%./") then + return "./" .. newelements + else + return newelements + end + end +end + +-- local function test(str,...) +-- print(string.format("%-20s %-15s %-30s %-20s",str,file.collapsepath(str),file.collapsepath(str,true),file.collapsepath(str,"."))) +-- end +-- test("a/b.c/d") test("b.c/d") test("b.c/..") +-- test("/") test("c:/..") test("sys://..") +-- test("") test("./") test(".") test("..") test("./..") test("../..") +-- test("a") test("./a") test("/a") test("a/../..") +-- test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..") +-- test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..") +-- test("./a") + +local validchars = R("az","09","AZ","--","..") +local pattern_a = lpeg.replacer(1-validchars) +local pattern_a = Cs((validchars + P(1)/"-")^1) +local whatever = P("-")^0 / "" +local pattern_b = Cs(whatever * (1 - whatever * -1)^1) + +function file.robustname(str,strict) + if str then + str = lpegmatch(pattern_a,str) or str + if strict then + return lpegmatch(pattern_b,str) or str -- two step is cleaner (less backtracking) + else + return str + end + end +end + +file.readdata = io.loaddata +file.savedata = io.savedata + +function file.copy(oldname,newname) + if oldname and newname then + local data = io.loaddata(oldname) + if data and data ~= "" then + file.savedata(newname,data) + end + end +end + +-- also rewrite previous + +local letter = R("az","AZ") + S("_-+") +local separator = P("://") + +local qualified = period^0 * fwslash + + letter * colon + + letter^1 * separator + + letter^1 * fwslash +local rootbased = fwslash + + letter * colon + +lpeg.patterns.qualified = qualified +lpeg.patterns.rootbased = rootbased + +-- ./name ../name /name c: :// name/name + +function file.is_qualified_path(filename) + return filename and lpegmatch(qualified,filename) ~= nil +end + +function file.is_rootbased_path(filename) + return filename and lpegmatch(rootbased,filename) ~= nil +end + +-- function test(t) for k, v in next, t do print(v, 
"=>", file.splitname(v)) end end +-- +-- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" } +-- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" } +-- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" } +-- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" } + +-- -- maybe: +-- +-- if os.type == "windows" then +-- local currentdir = getcurrentdir +-- function getcurrentdir() +-- return lpegmatch(reslasher,currentdir()) +-- end +-- end + +-- for myself: + +function file.strip(name,dir) + if name then + local b, a = match(name,"^(.-)" .. dir .. "(.*)$") + return a ~= "" and a or name + end +end + +-- local debuglist = { +-- "pathpart", "basename", "nameonly", "suffixonly", "suffix", "dirname", "extname", +-- "addsuffix", "removesuffix", "replacesuffix", "join", +-- "strip","collapsepath", "joinpath", "splitpath", +-- } + +-- for i=1,#debuglist do +-- local name = debuglist[i] +-- local f = file[name] +-- file[name] = function(...) +-- print(name,f(...)) +-- return f(...) +-- end +-- end + +-- a goodie: a dumb version of mkdirs: + +function lfs.mkdirs(path) + local full + for sub in gmatch(path,"([^\\/]+)") do + if full then + full = full .. "/" .. sub + else + full = sub + end + if not lfs.isdir(full) then + lfs.mkdir(full) + end + end +end diff --git a/tex/context/base/l-function.lua b/tex/context/base/l-function.lua index 7ded8ceec..cdb1d3def 100644 --- a/tex/context/base/l-function.lua +++ b/tex/context/base/l-function.lua @@ -1,11 +1,11 @@ -if not modules then modules = { } end modules ['l-functions'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -functions = functions or { } - -function functions.dummy() end +if not modules then modules = { } end modules ['l-functions'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +functions = functions or { } + +function functions.dummy() end diff --git a/tex/context/base/l-io.lua b/tex/context/base/l-io.lua index 06e1fb5ef..2ddfacaee 100644 --- a/tex/context/base/l-io.lua +++ b/tex/context/base/l-io.lua @@ -1,362 +1,362 @@ -if not modules then modules = { } end modules ['l-io'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local io = io -local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format -local concat = table.concat -local floor = math.floor -local type = type - -if string.find(os.getenv("PATH"),";") then - io.fileseparator, io.pathseparator = "\\", ";" -else - io.fileseparator, io.pathseparator = "/" , ":" -end - -local function readall(f) - return f:read("*all") -end - --- The next one is upto 50% faster on large files and less memory consumption due --- to less intermediate large allocations. This phenomena was discussed on the --- luatex dev list. 
- -local function readall(f) - local size = f:seek("end") - if size == 0 then - return "" - elseif size < 1024*1024 then - f:seek("set",0) - return f:read('*all') - else - local done = f:seek("set",0) - if size < 1024*1024 then - step = 1024 * 1024 - elseif size > 16*1024*1024 then - step = 16*1024*1024 - else - step = floor(size/(1024*1024)) * 1024 * 1024 / 8 - end - local data = { } - while true do - local r = f:read(step) - if not r then - return concat(data) - else - data[#data+1] = r - end - end - end -end - -io.readall = readall - -function io.loaddata(filename,textmode) -- return nil if empty - local f = io.open(filename,(textmode and 'r') or 'rb') - if f then --- local data = f:read('*all') - local data = readall(f) - f:close() - if #data > 0 then - return data - end - end -end - -function io.savedata(filename,data,joiner) - local f = io.open(filename,"wb") - if f then - if type(data) == "table" then - f:write(concat(data,joiner or "")) - elseif type(data) == "function" then - data(f) - else - f:write(data or "") - end - f:close() - io.flush() - return true - else - return false - end -end - --- we can also chunk this one if needed: io.lines(filename,chunksize,"*l") - -function io.loadlines(filename,n) -- return nil if empty - local f = io.open(filename,'r') - if not f then - -- no file - elseif n then - local lines = { } - for i=1,n do - local line = f:read("*lines") - if line then - lines[#lines+1] = line - else - break - end - end - f:close() - lines = concat(lines,"\n") - if #lines > 0 then - return lines - end - else - local line = f:read("*line") or "" - f:close() - if #line > 0 then - return line - end - end -end - -function io.loadchunk(filename,n) - local f = io.open(filename,'rb') - if f then - local data = f:read(n or 1024) - f:close() - if #data > 0 then - return data - end - end -end - -function io.exists(filename) - local f = io.open(filename) - if f == nil then - return false - else - f:close() - return true - end -end - -function io.size(filename) - local f = io.open(filename) - if f == nil then - return 0 - else - local s = f:seek("end") - f:close() - return s - end -end - -function io.noflines(f) - if type(f) == "string" then - local f = io.open(filename) - if f then - local n = f and io.noflines(f) or 0 - f:close() - return n - else - return 0 - end - else - local n = 0 - for _ in f:lines() do - n = n + 1 - end - f:seek('set',0) - return n - end -end - -local nextchar = { - [ 4] = function(f) - return f:read(1,1,1,1) - end, - [ 2] = function(f) - return f:read(1,1) - end, - [ 1] = function(f) - return f:read(1) - end, - [-2] = function(f) - local a, b = f:read(1,1) - return b, a - end, - [-4] = function(f) - local a, b, c, d = f:read(1,1,1,1) - return d, c, b, a - end -} - -function io.characters(f,n) - if f then - return nextchar[n or 1], f - end -end - -local nextbyte = { - [4] = function(f) - local a, b, c, d = f:read(1,1,1,1) - if d then - return byte(a), byte(b), byte(c), byte(d) - end - end, - [3] = function(f) - local a, b, c = f:read(1,1,1) - if b then - return byte(a), byte(b), byte(c) - end - end, - [2] = function(f) - local a, b = f:read(1,1) - if b then - return byte(a), byte(b) - end - end, - [1] = function (f) - local a = f:read(1) - if a then - return byte(a) - end - end, - [-2] = function (f) - local a, b = f:read(1,1) - if b then - return byte(b), byte(a) - end - end, - [-3] = function(f) - local a, b, c = f:read(1,1,1) - if b then - return byte(c), byte(b), byte(a) - end - end, - [-4] = function(f) - local a, b, c, d = f:read(1,1,1,1) - if d 
then - return byte(d), byte(c), byte(b), byte(a) - end - end -} - -function io.bytes(f,n) - if f then - return nextbyte[n or 1], f - else - return nil, nil - end -end - -function io.ask(question,default,options) - while true do - io.write(question) - if options then - io.write(format(" [%s]",concat(options,"|"))) - end - if default then - io.write(format(" [%s]",default)) - end - io.write(format(" ")) - io.flush() - local answer = io.read() - answer = gsub(answer,"^%s*(.*)%s*$","%1") - if answer == "" and default then - return default - elseif not options then - return answer - else - for k=1,#options do - if options[k] == answer then - return answer - end - end - local pattern = "^" .. answer - for k=1,#options do - local v = options[k] - if find(v,pattern) then - return v - end - end - end - end -end - -local function readnumber(f,n,m) - if m then - f:seek("set",n) - n = m - end - if n == 1 then - return byte(f:read(1)) - elseif n == 2 then - local a, b = byte(f:read(2),1,2) - return 256 * a + b - elseif n == 3 then - local a, b, c = byte(f:read(3),1,3) - return 256*256 * a + 256 * b + c - elseif n == 4 then - local a, b, c, d = byte(f:read(4),1,4) - return 256*256*256 * a + 256*256 * b + 256 * c + d - elseif n == 8 then - local a, b = readnumber(f,4), readnumber(f,4) - return 256 * a + b - elseif n == 12 then - local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4) - return 256*256 * a + 256 * b + c - elseif n == -2 then - local b, a = byte(f:read(2),1,2) - return 256*a + b - elseif n == -3 then - local c, b, a = byte(f:read(3),1,3) - return 256*256 * a + 256 * b + c - elseif n == -4 then - local d, c, b, a = byte(f:read(4),1,4) - return 256*256*256 * a + 256*256 * b + 256*c + d - elseif n == -8 then - local h, g, f, e, d, c, b, a = byte(f:read(8),1,8) - return 256*256*256*256*256*256*256 * a + - 256*256*256*256*256*256 * b + - 256*256*256*256*256 * c + - 256*256*256*256 * d + - 256*256*256 * e + - 256*256 * f + - 256 * g + - h - else - return 0 - end -end - -io.readnumber = readnumber - -function io.readstring(f,n,m) - if m then - f:seek("set",n) - n = m - end - local str = gsub(f:read(n),"\000","") - return str -end - --- - -if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely -if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely - --- This works quite ok: --- --- function io.piped(command,writer) --- local pipe = io.popen(command) --- -- for line in pipe:lines() do --- -- print(line) --- -- end --- while true do --- local line = pipe:read(1) --- if not line then --- break --- elseif line ~= "\n" then --- writer(line) --- end --- end --- return pipe:close() -- ok, status, (error)code --- end +if not modules then modules = { } end modules ['l-io'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local io = io +local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format +local concat = table.concat +local floor = math.floor +local type = type + +if string.find(os.getenv("PATH"),";") then + io.fileseparator, io.pathseparator = "\\", ";" +else + io.fileseparator, io.pathseparator = "/" , ":" +end + +local function readall(f) + return f:read("*all") +end + +-- The next one is upto 50% faster on large files and less memory consumption due +-- to less intermediate large allocations. 
This phenomena was discussed on the +-- luatex dev list. + +local function readall(f) + local size = f:seek("end") + if size == 0 then + return "" + elseif size < 1024*1024 then + f:seek("set",0) + return f:read('*all') + else + local done = f:seek("set",0) + if size < 1024*1024 then + step = 1024 * 1024 + elseif size > 16*1024*1024 then + step = 16*1024*1024 + else + step = floor(size/(1024*1024)) * 1024 * 1024 / 8 + end + local data = { } + while true do + local r = f:read(step) + if not r then + return concat(data) + else + data[#data+1] = r + end + end + end +end + +io.readall = readall + +function io.loaddata(filename,textmode) -- return nil if empty + local f = io.open(filename,(textmode and 'r') or 'rb') + if f then +-- local data = f:read('*all') + local data = readall(f) + f:close() + if #data > 0 then + return data + end + end +end + +function io.savedata(filename,data,joiner) + local f = io.open(filename,"wb") + if f then + if type(data) == "table" then + f:write(concat(data,joiner or "")) + elseif type(data) == "function" then + data(f) + else + f:write(data or "") + end + f:close() + io.flush() + return true + else + return false + end +end + +-- we can also chunk this one if needed: io.lines(filename,chunksize,"*l") + +function io.loadlines(filename,n) -- return nil if empty + local f = io.open(filename,'r') + if not f then + -- no file + elseif n then + local lines = { } + for i=1,n do + local line = f:read("*lines") + if line then + lines[#lines+1] = line + else + break + end + end + f:close() + lines = concat(lines,"\n") + if #lines > 0 then + return lines + end + else + local line = f:read("*line") or "" + f:close() + if #line > 0 then + return line + end + end +end + +function io.loadchunk(filename,n) + local f = io.open(filename,'rb') + if f then + local data = f:read(n or 1024) + f:close() + if #data > 0 then + return data + end + end +end + +function io.exists(filename) + local f = io.open(filename) + if f == nil then + return false + else + f:close() + return true + end +end + +function io.size(filename) + local f = io.open(filename) + if f == nil then + return 0 + else + local s = f:seek("end") + f:close() + return s + end +end + +function io.noflines(f) + if type(f) == "string" then + local f = io.open(filename) + if f then + local n = f and io.noflines(f) or 0 + f:close() + return n + else + return 0 + end + else + local n = 0 + for _ in f:lines() do + n = n + 1 + end + f:seek('set',0) + return n + end +end + +local nextchar = { + [ 4] = function(f) + return f:read(1,1,1,1) + end, + [ 2] = function(f) + return f:read(1,1) + end, + [ 1] = function(f) + return f:read(1) + end, + [-2] = function(f) + local a, b = f:read(1,1) + return b, a + end, + [-4] = function(f) + local a, b, c, d = f:read(1,1,1,1) + return d, c, b, a + end +} + +function io.characters(f,n) + if f then + return nextchar[n or 1], f + end +end + +local nextbyte = { + [4] = function(f) + local a, b, c, d = f:read(1,1,1,1) + if d then + return byte(a), byte(b), byte(c), byte(d) + end + end, + [3] = function(f) + local a, b, c = f:read(1,1,1) + if b then + return byte(a), byte(b), byte(c) + end + end, + [2] = function(f) + local a, b = f:read(1,1) + if b then + return byte(a), byte(b) + end + end, + [1] = function (f) + local a = f:read(1) + if a then + return byte(a) + end + end, + [-2] = function (f) + local a, b = f:read(1,1) + if b then + return byte(b), byte(a) + end + end, + [-3] = function(f) + local a, b, c = f:read(1,1,1) + if b then + return byte(c), byte(b), byte(a) + end + end, + [-4] = 
function(f) + local a, b, c, d = f:read(1,1,1,1) + if d then + return byte(d), byte(c), byte(b), byte(a) + end + end +} + +function io.bytes(f,n) + if f then + return nextbyte[n or 1], f + else + return nil, nil + end +end + +function io.ask(question,default,options) + while true do + io.write(question) + if options then + io.write(format(" [%s]",concat(options,"|"))) + end + if default then + io.write(format(" [%s]",default)) + end + io.write(format(" ")) + io.flush() + local answer = io.read() + answer = gsub(answer,"^%s*(.*)%s*$","%1") + if answer == "" and default then + return default + elseif not options then + return answer + else + for k=1,#options do + if options[k] == answer then + return answer + end + end + local pattern = "^" .. answer + for k=1,#options do + local v = options[k] + if find(v,pattern) then + return v + end + end + end + end +end + +local function readnumber(f,n,m) + if m then + f:seek("set",n) + n = m + end + if n == 1 then + return byte(f:read(1)) + elseif n == 2 then + local a, b = byte(f:read(2),1,2) + return 256 * a + b + elseif n == 3 then + local a, b, c = byte(f:read(3),1,3) + return 256*256 * a + 256 * b + c + elseif n == 4 then + local a, b, c, d = byte(f:read(4),1,4) + return 256*256*256 * a + 256*256 * b + 256 * c + d + elseif n == 8 then + local a, b = readnumber(f,4), readnumber(f,4) + return 256 * a + b + elseif n == 12 then + local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4) + return 256*256 * a + 256 * b + c + elseif n == -2 then + local b, a = byte(f:read(2),1,2) + return 256*a + b + elseif n == -3 then + local c, b, a = byte(f:read(3),1,3) + return 256*256 * a + 256 * b + c + elseif n == -4 then + local d, c, b, a = byte(f:read(4),1,4) + return 256*256*256 * a + 256*256 * b + 256*c + d + elseif n == -8 then + local h, g, f, e, d, c, b, a = byte(f:read(8),1,8) + return 256*256*256*256*256*256*256 * a + + 256*256*256*256*256*256 * b + + 256*256*256*256*256 * c + + 256*256*256*256 * d + + 256*256*256 * e + + 256*256 * f + + 256 * g + + h + else + return 0 + end +end + +io.readnumber = readnumber + +function io.readstring(f,n,m) + if m then + f:seek("set",n) + n = m + end + local str = gsub(f:read(n),"\000","") + return str +end + +-- + +if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely +if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely + +-- This works quite ok: +-- +-- function io.piped(command,writer) +-- local pipe = io.popen(command) +-- -- for line in pipe:lines() do +-- -- print(line) +-- -- end +-- while true do +-- local line = pipe:read(1) +-- if not line then +-- break +-- elseif line ~= "\n" then +-- writer(line) +-- end +-- end +-- return pipe:close() -- ok, status, (error)code +-- end diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua index 323c73b69..07926da86 100644 --- a/tex/context/base/l-lpeg.lua +++ b/tex/context/base/l-lpeg.lua @@ -1,852 +1,852 @@ -if not modules then modules = { } end modules ['l-lpeg'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1) - --- move utf -> l-unicode --- move string -> l-string or keep it here - -lpeg = require("lpeg") - --- tracing (only used when we encounter a problem in integration of lpeg in luatex) - --- some code will move to unicode and 
string - --- local lpmatch = lpeg.match --- local lpprint = lpeg.print --- local lpp = lpeg.P --- local lpr = lpeg.R --- local lps = lpeg.S --- local lpc = lpeg.C --- local lpb = lpeg.B --- local lpv = lpeg.V --- local lpcf = lpeg.Cf --- local lpcb = lpeg.Cb --- local lpcg = lpeg.Cg --- local lpct = lpeg.Ct --- local lpcs = lpeg.Cs --- local lpcc = lpeg.Cc --- local lpcmt = lpeg.Cmt --- local lpcarg = lpeg.Carg - --- function lpeg.match(l,...) print("LPEG MATCH") lpprint(l) return lpmatch(l,...) end - --- function lpeg.P (l) local p = lpp (l) print("LPEG P =") lpprint(l) return p end --- function lpeg.R (l) local p = lpr (l) print("LPEG R =") lpprint(l) return p end --- function lpeg.S (l) local p = lps (l) print("LPEG S =") lpprint(l) return p end --- function lpeg.C (l) local p = lpc (l) print("LPEG C =") lpprint(l) return p end --- function lpeg.B (l) local p = lpb (l) print("LPEG B =") lpprint(l) return p end --- function lpeg.V (l) local p = lpv (l) print("LPEG V =") lpprint(l) return p end --- function lpeg.Cf (l) local p = lpcf (l) print("LPEG Cf =") lpprint(l) return p end --- function lpeg.Cb (l) local p = lpcb (l) print("LPEG Cb =") lpprint(l) return p end --- function lpeg.Cg (l) local p = lpcg (l) print("LPEG Cg =") lpprint(l) return p end --- function lpeg.Ct (l) local p = lpct (l) print("LPEG Ct =") lpprint(l) return p end --- function lpeg.Cs (l) local p = lpcs (l) print("LPEG Cs =") lpprint(l) return p end --- function lpeg.Cc (l) local p = lpcc (l) print("LPEG Cc =") lpprint(l) return p end --- function lpeg.Cmt (l) local p = lpcmt (l) print("LPEG Cmt =") lpprint(l) return p end --- function lpeg.Carg (l) local p = lpcarg(l) print("LPEG Carg =") lpprint(l) return p end - -local type, next, tostring = type, next, tostring -local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format ------ mod, div = math.mod, math.div -local floor = math.floor - -local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt -local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print - --- let's start with an inspector: - -setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) - --- Beware, we predefine a bunch of patterns here and one reason for doing so --- is that we get consistent behaviour in some of the visualizers. 
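-- Editor's note (not part of the original patch): a small usage sketch of the shared
-- patterns table defined just below, in the commented-example style the file itself uses;
-- byte escapes are spelled out to keep the sketch encoding independent.
--
-- local patterns = lpeg.patterns
-- print(lpeg.match(patterns.validutf8,"a\195\169b"))     -- true  ("aéb" is well formed utf-8)
-- print(lpeg.match(patterns.validutf8,"a\200b"))         -- false (lead byte without continuation)
-- print(lpeg.match(patterns.utftype,"\239\187\191oeps")) -- utf-8 (recognized by its bom)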
- -lpeg.patterns = lpeg.patterns or { } -- so that we can share -local patterns = lpeg.patterns - - -local anything = P(1) -local endofstring = P(-1) -local alwaysmatched = P(true) - -patterns.anything = anything -patterns.endofstring = endofstring -patterns.beginofstring = alwaysmatched -patterns.alwaysmatched = alwaysmatched - -local digit, sign = R('09'), S('+-') -local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") -local newline = crlf + S("\r\n") -- cr + lf -local escaped = P("\\") * anything -local squote = P("'") -local dquote = P('"') -local space = P(" ") - -local utfbom_32_be = P('\000\000\254\255') -local utfbom_32_le = P('\255\254\000\000') -local utfbom_16_be = P('\255\254') -local utfbom_16_le = P('\254\255') -local utfbom_8 = P('\239\187\191') -local utfbom = utfbom_32_be + utfbom_32_le - + utfbom_16_be + utfbom_16_le - + utfbom_8 -local utftype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le") - + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le") - + utfbom_8 * Cc("utf-8") + alwaysmatched * Cc("utf-8") -- assume utf8 -local utfoffset = utfbom_32_be * Cc(4) + utfbom_32_le * Cc(4) - + utfbom_16_be * Cc(2) + utfbom_16_le * Cc(2) - + utfbom_8 * Cc(3) + Cc(0) - -local utf8next = R("\128\191") - -patterns.utf8one = R("\000\127") -patterns.utf8two = R("\194\223") * utf8next -patterns.utf8three = R("\224\239") * utf8next * utf8next -patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next -patterns.utfbom = utfbom -patterns.utftype = utftype -patterns.utfoffset = utfoffset - -local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four -local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false) - -local utf8character = P(1) * R("\128\191")^0 -- unchecked but fast - -patterns.utf8 = utf8char -patterns.utf8char = utf8char -patterns.utf8character = utf8character -- this one can be used in most cases so we might use that one -patterns.validutf8 = validutf8char -patterns.validutf8char = validutf8char - -local eol = S("\n\r") -local spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto) -local whitespace = eol + spacer -local nonspacer = 1 - spacer -local nonwhitespace = 1 - whitespace - -patterns.eol = eol -patterns.spacer = spacer -patterns.whitespace = whitespace -patterns.nonspacer = nonspacer -patterns.nonwhitespace = nonwhitespace - -local stripper = spacer^0 * C((spacer^0 * nonspacer^1)^0) -- from example by roberto - ------ collapser = Cs(spacer^0/"" * ((spacer^1 * P(-1) / "") + (spacer^1/" ") + P(1))^0) -local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0)) - -patterns.stripper = stripper -patterns.collapser = collapser - -patterns.digit = digit -patterns.sign = sign -patterns.cardinal = sign^0 * digit^1 -patterns.integer = sign^0 * digit^1 -patterns.unsigned = digit^0 * P('.') * digit^1 -patterns.float = sign^0 * patterns.unsigned -patterns.cunsigned = digit^0 * P(',') * digit^1 -patterns.cfloat = sign^0 * patterns.cunsigned -patterns.number = patterns.float + patterns.integer -patterns.cnumber = patterns.cfloat + patterns.integer -patterns.oct = P("0") * R("07")^1 -patterns.octal = patterns.oct -patterns.HEX = P("0x") * R("09","AF")^1 -patterns.hex = P("0x") * R("09","af")^1 -patterns.hexadecimal = P("0x") * R("09","AF","af")^1 -patterns.lowercase = R("az") -patterns.uppercase = R("AZ") -patterns.letter = patterns.lowercase + patterns.uppercase -patterns.space = space -patterns.tab = P("\t") -patterns.spaceortab = patterns.space + patterns.tab 
-patterns.newline = newline -patterns.emptyline = newline^1 -patterns.equal = P("=") -patterns.comma = P(",") -patterns.commaspacer = P(",") * spacer^0 -patterns.period = P(".") -patterns.colon = P(":") -patterns.semicolon = P(";") -patterns.underscore = P("_") -patterns.escaped = escaped -patterns.squote = squote -patterns.dquote = dquote -patterns.nosquote = (escaped + (1-squote))^0 -patterns.nodquote = (escaped + (1-dquote))^0 -patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"") -- will change to C in the middle -patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"") -- will change to C in the middle -patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble -patterns.unspacer = ((patterns.spacer^1)/"")^0 - -patterns.singlequoted = squote * patterns.nosquote * squote -patterns.doublequoted = dquote * patterns.nodquote * dquote -patterns.quoted = patterns.doublequoted + patterns.singlequoted - -patterns.propername = R("AZ","az","__") * R("09","AZ","az", "__")^0 * P(-1) - -patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1 -patterns.beginline = #(1-newline) - -patterns.longtostring = Cs(whitespace^0/"" * nonwhitespace^0 * ((whitespace^0/" " * (patterns.quoted + nonwhitespace)^1)^0)) - -local function anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * V(1) } -end - -lpeg.anywhere = anywhere - -function lpeg.instringchecker(p) - p = anywhere(p) - return function(str) - return lpegmatch(p,str) and true or false - end -end - -function lpeg.splitter(pattern, action) - return (((1-P(pattern))^1)/action+1)^0 -end - -function lpeg.tsplitter(pattern, action) - return Ct((((1-P(pattern))^1)/action+1)^0) -end - --- probleem: separator can be lpeg and that does not hash too well, but --- it's quite okay as the key is then not garbage collected - -local splitters_s, splitters_m, splitters_t = { }, { }, { } - -local function splitat(separator,single) - local splitter = (single and splitters_s[separator]) or splitters_m[separator] - if not splitter then - separator = P(separator) - local other = C((1 - separator)^0) - if single then - local any = anything - splitter = other * (separator * C(any^0) + "") -- ? 
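-- editor's note: with single == true the splitter captures the text before the first
-- separator and the remainder as one piece, cf. the splitat examples further down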
- splitters_s[separator] = splitter - else - splitter = other * (separator * other)^0 - splitters_m[separator] = splitter - end - end - return splitter -end - -local function tsplitat(separator) - local splitter = splitters_t[separator] - if not splitter then - splitter = Ct(splitat(separator)) - splitters_t[separator] = splitter - end - return splitter -end - -lpeg.splitat = splitat -lpeg.tsplitat = tsplitat - -function string.splitup(str,separator) - if not separator then - separator = "," - end - return lpegmatch(splitters_m[separator] or splitat(separator),str) -end - --- local p = splitat("->",false) print(lpegmatch(p,"oeps->what->more")) -- oeps what more --- local p = splitat("->",true) print(lpegmatch(p,"oeps->what->more")) -- oeps what->more --- local p = splitat("->",false) print(lpegmatch(p,"oeps")) -- oeps --- local p = splitat("->",true) print(lpegmatch(p,"oeps")) -- oeps - -local cache = { } - -function lpeg.split(separator,str) - local c = cache[separator] - if not c then - c = tsplitat(separator) - cache[separator] = c - end - return lpegmatch(c,str) -end - -function string.split(str,separator) - if separator then - local c = cache[separator] - if not c then - c = tsplitat(separator) - cache[separator] = c - end - return lpegmatch(c,str) - else - return { str } - end -end - -local spacing = patterns.spacer^0 * newline -- sort of strip -local empty = spacing * Cc("") -local nonempty = Cs((1-spacing)^1) * spacing^-1 -local content = (empty + nonempty)^1 - -patterns.textline = content - -local linesplitter = tsplitat(newline) - -patterns.linesplitter = linesplitter - -function string.splitlines(str) - return lpegmatch(linesplitter,str) -end - --- lpeg.splitters = cache -- no longer public - -local cache = { } - -function lpeg.checkedsplit(separator,str) - local c = cache[separator] - if not c then - separator = P(separator) - local other = C((1 - separator)^1) - c = Ct(separator^0 * other * (separator^1 * other)^0) - cache[separator] = c - end - return lpegmatch(c,str) -end - -function string.checkedsplit(str,separator) - local c = cache[separator] - if not c then - separator = P(separator) - local other = C((1 - separator)^1) - c = Ct(separator^0 * other * (separator^1 * other)^0) - cache[separator] = c - end - return lpegmatch(c,str) -end - --- from roberto's site: - -local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end -local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end -local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end - -local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4 - -patterns.utf8byte = utf8byte - ---~ local str = " a b c d " - ---~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpegmatch(s,str).."]") ---~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpegmatch(s,str).."]") ---~ local s = lpeg.stripper("ab") print("["..lpegmatch(s,str).."]") ---~ local s = lpeg.keeper("ab") print("["..lpegmatch(s,str).."]") - -local cache = { } - -function lpeg.stripper(str) - if type(str) == "string" then - local s = cache[str] - if not s then - s = Cs(((S(str)^1)/"" + 1)^0) - cache[str] = s - end - return s - else - return Cs(((str^1)/"" + 1)^0) - end -end - -local cache = { } - -function lpeg.keeper(str) - if type(str) == "string" then - local s = cache[str] - if not s then - s = Cs((((1-S(str))^1)/"" + 1)^0) - cache[str] = s - end - return s - else - return Cs((((1-str)^1)/"" + 1)^0) 
- end -end - -function lpeg.frontstripper(str) -- or pattern (yet undocumented) - return (P(str) + P(true)) * Cs(anything^0) -end - -function lpeg.endstripper(str) -- or pattern (yet undocumented) - return Cs((1 - P(str) * endofstring)^0) -end - --- Just for fun I looked at the used bytecode and --- p = (p and p + pp) or pp gets one more (testset). - --- todo: cache when string - -function lpeg.replacer(one,two,makefunction,isutf) -- in principle we should sort the keys - local pattern - local u = isutf and utf8char or 1 - if type(one) == "table" then - local no = #one - local p = P(false) - if no == 0 then - for k, v in next, one do - p = p + P(k) / v - end - pattern = Cs((p + u)^0) - elseif no == 1 then - local o = one[1] - one, two = P(o[1]), o[2] - -- pattern = Cs(((1-one)^1 + one/two)^0) - pattern = Cs((one/two + u)^0) - else - for i=1,no do - local o = one[i] - p = p + P(o[1]) / o[2] - end - pattern = Cs((p + u)^0) - end - else - pattern = Cs((P(one)/(two or "") + u)^0) - end - if makefunction then - return function(str) - return lpegmatch(pattern,str) - end - else - return pattern - end -end - -function lpeg.finder(lst,makefunction) - local pattern - if type(lst) == "table" then - pattern = P(false) - if #lst == 0 then - for k, v in next, lst do - pattern = pattern + P(k) -- ignore key, so we can use a replacer table - end - else - for i=1,#lst do - pattern = pattern + P(lst[i]) - end - end - else - pattern = P(lst) - end - pattern = (1-pattern)^0 * pattern - if makefunction then - return function(str) - return lpegmatch(pattern,str) - end - else - return pattern - end -end - --- print(lpeg.match(lpeg.replacer("e","a"),"test test")) --- print(lpeg.match(lpeg.replacer{{"e","a"}},"test test")) --- print(lpeg.match(lpeg.replacer({ e = "a", t = "x" }),"test test")) - -local splitters_f, splitters_s = { }, { } - -function lpeg.firstofsplit(separator) -- always return value - local splitter = splitters_f[separator] - if not splitter then - separator = P(separator) - splitter = C((1 - separator)^0) - splitters_f[separator] = splitter - end - return splitter -end - -function lpeg.secondofsplit(separator) -- nil if not split - local splitter = splitters_s[separator] - if not splitter then - separator = P(separator) - splitter = (1 - separator)^0 * separator * C(anything^0) - splitters_s[separator] = splitter - end - return splitter -end - -function lpeg.balancer(left,right) - left, right = P(left), P(right) - return P { left * ((1 - left - right) + V(1))^0 * right } -end - --- print(1,lpegmatch(lpeg.firstofsplit(":"),"bc:de")) --- print(2,lpegmatch(lpeg.firstofsplit(":"),":de")) -- empty --- print(3,lpegmatch(lpeg.firstofsplit(":"),"bc")) --- print(4,lpegmatch(lpeg.secondofsplit(":"),"bc:de")) --- print(5,lpegmatch(lpeg.secondofsplit(":"),"bc:")) -- empty --- print(6,lpegmatch(lpeg.secondofsplit(":",""),"bc")) --- print(7,lpegmatch(lpeg.secondofsplit(":"),"bc")) --- print(9,lpegmatch(lpeg.secondofsplit(":","123"),"bc")) - --- -- slower: --- --- function lpeg.counter(pattern) --- local n, pattern = 0, (lpeg.P(pattern)/function() n = n + 1 end + lpeg.anything)^0 --- return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end --- end - -local nany = utf8char/"" - -function lpeg.counter(pattern) - pattern = Cs((P(pattern)/" " + nany)^0) - return function(str) - return #lpegmatch(pattern,str) - end -end - --- utf extensies - -utf = utf or (unicode and unicode.utf8) or { } - -local utfcharacters = utf and utf.characters or string.utfcharacters -local utfgmatch = utf and utf.gmatch -local 
utfchar = utf and utf.char - -lpeg.UP = lpeg.P - -if utfcharacters then - - function lpeg.US(str) - local p = P(false) - for uc in utfcharacters(str) do - p = p + P(uc) - end - return p - end - - -elseif utfgmatch then - - function lpeg.US(str) - local p = P(false) - for uc in utfgmatch(str,".") do - p = p + P(uc) - end - return p - end - -else - - function lpeg.US(str) - local p = P(false) - local f = function(uc) - p = p + P(uc) - end - lpegmatch((utf8char/f)^0,str) - return p - end - -end - -local range = utf8byte * utf8byte + Cc(false) -- utf8byte is already a capture - -function lpeg.UR(str,more) - local first, last - if type(str) == "number" then - first = str - last = more or first - else - first, last = lpegmatch(range,str) - if not last then - return P(str) - end - end - if first == last then - return P(str) - elseif utfchar and (last - first < 8) then -- a somewhat arbitrary criterium - local p = P(false) - for i=first,last do - p = p + P(utfchar(i)) - end - return p -- nil when invalid range - else - local f = function(b) - return b >= first and b <= last - end - -- tricky, these nested captures - return utf8byte / f -- nil when invalid range - end -end - --- print(lpeg.match(lpeg.Cs((C(lpeg.UR("αω"))/{ ["χ"] = "OEPS" })^0),"αωχαω")) - --- lpeg.print(lpeg.R("ab","cd","gh")) --- lpeg.print(lpeg.P("a","b","c")) --- lpeg.print(lpeg.S("a","b","c")) - --- print(lpeg.count("äáàa",lpeg.P("á") + lpeg.P("à"))) --- print(lpeg.count("äáàa",lpeg.UP("áà"))) --- print(lpeg.count("äáàa",lpeg.US("àá"))) --- print(lpeg.count("äáàa",lpeg.UR("aá"))) --- print(lpeg.count("äáàa",lpeg.UR("àá"))) --- print(lpeg.count("äáàa",lpeg.UR(0x0000,0xFFFF))) - -function lpeg.is_lpeg(p) - return p and lpegtype(p) == "pattern" -end - -function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order - if type(list) ~= "table" then - list = { list, ... } - end - -- table.sort(list) -- longest match first - local p = P(list[1]) - for l=2,#list do - p = p + P(list[l]) - end - return p -end - --- For the moment here, but it might move to utilities. Beware, we need to --- have the longest keyword first, so 'aaa' comes beforte 'aa' which is why we --- loop back from the end cq. prepend. 
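-- A small usage sketch for the oneof helper defined above; as its comment notes, the
-- alternatives are assumed to be given longest first, so "elseif" precedes "else".

local keyword = lpeg.oneof("elseif","else","if","then")
print(lpeg.match(keyword,"elseif x"))   -- 7, the position right after the match
print(lpeg.match(keyword,"while x"))    -- nil, no alternative matches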
- -local sort = table.sort - -local function copyindexed(old) - local new = { } - for i=1,#old do - new[i] = old - end - return new -end - -local function sortedkeys(tab) - local keys, s = { }, 0 - for key,_ in next, tab do - s = s + 1 - keys[s] = key - end - sort(keys) - return keys -end - -function lpeg.append(list,pp,delayed,checked) - local p = pp - if #list > 0 then - local keys = copyindexed(list) - sort(keys) - for i=#keys,1,-1 do - local k = keys[i] - if p then - p = P(k) + p - else - p = P(k) - end - end - elseif delayed then -- hm, it looks like the lpeg parser resolves anyway - local keys = sortedkeys(list) - if p then - for i=1,#keys,1 do - local k = keys[i] - local v = list[k] - p = P(k)/list + p - end - else - for i=1,#keys do - local k = keys[i] - local v = list[k] - if p then - p = P(k) + p - else - p = P(k) - end - end - if p then - p = p / list - end - end - elseif checked then - -- problem: substitution gives a capture - local keys = sortedkeys(list) - for i=1,#keys do - local k = keys[i] - local v = list[k] - if p then - if k == v then - p = P(k) + p - else - p = P(k)/v + p - end - else - if k == v then - p = P(k) - else - p = P(k)/v - end - end - end - else - local keys = sortedkeys(list) - for i=1,#keys do - local k = keys[i] - local v = list[k] - if p then - p = P(k)/v + p - else - p = P(k)/v - end - end - end - return p -end - --- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true)) --- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true)) - --- function lpeg.exact_match(words,case_insensitive) --- local pattern = concat(words) --- if case_insensitive then --- local pattern = S(upper(characters)) + S(lower(characters)) --- local list = { } --- for i=1,#words do --- list[lower(words[i])] = true --- end --- return Cmt(pattern^1, function(_,i,s) --- return list[lower(s)] and i --- end) --- else --- local pattern = S(concat(words)) --- local list = { } --- for i=1,#words do --- list[words[i]] = true --- end --- return Cmt(pattern^1, function(_,i,s) --- return list[s] and i --- end) --- end --- end - --- experiment: - -local function make(t) - local p - local keys = sortedkeys(t) - for i=1,#keys do - local k = keys[i] - local v = t[k] - if not p then - if next(v) then - p = P(k) * make(v) - else - p = P(k) - end - else - if next(v) then - p = p + P(k) * make(v) - else - p = p + P(k) - end - end - end - return p -end - -function lpeg.utfchartabletopattern(list) -- goes to util-lpg - local tree = { } - for i=1,#list do - local t = tree - for c in gmatch(list[i],".") do - if not t[c] then - t[c] = { } - end - t = t[c] - end - end - return make(tree) -end - --- inspect ( lpeg.utfchartabletopattern { --- utfchar(0x00A0), -- nbsp --- utfchar(0x2000), -- enquad --- utfchar(0x2001), -- emquad --- utfchar(0x2002), -- enspace --- utfchar(0x2003), -- emspace --- utfchar(0x2004), -- threeperemspace --- utfchar(0x2005), -- fourperemspace --- utfchar(0x2006), -- sixperemspace --- utfchar(0x2007), -- figurespace --- utfchar(0x2008), -- punctuationspace --- utfchar(0x2009), -- breakablethinspace --- utfchar(0x200A), -- hairspace --- utfchar(0x200B), -- zerowidthspace --- utfchar(0x202F), -- narrownobreakspace --- utfchar(0x205F), -- math thinspace --- } ) - --- a few handy ones: --- --- faster than find(str,"[\n\r]") when match and # > 7 and always faster when # > 3 - -patterns.containseol = lpeg.finder(eol) -- (1-eol)^0 * eol - --- The next pattern^n variant is based on an approach suggested --- by Roberto: constructing a big repetition 
in chunks. --- --- Being sparse is not needed, and only complicate matters and --- the number of redundant entries is not that large. - -local function nextstep(n,step,result) - local m = n % step -- mod(n,step) - local d = floor(n/step) -- div(n,step) - if d > 0 then - local v = V(tostring(step)) - local s = result.start - for i=1,d do - if s then - s = v * s - else - s = v - end - end - result.start = s - end - if step > 1 and result.start then - local v = V(tostring(step/2)) - result[tostring(step)] = v * v - end - if step > 0 then - return nextstep(m,step/2,result) - else - return result - end -end - -function lpeg.times(pattern,n) - return P(nextstep(n,2^16,{ "start", ["1"] = pattern })) -end - --- local p = lpeg.Cs((1 - lpeg.times(lpeg.P("AB"),25))^1) --- local s = "12" .. string.rep("AB",20) .. "34" .. string.rep("AB",30) .. "56" --- inspect(p) --- print(lpeg.match(p,s)) - --- moved here (before util-str) - -local digit = R("09") -local period = P(".") -local zero = P("0") -local trailingzeros = zero^0 * -digit -- suggested by Roberto R -local case_1 = period * trailingzeros / "" -local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") -local number = digit^1 * (case_1 + case_2) -local stripper = Cs((number + 1)^0) - -lpeg.patterns.stripzeros = stripper - --- local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100" --- collectgarbage("collect") --- str = string.rep(sample,10000) --- local ts = os.clock() --- lpegmatch(stripper,str) --- print(#str, os.clock()-ts, lpegmatch(stripper,sample)) - +if not modules then modules = { } end modules ['l-lpeg'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1) + +-- move utf -> l-unicode +-- move string -> l-string or keep it here + +lpeg = require("lpeg") + +-- tracing (only used when we encounter a problem in integration of lpeg in luatex) + +-- some code will move to unicode and string + +-- local lpmatch = lpeg.match +-- local lpprint = lpeg.print +-- local lpp = lpeg.P +-- local lpr = lpeg.R +-- local lps = lpeg.S +-- local lpc = lpeg.C +-- local lpb = lpeg.B +-- local lpv = lpeg.V +-- local lpcf = lpeg.Cf +-- local lpcb = lpeg.Cb +-- local lpcg = lpeg.Cg +-- local lpct = lpeg.Ct +-- local lpcs = lpeg.Cs +-- local lpcc = lpeg.Cc +-- local lpcmt = lpeg.Cmt +-- local lpcarg = lpeg.Carg + +-- function lpeg.match(l,...) print("LPEG MATCH") lpprint(l) return lpmatch(l,...) 
end + +-- function lpeg.P (l) local p = lpp (l) print("LPEG P =") lpprint(l) return p end +-- function lpeg.R (l) local p = lpr (l) print("LPEG R =") lpprint(l) return p end +-- function lpeg.S (l) local p = lps (l) print("LPEG S =") lpprint(l) return p end +-- function lpeg.C (l) local p = lpc (l) print("LPEG C =") lpprint(l) return p end +-- function lpeg.B (l) local p = lpb (l) print("LPEG B =") lpprint(l) return p end +-- function lpeg.V (l) local p = lpv (l) print("LPEG V =") lpprint(l) return p end +-- function lpeg.Cf (l) local p = lpcf (l) print("LPEG Cf =") lpprint(l) return p end +-- function lpeg.Cb (l) local p = lpcb (l) print("LPEG Cb =") lpprint(l) return p end +-- function lpeg.Cg (l) local p = lpcg (l) print("LPEG Cg =") lpprint(l) return p end +-- function lpeg.Ct (l) local p = lpct (l) print("LPEG Ct =") lpprint(l) return p end +-- function lpeg.Cs (l) local p = lpcs (l) print("LPEG Cs =") lpprint(l) return p end +-- function lpeg.Cc (l) local p = lpcc (l) print("LPEG Cc =") lpprint(l) return p end +-- function lpeg.Cmt (l) local p = lpcmt (l) print("LPEG Cmt =") lpprint(l) return p end +-- function lpeg.Carg (l) local p = lpcarg(l) print("LPEG Carg =") lpprint(l) return p end + +local type, next, tostring = type, next, tostring +local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format +----- mod, div = math.mod, math.div +local floor = math.floor + +local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt +local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print + +-- let's start with an inspector: + +setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) + +-- Beware, we predefine a bunch of patterns here and one reason for doing so +-- is that we get consistent behaviour in some of the visualizers. 
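-- A minimal sketch of what the inspector registered above buys us: the global
-- inspect() (defined in l-lua.lua later in this patch) will dump lpeg patterns via
-- lpeg.print instead of showing a bare userdata value, assuming the lpeg build
-- exposes lpeg.print as the code above already does.

inspect(lpeg.P("a")^1 * lpeg.P("b"))   -- prints the compiled pattern tree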
+ +lpeg.patterns = lpeg.patterns or { } -- so that we can share +local patterns = lpeg.patterns + + +local anything = P(1) +local endofstring = P(-1) +local alwaysmatched = P(true) + +patterns.anything = anything +patterns.endofstring = endofstring +patterns.beginofstring = alwaysmatched +patterns.alwaysmatched = alwaysmatched + +local digit, sign = R('09'), S('+-') +local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") +local newline = crlf + S("\r\n") -- cr + lf +local escaped = P("\\") * anything +local squote = P("'") +local dquote = P('"') +local space = P(" ") + +local utfbom_32_be = P('\000\000\254\255') +local utfbom_32_le = P('\255\254\000\000') +local utfbom_16_be = P('\255\254') +local utfbom_16_le = P('\254\255') +local utfbom_8 = P('\239\187\191') +local utfbom = utfbom_32_be + utfbom_32_le + + utfbom_16_be + utfbom_16_le + + utfbom_8 +local utftype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le") + + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le") + + utfbom_8 * Cc("utf-8") + alwaysmatched * Cc("utf-8") -- assume utf8 +local utfoffset = utfbom_32_be * Cc(4) + utfbom_32_le * Cc(4) + + utfbom_16_be * Cc(2) + utfbom_16_le * Cc(2) + + utfbom_8 * Cc(3) + Cc(0) + +local utf8next = R("\128\191") + +patterns.utf8one = R("\000\127") +patterns.utf8two = R("\194\223") * utf8next +patterns.utf8three = R("\224\239") * utf8next * utf8next +patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next +patterns.utfbom = utfbom +patterns.utftype = utftype +patterns.utfoffset = utfoffset + +local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four +local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false) + +local utf8character = P(1) * R("\128\191")^0 -- unchecked but fast + +patterns.utf8 = utf8char +patterns.utf8char = utf8char +patterns.utf8character = utf8character -- this one can be used in most cases so we might use that one +patterns.validutf8 = validutf8char +patterns.validutf8char = validutf8char + +local eol = S("\n\r") +local spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto) +local whitespace = eol + spacer +local nonspacer = 1 - spacer +local nonwhitespace = 1 - whitespace + +patterns.eol = eol +patterns.spacer = spacer +patterns.whitespace = whitespace +patterns.nonspacer = nonspacer +patterns.nonwhitespace = nonwhitespace + +local stripper = spacer^0 * C((spacer^0 * nonspacer^1)^0) -- from example by roberto + +----- collapser = Cs(spacer^0/"" * ((spacer^1 * P(-1) / "") + (spacer^1/" ") + P(1))^0) +local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0)) + +patterns.stripper = stripper +patterns.collapser = collapser + +patterns.digit = digit +patterns.sign = sign +patterns.cardinal = sign^0 * digit^1 +patterns.integer = sign^0 * digit^1 +patterns.unsigned = digit^0 * P('.') * digit^1 +patterns.float = sign^0 * patterns.unsigned +patterns.cunsigned = digit^0 * P(',') * digit^1 +patterns.cfloat = sign^0 * patterns.cunsigned +patterns.number = patterns.float + patterns.integer +patterns.cnumber = patterns.cfloat + patterns.integer +patterns.oct = P("0") * R("07")^1 +patterns.octal = patterns.oct +patterns.HEX = P("0x") * R("09","AF")^1 +patterns.hex = P("0x") * R("09","af")^1 +patterns.hexadecimal = P("0x") * R("09","AF","af")^1 +patterns.lowercase = R("az") +patterns.uppercase = R("AZ") +patterns.letter = patterns.lowercase + patterns.uppercase +patterns.space = space +patterns.tab = P("\t") +patterns.spaceortab = patterns.space + patterns.tab 
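-- A minimal sketch of two of the predefined patterns in action, assuming the module
-- is loaded: stripper trims surrounding spaces, collapser also squeezes inner runs.

print(lpeg.match(lpeg.patterns.stripper ,"  a  b  "))   -- "a  b"
print(lpeg.match(lpeg.patterns.collapser,"  a  b  "))   -- "a b"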
+patterns.newline = newline +patterns.emptyline = newline^1 +patterns.equal = P("=") +patterns.comma = P(",") +patterns.commaspacer = P(",") * spacer^0 +patterns.period = P(".") +patterns.colon = P(":") +patterns.semicolon = P(";") +patterns.underscore = P("_") +patterns.escaped = escaped +patterns.squote = squote +patterns.dquote = dquote +patterns.nosquote = (escaped + (1-squote))^0 +patterns.nodquote = (escaped + (1-dquote))^0 +patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"") -- will change to C in the middle +patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"") -- will change to C in the middle +patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble +patterns.unspacer = ((patterns.spacer^1)/"")^0 + +patterns.singlequoted = squote * patterns.nosquote * squote +patterns.doublequoted = dquote * patterns.nodquote * dquote +patterns.quoted = patterns.doublequoted + patterns.singlequoted + +patterns.propername = R("AZ","az","__") * R("09","AZ","az", "__")^0 * P(-1) + +patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1 +patterns.beginline = #(1-newline) + +patterns.longtostring = Cs(whitespace^0/"" * nonwhitespace^0 * ((whitespace^0/" " * (patterns.quoted + nonwhitespace)^1)^0)) + +local function anywhere(pattern) --slightly adapted from website + return P { P(pattern) + 1 * V(1) } +end + +lpeg.anywhere = anywhere + +function lpeg.instringchecker(p) + p = anywhere(p) + return function(str) + return lpegmatch(p,str) and true or false + end +end + +function lpeg.splitter(pattern, action) + return (((1-P(pattern))^1)/action+1)^0 +end + +function lpeg.tsplitter(pattern, action) + return Ct((((1-P(pattern))^1)/action+1)^0) +end + +-- probleem: separator can be lpeg and that does not hash too well, but +-- it's quite okay as the key is then not garbage collected + +local splitters_s, splitters_m, splitters_t = { }, { }, { } + +local function splitat(separator,single) + local splitter = (single and splitters_s[separator]) or splitters_m[separator] + if not splitter then + separator = P(separator) + local other = C((1 - separator)^0) + if single then + local any = anything + splitter = other * (separator * C(any^0) + "") -- ? 
+ splitters_s[separator] = splitter + else + splitter = other * (separator * other)^0 + splitters_m[separator] = splitter + end + end + return splitter +end + +local function tsplitat(separator) + local splitter = splitters_t[separator] + if not splitter then + splitter = Ct(splitat(separator)) + splitters_t[separator] = splitter + end + return splitter +end + +lpeg.splitat = splitat +lpeg.tsplitat = tsplitat + +function string.splitup(str,separator) + if not separator then + separator = "," + end + return lpegmatch(splitters_m[separator] or splitat(separator),str) +end + +-- local p = splitat("->",false) print(lpegmatch(p,"oeps->what->more")) -- oeps what more +-- local p = splitat("->",true) print(lpegmatch(p,"oeps->what->more")) -- oeps what->more +-- local p = splitat("->",false) print(lpegmatch(p,"oeps")) -- oeps +-- local p = splitat("->",true) print(lpegmatch(p,"oeps")) -- oeps + +local cache = { } + +function lpeg.split(separator,str) + local c = cache[separator] + if not c then + c = tsplitat(separator) + cache[separator] = c + end + return lpegmatch(c,str) +end + +function string.split(str,separator) + if separator then + local c = cache[separator] + if not c then + c = tsplitat(separator) + cache[separator] = c + end + return lpegmatch(c,str) + else + return { str } + end +end + +local spacing = patterns.spacer^0 * newline -- sort of strip +local empty = spacing * Cc("") +local nonempty = Cs((1-spacing)^1) * spacing^-1 +local content = (empty + nonempty)^1 + +patterns.textline = content + +local linesplitter = tsplitat(newline) + +patterns.linesplitter = linesplitter + +function string.splitlines(str) + return lpegmatch(linesplitter,str) +end + +-- lpeg.splitters = cache -- no longer public + +local cache = { } + +function lpeg.checkedsplit(separator,str) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^1) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return lpegmatch(c,str) +end + +function string.checkedsplit(str,separator) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^1) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return lpegmatch(c,str) +end + +-- from roberto's site: + +local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end +local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end +local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end + +local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4 + +patterns.utf8byte = utf8byte + +--~ local str = " a b c d " + +--~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.stripper("ab") print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.keeper("ab") print("["..lpegmatch(s,str).."]") + +local cache = { } + +function lpeg.stripper(str) + if type(str) == "string" then + local s = cache[str] + if not s then + s = Cs(((S(str)^1)/"" + 1)^0) + cache[str] = s + end + return s + else + return Cs(((str^1)/"" + 1)^0) + end +end + +local cache = { } + +function lpeg.keeper(str) + if type(str) == "string" then + local s = cache[str] + if not s then + s = Cs((((1-S(str))^1)/"" + 1)^0) + cache[str] = s + end + return s + else + return Cs((((1-str)^1)/"" + 1)^0) 
+ end +end + +function lpeg.frontstripper(str) -- or pattern (yet undocumented) + return (P(str) + P(true)) * Cs(anything^0) +end + +function lpeg.endstripper(str) -- or pattern (yet undocumented) + return Cs((1 - P(str) * endofstring)^0) +end + +-- Just for fun I looked at the used bytecode and +-- p = (p and p + pp) or pp gets one more (testset). + +-- todo: cache when string + +function lpeg.replacer(one,two,makefunction,isutf) -- in principle we should sort the keys + local pattern + local u = isutf and utf8char or 1 + if type(one) == "table" then + local no = #one + local p = P(false) + if no == 0 then + for k, v in next, one do + p = p + P(k) / v + end + pattern = Cs((p + u)^0) + elseif no == 1 then + local o = one[1] + one, two = P(o[1]), o[2] + -- pattern = Cs(((1-one)^1 + one/two)^0) + pattern = Cs((one/two + u)^0) + else + for i=1,no do + local o = one[i] + p = p + P(o[1]) / o[2] + end + pattern = Cs((p + u)^0) + end + else + pattern = Cs((P(one)/(two or "") + u)^0) + end + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end +end + +function lpeg.finder(lst,makefunction) + local pattern + if type(lst) == "table" then + pattern = P(false) + if #lst == 0 then + for k, v in next, lst do + pattern = pattern + P(k) -- ignore key, so we can use a replacer table + end + else + for i=1,#lst do + pattern = pattern + P(lst[i]) + end + end + else + pattern = P(lst) + end + pattern = (1-pattern)^0 * pattern + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end +end + +-- print(lpeg.match(lpeg.replacer("e","a"),"test test")) +-- print(lpeg.match(lpeg.replacer{{"e","a"}},"test test")) +-- print(lpeg.match(lpeg.replacer({ e = "a", t = "x" }),"test test")) + +local splitters_f, splitters_s = { }, { } + +function lpeg.firstofsplit(separator) -- always return value + local splitter = splitters_f[separator] + if not splitter then + separator = P(separator) + splitter = C((1 - separator)^0) + splitters_f[separator] = splitter + end + return splitter +end + +function lpeg.secondofsplit(separator) -- nil if not split + local splitter = splitters_s[separator] + if not splitter then + separator = P(separator) + splitter = (1 - separator)^0 * separator * C(anything^0) + splitters_s[separator] = splitter + end + return splitter +end + +function lpeg.balancer(left,right) + left, right = P(left), P(right) + return P { left * ((1 - left - right) + V(1))^0 * right } +end + +-- print(1,lpegmatch(lpeg.firstofsplit(":"),"bc:de")) +-- print(2,lpegmatch(lpeg.firstofsplit(":"),":de")) -- empty +-- print(3,lpegmatch(lpeg.firstofsplit(":"),"bc")) +-- print(4,lpegmatch(lpeg.secondofsplit(":"),"bc:de")) +-- print(5,lpegmatch(lpeg.secondofsplit(":"),"bc:")) -- empty +-- print(6,lpegmatch(lpeg.secondofsplit(":",""),"bc")) +-- print(7,lpegmatch(lpeg.secondofsplit(":"),"bc")) +-- print(9,lpegmatch(lpeg.secondofsplit(":","123"),"bc")) + +-- -- slower: +-- +-- function lpeg.counter(pattern) +-- local n, pattern = 0, (lpeg.P(pattern)/function() n = n + 1 end + lpeg.anything)^0 +-- return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end +-- end + +local nany = utf8char/"" + +function lpeg.counter(pattern) + pattern = Cs((P(pattern)/" " + nany)^0) + return function(str) + return #lpegmatch(pattern,str) + end +end + +-- utf extensies + +utf = utf or (unicode and unicode.utf8) or { } + +local utfcharacters = utf and utf.characters or string.utfcharacters +local utfgmatch = utf and utf.gmatch +local 
utfchar = utf and utf.char + +lpeg.UP = lpeg.P + +if utfcharacters then + + function lpeg.US(str) + local p = P(false) + for uc in utfcharacters(str) do + p = p + P(uc) + end + return p + end + + +elseif utfgmatch then + + function lpeg.US(str) + local p = P(false) + for uc in utfgmatch(str,".") do + p = p + P(uc) + end + return p + end + +else + + function lpeg.US(str) + local p = P(false) + local f = function(uc) + p = p + P(uc) + end + lpegmatch((utf8char/f)^0,str) + return p + end + +end + +local range = utf8byte * utf8byte + Cc(false) -- utf8byte is already a capture + +function lpeg.UR(str,more) + local first, last + if type(str) == "number" then + first = str + last = more or first + else + first, last = lpegmatch(range,str) + if not last then + return P(str) + end + end + if first == last then + return P(str) + elseif utfchar and (last - first < 8) then -- a somewhat arbitrary criterium + local p = P(false) + for i=first,last do + p = p + P(utfchar(i)) + end + return p -- nil when invalid range + else + local f = function(b) + return b >= first and b <= last + end + -- tricky, these nested captures + return utf8byte / f -- nil when invalid range + end +end + +-- print(lpeg.match(lpeg.Cs((C(lpeg.UR("αω"))/{ ["χ"] = "OEPS" })^0),"αωχαω")) + +-- lpeg.print(lpeg.R("ab","cd","gh")) +-- lpeg.print(lpeg.P("a","b","c")) +-- lpeg.print(lpeg.S("a","b","c")) + +-- print(lpeg.count("äáàa",lpeg.P("á") + lpeg.P("à"))) +-- print(lpeg.count("äáàa",lpeg.UP("áà"))) +-- print(lpeg.count("äáàa",lpeg.US("àá"))) +-- print(lpeg.count("äáàa",lpeg.UR("aá"))) +-- print(lpeg.count("äáàa",lpeg.UR("àá"))) +-- print(lpeg.count("äáàa",lpeg.UR(0x0000,0xFFFF))) + +function lpeg.is_lpeg(p) + return p and lpegtype(p) == "pattern" +end + +function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order + if type(list) ~= "table" then + list = { list, ... } + end + -- table.sort(list) -- longest match first + local p = P(list[1]) + for l=2,#list do + p = p + P(list[l]) + end + return p +end + +-- For the moment here, but it might move to utilities. Beware, we need to +-- have the longest keyword first, so 'aaa' comes beforte 'aa' which is why we +-- loop back from the end cq. prepend. 
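-- A usage sketch for the append helper defined below, in its "checked" mode: the keys
-- become an ordered choice with the longest key tried first, which is exactly why the
-- comment above insists on 'aaa' coming before 'aa'. The sample table is made up.

local p = lpeg.append({ aa = "X", aaa = "Y" }, nil, nil, true)
print(lpeg.match(lpeg.Cs((p + 1)^0),"aaab aab ab"))   -- "Yb Xb ab"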
+ +local sort = table.sort + +local function copyindexed(old) + local new = { } + for i=1,#old do + new[i] = old + end + return new +end + +local function sortedkeys(tab) + local keys, s = { }, 0 + for key,_ in next, tab do + s = s + 1 + keys[s] = key + end + sort(keys) + return keys +end + +function lpeg.append(list,pp,delayed,checked) + local p = pp + if #list > 0 then + local keys = copyindexed(list) + sort(keys) + for i=#keys,1,-1 do + local k = keys[i] + if p then + p = P(k) + p + else + p = P(k) + end + end + elseif delayed then -- hm, it looks like the lpeg parser resolves anyway + local keys = sortedkeys(list) + if p then + for i=1,#keys,1 do + local k = keys[i] + local v = list[k] + p = P(k)/list + p + end + else + for i=1,#keys do + local k = keys[i] + local v = list[k] + if p then + p = P(k) + p + else + p = P(k) + end + end + if p then + p = p / list + end + end + elseif checked then + -- problem: substitution gives a capture + local keys = sortedkeys(list) + for i=1,#keys do + local k = keys[i] + local v = list[k] + if p then + if k == v then + p = P(k) + p + else + p = P(k)/v + p + end + else + if k == v then + p = P(k) + else + p = P(k)/v + end + end + end + else + local keys = sortedkeys(list) + for i=1,#keys do + local k = keys[i] + local v = list[k] + if p then + p = P(k)/v + p + else + p = P(k)/v + end + end + end + return p +end + +-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true)) +-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true)) + +-- function lpeg.exact_match(words,case_insensitive) +-- local pattern = concat(words) +-- if case_insensitive then +-- local pattern = S(upper(characters)) + S(lower(characters)) +-- local list = { } +-- for i=1,#words do +-- list[lower(words[i])] = true +-- end +-- return Cmt(pattern^1, function(_,i,s) +-- return list[lower(s)] and i +-- end) +-- else +-- local pattern = S(concat(words)) +-- local list = { } +-- for i=1,#words do +-- list[words[i]] = true +-- end +-- return Cmt(pattern^1, function(_,i,s) +-- return list[s] and i +-- end) +-- end +-- end + +-- experiment: + +local function make(t) + local p + local keys = sortedkeys(t) + for i=1,#keys do + local k = keys[i] + local v = t[k] + if not p then + if next(v) then + p = P(k) * make(v) + else + p = P(k) + end + else + if next(v) then + p = p + P(k) * make(v) + else + p = p + P(k) + end + end + end + return p +end + +function lpeg.utfchartabletopattern(list) -- goes to util-lpg + local tree = { } + for i=1,#list do + local t = tree + for c in gmatch(list[i],".") do + if not t[c] then + t[c] = { } + end + t = t[c] + end + end + return make(tree) +end + +-- inspect ( lpeg.utfchartabletopattern { +-- utfchar(0x00A0), -- nbsp +-- utfchar(0x2000), -- enquad +-- utfchar(0x2001), -- emquad +-- utfchar(0x2002), -- enspace +-- utfchar(0x2003), -- emspace +-- utfchar(0x2004), -- threeperemspace +-- utfchar(0x2005), -- fourperemspace +-- utfchar(0x2006), -- sixperemspace +-- utfchar(0x2007), -- figurespace +-- utfchar(0x2008), -- punctuationspace +-- utfchar(0x2009), -- breakablethinspace +-- utfchar(0x200A), -- hairspace +-- utfchar(0x200B), -- zerowidthspace +-- utfchar(0x202F), -- narrownobreakspace +-- utfchar(0x205F), -- math thinspace +-- } ) + +-- a few handy ones: +-- +-- faster than find(str,"[\n\r]") when match and # > 7 and always faster when # > 3 + +patterns.containseol = lpeg.finder(eol) -- (1-eol)^0 * eol + +-- The next pattern^n variant is based on an approach suggested +-- by Roberto: constructing a big repetition 
in chunks. +-- +-- Being sparse is not needed, and only complicate matters and +-- the number of redundant entries is not that large. + +local function nextstep(n,step,result) + local m = n % step -- mod(n,step) + local d = floor(n/step) -- div(n,step) + if d > 0 then + local v = V(tostring(step)) + local s = result.start + for i=1,d do + if s then + s = v * s + else + s = v + end + end + result.start = s + end + if step > 1 and result.start then + local v = V(tostring(step/2)) + result[tostring(step)] = v * v + end + if step > 0 then + return nextstep(m,step/2,result) + else + return result + end +end + +function lpeg.times(pattern,n) + return P(nextstep(n,2^16,{ "start", ["1"] = pattern })) +end + +-- local p = lpeg.Cs((1 - lpeg.times(lpeg.P("AB"),25))^1) +-- local s = "12" .. string.rep("AB",20) .. "34" .. string.rep("AB",30) .. "56" +-- inspect(p) +-- print(lpeg.match(p,s)) + +-- moved here (before util-str) + +local digit = R("09") +local period = P(".") +local zero = P("0") +local trailingzeros = zero^0 * -digit -- suggested by Roberto R +local case_1 = period * trailingzeros / "" +local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") +local number = digit^1 * (case_1 + case_2) +local stripper = Cs((number + 1)^0) + +lpeg.patterns.stripzeros = stripper + +-- local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100" +-- collectgarbage("collect") +-- str = string.rep(sample,10000) +-- local ts = os.clock() +-- lpegmatch(stripper,str) +-- print(#str, os.clock()-ts, lpegmatch(stripper,sample)) + diff --git a/tex/context/base/l-lua.lua b/tex/context/base/l-lua.lua index fc05afa67..486c14a5f 100644 --- a/tex/context/base/l-lua.lua +++ b/tex/context/base/l-lua.lua @@ -1,150 +1,150 @@ -if not modules then modules = { } end modules ['l-lua'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- compatibility hacksand helpers - -local major, minor = string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$") - -_MAJORVERSION = tonumber(major) or 5 -_MINORVERSION = tonumber(minor) or 1 -_LUAVERSION = _MAJORVERSION + _MINORVERSION/10 - --- lpeg - -if not lpeg then - lpeg = require("lpeg") -end - --- basics: - -if loadstring then - - local loadnormal = load - - function load(first,...) - if type(first) == "string" then - return loadstring(first,...) - else - return loadnormal(first,...) - end - end - -else - - loadstring = load - -end - --- table: - --- At some point it was announced that i[pairs would be dropped, which makes --- sense. As we already used the for loop and # in most places the impact on --- ConTeXt was not that large; the remaining ipairs already have been replaced. --- Hm, actually ipairs was retained, but we no longer use it anyway (nor --- pairs). --- --- Just in case, we provide the fallbacks as discussed in Programming --- in Lua (http://www.lua.org/pil/7.3.html): - -if not ipairs then - - -- for k, v in ipairs(t) do ... end - -- for k=1,#t do local v = t[k] ... end - - local function iterate(a,i) - i = i + 1 - local v = a[i] - if v ~= nil then - return i, v --, nil - end - end - - function ipairs(a) - return iterate, a, 0 - end - -end - -if not pairs then - - -- for k, v in pairs(t) do ... end - -- for k, v in next, t do ... 
end - - function pairs(t) - return next, t -- , nil - end - -end - --- The unpack function has been moved to the table table, and for compatiility --- reasons we provide both now. - -if not table.unpack then - - table.unpack = _G.unpack - -elseif not unpack then - - _G.unpack = table.unpack - -end - --- package: - --- if not package.seachers then --- --- package.searchers = package.loaders -- 5.2 --- --- elseif not package.loaders then --- --- package.loaders = package.searchers --- --- end - -if not package.loaders then -- brr, searchers is a special "loadlib function" userdata type - - package.loaders = package.searchers - -end - --- moved from util-deb to here: - -local print, select, tostring = print, select, tostring - -local inspectors = { } - -function setinspector(inspector) -- global function - inspectors[#inspectors+1] = inspector -end - -function inspect(...) -- global function - for s=1,select("#",...) do - local value = select(s,...) - local done = false - for i=1,#inspectors do - done = inspectors[i](value) - if done then - break - end - end - if not done then - print(tostring(value)) - end - end -end - --- - -local dummy = function() end - -function optionalrequire(...) - local ok, result = xpcall(require,dummy,...) - if ok then - return result - end -end +if not modules then modules = { } end modules ['l-lua'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- compatibility hacksand helpers + +local major, minor = string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$") + +_MAJORVERSION = tonumber(major) or 5 +_MINORVERSION = tonumber(minor) or 1 +_LUAVERSION = _MAJORVERSION + _MINORVERSION/10 + +-- lpeg + +if not lpeg then + lpeg = require("lpeg") +end + +-- basics: + +if loadstring then + + local loadnormal = load + + function load(first,...) + if type(first) == "string" then + return loadstring(first,...) + else + return loadnormal(first,...) + end + end + +else + + loadstring = load + +end + +-- table: + +-- At some point it was announced that i[pairs would be dropped, which makes +-- sense. As we already used the for loop and # in most places the impact on +-- ConTeXt was not that large; the remaining ipairs already have been replaced. +-- Hm, actually ipairs was retained, but we no longer use it anyway (nor +-- pairs). +-- +-- Just in case, we provide the fallbacks as discussed in Programming +-- in Lua (http://www.lua.org/pil/7.3.html): + +if not ipairs then + + -- for k, v in ipairs(t) do ... end + -- for k=1,#t do local v = t[k] ... end + + local function iterate(a,i) + i = i + 1 + local v = a[i] + if v ~= nil then + return i, v --, nil + end + end + + function ipairs(a) + return iterate, a, 0 + end + +end + +if not pairs then + + -- for k, v in pairs(t) do ... end + -- for k, v in next, t do ... end + + function pairs(t) + return next, t -- , nil + end + +end + +-- The unpack function has been moved to the table table, and for compatiility +-- reasons we provide both now. 
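-- A small sketch, assuming either Lua 5.1 or 5.2: after the aliasing just below, both
-- spellings of unpack are available.

local t = { 1, 2, 3 }
print(table.unpack(t))   -- 1 2 3
print(unpack(t))         -- 1 2 3 as well, thanks to the fallback below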
+ +if not table.unpack then + + table.unpack = _G.unpack + +elseif not unpack then + + _G.unpack = table.unpack + +end + +-- package: + +-- if not package.seachers then +-- +-- package.searchers = package.loaders -- 5.2 +-- +-- elseif not package.loaders then +-- +-- package.loaders = package.searchers +-- +-- end + +if not package.loaders then -- brr, searchers is a special "loadlib function" userdata type + + package.loaders = package.searchers + +end + +-- moved from util-deb to here: + +local print, select, tostring = print, select, tostring + +local inspectors = { } + +function setinspector(inspector) -- global function + inspectors[#inspectors+1] = inspector +end + +function inspect(...) -- global function + for s=1,select("#",...) do + local value = select(s,...) + local done = false + for i=1,#inspectors do + done = inspectors[i](value) + if done then + break + end + end + if not done then + print(tostring(value)) + end + end +end + +-- + +local dummy = function() end + +function optionalrequire(...) + local ok, result = xpcall(require,dummy,...) + if ok then + return result + end +end diff --git a/tex/context/base/l-math.lua b/tex/context/base/l-math.lua index 43f60b56b..fb6bbbf5d 100644 --- a/tex/context/base/l-math.lua +++ b/tex/context/base/l-math.lua @@ -1,34 +1,34 @@ -if not modules then modules = { } end modules ['l-math'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan - -if not math.round then - function math.round(x) return floor(x + 0.5) end -end - -if not math.div then - function math.div(n,m) return floor(n/m) end -end - -if not math.mod then - function math.mod(n,m) return n % m end -end - -local pipi = 2*math.pi/360 - -if not math.sind then - function math.sind(d) return sin(d*pipi) end - function math.cosd(d) return cos(d*pipi) end - function math.tand(d) return tan(d*pipi) end -end - -if not math.odd then - function math.odd (n) return n % 2 ~= 0 end - function math.even(n) return n % 2 == 0 end -end +if not modules then modules = { } end modules ['l-math'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan + +if not math.round then + function math.round(x) return floor(x + 0.5) end +end + +if not math.div then + function math.div(n,m) return floor(n/m) end +end + +if not math.mod then + function math.mod(n,m) return n % m end +end + +local pipi = 2*math.pi/360 + +if not math.sind then + function math.sind(d) return sin(d*pipi) end + function math.cosd(d) return cos(d*pipi) end + function math.tand(d) return tan(d*pipi) end +end + +if not math.odd then + function math.odd (n) return n % 2 ~= 0 end + function math.even(n) return n % 2 == 0 end +end diff --git a/tex/context/base/l-md5.lua b/tex/context/base/l-md5.lua index 8ac20a5a5..731dc3fbe 100644 --- a/tex/context/base/l-md5.lua +++ b/tex/context/base/l-md5.lua @@ -1,117 +1,117 @@ -if not modules then modules = { } end modules ['l-md5'] = { - version = 1.001, - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This also provides 
file checksums and checkers. - -if not md5 then - md5 = optionalrequire("md5") -end - -if not md5 then - md5 = { - sum = function(str) print("error: md5 is not loaded (sum ignored)") return str end, - sumhexa = function(str) print("error: md5 is not loaded (sumhexa ignored)") return str end, - } -end - -local md5, file = md5, file -local gsub, format, byte = string.gsub, string.format, string.byte -local md5sum = md5.sum - -local function convert(str,fmt) - return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end)) -end - -if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end -if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end -if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end - --- local P, Cs, lpegmatch = lpeg.P, lpeg.Cs,lpeg.match --- --- if not md5.HEX then --- local function remap(chr) return format("%02X",byte(chr)) end --- function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end --- end --- --- if not md5.hex then --- local function remap(chr) return format("%02x",byte(chr)) end --- function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end --- end --- --- if not md5.dec then --- local function remap(chr) return format("%03i",byte(chr)) end --- function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end --- end - --- if not md5.HEX then --- local pattern_HEX = Cs( ( P(1) / function(chr) return format("%02X",byte(chr)) end)^0 ) --- function md5.HEX(str) return lpegmatch(pattern_HEX,md5.sum(str)) end --- end --- --- if not md5.hex then --- local pattern_hex = Cs( ( P(1) / function(chr) return format("%02x",byte(chr)) end)^0 ) --- function md5.hex(str) return lpegmatch(pattern_hex,md5.sum(str)) end --- end --- --- if not md5.dec then --- local pattern_dec = Cs( ( P(1) / function(chr) return format("%02i",byte(chr)) end)^0 ) --- function md5.dec(str) return lpegmatch(pattern_dec,md5.sum(str)) end --- end - -function file.needsupdating(oldname,newname,threshold) -- size modification access change - local oldtime = lfs.attributes(oldname,"modification") - if oldtime then - local newtime = lfs.attributes(newname,"modification") - if not newtime then - return true -- no new file, so no updating needed - elseif newtime >= oldtime then - return false -- new file definitely needs updating - elseif oldtime - newtime < (threshold or 1) then - return false -- new file is probably still okay - else - return true -- new file has to be updated - end - else - return false -- no old file, so no updating needed - end -end - -file.needs_updating = file.needsupdating - -function file.syncmtimes(oldname,newname) - local oldtime = lfs.attributes(oldname,"modification") - if oldtime and lfs.isfile(newname) then - lfs.touch(newname,oldtime,oldtime) - end -end - -function file.checksum(name) - if md5 then - local data = io.loaddata(name) - if data then - return md5.HEX(data) - end - end - return nil -end - -function file.loadchecksum(name) - if md5 then - local data = io.loaddata(name .. ".md5") - return data and (gsub(data,"%s","")) - end - return nil -end - -function file.savechecksum(name,checksum) - if not checksum then checksum = file.checksum(name) end - if checksum then - io.savedata(name .. 
".md5",checksum) - return checksum - end - return nil -end +if not modules then modules = { } end modules ['l-md5'] = { + version = 1.001, + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This also provides file checksums and checkers. + +if not md5 then + md5 = optionalrequire("md5") +end + +if not md5 then + md5 = { + sum = function(str) print("error: md5 is not loaded (sum ignored)") return str end, + sumhexa = function(str) print("error: md5 is not loaded (sumhexa ignored)") return str end, + } +end + +local md5, file = md5, file +local gsub, format, byte = string.gsub, string.format, string.byte +local md5sum = md5.sum + +local function convert(str,fmt) + return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end)) +end + +if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end +if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end +if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end + +-- local P, Cs, lpegmatch = lpeg.P, lpeg.Cs,lpeg.match +-- +-- if not md5.HEX then +-- local function remap(chr) return format("%02X",byte(chr)) end +-- function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end +-- end +-- +-- if not md5.hex then +-- local function remap(chr) return format("%02x",byte(chr)) end +-- function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end +-- end +-- +-- if not md5.dec then +-- local function remap(chr) return format("%03i",byte(chr)) end +-- function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end +-- end + +-- if not md5.HEX then +-- local pattern_HEX = Cs( ( P(1) / function(chr) return format("%02X",byte(chr)) end)^0 ) +-- function md5.HEX(str) return lpegmatch(pattern_HEX,md5.sum(str)) end +-- end +-- +-- if not md5.hex then +-- local pattern_hex = Cs( ( P(1) / function(chr) return format("%02x",byte(chr)) end)^0 ) +-- function md5.hex(str) return lpegmatch(pattern_hex,md5.sum(str)) end +-- end +-- +-- if not md5.dec then +-- local pattern_dec = Cs( ( P(1) / function(chr) return format("%02i",byte(chr)) end)^0 ) +-- function md5.dec(str) return lpegmatch(pattern_dec,md5.sum(str)) end +-- end + +function file.needsupdating(oldname,newname,threshold) -- size modification access change + local oldtime = lfs.attributes(oldname,"modification") + if oldtime then + local newtime = lfs.attributes(newname,"modification") + if not newtime then + return true -- no new file, so no updating needed + elseif newtime >= oldtime then + return false -- new file definitely needs updating + elseif oldtime - newtime < (threshold or 1) then + return false -- new file is probably still okay + else + return true -- new file has to be updated + end + else + return false -- no old file, so no updating needed + end +end + +file.needs_updating = file.needsupdating + +function file.syncmtimes(oldname,newname) + local oldtime = lfs.attributes(oldname,"modification") + if oldtime and lfs.isfile(newname) then + lfs.touch(newname,oldtime,oldtime) + end +end + +function file.checksum(name) + if md5 then + local data = io.loaddata(name) + if data then + return md5.HEX(data) + end + end + return nil +end + +function file.loadchecksum(name) + if md5 then + local data = io.loaddata(name .. 
".md5") + return data and (gsub(data,"%s","")) + end + return nil +end + +function file.savechecksum(name,checksum) + if not checksum then checksum = file.checksum(name) end + if checksum then + io.savedata(name .. ".md5",checksum) + return checksum + end + return nil +end diff --git a/tex/context/base/l-number.lua b/tex/context/base/l-number.lua index 001ca31f7..7db82173c 100644 --- a/tex/context/base/l-number.lua +++ b/tex/context/base/l-number.lua @@ -1,207 +1,207 @@ -if not modules then modules = { } end modules ['l-number'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this module will be replaced when we have the bit library .. the number based sets --- might go away - -local tostring, tonumber = tostring, tonumber -local format, floor, match, rep = string.format, math.floor, string.match, string.rep -local concat, insert = table.concat, table.insert -local lpegmatch = lpeg.match - -number = number or { } -local number = number - -if bit32 then -- I wonder if this is faster - - local btest, bor = bit32.btest, bit32.bor - - function number.bit(p) - return 2 ^ (p - 1) -- 1-based indexing - end - - number.hasbit = btest - number.setbit = bor - - function number.setbit(x,p) -- why not bor? - return btest(x,p) and x or x + p - end - - function number.clearbit(x,p) - return btest(x,p) and x - p or x - end - -else - - -- http://ricilake.blogspot.com/2007/10/iterating-bits-in-lua.html - - function number.bit(p) - return 2 ^ (p - 1) -- 1-based indexing - end - - function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ... - return x % (p + p) >= p - end - - function number.setbit(x, p) - return (x % (p + p) >= p) and x or x + p - end - - function number.clearbit(x, p) - return (x % (p + p) >= p) and x - p or x - end - -end - --- print(number.tobitstring(8)) --- print(number.tobitstring(14)) --- print(number.tobitstring(66)) --- print(number.tobitstring(0x00)) --- print(number.tobitstring(0xFF)) --- print(number.tobitstring(46260767936,4)) - -if bit32 then - - local bextract = bit32.extract - - local t = { - "0", "0", "0", "0", "0", "0", "0", "0", - "0", "0", "0", "0", "0", "0", "0", "0", - "0", "0", "0", "0", "0", "0", "0", "0", - "0", "0", "0", "0", "0", "0", "0", "0", - } - - function number.tobitstring(b,m) - -- if really needed we can speed this one up - -- because small numbers need less extraction - local n = 32 - for i=0,31 do - local v = bextract(b,i) - local k = 32 - i - if v == 1 then - n = k - t[k] = "1" - else - t[k] = "0" - end - end - if m then - m = 33 - m * 8 - if m < 1 then - m = 1 - end - return concat(t,"",m) - elseif n < 8 then - return concat(t) - elseif n < 16 then - return concat(t,"",9) - elseif n < 24 then - return concat(t,"",17) - else - return concat(t,"",25) - end - end - -else - - function number.tobitstring(n,m) - if n > 0 then - local t = { } - while n > 0 do - insert(t,1,n % 2 > 0 and 1 or 0) - n = floor(n/2) - end - local nn = 8 - #t % 8 - if nn > 0 and nn < 8 then - for i=1,nn do - insert(t,1,0) - end - end - if m then - m = m * 8 - #t - if m > 0 then - insert(t,1,rep("0",m)) - end - end - return concat(t) - elseif m then - rep("00000000",m) - else - return "00000000" - end - end - -end - -function number.valid(str,default) - return tonumber(str) or default or nil -end - -function number.toevenhex(n) - local s = format("%X",n) - if #s % 2 == 0 then - return s - else - 
return "0" .. s - end -end - --- a,b,c,d,e,f = number.toset(100101) --- --- function number.toset(n) --- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") --- end --- --- -- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5% --- -- on --- --- for i=1,1000000 do --- local a,b,c,d,e,f,g,h = number.toset(12345678) --- local a,b,c,d = number.toset(1234) --- local a,b,c = number.toset(123) --- local a,b,c = number.toset("123") --- end - -local one = lpeg.C(1-lpeg.S('')/tonumber)^1 - -function number.toset(n) - return lpegmatch(one,tostring(n)) -end - --- function number.bits(n,zero) --- local t, i = { }, (zero and 0) or 1 --- while n > 0 do --- local m = n % 2 --- if m > 0 then --- insert(t,1,i) --- end --- n = floor(n/2) --- i = i + 1 --- end --- return t --- end --- --- -- a bit faster - -local function bits(n,i,...) - if n > 0 then - local m = n % 2 - local n = floor(n/2) - if m > 0 then - return bits(n, i+1, i, ...) - else - return bits(n, i+1, ...) - end - else - return ... - end -end - -function number.bits(n) - return { bits(n,1) } -end +if not modules then modules = { } end modules ['l-number'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this module will be replaced when we have the bit library .. the number based sets +-- might go away + +local tostring, tonumber = tostring, tonumber +local format, floor, match, rep = string.format, math.floor, string.match, string.rep +local concat, insert = table.concat, table.insert +local lpegmatch = lpeg.match + +number = number or { } +local number = number + +if bit32 then -- I wonder if this is faster + + local btest, bor = bit32.btest, bit32.bor + + function number.bit(p) + return 2 ^ (p - 1) -- 1-based indexing + end + + number.hasbit = btest + number.setbit = bor + + function number.setbit(x,p) -- why not bor? + return btest(x,p) and x or x + p + end + + function number.clearbit(x,p) + return btest(x,p) and x - p or x + end + +else + + -- http://ricilake.blogspot.com/2007/10/iterating-bits-in-lua.html + + function number.bit(p) + return 2 ^ (p - 1) -- 1-based indexing + end + + function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ... 
+ return x % (p + p) >= p + end + + function number.setbit(x, p) + return (x % (p + p) >= p) and x or x + p + end + + function number.clearbit(x, p) + return (x % (p + p) >= p) and x - p or x + end + +end + +-- print(number.tobitstring(8)) +-- print(number.tobitstring(14)) +-- print(number.tobitstring(66)) +-- print(number.tobitstring(0x00)) +-- print(number.tobitstring(0xFF)) +-- print(number.tobitstring(46260767936,4)) + +if bit32 then + + local bextract = bit32.extract + + local t = { + "0", "0", "0", "0", "0", "0", "0", "0", + "0", "0", "0", "0", "0", "0", "0", "0", + "0", "0", "0", "0", "0", "0", "0", "0", + "0", "0", "0", "0", "0", "0", "0", "0", + } + + function number.tobitstring(b,m) + -- if really needed we can speed this one up + -- because small numbers need less extraction + local n = 32 + for i=0,31 do + local v = bextract(b,i) + local k = 32 - i + if v == 1 then + n = k + t[k] = "1" + else + t[k] = "0" + end + end + if m then + m = 33 - m * 8 + if m < 1 then + m = 1 + end + return concat(t,"",m) + elseif n < 8 then + return concat(t) + elseif n < 16 then + return concat(t,"",9) + elseif n < 24 then + return concat(t,"",17) + else + return concat(t,"",25) + end + end + +else + + function number.tobitstring(n,m) + if n > 0 then + local t = { } + while n > 0 do + insert(t,1,n % 2 > 0 and 1 or 0) + n = floor(n/2) + end + local nn = 8 - #t % 8 + if nn > 0 and nn < 8 then + for i=1,nn do + insert(t,1,0) + end + end + if m then + m = m * 8 - #t + if m > 0 then + insert(t,1,rep("0",m)) + end + end + return concat(t) + elseif m then + rep("00000000",m) + else + return "00000000" + end + end + +end + +function number.valid(str,default) + return tonumber(str) or default or nil +end + +function number.toevenhex(n) + local s = format("%X",n) + if #s % 2 == 0 then + return s + else + return "0" .. s + end +end + +-- a,b,c,d,e,f = number.toset(100101) +-- +-- function number.toset(n) +-- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") +-- end +-- +-- -- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5% +-- -- on +-- +-- for i=1,1000000 do +-- local a,b,c,d,e,f,g,h = number.toset(12345678) +-- local a,b,c,d = number.toset(1234) +-- local a,b,c = number.toset(123) +-- local a,b,c = number.toset("123") +-- end + +local one = lpeg.C(1-lpeg.S('')/tonumber)^1 + +function number.toset(n) + return lpegmatch(one,tostring(n)) +end + +-- function number.bits(n,zero) +-- local t, i = { }, (zero and 0) or 1 +-- while n > 0 do +-- local m = n % 2 +-- if m > 0 then +-- insert(t,1,i) +-- end +-- n = floor(n/2) +-- i = i + 1 +-- end +-- return t +-- end +-- +-- -- a bit faster + +local function bits(n,i,...) + if n > 0 then + local m = n % 2 + local n = floor(n/2) + if m > 0 then + return bits(n, i+1, i, ...) + else + return bits(n, i+1, ...) + end + else + return ... + end +end + +function number.bits(n) + return { bits(n,1) } +end diff --git a/tex/context/base/l-os.lua b/tex/context/base/l-os.lua index 05ca0acdc..6b9ae12f9 100644 --- a/tex/context/base/l-os.lua +++ b/tex/context/base/l-os.lua @@ -1,474 +1,474 @@ -if not modules then modules = { } end modules ['l-os'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This file deals with some operating system issues. Please don't bother me --- with the pros and cons of operating systems as they all have their flaws --- and benefits. 
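-- [Editorial sketch, not taken from the patch itself.] The l-number.lua hunk above
-- (re)defines a handful of 1-based bit helpers; assuming those helpers are loaded,
-- as they are in a normal ConTeXt/mtxrun run, they behave like this:

local p3 = number.bit(3)                      -- 4, i.e. 2^(3-1)
local x  = number.setbit(0, p3)               -- 4
print(number.hasbit(x, p3))                   -- true
print(number.clearbit(x, p3))                 -- 0
print(number.tobitstring(8))                  -- "00001000", padded to whole bytes
print(number.toevenhex(255), number.toevenhex(4095))   -- FF   0FFF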
Bashing one of them won't help solving problems and fixing --- bugs faster and is a waste of time and energy. --- --- path separators: / or \ ... we can use / everywhere --- suffixes : dll so exe ... no big deal --- quotes : we can use "" in most cases --- expansion : unless "" are used * might give side effects --- piping/threads : somewhat different for each os --- locations : specific user file locations and settings can change over time --- --- os.type : windows | unix (new, we already guessed os.platform) --- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new) --- os.platform : extended os.name with architecture - --- os.sleep() => socket.sleep() --- math.randomseed(tonumber(string.sub(string.reverse(tostring(math.floor(socket.gettime()*10000))),1,6))) - --- maybe build io.flush in os.execute - -local os = os -local date, time = os.date, os.time -local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch -local concat = table.concat -local random, ceil, randomseed = math.random, math.ceil, math.randomseed -local rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring = rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring - --- The following code permits traversing the environment table, at least --- in luatex. Internally all environment names are uppercase. - --- The randomseed in Lua is not that random, although this depends on the operating system as well --- as the binary (Luatex is normally okay). But to be sure we set the seed anyway. - -math.initialseed = tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6)) - -randomseed(math.initialseed) - -if not os.__getenv__ then - - os.__getenv__ = os.getenv - os.__setenv__ = os.setenv - - if os.env then - - local osgetenv = os.getenv - local ossetenv = os.setenv - local osenv = os.env local _ = osenv.PATH -- initialize the table - - function os.setenv(k,v) - if v == nil then - v = "" - end - local K = upper(k) - osenv[K] = v - if type(v) == "table" then - v = concat(v,";") -- path - end - ossetenv(K,v) - end - - function os.getenv(k) - local K = upper(k) - local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k) - if v == "" then - return nil - else - return v - end - end - - else - - local ossetenv = os.setenv - local osgetenv = os.getenv - local osenv = { } - - function os.setenv(k,v) - if v == nil then - v = "" - end - local K = upper(k) - osenv[K] = v - end - - function os.getenv(k) - local K = upper(k) - local v = osenv[K] or osgetenv(K) or osgetenv(k) - if v == "" then - return nil - else - return v - end - end - - local function __index(t,k) - return os.getenv(k) - end - local function __newindex(t,k,v) - os.setenv(k,v) - end - - os.env = { } - - setmetatable(os.env, { __index = __index, __newindex = __newindex } ) - - end - -end - --- end of environment hack - -local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush - -function os.execute(...) ioflush() return execute(...) end -function os.spawn (...) ioflush() return spawn (...) end -function os.exec (...) ioflush() return exec (...) end -function io.popen (...) ioflush() return iopopen(...) 
end - -function os.resultof(command) - local handle = io.popen(command,"r") - return handle and handle:read("*all") or "" -end - -if not io.fileseparator then - if find(os.getenv("PATH"),";") then - io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin" - else - io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix" - end -end - -os.type = os.type or (io.pathseparator == ";" and "windows") or "unix" -os.name = os.name or (os.type == "windows" and "mswin" ) or "linux" - -if os.type == "windows" then - os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' } -else - os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' } -end - -local launchers = { - windows = "start %s", - macosx = "open %s", - unix = "$BROWSER %s &> /dev/null &", -} - -function os.launch(str) - os.execute(format(launchers[os.name] or launchers.unix,str)) -end - -if not os.times then -- ? - -- utime = user time - -- stime = system time - -- cutime = children user time - -- cstime = children system time - function os.times() - return { - utime = os.gettimeofday(), -- user - stime = 0, -- system - cutime = 0, -- children user - cstime = 0, -- children system - } - end -end - -os.gettimeofday = os.gettimeofday or os.clock - -local startuptime = os.gettimeofday() - -function os.runtime() - return os.gettimeofday() - startuptime -end - ---~ print(os.gettimeofday()-os.time()) ---~ os.sleep(1.234) ---~ print (">>",os.runtime()) ---~ print(os.date("%H:%M:%S",os.gettimeofday())) ---~ print(os.date("%H:%M:%S",os.time())) - --- no need for function anymore as we have more clever code and helpers now --- this metatable trickery might as well disappear - -os.resolvers = os.resolvers or { } -- will become private - -local resolvers = os.resolvers - -setmetatable(os, { __index = function(t,k) - local r = resolvers[k] - return r and r(t,k) or nil -- no memoize -end }) - --- we can use HOSTTYPE on some platforms - -local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or "" - -local function guess() - local architecture = os.resultof("uname -m") or "" - if architecture ~= "" then - return architecture - end - architecture = os.getenv("HOSTTYPE") or "" - if architecture ~= "" then - return architecture - end - return os.resultof("echo $HOSTTYPE") or "" -end - -if platform ~= "" then - - os.platform = platform - -elseif os.type == "windows" then - - -- we could set the variable directly, no function needed here - - function os.resolvers.platform(t,k) - local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or "" - if find(architecture,"AMD64") then - platform = "mswin-64" - else - platform = "mswin" - end - os.setenv("MTX_PLATFORM",platform) - os.platform = platform - return platform - end - -elseif name == "linux" then - - function os.resolvers.platform(t,k) - -- we sometimes have HOSTTYPE set so let's check that first - local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" - if find(architecture,"x86_64") then - platform = "linux-64" - elseif find(architecture,"ppc") then - platform = "linux-ppc" - else - platform = "linux" - end - os.setenv("MTX_PLATFORM",platform) - os.platform = platform - return platform - end - -elseif name == "macosx" then - - --[[ - Identifying the architecture of OSX is quite a mess and this - is the best we can come up with. For some reason $HOSTTYPE is - a kind of pseudo environment variable, not known to the current - environment. 
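-- [Editorial sketch, not taken from the patch itself; the variable name is made up.]
-- The environment overlay and the lazy resolver table shown in this l-os.lua hunk
-- can be exercised as follows, assuming the l-os helpers are loaded as in a
-- ConTeXt/mtxrun run:

os.setenv("MTX_DEMO_VALUE", "test")   -- keys are uppercased internally
print(os.getenv("mtx_demo_value"))    -- "test": the lookup tries both spellings
print(os.env.MTX_DEMO_VALUE)          -- same value, through the os.env wrapper
print(os.platform)                    -- resolved once via os.resolvers.platform when
                                      -- MTX_PLATFORM is not preset, e.g. "linux-64"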
And yes, uname cannot be trusted either, so there - is a change that you end up with a 32 bit run on a 64 bit system. - Also, some proper 64 bit intel macs are too cheap (low-end) and - therefore not permitted to run the 64 bit kernel. - ]]-- - - function os.resolvers.platform(t,k) - -- local platform, architecture = "", os.getenv("HOSTTYPE") or "" - -- if architecture == "" then - -- architecture = os.resultof("echo $HOSTTYPE") or "" - -- end - local platform, architecture = "", os.resultof("echo $HOSTTYPE") or "" - if architecture == "" then - -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n") - platform = "osx-intel" - elseif find(architecture,"i386") then - platform = "osx-intel" - elseif find(architecture,"x86_64") then - platform = "osx-64" - else - platform = "osx-ppc" - end - os.setenv("MTX_PLATFORM",platform) - os.platform = platform - return platform - end - -elseif name == "sunos" then - - function os.resolvers.platform(t,k) - local platform, architecture = "", os.resultof("uname -m") or "" - if find(architecture,"sparc") then - platform = "solaris-sparc" - else -- if architecture == 'i86pc' - platform = "solaris-intel" - end - os.setenv("MTX_PLATFORM",platform) - os.platform = platform - return platform - end - -elseif name == "freebsd" then - - function os.resolvers.platform(t,k) - local platform, architecture = "", os.resultof("uname -m") or "" - if find(architecture,"amd64") then - platform = "freebsd-amd64" - else - platform = "freebsd" - end - os.setenv("MTX_PLATFORM",platform) - os.platform = platform - return platform - end - -elseif name == "kfreebsd" then - - function os.resolvers.platform(t,k) - -- we sometimes have HOSTTYPE set so let's check that first - local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" - if find(architecture,"x86_64") then - platform = "kfreebsd-amd64" - else - platform = "kfreebsd-i386" - end - os.setenv("MTX_PLATFORM",platform) - os.platform = platform - return platform - end - -else - - -- platform = "linux" - -- os.setenv("MTX_PLATFORM",platform) - -- os.platform = platform - - function os.resolvers.platform(t,k) - local platform = "linux" - os.setenv("MTX_PLATFORM",platform) - os.platform = platform - return platform - end - -end - --- beware, we set the randomseed - --- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the --- version number as well as two reserved bits. All other bits are set using a random or pseudorandom --- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal --- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479. 
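-- [Editorial sketch, not taken from the patch itself.] In use, the version 4 scheme
-- described in the comment above boils down to this (the value is random, only the
-- shape is fixed; assumes the l-os helpers are loaded):

local id = os.uuid()   -- e.g. "f47ac10b-58cc-4372-a567-0e02b2c3d479"-shaped
print(#id)             -- 36: hex groups of 8-4-4-4-12 plus four dashes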
--- --- as we don't call this function too often there is not so much risk on repetition - -local t = { 8, 9, "a", "b" } - -function os.uuid() - return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x", - random(0xFFFF),random(0xFFFF), - random(0x0FFF), - t[ceil(random(4))] or 8,random(0x0FFF), - random(0xFFFF), - random(0xFFFF),random(0xFFFF),random(0xFFFF) - ) -end - -local d - -function os.timezone(delta) - d = d or tonumber(tonumber(date("%H")-date("!%H"))) - if delta then - if d > 0 then - return format("+%02i:00",d) - else - return format("-%02i:00",-d) - end - else - return 1 - end -end - -local timeformat = format("%%s%s",os.timezone(true)) -local dateformat = "!%Y-%m-%d %H:%M:%S" - -function os.fulltime(t,default) - t = tonumber(t) or 0 - if t > 0 then - -- valid time - elseif default then - return default - else - t = nil - end - return format(timeformat,date(dateformat,t)) -end - -local dateformat = "%Y-%m-%d %H:%M:%S" - -function os.localtime(t,default) - t = tonumber(t) or 0 - if t > 0 then - -- valid time - elseif default then - return default - else - t = nil - end - return date(dateformat,t) -end - -function os.converttime(t,default) - local t = tonumber(t) - if t and t > 0 then - return date(dateformat,t) - else - return default or "-" - end -end - -local memory = { } - -local function which(filename) - local fullname = memory[filename] - if fullname == nil then - local suffix = file.suffix(filename) - local suffixes = suffix == "" and os.binsuffixes or { suffix } - for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do - local df = file.join(directory,filename) - for i=1,#suffixes do - local dfs = file.addsuffix(df,suffixes[i]) - if io.exists(dfs) then - fullname = dfs - break - end - end - end - if not fullname then - fullname = false - end - memory[filename] = fullname - end - return fullname -end - -os.which = which -os.where = which - -function os.today() - return date("!*t") -- table with values -end - -function os.now() - return date("!%Y-%m-%d %H:%M:%S") -- 2011-12-04 14:59:12 -end - --- if not os.sleep and socket then --- os.sleep = socket.sleep --- end - -if not os.sleep then - local socket = socket - function os.sleep(n) - if not socket then - -- so we delay ... if os.sleep is really needed then one should also - -- be sure that socket can be found - socket = require("socket") - end - socket.sleep(n) - end -end - --- print(os.which("inkscape.exe")) --- print(os.which("inkscape")) --- print(os.which("gs.exe")) --- print(os.which("ps2pdf")) +if not modules then modules = { } end modules ['l-os'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This file deals with some operating system issues. Please don't bother me +-- with the pros and cons of operating systems as they all have their flaws +-- and benefits. Bashing one of them won't help solving problems and fixing +-- bugs faster and is a waste of time and energy. +-- +-- path separators: / or \ ... we can use / everywhere +-- suffixes : dll so exe ... 
no big deal +-- quotes : we can use "" in most cases +-- expansion : unless "" are used * might give side effects +-- piping/threads : somewhat different for each os +-- locations : specific user file locations and settings can change over time +-- +-- os.type : windows | unix (new, we already guessed os.platform) +-- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new) +-- os.platform : extended os.name with architecture + +-- os.sleep() => socket.sleep() +-- math.randomseed(tonumber(string.sub(string.reverse(tostring(math.floor(socket.gettime()*10000))),1,6))) + +-- maybe build io.flush in os.execute + +local os = os +local date, time = os.date, os.time +local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch +local concat = table.concat +local random, ceil, randomseed = math.random, math.ceil, math.randomseed +local rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring = rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring + +-- The following code permits traversing the environment table, at least +-- in luatex. Internally all environment names are uppercase. + +-- The randomseed in Lua is not that random, although this depends on the operating system as well +-- as the binary (Luatex is normally okay). But to be sure we set the seed anyway. + +math.initialseed = tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6)) + +randomseed(math.initialseed) + +if not os.__getenv__ then + + os.__getenv__ = os.getenv + os.__setenv__ = os.setenv + + if os.env then + + local osgetenv = os.getenv + local ossetenv = os.setenv + local osenv = os.env local _ = osenv.PATH -- initialize the table + + function os.setenv(k,v) + if v == nil then + v = "" + end + local K = upper(k) + osenv[K] = v + if type(v) == "table" then + v = concat(v,";") -- path + end + ossetenv(K,v) + end + + function os.getenv(k) + local K = upper(k) + local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k) + if v == "" then + return nil + else + return v + end + end + + else + + local ossetenv = os.setenv + local osgetenv = os.getenv + local osenv = { } + + function os.setenv(k,v) + if v == nil then + v = "" + end + local K = upper(k) + osenv[K] = v + end + + function os.getenv(k) + local K = upper(k) + local v = osenv[K] or osgetenv(K) or osgetenv(k) + if v == "" then + return nil + else + return v + end + end + + local function __index(t,k) + return os.getenv(k) + end + local function __newindex(t,k,v) + os.setenv(k,v) + end + + os.env = { } + + setmetatable(os.env, { __index = __index, __newindex = __newindex } ) + + end + +end + +-- end of environment hack + +local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush + +function os.execute(...) ioflush() return execute(...) end +function os.spawn (...) ioflush() return spawn (...) end +function os.exec (...) ioflush() return exec (...) end +function io.popen (...) ioflush() return iopopen(...) 
end + +function os.resultof(command) + local handle = io.popen(command,"r") + return handle and handle:read("*all") or "" +end + +if not io.fileseparator then + if find(os.getenv("PATH"),";") then + io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin" + else + io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix" + end +end + +os.type = os.type or (io.pathseparator == ";" and "windows") or "unix" +os.name = os.name or (os.type == "windows" and "mswin" ) or "linux" + +if os.type == "windows" then + os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' } +else + os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' } +end + +local launchers = { + windows = "start %s", + macosx = "open %s", + unix = "$BROWSER %s &> /dev/null &", +} + +function os.launch(str) + os.execute(format(launchers[os.name] or launchers.unix,str)) +end + +if not os.times then -- ? + -- utime = user time + -- stime = system time + -- cutime = children user time + -- cstime = children system time + function os.times() + return { + utime = os.gettimeofday(), -- user + stime = 0, -- system + cutime = 0, -- children user + cstime = 0, -- children system + } + end +end + +os.gettimeofday = os.gettimeofday or os.clock + +local startuptime = os.gettimeofday() + +function os.runtime() + return os.gettimeofday() - startuptime +end + +--~ print(os.gettimeofday()-os.time()) +--~ os.sleep(1.234) +--~ print (">>",os.runtime()) +--~ print(os.date("%H:%M:%S",os.gettimeofday())) +--~ print(os.date("%H:%M:%S",os.time())) + +-- no need for function anymore as we have more clever code and helpers now +-- this metatable trickery might as well disappear + +os.resolvers = os.resolvers or { } -- will become private + +local resolvers = os.resolvers + +setmetatable(os, { __index = function(t,k) + local r = resolvers[k] + return r and r(t,k) or nil -- no memoize +end }) + +-- we can use HOSTTYPE on some platforms + +local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or "" + +local function guess() + local architecture = os.resultof("uname -m") or "" + if architecture ~= "" then + return architecture + end + architecture = os.getenv("HOSTTYPE") or "" + if architecture ~= "" then + return architecture + end + return os.resultof("echo $HOSTTYPE") or "" +end + +if platform ~= "" then + + os.platform = platform + +elseif os.type == "windows" then + + -- we could set the variable directly, no function needed here + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or "" + if find(architecture,"AMD64") then + platform = "mswin-64" + else + platform = "mswin" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "linux" then + + function os.resolvers.platform(t,k) + -- we sometimes have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "linux-64" + elseif find(architecture,"ppc") then + platform = "linux-ppc" + else + platform = "linux" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "macosx" then + + --[[ + Identifying the architecture of OSX is quite a mess and this + is the best we can come up with. For some reason $HOSTTYPE is + a kind of pseudo environment variable, not known to the current + environment. 
And yes, uname cannot be trusted either, so there + is a change that you end up with a 32 bit run on a 64 bit system. + Also, some proper 64 bit intel macs are too cheap (low-end) and + therefore not permitted to run the 64 bit kernel. + ]]-- + + function os.resolvers.platform(t,k) + -- local platform, architecture = "", os.getenv("HOSTTYPE") or "" + -- if architecture == "" then + -- architecture = os.resultof("echo $HOSTTYPE") or "" + -- end + local platform, architecture = "", os.resultof("echo $HOSTTYPE") or "" + if architecture == "" then + -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n") + platform = "osx-intel" + elseif find(architecture,"i386") then + platform = "osx-intel" + elseif find(architecture,"x86_64") then + platform = "osx-64" + else + platform = "osx-ppc" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "sunos" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"sparc") then + platform = "solaris-sparc" + else -- if architecture == 'i86pc' + platform = "solaris-intel" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "freebsd" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"amd64") then + platform = "freebsd-amd64" + else + platform = "freebsd" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "kfreebsd" then + + function os.resolvers.platform(t,k) + -- we sometimes have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "kfreebsd-amd64" + else + platform = "kfreebsd-i386" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +else + + -- platform = "linux" + -- os.setenv("MTX_PLATFORM",platform) + -- os.platform = platform + + function os.resolvers.platform(t,k) + local platform = "linux" + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +end + +-- beware, we set the randomseed + +-- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the +-- version number as well as two reserved bits. All other bits are set using a random or pseudorandom +-- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal +-- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479. 
+-- +-- as we don't call this function too often there is not so much risk on repetition + +local t = { 8, 9, "a", "b" } + +function os.uuid() + return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x", + random(0xFFFF),random(0xFFFF), + random(0x0FFF), + t[ceil(random(4))] or 8,random(0x0FFF), + random(0xFFFF), + random(0xFFFF),random(0xFFFF),random(0xFFFF) + ) +end + +local d + +function os.timezone(delta) + d = d or tonumber(tonumber(date("%H")-date("!%H"))) + if delta then + if d > 0 then + return format("+%02i:00",d) + else + return format("-%02i:00",-d) + end + else + return 1 + end +end + +local timeformat = format("%%s%s",os.timezone(true)) +local dateformat = "!%Y-%m-%d %H:%M:%S" + +function os.fulltime(t,default) + t = tonumber(t) or 0 + if t > 0 then + -- valid time + elseif default then + return default + else + t = nil + end + return format(timeformat,date(dateformat,t)) +end + +local dateformat = "%Y-%m-%d %H:%M:%S" + +function os.localtime(t,default) + t = tonumber(t) or 0 + if t > 0 then + -- valid time + elseif default then + return default + else + t = nil + end + return date(dateformat,t) +end + +function os.converttime(t,default) + local t = tonumber(t) + if t and t > 0 then + return date(dateformat,t) + else + return default or "-" + end +end + +local memory = { } + +local function which(filename) + local fullname = memory[filename] + if fullname == nil then + local suffix = file.suffix(filename) + local suffixes = suffix == "" and os.binsuffixes or { suffix } + for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do + local df = file.join(directory,filename) + for i=1,#suffixes do + local dfs = file.addsuffix(df,suffixes[i]) + if io.exists(dfs) then + fullname = dfs + break + end + end + end + if not fullname then + fullname = false + end + memory[filename] = fullname + end + return fullname +end + +os.which = which +os.where = which + +function os.today() + return date("!*t") -- table with values +end + +function os.now() + return date("!%Y-%m-%d %H:%M:%S") -- 2011-12-04 14:59:12 +end + +-- if not os.sleep and socket then +-- os.sleep = socket.sleep +-- end + +if not os.sleep then + local socket = socket + function os.sleep(n) + if not socket then + -- so we delay ... if os.sleep is really needed then one should also + -- be sure that socket can be found + socket = require("socket") + end + socket.sleep(n) + end +end + +-- print(os.which("inkscape.exe")) +-- print(os.which("inkscape")) +-- print(os.which("gs.exe")) +-- print(os.which("ps2pdf")) diff --git a/tex/context/base/l-package.lua b/tex/context/base/l-package.lua index 579fd3941..51da9f25d 100644 --- a/tex/context/base/l-package.lua +++ b/tex/context/base/l-package.lua @@ -1,340 +1,340 @@ -if not modules then modules = { } end modules ['l-package'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Code moved from data-lua and changed into a plug-in. - --- We overload the regular loader. We do so because we operate mostly in --- tds and use our own loader code. Alternatively we could use a more --- extensive definition of package.path and package.cpath but even then --- we're not done. Also, we now have better tracing. 
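-- [Editorial sketch, not taken from the patch itself; "pdftoppm" merely stands in
-- for any program expected on PATH.] A few of the l-os.lua helpers replaced above,
-- assuming they are loaded as in a ConTeXt/mtxrun run:

print(os.resultof("uname -m"))   -- captured stdout of a command (or "" when it fails)
print(os.which("pdftoppm"))      -- full path when found on PATH, false otherwise
print(os.timezone(true))         -- e.g. "+02:00", the local offset against UTC
print(os.fulltime(os.time()))    -- UTC time with that offset appended,
                                 -- "YYYY-MM-DD HH:MM:SS+02:00" style
print(os.runtime())              -- seconds since this Lua instance started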
--- --- -- local mylib = require("libtest") --- -- local mysql = require("luasql.mysql") - -local type = type -local gsub, format = string.gsub, string.format - -local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match - -local package = package -local searchers = package.searchers or package.loaders - --- dummies - -local filejoin = file and file.join or function(path,name) return path .. "/" .. name end -local isreadable = file and file.is_readable or function(name) local f = io.open(name) if f then f:close() return true end end -local addsuffix = file and file.addsuffix or function(name,suffix) return name .. "." .. suffix end - --- local separator, concatinator, placeholder, pathofexecutable, ignorebefore = string.match(package.config,"(.-)\n(.-)\n(.-)\n(.-)\n(.-)\n") --- --- local config = { --- separator = separator, -- \ or / --- concatinator = concatinator, -- ; --- placeholder = placeholder, -- ? becomes name --- pathofexecutable = pathofexecutable, -- ! becomes executables dir (on windows) --- ignorebefore = ignorebefore, -- - remove all before this when making lua_open --- } - -local function cleanpath(path) -- hm, don't we have a helper for this? - return path -end - -local pattern = Cs((((1-S("\\/"))^0 * (S("\\/")^1/"/"))^0 * (P(".")^1/"/"+P(1))^1) * -1) - -local function lualibfile(name) - return lpegmatch(pattern,name) or name -end - -local offset = luarocks and 1 or 0 -- todo: also check other extras - -local helpers = package.helpers or { - cleanpath = cleanpath, - lualibfile = lualibfile, - trace = false, - report = function(...) print(format(...)) end, - builtin = { - ["preload table"] = searchers[1+offset], -- special case, built-in libs - ["path specification"] = searchers[2+offset], - ["cpath specification"] = searchers[3+offset], - ["all in one fallback"] = searchers[4+offset], -- special case, combined libs - }, - methods = { - }, - sequence = { - "already loaded", - "preload table", - "lua extra list", - "lib extra list", - "path specification", - "cpath specification", - "all in one fallback", - "not loaded", - } -} - -package.helpers = helpers - -local methods = helpers.methods -local builtin = helpers.builtin - --- extra tds/ctx paths ... 
a bit of overhead for efficient tracing - -local extraluapaths = { } -local extralibpaths = { } -local luapaths = nil -- delayed -local libpaths = nil -- delayed -local oldluapath = nil -local oldlibpath = nil - -local nofextralua = -1 -local nofextralib = -1 -local nofpathlua = -1 -local nofpathlib = -1 - -local function listpaths(what,paths) - local nofpaths = #paths - if nofpaths > 0 then - for i=1,nofpaths do - helpers.report("using %s path %i: %s",what,i,paths[i]) - end - else - helpers.report("no %s paths defined",what) - end - return nofpaths -end - -local function getextraluapaths() - if helpers.trace and #extraluapaths ~= nofextralua then - nofextralua = listpaths("extra lua",extraluapaths) - end - return extraluapaths -end - -local function getextralibpaths() - if helpers.trace and #extralibpaths ~= nofextralib then - nofextralib = listpaths("extra lib",extralibpaths) - end - return extralibpaths -end - -local function getluapaths() - local luapath = package.path or "" - if oldluapath ~= luapath then - luapaths = file.splitpath(luapath,";") - oldluapath = luapath - nofpathlua = -1 - end - if helpers.trace and #luapaths ~= nofpathlua then - nofpathlua = listpaths("builtin lua",luapaths) - end - return luapaths -end - -local function getlibpaths() - local libpath = package.cpath or "" - if oldlibpath ~= libpath then - libpaths = file.splitpath(libpath,";") - oldlibpath = libpath - nofpathlib = -1 - end - if helpers.trace and #libpaths ~= nofpathlib then - nofpathlib = listpaths("builtin lib",libpaths) - end - return libpaths -end - -package.luapaths = getluapaths -package.libpaths = getlibpaths -package.extraluapaths = getextraluapaths -package.extralibpaths = getextralibpaths - -local hashes = { - lua = { }, - lib = { }, -} - -local function registerpath(tag,what,target,...) - local pathlist = { ... } - local cleanpath = helpers.cleanpath - local trace = helpers.trace - local report = helpers.report - local hash = hashes[what] - -- - local function add(path) - local path = cleanpath(path) - if not hash[path] then - target[#target+1] = path - hash[path] = true - if trace then - report("registered %s path %s: %s",tag,#target,path) - end - else - if trace then - report("duplicate %s path: %s",tag,path) - end - end - end - -- - for p=1,#pathlist do - local path = pathlist[p] - if type(path) == "table" then - for i=1,#path do - add(path[i]) - end - else - add(path) - end - end - return paths -end - -helpers.registerpath = registerpath - -function package.extraluapath(...) - registerpath("extra lua","lua",extraluapaths,...) -end - -function package.extralibpath(...) - registerpath("extra lib","lib",extralibpaths,...) -end - --- lib loader (used elsewhere) - -local function loadedaslib(resolved,rawname) -- todo: strip all before first - - local base = gsub(rawname,"%.","_") - -- so, we can do a require("foo/bar") and initialize bar - -- local base = gsub(file.basename(rawname),"%.","_") - local init = "luaopen_" .. 
gsub(base,"%.","_") - if helpers.trace then - helpers.report("calling loadlib with '%s' with init '%s'",resolved,init) - end - return package.loadlib(resolved,init) -end - -helpers.loadedaslib = loadedaslib - --- wrapped and new loaders - -local function loadedbypath(name,rawname,paths,islib,what) - local trace = helpers.trace - for p=1,#paths do - local path = paths[p] - local resolved = filejoin(path,name) - if trace then - helpers.report("%s path, identifying '%s' on '%s'",what,name,path) - end - if isreadable(resolved) then - if trace then - helpers.report("%s path, '%s' found on '%s'",what,name,resolved) - end - if islib then - return loadedaslib(resolved,rawname) - else - return loadfile(resolved) - end - end - end -end - -helpers.loadedbypath = loadedbypath - -methods["already loaded"] = function(name) - return package.loaded[name] -end - -methods["preload table"] = function(name) - return builtin["preload table"](name) -end - -methods["lua extra list"] = function(name) - return loadedbypath(addsuffix(lualibfile(name),"lua" ),name,getextraluapaths(),false,"lua") -end - -methods["lib extra list"] = function(name) - return loadedbypath(addsuffix(lualibfile(name),os.libsuffix),name,getextralibpaths(),true, "lib") -end - -methods["path specification"] = function(name) - getluapaths() -- triggers list building and tracing - return builtin["path specification"](name) -end - -methods["cpath specification"] = function(name) - getlibpaths() -- triggers list building and tracing - return builtin["cpath specification"](name) -end - -methods["all in one fallback"] = function(name) - return builtin["all in one fallback"](name) -end - -methods["not loaded"] = function(name) - if helpers.trace then - helpers.report("unable to locate '%s'",name or "?") - end - return nil -end - -local level = 0 -local used = { } - -helpers.traceused = false - -function helpers.loaded(name) - local sequence = helpers.sequence - level = level + 1 - for i=1,#sequence do - local method = sequence[i] - if helpers.trace then - helpers.report("%s, level '%s', method '%s', name '%s'","locating",level,method,name) - end - local result, rest = methods[method](name) - if type(result) == "function" then - if helpers.trace then - helpers.report("%s, level '%s', method '%s', name '%s'","found",level,method,name) - end - if helpers.traceused then - used[#used+1] = { level = level, name = name } - end - level = level - 1 - return result, rest - end - end - -- safeguard, we never come here - level = level - 1 - return nil -end - -function helpers.showused() - local n = #used - if n > 0 then - helpers.report("%s libraries loaded:",n) - helpers.report() - for i=1,n do - local u = used[i] - helpers.report("%i %a",u.level,u.name) - end - helpers.report() - end -end - -function helpers.unload(name) - if helpers.trace then - if package.loaded[name] then - helpers.report("unloading, name '%s', %s",name,"done") - else - helpers.report("unloading, name '%s', %s",name,"not loaded") - end - end - package.loaded[name] = nil -end - --- overloading require does not work out well so we need to push it in --- front .. - -table.insert(searchers,1,helpers.loaded) +if not modules then modules = { } end modules ['l-package'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Code moved from data-lua and changed into a plug-in. + +-- We overload the regular loader. 
We do so because we operate mostly in +-- tds and use our own loader code. Alternatively we could use a more +-- extensive definition of package.path and package.cpath but even then +-- we're not done. Also, we now have better tracing. +-- +-- -- local mylib = require("libtest") +-- -- local mysql = require("luasql.mysql") + +local type = type +local gsub, format = string.gsub, string.format + +local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match + +local package = package +local searchers = package.searchers or package.loaders + +-- dummies + +local filejoin = file and file.join or function(path,name) return path .. "/" .. name end +local isreadable = file and file.is_readable or function(name) local f = io.open(name) if f then f:close() return true end end +local addsuffix = file and file.addsuffix or function(name,suffix) return name .. "." .. suffix end + +-- local separator, concatinator, placeholder, pathofexecutable, ignorebefore = string.match(package.config,"(.-)\n(.-)\n(.-)\n(.-)\n(.-)\n") +-- +-- local config = { +-- separator = separator, -- \ or / +-- concatinator = concatinator, -- ; +-- placeholder = placeholder, -- ? becomes name +-- pathofexecutable = pathofexecutable, -- ! becomes executables dir (on windows) +-- ignorebefore = ignorebefore, -- - remove all before this when making lua_open +-- } + +local function cleanpath(path) -- hm, don't we have a helper for this? + return path +end + +local pattern = Cs((((1-S("\\/"))^0 * (S("\\/")^1/"/"))^0 * (P(".")^1/"/"+P(1))^1) * -1) + +local function lualibfile(name) + return lpegmatch(pattern,name) or name +end + +local offset = luarocks and 1 or 0 -- todo: also check other extras + +local helpers = package.helpers or { + cleanpath = cleanpath, + lualibfile = lualibfile, + trace = false, + report = function(...) print(format(...)) end, + builtin = { + ["preload table"] = searchers[1+offset], -- special case, built-in libs + ["path specification"] = searchers[2+offset], + ["cpath specification"] = searchers[3+offset], + ["all in one fallback"] = searchers[4+offset], -- special case, combined libs + }, + methods = { + }, + sequence = { + "already loaded", + "preload table", + "lua extra list", + "lib extra list", + "path specification", + "cpath specification", + "all in one fallback", + "not loaded", + } +} + +package.helpers = helpers + +local methods = helpers.methods +local builtin = helpers.builtin + +-- extra tds/ctx paths ... 
a bit of overhead for efficient tracing + +local extraluapaths = { } +local extralibpaths = { } +local luapaths = nil -- delayed +local libpaths = nil -- delayed +local oldluapath = nil +local oldlibpath = nil + +local nofextralua = -1 +local nofextralib = -1 +local nofpathlua = -1 +local nofpathlib = -1 + +local function listpaths(what,paths) + local nofpaths = #paths + if nofpaths > 0 then + for i=1,nofpaths do + helpers.report("using %s path %i: %s",what,i,paths[i]) + end + else + helpers.report("no %s paths defined",what) + end + return nofpaths +end + +local function getextraluapaths() + if helpers.trace and #extraluapaths ~= nofextralua then + nofextralua = listpaths("extra lua",extraluapaths) + end + return extraluapaths +end + +local function getextralibpaths() + if helpers.trace and #extralibpaths ~= nofextralib then + nofextralib = listpaths("extra lib",extralibpaths) + end + return extralibpaths +end + +local function getluapaths() + local luapath = package.path or "" + if oldluapath ~= luapath then + luapaths = file.splitpath(luapath,";") + oldluapath = luapath + nofpathlua = -1 + end + if helpers.trace and #luapaths ~= nofpathlua then + nofpathlua = listpaths("builtin lua",luapaths) + end + return luapaths +end + +local function getlibpaths() + local libpath = package.cpath or "" + if oldlibpath ~= libpath then + libpaths = file.splitpath(libpath,";") + oldlibpath = libpath + nofpathlib = -1 + end + if helpers.trace and #libpaths ~= nofpathlib then + nofpathlib = listpaths("builtin lib",libpaths) + end + return libpaths +end + +package.luapaths = getluapaths +package.libpaths = getlibpaths +package.extraluapaths = getextraluapaths +package.extralibpaths = getextralibpaths + +local hashes = { + lua = { }, + lib = { }, +} + +local function registerpath(tag,what,target,...) + local pathlist = { ... } + local cleanpath = helpers.cleanpath + local trace = helpers.trace + local report = helpers.report + local hash = hashes[what] + -- + local function add(path) + local path = cleanpath(path) + if not hash[path] then + target[#target+1] = path + hash[path] = true + if trace then + report("registered %s path %s: %s",tag,#target,path) + end + else + if trace then + report("duplicate %s path: %s",tag,path) + end + end + end + -- + for p=1,#pathlist do + local path = pathlist[p] + if type(path) == "table" then + for i=1,#path do + add(path[i]) + end + else + add(path) + end + end + return paths +end + +helpers.registerpath = registerpath + +function package.extraluapath(...) + registerpath("extra lua","lua",extraluapaths,...) +end + +function package.extralibpath(...) + registerpath("extra lib","lib",extralibpaths,...) +end + +-- lib loader (used elsewhere) + +local function loadedaslib(resolved,rawname) -- todo: strip all before first - + local base = gsub(rawname,"%.","_") + -- so, we can do a require("foo/bar") and initialize bar + -- local base = gsub(file.basename(rawname),"%.","_") + local init = "luaopen_" .. 
gsub(base,"%.","_") + if helpers.trace then + helpers.report("calling loadlib with '%s' with init '%s'",resolved,init) + end + return package.loadlib(resolved,init) +end + +helpers.loadedaslib = loadedaslib + +-- wrapped and new loaders + +local function loadedbypath(name,rawname,paths,islib,what) + local trace = helpers.trace + for p=1,#paths do + local path = paths[p] + local resolved = filejoin(path,name) + if trace then + helpers.report("%s path, identifying '%s' on '%s'",what,name,path) + end + if isreadable(resolved) then + if trace then + helpers.report("%s path, '%s' found on '%s'",what,name,resolved) + end + if islib then + return loadedaslib(resolved,rawname) + else + return loadfile(resolved) + end + end + end +end + +helpers.loadedbypath = loadedbypath + +methods["already loaded"] = function(name) + return package.loaded[name] +end + +methods["preload table"] = function(name) + return builtin["preload table"](name) +end + +methods["lua extra list"] = function(name) + return loadedbypath(addsuffix(lualibfile(name),"lua" ),name,getextraluapaths(),false,"lua") +end + +methods["lib extra list"] = function(name) + return loadedbypath(addsuffix(lualibfile(name),os.libsuffix),name,getextralibpaths(),true, "lib") +end + +methods["path specification"] = function(name) + getluapaths() -- triggers list building and tracing + return builtin["path specification"](name) +end + +methods["cpath specification"] = function(name) + getlibpaths() -- triggers list building and tracing + return builtin["cpath specification"](name) +end + +methods["all in one fallback"] = function(name) + return builtin["all in one fallback"](name) +end + +methods["not loaded"] = function(name) + if helpers.trace then + helpers.report("unable to locate '%s'",name or "?") + end + return nil +end + +local level = 0 +local used = { } + +helpers.traceused = false + +function helpers.loaded(name) + local sequence = helpers.sequence + level = level + 1 + for i=1,#sequence do + local method = sequence[i] + if helpers.trace then + helpers.report("%s, level '%s', method '%s', name '%s'","locating",level,method,name) + end + local result, rest = methods[method](name) + if type(result) == "function" then + if helpers.trace then + helpers.report("%s, level '%s', method '%s', name '%s'","found",level,method,name) + end + if helpers.traceused then + used[#used+1] = { level = level, name = name } + end + level = level - 1 + return result, rest + end + end + -- safeguard, we never come here + level = level - 1 + return nil +end + +function helpers.showused() + local n = #used + if n > 0 then + helpers.report("%s libraries loaded:",n) + helpers.report() + for i=1,n do + local u = used[i] + helpers.report("%i %a",u.level,u.name) + end + helpers.report() + end +end + +function helpers.unload(name) + if helpers.trace then + if package.loaded[name] then + helpers.report("unloading, name '%s', %s",name,"done") + else + helpers.report("unloading, name '%s', %s",name,"not loaded") + end + end + package.loaded[name] = nil +end + +-- overloading require does not work out well so we need to push it in +-- front .. 
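-- [Editorial sketch, not taken from the patch itself; the path and module name are
-- hypothetical.] Driving the plug-in loader defined above, assuming the l-package
-- helpers are loaded as in a ConTeXt/mtxrun run:

package.helpers.trace = true                -- report every lookup step
package.extraluapath("t:/lua/experiments")  -- register an extra (hypothetical) search path
local mydemo = require("mydemo")            -- hypothetical module; helpers.loaded walks
                                            -- helpers.sequence: already loaded, preload
                                            -- table, extra lua/lib lists, path/cpath
                                            -- specification, all in one fallback
package.helpers.unload("mydemo")            -- forget it again (clears package.loaded)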
+ +table.insert(searchers,1,helpers.loaded) diff --git a/tex/context/base/l-pdfview.lua b/tex/context/base/l-pdfview.lua index 80033900f..643d538e7 100644 --- a/tex/context/base/l-pdfview.lua +++ b/tex/context/base/l-pdfview.lua @@ -1,143 +1,143 @@ -if not modules then modules = { } end modules ['l-pdfview'] = { - version = 1.001, - comment = "companion to mtx-context.lua", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Todo: figure out pdfopen/pdfclose on linux. Calling e.g. okular directly --- doesn't work in linux when issued from scite as it blocks the editor (no --- & possible or so). Unfortunately pdfopen keeps changing with not keeping --- downward compatibility (command line arguments and so). - --- no 2>&1 any more, needs checking on windows - -local format, concat = string.format, table.concat - -pdfview = pdfview or { } - -local opencalls, closecalls, allcalls, runner - -if os.type == "windows" then - - opencalls = { - ['default'] = "pdfopen --rxi --file", - ['acrobat'] = "pdfopen --rxi --file", - ['fullacrobat'] = "pdfopen --axi --file", - ['okular'] = 'start "test" "c:/data/system/kde/bin/okular.exe" --unique', -- todo! - ['sumatra'] = 'start "test" "c:/data/system/sumatrapdf/sumatrapdf.exe" -reuse-instance', - ['okular'] = 'start "test" "okular.exe" --unique', - ['sumatra'] = 'start "test" "sumatrapdf.exe" -reuse-instance -bg-color 0xCCCCCC', - } - closecalls= { - ['default'] = "pdfclose --file", - ['acrobat'] = "pdfclose --file", - ['okular'] = false, - ['sumatra'] = false, - } - allcalls = { - ['default'] = "pdfclose --all", - ['acrobat'] = "pdfclose --all", - ['okular'] = false, - ['sumatra'] = false, - } - - pdfview.method = "acrobat" - - runner = function(...) --- os.spawn(...) - os.execute(...) - end - -else - - opencalls = { - ['default'] = "pdfopen", -- we could pass the default here - ['okular'] = 'okular --unique' - } - closecalls= { - ['default'] = "pdfclose --file", - ['okular'] = false, - } - allcalls = { - ['default'] = "pdfclose --all", - ['okular'] = false, - } - - pdfview.method = "okular" - - runner = function(...) - os.spawn(...) - end - -end - -directives.register("pdfview.method", function(v) - pdfview.method = (opencalls[v] and v) or 'default' -end) - -function pdfview.setmethod(method) - if method and opencalls[method] then - pdfview.method = method - end -end - -function pdfview.methods() - return concat(table.sortedkeys(opencalls), " ") -end - -function pdfview.status() - return format("pdfview methods: %s, current method: %s (directives_pdfview_method)",pdfview.methods(),tostring(pdfview.method)) -end - --- local openedfiles = { } - -local function fullname(name) - return file.addsuffix(name,"pdf") -end - -function pdfview.open(...) - local opencall = opencalls[pdfview.method] - if opencall then - local t = { ... } - for i=1,#t do - local name = fullname(t[i]) - if io.exists(name) then - runner(format('%s "%s"', opencall, name)) - -- openedfiles[name] = true - end - end - end -end - -function pdfview.close(...) - local closecall = closecalls[pdfview.method] - if closecall then - local t = { ... 
} - for i=1,#t do - local name = fullname(t[i]) - -- if openedfiles[name] then - runner(format('%s "%s"', closecall, name)) - -- openedfiles[name] = nil - -- else - -- pdfview.closeall() - -- break - -- end - end - end -end - -function pdfview.closeall() - local allcall = allcalls[pdfview.method] - if allcall then - runner(format('%s', allcall)) - end - -- openedfiles = { } -end - ---~ pdfview.open("t:/document/show-exa.pdf") ---~ os.sleep(3) ---~ pdfview.close("t:/document/show-exa.pdf") - -return pdfview +if not modules then modules = { } end modules ['l-pdfview'] = { + version = 1.001, + comment = "companion to mtx-context.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Todo: figure out pdfopen/pdfclose on linux. Calling e.g. okular directly +-- doesn't work in linux when issued from scite as it blocks the editor (no +-- & possible or so). Unfortunately pdfopen keeps changing with not keeping +-- downward compatibility (command line arguments and so). + +-- no 2>&1 any more, needs checking on windows + +local format, concat = string.format, table.concat + +pdfview = pdfview or { } + +local opencalls, closecalls, allcalls, runner + +if os.type == "windows" then + + opencalls = { + ['default'] = "pdfopen --rxi --file", + ['acrobat'] = "pdfopen --rxi --file", + ['fullacrobat'] = "pdfopen --axi --file", + ['okular'] = 'start "test" "c:/data/system/kde/bin/okular.exe" --unique', -- todo! + ['sumatra'] = 'start "test" "c:/data/system/sumatrapdf/sumatrapdf.exe" -reuse-instance', + ['okular'] = 'start "test" "okular.exe" --unique', + ['sumatra'] = 'start "test" "sumatrapdf.exe" -reuse-instance -bg-color 0xCCCCCC', + } + closecalls= { + ['default'] = "pdfclose --file", + ['acrobat'] = "pdfclose --file", + ['okular'] = false, + ['sumatra'] = false, + } + allcalls = { + ['default'] = "pdfclose --all", + ['acrobat'] = "pdfclose --all", + ['okular'] = false, + ['sumatra'] = false, + } + + pdfview.method = "acrobat" + + runner = function(...) +-- os.spawn(...) + os.execute(...) + end + +else + + opencalls = { + ['default'] = "pdfopen", -- we could pass the default here + ['okular'] = 'okular --unique' + } + closecalls= { + ['default'] = "pdfclose --file", + ['okular'] = false, + } + allcalls = { + ['default'] = "pdfclose --all", + ['okular'] = false, + } + + pdfview.method = "okular" + + runner = function(...) + os.spawn(...) + end + +end + +directives.register("pdfview.method", function(v) + pdfview.method = (opencalls[v] and v) or 'default' +end) + +function pdfview.setmethod(method) + if method and opencalls[method] then + pdfview.method = method + end +end + +function pdfview.methods() + return concat(table.sortedkeys(opencalls), " ") +end + +function pdfview.status() + return format("pdfview methods: %s, current method: %s (directives_pdfview_method)",pdfview.methods(),tostring(pdfview.method)) +end + +-- local openedfiles = { } + +local function fullname(name) + return file.addsuffix(name,"pdf") +end + +function pdfview.open(...) + local opencall = opencalls[pdfview.method] + if opencall then + local t = { ... } + for i=1,#t do + local name = fullname(t[i]) + if io.exists(name) then + runner(format('%s "%s"', opencall, name)) + -- openedfiles[name] = true + end + end + end +end + +function pdfview.close(...) + local closecall = closecalls[pdfview.method] + if closecall then + local t = { ... 
} + for i=1,#t do + local name = fullname(t[i]) + -- if openedfiles[name] then + runner(format('%s "%s"', closecall, name)) + -- openedfiles[name] = nil + -- else + -- pdfview.closeall() + -- break + -- end + end + end +end + +function pdfview.closeall() + local allcall = allcalls[pdfview.method] + if allcall then + runner(format('%s', allcall)) + end + -- openedfiles = { } +end + +--~ pdfview.open("t:/document/show-exa.pdf") +--~ os.sleep(3) +--~ pdfview.close("t:/document/show-exa.pdf") + +return pdfview diff --git a/tex/context/base/l-set.lua b/tex/context/base/l-set.lua index 2370f0139..dfaf89284 100644 --- a/tex/context/base/l-set.lua +++ b/tex/context/base/l-set.lua @@ -1,87 +1,87 @@ -if not modules then modules = { } end modules ['l-set'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This will become obsolete when we have the bitset library embedded. - -set = set or { } - -local nums = { } -local tabs = { } -local concat = table.concat -local next, type = next, type - -set.create = table.tohash - -function set.tonumber(t) - if next(t) then - local s = "" - -- we could save mem by sorting, but it slows down - for k, v in next, t do - if v then - -- why bother about the leading space - s = s .. " " .. k - end - end - local n = nums[s] - if not n then - n = #tabs + 1 - tabs[n] = t - nums[s] = n - end - return n - else - return 0 - end -end - -function set.totable(n) - if n == 0 then - return { } - else - return tabs[n] or { } - end -end - -function set.tolist(n) - if n == 0 or not tabs[n] then - return "" - else - local t, n = { }, 0 - for k, v in next, tabs[n] do - if v then - n = n + 1 - t[n] = k - end - end - return concat(t," ") - end -end - -function set.contains(n,s) - if type(n) == "table" then - return n[s] - elseif n == 0 then - return false - else - local t = tabs[n] - return t and t[s] - end -end - ---~ local c = set.create{'aap','noot','mies'} ---~ local s = set.tonumber(c) ---~ local t = set.totable(s) ---~ print(t['aap']) ---~ local c = set.create{'zus','wim','jet'} ---~ local s = set.tonumber(c) ---~ local t = set.totable(s) ---~ print(t['aap']) ---~ print(t['jet']) ---~ print(set.contains(t,'jet')) ---~ print(set.contains(t,'aap')) - +if not modules then modules = { } end modules ['l-set'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This will become obsolete when we have the bitset library embedded. + +set = set or { } + +local nums = { } +local tabs = { } +local concat = table.concat +local next, type = next, type + +set.create = table.tohash + +function set.tonumber(t) + if next(t) then + local s = "" + -- we could save mem by sorting, but it slows down + for k, v in next, t do + if v then + -- why bother about the leading space + s = s .. " " .. 
k + end + end + local n = nums[s] + if not n then + n = #tabs + 1 + tabs[n] = t + nums[s] = n + end + return n + else + return 0 + end +end + +function set.totable(n) + if n == 0 then + return { } + else + return tabs[n] or { } + end +end + +function set.tolist(n) + if n == 0 or not tabs[n] then + return "" + else + local t, n = { }, 0 + for k, v in next, tabs[n] do + if v then + n = n + 1 + t[n] = k + end + end + return concat(t," ") + end +end + +function set.contains(n,s) + if type(n) == "table" then + return n[s] + elseif n == 0 then + return false + else + local t = tabs[n] + return t and t[s] + end +end + +--~ local c = set.create{'aap','noot','mies'} +--~ local s = set.tonumber(c) +--~ local t = set.totable(s) +--~ print(t['aap']) +--~ local c = set.create{'zus','wim','jet'} +--~ local s = set.tonumber(c) +--~ local t = set.totable(s) +--~ print(t['aap']) +--~ print(t['jet']) +--~ print(set.contains(t,'jet')) +--~ print(set.contains(t,'aap')) + diff --git a/tex/context/base/l-string.lua b/tex/context/base/l-string.lua index 77c076cc5..c87c57521 100644 --- a/tex/context/base/l-string.lua +++ b/tex/context/base/l-string.lua @@ -1,205 +1,205 @@ -if not modules then modules = { } end modules ['l-string'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local string = string -local sub, gmatch, format, char, byte, rep, lower = string.sub, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower -local lpegmatch, patterns = lpeg.match, lpeg.patterns -local P, S, C, Ct, Cc, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.Cs - --- Some functions are already defined in l-lpeg and maybe some from here will --- move there (unless we also expose caches). - --- if not string.split then --- --- function string.split(str,pattern) --- local t = { } --- if #str > 0 then --- local n = 1 --- for s in gmatch(str..pattern,"(.-)"..pattern) do --- t[n] = s --- n = n + 1 --- end --- end --- return t --- end --- --- end - --- function string.unquoted(str) --- return (gsub(str,"^([\"\'])(.*)%1$","%2")) -- interesting pattern --- end - -local unquoted = patterns.squote * C(patterns.nosquote) * patterns.squote - + patterns.dquote * C(patterns.nodquote) * patterns.dquote - -function string.unquoted(str) - return lpegmatch(unquoted,str) or str -end - --- print(string.unquoted("test")) --- print(string.unquoted([["t\"est"]])) --- print(string.unquoted([["t\"est"x]])) --- print(string.unquoted("\'test\'")) --- print(string.unquoted('"test"')) --- print(string.unquoted('"test"')) - -function string.quoted(str) - return format("%q",str) -- always double quote -end - -function string.count(str,pattern) -- variant 3 - local n = 0 - for _ in gmatch(str,pattern) do -- not for utf - n = n + 1 - end - return n -end - -function string.limit(str,n,sentinel) -- not utf proof - if #str > n then - sentinel = sentinel or "..." - return sub(str,1,(n-#sentinel)) .. 
sentinel - else - return str - end -end - -local stripper = patterns.stripper -local collapser = patterns.collapser -local longtostring = patterns.longtostring - -function string.strip(str) - return lpegmatch(stripper,str) or "" -end - -function string.collapsespaces(str) - return lpegmatch(collapser,str) or "" -end - -function string.longtostring(str) - return lpegmatch(longtostring,str) or "" -end - --- function string.is_empty(str) --- return not find(str,"%S") --- end - -local pattern = P(" ")^0 * P(-1) - -function string.is_empty(str) - if str == "" then - return true - else - return lpegmatch(pattern,str) and true or false - end -end - --- if not string.escapedpattern then --- --- local patterns_escapes = { --- ["%"] = "%%", --- ["."] = "%.", --- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*", --- ["["] = "%[", ["]"] = "%]", --- ["("] = "%(", [")"] = "%)", --- -- ["{"] = "%{", ["}"] = "%}" --- -- ["^"] = "%^", ["$"] = "%$", --- } --- --- local simple_escapes = { --- ["-"] = "%-", --- ["."] = "%.", --- ["?"] = ".", --- ["*"] = ".*", --- } --- --- function string.escapedpattern(str,simple) --- return (gsub(str,".",simple and simple_escapes or patterns_escapes)) --- end --- --- function string.topattern(str,lowercase,strict) --- if str == "" then --- return ".*" --- else --- str = gsub(str,".",simple_escapes) --- if lowercase then --- str = lower(str) --- end --- if strict then --- return "^" .. str .. "$" --- else --- return str --- end --- end --- end --- --- end - ---- needs checking - -local anything = patterns.anything -local allescapes = Cc("%") * S(".-+%?()[]*") -- also {} and ^$ ? -local someescapes = Cc("%") * S(".-+%()[]") -- also {} and ^$ ? -local matchescapes = Cc(".") * S("*?") -- wildcard and single match - -local pattern_a = Cs ( ( allescapes + anything )^0 ) -local pattern_b = Cs ( ( someescapes + matchescapes + anything )^0 ) -local pattern_c = Cs ( Cc("^") * ( someescapes + matchescapes + anything )^0 * Cc("$") ) - -function string.escapedpattern(str,simple) - return lpegmatch(simple and pattern_b or pattern_a,str) -end - -function string.topattern(str,lowercase,strict) - if str=="" or type(str) ~= "string" then - return ".*" - elseif strict then - str = lpegmatch(pattern_c,str) - else - str = lpegmatch(pattern_b,str) - end - if lowercase then - return lower(str) - else - return str - end -end - --- print(string.escapedpattern("12+34*.tex",false)) --- print(string.escapedpattern("12+34*.tex",true)) --- print(string.topattern ("12+34*.tex",false,false)) --- print(string.topattern ("12+34*.tex",false,true)) - -function string.valid(str,default) - return (type(str) == "string" and str ~= "" and str) or default or nil -end - --- handy fallback - -string.itself = function(s) return s end - --- also handy (see utf variant) - -local pattern = Ct(C(1)^0) -- string and not utf ! - -function string.totable(str) - return lpegmatch(pattern,str) -end - --- handy from within tex: - -local replacer = lpeg.replacer("@","%%") -- Watch the escaped % in lpeg! - -function string.tformat(fmt,...) - return format(lpegmatch(replacer,fmt),...) 
-end - --- obsolete names: - -string.quote = string.quoted -string.unquote = string.unquoted +if not modules then modules = { } end modules ['l-string'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local string = string +local sub, gmatch, format, char, byte, rep, lower = string.sub, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower +local lpegmatch, patterns = lpeg.match, lpeg.patterns +local P, S, C, Ct, Cc, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.Cs + +-- Some functions are already defined in l-lpeg and maybe some from here will +-- move there (unless we also expose caches). + +-- if not string.split then +-- +-- function string.split(str,pattern) +-- local t = { } +-- if #str > 0 then +-- local n = 1 +-- for s in gmatch(str..pattern,"(.-)"..pattern) do +-- t[n] = s +-- n = n + 1 +-- end +-- end +-- return t +-- end +-- +-- end + +-- function string.unquoted(str) +-- return (gsub(str,"^([\"\'])(.*)%1$","%2")) -- interesting pattern +-- end + +local unquoted = patterns.squote * C(patterns.nosquote) * patterns.squote + + patterns.dquote * C(patterns.nodquote) * patterns.dquote + +function string.unquoted(str) + return lpegmatch(unquoted,str) or str +end + +-- print(string.unquoted("test")) +-- print(string.unquoted([["t\"est"]])) +-- print(string.unquoted([["t\"est"x]])) +-- print(string.unquoted("\'test\'")) +-- print(string.unquoted('"test"')) +-- print(string.unquoted('"test"')) + +function string.quoted(str) + return format("%q",str) -- always double quote +end + +function string.count(str,pattern) -- variant 3 + local n = 0 + for _ in gmatch(str,pattern) do -- not for utf + n = n + 1 + end + return n +end + +function string.limit(str,n,sentinel) -- not utf proof + if #str > n then + sentinel = sentinel or "..." + return sub(str,1,(n-#sentinel)) .. sentinel + else + return str + end +end + +local stripper = patterns.stripper +local collapser = patterns.collapser +local longtostring = patterns.longtostring + +function string.strip(str) + return lpegmatch(stripper,str) or "" +end + +function string.collapsespaces(str) + return lpegmatch(collapser,str) or "" +end + +function string.longtostring(str) + return lpegmatch(longtostring,str) or "" +end + +-- function string.is_empty(str) +-- return not find(str,"%S") +-- end + +local pattern = P(" ")^0 * P(-1) + +function string.is_empty(str) + if str == "" then + return true + else + return lpegmatch(pattern,str) and true or false + end +end + +-- if not string.escapedpattern then +-- +-- local patterns_escapes = { +-- ["%"] = "%%", +-- ["."] = "%.", +-- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*", +-- ["["] = "%[", ["]"] = "%]", +-- ["("] = "%(", [")"] = "%)", +-- -- ["{"] = "%{", ["}"] = "%}" +-- -- ["^"] = "%^", ["$"] = "%$", +-- } +-- +-- local simple_escapes = { +-- ["-"] = "%-", +-- ["."] = "%.", +-- ["?"] = ".", +-- ["*"] = ".*", +-- } +-- +-- function string.escapedpattern(str,simple) +-- return (gsub(str,".",simple and simple_escapes or patterns_escapes)) +-- end +-- +-- function string.topattern(str,lowercase,strict) +-- if str == "" then +-- return ".*" +-- else +-- str = gsub(str,".",simple_escapes) +-- if lowercase then +-- str = lower(str) +-- end +-- if strict then +-- return "^" .. str .. 
"$" +-- else +-- return str +-- end +-- end +-- end +-- +-- end + +--- needs checking + +local anything = patterns.anything +local allescapes = Cc("%") * S(".-+%?()[]*") -- also {} and ^$ ? +local someescapes = Cc("%") * S(".-+%()[]") -- also {} and ^$ ? +local matchescapes = Cc(".") * S("*?") -- wildcard and single match + +local pattern_a = Cs ( ( allescapes + anything )^0 ) +local pattern_b = Cs ( ( someescapes + matchescapes + anything )^0 ) +local pattern_c = Cs ( Cc("^") * ( someescapes + matchescapes + anything )^0 * Cc("$") ) + +function string.escapedpattern(str,simple) + return lpegmatch(simple and pattern_b or pattern_a,str) +end + +function string.topattern(str,lowercase,strict) + if str=="" or type(str) ~= "string" then + return ".*" + elseif strict then + str = lpegmatch(pattern_c,str) + else + str = lpegmatch(pattern_b,str) + end + if lowercase then + return lower(str) + else + return str + end +end + +-- print(string.escapedpattern("12+34*.tex",false)) +-- print(string.escapedpattern("12+34*.tex",true)) +-- print(string.topattern ("12+34*.tex",false,false)) +-- print(string.topattern ("12+34*.tex",false,true)) + +function string.valid(str,default) + return (type(str) == "string" and str ~= "" and str) or default or nil +end + +-- handy fallback + +string.itself = function(s) return s end + +-- also handy (see utf variant) + +local pattern = Ct(C(1)^0) -- string and not utf ! + +function string.totable(str) + return lpegmatch(pattern,str) +end + +-- handy from within tex: + +local replacer = lpeg.replacer("@","%%") -- Watch the escaped % in lpeg! + +function string.tformat(fmt,...) + return format(lpegmatch(replacer,fmt),...) +end + +-- obsolete names: + +string.quote = string.quoted +string.unquote = string.unquoted diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua index 9a1b97fff..54c2b86e3 100644 --- a/tex/context/base/l-table.lua +++ b/tex/context/base/l-table.lua @@ -1,1362 +1,1362 @@ -if not modules then modules = { } end modules ['l-table'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local type, next, tostring, tonumber, ipairs, select = type, next, tostring, tonumber, ipairs, select -local table, string = table, string -local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove -local format, lower, dump = string.format, string.lower, string.dump -local getmetatable, setmetatable = getmetatable, setmetatable -local getinfo = debug.getinfo -local lpegmatch, patterns = lpeg.match, lpeg.patterns -local floor = math.floor - --- extra functions, some might go (when not used) - -local stripper = patterns.stripper - -function table.strip(tab) - local lst, l = { }, 0 - for i=1,#tab do - local s = lpegmatch(stripper,tab[i]) or "" - if s == "" then - -- skip this one - else - l = l + 1 - lst[l] = s - end - end - return lst -end - -function table.keys(t) - if t then - local keys, k = { }, 0 - for key, _ in next, t do - k = k + 1 - keys[k] = key - end - return keys - else - return { } - end -end - -local function compare(a,b) - local ta, tb = type(a), type(b) -- needed, else 11 < 2 - if ta == tb then - return a < b - else - return tostring(a) < tostring(b) - end -end - -local function sortedkeys(tab) - if tab then - local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed - for key,_ in next, tab do - s = s + 1 - srt[s] = key - if 
category == 3 then - -- no further check - else - local tkey = type(key) - if tkey == "string" then - category = (category == 2 and 3) or 1 - elseif tkey == "number" then - category = (category == 1 and 3) or 2 - else - category = 3 - end - end - end - if category == 0 or category == 3 then - sort(srt,compare) - else - sort(srt) - end - return srt - else - return { } - end -end - -local function sortedhashkeys(tab,cmp) -- fast one - if tab then - local srt, s = { }, 0 - for key,_ in next, tab do - if key then - s= s + 1 - srt[s] = key - end - end - sort(srt,cmp) - return srt - else - return { } - end -end - -function table.allkeys(t) - local keys = { } - for k, v in next, t do - for k, v in next, v do - keys[k] = true - end - end - return sortedkeys(keys) -end - -table.sortedkeys = sortedkeys -table.sortedhashkeys = sortedhashkeys - -local function nothing() end - -local function sortedhash(t,cmp) - if t then - local s - if cmp then - -- it would be nice if the sort function would accept a third argument (or nicer, an optional first) - s = sortedhashkeys(t,function(a,b) return cmp(t,a,b) end) - else - s = sortedkeys(t) -- the robust one - end - local n = 0 - local function kv(s) - n = n + 1 - local k = s[n] - return k, t[k] - end - return kv, s - else - return nothing - end -end - -table.sortedhash = sortedhash -table.sortedpairs = sortedhash -- obsolete - -function table.append(t,list) - local n = #t - for i=1,#list do - n = n + 1 - t[n] = list[i] - end - return t -end - -function table.prepend(t, list) - local nl = #list - local nt = nl + #t - for i=#t,1,-1 do - t[nt] = t[i] - nt = nt - 1 - end - for i=1,#list do - t[i] = list[i] - end - return t -end - --- function table.merge(t, ...) -- first one is target --- t = t or { } --- local lst = { ... } --- for i=1,#lst do --- for k, v in next, lst[i] do --- t[k] = v --- end --- end --- return t --- end - -function table.merge(t, ...) -- first one is target - t = t or { } - for i=1,select("#",...) do - for k, v in next, (select(i,...)) do - t[k] = v - end - end - return t -end - --- function table.merged(...) --- local tmp, lst = { }, { ... } --- for i=1,#lst do --- for k, v in next, lst[i] do --- tmp[k] = v --- end --- end --- return tmp --- end - -function table.merged(...) - local t = { } - for i=1,select("#",...) do - for k, v in next, (select(i,...)) do - t[k] = v - end - end - return t -end - --- function table.imerge(t, ...) --- local lst, nt = { ... }, #t --- for i=1,#lst do --- local nst = lst[i] --- for j=1,#nst do --- nt = nt + 1 --- t[nt] = nst[j] --- end --- end --- return t --- end - -function table.imerge(t, ...) - local nt = #t - for i=1,select("#",...) do - local nst = select(i,...) - for j=1,#nst do - nt = nt + 1 - t[nt] = nst[j] - end - end - return t -end - --- function table.imerged(...) --- local tmp, ntmp, lst = { }, 0, {...} --- for i=1,#lst do --- local nst = lst[i] --- for j=1,#nst do --- ntmp = ntmp + 1 --- tmp[ntmp] = nst[j] --- end --- end --- return tmp --- end - -function table.imerged(...) - local tmp, ntmp = { }, 0 - for i=1,select("#",...) do - local nst = select(i,...) 
- for j=1,#nst do - ntmp = ntmp + 1 - tmp[ntmp] = nst[j] - end - end - return tmp -end - -local function fastcopy(old,metatabletoo) -- fast one - if old then - local new = { } - for k, v in next, old do - if type(v) == "table" then - new[k] = fastcopy(v,metatabletoo) -- was just table.copy - else - new[k] = v - end - end - if metatabletoo then - -- optional second arg - local mt = getmetatable(old) - if mt then - setmetatable(new,mt) - end - end - return new - else - return { } - end -end - --- todo : copy without metatable - -local function copy(t, tables) -- taken from lua wiki, slightly adapted - tables = tables or { } - local tcopy = {} - if not tables[t] then - tables[t] = tcopy - end - for i,v in next, t do -- brrr, what happens with sparse indexed - if type(i) == "table" then - if tables[i] then - i = tables[i] - else - i = copy(i, tables) - end - end - if type(v) ~= "table" then - tcopy[i] = v - elseif tables[v] then - tcopy[i] = tables[v] - else - tcopy[i] = copy(v, tables) - end - end - local mt = getmetatable(t) - if mt then - setmetatable(tcopy,mt) - end - return tcopy -end - -table.fastcopy = fastcopy -table.copy = copy - -function table.derive(parent) -- for the moment not public - local child = { } - if parent then - setmetatable(child,{ __index = parent }) - end - return child -end - -function table.tohash(t,value) - local h = { } - if t then - if value == nil then value = true end - for _, v in next, t do -- no ipairs here - h[v] = value - end - end - return h -end - -function table.fromhash(t) - local hsh, h = { }, 0 - for k, v in next, t do -- no ipairs here - if v then - h = h + 1 - hsh[h] = k - end - end - return hsh -end - -local noquotes, hexify, handle, reduce, compact, inline, functions - -local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key - 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if', - 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while', -} - -local function simple_table(t) - if #t > 0 then - local n = 0 - for _,v in next, t do - n = n + 1 - end - if n == #t then - local tt, nt = { }, 0 - for i=1,#t do - local v = t[i] - local tv = type(v) - if tv == "number" then - nt = nt + 1 - if hexify then - tt[nt] = format("0x%04X",v) - else - tt[nt] = tostring(v) -- tostring not needed - end - elseif tv == "boolean" then - nt = nt + 1 - tt[nt] = tostring(v) - elseif tv == "string" then - nt = nt + 1 - tt[nt] = format("%q",v) - else - tt = nil - break - end - end - return tt - end - end - return nil -end - --- Because this is a core function of mkiv I moved some function calls --- inline. --- --- twice as fast in a test: --- --- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) ) - --- problem: there no good number_to_string converter with the best resolution - --- probably using .. is faster than format --- maybe split in a few cases (yes/no hexify) - --- todo: %g faster on numbers than %s - --- we can speed this up with repeaters and formatters (is indeed faster) - -local propername = patterns.propername -- was find(name,"^%a[%w%_]*$") - -local function dummy() end - -local function do_serialize(root,name,depth,level,indexed) - if level > 0 then - depth = depth .. 
" " - if indexed then - handle(format("%s{",depth)) - else - local tn = type(name) - if tn == "number" then - if hexify then - handle(format("%s[0x%04X]={",depth,name)) - else - handle(format("%s[%s]={",depth,name)) - end - elseif tn == "string" then - if noquotes and not reserved[name] and lpegmatch(propername,name) then - handle(format("%s%s={",depth,name)) - else - handle(format("%s[%q]={",depth,name)) - end - elseif tn == "boolean" then - handle(format("%s[%s]={",depth,tostring(name))) - else - handle(format("%s{",depth)) - end - end - end - -- we could check for k (index) being number (cardinal) - if root and next(root) then - -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) - -- if compact then - -- -- NOT: for k=1,#root do (we need to quit at nil) - -- for k,v in ipairs(root) do -- can we use next? - -- if not first then first = k end - -- last = last + 1 - -- end - -- end - local first, last = nil, 0 - if compact then - last = #root - for k=1,last do - if root[k] == nil then - last = k - 1 - break - end - end - if last > 0 then - first = 1 - end - end - local sk = sortedkeys(root) - for i=1,#sk do - local k = sk[i] - local v = root[k] - --~ if v == root then - -- circular - --~ else - local t, tk = type(v), type(k) - if compact and first and tk == "number" and k >= first and k <= last then - if t == "number" then - if hexify then - handle(format("%s 0x%04X,",depth,v)) - else - handle(format("%s %s,",depth,v)) -- %.99g - end - elseif t == "string" then - if reduce and tonumber(v) then - handle(format("%s %s,",depth,v)) - else - handle(format("%s %q,",depth,v)) - end - elseif t == "table" then - if not next(v) then - handle(format("%s {},",depth)) - elseif inline then -- and #t > 0 - local st = simple_table(v) - if st then - handle(format("%s { %s },",depth,concat(st,", "))) - else - do_serialize(v,k,depth,level+1,true) - end - else - do_serialize(v,k,depth,level+1,true) - end - elseif t == "boolean" then - handle(format("%s %s,",depth,tostring(v))) - elseif t == "function" then - if functions then - handle(format('%s load(%q),',depth,dump(v))) - else - handle(format('%s "function",',depth)) - end - else - handle(format("%s %q,",depth,tostring(v))) - end - elseif k == "__p__" then -- parent - if false then - handle(format("%s __p__=nil,",depth)) - end - elseif t == "number" then - if tk == "number" then - if hexify then - handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) - else - handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g - end - elseif tk == "boolean" then - if hexify then - handle(format("%s [%s]=0x%04X,",depth,tostring(k),v)) - else - handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g - end - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - if hexify then - handle(format("%s %s=0x%04X,",depth,k,v)) - else - handle(format("%s %s=%s,",depth,k,v)) -- %.99g - end - else - if hexify then - handle(format("%s [%q]=0x%04X,",depth,k,v)) - else - handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g - end - end - elseif t == "string" then - if reduce and tonumber(v) then - if tk == "number" then - if hexify then - handle(format("%s [0x%04X]=%s,",depth,k,v)) - else - handle(format("%s [%s]=%s,",depth,k,v)) - end - elseif tk == "boolean" then - handle(format("%s [%s]=%s,",depth,tostring(k),v)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,v)) - else - handle(format("%s [%q]=%s,",depth,k,v)) - end - else - if tk == "number" then - if hexify then - 
handle(format("%s [0x%04X]=%q,",depth,k,v)) - else - handle(format("%s [%s]=%q,",depth,k,v)) - end - elseif tk == "boolean" then - handle(format("%s [%s]=%q,",depth,tostring(k),v)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%q,",depth,k,v)) - else - handle(format("%s [%q]=%q,",depth,k,v)) - end - end - elseif t == "table" then - if not next(v) then - if tk == "number" then - if hexify then - handle(format("%s [0x%04X]={},",depth,k)) - else - handle(format("%s [%s]={},",depth,k)) - end - elseif tk == "boolean" then - handle(format("%s [%s]={},",depth,tostring(k))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s={},",depth,k)) - else - handle(format("%s [%q]={},",depth,k)) - end - elseif inline then - local st = simple_table(v) - if st then - if tk == "number" then - if hexify then - handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", "))) - else - handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) - end - elseif tk == "boolean" then - handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", "))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s={ %s },",depth,k,concat(st,", "))) - else - handle(format("%s [%q]={ %s },",depth,k,concat(st,", "))) - end - else - do_serialize(v,k,depth,level+1) - end - else - do_serialize(v,k,depth,level+1) - end - elseif t == "boolean" then - if tk == "number" then - if hexify then - handle(format("%s [0x%04X]=%s,",depth,k,tostring(v))) - else - handle(format("%s [%s]=%s,",depth,k,tostring(v))) - end - elseif tk == "boolean" then - handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,tostring(v))) - else - handle(format("%s [%q]=%s,",depth,k,tostring(v))) - end - elseif t == "function" then - if functions then - local f = getinfo(v).what == "C" and dump(dummy) or dump(v) - -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v) - if tk == "number" then - if hexify then - handle(format("%s [0x%04X]=load(%q),",depth,k,f)) - else - handle(format("%s [%s]=load(%q),",depth,k,f)) - end - elseif tk == "boolean" then - handle(format("%s [%s]=load(%q),",depth,tostring(k),f)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=load(%q),",depth,k,f)) - else - handle(format("%s [%q]=load(%q),",depth,k,f)) - end - end - else - if tk == "number" then - if hexify then - handle(format("%s [0x%04X]=%q,",depth,k,tostring(v))) - else - handle(format("%s [%s]=%q,",depth,k,tostring(v))) - end - elseif tk == "boolean" then - handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%q,",depth,k,tostring(v))) - else - handle(format("%s [%q]=%q,",depth,k,tostring(v))) - end - end - --~ end - end - end - if level > 0 then - handle(format("%s},",depth)) - end -end - --- replacing handle by a direct t[#t+1] = ... 
(plus test) is not much --- faster (0.03 on 1.00 for zapfino.tma) - -local function serialize(_handle,root,name,specification) -- handle wins - local tname = type(name) - if type(specification) == "table" then - noquotes = specification.noquotes - hexify = specification.hexify - handle = _handle or specification.handle or print - reduce = specification.reduce or false - functions = specification.functions - compact = specification.compact - inline = specification.inline and compact - if functions == nil then - functions = true - end - if compact == nil then - compact = true - end - if inline == nil then - inline = compact - end - else - noquotes = false - hexify = false - handle = _handle or print - reduce = false - compact = true - inline = true - functions = true - end - if tname == "string" then - if name == "return" then - handle("return {") - else - handle(name .. "={") - end - elseif tname == "number" then - if hexify then - handle(format("[0x%04X]={",name)) - else - handle("[" .. name .. "]={") - end - elseif tname == "boolean" then - if name then - handle("return {") - else - handle("{") - end - else - handle("t={") - end - if root then - -- The dummy access will initialize a table that has a delayed initialization - -- using a metatable. (maybe explicitly test for metatable) - if getmetatable(root) then -- todo: make this an option, maybe even per subtable - local dummy = root._w_h_a_t_e_v_e_r_ - root._w_h_a_t_e_v_e_r_ = nil - end - -- Let's forget about empty tables. - if next(root) then - do_serialize(root,name,"",0) - end - end - handle("}") -end - --- -- This is some 20% faster than using format (because formatters are much faster) but --- -- of course, inlining the format using .. is then again faster .. anyway, as we do --- -- some pretty printing as well there is not that much to gain unless we make a 'fast' --- -- ugly variant as well. But, we would have to move the formatter to l-string then. - --- local formatters = string.formatters - --- local function do_serialize(root,name,level,indexed) --- if level > 0 then --- if indexed then --- handle(formatters["%w{"](level)) --- else --- local tn = type(name) --- if tn == "number" then --- if hexify then --- handle(formatters["%w[%04H]={"](level,name)) --- else --- handle(formatters["%w[%s]={"](level,name)) --- end --- elseif tn == "string" then --- if noquotes and not reserved[name] and lpegmatch(propername,name) then --- handle(formatters["%w%s={"](level,name)) --- else --- handle(formatters["%w[%q]={"](level,name)) --- end --- elseif tn == "boolean" then --- handle(formatters["%w[%S]={"](level,name)) --- else --- handle(formatters["%w{"](level)) --- end --- end --- end --- -- we could check for k (index) being number (cardinal) --- if root and next(root) then --- -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) --- -- if compact then --- -- -- NOT: for k=1,#root do (we need to quit at nil) --- -- for k,v in ipairs(root) do -- can we use next? 
--- -- if not first then first = k end --- -- last = last + 1 --- -- end --- -- end --- local first, last = nil, 0 --- if compact then --- last = #root --- for k=1,last do --- if root[k] == nil then --- last = k - 1 --- break --- end --- end --- if last > 0 then --- first = 1 --- end --- end --- local sk = sortedkeys(root) --- for i=1,#sk do --- local k = sk[i] --- local v = root[k] --- --~ if v == root then --- -- circular --- --~ else --- local t, tk = type(v), type(k) --- if compact and first and tk == "number" and k >= first and k <= last then --- if t == "number" then --- if hexify then --- handle(formatters["%w %04H,"](level,v)) --- else --- handle(formatters["%w %s,"](level,v)) -- %.99g --- end --- elseif t == "string" then --- if reduce and tonumber(v) then --- handle(formatters["%w %s,"](level,v)) --- else --- handle(formatters["%w %q,"](level,v)) --- end --- elseif t == "table" then --- if not next(v) then --- handle(formatters["%w {},"](level)) --- elseif inline then -- and #t > 0 --- local st = simple_table(v) --- if st then --- handle(formatters["%w { %, t },"](level,st)) --- else --- do_serialize(v,k,level+1,true) --- end --- else --- do_serialize(v,k,level+1,true) --- end --- elseif t == "boolean" then --- handle(formatters["%w %S,"](level,v)) --- elseif t == "function" then --- if functions then --- handle(formatters['%w load(%q),'](level,dump(v))) --- else --- handle(formatters['%w "function",'](level)) --- end --- else --- handle(formatters["%w %Q,"](level,v)) --- end --- elseif k == "__p__" then -- parent --- if false then --- handle(formatters["%w __p__=nil,"](level)) --- end --- elseif t == "number" then --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=%04H,"](level,k,v)) --- else --- handle(formatters["%w [%s]=%s,"](level,k,v)) -- %.99g --- end --- elseif tk == "boolean" then --- if hexify then --- handle(formatters["%w [%S]=%04H,"](level,k,v)) --- else --- handle(formatters["%w [%S]=%s,"](level,k,v)) -- %.99g --- end --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- if hexify then --- handle(formatters["%w %s=%04H,"](level,k,v)) --- else --- handle(formatters["%w %s=%s,"](level,k,v)) -- %.99g --- end --- else --- if hexify then --- handle(formatters["%w [%q]=%04H,"](level,k,v)) --- else --- handle(formatters["%w [%q]=%s,"](level,k,v)) -- %.99g --- end --- end --- elseif t == "string" then --- if reduce and tonumber(v) then --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=%s,"](level,k,v)) --- else --- handle(formatters["%w [%s]=%s,"](level,k,v)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]=%s,"](level,k,v)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s=%s,"](level,k,v)) --- else --- handle(formatters["%w [%q]=%s,"](level,k,v)) --- end --- else --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=%q,"](level,k,v)) --- else --- handle(formatters["%w [%s]=%q,"](level,k,v)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]=%q,"](level,k,v)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s=%q,"](level,k,v)) --- else --- handle(formatters["%w [%q]=%q,"](level,k,v)) --- end --- end --- elseif t == "table" then --- if not next(v) then --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]={},"](level,k)) --- else --- handle(formatters["%w [%s]={},"](level,k)) --- end --- elseif tk == 
"boolean" then --- handle(formatters["%w [%S]={},"](level,k)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s={},"](level,k)) --- else --- handle(formatters["%w [%q]={},"](level,k)) --- end --- elseif inline then --- local st = simple_table(v) --- if st then --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]={ %, t },"](level,k,st)) --- else --- handle(formatters["%w [%s]={ %, t },"](level,k,st)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]={ %, t },"](level,k,st)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s={ %, t },"](level,k,st)) --- else --- handle(formatters["%w [%q]={ %, t },"](level,k,st)) --- end --- else --- do_serialize(v,k,level+1) --- end --- else --- do_serialize(v,k,level+1) --- end --- elseif t == "boolean" then --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=%S,"](level,k,v)) --- else --- handle(formatters["%w [%s]=%S,"](level,k,v)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]=%S,"](level,k,v)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s=%S,"](level,k,v)) --- else --- handle(formatters["%w [%q]=%S,"](level,k,v)) --- end --- elseif t == "function" then --- if functions then --- local f = getinfo(v).what == "C" and dump(dummy) or dump(v) --- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v) --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=load(%q),"](level,k,f)) --- else --- handle(formatters["%w [%s]=load(%q),"](level,k,f)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]=load(%q),"](level,k,f)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s=load(%q),"](level,k,f)) --- else --- handle(formatters["%w [%q]=load(%q),"](level,k,f)) --- end --- end --- else --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=%Q,"](level,k,v)) --- else --- handle(formatters["%w [%s]=%Q,"](level,k,v)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]=%Q,"](level,k,v)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s=%Q,"](level,k,v)) --- else --- handle(formatters["%w [%q]=%Q,"](level,k,v)) --- end --- end --- --~ end --- end --- end --- if level > 0 then --- handle(formatters["%w}"](level)) --- end --- end - --- local function serialize(_handle,root,name,specification) -- handle wins --- local tname = type(name) --- if type(specification) == "table" then --- noquotes = specification.noquotes --- hexify = specification.hexify --- handle = _handle or specification.handle or print --- reduce = specification.reduce or false --- functions = specification.functions --- compact = specification.compact --- inline = specification.inline and compact --- if functions == nil then --- functions = true --- end --- if compact == nil then --- compact = true --- end --- if inline == nil then --- inline = compact --- end --- else --- noquotes = false --- hexify = false --- handle = _handle or print --- reduce = false --- compact = true --- inline = true --- functions = true --- end --- if tname == "string" then --- if name == "return" then --- handle("return {") --- else --- handle(name .. 
"={") --- end --- elseif tname == "number" then --- if hexify then --- handle(format("[0x%04X]={",name)) --- else --- handle("[" .. name .. "]={") --- end --- elseif tname == "boolean" then --- if name then --- handle("return {") --- else --- handle("{") --- end --- else --- handle("t={") --- end --- if root then --- -- The dummy access will initialize a table that has a delayed initialization --- -- using a metatable. (maybe explicitly test for metatable) --- if getmetatable(root) then -- todo: make this an option, maybe even per subtable --- local dummy = root._w_h_a_t_e_v_e_r_ --- root._w_h_a_t_e_v_e_r_ = nil --- end --- -- Let's forget about empty tables. --- if next(root) then --- do_serialize(root,name,0) --- end --- end --- handle("}") --- end - --- name: --- --- true : return { } --- false : { } --- nil : t = { } --- string : string = { } --- "return" : return { } --- number : [number] = { } - -function table.serialize(root,name,specification) - local t, n = { }, 0 - local function flush(s) - n = n + 1 - t[n] = s - end - serialize(flush,root,name,specification) - return concat(t,"\n") -end - --- local a = { e = { 1,2,3,4,5,6}, a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc" } } } --- local t = os.clock() --- for i=1,10000 do --- table.serialize(a) --- end --- print(os.clock()-t,table.serialize(a)) - -table.tohandle = serialize - --- sometimes tables are real use (zapfino extra pro is some 85M) in which --- case a stepwise serialization is nice; actually, we could consider: --- --- for line in table.serializer(root,name,reduce,noquotes) do --- ...(line) --- end --- --- so this is on the todo list - -local maxtab = 2*1024 - -function table.tofile(filename,root,name,specification) - local f = io.open(filename,'w') - if f then - if maxtab > 1 then - local t, n = { }, 0 - local function flush(s) - n = n + 1 - t[n] = s - if n > maxtab then - f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice - t, n = { }, 0 -- we could recycle t if needed - end - end - serialize(flush,root,name,specification) - f:write(concat(t,"\n"),"\n") - else - local function flush(s) - f:write(s,"\n") - end - serialize(flush,root,name,specification) - end - f:close() - io.flush() - end -end - -local function flattened(t,f,depth) -- also handles { nil, 1, nil, 2 } - if f == nil then - f = { } - depth = 0xFFFF - elseif tonumber(f) then - -- assume that only two arguments are given - depth = f - f = { } - elseif not depth then - depth = 0xFFFF - end - for k, v in next, t do - if type(k) ~= "number" then - if depth > 0 and type(v) == "table" then - flattened(v,f,depth-1) - else - f[#f+1] = v - end - end - end - for k=1,#t do - local v = t[k] - if depth > 0 and type(v) == "table" then - flattened(v,f,depth-1) - else - f[#f+1] = v - end - end - return f -end - -table.flattened = flattened - -local function unnest(t,f) -- only used in mk, for old times sake - if not f then -- and only relevant for token lists - f = { } -- this one can become obsolete - end - for i=1,#t do - local v = t[i] - if type(v) == "table" then - if type(v[1]) == "table" then - unnest(v,f) - else - f[#f+1] = v - end - else - f[#f+1] = v - end - end - return f -end - -function table.unnest(t) -- bad name - return unnest(t) -end - -local function are_equal(a,b,n,m) -- indexed - if a and b and #a == #b then - n = n or 1 - m = m or #a - for i=n,m do - local ai, bi = a[i], b[i] - if ai==bi then - -- same - elseif type(ai) == "table" and type(bi) == "table" then - if not are_equal(ai,bi) then - return 
false - end - else - return false - end - end - return true - else - return false - end -end - -local function identical(a,b) -- assumes same structure - for ka, va in next, a do - local vb = b[ka] - if va == vb then - -- same - elseif type(va) == "table" and type(vb) == "table" then - if not identical(va,vb) then - return false - end - else - return false - end - end - return true -end - -table.identical = identical -table.are_equal = are_equal - --- maybe also make a combined one - -function table.compact(t) -- remove empty tables, assumes subtables - if t then - for k, v in next, t do - if not next(v) then -- no type checking - t[k] = nil - end - end - end -end - -function table.contains(t, v) - if t then - for i=1, #t do - if t[i] == v then - return i - end - end - end - return false -end - -function table.count(t) - local n = 0 - for k, v in next, t do - n = n + 1 - end - return n -end - -function table.swapped(t,s) -- hash - local n = { } - if s then - for k, v in next, s do - n[k] = v - end - end - for k, v in next, t do - n[v] = k - end - return n -end - -function table.mirrored(t) -- hash - local n = { } - for k, v in next, t do - n[v] = k - n[k] = v - end - return n -end - -function table.reversed(t) - if t then - local tt, tn = { }, #t - if tn > 0 then - local ttn = 0 - for i=tn,1,-1 do - ttn = ttn + 1 - tt[ttn] = t[i] - end - end - return tt - end -end - -function table.reverse(t) - if t then - local n = #t - for i=1,floor(n/2) do - local j = n - i + 1 - t[i], t[j] = t[j], t[i] - end - return t - end -end - -function table.sequenced(t,sep,simple) -- hash only - if not t then - return "" - end - local n = #t - local s = { } - if n > 0 then - -- indexed - for i=1,n do - s[i] = tostring(t[i]) - end - else - -- hashed - n = 0 - for k, v in sortedhash(t) do - if simple then - if v == true then - n = n + 1 - s[n] = k - elseif v and v~= "" then - n = n + 1 - s[n] = k .. "=" .. tostring(v) - end - else - n = n + 1 - s[n] = k .. "=" .. tostring(v) - end - end - end - return concat(s,sep or " | ") -end - -function table.print(t,...) - if type(t) ~= "table" then - print(tostring(t)) - else - serialize(print,t,...) - end -end - -setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end) - --- -- -- obsolete but we keep them for a while and might comment them later -- -- -- - --- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack) - -function table.sub(t,i,j) - return { unpack(t,i,j) } -end - --- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice) - -function table.is_empty(t) - return not t or not next(t) -end - -function table.has_one_entry(t) - return t and not next(t,next(t)) -end - --- new - -function table.loweredkeys(t) -- maybe utf - local l = { } - for k, v in next, t do - l[lower(k)] = v - end - return l -end - --- new, might move (maybe duplicate) - -function table.unique(old) - local hash = { } - local new = { } - local n = 0 - for i=1,#old do - local oi = old[i] - if not hash[oi] then - n = n + 1 - new[n] = oi - hash[oi] = true - end - end - return new -end - -function table.sorted(t,...) - sort(t,...) 
- return t -- still sorts in-place -end +if not modules then modules = { } end modules ['l-table'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local type, next, tostring, tonumber, ipairs, select = type, next, tostring, tonumber, ipairs, select +local table, string = table, string +local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove +local format, lower, dump = string.format, string.lower, string.dump +local getmetatable, setmetatable = getmetatable, setmetatable +local getinfo = debug.getinfo +local lpegmatch, patterns = lpeg.match, lpeg.patterns +local floor = math.floor + +-- extra functions, some might go (when not used) + +local stripper = patterns.stripper + +function table.strip(tab) + local lst, l = { }, 0 + for i=1,#tab do + local s = lpegmatch(stripper,tab[i]) or "" + if s == "" then + -- skip this one + else + l = l + 1 + lst[l] = s + end + end + return lst +end + +function table.keys(t) + if t then + local keys, k = { }, 0 + for key, _ in next, t do + k = k + 1 + keys[k] = key + end + return keys + else + return { } + end +end + +local function compare(a,b) + local ta, tb = type(a), type(b) -- needed, else 11 < 2 + if ta == tb then + return a < b + else + return tostring(a) < tostring(b) + end +end + +local function sortedkeys(tab) + if tab then + local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed + for key,_ in next, tab do + s = s + 1 + srt[s] = key + if category == 3 then + -- no further check + else + local tkey = type(key) + if tkey == "string" then + category = (category == 2 and 3) or 1 + elseif tkey == "number" then + category = (category == 1 and 3) or 2 + else + category = 3 + end + end + end + if category == 0 or category == 3 then + sort(srt,compare) + else + sort(srt) + end + return srt + else + return { } + end +end + +local function sortedhashkeys(tab,cmp) -- fast one + if tab then + local srt, s = { }, 0 + for key,_ in next, tab do + if key then + s= s + 1 + srt[s] = key + end + end + sort(srt,cmp) + return srt + else + return { } + end +end + +function table.allkeys(t) + local keys = { } + for k, v in next, t do + for k, v in next, v do + keys[k] = true + end + end + return sortedkeys(keys) +end + +table.sortedkeys = sortedkeys +table.sortedhashkeys = sortedhashkeys + +local function nothing() end + +local function sortedhash(t,cmp) + if t then + local s + if cmp then + -- it would be nice if the sort function would accept a third argument (or nicer, an optional first) + s = sortedhashkeys(t,function(a,b) return cmp(t,a,b) end) + else + s = sortedkeys(t) -- the robust one + end + local n = 0 + local function kv(s) + n = n + 1 + local k = s[n] + return k, t[k] + end + return kv, s + else + return nothing + end +end + +table.sortedhash = sortedhash +table.sortedpairs = sortedhash -- obsolete + +function table.append(t,list) + local n = #t + for i=1,#list do + n = n + 1 + t[n] = list[i] + end + return t +end + +function table.prepend(t, list) + local nl = #list + local nt = nl + #t + for i=#t,1,-1 do + t[nt] = t[i] + nt = nt - 1 + end + for i=1,#list do + t[i] = list[i] + end + return t +end + +-- function table.merge(t, ...) -- first one is target +-- t = t or { } +-- local lst = { ... } +-- for i=1,#lst do +-- for k, v in next, lst[i] do +-- t[k] = v +-- end +-- end +-- return t +-- end + +function table.merge(t, ...) 
-- first one is target + t = t or { } + for i=1,select("#",...) do + for k, v in next, (select(i,...)) do + t[k] = v + end + end + return t +end + +-- function table.merged(...) +-- local tmp, lst = { }, { ... } +-- for i=1,#lst do +-- for k, v in next, lst[i] do +-- tmp[k] = v +-- end +-- end +-- return tmp +-- end + +function table.merged(...) + local t = { } + for i=1,select("#",...) do + for k, v in next, (select(i,...)) do + t[k] = v + end + end + return t +end + +-- function table.imerge(t, ...) +-- local lst, nt = { ... }, #t +-- for i=1,#lst do +-- local nst = lst[i] +-- for j=1,#nst do +-- nt = nt + 1 +-- t[nt] = nst[j] +-- end +-- end +-- return t +-- end + +function table.imerge(t, ...) + local nt = #t + for i=1,select("#",...) do + local nst = select(i,...) + for j=1,#nst do + nt = nt + 1 + t[nt] = nst[j] + end + end + return t +end + +-- function table.imerged(...) +-- local tmp, ntmp, lst = { }, 0, {...} +-- for i=1,#lst do +-- local nst = lst[i] +-- for j=1,#nst do +-- ntmp = ntmp + 1 +-- tmp[ntmp] = nst[j] +-- end +-- end +-- return tmp +-- end + +function table.imerged(...) + local tmp, ntmp = { }, 0 + for i=1,select("#",...) do + local nst = select(i,...) + for j=1,#nst do + ntmp = ntmp + 1 + tmp[ntmp] = nst[j] + end + end + return tmp +end + +local function fastcopy(old,metatabletoo) -- fast one + if old then + local new = { } + for k, v in next, old do + if type(v) == "table" then + new[k] = fastcopy(v,metatabletoo) -- was just table.copy + else + new[k] = v + end + end + if metatabletoo then + -- optional second arg + local mt = getmetatable(old) + if mt then + setmetatable(new,mt) + end + end + return new + else + return { } + end +end + +-- todo : copy without metatable + +local function copy(t, tables) -- taken from lua wiki, slightly adapted + tables = tables or { } + local tcopy = {} + if not tables[t] then + tables[t] = tcopy + end + for i,v in next, t do -- brrr, what happens with sparse indexed + if type(i) == "table" then + if tables[i] then + i = tables[i] + else + i = copy(i, tables) + end + end + if type(v) ~= "table" then + tcopy[i] = v + elseif tables[v] then + tcopy[i] = tables[v] + else + tcopy[i] = copy(v, tables) + end + end + local mt = getmetatable(t) + if mt then + setmetatable(tcopy,mt) + end + return tcopy +end + +table.fastcopy = fastcopy +table.copy = copy + +function table.derive(parent) -- for the moment not public + local child = { } + if parent then + setmetatable(child,{ __index = parent }) + end + return child +end + +function table.tohash(t,value) + local h = { } + if t then + if value == nil then value = true end + for _, v in next, t do -- no ipairs here + h[v] = value + end + end + return h +end + +function table.fromhash(t) + local hsh, h = { }, 0 + for k, v in next, t do -- no ipairs here + if v then + h = h + 1 + hsh[h] = k + end + end + return hsh +end + +local noquotes, hexify, handle, reduce, compact, inline, functions + +local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key + 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if', + 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while', +} + +local function simple_table(t) + if #t > 0 then + local n = 0 + for _,v in next, t do + n = n + 1 + end + if n == #t then + local tt, nt = { }, 0 + for i=1,#t do + local v = t[i] + local tv = type(v) + if tv == "number" then + nt = nt + 1 + if hexify then + tt[nt] = format("0x%04X",v) + else + tt[nt] = tostring(v) -- tostring not needed + 
end + elseif tv == "boolean" then + nt = nt + 1 + tt[nt] = tostring(v) + elseif tv == "string" then + nt = nt + 1 + tt[nt] = format("%q",v) + else + tt = nil + break + end + end + return tt + end + end + return nil +end + +-- Because this is a core function of mkiv I moved some function calls +-- inline. +-- +-- twice as fast in a test: +-- +-- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) ) + +-- problem: there no good number_to_string converter with the best resolution + +-- probably using .. is faster than format +-- maybe split in a few cases (yes/no hexify) + +-- todo: %g faster on numbers than %s + +-- we can speed this up with repeaters and formatters (is indeed faster) + +local propername = patterns.propername -- was find(name,"^%a[%w%_]*$") + +local function dummy() end + +local function do_serialize(root,name,depth,level,indexed) + if level > 0 then + depth = depth .. " " + if indexed then + handle(format("%s{",depth)) + else + local tn = type(name) + if tn == "number" then + if hexify then + handle(format("%s[0x%04X]={",depth,name)) + else + handle(format("%s[%s]={",depth,name)) + end + elseif tn == "string" then + if noquotes and not reserved[name] and lpegmatch(propername,name) then + handle(format("%s%s={",depth,name)) + else + handle(format("%s[%q]={",depth,name)) + end + elseif tn == "boolean" then + handle(format("%s[%s]={",depth,tostring(name))) + else + handle(format("%s{",depth)) + end + end + end + -- we could check for k (index) being number (cardinal) + if root and next(root) then + -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) + -- if compact then + -- -- NOT: for k=1,#root do (we need to quit at nil) + -- for k,v in ipairs(root) do -- can we use next? 
+ -- if not first then first = k end + -- last = last + 1 + -- end + -- end + local first, last = nil, 0 + if compact then + last = #root + for k=1,last do + if root[k] == nil then + last = k - 1 + break + end + end + if last > 0 then + first = 1 + end + end + local sk = sortedkeys(root) + for i=1,#sk do + local k = sk[i] + local v = root[k] + --~ if v == root then + -- circular + --~ else + local t, tk = type(v), type(k) + if compact and first and tk == "number" and k >= first and k <= last then + if t == "number" then + if hexify then + handle(format("%s 0x%04X,",depth,v)) + else + handle(format("%s %s,",depth,v)) -- %.99g + end + elseif t == "string" then + if reduce and tonumber(v) then + handle(format("%s %s,",depth,v)) + else + handle(format("%s %q,",depth,v)) + end + elseif t == "table" then + if not next(v) then + handle(format("%s {},",depth)) + elseif inline then -- and #t > 0 + local st = simple_table(v) + if st then + handle(format("%s { %s },",depth,concat(st,", "))) + else + do_serialize(v,k,depth,level+1,true) + end + else + do_serialize(v,k,depth,level+1,true) + end + elseif t == "boolean" then + handle(format("%s %s,",depth,tostring(v))) + elseif t == "function" then + if functions then + handle(format('%s load(%q),',depth,dump(v))) + else + handle(format('%s "function",',depth)) + end + else + handle(format("%s %q,",depth,tostring(v))) + end + elseif k == "__p__" then -- parent + if false then + handle(format("%s __p__=nil,",depth)) + end + elseif t == "number" then + if tk == "number" then + if hexify then + handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) + else + handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g + end + elseif tk == "boolean" then + if hexify then + handle(format("%s [%s]=0x%04X,",depth,tostring(k),v)) + else + handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g + end + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + if hexify then + handle(format("%s %s=0x%04X,",depth,k,v)) + else + handle(format("%s %s=%s,",depth,k,v)) -- %.99g + end + else + if hexify then + handle(format("%s [%q]=0x%04X,",depth,k,v)) + else + handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g + end + end + elseif t == "string" then + if reduce and tonumber(v) then + if tk == "number" then + if hexify then + handle(format("%s [0x%04X]=%s,",depth,k,v)) + else + handle(format("%s [%s]=%s,",depth,k,v)) + end + elseif tk == "boolean" then + handle(format("%s [%s]=%s,",depth,tostring(k),v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,v)) + else + handle(format("%s [%q]=%s,",depth,k,v)) + end + else + if tk == "number" then + if hexify then + handle(format("%s [0x%04X]=%q,",depth,k,v)) + else + handle(format("%s [%s]=%q,",depth,k,v)) + end + elseif tk == "boolean" then + handle(format("%s [%s]=%q,",depth,tostring(k),v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,v)) + else + handle(format("%s [%q]=%q,",depth,k,v)) + end + end + elseif t == "table" then + if not next(v) then + if tk == "number" then + if hexify then + handle(format("%s [0x%04X]={},",depth,k)) + else + handle(format("%s [%s]={},",depth,k)) + end + elseif tk == "boolean" then + handle(format("%s [%s]={},",depth,tostring(k))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={},",depth,k)) + else + handle(format("%s [%q]={},",depth,k)) + end + elseif inline then + local st = simple_table(v) + if st then + if tk == "number" then + if 
hexify then + handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) + end + elseif tk == "boolean" then + handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", "))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%q]={ %s },",depth,k,concat(st,", "))) + end + else + do_serialize(v,k,depth,level+1) + end + else + do_serialize(v,k,depth,level+1) + end + elseif t == "boolean" then + if tk == "number" then + if hexify then + handle(format("%s [0x%04X]=%s,",depth,k,tostring(v))) + else + handle(format("%s [%s]=%s,",depth,k,tostring(v))) + end + elseif tk == "boolean" then + handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,tostring(v))) + else + handle(format("%s [%q]=%s,",depth,k,tostring(v))) + end + elseif t == "function" then + if functions then + local f = getinfo(v).what == "C" and dump(dummy) or dump(v) + -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v) + if tk == "number" then + if hexify then + handle(format("%s [0x%04X]=load(%q),",depth,k,f)) + else + handle(format("%s [%s]=load(%q),",depth,k,f)) + end + elseif tk == "boolean" then + handle(format("%s [%s]=load(%q),",depth,tostring(k),f)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=load(%q),",depth,k,f)) + else + handle(format("%s [%q]=load(%q),",depth,k,f)) + end + end + else + if tk == "number" then + if hexify then + handle(format("%s [0x%04X]=%q,",depth,k,tostring(v))) + else + handle(format("%s [%s]=%q,",depth,k,tostring(v))) + end + elseif tk == "boolean" then + handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,tostring(v))) + else + handle(format("%s [%q]=%q,",depth,k,tostring(v))) + end + end + --~ end + end + end + if level > 0 then + handle(format("%s},",depth)) + end +end + +-- replacing handle by a direct t[#t+1] = ... (plus test) is not much +-- faster (0.03 on 1.00 for zapfino.tma) + +local function serialize(_handle,root,name,specification) -- handle wins + local tname = type(name) + if type(specification) == "table" then + noquotes = specification.noquotes + hexify = specification.hexify + handle = _handle or specification.handle or print + reduce = specification.reduce or false + functions = specification.functions + compact = specification.compact + inline = specification.inline and compact + if functions == nil then + functions = true + end + if compact == nil then + compact = true + end + if inline == nil then + inline = compact + end + else + noquotes = false + hexify = false + handle = _handle or print + reduce = false + compact = true + inline = true + functions = true + end + if tname == "string" then + if name == "return" then + handle("return {") + else + handle(name .. "={") + end + elseif tname == "number" then + if hexify then + handle(format("[0x%04X]={",name)) + else + handle("[" .. name .. "]={") + end + elseif tname == "boolean" then + if name then + handle("return {") + else + handle("{") + end + else + handle("t={") + end + if root then + -- The dummy access will initialize a table that has a delayed initialization + -- using a metatable. 
(maybe explicitly test for metatable) + if getmetatable(root) then -- todo: make this an option, maybe even per subtable + local dummy = root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_ = nil + end + -- Let's forget about empty tables. + if next(root) then + do_serialize(root,name,"",0) + end + end + handle("}") +end + +-- -- This is some 20% faster than using format (because formatters are much faster) but +-- -- of course, inlining the format using .. is then again faster .. anyway, as we do +-- -- some pretty printing as well there is not that much to gain unless we make a 'fast' +-- -- ugly variant as well. But, we would have to move the formatter to l-string then. + +-- local formatters = string.formatters + +-- local function do_serialize(root,name,level,indexed) +-- if level > 0 then +-- if indexed then +-- handle(formatters["%w{"](level)) +-- else +-- local tn = type(name) +-- if tn == "number" then +-- if hexify then +-- handle(formatters["%w[%04H]={"](level,name)) +-- else +-- handle(formatters["%w[%s]={"](level,name)) +-- end +-- elseif tn == "string" then +-- if noquotes and not reserved[name] and lpegmatch(propername,name) then +-- handle(formatters["%w%s={"](level,name)) +-- else +-- handle(formatters["%w[%q]={"](level,name)) +-- end +-- elseif tn == "boolean" then +-- handle(formatters["%w[%S]={"](level,name)) +-- else +-- handle(formatters["%w{"](level)) +-- end +-- end +-- end +-- -- we could check for k (index) being number (cardinal) +-- if root and next(root) then +-- -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) +-- -- if compact then +-- -- -- NOT: for k=1,#root do (we need to quit at nil) +-- -- for k,v in ipairs(root) do -- can we use next? +-- -- if not first then first = k end +-- -- last = last + 1 +-- -- end +-- -- end +-- local first, last = nil, 0 +-- if compact then +-- last = #root +-- for k=1,last do +-- if root[k] == nil then +-- last = k - 1 +-- break +-- end +-- end +-- if last > 0 then +-- first = 1 +-- end +-- end +-- local sk = sortedkeys(root) +-- for i=1,#sk do +-- local k = sk[i] +-- local v = root[k] +-- --~ if v == root then +-- -- circular +-- --~ else +-- local t, tk = type(v), type(k) +-- if compact and first and tk == "number" and k >= first and k <= last then +-- if t == "number" then +-- if hexify then +-- handle(formatters["%w %04H,"](level,v)) +-- else +-- handle(formatters["%w %s,"](level,v)) -- %.99g +-- end +-- elseif t == "string" then +-- if reduce and tonumber(v) then +-- handle(formatters["%w %s,"](level,v)) +-- else +-- handle(formatters["%w %q,"](level,v)) +-- end +-- elseif t == "table" then +-- if not next(v) then +-- handle(formatters["%w {},"](level)) +-- elseif inline then -- and #t > 0 +-- local st = simple_table(v) +-- if st then +-- handle(formatters["%w { %, t },"](level,st)) +-- else +-- do_serialize(v,k,level+1,true) +-- end +-- else +-- do_serialize(v,k,level+1,true) +-- end +-- elseif t == "boolean" then +-- handle(formatters["%w %S,"](level,v)) +-- elseif t == "function" then +-- if functions then +-- handle(formatters['%w load(%q),'](level,dump(v))) +-- else +-- handle(formatters['%w "function",'](level)) +-- end +-- else +-- handle(formatters["%w %Q,"](level,v)) +-- end +-- elseif k == "__p__" then -- parent +-- if false then +-- handle(formatters["%w __p__=nil,"](level)) +-- end +-- elseif t == "number" then +-- if tk == "number" then +-- if hexify then +-- handle(formatters["%w [%04H]=%04H,"](level,k,v)) +-- else +-- handle(formatters["%w 
[%s]=%s,"](level,k,v)) -- %.99g +-- end +-- elseif tk == "boolean" then +-- if hexify then +-- handle(formatters["%w [%S]=%04H,"](level,k,v)) +-- else +-- handle(formatters["%w [%S]=%s,"](level,k,v)) -- %.99g +-- end +-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then +-- if hexify then +-- handle(formatters["%w %s=%04H,"](level,k,v)) +-- else +-- handle(formatters["%w %s=%s,"](level,k,v)) -- %.99g +-- end +-- else +-- if hexify then +-- handle(formatters["%w [%q]=%04H,"](level,k,v)) +-- else +-- handle(formatters["%w [%q]=%s,"](level,k,v)) -- %.99g +-- end +-- end +-- elseif t == "string" then +-- if reduce and tonumber(v) then +-- if tk == "number" then +-- if hexify then +-- handle(formatters["%w [%04H]=%s,"](level,k,v)) +-- else +-- handle(formatters["%w [%s]=%s,"](level,k,v)) +-- end +-- elseif tk == "boolean" then +-- handle(formatters["%w [%S]=%s,"](level,k,v)) +-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then +-- handle(formatters["%w %s=%s,"](level,k,v)) +-- else +-- handle(formatters["%w [%q]=%s,"](level,k,v)) +-- end +-- else +-- if tk == "number" then +-- if hexify then +-- handle(formatters["%w [%04H]=%q,"](level,k,v)) +-- else +-- handle(formatters["%w [%s]=%q,"](level,k,v)) +-- end +-- elseif tk == "boolean" then +-- handle(formatters["%w [%S]=%q,"](level,k,v)) +-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then +-- handle(formatters["%w %s=%q,"](level,k,v)) +-- else +-- handle(formatters["%w [%q]=%q,"](level,k,v)) +-- end +-- end +-- elseif t == "table" then +-- if not next(v) then +-- if tk == "number" then +-- if hexify then +-- handle(formatters["%w [%04H]={},"](level,k)) +-- else +-- handle(formatters["%w [%s]={},"](level,k)) +-- end +-- elseif tk == "boolean" then +-- handle(formatters["%w [%S]={},"](level,k)) +-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then +-- handle(formatters["%w %s={},"](level,k)) +-- else +-- handle(formatters["%w [%q]={},"](level,k)) +-- end +-- elseif inline then +-- local st = simple_table(v) +-- if st then +-- if tk == "number" then +-- if hexify then +-- handle(formatters["%w [%04H]={ %, t },"](level,k,st)) +-- else +-- handle(formatters["%w [%s]={ %, t },"](level,k,st)) +-- end +-- elseif tk == "boolean" then +-- handle(formatters["%w [%S]={ %, t },"](level,k,st)) +-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then +-- handle(formatters["%w %s={ %, t },"](level,k,st)) +-- else +-- handle(formatters["%w [%q]={ %, t },"](level,k,st)) +-- end +-- else +-- do_serialize(v,k,level+1) +-- end +-- else +-- do_serialize(v,k,level+1) +-- end +-- elseif t == "boolean" then +-- if tk == "number" then +-- if hexify then +-- handle(formatters["%w [%04H]=%S,"](level,k,v)) +-- else +-- handle(formatters["%w [%s]=%S,"](level,k,v)) +-- end +-- elseif tk == "boolean" then +-- handle(formatters["%w [%S]=%S,"](level,k,v)) +-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then +-- handle(formatters["%w %s=%S,"](level,k,v)) +-- else +-- handle(formatters["%w [%q]=%S,"](level,k,v)) +-- end +-- elseif t == "function" then +-- if functions then +-- local f = getinfo(v).what == "C" and dump(dummy) or dump(v) +-- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) 
end) or dump(v) +-- if tk == "number" then +-- if hexify then +-- handle(formatters["%w [%04H]=load(%q),"](level,k,f)) +-- else +-- handle(formatters["%w [%s]=load(%q),"](level,k,f)) +-- end +-- elseif tk == "boolean" then +-- handle(formatters["%w [%S]=load(%q),"](level,k,f)) +-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then +-- handle(formatters["%w %s=load(%q),"](level,k,f)) +-- else +-- handle(formatters["%w [%q]=load(%q),"](level,k,f)) +-- end +-- end +-- else +-- if tk == "number" then +-- if hexify then +-- handle(formatters["%w [%04H]=%Q,"](level,k,v)) +-- else +-- handle(formatters["%w [%s]=%Q,"](level,k,v)) +-- end +-- elseif tk == "boolean" then +-- handle(formatters["%w [%S]=%Q,"](level,k,v)) +-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then +-- handle(formatters["%w %s=%Q,"](level,k,v)) +-- else +-- handle(formatters["%w [%q]=%Q,"](level,k,v)) +-- end +-- end +-- --~ end +-- end +-- end +-- if level > 0 then +-- handle(formatters["%w}"](level)) +-- end +-- end + +-- local function serialize(_handle,root,name,specification) -- handle wins +-- local tname = type(name) +-- if type(specification) == "table" then +-- noquotes = specification.noquotes +-- hexify = specification.hexify +-- handle = _handle or specification.handle or print +-- reduce = specification.reduce or false +-- functions = specification.functions +-- compact = specification.compact +-- inline = specification.inline and compact +-- if functions == nil then +-- functions = true +-- end +-- if compact == nil then +-- compact = true +-- end +-- if inline == nil then +-- inline = compact +-- end +-- else +-- noquotes = false +-- hexify = false +-- handle = _handle or print +-- reduce = false +-- compact = true +-- inline = true +-- functions = true +-- end +-- if tname == "string" then +-- if name == "return" then +-- handle("return {") +-- else +-- handle(name .. "={") +-- end +-- elseif tname == "number" then +-- if hexify then +-- handle(format("[0x%04X]={",name)) +-- else +-- handle("[" .. name .. "]={") +-- end +-- elseif tname == "boolean" then +-- if name then +-- handle("return {") +-- else +-- handle("{") +-- end +-- else +-- handle("t={") +-- end +-- if root then +-- -- The dummy access will initialize a table that has a delayed initialization +-- -- using a metatable. (maybe explicitly test for metatable) +-- if getmetatable(root) then -- todo: make this an option, maybe even per subtable +-- local dummy = root._w_h_a_t_e_v_e_r_ +-- root._w_h_a_t_e_v_e_r_ = nil +-- end +-- -- Let's forget about empty tables. 
+-- if next(root) then +-- do_serialize(root,name,0) +-- end +-- end +-- handle("}") +-- end + +-- name: +-- +-- true : return { } +-- false : { } +-- nil : t = { } +-- string : string = { } +-- "return" : return { } +-- number : [number] = { } + +function table.serialize(root,name,specification) + local t, n = { }, 0 + local function flush(s) + n = n + 1 + t[n] = s + end + serialize(flush,root,name,specification) + return concat(t,"\n") +end + +-- local a = { e = { 1,2,3,4,5,6}, a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc" } } } +-- local t = os.clock() +-- for i=1,10000 do +-- table.serialize(a) +-- end +-- print(os.clock()-t,table.serialize(a)) + +table.tohandle = serialize + +-- sometimes tables are real use (zapfino extra pro is some 85M) in which +-- case a stepwise serialization is nice; actually, we could consider: +-- +-- for line in table.serializer(root,name,reduce,noquotes) do +-- ...(line) +-- end +-- +-- so this is on the todo list + +local maxtab = 2*1024 + +function table.tofile(filename,root,name,specification) + local f = io.open(filename,'w') + if f then + if maxtab > 1 then + local t, n = { }, 0 + local function flush(s) + n = n + 1 + t[n] = s + if n > maxtab then + f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice + t, n = { }, 0 -- we could recycle t if needed + end + end + serialize(flush,root,name,specification) + f:write(concat(t,"\n"),"\n") + else + local function flush(s) + f:write(s,"\n") + end + serialize(flush,root,name,specification) + end + f:close() + io.flush() + end +end + +local function flattened(t,f,depth) -- also handles { nil, 1, nil, 2 } + if f == nil then + f = { } + depth = 0xFFFF + elseif tonumber(f) then + -- assume that only two arguments are given + depth = f + f = { } + elseif not depth then + depth = 0xFFFF + end + for k, v in next, t do + if type(k) ~= "number" then + if depth > 0 and type(v) == "table" then + flattened(v,f,depth-1) + else + f[#f+1] = v + end + end + end + for k=1,#t do + local v = t[k] + if depth > 0 and type(v) == "table" then + flattened(v,f,depth-1) + else + f[#f+1] = v + end + end + return f +end + +table.flattened = flattened + +local function unnest(t,f) -- only used in mk, for old times sake + if not f then -- and only relevant for token lists + f = { } -- this one can become obsolete + end + for i=1,#t do + local v = t[i] + if type(v) == "table" then + if type(v[1]) == "table" then + unnest(v,f) + else + f[#f+1] = v + end + else + f[#f+1] = v + end + end + return f +end + +function table.unnest(t) -- bad name + return unnest(t) +end + +local function are_equal(a,b,n,m) -- indexed + if a and b and #a == #b then + n = n or 1 + m = m or #a + for i=n,m do + local ai, bi = a[i], b[i] + if ai==bi then + -- same + elseif type(ai) == "table" and type(bi) == "table" then + if not are_equal(ai,bi) then + return false + end + else + return false + end + end + return true + else + return false + end +end + +local function identical(a,b) -- assumes same structure + for ka, va in next, a do + local vb = b[ka] + if va == vb then + -- same + elseif type(va) == "table" and type(vb) == "table" then + if not identical(va,vb) then + return false + end + else + return false + end + end + return true +end + +table.identical = identical +table.are_equal = are_equal + +-- maybe also make a combined one + +function table.compact(t) -- remove empty tables, assumes subtables + if t then + for k, v in next, t do + if not next(v) then -- no type checking + t[k] = nil + end + end + end 
+end + +function table.contains(t, v) + if t then + for i=1, #t do + if t[i] == v then + return i + end + end + end + return false +end + +function table.count(t) + local n = 0 + for k, v in next, t do + n = n + 1 + end + return n +end + +function table.swapped(t,s) -- hash + local n = { } + if s then + for k, v in next, s do + n[k] = v + end + end + for k, v in next, t do + n[v] = k + end + return n +end + +function table.mirrored(t) -- hash + local n = { } + for k, v in next, t do + n[v] = k + n[k] = v + end + return n +end + +function table.reversed(t) + if t then + local tt, tn = { }, #t + if tn > 0 then + local ttn = 0 + for i=tn,1,-1 do + ttn = ttn + 1 + tt[ttn] = t[i] + end + end + return tt + end +end + +function table.reverse(t) + if t then + local n = #t + for i=1,floor(n/2) do + local j = n - i + 1 + t[i], t[j] = t[j], t[i] + end + return t + end +end + +function table.sequenced(t,sep,simple) -- hash only + if not t then + return "" + end + local n = #t + local s = { } + if n > 0 then + -- indexed + for i=1,n do + s[i] = tostring(t[i]) + end + else + -- hashed + n = 0 + for k, v in sortedhash(t) do + if simple then + if v == true then + n = n + 1 + s[n] = k + elseif v and v~= "" then + n = n + 1 + s[n] = k .. "=" .. tostring(v) + end + else + n = n + 1 + s[n] = k .. "=" .. tostring(v) + end + end + end + return concat(s,sep or " | ") +end + +function table.print(t,...) + if type(t) ~= "table" then + print(tostring(t)) + else + serialize(print,t,...) + end +end + +setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end) + +-- -- -- obsolete but we keep them for a while and might comment them later -- -- -- + +-- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack) + +function table.sub(t,i,j) + return { unpack(t,i,j) } +end + +-- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice) + +function table.is_empty(t) + return not t or not next(t) +end + +function table.has_one_entry(t) + return t and not next(t,next(t)) +end + +-- new + +function table.loweredkeys(t) -- maybe utf + local l = { } + for k, v in next, t do + l[lower(k)] = v + end + return l +end + +-- new, might move (maybe duplicate) + +function table.unique(old) + local hash = { } + local new = { } + local n = 0 + for i=1,#old do + local oi = old[i] + if not hash[oi] then + n = n + 1 + new[n] = oi + hash[oi] = true + end + end + return new +end + +function table.sorted(t,...) + sort(t,...) 
+ return t -- still sorts in-place +end diff --git a/tex/context/base/l-unicode.lua b/tex/context/base/l-unicode.lua index 813ffd54b..d38d4cbd1 100644 --- a/tex/context/base/l-unicode.lua +++ b/tex/context/base/l-unicode.lua @@ -1,942 +1,942 @@ -if not modules then modules = { } end modules ['l-unicode'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this module will be reorganized - --- todo: utf.sub replacement (used in syst-aux) - --- we put these in the utf namespace: - -utf = utf or (unicode and unicode.utf8) or { } - -utf.characters = utf.characters or string.utfcharacters -utf.values = utf.values or string.utfvalues - --- string.utfvalues --- string.utfcharacters --- string.characters --- string.characterpairs --- string.bytes --- string.bytepairs - -local type = type -local char, byte, format, sub = string.char, string.byte, string.format, string.sub -local concat = table.concat -local P, C, R, Cs, Ct, Cmt, Cc, Carg, Cp = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Carg, lpeg.Cp -local lpegmatch, patterns = lpeg.match, lpeg.patterns - -local bytepairs = string.bytepairs - -local finder = lpeg.finder -local replacer = lpeg.replacer - -local utfvalues = utf.values -local utfgmatch = utf.gmatch -- not always present - -local p_utftype = patterns.utftype -local p_utfoffset = patterns.utfoffset -local p_utf8char = patterns.utf8char -local p_utf8byte = patterns.utf8byte -local p_utfbom = patterns.utfbom -local p_newline = patterns.newline -local p_whitespace = patterns.whitespace - -if not unicode then - - unicode = { utf = utf } -- for a while - -end - -if not utf.char then - - local floor, char = math.floor, string.char - - function utf.char(n) - if n < 0x80 then - -- 0aaaaaaa : 0x80 - return char(n) - elseif n < 0x800 then - -- 110bbbaa : 0xC0 : n >> 6 - -- 10aaaaaa : 0x80 : n & 0x3F - return char( - 0xC0 + floor(n/0x40), - 0x80 + (n % 0x40) - ) - elseif n < 0x10000 then - -- 1110bbbb : 0xE0 : n >> 12 - -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F - -- 10aaaaaa : 0x80 : n & 0x3F - return char( - 0xE0 + floor(n/0x1000), - 0x80 + (floor(n/0x40) % 0x40), - 0x80 + (n % 0x40) - ) - elseif n < 0x200000 then - -- 11110ccc : 0xF0 : n >> 18 - -- 10ccbbbb : 0x80 : (n >> 12) & 0x3F - -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F - -- 10aaaaaa : 0x80 : n & 0x3F - -- dddd : ccccc - 1 - return char( - 0xF0 + floor(n/0x40000), - 0x80 + (floor(n/0x1000) % 0x40), - 0x80 + (floor(n/0x40) % 0x40), - 0x80 + (n % 0x40) - ) - else - return "" - end - end - -end - -if not utf.byte then - - local utf8byte = patterns.utf8byte - - function utf.byte(c) - return lpegmatch(utf8byte,c) - end - -end - -local utfchar, utfbyte = utf.char, utf.byte - --- As we want to get rid of the (unmaintained) utf library we implement our own --- variants (in due time an independent module): - -function utf.filetype(data) - return data and lpegmatch(p_utftype,data) or "unknown" -end - -local toentities = Cs ( - ( - patterns.utf8one - + ( - patterns.utf8two - + patterns.utf8three - + patterns.utf8four - ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end - )^0 -) - -patterns.toentities = toentities - -function utf.toentities(str) - return lpegmatch(toentities,str) -end - --- local utfchr = { } -- 60K -> 2.638 M extra mem but currently not called that often (on latin) --- --- setmetatable(utfchr, { __index 
= function(t,k) local v = utfchar(k) t[k] = v return v end } ) --- --- collectgarbage("collect") --- local u = collectgarbage("count")*1024 --- local t = os.clock() --- for i=1,1000 do --- for i=1,600 do --- local a = utfchr[i] --- end --- end --- print(os.clock()-t,collectgarbage("count")*1024-u) - --- collectgarbage("collect") --- local t = os.clock() --- for i=1,1000 do --- for i=1,600 do --- local a = utfchar(i) --- end --- end --- print(os.clock()-t,collectgarbage("count")*1024-u) - --- local byte = string.byte --- local utfchar = utf.char - -local one = P(1) -local two = C(1) * C(1) -local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1) - --- actually one of them is already utf ... sort of useless this one - --- function utf.char(n) --- if n < 0x80 then --- return char(n) --- elseif n < 0x800 then --- return char( --- 0xC0 + floor(n/0x40), --- 0x80 + (n % 0x40) --- ) --- elseif n < 0x10000 then --- return char( --- 0xE0 + floor(n/0x1000), --- 0x80 + (floor(n/0x40) % 0x40), --- 0x80 + (n % 0x40) --- ) --- elseif n < 0x40000 then --- return char( --- 0xF0 + floor(n/0x40000), --- 0x80 + floor(n/0x1000), --- 0x80 + (floor(n/0x40) % 0x40), --- 0x80 + (n % 0x40) --- ) --- else --- -- return char( --- -- 0xF1 + floor(n/0x1000000), --- -- 0x80 + floor(n/0x40000), --- -- 0x80 + floor(n/0x1000), --- -- 0x80 + (floor(n/0x40) % 0x40), --- -- 0x80 + (n % 0x40) --- -- ) --- return "?" --- end --- end --- --- merge into: - -local pattern = P("\254\255") * Cs( ( - four / function(a,b,c,d) - local ab = 0xFF * byte(a) + byte(b) - local cd = 0xFF * byte(c) + byte(d) - return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000) - end - + two / function(a,b) - return utfchar(byte(a)*256 + byte(b)) - end - + one - )^1 ) - + P("\255\254") * Cs( ( - four / function(b,a,d,c) - local ab = 0xFF * byte(a) + byte(b) - local cd = 0xFF * byte(c) + byte(d) - return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000) - end - + two / function(b,a) - return utfchar(byte(a)*256 + byte(b)) - end - + one - )^1 ) - -function string.toutf(s) -- in string namespace - return lpegmatch(pattern,s) or s -- todo: utf32 -end - -local validatedutf = Cs ( - ( - patterns.utf8one - + patterns.utf8two - + patterns.utf8three - + patterns.utf8four - + P(1) / "�" - )^0 -) - -patterns.validatedutf = validatedutf - -function utf.is_valid(str) - return type(str) == "string" and lpegmatch(validatedutf,str) or false -end - -if not utf.len then - - -- -- alternative 1: 0.77 - -- - -- local utfcharcounter = utfbom^-1 * Cs((p_utf8char/'!')^0) - -- - -- function utf.len(str) - -- return #lpegmatch(utfcharcounter,str or "") - -- end - -- - -- -- alternative 2: 1.70 - -- - -- local n = 0 - -- - -- local utfcharcounter = utfbom^-1 * (p_utf8char/function() n = n + 1 end)^0 -- slow - -- - -- function utf.length(str) - -- n = 0 - -- lpegmatch(utfcharcounter,str or "") - -- return n - -- end - -- - -- -- alternative 3: 0.24 (native unicode.utf8.len: 0.047) - - -- local n = 0 - -- - -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( ( Cp() * ( - -- -- patterns.utf8one ^1 * Cc(1) - -- -- + patterns.utf8two ^1 * Cc(2) - -- -- + patterns.utf8three^1 * Cc(3) - -- -- + patterns.utf8four ^1 * Cc(4) ) * Cp() / function(f,d,t) n = n + (t - f)/d end - -- -- )^0 ) -- just as many captures as below - -- - -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( ( - -- -- (Cmt(patterns.utf8one ^1,function(_,_,s) n = n + #s return true end)) - -- -- + (Cmt(patterns.utf8two ^1,function(_,_,s) n = n + #s/2 return true end)) - -- -- + 
(Cmt(patterns.utf8three^1,function(_,_,s) n = n + #s/3 return true end)) - -- -- + (Cmt(patterns.utf8four ^1,function(_,_,s) n = n + #s/4 return true end)) - -- -- )^0 ) -- not interesting as it creates strings but sometimes faster - -- - -- -- The best so far: - -- - -- local utfcharcounter = utfbom^-1 * P ( ( - -- Cp() * (patterns.utf8one )^1 * Cp() / function(f,t) n = n + t - f end - -- + Cp() * (patterns.utf8two )^1 * Cp() / function(f,t) n = n + (t - f)/2 end - -- + Cp() * (patterns.utf8three)^1 * Cp() / function(f,t) n = n + (t - f)/3 end - -- + Cp() * (patterns.utf8four )^1 * Cp() / function(f,t) n = n + (t - f)/4 end - -- )^0 ) - - -- function utf.len(str) - -- n = 0 - -- lpegmatch(utfcharcounter,str or "") - -- return n - -- end - - local n, f = 0, 1 - - local utfcharcounter = patterns.utfbom^-1 * Cmt ( - Cc(1) * patterns.utf8one ^1 - + Cc(2) * patterns.utf8two ^1 - + Cc(3) * patterns.utf8three^1 - + Cc(4) * patterns.utf8four ^1, - function(_,t,d) -- due to Cc no string captures, so faster - n = n + (t - f)/d - f = t - return true - end - )^0 - - function utf.len(str) - n, f = 0, 1 - lpegmatch(utfcharcounter,str or "") - return n - end - - -- -- these are quite a bit slower: - - -- utfcharcounter = utfbom^-1 * (Cmt(P(1) * R("\128\191")^0, function() n = n + 1 return true end))^0 -- 50+ times slower - -- utfcharcounter = utfbom^-1 * (Cmt(P(1), function() n = n + 1 return true end) * R("\128\191")^0)^0 -- 50- times slower - -end - -utf.length = utf.len - -if not utf.sub then - - -- inefficient as lpeg just copies ^n - - -- local function sub(str,start,stop) - -- local pattern = p_utf8char^-(start-1) * C(p_utf8char^-(stop-start+1)) - -- inspect(pattern) - -- return lpegmatch(pattern,str) or "" - -- end - - -- local b, e, n, first, last = 0, 0, 0, 0, 0 - -- - -- local function slide(s,p) - -- n = n + 1 - -- if n == first then - -- b = p - -- if not last then - -- return nil - -- end - -- end - -- if n == last then - -- e = p - -- return nil - -- else - -- return p - -- end - -- end - -- - -- local pattern = Cmt(p_utf8char,slide)^0 - -- - -- function utf.sub(str,start,stop) -- todo: from the end - -- if not start then - -- return str - -- end - -- b, e, n, first, last = 0, 0, 0, start, stop - -- lpegmatch(pattern,str) - -- if not stop then - -- return sub(str,b) - -- else - -- return sub(str,b,e-1) - -- end - -- end - - -- print(utf.sub("Hans Hagen is my name")) - -- print(utf.sub("Hans Hagen is my name",5)) - -- print(utf.sub("Hans Hagen is my name",5,10)) - - local utflength = utf.length - - -- also negative indices, upto 10 times slower than a c variant - - local b, e, n, first, last = 0, 0, 0, 0, 0 - - local function slide_zero(s,p) - n = n + 1 - if n >= last then - e = p - 1 - else - return p - end - end - - local function slide_one(s,p) - n = n + 1 - if n == first then - b = p - end - if n >= last then - e = p - 1 - else - return p - end - end - - local function slide_two(s,p) - n = n + 1 - if n == first then - b = p - else - return true - end - end - - local pattern_zero = Cmt(p_utf8char,slide_zero)^0 - local pattern_one = Cmt(p_utf8char,slide_one )^0 - local pattern_two = Cmt(p_utf8char,slide_two )^0 - - function utf.sub(str,start,stop) - if not start then - return str - end - if start == 0 then - start = 1 - end - if not stop then - if start < 0 then - local l = utflength(str) -- we can inline this function if needed - start = l + start - else - start = start - 1 - end - b, n, first = 0, 0, start - lpegmatch(pattern_two,str) - if n >= first then - return sub(str,b) - else - 
return "" - end - end - if start < 0 or stop < 0 then - local l = utf.length(str) - if start < 0 then - start = l + start - if start <= 0 then - start = 1 - else - start = start + 1 - end - end - if stop < 0 then - stop = l + stop - if stop == 0 then - stop = 1 - else - stop = stop + 1 - end - end - end - if start > stop then - return "" - elseif start > 1 then - b, e, n, first, last = 0, 0, 0, start - 1, stop - lpegmatch(pattern_one,str) - if n >= first and e == 0 then - e = #str - end - return sub(str,b,e) - else - b, e, n, last = 1, 0, 0, stop - lpegmatch(pattern_zero,str) - if e == 0 then - e = #str - end - return sub(str,b,e) - end - end - - -- local n = 100000 - -- local str = string.rep("123456àáâãäå",100) - -- - -- for i=-15,15,1 do - -- for j=-15,15,1 do - -- if utf.xsub(str,i,j) ~= utf.sub(str,i,j) then - -- print("error",i,j,"l>"..utf.xsub(str,i,j),"s>"..utf.sub(str,i,j)) - -- end - -- end - -- if utf.xsub(str,i) ~= utf.sub(str,i) then - -- print("error",i,"l>"..utf.xsub(str,i),"s>"..utf.sub(str,i)) - -- end - -- end - - -- print(" 1, 7",utf.xsub(str, 1, 7),utf.sub(str, 1, 7)) - -- print(" 0, 7",utf.xsub(str, 0, 7),utf.sub(str, 0, 7)) - -- print(" 0, 9",utf.xsub(str, 0, 9),utf.sub(str, 0, 9)) - -- print(" 4 ",utf.xsub(str, 4 ),utf.sub(str, 4 )) - -- print(" 0 ",utf.xsub(str, 0 ),utf.sub(str, 0 )) - -- print(" 0, 0",utf.xsub(str, 0, 0),utf.sub(str, 0, 0)) - -- print(" 4, 4",utf.xsub(str, 4, 4),utf.sub(str, 4, 4)) - -- print(" 4, 0",utf.xsub(str, 4, 0),utf.sub(str, 4, 0)) - -- print("-3, 0",utf.xsub(str,-3, 0),utf.sub(str,-3, 0)) - -- print(" 0,-3",utf.xsub(str, 0,-3),utf.sub(str, 0,-3)) - -- print(" 5,-3",utf.xsub(str,-5,-3),utf.sub(str,-5,-3)) - -- print("-3 ",utf.xsub(str,-3 ),utf.sub(str,-3 )) - -end - --- a replacement for simple gsubs: - -function utf.remapper(mapping) - local pattern = Cs((p_utf8char/mapping)^0) - return function(str) - if not str or str == "" then - return "" - else - return lpegmatch(pattern,str) - end - end, pattern -end - --- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" } --- print(remap("abcd 1234 abcd")) - --- - -function utf.replacer(t) -- no precheck, always string builder - local r = replacer(t,false,false,true) - return function(str) - return lpegmatch(r,str) - end -end - -function utf.subtituter(t) -- with precheck and no building if no match - local f = finder (t) - local r = replacer(t,false,false,true) - return function(str) - local i = lpegmatch(f,str) - if not i then - return str - elseif i > #str then - return str - else - -- return sub(str,1,i-2) .. 
lpegmatch(r,str,i-1) -- slower - return lpegmatch(r,str) - end - end -end - --- inspect(utf.split("a b c d")) --- inspect(utf.split("a b c d",true)) - -local utflinesplitter = p_utfbom^-1 * lpeg.tsplitat(p_newline) -local utfcharsplitter_ows = p_utfbom^-1 * Ct(C(p_utf8char)^0) -local utfcharsplitter_iws = p_utfbom^-1 * Ct((p_whitespace^1 + C(p_utf8char))^0) -local utfcharsplitter_raw = Ct(C(p_utf8char)^0) - -patterns.utflinesplitter = utflinesplitter - -function utf.splitlines(str) - return lpegmatch(utflinesplitter,str or "") -end - -function utf.split(str,ignorewhitespace) -- new - if ignorewhitespace then - return lpegmatch(utfcharsplitter_iws,str or "") - else - return lpegmatch(utfcharsplitter_ows,str or "") - end -end - -function utf.totable(str) -- keeps bom - return lpegmatch(utfcharsplitter_raw,str) -end - --- 0 EF BB BF UTF-8 --- 1 FF FE UTF-16-little-endian --- 2 FE FF UTF-16-big-endian --- 3 FF FE 00 00 UTF-32-little-endian --- 4 00 00 FE FF UTF-32-big-endian --- --- \000 fails in <= 5.0 but is valid in >=5.1 where %z is depricated - --- utf.name = { --- [0] = 'utf-8', --- [1] = 'utf-16-le', --- [2] = 'utf-16-be', --- [3] = 'utf-32-le', --- [4] = 'utf-32-be' --- } --- --- function utf.magic(f) --- local str = f:read(4) --- if not str then --- f:seek('set') --- return 0 --- -- elseif find(str,"^%z%z\254\255") then -- depricated --- -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged --- elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH) --- return 4 --- -- elseif find(str,"^\255\254%z%z") then -- depricated --- -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged --- elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH) --- return 3 --- elseif find(str,"^\254\255") then --- f:seek('set',2) --- return 2 --- elseif find(str,"^\255\254") then --- f:seek('set',2) --- return 1 --- elseif find(str,"^\239\187\191") then --- f:seek('set',3) --- return 0 --- else --- f:seek('set') --- return 0 --- end --- end - -function utf.magic(f) -- not used - local str = f:read(4) or "" - local off = lpegmatch(p_utfoffset,str) - if off < 4 then - f:seek('set',off) - end - return lpegmatch(p_utftype,str) -end - -local function utf16_to_utf8_be(t) - if type(t) == "string" then - t = lpegmatch(utflinesplitter,t) - end - local result = { } -- we reuse result - for i=1,#t do - local r, more = 0, 0 - for left, right in bytepairs(t[i]) do - if right then - local now = 256*left + right - if more > 0 then - now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong - more = 0 - r = r + 1 - result[r] = utfchar(now) - elseif now >= 0xD800 and now <= 0xDBFF then - more = now - else - r = r + 1 - result[r] = utfchar(now) - end - end - end - t[i] = concat(result,"",1,r) -- we reused tmp, hence t - end - return t -end - -local function utf16_to_utf8_le(t) - if type(t) == "string" then - t = lpegmatch(utflinesplitter,t) - end - local result = { } -- we reuse result - for i=1,#t do - local r, more = 0, 0 - for left, right in bytepairs(t[i]) do - if right then - local now = 256*right + left - if more > 0 then - now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong - more = 0 - r = r + 1 - result[r] = utfchar(now) - elseif now >= 0xD800 and now <= 0xDBFF then - more = now - else - r = r + 1 - result[r] = utfchar(now) - end - end - end - t[i] = concat(result,"",1,r) -- we reused tmp, hence t - end - return t -end - -local function utf32_to_utf8_be(t) - if type(t) == "string" then - t = 
lpegmatch(utflinesplitter,t) - end - local result = { } -- we reuse result - for i=1,#t do - local r, more = 0, -1 - for a,b in bytepairs(t[i]) do - if a and b then - if more < 0 then - more = 256*256*256*a + 256*256*b - else - r = r + 1 - result[t] = utfchar(more + 256*a + b) - more = -1 - end - else - break - end - end - t[i] = concat(result,"",1,r) - end - return t -end - -local function utf32_to_utf8_le(t) - if type(t) == "string" then - t = lpegmatch(utflinesplitter,t) - end - local result = { } -- we reuse result - for i=1,#t do - local r, more = 0, -1 - for a,b in bytepairs(t[i]) do - if a and b then - if more < 0 then - more = 256*b + a - else - r = r + 1 - result[t] = utfchar(more + 256*256*256*b + 256*256*a) - more = -1 - end - else - break - end - end - t[i] = concat(result,"",1,r) - end - return t -end - -utf.utf32_to_utf8_be = utf32_to_utf8_be -utf.utf32_to_utf8_le = utf32_to_utf8_le -utf.utf16_to_utf8_be = utf16_to_utf8_be -utf.utf16_to_utf8_le = utf16_to_utf8_le - -function utf.utf8_to_utf8(t) - return type(t) == "string" and lpegmatch(utflinesplitter,t) or t -end - -function utf.utf16_to_utf8(t,endian) - return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t -end - -function utf.utf32_to_utf8(t,endian) - return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t -end - -local function little(c) - local b = byte(c) - if b < 0x10000 then - return char(b%256,b/256) - else - b = b - 0x10000 - local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00 - return char(b1%256,b1/256,b2%256,b2/256) - end -end - -local function big(c) - local b = byte(c) - if b < 0x10000 then - return char(b/256,b%256) - else - b = b - 0x10000 - local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00 - return char(b1/256,b1%256,b2/256,b2%256) - end -end - --- function utf.utf8_to_utf16(str,littleendian) --- if littleendian then --- return char(255,254) .. utfgsub(str,".",little) --- else --- return char(254,255) .. utfgsub(str,".",big) --- end --- end - -local _, l_remap = utf.remapper(little) -local _, b_remap = utf.remapper(big) - -function utf.utf8_to_utf16(str,littleendian) - if littleendian then - return char(255,254) .. lpegmatch(l_remap,str) - else - return char(254,255) .. 
lpegmatch(b_remap,str) - end -end - --- function utf.tocodes(str,separator) -- can be sped up with an lpeg --- local t, n = { }, 0 --- for u in utfvalues(str) do --- n = n + 1 --- t[n] = format("0x%04X",u) --- end --- return concat(t,separator or " ") --- end - -local pattern = Cs ( - (p_utf8byte / function(unicode ) return format( "0x%04X", unicode) end) * - (p_utf8byte * Carg(1) / function(unicode,separator) return format("%s0x%04X",separator,unicode) end)^0 -) - -function utf.tocodes(str,separator) - return lpegmatch(pattern,str,1,separator or " ") -end - -function utf.ustring(s) - return format("U+%05X",type(s) == "number" and s or utfbyte(s)) -end - -function utf.xstring(s) - return format("0x%05X",type(s) == "number" and s or utfbyte(s)) -end - --- - -local p_nany = p_utf8char / "" - -if utfgmatch then - - function utf.count(str,what) - if type(what) == "string" then - local n = 0 - for _ in utfgmatch(str,what) do - n = n + 1 - end - return n - else -- 4 times slower but still faster than / function - return #lpegmatch(Cs((P(what)/" " + p_nany)^0),str) - end - end - -else - - local cache = { } - - function utf.count(str,what) - if type(what) == "string" then - local p = cache[what] - if not p then - p = Cs((P(what)/" " + p_nany)^0) - cache[p] = p - end - return #lpegmatch(p,str) - else -- 4 times slower but still faster than / function - return #lpegmatch(Cs((P(what)/" " + p_nany)^0),str) - end - end - -end - --- maybe also register as string.utf* - - -if not utf.characters then - - -- New: this gmatch hack is taken from the Lua 5.2 book. It's about two times slower - -- than the built-in string.utfcharacters. - - function utf.characters(str) - return gmatch(str,".[\128-\191]*") - end - - string.utfcharacters = utf.characters - -end - -if not utf.values then - - -- So, a logical next step is to check for the values variant. It over five times - -- slower than the built-in string.utfvalues. I optimized it a bit for n=0,1. 
- - ----- wrap, yield, gmatch = coroutine.wrap, coroutine.yield, string.gmatch - local find = string.find - - local dummy = function() - -- we share this one - end - - -- function utf.values(str) - -- local n = #str - -- if n == 0 then - -- return wrap(dummy) - -- elseif n == 1 then - -- return wrap(function() yield(utfbyte(str)) end) - -- else - -- return wrap(function() for s in gmatch(str,".[\128-\191]*") do - -- yield(utfbyte(s)) - -- end end) - -- end - -- end - -- - -- faster: - - function utf.values(str) - local n = #str - if n == 0 then - return dummy - elseif n == 1 then - return function() return utfbyte(str) end - else - local p = 1 - -- local n = #str - return function() - -- if p <= n then -- slower than the last find - local b, e = find(str,".[\128-\191]*",p) - if b then - p = e + 1 - return utfbyte(sub(str,b,e)) - end - -- end - end - end - end - - -- slower: - -- - -- local pattern = C(patterns.utf8character) * Cp() - -- ----- pattern = patterns.utf8character/utfbyte * Cp() - -- ----- pattern = patterns.utf8byte * Cp() - -- - -- function utf.values(str) -- one of the cases where a find is faster than an lpeg - -- local n = #str - -- if n == 0 then - -- return dummy - -- elseif n == 1 then - -- return function() return utfbyte(str) end - -- else - -- local p = 1 - -- return function() - -- local s, e = lpegmatch(pattern,str,p) - -- if e then - -- p = e - -- return utfbyte(s) - -- -- return s - -- end - -- end - -- end - -- end - - string.utfvalues = utf.values - -end +if not modules then modules = { } end modules ['l-unicode'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this module will be reorganized + +-- todo: utf.sub replacement (used in syst-aux) + +-- we put these in the utf namespace: + +utf = utf or (unicode and unicode.utf8) or { } + +utf.characters = utf.characters or string.utfcharacters +utf.values = utf.values or string.utfvalues + +-- string.utfvalues +-- string.utfcharacters +-- string.characters +-- string.characterpairs +-- string.bytes +-- string.bytepairs + +local type = type +local char, byte, format, sub = string.char, string.byte, string.format, string.sub +local concat = table.concat +local P, C, R, Cs, Ct, Cmt, Cc, Carg, Cp = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Carg, lpeg.Cp +local lpegmatch, patterns = lpeg.match, lpeg.patterns + +local bytepairs = string.bytepairs + +local finder = lpeg.finder +local replacer = lpeg.replacer + +local utfvalues = utf.values +local utfgmatch = utf.gmatch -- not always present + +local p_utftype = patterns.utftype +local p_utfoffset = patterns.utfoffset +local p_utf8char = patterns.utf8char +local p_utf8byte = patterns.utf8byte +local p_utfbom = patterns.utfbom +local p_newline = patterns.newline +local p_whitespace = patterns.whitespace + +if not unicode then + + unicode = { utf = utf } -- for a while + +end + +if not utf.char then + + local floor, char = math.floor, string.char + + function utf.char(n) + if n < 0x80 then + -- 0aaaaaaa : 0x80 + return char(n) + elseif n < 0x800 then + -- 110bbbaa : 0xC0 : n >> 6 + -- 10aaaaaa : 0x80 : n & 0x3F + return char( + 0xC0 + floor(n/0x40), + 0x80 + (n % 0x40) + ) + elseif n < 0x10000 then + -- 1110bbbb : 0xE0 : n >> 12 + -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F + -- 10aaaaaa : 0x80 : n & 0x3F + return char( + 0xE0 + floor(n/0x1000), + 0x80 + (floor(n/0x40) % 0x40), + 0x80 
+ (n % 0x40) + ) + elseif n < 0x200000 then + -- 11110ccc : 0xF0 : n >> 18 + -- 10ccbbbb : 0x80 : (n >> 12) & 0x3F + -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F + -- 10aaaaaa : 0x80 : n & 0x3F + -- dddd : ccccc - 1 + return char( + 0xF0 + floor(n/0x40000), + 0x80 + (floor(n/0x1000) % 0x40), + 0x80 + (floor(n/0x40) % 0x40), + 0x80 + (n % 0x40) + ) + else + return "" + end + end + +end + +if not utf.byte then + + local utf8byte = patterns.utf8byte + + function utf.byte(c) + return lpegmatch(utf8byte,c) + end + +end + +local utfchar, utfbyte = utf.char, utf.byte + +-- As we want to get rid of the (unmaintained) utf library we implement our own +-- variants (in due time an independent module): + +function utf.filetype(data) + return data and lpegmatch(p_utftype,data) or "unknown" +end + +local toentities = Cs ( + ( + patterns.utf8one + + ( + patterns.utf8two + + patterns.utf8three + + patterns.utf8four + ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end + )^0 +) + +patterns.toentities = toentities + +function utf.toentities(str) + return lpegmatch(toentities,str) +end + +-- local utfchr = { } -- 60K -> 2.638 M extra mem but currently not called that often (on latin) +-- +-- setmetatable(utfchr, { __index = function(t,k) local v = utfchar(k) t[k] = v return v end } ) +-- +-- collectgarbage("collect") +-- local u = collectgarbage("count")*1024 +-- local t = os.clock() +-- for i=1,1000 do +-- for i=1,600 do +-- local a = utfchr[i] +-- end +-- end +-- print(os.clock()-t,collectgarbage("count")*1024-u) + +-- collectgarbage("collect") +-- local t = os.clock() +-- for i=1,1000 do +-- for i=1,600 do +-- local a = utfchar(i) +-- end +-- end +-- print(os.clock()-t,collectgarbage("count")*1024-u) + +-- local byte = string.byte +-- local utfchar = utf.char + +local one = P(1) +local two = C(1) * C(1) +local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1) + +-- actually one of them is already utf ... sort of useless this one + +-- function utf.char(n) +-- if n < 0x80 then +-- return char(n) +-- elseif n < 0x800 then +-- return char( +-- 0xC0 + floor(n/0x40), +-- 0x80 + (n % 0x40) +-- ) +-- elseif n < 0x10000 then +-- return char( +-- 0xE0 + floor(n/0x1000), +-- 0x80 + (floor(n/0x40) % 0x40), +-- 0x80 + (n % 0x40) +-- ) +-- elseif n < 0x40000 then +-- return char( +-- 0xF0 + floor(n/0x40000), +-- 0x80 + floor(n/0x1000), +-- 0x80 + (floor(n/0x40) % 0x40), +-- 0x80 + (n % 0x40) +-- ) +-- else +-- -- return char( +-- -- 0xF1 + floor(n/0x1000000), +-- -- 0x80 + floor(n/0x40000), +-- -- 0x80 + floor(n/0x1000), +-- -- 0x80 + (floor(n/0x40) % 0x40), +-- -- 0x80 + (n % 0x40) +-- -- ) +-- return "?" 
+-- end +-- end +-- +-- merge into: + +local pattern = P("\254\255") * Cs( ( + four / function(a,b,c,d) + local ab = 0xFF * byte(a) + byte(b) + local cd = 0xFF * byte(c) + byte(d) + return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000) + end + + two / function(a,b) + return utfchar(byte(a)*256 + byte(b)) + end + + one + )^1 ) + + P("\255\254") * Cs( ( + four / function(b,a,d,c) + local ab = 0xFF * byte(a) + byte(b) + local cd = 0xFF * byte(c) + byte(d) + return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000) + end + + two / function(b,a) + return utfchar(byte(a)*256 + byte(b)) + end + + one + )^1 ) + +function string.toutf(s) -- in string namespace + return lpegmatch(pattern,s) or s -- todo: utf32 +end + +local validatedutf = Cs ( + ( + patterns.utf8one + + patterns.utf8two + + patterns.utf8three + + patterns.utf8four + + P(1) / "�" + )^0 +) + +patterns.validatedutf = validatedutf + +function utf.is_valid(str) + return type(str) == "string" and lpegmatch(validatedutf,str) or false +end + +if not utf.len then + + -- -- alternative 1: 0.77 + -- + -- local utfcharcounter = utfbom^-1 * Cs((p_utf8char/'!')^0) + -- + -- function utf.len(str) + -- return #lpegmatch(utfcharcounter,str or "") + -- end + -- + -- -- alternative 2: 1.70 + -- + -- local n = 0 + -- + -- local utfcharcounter = utfbom^-1 * (p_utf8char/function() n = n + 1 end)^0 -- slow + -- + -- function utf.length(str) + -- n = 0 + -- lpegmatch(utfcharcounter,str or "") + -- return n + -- end + -- + -- -- alternative 3: 0.24 (native unicode.utf8.len: 0.047) + + -- local n = 0 + -- + -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( ( Cp() * ( + -- -- patterns.utf8one ^1 * Cc(1) + -- -- + patterns.utf8two ^1 * Cc(2) + -- -- + patterns.utf8three^1 * Cc(3) + -- -- + patterns.utf8four ^1 * Cc(4) ) * Cp() / function(f,d,t) n = n + (t - f)/d end + -- -- )^0 ) -- just as many captures as below + -- + -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( ( + -- -- (Cmt(patterns.utf8one ^1,function(_,_,s) n = n + #s return true end)) + -- -- + (Cmt(patterns.utf8two ^1,function(_,_,s) n = n + #s/2 return true end)) + -- -- + (Cmt(patterns.utf8three^1,function(_,_,s) n = n + #s/3 return true end)) + -- -- + (Cmt(patterns.utf8four ^1,function(_,_,s) n = n + #s/4 return true end)) + -- -- )^0 ) -- not interesting as it creates strings but sometimes faster + -- + -- -- The best so far: + -- + -- local utfcharcounter = utfbom^-1 * P ( ( + -- Cp() * (patterns.utf8one )^1 * Cp() / function(f,t) n = n + t - f end + -- + Cp() * (patterns.utf8two )^1 * Cp() / function(f,t) n = n + (t - f)/2 end + -- + Cp() * (patterns.utf8three)^1 * Cp() / function(f,t) n = n + (t - f)/3 end + -- + Cp() * (patterns.utf8four )^1 * Cp() / function(f,t) n = n + (t - f)/4 end + -- )^0 ) + + -- function utf.len(str) + -- n = 0 + -- lpegmatch(utfcharcounter,str or "") + -- return n + -- end + + local n, f = 0, 1 + + local utfcharcounter = patterns.utfbom^-1 * Cmt ( + Cc(1) * patterns.utf8one ^1 + + Cc(2) * patterns.utf8two ^1 + + Cc(3) * patterns.utf8three^1 + + Cc(4) * patterns.utf8four ^1, + function(_,t,d) -- due to Cc no string captures, so faster + n = n + (t - f)/d + f = t + return true + end + )^0 + + function utf.len(str) + n, f = 0, 1 + lpegmatch(utfcharcounter,str or "") + return n + end + + -- -- these are quite a bit slower: + + -- utfcharcounter = utfbom^-1 * (Cmt(P(1) * R("\128\191")^0, function() n = n + 1 return true end))^0 -- 50+ times slower + -- utfcharcounter = utfbom^-1 * (Cmt(P(1), function() n = n + 1 return true end) * 
R("\128\191")^0)^0 -- 50- times slower + +end + +utf.length = utf.len + +if not utf.sub then + + -- inefficient as lpeg just copies ^n + + -- local function sub(str,start,stop) + -- local pattern = p_utf8char^-(start-1) * C(p_utf8char^-(stop-start+1)) + -- inspect(pattern) + -- return lpegmatch(pattern,str) or "" + -- end + + -- local b, e, n, first, last = 0, 0, 0, 0, 0 + -- + -- local function slide(s,p) + -- n = n + 1 + -- if n == first then + -- b = p + -- if not last then + -- return nil + -- end + -- end + -- if n == last then + -- e = p + -- return nil + -- else + -- return p + -- end + -- end + -- + -- local pattern = Cmt(p_utf8char,slide)^0 + -- + -- function utf.sub(str,start,stop) -- todo: from the end + -- if not start then + -- return str + -- end + -- b, e, n, first, last = 0, 0, 0, start, stop + -- lpegmatch(pattern,str) + -- if not stop then + -- return sub(str,b) + -- else + -- return sub(str,b,e-1) + -- end + -- end + + -- print(utf.sub("Hans Hagen is my name")) + -- print(utf.sub("Hans Hagen is my name",5)) + -- print(utf.sub("Hans Hagen is my name",5,10)) + + local utflength = utf.length + + -- also negative indices, upto 10 times slower than a c variant + + local b, e, n, first, last = 0, 0, 0, 0, 0 + + local function slide_zero(s,p) + n = n + 1 + if n >= last then + e = p - 1 + else + return p + end + end + + local function slide_one(s,p) + n = n + 1 + if n == first then + b = p + end + if n >= last then + e = p - 1 + else + return p + end + end + + local function slide_two(s,p) + n = n + 1 + if n == first then + b = p + else + return true + end + end + + local pattern_zero = Cmt(p_utf8char,slide_zero)^0 + local pattern_one = Cmt(p_utf8char,slide_one )^0 + local pattern_two = Cmt(p_utf8char,slide_two )^0 + + function utf.sub(str,start,stop) + if not start then + return str + end + if start == 0 then + start = 1 + end + if not stop then + if start < 0 then + local l = utflength(str) -- we can inline this function if needed + start = l + start + else + start = start - 1 + end + b, n, first = 0, 0, start + lpegmatch(pattern_two,str) + if n >= first then + return sub(str,b) + else + return "" + end + end + if start < 0 or stop < 0 then + local l = utf.length(str) + if start < 0 then + start = l + start + if start <= 0 then + start = 1 + else + start = start + 1 + end + end + if stop < 0 then + stop = l + stop + if stop == 0 then + stop = 1 + else + stop = stop + 1 + end + end + end + if start > stop then + return "" + elseif start > 1 then + b, e, n, first, last = 0, 0, 0, start - 1, stop + lpegmatch(pattern_one,str) + if n >= first and e == 0 then + e = #str + end + return sub(str,b,e) + else + b, e, n, last = 1, 0, 0, stop + lpegmatch(pattern_zero,str) + if e == 0 then + e = #str + end + return sub(str,b,e) + end + end + + -- local n = 100000 + -- local str = string.rep("123456àáâãäå",100) + -- + -- for i=-15,15,1 do + -- for j=-15,15,1 do + -- if utf.xsub(str,i,j) ~= utf.sub(str,i,j) then + -- print("error",i,j,"l>"..utf.xsub(str,i,j),"s>"..utf.sub(str,i,j)) + -- end + -- end + -- if utf.xsub(str,i) ~= utf.sub(str,i) then + -- print("error",i,"l>"..utf.xsub(str,i),"s>"..utf.sub(str,i)) + -- end + -- end + + -- print(" 1, 7",utf.xsub(str, 1, 7),utf.sub(str, 1, 7)) + -- print(" 0, 7",utf.xsub(str, 0, 7),utf.sub(str, 0, 7)) + -- print(" 0, 9",utf.xsub(str, 0, 9),utf.sub(str, 0, 9)) + -- print(" 4 ",utf.xsub(str, 4 ),utf.sub(str, 4 )) + -- print(" 0 ",utf.xsub(str, 0 ),utf.sub(str, 0 )) + -- print(" 0, 0",utf.xsub(str, 0, 0),utf.sub(str, 0, 0)) + -- print(" 4, 
4",utf.xsub(str, 4, 4),utf.sub(str, 4, 4)) + -- print(" 4, 0",utf.xsub(str, 4, 0),utf.sub(str, 4, 0)) + -- print("-3, 0",utf.xsub(str,-3, 0),utf.sub(str,-3, 0)) + -- print(" 0,-3",utf.xsub(str, 0,-3),utf.sub(str, 0,-3)) + -- print(" 5,-3",utf.xsub(str,-5,-3),utf.sub(str,-5,-3)) + -- print("-3 ",utf.xsub(str,-3 ),utf.sub(str,-3 )) + +end + +-- a replacement for simple gsubs: + +function utf.remapper(mapping) + local pattern = Cs((p_utf8char/mapping)^0) + return function(str) + if not str or str == "" then + return "" + else + return lpegmatch(pattern,str) + end + end, pattern +end + +-- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" } +-- print(remap("abcd 1234 abcd")) + +-- + +function utf.replacer(t) -- no precheck, always string builder + local r = replacer(t,false,false,true) + return function(str) + return lpegmatch(r,str) + end +end + +function utf.subtituter(t) -- with precheck and no building if no match + local f = finder (t) + local r = replacer(t,false,false,true) + return function(str) + local i = lpegmatch(f,str) + if not i then + return str + elseif i > #str then + return str + else + -- return sub(str,1,i-2) .. lpegmatch(r,str,i-1) -- slower + return lpegmatch(r,str) + end + end +end + +-- inspect(utf.split("a b c d")) +-- inspect(utf.split("a b c d",true)) + +local utflinesplitter = p_utfbom^-1 * lpeg.tsplitat(p_newline) +local utfcharsplitter_ows = p_utfbom^-1 * Ct(C(p_utf8char)^0) +local utfcharsplitter_iws = p_utfbom^-1 * Ct((p_whitespace^1 + C(p_utf8char))^0) +local utfcharsplitter_raw = Ct(C(p_utf8char)^0) + +patterns.utflinesplitter = utflinesplitter + +function utf.splitlines(str) + return lpegmatch(utflinesplitter,str or "") +end + +function utf.split(str,ignorewhitespace) -- new + if ignorewhitespace then + return lpegmatch(utfcharsplitter_iws,str or "") + else + return lpegmatch(utfcharsplitter_ows,str or "") + end +end + +function utf.totable(str) -- keeps bom + return lpegmatch(utfcharsplitter_raw,str) +end + +-- 0 EF BB BF UTF-8 +-- 1 FF FE UTF-16-little-endian +-- 2 FE FF UTF-16-big-endian +-- 3 FF FE 00 00 UTF-32-little-endian +-- 4 00 00 FE FF UTF-32-big-endian +-- +-- \000 fails in <= 5.0 but is valid in >=5.1 where %z is depricated + +-- utf.name = { +-- [0] = 'utf-8', +-- [1] = 'utf-16-le', +-- [2] = 'utf-16-be', +-- [3] = 'utf-32-le', +-- [4] = 'utf-32-be' +-- } +-- +-- function utf.magic(f) +-- local str = f:read(4) +-- if not str then +-- f:seek('set') +-- return 0 +-- -- elseif find(str,"^%z%z\254\255") then -- depricated +-- -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged +-- elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH) +-- return 4 +-- -- elseif find(str,"^\255\254%z%z") then -- depricated +-- -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged +-- elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH) +-- return 3 +-- elseif find(str,"^\254\255") then +-- f:seek('set',2) +-- return 2 +-- elseif find(str,"^\255\254") then +-- f:seek('set',2) +-- return 1 +-- elseif find(str,"^\239\187\191") then +-- f:seek('set',3) +-- return 0 +-- else +-- f:seek('set') +-- return 0 +-- end +-- end + +function utf.magic(f) -- not used + local str = f:read(4) or "" + local off = lpegmatch(p_utfoffset,str) + if off < 4 then + f:seek('set',off) + end + return lpegmatch(p_utftype,str) +end + +local function utf16_to_utf8_be(t) + if type(t) == "string" then + t = lpegmatch(utflinesplitter,t) + end + local result = { } -- we reuse result + for i=1,#t do + local 
r, more = 0, 0 + for left, right in bytepairs(t[i]) do + if right then + local now = 256*left + right + if more > 0 then + now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong + more = 0 + r = r + 1 + result[r] = utfchar(now) + elseif now >= 0xD800 and now <= 0xDBFF then + more = now + else + r = r + 1 + result[r] = utfchar(now) + end + end + end + t[i] = concat(result,"",1,r) -- we reused tmp, hence t + end + return t +end + +local function utf16_to_utf8_le(t) + if type(t) == "string" then + t = lpegmatch(utflinesplitter,t) + end + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, 0 + for left, right in bytepairs(t[i]) do + if right then + local now = 256*right + left + if more > 0 then + now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong + more = 0 + r = r + 1 + result[r] = utfchar(now) + elseif now >= 0xD800 and now <= 0xDBFF then + more = now + else + r = r + 1 + result[r] = utfchar(now) + end + end + end + t[i] = concat(result,"",1,r) -- we reused tmp, hence t + end + return t +end + +local function utf32_to_utf8_be(t) + if type(t) == "string" then + t = lpegmatch(utflinesplitter,t) + end + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, -1 + for a,b in bytepairs(t[i]) do + if a and b then + if more < 0 then + more = 256*256*256*a + 256*256*b + else + r = r + 1 + result[t] = utfchar(more + 256*a + b) + more = -1 + end + else + break + end + end + t[i] = concat(result,"",1,r) + end + return t +end + +local function utf32_to_utf8_le(t) + if type(t) == "string" then + t = lpegmatch(utflinesplitter,t) + end + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, -1 + for a,b in bytepairs(t[i]) do + if a and b then + if more < 0 then + more = 256*b + a + else + r = r + 1 + result[t] = utfchar(more + 256*256*256*b + 256*256*a) + more = -1 + end + else + break + end + end + t[i] = concat(result,"",1,r) + end + return t +end + +utf.utf32_to_utf8_be = utf32_to_utf8_be +utf.utf32_to_utf8_le = utf32_to_utf8_le +utf.utf16_to_utf8_be = utf16_to_utf8_be +utf.utf16_to_utf8_le = utf16_to_utf8_le + +function utf.utf8_to_utf8(t) + return type(t) == "string" and lpegmatch(utflinesplitter,t) or t +end + +function utf.utf16_to_utf8(t,endian) + return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t +end + +function utf.utf32_to_utf8(t,endian) + return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t +end + +local function little(c) + local b = byte(c) + if b < 0x10000 then + return char(b%256,b/256) + else + b = b - 0x10000 + local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00 + return char(b1%256,b1/256,b2%256,b2/256) + end +end + +local function big(c) + local b = byte(c) + if b < 0x10000 then + return char(b/256,b%256) + else + b = b - 0x10000 + local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00 + return char(b1/256,b1%256,b2/256,b2%256) + end +end + +-- function utf.utf8_to_utf16(str,littleendian) +-- if littleendian then +-- return char(255,254) .. utfgsub(str,".",little) +-- else +-- return char(254,255) .. utfgsub(str,".",big) +-- end +-- end + +local _, l_remap = utf.remapper(little) +local _, b_remap = utf.remapper(big) + +function utf.utf8_to_utf16(str,littleendian) + if littleendian then + return char(255,254) .. lpegmatch(l_remap,str) + else + return char(254,255) .. 
lpegmatch(b_remap,str) + end +end + +-- function utf.tocodes(str,separator) -- can be sped up with an lpeg +-- local t, n = { }, 0 +-- for u in utfvalues(str) do +-- n = n + 1 +-- t[n] = format("0x%04X",u) +-- end +-- return concat(t,separator or " ") +-- end + +local pattern = Cs ( + (p_utf8byte / function(unicode ) return format( "0x%04X", unicode) end) * + (p_utf8byte * Carg(1) / function(unicode,separator) return format("%s0x%04X",separator,unicode) end)^0 +) + +function utf.tocodes(str,separator) + return lpegmatch(pattern,str,1,separator or " ") +end + +function utf.ustring(s) + return format("U+%05X",type(s) == "number" and s or utfbyte(s)) +end + +function utf.xstring(s) + return format("0x%05X",type(s) == "number" and s or utfbyte(s)) +end + +-- + +local p_nany = p_utf8char / "" + +if utfgmatch then + + function utf.count(str,what) + if type(what) == "string" then + local n = 0 + for _ in utfgmatch(str,what) do + n = n + 1 + end + return n + else -- 4 times slower but still faster than / function + return #lpegmatch(Cs((P(what)/" " + p_nany)^0),str) + end + end + +else + + local cache = { } + + function utf.count(str,what) + if type(what) == "string" then + local p = cache[what] + if not p then + p = Cs((P(what)/" " + p_nany)^0) + cache[p] = p + end + return #lpegmatch(p,str) + else -- 4 times slower but still faster than / function + return #lpegmatch(Cs((P(what)/" " + p_nany)^0),str) + end + end + +end + +-- maybe also register as string.utf* + + +if not utf.characters then + + -- New: this gmatch hack is taken from the Lua 5.2 book. It's about two times slower + -- than the built-in string.utfcharacters. + + function utf.characters(str) + return gmatch(str,".[\128-\191]*") + end + + string.utfcharacters = utf.characters + +end + +if not utf.values then + + -- So, a logical next step is to check for the values variant. It over five times + -- slower than the built-in string.utfvalues. I optimized it a bit for n=0,1. 
+ + ----- wrap, yield, gmatch = coroutine.wrap, coroutine.yield, string.gmatch + local find = string.find + + local dummy = function() + -- we share this one + end + + -- function utf.values(str) + -- local n = #str + -- if n == 0 then + -- return wrap(dummy) + -- elseif n == 1 then + -- return wrap(function() yield(utfbyte(str)) end) + -- else + -- return wrap(function() for s in gmatch(str,".[\128-\191]*") do + -- yield(utfbyte(s)) + -- end end) + -- end + -- end + -- + -- faster: + + function utf.values(str) + local n = #str + if n == 0 then + return dummy + elseif n == 1 then + return function() return utfbyte(str) end + else + local p = 1 + -- local n = #str + return function() + -- if p <= n then -- slower than the last find + local b, e = find(str,".[\128-\191]*",p) + if b then + p = e + 1 + return utfbyte(sub(str,b,e)) + end + -- end + end + end + end + + -- slower: + -- + -- local pattern = C(patterns.utf8character) * Cp() + -- ----- pattern = patterns.utf8character/utfbyte * Cp() + -- ----- pattern = patterns.utf8byte * Cp() + -- + -- function utf.values(str) -- one of the cases where a find is faster than an lpeg + -- local n = #str + -- if n == 0 then + -- return dummy + -- elseif n == 1 then + -- return function() return utfbyte(str) end + -- else + -- local p = 1 + -- return function() + -- local s, e = lpegmatch(pattern,str,p) + -- if e then + -- p = e + -- return utfbyte(s) + -- -- return s + -- end + -- end + -- end + -- end + + string.utfvalues = utf.values + +end diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua index 4624a0507..5cfeb252c 100644 --- a/tex/context/base/l-url.lua +++ b/tex/context/base/l-url.lua @@ -1,344 +1,344 @@ -if not modules then modules = { } end modules ['l-url'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local char, format, byte = string.char, string.format, string.byte -local concat = table.concat -local tonumber, type = tonumber, type -local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V -local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer - --- from wikipedia: --- --- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose --- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/ --- | | | | | | | | --- | userinfo hostname port | | query fragment --- | \________________________________/\_____________|____|/ --- scheme | | | | --- | authority path | | --- | | | --- | path interpretable as filename --- | ___________|____________ | --- / \ / \ | --- urn:example:animal:ferret:nose interpretable as extension - -url = url or { } -local url = url - -local tochar = function(s) return char(tonumber(s,16)) end - -local colon = P(":") -local qmark = P("?") -local hash = P("#") -local slash = P("/") -local percent = P("%") -local endofstring = P(-1) - -local hexdigit = R("09","AF","af") -local plus = P("+") -local nothing = Cc("") -local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar -local escaped = (plus / " ") + escapedchar - -local noslash = P("/") / "" - --- we assume schemes with more than 1 character (in order to avoid problems with windows disks) --- we also assume that when we have a scheme, we also have an authority --- --- maybe we should already split the query (better for 
unescaping as = & can be part of a value - -local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2) -local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0) -local pathstr = Cs((escaped+(1- qmark-hash))^0) ------ querystr = Cs((escaped+(1- hash))^0) -local querystr = Cs(( (1- hash))^0) -local fragmentstr = Cs((escaped+(1- endofstring))^0) - -local scheme = schemestr * colon + nothing -local authority = slash * slash * authoritystr + nothing -local path = slash * pathstr + nothing -local query = qmark * querystr + nothing -local fragment = hash * fragmentstr + nothing - -local validurl = scheme * authority * path * query * fragment -local parser = Ct(validurl) - -lpegpatterns.url = validurl -lpegpatterns.urlsplitter = parser - -local escapes = { } - -setmetatable(escapes, { __index = function(t,k) - local v = format("%%%02X",byte(k)) - t[k] = v - return v -end }) - -local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most -local unescaper = Cs((escapedchar + 1)^0) - -lpegpatterns.urlunescaped = escapedchar -lpegpatterns.urlescaper = escaper -lpegpatterns.urlunescaper = unescaper - --- todo: reconsider Ct as we can as well have five return values (saves a table) --- so we can have two parsers, one with and one without - -local function split(str) - return (type(str) == "string" and lpegmatch(parser,str)) or str -end - -local isscheme = schemestr * colon * slash * slash -- this test also assumes authority - -local function hasscheme(str) - if str then - local scheme = lpegmatch(isscheme,str) -- at least one character - return scheme ~= "" and scheme or false - else - return false - end -end - ---~ print(hasscheme("home:")) ---~ print(hasscheme("home://")) - --- todo: cache them - -local rootletter = R("az","AZ") - + S("_-+") -local separator = P("://") -local qualified = P(".")^0 * P("/") - + rootletter * P(":") - + rootletter^1 * separator - + rootletter^1 * P("/") -local rootbased = P("/") - + rootletter * P(":") - -local barswapper = replacer("|",":") -local backslashswapper = replacer("\\","/") - --- queries: - -local equal = P("=") -local amp = P("&") -local key = Cs(((escapedchar+1)-equal )^0) -local value = Cs(((escapedchar+1)-amp -endofstring)^0) - -local splitquery = Cf ( Ct("") * P { "sequence", - sequence = V("pair") * (amp * V("pair"))^0, - pair = Cg(key * equal * value), -}, rawset) - --- hasher - -local function hashed(str) -- not yet ok (/test?test) - if str == "" then - return { - scheme = "invalid", - original = str, - } - end - local s = split(str) - local rawscheme = s[1] - local rawquery = s[4] - local somescheme = rawscheme ~= "" - local somequery = rawquery ~= "" - if not somescheme and not somequery then - s = { - scheme = "file", - authority = "", - path = str, - query = "", - fragment = "", - original = str, - noscheme = true, - filename = str, - } - else -- not always a filename but handy anyway - local authority, path, filename = s[2], s[3] - if authority == "" then - filename = path - elseif path == "" then - filename = "" - else - filename = authority .. "/" .. 
path - end - s = { - scheme = rawscheme, - authority = authority, - path = path, - query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and = - queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped - fragment = s[5], - original = str, - noscheme = false, - filename = filename, - } - end - return s -end - --- inspect(hashed("template://test")) - --- Here we assume: --- --- files: /// = relative --- files: //// = absolute (!) - ---~ table.print(hashed("file://c:/opt/tex/texmf-local")) -- c:/opt/tex/texmf-local ---~ table.print(hashed("file://opt/tex/texmf-local" )) -- opt/tex/texmf-local ---~ table.print(hashed("file:///opt/tex/texmf-local" )) -- opt/tex/texmf-local ---~ table.print(hashed("file:////opt/tex/texmf-local" )) -- /opt/tex/texmf-local ---~ table.print(hashed("file:///./opt/tex/texmf-local" )) -- ./opt/tex/texmf-local - ---~ table.print(hashed("c:/opt/tex/texmf-local" )) -- c:/opt/tex/texmf-local ---~ table.print(hashed("opt/tex/texmf-local" )) -- opt/tex/texmf-local ---~ table.print(hashed("/opt/tex/texmf-local" )) -- /opt/tex/texmf-local - -url.split = split -url.hasscheme = hasscheme -url.hashed = hashed - -function url.addscheme(str,scheme) -- no authority - if hasscheme(str) then - return str - elseif not scheme then - return "file:///" .. str - else - return scheme .. ":///" .. str - end -end - -function url.construct(hash) -- dodo: we need to escape ! - local fullurl, f = { }, 0 - local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment - if scheme and scheme ~= "" then - f = f + 1 ; fullurl[f] = scheme .. "://" - end - if authority and authority ~= "" then - f = f + 1 ; fullurl[f] = authority - end - if path and path ~= "" then - f = f + 1 ; fullurl[f] = "/" .. path - end - if query and query ~= "" then - f = f + 1 ; fullurl[f] = "?".. query - end - if fragment and fragment ~= "" then - f = f + 1 ; fullurl[f] = "#".. 
fragment - end - return lpegmatch(escaper,concat(fullurl)) -end - -local pattern = Cs(noslash * R("az","AZ") * (S(":|")/":") * noslash * P(1)^0) - -function url.filename(filename) - local spec = hashed(filename) - local path = spec.path - return (spec.scheme == "file" and path and lpegmatch(pattern,path)) or filename -end - --- print(url.filename("/c|/test")) --- print(url.filename("/c/test")) - -local function escapestring(str) - return lpegmatch(escaper,str) -end - -url.escape = escapestring - -function url.query(str) - if type(str) == "string" then - return lpegmatch(splitquery,str) or "" - else - return str - end -end - -function url.toquery(data) - local td = type(data) - if td == "string" then - return #str and escape(data) or nil -- beware of double escaping - elseif td == "table" then - if next(data) then - local t = { } - for k, v in next, data do - t[#t+1] = format("%s=%s",k,escapestring(v)) - end - return concat(t,"&") - end - else - -- nil is a signal that no query - end -end - --- /test/ | /test | test/ | test => test - -local pattern = Cs(noslash^0 * (1 - noslash * P(-1))^0) - -function url.barepath(path) - if not path or path == "" then - return "" - else - return lpegmatch(pattern,path) - end -end - --- print(url.barepath("/test"),url.barepath("test/"),url.barepath("/test/"),url.barepath("test")) --- print(url.barepath("/x/yz"),url.barepath("x/yz/"),url.barepath("/x/yz/"),url.barepath("x/yz")) - ---~ print(url.filename("file:///c:/oeps.txt")) ---~ print(url.filename("c:/oeps.txt")) ---~ print(url.filename("file:///oeps.txt")) ---~ print(url.filename("file:///etc/test.txt")) ---~ print(url.filename("/oeps.txt")) - ---~ from the spec on the web (sort of): - ---~ local function test(str) ---~ local t = url.hashed(str) ---~ t.constructed = url.construct(t) ---~ print(table.serialize(t)) ---~ end - ---~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45")) ---~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45")) - ---~ test("sys:///./colo-rgb") - ---~ test("/data/site/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733/figuur-cow.jpg") ---~ test("file:///M:/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733") ---~ test("M:/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733") ---~ test("file:///q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733") ---~ test("/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733") - ---~ test("file:///cow%20with%20spaces") ---~ test("file:///cow%20with%20spaces.pdf") ---~ test("cow%20with%20spaces.pdf") ---~ test("some%20file") ---~ test("/etc/passwords") ---~ test("http://www.myself.com/some%20words.html") ---~ test("file:///c:/oeps.txt") ---~ test("file:///c|/oeps.txt") ---~ test("file:///etc/oeps.txt") ---~ test("file://./etc/oeps.txt") ---~ test("file:////etc/oeps.txt") ---~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt") ---~ test("http://www.ietf.org/rfc/rfc2396.txt") ---~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what") ---~ test("mailto:John.Doe@example.com") ---~ test("news:comp.infosystems.www.servers.unix") ---~ test("tel:+1-816-555-1212") ---~ test("telnet://192.0.2.16:80/") ---~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2") ---~ test("http://www.pragma-ade.com/spaced%20name") - ---~ test("zip:///oeps/oeps.zip#bla/bla.tex") ---~ test("zip:///oeps/oeps.zip?bla/bla.tex") - ---~ 
table.print(url.hashed("/test?test")) +if not modules then modules = { } end modules ['l-url'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local char, format, byte = string.char, string.format, string.byte +local concat = table.concat +local tonumber, type = tonumber, type +local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V +local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer + +-- from wikipedia: +-- +-- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose +-- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/ +-- | | | | | | | | +-- | userinfo hostname port | | query fragment +-- | \________________________________/\_____________|____|/ +-- scheme | | | | +-- | authority path | | +-- | | | +-- | path interpretable as filename +-- | ___________|____________ | +-- / \ / \ | +-- urn:example:animal:ferret:nose interpretable as extension + +url = url or { } +local url = url + +local tochar = function(s) return char(tonumber(s,16)) end + +local colon = P(":") +local qmark = P("?") +local hash = P("#") +local slash = P("/") +local percent = P("%") +local endofstring = P(-1) + +local hexdigit = R("09","AF","af") +local plus = P("+") +local nothing = Cc("") +local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar +local escaped = (plus / " ") + escapedchar + +local noslash = P("/") / "" + +-- we assume schemes with more than 1 character (in order to avoid problems with windows disks) +-- we also assume that when we have a scheme, we also have an authority +-- +-- maybe we should already split the query (better for unescaping as = & can be part of a value + +local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2) +local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0) +local pathstr = Cs((escaped+(1- qmark-hash))^0) +----- querystr = Cs((escaped+(1- hash))^0) +local querystr = Cs(( (1- hash))^0) +local fragmentstr = Cs((escaped+(1- endofstring))^0) + +local scheme = schemestr * colon + nothing +local authority = slash * slash * authoritystr + nothing +local path = slash * pathstr + nothing +local query = qmark * querystr + nothing +local fragment = hash * fragmentstr + nothing + +local validurl = scheme * authority * path * query * fragment +local parser = Ct(validurl) + +lpegpatterns.url = validurl +lpegpatterns.urlsplitter = parser + +local escapes = { } + +setmetatable(escapes, { __index = function(t,k) + local v = format("%%%02X",byte(k)) + t[k] = v + return v +end }) + +local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most +local unescaper = Cs((escapedchar + 1)^0) + +lpegpatterns.urlunescaped = escapedchar +lpegpatterns.urlescaper = escaper +lpegpatterns.urlunescaper = unescaper + +-- todo: reconsider Ct as we can as well have five return values (saves a table) +-- so we can have two parsers, one with and one without + +local function split(str) + return (type(str) == "string" and lpegmatch(parser,str)) or str +end + +local isscheme = schemestr * colon * slash * slash -- this test also assumes authority + +local function hasscheme(str) + if str then + local scheme = lpegmatch(isscheme,str) -- at least one character + return scheme ~= "" and scheme or false + else + return 
false + end +end + +--~ print(hasscheme("home:")) +--~ print(hasscheme("home://")) + +-- todo: cache them + +local rootletter = R("az","AZ") + + S("_-+") +local separator = P("://") +local qualified = P(".")^0 * P("/") + + rootletter * P(":") + + rootletter^1 * separator + + rootletter^1 * P("/") +local rootbased = P("/") + + rootletter * P(":") + +local barswapper = replacer("|",":") +local backslashswapper = replacer("\\","/") + +-- queries: + +local equal = P("=") +local amp = P("&") +local key = Cs(((escapedchar+1)-equal )^0) +local value = Cs(((escapedchar+1)-amp -endofstring)^0) + +local splitquery = Cf ( Ct("") * P { "sequence", + sequence = V("pair") * (amp * V("pair"))^0, + pair = Cg(key * equal * value), +}, rawset) + +-- hasher + +local function hashed(str) -- not yet ok (/test?test) + if str == "" then + return { + scheme = "invalid", + original = str, + } + end + local s = split(str) + local rawscheme = s[1] + local rawquery = s[4] + local somescheme = rawscheme ~= "" + local somequery = rawquery ~= "" + if not somescheme and not somequery then + s = { + scheme = "file", + authority = "", + path = str, + query = "", + fragment = "", + original = str, + noscheme = true, + filename = str, + } + else -- not always a filename but handy anyway + local authority, path, filename = s[2], s[3] + if authority == "" then + filename = path + elseif path == "" then + filename = "" + else + filename = authority .. "/" .. path + end + s = { + scheme = rawscheme, + authority = authority, + path = path, + query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and = + queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped + fragment = s[5], + original = str, + noscheme = false, + filename = filename, + } + end + return s +end + +-- inspect(hashed("template://test")) + +-- Here we assume: +-- +-- files: /// = relative +-- files: //// = absolute (!) + +--~ table.print(hashed("file://c:/opt/tex/texmf-local")) -- c:/opt/tex/texmf-local +--~ table.print(hashed("file://opt/tex/texmf-local" )) -- opt/tex/texmf-local +--~ table.print(hashed("file:///opt/tex/texmf-local" )) -- opt/tex/texmf-local +--~ table.print(hashed("file:////opt/tex/texmf-local" )) -- /opt/tex/texmf-local +--~ table.print(hashed("file:///./opt/tex/texmf-local" )) -- ./opt/tex/texmf-local + +--~ table.print(hashed("c:/opt/tex/texmf-local" )) -- c:/opt/tex/texmf-local +--~ table.print(hashed("opt/tex/texmf-local" )) -- opt/tex/texmf-local +--~ table.print(hashed("/opt/tex/texmf-local" )) -- /opt/tex/texmf-local + +url.split = split +url.hasscheme = hasscheme +url.hashed = hashed + +function url.addscheme(str,scheme) -- no authority + if hasscheme(str) then + return str + elseif not scheme then + return "file:///" .. str + else + return scheme .. ":///" .. str + end +end + +function url.construct(hash) -- dodo: we need to escape ! + local fullurl, f = { }, 0 + local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment + if scheme and scheme ~= "" then + f = f + 1 ; fullurl[f] = scheme .. "://" + end + if authority and authority ~= "" then + f = f + 1 ; fullurl[f] = authority + end + if path and path ~= "" then + f = f + 1 ; fullurl[f] = "/" .. path + end + if query and query ~= "" then + f = f + 1 ; fullurl[f] = "?".. query + end + if fragment and fragment ~= "" then + f = f + 1 ; fullurl[f] = "#".. 
fragment + end + return lpegmatch(escaper,concat(fullurl)) +end + +local pattern = Cs(noslash * R("az","AZ") * (S(":|")/":") * noslash * P(1)^0) + +function url.filename(filename) + local spec = hashed(filename) + local path = spec.path + return (spec.scheme == "file" and path and lpegmatch(pattern,path)) or filename +end + +-- print(url.filename("/c|/test")) +-- print(url.filename("/c/test")) + +local function escapestring(str) + return lpegmatch(escaper,str) +end + +url.escape = escapestring + +function url.query(str) + if type(str) == "string" then + return lpegmatch(splitquery,str) or "" + else + return str + end +end + +function url.toquery(data) + local td = type(data) + if td == "string" then + return #str and escape(data) or nil -- beware of double escaping + elseif td == "table" then + if next(data) then + local t = { } + for k, v in next, data do + t[#t+1] = format("%s=%s",k,escapestring(v)) + end + return concat(t,"&") + end + else + -- nil is a signal that no query + end +end + +-- /test/ | /test | test/ | test => test + +local pattern = Cs(noslash^0 * (1 - noslash * P(-1))^0) + +function url.barepath(path) + if not path or path == "" then + return "" + else + return lpegmatch(pattern,path) + end +end + +-- print(url.barepath("/test"),url.barepath("test/"),url.barepath("/test/"),url.barepath("test")) +-- print(url.barepath("/x/yz"),url.barepath("x/yz/"),url.barepath("/x/yz/"),url.barepath("x/yz")) + +--~ print(url.filename("file:///c:/oeps.txt")) +--~ print(url.filename("c:/oeps.txt")) +--~ print(url.filename("file:///oeps.txt")) +--~ print(url.filename("file:///etc/test.txt")) +--~ print(url.filename("/oeps.txt")) + +--~ from the spec on the web (sort of): + +--~ local function test(str) +--~ local t = url.hashed(str) +--~ t.constructed = url.construct(t) +--~ print(table.serialize(t)) +--~ end + +--~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45")) +--~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45")) + +--~ test("sys:///./colo-rgb") + +--~ test("/data/site/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733/figuur-cow.jpg") +--~ test("file:///M:/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733") +--~ test("M:/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733") +--~ test("file:///q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733") +--~ test("/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733") + +--~ test("file:///cow%20with%20spaces") +--~ test("file:///cow%20with%20spaces.pdf") +--~ test("cow%20with%20spaces.pdf") +--~ test("some%20file") +--~ test("/etc/passwords") +--~ test("http://www.myself.com/some%20words.html") +--~ test("file:///c:/oeps.txt") +--~ test("file:///c|/oeps.txt") +--~ test("file:///etc/oeps.txt") +--~ test("file://./etc/oeps.txt") +--~ test("file:////etc/oeps.txt") +--~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt") +--~ test("http://www.ietf.org/rfc/rfc2396.txt") +--~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what") +--~ test("mailto:John.Doe@example.com") +--~ test("news:comp.infosystems.www.servers.unix") +--~ test("tel:+1-816-555-1212") +--~ test("telnet://192.0.2.16:80/") +--~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2") +--~ test("http://www.pragma-ade.com/spaced%20name") + +--~ test("zip:///oeps/oeps.zip#bla/bla.tex") +--~ test("zip:///oeps/oeps.zip?bla/bla.tex") + +--~ 
table.print(url.hashed("/test?test")) diff --git a/tex/context/base/l-xml.lua b/tex/context/base/l-xml.lua index 14e97337b..d8cc4a984 100644 --- a/tex/context/base/l-xml.lua +++ b/tex/context/base/l-xml.lua @@ -1,23 +1,23 @@ -if not modules then modules = { } end modules ['l-xml'] = { - version = 1.001, - comment = "this module is replaced by the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- We asume that the helper modules l-*.lua are loaded --- already. But anyway if you use mtxrun to run your script --- all is taken care of. - -if not trackers then - require('trac-tra') -end - -if not xml then - require('lxml-tab') - require('lxml-lpt') - require('lxml-mis') - require('lxml-aux') - require('lxml-xml') -end +if not modules then modules = { } end modules ['l-xml'] = { + version = 1.001, + comment = "this module is replaced by the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- We asume that the helper modules l-*.lua are loaded +-- already. But anyway if you use mtxrun to run your script +-- all is taken care of. + +if not trackers then + require('trac-tra') +end + +if not xml then + require('lxml-tab') + require('lxml-lpt') + require('lxml-mis') + require('lxml-aux') + require('lxml-xml') +end diff --git a/tex/context/base/lang-def.lua b/tex/context/base/lang-def.lua index c0c3981f7..274bb8090 100644 --- a/tex/context/base/lang-def.lua +++ b/tex/context/base/lang-def.lua @@ -1,466 +1,466 @@ -if not modules then modules = { } end modules ['lang-def'] = { - version = 1.001, - comment = "companion to lang-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" - -- dataonly = true, -- saves 10K -} - -local rawget = rawget -local lower = string.lower - -languages = languages or { } -local languages = languages -languages.data = languages.data or { } -local data = languages.data - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex - --- The specifications are based on an analysis done by Arthur. The --- names of tags were changed by Hans. The data is not yet used but --- will be some day. --- --- description --- --- The description is only meant as an indication; for example 'no' is --- "Norwegian, undetermined" because that's really what it is. --- --- script --- --- This is the 4-letter script tag according to ISO 15924, the --- official standard. --- --- bibliographical and terminological --- --- Then we have *two* ISO-639 3-letter tags: one is supposed to be used --- for "bibliographical" purposes, the other for "terminological". The --- first one is quite special (and mostly used in American libraries), --- and the more interesting one is the other (apparently it's that one --- we find everywhere). --- --- context --- --- These are the ones used in ConteXt. Kind of numberplate ones. --- --- opentype --- --- This is the 3-letter OpenType language tag, obviously. 
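-- A minimal usage sketch of the l-url module shown above, assuming it is loaded;
-- the sample url is made up and the expected values are given in the comments:

local function urldemo()
    local t = url.hashed("http://example.com/some%20file?x=1&y=2#top")
    print(t.scheme)                          -- "http"
    print(t.authority)                       -- "example.com"
    print(t.path)                            -- "some file" (percent escapes resolved)
    print(t.queries.x,t.queries.y)           -- "1"  "2"
    print(t.fragment)                        -- "top"
    print(url.construct(t))                  -- puts the fields back together (escaping
                                             -- is still marked as todo above)
    print(url.addscheme("oeps.txt"))         -- "file:///oeps.txt"
    print(url.query("a=1&b=2%203").b)        -- "2 3"
end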
--- --- variant --- --- This is actually the rfc4646: an extension of ISO-639 that also defines --- codes for variants like de-1901 for "German, 1901 orthography" or zh-Hans for --- "Chinese, simplified characters" ('Hans' is the ISO-15924 tag for --- "HAN ideographs, Simplified" :-) As I said yesterday, I think this --- should be the reference since it's exactly what we want: it's really --- standard (it's a RFC) and it's more than simply languages. To my --- knowledge this is the only system that addresses this issue. --- --- Warning: it's not unique! Because we have two "German" languages --- (and could, potentially, have two Chinese, etc.) --- --- Beware: the abbreviations are lowercased, which makes it more --- convenient to use them. --- --- todo: add default features - -local specifications = allocate { - { - ["description"] = "Dutch", - ["script"] = "latn", - -- ["bibliographical"] = "nld", - -- ["terminological"] = "nld", - ["context"] = "nl", - ["opentype"] = "nld", - ["variant"] = "nl", - }, - { - ["description"] = "Basque", - ["script"] = "latn", - ["bibliographical"] = "baq", - ["terminological"] = "eus", - ["context"] = "ba", - ["opentype"] = "euq", - ["variant"] = "eu", - }, - { - ["description"] = "Welsh", - ["script"] = "latn", - ["bibliographical"] = "wel", - ["terminological"] = "cym", - ["context"] = "cy", - ["opentype"] = "wel", - ["variant"] = "cy", - }, - { - ["description"] = "Icelandic", - ["script"] = "latn", - ["bibliographical"] = "ice", - ["terminological"] = "isl", - ["context"] = "is", - ["opentype"] = "isl", - ["variant"] = "is", - }, - { - ["description"] = "Norwegian, undetermined", - ["script"] = "latn", - ["bibliographical"] = "nor", - ["terminological"] = "nor", - ["context"] = "no", - ["variant"] = "no", - }, - { - ["description"] = "Norwegian bokmal", - ["script"] = "latn", - ["bibliographical"] = "nob", - ["terminological"] = "nob", - ["opentype"] = "nor", -- not sure! 
- ["variant"] = "nb", - }, - { - ["description"] = "Norwegian nynorsk", - ["script"] = "latn", - ["bibliographical"] = "nno", - ["terminological"] = "nno", - ["opentype"] = "nny", - ["variant"] = "nn", - }, - { - ["description"] = "Ancient Greek", - ["script"] = "grek", - ["bibliographical"] = "grc", - ["terminological"] = "grc", - ["context"] = "agr", - ["variant"] = "grc", - }, - { - ["description"] = "German, 1901 orthography", - ["script"] = "latn", - ["terminological"] = "deu", - ["context"] = "deo", - ["opentype"] = "deu", - ["variant"] = "de-1901", - }, - { - ["description"] = "German, 1996 orthography", - ["script"] = "latn", - ["bibliographical"] = "ger", - ["terminological"] = "deu", - ["context"] = "de", - ["opentype"] = "deu", - ["variant"] = "de-1996", - }, - { - ["description"] = "Afrikaans", - ["script"] = "latn", - ["bibliographical"] = "afr", - ["terminological"] = "afr", - ["context"] = "af", - ["opentype"] = "afk", - ["variant"] = "af", - }, - { - ["description"] = "Catalan", - ["script"] = "latn", - ["bibliographical"] = "cat", - ["terminological"] = "cat", - ["context"] = "ca", - ["opentype"] = "cat", - ["variant"] = "ca", - }, - { - ["description"] = "Czech", - ["script"] = "latn", - ["bibliographical"] = "cze", - ["terminological"] = "ces", - ["context"] = "cz", - ["opentype"] = "csy", - ["variant"] = "cs", - }, - { - ["description"] = "Greek", - ["script"] = "grek", - ["bibliographical"] = "gre", - ["terminological"] = "ell", - ["context"] = "gr", - ["opentype"] = "ell", - ["variant"] = "el", - }, - { - ["description"] = "American English", - ["script"] = "latn", - ["bibliographical"] = "eng", - ["terminological"] = "eng", - ["context"] = "us", - ["opentype"] = "eng", - ["variant"] = "en-US", - }, - { - ["description"] = "British English", - ["script"] = "latn", - ["bibliographical"] = "eng", - ["terminological"] = "eng", - ["context"] = "uk", - ["opentype"] = "eng", - ["variant"] = "en-UK", -- Could be en-GB as well ... 
- }, - { - ["description"] = "Spanish", - ["script"] = "latn", - ["bibliographical"] = "spa", - ["terminological"] = "spa", - ["context"] = "es", - ["opentype"] = "esp", - ["variant"] = "es", - }, - { - ["description"] = "Finnish", - ["script"] = "latn", - ["bibliographical"] = "fin", - ["terminological"] = "fin", - ["context"] = "fi", - ["opentype"] = "fin", - ["variant"] = "fi", - }, - { - ["description"] = "French", - ["script"] = "latn", - ["bibliographical"] = "fre", - ["terminological"] = "fra", - ["context"] = "fr", - ["opentype"] = "fra", - ["variant"] = "fr", - }, - { - ["description"] = "Croatian", - ["script"] = "latn", - ["bibliographical"] = "scr", - ["terminological"] = "hrv", - ["context"] = "hr", - ["opentype"] = "hrv", - ["variant"] = "hr", - }, - { - ["description"] = "Hungarian", - ["script"] = "latn", - ["bibliographical"] = "hun", - ["terminological"] = "hun", - ["context"] = "hu", - ["opentype"] = "hun", - ["variant"] = "hu", - }, - { - ["description"] = "Italian", - ["script"] = "latn", - ["bibliographical"] = "ita", - ["terminological"] = "ita", - ["context"] = "it", - ["opentype"] = "ita", - ["variant"] = "it", - }, - { - ["description"] = "Japanese", - ["script"] = "jpan", - ["bibliographical"] = "jpn", - ["terminological"] = "jpn", - ["context"] = "ja", - ["opentype"] = "jan", - ["variant"] = "ja", - }, - { - ["description"] = "Latin", - ["script"] = "latn", - ["bibliographical"] = "lat", - ["terminological"] = "lat", - ["context"] = "la", - ["opentype"] = "lat", - ["variant"] = "la", - }, - { - ["description"] = "Portuguese", - ["script"] = "latn", - ["bibliographical"] = "por", - ["terminological"] = "por", - ["context"] = "pt", - ["opentype"] = "ptg", - ["variant"] = "pt", - }, - { - ["description"] = "Polish", - ["script"] = "latn", - ["bibliographical"] = "pol", - ["terminological"] = "pol", - ["context"] = "pl", - ["opentype"] = "plk", - ["variant"] = "pl", - }, - { - ["description"] = "Romanian", - ["script"] = "latn", - ["bibliographical"] = "rum", - ["terminological"] = "ron", - ["context"] = "ro", - ["opentype"] = "rom", - ["variant"] = "ro", - }, - { - ["description"] = "Russian", - ["script"] = "cyrl", - ["bibliographical"] = "rus", - ["terminological"] = "rus", - ["context"] = "ru", - ["opentype"] = "rus", - ["variant"] = "ru", - }, - { - ["description"] = "Slovak", - ["script"] = "latn", - ["bibliographical"] = "slo", - ["terminological"] = "slk", - ["context"] = "sk", - ["opentype"] = "sky", - ["variant"] = "sk", - }, - { - ["description"] = "Slovenian", - ["script"] = "latn", - ["bibliographical"] = "slv", - ["terminological"] = "slv", - ["context"] = "sl", - ["opentype"] = "slv", - ["variant"] = "sl", - }, - { - ["description"] = "Swedish", - ["script"] = "latn", - ["bibliographical"] = "swe", - ["terminological"] = "swe", - ["context"] = "sv", - ["opentype"] = "sve", - ["variant"] = "sv", - }, - { - ["description"] = "Thai", - ["script"] = "thai", - -- ["bibliographical"] = "", - -- ["terminological"] = "", - ["context"] = "th", - ["opentype"] = "tha", - -- ["variant"] = "", - }, - { - ["description"] = "Turkish", - ["script"] = "latn", - ["bibliographical"] = "tur", - ["terminological"] = "tur", - ["context"] = "tr", - ["opentype"] = "trk", - ["variant"] = "tr", - }, - { - ["description"] = "Vietnamese", - ["script"] = "latn", - ["bibliographical"] = "vie", - ["terminological"] = "vie", - ["context"] = "vn", - ["opentype"] = "vit", - ["variant"] = "vi", - }, - { - ["description"] = "Chinese, simplified", - ["script"] = "hans", - 
["opentypescript"] = "hani", - ["bibliographical"] = "chi", - ["terminological"] = "zho", - ["context"] = "cn", - ["opentype"] = "zhs", - ["variant"] = "zh-hans", - }, -} - -data.specifications = specifications - -local variants = { } data.variants = variants -local contexts = { } data.contexts = contexts -local records = { } data.records = records -local scripts = { } data.scripts = scripts -local opentypes = { } data.opentypes = opentypes -local opentypescripts = { } data.opentypescripts = opentypescripts - -for k=1,#specifications do - local specification = specifications[k] - local variant = specification.variant - if variant then - variants[lower(variant)] = specification - end - local opentype = specification.opentype - if opentype then - opentypes[lower(opentype)] = specification - end - local script = specification.script - if script then - scripts[lower(script)] = specification - end - local opentypescript = specification.opentypescript - if opentypescript then - opentypescripts[lower(opentypescript)] = specification - end - local context = context - if context then - if type(context) == "table" then - for k=1,#context do - contexts[context[k]] = specification - end - else - contexts[context] = specification - end - end -end - -local defaultvariant = variants["en-us"] - -local function get(k,key) - local v = rawget(variants,k) or rawget(opentypes,k) or rawget(contexts,k) - return v and v[key] -end - -setmetatableindex(variants, function(t,k) - k = lower(k) - local v = get(k,"language") or defaultvariant.language - t[k] = v - return v -end) - -setmetatableindex(opentypes, function(t,k) - k = lower(k) - local v = get(k,"opentype") or "dflt" - t[k] = v - return v -end) - -setmetatableindex(opentypescripts, function(t,k) - k = lower(k) - local v = get(k,"opentypescript") or get(k,"script") or defaultvariant.opentypescript or defaultvariant.script - t[k] = v - return v -end) - -setmetatableindex(contexts, function(t,k) - k = lower(str) - local v = get(k,"context") or defaultvariant.context - v = type(v) == "table" and v[1] or v - t[k] = v - return v -end) - -setmetatableindex(records, function(t,k) -- how useful is this one? - k = lower(k) - local v = get(k) or defaultvariant - t[k] = v - return v -end) - --- print(opentypes.nl,opentypescripts.nl) --- print(opentypes.de,opentypescripts.de) +if not modules then modules = { } end modules ['lang-def'] = { + version = 1.001, + comment = "companion to lang-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" + -- dataonly = true, -- saves 10K +} + +local rawget = rawget +local lower = string.lower + +languages = languages or { } +local languages = languages +languages.data = languages.data or { } +local data = languages.data + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex + +-- The specifications are based on an analysis done by Arthur. The +-- names of tags were changed by Hans. The data is not yet used but +-- will be some day. +-- +-- description +-- +-- The description is only meant as an indication; for example 'no' is +-- "Norwegian, undetermined" because that's really what it is. +-- +-- script +-- +-- This is the 4-letter script tag according to ISO 15924, the +-- official standard. +-- +-- bibliographical and terminological +-- +-- Then we have *two* ISO-639 3-letter tags: one is supposed to be used +-- for "bibliographical" purposes, the other for "terminological". 
The +-- first one is quite special (and mostly used in American libraries), +-- and the more interesting one is the other (apparently it's that one +-- we find everywhere). +-- +-- context +-- +-- These are the ones used in ConteXt. Kind of numberplate ones. +-- +-- opentype +-- +-- This is the 3-letter OpenType language tag, obviously. +-- +-- variant +-- +-- This is actually the rfc4646: an extension of ISO-639 that also defines +-- codes for variants like de-1901 for "German, 1901 orthography" or zh-Hans for +-- "Chinese, simplified characters" ('Hans' is the ISO-15924 tag for +-- "HAN ideographs, Simplified" :-) As I said yesterday, I think this +-- should be the reference since it's exactly what we want: it's really +-- standard (it's a RFC) and it's more than simply languages. To my +-- knowledge this is the only system that addresses this issue. +-- +-- Warning: it's not unique! Because we have two "German" languages +-- (and could, potentially, have two Chinese, etc.) +-- +-- Beware: the abbreviations are lowercased, which makes it more +-- convenient to use them. +-- +-- todo: add default features + +local specifications = allocate { + { + ["description"] = "Dutch", + ["script"] = "latn", + -- ["bibliographical"] = "nld", + -- ["terminological"] = "nld", + ["context"] = "nl", + ["opentype"] = "nld", + ["variant"] = "nl", + }, + { + ["description"] = "Basque", + ["script"] = "latn", + ["bibliographical"] = "baq", + ["terminological"] = "eus", + ["context"] = "ba", + ["opentype"] = "euq", + ["variant"] = "eu", + }, + { + ["description"] = "Welsh", + ["script"] = "latn", + ["bibliographical"] = "wel", + ["terminological"] = "cym", + ["context"] = "cy", + ["opentype"] = "wel", + ["variant"] = "cy", + }, + { + ["description"] = "Icelandic", + ["script"] = "latn", + ["bibliographical"] = "ice", + ["terminological"] = "isl", + ["context"] = "is", + ["opentype"] = "isl", + ["variant"] = "is", + }, + { + ["description"] = "Norwegian, undetermined", + ["script"] = "latn", + ["bibliographical"] = "nor", + ["terminological"] = "nor", + ["context"] = "no", + ["variant"] = "no", + }, + { + ["description"] = "Norwegian bokmal", + ["script"] = "latn", + ["bibliographical"] = "nob", + ["terminological"] = "nob", + ["opentype"] = "nor", -- not sure! 
+ ["variant"] = "nb", + }, + { + ["description"] = "Norwegian nynorsk", + ["script"] = "latn", + ["bibliographical"] = "nno", + ["terminological"] = "nno", + ["opentype"] = "nny", + ["variant"] = "nn", + }, + { + ["description"] = "Ancient Greek", + ["script"] = "grek", + ["bibliographical"] = "grc", + ["terminological"] = "grc", + ["context"] = "agr", + ["variant"] = "grc", + }, + { + ["description"] = "German, 1901 orthography", + ["script"] = "latn", + ["terminological"] = "deu", + ["context"] = "deo", + ["opentype"] = "deu", + ["variant"] = "de-1901", + }, + { + ["description"] = "German, 1996 orthography", + ["script"] = "latn", + ["bibliographical"] = "ger", + ["terminological"] = "deu", + ["context"] = "de", + ["opentype"] = "deu", + ["variant"] = "de-1996", + }, + { + ["description"] = "Afrikaans", + ["script"] = "latn", + ["bibliographical"] = "afr", + ["terminological"] = "afr", + ["context"] = "af", + ["opentype"] = "afk", + ["variant"] = "af", + }, + { + ["description"] = "Catalan", + ["script"] = "latn", + ["bibliographical"] = "cat", + ["terminological"] = "cat", + ["context"] = "ca", + ["opentype"] = "cat", + ["variant"] = "ca", + }, + { + ["description"] = "Czech", + ["script"] = "latn", + ["bibliographical"] = "cze", + ["terminological"] = "ces", + ["context"] = "cz", + ["opentype"] = "csy", + ["variant"] = "cs", + }, + { + ["description"] = "Greek", + ["script"] = "grek", + ["bibliographical"] = "gre", + ["terminological"] = "ell", + ["context"] = "gr", + ["opentype"] = "ell", + ["variant"] = "el", + }, + { + ["description"] = "American English", + ["script"] = "latn", + ["bibliographical"] = "eng", + ["terminological"] = "eng", + ["context"] = "us", + ["opentype"] = "eng", + ["variant"] = "en-US", + }, + { + ["description"] = "British English", + ["script"] = "latn", + ["bibliographical"] = "eng", + ["terminological"] = "eng", + ["context"] = "uk", + ["opentype"] = "eng", + ["variant"] = "en-UK", -- Could be en-GB as well ... 
+ }, + { + ["description"] = "Spanish", + ["script"] = "latn", + ["bibliographical"] = "spa", + ["terminological"] = "spa", + ["context"] = "es", + ["opentype"] = "esp", + ["variant"] = "es", + }, + { + ["description"] = "Finnish", + ["script"] = "latn", + ["bibliographical"] = "fin", + ["terminological"] = "fin", + ["context"] = "fi", + ["opentype"] = "fin", + ["variant"] = "fi", + }, + { + ["description"] = "French", + ["script"] = "latn", + ["bibliographical"] = "fre", + ["terminological"] = "fra", + ["context"] = "fr", + ["opentype"] = "fra", + ["variant"] = "fr", + }, + { + ["description"] = "Croatian", + ["script"] = "latn", + ["bibliographical"] = "scr", + ["terminological"] = "hrv", + ["context"] = "hr", + ["opentype"] = "hrv", + ["variant"] = "hr", + }, + { + ["description"] = "Hungarian", + ["script"] = "latn", + ["bibliographical"] = "hun", + ["terminological"] = "hun", + ["context"] = "hu", + ["opentype"] = "hun", + ["variant"] = "hu", + }, + { + ["description"] = "Italian", + ["script"] = "latn", + ["bibliographical"] = "ita", + ["terminological"] = "ita", + ["context"] = "it", + ["opentype"] = "ita", + ["variant"] = "it", + }, + { + ["description"] = "Japanese", + ["script"] = "jpan", + ["bibliographical"] = "jpn", + ["terminological"] = "jpn", + ["context"] = "ja", + ["opentype"] = "jan", + ["variant"] = "ja", + }, + { + ["description"] = "Latin", + ["script"] = "latn", + ["bibliographical"] = "lat", + ["terminological"] = "lat", + ["context"] = "la", + ["opentype"] = "lat", + ["variant"] = "la", + }, + { + ["description"] = "Portuguese", + ["script"] = "latn", + ["bibliographical"] = "por", + ["terminological"] = "por", + ["context"] = "pt", + ["opentype"] = "ptg", + ["variant"] = "pt", + }, + { + ["description"] = "Polish", + ["script"] = "latn", + ["bibliographical"] = "pol", + ["terminological"] = "pol", + ["context"] = "pl", + ["opentype"] = "plk", + ["variant"] = "pl", + }, + { + ["description"] = "Romanian", + ["script"] = "latn", + ["bibliographical"] = "rum", + ["terminological"] = "ron", + ["context"] = "ro", + ["opentype"] = "rom", + ["variant"] = "ro", + }, + { + ["description"] = "Russian", + ["script"] = "cyrl", + ["bibliographical"] = "rus", + ["terminological"] = "rus", + ["context"] = "ru", + ["opentype"] = "rus", + ["variant"] = "ru", + }, + { + ["description"] = "Slovak", + ["script"] = "latn", + ["bibliographical"] = "slo", + ["terminological"] = "slk", + ["context"] = "sk", + ["opentype"] = "sky", + ["variant"] = "sk", + }, + { + ["description"] = "Slovenian", + ["script"] = "latn", + ["bibliographical"] = "slv", + ["terminological"] = "slv", + ["context"] = "sl", + ["opentype"] = "slv", + ["variant"] = "sl", + }, + { + ["description"] = "Swedish", + ["script"] = "latn", + ["bibliographical"] = "swe", + ["terminological"] = "swe", + ["context"] = "sv", + ["opentype"] = "sve", + ["variant"] = "sv", + }, + { + ["description"] = "Thai", + ["script"] = "thai", + -- ["bibliographical"] = "", + -- ["terminological"] = "", + ["context"] = "th", + ["opentype"] = "tha", + -- ["variant"] = "", + }, + { + ["description"] = "Turkish", + ["script"] = "latn", + ["bibliographical"] = "tur", + ["terminological"] = "tur", + ["context"] = "tr", + ["opentype"] = "trk", + ["variant"] = "tr", + }, + { + ["description"] = "Vietnamese", + ["script"] = "latn", + ["bibliographical"] = "vie", + ["terminological"] = "vie", + ["context"] = "vn", + ["opentype"] = "vit", + ["variant"] = "vi", + }, + { + ["description"] = "Chinese, simplified", + ["script"] = "hans", + 
["opentypescript"] = "hani", + ["bibliographical"] = "chi", + ["terminological"] = "zho", + ["context"] = "cn", + ["opentype"] = "zhs", + ["variant"] = "zh-hans", + }, +} + +data.specifications = specifications + +local variants = { } data.variants = variants +local contexts = { } data.contexts = contexts +local records = { } data.records = records +local scripts = { } data.scripts = scripts +local opentypes = { } data.opentypes = opentypes +local opentypescripts = { } data.opentypescripts = opentypescripts + +for k=1,#specifications do + local specification = specifications[k] + local variant = specification.variant + if variant then + variants[lower(variant)] = specification + end + local opentype = specification.opentype + if opentype then + opentypes[lower(opentype)] = specification + end + local script = specification.script + if script then + scripts[lower(script)] = specification + end + local opentypescript = specification.opentypescript + if opentypescript then + opentypescripts[lower(opentypescript)] = specification + end + local context = context + if context then + if type(context) == "table" then + for k=1,#context do + contexts[context[k]] = specification + end + else + contexts[context] = specification + end + end +end + +local defaultvariant = variants["en-us"] + +local function get(k,key) + local v = rawget(variants,k) or rawget(opentypes,k) or rawget(contexts,k) + return v and v[key] +end + +setmetatableindex(variants, function(t,k) + k = lower(k) + local v = get(k,"language") or defaultvariant.language + t[k] = v + return v +end) + +setmetatableindex(opentypes, function(t,k) + k = lower(k) + local v = get(k,"opentype") or "dflt" + t[k] = v + return v +end) + +setmetatableindex(opentypescripts, function(t,k) + k = lower(k) + local v = get(k,"opentypescript") or get(k,"script") or defaultvariant.opentypescript or defaultvariant.script + t[k] = v + return v +end) + +setmetatableindex(contexts, function(t,k) + k = lower(str) + local v = get(k,"context") or defaultvariant.context + v = type(v) == "table" and v[1] or v + t[k] = v + return v +end) + +setmetatableindex(records, function(t,k) -- how useful is this one? 
+ k = lower(k) + local v = get(k) or defaultvariant + t[k] = v + return v +end) + +-- print(opentypes.nl,opentypescripts.nl) +-- print(opentypes.de,opentypescripts.de) diff --git a/tex/context/base/lang-frq-de.lua b/tex/context/base/lang-frq-de.lua index 3733f39f9..4e54db2c8 100644 --- a/tex/context/base/lang-frq-de.lua +++ b/tex/context/base/lang-frq-de.lua @@ -1,12 +1,12 @@ -return { - language = "de", - source = "http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm", - frequencies = { - [0x0061] = 6.47, [0x0062] = 1.93, [0x0063] = 2.68, [0x0064] = 4.83, [0x0065] = 17.48, - [0x0066] = 1.65, [0x0067] = 3.06, [0x0068] = 4.23, [0x0069] = 7.73, [0x006A] = 0.27, - [0x006B] = 1.46, [0x006C] = 3.49, [0x006D] = 2.58, [0x006E] = 9.84, [0x006F] = 2.98, - [0x0070] = 0.96, [0x0071] = 0.02, [0x0072] = 7.54, [0x0073] = 6.83, [0x0074] = 6.13, - [0x0075] = 4.17, [0x0076] = 0.94, [0x0077] = 1.48, [0x0078] = 0.04, [0x0079] = 0.08, - [0x007A] = 1.14, - } -} +return { + language = "de", + source = "http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm", + frequencies = { + [0x0061] = 6.47, [0x0062] = 1.93, [0x0063] = 2.68, [0x0064] = 4.83, [0x0065] = 17.48, + [0x0066] = 1.65, [0x0067] = 3.06, [0x0068] = 4.23, [0x0069] = 7.73, [0x006A] = 0.27, + [0x006B] = 1.46, [0x006C] = 3.49, [0x006D] = 2.58, [0x006E] = 9.84, [0x006F] = 2.98, + [0x0070] = 0.96, [0x0071] = 0.02, [0x0072] = 7.54, [0x0073] = 6.83, [0x0074] = 6.13, + [0x0075] = 4.17, [0x0076] = 0.94, [0x0077] = 1.48, [0x0078] = 0.04, [0x0079] = 0.08, + [0x007A] = 1.14, + } +} diff --git a/tex/context/base/lang-frq-en.lua b/tex/context/base/lang-frq-en.lua index 9e18d7166..ee122c9da 100644 --- a/tex/context/base/lang-frq-en.lua +++ b/tex/context/base/lang-frq-en.lua @@ -1,26 +1,26 @@ --- return { --- language = "en", --- source = "http://caislab.icu.ac.kr/course/2001/spring/ice605/down/010306.pdf", --- frequencies = { --- [0x0061] = 8.2, [0x0062] = 1.5, [0x0063] = 2.8, [0x0064] = 4.3, [0x0065] = 12.7, --- [0x0066] = 2.2, [0x0067] = 2.0, [0x0068] = 6.1, [0x0069] = 7.0, [0x006A] = 0.2, --- [0x006B] = 0.8, [0x006C] = 4.0, [0x006D] = 2.4, [0x006E] = 6.7, [0x006F] = 7.5, --- [0x0070] = 1.9, [0x0071] = 0.1, [0x0072] = 6.0, [0x0073] = 6.3, [0x0074] = 9.1, --- [0x0075] = 2.8, [0x0076] = 1.0, [0x0077] = 2.3, [0x0078] = 0.1, [0x0079] = 2.0, --- [0x007A] = 0.1, --- } --- } - -return { - language = "en", - source = "http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm", - frequencies = { - [0x0061] = 8.04, [0x0062] = 1.54, [0x0063] = 3.06, [0x0064] = 3.99, [0x0065] = 12.51, - [0x0066] = 2.30, [0x0067] = 1.96, [0x0068] = 5.49, [0x0069] = 7.26, [0x006A] = 0.16, - [0x006B] = 0.67, [0x006C] = 4.14, [0x006D] = 2.53, [0x006E] = 7.09, [0x006F] = 7.60, - [0x0070] = 2.00, [0x0071] = 0.11, [0x0072] = 6.12, [0x0073] = 6.54, [0x0074] = 9.25, - [0x0075] = 2.71, [0x0076] = 0.99, [0x0077] = 1.92, [0x0078] = 0.19, [0x0079] = 1.73, - [0x007A] = 0.09, - } -} - +-- return { +-- language = "en", +-- source = "http://caislab.icu.ac.kr/course/2001/spring/ice605/down/010306.pdf", +-- frequencies = { +-- [0x0061] = 8.2, [0x0062] = 1.5, [0x0063] = 2.8, [0x0064] = 4.3, [0x0065] = 12.7, +-- [0x0066] = 2.2, [0x0067] = 2.0, [0x0068] = 6.1, [0x0069] = 7.0, [0x006A] = 0.2, +-- [0x006B] = 0.8, [0x006C] = 4.0, [0x006D] = 2.4, [0x006E] = 6.7, [0x006F] = 7.5, +-- [0x0070] = 1.9, [0x0071] = 0.1, [0x0072] = 6.0, [0x0073] = 6.3, [0x0074] = 9.1, +-- [0x0075] = 2.8, [0x0076] = 1.0, [0x0077] = 2.3, [0x0078] = 0.1, [0x0079] = 2.0, +-- [0x007A] = 0.1, +-- } +-- } + +return { + language = "en", + 
source = "http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm", + frequencies = { + [0x0061] = 8.04, [0x0062] = 1.54, [0x0063] = 3.06, [0x0064] = 3.99, [0x0065] = 12.51, + [0x0066] = 2.30, [0x0067] = 1.96, [0x0068] = 5.49, [0x0069] = 7.26, [0x006A] = 0.16, + [0x006B] = 0.67, [0x006C] = 4.14, [0x006D] = 2.53, [0x006E] = 7.09, [0x006F] = 7.60, + [0x0070] = 2.00, [0x0071] = 0.11, [0x0072] = 6.12, [0x0073] = 6.54, [0x0074] = 9.25, + [0x0075] = 2.71, [0x0076] = 0.99, [0x0077] = 1.92, [0x0078] = 0.19, [0x0079] = 1.73, + [0x007A] = 0.09, + } +} + diff --git a/tex/context/base/lang-frq-nl.lua b/tex/context/base/lang-frq-nl.lua index 7b640b779..fa4851e63 100644 --- a/tex/context/base/lang-frq-nl.lua +++ b/tex/context/base/lang-frq-nl.lua @@ -1,12 +1,12 @@ -return { - language = "nl", - source = "http://www.onzetaal.nl/advies/letterfreq.html", - frequencies = { - [0x0061] = 7.47, [0x0062] = 1.58, [0x0063] = 1.24, [0x0064] = 5.93, [0x0065] = 18.91, - [0x0066] = 0.81, [0x0067] = 3.40, [0x0068] = 2.38, [0x0069] = 6.50, [0x006A] = 1.46, - [0x006B] = 2.25, [0x006C] = 3.57, [0x006D] = 2.21, [0x006E] = 10.03, [0x006F] = 6.06, - [0x0070] = 1.57, [0x0071] = 0.009, [0x0072] = 6.41, [0x0073] = 3.73, [0x0074] = 6.79, - [0x0075] = 1.99, [0x0076] = 2.85, [0x0077] = 1.52, [0x0078] = 0.04, [0x0079] = 0.035, - [0x007A] = 1.39, - } -} +return { + language = "nl", + source = "http://www.onzetaal.nl/advies/letterfreq.html", + frequencies = { + [0x0061] = 7.47, [0x0062] = 1.58, [0x0063] = 1.24, [0x0064] = 5.93, [0x0065] = 18.91, + [0x0066] = 0.81, [0x0067] = 3.40, [0x0068] = 2.38, [0x0069] = 6.50, [0x006A] = 1.46, + [0x006B] = 2.25, [0x006C] = 3.57, [0x006D] = 2.21, [0x006E] = 10.03, [0x006F] = 6.06, + [0x0070] = 1.57, [0x0071] = 0.009, [0x0072] = 6.41, [0x0073] = 3.73, [0x0074] = 6.79, + [0x0075] = 1.99, [0x0076] = 2.85, [0x0077] = 1.52, [0x0078] = 0.04, [0x0079] = 0.035, + [0x007A] = 1.39, + } +} diff --git a/tex/context/base/lang-ini.lua b/tex/context/base/lang-ini.lua index 4ae7656d3..b5bdfd894 100644 --- a/tex/context/base/lang-ini.lua +++ b/tex/context/base/lang-ini.lua @@ -1,355 +1,376 @@ -if not modules then modules = { } end modules ['lang-ini'] = { - version = 1.001, - comment = "companion to lang-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- needs a cleanup (share locals) --- discard language when redefined - --- 002D : hyphen-minus (ascii) --- 2010 : hyphen --- 2011 : nonbreakable hyphen --- 2013 : endash (compound hyphen) - ---~ lang:hyphenation(string) string = lang:hyphenation() lang:clear_hyphenation() - -local type, tonumber = type, tonumber -local utfbyte = utf.byte -local format, gsub = string.format, string.gsub -local concat, sortedkeys, sortedpairs = table.concat, table.sortedkeys, table.sortedpairs -local lpegmatch = lpeg.match - -local settings_to_array = utilities.parsers.settings_to_array - -local trace_patterns = false trackers.register("languages.patterns", function(v) trace_patterns = v end) - -local report_initialization = logs.reporter("languages","initialization") - -local prehyphenchar = lang.prehyphenchar -- global per language -local posthyphenchar = lang.posthyphenchar -- global per language -local lefthyphenmin = lang.lefthyphenmin -local righthyphenmin = lang.righthyphenmin - -local lang = lang -lang.exceptions = lang.hyphenation -local new_langage = lang.new - -languages = languages or {} -local languages = languages - -languages.version = 1.010 - 
-languages.registered = languages.registered or { } -local registered = languages.registered - -languages.associated = languages.associated or { } -local associated = languages.associated - -languages.numbers = languages.numbers or { } -local numbers = languages.numbers - -languages.data = languages.data or { } -local data = languages.data - -storage.register("languages/numbers", numbers, "languages.numbers") -storage.register("languages/registered",registered,"languages.registered") -storage.register("languages/associated",associated,"languages.associated") -storage.register("languages/data", data, "languages.data") - -local nofloaded = 0 - -local function resolve(tag) - local data, instance = registered[tag], nil - if data then - instance = data.instance - if not instance then - instance = new_langage(data.number) - data.instance = instance - end - end - return data, instance -end - -local function tolang(what) -- returns lang object - local tag = numbers[what] - local data = tag and registered[tag] or registered[what] - if data then - local instance = data.lang - if not instance then - instance = new_langage(data.number) - data.instance = instance - end - return instance - end -end - --- languages.tolang = tolang - --- patterns=en --- patterns=en,de - -local function loaddefinitions(tag,specification) - statistics.starttiming(languages) - local data, instance = resolve(tag) - local definitions = settings_to_array(specification.patterns or "") - if #definitions > 0 then - if trace_patterns then - report_initialization("pattern specification for language %a: %s",tag,specification.patterns) - end - local dataused, ok = data.used, false - for i=1,#definitions do - local definition = definitions[i] - if definition == "" then - -- error - elseif definition == "reset" then -- interfaces.variables.reset - if trace_patterns then - report_initialization("clearing patterns for language %a",tag) - end - instance:clear_patterns() - elseif not dataused[definition] then - dataused[definition] = definition - local filename = "lang-" .. definition .. 
".lua" - local fullname = resolvers.findfile(filename) or "" - if fullname ~= "" then - if trace_patterns then - report_initialization("loading definition %a for language %a from %a",definition,tag,fullname) - end - local defs = dofile(fullname) -- use regular loader instead - if defs then -- todo: version test - ok, nofloaded = true, nofloaded + 1 - instance:patterns (defs.patterns and defs.patterns .data or "") - instance:hyphenation(defs.exceptions and defs.exceptions.data or "") - else - report_initialization("invalid definition %a for language %a in %a",definition,tag,filename) - end - elseif trace_patterns then - report_initialization("invalid definition %a for language %a in %a",definition,tag,filename) - end - elseif trace_patterns then - report_initialization("definition %a for language %a already loaded",definition,tag) - end - end - return ok - elseif trace_patterns then - report_initialization("no definitions for language %a",tag) - end - statistics.stoptiming(languages) -end - -storage.shared.noflanguages = storage.shared.noflanguages or 0 - -local noflanguages = storage.shared.noflanguages - -function languages.define(tag,parent) - noflanguages = noflanguages + 1 - if trace_patterns then - report_initialization("assigning number %a to %a",noflanguages,tag) - end - numbers[noflanguages] = tag - registered[tag] = { - tag = tag, - parent = parent or "", - patterns = "", - loaded = false, - used = { }, - dirty = true, - number = noflanguages, - instance = nil, -- luatex data structure - synonyms = { }, - } - storage.shared.noflanguages = noflanguages -end - -function languages.setsynonym(synonym,tag) -- convenience function - local l = registered[tag] - if l then - l.synonyms[synonym] = true -- maybe some day more info - end -end - -function languages.installed(separator) - return concat(sortedkeys(registered),separator or ",") -end - -function languages.current(n) - return numbers[n and tonumber(n) or tex.language] -end - -function languages.associate(tag,script,language) -- not yet used - associated[tag] = { script, language } -end - -function languages.association(tag) -- not yet used - if type(tag) == "number" then - tag = numbers[tag] - end - local lat = tag and associated[tag] - if lat then - return lat[1], lat[2] - end -end - -function languages.loadable(tag,defaultlanguage) -- hack - local l = registered[tag] -- no synonyms - if l and resolvers.findfile("lang-"..l.patterns..".lua") then - return true - else - return false - end -end - --- a bit messy, we will do all language setting in lua as we can now assign --- and 'patterns' will go away here. 
- -function languages.unload(tag) - local l = registered[tag] - if l then - l.dirty = true - end -end - -if environment.initex then - - function languages.getnumber() - return 0 - end - -else - - function languages.getnumber(tag,default,patterns) - local l = registered[tag] - if l then - if l.dirty then - if trace_patterns then - report_initialization("checking patterns for %a with default %a",tag,default) - end - -- patterns is already resolved to parent patterns if applicable - if patterns and patterns ~= "" then - if l.patterns ~= patterns then - l.patterns = patterns - if trace_patterns then - report_initialization("loading patterns for %a using specification %a",tag,patterns) - end - loaddefinitions(tag,l) - else - -- unchanged - end - elseif l.patterns == "" then - l.patterns = tag - if trace_patterns then - report_initialization("loading patterns for %a using tag",tag) - end - local ok = loaddefinitions(tag,l) - if not ok and tag ~= default then - l.patterns = default - if trace_patterns then - report_initialization("loading patterns for %a using default",tag) - end - loaddefinitions(tag,l) - end - end - l.loaded = true - l.dirty = false - end - return l.number - else - return 0 - end - end - -end - --- not that usefull, global values - -function languages.prehyphenchar (what) return prehyphenchar (tolang(what)) end -function languages.posthyphenchar(what) return posthyphenchar(tolang(what)) end -function languages.lefthyphenmin (what) return lefthyphenmin (tolang(what)) end -function languages.righthyphenmin(what) return righthyphenmin(tolang(what)) end - --- e['implementer']= 'imple{m}{-}{-}menter' --- e['manual'] = 'man{}{}{}' --- e['as'] = 'a-s' --- e['user-friendly'] = 'user=friend-ly' --- e['exceptionally-friendly'] = 'excep-tionally=friend-ly' - -function languages.loadwords(tag,filename) - local data, instance = resolve(tag) - if data then - statistics.starttiming(languages) - instance:hyphenation(io.loaddata(filename) or "") - statistics.stoptiming(languages) - end -end - -function languages.setexceptions(tag,str) - local data, instance = resolve(tag) - if data then - instance:hyphenation(string.strip(str)) -- we need to strip leading spaces - end -end - -function languages.hyphenate(tag,str) - -- todo: does this still work? - local data, instance = resolve(tag) - if data then - return instance:hyphenate(str) - else - return str - end -end - --- hyphenation.define ("zerolanguage") --- hyphenation.loadpatterns ("zerolanguage") -- else bug --- hyphenation.loadexceptions("zerolanguage") -- else bug - -languages.logger = languages.logger or { } - -function languages.logger.report() - local result, r = { }, 0 - for tag, l in sortedpairs(registered) do - if l.loaded then - r = r + 1 - result[r] = format("%s:%s:%s",tag,l.parent,l.number) - end - end - return r > 0 and concat(result," ") or "none" -end - --- must happen at the tex end .. 
will use lang-def.lua - -languages.associate('en','latn','eng') -languages.associate('uk','latn','eng') -languages.associate('nl','latn','nld') -languages.associate('de','latn','deu') -languages.associate('fr','latn','fra') - -statistics.register("loaded patterns", function() - local result = languages.logger.report() - if result ~= "none" then - return result - end -end) - -statistics.register("language load time", function() - return statistics.elapsedseconds(languages, format(", nofpatterns: %s",nofloaded)) -end) - --- interface - -local getnumber = languages.getnumber - -function commands.languagenumber(tag,default,patterns) - context(getnumber(tag,default,patterns)) -end - -function commands.installedlanguages(separator) - context(languages.installed(separator)) -end - -commands.definelanguage = languages.define -commands.setlanguagesynonym = languages.setsynonym -commands.unloadlanguage = languages.unload -commands.setlanguageexceptions = languages.setexceptions +if not modules then modules = { } end modules ['lang-ini'] = { + version = 1.001, + comment = "companion to lang-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- needs a cleanup (share locals) +-- discard language when redefined + +-- 002D : hyphen-minus (ascii) +-- 2010 : hyphen +-- 2011 : nonbreakable hyphen +-- 2013 : endash (compound hyphen) + +--~ lang:hyphenation(string) string = lang:hyphenation() lang:clear_hyphenation() + +local type, tonumber = type, tonumber +local utfbyte = utf.byte +local format, gsub = string.format, string.gsub +local concat, sortedkeys, sortedpairs = table.concat, table.sortedkeys, table.sortedpairs +local lpegmatch = lpeg.match + +local settings_to_array = utilities.parsers.settings_to_array + +local trace_patterns = false trackers.register("languages.patterns", function(v) trace_patterns = v end) + +local report_initialization = logs.reporter("languages","initialization") + +local prehyphenchar = lang.prehyphenchar -- global per language +local posthyphenchar = lang.posthyphenchar -- global per language +local lefthyphenmin = lang.lefthyphenmin +local righthyphenmin = lang.righthyphenmin + +local lang = lang +lang.exceptions = lang.hyphenation +local new_langage = lang.new + +languages = languages or {} +local languages = languages + +languages.version = 1.010 + +languages.registered = languages.registered or { } +local registered = languages.registered + +languages.associated = languages.associated or { } +local associated = languages.associated + +languages.numbers = languages.numbers or { } +local numbers = languages.numbers + +languages.data = languages.data or { } +local data = languages.data + +storage.register("languages/numbers", numbers, "languages.numbers") +storage.register("languages/registered",registered,"languages.registered") +storage.register("languages/associated",associated,"languages.associated") +storage.register("languages/data", data, "languages.data") + +local nofloaded = 0 + +local function resolve(tag) + local data, instance = registered[tag], nil + if data then + instance = data.instance + if not instance then + instance = new_langage(data.number) + data.instance = instance + end + end + return data, instance +end + +local function tolang(what) -- returns lang object + local tag = numbers[what] + local data = tag and registered[tag] or registered[what] + if data then + local instance = data.lang + if not instance then + instance = 
new_langage(data.number) + data.instance = instance + end + return instance + end +end + +-- languages.tolang = tolang + +-- patterns=en +-- patterns=en,de + +local function validdata(dataset,what,tag) + if dataset then + local data = dataset.data + if not data or data == "" then + return nil + elseif dataset.compression == "zlib" then + data = zlib.decompress(data) + if dataset.length and dataset.length ~= #data then + report_initialization("compression error in %a for language %a","patterns",what,tag) + end + return data + else + return data + end + end +end + +local function loaddefinitions(tag,specification) + statistics.starttiming(languages) + local data, instance = resolve(tag) + local definitions = settings_to_array(specification.patterns or "") + if #definitions > 0 then + if trace_patterns then + report_initialization("pattern specification for language %a: %s",tag,specification.patterns) + end + local dataused, ok = data.used, false + for i=1,#definitions do + local definition = definitions[i] + if definition == "" then + -- error + elseif definition == "reset" then -- interfaces.variables.reset + if trace_patterns then + report_initialization("clearing patterns for language %a",tag) + end + instance:clear_patterns() + elseif not dataused[definition] then + dataused[definition] = definition + local filename = "lang-" .. definition .. ".lua" + local fullname = resolvers.findfile(filename) or "" + if fullname ~= "" then + if trace_patterns then + report_initialization("loading definition %a for language %a from %a",definition,tag,fullname) + end + local defs = dofile(fullname) -- use regular loader instead + if defs then -- todo: version test + ok, nofloaded = true, nofloaded + 1 + -- instance:patterns (defs.patterns and defs.patterns .data or "") + -- instance:hyphenation(defs.exceptions and defs.exceptions.data or "") + instance:patterns (validdata(defs.patterns, "patterns", tag) or "") + instance:hyphenation(validdata(defs.exceptions,"exceptions",tag) or "") + else + report_initialization("invalid definition %a for language %a in %a",definition,tag,filename) + end + elseif trace_patterns then + report_initialization("invalid definition %a for language %a in %a",definition,tag,filename) + end + elseif trace_patterns then + report_initialization("definition %a for language %a already loaded",definition,tag) + end + end + return ok + elseif trace_patterns then + report_initialization("no definitions for language %a",tag) + end + statistics.stoptiming(languages) +end + +storage.shared.noflanguages = storage.shared.noflanguages or 0 + +local noflanguages = storage.shared.noflanguages + +function languages.define(tag,parent) + noflanguages = noflanguages + 1 + if trace_patterns then + report_initialization("assigning number %a to %a",noflanguages,tag) + end + numbers[noflanguages] = tag + registered[tag] = { + tag = tag, + parent = parent or "", + patterns = "", + loaded = false, + used = { }, + dirty = true, + number = noflanguages, + instance = nil, -- luatex data structure + synonyms = { }, + } + storage.shared.noflanguages = noflanguages +end + +function languages.setsynonym(synonym,tag) -- convenience function + local l = registered[tag] + if l then + l.synonyms[synonym] = true -- maybe some day more info + end +end + +function languages.installed(separator) + return concat(sortedkeys(registered),separator or ",") +end + +function languages.current(n) + return numbers[n and tonumber(n) or tex.language] +end + +function languages.associate(tag,script,language) -- not yet used + 
associated[tag] = { script, language } +end + +function languages.association(tag) -- not yet used + if type(tag) == "number" then + tag = numbers[tag] + end + local lat = tag and associated[tag] + if lat then + return lat[1], lat[2] + end +end + +function languages.loadable(tag,defaultlanguage) -- hack + local l = registered[tag] -- no synonyms + if l and resolvers.findfile("lang-"..l.patterns..".lua") then + return true + else + return false + end +end + +-- a bit messy, we will do all language setting in lua as we can now assign +-- and 'patterns' will go away here. + +function languages.unload(tag) + local l = registered[tag] + if l then + l.dirty = true + end +end + +if environment.initex then + + function languages.getnumber() + return 0 + end + +else + + function languages.getnumber(tag,default,patterns) + local l = registered[tag] + if l then + if l.dirty then + if trace_patterns then + report_initialization("checking patterns for %a with default %a",tag,default) + end + -- patterns is already resolved to parent patterns if applicable + if patterns and patterns ~= "" then + if l.patterns ~= patterns then + l.patterns = patterns + if trace_patterns then + report_initialization("loading patterns for %a using specification %a",tag,patterns) + end + loaddefinitions(tag,l) + else + -- unchanged + end + elseif l.patterns == "" then + l.patterns = tag + if trace_patterns then + report_initialization("loading patterns for %a using tag",tag) + end + local ok = loaddefinitions(tag,l) + if not ok and tag ~= default then + l.patterns = default + if trace_patterns then + report_initialization("loading patterns for %a using default",tag) + end + loaddefinitions(tag,l) + end + end + l.loaded = true + l.dirty = false + end + return l.number + else + return 0 + end + end + +end + +-- not that usefull, global values + +function languages.prehyphenchar (what) return prehyphenchar (tolang(what)) end +function languages.posthyphenchar(what) return posthyphenchar(tolang(what)) end +function languages.lefthyphenmin (what) return lefthyphenmin (tolang(what)) end +function languages.righthyphenmin(what) return righthyphenmin(tolang(what)) end + +-- e['implementer']= 'imple{m}{-}{-}menter' +-- e['manual'] = 'man{}{}{}' +-- e['as'] = 'a-s' +-- e['user-friendly'] = 'user=friend-ly' +-- e['exceptionally-friendly'] = 'excep-tionally=friend-ly' + +function languages.loadwords(tag,filename) + local data, instance = resolve(tag) + if data then + statistics.starttiming(languages) + instance:hyphenation(io.loaddata(filename) or "") + statistics.stoptiming(languages) + end +end + +function languages.setexceptions(tag,str) + local data, instance = resolve(tag) + if data then + instance:hyphenation(string.strip(str)) -- we need to strip leading spaces + end +end + +function languages.hyphenate(tag,str) + -- todo: does this still work? + local data, instance = resolve(tag) + if data then + return instance:hyphenate(str) + else + return str + end +end + +-- hyphenation.define ("zerolanguage") +-- hyphenation.loadpatterns ("zerolanguage") -- else bug +-- hyphenation.loadexceptions("zerolanguage") -- else bug + +languages.logger = languages.logger or { } + +function languages.logger.report() + local result, r = { }, 0 + for tag, l in sortedpairs(registered) do + if l.loaded then + r = r + 1 + result[r] = format("%s:%s:%s",tag,l.parent,l.number) + end + end + return r > 0 and concat(result," ") or "none" +end + +-- must happen at the tex end .. 
will use lang-def.lua + +languages.associate('en','latn','eng') +languages.associate('uk','latn','eng') +languages.associate('nl','latn','nld') +languages.associate('de','latn','deu') +languages.associate('fr','latn','fra') + +statistics.register("loaded patterns", function() + local result = languages.logger.report() + if result ~= "none" then +-- return result + return format("%s, load time: %s",result,statistics.elapsedtime(languages)) + end +end) + +-- statistics.register("language load time", function() +-- -- often zero so we can merge that in the above +-- return statistics.elapsedseconds(languages, format(", nofpatterns: %s",nofloaded)) +-- end) + +-- interface + +local getnumber = languages.getnumber + +function commands.languagenumber(tag,default,patterns) + context(getnumber(tag,default,patterns)) +end + +function commands.installedlanguages(separator) + context(languages.installed(separator)) +end + +commands.definelanguage = languages.define +commands.setlanguagesynonym = languages.setsynonym +commands.unloadlanguage = languages.unload +commands.setlanguageexceptions = languages.setexceptions diff --git a/tex/context/base/lang-lab.lua b/tex/context/base/lang-lab.lua index 91c258418..c83cd8bc8 100644 --- a/tex/context/base/lang-lab.lua +++ b/tex/context/base/lang-lab.lua @@ -1,142 +1,142 @@ -if not modules then modules = { } end modules ['lang-lab'] = { - version = 1.001, - comment = "companion to lang-lab.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, find = string.format, string.find -local next, rawget, type = next, rawget, type -local lpegmatch = lpeg.match -local formatters = string.formatters - -local prtcatcodes = catcodes.numbers.prtcatcodes -- todo: use different method - -local trace_labels = false trackers.register("languages.labels", function(v) trace_labels = v end) -local report_labels = logs.reporter("languages","labels") - -languages.labels = languages.labels or { } -local labels = languages.labels - -local variables = interfaces.variables -local settings_to_array = utilities.parsers.settings_to_array - -local splitter = lpeg.splitat(":") - -local function split(tag) - return lpegmatch(splitter,tag) -end - -labels.split = split - -local contextsprint = context.sprint - -local function definelanguagelabels(data,class,tag,rawtag) - for language, text in next, data.labels do - if text == "" then - -- skip - elseif type(text) == "table" then - contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text[1],"}{",text[2],"}") - if trace_labels then - report_labels("language %a, defining label %a as %a and %a",language,rawtag,text[1],text[2]) - end - else - contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text,"}{}") - if trace_labels then - report_labels("language %a, defining label %a as %a",language,rawtag,text) - end - end - end -end - -function labels.define(class,name,prefixed) - local list = languages.data.labels[name] - if list then - report_labels("defining label set %a",name) - for tag, data in next, list do - if data.hidden then - -- skip - elseif prefixed then - local first, second = lpegmatch(splitter,tag) - if second then - if rawget(variables,first) then - if rawget(variables,second) then - definelanguagelabels(data,class,formatters["\\v!%s:\\v!%s"](first,second),tag) - else - definelanguagelabels(data,class,formatters["\\v!%s:%s"](first,second),tag) - end - elseif 
rawget(variables,second) then - definelanguagelabels(data,class,formatters["%s:\\v!%s"](first,second),tag) - else - definelanguagelabels(data,class,formatters["%s:%s"](first,second),tag) - end - elseif rawget(variables,rawtag) then - definelanguagelabels(data,class,formatters["\\v!%s"](tag),tag) - else - definelanguagelabels(data,class,tag,tag) - end - else - definelanguagelabels(data,class,tag,tag) - end - end - else - report_labels("unknown label set %a",name) - end -end - --- function labels.check() --- for category, list in next, languages.data.labels do --- for tag, specification in next, list do --- for language, text in next, specification.labels do --- if type(text) == "string" and find(text,",") then --- report_labels("warning: label with comma found, category %a, language %a, tag %a, text %a", --- category, language, tag, text) --- end --- end --- end --- end --- end --- --- labels.check() - --- interface - -commands.definelabels = labels.define - --- function commands.setstrippedtextprefix(str) --- context(string.strip(str)) --- end - --- list : { "a", "b", "c" } --- separator : ", " --- last : " and " - --- text : "a,b,c" --- separators : "{, },{ and }" - -function commands.concatcommalist(settings) -- it's too easy to forget that this one is there - local list = settings.list or settings_to_array(settings.text or "") - local size = #list - local command = settings.command and context[settings.command] or context - if size > 1 then - local separator, last = " ", " " - if settings.separators then - local set = settings_to_array(settings.separators) - separator = set[1] or settings.separator or separator - last = set[2] or settings.last or last - else - separator = settings.separator or separator - last = settings.last or last - end - command(list[1]) - for i=2,size-1 do - context(separator) - command(list[i]) - end - context(last) - end - if size > 0 then - command(list[size]) - end -end +if not modules then modules = { } end modules ['lang-lab'] = { + version = 1.001, + comment = "companion to lang-lab.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, find = string.format, string.find +local next, rawget, type = next, rawget, type +local lpegmatch = lpeg.match +local formatters = string.formatters + +local prtcatcodes = catcodes.numbers.prtcatcodes -- todo: use different method + +local trace_labels = false trackers.register("languages.labels", function(v) trace_labels = v end) +local report_labels = logs.reporter("languages","labels") + +languages.labels = languages.labels or { } +local labels = languages.labels + +local variables = interfaces.variables +local settings_to_array = utilities.parsers.settings_to_array + +local splitter = lpeg.splitat(":") + +local function split(tag) + return lpegmatch(splitter,tag) +end + +labels.split = split + +local contextsprint = context.sprint + +local function definelanguagelabels(data,class,tag,rawtag) + for language, text in next, data.labels do + if text == "" then + -- skip + elseif type(text) == "table" then + contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text[1],"}{",text[2],"}") + if trace_labels then + report_labels("language %a, defining label %a as %a and %a",language,rawtag,text[1],text[2]) + end + else + contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text,"}{}") + if trace_labels then + report_labels("language %a, defining label %a 
as %a",language,rawtag,text) + end + end + end +end + +function labels.define(class,name,prefixed) + local list = languages.data.labels[name] + if list then + report_labels("defining label set %a",name) + for tag, data in next, list do + if data.hidden then + -- skip + elseif prefixed then + local first, second = lpegmatch(splitter,tag) + if second then + if rawget(variables,first) then + if rawget(variables,second) then + definelanguagelabels(data,class,formatters["\\v!%s:\\v!%s"](first,second),tag) + else + definelanguagelabels(data,class,formatters["\\v!%s:%s"](first,second),tag) + end + elseif rawget(variables,second) then + definelanguagelabels(data,class,formatters["%s:\\v!%s"](first,second),tag) + else + definelanguagelabels(data,class,formatters["%s:%s"](first,second),tag) + end + elseif rawget(variables,rawtag) then + definelanguagelabels(data,class,formatters["\\v!%s"](tag),tag) + else + definelanguagelabels(data,class,tag,tag) + end + else + definelanguagelabels(data,class,tag,tag) + end + end + else + report_labels("unknown label set %a",name) + end +end + +-- function labels.check() +-- for category, list in next, languages.data.labels do +-- for tag, specification in next, list do +-- for language, text in next, specification.labels do +-- if type(text) == "string" and find(text,",") then +-- report_labels("warning: label with comma found, category %a, language %a, tag %a, text %a", +-- category, language, tag, text) +-- end +-- end +-- end +-- end +-- end +-- +-- labels.check() + +-- interface + +commands.definelabels = labels.define + +-- function commands.setstrippedtextprefix(str) +-- context(string.strip(str)) +-- end + +-- list : { "a", "b", "c" } +-- separator : ", " +-- last : " and " + +-- text : "a,b,c" +-- separators : "{, },{ and }" + +function commands.concatcommalist(settings) -- it's too easy to forget that this one is there + local list = settings.list or settings_to_array(settings.text or "") + local size = #list + local command = settings.command and context[settings.command] or context + if size > 1 then + local separator, last = " ", " " + if settings.separators then + local set = settings_to_array(settings.separators) + separator = set[1] or settings.separator or separator + last = set[2] or settings.last or last + else + separator = settings.separator or separator + last = settings.last or last + end + command(list[1]) + for i=2,size-1 do + context(separator) + command(list[i]) + end + context(last) + end + if size > 0 then + command(list[size]) + end +end diff --git a/tex/context/base/lang-url.lua b/tex/context/base/lang-url.lua index 35381e672..86733c876 100644 --- a/tex/context/base/lang-url.lua +++ b/tex/context/base/lang-url.lua @@ -1,113 +1,113 @@ -if not modules then modules = { } end modules ['lang-url'] = { - version = 1.001, - comment = "companion to lang-url.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local utfcharacters, utfvalues, utfbyte, utfchar = utf.characters, utf.values, utf.byte, utf.char - -context = context - -commands = commands or { } -local commands = commands - ---[[ -
Hyphenating URLs is somewhat tricky and a matter of taste. I did -consider using a dedicated hyphenation pattern or dealing with it by node -parsing, but the following solution suits as well. After all, we're mostly -dealing with ASCII characters.
-]]-- - -commands.hyphenatedurl = commands.hyphenatedurl or { } -local hyphenatedurl = commands.hyphenatedurl - -local characters = utilities.storage.allocate { - ["!"] = 1, - ["\""] = 1, - ["#"] = 1, - ["$"] = 1, - ["%"] = 1, - ["&"] = 1, - ["("] = 1, - ["*"] = 1, - ["+"] = 1, - [","] = 1, - ["-"] = 1, - ["."] = 1, - ["/"] = 1, - [":"] = 1, - [";"] = 1, - ["<"] = 1, - ["="] = 1, - [">"] = 1, - ["?"] = 1, - ["@"] = 1, - ["["] = 1, - ["\\"] = 1, - ["^"] = 1, - ["_"] = 1, - ["`"] = 1, - ["{"] = 1, - ["|"] = 1, - ["~"] = 1, - - ["'"] = 2, - [")"] = 2, - ["]"] = 2, - ["}"] = 2, -} - -local mapping = utilities.storage.allocate { - -- [utfchar(0xA0)] = "~", -- nbsp (catch) -} - -hyphenatedurl.characters = characters -hyphenatedurl.mapping = mapping -hyphenatedurl.lefthyphenmin = 2 -hyphenatedurl.righthyphenmin = 3 -hyphenatedurl.discretionary = nil - --- more fun is to write nodes .. maybe it's nicer to do this --- in an attribute handler anyway - -local function action(hyphenatedurl,str,left,right,disc) - local n = 0 - local b = math.max( left or hyphenatedurl.lefthyphenmin, 2) - local e = math.min(#str-(right or hyphenatedurl.righthyphenmin)+2,#str) - local d = disc or hyphenatedurl.discretionary - for s in utfcharacters(str) do - n = n + 1 - s = mapping[s] or s - if n > 1 then - context.s() -- can be option - end - if s == d then - context.d(utfbyte(s)) - else - local c = characters[s] - if not c or n<=b or n>=e then - context.n(utfbyte(s)) - elseif c == 1 then - context.b(utfbyte(s)) - elseif c == 2 then - context.a(utfbyte(s)) - end - end - end -end - --- hyphenatedurl.action = function(_,...) action(...) end -- sort of obsolete - -table.setmetatablecall(hyphenatedurl,action) -- watch out: a caller - --- todo, no interface in mkiv yet - -function hyphenatedurl.setcharacters(str,value) -- 1, 2 == before, after - for s in utfcharacters(str) do - characters[s] = value or 1 - end -end - --- .hyphenatedurl.setcharacters("')]}",2) +if not modules then modules = { } end modules ['lang-url'] = { + version = 1.001, + comment = "companion to lang-url.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local utfcharacters, utfvalues, utfbyte, utfchar = utf.characters, utf.values, utf.byte, utf.char + +context = context + +commands = commands or { } +local commands = commands + +--[[ +
Hyphenating URLs is somewhat tricky and a matter of taste. I did +consider using a dedicated hyphenation pattern or dealing with it by node +parsing, but the following solution suits as well. After all, we're mostly +dealing with ASCII characters.
+]]-- + +commands.hyphenatedurl = commands.hyphenatedurl or { } +local hyphenatedurl = commands.hyphenatedurl + +local characters = utilities.storage.allocate { + ["!"] = 1, + ["\""] = 1, + ["#"] = 1, + ["$"] = 1, + ["%"] = 1, + ["&"] = 1, + ["("] = 1, + ["*"] = 1, + ["+"] = 1, + [","] = 1, + ["-"] = 1, + ["."] = 1, + ["/"] = 1, + [":"] = 1, + [";"] = 1, + ["<"] = 1, + ["="] = 1, + [">"] = 1, + ["?"] = 1, + ["@"] = 1, + ["["] = 1, + ["\\"] = 1, + ["^"] = 1, + ["_"] = 1, + ["`"] = 1, + ["{"] = 1, + ["|"] = 1, + ["~"] = 1, + + ["'"] = 2, + [")"] = 2, + ["]"] = 2, + ["}"] = 2, +} + +local mapping = utilities.storage.allocate { + -- [utfchar(0xA0)] = "~", -- nbsp (catch) +} + +hyphenatedurl.characters = characters +hyphenatedurl.mapping = mapping +hyphenatedurl.lefthyphenmin = 2 +hyphenatedurl.righthyphenmin = 3 +hyphenatedurl.discretionary = nil + +-- more fun is to write nodes .. maybe it's nicer to do this +-- in an attribute handler anyway + +local function action(hyphenatedurl,str,left,right,disc) + local n = 0 + local b = math.max( left or hyphenatedurl.lefthyphenmin, 2) + local e = math.min(#str-(right or hyphenatedurl.righthyphenmin)+2,#str) + local d = disc or hyphenatedurl.discretionary + for s in utfcharacters(str) do + n = n + 1 + s = mapping[s] or s + if n > 1 then + context.s() -- can be option + end + if s == d then + context.d(utfbyte(s)) + else + local c = characters[s] + if not c or n<=b or n>=e then + context.n(utfbyte(s)) + elseif c == 1 then + context.b(utfbyte(s)) + elseif c == 2 then + context.a(utfbyte(s)) + end + end + end +end + +-- hyphenatedurl.action = function(_,...) action(...) end -- sort of obsolete + +table.setmetatablecall(hyphenatedurl,action) -- watch out: a caller + +-- todo, no interface in mkiv yet + +function hyphenatedurl.setcharacters(str,value) -- 1, 2 == before, after + for s in utfcharacters(str) do + characters[s] = value or 1 + end +end + +-- .hyphenatedurl.setcharacters("')]}",2) diff --git a/tex/context/base/lang-wrd.lua b/tex/context/base/lang-wrd.lua index 06a2311a6..6a9b39fdf 100644 --- a/tex/context/base/lang-wrd.lua +++ b/tex/context/base/lang-wrd.lua @@ -1,353 +1,353 @@ -if not modules then modules = { } end modules ['lang-wrd'] = { - version = 1.001, - comment = "companion to lang-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local lower = string.lower -local utfchar = utf.char -local concat = table.concat -local lpegmatch = lpeg.match -local P, S, Cs = lpeg.P, lpeg.S, lpeg.Cs - -local report_words = logs.reporter("languages","words") - -local nodes, node, languages = nodes, node, languages - -languages.words = languages.words or { } -local words = languages.words - -words.data = words.data or { } -words.enables = false -words.threshold = 4 - -local numbers = languages.numbers -local registered = languages.registered - -local traverse_nodes = node.traverse -local wordsdata = words.data -local chardata = characters.data -local tasks = nodes.tasks - -local unsetvalue = attributes.unsetvalue - -local nodecodes = nodes.nodecodes -local kerncodes = nodes.kerncodes - -local glyph_code = nodecodes.glyph -local disc_code = nodecodes.disc -local kern_code = nodecodes.kern - -local kerning_code = kerncodes.kerning -local lowerchar = characters.lower - -local a_color = attributes.private('color') -local colist = attributes.list[a_color] - -local is_letter = characters.is_letter -- maybe is_character as variant - -local spacing = 
S(" \n\r\t") -local markup = S("-=") -local lbrace = P("{") -local rbrace = P("}") -local disc = (lbrace * (1-rbrace)^0 * rbrace)^1 -- or just 3 times, time this -local word = Cs((markup/"" + disc/"" + (1-spacing))^1) - -local loaded = { } -- we share lists - -function words.load(tag,filename) - local fullname = resolvers.findfile(filename,'other text file') or "" - if fullname ~= "" then - report_words("loading word file %a",fullname) - statistics.starttiming(languages) - local list = loaded[fullname] - if not list then - list = wordsdata[tag] or { } - local parser = (spacing + word/function(s) list[s] = true end)^0 - lpegmatch(parser,io.loaddata(fullname) or "") - loaded[fullname] = list - end - wordsdata[tag] = list - statistics.stoptiming(languages) - else - report_words("missing word file %a",filename) - end -end - -function words.found(id, str) - local tag = languages.numbers[id] - if tag then - local data = wordsdata[tag] - if data then - if data[str] then - return 1 - elseif data[lower(str)] then - return 2 - end - end - end -end - --- The following code is an adaption of experimental code for hyphenating and --- spell checking. - --- there is an n=1 problem somewhere in nested boxes - -local function mark_words(head,whenfound) -- can be optimized and shared - local current, language, done = head, nil, nil, 0, false - local str, s, nds, n = { }, 0, { }, 0 -- n could also be a table, saves calls - local function action() - if s > 0 then - local word = concat(str,"",1,s) - local mark = whenfound(language,word) - if mark then - done = true - for i=1,n do - mark(nds[i]) - end - end - end - n, s = 0, 0 - end - while current do - local id = current.id - if id == glyph_code then - local a = current.lang - if a then - if a ~= language then - if s > 0 then - action() - end - language = a - end - elseif s > 0 then - action() - language = a - end - local components = current.components - if components then - n = n + 1 - nds[n] = current - for g in traverse_nodes(components) do - s = s + 1 - str[s] = utfchar(g.char) - end - else - local code = current.char - local data = chardata[code] - if is_letter[data.category] then - n = n + 1 - nds[n] = current - s = s + 1 - str[s] = utfchar(code) - elseif s > 0 then - action() - end - end - elseif id == disc_code then -- take the replace - if n > 0 then - n = n + 1 - nds[n] = current - end - elseif id == kern_code and current.subtype == kerning_code and s > 0 then - -- ok - elseif s > 0 then - action() - end - current = current.next - end - if s > 0 then - action() - end - return head, done -end - -local methods = { } -words.methods = methods - -local enablers = { } -words.enablers = enablers - -local wordmethod = 1 -local enabled = false - -function words.check(head) - if enabled then - return methods[wordmethod](head) - else - return head, false - end -end - -function words.enable(settings) - local method = settings.method - wordmethod = method and tonumber(method) or wordmethod or 1 - local e = enablers[wordmethod] - if e then e(settings) end - tasks.enableaction("processors","languages.words.check") - enabled = true -end - -function words.disable() - enabled = false -end - --- colors - -local cache = { } -- can also be done with method 1 -- frozen colors once used - -table.setmetatableindex(cache, function(t,k) -- k == language, numbers[k] == tag - local c - if type(k) == "string" then - c = colist[k] - elseif k < 0 then - c = colist["word:unset"] - else - c = colist["word:" .. 
(numbers[k] or "unset")] or colist["word:unknown"] - end - local v = c and function(n) n[a_color] = c end or false - t[k] = v - return v -end) - --- method 1 - -local function sweep(language,str) - if #str < words.threshold then - return false - elseif words.found(language,str) then -- can become a local wordsfound - return cache["word:yes"] -- maybe variables.yes - else - return cache["word:no"] - end -end - -methods[1] = function(head) - for n in traverse_nodes(head) do - n[a_color] = unsetvalue -- hm, not that selective (reset color) - end - return mark_words(head,sweep) -end - --- method 2 - -local dumpname = nil -local dumpthem = false -local listname = "document" - -local category = { } -local categories = { } - -setmetatable(categories, { - __index = function(t,k) - local languages = { } - setmetatable(languages, { - __index = function(t,k) - local r = registered[k] - local v = { - number = language, - parent = r and r.parent or nil, - patterns = r and r.patterns or nil, - tag = r and r.tag or nil, - list = { }, - total = 0, - unique = 0, - } - t[k] = v - return v - end - } ) - local v = { - languages = languages, - total = 0, - } - t[k] = v - return v - end -} ) - -local collected = { - total = 0, - version = 1.000, - categories = categories, -} - -enablers[2] = function(settings) - local name = settings.list - listname = name and name ~= "" and name or "document" - category = collected.categories[listname] -end - -local function sweep(language,str) - if #str >= words.threshold then - str = lowerchar(str) - local words = category.languages[numbers[language] or "unset"] - local list = words.list - local ls = list[str] - if ls then - list[str] = ls + 1 - else - list[str] = 1 - words.unique = words.unique + 1 - end - collected.total = collected.total + 1 - category.total = category.total + 1 - words.total = words.total + 1 - end -end - -methods[2] = function(head) - dumpthem = true - return mark_words(head,sweep) -end - -local function dumpusedwords() - if dumpthem then - collected.threshold = words.threshold - dumpname = dumpname or file.addsuffix(tex.jobname,"words") - report_words("saving list of used words in %a",dumpname) - io.savedata(dumpname,table.serialize(collected,true)) - -- table.tofile(dumpname,list,true) - end -end - -directives.register("languages.words.dump", function(v) - dumpname = type(v) == "string" and v ~= "" and v -end) - -luatex.registerstopactions(dumpusedwords) - --- method 3 - -local function sweep(language,str) - return cache[language] -end - -methods[3] = function(head) - for n in traverse_nodes(head) do - n[a_color] = unsetvalue - end - return mark_words(head,sweep) -end - --- for the moment we hook it into the attribute handler - ---~ languagehacks = { } - ---~ function languagehacks.process(namespace,attribute,head) ---~ return languages.check(head) ---~ end - ---~ chars.plugins[chars.plugins+1] = { ---~ name = "language", ---~ namespace = languagehacks, ---~ processor = languagehacks.process ---~ } - --- interface - -commands.enablespellchecking = words.enable -commands.disablespellchecking = words.disable -commands.loadspellchecklist = words.load +if not modules then modules = { } end modules ['lang-wrd'] = { + version = 1.001, + comment = "companion to lang-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local lower = string.lower +local utfchar = utf.char +local concat = table.concat +local lpegmatch = lpeg.match +local P, S, Cs = 
lpeg.P, lpeg.S, lpeg.Cs + +local report_words = logs.reporter("languages","words") + +local nodes, node, languages = nodes, node, languages + +languages.words = languages.words or { } +local words = languages.words + +words.data = words.data or { } +words.enables = false +words.threshold = 4 + +local numbers = languages.numbers +local registered = languages.registered + +local traverse_nodes = node.traverse +local wordsdata = words.data +local chardata = characters.data +local tasks = nodes.tasks + +local unsetvalue = attributes.unsetvalue + +local nodecodes = nodes.nodecodes +local kerncodes = nodes.kerncodes + +local glyph_code = nodecodes.glyph +local disc_code = nodecodes.disc +local kern_code = nodecodes.kern + +local kerning_code = kerncodes.kerning +local lowerchar = characters.lower + +local a_color = attributes.private('color') +local colist = attributes.list[a_color] + +local is_letter = characters.is_letter -- maybe is_character as variant + +local spacing = S(" \n\r\t") +local markup = S("-=") +local lbrace = P("{") +local rbrace = P("}") +local disc = (lbrace * (1-rbrace)^0 * rbrace)^1 -- or just 3 times, time this +local word = Cs((markup/"" + disc/"" + (1-spacing))^1) + +local loaded = { } -- we share lists + +function words.load(tag,filename) + local fullname = resolvers.findfile(filename,'other text file') or "" + if fullname ~= "" then + report_words("loading word file %a",fullname) + statistics.starttiming(languages) + local list = loaded[fullname] + if not list then + list = wordsdata[tag] or { } + local parser = (spacing + word/function(s) list[s] = true end)^0 + lpegmatch(parser,io.loaddata(fullname) or "") + loaded[fullname] = list + end + wordsdata[tag] = list + statistics.stoptiming(languages) + else + report_words("missing word file %a",filename) + end +end + +function words.found(id, str) + local tag = languages.numbers[id] + if tag then + local data = wordsdata[tag] + if data then + if data[str] then + return 1 + elseif data[lower(str)] then + return 2 + end + end + end +end + +-- The following code is an adaption of experimental code for hyphenating and +-- spell checking. 
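As a small aside, the following standalone sketch shows how the word parser above normalizes entries of a word file before words.load stores them: the markup characters - and = are stripped, brace groups are dropped, and whitespace separates entries. The grammar is the same as above; the sample input is made up and lpeg is assumed to be available (it is built into LuaTeX):

-- standalone illustration of the normalization performed in words.load (sample input is made up)
local lpeg = lpeg or require("lpeg")
local P, S, Cs = lpeg.P, lpeg.S, lpeg.Cs

local spacing = S(" \n\r\t")
local markup  = S("-=")
local lbrace  = P("{")
local rbrace  = P("}")
local disc    = (lbrace * (1-rbrace)^0 * rbrace)^1
local word    = Cs((markup/"" + disc/"" + (1-spacing))^1)

local list   = { }  -- plays the role of wordsdata[tag]
local parser = (spacing + word / function(s) list[s] = true end)^0

lpeg.match(parser, "im-ple-men-ter man{}{}{}ual user=friend-ly")
-- list now contains the keys: implementer, manual, userfriendly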
+ +-- there is an n=1 problem somewhere in nested boxes + +local function mark_words(head,whenfound) -- can be optimized and shared + local current, language, done = head, nil, nil, 0, false + local str, s, nds, n = { }, 0, { }, 0 -- n could also be a table, saves calls + local function action() + if s > 0 then + local word = concat(str,"",1,s) + local mark = whenfound(language,word) + if mark then + done = true + for i=1,n do + mark(nds[i]) + end + end + end + n, s = 0, 0 + end + while current do + local id = current.id + if id == glyph_code then + local a = current.lang + if a then + if a ~= language then + if s > 0 then + action() + end + language = a + end + elseif s > 0 then + action() + language = a + end + local components = current.components + if components then + n = n + 1 + nds[n] = current + for g in traverse_nodes(components) do + s = s + 1 + str[s] = utfchar(g.char) + end + else + local code = current.char + local data = chardata[code] + if is_letter[data.category] then + n = n + 1 + nds[n] = current + s = s + 1 + str[s] = utfchar(code) + elseif s > 0 then + action() + end + end + elseif id == disc_code then -- take the replace + if n > 0 then + n = n + 1 + nds[n] = current + end + elseif id == kern_code and current.subtype == kerning_code and s > 0 then + -- ok + elseif s > 0 then + action() + end + current = current.next + end + if s > 0 then + action() + end + return head, done +end + +local methods = { } +words.methods = methods + +local enablers = { } +words.enablers = enablers + +local wordmethod = 1 +local enabled = false + +function words.check(head) + if enabled then + return methods[wordmethod](head) + else + return head, false + end +end + +function words.enable(settings) + local method = settings.method + wordmethod = method and tonumber(method) or wordmethod or 1 + local e = enablers[wordmethod] + if e then e(settings) end + tasks.enableaction("processors","languages.words.check") + enabled = true +end + +function words.disable() + enabled = false +end + +-- colors + +local cache = { } -- can also be done with method 1 -- frozen colors once used + +table.setmetatableindex(cache, function(t,k) -- k == language, numbers[k] == tag + local c + if type(k) == "string" then + c = colist[k] + elseif k < 0 then + c = colist["word:unset"] + else + c = colist["word:" .. 
(numbers[k] or "unset")] or colist["word:unknown"] + end + local v = c and function(n) n[a_color] = c end or false + t[k] = v + return v +end) + +-- method 1 + +local function sweep(language,str) + if #str < words.threshold then + return false + elseif words.found(language,str) then -- can become a local wordsfound + return cache["word:yes"] -- maybe variables.yes + else + return cache["word:no"] + end +end + +methods[1] = function(head) + for n in traverse_nodes(head) do + n[a_color] = unsetvalue -- hm, not that selective (reset color) + end + return mark_words(head,sweep) +end + +-- method 2 + +local dumpname = nil +local dumpthem = false +local listname = "document" + +local category = { } +local categories = { } + +setmetatable(categories, { + __index = function(t,k) + local languages = { } + setmetatable(languages, { + __index = function(t,k) + local r = registered[k] + local v = { + number = language, + parent = r and r.parent or nil, + patterns = r and r.patterns or nil, + tag = r and r.tag or nil, + list = { }, + total = 0, + unique = 0, + } + t[k] = v + return v + end + } ) + local v = { + languages = languages, + total = 0, + } + t[k] = v + return v + end +} ) + +local collected = { + total = 0, + version = 1.000, + categories = categories, +} + +enablers[2] = function(settings) + local name = settings.list + listname = name and name ~= "" and name or "document" + category = collected.categories[listname] +end + +local function sweep(language,str) + if #str >= words.threshold then + str = lowerchar(str) + local words = category.languages[numbers[language] or "unset"] + local list = words.list + local ls = list[str] + if ls then + list[str] = ls + 1 + else + list[str] = 1 + words.unique = words.unique + 1 + end + collected.total = collected.total + 1 + category.total = category.total + 1 + words.total = words.total + 1 + end +end + +methods[2] = function(head) + dumpthem = true + return mark_words(head,sweep) +end + +local function dumpusedwords() + if dumpthem then + collected.threshold = words.threshold + dumpname = dumpname or file.addsuffix(tex.jobname,"words") + report_words("saving list of used words in %a",dumpname) + io.savedata(dumpname,table.serialize(collected,true)) + -- table.tofile(dumpname,list,true) + end +end + +directives.register("languages.words.dump", function(v) + dumpname = type(v) == "string" and v ~= "" and v +end) + +luatex.registerstopactions(dumpusedwords) + +-- method 3 + +local function sweep(language,str) + return cache[language] +end + +methods[3] = function(head) + for n in traverse_nodes(head) do + n[a_color] = unsetvalue + end + return mark_words(head,sweep) +end + +-- for the moment we hook it into the attribute handler + +--~ languagehacks = { } + +--~ function languagehacks.process(namespace,attribute,head) +--~ return languages.check(head) +--~ end + +--~ chars.plugins[chars.plugins+1] = { +--~ name = "language", +--~ namespace = languagehacks, +--~ processor = languagehacks.process +--~ } + +-- interface + +commands.enablespellchecking = words.enable +commands.disablespellchecking = words.disable +commands.loadspellchecklist = words.load diff --git a/tex/context/base/layo-ini.lua b/tex/context/base/layo-ini.lua index 56ced2c0b..cc483aa3b 100644 --- a/tex/context/base/layo-ini.lua +++ b/tex/context/base/layo-ini.lua @@ -1,61 +1,61 @@ -if not modules then modules = { } end modules ['layo-ini'] = { - version = 1.001, - comment = "companion to layo-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt 
Development Team", - license = "see context related readme files" -} - --- We need to share information between the TeX and Lua end --- about the typographical model. This happens here. --- --- Code might move. - --- conditionals.layoutisdoublesided --- conditionals.layoutissinglesided --- texcount.pagenoshift --- texcount.realpageno - -local texcount = tex.count -local conditionals = tex.conditionals - -layouts = { - status = { }, -} - -local status = layouts.status - -function status.leftorrightpagection(left,right) - if left == nil then - left, right = false, true - end - if not conditionals.layoutisdoublesided then - return left, right - elseif conditionals.layoutissinglesided then - return left, right - elseif texcount.pagenoshift % 2 == 0 then - if texcount.realpageno % 2 == 0 then - return right, left - else - return left, right - end - else - if texcount.realpageno % 2 == 0 then - return left, right - else - return right, left - end - end -end - -function status.isleftpage() - if not conditionals.layoutisdoublesided then - return false - elseif conditionals.layoutissinglesided then - return false - elseif texcount.pagenoshift % 2 == 0 then - return texcount.realpageno % 2 == 0 - else - return not texcount.realpageno % 2 == 0 - end -end +if not modules then modules = { } end modules ['layo-ini'] = { + version = 1.001, + comment = "companion to layo-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- We need to share information between the TeX and Lua end +-- about the typographical model. This happens here. +-- +-- Code might move. + +-- conditionals.layoutisdoublesided +-- conditionals.layoutissinglesided +-- texcount.pagenoshift +-- texcount.realpageno + +local texcount = tex.count +local conditionals = tex.conditionals + +layouts = { + status = { }, +} + +local status = layouts.status + +function status.leftorrightpagection(left,right) + if left == nil then + left, right = false, true + end + if not conditionals.layoutisdoublesided then + return left, right + elseif conditionals.layoutissinglesided then + return left, right + elseif texcount.pagenoshift % 2 == 0 then + if texcount.realpageno % 2 == 0 then + return right, left + else + return left, right + end + else + if texcount.realpageno % 2 == 0 then + return left, right + else + return right, left + end + end +end + +function status.isleftpage() + if not conditionals.layoutisdoublesided then + return false + elseif conditionals.layoutissinglesided then + return false + elseif texcount.pagenoshift % 2 == 0 then + return texcount.realpageno % 2 == 0 + else + return not texcount.realpageno % 2 == 0 + end +end diff --git a/tex/context/base/lpdf-ano.lua b/tex/context/base/lpdf-ano.lua index adfea3812..ee9cb851b 100644 --- a/tex/context/base/lpdf-ano.lua +++ b/tex/context/base/lpdf-ano.lua @@ -1,753 +1,753 @@ -if not modules then modules = { } end modules ['lpdf-ano'] = { - version = 1.001, - comment = "companion to lpdf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- when using rotation: \disabledirectives[refences.sharelinks] (maybe flag links) - --- todo: /AA << WC << ... 
>> >> : WillClose actions etc - -local next, tostring = next, tostring -local rep, format = string.rep, string.format -local texcount = tex.count -local lpegmatch = lpeg.match -local formatters = string.formatters - -local backends, lpdf = backends, lpdf - -local trace_references = false trackers.register("references.references", function(v) trace_references = v end) -local trace_destinations = false trackers.register("references.destinations", function(v) trace_destinations = v end) -local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end) - -local report_reference = logs.reporter("backend","references") -local report_destination = logs.reporter("backend","destinations") -local report_bookmark = logs.reporter("backend","bookmarks") - -local variables = interfaces.variables -local constants = interfaces.constants - -local settings_to_array = utilities.parsers.settings_to_array - -local nodeinjections = backends.pdf.nodeinjections -local codeinjections = backends.pdf.codeinjections -local registrations = backends.pdf.registrations - -local javascriptcode = interactions.javascripts.code - -local references = structures.references -local bookmarks = structures.bookmarks - -local runners = references.runners -local specials = references.specials -local handlers = references.handlers -local executers = references.executers -local getinnermethod = references.getinnermethod - -local nodepool = nodes.pool - -local pdfannotation_node = nodepool.pdfannotation -local pdfdestination_node = nodepool.pdfdestination -local latelua_node = nodepool.latelua - -local pdfdictionary = lpdf.dictionary -local pdfarray = lpdf.array -local pdfreference = lpdf.reference -local pdfunicode = lpdf.unicode -local pdfconstant = lpdf.constant -local pdfflushobject = lpdf.flushobject -local pdfshareobjectreference = lpdf.shareobjectreference -local pdfreserveobject = lpdf.reserveobject -local pdfpagereference = lpdf.pagereference -local pdfdelayedobject = lpdf.delayedobject -local pdfregisterannotation = lpdf.registerannotation - --- todo: 3dview - -local pdf_annot = pdfconstant("Annot") -local pdf_uri = pdfconstant("URI") -local pdf_gotor = pdfconstant("GoToR") -local pdf_goto = pdfconstant("GoTo") -local pdf_launch = pdfconstant("Launch") -local pdf_javascript = pdfconstant("JavaScript") -local pdf_link = pdfconstant("Link") -local pdf_n = pdfconstant("N") -local pdf_t = pdfconstant("T") -local pdf_fit = pdfconstant("Fit") -local pdf_named = pdfconstant("Named") - -local pdf_border = pdfarray { 0, 0, 0 } - -local cache = { } - -local function pagedestination(n) -- only cache fit - if n > 0 then - local pd = cache[n] - if not pd then - local a = pdfarray { - pdfreference(pdfpagereference(n)), - pdf_fit, - } - pd = pdfshareobjectreference(a) - cache[n] = pd - end - return pd - end -end - -lpdf.pagedestination = pagedestination - -local defaultdestination = pdfarray { 0, pdf_fit } - -local function link(url,filename,destination,page,actions) - if filename and filename ~= "" then - if file.basename(filename) == tex.jobname then - return false - else - filename = file.addsuffix(filename,"pdf") - end - end - if url and url ~= "" then - if filename and filename ~= "" then - if destination and destination ~= "" then - url = file.join(url,filename).."#"..destination - else - url = file.join(url,filename) - end - end - return pdfdictionary { - S = pdf_uri, - URI = url, - } - elseif filename and filename ~= "" then - -- no page ? 
- if destination == "" then - destination = nil - end - if not destination and page then - destination = pdfarray { page - 1, pdf_fit } - end - return pdfdictionary { - S = pdf_gotor, -- can also be pdf_launch - F = filename, - D = destination or defaultdestination, -- D is mandate - NewWindow = (actions.newwindow and true) or nil, - } - elseif destination and destination ~= "" then - return pdfdictionary { -- can be cached - S = pdf_goto, - D = destination, - } - else - local p = tonumber(page) - if p and p > 0 then - return pdfdictionary { -- can be cached - S = pdf_goto, - D = pdfarray { - pdfreference(pdfpagereference(p)), - pdf_fit, - } - } - elseif trace_references then - report_reference("invalid page reference %a",page) - end - end - return false -end - -lpdf.link = link - -function lpdf.launch(program,parameters) - if program and program ~= "" then - local d = pdfdictionary { - S = pdf_launch, - F = program, - D = ".", - } - if parameters and parameters ~= "" then - d.P = parameters - end - return d - end -end - -function lpdf.javascript(name,arguments) - local script = javascriptcode(name,arguments) -- make into object (hash) - if script then - return pdfdictionary { - S = pdf_javascript, - JS = script, - } - end -end - -local function pdfaction(actions) - local nofactions = #actions - if nofactions > 0 then - local a = actions[1] - local action = runners[a.kind] - if action then - action = action(a,actions) - end - if action then - local first = action - for i=2,nofactions do - local a = actions[i] - local what = runners[a.kind] - if what then - what = what(a,actions) - end - if what then - action.Next = what - action = what - else - -- error - return nil - end - end - return first, actions.n - end - end -end - -lpdf.action = pdfaction - -function codeinjections.prerollreference(actions) -- share can become option - if actions then - local main, n = pdfaction(actions) - if main then - main = pdfdictionary { - Subtype = pdf_link, - Border = pdf_border, - H = (not actions.highlight and pdf_n) or nil, - A = pdfshareobjectreference(main), - F = 4, -- print (mandate in pdf/a) - } - return main("A"), n - end - end -end - -local function use_normal_annotations() - - local function reference(width,height,depth,prerolled) -- keep this one - if prerolled then - if trace_references then - report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled) - end - return pdfannotation_node(width,height,depth,prerolled) - end - end - - local function finishreference() - end - - return reference, finishreference - -end - --- eventually we can do this for special refs only - -local hashed, nofunique, nofused = { }, 0, 0 - -local f_annot = formatters["<< /Type /Annot %s /Rect [%0.3f %0.3f %0.3f %0.3f] >>"] -local f_bpnf = formatters["_bpnf_(%s,%s,%s,'%s')"] - -local function use_shared_annotations() - - local factor = number.dimenfactors.bp - - local function finishreference(width,height,depth,prerolled) -- %0.2f looks okay enough (no scaling anyway) - local h, v = pdf.h, pdf.v - local llx, lly = h*factor, (v - depth)*factor - local urx, ury = (h + width)*factor, (v + height)*factor - local annot = f_annot(prerolled,llx,lly,urx,ury) - local n = hashed[annot] - if not n then - n = pdfdelayedobject(annot) - hashed[annot] = n - nofunique = nofunique + 1 - end - nofused = nofused + 1 - pdfregisterannotation(n) - end - - _bpnf_ = finishreference - - local function reference(width,height,depth,prerolled) - if prerolled then - if trace_references then - report_reference("width 
%p, height %p, depth %p, prerolled %a",width,height,depth,prerolled) - end - local luacode = f_bpnf(width,height,depth,prerolled) - return latelua_node(luacode) - end - end - - statistics.register("pdf annotations", function() - if nofused > 0 then - return format("%s embedded, %s unique",nofused,nofunique) - else - return nil - end - end) - - - return reference, finishreference - -end - -local lln = latelua_node() if node.has_field(lln,'string') then - - directives.register("refences.sharelinks", function(v) - if v then - nodeinjections.reference, codeinjections.finishreference = use_shared_annotations() - else - nodeinjections.reference, codeinjections.finishreference = use_normal_annotations() - end - end) - - nodeinjections.reference, codeinjections.finishreference = use_shared_annotations() - -else - - nodeinjections.reference, codeinjections.finishreference = use_normal_annotations() - -end node.free(lln) - --- -- -- -- --- -- -- -- - -local done = { } -- prevent messages - -function nodeinjections.destination(width,height,depth,name,view) - if not done[name] then - done[name] = true - if trace_destinations then - report_destination("width %p, height %p, depth %p, name %a, view %a",width,height,depth,name,view) - end - return pdfdestination_node(width,height,depth,name,view) -- can be begin/end node - end -end - --- runners and specials - -runners["inner"] = function(var,actions) - if getinnermethod() == "names" then - local vi = var.i - if vi then - local vir = vi.references - if vir then - local internal = vir.internal - if internal then - var.inner = "aut:" .. internal - end - end - end - else - var.inner = nil - end - local prefix = var.p - local inner = var.inner - if inner and prefix and prefix ~= "" then - inner = prefix .. ":" .. inner -- might not always be ok - end - return link(nil,nil,inner,var.r,actions) -end - -runners["inner with arguments"] = function(var,actions) - report_reference("todo: inner with arguments") - return false -end - -runners["outer"] = function(var,actions) - local file, url = references.checkedfileorurl(var.outer,var.outer) - return link(url,file,var.arguments,nil,actions) -end - -runners["outer with inner"] = function(var,actions) - local file = references.checkedfile(var.outer) -- was var.f but fails ... 
why - return link(nil,file,var.inner,var.r,actions) -end - -runners["special outer with operation"] = function(var,actions) - local handler = specials[var.special] - return handler and handler(var,actions) -end - -runners["special outer"] = function(var,actions) - report_reference("todo: special outer") - return false -end - -runners["special"] = function(var,actions) - local handler = specials[var.special] - return handler and handler(var,actions) -end - -runners["outer with inner with arguments"] = function(var,actions) - report_reference("todo: outer with inner with arguments") - return false -end - -runners["outer with special and operation and arguments"] = function(var,actions) - report_reference("todo: outer with special and operation and arguments") - return false -end - -runners["outer with special"] = function(var,actions) - report_reference("todo: outer with special") - return false -end - -runners["outer with special and operation"] = function(var,actions) - report_reference("todo: outer with special and operation") - return false -end - -runners["special operation"] = runners["special"] -runners["special operation with arguments"] = runners["special"] - -function specials.internal(var,actions) -- better resolve in strc-ref - local i = tonumber(var.operation) - local v = i and references.internals[i] - if not v then - -- error - report_reference("no internal reference %a",i) - elseif getinnermethod() == "names" then - -- named - return link(nil,nil,"aut:"..i,v.references.realpage,actions) - else - -- page - return link(nil,nil,nil,v.references.realpage,actions) - end -end - --- realpage already resolved - -specials.i = specials.internal - -local pages = references.pages - -function specials.page(var,actions) - local file = var.f - if file then - file = references.checkedfile(file) - return link(nil,file,nil,var.operation,actions) - else - local p = var.r - if not p then -- todo: call special from reference code - p = pages[var.operation] - if type(p) == "function" then -- double - p = p() - else - p = references.realpageofpage(tonumber(p)) - end - -- if p then - -- var.r = p - -- end - end - return link(nil,nil,nil,p or var.operation,actions) - end -end - -function specials.realpage(var,actions) - local file = var.f - if file then - file = references.checkedfile(file) - return link(nil,file,nil,var.operation,actions) - else - return link(nil,nil,nil,var.operation,actions) - end -end - -function specials.userpage(var,actions) - local file = var.f - if file then - file = references.checkedfile(file) - return link(nil,file,nil,var.operation,actions) - else - local p = var.r - if not p then -- todo: call special from reference code - p = var.operation - if p then -- no function and special check here. 
only numbers - p = references.realpageofpage(tonumber(p)) - end - -- if p then - -- var.r = p - -- end - end - return link(nil,nil,nil,p or var.operation,actions) - end -end - -function specials.deltapage(var,actions) - local p = tonumber(var.operation) - if p then - p = references.checkedrealpage(p + texcount.realpageno) - return link(nil,nil,nil,p,actions) - end -end - --- sections - --- function specials.section(var,actions) --- local sectionname = var.operation --- local destination = var.arguments --- local internal = structures.sections.internalreference(sectionname,destination) --- if internal then --- var.special = "internal" --- var.operation = internal --- var.arguments = nil --- specials.internal(var,actions) --- end --- end - -specials.section = specials.internal -- specials.section just need to have a value as it's checked - --- todo, do this in references namespace ordered instead (this is an experiment) - -local splitter = lpeg.splitat(":") - -function specials.order(var,actions) -- references.specials ! - local operation = var.operation - if operation then - local kind, name, n = lpegmatch(splitter,operation) - local order = structures.lists.ordered[kind] - order = order and order[name] - local v = order[tonumber(n)] - local r = v and v.references.realpage - if r then - var.operation = r -- brrr, but test anyway - return specials.page(var,actions) - end - end -end - -function specials.url(var,actions) - local url = references.checkedurl(var.operation) - return link(url,nil,var.arguments,nil,actions) -end - -function specials.file(var,actions) - local file = references.checkedfile(var.operation) - return link(nil,file,var.arguments,nil,actions) -end - -function specials.fileorurl(var,actions) - local file, url = references.checkedfileorurl(var.operation,var.operation) - return link(url,file,var.arguments,nil,actions) -end - -function specials.program(var,content) - local program = references.checkedprogram(var.operation) - return lpdf.launch(program,var.arguments) -end - -function specials.javascript(var) - return lpdf.javascript(var.operation,var.arguments) -end - -specials.JS = specials.javascript - -executers.importform = pdfdictionary { S = pdf_named, N = pdfconstant("AcroForm:ImportFDF") } -executers.exportform = pdfdictionary { S = pdf_named, N = pdfconstant("AcroForm:ExportFDF") } -executers.first = pdfdictionary { S = pdf_named, N = pdfconstant("FirstPage") } -executers.previous = pdfdictionary { S = pdf_named, N = pdfconstant("PrevPage") } -executers.next = pdfdictionary { S = pdf_named, N = pdfconstant("NextPage") } -executers.last = pdfdictionary { S = pdf_named, N = pdfconstant("LastPage") } -executers.backward = pdfdictionary { S = pdf_named, N = pdfconstant("GoBack") } -executers.forward = pdfdictionary { S = pdf_named, N = pdfconstant("GoForward") } -executers.print = pdfdictionary { S = pdf_named, N = pdfconstant("Print") } -executers.exit = pdfdictionary { S = pdf_named, N = pdfconstant("Quit") } -executers.close = pdfdictionary { S = pdf_named, N = pdfconstant("Close") } -executers.save = pdfdictionary { S = pdf_named, N = pdfconstant("Save") } -executers.savenamed = pdfdictionary { S = pdf_named, N = pdfconstant("SaveAs") } -executers.opennamed = pdfdictionary { S = pdf_named, N = pdfconstant("Open") } -executers.help = pdfdictionary { S = pdf_named, N = pdfconstant("HelpUserGuide") } -executers.toggle = pdfdictionary { S = pdf_named, N = pdfconstant("FullScreen") } -executers.search = pdfdictionary { S = pdf_named, N = pdfconstant("Find") } 
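The specials handlers in this removed block (specials.page, specials.url, specials.file, and so on) all funnel into the generic link() builder with a (url, file, destination, page, actions) signature. As an editorial aside, here is a minimal sketch of that dispatch pattern; resolvespecial is a hypothetical name, and only link and the var fields mirror what the patch itself uses:

    -- hedged sketch, not part of the patch: dispatch a parsed reference to its
    -- specials handler, falling back to a plain inner/page link
    local function resolvespecial(specials,link,var,actions)
        local handler = specials[var.special]            -- e.g. "page", "url", "file"
        if handler then
            return handler(var,actions)                  -- returns an action dictionary or false
        end
        return link(nil,nil,var.inner,var.r,actions)     -- inner destination or real page number
    end
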
-executers.searchagain = pdfdictionary { S = pdf_named, N = pdfconstant("FindAgain") } -executers.gotopage = pdfdictionary { S = pdf_named, N = pdfconstant("GoToPage") } -executers.query = pdfdictionary { S = pdf_named, N = pdfconstant("AcroSrch:Query") } -executers.queryagain = pdfdictionary { S = pdf_named, N = pdfconstant("AcroSrch:NextHit") } -executers.fitwidth = pdfdictionary { S = pdf_named, N = pdfconstant("FitWidth") } -executers.fitheight = pdfdictionary { S = pdf_named, N = pdfconstant("FitHeight") } - -local function fieldset(arguments) - -- [\dogetfieldset{#1}] - return nil -end - -function executers.resetform(arguments) - arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments) - return pdfdictionary { - S = pdfconstant("ResetForm"), - Field = fieldset(arguments[1]) - } -end - -local formmethod = "post" -- "get" "post" -local formformat = "xml" -- "xml" "html" "fdf" - --- bit 3 = html bit 6 = xml bit 4 = get - -local flags = { - get = { - html = 12, fdf = 8, xml = 40, - }, - post = { - html = 4, fdf = 0, xml = 32, - } -} - -function executers.submitform(arguments) - arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments) - local flag = flags[formmethod] or flags.post - flag = (flag and (flag[formformat] or flag.xml)) or 32 -- default: post, xml - return pdfdictionary { - S = pdfconstant("SubmitForm"), - F = arguments[1], - Field = fieldset(arguments[2]), - Flags = flag, - -- \PDFsubmitfiller - } -end - -local pdf_hide = pdfconstant("Hide") - -function executers.hide(arguments) - return pdfdictionary { - S = pdf_hide, - H = true, - T = arguments, - } -end - -function executers.show(arguments) - return pdfdictionary { - S = pdf_hide, - H = false, - T = arguments, - } -end - -local pdf_movie = pdfconstant("Movie") -local pdf_start = pdfconstant("Start") -local pdf_stop = pdfconstant("Stop") -local pdf_resume = pdfconstant("Resume") -local pdf_pause = pdfconstant("Pause") - -local function movie_or_sound(operation,arguments) - arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments) - return pdfdictionary { - S = pdf_movie, - T = format("movie %s",arguments[1] or "noname"), - Operation = operation, - } -end - -function executers.startmovie (arguments) return movie_or_sound(pdf_start ,arguments) end -function executers.stopmovie (arguments) return movie_or_sound(pdf_stop ,arguments) end -function executers.resumemovie(arguments) return movie_or_sound(pdf_resume,arguments) end -function executers.pausemovie (arguments) return movie_or_sound(pdf_pause ,arguments) end - -function executers.startsound (arguments) return movie_or_sound(pdf_start ,arguments) end -function executers.stopsound (arguments) return movie_or_sound(pdf_stop ,arguments) end -function executers.resumesound(arguments) return movie_or_sound(pdf_resume,arguments) end -function executers.pausesound (arguments) return movie_or_sound(pdf_pause ,arguments) end - -function specials.action(var) - local operation = var.operation - if var.operation and operation ~= "" then - local e = executers[operation] - if type(e) == "table" then - return e - elseif type(e) == "function" then - return e(var.arguments) - end - end -end - ---~ entry.A = pdfdictionary { ---~ S = pdf_goto, ---~ D = .... 
---~ } - -local function build(levels,start,parent,method) - local startlevel = levels[start][1] - local i, n = start, 0 - local child, entry, m, prev, first, last, f, l - while i and i <= #levels do - local li = levels[i] - local level, title, reference, open = li[1], li[2], li[3], li[4] - if level < startlevel then - pdfflushobject(child,entry) - return i, n, first, last - elseif level == startlevel then - if trace_bookmarks then - report_bookmark("%3i %w%s %s",reference.realpage,(level-1)*2,(open and "+") or "-",title) - end - local prev = child - child = pdfreserveobject() - if entry then - entry.Next = child and pdfreference(child) - pdfflushobject(prev,entry) - end - entry = pdfdictionary { - Title = pdfunicode(title), - Parent = parent, - Prev = prev and pdfreference(prev), - } - if method == "internal" then - entry.Dest = "aut:" .. reference.internal - else -- if method == "page" then - entry.Dest = pagedestination(reference.realpage) - end - if not first then first, last = child, child end - prev = child - last = prev - n = n + 1 - i = i + 1 - elseif i < #levels and level > startlevel then - i, m, f, l = build(levels,i,pdfreference(child),method) - entry.Count = (open and m) or -m - if m > 0 then - entry.First, entry.Last = pdfreference(f), pdfreference(l) - end - else - -- missing intermediate level but ok - i, m, f, l = build(levels,i,pdfreference(child),method) - entry.Count = (open and m) or -m - if m > 0 then - entry.First, entry.Last = pdfreference(f), pdfreference(l) - end - pdfflushobject(child,entry) - return i, n, first, last - end - end - pdfflushobject(child,entry) - return nil, n, first, last -end - -function codeinjections.addbookmarks(levels,method) - if #levels > 0 then - structures.bookmarks.flatten(levels) -- dirty trick for lack of structure - local parent = pdfreserveobject() - local _, m, first, last = build(levels,1,pdfreference(parent),method or "internal") - local dict = pdfdictionary { - Type = pdfconstant("Outlines"), - First = pdfreference(first), - Last = pdfreference(last), - Count = m, - } - pdfflushobject(parent,dict) - lpdf.addtocatalog("Outlines",lpdf.reference(parent)) - end -end - --- this could also be hooked into the frontend finalizer - -lpdf.registerdocumentfinalizer(function() bookmarks.place() end,1,"bookmarks") +if not modules then modules = { } end modules ['lpdf-ano'] = { + version = 1.001, + comment = "companion to lpdf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- when using rotation: \disabledirectives[refences.sharelinks] (maybe flag links) + +-- todo: /AA << WC << ... 
>> >> : WillClose actions etc + +local next, tostring = next, tostring +local rep, format = string.rep, string.format +local texcount = tex.count +local lpegmatch = lpeg.match +local formatters = string.formatters + +local backends, lpdf = backends, lpdf + +local trace_references = false trackers.register("references.references", function(v) trace_references = v end) +local trace_destinations = false trackers.register("references.destinations", function(v) trace_destinations = v end) +local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end) + +local report_reference = logs.reporter("backend","references") +local report_destination = logs.reporter("backend","destinations") +local report_bookmark = logs.reporter("backend","bookmarks") + +local variables = interfaces.variables +local constants = interfaces.constants + +local settings_to_array = utilities.parsers.settings_to_array + +local nodeinjections = backends.pdf.nodeinjections +local codeinjections = backends.pdf.codeinjections +local registrations = backends.pdf.registrations + +local javascriptcode = interactions.javascripts.code + +local references = structures.references +local bookmarks = structures.bookmarks + +local runners = references.runners +local specials = references.specials +local handlers = references.handlers +local executers = references.executers +local getinnermethod = references.getinnermethod + +local nodepool = nodes.pool + +local pdfannotation_node = nodepool.pdfannotation +local pdfdestination_node = nodepool.pdfdestination +local latelua_node = nodepool.latelua + +local pdfdictionary = lpdf.dictionary +local pdfarray = lpdf.array +local pdfreference = lpdf.reference +local pdfunicode = lpdf.unicode +local pdfconstant = lpdf.constant +local pdfflushobject = lpdf.flushobject +local pdfshareobjectreference = lpdf.shareobjectreference +local pdfreserveobject = lpdf.reserveobject +local pdfpagereference = lpdf.pagereference +local pdfdelayedobject = lpdf.delayedobject +local pdfregisterannotation = lpdf.registerannotation + +-- todo: 3dview + +local pdf_annot = pdfconstant("Annot") +local pdf_uri = pdfconstant("URI") +local pdf_gotor = pdfconstant("GoToR") +local pdf_goto = pdfconstant("GoTo") +local pdf_launch = pdfconstant("Launch") +local pdf_javascript = pdfconstant("JavaScript") +local pdf_link = pdfconstant("Link") +local pdf_n = pdfconstant("N") +local pdf_t = pdfconstant("T") +local pdf_fit = pdfconstant("Fit") +local pdf_named = pdfconstant("Named") + +local pdf_border = pdfarray { 0, 0, 0 } + +local cache = { } + +local function pagedestination(n) -- only cache fit + if n > 0 then + local pd = cache[n] + if not pd then + local a = pdfarray { + pdfreference(pdfpagereference(n)), + pdf_fit, + } + pd = pdfshareobjectreference(a) + cache[n] = pd + end + return pd + end +end + +lpdf.pagedestination = pagedestination + +local defaultdestination = pdfarray { 0, pdf_fit } + +local function link(url,filename,destination,page,actions) + if filename and filename ~= "" then + if file.basename(filename) == tex.jobname then + return false + else + filename = file.addsuffix(filename,"pdf") + end + end + if url and url ~= "" then + if filename and filename ~= "" then + if destination and destination ~= "" then + url = file.join(url,filename).."#"..destination + else + url = file.join(url,filename) + end + end + return pdfdictionary { + S = pdf_uri, + URI = url, + } + elseif filename and filename ~= "" then + -- no page ? 
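-- Editorial sketch (not part of the patch): pagedestination() above memoizes one
-- shared [ <pageref> /Fit ] array per page via pdfshareobjectreference, so repeated
-- links to the same page reuse a single indirect object. The same memoization idea,
-- with a hypothetical builddestination constructor standing in for the pdfarray call:

local shareddestinations = { }

local function shareddestination(page,builddestination)
    local d = shareddestinations[page]
    if not d then
        d = builddestination(page)      -- e.g. pdfshareobjectreference(pdfarray { ... })
        shareddestinations[page] = d    -- cache per page number
    end
    return d
end
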
+ if destination == "" then + destination = nil + end + if not destination and page then + destination = pdfarray { page - 1, pdf_fit } + end + return pdfdictionary { + S = pdf_gotor, -- can also be pdf_launch + F = filename, + D = destination or defaultdestination, -- D is mandate + NewWindow = (actions.newwindow and true) or nil, + } + elseif destination and destination ~= "" then + return pdfdictionary { -- can be cached + S = pdf_goto, + D = destination, + } + else + local p = tonumber(page) + if p and p > 0 then + return pdfdictionary { -- can be cached + S = pdf_goto, + D = pdfarray { + pdfreference(pdfpagereference(p)), + pdf_fit, + } + } + elseif trace_references then + report_reference("invalid page reference %a",page) + end + end + return false +end + +lpdf.link = link + +function lpdf.launch(program,parameters) + if program and program ~= "" then + local d = pdfdictionary { + S = pdf_launch, + F = program, + D = ".", + } + if parameters and parameters ~= "" then + d.P = parameters + end + return d + end +end + +function lpdf.javascript(name,arguments) + local script = javascriptcode(name,arguments) -- make into object (hash) + if script then + return pdfdictionary { + S = pdf_javascript, + JS = script, + } + end +end + +local function pdfaction(actions) + local nofactions = #actions + if nofactions > 0 then + local a = actions[1] + local action = runners[a.kind] + if action then + action = action(a,actions) + end + if action then + local first = action + for i=2,nofactions do + local a = actions[i] + local what = runners[a.kind] + if what then + what = what(a,actions) + end + if what then + action.Next = what + action = what + else + -- error + return nil + end + end + return first, actions.n + end + end +end + +lpdf.action = pdfaction + +function codeinjections.prerollreference(actions) -- share can become option + if actions then + local main, n = pdfaction(actions) + if main then + main = pdfdictionary { + Subtype = pdf_link, + Border = pdf_border, + H = (not actions.highlight and pdf_n) or nil, + A = pdfshareobjectreference(main), + F = 4, -- print (mandate in pdf/a) + } + return main("A"), n + end + end +end + +local function use_normal_annotations() + + local function reference(width,height,depth,prerolled) -- keep this one + if prerolled then + if trace_references then + report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled) + end + return pdfannotation_node(width,height,depth,prerolled) + end + end + + local function finishreference() + end + + return reference, finishreference + +end + +-- eventually we can do this for special refs only + +local hashed, nofunique, nofused = { }, 0, 0 + +local f_annot = formatters["<< /Type /Annot %s /Rect [%0.3f %0.3f %0.3f %0.3f] >>"] +local f_bpnf = formatters["_bpnf_(%s,%s,%s,'%s')"] + +local function use_shared_annotations() + + local factor = number.dimenfactors.bp + + local function finishreference(width,height,depth,prerolled) -- %0.2f looks okay enough (no scaling anyway) + local h, v = pdf.h, pdf.v + local llx, lly = h*factor, (v - depth)*factor + local urx, ury = (h + width)*factor, (v + height)*factor + local annot = f_annot(prerolled,llx,lly,urx,ury) + local n = hashed[annot] + if not n then + n = pdfdelayedobject(annot) + hashed[annot] = n + nofunique = nofunique + 1 + end + nofused = nofused + 1 + pdfregisterannotation(n) + end + + _bpnf_ = finishreference + + local function reference(width,height,depth,prerolled) + if prerolled then + if trace_references then + report_reference("width 
%p, height %p, depth %p, prerolled %a",width,height,depth,prerolled) + end + local luacode = f_bpnf(width,height,depth,prerolled) + return latelua_node(luacode) + end + end + + statistics.register("pdf annotations", function() + if nofused > 0 then + return format("%s embedded, %s unique",nofused,nofunique) + else + return nil + end + end) + + + return reference, finishreference + +end + +local lln = latelua_node() if node.has_field(lln,'string') then + + directives.register("refences.sharelinks", function(v) + if v then + nodeinjections.reference, codeinjections.finishreference = use_shared_annotations() + else + nodeinjections.reference, codeinjections.finishreference = use_normal_annotations() + end + end) + + nodeinjections.reference, codeinjections.finishreference = use_shared_annotations() + +else + + nodeinjections.reference, codeinjections.finishreference = use_normal_annotations() + +end node.free(lln) + +-- -- -- -- +-- -- -- -- + +local done = { } -- prevent messages + +function nodeinjections.destination(width,height,depth,name,view) + if not done[name] then + done[name] = true + if trace_destinations then + report_destination("width %p, height %p, depth %p, name %a, view %a",width,height,depth,name,view) + end + return pdfdestination_node(width,height,depth,name,view) -- can be begin/end node + end +end + +-- runners and specials + +runners["inner"] = function(var,actions) + if getinnermethod() == "names" then + local vi = var.i + if vi then + local vir = vi.references + if vir then + local internal = vir.internal + if internal then + var.inner = "aut:" .. internal + end + end + end + else + var.inner = nil + end + local prefix = var.p + local inner = var.inner + if inner and prefix and prefix ~= "" then + inner = prefix .. ":" .. inner -- might not always be ok + end + return link(nil,nil,inner,var.r,actions) +end + +runners["inner with arguments"] = function(var,actions) + report_reference("todo: inner with arguments") + return false +end + +runners["outer"] = function(var,actions) + local file, url = references.checkedfileorurl(var.outer,var.outer) + return link(url,file,var.arguments,nil,actions) +end + +runners["outer with inner"] = function(var,actions) + local file = references.checkedfile(var.outer) -- was var.f but fails ... 
why + return link(nil,file,var.inner,var.r,actions) +end + +runners["special outer with operation"] = function(var,actions) + local handler = specials[var.special] + return handler and handler(var,actions) +end + +runners["special outer"] = function(var,actions) + report_reference("todo: special outer") + return false +end + +runners["special"] = function(var,actions) + local handler = specials[var.special] + return handler and handler(var,actions) +end + +runners["outer with inner with arguments"] = function(var,actions) + report_reference("todo: outer with inner with arguments") + return false +end + +runners["outer with special and operation and arguments"] = function(var,actions) + report_reference("todo: outer with special and operation and arguments") + return false +end + +runners["outer with special"] = function(var,actions) + report_reference("todo: outer with special") + return false +end + +runners["outer with special and operation"] = function(var,actions) + report_reference("todo: outer with special and operation") + return false +end + +runners["special operation"] = runners["special"] +runners["special operation with arguments"] = runners["special"] + +function specials.internal(var,actions) -- better resolve in strc-ref + local i = tonumber(var.operation) + local v = i and references.internals[i] + if not v then + -- error + report_reference("no internal reference %a",i) + elseif getinnermethod() == "names" then + -- named + return link(nil,nil,"aut:"..i,v.references.realpage,actions) + else + -- page + return link(nil,nil,nil,v.references.realpage,actions) + end +end + +-- realpage already resolved + +specials.i = specials.internal + +local pages = references.pages + +function specials.page(var,actions) + local file = var.f + if file then + file = references.checkedfile(file) + return link(nil,file,nil,var.operation,actions) + else + local p = var.r + if not p then -- todo: call special from reference code + p = pages[var.operation] + if type(p) == "function" then -- double + p = p() + else + p = references.realpageofpage(tonumber(p)) + end + -- if p then + -- var.r = p + -- end + end + return link(nil,nil,nil,p or var.operation,actions) + end +end + +function specials.realpage(var,actions) + local file = var.f + if file then + file = references.checkedfile(file) + return link(nil,file,nil,var.operation,actions) + else + return link(nil,nil,nil,var.operation,actions) + end +end + +function specials.userpage(var,actions) + local file = var.f + if file then + file = references.checkedfile(file) + return link(nil,file,nil,var.operation,actions) + else + local p = var.r + if not p then -- todo: call special from reference code + p = var.operation + if p then -- no function and special check here. 
only numbers + p = references.realpageofpage(tonumber(p)) + end + -- if p then + -- var.r = p + -- end + end + return link(nil,nil,nil,p or var.operation,actions) + end +end + +function specials.deltapage(var,actions) + local p = tonumber(var.operation) + if p then + p = references.checkedrealpage(p + texcount.realpageno) + return link(nil,nil,nil,p,actions) + end +end + +-- sections + +-- function specials.section(var,actions) +-- local sectionname = var.operation +-- local destination = var.arguments +-- local internal = structures.sections.internalreference(sectionname,destination) +-- if internal then +-- var.special = "internal" +-- var.operation = internal +-- var.arguments = nil +-- specials.internal(var,actions) +-- end +-- end + +specials.section = specials.internal -- specials.section just need to have a value as it's checked + +-- todo, do this in references namespace ordered instead (this is an experiment) + +local splitter = lpeg.splitat(":") + +function specials.order(var,actions) -- references.specials ! + local operation = var.operation + if operation then + local kind, name, n = lpegmatch(splitter,operation) + local order = structures.lists.ordered[kind] + order = order and order[name] + local v = order[tonumber(n)] + local r = v and v.references.realpage + if r then + var.operation = r -- brrr, but test anyway + return specials.page(var,actions) + end + end +end + +function specials.url(var,actions) + local url = references.checkedurl(var.operation) + return link(url,nil,var.arguments,nil,actions) +end + +function specials.file(var,actions) + local file = references.checkedfile(var.operation) + return link(nil,file,var.arguments,nil,actions) +end + +function specials.fileorurl(var,actions) + local file, url = references.checkedfileorurl(var.operation,var.operation) + return link(url,file,var.arguments,nil,actions) +end + +function specials.program(var,content) + local program = references.checkedprogram(var.operation) + return lpdf.launch(program,var.arguments) +end + +function specials.javascript(var) + return lpdf.javascript(var.operation,var.arguments) +end + +specials.JS = specials.javascript + +executers.importform = pdfdictionary { S = pdf_named, N = pdfconstant("AcroForm:ImportFDF") } +executers.exportform = pdfdictionary { S = pdf_named, N = pdfconstant("AcroForm:ExportFDF") } +executers.first = pdfdictionary { S = pdf_named, N = pdfconstant("FirstPage") } +executers.previous = pdfdictionary { S = pdf_named, N = pdfconstant("PrevPage") } +executers.next = pdfdictionary { S = pdf_named, N = pdfconstant("NextPage") } +executers.last = pdfdictionary { S = pdf_named, N = pdfconstant("LastPage") } +executers.backward = pdfdictionary { S = pdf_named, N = pdfconstant("GoBack") } +executers.forward = pdfdictionary { S = pdf_named, N = pdfconstant("GoForward") } +executers.print = pdfdictionary { S = pdf_named, N = pdfconstant("Print") } +executers.exit = pdfdictionary { S = pdf_named, N = pdfconstant("Quit") } +executers.close = pdfdictionary { S = pdf_named, N = pdfconstant("Close") } +executers.save = pdfdictionary { S = pdf_named, N = pdfconstant("Save") } +executers.savenamed = pdfdictionary { S = pdf_named, N = pdfconstant("SaveAs") } +executers.opennamed = pdfdictionary { S = pdf_named, N = pdfconstant("Open") } +executers.help = pdfdictionary { S = pdf_named, N = pdfconstant("HelpUserGuide") } +executers.toggle = pdfdictionary { S = pdf_named, N = pdfconstant("FullScreen") } +executers.search = pdfdictionary { S = pdf_named, N = pdfconstant("Find") } 
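The executers entries above and those that follow are constant /Named action dictionaries that get attached unchanged to whatever annotation or field triggers them. A hedged illustration of that pattern (the pdfdictionary and pdfconstant names mirror the lpdf helpers used in this file; namedaction itself is hypothetical):

    -- editorial sketch, not part of the patch: one reusable /Named PDF action
    local function namedaction(name)
        return pdfdictionary {
            S = pdfconstant("Named"),   -- action type
            N = pdfconstant(name),      -- e.g. "NextPage", "Print", "FullScreen"
        }
    end

    local nextpage = namedaction("NextPage")   -- would end up as an annotation's /A entry
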
+executers.searchagain = pdfdictionary { S = pdf_named, N = pdfconstant("FindAgain") } +executers.gotopage = pdfdictionary { S = pdf_named, N = pdfconstant("GoToPage") } +executers.query = pdfdictionary { S = pdf_named, N = pdfconstant("AcroSrch:Query") } +executers.queryagain = pdfdictionary { S = pdf_named, N = pdfconstant("AcroSrch:NextHit") } +executers.fitwidth = pdfdictionary { S = pdf_named, N = pdfconstant("FitWidth") } +executers.fitheight = pdfdictionary { S = pdf_named, N = pdfconstant("FitHeight") } + +local function fieldset(arguments) + -- [\dogetfieldset{#1}] + return nil +end + +function executers.resetform(arguments) + arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments) + return pdfdictionary { + S = pdfconstant("ResetForm"), + Field = fieldset(arguments[1]) + } +end + +local formmethod = "post" -- "get" "post" +local formformat = "xml" -- "xml" "html" "fdf" + +-- bit 3 = html bit 6 = xml bit 4 = get + +local flags = { + get = { + html = 12, fdf = 8, xml = 40, + }, + post = { + html = 4, fdf = 0, xml = 32, + } +} + +function executers.submitform(arguments) + arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments) + local flag = flags[formmethod] or flags.post + flag = (flag and (flag[formformat] or flag.xml)) or 32 -- default: post, xml + return pdfdictionary { + S = pdfconstant("SubmitForm"), + F = arguments[1], + Field = fieldset(arguments[2]), + Flags = flag, + -- \PDFsubmitfiller + } +end + +local pdf_hide = pdfconstant("Hide") + +function executers.hide(arguments) + return pdfdictionary { + S = pdf_hide, + H = true, + T = arguments, + } +end + +function executers.show(arguments) + return pdfdictionary { + S = pdf_hide, + H = false, + T = arguments, + } +end + +local pdf_movie = pdfconstant("Movie") +local pdf_start = pdfconstant("Start") +local pdf_stop = pdfconstant("Stop") +local pdf_resume = pdfconstant("Resume") +local pdf_pause = pdfconstant("Pause") + +local function movie_or_sound(operation,arguments) + arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments) + return pdfdictionary { + S = pdf_movie, + T = format("movie %s",arguments[1] or "noname"), + Operation = operation, + } +end + +function executers.startmovie (arguments) return movie_or_sound(pdf_start ,arguments) end +function executers.stopmovie (arguments) return movie_or_sound(pdf_stop ,arguments) end +function executers.resumemovie(arguments) return movie_or_sound(pdf_resume,arguments) end +function executers.pausemovie (arguments) return movie_or_sound(pdf_pause ,arguments) end + +function executers.startsound (arguments) return movie_or_sound(pdf_start ,arguments) end +function executers.stopsound (arguments) return movie_or_sound(pdf_stop ,arguments) end +function executers.resumesound(arguments) return movie_or_sound(pdf_resume,arguments) end +function executers.pausesound (arguments) return movie_or_sound(pdf_pause ,arguments) end + +function specials.action(var) + local operation = var.operation + if var.operation and operation ~= "" then + local e = executers[operation] + if type(e) == "table" then + return e + elseif type(e) == "function" then + return e(var.arguments) + end + end +end + +--~ entry.A = pdfdictionary { +--~ S = pdf_goto, +--~ D = .... 
+--~ } + +local function build(levels,start,parent,method) + local startlevel = levels[start][1] + local i, n = start, 0 + local child, entry, m, prev, first, last, f, l + while i and i <= #levels do + local li = levels[i] + local level, title, reference, open = li[1], li[2], li[3], li[4] + if level < startlevel then + pdfflushobject(child,entry) + return i, n, first, last + elseif level == startlevel then + if trace_bookmarks then + report_bookmark("%3i %w%s %s",reference.realpage,(level-1)*2,(open and "+") or "-",title) + end + local prev = child + child = pdfreserveobject() + if entry then + entry.Next = child and pdfreference(child) + pdfflushobject(prev,entry) + end + entry = pdfdictionary { + Title = pdfunicode(title), + Parent = parent, + Prev = prev and pdfreference(prev), + } + if method == "internal" then + entry.Dest = "aut:" .. reference.internal + else -- if method == "page" then + entry.Dest = pagedestination(reference.realpage) + end + if not first then first, last = child, child end + prev = child + last = prev + n = n + 1 + i = i + 1 + elseif i < #levels and level > startlevel then + i, m, f, l = build(levels,i,pdfreference(child),method) + entry.Count = (open and m) or -m + if m > 0 then + entry.First, entry.Last = pdfreference(f), pdfreference(l) + end + else + -- missing intermediate level but ok + i, m, f, l = build(levels,i,pdfreference(child),method) + entry.Count = (open and m) or -m + if m > 0 then + entry.First, entry.Last = pdfreference(f), pdfreference(l) + end + pdfflushobject(child,entry) + return i, n, first, last + end + end + pdfflushobject(child,entry) + return nil, n, first, last +end + +function codeinjections.addbookmarks(levels,method) + if #levels > 0 then + structures.bookmarks.flatten(levels) -- dirty trick for lack of structure + local parent = pdfreserveobject() + local _, m, first, last = build(levels,1,pdfreference(parent),method or "internal") + local dict = pdfdictionary { + Type = pdfconstant("Outlines"), + First = pdfreference(first), + Last = pdfreference(last), + Count = m, + } + pdfflushobject(parent,dict) + lpdf.addtocatalog("Outlines",lpdf.reference(parent)) + end +end + +-- this could also be hooked into the frontend finalizer + +lpdf.registerdocumentfinalizer(function() bookmarks.place() end,1,"bookmarks") diff --git a/tex/context/base/lpdf-enc.lua b/tex/context/base/lpdf-enc.lua index 090fb15cd..6dd286191 100644 --- a/tex/context/base/lpdf-enc.lua +++ b/tex/context/base/lpdf-enc.lua @@ -1,157 +1,157 @@ -if not modules then modules = { } end modules ['lpdf-enc'] = { - version = 1.001, - comment = "companion to lpdf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- delayed loading - -local pdfconstant = lpdf.constant - -return lpdf.dictionary { - Type = pdfconstant("Encoding"), - Differences = lpdf.array { - 24, - pdfconstant("breve"), - pdfconstant("caron"), - pdfconstant("circumflex"), - pdfconstant("dotaccent"), - pdfconstant("hungarumlaut"), - pdfconstant("ogonek"), - pdfconstant("ring"), - pdfconstant("tilde"), - 39, - pdfconstant("quotesingle"), - 96, - pdfconstant("grave"), - 128, - pdfconstant("bullet"), - pdfconstant("dagger"), - pdfconstant("daggerdbl"), - pdfconstant("ellipsis"), - pdfconstant("emdash"), - pdfconstant("endash"), - pdfconstant("florin"), - pdfconstant("fraction"), - pdfconstant("guilsinglleft"), - pdfconstant("guilsinglright"), - pdfconstant("minus"), - pdfconstant("perthousand"), - 
pdfconstant("quotedblbase"), - pdfconstant("quotedblleft"), - pdfconstant("quotedblright"), - pdfconstant("quoteleft"), - pdfconstant("quoteright"), - pdfconstant("quotesinglbase"), - pdfconstant("trademark"), - pdfconstant("fi"), - pdfconstant("fl"), - pdfconstant("Lslash"), - pdfconstant("OE"), - pdfconstant("Scaron"), - pdfconstant("Ydieresis"), - pdfconstant("Zcaron"), - pdfconstant("dotlessi"), - pdfconstant("lslash"), - pdfconstant("oe"), - pdfconstant("scaron"), - pdfconstant("zcaron"), - 160, - pdfconstant("Euro"), - 164, - pdfconstant("currency"), - 166, - pdfconstant("brokenbar"), - 168, - pdfconstant("dieresis"), - pdfconstant("copyright"), - pdfconstant("ordfeminine"), - 172, - pdfconstant("logicalnot"), - pdfconstant(".notdef"), - pdfconstant("registered"), - pdfconstant("macron"), - pdfconstant("degree"), - pdfconstant("plusminus"), - pdfconstant("twosuperior"), - pdfconstant("threesuperior"), - pdfconstant("acute"), - pdfconstant("mu"), - 183, - pdfconstant("periodcentered"), - pdfconstant("cedilla"), - pdfconstant("onesuperior"), - pdfconstant("ordmasculine"), - 188, - pdfconstant("onequarter"), - pdfconstant("onehalf"), - pdfconstant("threequarters"), - 192, - pdfconstant("Agrave"), - pdfconstant("Aacute"), - pdfconstant("Acircumflex"), - pdfconstant("Atilde"), - pdfconstant("Adieresis"), - pdfconstant("Aring"), - pdfconstant("AE"), - pdfconstant("Ccedilla"), - pdfconstant("Egrave"), - pdfconstant("Eacute"), - pdfconstant("Ecircumflex"), - pdfconstant("Edieresis"), - pdfconstant("Igrave"), - pdfconstant("Iacute"), - pdfconstant("Icircumflex"), - pdfconstant("Idieresis"), - pdfconstant("Eth"), - pdfconstant("Ntilde"), - pdfconstant("Ograve"), - pdfconstant("Oacute"), - pdfconstant("Ocircumflex"), - pdfconstant("Otilde"), - pdfconstant("Odieresis"), - pdfconstant("multiply"), - pdfconstant("Oslash"), - pdfconstant("Ugrave"), - pdfconstant("Uacute"), - pdfconstant("Ucircumflex"), - pdfconstant("Udieresis"), - pdfconstant("Yacute"), - pdfconstant("Thorn"), - pdfconstant("germandbls"), - pdfconstant("agrave"), - pdfconstant("aacute"), - pdfconstant("acircumflex"), - pdfconstant("atilde"), - pdfconstant("adieresis"), - pdfconstant("aring"), - pdfconstant("ae"), - pdfconstant("ccedilla"), - pdfconstant("egrave"), - pdfconstant("eacute"), - pdfconstant("ecircumflex"), - pdfconstant("edieresis"), - pdfconstant("igrave"), - pdfconstant("iacute"), - pdfconstant("icircumflex"), - pdfconstant("idieresis"), - pdfconstant("eth"), - pdfconstant("ntilde"), - pdfconstant("ograve"), - pdfconstant("oacute"), - pdfconstant("ocircumflex"), - pdfconstant("otilde"), - pdfconstant("odieresis"), - pdfconstant("divide"), - pdfconstant("oslash"), - pdfconstant("ugrave"), - pdfconstant("uacute"), - pdfconstant("ucircumflex"), - pdfconstant("udieresis"), - pdfconstant("yacute"), - pdfconstant("thorn"), - pdfconstant("ydieresis"), - }, -} +if not modules then modules = { } end modules ['lpdf-enc'] = { + version = 1.001, + comment = "companion to lpdf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- delayed loading + +local pdfconstant = lpdf.constant + +return lpdf.dictionary { + Type = pdfconstant("Encoding"), + Differences = lpdf.array { + 24, + pdfconstant("breve"), + pdfconstant("caron"), + pdfconstant("circumflex"), + pdfconstant("dotaccent"), + pdfconstant("hungarumlaut"), + pdfconstant("ogonek"), + pdfconstant("ring"), + pdfconstant("tilde"), + 39, + 
pdfconstant("quotesingle"), + 96, + pdfconstant("grave"), + 128, + pdfconstant("bullet"), + pdfconstant("dagger"), + pdfconstant("daggerdbl"), + pdfconstant("ellipsis"), + pdfconstant("emdash"), + pdfconstant("endash"), + pdfconstant("florin"), + pdfconstant("fraction"), + pdfconstant("guilsinglleft"), + pdfconstant("guilsinglright"), + pdfconstant("minus"), + pdfconstant("perthousand"), + pdfconstant("quotedblbase"), + pdfconstant("quotedblleft"), + pdfconstant("quotedblright"), + pdfconstant("quoteleft"), + pdfconstant("quoteright"), + pdfconstant("quotesinglbase"), + pdfconstant("trademark"), + pdfconstant("fi"), + pdfconstant("fl"), + pdfconstant("Lslash"), + pdfconstant("OE"), + pdfconstant("Scaron"), + pdfconstant("Ydieresis"), + pdfconstant("Zcaron"), + pdfconstant("dotlessi"), + pdfconstant("lslash"), + pdfconstant("oe"), + pdfconstant("scaron"), + pdfconstant("zcaron"), + 160, + pdfconstant("Euro"), + 164, + pdfconstant("currency"), + 166, + pdfconstant("brokenbar"), + 168, + pdfconstant("dieresis"), + pdfconstant("copyright"), + pdfconstant("ordfeminine"), + 172, + pdfconstant("logicalnot"), + pdfconstant(".notdef"), + pdfconstant("registered"), + pdfconstant("macron"), + pdfconstant("degree"), + pdfconstant("plusminus"), + pdfconstant("twosuperior"), + pdfconstant("threesuperior"), + pdfconstant("acute"), + pdfconstant("mu"), + 183, + pdfconstant("periodcentered"), + pdfconstant("cedilla"), + pdfconstant("onesuperior"), + pdfconstant("ordmasculine"), + 188, + pdfconstant("onequarter"), + pdfconstant("onehalf"), + pdfconstant("threequarters"), + 192, + pdfconstant("Agrave"), + pdfconstant("Aacute"), + pdfconstant("Acircumflex"), + pdfconstant("Atilde"), + pdfconstant("Adieresis"), + pdfconstant("Aring"), + pdfconstant("AE"), + pdfconstant("Ccedilla"), + pdfconstant("Egrave"), + pdfconstant("Eacute"), + pdfconstant("Ecircumflex"), + pdfconstant("Edieresis"), + pdfconstant("Igrave"), + pdfconstant("Iacute"), + pdfconstant("Icircumflex"), + pdfconstant("Idieresis"), + pdfconstant("Eth"), + pdfconstant("Ntilde"), + pdfconstant("Ograve"), + pdfconstant("Oacute"), + pdfconstant("Ocircumflex"), + pdfconstant("Otilde"), + pdfconstant("Odieresis"), + pdfconstant("multiply"), + pdfconstant("Oslash"), + pdfconstant("Ugrave"), + pdfconstant("Uacute"), + pdfconstant("Ucircumflex"), + pdfconstant("Udieresis"), + pdfconstant("Yacute"), + pdfconstant("Thorn"), + pdfconstant("germandbls"), + pdfconstant("agrave"), + pdfconstant("aacute"), + pdfconstant("acircumflex"), + pdfconstant("atilde"), + pdfconstant("adieresis"), + pdfconstant("aring"), + pdfconstant("ae"), + pdfconstant("ccedilla"), + pdfconstant("egrave"), + pdfconstant("eacute"), + pdfconstant("ecircumflex"), + pdfconstant("edieresis"), + pdfconstant("igrave"), + pdfconstant("iacute"), + pdfconstant("icircumflex"), + pdfconstant("idieresis"), + pdfconstant("eth"), + pdfconstant("ntilde"), + pdfconstant("ograve"), + pdfconstant("oacute"), + pdfconstant("ocircumflex"), + pdfconstant("otilde"), + pdfconstant("odieresis"), + pdfconstant("divide"), + pdfconstant("oslash"), + pdfconstant("ugrave"), + pdfconstant("uacute"), + pdfconstant("ucircumflex"), + pdfconstant("udieresis"), + pdfconstant("yacute"), + pdfconstant("thorn"), + pdfconstant("ydieresis"), + }, +} diff --git a/tex/context/base/lpdf-epa.lua b/tex/context/base/lpdf-epa.lua index 034e6d7e2..8d00c8c26 100644 --- a/tex/context/base/lpdf-epa.lua +++ b/tex/context/base/lpdf-epa.lua @@ -1,226 +1,226 @@ -if not modules then modules = { } end modules ['lpdf-epa'] = { - version = 1.001, 
- comment = "companion to lpdf-epa.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This is a rather experimental feature and the code will probably --- change. - -local type, tonumber = type, tonumber -local format, gsub = string.format, string.gsub -local formatters = string.formatters - ------ lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns - -local trace_links = false trackers.register("figures.links", function(v) trace_links = v end) - -local report_link = logs.reporter("backend","merging") - -local backends, lpdf = backends, lpdf - -local variables = interfaces.variables -local codeinjections = backends.pdf.codeinjections ------ urlescaper = lpegpatterns.urlescaper ------ utftohigh = lpegpatterns.utftohigh -local escapetex = characters.filters.utf.private.escape - -local layerspec = { -- predefining saves time - "epdflinks" -} - -local function makenamespace(filename) - return format("lpdf-epa-%s-",file.removesuffix(file.basename(filename))) -end - -local function add_link(x,y,w,h,destination,what) - x = x .. "bp" - y = y .. "bp" - w = w .. "bp" - h = h .. "bp" - if trace_links then - report_link("destination %a, type %a, dx %s, dy %s, wd %s, ht %s",destination,what,x,y,w,h) - end - local locationspec = { -- predefining saves time - x = x, - y = y, - preset = "leftbottom", - } - local buttonspec = { - width = w, - height = h, - offset = variables.overlay, - frame = trace_links and variables.on or variables.off, - } - context.setlayer ( - layerspec, - locationspec, - function() context.button ( buttonspec, "", { destination } ) end - -- context.nested.button(buttonspec, "", { destination }) -- time this - ) -end - -local function link_goto(x,y,w,h,document,annotation,pagedata,namespace) - local a = annotation.A - if a then - local destination = a.D -- [ 18 0 R /Fit ] - local what = "page" - if type(destination) == "string" then - local destinations = document.destinations - local wanted = destinations[destination] - destination = wanted and wanted.D - if destination then what = "named" end - end - local pagedata = destination and destination[1] - if pagedata then - local destinationpage = pagedata.number - if destinationpage then - add_link(x,y,w,h,namespace .. 
destinationpage,what) - end - end - end -end - -local function link_uri(x,y,w,h,document,annotation) - local url = annotation.A.URI - if url then - -- url = lpegmatch(urlescaper,url) - -- url = lpegmatch(utftohigh,url) - url = escapetex(url) - add_link(x,y,w,h,formatters["url(%s)"](url),"url") - end -end - -local function link_file(x,y,w,h,document,annotation) - local a = annotation.A - if a then - local filename = a.F - if filename then - filename = escapetex(filename) - local destination = a.D - if not destination then - add_link(x,y,w,h,formatters["file(%s)"](filename),"file") - elseif type(destination) == "string" then - add_link(x,y,w,h,formatters["%s::%s"](filename,destination),"file (named)") - else - destination = destination[1] -- array - if tonumber(destination) then - add_link(x,y,w,h,formatters["%s::page(%s)"](filename,destination),"file (page)") - else - add_link(x,y,w,h,formatters["file(%s)"](filename),"file") - end - end - end - end -end - -function codeinjections.mergereferences(specification) - if figures and not specification then - specification = figures and figures.current() - specification = specification and specification.status - end - if specification then - local fullname = specification.fullname - local document = lpdf.epdf.load(fullname) - if document then - local pagenumber = specification.page or 1 - local xscale = specification.yscale or 1 - local yscale = specification.yscale or 1 - local size = specification.size or "crop" -- todo - local pagedata = document.pages[pagenumber] - local annotations = pagedata and pagedata.Annots - if annotations and annotations.n > 0 then - local namespace = format("lpdf-epa-%s-",file.removesuffix(file.basename(fullname))) - local reference = namespace .. pagenumber - local mediabox = pagedata.MediaBox - local llx, lly, urx, ury = mediabox[1], mediabox[2], mediabox[3], mediabox[4] - local width, height = xscale * (urx - llx), yscale * (ury - lly) -- \\overlaywidth, \\overlayheight - context.definelayer( { "epdflinks" }, { height = height.."bp" , width = width.."bp" }) - for i=1,annotations.n do - local annotation = annotations[i] - if annotation then - local subtype = annotation.Subtype - local rectangle = annotation.Rect - local a_llx, a_lly, a_urx, a_ury = rectangle[1], rectangle[2], rectangle[3], rectangle[4] - local x, y = xscale * (a_llx - llx), yscale * (a_lly - lly) - local w, h = xscale * (a_urx - a_llx), yscale * (a_ury - a_lly) - if subtype == "Link" then - local a = annotation.A - if a then - local linktype = a.S - if linktype == "GoTo" then - link_goto(x,y,w,h,document,annotation,pagedata,namespace) - elseif linktype == "GoToR" then - link_file(x,y,w,h,document,annotation) - elseif linktype == "URI" then - link_uri(x,y,w,h,document,annotation) - elseif trace_links then - report_link("unsupported link annotation %a",linktype) - end - else - report_link("mising link annotation") - end - elseif trace_links then - report_link("unsupported annotation %a",subtype) - end - elseif trace_links then - report_link("broken annotation, index %a",i) - end - end - context.flushlayer { "epdflinks" } - -- context("\\gdef\\figurereference{%s}",reference) -- global - context.setgvalue("figurereference",reference) -- global - if trace_links then - report_link("setting figure reference to %a",reference) - end - specification.reference = reference - return namespace - end - end - end - return ""-- no namespace, empty, not nil -end - -function codeinjections.mergeviewerlayers(specification) - -- todo: parse included page for layers - if 
true then - return - end - if not specification then - specification = figures and figures.current() - specification = specification and specification.status - end - if specification then - local fullname = specification.fullname - local document = lpdf.epdf.load(fullname) - if document then - local namespace = format("lpdf:epa:%s:",file.removesuffix(file.basename(fullname))) - local layers = document.layers - if layers then - for i=1,layers.n do - local layer = layers[i] - if layer then - local tag = namespace .. gsub(layer," ",":") - local title = tag - if trace_links then - report_link("using layer %a",tag) - end - attributes.viewerlayers.define { -- also does some cleaning - tag = tag, -- todo: #3A or so - title = title, - visible = variables.start, - editable = variables.yes, - printable = variables.yes, - } - codeinjections.useviewerlayer(tag) - elseif trace_links then - report_link("broken layer, index %a",i) - end - end - end - end - end -end - +if not modules then modules = { } end modules ['lpdf-epa'] = { + version = 1.001, + comment = "companion to lpdf-epa.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This is a rather experimental feature and the code will probably +-- change. + +local type, tonumber = type, tonumber +local format, gsub = string.format, string.gsub +local formatters = string.formatters + +----- lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns + +local trace_links = false trackers.register("figures.links", function(v) trace_links = v end) + +local report_link = logs.reporter("backend","merging") + +local backends, lpdf = backends, lpdf + +local variables = interfaces.variables +local codeinjections = backends.pdf.codeinjections +----- urlescaper = lpegpatterns.urlescaper +----- utftohigh = lpegpatterns.utftohigh +local escapetex = characters.filters.utf.private.escape + +local layerspec = { -- predefining saves time + "epdflinks" +} + +local function makenamespace(filename) + return format("lpdf-epa-%s-",file.removesuffix(file.basename(filename))) +end + +local function add_link(x,y,w,h,destination,what) + x = x .. "bp" + y = y .. "bp" + w = w .. "bp" + h = h .. "bp" + if trace_links then + report_link("destination %a, type %a, dx %s, dy %s, wd %s, ht %s",destination,what,x,y,w,h) + end + local locationspec = { -- predefining saves time + x = x, + y = y, + preset = "leftbottom", + } + local buttonspec = { + width = w, + height = h, + offset = variables.overlay, + frame = trace_links and variables.on or variables.off, + } + context.setlayer ( + layerspec, + locationspec, + function() context.button ( buttonspec, "", { destination } ) end + -- context.nested.button(buttonspec, "", { destination }) -- time this + ) +end + +local function link_goto(x,y,w,h,document,annotation,pagedata,namespace) + local a = annotation.A + if a then + local destination = a.D -- [ 18 0 R /Fit ] + local what = "page" + if type(destination) == "string" then + local destinations = document.destinations + local wanted = destinations[destination] + destination = wanted and wanted.D + if destination then what = "named" end + end + local pagedata = destination and destination[1] + if pagedata then + local destinationpage = pagedata.number + if destinationpage then + add_link(x,y,w,h,namespace .. 
destinationpage,what) + end + end + end +end + +local function link_uri(x,y,w,h,document,annotation) + local url = annotation.A.URI + if url then + -- url = lpegmatch(urlescaper,url) + -- url = lpegmatch(utftohigh,url) + url = escapetex(url) + add_link(x,y,w,h,formatters["url(%s)"](url),"url") + end +end + +local function link_file(x,y,w,h,document,annotation) + local a = annotation.A + if a then + local filename = a.F + if filename then + filename = escapetex(filename) + local destination = a.D + if not destination then + add_link(x,y,w,h,formatters["file(%s)"](filename),"file") + elseif type(destination) == "string" then + add_link(x,y,w,h,formatters["%s::%s"](filename,destination),"file (named)") + else + destination = destination[1] -- array + if tonumber(destination) then + add_link(x,y,w,h,formatters["%s::page(%s)"](filename,destination),"file (page)") + else + add_link(x,y,w,h,formatters["file(%s)"](filename),"file") + end + end + end + end +end + +function codeinjections.mergereferences(specification) + if figures and not specification then + specification = figures and figures.current() + specification = specification and specification.status + end + if specification then + local fullname = specification.fullname + local document = lpdf.epdf.load(fullname) + if document then + local pagenumber = specification.page or 1 + local xscale = specification.yscale or 1 + local yscale = specification.yscale or 1 + local size = specification.size or "crop" -- todo + local pagedata = document.pages[pagenumber] + local annotations = pagedata and pagedata.Annots + if annotations and annotations.n > 0 then + local namespace = format("lpdf-epa-%s-",file.removesuffix(file.basename(fullname))) + local reference = namespace .. pagenumber + local mediabox = pagedata.MediaBox + local llx, lly, urx, ury = mediabox[1], mediabox[2], mediabox[3], mediabox[4] + local width, height = xscale * (urx - llx), yscale * (ury - lly) -- \\overlaywidth, \\overlayheight + context.definelayer( { "epdflinks" }, { height = height.."bp" , width = width.."bp" }) + for i=1,annotations.n do + local annotation = annotations[i] + if annotation then + local subtype = annotation.Subtype + local rectangle = annotation.Rect + local a_llx, a_lly, a_urx, a_ury = rectangle[1], rectangle[2], rectangle[3], rectangle[4] + local x, y = xscale * (a_llx - llx), yscale * (a_lly - lly) + local w, h = xscale * (a_urx - a_llx), yscale * (a_ury - a_lly) + if subtype == "Link" then + local a = annotation.A + if a then + local linktype = a.S + if linktype == "GoTo" then + link_goto(x,y,w,h,document,annotation,pagedata,namespace) + elseif linktype == "GoToR" then + link_file(x,y,w,h,document,annotation) + elseif linktype == "URI" then + link_uri(x,y,w,h,document,annotation) + elseif trace_links then + report_link("unsupported link annotation %a",linktype) + end + else + report_link("mising link annotation") + end + elseif trace_links then + report_link("unsupported annotation %a",subtype) + end + elseif trace_links then + report_link("broken annotation, index %a",i) + end + end + context.flushlayer { "epdflinks" } + -- context("\\gdef\\figurereference{%s}",reference) -- global + context.setgvalue("figurereference",reference) -- global + if trace_links then + report_link("setting figure reference to %a",reference) + end + specification.reference = reference + return namespace + end + end + end + return ""-- no namespace, empty, not nil +end + +function codeinjections.mergeviewerlayers(specification) + -- todo: parse included page for layers + if 
true then + return + end + if not specification then + specification = figures and figures.current() + specification = specification and specification.status + end + if specification then + local fullname = specification.fullname + local document = lpdf.epdf.load(fullname) + if document then + local namespace = format("lpdf:epa:%s:",file.removesuffix(file.basename(fullname))) + local layers = document.layers + if layers then + for i=1,layers.n do + local layer = layers[i] + if layer then + local tag = namespace .. gsub(layer," ",":") + local title = tag + if trace_links then + report_link("using layer %a",tag) + end + attributes.viewerlayers.define { -- also does some cleaning + tag = tag, -- todo: #3A or so + title = title, + visible = variables.start, + editable = variables.yes, + printable = variables.yes, + } + codeinjections.useviewerlayer(tag) + elseif trace_links then + report_link("broken layer, index %a",i) + end + end + end + end + end +end + diff --git a/tex/context/base/lpdf-epd.lua b/tex/context/base/lpdf-epd.lua index b9f8cfc7c..4bf98edcc 100644 --- a/tex/context/base/lpdf-epd.lua +++ b/tex/context/base/lpdf-epd.lua @@ -1,351 +1,351 @@ -if not modules then modules = { } end modules ['lpdf-epd'] = { - version = 1.001, - comment = "companion to lpdf-epa.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This is an experimental layer around the epdf library. The reason for --- this layer is that I want to be independent of the library (which --- implements a selection of what a file provides) and also because I --- want an interface closer to Lua's table model while the API stays --- close to the original xpdf library. Of course, after prototyping a --- solution, we can optimize it using the low level epdf accessors. - --- It will be handy when we have a __length and __next that can trigger --- the resolve till then we will provide .n as #. - --- As there can be references to the parent we cannot expand a tree. I --- played with some expansion variants but it does to pay off. - --- Maybe we need a close(). --- We cannot access all destinations in one run. - -local setmetatable, rawset, rawget, tostring, tonumber = setmetatable, rawset, rawget, tostring, tonumber -local lower, match, char, find, sub = string.lower, string.match, string.char, string.find, string.sub -local concat = table.concat -local toutf = string.toutf - -local report_epdf = logs.reporter("epdf") - --- a bit of protection - -local limited = false - -directives.register("system.inputmode", function(v) - if not limited then - local i_limiter = io.i_limiter(v) - if i_limiter then - epdf.open = i_limiter.protect(epdf.open) - limited = true - end - end -end) - --- - -function epdf.type(o) - local t = lower(match(tostring(o),"[^ :]+")) - return t or "?" 
-end - -lpdf = lpdf or { } -local lpdf = lpdf - -lpdf.epdf = { } - -local checked_access - -local function prepare(document,d,t,n,k) - for i=1,n do - local v = d:getVal(i) - local r = d:getValNF(i) - if r:getTypeName() == "ref" then - r = r:getRef().num - local c = document.cache[r] - if c then - -- - else - c = checked_access[v:getTypeName()](v,document,r) - if c then - document.cache[r] = c - document.xrefs[c] = r - end - end - t[d:getKey(i)] = c - else - t[d:getKey(i)] = checked_access[v:getTypeName()](v,document) - end - end - getmetatable(t).__index = nil - return t[k] -end - -local function some_dictionary(d,document,r) - local n = d and d:getLength() or 0 - if n > 0 then - local t = { } - setmetatable(t, { __index = function(t,k) return prepare(document,d,t,n,k) end } ) - return t - end -end - -local done = { } - -local function prepare(document,a,t,n,k) - for i=1,n do - local v = a:get(i) - local r = a:getNF(i) - if v:getTypeName() == "null" then - -- TH: weird, but appears possible - elseif r:getTypeName() == "ref" then - r = r:getRef().num - local c = document.cache[r] - if c then - -- - else - c = checked_access[v:getTypeName()](v,document,r) - document.cache[r] = c - document.xrefs[c] = r - end - t[i] = c - else - t[i] = checked_access[v:getTypeName()](v,document) - end - end - getmetatable(t).__index = nil - return t[k] -end - -local function some_array(a,document,r) - local n = a and a:getLength() or 0 - if n > 0 then - local t = { n = n } - setmetatable(t, { __index = function(t,k) return prepare(document,a,t,n,k) end } ) - return t - end -end - -local function streamaccess(s,_,what) - if not what or what == "all" or what == "*all" then - local t, n = { }, 0 - s:streamReset() - while true do - local c = s:streamGetChar() - if c < 0 then - break - else - n = n + 1 - t[n] = char(c) - end - end - return concat(t) - end -end - -local function some_stream(d,document,r) - if d then - d:streamReset() - local s = some_dictionary(d:streamGetDict(),document,r) - getmetatable(s).__call = function(...) return streamaccess(d,...) 
end - return s - end -end - --- we need epdf.getBool - -checked_access = { - dictionary = function(d,document,r) - return some_dictionary(d:getDict(),document,r) - end, - array = function(a,document,r) - return some_array(a:getArray(),document,r) - end, - stream = function(v,document,r) - return some_stream(v,document,r) - end, - real = function(v) - return v:getReal() - end, - integer = function(v) - return v:getNum() - end, - string = function(v) - return toutf(v:getString()) - end, - boolean = function(v) - return v:getBool() - end, - name = function(v) - return v:getName() - end, - ref = function(v) - return v:getRef() - end, - null = function() - return nil - end, -} - --- checked_access.real = epdf.real --- checked_access.integer = epdf.integer --- checked_access.string = epdf.string --- checked_access.boolean = epdf.boolean --- checked_access.name = epdf.name --- checked_access.ref = epdf.ref - -local function getnames(document,n,target) -- direct - if n then - local Names = n.Names - if Names then - if not target then - target = { } - end - for i=1,Names.n,2 do - target[Names[i]] = Names[i+1] - end - else - local Kids = n.Kids - if Kids then - for i=1,Kids.n do - target = getnames(document,Kids[i],target) - end - end - end - return target - end -end - -local function getkids(document,n,target) -- direct - if n then - local Kids = n.Kids - if Kids then - for i=1,Kids.n do - target = getkids(document,Kids[i],target) - end - elseif target then - target[#target+1] = n - else - target = { n } - end - return target - end -end - --- /OCProperties << --- /OCGs [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ] --- /D << --- /Order [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ] --- /ON [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ] --- /OFF [ ] --- >> --- >> - -local function getlayers(document) - local properties = document.Catalog.OCProperties - if properties then - local layers = properties.OCGs - if layers then - local t = { } - local n = layers.n - for i=1,n do - local layer = layers[i] ---~ print(document.xrefs[layer]) - t[i] = layer.Name - end - t.n = n - return t - end - end -end - -local function getpages(document) - local data = document.data - local xrefs = document.xrefs - local cache = document.cache - local cata = data:getCatalog() - local xref = data:getXRef() - local pages = { } - local nofpages = cata:getNumPages() - for pagenumber=1,nofpages do - local pagereference = cata:getPageRef(pagenumber).num - local pagedata = some_dictionary(xref:fetch(pagereference,0):getDict(),document,pagereference) - if pagedata then - pagedata.number = pagenumber - pages[pagenumber] = pagedata - xrefs[pagedata] = pagereference - cache[pagereference] = pagedata - else - report_epdf("missing pagedata at slot %i",i) - end - end - pages.n = nofpages - return pages -end - --- loader - -local function delayed(document,tag,f) - local t = { } - setmetatable(t, { __index = function(t,k) - local result = f() - if result then - document[tag] = result - return result[k] - end - end } ) - return t -end - -local loaded = { } - -function lpdf.epdf.load(filename) - local document = loaded[filename] - if not document then - statistics.starttiming(lpdf.epdf) - local data = epdf.open(filename) -- maybe resolvers.find_file - if data then - document = { - filename = filename, - cache = { }, - xrefs = { }, - data = data, - } - local Catalog = some_dictionary(data:getXRef():getCatalog():getDict(),document) - local Info = some_dictionary(data:getXRef():getDocInfo():getDict(),document) - 
document.Catalog = Catalog - document.Info = Info - -- document.catalog = Catalog - -- a few handy helper tables - document.pages = delayed(document,"pages", function() return getpages(document) end) - document.destinations = delayed(document,"destinations", function() return getnames(document,Catalog.Names and Catalog.Names.Dests) end) - document.javascripts = delayed(document,"javascripts", function() return getnames(document,Catalog.Names and Catalog.Names.JS) end) - document.widgets = delayed(document,"widgets", function() return getnames(document,Catalog.Names and Catalog.Names.AcroForm) end) - document.embeddedfiles = delayed(document,"embeddedfiles",function() return getnames(document,Catalog.Names and Catalog.Names.EmbeddedFiles) end) - document.layers = delayed(document,"layers", function() return getlayers(document) end) - else - document = false - end - loaded[filename] = document - statistics.stoptiming(lpdf.epdf) - -- print(statistics.elapsedtime(lpdf.epdf)) - end - return document -end - --- for k, v in next, expand(t) do - -function lpdf.epdf.expand(t) - if type(t) == "table" then - local dummy = t.dummy - end - return t -end - --- helpers - --- function lpdf.epdf.getdestinationpage(document,name) --- local destination = document.data:findDest(name) --- return destination and destination.number --- end +if not modules then modules = { } end modules ['lpdf-epd'] = { + version = 1.001, + comment = "companion to lpdf-epa.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This is an experimental layer around the epdf library. The reason for +-- this layer is that I want to be independent of the library (which +-- implements a selection of what a file provides) and also because I +-- want an interface closer to Lua's table model while the API stays +-- close to the original xpdf library. Of course, after prototyping a +-- solution, we can optimize it using the low level epdf accessors. + +-- It will be handy when we have a __length and __next that can trigger +-- the resolve till then we will provide .n as #. + +-- As there can be references to the parent we cannot expand a tree. I +-- played with some expansion variants but it does to pay off. + +-- Maybe we need a close(). +-- We cannot access all destinations in one run. + +local setmetatable, rawset, rawget, tostring, tonumber = setmetatable, rawset, rawget, tostring, tonumber +local lower, match, char, find, sub = string.lower, string.match, string.char, string.find, string.sub +local concat = table.concat +local toutf = string.toutf + +local report_epdf = logs.reporter("epdf") + +-- a bit of protection + +local limited = false + +directives.register("system.inputmode", function(v) + if not limited then + local i_limiter = io.i_limiter(v) + if i_limiter then + epdf.open = i_limiter.protect(epdf.open) + limited = true + end + end +end) + +-- + +function epdf.type(o) + local t = lower(match(tostring(o),"[^ :]+")) + return t or "?" 
+end + +lpdf = lpdf or { } +local lpdf = lpdf + +lpdf.epdf = { } + +local checked_access + +local function prepare(document,d,t,n,k) + for i=1,n do + local v = d:getVal(i) + local r = d:getValNF(i) + if r:getTypeName() == "ref" then + r = r:getRef().num + local c = document.cache[r] + if c then + -- + else + c = checked_access[v:getTypeName()](v,document,r) + if c then + document.cache[r] = c + document.xrefs[c] = r + end + end + t[d:getKey(i)] = c + else + t[d:getKey(i)] = checked_access[v:getTypeName()](v,document) + end + end + getmetatable(t).__index = nil + return t[k] +end + +local function some_dictionary(d,document,r) + local n = d and d:getLength() or 0 + if n > 0 then + local t = { } + setmetatable(t, { __index = function(t,k) return prepare(document,d,t,n,k) end } ) + return t + end +end + +local done = { } + +local function prepare(document,a,t,n,k) + for i=1,n do + local v = a:get(i) + local r = a:getNF(i) + if v:getTypeName() == "null" then + -- TH: weird, but appears possible + elseif r:getTypeName() == "ref" then + r = r:getRef().num + local c = document.cache[r] + if c then + -- + else + c = checked_access[v:getTypeName()](v,document,r) + document.cache[r] = c + document.xrefs[c] = r + end + t[i] = c + else + t[i] = checked_access[v:getTypeName()](v,document) + end + end + getmetatable(t).__index = nil + return t[k] +end + +local function some_array(a,document,r) + local n = a and a:getLength() or 0 + if n > 0 then + local t = { n = n } + setmetatable(t, { __index = function(t,k) return prepare(document,a,t,n,k) end } ) + return t + end +end + +local function streamaccess(s,_,what) + if not what or what == "all" or what == "*all" then + local t, n = { }, 0 + s:streamReset() + while true do + local c = s:streamGetChar() + if c < 0 then + break + else + n = n + 1 + t[n] = char(c) + end + end + return concat(t) + end +end + +local function some_stream(d,document,r) + if d then + d:streamReset() + local s = some_dictionary(d:streamGetDict(),document,r) + getmetatable(s).__call = function(...) return streamaccess(d,...) 
end + return s + end +end + +-- we need epdf.getBool + +checked_access = { + dictionary = function(d,document,r) + return some_dictionary(d:getDict(),document,r) + end, + array = function(a,document,r) + return some_array(a:getArray(),document,r) + end, + stream = function(v,document,r) + return some_stream(v,document,r) + end, + real = function(v) + return v:getReal() + end, + integer = function(v) + return v:getNum() + end, + string = function(v) + return toutf(v:getString()) + end, + boolean = function(v) + return v:getBool() + end, + name = function(v) + return v:getName() + end, + ref = function(v) + return v:getRef() + end, + null = function() + return nil + end, +} + +-- checked_access.real = epdf.real +-- checked_access.integer = epdf.integer +-- checked_access.string = epdf.string +-- checked_access.boolean = epdf.boolean +-- checked_access.name = epdf.name +-- checked_access.ref = epdf.ref + +local function getnames(document,n,target) -- direct + if n then + local Names = n.Names + if Names then + if not target then + target = { } + end + for i=1,Names.n,2 do + target[Names[i]] = Names[i+1] + end + else + local Kids = n.Kids + if Kids then + for i=1,Kids.n do + target = getnames(document,Kids[i],target) + end + end + end + return target + end +end + +local function getkids(document,n,target) -- direct + if n then + local Kids = n.Kids + if Kids then + for i=1,Kids.n do + target = getkids(document,Kids[i],target) + end + elseif target then + target[#target+1] = n + else + target = { n } + end + return target + end +end + +-- /OCProperties << +-- /OCGs [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ] +-- /D << +-- /Order [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ] +-- /ON [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ] +-- /OFF [ ] +-- >> +-- >> + +local function getlayers(document) + local properties = document.Catalog.OCProperties + if properties then + local layers = properties.OCGs + if layers then + local t = { } + local n = layers.n + for i=1,n do + local layer = layers[i] +--~ print(document.xrefs[layer]) + t[i] = layer.Name + end + t.n = n + return t + end + end +end + +local function getpages(document) + local data = document.data + local xrefs = document.xrefs + local cache = document.cache + local cata = data:getCatalog() + local xref = data:getXRef() + local pages = { } + local nofpages = cata:getNumPages() + for pagenumber=1,nofpages do + local pagereference = cata:getPageRef(pagenumber).num + local pagedata = some_dictionary(xref:fetch(pagereference,0):getDict(),document,pagereference) + if pagedata then + pagedata.number = pagenumber + pages[pagenumber] = pagedata + xrefs[pagedata] = pagereference + cache[pagereference] = pagedata + else + report_epdf("missing pagedata at slot %i",i) + end + end + pages.n = nofpages + return pages +end + +-- loader + +local function delayed(document,tag,f) + local t = { } + setmetatable(t, { __index = function(t,k) + local result = f() + if result then + document[tag] = result + return result[k] + end + end } ) + return t +end + +local loaded = { } + +function lpdf.epdf.load(filename) + local document = loaded[filename] + if not document then + statistics.starttiming(lpdf.epdf) + local data = epdf.open(filename) -- maybe resolvers.find_file + if data then + document = { + filename = filename, + cache = { }, + xrefs = { }, + data = data, + } + local Catalog = some_dictionary(data:getXRef():getCatalog():getDict(),document) + local Info = some_dictionary(data:getXRef():getDocInfo():getDict(),document) + 
document.Catalog = Catalog + document.Info = Info + -- document.catalog = Catalog + -- a few handy helper tables + document.pages = delayed(document,"pages", function() return getpages(document) end) + document.destinations = delayed(document,"destinations", function() return getnames(document,Catalog.Names and Catalog.Names.Dests) end) + document.javascripts = delayed(document,"javascripts", function() return getnames(document,Catalog.Names and Catalog.Names.JS) end) + document.widgets = delayed(document,"widgets", function() return getnames(document,Catalog.Names and Catalog.Names.AcroForm) end) + document.embeddedfiles = delayed(document,"embeddedfiles",function() return getnames(document,Catalog.Names and Catalog.Names.EmbeddedFiles) end) + document.layers = delayed(document,"layers", function() return getlayers(document) end) + else + document = false + end + loaded[filename] = document + statistics.stoptiming(lpdf.epdf) + -- print(statistics.elapsedtime(lpdf.epdf)) + end + return document +end + +-- for k, v in next, expand(t) do + +function lpdf.epdf.expand(t) + if type(t) == "table" then + local dummy = t.dummy + end + return t +end + +-- helpers + +-- function lpdf.epdf.getdestinationpage(document,name) +-- local destination = document.data:findDest(name) +-- return destination and destination.number +-- end diff --git a/tex/context/base/lpdf-fld.lua b/tex/context/base/lpdf-fld.lua index a9b9fd72d..0a15bb850 100644 --- a/tex/context/base/lpdf-fld.lua +++ b/tex/context/base/lpdf-fld.lua @@ -1,1305 +1,1305 @@ -if not modules then modules = { } end modules ['lpdf-fld'] = { - version = 1.001, - comment = "companion to lpdf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- The problem with widgets is that so far each version of acrobat --- has some rendering problem. I tried to keep up with this but --- it makes no sense to do so as one cannot rely on the viewer --- not changing. Especially Btn fields are tricky as their appearences --- need to be synchronized in the case of children but e.g. acrobat --- 10 does not retain the state and forces a check symbol. If you --- make a file in acrobat then it has MK entries that seem to overload --- the already present appearance streams (they're probably only meant for --- printing) as it looks like the viewer has some fallback on (auto --- generated) MK behaviour built in. So ... hard to test. Unfortunately --- not even the default appearance is generated. This will probably be --- solved at some point. --- --- Also, for some reason the viewer does not always show custom appearances --- when fields are being rolled over or clicked upon, and circles or checks --- pop up when you don't expect them. I fear that this kind of instability --- eventually will kill pdf forms. After all, the manual says: "individual --- annotation handlers may ignore this entry and provide their own appearances" --- and one might wonder what 'individual' means here, but effectively this --- renders the whole concept of appearances useless. --- --- Okay, here is one observation. A pdf file contains objects and one might --- consider each one to be a static entity when read in. However, acrobat --- starts rendering and seems to manipulate (appearance streams) of objects --- in place (this is visible when the file is saved again). And, combined --- with some other caching and hashing, this might give side effects for --- shared objects. 
So, it seems that for some cases one can best be not too --- clever and not share but duplicate information. Of course this defeats the --- whole purpose of these objects. Of course I can be wrong. --- --- A rarther weird side effect of the viewer is that the highlighting of fields --- obscures values, unless you uses one of the BS variants, and this makes --- custum appearances rather useless as there is no way to control this apart --- from changing the viewer preferences. It could of course be a bug but it would --- be nice if the highlighting was at least transparent. I have no clue why the --- built in shapes work ok (some xform based appearances are generated) while --- equally valid other xforms fail. It looks like acrobat appearances come on --- top (being refered to in the MK) while custom ones are behind the highlight --- rectangle. One can disable the "Show border hover color for fields" option --- in the preferences. If you load java-imp-rhh this side effect gets disabled --- and you get what you expect (it took me a while to figure out this hack). --- --- When highlighting is enabled, those default symbols flash up, so it looks --- like we have some inteference between this setting and custom appearances. --- --- Anyhow, the NeedAppearances is really needed in order to get a rendering --- for printing especially when highlighting (those colorfull foregrounds) is --- on. - -local gmatch, lower, format = string.gmatch, string.lower, string.format -local lpegmatch = lpeg.match -local utfchar = utf.char -local bpfactor, todimen = number.dimenfactors.bp, string.todimen - -local trace_fields = false trackers.register("backends.fields", function(v) trace_fields = v end) - -local report_fields = logs.reporter("backend","fields") - -local backends, lpdf = backends, lpdf - -local variables = interfaces.variables -local context = context - -local references = structures.references -local settings_to_array = utilities.parsers.settings_to_array - -local pdfbackend = backends.pdf - -local nodeinjections = pdfbackend.nodeinjections -local codeinjections = pdfbackend.codeinjections -local registrations = pdfbackend.registrations - -local registeredsymbol = codeinjections.registeredsymbol - -local pdfstream = lpdf.stream -local pdfdictionary = lpdf.dictionary -local pdfarray = lpdf.array -local pdfreference = lpdf.reference -local pdfunicode = lpdf.unicode -local pdfstring = lpdf.string -local pdfconstant = lpdf.constant -local pdftoeight = lpdf.toeight -local pdfflushobject = lpdf.flushobject -local pdfshareobjectreference = lpdf.shareobjectreference -local pdfshareobject = lpdf.shareobject -local pdfreserveobject = lpdf.reserveobject -local pdfreserveannotation = lpdf.reserveannotation -local pdfaction = lpdf.action - -local hpack_node = node.hpack - -local nodepool = nodes.pool - -local pdfannotation_node = nodepool.pdfannotation - -local submitoutputformat = 0 -- 0=unknown 1=HTML 2=FDF 3=XML => not yet used, needs to be checked - -local pdf_widget = pdfconstant("Widget") -local pdf_tx = pdfconstant("Tx") -local pdf_ch = pdfconstant("Ch") -local pdf_btn = pdfconstant("Btn") ------ pdf_yes = pdfconstant("Yes") -local pdf_off = pdfconstant("Off") -local pdf_p = pdfconstant("P") -- None Invert Outline Push -local pdf_n = pdfconstant("N") -- None Invert Outline Push --- -local pdf_no_rect = pdfarray { 0, 0, 0, 0 } - -local splitter = lpeg.splitat("=>") - -local formats = { - html = 1, fdf = 2, xml = 3, -} - -function codeinjections.setformsmethod(name) - submitoutputformat = 
formats[lower(name)] or formats.xml -end - -local flag = { -- /Ff - ReadOnly = 1, -- 1 - Required = 2, -- 2 - NoExport = 4, -- 3 - MultiLine = 4096, -- 13 - Password = 8192, -- 14 - NoToggleToOff = 16384, -- 15 - Radio = 32768, -- 16 - PushButton = 65536, -- 17 - PopUp = 131072, -- 18 - Edit = 262144, -- 19 - Sort = 524288, -- 20 - FileSelect = 1048576, -- 21 - DoNotSpellCheck = 4194304, -- 23 - DoNotScroll = 8388608, -- 24 - Comb = 16777216, -- 25 - RichText = 33554432, -- 26 - RadiosInUnison = 33554432, -- 26 - CommitOnSelChange = 67108864, -- 27 -} - -local plus = { -- /F - Invisible = 1, -- 1 - Hidden = 2, -- 2 - Printable = 4, -- 3 - Print = 4, -- 3 - NoZoom = 8, -- 4 - NoRotate = 16, -- 5 - NoView = 32, -- 6 - ReadOnly = 64, -- 7 - Locked = 128, -- 8 - ToggleNoView = 256, -- 9 - LockedContents = 512, -- 10, - AutoView = 256, -- 288 (6+9) -} - --- todo: check what is interfaced - -flag.readonly = flag.ReadOnly -flag.required = flag.Required -flag.protected = flag.Password -flag.sorted = flag.Sort -flag.unavailable = flag.NoExport -flag.nocheck = flag.DoNotSpellCheck -flag.fixed = flag.DoNotScroll -flag.file = flag.FileSelect - -plus.hidden = plus.Hidden -plus.printable = plus.Printable -plus.auto = plus.AutoView - --- some day .. lpeg with function or table - -local function fieldflag(specification) -- /Ff - local o, n = specification.option, 0 - if o and o ~= "" then - for f in gmatch(o,"[^, ]+") do - n = n + (flag[f] or 0) - end - end - return n -end - -local function fieldplus(specification) -- /F - local o, n = specification.option, 0 - if o and o ~= "" then - for p in gmatch(o,"[^, ]+") do - n = n + (plus[p] or 0) - end - end --- n = n + 4 - return n -end - -local function checked(what) - local set, bug = references.identify("",what) - if not bug and #set > 0 then - local r, n = pdfaction(set) - return pdfshareobjectreference(r) - end -end - -local function fieldactions(specification) -- share actions - local d, a = { }, nil - a = specification.mousedown - or specification.clickin if a and a ~= "" then d.D = checked(a) end - a = specification.mouseup - or specification.clickout if a and a ~= "" then d.U = checked(a) end - a = specification.regionin if a and a ~= "" then d.E = checked(a) end -- Enter - a = specification.regionout if a and a ~= "" then d.X = checked(a) end -- eXit - a = specification.afterkey if a and a ~= "" then d.K = checked(a) end - a = specification.format if a and a ~= "" then d.F = checked(a) end - a = specification.validate if a and a ~= "" then d.V = checked(a) end - a = specification.calculate if a and a ~= "" then d.C = checked(a) end - a = specification.focusin if a and a ~= "" then d.Fo = checked(a) end - a = specification.focusout if a and a ~= "" then d.Bl = checked(a) end - a = specification.openpage if a and a ~= "" then d.PO = checked(a) end - a = specification.closepage if a and a ~= "" then d.PC = checked(a) end - -- a = specification.visiblepage if a and a ~= "" then d.PV = checked(a) end - -- a = specification.invisiblepage if a and a ~= "" then d.PI = checked(a) end - return next(d) and pdfdictionary(d) -end - --- fonts and color - -local pdfdocencodingvector, pdfdocencodingcapsule - --- The pdf doc encoding vector is needed in order to --- trigger propper unicode. Interesting is that when --- a glyph is not in the vector, it is still visible --- as it is taken from some other font. Messy. - --- To be checked: only when text/line fields. 
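-- Editorial note, not part of the patch: fieldactions above collects the
-- user level event names of a field specification into the /AA (additional
-- actions) dictionary of the widget annotation. A minimal standalone sketch
-- of that mapping; the table name aa_keys is illustrative, the keys are the
-- ones used in the code above:

local aa_keys = {
    mousedown = "D", clickin   = "D",  -- mouse down
    mouseup   = "U", clickout  = "U",  -- mouse up
    regionin  = "E",                   -- cursor enters the annotation area
    regionout = "X",                   -- cursor exits the annotation area
    afterkey  = "K",
    format    = "F",
    validate  = "V",
    calculate = "C",
    focusin   = "Fo",
    focusout  = "Bl",
    openpage  = "PO",
    closepage = "PC",
}

-- so a specification with clickin="..." and validate="..." ends up with D
-- and V entries in /AA, each resolved to a shared action reference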
- -local function checkpdfdocencoding() - report_fields("adding pdfdoc encoding vector") - local encoding = dofile(resolvers.findfile("lpdf-enc.lua")) -- no checking, fatal if not present - pdfdocencodingvector = pdfreference(pdfflushobject(encoding)) - local capsule = pdfdictionary { - PDFDocEncoding = pdfdocencodingvector - } - pdfdocencodingcapsule = pdfreference(pdfflushobject(capsule)) - checkpdfdocencoding = function() end -end - -local fontnames = { - rm = { - tf = "Times-Roman", - bf = "Times-Bold", - it = "Times-Italic", - sl = "Times-Italic", - bi = "Times-BoldItalic", - bs = "Times-BoldItalic", - }, - ss = { - tf = "Helvetica", - bf = "Helvetica-Bold", - it = "Helvetica-Oblique", - sl = "Helvetica-Oblique", - bi = "Helvetica-BoldOblique", - bs = "Helvetica-BoldOblique", - }, - tt = { - tf = "Courier", - bf = "Courier-Bold", - it = "Courier-Oblique", - sl = "Courier-Oblique", - bi = "Courier-BoldOblique", - bs = "Courier-BoldOblique", - }, - symbol = { - dingbats = "ZapfDingbats", - } -} - -local usedfonts = { } - -local function fieldsurrounding(specification) - local fontsize = specification.fontsize or "12pt" - local fontstyle = specification.fontstyle or "rm" - local fontalternative = specification.fontalternative or "tf" - local colorvalue = specification.colorvalue - local s = fontnames[fontstyle] - if not s then - fontstyle, s = "rm", fontnames.rm - end - local a = s[fontalternative] - if not a then - alternative, a = "tf", s.tf - end - local tag = fontstyle .. fontalternative - fontsize = todimen(fontsize) - fontsize = fontsize and (bpfactor * fontsize) or 12 - fontraise = 0.1 * fontsize -- todo: figure out what the natural one is and compensate for strutdp - local fontcode = format("%0.4f Tf %0.4f Ts",fontsize,fontraise) - -- we could test for colorvalue being 1 (black) and omit it then - local colorcode = lpdf.color(3,colorvalue) -- we force an rgb color space - if trace_fields then - report_fields("using font, style %a, alternative %a, size %p, tag %a, code %a",fontstyle,fontalternative,fontsize,tag,fontcode) - report_fields("using color, value %a, code %a",colorvalue,colorcode) - end - local stream = pdfstream { - pdfconstant(tag), - format("%s %s",fontcode,colorcode) - } - usedfonts[tag] = a -- the name - -- move up with "x.y Ts" - return tostring(stream) -end - -local function registerfonts() - if next(usedfonts) then - checkpdfdocencoding() -- already done - local d = pdfdictionary() - local pdffonttype, pdffontsubtype = pdfconstant("Font"), pdfconstant("Type1") - for tag, name in next, usedfonts do - local f = pdfdictionary { - Type = pdffonttype, - Subtype = pdffontsubtype, - Name = pdfconstant(tag), - BaseFont = pdfconstant(name), - Encoding = pdfdocencodingvector, - } - d[tag] = pdfreference(pdfflushobject(f)) - end - return d - end -end - --- symbols - -local function fieldappearances(specification) - -- todo: caching - local values = specification.values - local default = specification.default -- todo - if not values then - -- error - return - end - local v = settings_to_array(values) - local n, r, d - if #v == 1 then - n, r, d = v[1], v[1], v[1] - elseif #v == 2 then - n, r, d = v[1], v[1], v[2] - else - n, r, d = v[1], v[2], v[3] - end - local appearance = pdfdictionary { - N = registeredsymbol(n), R = registeredsymbol(r), D = registeredsymbol(d), - } - return pdfshareobjectreference(appearance) --- return pdfreference(pdfflushobject(appearance)) -end - -local YesorOn = "Yes" -- somehow On is not always working out well any longer (why o why this change) - 
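-- Editorial note, not part of the patch: fieldsurrounding above builds the
-- /DA (default appearance) string of a field: a font tag, a Tf size, a Ts
-- text rise of a tenth of the size, and a color operator. A standalone
-- sketch of the same formatting; sketch_da is an illustrative name and the
-- "0 g" gray operator stands in for the rgb code that lpdf.color produces:

local format = string.format

local function sketch_da(tag,fontsize,colorcode)
    local fontraise = 0.1 * fontsize -- same heuristic as in the code above
    return format("/%s %0.4f Tf %0.4f Ts %s",tag,fontsize,fontraise,colorcode)
end

-- sketch_da("tttf",12,"0 g") --> "/tttf 12.0000 Tf 1.2000 Ts 0 g"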
--- beware ... maybe we should have unique /Yes1 ... we will probably --- change this one too. --- --- TODO: the same as radio .. play safe and use different names. - -local function fieldstates_check(specification,forceyes,values,default,yesdefault) - -- we don't use Opt here (too messy for radio buttons) - local values, default = values or specification.values, default or specification.default - if not values or values == "" then - -- error - return - end - local v = settings_to_array(values) - local yes, off, yesn, yesr, yesd, offn, offr, offd - if #v == 1 then - yes, off = v[1], v[1] - else - yes, off = v[1], v[2] - end - local yesshown, yesvalue = lpegmatch(splitter,yes) - if not (yesshown and yesvalue) then - yesshown = yes, yes - end - yes = settings_to_array(yesshown) - local offshown, offvalue = lpegmatch(splitter,off) - if not (offshown and offvalue) then - offshown = off, off - end - off = settings_to_array(offshown) - if #yes == 1 then - yesn, yesr, yesd = yes[1], yes[1], yes[1] - elseif #yes == 2 then - yesn, yesr, yesd = yes[1], yes[1], yes[2] - else - yesn, yesr, yesd = yes[1], yes[2], yes[3] - end - if #off == 1 then - offn, offr, offd = off[1], off[1], off[1] - elseif #off == 2 then - offn, offr, offd = off[1], off[1], off[2] - else - offn, offr, offd = off[1], off[2], off[3] - end - if not yesvalue then - yesvalue = yesdefault or yesn - end - if not offvalue then - offvalue = offn - end - if forceyes == true then - forceyes = YesorOn -- spec likes Yes more but we've used On for ages now - else - -- false or string - end - if default == yesn then - default = pdfconstant(forceyes or yesn) - else - default = pdf_off - end - local appearance - if false then -- needs testing - appearance = pdfdictionary { -- maybe also cache components - N = pdfshareobjectreference(pdfdictionary { [forceyes or yesn] = registeredsymbol(yesn), Off = registeredsymbol(offn) }), - R = pdfshareobjectreference(pdfdictionary { [forceyes or yesr] = registeredsymbol(yesr), Off = registeredsymbol(offr) }), - D = pdfshareobjectreference(pdfdictionary { [forceyes or yesd] = registeredsymbol(yesd), Off = registeredsymbol(offd) }), - } - else - appearance = pdfdictionary { -- maybe also cache components - N = pdfdictionary { [forceyes or yesn] = registeredsymbol(yesn), Off = registeredsymbol(offn) }, - R = pdfdictionary { [forceyes or yesr] = registeredsymbol(yesr), Off = registeredsymbol(offr) }, - D = pdfdictionary { [forceyes or yesd] = registeredsymbol(yesd), Off = registeredsymbol(offd) } - } - end - local appearanceref = pdfshareobjectreference(appearance) - -- local appearanceref = pdfreference(pdfflushobject(appearance)) - return appearanceref, default, yesvalue -end - --- It looks like there is always a (MK related) symbol used and that --- the appearances are only used as ornaments behind a symbol. So, --- contrary to what we did when widgets showed up, we now limit --- ourself to more dumb definitions. Especially when highlighting is --- enabled weird interferences happen. So, we play safe (some nice code --- has been removed that worked well till recently). 
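-- Editorial note, not part of the patch: the yes/off values handled above
-- follow a "shown=>export" convention and are split with the splitter
-- defined earlier as lpeg.splitat("=>"). A minimal illustration, assuming
-- the ConTeXt lpeg extensions (which provide lpeg.splitat) are loaded:

local lpegmatch = lpeg.match
local splitter  = lpeg.splitat("=>")

local shown, value = lpegmatch(splitter,"MySymbol=>on")
-- shown == "MySymbol", value == "on"; when there is no "=>" the second
-- capture is missing and the code above falls back to the plain name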
- -local function fieldstates_radio(specification,name,parent) - local values = values or specification.values - local default = default or parent.default -- specification.default - if not values or values == "" then - -- error - return - end - local v = settings_to_array(values) - local yes, off, yesn, yesr, yesd, offn, offr, offd - if #v == 1 then - yes, off = v[1], v[1] - else - yes, off = v[1], v[2] - end - -- yes keys might be the same in the three appearances within a field - -- but can best be different among fields ... don't ask why - local yessymbols, yesvalue = lpegmatch(splitter,yes) -- n,r,d=>x - if not (yessymbols and yesvalue) then - yessymbols = yes - end - if not yesvalue then - yesvalue = name - end - yessymbols = settings_to_array(yessymbols) - if #yessymbols == 1 then - yesn = yessymbols[1] - yesr = yesn - yesd = yesr - elseif #yessymbols == 2 then - yesn = yessymbols[1] - yesr = yessymbols[2] - yesd = yesr - else - yesn = yessymbols[1] - yesr = yessymbols[2] - yesd = yessymbols[3] - end - -- we don't care about names, as all will be /Off - local offsymbols = lpegmatch(splitter,off) or off - offsymbols = settings_to_array(offsymbols) - if #offsymbols == 1 then - offn = offsymbols[1] - offr = offn - offd = offr - elseif #offsymbols == 2 then - offn = offsymbols[1] - offr = offsymbols[2] - offd = offr - else - offn = offsymbols[1] - offr = offsymbols[2] - offd = offsymbols[3] - end - if default == name then - default = pdfconstant(name) - else - default = pdf_off - end - -- - local appearance - if false then -- needs testing - appearance = pdfdictionary { -- maybe also cache components - N = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesn), Off = registeredsymbol(offn) }), - R = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesr), Off = registeredsymbol(offr) }), - D = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesd), Off = registeredsymbol(offd) }), - } - else - appearance = pdfdictionary { -- maybe also cache components - N = pdfdictionary { [name] = registeredsymbol(yesn), Off = registeredsymbol(offn) }, - R = pdfdictionary { [name] = registeredsymbol(yesr), Off = registeredsymbol(offr) }, - D = pdfdictionary { [name] = registeredsymbol(yesd), Off = registeredsymbol(offd) } - } - end - local appearanceref = pdfshareobjectreference(appearance) -- pdfreference(pdfflushobject(appearance)) - return appearanceref, default, yesvalue -end - -local function fielddefault(field) - local default = field.default - if not default or default == "" then - local values = settings_to_array(field.values) - default = values[1] - end - if not default or default == "" then - return pdf_off - else - return pdfconstant(default) - end -end - -local function fieldoptions(specification) - local values = specification.values - local default = specification.default - if values then - local v = settings_to_array(values) - for i=1,#v do - local vi = v[i] - local shown, value = lpegmatch(splitter,vi) - if shown and value then - v[i] = pdfarray { pdfunicode(value), shown } - else - v[i] = pdfunicode(v[i]) - end - end - return pdfarray(v) - end -end - -local mapping = { - -- acrobat compliant (messy, probably some pdfdoc encoding interference here) - check = "4", -- 0x34 - circle = "l", -- 0x6C - cross = "8", -- 0x38 - diamond = "u", -- 0x75 - square = "n", -- 0x6E - star = "H", -- 0x48 -} - -local function todingbat(n) - if n and n ~= "" then - return mapping[n] or "" - end -end - --- local zero_bc = pdfarray { 0, 0, 0 } --- local 
zero_bg = pdfarray { 1, 1, 1 } - -local function fieldrendering(specification) - local bvalue = tonumber(specification.backgroundcolorvalue) - local fvalue = tonumber(specification.framecolorvalue) - local svalue = specification.fontsymbol - if bvalue or fvalue or (svalue and svalue ~= "") then - return pdfdictionary { - BG = bvalue and pdfarray { lpdf.colorvalues(3,bvalue) } or nil, -- or zero_bg, - BC = fvalue and pdfarray { lpdf.colorvalues(3,fvalue) } or nil, -- or zero_bc, - CA = svalue and pdfstring (svalue) or nil, - } - end -end - --- layers - -local function fieldlayer(specification) -- we can move this in line - local layer = specification.layer - return (layer and lpdf.layerreference(layer)) or nil -end - --- defining - -local fields, radios, clones, fieldsets, calculationset = { }, { }, { }, { }, nil - -local xfdftemplate = [[ - - - - - -%s - - -]] - -function codeinjections.exportformdata(name) - local result = { } - for k, v in table.sortedhash(fields) do - result[#result+1] = format(" %s",v.name or k,v.default or "") - end - local base = file.basename(tex.jobname) - local xfdf = format(xfdftemplate,base,table.concat(result,"\n")) - if not name or name == "" then - name = base - end - io.savedata(file.addsuffix(name,"xfdf"),xfdf) -end - -function codeinjections.definefieldset(tag,list) - fieldsets[tag] = list -end - -function codeinjections.getfieldset(tag) - return fieldsets[tag] -end - -local function fieldsetlist(tag) - if tag then - local ft = fieldsets[tag] - if ft then - local a = pdfarray() - for name in gmatch(list,"[^, ]+") do - local f = field[name] - if f and f.pobj then - a[#a+1] = pdfreference(f.pobj) - end - end - return a - end - end -end - -function codeinjections.setfieldcalculationset(tag) - calculationset = tag -end - -local function predefinesymbols(specification) - local values = specification.values - if values then - local symbols = settings_to_array(values) - for i=1,#symbols do - local symbol = symbols[i] - local a, b = lpegmatch(splitter,symbol) - codeinjections.presetsymbol(a or symbol) - end - end -end - -function codeinjections.getdefaultfieldvalue(name) - local f = fields[name] - if f then - local values = f.values - local default = f.default - if not default or default == "" then - local symbols = settings_to_array(values) - local symbol = symbols[1] - if symbol then - local a, b = lpegmatch(splitter,symbol) -- splits at => - default = a or symbol - end - end - return default - end -end - -function codeinjections.definefield(specification) - local n = specification.name - local f = fields[n] - if not f then - local fieldtype = specification.type - if not fieldtype then - if trace_fields then - report_fields("invalid definition for %a, unknown type",n) - end - elseif fieldtype == "radio" then - local values = specification.values - if values and values ~= "" then - values = settings_to_array(values) - for v=1,#values do - radios[values[v]] = { parent = n } - end - fields[n] = specification - if trace_fields then - report_fields("defining %a as type %a",n,"radio") - end - elseif trace_fields then - report_fields("invalid definition of radio %a, missing values",n) - end - elseif fieldtype == "sub" then - -- not in main field list ! 
- local radio = radios[n] - if radio then - -- merge specification - for key, value in next, specification do - radio[key] = value - end - if trace_fields then - local p = radios[n] and radios[n].parent - report_fields("defining %a as type sub of radio %a",n,p) - end - elseif trace_fields then - report_fields("invalid definition of radio sub %a, no parent given",n) - end - predefinesymbols(specification) - elseif fieldtype == "text" or fieldtype == "line" then - fields[n] = specification - if trace_fields then - report_fields("defining %a as type %a",n,fieldtype) - end - if specification.values ~= "" and specification.default == "" then - specification.default, specification.values = specification.values, nil - end - else - fields[n] = specification - if trace_fields then - report_fields("defining %a as type %a",n,fieldtype) - end - predefinesymbols(specification) - end - elseif trace_fields then - report_fields("invalid definition for %a, already defined",n) - end -end - -function codeinjections.clonefield(specification) -- obsolete - local p, c, v = specification.parent, specification.children, specification.alternative - if not p or not c then - if trace_fields then - report_fields("invalid clone, children %a, parent %a, alternative %a",c,p,v) - end - return - end - local x = fields[p] or radios[p] - if not x then - if trace_fields then - report_fields("invalid clone, unknown parent %a",p) - end - return - end - for n in gmatch(c,"[^, ]+") do - local f, r, c = fields[n], radios[n], clones[n] - if f or r or c then - if trace_fields then - report_fields("already cloned, child %a, parent %a, alternative %a",n,p,v) - end - else - if trace_fields then - report_fields("cloning, child %a, parent %a, alternative %a",n,p,v) - end - clones[n] = specification - predefinesymbols(specification) - end - end -end - -function codeinjections.getfieldcategory(name) - local f = fields[name] or radios[name] or clones[name] - if f then - local g = f.category - if not g or g == "" then - local v, p, t = f.alternative, f.parent, f.type - if v == "clone" or v == "copy" then - f = fields[p] or radios[p] - g = f and f.category - elseif t == "sub" then - f = fields[p] - g = f and f.category - end - end - return g - end -end - --- - -function codeinjections.validfieldcategory(name) - return fields[name] or radios[name] or clones[name] -end - -function codeinjections.validfieldset(name) - return fieldsets[tag] -end - -function codeinjections.validfield(name) - return fields[name] -end - --- - -local alignments = { - flushleft = 0, right = 0, - center = 1, middle = 1, - flushright = 2, left = 2, -} - -local function fieldalignment(specification) - return alignments[specification.align] or 0 -end - -local function enhance(specification,option) - local so = specification.option - if so and so ~= "" then - specification.option = so .. "," .. 
option - else - specification.option = option - end - return specification -end - --- finish - -local collected = pdfarray() -local forceencoding = false - -local function finishfields() - local sometext = forceencoding - for name, field in next, fields do - local kids = field.kids - if kids then - pdfflushobject(field.kidsnum,kids) - end - local opt = field.opt - if opt then - pdfflushobject(field.optnum,opt) - end - local type = field.type - if not sometext and (type == "text" or type == "line") then - sometext = true - end - end - for name, field in next, radios do - local kids = field.kids - if kids then - pdfflushobject(field.kidsnum,kids) - end - local opt = field.opt - if opt then - pdfflushobject(field.optnum,opt) - end - end - if #collected > 0 then - local acroform = pdfdictionary { - NeedAppearances = true, - Fields = pdfreference(pdfflushobject(collected)), - CO = fieldsetlist(calculationset), - } - if sometext then - checkpdfdocencoding() - usedfonts.tttf = fontnames.tt.tf - acroform.DA = "/tttf 12 Tf 0 g" - acroform.DR = pdfdictionary { - Font = registerfonts(), - Encoding = pdfdocencodingcapsule, - } - end - lpdf.addtocatalog("AcroForm",pdfreference(pdfflushobject(acroform))) - end -end - -lpdf.registerdocumentfinalizer(finishfields,"form fields") - -local methods = { } - -function nodeinjections.typesetfield(name,specification) - local field = fields[name] or radios[name] or clones[name] - if not field then - report_fields( "unknown child %a",name) - -- unknown field - return - end - local alternative, parent = field.alternative, field.parent - if alternative == "copy" or alternative == "clone" then -- only in clones - field = fields[parent] or radios[parent] - end - local method = methods[field.type] - if method then - return method(name,specification,alternative) - else - report_fields( "unknown method %a for child %a",field.type,name) - end -end - -local function save_parent(field,specification,d,hasopt) - local kidsnum = pdfreserveobject() - d.Kids = pdfreference(kidsnum) - field.kidsnum = kidsnum - field.kids = pdfarray() - if hasopt then - local optnum = pdfreserveobject() - d.Opt = pdfreference(optnum) - field.optnum = optnum - field.opt = pdfarray() - end - local pnum = pdfflushobject(d) - field.pobj = pnum - collected[#collected+1] = pdfreference(pnum) -end - -local function save_kid(field,specification,d,optname) - local kn = pdfreserveannotation() - field.kids[#field.kids+1] = pdfreference(kn) - if optname then - local opt = field.opt - if opt then - opt[#opt+1] = optname - end - end - local width, height, depth = specification.width or 0, specification.height or 0, specification.depth - local box = hpack_node(pdfannotation_node(width,height,depth,d(),kn)) - box.width, box.height, box.depth = width, height, depth -- redundant - return box -end - -local function makelineparent(field,specification) - local text = pdfunicode(field.default) - local length = tonumber(specification.length or 0) or 0 - local d = pdfdictionary { - Subtype = pdf_widget, - T = pdfunicode(specification.title), - F = fieldplus(specification), - Ff = fieldflag(specification), - OC = fieldlayer(specification), - DA = fieldsurrounding(specification), - AA = fieldactions(specification), - FT = pdf_tx, - Q = fieldalignment(specification), - MaxLen = length == 0 and 1000 or length, - DV = text, - V = text, - } - save_parent(field,specification,d) -end - -local function makelinechild(name,specification) - local field, parent = clones[name], nil - if field then - parent = fields[field.parent] - if 
not parent.pobj then - if trace_fields then - report_fields("forcing parent text %a",parent.name) - end - makelineparent(parent,specification) - end - else - parent = fields[name] - field = parent - if not parent.pobj then - if trace_fields then - report_fields("using parent text %a",name) - end - makelineparent(parent,specification) - end - end - if trace_fields then - report_fields("using child text %a",name) - end - local d = pdfdictionary { - Subtype = pdf_widget, - Parent = pdfreference(parent.pobj), - F = fieldplus(specification), - OC = fieldlayer(specification), - DA = fieldsurrounding(specification), - AA = fieldactions(specification), - MK = fieldrendering(specification), - Q = fieldalignment(specification), - } - return save_kid(parent,specification,d) -end - -function methods.line(name,specification) - return makelinechild(name,specification) -end - -function methods.text(name,specification) - return makelinechild(name,enhance(specification,"MultiLine")) -end - -local function makechoiceparent(field,specification) - local d = pdfdictionary { - Subtype = pdf_widget, - T = pdfunicode(specification.title), - F = fieldplus(specification), - Ff = fieldflag(specification), - OC = fieldlayer(specification), - AA = fieldactions(specification), - FT = pdf_ch, - Opt = fieldoptions(field), -- todo - } - save_parent(field,specification,d) -end - -local function makechoicechild(name,specification) - local field, parent = clones[name], nil - if field then - parent = fields[field.parent] - if not parent.pobj then - if trace_fields then - report_fields("forcing parent choice %a",parent.name) - end - makechoiceparent(parent,specification,extras) - end - else - parent = fields[name] - field = parent - if not parent.pobj then - if trace_fields then - report_fields("using parent choice %a",name) - end - makechoiceparent(parent,specification,extras) - end - end - if trace_fields then - report_fields("using child choice %a",name) - end - local d = pdfdictionary { - Subtype = pdf_widget, - Parent = pdfreference(parent.pobj), - F = fieldplus(specification), - OC = fieldlayer(specification), - AA = fieldactions(specification), - } - return save_kid(parent,specification,d) -- do opt here -end - -function methods.choice(name,specification) - return makechoicechild(name,specification) -end - -function methods.popup(name,specification) - return makechoicechild(name,enhance(specification,"PopUp")) -end - -function methods.combo(name,specification) - return makechoicechild(name,enhance(specification,"PopUp,Edit")) -end - -local function makecheckparent(field,specification) - local d = pdfdictionary { - T = pdfunicode(specification.title), -- todo: when tracing use a string - F = fieldplus(specification), - Ff = fieldflag(specification), - OC = fieldlayer(specification), - AA = fieldactions(specification), - FT = pdf_btn, - V = fielddefault(field), - } - save_parent(field,specification,d,true) -end - -local function makecheckchild(name,specification) - local field, parent = clones[name], nil - if field then - parent = fields[field.parent] - if not parent.pobj then - if trace_fields then - report_fields("forcing parent check %a",parent.name) - end - makecheckparent(parent,specification,extras) - end - else - parent = fields[name] - field = parent - if not parent.pobj then - if trace_fields then - report_fields("using parent check %a",name) - end - makecheckparent(parent,specification,extras) - end - end - if trace_fields then - report_fields("using child check %a",name) - end - local d = pdfdictionary { - 
Subtype = pdf_widget, - Parent = pdfreference(parent.pobj), - F = fieldplus(specification), - OC = fieldlayer(specification), - AA = fieldactions(specification), - H = pdf_n, - } - local fontsymbol = specification.fontsymbol - if fontsymbol and fontsymbol ~= "" then - specification.fontsymbol = todingbat(fontsymbol) - specification.fontstyle = "symbol" - specification.fontalternative = "dingbats" - d.DA = fieldsurrounding(specification) - d.MK = fieldrendering(specification) - return save_kid(parent,specification,d) - else - local appearance, default, value = fieldstates_check(field,true) - d.AS = default - d.AP = appearance - return save_kid(parent,specification,d,value) - end -end - -function methods.check(name,specification) - return makecheckchild(name,specification) -end - -local function makepushparent(field,specification) -- check if we can share with the previous - local d = pdfdictionary { - Subtype = pdf_widget, - T = pdfunicode(specification.title), - F = fieldplus(specification), - Ff = fieldflag(specification), - OC = fieldlayer(specification), - AA = fieldactions(specification), - FT = pdf_btn, - AP = fieldappearances(field), - H = pdf_p, - } - save_parent(field,specification,d) -end - -local function makepushchild(name,specification) - local field, parent = clones[name], nil - if field then - parent = fields[field.parent] - if not parent.pobj then - if trace_fields then - report_fields("forcing parent push %a",parent.name) - end - makepushparent(parent,specification) - end - else - parent = fields[name] - field = parent - if not parent.pobj then - if trace_fields then - report_fields("using parent push %a",name) - end - makepushparent(parent,specification) - end - end - if trace_fields then - report_fields("using child push %a",name) - end - local fontsymbol = specification.fontsymbol - local d = pdfdictionary { - Subtype = pdf_widget, - Parent = pdfreference(field.pobj), - F = fieldplus(specification), - OC = fieldlayer(specification), - AA = fieldactions(specification), - H = pdf_p, - } - if fontsymbol and fontsymbol ~= "" then - specification.fontsymbol = todingbat(fontsymbol) - specification.fontstyle = "symbol" - specification.fontalternative = "dingbats" - d.DA = fieldsurrounding(specification) - d.MK = fieldrendering(specification) - else - d.AP = fieldappearances(field) - end - return save_kid(parent,specification,d) -end - -function methods.push(name,specification) - return makepushchild(name,enhance(specification,"PushButton")) -end - -local function makeradioparent(field,specification) --- specification = enhance(specification,"Radio,RadiosInUnison") - specification = enhance(specification,"Radio,RadiosInUnison,Print,NoToggleToOff") --- specification = enhance(specification,"Radio,Print,NoToggleToOff") - local d = pdfdictionary { - T = field.name, - FT = pdf_btn, --- F = fieldplus(specification), - Ff = fieldflag(specification), --- H = pdf_n, - V = fielddefault(field), - } - save_parent(field,specification,d,true) -end - --- local function makeradiochild(name,specification) --- local field, parent = clones[name], nil --- if field then --- field = radios[field.parent] --- parent = fields[field.parent] --- if not parent.pobj then --- if trace_fields then --- report_fields("forcing parent radio %a",parent.name) --- end --- makeradioparent(parent,parent) --- end --- else --- field = radios[name] --- if not field then --- report_fields("there is some problem with field %a",name) --- return nil --- end --- parent = fields[field.parent] --- if not parent.pobj then --- 
if trace_fields then --- report_fields("using parent radio %a",name) --- end --- makeradioparent(parent,parent) --- end --- end --- if trace_fields then --- report_fields("using child radio %a with values %a and default %a",name,field.values,field.default) --- end --- local fontsymbol = specification.fontsymbol --- fontsymbol="star" --- local d = pdfdictionary { --- Subtype = pdf_widget, --- Parent = pdfreference(parent.pobj), --- F = fieldplus(specification), --- OC = fieldlayer(specification), --- AA = fieldactions(specification), --- H = pdf_n, --- } --- if fontsymbol and fontsymbol ~= "" then --- local appearance, default, value = fieldstates_radio(field,true,false,false,name) -- false is also ok --- specification.fontsymbol = todingbat(fontsymbol) --- specification.fontstyle = "symbol" --- specification.fontalternative = "dingbats" --- d.DA = fieldsurrounding(specification) --- d.MK = fieldrendering(specification) --- d.AS = pdfconstant(value) -- default -- mandate when AP but confuses viewers --- d.AP = appearance --- return save_kid(parent,specification,d,value) --- -- return save_kid(parent,specification,d,name) --- else --- -- local appearance, default, value = fieldstates_radio(field,true) -- false is also ok --- local appearance, default, value = fieldstates_radio(field,true,false,false,name) -- false is also ok --- d.AS = default -- mandate when AP but confuses viewers --- d.AP = appearance --- return save_kid(parent,specification,d,value) --- end --- end - -local function makeradiochild(name,specification) - local field, parent = clones[name], nil - if field then - field = radios[field.parent] - parent = fields[field.parent] - if not parent.pobj then - if trace_fields then - report_fields("forcing parent radio %a",parent.name) - end - makeradioparent(parent,parent) - end - else - field = radios[name] - if not field then - report_fields("there is some problem with field %a",name) - return nil - end - parent = fields[field.parent] - if not parent.pobj then - if trace_fields then - report_fields("using parent radio %a",name) - end - makeradioparent(parent,parent) - end - end - if trace_fields then - report_fields("using child radio %a with values %a and default %a",name,field.values,field.default) - end - local fontsymbol = specification.fontsymbol - -- fontsymbol = "circle" - local d = pdfdictionary { - Subtype = pdf_widget, - Parent = pdfreference(parent.pobj), - F = fieldplus(specification), - OC = fieldlayer(specification), - AA = fieldactions(specification), - H = pdf_n, - } - if fontsymbol and fontsymbol ~= "" then - specification.fontsymbol = todingbat(fontsymbol) - specification.fontstyle = "symbol" - specification.fontalternative = "dingbats" - d.DA = fieldsurrounding(specification) - d.MK = fieldrendering(specification) - end - local appearance, default, value = fieldstates_radio(field,name,fields[field.parent]) - d.AP = appearance - d.AS = default -- /Whatever - return save_kid(parent,specification,d,value) -end - -function methods.sub(name,specification) - return makeradiochild(name,enhance(specification,"Radio,RadiosInUnison")) -end +if not modules then modules = { } end modules ['lpdf-fld'] = { + version = 1.001, + comment = "companion to lpdf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- The problem with widgets is that so far each version of acrobat +-- has some rendering problem. 
I tried to keep up with this but +-- it makes no sense to do so as one cannot rely on the viewer +-- not changing. Especially Btn fields are tricky as their appearences +-- need to be synchronized in the case of children but e.g. acrobat +-- 10 does not retain the state and forces a check symbol. If you +-- make a file in acrobat then it has MK entries that seem to overload +-- the already present appearance streams (they're probably only meant for +-- printing) as it looks like the viewer has some fallback on (auto +-- generated) MK behaviour built in. So ... hard to test. Unfortunately +-- not even the default appearance is generated. This will probably be +-- solved at some point. +-- +-- Also, for some reason the viewer does not always show custom appearances +-- when fields are being rolled over or clicked upon, and circles or checks +-- pop up when you don't expect them. I fear that this kind of instability +-- eventually will kill pdf forms. After all, the manual says: "individual +-- annotation handlers may ignore this entry and provide their own appearances" +-- and one might wonder what 'individual' means here, but effectively this +-- renders the whole concept of appearances useless. +-- +-- Okay, here is one observation. A pdf file contains objects and one might +-- consider each one to be a static entity when read in. However, acrobat +-- starts rendering and seems to manipulate (appearance streams) of objects +-- in place (this is visible when the file is saved again). And, combined +-- with some other caching and hashing, this might give side effects for +-- shared objects. So, it seems that for some cases one can best be not too +-- clever and not share but duplicate information. Of course this defeats the +-- whole purpose of these objects. Of course I can be wrong. +-- +-- A rarther weird side effect of the viewer is that the highlighting of fields +-- obscures values, unless you uses one of the BS variants, and this makes +-- custum appearances rather useless as there is no way to control this apart +-- from changing the viewer preferences. It could of course be a bug but it would +-- be nice if the highlighting was at least transparent. I have no clue why the +-- built in shapes work ok (some xform based appearances are generated) while +-- equally valid other xforms fail. It looks like acrobat appearances come on +-- top (being refered to in the MK) while custom ones are behind the highlight +-- rectangle. One can disable the "Show border hover color for fields" option +-- in the preferences. If you load java-imp-rhh this side effect gets disabled +-- and you get what you expect (it took me a while to figure out this hack). +-- +-- When highlighting is enabled, those default symbols flash up, so it looks +-- like we have some inteference between this setting and custom appearances. +-- +-- Anyhow, the NeedAppearances is really needed in order to get a rendering +-- for printing especially when highlighting (those colorfull foregrounds) is +-- on. 
+ +local gmatch, lower, format = string.gmatch, string.lower, string.format +local lpegmatch = lpeg.match +local utfchar = utf.char +local bpfactor, todimen = number.dimenfactors.bp, string.todimen + +local trace_fields = false trackers.register("backends.fields", function(v) trace_fields = v end) + +local report_fields = logs.reporter("backend","fields") + +local backends, lpdf = backends, lpdf + +local variables = interfaces.variables +local context = context + +local references = structures.references +local settings_to_array = utilities.parsers.settings_to_array + +local pdfbackend = backends.pdf + +local nodeinjections = pdfbackend.nodeinjections +local codeinjections = pdfbackend.codeinjections +local registrations = pdfbackend.registrations + +local registeredsymbol = codeinjections.registeredsymbol + +local pdfstream = lpdf.stream +local pdfdictionary = lpdf.dictionary +local pdfarray = lpdf.array +local pdfreference = lpdf.reference +local pdfunicode = lpdf.unicode +local pdfstring = lpdf.string +local pdfconstant = lpdf.constant +local pdftoeight = lpdf.toeight +local pdfflushobject = lpdf.flushobject +local pdfshareobjectreference = lpdf.shareobjectreference +local pdfshareobject = lpdf.shareobject +local pdfreserveobject = lpdf.reserveobject +local pdfreserveannotation = lpdf.reserveannotation +local pdfaction = lpdf.action + +local hpack_node = node.hpack + +local nodepool = nodes.pool + +local pdfannotation_node = nodepool.pdfannotation + +local submitoutputformat = 0 -- 0=unknown 1=HTML 2=FDF 3=XML => not yet used, needs to be checked + +local pdf_widget = pdfconstant("Widget") +local pdf_tx = pdfconstant("Tx") +local pdf_ch = pdfconstant("Ch") +local pdf_btn = pdfconstant("Btn") +----- pdf_yes = pdfconstant("Yes") +local pdf_off = pdfconstant("Off") +local pdf_p = pdfconstant("P") -- None Invert Outline Push +local pdf_n = pdfconstant("N") -- None Invert Outline Push +-- +local pdf_no_rect = pdfarray { 0, 0, 0, 0 } + +local splitter = lpeg.splitat("=>") + +local formats = { + html = 1, fdf = 2, xml = 3, +} + +function codeinjections.setformsmethod(name) + submitoutputformat = formats[lower(name)] or formats.xml +end + +local flag = { -- /Ff + ReadOnly = 1, -- 1 + Required = 2, -- 2 + NoExport = 4, -- 3 + MultiLine = 4096, -- 13 + Password = 8192, -- 14 + NoToggleToOff = 16384, -- 15 + Radio = 32768, -- 16 + PushButton = 65536, -- 17 + PopUp = 131072, -- 18 + Edit = 262144, -- 19 + Sort = 524288, -- 20 + FileSelect = 1048576, -- 21 + DoNotSpellCheck = 4194304, -- 23 + DoNotScroll = 8388608, -- 24 + Comb = 16777216, -- 25 + RichText = 33554432, -- 26 + RadiosInUnison = 33554432, -- 26 + CommitOnSelChange = 67108864, -- 27 +} + +local plus = { -- /F + Invisible = 1, -- 1 + Hidden = 2, -- 2 + Printable = 4, -- 3 + Print = 4, -- 3 + NoZoom = 8, -- 4 + NoRotate = 16, -- 5 + NoView = 32, -- 6 + ReadOnly = 64, -- 7 + Locked = 128, -- 8 + ToggleNoView = 256, -- 9 + LockedContents = 512, -- 10, + AutoView = 256, -- 288 (6+9) +} + +-- todo: check what is interfaced + +flag.readonly = flag.ReadOnly +flag.required = flag.Required +flag.protected = flag.Password +flag.sorted = flag.Sort +flag.unavailable = flag.NoExport +flag.nocheck = flag.DoNotSpellCheck +flag.fixed = flag.DoNotScroll +flag.file = flag.FileSelect + +plus.hidden = plus.Hidden +plus.printable = plus.Printable +plus.auto = plus.AutoView + +-- some day .. 
lpeg with function or table + +local function fieldflag(specification) -- /Ff + local o, n = specification.option, 0 + if o and o ~= "" then + for f in gmatch(o,"[^, ]+") do + n = n + (flag[f] or 0) + end + end + return n +end + +local function fieldplus(specification) -- /F + local o, n = specification.option, 0 + if o and o ~= "" then + for p in gmatch(o,"[^, ]+") do + n = n + (plus[p] or 0) + end + end +-- n = n + 4 + return n +end + +local function checked(what) + local set, bug = references.identify("",what) + if not bug and #set > 0 then + local r, n = pdfaction(set) + return pdfshareobjectreference(r) + end +end + +local function fieldactions(specification) -- share actions + local d, a = { }, nil + a = specification.mousedown + or specification.clickin if a and a ~= "" then d.D = checked(a) end + a = specification.mouseup + or specification.clickout if a and a ~= "" then d.U = checked(a) end + a = specification.regionin if a and a ~= "" then d.E = checked(a) end -- Enter + a = specification.regionout if a and a ~= "" then d.X = checked(a) end -- eXit + a = specification.afterkey if a and a ~= "" then d.K = checked(a) end + a = specification.format if a and a ~= "" then d.F = checked(a) end + a = specification.validate if a and a ~= "" then d.V = checked(a) end + a = specification.calculate if a and a ~= "" then d.C = checked(a) end + a = specification.focusin if a and a ~= "" then d.Fo = checked(a) end + a = specification.focusout if a and a ~= "" then d.Bl = checked(a) end + a = specification.openpage if a and a ~= "" then d.PO = checked(a) end + a = specification.closepage if a and a ~= "" then d.PC = checked(a) end + -- a = specification.visiblepage if a and a ~= "" then d.PV = checked(a) end + -- a = specification.invisiblepage if a and a ~= "" then d.PI = checked(a) end + return next(d) and pdfdictionary(d) +end + +-- fonts and color + +local pdfdocencodingvector, pdfdocencodingcapsule + +-- The pdf doc encoding vector is needed in order to +-- trigger propper unicode. Interesting is that when +-- a glyph is not in the vector, it is still visible +-- as it is taken from some other font. Messy. + +-- To be checked: only when text/line fields. 
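+
+-- A small worked example of the fieldflag and fieldplus helpers above; the
+-- option string is made up for illustration, the numbers come straight from
+-- the flag and plus tables (unknown names simply add 0):
+--
+--   local spec = { option = "Radio,NoToggleToOff,Print" }
+--   print(fieldflag(spec)) -- 32768 + 16384 = 49152 : the /Ff field flags
+--   print(fieldplus(spec)) -- 4                     : the /F annotation flags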
+ +local function checkpdfdocencoding() + report_fields("adding pdfdoc encoding vector") + local encoding = dofile(resolvers.findfile("lpdf-enc.lua")) -- no checking, fatal if not present + pdfdocencodingvector = pdfreference(pdfflushobject(encoding)) + local capsule = pdfdictionary { + PDFDocEncoding = pdfdocencodingvector + } + pdfdocencodingcapsule = pdfreference(pdfflushobject(capsule)) + checkpdfdocencoding = function() end +end + +local fontnames = { + rm = { + tf = "Times-Roman", + bf = "Times-Bold", + it = "Times-Italic", + sl = "Times-Italic", + bi = "Times-BoldItalic", + bs = "Times-BoldItalic", + }, + ss = { + tf = "Helvetica", + bf = "Helvetica-Bold", + it = "Helvetica-Oblique", + sl = "Helvetica-Oblique", + bi = "Helvetica-BoldOblique", + bs = "Helvetica-BoldOblique", + }, + tt = { + tf = "Courier", + bf = "Courier-Bold", + it = "Courier-Oblique", + sl = "Courier-Oblique", + bi = "Courier-BoldOblique", + bs = "Courier-BoldOblique", + }, + symbol = { + dingbats = "ZapfDingbats", + } +} + +local usedfonts = { } + +local function fieldsurrounding(specification) + local fontsize = specification.fontsize or "12pt" + local fontstyle = specification.fontstyle or "rm" + local fontalternative = specification.fontalternative or "tf" + local colorvalue = specification.colorvalue + local s = fontnames[fontstyle] + if not s then + fontstyle, s = "rm", fontnames.rm + end + local a = s[fontalternative] + if not a then + alternative, a = "tf", s.tf + end + local tag = fontstyle .. fontalternative + fontsize = todimen(fontsize) + fontsize = fontsize and (bpfactor * fontsize) or 12 + fontraise = 0.1 * fontsize -- todo: figure out what the natural one is and compensate for strutdp + local fontcode = format("%0.4f Tf %0.4f Ts",fontsize,fontraise) + -- we could test for colorvalue being 1 (black) and omit it then + local colorcode = lpdf.color(3,colorvalue) -- we force an rgb color space + if trace_fields then + report_fields("using font, style %a, alternative %a, size %p, tag %a, code %a",fontstyle,fontalternative,fontsize,tag,fontcode) + report_fields("using color, value %a, code %a",colorvalue,colorcode) + end + local stream = pdfstream { + pdfconstant(tag), + format("%s %s",fontcode,colorcode) + } + usedfonts[tag] = a -- the name + -- move up with "x.y Ts" + return tostring(stream) +end + +local function registerfonts() + if next(usedfonts) then + checkpdfdocencoding() -- already done + local d = pdfdictionary() + local pdffonttype, pdffontsubtype = pdfconstant("Font"), pdfconstant("Type1") + for tag, name in next, usedfonts do + local f = pdfdictionary { + Type = pdffonttype, + Subtype = pdffontsubtype, + Name = pdfconstant(tag), + BaseFont = pdfconstant(name), + Encoding = pdfdocencodingvector, + } + d[tag] = pdfreference(pdfflushobject(f)) + end + return d + end +end + +-- symbols + +local function fieldappearances(specification) + -- todo: caching + local values = specification.values + local default = specification.default -- todo + if not values then + -- error + return + end + local v = settings_to_array(values) + local n, r, d + if #v == 1 then + n, r, d = v[1], v[1], v[1] + elseif #v == 2 then + n, r, d = v[1], v[1], v[2] + else + n, r, d = v[1], v[2], v[3] + end + local appearance = pdfdictionary { + N = registeredsymbol(n), R = registeredsymbol(r), D = registeredsymbol(d), + } + return pdfshareobjectreference(appearance) +-- return pdfreference(pdfflushobject(appearance)) +end + +local YesorOn = "Yes" -- somehow On is not always working out well any longer (why o why this change) + 
+-- beware ... maybe we should have unique /Yes1 ... we will probably +-- change this one too. +-- +-- TODO: the same as radio .. play safe and use different names. + +local function fieldstates_check(specification,forceyes,values,default,yesdefault) + -- we don't use Opt here (too messy for radio buttons) + local values, default = values or specification.values, default or specification.default + if not values or values == "" then + -- error + return + end + local v = settings_to_array(values) + local yes, off, yesn, yesr, yesd, offn, offr, offd + if #v == 1 then + yes, off = v[1], v[1] + else + yes, off = v[1], v[2] + end + local yesshown, yesvalue = lpegmatch(splitter,yes) + if not (yesshown and yesvalue) then + yesshown = yes, yes + end + yes = settings_to_array(yesshown) + local offshown, offvalue = lpegmatch(splitter,off) + if not (offshown and offvalue) then + offshown = off, off + end + off = settings_to_array(offshown) + if #yes == 1 then + yesn, yesr, yesd = yes[1], yes[1], yes[1] + elseif #yes == 2 then + yesn, yesr, yesd = yes[1], yes[1], yes[2] + else + yesn, yesr, yesd = yes[1], yes[2], yes[3] + end + if #off == 1 then + offn, offr, offd = off[1], off[1], off[1] + elseif #off == 2 then + offn, offr, offd = off[1], off[1], off[2] + else + offn, offr, offd = off[1], off[2], off[3] + end + if not yesvalue then + yesvalue = yesdefault or yesn + end + if not offvalue then + offvalue = offn + end + if forceyes == true then + forceyes = YesorOn -- spec likes Yes more but we've used On for ages now + else + -- false or string + end + if default == yesn then + default = pdfconstant(forceyes or yesn) + else + default = pdf_off + end + local appearance + if false then -- needs testing + appearance = pdfdictionary { -- maybe also cache components + N = pdfshareobjectreference(pdfdictionary { [forceyes or yesn] = registeredsymbol(yesn), Off = registeredsymbol(offn) }), + R = pdfshareobjectreference(pdfdictionary { [forceyes or yesr] = registeredsymbol(yesr), Off = registeredsymbol(offr) }), + D = pdfshareobjectreference(pdfdictionary { [forceyes or yesd] = registeredsymbol(yesd), Off = registeredsymbol(offd) }), + } + else + appearance = pdfdictionary { -- maybe also cache components + N = pdfdictionary { [forceyes or yesn] = registeredsymbol(yesn), Off = registeredsymbol(offn) }, + R = pdfdictionary { [forceyes or yesr] = registeredsymbol(yesr), Off = registeredsymbol(offr) }, + D = pdfdictionary { [forceyes or yesd] = registeredsymbol(yesd), Off = registeredsymbol(offd) } + } + end + local appearanceref = pdfshareobjectreference(appearance) + -- local appearanceref = pdfreference(pdfflushobject(appearance)) + return appearanceref, default, yesvalue +end + +-- It looks like there is always a (MK related) symbol used and that +-- the appearances are only used as ornaments behind a symbol. So, +-- contrary to what we did when widgets showed up, we now limit +-- ourself to more dumb definitions. Especially when highlighting is +-- enabled weird interferences happen. So, we play safe (some nice code +-- has been removed that worked well till recently). 
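+--
+-- To make that a bit more concrete: for a check field with values "yes,no"
+-- and default "yes", fieldstates_check ends up sharing an /AP dictionary of
+-- roughly this shape (the object numbers are made up; they point to the
+-- symbol xforms registered elsewhere):
+--
+--   /AP << /N << /Yes 12 0 R /Off 13 0 R >>
+--          /R << /Yes 14 0 R /Off 15 0 R >>
+--          /D << /Yes 16 0 R /Off 17 0 R >> >>
+--   /AS /Yes
+--
+-- so the viewer flips between the /Yes and /Off appearances while /AS
+-- records the current (default) state.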
+ +local function fieldstates_radio(specification,name,parent) + local values = values or specification.values + local default = default or parent.default -- specification.default + if not values or values == "" then + -- error + return + end + local v = settings_to_array(values) + local yes, off, yesn, yesr, yesd, offn, offr, offd + if #v == 1 then + yes, off = v[1], v[1] + else + yes, off = v[1], v[2] + end + -- yes keys might be the same in the three appearances within a field + -- but can best be different among fields ... don't ask why + local yessymbols, yesvalue = lpegmatch(splitter,yes) -- n,r,d=>x + if not (yessymbols and yesvalue) then + yessymbols = yes + end + if not yesvalue then + yesvalue = name + end + yessymbols = settings_to_array(yessymbols) + if #yessymbols == 1 then + yesn = yessymbols[1] + yesr = yesn + yesd = yesr + elseif #yessymbols == 2 then + yesn = yessymbols[1] + yesr = yessymbols[2] + yesd = yesr + else + yesn = yessymbols[1] + yesr = yessymbols[2] + yesd = yessymbols[3] + end + -- we don't care about names, as all will be /Off + local offsymbols = lpegmatch(splitter,off) or off + offsymbols = settings_to_array(offsymbols) + if #offsymbols == 1 then + offn = offsymbols[1] + offr = offn + offd = offr + elseif #offsymbols == 2 then + offn = offsymbols[1] + offr = offsymbols[2] + offd = offr + else + offn = offsymbols[1] + offr = offsymbols[2] + offd = offsymbols[3] + end + if default == name then + default = pdfconstant(name) + else + default = pdf_off + end + -- + local appearance + if false then -- needs testing + appearance = pdfdictionary { -- maybe also cache components + N = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesn), Off = registeredsymbol(offn) }), + R = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesr), Off = registeredsymbol(offr) }), + D = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesd), Off = registeredsymbol(offd) }), + } + else + appearance = pdfdictionary { -- maybe also cache components + N = pdfdictionary { [name] = registeredsymbol(yesn), Off = registeredsymbol(offn) }, + R = pdfdictionary { [name] = registeredsymbol(yesr), Off = registeredsymbol(offr) }, + D = pdfdictionary { [name] = registeredsymbol(yesd), Off = registeredsymbol(offd) } + } + end + local appearanceref = pdfshareobjectreference(appearance) -- pdfreference(pdfflushobject(appearance)) + return appearanceref, default, yesvalue +end + +local function fielddefault(field) + local default = field.default + if not default or default == "" then + local values = settings_to_array(field.values) + default = values[1] + end + if not default or default == "" then + return pdf_off + else + return pdfconstant(default) + end +end + +local function fieldoptions(specification) + local values = specification.values + local default = specification.default + if values then + local v = settings_to_array(values) + for i=1,#v do + local vi = v[i] + local shown, value = lpegmatch(splitter,vi) + if shown and value then + v[i] = pdfarray { pdfunicode(value), shown } + else + v[i] = pdfunicode(v[i]) + end + end + return pdfarray(v) + end +end + +local mapping = { + -- acrobat compliant (messy, probably some pdfdoc encoding interference here) + check = "4", -- 0x34 + circle = "l", -- 0x6C + cross = "8", -- 0x38 + diamond = "u", -- 0x75 + square = "n", -- 0x6E + star = "H", -- 0x48 +} + +local function todingbat(n) + if n and n ~= "" then + return mapping[n] or "" + end +end + +-- local zero_bc = pdfarray { 0, 0, 0 } +-- local 
zero_bg = pdfarray { 1, 1, 1 } + +local function fieldrendering(specification) + local bvalue = tonumber(specification.backgroundcolorvalue) + local fvalue = tonumber(specification.framecolorvalue) + local svalue = specification.fontsymbol + if bvalue or fvalue or (svalue and svalue ~= "") then + return pdfdictionary { + BG = bvalue and pdfarray { lpdf.colorvalues(3,bvalue) } or nil, -- or zero_bg, + BC = fvalue and pdfarray { lpdf.colorvalues(3,fvalue) } or nil, -- or zero_bc, + CA = svalue and pdfstring (svalue) or nil, + } + end +end + +-- layers + +local function fieldlayer(specification) -- we can move this in line + local layer = specification.layer + return (layer and lpdf.layerreference(layer)) or nil +end + +-- defining + +local fields, radios, clones, fieldsets, calculationset = { }, { }, { }, { }, nil + +local xfdftemplate = [[ + + + + + +%s + + +]] + +function codeinjections.exportformdata(name) + local result = { } + for k, v in table.sortedhash(fields) do + result[#result+1] = format(" %s",v.name or k,v.default or "") + end + local base = file.basename(tex.jobname) + local xfdf = format(xfdftemplate,base,table.concat(result,"\n")) + if not name or name == "" then + name = base + end + io.savedata(file.addsuffix(name,"xfdf"),xfdf) +end + +function codeinjections.definefieldset(tag,list) + fieldsets[tag] = list +end + +function codeinjections.getfieldset(tag) + return fieldsets[tag] +end + +local function fieldsetlist(tag) + if tag then + local ft = fieldsets[tag] + if ft then + local a = pdfarray() + for name in gmatch(list,"[^, ]+") do + local f = field[name] + if f and f.pobj then + a[#a+1] = pdfreference(f.pobj) + end + end + return a + end + end +end + +function codeinjections.setfieldcalculationset(tag) + calculationset = tag +end + +local function predefinesymbols(specification) + local values = specification.values + if values then + local symbols = settings_to_array(values) + for i=1,#symbols do + local symbol = symbols[i] + local a, b = lpegmatch(splitter,symbol) + codeinjections.presetsymbol(a or symbol) + end + end +end + +function codeinjections.getdefaultfieldvalue(name) + local f = fields[name] + if f then + local values = f.values + local default = f.default + if not default or default == "" then + local symbols = settings_to_array(values) + local symbol = symbols[1] + if symbol then + local a, b = lpegmatch(splitter,symbol) -- splits at => + default = a or symbol + end + end + return default + end +end + +function codeinjections.definefield(specification) + local n = specification.name + local f = fields[n] + if not f then + local fieldtype = specification.type + if not fieldtype then + if trace_fields then + report_fields("invalid definition for %a, unknown type",n) + end + elseif fieldtype == "radio" then + local values = specification.values + if values and values ~= "" then + values = settings_to_array(values) + for v=1,#values do + radios[values[v]] = { parent = n } + end + fields[n] = specification + if trace_fields then + report_fields("defining %a as type %a",n,"radio") + end + elseif trace_fields then + report_fields("invalid definition of radio %a, missing values",n) + end + elseif fieldtype == "sub" then + -- not in main field list ! 
+ local radio = radios[n] + if radio then + -- merge specification + for key, value in next, specification do + radio[key] = value + end + if trace_fields then + local p = radios[n] and radios[n].parent + report_fields("defining %a as type sub of radio %a",n,p) + end + elseif trace_fields then + report_fields("invalid definition of radio sub %a, no parent given",n) + end + predefinesymbols(specification) + elseif fieldtype == "text" or fieldtype == "line" then + fields[n] = specification + if trace_fields then + report_fields("defining %a as type %a",n,fieldtype) + end + if specification.values ~= "" and specification.default == "" then + specification.default, specification.values = specification.values, nil + end + else + fields[n] = specification + if trace_fields then + report_fields("defining %a as type %a",n,fieldtype) + end + predefinesymbols(specification) + end + elseif trace_fields then + report_fields("invalid definition for %a, already defined",n) + end +end + +function codeinjections.clonefield(specification) -- obsolete + local p, c, v = specification.parent, specification.children, specification.alternative + if not p or not c then + if trace_fields then + report_fields("invalid clone, children %a, parent %a, alternative %a",c,p,v) + end + return + end + local x = fields[p] or radios[p] + if not x then + if trace_fields then + report_fields("invalid clone, unknown parent %a",p) + end + return + end + for n in gmatch(c,"[^, ]+") do + local f, r, c = fields[n], radios[n], clones[n] + if f or r or c then + if trace_fields then + report_fields("already cloned, child %a, parent %a, alternative %a",n,p,v) + end + else + if trace_fields then + report_fields("cloning, child %a, parent %a, alternative %a",n,p,v) + end + clones[n] = specification + predefinesymbols(specification) + end + end +end + +function codeinjections.getfieldcategory(name) + local f = fields[name] or radios[name] or clones[name] + if f then + local g = f.category + if not g or g == "" then + local v, p, t = f.alternative, f.parent, f.type + if v == "clone" or v == "copy" then + f = fields[p] or radios[p] + g = f and f.category + elseif t == "sub" then + f = fields[p] + g = f and f.category + end + end + return g + end +end + +-- + +function codeinjections.validfieldcategory(name) + return fields[name] or radios[name] or clones[name] +end + +function codeinjections.validfieldset(name) + return fieldsets[tag] +end + +function codeinjections.validfield(name) + return fields[name] +end + +-- + +local alignments = { + flushleft = 0, right = 0, + center = 1, middle = 1, + flushright = 2, left = 2, +} + +local function fieldalignment(specification) + return alignments[specification.align] or 0 +end + +local function enhance(specification,option) + local so = specification.option + if so and so ~= "" then + specification.option = so .. "," .. 
option + else + specification.option = option + end + return specification +end + +-- finish + +local collected = pdfarray() +local forceencoding = false + +local function finishfields() + local sometext = forceencoding + for name, field in next, fields do + local kids = field.kids + if kids then + pdfflushobject(field.kidsnum,kids) + end + local opt = field.opt + if opt then + pdfflushobject(field.optnum,opt) + end + local type = field.type + if not sometext and (type == "text" or type == "line") then + sometext = true + end + end + for name, field in next, radios do + local kids = field.kids + if kids then + pdfflushobject(field.kidsnum,kids) + end + local opt = field.opt + if opt then + pdfflushobject(field.optnum,opt) + end + end + if #collected > 0 then + local acroform = pdfdictionary { + NeedAppearances = true, + Fields = pdfreference(pdfflushobject(collected)), + CO = fieldsetlist(calculationset), + } + if sometext then + checkpdfdocencoding() + usedfonts.tttf = fontnames.tt.tf + acroform.DA = "/tttf 12 Tf 0 g" + acroform.DR = pdfdictionary { + Font = registerfonts(), + Encoding = pdfdocencodingcapsule, + } + end + lpdf.addtocatalog("AcroForm",pdfreference(pdfflushobject(acroform))) + end +end + +lpdf.registerdocumentfinalizer(finishfields,"form fields") + +local methods = { } + +function nodeinjections.typesetfield(name,specification) + local field = fields[name] or radios[name] or clones[name] + if not field then + report_fields( "unknown child %a",name) + -- unknown field + return + end + local alternative, parent = field.alternative, field.parent + if alternative == "copy" or alternative == "clone" then -- only in clones + field = fields[parent] or radios[parent] + end + local method = methods[field.type] + if method then + return method(name,specification,alternative) + else + report_fields( "unknown method %a for child %a",field.type,name) + end +end + +local function save_parent(field,specification,d,hasopt) + local kidsnum = pdfreserveobject() + d.Kids = pdfreference(kidsnum) + field.kidsnum = kidsnum + field.kids = pdfarray() + if hasopt then + local optnum = pdfreserveobject() + d.Opt = pdfreference(optnum) + field.optnum = optnum + field.opt = pdfarray() + end + local pnum = pdfflushobject(d) + field.pobj = pnum + collected[#collected+1] = pdfreference(pnum) +end + +local function save_kid(field,specification,d,optname) + local kn = pdfreserveannotation() + field.kids[#field.kids+1] = pdfreference(kn) + if optname then + local opt = field.opt + if opt then + opt[#opt+1] = optname + end + end + local width, height, depth = specification.width or 0, specification.height or 0, specification.depth + local box = hpack_node(pdfannotation_node(width,height,depth,d(),kn)) + box.width, box.height, box.depth = width, height, depth -- redundant + return box +end + +local function makelineparent(field,specification) + local text = pdfunicode(field.default) + local length = tonumber(specification.length or 0) or 0 + local d = pdfdictionary { + Subtype = pdf_widget, + T = pdfunicode(specification.title), + F = fieldplus(specification), + Ff = fieldflag(specification), + OC = fieldlayer(specification), + DA = fieldsurrounding(specification), + AA = fieldactions(specification), + FT = pdf_tx, + Q = fieldalignment(specification), + MaxLen = length == 0 and 1000 or length, + DV = text, + V = text, + } + save_parent(field,specification,d) +end + +local function makelinechild(name,specification) + local field, parent = clones[name], nil + if field then + parent = fields[field.parent] + if 
not parent.pobj then + if trace_fields then + report_fields("forcing parent text %a",parent.name) + end + makelineparent(parent,specification) + end + else + parent = fields[name] + field = parent + if not parent.pobj then + if trace_fields then + report_fields("using parent text %a",name) + end + makelineparent(parent,specification) + end + end + if trace_fields then + report_fields("using child text %a",name) + end + local d = pdfdictionary { + Subtype = pdf_widget, + Parent = pdfreference(parent.pobj), + F = fieldplus(specification), + OC = fieldlayer(specification), + DA = fieldsurrounding(specification), + AA = fieldactions(specification), + MK = fieldrendering(specification), + Q = fieldalignment(specification), + } + return save_kid(parent,specification,d) +end + +function methods.line(name,specification) + return makelinechild(name,specification) +end + +function methods.text(name,specification) + return makelinechild(name,enhance(specification,"MultiLine")) +end + +local function makechoiceparent(field,specification) + local d = pdfdictionary { + Subtype = pdf_widget, + T = pdfunicode(specification.title), + F = fieldplus(specification), + Ff = fieldflag(specification), + OC = fieldlayer(specification), + AA = fieldactions(specification), + FT = pdf_ch, + Opt = fieldoptions(field), -- todo + } + save_parent(field,specification,d) +end + +local function makechoicechild(name,specification) + local field, parent = clones[name], nil + if field then + parent = fields[field.parent] + if not parent.pobj then + if trace_fields then + report_fields("forcing parent choice %a",parent.name) + end + makechoiceparent(parent,specification,extras) + end + else + parent = fields[name] + field = parent + if not parent.pobj then + if trace_fields then + report_fields("using parent choice %a",name) + end + makechoiceparent(parent,specification,extras) + end + end + if trace_fields then + report_fields("using child choice %a",name) + end + local d = pdfdictionary { + Subtype = pdf_widget, + Parent = pdfreference(parent.pobj), + F = fieldplus(specification), + OC = fieldlayer(specification), + AA = fieldactions(specification), + } + return save_kid(parent,specification,d) -- do opt here +end + +function methods.choice(name,specification) + return makechoicechild(name,specification) +end + +function methods.popup(name,specification) + return makechoicechild(name,enhance(specification,"PopUp")) +end + +function methods.combo(name,specification) + return makechoicechild(name,enhance(specification,"PopUp,Edit")) +end + +local function makecheckparent(field,specification) + local d = pdfdictionary { + T = pdfunicode(specification.title), -- todo: when tracing use a string + F = fieldplus(specification), + Ff = fieldflag(specification), + OC = fieldlayer(specification), + AA = fieldactions(specification), + FT = pdf_btn, + V = fielddefault(field), + } + save_parent(field,specification,d,true) +end + +local function makecheckchild(name,specification) + local field, parent = clones[name], nil + if field then + parent = fields[field.parent] + if not parent.pobj then + if trace_fields then + report_fields("forcing parent check %a",parent.name) + end + makecheckparent(parent,specification,extras) + end + else + parent = fields[name] + field = parent + if not parent.pobj then + if trace_fields then + report_fields("using parent check %a",name) + end + makecheckparent(parent,specification,extras) + end + end + if trace_fields then + report_fields("using child check %a",name) + end + local d = pdfdictionary { + 
Subtype = pdf_widget, + Parent = pdfreference(parent.pobj), + F = fieldplus(specification), + OC = fieldlayer(specification), + AA = fieldactions(specification), + H = pdf_n, + } + local fontsymbol = specification.fontsymbol + if fontsymbol and fontsymbol ~= "" then + specification.fontsymbol = todingbat(fontsymbol) + specification.fontstyle = "symbol" + specification.fontalternative = "dingbats" + d.DA = fieldsurrounding(specification) + d.MK = fieldrendering(specification) + return save_kid(parent,specification,d) + else + local appearance, default, value = fieldstates_check(field,true) + d.AS = default + d.AP = appearance + return save_kid(parent,specification,d,value) + end +end + +function methods.check(name,specification) + return makecheckchild(name,specification) +end + +local function makepushparent(field,specification) -- check if we can share with the previous + local d = pdfdictionary { + Subtype = pdf_widget, + T = pdfunicode(specification.title), + F = fieldplus(specification), + Ff = fieldflag(specification), + OC = fieldlayer(specification), + AA = fieldactions(specification), + FT = pdf_btn, + AP = fieldappearances(field), + H = pdf_p, + } + save_parent(field,specification,d) +end + +local function makepushchild(name,specification) + local field, parent = clones[name], nil + if field then + parent = fields[field.parent] + if not parent.pobj then + if trace_fields then + report_fields("forcing parent push %a",parent.name) + end + makepushparent(parent,specification) + end + else + parent = fields[name] + field = parent + if not parent.pobj then + if trace_fields then + report_fields("using parent push %a",name) + end + makepushparent(parent,specification) + end + end + if trace_fields then + report_fields("using child push %a",name) + end + local fontsymbol = specification.fontsymbol + local d = pdfdictionary { + Subtype = pdf_widget, + Parent = pdfreference(field.pobj), + F = fieldplus(specification), + OC = fieldlayer(specification), + AA = fieldactions(specification), + H = pdf_p, + } + if fontsymbol and fontsymbol ~= "" then + specification.fontsymbol = todingbat(fontsymbol) + specification.fontstyle = "symbol" + specification.fontalternative = "dingbats" + d.DA = fieldsurrounding(specification) + d.MK = fieldrendering(specification) + else + d.AP = fieldappearances(field) + end + return save_kid(parent,specification,d) +end + +function methods.push(name,specification) + return makepushchild(name,enhance(specification,"PushButton")) +end + +local function makeradioparent(field,specification) +-- specification = enhance(specification,"Radio,RadiosInUnison") + specification = enhance(specification,"Radio,RadiosInUnison,Print,NoToggleToOff") +-- specification = enhance(specification,"Radio,Print,NoToggleToOff") + local d = pdfdictionary { + T = field.name, + FT = pdf_btn, +-- F = fieldplus(specification), + Ff = fieldflag(specification), +-- H = pdf_n, + V = fielddefault(field), + } + save_parent(field,specification,d,true) +end + +-- local function makeradiochild(name,specification) +-- local field, parent = clones[name], nil +-- if field then +-- field = radios[field.parent] +-- parent = fields[field.parent] +-- if not parent.pobj then +-- if trace_fields then +-- report_fields("forcing parent radio %a",parent.name) +-- end +-- makeradioparent(parent,parent) +-- end +-- else +-- field = radios[name] +-- if not field then +-- report_fields("there is some problem with field %a",name) +-- return nil +-- end +-- parent = fields[field.parent] +-- if not parent.pobj then +-- 
if trace_fields then +-- report_fields("using parent radio %a",name) +-- end +-- makeradioparent(parent,parent) +-- end +-- end +-- if trace_fields then +-- report_fields("using child radio %a with values %a and default %a",name,field.values,field.default) +-- end +-- local fontsymbol = specification.fontsymbol +-- fontsymbol="star" +-- local d = pdfdictionary { +-- Subtype = pdf_widget, +-- Parent = pdfreference(parent.pobj), +-- F = fieldplus(specification), +-- OC = fieldlayer(specification), +-- AA = fieldactions(specification), +-- H = pdf_n, +-- } +-- if fontsymbol and fontsymbol ~= "" then +-- local appearance, default, value = fieldstates_radio(field,true,false,false,name) -- false is also ok +-- specification.fontsymbol = todingbat(fontsymbol) +-- specification.fontstyle = "symbol" +-- specification.fontalternative = "dingbats" +-- d.DA = fieldsurrounding(specification) +-- d.MK = fieldrendering(specification) +-- d.AS = pdfconstant(value) -- default -- mandate when AP but confuses viewers +-- d.AP = appearance +-- return save_kid(parent,specification,d,value) +-- -- return save_kid(parent,specification,d,name) +-- else +-- -- local appearance, default, value = fieldstates_radio(field,true) -- false is also ok +-- local appearance, default, value = fieldstates_radio(field,true,false,false,name) -- false is also ok +-- d.AS = default -- mandate when AP but confuses viewers +-- d.AP = appearance +-- return save_kid(parent,specification,d,value) +-- end +-- end + +local function makeradiochild(name,specification) + local field, parent = clones[name], nil + if field then + field = radios[field.parent] + parent = fields[field.parent] + if not parent.pobj then + if trace_fields then + report_fields("forcing parent radio %a",parent.name) + end + makeradioparent(parent,parent) + end + else + field = radios[name] + if not field then + report_fields("there is some problem with field %a",name) + return nil + end + parent = fields[field.parent] + if not parent.pobj then + if trace_fields then + report_fields("using parent radio %a",name) + end + makeradioparent(parent,parent) + end + end + if trace_fields then + report_fields("using child radio %a with values %a and default %a",name,field.values,field.default) + end + local fontsymbol = specification.fontsymbol + -- fontsymbol = "circle" + local d = pdfdictionary { + Subtype = pdf_widget, + Parent = pdfreference(parent.pobj), + F = fieldplus(specification), + OC = fieldlayer(specification), + AA = fieldactions(specification), + H = pdf_n, + } + if fontsymbol and fontsymbol ~= "" then + specification.fontsymbol = todingbat(fontsymbol) + specification.fontstyle = "symbol" + specification.fontalternative = "dingbats" + d.DA = fieldsurrounding(specification) + d.MK = fieldrendering(specification) + end + local appearance, default, value = fieldstates_radio(field,name,fields[field.parent]) + d.AP = appearance + d.AS = default -- /Whatever + return save_kid(parent,specification,d,value) +end + +function methods.sub(name,specification) + return makeradiochild(name,enhance(specification,"Radio,RadiosInUnison")) +end diff --git a/tex/context/base/lpdf-grp.lua b/tex/context/base/lpdf-grp.lua index fed5e6a46..a255658ed 100644 --- a/tex/context/base/lpdf-grp.lua +++ b/tex/context/base/lpdf-grp.lua @@ -1,244 +1,244 @@ -if not modules then modules = { } end modules ['lpdf-grp'] = { - version = 1.001, - comment = "companion to lpdf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = 
"see context related readme files" -} - -local format, gsub = string.format, string.gsub -local concat = table.concat -local round = math.round - -local backends, lpdf = backends, lpdf - -local nodeinjections = backends.pdf.nodeinjections - -local colors = attributes.colors -local basepoints = number.dimenfactors["bp"] -local inches = number.dimenfactors["in"] - -local nodeinjections = backends.pdf.nodeinjections -local codeinjections = backends.pdf.codeinjections -local registrations = backends.pdf.registrations - -local pdfdictionary = lpdf.dictionary -local pdfarray = lpdf.array -local pdfconstant = lpdf.constant -local pdfboolean = lpdf.boolean -local pdfreference = lpdf.reference -local pdfflushobject = lpdf.flushobject - --- can also be done indirectly: --- --- 12 : << /AntiAlias false /ColorSpace 8 0 R /Coords [ 0.0 0.0 1.0 0.0 ] /Domain [ 0.0 1.0 ] /Extend [ true true ] /Function 22 0 R /ShadingType 2 >> --- 22 : << /Bounds [ ] /Domain [ 0.0 1.0 ] /Encode [ 0.0 1.0 ] /FunctionType 3 /Functions [ 31 0 R ] >> --- 31 : << /C0 [ 1.0 0.0 ] /C1 [ 0.0 1.0 ] /Domain [ 0.0 1.0 ] /FunctionType 2 /N 1.0 >> - -local function shade(stype,name,domain,color_a,color_b,n,colorspace,coordinates,separation) - local f = pdfdictionary { - FunctionType = 2, - Domain = pdfarray(domain), -- domain is actually a string - C0 = pdfarray(color_a), - C1 = pdfarray(color_b), - N = tonumber(n), - } - separation = separation and registrations.getspotcolorreference(separation) - local s = pdfdictionary { - ShadingType = stype, - ColorSpace = separation and pdfreference(separation) or pdfconstant(colorspace), - Function = pdfreference(pdfflushobject(f)), - Coords = pdfarray(coordinates), - Extend = pdfarray { true, true }, - AntiAlias = pdfboolean(true), - } - lpdf.adddocumentshade(name,pdfreference(pdfflushobject(s))) -end - -function lpdf.circularshade(name,domain,color_a,color_b,n,colorspace,coordinates,separation) - shade(3,name,domain,color_a,color_b,n,colorspace,coordinates,separation) -end - -function lpdf.linearshade(name,domain,color_a,color_b,n,colorspace,coordinates,separation) - shade(2,name,domain,color_a,color_b,n,colorspace,coordinates,separation) -end - --- inline bitmaps but xform'd --- --- we could derive the colorspace if we strip the data --- and divide by x*y - -local template = "q BI %s ID %s > EI Q" -local factor = 72/300 - -function nodeinjections.injectbitmap(t) - -- encoding is ascii hex, no checking here - local xresolution, yresolution = t.xresolution or 0, t.yresolution or 0 - if xresolution == 0 or yresolution == 0 then - return -- fatal error - end - local colorspace = t.colorspace - if colorspace ~= "rgb" and colorspace ~= "cmyk" and colorspace ~= "gray" then - -- not that efficient but ok - local d = gsub(t.data,"[^0-9a-f]","") - local b = math.round(#d / (xresolution * yresolution)) - if b == 2 then - colorspace = "gray" - elseif b == 6 then - colorspace = "rgb" - elseif b == 8 then - colorspace = "cmyk" - end - end - colorspace = lpdf.colorspaceconstants[colorspace] - if not colorspace then - return -- fatal error - end - local d = pdfdictionary { - W = xresolution, - H = yresolution, - CS = colorspace, - BPC = 8, - F = pdfconstant("AHx"), ---~ CS = nil, ---~ BPC = 1, ---~ IM = true, - } - -- for some reasons it only works well if we take a 1bp boundingbox - local urx, ury = 1/basepoints, 1/basepoints - -- urx = (xresolution/300)/basepoints - -- ury = (yresolution/300)/basepoints - local width, height = t.width or 0, t.height or 0 - if width == 0 and height == 0 then - width = 
factor * xresolution / basepoints - height = factor * yresolution / basepoints - elseif width == 0 then - width = height * xresolution / yresolution - elseif height == 0 then - height = width * yresolution / xresolution - end - local image = img.new { - stream = format(template,d(),t.data), - width = width, - height = height, - bbox = { 0, 0, urx, ury }, - } - return img.node(image) -end - --- general graphic helpers - -function codeinjections.setfigurealternative(data,figure) - local request = data.request - local display = request.display - if display and display ~= "" then - local nested = figures.push { - name = display, - page = request.page, - size = request.size, - prefix = request.prefix, - cache = request.cache, - width = request.width, - height = request.height, - } - figures.identify() - local displayfigure = figures.check() - if displayfigure then - -- figure.aform = true - img.immediatewrite(figure) - local a = pdfarray { - pdfdictionary { - Image = pdfreference(figure.objnum), - DefaultForPrinting = true, - } - } - local d = pdfdictionary { - Alternates = pdfreference(pdfflushobject(a)), - } - displayfigure.attr = d() - figures.pop() - return displayfigure, nested - else - figures.pop() - end - end -end - -function codeinjections.getpreviewfigure(request) - local figure = figures.initialize(request) - if not figure then - return - end - figure = figures.identify(figure) - if not (figure and figure.status and figure.status.fullname) then - return - end - figure = figures.check(figure) - if not (figure and figure.status and figure.status.fullname) then - return - end - local image = figure.status.private - if image then - img.immediatewrite(image) - end - return figure -end - -function codeinjections.setfiguremask(data,figure) -- mark - local request = data.request - local mask = request.mask - if mask and mask ~= "" then - figures.push { - name = mask, - page = request.page, - size = request.size, - prefix = request.prefix, - cache = request.cache, - width = request.width, - height = request.height, - } - figures.identify() - local maskfigure = figures.check() - if maskfigure then - local image = maskfigure.status.private - if image then - img.immediatewrite(image) - local d = pdfdictionary { - Interpolate = false, - SMask = pdfreference(image.objnum), - } - figure.attr = d() - end - end - figures.pop() - end -end - --- temp hack - -local factor = number.dimenfactors.bp - -function img.package(image) -- see lpdf-u3d ** - local boundingbox = image.bbox - local imagetag = "Im" .. 
image.index - local resources = pdfdictionary { - ProcSet = pdfarray { - pdfconstant("PDF"), - pdfconstant("ImageC") - }, - Resources = pdfdictionary { - XObject = pdfdictionary { - [imagetag] = pdfreference(image.objnum) - } - } - } - local width = boundingbox[3] - local height = boundingbox[4] - local xform = img.scan { - attr = resources(), - stream = format("%f 0 0 %f 0 0 cm /%s Do",width,height,imagetag), - bbox = { 0, 0, width/factor, height/factor }, - } - img.immediatewrite(xform) - return xform -end +if not modules then modules = { } end modules ['lpdf-grp'] = { + version = 1.001, + comment = "companion to lpdf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, gsub = string.format, string.gsub +local concat = table.concat +local round = math.round + +local backends, lpdf = backends, lpdf + +local nodeinjections = backends.pdf.nodeinjections + +local colors = attributes.colors +local basepoints = number.dimenfactors["bp"] +local inches = number.dimenfactors["in"] + +local nodeinjections = backends.pdf.nodeinjections +local codeinjections = backends.pdf.codeinjections +local registrations = backends.pdf.registrations + +local pdfdictionary = lpdf.dictionary +local pdfarray = lpdf.array +local pdfconstant = lpdf.constant +local pdfboolean = lpdf.boolean +local pdfreference = lpdf.reference +local pdfflushobject = lpdf.flushobject + +-- can also be done indirectly: +-- +-- 12 : << /AntiAlias false /ColorSpace 8 0 R /Coords [ 0.0 0.0 1.0 0.0 ] /Domain [ 0.0 1.0 ] /Extend [ true true ] /Function 22 0 R /ShadingType 2 >> +-- 22 : << /Bounds [ ] /Domain [ 0.0 1.0 ] /Encode [ 0.0 1.0 ] /FunctionType 3 /Functions [ 31 0 R ] >> +-- 31 : << /C0 [ 1.0 0.0 ] /C1 [ 0.0 1.0 ] /Domain [ 0.0 1.0 ] /FunctionType 2 /N 1.0 >> + +local function shade(stype,name,domain,color_a,color_b,n,colorspace,coordinates,separation) + local f = pdfdictionary { + FunctionType = 2, + Domain = pdfarray(domain), -- domain is actually a string + C0 = pdfarray(color_a), + C1 = pdfarray(color_b), + N = tonumber(n), + } + separation = separation and registrations.getspotcolorreference(separation) + local s = pdfdictionary { + ShadingType = stype, + ColorSpace = separation and pdfreference(separation) or pdfconstant(colorspace), + Function = pdfreference(pdfflushobject(f)), + Coords = pdfarray(coordinates), + Extend = pdfarray { true, true }, + AntiAlias = pdfboolean(true), + } + lpdf.adddocumentshade(name,pdfreference(pdfflushobject(s))) +end + +function lpdf.circularshade(name,domain,color_a,color_b,n,colorspace,coordinates,separation) + shade(3,name,domain,color_a,color_b,n,colorspace,coordinates,separation) +end + +function lpdf.linearshade(name,domain,color_a,color_b,n,colorspace,coordinates,separation) + shade(2,name,domain,color_a,color_b,n,colorspace,coordinates,separation) +end + +-- inline bitmaps but xform'd +-- +-- we could derive the colorspace if we strip the data +-- and divide by x*y + +local template = "q BI %s ID %s > EI Q" +local factor = 72/300 + +function nodeinjections.injectbitmap(t) + -- encoding is ascii hex, no checking here + local xresolution, yresolution = t.xresolution or 0, t.yresolution or 0 + if xresolution == 0 or yresolution == 0 then + return -- fatal error + end + local colorspace = t.colorspace + if colorspace ~= "rgb" and colorspace ~= "cmyk" and colorspace ~= "gray" then + -- not that efficient but ok + local d = gsub(t.data,"[^0-9a-f]","") + local b = 
math.round(#d / (xresolution * yresolution)) + if b == 2 then + colorspace = "gray" + elseif b == 6 then + colorspace = "rgb" + elseif b == 8 then + colorspace = "cmyk" + end + end + colorspace = lpdf.colorspaceconstants[colorspace] + if not colorspace then + return -- fatal error + end + local d = pdfdictionary { + W = xresolution, + H = yresolution, + CS = colorspace, + BPC = 8, + F = pdfconstant("AHx"), +--~ CS = nil, +--~ BPC = 1, +--~ IM = true, + } + -- for some reasons it only works well if we take a 1bp boundingbox + local urx, ury = 1/basepoints, 1/basepoints + -- urx = (xresolution/300)/basepoints + -- ury = (yresolution/300)/basepoints + local width, height = t.width or 0, t.height or 0 + if width == 0 and height == 0 then + width = factor * xresolution / basepoints + height = factor * yresolution / basepoints + elseif width == 0 then + width = height * xresolution / yresolution + elseif height == 0 then + height = width * yresolution / xresolution + end + local image = img.new { + stream = format(template,d(),t.data), + width = width, + height = height, + bbox = { 0, 0, urx, ury }, + } + return img.node(image) +end + +-- general graphic helpers + +function codeinjections.setfigurealternative(data,figure) + local request = data.request + local display = request.display + if display and display ~= "" then + local nested = figures.push { + name = display, + page = request.page, + size = request.size, + prefix = request.prefix, + cache = request.cache, + width = request.width, + height = request.height, + } + figures.identify() + local displayfigure = figures.check() + if displayfigure then + -- figure.aform = true + img.immediatewrite(figure) + local a = pdfarray { + pdfdictionary { + Image = pdfreference(figure.objnum), + DefaultForPrinting = true, + } + } + local d = pdfdictionary { + Alternates = pdfreference(pdfflushobject(a)), + } + displayfigure.attr = d() + figures.pop() + return displayfigure, nested + else + figures.pop() + end + end +end + +function codeinjections.getpreviewfigure(request) + local figure = figures.initialize(request) + if not figure then + return + end + figure = figures.identify(figure) + if not (figure and figure.status and figure.status.fullname) then + return + end + figure = figures.check(figure) + if not (figure and figure.status and figure.status.fullname) then + return + end + local image = figure.status.private + if image then + img.immediatewrite(image) + end + return figure +end + +function codeinjections.setfiguremask(data,figure) -- mark + local request = data.request + local mask = request.mask + if mask and mask ~= "" then + figures.push { + name = mask, + page = request.page, + size = request.size, + prefix = request.prefix, + cache = request.cache, + width = request.width, + height = request.height, + } + figures.identify() + local maskfigure = figures.check() + if maskfigure then + local image = maskfigure.status.private + if image then + img.immediatewrite(image) + local d = pdfdictionary { + Interpolate = false, + SMask = pdfreference(image.objnum), + } + figure.attr = d() + end + end + figures.pop() + end +end + +-- temp hack + +local factor = number.dimenfactors.bp + +function img.package(image) -- see lpdf-u3d ** + local boundingbox = image.bbox + local imagetag = "Im" .. 
image.index + local resources = pdfdictionary { + ProcSet = pdfarray { + pdfconstant("PDF"), + pdfconstant("ImageC") + }, + Resources = pdfdictionary { + XObject = pdfdictionary { + [imagetag] = pdfreference(image.objnum) + } + } + } + local width = boundingbox[3] + local height = boundingbox[4] + local xform = img.scan { + attr = resources(), + stream = format("%f 0 0 %f 0 0 cm /%s Do",width,height,imagetag), + bbox = { 0, 0, width/factor, height/factor }, + } + img.immediatewrite(xform) + return xform +end diff --git a/tex/context/base/lpdf-ini.lua b/tex/context/base/lpdf-ini.lua index cd601f21f..77ccd85fc 100644 --- a/tex/context/base/lpdf-ini.lua +++ b/tex/context/base/lpdf-ini.lua @@ -1,822 +1,822 @@ -if not modules then modules = { } end modules ['lpdf-ini'] = { - version = 1.001, - comment = "companion to lpdf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local setmetatable, getmetatable, type, next, tostring, tonumber, rawset = setmetatable, getmetatable, type, next, tostring, tonumber, rawset -local char, byte, format, gsub, concat, match, sub, gmatch = string.char, string.byte, string.format, string.gsub, table.concat, string.match, string.sub, string.gmatch -local utfchar, utfvalues = utf.char, utf.values -local sind, cosd = math.sind, math.cosd -local lpegmatch, P, C, R, S, Cc, Cs = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs -local formatters = string.formatters - -local pdfreserveobject = pdf.reserveobj -local pdfimmediateobject = pdf.immediateobj -local pdfdeferredobject = pdf.obj -local pdfreferenceobject = pdf.refobj - -local trace_finalizers = false trackers.register("backend.finalizers", function(v) trace_finalizers = v end) -local trace_resources = false trackers.register("backend.resources", function(v) trace_resources = v end) -local trace_objects = false trackers.register("backend.objects", function(v) trace_objects = v end) -local trace_detail = false trackers.register("backend.detail", function(v) trace_detail = v end) - -local report_objects = logs.reporter("backend","objects") -local report_finalizing = logs.reporter("backend","finalizing") - -local backends = backends - -backends.pdf = backends.pdf or { - comment = "backend for directly generating pdf output", - nodeinjections = { }, - codeinjections = { }, - registrations = { }, - tables = { }, -} - -lpdf = lpdf or { } -local lpdf = lpdf - -local function tosixteen(str) -- an lpeg might be faster (no table) - if not str or str == "" then - return "" -- not () as we want an indication that it's unicode - else - local r, n = { ""] = "\\>", --- ["["] = "\\[", ["]"] = "\\]", --- ["("] = "\\(", [")"] = "\\)", --- } --- --- local escaped = Cs(Cc("(") * (S("\\/#<>[]()")/escapes + P(1))^0 * Cc(")")) --- --- local function toeight(str) --- if not str or str == "" then --- return "()" --- else --- return lpegmatch(escaped,str) --- end --- end --- --- -- no need for escaping .. just use unicode instead - --- \0 \t \n \r \f ( ) [ ] { } / % - -local function toeight(str) - return "(" .. str .. 
")" -end - -lpdf.toeight = toeight - ---~ local escaped = lpeg.Cs((lpeg.S("\0\t\n\r\f ()[]{}/%")/function(s) return format("#%02X",byte(s)) end + lpeg.P(1))^0) - ---~ local function cleaned(str) ---~ return (str and str ~= "" and lpegmatch(escaped,str)) or "" ---~ end - ---~ lpdf.cleaned = cleaned -- not public yet - -local function merge_t(a,b) - local t = { } - for k,v in next, a do t[k] = v end - for k,v in next, b do t[k] = v end - return setmetatable(t,getmetatable(a)) -end - -local f_key_value = formatters["/%s %s"] -local f_key_dictionary = formatters["/%s << % t >>"] -local f_dictionary = formatters["<< % t >>"] -local f_key_array = formatters["/%s [ % t ]"] -local f_array = formatters["[ % t ]"] - -local tostring_a, tostring_d - -tostring_d = function(t,contentonly,key) - if not next(t) then - if contentonly then - return "" - else - return "<< >>" - end - else - local r, rn = { }, 0 - for k, v in next, t do - rn = rn + 1 - local tv = type(v) - if tv == "string" then - r[rn] = f_key_value(k,toeight(v)) - elseif tv == "unicode" then - r[rn] = f_key_value(k,tosixteen(v)) - elseif tv == "table" then - local mv = getmetatable(v) - if mv and mv.__lpdftype then - r[rn] = f_key_value(k,tostring(v)) - elseif v[1] then - r[rn] = f_key_value(k,tostring_a(v)) - else - r[rn] = f_key_value(k,tostring_d(v)) - end - else - r[rn] = f_key_value(k,tostring(v)) - end - end - if contentonly then - return concat(r," ") - elseif key then - return f_key_dictionary(key,r) - else - return f_dictionary(r) - end - end -end - -tostring_a = function(t,contentonly,key) - local tn = #t - if tn == 0 then - if contentonly then - return "" - else - return "[ ]" - end - else - local r = { } - for k=1,tn do - local v = t[k] - local tv = type(v) - if tv == "string" then - r[k] = toeight(v) - elseif tv == "unicode" then - r[k] = tosixteen(v) - elseif tv == "table" then - local mv = getmetatable(v) - local mt = mv and mv.__lpdftype - if mt then - r[k] = tostring(v) - elseif v[1] then - r[k] = tostring_a(v) - else - r[k] = tostring_d(v) - end - else - r[k] = tostring(v) - end - end - if contentonly then - return concat(r, " ") - elseif key then - return f_key_array(key,r) - else - return f_array(r) - end - end -end - -local tostring_x = function(t) return concat(t, " ") end -local tostring_s = function(t) return toeight(t[1]) end -local tostring_u = function(t) return tosixteen(t[1]) end -local tostring_n = function(t) return tostring(t[1]) end -- tostring not needed -local tostring_c = function(t) return t[1] end -- already prefixed (hashed) -local tostring_z = function() return "null" end -local tostring_t = function() return "true" end -local tostring_f = function() return "false" end -local tostring_r = function(t) local n = t[1] return n and n > 0 and (n .. 
" 0 R") or "NULL" end - -local tostring_v = function(t) - local s = t[1] - if type(s) == "table" then - return concat(s,"") - else - return s - end -end - -local function value_x(t) return t end -- the call is experimental -local function value_s(t,key) return t[1] end -- the call is experimental -local function value_u(t,key) return t[1] end -- the call is experimental -local function value_n(t,key) return t[1] end -- the call is experimental -local function value_c(t) return sub(t[1],2) end -- the call is experimental -local function value_d(t) return tostring_d(t,true) end -- the call is experimental -local function value_a(t) return tostring_a(t,true) end -- the call is experimental -local function value_z() return nil end -- the call is experimental -local function value_t(t) return t.value or true end -- the call is experimental -local function value_f(t) return t.value or false end -- the call is experimental -local function value_r() return t[1] or 0 end -- the call is experimental -- NULL -local function value_v() return t[1] end -- the call is experimental - -local function add_x(t,k,v) rawset(t,k,tostring(v)) end - -local mt_x = { __lpdftype = "stream", __tostring = tostring_x, __call = value_x, __newindex = add_x } -local mt_d = { __lpdftype = "dictionary", __tostring = tostring_d, __call = value_d } -local mt_a = { __lpdftype = "array", __tostring = tostring_a, __call = value_a } -local mt_u = { __lpdftype = "unicode", __tostring = tostring_u, __call = value_u } -local mt_s = { __lpdftype = "string", __tostring = tostring_s, __call = value_s } -local mt_n = { __lpdftype = "number", __tostring = tostring_n, __call = value_n } -local mt_c = { __lpdftype = "constant", __tostring = tostring_c, __call = value_c } -local mt_z = { __lpdftype = "null", __tostring = tostring_z, __call = value_z } -local mt_t = { __lpdftype = "true", __tostring = tostring_t, __call = value_t } -local mt_f = { __lpdftype = "false", __tostring = tostring_f, __call = value_f } -local mt_r = { __lpdftype = "reference", __tostring = tostring_r, __call = value_r } -local mt_v = { __lpdftype = "verbose", __tostring = tostring_v, __call = value_v } - -local function pdfstream(t) -- we need to add attributes - if t then - for i=1,#t do - t[i] = tostring(t[i]) - end - end - return setmetatable(t or { },mt_x) -end - -local function pdfdictionary(t) - return setmetatable(t or { },mt_d) -end - -local function pdfarray(t) - if type(t) == "string" then - return setmetatable({ t },mt_a) - else - return setmetatable(t or { },mt_a) - end -end - -local function pdfstring(str,default) - return setmetatable({ str or default or "" },mt_s) -end - -local function pdfunicode(str,default) - return setmetatable({ str or default or "" },mt_u) -end - -local cache = { } -- can be weak - -local function pdfnumber(n,default) -- 0-10 - n = n or default - local c = cache[n] - if not c then - c = setmetatable({ n },mt_n) - -- cache[n] = c -- too many numbers - end - return c -end - -for i=-1,9 do cache[i] = pdfnumber(i) end - -local cache = { } -- can be weak - -local forbidden, replacements = "\0\t\n\r\f ()[]{}/%%#\\", { } -- table faster than function - -for s in gmatch(forbidden,".") do - replacements[s] = format("#%02x",byte(s)) -end - -local escaped = Cs(Cc("/") * (S(forbidden)/replacements + P(1))^0) - -local function pdfconstant(str,default) - str = str or default or "" - local c = cache[str] - if not c then - -- c = setmetatable({ "/" .. 
str },mt_c) - c = setmetatable({ lpegmatch(escaped,str) },mt_c) - cache[str] = c - end - return c -end - -local p_null = { } setmetatable(p_null, mt_z) -local p_true = { } setmetatable(p_true, mt_t) -local p_false = { } setmetatable(p_false,mt_f) - -local function pdfnull() - return p_null -end - ---~ print(pdfboolean(false),pdfboolean(false,false),pdfboolean(false,true)) ---~ print(pdfboolean(true),pdfboolean(true,false),pdfboolean(true,true)) ---~ print(pdfboolean(nil,true),pdfboolean(nil,false)) - -local function pdfboolean(b,default) - if type(b) == "boolean" then - return b and p_true or p_false - else - return default and p_true or p_false - end -end - -local function pdfreference(r) - return setmetatable({ r or 0 },mt_r) -end - -local function pdfverbose(t) -- maybe check for type - return setmetatable({ t or "" },mt_v) -end - -lpdf.stream = pdfstream -- THIS WILL PROBABLY CHANGE -lpdf.dictionary = pdfdictionary -lpdf.array = pdfarray -lpdf.string = pdfstring -lpdf.unicode = pdfunicode -lpdf.number = pdfnumber -lpdf.constant = pdfconstant -lpdf.null = pdfnull -lpdf.boolean = pdfboolean -lpdf.reference = pdfreference -lpdf.verbose = pdfverbose - --- n = pdf.obj(n, str) --- n = pdf.obj(n, "file", filename) --- n = pdf.obj(n, "stream", streamtext, attrtext) --- n = pdf.obj(n, "streamfile", filename, attrtext) - --- we only use immediate objects - --- todo: tracing - -local names, cache = { }, { } - -function lpdf.reserveobject(name) - if name == "annot" then - -- catch misuse - return pdfreserveobject("annot") - else - local r = pdfreserveobject() - if name then - names[name] = r - if trace_objects then - report_objects("reserving number %a under name %a",r,name) - end - elseif trace_objects then - report_objects("reserving number %a",r) - end - return r - end -end - -function lpdf.reserveannotation() - return pdfreserveobject("annot") -end - --- lpdf.immediateobject = pdfimmediateobject --- lpdf.deferredobject = pdfdeferredobject --- lpdf.object = pdfdeferredobject --- lpdf.referenceobject = pdfreferenceobject - -lpdf.pagereference = pdf.pageref or tex.pdfpageref -lpdf.registerannotation = pdf.registerannot - -function lpdf.delayedobject(data) -- we will get rid of this one - local n = pdfdeferredobject(data) - pdfreferenceobject(n) - return n -end - -function lpdf.flushobject(name,data) - if data then - local named = names[name] - if named then - if not trace_objects then - elseif trace_detail then - report_objects("flushing data to reserved object with name %a, data: %S",name,data) - else - report_objects("flushing data to reserved object with name %a",name) - end - return pdfimmediateobject(named,tostring(data)) - else - if not trace_objects then - elseif trace_detail then - report_objects("flushing data to reserved object with number %s, data: %S",name,data) - else - report_objects("flushing data to reserved object with number %s",name) - end - return pdfimmediateobject(name,tostring(data)) - end - else - if trace_objects and trace_detail then - report_objects("flushing data: %S",name) - end - return pdfimmediateobject(tostring(name)) - end -end - - -function lpdf.flushstreamobject(data,dict,compressed) -- default compressed - if trace_objects then - report_objects("flushing stream object of %s bytes",#data) - end - local dtype = type(dict) - return pdfdeferredobject { - immediate = true, - compresslevel = compressed == false and 0 or nil, - type = "stream", - string = data, - attr = (dtype == "string" and dict) or (dtype == "table" and dict()) or nil, - } -end - -function 
lpdf.flushstreamfileobject(filename,dict,compressed) -- default compressed - if trace_objects then - report_objects("flushing stream file object %a",filename) - end - local dtype = type(dict) - return pdfdeferredobject { - immediate = true, - compresslevel = compressed == false and 0 or nil, - type = "stream", - file = filename, - attr = (dtype == "string" and dict) or (dtype == "table" and dict()) or nil, - } -end - -local shareobjectcache, shareobjectreferencecache = { }, { } - -function lpdf.shareobject(content) - if content == nil then - -- invalid object not created - else - content = tostring(content) - local o = shareobjectcache[content] - if not o then - o = pdfimmediateobject(content) - shareobjectcache[content] = o - end - return o - end -end - -function lpdf.shareobjectreference(content) - if content == nil then - -- invalid object not created - else - content = tostring(content) - local r = shareobjectreferencecache[content] - if not r then - local o = shareobjectcache[content] - if not o then - o = pdfimmediateobject(content) - shareobjectcache[content] = o - end - r = pdfreference(o) - shareobjectreferencecache[content] = r - end - return r - end -end - ---~ local d = lpdf.dictionary() ---~ local e = lpdf.dictionary { ["e"] = "abc", x = lpdf.dictionary { ["f"] = "ABC" } } ---~ local f = lpdf.dictionary { ["f"] = "ABC" } ---~ local a = lpdf.array { lpdf.array { lpdf.string("xxx") } } - ---~ print(a) ---~ os.exit() - ---~ d["test"] = lpdf.string ("test") ---~ d["more"] = "more" ---~ d["bool"] = true ---~ d["numb"] = 1234 ---~ d["oeps"] = lpdf.dictionary { ["hans"] = "ton" } ---~ d["whow"] = lpdf.array { lpdf.string("ton") } - ---~ a[#a+1] = lpdf.string("xxx") ---~ a[#a+1] = lpdf.string("yyy") - ---~ d.what = a - ---~ print(e) - ---~ local d = lpdf.dictionary() ---~ d["abcd"] = { 1, 2, 3, "test" } ---~ print(d) ---~ print(d()) - ---~ local d = lpdf.array() ---~ d[#d+1] = 1 ---~ d[#d+1] = 2 ---~ d[#d+1] = 3 ---~ d[#d+1] = "test" ---~ print(d) - ---~ local d = lpdf.array() ---~ d[#d+1] = { 1, 2, 3, "test" } ---~ print(d) - ---~ local d = lpdf.array() ---~ d[#d+1] = { a=1, b=2, c=3, d="test" } ---~ print(d) - ---~ local s = lpdf.constant("xx") ---~ print(s) -- fails somehow ---~ print(s()) -- fails somehow - ---~ local s = lpdf.boolean(false) ---~ s.value = true ---~ print(s) ---~ print(s()) - --- three priority levels, default=2 - -local pagefinalizers, documentfinalizers = { { }, { }, { } }, { { }, { }, { } } - -local pageresources, pageattributes, pagesattributes - -local function resetpageproperties() - pageresources = pdfdictionary() - pageattributes = pdfdictionary() - pagesattributes = pdfdictionary() -end - -resetpageproperties() - -local function setpageproperties() - pdf.pageresources = pageresources () - pdf.pageattributes = pageattributes () - pdf.pagesattributes = pagesattributes() -end - -local function addtopageresources (k,v) pageresources [k] = v end -local function addtopageattributes (k,v) pageattributes [k] = v end -local function addtopagesattributes(k,v) pagesattributes[k] = v end - -lpdf.addtopageresources = addtopageresources -lpdf.addtopageattributes = addtopageattributes -lpdf.addtopagesattributes = addtopagesattributes - -local function set(where,what,f,when,comment) - if type(when) == "string" then - when, comment = 2, when - elseif not when then - when = 2 - end - local w = where[when] - w[#w+1] = { f, comment } - if trace_finalizers then - report_finalizing("%s set: [%s,%s]",what,when,#w) - end -end - -local function run(where,what) - if 
trace_finalizers then - report_finalizing("start backend, category %a, n %a",what,#where) - end - for i=1,#where do - local w = where[i] - for j=1,#w do - local wj = w[j] - if trace_finalizers then - report_finalizing("%s finalizer: [%s,%s] %s",what,i,j,wj[2] or "") - end - wj[1]() - end - end - if trace_finalizers then - report_finalizing("stop finalizing") - end -end - -local function registerpagefinalizer(f,when,comment) - set(pagefinalizers,"page",f,when,comment) -end - -local function registerdocumentfinalizer(f,when,comment) - set(documentfinalizers,"document",f,when,comment) -end - -lpdf.registerpagefinalizer = registerpagefinalizer -lpdf.registerdocumentfinalizer = registerdocumentfinalizer - -function lpdf.finalizepage() - if not environment.initex then - -- resetpageproperties() -- maybe better before - run(pagefinalizers,"page") - setpageproperties() - resetpageproperties() -- maybe better before - end -end - -function lpdf.finalizedocument() - if not environment.initex then - run(documentfinalizers,"document") - function lpdf.finalizedocument() - report_finalizing("serious error: the document is finalized multiple times") - function lpdf.finalizedocument() end - end - end -end - -backends.pdf.codeinjections.finalizepage = lpdf.finalizepage -- will go when we have hook - ---~ callbacks.register("finish_pdfpage", lpdf.finalizepage) -callbacks.register("finish_pdffile", lpdf.finalizedocument) - --- some minimal tracing, handy for checking the order - -local function trace_set(what,key) - if trace_resources then - report_finalizing("setting key %a in %a",key,what) - end -end -local function trace_flush(what) - if trace_resources then - report_finalizing("flushing %a",what) - end -end - -lpdf.protectresources = true - -local catalog = pdfdictionary { Type = pdfconstant("Catalog") } -- nicer, but when we assign we nil the Type -local info = pdfdictionary { Type = pdfconstant("Info") } -- nicer, but when we assign we nil the Type -local names = pdfdictionary { Type = pdfconstant("Names") } -- nicer, but when we assign we nil the Type - -local function flushcatalog() if not environment.initex then trace_flush("catalog") catalog.Type = nil pdf.catalog = catalog() end end -local function flushinfo () if not environment.initex then trace_flush("info") info .Type = nil pdf.info = info () end end -local function flushnames () if not environment.initex then trace_flush("names") names .Type = nil pdf.names = names () end end - -function lpdf.addtocatalog(k,v) if not (lpdf.protectresources and catalog[k]) then trace_set("catalog",k) catalog[k] = v end end -function lpdf.addtoinfo (k,v) if not (lpdf.protectresources and info [k]) then trace_set("info", k) info [k] = v end end -function lpdf.addtonames (k,v) if not (lpdf.protectresources and names [k]) then trace_set("names", k) names [k] = v end end - -local dummy = pdfreserveobject() -- else bug in hvmd due so some internal luatex conflict - --- Some day I will implement a proper minimalized resource management. 
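
As a rough sketch of how the wrapper constructors defined above behave once serialized (key order in a dictionary follows Lua's next, so it is not guaranteed; this assumes a LuaTeX run in which lpdf is loaded):

    local d = lpdf.dictionary { Type = lpdf.constant("Example"), Count = 3 }
    local a = lpdf.array { 1, lpdf.string("text"), lpdf.reference(12) }

    print(tostring(d)) -- << /Type /Example /Count 3 >>  (key order may differ)
    print(d())         -- /Type /Example /Count 3        (content only, via the __call handler)
    print(tostring(a)) -- [ 1 (text) 12 0 R ]
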
- -local r_extgstates, d_extgstates = pdfreserveobject(), pdfdictionary() local p_extgstates = pdfreference(r_extgstates) -local r_colorspaces, d_colorspaces = pdfreserveobject(), pdfdictionary() local p_colorspaces = pdfreference(r_colorspaces) -local r_patterns, d_patterns = pdfreserveobject(), pdfdictionary() local p_patterns = pdfreference(r_patterns) -local r_shades, d_shades = pdfreserveobject(), pdfdictionary() local p_shades = pdfreference(r_shades) - -local function checkextgstates () if next(d_extgstates ) then addtopageresources("ExtGState", p_extgstates ) end end -local function checkcolorspaces() if next(d_colorspaces) then addtopageresources("ColorSpace",p_colorspaces) end end -local function checkpatterns () if next(d_patterns ) then addtopageresources("Pattern", p_patterns ) end end -local function checkshades () if next(d_shades ) then addtopageresources("Shading", p_shades ) end end - -local function flushextgstates () if next(d_extgstates ) then trace_flush("extgstates") pdfimmediateobject(r_extgstates, tostring(d_extgstates )) end end -local function flushcolorspaces() if next(d_colorspaces) then trace_flush("colorspaces") pdfimmediateobject(r_colorspaces,tostring(d_colorspaces)) end end -local function flushpatterns () if next(d_patterns ) then trace_flush("patterns") pdfimmediateobject(r_patterns, tostring(d_patterns )) end end -local function flushshades () if next(d_shades ) then trace_flush("shades") pdfimmediateobject(r_shades, tostring(d_shades )) end end - -function lpdf.collectedresources() - local ExtGState = next(d_extgstates ) and p_extgstates - local ColorSpace = next(d_colorspaces) and p_colorspaces - local Pattern = next(d_patterns ) and p_patterns - local Shading = next(d_shades ) and p_shades - if ExtGState or ColorSpace or Pattern or Shading then - local collected = pdfdictionary { - ExtGState = ExtGState, - ColorSpace = ColorSpace, - Pattern = Pattern, - Shading = Shading, - -- ProcSet = pdfarray { pdfconstant("PDF") }, - } - return collected() - else - return "" - end -end - -function lpdf.adddocumentextgstate (k,v) d_extgstates [k] = v end -function lpdf.adddocumentcolorspace(k,v) d_colorspaces[k] = v end -function lpdf.adddocumentpattern (k,v) d_patterns [k] = v end -function lpdf.adddocumentshade (k,v) d_shades [k] = v end - -registerdocumentfinalizer(flushextgstates,3,"extended graphic states") -registerdocumentfinalizer(flushcolorspaces,3,"color spaces") -registerdocumentfinalizer(flushpatterns,3,"patterns") -registerdocumentfinalizer(flushshades,3,"shades") - -registerdocumentfinalizer(flushcatalog,3,"catalog") -registerdocumentfinalizer(flushinfo,3,"info") -registerdocumentfinalizer(flushnames,3,"names") -- before catalog - -registerpagefinalizer(checkextgstates,3,"extended graphic states") -registerpagefinalizer(checkcolorspaces,3,"color spaces") -registerpagefinalizer(checkpatterns,3,"patterns") -registerpagefinalizer(checkshades,3,"shades") - --- in strc-bkm: lpdf.registerdocumentfinalizer(function() structures.bookmarks.place() end,1) - -function lpdf.rotationcm(a) - local s, c = sind(a), cosd(a) - return format("%0.6f %0.6f %0.6f %0.6f 0 0 cm",c,s,-s,c) -end - --- ! -> universaltime - -local timestamp = os.date("%Y-%m-%dT%X") .. 
os.timezone(true) - -function lpdf.timestamp() - return timestamp -end - -function lpdf.pdftimestamp(str) - local Y, M, D, h, m, s, Zs, Zh, Zm = match(str,"^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)([%+%-])(%d%d):(%d%d)$") - return Y and format("D:%s%s%s%s%s%s%s%s'%s'",Y,M,D,h,m,s,Zs,Zh,Zm) -end - -function lpdf.id() - return format("%s.%s",tex.jobname,timestamp) -end - -function lpdf.checkedkey(t,key,variant) - local pn = t and t[key] - if pn then - local tn = type(pn) - if tn == variant then - if variant == "string" then - return pn ~= "" and pn or nil - elseif variant == "table" then - return next(pn) and pn or nil - else - return pn - end - elseif tn == "string" and variant == "number" then - return tonumber(pn) - end - end -end - -function lpdf.checkedvalue(value,variant) -- code not shared - if value then - local tv = type(value) - if tv == variant then - if variant == "string" then - return value ~= "" and value - elseif variant == "table" then - return next(value) and value - else - return value - end - elseif tv == "string" and variant == "number" then - return tonumber(value) - end - end -end - -function lpdf.limited(n,min,max,default) - if not n then - return default - else - n = tonumber(n) - if not n then - return default - elseif n > max then - return max - elseif n < min then - return min - else - return n - end - end -end - --- lpdf.addtoinfo("ConTeXt.Version", tex.contextversiontoks) --- lpdf.addtoinfo("ConTeXt.Time", os.date("%Y.%m.%d %H:%M")) -- :%S --- lpdf.addtoinfo("ConTeXt.Jobname", environment.jobname) --- lpdf.addtoinfo("ConTeXt.Url", "www.pragma-ade.com") - -if not pdfreferenceobject then - - local delayed = { } - - local function flush() - local n = 0 - for k,v in next, delayed do - pdfimmediateobject(k,v) - n = n + 1 - end - if trace_objects then - report_objects("%s objects flushed",n) - end - delayed = { } - end - - lpdf.registerdocumentfinalizer(flush,3,"objects") -- so we need a final flush too - lpdf.registerpagefinalizer (flush,3,"objects") -- somehow this lags behind .. 
I need to look into that some day - - function lpdf.delayedobject(data) - local n = pdfreserveobject() - delayed[n] = data - return n - end - -end +if not modules then modules = { } end modules ['lpdf-ini'] = { + version = 1.001, + comment = "companion to lpdf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local setmetatable, getmetatable, type, next, tostring, tonumber, rawset = setmetatable, getmetatable, type, next, tostring, tonumber, rawset +local char, byte, format, gsub, concat, match, sub, gmatch = string.char, string.byte, string.format, string.gsub, table.concat, string.match, string.sub, string.gmatch +local utfchar, utfvalues = utf.char, utf.values +local sind, cosd = math.sind, math.cosd +local lpegmatch, P, C, R, S, Cc, Cs = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs +local formatters = string.formatters + +local pdfreserveobject = pdf.reserveobj +local pdfimmediateobject = pdf.immediateobj +local pdfdeferredobject = pdf.obj +local pdfreferenceobject = pdf.refobj + +local trace_finalizers = false trackers.register("backend.finalizers", function(v) trace_finalizers = v end) +local trace_resources = false trackers.register("backend.resources", function(v) trace_resources = v end) +local trace_objects = false trackers.register("backend.objects", function(v) trace_objects = v end) +local trace_detail = false trackers.register("backend.detail", function(v) trace_detail = v end) + +local report_objects = logs.reporter("backend","objects") +local report_finalizing = logs.reporter("backend","finalizing") + +local backends = backends + +backends.pdf = backends.pdf or { + comment = "backend for directly generating pdf output", + nodeinjections = { }, + codeinjections = { }, + registrations = { }, + tables = { }, +} + +lpdf = lpdf or { } +local lpdf = lpdf + +local function tosixteen(str) -- an lpeg might be faster (no table) + if not str or str == "" then + return "" -- not () as we want an indication that it's unicode + else + local r, n = { ""] = "\\>", +-- ["["] = "\\[", ["]"] = "\\]", +-- ["("] = "\\(", [")"] = "\\)", +-- } +-- +-- local escaped = Cs(Cc("(") * (S("\\/#<>[]()")/escapes + P(1))^0 * Cc(")")) +-- +-- local function toeight(str) +-- if not str or str == "" then +-- return "()" +-- else +-- return lpegmatch(escaped,str) +-- end +-- end +-- +-- -- no need for escaping .. just use unicode instead + +-- \0 \t \n \r \f ( ) [ ] { } / % + +local function toeight(str) + return "(" .. str .. 
")" +end + +lpdf.toeight = toeight + +--~ local escaped = lpeg.Cs((lpeg.S("\0\t\n\r\f ()[]{}/%")/function(s) return format("#%02X",byte(s)) end + lpeg.P(1))^0) + +--~ local function cleaned(str) +--~ return (str and str ~= "" and lpegmatch(escaped,str)) or "" +--~ end + +--~ lpdf.cleaned = cleaned -- not public yet + +local function merge_t(a,b) + local t = { } + for k,v in next, a do t[k] = v end + for k,v in next, b do t[k] = v end + return setmetatable(t,getmetatable(a)) +end + +local f_key_value = formatters["/%s %s"] +local f_key_dictionary = formatters["/%s << % t >>"] +local f_dictionary = formatters["<< % t >>"] +local f_key_array = formatters["/%s [ % t ]"] +local f_array = formatters["[ % t ]"] + +local tostring_a, tostring_d + +tostring_d = function(t,contentonly,key) + if not next(t) then + if contentonly then + return "" + else + return "<< >>" + end + else + local r, rn = { }, 0 + for k, v in next, t do + rn = rn + 1 + local tv = type(v) + if tv == "string" then + r[rn] = f_key_value(k,toeight(v)) + elseif tv == "unicode" then + r[rn] = f_key_value(k,tosixteen(v)) + elseif tv == "table" then + local mv = getmetatable(v) + if mv and mv.__lpdftype then + r[rn] = f_key_value(k,tostring(v)) + elseif v[1] then + r[rn] = f_key_value(k,tostring_a(v)) + else + r[rn] = f_key_value(k,tostring_d(v)) + end + else + r[rn] = f_key_value(k,tostring(v)) + end + end + if contentonly then + return concat(r," ") + elseif key then + return f_key_dictionary(key,r) + else + return f_dictionary(r) + end + end +end + +tostring_a = function(t,contentonly,key) + local tn = #t + if tn == 0 then + if contentonly then + return "" + else + return "[ ]" + end + else + local r = { } + for k=1,tn do + local v = t[k] + local tv = type(v) + if tv == "string" then + r[k] = toeight(v) + elseif tv == "unicode" then + r[k] = tosixteen(v) + elseif tv == "table" then + local mv = getmetatable(v) + local mt = mv and mv.__lpdftype + if mt then + r[k] = tostring(v) + elseif v[1] then + r[k] = tostring_a(v) + else + r[k] = tostring_d(v) + end + else + r[k] = tostring(v) + end + end + if contentonly then + return concat(r, " ") + elseif key then + return f_key_array(key,r) + else + return f_array(r) + end + end +end + +local tostring_x = function(t) return concat(t, " ") end +local tostring_s = function(t) return toeight(t[1]) end +local tostring_u = function(t) return tosixteen(t[1]) end +local tostring_n = function(t) return tostring(t[1]) end -- tostring not needed +local tostring_c = function(t) return t[1] end -- already prefixed (hashed) +local tostring_z = function() return "null" end +local tostring_t = function() return "true" end +local tostring_f = function() return "false" end +local tostring_r = function(t) local n = t[1] return n and n > 0 and (n .. 
" 0 R") or "NULL" end + +local tostring_v = function(t) + local s = t[1] + if type(s) == "table" then + return concat(s,"") + else + return s + end +end + +local function value_x(t) return t end -- the call is experimental +local function value_s(t,key) return t[1] end -- the call is experimental +local function value_u(t,key) return t[1] end -- the call is experimental +local function value_n(t,key) return t[1] end -- the call is experimental +local function value_c(t) return sub(t[1],2) end -- the call is experimental +local function value_d(t) return tostring_d(t,true) end -- the call is experimental +local function value_a(t) return tostring_a(t,true) end -- the call is experimental +local function value_z() return nil end -- the call is experimental +local function value_t(t) return t.value or true end -- the call is experimental +local function value_f(t) return t.value or false end -- the call is experimental +local function value_r() return t[1] or 0 end -- the call is experimental -- NULL +local function value_v() return t[1] end -- the call is experimental + +local function add_x(t,k,v) rawset(t,k,tostring(v)) end + +local mt_x = { __lpdftype = "stream", __tostring = tostring_x, __call = value_x, __newindex = add_x } +local mt_d = { __lpdftype = "dictionary", __tostring = tostring_d, __call = value_d } +local mt_a = { __lpdftype = "array", __tostring = tostring_a, __call = value_a } +local mt_u = { __lpdftype = "unicode", __tostring = tostring_u, __call = value_u } +local mt_s = { __lpdftype = "string", __tostring = tostring_s, __call = value_s } +local mt_n = { __lpdftype = "number", __tostring = tostring_n, __call = value_n } +local mt_c = { __lpdftype = "constant", __tostring = tostring_c, __call = value_c } +local mt_z = { __lpdftype = "null", __tostring = tostring_z, __call = value_z } +local mt_t = { __lpdftype = "true", __tostring = tostring_t, __call = value_t } +local mt_f = { __lpdftype = "false", __tostring = tostring_f, __call = value_f } +local mt_r = { __lpdftype = "reference", __tostring = tostring_r, __call = value_r } +local mt_v = { __lpdftype = "verbose", __tostring = tostring_v, __call = value_v } + +local function pdfstream(t) -- we need to add attributes + if t then + for i=1,#t do + t[i] = tostring(t[i]) + end + end + return setmetatable(t or { },mt_x) +end + +local function pdfdictionary(t) + return setmetatable(t or { },mt_d) +end + +local function pdfarray(t) + if type(t) == "string" then + return setmetatable({ t },mt_a) + else + return setmetatable(t or { },mt_a) + end +end + +local function pdfstring(str,default) + return setmetatable({ str or default or "" },mt_s) +end + +local function pdfunicode(str,default) + return setmetatable({ str or default or "" },mt_u) +end + +local cache = { } -- can be weak + +local function pdfnumber(n,default) -- 0-10 + n = n or default + local c = cache[n] + if not c then + c = setmetatable({ n },mt_n) + -- cache[n] = c -- too many numbers + end + return c +end + +for i=-1,9 do cache[i] = pdfnumber(i) end + +local cache = { } -- can be weak + +local forbidden, replacements = "\0\t\n\r\f ()[]{}/%%#\\", { } -- table faster than function + +for s in gmatch(forbidden,".") do + replacements[s] = format("#%02x",byte(s)) +end + +local escaped = Cs(Cc("/") * (S(forbidden)/replacements + P(1))^0) + +local function pdfconstant(str,default) + str = str or default or "" + local c = cache[str] + if not c then + -- c = setmetatable({ "/" .. 
str },mt_c) + c = setmetatable({ lpegmatch(escaped,str) },mt_c) + cache[str] = c + end + return c +end + +local p_null = { } setmetatable(p_null, mt_z) +local p_true = { } setmetatable(p_true, mt_t) +local p_false = { } setmetatable(p_false,mt_f) + +local function pdfnull() + return p_null +end + +--~ print(pdfboolean(false),pdfboolean(false,false),pdfboolean(false,true)) +--~ print(pdfboolean(true),pdfboolean(true,false),pdfboolean(true,true)) +--~ print(pdfboolean(nil,true),pdfboolean(nil,false)) + +local function pdfboolean(b,default) + if type(b) == "boolean" then + return b and p_true or p_false + else + return default and p_true or p_false + end +end + +local function pdfreference(r) + return setmetatable({ r or 0 },mt_r) +end + +local function pdfverbose(t) -- maybe check for type + return setmetatable({ t or "" },mt_v) +end + +lpdf.stream = pdfstream -- THIS WILL PROBABLY CHANGE +lpdf.dictionary = pdfdictionary +lpdf.array = pdfarray +lpdf.string = pdfstring +lpdf.unicode = pdfunicode +lpdf.number = pdfnumber +lpdf.constant = pdfconstant +lpdf.null = pdfnull +lpdf.boolean = pdfboolean +lpdf.reference = pdfreference +lpdf.verbose = pdfverbose + +-- n = pdf.obj(n, str) +-- n = pdf.obj(n, "file", filename) +-- n = pdf.obj(n, "stream", streamtext, attrtext) +-- n = pdf.obj(n, "streamfile", filename, attrtext) + +-- we only use immediate objects + +-- todo: tracing + +local names, cache = { }, { } + +function lpdf.reserveobject(name) + if name == "annot" then + -- catch misuse + return pdfreserveobject("annot") + else + local r = pdfreserveobject() + if name then + names[name] = r + if trace_objects then + report_objects("reserving number %a under name %a",r,name) + end + elseif trace_objects then + report_objects("reserving number %a",r) + end + return r + end +end + +function lpdf.reserveannotation() + return pdfreserveobject("annot") +end + +-- lpdf.immediateobject = pdfimmediateobject +-- lpdf.deferredobject = pdfdeferredobject +-- lpdf.object = pdfdeferredobject +-- lpdf.referenceobject = pdfreferenceobject + +lpdf.pagereference = pdf.pageref or tex.pdfpageref +lpdf.registerannotation = pdf.registerannot + +function lpdf.delayedobject(data) -- we will get rid of this one + local n = pdfdeferredobject(data) + pdfreferenceobject(n) + return n +end + +function lpdf.flushobject(name,data) + if data then + local named = names[name] + if named then + if not trace_objects then + elseif trace_detail then + report_objects("flushing data to reserved object with name %a, data: %S",name,data) + else + report_objects("flushing data to reserved object with name %a",name) + end + return pdfimmediateobject(named,tostring(data)) + else + if not trace_objects then + elseif trace_detail then + report_objects("flushing data to reserved object with number %s, data: %S",name,data) + else + report_objects("flushing data to reserved object with number %s",name) + end + return pdfimmediateobject(name,tostring(data)) + end + else + if trace_objects and trace_detail then + report_objects("flushing data: %S",name) + end + return pdfimmediateobject(tostring(name)) + end +end + + +function lpdf.flushstreamobject(data,dict,compressed) -- default compressed + if trace_objects then + report_objects("flushing stream object of %s bytes",#data) + end + local dtype = type(dict) + return pdfdeferredobject { + immediate = true, + compresslevel = compressed == false and 0 or nil, + type = "stream", + string = data, + attr = (dtype == "string" and dict) or (dtype == "table" and dict()) or nil, + } +end + +function 
lpdf.flushstreamfileobject(filename,dict,compressed) -- default compressed + if trace_objects then + report_objects("flushing stream file object %a",filename) + end + local dtype = type(dict) + return pdfdeferredobject { + immediate = true, + compresslevel = compressed == false and 0 or nil, + type = "stream", + file = filename, + attr = (dtype == "string" and dict) or (dtype == "table" and dict()) or nil, + } +end + +local shareobjectcache, shareobjectreferencecache = { }, { } + +function lpdf.shareobject(content) + if content == nil then + -- invalid object not created + else + content = tostring(content) + local o = shareobjectcache[content] + if not o then + o = pdfimmediateobject(content) + shareobjectcache[content] = o + end + return o + end +end + +function lpdf.shareobjectreference(content) + if content == nil then + -- invalid object not created + else + content = tostring(content) + local r = shareobjectreferencecache[content] + if not r then + local o = shareobjectcache[content] + if not o then + o = pdfimmediateobject(content) + shareobjectcache[content] = o + end + r = pdfreference(o) + shareobjectreferencecache[content] = r + end + return r + end +end + +--~ local d = lpdf.dictionary() +--~ local e = lpdf.dictionary { ["e"] = "abc", x = lpdf.dictionary { ["f"] = "ABC" } } +--~ local f = lpdf.dictionary { ["f"] = "ABC" } +--~ local a = lpdf.array { lpdf.array { lpdf.string("xxx") } } + +--~ print(a) +--~ os.exit() + +--~ d["test"] = lpdf.string ("test") +--~ d["more"] = "more" +--~ d["bool"] = true +--~ d["numb"] = 1234 +--~ d["oeps"] = lpdf.dictionary { ["hans"] = "ton" } +--~ d["whow"] = lpdf.array { lpdf.string("ton") } + +--~ a[#a+1] = lpdf.string("xxx") +--~ a[#a+1] = lpdf.string("yyy") + +--~ d.what = a + +--~ print(e) + +--~ local d = lpdf.dictionary() +--~ d["abcd"] = { 1, 2, 3, "test" } +--~ print(d) +--~ print(d()) + +--~ local d = lpdf.array() +--~ d[#d+1] = 1 +--~ d[#d+1] = 2 +--~ d[#d+1] = 3 +--~ d[#d+1] = "test" +--~ print(d) + +--~ local d = lpdf.array() +--~ d[#d+1] = { 1, 2, 3, "test" } +--~ print(d) + +--~ local d = lpdf.array() +--~ d[#d+1] = { a=1, b=2, c=3, d="test" } +--~ print(d) + +--~ local s = lpdf.constant("xx") +--~ print(s) -- fails somehow +--~ print(s()) -- fails somehow + +--~ local s = lpdf.boolean(false) +--~ s.value = true +--~ print(s) +--~ print(s()) + +-- three priority levels, default=2 + +local pagefinalizers, documentfinalizers = { { }, { }, { } }, { { }, { }, { } } + +local pageresources, pageattributes, pagesattributes + +local function resetpageproperties() + pageresources = pdfdictionary() + pageattributes = pdfdictionary() + pagesattributes = pdfdictionary() +end + +resetpageproperties() + +local function setpageproperties() + pdf.pageresources = pageresources () + pdf.pageattributes = pageattributes () + pdf.pagesattributes = pagesattributes() +end + +local function addtopageresources (k,v) pageresources [k] = v end +local function addtopageattributes (k,v) pageattributes [k] = v end +local function addtopagesattributes(k,v) pagesattributes[k] = v end + +lpdf.addtopageresources = addtopageresources +lpdf.addtopageattributes = addtopageattributes +lpdf.addtopagesattributes = addtopagesattributes + +local function set(where,what,f,when,comment) + if type(when) == "string" then + when, comment = 2, when + elseif not when then + when = 2 + end + local w = where[when] + w[#w+1] = { f, comment } + if trace_finalizers then + report_finalizing("%s set: [%s,%s]",what,when,#w) + end +end + +local function run(where,what) + if 
trace_finalizers then + report_finalizing("start backend, category %a, n %a",what,#where) + end + for i=1,#where do + local w = where[i] + for j=1,#w do + local wj = w[j] + if trace_finalizers then + report_finalizing("%s finalizer: [%s,%s] %s",what,i,j,wj[2] or "") + end + wj[1]() + end + end + if trace_finalizers then + report_finalizing("stop finalizing") + end +end + +local function registerpagefinalizer(f,when,comment) + set(pagefinalizers,"page",f,when,comment) +end + +local function registerdocumentfinalizer(f,when,comment) + set(documentfinalizers,"document",f,when,comment) +end + +lpdf.registerpagefinalizer = registerpagefinalizer +lpdf.registerdocumentfinalizer = registerdocumentfinalizer + +function lpdf.finalizepage() + if not environment.initex then + -- resetpageproperties() -- maybe better before + run(pagefinalizers,"page") + setpageproperties() + resetpageproperties() -- maybe better before + end +end + +function lpdf.finalizedocument() + if not environment.initex then + run(documentfinalizers,"document") + function lpdf.finalizedocument() + report_finalizing("serious error: the document is finalized multiple times") + function lpdf.finalizedocument() end + end + end +end + +backends.pdf.codeinjections.finalizepage = lpdf.finalizepage -- will go when we have hook + +--~ callbacks.register("finish_pdfpage", lpdf.finalizepage) +callbacks.register("finish_pdffile", lpdf.finalizedocument) + +-- some minimal tracing, handy for checking the order + +local function trace_set(what,key) + if trace_resources then + report_finalizing("setting key %a in %a",key,what) + end +end +local function trace_flush(what) + if trace_resources then + report_finalizing("flushing %a",what) + end +end + +lpdf.protectresources = true + +local catalog = pdfdictionary { Type = pdfconstant("Catalog") } -- nicer, but when we assign we nil the Type +local info = pdfdictionary { Type = pdfconstant("Info") } -- nicer, but when we assign we nil the Type +local names = pdfdictionary { Type = pdfconstant("Names") } -- nicer, but when we assign we nil the Type + +local function flushcatalog() if not environment.initex then trace_flush("catalog") catalog.Type = nil pdf.catalog = catalog() end end +local function flushinfo () if not environment.initex then trace_flush("info") info .Type = nil pdf.info = info () end end +local function flushnames () if not environment.initex then trace_flush("names") names .Type = nil pdf.names = names () end end + +function lpdf.addtocatalog(k,v) if not (lpdf.protectresources and catalog[k]) then trace_set("catalog",k) catalog[k] = v end end +function lpdf.addtoinfo (k,v) if not (lpdf.protectresources and info [k]) then trace_set("info", k) info [k] = v end end +function lpdf.addtonames (k,v) if not (lpdf.protectresources and names [k]) then trace_set("names", k) names [k] = v end end + +local dummy = pdfreserveobject() -- else bug in hvmd due so some internal luatex conflict + +-- Some day I will implement a proper minimalized resource management. 
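
A minimal usage sketch of the finalizer and info hooks set up above; the key names and comment strings are invented for illustration, and priorities run from 1 to 3 with 2 as the default:

    -- runs each time a page is finalized, in the priority 3 group
    lpdf.registerpagefinalizer(function()
        lpdf.addtopageattributes("UserUnit",1) -- illustrative page level key
    end,3,"user unit")

    -- runs once when the document is finalized; a string second argument is
    -- taken as the comment and the priority falls back to the default 2
    lpdf.registerdocumentfinalizer(function()
        lpdf.addtoinfo("Example.Note",lpdf.string("illustrative entry"))
    end,"example note")
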
+ +local r_extgstates, d_extgstates = pdfreserveobject(), pdfdictionary() local p_extgstates = pdfreference(r_extgstates) +local r_colorspaces, d_colorspaces = pdfreserveobject(), pdfdictionary() local p_colorspaces = pdfreference(r_colorspaces) +local r_patterns, d_patterns = pdfreserveobject(), pdfdictionary() local p_patterns = pdfreference(r_patterns) +local r_shades, d_shades = pdfreserveobject(), pdfdictionary() local p_shades = pdfreference(r_shades) + +local function checkextgstates () if next(d_extgstates ) then addtopageresources("ExtGState", p_extgstates ) end end +local function checkcolorspaces() if next(d_colorspaces) then addtopageresources("ColorSpace",p_colorspaces) end end +local function checkpatterns () if next(d_patterns ) then addtopageresources("Pattern", p_patterns ) end end +local function checkshades () if next(d_shades ) then addtopageresources("Shading", p_shades ) end end + +local function flushextgstates () if next(d_extgstates ) then trace_flush("extgstates") pdfimmediateobject(r_extgstates, tostring(d_extgstates )) end end +local function flushcolorspaces() if next(d_colorspaces) then trace_flush("colorspaces") pdfimmediateobject(r_colorspaces,tostring(d_colorspaces)) end end +local function flushpatterns () if next(d_patterns ) then trace_flush("patterns") pdfimmediateobject(r_patterns, tostring(d_patterns )) end end +local function flushshades () if next(d_shades ) then trace_flush("shades") pdfimmediateobject(r_shades, tostring(d_shades )) end end + +function lpdf.collectedresources() + local ExtGState = next(d_extgstates ) and p_extgstates + local ColorSpace = next(d_colorspaces) and p_colorspaces + local Pattern = next(d_patterns ) and p_patterns + local Shading = next(d_shades ) and p_shades + if ExtGState or ColorSpace or Pattern or Shading then + local collected = pdfdictionary { + ExtGState = ExtGState, + ColorSpace = ColorSpace, + Pattern = Pattern, + Shading = Shading, + -- ProcSet = pdfarray { pdfconstant("PDF") }, + } + return collected() + else + return "" + end +end + +function lpdf.adddocumentextgstate (k,v) d_extgstates [k] = v end +function lpdf.adddocumentcolorspace(k,v) d_colorspaces[k] = v end +function lpdf.adddocumentpattern (k,v) d_patterns [k] = v end +function lpdf.adddocumentshade (k,v) d_shades [k] = v end + +registerdocumentfinalizer(flushextgstates,3,"extended graphic states") +registerdocumentfinalizer(flushcolorspaces,3,"color spaces") +registerdocumentfinalizer(flushpatterns,3,"patterns") +registerdocumentfinalizer(flushshades,3,"shades") + +registerdocumentfinalizer(flushcatalog,3,"catalog") +registerdocumentfinalizer(flushinfo,3,"info") +registerdocumentfinalizer(flushnames,3,"names") -- before catalog + +registerpagefinalizer(checkextgstates,3,"extended graphic states") +registerpagefinalizer(checkcolorspaces,3,"color spaces") +registerpagefinalizer(checkpatterns,3,"patterns") +registerpagefinalizer(checkshades,3,"shades") + +-- in strc-bkm: lpdf.registerdocumentfinalizer(function() structures.bookmarks.place() end,1) + +function lpdf.rotationcm(a) + local s, c = sind(a), cosd(a) + return format("%0.6f %0.6f %0.6f %0.6f 0 0 cm",c,s,-s,c) +end + +-- ! -> universaltime + +local timestamp = os.date("%Y-%m-%dT%X") .. 
os.timezone(true) + +function lpdf.timestamp() + return timestamp +end + +function lpdf.pdftimestamp(str) + local Y, M, D, h, m, s, Zs, Zh, Zm = match(str,"^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)([%+%-])(%d%d):(%d%d)$") + return Y and format("D:%s%s%s%s%s%s%s%s'%s'",Y,M,D,h,m,s,Zs,Zh,Zm) +end + +function lpdf.id() + return format("%s.%s",tex.jobname,timestamp) +end + +function lpdf.checkedkey(t,key,variant) + local pn = t and t[key] + if pn then + local tn = type(pn) + if tn == variant then + if variant == "string" then + return pn ~= "" and pn or nil + elseif variant == "table" then + return next(pn) and pn or nil + else + return pn + end + elseif tn == "string" and variant == "number" then + return tonumber(pn) + end + end +end + +function lpdf.checkedvalue(value,variant) -- code not shared + if value then + local tv = type(value) + if tv == variant then + if variant == "string" then + return value ~= "" and value + elseif variant == "table" then + return next(value) and value + else + return value + end + elseif tv == "string" and variant == "number" then + return tonumber(value) + end + end +end + +function lpdf.limited(n,min,max,default) + if not n then + return default + else + n = tonumber(n) + if not n then + return default + elseif n > max then + return max + elseif n < min then + return min + else + return n + end + end +end + +-- lpdf.addtoinfo("ConTeXt.Version", tex.contextversiontoks) +-- lpdf.addtoinfo("ConTeXt.Time", os.date("%Y.%m.%d %H:%M")) -- :%S +-- lpdf.addtoinfo("ConTeXt.Jobname", environment.jobname) +-- lpdf.addtoinfo("ConTeXt.Url", "www.pragma-ade.com") + +if not pdfreferenceobject then + + local delayed = { } + + local function flush() + local n = 0 + for k,v in next, delayed do + pdfimmediateobject(k,v) + n = n + 1 + end + if trace_objects then + report_objects("%s objects flushed",n) + end + delayed = { } + end + + lpdf.registerdocumentfinalizer(flush,3,"objects") -- so we need a final flush too + lpdf.registerpagefinalizer (flush,3,"objects") -- somehow this lags behind .. 
I need to look into that some day + + function lpdf.delayedobject(data) + local n = pdfreserveobject() + delayed[n] = data + return n + end + +end diff --git a/tex/context/base/lpdf-mov.lua b/tex/context/base/lpdf-mov.lua index 41db97e0c..2f0033d1a 100644 --- a/tex/context/base/lpdf-mov.lua +++ b/tex/context/base/lpdf-mov.lua @@ -1,63 +1,63 @@ -if not modules then modules = { } end modules ['lpdf-mov'] = { - version = 1.001, - comment = "companion to lpdf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format - -local lpdf = lpdf - -local nodeinjections = backends.pdf.nodeinjections -local pdfannotation_node = nodes.pool.pdfannotation -local pdfconstant = lpdf.constant -local pdfdictionary = lpdf.dictionary -local pdfarray = lpdf.array -local write_node = node.write - -function nodeinjections.insertmovie(specification) - -- managed in figure inclusion: width, height, factor, repeat, controls, preview, label, foundname - local width = specification.width - local height = specification.height - local factor = specification.factor or number.dimenfactors.bp - local moviedict = pdfdictionary { - F = specification.foundname, - Aspect = pdfarray { factor * width, factor * height }, - Poster = (specification.preview and true) or false, - } - local controldict = pdfdictionary { - ShowControls = (specification.controls and true) or false, - Mode = (specification["repeat"] and pdfconstant("Repeat")) or nil, - } - local action = pdfdictionary { - Subtype = pdfconstant("Movie"), - Border = pdfarray { 0, 0, 0 }, - T = format("movie %s",specification.label), - Movie = moviedict, - A = controldict, - } - write_node(pdfannotation_node(width,height,0,action())) -- test: context(...) -end - -function nodeinjections.insertsound(specification) - -- rmanaged in interaction: repeat, label, foundname - local soundclip = interactions.soundclips.soundclip(specification.label) - if soundclip then - local controldict = pdfdictionary { - Mode = (specification["repeat"] and pdfconstant("Repeat")) or nil - } - local sounddict = pdfdictionary { - F = soundclip.filename - } - local action = pdfdictionary { - Subtype = pdfconstant("Movie"), - Border = pdfarray { 0, 0, 0 }, - T = format("sound %s",specification.label), - Movie = sounddict, - A = controldict, - } - write_node(pdfannotation_node(0,0,0,action())) -- test: context(...) 
- end -end +if not modules then modules = { } end modules ['lpdf-mov'] = { + version = 1.001, + comment = "companion to lpdf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format + +local lpdf = lpdf + +local nodeinjections = backends.pdf.nodeinjections +local pdfannotation_node = nodes.pool.pdfannotation +local pdfconstant = lpdf.constant +local pdfdictionary = lpdf.dictionary +local pdfarray = lpdf.array +local write_node = node.write + +function nodeinjections.insertmovie(specification) + -- managed in figure inclusion: width, height, factor, repeat, controls, preview, label, foundname + local width = specification.width + local height = specification.height + local factor = specification.factor or number.dimenfactors.bp + local moviedict = pdfdictionary { + F = specification.foundname, + Aspect = pdfarray { factor * width, factor * height }, + Poster = (specification.preview and true) or false, + } + local controldict = pdfdictionary { + ShowControls = (specification.controls and true) or false, + Mode = (specification["repeat"] and pdfconstant("Repeat")) or nil, + } + local action = pdfdictionary { + Subtype = pdfconstant("Movie"), + Border = pdfarray { 0, 0, 0 }, + T = format("movie %s",specification.label), + Movie = moviedict, + A = controldict, + } + write_node(pdfannotation_node(width,height,0,action())) -- test: context(...) +end + +function nodeinjections.insertsound(specification) + -- rmanaged in interaction: repeat, label, foundname + local soundclip = interactions.soundclips.soundclip(specification.label) + if soundclip then + local controldict = pdfdictionary { + Mode = (specification["repeat"] and pdfconstant("Repeat")) or nil + } + local sounddict = pdfdictionary { + F = soundclip.filename + } + local action = pdfdictionary { + Subtype = pdfconstant("Movie"), + Border = pdfarray { 0, 0, 0 }, + T = format("sound %s",specification.label), + Movie = sounddict, + A = controldict, + } + write_node(pdfannotation_node(0,0,0,action())) -- test: context(...) 
+ end +end diff --git a/tex/context/base/lpdf-nod.lua b/tex/context/base/lpdf-nod.lua index 60d3fcd5b..9c57d6289 100644 --- a/tex/context/base/lpdf-nod.lua +++ b/tex/context/base/lpdf-nod.lua @@ -1,136 +1,136 @@ -if not modules then modules = { } end modules ['lpdf-nod'] = { - version = 1.001, - comment = "companion to lpdf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format - -local copy_node = node.copy -local new_node = node.new - -local nodepool = nodes.pool -local register = nodepool.register -local whatsitcodes = nodes.whatsitcodes -local nodeinjections = backends.nodeinjections - -local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) pdfliteral.mode = 1 -local pdfsave = register(new_node("whatsit", whatsitcodes.pdfsave)) -local pdfrestore = register(new_node("whatsit", whatsitcodes.pdfrestore)) -local pdfsetmatrix = register(new_node("whatsit", whatsitcodes.pdfsetmatrix)) -local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) pdfdest.named_id = 1 -- xyz_zoom untouched -local pdfannot = register(new_node("whatsit", whatsitcodes.pdfannot)) - -local variables = interfaces.variables - -local views = { -- beware, we do support the pdf keys but this is *not* official - xyz = 0, [variables.standard] = 0, - fit = 1, [variables.fit] = 1, - fith = 2, [variables.width] = 2, - fitv = 3, [variables.height] = 3, - fitb = 4, - fitbh = 5, [variables.minwidth] = 5, - fitbv = 6, [variables.minheight] = 6, - fitr = 7, -} - -function nodepool.pdfliteral(str) - local t = copy_node(pdfliteral) - t.data = str - return t -end - -function nodepool.pdfdirect(str) - local t = copy_node(pdfliteral) - t.data = str - t.mode = 1 - return t -end - -function nodepool.pdfsave() - return copy_node(pdfsave) -end - -function nodepool.pdfrestore() - return copy_node(pdfrestore) -end - -function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty) - local t = copy_node(pdfsetmatrix) - t.data = format("%s %s %s %s",rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty - return t -end - -nodeinjections.save = nodepool.pdfsave -nodeinjections.restore = nodepool.pdfrestore -nodeinjections.transform = nodepool.pdfsetmatrix - -function nodepool.pdfannotation(w,h,d,data,n) - local t = copy_node(pdfannot) - if w and w ~= 0 then - t.width = w - end - if h and h ~= 0 then - t.height = h - end - if d and d ~= 0 then - t.depth = d - end - if n then - t.objnum = n - end - if data and data ~= "" then - t.data = data - end - return t -end - --- (!) The next code in pdfdest.w is wrong: --- --- case pdf_dest_xyz: --- if (matrixused()) { --- set_rect_dimens(pdf, p, parent_box, cur, alt_rule, pdf_dest_margin) ; --- } else { --- pdf_ann_left(p) = pos.h ; --- pdf_ann_top (p) = pos.v ; --- } --- break ; --- --- so we need to force a matrix. - -function nodepool.pdfdestination(w,h,d,name,view,n) - local t = copy_node(pdfdest) - local hasdimensions = false - if w and w ~= 0 then - t.width = w - hasdimensions = true - end - if h and h ~= 0 then - t.height = h - hasdimensions = true - end - if d and d ~= 0 then - t.depth = d - hasdimensions = true - end - if n then - t.objnum = n - end - view = views[view] or view or 1 -- fit is default - t.dest_id = name - t.dest_type = view - if hasdimensions and view == 0 then -- xyz - -- see (!) 
s -> m -> t -> r - local s = copy_node(pdfsave) - local m = copy_node(pdfsetmatrix) - local r = copy_node(pdfrestore) - m.data = "1 0 0 1" - s.next = m m.next = t t.next = r - m.prev = s t.prev = m r.prev = t - return s -- a list - else - return t - end -end +if not modules then modules = { } end modules ['lpdf-nod'] = { + version = 1.001, + comment = "companion to lpdf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format + +local copy_node = node.copy +local new_node = node.new + +local nodepool = nodes.pool +local register = nodepool.register +local whatsitcodes = nodes.whatsitcodes +local nodeinjections = backends.nodeinjections + +local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) pdfliteral.mode = 1 +local pdfsave = register(new_node("whatsit", whatsitcodes.pdfsave)) +local pdfrestore = register(new_node("whatsit", whatsitcodes.pdfrestore)) +local pdfsetmatrix = register(new_node("whatsit", whatsitcodes.pdfsetmatrix)) +local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) pdfdest.named_id = 1 -- xyz_zoom untouched +local pdfannot = register(new_node("whatsit", whatsitcodes.pdfannot)) + +local variables = interfaces.variables + +local views = { -- beware, we do support the pdf keys but this is *not* official + xyz = 0, [variables.standard] = 0, + fit = 1, [variables.fit] = 1, + fith = 2, [variables.width] = 2, + fitv = 3, [variables.height] = 3, + fitb = 4, + fitbh = 5, [variables.minwidth] = 5, + fitbv = 6, [variables.minheight] = 6, + fitr = 7, +} + +function nodepool.pdfliteral(str) + local t = copy_node(pdfliteral) + t.data = str + return t +end + +function nodepool.pdfdirect(str) + local t = copy_node(pdfliteral) + t.data = str + t.mode = 1 + return t +end + +function nodepool.pdfsave() + return copy_node(pdfsave) +end + +function nodepool.pdfrestore() + return copy_node(pdfrestore) +end + +function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty) + local t = copy_node(pdfsetmatrix) + t.data = format("%s %s %s %s",rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty + return t +end + +nodeinjections.save = nodepool.pdfsave +nodeinjections.restore = nodepool.pdfrestore +nodeinjections.transform = nodepool.pdfsetmatrix + +function nodepool.pdfannotation(w,h,d,data,n) + local t = copy_node(pdfannot) + if w and w ~= 0 then + t.width = w + end + if h and h ~= 0 then + t.height = h + end + if d and d ~= 0 then + t.depth = d + end + if n then + t.objnum = n + end + if data and data ~= "" then + t.data = data + end + return t +end + +-- (!) The next code in pdfdest.w is wrong: +-- +-- case pdf_dest_xyz: +-- if (matrixused()) { +-- set_rect_dimens(pdf, p, parent_box, cur, alt_rule, pdf_dest_margin) ; +-- } else { +-- pdf_ann_left(p) = pos.h ; +-- pdf_ann_top (p) = pos.v ; +-- } +-- break ; +-- +-- so we need to force a matrix. + +function nodepool.pdfdestination(w,h,d,name,view,n) + local t = copy_node(pdfdest) + local hasdimensions = false + if w and w ~= 0 then + t.width = w + hasdimensions = true + end + if h and h ~= 0 then + t.height = h + hasdimensions = true + end + if d and d ~= 0 then + t.depth = d + hasdimensions = true + end + if n then + t.objnum = n + end + view = views[view] or view or 1 -- fit is default + t.dest_id = name + t.dest_type = view + if hasdimensions and view == 0 then -- xyz + -- see (!) 
s -> m -> t -> r + local s = copy_node(pdfsave) + local m = copy_node(pdfsetmatrix) + local r = copy_node(pdfrestore) + m.data = "1 0 0 1" + s.next = m m.next = t t.next = r + m.prev = s t.prev = m r.prev = t + return s -- a list + else + return t + end +end diff --git a/tex/context/base/lpdf-ren.lua b/tex/context/base/lpdf-ren.lua index 6af65f9de..19582817d 100644 --- a/tex/context/base/lpdf-ren.lua +++ b/tex/context/base/lpdf-ren.lua @@ -1,349 +1,349 @@ -if not modules then modules = { } end modules ['lpdf-ren'] = { - version = 1.001, - comment = "companion to lpdf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- rendering - -local tostring, tonumber, next = tostring, tonumber, next -local format, rep = string.format, string.rep -local concat = table.concat -local settings_to_array = utilities.parsers.settings_to_array - -local backends, lpdf, nodes, node = backends, lpdf, nodes, node - -local nodeinjections = backends.pdf.nodeinjections -local codeinjections = backends.pdf.codeinjections -local registrations = backends.pdf.registrations -local viewerlayers = attributes.viewerlayers - -local references = structures.references - -references.executers = references.executers or { } -local executers = references.executers - -local variables = interfaces.variables - -local v_no = variables.no -local v_yes = variables.yes -local v_start = variables.start -local v_stop = variables.stop -local v_reset = variables.reset -local v_auto = variables.auto -local v_random = variables.random - -local pdfconstant = lpdf.constant -local pdfdictionary = lpdf.dictionary -local pdfarray = lpdf.array -local pdfreference = lpdf.reference -local pdfflushobject = lpdf.flushobject -local pdfreserveobject = lpdf.reserveobject - -local nodepool = nodes.pool -local register = nodepool.register -local pdfliteral = nodepool.pdfliteral - -local pdf_ocg = pdfconstant("OCG") -local pdf_ocmd = pdfconstant("OCMD") -local pdf_off = pdfconstant("OFF") -local pdf_on = pdfconstant("ON") -local pdf_toggle = pdfconstant("Toggle") -local pdf_setocgstate = pdfconstant("SetOCGState") - -local copy_node = node.copy - -local lpdf_usage = pdfdictionary { Print = pdfdictionary { PrintState = pdf_off } } - --- We can have references to layers before they are places, for instance from --- hide and vide actions. This is why we need to be able to force usage of layers --- at several moments. 
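
As a sketch of what such a hide action amounts to at the pdf end, assuming a viewer layer tagged "answers" has been defined (tag and object number are purely illustrative):

    -- resolving a hide action through the reference executers
    local action = structures.references.executers.hidelayer("answers")
    -- the resulting action dictionary serializes to something like
    --   << /S /SetOCGState /State [ /OFF 12 0 R ] >>
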
- --- management - -local pdfln, pdfld = { }, { } -local textlayers, hidelayers, videlayers = pdfarray(), pdfarray(), pdfarray() -local pagelayers, pagelayersreference, cache = nil, nil, { } -local alphabetic = { } - -local specifications = { } -local initialized = { } - -function codeinjections.defineviewerlayer(specification) - if viewerlayers.supported and textlayers then - local tag = specification.tag - if not specifications[tag] then - specifications[tag] = specification - end - end -end - -local function useviewerlayer(name) -- move up so that we can use it as local - if not environment.initex and not initialized[name] then - local specification = specifications[name] - if specification then - specifications[name] = nil -- or not - initialized [name] = true - if not pagelayers then - pagelayers = pdfdictionary() - pagelayersreference = pdfreserveobject() - end - local tag = specification.tag - -- todo: reserve - local nn = pdfreserveobject() - local nr = pdfreference(nn) - local nd = pdfdictionary { - Type = pdf_ocg, - Name = specification.title or "unknown", - Intent = ((specification.editable ~= v_no) and pdf_design) or nil, -- disable layer hiding by user - Usage = ((specification.printable == v_no) and lpdf_usage) or nil, -- printable or not - } - cache[#cache+1] = { nn, nd } - pdfln[tag] = nr -- was n - local dn = pdfreserveobject() - local dr = pdfreference(dn) - local dd = pdfdictionary { - Type = pdf_ocmd, - OCGs = pdfarray { nr }, - } - cache[#cache+1] = { dn, dd } - pdfld[tag] = dr - textlayers[#textlayers+1] = nr - alphabetic[tag] = nr - if specification.visible == v_start then - videlayers[#videlayers+1] = nr - else - hidelayers[#hidelayers+1] = nr - end - pagelayers[tag] = dr -- check - else - -- todo: message - end - end -end - -codeinjections.useviewerlayer = useviewerlayer - -local function layerreference(name) - local r = pdfln[name] - if r then - return r - else - useviewerlayer(name) - return pdfln[name] - end -end - -lpdf.layerreference = layerreference -- also triggered when a hide or vide happens - -local function flushtextlayers() - if viewerlayers.supported then - if pagelayers then - pdfflushobject(pagelayersreference,pagelayers) - end - for i=1,#cache do - local ci = cache[i] - pdfflushobject(ci[1],ci[2]) - end - if textlayers and #textlayers > 0 then -- we can group them if needed, like: layout - local sortedlayers = { } - for k, v in table.sortedhash(alphabetic) do - sortedlayers[#sortedlayers+1] = v -- maybe do a proper numeric sort as well - end - local d = pdfdictionary { - OCGs = textlayers, - D = pdfdictionary { - Name = "Document", - -- Order = (viewerlayers.hasorder and textlayers) or nil, - Order = (viewerlayers.hasorder and sortedlayers) or nil, - ON = videlayers, - OFF = hidelayers, - BaseState = pdf_on, - }, - } - lpdf.addtocatalog("OCProperties",d) - textlayers = nil - end - end -end - -local function flushpagelayers() -- we can share these - if pagelayers then - lpdf.addtopageresources("Properties",pdfreference(pagelayersreference)) -- we could cache this - end -end - -lpdf.registerpagefinalizer (flushpagelayers,"layers") -lpdf.registerdocumentfinalizer(flushtextlayers,"layers") - -local function setlayer(what,arguments) - -- maybe just a gmatch of even better, earlier in lpeg - arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments) - local state = pdfarray { what } - for i=1,#arguments do - local p = layerreference(arguments[i]) - if p then - state[#state+1] = p - end - end - return pdfdictionary { - S = 
pdf_setocgstate, - State = state, - } -end - -function executers.hidelayer (arguments) return setlayer(pdf_off, arguments) end -function executers.videlayer (arguments) return setlayer(pdf_on, arguments) end -function executers.togglelayer(arguments) return setlayer(pdf_toggle,arguments) end - --- injection - -function codeinjections.startlayer(name) -- used in mp - if not name then - name = "unknown" - end - useviewerlayer(name) - return format("/OC /%s BDC",name) -end - -function codeinjections.stoplayer(name) -- used in mp - return "EMC" -end - -local cache = { } - -function nodeinjections.startlayer(name) - local c = cache[name] - if not c then - useviewerlayer(name) - c = register(pdfliteral(format("/OC /%s BDC",name))) - cache[name] = c - end - return copy_node(c) -end - -local stop = register(pdfliteral("EMC")) - -function nodeinjections.stoplayer() - return copy_node(stop) -end - --- experimental stacker code (slow, can be optimized): !!!! TEST CODE !!!! - -local values = viewerlayers.values -local startlayer = codeinjections.startlayer -local stoplayer = codeinjections.stoplayer - -function nodeinjections.startstackedlayer(s,t,first,last) - local r = { } - for i=first,last do - r[#r+1] = startlayer(values[t[i]]) - end - r = concat(r," ") - return pdfliteral(r) -end - -function nodeinjections.stopstackedlayer(s,t,first,last) - local r = { } - for i=last,first,-1 do - r[#r+1] = stoplayer() - end - r = concat(r," ") - return pdfliteral(r) -end - -function nodeinjections.changestackedlayer(s,t1,first1,last1,t2,first2,last2) - local r = { } - for i=last1,first1,-1 do - r[#r+1] = stoplayer() - end - for i=first2,last2 do - r[#r+1] = startlayer(values[t2[i]]) - end - r = concat(r," ") - return pdfliteral(r) -end - --- transitions - -local pagetransitions = { - {"split","in","vertical"}, {"split","in","horizontal"}, - {"split","out","vertical"}, {"split","out","horizontal"}, - {"blinds","horizontal"}, {"blinds","vertical"}, - {"box","in"}, {"box","out"}, - {"wipe","east"}, {"wipe","west"}, {"wipe","north"}, {"wipe","south"}, - {"dissolve"}, - {"glitter","east"}, {"glitter","south"}, - {"fly","in","east"}, {"fly","in","west"}, {"fly","in","north"}, {"fly","in","south"}, - {"fly","out","east"}, {"fly","out","west"}, {"fly","out","north"}, {"fly","out","south"}, - {"push","east"}, {"push","west"}, {"push","north"}, {"push","south"}, - {"cover","east"}, {"cover","west"}, {"cover","north"}, {"cover","south"}, - {"uncover","east"}, {"uncover","west"}, {"uncover","north"}, {"uncover","south"}, - {"fade"}, -} - -local mapping = { - split = { "S" , pdfconstant("Split") }, - blinds = { "S" , pdfconstant("Blinds") }, - box = { "S" , pdfconstant("Box") }, - wipe = { "S" , pdfconstant("Wipe") }, - dissolve = { "S" , pdfconstant("Dissolve") }, - glitter = { "S" , pdfconstant("Glitter") }, - replace = { "S" , pdfconstant("R") }, - fly = { "S" , pdfconstant("Fly") }, - push = { "S" , pdfconstant("Push") }, - cover = { "S" , pdfconstant("Cover") }, - uncover = { "S" , pdfconstant("Uncover") }, - fade = { "S" , pdfconstant("Fade") }, - horizontal = { "Dm" , pdfconstant("H") }, - vertical = { "Dm" , pdfconstant("V") }, - ["in"] = { "M" , pdfconstant("I") }, - out = { "M" , pdfconstant("O") }, - east = { "Di" , 0 }, - north = { "Di" , 90 }, - west = { "Di" , 180 }, - south = { "Di" , 270 }, -} - -local last = 0 - --- n: number, "stop", "reset", "random", "a,b,c" delay: number, "none" - -function codeinjections.setpagetransition(specification) - local n, delay = specification.n, specification.delay - if not n 
or n == "" then - return -- let's forget about it - elseif n == v_auto then - if last >= #pagetransitions then - last = 0 - end - n = last + 1 - elseif n == v_stop then - return - elseif n == v_reset then - last = 0 - return - elseif n == v_random then - n = math.random(1,#pagetransitions) - else - n = tonumber(n) - end - local t = n and pagetransitions[n] or pagetransitions[1] - if not t then - t = settings_to_array(n) - end - if t and #t > 0 then - local d = pdfdictionary() - for i=1,#t do - local m = mapping[t[i]] - d[m[1]] = m[2] - end - delay = tonumber(delay) - if delay and delay > 0 then - lpdf.addtopageattributes("Dur",delay) - end - lpdf.addtopageattributes("Trans",d) - end -end +if not modules then modules = { } end modules ['lpdf-ren'] = { + version = 1.001, + comment = "companion to lpdf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- rendering + +local tostring, tonumber, next = tostring, tonumber, next +local format, rep = string.format, string.rep +local concat = table.concat +local settings_to_array = utilities.parsers.settings_to_array + +local backends, lpdf, nodes, node = backends, lpdf, nodes, node + +local nodeinjections = backends.pdf.nodeinjections +local codeinjections = backends.pdf.codeinjections +local registrations = backends.pdf.registrations +local viewerlayers = attributes.viewerlayers + +local references = structures.references + +references.executers = references.executers or { } +local executers = references.executers + +local variables = interfaces.variables + +local v_no = variables.no +local v_yes = variables.yes +local v_start = variables.start +local v_stop = variables.stop +local v_reset = variables.reset +local v_auto = variables.auto +local v_random = variables.random + +local pdfconstant = lpdf.constant +local pdfdictionary = lpdf.dictionary +local pdfarray = lpdf.array +local pdfreference = lpdf.reference +local pdfflushobject = lpdf.flushobject +local pdfreserveobject = lpdf.reserveobject + +local nodepool = nodes.pool +local register = nodepool.register +local pdfliteral = nodepool.pdfliteral + +local pdf_ocg = pdfconstant("OCG") +local pdf_ocmd = pdfconstant("OCMD") +local pdf_off = pdfconstant("OFF") +local pdf_on = pdfconstant("ON") +local pdf_toggle = pdfconstant("Toggle") +local pdf_setocgstate = pdfconstant("SetOCGState") + +local copy_node = node.copy + +local lpdf_usage = pdfdictionary { Print = pdfdictionary { PrintState = pdf_off } } + +-- We can have references to layers before they are places, for instance from +-- hide and vide actions. This is why we need to be able to force usage of layers +-- at several moments. 
+ +-- management + +local pdfln, pdfld = { }, { } +local textlayers, hidelayers, videlayers = pdfarray(), pdfarray(), pdfarray() +local pagelayers, pagelayersreference, cache = nil, nil, { } +local alphabetic = { } + +local specifications = { } +local initialized = { } + +function codeinjections.defineviewerlayer(specification) + if viewerlayers.supported and textlayers then + local tag = specification.tag + if not specifications[tag] then + specifications[tag] = specification + end + end +end + +local function useviewerlayer(name) -- move up so that we can use it as local + if not environment.initex and not initialized[name] then + local specification = specifications[name] + if specification then + specifications[name] = nil -- or not + initialized [name] = true + if not pagelayers then + pagelayers = pdfdictionary() + pagelayersreference = pdfreserveobject() + end + local tag = specification.tag + -- todo: reserve + local nn = pdfreserveobject() + local nr = pdfreference(nn) + local nd = pdfdictionary { + Type = pdf_ocg, + Name = specification.title or "unknown", + Intent = ((specification.editable ~= v_no) and pdf_design) or nil, -- disable layer hiding by user + Usage = ((specification.printable == v_no) and lpdf_usage) or nil, -- printable or not + } + cache[#cache+1] = { nn, nd } + pdfln[tag] = nr -- was n + local dn = pdfreserveobject() + local dr = pdfreference(dn) + local dd = pdfdictionary { + Type = pdf_ocmd, + OCGs = pdfarray { nr }, + } + cache[#cache+1] = { dn, dd } + pdfld[tag] = dr + textlayers[#textlayers+1] = nr + alphabetic[tag] = nr + if specification.visible == v_start then + videlayers[#videlayers+1] = nr + else + hidelayers[#hidelayers+1] = nr + end + pagelayers[tag] = dr -- check + else + -- todo: message + end + end +end + +codeinjections.useviewerlayer = useviewerlayer + +local function layerreference(name) + local r = pdfln[name] + if r then + return r + else + useviewerlayer(name) + return pdfln[name] + end +end + +lpdf.layerreference = layerreference -- also triggered when a hide or vide happens + +local function flushtextlayers() + if viewerlayers.supported then + if pagelayers then + pdfflushobject(pagelayersreference,pagelayers) + end + for i=1,#cache do + local ci = cache[i] + pdfflushobject(ci[1],ci[2]) + end + if textlayers and #textlayers > 0 then -- we can group them if needed, like: layout + local sortedlayers = { } + for k, v in table.sortedhash(alphabetic) do + sortedlayers[#sortedlayers+1] = v -- maybe do a proper numeric sort as well + end + local d = pdfdictionary { + OCGs = textlayers, + D = pdfdictionary { + Name = "Document", + -- Order = (viewerlayers.hasorder and textlayers) or nil, + Order = (viewerlayers.hasorder and sortedlayers) or nil, + ON = videlayers, + OFF = hidelayers, + BaseState = pdf_on, + }, + } + lpdf.addtocatalog("OCProperties",d) + textlayers = nil + end + end +end + +local function flushpagelayers() -- we can share these + if pagelayers then + lpdf.addtopageresources("Properties",pdfreference(pagelayersreference)) -- we could cache this + end +end + +lpdf.registerpagefinalizer (flushpagelayers,"layers") +lpdf.registerdocumentfinalizer(flushtextlayers,"layers") + +local function setlayer(what,arguments) + -- maybe just a gmatch of even better, earlier in lpeg + arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments) + local state = pdfarray { what } + for i=1,#arguments do + local p = layerreference(arguments[i]) + if p then + state[#state+1] = p + end + end + return pdfdictionary { + S = 
pdf_setocgstate, + State = state, + } +end + +function executers.hidelayer (arguments) return setlayer(pdf_off, arguments) end +function executers.videlayer (arguments) return setlayer(pdf_on, arguments) end +function executers.togglelayer(arguments) return setlayer(pdf_toggle,arguments) end + +-- injection + +function codeinjections.startlayer(name) -- used in mp + if not name then + name = "unknown" + end + useviewerlayer(name) + return format("/OC /%s BDC",name) +end + +function codeinjections.stoplayer(name) -- used in mp + return "EMC" +end + +local cache = { } + +function nodeinjections.startlayer(name) + local c = cache[name] + if not c then + useviewerlayer(name) + c = register(pdfliteral(format("/OC /%s BDC",name))) + cache[name] = c + end + return copy_node(c) +end + +local stop = register(pdfliteral("EMC")) + +function nodeinjections.stoplayer() + return copy_node(stop) +end + +-- experimental stacker code (slow, can be optimized): !!!! TEST CODE !!!! + +local values = viewerlayers.values +local startlayer = codeinjections.startlayer +local stoplayer = codeinjections.stoplayer + +function nodeinjections.startstackedlayer(s,t,first,last) + local r = { } + for i=first,last do + r[#r+1] = startlayer(values[t[i]]) + end + r = concat(r," ") + return pdfliteral(r) +end + +function nodeinjections.stopstackedlayer(s,t,first,last) + local r = { } + for i=last,first,-1 do + r[#r+1] = stoplayer() + end + r = concat(r," ") + return pdfliteral(r) +end + +function nodeinjections.changestackedlayer(s,t1,first1,last1,t2,first2,last2) + local r = { } + for i=last1,first1,-1 do + r[#r+1] = stoplayer() + end + for i=first2,last2 do + r[#r+1] = startlayer(values[t2[i]]) + end + r = concat(r," ") + return pdfliteral(r) +end + +-- transitions + +local pagetransitions = { + {"split","in","vertical"}, {"split","in","horizontal"}, + {"split","out","vertical"}, {"split","out","horizontal"}, + {"blinds","horizontal"}, {"blinds","vertical"}, + {"box","in"}, {"box","out"}, + {"wipe","east"}, {"wipe","west"}, {"wipe","north"}, {"wipe","south"}, + {"dissolve"}, + {"glitter","east"}, {"glitter","south"}, + {"fly","in","east"}, {"fly","in","west"}, {"fly","in","north"}, {"fly","in","south"}, + {"fly","out","east"}, {"fly","out","west"}, {"fly","out","north"}, {"fly","out","south"}, + {"push","east"}, {"push","west"}, {"push","north"}, {"push","south"}, + {"cover","east"}, {"cover","west"}, {"cover","north"}, {"cover","south"}, + {"uncover","east"}, {"uncover","west"}, {"uncover","north"}, {"uncover","south"}, + {"fade"}, +} + +local mapping = { + split = { "S" , pdfconstant("Split") }, + blinds = { "S" , pdfconstant("Blinds") }, + box = { "S" , pdfconstant("Box") }, + wipe = { "S" , pdfconstant("Wipe") }, + dissolve = { "S" , pdfconstant("Dissolve") }, + glitter = { "S" , pdfconstant("Glitter") }, + replace = { "S" , pdfconstant("R") }, + fly = { "S" , pdfconstant("Fly") }, + push = { "S" , pdfconstant("Push") }, + cover = { "S" , pdfconstant("Cover") }, + uncover = { "S" , pdfconstant("Uncover") }, + fade = { "S" , pdfconstant("Fade") }, + horizontal = { "Dm" , pdfconstant("H") }, + vertical = { "Dm" , pdfconstant("V") }, + ["in"] = { "M" , pdfconstant("I") }, + out = { "M" , pdfconstant("O") }, + east = { "Di" , 0 }, + north = { "Di" , 90 }, + west = { "Di" , 180 }, + south = { "Di" , 270 }, +} + +local last = 0 + +-- n: number, "stop", "reset", "random", "a,b,c" delay: number, "none" + +function codeinjections.setpagetransition(specification) + local n, delay = specification.n, specification.delay + if not n 
or n == "" then + return -- let's forget about it + elseif n == v_auto then + if last >= #pagetransitions then + last = 0 + end + n = last + 1 + elseif n == v_stop then + return + elseif n == v_reset then + last = 0 + return + elseif n == v_random then + n = math.random(1,#pagetransitions) + else + n = tonumber(n) + end + local t = n and pagetransitions[n] or pagetransitions[1] + if not t then + t = settings_to_array(n) + end + if t and #t > 0 then + local d = pdfdictionary() + for i=1,#t do + local m = mapping[t[i]] + d[m[1]] = m[2] + end + delay = tonumber(delay) + if delay and delay > 0 then + lpdf.addtopageattributes("Dur",delay) + end + lpdf.addtopageattributes("Trans",d) + end +end diff --git a/tex/context/base/lpdf-swf.lua b/tex/context/base/lpdf-swf.lua index 12c80036f..0267e5255 100644 --- a/tex/context/base/lpdf-swf.lua +++ b/tex/context/base/lpdf-swf.lua @@ -1,306 +1,306 @@ -if not modules then modules = { } end modules ['lpdf-swf'] = { - version = 1.001, - comment = "companion to lpdf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- The following code is based on tests by Luigi Scarso. His prototype --- was using tex code. This is the official implementation. - -local format, gsub = string.format, string.gsub - -local backends, lpdf = backends, lpdf - -local pdfconstant = lpdf.constant -local pdfboolean = lpdf.boolean -local pdfstring = lpdf.string -local pdfunicode = lpdf.unicode -local pdfdictionary = lpdf.dictionary -local pdfarray = lpdf.array -local pdfnull = lpdf.null -local pdfreference = lpdf.reference -local pdfflushobject = lpdf.flushobject - -local checkedkey = lpdf.checkedkey - -local codeinjections = backends.pdf.codeinjections -local nodeinjections = backends.pdf.nodeinjections - -local pdfannotation_node = nodes.pool.pdfannotation - -local trace_swf = false trackers.register("backend.swf", function(v) trace_swf = v end) - -local report_swf = logs.reporter("backend","swf") - -local activations = { - click = "XA", - page = "PO", - focus = "PV", -} - -local deactivations = { - click = "XD", - page = "PI", - focus = "PC", -} - -table.setmetatableindex(activations, function() return activations .click end) -table.setmetatableindex(deactivations,function() return deactivations.focus end) - -local function insertswf(spec) - - local width = spec.width - local height = spec.height - local filename = spec.foundname - local resources = spec.resources - local display = spec.display - local controls = spec.controls - - local resources = resources and parametersets[resources] - local display = display and parametersets[display] - local controls = controls and parametersets[controls] -- not yet used - - local preview = checkedkey(display,"preview","string") - local toolbar = checkedkey(display,"toolbar","boolean") - - local embeddedreference = codeinjections.embedfile { file = filename } - - local flash = pdfdictionary { - Subtype = pdfconstant("Flash"), - Instances = pdfarray { - pdfdictionary { - Asset = embeddedreference, - Params = pdfdictionary { - Binding = pdfconstant("Background") -- Foreground makes swf behave erratic - } - }, - }, - } - - local flashreference = pdfreference(pdfflushobject(flash)) - - local configuration = pdfdictionary { - Configurations = pdfarray { flashreference }, - Assets = pdfdictionary { - Names = pdfarray { - pdfstring(filename), - embeddedreference, - } - }, - } - - -- todo: check op subpath figuur (relatief) - - -- 
filename : ./test.swf (graphic) - -- root : . - -- prefix : ^%./ - -- fullname : ./assets/whatever.xml - -- usedname : assets/whatever.xml - -- filename : assets/whatever.xml - - local root = file.dirname(filename) - local relativepaths = nil - local paths = nil - - if resources then - local names = configuration.Assets.Names - local prefix = false - if root ~= "" and root ~= "." then - prefix = format("^%s/",string.topattern(root)) - end - if prefix and trace_swf then - report_swf("using strip pattern %a",prefix) - end - local function add(fullname,strip) - local filename = gsub(fullname,"^%./","") - local usedname = strip and prefix and gsub(filename,prefix,"") or filename - local embeddedreference = codeinjections.embedfile { - file = fullname, - usedname = usedname, - keepdir = true, - } - names[#names+1] = pdfstring(filename) - names[#names+1] = embeddedreference - if trace_swf then - report_swf("embedding file %a as %a",fullname,usedname) - end - end - relativepaths = resources.relativepaths - if relativepaths then - if trace_swf then - report_swf("checking %s relative paths",#relativepaths) - end - for i=1,#relativepaths do - local relativepath = relativepaths[i] - if trace_swf then - report_swf("checking path %a relative to %a",relativepath,root) - end - local path = file.join(root == "" and "." or root,relativepath) - local files = dir.glob(path .. "/**") - for i=1,#files do - add(files[i],true) - end - end - end - paths = resources.paths - if paths then - if trace_swf then - report_swf("checking absolute %s paths",#paths) - end - for i=1,#paths do - local path = paths[i] - if trace_swf then - report_swf("checking path %a",path) - end - local files = dir.glob(path .. "/**") - for i=1,#files do - add(files[i],false) - end - end - end - local relativefiles = resources.relativefiles - if relativefiles then - if trace_swf then - report_swf("checking %s relative files",#relativefiles) - end - for i=1,#relativefiles do - add(relativefiles[i],true) - end - end - local files = resources.files - if files then - if trace_swf then - report_swf("checking absolute %s files",#files) - end - for i=1,#files do - add(files[i],false) - end - end - end - - local opendisplay = display and display.open or false - local closedisplay = display and display.close or false - - local configurationreference = pdfreference(pdfflushobject(configuration)) - - local activation = pdfdictionary { - Type = pdfconstant("RichMediaActivation"), - Condition = pdfconstant(activations[opendisplay]), - Configuration = flashreference, - Animation = pdfdictionary { - Subtype = pdfconstant("Linear"), - Speed = 1, - Playcount = 1, - }, - Presentation = pdfdictionary { - PassContextClick = false, - Style = pdfconstant("Embedded"), - Toolbar = toolbar, - NavigationPane = false, - Transparent = true, - Window = pdfdictionary { - Type = pdfconstant("RichMediaWindow"), - Width = pdfdictionary { - Default = 100, - Min = 100, - Max = 100, - }, - Height = pdfdictionary { - Default = 100, - Min = 100, - Max = 100, - }, - Position = pdfdictionary { - Type = pdfconstant("RichMediaPosition"), - HAlign = pdfconstant("Near"), - VAlign = pdfconstant("Near"), - HOffset = 0, - VOffset = 0, - } - } - }, - -- View - -- Scripts - } - - local deactivation = pdfdictionary { - Type = pdfconstant("RichMediaDeactivation"), - Condition = pdfconstant(deactivations[closedisplay]), - } - - local richmediasettings = pdfdictionary { - Type = pdfconstant("RichMediaSettings"), - Activation = activation, - Deactivation = deactivation, - } - - local 
settingsreference = pdfreference(pdfflushobject(richmediasettings)) - - local appearance - - if preview then - preview = gsub(preview,"%*",file.nameonly(filename)) - local figure = codeinjections.getpreviewfigure { name = preview, width = width, height = height } - if relativepaths and not figure then - for i=1,#relativepaths do - local path = file.join(root == "" and "." or root,relativepaths[i]) - if trace_swf then - report_swf("checking preview on relative path %s",path) - end - local p = file.join(path,preview) - figure = codeinjections.getpreviewfigure { name = p, width = width, height = height } - if figure then - preview = p - break - end - end - end - if paths and not figure then - for i=1,#paths do - local path = paths[i] - if trace_swf then - report_swf("checking preview on absolute path %s",path) - end - local p = file.join(path,preview) - figure = codeinjections.getpreviewfigure { name = p, width = width, height = height } - if figure then - preview = p - break - end - end - end - if figure then - local image = img.package(figure.status.private) - appearance = pdfdictionary { N = pdfreference(image.objnum) } - if trace_swf then - report_swf("using preview %s",preview) - end - end - end - - local annotation = pdfdictionary { - Subtype = pdfconstant("RichMedia"), - RichMediaContent = configurationreference, - RichMediaSettings = settingsreference, - AP = appearance, - } - - return annotation, nil, nil - -end - -function backends.pdf.nodeinjections.insertswf(spec) - local annotation, preview, ref = insertswf { - foundname = spec.foundname, - width = spec.width, - height = spec.height, - display = spec.display, - controls = spec.controls, - resources = spec.resources, - -- factor = spec.factor, - -- label = spec.label, - } - context(pdfannotation_node(spec.width,spec.height,0,annotation())) -- the context wrap is probably also needed elsewhere -end +if not modules then modules = { } end modules ['lpdf-swf'] = { + version = 1.001, + comment = "companion to lpdf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- The following code is based on tests by Luigi Scarso. His prototype +-- was using tex code. This is the official implementation. 
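-- A minimal illustrative sketch, not part of the patch itself: the shape of
-- the data that insertswf below pulls apart, with field names taken from the
-- code and all concrete values made up. The display and resources entries
-- name parametersets; controls is accepted but not used yet.
--
-- display parameterset  : preview (string, "*" expands to the swf file's name),
--                         toolbar (boolean), open and close (click|page|focus)
-- resources parameterset: relativepaths, paths, relativefiles, files (tables
--                         of directories respectively files to embed)

backends.pdf.nodeinjections.insertswf {
    foundname = "demo.swf",        -- hypothetical graphic
    width     = 400 * 65536,       -- assuming TeX scaled points, as handed
    height    = 300 * 65536,       -- down by the graphic inclusion code
    display   = "swfdisplay",      -- hypothetical parameterset name
    resources = "swfresources",    -- hypothetical parameterset name
}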
+ +local format, gsub = string.format, string.gsub + +local backends, lpdf = backends, lpdf + +local pdfconstant = lpdf.constant +local pdfboolean = lpdf.boolean +local pdfstring = lpdf.string +local pdfunicode = lpdf.unicode +local pdfdictionary = lpdf.dictionary +local pdfarray = lpdf.array +local pdfnull = lpdf.null +local pdfreference = lpdf.reference +local pdfflushobject = lpdf.flushobject + +local checkedkey = lpdf.checkedkey + +local codeinjections = backends.pdf.codeinjections +local nodeinjections = backends.pdf.nodeinjections + +local pdfannotation_node = nodes.pool.pdfannotation + +local trace_swf = false trackers.register("backend.swf", function(v) trace_swf = v end) + +local report_swf = logs.reporter("backend","swf") + +local activations = { + click = "XA", + page = "PO", + focus = "PV", +} + +local deactivations = { + click = "XD", + page = "PI", + focus = "PC", +} + +table.setmetatableindex(activations, function() return activations .click end) +table.setmetatableindex(deactivations,function() return deactivations.focus end) + +local function insertswf(spec) + + local width = spec.width + local height = spec.height + local filename = spec.foundname + local resources = spec.resources + local display = spec.display + local controls = spec.controls + + local resources = resources and parametersets[resources] + local display = display and parametersets[display] + local controls = controls and parametersets[controls] -- not yet used + + local preview = checkedkey(display,"preview","string") + local toolbar = checkedkey(display,"toolbar","boolean") + + local embeddedreference = codeinjections.embedfile { file = filename } + + local flash = pdfdictionary { + Subtype = pdfconstant("Flash"), + Instances = pdfarray { + pdfdictionary { + Asset = embeddedreference, + Params = pdfdictionary { + Binding = pdfconstant("Background") -- Foreground makes swf behave erratic + } + }, + }, + } + + local flashreference = pdfreference(pdfflushobject(flash)) + + local configuration = pdfdictionary { + Configurations = pdfarray { flashreference }, + Assets = pdfdictionary { + Names = pdfarray { + pdfstring(filename), + embeddedreference, + } + }, + } + + -- todo: check op subpath figuur (relatief) + + -- filename : ./test.swf (graphic) + -- root : . + -- prefix : ^%./ + -- fullname : ./assets/whatever.xml + -- usedname : assets/whatever.xml + -- filename : assets/whatever.xml + + local root = file.dirname(filename) + local relativepaths = nil + local paths = nil + + if resources then + local names = configuration.Assets.Names + local prefix = false + if root ~= "" and root ~= "." 
then + prefix = format("^%s/",string.topattern(root)) + end + if prefix and trace_swf then + report_swf("using strip pattern %a",prefix) + end + local function add(fullname,strip) + local filename = gsub(fullname,"^%./","") + local usedname = strip and prefix and gsub(filename,prefix,"") or filename + local embeddedreference = codeinjections.embedfile { + file = fullname, + usedname = usedname, + keepdir = true, + } + names[#names+1] = pdfstring(filename) + names[#names+1] = embeddedreference + if trace_swf then + report_swf("embedding file %a as %a",fullname,usedname) + end + end + relativepaths = resources.relativepaths + if relativepaths then + if trace_swf then + report_swf("checking %s relative paths",#relativepaths) + end + for i=1,#relativepaths do + local relativepath = relativepaths[i] + if trace_swf then + report_swf("checking path %a relative to %a",relativepath,root) + end + local path = file.join(root == "" and "." or root,relativepath) + local files = dir.glob(path .. "/**") + for i=1,#files do + add(files[i],true) + end + end + end + paths = resources.paths + if paths then + if trace_swf then + report_swf("checking absolute %s paths",#paths) + end + for i=1,#paths do + local path = paths[i] + if trace_swf then + report_swf("checking path %a",path) + end + local files = dir.glob(path .. "/**") + for i=1,#files do + add(files[i],false) + end + end + end + local relativefiles = resources.relativefiles + if relativefiles then + if trace_swf then + report_swf("checking %s relative files",#relativefiles) + end + for i=1,#relativefiles do + add(relativefiles[i],true) + end + end + local files = resources.files + if files then + if trace_swf then + report_swf("checking absolute %s files",#files) + end + for i=1,#files do + add(files[i],false) + end + end + end + + local opendisplay = display and display.open or false + local closedisplay = display and display.close or false + + local configurationreference = pdfreference(pdfflushobject(configuration)) + + local activation = pdfdictionary { + Type = pdfconstant("RichMediaActivation"), + Condition = pdfconstant(activations[opendisplay]), + Configuration = flashreference, + Animation = pdfdictionary { + Subtype = pdfconstant("Linear"), + Speed = 1, + Playcount = 1, + }, + Presentation = pdfdictionary { + PassContextClick = false, + Style = pdfconstant("Embedded"), + Toolbar = toolbar, + NavigationPane = false, + Transparent = true, + Window = pdfdictionary { + Type = pdfconstant("RichMediaWindow"), + Width = pdfdictionary { + Default = 100, + Min = 100, + Max = 100, + }, + Height = pdfdictionary { + Default = 100, + Min = 100, + Max = 100, + }, + Position = pdfdictionary { + Type = pdfconstant("RichMediaPosition"), + HAlign = pdfconstant("Near"), + VAlign = pdfconstant("Near"), + HOffset = 0, + VOffset = 0, + } + } + }, + -- View + -- Scripts + } + + local deactivation = pdfdictionary { + Type = pdfconstant("RichMediaDeactivation"), + Condition = pdfconstant(deactivations[closedisplay]), + } + + local richmediasettings = pdfdictionary { + Type = pdfconstant("RichMediaSettings"), + Activation = activation, + Deactivation = deactivation, + } + + local settingsreference = pdfreference(pdfflushobject(richmediasettings)) + + local appearance + + if preview then + preview = gsub(preview,"%*",file.nameonly(filename)) + local figure = codeinjections.getpreviewfigure { name = preview, width = width, height = height } + if relativepaths and not figure then + for i=1,#relativepaths do + local path = file.join(root == "" and "." 
or root,relativepaths[i]) + if trace_swf then + report_swf("checking preview on relative path %s",path) + end + local p = file.join(path,preview) + figure = codeinjections.getpreviewfigure { name = p, width = width, height = height } + if figure then + preview = p + break + end + end + end + if paths and not figure then + for i=1,#paths do + local path = paths[i] + if trace_swf then + report_swf("checking preview on absolute path %s",path) + end + local p = file.join(path,preview) + figure = codeinjections.getpreviewfigure { name = p, width = width, height = height } + if figure then + preview = p + break + end + end + end + if figure then + local image = img.package(figure.status.private) + appearance = pdfdictionary { N = pdfreference(image.objnum) } + if trace_swf then + report_swf("using preview %s",preview) + end + end + end + + local annotation = pdfdictionary { + Subtype = pdfconstant("RichMedia"), + RichMediaContent = configurationreference, + RichMediaSettings = settingsreference, + AP = appearance, + } + + return annotation, nil, nil + +end + +function backends.pdf.nodeinjections.insertswf(spec) + local annotation, preview, ref = insertswf { + foundname = spec.foundname, + width = spec.width, + height = spec.height, + display = spec.display, + controls = spec.controls, + resources = spec.resources, + -- factor = spec.factor, + -- label = spec.label, + } + context(pdfannotation_node(spec.width,spec.height,0,annotation())) -- the context wrap is probably also needed elsewhere +end diff --git a/tex/context/base/lpdf-tag.lua b/tex/context/base/lpdf-tag.lua index 8cdb5f6a4..f5766996c 100644 --- a/tex/context/base/lpdf-tag.lua +++ b/tex/context/base/lpdf-tag.lua @@ -1,313 +1,313 @@ -if not modules then modules = { } end modules ['lpdf-tag'] = { - version = 1.001, - comment = "companion to lpdf-tag.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, match, concat = string.format, string.match, table.concat -local lpegmatch = lpeg.match -local utfchar = utf.char - -local trace_tags = false trackers.register("structures.tags", function(v) trace_tags = v end) - -local report_tags = logs.reporter("backend","tags") - -local backends, lpdf, nodes = backends, lpdf, nodes - -local nodeinjections = backends.pdf.nodeinjections -local codeinjections = backends.pdf.codeinjections - -local tasks = nodes.tasks - -local pdfdictionary = lpdf.dictionary -local pdfarray = lpdf.array -local pdfboolean = lpdf.boolean -local pdfconstant = lpdf.constant -local pdfreference = lpdf.reference -local pdfunicode = lpdf.unicode -local pdfstring = lpdf.string -local pdfflushobject = lpdf.flushobject -local pdfreserveobject = lpdf.reserveobject -local pdfpagereference = lpdf.pagereference - -local nodepool = nodes.pool - -local pdfliteral = nodepool.pdfliteral - -local nodecodes = nodes.nodecodes - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glyph_code = nodecodes.glyph - -local a_tagged = attributes.private('tagged') -local a_image = attributes.private('image') - -local traverse_nodes = node.traverse -local traverse_id = node.traverse_id -local tosequence = nodes.tosequence -local copy_node = node.copy -local slide_nodelist = node.slide - -local structure_stack = { } -local structure_kids = pdfarray() -local structure_ref = pdfreserveobject() -local parent_ref = pdfreserveobject() -local root = { pref = pdfreference(structure_ref), kids = structure_kids } 
-local tree = { } -local elements = { } -local names = pdfarray() -local taglist = structures.tags.taglist -local usedlabels = structures.tags.labels -local properties = structures.tags.properties -local usedmapping = { } - -local colonsplitter = lpeg.splitat(":") -local dashsplitter = lpeg.splitat("-") - -local add_ids = false -- true - - ---~ function codeinjections.maptag(original,target,kind) ---~ mapping[original] = { target, kind or "inline" } ---~ end - -local function finishstructure() - if #structure_kids > 0 then - local nums, n = pdfarray(), 0 - for i=1,#tree do - n = n + 1 ; nums[n] = i-1 - n = n + 1 ; nums[n] = pdfreference(pdfflushobject(tree[i])) - end - local parenttree = pdfdictionary { - Nums = nums - } - -- we need to split names into smaller parts (e.g. alphabetic or so) - if add_ids then - local kids = pdfdictionary { - Limits = pdfarray { names[1], names[#names-1] }, - Names = names, - } - local idtree = pdfdictionary { - Kids = pdfarray { pdfreference(pdfflushobject(kids)) }, - } - end - -- - local rolemap = pdfdictionary() - for k, v in next, usedmapping do - k = usedlabels[k] or k - local p = properties[k] - rolemap[k] = pdfconstant(p and p.pdf or "Span") -- or "Div" - end - local structuretree = pdfdictionary { - Type = pdfconstant("StructTreeRoot"), - K = pdfreference(pdfflushobject(structure_kids)), - ParentTree = pdfreference(pdfflushobject(parent_ref,parenttree)), - IDTree = (add_ids and pdfreference(pdfflushobject(idtree))) or nil, - RoleMap = rolemap, - } - pdfflushobject(structure_ref,structuretree) - lpdf.addtocatalog("StructTreeRoot",pdfreference(structure_ref)) - -- - local markinfo = pdfdictionary { - Marked = pdfboolean(true), - -- UserProperties = pdfboolean(true), - -- Suspects = pdfboolean(true), - } - lpdf.addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo))) - -- - for fulltag, element in next, elements do - pdfflushobject(element.knum,element.kids) - end - end -end - -lpdf.registerdocumentfinalizer(finishstructure,"document structure") - -local index, pageref, pagenum, list = 0, nil, 0, nil - -local pdf_mcr = pdfconstant("MCR") -local pdf_struct_element = pdfconstant("StructElem") - -local function initializepage() - index = 0 - pagenum = tex.count.realpageno - pageref = pdfreference(pdfpagereference(pagenum)) - list = pdfarray() - tree[pagenum] = list -- we can flush after done, todo -end - -local function finishpage() - -- flush what can be flushed - lpdf.addtopageattributes("StructParents",pagenum-1) -end - --- here we can flush and free elements that are finished - -local function makeelement(fulltag,parent) - local tag, n = lpegmatch(dashsplitter,fulltag) - local tg, detail = lpegmatch(colonsplitter,tag) - local k, r = pdfarray(), pdfreserveobject() - usedmapping[tg] = true - tg = usedlabels[tg] or tg - local d = pdfdictionary { - Type = pdf_struct_element, - S = pdfconstant(tg), - ID = (add_ids and fulltag) or nil, - T = detail and detail or nil, - P = parent.pref, - Pg = pageref, - K = pdfreference(r), - -- Alt = " Who cares ", - -- ActualText = " Hi Hans ", - } - local s = pdfreference(pdfflushobject(d)) - if add_ids then - names[#names+1] = fulltag - names[#names+1] = s - end - local kids = parent.kids - kids[#kids+1] = s - elements[fulltag] = { tag = tag, pref = s, kids = k, knum = r, pnum = pagenum } -end - -local function makecontent(parent,start,stop,slist,id) - local tag, kids = parent.tag, parent.kids - local last = index - if id == "image" then - local d = pdfdictionary { - Type = pdf_mcr, - Pg = pageref, - MCID = last, 
- Alt = "image", - } - kids[#kids+1] = d - elseif pagenum == parent.pnum then - kids[#kids+1] = last - else - local d = pdfdictionary { - Type = pdf_mcr, - Pg = pageref, - MCID = last, - } - -- kids[#kids+1] = pdfreference(pdfflushobject(d)) - kids[#kids+1] = d - end - -- - local bliteral = pdfliteral(format("/%s <>BDC",tag,last)) - local prev = start.prev - if prev then - prev.next, bliteral.prev = bliteral, prev - end - start.prev, bliteral.next = bliteral, start - if slist and slist.list == start then - slist.list = bliteral - elseif not prev then - report_tags("this can't happen: injection in front of nothing") - end - -- - local eliteral = pdfliteral("EMC") - local next = stop.next - if next then - next.prev, eliteral.next = eliteral, next - end - stop.next, eliteral.prev = eliteral, stop - -- - index = index + 1 - list[index] = parent.pref - return bliteral, eliteral -end - --- -- -- - -local level, last, ranges, range = 0, nil, { }, nil - -local function collectranges(head,list) - for n in traverse_nodes(head) do - local id = n.id -- 14: image, 8: literal (mp) - if id == glyph_code then - local at = n[a_tagged] - if not at then - range = nil - elseif last ~= at then - range = { at, "glyph", n, n, list } -- attr id start stop list - ranges[#ranges+1] = range - last = at - elseif range then - range[4] = n -- stop - end - elseif id == hlist_code or id == vlist_code then - local at = n[a_image] - if at then - local at = n[a_tagged] - if not at then - range = nil - else - ranges[#ranges+1] = { at, "image", n, n, list } -- attr id start stop list - end - last = nil - else - local nl = n.list - slide_nodelist(nl) -- temporary hack till math gets slided (tracker item) - collectranges(nl,n) - end - end - end -end - -function nodeinjections.addtags(head) - -- no need to adapt head, as we always operate on lists - level, last, ranges, range = 0, nil, { }, nil - initializepage() - collectranges(head) - if trace_tags then - for i=1,#ranges do - local range = ranges[i] - local attr, id, start, stop = range[1], range[2], range[3], range[4] - local tags = taglist[attr] - if tags then -- not ok ... 
only first lines - report_tags("%s => %s : %05i % t",tosequence(start,start),tosequence(stop,stop),attr,tags) - end - end - end - for i=1,#ranges do - local range = ranges[i] - local attr, id, start, stop, list = range[1], range[2], range[3], range[4], range[5] - local tags = taglist[attr] - local prev = root - local noftags, tag = #tags, nil - for j=1,noftags do - local tag = tags[j] - if not elements[tag] then - makeelement(tag,prev) - end - prev = elements[tag] - end - local b, e = makecontent(prev,start,stop,list,id) - if start == head then - report_tags("this can't happen: parent list gets tagged") - head = b - end - end - finishpage() - -- can be separate feature - -- - -- injectspans(head) -- does to work yet - -- - return head, true -end - --- this belongs elsewhere (export is not pdf related) - -function codeinjections.enabletags(tg,lb) - structures.tags.handler = nodeinjections.addtags - tasks.enableaction("shipouts","structures.tags.handler") - tasks.enableaction("shipouts","nodes.handlers.accessibility") - tasks.enableaction("math","noads.handlers.tags") - -- maybe also textblock - if trace_tags then - report_tags("enabling structure tags") - end -end +if not modules then modules = { } end modules ['lpdf-tag'] = { + version = 1.001, + comment = "companion to lpdf-tag.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, match, concat = string.format, string.match, table.concat +local lpegmatch = lpeg.match +local utfchar = utf.char + +local trace_tags = false trackers.register("structures.tags", function(v) trace_tags = v end) + +local report_tags = logs.reporter("backend","tags") + +local backends, lpdf, nodes = backends, lpdf, nodes + +local nodeinjections = backends.pdf.nodeinjections +local codeinjections = backends.pdf.codeinjections + +local tasks = nodes.tasks + +local pdfdictionary = lpdf.dictionary +local pdfarray = lpdf.array +local pdfboolean = lpdf.boolean +local pdfconstant = lpdf.constant +local pdfreference = lpdf.reference +local pdfunicode = lpdf.unicode +local pdfstring = lpdf.string +local pdfflushobject = lpdf.flushobject +local pdfreserveobject = lpdf.reserveobject +local pdfpagereference = lpdf.pagereference + +local nodepool = nodes.pool + +local pdfliteral = nodepool.pdfliteral + +local nodecodes = nodes.nodecodes + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glyph_code = nodecodes.glyph + +local a_tagged = attributes.private('tagged') +local a_image = attributes.private('image') + +local traverse_nodes = node.traverse +local traverse_id = node.traverse_id +local tosequence = nodes.tosequence +local copy_node = node.copy +local slide_nodelist = node.slide + +local structure_stack = { } +local structure_kids = pdfarray() +local structure_ref = pdfreserveobject() +local parent_ref = pdfreserveobject() +local root = { pref = pdfreference(structure_ref), kids = structure_kids } +local tree = { } +local elements = { } +local names = pdfarray() +local taglist = structures.tags.taglist +local usedlabels = structures.tags.labels +local properties = structures.tags.properties +local usedmapping = { } + +local colonsplitter = lpeg.splitat(":") +local dashsplitter = lpeg.splitat("-") + +local add_ids = false -- true + + +--~ function codeinjections.maptag(original,target,kind) +--~ mapping[original] = { target, kind or "inline" } +--~ end + +local function finishstructure() + if #structure_kids > 0 then + 
local nums, n = pdfarray(), 0 + for i=1,#tree do + n = n + 1 ; nums[n] = i-1 + n = n + 1 ; nums[n] = pdfreference(pdfflushobject(tree[i])) + end + local parenttree = pdfdictionary { + Nums = nums + } + -- we need to split names into smaller parts (e.g. alphabetic or so) + if add_ids then + local kids = pdfdictionary { + Limits = pdfarray { names[1], names[#names-1] }, + Names = names, + } + local idtree = pdfdictionary { + Kids = pdfarray { pdfreference(pdfflushobject(kids)) }, + } + end + -- + local rolemap = pdfdictionary() + for k, v in next, usedmapping do + k = usedlabels[k] or k + local p = properties[k] + rolemap[k] = pdfconstant(p and p.pdf or "Span") -- or "Div" + end + local structuretree = pdfdictionary { + Type = pdfconstant("StructTreeRoot"), + K = pdfreference(pdfflushobject(structure_kids)), + ParentTree = pdfreference(pdfflushobject(parent_ref,parenttree)), + IDTree = (add_ids and pdfreference(pdfflushobject(idtree))) or nil, + RoleMap = rolemap, + } + pdfflushobject(structure_ref,structuretree) + lpdf.addtocatalog("StructTreeRoot",pdfreference(structure_ref)) + -- + local markinfo = pdfdictionary { + Marked = pdfboolean(true), + -- UserProperties = pdfboolean(true), + -- Suspects = pdfboolean(true), + } + lpdf.addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo))) + -- + for fulltag, element in next, elements do + pdfflushobject(element.knum,element.kids) + end + end +end + +lpdf.registerdocumentfinalizer(finishstructure,"document structure") + +local index, pageref, pagenum, list = 0, nil, 0, nil + +local pdf_mcr = pdfconstant("MCR") +local pdf_struct_element = pdfconstant("StructElem") + +local function initializepage() + index = 0 + pagenum = tex.count.realpageno + pageref = pdfreference(pdfpagereference(pagenum)) + list = pdfarray() + tree[pagenum] = list -- we can flush after done, todo +end + +local function finishpage() + -- flush what can be flushed + lpdf.addtopageattributes("StructParents",pagenum-1) +end + +-- here we can flush and free elements that are finished + +local function makeelement(fulltag,parent) + local tag, n = lpegmatch(dashsplitter,fulltag) + local tg, detail = lpegmatch(colonsplitter,tag) + local k, r = pdfarray(), pdfreserveobject() + usedmapping[tg] = true + tg = usedlabels[tg] or tg + local d = pdfdictionary { + Type = pdf_struct_element, + S = pdfconstant(tg), + ID = (add_ids and fulltag) or nil, + T = detail and detail or nil, + P = parent.pref, + Pg = pageref, + K = pdfreference(r), + -- Alt = " Who cares ", + -- ActualText = " Hi Hans ", + } + local s = pdfreference(pdfflushobject(d)) + if add_ids then + names[#names+1] = fulltag + names[#names+1] = s + end + local kids = parent.kids + kids[#kids+1] = s + elements[fulltag] = { tag = tag, pref = s, kids = k, knum = r, pnum = pagenum } +end + +local function makecontent(parent,start,stop,slist,id) + local tag, kids = parent.tag, parent.kids + local last = index + if id == "image" then + local d = pdfdictionary { + Type = pdf_mcr, + Pg = pageref, + MCID = last, + Alt = "image", + } + kids[#kids+1] = d + elseif pagenum == parent.pnum then + kids[#kids+1] = last + else + local d = pdfdictionary { + Type = pdf_mcr, + Pg = pageref, + MCID = last, + } + -- kids[#kids+1] = pdfreference(pdfflushobject(d)) + kids[#kids+1] = d + end + -- + local bliteral = pdfliteral(format("/%s <>BDC",tag,last)) + local prev = start.prev + if prev then + prev.next, bliteral.prev = bliteral, prev + end + start.prev, bliteral.next = bliteral, start + if slist and slist.list == start then + slist.list = 
bliteral + elseif not prev then + report_tags("this can't happen: injection in front of nothing") + end + -- + local eliteral = pdfliteral("EMC") + local next = stop.next + if next then + next.prev, eliteral.next = eliteral, next + end + stop.next, eliteral.prev = eliteral, stop + -- + index = index + 1 + list[index] = parent.pref + return bliteral, eliteral +end + +-- -- -- + +local level, last, ranges, range = 0, nil, { }, nil + +local function collectranges(head,list) + for n in traverse_nodes(head) do + local id = n.id -- 14: image, 8: literal (mp) + if id == glyph_code then + local at = n[a_tagged] + if not at then + range = nil + elseif last ~= at then + range = { at, "glyph", n, n, list } -- attr id start stop list + ranges[#ranges+1] = range + last = at + elseif range then + range[4] = n -- stop + end + elseif id == hlist_code or id == vlist_code then + local at = n[a_image] + if at then + local at = n[a_tagged] + if not at then + range = nil + else + ranges[#ranges+1] = { at, "image", n, n, list } -- attr id start stop list + end + last = nil + else + local nl = n.list + slide_nodelist(nl) -- temporary hack till math gets slided (tracker item) + collectranges(nl,n) + end + end + end +end + +function nodeinjections.addtags(head) + -- no need to adapt head, as we always operate on lists + level, last, ranges, range = 0, nil, { }, nil + initializepage() + collectranges(head) + if trace_tags then + for i=1,#ranges do + local range = ranges[i] + local attr, id, start, stop = range[1], range[2], range[3], range[4] + local tags = taglist[attr] + if tags then -- not ok ... only first lines + report_tags("%s => %s : %05i % t",tosequence(start,start),tosequence(stop,stop),attr,tags) + end + end + end + for i=1,#ranges do + local range = ranges[i] + local attr, id, start, stop, list = range[1], range[2], range[3], range[4], range[5] + local tags = taglist[attr] + local prev = root + local noftags, tag = #tags, nil + for j=1,noftags do + local tag = tags[j] + if not elements[tag] then + makeelement(tag,prev) + end + prev = elements[tag] + end + local b, e = makecontent(prev,start,stop,list,id) + if start == head then + report_tags("this can't happen: parent list gets tagged") + head = b + end + end + finishpage() + -- can be separate feature + -- + -- injectspans(head) -- does to work yet + -- + return head, true +end + +-- this belongs elsewhere (export is not pdf related) + +function codeinjections.enabletags(tg,lb) + structures.tags.handler = nodeinjections.addtags + tasks.enableaction("shipouts","structures.tags.handler") + tasks.enableaction("shipouts","nodes.handlers.accessibility") + tasks.enableaction("math","noads.handlers.tags") + -- maybe also textblock + if trace_tags then + report_tags("enabling structure tags") + end +end diff --git a/tex/context/base/lpdf-u3d.lua b/tex/context/base/lpdf-u3d.lua index 33269486c..464ea6fa7 100644 --- a/tex/context/base/lpdf-u3d.lua +++ b/tex/context/base/lpdf-u3d.lua @@ -1,488 +1,488 @@ -if not modules then modules = { } end modules ['lpdf-u3d'] = { - version = 1.001, - comment = "companion to lpdf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- The following code is based on a working prototype provided --- by Michael Vidiassov. It is rewritten using the lpdf library --- and different checking is used. The macro calls are adapted --- (and will eventually be removed). The user interface needs --- an overhaul. 
There are some messy leftovers that will be --- removed in future versions. - --- For some reason no one really tested this code so at some --- point we will end up with a reimplementation. For instance --- it makes sense to add the same activation code as with swf. - -local format, find = string.format, string.find -local cos, sin, sqrt, pi, atan2, abs = math.cos, math.sin, math.sqrt, math.pi, math.atan2, math.abs - -local backends, lpdf = backends, lpdf - -local nodeinjections = backends.pdf.nodeinjections - -local pdfconstant = lpdf.constant -local pdfboolean = lpdf.boolean -local pdfnumber = lpdf.number -local pdfunicode = lpdf.unicode -local pdfdictionary = lpdf.dictionary -local pdfarray = lpdf.array -local pdfnull = lpdf.null -local pdfreference = lpdf.reference -local pdfflushstreamobject = lpdf.flushstreamobject -local pdfflushstreamfileobject = lpdf.flushstreamfileobject - -local checkedkey = lpdf.checkedkey -local limited = lpdf.limited - -local pdfannotation_node = nodes.pool.pdfannotation - -local schemes = table.tohash { - "Artwork", "None", "White", "Day", "Night", "Hard", - "Primary", "Blue", "Red", "Cube", "CAD", "Headlamp", -} - -local modes = table.tohash { - "Solid", "SolidWireframe", "Transparent", "TransparentWireframe", "BoundingBox", - "TransparentBoundingBox", "TransparentBoundingBoxOutline", "Wireframe", - "ShadedWireframe", "HiddenWireframe", "Vertices", "ShadedVertices", "Illustration", - "SolidOutline", "ShadedIllustration", -} - -local function normalize(x, y, z) - local modulo = sqrt(x*x + y*y + z*z); - if modulo ~= 0 then - return x/modulo, y/modulo, z/modulo - else - return x, y, z - end -end - -local function rotate(vect_x,vect_y,vect_z, tet, axis_x,axis_y,axis_z) - -- rotate vect by tet about axis counterclockwise - local c, s = cos(tet*pi/180), sin(tet*pi/180) - local r = 1 - c - local n = sqrt(axis_x*axis_x+axis_y*axis_y+axis_z*axis_z) - axis_x, axis_y, axis_z = axis_x/n, axis_y/n, axis_z/n - return - (axis_x*axis_x*r+c )*vect_x + (axis_x*axis_y*r-axis_z*s)*vect_y + (axis_x*axis_z*r+axis_y*s)*vect_z, - (axis_x*axis_y*r+axis_z*s)*vect_x + (axis_y*axis_y*r+c )*vect_y + (axis_y*axis_z*r-axis_x*s)*vect_z, - (axis_x*axis_z*r-axis_y*s)*vect_x + (axis_y*axis_z*r+axis_x*s)*vect_y + (axis_z*axis_z*r+c )*vect_z -end - -local function make3dview(view) - - local name = view.name - local name = pdfunicode(name ~= "" and name or "unknown view") - - local viewdict = pdfdictionary { - Type = pdfconstant("3DView"), - XN = name, - IN = name, - NR = true, - } - - local bg = checkedkey(view,"bg","table") - if bg then - viewdict.BG = pdfdictionary { - Type = pdfconstant("3DBG"), - C = pdfarray { limited(bg[1],1,1,1), limited(bg[2],1,1,1), limited(bg[3],1,1,1) }, - } - end - - local lights = checkedkey(view,"lights","string") - if lights and schemes[lights] then - viewdict.LS = pdfdictionary { - Type = pdfconstant("3DLightingScheme"), - Subtype = pdfconstant(lights), - } - end - - -- camera position is taken from 3d model - - local u3dview = checkedkey(view, "u3dview", "string") - if u3dview then - viewdict.MS = pdfconstant("U3D") - viewdict.U3DPath = u3dview - end - - -- position the camera as given - - local c2c = checkedkey(view, "c2c", "table") - local coo = checkedkey(view, "coo", "table") - local roo = checkedkey(view, "roo", "number") - local azimuth = checkedkey(view, "azimuth", "number") - local altitude = checkedkey(view, "altitude", "number") - - if c2c or coo or roo or azimuth or altitude then - - local pos = checkedkey(view, "pos", "table") - local dir = 
checkedkey(view, "dir", "table") - local upv = checkedkey(view, "upv", "table") - local roll = checkedkey(view, "roll", "table") - - local coo_x, coo_y, coo_z = 0, 0, 0 - local dir_x, dir_y, dir_z = 0, 0, 0 - local trans_x, trans_y, trans_z = 0, 0, 0 - local left_x, left_y, left_z = 0, 0, 0 - local up_x, up_y, up_z = 0, 0, 0 - - -- point camera is aimed at - - if coo then - coo_x, coo_y, coo_z = tonumber(coo[1]) or 0, tonumber(coo[2]) or 0, tonumber(coo[3]) or 0 - end - - -- distance from camera to target - - if roo then - roo = abs(roo) - end - if not roo or roo == 0 then - roo = 0.000000000000000001 - end - - -- set it via camera position - - if pos then - dir_x = coo_x - (tonumber(pos[1]) or 0) - dir_y = coo_y - (tonumber(pos[2]) or 0) - dir_z = coo_z - (tonumber(pos[3]) or 0) - if not roo then - roo = sqrt(dir_x*dir_x + dir_y*dir_y + dir_z*dir_z) - end - if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end - dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z) - end - - -- set it directly - - if dir then - dir_x, dir_y, dir_z = tonumber(dir[1] or 0), tonumber(dir[2] or 0), tonumber(dir[3] or 0) - if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end - dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z) - end - - -- set it movie15 style with vector from target to camera - - if c2c then - dir_x, dir_y, dir_z = - tonumber(c2c[1] or 0), - tonumber(c2c[2] or 0), - tonumber(c2c[3] or 0) - if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end - dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z) - end - - -- set it with azimuth and altitutde - - if altitude or azimuth then - dir_x, dir_y, dir_z = -1, 0, 0 - if altitude then dir_x, dir_y, dir_z = rotate(dir_x,dir_y,dir_z, -altitude, 0,1,0) end - if azimuth then dir_x, dir_y, dir_z = rotate(dir_x,dir_y,dir_z, azimuth, 0,0,1) end - end - - -- set it with rotation like in MathGL - - if rot then - if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_z = -1 end - dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[1]) or 0, 1,0,0) - dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[2]) or 0, 0,1,0) - dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[3]) or 0, 0,0,1) - end - - -- set it with default movie15 orientation looking up y axis - - if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end - - -- left-vector - -- up-vector - - if upv then - up_x, up_y, up_z = tonumber(upv[1]) or 0, tonumber(upv[2]) or 0, tonumber(upv[3]) or 0 - else - -- set default up-vector - if abs(dir_x) == 0 and abs(dir_y) == 0 then - if dir_z < 0 then - up_y = 1 -- top view - else - up_y = -1 -- bottom view - end - else - -- other camera positions than top and bottom, up-vector = up_world - (up_world dot dir) dir - up_x, up_y, up_z = - dir_z*dir_x, - dir_z*dir_y, - dir_z*dir_z + 1 - end - end - - -- normalize up-vector - - up_x, up_y, up_z = normalize(up_x,up_y,up_z) - - -- left vector = up x dir - - left_x, left_y, left_z = dir_z*up_y - dir_y*up_z, dir_x*up_z - dir_z*up_x, dir_y*up_x - dir_x*up_y - - -- normalize left vector - - left_x, left_y, left_z = normalize(left_x,left_y,left_z) - - -- apply camera roll - - if roll then - local sinroll = sin((roll/180.0)*pi) - local cosroll = cos((roll/180.0)*pi) - left_x = left_x*cosroll + up_x*sinroll - left_y = left_y*cosroll + up_y*sinroll - left_z = left_z*cosroll + up_z*sinroll - up_x = up_x*cosroll + left_x*sinroll - up_y = up_y*cosroll + left_y*sinroll - up_z = up_z*cosroll + left_z*sinroll - end - - -- translation vector - - trans_x, trans_y, trans_z = coo_x - 
roo*dir_x, coo_y - roo*dir_y, coo_z - roo*dir_z - - viewdict.MS = pdfconstant("M") - viewdict.CO = roo - viewdict.C2W = pdfarray { - left_x, left_y, left_z, - up_x, up_y, up_z, - dir_x, dir_y, dir_z, - trans_x, trans_y, trans_z, - } - - end - - local aac = tonumber(view.aac) -- perspective projection - local mag = tonumber(view.mag) -- ortho projection - - if aac and aac > 0 and aac < 180 then - viewdict.P = pdfdictionary { - Subtype = pdfconstant("P"), - PS = pdfconstant("Min"), - FOV = aac, - } - elseif mag and mag > 0 then - viewdict.P = pdfdictionary { - Subtype = pdfconstant("O"), - OS = mag, - } - end - - local mode = modes[view.rendermode] - if mode then - pdfdictionary { - Type = pdfconstant("3DRenderMode"), - Subtype = pdfconstant(mode), - } - end - - -- crosssection - - local crosssection = checkedkey(view,"crosssection","table") - if crosssection then - local crossdict = pdfdictionary { - Type = pdfconstant("3DCrossSection") - } - - local c = checkedkey(crosssection,"point","table") or checkedkey(crosssection,"center","table") - if c then - crossdict.C = pdfarray { tonumber(c[1]) or 0, tonumber(c[2]) or 0, tonumber(c[3]) or 0 } - end - - local normal = checkedkey(crosssection,"normal","table") - if normal then - local x, y, z = tonumber(normal[1] or 0), tonumber(normal[2] or 0), tonumber(normal[3] or 0) - if sqrt(x*x + y*y + z*z) == 0 then - x, y, z = 1, 0, 0 - end - crossdict.O = pdfarray { - pdfnull, - atan2(-z,sqrt(x*x + y*y))*180/pi, - atan2(y,x)*180/pi, - } - end - - local orient = checkedkey(crosssection,"orient","table") - if orient then - crossdict.O = pdfarray { - tonumber(orient[1]) or 1, - tonumber(orient[2]) or 0, - tonumber(orient[3]) or 0, - } - end - - crossdict.IV = cross.intersection or false - crossdict.ST = cross.transparent or false - - viewdict.SA = next(crossdict) and pdfarray { crossdict } -- maybe test if # > 1 - end - - local nodes = checkedkey(view,"nodes","table") - if nodes then - local nodelist = pdfarray() - for i=1,#nodes do - local node = checkedkey(nodes,i,"table") - if node then - local position = checkedkey(node,"position","table") - nodelist[#nodelist+1] = pdfdictionary { - Type = pdfconstant("3DNode"), - N = node.name or ("node_" .. i), -- pdfunicode ? - M = position and #position == 12 and pdfarray(position), - V = node.visible or true, - O = node.opacity or 0, - RM = pdfdictionary { - Type = pdfconstant("3DRenderMode"), - Subtype = pdfconstant(node.rendermode or "Solid"), - }, - } - end - end - viewdict.NA = nodelist - end - - return viewdict - -end - -local stored_js, stored_3d, stored_pr, streams = { }, { }, { }, { } - -local function insert3d(spec) -- width, height, factor, display, controls, label, foundname - - local width, height, factor = spec.width, spec.height, spec.factor or number.dimenfactors.bp - local display, controls, label, foundname = spec.display, spec.controls, spec.label, spec.foundname - - local param = (display and parametersets[display]) or { } - local streamparam = (controls and parametersets[controls]) or { } - local name = "3D Artwork " .. 
(param.name or label or "Unknown") - - local activationdict = pdfdictionary { - TB = pdfboolean(param.toolbar,true), - NP = pdfboolean(param.tree,false), - } - - local stream = streams[label] - if not stream then - - local subtype, subdata = "U3D", io.loaddata(foundname) or "" - if find(subdata,"^PRC") then - subtype = "PRC" - elseif find(subdata,"^U3D") then - subtype = "U3D" - elseif file.suffix(foundname) == "prc" then - subtype = "PRC" - end - - local attr = pdfdictionary { - Type = pdfconstant("3D"), - Subtype = pdfconstant(subtype), - } - local streamviews = checkedkey(streamparam, "views", "table") - if streamviews then - local list = pdfarray() - for i=1,#streamviews do - local v = checkedkey(streamviews, i, "table") - if v then - list[#list+1] = make3dview(v) - end - end - attr.VA = list - end - if checkedkey(streamparam, "view", "table") then - attr.DV = make3dview(streamparam.view) - elseif checkedkey(streamparam, "view", "string") then - attr.DV = streamparam.view - end - local js = checkedkey(streamparam, "js", "string") - if js then - local jsref = stored_js[js] - if not jsref then - jsref = pdfflushstreamfileobject(js) - stored_js[js] = jsref - end - attr.OnInstantiate = pdfreference(jsref) - end - stored_3d[label] = pdfflushstreamfileobject(foundname,attr) - stream = 1 - else - stream = stream + 1 - end - streams[label] = stream - - local name = pdfunicode(name) - - local annot = pdfdictionary { - Subtype = pdfconstant("3D"), - T = name, - Contents = name, - NM = name, - ["3DD"] = pdfreference(stored_3d[label]), - ["3DA"] = activationdict, - } - if checkedkey(param,"view","table") then - annot["3DV"] = make3dview(param.view) - elseif checkedkey(param,"view","string") then - annot["3DV"] = param.view - end - - local preview = checkedkey(param,"preview","string") - if preview then - activationdict.A = pdfconstant("XA") - local tag = format("%s:%s:%s",label,stream,preview) - local ref = stored_pr[tag] - if not ref then - local figure = img.immediatewrite { - filename = preview, - width = width, - height = height - } - ref = figure.objnum - stored_pr[tag] = ref - end - if ref then -- see back-pdf ** .. here we have a local /IM ! 
- local zero, one = pdfnumber(0), pdfnumber(1) -- not really needed - local pw = pdfdictionary { - Type = pdfconstant("XObject"), - Subtype = pdfconstant("Form"), - FormType = one, - BBox = pdfarray { zero, zero, pdfnumber(factor*width), pdfnumber(factor*height) }, - Matrix = pdfarray { one, zero, zero, one, zero, zero }, - Resources = pdfdictionary { - XObject = pdfdictionary { - IM = pdfreference(ref) - } - }, - ExtGState = pdfdictionary { - GS = pdfdictionary { - Type = pdfconstant("ExtGState"), - CA = one, - ca = one, - } - }, - ProcSet = pdfarray { pdfconstant("PDF"), pdfconstant("ImageC") }, - } - local pwd = pdfflushstreamobject(format("q /GS gs %f 0 0 %f 0 0 cm /IM Do Q",factor*width,factor*height),pw) - annot.AP = pdfdictionary { - N = pdfreference(pwd) - } - end - return annot, figure, ref - else - activationdict.A = pdfconstant("PV") - return annot, nil, nil - end -end - -function nodeinjections.insertu3d(spec) - local annotation, preview, ref = insert3d { -- just spec - foundname = spec.foundname, - width = spec.width, - height = spec.height, - factor = spec.factor, - display = spec.display, - controls = spec.controls, - label = spec.label, - } - node.write(pdfannotation_node(spec.width,spec.height,0,annotation())) -end +if not modules then modules = { } end modules ['lpdf-u3d'] = { + version = 1.001, + comment = "companion to lpdf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- The following code is based on a working prototype provided +-- by Michael Vidiassov. It is rewritten using the lpdf library +-- and different checking is used. The macro calls are adapted +-- (and will eventually be removed). The user interface needs +-- an overhaul. There are some messy leftovers that will be +-- removed in future versions. + +-- For some reason no one really tested this code so at some +-- point we will end up with a reimplementation. For instance +-- it makes sense to add the same activation code as with swf. 
+ +local format, find = string.format, string.find +local cos, sin, sqrt, pi, atan2, abs = math.cos, math.sin, math.sqrt, math.pi, math.atan2, math.abs + +local backends, lpdf = backends, lpdf + +local nodeinjections = backends.pdf.nodeinjections + +local pdfconstant = lpdf.constant +local pdfboolean = lpdf.boolean +local pdfnumber = lpdf.number +local pdfunicode = lpdf.unicode +local pdfdictionary = lpdf.dictionary +local pdfarray = lpdf.array +local pdfnull = lpdf.null +local pdfreference = lpdf.reference +local pdfflushstreamobject = lpdf.flushstreamobject +local pdfflushstreamfileobject = lpdf.flushstreamfileobject + +local checkedkey = lpdf.checkedkey +local limited = lpdf.limited + +local pdfannotation_node = nodes.pool.pdfannotation + +local schemes = table.tohash { + "Artwork", "None", "White", "Day", "Night", "Hard", + "Primary", "Blue", "Red", "Cube", "CAD", "Headlamp", +} + +local modes = table.tohash { + "Solid", "SolidWireframe", "Transparent", "TransparentWireframe", "BoundingBox", + "TransparentBoundingBox", "TransparentBoundingBoxOutline", "Wireframe", + "ShadedWireframe", "HiddenWireframe", "Vertices", "ShadedVertices", "Illustration", + "SolidOutline", "ShadedIllustration", +} + +local function normalize(x, y, z) + local modulo = sqrt(x*x + y*y + z*z); + if modulo ~= 0 then + return x/modulo, y/modulo, z/modulo + else + return x, y, z + end +end + +local function rotate(vect_x,vect_y,vect_z, tet, axis_x,axis_y,axis_z) + -- rotate vect by tet about axis counterclockwise + local c, s = cos(tet*pi/180), sin(tet*pi/180) + local r = 1 - c + local n = sqrt(axis_x*axis_x+axis_y*axis_y+axis_z*axis_z) + axis_x, axis_y, axis_z = axis_x/n, axis_y/n, axis_z/n + return + (axis_x*axis_x*r+c )*vect_x + (axis_x*axis_y*r-axis_z*s)*vect_y + (axis_x*axis_z*r+axis_y*s)*vect_z, + (axis_x*axis_y*r+axis_z*s)*vect_x + (axis_y*axis_y*r+c )*vect_y + (axis_y*axis_z*r-axis_x*s)*vect_z, + (axis_x*axis_z*r-axis_y*s)*vect_x + (axis_y*axis_z*r+axis_x*s)*vect_y + (axis_z*axis_z*r+c )*vect_z +end + +local function make3dview(view) + + local name = view.name + local name = pdfunicode(name ~= "" and name or "unknown view") + + local viewdict = pdfdictionary { + Type = pdfconstant("3DView"), + XN = name, + IN = name, + NR = true, + } + + local bg = checkedkey(view,"bg","table") + if bg then + viewdict.BG = pdfdictionary { + Type = pdfconstant("3DBG"), + C = pdfarray { limited(bg[1],1,1,1), limited(bg[2],1,1,1), limited(bg[3],1,1,1) }, + } + end + + local lights = checkedkey(view,"lights","string") + if lights and schemes[lights] then + viewdict.LS = pdfdictionary { + Type = pdfconstant("3DLightingScheme"), + Subtype = pdfconstant(lights), + } + end + + -- camera position is taken from 3d model + + local u3dview = checkedkey(view, "u3dview", "string") + if u3dview then + viewdict.MS = pdfconstant("U3D") + viewdict.U3DPath = u3dview + end + + -- position the camera as given + + local c2c = checkedkey(view, "c2c", "table") + local coo = checkedkey(view, "coo", "table") + local roo = checkedkey(view, "roo", "number") + local azimuth = checkedkey(view, "azimuth", "number") + local altitude = checkedkey(view, "altitude", "number") + + if c2c or coo or roo or azimuth or altitude then + + local pos = checkedkey(view, "pos", "table") + local dir = checkedkey(view, "dir", "table") + local upv = checkedkey(view, "upv", "table") + local roll = checkedkey(view, "roll", "table") + + local coo_x, coo_y, coo_z = 0, 0, 0 + local dir_x, dir_y, dir_z = 0, 0, 0 + local trans_x, trans_y, trans_z = 0, 0, 0 + local left_x, left_y, 
left_z = 0, 0, 0 + local up_x, up_y, up_z = 0, 0, 0 + + -- point camera is aimed at + + if coo then + coo_x, coo_y, coo_z = tonumber(coo[1]) or 0, tonumber(coo[2]) or 0, tonumber(coo[3]) or 0 + end + + -- distance from camera to target + + if roo then + roo = abs(roo) + end + if not roo or roo == 0 then + roo = 0.000000000000000001 + end + + -- set it via camera position + + if pos then + dir_x = coo_x - (tonumber(pos[1]) or 0) + dir_y = coo_y - (tonumber(pos[2]) or 0) + dir_z = coo_z - (tonumber(pos[3]) or 0) + if not roo then + roo = sqrt(dir_x*dir_x + dir_y*dir_y + dir_z*dir_z) + end + if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end + dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z) + end + + -- set it directly + + if dir then + dir_x, dir_y, dir_z = tonumber(dir[1] or 0), tonumber(dir[2] or 0), tonumber(dir[3] or 0) + if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end + dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z) + end + + -- set it movie15 style with vector from target to camera + + if c2c then + dir_x, dir_y, dir_z = - tonumber(c2c[1] or 0), - tonumber(c2c[2] or 0), - tonumber(c2c[3] or 0) + if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end + dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z) + end + + -- set it with azimuth and altitutde + + if altitude or azimuth then + dir_x, dir_y, dir_z = -1, 0, 0 + if altitude then dir_x, dir_y, dir_z = rotate(dir_x,dir_y,dir_z, -altitude, 0,1,0) end + if azimuth then dir_x, dir_y, dir_z = rotate(dir_x,dir_y,dir_z, azimuth, 0,0,1) end + end + + -- set it with rotation like in MathGL + + if rot then + if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_z = -1 end + dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[1]) or 0, 1,0,0) + dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[2]) or 0, 0,1,0) + dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[3]) or 0, 0,0,1) + end + + -- set it with default movie15 orientation looking up y axis + + if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end + + -- left-vector + -- up-vector + + if upv then + up_x, up_y, up_z = tonumber(upv[1]) or 0, tonumber(upv[2]) or 0, tonumber(upv[3]) or 0 + else + -- set default up-vector + if abs(dir_x) == 0 and abs(dir_y) == 0 then + if dir_z < 0 then + up_y = 1 -- top view + else + up_y = -1 -- bottom view + end + else + -- other camera positions than top and bottom, up-vector = up_world - (up_world dot dir) dir + up_x, up_y, up_z = - dir_z*dir_x, - dir_z*dir_y, - dir_z*dir_z + 1 + end + end + + -- normalize up-vector + + up_x, up_y, up_z = normalize(up_x,up_y,up_z) + + -- left vector = up x dir + + left_x, left_y, left_z = dir_z*up_y - dir_y*up_z, dir_x*up_z - dir_z*up_x, dir_y*up_x - dir_x*up_y + + -- normalize left vector + + left_x, left_y, left_z = normalize(left_x,left_y,left_z) + + -- apply camera roll + + if roll then + local sinroll = sin((roll/180.0)*pi) + local cosroll = cos((roll/180.0)*pi) + left_x = left_x*cosroll + up_x*sinroll + left_y = left_y*cosroll + up_y*sinroll + left_z = left_z*cosroll + up_z*sinroll + up_x = up_x*cosroll + left_x*sinroll + up_y = up_y*cosroll + left_y*sinroll + up_z = up_z*cosroll + left_z*sinroll + end + + -- translation vector + + trans_x, trans_y, trans_z = coo_x - roo*dir_x, coo_y - roo*dir_y, coo_z - roo*dir_z + + viewdict.MS = pdfconstant("M") + viewdict.CO = roo + viewdict.C2W = pdfarray { + left_x, left_y, left_z, + up_x, up_y, up_z, + dir_x, dir_y, dir_z, + trans_x, trans_y, trans_z, + } + + end + + local aac = tonumber(view.aac) -- 
perspective projection + local mag = tonumber(view.mag) -- ortho projection + + if aac and aac > 0 and aac < 180 then + viewdict.P = pdfdictionary { + Subtype = pdfconstant("P"), + PS = pdfconstant("Min"), + FOV = aac, + } + elseif mag and mag > 0 then + viewdict.P = pdfdictionary { + Subtype = pdfconstant("O"), + OS = mag, + } + end + + local mode = modes[view.rendermode] + if mode then + pdfdictionary { + Type = pdfconstant("3DRenderMode"), + Subtype = pdfconstant(mode), + } + end + + -- crosssection + + local crosssection = checkedkey(view,"crosssection","table") + if crosssection then + local crossdict = pdfdictionary { + Type = pdfconstant("3DCrossSection") + } + + local c = checkedkey(crosssection,"point","table") or checkedkey(crosssection,"center","table") + if c then + crossdict.C = pdfarray { tonumber(c[1]) or 0, tonumber(c[2]) or 0, tonumber(c[3]) or 0 } + end + + local normal = checkedkey(crosssection,"normal","table") + if normal then + local x, y, z = tonumber(normal[1] or 0), tonumber(normal[2] or 0), tonumber(normal[3] or 0) + if sqrt(x*x + y*y + z*z) == 0 then + x, y, z = 1, 0, 0 + end + crossdict.O = pdfarray { + pdfnull, + atan2(-z,sqrt(x*x + y*y))*180/pi, + atan2(y,x)*180/pi, + } + end + + local orient = checkedkey(crosssection,"orient","table") + if orient then + crossdict.O = pdfarray { + tonumber(orient[1]) or 1, + tonumber(orient[2]) or 0, + tonumber(orient[3]) or 0, + } + end + + crossdict.IV = cross.intersection or false + crossdict.ST = cross.transparent or false + + viewdict.SA = next(crossdict) and pdfarray { crossdict } -- maybe test if # > 1 + end + + local nodes = checkedkey(view,"nodes","table") + if nodes then + local nodelist = pdfarray() + for i=1,#nodes do + local node = checkedkey(nodes,i,"table") + if node then + local position = checkedkey(node,"position","table") + nodelist[#nodelist+1] = pdfdictionary { + Type = pdfconstant("3DNode"), + N = node.name or ("node_" .. i), -- pdfunicode ? + M = position and #position == 12 and pdfarray(position), + V = node.visible or true, + O = node.opacity or 0, + RM = pdfdictionary { + Type = pdfconstant("3DRenderMode"), + Subtype = pdfconstant(node.rendermode or "Solid"), + }, + } + end + end + viewdict.NA = nodelist + end + + return viewdict + +end + +local stored_js, stored_3d, stored_pr, streams = { }, { }, { }, { } + +local function insert3d(spec) -- width, height, factor, display, controls, label, foundname + + local width, height, factor = spec.width, spec.height, spec.factor or number.dimenfactors.bp + local display, controls, label, foundname = spec.display, spec.controls, spec.label, spec.foundname + + local param = (display and parametersets[display]) or { } + local streamparam = (controls and parametersets[controls]) or { } + local name = "3D Artwork " .. 
(param.name or label or "Unknown") + + local activationdict = pdfdictionary { + TB = pdfboolean(param.toolbar,true), + NP = pdfboolean(param.tree,false), + } + + local stream = streams[label] + if not stream then + + local subtype, subdata = "U3D", io.loaddata(foundname) or "" + if find(subdata,"^PRC") then + subtype = "PRC" + elseif find(subdata,"^U3D") then + subtype = "U3D" + elseif file.suffix(foundname) == "prc" then + subtype = "PRC" + end + + local attr = pdfdictionary { + Type = pdfconstant("3D"), + Subtype = pdfconstant(subtype), + } + local streamviews = checkedkey(streamparam, "views", "table") + if streamviews then + local list = pdfarray() + for i=1,#streamviews do + local v = checkedkey(streamviews, i, "table") + if v then + list[#list+1] = make3dview(v) + end + end + attr.VA = list + end + if checkedkey(streamparam, "view", "table") then + attr.DV = make3dview(streamparam.view) + elseif checkedkey(streamparam, "view", "string") then + attr.DV = streamparam.view + end + local js = checkedkey(streamparam, "js", "string") + if js then + local jsref = stored_js[js] + if not jsref then + jsref = pdfflushstreamfileobject(js) + stored_js[js] = jsref + end + attr.OnInstantiate = pdfreference(jsref) + end + stored_3d[label] = pdfflushstreamfileobject(foundname,attr) + stream = 1 + else + stream = stream + 1 + end + streams[label] = stream + + local name = pdfunicode(name) + + local annot = pdfdictionary { + Subtype = pdfconstant("3D"), + T = name, + Contents = name, + NM = name, + ["3DD"] = pdfreference(stored_3d[label]), + ["3DA"] = activationdict, + } + if checkedkey(param,"view","table") then + annot["3DV"] = make3dview(param.view) + elseif checkedkey(param,"view","string") then + annot["3DV"] = param.view + end + + local preview = checkedkey(param,"preview","string") + if preview then + activationdict.A = pdfconstant("XA") + local tag = format("%s:%s:%s",label,stream,preview) + local ref = stored_pr[tag] + if not ref then + local figure = img.immediatewrite { + filename = preview, + width = width, + height = height + } + ref = figure.objnum + stored_pr[tag] = ref + end + if ref then -- see back-pdf ** .. here we have a local /IM ! 
+ local zero, one = pdfnumber(0), pdfnumber(1) -- not really needed + local pw = pdfdictionary { + Type = pdfconstant("XObject"), + Subtype = pdfconstant("Form"), + FormType = one, + BBox = pdfarray { zero, zero, pdfnumber(factor*width), pdfnumber(factor*height) }, + Matrix = pdfarray { one, zero, zero, one, zero, zero }, + Resources = pdfdictionary { + XObject = pdfdictionary { + IM = pdfreference(ref) + } + }, + ExtGState = pdfdictionary { + GS = pdfdictionary { + Type = pdfconstant("ExtGState"), + CA = one, + ca = one, + } + }, + ProcSet = pdfarray { pdfconstant("PDF"), pdfconstant("ImageC") }, + } + local pwd = pdfflushstreamobject(format("q /GS gs %f 0 0 %f 0 0 cm /IM Do Q",factor*width,factor*height),pw) + annot.AP = pdfdictionary { + N = pdfreference(pwd) + } + end + return annot, figure, ref + else + activationdict.A = pdfconstant("PV") + return annot, nil, nil + end +end + +function nodeinjections.insertu3d(spec) + local annotation, preview, ref = insert3d { -- just spec + foundname = spec.foundname, + width = spec.width, + height = spec.height, + factor = spec.factor, + display = spec.display, + controls = spec.controls, + label = spec.label, + } + node.write(pdfannotation_node(spec.width,spec.height,0,annotation())) +end diff --git a/tex/context/base/lpdf-wid.lua b/tex/context/base/lpdf-wid.lua index 9ea4744f1..20fc14679 100644 --- a/tex/context/base/lpdf-wid.lua +++ b/tex/context/base/lpdf-wid.lua @@ -1,645 +1,645 @@ -if not modules then modules = { } end modules ['lpdf-wid'] = { - version = 1.001, - comment = "companion to lpdf-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local gmatch, gsub, find, lower, format = string.gmatch, string.gsub, string.find, string.lower, string.format -local stripstring = string.strip -local texbox, texcount = tex.box, tex.count -local settings_to_array = utilities.parsers.settings_to_array -local settings_to_hash = utilities.parsers.settings_to_hash - -local report_media = logs.reporter("backend","media") -local report_attachment = logs.reporter("backend","attachment") - -local backends, lpdf, nodes = backends, lpdf, nodes - -local nodeinjections = backends.pdf.nodeinjections -local codeinjections = backends.pdf.codeinjections -local registrations = backends.pdf.registrations - -local executers = structures.references.executers -local variables = interfaces.variables - -local v_hidden = variables.hidden -local v_normal = variables.normal -local v_auto = variables.auto -local v_embed = variables.embed -local v_unknown = variables.unknown -local v_max = variables.max - -local pdfconstant = lpdf.constant -local pdfdictionary = lpdf.dictionary -local pdfarray = lpdf.array -local pdfreference = lpdf.reference -local pdfunicode = lpdf.unicode -local pdfstring = lpdf.string -local pdfboolean = lpdf.boolean -local pdfcolorspec = lpdf.colorspec -local pdfflushobject = lpdf.flushobject -local pdfflushstreamobject = lpdf.flushstreamobject -local pdfflushstreamfileobject = lpdf.flushstreamfileobject -local pdfreserveannotation = lpdf.reserveannotation -local pdfreserveobject = lpdf.reserveobject -local pdfpagereference = lpdf.pagereference -local pdfshareobjectreference = lpdf.shareobjectreference - -local nodepool = nodes.pool - -local pdfannotation_node = nodepool.pdfannotation - -local hpack_node = node.hpack -local write_node = node.write -- test context(...) 
instead - -local pdf_border = pdfarray { 0, 0, 0 } -- can be shared - --- symbols - -local presets = { } -- xforms - -local function registersymbol(name,n) - presets[name] = pdfreference(n) -end - -local function registeredsymbol(name) - return presets[name] -end - -local function presetsymbol(symbol) - if not presets[symbol] then - context.predefinesymbol { symbol } - end -end - -local function presetsymbollist(list) - if list then - for symbol in gmatch(list,"[^, ]+") do - presetsymbol(symbol) - end - end -end - -codeinjections.registersymbol = registersymbol -codeinjections.registeredsymbol = registeredsymbol -codeinjections.presetsymbol = presetsymbol -codeinjections.presetsymbollist = presetsymbollist - --- comments - --- local symbols = { --- Addition = pdfconstant("NewParagraph"), --- Attachment = pdfconstant("Attachment"), --- Balloon = pdfconstant("Comment"), --- Check = pdfconstant("Check Mark"), --- CheckMark = pdfconstant("Check Mark"), --- Circle = pdfconstant("Circle"), --- Cross = pdfconstant("Cross"), --- CrossHairs = pdfconstant("Cross Hairs"), --- Graph = pdfconstant("Graph"), --- InsertText = pdfconstant("Insert Text"), --- New = pdfconstant("Insert"), --- Paperclip = pdfconstant("Paperclip"), --- RightArrow = pdfconstant("Right Arrow"), --- RightPointer = pdfconstant("Right Pointer"), --- Star = pdfconstant("Star"), --- Tag = pdfconstant("Tag"), --- Text = pdfconstant("Note"), --- TextNote = pdfconstant("Text Note"), --- UpArrow = pdfconstant("Up Arrow"), --- UpLeftArrow = pdfconstant("Up-Left Arrow"), --- } - -local attachment_symbols = { - Graph = pdfconstant("GraphPushPin"), - Paperclip = pdfconstant("PaperclipTag"), - Pushpin = pdfconstant("PushPin"), -} - -attachment_symbols.PushPin = attachment_symbols.Pushpin -attachment_symbols.Default = attachment_symbols.Pushpin - -local comment_symbols = { - Comment = pdfconstant("Comment"), - Help = pdfconstant("Help"), - Insert = pdfconstant("Insert"), - Key = pdfconstant("Key"), - Newparagraph = pdfconstant("NewParagraph"), - Note = pdfconstant("Note"), - Paragraph = pdfconstant("Paragraph"), -} - -comment_symbols.NewParagraph = Newparagraph -comment_symbols.Default = Note - -local function analyzesymbol(symbol,collection) - if not symbol or symbol == "" then - return collection.Default, nil - elseif collection[symbol] then - return collection[symbol], nil - else - local setn, setr, setd - local set = settings_to_array(symbol) - if #set == 1 then - setn, setr, setd = set[1], set[1], set[1] - elseif #set == 2 then - setn, setr, setd = set[1], set[1], set[2] - else - setn, setr, setd = set[1], set[2], set[3] - end - local appearance = pdfdictionary { - N = setn and registeredsymbol(setn), - R = setr and registeredsymbol(setr), - D = setd and registeredsymbol(setd), - } - local appearanceref = pdfshareobjectreference(appearance) - return nil, appearanceref - end -end - -local function analyzelayer(layer) - -- todo: (specification.layer ~= "" and pdfreference(specification.layer)) or nil, -- todo: ref to layer -end - -local function analyzecolor(colorvalue,colormodel) - local cvalue = colorvalue and tonumber(colorvalue) - local cmodel = colormodel and tonumber(colormodel) or 3 - return cvalue and pdfarray { lpdf.colorvalues(cmodel,cvalue) } or nil -end - -local function analyzetransparency(transparencyvalue) - local tvalue = transparencyvalue and tonumber(transparencyvalue) - return tvalue and lpdf.transparencyvalue(tvalue) or nil -end - --- Attachments - -local nofattachments, attachments, filestreams, referenced = 0, { }, { 
}, { } - -local ignorereferenced = true -- fuzzy pdf spec .. twice in attachment list, can become an option - -local function flushembeddedfiles() - if next(filestreams) then - local e = pdfarray() - for tag, reference in next, filestreams do - if not reference then - report_attachment("unreferenced file, tag %a",tag) - elseif referenced[tag] == "hidden" then - e[#e+1] = pdfstring(tag) - e[#e+1] = reference -- already a reference - else - -- messy spec ... when annot not in named else twice in menu list acrobat - end - end - lpdf.addtonames("EmbeddedFiles",pdfreference(pdfflushobject(pdfdictionary{ Names = e }))) - end -end - -lpdf.registerdocumentfinalizer(flushembeddedfiles,"embeddedfiles") - -function codeinjections.embedfile(specification) - local data = specification.data - local filename = specification.file - local name = specification.name or "" - local title = specification.title or "" - local hash = specification.hash or filename - local keepdir = specification.keepdir -- can change - local usedname = specification.usedname - if filename == "" then - filename = nil - end - if data then - local r = filestreams[hash] - if r == false then - return nil - elseif r then - return r - elseif not filename then - filename = specification.tag - if not filename or filename == "" then - filename = specification.registered - end - if not filename or filename == "" then - filename = hash - end - end - else - if not filename then - return nil - end - local r = filestreams[hash] - if r == false then - return nil - elseif r then - return r - else - local foundname = resolvers.findbinfile(filename) or "" - if foundname == "" or not lfs.isfile(foundname) then - filestreams[filename] = false - return nil - else - specification.foundname = foundname - end - end - end - usedname = usedname ~= "" and usedname or filename - local basename = keepdir == true and usedname or file.basename(usedname) -local basename = gsub(basename,"%./","") - local savename = file.addsuffix(name ~= "" and name or basename,"txt") -- else no valid file - local a = pdfdictionary { Type = pdfconstant("EmbeddedFile") } - local f - if data then - f = pdfflushstreamobject(data,a) - specification.data = true -- signal that still data but already flushed - else - local foundname = specification.foundname or filename - f = pdfflushstreamfileobject(foundname,a) - end - local d = pdfdictionary { - Type = pdfconstant("Filespec"), - F = pdfstring(savename), - UF = pdfstring(savename), - EF = pdfdictionary { F = pdfreference(f) }, - Desc = title ~= "" and pdfunicode(title) or nil, - } - local r = pdfreference(pdfflushobject(d)) - filestreams[hash] = r - return r -end - -function nodeinjections.attachfile(specification) - local registered = specification.registered or "" - local data = specification.data - local hash - local filename - if data then - hash = md5.HEX(data) - else - filename = specification.file - if not filename or filename == "" then - report_attachment("no file specified, using registered %a instead",registered) - filename = registered - specification.file = registered - end - local foundname = resolvers.findbinfile(filename) or "" - if foundname == "" or not lfs.isfile(foundname) then - report_attachment("invalid filename %a, ignoring registered %a",filename,registered) - return nil - else - specification.foundname = foundname - end - hash = filename - end - specification.hash = hash - nofattachments = nofattachments + 1 - local registered = specification.registered or "" - local title = specification.title or "" - local 
subtitle = specification.subtitle or "" - local author = specification.author or "" - if registered == "" then - registered = filename - end - if author == "" then - author = title - title = "" - end - if author == "" then - author = filename or "" - end - if title == "" then - title = registered - end - local aref = attachments[registered] - if not aref then - aref = codeinjections.embedfile(specification) - attachments[registered] = aref - end - if not aref then - report_attachment("skipping attachment, registered %a",registered) - -- already reported - elseif specification.method == v_hidden then - referenced[hash] = "hidden" - else - referenced[hash] = "annotation" - local name, appearance = analyzesymbol(specification.symbol,attachment_symbols) - local d = pdfdictionary { - Subtype = pdfconstant("FileAttachment"), - FS = aref, - Contents = pdfunicode(title), - Name = name, - NM = pdfstring(format("attachment:%s",nofattachments)), - T = author ~= "" and pdfunicode(author) or nil, - Subj = subtitle ~= "" and pdfunicode(subtitle) or nil, - C = analyzecolor(specification.colorvalue,specification.colormodel), - CA = analyzetransparency(specification.transparencyvalue), - AP = appearance, - OC = analyzelayer(specification.layer), - } - local width, height, depth = specification.width or 0, specification.height or 0, specification.depth - local box = hpack_node(pdfannotation_node(width,height,depth,d())) - box.width, box.height, box.depth = width, height, depth - return box - end -end - -function codeinjections.attachmentid(filename) -- not used in context - return filestreams[filename] -end - -local nofcomments, usepopupcomments, stripleading = 0, false, true - -local defaultattributes = { - ["xmlns"] = "http://www.w3.org/1999/xhtml", - ["xmlns:xfa"] = "http://www.xfa.org/schema/xfa-data/1.0/", - ["xfa:contentType"] = "text/html", - ["xfa:APIVersion"] = "Acrobat:8.0.0", - ["xfa:spec"] = "2.4", -} - -local function checkcontent(text,option) - if option and option.xml then - local root = xml.convert(text) - if root and not root.er then - xml.checkbom(root) - local body = xml.first(root,"/body") - if body then - local at = body.at - for k, v in next, defaultattributes do - if not at[k] then - at[k] = v - end - end - -- local content = xml.textonly(root) - local richcontent = xml.tostring(root) - return nil, pdfunicode(richcontent) - end - end - end - return pdfunicode(text) -end - -function nodeinjections.comment(specification) -- brrr: seems to be done twice - nofcomments = nofcomments + 1 - local text = stripstring(specification.data or "") - if stripleading then - text = gsub(text,"[\n\r] *","\n") - end - local name, appearance = analyzesymbol(specification.symbol,comment_symbols) - local tag = specification.tag or "" -- this is somewhat messy as recent - local title = specification.title or "" -- versions of acrobat see the title - local subtitle = specification.subtitle or "" -- as author - local author = specification.author or "" - local option = settings_to_hash(specification.option or "") - if author == "" then - if title == "" then - title = tag - end - else - if subtitle == "" then - subtitle = title - elseif title ~= "" then - subtitle = subtitle .. ", " .. 
title - end - title = author - end - local content, richcontent = checkcontent(text,option) - local d = pdfdictionary { - Subtype = pdfconstant("Text"), - Open = option[v_max] and pdfboolean(true) or nil, - Contents = content, - RC = richcontent, - T = title ~= "" and pdfunicode(title) or nil, - Subj = subtitle ~= "" and pdfunicode(subtitle) or nil, - C = analyzecolor(specification.colorvalue,specification.colormodel), - CA = analyzetransparency(specification.transparencyvalue), - OC = analyzelayer(specification.layer), - Name = name, - NM = pdfstring(format("comment:%s",nofcomments)), - AP = appearance, - } - local width, height, depth = specification.width or 0, specification.height or 0, specification.depth - local box - if usepopupcomments then - -- rather useless as we can hide/vide - local nd = pdfreserveannotation() - local nc = pdfreserveannotation() - local c = pdfdictionary { - Subtype = pdfconstant("Popup"), - Parent = pdfreference(nd), - } - d.Popup = pdfreference(nc) - box = hpack_node( - pdfannotation_node(0,0,0,d(),nd), - pdfannotation_node(width,height,depth,c(),nc) - ) - else - box = hpack_node(pdfannotation_node(width,height,depth,d())) - end - box.width, box.height, box.depth = width, height, depth -- redundant - return box -end - --- rendering stuff --- --- object_1 -> <> >> --- object_2 -> <> >> --- rendering -> <> --- --- we only work foreward here (currently) --- annotation is to be packed at the tex end - --- aiff audio/aiff --- au audio/basic --- avi video/avi --- mid audio/midi --- mov video/quicktime --- mp3 audio/x-mp3 (mpeg) --- mp4 audio/mp4 --- mp4 video/mp4 --- mpeg video/mpeg --- smil application/smil --- swf application/x-shockwave-flash - --- P media play parameters (evt /BE for controls etc --- A boolean (audio) --- C boolean (captions) --- O boolean (overdubs) --- S boolean (subtitles) --- PL pdfconstant("ADBE_MCI"), - --- F = flags, --- T = title, --- Contents = rubish, --- AP = irrelevant, - --- sound is different, no window (or zero) so we need to collect them and --- force them if not set - -local ms, mu, mf = { }, { }, { } - -local function delayed(label) - local a = pdfreserveannotation() - mu[label] = a - return pdfreference(a) -end - -local function insertrenderingwindow(specification) - local label = specification.label ---~ local openpage = specification.openpage ---~ local closepage = specification.closepage - if specification.option == v_auto then - if openpageaction then - -- \handlereferenceactions{\v!StartRendering{#2}} - end - if closepageaction then - -- \handlereferenceactions{\v!StopRendering {#2}} - end - end - local actions = nil - if openpage or closepage then - actions = pdfdictionary { - PO = (openpage and lpdf.action(openpage )) or nil, - PC = (closepage and lpdf.action(closepage)) or nil, - } - end - local page = tonumber(specification.page) or texcount.realpageno -- todo - local r = mu[label] or pdfreserveannotation() -- why the reserve here? - local a = pdfdictionary { - S = pdfconstant("Rendition"), - R = mf[label], - OP = 0, - AN = pdfreference(r), - } - local d = pdfdictionary { - Subtype = pdfconstant("Screen"), - P = pdfreference(pdfpagereference(page)), - A = a, -- needed in order to make the annotation clickable (i.e. 
don't bark) - Border = pdf_border, - AA = actions, - } - local width = specification.width or 0 - local height = specification.height or 0 - if height == 0 or width == 0 then - -- todo: sound needs no window - end - write_node(pdfannotation_node(width,height,0,d(),r)) -- save ref - return pdfreference(r) -end - --- some dictionaries can have a MH (must honor) or BE (best effort) capsule - -local function insertrendering(specification) - local label = specification.label - local option = settings_to_hash(specification.option) - if not mf[label] then - local filename = specification.filename - local isurl = find(filename,"://") - --~ local start = pdfdictionary { - --~ Type = pdfconstant("MediaOffset"), - --~ S = pdfconstant("T"), -- time - --~ T = pdfdictionary { -- time - --~ Type = pdfconstant("Timespan"), - --~ S = pdfconstant("S"), - --~ V = 3, -- time in seconds - --~ }, - --~ } - --~ local start = pdfdictionary { - --~ Type = pdfconstant("MediaOffset"), - --~ S = pdfconstant("F"), -- frame - --~ F = 100 -- framenumber - --~ } - --~ local start = pdfdictionary { - --~ Type = pdfconstant("MediaOffset"), - --~ S = pdfconstant("M"), -- mark - --~ M = "somemark", - --~ } - --~ local parameters = pdfdictionary { - --~ BE = pdfdictionary { - --~ B = start, - --~ } - --~ } - --~ local parameters = pdfdictionary { - --~ Type = pdfconstant(MediaPermissions), - --~ TF = pdfstring("TEMPALWAYS") }, -- TEMPNEVER TEMPEXTRACT TEMPACCESS TEMPALWAYS - --~ } - local descriptor = pdfdictionary { - Type = pdfconstant("Filespec"), - F = filename, - } - if isurl then - descriptor.FS = pdfconstant("URL") - elseif option[v_embed] then - descriptor.EF = codeinjections.embedfile { file = filename } - end - local clip = pdfdictionary { - Type = pdfconstant("MediaClip"), - S = pdfconstant("MCD"), - N = label, - CT = specification.mime, - Alt = pdfarray { "", "file not found" }, -- language id + message - D = pdfreference(pdfflushobject(descriptor)), - -- P = pdfreference(pdfflushobject(parameters)), - } - local rendition = pdfdictionary { - Type = pdfconstant("Rendition"), - S = pdfconstant("MR"), - N = label, - C = pdfreference(pdfflushobject(clip)), - } - mf[label] = pdfreference(pdfflushobject(rendition)) - end -end - -local function insertrenderingobject(specification) -- todo - local label = specification.label - if not mf[label] then - report_media("unknown medium, label %a",label) - local clip = pdfdictionary { -- does not work that well one level up - Type = pdfconstant("MediaClip"), - S = pdfconstant("MCD"), - N = label, - D = pdfreference(unknown), -- not label but objectname, hm .. todo? 
- } - local rendition = pdfdictionary { - Type = pdfconstant("Rendition"), - S = pdfconstant("MR"), - N = label, - C = pdfreference(pdfflushobject(clip)), - } - mf[label] = pdfreference(pdfflushobject(rendition)) - end -end - -function codeinjections.processrendering(label) - local specification = interactions.renderings.rendering(label) - if not specification then - -- error - elseif specification.type == "external" then - insertrendering(specification) - else - insertrenderingobject(specification) - end -end - -function codeinjections.insertrenderingwindow(specification) - local label = specification.label - codeinjections.processrendering(label) - ms[label] = insertrenderingwindow(specification) -end - -local function set(operation,arguments) - codeinjections.processrendering(arguments) - return pdfdictionary { - S = pdfconstant("Rendition"), - OP = operation, - R = mf[arguments], - AN = ms[arguments] or delayed(arguments), - } -end - -function executers.startrendering (arguments) return set(0,arguments) end -function executers.stoprendering (arguments) return set(1,arguments) end -function executers.pauserendering (arguments) return set(2,arguments) end -function executers.resumerendering(arguments) return set(3,arguments) end +if not modules then modules = { } end modules ['lpdf-wid'] = { + version = 1.001, + comment = "companion to lpdf-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local gmatch, gsub, find, lower, format = string.gmatch, string.gsub, string.find, string.lower, string.format +local stripstring = string.strip +local texbox, texcount = tex.box, tex.count +local settings_to_array = utilities.parsers.settings_to_array +local settings_to_hash = utilities.parsers.settings_to_hash + +local report_media = logs.reporter("backend","media") +local report_attachment = logs.reporter("backend","attachment") + +local backends, lpdf, nodes = backends, lpdf, nodes + +local nodeinjections = backends.pdf.nodeinjections +local codeinjections = backends.pdf.codeinjections +local registrations = backends.pdf.registrations + +local executers = structures.references.executers +local variables = interfaces.variables + +local v_hidden = variables.hidden +local v_normal = variables.normal +local v_auto = variables.auto +local v_embed = variables.embed +local v_unknown = variables.unknown +local v_max = variables.max + +local pdfconstant = lpdf.constant +local pdfdictionary = lpdf.dictionary +local pdfarray = lpdf.array +local pdfreference = lpdf.reference +local pdfunicode = lpdf.unicode +local pdfstring = lpdf.string +local pdfboolean = lpdf.boolean +local pdfcolorspec = lpdf.colorspec +local pdfflushobject = lpdf.flushobject +local pdfflushstreamobject = lpdf.flushstreamobject +local pdfflushstreamfileobject = lpdf.flushstreamfileobject +local pdfreserveannotation = lpdf.reserveannotation +local pdfreserveobject = lpdf.reserveobject +local pdfpagereference = lpdf.pagereference +local pdfshareobjectreference = lpdf.shareobjectreference + +local nodepool = nodes.pool + +local pdfannotation_node = nodepool.pdfannotation + +local hpack_node = node.hpack +local write_node = node.write -- test context(...) 
instead + +local pdf_border = pdfarray { 0, 0, 0 } -- can be shared + +-- symbols + +local presets = { } -- xforms + +local function registersymbol(name,n) + presets[name] = pdfreference(n) +end + +local function registeredsymbol(name) + return presets[name] +end + +local function presetsymbol(symbol) + if not presets[symbol] then + context.predefinesymbol { symbol } + end +end + +local function presetsymbollist(list) + if list then + for symbol in gmatch(list,"[^, ]+") do + presetsymbol(symbol) + end + end +end + +codeinjections.registersymbol = registersymbol +codeinjections.registeredsymbol = registeredsymbol +codeinjections.presetsymbol = presetsymbol +codeinjections.presetsymbollist = presetsymbollist + +-- comments + +-- local symbols = { +-- Addition = pdfconstant("NewParagraph"), +-- Attachment = pdfconstant("Attachment"), +-- Balloon = pdfconstant("Comment"), +-- Check = pdfconstant("Check Mark"), +-- CheckMark = pdfconstant("Check Mark"), +-- Circle = pdfconstant("Circle"), +-- Cross = pdfconstant("Cross"), +-- CrossHairs = pdfconstant("Cross Hairs"), +-- Graph = pdfconstant("Graph"), +-- InsertText = pdfconstant("Insert Text"), +-- New = pdfconstant("Insert"), +-- Paperclip = pdfconstant("Paperclip"), +-- RightArrow = pdfconstant("Right Arrow"), +-- RightPointer = pdfconstant("Right Pointer"), +-- Star = pdfconstant("Star"), +-- Tag = pdfconstant("Tag"), +-- Text = pdfconstant("Note"), +-- TextNote = pdfconstant("Text Note"), +-- UpArrow = pdfconstant("Up Arrow"), +-- UpLeftArrow = pdfconstant("Up-Left Arrow"), +-- } + +local attachment_symbols = { + Graph = pdfconstant("GraphPushPin"), + Paperclip = pdfconstant("PaperclipTag"), + Pushpin = pdfconstant("PushPin"), +} + +attachment_symbols.PushPin = attachment_symbols.Pushpin +attachment_symbols.Default = attachment_symbols.Pushpin + +local comment_symbols = { + Comment = pdfconstant("Comment"), + Help = pdfconstant("Help"), + Insert = pdfconstant("Insert"), + Key = pdfconstant("Key"), + Newparagraph = pdfconstant("NewParagraph"), + Note = pdfconstant("Note"), + Paragraph = pdfconstant("Paragraph"), +} + +comment_symbols.NewParagraph = Newparagraph +comment_symbols.Default = Note + +local function analyzesymbol(symbol,collection) + if not symbol or symbol == "" then + return collection.Default, nil + elseif collection[symbol] then + return collection[symbol], nil + else + local setn, setr, setd + local set = settings_to_array(symbol) + if #set == 1 then + setn, setr, setd = set[1], set[1], set[1] + elseif #set == 2 then + setn, setr, setd = set[1], set[1], set[2] + else + setn, setr, setd = set[1], set[2], set[3] + end + local appearance = pdfdictionary { + N = setn and registeredsymbol(setn), + R = setr and registeredsymbol(setr), + D = setd and registeredsymbol(setd), + } + local appearanceref = pdfshareobjectreference(appearance) + return nil, appearanceref + end +end + +local function analyzelayer(layer) + -- todo: (specification.layer ~= "" and pdfreference(specification.layer)) or nil, -- todo: ref to layer +end + +local function analyzecolor(colorvalue,colormodel) + local cvalue = colorvalue and tonumber(colorvalue) + local cmodel = colormodel and tonumber(colormodel) or 3 + return cvalue and pdfarray { lpdf.colorvalues(cmodel,cvalue) } or nil +end + +local function analyzetransparency(transparencyvalue) + local tvalue = transparencyvalue and tonumber(transparencyvalue) + return tvalue and lpdf.transparencyvalue(tvalue) or nil +end + +-- Attachments + +local nofattachments, attachments, filestreams, referenced = 0, { }, { 
}, { } + +local ignorereferenced = true -- fuzzy pdf spec .. twice in attachment list, can become an option + +local function flushembeddedfiles() + if next(filestreams) then + local e = pdfarray() + for tag, reference in next, filestreams do + if not reference then + report_attachment("unreferenced file, tag %a",tag) + elseif referenced[tag] == "hidden" then + e[#e+1] = pdfstring(tag) + e[#e+1] = reference -- already a reference + else + -- messy spec ... when annot not in named else twice in menu list acrobat + end + end + lpdf.addtonames("EmbeddedFiles",pdfreference(pdfflushobject(pdfdictionary{ Names = e }))) + end +end + +lpdf.registerdocumentfinalizer(flushembeddedfiles,"embeddedfiles") + +function codeinjections.embedfile(specification) + local data = specification.data + local filename = specification.file + local name = specification.name or "" + local title = specification.title or "" + local hash = specification.hash or filename + local keepdir = specification.keepdir -- can change + local usedname = specification.usedname + if filename == "" then + filename = nil + end + if data then + local r = filestreams[hash] + if r == false then + return nil + elseif r then + return r + elseif not filename then + filename = specification.tag + if not filename or filename == "" then + filename = specification.registered + end + if not filename or filename == "" then + filename = hash + end + end + else + if not filename then + return nil + end + local r = filestreams[hash] + if r == false then + return nil + elseif r then + return r + else + local foundname = resolvers.findbinfile(filename) or "" + if foundname == "" or not lfs.isfile(foundname) then + filestreams[filename] = false + return nil + else + specification.foundname = foundname + end + end + end + usedname = usedname ~= "" and usedname or filename + local basename = keepdir == true and usedname or file.basename(usedname) +local basename = gsub(basename,"%./","") + local savename = file.addsuffix(name ~= "" and name or basename,"txt") -- else no valid file + local a = pdfdictionary { Type = pdfconstant("EmbeddedFile") } + local f + if data then + f = pdfflushstreamobject(data,a) + specification.data = true -- signal that still data but already flushed + else + local foundname = specification.foundname or filename + f = pdfflushstreamfileobject(foundname,a) + end + local d = pdfdictionary { + Type = pdfconstant("Filespec"), + F = pdfstring(savename), + UF = pdfstring(savename), + EF = pdfdictionary { F = pdfreference(f) }, + Desc = title ~= "" and pdfunicode(title) or nil, + } + local r = pdfreference(pdfflushobject(d)) + filestreams[hash] = r + return r +end + +function nodeinjections.attachfile(specification) + local registered = specification.registered or "" + local data = specification.data + local hash + local filename + if data then + hash = md5.HEX(data) + else + filename = specification.file + if not filename or filename == "" then + report_attachment("no file specified, using registered %a instead",registered) + filename = registered + specification.file = registered + end + local foundname = resolvers.findbinfile(filename) or "" + if foundname == "" or not lfs.isfile(foundname) then + report_attachment("invalid filename %a, ignoring registered %a",filename,registered) + return nil + else + specification.foundname = foundname + end + hash = filename + end + specification.hash = hash + nofattachments = nofattachments + 1 + local registered = specification.registered or "" + local title = specification.title or "" + local 
subtitle = specification.subtitle or "" + local author = specification.author or "" + if registered == "" then + registered = filename + end + if author == "" then + author = title + title = "" + end + if author == "" then + author = filename or "" + end + if title == "" then + title = registered + end + local aref = attachments[registered] + if not aref then + aref = codeinjections.embedfile(specification) + attachments[registered] = aref + end + if not aref then + report_attachment("skipping attachment, registered %a",registered) + -- already reported + elseif specification.method == v_hidden then + referenced[hash] = "hidden" + else + referenced[hash] = "annotation" + local name, appearance = analyzesymbol(specification.symbol,attachment_symbols) + local d = pdfdictionary { + Subtype = pdfconstant("FileAttachment"), + FS = aref, + Contents = pdfunicode(title), + Name = name, + NM = pdfstring(format("attachment:%s",nofattachments)), + T = author ~= "" and pdfunicode(author) or nil, + Subj = subtitle ~= "" and pdfunicode(subtitle) or nil, + C = analyzecolor(specification.colorvalue,specification.colormodel), + CA = analyzetransparency(specification.transparencyvalue), + AP = appearance, + OC = analyzelayer(specification.layer), + } + local width, height, depth = specification.width or 0, specification.height or 0, specification.depth + local box = hpack_node(pdfannotation_node(width,height,depth,d())) + box.width, box.height, box.depth = width, height, depth + return box + end +end + +function codeinjections.attachmentid(filename) -- not used in context + return filestreams[filename] +end + +local nofcomments, usepopupcomments, stripleading = 0, false, true + +local defaultattributes = { + ["xmlns"] = "http://www.w3.org/1999/xhtml", + ["xmlns:xfa"] = "http://www.xfa.org/schema/xfa-data/1.0/", + ["xfa:contentType"] = "text/html", + ["xfa:APIVersion"] = "Acrobat:8.0.0", + ["xfa:spec"] = "2.4", +} + +local function checkcontent(text,option) + if option and option.xml then + local root = xml.convert(text) + if root and not root.er then + xml.checkbom(root) + local body = xml.first(root,"/body") + if body then + local at = body.at + for k, v in next, defaultattributes do + if not at[k] then + at[k] = v + end + end + -- local content = xml.textonly(root) + local richcontent = xml.tostring(root) + return nil, pdfunicode(richcontent) + end + end + end + return pdfunicode(text) +end + +function nodeinjections.comment(specification) -- brrr: seems to be done twice + nofcomments = nofcomments + 1 + local text = stripstring(specification.data or "") + if stripleading then + text = gsub(text,"[\n\r] *","\n") + end + local name, appearance = analyzesymbol(specification.symbol,comment_symbols) + local tag = specification.tag or "" -- this is somewhat messy as recent + local title = specification.title or "" -- versions of acrobat see the title + local subtitle = specification.subtitle or "" -- as author + local author = specification.author or "" + local option = settings_to_hash(specification.option or "") + if author == "" then + if title == "" then + title = tag + end + else + if subtitle == "" then + subtitle = title + elseif title ~= "" then + subtitle = subtitle .. ", " .. 
title + end + title = author + end + local content, richcontent = checkcontent(text,option) + local d = pdfdictionary { + Subtype = pdfconstant("Text"), + Open = option[v_max] and pdfboolean(true) or nil, + Contents = content, + RC = richcontent, + T = title ~= "" and pdfunicode(title) or nil, + Subj = subtitle ~= "" and pdfunicode(subtitle) or nil, + C = analyzecolor(specification.colorvalue,specification.colormodel), + CA = analyzetransparency(specification.transparencyvalue), + OC = analyzelayer(specification.layer), + Name = name, + NM = pdfstring(format("comment:%s",nofcomments)), + AP = appearance, + } + local width, height, depth = specification.width or 0, specification.height or 0, specification.depth + local box + if usepopupcomments then + -- rather useless as we can hide/vide + local nd = pdfreserveannotation() + local nc = pdfreserveannotation() + local c = pdfdictionary { + Subtype = pdfconstant("Popup"), + Parent = pdfreference(nd), + } + d.Popup = pdfreference(nc) + box = hpack_node( + pdfannotation_node(0,0,0,d(),nd), + pdfannotation_node(width,height,depth,c(),nc) + ) + else + box = hpack_node(pdfannotation_node(width,height,depth,d())) + end + box.width, box.height, box.depth = width, height, depth -- redundant + return box +end + +-- rendering stuff +-- +-- object_1 -> <> >> +-- object_2 -> <> >> +-- rendering -> <> +-- +-- we only work foreward here (currently) +-- annotation is to be packed at the tex end + +-- aiff audio/aiff +-- au audio/basic +-- avi video/avi +-- mid audio/midi +-- mov video/quicktime +-- mp3 audio/x-mp3 (mpeg) +-- mp4 audio/mp4 +-- mp4 video/mp4 +-- mpeg video/mpeg +-- smil application/smil +-- swf application/x-shockwave-flash + +-- P media play parameters (evt /BE for controls etc +-- A boolean (audio) +-- C boolean (captions) +-- O boolean (overdubs) +-- S boolean (subtitles) +-- PL pdfconstant("ADBE_MCI"), + +-- F = flags, +-- T = title, +-- Contents = rubish, +-- AP = irrelevant, + +-- sound is different, no window (or zero) so we need to collect them and +-- force them if not set + +local ms, mu, mf = { }, { }, { } + +local function delayed(label) + local a = pdfreserveannotation() + mu[label] = a + return pdfreference(a) +end + +local function insertrenderingwindow(specification) + local label = specification.label +--~ local openpage = specification.openpage +--~ local closepage = specification.closepage + if specification.option == v_auto then + if openpageaction then + -- \handlereferenceactions{\v!StartRendering{#2}} + end + if closepageaction then + -- \handlereferenceactions{\v!StopRendering {#2}} + end + end + local actions = nil + if openpage or closepage then + actions = pdfdictionary { + PO = (openpage and lpdf.action(openpage )) or nil, + PC = (closepage and lpdf.action(closepage)) or nil, + } + end + local page = tonumber(specification.page) or texcount.realpageno -- todo + local r = mu[label] or pdfreserveannotation() -- why the reserve here? + local a = pdfdictionary { + S = pdfconstant("Rendition"), + R = mf[label], + OP = 0, + AN = pdfreference(r), + } + local d = pdfdictionary { + Subtype = pdfconstant("Screen"), + P = pdfreference(pdfpagereference(page)), + A = a, -- needed in order to make the annotation clickable (i.e. 
don't bark) + Border = pdf_border, + AA = actions, + } + local width = specification.width or 0 + local height = specification.height or 0 + if height == 0 or width == 0 then + -- todo: sound needs no window + end + write_node(pdfannotation_node(width,height,0,d(),r)) -- save ref + return pdfreference(r) +end + +-- some dictionaries can have a MH (must honor) or BE (best effort) capsule + +local function insertrendering(specification) + local label = specification.label + local option = settings_to_hash(specification.option) + if not mf[label] then + local filename = specification.filename + local isurl = find(filename,"://") + --~ local start = pdfdictionary { + --~ Type = pdfconstant("MediaOffset"), + --~ S = pdfconstant("T"), -- time + --~ T = pdfdictionary { -- time + --~ Type = pdfconstant("Timespan"), + --~ S = pdfconstant("S"), + --~ V = 3, -- time in seconds + --~ }, + --~ } + --~ local start = pdfdictionary { + --~ Type = pdfconstant("MediaOffset"), + --~ S = pdfconstant("F"), -- frame + --~ F = 100 -- framenumber + --~ } + --~ local start = pdfdictionary { + --~ Type = pdfconstant("MediaOffset"), + --~ S = pdfconstant("M"), -- mark + --~ M = "somemark", + --~ } + --~ local parameters = pdfdictionary { + --~ BE = pdfdictionary { + --~ B = start, + --~ } + --~ } + --~ local parameters = pdfdictionary { + --~ Type = pdfconstant(MediaPermissions), + --~ TF = pdfstring("TEMPALWAYS") }, -- TEMPNEVER TEMPEXTRACT TEMPACCESS TEMPALWAYS + --~ } + local descriptor = pdfdictionary { + Type = pdfconstant("Filespec"), + F = filename, + } + if isurl then + descriptor.FS = pdfconstant("URL") + elseif option[v_embed] then + descriptor.EF = codeinjections.embedfile { file = filename } + end + local clip = pdfdictionary { + Type = pdfconstant("MediaClip"), + S = pdfconstant("MCD"), + N = label, + CT = specification.mime, + Alt = pdfarray { "", "file not found" }, -- language id + message + D = pdfreference(pdfflushobject(descriptor)), + -- P = pdfreference(pdfflushobject(parameters)), + } + local rendition = pdfdictionary { + Type = pdfconstant("Rendition"), + S = pdfconstant("MR"), + N = label, + C = pdfreference(pdfflushobject(clip)), + } + mf[label] = pdfreference(pdfflushobject(rendition)) + end +end + +local function insertrenderingobject(specification) -- todo + local label = specification.label + if not mf[label] then + report_media("unknown medium, label %a",label) + local clip = pdfdictionary { -- does not work that well one level up + Type = pdfconstant("MediaClip"), + S = pdfconstant("MCD"), + N = label, + D = pdfreference(unknown), -- not label but objectname, hm .. todo? 
+ } + local rendition = pdfdictionary { + Type = pdfconstant("Rendition"), + S = pdfconstant("MR"), + N = label, + C = pdfreference(pdfflushobject(clip)), + } + mf[label] = pdfreference(pdfflushobject(rendition)) + end +end + +function codeinjections.processrendering(label) + local specification = interactions.renderings.rendering(label) + if not specification then + -- error + elseif specification.type == "external" then + insertrendering(specification) + else + insertrenderingobject(specification) + end +end + +function codeinjections.insertrenderingwindow(specification) + local label = specification.label + codeinjections.processrendering(label) + ms[label] = insertrenderingwindow(specification) +end + +local function set(operation,arguments) + codeinjections.processrendering(arguments) + return pdfdictionary { + S = pdfconstant("Rendition"), + OP = operation, + R = mf[arguments], + AN = ms[arguments] or delayed(arguments), + } +end + +function executers.startrendering (arguments) return set(0,arguments) end +function executers.stoprendering (arguments) return set(1,arguments) end +function executers.pauserendering (arguments) return set(2,arguments) end +function executers.resumerendering(arguments) return set(3,arguments) end diff --git a/tex/context/base/luat-bwc.lua b/tex/context/base/luat-bwc.lua index 993de7bf3..b8672469e 100644 --- a/tex/context/base/luat-bwc.lua +++ b/tex/context/base/luat-bwc.lua @@ -1,32 +1,32 @@ -if not modules then modules = { } end modules ['luat-bwc'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- backward compatibility - -if not tex.wd then - - local box = tex.box - - local wd = { } setmetatable(wd, { - __index = function(t,k) local bk = box[k] return bk and bk.width or 0 end, - __newindex = function(t,k,v) local bk = box[k] if bk then bk.width = v end end, - } ) - - local ht = { } setmetatable(ht, { - __index = function(t,k) local bk = box[k] return bk and bk.height or 0 end, - __newindex = function(t,k,v) local bk = box[k] if bk then bk.height = v end end, - } ) - - local dp = { } setmetatable(dp, { - __index = function(t,k) local bk = box[k] return bk and bk.depth or 0 end, - __newindex = function(t,k,v) local bk = box[k] if bk then bk.depth = v end end, - } ) - - -- tex.wd, tex.ht, tex.dp = wd, ht, dp - -end +if not modules then modules = { } end modules ['luat-bwc'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- backward compatibility + +if not tex.wd then + + local box = tex.box + + local wd = { } setmetatable(wd, { + __index = function(t,k) local bk = box[k] return bk and bk.width or 0 end, + __newindex = function(t,k,v) local bk = box[k] if bk then bk.width = v end end, + } ) + + local ht = { } setmetatable(ht, { + __index = function(t,k) local bk = box[k] return bk and bk.height or 0 end, + __newindex = function(t,k,v) local bk = box[k] if bk then bk.height = v end end, + } ) + + local dp = { } setmetatable(dp, { + __index = function(t,k) local bk = box[k] return bk and bk.depth or 0 end, + __newindex = function(t,k,v) local bk = box[k] if bk then bk.depth = v end end, + } ) + + -- tex.wd, tex.ht, tex.dp = wd, ht, dp + +end diff --git a/tex/context/base/luat-cbk.lua b/tex/context/base/luat-cbk.lua 
index 5aa12005b..4a88cfed7 100644 --- a/tex/context/base/luat-cbk.lua +++ b/tex/context/base/luat-cbk.lua @@ -1,320 +1,320 @@ -if not modules then modules = { } end modules ['luat-cbk'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local insert, remove, find, format = table.insert, table.remove, string.find, string.format -local collectgarbage, type, next = collectgarbage, type, next -local round = math.round -local sortedhash, tohash = table.sortedhash, table.tohash - -local trace_checking = false trackers.register("memory.checking", function(v) trace_checking = v end) - -local report_callbacks = logs.reporter("system","callbacks") -local report_memory = logs.reporter("system","memory") - ---[[ldx-- -

Callbacks are the real asset of LuaTeX. They permit you to hook -your own code into the engine. Here we implement a few handy -auxiliary functions.

---ldx]]-- - -callbacks = callbacks or { } -local callbacks = callbacks - ---[[ldx-- -

When you (temporarily) want to install a callback function, and after a -while want to revert to the original one, you can use the following two -functions.

---ldx]]-- - -local trace_callbacks = false trackers.register("system.callbacks", function(v) trace_callbacks = v end) -local trace_calls = false -- only used when analyzing performance and initializations - -local register_callback = callback.register -local find_callback = callback.find -local list_callbacks = callback.list - -local frozen, stack, list = { }, { }, callbacks.list - -if not list then -- otherwise counters get reset - - list = utilities.storage.allocate(list_callbacks()) - - for k, _ in next, list do - list[k] = 0 - end - - callbacks.list = list - -end - -local delayed = tohash { - "buildpage_filter", -} - - -if trace_calls then - - local functions = { } - local original = register_callback - - register_callback = function(name,func) - if type(func) == "function" then - if functions[name] then - functions[name] = func - return find_callback(name) - else - functions[name] = func - local cnuf = function(...) - list[name] = list[name] + 1 - return functions[name](...) - end - return original(name,cnuf) - end - else - return original(name,func) - end - end - -end - -local function frozen_message(what,name) - report_callbacks("not %s frozen %a to %a",what,name,frozen[name]) -end - -local function frozen_callback(name) - return nil, format("callback '%s' is frozen to '%s'",name,frozen[name]) -- no formatter yet -end - -local function state(name) - local f = find_callback(name) - if f == false then - return "disabled" - elseif f then - return "enabled" - else - return "undefined" - end -end - -function callbacks.known(name) - return list[name] -end - -function callbacks.report() - for name, _ in sortedhash(list) do - local str = frozen[name] - if str then - report_callbacks("%s: %s -> %s",state(name),name,str) - else - report_callbacks("%s: %s",state(name),name) - end - end -end - -function callbacks.freeze(name,freeze) - freeze = type(freeze) == "string" and freeze - if find(name,"%*") then - local pattern = name - for name, _ in next, list do - if find(name,pattern) then - frozen[name] = freeze or frozen[name] or "frozen" - end - end - else - frozen[name] = freeze or frozen[name] or "frozen" - end -end - -function callbacks.register(name,func,freeze) - if frozen[name] then - if trace_callbacks then - frozen_message("registering",name) - end - return frozen_callback(name) - elseif freeze then - frozen[name] = type(freeze) == "string" and freeze or "registered" - end - if delayed[name] and environment.initex then - return nil - end - return register_callback(name,func) -end - -function callback.register(name,func) -- original - if not frozen[name] then - return register_callback(name,func) - elseif trace_callbacks then - frozen_message("registering",name) - end - return frozen_callback(name) -end - -function callbacks.push(name,func) - if not frozen[name] then - local sn = stack[name] - if not sn then - sn = { } - stack[name] = sn - end - insert(sn,find_callback(name)) - register_callback(name, func) - elseif trace_callbacks then - frozen_message("pushing",name) - end -end - -function callbacks.pop(name) - if not frozen[name] then - local sn = stack[name] - if not sn or #sn == 0 then - -- some error - register_callback(name, nil) -- ! 
really needed - else - -- this fails: register_callback(name, remove(stack[name])) - local func = remove(sn) - register_callback(name, func) - end - end -end - -if trace_calls then - statistics.register("callback details", function() - local t = { } -- todo: pass function to register and quit at nil - for name, n in sortedhash(list) do - if n > 0 then - t[#t+1] = format("%s -> %s",name,n) - end - end - return t - end) -end - --- -- somehow crashes later on --- --- callbacks.freeze("find_.*_file","finding file") --- callbacks.freeze("read_.*_file","reading file") --- callbacks.freeze("open_.*_file","opening file") - ---[[ldx-- -

The simple case is to remove the callback:

- - -callbacks.push('linebreak_filter') -... some actions ... -callbacks.pop('linebreak_filter') - - -

Often, in such a case, another callback or a macro call will pop -the original.

- -

In practice one will install a new handler, like in:

- - -callbacks.push('linebreak_filter', function(...) - return something_done(...) -end) - - -

Even more interesting is:

- - -callbacks.push('linebreak_filter', function(...) - callbacks.pop('linebreak_filter') - return something_done(...) -end) - - -

This does a one-shot.

---ldx]]-- - ---[[ldx-- -

Callbacks may result in doing some hard work -which takes time and above all resources. Sometimes it makes -sense to disable or tune the garbage collector in order to -keep the use of resources acceptable.

- -

At some point in the development we did some tests with counting -nodes (in this case 121049).

-
-   setstepmul   seconds   megabytes
-   200          24.0      80.5
-   175          21.0      78.2
-   150          22.0      74.6
-   160          22.0      74.6
-   165          21.0      77.6
-   125          21.5      89.2
-   100          21.5      88.4
-
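
-- The table above comes from experiments with the collector's step multiplier.
-- As a quick illustration only (not part of the patched module), this is how such
-- tuning is done with the standard Lua collectgarbage interface; the values are
-- examples, not recommendations taken from the measurements above:

collectgarbage("setstepmul", 165)      -- let each incremental step do more work
collectgarbage("setpause", 200)        -- start a new cycle once memory use doubles
print(collectgarbage("count"))         -- current Lua heap size in kilobytes
collectgarbage("collect")              -- force a full sweep, as the tracing code below does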

The following code is kind of experimental. In the documents -that describe the development of we report -on speed tests. One observation is that it sometimes helps to -restart the collector. Okay, experimental code has been removed, -because messing around with the gc is too unpredictable.

---ldx]]-- - --- For the moment we keep this here and not in util-gbc.lua or so. - -utilities = utilities or { } -utilities.garbagecollector = utilities.garbagecollector or { } -local garbagecollector = utilities.garbagecollector - -garbagecollector.enabled = false -- could become a directive -garbagecollector.criterium = 4*1024*1024 - --- Lua allocates up to 12 times the amount of memory needed for --- handling a string, and for large binary chunks (like chinese otf --- files) we get a prominent memory consumption. Even when a variable --- is nilled, there is some delay in freeing the associated memory (the --- hashed string) because if we do the same thing directly afterwards, --- we see only a slight increase in memory. For that reason it makes --- sense to do a collector pass after a huge file. --- --- test file: --- --- function test() --- local b = collectgarbage("count") --- local s = io.loaddata("some font table, e.g. a big tmc file") --- local a = collectgarbage("count") --- print(">>> STATUS",b,a,a-b,#s,1000*(a-b)/#s) --- end --- --- test() test() test() test() collectgarbage("collect") test() test() test() test() --- --- As a result of this, LuaTeX now uses an optimized version of f:read("*a"), --- one that does not use the 4K allocations but allocates in one step. - -function garbagecollector.check(size,criterium) - if garbagecollector.enabled then - criterium = criterium or garbagecollector.criterium - if not size or (criterium and criterium > 0 and size > criterium) then - if trace_checking then - local b = collectgarbage("count") - collectgarbage("collect") - local a = collectgarbage("count") - report_memory("forced sweep, collected: %s MB, used: %s MB",round((b-a)/1000),round(a/1000)) - else - collectgarbage("collect") - end - end - end -end - --- this will move - -commands = commands or { } - -function commands.showcallbacks() - local NC, NR, verbatim = context.NC, context.NR, context.type - context.starttabulate { "|l|l|p|" } - for name, _ in sortedhash(list) do - NC() verbatim(name) NC() verbatim(state(name)) NC() context(frozen[name] or "") NC() NR() - end - context.stoptabulate() -end +if not modules then modules = { } end modules ['luat-cbk'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local insert, remove, find, format = table.insert, table.remove, string.find, string.format +local collectgarbage, type, next = collectgarbage, type, next +local round = math.round +local sortedhash, tohash = table.sortedhash, table.tohash + +local trace_checking = false trackers.register("memory.checking", function(v) trace_checking = v end) + +local report_callbacks = logs.reporter("system","callbacks") +local report_memory = logs.reporter("system","memory") + +--[[ldx-- +

Callbacks are the real asset of LuaTeX. They permit you to hook +your own code into the engine. Here we implement a few handy +auxiliary functions.

+--ldx]]-- + +callbacks = callbacks or { } +local callbacks = callbacks + +--[[ldx-- +

When you (temporarily) want to install a callback function, and after a +while want to revert to the original one, you can use the following two +functions.

+--ldx]]-- + +local trace_callbacks = false trackers.register("system.callbacks", function(v) trace_callbacks = v end) +local trace_calls = false -- only used when analyzing performance and initializations + +local register_callback = callback.register +local find_callback = callback.find +local list_callbacks = callback.list + +local frozen, stack, list = { }, { }, callbacks.list + +if not list then -- otherwise counters get reset + + list = utilities.storage.allocate(list_callbacks()) + + for k, _ in next, list do + list[k] = 0 + end + + callbacks.list = list + +end + +local delayed = tohash { + "buildpage_filter", +} + + +if trace_calls then + + local functions = { } + local original = register_callback + + register_callback = function(name,func) + if type(func) == "function" then + if functions[name] then + functions[name] = func + return find_callback(name) + else + functions[name] = func + local cnuf = function(...) + list[name] = list[name] + 1 + return functions[name](...) + end + return original(name,cnuf) + end + else + return original(name,func) + end + end + +end + +local function frozen_message(what,name) + report_callbacks("not %s frozen %a to %a",what,name,frozen[name]) +end + +local function frozen_callback(name) + return nil, format("callback '%s' is frozen to '%s'",name,frozen[name]) -- no formatter yet +end + +local function state(name) + local f = find_callback(name) + if f == false then + return "disabled" + elseif f then + return "enabled" + else + return "undefined" + end +end + +function callbacks.known(name) + return list[name] +end + +function callbacks.report() + for name, _ in sortedhash(list) do + local str = frozen[name] + if str then + report_callbacks("%s: %s -> %s",state(name),name,str) + else + report_callbacks("%s: %s",state(name),name) + end + end +end + +function callbacks.freeze(name,freeze) + freeze = type(freeze) == "string" and freeze + if find(name,"%*") then + local pattern = name + for name, _ in next, list do + if find(name,pattern) then + frozen[name] = freeze or frozen[name] or "frozen" + end + end + else + frozen[name] = freeze or frozen[name] or "frozen" + end +end + +function callbacks.register(name,func,freeze) + if frozen[name] then + if trace_callbacks then + frozen_message("registering",name) + end + return frozen_callback(name) + elseif freeze then + frozen[name] = type(freeze) == "string" and freeze or "registered" + end + if delayed[name] and environment.initex then + return nil + end + return register_callback(name,func) +end + +function callback.register(name,func) -- original + if not frozen[name] then + return register_callback(name,func) + elseif trace_callbacks then + frozen_message("registering",name) + end + return frozen_callback(name) +end + +function callbacks.push(name,func) + if not frozen[name] then + local sn = stack[name] + if not sn then + sn = { } + stack[name] = sn + end + insert(sn,find_callback(name)) + register_callback(name, func) + elseif trace_callbacks then + frozen_message("pushing",name) + end +end + +function callbacks.pop(name) + if not frozen[name] then + local sn = stack[name] + if not sn or #sn == 0 then + -- some error + register_callback(name, nil) -- ! 
really needed + else + -- this fails: register_callback(name, remove(stack[name])) + local func = remove(sn) + register_callback(name, func) + end + end +end + +if trace_calls then + statistics.register("callback details", function() + local t = { } -- todo: pass function to register and quit at nil + for name, n in sortedhash(list) do + if n > 0 then + t[#t+1] = format("%s -> %s",name,n) + end + end + return t + end) +end + +-- -- somehow crashes later on +-- +-- callbacks.freeze("find_.*_file","finding file") +-- callbacks.freeze("read_.*_file","reading file") +-- callbacks.freeze("open_.*_file","opening file") + +--[[ldx-- +

The simple case is to remove the callback:

+ + +callbacks.push('linebreak_filter') +... some actions ... +callbacks.pop('linebreak_filter') + + +

Often, in such a case, another callback or a macro call will pop +the original.

+ +

In practice one will install a new handler, like in:

+ + +callbacks.push('linebreak_filter', function(...) + return something_done(...) +end) + + +

Even more interesting is:

+ + +callbacks.push('linebreak_filter', function(...) + callbacks.pop('linebreak_filter') + return something_done(...) +end) + + +

This does a one-shot.

+--ldx]]-- + +--[[ldx-- +

Callbacks may result in doing some hard work +which takes time and above all resources. Sometimes it makes +sense to disable or tune the garbage collector in order to +keep the use of resources acceptable.

+ +

At some point in the development we did some tests with counting +nodes (in this case 121049).

+
+   setstepmul   seconds   megabytes
+   200          24.0      80.5
+   175          21.0      78.2
+   150          22.0      74.6
+   160          22.0      74.6
+   165          21.0      77.6
+   125          21.5      89.2
+   100          21.5      88.4
+

The following code is kind of experimental. In the documents +that describe the development of we report +on speed tests. One observation is that it sometimes helps to +restart the collector. Okay, experimental code has been removed, +because messing around with the gc is too unpredictable.
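
-- A small usage sketch (for illustration only, it is not part of this file) of the
-- garbagecollector.check helper that follows: io.loaddata is the helper already
-- mentioned in the comments below, and "bigfont.tmc" is just a hypothetical name.

utilities.garbagecollector.enabled = true     -- the module leaves this false by default
local data = io.loaddata("bigfont.tmc")       -- some large binary blob
if data then
    utilities.garbagecollector.check(#data)   -- sweeps only when the size exceeds the criterium (4 MB)
end
utilities.garbagecollector.check()            -- no size given: forces a sweep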

+--ldx]]-- + +-- For the moment we keep this here and not in util-gbc.lua or so. + +utilities = utilities or { } +utilities.garbagecollector = utilities.garbagecollector or { } +local garbagecollector = utilities.garbagecollector + +garbagecollector.enabled = false -- could become a directive +garbagecollector.criterium = 4*1024*1024 + +-- Lua allocates up to 12 times the amount of memory needed for +-- handling a string, and for large binary chunks (like chinese otf +-- files) we get a prominent memory consumption. Even when a variable +-- is nilled, there is some delay in freeing the associated memory (the +-- hashed string) because if we do the same thing directly afterwards, +-- we see only a slight increase in memory. For that reason it makes +-- sense to do a collector pass after a huge file. +-- +-- test file: +-- +-- function test() +-- local b = collectgarbage("count") +-- local s = io.loaddata("some font table, e.g. a big tmc file") +-- local a = collectgarbage("count") +-- print(">>> STATUS",b,a,a-b,#s,1000*(a-b)/#s) +-- end +-- +-- test() test() test() test() collectgarbage("collect") test() test() test() test() +-- +-- As a result of this, LuaTeX now uses an optimized version of f:read("*a"), +-- one that does not use the 4K allocations but allocates in one step. + +function garbagecollector.check(size,criterium) + if garbagecollector.enabled then + criterium = criterium or garbagecollector.criterium + if not size or (criterium and criterium > 0 and size > criterium) then + if trace_checking then + local b = collectgarbage("count") + collectgarbage("collect") + local a = collectgarbage("count") + report_memory("forced sweep, collected: %s MB, used: %s MB",round((b-a)/1000),round(a/1000)) + else + collectgarbage("collect") + end + end + end +end + +-- this will move + +commands = commands or { } + +function commands.showcallbacks() + local NC, NR, verbatim = context.NC, context.NR, context.type + context.starttabulate { "|l|l|p|" } + for name, _ in sortedhash(list) do + NC() verbatim(name) NC() verbatim(state(name)) NC() context(frozen[name] or "") NC() NR() + end + context.stoptabulate() +end diff --git a/tex/context/base/luat-cnf.lua b/tex/context/base/luat-cnf.lua index 3672c603e..4020f0b12 100644 --- a/tex/context/base/luat-cnf.lua +++ b/tex/context/base/luat-cnf.lua @@ -1,197 +1,197 @@ -if not modules then modules = { } end modules ['luat-cnf'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local type, next, tostring, tonumber = type, next, tostring, tonumber -local format, concat, find = string.format, table.concat, string.find - -local allocate = utilities.storage.allocate - -texconfig.kpse_init = false -texconfig.shell_escape = 't' - -luatex = luatex or { } -local luatex = luatex - -texconfig.error_line = 79 -- 79 -- obsolete -texconfig.half_error_line = 50 -- 50 -- obsolete - -texconfig.expand_depth = 10000 -- 10000 -texconfig.hash_extra = 100000 -- 0 -texconfig.nest_size = 1000 -- 50 -texconfig.max_in_open = 500 -- 15 -texconfig.max_print_line = 10000 -- 79 -texconfig.max_strings = 500000 -- 15000 -texconfig.param_size = 25000 -- 60 -texconfig.save_size = 50000 -- 4000 -texconfig.stack_size = 10000 -- 300 - --- local function initialize() --- local t, variable = allocate(), resolvers.variable --- for name, default in next, variablenames do --- local name = variablenames[i] --- local value = 
variable(name) --- value = tonumber(value) --- if not value or value == "" or value == 0 then --- value = default --- end --- texconfig[name], t[name] = value, value --- end --- initialize = nil --- return t --- end --- --- luatex.variables = initialize() - -local stub = [[ - --- checking - -storage = storage or { } -luatex = luatex or { } - --- we provide our own file handling - -texconfig.kpse_init = false -texconfig.shell_escape = 't' - --- as soon as possible - -luatex.starttime = os.gettimeofday() - --- this will happen after the format is loaded - -function texconfig.init() - - -- development - - local builtin, globals = { }, { } - - libraries = { -- we set it here as we want libraries also 'indexed' - basiclua = { - "string", "table", "coroutine", "debug", "file", "io", "lpeg", "math", "os", "package", "bit32", - }, - basictex = { -- noad - "callback", "font", "img", "lang", "lua", "node", "pdf", "status", "tex", "texconfig", "texio", "token", - }, - extralua = { - "gzip", "zip", "zlib", "lfs", "ltn12", "mime", "socket", "md5", "profiler", "unicode", "utf", - }, - extratex = { - "epdf", "fontloader", "kpse", "mplib", - }, - obsolete = { - "fontforge", -- can be filled by luat-log - "kpse", - }, - functions = { - "assert", "pcall", "xpcall", "error", "collectgarbage", - "dofile", "load","loadfile", "require", "module", - "getmetatable", "setmetatable", - "ipairs", "pairs", "rawequal", "rawget", "rawset", "next", - "tonumber", "tostring", - "type", "unpack", "select", "print", - }, - builtin = builtin, -- to be filled - globals = globals, -- to be filled - } - - for k, v in next, _G do - globals[k] = tostring(v) - end - - local function collect(t,fnc) - local lib = { } - for k, v in next, t do - if fnc then - lib[v] = _G[v] - else - local keys = { } - local gv = _G[v] - local tv = type(gv) - if tv == "table" then - for k, v in next, gv do - keys[k] = tostring(v) -- true -- by tostring we cannot call overloades functions (security) - end - end - lib[v] = keys - builtin[v] = keys - end - end - return lib - end - - libraries.basiclua = collect(libraries.basiclua) - libraries.basictex = collect(libraries.basictex) - libraries.extralua = collect(libraries.extralua) - libraries.extratex = collect(libraries.extratex) - libraries.functions = collect(libraries.functions,true) - libraries.obsolete = collect(libraries.obsolete) - - -- shortcut and helper - - local function init(start) - local b = lua.bytecode - local i = start - local t = os.clock() - while b[i] do - b[i]() ; - b[i] = nil ; - i = i + 1 - -- collectgarbage('step') - end - return i - start, os.clock() - t - end - - -- the stored tables and modules - - storage.noftables , storage.toftables = init(0) - storage.nofmodules, storage.tofmodules = init(%s) - - if modules then - local loaded = package.loaded - for module, _ in next, modules do - loaded[module] = true - end - end - -end - --- we provide a qualified path - -callback.register('find_format_file',function(name) - texconfig.formatname = name - return name -end) - --- done, from now on input and callbacks are internal -]] - -local variablenames = { - "error_line", "half_error_line", - "expand_depth", "hash_extra", "nest_size", - "max_in_open", "max_print_line", "max_strings", - "param_size", "save_size", "stack_size", -} - -local function makestub() - name = name or (environment.jobname .. 
".lui") - firsttable = firsttable or lua.firstbytecode - local t = { - "-- this file is generated, don't change it\n", - "-- configuration (can be overloaded later)\n" - } - for _,v in next, variablenames do - local tv = texconfig[v] - if tv and tv ~= "" then - t[#t+1] = format("texconfig.%s=%s",v,tv) - end - end - io.savedata(name,format("%s\n\n%s",concat(t,"\n"),format(stub,firsttable))) -end - -lua.registerfinalizer(makestub,"create stub file") +if not modules then modules = { } end modules ['luat-cnf'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local type, next, tostring, tonumber = type, next, tostring, tonumber +local format, concat, find = string.format, table.concat, string.find + +local allocate = utilities.storage.allocate + +texconfig.kpse_init = false +texconfig.shell_escape = 't' + +luatex = luatex or { } +local luatex = luatex + +texconfig.error_line = 79 -- 79 -- obsolete +texconfig.half_error_line = 50 -- 50 -- obsolete + +texconfig.expand_depth = 10000 -- 10000 +texconfig.hash_extra = 100000 -- 0 +texconfig.nest_size = 1000 -- 50 +texconfig.max_in_open = 500 -- 15 +texconfig.max_print_line = 10000 -- 79 +texconfig.max_strings = 500000 -- 15000 +texconfig.param_size = 25000 -- 60 +texconfig.save_size = 50000 -- 4000 +texconfig.stack_size = 10000 -- 300 + +-- local function initialize() +-- local t, variable = allocate(), resolvers.variable +-- for name, default in next, variablenames do +-- local name = variablenames[i] +-- local value = variable(name) +-- value = tonumber(value) +-- if not value or value == "" or value == 0 then +-- value = default +-- end +-- texconfig[name], t[name] = value, value +-- end +-- initialize = nil +-- return t +-- end +-- +-- luatex.variables = initialize() + +local stub = [[ + +-- checking + +storage = storage or { } +luatex = luatex or { } + +-- we provide our own file handling + +texconfig.kpse_init = false +texconfig.shell_escape = 't' + +-- as soon as possible + +luatex.starttime = os.gettimeofday() + +-- this will happen after the format is loaded + +function texconfig.init() + + -- development + + local builtin, globals = { }, { } + + libraries = { -- we set it here as we want libraries also 'indexed' + basiclua = { + "string", "table", "coroutine", "debug", "file", "io", "lpeg", "math", "os", "package", "bit32", + }, + basictex = { -- noad + "callback", "font", "img", "lang", "lua", "node", "pdf", "status", "tex", "texconfig", "texio", "token", + }, + extralua = { + "gzip", "zip", "zlib", "lfs", "ltn12", "mime", "socket", "md5", "profiler", "unicode", "utf", + }, + extratex = { + "epdf", "fontloader", "kpse", "mplib", + }, + obsolete = { + "fontforge", -- can be filled by luat-log + "kpse", + }, + functions = { + "assert", "pcall", "xpcall", "error", "collectgarbage", + "dofile", "load","loadfile", "require", "module", + "getmetatable", "setmetatable", + "ipairs", "pairs", "rawequal", "rawget", "rawset", "next", + "tonumber", "tostring", + "type", "unpack", "select", "print", + }, + builtin = builtin, -- to be filled + globals = globals, -- to be filled + } + + for k, v in next, _G do + globals[k] = tostring(v) + end + + local function collect(t,fnc) + local lib = { } + for k, v in next, t do + if fnc then + lib[v] = _G[v] + else + local keys = { } + local gv = _G[v] + local tv = type(gv) + if tv == "table" then + for k, v in next, gv do + keys[k] = 
tostring(v) -- true -- by tostring we cannot call overloades functions (security) + end + end + lib[v] = keys + builtin[v] = keys + end + end + return lib + end + + libraries.basiclua = collect(libraries.basiclua) + libraries.basictex = collect(libraries.basictex) + libraries.extralua = collect(libraries.extralua) + libraries.extratex = collect(libraries.extratex) + libraries.functions = collect(libraries.functions,true) + libraries.obsolete = collect(libraries.obsolete) + + -- shortcut and helper + + local function init(start) + local b = lua.bytecode + local i = start + local t = os.clock() + while b[i] do + b[i]() ; + b[i] = nil ; + i = i + 1 + -- collectgarbage('step') + end + return i - start, os.clock() - t + end + + -- the stored tables and modules + + storage.noftables , storage.toftables = init(0) + storage.nofmodules, storage.tofmodules = init(%s) + + if modules then + local loaded = package.loaded + for module, _ in next, modules do + loaded[module] = true + end + end + +end + +-- we provide a qualified path + +callback.register('find_format_file',function(name) + texconfig.formatname = name + return name +end) + +-- done, from now on input and callbacks are internal +]] + +local variablenames = { + "error_line", "half_error_line", + "expand_depth", "hash_extra", "nest_size", + "max_in_open", "max_print_line", "max_strings", + "param_size", "save_size", "stack_size", +} + +local function makestub() + name = name or (environment.jobname .. ".lui") + firsttable = firsttable or lua.firstbytecode + local t = { + "-- this file is generated, don't change it\n", + "-- configuration (can be overloaded later)\n" + } + for _,v in next, variablenames do + local tv = texconfig[v] + if tv and tv ~= "" then + t[#t+1] = format("texconfig.%s=%s",v,tv) + end + end + io.savedata(name,format("%s\n\n%s",concat(t,"\n"),format(stub,firsttable))) +end + +lua.registerfinalizer(makestub,"create stub file") diff --git a/tex/context/base/luat-cod.lua b/tex/context/base/luat-cod.lua index 8b015477f..8fc94779c 100644 --- a/tex/context/base/luat-cod.lua +++ b/tex/context/base/luat-cod.lua @@ -1,181 +1,181 @@ -if not modules then modules = { } end modules ['luat-cod'] = { - version = 1.001, - comment = "companion to luat-cod.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local type, loadfile = type, loadfile -local match, gsub, find, format = string.match, string.gsub, string.find, string.format - -local texconfig, lua = texconfig, lua - --- some basic housekeeping - -texconfig.kpse_init = false -texconfig.shell_escape = 't' -texconfig.max_print_line = 100000 -texconfig.max_in_open = 127 - --- registering bytecode chunks - -local bytecode = lua.bytecode or { } -local bytedata = lua.bytedata or { } -local bytedone = lua.bytedone or { } - -lua.bytecode = bytecode -- built in anyway -lua.bytedata = bytedata -lua.bytedone = bytedone - -lua.firstbytecode = 501 -lua.lastbytecode = lua.lastbytecode or (lua.firstbytecode - 1) -- as we load ourselves again ... maybe return earlier - -function lua.registeredcodes() - return lua.lastbytecode - lua.firstbytecode + 1 -end - --- no file.* functions yet - -function lua.registercode(filename,version) - local barename = gsub(filename,"%.[%a%d]+$","") - if barename == filename then filename = filename .. 
".lua" end - local basename = match(barename,"^.+[/\\](.-)$") or barename - if not bytedone[basename] then - local code = environment.luafilechunk(filename) - if code then - bytedone[basename] = true - if environment.initex then - local n = lua.lastbytecode + 1 - bytedata[n] = { barename, version or "0.000" } - bytecode[n] = code - lua.lastbytecode = n - end - end - end -end - -local finalizers = { } - -function lua.registerfinalizer(f,comment) - comment = comment or "unknown" - if type(f) == "function" then - finalizers[#finalizers+1] = { action = f, comment = comment } - else - print(format("\nfatal error: invalid finalizer, action: %s\n",comment)) - os.exit() - end -end - -function lua.finalize(logger) - for i=1,#finalizers do - local finalizer = finalizers[i] - finalizer.action() - if logger then - logger("finalize action: %s",finalizer.comment) - end - end -end - --- A first start with environments. This will be overloaded later. - -environment = environment or { } -local environment = environment - --- no string.unquoted yet - -local sourcefile = gsub(arg and arg[1] or "","^\"(.*)\"$","%1") -local sourcepath = find(sourcefile,"/") and gsub(sourcefile,"/[^/]+$","") or "" -local targetpath = "." - --- delayed (via metatable): --- --- environment.jobname = tex.jobname --- environment.version = tostring(tex.toks.contextversiontoks) - -environment.initex = tex.formatname == "" - -if not environment.luafilechunk then - - function environment.luafilechunk(filename) - if sourcepath ~= "" then - filename = sourcepath .. "/" .. filename - end - local data = loadfile(filename) - texio.write("<",data and "+ " or "- ",filename,">") - if data then - data() - end - return data - end - -end - -if not environment.engineflags then -- raw flags - - local engineflags = { } - - for i=-10,#arg do - local a = arg[i] - if a then - local flag, content = match(a,"^%-%-([^=]+)=?(.-)$") - if flag then - engineflags[flag] = content or "" - end - end - end - - environment.engineflags = engineflags - -end - --- We need a few premature callbacks in the format generator. We --- also do this when the format is loaded as otherwise we get --- a kpse error when disabled. This is an engine issue that will --- be sorted out in due time. - -local isfile = lfs.isfile - -local function source_file(name) - local fullname = sourcepath .. "/" .. name - if isfile(fullname) then - return fullname - end - fullname = fullname .. ".tex" - if isfile(fullname) then - return fullname - end - if isfile(name) then - return name - end - name = name .. ".tex" - if isfile(name) then - return name - end - return nil -end - -local function target_file(name) - return targetpath .. "/" .. 
name -end - -local function find_read_file (id,name) - return source_file(name) -end - -local function find_write_file(id,name) - return target_file(name) -end - -local function open_read_file(name) - local f = io.open(name,'rb') - return { - reader = function() - return f:read("*line") - end - } -end - -callback.register('find_read_file' , find_read_file ) -callback.register('open_read_file' , open_read_file ) -callback.register('find_write_file', find_write_file) +if not modules then modules = { } end modules ['luat-cod'] = { + version = 1.001, + comment = "companion to luat-cod.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local type, loadfile = type, loadfile +local match, gsub, find, format = string.match, string.gsub, string.find, string.format + +local texconfig, lua = texconfig, lua + +-- some basic housekeeping + +texconfig.kpse_init = false +texconfig.shell_escape = 't' +texconfig.max_print_line = 100000 +texconfig.max_in_open = 127 + +-- registering bytecode chunks + +local bytecode = lua.bytecode or { } +local bytedata = lua.bytedata or { } +local bytedone = lua.bytedone or { } + +lua.bytecode = bytecode -- built in anyway +lua.bytedata = bytedata +lua.bytedone = bytedone + +lua.firstbytecode = 501 +lua.lastbytecode = lua.lastbytecode or (lua.firstbytecode - 1) -- as we load ourselves again ... maybe return earlier + +function lua.registeredcodes() + return lua.lastbytecode - lua.firstbytecode + 1 +end + +-- no file.* functions yet + +function lua.registercode(filename,version) + local barename = gsub(filename,"%.[%a%d]+$","") + if barename == filename then filename = filename .. ".lua" end + local basename = match(barename,"^.+[/\\](.-)$") or barename + if not bytedone[basename] then + local code = environment.luafilechunk(filename) + if code then + bytedone[basename] = true + if environment.initex then + local n = lua.lastbytecode + 1 + bytedata[n] = { barename, version or "0.000" } + bytecode[n] = code + lua.lastbytecode = n + end + end + end +end + +local finalizers = { } + +function lua.registerfinalizer(f,comment) + comment = comment or "unknown" + if type(f) == "function" then + finalizers[#finalizers+1] = { action = f, comment = comment } + else + print(format("\nfatal error: invalid finalizer, action: %s\n",comment)) + os.exit() + end +end + +function lua.finalize(logger) + for i=1,#finalizers do + local finalizer = finalizers[i] + finalizer.action() + if logger then + logger("finalize action: %s",finalizer.comment) + end + end +end + +-- A first start with environments. This will be overloaded later. + +environment = environment or { } +local environment = environment + +-- no string.unquoted yet + +local sourcefile = gsub(arg and arg[1] or "","^\"(.*)\"$","%1") +local sourcepath = find(sourcefile,"/") and gsub(sourcefile,"/[^/]+$","") or "" +local targetpath = "." + +-- delayed (via metatable): +-- +-- environment.jobname = tex.jobname +-- environment.version = tostring(tex.toks.contextversiontoks) + +environment.initex = tex.formatname == "" + +if not environment.luafilechunk then + + function environment.luafilechunk(filename) + if sourcepath ~= "" then + filename = sourcepath .. "/" .. 
filename + end + local data = loadfile(filename) + texio.write("<",data and "+ " or "- ",filename,">") + if data then + data() + end + return data + end + +end + +if not environment.engineflags then -- raw flags + + local engineflags = { } + + for i=-10,#arg do + local a = arg[i] + if a then + local flag, content = match(a,"^%-%-([^=]+)=?(.-)$") + if flag then + engineflags[flag] = content or "" + end + end + end + + environment.engineflags = engineflags + +end + +-- We need a few premature callbacks in the format generator. We +-- also do this when the format is loaded as otherwise we get +-- a kpse error when disabled. This is an engine issue that will +-- be sorted out in due time. + +local isfile = lfs.isfile + +local function source_file(name) + local fullname = sourcepath .. "/" .. name + if isfile(fullname) then + return fullname + end + fullname = fullname .. ".tex" + if isfile(fullname) then + return fullname + end + if isfile(name) then + return name + end + name = name .. ".tex" + if isfile(name) then + return name + end + return nil +end + +local function target_file(name) + return targetpath .. "/" .. name +end + +local function find_read_file (id,name) + return source_file(name) +end + +local function find_write_file(id,name) + return target_file(name) +end + +local function open_read_file(name) + local f = io.open(name,'rb') + return { + reader = function() + return f:read("*line") + end + } +end + +callback.register('find_read_file' , find_read_file ) +callback.register('open_read_file' , open_read_file ) +callback.register('find_write_file', find_write_file) diff --git a/tex/context/base/luat-env.lua b/tex/context/base/luat-env.lua index 8753972c6..2f8f9e28d 100644 --- a/tex/context/base/luat-env.lua +++ b/tex/context/base/luat-env.lua @@ -1,176 +1,176 @@ - if not modules then modules = { } end modules ['luat-env'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- A former version provided functionality for non embeded core scripts i.e. runtime --- library loading. Given the amount of Lua code we use now, this no longer makes --- sense. Much of this evolved before bytecode arrays were available and so a lot of --- code has disappeared already. 
- -local rawset, rawget, loadfile, assert = rawset, rawget, loadfile, assert - -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) - -local report_lua = logs.reporter("resolvers","lua") - -local luautilities = utilities.lua -local luasuffixes = luautilities.suffixes - -environment = environment or { } -local environment = environment - --- environment - -local mt = { - __index = function(_,k) - if k == "version" then - local version = tex.toks and tex.toks.contextversiontoks - if version and version ~= "" then - rawset(environment,"version",version) - return version - else - return "unknown" - end - elseif k == "kind" then - local kind = tex.toks and tex.toks.contextkindtoks - if kind and kind ~= "" then - rawset(environment,"kind",kind) - return kind - else - return "unknown" - end - elseif k == "jobname" or k == "formatname" then - local name = tex and tex[k] - if name or name== "" then - rawset(environment,k,name) - return name - else - return "unknown" - end - elseif k == "outputfilename" then - local name = environment.jobname - rawset(environment,k,name) - return name - end - end -} - -setmetatable(environment,mt) - --- weird place ... depends on a not yet loaded module - -function environment.texfile(filename) - return resolvers.findfile(filename,'tex') -end - -function environment.luafile(filename) -- needs checking - local resolved = resolvers.findfile(filename,'tex') or "" - if resolved ~= "" then - return resolved - end - resolved = resolvers.findfile(filename,'texmfscripts') or "" - if resolved ~= "" then - return resolved - end - return resolvers.findfile(filename,'luatexlibs') or "" -end - --- local function checkstrip(filename) --- local modu = modules[file.nameonly(filename)] --- return modu and modu.dataonly --- end - -local stripindeed = false directives.register("system.compile.strip", function(v) stripindeed = v end) - -local function strippable(filename) - if stripindeed then - local modu = modules[file.nameonly(filename)] - return modu and modu.dataonly - else - return false - end -end - -function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format - filename = file.replacesuffix(filename, "lua") - local fullname = environment.luafile(filename) - if fullname and fullname ~= "" then - local data = luautilities.loadedluacode(fullname,strippable,filename) -- can be overloaded - if trace_locating then - report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded") - elseif not silent then - texio.write("<",data and "+ " or "- ",fullname,">") - end - return data - else - if trace_locating then - report_lua("unknown file %a",filename) - end - return nil - end -end - --- the next ones can use the previous ones / combine - -function environment.loadluafile(filename, version) - local lucname, luaname, chunk - local basename = file.removesuffix(filename) - if basename == filename then - luaname = file.addsuffix(basename,luasuffixes.lua) - lucname = file.addsuffix(basename,luasuffixes.luc) - else - luaname = basename -- forced suffix - lucname = nil - end - -- when not overloaded by explicit suffix we look for a luc file first - local fullname = (lucname and environment.luafile(lucname)) or "" - if fullname ~= "" then - if trace_locating then - report_lua("loading %a",fullname) - end - -- maybe: package.loaded[file.nameonly(fullname)] = true - chunk = loadfile(fullname) -- this way we don't need a file exists check - end - if chunk then - assert(chunk)() - if version then 
- -- we check of the version number of this chunk matches - local v = version -- can be nil - if modules and modules[filename] then - v = modules[filename].version -- new method - elseif versions and versions[filename] then - v = versions[filename] -- old method - end - if v == version then - return true - else - if trace_locating then - report_lua("version mismatch for %a, lua version %a, luc version %a",filename,v,version) - end - environment.loadluafile(filename) - end - else - return true - end - end - fullname = (luaname and environment.luafile(luaname)) or "" - if fullname ~= "" then - if trace_locating then - report_lua("loading %a",fullname) - end - chunk = loadfile(fullname) -- this way we don't need a file exists check - if not chunk then - if trace_locating then - report_lua("unknown file %a",filename) - end - else - assert(chunk)() - return true - end - end - return false -end + if not modules then modules = { } end modules ['luat-env'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- A former version provided functionality for non embeded core scripts i.e. runtime +-- library loading. Given the amount of Lua code we use now, this no longer makes +-- sense. Much of this evolved before bytecode arrays were available and so a lot of +-- code has disappeared already. + +local rawset, rawget, loadfile, assert = rawset, rawget, loadfile, assert + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) + +local report_lua = logs.reporter("resolvers","lua") + +local luautilities = utilities.lua +local luasuffixes = luautilities.suffixes + +environment = environment or { } +local environment = environment + +-- environment + +local mt = { + __index = function(_,k) + if k == "version" then + local version = tex.toks and tex.toks.contextversiontoks + if version and version ~= "" then + rawset(environment,"version",version) + return version + else + return "unknown" + end + elseif k == "kind" then + local kind = tex.toks and tex.toks.contextkindtoks + if kind and kind ~= "" then + rawset(environment,"kind",kind) + return kind + else + return "unknown" + end + elseif k == "jobname" or k == "formatname" then + local name = tex and tex[k] + if name or name== "" then + rawset(environment,k,name) + return name + else + return "unknown" + end + elseif k == "outputfilename" then + local name = environment.jobname + rawset(environment,k,name) + return name + end + end +} + +setmetatable(environment,mt) + +-- weird place ... 
depends on a not yet loaded module + +function environment.texfile(filename) + return resolvers.findfile(filename,'tex') +end + +function environment.luafile(filename) -- needs checking + local resolved = resolvers.findfile(filename,'tex') or "" + if resolved ~= "" then + return resolved + end + resolved = resolvers.findfile(filename,'texmfscripts') or "" + if resolved ~= "" then + return resolved + end + return resolvers.findfile(filename,'luatexlibs') or "" +end + +-- local function checkstrip(filename) +-- local modu = modules[file.nameonly(filename)] +-- return modu and modu.dataonly +-- end + +local stripindeed = false directives.register("system.compile.strip", function(v) stripindeed = v end) + +local function strippable(filename) + if stripindeed then + local modu = modules[file.nameonly(filename)] + return modu and modu.dataonly + else + return false + end +end + +function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format + filename = file.replacesuffix(filename, "lua") + local fullname = environment.luafile(filename) + if fullname and fullname ~= "" then + local data = luautilities.loadedluacode(fullname,strippable,filename) -- can be overloaded + if trace_locating then + report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded") + elseif not silent then + texio.write("<",data and "+ " or "- ",fullname,">") + end + return data + else + if trace_locating then + report_lua("unknown file %a",filename) + end + return nil + end +end + +-- the next ones can use the previous ones / combine + +function environment.loadluafile(filename, version) + local lucname, luaname, chunk + local basename = file.removesuffix(filename) + if basename == filename then + luaname = file.addsuffix(basename,luasuffixes.lua) + lucname = file.addsuffix(basename,luasuffixes.luc) + else + luaname = basename -- forced suffix + lucname = nil + end + -- when not overloaded by explicit suffix we look for a luc file first + local fullname = (lucname and environment.luafile(lucname)) or "" + if fullname ~= "" then + if trace_locating then + report_lua("loading %a",fullname) + end + -- maybe: package.loaded[file.nameonly(fullname)] = true + chunk = loadfile(fullname) -- this way we don't need a file exists check + end + if chunk then + assert(chunk)() + if version then + -- we check of the version number of this chunk matches + local v = version -- can be nil + if modules and modules[filename] then + v = modules[filename].version -- new method + elseif versions and versions[filename] then + v = versions[filename] -- old method + end + if v == version then + return true + else + if trace_locating then + report_lua("version mismatch for %a, lua version %a, luc version %a",filename,v,version) + end + environment.loadluafile(filename) + end + else + return true + end + end + fullname = (luaname and environment.luafile(luaname)) or "" + if fullname ~= "" then + if trace_locating then + report_lua("loading %a",fullname) + end + chunk = loadfile(fullname) -- this way we don't need a file exists check + if not chunk then + if trace_locating then + report_lua("unknown file %a",filename) + end + else + assert(chunk)() + return true + end + end + return false +end diff --git a/tex/context/base/luat-exe.lua b/tex/context/base/luat-exe.lua index a57a5a006..6f7137cad 100644 --- a/tex/context/base/luat-exe.lua +++ b/tex/context/base/luat-exe.lua @@ -1,126 +1,126 @@ -if not modules then modules = { } end modules ['luat-exe'] = { - version = 1.001, - comment = "companion to 
luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this module needs checking (very old and never really used, not even enabled) - -local match, find, gmatch = string.match, string.find, string.gmatch -local concat = table.concat -local select = select - -local report_executers = logs.reporter("system","executers") - -resolvers.executers = resolvers.executers or { } -local executers = resolvers.executers - -local permitted = { } - -local osexecute = os.execute -local osexec = os.exec -local osspawn = os.spawn -local iopopen = io.popen - -local execute = osexecute -local exec = osexec -local spawn = osspawn -local popen = iopopen - -local function register(...) - for k=1,select("#",...) do - local v = select(k,...) - permitted[#permitted+1] = v == "*" and ".*" or v - end -end - -local function prepare(...) - -- todo: make more clever first split - local t = { ... } - local n = #n - local one = t[1] - if n == 1 then - if type(one) == 'table' then - return one, concat(t," ",2,n) - else - local name, arguments = match(one,"^(.-)%s+(.+)$") - if name and arguments then - return name, arguments - else - return one, "" - end - end - else - return one, concat(t," ",2,n) - end -end - -local function executer(action) - return function(...) - local name, arguments = prepare(...) - for k=1,#permitted do - local v = permitted[k] - if find(name,v) then - return action(name .. " " .. arguments) - else - report_executers("not permitted: %s %s",name,arguments) - end - end - return action("") - end -end - -local function finalize() -- todo: os.exec, todo: report ipv print - execute = executer(osexecute) - exec = executer(osexec) - spawn = executer(osspawn) - popen = executer(iopopen) - finalize = function() - report_executers("already finalized") - end - register = function() - report_executers("already finalized, no registration permitted") - end - os.execute = execute - os.exec = exec - os.spawn = spawn - io.popen = popen -end - -executers.finalize = function(...) return finalize(...) end -executers.register = function(...) return register(...) end -executers.execute = function(...) return execute (...) end -executers.exec = function(...) return exec (...) end -executers.spawn = function(...) return spawn (...) end -executers.popen = function(...) return popen (...) 
end - -local execution_mode directives.register("system.executionmode", function(v) execution_mode = v end) -local execution_list directives.register("system.executionlist", function(v) execution_list = v end) - -function executers.check() - if execution_mode == "none" then - finalize() - elseif execution_mode == "list" and execution_list ~= "" then - for s in gmatch("[^%s,]",execution_list) do - register(s) - end - finalize() - else - -- all - end -end - ---~ resolvers.executers.register('.*') ---~ resolvers.executers.register('*') ---~ resolvers.executers.register('dir','ls') ---~ resolvers.executers.register('dir') - ---~ resolvers.executers.finalize() ---~ resolvers.executers.execute('dir',"*.tex") ---~ resolvers.executers.execute("dir *.tex") ---~ resolvers.executers.execute("ls *.tex") ---~ os.execute('ls') - ---~ resolvers.executers.check() +if not modules then modules = { } end modules ['luat-exe'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this module needs checking (very old and never really used, not even enabled) + +local match, find, gmatch = string.match, string.find, string.gmatch +local concat = table.concat +local select = select + +local report_executers = logs.reporter("system","executers") + +resolvers.executers = resolvers.executers or { } +local executers = resolvers.executers + +local permitted = { } + +local osexecute = os.execute +local osexec = os.exec +local osspawn = os.spawn +local iopopen = io.popen + +local execute = osexecute +local exec = osexec +local spawn = osspawn +local popen = iopopen + +local function register(...) + for k=1,select("#",...) do + local v = select(k,...) + permitted[#permitted+1] = v == "*" and ".*" or v + end +end + +local function prepare(...) + -- todo: make more clever first split + local t = { ... } + local n = #n + local one = t[1] + if n == 1 then + if type(one) == 'table' then + return one, concat(t," ",2,n) + else + local name, arguments = match(one,"^(.-)%s+(.+)$") + if name and arguments then + return name, arguments + else + return one, "" + end + end + else + return one, concat(t," ",2,n) + end +end + +local function executer(action) + return function(...) + local name, arguments = prepare(...) + for k=1,#permitted do + local v = permitted[k] + if find(name,v) then + return action(name .. " " .. arguments) + else + report_executers("not permitted: %s %s",name,arguments) + end + end + return action("") + end +end + +local function finalize() -- todo: os.exec, todo: report ipv print + execute = executer(osexecute) + exec = executer(osexec) + spawn = executer(osspawn) + popen = executer(iopopen) + finalize = function() + report_executers("already finalized") + end + register = function() + report_executers("already finalized, no registration permitted") + end + os.execute = execute + os.exec = exec + os.spawn = spawn + io.popen = popen +end + +executers.finalize = function(...) return finalize(...) end +executers.register = function(...) return register(...) end +executers.execute = function(...) return execute (...) end +executers.exec = function(...) return exec (...) end +executers.spawn = function(...) return spawn (...) end +executers.popen = function(...) return popen (...) 
end + +local execution_mode directives.register("system.executionmode", function(v) execution_mode = v end) +local execution_list directives.register("system.executionlist", function(v) execution_list = v end) + +function executers.check() + if execution_mode == "none" then + finalize() + elseif execution_mode == "list" and execution_list ~= "" then + for s in gmatch("[^%s,]",execution_list) do + register(s) + end + finalize() + else + -- all + end +end + +--~ resolvers.executers.register('.*') +--~ resolvers.executers.register('*') +--~ resolvers.executers.register('dir','ls') +--~ resolvers.executers.register('dir') + +--~ resolvers.executers.finalize() +--~ resolvers.executers.execute('dir',"*.tex") +--~ resolvers.executers.execute("dir *.tex") +--~ resolvers.executers.execute("ls *.tex") +--~ os.execute('ls') + +--~ resolvers.executers.check() diff --git a/tex/context/base/luat-fio.lua b/tex/context/base/luat-fio.lua index d61c6f142..bc8c6677b 100644 --- a/tex/context/base/luat-fio.lua +++ b/tex/context/base/luat-fio.lua @@ -1,117 +1,117 @@ -if not modules then modules = { } end modules ['luat-fio'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format -local concat = table.concat -local sequenced = table.sequenced - -texconfig.kpse_init = false -texconfig.shell_escape = 't' -texconfig.max_in_open = 127 -texconfig.max_print_line = 100000 - -if not resolvers.instance then - - resolvers.reset() - - resolvers.instance.validfile = resolvers.validctxfile - - -- we now load the file database as we might need files other than - -- tex and lua file on the given path - - -- trackers.enable("resolvers.*") - resolvers.load() - -- trackers.disable("resolvers.*") - - local findbinfile, loadbinfile = resolvers.findbinfile, resolvers.loadbinfile - local findtexfile, opentexfile = resolvers.findtexfile, resolvers.opentexfile - - if callback then - - local register = callbacks.register - - -- register('process_jobname' , function(name) return name end, true) - - register('find_read_file' , function(id,name) return findtexfile(name) end, true) - register('open_read_file' , function( name) return opentexfile(name) end, true) - - register('find_data_file' , function(name) return findbinfile(name,"tex") end, true) - register('find_enc_file' , function(name) return findbinfile(name,"enc") end, true) - register('find_font_file' , function(name) return findbinfile(name,"tfm") end, true) - register('find_format_file' , function(name) return findbinfile(name,"fmt") end, true) - register('find_image_file' , function(name) return findbinfile(name,"tex") end, true) - register('find_map_file' , function(name) return findbinfile(name,"map") end, true) - register('find_opentype_file' , function(name) return findbinfile(name,"otf") end, true) - register('find_output_file' , function(name) return name end, true) - register('find_pk_file' , function(name) return findbinfile(name,"pk") end, true) - register('find_sfd_file' , function(name) return findbinfile(name,"sfd") end, true) - register('find_truetype_file' , function(name) return findbinfile(name,"ttf") end, true) - register('find_type1_file' , function(name) return findbinfile(name,"pfb") end, true) - register('find_vf_file' , function(name) return findbinfile(name,"vf") end, true) - register('find_cidmap_file' , function(name) return findbinfile(name,"cidmap") end, 
true) - - register('read_data_file' , function(file) return loadbinfile(file,"tex") end, true) - register('read_enc_file' , function(file) return loadbinfile(file,"enc") end, true) - register('read_font_file' , function(file) return loadbinfile(file,"tfm") end, true) - -- format - -- image - register('read_map_file' , function(file) return loadbinfile(file,"map") end, true) - -- output - register('read_pk_file' , function(file) return loadbinfile(file,"pk") end, true) -- 600dpi/manfnt.720pk - register('read_sfd_file' , function(file) return loadbinfile(file,"sfd") end, true) - register('read_vf_file' , function(file) return loadbinfile(file,"vf" ) end, true) - - register('find_font_file' , function(name) return findbinfile(name,"ofm") end, true) - register('find_vf_file' , function(name) return findbinfile(name,"ovf") end, true) - - register('read_font_file' , function(file) return loadbinfile(file,"ofm") end, true) - register('read_vf_file' , function(file) return loadbinfile(file,"ovf") end, true) - - -- register('read_opentype_file' , function(file) return loadbinfile(file,"otf") end, true) - -- register('read_truetype_file' , function(file) return loadbinfile(file,"ttf") end, true) - -- register('read_type1_file' , function(file) return loadbinfile(file,"pfb") end, true) - -- register('read_cidmap_file' , function(file) return loadbinfile(file,"cidmap") end, true) - - register('find_write_file' , function(id,name) return name end, true) - register('find_format_file' , function(name) return name end, true) - - end - -end - -local report_system = logs.reporter("system","files") -local report_files = logs.reporter("used files") - -luatex.registerstopactions(function() - local foundintrees = resolvers.instance.foundintrees - if #foundintrees > 0 then - logs.pushtarget("logfile") - logs.newline() - report_system("start used files") - logs.newline() - for i=1,#foundintrees do - report_files("%4i: % T",i,foundintrees[i]) - end - logs.newline() - report_system("stop used files") - logs.newline() - logs.poptarget() - end -end) - -statistics.register("resource resolver", function() - local scandata = resolvers.scandata() - return format("loadtime %s seconds, %s scans with scantime %s seconds, %s shared scans, %s found files, scanned paths: %s", - resolvers.loadtime(), - scandata.n, - scandata.time, - scandata.shared, - #resolvers.instance.foundintrees, - #scandata.paths > 0 and concat(scandata.paths," ") or "" - ) -end) +if not modules then modules = { } end modules ['luat-fio'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format +local concat = table.concat +local sequenced = table.sequenced + +texconfig.kpse_init = false +texconfig.shell_escape = 't' +texconfig.max_in_open = 127 +texconfig.max_print_line = 100000 + +if not resolvers.instance then + + resolvers.reset() + + resolvers.instance.validfile = resolvers.validctxfile + + -- we now load the file database as we might need files other than + -- tex and lua file on the given path + + -- trackers.enable("resolvers.*") + resolvers.load() + -- trackers.disable("resolvers.*") + + local findbinfile, loadbinfile = resolvers.findbinfile, resolvers.loadbinfile + local findtexfile, opentexfile = resolvers.findtexfile, resolvers.opentexfile + + if callback then + + local register = callbacks.register + + -- register('process_jobname' , function(name) 
return name end, true) + + register('find_read_file' , function(id,name) return findtexfile(name) end, true) + register('open_read_file' , function( name) return opentexfile(name) end, true) + + register('find_data_file' , function(name) return findbinfile(name,"tex") end, true) + register('find_enc_file' , function(name) return findbinfile(name,"enc") end, true) + register('find_font_file' , function(name) return findbinfile(name,"tfm") end, true) + register('find_format_file' , function(name) return findbinfile(name,"fmt") end, true) + register('find_image_file' , function(name) return findbinfile(name,"tex") end, true) + register('find_map_file' , function(name) return findbinfile(name,"map") end, true) + register('find_opentype_file' , function(name) return findbinfile(name,"otf") end, true) + register('find_output_file' , function(name) return name end, true) + register('find_pk_file' , function(name) return findbinfile(name,"pk") end, true) + register('find_sfd_file' , function(name) return findbinfile(name,"sfd") end, true) + register('find_truetype_file' , function(name) return findbinfile(name,"ttf") end, true) + register('find_type1_file' , function(name) return findbinfile(name,"pfb") end, true) + register('find_vf_file' , function(name) return findbinfile(name,"vf") end, true) + register('find_cidmap_file' , function(name) return findbinfile(name,"cidmap") end, true) + + register('read_data_file' , function(file) return loadbinfile(file,"tex") end, true) + register('read_enc_file' , function(file) return loadbinfile(file,"enc") end, true) + register('read_font_file' , function(file) return loadbinfile(file,"tfm") end, true) + -- format + -- image + register('read_map_file' , function(file) return loadbinfile(file,"map") end, true) + -- output + register('read_pk_file' , function(file) return loadbinfile(file,"pk") end, true) -- 600dpi/manfnt.720pk + register('read_sfd_file' , function(file) return loadbinfile(file,"sfd") end, true) + register('read_vf_file' , function(file) return loadbinfile(file,"vf" ) end, true) + + register('find_font_file' , function(name) return findbinfile(name,"ofm") end, true) + register('find_vf_file' , function(name) return findbinfile(name,"ovf") end, true) + + register('read_font_file' , function(file) return loadbinfile(file,"ofm") end, true) + register('read_vf_file' , function(file) return loadbinfile(file,"ovf") end, true) + + -- register('read_opentype_file' , function(file) return loadbinfile(file,"otf") end, true) + -- register('read_truetype_file' , function(file) return loadbinfile(file,"ttf") end, true) + -- register('read_type1_file' , function(file) return loadbinfile(file,"pfb") end, true) + -- register('read_cidmap_file' , function(file) return loadbinfile(file,"cidmap") end, true) + + register('find_write_file' , function(id,name) return name end, true) + register('find_format_file' , function(name) return name end, true) + + end + +end + +local report_system = logs.reporter("system","files") +local report_files = logs.reporter("used files") + +luatex.registerstopactions(function() + local foundintrees = resolvers.instance.foundintrees + if #foundintrees > 0 then + logs.pushtarget("logfile") + logs.newline() + report_system("start used files") + logs.newline() + for i=1,#foundintrees do + report_files("%4i: % T",i,foundintrees[i]) + end + logs.newline() + report_system("stop used files") + logs.newline() + logs.poptarget() + end +end) + +statistics.register("resource resolver", function() + local scandata = resolvers.scandata() + 
return format("loadtime %s seconds, %s scans with scantime %s seconds, %s shared scans, %s found files, scanned paths: %s", + resolvers.loadtime(), + scandata.n, + scandata.time, + scandata.shared, + #resolvers.instance.foundintrees, + #scandata.paths > 0 and concat(scandata.paths," ") or "" + ) +end) diff --git a/tex/context/base/luat-fmt.lua b/tex/context/base/luat-fmt.lua index 20a4a8fcd..2eb5b89c9 100644 --- a/tex/context/base/luat-fmt.lua +++ b/tex/context/base/luat-fmt.lua @@ -1,140 +1,140 @@ -if not modules then modules = { } end modules ['luat-fmt'] = { - version = 1.001, - comment = "companion to mtxrun", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format -local concat = table.concat -local quoted = string.quoted -local luasuffixes = utilities.lua.suffixes - -local report_format = logs.reporter("resolvers","formats") - -local function primaryflags() -- not yet ok - local trackers = environment.argument("trackers") - local directives = environment.argument("directives") - local flags = { } - if trackers and trackers ~= "" then - flags = { "--trackers=" .. quoted(trackers) } - end - if directives and directives ~= "" then - flags = { "--directives=" .. quoted(directives) } - end - if environment.argument("jit") then - flags = { "--jiton" } - end - return concat(flags," ") -end - -function environment.make_format(name) - local engine = environment.ownmain or "luatex" - -- change to format path (early as we need expanded paths) - local olddir = dir.current() - local path = caches.getwritablepath("formats",engine) or "" -- maybe platform - if path ~= "" then - lfs.chdir(path) - end - report_format("using format path %a",dir.current()) - -- check source file - local texsourcename = file.addsuffix(name,"mkiv") - local fulltexsourcename = resolvers.findfile(texsourcename,"tex") or "" - if fulltexsourcename == "" then - texsourcename = file.addsuffix(name,"tex") - fulltexsourcename = resolvers.findfile(texsourcename,"tex") or "" - end - if fulltexsourcename == "" then - report_format("no tex source file with name %a (mkiv or tex)",name) - lfs.chdir(olddir) - return - else - report_format("using tex source file %a",fulltexsourcename) - end - local texsourcepath = dir.expandname(file.dirname(fulltexsourcename)) -- really needed - -- check specification - local specificationname = file.replacesuffix(fulltexsourcename,"lus") - local fullspecificationname = resolvers.findfile(specificationname,"tex") or "" - if fullspecificationname == "" then - specificationname = file.join(texsourcepath,"context.lus") - fullspecificationname = resolvers.findfile(specificationname,"tex") or "" - end - if fullspecificationname == "" then - report_format("unknown stub specification %a",specificationname) - lfs.chdir(olddir) - return - end - local specificationpath = file.dirname(fullspecificationname) - -- load specification - local usedluastub = nil - local usedlualibs = dofile(fullspecificationname) - if type(usedlualibs) == "string" then - usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs) - elseif type(usedlualibs) == "table" then - report_format("using stub specification %a",fullspecificationname) - local texbasename = file.basename(name) - local luastubname = file.addsuffix(texbasename,luasuffixes.lua) - local lucstubname = file.addsuffix(texbasename,luasuffixes.luc) - -- pack libraries in stub - report_format("creating initialization file 
%a",luastubname) - utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname) - -- compile stub file (does not save that much as we don't use this stub at startup any more) - if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then - report_format("using compiled initialization file %a",lucstubname) - usedluastub = lucstubname - else - report_format("using uncompiled initialization file %a",luastubname) - usedluastub = luastubname - end - else - report_format("invalid stub specification %a",fullspecificationname) - lfs.chdir(olddir) - return - end - -- generate format - local command = format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\") - report_format("running command: %s\n",command) - os.spawn(command) - -- remove related mem files - local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem" - -- report_format("removing related mplib format with pattern %a", pattern) - local mp = dir.glob(pattern) - if mp then - for i=1,#mp do - local name = mp[i] - report_format("removing related mplib format %a", file.basename(name)) - os.remove(name) - end - end - lfs.chdir(olddir) -end - -function environment.run_format(name,data,more) - if name and name ~= "" then - local engine = environment.ownmain or "luatex" - local barename = file.removesuffix(name) - local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats",engine) - if fmtname == "" then - fmtname = resolvers.findfile(file.addsuffix(barename,"fmt")) or "" - end - fmtname = resolvers.cleanpath(fmtname) - if fmtname == "" then - report_format("no format with name %a",name) - else - local barename = file.removesuffix(name) -- expanded name - local luaname = file.addsuffix(barename,"luc") - if not lfs.isfile(luaname) then - luaname = file.addsuffix(barename,"lua") - end - if not lfs.isfile(luaname) then - report_format("using format name %a",fmtname) - report_format("no luc/lua file with name %a",barename) - else - local command = format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "") - report_format("running command: %s",command) - os.spawn(command) - end - end - end -end +if not modules then modules = { } end modules ['luat-fmt'] = { + version = 1.001, + comment = "companion to mtxrun", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format +local concat = table.concat +local quoted = string.quoted +local luasuffixes = utilities.lua.suffixes + +local report_format = logs.reporter("resolvers","formats") + +local function primaryflags() -- not yet ok + local trackers = environment.argument("trackers") + local directives = environment.argument("directives") + local flags = { } + if trackers and trackers ~= "" then + flags = { "--trackers=" .. quoted(trackers) } + end + if directives and directives ~= "" then + flags = { "--directives=" .. 
quoted(directives) } + end + if environment.argument("jit") then + flags = { "--jiton" } + end + return concat(flags," ") +end + +function environment.make_format(name) + local engine = environment.ownmain or "luatex" + -- change to format path (early as we need expanded paths) + local olddir = dir.current() + local path = caches.getwritablepath("formats",engine) or "" -- maybe platform + if path ~= "" then + lfs.chdir(path) + end + report_format("using format path %a",dir.current()) + -- check source file + local texsourcename = file.addsuffix(name,"mkiv") + local fulltexsourcename = resolvers.findfile(texsourcename,"tex") or "" + if fulltexsourcename == "" then + texsourcename = file.addsuffix(name,"tex") + fulltexsourcename = resolvers.findfile(texsourcename,"tex") or "" + end + if fulltexsourcename == "" then + report_format("no tex source file with name %a (mkiv or tex)",name) + lfs.chdir(olddir) + return + else + report_format("using tex source file %a",fulltexsourcename) + end + local texsourcepath = dir.expandname(file.dirname(fulltexsourcename)) -- really needed + -- check specification + local specificationname = file.replacesuffix(fulltexsourcename,"lus") + local fullspecificationname = resolvers.findfile(specificationname,"tex") or "" + if fullspecificationname == "" then + specificationname = file.join(texsourcepath,"context.lus") + fullspecificationname = resolvers.findfile(specificationname,"tex") or "" + end + if fullspecificationname == "" then + report_format("unknown stub specification %a",specificationname) + lfs.chdir(olddir) + return + end + local specificationpath = file.dirname(fullspecificationname) + -- load specification + local usedluastub = nil + local usedlualibs = dofile(fullspecificationname) + if type(usedlualibs) == "string" then + usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs) + elseif type(usedlualibs) == "table" then + report_format("using stub specification %a",fullspecificationname) + local texbasename = file.basename(name) + local luastubname = file.addsuffix(texbasename,luasuffixes.lua) + local lucstubname = file.addsuffix(texbasename,luasuffixes.luc) + -- pack libraries in stub + report_format("creating initialization file %a",luastubname) + utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname) + -- compile stub file (does not save that much as we don't use this stub at startup any more) + if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then + report_format("using compiled initialization file %a",lucstubname) + usedluastub = lucstubname + else + report_format("using uncompiled initialization file %a",luastubname) + usedluastub = luastubname + end + else + report_format("invalid stub specification %a",fullspecificationname) + lfs.chdir(olddir) + return + end + -- generate format + local command = format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\") + report_format("running command: %s\n",command) + os.spawn(command) + -- remove related mem files + local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem" + -- report_format("removing related mplib format with pattern %a", pattern) + local mp = dir.glob(pattern) + if mp then + for i=1,#mp do + local name = mp[i] + report_format("removing related mplib format %a", file.basename(name)) + os.remove(name) + end + end + lfs.chdir(olddir) +end + +function environment.run_format(name,data,more) + if name and name ~= "" then + 
local engine = environment.ownmain or "luatex" + local barename = file.removesuffix(name) + local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats",engine) + if fmtname == "" then + fmtname = resolvers.findfile(file.addsuffix(barename,"fmt")) or "" + end + fmtname = resolvers.cleanpath(fmtname) + if fmtname == "" then + report_format("no format with name %a",name) + else + local barename = file.removesuffix(name) -- expanded name + local luaname = file.addsuffix(barename,"luc") + if not lfs.isfile(luaname) then + luaname = file.addsuffix(barename,"lua") + end + if not lfs.isfile(luaname) then + report_format("using format name %a",fmtname) + report_format("no luc/lua file with name %a",barename) + else + local command = format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "") + report_format("running command: %s",command) + os.spawn(command) + end + end + end +end diff --git a/tex/context/base/luat-ini.lua b/tex/context/base/luat-ini.lua index 587214b93..d4eee7123 100644 --- a/tex/context/base/luat-ini.lua +++ b/tex/context/base/luat-ini.lua @@ -1,206 +1,206 @@ -if not modules then modules = { } end modules ['luat-ini'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- rather experimental down here ... adapted to lua 5.2 ... but still --- experimental - -local debug = require("debug") - -local string, table, lpeg, math, io, system = string, table, lpeg, math, io, system -local rawset, rawget, next, setmetatable = rawset, rawget, next, setmetatable - ---[[ldx-- -

We cannot load anything yet. However what we will do is reserve a few tables. -These can be used for runtime user data or third party modules and will not be -cluttered by macro package code.

---ldx]]-- - -userdata = userdata or { } -- for users (e.g. functions etc) -thirddata = thirddata or { } -- only for third party modules -moduledata = moduledata or { } -- only for development team -documentdata = documentdata or { } -- for users (e.g. raw data) -parametersets = parametersets or { } -- experimental for team - -table.setmetatableindex(moduledata,table.autokey) -table.setmetatableindex(thirddata, table.autokey) - ---[[ldx-- -

Please create a namespace within these tables before using them!

- - -userdata ['my.name'] = { } -thirddata['tricks' ] = { } - ---ldx]]-- - ---[[ldx-- -

We could cook up a readonly model for global tables but it makes more sense -to invite users to use one of the predefined namespaces. One can redefine the -protector. After all, it's just a lightweight suggestive system, not a -watertight one.

---ldx]]-- - -local global = _G -global.global = global - -local dummy = function() end - ---[[ldx-- -

Another approach is to freeze tables by using a metatable; this will be -implemented stepwise.

---ldx]]-- - --- moduledata : no need for protection (only for developers) --- isolatedata : full protection --- userdata : protected --- thirddata : protected - ---[[ldx-- -

We could have a metatable that automatically creates a top level namespace.

---ldx]]-- - -local luanames = lua.name -- luatex itself - -lua.numbers = lua.numbers or { } local numbers = lua.numbers -lua.messages = lua.messages or { } local messages = lua.messages - -storage.register("lua/numbers", numbers, "lua.numbers" ) -storage.register("lua/messages", messages, "lua.messages") - -local setfenv = setfenv or debug.setfenv -- < 5.2 - -if setfenv then - - local protected = { - -- global table - global = global, - -- user tables - -- moduledata = moduledata, - userdata = userdata, - thirddata = thirddata, - documentdata = documentdata, - -- reserved - protect = dummy, - unprotect = dummy, - -- luatex - tex = tex, - -- lua - string = string, - table = table, - lpeg = lpeg, - math = math, - io = io, - file = file, - bit32 = bit32, - -- - context = context, - } - - local protect_full = function(name) - local t = { } - for k, v in next, protected do - t[k] = v - end - return t - end - - local protect_part = function(name) -- adds - local t = rawget(global,name) - if not t then - t = { } - for k, v in next, protected do - t[k] = v - end - rawset(global,name,t) - end - return t - end - - protect = function(name) - if name == "isolateddata" then - setfenv(2,protect_full(name)) - else - setfenv(2,protect_part(name or "shareddata")) - end - end - - function lua.registername(name,message) - local lnn = lua.numbers[name] - if not lnn then - lnn = #messages + 1 - messages[lnn] = message - numbers[name] = lnn - end - luanames[lnn] = message - context(lnn) - -- initialize once - if name ~= "isolateddata" then - protect_full(name or "shareddata") - end - end - -elseif libraries then -- assume >= 5.2 - - local shared - - protect = function(name) - if not shared then - -- e.g. context is not yet known - local public = { - global = global, - -- moduledata = moduledata, - userdata = userdata, - thirddata = thirddata, - documentdata = documentdata, - protect = dummy, - unprotect = dummy, - context = context, - } - -- - for k, v in next, libraries.builtin do public[k] = v end - for k, v in next, libraries.functions do public[k] = v end - for k, v in next, libraries.obsolete do public[k] = nil end - -- - shared = { __index = public } - protect = function(name) - local t = global[name] or { } - setmetatable(t,shared) -- set each time - return t - end - end - return protect(name) - end - - function lua.registername(name,message) - local lnn = lua.numbers[name] - if not lnn then - lnn = #messages + 1 - messages[lnn] = message - numbers[name] = lnn - end - luanames[lnn] = message - context(lnn) - end - -else - - protect = dummy - - function lua.registername(name,message) - local lnn = lua.numbers[name] - if not lnn then - lnn = #messages + 1 - messages[lnn] = message - numbers[name] = lnn - end - luanames[lnn] = message - context(lnn) - end - -end - +if not modules then modules = { } end modules ['luat-ini'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- rather experimental down here ... adapted to lua 5.2 ... but still +-- experimental + +local debug = require("debug") + +local string, table, lpeg, math, io, system = string, table, lpeg, math, io, system +local rawset, rawget, next, setmetatable = rawset, rawget, next, setmetatable + +--[[ldx-- +

We cannot load anything yet. However what we will do is reserve a few tables. +These can be used for runtime user data or third party modules and will not be +cluttered by macro package code.

+--ldx]]-- + +userdata = userdata or { } -- for users (e.g. functions etc) +thirddata = thirddata or { } -- only for third party modules +moduledata = moduledata or { } -- only for development team +documentdata = documentdata or { } -- for users (e.g. raw data) +parametersets = parametersets or { } -- experimental for team + +table.setmetatableindex(moduledata,table.autokey) +table.setmetatableindex(thirddata, table.autokey) + +--[[ldx-- +

Please create a namespace within these tables before using them!

+ + +userdata ['my.name'] = { } +thirddata['tricks' ] = { } + +--ldx]]-- + +--[[ldx-- +

We could cook up a readonly model for global tables but it makes more sense +to invite users to use one of the predefined namespaces. One can redefine the +protector. After all, it's just a lightweight suggestive system, not a +watertight one.

+--ldx]]-- + +local global = _G +global.global = global + +local dummy = function() end + +--[[ldx-- +

Another approach is to freeze tables by using a metatable; this will be +implemented stepwise.

+--ldx]]-- + +-- moduledata : no need for protection (only for developers) +-- isolatedata : full protection +-- userdata : protected +-- thirddata : protected + +--[[ldx-- +

We could have a metatable that automatically creates a top level namespace.

+--ldx]]-- + +local luanames = lua.name -- luatex itself + +lua.numbers = lua.numbers or { } local numbers = lua.numbers +lua.messages = lua.messages or { } local messages = lua.messages + +storage.register("lua/numbers", numbers, "lua.numbers" ) +storage.register("lua/messages", messages, "lua.messages") + +local setfenv = setfenv or debug.setfenv -- < 5.2 + +if setfenv then + + local protected = { + -- global table + global = global, + -- user tables + -- moduledata = moduledata, + userdata = userdata, + thirddata = thirddata, + documentdata = documentdata, + -- reserved + protect = dummy, + unprotect = dummy, + -- luatex + tex = tex, + -- lua + string = string, + table = table, + lpeg = lpeg, + math = math, + io = io, + file = file, + bit32 = bit32, + -- + context = context, + } + + local protect_full = function(name) + local t = { } + for k, v in next, protected do + t[k] = v + end + return t + end + + local protect_part = function(name) -- adds + local t = rawget(global,name) + if not t then + t = { } + for k, v in next, protected do + t[k] = v + end + rawset(global,name,t) + end + return t + end + + protect = function(name) + if name == "isolateddata" then + setfenv(2,protect_full(name)) + else + setfenv(2,protect_part(name or "shareddata")) + end + end + + function lua.registername(name,message) + local lnn = lua.numbers[name] + if not lnn then + lnn = #messages + 1 + messages[lnn] = message + numbers[name] = lnn + end + luanames[lnn] = message + context(lnn) + -- initialize once + if name ~= "isolateddata" then + protect_full(name or "shareddata") + end + end + +elseif libraries then -- assume >= 5.2 + + local shared + + protect = function(name) + if not shared then + -- e.g. context is not yet known + local public = { + global = global, + -- moduledata = moduledata, + userdata = userdata, + thirddata = thirddata, + documentdata = documentdata, + protect = dummy, + unprotect = dummy, + context = context, + } + -- + for k, v in next, libraries.builtin do public[k] = v end + for k, v in next, libraries.functions do public[k] = v end + for k, v in next, libraries.obsolete do public[k] = nil end + -- + shared = { __index = public } + protect = function(name) + local t = global[name] or { } + setmetatable(t,shared) -- set each time + return t + end + end + return protect(name) + end + + function lua.registername(name,message) + local lnn = lua.numbers[name] + if not lnn then + lnn = #messages + 1 + messages[lnn] = message + numbers[name] = lnn + end + luanames[lnn] = message + context(lnn) + end + +else + + protect = dummy + + function lua.registername(name,message) + local lnn = lua.numbers[name] + if not lnn then + lnn = #messages + 1 + messages[lnn] = message + numbers[name] = lnn + end + luanames[lnn] = message + context(lnn) + end + +end + diff --git a/tex/context/base/luat-iop.lua b/tex/context/base/luat-iop.lua index 52f14683e..bcbfac73a 100644 --- a/tex/context/base/luat-iop.lua +++ b/tex/context/base/luat-iop.lua @@ -1,195 +1,195 @@ -if not modules then modules = { } end modules ['luat-iop'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this paranoid stuff in web2c ... 
we cannot hook checks into the --- input functions because one can always change the callback but --- we can feed back specific patterns and paths into the next --- mechanism - --- os.execute os.exec os.spawn io.fopen --- os.remove lfs.chdir lfs.mkdir --- io.open zip.open epdf.open mlib.new - --- cache - -local topattern, find = string.topattern, string.find - -local report_limiter = logs.reporter("system","limiter") - --- the basic methods - -local function match(ruleset,name) - local n = #ruleset - if n > 0 then - for i=1,n do - local r = ruleset[i] - if find(name,r[1]) then - return r[2] - end - end - return false - else - -- nothing defined (or any) - return true - end -end - -local function protect(ruleset,proc) - return function(name,...) - if name == "" then - -- report_limiter("no access permitted: ") -- can happen in mplib code - return nil, "no name given" - elseif match(ruleset,name) then - return proc(name,...) - else - report_limiter("no access permitted for %a",name) - return nil, name .. ": no access permitted" - end - end -end - -function io.limiter(preset) - preset = preset or { } - local ruleset = { } - for i=1,#preset do - local p = preset[i] - local what, spec = p[1] or "", p[2] or "" - if spec == "" then - -- skip 'm - elseif what == "tree" then - resolvers.dowithpath(spec, function(r) - local spec = resolvers.resolve(r) or "" - if spec ~= "" then - ruleset[#ruleset+1] = { topattern(spec,true), true } - end - end) - elseif what == "permit" then - ruleset[#ruleset+1] = { topattern(spec,true), true } - elseif what == "forbid" then - ruleset[#ruleset+1] = { topattern(spec,true), false } - end - end - if #ruleset > 0 then - return { - match = function(name) return match (ruleset,name) end, - protect = function(proc) return protect(ruleset,proc) end, - } - else - return { - match = function(name) return true end, - protect = proc, - } - end -end - --- a few handlers - -io.i_limiters = { } -io.o_limiters = { } - -function io.i_limiter(v) - local i = io.i_limiters[v] - if i then - local i_limiter = io.limiter(i) - function io.i_limiter() - return i_limiter - end - return i_limiter - end -end - -function io.o_limiter(v) - local o = io.o_limiters[v] - if o then - local o_limiter = io.limiter(o) - function io.o_limiter() - return o_limiter - end - return o_limiter - end -end - --- the real thing (somewhat fuzzy as we need to know what gets done) - -local i_opener, i_limited = io.open, false -local o_opener, o_limited = io.open, false - -local function i_register(v) - if not i_limited then - local i_limiter = io.i_limiter(v) - if i_limiter then - local protect = i_limiter.protect - i_opener = protect(i_opener) - i_limited = true - report_limiter("input mode set to %a",v) - end - end -end - -local function o_register(v) - if not o_limited then - local o_limiter = io.o_limiter(v) - if o_limiter then - local protect = o_limiter.protect - o_opener = protect(o_opener) - o_limited = true - report_limiter("output mode set to %a",v) - end - end -end - -function io.open(name,method) - if method and find(method,"[wa]") then - return o_opener(name,method) - else - return i_opener(name,method) - end -end - -directives.register("system.inputmode", i_register) -directives.register("system.outputmode", o_register) - -local i_limited = false -local o_limited = false - -local function i_register(v) - if not i_limited then - local i_limiter = io.i_limiter(v) - if i_limiter then - local protect = i_limiter.protect - lfs.chdir = protect(lfs.chdir) -- needs checking - i_limited = true - end - end 
-end - -local function o_register(v) - if not o_limited then - local o_limiter = io.o_limiter(v) - if o_limiter then - local protect = o_limiter.protect - os.remove = protect(os.remove) -- rather okay - lfs.chdir = protect(lfs.chdir) -- needs checking - lfs.mkdir = protect(lfs.mkdir) -- needs checking - o_limited = true - end - end -end - -directives.register("system.inputmode", i_register) -directives.register("system.outputmode", o_register) - --- the definitions - -local limiters = resolvers.variable("limiters") - -if limiters then - io.i_limiters = limiters.input or { } - io.o_limiters = limiters.output or { } -end - +if not modules then modules = { } end modules ['luat-iop'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this paranoid stuff in web2c ... we cannot hook checks into the +-- input functions because one can always change the callback but +-- we can feed back specific patterns and paths into the next +-- mechanism + +-- os.execute os.exec os.spawn io.fopen +-- os.remove lfs.chdir lfs.mkdir +-- io.open zip.open epdf.open mlib.new + +-- cache + +local topattern, find = string.topattern, string.find + +local report_limiter = logs.reporter("system","limiter") + +-- the basic methods + +local function match(ruleset,name) + local n = #ruleset + if n > 0 then + for i=1,n do + local r = ruleset[i] + if find(name,r[1]) then + return r[2] + end + end + return false + else + -- nothing defined (or any) + return true + end +end + +local function protect(ruleset,proc) + return function(name,...) + if name == "" then + -- report_limiter("no access permitted: ") -- can happen in mplib code + return nil, "no name given" + elseif match(ruleset,name) then + return proc(name,...) + else + report_limiter("no access permitted for %a",name) + return nil, name .. 
": no access permitted" + end + end +end + +function io.limiter(preset) + preset = preset or { } + local ruleset = { } + for i=1,#preset do + local p = preset[i] + local what, spec = p[1] or "", p[2] or "" + if spec == "" then + -- skip 'm + elseif what == "tree" then + resolvers.dowithpath(spec, function(r) + local spec = resolvers.resolve(r) or "" + if spec ~= "" then + ruleset[#ruleset+1] = { topattern(spec,true), true } + end + end) + elseif what == "permit" then + ruleset[#ruleset+1] = { topattern(spec,true), true } + elseif what == "forbid" then + ruleset[#ruleset+1] = { topattern(spec,true), false } + end + end + if #ruleset > 0 then + return { + match = function(name) return match (ruleset,name) end, + protect = function(proc) return protect(ruleset,proc) end, + } + else + return { + match = function(name) return true end, + protect = proc, + } + end +end + +-- a few handlers + +io.i_limiters = { } +io.o_limiters = { } + +function io.i_limiter(v) + local i = io.i_limiters[v] + if i then + local i_limiter = io.limiter(i) + function io.i_limiter() + return i_limiter + end + return i_limiter + end +end + +function io.o_limiter(v) + local o = io.o_limiters[v] + if o then + local o_limiter = io.limiter(o) + function io.o_limiter() + return o_limiter + end + return o_limiter + end +end + +-- the real thing (somewhat fuzzy as we need to know what gets done) + +local i_opener, i_limited = io.open, false +local o_opener, o_limited = io.open, false + +local function i_register(v) + if not i_limited then + local i_limiter = io.i_limiter(v) + if i_limiter then + local protect = i_limiter.protect + i_opener = protect(i_opener) + i_limited = true + report_limiter("input mode set to %a",v) + end + end +end + +local function o_register(v) + if not o_limited then + local o_limiter = io.o_limiter(v) + if o_limiter then + local protect = o_limiter.protect + o_opener = protect(o_opener) + o_limited = true + report_limiter("output mode set to %a",v) + end + end +end + +function io.open(name,method) + if method and find(method,"[wa]") then + return o_opener(name,method) + else + return i_opener(name,method) + end +end + +directives.register("system.inputmode", i_register) +directives.register("system.outputmode", o_register) + +local i_limited = false +local o_limited = false + +local function i_register(v) + if not i_limited then + local i_limiter = io.i_limiter(v) + if i_limiter then + local protect = i_limiter.protect + lfs.chdir = protect(lfs.chdir) -- needs checking + i_limited = true + end + end +end + +local function o_register(v) + if not o_limited then + local o_limiter = io.o_limiter(v) + if o_limiter then + local protect = o_limiter.protect + os.remove = protect(os.remove) -- rather okay + lfs.chdir = protect(lfs.chdir) -- needs checking + lfs.mkdir = protect(lfs.mkdir) -- needs checking + o_limited = true + end + end +end + +directives.register("system.inputmode", i_register) +directives.register("system.outputmode", o_register) + +-- the definitions + +local limiters = resolvers.variable("limiters") + +if limiters then + io.i_limiters = limiters.input or { } + io.o_limiters = limiters.output or { } +end + diff --git a/tex/context/base/luat-lua.lua b/tex/context/base/luat-lua.lua index 972004e88..fd899871f 100644 --- a/tex/context/base/luat-lua.lua +++ b/tex/context/base/luat-lua.lua @@ -1,45 +1,45 @@ -if not modules then modules = { } end modules ['luat-lua'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = 
"PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if lua then do - - local delayed = { } - - function lua.flushdelayed(...) - local t = delayed - delayed = { } - for i=1, #t do - t[i](...) - end - end - - function lua.delay(f) - delayed[#delayed+1] = f - end - - function lua.flush(...) - context.directlua("lua.flushdelayed(%,t)",{...}) - end - -end end - --- See mk.pdf for an explanation of the following code: --- --- function test(n) --- lua.delay(function(...) --- context("pi: %s %s %s",...) --- context.par() --- end) --- lua.delay(function(...) --- context("more pi: %s %s %s",...) --- context.par() --- end) --- context("\\setbox0=\\hbox{%s}",math.pi*n) --- local box = tex.box[0] --- lua.flush(box.width,box.height,box.depth) --- end +if not modules then modules = { } end modules ['luat-lua'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if lua then do + + local delayed = { } + + function lua.flushdelayed(...) + local t = delayed + delayed = { } + for i=1, #t do + t[i](...) + end + end + + function lua.delay(f) + delayed[#delayed+1] = f + end + + function lua.flush(...) + context.directlua("lua.flushdelayed(%,t)",{...}) + end + +end end + +-- See mk.pdf for an explanation of the following code: +-- +-- function test(n) +-- lua.delay(function(...) +-- context("pi: %s %s %s",...) +-- context.par() +-- end) +-- lua.delay(function(...) +-- context("more pi: %s %s %s",...) +-- context.par() +-- end) +-- context("\\setbox0=\\hbox{%s}",math.pi*n) +-- local box = tex.box[0] +-- lua.flush(box.width,box.height,box.depth) +-- end diff --git a/tex/context/base/luat-mac.lua b/tex/context/base/luat-mac.lua index c8be06b63..19f4d108b 100644 --- a/tex/context/base/luat-mac.lua +++ b/tex/context/base/luat-mac.lua @@ -1,434 +1,434 @@ -if not modules then modules = { } end modules ['luat-mac'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Sometimes we run into situations like: --- --- \def\foo#1{\expandafter\def\csname#1\endcsname} --- --- As this confuses the parser, the following should be used instead: --- --- \def\foo#1{\expandafter\normaldef\csname#1\endcsname} - -local P, V, S, R, C, Cs, Cmt, Carg = lpeg.P, lpeg.V, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cmt, lpeg.Carg -local lpegmatch, patterns = lpeg.match, lpeg.patterns - -local insert, remove = table.insert, table.remove -local rep, sub = string.rep, string.sub -local setmetatable = setmetatable -local filesuffix = file.suffix -local convertlmxstring = lmx and lmx.convertstring - -local pushtarget, poptarget = logs.pushtarget, logs.poptarget - -local report_macros = logs.reporter("interface","macros") - -local stack, top, n, hashes = { }, nil, 0, { } - -local function set(s) - if top then - n = n + 1 - if n > 9 then - report_macros("number of arguments > 9, ignoring %s",s) - else - local ns = #stack - local h = hashes[ns] - if not h then - h = rep("#",2^(ns-1)) - hashes[ns] = h - end - m = h .. n - top[s] = m - return m - end - end -end - -local function get(s) - if not top then - report_macros("keeping #%s, no stack",s) - return "#" .. 
s -- can be lua - end - local m = top[s] - if m then - return m - else - report_macros("keeping #%s, not on stack",s) - return "#" .. s -- quite likely an error - end -end - -local function push() - top = { } - n = 0 - local s = stack[#stack] - if s then - setmetatable(top,{ __index = s }) - end - insert(stack,top) -end - -local function pop() - top = remove(stack) -end - -local leftbrace = P("{") -- will be in patterns -local rightbrace = P("}") -local escape = P("\\") - -local space = patterns.space -local spaces = space^1 -local newline = patterns.newline -local nobrace = 1 - leftbrace - rightbrace - -local longleft = leftbrace -- P("(") -local longright = rightbrace -- P(")") -local nolong = 1 - longleft - longright - -local name = R("AZ","az")^1 -local csname = (R("AZ","az") + S("@?!_"))^1 -local longname = (longleft/"") * (nolong^1) * (longright/"") -local variable = P("#") * Cs(name + longname) -local escapedname = escape * csname -local definer = escape * (P("def") + S("egx") * P("def")) -- tex -local setter = escape * P("set") * (P("u")^-1 * S("egx")^-1) * P("value") -- context specific ---- + escape * P("install") * (1-P("handler"))^1 * P("handler") -- context specific -local startcode = P("\\starttexdefinition") -- context specific -local stopcode = P("\\stoptexdefinition") -- context specific -local anything = patterns.anything -local always = patterns.alwaysmatched - -local definer = escape * (P("u")^-1 * S("egx")^-1 * P("def")) -- tex - --- The comment nilling can become an option but it nicely compensates the Lua --- parsing here with less parsing at the TeX end. We keep lines so the errors --- get reported all right, but comments are never seen there anyway. We keep --- comment that starts inline as it can be something special with a % (at some --- point we can do that as well, esp if we never use \% or `% somewhere --- unpredictable). We need to skip comments anyway. Hm, too tricky, this --- stripping as we can have Lua code etc. 
- -local commenttoken = P("%") -local crorlf = S("\n\r") ------ commentline = commenttoken * ((Carg(1) * C((1-crorlf)^0))/function(strip,s) return strip and "" or s end) -local commentline = commenttoken * ((1-crorlf)^0) -local leadingcomment = (commentline * crorlf^1)^1 -local furthercomment = (crorlf^1 * commentline)^1 - -local pushlocal = always / push -local poplocal = always / pop -local declaration = variable / set -local identifier = variable / get - -local argument = P { leftbrace * ((identifier + V(1) + (1 - leftbrace - rightbrace))^0) * rightbrace } - -local function matcherror(str,pos) - report_macros("runaway definition at: %s",sub(str,pos-30,pos)) -end - -local csname_endcsname = P("\\csname") * (identifier + (1 - P("\\endcsname")))^1 - -local grammar = { "converter", - texcode = pushlocal - * startcode - * spaces - * (csname * spaces)^1 -- new: multiple, new:csname instead of name - -- * (declaration + furthercomment + (1 - newline - space))^0 - * ((declaration * (space^0/""))^1 + furthercomment + (1 - newline - space))^0 -- accepts #a #b #c - * V("texbody") - * stopcode - * poplocal, - texbody = ( V("definition") - + identifier - + V("braced") - + (1 - stopcode) - )^0, - definition = pushlocal - * definer - * spaces^0 - * escapedname --- * (declaration + furthercomment + commentline + (1-leftbrace))^0 - * (declaration + furthercomment + commentline + csname_endcsname + (1-leftbrace))^0 - * V("braced") - * poplocal, - setcode = pushlocal - * setter - * argument - * (declaration + furthercomment + commentline + (1-leftbrace))^0 - * V("braced") - * poplocal, - braced = leftbrace - * ( V("definition") - + identifier - + V("setcode") - + V("texcode") - + V("braced") - + furthercomment - + leadingcomment -- new per 2012-05-15 (message on mailing list) - + nobrace - )^0 - -- * rightbrace^-1, -- the -1 catches errors - * (rightbrace + Cmt(always,matcherror)), - - pattern = leadingcomment - + V("definition") - + V("setcode") - + V("texcode") - + furthercomment - + anything, - - converter = V("pattern")^1, -} - -local parser = Cs(grammar) - -local checker = P("%") * (1 - newline - P("macros"))^0 - * P("macros") * space^0 * P("=") * space^0 * C(patterns.letter^1) - --- maybe namespace - -local macros = { } resolvers.macros = macros - -function macros.preprocessed(str,strip) - return lpegmatch(parser,str,1,strip) -end - -function macros.convertfile(oldname,newname) -- beware, no testing on oldname == newname - local data = resolvers.loadtexfile(oldname) - data = interfaces.preprocessed(data) or "" - io.savedata(newname,data) -end - -function macros.version(data) - return lpegmatch(checker,data) -end - --- function macros.processmkvi(str,filename) --- if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then --- local oldsize = #str --- str = lpegmatch(parser,str,1,true) or str --- pushtarget("log") --- report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str) --- poptarget("log") --- end --- return str --- end --- --- utilities.sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi") - --- the document variables hack is temporary - -local processors = { } - -function processors.mkvi(str,filename) - local oldsize = #str - str = lpegmatch(parser,str,1,true) or str - pushtarget("log") - report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str) - poptarget("log") - return str -end - -function processors.mkix(str,filename) -- we could intercept earlier so that caching works better - if 
not document then -- because now we hash the string as well as the - document = { } - end - if not document.variables then - document.variables = { } - end - local oldsize = #str - str = convertlmxstring(str,document.variables,false) or str - pushtarget("log") - report_macros("processed mkix file %a, delta %s",filename,oldsize-#str) - poptarget("log") - return str -end - -function processors.mkxi(str,filename) - if not document then - document = { } - end - if not document.variables then - document.variables = { } - end - local oldsize = #str - str = convertlmxstring(str,document.variables,false) or str - str = lpegmatch(parser,str,1,true) or str - pushtarget("log") - report_macros("processed mkxi file %a, delta %s",filename,oldsize-#str) - poptarget("log") - return str -end - -function macros.processmk(str,filename) - if filename then - local suffix = filesuffix(filename) - local processor = processors[suffix] or processors[lpegmatch(checker,str)] - if processor then - str = processor(str,filename) - end - end - return str -end - -function macros.processmkvi(str,filename) - if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then - local oldsize = #str - str = lpegmatch(parser,str,1,true) or str - pushtarget("log") - report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str) - poptarget("log") - end - return str -end - -local sequencers = utilities.sequencers - -if sequencers then - - sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmk") - sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi") - -end - --- bonus - -if resolvers.schemes then - - local function handler(protocol,name,cachename) - local hashed = url.hashed(name) - local path = hashed.path - if path and path ~= "" then - local str = resolvers.loadtexfile(path) - if filesuffix(path) == "mkvi" or lpegmatch(checker,str) == "mkvi" then - -- already done automatically - io.savedata(cachename,str) - else - local result = lpegmatch(parser,str,1,true) or str - pushtarget("log") - report_macros("processed scheme %a, delta %s",filename,#str-#result) - poptarget("log") - io.savedata(cachename,result) - end - end - return cachename - end - - resolvers.schemes.install('mkvi',handler,1) -- this will cache ! 
- -end - --- print(macros.preprocessed( --- [[ --- \starttexdefinition unexpanded test #aa #bb #cc --- test --- \stoptexdefinition --- ]])) - --- print(macros.preprocessed([[\checked \def \bla #bla{bla#{bla}}]])) --- print(macros.preprocessed([[\def\bla#bla{#{bla}bla}]])) --- print(macros.preprocessed([[\def\blä#{blá}{blà:#{blá}}]])) --- print(macros.preprocessed([[\def\blä#bla{blà:#bla}]])) --- print(macros.preprocessed([[\setvalue{xx}#bla{blà:#bla}]])) --- print(macros.preprocessed([[\def\foo#bar{\setvalue{xx#bar}{#bar}}]])) --- print(macros.preprocessed([[\def\bla#bla{bla:#{bla}}]])) --- print(macros.preprocessed([[\def\bla_bla#bla{bla:#bla}]])) --- print(macros.preprocessed([[\def\test#oeps{test:#oeps}]])) --- print(macros.preprocessed([[\def\test_oeps#oeps{test:#oeps}]])) --- print(macros.preprocessed([[\def\test#oeps{test:#{oeps}}]])) --- print(macros.preprocessed([[\def\test#{oeps:1}{test:#{oeps:1}}]])) --- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps}]])) --- print(macros.preprocessed([[\def\x[#a][#b][#c]{\setvalue{\y{#a}\z{#b}}{#c}}]])) --- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}]])) --- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}}]])) --- print(macros.preprocessed([[% test --- \def\test#oeps{#oeps} % {test} --- % test --- --- % test --- two --- %test]])) --- print(macros.preprocessed([[ --- \def\scrn_button_make_normal#namespace#current#currentparameter#text% --- {\ctxlua{structures.references.injectcurrentset(nil,nil)}% --- % \hbox attr \referenceattribute \lastreferenceattribute {\localframed[#namespace:#current]{#text}}} --- \hbox attr \referenceattribute \lastreferenceattribute {\directlocalframed[#namespace:#current]{#text}}} --- ]])) --- --- print(macros.preprocessed([[ --- \def\definefoo[#name]% --- {\setvalue{start#name}{\dostartfoo{#name}}} --- \def\dostartfoo#name% --- {\def\noexpand\next#content\expandafter\noexpand\csname stop#name\endcsname{#name : #content}% --- \next} --- \def\dostartfoo#name% --- {\normalexpanded{\def\noexpand\next#content\expandafter\noexpand\csname stop#name\endcsname}{#name : #content}% --- \next} --- ]])) --- --- print(macros.preprocessed([[ --- \def\dosomething#content{%%% {{ --- % { }{{ %% --- \bgroup\italic#content\egroup --- } --- ]])) --- --- print(macros.preprocessed([[ --- \unexpanded\def\start#tag#stoptag% --- {\initialize{#tag}% --- \normalexpanded --- {\def\yes[#one]#two\csname\e!stop#stoptag\endcsname{\command_yes[#one]{#two}}% --- \def\nop #one\csname\e!stop#stoptag\endcsname{\command_nop {#one}}}% --- \doifnextoptionalelse\yes\nop} --- ]])) --- --- print(macros.preprocessed([[ --- \normalexpanded{\long\def\expandafter\noexpand\csname\e!start\v!interactionmenu\endcsname[#tag]#content\expandafter\noexpand\csname\e!stop\v!interactionmenu\endcsname}% --- {\def\currentinteractionmenu{#tag}% --- \expandafter\settrue\csname\??menustate\interactionmenuparameter\c!category\endcsname --- \setinteractionmenuparameter\c!menu{#content}} --- ]])) --- --- Just an experiment: --- --- \catcode\numexpr"10FF25=\commentcatcode %% > 110000 is invalid --- --- We could have a push/pop mechanism but binding to txtcatcodes --- is okay too. 
- -local txtcatcodes = false -- also signal and yet unknown - -local commentsignal = utf.char(0x10FF25) - -local encodecomment = P("%%") / commentsignal -- ------ encodepattern = Cs(((1-encodecomment)^0 * encodecomment)) -- strips but not nice for verbatim -local encodepattern = Cs((encodecomment + 1)^0) -local decodecomment = P(commentsignal) / "%%%%" -- why doubles here? -local decodepattern = Cs((decodecomment + 1)^0) - -function resolvers.macros.encodecomment(str) - if txtcatcodes and tex.catcodetable == txtcatcodes then - return lpegmatch(encodepattern,str) or str - else - return str - end -end - -function resolvers.macros.decodecomment(str) -- normally not needed - return txtcatcodes and lpegmatch(decodepattern,str) or str -end - --- resolvers.macros.commentsignal = commentsignal --- resolvers.macros.encodecommentpattern = encodepattern --- resolvers.macros.decodecommentpattern = decodepattern - -function resolvers.macros.enablecomment(thecatcodes) - if not txtcatcodes then - txtcatcodes = thecatcodes or catcodes.numbers.txtcatcodes - utilities.sequencers.appendaction(resolvers.openers.helpers.textlineactions,"system","resolvers.macros.encodecomment") - end -end +if not modules then modules = { } end modules ['luat-mac'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Sometimes we run into situations like: +-- +-- \def\foo#1{\expandafter\def\csname#1\endcsname} +-- +-- As this confuses the parser, the following should be used instead: +-- +-- \def\foo#1{\expandafter\normaldef\csname#1\endcsname} + +local P, V, S, R, C, Cs, Cmt, Carg = lpeg.P, lpeg.V, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cmt, lpeg.Carg +local lpegmatch, patterns = lpeg.match, lpeg.patterns + +local insert, remove = table.insert, table.remove +local rep, sub = string.rep, string.sub +local setmetatable = setmetatable +local filesuffix = file.suffix +local convertlmxstring = lmx and lmx.convertstring + +local pushtarget, poptarget = logs.pushtarget, logs.poptarget + +local report_macros = logs.reporter("interface","macros") + +local stack, top, n, hashes = { }, nil, 0, { } + +local function set(s) + if top then + n = n + 1 + if n > 9 then + report_macros("number of arguments > 9, ignoring %s",s) + else + local ns = #stack + local h = hashes[ns] + if not h then + h = rep("#",2^(ns-1)) + hashes[ns] = h + end + m = h .. n + top[s] = m + return m + end + end +end + +local function get(s) + if not top then + report_macros("keeping #%s, no stack",s) + return "#" .. s -- can be lua + end + local m = top[s] + if m then + return m + else + report_macros("keeping #%s, not on stack",s) + return "#" .. 
s -- quite likely an error + end +end + +local function push() + top = { } + n = 0 + local s = stack[#stack] + if s then + setmetatable(top,{ __index = s }) + end + insert(stack,top) +end + +local function pop() + top = remove(stack) +end + +local leftbrace = P("{") -- will be in patterns +local rightbrace = P("}") +local escape = P("\\") + +local space = patterns.space +local spaces = space^1 +local newline = patterns.newline +local nobrace = 1 - leftbrace - rightbrace + +local longleft = leftbrace -- P("(") +local longright = rightbrace -- P(")") +local nolong = 1 - longleft - longright + +local name = R("AZ","az")^1 +local csname = (R("AZ","az") + S("@?!_"))^1 +local longname = (longleft/"") * (nolong^1) * (longright/"") +local variable = P("#") * Cs(name + longname) +local escapedname = escape * csname +local definer = escape * (P("def") + S("egx") * P("def")) -- tex +local setter = escape * P("set") * (P("u")^-1 * S("egx")^-1) * P("value") -- context specific +--- + escape * P("install") * (1-P("handler"))^1 * P("handler") -- context specific +local startcode = P("\\starttexdefinition") -- context specific +local stopcode = P("\\stoptexdefinition") -- context specific +local anything = patterns.anything +local always = patterns.alwaysmatched + +local definer = escape * (P("u")^-1 * S("egx")^-1 * P("def")) -- tex + +-- The comment nilling can become an option but it nicely compensates the Lua +-- parsing here with less parsing at the TeX end. We keep lines so the errors +-- get reported all right, but comments are never seen there anyway. We keep +-- comment that starts inline as it can be something special with a % (at some +-- point we can do that as well, esp if we never use \% or `% somewhere +-- unpredictable). We need to skip comments anyway. Hm, too tricky, this +-- stripping as we can have Lua code etc. 
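-- A minimal sketch, assuming macros.preprocessed as defined further down: the
-- grammar below rewrites named arguments to numbered ones and doubles the hashes
-- per nesting level (rep("#",2^(ns-1))), so one would expect roughly:
--
--   print(macros.preprocessed([[\def\foo#bar{\def\nested#nub{#bar and #nub}}]]))
--   -- \def\foo#1{\def\nested##1{#1 and ##1}}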
+ +local commenttoken = P("%") +local crorlf = S("\n\r") +----- commentline = commenttoken * ((Carg(1) * C((1-crorlf)^0))/function(strip,s) return strip and "" or s end) +local commentline = commenttoken * ((1-crorlf)^0) +local leadingcomment = (commentline * crorlf^1)^1 +local furthercomment = (crorlf^1 * commentline)^1 + +local pushlocal = always / push +local poplocal = always / pop +local declaration = variable / set +local identifier = variable / get + +local argument = P { leftbrace * ((identifier + V(1) + (1 - leftbrace - rightbrace))^0) * rightbrace } + +local function matcherror(str,pos) + report_macros("runaway definition at: %s",sub(str,pos-30,pos)) +end + +local csname_endcsname = P("\\csname") * (identifier + (1 - P("\\endcsname")))^1 + +local grammar = { "converter", + texcode = pushlocal + * startcode + * spaces + * (csname * spaces)^1 -- new: multiple, new:csname instead of name + -- * (declaration + furthercomment + (1 - newline - space))^0 + * ((declaration * (space^0/""))^1 + furthercomment + (1 - newline - space))^0 -- accepts #a #b #c + * V("texbody") + * stopcode + * poplocal, + texbody = ( V("definition") + + identifier + + V("braced") + + (1 - stopcode) + )^0, + definition = pushlocal + * definer + * spaces^0 + * escapedname +-- * (declaration + furthercomment + commentline + (1-leftbrace))^0 + * (declaration + furthercomment + commentline + csname_endcsname + (1-leftbrace))^0 + * V("braced") + * poplocal, + setcode = pushlocal + * setter + * argument + * (declaration + furthercomment + commentline + (1-leftbrace))^0 + * V("braced") + * poplocal, + braced = leftbrace + * ( V("definition") + + identifier + + V("setcode") + + V("texcode") + + V("braced") + + furthercomment + + leadingcomment -- new per 2012-05-15 (message on mailing list) + + nobrace + )^0 + -- * rightbrace^-1, -- the -1 catches errors + * (rightbrace + Cmt(always,matcherror)), + + pattern = leadingcomment + + V("definition") + + V("setcode") + + V("texcode") + + furthercomment + + anything, + + converter = V("pattern")^1, +} + +local parser = Cs(grammar) + +local checker = P("%") * (1 - newline - P("macros"))^0 + * P("macros") * space^0 * P("=") * space^0 * C(patterns.letter^1) + +-- maybe namespace + +local macros = { } resolvers.macros = macros + +function macros.preprocessed(str,strip) + return lpegmatch(parser,str,1,strip) +end + +function macros.convertfile(oldname,newname) -- beware, no testing on oldname == newname + local data = resolvers.loadtexfile(oldname) + data = interfaces.preprocessed(data) or "" + io.savedata(newname,data) +end + +function macros.version(data) + return lpegmatch(checker,data) +end + +-- function macros.processmkvi(str,filename) +-- if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then +-- local oldsize = #str +-- str = lpegmatch(parser,str,1,true) or str +-- pushtarget("log") +-- report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str) +-- poptarget("log") +-- end +-- return str +-- end +-- +-- utilities.sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi") + +-- the document variables hack is temporary + +local processors = { } + +function processors.mkvi(str,filename) + local oldsize = #str + str = lpegmatch(parser,str,1,true) or str + pushtarget("log") + report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str) + poptarget("log") + return str +end + +function processors.mkix(str,filename) -- we could intercept earlier so that caching works better + if 
not document then -- because now we hash the string as well as the + document = { } + end + if not document.variables then + document.variables = { } + end + local oldsize = #str + str = convertlmxstring(str,document.variables,false) or str + pushtarget("log") + report_macros("processed mkix file %a, delta %s",filename,oldsize-#str) + poptarget("log") + return str +end + +function processors.mkxi(str,filename) + if not document then + document = { } + end + if not document.variables then + document.variables = { } + end + local oldsize = #str + str = convertlmxstring(str,document.variables,false) or str + str = lpegmatch(parser,str,1,true) or str + pushtarget("log") + report_macros("processed mkxi file %a, delta %s",filename,oldsize-#str) + poptarget("log") + return str +end + +function macros.processmk(str,filename) + if filename then + local suffix = filesuffix(filename) + local processor = processors[suffix] or processors[lpegmatch(checker,str)] + if processor then + str = processor(str,filename) + end + end + return str +end + +function macros.processmkvi(str,filename) + if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then + local oldsize = #str + str = lpegmatch(parser,str,1,true) or str + pushtarget("log") + report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str) + poptarget("log") + end + return str +end + +local sequencers = utilities.sequencers + +if sequencers then + + sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmk") + sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi") + +end + +-- bonus + +if resolvers.schemes then + + local function handler(protocol,name,cachename) + local hashed = url.hashed(name) + local path = hashed.path + if path and path ~= "" then + local str = resolvers.loadtexfile(path) + if filesuffix(path) == "mkvi" or lpegmatch(checker,str) == "mkvi" then + -- already done automatically + io.savedata(cachename,str) + else + local result = lpegmatch(parser,str,1,true) or str + pushtarget("log") + report_macros("processed scheme %a, delta %s",filename,#str-#result) + poptarget("log") + io.savedata(cachename,result) + end + end + return cachename + end + + resolvers.schemes.install('mkvi',handler,1) -- this will cache ! 
+ +end + +-- print(macros.preprocessed( +-- [[ +-- \starttexdefinition unexpanded test #aa #bb #cc +-- test +-- \stoptexdefinition +-- ]])) + +-- print(macros.preprocessed([[\checked \def \bla #bla{bla#{bla}}]])) +-- print(macros.preprocessed([[\def\bla#bla{#{bla}bla}]])) +-- print(macros.preprocessed([[\def\blä#{blá}{blà:#{blá}}]])) +-- print(macros.preprocessed([[\def\blä#bla{blà:#bla}]])) +-- print(macros.preprocessed([[\setvalue{xx}#bla{blà:#bla}]])) +-- print(macros.preprocessed([[\def\foo#bar{\setvalue{xx#bar}{#bar}}]])) +-- print(macros.preprocessed([[\def\bla#bla{bla:#{bla}}]])) +-- print(macros.preprocessed([[\def\bla_bla#bla{bla:#bla}]])) +-- print(macros.preprocessed([[\def\test#oeps{test:#oeps}]])) +-- print(macros.preprocessed([[\def\test_oeps#oeps{test:#oeps}]])) +-- print(macros.preprocessed([[\def\test#oeps{test:#{oeps}}]])) +-- print(macros.preprocessed([[\def\test#{oeps:1}{test:#{oeps:1}}]])) +-- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps}]])) +-- print(macros.preprocessed([[\def\x[#a][#b][#c]{\setvalue{\y{#a}\z{#b}}{#c}}]])) +-- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}]])) +-- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}}]])) +-- print(macros.preprocessed([[% test +-- \def\test#oeps{#oeps} % {test} +-- % test +-- +-- % test +-- two +-- %test]])) +-- print(macros.preprocessed([[ +-- \def\scrn_button_make_normal#namespace#current#currentparameter#text% +-- {\ctxlua{structures.references.injectcurrentset(nil,nil)}% +-- % \hbox attr \referenceattribute \lastreferenceattribute {\localframed[#namespace:#current]{#text}}} +-- \hbox attr \referenceattribute \lastreferenceattribute {\directlocalframed[#namespace:#current]{#text}}} +-- ]])) +-- +-- print(macros.preprocessed([[ +-- \def\definefoo[#name]% +-- {\setvalue{start#name}{\dostartfoo{#name}}} +-- \def\dostartfoo#name% +-- {\def\noexpand\next#content\expandafter\noexpand\csname stop#name\endcsname{#name : #content}% +-- \next} +-- \def\dostartfoo#name% +-- {\normalexpanded{\def\noexpand\next#content\expandafter\noexpand\csname stop#name\endcsname}{#name : #content}% +-- \next} +-- ]])) +-- +-- print(macros.preprocessed([[ +-- \def\dosomething#content{%%% {{ +-- % { }{{ %% +-- \bgroup\italic#content\egroup +-- } +-- ]])) +-- +-- print(macros.preprocessed([[ +-- \unexpanded\def\start#tag#stoptag% +-- {\initialize{#tag}% +-- \normalexpanded +-- {\def\yes[#one]#two\csname\e!stop#stoptag\endcsname{\command_yes[#one]{#two}}% +-- \def\nop #one\csname\e!stop#stoptag\endcsname{\command_nop {#one}}}% +-- \doifnextoptionalelse\yes\nop} +-- ]])) +-- +-- print(macros.preprocessed([[ +-- \normalexpanded{\long\def\expandafter\noexpand\csname\e!start\v!interactionmenu\endcsname[#tag]#content\expandafter\noexpand\csname\e!stop\v!interactionmenu\endcsname}% +-- {\def\currentinteractionmenu{#tag}% +-- \expandafter\settrue\csname\??menustate\interactionmenuparameter\c!category\endcsname +-- \setinteractionmenuparameter\c!menu{#content}} +-- ]])) +-- +-- Just an experiment: +-- +-- \catcode\numexpr"10FF25=\commentcatcode %% > 110000 is invalid +-- +-- We could have a push/pop mechanism but binding to txtcatcodes +-- is okay too. 
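-- A hedged illustration, not from the patch itself: the helpers defined right below
-- replace a double comment sign by a private Unicode character on input and map it
-- back on demand, so that "%%" can survive reading under the txt catcode regime.
-- The sample strings are made up; encodepattern and decodepattern are the locals
-- introduced in the next lines.
--
-- local encoded = lpeg.match(encodepattern,"100%% pure %% luck")
-- print(encoded == "100%% pure %% luck")   -- false: each %% became utf.char(0x10FF25)
-- print(lpeg.match(decodepattern,encoded)) -- prints "100%% pure %% luck" again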
+ +local txtcatcodes = false -- also signal and yet unknown + +local commentsignal = utf.char(0x10FF25) + +local encodecomment = P("%%") / commentsignal -- +----- encodepattern = Cs(((1-encodecomment)^0 * encodecomment)) -- strips but not nice for verbatim +local encodepattern = Cs((encodecomment + 1)^0) +local decodecomment = P(commentsignal) / "%%%%" -- why doubles here? +local decodepattern = Cs((decodecomment + 1)^0) + +function resolvers.macros.encodecomment(str) + if txtcatcodes and tex.catcodetable == txtcatcodes then + return lpegmatch(encodepattern,str) or str + else + return str + end +end + +function resolvers.macros.decodecomment(str) -- normally not needed + return txtcatcodes and lpegmatch(decodepattern,str) or str +end + +-- resolvers.macros.commentsignal = commentsignal +-- resolvers.macros.encodecommentpattern = encodepattern +-- resolvers.macros.decodecommentpattern = decodepattern + +function resolvers.macros.enablecomment(thecatcodes) + if not txtcatcodes then + txtcatcodes = thecatcodes or catcodes.numbers.txtcatcodes + utilities.sequencers.appendaction(resolvers.openers.helpers.textlineactions,"system","resolvers.macros.encodecomment") + end +end diff --git a/tex/context/base/luat-run.lua b/tex/context/base/luat-run.lua index eaede1030..6291fef1b 100644 --- a/tex/context/base/luat-run.lua +++ b/tex/context/base/luat-run.lua @@ -1,158 +1,158 @@ -if not modules then modules = { } end modules ['luat-run'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format -local insert = table.insert - --- trace_job_status is also controlled by statistics.enable that is set via the directive system.nostatistics - -local trace_lua_dump = false trackers.register("system.dump", function(v) trace_lua_dump = v end) -local trace_temp_files = false trackers.register("system.tempfiles", function(v) trace_temp_files = v end) -local trace_job_status = true trackers.register("system.jobstatus", function(v) trace_job_status = v end) -local trace_tex_status = false trackers.register("system.texstatus", function(v) trace_tex_status = v end) - -local report_lua = logs.reporter("system","lua") -local report_tex = logs.reporter("system","status") -local report_tempfiles = logs.reporter("resolvers","tempfiles") - -luatex = luatex or { } -local luatex = luatex - -local startactions = { } -local stopactions = { } - -function luatex.registerstartactions(...) insert(startactions, ...) end -function luatex.registerstopactions (...) insert(stopactions, ...) 
end - -local function start_run() - if logs.start_run then - logs.start_run() - end - for i=1,#startactions do - startactions[i]() - end -end - -local function stop_run() - for i=1,#stopactions do - stopactions[i]() - end - if trace_job_status then - statistics.show() - end - if trace_tex_status then - for k, v in table.sortedhash(status.list()) do - report_tex("%S=%S",k,v) - end - end - if logs.stop_run then - logs.stop_run() - end -end - -local function start_shipout_page() - logs.start_page_number() -end - -local function stop_shipout_page() - logs.stop_page_number() -end - -local function report_output_pages() -end - -local function report_output_log() -end - --- local function show_open() --- end - --- local function show_close() --- end - -local function pre_dump_actions() - lua.finalize(trace_lua_dump and report_lua or nil) - -- statistics.savefmtstatus("\jobname","\contextversion","context.tex") -end - --- this can be done later - -callbacks.register('start_run', start_run, "actions performed at the beginning of a run") -callbacks.register('stop_run', stop_run, "actions performed at the end of a run") - ----------.register('show_open', show_open, "actions performed when opening a file") ----------.register('show_close', show_close, "actions performed when closing a file") - -callbacks.register('report_output_pages', report_output_pages, "actions performed when reporting pages") -callbacks.register('report_output_log', report_output_log, "actions performed when reporting log file") - -callbacks.register('start_page_number', start_shipout_page, "actions performed at the beginning of a shipout") -callbacks.register('stop_page_number', stop_shipout_page, "actions performed at the end of a shipout") - -callbacks.register('process_input_buffer', false, "actions performed when reading data") -callbacks.register('process_output_buffer', false, "actions performed when writing data") - -callbacks.register("pre_dump", pre_dump_actions, "lua related finalizers called before we dump the format") -- comes after \everydump - --- an example: - -local tempfiles = { } - -function luatex.registertempfile(name,extrasuffix) - if extrasuffix then - name = name .. 
".mkiv-tmp" -- maybe just .tmp - end - if trace_temp_files and not tempfiles[name] then - report_tempfiles("registering temporary file %a",name) - end - tempfiles[name] = true - return name -end - -function luatex.cleanuptempfiles() - for name, _ in next, tempfiles do - if trace_temp_files then - report_tempfiles("removing temporary file %a",name) - end - os.remove(name) - end - tempfiles = { } -end - -luatex.registerstopactions(luatex.cleanuptempfiles) - --- for the moment here - -local synctex = false - -local report_system = logs.reporter("system") - -directives.register("system.synctex", function(v) - synctex = v - if v then - report_system("synctex functionality is enabled!") - else - report_system("synctex functionality is disabled!") - end - synctex = tonumber(synctex) or (toboolean(synctex,true) and 1) or (synctex == "zipped" and 1) or (synctex == "unzipped" and -1) or false - -- currently this is bugged: - tex.synctex = synctex - -- so for the moment we need: - context.normalsynctex() - if synctex then - context.plusone() - else - context.zerocount() - end -end) - -statistics.register("synctex tracing",function() - if synctex or tex.synctex ~= 0 then - return "synctex has been enabled (extra log file generated)" - end -end) +if not modules then modules = { } end modules ['luat-run'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format +local insert = table.insert + +-- trace_job_status is also controlled by statistics.enable that is set via the directive system.nostatistics + +local trace_lua_dump = false trackers.register("system.dump", function(v) trace_lua_dump = v end) +local trace_temp_files = false trackers.register("system.tempfiles", function(v) trace_temp_files = v end) +local trace_job_status = true trackers.register("system.jobstatus", function(v) trace_job_status = v end) +local trace_tex_status = false trackers.register("system.texstatus", function(v) trace_tex_status = v end) + +local report_lua = logs.reporter("system","lua") +local report_tex = logs.reporter("system","status") +local report_tempfiles = logs.reporter("resolvers","tempfiles") + +luatex = luatex or { } +local luatex = luatex + +local startactions = { } +local stopactions = { } + +function luatex.registerstartactions(...) insert(startactions, ...) end +function luatex.registerstopactions (...) insert(stopactions, ...) 
end + +local function start_run() + if logs.start_run then + logs.start_run() + end + for i=1,#startactions do + startactions[i]() + end +end + +local function stop_run() + for i=1,#stopactions do + stopactions[i]() + end + if trace_job_status then + statistics.show() + end + if trace_tex_status then + for k, v in table.sortedhash(status.list()) do + report_tex("%S=%S",k,v) + end + end + if logs.stop_run then + logs.stop_run() + end +end + +local function start_shipout_page() + logs.start_page_number() +end + +local function stop_shipout_page() + logs.stop_page_number() +end + +local function report_output_pages() +end + +local function report_output_log() +end + +-- local function show_open() +-- end + +-- local function show_close() +-- end + +local function pre_dump_actions() + lua.finalize(trace_lua_dump and report_lua or nil) + -- statistics.savefmtstatus("\jobname","\contextversion","context.tex") +end + +-- this can be done later + +callbacks.register('start_run', start_run, "actions performed at the beginning of a run") +callbacks.register('stop_run', stop_run, "actions performed at the end of a run") + +---------.register('show_open', show_open, "actions performed when opening a file") +---------.register('show_close', show_close, "actions performed when closing a file") + +callbacks.register('report_output_pages', report_output_pages, "actions performed when reporting pages") +callbacks.register('report_output_log', report_output_log, "actions performed when reporting log file") + +callbacks.register('start_page_number', start_shipout_page, "actions performed at the beginning of a shipout") +callbacks.register('stop_page_number', stop_shipout_page, "actions performed at the end of a shipout") + +callbacks.register('process_input_buffer', false, "actions performed when reading data") +callbacks.register('process_output_buffer', false, "actions performed when writing data") + +callbacks.register("pre_dump", pre_dump_actions, "lua related finalizers called before we dump the format") -- comes after \everydump + +-- an example: + +local tempfiles = { } + +function luatex.registertempfile(name,extrasuffix) + if extrasuffix then + name = name .. 
".mkiv-tmp" -- maybe just .tmp + end + if trace_temp_files and not tempfiles[name] then + report_tempfiles("registering temporary file %a",name) + end + tempfiles[name] = true + return name +end + +function luatex.cleanuptempfiles() + for name, _ in next, tempfiles do + if trace_temp_files then + report_tempfiles("removing temporary file %a",name) + end + os.remove(name) + end + tempfiles = { } +end + +luatex.registerstopactions(luatex.cleanuptempfiles) + +-- for the moment here + +local synctex = false + +local report_system = logs.reporter("system") + +directives.register("system.synctex", function(v) + synctex = v + if v then + report_system("synctex functionality is enabled!") + else + report_system("synctex functionality is disabled!") + end + synctex = tonumber(synctex) or (toboolean(synctex,true) and 1) or (synctex == "zipped" and 1) or (synctex == "unzipped" and -1) or false + -- currently this is bugged: + tex.synctex = synctex + -- so for the moment we need: + context.normalsynctex() + if synctex then + context.plusone() + else + context.zerocount() + end +end) + +statistics.register("synctex tracing",function() + if synctex or tex.synctex ~= 0 then + return "synctex has been enabled (extra log file generated)" + end +end) diff --git a/tex/context/base/luat-sta.lua b/tex/context/base/luat-sta.lua index 8b58774d3..1e83083cd 100644 --- a/tex/context/base/luat-sta.lua +++ b/tex/context/base/luat-sta.lua @@ -1,211 +1,211 @@ -if not modules then modules = { } end modules ['luat-sta'] = { - version = 1.001, - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this code is used in the updater - -local gmatch, match = string.gmatch, string.match -local type = type - -states = states or { } -local states = states - -states.data = states.data or { } -local data = states.data - -states.hash = states.hash or { } -local hash = states.hash - -states.tag = states.tag or "" -states.filename = states.filename or "" - -function states.save(filename,tag) - tag = tag or states.tag - filename = file.addsuffix(filename or states.filename,'lus') - io.savedata(filename, - "-- generator : luat-sta.lua\n" .. - "-- state tag : " .. tag .. "\n\n" .. 
- table.serialize(data[tag or states.tag] or {},true) - ) -end - -function states.load(filename,tag) - states.filename = filename - states.tag = tag or "whatever" - states.filename = file.addsuffix(states.filename,'lus') - data[states.tag], hash[states.tag] = (io.exists(filename) and dofile(filename)) or { }, { } -end - -local function set_by_tag(tag,key,value,default,persistent) - local d, h = data[tag], hash[tag] - if d then - if type(d) == "table" then - local dkey, hkey = key, key - local pre, post = match(key,"(.+)%.([^%.]+)$") - if pre and post then - for k in gmatch(pre,"[^%.]+") do - local dk = d[k] - if not dk then - dk = { } - d[k] = dk - elseif type(dk) == "string" then - -- invalid table, unable to upgrade structure - -- hope for the best or delete the state file - break - end - d = dk - end - dkey, hkey = post, key - end - if value == nil then - value = default - elseif value == false then - -- special case - elseif persistent then - value = value or d[dkey] or default - else - value = value or default - end - d[dkey], h[hkey] = value, value - elseif type(d) == "string" then - -- weird - data[tag], hash[tag] = value, value - end - end -end - -local function get_by_tag(tag,key,default) - local h = hash[tag] - if h and h[key] then - return h[key] - else - local d = data[tag] - if d then - for k in gmatch(key,"[^%.]+") do - local dk = d[k] - if dk ~= nil then - d = dk - else - return default - end - end - if d == false then - return false - else - return d or default - end - end - end -end - -states.set_by_tag = set_by_tag -states.get_by_tag = get_by_tag - -function states.set(key,value,default,persistent) - set_by_tag(states.tag,key,value,default,persistent) -end - -function states.get(key,default) - return get_by_tag(states.tag,key,default) -end - ---~ data.update = { ---~ ["version"] = { ---~ ["major"] = 0, ---~ ["minor"] = 1, ---~ }, ---~ ["rsync"] = { ---~ ["server"] = "contextgarden.net", ---~ ["module"] = "minimals", ---~ ["repository"] = "current", ---~ ["flags"] = "-rpztlv --stats", ---~ }, ---~ ["tasks"] = { ---~ ["update"] = true, ---~ ["make"] = true, ---~ ["delete"] = false, ---~ }, ---~ ["platform"] = { ---~ ["host"] = true, ---~ ["other"] = { ---~ ["mswin"] = false, ---~ ["linux"] = false, ---~ ["linux-64"] = false, ---~ ["osx-intel"] = false, ---~ ["osx-ppc"] = false, ---~ ["sun"] = false, ---~ }, ---~ }, ---~ ["context"] = { ---~ ["available"] = {"current", "beta", "alpha", "experimental"}, ---~ ["selected"] = "current", ---~ }, ---~ ["formats"] = { ---~ ["cont-en"] = true, ---~ ["cont-nl"] = true, ---~ ["cont-de"] = false, ---~ ["cont-cz"] = false, ---~ ["cont-fr"] = false, ---~ ["cont-ro"] = false, ---~ }, ---~ ["engine"] = { ---~ ["pdftex"] = { ---~ ["install"] = true, ---~ ["formats"] = { ---~ ["pdftex"] = true, ---~ }, ---~ }, ---~ ["luatex"] = { ---~ ["install"] = true, ---~ ["formats"] = { ---~ }, ---~ }, ---~ ["xetex"] = { ---~ ["install"] = true, ---~ ["formats"] = { ---~ ["xetex"] = false, ---~ }, ---~ }, ---~ ["metapost"] = { ---~ ["install"] = true, ---~ ["formats"] = { ---~ ["mpost"] = true, ---~ ["metafun"] = true, ---~ }, ---~ }, ---~ }, ---~ ["fonts"] = { ---~ }, ---~ ["doc"] = { ---~ }, ---~ ["modules"] = { ---~ ["f-urwgaramond"] = false, ---~ ["f-urwgothic"] = false, ---~ ["t-bnf"] = false, ---~ ["t-chromato"] = false, ---~ ["t-cmscbf"] = false, ---~ ["t-cmttbf"] = false, ---~ ["t-construction-plan"] = false, ---~ ["t-degrade"] = false, ---~ ["t-french"] = false, ---~ ["t-lettrine"] = false, ---~ ["t-lilypond"] = false, ---~ ["t-mathsets"] = 
false, ---~ ["t-tikz"] = false, ---~ ["t-typearea"] = false, ---~ ["t-vim"] = false, ---~ }, ---~ } - ---~ states.save("teststate", "update") ---~ states.load("teststate", "update") - ---~ print(states.get_by_tag("update","rsync.server","unknown")) ---~ states.set_by_tag("update","rsync.server","oeps") ---~ print(states.get_by_tag("update","rsync.server","unknown")) ---~ states.save("teststate", "update") ---~ states.load("teststate", "update") ---~ print(states.get_by_tag("update","rsync.server","unknown")) +if not modules then modules = { } end modules ['luat-sta'] = { + version = 1.001, + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this code is used in the updater + +local gmatch, match = string.gmatch, string.match +local type = type + +states = states or { } +local states = states + +states.data = states.data or { } +local data = states.data + +states.hash = states.hash or { } +local hash = states.hash + +states.tag = states.tag or "" +states.filename = states.filename or "" + +function states.save(filename,tag) + tag = tag or states.tag + filename = file.addsuffix(filename or states.filename,'lus') + io.savedata(filename, + "-- generator : luat-sta.lua\n" .. + "-- state tag : " .. tag .. "\n\n" .. + table.serialize(data[tag or states.tag] or {},true) + ) +end + +function states.load(filename,tag) + states.filename = filename + states.tag = tag or "whatever" + states.filename = file.addsuffix(states.filename,'lus') + data[states.tag], hash[states.tag] = (io.exists(filename) and dofile(filename)) or { }, { } +end + +local function set_by_tag(tag,key,value,default,persistent) + local d, h = data[tag], hash[tag] + if d then + if type(d) == "table" then + local dkey, hkey = key, key + local pre, post = match(key,"(.+)%.([^%.]+)$") + if pre and post then + for k in gmatch(pre,"[^%.]+") do + local dk = d[k] + if not dk then + dk = { } + d[k] = dk + elseif type(dk) == "string" then + -- invalid table, unable to upgrade structure + -- hope for the best or delete the state file + break + end + d = dk + end + dkey, hkey = post, key + end + if value == nil then + value = default + elseif value == false then + -- special case + elseif persistent then + value = value or d[dkey] or default + else + value = value or default + end + d[dkey], h[hkey] = value, value + elseif type(d) == "string" then + -- weird + data[tag], hash[tag] = value, value + end + end +end + +local function get_by_tag(tag,key,default) + local h = hash[tag] + if h and h[key] then + return h[key] + else + local d = data[tag] + if d then + for k in gmatch(key,"[^%.]+") do + local dk = d[k] + if dk ~= nil then + d = dk + else + return default + end + end + if d == false then + return false + else + return d or default + end + end + end +end + +states.set_by_tag = set_by_tag +states.get_by_tag = get_by_tag + +function states.set(key,value,default,persistent) + set_by_tag(states.tag,key,value,default,persistent) +end + +function states.get(key,default) + return get_by_tag(states.tag,key,default) +end + +--~ data.update = { +--~ ["version"] = { +--~ ["major"] = 0, +--~ ["minor"] = 1, +--~ }, +--~ ["rsync"] = { +--~ ["server"] = "contextgarden.net", +--~ ["module"] = "minimals", +--~ ["repository"] = "current", +--~ ["flags"] = "-rpztlv --stats", +--~ }, +--~ ["tasks"] = { +--~ ["update"] = true, +--~ ["make"] = true, +--~ ["delete"] = false, +--~ }, +--~ ["platform"] = { +--~ ["host"] = true, +--~ ["other"] = { +--~ 
["mswin"] = false, +--~ ["linux"] = false, +--~ ["linux-64"] = false, +--~ ["osx-intel"] = false, +--~ ["osx-ppc"] = false, +--~ ["sun"] = false, +--~ }, +--~ }, +--~ ["context"] = { +--~ ["available"] = {"current", "beta", "alpha", "experimental"}, +--~ ["selected"] = "current", +--~ }, +--~ ["formats"] = { +--~ ["cont-en"] = true, +--~ ["cont-nl"] = true, +--~ ["cont-de"] = false, +--~ ["cont-cz"] = false, +--~ ["cont-fr"] = false, +--~ ["cont-ro"] = false, +--~ }, +--~ ["engine"] = { +--~ ["pdftex"] = { +--~ ["install"] = true, +--~ ["formats"] = { +--~ ["pdftex"] = true, +--~ }, +--~ }, +--~ ["luatex"] = { +--~ ["install"] = true, +--~ ["formats"] = { +--~ }, +--~ }, +--~ ["xetex"] = { +--~ ["install"] = true, +--~ ["formats"] = { +--~ ["xetex"] = false, +--~ }, +--~ }, +--~ ["metapost"] = { +--~ ["install"] = true, +--~ ["formats"] = { +--~ ["mpost"] = true, +--~ ["metafun"] = true, +--~ }, +--~ }, +--~ }, +--~ ["fonts"] = { +--~ }, +--~ ["doc"] = { +--~ }, +--~ ["modules"] = { +--~ ["f-urwgaramond"] = false, +--~ ["f-urwgothic"] = false, +--~ ["t-bnf"] = false, +--~ ["t-chromato"] = false, +--~ ["t-cmscbf"] = false, +--~ ["t-cmttbf"] = false, +--~ ["t-construction-plan"] = false, +--~ ["t-degrade"] = false, +--~ ["t-french"] = false, +--~ ["t-lettrine"] = false, +--~ ["t-lilypond"] = false, +--~ ["t-mathsets"] = false, +--~ ["t-tikz"] = false, +--~ ["t-typearea"] = false, +--~ ["t-vim"] = false, +--~ }, +--~ } + +--~ states.save("teststate", "update") +--~ states.load("teststate", "update") + +--~ print(states.get_by_tag("update","rsync.server","unknown")) +--~ states.set_by_tag("update","rsync.server","oeps") +--~ print(states.get_by_tag("update","rsync.server","unknown")) +--~ states.save("teststate", "update") +--~ states.load("teststate", "update") +--~ print(states.get_by_tag("update","rsync.server","unknown")) diff --git a/tex/context/base/luat-sto.lua b/tex/context/base/luat-sto.lua index 7a11b7f5e..da2467708 100644 --- a/tex/context/base/luat-sto.lua +++ b/tex/context/base/luat-sto.lua @@ -1,169 +1,169 @@ -if not modules then modules = { } end modules ['luat-sto'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- we could nil some function in the productionrun - -local type, next, setmetatable, getmetatable, collectgarbage = type, next, setmetatable, getmetatable, collectgarbage -local gmatch, format = string.gmatch, string.format -local serialize, concat, sortedhash = table.serialize, table.concat, table.sortedhash -local bytecode = lua.bytecode -local strippedloadstring = utilities.lua.strippedloadstring - -local trace_storage = false -local report_storage = logs.reporter("system","storage") - -storage = storage or { } -local storage = storage - -local data = { } -storage.data = data - -storage.min = 0 -- 500 -storage.max = storage.min - 1 -storage.noftables = storage.noftables or 0 -storage.nofmodules = storage.nofmodules or 0 - -storage.mark = utilities.storage.mark -storage.allocate = utilities.storage.allocate -storage.marked = utilities.storage.marked -storage.strip = false - -directives.register("system.compile.strip", function(v) storage.strip = v end) - -function storage.register(...) - local t = { ... 
} - local d = t[2] - if d then - storage.mark(d) - else - report_storage("fatal error: invalid storage %a",t[1]) - os.exit() - end - data[#data+1] = t - return t -end - -local n = 0 -local function dump() - local max = storage.max - for i=1,#data do - local d = data[i] - local message, original, target = d[1], d[2] ,d[3] - local c, code, name = 0, { }, nil - -- we have a nice definer for this - for str in gmatch(target,"([^%.]+)") do - if name then - name = name .. "." .. str - else - name = str - end - c = c + 1 ; code[c] = format("%s = %s or { }",name,name) - end - max = max + 1 - if trace_storage then - c = c + 1 ; code[c] = format("print('restoring %s from slot %s')",message,max) - end - c = c + 1 ; code[c] = serialize(original,name) - if trace_storage then - report_storage('saving %a in slot %a, size %s',message,max,#code[c]) - end - -- we don't need tracing in such tables - bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name)) - collectgarbage("step") - end - storage.max = max -end - -lua.registerfinalizer(dump,"dump storage") - --- to be tested with otf caching: - -function lua.collectgarbage(threshold) - local current = collectgarbage("count") - local threshold = threshold or 256 * 1024 - while true do - collectgarbage("collect") - local previous = collectgarbage("count") - if current - previous < threshold then - break - else - current = previous - end - end -end - --- -- we also need to count at generation time (nicer for message) --- --- if lua.bytecode then -- from 0 upwards --- local i, b = storage.min, lua.bytecode --- while b[i] do --- storage.noftables = i --- b[i]() --- b[i] = nil --- i = i + 1 --- end --- end - -statistics.register("stored bytecode data", function() - local nofmodules = (storage.nofmodules > 0 and storage.nofmodules) or (status.luabytecodes - lua.firstbytecode - 1) - local nofdumps = (storage.noftables > 0 and storage.noftables ) or storage.max-storage.min + 1 - local tofmodules = storage.tofmodules or 0 - local tofdumps = storage.toftables or 0 - if environment.initex then - local luautilities = utilities.lua - local nofstrippedbytes = luautilities.nofstrippedbytes - local nofstrippedchunks = luautilities.nofstrippedchunks - if nofstrippedbytes > 0 then - return format("%s modules, %s tables, %s chunks, %s chunks stripped (%s bytes)", - nofmodules, - nofdumps, - nofmodules + nofdumps, - nofstrippedchunks, - nofstrippedbytes - ) - elseif nofstrippedchunks > 0 then - return format("%s modules, %s tables, %s chunks, %s chunks stripped", - nofmodules, - nofdumps, - nofmodules + nofdumps, - nofstrippedchunks - ) - else - return format("%s modules, %s tables, %s chunks", - nofmodules, - nofdumps, - nofmodules + nofdumps - ) - end - else - return format("%s modules (%0.3f sec), %s tables (%0.3f sec), %s chunks (%0.3f sec)", - nofmodules, tofmodules, - nofdumps, tofdumps, - nofmodules + nofdumps, tofmodules + tofdumps - ) - end -end) - -if lua.bytedata then - storage.register("lua/bytedata",lua.bytedata,"lua.bytedata") -end - --- Because the storage mechanism assumes tables, we define a table for storing --- (non table) values. 
- -storage.shared = storage.shared or { } - -storage.register("storage/shared", storage.shared, "storage.shared") - -local mark = storage.mark - -if string.patterns then mark(string.patterns) end -if lpeg.patterns then mark(lpeg.patterns) end -if os.env then mark(os.env) end -if number.dimenfactors then mark(number.dimenfactors) end -if libraries then for k,v in next, libraries do mark(v) end end +if not modules then modules = { } end modules ['luat-sto'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- we could nil some function in the productionrun + +local type, next, setmetatable, getmetatable, collectgarbage = type, next, setmetatable, getmetatable, collectgarbage +local gmatch, format = string.gmatch, string.format +local serialize, concat, sortedhash = table.serialize, table.concat, table.sortedhash +local bytecode = lua.bytecode +local strippedloadstring = utilities.lua.strippedloadstring + +local trace_storage = false +local report_storage = logs.reporter("system","storage") + +storage = storage or { } +local storage = storage + +local data = { } +storage.data = data + +storage.min = 0 -- 500 +storage.max = storage.min - 1 +storage.noftables = storage.noftables or 0 +storage.nofmodules = storage.nofmodules or 0 + +storage.mark = utilities.storage.mark +storage.allocate = utilities.storage.allocate +storage.marked = utilities.storage.marked +storage.strip = false + +directives.register("system.compile.strip", function(v) storage.strip = v end) + +function storage.register(...) + local t = { ... } + local d = t[2] + if d then + storage.mark(d) + else + report_storage("fatal error: invalid storage %a",t[1]) + os.exit() + end + data[#data+1] = t + return t +end + +local n = 0 +local function dump() + local max = storage.max + for i=1,#data do + local d = data[i] + local message, original, target = d[1], d[2] ,d[3] + local c, code, name = 0, { }, nil + -- we have a nice definer for this + for str in gmatch(target,"([^%.]+)") do + if name then + name = name .. "." .. 
str + else + name = str + end + c = c + 1 ; code[c] = format("%s = %s or { }",name,name) + end + max = max + 1 + if trace_storage then + c = c + 1 ; code[c] = format("print('restoring %s from slot %s')",message,max) + end + c = c + 1 ; code[c] = serialize(original,name) + if trace_storage then + report_storage('saving %a in slot %a, size %s',message,max,#code[c]) + end + -- we don't need tracing in such tables + bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name)) + collectgarbage("step") + end + storage.max = max +end + +lua.registerfinalizer(dump,"dump storage") + +-- to be tested with otf caching: + +function lua.collectgarbage(threshold) + local current = collectgarbage("count") + local threshold = threshold or 256 * 1024 + while true do + collectgarbage("collect") + local previous = collectgarbage("count") + if current - previous < threshold then + break + else + current = previous + end + end +end + +-- -- we also need to count at generation time (nicer for message) +-- +-- if lua.bytecode then -- from 0 upwards +-- local i, b = storage.min, lua.bytecode +-- while b[i] do +-- storage.noftables = i +-- b[i]() +-- b[i] = nil +-- i = i + 1 +-- end +-- end + +statistics.register("stored bytecode data", function() + local nofmodules = (storage.nofmodules > 0 and storage.nofmodules) or (status.luabytecodes - lua.firstbytecode - 1) + local nofdumps = (storage.noftables > 0 and storage.noftables ) or storage.max-storage.min + 1 + local tofmodules = storage.tofmodules or 0 + local tofdumps = storage.toftables or 0 + if environment.initex then + local luautilities = utilities.lua + local nofstrippedbytes = luautilities.nofstrippedbytes + local nofstrippedchunks = luautilities.nofstrippedchunks + if nofstrippedbytes > 0 then + return format("%s modules, %s tables, %s chunks, %s chunks stripped (%s bytes)", + nofmodules, + nofdumps, + nofmodules + nofdumps, + nofstrippedchunks, + nofstrippedbytes + ) + elseif nofstrippedchunks > 0 then + return format("%s modules, %s tables, %s chunks, %s chunks stripped", + nofmodules, + nofdumps, + nofmodules + nofdumps, + nofstrippedchunks + ) + else + return format("%s modules, %s tables, %s chunks", + nofmodules, + nofdumps, + nofmodules + nofdumps + ) + end + else + return format("%s modules (%0.3f sec), %s tables (%0.3f sec), %s chunks (%0.3f sec)", + nofmodules, tofmodules, + nofdumps, tofdumps, + nofmodules + nofdumps, tofmodules + tofdumps + ) + end +end) + +if lua.bytedata then + storage.register("lua/bytedata",lua.bytedata,"lua.bytedata") +end + +-- Because the storage mechanism assumes tables, we define a table for storing +-- (non table) values. 
+ +storage.shared = storage.shared or { } + +storage.register("storage/shared", storage.shared, "storage.shared") + +local mark = storage.mark + +if string.patterns then mark(string.patterns) end +if lpeg.patterns then mark(lpeg.patterns) end +if os.env then mark(os.env) end +if number.dimenfactors then mark(number.dimenfactors) end +if libraries then for k,v in next, libraries do mark(v) end end diff --git a/tex/context/base/lxml-aux.lua b/tex/context/base/lxml-aux.lua index 0fffe261a..812b14d50 100644 --- a/tex/context/base/lxml-aux.lua +++ b/tex/context/base/lxml-aux.lua @@ -1,811 +1,811 @@ -if not modules then modules = { } end modules ['lxml-aux'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- not all functions here make sense anymore vbut we keep them for --- compatibility reasons - -local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end) - -local report_xml = logs.reporter("xml") - -local xml = xml - -local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name -local xmlinheritedconvert = xml.inheritedconvert -local xmlapplylpath = xml.applylpath -local xmlfilter = xml.filter - -local type, setmetatable, getmetatable = type, setmetatable, getmetatable -local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat -local gmatch, gsub, format, find, strip = string.gmatch, string.gsub, string.format, string.find, string.strip -local utfbyte = utf.byte - -local function report(what,pattern,c,e) - report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern) -end - -local function withelements(e,handle,depth) - if e and handle then - local edt = e.dt - if edt then - depth = depth or 0 - for i=1,#edt do - local e = edt[i] - if type(e) == "table" then - handle(e,depth) - withelements(e,handle,depth+1) - end - end - end - end -end - -xml.withelements = withelements - -function xml.withelement(e,n,handle) -- slow - if e and n ~= 0 and handle then - local edt = e.dt - if edt then - if n > 0 then - for i=1,#edt do - local ei = edt[i] - if type(ei) == "table" then - if n == 1 then - handle(ei) - return - else - n = n - 1 - end - end - end - elseif n < 0 then - for i=#edt,1,-1 do - local ei = edt[i] - if type(ei) == "table" then - if n == -1 then - handle(ei) - return - else - n = n + 1 - end - end - end - end - end - end -end - -function xml.each(root,pattern,handle,reverse) - local collected = xmlapplylpath(root,pattern) - if collected then - if reverse then - for c=#collected,1,-1 do - handle(collected[c]) - end - else - for c=1,#collected do - handle(collected[c]) - end - end - return collected - end -end - -function xml.processattributes(root,pattern,handle) - local collected = xmlapplylpath(root,pattern) - if collected and handle then - for c=1,#collected do - handle(collected[c].at) - end - end - return collected -end - ---[[ldx-- -
<p>The following functions collect elements and texts.</p>
---ldx]]-- - --- are these still needed -> lxml-cmp.lua - -function xml.collect(root, pattern) - return xmlapplylpath(root,pattern) -end - -function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle - local collected = xmlapplylpath(root,pattern) - if collected and flatten then - local xmltostring = xml.tostring - for c=1,#collected do - collected[c] = xmltostring(collected[c].dt) - end - end - return collected or { } -end - -function xml.collect_tags(root, pattern, nonamespace) - local collected = xmlapplylpath(root,pattern) - if collected then - local t, n = { }, 0 - for c=1,#collected do - local e = collected[c] - local ns, tg = e.ns, e.tg - n = n + 1 - if nonamespace then - t[n] = tg - elseif ns == "" then - t[n] = tg - else - t[n] = ns .. ":" .. tg - end - end - return t - end -end - ---[[ldx-- -
<p>We've now arrived at the functions that manipulate the tree.</p>
---ldx]]-- - -local no_root = { no_root = true } - -local function redo_ni(d) - for k=1,#d do - local dk = d[k] - if type(dk) == "table" then - dk.ni = k - end - end -end - -local function xmltoelement(whatever,root) - if not whatever then - return nil - end - local element - if type(whatever) == "string" then - element = xmlinheritedconvert(whatever,root) -- beware, not really a root - else - element = whatever -- we assume a table - end - if element.error then - return whatever -- string - end - if element then - --~ if element.ri then - --~ element = element.dt[element.ri].dt - --~ else - --~ element = element.dt - --~ end - end - return element -end - -xml.toelement = xmltoelement - -local function copiedelement(element,newparent) - if type(element) == "string" then - return element - else - element = xmlcopy(element).dt - if newparent and type(element) == "table" then - element.__p__ = newparent - end - return element - end -end - -function xml.delete(root,pattern) - if not pattern or pattern == "" then - local p = root.__p__ - if p then - if trace_manipulations then - report('deleting',"--",c,root) - end - local d = p.dt - remove(d,root.ni) - redo_ni(d) -- can be made faster and inlined - end - else - local collected = xmlapplylpath(root,pattern) - if collected then - for c=1,#collected do - local e = collected[c] - local p = e.__p__ - if p then - if trace_manipulations then - report('deleting',pattern,c,e) - end - local d = p.dt - remove(d,e.ni) - redo_ni(d) -- can be made faster and inlined - end - end - end - end -end - -function xml.replace(root,pattern,whatever) - local element = root and xmltoelement(whatever,root) - local collected = element and xmlapplylpath(root,pattern) - if collected then - for c=1,#collected do - local e = collected[c] - local p = e.__p__ - if p then - if trace_manipulations then - report('replacing',pattern,c,e) - end - local d = p.dt - d[e.ni] = copiedelement(element,p) - redo_ni(d) -- probably not needed - end - end - end -end - -local function wrap(e,wrapper) - local t = { - rn = e.rn, - tg = e.tg, - ns = e.ns, - at = e.at, - dt = e.dt, - __p__ = e, - } - setmetatable(t,getmetatable(e)) - e.rn = wrapper.rn or e.rn or "" - e.tg = wrapper.tg or e.tg or "" - e.ns = wrapper.ns or e.ns or "" - e.at = fastcopy(wrapper.at) - e.dt = { t } -end - -function xml.wrap(root,pattern,whatever) - if whatever then - local wrapper = xmltoelement(whatever,root) - local collected = xmlapplylpath(root,pattern) - if collected then - for c=1,#collected do - local e = collected[c] - if trace_manipulations then - report('wrapping',pattern,c,e) - end - wrap(e,wrapper) - end - end - else - wrap(root,xmltoelement(pattern)) - end -end - -local function inject_element(root,pattern,whatever,prepend) - local element = root and xmltoelement(whatever,root) - local collected = element and xmlapplylpath(root,pattern) - local function inject_e(e) - local r = e.__p__ - local d, k, rri = r.dt, e.ni, r.ri - local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt) - if edt then - local be, af - local cp = copiedelement(element,e) - if prepend then - be, af = cp, edt - else - be, af = edt, cp - end - local bn = #be - for i=1,#af do - bn = bn + 1 - be[bn] = af[i] - end - if rri then - r.dt[rri].dt = be - else - d[k].dt = be - end - redo_ni(d) - end - end - if not collected then - -- nothing - elseif collected.tg then - -- first or so - inject_e(collected) - else - for c=1,#collected do - inject_e(collected[c]) - end - end -end - -local function insert_element(root,pattern,whatever,before) 
-- todo: element als functie - local element = root and xmltoelement(whatever,root) - local collected = element and xmlapplylpath(root,pattern) - local function insert_e(e) - local r = e.__p__ - local d, k = r.dt, e.ni - if not before then - k = k + 1 - end - insert(d,k,copiedelement(element,r)) - redo_ni(d) - end - if not collected then - -- nothing - elseif collected.tg then - -- first or so - insert_e(collected) - else - for c=1,#collected do - insert_e(collected[c]) - end - end -end - -xml.insert_element = insert_element -xml.insertafter = insert_element -xml.insertbefore = function(r,p,e) insert_element(r,p,e,true) end -xml.injectafter = inject_element -xml.injectbefore = function(r,p,e) inject_element(r,p,e,true) end - -local function include(xmldata,pattern,attribute,recursive,loaddata) - -- parse="text" (default: xml), encoding="" (todo) - -- attribute = attribute or 'href' - pattern = pattern or 'include' - loaddata = loaddata or io.loaddata - local collected = xmlapplylpath(xmldata,pattern) - if collected then - for c=1,#collected do - local ek = collected[c] - local name = nil - local ekdt = ek.dt - local ekat = ek.at - local epdt = ek.__p__.dt - if not attribute or attribute == "" then - name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str - end - if not name then - for a in gmatch(attribute or "href","([^|]+)") do - name = ekat[a] - if name then break end - end - end - local data = (name and name ~= "" and loaddata(name)) or "" - if data == "" then - epdt[ek.ni] = "" -- xml.empty(d,k) - elseif ekat["parse"] == "text" then - -- for the moment hard coded - epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data) - else ---~ local settings = xmldata.settings ---~ settings.parent_root = xmldata -- to be tested ---~ local xi = xmlconvert(data,settings) - local xi = xmlinheritedconvert(data,xmldata) - if not xi then - epdt[ek.ni] = "" -- xml.empty(d,k) - else - if recursive then - include(xi,pattern,attribute,recursive,loaddata) - end - epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi) - end - end - end - end -end - -xml.include = include - -local function stripelement(e,nolines,anywhere) - local edt = e.dt - if edt then - if anywhere then - local t, n = { }, 0 - for e=1,#edt do - local str = edt[e] - if type(str) ~= "string" then - n = n + 1 - t[n] = str - elseif str ~= "" then - -- todo: lpeg for each case - if nolines then - str = gsub(str,"%s+"," ") - end - str = gsub(str,"^%s*(.-)%s*$","%1") - if str ~= "" then - n = n + 1 - t[n] = str - end - end - end - e.dt = t - else - -- we can assume a regular sparse xml table with no successive strings - -- otherwise we should use a while loop - if #edt > 0 then - -- strip front - local str = edt[1] - if type(str) ~= "string" then - -- nothing - elseif str == "" then - remove(edt,1) - else - if nolines then - str = gsub(str,"%s+"," ") - end - str = gsub(str,"^%s+","") - if str == "" then - remove(edt,1) - else - edt[1] = str - end - end - end - local nedt = #edt - if nedt > 0 then - -- strip end - local str = edt[nedt] - if type(str) ~= "string" then - -- nothing - elseif str == "" then - remove(edt) - else - if nolines then - str = gsub(str,"%s+"," ") - end - str = gsub(str,"%s+$","") - if str == "" then - remove(edt) - else - edt[nedt] = str - end - end - end - end - end - return e -- convenient -end - -xml.stripelement = stripelement - -function xml.strip(root,pattern,nolines,anywhere) -- strips all leading and trailing spacing - local collected = xmlapplylpath(root,pattern) -- beware, indices no 
longer are valid now - if collected then - for i=1,#collected do - stripelement(collected[i],nolines,anywhere) - end - end -end - -local function renamespace(root, oldspace, newspace) -- fast variant - local ndt = #root.dt - for i=1,ndt or 0 do - local e = root[i] - if type(e) == "table" then - if e.ns == oldspace then - e.ns = newspace - if e.rn then - e.rn = newspace - end - end - local edt = e.dt - if edt then - renamespace(edt, oldspace, newspace) - end - end - end -end - -xml.renamespace = renamespace - -function xml.remaptag(root, pattern, newtg) - local collected = xmlapplylpath(root,pattern) - if collected then - for c=1,#collected do - collected[c].tg = newtg - end - end -end - -function xml.remapnamespace(root, pattern, newns) - local collected = xmlapplylpath(root,pattern) - if collected then - for c=1,#collected do - collected[c].ns = newns - end - end -end - -function xml.checknamespace(root, pattern, newns) - local collected = xmlapplylpath(root,pattern) - if collected then - for c=1,#collected do - local e = collected[c] - if (not e.rn or e.rn == "") and e.ns == "" then - e.rn = newns - end - end - end -end - -function xml.remapname(root, pattern, newtg, newns, newrn) - local collected = xmlapplylpath(root,pattern) - if collected then - for c=1,#collected do - local e = collected[c] - e.tg, e.ns, e.rn = newtg, newns, newrn - end - end -end - ---[[ldx-- -
<p>Helper (for q2p).</p>
---ldx]]-- - -function xml.cdatatotext(e) - local dt = e.dt - if #dt == 1 then - local first = dt[1] - if first.tg == "@cd@" then - e.dt = first.dt - end - else - -- maybe option - end -end - --- local x = xml.convert("123") --- xml.texttocdata(xml.first(x,"a")) --- print(x) -- 23]]> - -function xml.texttocdata(e) -- could be a finalizer - local dt = e.dt - local s = xml.tostring(dt) -- no shortcut? - e.tg = "@cd@" - e.special = true - e.ns = "" - e.rn = "" - e.dt = { s } - e.at = nil -end - --- local x = xml.convert("123") --- xml.tocdata(xml.first(x,"a")) --- print(x) -- 123]]> - -function xml.elementtocdata(e) -- could be a finalizer - local dt = e.dt - local s = xml.tostring(e) -- no shortcut? - e.tg = "@cd@" - e.special = true - e.ns = "" - e.rn = "" - e.dt = { s } - e.at = nil -end - -xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share - -local entities = characters and characters.entities or nil -local builtinentities = xml.builtinentities - -function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex - if not entities then - require("char-ent") - entities = characters.entities - end - if entities and root and root.tg == "@rt@" and root.statistics then - local list = { } - local hexify = option == "hexadecimal" - for k, v in table.sortedhash(root.statistics.entities.names) do - if not builtinentities[k] then - local e = entities[k] - if not e then - e = format("[%s]",k) - elseif hexify then - e = format("&#%05X;",utfbyte(k)) - end - list[#list+1] = format(" ",k,e) - end - end - local dt = root.dt - local n = dt[1].tg == "@pi@" and 2 or 1 - if #list > 0 then - insert(dt, n, { "\n" }) - insert(dt, n, { - tg = "@dt@", -- beware, doctype is unparsed - dt = { format("Something [\n%s\n] ",concat(list)) }, - ns = "", - special = true, - }) - insert(dt, n, { "\n\n" }) - else - -- insert(dt, n, { table.serialize(root.statistics) }) - end - end -end - --- local str = [==[ --- --- --- test   test { test --- --- --- ]==] --- --- local x = xml.convert(str) --- xml.addentitiesdoctype(x,"hexadecimal") --- print(x) - ---[[ldx-- -
<p>Here are a few synonyms.</p>
---ldx]]-- - -xml.all = xml.each -xml.insert = xml.insertafter -xml.inject = xml.injectafter -xml.after = xml.insertafter -xml.before = xml.insertbefore -xml.process = xml.each - --- obsolete - -xml.obsolete = xml.obsolete or { } -local obsolete = xml.obsolete - -xml.strip_whitespace = xml.strip obsolete.strip_whitespace = xml.strip -xml.collect_elements = xml.collect obsolete.collect_elements = xml.collect -xml.delete_element = xml.delete obsolete.delete_element = xml.delete -xml.replace_element = xml.replace obsolete.replace_element = xml.replacet -xml.each_element = xml.each obsolete.each_element = xml.each -xml.process_elements = xml.process obsolete.process_elements = xml.process -xml.insert_element_after = xml.insertafter obsolete.insert_element_after = xml.insertafter -xml.insert_element_before = xml.insertbefore obsolete.insert_element_before = xml.insertbefore -xml.inject_element_after = xml.injectafter obsolete.inject_element_after = xml.injectafter -xml.inject_element_before = xml.injectbefore obsolete.inject_element_before = xml.injectbefore -xml.process_attributes = xml.processattributes obsolete.process_attributes = xml.processattributes -xml.collect_texts = xml.collecttexts obsolete.collect_texts = xml.collecttexts -xml.inject_element = xml.inject obsolete.inject_element = xml.inject -xml.remap_tag = xml.remaptag obsolete.remap_tag = xml.remaptag -xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname -xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace - --- new (probably ok) - -function xml.cdata(e) - if e then - local dt = e.dt - if dt and #dt == 1 then - local first = dt[1] - return first.tg == "@cd@" and first.dt[1] or "" - end - end - return "" -end - -function xml.finalizers.xml.cdata(collected) - if collected then - local e = collected[1] - if e then - local dt = e.dt - if dt and #dt == 1 then - local first = dt[1] - return first.tg == "@cd@" and first.dt[1] or "" - end - end - end - return "" -end - -function xml.insertcomment(e,str,n) -- also insertcdata - table.insert(e.dt,n or 1,{ - tg = "@cm@", - ns = "", - special = true, - at = { }, - dt = { str }, - }) -end - -function xml.setcdata(e,str) -- also setcomment - e.dt = { { - tg = "@cd@", - ns = "", - special = true, - at = { }, - dt = { str }, - } } -end - --- maybe helpers like this will move to an autoloader - -function xml.separate(x,pattern) - local collected = xmlapplylpath(x,pattern) - if collected then - for c=1,#collected do - local e = collected[c] - local d = e.dt - if d == x then - report_xml("warning: xml.separate changes root") - x = d - end - local t, n = { "\n" }, 1 - local i, nd = 1, #d - while i <= nd do - while i <= nd do - local di = d[i] - if type(di) == "string" then - if di == "\n" or find(di,"^%s+$") then -- first test is speedup - i = i + 1 - else - d[i] = strip(di) - break - end - else - break - end - end - if i > nd then - break - end - t[n+1] = "\n" - t[n+2] = d[i] - t[n+3] = "\n" - n = n + 3 - i = i + 1 - end - t[n+1] = "\n" - setmetatable(t,getmetatable(d)) - e.dt = t - end - end - return x -end - --- - -local helpers = xml.helpers or { } -xml.helpers = helpers - -local function normal(e,action) - local edt = e.dt - if edt then - for i=1,#edt do - local str = edt[i] - if type(str) == "string" and str ~= "" then - edt[i] = action(str) - end - end - end -end - -local function recurse(e,action) - local edt = e.dt - if edt then - for i=1,#edt do - local str = edt[i] - if type(str) ~= "string" then - recurse(str,action,recursive) - elseif str 
~= "" then - edt[i] = action(str) - end - end - end -end - -function helpers.recursetext(collected,action,recursive) - if recursive then - for i=1,#collected do - recurse(collected[i],action) - end - else - for i=1,#collected do - normal(collected[i],action) - end - end -end +if not modules then modules = { } end modules ['lxml-aux'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- not all functions here make sense anymore vbut we keep them for +-- compatibility reasons + +local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end) + +local report_xml = logs.reporter("xml") + +local xml = xml + +local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name +local xmlinheritedconvert = xml.inheritedconvert +local xmlapplylpath = xml.applylpath +local xmlfilter = xml.filter + +local type, setmetatable, getmetatable = type, setmetatable, getmetatable +local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat +local gmatch, gsub, format, find, strip = string.gmatch, string.gsub, string.format, string.find, string.strip +local utfbyte = utf.byte + +local function report(what,pattern,c,e) + report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern) +end + +local function withelements(e,handle,depth) + if e and handle then + local edt = e.dt + if edt then + depth = depth or 0 + for i=1,#edt do + local e = edt[i] + if type(e) == "table" then + handle(e,depth) + withelements(e,handle,depth+1) + end + end + end + end +end + +xml.withelements = withelements + +function xml.withelement(e,n,handle) -- slow + if e and n ~= 0 and handle then + local edt = e.dt + if edt then + if n > 0 then + for i=1,#edt do + local ei = edt[i] + if type(ei) == "table" then + if n == 1 then + handle(ei) + return + else + n = n - 1 + end + end + end + elseif n < 0 then + for i=#edt,1,-1 do + local ei = edt[i] + if type(ei) == "table" then + if n == -1 then + handle(ei) + return + else + n = n + 1 + end + end + end + end + end + end +end + +function xml.each(root,pattern,handle,reverse) + local collected = xmlapplylpath(root,pattern) + if collected then + if reverse then + for c=#collected,1,-1 do + handle(collected[c]) + end + else + for c=1,#collected do + handle(collected[c]) + end + end + return collected + end +end + +function xml.processattributes(root,pattern,handle) + local collected = xmlapplylpath(root,pattern) + if collected and handle then + for c=1,#collected do + handle(collected[c].at) + end + end + return collected +end + +--[[ldx-- +

The following functions collect elements and texts.
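A small usage sketch of the collectors defined below; the document literal and pattern are invented for illustration:

  local x     = xml.convert("<doc><item>alpha</item><item>beta</item></doc>")
  local nodes = xml.collect(x,"/doc/item")            -- table of matching element nodes (or nil)
  local texts = xml.collecttexts(x,"/doc/item",true)  -- with flatten: element content as strings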

+--ldx]]-- + +-- are these still needed -> lxml-cmp.lua + +function xml.collect(root, pattern) + return xmlapplylpath(root,pattern) +end + +function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle + local collected = xmlapplylpath(root,pattern) + if collected and flatten then + local xmltostring = xml.tostring + for c=1,#collected do + collected[c] = xmltostring(collected[c].dt) + end + end + return collected or { } +end + +function xml.collect_tags(root, pattern, nonamespace) + local collected = xmlapplylpath(root,pattern) + if collected then + local t, n = { }, 0 + for c=1,#collected do + local e = collected[c] + local ns, tg = e.ns, e.tg + n = n + 1 + if nonamespace then + t[n] = tg + elseif ns == "" then + t[n] = tg + else + t[n] = ns .. ":" .. tg + end + end + return t + end +end + +--[[ldx-- +

We've now arrived at the functions that manipulate the tree.
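A hedged sketch of the manipulation functions that follow; element names and snippets are made up:

  local x = xml.convert("<list><old>1</old><keep>2</keep></list>")
  xml.delete(x,"/list/old")                   -- remove all matches from their parents
  xml.replace(x,"/list/keep","<new>3</new>")  -- replace matches by the converted snippet
  xml.insertafter(x,"/list/new","<extra/>")   -- insert a converted sibling after each match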

+--ldx]]-- + +local no_root = { no_root = true } + +local function redo_ni(d) + for k=1,#d do + local dk = d[k] + if type(dk) == "table" then + dk.ni = k + end + end +end + +local function xmltoelement(whatever,root) + if not whatever then + return nil + end + local element + if type(whatever) == "string" then + element = xmlinheritedconvert(whatever,root) -- beware, not really a root + else + element = whatever -- we assume a table + end + if element.error then + return whatever -- string + end + if element then + --~ if element.ri then + --~ element = element.dt[element.ri].dt + --~ else + --~ element = element.dt + --~ end + end + return element +end + +xml.toelement = xmltoelement + +local function copiedelement(element,newparent) + if type(element) == "string" then + return element + else + element = xmlcopy(element).dt + if newparent and type(element) == "table" then + element.__p__ = newparent + end + return element + end +end + +function xml.delete(root,pattern) + if not pattern or pattern == "" then + local p = root.__p__ + if p then + if trace_manipulations then + report('deleting',"--",c,root) + end + local d = p.dt + remove(d,root.ni) + redo_ni(d) -- can be made faster and inlined + end + else + local collected = xmlapplylpath(root,pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local p = e.__p__ + if p then + if trace_manipulations then + report('deleting',pattern,c,e) + end + local d = p.dt + remove(d,e.ni) + redo_ni(d) -- can be made faster and inlined + end + end + end + end +end + +function xml.replace(root,pattern,whatever) + local element = root and xmltoelement(whatever,root) + local collected = element and xmlapplylpath(root,pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local p = e.__p__ + if p then + if trace_manipulations then + report('replacing',pattern,c,e) + end + local d = p.dt + d[e.ni] = copiedelement(element,p) + redo_ni(d) -- probably not needed + end + end + end +end + +local function wrap(e,wrapper) + local t = { + rn = e.rn, + tg = e.tg, + ns = e.ns, + at = e.at, + dt = e.dt, + __p__ = e, + } + setmetatable(t,getmetatable(e)) + e.rn = wrapper.rn or e.rn or "" + e.tg = wrapper.tg or e.tg or "" + e.ns = wrapper.ns or e.ns or "" + e.at = fastcopy(wrapper.at) + e.dt = { t } +end + +function xml.wrap(root,pattern,whatever) + if whatever then + local wrapper = xmltoelement(whatever,root) + local collected = xmlapplylpath(root,pattern) + if collected then + for c=1,#collected do + local e = collected[c] + if trace_manipulations then + report('wrapping',pattern,c,e) + end + wrap(e,wrapper) + end + end + else + wrap(root,xmltoelement(pattern)) + end +end + +local function inject_element(root,pattern,whatever,prepend) + local element = root and xmltoelement(whatever,root) + local collected = element and xmlapplylpath(root,pattern) + local function inject_e(e) + local r = e.__p__ + local d, k, rri = r.dt, e.ni, r.ri + local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt) + if edt then + local be, af + local cp = copiedelement(element,e) + if prepend then + be, af = cp, edt + else + be, af = edt, cp + end + local bn = #be + for i=1,#af do + bn = bn + 1 + be[bn] = af[i] + end + if rri then + r.dt[rri].dt = be + else + d[k].dt = be + end + redo_ni(d) + end + end + if not collected then + -- nothing + elseif collected.tg then + -- first or so + inject_e(collected) + else + for c=1,#collected do + inject_e(collected[c]) + end + end +end + +local function insert_element(root,pattern,whatever,before) 
-- todo: element als functie + local element = root and xmltoelement(whatever,root) + local collected = element and xmlapplylpath(root,pattern) + local function insert_e(e) + local r = e.__p__ + local d, k = r.dt, e.ni + if not before then + k = k + 1 + end + insert(d,k,copiedelement(element,r)) + redo_ni(d) + end + if not collected then + -- nothing + elseif collected.tg then + -- first or so + insert_e(collected) + else + for c=1,#collected do + insert_e(collected[c]) + end + end +end + +xml.insert_element = insert_element +xml.insertafter = insert_element +xml.insertbefore = function(r,p,e) insert_element(r,p,e,true) end +xml.injectafter = inject_element +xml.injectbefore = function(r,p,e) inject_element(r,p,e,true) end + +local function include(xmldata,pattern,attribute,recursive,loaddata) + -- parse="text" (default: xml), encoding="" (todo) + -- attribute = attribute or 'href' + pattern = pattern or 'include' + loaddata = loaddata or io.loaddata + local collected = xmlapplylpath(xmldata,pattern) + if collected then + for c=1,#collected do + local ek = collected[c] + local name = nil + local ekdt = ek.dt + local ekat = ek.at + local epdt = ek.__p__.dt + if not attribute or attribute == "" then + name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str + end + if not name then + for a in gmatch(attribute or "href","([^|]+)") do + name = ekat[a] + if name then break end + end + end + local data = (name and name ~= "" and loaddata(name)) or "" + if data == "" then + epdt[ek.ni] = "" -- xml.empty(d,k) + elseif ekat["parse"] == "text" then + -- for the moment hard coded + epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data) + else +--~ local settings = xmldata.settings +--~ settings.parent_root = xmldata -- to be tested +--~ local xi = xmlconvert(data,settings) + local xi = xmlinheritedconvert(data,xmldata) + if not xi then + epdt[ek.ni] = "" -- xml.empty(d,k) + else + if recursive then + include(xi,pattern,attribute,recursive,loaddata) + end + epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi) + end + end + end + end +end + +xml.include = include + +local function stripelement(e,nolines,anywhere) + local edt = e.dt + if edt then + if anywhere then + local t, n = { }, 0 + for e=1,#edt do + local str = edt[e] + if type(str) ~= "string" then + n = n + 1 + t[n] = str + elseif str ~= "" then + -- todo: lpeg for each case + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"^%s*(.-)%s*$","%1") + if str ~= "" then + n = n + 1 + t[n] = str + end + end + end + e.dt = t + else + -- we can assume a regular sparse xml table with no successive strings + -- otherwise we should use a while loop + if #edt > 0 then + -- strip front + local str = edt[1] + if type(str) ~= "string" then + -- nothing + elseif str == "" then + remove(edt,1) + else + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"^%s+","") + if str == "" then + remove(edt,1) + else + edt[1] = str + end + end + end + local nedt = #edt + if nedt > 0 then + -- strip end + local str = edt[nedt] + if type(str) ~= "string" then + -- nothing + elseif str == "" then + remove(edt) + else + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"%s+$","") + if str == "" then + remove(edt) + else + edt[nedt] = str + end + end + end + end + end + return e -- convenient +end + +xml.stripelement = stripelement + +function xml.strip(root,pattern,nolines,anywhere) -- strips all leading and trailing spacing + local collected = xmlapplylpath(root,pattern) -- beware, indices no 
longer are valid now + if collected then + for i=1,#collected do + stripelement(collected[i],nolines,anywhere) + end + end +end + +local function renamespace(root, oldspace, newspace) -- fast variant + local ndt = #root.dt + for i=1,ndt or 0 do + local e = root[i] + if type(e) == "table" then + if e.ns == oldspace then + e.ns = newspace + if e.rn then + e.rn = newspace + end + end + local edt = e.dt + if edt then + renamespace(edt, oldspace, newspace) + end + end + end +end + +xml.renamespace = renamespace + +function xml.remaptag(root, pattern, newtg) + local collected = xmlapplylpath(root,pattern) + if collected then + for c=1,#collected do + collected[c].tg = newtg + end + end +end + +function xml.remapnamespace(root, pattern, newns) + local collected = xmlapplylpath(root,pattern) + if collected then + for c=1,#collected do + collected[c].ns = newns + end + end +end + +function xml.checknamespace(root, pattern, newns) + local collected = xmlapplylpath(root,pattern) + if collected then + for c=1,#collected do + local e = collected[c] + if (not e.rn or e.rn == "") and e.ns == "" then + e.rn = newns + end + end + end +end + +function xml.remapname(root, pattern, newtg, newns, newrn) + local collected = xmlapplylpath(root,pattern) + if collected then + for c=1,#collected do + local e = collected[c] + e.tg, e.ns, e.rn = newtg, newns, newrn + end + end +end + +--[[ldx-- +

Helper (for q2p).
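As an illustration only (the sample document is invented), the cdata helper below can be used like this:

  local x = xml.convert("<a><![CDATA[1<2]]></a>")
  local a = xml.first(x,"/a")
  if a then
      xml.cdatatotext(a)  -- the single cdata child becomes plain text content
  end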

+--ldx]]-- + +function xml.cdatatotext(e) + local dt = e.dt + if #dt == 1 then + local first = dt[1] + if first.tg == "@cd@" then + e.dt = first.dt + end + else + -- maybe option + end +end + +-- local x = xml.convert("123") +-- xml.texttocdata(xml.first(x,"a")) +-- print(x) -- 23]]> + +function xml.texttocdata(e) -- could be a finalizer + local dt = e.dt + local s = xml.tostring(dt) -- no shortcut? + e.tg = "@cd@" + e.special = true + e.ns = "" + e.rn = "" + e.dt = { s } + e.at = nil +end + +-- local x = xml.convert("123") +-- xml.tocdata(xml.first(x,"a")) +-- print(x) -- 123]]> + +function xml.elementtocdata(e) -- could be a finalizer + local dt = e.dt + local s = xml.tostring(e) -- no shortcut? + e.tg = "@cd@" + e.special = true + e.ns = "" + e.rn = "" + e.dt = { s } + e.at = nil +end + +xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share + +local entities = characters and characters.entities or nil +local builtinentities = xml.builtinentities + +function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex + if not entities then + require("char-ent") + entities = characters.entities + end + if entities and root and root.tg == "@rt@" and root.statistics then + local list = { } + local hexify = option == "hexadecimal" + for k, v in table.sortedhash(root.statistics.entities.names) do + if not builtinentities[k] then + local e = entities[k] + if not e then + e = format("[%s]",k) + elseif hexify then + e = format("&#%05X;",utfbyte(k)) + end + list[#list+1] = format(" ",k,e) + end + end + local dt = root.dt + local n = dt[1].tg == "@pi@" and 2 or 1 + if #list > 0 then + insert(dt, n, { "\n" }) + insert(dt, n, { + tg = "@dt@", -- beware, doctype is unparsed + dt = { format("Something [\n%s\n] ",concat(list)) }, + ns = "", + special = true, + }) + insert(dt, n, { "\n\n" }) + else + -- insert(dt, n, { table.serialize(root.statistics) }) + end + end +end + +-- local str = [==[ +-- +-- +-- test   test { test +-- +-- +-- ]==] +-- +-- local x = xml.convert(str) +-- xml.addentitiesdoctype(x,"hexadecimal") +-- print(x) + +--[[ldx-- +

Here are a few synonyms.

+--ldx]]-- + +xml.all = xml.each +xml.insert = xml.insertafter +xml.inject = xml.injectafter +xml.after = xml.insertafter +xml.before = xml.insertbefore +xml.process = xml.each + +-- obsolete + +xml.obsolete = xml.obsolete or { } +local obsolete = xml.obsolete + +xml.strip_whitespace = xml.strip obsolete.strip_whitespace = xml.strip +xml.collect_elements = xml.collect obsolete.collect_elements = xml.collect +xml.delete_element = xml.delete obsolete.delete_element = xml.delete +xml.replace_element = xml.replace obsolete.replace_element = xml.replacet +xml.each_element = xml.each obsolete.each_element = xml.each +xml.process_elements = xml.process obsolete.process_elements = xml.process +xml.insert_element_after = xml.insertafter obsolete.insert_element_after = xml.insertafter +xml.insert_element_before = xml.insertbefore obsolete.insert_element_before = xml.insertbefore +xml.inject_element_after = xml.injectafter obsolete.inject_element_after = xml.injectafter +xml.inject_element_before = xml.injectbefore obsolete.inject_element_before = xml.injectbefore +xml.process_attributes = xml.processattributes obsolete.process_attributes = xml.processattributes +xml.collect_texts = xml.collecttexts obsolete.collect_texts = xml.collecttexts +xml.inject_element = xml.inject obsolete.inject_element = xml.inject +xml.remap_tag = xml.remaptag obsolete.remap_tag = xml.remaptag +xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname +xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace + +-- new (probably ok) + +function xml.cdata(e) + if e then + local dt = e.dt + if dt and #dt == 1 then + local first = dt[1] + return first.tg == "@cd@" and first.dt[1] or "" + end + end + return "" +end + +function xml.finalizers.xml.cdata(collected) + if collected then + local e = collected[1] + if e then + local dt = e.dt + if dt and #dt == 1 then + local first = dt[1] + return first.tg == "@cd@" and first.dt[1] or "" + end + end + end + return "" +end + +function xml.insertcomment(e,str,n) -- also insertcdata + table.insert(e.dt,n or 1,{ + tg = "@cm@", + ns = "", + special = true, + at = { }, + dt = { str }, + }) +end + +function xml.setcdata(e,str) -- also setcomment + e.dt = { { + tg = "@cd@", + ns = "", + special = true, + at = { }, + dt = { str }, + } } +end + +-- maybe helpers like this will move to an autoloader + +function xml.separate(x,pattern) + local collected = xmlapplylpath(x,pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local d = e.dt + if d == x then + report_xml("warning: xml.separate changes root") + x = d + end + local t, n = { "\n" }, 1 + local i, nd = 1, #d + while i <= nd do + while i <= nd do + local di = d[i] + if type(di) == "string" then + if di == "\n" or find(di,"^%s+$") then -- first test is speedup + i = i + 1 + else + d[i] = strip(di) + break + end + else + break + end + end + if i > nd then + break + end + t[n+1] = "\n" + t[n+2] = d[i] + t[n+3] = "\n" + n = n + 3 + i = i + 1 + end + t[n+1] = "\n" + setmetatable(t,getmetatable(d)) + e.dt = t + end + end + return x +end + +-- + +local helpers = xml.helpers or { } +xml.helpers = helpers + +local function normal(e,action) + local edt = e.dt + if edt then + for i=1,#edt do + local str = edt[i] + if type(str) == "string" and str ~= "" then + edt[i] = action(str) + end + end + end +end + +local function recurse(e,action) + local edt = e.dt + if edt then + for i=1,#edt do + local str = edt[i] + if type(str) ~= "string" then + recurse(str,action,recursive) + elseif str 
~= "" then + edt[i] = action(str) + end + end + end +end + +function helpers.recursetext(collected,action,recursive) + if recursive then + for i=1,#collected do + recurse(collected[i],action) + end + else + for i=1,#collected do + normal(collected[i],action) + end + end +end diff --git a/tex/context/base/lxml-css.lua b/tex/context/base/lxml-css.lua index c5a85c2bd..f9542029f 100644 --- a/tex/context/base/lxml-css.lua +++ b/tex/context/base/lxml-css.lua @@ -1,158 +1,158 @@ -if not modules then modules = { } end modules ['lxml-css'] = { - version = 1.001, - comment = "companion to lxml-css.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local tonumber, rawset = tonumber, rawset -local lower, format = string.lower, string.format -local P, S, C, R, Cb, Cg, Carg, Ct, Cc, Cf = lpeg.P, lpeg.S, lpeg.C, lpeg.R, lpeg.Cb, lpeg.Cg, lpeg.Carg, lpeg.Ct, lpeg.Cc, lpeg.Cf -local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns - -xml.css = xml.css or { } -local css = xml.css - -if not number.dimenfactors then - require("util-dim.lua") -end - -local dimenfactors = number.dimenfactors -local bpf = 1/dimenfactors.bp -local cmf = 1/dimenfactors.cm -local mmf = 1/dimenfactors.mm -local inf = 1/dimenfactors["in"] - -local percentage, exheight, emwidth, pixels - -if tex then - - local exheights = fonts.hashes.exheights - local emwidths = fonts.hashes.emwidths - - percentage = function(s,pcf) return tonumber(s) * (pcf or tex.hsize) end - exheight = function(s,exf) return tonumber(s) * (exf or exheights[true]) end - emwidth = function(s,emf) return tonumber(s) * (emf or emwidths[true]) end - pixels = function(s,pxf) return tonumber(s) * (pxf or emwidths[true]/300) end - -else - - local function generic(s,unit) return tonumber(s) * unit end - - percentage = generic - exheight = generic - emwidth = generic - pixels = generic - -end - -local validdimen = Cg(lpegpatterns.number,'a') * ( - Cb('a') * P("pt") / function(s) return tonumber(s) * bpf end - + Cb('a') * P("cm") / function(s) return tonumber(s) * cmf end - + Cb('a') * P("mm") / function(s) return tonumber(s) * mmf end - + Cb('a') * P("in") / function(s) return tonumber(s) * inf end - + Cb('a') * P("px") * Carg(1) / pixels - + Cb('a') * P("%") * Carg(2) / percentage - + Cb('a') * P("ex") * Carg(3) / exheight - + Cb('a') * P("em") * Carg(4) / emwidth - + Cb('a') * Carg(1) / pixels - ) - -local pattern = (validdimen * lpegpatterns.whitespace^0)^1 - --- todo: default if "" - -local function dimension(str,pixel,percent,exheight,emwidth) - return (lpegmatch(pattern,str,1,pixel,percent,exheight,emwidth)) -end - -local function padding(str,pixel,percent,exheight,emwidth) - local top, bottom, left, right = lpegmatch(pattern,str,1,pixel,percent,exheight,emwidth) - if not bottom then - bottom, left, right = top, top, top - elseif not left then - bottom, left, right = top, bottom, bottom - elseif not right then - bottom, left, right = left, bottom, bottom - end - return top, bottom, left, right -end - -css.dimension = dimension -css.padding = padding - --- local hsize = 655360*100 --- local exheight = 65536*4 --- local emwidth = 65536*10 --- local pixel = emwidth/100 --- --- print(padding("10px",pixel,hsize,exheight,emwidth)) --- print(padding("10px 20px",pixel,hsize,exheight,emwidth)) --- print(padding("10px 20px 30px",pixel,hsize,exheight,emwidth)) --- print(padding("10px 20px 30px 40px",pixel,hsize,exheight,emwidth)) --- --- 
print(padding("10%",pixel,hsize,exheight,emwidth)) --- print(padding("10% 20%",pixel,hsize,exheight,emwidth)) --- print(padding("10% 20% 30%",pixel,hsize,exheight,emwidth)) --- print(padding("10% 20% 30% 40%",pixel,hsize,exheight,emwidth)) --- --- print(padding("10",pixel,hsize,exheight,emwidth)) --- print(padding("10 20",pixel,hsize,exheight,emwidth)) --- print(padding("10 20 30",pixel,hsize,exheight,emwidth)) --- print(padding("10 20 30 40",pixel,hsize,exheight,emwidth)) --- --- print(padding("10pt",pixel,hsize,exheight,emwidth)) --- print(padding("10pt 20pt",pixel,hsize,exheight,emwidth)) --- print(padding("10pt 20pt 30pt",pixel,hsize,exheight,emwidth)) --- print(padding("10pt 20pt 30pt 40pt",pixel,hsize,exheight,emwidth)) - --- print(padding("0",pixel,hsize,exheight,emwidth)) - --- local currentfont = font.current --- local texdimen = tex.dimen --- local hashes = fonts.hashes --- local quads = hashes.quads --- local xheights = hashes.xheights --- --- local function padding(str) --- local font = currentfont() --- local exheight = xheights[font] --- local emwidth = quads[font] --- local hsize = texdimen.hsize/100 --- local pixel = emwidth/100 --- return padding(str,pixel,hsize,exheight,emwidth) --- end --- --- function css.simplepadding(str) --- context("%ssp",padding(str,pixel,hsize,exheight,emwidth)) --- end - -local pattern = Cf( Ct("") * ( - Cg( - Cc("style") * ( - C("italic") - + C("oblique") - + C("slanted") / "oblique" - ) - + Cc("variant") * ( - (C("smallcaps") + C("caps")) / "small-caps" - ) - + Cc("weight") * - C("bold") - + Cc("family") * ( - (C("mono") + C("type")) / "monospace" -- just ignore the "space(d)" - + (C("sansserif") + C("sans")) / "sans-serif" -- match before serif - + C("serif") - ) - ) + P(1) -)^0 , rawset) - -function css.fontspecification(str) - return str and lpegmatch(pattern,lower(str)) -end - -function css.colorspecification(str) - local c = str and attributes.colors.values[tonumber(str)] - return c and format("rgb(%s%%,%s%%,%s%%)",c[3]*100,c[4]*100,c[5]*100) -end +if not modules then modules = { } end modules ['lxml-css'] = { + version = 1.001, + comment = "companion to lxml-css.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local tonumber, rawset = tonumber, rawset +local lower, format = string.lower, string.format +local P, S, C, R, Cb, Cg, Carg, Ct, Cc, Cf = lpeg.P, lpeg.S, lpeg.C, lpeg.R, lpeg.Cb, lpeg.Cg, lpeg.Carg, lpeg.Ct, lpeg.Cc, lpeg.Cf +local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns + +xml.css = xml.css or { } +local css = xml.css + +if not number.dimenfactors then + require("util-dim.lua") +end + +local dimenfactors = number.dimenfactors +local bpf = 1/dimenfactors.bp +local cmf = 1/dimenfactors.cm +local mmf = 1/dimenfactors.mm +local inf = 1/dimenfactors["in"] + +local percentage, exheight, emwidth, pixels + +if tex then + + local exheights = fonts.hashes.exheights + local emwidths = fonts.hashes.emwidths + + percentage = function(s,pcf) return tonumber(s) * (pcf or tex.hsize) end + exheight = function(s,exf) return tonumber(s) * (exf or exheights[true]) end + emwidth = function(s,emf) return tonumber(s) * (emf or emwidths[true]) end + pixels = function(s,pxf) return tonumber(s) * (pxf or emwidths[true]/300) end + +else + + local function generic(s,unit) return tonumber(s) * unit end + + percentage = generic + exheight = generic + emwidth = generic + pixels = generic + +end + +local validdimen = 
Cg(lpegpatterns.number,'a') * ( + Cb('a') * P("pt") / function(s) return tonumber(s) * bpf end + + Cb('a') * P("cm") / function(s) return tonumber(s) * cmf end + + Cb('a') * P("mm") / function(s) return tonumber(s) * mmf end + + Cb('a') * P("in") / function(s) return tonumber(s) * inf end + + Cb('a') * P("px") * Carg(1) / pixels + + Cb('a') * P("%") * Carg(2) / percentage + + Cb('a') * P("ex") * Carg(3) / exheight + + Cb('a') * P("em") * Carg(4) / emwidth + + Cb('a') * Carg(1) / pixels + ) + +local pattern = (validdimen * lpegpatterns.whitespace^0)^1 + +-- todo: default if "" + +local function dimension(str,pixel,percent,exheight,emwidth) + return (lpegmatch(pattern,str,1,pixel,percent,exheight,emwidth)) +end + +local function padding(str,pixel,percent,exheight,emwidth) + local top, bottom, left, right = lpegmatch(pattern,str,1,pixel,percent,exheight,emwidth) + if not bottom then + bottom, left, right = top, top, top + elseif not left then + bottom, left, right = top, bottom, bottom + elseif not right then + bottom, left, right = left, bottom, bottom + end + return top, bottom, left, right +end + +css.dimension = dimension +css.padding = padding + +-- local hsize = 655360*100 +-- local exheight = 65536*4 +-- local emwidth = 65536*10 +-- local pixel = emwidth/100 +-- +-- print(padding("10px",pixel,hsize,exheight,emwidth)) +-- print(padding("10px 20px",pixel,hsize,exheight,emwidth)) +-- print(padding("10px 20px 30px",pixel,hsize,exheight,emwidth)) +-- print(padding("10px 20px 30px 40px",pixel,hsize,exheight,emwidth)) +-- +-- print(padding("10%",pixel,hsize,exheight,emwidth)) +-- print(padding("10% 20%",pixel,hsize,exheight,emwidth)) +-- print(padding("10% 20% 30%",pixel,hsize,exheight,emwidth)) +-- print(padding("10% 20% 30% 40%",pixel,hsize,exheight,emwidth)) +-- +-- print(padding("10",pixel,hsize,exheight,emwidth)) +-- print(padding("10 20",pixel,hsize,exheight,emwidth)) +-- print(padding("10 20 30",pixel,hsize,exheight,emwidth)) +-- print(padding("10 20 30 40",pixel,hsize,exheight,emwidth)) +-- +-- print(padding("10pt",pixel,hsize,exheight,emwidth)) +-- print(padding("10pt 20pt",pixel,hsize,exheight,emwidth)) +-- print(padding("10pt 20pt 30pt",pixel,hsize,exheight,emwidth)) +-- print(padding("10pt 20pt 30pt 40pt",pixel,hsize,exheight,emwidth)) + +-- print(padding("0",pixel,hsize,exheight,emwidth)) + +-- local currentfont = font.current +-- local texdimen = tex.dimen +-- local hashes = fonts.hashes +-- local quads = hashes.quads +-- local xheights = hashes.xheights +-- +-- local function padding(str) +-- local font = currentfont() +-- local exheight = xheights[font] +-- local emwidth = quads[font] +-- local hsize = texdimen.hsize/100 +-- local pixel = emwidth/100 +-- return padding(str,pixel,hsize,exheight,emwidth) +-- end +-- +-- function css.simplepadding(str) +-- context("%ssp",padding(str,pixel,hsize,exheight,emwidth)) +-- end + +local pattern = Cf( Ct("") * ( + Cg( + Cc("style") * ( + C("italic") + + C("oblique") + + C("slanted") / "oblique" + ) + + Cc("variant") * ( + (C("smallcaps") + C("caps")) / "small-caps" + ) + + Cc("weight") * + C("bold") + + Cc("family") * ( + (C("mono") + C("type")) / "monospace" -- just ignore the "space(d)" + + (C("sansserif") + C("sans")) / "sans-serif" -- match before serif + + C("serif") + ) + ) + P(1) +)^0 , rawset) + +function css.fontspecification(str) + return str and lpegmatch(pattern,lower(str)) +end + +function css.colorspecification(str) + local c = str and attributes.colors.values[tonumber(str)] + return c and 
format("rgb(%s%%,%s%%,%s%%)",c[3]*100,c[4]*100,c[5]*100) +end diff --git a/tex/context/base/lxml-ctx.lua b/tex/context/base/lxml-ctx.lua index 968dbda71..2694839dd 100644 --- a/tex/context/base/lxml-ctx.lua +++ b/tex/context/base/lxml-ctx.lua @@ -1,135 +1,135 @@ -if not modules then modules = { } end modules ['lxml-ctx'] = { - version = 1.001, - comment = "companion to lxml-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- is this still used? - -local format, find = string.format, string.find - -local xml = xml - -xml.ctx = { } -xml.ctx.enhancers = { } - --- hashen - -function xml.ctx.enhancers.compound(root,lpath,before,tokens,after) -- todo lpeg - local before = before or "[%a%d][%a%d][%a%d]" - local tokens = tokens or "[%/%-]" - local after = after or "[%a%d][%a%d][%a%d]" - local pattern = "(" .. before .. ")(" .. tokens .. ")(" .. after .. ")" - local action = function(a,b,c) - return a .. "" .. c -- formatters["%s%s"](a,b,c) - end - xml.enhance(root,lpath,pattern,action) -- still present? -end - -local loaded = { } - -local nodesettostring = xml.nodesettostring - --- maybe use detokenize instead of \type - -function xml.ctx.tshow(specification) - local pattern = specification.pattern - local xmlroot = specification.xmlroot - local attribute = specification.attribute - if context then - local xmlpattern = pattern - if not find(xmlpattern,"^[%a]+://") then - xmlpattern = "xml://" .. pattern - end - local parsed = xml.lpath(xmlpattern) - local titlecommand = specification.title or "type" - if parsed.state then - context[titlecommand]("pattern: " .. pattern .. " (".. parsed.state .. ")") - else - context[titlecommand]("pattern: " .. pattern) - end - context.starttabulate({ "|Tr|Tl|Tp|" } ) - if specification.warning then - local comment = parsed.comment - if comment then - for k=1,#comment do - context.NC() - context("!") - context.NC() - context.rlap(comment[k]) - context.NR() - end - context.TB() - end - end - for p=1,#parsed do - local pp = parsed[p] - local kind = pp.kind - context.NC() - context(p) - context.NC() - context(kind) - context.NC() - if kind == "axis" then - context(pp.axis) - elseif kind == "nodes" then - context(nodesettostring(pp.nodes,pp.nodetest)) - elseif kind == "expression" then ---~ context("%s => %s",pp.expression,pp.converted) - context(pp.expression) - elseif kind == "finalizer" then - context("%s(%s)",pp.name,pp.arguments) - elseif kind == "error" and pp.error then - context(pp.error) - end - context.NC() - context.NR() - end - context.stoptabulate() - if xmlroot and xmlroot ~= "" then - if not loaded[xmlroot] then - loaded[xmlroot] = xml.convert(buffers.getcontent(xmlroot)) - end - local collected = xml.filter(loaded[xmlroot],xmlpattern) - if collected then - local tc = type(collected) - if not tc then - -- skip - else - context.blank() - context.type("result : ") - if tc == "string" then - context.type(collected) - elseif tc == "table" then - if collected.tg then - collected = { collected } - end - for c=1,#collected do - local cc = collected[c] - if attribute and attribute ~= "" then - local ccat = cc.at - local a = ccat and ccat[attribute] - if a and a ~= "" then - context.type(a) - context.type(">") - end - end - local ccns = cc.ns - if ccns == "" then - context.type(cc.tg) - else - context.type(ccns .. ":" .. 
cc.tg) - end - context.space() - end - else - context.type(tostring(tc)) - end - context.blank() - end - end - end - end -end +if not modules then modules = { } end modules ['lxml-ctx'] = { + version = 1.001, + comment = "companion to lxml-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- is this still used? + +local format, find = string.format, string.find + +local xml = xml + +xml.ctx = { } +xml.ctx.enhancers = { } + +-- hashen + +function xml.ctx.enhancers.compound(root,lpath,before,tokens,after) -- todo lpeg + local before = before or "[%a%d][%a%d][%a%d]" + local tokens = tokens or "[%/%-]" + local after = after or "[%a%d][%a%d][%a%d]" + local pattern = "(" .. before .. ")(" .. tokens .. ")(" .. after .. ")" + local action = function(a,b,c) + return a .. "" .. c -- formatters["%s%s"](a,b,c) + end + xml.enhance(root,lpath,pattern,action) -- still present? +end + +local loaded = { } + +local nodesettostring = xml.nodesettostring + +-- maybe use detokenize instead of \type + +function xml.ctx.tshow(specification) + local pattern = specification.pattern + local xmlroot = specification.xmlroot + local attribute = specification.attribute + if context then + local xmlpattern = pattern + if not find(xmlpattern,"^[%a]+://") then + xmlpattern = "xml://" .. pattern + end + local parsed = xml.lpath(xmlpattern) + local titlecommand = specification.title or "type" + if parsed.state then + context[titlecommand]("pattern: " .. pattern .. " (".. parsed.state .. ")") + else + context[titlecommand]("pattern: " .. pattern) + end + context.starttabulate({ "|Tr|Tl|Tp|" } ) + if specification.warning then + local comment = parsed.comment + if comment then + for k=1,#comment do + context.NC() + context("!") + context.NC() + context.rlap(comment[k]) + context.NR() + end + context.TB() + end + end + for p=1,#parsed do + local pp = parsed[p] + local kind = pp.kind + context.NC() + context(p) + context.NC() + context(kind) + context.NC() + if kind == "axis" then + context(pp.axis) + elseif kind == "nodes" then + context(nodesettostring(pp.nodes,pp.nodetest)) + elseif kind == "expression" then +--~ context("%s => %s",pp.expression,pp.converted) + context(pp.expression) + elseif kind == "finalizer" then + context("%s(%s)",pp.name,pp.arguments) + elseif kind == "error" and pp.error then + context(pp.error) + end + context.NC() + context.NR() + end + context.stoptabulate() + if xmlroot and xmlroot ~= "" then + if not loaded[xmlroot] then + loaded[xmlroot] = xml.convert(buffers.getcontent(xmlroot)) + end + local collected = xml.filter(loaded[xmlroot],xmlpattern) + if collected then + local tc = type(collected) + if not tc then + -- skip + else + context.blank() + context.type("result : ") + if tc == "string" then + context.type(collected) + elseif tc == "table" then + if collected.tg then + collected = { collected } + end + for c=1,#collected do + local cc = collected[c] + if attribute and attribute ~= "" then + local ccat = cc.at + local a = ccat and ccat[attribute] + if a and a ~= "" then + context.type(a) + context.type(">") + end + end + local ccns = cc.ns + if ccns == "" then + context.type(cc.tg) + else + context.type(ccns .. ":" .. 
cc.tg) + end + context.space() + end + else + context.type(tostring(tc)) + end + context.blank() + end + end + end + end +end diff --git a/tex/context/base/lxml-dir.lua b/tex/context/base/lxml-dir.lua index 3c68664ae..4f0f61b71 100644 --- a/tex/context/base/lxml-dir.lua +++ b/tex/context/base/lxml-dir.lua @@ -1,114 +1,114 @@ -if not modules then modules = { } end modules ['lxml-dir'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local gsub = string.gsub -local formatters = string.formatters - --- --- --- --- --- --- --- - -local lxml, context = lxml, context - -local getid = lxml.getid - -lxml.directives = lxml.directives or { } -local directives = lxml.directives - -local report_lxml = logs.reporter("xml","tex") - -local data = { - setup = { }, - before = { }, - after = { } -} - -local function load_setup(filename) - local fullname = resolvers.findtexfile(filename) or "" - if fullname ~= "" then - filename = fullname - end - local collection = xml.applylpath({ getid(xml.load(filename)) },"directive") -- is { } needed ? - if collection then - local valid = 0 - for i=1,#collection do - local at = collection[i].at - local attribute, value, element = at.attribute or "", at.value or "", at.element or '*' - local setup, before, after = at.setup or "", at.before or "", at.after or "" - if attribute ~= "" and value ~= "" then - local key = formatters["%s::%s::%s"](element,attribute,value) - local t = data[key] or { } - if setup ~= "" then t.setup = setup end - if before ~= "" then t.before = before end - if after ~= "" then t.after = after end - data[key] = t - valid = valid + 1 - end - end - report_lxml("%s directives found in %a, valid %s",#collection,filename,valid) - else - report_lxml("no directives found in %a",filename) - end -end - -local function handle_setup(category,root,attribute,element) - root = getid(root) - if attribute then - local value = root.at[attribute] - if value then - if not element then - local ns, tg = root.rn or root.ns, root.tg - if ns == "" then - element = tg - else - element = ns .. ':' .. 
tg - end - end - local setup = data[formatters["%s::%s::%s"](element,attribute,value)] - if setup then - setup = setup[category] - end - if setup then - context.directsetup(setup) - else - setup = data[formatters["%s::%s::*"](element,attribute)] - if setup then - setup = setup[category] - end - if setup then - setup = gsub(setup,'%*',value) - context.directsetup(setup) - end - end - end - end -end - -directives.load = load_setup -directives.handle = handle_setup - -function directives.setup(root,attribute,element) - handle_setup('setup',root,attribute,element) -end -function directives.before(root,attribute,element) - handle_setup('before',root,attribute,element) -end -function directives.after(root,attribute,element) - handle_setup('after',root,attribute,element) -end +if not modules then modules = { } end modules ['lxml-dir'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local gsub = string.gsub +local formatters = string.formatters + +-- +-- +-- +-- +-- +-- +-- + +local lxml, context = lxml, context + +local getid = lxml.getid + +lxml.directives = lxml.directives or { } +local directives = lxml.directives + +local report_lxml = logs.reporter("xml","tex") + +local data = { + setup = { }, + before = { }, + after = { } +} + +local function load_setup(filename) + local fullname = resolvers.findtexfile(filename) or "" + if fullname ~= "" then + filename = fullname + end + local collection = xml.applylpath({ getid(xml.load(filename)) },"directive") -- is { } needed ? + if collection then + local valid = 0 + for i=1,#collection do + local at = collection[i].at + local attribute, value, element = at.attribute or "", at.value or "", at.element or '*' + local setup, before, after = at.setup or "", at.before or "", at.after or "" + if attribute ~= "" and value ~= "" then + local key = formatters["%s::%s::%s"](element,attribute,value) + local t = data[key] or { } + if setup ~= "" then t.setup = setup end + if before ~= "" then t.before = before end + if after ~= "" then t.after = after end + data[key] = t + valid = valid + 1 + end + end + report_lxml("%s directives found in %a, valid %s",#collection,filename,valid) + else + report_lxml("no directives found in %a",filename) + end +end + +local function handle_setup(category,root,attribute,element) + root = getid(root) + if attribute then + local value = root.at[attribute] + if value then + if not element then + local ns, tg = root.rn or root.ns, root.tg + if ns == "" then + element = tg + else + element = ns .. ':' .. 
tg + end + end + local setup = data[formatters["%s::%s::%s"](element,attribute,value)] + if setup then + setup = setup[category] + end + if setup then + context.directsetup(setup) + else + setup = data[formatters["%s::%s::*"](element,attribute)] + if setup then + setup = setup[category] + end + if setup then + setup = gsub(setup,'%*',value) + context.directsetup(setup) + end + end + end + end +end + +directives.load = load_setup +directives.handle = handle_setup + +function directives.setup(root,attribute,element) + handle_setup('setup',root,attribute,element) +end +function directives.before(root,attribute,element) + handle_setup('before',root,attribute,element) +end +function directives.after(root,attribute,element) + handle_setup('after',root,attribute,element) +end diff --git a/tex/context/base/lxml-ent.lua b/tex/context/base/lxml-ent.lua index a5c5bc389..e9fb0e2b8 100644 --- a/tex/context/base/lxml-ent.lua +++ b/tex/context/base/lxml-ent.lua @@ -1,57 +1,57 @@ -if not modules then modules = { } end modules ['lxml-ent'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local type, next, tonumber = type, next, tonumber -local byte, format = string.byte, string.format -local utfchar = utf.char -local lpegmatch = lpeg.match - ---[[ldx-- -

We provide (at least here) two entity handlers. The more extensive resolver consults a hash first, tries to convert to UTF next, and finally calls a handler when one is defined. When this all fails, the original entity is returned.

- -

We do things differently now, but it's still somewhat experimental.

---ldx]]-- - -local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end) - -local report_xml = logs.reporter("xml") - -local xml = xml - -xml.entities = xml.entities or { } - -storage.register("xml/entities", xml.entities, "xml.entities" ) - -local entities = xml.entities -- maybe some day properties - -function xml.registerentity(key,value) - entities[key] = value - if trace_entities then - report_xml("registering entity %a as %a",key,value) - end -end - -if characters and characters.entities then - - function characters.registerentities(forcecopy) - if forcecopy then - table.setmetatableindex(entities,nil) - for name, value in next, characters.entities do - if not entities[name] then - entities[name] = value - end - end - else - table.setmetatableindex(entities,characters.entities) - end - end - -end +if not modules then modules = { } end modules ['lxml-ent'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local type, next, tonumber = type, next, tonumber +local byte, format = string.byte, string.format +local utfchar = utf.char +local lpegmatch = lpeg.match + +--[[ldx-- +

We provide (at least here) two entity handlers. The more extensive resolver consults a hash first, tries to convert to UTF next, and finally calls a handler when one is defined. When this all fails, the original entity is returned.

+ +

We do things differently now, but it's still somewhat experimental.
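A minimal sketch of the registration function defined below; the entity name and value are invented:

  xml.registerentity("tex","TeX")
  -- a later &tex; reference resolved through these handlers then expands to "TeX"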

+--ldx]]-- + +local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end) + +local report_xml = logs.reporter("xml") + +local xml = xml + +xml.entities = xml.entities or { } + +storage.register("xml/entities", xml.entities, "xml.entities" ) + +local entities = xml.entities -- maybe some day properties + +function xml.registerentity(key,value) + entities[key] = value + if trace_entities then + report_xml("registering entity %a as %a",key,value) + end +end + +if characters and characters.entities then + + function characters.registerentities(forcecopy) + if forcecopy then + table.setmetatableindex(entities,nil) + for name, value in next, characters.entities do + if not entities[name] then + entities[name] = value + end + end + else + table.setmetatableindex(entities,characters.entities) + end + end + +end diff --git a/tex/context/base/lxml-inf.lua b/tex/context/base/lxml-inf.lua index 8f1157c7d..2c130791a 100644 --- a/tex/context/base/lxml-inf.lua +++ b/tex/context/base/lxml-inf.lua @@ -1,58 +1,58 @@ -if not modules then modules = { } end modules ['lxml-inf'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This file will be loaded runtime by x-pending.tex. - -local concat = table.concat - -local xmlwithelements = xml.withelements -local getid = lxml.getid - -local status, stack - -local function get(e,d) - local ns, tg = e.ns, e.tg - local name = tg - if ns ~= "" then name = ns .. ":" .. tg end - stack[d] = name - local ec = e.command - if ec == true then - ec = "system: text" - elseif ec == false then - ec = "system: skip" - elseif ec == nil then - ec = "system: not set" - elseif type(ec) == "string" then - ec = "setup: " .. ec - else -- function - ec = tostring(ec) - end - local tag = concat(stack," => ",1,d) - local s = status[tag] - if not s then - s = { } - status[tag] = s - end - s[ec] = (s[ec] or 0) + 1 -end - -local function get_command_status(id) - status, stack = {}, {} - if id then - xmlwithelements(getid(id),get) - return status - else - local t = { } - for id, _ in next, loaded do - t[id] = get_command_status(id) - end - return t - end -end - -lxml.get_command_status = get_command_status +if not modules then modules = { } end modules ['lxml-inf'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This file will be loaded runtime by x-pending.tex. + +local concat = table.concat + +local xmlwithelements = xml.withelements +local getid = lxml.getid + +local status, stack + +local function get(e,d) + local ns, tg = e.ns, e.tg + local name = tg + if ns ~= "" then name = ns .. ":" .. tg end + stack[d] = name + local ec = e.command + if ec == true then + ec = "system: text" + elseif ec == false then + ec = "system: skip" + elseif ec == nil then + ec = "system: not set" + elseif type(ec) == "string" then + ec = "setup: " .. 
ec + else -- function + ec = tostring(ec) + end + local tag = concat(stack," => ",1,d) + local s = status[tag] + if not s then + s = { } + status[tag] = s + end + s[ec] = (s[ec] or 0) + 1 +end + +local function get_command_status(id) + status, stack = {}, {} + if id then + xmlwithelements(getid(id),get) + return status + else + local t = { } + for id, _ in next, loaded do + t[id] = get_command_status(id) + end + return t + end +end + +lxml.get_command_status = get_command_status diff --git a/tex/context/base/lxml-lpt.lua b/tex/context/base/lxml-lpt.lua index 51ab321b9..2f57ced5b 100644 --- a/tex/context/base/lxml-lpt.lua +++ b/tex/context/base/lxml-lpt.lua @@ -1,1466 +1,1466 @@ -if not modules then modules = { } end modules ['lxml-lpt'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- e.ni is only valid after a filter run --- todo: B/C/[get first match] - -local concat, remove, insert = table.concat, table.remove, table.insert -local type, next, tonumber, tostring, setmetatable, load, select = type, next, tonumber, tostring, setmetatable, load, select -local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep -local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns - -local setmetatableindex = table.setmetatableindex -local formatters = string.formatters -- no need (yet) as paths are cached anyway - --- beware, this is not xpath ... e.g. position is different (currently) and --- we have reverse-sibling as reversed preceding sibling - ---[[ldx-- -

This module can be used stand-alone but also inside MkIV, in which case it hooks into the tracker code. Therefore we provide a few functions that set the tracers. Here we overload a previously defined function.
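For instance, the lpath tracker registered further down can be switched on at run time; a sketch (the command line spelling is the usual trackers directive):

  trackers.enable("xml.path")  -- or pass --trackers=xml.path to the context run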

-

If I can get in the mood I will make a variant that is XSLT-compliant, but I wonder if it makes sense.

---ldx]]-- - ---[[ldx-- -

Especially the lpath code is experimental; we will support some of XPath, but only things that make sense for us. As compensation it is possible to hook in your own functions. Apart from preprocessing content for ConTeXt we also need this module for process management, like handling ctx and rlx files.
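As a hedged sketch of such a hook (the function name is invented): a function added to xml.expressions becomes callable inside an lpath predicate.

  function xml.expressions.startswith(str,prefix)
      -- plain (non-pattern) prefix test on element text
      return type(str) == "string" and string.find(str,prefix,1,true) == 1
  end
  -- usage sketch: xml.filter(root,"a/b[startswith(text(),'foo')]")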

- - -a/b/c /*/c -a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n) -a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n) - ---ldx]]-- - -local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end -local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end -local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end - -local report_lpath = logs.reporter("xml","lpath") - ---[[ldx-- -

We've now arrived at an interesting part: accessing the tree using a subset of XPath, and since we're not compatible we call it lpath. We will explain more about its usage in other documents.
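A tiny usage sketch (the document literal is invented), combining an lpath pattern with the traversal helper defined earlier in this patch:

  local x = xml.convert("<a><b>one</b><c>two</c><b>three</b></a>")
  xml.each(x,"/a/b",function(e)
      print(xml.tostring(e))  -- visits both b elements in document order
  end)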

---ldx]]-- - -local xml = xml - -local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end -local lpathcached = 0 function xml.lpathcached() return lpathcached end - -xml.functions = xml.functions or { } -- internal -local functions = xml.functions - -xml.expressions = xml.expressions or { } -- in expressions -local expressions = xml.expressions - -xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection) -local finalizers = xml.finalizers - -xml.specialhandler = xml.specialhandler or { } -local specialhandler = xml.specialhandler - -lpegpatterns.xml = lpegpatterns.xml or { } -local xmlpatterns = lpegpatterns.xml - -finalizers.xml = finalizers.xml or { } -finalizers.tex = finalizers.tex or { } - -local function fallback (t, name) - local fn = finalizers[name] - if fn then - t[name] = fn - else - report_lpath("unknown sub finalizer %a",name) - fn = function() end - end - return fn -end - -setmetatableindex(finalizers.xml, fallback) -setmetatableindex(finalizers.tex, fallback) - -xml.defaultprotocol = "xml" - --- as xsl does not follow xpath completely here we will also --- be more liberal especially with regards to the use of | and --- the rootpath: --- --- test : all 'test' under current --- /test : 'test' relative to current --- a|b|c : set of names --- (a|b|c) : idem --- ! : not --- --- after all, we're not doing transformations but filtering. in --- addition we provide filter functions (last bit) --- --- todo: optimizer --- --- .. : parent --- * : all kids --- / : anchor here --- // : /**/ --- ** : all in between --- --- so far we had (more practical as we don't transform) --- --- {/test} : kids 'test' under current node --- {test} : any kid with tag 'test' --- {//test} : same as above - --- evaluator (needs to be redone, for the moment copied) - --- todo: apply_axis(list,notable) and collection vs single - -local apply_axis = { } - -apply_axis['root'] = function(list) - local collected = { } - for l=1,#list do - local ll = list[l] - local rt = ll - while ll do - ll = ll.__p__ - if ll then - rt = ll - end - end - collected[l] = rt - end - return collected -end - -apply_axis['self'] = function(list) ---~ local collected = { } ---~ for l=1,#list do ---~ collected[l] = list[l] ---~ end ---~ return collected - return list -end - -apply_axis['child'] = function(list) - local collected, c = { }, 0 - for l=1,#list do - local ll = list[l] - local dt = ll.dt - if dt then -- weird that this is needed - local en = 0 - for k=1,#dt do - local dk = dt[k] - if dk.tg then - c = c + 1 - collected[c] = dk - dk.ni = k -- refresh - en = en + 1 - dk.ei = en - end - end - ll.en = en - end - end - return collected -end - -local function collect(list,collected,c) - local dt = list.dt - if dt then - local en = 0 - for k=1,#dt do - local dk = dt[k] - if dk.tg then - c = c + 1 - collected[c] = dk - dk.ni = k -- refresh - en = en + 1 - dk.ei = en - c = collect(dk,collected,c) - end - end - list.en = en - end - return c -end - -apply_axis['descendant'] = function(list) - local collected, c = { }, 0 - for l=1,#list do - c = collect(list[l],collected,c) - end - return collected -end - -local function collect(list,collected,c) - local dt = list.dt - if dt then - local en = 0 - for k=1,#dt do - local dk = dt[k] - if dk.tg then - c = c + 1 - collected[c] = dk - dk.ni = k -- refresh - en = en + 1 - dk.ei = en - c = collect(dk,collected,c) - end - end - list.en = en - end - return c -end -apply_axis['descendant-or-self'] = function(list) - local collected, c = { }, 0 
- for l=1,#list do - local ll = list[l] - if ll.special ~= true then -- catch double root - c = c + 1 - collected[c] = ll - end - c = collect(ll,collected,c) - end - return collected -end - -apply_axis['ancestor'] = function(list) - local collected, c = { }, 0 - for l=1,#list do - local ll = list[l] - while ll do - ll = ll.__p__ - if ll then - c = c + 1 - collected[c] = ll - end - end - end - return collected -end - -apply_axis['ancestor-or-self'] = function(list) - local collected, c = { }, 0 - for l=1,#list do - local ll = list[l] - c = c + 1 - collected[c] = ll - while ll do - ll = ll.__p__ - if ll then - c = c + 1 - collected[c] = ll - end - end - end - return collected -end - -apply_axis['parent'] = function(list) - local collected, c = { }, 0 - for l=1,#list do - local pl = list[l].__p__ - if pl then - c = c + 1 - collected[c] = pl - end - end - return collected -end - -apply_axis['attribute'] = function(list) - return { } -end - -apply_axis['namespace'] = function(list) - return { } -end - -apply_axis['following'] = function(list) -- incomplete ---~ local collected, c = { }, 0 ---~ for l=1,#list do ---~ local ll = list[l] ---~ local p = ll.__p__ ---~ local d = p.dt ---~ for i=ll.ni+1,#d do ---~ local di = d[i] ---~ if type(di) == "table" then ---~ c = c + 1 ---~ collected[c] = di ---~ break ---~ end ---~ end ---~ end ---~ return collected - return { } -end - -apply_axis['preceding'] = function(list) -- incomplete ---~ local collected, c = { }, 0 ---~ for l=1,#list do ---~ local ll = list[l] ---~ local p = ll.__p__ ---~ local d = p.dt ---~ for i=ll.ni-1,1,-1 do ---~ local di = d[i] ---~ if type(di) == "table" then ---~ c = c + 1 ---~ collected[c] = di ---~ break ---~ end ---~ end ---~ end ---~ return collected - return { } -end - -apply_axis['following-sibling'] = function(list) - local collected, c = { }, 0 - for l=1,#list do - local ll = list[l] - local p = ll.__p__ - local d = p.dt - for i=ll.ni+1,#d do - local di = d[i] - if type(di) == "table" then - c = c + 1 - collected[c] = di - end - end - end - return collected -end - -apply_axis['preceding-sibling'] = function(list) - local collected, c = { }, 0 - for l=1,#list do - local ll = list[l] - local p = ll.__p__ - local d = p.dt - for i=1,ll.ni-1 do - local di = d[i] - if type(di) == "table" then - c = c + 1 - collected[c] = di - end - end - end - return collected -end - -apply_axis['reverse-sibling'] = function(list) -- reverse preceding - local collected, c = { }, 0 - for l=1,#list do - local ll = list[l] - local p = ll.__p__ - local d = p.dt - for i=ll.ni-1,1,-1 do - local di = d[i] - if type(di) == "table" then - c = c + 1 - collected[c] = di - end - end - end - return collected -end - -apply_axis['auto-descendant-or-self'] = apply_axis['descendant-or-self'] -apply_axis['auto-descendant'] = apply_axis['descendant'] -apply_axis['auto-child'] = apply_axis['child'] -apply_axis['auto-self'] = apply_axis['self'] -apply_axis['initial-child'] = apply_axis['child'] - -local function apply_nodes(list,directive,nodes) - -- todo: nodes[1] etc ... negated node name in set ... when needed - -- ... 
currently ignored - local maxn = #nodes - if maxn == 3 then --optimized loop - local nns, ntg = nodes[2], nodes[3] - if not nns and not ntg then -- wildcard - if directive then - return list - else - return { } - end - else - local collected, c, m, p = { }, 0, 0, nil - if not nns then -- only check tag - for l=1,#list do - local ll = list[l] - local ltg = ll.tg - if ltg then - if directive then - if ntg == ltg then - local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end - c = c + 1 - collected[c], ll.mi = ll, m - end - elseif ntg ~= ltg then - local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end - c = c + 1 - collected[c], ll.mi = ll, m - end - end - end - elseif not ntg then -- only check namespace - for l=1,#list do - local ll = list[l] - local lns = ll.rn or ll.ns - if lns then - if directive then - if lns == nns then - local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end - c = c + 1 - collected[c], ll.mi = ll, m - end - elseif lns ~= nns then - local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end - c = c + 1 - collected[c], ll.mi = ll, m - end - end - end - else -- check both - for l=1,#list do - local ll = list[l] - local ltg = ll.tg - if ltg then - local lns = ll.rn or ll.ns - local ok = ltg == ntg and lns == nns - if directive then - if ok then - local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end - c = c + 1 - collected[c], ll.mi = ll, m - end - elseif not ok then - local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end - c = c + 1 - collected[c], ll.mi = ll, m - end - end - end - end - return collected - end - else - local collected, c, m, p = { }, 0, 0, nil - for l=1,#list do - local ll = list[l] - local ltg = ll.tg - if ltg then - local lns = ll.rn or ll.ns - local ok = false - for n=1,maxn,3 do - local nns, ntg = nodes[n+1], nodes[n+2] - ok = (not ntg or ltg == ntg) and (not nns or lns == nns) - if ok then - break - end - end - if directive then - if ok then - local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end - c = c + 1 - collected[c], ll.mi = ll, m - end - elseif not ok then - local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end - c = c + 1 - collected[c], ll.mi = ll, m - end - end - end - return collected - end -end - -local quit_expression = false - -local function apply_expression(list,expression,order) - local collected, c = { }, 0 - quit_expression = false - for l=1,#list do - local ll = list[l] - if expression(list,ll,l,order) then -- nasty, order alleen valid als n=1 - c = c + 1 - collected[c] = ll - end - if quit_expression then - break - end - end - return collected -end - -local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb - -local spaces = S(" \n\r\t\f")^0 -local lp_space = S(" \n\r\t\f") -local lp_any = P(1) -local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==") -local lp_doequal = P("=") / "==" -local lp_or = P("|") / " or " -local lp_and = P("&") / " and " - -local lp_builtin = P ( - P("text") / "(ll.dt[1] or '')" + -- fragile - P("content") / "ll.dt" + - -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" + - P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" + - P("tag") / "ll.tg" + - P("position") / "l" + -- is element in finalizer - P("firstindex") / "1" + - P("lastindex") / "(#ll.__p__.dt or 1)" + - P("firstelement") / "1" + - P("lastelement") / "(ll.__p__.en or 1)" + - P("first") / "1" + - P("last") / 
"#list" + - P("rootposition") / "order" + - P("order") / "order" + - P("element") / "(ll.ei or 1)" + - P("index") / "(ll.ni or 1)" + - P("match") / "(ll.mi or 1)" + - -- P("namespace") / "ll.ns" + - P("ns") / "ll.ns" - ) * ((spaces * P("(") * spaces * P(")"))/"") - --- for the moment we keep namespaces with attributes - -local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])") - --- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end --- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end - -lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0" -lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))" - -local lp_fastpos = lp_fastpos_n + lp_fastpos_p - -local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false") - --- local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling --- return t .. "(" --- end - --- local lp_lua_function = (R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / "%0(" -local lp_lua_function = Cs((R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(")) / "%0" - -local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling - if expressions[t] then - return "expr." .. t .. "(" - else - return "expr.error(" - end -end - -local lparent = P("(") -local rparent = P(")") -local noparent = 1 - (lparent+rparent) -local nested = P{lparent * (noparent + V(1))^0 * rparent} -local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"} - -local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')") -local lp_number = S("+-") * R("09")^1 -local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'") -local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"')) - -local cleaner - -local lp_special = (C(P("name")+P("text")+P("tag")+P("count")+P("child"))) * value / function(t,s) - if expressions[t] then - s = s and s ~= "" and lpegmatch(cleaner,s) - if s and s ~= "" then - return "expr." .. t .. "(ll," .. s ..")" - else - return "expr." .. t .. "(ll)" - end - else - return "expr.error(" .. t .. 
")" - end -end - -local content = - lp_builtin + - lp_attribute + - lp_special + - lp_noequal + lp_doequal + - lp_or + lp_and + - lp_reserved + - lp_lua_function + lp_function + - lp_content + -- too fragile - lp_child + - lp_any - -local converter = Cs ( - lp_fastpos + (P { lparent * (V(1))^0 * rparent + content } )^0 -) - -cleaner = Cs ( ( - -- lp_fastpos + - lp_reserved + - lp_number + - lp_string + -1 )^1 ) - -local template_e = [[ - local expr = xml.expressions - return function(list,ll,l,order) - return %s - end -]] - -local template_f_y = [[ - local finalizer = xml.finalizers['%s']['%s'] - return function(collection) - return finalizer(collection,%s) - end -]] - -local template_f_n = [[ - return xml.finalizers['%s']['%s'] -]] - --- - -local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] } -local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] } -local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] } -local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] } -local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] } -local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] } -local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] } -local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] } -local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] } -local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] } -local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] } -local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] } -local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] } -local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] } -local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] } - -local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] } -local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] } -local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] } -local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] } - -local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] } - -local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } } - -local skip = { } - -local function errorrunner_e(str,cnv) - if not skip[str] then - report_lpath("error in expression: %s => %s",str,cnv) - skip[str] = cnv or str - end - return false -end - -local function errorrunner_f(str,arg) - report_lpath("error in finalizer: %s(%s)",str,arg or "") - return false -end - -local function register_nodes(nodetest,nodes) - return { kind = "nodes", nodetest = nodetest, nodes = nodes } -end - -local function 
register_expression(expression) - local converted = lpegmatch(converter,expression) - local runner = load(format(template_e,converted)) - runner = (runner and runner()) or function() errorrunner_e(expression,converted) end - return { kind = "expression", expression = expression, converted = converted, evaluator = runner } -end - -local function register_finalizer(protocol,name,arguments) - local runner - if arguments and arguments ~= "" then - runner = load(format(template_f_y,protocol or xml.defaultprotocol,name,arguments)) - else - runner = load(format(template_f_n,protocol or xml.defaultprotocol,name)) - end - runner = (runner and runner()) or function() errorrunner_f(name,arguments) end - return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner } -end - -local expression = P { "ex", - ex = "[" * C((V("sq") + V("dq") + (1 - S("[]")) + V("ex"))^0) * "]", - sq = "'" * (1 - S("'"))^0 * "'", - dq = '"' * (1 - S('"'))^0 * '"', -} - -local arguments = P { "ar", - ar = "(" * Cs((V("sq") + V("dq") + V("nq") + P(1-P(")")))^0) * ")", - nq = ((1 - S("),'\""))^1) / function(s) return format("%q",s) end, - sq = P("'") * (1 - P("'"))^0 * P("'"), - dq = P('"') * (1 - P('"'))^0 * P('"'), -} - --- todo: better arg parser - -local function register_error(str) - return { kind = "error", error = format("unparsed: %s",str) } -end - --- there is a difference in * and /*/ and so we need to catch a few special cases - -local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed -local special_2 = P("/") * Cc(register_auto_self) -local special_3 = P("") * Cc(register_auto_self) - -local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1) -local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1) - -local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside - - patterns = spaces * V("protocol") * spaces * ( - ( V("special") * spaces * P(-1) ) + - ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 ) - ), - - protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"), - - -- the / is needed for // as descendant or self is somewhat special - -- step = (V("shortcuts") + V("axis") * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0, - step = ((V("shortcuts") + P("/") + V("axis")) * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0, - - axis = V("descendant") + V("child") + V("parent") + V("self") + V("root") + V("ancestor") + - V("descendant_or_self") + V("following_sibling") + V("following") + - V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") + - #(1-P(-1)) * Cc(register_auto_child), - - special = special_1 + special_2 + special_3, - - initial = (P("/") * spaces * Cc(register_initial_child))^-1, - - error = (P(1)^1) / register_error, - - shortcuts_a = V("s_descendant_or_self") + V("s_descendant") + V("s_child") + V("s_parent") + V("s_self") + V("s_root") + V("s_ancestor"), - - shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0, - - s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus - s_descendant = P("**") * Cc(register_descendant), - s_child = P("*") * no_nextcolon * Cc(register_child ), - s_parent = P("..") * Cc(register_parent ), - s_self = P("." 
) * Cc(register_self ), - s_root = P("^^") * Cc(register_root ), - s_ancestor = P("^") * Cc(register_ancestor ), - - descendant = P("descendant::") * Cc(register_descendant ), - child = P("child::") * Cc(register_child ), - parent = P("parent::") * Cc(register_parent ), - self = P("self::") * Cc(register_self ), - root = P('root::') * Cc(register_root ), - ancestor = P('ancestor::') * Cc(register_ancestor ), - descendant_or_self = P('descendant-or-self::') * Cc(register_descendant_or_self ), - ancestor_or_self = P('ancestor-or-self::') * Cc(register_ancestor_or_self ), - -- attribute = P('attribute::') * Cc(register_attribute ), - -- namespace = P('namespace::') * Cc(register_namespace ), - following = P('following::') * Cc(register_following ), - following_sibling = P('following-sibling::') * Cc(register_following_sibling ), - preceding = P('preceding::') * Cc(register_preceding ), - preceding_sibling = P('preceding-sibling::') * Cc(register_preceding_sibling ), - reverse_sibling = P('reverse-sibling::') * Cc(register_reverse_sibling ), - - nodes = (V("nodefunction") * spaces * P("(") * V("nodeset") * P(")") + V("nodetest") * V("nodeset")) / register_nodes, - - expressions = expression / register_expression, - - letters = R("az")^1, - name = (1-S("/[]()|:*!"))^1, -- make inline - negate = P("!") * Cc(false), - - nodefunction = V("negate") + P("not") * Cc(false) + Cc(true), - nodetest = V("negate") + Cc(true), - nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))), - wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent, - nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces, - - finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer, - -} - -xmlpatterns.pathparser = pathparser - -local cache = { } - -local function nodesettostring(set,nodetest) - local t = { } - for i=1,#set,3 do - local directive, ns, tg = set[i], set[i+1], set[i+2] - if not ns or ns == "" then ns = "*" end - if not tg or tg == "" then tg = "*" end - tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg) - t[i] = (directive and tg) or format("not(%s)",tg) - end - if nodetest == false then - return format("not(%s)",concat(t,"|")) - else - return concat(t,"|") - end -end - -local function tagstostring(list) - if #list == 0 then - return "no elements" - else - local t = { } - for i=1, #list do - local li = list[i] - local ns, tg = li.ns, li.tg - if not ns or ns == "" then ns = "*" end - if not tg or tg == "" then tg = "*" end - t[i] = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg) - end - return concat(t," ") - end -end - -xml.nodesettostring = nodesettostring - -local lpath -- we have a harmless kind of circular reference - -local lshowoptions = { functions = false } - -local function lshow(parsed) - if type(parsed) == "string" then - parsed = lpath(parsed) - end - report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern, - table.serialize(parsed,false,lshowoptions)) -end - -xml.lshow = lshow - -local function add_comment(p,str) - local pc = p.comment - if not pc then - p.comment = { str } - else - pc[#pc+1] = str - end -end - -lpath = function (pattern) -- the gain of caching is rather minimal - lpathcalls = lpathcalls + 1 - if type(pattern) == "table" then - return pattern - else - local parsed = cache[pattern] - if parsed then - lpathcached = lpathcached + 1 - else - parsed = lpegmatch(pathparser,pattern) - if 
parsed then - parsed.pattern = pattern - local np = #parsed - if np == 0 then - parsed = { pattern = pattern, register_self, state = "parsing error" } - report_lpath("parsing error in pattern: %s",pattern) - lshow(parsed) - else - -- we could have done this with a more complex parser but this - -- is cleaner - local pi = parsed[1] - if pi.axis == "auto-child" then - if false then - add_comment(parsed, "auto-child replaced by auto-descendant-or-self") - parsed[1] = register_auto_descendant_or_self - else - add_comment(parsed, "auto-child replaced by auto-descendant") - parsed[1] = register_auto_descendant - end - elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then - add_comment(parsed, "initial-child removed") -- we could also make it a auto-self - remove(parsed,1) - end - local np = #parsed -- can have changed - if np > 1 then - local pnp = parsed[np] - if pnp.kind == "nodes" and pnp.nodetest == true then - local nodes = pnp.nodes - if nodes[1] == true and nodes[2] == false and nodes[3] == false then - add_comment(parsed, "redundant final wildcard filter removed") - remove(parsed,np) - end - end - end - end - else - parsed = { pattern = pattern } - end - cache[pattern] = parsed - if trace_lparse and not trace_lprofile then - lshow(parsed) - end - end - return parsed - end -end - -xml.lpath = lpath - --- we can move all calls inline and then merge the trace back --- technically we can combine axis and the next nodes which is --- what we did before but this a bit cleaner (but slower too) --- but interesting is that it's not that much faster when we --- go inline --- --- beware: we need to return a collection even when we filter --- else the (simple) cache gets messed up - --- caching found lookups saves not that much (max .1 sec on a 8 sec run) --- and it also messes up finalizers - --- watch out: when there is a finalizer, it's always called as there --- can be cases that a finalizer returns (or does) something in case --- there is no match; an example of this is count() - -local profiled = { } xml.profiled = profiled - -local function profiled_apply(list,parsed,nofparsed,order) - local p = profiled[parsed.pattern] - if p then - p.tested = p.tested + 1 - else - p = { tested = 1, matched = 0, finalized = 0 } - profiled[parsed.pattern] = p - end - local collected = list - for i=1,nofparsed do - local pi = parsed[i] - local kind = pi.kind - if kind == "axis" then - collected = apply_axis[pi.axis](collected) - elseif kind == "nodes" then - collected = apply_nodes(collected,pi.nodetest,pi.nodes) - elseif kind == "expression" then - collected = apply_expression(collected,pi.evaluator,order) - elseif kind == "finalizer" then - collected = pi.finalizer(collected) -- no check on # here - p.matched = p.matched + 1 - p.finalized = p.finalized + 1 - return collected - end - if not collected or #collected == 0 then - local pn = i < nofparsed and parsed[nofparsed] - if pn and pn.kind == "finalizer" then - collected = pn.finalizer(collected) - p.finalized = p.finalized + 1 - return collected - end - return nil - end - end - if collected then - p.matched = p.matched + 1 - end - return collected -end - -local function traced_apply(list,parsed,nofparsed,order) - if trace_lparse then - lshow(parsed) - end - report_lpath("collecting: %s",parsed.pattern) - report_lpath("root tags : %s",tagstostring(list)) - report_lpath("order : %s",order or "unset") - local collected = list - for i=1,nofparsed do - local pi = parsed[i] - local kind = pi.kind - if kind == "axis" then - collected = 
apply_axis[pi.axis](collected) - report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis) - elseif kind == "nodes" then - collected = apply_nodes(collected,pi.nodetest,pi.nodes) - report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest)) - elseif kind == "expression" then - collected = apply_expression(collected,pi.evaluator,order) - report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted) - elseif kind == "finalizer" then - collected = pi.finalizer(collected) - report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "") - return collected - end - if not collected or #collected == 0 then - local pn = i < nofparsed and parsed[nofparsed] - if pn and pn.kind == "finalizer" then - collected = pn.finalizer(collected) - report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "") - return collected - end - return nil - end - end - return collected -end - -local function normal_apply(list,parsed,nofparsed,order) - local collected = list - for i=1,nofparsed do - local pi = parsed[i] - local kind = pi.kind - if kind == "axis" then - local axis = pi.axis - if axis ~= "self" then - collected = apply_axis[axis](collected) - end - elseif kind == "nodes" then - collected = apply_nodes(collected,pi.nodetest,pi.nodes) - elseif kind == "expression" then - collected = apply_expression(collected,pi.evaluator,order) - elseif kind == "finalizer" then - return pi.finalizer(collected) - end - if not collected or #collected == 0 then - local pf = i < nofparsed and parsed[nofparsed].finalizer - if pf then - return pf(collected) -- can be anything - end - return nil - end - end - return collected -end - ---~ local function applylpath(list,pattern) ---~ -- we avoid an extra call ---~ local parsed = cache[pattern] ---~ if parsed then ---~ lpathcalls = lpathcalls + 1 ---~ lpathcached = lpathcached + 1 ---~ elseif type(pattern) == "table" then ---~ lpathcalls = lpathcalls + 1 ---~ parsed = pattern ---~ else ---~ parsed = lpath(pattern) or pattern ---~ end ---~ if not parsed then ---~ return ---~ end ---~ local nofparsed = #parsed ---~ if nofparsed == 0 then ---~ return -- something is wrong ---~ end ---~ local one = list[1] -- we could have a third argument: isroot and list or list[1] or whatever we like ... 
todo ---~ if not one then ---~ return -- something is wrong ---~ elseif not trace_lpath then ---~ return normal_apply(list,parsed,nofparsed,one.mi) ---~ elseif trace_lprofile then ---~ return profiled_apply(list,parsed,nofparsed,one.mi) ---~ else ---~ return traced_apply(list,parsed,nofparsed,one.mi) ---~ end ---~ end - -local function applylpath(list,pattern) - if not list then - return - end - local parsed = cache[pattern] - if parsed then - lpathcalls = lpathcalls + 1 - lpathcached = lpathcached + 1 - elseif type(pattern) == "table" then - lpathcalls = lpathcalls + 1 - parsed = pattern - else - parsed = lpath(pattern) or pattern - end - if not parsed then - return - end - local nofparsed = #parsed - if nofparsed == 0 then - return -- something is wrong - end - if not trace_lpath then - return normal_apply ({ list },parsed,nofparsed,list.mi) - elseif trace_lprofile then - return profiled_apply({ list },parsed,nofparsed,list.mi) - else - return traced_apply ({ list },parsed,nofparsed,list.mi) - end -end - -xml.applylpath = applylpath -- takes a table as first argment, which is what xml.filter will do - ---[[ldx-- -

This is the main filter function. It returns whatever is asked for.

---ldx]]-- - -function xml.filter(root,pattern) -- no longer funny attribute handling here - return applylpath(root,pattern) -end - --- internal (parsed) - -expressions.child = function(e,pattern) - return applylpath(e,pattern) -- todo: cache -end - -expressions.count = function(e,pattern) -- what if pattern == empty or nil - local collected = applylpath(e,pattern) -- todo: cache - return pattern and (collected and #collected) or 0 -end - --- external - --- expressions.oneof = function(s,...) --- local t = {...} --- for i=1,#t do --- if s == t[i] then --- return true --- end --- end --- return false --- end - -expressions.oneof = function(s,...) - for i=1,select("#",...) do - if s == select(i,...) then - return true - end - end - return false -end - -expressions.error = function(str) - xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?"))) - return false -end - -expressions.undefined = function(s) - return s == nil -end - -expressions.quit = function(s) - if s or s == nil then - quit_expression = true - end - return true -end - -expressions.print = function(...) - print(...) - return true -end - -expressions.contains = find -expressions.find = find -expressions.upper = upper -expressions.lower = lower -expressions.number = tonumber -expressions.boolean = toboolean - -function expressions.contains(str,pattern) - local t = type(str) - if t == "string" then - if find(str,pattern) then - return true - end - elseif t == "table" then - for i=1,#str do - local d = str[i] - if type(d) == "string" and find(d,pattern) then - return true - end - end - end - return false -end - --- user interface - -local function traverse(root,pattern,handle) - -- report_lpath("use 'xml.selection' instead for pattern: %s",pattern) - local collected = applylpath(root,pattern) - if collected then - for c=1,#collected do - local e = collected[c] - local r = e.__p__ - handle(r,r.dt,e.ni) - end - end -end - -local function selection(root,pattern,handle) - local collected = applylpath(root,pattern) - if collected then - if handle then - for c=1,#collected do - handle(collected[c]) - end - else - return collected - end - end -end - -xml.traverse = traverse -- old method, r, d, k -xml.selection = selection -- new method, simple handle - ---~ function xml.cachedpatterns() ---~ return cache ---~ end - --- generic function finalizer (independant namespace) - -local function dofunction(collected,fnc,...) - if collected then - local f = functions[fnc] - if f then - for c=1,#collected do - f(collected[c],...) - end - else - report_lpath("unknown function %a",fnc) - end - end -end - -finalizers.xml["function"] = dofunction -finalizers.tex["function"] = dofunction - --- functions - -expressions.text = function(e,n) - local rdt = e.__p__.dt - return rdt and rdt[n] or "" -end - -expressions.name = function(e,n) -- ns + tg - local found = false - n = tonumber(n) or 0 - if n == 0 then - found = type(e) == "table" and e - elseif n < 0 then - local d, k = e.__p__.dt, e.ni - for i=k-1,1,-1 do - local di = d[i] - if type(di) == "table" then - if n == -1 then - found = di - break - else - n = n + 1 - end - end - end - else - local d, k = e.__p__.dt, e.ni - for i=k+1,#d,1 do - local di = d[i] - if type(di) == "table" then - if n == 1 then - found = di - break - else - n = n - 1 - end - end - end - end - if found then - local ns, tg = found.rn or found.ns or "", found.tg - if ns ~= "" then - return ns .. ":" .. 
tg - else - return tg - end - else - return "" - end -end - -expressions.tag = function(e,n) -- only tg - if not e then - return "" - else - local found = false - n = tonumber(n) or 0 - if n == 0 then - found = (type(e) == "table") and e -- seems to fail - elseif n < 0 then - local d, k = e.__p__.dt, e.ni - for i=k-1,1,-1 do - local di = d[i] - if type(di) == "table" then - if n == -1 then - found = di - break - else - n = n + 1 - end - end - end - else - local d, k = e.__p__.dt, e.ni - for i=k+1,#d,1 do - local di = d[i] - if type(di) == "table" then - if n == 1 then - found = di - break - else - n = n - 1 - end - end - end - end - return (found and found.tg) or "" - end -end - ---[[ldx-- -

-Often using an iterator looks nicer in the code than passing handler
-functions. The Lua book describes how to use coroutines for that
-purpose. This permits code like:

- - -for r, d, k in xml.elements(xml.load('text.xml'),"title") do - print(d[k]) -- old method -end -for e in xml.collected(xml.load('text.xml'),"title") do - print(e) -- new one -end - ---ldx]]-- - --- local wrap, yield = coroutine.wrap, coroutine.yield --- local dummy = function() end --- --- function xml.elements(root,pattern,reverse) -- r, d, k --- local collected = applylpath(root,pattern) --- if collected then --- if reverse then --- return wrap(function() for c=#collected,1,-1 do --- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni) --- end end) --- else --- return wrap(function() for c=1,#collected do --- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni) --- end end) --- end --- end --- return wrap(dummy) --- end --- --- function xml.collected(root,pattern,reverse) -- e --- local collected = applylpath(root,pattern) --- if collected then --- if reverse then --- return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end) --- else --- return wrap(function() for c=1,#collected do yield(collected[c]) end end) --- end --- end --- return wrap(dummy) --- end - --- faster: - -local dummy = function() end - -function xml.elements(root,pattern,reverse) -- r, d, k - local collected = applylpath(root,pattern) - if not collected then - return dummy - elseif reverse then - local c = #collected + 1 - return function() - if c > 1 then - c = c - 1 - local e = collected[c] - local r = e.__p__ - return r, r.dt, e.ni - end - end - else - local n, c = #collected, 0 - return function() - if c < n then - c = c + 1 - local e = collected[c] - local r = e.__p__ - return r, r.dt, e.ni - end - end - end -end - -function xml.collected(root,pattern,reverse) -- e - local collected = applylpath(root,pattern) - if not collected then - return dummy - elseif reverse then - local c = #collected + 1 - return function() - if c > 1 then - c = c - 1 - return collected[c] - end - end - else - local n, c = #collected, 0 - return function() - if c < n then - c = c + 1 - return collected[c] - end - end - end -end - --- handy - -function xml.inspect(collection,pattern) - pattern = pattern or "." 
- for e in xml.collected(collection,pattern or ".") do - report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e)) - end -end - --- texy (see xfdf): - -local function split(e) - local dt = e.dt - if dt then - for i=1,#dt do - local dti = dt[i] - if type(dti) == "string" then - dti = gsub(dti,"^[\n\r]*(.-)[\n\r]*","%1") - dti = gsub(dti,"[\n\r]+","\n\n") - dt[i] = dti - else - split(dti) - end - end - end - return e -end - -function xml.finalizers.paragraphs(c) - for i=1,#c do - split(c[i]) - end - return c -end +if not modules then modules = { } end modules ['lxml-lpt'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- e.ni is only valid after a filter run +-- todo: B/C/[get first match] + +local concat, remove, insert = table.concat, table.remove, table.insert +local type, next, tonumber, tostring, setmetatable, load, select = type, next, tonumber, tostring, setmetatable, load, select +local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep +local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns + +local setmetatableindex = table.setmetatableindex +local formatters = string.formatters -- no need (yet) as paths are cached anyway + +-- beware, this is not xpath ... e.g. position is different (currently) and +-- we have reverse-sibling as reversed preceding sibling + +--[[ldx-- +

+This module can be used stand alone but also inside MkIV, in which case
+it hooks into the tracker code. Therefore we provide a few functions that
+set the tracers. Here we overload a previously defined function.
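+
+A minimal sketch of enabling these tracers from the user end (hedged: it
+just uses the generic trackers interface with the keys that get registered
+further down in this file):
+
+  trackers.enable("xml.path")    -- trace how axes, nodes and expressions are applied
+  trackers.enable("xml.parse")   -- show the parsed form of each pattern
+  trackers.enable("xml.profile") -- collect per-pattern match statistics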

+

If I can get in the mood I will make a variant that is XSLT compliant +but I wonder if it makes sense.

+--ldx]]-- + +--[[ldx-- +

+Especially the lpath code is experimental: we will support some of xpath, but
+only things that make sense for us; as compensation it is possible to hook in
+your own functions. Apart from preprocessing content for ConTeXt we also need
+this module for process management, like handling ctx and rlx files.

+ + +a/b/c /*/c +a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n) +a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n) + +--ldx]]-- + +local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end +local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end +local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end + +local report_lpath = logs.reporter("xml","lpath") + +--[[ldx-- +

+We've now arrived at an interesting part: accessing the tree using a subset
+of xpath, and since we're not compatible we call it lpath. We will explain
+more about its usage in other documents.
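+
+A hedged usage sketch (not part of this module; the file and variable names
+are illustrative, the patterns are among the forms shown earlier in this file):
+
+  local root = xml.load("text.xml")
+  local all  = xml.filter(root,"a/b/c")          -- a collection of matching elements
+  local one  = xml.filter(root,"a/b/c/first()")  -- a finalizer that picks the first match
+  for e in xml.collected(root,"/*/c") do         -- iterator defined later in this file
+    print(xml.tostring(e))
+  end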

+--ldx]]-- + +local xml = xml + +local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end +local lpathcached = 0 function xml.lpathcached() return lpathcached end + +xml.functions = xml.functions or { } -- internal +local functions = xml.functions + +xml.expressions = xml.expressions or { } -- in expressions +local expressions = xml.expressions + +xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection) +local finalizers = xml.finalizers + +xml.specialhandler = xml.specialhandler or { } +local specialhandler = xml.specialhandler + +lpegpatterns.xml = lpegpatterns.xml or { } +local xmlpatterns = lpegpatterns.xml + +finalizers.xml = finalizers.xml or { } +finalizers.tex = finalizers.tex or { } + +local function fallback (t, name) + local fn = finalizers[name] + if fn then + t[name] = fn + else + report_lpath("unknown sub finalizer %a",name) + fn = function() end + end + return fn +end + +setmetatableindex(finalizers.xml, fallback) +setmetatableindex(finalizers.tex, fallback) + +xml.defaultprotocol = "xml" + +-- as xsl does not follow xpath completely here we will also +-- be more liberal especially with regards to the use of | and +-- the rootpath: +-- +-- test : all 'test' under current +-- /test : 'test' relative to current +-- a|b|c : set of names +-- (a|b|c) : idem +-- ! : not +-- +-- after all, we're not doing transformations but filtering. in +-- addition we provide filter functions (last bit) +-- +-- todo: optimizer +-- +-- .. : parent +-- * : all kids +-- / : anchor here +-- // : /**/ +-- ** : all in between +-- +-- so far we had (more practical as we don't transform) +-- +-- {/test} : kids 'test' under current node +-- {test} : any kid with tag 'test' +-- {//test} : same as above + +-- evaluator (needs to be redone, for the moment copied) + +-- todo: apply_axis(list,notable) and collection vs single + +local apply_axis = { } + +apply_axis['root'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local rt = ll + while ll do + ll = ll.__p__ + if ll then + rt = ll + end + end + collected[l] = rt + end + return collected +end + +apply_axis['self'] = function(list) +--~ local collected = { } +--~ for l=1,#list do +--~ collected[l] = list[l] +--~ end +--~ return collected + return list +end + +apply_axis['child'] = function(list) + local collected, c = { }, 0 + for l=1,#list do + local ll = list[l] + local dt = ll.dt + if dt then -- weird that this is needed + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + c = c + 1 + collected[c] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + end + end + ll.en = en + end + end + return collected +end + +local function collect(list,collected,c) + local dt = list.dt + if dt then + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + c = c + 1 + collected[c] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + c = collect(dk,collected,c) + end + end + list.en = en + end + return c +end + +apply_axis['descendant'] = function(list) + local collected, c = { }, 0 + for l=1,#list do + c = collect(list[l],collected,c) + end + return collected +end + +local function collect(list,collected,c) + local dt = list.dt + if dt then + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + c = c + 1 + collected[c] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + c = collect(dk,collected,c) + end + end + list.en = en + end + return c +end +apply_axis['descendant-or-self'] = function(list) + local collected, c = { }, 0 
+ for l=1,#list do + local ll = list[l] + if ll.special ~= true then -- catch double root + c = c + 1 + collected[c] = ll + end + c = collect(ll,collected,c) + end + return collected +end + +apply_axis['ancestor'] = function(list) + local collected, c = { }, 0 + for l=1,#list do + local ll = list[l] + while ll do + ll = ll.__p__ + if ll then + c = c + 1 + collected[c] = ll + end + end + end + return collected +end + +apply_axis['ancestor-or-self'] = function(list) + local collected, c = { }, 0 + for l=1,#list do + local ll = list[l] + c = c + 1 + collected[c] = ll + while ll do + ll = ll.__p__ + if ll then + c = c + 1 + collected[c] = ll + end + end + end + return collected +end + +apply_axis['parent'] = function(list) + local collected, c = { }, 0 + for l=1,#list do + local pl = list[l].__p__ + if pl then + c = c + 1 + collected[c] = pl + end + end + return collected +end + +apply_axis['attribute'] = function(list) + return { } +end + +apply_axis['namespace'] = function(list) + return { } +end + +apply_axis['following'] = function(list) -- incomplete +--~ local collected, c = { }, 0 +--~ for l=1,#list do +--~ local ll = list[l] +--~ local p = ll.__p__ +--~ local d = p.dt +--~ for i=ll.ni+1,#d do +--~ local di = d[i] +--~ if type(di) == "table" then +--~ c = c + 1 +--~ collected[c] = di +--~ break +--~ end +--~ end +--~ end +--~ return collected + return { } +end + +apply_axis['preceding'] = function(list) -- incomplete +--~ local collected, c = { }, 0 +--~ for l=1,#list do +--~ local ll = list[l] +--~ local p = ll.__p__ +--~ local d = p.dt +--~ for i=ll.ni-1,1,-1 do +--~ local di = d[i] +--~ if type(di) == "table" then +--~ c = c + 1 +--~ collected[c] = di +--~ break +--~ end +--~ end +--~ end +--~ return collected + return { } +end + +apply_axis['following-sibling'] = function(list) + local collected, c = { }, 0 + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=ll.ni+1,#d do + local di = d[i] + if type(di) == "table" then + c = c + 1 + collected[c] = di + end + end + end + return collected +end + +apply_axis['preceding-sibling'] = function(list) + local collected, c = { }, 0 + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=1,ll.ni-1 do + local di = d[i] + if type(di) == "table" then + c = c + 1 + collected[c] = di + end + end + end + return collected +end + +apply_axis['reverse-sibling'] = function(list) -- reverse preceding + local collected, c = { }, 0 + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=ll.ni-1,1,-1 do + local di = d[i] + if type(di) == "table" then + c = c + 1 + collected[c] = di + end + end + end + return collected +end + +apply_axis['auto-descendant-or-self'] = apply_axis['descendant-or-self'] +apply_axis['auto-descendant'] = apply_axis['descendant'] +apply_axis['auto-child'] = apply_axis['child'] +apply_axis['auto-self'] = apply_axis['self'] +apply_axis['initial-child'] = apply_axis['child'] + +local function apply_nodes(list,directive,nodes) + -- todo: nodes[1] etc ... negated node name in set ... when needed + -- ... 
currently ignored + local maxn = #nodes + if maxn == 3 then --optimized loop + local nns, ntg = nodes[2], nodes[3] + if not nns and not ntg then -- wildcard + if directive then + return list + else + return { } + end + else + local collected, c, m, p = { }, 0, 0, nil + if not nns then -- only check tag + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + if directive then + if ntg == ltg then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + c = c + 1 + collected[c], ll.mi = ll, m + end + elseif ntg ~= ltg then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + c = c + 1 + collected[c], ll.mi = ll, m + end + end + end + elseif not ntg then -- only check namespace + for l=1,#list do + local ll = list[l] + local lns = ll.rn or ll.ns + if lns then + if directive then + if lns == nns then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + c = c + 1 + collected[c], ll.mi = ll, m + end + elseif lns ~= nns then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + c = c + 1 + collected[c], ll.mi = ll, m + end + end + end + else -- check both + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + local lns = ll.rn or ll.ns + local ok = ltg == ntg and lns == nns + if directive then + if ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + c = c + 1 + collected[c], ll.mi = ll, m + end + elseif not ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + c = c + 1 + collected[c], ll.mi = ll, m + end + end + end + end + return collected + end + else + local collected, c, m, p = { }, 0, 0, nil + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + local lns = ll.rn or ll.ns + local ok = false + for n=1,maxn,3 do + local nns, ntg = nodes[n+1], nodes[n+2] + ok = (not ntg or ltg == ntg) and (not nns or lns == nns) + if ok then + break + end + end + if directive then + if ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + c = c + 1 + collected[c], ll.mi = ll, m + end + elseif not ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + c = c + 1 + collected[c], ll.mi = ll, m + end + end + end + return collected + end +end + +local quit_expression = false + +local function apply_expression(list,expression,order) + local collected, c = { }, 0 + quit_expression = false + for l=1,#list do + local ll = list[l] + if expression(list,ll,l,order) then -- nasty, order alleen valid als n=1 + c = c + 1 + collected[c] = ll + end + if quit_expression then + break + end + end + return collected +end + +local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb + +local spaces = S(" \n\r\t\f")^0 +local lp_space = S(" \n\r\t\f") +local lp_any = P(1) +local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==") +local lp_doequal = P("=") / "==" +local lp_or = P("|") / " or " +local lp_and = P("&") / " and " + +local lp_builtin = P ( + P("text") / "(ll.dt[1] or '')" + -- fragile + P("content") / "ll.dt" + + -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" + + P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" + + P("tag") / "ll.tg" + + P("position") / "l" + -- is element in finalizer + P("firstindex") / "1" + + P("lastindex") / "(#ll.__p__.dt or 1)" + + P("firstelement") / "1" + + P("lastelement") / "(ll.__p__.en or 1)" + + P("first") / "1" + + P("last") / 
"#list" + + P("rootposition") / "order" + + P("order") / "order" + + P("element") / "(ll.ei or 1)" + + P("index") / "(ll.ni or 1)" + + P("match") / "(ll.mi or 1)" + + -- P("namespace") / "ll.ns" + + P("ns") / "ll.ns" + ) * ((spaces * P("(") * spaces * P(")"))/"") + +-- for the moment we keep namespaces with attributes + +local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])") + +-- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end +-- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end + +lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0" +lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))" + +local lp_fastpos = lp_fastpos_n + lp_fastpos_p + +local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false") + +-- local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling +-- return t .. "(" +-- end + +-- local lp_lua_function = (R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / "%0(" +local lp_lua_function = Cs((R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(")) / "%0" + +local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling + if expressions[t] then + return "expr." .. t .. "(" + else + return "expr.error(" + end +end + +local lparent = P("(") +local rparent = P(")") +local noparent = 1 - (lparent+rparent) +local nested = P{lparent * (noparent + V(1))^0 * rparent} +local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"} + +local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')") +local lp_number = S("+-") * R("09")^1 +local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'") +local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"')) + +local cleaner + +local lp_special = (C(P("name")+P("text")+P("tag")+P("count")+P("child"))) * value / function(t,s) + if expressions[t] then + s = s and s ~= "" and lpegmatch(cleaner,s) + if s and s ~= "" then + return "expr." .. t .. "(ll," .. s ..")" + else + return "expr." .. t .. "(ll)" + end + else + return "expr.error(" .. t .. 
")" + end +end + +local content = + lp_builtin + + lp_attribute + + lp_special + + lp_noequal + lp_doequal + + lp_or + lp_and + + lp_reserved + + lp_lua_function + lp_function + + lp_content + -- too fragile + lp_child + + lp_any + +local converter = Cs ( + lp_fastpos + (P { lparent * (V(1))^0 * rparent + content } )^0 +) + +cleaner = Cs ( ( + -- lp_fastpos + + lp_reserved + + lp_number + + lp_string + +1 )^1 ) + +local template_e = [[ + local expr = xml.expressions + return function(list,ll,l,order) + return %s + end +]] + +local template_f_y = [[ + local finalizer = xml.finalizers['%s']['%s'] + return function(collection) + return finalizer(collection,%s) + end +]] + +local template_f_n = [[ + return xml.finalizers['%s']['%s'] +]] + +-- + +local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] } +local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] } +local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] } +local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] } +local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] } +local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] } +local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] } +local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] } +local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] } +local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] } +local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] } +local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] } +local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] } +local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] } +local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] } + +local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] } +local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] } +local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] } +local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] } + +local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] } + +local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } } + +local skip = { } + +local function errorrunner_e(str,cnv) + if not skip[str] then + report_lpath("error in expression: %s => %s",str,cnv) + skip[str] = cnv or str + end + return false +end + +local function errorrunner_f(str,arg) + report_lpath("error in finalizer: %s(%s)",str,arg or "") + return false +end + +local function register_nodes(nodetest,nodes) + return { kind = "nodes", nodetest = nodetest, nodes = nodes } +end + +local function 
register_expression(expression) + local converted = lpegmatch(converter,expression) + local runner = load(format(template_e,converted)) + runner = (runner and runner()) or function() errorrunner_e(expression,converted) end + return { kind = "expression", expression = expression, converted = converted, evaluator = runner } +end + +local function register_finalizer(protocol,name,arguments) + local runner + if arguments and arguments ~= "" then + runner = load(format(template_f_y,protocol or xml.defaultprotocol,name,arguments)) + else + runner = load(format(template_f_n,protocol or xml.defaultprotocol,name)) + end + runner = (runner and runner()) or function() errorrunner_f(name,arguments) end + return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner } +end + +local expression = P { "ex", + ex = "[" * C((V("sq") + V("dq") + (1 - S("[]")) + V("ex"))^0) * "]", + sq = "'" * (1 - S("'"))^0 * "'", + dq = '"' * (1 - S('"'))^0 * '"', +} + +local arguments = P { "ar", + ar = "(" * Cs((V("sq") + V("dq") + V("nq") + P(1-P(")")))^0) * ")", + nq = ((1 - S("),'\""))^1) / function(s) return format("%q",s) end, + sq = P("'") * (1 - P("'"))^0 * P("'"), + dq = P('"') * (1 - P('"'))^0 * P('"'), +} + +-- todo: better arg parser + +local function register_error(str) + return { kind = "error", error = format("unparsed: %s",str) } +end + +-- there is a difference in * and /*/ and so we need to catch a few special cases + +local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed +local special_2 = P("/") * Cc(register_auto_self) +local special_3 = P("") * Cc(register_auto_self) + +local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1) +local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1) + +local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside + + patterns = spaces * V("protocol") * spaces * ( + ( V("special") * spaces * P(-1) ) + + ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 ) + ), + + protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"), + + -- the / is needed for // as descendant or self is somewhat special + -- step = (V("shortcuts") + V("axis") * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0, + step = ((V("shortcuts") + P("/") + V("axis")) * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0, + + axis = V("descendant") + V("child") + V("parent") + V("self") + V("root") + V("ancestor") + + V("descendant_or_self") + V("following_sibling") + V("following") + + V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") + + #(1-P(-1)) * Cc(register_auto_child), + + special = special_1 + special_2 + special_3, + + initial = (P("/") * spaces * Cc(register_initial_child))^-1, + + error = (P(1)^1) / register_error, + + shortcuts_a = V("s_descendant_or_self") + V("s_descendant") + V("s_child") + V("s_parent") + V("s_self") + V("s_root") + V("s_ancestor"), + + shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0, + + s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus + s_descendant = P("**") * Cc(register_descendant), + s_child = P("*") * no_nextcolon * Cc(register_child ), + s_parent = P("..") * Cc(register_parent ), + s_self = P("." 
) * Cc(register_self ), + s_root = P("^^") * Cc(register_root ), + s_ancestor = P("^") * Cc(register_ancestor ), + + descendant = P("descendant::") * Cc(register_descendant ), + child = P("child::") * Cc(register_child ), + parent = P("parent::") * Cc(register_parent ), + self = P("self::") * Cc(register_self ), + root = P('root::') * Cc(register_root ), + ancestor = P('ancestor::') * Cc(register_ancestor ), + descendant_or_self = P('descendant-or-self::') * Cc(register_descendant_or_self ), + ancestor_or_self = P('ancestor-or-self::') * Cc(register_ancestor_or_self ), + -- attribute = P('attribute::') * Cc(register_attribute ), + -- namespace = P('namespace::') * Cc(register_namespace ), + following = P('following::') * Cc(register_following ), + following_sibling = P('following-sibling::') * Cc(register_following_sibling ), + preceding = P('preceding::') * Cc(register_preceding ), + preceding_sibling = P('preceding-sibling::') * Cc(register_preceding_sibling ), + reverse_sibling = P('reverse-sibling::') * Cc(register_reverse_sibling ), + + nodes = (V("nodefunction") * spaces * P("(") * V("nodeset") * P(")") + V("nodetest") * V("nodeset")) / register_nodes, + + expressions = expression / register_expression, + + letters = R("az")^1, + name = (1-S("/[]()|:*!"))^1, -- make inline + negate = P("!") * Cc(false), + + nodefunction = V("negate") + P("not") * Cc(false) + Cc(true), + nodetest = V("negate") + Cc(true), + nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))), + wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent, + nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces, + + finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer, + +} + +xmlpatterns.pathparser = pathparser + +local cache = { } + +local function nodesettostring(set,nodetest) + local t = { } + for i=1,#set,3 do + local directive, ns, tg = set[i], set[i+1], set[i+2] + if not ns or ns == "" then ns = "*" end + if not tg or tg == "" then tg = "*" end + tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg) + t[i] = (directive and tg) or format("not(%s)",tg) + end + if nodetest == false then + return format("not(%s)",concat(t,"|")) + else + return concat(t,"|") + end +end + +local function tagstostring(list) + if #list == 0 then + return "no elements" + else + local t = { } + for i=1, #list do + local li = list[i] + local ns, tg = li.ns, li.tg + if not ns or ns == "" then ns = "*" end + if not tg or tg == "" then tg = "*" end + t[i] = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg) + end + return concat(t," ") + end +end + +xml.nodesettostring = nodesettostring + +local lpath -- we have a harmless kind of circular reference + +local lshowoptions = { functions = false } + +local function lshow(parsed) + if type(parsed) == "string" then + parsed = lpath(parsed) + end + report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern, + table.serialize(parsed,false,lshowoptions)) +end + +xml.lshow = lshow + +local function add_comment(p,str) + local pc = p.comment + if not pc then + p.comment = { str } + else + pc[#pc+1] = str + end +end + +lpath = function (pattern) -- the gain of caching is rather minimal + lpathcalls = lpathcalls + 1 + if type(pattern) == "table" then + return pattern + else + local parsed = cache[pattern] + if parsed then + lpathcached = lpathcached + 1 + else + parsed = lpegmatch(pathparser,pattern) + if 
parsed then + parsed.pattern = pattern + local np = #parsed + if np == 0 then + parsed = { pattern = pattern, register_self, state = "parsing error" } + report_lpath("parsing error in pattern: %s",pattern) + lshow(parsed) + else + -- we could have done this with a more complex parser but this + -- is cleaner + local pi = parsed[1] + if pi.axis == "auto-child" then + if false then + add_comment(parsed, "auto-child replaced by auto-descendant-or-self") + parsed[1] = register_auto_descendant_or_self + else + add_comment(parsed, "auto-child replaced by auto-descendant") + parsed[1] = register_auto_descendant + end + elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then + add_comment(parsed, "initial-child removed") -- we could also make it a auto-self + remove(parsed,1) + end + local np = #parsed -- can have changed + if np > 1 then + local pnp = parsed[np] + if pnp.kind == "nodes" and pnp.nodetest == true then + local nodes = pnp.nodes + if nodes[1] == true and nodes[2] == false and nodes[3] == false then + add_comment(parsed, "redundant final wildcard filter removed") + remove(parsed,np) + end + end + end + end + else + parsed = { pattern = pattern } + end + cache[pattern] = parsed + if trace_lparse and not trace_lprofile then + lshow(parsed) + end + end + return parsed + end +end + +xml.lpath = lpath + +-- we can move all calls inline and then merge the trace back +-- technically we can combine axis and the next nodes which is +-- what we did before but this a bit cleaner (but slower too) +-- but interesting is that it's not that much faster when we +-- go inline +-- +-- beware: we need to return a collection even when we filter +-- else the (simple) cache gets messed up + +-- caching found lookups saves not that much (max .1 sec on a 8 sec run) +-- and it also messes up finalizers + +-- watch out: when there is a finalizer, it's always called as there +-- can be cases that a finalizer returns (or does) something in case +-- there is no match; an example of this is count() + +local profiled = { } xml.profiled = profiled + +local function profiled_apply(list,parsed,nofparsed,order) + local p = profiled[parsed.pattern] + if p then + p.tested = p.tested + 1 + else + p = { tested = 1, matched = 0, finalized = 0 } + profiled[parsed.pattern] = p + end + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + collected = apply_axis[pi.axis](collected) + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + elseif kind == "finalizer" then + collected = pi.finalizer(collected) -- no check on # here + p.matched = p.matched + 1 + p.finalized = p.finalized + 1 + return collected + end + if not collected or #collected == 0 then + local pn = i < nofparsed and parsed[nofparsed] + if pn and pn.kind == "finalizer" then + collected = pn.finalizer(collected) + p.finalized = p.finalized + 1 + return collected + end + return nil + end + end + if collected then + p.matched = p.matched + 1 + end + return collected +end + +local function traced_apply(list,parsed,nofparsed,order) + if trace_lparse then + lshow(parsed) + end + report_lpath("collecting: %s",parsed.pattern) + report_lpath("root tags : %s",tagstostring(list)) + report_lpath("order : %s",order or "unset") + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + collected = 
apply_axis[pi.axis](collected) + report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis) + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest)) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted) + elseif kind == "finalizer" then + collected = pi.finalizer(collected) + report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "") + return collected + end + if not collected or #collected == 0 then + local pn = i < nofparsed and parsed[nofparsed] + if pn and pn.kind == "finalizer" then + collected = pn.finalizer(collected) + report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "") + return collected + end + return nil + end + end + return collected +end + +local function normal_apply(list,parsed,nofparsed,order) + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + local axis = pi.axis + if axis ~= "self" then + collected = apply_axis[axis](collected) + end + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + elseif kind == "finalizer" then + return pi.finalizer(collected) + end + if not collected or #collected == 0 then + local pf = i < nofparsed and parsed[nofparsed].finalizer + if pf then + return pf(collected) -- can be anything + end + return nil + end + end + return collected +end + +--~ local function applylpath(list,pattern) +--~ -- we avoid an extra call +--~ local parsed = cache[pattern] +--~ if parsed then +--~ lpathcalls = lpathcalls + 1 +--~ lpathcached = lpathcached + 1 +--~ elseif type(pattern) == "table" then +--~ lpathcalls = lpathcalls + 1 +--~ parsed = pattern +--~ else +--~ parsed = lpath(pattern) or pattern +--~ end +--~ if not parsed then +--~ return +--~ end +--~ local nofparsed = #parsed +--~ if nofparsed == 0 then +--~ return -- something is wrong +--~ end +--~ local one = list[1] -- we could have a third argument: isroot and list or list[1] or whatever we like ... 
todo +--~ if not one then +--~ return -- something is wrong +--~ elseif not trace_lpath then +--~ return normal_apply(list,parsed,nofparsed,one.mi) +--~ elseif trace_lprofile then +--~ return profiled_apply(list,parsed,nofparsed,one.mi) +--~ else +--~ return traced_apply(list,parsed,nofparsed,one.mi) +--~ end +--~ end + +local function applylpath(list,pattern) + if not list then + return + end + local parsed = cache[pattern] + if parsed then + lpathcalls = lpathcalls + 1 + lpathcached = lpathcached + 1 + elseif type(pattern) == "table" then + lpathcalls = lpathcalls + 1 + parsed = pattern + else + parsed = lpath(pattern) or pattern + end + if not parsed then + return + end + local nofparsed = #parsed + if nofparsed == 0 then + return -- something is wrong + end + if not trace_lpath then + return normal_apply ({ list },parsed,nofparsed,list.mi) + elseif trace_lprofile then + return profiled_apply({ list },parsed,nofparsed,list.mi) + else + return traced_apply ({ list },parsed,nofparsed,list.mi) + end +end + +xml.applylpath = applylpath -- takes a table as first argment, which is what xml.filter will do + +--[[ldx-- +

This is the main filter function. It returns whatever is asked for.

+--ldx]]-- + +function xml.filter(root,pattern) -- no longer funny attribute handling here + return applylpath(root,pattern) +end + +-- internal (parsed) + +expressions.child = function(e,pattern) + return applylpath(e,pattern) -- todo: cache +end + +expressions.count = function(e,pattern) -- what if pattern == empty or nil + local collected = applylpath(e,pattern) -- todo: cache + return pattern and (collected and #collected) or 0 +end + +-- external + +-- expressions.oneof = function(s,...) +-- local t = {...} +-- for i=1,#t do +-- if s == t[i] then +-- return true +-- end +-- end +-- return false +-- end + +expressions.oneof = function(s,...) + for i=1,select("#",...) do + if s == select(i,...) then + return true + end + end + return false +end + +expressions.error = function(str) + xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?"))) + return false +end + +expressions.undefined = function(s) + return s == nil +end + +expressions.quit = function(s) + if s or s == nil then + quit_expression = true + end + return true +end + +expressions.print = function(...) + print(...) + return true +end + +expressions.contains = find +expressions.find = find +expressions.upper = upper +expressions.lower = lower +expressions.number = tonumber +expressions.boolean = toboolean + +function expressions.contains(str,pattern) + local t = type(str) + if t == "string" then + if find(str,pattern) then + return true + end + elseif t == "table" then + for i=1,#str do + local d = str[i] + if type(d) == "string" and find(d,pattern) then + return true + end + end + end + return false +end + +-- user interface + +local function traverse(root,pattern,handle) + -- report_lpath("use 'xml.selection' instead for pattern: %s",pattern) + local collected = applylpath(root,pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local r = e.__p__ + handle(r,r.dt,e.ni) + end + end +end + +local function selection(root,pattern,handle) + local collected = applylpath(root,pattern) + if collected then + if handle then + for c=1,#collected do + handle(collected[c]) + end + else + return collected + end + end +end + +xml.traverse = traverse -- old method, r, d, k +xml.selection = selection -- new method, simple handle + +--~ function xml.cachedpatterns() +--~ return cache +--~ end + +-- generic function finalizer (independant namespace) + +local function dofunction(collected,fnc,...) + if collected then + local f = functions[fnc] + if f then + for c=1,#collected do + f(collected[c],...) + end + else + report_lpath("unknown function %a",fnc) + end + end +end + +finalizers.xml["function"] = dofunction +finalizers.tex["function"] = dofunction + +-- functions + +expressions.text = function(e,n) + local rdt = e.__p__.dt + return rdt and rdt[n] or "" +end + +expressions.name = function(e,n) -- ns + tg + local found = false + n = tonumber(n) or 0 + if n == 0 then + found = type(e) == "table" and e + elseif n < 0 then + local d, k = e.__p__.dt, e.ni + for i=k-1,1,-1 do + local di = d[i] + if type(di) == "table" then + if n == -1 then + found = di + break + else + n = n + 1 + end + end + end + else + local d, k = e.__p__.dt, e.ni + for i=k+1,#d,1 do + local di = d[i] + if type(di) == "table" then + if n == 1 then + found = di + break + else + n = n - 1 + end + end + end + end + if found then + local ns, tg = found.rn or found.ns or "", found.tg + if ns ~= "" then + return ns .. ":" .. 
tg + else + return tg + end + else + return "" + end +end + +expressions.tag = function(e,n) -- only tg + if not e then + return "" + else + local found = false + n = tonumber(n) or 0 + if n == 0 then + found = (type(e) == "table") and e -- seems to fail + elseif n < 0 then + local d, k = e.__p__.dt, e.ni + for i=k-1,1,-1 do + local di = d[i] + if type(di) == "table" then + if n == -1 then + found = di + break + else + n = n + 1 + end + end + end + else + local d, k = e.__p__.dt, e.ni + for i=k+1,#d,1 do + local di = d[i] + if type(di) == "table" then + if n == 1 then + found = di + break + else + n = n - 1 + end + end + end + end + return (found and found.tg) or "" + end +end + +--[[ldx-- +
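-- A usage sketch, not part of the patch: the tag and attribute names are
-- invented. Helpers registered in the expressions table above (oneof,
-- contains, ...) can be called inside the [...] predicate of an lpath pattern.
local root    = xml.load("somefile.xml")
local entries = xml.filter(root,"database/entry[oneof(@language,'en','nl')]")
if entries then
    for i=1,#entries do
        print(xml.name(entries[i])) -- possibly prefixed tag of each match
    end
end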

Often using an iterators looks nicer in the code than passing handler +functions. The book describes how to use coroutines for that +purpose (). This permits +code like:

+ + +for r, d, k in xml.elements(xml.load('text.xml'),"title") do + print(d[k]) -- old method +end +for e in xml.collected(xml.load('text.xml'),"title") do + print(e) -- new one +end + +--ldx]]-- + +-- local wrap, yield = coroutine.wrap, coroutine.yield +-- local dummy = function() end +-- +-- function xml.elements(root,pattern,reverse) -- r, d, k +-- local collected = applylpath(root,pattern) +-- if collected then +-- if reverse then +-- return wrap(function() for c=#collected,1,-1 do +-- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni) +-- end end) +-- else +-- return wrap(function() for c=1,#collected do +-- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni) +-- end end) +-- end +-- end +-- return wrap(dummy) +-- end +-- +-- function xml.collected(root,pattern,reverse) -- e +-- local collected = applylpath(root,pattern) +-- if collected then +-- if reverse then +-- return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end) +-- else +-- return wrap(function() for c=1,#collected do yield(collected[c]) end end) +-- end +-- end +-- return wrap(dummy) +-- end + +-- faster: + +local dummy = function() end + +function xml.elements(root,pattern,reverse) -- r, d, k + local collected = applylpath(root,pattern) + if not collected then + return dummy + elseif reverse then + local c = #collected + 1 + return function() + if c > 1 then + c = c - 1 + local e = collected[c] + local r = e.__p__ + return r, r.dt, e.ni + end + end + else + local n, c = #collected, 0 + return function() + if c < n then + c = c + 1 + local e = collected[c] + local r = e.__p__ + return r, r.dt, e.ni + end + end + end +end + +function xml.collected(root,pattern,reverse) -- e + local collected = applylpath(root,pattern) + if not collected then + return dummy + elseif reverse then + local c = #collected + 1 + return function() + if c > 1 then + c = c - 1 + return collected[c] + end + end + else + local n, c = #collected, 0 + return function() + if c < n then + c = c + 1 + return collected[c] + end + end + end +end + +-- handy + +function xml.inspect(collection,pattern) + pattern = pattern or "." 
+ for e in xml.collected(collection,pattern or ".") do + report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e)) + end +end + +-- texy (see xfdf): + +local function split(e) + local dt = e.dt + if dt then + for i=1,#dt do + local dti = dt[i] + if type(dti) == "string" then + dti = gsub(dti,"^[\n\r]*(.-)[\n\r]*","%1") + dti = gsub(dti,"[\n\r]+","\n\n") + dt[i] = dti + else + split(dti) + end + end + end + return e +end + +function xml.finalizers.paragraphs(c) + for i=1,#c do + split(c[i]) + end + return c +end diff --git a/tex/context/base/lxml-mis.lua b/tex/context/base/lxml-mis.lua index 94a26b974..6afc45002 100644 --- a/tex/context/base/lxml-mis.lua +++ b/tex/context/base/lxml-mis.lua @@ -1,103 +1,103 @@ -if not modules then modules = { } end modules ['lxml-mis'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local xml, lpeg, string = xml, lpeg, string - -local concat = table.concat -local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring -local format, gsub, match = string.format, string.gsub, string.match -local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns -local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs - -lpegpatterns.xml = lpegpatterns.xml or { } -local xmlpatterns = lpegpatterns.xml - ---[[ldx-- -
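-- A small sketch of the two user interface helpers defined above; it is not
-- part of the patch and "text.xml" plus the pattern are placeholders.
local root = xml.load("text.xml")
-- new method: the handler gets the matched element itself
xml.selection(root,"chapter/title",function(e)
    print(xml.tostring(e))
end)
-- old method: the handler gets the parent, its dt table and the index
xml.traverse(root,"chapter/title",function(r,d,k)
    print(xml.tostring(d[k]))
end)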

The following helper functions best belong to the lxml-ini -module. Some are here because we need then in the mk -document and other manuals, others came up when playing with -this module. Since this module is also used in we've -put them here instead of loading mode modules there then needed.

---ldx]]-- - -local function xmlgsub(t,old,new) -- will be replaced - local dt = t.dt - if dt then - for k=1,#dt do - local v = dt[k] - if type(v) == "string" then - dt[k] = gsub(v,old,new) - else - xmlgsub(v,old,new) - end - end - end -end - ---~ xml.gsub = xmlgsub - -function xml.stripleadingspaces(dk,d,k) -- cosmetic, for manual - if d and k then - local dkm = d[k-1] - if dkm and type(dkm) == "string" then - local s = match(dkm,"\n(%s+)") - xmlgsub(dk,"\n"..rep(" ",#s),"\n") - end - end -end - ---~ xml.escapes = { ['&'] = '&', ['<'] = '<', ['>'] = '>', ['"'] = '"' } ---~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end - ---~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end ---~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end ---~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>" - --- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg --- --- 1021:0335:0287:0247 - --- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ" --- --- 1559:0257:0288:0190 (last one suggested by roberto) - --- escaped = Cs((S("<&>") / xml.escapes + 1)^0) --- escaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0) -local normal = (1 - S("<&>"))^0 -local special = P("<")/"<" + P(">")/">" + P("&")/"&" -local escaped = Cs(normal * (special * normal)^0) - --- 100 * 1000 * "oeps< oeps> oeps&" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto) - -local normal = (1 - S"&")^0 -local special = P("<")/"<" + P(">")/">" + P("&")/"&" -local unescaped = Cs(normal * (special * normal)^0) - --- 100 * 5000 * "oeps oeps oeps " : gsub:lpeg == 623:501 msec (short tags, less difference) - -local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0) - -xmlpatterns.escaped = escaped -xmlpatterns.unescaped = unescaped -xmlpatterns.cleansed = cleansed - -function xml.escaped (str) return lpegmatch(escaped,str) end -function xml.unescaped(str) return lpegmatch(unescaped,str) end -function xml.cleansed (str) return lpegmatch(cleansed,str) end - --- this might move - -function xml.fillin(root,pattern,str,check) - local e = xml.first(root,pattern) - if e then - local n = #e.dt - if not check or n == 0 or (n == 1 and e.dt[1] == "") then - e.dt = { str } - end - end -end +if not modules then modules = { } end modules ['lxml-mis'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local xml, lpeg, string = xml, lpeg, string + +local concat = table.concat +local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring +local format, gsub, match = string.format, string.gsub, string.match +local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns +local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs + +lpegpatterns.xml = lpegpatterns.xml or { } +local xmlpatterns = lpegpatterns.xml + +--[[ldx-- +
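-- A quick sketch, not part of the patch, of what the string helpers in this
-- module do; the sample strings are invented.
local s = "1 < 2 & 2 > 1"
local e = xml.escaped(s)                -- <, > and & become their named entities
print(xml.unescaped(e) == s)            -- true: the inverse mapping
print(xml.cleansed("<b>bold</b> text")) -- "bold text": tag-like snippets are stripped
-- xml.fillin(root,"metadata/author","anonymous",true) would put the given
-- string in the first matching element, but only when that element is still empty.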

The following helper functions best belong to the lxml-ini +module. Some are here because we need then in the mk +document and other manuals, others came up when playing with +this module. Since this module is also used in we've +put them here instead of loading mode modules there then needed.

+--ldx]]-- + +local function xmlgsub(t,old,new) -- will be replaced + local dt = t.dt + if dt then + for k=1,#dt do + local v = dt[k] + if type(v) == "string" then + dt[k] = gsub(v,old,new) + else + xmlgsub(v,old,new) + end + end + end +end + +--~ xml.gsub = xmlgsub + +function xml.stripleadingspaces(dk,d,k) -- cosmetic, for manual + if d and k then + local dkm = d[k-1] + if dkm and type(dkm) == "string" then + local s = match(dkm,"\n(%s+)") + xmlgsub(dk,"\n"..rep(" ",#s),"\n") + end + end +end + +--~ xml.escapes = { ['&'] = '&', ['<'] = '<', ['>'] = '>', ['"'] = '"' } +--~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end + +--~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end +--~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end +--~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>" + +-- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg +-- +-- 1021:0335:0287:0247 + +-- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ" +-- +-- 1559:0257:0288:0190 (last one suggested by roberto) + +-- escaped = Cs((S("<&>") / xml.escapes + 1)^0) +-- escaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0) +local normal = (1 - S("<&>"))^0 +local special = P("<")/"<" + P(">")/">" + P("&")/"&" +local escaped = Cs(normal * (special * normal)^0) + +-- 100 * 1000 * "oeps< oeps> oeps&" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto) + +local normal = (1 - S"&")^0 +local special = P("<")/"<" + P(">")/">" + P("&")/"&" +local unescaped = Cs(normal * (special * normal)^0) + +-- 100 * 5000 * "oeps oeps oeps " : gsub:lpeg == 623:501 msec (short tags, less difference) + +local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0) + +xmlpatterns.escaped = escaped +xmlpatterns.unescaped = unescaped +xmlpatterns.cleansed = cleansed + +function xml.escaped (str) return lpegmatch(escaped,str) end +function xml.unescaped(str) return lpegmatch(unescaped,str) end +function xml.cleansed (str) return lpegmatch(cleansed,str) end + +-- this might move + +function xml.fillin(root,pattern,str,check) + local e = xml.first(root,pattern) + if e then + local n = #e.dt + if not check or n == 0 or (n == 1 and e.dt[1] == "") then + e.dt = { str } + end + end +end diff --git a/tex/context/base/lxml-sor.lua b/tex/context/base/lxml-sor.lua index 951017bcd..a31d0ebb8 100644 --- a/tex/context/base/lxml-sor.lua +++ b/tex/context/base/lxml-sor.lua @@ -1,159 +1,159 @@ -if not modules then modules = { } end modules ['lxml-sor'] = { - version = 1.001, - comment = "companion to lxml-sor.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, concat, rep = string.format, table.concat, string.rep -local lpegmatch = lpeg.match - -local xml, lxml = xml, lxml - -lxml.sorters = lxml.sorters or { } - -if not lxml.splitid then - local splitter = lpeg.C((1-lpeg.P(":"))^1) * lpeg.P("::") * lpeg.C(lpeg.P(1)^1) - function lxml.splitid(id) - local d, i = lpegmatch(splitter,id) - if d then - return d, i - else - return "", id - end - end -end - -local lists = { } - -function lxml.sorters.reset(name) - lists[name] = { - sorted = false, - entries = { }, - reverse = { }, - results = { }, - } -end - -function lxml.sorters.add(name,n,key) - local list = lists[name] - if list.sorted then - -- reverse is messed up, we could regenerate it and go on - else - local entries = list and list.entries - if 
entries then - local reverse = list.reverse - local e = reverse[n] - if e then - local keys = entries[e][2] - keys[#keys+1] = key - else - entries[#entries+1] = { n, { key } } - reverse[n] = #entries - end - end - end -end - -function lxml.sorters.show(name) - local list = lists[name] - local entries = list and list.entries - local NC, NR, bold = context.NC, context.NR, context.bold -- somehow bold is not working - if entries then - local maxn = 1 - for i=1,#entries do - if #entries[i][2] > maxn then maxn = #entries[i][2] end - end - context.starttabulate { "|Tr|Tr|" .. rep("Tlp|",maxn) } - NC() bold("n") - NC() bold("id") - if maxn > 1 then - for i=1,maxn do - NC() bold("entry " .. i) - end - else - NC() bold("entry") - end - NC() NR() - context.HL() - for i=1,#entries do - local entry = entries[i] - local document, node = lxml.splitid(entry[1]) - NC() context(i) - NC() context(node) - local e = entry[2] - for i=1,#e do - NC() context.detokenize(e[i]) - end - NC() NR() - end - context.stoptabulate() - end -end - -lxml.sorters.compare = sorters.comparers.basic -- (a,b) - -function lxml.sorters.sort(name) - local list = lists[name] - local entries = list and list.entries - if entries then - -- filtering - local results = { } - list.results = results - for i=1,#entries do - local entry = entries[i] - results[i] = { - entry = entry[1], - key = concat(entry[2], " "), - } - end - -- preparation - local strip = sorters.strip - local splitter = sorters.splitters.utf - local firstofsplit = sorters.firstofsplit - for i=1, #results do - local r = results[i] - r.split = splitter(strip(r.key)) - end - -- sorting - sorters.sort(results,lxml.sorters.compare) - -- finalizing - list.nofsorted = #results - local split = { } - for k=1,#results do -- rather generic so maybe we need a function - local v = results[k] - local entry, tag = firstofsplit(v) - local s = split[entry] -- keeps track of change - if not s then - s = { tag = tag, data = { } } - split[entry] = s - end - s.data[#s.data+1] = v - end - list.results = split - -- done - list.sorted = true - end -end - -function lxml.sorters.flush(name,setup) - local list = lists[name] - local results = list and list.results - local xmlw = context.xmlw - if results and next(results) then - for key, result in next, results do - local tag, data = result.tag, result.data - for d=1,#data do - xmlw(setup,data[d].entry) - end - end - else - local entries = list and list.entries - if entries then - for i=1,#entries do - xmlw(setup,entries[i][1]) - end - end - end -end +if not modules then modules = { } end modules ['lxml-sor'] = { + version = 1.001, + comment = "companion to lxml-sor.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, concat, rep = string.format, table.concat, string.rep +local lpegmatch = lpeg.match + +local xml, lxml = xml, lxml + +lxml.sorters = lxml.sorters or { } + +if not lxml.splitid then + local splitter = lpeg.C((1-lpeg.P(":"))^1) * lpeg.P("::") * lpeg.C(lpeg.P(1)^1) + function lxml.splitid(id) + local d, i = lpegmatch(splitter,id) + if d then + return d, i + else + return "", id + end + end +end + +local lists = { } + +function lxml.sorters.reset(name) + lists[name] = { + sorted = false, + entries = { }, + reverse = { }, + results = { }, + } +end + +function lxml.sorters.add(name,n,key) + local list = lists[name] + if list.sorted then + -- reverse is messed up, we could regenerate it and go on + else + local entries 
= list and list.entries + if entries then + local reverse = list.reverse + local e = reverse[n] + if e then + local keys = entries[e][2] + keys[#keys+1] = key + else + entries[#entries+1] = { n, { key } } + reverse[n] = #entries + end + end + end +end + +function lxml.sorters.show(name) + local list = lists[name] + local entries = list and list.entries + local NC, NR, bold = context.NC, context.NR, context.bold -- somehow bold is not working + if entries then + local maxn = 1 + for i=1,#entries do + if #entries[i][2] > maxn then maxn = #entries[i][2] end + end + context.starttabulate { "|Tr|Tr|" .. rep("Tlp|",maxn) } + NC() bold("n") + NC() bold("id") + if maxn > 1 then + for i=1,maxn do + NC() bold("entry " .. i) + end + else + NC() bold("entry") + end + NC() NR() + context.HL() + for i=1,#entries do + local entry = entries[i] + local document, node = lxml.splitid(entry[1]) + NC() context(i) + NC() context(node) + local e = entry[2] + for i=1,#e do + NC() context.detokenize(e[i]) + end + NC() NR() + end + context.stoptabulate() + end +end + +lxml.sorters.compare = sorters.comparers.basic -- (a,b) + +function lxml.sorters.sort(name) + local list = lists[name] + local entries = list and list.entries + if entries then + -- filtering + local results = { } + list.results = results + for i=1,#entries do + local entry = entries[i] + results[i] = { + entry = entry[1], + key = concat(entry[2], " "), + } + end + -- preparation + local strip = sorters.strip + local splitter = sorters.splitters.utf + local firstofsplit = sorters.firstofsplit + for i=1, #results do + local r = results[i] + r.split = splitter(strip(r.key)) + end + -- sorting + sorters.sort(results,lxml.sorters.compare) + -- finalizing + list.nofsorted = #results + local split = { } + for k=1,#results do -- rather generic so maybe we need a function + local v = results[k] + local entry, tag = firstofsplit(v) + local s = split[entry] -- keeps track of change + if not s then + s = { tag = tag, data = { } } + split[entry] = s + end + s.data[#s.data+1] = v + end + list.results = split + -- done + list.sorted = true + end +end + +function lxml.sorters.flush(name,setup) + local list = lists[name] + local results = list and list.results + local xmlw = context.xmlw + if results and next(results) then + for key, result in next, results do + local tag, data = result.tag, result.data + for d=1,#data do + xmlw(setup,data[d].entry) + end + end + else + local entries = list and list.entries + if entries then + for i=1,#entries do + xmlw(setup,entries[i][1]) + end + end + end +end diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua index 2bb5844fc..b6c2b1b13 100644 --- a/tex/context/base/lxml-tab.lua +++ b/tex/context/base/lxml-tab.lua @@ -1,1367 +1,1367 @@ -if not modules then modules = { } end modules ['lxml-tab'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this module needs a cleanup: check latest lpeg, passing args, (sub)grammar, etc etc --- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the --- trouble - --- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua) --- maybe when letter -> utf, else name .. then we need an option to the serializer .. 
a bit --- of work so we delay this till we cleanup - -local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end) - -local report_xml = logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end - ---[[ldx-- -

The parser used here is inspired by the variant discussed in the lua book, but -handles comment and processing instructions, has a different structure, provides -parent access; a first version used different trickery but was less optimized to we -went this route. First we had a find based parser, now we have an based one. -The find based parser can be found in l-xml-edu.lua along with other older code.

- -

Beware, the interface may change. For instance at, ns, tg, dt may get more -verbose names. Once the code is stable we will also remove some tracing and -optimize the code.

- -

I might even decide to reimplement the parser using the latest trickery -as the current variant was written when showed up and it's easier now to -build tables in one go.

---ldx]]-- - -xml = xml or { } -local xml = xml - ---~ local xml = xml - -local concat, remove, insert = table.concat, table.remove, table.insert -local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber -local lower, find, match, gsub = string.lower, string.find, string.match, string.gsub -local utfchar = utf.char -local lpegmatch = lpeg.match -local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs -local formatters = string.formatters - ---[[ldx-- -

First a hack to enable namespace resolving. A namespace is characterized by -a . The following function associates a namespace prefix with a -pattern. We use , which in this case is more than twice as fast as a -find based solution where we loop over an array of patterns. Less code and -much cleaner.

---ldx]]-- - -xml.xmlns = xml.xmlns or { } - -local check = P(false) -local parse = check - ---[[ldx-- -

The next function associates a namespace prefix with an . This -normally happens independent of parsing.

- - -xml.registerns("mml","mathml") - ---ldx]]-- - -function xml.registerns(namespace, pattern) -- pattern can be an lpeg - check = check + C(P(lower(pattern))) / namespace - parse = P { P(check) + 1 * V(1) } -end - ---[[ldx-- -

The next function also registers a namespace, but this time we map a -given namespace prefix onto a registered one, using the given -. This used for attributes like xmlns:m.

- - -xml.checkns("m","http://www.w3.org/mathml") - ---ldx]]-- - -function xml.checkns(namespace,url) - local ns = lpegmatch(parse,lower(url)) - if ns and namespace ~= ns then - xml.xmlns[namespace] = ns - end -end - ---[[ldx-- -

Next we provide a way to turn an into a registered -namespace. This used for the xmlns attribute.

- - -resolvedns = xml.resolvens("http://www.w3.org/mathml") - - -This returns mml. ---ldx]]-- - -function xml.resolvens(url) - return lpegmatch(parse,lower(url)) or "" -end - ---[[ldx-- -
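-- A combined sketch of the three namespace helpers, not part of the patch;
-- it reuses the url from the examples above.
xml.registerns("mml","mathml")                   -- urls containing "mathml" resolve to "mml"
print(xml.resolvens("http://www.w3.org/mathml")) -- "mml"
xml.checkns("m","http://www.w3.org/mathml")      -- remap the document prefix "m" ...
print(xml.xmlns["m"])                            -- ... so it now points to "mml"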

A namespace in an element can be remapped onto the registered -one efficiently by using the xml.xmlns table.

---ldx]]-- - ---[[ldx-- -

This version uses . We follow the same approach as before, stack and top and -such. This version is about twice as fast which is mostly due to the fact that -we don't have to prepare the stream for cdata, doctype etc etc. This variant is -is dedicated to Luigi Scarso, who challenged me with 40 megabyte files that -took 12.5 seconds to load (1.5 for file io and the rest for tree building). With -the implementation we got that down to less 7.3 seconds. Loading the 14 - interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.

- -

Next comes the parser. The rather messy doctype definition comes in many -disguises so it is no surprice that later on have to dedicate quite some - code to it.

- - - - - - - - - - -

The code may look a bit complex but this is mostly due to the fact that we -resolve namespaces and attach metatables. There is only one public function:

- - -local x = xml.convert(somestring) - - -

An optional second boolean argument tells this function not to create a root -element.

- -
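-- In the code below the second argument is a settings table rather than a
-- plain boolean; a sketch, not part of the patch, with invented markup:
local full = xml.convert("<a><b/></a>")                     -- wrapped in a special @rt@ root
local bare = xml.convert("<a><b/></a>", { no_root = true }) -- no wrapper element added
-- xml.toxml, further down in this module, is a small wrapper that does the
-- same for strings and passes already converted tables through untouched.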

Valid entities are:

- - - - - - ---ldx]]-- - --- not just one big nested table capture (lpeg overflow) - -local nsremap, resolvens = xml.xmlns, xml.resolvens - -local stack = { } -local top = { } -local dt = { } -local at = { } -local xmlns = { } -local errorstr = nil -local entities = { } -local strip = false -local cleanup = false -local utfize = false -local resolve_predefined = false -local unify_predefined = false - -local dcache = { } -local hcache = { } -local acache = { } - -local mt = { } - -local function initialize_mt(root) - mt = { __index = root } -- will be redefined later -end - -function xml.setproperty(root,k,v) - getmetatable(root).__index[k] = v -end - -function xml.checkerror(top,toclose) - return "" -- can be set -end - -local function add_attribute(namespace,tag,value) - if cleanup and #value > 0 then - value = cleanup(value) -- new - end - if tag == "xmlns" then - xmlns[#xmlns+1] = resolvens(value) - at[tag] = value - elseif namespace == "" then - at[tag] = value - elseif namespace == "xmlns" then - xml.checkns(tag,value) - at["xmlns:" .. tag] = value - else - -- for the moment this way: - at[namespace .. ":" .. tag] = value - end -end - -local function add_empty(spacing, namespace, tag) - if #spacing > 0 then - dt[#dt+1] = spacing - end - local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace - top = stack[#stack] - dt = top.dt - local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top } - dt[#dt+1] = t - setmetatable(t, mt) - if at.xmlns then - remove(xmlns) - end - at = { } -end - -local function add_begin(spacing, namespace, tag) - if #spacing > 0 then - dt[#dt+1] = spacing - end - local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace - top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] } - setmetatable(top, mt) - dt = top.dt - stack[#stack+1] = top - at = { } -end - -local function add_end(spacing, namespace, tag) - if #spacing > 0 then - dt[#dt+1] = spacing - end - local toclose = remove(stack) - top = stack[#stack] - if #stack < 1 then - errorstr = formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "") - elseif toclose.tg ~= tag then -- no namespace check - errorstr = formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "") - end - dt = top.dt - dt[#dt+1] = toclose - -- dt[0] = top -- nasty circular reference when serializing table - if toclose.at.xmlns then - remove(xmlns) - end -end - -local function add_text(text) - if cleanup and #text > 0 then - dt[#dt+1] = cleanup(text) - else - dt[#dt+1] = text - end -end - -local function add_special(what, spacing, text) - if #spacing > 0 then - dt[#dt+1] = spacing - end - if strip and (what == "@cm@" or what == "@dt@") then - -- forget it - else - dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } } - end -end - -local function set_message(txt) - errorstr = "garbage at the end of the file: " .. 
gsub(txt,"([ \n\r\t]*)","") -end - -local reported_attribute_errors = { } - -local function attribute_value_error(str) - if not reported_attribute_errors[str] then - report_xml("invalid attribute value %a",str) - reported_attribute_errors[str] = true - at._error_ = str - end - return str -end - -local function attribute_specification_error(str) - if not reported_attribute_errors[str] then - report_xml("invalid attribute specification %a",str) - reported_attribute_errors[str] = true - at._error_ = str - end - return str -end - -xml.placeholders = { - unknown_dec_entity = function(str) return str == "" and "&error;" or formatters["&%s;"](str) end, - unknown_hex_entity = function(str) return formatters["&#x%s;"](str) end, - unknown_any_entity = function(str) return formatters["&#x%s;"](str) end, -} - -local placeholders = xml.placeholders - -local function fromhex(s) - local n = tonumber(s,16) - if n then - return utfchar(n) - else - return formatters["h:%s"](s), true - end -end - -local function fromdec(s) - local n = tonumber(s) - if n then - return utfchar(n) - else - return formatters["d:%s"](s), true - end -end - --- one level expansion (simple case), no checking done - -local rest = (1-P(";"))^0 -local many = P(1)^0 - -local parsedentity = - P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) + - (P("#x")*(many/fromhex) + P("#")*(many/fromdec)) - --- parsing in the xml file - -local predefined_unified = { - [38] = "&", - [42] = """, - [47] = "'", - [74] = "<", - [76] = ">", -} - -local predefined_simplified = { - [38] = "&", amp = "&", - [42] = '"', quot = '"', - [47] = "'", apos = "'", - [74] = "<", lt = "<", - [76] = ">", gt = ">", -} - -local nofprivates = 0xF0000 -- shared but seldom used - -local privates_u = { -- unescaped - [ [[&]] ] = "&", - [ [["]] ] = """, - [ [[']] ] = "'", - [ [[<]] ] = "<", - [ [[>]] ] = ">", -} - -local privates_p = { -} - -local privates_n = { - -- keeps track of defined ones -} - -local escaped = utf.remapper(privates_u) - -local function unescaped(s) - local p = privates_n[s] - if not p then - nofprivates = nofprivates + 1 - p = utfchar(nofprivates) - privates_n[s] = p - s = "&" .. s .. ";" -- todo: use char-ent to map to hex - privates_u[p] = s - privates_p[p] = s - end - return p -end - -local unprivatized = utf.remapper(privates_p) - -xml.privatetoken = unescaped -xml.unprivatized = unprivatized -xml.privatecodes = privates_n - -local function handle_hex_entity(str) - local h = hcache[str] - if not h then - local n = tonumber(str,16) - h = unify_predefined and predefined_unified[n] - if h then - if trace_entities then - report_xml("utfize, converting hex entity &#x%s; into %a",str,h) - end - elseif utfize then - h = (n and utfchar(n)) or xml.unknown_hex_entity(str) or "" - if not n then - report_xml("utfize, ignoring hex entity &#x%s;",str) - elseif trace_entities then - report_xml("utfize, converting hex entity &#x%s; into %a",str,h) - end - else - if trace_entities then - report_xml("found entity &#x%s;",str) - end - h = "&#x" .. str .. 
";" - end - hcache[str] = h - end - return h -end - -local function handle_dec_entity(str) - local d = dcache[str] - if not d then - local n = tonumber(str) - d = unify_predefined and predefined_unified[n] - if d then - if trace_entities then - report_xml("utfize, converting dec entity &#%s; into %a",str,d) - end - elseif utfize then - d = (n and utfchar(n)) or placeholders.unknown_dec_entity(str) or "" - if not n then - report_xml("utfize, ignoring dec entity &#%s;",str) - elseif trace_entities then - report_xml("utfize, converting dec entity &#%s; into %a",str,d) - end - else - if trace_entities then - report_xml("found entity &#%s;",str) - end - d = "&#" .. str .. ";" - end - dcache[str] = d - end - return d -end - -xml.parsedentitylpeg = parsedentity - -local function handle_any_entity(str) - if resolve then - local a = acache[str] -- per instance ! todo - if not a then - a = resolve_predefined and predefined_simplified[str] - if a then - if trace_entities then - report_xml("resolving entity &%s; to predefined %a",str,a) - end - else - if type(resolve) == "function" then - a = resolve(str) or entities[str] - else - a = entities[str] - end - if a then - if type(a) == "function" then - if trace_entities then - report_xml("expanding entity &%s; to function call",str) - end - a = a(str) or "" - end - a = lpegmatch(parsedentity,a) or a -- for nested - if trace_entities then - report_xml("resolving entity &%s; to internal %a",str,a) - end - else - local unknown_any_entity = placeholders.unknown_any_entity - if unknown_any_entity then - a = unknown_any_entity(str) or "" - end - if a then - if trace_entities then - report_xml("resolving entity &%s; to external %s",str,a) - end - else - if trace_entities then - report_xml("keeping entity &%s;",str) - end - if str == "" then - a = "&error;" - else - a = "&" .. str .. ";" - end - end - end - end - acache[str] = a - elseif trace_entities then - if not acache[str] then - report_xml("converting entity &%s; to %a",str,a) - acache[str] = a - end - end - return a - else - local a = acache[str] - if not a then - a = resolve_predefined and predefined_simplified[str] - if a then - -- one of the predefined - acache[str] = a - if trace_entities then - report_xml("entity &%s; becomes %a",str,a) - end - elseif str == "" then - if trace_entities then - report_xml("invalid entity &%s;",str) - end - a = "&error;" - acache[str] = a - else - if trace_entities then - report_xml("entity &%s; is made private",str) - end - -- a = "&" .. str .. 
";" - a = unescaped(str) - acache[str] = a - end - end - return a - end -end - -local function handle_end_entity(chr) - report_xml("error in entity, %a found instead of %a",chr,";") -end - -local space = S(' \r\n\t') -local open = P('<') -local close = P('>') -local squote = S("'") -local dquote = S('"') -local equal = P('=') -local slash = P('/') -local colon = P(':') -local semicolon = P(';') -local ampersand = P('&') -local valid = R('az', 'AZ', '09') + S('_-.') -local name_yes = C(valid^1) * colon * C(valid^1) -local name_nop = C(P(true)) * C(valid^1) -local name = name_yes + name_nop -local utfbom = lpeg.patterns.utfbom -- no capture -local spacing = C(space^0) - ------ entitycontent = (1-open-semicolon)^0 -local anyentitycontent = (1-open-semicolon-space-close)^0 -local hexentitycontent = R("AF","af","09")^0 -local decentitycontent = R("09")^0 -local parsedentity = P("#")/"" * ( - P("x")/"" * (hexentitycontent/handle_hex_entity) + - (decentitycontent/handle_dec_entity) - ) + (anyentitycontent/handle_any_entity) -local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity)) - -local text_unparsed = C((1-open)^1) -local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1) - -local somespace = space^1 -local optionalspace = space^0 - ------ value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value -local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value - -local endofattributes = slash * close + close -- recovery of flacky html -local whatever = space * name * optionalspace * equal ------ wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error ------ wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error ------ wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error -local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error - -local attributevalue = value + wrongvalue - -local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute ------ attributes = (attribute)^0 - -local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0 - -local parsedtext = text_parsed / add_text -local unparsedtext = text_unparsed / add_text -local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example - -local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty -local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin -local endelement = (spacing * open * slash * name * optionalspace * close) / add_end - -local begincomment = open * P("!--") -local endcomment = P("--") * close -local begininstruction = open * P("?") -local endinstruction = P("?") * close -local begincdata = open * P("![CDATA[") -local endcdata = P("]]") * close - -local someinstruction = C((1 - endinstruction)^0) -local somecomment = C((1 - endcomment )^0) -local somecdata = C((1 - endcdata )^0) - -local function normalentity(k,v ) entities[k] = v end -local function systementity(k,v,n) entities[k] = v end -local function publicentity(k,v,n) entities[k] = v end - --- todo: separate dtd parser - -local begindoctype = open * P("!DOCTYPE") -local enddoctype = close -local beginset = P("[") -local endset = P("]") -local doctypename = 
C((1-somespace-close)^0) -local elementdoctype = optionalspace * P(" & - cleanup = settings.text_cleanup - entities = settings.entities or { } - -- - if utfize == nil then - settings.utfize_entities = true - utfize = true - end - if resolve_predefined == nil then - settings.resolve_predefined_entities = true - resolve_predefined = true - end - -- - stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil - acache, hcache, dcache = { }, { }, { } -- not stored - reported_attribute_errors = { } - if settings.parent_root then - mt = getmetatable(settings.parent_root) - else - initialize_mt(top) - end - stack[#stack+1] = top - top.dt = { } - dt = top.dt - if not data or data == "" then - errorstr = "empty xml file" - elseif utfize or resolve then - if lpegmatch(grammar_parsed_text,data) then - errorstr = "" - else - errorstr = "invalid xml file - parsed text" - end - elseif type(data) == "string" then - if lpegmatch(grammar_unparsed_text,data) then - errorstr = "" - else - errorstr = "invalid xml file - unparsed text" - end - else - errorstr = "invalid xml file - no text at all" - end - local result - if errorstr and errorstr ~= "" then - result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } } - setmetatable(stack, mt) - local errorhandler = settings.error_handler - if errorhandler == false then - -- no error message - else - errorhandler = errorhandler or xml.errorhandler - if errorhandler then - local currentresource = settings.currentresource - if currentresource and currentresource ~= "" then - xml.errorhandler(formatters["load error in [%s]: %s"](currentresource,errorstr)) - else - xml.errorhandler(formatters["load error: %s"](errorstr)) - end - end - end - else - result = stack[1] - end - if not settings.no_root then - result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={ }, entities = entities, settings = settings } - setmetatable(result, mt) - local rdt = result.dt - for k=1,#rdt do - local v = rdt[k] - if type(v) == "table" and not v.special then -- always table -) - result.ri = k -- rootindex - v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this ! - break - end - end - end - if errorstr and errorstr ~= "" then - result.error = true - end - result.statistics = { - entities = { - decimals = dcache, - hexadecimals = hcache, - names = acache, - } - } - strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil - unify_predefined, cleanup, entities = nil, nil, nil - stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil - acache, hcache, dcache = nil, nil, nil - reported_attribute_errors, mt, errorhandler = nil, nil, nil - return result -end - --- Because we can have a crash (stack issues) with faulty xml, we wrap this one --- in a protector: - -function xmlconvert(data,settings) - local ok, result = pcall(function() return _xmlconvert_(data,settings) end) - if ok then - return result - else - return _xmlconvert_("",settings) - end -end - -xml.convert = xmlconvert - -function xml.inheritedconvert(data,xmldata) -- xmldata is parent - local settings = xmldata.settings - if settings then - settings.parent_root = xmldata -- to be tested - end - -- settings.no_root = true - local xc = xmlconvert(data,settings) -- hm, we might need to locate settings - -- xc.settings = nil - -- xc.entities = nil - -- xc.special = nil - -- xc.ri = nil - -- print(xc.tg) - return xc -end - ---[[ldx-- -

Packaging data in an xml like table is done with the following -function. Maybe it will go away (when not used).

---ldx]]-- - -function xml.is_valid(root) - return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er -end - -function xml.package(tag,attributes,data) - local ns, tg = match(tag,"^(.-):?([^:]+)$") - local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} } - setmetatable(t, mt) - return t -end - -function xml.is_valid(root) - return root and not root.error -end - -xml.errorhandler = report_xml - ---[[ldx-- -

We cannot load an from a filehandle so we need to load -the whole file first. The function accepts a string representing -a filename or a file handle.

---ldx]]-- - -function xml.load(filename,settings) - local data = "" - if type(filename) == "string" then - -- local data = io.loaddata(filename) - -todo: check type in io.loaddata - local f = io.open(filename,'r') -- why not 'rb' - if f then - data = f:read("*all") -- io.readall(f) ... only makes sense for large files - f:close() - end - elseif filename then -- filehandle - data = filename:read("*all") -- io.readall(f) ... only makes sense for large files - end - if settings then - settings.currentresource = filename - local result = xmlconvert(data,settings) - settings.currentresource = nil - return result - else - return xmlconvert(data,{ currentresource = filename }) - end -end - ---[[ldx-- -
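-- A sketch of loading a file with explicit settings, not part of the patch;
-- the file name is a placeholder and only settings keys used above appear here.
local root = xml.load("manual.xml", {
    utfize_entities             = true,  -- numeric entities become utf characters
    resolve_predefined_entities = true,  -- handle the predefined named entities
    error_handler               = false, -- stay quiet when loading fails
})
if xml.is_valid(root) then
    print(xml.name(xml.body(root)))      -- tag of the real root element
end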

When we inject new elements, we need to convert strings to -valid trees, which is what the next function does.

---ldx]]-- - -local no_root = { no_root = true } - -function xml.toxml(data) - if type(data) == "string" then - local root = { xmlconvert(data,no_root) } - return (#root > 1 and root) or root[1] - else - return data - end -end - ---[[ldx-- -

For copying a tree we use a dedicated function instead of the -generic table copier. Since we know what we're dealing with we -can speed up things a bit. The second argument is not to be used!

---ldx]]-- - -local function copy(old,tables) - if old then - tables = tables or { } - local new = { } - if not tables[old] then - tables[old] = new - end - for k,v in next, old do - new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v - end - local mt = getmetatable(old) - if mt then - setmetatable(new,mt) - end - return new - else - return { } - end -end - -xml.copy = copy - ---[[ldx-- -

In serializing the tree or parts of the tree is a major -actitivity which is why the following function is pretty optimized resulting -in a few more lines of code than needed. The variant that uses the formatting -function for all components is about 15% slower than the concatinating -alternative.

---ldx]]-- - --- todo: add when not present - -function xml.checkbom(root) -- can be made faster - if root.ri then - local dt = root.dt - for k=1,#dt do - local v = dt[k] - if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then - return - end - end - insert(dt, 1, { special = true, ns = "", tg = "@pi@", dt = { "xml version='1.0' standalone='yes'" } } ) - insert(dt, 2, "\n" ) - end -end - ---[[ldx-- -

At the cost of some 25% runtime overhead you can first convert the tree to a string -and then handle the lot.

---ldx]]-- - --- new experimental reorganized serialize - -local function verbose_element(e,handlers) -- options - local handle = handlers.handle - local serialize = handlers.serialize - local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn - local ats = eat and next(eat) and { } - if ats then - for k,v in next, eat do - ats[#ats+1] = formatters['%s=%q'](k,escaped(v)) - end - end - if ern and trace_entities and ern ~= ens then - ens = ern - end - if ens ~= "" then - if edt and #edt > 0 then - if ats then - handle("<",ens,":",etg," ",concat(ats," "),">") - else - handle("<",ens,":",etg,">") - end - for i=1,#edt do - local e = edt[i] - if type(e) == "string" then - handle(escaped(e)) - else - serialize(e,handlers) - end - end - handle("") - else - if ats then - handle("<",ens,":",etg," ",concat(ats," "),"/>") - else - handle("<",ens,":",etg,"/>") - end - end - else - if edt and #edt > 0 then - if ats then - handle("<",etg," ",concat(ats," "),">") - else - handle("<",etg,">") - end - for i=1,#edt do - local e = edt[i] - if type(e) == "string" then - handle(escaped(e)) -- option: hexify escaped entities - else - serialize(e,handlers) - end - end - handle("") - else - if ats then - handle("<",etg," ",concat(ats," "),"/>") - else - handle("<",etg,"/>") - end - end - end -end - -local function verbose_pi(e,handlers) - handlers.handle("") -end - -local function verbose_comment(e,handlers) - handlers.handle("") -end - -local function verbose_cdata(e,handlers) - handlers.handle("") -end - -local function verbose_doctype(e,handlers) - handlers.handle("") -end - -local function verbose_root(e,handlers) - handlers.serialize(e.dt,handlers) -end - -local function verbose_text(e,handlers) - handlers.handle(escaped(e)) -end - -local function verbose_document(e,handlers) - local serialize = handlers.serialize - local functions = handlers.functions - for i=1,#e do - local ei = e[i] - if type(ei) == "string" then - functions["@tx@"](ei,handlers) - else - serialize(ei,handlers) - end - end -end - -local function serialize(e,handlers,...) - local initialize = handlers.initialize - local finalize = handlers.finalize - local functions = handlers.functions - if initialize then - local state = initialize(...) - if not state == true then - return state - end - end - local etg = e.tg - if etg then - (functions[etg] or functions["@el@"])(e,handlers) - -- elseif type(e) == "string" then - -- functions["@tx@"](e,handlers) - else - functions["@dc@"](e,handlers) -- dc ? 
- end - if finalize then - return finalize() - end -end - -local function xserialize(e,handlers) - local functions = handlers.functions - local etg = e.tg - if etg then - (functions[etg] or functions["@el@"])(e,handlers) - -- elseif type(e) == "string" then - -- functions["@tx@"](e,handlers) - else - functions["@dc@"](e,handlers) - end -end - -local handlers = { } - -local function newhandlers(settings) - local t = table.copy(handlers[settings and settings.parent or "verbose"] or { }) -- merge - if settings then - for k,v in next, settings do - if type(v) == "table" then - local tk = t[k] if not tk then tk = { } t[k] = tk end - for kk,vv in next, v do - tk[kk] = vv - end - else - t[k] = v - end - end - if settings.name then - handlers[settings.name] = t - end - end - utilities.storage.mark(t) - return t -end - -local nofunction = function() end - -function xml.sethandlersfunction(handler,name,fnc) - handler.functions[name] = fnc or nofunction -end - -function xml.gethandlersfunction(handler,name) - return handler.functions[name] -end - -function xml.gethandlers(name) - return handlers[name] -end - -newhandlers { - name = "verbose", - initialize = false, -- faster than nil and mt lookup - finalize = false, -- faster than nil and mt lookup - serialize = xserialize, - handle = print, - functions = { - ["@dc@"] = verbose_document, - ["@dt@"] = verbose_doctype, - ["@rt@"] = verbose_root, - ["@el@"] = verbose_element, - ["@pi@"] = verbose_pi, - ["@cm@"] = verbose_comment, - ["@cd@"] = verbose_cdata, - ["@tx@"] = verbose_text, - } -} - ---[[ldx-- -

How you deal with saving data depends on your preferences. For a 40 MB database -file the timing on a 2.3 Core Duo are as follows (time in seconds):

-
-
-1.3 : load data from file to string
-6.1 : convert string into tree
-5.3 : saving in file using xmlsave
-6.8 : converting to string using xml.tostring
-3.6 : saving converted string in file
-
-
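-- The timings above correspond to this sequence of calls; a sketch, not part
-- of the patch, with placeholder file names:
local data = io.loaddata("big.xml")   -- load data from file to string
local root = xml.convert(data)        -- convert string into tree
xml.save(root,"copy.xml")             -- saving in file using xmlsave
local str  = xml.tostring(root)       -- converting to string using xml.tostring
io.savedata("copy.xml",str)           -- saving converted string in file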

Beware, these were timing with the old routine but measurements will not be that -much different I guess.

---ldx]]-- - --- maybe this will move to lxml-xml - -local result - -local xmlfilehandler = newhandlers { - name = "file", - initialize = function(name) - result = io.open(name,"wb") - return result - end, - finalize = function() - result:close() - return true - end, - handle = function(...) - result:write(...) - end, -} - --- no checking on writeability here but not faster either --- --- local xmlfilehandler = newhandlers { --- initialize = function(name) --- io.output(name,"wb") --- return true --- end, --- finalize = function() --- io.close() --- return true --- end, --- handle = io.write, --- } - -function xml.save(root,name) - serialize(root,xmlfilehandler,name) -end - -local result - -local xmlstringhandler = newhandlers { - name = "string", - initialize = function() - result = { } - return result - end, - finalize = function() - return concat(result) - end, - handle = function(...) - result[#result+1] = concat { ... } - end, -} - -local function xmltostring(root) -- 25% overhead due to collecting - if not root then - return "" - elseif type(root) == "string" then - return root - else -- if next(root) then -- next is faster than type (and >0 test) - return serialize(root,xmlstringhandler) or "" - end -end - -local function __tostring(root) -- inline - return (root and xmltostring(root)) or "" -end - -initialize_mt = function(root) -- redefinition - mt = { __tostring = __tostring, __index = root } -end - -xml.defaulthandlers = handlers -xml.newhandlers = newhandlers -xml.serialize = serialize -xml.tostring = xmltostring - ---[[ldx-- -
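-- A sketch of a private serializer built with the newhandlers mechanism shown
-- above; not part of the patch, the handler name is invented.
local root   = xml.convert("<a>one<b>two</b></a>")
local buffer = { }
local myhandler = xml.newhandlers {
    name   = "buffered", -- registered, so xml.gethandlers("buffered") returns it
    handle = function(...) buffer[#buffer+1] = table.concat { ... } end,
}
xml.serialize(root,myhandler)
print(table.concat(buffer)) -- the tree serialized back to xml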

The next function operated on the content only and needs a handle function -that accepts a string.

---ldx]]-- - -local function xmlstring(e,handle) - if not handle or (e.special and e.tg ~= "@rt@") then - -- nothing - elseif e.tg then - local edt = e.dt - if edt then - for i=1,#edt do - xmlstring(edt[i],handle) - end - end - else - handle(e) - end -end - -xml.string = xmlstring - ---[[ldx-- -

A few helpers:

---ldx]]-- - ---~ xmlsetproperty(root,"settings",settings) - -function xml.settings(e) - while e do - local s = e.settings - if s then - return s - else - e = e.__p__ - end - end - return nil -end - -function xml.root(e) - local r = e - while e do - e = e.__p__ - if e then - r = e - end - end - return r -end - -function xml.parent(root) - return root.__p__ -end - -function xml.body(root) - return root.ri and root.dt[root.ri] or root -- not ok yet -end - -function xml.name(root) - if not root then - return "" - end - local ns = root.ns - local tg = root.tg - if ns == "" then - return tg - else - return ns .. ":" .. tg - end -end - ---[[ldx-- -
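-- A short sketch of the accessors defined above, not part of the patch:
local doc  = xml.convert("<a><b><c/></b></a>")
local body = xml.body(doc)          -- the real root element, here <a/>
print(xml.name(body))               -- "a" (or ns:tg when there is a namespace)
local c    = xml.filter(doc,"a/b/c")[1]
print(xml.name(xml.parent(c)))      -- "b"
print(xml.root(c) == doc)           -- true: walks up the __p__ chain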

The next helper erases an element but keeps the table as it is, -and since empty strings are not serialized (effectively) it does -not harm. Copying the table would take more time. Usage:

---ldx]]-- - -function xml.erase(dt,k) - if dt then - if k then - dt[k] = "" - else for k=1,#dt do - dt[1] = { "" } - end end - end -end - ---[[ldx-- -

The next helper assigns a tree (or string). Usage:

- - -dt[k] = xml.assign(root) or xml.assign(dt,k,root) - ---ldx]]-- - -function xml.assign(dt,k,root) - if dt and k then - dt[k] = type(root) == "table" and xml.body(root) or root - return dt[k] - else - return xml.body(root) - end -end - --- the following helpers may move - ---[[ldx-- -
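-- A sketch of the two helpers documented here, not part of the patch:
local doc = xml.convert("<a><b>old</b></a>")
local b   = xml.filter(doc,"a/b")[1]
xml.assign(b.dt,1,"new") -- replaces the content: dt[1] = "new"
xml.erase(b.dt,1)        -- empties it again: dt[1] = "" (the slot itself is kept)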

The next helper assigns a tree (or string). Usage:

- -xml.tocdata(e) -xml.tocdata(e,"error") - ---ldx]]-- - -function xml.tocdata(e,wrapper) -- a few more in the aux module - local whatever = type(e) == "table" and xmltostring(e.dt) or e or "" - if wrapper then - whatever = formatters["<%s>%s"](wrapper,whatever,wrapper) - end - local t = { special = true, ns = "", tg = "@cd@", at = { }, rn = "", dt = { whatever }, __p__ = e } - setmetatable(t,getmetatable(e)) - e.dt = { t } -end - -function xml.makestandalone(root) - if root.ri then - local dt = root.dt - for k=1,#dt do - local v = dt[k] - if type(v) == "table" and v.special and v.tg == "@pi@" then - local txt = v.dt[1] - if find(txt,"xml.*version=") then - v.dt[1] = txt .. " standalone='yes'" - break - end - end - end - end - return root -end - -function xml.kind(e) - local dt = e and e.dt - if dt then - local n = #dt - if n == 1 then - local d = dt[1] - if d.special then - local tg = d.tg - if tg == "@cd@" then - return "cdata" - elseif tg == "@cm" then - return "comment" - elseif tg == "@pi@" then - return "instruction" - elseif tg == "@dt@" then - return "declaration" - end - elseif type(d) == "string" then - return "text" - end - return "element" - elseif n > 0 then - return "mixed" - end - end - return "empty" -end +if not modules then modules = { } end modules ['lxml-tab'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this module needs a cleanup: check latest lpeg, passing args, (sub)grammar, etc etc +-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the +-- trouble + +-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua) +-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit +-- of work so we delay this till we cleanup + +local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end) + +local report_xml = logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end + +--[[ldx-- +
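-- A sketch combining two helpers from the end of this module; not part of
-- the patch:
local doc = xml.convert("<a>some text</a>")
local a   = xml.body(doc)
print(xml.kind(a))  -- "text": the element has one string child
xml.tocdata(a)      -- the content is now wrapped in a @cd@ (cdata) child
print(xml.kind(a))  -- "cdata"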

The parser used here is inspired by the variant discussed in the lua book, but +handles comment and processing instructions, has a different structure, provides +parent access; a first version used different trickery but was less optimized to we +went this route. First we had a find based parser, now we have an based one. +The find based parser can be found in l-xml-edu.lua along with other older code.

+ +

Beware, the interface may change. For instance at, ns, tg, dt may get more +verbose names. Once the code is stable we will also remove some tracing and +optimize the code.

+ +

+I might even decide to reimplement the parser using the latest lpeg trickery
+as the current variant was written when lpeg showed up and it's easier now to
+build tables in one go.

+--ldx]]-- + +xml = xml or { } +local xml = xml + +--~ local xml = xml + +local concat, remove, insert = table.concat, table.remove, table.insert +local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber +local lower, find, match, gsub = string.lower, string.find, string.match, string.gsub +local utfchar = utf.char +local lpegmatch = lpeg.match +local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs +local formatters = string.formatters + +--[[ldx-- +

+First a hack to enable namespace resolving. A namespace is characterized by
+a URL. The following function associates a namespace prefix with a
+pattern. We use lpeg, which in this case is more than twice as fast as a
+find based solution where we loop over an array of patterns. Less code and
+much cleaner.

+--ldx]]-- + +xml.xmlns = xml.xmlns or { } + +local check = P(false) +local parse = check + +--[[ldx-- +

+The next function associates a namespace prefix with a URL. This
+normally happens independently of parsing.

+ + +xml.registerns("mml","mathml") + +--ldx]]-- + +function xml.registerns(namespace, pattern) -- pattern can be an lpeg + check = check + C(P(lower(pattern))) / namespace + parse = P { P(check) + 1 * V(1) } +end + +--[[ldx-- +

+The next function also registers a namespace, but this time we map a
+given namespace prefix onto a registered one, using the given
+URL. This is used for attributes like xmlns:m.

+ + +xml.checkns("m","http://www.w3.org/mathml") + +--ldx]]-- + +function xml.checkns(namespace,url) + local ns = lpegmatch(parse,lower(url)) + if ns and namespace ~= ns then + xml.xmlns[namespace] = ns + end +end + +--[[ldx-- +

+Next we provide a way to turn a URL into a registered
+namespace. This is used for the xmlns attribute.

+ + +resolvedns = xml.resolvens("http://www.w3.org/mathml") + + +This returns mml. +--ldx]]-- + +function xml.resolvens(url) + return lpegmatch(parse,lower(url)) or "" +end + +--[[ldx-- +

A namespace in an element can be remapped onto the registered +one efficiently by using the xml.xmlns table.

+--ldx]]-- + +--[[ldx-- +

+This version uses lpeg. We follow the same approach as before, stack and top and
+such. This version is about twice as fast which is mostly due to the fact that
+we don't have to prepare the stream for cdata, doctype etc etc. This variant
+is dedicated to Luigi Scarso, who challenged me with 40 megabyte files that
+took 12.5 seconds to load (1.5 for file io and the rest for tree building). With
+the lpeg implementation we got that down to less than 7.3 seconds. Loading the 14
+ConTeXt interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.

+ +

+Next comes the parser. The rather messy doctype definition comes in many
+disguises so it is no surprise that later on we have to dedicate quite some
+code to it.


The code may look a bit complex but this is mostly due to the fact that we +resolve namespaces and attach metatables. There is only one public function:

+local x = xml.convert(somestring)

An optional second boolean argument tells this function not to create a root +element.

+ +

Valid entities are:

+ + + + + + +--ldx]]-- + +-- not just one big nested table capture (lpeg overflow) + +local nsremap, resolvens = xml.xmlns, xml.resolvens + +local stack = { } +local top = { } +local dt = { } +local at = { } +local xmlns = { } +local errorstr = nil +local entities = { } +local strip = false +local cleanup = false +local utfize = false +local resolve_predefined = false +local unify_predefined = false + +local dcache = { } +local hcache = { } +local acache = { } + +local mt = { } + +local function initialize_mt(root) + mt = { __index = root } -- will be redefined later +end + +function xml.setproperty(root,k,v) + getmetatable(root).__index[k] = v +end + +function xml.checkerror(top,toclose) + return "" -- can be set +end + +local function add_attribute(namespace,tag,value) + if cleanup and #value > 0 then + value = cleanup(value) -- new + end + if tag == "xmlns" then + xmlns[#xmlns+1] = resolvens(value) + at[tag] = value + elseif namespace == "" then + at[tag] = value + elseif namespace == "xmlns" then + xml.checkns(tag,value) + at["xmlns:" .. tag] = value + else + -- for the moment this way: + at[namespace .. ":" .. tag] = value + end +end + +local function add_empty(spacing, namespace, tag) + if #spacing > 0 then + dt[#dt+1] = spacing + end + local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace + top = stack[#stack] + dt = top.dt + local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top } + dt[#dt+1] = t + setmetatable(t, mt) + if at.xmlns then + remove(xmlns) + end + at = { } +end + +local function add_begin(spacing, namespace, tag) + if #spacing > 0 then + dt[#dt+1] = spacing + end + local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace + top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] } + setmetatable(top, mt) + dt = top.dt + stack[#stack+1] = top + at = { } +end + +local function add_end(spacing, namespace, tag) + if #spacing > 0 then + dt[#dt+1] = spacing + end + local toclose = remove(stack) + top = stack[#stack] + if #stack < 1 then + errorstr = formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "") + elseif toclose.tg ~= tag then -- no namespace check + errorstr = formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "") + end + dt = top.dt + dt[#dt+1] = toclose + -- dt[0] = top -- nasty circular reference when serializing table + if toclose.at.xmlns then + remove(xmlns) + end +end + +local function add_text(text) + if cleanup and #text > 0 then + dt[#dt+1] = cleanup(text) + else + dt[#dt+1] = text + end +end + +local function add_special(what, spacing, text) + if #spacing > 0 then + dt[#dt+1] = spacing + end + if strip and (what == "@cm@" or what == "@dt@") then + -- forget it + else + dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } } + end +end + +local function set_message(txt) + errorstr = "garbage at the end of the file: " .. 
gsub(txt,"([ \n\r\t]*)","") +end + +local reported_attribute_errors = { } + +local function attribute_value_error(str) + if not reported_attribute_errors[str] then + report_xml("invalid attribute value %a",str) + reported_attribute_errors[str] = true + at._error_ = str + end + return str +end + +local function attribute_specification_error(str) + if not reported_attribute_errors[str] then + report_xml("invalid attribute specification %a",str) + reported_attribute_errors[str] = true + at._error_ = str + end + return str +end + +xml.placeholders = { + unknown_dec_entity = function(str) return str == "" and "&error;" or formatters["&%s;"](str) end, + unknown_hex_entity = function(str) return formatters["&#x%s;"](str) end, + unknown_any_entity = function(str) return formatters["&#x%s;"](str) end, +} + +local placeholders = xml.placeholders + +local function fromhex(s) + local n = tonumber(s,16) + if n then + return utfchar(n) + else + return formatters["h:%s"](s), true + end +end + +local function fromdec(s) + local n = tonumber(s) + if n then + return utfchar(n) + else + return formatters["d:%s"](s), true + end +end + +-- one level expansion (simple case), no checking done + +local rest = (1-P(";"))^0 +local many = P(1)^0 + +local parsedentity = + P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) + + (P("#x")*(many/fromhex) + P("#")*(many/fromdec)) + +-- parsing in the xml file + +local predefined_unified = { + [38] = "&", + [42] = """, + [47] = "'", + [74] = "<", + [76] = ">", +} + +local predefined_simplified = { + [38] = "&", amp = "&", + [42] = '"', quot = '"', + [47] = "'", apos = "'", + [74] = "<", lt = "<", + [76] = ">", gt = ">", +} + +local nofprivates = 0xF0000 -- shared but seldom used + +local privates_u = { -- unescaped + [ [[&]] ] = "&", + [ [["]] ] = """, + [ [[']] ] = "'", + [ [[<]] ] = "<", + [ [[>]] ] = ">", +} + +local privates_p = { +} + +local privates_n = { + -- keeps track of defined ones +} + +local escaped = utf.remapper(privates_u) + +local function unescaped(s) + local p = privates_n[s] + if not p then + nofprivates = nofprivates + 1 + p = utfchar(nofprivates) + privates_n[s] = p + s = "&" .. s .. ";" -- todo: use char-ent to map to hex + privates_u[p] = s + privates_p[p] = s + end + return p +end + +local unprivatized = utf.remapper(privates_p) + +xml.privatetoken = unescaped +xml.unprivatized = unprivatized +xml.privatecodes = privates_n + +local function handle_hex_entity(str) + local h = hcache[str] + if not h then + local n = tonumber(str,16) + h = unify_predefined and predefined_unified[n] + if h then + if trace_entities then + report_xml("utfize, converting hex entity &#x%s; into %a",str,h) + end + elseif utfize then + h = (n and utfchar(n)) or xml.unknown_hex_entity(str) or "" + if not n then + report_xml("utfize, ignoring hex entity &#x%s;",str) + elseif trace_entities then + report_xml("utfize, converting hex entity &#x%s; into %a",str,h) + end + else + if trace_entities then + report_xml("found entity &#x%s;",str) + end + h = "&#x" .. str .. 
";" + end + hcache[str] = h + end + return h +end + +local function handle_dec_entity(str) + local d = dcache[str] + if not d then + local n = tonumber(str) + d = unify_predefined and predefined_unified[n] + if d then + if trace_entities then + report_xml("utfize, converting dec entity &#%s; into %a",str,d) + end + elseif utfize then + d = (n and utfchar(n)) or placeholders.unknown_dec_entity(str) or "" + if not n then + report_xml("utfize, ignoring dec entity &#%s;",str) + elseif trace_entities then + report_xml("utfize, converting dec entity &#%s; into %a",str,d) + end + else + if trace_entities then + report_xml("found entity &#%s;",str) + end + d = "&#" .. str .. ";" + end + dcache[str] = d + end + return d +end + +xml.parsedentitylpeg = parsedentity + +local function handle_any_entity(str) + if resolve then + local a = acache[str] -- per instance ! todo + if not a then + a = resolve_predefined and predefined_simplified[str] + if a then + if trace_entities then + report_xml("resolving entity &%s; to predefined %a",str,a) + end + else + if type(resolve) == "function" then + a = resolve(str) or entities[str] + else + a = entities[str] + end + if a then + if type(a) == "function" then + if trace_entities then + report_xml("expanding entity &%s; to function call",str) + end + a = a(str) or "" + end + a = lpegmatch(parsedentity,a) or a -- for nested + if trace_entities then + report_xml("resolving entity &%s; to internal %a",str,a) + end + else + local unknown_any_entity = placeholders.unknown_any_entity + if unknown_any_entity then + a = unknown_any_entity(str) or "" + end + if a then + if trace_entities then + report_xml("resolving entity &%s; to external %s",str,a) + end + else + if trace_entities then + report_xml("keeping entity &%s;",str) + end + if str == "" then + a = "&error;" + else + a = "&" .. str .. ";" + end + end + end + end + acache[str] = a + elseif trace_entities then + if not acache[str] then + report_xml("converting entity &%s; to %a",str,a) + acache[str] = a + end + end + return a + else + local a = acache[str] + if not a then + a = resolve_predefined and predefined_simplified[str] + if a then + -- one of the predefined + acache[str] = a + if trace_entities then + report_xml("entity &%s; becomes %a",str,a) + end + elseif str == "" then + if trace_entities then + report_xml("invalid entity &%s;",str) + end + a = "&error;" + acache[str] = a + else + if trace_entities then + report_xml("entity &%s; is made private",str) + end + -- a = "&" .. str .. 
";" + a = unescaped(str) + acache[str] = a + end + end + return a + end +end + +local function handle_end_entity(chr) + report_xml("error in entity, %a found instead of %a",chr,";") +end + +local space = S(' \r\n\t') +local open = P('<') +local close = P('>') +local squote = S("'") +local dquote = S('"') +local equal = P('=') +local slash = P('/') +local colon = P(':') +local semicolon = P(';') +local ampersand = P('&') +local valid = R('az', 'AZ', '09') + S('_-.') +local name_yes = C(valid^1) * colon * C(valid^1) +local name_nop = C(P(true)) * C(valid^1) +local name = name_yes + name_nop +local utfbom = lpeg.patterns.utfbom -- no capture +local spacing = C(space^0) + +----- entitycontent = (1-open-semicolon)^0 +local anyentitycontent = (1-open-semicolon-space-close)^0 +local hexentitycontent = R("AF","af","09")^0 +local decentitycontent = R("09")^0 +local parsedentity = P("#")/"" * ( + P("x")/"" * (hexentitycontent/handle_hex_entity) + + (decentitycontent/handle_dec_entity) + ) + (anyentitycontent/handle_any_entity) +local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity)) + +local text_unparsed = C((1-open)^1) +local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1) + +local somespace = space^1 +local optionalspace = space^0 + +----- value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value +local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value + +local endofattributes = slash * close + close -- recovery of flacky html +local whatever = space * name * optionalspace * equal +----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error +----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error +----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error +local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error + +local attributevalue = value + wrongvalue + +local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute +----- attributes = (attribute)^0 + +local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0 + +local parsedtext = text_parsed / add_text +local unparsedtext = text_unparsed / add_text +local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example + +local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty +local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin +local endelement = (spacing * open * slash * name * optionalspace * close) / add_end + +local begincomment = open * P("!--") +local endcomment = P("--") * close +local begininstruction = open * P("?") +local endinstruction = P("?") * close +local begincdata = open * P("![CDATA[") +local endcdata = P("]]") * close + +local someinstruction = C((1 - endinstruction)^0) +local somecomment = C((1 - endcomment )^0) +local somecdata = C((1 - endcdata )^0) + +local function normalentity(k,v ) entities[k] = v end +local function systementity(k,v,n) entities[k] = v end +local function publicentity(k,v,n) entities[k] = v end + +-- todo: separate dtd parser + +local begindoctype = open * P("!DOCTYPE") +local enddoctype = close +local beginset = P("[") +local endset = P("]") +local doctypename = 
C((1-somespace-close)^0) +local elementdoctype = optionalspace * P(" & + cleanup = settings.text_cleanup + entities = settings.entities or { } + -- + if utfize == nil then + settings.utfize_entities = true + utfize = true + end + if resolve_predefined == nil then + settings.resolve_predefined_entities = true + resolve_predefined = true + end + -- + stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil + acache, hcache, dcache = { }, { }, { } -- not stored + reported_attribute_errors = { } + if settings.parent_root then + mt = getmetatable(settings.parent_root) + else + initialize_mt(top) + end + stack[#stack+1] = top + top.dt = { } + dt = top.dt + if not data or data == "" then + errorstr = "empty xml file" + elseif utfize or resolve then + if lpegmatch(grammar_parsed_text,data) then + errorstr = "" + else + errorstr = "invalid xml file - parsed text" + end + elseif type(data) == "string" then + if lpegmatch(grammar_unparsed_text,data) then + errorstr = "" + else + errorstr = "invalid xml file - unparsed text" + end + else + errorstr = "invalid xml file - no text at all" + end + local result + if errorstr and errorstr ~= "" then + result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } } + setmetatable(stack, mt) + local errorhandler = settings.error_handler + if errorhandler == false then + -- no error message + else + errorhandler = errorhandler or xml.errorhandler + if errorhandler then + local currentresource = settings.currentresource + if currentresource and currentresource ~= "" then + xml.errorhandler(formatters["load error in [%s]: %s"](currentresource,errorstr)) + else + xml.errorhandler(formatters["load error: %s"](errorstr)) + end + end + end + else + result = stack[1] + end + if not settings.no_root then + result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={ }, entities = entities, settings = settings } + setmetatable(result, mt) + local rdt = result.dt + for k=1,#rdt do + local v = rdt[k] + if type(v) == "table" and not v.special then -- always table -) + result.ri = k -- rootindex + v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this ! + break + end + end + end + if errorstr and errorstr ~= "" then + result.error = true + end + result.statistics = { + entities = { + decimals = dcache, + hexadecimals = hcache, + names = acache, + } + } + strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil + unify_predefined, cleanup, entities = nil, nil, nil + stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil + acache, hcache, dcache = nil, nil, nil + reported_attribute_errors, mt, errorhandler = nil, nil, nil + return result +end + +-- Because we can have a crash (stack issues) with faulty xml, we wrap this one +-- in a protector: + +function xmlconvert(data,settings) + local ok, result = pcall(function() return _xmlconvert_(data,settings) end) + if ok then + return result + else + return _xmlconvert_("",settings) + end +end + +xml.convert = xmlconvert + +function xml.inheritedconvert(data,xmldata) -- xmldata is parent + local settings = xmldata.settings + if settings then + settings.parent_root = xmldata -- to be tested + end + -- settings.no_root = true + local xc = xmlconvert(data,settings) -- hm, we might need to locate settings + -- xc.settings = nil + -- xc.entities = nil + -- xc.special = nil + -- xc.ri = nil + -- print(xc.tg) + return xc +end + +--[[ldx-- +

+Packaging data in an xml-like table is done with the following
+function. Maybe it will go away (when not used).

+--ldx]]-- + +function xml.is_valid(root) + return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er +end + +function xml.package(tag,attributes,data) + local ns, tg = match(tag,"^(.-):?([^:]+)$") + local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} } + setmetatable(t, mt) + return t +end + +function xml.is_valid(root) + return root and not root.error +end + +xml.errorhandler = report_xml + +--[[ldx-- +
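A minimal usage sketch of xml.package as defined above; the tag name, attribute and content below are invented for illustration:

    -- hypothetical element: namespace prefix "m", tag "mrow"
    local e = xml.package("m:mrow", { class = "demo" }, { "1 + 2" })
    -- e.ns == "m", e.tg == "mrow", e.at.class == "demo", e.dt[1] == "1 + 2"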

+We cannot load an xml document from a filehandle so we need to load
+the whole file first. The function accepts a string representing
+a filename or a file handle.

+--ldx]]-- + +function xml.load(filename,settings) + local data = "" + if type(filename) == "string" then + -- local data = io.loaddata(filename) - -todo: check type in io.loaddata + local f = io.open(filename,'r') -- why not 'rb' + if f then + data = f:read("*all") -- io.readall(f) ... only makes sense for large files + f:close() + end + elseif filename then -- filehandle + data = filename:read("*all") -- io.readall(f) ... only makes sense for large files + end + if settings then + settings.currentresource = filename + local result = xmlconvert(data,settings) + settings.currentresource = nil + return result + else + return xmlconvert(data,{ currentresource = filename }) + end +end + +--[[ldx-- +
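A usage sketch of xml.load as defined above; the file name is hypothetical and the settings table is optional:

    local root = xml.load("somefile.xml")      -- from a file name
    local f = io.open("somefile.xml","r")      -- or from an open file handle
    if f then
        local same = xml.load(f)               -- the handle is read but not closed here
        f:close()
    end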

+When we inject new elements, we need to convert strings to
+valid xml trees, which is what the next function does.

+--ldx]]-- + +local no_root = { no_root = true } + +function xml.toxml(data) + if type(data) == "string" then + local root = { xmlconvert(data,no_root) } + return (#root > 1 and root) or root[1] + else + return data + end +end + +--[[ldx-- +
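A small sketch of xml.toxml: a string gets converted (without a root element), anything else is passed through untouched:

    local t = xml.toxml("<a>whatever</a>")   -- string becomes a (list of) tree(s)
    local s = xml.toxml(t)                    -- already a table, returned as is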

For copying a tree we use a dedicated function instead of the +generic table copier. Since we know what we're dealing with we +can speed up things a bit. The second argument is not to be used!

+--ldx]]-- + +local function copy(old,tables) + if old then + tables = tables or { } + local new = { } + if not tables[old] then + tables[old] = new + end + for k,v in next, old do + new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v + end + local mt = getmetatable(old) + if mt then + setmetatable(new,mt) + end + return new + else + return { } + end +end + +xml.copy = copy + +--[[ldx-- +
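Using the dedicated copier; root is assumed to be a previously converted tree and the second argument is internal bookkeeping only:

    local duplicate = xml.copy(root)   -- deep copy, metatables preserved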

+In ConTeXt, serializing the tree or parts of the tree is a major
+activity, which is why the following function is pretty optimized, resulting
+in a few more lines of code than needed. The variant that uses the formatting
+function for all components is about 15% slower than the concatenating
+alternative.

+--ldx]]-- + +-- todo: add when not present + +function xml.checkbom(root) -- can be made faster + if root.ri then + local dt = root.dt + for k=1,#dt do + local v = dt[k] + if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then + return + end + end + insert(dt, 1, { special = true, ns = "", tg = "@pi@", dt = { "xml version='1.0' standalone='yes'" } } ) + insert(dt, 2, "\n" ) + end +end + +--[[ldx-- +

At the cost of some 25% runtime overhead you can first convert the tree to a string +and then handle the lot.

+--ldx]]-- + +-- new experimental reorganized serialize + +local function verbose_element(e,handlers) -- options + local handle = handlers.handle + local serialize = handlers.serialize + local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn + local ats = eat and next(eat) and { } + if ats then + for k,v in next, eat do + ats[#ats+1] = formatters['%s=%q'](k,escaped(v)) + end + end + if ern and trace_entities and ern ~= ens then + ens = ern + end + if ens ~= "" then + if edt and #edt > 0 then + if ats then + handle("<",ens,":",etg," ",concat(ats," "),">") + else + handle("<",ens,":",etg,">") + end + for i=1,#edt do + local e = edt[i] + if type(e) == "string" then + handle(escaped(e)) + else + serialize(e,handlers) + end + end + handle("") + else + if ats then + handle("<",ens,":",etg," ",concat(ats," "),"/>") + else + handle("<",ens,":",etg,"/>") + end + end + else + if edt and #edt > 0 then + if ats then + handle("<",etg," ",concat(ats," "),">") + else + handle("<",etg,">") + end + for i=1,#edt do + local e = edt[i] + if type(e) == "string" then + handle(escaped(e)) -- option: hexify escaped entities + else + serialize(e,handlers) + end + end + handle("") + else + if ats then + handle("<",etg," ",concat(ats," "),"/>") + else + handle("<",etg,"/>") + end + end + end +end + +local function verbose_pi(e,handlers) + handlers.handle("") +end + +local function verbose_comment(e,handlers) + handlers.handle("") +end + +local function verbose_cdata(e,handlers) + handlers.handle("") +end + +local function verbose_doctype(e,handlers) + handlers.handle("") +end + +local function verbose_root(e,handlers) + handlers.serialize(e.dt,handlers) +end + +local function verbose_text(e,handlers) + handlers.handle(escaped(e)) +end + +local function verbose_document(e,handlers) + local serialize = handlers.serialize + local functions = handlers.functions + for i=1,#e do + local ei = e[i] + if type(ei) == "string" then + functions["@tx@"](ei,handlers) + else + serialize(ei,handlers) + end + end +end + +local function serialize(e,handlers,...) + local initialize = handlers.initialize + local finalize = handlers.finalize + local functions = handlers.functions + if initialize then + local state = initialize(...) + if not state == true then + return state + end + end + local etg = e.tg + if etg then + (functions[etg] or functions["@el@"])(e,handlers) + -- elseif type(e) == "string" then + -- functions["@tx@"](e,handlers) + else + functions["@dc@"](e,handlers) -- dc ? 
+ end + if finalize then + return finalize() + end +end + +local function xserialize(e,handlers) + local functions = handlers.functions + local etg = e.tg + if etg then + (functions[etg] or functions["@el@"])(e,handlers) + -- elseif type(e) == "string" then + -- functions["@tx@"](e,handlers) + else + functions["@dc@"](e,handlers) + end +end + +local handlers = { } + +local function newhandlers(settings) + local t = table.copy(handlers[settings and settings.parent or "verbose"] or { }) -- merge + if settings then + for k,v in next, settings do + if type(v) == "table" then + local tk = t[k] if not tk then tk = { } t[k] = tk end + for kk,vv in next, v do + tk[kk] = vv + end + else + t[k] = v + end + end + if settings.name then + handlers[settings.name] = t + end + end + utilities.storage.mark(t) + return t +end + +local nofunction = function() end + +function xml.sethandlersfunction(handler,name,fnc) + handler.functions[name] = fnc or nofunction +end + +function xml.gethandlersfunction(handler,name) + return handler.functions[name] +end + +function xml.gethandlers(name) + return handlers[name] +end + +newhandlers { + name = "verbose", + initialize = false, -- faster than nil and mt lookup + finalize = false, -- faster than nil and mt lookup + serialize = xserialize, + handle = print, + functions = { + ["@dc@"] = verbose_document, + ["@dt@"] = verbose_doctype, + ["@rt@"] = verbose_root, + ["@el@"] = verbose_element, + ["@pi@"] = verbose_pi, + ["@cm@"] = verbose_comment, + ["@cd@"] = verbose_cdata, + ["@tx@"] = verbose_text, + } +} + +--[[ldx-- +

+How you deal with saving data depends on your preferences. For a 40 MB database
+file the timings on a 2.3 GHz Core Duo are as follows (time in seconds):

+1.3 : load data from file to string
+6.1 : convert string into tree
+5.3 : saving in file using xmlsave
+6.8 : converting to string using xml.tostring
+3.6 : saving converted string in file

+Beware, these were timings with the old routine but measurements will not be that
+much different I guess.

+--ldx]]-- + +-- maybe this will move to lxml-xml + +local result + +local xmlfilehandler = newhandlers { + name = "file", + initialize = function(name) + result = io.open(name,"wb") + return result + end, + finalize = function() + result:close() + return true + end, + handle = function(...) + result:write(...) + end, +} + +-- no checking on writeability here but not faster either +-- +-- local xmlfilehandler = newhandlers { +-- initialize = function(name) +-- io.output(name,"wb") +-- return true +-- end, +-- finalize = function() +-- io.close() +-- return true +-- end, +-- handle = io.write, +-- } + +function xml.save(root,name) + serialize(root,xmlfilehandler,name) +end + +local result + +local xmlstringhandler = newhandlers { + name = "string", + initialize = function() + result = { } + return result + end, + finalize = function() + return concat(result) + end, + handle = function(...) + result[#result+1] = concat { ... } + end, +} + +local function xmltostring(root) -- 25% overhead due to collecting + if not root then + return "" + elseif type(root) == "string" then + return root + else -- if next(root) then -- next is faster than type (and >0 test) + return serialize(root,xmlstringhandler) or "" + end +end + +local function __tostring(root) -- inline + return (root and xmltostring(root)) or "" +end + +initialize_mt = function(root) -- redefinition + mt = { __tostring = __tostring, __index = root } +end + +xml.defaulthandlers = handlers +xml.newhandlers = newhandlers +xml.serialize = serialize +xml.tostring = xmltostring + +--[[ldx-- +

+The next function operates on the content only and needs a handle function
+that accepts a string.

+--ldx]]-- + +local function xmlstring(e,handle) + if not handle or (e.special and e.tg ~= "@rt@") then + -- nothing + elseif e.tg then + local edt = e.dt + if edt then + for i=1,#edt do + xmlstring(edt[i],handle) + end + end + else + handle(e) + end +end + +xml.string = xmlstring + +--[[ldx-- +
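A sketch of xml.string with a collecting handle; the handle only needs to accept strings:

    local buffer = { }
    xml.string(root, function(s) buffer[#buffer+1] = s end)
    local flat = table.concat(buffer)   -- all text content, no tags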

A few helpers:

+--ldx]]-- + +--~ xmlsetproperty(root,"settings",settings) + +function xml.settings(e) + while e do + local s = e.settings + if s then + return s + else + e = e.__p__ + end + end + return nil +end + +function xml.root(e) + local r = e + while e do + e = e.__p__ + if e then + r = e + end + end + return r +end + +function xml.parent(root) + return root.__p__ +end + +function xml.body(root) + return root.ri and root.dt[root.ri] or root -- not ok yet +end + +function xml.name(root) + if not root then + return "" + end + local ns = root.ns + local tg = root.tg + if ns == "" then + return tg + else + return ns .. ":" .. tg + end +end + +--[[ldx-- +
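A few of these helpers in action; root is assumed to be a converted tree and e some element inside it:

    local doc = xml.body(root)       -- the document element inside the @rt@ wrapper
    local top = xml.root(e)          -- walk the __p__ chain up to the top
    local tag = xml.name(doc)        -- "ns:tag" or just "tag" without a namespace
    local set = xml.settings(e)      -- the settings table stored at the root, if any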

The next helper erases an element but keeps the table as it is, +and since empty strings are not serialized (effectively) it does +not harm. Copying the table would take more time. Usage:

+--ldx]]--
+
+function xml.erase(dt,k)
+    if dt then
+        if k then
+            dt[k] = ""
+        else
+            for k=1,#dt do
+                dt[k] = ""
+            end
+        end
+    end
+end
+
+--[[ldx--
+
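Erasing in place; dt is assumed to be the content table of an element and the index is optional:

    xml.erase(e.dt,2)   -- blank out the second child only
    xml.erase(e.dt)     -- blank out all children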

The next helper assigns a tree (or string). Usage:

+ + +dt[k] = xml.assign(root) or xml.assign(dt,k,root) + +--ldx]]-- + +function xml.assign(dt,k,root) + if dt and k then + dt[k] = type(root) == "table" and xml.body(root) or root + return dt[k] + else + return xml.body(root) + end +end + +-- the following helpers may move + +--[[ldx-- +

+The next helper wraps the content of an element in a cdata node. Usage:

+ +xml.tocdata(e) +xml.tocdata(e,"error") + +--ldx]]-- + +function xml.tocdata(e,wrapper) -- a few more in the aux module + local whatever = type(e) == "table" and xmltostring(e.dt) or e or "" + if wrapper then + whatever = formatters["<%s>%s"](wrapper,whatever,wrapper) + end + local t = { special = true, ns = "", tg = "@cd@", at = { }, rn = "", dt = { whatever }, __p__ = e } + setmetatable(t,getmetatable(e)) + e.dt = { t } +end + +function xml.makestandalone(root) + if root.ri then + local dt = root.dt + for k=1,#dt do + local v = dt[k] + if type(v) == "table" and v.special and v.tg == "@pi@" then + local txt = v.dt[1] + if find(txt,"xml.*version=") then + v.dt[1] = txt .. " standalone='yes'" + break + end + end + end + end + return root +end + +function xml.kind(e) + local dt = e and e.dt + if dt then + local n = #dt + if n == 1 then + local d = dt[1] + if d.special then + local tg = d.tg + if tg == "@cd@" then + return "cdata" + elseif tg == "@cm" then + return "comment" + elseif tg == "@pi@" then + return "instruction" + elseif tg == "@dt@" then + return "declaration" + end + elseif type(d) == "string" then + return "text" + end + return "element" + elseif n > 0 then + return "mixed" + end + end + return "empty" +end diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua index 112f62751..936a96041 100644 --- a/tex/context/base/lxml-tex.lua +++ b/tex/context/base/lxml-tex.lua @@ -1,1686 +1,1686 @@ -if not modules then modules = { } end modules ['lxml-tex'] = { - version = 1.001, - comment = "companion to lxml-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Because we split and resolve entities we use the direct printing --- interface and not the context one. If we ever do that there will --- be an cldf-xml helper library. 
- -local utfchar = utf.char -local concat, insert, remove = table.concat, table.insert, table.remove -local format, sub, gsub, find, gmatch, match = string.format, string.sub, string.gsub, string.find, string.gmatch, string.match -local type, next, tonumber, tostring, select = type, next, tonumber, tostring, select -local lpegmatch = lpeg.match -local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc - -local tex, xml = tex, xml -local lowerchars, upperchars, lettered = characters.lower, characters.upper, characters.lettered - -lxml = lxml or { } -local lxml = lxml - -local catcodenumbers = catcodes.numbers -local ctxcatcodes = catcodenumbers.ctxcatcodes -- todo: use different method -local notcatcodes = catcodenumbers.notcatcodes -- todo: use different method - -local context = context -local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing) - -local xmlelements, xmlcollected, xmlsetproperty = xml.elements, xml.collected, xml.setproperty -local xmlwithelements = xml.withelements -local xmlserialize, xmlcollect, xmltext, xmltostring = xml.serialize, xml.collect, xml.text, xml.tostring -local xmlapplylpath = xml.applylpath -local xmlunprivatized, xmlprivatetoken, xmlprivatecodes = xml.unprivatized, xml.privatetoken, xml.privatecodes - -local variables = (interfaces and interfaces.variables) or { } - -local insertbeforevalue, insertaftervalue = utilities.tables.insertbeforevalue, utilities.tables.insertaftervalue - -local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming - -local trace_setups = false trackers.register("lxml.setups", function(v) trace_setups = v end) -local trace_loading = false trackers.register("lxml.loading", function(v) trace_loading = v end) -local trace_access = false trackers.register("lxml.access", function(v) trace_access = v end) -local trace_comments = false trackers.register("lxml.comments", function(v) trace_comments = v end) -local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end) - -local report_lxml = logs.reporter("xml","tex") -local report_xml = logs.reporter("xml","tex") - -local forceraw, rawroot = false, nil - --- tex entities --- --- todo: unprivatize attributes - -lxml.entities = lxml.entities or { } - -storage.register("lxml/entities",lxml.entities,"lxml.entities") - ---~ xml.placeholders.unknown_any_entity = nil -- has to be per xml - -local xmlentities = xml.entities -local texentities = lxml.entities -local parsedentity = xml.parsedentitylpeg - -function lxml.registerentity(key,value) - texentities[key] = value - if trace_entities then - report_xml("registering tex entity %a as %a",key,value) - end -end - -function lxml.resolvedentity(str) - if forceraw then - if trace_entities then - report_xml("passing entity %a as &%s;",str,str) - end - context("&%s;",str) - else - local e = texentities[str] - if e then - local te = type(e) - if te == "function" then - if trace_entities then - report_xml("passing entity %a using function",str) - end - e(str) - elseif e then - if trace_entities then - report_xml("passing entity %a as %a using %a",str,e,"ctxcatcodes") - end - context(e) - end - return - end - local e = xmlentities[str] - if e then - local te = type(e) - if te == "function" then - e = e(str) - end - if e then - if trace_entities then - report_xml("passing entity %a as %a using %a",str,e,"notcatcodes") - end - contextsprint(notcatcodes,e) - return - end - end - -- resolve hex and dec, todo: escape # & etc for ctxcatcodes - -- normally 
this is already solved while loading the file - local chr, err = lpegmatch(parsedentity,str) - if chr then - if trace_entities then - report_xml("passing entity %a as %a using %a",str,chr,"ctxcatcodes") - end - context(chr) - elseif err then - if trace_entities then - report_xml("passing faulty entity %a as %a",str,err) - end - context(err) - else - local tag = upperchars(str) - if trace_entities then - report_xml("passing entity %a to \\xmle using tag %a",str,tag) - end - context.xmle(str,tag) -- we need to use our own upper - end - end -end - --- tex interface - -lxml.loaded = lxml.loaded or { } -local loaded = lxml.loaded - --- print(contextdirective("context-mathml-directive function reduction yes ")) --- print(contextdirective("context-mathml-directive function ")) - -xml.defaultprotocol = "tex" - -local finalizers = xml.finalizers - -finalizers.xml = finalizers.xml or { } -finalizers.tex = finalizers.tex or { } - -local xmlfinalizers = finalizers.xml -local texfinalizers = finalizers.tex - --- serialization with entity handling - -local ampersand = P("&") -local semicolon = P(";") -local entity = ampersand * C((1-semicolon)^1) * semicolon / lxml.resolvedentity -- context.bold - -local _, xmltextcapture = context.newtexthandler { - exception = entity, - catcodes = notcatcodes, -} - -local _, xmlspacecapture = context.newtexthandler { - endofline = context.xmlcdataobeyedline, - emptyline = context.xmlcdataobeyedline, - simpleline = context.xmlcdataobeyedline, - space = context.xmlcdataobeyedspace, - exception = entity, - catcodes = notcatcodes, -} - -local _, xmllinecapture = context.newtexthandler { - endofline = context.xmlcdataobeyedline, - emptyline = context.xmlcdataobeyedline, - simpleline = context.xmlcdataobeyedline, - exception = entity, - catcodes = notcatcodes, -} - -local _, ctxtextcapture = context.newtexthandler { - exception = entity, - catcodes = ctxcatcodes, -} - --- cdata - -local toverbatim = context.newverbosehandler { - line = context.xmlcdataobeyedline, - space = context.xmlcdataobeyedspace, - before = context.xmlcdatabefore, - after = context.xmlcdataafter, -} - -lxml.toverbatim = context.newverbosehandler { - line = context.xmlcdataobeyedline, - space = context.xmlcdataobeyedspace, - before = context.xmlcdatabefore, - after = context.xmlcdataafter, - strip = true, -} - --- raw flushing - -function lxml.startraw() - forceraw = true -end - -function lxml.stopraw() - forceraw = false -end - -function lxml.rawroot() - return rawroot -end - --- storage - -function lxml.store(id,root,filename) - loaded[id] = root - xmlsetproperty(root,"name",id) - if filename then - xmlsetproperty(root,"filename",filename) - end -end - -local splitter = lpeg.splitat("::") - -lxml.idsplitter = splitter - -function lxml.splitid(id) - local d, i = lpegmatch(splitter,id) - if d then - return d, i - else - return "", id - end -end - -local function getid(id, qualified) - if id then - local lid = loaded[id] - if lid then - return lid - elseif type(id) == "table" then - return id - else - local d, i = lpegmatch(splitter,id) - if d then - local ld = loaded[d] - if ld then - local ldi = ld.index - if ldi then - local root = ldi[tonumber(i)] - if root then - if qualified then -- we need this else two args that confuse others - return root, d - else - return root - end - elseif trace_access then - report_lxml("%a has no index entry %a",d,i) - end - elseif trace_access then - report_lxml("%a has no index",d) - end - elseif trace_access then - report_lxml("%a is not loaded",d) - end - elseif 
trace_access then - report_lxml("%a is not loaded",i) - end - end - elseif trace_access then - report_lxml("invalid id (nil)") - end -end - -lxml.id = getid -- we provide two names as locals can already use such -lxml.getid = getid -- names and we don't want clashes - -function lxml.root(id) - return loaded[id] -end - --- index - -local nofindices = 0 - -local function addindex(name,check_sum,force) - local root = getid(name) - if root and (not root.index or force) then -- weird, only called once - local n, index, maxindex, check = 0, root.index or { }, root.maxindex or 0, root.check or { } - local function nest(root) - local dt = root.dt - if not root.ix then - maxindex = maxindex + 1 - root.ix = maxindex - check[maxindex] = root.tg -- still needed ? - index[maxindex] = root - n = n + 1 - end - if dt then - for k=1,#dt do - local dk = dt[k] - if type(dk) == "table" then - nest(dk) - end - end - end - end - nest(root) - nofindices = nofindices + n - -- - if type(name) ~= "string" then - name = "unknown" - end - root.index = index - root.maxindex = maxindex - if trace_access then - report_lxml("indexed entries %a, found nodes %a",tostring(name),maxindex) - end - end -end - -lxml.addindex = addindex - --- another cache - -local function lxmlapplylpath(id,pattern) -- better inline, saves call - return xmlapplylpath(getid(id),pattern) -end - -lxml.filter = lxmlapplylpath - -function lxml.filterlist(list,pattern) - for s in gmatch(list,"[^, ]+") do -- we could cache a table - xmlapplylpath(getid(s),pattern) - end -end - -function lxml.applyfunction(id,name) - local f = xml.functions[name] - return f and f(getid(id)) -end - --- rather new, indexed storage (backward refs), maybe i will merge this - -function lxml.checkindex(name) - local root = getid(name) - return (root and root.index) or 0 -end - -function lxml.withindex(name,n,command) -- will change as name is always there now - local i, p = lpegmatch(splitter,n) - if p then - contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",n,"}") - else - contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",name,"::",n,"}") - end -end - -function lxml.getindex(name,n) -- will change as name is always there now - local i, p = lpegmatch(splitter,n) - if p then - contextsprint(ctxcatcodes,n) - else - contextsprint(ctxcatcodes,name,"::",n) - end -end - --- loading (to be redone, no overload) .. 
best use different methods and --- keep raw xml (at least as option) - -xml.originalload = xml.originalload or xml.load - -local noffiles, nofconverted = 0, 0 - -function xml.load(filename,settings) - noffiles, nofconverted = noffiles + 1, nofconverted + 1 - starttiming(xml) - local ok, data = resolvers.loadbinfile(filename) - settings = settings or { } - settings.currentresource = filename - local xmltable = xml.convert((ok and data) or "",settings) - settings.currentresource = nil - stoptiming(xml) - return xmltable -end - -local function entityconverter(id,str) - return xmlentities[str] or xmlprivatetoken(str) or "" -- roundtrip handler -end - -function lxml.convert(id,data,entities,compress,currentresource) - local settings = { -- we're now roundtrip anyway - unify_predefined_entities = true, - utfize_entities = true, - resolve_predefined_entities = true, - resolve_entities = function(str) return entityconverter(id,str) end, -- needed for mathml - currentresource = tostring(currentresource or id), - } - if compress and compress == variables.yes then - settings.strip_cm_and_dt = true - end - -- if entities and entities == variables.yes then - -- settings.utfize_entities = true - -- -- settings.resolve_entities = function (str) return entityconverter(id,str) end - -- end - return xml.convert(data,settings) -end - -function lxml.load(id,filename,compress,entities) - filename = commands.preparedfile(filename) -- not commands! - if trace_loading then - report_lxml("loading file %a as %a",filename,id) - end - noffiles, nofconverted = noffiles + 1, nofconverted + 1 - -- local xmltable = xml.load(filename) - starttiming(xml) - local ok, data = resolvers.loadbinfile(filename) - local xmltable = lxml.convert(id,(ok and data) or "",compress,entities,format("id: %s, file: %s",id,filename)) - stoptiming(xml) - lxml.store(id,xmltable,filename) - return xmltable, filename -end - -function lxml.register(id,xmltable,filename) - lxml.store(id,xmltable,filename) - return xmltable -end - -function lxml.include(id,pattern,attribute,recurse) - starttiming(xml) - local root = getid(id) - xml.include(root,pattern,attribute,recurse,function(filename) - if filename then - filename = commands.preparedfile(filename) - if file.dirname(filename) == "" and root.filename then - local dn = file.dirname(root.filename) - if dn ~= "" then - filename = file.join(dn,filename) - end - end - if trace_loading then - report_lxml("including file %a",filename) - end - noffiles, nofconverted = noffiles + 1, nofconverted + 1 - return resolvers.loadtexfile(filename) or "" - else - return "" - end - end) - stoptiming(xml) -end - -function xml.getbuffer(name,compress,entities) -- we need to make sure that commands are processed - if not name or name == "" then - name = tex.jobname - end - nofconverted = nofconverted + 1 - local data = buffers.getcontent(name) - xmltostring(lxml.convert(name,data,compress,entities,format("buffer: %s",tostring(name or "?")))) -- one buffer -end - -function lxml.loadbuffer(id,name,compress,entities) - starttiming(xml) - nofconverted = nofconverted + 1 - local data = buffers.collectcontent(name or id) -- name can be list - local xmltable = lxml.convert(id,data,compress,entities,format("buffer: %s",tostring(name or id or "?"))) - lxml.store(id,xmltable) - stoptiming(xml) - return xmltable, name or id -end - -function lxml.loaddata(id,str,compress,entities) - starttiming(xml) - nofconverted = nofconverted + 1 - local xmltable = lxml.convert(id,str or "",compress,entities,format("id: %s",id)) - 
lxml.store(id,xmltable) - stoptiming(xml) - return xmltable, id -end - -function lxml.loadregistered(id) - return loaded[id], id -end - --- e.command: --- --- string : setup --- true : text (no ) --- false : ignore --- function : call - -local function tex_doctype(e,handlers) - -- ignore -end - -local function tex_comment(e,handlers) - if trace_comments then - report_lxml("comment %a",e.dt[1]) - end -end - -local default_element_handler = xml.gethandlers("verbose").functions["@el@"] - -local function tex_element(e,handlers) - local command = e.command - if command == nil then - default_element_handler(e,handlers) - elseif command == true then - -- text (no ) / so, no mkii fallback then - handlers.serialize(e.dt,handlers) - elseif command == false then - -- ignore - else - local tc = type(command) - if tc == "string" then - local rootname, ix = e.name, e.ix - if rootname then - if not ix then - addindex(rootname,false,true) - ix = e.ix - end - -- faster than context.xmlw - contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",rootname,"::",ix,"}") - else - report_lxml("fatal error: no index for %a",command) - contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",ix or 0,"}") - end - elseif tc == "function" then - command(e) - end - end -end - -local pihandlers = { } xml.pihandlers = pihandlers - -local category = P("context-") * C((1-P("-"))^1) * P("-directive") -local space = S(" \n\r") -local spaces = space^0 -local class = C((1-space)^0) -local key = class -local value = C(P(1-(space * -1))^0) - -local parser = category * spaces * class * spaces * key * spaces * value - -pihandlers[#pihandlers+1] = function(str) - if str then - local a, b, c, d = lpegmatch(parser,str) - if d then - contextsprint(ctxcatcodes,"\\xmlcontextdirective{",a,"}{",b,"}{",c,"}{",d,"}") - end - end -end - -local function tex_pi(e,handlers) - local str = e.dt[1] - for i=1,#pihandlers do - pihandlers[i](str) - end -end - -local obeycdata = true - -function lxml.setcdata() - obeycdata = true -end - -function lxml.resetcdata() - obeycdata = false -end - -local function tex_cdata(e,handlers) - if obeycdata then - toverbatim(e.dt[1]) - end -end - -local function tex_text(e) - e = xmlunprivatized(e) - lpegmatch(xmltextcapture,e) -end - -local function ctx_text(e) -- can be just context(e) as we split there - lpegmatch(ctxtextcapture,e) -end - -local function tex_handle(...) - contextsprint(ctxcatcodes,...) 
-- notcatcodes is active anyway -end - -local xmltexhandler = xml.newhandlers { - name = "tex", - handle = tex_handle, - functions = { - -- ["@dc@"] = tex_document, - ["@dt@"] = tex_doctype, - -- ["@rt@"] = tex_root, - ["@el@"] = tex_element, - ["@pi@"] = tex_pi, - ["@cm@"] = tex_comment, - ["@cd@"] = tex_cdata, - ["@tx@"] = tex_text, - } -} - -lxml.xmltexhandler = xmltexhandler - --- begin of test - -local function tex_space(e) - e = xmlunprivatized(e) - lpegmatch(xmlspacecapture,e) -end - -local xmltexspacehandler = xml.newhandlers { - name = "texspace", - handle = tex_handle, - functions = { - ["@dt@"] = tex_doctype, - ["@el@"] = tex_element, - ["@pi@"] = tex_pi, - ["@cm@"] = tex_comment, - ["@cd@"] = tex_cdata, - ["@tx@"] = tex_space, - } -} - -local function tex_line(e) - e = xmlunprivatized(e) - lpegmatch(xmllinecapture,e) -end - -local xmltexlinehandler = xml.newhandlers { - name = "texline", - handle = tex_handle, - functions = { - ["@dt@"] = tex_doctype, - ["@el@"] = tex_element, - ["@pi@"] = tex_pi, - ["@cm@"] = tex_comment, - ["@cd@"] = tex_cdata, - ["@tx@"] = tex_line, - } -} - -function lxml.flushspacewise(id) -- keeps spaces and lines - id = getid(id) - local dt = id and id.dt - if dt then - xmlserialize(dt,xmltexspacehandler) - end -end - -function lxml.flushlinewise(id) -- keeps lines - id = getid(id) - local dt = id and id.dt - if dt then - xmlserialize(dt,xmltexlinehandler) - end -end - --- end of test - -function lxml.serialize(root) - xmlserialize(root,xmltexhandler) -end - -function lxml.setaction(id,pattern,action) - local collected = xmlapplylpath(getid(id),pattern) - if collected then - local nc = #collected - if nc > 0 then - for c=1,nc do - collected[c].command = action - end - end - end -end - -local function sprint(root) -- check rawroot usage - if root then - local tr = type(root) - if tr == "string" then -- can also be result of lpath - -- rawroot = false -- ? 
- root = xmlunprivatized(root) - lpegmatch(xmltextcapture,root) - elseif tr == "table" then - if forceraw then - rawroot = root - -- contextsprint(ctxcatcodes,xmltostring(root)) -- goes wrong with % etc - root = xmlunprivatized(xmltostring(root)) - lpegmatch(xmltextcapture,root) -- goes to toc - else - xmlserialize(root,xmltexhandler) - end - end - end -end - -local function tprint(root) -- we can move sprint inline - local tr = type(root) - if tr == "table" then - local n = #root - if n == 0 then - -- skip - else - for i=1,n do - sprint(root[i]) - end - end - elseif tr == "string" then - root = xmlunprivatized(root) - lpegmatch(xmltextcapture,root) - end -end - -local function cprint(root) -- content - if not root then - -- rawroot = false - -- quit - elseif type(root) == 'string' then - -- rawroot = false - root = xmlunprivatized(root) - lpegmatch(xmltextcapture,root) - else - local rootdt = root.dt - if forceraw then - rawroot = root - -- contextsprint(ctxcatcodes,xmltostring(rootdt or root)) - root = xmlunprivatized(xmltostring(root)) - lpegmatch(xmltextcapture,root) -- goes to toc - else - xmlserialize(rootdt or root,xmltexhandler) - end - end -end - -xml.sprint = sprint local xmlsprint = sprint -- calls ct mathml -> will be replaced -xml.tprint = tprint local xmltprint = tprint -- only used here -xml.cprint = cprint local xmlcprint = cprint -- calls ct mathml -> will be replaced - --- now we can flush - -function lxml.main(id) - xmlserialize(getid(id),xmltexhandler) -- the real root (@rt@) -end - --- -- lines (untested) --- --- local buffer = { } --- --- local xmllinescapture = ( --- newline^2 / function() buffer[#buffer+1] = "" end + --- newline / function() buffer[#buffer] = buffer[#buffer] .. " " end + --- content / function(s) buffer[#buffer] = buffer[#buffer] .. s end --- )^0 --- --- local xmllineshandler = table.copy(xmltexhandler) --- --- xmllineshandler.handle = function(...) lpegmatch(xmllinescapture,concat{ ... }) end --- --- function lines(root) --- if not root then --- -- rawroot = false --- -- quit --- elseif type(root) == 'string' then --- -- rawroot = false --- lpegmatch(xmllinescapture,root) --- elseif next(root) then -- tr == 'table' --- xmlserialize(root,xmllineshandler) --- end --- end --- --- function xml.lines(root) -- used at all? --- buffer = { "" } --- lines(root) --- return result --- end - -local function to_text(e) - if e.command == nil then - local etg = e.tg - if etg and e.special and etg ~= "@rt@" then - e.command = false -- i.e. skip - else - e.command = true -- i.e. no - end - end -end - -local function to_none(e) - if e.command == nil then - e.command = false -- i.e. 
skip - end -end - --- setups - -local setups = { } - -function lxml.setcommandtotext(id) - xmlwithelements(getid(id),to_text) -end - -function lxml.setcommandtonone(id) - xmlwithelements(getid(id),to_none) -end - -function lxml.installsetup(what,document,setup,where) - document = document or "*" - local sd = setups[document] - if not sd then sd = { } setups[document] = sd end - for k=1,#sd do - if sd[k] == setup then sd[k] = nil break end - end - if what == 1 then - if trace_loading then - report_lxml("prepending setup %a for %a",setup,document) - end - insert(sd,1,setup) - elseif what == 2 then - if trace_loading then - report_lxml("appending setup %a for %a",setup,document) - end - insert(sd,setup) - elseif what == 3 then - if trace_loading then - report_lxml("inserting setup %a for %a before %a",setup,document,where) - end - insertbeforevalue(sd,setup,where) - elseif what == 4 then - if trace_loading then - report_lxml("inserting setup %a for %a after %a",setup,document,where) - end - insertaftervalue(sd,setup,where) - end -end - -function lxml.flushsetups(id,...) - local done = { } - for i=1,select("#",...) do - local document = select(i,...) - local sd = setups[document] - if sd then - for k=1,#sd do - local v= sd[k] - if not done[v] then - if trace_loading then - report_lxml("applying setup %02i : %a to %a",k,v,document) - end - contextsprint(ctxcatcodes,"\\xmlsetup{",id,"}{",v,"}") - done[v] = true - end - end - elseif trace_loading then - report_lxml("no setups for %a",document) - end - end -end - -function lxml.resetsetups(document) - if trace_loading then - report_lxml("resetting all setups for %a",document) - end - setups[document] = { } -end - -function lxml.removesetup(document,setup) - local s = setups[document] - if s then - for i=1,#s do - if s[i] == setup then - if trace_loading then - report_lxml("removing setup %a for %a",setup,document) - end - remove(t,i) - break - end - end - end -end - -function lxml.setsetup(id,pattern,setup) - if not setup or setup == "" or setup == "*" or setup == "-" or setup == "+" then - local collected = xmlapplylpath(getid(id),pattern) - if collected then - local nc = #collected - if nc > 0 then - if trace_setups then - for c=1,nc do - local e = collected[c] - local ix = e.ix or 0 - if setup == "-" then - e.command = false - report_lxml("lpath matched (a) %5i: %s = %s -> skipped",c,ix,setup) - elseif setup == "+" then - e.command = true - report_lxml("lpath matched (b) %5i: %s = %s -> text",c,ix,setup) - else - local tg = e.tg - if tg then -- to be sure - e.command = tg - local ns = e.rn or e.ns - if ns == "" then - report_lxml("lpath matched (c) %5i: %s = %s -> %s",c,ix,tg,tg) - else - report_lxml("lpath matched (d) %5i: %s = %s:%s -> %s",c,ix,ns,tg,tg) - end - end - end - end - else - for c=1,nc do - local e = collected[c] - if setup == "-" then - e.command = false - elseif setup == "+" then - e.command = true - else - e.command = e.tg - end - end - end - elseif trace_setups then - report_lxml("%s lpath matches for pattern: %s","zero",pattern) - end - elseif trace_setups then - report_lxml("%s lpath matches for pattern: %s","no",pattern) - end - else - local a, b = match(setup,"^(.+:)([%*%-])$") - if a and b then - local collected = xmlapplylpath(getid(id),pattern) - if collected then - local nc = #collected - if nc > 0 then - if trace_setups then - for c=1,nc do - local e = collected[c] - local ns, tg, ix = e.rn or e.ns, e.tg, e.ix or 0 - if b == "-" then - e.command = false - if ns == "" then - report_lxml("lpath matched (e) %5i: %s = %s 
-> skipped",c,ix,tg) - else - report_lxml("lpath matched (f) %5i: %s = %s:%s -> skipped",c,ix,ns,tg) - end - elseif b == "+" then - e.command = true - if ns == "" then - report_lxml("lpath matched (g) %5i: %s = %s -> text",c,ix,tg) - else - report_lxml("lpath matched (h) %5i: %s = %s:%s -> text",c,ix,ns,tg) - end - else - e.command = a .. tg - if ns == "" then - report_lxml("lpath matched (i) %5i: %s = %s -> %s",c,ix,tg,e.command) - else - report_lxml("lpath matched (j) %5i: %s = %s:%s -> %s",c,ix,ns,tg,e.command) - end - end - end - else - for c=1,nc do - local e = collected[c] - if b == "-" then - e.command = false - elseif b == "+" then - e.command = true - else - e.command = a .. e.tg - end - end - end - elseif trace_setups then - report_lxml("%s lpath matches for pattern: %s","zero",pattern) - end - elseif trace_setups then - report_lxml("%s lpath matches for pattern: %s","no",pattern) - end - else - local collected = xmlapplylpath(getid(id),pattern) - if collected then - local nc = #collected - if nc > 0 then - if trace_setups then - for c=1,nc do - local e = collected[c] - e.command = setup - local ns, tg, ix = e.rn or e.ns, e.tg, e.ix or 0 - if ns == "" then - report_lxml("lpath matched (k) %5i: %s = %s -> %s",c,ix,tg,setup) - else - report_lxml("lpath matched (l) %5i: %s = %s:%s -> %s",c,ix,ns,tg,setup) - end - end - else - for c=1,nc do - collected[c].command = setup - end - end - elseif trace_setups then - report_lxml("%s lpath matches for pattern: %s","zero",pattern) - end - elseif trace_setups then - report_lxml("%s lpath matches for pattern: %s","no",pattern) - end - end - end -end - --- finalizers - -local function first(collected) - if collected and #collected > 0 then - xmlsprint(collected[1]) - end -end - -local function last(collected) - if collected then - local nc = #collected - if nc > 0 then - xmlsprint(collected[nc]) - end - end -end - -local function all(collected) - if collected then - local nc = #collected - if nc > 0 then - for c=1,nc do - xmlsprint(collected[c]) - end - end - end -end - -local function reverse(collected) - if collected then - local nc = #collected - if nc >0 then - for c=nc,1,-1 do - xmlsprint(collected[c]) - end - end - end -end - -local function count(collected) - contextsprint(ctxcatcodes,(collected and #collected) or 0) -- why ctxcatcodes -end - -local function position(collected,n) - -- todo: if not n then == match - if collected then - local nc = #collected - if nc > 0 then - n = tonumber(n) or 0 - if n < 0 then - n = nc + n + 1 - end - if n > 0 then - local cn = collected[n] - if cn then - xmlsprint(cn) - return - end - end - end - end -end - -local function match(collected) -- is match in preceding collected, never change, see bibxml - local m = collected and collected[1] - contextsprint(ctxcatcodes,m and m.mi or 0) -- why ctxcatcodes -end - -local function index(collected,n) - if collected then - local nc = #collected - if nc > 0 then - n = tonumber(n) or 0 - if n < 0 then - n = nc + n + 1 -- brrr - end - if n > 0 then - local cn = collected[n] - if cn then - contextsprint(ctxcatcodes,cn.ni or 0) -- why ctxcatcodes - return - end - end - end - end - contextsprint(ctxcatcodes,0) -- why ctxcatcodes -end - -local function command(collected,cmd,otherwise) - local n = collected and #collected - if n and n > 0 then - local wildcard = find(cmd,"%*") - for c=1,n do -- maybe optimize for n=1 - local e = collected[c] - local ix = e.ix - local name = e.name - if not ix then - lxml.addindex(name,false,true) - ix = e.ix - end - if wildcard then - 
contextsprint(ctxcatcodes,"\\xmlw{",(gsub(cmd,"%*",e.tg)),"}{",name,"::",ix,"}") - else - contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",name,"::",ix,"}") - end - end - elseif otherwise then - contextsprint(ctxcatcodes,"\\xmlw{",otherwise,"}{#1}") - end -end - -local function attribute(collected,a,default) - if collected and #collected > 0 then - local at = collected[1].at - local str = (at and at[a]) or default - if str and str ~= "" then - contextsprint(notcatcodes,str) - end - elseif default then - contextsprint(notcatcodes,default) - end -end - -local function chainattribute(collected,arguments) -- todo: optional levels - if collected and #collected > 0 then - local e = collected[1] - while e do - local at = e.at - if at then - local a = at[arguments] - if a then - contextsprint(notcatcodes,a) - end - else - break -- error - end - e = e.__p__ - end - end -end - -local function text(collected) - if collected then - local nc = #collected - if nc == 0 then - -- nothing - elseif nc == 1 then -- hardly any gain so this will go - cprint(collected[1]) - else for c=1,nc do - cprint(collected[c]) - end end - end -end - -local function ctxtext(collected) - if collected then - local nc = #collected - if nc > 0 then - for c=1,nc do - contextsprint(ctxcatcodes,collected[c].dt) - end - end - end -end - -local function stripped(collected) -- tricky as we strip in place - if collected then - local nc = #collected - if nc > 0 then - for c=1,nc do - cprint(xml.stripelement(collected[c])) - end - end - end -end - -local function lower(collected) - if not collected then - local nc = #collected - if nc > 0 then - for c=1,nc do - contextsprint(ctxcatcodes,lowerchars(collected[c].dt[1])) - end - end - end -end - -local function upper(collected) - if collected then - local nc = #collected - if nc > 0 then - for c=1,nc do - contextsprint(ctxcatcodes,upperchars(collected[c].dt[1])) - end - end - end -end - -local function number(collected) - local nc = collected and #collected or 0 - local n = 0 - if nc > 0 then - for c=1,nc do - n = n + tonumber(collected[c].dt[1] or 0) - end - end - contextsprint(ctxcatcodes,n) -end - -local function concatrange(collected,start,stop,separator,lastseparator,textonly) -- test this on mml - if collected then - local nofcollected = #collected - if nofcollected > 0 then - local separator = separator or "" - local lastseparator = lastseparator or separator or "" - start, stop = (start == "" and 1) or tonumber(start) or 1, (stop == "" and nofcollected) or tonumber(stop) or nofcollected - if stop < 0 then stop = nofcollected + stop end -- -1 == last-1 - for i=start,stop do - if textonly then - xmlcprint(collected[i]) - else - xmlsprint(collected[i]) - end - if i == nofcollected then - -- nothing - elseif i == nofcollected-1 and lastseparator ~= "" then - contextsprint(ctxcatcodes,lastseparator) - elseif separator ~= "" then - contextsprint(ctxcatcodes,separator) - end - end - end - end -end - -local function concat(collected,separator,lastseparator,textonly) -- test this on mml - concatrange(collected,false,false,separator,lastseparator,textonly) -end - -texfinalizers.first = first -texfinalizers.last = last -texfinalizers.all = all -texfinalizers.reverse = reverse -texfinalizers.count = count -texfinalizers.command = command -texfinalizers.attribute = attribute -texfinalizers.text = text -texfinalizers.stripped = stripped -texfinalizers.lower = lower -texfinalizers.upper = upper -texfinalizers.ctxtext = ctxtext -texfinalizers.context = ctxtext -texfinalizers.position = position 
-texfinalizers.match = match -texfinalizers.index = index -texfinalizers.concat = concat -texfinalizers.concatrange = concatrange -texfinalizers.chainattribute = chainattribute -texfinalizers.default = all -- !! - -local concat = table.concat - -function texfinalizers.tag(collected,n) - if collected then - local nc = #collected - if nc > 0 then - n = tonumber(n) or 0 - local c - if n == 0 then - c = collected[1] - elseif n > 1 then - c = collected[n] - else - c = collected[nc-n+1] - end - if c then - contextsprint(ctxcatcodes,c.tg) - end - end - end -end - -function texfinalizers.name(collected,n) - if collected then - local nc = #collected - if nc > 0 then - local c - if n == 0 or not n then - c = collected[1] - elseif n > 1 then - c = collected[n] - else - c = collected[nc-n+1] - end - if c then - if c.ns == "" then - contextsprint(ctxcatcodes,c.tg) - else - contextsprint(ctxcatcodes,c.ns,":",c.tg) - end - end - end - end -end - -function texfinalizers.tags(collected,nonamespace) - if collected then - local nc = #collected - if nc > 0 then - for c=1,nc do - local e = collected[c] - local ns, tg = e.ns, e.tg - if nonamespace or ns == "" then - contextsprint(ctxcatcodes,tg) - else - contextsprint(ctxcatcodes,ns,":",tg) - end - end - end - end -end - --- - -local function verbatim(id,before,after) - local root = getid(id) - if root then - if before then contextsprint(ctxcatcodes,before,"[",root.tg or "?","]") end - lxml.toverbatim(xmltostring(root.dt)) ---~ lxml.toverbatim(xml.totext(root.dt)) - if after then contextsprint(ctxcatcodes,after) end - end -end - -function lxml.inlineverbatim(id) - verbatim(id,"\\startxmlinlineverbatim","\\stopxmlinlineverbatim") -end - -function lxml.displayverbatim(id) - verbatim(id,"\\startxmldisplayverbatim","\\stopxmldisplayverbatim") -end - -lxml.verbatim = verbatim - --- helpers - -function lxml.first(id,pattern) - local collected = xmlapplylpath(getid(id),pattern) - if collected then - first(collected) - end -end - -function lxml.last(id,pattern) - local collected = xmlapplylpath(getid(id),pattern) - if collected then - last(collected) - end -end - -function lxml.all(id,pattern) - local collected = xmlapplylpath(getid(id),pattern) - if collected then - all(collected) - end -end - -function lxml.count(id,pattern) - -- always needs to produce a result so no test here - count(xmlapplylpath(getid(id),pattern)) -end - -function lxml.attribute(id,pattern,a,default) - local collected = xmlapplylpath(getid(id),pattern) - if collected then - attribute(collected,a,default) - end -end - -function lxml.raw(id,pattern) -- the content, untouched by commands - local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id) - if collected and #collected > 0 then - contextsprint(notcatcodes,xmltostring(collected[1].dt)) - end -end - -function lxml.context(id,pattern) -- the content, untouched by commands - if pattern then - local collected = xmlapplylpath(getid(id),pattern) or getid(id) - if collected and #collected > 0 then - contextsprint(ctxcatcodes,collected[1].dt) - end - else - local collected = getid(id) - if collected then - local dt = collected.dt - if #dt > 0 then - ctx_text(dt[1]) - end - end - end -end - -function lxml.text(id,pattern) - local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id) - if collected and #collected > 0 then - text(collected) - end -end - -lxml.content = text - -function lxml.position(id,pattern,n) - position(xmlapplylpath(getid(id),pattern),n) -end - -function 
lxml.chainattribute(id,pattern,a,default) - chainattribute(xmlapplylpath(getid(id),pattern),a,default) -end - -function lxml.concatrange(id,pattern,start,stop,separator,lastseparator,textonly) -- test this on mml - concatrange(xmlapplylpath(getid(id),pattern),start,stop,separator,lastseparator,textonly) -end - -function lxml.concat(id,pattern,separator,lastseparator,textonly) - concatrange(xmlapplylpath(getid(id),pattern),false,false,separator,lastseparator,textonly) -end - -function lxml.element(id,n) - position(xmlapplylpath(getid(id),"/*"),n) -end - -lxml.index = lxml.position - -function lxml.pos(id) - local root = getid(id) - contextsprint(ctxcatcodes,(root and root.ni) or 0) -end - -function lxml.att(id,a,default) - local root = getid(id) - if root then - local at = root.at - local str = (at and at[a]) or default - if str and str ~= "" then - contextsprint(notcatcodes,str) - end - elseif default then - contextsprint(notcatcodes,default) - end -end - -function lxml.name(id) -- or remapped name? -> lxml.info, combine - local r = getid(id) - local ns = r.rn or r.ns or "" - if ns ~= "" then - contextsprint(ctxcatcodes,ns,":",r.tg) - else - contextsprint(ctxcatcodes,r.tg) - end -end - -function lxml.match(id) -- or remapped name? -> lxml.info, combine - contextsprint(ctxcatcodes,getid(id).mi or 0) -end - -function lxml.tag(id) -- tag vs name -> also in l-xml tag->name - contextsprint(ctxcatcodes,getid(id).tg or "") -end - -function lxml.namespace(id) -- or remapped name? - local root = getid(id) - contextsprint(ctxcatcodes,root.rn or root.ns or "") -end - -function lxml.flush(id) - id = getid(id) - local dt = id and id.dt - if dt then - xmlsprint(dt) - end -end - -function lxml.snippet(id,i) - local e = getid(id) - if e then - local edt = e.dt - if edt then - xmlsprint(edt[i]) - end - end -end - -function lxml.direct(id) - xmlsprint(getid(id)) -end - -function lxml.command(id,pattern,cmd) - local i, p = getid(id,true) - local collected = xmlapplylpath(getid(i),pattern) - if collected then - local nc = #collected - if nc > 0 then - local rootname = p or i.name - for c=1,nc do - local e = collected[c] - local ix = e.ix - if not ix then - addindex(rootname,false,true) - ix = e.ix - end - contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",rootname,"::",ix,"}") - end - end - end -end - --- loops - -function lxml.collected(id,pattern,reverse) - return xmlcollected(getid(id),pattern,reverse) -end - -function lxml.elements(id,pattern,reverse) - return xmlelements(getid(id),pattern,reverse) -end - --- obscure ones - -lxml.info = lxml.name - --- testers - -local found, empty = xml.found, xml.empty - -local doif, doifnot, doifelse = commands.doif, commands.doifnot, commands.doifelse - -function lxml.doif (id,pattern) doif (found(getid(id),pattern)) end -function lxml.doifnot (id,pattern) doifnot (found(getid(id),pattern)) end -function lxml.doifelse (id,pattern) doifelse(found(getid(id),pattern)) end -function lxml.doiftext (id,pattern) doif (not empty(getid(id),pattern)) end -function lxml.doifnottext (id,pattern) doifnot (not empty(getid(id),pattern)) end -function lxml.doifelsetext (id,pattern) doifelse(not empty(getid(id),pattern)) end - --- special case: "*" and "" -> self else lpath lookup - ---~ function lxml.doifelseempty(id,pattern) doifelse(isempty(getid(id),pattern ~= "" and pattern ~= nil)) end -- not yet done, pattern - --- status info - -statistics.register("xml load time", function() - if noffiles > 0 or nofconverted > 0 then - return format("%s seconds, %s files, %s converted", 
statistics.elapsedtime(xml), noffiles, nofconverted) - else - return nil - end -end) - -statistics.register("lxml preparation time", function() - local calls, cached = xml.lpathcalls(), xml.lpathcached() - if calls > 0 or cached > 0 then - return format("%s seconds, %s nodes, %s lpath calls, %s cached calls", - statistics.elapsedtime(lxml), nofindices, calls, cached) - else - return nil - end -end) - -statistics.register("lxml lpath profile", function() - local p = xml.profiled - if p and next(p) then - local s = table.sortedkeys(p) - local tested, matched, finalized = 0, 0, 0 - logs.pushtarget("logfile") - logs.writer("\nbegin of lxml profile\n") - logs.writer("\n tested matched finalized pattern\n\n") - for i=1,#s do - local pattern = s[i] - local pp = p[pattern] - local t, m, f = pp.tested, pp.matched, pp.finalized - tested, matched, finalized = tested + t, matched + m, finalized + f - logs.writer(format("%9i %9i %9i %s",t,m,f,pattern)) - end - logs.writer("\nend of lxml profile\n") - logs.poptarget() - return format("%s patterns, %s tested, %s matched, %s finalized (see log for details)",#s,tested,matched,finalized) - else - return nil - end -end) - --- misc - -function lxml.nonspace(id,pattern) -- slow, todo loop - xmltprint(xmlcollect(getid(id),pattern,true)) -end - -function lxml.strip(id,pattern,nolines,anywhere) - xml.strip(getid(id),pattern,nolines,anywhere) -end - -function lxml.stripped(id,pattern,nolines) - local str = xmltext(getid(id),pattern) or "" - str = gsub(str,"^%s*(.-)%s*$","%1") - if nolines then - str = gsub(str,"%s+"," ") - end - xmlsprint(str) -end - -function lxml.delete(id,pattern) - xml.delete(getid(id),pattern) -end - -lxml.obsolete = { } - -lxml.get_id = getid lxml.obsolete.get_id = getid - --- goodies: - -function texfinalizers.lettered(collected) - if collected then - local nc = #collected - if nc > 0 then - for c=1,nc do - contextsprint(ctxcatcodes,lettered(collected[c].dt[1])) - end - end - end -end - ---~ function texfinalizers.apply(collected,what) -- to be tested ---~ if collected then ---~ for c=1,#collected do ---~ contextsprint(ctxcatcodes,what(collected[c].dt[1])) ---~ end ---~ end ---~ end - -function lxml.toparameters(id) - local e = getid(id) - if e then - local a = e.at - if a and next(a) then - local setups, s = { }, 0 - for k, v in next, a do - s = s + 1 - setups[s] = k .. "=" .. v - end - setups = concat(setups,",") - -- tracing - context(setups) - end - end -end - -local template = '\n\n\n\n%s' - -function lxml.tofile(id,pattern,filename,comment) - local collected = xmlapplylpath(getid(id),pattern) - if collected then - io.savedata(filename,format(template,comment or "exported fragment",tostring(collected[1]))) - else - os.remove(filename) -- get rid of old content - end -end - -texfinalizers.upperall = xmlfinalizers.upperall -texfinalizers.lowerall = xmlfinalizers.lowerall +if not modules then modules = { } end modules ['lxml-tex'] = { + version = 1.001, + comment = "companion to lxml-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Because we split and resolve entities we use the direct printing +-- interface and not the context one. If we ever do that there will +-- be an cldf-xml helper library. 
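
A minimal usage sketch (not part of the patch) of the entity hooks defined in the rewritten module below: lxml.registerentity takes a name and either a string, which is flushed as TeX code, or a function, which is called with the entity name; anything still unresolved falls back to xml.entities, numeric/hex parsing, and finally \xmle. The entity names "tex" and "jobname" are made up for illustration.

    -- illustrative only: hook two entities into the resolver defined below
    lxml.registerentity("tex", "\\TeX{}")                               -- string value: flushed as TeX code
    lxml.registerentity("jobname", function() context(tex.jobname) end) -- function value: called at flush time
    -- after this, &tex; and &jobname; in the XML input expand when the text is flushed
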
+ +local utfchar = utf.char +local concat, insert, remove = table.concat, table.insert, table.remove +local format, sub, gsub, find, gmatch, match = string.format, string.sub, string.gsub, string.find, string.gmatch, string.match +local type, next, tonumber, tostring, select = type, next, tonumber, tostring, select +local lpegmatch = lpeg.match +local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc + +local tex, xml = tex, xml +local lowerchars, upperchars, lettered = characters.lower, characters.upper, characters.lettered + +lxml = lxml or { } +local lxml = lxml + +local catcodenumbers = catcodes.numbers +local ctxcatcodes = catcodenumbers.ctxcatcodes -- todo: use different method +local notcatcodes = catcodenumbers.notcatcodes -- todo: use different method + +local context = context +local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing) + +local xmlelements, xmlcollected, xmlsetproperty = xml.elements, xml.collected, xml.setproperty +local xmlwithelements = xml.withelements +local xmlserialize, xmlcollect, xmltext, xmltostring = xml.serialize, xml.collect, xml.text, xml.tostring +local xmlapplylpath = xml.applylpath +local xmlunprivatized, xmlprivatetoken, xmlprivatecodes = xml.unprivatized, xml.privatetoken, xml.privatecodes + +local variables = (interfaces and interfaces.variables) or { } + +local insertbeforevalue, insertaftervalue = utilities.tables.insertbeforevalue, utilities.tables.insertaftervalue + +local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming + +local trace_setups = false trackers.register("lxml.setups", function(v) trace_setups = v end) +local trace_loading = false trackers.register("lxml.loading", function(v) trace_loading = v end) +local trace_access = false trackers.register("lxml.access", function(v) trace_access = v end) +local trace_comments = false trackers.register("lxml.comments", function(v) trace_comments = v end) +local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end) + +local report_lxml = logs.reporter("xml","tex") +local report_xml = logs.reporter("xml","tex") + +local forceraw, rawroot = false, nil + +-- tex entities +-- +-- todo: unprivatize attributes + +lxml.entities = lxml.entities or { } + +storage.register("lxml/entities",lxml.entities,"lxml.entities") + +--~ xml.placeholders.unknown_any_entity = nil -- has to be per xml + +local xmlentities = xml.entities +local texentities = lxml.entities +local parsedentity = xml.parsedentitylpeg + +function lxml.registerentity(key,value) + texentities[key] = value + if trace_entities then + report_xml("registering tex entity %a as %a",key,value) + end +end + +function lxml.resolvedentity(str) + if forceraw then + if trace_entities then + report_xml("passing entity %a as &%s;",str,str) + end + context("&%s;",str) + else + local e = texentities[str] + if e then + local te = type(e) + if te == "function" then + if trace_entities then + report_xml("passing entity %a using function",str) + end + e(str) + elseif e then + if trace_entities then + report_xml("passing entity %a as %a using %a",str,e,"ctxcatcodes") + end + context(e) + end + return + end + local e = xmlentities[str] + if e then + local te = type(e) + if te == "function" then + e = e(str) + end + if e then + if trace_entities then + report_xml("passing entity %a as %a using %a",str,e,"notcatcodes") + end + contextsprint(notcatcodes,e) + return + end + end + -- resolve hex and dec, todo: escape # & etc for ctxcatcodes + -- normally 
this is already solved while loading the file + local chr, err = lpegmatch(parsedentity,str) + if chr then + if trace_entities then + report_xml("passing entity %a as %a using %a",str,chr,"ctxcatcodes") + end + context(chr) + elseif err then + if trace_entities then + report_xml("passing faulty entity %a as %a",str,err) + end + context(err) + else + local tag = upperchars(str) + if trace_entities then + report_xml("passing entity %a to \\xmle using tag %a",str,tag) + end + context.xmle(str,tag) -- we need to use our own upper + end + end +end + +-- tex interface + +lxml.loaded = lxml.loaded or { } +local loaded = lxml.loaded + +-- print(contextdirective("context-mathml-directive function reduction yes ")) +-- print(contextdirective("context-mathml-directive function ")) + +xml.defaultprotocol = "tex" + +local finalizers = xml.finalizers + +finalizers.xml = finalizers.xml or { } +finalizers.tex = finalizers.tex or { } + +local xmlfinalizers = finalizers.xml +local texfinalizers = finalizers.tex + +-- serialization with entity handling + +local ampersand = P("&") +local semicolon = P(";") +local entity = ampersand * C((1-semicolon)^1) * semicolon / lxml.resolvedentity -- context.bold + +local _, xmltextcapture = context.newtexthandler { + exception = entity, + catcodes = notcatcodes, +} + +local _, xmlspacecapture = context.newtexthandler { + endofline = context.xmlcdataobeyedline, + emptyline = context.xmlcdataobeyedline, + simpleline = context.xmlcdataobeyedline, + space = context.xmlcdataobeyedspace, + exception = entity, + catcodes = notcatcodes, +} + +local _, xmllinecapture = context.newtexthandler { + endofline = context.xmlcdataobeyedline, + emptyline = context.xmlcdataobeyedline, + simpleline = context.xmlcdataobeyedline, + exception = entity, + catcodes = notcatcodes, +} + +local _, ctxtextcapture = context.newtexthandler { + exception = entity, + catcodes = ctxcatcodes, +} + +-- cdata + +local toverbatim = context.newverbosehandler { + line = context.xmlcdataobeyedline, + space = context.xmlcdataobeyedspace, + before = context.xmlcdatabefore, + after = context.xmlcdataafter, +} + +lxml.toverbatim = context.newverbosehandler { + line = context.xmlcdataobeyedline, + space = context.xmlcdataobeyedspace, + before = context.xmlcdatabefore, + after = context.xmlcdataafter, + strip = true, +} + +-- raw flushing + +function lxml.startraw() + forceraw = true +end + +function lxml.stopraw() + forceraw = false +end + +function lxml.rawroot() + return rawroot +end + +-- storage + +function lxml.store(id,root,filename) + loaded[id] = root + xmlsetproperty(root,"name",id) + if filename then + xmlsetproperty(root,"filename",filename) + end +end + +local splitter = lpeg.splitat("::") + +lxml.idsplitter = splitter + +function lxml.splitid(id) + local d, i = lpegmatch(splitter,id) + if d then + return d, i + else + return "", id + end +end + +local function getid(id, qualified) + if id then + local lid = loaded[id] + if lid then + return lid + elseif type(id) == "table" then + return id + else + local d, i = lpegmatch(splitter,id) + if d then + local ld = loaded[d] + if ld then + local ldi = ld.index + if ldi then + local root = ldi[tonumber(i)] + if root then + if qualified then -- we need this else two args that confuse others + return root, d + else + return root + end + elseif trace_access then + report_lxml("%a has no index entry %a",d,i) + end + elseif trace_access then + report_lxml("%a has no index",d) + end + elseif trace_access then + report_lxml("%a is not loaded",d) + end + elseif 
trace_access then + report_lxml("%a is not loaded",i) + end + end + elseif trace_access then + report_lxml("invalid id (nil)") + end +end + +lxml.id = getid -- we provide two names as locals can already use such +lxml.getid = getid -- names and we don't want clashes + +function lxml.root(id) + return loaded[id] +end + +-- index + +local nofindices = 0 + +local function addindex(name,check_sum,force) + local root = getid(name) + if root and (not root.index or force) then -- weird, only called once + local n, index, maxindex, check = 0, root.index or { }, root.maxindex or 0, root.check or { } + local function nest(root) + local dt = root.dt + if not root.ix then + maxindex = maxindex + 1 + root.ix = maxindex + check[maxindex] = root.tg -- still needed ? + index[maxindex] = root + n = n + 1 + end + if dt then + for k=1,#dt do + local dk = dt[k] + if type(dk) == "table" then + nest(dk) + end + end + end + end + nest(root) + nofindices = nofindices + n + -- + if type(name) ~= "string" then + name = "unknown" + end + root.index = index + root.maxindex = maxindex + if trace_access then + report_lxml("indexed entries %a, found nodes %a",tostring(name),maxindex) + end + end +end + +lxml.addindex = addindex + +-- another cache + +local function lxmlapplylpath(id,pattern) -- better inline, saves call + return xmlapplylpath(getid(id),pattern) +end + +lxml.filter = lxmlapplylpath + +function lxml.filterlist(list,pattern) + for s in gmatch(list,"[^, ]+") do -- we could cache a table + xmlapplylpath(getid(s),pattern) + end +end + +function lxml.applyfunction(id,name) + local f = xml.functions[name] + return f and f(getid(id)) +end + +-- rather new, indexed storage (backward refs), maybe i will merge this + +function lxml.checkindex(name) + local root = getid(name) + return (root and root.index) or 0 +end + +function lxml.withindex(name,n,command) -- will change as name is always there now + local i, p = lpegmatch(splitter,n) + if p then + contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",n,"}") + else + contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",name,"::",n,"}") + end +end + +function lxml.getindex(name,n) -- will change as name is always there now + local i, p = lpegmatch(splitter,n) + if p then + contextsprint(ctxcatcodes,n) + else + contextsprint(ctxcatcodes,name,"::",n) + end +end + +-- loading (to be redone, no overload) .. 
best use different methods and +-- keep raw xml (at least as option) + +xml.originalload = xml.originalload or xml.load + +local noffiles, nofconverted = 0, 0 + +function xml.load(filename,settings) + noffiles, nofconverted = noffiles + 1, nofconverted + 1 + starttiming(xml) + local ok, data = resolvers.loadbinfile(filename) + settings = settings or { } + settings.currentresource = filename + local xmltable = xml.convert((ok and data) or "",settings) + settings.currentresource = nil + stoptiming(xml) + return xmltable +end + +local function entityconverter(id,str) + return xmlentities[str] or xmlprivatetoken(str) or "" -- roundtrip handler +end + +function lxml.convert(id,data,entities,compress,currentresource) + local settings = { -- we're now roundtrip anyway + unify_predefined_entities = true, + utfize_entities = true, + resolve_predefined_entities = true, + resolve_entities = function(str) return entityconverter(id,str) end, -- needed for mathml + currentresource = tostring(currentresource or id), + } + if compress and compress == variables.yes then + settings.strip_cm_and_dt = true + end + -- if entities and entities == variables.yes then + -- settings.utfize_entities = true + -- -- settings.resolve_entities = function (str) return entityconverter(id,str) end + -- end + return xml.convert(data,settings) +end + +function lxml.load(id,filename,compress,entities) + filename = commands.preparedfile(filename) -- not commands! + if trace_loading then + report_lxml("loading file %a as %a",filename,id) + end + noffiles, nofconverted = noffiles + 1, nofconverted + 1 + -- local xmltable = xml.load(filename) + starttiming(xml) + local ok, data = resolvers.loadbinfile(filename) + local xmltable = lxml.convert(id,(ok and data) or "",compress,entities,format("id: %s, file: %s",id,filename)) + stoptiming(xml) + lxml.store(id,xmltable,filename) + return xmltable, filename +end + +function lxml.register(id,xmltable,filename) + lxml.store(id,xmltable,filename) + return xmltable +end + +function lxml.include(id,pattern,attribute,recurse) + starttiming(xml) + local root = getid(id) + xml.include(root,pattern,attribute,recurse,function(filename) + if filename then + filename = commands.preparedfile(filename) + if file.dirname(filename) == "" and root.filename then + local dn = file.dirname(root.filename) + if dn ~= "" then + filename = file.join(dn,filename) + end + end + if trace_loading then + report_lxml("including file %a",filename) + end + noffiles, nofconverted = noffiles + 1, nofconverted + 1 + return resolvers.loadtexfile(filename) or "" + else + return "" + end + end) + stoptiming(xml) +end + +function xml.getbuffer(name,compress,entities) -- we need to make sure that commands are processed + if not name or name == "" then + name = tex.jobname + end + nofconverted = nofconverted + 1 + local data = buffers.getcontent(name) + xmltostring(lxml.convert(name,data,compress,entities,format("buffer: %s",tostring(name or "?")))) -- one buffer +end + +function lxml.loadbuffer(id,name,compress,entities) + starttiming(xml) + nofconverted = nofconverted + 1 + local data = buffers.collectcontent(name or id) -- name can be list + local xmltable = lxml.convert(id,data,compress,entities,format("buffer: %s",tostring(name or id or "?"))) + lxml.store(id,xmltable) + stoptiming(xml) + return xmltable, name or id +end + +function lxml.loaddata(id,str,compress,entities) + starttiming(xml) + nofconverted = nofconverted + 1 + local xmltable = lxml.convert(id,str or "",compress,entities,format("id: %s",id)) + 
lxml.store(id,xmltable) + stoptiming(xml) + return xmltable, id +end + +function lxml.loadregistered(id) + return loaded[id], id +end + +-- e.command: +-- +-- string : setup +-- true : text (no ) +-- false : ignore +-- function : call + +local function tex_doctype(e,handlers) + -- ignore +end + +local function tex_comment(e,handlers) + if trace_comments then + report_lxml("comment %a",e.dt[1]) + end +end + +local default_element_handler = xml.gethandlers("verbose").functions["@el@"] + +local function tex_element(e,handlers) + local command = e.command + if command == nil then + default_element_handler(e,handlers) + elseif command == true then + -- text (no ) / so, no mkii fallback then + handlers.serialize(e.dt,handlers) + elseif command == false then + -- ignore + else + local tc = type(command) + if tc == "string" then + local rootname, ix = e.name, e.ix + if rootname then + if not ix then + addindex(rootname,false,true) + ix = e.ix + end + -- faster than context.xmlw + contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",rootname,"::",ix,"}") + else + report_lxml("fatal error: no index for %a",command) + contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",ix or 0,"}") + end + elseif tc == "function" then + command(e) + end + end +end + +local pihandlers = { } xml.pihandlers = pihandlers + +local category = P("context-") * C((1-P("-"))^1) * P("-directive") +local space = S(" \n\r") +local spaces = space^0 +local class = C((1-space)^0) +local key = class +local value = C(P(1-(space * -1))^0) + +local parser = category * spaces * class * spaces * key * spaces * value + +pihandlers[#pihandlers+1] = function(str) + if str then + local a, b, c, d = lpegmatch(parser,str) + if d then + contextsprint(ctxcatcodes,"\\xmlcontextdirective{",a,"}{",b,"}{",c,"}{",d,"}") + end + end +end + +local function tex_pi(e,handlers) + local str = e.dt[1] + for i=1,#pihandlers do + pihandlers[i](str) + end +end + +local obeycdata = true + +function lxml.setcdata() + obeycdata = true +end + +function lxml.resetcdata() + obeycdata = false +end + +local function tex_cdata(e,handlers) + if obeycdata then + toverbatim(e.dt[1]) + end +end + +local function tex_text(e) + e = xmlunprivatized(e) + lpegmatch(xmltextcapture,e) +end + +local function ctx_text(e) -- can be just context(e) as we split there + lpegmatch(ctxtextcapture,e) +end + +local function tex_handle(...) + contextsprint(ctxcatcodes,...) 
-- notcatcodes is active anyway +end + +local xmltexhandler = xml.newhandlers { + name = "tex", + handle = tex_handle, + functions = { + -- ["@dc@"] = tex_document, + ["@dt@"] = tex_doctype, + -- ["@rt@"] = tex_root, + ["@el@"] = tex_element, + ["@pi@"] = tex_pi, + ["@cm@"] = tex_comment, + ["@cd@"] = tex_cdata, + ["@tx@"] = tex_text, + } +} + +lxml.xmltexhandler = xmltexhandler + +-- begin of test + +local function tex_space(e) + e = xmlunprivatized(e) + lpegmatch(xmlspacecapture,e) +end + +local xmltexspacehandler = xml.newhandlers { + name = "texspace", + handle = tex_handle, + functions = { + ["@dt@"] = tex_doctype, + ["@el@"] = tex_element, + ["@pi@"] = tex_pi, + ["@cm@"] = tex_comment, + ["@cd@"] = tex_cdata, + ["@tx@"] = tex_space, + } +} + +local function tex_line(e) + e = xmlunprivatized(e) + lpegmatch(xmllinecapture,e) +end + +local xmltexlinehandler = xml.newhandlers { + name = "texline", + handle = tex_handle, + functions = { + ["@dt@"] = tex_doctype, + ["@el@"] = tex_element, + ["@pi@"] = tex_pi, + ["@cm@"] = tex_comment, + ["@cd@"] = tex_cdata, + ["@tx@"] = tex_line, + } +} + +function lxml.flushspacewise(id) -- keeps spaces and lines + id = getid(id) + local dt = id and id.dt + if dt then + xmlserialize(dt,xmltexspacehandler) + end +end + +function lxml.flushlinewise(id) -- keeps lines + id = getid(id) + local dt = id and id.dt + if dt then + xmlserialize(dt,xmltexlinehandler) + end +end + +-- end of test + +function lxml.serialize(root) + xmlserialize(root,xmltexhandler) +end + +function lxml.setaction(id,pattern,action) + local collected = xmlapplylpath(getid(id),pattern) + if collected then + local nc = #collected + if nc > 0 then + for c=1,nc do + collected[c].command = action + end + end + end +end + +local function sprint(root) -- check rawroot usage + if root then + local tr = type(root) + if tr == "string" then -- can also be result of lpath + -- rawroot = false -- ? 
+ root = xmlunprivatized(root) + lpegmatch(xmltextcapture,root) + elseif tr == "table" then + if forceraw then + rawroot = root + -- contextsprint(ctxcatcodes,xmltostring(root)) -- goes wrong with % etc + root = xmlunprivatized(xmltostring(root)) + lpegmatch(xmltextcapture,root) -- goes to toc + else + xmlserialize(root,xmltexhandler) + end + end + end +end + +local function tprint(root) -- we can move sprint inline + local tr = type(root) + if tr == "table" then + local n = #root + if n == 0 then + -- skip + else + for i=1,n do + sprint(root[i]) + end + end + elseif tr == "string" then + root = xmlunprivatized(root) + lpegmatch(xmltextcapture,root) + end +end + +local function cprint(root) -- content + if not root then + -- rawroot = false + -- quit + elseif type(root) == 'string' then + -- rawroot = false + root = xmlunprivatized(root) + lpegmatch(xmltextcapture,root) + else + local rootdt = root.dt + if forceraw then + rawroot = root + -- contextsprint(ctxcatcodes,xmltostring(rootdt or root)) + root = xmlunprivatized(xmltostring(root)) + lpegmatch(xmltextcapture,root) -- goes to toc + else + xmlserialize(rootdt or root,xmltexhandler) + end + end +end + +xml.sprint = sprint local xmlsprint = sprint -- calls ct mathml -> will be replaced +xml.tprint = tprint local xmltprint = tprint -- only used here +xml.cprint = cprint local xmlcprint = cprint -- calls ct mathml -> will be replaced + +-- now we can flush + +function lxml.main(id) + xmlserialize(getid(id),xmltexhandler) -- the real root (@rt@) +end + +-- -- lines (untested) +-- +-- local buffer = { } +-- +-- local xmllinescapture = ( +-- newline^2 / function() buffer[#buffer+1] = "" end + +-- newline / function() buffer[#buffer] = buffer[#buffer] .. " " end + +-- content / function(s) buffer[#buffer] = buffer[#buffer] .. s end +-- )^0 +-- +-- local xmllineshandler = table.copy(xmltexhandler) +-- +-- xmllineshandler.handle = function(...) lpegmatch(xmllinescapture,concat{ ... }) end +-- +-- function lines(root) +-- if not root then +-- -- rawroot = false +-- -- quit +-- elseif type(root) == 'string' then +-- -- rawroot = false +-- lpegmatch(xmllinescapture,root) +-- elseif next(root) then -- tr == 'table' +-- xmlserialize(root,xmllineshandler) +-- end +-- end +-- +-- function xml.lines(root) -- used at all? +-- buffer = { "" } +-- lines(root) +-- return result +-- end + +local function to_text(e) + if e.command == nil then + local etg = e.tg + if etg and e.special and etg ~= "@rt@" then + e.command = false -- i.e. skip + else + e.command = true -- i.e. no + end + end +end + +local function to_none(e) + if e.command == nil then + e.command = false -- i.e. 
skip + end +end + +-- setups + +local setups = { } + +function lxml.setcommandtotext(id) + xmlwithelements(getid(id),to_text) +end + +function lxml.setcommandtonone(id) + xmlwithelements(getid(id),to_none) +end + +function lxml.installsetup(what,document,setup,where) + document = document or "*" + local sd = setups[document] + if not sd then sd = { } setups[document] = sd end + for k=1,#sd do + if sd[k] == setup then sd[k] = nil break end + end + if what == 1 then + if trace_loading then + report_lxml("prepending setup %a for %a",setup,document) + end + insert(sd,1,setup) + elseif what == 2 then + if trace_loading then + report_lxml("appending setup %a for %a",setup,document) + end + insert(sd,setup) + elseif what == 3 then + if trace_loading then + report_lxml("inserting setup %a for %a before %a",setup,document,where) + end + insertbeforevalue(sd,setup,where) + elseif what == 4 then + if trace_loading then + report_lxml("inserting setup %a for %a after %a",setup,document,where) + end + insertaftervalue(sd,setup,where) + end +end + +function lxml.flushsetups(id,...) + local done = { } + for i=1,select("#",...) do + local document = select(i,...) + local sd = setups[document] + if sd then + for k=1,#sd do + local v= sd[k] + if not done[v] then + if trace_loading then + report_lxml("applying setup %02i : %a to %a",k,v,document) + end + contextsprint(ctxcatcodes,"\\xmlsetup{",id,"}{",v,"}") + done[v] = true + end + end + elseif trace_loading then + report_lxml("no setups for %a",document) + end + end +end + +function lxml.resetsetups(document) + if trace_loading then + report_lxml("resetting all setups for %a",document) + end + setups[document] = { } +end + +function lxml.removesetup(document,setup) + local s = setups[document] + if s then + for i=1,#s do + if s[i] == setup then + if trace_loading then + report_lxml("removing setup %a for %a",setup,document) + end + remove(t,i) + break + end + end + end +end + +function lxml.setsetup(id,pattern,setup) + if not setup or setup == "" or setup == "*" or setup == "-" or setup == "+" then + local collected = xmlapplylpath(getid(id),pattern) + if collected then + local nc = #collected + if nc > 0 then + if trace_setups then + for c=1,nc do + local e = collected[c] + local ix = e.ix or 0 + if setup == "-" then + e.command = false + report_lxml("lpath matched (a) %5i: %s = %s -> skipped",c,ix,setup) + elseif setup == "+" then + e.command = true + report_lxml("lpath matched (b) %5i: %s = %s -> text",c,ix,setup) + else + local tg = e.tg + if tg then -- to be sure + e.command = tg + local ns = e.rn or e.ns + if ns == "" then + report_lxml("lpath matched (c) %5i: %s = %s -> %s",c,ix,tg,tg) + else + report_lxml("lpath matched (d) %5i: %s = %s:%s -> %s",c,ix,ns,tg,tg) + end + end + end + end + else + for c=1,nc do + local e = collected[c] + if setup == "-" then + e.command = false + elseif setup == "+" then + e.command = true + else + e.command = e.tg + end + end + end + elseif trace_setups then + report_lxml("%s lpath matches for pattern: %s","zero",pattern) + end + elseif trace_setups then + report_lxml("%s lpath matches for pattern: %s","no",pattern) + end + else + local a, b = match(setup,"^(.+:)([%*%-])$") + if a and b then + local collected = xmlapplylpath(getid(id),pattern) + if collected then + local nc = #collected + if nc > 0 then + if trace_setups then + for c=1,nc do + local e = collected[c] + local ns, tg, ix = e.rn or e.ns, e.tg, e.ix or 0 + if b == "-" then + e.command = false + if ns == "" then + report_lxml("lpath matched (e) %5i: %s = %s 
-> skipped",c,ix,tg) + else + report_lxml("lpath matched (f) %5i: %s = %s:%s -> skipped",c,ix,ns,tg) + end + elseif b == "+" then + e.command = true + if ns == "" then + report_lxml("lpath matched (g) %5i: %s = %s -> text",c,ix,tg) + else + report_lxml("lpath matched (h) %5i: %s = %s:%s -> text",c,ix,ns,tg) + end + else + e.command = a .. tg + if ns == "" then + report_lxml("lpath matched (i) %5i: %s = %s -> %s",c,ix,tg,e.command) + else + report_lxml("lpath matched (j) %5i: %s = %s:%s -> %s",c,ix,ns,tg,e.command) + end + end + end + else + for c=1,nc do + local e = collected[c] + if b == "-" then + e.command = false + elseif b == "+" then + e.command = true + else + e.command = a .. e.tg + end + end + end + elseif trace_setups then + report_lxml("%s lpath matches for pattern: %s","zero",pattern) + end + elseif trace_setups then + report_lxml("%s lpath matches for pattern: %s","no",pattern) + end + else + local collected = xmlapplylpath(getid(id),pattern) + if collected then + local nc = #collected + if nc > 0 then + if trace_setups then + for c=1,nc do + local e = collected[c] + e.command = setup + local ns, tg, ix = e.rn or e.ns, e.tg, e.ix or 0 + if ns == "" then + report_lxml("lpath matched (k) %5i: %s = %s -> %s",c,ix,tg,setup) + else + report_lxml("lpath matched (l) %5i: %s = %s:%s -> %s",c,ix,ns,tg,setup) + end + end + else + for c=1,nc do + collected[c].command = setup + end + end + elseif trace_setups then + report_lxml("%s lpath matches for pattern: %s","zero",pattern) + end + elseif trace_setups then + report_lxml("%s lpath matches for pattern: %s","no",pattern) + end + end + end +end + +-- finalizers + +local function first(collected) + if collected and #collected > 0 then + xmlsprint(collected[1]) + end +end + +local function last(collected) + if collected then + local nc = #collected + if nc > 0 then + xmlsprint(collected[nc]) + end + end +end + +local function all(collected) + if collected then + local nc = #collected + if nc > 0 then + for c=1,nc do + xmlsprint(collected[c]) + end + end + end +end + +local function reverse(collected) + if collected then + local nc = #collected + if nc >0 then + for c=nc,1,-1 do + xmlsprint(collected[c]) + end + end + end +end + +local function count(collected) + contextsprint(ctxcatcodes,(collected and #collected) or 0) -- why ctxcatcodes +end + +local function position(collected,n) + -- todo: if not n then == match + if collected then + local nc = #collected + if nc > 0 then + n = tonumber(n) or 0 + if n < 0 then + n = nc + n + 1 + end + if n > 0 then + local cn = collected[n] + if cn then + xmlsprint(cn) + return + end + end + end + end +end + +local function match(collected) -- is match in preceding collected, never change, see bibxml + local m = collected and collected[1] + contextsprint(ctxcatcodes,m and m.mi or 0) -- why ctxcatcodes +end + +local function index(collected,n) + if collected then + local nc = #collected + if nc > 0 then + n = tonumber(n) or 0 + if n < 0 then + n = nc + n + 1 -- brrr + end + if n > 0 then + local cn = collected[n] + if cn then + contextsprint(ctxcatcodes,cn.ni or 0) -- why ctxcatcodes + return + end + end + end + end + contextsprint(ctxcatcodes,0) -- why ctxcatcodes +end + +local function command(collected,cmd,otherwise) + local n = collected and #collected + if n and n > 0 then + local wildcard = find(cmd,"%*") + for c=1,n do -- maybe optimize for n=1 + local e = collected[c] + local ix = e.ix + local name = e.name + if not ix then + lxml.addindex(name,false,true) + ix = e.ix + end + if wildcard then + 
contextsprint(ctxcatcodes,"\\xmlw{",(gsub(cmd,"%*",e.tg)),"}{",name,"::",ix,"}") + else + contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",name,"::",ix,"}") + end + end + elseif otherwise then + contextsprint(ctxcatcodes,"\\xmlw{",otherwise,"}{#1}") + end +end + +local function attribute(collected,a,default) + if collected and #collected > 0 then + local at = collected[1].at + local str = (at and at[a]) or default + if str and str ~= "" then + contextsprint(notcatcodes,str) + end + elseif default then + contextsprint(notcatcodes,default) + end +end + +local function chainattribute(collected,arguments) -- todo: optional levels + if collected and #collected > 0 then + local e = collected[1] + while e do + local at = e.at + if at then + local a = at[arguments] + if a then + contextsprint(notcatcodes,a) + end + else + break -- error + end + e = e.__p__ + end + end +end + +local function text(collected) + if collected then + local nc = #collected + if nc == 0 then + -- nothing + elseif nc == 1 then -- hardly any gain so this will go + cprint(collected[1]) + else for c=1,nc do + cprint(collected[c]) + end end + end +end + +local function ctxtext(collected) + if collected then + local nc = #collected + if nc > 0 then + for c=1,nc do + contextsprint(ctxcatcodes,collected[c].dt) + end + end + end +end + +local function stripped(collected) -- tricky as we strip in place + if collected then + local nc = #collected + if nc > 0 then + for c=1,nc do + cprint(xml.stripelement(collected[c])) + end + end + end +end + +local function lower(collected) + if not collected then + local nc = #collected + if nc > 0 then + for c=1,nc do + contextsprint(ctxcatcodes,lowerchars(collected[c].dt[1])) + end + end + end +end + +local function upper(collected) + if collected then + local nc = #collected + if nc > 0 then + for c=1,nc do + contextsprint(ctxcatcodes,upperchars(collected[c].dt[1])) + end + end + end +end + +local function number(collected) + local nc = collected and #collected or 0 + local n = 0 + if nc > 0 then + for c=1,nc do + n = n + tonumber(collected[c].dt[1] or 0) + end + end + contextsprint(ctxcatcodes,n) +end + +local function concatrange(collected,start,stop,separator,lastseparator,textonly) -- test this on mml + if collected then + local nofcollected = #collected + if nofcollected > 0 then + local separator = separator or "" + local lastseparator = lastseparator or separator or "" + start, stop = (start == "" and 1) or tonumber(start) or 1, (stop == "" and nofcollected) or tonumber(stop) or nofcollected + if stop < 0 then stop = nofcollected + stop end -- -1 == last-1 + for i=start,stop do + if textonly then + xmlcprint(collected[i]) + else + xmlsprint(collected[i]) + end + if i == nofcollected then + -- nothing + elseif i == nofcollected-1 and lastseparator ~= "" then + contextsprint(ctxcatcodes,lastseparator) + elseif separator ~= "" then + contextsprint(ctxcatcodes,separator) + end + end + end + end +end + +local function concat(collected,separator,lastseparator,textonly) -- test this on mml + concatrange(collected,false,false,separator,lastseparator,textonly) +end + +texfinalizers.first = first +texfinalizers.last = last +texfinalizers.all = all +texfinalizers.reverse = reverse +texfinalizers.count = count +texfinalizers.command = command +texfinalizers.attribute = attribute +texfinalizers.text = text +texfinalizers.stripped = stripped +texfinalizers.lower = lower +texfinalizers.upper = upper +texfinalizers.ctxtext = ctxtext +texfinalizers.context = ctxtext +texfinalizers.position = position 
+texfinalizers.match = match +texfinalizers.index = index +texfinalizers.concat = concat +texfinalizers.concatrange = concatrange +texfinalizers.chainattribute = chainattribute +texfinalizers.default = all -- !! + +local concat = table.concat + +function texfinalizers.tag(collected,n) + if collected then + local nc = #collected + if nc > 0 then + n = tonumber(n) or 0 + local c + if n == 0 then + c = collected[1] + elseif n > 1 then + c = collected[n] + else + c = collected[nc-n+1] + end + if c then + contextsprint(ctxcatcodes,c.tg) + end + end + end +end + +function texfinalizers.name(collected,n) + if collected then + local nc = #collected + if nc > 0 then + local c + if n == 0 or not n then + c = collected[1] + elseif n > 1 then + c = collected[n] + else + c = collected[nc-n+1] + end + if c then + if c.ns == "" then + contextsprint(ctxcatcodes,c.tg) + else + contextsprint(ctxcatcodes,c.ns,":",c.tg) + end + end + end + end +end + +function texfinalizers.tags(collected,nonamespace) + if collected then + local nc = #collected + if nc > 0 then + for c=1,nc do + local e = collected[c] + local ns, tg = e.ns, e.tg + if nonamespace or ns == "" then + contextsprint(ctxcatcodes,tg) + else + contextsprint(ctxcatcodes,ns,":",tg) + end + end + end + end +end + +-- + +local function verbatim(id,before,after) + local root = getid(id) + if root then + if before then contextsprint(ctxcatcodes,before,"[",root.tg or "?","]") end + lxml.toverbatim(xmltostring(root.dt)) +--~ lxml.toverbatim(xml.totext(root.dt)) + if after then contextsprint(ctxcatcodes,after) end + end +end + +function lxml.inlineverbatim(id) + verbatim(id,"\\startxmlinlineverbatim","\\stopxmlinlineverbatim") +end + +function lxml.displayverbatim(id) + verbatim(id,"\\startxmldisplayverbatim","\\stopxmldisplayverbatim") +end + +lxml.verbatim = verbatim + +-- helpers + +function lxml.first(id,pattern) + local collected = xmlapplylpath(getid(id),pattern) + if collected then + first(collected) + end +end + +function lxml.last(id,pattern) + local collected = xmlapplylpath(getid(id),pattern) + if collected then + last(collected) + end +end + +function lxml.all(id,pattern) + local collected = xmlapplylpath(getid(id),pattern) + if collected then + all(collected) + end +end + +function lxml.count(id,pattern) + -- always needs to produce a result so no test here + count(xmlapplylpath(getid(id),pattern)) +end + +function lxml.attribute(id,pattern,a,default) + local collected = xmlapplylpath(getid(id),pattern) + if collected then + attribute(collected,a,default) + end +end + +function lxml.raw(id,pattern) -- the content, untouched by commands + local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id) + if collected and #collected > 0 then + contextsprint(notcatcodes,xmltostring(collected[1].dt)) + end +end + +function lxml.context(id,pattern) -- the content, untouched by commands + if pattern then + local collected = xmlapplylpath(getid(id),pattern) or getid(id) + if collected and #collected > 0 then + contextsprint(ctxcatcodes,collected[1].dt) + end + else + local collected = getid(id) + if collected then + local dt = collected.dt + if #dt > 0 then + ctx_text(dt[1]) + end + end + end +end + +function lxml.text(id,pattern) + local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id) + if collected and #collected > 0 then + text(collected) + end +end + +lxml.content = text + +function lxml.position(id,pattern,n) + position(xmlapplylpath(getid(id),pattern),n) +end + +function 
lxml.chainattribute(id,pattern,a,default) + chainattribute(xmlapplylpath(getid(id),pattern),a,default) +end + +function lxml.concatrange(id,pattern,start,stop,separator,lastseparator,textonly) -- test this on mml + concatrange(xmlapplylpath(getid(id),pattern),start,stop,separator,lastseparator,textonly) +end + +function lxml.concat(id,pattern,separator,lastseparator,textonly) + concatrange(xmlapplylpath(getid(id),pattern),false,false,separator,lastseparator,textonly) +end + +function lxml.element(id,n) + position(xmlapplylpath(getid(id),"/*"),n) +end + +lxml.index = lxml.position + +function lxml.pos(id) + local root = getid(id) + contextsprint(ctxcatcodes,(root and root.ni) or 0) +end + +function lxml.att(id,a,default) + local root = getid(id) + if root then + local at = root.at + local str = (at and at[a]) or default + if str and str ~= "" then + contextsprint(notcatcodes,str) + end + elseif default then + contextsprint(notcatcodes,default) + end +end + +function lxml.name(id) -- or remapped name? -> lxml.info, combine + local r = getid(id) + local ns = r.rn or r.ns or "" + if ns ~= "" then + contextsprint(ctxcatcodes,ns,":",r.tg) + else + contextsprint(ctxcatcodes,r.tg) + end +end + +function lxml.match(id) -- or remapped name? -> lxml.info, combine + contextsprint(ctxcatcodes,getid(id).mi or 0) +end + +function lxml.tag(id) -- tag vs name -> also in l-xml tag->name + contextsprint(ctxcatcodes,getid(id).tg or "") +end + +function lxml.namespace(id) -- or remapped name? + local root = getid(id) + contextsprint(ctxcatcodes,root.rn or root.ns or "") +end + +function lxml.flush(id) + id = getid(id) + local dt = id and id.dt + if dt then + xmlsprint(dt) + end +end + +function lxml.snippet(id,i) + local e = getid(id) + if e then + local edt = e.dt + if edt then + xmlsprint(edt[i]) + end + end +end + +function lxml.direct(id) + xmlsprint(getid(id)) +end + +function lxml.command(id,pattern,cmd) + local i, p = getid(id,true) + local collected = xmlapplylpath(getid(i),pattern) + if collected then + local nc = #collected + if nc > 0 then + local rootname = p or i.name + for c=1,nc do + local e = collected[c] + local ix = e.ix + if not ix then + addindex(rootname,false,true) + ix = e.ix + end + contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",rootname,"::",ix,"}") + end + end + end +end + +-- loops + +function lxml.collected(id,pattern,reverse) + return xmlcollected(getid(id),pattern,reverse) +end + +function lxml.elements(id,pattern,reverse) + return xmlelements(getid(id),pattern,reverse) +end + +-- obscure ones + +lxml.info = lxml.name + +-- testers + +local found, empty = xml.found, xml.empty + +local doif, doifnot, doifelse = commands.doif, commands.doifnot, commands.doifelse + +function lxml.doif (id,pattern) doif (found(getid(id),pattern)) end +function lxml.doifnot (id,pattern) doifnot (found(getid(id),pattern)) end +function lxml.doifelse (id,pattern) doifelse(found(getid(id),pattern)) end +function lxml.doiftext (id,pattern) doif (not empty(getid(id),pattern)) end +function lxml.doifnottext (id,pattern) doifnot (not empty(getid(id),pattern)) end +function lxml.doifelsetext (id,pattern) doifelse(not empty(getid(id),pattern)) end + +-- special case: "*" and "" -> self else lpath lookup + +--~ function lxml.doifelseempty(id,pattern) doifelse(isempty(getid(id),pattern ~= "" and pattern ~= nil)) end -- not yet done, pattern + +-- status info + +statistics.register("xml load time", function() + if noffiles > 0 or nofconverted > 0 then + return format("%s seconds, %s files, %s converted", 
statistics.elapsedtime(xml), noffiles, nofconverted) + else + return nil + end +end) + +statistics.register("lxml preparation time", function() + local calls, cached = xml.lpathcalls(), xml.lpathcached() + if calls > 0 or cached > 0 then + return format("%s seconds, %s nodes, %s lpath calls, %s cached calls", + statistics.elapsedtime(lxml), nofindices, calls, cached) + else + return nil + end +end) + +statistics.register("lxml lpath profile", function() + local p = xml.profiled + if p and next(p) then + local s = table.sortedkeys(p) + local tested, matched, finalized = 0, 0, 0 + logs.pushtarget("logfile") + logs.writer("\nbegin of lxml profile\n") + logs.writer("\n tested matched finalized pattern\n\n") + for i=1,#s do + local pattern = s[i] + local pp = p[pattern] + local t, m, f = pp.tested, pp.matched, pp.finalized + tested, matched, finalized = tested + t, matched + m, finalized + f + logs.writer(format("%9i %9i %9i %s",t,m,f,pattern)) + end + logs.writer("\nend of lxml profile\n") + logs.poptarget() + return format("%s patterns, %s tested, %s matched, %s finalized (see log for details)",#s,tested,matched,finalized) + else + return nil + end +end) + +-- misc + +function lxml.nonspace(id,pattern) -- slow, todo loop + xmltprint(xmlcollect(getid(id),pattern,true)) +end + +function lxml.strip(id,pattern,nolines,anywhere) + xml.strip(getid(id),pattern,nolines,anywhere) +end + +function lxml.stripped(id,pattern,nolines) + local str = xmltext(getid(id),pattern) or "" + str = gsub(str,"^%s*(.-)%s*$","%1") + if nolines then + str = gsub(str,"%s+"," ") + end + xmlsprint(str) +end + +function lxml.delete(id,pattern) + xml.delete(getid(id),pattern) +end + +lxml.obsolete = { } + +lxml.get_id = getid lxml.obsolete.get_id = getid + +-- goodies: + +function texfinalizers.lettered(collected) + if collected then + local nc = #collected + if nc > 0 then + for c=1,nc do + contextsprint(ctxcatcodes,lettered(collected[c].dt[1])) + end + end + end +end + +--~ function texfinalizers.apply(collected,what) -- to be tested +--~ if collected then +--~ for c=1,#collected do +--~ contextsprint(ctxcatcodes,what(collected[c].dt[1])) +--~ end +--~ end +--~ end + +function lxml.toparameters(id) + local e = getid(id) + if e then + local a = e.at + if a and next(a) then + local setups, s = { }, 0 + for k, v in next, a do + s = s + 1 + setups[s] = k .. "=" .. 
v + end + setups = concat(setups,",") + -- tracing + context(setups) + end + end +end + +local template = '\n\n\n\n%s' + +function lxml.tofile(id,pattern,filename,comment) + local collected = xmlapplylpath(getid(id),pattern) + if collected then + io.savedata(filename,format(template,comment or "exported fragment",tostring(collected[1]))) + else + os.remove(filename) -- get rid of old content + end +end + +texfinalizers.upperall = xmlfinalizers.upperall +texfinalizers.lowerall = xmlfinalizers.lowerall diff --git a/tex/context/base/lxml-xml.lua b/tex/context/base/lxml-xml.lua index d0e256078..d4e103206 100644 --- a/tex/context/base/lxml-xml.lua +++ b/tex/context/base/lxml-xml.lua @@ -1,445 +1,445 @@ -if not modules then modules = { } end modules ['lxml-xml'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local concat = table.concat -local find, lower, upper = string.find, string.lower, string.upper - -local xml = xml - -local finalizers = xml.finalizers.xml -local xmlfilter = xml.filter -- we could inline this one for speed -local xmltostring = xml.tostring -local xmlserialize = xml.serialize -local xmlcollected = xml.collected -local xmlnewhandlers = xml.newhandlers - -local function first(collected) -- wrong ? - return collected and collected[1] -end - -local function last(collected) - return collected and collected[#collected] -end - -local function all(collected) - return collected -end - --- local function reverse(collected) --- if collected then --- local nc = #collected --- if nc > 0 then --- local reversed, r = { }, 0 --- for c=nc,1,-1 do --- r = r + 1 --- reversed[r] = collected[c] --- end --- return reversed --- else --- return collected --- end --- end --- end - -local reverse = table.reversed - -local function attribute(collected,name) - if collected and #collected > 0 then - local at = collected[1].at - return at and at[name] - end -end - -local function att(id,name) - local at = id.at - return at and at[name] -end - -local function count(collected) - return collected and #collected or 0 -end - -local function position(collected,n) - if not collected then - return 0 - end - local nc = #collected - if nc == 0 then - return 0 - end - n = tonumber(n) or 0 - if n < 0 then - return collected[nc + n + 1] - elseif n > 0 then - return collected[n] - else - return collected[1].mi or 0 - end -end - -local function match(collected) - return collected and #collected > 0 and collected[1].mi or 0 -- match -end - -local function index(collected) - return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new -end - -local function attributes(collected,arguments) - if collected and #collected > 0 then - local at = collected[1].at - if arguments then - return at[arguments] - elseif next(at) then - return at -- all of them - end - end -end - -local function chainattribute(collected,arguments) -- todo: optional levels - if collected and #collected > 0 then - local e = collected[1] - while e do - local at = e.at - if at then - local a = at[arguments] - if a then - return a - end - else - break -- error - end - e = e.__p__ - end - end - return "" -end - -local function raw(collected) -- hybrid (not much different from text so it might go) - if collected and #collected > 0 then - local e = collected[1] or collected - return e and xmltostring(e) or "" -- only first as we cannot concat function - else 
- return "" - end -end - --- - -local xmltexthandler = xmlnewhandlers { - name = "string", - initialize = function() - result = { } - return result - end, - finalize = function() - return concat(result) - end, - handle = function(...) - result[#result+1] = concat { ... } - end, - escape = false, -} - -local function xmltotext(root) - local dt = root.dt - if not dt then - return "" - end - local nt = #dt -- string or table - if nt == 0 then - return "" - elseif nt == 1 and type(dt[1]) == "string" then - return dt[1] -- no escaping of " ' < > & - else - return xmlserialize(root,xmltexthandler) or "" - end -end - --- - -local function text(collected) -- hybrid - if collected then -- no # test here ! - local e = collected[1] or collected -- why fallback to element, how about cdata - return e and xmltotext(e) or "" - else - return "" - end -end - -local function texts(collected) - if not collected then - return { } -- why no nil - end - local nc = #collected - if nc == 0 then - return { } -- why no nil - end - local t, n = { }, 0 - for c=1,nc do - local e = collected[c] - if e and e.dt then - n = n + 1 - t[n] = e.dt - end - end - return t -end - -local function tag(collected,n) - if not collected then - return - end - local nc = #collected - if nc == 0 then - return - end - local c - if n == 0 or not n then - c = collected[1] - elseif n > 1 then - c = collected[n] - else - c = collected[nc-n+1] - end - return c and c.tg -end - -local function name(collected,n) - if not collected then - return - end - local nc = #collected - if nc == 0 then - return - end - local c - if n == 0 or not n then - c = collected[1] - elseif n > 1 then - c = collected[n] - else - c = collected[nc-n+1] - end - if not c then - -- sorry - elseif c.ns == "" then - return c.tg - else - return c.ns .. ":" .. c.tg - end -end - -local function tags(collected,nonamespace) - if not collected then - return - end - local nc = #collected - if nc == 0 then - return - end - local t, n = { }, 0 - for c=1,nc do - local e = collected[c] - local ns, tg = e.ns, e.tg - n = n + 1 - if nonamespace or ns == "" then - t[n] = tg - else - t[n] = ns .. ":" .. 
tg - end - end - return t -end - -local function empty(collected,spacesonly) - if not collected then - return true - end - local nc = #collected - if nc == 0 then - return true - end - for c=1,nc do - local e = collected[c] - if e then - local edt = e.dt - if edt then - local n = #edt - if n == 1 then - local edk = edt[1] - local typ = type(edk) - if typ == "table" then - return false - elseif edk ~= "" then - return false - elseif spacesonly and not find(edk,"%S") then - return false - end - elseif n > 1 then - return false - end - end - end - end - return true -end - -finalizers.first = first -finalizers.last = last -finalizers.all = all -finalizers.reverse = reverse -finalizers.elements = all -finalizers.default = all -finalizers.attribute = attribute -finalizers.att = att -finalizers.count = count -finalizers.position = position -finalizers.match = match -finalizers.index = index -finalizers.attributes = attributes -finalizers.chainattribute = chainattribute -finalizers.text = text -finalizers.texts = texts -finalizers.tag = tag -finalizers.name = name -finalizers.tags = tags -finalizers.empty = empty - --- shortcuts -- we could support xmlfilter(id,pattern,first) - -function xml.first(id,pattern) - return first(xmlfilter(id,pattern)) -end - -function xml.last(id,pattern) - return last(xmlfilter(id,pattern)) -end - -function xml.count(id,pattern) - return count(xmlfilter(id,pattern)) -end - -function xml.attribute(id,pattern,a,default) - return attribute(xmlfilter(id,pattern),a,default) -end - -function xml.raw(id,pattern) - if pattern then - return raw(xmlfilter(id,pattern)) - else - return raw(id) - end -end - -function xml.text(id,pattern) -- brrr either content or element (when cdata) - if pattern then - -- return text(xmlfilter(id,pattern)) - local collected = xmlfilter(id,pattern) - return collected and #collected > 0 and xmltotext(collected[1]) or "" - elseif id then - -- return text(id) - return xmltotext(id) or "" - else - return "" - end -end - -xml.content = text - --- - -function xml.position(id,pattern,n) -- element - return position(xmlfilter(id,pattern),n) -end - -function xml.match(id,pattern) -- number - return match(xmlfilter(id,pattern)) -end - -function xml.empty(id,pattern,spacesonly) - return empty(xmlfilter(id,pattern),spacesonly) -end - -xml.all = xml.filter -xml.index = xml.position -xml.found = xml.filter - --- a nice one: - -local function totable(x) - local t = { } - for e in xmlcollected(x[1] or x,"/*") do - t[e.tg] = xmltostring(e.dt) or "" - end - return next(t) and t or nil -end - -xml.table = totable -finalizers.table = totable - -local function textonly(e,t) - if e then - local edt = e.dt - if edt then - for i=1,#edt do - local e = edt[i] - if type(e) == "table" then - textonly(e,t) - else - t[#t+1] = e - end - end - end - end - return t -end - -function xml.textonly(e) -- no pattern - return concat(textonly(e,{})) -end - --- - --- local x = xml.convert("123") --- xml.filter(x,"**/lowerall()") print(x) --- xml.filter(x,"**/upperall()") print(x) - -function finalizers.lowerall(collected) - for c=1,#collected do - local e = collected[c] - if not e.special then - e.tg = lower(e.tg) - local eat = e.at - if eat then - local t = { } - for k,v in next, eat do - t[lower(k)] = v - end - e.at = t - end - end - end -end - -function finalizers.upperall(collected) - for c=1,#collected do - local e = collected[c] - if not e.special then - e.tg = upper(e.tg) - local eat = e.at - if eat then - local t = { } - for k,v in next, eat do - t[upper(k)] = v - end - e.at = t 
- end - end - end -end +if not modules then modules = { } end modules ['lxml-xml'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local concat = table.concat +local find, lower, upper = string.find, string.lower, string.upper + +local xml = xml + +local finalizers = xml.finalizers.xml +local xmlfilter = xml.filter -- we could inline this one for speed +local xmltostring = xml.tostring +local xmlserialize = xml.serialize +local xmlcollected = xml.collected +local xmlnewhandlers = xml.newhandlers + +local function first(collected) -- wrong ? + return collected and collected[1] +end + +local function last(collected) + return collected and collected[#collected] +end + +local function all(collected) + return collected +end + +-- local function reverse(collected) +-- if collected then +-- local nc = #collected +-- if nc > 0 then +-- local reversed, r = { }, 0 +-- for c=nc,1,-1 do +-- r = r + 1 +-- reversed[r] = collected[c] +-- end +-- return reversed +-- else +-- return collected +-- end +-- end +-- end + +local reverse = table.reversed + +local function attribute(collected,name) + if collected and #collected > 0 then + local at = collected[1].at + return at and at[name] + end +end + +local function att(id,name) + local at = id.at + return at and at[name] +end + +local function count(collected) + return collected and #collected or 0 +end + +local function position(collected,n) + if not collected then + return 0 + end + local nc = #collected + if nc == 0 then + return 0 + end + n = tonumber(n) or 0 + if n < 0 then + return collected[nc + n + 1] + elseif n > 0 then + return collected[n] + else + return collected[1].mi or 0 + end +end + +local function match(collected) + return collected and #collected > 0 and collected[1].mi or 0 -- match +end + +local function index(collected) + return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new +end + +local function attributes(collected,arguments) + if collected and #collected > 0 then + local at = collected[1].at + if arguments then + return at[arguments] + elseif next(at) then + return at -- all of them + end + end +end + +local function chainattribute(collected,arguments) -- todo: optional levels + if collected and #collected > 0 then + local e = collected[1] + while e do + local at = e.at + if at then + local a = at[arguments] + if a then + return a + end + else + break -- error + end + e = e.__p__ + end + end + return "" +end + +local function raw(collected) -- hybrid (not much different from text so it might go) + if collected and #collected > 0 then + local e = collected[1] or collected + return e and xmltostring(e) or "" -- only first as we cannot concat function + else + return "" + end +end + +-- + +local xmltexthandler = xmlnewhandlers { + name = "string", + initialize = function() + result = { } + return result + end, + finalize = function() + return concat(result) + end, + handle = function(...) + result[#result+1] = concat { ... 
} + end, + escape = false, +} + +local function xmltotext(root) + local dt = root.dt + if not dt then + return "" + end + local nt = #dt -- string or table + if nt == 0 then + return "" + elseif nt == 1 and type(dt[1]) == "string" then + return dt[1] -- no escaping of " ' < > & + else + return xmlserialize(root,xmltexthandler) or "" + end +end + +-- + +local function text(collected) -- hybrid + if collected then -- no # test here ! + local e = collected[1] or collected -- why fallback to element, how about cdata + return e and xmltotext(e) or "" + else + return "" + end +end + +local function texts(collected) + if not collected then + return { } -- why no nil + end + local nc = #collected + if nc == 0 then + return { } -- why no nil + end + local t, n = { }, 0 + for c=1,nc do + local e = collected[c] + if e and e.dt then + n = n + 1 + t[n] = e.dt + end + end + return t +end + +local function tag(collected,n) + if not collected then + return + end + local nc = #collected + if nc == 0 then + return + end + local c + if n == 0 or not n then + c = collected[1] + elseif n > 1 then + c = collected[n] + else + c = collected[nc-n+1] + end + return c and c.tg +end + +local function name(collected,n) + if not collected then + return + end + local nc = #collected + if nc == 0 then + return + end + local c + if n == 0 or not n then + c = collected[1] + elseif n > 1 then + c = collected[n] + else + c = collected[nc-n+1] + end + if not c then + -- sorry + elseif c.ns == "" then + return c.tg + else + return c.ns .. ":" .. c.tg + end +end + +local function tags(collected,nonamespace) + if not collected then + return + end + local nc = #collected + if nc == 0 then + return + end + local t, n = { }, 0 + for c=1,nc do + local e = collected[c] + local ns, tg = e.ns, e.tg + n = n + 1 + if nonamespace or ns == "" then + t[n] = tg + else + t[n] = ns .. ":" .. 
tg + end + end + return t +end + +local function empty(collected,spacesonly) + if not collected then + return true + end + local nc = #collected + if nc == 0 then + return true + end + for c=1,nc do + local e = collected[c] + if e then + local edt = e.dt + if edt then + local n = #edt + if n == 1 then + local edk = edt[1] + local typ = type(edk) + if typ == "table" then + return false + elseif edk ~= "" then + return false + elseif spacesonly and not find(edk,"%S") then + return false + end + elseif n > 1 then + return false + end + end + end + end + return true +end + +finalizers.first = first +finalizers.last = last +finalizers.all = all +finalizers.reverse = reverse +finalizers.elements = all +finalizers.default = all +finalizers.attribute = attribute +finalizers.att = att +finalizers.count = count +finalizers.position = position +finalizers.match = match +finalizers.index = index +finalizers.attributes = attributes +finalizers.chainattribute = chainattribute +finalizers.text = text +finalizers.texts = texts +finalizers.tag = tag +finalizers.name = name +finalizers.tags = tags +finalizers.empty = empty + +-- shortcuts -- we could support xmlfilter(id,pattern,first) + +function xml.first(id,pattern) + return first(xmlfilter(id,pattern)) +end + +function xml.last(id,pattern) + return last(xmlfilter(id,pattern)) +end + +function xml.count(id,pattern) + return count(xmlfilter(id,pattern)) +end + +function xml.attribute(id,pattern,a,default) + return attribute(xmlfilter(id,pattern),a,default) +end + +function xml.raw(id,pattern) + if pattern then + return raw(xmlfilter(id,pattern)) + else + return raw(id) + end +end + +function xml.text(id,pattern) -- brrr either content or element (when cdata) + if pattern then + -- return text(xmlfilter(id,pattern)) + local collected = xmlfilter(id,pattern) + return collected and #collected > 0 and xmltotext(collected[1]) or "" + elseif id then + -- return text(id) + return xmltotext(id) or "" + else + return "" + end +end + +xml.content = text + +-- + +function xml.position(id,pattern,n) -- element + return position(xmlfilter(id,pattern),n) +end + +function xml.match(id,pattern) -- number + return match(xmlfilter(id,pattern)) +end + +function xml.empty(id,pattern,spacesonly) + return empty(xmlfilter(id,pattern),spacesonly) +end + +xml.all = xml.filter +xml.index = xml.position +xml.found = xml.filter + +-- a nice one: + +local function totable(x) + local t = { } + for e in xmlcollected(x[1] or x,"/*") do + t[e.tg] = xmltostring(e.dt) or "" + end + return next(t) and t or nil +end + +xml.table = totable +finalizers.table = totable + +local function textonly(e,t) + if e then + local edt = e.dt + if edt then + for i=1,#edt do + local e = edt[i] + if type(e) == "table" then + textonly(e,t) + else + t[#t+1] = e + end + end + end + end + return t +end + +function xml.textonly(e) -- no pattern + return concat(textonly(e,{})) +end + +-- + +-- local x = xml.convert("123") +-- xml.filter(x,"**/lowerall()") print(x) +-- xml.filter(x,"**/upperall()") print(x) + +function finalizers.lowerall(collected) + for c=1,#collected do + local e = collected[c] + if not e.special then + e.tg = lower(e.tg) + local eat = e.at + if eat then + local t = { } + for k,v in next, eat do + t[lower(k)] = v + end + e.at = t + end + end + end +end + +function finalizers.upperall(collected) + for c=1,#collected do + local e = collected[c] + if not e.special then + e.tg = upper(e.tg) + local eat = e.at + if eat then + local t = { } + for k,v in next, eat do + t[upper(k)] = v + end + e.at = t 
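-- The finalizers collected in this module are exposed both as xpath finalizers
-- (as in the commented "**/upperall()" example above) and as shortcuts on the
-- xml table (xml.first, xml.count, xml.text, xml.attribute, ...). A minimal
-- usage sketch, assuming a root obtained from xml.convert; the data and the
-- patterns are made up for illustration only:
--
-- local root = xml.convert([[<doc><item n="1">foo</item><item n="2">bar</item></doc>]])
-- xml.count    (root,"/doc/item")      -- 2
-- xml.text     (root,"/doc/item")      -- "foo", the content of the first match
-- xml.attribute(root,"/doc/item","n")  -- "1"
-- xml.filter   (root,"**/upperall()")  -- uppercases all tags and attribute keys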
+ end + end + end +end diff --git a/tex/context/base/m-chart.lua b/tex/context/base/m-chart.lua index c4da2eb63..34f77c074 100644 --- a/tex/context/base/m-chart.lua +++ b/tex/context/base/m-chart.lua @@ -1,916 +1,916 @@ -if not modules then modules = { } end modules ['x-flow'] = { - version = 1.001, - comment = "companion to m-flow.mkvi", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- when we can resolve mpcolor at the lua end we will --- use metapost.graphic(....) directly - --- todo: labels - -moduledata.charts = moduledata.charts or { } - -local gsub, match, find, format, lower = string.gsub, string.match, string.find, string.format, string.lower -local setmetatableindex = table.setmetatableindex -local P, S, C, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.match - -local report_chart = logs.reporter("chart") - -local points = number.points - -local variables = interfaces.variables - -local v_yes = variables.yes -local v_no = variables.no -local v_none = variables.none -local v_standard = variables.standard -local v_overlay = variables.overlay -local v_round = variables.round -local v_test = variables.test - -local defaults = { - chart = { - name = "", - option = "", - backgroundcolor = "", - width = 100*65536, - height = 50*65536, - dx = 30*65536, - dy = 30*65536, - offset = 0, - bodyfont = "", - dot = "", - hcompact = variables_no, - vcompact = variables_no, - autofocus = "", - focus = "", - labeloffset = 5*65536, - commentoffset = 5*65536, - exitoffset = 0, - - }, - shape = { -- FLOS - rulethickness = 65536, - default = "", - framecolor = "darkblue", - backgroundcolor = "lightgray", - }, - focus = { -- FLOF - rulethickness = 65536, - framecolor = "darkred", - backgroundcolor = "gray", - }, - line = { -- FLOL - rulethickness = 65536, - radius = 10*65536, - color = "darkgreen", - corner = "", - dash = "", - arrow = "", - offset = "", - }, - set = { -- FLOX - }, - split = { - nx = 3, - ny = 3, - command = "", - marking = "", - before = "", - after = "", - } -} - -local validshapes = { - ["node"] = { kind = "shape", number = 0 }, - ["action"] = { kind = "shape", number = 24 }, - ["procedure"] = { kind = "shape", number = 5 }, - ["product"] = { kind = "shape", number = 12 }, - ["decision"] = { kind = "shape", number = 14 }, - ["archive"] = { kind = "shape", number = 19 }, - ["loop"] = { kind = "shape", number = 35 }, - ["wait"] = { kind = "shape", number = 6 }, - ["subprocedure"] = { kind = "shape", number = 20 }, - ["singledocument"] = { kind = "shape", number = 32 }, - ["multidocument"] = { kind = "shape", number = 33 }, - - ["right"] = { kind = "line", number = 66 }, - ["left"] = { kind = "line", number = 67 }, - ["up"] = { kind = "line", number = 68 }, - ["down"] = { kind = "line", number = 69 }, -} - -local validlabellocations = { - l = "l", left = "l", - r = "r", right = "r", - t = "t", top = "t", - b = "b", bottom = "b", - lt = "lt", - rt = "rt", - lb = "lb", - rb = "rb", - tl = "tl", - tr = "tr", - bl = "bl", - br = "br", -} - -local validcommentlocations = { - l = "l", left = "l", - r = "r", right = "r", - t = "t", top = "t", - b = "b", bottom = "b", - lt = "lt", - rt = "rt", - lb = "lb", - rb = "rb", - tl = "tl", - tr = "tr", - bl = "bl", - br = "br", -} - -local validtextlocations = { - l = "l", left = "l", - r = "r", right = "r", - t = "t", top = "t", - b = "b", bottom = "b", - c = "c", center = "c", - m = "c", middle = "m", - lt = "lt", - rt = "rt", - lb = 
"lb", - rb = "rb", - tl = "lt", - tr = "rt", - bl = "lb", - br = "rb", -} - -setmetatableindex(validshapes,function(t,k) - local l = gsub(lower(k)," ","") - local v = rawget(t,l) - if not v then - local n = tonumber(k) - if n then - v = { kind = "shape", number = n } - else - v = rawget(t,"action") - end - end - t[k] = v - return v -end) - -local charts = { } - -local data, hash, temp, last_x, last_y, name - -function commands.flow_start_chart(chartname) - data = { } - hash = { } - last_x, last_y = 0, 0 - name = chartname -end - -function commands.flow_stop_chart() - charts[name] = { - data = data, - hash = hash, - last_x = last_x, - last_y = last_y, - } - data, hash, temp = nil, nil, nil -end - --- function commands.flow_set(chartname,chartdata) --- local hash = { } --- local data = { } --- charts[name] = { --- data = data, --- hash = hash, --- } --- for i=1,#chartdata do --- local di = data[i] --- local name = di.name or "" --- if name then --- data[#data+1] = { --- name = name, --- labels = di.labels or { }, --- comments = di.comments or { }, --- exits = di.exits or { }, --- connections = di.connections or { }, --- settings = di.settings or { }, --- x = di.x or 1, --- y = di.y or 1, --- } --- hash[name] = i --- end --- end --- end - -function commands.flow_reset(chartname) - charts[name] = nil -end - -function commands.flow_set_current_cell(n) - temp = data[tonumber(n)] or { } -end - -function commands.flow_start_cell(settings) - temp = { - texts = { }, - labels = { }, - exits = { }, - connections = { }, - settings = settings, - x = 1, - y = 1, - name = "", - } -end - -function commands.flow_stop_cell() - data[#data+1] = temp - hash[temp.name or #data] = temp -end - -function commands.flow_set_name(str) - temp.name = str -end - -function commands.flow_set_shape(str) - temp.shape = str -end - -function commands.flow_set_destination(str) - temp.destination = str -end - -function commands.flow_set_text(align,str) - temp.texts[#temp.texts+1] = { - location = align, - text = str, - } -end - -function commands.flow_set_overlay(str) - temp.overlay = str -end - -function commands.flow_set_focus(str) - temp.focus = str -end - -function commands.flow_set_figure(str) - temp.figure = str -end - -function commands.flow_set_label(location,text) - temp.labels[#temp.labels+1] = { - location = location, - text = text, - } -end - -function commands.flow_set_comment(location,text) - local connections = temp.connections - if connections then - local connection = connections[#connections] - if connection then - local comments = connection.comments - if comments then - comments[#comments+1] = { - location = location, - text = text, - } - end - end - end -end - -function commands.flow_set_exit(location,text) - temp.exits[#temp.exits+1] = { - location = location, - text = text, - } -end - -function commands.flow_set_include(name,x,y,settings) - data[#data+1] = { - include = name, - x = x, - y = y, - -- settings = settings, - } -end - -local function inject(includedata,data,hash) - local subchart = charts[includedata.include] - if not subchart then - return - end - local subdata = subchart.data - if not subdata then - return - end - local xoffset = (includedata.x or 1) - 1 - local yoffset = (includedata.y or 1) - 1 - local settings = includedata.settings - for i=1,#subdata do - local si = subdata[i] - if si.include then - inject(si,data,hash) - else - local t = { - x = si.x + xoffset, - y = si.y + yoffset, - settings = settings, - } - setmetatableindex(t,si) - data[#data+1] = t - hash[si.name or #data] = t - 
end - end -end - -local function pack(data,field) - local list, max = { }, 0 - for e=1,#data do - local d = data[e] - local f = d[field] - list[f] = true - if f > max then - max = f - end - end - for i=1,max do - if not list[i] then - for e=1,#data do - local d = data[e] - local f = d[field] - if f > i then - d[field] = f - 1 - end - end - end - end -end - -local function expanded(chart,chartsettings) - local expandeddata = { } - local expandedhash = { } - local expandedchart = { - data = expandeddata, - hash = expandedhash, - } - setmetatableindex(expandedchart,chart) - local data = chart.data - local hash = chart.hash - for i=1,#data do - local di = data[i] - if di.include then - inject(di,expandeddata,expandedhash) - else - expandeddata[#expandeddata+1] = di - expandedhash[di.name or #expandeddata] = di - end - end - -- - expandedchart.settings = chartsettings or { } - -- make locals - chartsettings.shape = chartsettings.shape or { } - chartsettings.focus = chartsettings.focus or { } - chartsettings.line = chartsettings.line or { } - chartsettings.set = chartsettings.set or { } - chartsettings.split = chartsettings.split or { } - chartsettings.chart = chartsettings.chart or { } - setmetatableindex(chartsettings.shape,defaults.shape) - setmetatableindex(chartsettings.focus,defaults.focus) - setmetatableindex(chartsettings.line ,defaults.line ) - setmetatableindex(chartsettings.set ,defaults.set ) - setmetatableindex(chartsettings.split,defaults.split) - setmetatableindex(chartsettings.chart,defaults.chart) - -- - if chartsettings.chart.vcompact == v_yes then - pack(expandeddata,"y") - end - if chartsettings.chart.hcompact == v_yes then - pack(expandeddata,"x") - end - -- - for i=1,#expandeddata do - local cell = expandeddata[i] - local settings = cell.settings - if not settings then - cell.settings = chartsettings - else - settings.shape = settings.shape or { } - settings.focus = settings.focus or { } - settings.line = settings.line or { } - setmetatableindex(settings.shape,chartsettings.shape) - setmetatableindex(settings.focus,chartsettings.focus) - setmetatableindex(settings.line ,chartsettings.line) - end - end - return expandedchart -end - -local splitter = lpeg.splitat(",") - -function commands.flow_set_location(x,y) - if type(x) == "string" and not y then - x, y = lpegmatch(splitter,x) - end - if not x or x == "" then - x = last_x - elseif type(x) == "number" then - -- ok - elseif x == "+" then - x = last_x + 1 - elseif x == "-" then - x = last_x - 1 - elseif find(x,"^[%+%-]") then - x = last_x + (tonumber(x) or 0) - else - x = tonumber(x) - end - if not y or y == "" then - y = last_y - elseif type(y) == "number" then - -- ok - elseif y == "+" then - y = last_y + 1 - elseif x == "-" then - y = last_y - 1 - elseif find(y,"^[%+%-]") then - y = last_y + (tonumber(y) or 0) - else - y = tonumber(y) - end - temp.x = x or 1 - temp.y = y or 1 - last_x = x or last_x - last_y = y or last_y -end - -function commands.flow_set_connection(location,displacement,name) - local dx, dy = lpegmatch(splitter,displacement) - dx = tonumber(dx) - dy = tonumber(dy) - temp.connections[#temp.connections+1] = { - location = location, - dx = dx or 0, - dy = dy or 0, - name = name, - comments = { }, - } -end - -local function visible(chart,cell) - local x, y = cell.x, cell.y - return - x >= chart.from_x and x <= chart.to_x and - y >= chart.from_y and y <= chart.to_y and cell -end - -local function process_cells(chart,xoffset,yoffset) - local data = chart.data - if not data then - return - end - local focus = 
utilities.parsers.settings_to_hash(chart.settings.chart.focus or "") - for i=1,#data do - local cell = visible(chart,data[i]) - if cell then - local settings = cell.settings - local shapesettings = settings.shape - local shape = cell.shape - if not shape or shape == "" then - shape = shapesettings.default or "none" - end - if shape ~= v_none then - local shapedata = validshapes[shape] - context("flow_begin_sub_chart ;") -- when is this needed - if shapedata.kind == "line" then - local linesettings = settings.line - context("flow_shape_line_color := \\MPcolor{%s} ;", linesettings.color) - context("flow_shape_fill_color := \\MPcolor{%s} ;", linesettings.backgroundcolor) - context("flow_shape_line_width := %s ; ", points(linesettingsrulethickness)) - elseif focus[cell.focus] or focus[cell.name] then - local focussettings = settings.focus - context("flow_shape_line_color := \\MPcolor{%s} ;", focussettings.framecolor) - context("flow_shape_fill_color := \\MPcolor{%s} ;", focussettings.backgroundcolor) - context("flow_shape_line_width := %s ; ", points(focussettings.rulethickness)) - else - local shapesettings = settings.shape - context("flow_shape_line_color := \\MPcolor{%s} ;", shapesettings.framecolor) - context("flow_shape_fill_color := \\MPcolor{%s} ;", shapesettings.backgroundcolor) - context("flow_shape_line_width := %s ; " , points(shapesettings.rulethickness)) - end - context("flow_peepshape := false ;") -- todo - context("flow_new_shape(%s,%s,%s) ;",cell.x+xoffset,cell.y+yoffset,shapedata.number) - context("flow_end_sub_chart ;") - end - end - end -end - --- todo : make lpeg for splitter - -local sign = S("+p") / "1" - + S("-m") / "-1" - -local full = C(P("left")) - + C(P("right")) - + C(P("top")) - + C(P("bottom")) - -local char = P("l") / "left" - + P("r") / "right" - + P("t") / "top" - + P("b") / "bottom" - -local space = P(" ")^0 - -local what = space - * (sign + Cc("0")) - * space - * (full + char) - * space - * (sign + Cc("0")) - * space - * (full + char) - * space - * P(-1) - --- print(lpegmatch(what,"lr")) --- print(lpegmatch(what,"+l+r")) --- print(lpegmatch(what,"+l")) --- print(lpegmatch(what,"+ left+r ")) - -local function process_connections(chart,xoffset,yoffset) - local data = chart.data - local hash = chart.hash - if not data then - return - end - local settings = chart.settings - for i=1,#data do - local cell = visible(chart,data[i]) - if cell then - local connections = cell.connections - for j=1,#connections do - local connection = connections[j] - local othername = connection.name - local othercell = hash[othername] - if othercell then -- and visible(chart,data[i]) then - local cellx, celly = cell.x, cell.y - local otherx, othery, location = othercell.x, othercell.y, connection.location - if otherx > 0 and othery > 0 and cellx > 0 and celly > 0 and connection.location then - local what_cell, where_cell, what_other, where_other = lpegmatch(what,location) - if what_cell and where_cell and what_other and where_other then - local linesettings = settings.line - context("flow_smooth := %s ;", linesettings.corner == v_round and "true" or "false") - context("flow_dashline := %s ;", linesettings.dash == v_yes and "true" or "false") - context("flow_arrowtip := %s ;", linesettings.arrow == v_yes and "true" or "false") - context("flow_touchshape := %s ;", linesettings.offset == v_none and "true" or "false") - context("flow_dsp_x := %s ; flow_dsp_y := %s ;",connection.dx or 0, connection.dy or 0) - context("flow_connection_line_color := \\MPcolor{%s} ;",linesettings.color) - 
context("flow_connection_line_width := 2pt ;",points(linesettings.rulethickness)) - context("flow_connect_%s_%s (%s) (%s,%s,%s) (%s,%s,%s) ;",where_cell,where_other,j,cellx,celly,what_cell,otherx,othery,what_other) - context("flow_dsp_x := 0 ; flow_dsp_y := 0 ;") - end - end - end - end - end - end -end - -local texttemplate = "\\setvariables[flowcell:text][x=%s,y=%s,text={%s},align={%s},figure={%s},destination={%s}]" - -local splitter = lpeg.splitat(":") - -local function process_texts(chart,xoffset,yoffset) - local data = chart.data - local hash = chart.hash - if not data then - return - end - for i=1,#data do - local cell = visible(chart,data[i]) - if cell then - local x = cell.x or 1 - local y = cell.y or 1 - local texts = cell.texts - for i=1,#texts do - local text = texts[i] - local data = text.text - local align = validlabellocations[text.align or ""] or text.align or "" - local figure = i == 1 and cell.figure or "" - local destination = i == 1 and cell.destination or "" - context('flow_chart_draw_text(%s,%s,textext("%s")) ;',x,y,format(texttemplate,x,y,data,align,figure,destination)) - end - local labels = cell.labels - for i=1,#labels do - local label = labels[i] - local text = label.text - local location = validlabellocations[label.location or ""] or label.location or "" - if text and location then - context('flow_chart_draw_label(%s,%s,"%s",textext("\\strut %s")) ;',x,y,location,text) - end - end - local exits = cell.exits - for i=1,#exits do - local exit = exits[i] - local text = exit.text - local location = validlabellocations[exit.location or ""] - if text and location then - -- maybe make autoexit an option - if location == "l" and x == chart.from_x + 1 or - location == "r" and x == chart.to_x - 1 or - location == "t" and y == chart.to_y - 1 or - location == "b" and y == chart.from_y + 1 then - context('flow_chart_draw_exit(%s,%s,"%s",textext("\\strut %s")) ;',x,y,location,text) - end - end - end - local connections = cell.connections - for i=1,#connections do - local comments = connections[i].comments - for j=1,#comments do - local comment = comments[j] - local text = comment.text - local location = comment.location or "" - local length = 0 - -- "tl" "tl:*" "tl:0.5" - local loc, len = lpegmatch(splitter,location) -- do the following in lpeg - if len == "*" then - location = validcommentlocations[loc] or "" - if location == "" then - location = "*" - else - location = location .. 
":*" - end - elseif loc then - location = validcommentlocations[loc] or "*" - length = tonumber(len) or 0 - else - location = validcommentlocations[location] or "" - end - if text and location then - context('flow_chart_draw_comment(%s,%s,%s,"%s",%s,textext("\\strut %s")) ;',x,y,i,location,length,text) - end - end - end - end - end -end - -local function getchart(settings,forced_x,forced_y,forced_nx,forced_ny) - if not settings then - print("no settings given") - return - end - local chartname = settings.chart.name - if not chartname then - print("no name given") - return - end - local chart = charts[chartname] - if not chart then - print("no such chart",chartname) - return - end - chart = expanded(chart,settings) - local chartsettings = chart.settings.chart - local autofocus = chart.settings.chart.autofocus - if autofocus then - autofocus = utilities.parsers.settings_to_hash(autofocus) - if not next(autofocus) then - autofocus = false - end - end - -- check natural window - local x = forced_x or tonumber(chartsettings.x) - local y = forced_y or tonumber(chartsettings.y) - local nx = forced_nx or tonumber(chartsettings.nx) - local ny = forced_ny or tonumber(chartsettings.ny) - -- - local minx, miny, maxx, maxy = 0, 0, 0, 0 - local data = chart.data - for i=1,#data do - local cell = data[i] - if not autofocus or autofocus[cell.name] then -- offsets probably interfere with autofocus - local x = cell.x - local y = cell.y - if minx == 0 or x < minx then minx = x end - if miny == 0 or y < miny then miny = y end - if minx == 0 or x > maxx then maxx = x end - if miny == 0 or y > maxy then maxy = y end - end - end - -- print("1>",x,y,nx,ny) - -- print("2>",minx, miny, maxx, maxy) - -- check of window should be larger (maybe autofocus + nx/ny?) - if autofocus then - -- x and y are ignored - if nx and nx > 0 then - maxx = minx + nx - 1 - end - if ny and ny > 0 then - maxy = miny + ny - 1 - end - else - if x and x > 0 then - minx = x - end - if y and y > 0 then - miny = y - end - if nx and nx > 0 then - maxx = minx + nx - 1 - end - if ny and ny > 0 then - maxy = miny + ny - 1 - end - end --- print("3>",minx, miny, maxx, maxy) - -- - local nx = maxx - minx + 1 - local ny = maxy - miny + 1 - -- relocate cells - for i=1,#data do - local cell = data[i] - cell.x = cell.x - minx + 1 - cell.y = cell.y - miny + 1 - end - chart.from_x = 1 - chart.from_y = 1 - chart.to_x = nx - chart.to_y = ny - chart.nx = nx - chart.ny = ny - -- - -- inspect(chart) - return chart -end - -local function makechart(chart) - local settings = chart.settings - local chartsettings = settings.chart - -- - context.begingroup() - context.forgetall() - -- - context.startMPcode() - context("if unknown context_flow : input mp-char.mpiv ; fi ;") - context("flow_begin_chart(0,%s,%s);",chart.nx,chart.ny) - -- - if chartsettings.option == v_test or chartsettings.dot == v_yes then - context("flow_show_con_points := true ;") - context("flow_show_mid_points := true ;") - context("flow_show_all_points := true ;") - elseif chartsettings.dot ~= "" then -- no checking done, private option - context("flow_show_%s_points := true ;",chartsettings.dot) - end - -- - local backgroundcolor = chartsettings.backgroundcolor - if backgroundcolor and backgroundcolor ~= "" then - context("flow_chart_background_color := \\MPcolor{%s} ;",backgroundcolor) - end - -- - local shapewidth = chartsettings.width - local gridwidth = shapewidth + 2*chartsettings.dx - local shapeheight = chartsettings.height - local gridheight = shapeheight + 2*chartsettings.dy - local 
chartoffset = chartsettings.offset - local labeloffset = chartsettings.labeloffset - local exitoffset = chartsettings.exitoffset - local commentoffset = chartsettings.commentoffset - context("flow_grid_width := %s ;", points(gridwidth)) - context("flow_grid_height := %s ;", points(gridheight)) - context("flow_shape_width := %s ;", points(shapewidth)) - context("flow_shape_height := %s ;", points(shapeheight)) - context("flow_chart_offset := %s ;", points(chartoffset)) - context("flow_label_offset := %s ;", points(labeloffset)) - context("flow_exit_offset := %s ;", points(exitoffset)) - context("flow_comment_offset := %s ;", points(commentoffset)) - -- - local radius = settings.line.radius - local rulethickness = settings.line.rulethickness - local dx = chartsettings.dx - local dy = chartsettings.dy - if radius < rulethickness then - radius = 2.5*rulethickness - if radius > dx then - radius = dx - end - if radius > dy then - radius = dy - end - end - context("flow_connection_line_width := %s ;", points(rulethickness)) - context("flow_connection_smooth_size := %s ;", points(radius)) - context("flow_connection_arrow_size := %s ;", points(radius)) - context("flow_connection_dash_size := %s ;", points(radius)) - -- - local offset = chartsettings.offset -- todo: pass string - if offset == v_none or offset == v_overlay or offset == "" then - offset = -2.5 * radius -- or rulethickness? - elseif offset == v_standard then - offset = radius -- or rulethickness? - end - context("flow_chart_offset := %s ;",points(offset)) - -- - context("flow_reverse_y := true ;") - process_cells(chart,0,0) - process_connections(chart,0,0) - process_texts(chart,0,0) - -- context("clip_chart(%s,%s,%s,%s) ;",x,y,nx,ny) -- todo: draw lines but not shapes - context("flow_end_chart ;") - context.stopMPcode() - context.endgroup() -end - -local function splitchart(chart) - local settings = chart.settings - local splitsettings = settings.split - local chartsettings = settings.chart - -- - local name = chartsettings.name - -- - local from_x = chart.from_x - local from_y = chart.from_y - local to_x = chart.to_x - local to_y = chart.to_y - -- - local step_x = splitsettings.nx or to_x - local step_y = splitsettings.ny or to_y - local delta_x = splitsettings.dx or 0 - local delta_y = splitsettings.dy or 0 - -- - report_chart("spliting %a from (%s,%s) upto (%s,%s) into (%s,%s) with overlap (%s,%s)", - name,from_x,from_y,to_x,to_y,step_x,step_y,delta_x,delta_y) - -- - local part_x = 0 - local first_x = from_x - while true do - part_x = part_x + 1 - local last_x = first_x + step_x - 1 - local done = last_x >= to_x - if done then - last_x = to_x - end - local part_y = 0 - local first_y = from_y - while true do - part_y = part_y + 1 - local last_y = first_y + step_y - 1 - local done = last_y >= to_y - if done then - last_y = to_y - end - -- - report_chart("part (%s,%s) of %a is split from (%s,%s) -> (%s,%s)",part_x,part_y,name,first_x,first_y,last_x,last_y) - local x, y, nx, ny = first_x, first_y, last_x - first_x + 1,last_y - first_y + 1 - context.beforeFLOWsplit() - context.handleFLOWsplit(function() - makechart(getchart(settings,x,y,nx,ny)) -- we need to pass frozen settings ! 
- end) - context.afterFLOWsplit() - -- - if done then - break - else - first_y = last_y + 1 - delta_y - end - end - if done then - break - else - first_x = last_x + 1 - delta_x - end - end -end - -function commands.flow_make_chart(settings) - local chart = getchart(settings) - if chart then - local settings = chart.settings - if settings then - local chartsettings = settings.chart - if chartsettings and chartsettings.split == v_yes then - splitchart(chart) - else - makechart(chart) - end - else - makechart(chart) - end - end -end +if not modules then modules = { } end modules ['x-flow'] = { + version = 1.001, + comment = "companion to m-flow.mkvi", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- when we can resolve mpcolor at the lua end we will +-- use metapost.graphic(....) directly + +-- todo: labels + +moduledata.charts = moduledata.charts or { } + +local gsub, match, find, format, lower = string.gsub, string.match, string.find, string.format, string.lower +local setmetatableindex = table.setmetatableindex +local P, S, C, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.match + +local report_chart = logs.reporter("chart") + +local points = number.points + +local variables = interfaces.variables + +local v_yes = variables.yes +local v_no = variables.no +local v_none = variables.none +local v_standard = variables.standard +local v_overlay = variables.overlay +local v_round = variables.round +local v_test = variables.test + +local defaults = { + chart = { + name = "", + option = "", + backgroundcolor = "", + width = 100*65536, + height = 50*65536, + dx = 30*65536, + dy = 30*65536, + offset = 0, + bodyfont = "", + dot = "", + hcompact = variables_no, + vcompact = variables_no, + autofocus = "", + focus = "", + labeloffset = 5*65536, + commentoffset = 5*65536, + exitoffset = 0, + + }, + shape = { -- FLOS + rulethickness = 65536, + default = "", + framecolor = "darkblue", + backgroundcolor = "lightgray", + }, + focus = { -- FLOF + rulethickness = 65536, + framecolor = "darkred", + backgroundcolor = "gray", + }, + line = { -- FLOL + rulethickness = 65536, + radius = 10*65536, + color = "darkgreen", + corner = "", + dash = "", + arrow = "", + offset = "", + }, + set = { -- FLOX + }, + split = { + nx = 3, + ny = 3, + command = "", + marking = "", + before = "", + after = "", + } +} + +local validshapes = { + ["node"] = { kind = "shape", number = 0 }, + ["action"] = { kind = "shape", number = 24 }, + ["procedure"] = { kind = "shape", number = 5 }, + ["product"] = { kind = "shape", number = 12 }, + ["decision"] = { kind = "shape", number = 14 }, + ["archive"] = { kind = "shape", number = 19 }, + ["loop"] = { kind = "shape", number = 35 }, + ["wait"] = { kind = "shape", number = 6 }, + ["subprocedure"] = { kind = "shape", number = 20 }, + ["singledocument"] = { kind = "shape", number = 32 }, + ["multidocument"] = { kind = "shape", number = 33 }, + + ["right"] = { kind = "line", number = 66 }, + ["left"] = { kind = "line", number = 67 }, + ["up"] = { kind = "line", number = 68 }, + ["down"] = { kind = "line", number = 69 }, +} + +local validlabellocations = { + l = "l", left = "l", + r = "r", right = "r", + t = "t", top = "t", + b = "b", bottom = "b", + lt = "lt", + rt = "rt", + lb = "lb", + rb = "rb", + tl = "tl", + tr = "tr", + bl = "bl", + br = "br", +} + +local validcommentlocations = { + l = "l", left = "l", + r = "r", right = "r", + t = "t", top = "t", + b = "b", bottom = "b", 
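-- Locations for labels, comments and texts may be given as single letters, full
-- words or two-letter corner codes; these tables normalize them before the values
-- are handed to the MetaPost macros. The text variant (defined below) additionally
-- folds corner codes onto one spelling. A few lookups as a sketch, with example
-- inputs:
--
-- validcommentlocations["left"]   -- "l"
-- validcommentlocations["tl"]     -- "tl"
-- validtextlocations   ["tl"]     -- "lt"
-- validtextlocations   ["middle"] -- "m"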
+ lt = "lt", + rt = "rt", + lb = "lb", + rb = "rb", + tl = "tl", + tr = "tr", + bl = "bl", + br = "br", +} + +local validtextlocations = { + l = "l", left = "l", + r = "r", right = "r", + t = "t", top = "t", + b = "b", bottom = "b", + c = "c", center = "c", + m = "c", middle = "m", + lt = "lt", + rt = "rt", + lb = "lb", + rb = "rb", + tl = "lt", + tr = "rt", + bl = "lb", + br = "rb", +} + +setmetatableindex(validshapes,function(t,k) + local l = gsub(lower(k)," ","") + local v = rawget(t,l) + if not v then + local n = tonumber(k) + if n then + v = { kind = "shape", number = n } + else + v = rawget(t,"action") + end + end + t[k] = v + return v +end) + +local charts = { } + +local data, hash, temp, last_x, last_y, name + +function commands.flow_start_chart(chartname) + data = { } + hash = { } + last_x, last_y = 0, 0 + name = chartname +end + +function commands.flow_stop_chart() + charts[name] = { + data = data, + hash = hash, + last_x = last_x, + last_y = last_y, + } + data, hash, temp = nil, nil, nil +end + +-- function commands.flow_set(chartname,chartdata) +-- local hash = { } +-- local data = { } +-- charts[name] = { +-- data = data, +-- hash = hash, +-- } +-- for i=1,#chartdata do +-- local di = data[i] +-- local name = di.name or "" +-- if name then +-- data[#data+1] = { +-- name = name, +-- labels = di.labels or { }, +-- comments = di.comments or { }, +-- exits = di.exits or { }, +-- connections = di.connections or { }, +-- settings = di.settings or { }, +-- x = di.x or 1, +-- y = di.y or 1, +-- } +-- hash[name] = i +-- end +-- end +-- end + +function commands.flow_reset(chartname) + charts[name] = nil +end + +function commands.flow_set_current_cell(n) + temp = data[tonumber(n)] or { } +end + +function commands.flow_start_cell(settings) + temp = { + texts = { }, + labels = { }, + exits = { }, + connections = { }, + settings = settings, + x = 1, + y = 1, + name = "", + } +end + +function commands.flow_stop_cell() + data[#data+1] = temp + hash[temp.name or #data] = temp +end + +function commands.flow_set_name(str) + temp.name = str +end + +function commands.flow_set_shape(str) + temp.shape = str +end + +function commands.flow_set_destination(str) + temp.destination = str +end + +function commands.flow_set_text(align,str) + temp.texts[#temp.texts+1] = { + location = align, + text = str, + } +end + +function commands.flow_set_overlay(str) + temp.overlay = str +end + +function commands.flow_set_focus(str) + temp.focus = str +end + +function commands.flow_set_figure(str) + temp.figure = str +end + +function commands.flow_set_label(location,text) + temp.labels[#temp.labels+1] = { + location = location, + text = text, + } +end + +function commands.flow_set_comment(location,text) + local connections = temp.connections + if connections then + local connection = connections[#connections] + if connection then + local comments = connection.comments + if comments then + comments[#comments+1] = { + location = location, + text = text, + } + end + end + end +end + +function commands.flow_set_exit(location,text) + temp.exits[#temp.exits+1] = { + location = location, + text = text, + } +end + +function commands.flow_set_include(name,x,y,settings) + data[#data+1] = { + include = name, + x = x, + y = y, + -- settings = settings, + } +end + +local function inject(includedata,data,hash) + local subchart = charts[includedata.include] + if not subchart then + return + end + local subdata = subchart.data + if not subdata then + return + end + local xoffset = (includedata.x or 1) - 1 + local yoffset = 
(includedata.y or 1) - 1 + local settings = includedata.settings + for i=1,#subdata do + local si = subdata[i] + if si.include then + inject(si,data,hash) + else + local t = { + x = si.x + xoffset, + y = si.y + yoffset, + settings = settings, + } + setmetatableindex(t,si) + data[#data+1] = t + hash[si.name or #data] = t + end + end +end + +local function pack(data,field) + local list, max = { }, 0 + for e=1,#data do + local d = data[e] + local f = d[field] + list[f] = true + if f > max then + max = f + end + end + for i=1,max do + if not list[i] then + for e=1,#data do + local d = data[e] + local f = d[field] + if f > i then + d[field] = f - 1 + end + end + end + end +end + +local function expanded(chart,chartsettings) + local expandeddata = { } + local expandedhash = { } + local expandedchart = { + data = expandeddata, + hash = expandedhash, + } + setmetatableindex(expandedchart,chart) + local data = chart.data + local hash = chart.hash + for i=1,#data do + local di = data[i] + if di.include then + inject(di,expandeddata,expandedhash) + else + expandeddata[#expandeddata+1] = di + expandedhash[di.name or #expandeddata] = di + end + end + -- + expandedchart.settings = chartsettings or { } + -- make locals + chartsettings.shape = chartsettings.shape or { } + chartsettings.focus = chartsettings.focus or { } + chartsettings.line = chartsettings.line or { } + chartsettings.set = chartsettings.set or { } + chartsettings.split = chartsettings.split or { } + chartsettings.chart = chartsettings.chart or { } + setmetatableindex(chartsettings.shape,defaults.shape) + setmetatableindex(chartsettings.focus,defaults.focus) + setmetatableindex(chartsettings.line ,defaults.line ) + setmetatableindex(chartsettings.set ,defaults.set ) + setmetatableindex(chartsettings.split,defaults.split) + setmetatableindex(chartsettings.chart,defaults.chart) + -- + if chartsettings.chart.vcompact == v_yes then + pack(expandeddata,"y") + end + if chartsettings.chart.hcompact == v_yes then + pack(expandeddata,"x") + end + -- + for i=1,#expandeddata do + local cell = expandeddata[i] + local settings = cell.settings + if not settings then + cell.settings = chartsettings + else + settings.shape = settings.shape or { } + settings.focus = settings.focus or { } + settings.line = settings.line or { } + setmetatableindex(settings.shape,chartsettings.shape) + setmetatableindex(settings.focus,chartsettings.focus) + setmetatableindex(settings.line ,chartsettings.line) + end + end + return expandedchart +end + +local splitter = lpeg.splitat(",") + +function commands.flow_set_location(x,y) + if type(x) == "string" and not y then + x, y = lpegmatch(splitter,x) + end + if not x or x == "" then + x = last_x + elseif type(x) == "number" then + -- ok + elseif x == "+" then + x = last_x + 1 + elseif x == "-" then + x = last_x - 1 + elseif find(x,"^[%+%-]") then + x = last_x + (tonumber(x) or 0) + else + x = tonumber(x) + end + if not y or y == "" then + y = last_y + elseif type(y) == "number" then + -- ok + elseif y == "+" then + y = last_y + 1 + elseif x == "-" then + y = last_y - 1 + elseif find(y,"^[%+%-]") then + y = last_y + (tonumber(y) or 0) + else + y = tonumber(y) + end + temp.x = x or 1 + temp.y = y or 1 + last_x = x or last_x + last_y = y or last_y +end + +function commands.flow_set_connection(location,displacement,name) + local dx, dy = lpegmatch(splitter,displacement) + dx = tonumber(dx) + dy = tonumber(dy) + temp.connections[#temp.connections+1] = { + location = location, + dx = dx or 0, + dy = dy or 0, + name = name, + comments 
= { }, + } +end + +local function visible(chart,cell) + local x, y = cell.x, cell.y + return + x >= chart.from_x and x <= chart.to_x and + y >= chart.from_y and y <= chart.to_y and cell +end + +local function process_cells(chart,xoffset,yoffset) + local data = chart.data + if not data then + return + end + local focus = utilities.parsers.settings_to_hash(chart.settings.chart.focus or "") + for i=1,#data do + local cell = visible(chart,data[i]) + if cell then + local settings = cell.settings + local shapesettings = settings.shape + local shape = cell.shape + if not shape or shape == "" then + shape = shapesettings.default or "none" + end + if shape ~= v_none then + local shapedata = validshapes[shape] + context("flow_begin_sub_chart ;") -- when is this needed + if shapedata.kind == "line" then + local linesettings = settings.line + context("flow_shape_line_color := \\MPcolor{%s} ;", linesettings.color) + context("flow_shape_fill_color := \\MPcolor{%s} ;", linesettings.backgroundcolor) + context("flow_shape_line_width := %s ; ", points(linesettingsrulethickness)) + elseif focus[cell.focus] or focus[cell.name] then + local focussettings = settings.focus + context("flow_shape_line_color := \\MPcolor{%s} ;", focussettings.framecolor) + context("flow_shape_fill_color := \\MPcolor{%s} ;", focussettings.backgroundcolor) + context("flow_shape_line_width := %s ; ", points(focussettings.rulethickness)) + else + local shapesettings = settings.shape + context("flow_shape_line_color := \\MPcolor{%s} ;", shapesettings.framecolor) + context("flow_shape_fill_color := \\MPcolor{%s} ;", shapesettings.backgroundcolor) + context("flow_shape_line_width := %s ; " , points(shapesettings.rulethickness)) + end + context("flow_peepshape := false ;") -- todo + context("flow_new_shape(%s,%s,%s) ;",cell.x+xoffset,cell.y+yoffset,shapedata.number) + context("flow_end_sub_chart ;") + end + end + end +end + +-- todo : make lpeg for splitter + +local sign = S("+p") / "1" + + S("-m") / "-1" + +local full = C(P("left")) + + C(P("right")) + + C(P("top")) + + C(P("bottom")) + +local char = P("l") / "left" + + P("r") / "right" + + P("t") / "top" + + P("b") / "bottom" + +local space = P(" ")^0 + +local what = space + * (sign + Cc("0")) + * space + * (full + char) + * space + * (sign + Cc("0")) + * space + * (full + char) + * space + * P(-1) + +-- print(lpegmatch(what,"lr")) +-- print(lpegmatch(what,"+l+r")) +-- print(lpegmatch(what,"+l")) +-- print(lpegmatch(what,"+ left+r ")) + +local function process_connections(chart,xoffset,yoffset) + local data = chart.data + local hash = chart.hash + if not data then + return + end + local settings = chart.settings + for i=1,#data do + local cell = visible(chart,data[i]) + if cell then + local connections = cell.connections + for j=1,#connections do + local connection = connections[j] + local othername = connection.name + local othercell = hash[othername] + if othercell then -- and visible(chart,data[i]) then + local cellx, celly = cell.x, cell.y + local otherx, othery, location = othercell.x, othercell.y, connection.location + if otherx > 0 and othery > 0 and cellx > 0 and celly > 0 and connection.location then + local what_cell, where_cell, what_other, where_other = lpegmatch(what,location) + if what_cell and where_cell and what_other and where_other then + local linesettings = settings.line + context("flow_smooth := %s ;", linesettings.corner == v_round and "true" or "false") + context("flow_dashline := %s ;", linesettings.dash == v_yes and "true" or "false") + context("flow_arrowtip := 
%s ;", linesettings.arrow == v_yes and "true" or "false") + context("flow_touchshape := %s ;", linesettings.offset == v_none and "true" or "false") + context("flow_dsp_x := %s ; flow_dsp_y := %s ;",connection.dx or 0, connection.dy or 0) + context("flow_connection_line_color := \\MPcolor{%s} ;",linesettings.color) + context("flow_connection_line_width := 2pt ;",points(linesettings.rulethickness)) + context("flow_connect_%s_%s (%s) (%s,%s,%s) (%s,%s,%s) ;",where_cell,where_other,j,cellx,celly,what_cell,otherx,othery,what_other) + context("flow_dsp_x := 0 ; flow_dsp_y := 0 ;") + end + end + end + end + end + end +end + +local texttemplate = "\\setvariables[flowcell:text][x=%s,y=%s,text={%s},align={%s},figure={%s},destination={%s}]" + +local splitter = lpeg.splitat(":") + +local function process_texts(chart,xoffset,yoffset) + local data = chart.data + local hash = chart.hash + if not data then + return + end + for i=1,#data do + local cell = visible(chart,data[i]) + if cell then + local x = cell.x or 1 + local y = cell.y or 1 + local texts = cell.texts + for i=1,#texts do + local text = texts[i] + local data = text.text + local align = validlabellocations[text.align or ""] or text.align or "" + local figure = i == 1 and cell.figure or "" + local destination = i == 1 and cell.destination or "" + context('flow_chart_draw_text(%s,%s,textext("%s")) ;',x,y,format(texttemplate,x,y,data,align,figure,destination)) + end + local labels = cell.labels + for i=1,#labels do + local label = labels[i] + local text = label.text + local location = validlabellocations[label.location or ""] or label.location or "" + if text and location then + context('flow_chart_draw_label(%s,%s,"%s",textext("\\strut %s")) ;',x,y,location,text) + end + end + local exits = cell.exits + for i=1,#exits do + local exit = exits[i] + local text = exit.text + local location = validlabellocations[exit.location or ""] + if text and location then + -- maybe make autoexit an option + if location == "l" and x == chart.from_x + 1 or + location == "r" and x == chart.to_x - 1 or + location == "t" and y == chart.to_y - 1 or + location == "b" and y == chart.from_y + 1 then + context('flow_chart_draw_exit(%s,%s,"%s",textext("\\strut %s")) ;',x,y,location,text) + end + end + end + local connections = cell.connections + for i=1,#connections do + local comments = connections[i].comments + for j=1,#comments do + local comment = comments[j] + local text = comment.text + local location = comment.location or "" + local length = 0 + -- "tl" "tl:*" "tl:0.5" + local loc, len = lpegmatch(splitter,location) -- do the following in lpeg + if len == "*" then + location = validcommentlocations[loc] or "" + if location == "" then + location = "*" + else + location = location .. 
":*" + end + elseif loc then + location = validcommentlocations[loc] or "*" + length = tonumber(len) or 0 + else + location = validcommentlocations[location] or "" + end + if text and location then + context('flow_chart_draw_comment(%s,%s,%s,"%s",%s,textext("\\strut %s")) ;',x,y,i,location,length,text) + end + end + end + end + end +end + +local function getchart(settings,forced_x,forced_y,forced_nx,forced_ny) + if not settings then + print("no settings given") + return + end + local chartname = settings.chart.name + if not chartname then + print("no name given") + return + end + local chart = charts[chartname] + if not chart then + print("no such chart",chartname) + return + end + chart = expanded(chart,settings) + local chartsettings = chart.settings.chart + local autofocus = chart.settings.chart.autofocus + if autofocus then + autofocus = utilities.parsers.settings_to_hash(autofocus) + if not next(autofocus) then + autofocus = false + end + end + -- check natural window + local x = forced_x or tonumber(chartsettings.x) + local y = forced_y or tonumber(chartsettings.y) + local nx = forced_nx or tonumber(chartsettings.nx) + local ny = forced_ny or tonumber(chartsettings.ny) + -- + local minx, miny, maxx, maxy = 0, 0, 0, 0 + local data = chart.data + for i=1,#data do + local cell = data[i] + if not autofocus or autofocus[cell.name] then -- offsets probably interfere with autofocus + local x = cell.x + local y = cell.y + if minx == 0 or x < minx then minx = x end + if miny == 0 or y < miny then miny = y end + if minx == 0 or x > maxx then maxx = x end + if miny == 0 or y > maxy then maxy = y end + end + end + -- print("1>",x,y,nx,ny) + -- print("2>",minx, miny, maxx, maxy) + -- check of window should be larger (maybe autofocus + nx/ny?) + if autofocus then + -- x and y are ignored + if nx and nx > 0 then + maxx = minx + nx - 1 + end + if ny and ny > 0 then + maxy = miny + ny - 1 + end + else + if x and x > 0 then + minx = x + end + if y and y > 0 then + miny = y + end + if nx and nx > 0 then + maxx = minx + nx - 1 + end + if ny and ny > 0 then + maxy = miny + ny - 1 + end + end +-- print("3>",minx, miny, maxx, maxy) + -- + local nx = maxx - minx + 1 + local ny = maxy - miny + 1 + -- relocate cells + for i=1,#data do + local cell = data[i] + cell.x = cell.x - minx + 1 + cell.y = cell.y - miny + 1 + end + chart.from_x = 1 + chart.from_y = 1 + chart.to_x = nx + chart.to_y = ny + chart.nx = nx + chart.ny = ny + -- + -- inspect(chart) + return chart +end + +local function makechart(chart) + local settings = chart.settings + local chartsettings = settings.chart + -- + context.begingroup() + context.forgetall() + -- + context.startMPcode() + context("if unknown context_flow : input mp-char.mpiv ; fi ;") + context("flow_begin_chart(0,%s,%s);",chart.nx,chart.ny) + -- + if chartsettings.option == v_test or chartsettings.dot == v_yes then + context("flow_show_con_points := true ;") + context("flow_show_mid_points := true ;") + context("flow_show_all_points := true ;") + elseif chartsettings.dot ~= "" then -- no checking done, private option + context("flow_show_%s_points := true ;",chartsettings.dot) + end + -- + local backgroundcolor = chartsettings.backgroundcolor + if backgroundcolor and backgroundcolor ~= "" then + context("flow_chart_background_color := \\MPcolor{%s} ;",backgroundcolor) + end + -- + local shapewidth = chartsettings.width + local gridwidth = shapewidth + 2*chartsettings.dx + local shapeheight = chartsettings.height + local gridheight = shapeheight + 2*chartsettings.dy + local 
chartoffset = chartsettings.offset + local labeloffset = chartsettings.labeloffset + local exitoffset = chartsettings.exitoffset + local commentoffset = chartsettings.commentoffset + context("flow_grid_width := %s ;", points(gridwidth)) + context("flow_grid_height := %s ;", points(gridheight)) + context("flow_shape_width := %s ;", points(shapewidth)) + context("flow_shape_height := %s ;", points(shapeheight)) + context("flow_chart_offset := %s ;", points(chartoffset)) + context("flow_label_offset := %s ;", points(labeloffset)) + context("flow_exit_offset := %s ;", points(exitoffset)) + context("flow_comment_offset := %s ;", points(commentoffset)) + -- + local radius = settings.line.radius + local rulethickness = settings.line.rulethickness + local dx = chartsettings.dx + local dy = chartsettings.dy + if radius < rulethickness then + radius = 2.5*rulethickness + if radius > dx then + radius = dx + end + if radius > dy then + radius = dy + end + end + context("flow_connection_line_width := %s ;", points(rulethickness)) + context("flow_connection_smooth_size := %s ;", points(radius)) + context("flow_connection_arrow_size := %s ;", points(radius)) + context("flow_connection_dash_size := %s ;", points(radius)) + -- + local offset = chartsettings.offset -- todo: pass string + if offset == v_none or offset == v_overlay or offset == "" then + offset = -2.5 * radius -- or rulethickness? + elseif offset == v_standard then + offset = radius -- or rulethickness? + end + context("flow_chart_offset := %s ;",points(offset)) + -- + context("flow_reverse_y := true ;") + process_cells(chart,0,0) + process_connections(chart,0,0) + process_texts(chart,0,0) + -- context("clip_chart(%s,%s,%s,%s) ;",x,y,nx,ny) -- todo: draw lines but not shapes + context("flow_end_chart ;") + context.stopMPcode() + context.endgroup() +end + +local function splitchart(chart) + local settings = chart.settings + local splitsettings = settings.split + local chartsettings = settings.chart + -- + local name = chartsettings.name + -- + local from_x = chart.from_x + local from_y = chart.from_y + local to_x = chart.to_x + local to_y = chart.to_y + -- + local step_x = splitsettings.nx or to_x + local step_y = splitsettings.ny or to_y + local delta_x = splitsettings.dx or 0 + local delta_y = splitsettings.dy or 0 + -- + report_chart("spliting %a from (%s,%s) upto (%s,%s) into (%s,%s) with overlap (%s,%s)", + name,from_x,from_y,to_x,to_y,step_x,step_y,delta_x,delta_y) + -- + local part_x = 0 + local first_x = from_x + while true do + part_x = part_x + 1 + local last_x = first_x + step_x - 1 + local done = last_x >= to_x + if done then + last_x = to_x + end + local part_y = 0 + local first_y = from_y + while true do + part_y = part_y + 1 + local last_y = first_y + step_y - 1 + local done = last_y >= to_y + if done then + last_y = to_y + end + -- + report_chart("part (%s,%s) of %a is split from (%s,%s) -> (%s,%s)",part_x,part_y,name,first_x,first_y,last_x,last_y) + local x, y, nx, ny = first_x, first_y, last_x - first_x + 1,last_y - first_y + 1 + context.beforeFLOWsplit() + context.handleFLOWsplit(function() + makechart(getchart(settings,x,y,nx,ny)) -- we need to pass frozen settings ! 
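-- A small worked example of the surrounding split walk, with hypothetical
-- numbers (not taken from any real chart): for a chart running from (1,1) to
-- (10,8) with step_x = step_y = 4 and delta_x = delta_y = 1, the horizontal
-- loop produces first_x/last_x pairs 1..4, then 4..7 (first_x = 4 + 1 - 1),
-- then 7..10, where last_x >= to_x ends the loop; the vertical loop walks the
-- rows the same way. So consecutive parts share delta_x columns and delta_y
-- rows, and each part is rendered by the getchart/makechart call above with
-- x = first_x, y = first_y, nx = last_x - first_x + 1 and ny likewise.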
+ end) + context.afterFLOWsplit() + -- + if done then + break + else + first_y = last_y + 1 - delta_y + end + end + if done then + break + else + first_x = last_x + 1 - delta_x + end + end +end + +function commands.flow_make_chart(settings) + local chart = getchart(settings) + if chart then + local settings = chart.settings + if settings then + local chartsettings = settings.chart + if chartsettings and chartsettings.split == v_yes then + splitchart(chart) + else + makechart(chart) + end + else + makechart(chart) + end + end +end diff --git a/tex/context/base/m-database.lua b/tex/context/base/m-database.lua index 47854daa0..c287f4926 100644 --- a/tex/context/base/m-database.lua +++ b/tex/context/base/m-database.lua @@ -1,137 +1,137 @@ -if not modules then modules = { } end modules ['m-database'] = { - version = 1.001, - comment = "companion to m-database.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local sub, gmatch, format = string.sub, string.gmatch, string.format -local concat = table.concat -local lpegpatterns, lpegmatch, lpegsplitat = lpeg.patterns, lpeg.match, lpeg.splitat -local lpegP, lpegC, lpegS, lpegCt = lpeg.P, lpeg.C, lpeg.S, lpeg.Ct -local stripstring = string.strip - --- One also needs to enable context.trace, here we only plug in some code (maybe --- some day this tracker will also toggle the main context tracer. - -local trace_flush = false trackers.register("module.database.flush", function(v) trace_flush = v end) - -local report_database = logs.reporter("database") - -buffers.database = buffers.database or { } - -local l_tab = lpegpatterns.tab -local l_space = lpegpatterns.space -local l_comma = lpegpatterns.comma -local l_empty = lpegS("\t\n\r ")^0 * lpegP(-1) - -local v_yes = interfaces.variables.yes - -local separators = { -- not interfaced - tab = l_tab, - tabs = l_tab^1, - comma = l_comma, - space = l_space, - spaces = l_space^1, -} - -function buffers.database.process(settings) - local data - if settings.type == "file" then - local filename = resolvers.finders.byscheme("any",settings.database) - data = filename ~= "" and io.loaddata(filename) - data = data and string.splitlines(data) - else - data = buffers.getlines(settings.database) - end - if data and #data > 0 then - if trace_flush then - context.pushlogger(report_database) - end - local separatorchar, quotechar, commentchar = settings.separator, settings.quotechar, settings.commentchar - local before, after = settings.before or "", settings.after or "" - local first, last = settings.first or "", settings.last or "" - local left, right = settings.left or "", settings.right or "" - local setups = settings.setups or "" - local strip = settings.strip == v_yes or false - local command = settings.command - separatorchar = (not separatorchar and ",") or separators[separatorchar] or separatorchar - local separator = type(separatorchar) == "string" and lpegS(separatorchar) or separatorchar - local whatever = lpegC((1 - separator)^0) - if quotechar and quotechar ~= "" then - local quotedata = nil - for chr in gmatch(quotechar,".") do - local quotechar = lpegP(chr) - local quoteword = l_space^0 * quotechar * lpegC((1 - quotechar)^0) * quotechar * l_space^0 - if quotedata then - quotedata = quotedata + quoteword - else - quotedata = quoteword - end - end - whatever = quotedata + whatever - end - local checker = commentchar ~= "" and lpegS(commentchar) - local splitter = lpegCt(whatever * (separator * 
whatever)^0) - local found = false - for i=1,#data do - local line = data[i] - if not lpegmatch(l_empty,line) and (not checker or not lpegmatch(checker,line)) then - local list = lpegmatch(splitter,line) - if not found then - if setups ~= "" then - context.begingroup() - context.setups { setups } - end - context(before) - found = true - end - if trace_flush then - local result, r = { }, 0 - r = r + 1 ; result[r] = first - for j=1,#list do - local str = strip and stripstring(list[j]) or list[j] - r = r + 1 ; result[r] = left - if command == "" then - r = r + 1 ; result[r] = str - else - r = r + 1 ; result[r] = command - r = r + 1 ; result[r] = "{" - r = r + 1 ; result[r] = str - r = r + 1 ; result[r] = "}" - end - r = r + 1 ; result[r] = right - end - r = r + 1 ; result[r] = last - context(concat(result)) - else - context(first) - for j=1,#list do - local str = strip and stripstring(list[j]) or list[j] - context(left) - if command == "" then - context(str) - else - context(command) - context(false,str) - end - context(right) - end - context(last) - end - end - end - if found then - context(after) - if setups ~= "" then - context.endgroup() - end - end - if trace_flush then - context.poplogger() - end - else - -- message - end -end +if not modules then modules = { } end modules ['m-database'] = { + version = 1.001, + comment = "companion to m-database.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local sub, gmatch, format = string.sub, string.gmatch, string.format +local concat = table.concat +local lpegpatterns, lpegmatch, lpegsplitat = lpeg.patterns, lpeg.match, lpeg.splitat +local lpegP, lpegC, lpegS, lpegCt = lpeg.P, lpeg.C, lpeg.S, lpeg.Ct +local stripstring = string.strip + +-- One also needs to enable context.trace, here we only plug in some code (maybe +-- some day this tracker will also toggle the main context tracer. 
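-- The process function defined below takes a flat settings table. A minimal
-- usage sketch follows; the buffer name, the \MyCell command and the natural
-- table wrappers are hypothetical choices, not something this module defines:
--
--   buffers.database.process {
--       type        = "buffer",       -- anything but "file" reads a buffer
--       database    = "mydata",       -- buffer with lines like: foo,"bar baz",gnu
--       separator   = "comma",        -- or "tab", "space", or a literal character
--       quotechar   = '"',
--       commentchar = "%",
--       first       = "\\bTR",  last  = "\\eTR",
--       left        = "\\bTD",  right = "\\eTD",
--       command     = "\\MyCell",     -- wrapped around each field when non-empty
--       strip       = interfaces.variables.yes,
--   }
--
-- In practice these settings are assembled at the TeX end by the companion
-- m-database.mkiv interface rather than filled in by hand.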
+ +local trace_flush = false trackers.register("module.database.flush", function(v) trace_flush = v end) + +local report_database = logs.reporter("database") + +buffers.database = buffers.database or { } + +local l_tab = lpegpatterns.tab +local l_space = lpegpatterns.space +local l_comma = lpegpatterns.comma +local l_empty = lpegS("\t\n\r ")^0 * lpegP(-1) + +local v_yes = interfaces.variables.yes + +local separators = { -- not interfaced + tab = l_tab, + tabs = l_tab^1, + comma = l_comma, + space = l_space, + spaces = l_space^1, +} + +function buffers.database.process(settings) + local data + if settings.type == "file" then + local filename = resolvers.finders.byscheme("any",settings.database) + data = filename ~= "" and io.loaddata(filename) + data = data and string.splitlines(data) + else + data = buffers.getlines(settings.database) + end + if data and #data > 0 then + if trace_flush then + context.pushlogger(report_database) + end + local separatorchar, quotechar, commentchar = settings.separator, settings.quotechar, settings.commentchar + local before, after = settings.before or "", settings.after or "" + local first, last = settings.first or "", settings.last or "" + local left, right = settings.left or "", settings.right or "" + local setups = settings.setups or "" + local strip = settings.strip == v_yes or false + local command = settings.command + separatorchar = (not separatorchar and ",") or separators[separatorchar] or separatorchar + local separator = type(separatorchar) == "string" and lpegS(separatorchar) or separatorchar + local whatever = lpegC((1 - separator)^0) + if quotechar and quotechar ~= "" then + local quotedata = nil + for chr in gmatch(quotechar,".") do + local quotechar = lpegP(chr) + local quoteword = l_space^0 * quotechar * lpegC((1 - quotechar)^0) * quotechar * l_space^0 + if quotedata then + quotedata = quotedata + quoteword + else + quotedata = quoteword + end + end + whatever = quotedata + whatever + end + local checker = commentchar ~= "" and lpegS(commentchar) + local splitter = lpegCt(whatever * (separator * whatever)^0) + local found = false + for i=1,#data do + local line = data[i] + if not lpegmatch(l_empty,line) and (not checker or not lpegmatch(checker,line)) then + local list = lpegmatch(splitter,line) + if not found then + if setups ~= "" then + context.begingroup() + context.setups { setups } + end + context(before) + found = true + end + if trace_flush then + local result, r = { }, 0 + r = r + 1 ; result[r] = first + for j=1,#list do + local str = strip and stripstring(list[j]) or list[j] + r = r + 1 ; result[r] = left + if command == "" then + r = r + 1 ; result[r] = str + else + r = r + 1 ; result[r] = command + r = r + 1 ; result[r] = "{" + r = r + 1 ; result[r] = str + r = r + 1 ; result[r] = "}" + end + r = r + 1 ; result[r] = right + end + r = r + 1 ; result[r] = last + context(concat(result)) + else + context(first) + for j=1,#list do + local str = strip and stripstring(list[j]) or list[j] + context(left) + if command == "" then + context(str) + else + context(command) + context(false,str) + end + context(right) + end + context(last) + end + end + end + if found then + context(after) + if setups ~= "" then + context.endgroup() + end + end + if trace_flush then + context.poplogger() + end + else + -- message + end +end diff --git a/tex/context/base/m-markdown.lua b/tex/context/base/m-markdown.lua index 1f9402f60..6c9c44d78 100644 --- a/tex/context/base/m-markdown.lua +++ b/tex/context/base/m-markdown.lua @@ -1,824 +1,824 @@ -if not modules 
then modules = { } end modules ['m-markdown'] = { - version = 1.002, - comment = "companion to m-markdown.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "see below", - license = "see context related readme files" -} - ---[[ -Copyright (C) 2009 John MacFarlane / Khaled Hosny / Hans Hagen - -The main parser is derived from the lunamark parser written by John MacFarlane. You -can download lunamark from: - - http://github.com/jgm/lunamark.git - -Khaled Hosny provided the context writer for lunamark and that was used as starting -point for the mapping. The original code can be fetched from the above location. - -While playing with the original code I got the feeling that lpeg could perform better. -The slowdown was due to the fact that the parser's lpeg was reconstructed each time a -nested parse was needed. After changing that code a bit I could bring down parsing of -some test code from 2 seconds to less than 0.1 second so I decided to stick to this -parser instead of writing my own. After all, the peg code looks pretty impressive and -visiting Johns pandoc pages is worth the effort: - - http://johnmacfarlane.net/pandoc/ - -The code here is mostly meant for processing snippets embedded in a context -documents and is no replacement for pandoc at all. Therefore an alternative is to use -pandoc in combination with Aditya's filter module. - -As I changed (and optimized) the original code, it will be clear that all errors -are mine. Eventually I might also adapt the parser code a bit more. When I ran into of -closure stack limitations I decided to flatten the code. The following implementation -seems to be a couple of hundred times faster than what I started with which is not that -bad. - -This is a second rewrite. The mentioned speed gain largely depended on the kind of -content: blocks, references and items can be rather demanding. Also, There were -some limitations with respect to the captures. So, table storage has been removed in -favor of strings, and nesting has been simplified. The first example at the end of this -file now takes .33 seconds for 567KB code (resulting in over 1MB) so we're getting there. - -There will be a third rewrite eventually. 
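Schematically, the gain comes down to building the grammar object once and
reusing it for nested parses; a rough sketch, with a made-up build_grammar
helper standing in for the real grammar construction:

    -- rebuilt on every nested parse (the slow variant):
    local function nestedparser(str)
        return lpeg.match(build_grammar(), str)
    end

    -- built once and reused (the approach taken below):
    local syntax -- assigned further down, once all captures are known
    local function nestedparser(str)
        return lpeg.match(syntax, str)
    end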
-]]-- - --- todo: we have better quote and tag scanners in ctx --- todo: provide an xhtml mapping --- todo: add a couple of extensions --- todo: check patches to the real peg - -local type, next, tonumber = type, next, tonumber -local lower, upper, gsub, rep, gmatch, format, length = string.lower, string.upper, string.gsub, string.rep, string.gmatch, string.format, string.len -local concat = table.concat -local P, R, S, V, C, Ct, Cg, Cb, Cmt, Cc, Cf, Cs = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cg, lpeg.Cb, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cs -local lpegmatch = lpeg.match -local utfbyte, utfchar = utf.byte, utf.char - -moduledata = moduledata or { } -moduledata.markdown = moduledata.markdown or { } -local markdown = moduledata.markdown - -local nofruns, nofbytes, nofhtmlblobs = 0, 0, 0 - ---------------------------------------------------------------------------------------------- - -local nestedparser -local syntax - -nestedparser = function(str) return lpegmatch(syntax,str) end - ---------------------------------------------------------------------------------------------- - -local asterisk = P("*") -local dash = P("-") -local plus = P("+") -local underscore = P("_") -local period = P(".") -local hash = P("#") -local ampersand = P("&") -local backtick = P("`") -local less = P("<") -local more = P(">") -local space = P(" ") -local squote = P("'") -local dquote = P('"') -local lparent = P("(") -local rparent = P(")") -local lbracket = P("[") -local rbracket = P("]") -local slash = P("/") -local equal = P("=") -local colon = P(":") -local semicolon = P(";") -local exclamation = P("!") - -local digit = R("09") -local hexdigit = R("09","af","AF") -local alphanumeric = R("AZ","az","09") - -local doubleasterisks = P("**") -local doubleunderscores = P("__") -local fourspaces = P(" ") - -local any = P(1) -local always = P("") - -local tab = P("\t") -local spacechar = S("\t ") -local spacing = S(" \n\r\t") -local newline = P("\r")^-1 * P("\n") -local spaceornewline = spacechar + newline -local nonspacechar = any - spaceornewline -local optionalspace = spacechar^0 -local spaces = spacechar^1 -local eof = - any -local nonindentspace = space^-3 -local blankline = optionalspace * C(newline) -local blanklines = blankline^0 -local skipblanklines = (optionalspace * newline)^0 -local linechar = P(1 - newline) -local indent = fourspaces + (nonindentspace * tab) / "" -local indentedline = indent /"" * C(linechar^1 * (newline + eof)) -local optionallyindentedline = indent^-1 /"" * C(linechar^1 * (newline + eof)) -local spnl = optionalspace * (newline * optionalspace)^-1 -local specialchar = S("*_`*&[] --- [3]:http://example.com/ (Optional Title Here) --- [2]: http://example.com/ 'Optional Title Here' --- [a]: http://example.com/ "Optional *oeps* Title Here" --- ]] --- --- local linktest = [[ --- [This link] (http://example.net/) --- [an example] (http://example.com/ "Title") --- [an example][1] --- [an example] [2] --- ]] --- --- lpeg.match((define_reference_parser+1)^0,reftest) --- --- inspect(references) --- --- lpeg.match((direct_link_parser/print + indirect_link_parser/print + 1)^0,linktest) - ---------------------------------------------------------------------------------------------- - -local blocktags = table.tohash { - "address", "blockquote" , "center", "dir", "div", "p", "pre", - "li", "ol", "ul", "dl", "dd", - "form", "fieldset", "isindex", "menu", "noframes", "frameset", - "h1", "h2", "h3", "h4", "h5", "h6", - "hr", "ht", "script", "noscript", - "table", "tbody", "tfoot", "thead", 
"th", "td", "tr", -} - ------ htmlattributevalue = squote * C((any - (blankline + squote))^0) * squote ------ + dquote * C((any - (blankline + dquote))^0) * dquote ------ + (any - S("\t >"))^1 -- any - tab - space - more ------ htmlattribute = (alphanumeric + S("_-"))^1 * spnl * (equal * spnl * htmlattributevalue)^-1 * spnl ------ htmlcomment = P(""))^0 * P("-->") - ------ htmltag = less * spnl * slash^-1 * alphanumeric^1 * spnl * htmlattribute^0 * slash^-1 * spnl * more ------ ------ blocktag = Cmt(C(alphanumeric^1), function(s,i,a) return blocktags[lower(a)] and i, a end) ------ ------ openblocktag = less * Cg(blocktag, "opentag") * spnl * htmlattribute^0 * more ------ closeblocktag = less * slash * Cmt(C(alphanumeric^1) * Cb("opentag"), function(s,i,a,b) return lower(a) == lower(b) and i end) * spnl * more ------ selfclosingblocktag = less * blocktag * spnl * htmlattribute^0 * slash * more ------ ------ displayhtml = Cs { "HtmlBlock", ------ InBlockTags = openblocktag * (V("HtmlBlock") + (any - closeblocktag))^0 * closeblocktag, ------ HtmlBlock = C(V("InBlockTags") + selfclosingblocktag + htmlcomment), ------ } ------ ------ inlinehtml = Cs(htmlcomment + htmltag) - --- There is no reason to support crappy html, so we expect proper attributes. - -local htmlattributevalue = squote * C((any - (blankline + squote))^0) * squote - + dquote * C((any - (blankline + dquote))^0) * dquote -local htmlattribute = (alphanumeric + S("_-"))^1 * spnl * equal * spnl * htmlattributevalue * spnl - -local htmlcomment = P(""))^0 * P("-->") -local htmlinstruction = P("" ))^0 * P("?>" ) - --- We don't care too much about matching elements and there is no reason why display elements could not --- have inline elements so the above should be patched then. Well, markdown mixed with html is not meant --- for anything else than webpages anyway. - -local blocktag = Cmt(C(alphanumeric^1), function(s,i,a) return blocktags[lower(a)] and i, a end) - -local openelement = less * alphanumeric^1 * spnl * htmlattribute^0 * more -local closeelement = less * slash * alphanumeric^1 * spnl * more -local emptyelement = less * alphanumeric^1 * spnl * htmlattribute^0 * slash * more - -local displaytext = (any - less)^1 -local inlinetext = displaytext / nestedparser - -local displayhtml = #(less * blocktag * spnl * htmlattribute^0 * more) - * Cs { "HtmlBlock", - InBlockTags = openelement * (V("HtmlBlock") + displaytext)^0 * closeelement, - HtmlBlock = (V("InBlockTags") + emptyelement + htmlcomment + htmlinstruction), - } - -local inlinehtml = Cs { "HtmlBlock", - InBlockTags = openelement * (V("HtmlBlock") + inlinetext)^0 * closeelement, - HtmlBlock = (V("InBlockTags") + emptyelement + htmlcomment + htmlinstruction), - } - ---------------------------------------------------------------------------------------------- - -local hexentity = ampersand * hash * S("Xx") * C(hexdigit ^1) * semicolon -local decentity = ampersand * hash * C(digit ^1) * semicolon -local tagentity = ampersand * C(alphanumeric^1) * semicolon - ---------------------------------------------------------------------------------------------- - --- --[[ - -local escaped = { - ["{" ] = "", - ["}" ] = "", - ["$" ] = "", - ["&" ] = "", - ["#" ] = "", - ["~" ] = "", - ["|" ] = "", - ["%%"] = "", - ["\\"] = "", -} - -for k, v in next, escaped do - escaped[k] = "\\char" .. utfbyte(k) .. "{}" -end - -local function c_string(s) -- has to be done more often - return (gsub(s,".",escaped)) -end - -local c_linebreak = "\\crlf\n" -- is this ok? 
-local c_space = " " - -local function c_paragraph(c) - return c .. "\n\n" -- { "\\startparagraph ", c, " \\stopparagraph\n" } -end - -local function listitem(c) - return format("\n\\startitem\n%s\n\\stopitem\n",nestedparser(c)) -end - -local function c_tightbulletlist(c) - return format("\n\\startmarkdownitemize[packed]\n%s\\stopmarkdownitemize\n",c) -end - -local function c_loosebulletlist(c) - return format("\n\\startmarkdownitemize\n\\stopmarkdownitemize\n",c) -end - -local function c_tightorderedlist(c) - return format("\n\\startmarkdownitemize[n,packed]\n%s\\stopmarkdownitemize\n",c) -end - -local function c_looseorderedlist(c) - return format("\n\\startmarkdownitemize[n]\n%s\\stopmarkdownitemize\n",c) -end - -local function c_inline_html(content) - nofhtmlblobs = nofhtmlblobs + 1 - return format("\\markdowninlinehtml{%s}",content) -end - -local function c_display_html(content) - nofhtmlblobs = nofhtmlblobs + 1 - return format("\\startmarkdowndisplayhtml\n%s\n\\stopmarkdowndisplayhtml",content) -end - -local function c_emphasis(c) - return format("\\markdownemphasis{%s}",c) -end - -local function c_strong(c) - return format("\\markdownstrong{%s}",c) -end - -local function c_blockquote(c) - return format("\\startmarkdownblockquote\n%s\\stopmarkdownblockquote\n",nestedparser(c)) -end - -local function c_verbatim(c) - return format("\\startmarkdowntyping\n%s\\stopmarkdowntyping\n",c) -end - -local function c_code(c) - return format("\\markdowntype{%s}",c) -end - -local levels = { "", "", "", "", "", "" } - -local function c_start_document() - levels = { "", "", "", "", "", "" } - return "" -end - -local function c_stop_document() - return concat(levels,"\n") or "" -end - -local function c_heading(level,c) - if level > #levels then - level = #levels - end - local finish = concat(levels,"\n",level) or "" - for i=level+1,#levels do - levels[i] = "" - end - levels[level] = "\\stopstructurelevel" - return format("%s\\startstructurelevel[markdown][title={%s}]\n",finish,c) -end - -local function c_hrule() - return "\\markdownrule\n" -end - -local function c_link(lab,src,tit) - return format("\\goto{%s}[url(%s)]",nestedparser(lab),src) -end - -local function c_image(lab,src,tit) - return format("\\externalfigure[%s]",src) -end - -local function c_email_link(address) - return format("\\goto{%s}[url(mailto:%s)]",c_string(address),address) -end - -local function c_url_link(url) - return format("\\goto{%s}[url(%s)]",c_string(url),url) -end - -local function f_heading(c,n) - return c_heading(n,c) -end - -local function c_hex_entity(s) - return utfchar(tonumber(s,16)) -end - -local function c_dec_entity(s) - return utfchar(tonumber(s)) -end - -local function c_tag_entity(s) - return s -- we can use the default resolver -end - ---]] - ---------------------------------------------------------------------------------------------- - ---[[ - -local escaped = { - ["<"] = "<", - [">"] = ">", - ["&"] = "&", - ['"'] = """, -} - -local function c_string(s) -- has to be done more often - return (gsub(s,".",escaped)) -end - -local c_linebreak = "
" -local c_space = " " - -local function c_paragraph(c) - return format("

%s

\n", c) -end - -local function listitem(c) - return format("
  • %s
  • ",nestedparser(c)) -end - -local function c_tightbulletlist(c) - return format("
      \n%s\n
    \n",c) -end - -local function c_loosebulletlist(c) - return format("
      \n%s\n
    \n",c) -end - -local function c_tightorderedlist(c) - return format("
      \n%s\n
    \n",c) -end - -local function c_looseorderedlist(c) - return format("
      \n%s\n
    \n",c) -end - -local function c_inline_html(content) - nofhtmlblobs = nofhtmlblobs + 1 - return content -end - -local function c_display_html(content) - nofhtmlblobs = nofhtmlblobs + 1 - return format("\n%s\n",content) -end - -local function c_emphasis(c) - return format("%s",c) -end - -local function c_strong(c) - return format("%s",c) -end - -local function c_blockquote(c) - return format("
    \n%s\n
    ",nestedparser(c)) -end - -local function c_verbatim(c) - return format("
    %s
    ",c) -end - -local function c_code(c) - return format("%s",c) -end - -local c_start_document = "" -local c_stop_document = "" - -local function c_heading(level,c) - return format("%s\n",level,c,level) -end - -local function c_hrule() - return "
    \n" -end - -local function c_link(lab,src,tit) - local titattr = #tit > 0 and format(" title=%q",tit) or "" - return format("%s",src,titattr,nestedparser(lab)) -end - -local function c_image(lab,src,tit) - return format("%s",src,tit,nestedparser(lab)) -end - -local function c_email_link(address) - return format("%s","mailto:",address,c_escape(address)) -end - -local function c_url_link(url) - return format("%s",url,c_string(url)) -end - -local function f_heading(c,n) - return c_heading(n,c) -end - -local function c_hex_entity(s) - return utfchar(tonumber(s,16)) -end - -local function c_dec_entity(s) - return utfchar(tonumber(s)) -end - -local function c_tag_entity(s) - return format("&%s;",s) -end - ---]] - ---------------------------------------------------------------------------------------------- - -local Str = normalchar^1 / c_string -local Space = spacechar^1 / c_space -local Symbol = specialchar / c_string -local Code = inticks / c_code - -local HeadingStart = C(hash * hash^-5) / length -local HeadingStop = optionalspace * hash^0 * optionalspace * newline * blanklines -local HeadingLevel = equal^3 * Cc(1) - + dash ^3 * Cc(2) - -local NormalEndline = optionalspace * newline * -( - blankline - + more - + HeadingStart - + ( line * (P("===")^3 + P("---")^3) * newline ) - ) / c_space - -local LineBreak = P(" ") * NormalEndline / c_linebreak - -local TerminalEndline = optionalspace * newline * eof / "" - -local Endline = LineBreak - + TerminalEndline - + NormalEndline - -local AutoLinkUrl = less * C(alphanumeric^1 * P("://") * (any - (newline + more))^1) * more / c_url_link -local AutoLinkEmail = less * C((alphanumeric + S("-_+"))^1 * P("@") * (any - (newline + more))^1) * more / c_email_link - -local DirectLink = direct_link_parser / c_link -local IndirectLink = indirect_link_parser / c_link - -local ImageLink = exclamation * (direct_link_parser + indirect_link_parser) / c_image -- we can combine this with image ... smaller lpeg - -local UlOrStarLine = asterisk^4 - + underscore^4 - + (spaces * S("*_")^1 * #spaces) / c_string - -local EscapedChar = P("\\") * C(P(1 - newline)) / c_string - -local InlineHtml = inlinehtml / c_inline_html -local DisplayHtml = displayhtml / c_display_html -local HtmlEntity = hexentity / c_hex_entity - + decentity / c_dec_entity - + tagentity / c_tag_entity - -local NestedList = Cs(optionallyindentedline - (bullet + enumerator))^1 / nestedparser - -local ListBlockLine = -blankline * -(indent^-1 * (bullet + enumerator)) * optionallyindentedline - -local Verbatim = Cs(blanklines * (indentedline - blankline)^1) / c_verbatim - * (blankline^1 + eof) -- not really needed, probably capture trailing? 
we can do that beforehand - -local Blockquote = Cs(( - ((nonindentspace * more * space^-1)/"" * linechar^0 * newline)^1 - * ((linechar - blankline)^1 * newline)^0 - * blankline^0 - )^1) / c_blockquote - -local HorizontalRule = (lineof_asterisks + lineof_dashes + lineof_underscores) / c_hrule - -local Reference = define_reference_parser / "" - --- could be a mini grammar - -local ListBlock = line * ListBlockLine^0 -local ListContinuationBlock = blanklines * indent * ListBlock -local ListItem = Cs(ListBlock * (NestedList + ListContinuationBlock^0)) / listitem - ----- LeadingLines = blankline^0 / "" ----- TrailingLines = blankline^1 * #(any) / "\n" - -syntax = Cs { "Document", - - Document = V("Display")^0, - - Display = blankline -- ^1/"\n" - + Blockquote - + Verbatim - + Reference - + HorizontalRule - + HeadingStart * optionalspace * Cs((V("Inline") - HeadingStop)^1) * HeadingStop / c_heading - + Cs((V("Inline") - Endline)^1) * newline * HeadingLevel * newline * blanklines / f_heading - + Cs((bullet /"" * ListItem)^1) * blanklines * -bullet / c_tightbulletlist - + Cs((bullet /"" * ListItem * C(blanklines))^1) / c_loosebulletlist - + Cs((enumerator /"" * ListItem)^1) * blanklines * -enumerator / c_tightorderedlist - + Cs((enumerator /"" * ListItem * C(blanklines))^1) / c_looseorderedlist - + DisplayHtml - + nonindentspace * Cs(V("Inline")^1)* newline * blankline^1 / c_paragraph - + V("Inline")^1, - - Inline = Str - + Space - + Endline - + UlOrStarLine -- still needed ? - + doubleasterisks * -spaceornewline * Cs((V("Inline") - doubleasterisks )^1) * doubleasterisks / c_strong - + doubleunderscores * -spaceornewline * Cs((V("Inline") - doubleunderscores)^1) * doubleunderscores / c_strong - + asterisk * -spaceornewline * Cs((V("Inline") - asterisk )^1) * asterisk / c_emphasis - + underscore * -spaceornewline * Cs((V("Inline") - underscore )^1) * underscore / c_emphasis - + ImageLink - + DirectLink - + IndirectLink - + AutoLinkUrl - + AutoLinkEmail - + Code - + InlineHtml - + HtmlEntity - + EscapedChar - + Symbol, - -} - ---------------------------------------------------------------------------------------------- - -local function convert(str) - nofruns = nofruns + 1 - nofbytes = nofbytes + #str - statistics.starttiming(markdown) - referenceparser(str) - local result = c_start_document() .. nestedparser(str) .. c_stop_document() - statistics.stoptiming(markdown) - return result -end - -markdown.convert = convert - -function markdown.typesetstring(data) - if data and data ~= "" then - local result = convert(data) - context.viafile(result) - end -end - -function markdown.typesetbuffer(name) - markdown.typesetstring(buffers.getcontent(name)) -end - -function markdown.typesetfile(name) - local fullname = resolvers.findctxfile(name) - if fullname and fullname ~= "" then - markdown.typesetstring(io.loaddata(fullname)) - end -end - -statistics.register("markdown",function() - if nofruns > 0 then - return format("%s bytes converted, %s runs, %s html blobs, %s seconds used", - nofbytes, nofruns, nofhtmlblobs, statistics.elapsedtime(markdown)) - end -end) - ---------------------------------------------------------------------------------------------- - ---~ context.starttext() ---~ moduledata.markdown.convert(str) ---~ context.stoptext() - -if not tex.jobname then - - local one = [[ -Test *123* -========== - -BOLD *BOLD* BOLD - -
    PRE PRE PRE
    - - -* Test -** Test -* Test1 - * Test2 -* Test - -Test -==== - -> test -> test **123** *123* -> test `code` - -test - -Test -==== - -> test -> test -> test - -test -oeps - -more - - code - code - -oeps - -[an example][a] - -[an example] [2] - -[a]: http://example.com/ "Optional *oeps* Title Here" -[2]: http://example.com/ 'Optional Title Here' -[3]: http://example.com/ (Optional Title Here) - -[an example][a] - -[an example] [2] - -[an [tricky] example](http://example.com/ "Title") - -[This **xx** link](http://example.net/) - ]] - --- This snippet takes some 4 seconds in the original parser (the one that is --- a bit clearer from the perspective of grammars but somewhat messy with --- respect to the captures. In the above parser it takes .1 second. Also, --- in the later case only memory is the limit. - - local two = [[ -Test -==== -* Test -** Test -* Test -** Test -* Test - -Test -==== - -> test -> test -> test - -test - -Test -==== - -> test -> test -> test - -test - ]] - - local function test(str) - local n = 1 -- 000 - local t = os.clock() - local one = convert(str) - -- print("runtime",1,#str,#one,os.clock()-t) - str = string.rep(str,n) - local t = os.clock() - local two = convert(str) - print(two) - -- print("runtime",n,#str,#two,os.clock()-t) - -- print(format("==============\n%s\n==============",one)) - end - - -- test(one) - -- test(two) - -- test(io.read("*all")) - - -end +if not modules then modules = { } end modules ['m-markdown'] = { + version = 1.002, + comment = "companion to m-markdown.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "see below", + license = "see context related readme files" +} + +--[[ +Copyright (C) 2009 John MacFarlane / Khaled Hosny / Hans Hagen + +The main parser is derived from the lunamark parser written by John MacFarlane. You +can download lunamark from: + + http://github.com/jgm/lunamark.git + +Khaled Hosny provided the context writer for lunamark and that was used as starting +point for the mapping. The original code can be fetched from the above location. + +While playing with the original code I got the feeling that lpeg could perform better. +The slowdown was due to the fact that the parser's lpeg was reconstructed each time a +nested parse was needed. After changing that code a bit I could bring down parsing of +some test code from 2 seconds to less than 0.1 second so I decided to stick to this +parser instead of writing my own. After all, the peg code looks pretty impressive and +visiting Johns pandoc pages is worth the effort: + + http://johnmacfarlane.net/pandoc/ + +The code here is mostly meant for processing snippets embedded in a context +documents and is no replacement for pandoc at all. Therefore an alternative is to use +pandoc in combination with Aditya's filter module. + +As I changed (and optimized) the original code, it will be clear that all errors +are mine. Eventually I might also adapt the parser code a bit more. When I ran into of +closure stack limitations I decided to flatten the code. The following implementation +seems to be a couple of hundred times faster than what I started with which is not that +bad. + +This is a second rewrite. The mentioned speed gain largely depended on the kind of +content: blocks, references and items can be rather demanding. Also, There were +some limitations with respect to the captures. So, table storage has been removed in +favor of strings, and nesting has been simplified. 
The first example at the end of this +file now takes .33 seconds for 567KB code (resulting in over 1MB) so we're getting there. + +There will be a third rewrite eventually. +]]-- + +-- todo: we have better quote and tag scanners in ctx +-- todo: provide an xhtml mapping +-- todo: add a couple of extensions +-- todo: check patches to the real peg + +local type, next, tonumber = type, next, tonumber +local lower, upper, gsub, rep, gmatch, format, length = string.lower, string.upper, string.gsub, string.rep, string.gmatch, string.format, string.len +local concat = table.concat +local P, R, S, V, C, Ct, Cg, Cb, Cmt, Cc, Cf, Cs = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cg, lpeg.Cb, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cs +local lpegmatch = lpeg.match +local utfbyte, utfchar = utf.byte, utf.char + +moduledata = moduledata or { } +moduledata.markdown = moduledata.markdown or { } +local markdown = moduledata.markdown + +local nofruns, nofbytes, nofhtmlblobs = 0, 0, 0 + +--------------------------------------------------------------------------------------------- + +local nestedparser +local syntax + +nestedparser = function(str) return lpegmatch(syntax,str) end + +--------------------------------------------------------------------------------------------- + +local asterisk = P("*") +local dash = P("-") +local plus = P("+") +local underscore = P("_") +local period = P(".") +local hash = P("#") +local ampersand = P("&") +local backtick = P("`") +local less = P("<") +local more = P(">") +local space = P(" ") +local squote = P("'") +local dquote = P('"') +local lparent = P("(") +local rparent = P(")") +local lbracket = P("[") +local rbracket = P("]") +local slash = P("/") +local equal = P("=") +local colon = P(":") +local semicolon = P(";") +local exclamation = P("!") + +local digit = R("09") +local hexdigit = R("09","af","AF") +local alphanumeric = R("AZ","az","09") + +local doubleasterisks = P("**") +local doubleunderscores = P("__") +local fourspaces = P(" ") + +local any = P(1) +local always = P("") + +local tab = P("\t") +local spacechar = S("\t ") +local spacing = S(" \n\r\t") +local newline = P("\r")^-1 * P("\n") +local spaceornewline = spacechar + newline +local nonspacechar = any - spaceornewline +local optionalspace = spacechar^0 +local spaces = spacechar^1 +local eof = - any +local nonindentspace = space^-3 +local blankline = optionalspace * C(newline) +local blanklines = blankline^0 +local skipblanklines = (optionalspace * newline)^0 +local linechar = P(1 - newline) +local indent = fourspaces + (nonindentspace * tab) / "" +local indentedline = indent /"" * C(linechar^1 * (newline + eof)) +local optionallyindentedline = indent^-1 /"" * C(linechar^1 * (newline + eof)) +local spnl = optionalspace * (newline * optionalspace)^-1 +local specialchar = S("*_`*&[] +-- [3]:http://example.com/ (Optional Title Here) +-- [2]: http://example.com/ 'Optional Title Here' +-- [a]: http://example.com/ "Optional *oeps* Title Here" +-- ]] +-- +-- local linktest = [[ +-- [This link] (http://example.net/) +-- [an example] (http://example.com/ "Title") +-- [an example][1] +-- [an example] [2] +-- ]] +-- +-- lpeg.match((define_reference_parser+1)^0,reftest) +-- +-- inspect(references) +-- +-- lpeg.match((direct_link_parser/print + indirect_link_parser/print + 1)^0,linktest) + +--------------------------------------------------------------------------------------------- + +local blocktags = table.tohash { + "address", "blockquote" , "center", "dir", "div", "p", "pre", + "li", "ol", "ul", "dl", "dd", + 
"form", "fieldset", "isindex", "menu", "noframes", "frameset", + "h1", "h2", "h3", "h4", "h5", "h6", + "hr", "ht", "script", "noscript", + "table", "tbody", "tfoot", "thead", "th", "td", "tr", +} + +----- htmlattributevalue = squote * C((any - (blankline + squote))^0) * squote +----- + dquote * C((any - (blankline + dquote))^0) * dquote +----- + (any - S("\t >"))^1 -- any - tab - space - more +----- htmlattribute = (alphanumeric + S("_-"))^1 * spnl * (equal * spnl * htmlattributevalue)^-1 * spnl +----- htmlcomment = P(""))^0 * P("-->") + +----- htmltag = less * spnl * slash^-1 * alphanumeric^1 * spnl * htmlattribute^0 * slash^-1 * spnl * more +----- +----- blocktag = Cmt(C(alphanumeric^1), function(s,i,a) return blocktags[lower(a)] and i, a end) +----- +----- openblocktag = less * Cg(blocktag, "opentag") * spnl * htmlattribute^0 * more +----- closeblocktag = less * slash * Cmt(C(alphanumeric^1) * Cb("opentag"), function(s,i,a,b) return lower(a) == lower(b) and i end) * spnl * more +----- selfclosingblocktag = less * blocktag * spnl * htmlattribute^0 * slash * more +----- +----- displayhtml = Cs { "HtmlBlock", +----- InBlockTags = openblocktag * (V("HtmlBlock") + (any - closeblocktag))^0 * closeblocktag, +----- HtmlBlock = C(V("InBlockTags") + selfclosingblocktag + htmlcomment), +----- } +----- +----- inlinehtml = Cs(htmlcomment + htmltag) + +-- There is no reason to support crappy html, so we expect proper attributes. + +local htmlattributevalue = squote * C((any - (blankline + squote))^0) * squote + + dquote * C((any - (blankline + dquote))^0) * dquote +local htmlattribute = (alphanumeric + S("_-"))^1 * spnl * equal * spnl * htmlattributevalue * spnl + +local htmlcomment = P(""))^0 * P("-->") +local htmlinstruction = P("" ))^0 * P("?>" ) + +-- We don't care too much about matching elements and there is no reason why display elements could not +-- have inline elements so the above should be patched then. Well, markdown mixed with html is not meant +-- for anything else than webpages anyway. + +local blocktag = Cmt(C(alphanumeric^1), function(s,i,a) return blocktags[lower(a)] and i, a end) + +local openelement = less * alphanumeric^1 * spnl * htmlattribute^0 * more +local closeelement = less * slash * alphanumeric^1 * spnl * more +local emptyelement = less * alphanumeric^1 * spnl * htmlattribute^0 * slash * more + +local displaytext = (any - less)^1 +local inlinetext = displaytext / nestedparser + +local displayhtml = #(less * blocktag * spnl * htmlattribute^0 * more) + * Cs { "HtmlBlock", + InBlockTags = openelement * (V("HtmlBlock") + displaytext)^0 * closeelement, + HtmlBlock = (V("InBlockTags") + emptyelement + htmlcomment + htmlinstruction), + } + +local inlinehtml = Cs { "HtmlBlock", + InBlockTags = openelement * (V("HtmlBlock") + inlinetext)^0 * closeelement, + HtmlBlock = (V("InBlockTags") + emptyelement + htmlcomment + htmlinstruction), + } + +--------------------------------------------------------------------------------------------- + +local hexentity = ampersand * hash * S("Xx") * C(hexdigit ^1) * semicolon +local decentity = ampersand * hash * C(digit ^1) * semicolon +local tagentity = ampersand * C(alphanumeric^1) * semicolon + +--------------------------------------------------------------------------------------------- + +-- --[[ + +local escaped = { + ["{" ] = "", + ["}" ] = "", + ["$" ] = "", + ["&" ] = "", + ["#" ] = "", + ["~" ] = "", + ["|" ] = "", + ["%%"] = "", + ["\\"] = "", +} + +for k, v in next, escaped do + escaped[k] = "\\char" .. utfbyte(k) .. 
"{}" +end + +local function c_string(s) -- has to be done more often + return (gsub(s,".",escaped)) +end + +local c_linebreak = "\\crlf\n" -- is this ok? +local c_space = " " + +local function c_paragraph(c) + return c .. "\n\n" -- { "\\startparagraph ", c, " \\stopparagraph\n" } +end + +local function listitem(c) + return format("\n\\startitem\n%s\n\\stopitem\n",nestedparser(c)) +end + +local function c_tightbulletlist(c) + return format("\n\\startmarkdownitemize[packed]\n%s\\stopmarkdownitemize\n",c) +end + +local function c_loosebulletlist(c) + return format("\n\\startmarkdownitemize\n\\stopmarkdownitemize\n",c) +end + +local function c_tightorderedlist(c) + return format("\n\\startmarkdownitemize[n,packed]\n%s\\stopmarkdownitemize\n",c) +end + +local function c_looseorderedlist(c) + return format("\n\\startmarkdownitemize[n]\n%s\\stopmarkdownitemize\n",c) +end + +local function c_inline_html(content) + nofhtmlblobs = nofhtmlblobs + 1 + return format("\\markdowninlinehtml{%s}",content) +end + +local function c_display_html(content) + nofhtmlblobs = nofhtmlblobs + 1 + return format("\\startmarkdowndisplayhtml\n%s\n\\stopmarkdowndisplayhtml",content) +end + +local function c_emphasis(c) + return format("\\markdownemphasis{%s}",c) +end + +local function c_strong(c) + return format("\\markdownstrong{%s}",c) +end + +local function c_blockquote(c) + return format("\\startmarkdownblockquote\n%s\\stopmarkdownblockquote\n",nestedparser(c)) +end + +local function c_verbatim(c) + return format("\\startmarkdowntyping\n%s\\stopmarkdowntyping\n",c) +end + +local function c_code(c) + return format("\\markdowntype{%s}",c) +end + +local levels = { "", "", "", "", "", "" } + +local function c_start_document() + levels = { "", "", "", "", "", "" } + return "" +end + +local function c_stop_document() + return concat(levels,"\n") or "" +end + +local function c_heading(level,c) + if level > #levels then + level = #levels + end + local finish = concat(levels,"\n",level) or "" + for i=level+1,#levels do + levels[i] = "" + end + levels[level] = "\\stopstructurelevel" + return format("%s\\startstructurelevel[markdown][title={%s}]\n",finish,c) +end + +local function c_hrule() + return "\\markdownrule\n" +end + +local function c_link(lab,src,tit) + return format("\\goto{%s}[url(%s)]",nestedparser(lab),src) +end + +local function c_image(lab,src,tit) + return format("\\externalfigure[%s]",src) +end + +local function c_email_link(address) + return format("\\goto{%s}[url(mailto:%s)]",c_string(address),address) +end + +local function c_url_link(url) + return format("\\goto{%s}[url(%s)]",c_string(url),url) +end + +local function f_heading(c,n) + return c_heading(n,c) +end + +local function c_hex_entity(s) + return utfchar(tonumber(s,16)) +end + +local function c_dec_entity(s) + return utfchar(tonumber(s)) +end + +local function c_tag_entity(s) + return s -- we can use the default resolver +end + +--]] + +--------------------------------------------------------------------------------------------- + +--[[ + +local escaped = { + ["<"] = "<", + [">"] = ">", + ["&"] = "&", + ['"'] = """, +} + +local function c_string(s) -- has to be done more often + return (gsub(s,".",escaped)) +end + +local c_linebreak = "
    " +local c_space = " " + +local function c_paragraph(c) + return format("

    %s

    \n", c) +end + +local function listitem(c) + return format("
  • %s
  • ",nestedparser(c)) +end + +local function c_tightbulletlist(c) + return format("
      \n%s\n
    \n",c) +end + +local function c_loosebulletlist(c) + return format("
      \n%s\n
    \n",c) +end + +local function c_tightorderedlist(c) + return format("
      \n%s\n
    \n",c) +end + +local function c_looseorderedlist(c) + return format("
      \n%s\n
    \n",c) +end + +local function c_inline_html(content) + nofhtmlblobs = nofhtmlblobs + 1 + return content +end + +local function c_display_html(content) + nofhtmlblobs = nofhtmlblobs + 1 + return format("\n%s\n",content) +end + +local function c_emphasis(c) + return format("%s",c) +end + +local function c_strong(c) + return format("%s",c) +end + +local function c_blockquote(c) + return format("
    \n%s\n
    ",nestedparser(c)) +end + +local function c_verbatim(c) + return format("
    %s
    ",c) +end + +local function c_code(c) + return format("%s",c) +end + +local c_start_document = "" +local c_stop_document = "" + +local function c_heading(level,c) + return format("%s\n",level,c,level) +end + +local function c_hrule() + return "
    \n" +end + +local function c_link(lab,src,tit) + local titattr = #tit > 0 and format(" title=%q",tit) or "" + return format("%s",src,titattr,nestedparser(lab)) +end + +local function c_image(lab,src,tit) + return format("%s",src,tit,nestedparser(lab)) +end + +local function c_email_link(address) + return format("%s","mailto:",address,c_escape(address)) +end + +local function c_url_link(url) + return format("%s",url,c_string(url)) +end + +local function f_heading(c,n) + return c_heading(n,c) +end + +local function c_hex_entity(s) + return utfchar(tonumber(s,16)) +end + +local function c_dec_entity(s) + return utfchar(tonumber(s)) +end + +local function c_tag_entity(s) + return format("&%s;",s) +end + +--]] + +--------------------------------------------------------------------------------------------- + +local Str = normalchar^1 / c_string +local Space = spacechar^1 / c_space +local Symbol = specialchar / c_string +local Code = inticks / c_code + +local HeadingStart = C(hash * hash^-5) / length +local HeadingStop = optionalspace * hash^0 * optionalspace * newline * blanklines +local HeadingLevel = equal^3 * Cc(1) + + dash ^3 * Cc(2) + +local NormalEndline = optionalspace * newline * -( + blankline + + more + + HeadingStart + + ( line * (P("===")^3 + P("---")^3) * newline ) + ) / c_space + +local LineBreak = P(" ") * NormalEndline / c_linebreak + +local TerminalEndline = optionalspace * newline * eof / "" + +local Endline = LineBreak + + TerminalEndline + + NormalEndline + +local AutoLinkUrl = less * C(alphanumeric^1 * P("://") * (any - (newline + more))^1) * more / c_url_link +local AutoLinkEmail = less * C((alphanumeric + S("-_+"))^1 * P("@") * (any - (newline + more))^1) * more / c_email_link + +local DirectLink = direct_link_parser / c_link +local IndirectLink = indirect_link_parser / c_link + +local ImageLink = exclamation * (direct_link_parser + indirect_link_parser) / c_image -- we can combine this with image ... smaller lpeg + +local UlOrStarLine = asterisk^4 + + underscore^4 + + (spaces * S("*_")^1 * #spaces) / c_string + +local EscapedChar = P("\\") * C(P(1 - newline)) / c_string + +local InlineHtml = inlinehtml / c_inline_html +local DisplayHtml = displayhtml / c_display_html +local HtmlEntity = hexentity / c_hex_entity + + decentity / c_dec_entity + + tagentity / c_tag_entity + +local NestedList = Cs(optionallyindentedline - (bullet + enumerator))^1 / nestedparser + +local ListBlockLine = -blankline * -(indent^-1 * (bullet + enumerator)) * optionallyindentedline + +local Verbatim = Cs(blanklines * (indentedline - blankline)^1) / c_verbatim + * (blankline^1 + eof) -- not really needed, probably capture trailing? 
we can do that beforehand + +local Blockquote = Cs(( + ((nonindentspace * more * space^-1)/"" * linechar^0 * newline)^1 + * ((linechar - blankline)^1 * newline)^0 + * blankline^0 + )^1) / c_blockquote + +local HorizontalRule = (lineof_asterisks + lineof_dashes + lineof_underscores) / c_hrule + +local Reference = define_reference_parser / "" + +-- could be a mini grammar + +local ListBlock = line * ListBlockLine^0 +local ListContinuationBlock = blanklines * indent * ListBlock +local ListItem = Cs(ListBlock * (NestedList + ListContinuationBlock^0)) / listitem + +---- LeadingLines = blankline^0 / "" +---- TrailingLines = blankline^1 * #(any) / "\n" + +syntax = Cs { "Document", + + Document = V("Display")^0, + + Display = blankline -- ^1/"\n" + + Blockquote + + Verbatim + + Reference + + HorizontalRule + + HeadingStart * optionalspace * Cs((V("Inline") - HeadingStop)^1) * HeadingStop / c_heading + + Cs((V("Inline") - Endline)^1) * newline * HeadingLevel * newline * blanklines / f_heading + + Cs((bullet /"" * ListItem)^1) * blanklines * -bullet / c_tightbulletlist + + Cs((bullet /"" * ListItem * C(blanklines))^1) / c_loosebulletlist + + Cs((enumerator /"" * ListItem)^1) * blanklines * -enumerator / c_tightorderedlist + + Cs((enumerator /"" * ListItem * C(blanklines))^1) / c_looseorderedlist + + DisplayHtml + + nonindentspace * Cs(V("Inline")^1)* newline * blankline^1 / c_paragraph + + V("Inline")^1, + + Inline = Str + + Space + + Endline + + UlOrStarLine -- still needed ? + + doubleasterisks * -spaceornewline * Cs((V("Inline") - doubleasterisks )^1) * doubleasterisks / c_strong + + doubleunderscores * -spaceornewline * Cs((V("Inline") - doubleunderscores)^1) * doubleunderscores / c_strong + + asterisk * -spaceornewline * Cs((V("Inline") - asterisk )^1) * asterisk / c_emphasis + + underscore * -spaceornewline * Cs((V("Inline") - underscore )^1) * underscore / c_emphasis + + ImageLink + + DirectLink + + IndirectLink + + AutoLinkUrl + + AutoLinkEmail + + Code + + InlineHtml + + HtmlEntity + + EscapedChar + + Symbol, + +} + +--------------------------------------------------------------------------------------------- + +local function convert(str) + nofruns = nofruns + 1 + nofbytes = nofbytes + #str + statistics.starttiming(markdown) + referenceparser(str) + local result = c_start_document() .. nestedparser(str) .. c_stop_document() + statistics.stoptiming(markdown) + return result +end + +markdown.convert = convert + +function markdown.typesetstring(data) + if data and data ~= "" then + local result = convert(data) + context.viafile(result) + end +end + +function markdown.typesetbuffer(name) + markdown.typesetstring(buffers.getcontent(name)) +end + +function markdown.typesetfile(name) + local fullname = resolvers.findctxfile(name) + if fullname and fullname ~= "" then + markdown.typesetstring(io.loaddata(fullname)) + end +end + +statistics.register("markdown",function() + if nofruns > 0 then + return format("%s bytes converted, %s runs, %s html blobs, %s seconds used", + nofbytes, nofruns, nofhtmlblobs, statistics.elapsedtime(markdown)) + end +end) + +--------------------------------------------------------------------------------------------- + +--~ context.starttext() +--~ moduledata.markdown.convert(str) +--~ context.stoptext() + +if not tex.jobname then + + local one = [[ +Test *123* +========== + +BOLD *BOLD* BOLD + +
    PRE PRE PRE
    + + +* Test +** Test +* Test1 + * Test2 +* Test + +Test +==== + +> test +> test **123** *123* +> test `code` + +test + +Test +==== + +> test +> test +> test + +test +oeps + +more + + code + code + +oeps + +[an example][a] + +[an example] [2] + +[a]: http://example.com/ "Optional *oeps* Title Here" +[2]: http://example.com/ 'Optional Title Here' +[3]: http://example.com/ (Optional Title Here) + +[an example][a] + +[an example] [2] + +[an [tricky] example](http://example.com/ "Title") + +[This **xx** link](http://example.net/) + ]] + +-- This snippet takes some 4 seconds in the original parser (the one that is +-- a bit clearer from the perspective of grammars but somewhat messy with +-- respect to the captures. In the above parser it takes .1 second. Also, +-- in the later case only memory is the limit. + + local two = [[ +Test +==== +* Test +** Test +* Test +** Test +* Test + +Test +==== + +> test +> test +> test + +test + +Test +==== + +> test +> test +> test + +test + ]] + + local function test(str) + local n = 1 -- 000 + local t = os.clock() + local one = convert(str) + -- print("runtime",1,#str,#one,os.clock()-t) + str = string.rep(str,n) + local t = os.clock() + local two = convert(str) + print(two) + -- print("runtime",n,#str,#two,os.clock()-t) + -- print(format("==============\n%s\n==============",one)) + end + + -- test(one) + -- test(two) + -- test(io.read("*all")) + + +end diff --git a/tex/context/base/m-pstricks.lua b/tex/context/base/m-pstricks.lua index b151e313a..2c01ed898 100644 --- a/tex/context/base/m-pstricks.lua +++ b/tex/context/base/m-pstricks.lua @@ -1,74 +1,74 @@ -if not modules then modules = { } end modules ['m-pstricks'] = { - version = 1.001, - comment = "companion to m-pstricks.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- The following will be done when I need ps tricks figures --- in large quantities: --- --- + hash graphics and only process them once --- + save md5 checksums in tuc file --- --- It's no big deal but has a low priority. - -local format, lower, concat, gmatch = string.format, string.lower, table.concat, string.gmatch -local variables = interfaces.variables - -moduledata.pstricks = moduledata.pstricks or { } - -local report_pstricks = logs.reporter("pstricks") - -local template = [[ -\starttext - \pushcatcodetable - \setcatcodetable\texcatcodes - \usemodule[pstric] - %s - \popcatcodetable - \startTEXpage - \hbox\bgroup - \ignorespaces - %s - \removeunwantedspaces - \egroup - \obeydepth %% temp hack as we need to figure this out - \stopTEXpage -\stoptext -]] - -local loaded = { } -local graphics = 0 - -function moduledata.pstricks.usemodule(names) - for name in gmatch(names,"([^%s,]+)") do - loaded[#loaded+1] = format([[\readfile{%s}{}{}]],name) - end -end - -function moduledata.pstricks.process(n) - graphics = graphics + 1 - local name = format("%s-pstricks-%04i",tex.jobname,graphics) - local data = buffers.collectcontent("def-"..n) - local tmpfile = name .. ".tmp" - local epsfile = name .. ".ps" - local pdffile = name .. 
".pdf" - local loaded = concat(loaded,"\n") - os.remove(epsfile) - os.remove(pdffile) - io.savedata(tmpfile,format(template,loaded,data)) - os.execute(format("mtxrun --script texexec %s --once --dvips",tmpfile)) - if lfs.isfile(epsfile) then - os.execute(format("ps2pdf %s %s",epsfile,pdffile)) - -- todo: direct call but not now - if lfs.isfile(pdffile) then - context.externalfigure( { pdffile }, { object = variables.no } ) - else - report_pstricks("run failed, no pdf file") - end - else - report_pstricks("run failed, no ps file") - end -end +if not modules then modules = { } end modules ['m-pstricks'] = { + version = 1.001, + comment = "companion to m-pstricks.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- The following will be done when I need ps tricks figures +-- in large quantities: +-- +-- + hash graphics and only process them once +-- + save md5 checksums in tuc file +-- +-- It's no big deal but has a low priority. + +local format, lower, concat, gmatch = string.format, string.lower, table.concat, string.gmatch +local variables = interfaces.variables + +moduledata.pstricks = moduledata.pstricks or { } + +local report_pstricks = logs.reporter("pstricks") + +local template = [[ +\starttext + \pushcatcodetable + \setcatcodetable\texcatcodes + \usemodule[pstric] + %s + \popcatcodetable + \startTEXpage + \hbox\bgroup + \ignorespaces + %s + \removeunwantedspaces + \egroup + \obeydepth %% temp hack as we need to figure this out + \stopTEXpage +\stoptext +]] + +local loaded = { } +local graphics = 0 + +function moduledata.pstricks.usemodule(names) + for name in gmatch(names,"([^%s,]+)") do + loaded[#loaded+1] = format([[\readfile{%s}{}{}]],name) + end +end + +function moduledata.pstricks.process(n) + graphics = graphics + 1 + local name = format("%s-pstricks-%04i",tex.jobname,graphics) + local data = buffers.collectcontent("def-"..n) + local tmpfile = name .. ".tmp" + local epsfile = name .. ".ps" + local pdffile = name .. 
".pdf" + local loaded = concat(loaded,"\n") + os.remove(epsfile) + os.remove(pdffile) + io.savedata(tmpfile,format(template,loaded,data)) + os.execute(format("mtxrun --script texexec %s --once --dvips",tmpfile)) + if lfs.isfile(epsfile) then + os.execute(format("ps2pdf %s %s",epsfile,pdffile)) + -- todo: direct call but not now + if lfs.isfile(pdffile) then + context.externalfigure( { pdffile }, { object = variables.no } ) + else + report_pstricks("run failed, no pdf file") + end + else + report_pstricks("run failed, no ps file") + end +end diff --git a/tex/context/base/m-spreadsheet.lua b/tex/context/base/m-spreadsheet.lua index 9d5106e35..dcd4ea1c4 100644 --- a/tex/context/base/m-spreadsheet.lua +++ b/tex/context/base/m-spreadsheet.lua @@ -1,332 +1,332 @@ -if not modules then modules = { } end modules ['m-spreadsheet'] = { - version = 1.001, - comment = "companion to m-spreadsheet.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local byte, format, gsub, find = string.byte, string.format, string.gsub, string.find -local R, P, S, C, V, Cs, Cc, Ct, Cg, Cf, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cg, lpeg.Cf, lpeg.Carg -local lpegmatch, patterns = lpeg.match, lpeg.patterns -local setmetatable, loadstring, next, tostring, tonumber,rawget = setmetatable, loadstring, next, tostring, tonumber, rawget -local formatters = string.formatters - -local context = context - -local splitthousands = utilities.parsers.splitthousands -local variables = interfaces.variables - -local v_yes = variables.yes - -moduledata = moduledata or { } - -local spreadsheets = { } -moduledata.spreadsheets = spreadsheets - -local data = { - -- nothing yet -} - -local settings = { - period = ".", - comma = ",", -} - -spreadsheets.data = data -spreadsheets.settings = settings - -local defaultname = "default" -local stack = { } -local current = defaultname - -local d_mt ; d_mt = { - __index = function(t,k) - local v = { } - setmetatable(v,d_mt) - t[k] = v - return v - end, -} - -local s_mt ; s_mt = { - __index = function(t,k) - local v = settings[k] - t[k] = v - return v - end, -} - -function spreadsheets.setup(t) - for k, v in next, t do - settings[k] = v - end -end - -local function emptydata(name,settings) - local data = { } - local specifications = { } - local settings = settings or { } - setmetatable(data,d_mt) - setmetatable(specifications,d_mt) - setmetatable(settings,s_mt) - return { - name = name, - data = data, - maxcol = 0, - maxrow = 0, - settings = settings, - temp = { }, -- for local usage - specifications = specifications, - } -end - -function spreadsheets.reset(name) - if not name or name == "" then name = defaultname end - data[name] = emptydata(name,data[name] and data[name].settings) -end - -function spreadsheets.start(name,s) - if not name or name == "" then - name = defaultname - end - if not s then - s = { } - end - table.insert(stack,current) - current = name - if data[current] then - setmetatable(s,s_mt) - data[current].settings = s - else - data[current] = emptydata(name,s) - end -end - -function spreadsheets.stop() - current = table.remove(stack) -end - -spreadsheets.reset() - -local offset = byte("A") - 1 - -local function assign(s,n) - return formatters["moduledata.spreadsheets.data['%s'].data[%s]"](n,byte(s)-offset) -end - -function datacell(a,b,...) - local n = 0 - if b then - local t = { a, b, ... 
} - for i=1,#t do - n = n * (i-1) * 26 + byte(t[i]) - offset - end - else - n = byte(a) - offset - end - return formatters["dat[%s]"](n) -end - -local function checktemplate(s) - if find(s,"%%") then - -- normal template - return s - elseif find(s,"@") then - -- tex specific template - return gsub(s,"@","%%") - else - -- tex specific quick template - return "%" .. s - end -end - -local quoted = Cs(patterns.unquoted) -local spaces = patterns.whitespace^0 -local cell = C(R("AZ"))^1 / datacell * (Cc("[") * (R("09")^1) * Cc("]") + #P(1)) - --- A nasty aspect of lpeg: Cf ( spaces * Cc("") * { "start" ... this will create a table that will --- be reused, so we accumulate! - -local pattern = Cf ( spaces * Ct("") * { "start", - start = V("value") + V("set") + V("format") + V("string") + V("code"), - value = Cg(P([[=]]) * spaces * Cc("kind") * Cc("value")) * V("code"), - set = Cg(P([[!]]) * spaces * Cc("kind") * Cc("set")) * V("code"), - format = Cg(P([[@]]) * spaces * Cc("kind") * Cc("format")) * spaces * Cg(Cc("template") * Cs(quoted/checktemplate)) * V("code"), - string = Cg(#S([["']]) * Cc("kind") * Cc("string")) * Cg(Cc("content") * quoted), - code = spaces * Cg(Cc("code") * Cs((cell + P(1))^0)), -}, rawset) - -local functions = { } -spreadsheets.functions = functions - -function functions._s_(row,col,c,f,t) - local r = 0 - if f and t then -- f..t - -- ok - elseif f then -- 1..f - f, t = 1, f - else - f, t = 1, row - 1 - end - for i=f,t do - local ci = c[i] - if type(ci) == "number" then - r = r + c[i] - end - end - return r -end - -functions.fmt = string.tformat - -local f_code = formatters [ [[ - local _m_ = moduledata.spreadsheets - local dat = _m_.data['%s'].data - local tmp = _m_.temp - local fnc = _m_.functions - local row = %s - local col = %s - function fnc.sum(...) return fnc._s_(row,col,...) 
end - local sum = fnc.sum - local fmt = fnc.fmt - return %s -]] ] - --- to be considered: a weak cache - -local function propername(name) - if name ~= "" then - return name - elseif current ~= "" then - return current - else - return defaultname - end -end - --- if name == "" then name = current if name == "" then name = defaultname end end - -local function execute(name,r,c,str) - if str ~= "" then - local d = data[name] - if c > d.maxcol then - d.maxcol = c - end - if r > d.maxrow then - d.maxrow = r - end - local specification = lpegmatch(pattern,str,1,name) - d.specifications[c][r] = specification - local kind = specification.kind - if kind == "string" then - return specification.content or "" - else - local code = specification.code - if code and code ~= "" then - code = f_code(name,r,c,code or "") - local result = loadstring(code) -- utilities.lua.strippedloadstring(code,true) -- when tracing - result = result and result() - if type(result) == "function" then - result = result() - end - if type(result) == "number" then - d.data[c][r] = result - end - if not result then - -- nothing - elseif kind == "set" then - -- no return - elseif kind == "format" then - return formatters[specification.template](result) - else - return result - end - end - end - end -end - -function spreadsheets.set(name,r,c,str) - name = propername(name) - execute(name,r,c,str) -end - -function spreadsheets.get(name,r,c,str) - name = propername(name) - local dname = data[name] - if not dname then - -- nothing - elseif not str or str == "" then - context(dname.data[c][r] or 0) - else - local result = execute(name,r,c,str) - if result then --- if type(result) == "number" then --- dname.data[c][r] = result --- result = tostring(result) --- end - local settings = dname.settings - local split = settings.split - local period = settings.period - local comma = settings.comma - if split == v_yes then - result = splitthousands(result) - end - if period == "" then period = nil end - if comma == "" then comma = nil end - result = gsub(result,".",{ ["."] = period, [","] = comma }) - context(result) - end - end -end - -function spreadsheets.doifelsecell(name,r,c) - name = propername(name) - local d = data[name] - local d = d and d.data - local r = d and rawget(d,r) - local c = r and rawget(r,c) - commands.doifelse(c) -end - -local function simplify(name) - name = propername(name) - local data = data[name] - if data then - data = data.data - local temp = { } - for k, v in next, data do - local t = { } - temp[k] = t - for kk, vv in next, v do - if type(vv) == "function" then - t[kk] = "" - else - t[kk] = vv - end - end - end - return temp - end -end - -local function serialize(name) - local s = simplify(name) - if s then - return table.serialize(s,name) - else - return formatters[""](name) - end -end - -spreadsheets.simplify = simplify -spreadsheets.serialize = serialize - -function spreadsheets.inspect(name) - inspect(serialize(name)) -end - -function spreadsheets.tocontext(name) - context.tocontext(simplify(name)) -end +if not modules then modules = { } end modules ['m-spreadsheet'] = { + version = 1.001, + comment = "companion to m-spreadsheet.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local byte, format, gsub, find = string.byte, string.format, string.gsub, string.find +local R, P, S, C, V, Cs, Cc, Ct, Cg, Cf, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cg, lpeg.Cf, 
lpeg.Carg +local lpegmatch, patterns = lpeg.match, lpeg.patterns +local setmetatable, loadstring, next, tostring, tonumber,rawget = setmetatable, loadstring, next, tostring, tonumber, rawget +local formatters = string.formatters + +local context = context + +local splitthousands = utilities.parsers.splitthousands +local variables = interfaces.variables + +local v_yes = variables.yes + +moduledata = moduledata or { } + +local spreadsheets = { } +moduledata.spreadsheets = spreadsheets + +local data = { + -- nothing yet +} + +local settings = { + period = ".", + comma = ",", +} + +spreadsheets.data = data +spreadsheets.settings = settings + +local defaultname = "default" +local stack = { } +local current = defaultname + +local d_mt ; d_mt = { + __index = function(t,k) + local v = { } + setmetatable(v,d_mt) + t[k] = v + return v + end, +} + +local s_mt ; s_mt = { + __index = function(t,k) + local v = settings[k] + t[k] = v + return v + end, +} + +function spreadsheets.setup(t) + for k, v in next, t do + settings[k] = v + end +end + +local function emptydata(name,settings) + local data = { } + local specifications = { } + local settings = settings or { } + setmetatable(data,d_mt) + setmetatable(specifications,d_mt) + setmetatable(settings,s_mt) + return { + name = name, + data = data, + maxcol = 0, + maxrow = 0, + settings = settings, + temp = { }, -- for local usage + specifications = specifications, + } +end + +function spreadsheets.reset(name) + if not name or name == "" then name = defaultname end + data[name] = emptydata(name,data[name] and data[name].settings) +end + +function spreadsheets.start(name,s) + if not name or name == "" then + name = defaultname + end + if not s then + s = { } + end + table.insert(stack,current) + current = name + if data[current] then + setmetatable(s,s_mt) + data[current].settings = s + else + data[current] = emptydata(name,s) + end +end + +function spreadsheets.stop() + current = table.remove(stack) +end + +spreadsheets.reset() + +local offset = byte("A") - 1 + +local function assign(s,n) + return formatters["moduledata.spreadsheets.data['%s'].data[%s]"](n,byte(s)-offset) +end + +function datacell(a,b,...) + local n = 0 + if b then + local t = { a, b, ... } + for i=1,#t do + n = n * (i-1) * 26 + byte(t[i]) - offset + end + else + n = byte(a) - offset + end + return formatters["dat[%s]"](n) +end + +local function checktemplate(s) + if find(s,"%%") then + -- normal template + return s + elseif find(s,"@") then + -- tex specific template + return gsub(s,"@","%%") + else + -- tex specific quick template + return "%" .. s + end +end + +local quoted = Cs(patterns.unquoted) +local spaces = patterns.whitespace^0 +local cell = C(R("AZ"))^1 / datacell * (Cc("[") * (R("09")^1) * Cc("]") + #P(1)) + +-- A nasty aspect of lpeg: Cf ( spaces * Cc("") * { "start" ... this will create a table that will +-- be reused, so we accumulate! 
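The comment above flags a genuine lpeg pitfall that is easy to miss: if the fold seed is a constant table (for instance one smuggled in through Cc), that single table is shared by every match, so results from successive calls accumulate in it; Ct("") on the other hand delivers a fresh empty table for each match. A minimal sketch of the difference, assuming only stock lpeg as shipped with LuaTeX (the key/value grammar and the variable names are purely illustrative, not part of this module):

    local lpeg = require("lpeg")
    local P, R, C, Cc, Ct, Cg, Cf = lpeg.P, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Ct, lpeg.Cg, lpeg.Cf

    local keyval = Cg(C(R("az")^1) * P("=") * C(R("09")^1)) -- grouped key/value captures for rawset

    local shared      = { }
    local badpattern  = Cf(Cc(shared) * keyval, rawset) -- seed: one table shared by all matches
    local goodpattern = Cf(Ct("")     * keyval, rawset) -- seed: a fresh table per match

    local a = lpeg.match(badpattern,"x=1")
    local b = lpeg.match(badpattern,"y=2")
    print(a == b) -- true: both calls return the same table, which now holds { x = "1", y = "2" }

    local c = lpeg.match(goodpattern,"x=1")
    local d = lpeg.match(goodpattern,"y=2")
    print(c == d) -- false: every match builds its own result table

That is why the grammar below seeds the fold with Ct("") rather than with a prebuilt table.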
+ +local pattern = Cf ( spaces * Ct("") * { "start", + start = V("value") + V("set") + V("format") + V("string") + V("code"), + value = Cg(P([[=]]) * spaces * Cc("kind") * Cc("value")) * V("code"), + set = Cg(P([[!]]) * spaces * Cc("kind") * Cc("set")) * V("code"), + format = Cg(P([[@]]) * spaces * Cc("kind") * Cc("format")) * spaces * Cg(Cc("template") * Cs(quoted/checktemplate)) * V("code"), + string = Cg(#S([["']]) * Cc("kind") * Cc("string")) * Cg(Cc("content") * quoted), + code = spaces * Cg(Cc("code") * Cs((cell + P(1))^0)), +}, rawset) + +local functions = { } +spreadsheets.functions = functions + +function functions._s_(row,col,c,f,t) + local r = 0 + if f and t then -- f..t + -- ok + elseif f then -- 1..f + f, t = 1, f + else + f, t = 1, row - 1 + end + for i=f,t do + local ci = c[i] + if type(ci) == "number" then + r = r + c[i] + end + end + return r +end + +functions.fmt = string.tformat + +local f_code = formatters [ [[ + local _m_ = moduledata.spreadsheets + local dat = _m_.data['%s'].data + local tmp = _m_.temp + local fnc = _m_.functions + local row = %s + local col = %s + function fnc.sum(...) return fnc._s_(row,col,...) end + local sum = fnc.sum + local fmt = fnc.fmt + return %s +]] ] + +-- to be considered: a weak cache + +local function propername(name) + if name ~= "" then + return name + elseif current ~= "" then + return current + else + return defaultname + end +end + +-- if name == "" then name = current if name == "" then name = defaultname end end + +local function execute(name,r,c,str) + if str ~= "" then + local d = data[name] + if c > d.maxcol then + d.maxcol = c + end + if r > d.maxrow then + d.maxrow = r + end + local specification = lpegmatch(pattern,str,1,name) + d.specifications[c][r] = specification + local kind = specification.kind + if kind == "string" then + return specification.content or "" + else + local code = specification.code + if code and code ~= "" then + code = f_code(name,r,c,code or "") + local result = loadstring(code) -- utilities.lua.strippedloadstring(code,true) -- when tracing + result = result and result() + if type(result) == "function" then + result = result() + end + if type(result) == "number" then + d.data[c][r] = result + end + if not result then + -- nothing + elseif kind == "set" then + -- no return + elseif kind == "format" then + return formatters[specification.template](result) + else + return result + end + end + end + end +end + +function spreadsheets.set(name,r,c,str) + name = propername(name) + execute(name,r,c,str) +end + +function spreadsheets.get(name,r,c,str) + name = propername(name) + local dname = data[name] + if not dname then + -- nothing + elseif not str or str == "" then + context(dname.data[c][r] or 0) + else + local result = execute(name,r,c,str) + if result then +-- if type(result) == "number" then +-- dname.data[c][r] = result +-- result = tostring(result) +-- end + local settings = dname.settings + local split = settings.split + local period = settings.period + local comma = settings.comma + if split == v_yes then + result = splitthousands(result) + end + if period == "" then period = nil end + if comma == "" then comma = nil end + result = gsub(result,".",{ ["."] = period, [","] = comma }) + context(result) + end + end +end + +function spreadsheets.doifelsecell(name,r,c) + name = propername(name) + local d = data[name] + local d = d and d.data + local r = d and rawget(d,r) + local c = r and rawget(r,c) + commands.doifelse(c) +end + +local function simplify(name) + name = propername(name) + local data = 
data[name] + if data then + data = data.data + local temp = { } + for k, v in next, data do + local t = { } + temp[k] = t + for kk, vv in next, v do + if type(vv) == "function" then + t[kk] = "" + else + t[kk] = vv + end + end + end + return temp + end +end + +local function serialize(name) + local s = simplify(name) + if s then + return table.serialize(s,name) + else + return formatters[""](name) + end +end + +spreadsheets.simplify = simplify +spreadsheets.serialize = serialize + +function spreadsheets.inspect(name) + inspect(serialize(name)) +end + +function spreadsheets.tocontext(name) + context.tocontext(simplify(name)) +end diff --git a/tex/context/base/m-steps.lua b/tex/context/base/m-steps.lua index 97759b799..caf765a56 100644 --- a/tex/context/base/m-steps.lua +++ b/tex/context/base/m-steps.lua @@ -1,227 +1,227 @@ -if not modules then modules = { } end modules ['x-flow'] = { - version = 1.001, - comment = "companion to m-flow.mkvi", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- when we can resolve mpcolor at the lua end we will use metapost.graphic(....) directly - -moduledata.steps = moduledata.steps or { } - -local points = number.points -- number.pt -local variables = interfaces.variables - -local trace_charts = false - -local defaults = { - chart = { - dx = 10*65436, - dy = 10*65436, - }, - cell = { - alternative = 1, - offset = 2*65436, - rulethickness = 65436, - framecolor = "blue", - backgroundcolor = "gray", - }, - text = { - alternative = 1, - offset = 2*65436, - distance = 4*65436, - rulethickness = 65436, - framecolor = "red", - backgroundcolor = "gray", - }, - line = { - alternative = 1, - rulethickness = 65436, - height = 30*65436, - distance = 10*65436, - offset = 5*65436, - color = "green", - }, -} - --- todo : name (no name then direct) --- maybe: includes --- maybe: flush ranges - -local charts = { } -local steps = { } - -function commands.step_start_chart(name) - name = name or "" - steps = { } - charts[name] = { - steps = steps, - } -end - -function commands.step_stop_chart() -end - -function commands.step_make_chart(settings) - local chartsettings = settings.chart - if not chartsettings then - print("no chart") - return - end - local chartname = chartsettings.name - if not chartname then - print("no name given") - return - end - local chart = charts[chartname] - if not chart then - print("no such chart",chartname) - return - end - local steps = chart.steps or { } - -- - table.setmetatableindex(settings,defaults) - -- - if trace_charts then - inspect(steps) - end - -- - local textsettings = settings.text - local cellsettings = settings.cell - local linesettings = settings.line - -- - context.startMPcode() - context("if unknown context_cell : input mp-step.mpiv ; fi ;") - context("step_begin_chart ;") - -- - if chartsettings.alternative == variables.vertical then - context("chart_vertical := true ;") - end - -- - context("text_line_color := \\MPcolor{%s} ;", textsettings.framecolor) - context("text_line_width := %s ;", points(textsettings.rulethickness)) - context("text_fill_color := \\MPcolor{%s} ;", textsettings.backgroundcolor) - context("text_offset := %s ;", points(textsettings.offset)) - context("text_distance_set := %s ;", points(textsettings.distance)) - -- - context("cell_line_color := \\MPcolor{%s} ;", cellsettings.framecolor) - context("cell_line_width := %s ;", points(cellsettings.rulethickness)) - context("cell_fill_color := \\MPcolor{%s} 
;", cellsettings.backgroundcolor) - context("cell_offset := %s ;", points(cellsettings.offset)) - context("cell_distance_x := %s ;", points(cellsettings.dx)) - context("cell_distance_y := %s ;", points(cellsettings.dy)) - -- - context("line_line_color := \\MPcolor{%s} ;", linesettings.color) - context("line_line_width := %s ;", points(linesettings.rulethickness)) - context("line_distance := %s ;", points(linesettings.distance)) - context("line_offset := %s ;", points(linesettings.offset)) - -- - for i=1,#steps do - local step = steps[i] - context("step_begin_cell ;") - if step.cell_top ~= "" then - context('step_cell_top("%s") ;',string.strip(step.cell_top)) - end - if step.cell_bot ~= "" then - context('step_cell_bot("%s") ;',string.strip(step.cell_bot)) - end - if step.text_top ~= "" then - context('step_text_top("%s") ;',string.strip(step.text_top)) - end - if step.text_mid ~= "" then - context('step_text_mid("%s") ;',string.strip(step.text_mid)) - end - if step.text_bot ~= "" then - context('step_text_bot("%s") ;',string.strip(step.text_bot)) - end - context("step_end_cell ;") - end - -- - context("step_end_chart ;") - context.stopMPcode() -end - -function commands.step_cells(top,bot) - steps[#steps+1] = { - cell_top = top or "", - cell_bot = bot or "", - text_top = "", - text_mid = "", - text_bot = "", - } -end - -function commands.step_texts(top,bot) - if #steps > 0 then - steps[#steps].text_top = top or "" - steps[#steps].text_bot = bot or "" - end -end - -function commands.step_cell(top) - steps[#steps+1] = { - cell_top = top or "", - cell_bot = "", - text_top = "", - text_mid = "", - text_bot = "", - } -end - -function commands.step_text(top) - if #steps > 0 then - steps[#steps].text_top = top or "" - end -end - -function commands.step_textset(left,middle,right) - if #steps > 0 then - steps[#steps].text_top = left or "" - steps[#steps].text_mid = middle or "" - steps[#steps].text_bot = right or "" - end -end - -function commands.step_start_cell() - steps[#steps+1] = { - cell_top = "", - cell_bot = "", - text_top = "", - text_mid = "", - text_bot = "", - } -end - -function commands.step_stop_cell() -end - -function commands.step_text_top(str) - if #steps > 0 then - steps[#steps].text_top = str or "" - end -end - -function commands.step_text_mid(str) - if #steps > 0 then - steps[#steps].text_mid = str or "" - end -end - -function commands.step_text_bot(str) - if #steps > 0 then - steps[#steps].text_bot = str or "" - end -end - -function commands.step_cell_top(str) - if #steps > 0 then - steps[#steps].cell_top = str or "" - end -end - -function commands.step_cell_bot(str) - if #steps > 0 then - steps[#steps].cell_bot = str or "" - end -end +if not modules then modules = { } end modules ['x-flow'] = { + version = 1.001, + comment = "companion to m-flow.mkvi", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- when we can resolve mpcolor at the lua end we will use metapost.graphic(....) 
directly + +moduledata.steps = moduledata.steps or { } + +local points = number.points -- number.pt +local variables = interfaces.variables + +local trace_charts = false + +local defaults = { + chart = { + dx = 10*65436, + dy = 10*65436, + }, + cell = { + alternative = 1, + offset = 2*65436, + rulethickness = 65436, + framecolor = "blue", + backgroundcolor = "gray", + }, + text = { + alternative = 1, + offset = 2*65436, + distance = 4*65436, + rulethickness = 65436, + framecolor = "red", + backgroundcolor = "gray", + }, + line = { + alternative = 1, + rulethickness = 65436, + height = 30*65436, + distance = 10*65436, + offset = 5*65436, + color = "green", + }, +} + +-- todo : name (no name then direct) +-- maybe: includes +-- maybe: flush ranges + +local charts = { } +local steps = { } + +function commands.step_start_chart(name) + name = name or "" + steps = { } + charts[name] = { + steps = steps, + } +end + +function commands.step_stop_chart() +end + +function commands.step_make_chart(settings) + local chartsettings = settings.chart + if not chartsettings then + print("no chart") + return + end + local chartname = chartsettings.name + if not chartname then + print("no name given") + return + end + local chart = charts[chartname] + if not chart then + print("no such chart",chartname) + return + end + local steps = chart.steps or { } + -- + table.setmetatableindex(settings,defaults) + -- + if trace_charts then + inspect(steps) + end + -- + local textsettings = settings.text + local cellsettings = settings.cell + local linesettings = settings.line + -- + context.startMPcode() + context("if unknown context_cell : input mp-step.mpiv ; fi ;") + context("step_begin_chart ;") + -- + if chartsettings.alternative == variables.vertical then + context("chart_vertical := true ;") + end + -- + context("text_line_color := \\MPcolor{%s} ;", textsettings.framecolor) + context("text_line_width := %s ;", points(textsettings.rulethickness)) + context("text_fill_color := \\MPcolor{%s} ;", textsettings.backgroundcolor) + context("text_offset := %s ;", points(textsettings.offset)) + context("text_distance_set := %s ;", points(textsettings.distance)) + -- + context("cell_line_color := \\MPcolor{%s} ;", cellsettings.framecolor) + context("cell_line_width := %s ;", points(cellsettings.rulethickness)) + context("cell_fill_color := \\MPcolor{%s} ;", cellsettings.backgroundcolor) + context("cell_offset := %s ;", points(cellsettings.offset)) + context("cell_distance_x := %s ;", points(cellsettings.dx)) + context("cell_distance_y := %s ;", points(cellsettings.dy)) + -- + context("line_line_color := \\MPcolor{%s} ;", linesettings.color) + context("line_line_width := %s ;", points(linesettings.rulethickness)) + context("line_distance := %s ;", points(linesettings.distance)) + context("line_offset := %s ;", points(linesettings.offset)) + -- + for i=1,#steps do + local step = steps[i] + context("step_begin_cell ;") + if step.cell_top ~= "" then + context('step_cell_top("%s") ;',string.strip(step.cell_top)) + end + if step.cell_bot ~= "" then + context('step_cell_bot("%s") ;',string.strip(step.cell_bot)) + end + if step.text_top ~= "" then + context('step_text_top("%s") ;',string.strip(step.text_top)) + end + if step.text_mid ~= "" then + context('step_text_mid("%s") ;',string.strip(step.text_mid)) + end + if step.text_bot ~= "" then + context('step_text_bot("%s") ;',string.strip(step.text_bot)) + end + context("step_end_cell ;") + end + -- + context("step_end_chart ;") + context.stopMPcode() +end + +function 
commands.step_cells(top,bot) + steps[#steps+1] = { + cell_top = top or "", + cell_bot = bot or "", + text_top = "", + text_mid = "", + text_bot = "", + } +end + +function commands.step_texts(top,bot) + if #steps > 0 then + steps[#steps].text_top = top or "" + steps[#steps].text_bot = bot or "" + end +end + +function commands.step_cell(top) + steps[#steps+1] = { + cell_top = top or "", + cell_bot = "", + text_top = "", + text_mid = "", + text_bot = "", + } +end + +function commands.step_text(top) + if #steps > 0 then + steps[#steps].text_top = top or "" + end +end + +function commands.step_textset(left,middle,right) + if #steps > 0 then + steps[#steps].text_top = left or "" + steps[#steps].text_mid = middle or "" + steps[#steps].text_bot = right or "" + end +end + +function commands.step_start_cell() + steps[#steps+1] = { + cell_top = "", + cell_bot = "", + text_top = "", + text_mid = "", + text_bot = "", + } +end + +function commands.step_stop_cell() +end + +function commands.step_text_top(str) + if #steps > 0 then + steps[#steps].text_top = str or "" + end +end + +function commands.step_text_mid(str) + if #steps > 0 then + steps[#steps].text_mid = str or "" + end +end + +function commands.step_text_bot(str) + if #steps > 0 then + steps[#steps].text_bot = str or "" + end +end + +function commands.step_cell_top(str) + if #steps > 0 then + steps[#steps].cell_top = str or "" + end +end + +function commands.step_cell_bot(str) + if #steps > 0 then + steps[#steps].cell_bot = str or "" + end +end diff --git a/tex/context/base/math-act.lua b/tex/context/base/math-act.lua index 4f9b3b7e8..875e200c1 100644 --- a/tex/context/base/math-act.lua +++ b/tex/context/base/math-act.lua @@ -1,404 +1,404 @@ -if not modules then modules = { } end modules ['math-act'] = { - version = 1.001, - comment = "companion to math-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Here we tweak some font properties (if needed). 
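Before the actual tweaks, the module sets up a named action sequence, "mathparameters", with before/system/after groups; each system-level step below (initializeparameters, scaleparameters, checkaccentbaseheight, and so on) is appended to the system group by its global name, and fonts.constructors.assignmathparameters just runs the compiled sequence. User code can hook into the before or after group in the same way. A hedged sketch of such a hook; the userdata.mymath namespace, the function name and the 0.9 factor are made up for illustration, only the appendaction call mirrors what the module itself does:

    -- illustrative only: any globally reachable table will do as a namespace
    userdata        = userdata        or { }
    userdata.mymath = userdata.mymath or { }

    function userdata.mymath.loweraccents(target,original)
        -- runs after the system actions have initialized and scaled the parameters
        local mathparameters = target.mathparameters
        if mathparameters and mathparameters.AccentBaseHeight then
            mathparameters.AccentBaseHeight = 0.9 * mathparameters.AccentBaseHeight
        end
    end

    utilities.sequencers.appendaction("mathparameters","after","userdata.mymath.loweraccents")

Because actions are registered by name rather than by value, the string has to resolve to an existing global function at the moment the sequence is run.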
- -local type, next = type, next -local fastcopy = table.fastcopy - -local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end) -local report_math = logs.reporter("mathematics","initializing") - -local context = context -local commands = commands -local mathematics = mathematics -local texdimen = tex.dimen -local abs = math.abs - -local sequencers = utilities.sequencers -local appendgroup = sequencers.appendgroup -local appendaction = sequencers.appendaction - -local mathfontparameteractions = sequencers.new { - name = "mathparameters", - arguments = "target,original", -} - -appendgroup("mathparameters","before") -- user -appendgroup("mathparameters","system") -- private -appendgroup("mathparameters","after" ) -- user - -function fonts.constructors.assignmathparameters(original,target) - local runner = mathfontparameteractions.runner - if runner then - runner(original,target) - end -end - -function mathematics.initializeparameters(target,original) - local mathparameters = original.mathparameters - if mathparameters and next(mathparameters) then - target.mathparameters = mathematics.dimensions(mathparameters) - end -end - -sequencers.appendaction("mathparameters","system","mathematics.initializeparameters") - -local how = { - -- RadicalKernBeforeDegree = "horizontal", - -- RadicalKernAfterDegree = "horizontal", - ScriptPercentScaleDown = "unscaled", - ScriptScriptPercentScaleDown = "unscaled", - RadicalDegreeBottomRaisePercent = "unscaled" -} - -function mathematics.scaleparameters(target,original) - if not target.properties.math_is_scaled then - local mathparameters = target.mathparameters - if mathparameters and next(mathparameters) then - local parameters = target.parameters - local factor = parameters.factor - local hfactor = parameters.hfactor - local vfactor = parameters.vfactor - for name, value in next, mathparameters do - local h = how[name] - if h == "unscaled" then - -- kept - elseif h == "horizontal" then - value = value * hfactor - elseif h == "vertical"then - value = value * vfactor - else - value = value * factor - end - mathparameters[name] = value - end - end - target.properties.math_is_scaled = true - end -end - -sequencers.appendaction("mathparameters","system","mathematics.scaleparameters") - -function mathematics.checkaccentbaseheight(target,original) - local mathparameters = target.mathparameters - if mathparameters and mathparameters.AccentBaseHeight == 0 then - mathparameters.AccentBaseHeight = target.parameters.x_height -- needs checking - end -end - -sequencers.appendaction("mathparameters","system","mathematics.checkaccentbaseheight") -- should go in lfg instead - -function mathematics.checkprivateparameters(target,original) - local mathparameters = target.mathparameters - if mathparameters then - local parameters = target.parameters - if parameters then - if not mathparameters.FractionDelimiterSize then - mathparameters.FractionDelimiterSize = 1.01 * parameters.size - end - if not mathparameters.FractionDelimiterDisplayStyleSize then - mathparameters.FractionDelimiterDisplayStyleSize = 2.40 * parameters.size - end - elseif target.properties then - report_math("no parameters in font %a",target.properties.fullname or "?") - else - report_math("no parameters and properties in font") - end - end -end - -sequencers.appendaction("mathparameters","system","mathematics.checkprivateparameters") - -function mathematics.overloadparameters(target,original) - local mathparameters = target.mathparameters - if mathparameters and 
next(mathparameters) then - local goodies = target.goodies - if goodies then - for i=1,#goodies do - local goodie = goodies[i] - local mathematics = goodie.mathematics - local parameters = mathematics and mathematics.parameters - if parameters then - if trace_defining then - report_math("overloading math parameters in %a @ %p",target.properties.fullname,target.parameters.size) - end - for name, value in next, parameters do - local tvalue = type(value) - if tvalue == "string" then - report_math("comment for math parameter %a: %s",name,value) - else - local oldvalue = mathparameters[name] - local newvalue = oldvalue - if oldvalue then - if tvalue == "number" then - newvalue = value - elseif tvalue == "function" then - newvalue = value(oldvalue,target,original) - elseif not tvalue then - newvalue = nil - end - if trace_defining and oldvalue ~= newvalue then - report_math("overloading math parameter %a: %S => %S",name,oldvalue,newvalue) - end - else - report_math("invalid math parameter %a",name) - end - mathparameters[name] = newvalue - end - end - end - end - end - end -end - -sequencers.appendaction("mathparameters","system","mathematics.overloadparameters") - -local function applytweaks(when,target,original) - local goodies = original.goodies - if goodies then - for i=1,#goodies do - local goodie = goodies[i] - local mathematics = goodie.mathematics - local tweaks = mathematics and mathematics.tweaks - if tweaks then - tweaks = tweaks[when] - if tweaks then - if trace_defining then - report_math("tweaking math of %a @ %p (%s)",target.properties.fullname,target.parameters.size,when) - end - for i=1,#tweaks do - local tweak= tweaks[i] - local tvalue = type(tweak) - if tvalue == "function" then - tweak(target,original) - end - end - end - end - end - end -end - -function mathematics.tweakbeforecopyingfont(target,original) - local mathparameters = target.mathparameters -- why not hasmath - if mathparameters then - applytweaks("beforecopying",target,original) - end -end - -function mathematics.tweakaftercopyingfont(target,original) - local mathparameters = target.mathparameters -- why not hasmath - if mathparameters then - applytweaks("aftercopying",target,original) - end -end - -sequencers.appendaction("beforecopyingcharacters","system","mathematics.tweakbeforecopyingfont") -sequencers.appendaction("aftercopyingcharacters", "system","mathematics.tweakaftercopyingfont") - -function mathematics.overloaddimensions(target,original,set) - local goodies = target.goodies - if goodies then - for i=1,#goodies do - local goodie = goodies[i] - local mathematics = goodie.mathematics - local dimensions = mathematics and mathematics.dimensions - if dimensions then - if trace_defining then - report_math("overloading dimensions in %a @ %p",target.properties.fullname,target.parameters.size) - end - local characters = target.characters - local parameters = target.parameters - local factor = parameters.factor - local hfactor = parameters.hfactor - local vfactor = parameters.vfactor - local addprivate = fonts.helpers.addprivate - local function overload(dimensions) - for unicode, data in next, dimensions do - local character = characters[unicode] - if character then - -- - local width = data.width - local height = data.height - local depth = data.depth - if trace_defining and (width or height or depth) then - report_math("overloading dimensions of %C, width %a, height %a, depth %a",unicode,width,height,depth) - end - if width then character.width = width * hfactor end - if height then character.height = height * 
vfactor end - if depth then character.depth = depth * vfactor end - -- - local xoffset = data.xoffset - local yoffset = data.yoffset - if xoffset then - xoffset = { "right", xoffset * hfactor } - end - if yoffset then - yoffset = { "down", -yoffset * vfactor } - end - if xoffset or yoffset then - local slot = { "slot", 1, addprivate(target,nil,fastcopy(character)) } - if xoffset and yoffset then - character.commands = { xoffset, yoffset, slot } - elseif xoffset then - character.commands = { xoffset, slot } - else - character.commands = { yoffset, slot } - end - character.index = nil - end - elseif trace_defining then - report_math("no overloading dimensions of %C, not in font",unicode) - end - end - end - if set == nil then - set = { "default" } - end - if set == "all" or set == true then - for name, set in next, dimensions do - overload(set) - end - else - if type(set) == "string" then - set = utilities.parsers.settings_to_array(set) - end - if type(set) == "table" then - for i=1,#set do - local d = dimensions[set[i]] - if d then - overload(d) - end - end - end - end - end - end - end -end - -sequencers.appendaction("aftercopyingcharacters", "system","mathematics.overloaddimensions") - --- a couple of predefined tewaks: - -local tweaks = { } -mathematics.tweaks = tweaks - -function tweaks.fixbadprime(target,original) - target.characters[0xFE325] = target.characters[0x2032] -end - --- helpers - -local setmetatableindex = table.setmetatableindex -local family_font = node.family_font - -local fontcharacters = fonts.hashes.characters -local extensibles = utilities.storage.allocate() -fonts.hashes.extensibles = extensibles - -local chardata = characters.data -local extensibles = mathematics.extensibles - --- we use numbers at the tex end (otherwise we could stick to chars) - -local e_left = extensibles.left -local e_right = extensibles.right -local e_horizontal = extensibles.horizontal -local e_vertical = extensibles.vertical -local e_mixed = extensibles.mixed -local e_unknown = extensibles.unknown - -local unknown = { e_unknown, false, false } - -local function extensiblecode(font,unicode) - local characters = fontcharacters[font] - local character = characters[unicode] - if not character then - return unknown - end - local code = unicode - local next = character.next - while next do - code = next - character = characters[next] - next = character.next - end - local char = chardata[unicode] - local mathextensible = char and char.mathextensible - if character.horiz_variants then - if character.vert_variants then - return { e_mixed, code, character } - else - local e = mathextensible and extensibles[mathextensible] - return e and { e, code, character } or unknown - end - elseif character.vert_variants then - local e = mathextensible and extensibles[mathextensible] - return e and { e, code, character } or unknown - else - return unknown - end -end - -setmetatableindex(extensibles,function(extensibles,font) - local codes = { } - setmetatableindex(codes, function(codes,unicode) - local status = extensiblecode(font,unicode) - codes[unicode] = status - return status - end) - extensibles[font] = codes - return codes -end) - -function mathematics.extensiblecode(family,unicode) - return extensibles[family_font(family or 0)][unicode][1] -end - -function commands.extensiblecode(family,unicode) - context(extensibles[family_font(family or 0)][unicode][1]) -end - --- left : [head] ... --- right : ... [head] --- horizontal : [head] ... 
[head] --- --- abs(right["start"] - right["end"]) | right.advance | characters[right.glyph].width - -function commands.horizontalcode(family,unicode) - local font = family_font(family or 0) - local data = extensibles[font][unicode] - local kind = data[1] - if kind == e_left then - local charlist = data[3].horiz_variants - local characters = fontcharacters[font] - local left = charlist[1] - texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0)) - texdimen.scratchrightoffset = 0 - elseif kind == e_right then - local charlist = data[3].horiz_variants - local characters = fontcharacters[font] - local right = charlist[#charlist] - texdimen.scratchleftoffset = 0 - texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0)) - elseif kind == e_horizontal then - local charlist = data[3].horiz_variants - local characters = fontcharacters[font] - local left = charlist[1] - local right = charlist[#charlist] - texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0)) - texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0)) - else - texdimen.scratchleftoffset = 0 - texdimen.scratchrightoffset = 0 - end - context(kind) -end +if not modules then modules = { } end modules ['math-act'] = { + version = 1.001, + comment = "companion to math-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Here we tweak some font properties (if needed). + +local type, next = type, next +local fastcopy = table.fastcopy + +local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end) +local report_math = logs.reporter("mathematics","initializing") + +local context = context +local commands = commands +local mathematics = mathematics +local texdimen = tex.dimen +local abs = math.abs + +local sequencers = utilities.sequencers +local appendgroup = sequencers.appendgroup +local appendaction = sequencers.appendaction + +local mathfontparameteractions = sequencers.new { + name = "mathparameters", + arguments = "target,original", +} + +appendgroup("mathparameters","before") -- user +appendgroup("mathparameters","system") -- private +appendgroup("mathparameters","after" ) -- user + +function fonts.constructors.assignmathparameters(original,target) + local runner = mathfontparameteractions.runner + if runner then + runner(original,target) + end +end + +function mathematics.initializeparameters(target,original) + local mathparameters = original.mathparameters + if mathparameters and next(mathparameters) then + target.mathparameters = mathematics.dimensions(mathparameters) + end +end + +sequencers.appendaction("mathparameters","system","mathematics.initializeparameters") + +local how = { + -- RadicalKernBeforeDegree = "horizontal", + -- RadicalKernAfterDegree = "horizontal", + ScriptPercentScaleDown = "unscaled", + ScriptScriptPercentScaleDown = "unscaled", + RadicalDegreeBottomRaisePercent = "unscaled" +} + +function mathematics.scaleparameters(target,original) + if not target.properties.math_is_scaled then + local mathparameters = target.mathparameters + if mathparameters and next(mathparameters) then + local parameters = target.parameters + local factor = parameters.factor + local hfactor = parameters.hfactor + local vfactor = parameters.vfactor + for name, value in next, mathparameters do + local h = how[name] + if h == "unscaled" then + -- kept + elseif h == "horizontal" then + value = value * 
hfactor + elseif h == "vertical"then + value = value * vfactor + else + value = value * factor + end + mathparameters[name] = value + end + end + target.properties.math_is_scaled = true + end +end + +sequencers.appendaction("mathparameters","system","mathematics.scaleparameters") + +function mathematics.checkaccentbaseheight(target,original) + local mathparameters = target.mathparameters + if mathparameters and mathparameters.AccentBaseHeight == 0 then + mathparameters.AccentBaseHeight = target.parameters.x_height -- needs checking + end +end + +sequencers.appendaction("mathparameters","system","mathematics.checkaccentbaseheight") -- should go in lfg instead + +function mathematics.checkprivateparameters(target,original) + local mathparameters = target.mathparameters + if mathparameters then + local parameters = target.parameters + if parameters then + if not mathparameters.FractionDelimiterSize then + mathparameters.FractionDelimiterSize = 1.01 * parameters.size + end + if not mathparameters.FractionDelimiterDisplayStyleSize then + mathparameters.FractionDelimiterDisplayStyleSize = 2.40 * parameters.size + end + elseif target.properties then + report_math("no parameters in font %a",target.properties.fullname or "?") + else + report_math("no parameters and properties in font") + end + end +end + +sequencers.appendaction("mathparameters","system","mathematics.checkprivateparameters") + +function mathematics.overloadparameters(target,original) + local mathparameters = target.mathparameters + if mathparameters and next(mathparameters) then + local goodies = target.goodies + if goodies then + for i=1,#goodies do + local goodie = goodies[i] + local mathematics = goodie.mathematics + local parameters = mathematics and mathematics.parameters + if parameters then + if trace_defining then + report_math("overloading math parameters in %a @ %p",target.properties.fullname,target.parameters.size) + end + for name, value in next, parameters do + local tvalue = type(value) + if tvalue == "string" then + report_math("comment for math parameter %a: %s",name,value) + else + local oldvalue = mathparameters[name] + local newvalue = oldvalue + if oldvalue then + if tvalue == "number" then + newvalue = value + elseif tvalue == "function" then + newvalue = value(oldvalue,target,original) + elseif not tvalue then + newvalue = nil + end + if trace_defining and oldvalue ~= newvalue then + report_math("overloading math parameter %a: %S => %S",name,oldvalue,newvalue) + end + else + report_math("invalid math parameter %a",name) + end + mathparameters[name] = newvalue + end + end + end + end + end + end +end + +sequencers.appendaction("mathparameters","system","mathematics.overloadparameters") + +local function applytweaks(when,target,original) + local goodies = original.goodies + if goodies then + for i=1,#goodies do + local goodie = goodies[i] + local mathematics = goodie.mathematics + local tweaks = mathematics and mathematics.tweaks + if tweaks then + tweaks = tweaks[when] + if tweaks then + if trace_defining then + report_math("tweaking math of %a @ %p (%s)",target.properties.fullname,target.parameters.size,when) + end + for i=1,#tweaks do + local tweak= tweaks[i] + local tvalue = type(tweak) + if tvalue == "function" then + tweak(target,original) + end + end + end + end + end + end +end + +function mathematics.tweakbeforecopyingfont(target,original) + local mathparameters = target.mathparameters -- why not hasmath + if mathparameters then + applytweaks("beforecopying",target,original) + end +end + +function 
mathematics.tweakaftercopyingfont(target,original) + local mathparameters = target.mathparameters -- why not hasmath + if mathparameters then + applytweaks("aftercopying",target,original) + end +end + +sequencers.appendaction("beforecopyingcharacters","system","mathematics.tweakbeforecopyingfont") +sequencers.appendaction("aftercopyingcharacters", "system","mathematics.tweakaftercopyingfont") + +function mathematics.overloaddimensions(target,original,set) + local goodies = target.goodies + if goodies then + for i=1,#goodies do + local goodie = goodies[i] + local mathematics = goodie.mathematics + local dimensions = mathematics and mathematics.dimensions + if dimensions then + if trace_defining then + report_math("overloading dimensions in %a @ %p",target.properties.fullname,target.parameters.size) + end + local characters = target.characters + local parameters = target.parameters + local factor = parameters.factor + local hfactor = parameters.hfactor + local vfactor = parameters.vfactor + local addprivate = fonts.helpers.addprivate + local function overload(dimensions) + for unicode, data in next, dimensions do + local character = characters[unicode] + if character then + -- + local width = data.width + local height = data.height + local depth = data.depth + if trace_defining and (width or height or depth) then + report_math("overloading dimensions of %C, width %a, height %a, depth %a",unicode,width,height,depth) + end + if width then character.width = width * hfactor end + if height then character.height = height * vfactor end + if depth then character.depth = depth * vfactor end + -- + local xoffset = data.xoffset + local yoffset = data.yoffset + if xoffset then + xoffset = { "right", xoffset * hfactor } + end + if yoffset then + yoffset = { "down", -yoffset * vfactor } + end + if xoffset or yoffset then + local slot = { "slot", 1, addprivate(target,nil,fastcopy(character)) } + if xoffset and yoffset then + character.commands = { xoffset, yoffset, slot } + elseif xoffset then + character.commands = { xoffset, slot } + else + character.commands = { yoffset, slot } + end + character.index = nil + end + elseif trace_defining then + report_math("no overloading dimensions of %C, not in font",unicode) + end + end + end + if set == nil then + set = { "default" } + end + if set == "all" or set == true then + for name, set in next, dimensions do + overload(set) + end + else + if type(set) == "string" then + set = utilities.parsers.settings_to_array(set) + end + if type(set) == "table" then + for i=1,#set do + local d = dimensions[set[i]] + if d then + overload(d) + end + end + end + end + end + end + end +end + +sequencers.appendaction("aftercopyingcharacters", "system","mathematics.overloaddimensions") + +-- a couple of predefined tewaks: + +local tweaks = { } +mathematics.tweaks = tweaks + +function tweaks.fixbadprime(target,original) + target.characters[0xFE325] = target.characters[0x2032] +end + +-- helpers + +local setmetatableindex = table.setmetatableindex +local family_font = node.family_font + +local fontcharacters = fonts.hashes.characters +local extensibles = utilities.storage.allocate() +fonts.hashes.extensibles = extensibles + +local chardata = characters.data +local extensibles = mathematics.extensibles + +-- we use numbers at the tex end (otherwise we could stick to chars) + +local e_left = extensibles.left +local e_right = extensibles.right +local e_horizontal = extensibles.horizontal +local e_vertical = extensibles.vertical +local e_mixed = extensibles.mixed +local e_unknown = 
extensibles.unknown + +local unknown = { e_unknown, false, false } + +local function extensiblecode(font,unicode) + local characters = fontcharacters[font] + local character = characters[unicode] + if not character then + return unknown + end + local code = unicode + local next = character.next + while next do + code = next + character = characters[next] + next = character.next + end + local char = chardata[unicode] + local mathextensible = char and char.mathextensible + if character.horiz_variants then + if character.vert_variants then + return { e_mixed, code, character } + else + local e = mathextensible and extensibles[mathextensible] + return e and { e, code, character } or unknown + end + elseif character.vert_variants then + local e = mathextensible and extensibles[mathextensible] + return e and { e, code, character } or unknown + else + return unknown + end +end + +setmetatableindex(extensibles,function(extensibles,font) + local codes = { } + setmetatableindex(codes, function(codes,unicode) + local status = extensiblecode(font,unicode) + codes[unicode] = status + return status + end) + extensibles[font] = codes + return codes +end) + +function mathematics.extensiblecode(family,unicode) + return extensibles[family_font(family or 0)][unicode][1] +end + +function commands.extensiblecode(family,unicode) + context(extensibles[family_font(family or 0)][unicode][1]) +end + +-- left : [head] ... +-- right : ... [head] +-- horizontal : [head] ... [head] +-- +-- abs(right["start"] - right["end"]) | right.advance | characters[right.glyph].width + +function commands.horizontalcode(family,unicode) + local font = family_font(family or 0) + local data = extensibles[font][unicode] + local kind = data[1] + if kind == e_left then + local charlist = data[3].horiz_variants + local characters = fontcharacters[font] + local left = charlist[1] + texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0)) + texdimen.scratchrightoffset = 0 + elseif kind == e_right then + local charlist = data[3].horiz_variants + local characters = fontcharacters[font] + local right = charlist[#charlist] + texdimen.scratchleftoffset = 0 + texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0)) + elseif kind == e_horizontal then + local charlist = data[3].horiz_variants + local characters = fontcharacters[font] + local left = charlist[1] + local right = charlist[#charlist] + texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0)) + texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0)) + else + texdimen.scratchleftoffset = 0 + texdimen.scratchrightoffset = 0 + end + context(kind) +end diff --git a/tex/context/base/math-dim.lua b/tex/context/base/math-dim.lua index f4fc7905e..babed0afd 100644 --- a/tex/context/base/math-dim.lua +++ b/tex/context/base/math-dim.lua @@ -1,240 +1,240 @@ -if not modules then modules = { } end modules ['math-dim'] = { - version = 1.001, - comment = "companion to math-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Beware: only Taco and Ulrik really understands in depth what these dimensions --- do so if you run into problems ask on the context list. - --- The radical_rule value is also used as a trigger. In luatex the accent --- placement happens either the opentype way (using top_accent cum suis) or the --- traditional way. 
In order to determine what method to use the \Umathradicalrule --- setting is consulted to determine what method to use. This is more efficient --- than analyzing the (potentially spread over multiple families) situation. For --- this reason we need to set the radical_rule here. It used to be "" in --- which case the engine takes the rulethickness. In c-speak: --- --- int compat_mode = (radical_rule(cur_style) == undefined_math_parameter) ; - -local abs, next = math.abs, next - -local defaults = { - axis = { default = { "AxisHeight", "axis_height" }, }, - accent_base_height = { default = { "AccentBaseHeight", "x_height" }, }, - fraction_del_size = { default = { "FractionDelimiterSize", "delim2" }, - cramped_display_style = { "FractionDelimiterDisplayStyleSize", "delim1" }, - display_style = { "FractionDelimiterDisplayStyleSize", "delim1" }, }, - fraction_denom_down = { default = { "FractionDenominatorShiftDown", "denom2" }, - cramped_display_style = { "FractionDenominatorDisplayStyleShiftDown", "denom1" }, - display_style = { "FractionDenominatorDisplayStyleShiftDown", "denom1" }, }, - fraction_denom_vgap = { default = { "FractionDenominatorGapMin", "default_rule_thickness" }, - cramped_display_style = { "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" }, - display_style = { "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" }, }, - fraction_num_up = { default = { "FractionNumeratorShiftUp", "num2" }, - cramped_display_style = { "FractionNumeratorDisplayStyleShiftUp", "num1" }, - display_style = { "FractionNumeratorDisplayStyleShiftUp", "num1" }, }, - fraction_num_vgap = { default = { "FractionNumeratorGapMin", "default_rule_thickness" }, - cramped_display_style = { "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" }, - display_style = { "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" }, }, - fraction_rule = { default = { "FractionRuleThickness", "default_rule_thickness" }, }, - limit_above_bgap = { default = { "UpperLimitBaselineRiseMin", "big_op_spacing3" }, }, - limit_above_vgap = { default = { "UpperLimitGapMin", "big_op_spacing1" }, }, - limit_above_kern = { default = { "0", "big_op_spacing5" }, }, - limit_below_bgap = { default = { "LowerLimitBaselineDropMin", "big_op_spacing4" }, }, - limit_below_vgap = { default = { "LowerLimitGapMin", "big_op_spacing2" }, }, - limit_below_kern = { default = { "0", "big_op_spacing5" }, }, - math_operator_size = { default = { "DisplayOperatorMinHeight", "math_x_height*3" }, }, -- 2 - overbar_kern = { default = { "OverbarExtraAscender", "default_rule_thickness" }, }, - overbar_rule = { default = { "OverbarRuleThickness", "default_rule_thickness" }, }, - overbar_vgap = { default = { "OverbarVerticalGap", "3*default_rule_thickness" }, }, - quad = { default = { "font_size(f)", "math_quad" }, }, - radical_kern = { default = { "RadicalExtraAscender", "default_rule_thickness" }, }, - radical_rule = { default = { "RadicalRuleThickness", "default_rule_thickness" }, }, - -- default = { "surd_height(f)", "default_rule_thickness" }, - radical_vgap = { default = { "RadicalVerticalGap", "default_rule_thickness+(abs(default_rule_thickness)/4)" }, - display_style = { "RadicalDisplayStyleVerticalGap", "default_rule_thickness+(abs(math_x_height)/4)" }, }, - space_after_script = { default = { "SpaceAfterScript", "script_space" }, }, - stack_denom_down = { default = { "StackBottomShiftDown", "denom2" }, - cramped_display_style = { "StackBottomDisplayStyleShiftDown", "denom1" }, - display_style = { 
"StackBottomDisplayStyleShiftDown", "denom1" }, }, - stack_num_up = { default = { "StackTopShiftUp", "num3" }, - cramped_display_style = { "StackTopDisplayStyleShiftUp", "num1" }, - display_style = { "StackTopDisplayStyleShiftUp", "num1" }, }, - stack_vgap = { default = { "StackGapMin", "3*default_rule_thickness" }, - cramped_display_style = { "StackDisplayStyleGapMin", "7*default_rule_thickness" }, - display_style = { "StackDisplayStyleGapMin", "7*default_rule_thickness" }, }, - sub_shift_down = { default = { "SubscriptShiftDown", "sub1" }, }, - sub_shift_drop = { default = { "SubscriptBaselineDropMin", "sub_drop" }, }, - sub_sup_shift_down = { default = { "SubscriptShiftDown", "sub2" }, }, - sub_top_max = { default = { "SubscriptTopMax", "abs(math_x_height*4)/5" }, }, - subsup_vgap = { default = { "SubSuperscriptGapMin", "4*default_rule_thickness" }, }, - sup_bottom_min = { default = { "SuperscriptBottomMin", "abs(math_x_height)/4" }, }, - sup_shift_drop = { default = { "SuperscriptBaselineDropMax", "sup_drop" }, }, - sup_shift_up = { cramped_display_style = { "SuperscriptShiftUpCramped", "sup3" }, - cramped_script_script_style = { "SuperscriptShiftUpCramped", "sup3" }, - cramped_script_style = { "SuperscriptShiftUpCramped", "sup3" }, - cramped_text_style = { "SuperscriptShiftUpCramped", "sup3" }, - display_style = { "SuperscriptShiftUp", "sup1" }, - script_script_style = { "SuperscriptShiftUp", "sup2" }, - script_style = { "SuperscriptShiftUp", "sup2" }, - text_style = { "SuperscriptShiftUp", "sup2" }, }, - sup_sub_bottom_max = { default = { "SuperscriptBottomMaxWithSubscript", "abs(math_x_height*4)/5" }, }, - underbar_kern = { default = { "UnderbarExtraDescender", "0" }, }, - underbar_rule = { default = { "UnderbarRuleThickness", "default_rule_thickness" }, }, - underbar_vgap = { default = { "UnderbarVerticalGap", "3*default_rule_thickness" }, }, - connector_overlap_min = { default = { "MinConnectorOverlap", "0.25*default_rule_thickness" }, }, - over_delimiter_vgap = { default = { "StretchStackGapBelowMin", "big_op_spacing1" }, }, - over_delimiter_bgap = { default = { "StretchStackTopShiftUp", "big_op_spacing3" }, }, - under_delimiter_vgap = { default = { "StretchStackGapAboveMin", "big_op_spacing2" }, }, - under_delimiter_bgap = { default = { "StretchStackBottomShiftDown", "big_op_spacing4" }, }, - radical_degree_before = { default = { "RadicalKernBeforeDegree", "(5/18)*quad" }, }, - radical_degree_after = { default = { "RadicalKernAfterDegree", "(-10/18)*quad" }, }, - radical_degree_raise = { default = { "RadicalDegreeBottomRaisePercent", "60" }, }, -} - -local styles = { - 'cramped_display_style', - 'cramped_script_script_style', - 'cramped_script_style', - 'cramped_text_style', - 'display_style', - 'script_script_style', - 'script_style', - 'text_style', -} - -for k, v in next, defaults do - for _, s in next, styles do - if not v[s] then - v[s] = v.default - end - end -end - --- we cannot use a metatable because we do a copy (takes a bit more work) --- --- local mt = { } setmetatable(defaults,mt) --- --- mt.__index = function(t,s) --- return t.default or t.text_style or 0 --- end - -function mathematics.dimensions(dimens) -- beware, dimens get spoiled - if dimens.SpaceAfterScript then - dimens.SubscriptShiftDownWithSuperscript = dimens.SubscriptShiftDown * 1.5 -- move this one - return table.fastcopy(dimens), { } - elseif dimens.AxisHeight or dimens.axis_height then - local t = { } - local math_x_height = dimens.x_height or 10*65536 - local math_quad = dimens.quad or 10*65536 - 
local default_rule_thickness = dimens.FractionDenominatorGapMin or dimens.default_rule_thickness or 0.4*65536 - dimens["0"] = 0 - dimens["60"] = 60 - dimens["0.25*default_rule_thickness"] = default_rule_thickness / 4 - dimens["3*default_rule_thickness"] = 3 * default_rule_thickness - dimens["4*default_rule_thickness"] = 4 * default_rule_thickness - dimens["7*default_rule_thickness"] = 7 * default_rule_thickness - dimens["(5/18)*quad"] = (math_quad * 5) / 18 - dimens["(-10/18)*quad"] = - (math_quad * 10) / 18 - dimens["math_x_height*3"] = math_x_height * 3 -- needs checking - dimens["abs(math_x_height*4)/5"] = abs(math_x_height * 4) / 5 - dimens["default_rule_thickness+(abs(default_rule_thickness)/4)"] = default_rule_thickness+(abs(default_rule_thickness) / 4) - dimens["default_rule_thickness+(abs(math_x_height)/4)"] = default_rule_thickness+(abs(math_x_height) / 4) - dimens["abs(math_x_height)/4"] = abs(math_x_height) / 4 - dimens["abs(math_x_height*4)/5"] = abs(math_x_height * 4) / 5 - dimens[""] = false - dimens["script_space"] = false -- at macro level - for variable, styles in next, defaults do - local tt = { } - for style, default in next, styles do - local one, two = default[1], default[2] - local value = dimens[one] - if value then - tt[style] = value - else - value = dimens[two] - if value == false then - tt[style] = nil - else - tt[style] = value or 0 - end - end - end - t[variable] = tt - end - local d = { - AccentBaseHeight = t . accent_base_height . text_style, - AxisHeight = t . axis . text_style, - -- DelimitedSubFormulaMinHeight - DisplayOperatorMinHeight = t . math_operator_size . text_style, -- no longer let tex decide (weird values) - -- FlattenedAccentBaseHeight - FractionDenominatorDisplayStyleGapMin = t . fraction_denom_vgap . display_style, - FractionDenominatorDisplayStyleShiftDown = t . fraction_denom_down . display_style, - FractionDenominatorGapMin = t . fraction_denom_vgap . text_style, - FractionDenominatorShiftDown = t . fraction_denom_down . text_style, - FractionNumeratorDisplayStyleGapMin = t . fraction_num_vgap . display_style, - FractionNumeratorDisplayStyleShiftUp = t . fraction_num_up . display_style, - FractionNumeratorGapMin = t . fraction_num_vgap . text_style, - FractionNumeratorShiftUp = t . fraction_num_up . text_style, - FractionRuleThickness = t . fraction_rule . text_style, - FractionDelimiterSize = t . fraction_del_size . text_style, - FractionDelimiterDisplayStyleSize = t . fraction_del_size . display_style, - LowerLimitBaselineDropMin = t . limit_below_bgap . text_style, - LowerLimitGapMin = t . limit_below_vgap . text_style, - -- MathLeading - MinConnectorOverlap = t . connector_overlap_min . text_style, - OverbarExtraAscender = t . overbar_kern . text_style, - OverbarRuleThickness = t . overbar_rule . text_style, - OverbarVerticalGap = t . overbar_vgap . text_style, - RadicalDisplayStyleVerticalGap = t . radical_vgap . display_style, - RadicalExtraAscender = t . radical_kern . text_style, - RadicalRuleThickness = t . radical_rule . text_style, - RadicalVerticalGap = t . radical_vgap . text_style, - RadicalKernBeforeDegree = t . radical_degree_before . display_style, - RadicalKernAfterDegree = t . radical_degree_after . display_style, - RadicalDegreeBottomRaisePercent = t . radical_degree_raise . display_style, - -- ScriptPercentScaleDown - -- ScriptScriptPercentScaleDown - -- SkewedFractionHorizontalGap - -- SkewedFractionVerticalGap - SpaceAfterScript = t . space_after_script . text_style, - StackBottomDisplayStyleShiftDown = t . 
stack_denom_down . display_style, - StackBottomShiftDown = t . stack_denom_down . text_style, - StackDisplayStyleGapMin = t . stack_vgap . display_style, - StackGapMin = t . stack_vgap . text_style, - StackTopDisplayStyleShiftUp = t . stack_num_up . display_style, - StackTopShiftUp = t . stack_num_up . text_style, - StretchStackGapBelowMin = t . over_delimiter_vgap . text_style, - StretchStackTopShiftUp = t . over_delimiter_bgap . text_style, - StretchStackGapAboveMin = t . under_delimiter_vgap . text_style, - StretchStackBottomShiftDown = t . under_delimiter_bgap . text_style, - SubSuperscriptGapMin = t . subsup_vgap . text_style, - SubscriptBaselineDropMin = t . sub_shift_drop . text_style, - SubscriptShiftDown = t . sub_shift_down . text_style, - SubscriptShiftDownWithSuperscript = t . sub_sup_shift_down . text_style, - SubscriptTopMax = t . sub_top_max . text_style, - SuperscriptBaselineDropMax = t . sup_shift_drop . text_style, - SuperscriptBottomMaxWithSubscript = t . sup_sub_bottom_max . text_style, - SuperscriptBottomMin = t . sup_bottom_min . text_style, - SuperscriptShiftUp = t . sup_shift_up . text_style, - SuperscriptShiftUpCramped = t . sup_shift_up . cramped_text_style, - UnderbarExtraDescender = t . underbar_kern . text_style, - UnderbarRuleThickness = t . underbar_rule . text_style, - UnderbarVerticalGap = t . underbar_vgap . text_style, - UpperLimitBaselineRiseMin = t . limit_above_bgap . text_style, - UpperLimitGapMin = t . limit_above_vgap . text_style, - } - - -- too fragile for tx/px ... even the same values give different results - d.DisplayOperatorMinHeight = nil - -- - d.AccentBaseHeight = 0 -- here? still? - return d, t -- t only for diagnostics - else - return { }, { } - end -end - +if not modules then modules = { } end modules ['math-dim'] = { + version = 1.001, + comment = "companion to math-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Beware: only Taco and Ulrik really understands in depth what these dimensions +-- do so if you run into problems ask on the context list. + +-- The radical_rule value is also used as a trigger. In luatex the accent +-- placement happens either the opentype way (using top_accent cum suis) or the +-- traditional way. In order to determine what method to use the \Umathradicalrule +-- setting is consulted to determine what method to use. This is more efficient +-- than analyzing the (potentially spread over multiple families) situation. For +-- this reason we need to set the radical_rule here. It used to be "" in +-- which case the engine takes the rulethickness. 
In c-speak: +-- +-- int compat_mode = (radical_rule(cur_style) == undefined_math_parameter) ; + +local abs, next = math.abs, next + +local defaults = { + axis = { default = { "AxisHeight", "axis_height" }, }, + accent_base_height = { default = { "AccentBaseHeight", "x_height" }, }, + fraction_del_size = { default = { "FractionDelimiterSize", "delim2" }, + cramped_display_style = { "FractionDelimiterDisplayStyleSize", "delim1" }, + display_style = { "FractionDelimiterDisplayStyleSize", "delim1" }, }, + fraction_denom_down = { default = { "FractionDenominatorShiftDown", "denom2" }, + cramped_display_style = { "FractionDenominatorDisplayStyleShiftDown", "denom1" }, + display_style = { "FractionDenominatorDisplayStyleShiftDown", "denom1" }, }, + fraction_denom_vgap = { default = { "FractionDenominatorGapMin", "default_rule_thickness" }, + cramped_display_style = { "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" }, + display_style = { "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" }, }, + fraction_num_up = { default = { "FractionNumeratorShiftUp", "num2" }, + cramped_display_style = { "FractionNumeratorDisplayStyleShiftUp", "num1" }, + display_style = { "FractionNumeratorDisplayStyleShiftUp", "num1" }, }, + fraction_num_vgap = { default = { "FractionNumeratorGapMin", "default_rule_thickness" }, + cramped_display_style = { "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" }, + display_style = { "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" }, }, + fraction_rule = { default = { "FractionRuleThickness", "default_rule_thickness" }, }, + limit_above_bgap = { default = { "UpperLimitBaselineRiseMin", "big_op_spacing3" }, }, + limit_above_vgap = { default = { "UpperLimitGapMin", "big_op_spacing1" }, }, + limit_above_kern = { default = { "0", "big_op_spacing5" }, }, + limit_below_bgap = { default = { "LowerLimitBaselineDropMin", "big_op_spacing4" }, }, + limit_below_vgap = { default = { "LowerLimitGapMin", "big_op_spacing2" }, }, + limit_below_kern = { default = { "0", "big_op_spacing5" }, }, + math_operator_size = { default = { "DisplayOperatorMinHeight", "math_x_height*3" }, }, -- 2 + overbar_kern = { default = { "OverbarExtraAscender", "default_rule_thickness" }, }, + overbar_rule = { default = { "OverbarRuleThickness", "default_rule_thickness" }, }, + overbar_vgap = { default = { "OverbarVerticalGap", "3*default_rule_thickness" }, }, + quad = { default = { "font_size(f)", "math_quad" }, }, + radical_kern = { default = { "RadicalExtraAscender", "default_rule_thickness" }, }, + radical_rule = { default = { "RadicalRuleThickness", "default_rule_thickness" }, }, + -- default = { "surd_height(f)", "default_rule_thickness" }, + radical_vgap = { default = { "RadicalVerticalGap", "default_rule_thickness+(abs(default_rule_thickness)/4)" }, + display_style = { "RadicalDisplayStyleVerticalGap", "default_rule_thickness+(abs(math_x_height)/4)" }, }, + space_after_script = { default = { "SpaceAfterScript", "script_space" }, }, + stack_denom_down = { default = { "StackBottomShiftDown", "denom2" }, + cramped_display_style = { "StackBottomDisplayStyleShiftDown", "denom1" }, + display_style = { "StackBottomDisplayStyleShiftDown", "denom1" }, }, + stack_num_up = { default = { "StackTopShiftUp", "num3" }, + cramped_display_style = { "StackTopDisplayStyleShiftUp", "num1" }, + display_style = { "StackTopDisplayStyleShiftUp", "num1" }, }, + stack_vgap = { default = { "StackGapMin", "3*default_rule_thickness" }, + cramped_display_style = { 
"StackDisplayStyleGapMin", "7*default_rule_thickness" }, + display_style = { "StackDisplayStyleGapMin", "7*default_rule_thickness" }, }, + sub_shift_down = { default = { "SubscriptShiftDown", "sub1" }, }, + sub_shift_drop = { default = { "SubscriptBaselineDropMin", "sub_drop" }, }, + sub_sup_shift_down = { default = { "SubscriptShiftDown", "sub2" }, }, + sub_top_max = { default = { "SubscriptTopMax", "abs(math_x_height*4)/5" }, }, + subsup_vgap = { default = { "SubSuperscriptGapMin", "4*default_rule_thickness" }, }, + sup_bottom_min = { default = { "SuperscriptBottomMin", "abs(math_x_height)/4" }, }, + sup_shift_drop = { default = { "SuperscriptBaselineDropMax", "sup_drop" }, }, + sup_shift_up = { cramped_display_style = { "SuperscriptShiftUpCramped", "sup3" }, + cramped_script_script_style = { "SuperscriptShiftUpCramped", "sup3" }, + cramped_script_style = { "SuperscriptShiftUpCramped", "sup3" }, + cramped_text_style = { "SuperscriptShiftUpCramped", "sup3" }, + display_style = { "SuperscriptShiftUp", "sup1" }, + script_script_style = { "SuperscriptShiftUp", "sup2" }, + script_style = { "SuperscriptShiftUp", "sup2" }, + text_style = { "SuperscriptShiftUp", "sup2" }, }, + sup_sub_bottom_max = { default = { "SuperscriptBottomMaxWithSubscript", "abs(math_x_height*4)/5" }, }, + underbar_kern = { default = { "UnderbarExtraDescender", "0" }, }, + underbar_rule = { default = { "UnderbarRuleThickness", "default_rule_thickness" }, }, + underbar_vgap = { default = { "UnderbarVerticalGap", "3*default_rule_thickness" }, }, + connector_overlap_min = { default = { "MinConnectorOverlap", "0.25*default_rule_thickness" }, }, + over_delimiter_vgap = { default = { "StretchStackGapBelowMin", "big_op_spacing1" }, }, + over_delimiter_bgap = { default = { "StretchStackTopShiftUp", "big_op_spacing3" }, }, + under_delimiter_vgap = { default = { "StretchStackGapAboveMin", "big_op_spacing2" }, }, + under_delimiter_bgap = { default = { "StretchStackBottomShiftDown", "big_op_spacing4" }, }, + radical_degree_before = { default = { "RadicalKernBeforeDegree", "(5/18)*quad" }, }, + radical_degree_after = { default = { "RadicalKernAfterDegree", "(-10/18)*quad" }, }, + radical_degree_raise = { default = { "RadicalDegreeBottomRaisePercent", "60" }, }, +} + +local styles = { + 'cramped_display_style', + 'cramped_script_script_style', + 'cramped_script_style', + 'cramped_text_style', + 'display_style', + 'script_script_style', + 'script_style', + 'text_style', +} + +for k, v in next, defaults do + for _, s in next, styles do + if not v[s] then + v[s] = v.default + end + end +end + +-- we cannot use a metatable because we do a copy (takes a bit more work) +-- +-- local mt = { } setmetatable(defaults,mt) +-- +-- mt.__index = function(t,s) +-- return t.default or t.text_style or 0 +-- end + +function mathematics.dimensions(dimens) -- beware, dimens get spoiled + if dimens.SpaceAfterScript then + dimens.SubscriptShiftDownWithSuperscript = dimens.SubscriptShiftDown * 1.5 -- move this one + return table.fastcopy(dimens), { } + elseif dimens.AxisHeight or dimens.axis_height then + local t = { } + local math_x_height = dimens.x_height or 10*65536 + local math_quad = dimens.quad or 10*65536 + local default_rule_thickness = dimens.FractionDenominatorGapMin or dimens.default_rule_thickness or 0.4*65536 + dimens["0"] = 0 + dimens["60"] = 60 + dimens["0.25*default_rule_thickness"] = default_rule_thickness / 4 + dimens["3*default_rule_thickness"] = 3 * default_rule_thickness + dimens["4*default_rule_thickness"] = 4 * default_rule_thickness 
+ dimens["7*default_rule_thickness"] = 7 * default_rule_thickness + dimens["(5/18)*quad"] = (math_quad * 5) / 18 + dimens["(-10/18)*quad"] = - (math_quad * 10) / 18 + dimens["math_x_height*3"] = math_x_height * 3 -- needs checking + dimens["abs(math_x_height*4)/5"] = abs(math_x_height * 4) / 5 + dimens["default_rule_thickness+(abs(default_rule_thickness)/4)"] = default_rule_thickness+(abs(default_rule_thickness) / 4) + dimens["default_rule_thickness+(abs(math_x_height)/4)"] = default_rule_thickness+(abs(math_x_height) / 4) + dimens["abs(math_x_height)/4"] = abs(math_x_height) / 4 + dimens["abs(math_x_height*4)/5"] = abs(math_x_height * 4) / 5 + dimens[""] = false + dimens["script_space"] = false -- at macro level + for variable, styles in next, defaults do + local tt = { } + for style, default in next, styles do + local one, two = default[1], default[2] + local value = dimens[one] + if value then + tt[style] = value + else + value = dimens[two] + if value == false then + tt[style] = nil + else + tt[style] = value or 0 + end + end + end + t[variable] = tt + end + local d = { + AccentBaseHeight = t . accent_base_height . text_style, + AxisHeight = t . axis . text_style, + -- DelimitedSubFormulaMinHeight + DisplayOperatorMinHeight = t . math_operator_size . text_style, -- no longer let tex decide (weird values) + -- FlattenedAccentBaseHeight + FractionDenominatorDisplayStyleGapMin = t . fraction_denom_vgap . display_style, + FractionDenominatorDisplayStyleShiftDown = t . fraction_denom_down . display_style, + FractionDenominatorGapMin = t . fraction_denom_vgap . text_style, + FractionDenominatorShiftDown = t . fraction_denom_down . text_style, + FractionNumeratorDisplayStyleGapMin = t . fraction_num_vgap . display_style, + FractionNumeratorDisplayStyleShiftUp = t . fraction_num_up . display_style, + FractionNumeratorGapMin = t . fraction_num_vgap . text_style, + FractionNumeratorShiftUp = t . fraction_num_up . text_style, + FractionRuleThickness = t . fraction_rule . text_style, + FractionDelimiterSize = t . fraction_del_size . text_style, + FractionDelimiterDisplayStyleSize = t . fraction_del_size . display_style, + LowerLimitBaselineDropMin = t . limit_below_bgap . text_style, + LowerLimitGapMin = t . limit_below_vgap . text_style, + -- MathLeading + MinConnectorOverlap = t . connector_overlap_min . text_style, + OverbarExtraAscender = t . overbar_kern . text_style, + OverbarRuleThickness = t . overbar_rule . text_style, + OverbarVerticalGap = t . overbar_vgap . text_style, + RadicalDisplayStyleVerticalGap = t . radical_vgap . display_style, + RadicalExtraAscender = t . radical_kern . text_style, + RadicalRuleThickness = t . radical_rule . text_style, + RadicalVerticalGap = t . radical_vgap . text_style, + RadicalKernBeforeDegree = t . radical_degree_before . display_style, + RadicalKernAfterDegree = t . radical_degree_after . display_style, + RadicalDegreeBottomRaisePercent = t . radical_degree_raise . display_style, + -- ScriptPercentScaleDown + -- ScriptScriptPercentScaleDown + -- SkewedFractionHorizontalGap + -- SkewedFractionVerticalGap + SpaceAfterScript = t . space_after_script . text_style, + StackBottomDisplayStyleShiftDown = t . stack_denom_down . display_style, + StackBottomShiftDown = t . stack_denom_down . text_style, + StackDisplayStyleGapMin = t . stack_vgap . display_style, + StackGapMin = t . stack_vgap . text_style, + StackTopDisplayStyleShiftUp = t . stack_num_up . display_style, + StackTopShiftUp = t . stack_num_up . text_style, + StretchStackGapBelowMin = t . 
over_delimiter_vgap . text_style, + StretchStackTopShiftUp = t . over_delimiter_bgap . text_style, + StretchStackGapAboveMin = t . under_delimiter_vgap . text_style, + StretchStackBottomShiftDown = t . under_delimiter_bgap . text_style, + SubSuperscriptGapMin = t . subsup_vgap . text_style, + SubscriptBaselineDropMin = t . sub_shift_drop . text_style, + SubscriptShiftDown = t . sub_shift_down . text_style, + SubscriptShiftDownWithSuperscript = t . sub_sup_shift_down . text_style, + SubscriptTopMax = t . sub_top_max . text_style, + SuperscriptBaselineDropMax = t . sup_shift_drop . text_style, + SuperscriptBottomMaxWithSubscript = t . sup_sub_bottom_max . text_style, + SuperscriptBottomMin = t . sup_bottom_min . text_style, + SuperscriptShiftUp = t . sup_shift_up . text_style, + SuperscriptShiftUpCramped = t . sup_shift_up . cramped_text_style, + UnderbarExtraDescender = t . underbar_kern . text_style, + UnderbarRuleThickness = t . underbar_rule . text_style, + UnderbarVerticalGap = t . underbar_vgap . text_style, + UpperLimitBaselineRiseMin = t . limit_above_bgap . text_style, + UpperLimitGapMin = t . limit_above_vgap . text_style, + } + + -- too fragile for tx/px ... even the same values give different results + d.DisplayOperatorMinHeight = nil + -- + d.AccentBaseHeight = 0 -- here? still? + return d, t -- t only for diagnostics + else + return { }, { } + end +end + diff --git a/tex/context/base/math-ext.lua b/tex/context/base/math-ext.lua index b00d6cde2..2b6860d75 100644 --- a/tex/context/base/math-ext.lua +++ b/tex/context/base/math-ext.lua @@ -1,197 +1,197 @@ -if not modules then modules = { } end modules ['math-ext'] = { - version = 1.001, - comment = "companion to math-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local trace_virtual = false trackers.register("math.virtual", function(v) trace_virtual = v end) - -local basename = file.basename - -local mathematics = mathematics -local characters = characters - -local report_math = logs.reporter("mathematics") - -mathematics.extras = mathematics.extras or { } -local extras = mathematics.extras - -characters.math = characters.math or { } -local mathdata = characters.math -local chardata = characters.data - -function extras.add(unicode,t) -- todo: if already stored ... 
- local min, max = mathematics.extrabase, mathematics.privatebase - 1 - -- if mathdata[unicode] or chardata[unicode] then - -- report_math("extra %U overloads existing character",unicode) - -- end - if unicode >= min and unicode <= max then - mathdata[unicode], chardata[unicode] = t, t - else - report_math("extra %U should be in range %U - %U",unicode,min,max) - end -end - -function extras.copy(target,original) - local characters = target.characters - local properties = target.properties - local parameters = target.parameters - for unicode, extradesc in next, mathdata do - -- always, because in an intermediate step we can have a non math font - local extrachar = characters[unicode] - local nextinsize = extradesc.nextinsize - if nextinsize then - local first = 1 - local charused = unicode - if not extrachar then - for i=1,#nextinsize do - local slot = nextinsize[i] - extrachar = characters[slot] - if extrachar then - characters[unicode] = extrachar - first = i + 1 - charused = slot - break - end - end - end - if not extrachar then - if trace_virtual then - report_math("extra %U in %a at %p with class %a and name %a is not mapped", - unicode,basename(properties.fullname),parameters.size, - extradesc.mathclass,extradesc.mathname) - end - elseif not extrachar.next then - local nextused = false - for i=first,#nextinsize do - local nextslot = nextinsize[i] - local nextbase = characters[nextslot] - if nextbase then - local nextnext = nextbase and nextbase.next - if nextnext then - local nextchar = characters[nextnext] - if nextchar then - extrachar.next = nextchar - nextused = nextslot - break - end - end - end - end - if trace_virtual then - if nextused then - report_math("extra %U in %a at %p with class %a and name %a maps onto %U with next %U", - unicode,basename(properties.fullname),parameters.size,charused, - extradesc.mathclass,extradesc.mathname,nextused) - else - report_math("extra %U in %a at %p with class %a and name %a maps onto %U with no next", - unicode,basename(properties.fullname),parameters.size,charused, - extradesc.mathclass,extradesc.mathname) - end - end - else - if trace_virtual then - report_math("extra %U in %a at %p with class %a and name %a maps onto %U with no next", -- own next - unicode,basename(properties.fullname),parameters.size,charused, - extradesc.mathclass,extradesc.mathname) - end - end - end - end -end - -utilities.sequencers.appendaction(mathactions,"system","mathematics.extras.copy") - --- 0xFE302 -- 0xFE320 for accents (gone with new lm/gyre) --- --- extras.add(0xFE302, { --- category="mn", --- description="WIDE MATHEMATICAL HAT", --- direction="nsm", --- linebreak="cm", --- mathclass="topaccent", --- mathname="widehat", --- mathstretch="h", --- unicodeslot=0xFE302, --- nextinsize={ 0x00302, 0x0005E }, --- } ) --- --- extras.add(0xFE303, { --- category="mn", --- cjkwd="a", --- description="WIDE MATHEMATICAL TILDE", --- direction="nsm", --- linebreak="cm", --- mathclass="topaccent", --- mathname="widetilde", --- mathstretch="h", --- unicodeslot=0xFE303, --- nextinsize={ 0x00303, 0x0007E }, --- } ) - --- 0xFE321 -- 0xFE340 for missing characters - -extras.add(0xFE321, { - category="sm", - description="MATHEMATICAL SHORT BAR", - -- direction="on", - -- linebreak="nu", - mathclass="relation", - mathname="mapstochar", - unicodeslot=0xFE321, -} ) - -extras.add(0xFE322, { - category="sm", - description="MATHEMATICAL LEFT HOOK", - mathclass="relation", - mathname="lhook", - unicodeslot=0xFE322, -} ) - -extras.add(0xFE323, { - category="sm", - 
description="MATHEMATICAL RIGHT HOOK", - mathclass="relation", - mathname="rhook", - unicodeslot=0xFE323, -} ) - -extras.add(0xFE324, { - category="sm", - description="MATHEMATICAL SHORT BAR MIRRORED", --- direction="on", --- linebreak="nu", - mathclass="relation", - mathname="mapsfromchar", - unicodeslot=0xFE324, -} ) - ---~ extras.add(0xFE304, { ---~ category="sm", ---~ description="TOP AND BOTTOM PARENTHESES", ---~ direction="on", ---~ linebreak="al", ---~ mathclass="doubleaccent", ---~ mathname="doubleparent", ---~ unicodeslot=0xFE304, ---~ accents={ 0x023DC, 0x023DD }, ---~ } ) - ---~ extras.add(0xFE305, { ---~ category="sm", ---~ description="TOP AND BOTTOM BRACES", ---~ direction="on", ---~ linebreak="al", ---~ mathclass="doubleaccent", ---~ mathname="doublebrace", ---~ unicodeslot=0xFE305, ---~ accents={ 0x023DE, 0x023DF }, ---~ } ) - ---~ \Umathchardef\braceld="0 "1 "FF07A ---~ \Umathchardef\bracerd="0 "1 "FF07B ---~ \Umathchardef\bracelu="0 "1 "FF07C ---~ \Umathchardef\braceru="0 "1 "FF07D +if not modules then modules = { } end modules ['math-ext'] = { + version = 1.001, + comment = "companion to math-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local trace_virtual = false trackers.register("math.virtual", function(v) trace_virtual = v end) + +local basename = file.basename + +local mathematics = mathematics +local characters = characters + +local report_math = logs.reporter("mathematics") + +mathematics.extras = mathematics.extras or { } +local extras = mathematics.extras + +characters.math = characters.math or { } +local mathdata = characters.math +local chardata = characters.data + +function extras.add(unicode,t) -- todo: if already stored ... 
+ local min, max = mathematics.extrabase, mathematics.privatebase - 1 + -- if mathdata[unicode] or chardata[unicode] then + -- report_math("extra %U overloads existing character",unicode) + -- end + if unicode >= min and unicode <= max then + mathdata[unicode], chardata[unicode] = t, t + else + report_math("extra %U should be in range %U - %U",unicode,min,max) + end +end + +function extras.copy(target,original) + local characters = target.characters + local properties = target.properties + local parameters = target.parameters + for unicode, extradesc in next, mathdata do + -- always, because in an intermediate step we can have a non math font + local extrachar = characters[unicode] + local nextinsize = extradesc.nextinsize + if nextinsize then + local first = 1 + local charused = unicode + if not extrachar then + for i=1,#nextinsize do + local slot = nextinsize[i] + extrachar = characters[slot] + if extrachar then + characters[unicode] = extrachar + first = i + 1 + charused = slot + break + end + end + end + if not extrachar then + if trace_virtual then + report_math("extra %U in %a at %p with class %a and name %a is not mapped", + unicode,basename(properties.fullname),parameters.size, + extradesc.mathclass,extradesc.mathname) + end + elseif not extrachar.next then + local nextused = false + for i=first,#nextinsize do + local nextslot = nextinsize[i] + local nextbase = characters[nextslot] + if nextbase then + local nextnext = nextbase and nextbase.next + if nextnext then + local nextchar = characters[nextnext] + if nextchar then + extrachar.next = nextchar + nextused = nextslot + break + end + end + end + end + if trace_virtual then + if nextused then + report_math("extra %U in %a at %p with class %a and name %a maps onto %U with next %U", + unicode,basename(properties.fullname),parameters.size,charused, + extradesc.mathclass,extradesc.mathname,nextused) + else + report_math("extra %U in %a at %p with class %a and name %a maps onto %U with no next", + unicode,basename(properties.fullname),parameters.size,charused, + extradesc.mathclass,extradesc.mathname) + end + end + else + if trace_virtual then + report_math("extra %U in %a at %p with class %a and name %a maps onto %U with no next", -- own next + unicode,basename(properties.fullname),parameters.size,charused, + extradesc.mathclass,extradesc.mathname) + end + end + end + end +end + +utilities.sequencers.appendaction(mathactions,"system","mathematics.extras.copy") + +-- 0xFE302 -- 0xFE320 for accents (gone with new lm/gyre) +-- +-- extras.add(0xFE302, { +-- category="mn", +-- description="WIDE MATHEMATICAL HAT", +-- direction="nsm", +-- linebreak="cm", +-- mathclass="topaccent", +-- mathname="widehat", +-- mathstretch="h", +-- unicodeslot=0xFE302, +-- nextinsize={ 0x00302, 0x0005E }, +-- } ) +-- +-- extras.add(0xFE303, { +-- category="mn", +-- cjkwd="a", +-- description="WIDE MATHEMATICAL TILDE", +-- direction="nsm", +-- linebreak="cm", +-- mathclass="topaccent", +-- mathname="widetilde", +-- mathstretch="h", +-- unicodeslot=0xFE303, +-- nextinsize={ 0x00303, 0x0007E }, +-- } ) + +-- 0xFE321 -- 0xFE340 for missing characters + +extras.add(0xFE321, { + category="sm", + description="MATHEMATICAL SHORT BAR", + -- direction="on", + -- linebreak="nu", + mathclass="relation", + mathname="mapstochar", + unicodeslot=0xFE321, +} ) + +extras.add(0xFE322, { + category="sm", + description="MATHEMATICAL LEFT HOOK", + mathclass="relation", + mathname="lhook", + unicodeslot=0xFE322, +} ) + +extras.add(0xFE323, { + category="sm", + 
description="MATHEMATICAL RIGHT HOOK", + mathclass="relation", + mathname="rhook", + unicodeslot=0xFE323, +} ) + +extras.add(0xFE324, { + category="sm", + description="MATHEMATICAL SHORT BAR MIRRORED", +-- direction="on", +-- linebreak="nu", + mathclass="relation", + mathname="mapsfromchar", + unicodeslot=0xFE324, +} ) + +--~ extras.add(0xFE304, { +--~ category="sm", +--~ description="TOP AND BOTTOM PARENTHESES", +--~ direction="on", +--~ linebreak="al", +--~ mathclass="doubleaccent", +--~ mathname="doubleparent", +--~ unicodeslot=0xFE304, +--~ accents={ 0x023DC, 0x023DD }, +--~ } ) + +--~ extras.add(0xFE305, { +--~ category="sm", +--~ description="TOP AND BOTTOM BRACES", +--~ direction="on", +--~ linebreak="al", +--~ mathclass="doubleaccent", +--~ mathname="doublebrace", +--~ unicodeslot=0xFE305, +--~ accents={ 0x023DE, 0x023DF }, +--~ } ) + +--~ \Umathchardef\braceld="0 "1 "FF07A +--~ \Umathchardef\bracerd="0 "1 "FF07B +--~ \Umathchardef\bracelu="0 "1 "FF07C +--~ \Umathchardef\braceru="0 "1 "FF07D diff --git a/tex/context/base/math-fbk.lua b/tex/context/base/math-fbk.lua index eebc4e4e7..f34019b6e 100644 --- a/tex/context/base/math-fbk.lua +++ b/tex/context/base/math-fbk.lua @@ -1,312 +1,312 @@ -if not modules then modules = { } end modules ['math-fbk'] = { - version = 1.001, - comment = "companion to math-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local trace_fallbacks = false trackers.register("math.fallbacks", function(v) trace_fallbacks = v end) - -local report_fallbacks = logs.reporter("math","fallbacks") - -local fallbacks = { } -mathematics.fallbacks = fallbacks - -local virtualcharacters = { } - -local identifiers = fonts.hashes.identifiers -local lastmathids = fonts.hashes.lastmathids - --- we need a trick (todo): if we define scriptscript, script and text in --- that order we could use their id's .. i.e. we could always add a font --- table with those id's .. in fact, we could also add a whole lot more --- as it doesn't hurt --- --- todo: use index 'true when luatex provides that feature (on the agenda) - -function fallbacks.apply(target,original) - local mathparameters = target.mathparameters -- why not hasmath - if mathparameters then - local characters = target.characters - local parameters = target.parameters - local mathsize = parameters.mathsize - local size = parameters.size - local usedfonts = target.fonts - if not usedfonts then - usedfonts = { } - target.fonts = usedfonts - end - -- This is not okay yet ... we have no proper way to refer to 'self' - -- otherwise I will make my own id allocator). 
-local self = #usedfonts == 0 and font.nextid() or nil -- will be true - local textid, scriptid, scriptscriptid - local textindex, scriptindex, scriptscriptindex - local textdata, scriptdata, scriptscriptdata - if mathsize == 3 then - -- scriptscriptsize - -- textid = nil -- self - -- scriptid = nil -- no smaller - -- scriptscriptid = nil -- no smaller -textid = self -scriptid = self -scriptscriptid = self - elseif mathsize == 2 then - -- scriptsize - -- textid = nil -- self -textid = self - scriptid = lastmathids[3] - scriptscriptid = lastmathids[3] - else - -- textsize - -- textid = nil -- self -textid = self - scriptid = lastmathids[2] - scriptscriptid = lastmathids[3] - end - if textid then - textindex = #usedfonts + 1 - usedfonts[textindex] = { id = textid } - textdata = identifiers[textid] - else - textdata = target - end - if scriptid then - scriptindex = #usedfonts + 1 - usedfonts[scriptindex] = { id = scriptid } - scriptdata = identifiers[scriptid] - else - scriptindex = textindex - scriptdata = textdata - end - if scriptscriptid then - scriptscriptindex = #usedfonts + 1 - usedfonts[scriptscriptindex] = { id = scriptscriptid } - scriptscriptdata = identifiers[scriptscriptid] - else - scriptscriptindex = scriptindex - scriptscriptdata = scriptdata - end --- report_fallbacks("used textid: %s, used script id: %s, used scriptscript id: %s", --- tostring(textid),tostring(scriptid),tostring(scriptscriptid)) - local data = { - textdata = textdata, - scriptdata = scriptdata, - scriptscriptdata = scriptscriptdata, - textindex = textindex, - scriptindex = scriptindex, - scriptscriptindex = scriptscriptindex, - characters = characters, - unicode = k, - target = target, - original = original, - size = size, - mathsize = mathsize, - } --- inspect(usedfonts) - for k, v in next, virtualcharacters do - if not characters[k] then - local tv = type(v) - if tv == "table" then - characters[k] = v - elseif tv == "number" then - characters[k] = characters[v] - elseif tv == "function" then - characters[k] = v(data) - end - if trace_fallbacks then - if characters[k] then - report_fallbacks("extending font %a with %U",target.properties.fullname,k) - end - end - end - end - end -end - -utilities.sequencers.appendaction("aftercopyingcharacters","system","mathematics.fallbacks.apply") - -function fallbacks.install(unicode,value) - virtualcharacters[unicode] = value -end - --- a few examples: - -local function reference(index,char) - if index then - return { "slot", index, char } - else - return { "char", char } - end -end - -local function raised(data,down) - local replacement = data.replacement - local character = data.scriptdata.characters[replacement] - if character then - return { - width = character.width, - height = character.height, - depth = character.depth, - commands = { - { "down", down and data.size/4 or -data.size/2 }, -- maybe exheight - reference(data.scriptindex,replacement) - } - } - end -end - --- virtualcharacters[0x207A] = 0x2212 --- virtualcharacters[0x207B] = 0x002B --- virtualcharacters[0x208A] = 0x2212 --- virtualcharacters[0x208B] = 0x002B - -virtualcharacters[0x207A] = function(data) - data.replacement = 0x2212 - return raised(data) -end - -virtualcharacters[0x207B] = function(data) - data.replacement = 0x002B - return raised(data) -end - -virtualcharacters[0x208A] = function(data) - data.replacement = 0x2212 - return raised(data,true) -end - -virtualcharacters[0x208B] = function(data) - data.replacement = 0x002B - return raised(data,true) -end - --- local function 
repeated(data,char,n,fraction) --- local character = data.characters[char] --- if character then --- local width = character.width --- local delta = width - character.italic -- width * fraction --- local c = { "char", char } --- local r = { "right", right } --- local commands = { } --- for i=1,n-1 do --- width = width + delta --- commands[#commands+1] = c --- commands[#commands+1] = -delta --- end --- commands[#commands+1] = c --- return { --- width = width, --- height = character.height, --- depth = character.depth, --- commands = commands, --- } --- end --- end - --- virtualcharacters[0x222C] = function(data) --- return repeated(data,0x222B,2,1/8) --- end - --- virtualcharacters[0x222D] = function(data) --- return repeated(data,0x222B,3,1/8) --- end - -local addextra = mathematics.extras.add - -addextra(0xFE350, { - category="sm", - description="MATHEMATICAL DOUBLE ARROW LEFT END", - mathclass="relation", - mathname="ctxdoublearrowfillleftend", - unicodeslot=0xFE350, -} ) - -addextra(0xFE351, { - category="sm", - description="MATHEMATICAL DOUBLE ARROW MIDDLE PART", - mathclass="relation", - mathname="ctxdoublearrowfillmiddlepart", - unicodeslot=0xFE351, -} ) - -addextra(0xFE352, { - category="sm", - description="MATHEMATICAL DOUBLE ARROW RIGHT END", - mathclass="relation", - mathname="ctxdoublearrowfillrightend", - unicodeslot=0xFE352, -} ) - -local push = { "push" } -local pop = { "pop" } -local leftarrow = { "char", 0x2190 } -local relbar = { "char", 0x2212 } -local rightarrow = { "char", 0x2192 } - -virtualcharacters[0xFE350] = function(data) - -- return combined(data,0x2190,0x2212) -- leftarrow relbar - local charone = data.characters[0x2190] - local chartwo = data.characters[0x2212] - if charone and chartwo then - local size = data.size/2 - return { - width = chartwo.width, - height = size, - depth = size, - commands = { - push, - { "down", size/2 }, - leftarrow, - pop, - { "down", -size/2 }, - relbar, - } - } - end -end - -virtualcharacters[0xFE351] = function(data) - -- return combined(data,0x2212,0x2212) -- relbar, relbar (isn't that just equal) - local char = data.characters[0x2212] - if char then - local size = data.size/2 - return { - width = char.width, - height = size, - depth = size, - commands = { - push, - { "down", size/2 }, - relbar, - pop, - { "down", -size/2 }, - relbar, - } - } - end -end - -virtualcharacters[0xFE352] = function(data) - -- return combined(data,0x2192,0x2212) -- rightarrow relbar - local charone = data.characters[0x2192] - local chartwo = data.characters[0x2212] - if charone and chartwo then - local size = data.size/2 - return { - width = chartwo.width, - height = size, - depth = size, - commands = { - push, - { "down", size/2 }, - relbar, - pop, - { "right", chartwo.width - charone.width }, - { "down", -size/2 }, - rightarrow, - } - } - end -end - +if not modules then modules = { } end modules ['math-fbk'] = { + version = 1.001, + comment = "companion to math-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local trace_fallbacks = false trackers.register("math.fallbacks", function(v) trace_fallbacks = v end) + +local report_fallbacks = logs.reporter("math","fallbacks") + +local fallbacks = { } +mathematics.fallbacks = fallbacks + +local virtualcharacters = { } + +local identifiers = fonts.hashes.identifiers +local lastmathids = fonts.hashes.lastmathids + +-- we need a trick (todo): if we define scriptscript, script and text in +-- 
that order we could use their id's .. i.e. we could always add a font +-- table with those id's .. in fact, we could also add a whole lot more +-- as it doesn't hurt +-- +-- todo: use index 'true when luatex provides that feature (on the agenda) + +function fallbacks.apply(target,original) + local mathparameters = target.mathparameters -- why not hasmath + if mathparameters then + local characters = target.characters + local parameters = target.parameters + local mathsize = parameters.mathsize + local size = parameters.size + local usedfonts = target.fonts + if not usedfonts then + usedfonts = { } + target.fonts = usedfonts + end + -- This is not okay yet ... we have no proper way to refer to 'self' + -- otherwise I will make my own id allocator). +local self = #usedfonts == 0 and font.nextid() or nil -- will be true + local textid, scriptid, scriptscriptid + local textindex, scriptindex, scriptscriptindex + local textdata, scriptdata, scriptscriptdata + if mathsize == 3 then + -- scriptscriptsize + -- textid = nil -- self + -- scriptid = nil -- no smaller + -- scriptscriptid = nil -- no smaller +textid = self +scriptid = self +scriptscriptid = self + elseif mathsize == 2 then + -- scriptsize + -- textid = nil -- self +textid = self + scriptid = lastmathids[3] + scriptscriptid = lastmathids[3] + else + -- textsize + -- textid = nil -- self +textid = self + scriptid = lastmathids[2] + scriptscriptid = lastmathids[3] + end + if textid then + textindex = #usedfonts + 1 + usedfonts[textindex] = { id = textid } + textdata = identifiers[textid] + else + textdata = target + end + if scriptid then + scriptindex = #usedfonts + 1 + usedfonts[scriptindex] = { id = scriptid } + scriptdata = identifiers[scriptid] + else + scriptindex = textindex + scriptdata = textdata + end + if scriptscriptid then + scriptscriptindex = #usedfonts + 1 + usedfonts[scriptscriptindex] = { id = scriptscriptid } + scriptscriptdata = identifiers[scriptscriptid] + else + scriptscriptindex = scriptindex + scriptscriptdata = scriptdata + end +-- report_fallbacks("used textid: %s, used script id: %s, used scriptscript id: %s", +-- tostring(textid),tostring(scriptid),tostring(scriptscriptid)) + local data = { + textdata = textdata, + scriptdata = scriptdata, + scriptscriptdata = scriptscriptdata, + textindex = textindex, + scriptindex = scriptindex, + scriptscriptindex = scriptscriptindex, + characters = characters, + unicode = k, + target = target, + original = original, + size = size, + mathsize = mathsize, + } +-- inspect(usedfonts) + for k, v in next, virtualcharacters do + if not characters[k] then + local tv = type(v) + if tv == "table" then + characters[k] = v + elseif tv == "number" then + characters[k] = characters[v] + elseif tv == "function" then + characters[k] = v(data) + end + if trace_fallbacks then + if characters[k] then + report_fallbacks("extending font %a with %U",target.properties.fullname,k) + end + end + end + end + end +end + +utilities.sequencers.appendaction("aftercopyingcharacters","system","mathematics.fallbacks.apply") + +function fallbacks.install(unicode,value) + virtualcharacters[unicode] = value +end + +-- a few examples: + +local function reference(index,char) + if index then + return { "slot", index, char } + else + return { "char", char } + end +end + +local function raised(data,down) + local replacement = data.replacement + local character = data.scriptdata.characters[replacement] + if character then + return { + width = character.width, + height = character.height, + depth = character.depth, 
+ commands = { + { "down", down and data.size/4 or -data.size/2 }, -- maybe exheight + reference(data.scriptindex,replacement) + } + } + end +end + +-- virtualcharacters[0x207A] = 0x2212 +-- virtualcharacters[0x207B] = 0x002B +-- virtualcharacters[0x208A] = 0x2212 +-- virtualcharacters[0x208B] = 0x002B + +virtualcharacters[0x207A] = function(data) + data.replacement = 0x2212 + return raised(data) +end + +virtualcharacters[0x207B] = function(data) + data.replacement = 0x002B + return raised(data) +end + +virtualcharacters[0x208A] = function(data) + data.replacement = 0x2212 + return raised(data,true) +end + +virtualcharacters[0x208B] = function(data) + data.replacement = 0x002B + return raised(data,true) +end + +-- local function repeated(data,char,n,fraction) +-- local character = data.characters[char] +-- if character then +-- local width = character.width +-- local delta = width - character.italic -- width * fraction +-- local c = { "char", char } +-- local r = { "right", right } +-- local commands = { } +-- for i=1,n-1 do +-- width = width + delta +-- commands[#commands+1] = c +-- commands[#commands+1] = -delta +-- end +-- commands[#commands+1] = c +-- return { +-- width = width, +-- height = character.height, +-- depth = character.depth, +-- commands = commands, +-- } +-- end +-- end + +-- virtualcharacters[0x222C] = function(data) +-- return repeated(data,0x222B,2,1/8) +-- end + +-- virtualcharacters[0x222D] = function(data) +-- return repeated(data,0x222B,3,1/8) +-- end + +local addextra = mathematics.extras.add + +addextra(0xFE350, { + category="sm", + description="MATHEMATICAL DOUBLE ARROW LEFT END", + mathclass="relation", + mathname="ctxdoublearrowfillleftend", + unicodeslot=0xFE350, +} ) + +addextra(0xFE351, { + category="sm", + description="MATHEMATICAL DOUBLE ARROW MIDDLE PART", + mathclass="relation", + mathname="ctxdoublearrowfillmiddlepart", + unicodeslot=0xFE351, +} ) + +addextra(0xFE352, { + category="sm", + description="MATHEMATICAL DOUBLE ARROW RIGHT END", + mathclass="relation", + mathname="ctxdoublearrowfillrightend", + unicodeslot=0xFE352, +} ) + +local push = { "push" } +local pop = { "pop" } +local leftarrow = { "char", 0x2190 } +local relbar = { "char", 0x2212 } +local rightarrow = { "char", 0x2192 } + +virtualcharacters[0xFE350] = function(data) + -- return combined(data,0x2190,0x2212) -- leftarrow relbar + local charone = data.characters[0x2190] + local chartwo = data.characters[0x2212] + if charone and chartwo then + local size = data.size/2 + return { + width = chartwo.width, + height = size, + depth = size, + commands = { + push, + { "down", size/2 }, + leftarrow, + pop, + { "down", -size/2 }, + relbar, + } + } + end +end + +virtualcharacters[0xFE351] = function(data) + -- return combined(data,0x2212,0x2212) -- relbar, relbar (isn't that just equal) + local char = data.characters[0x2212] + if char then + local size = data.size/2 + return { + width = char.width, + height = size, + depth = size, + commands = { + push, + { "down", size/2 }, + relbar, + pop, + { "down", -size/2 }, + relbar, + } + } + end +end + +virtualcharacters[0xFE352] = function(data) + -- return combined(data,0x2192,0x2212) -- rightarrow relbar + local charone = data.characters[0x2192] + local chartwo = data.characters[0x2212] + if charone and chartwo then + local size = data.size/2 + return { + width = chartwo.width, + height = size, + depth = size, + commands = { + push, + { "down", size/2 }, + relbar, + pop, + { "right", chartwo.width - charone.width }, + { "down", -size/2 }, + 
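-- Usage sketch (hypothetical slot, not in this commit): fallbacks.install just
-- stores a builder in virtualcharacters; fallbacks.apply calls it with the
-- "data" record shown earlier whenever the target font lacks that slot. The
-- returned table is an ordinary virtual character: metrics plus a command list
-- built from { "char", c }, { "slot", n, c }, { "down", d }, { "right", d },
-- push and pop, just like the arrow fill pieces above.

mathematics.fallbacks.install(0xFE360, function(data)  -- 0xFE360: made-up slot
    local minus = data.characters[0x2212]              -- reuse the minus sign
    if minus then
        return {
            width    = minus.width,
            height   = minus.height,
            depth    = minus.depth,
            commands = {
                { "down", -data.size/4 },              -- shift it up a bit
                { "char", 0x2212 },
            },
        }
    end
end)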
rightarrow, + } + } + end +end + diff --git a/tex/context/base/math-frc.lua b/tex/context/base/math-frc.lua index 4f531a530..077da643b 100644 --- a/tex/context/base/math-frc.lua +++ b/tex/context/base/math-frc.lua @@ -1,51 +1,51 @@ -if not modules then modules = { } end modules ['math-frc'] = { - version = 1.001, - comment = "companion to math-frc.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local utfchar = utf.char - -local context = context -local variables = interfaces.variables - -local v_no = variables.no -local v_yes = variables.yes - -local resolved = { - [0x007B] = "\\{", - [0x007D] = "\\}", -} - -table.setmetatableindex(resolved, function(t,k) - local v = utfchar(k) - t[k] = v - return v -end) - -local normalatop = context.normalatop -local normalover = context.normalover - -function commands.math_frac(how,left,right,width) - if how == v_no then - if left == 0x002E and right == 0x002E then - normalatop() - else - context("\\atopwithdelims%s%s",resolved[left],resolved[right]) - end - elseif how == v_yes then - if left == 0x002E and right == 0x002E then - context("\\normalabove%ssp",width) - else - context("\\abovewithdelims%s%s%ssp",resolved[left],resolved[right],width) - end - else -- v_auto - if left == 0x002E and right == 0x002E then - normalover() - else - context("\\overwithdelims%s%s",resolved[left],resolved[right]) - end - end -end +if not modules then modules = { } end modules ['math-frc'] = { + version = 1.001, + comment = "companion to math-frc.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local utfchar = utf.char + +local context = context +local variables = interfaces.variables + +local v_no = variables.no +local v_yes = variables.yes + +local resolved = { + [0x007B] = "\\{", + [0x007D] = "\\}", +} + +table.setmetatableindex(resolved, function(t,k) + local v = utfchar(k) + t[k] = v + return v +end) + +local normalatop = context.normalatop +local normalover = context.normalover + +function commands.math_frac(how,left,right,width) + if how == v_no then + if left == 0x002E and right == 0x002E then + normalatop() + else + context("\\atopwithdelims%s%s",resolved[left],resolved[right]) + end + elseif how == v_yes then + if left == 0x002E and right == 0x002E then + context("\\normalabove%ssp",width) + else + context("\\abovewithdelims%s%s%ssp",resolved[left],resolved[right],width) + end + else -- v_auto + if left == 0x002E and right == 0x002E then + normalover() + else + context("\\overwithdelims%s%s",resolved[left],resolved[right]) + end + end +end diff --git a/tex/context/base/math-map.lua b/tex/context/base/math-map.lua index a0d7457d1..51e0f6831 100644 --- a/tex/context/base/math-map.lua +++ b/tex/context/base/math-map.lua @@ -1,684 +1,684 @@ -if not modules then modules = { } end modules ['math-map'] = { - version = 1.001, - comment = "companion to math-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: make sparse .. if self - ---[[ldx-- -

<p>Remapping mathematics alphabets.</p>
    ---ldx]]-- - --- oldstyle: not really mathematics but happened to be part of --- the mathematics fonts in cmr --- --- persian: we will also provide mappers for other --- scripts - --- todo: alphabets namespace --- maybe: script/scriptscript dynamic, - --- to be looked into once the fonts are ready (will become font --- goodie): --- --- (U+2202,U+1D715) : upright --- (U+2202,U+1D715) : italic --- (U+2202,U+1D715) : upright --- --- plus add them to the regular vectors below so that they honor \it etc - -local type, next = type, next -local floor, div = math.floor, math.div -local merged = table.merged -local extract = bit32.extract - -local allocate = utilities.storage.allocate -local texattribute = tex.attribute -local otffeatures = fonts.constructors.newfeatures("otf") -local registerotffeature = otffeatures.register -local setmetatableindex = table.setmetatableindex - -local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end) -local report_remapping = logs.reporter("mathematics","remapping") - -mathematics = mathematics or { } -local mathematics = mathematics - --- Unfortunately some alphabets have gaps (thereby troubling all applications that --- need to deal with math). Somewhat strange considering all those weird symbols that --- were added afterwards. The following trickery (and data) is only to be used for --- diagnostics and quick and dirty alphabet tracing (s-mat-10.mkiv) as we deal with --- it otherwise. - -mathematics.gaps = { - [0x1D455] = 0x0210E, -- H - [0x1D49D] = 0x0212C, -- script B - [0x1D4A0] = 0x02130, -- script E - [0x1D4A1] = 0x02131, -- script F - [0x1D4A3] = 0x0210B, -- script H - [0x1D4A4] = 0x02110, -- script I - [0x1D4A7] = 0x02112, -- script L - [0x1D4A8] = 0x02133, -- script M - [0x1D4AD] = 0x0211B, -- script R - [0x1D4BA] = 0x0212F, -- script e - [0x1D4BC] = 0x0210A, -- script g - [0x1D4C4] = 0x02134, -- script o - [0x1D506] = 0x0212D, -- fraktur C - [0x1D50B] = 0x0210C, -- fraktur H - [0x1D50C] = 0x02111, -- fraktur I - [0x1D515] = 0x0211C, -- fraktur R - [0x1D51D] = 0x02128, -- fraktur Z - [0x1D53A] = 0x02102, -- bb C - [0x1D53F] = 0x0210D, -- bb H - [0x1D545] = 0x02115, -- bb N - [0x1D547] = 0x02119, -- bb P - [0x1D548] = 0x0211A, -- bb Q - [0x1D549] = 0x0211D, -- bb R - [0x1D551] = 0x02124, -- bb Z -} - -local function fillinmathgaps(tfmdata,key,value) - local mathgaps = mathematics.gaps - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - for gap, original in next, mathgaps do - if characters[original] and not characters[gap] then - characters [gap] = characters [original] - descriptions[gap] = descriptions[original] - end - end -end - -registerotffeature { - name = "mathgaps", - description = "plug gaps in math alphabets", - comment = "regular document sources should not depend on this", - manipulators = { - base = fillinmathgaps, - node = fillinmathgaps, - } -} - --- we could use one level less and have tf etc be tables directly but the --- following approach permits easier remapping of a-a, A-Z and 0-9 to --- fallbacks; symbols is currently mostly greek - -local function todigit(n) local t = { } for i=0, 9 do t[0x00030+i] = n+i end return t end -local function toupper(n) local t = { } for i=0,25 do t[0x00041+i] = n+i end return t end -local function tolower(n) local t = { } for i=0,25 do t[0x00061+i] = n+i end return t end - -local regular_tf = { - digits = todigit(0x00030), - ucletters = toupper(0x00041), - lcletters = tolower(0x00061), - ucgreek = { - [0x0391]=0x0391, 
[0x0392]=0x0392, [0x0393]=0x0393, [0x0394]=0x0394, [0x0395]=0x0395, - [0x0396]=0x0396, [0x0397]=0x0397, [0x0398]=0x0398, [0x0399]=0x0399, [0x039A]=0x039A, - [0x039B]=0x039B, [0x039C]=0x039C, [0x039D]=0x039D, [0x039E]=0x039E, [0x039F]=0x039F, - [0x03A0]=0x03A0, [0x03A1]=0x03A1, [0x03A3]=0x03A3, [0x03A4]=0x03A4, [0x03A5]=0x03A5, - [0x03A6]=0x03A6, [0x03A7]=0x03A7, [0x03A8]=0x03A8, [0x03A9]=0x03A9, - }, - lcgreek = { - [0x03B1]=0x03B1, [0x03B2]=0x03B2, [0x03B3]=0x03B3, [0x03B4]=0x03B4, [0x03B5]=0x03B5, - [0x03B6]=0x03B6, [0x03B7]=0x03B7, [0x03B8]=0x03B8, [0x03B9]=0x03B9, [0x03BA]=0x03BA, - [0x03BB]=0x03BB, [0x03BC]=0x03BC, [0x03BD]=0x03BD, [0x03BE]=0x03BE, [0x03BF]=0x03BF, - [0x03C0]=0x03C0, [0x03C1]=0x03C1, [0x03C2]=0x03C2, [0x03C3]=0x03C3, [0x03C4]=0x03C4, - [0x03C5]=0x03C5, [0x03C6]=0x03C6, [0x03C7]=0x03C7, [0x03C8]=0x03C8, [0x03C9]=0x03C9, - [0x03D1]=0x03D1, [0x03D5]=0x03D5, [0x03D6]=0x03D6, [0x03F0]=0x03F0, [0x03F1]=0x03F1, - [0x03F4]=0x03F4, [0x03F5]=0x03F5, - }, - symbols = { - [0x2202]=0x2202, [0x2207]=0x2207, - }, -} - -local regular_it = { - digits = regular_tf.digits, - ucletters = toupper(0x1D434), - lcletters = { -- H - [0x00061]=0x1D44E, [0x00062]=0x1D44F, [0x00063]=0x1D450, [0x00064]=0x1D451, [0x00065]=0x1D452, - [0x00066]=0x1D453, [0x00067]=0x1D454, [0x00068]=0x0210E, [0x00069]=0x1D456, [0x0006A]=0x1D457, - [0x0006B]=0x1D458, [0x0006C]=0x1D459, [0x0006D]=0x1D45A, [0x0006E]=0x1D45B, [0x0006F]=0x1D45C, - [0x00070]=0x1D45D, [0x00071]=0x1D45E, [0x00072]=0x1D45F, [0x00073]=0x1D460, [0x00074]=0x1D461, - [0x00075]=0x1D462, [0x00076]=0x1D463, [0x00077]=0x1D464, [0x00078]=0x1D465, [0x00079]=0x1D466, - [0x0007A]=0x1D467, - }, - ucgreek = { - [0x0391]=0x1D6E2, [0x0392]=0x1D6E3, [0x0393]=0x1D6E4, [0x0394]=0x1D6E5, [0x0395]=0x1D6E6, - [0x0396]=0x1D6E7, [0x0397]=0x1D6E8, [0x0398]=0x1D6E9, [0x0399]=0x1D6EA, [0x039A]=0x1D6EB, - [0x039B]=0x1D6EC, [0x039C]=0x1D6ED, [0x039D]=0x1D6EE, [0x039E]=0x1D6EF, [0x039F]=0x1D6F0, - [0x03A0]=0x1D6F1, [0x03A1]=0x1D6F2, [0x03A3]=0x1D6F4, [0x03A4]=0x1D6F5, [0x03A5]=0x1D6F6, - [0x03A6]=0x1D6F7, [0x03A7]=0x1D6F8, [0x03A8]=0x1D6F9, [0x03A9]=0x1D6FA, - }, - lcgreek = { - [0x03B1]=0x1D6FC, [0x03B2]=0x1D6FD, [0x03B3]=0x1D6FE, [0x03B4]=0x1D6FF, [0x03B5]=0x1D700, - [0x03B6]=0x1D701, [0x03B7]=0x1D702, [0x03B8]=0x1D703, [0x03B9]=0x1D704, [0x03BA]=0x1D705, - [0x03BB]=0x1D706, [0x03BC]=0x1D707, [0x03BD]=0x1D708, [0x03BE]=0x1D709, [0x03BF]=0x1D70A, - [0x03C0]=0x1D70B, [0x03C1]=0x1D70C, [0x03C2]=0x1D70D, [0x03C3]=0x1D70E, [0x03C4]=0x1D70F, - [0x03C5]=0x1D710, [0x03C6]=0x1D711, [0x03C7]=0x1D712, [0x03C8]=0x1D713, [0x03C9]=0x1D714, - [0x03D1]=0x1D717, [0x03D5]=0x1D719, [0x03D6]=0x1D71B, [0x03F0]=0x1D718, [0x03F1]=0x1D71A, - [0x03F4]=0x1D6F3, [0x03F5]=0x1D716, - }, - symbols = { - [0x2202]=0x1D715, [0x2207]=0x1D6FB, - }, -} - -local regular_bf= { - digits = todigit(0x1D7CE), - ucletters = toupper(0x1D400), - lcletters = tolower(0x1D41A), - ucgreek = { - [0x0391]=0x1D6A8, [0x0392]=0x1D6A9, [0x0393]=0x1D6AA, [0x0394]=0x1D6AB, [0x0395]=0x1D6AC, - [0x0396]=0x1D6AD, [0x0397]=0x1D6AE, [0x0398]=0x1D6AF, [0x0399]=0x1D6B0, [0x039A]=0x1D6B1, - [0x039B]=0x1D6B2, [0x039C]=0x1D6B3, [0x039D]=0x1D6B4, [0x039E]=0x1D6B5, [0x039F]=0x1D6B6, - [0x03A0]=0x1D6B7, [0x03A1]=0x1D6B8, [0x03A3]=0x1D6BA, [0x03A4]=0x1D6BB, [0x03A5]=0x1D6BC, - [0x03A6]=0x1D6BD, [0x03A7]=0x1D6BE, [0x03A8]=0x1D6BF, [0x03A9]=0x1D6C0, - }, - lcgreek = { - [0x03B1]=0x1D6C2, [0x03B2]=0x1D6C3, [0x03B3]=0x1D6C4, [0x03B4]=0x1D6C5, [0x03B5]=0x1D6C6, - [0x03B6]=0x1D6C7, [0x03B7]=0x1D6C8, [0x03B8]=0x1D6C9, [0x03B9]=0x1D6CA, 
[0x03BA]=0x1D6CB, - [0x03BB]=0x1D6CC, [0x03BC]=0x1D6CD, [0x03BD]=0x1D6CE, [0x03BE]=0x1D6CF, [0x03BF]=0x1D6D0, - [0x03C0]=0x1D6D1, [0x03C1]=0x1D6D2, [0x03C2]=0x1D6D3, [0x03C3]=0x1D6D4, [0x03C4]=0x1D6D5, - [0x03C5]=0x1D6D6, [0x03C6]=0x1D6D7, [0x03C7]=0x1D6D8, [0x03C8]=0x1D6D9, [0x03C9]=0x1D6DA, - [0x03D1]=0x1D6DD, [0x03D5]=0x1D6DF, [0x03D6]=0x1D6E1, [0x03F0]=0x1D6DE, [0x03F1]=0x1D6E0, - [0x03F4]=0x1D6B9, [0x03F5]=0x1D6DC, - }, - symbols = { - [0x2202]=0x1D6DB, [0x2207]=0x1D6C1, - }, -} - -local regular_bi = { - digits = regular_bf.digits, - ucletters = toupper(0x1D468), - lcletters = tolower(0x1D482), - ucgreek = { - [0x0391]=0x1D71C, [0x0392]=0x1D71D, [0x0393]=0x1D71E, [0x0394]=0x1D71F, [0x0395]=0x1D720, - [0x0396]=0x1D721, [0x0397]=0x1D722, [0x0398]=0x1D723, [0x0399]=0x1D724, [0x039A]=0x1D725, - [0x039B]=0x1D726, [0x039C]=0x1D727, [0x039D]=0x1D728, [0x039E]=0x1D729, [0x039F]=0x1D72A, - [0x03A0]=0x1D72B, [0x03A1]=0x1D72C, [0x03A3]=0x1D72E, [0x03A4]=0x1D72F, [0x03A5]=0x1D730, - [0x03A6]=0x1D731, [0x03A7]=0x1D732, [0x03A8]=0x1D733, [0x03A9]=0x1D734, - }, - lcgreek = { - [0x03B1]=0x1D736, [0x03B2]=0x1D737, [0x03B3]=0x1D738, [0x03B4]=0x1D739, [0x03B5]=0x1D73A, - [0x03B6]=0x1D73B, [0x03B7]=0x1D73C, [0x03B8]=0x1D73D, [0x03B9]=0x1D73E, [0x03BA]=0x1D73F, - [0x03BB]=0x1D740, [0x03BC]=0x1D741, [0x03BD]=0x1D742, [0x03BE]=0x1D743, [0x03BF]=0x1D744, - [0x03C0]=0x1D745, [0x03C1]=0x1D746, [0x03C2]=0x1D747, [0x03C3]=0x1D748, [0x03C4]=0x1D749, - [0x03C5]=0x1D74A, [0x03C6]=0x1D74B, [0x03C7]=0x1D74C, [0x03C8]=0x1D74D, [0x03C9]=0x1D74E, - [0x03D1]=0x1D751, [0x03D5]=0x1D753, [0x03D6]=0x1D755, [0x03F0]=0x1D752, [0x03F1]=0x1D754, - [0x03F4]=0x1D72D, [0x03F5]=0x1D750, - }, - symbols = { - [0x2202]=0x1D74F, [0x2207]=0x1D735, - }, -} - -local regular = { - tf = regular_tf, - it = regular_it, - bf = regular_bf, - bi = regular_bi, -} - -local sansserif_tf = { - digits = todigit(0x1D7E2), - ucletters = toupper(0x1D5A0), - lcletters = tolower(0x1D5BA), - lcgreek = regular_tf.lcgreek, - ucgreek = regular_tf.ucgreek, - symbols = regular_tf.symbols, -} - -local sansserif_it = { - digits = regular_tf.digits, - ucletters = toupper(0x1D608), - lcletters = tolower(0x1D622), - lcgreek = regular_tf.lcgreek, - ucgreek = regular_tf.ucgreek, - symbols = regular_tf.symbols, -} - -local sansserif_bf = { - digits = todigit(0x1D7EC), - ucletters = toupper(0x1D5D4), - lcletters = tolower(0x1D5EE), - ucgreek = { - [0x0391]=0x1D756, [0x0392]=0x1D757, [0x0393]=0x1D758, [0x0394]=0x1D759, [0x0395]=0x1D75A, - [0x0396]=0x1D75B, [0x0397]=0x1D75C, [0x0398]=0x1D75D, [0x0399]=0x1D75E, [0x039A]=0x1D75F, - [0x039B]=0x1D760, [0x039C]=0x1D761, [0x039D]=0x1D762, [0x039E]=0x1D763, [0x039F]=0x1D764, - [0x03A0]=0x1D765, [0x03A1]=0x1D766, [0x03A3]=0x1D768, [0x03A4]=0x1D769, [0x03A5]=0x1D76A, - [0x03A6]=0x1D76B, [0x03A7]=0x1D76C, [0x03A8]=0x1D76D, [0x03A9]=0x1D76E, - }, - lcgreek = { - [0x03B1]=0x1D770, [0x03B2]=0x1D771, [0x03B3]=0x1D772, [0x03B4]=0x1D773, [0x03B5]=0x1D774, - [0x03B6]=0x1D775, [0x03B7]=0x1D776, [0x03B8]=0x1D777, [0x03B9]=0x1D778, [0x03BA]=0x1D779, - [0x03BB]=0x1D77A, [0x03BC]=0x1D77B, [0x03BD]=0x1D77C, [0x03BE]=0x1D77D, [0x03BF]=0x1D77E, - [0x03C0]=0x1D77F, [0x03C1]=0x1D780, [0x03C2]=0x1D781, [0x03C3]=0x1D782, [0x03C4]=0x1D783, - [0x03C5]=0x1D784, [0x03C6]=0x1D785, [0x03C7]=0x1D786, [0x03C8]=0x1D787, [0x03C9]=0x1D788, - [0x03D1]=0x1D78B, [0x03D5]=0x1D78D, [0x03D6]=0x1D78F, [0x03F0]=0x1D78C, [0x03F1]=0x1D78E, - [0x03F4]=0x1D767, [0x03F5]=0x1D78A, - }, - symbols = { - [0x2202]=0x1D789, [0x2207]=0x1D76F, - }, -} - -local sansserif_bi = { - 
digits = sansserif_bf.digits, - ucletters = toupper(0x1D63C), - lcletters = tolower(0x1D656), - ucgreek = { - [0x0391]=0x1D790, [0x0392]=0x1D791, [0x0393]=0x1D792, [0x0394]=0x1D793, [0x0395]=0x1D794, - [0x0396]=0x1D795, [0x0397]=0x1D796, [0x0398]=0x1D797, [0x0399]=0x1D798, [0x039A]=0x1D799, - [0x039B]=0x1D79A, [0x039C]=0x1D79B, [0x039D]=0x1D79C, [0x039E]=0x1D79D, [0x039F]=0x1D79E, - [0x03A0]=0x1D79F, [0x03A1]=0x1D7A0, [0x03A3]=0x1D7A2, [0x03A4]=0x1D7A3, [0x03A5]=0x1D7A4, - [0x03A6]=0x1D7A5, [0x03A7]=0x1D7A6, [0x03A8]=0x1D7A7, [0x03A9]=0x1D7A8, - }, - lcgreek = { - [0x03B1]=0x1D7AA, [0x03B2]=0x1D7AB, [0x03B3]=0x1D7AC, [0x03B4]=0x1D7AD, [0x03B5]=0x1D7AE, - [0x03B6]=0x1D7AF, [0x03B7]=0x1D7B0, [0x03B8]=0x1D7B1, [0x03B9]=0x1D7B2, [0x03BA]=0x1D7B3, - [0x03BB]=0x1D7B4, [0x03BC]=0x1D7B5, [0x03BD]=0x1D7B6, [0x03BE]=0x1D7B7, [0x03BF]=0x1D7B8, - [0x03C0]=0x1D7B9, [0x03C1]=0x1D7BA, [0x03C2]=0x1D7BB, [0x03C3]=0x1D7BC, [0x03C4]=0x1D7BD, - [0x03C5]=0x1D7BE, [0x03C6]=0x1D7BF, [0x03C7]=0x1D7C0, [0x03C8]=0x1D7C1, [0x03C9]=0x1D7C2, - [0x03D1]=0x1D7C5, [0x03D5]=0x1D7C7, [0x03D6]=0x1D7C9, [0x03F0]=0x1D7C6, [0x03F1]=0x1D7C8, - [0x03F4]=0x1D7A1, [0x03F5]=0x1D7C4, - }, - symbols = { - [0x2202]=0x1D7C3, [0x2207]=0x1D7A9, - }, -} - -local sansserif = { - tf = sansserif_tf, - it = sansserif_it, - bf = sansserif_bf, - bi = sansserif_bi, -} - -local monospaced_tf = { - digits = todigit(0x1D7F6), - ucletters = toupper(0x1D670), - lcletters = tolower(0x1D68A), - lcgreek = sansserif_tf.lcgreek, - ucgreek = sansserif_tf.ucgreek, - symbols = sansserif_tf.symbols, -} - -local monospaced = { - tf = monospaced_tf, - it = sansserif_tf, - bf = sansserif_tf, - bi = sansserif_bf, -} - -local blackboard_tf = { - digits = todigit(0x1D7D8), - ucletters = { -- C H N P Q R Z - [0x00041]=0x1D538, [0x00042]=0x1D539, [0x00043]=0x02102, [0x00044]=0x1D53B, [0x00045]=0x1D53C, - [0x00046]=0x1D53D, [0x00047]=0x1D53E, [0x00048]=0x0210D, [0x00049]=0x1D540, [0x0004A]=0x1D541, - [0x0004B]=0x1D542, [0x0004C]=0x1D543, [0x0004D]=0x1D544, [0x0004E]=0x02115, [0x0004F]=0x1D546, - [0x00050]=0x02119, [0x00051]=0x0211A, [0x00052]=0x0211D, [0x00053]=0x1D54A, [0x00054]=0x1D54B, - [0x00055]=0x1D54C, [0x00056]=0x1D54D, [0x00057]=0x1D54E, [0x00058]=0x1D54F, [0x00059]=0x1D550, - [0x0005A]=0x02124, - }, - lcletters = tolower(0x1D552), - lcgreek = { -- gamma pi - [0x03B3]=0x0213C, [0x03C0]=0x0213D, - }, - ucgreek = { -- Gamma pi - [0x0393]=0x0213E, [0x03A0]=0x0213F, - }, - symbols = { -- sum - [0x2211]=0x02140, - }, -} - -blackboard_tf.lcgreek = merged(regular_tf.lcgreek, blackboard_tf.lcgreek) -blackboard_tf.ucgreek = merged(regular_tf.ucgreek, blackboard_tf.ucgreek) -blackboard_tf.symbols = merged(regular_tf.symbols, blackboard_tf.symbols) - -local blackboard = { - tf = blackboard_tf, - it = blackboard_tf, - bf = blackboard_tf, - bi = blackboard_tf, -} - -local fraktur_tf= { - digits = regular_tf.digits, - ucletters = { -- C H I R Z - [0x00041]=0x1D504, [0x00042]=0x1D505, [0x00043]=0x0212D, [0x00044]=0x1D507, [0x00045]=0x1D508, - [0x00046]=0x1D509, [0x00047]=0x1D50A, [0x00048]=0x0210C, [0x00049]=0x02111, [0x0004A]=0x1D50D, - [0x0004B]=0x1D50E, [0x0004C]=0x1D50F, [0x0004D]=0x1D510, [0x0004E]=0x1D511, [0x0004F]=0x1D512, - [0x00050]=0x1D513, [0x00051]=0x1D514, [0x00052]=0x0211C, [0x00053]=0x1D516, [0x00054]=0x1D517, - [0x00055]=0x1D518, [0x00056]=0x1D519, [0x00057]=0x1D51A, [0x00058]=0x1D51B, [0x00059]=0x1D51C, - [0x0005A]=0x02128, - }, - lcletters = tolower(0x1D51E), - lcgreek = regular_tf.lcgreek, - ucgreek = regular_tf.ucgreek, - symbols = regular_tf.symbols, 
-} - -local fraktur_bf = { - digits = regular_bf.digits, - ucletters = toupper(0x1D56C), - lcletters = tolower(0x1D586), - lcgreek = regular_bf.lcgreek, - ucgreek = regular_bf.ucgreek, - symbols = regular_bf.symbols, -} - -local fraktur = { -- ok - tf = fraktur_tf, - bf = fraktur_bf, - it = fraktur_tf, - bi = fraktur_bf, -} - -local script_tf = { - digits = regular_tf.digits, - ucletters = { -- B E F H I L M R -- P 2118 - [0x00041]=0x1D49C, [0x00042]=0x0212C, [0x00043]=0x1D49E, [0x00044]=0x1D49F, [0x00045]=0x02130, - [0x00046]=0x02131, [0x00047]=0x1D4A2, [0x00048]=0x0210B, [0x00049]=0x02110, [0x0004A]=0x1D4A5, - [0x0004B]=0x1D4A6, [0x0004C]=0x02112, [0x0004D]=0x02133, [0x0004E]=0x1D4A9, [0x0004F]=0x1D4AA, - [0x00050]=0x1D4AB, [0x00051]=0x1D4AC, [0x00052]=0x0211B, [0x00053]=0x1D4AE, [0x00054]=0x1D4AF, - [0x00055]=0x1D4B0, [0x00056]=0x1D4B1, [0x00057]=0x1D4B2, [0x00058]=0x1D4B3, [0x00059]=0x1D4B4, - [0x0005A]=0x1D4B5, - }, - lcletters = { -- E G O -- L 2113 - [0x00061]=0x1D4B6, [0x00062]=0x1D4B7, [0x00063]=0x1D4B8, [0x00064]=0x1D4B9, [0x00065]=0x0212F, - [0x00066]=0x1D4BB, [0x00067]=0x0210A, [0x00068]=0x1D4BD, [0x00069]=0x1D4BE, [0x0006A]=0x1D4BF, - [0x0006B]=0x1D4C0, [0x0006C]=0x1D4C1, [0x0006D]=0x1D4C2, [0x0006E]=0x1D4C3, [0x0006F]=0x02134, - [0x00070]=0x1D4C5, [0x00071]=0x1D4C6, [0x00072]=0x1D4C7, [0x00073]=0x1D4C8, [0x00074]=0x1D4C9, - [0x00075]=0x1D4CA, [0x00076]=0x1D4CB, [0x00077]=0x1D4CC, [0x00078]=0x1D4CD, [0x00079]=0x1D4CE, - [0x0007A]=0x1D4CF, - }, - lcgreek = regular_tf.lcgreek, - ucgreek = regular_tf.ucgreek, - symbols = regular_tf.symbols, -} - -local script_bf = { - digits = regular_bf.digits, - ucletters = toupper(0x1D4D0), - lcletters = tolower(0x1D4EA), - lcgreek = regular_bf.lcgreek, - ucgreek = regular_bf.ucgreek, - symbols = regular_bf.symbols, -} - -local script = { - tf = script_tf, - bf = script_bf, - it = script_tf, - bi = script_bf, -} - -local alphabets = allocate { - regular = regular, - sansserif = sansserif, - monospaced = monospaced, - blackboard = blackboard, - fraktur = fraktur, - script = script, -} - -mathematics.alphabets = alphabets - -local boldmap = { } - -local function remap(tf,bf) - for _, alphabet in next, alphabets do - local tfdata = alphabet[tf] - local bfdata = alphabet[bf] - if tfdata then - for k, tfd in next, tfdata do - if type(tfd) == "table" then - local bfd = bfdata[k] - if bfd then - for n, u in next, tfd do - local bn = bfd[n] - if bn then - boldmap[u] = bn - end - end - end - end - end - end - end -end - -remap("tf","bf") -remap("it","bi") - -mathematics.boldmap = boldmap - -local mathremap = allocate { } - -for alphabet, styles in next, alphabets do -- per 9/6/2011 we also have attr for missing - for style, data in next, styles do - -- let's keep the long names (for tracing) - local n = #mathremap + 1 - data.attribute = n - data.alphabet = alphabet - data.style = style - mathremap[n] = data - end -end - -mathematics.mapremap = mathremap - --- beware, these are shared tables (no problem since they're not --- in unicode) - -alphabets.tt = monospaced -alphabets.ss = sansserif -alphabets.rm = regular -alphabets.bb = blackboard -alphabets.fr = fraktur -alphabets.sr = script - -alphabets.serif = regular -alphabets.type = monospaced -alphabets.teletype = monospaced - -regular.normal = regular_tf -regular.italic = regular_it -regular.bold = regular_bf -regular.bolditalic = regular_bi - -sansserif.normal = sansserif_tf -sansserif.italic = sansserif_it -sansserif.bold = sansserif_bf -sansserif.bolditalic = sansserif_bi - -monospaced.normal = 
monospaced_tf -monospaced.italic = monospaced_it -monospaced.bold = monospaced_bf -monospaced.bolditalic = monospaced_bi - -function mathematics.tostyle(attribute) - local r = mathremap[attribute] - return r and r.style or "tf" -end - -function mathematics.toname(attribute) - local r = mathremap[attribute] - return r and r.alphabet or "regular" -end - --- of course we could do some div/mod trickery instead - -local mathalphabet = attributes.private("mathalphabet") - -function mathematics.getboth(alphabet,style) - local data = alphabet and alphabets[alphabet] or regular - data = data[style or "tf"] or data.tf - return data and data.attribute -end - -function mathematics.getstyle(style) - local r = mathremap[texattribute[mathalphabet]] - local alphabet = r and r.alphabet or "regular" - local data = alphabets[alphabet][style] - return data and data.attribute -end - -function mathematics.syncboth(alphabet,style) - local data = alphabet and alphabets[alphabet] or regular - data = style and data[style] or data.tf - texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet] -end - -function mathematics.syncstyle(style) - local r = mathremap[texattribute[mathalphabet]] - local alphabet = r and r.alphabet or "regular" - local data = alphabets[alphabet][style] - texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet] -end - -function mathematics.syncname(alphabet) - -- local r = mathremap[mathalphabet] - local r = mathremap[texattribute[mathalphabet]] - local style = r and r.style or "tf" - local data = alphabets[alphabet][style] - texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet] -end - -local islcgreek = regular_tf.lcgreek -local isucgreek = regular_tf.ucgreek -local issygreek = regular_tf.symbols -local isgreek = merged(islcgreek,isucgreek,issygreek) - -local greekremapping = { - [1] = { what = "unchanged" }, -- upright - [2] = { what = "upright", it = "tf", bi = "bf" }, -- upright - [3] = { what = "italic", tf = "it", bf = "bi" }, -- italic -} - -local usedremap = { } - -local function resolver(map) - return function (t,k) - local v = - map.digits [k] or - map.lcletters[k] or map.ucletters[k] or - map.lcgreek [k] or map.ucgreek [k] or - map.symbols [k] or k - t[k] = v - return v - end -end - -for k, v in next, mathremap do - local t = { } - setmetatableindex(t,resolver(v)) - usedremap[k] = t -end - -local function remapgreek(mathalphabet,how,detail,char) - local r = mathremap[mathalphabet] -- what if 0 - local alphabet = r and r.alphabet or "regular" - local style = r and r.style or "tf" - local remapping = greekremapping[how] - if trace_greek then - report_remapping("greek %s, %s char %C, alphabet %a %a, method %a","before",detail,char,alphabet,style,remapping.what) - end - local newstyle = remapping[style] - if newstyle then - local data = alphabets[alphabet][newstyle] -- always something - mathalphabet = data and data.attribute or mathalphabet - style = newstyle - end - if trace_greek then - report_remapping("greek %s, %s char %C, alphabet %a %a, method %a","after",detail,char,alphabet,style,remapping.what) - end - return mathalphabet, style -end - -function mathematics.remapalphabets(char,mathalphabet,mathgreek) - if not mathalphabet then - return - end - if mathgreek and mathgreek > 0 then - if not isgreek[char] then - -- nothing needed - elseif islcgreek[char] then - local lc = extract(mathgreek,4,4) - if lc > 1 then - mathalphabet = remapgreek(mathalphabet,lc,"lowercase",char) - end - elseif 
isucgreek[char] then - local uc = extract(mathgreek,0,4) - if uc > 1 then - mathalphabet = remapgreek(mathalphabet,uc,"uppercase",char) - end - elseif issygreek[char] then - local sy = extract(mathgreek,8,4) - if sy > 1 then - mathalphabet = remapgreek(mathalphabet,sy,"symbol",char) - end - end - end - if mathalphabet > 0 then - local remap = usedremap[mathalphabet] -- redundant check - if remap then - local newchar = remap[char] - return newchar ~= char and newchar - end - end - -- return nil -end - --- begin of experiment - -local fallback = { - tf = "bf", - it = "bi", - bf = "tf", - bi = "it", -} - -function mathematics.fallbackstyleattr(attribute) - local r = mathremap[attribute] - local alphabet = r.alphabet or "regular" - local style = r.style or "tf" - local fback = fallback[style] - if fback then - local data = alphabets[alphabet][fback] - if data then - local attr = data.attribute - return attribute ~= attr and attr - end - end -end - --- end of experiment - -local function checkedcopy(characters,child,parent) - for k, v in next, child do - if not characters[v] then - characters[v] = characters[parent[k]] - end - end -end - -function mathematics.addfallbacks(main) - local characters = main.characters - checkedcopy(characters,regular.bf.ucgreek,regular.tf.ucgreek) - checkedcopy(characters,regular.bf.lcgreek,regular.tf.lcgreek) - checkedcopy(characters,regular.bi.ucgreek,regular.it.ucgreek) - checkedcopy(characters,regular.bi.lcgreek,regular.it.lcgreek) -end - --- interface - -commands.setmathattribute = mathematics.syncboth -commands.setmathalphabet = mathematics.syncname -commands.setmathstyle = mathematics.syncstyle +if not modules then modules = { } end modules ['math-map'] = { + version = 1.001, + comment = "companion to math-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: make sparse .. if self + +--[[ldx-- +
+<p>Remapping mathematics alphabets.</p>
    +--ldx]]-- + +-- oldstyle: not really mathematics but happened to be part of +-- the mathematics fonts in cmr +-- +-- persian: we will also provide mappers for other +-- scripts + +-- todo: alphabets namespace +-- maybe: script/scriptscript dynamic, + +-- to be looked into once the fonts are ready (will become font +-- goodie): +-- +-- (U+2202,U+1D715) : upright +-- (U+2202,U+1D715) : italic +-- (U+2202,U+1D715) : upright +-- +-- plus add them to the regular vectors below so that they honor \it etc + +local type, next = type, next +local floor, div = math.floor, math.div +local merged = table.merged +local extract = bit32.extract + +local allocate = utilities.storage.allocate +local texattribute = tex.attribute +local otffeatures = fonts.constructors.newfeatures("otf") +local registerotffeature = otffeatures.register +local setmetatableindex = table.setmetatableindex + +local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end) +local report_remapping = logs.reporter("mathematics","remapping") + +mathematics = mathematics or { } +local mathematics = mathematics + +-- Unfortunately some alphabets have gaps (thereby troubling all applications that +-- need to deal with math). Somewhat strange considering all those weird symbols that +-- were added afterwards. The following trickery (and data) is only to be used for +-- diagnostics and quick and dirty alphabet tracing (s-mat-10.mkiv) as we deal with +-- it otherwise. + +mathematics.gaps = { + [0x1D455] = 0x0210E, -- H + [0x1D49D] = 0x0212C, -- script B + [0x1D4A0] = 0x02130, -- script E + [0x1D4A1] = 0x02131, -- script F + [0x1D4A3] = 0x0210B, -- script H + [0x1D4A4] = 0x02110, -- script I + [0x1D4A7] = 0x02112, -- script L + [0x1D4A8] = 0x02133, -- script M + [0x1D4AD] = 0x0211B, -- script R + [0x1D4BA] = 0x0212F, -- script e + [0x1D4BC] = 0x0210A, -- script g + [0x1D4C4] = 0x02134, -- script o + [0x1D506] = 0x0212D, -- fraktur C + [0x1D50B] = 0x0210C, -- fraktur H + [0x1D50C] = 0x02111, -- fraktur I + [0x1D515] = 0x0211C, -- fraktur R + [0x1D51D] = 0x02128, -- fraktur Z + [0x1D53A] = 0x02102, -- bb C + [0x1D53F] = 0x0210D, -- bb H + [0x1D545] = 0x02115, -- bb N + [0x1D547] = 0x02119, -- bb P + [0x1D548] = 0x0211A, -- bb Q + [0x1D549] = 0x0211D, -- bb R + [0x1D551] = 0x02124, -- bb Z +} + +local function fillinmathgaps(tfmdata,key,value) + local mathgaps = mathematics.gaps + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + for gap, original in next, mathgaps do + if characters[original] and not characters[gap] then + characters [gap] = characters [original] + descriptions[gap] = descriptions[original] + end + end +end + +registerotffeature { + name = "mathgaps", + description = "plug gaps in math alphabets", + comment = "regular document sources should not depend on this", + manipulators = { + base = fillinmathgaps, + node = fillinmathgaps, + } +} + +-- we could use one level less and have tf etc be tables directly but the +-- following approach permits easier remapping of a-a, A-Z and 0-9 to +-- fallbacks; symbols is currently mostly greek + +local function todigit(n) local t = { } for i=0, 9 do t[0x00030+i] = n+i end return t end +local function toupper(n) local t = { } for i=0,25 do t[0x00041+i] = n+i end return t end +local function tolower(n) local t = { } for i=0,25 do t[0x00061+i] = n+i end return t end + +local regular_tf = { + digits = todigit(0x00030), + ucletters = toupper(0x00041), + lcletters = tolower(0x00061), + ucgreek = { + [0x0391]=0x0391, 
[0x0392]=0x0392, [0x0393]=0x0393, [0x0394]=0x0394, [0x0395]=0x0395, + [0x0396]=0x0396, [0x0397]=0x0397, [0x0398]=0x0398, [0x0399]=0x0399, [0x039A]=0x039A, + [0x039B]=0x039B, [0x039C]=0x039C, [0x039D]=0x039D, [0x039E]=0x039E, [0x039F]=0x039F, + [0x03A0]=0x03A0, [0x03A1]=0x03A1, [0x03A3]=0x03A3, [0x03A4]=0x03A4, [0x03A5]=0x03A5, + [0x03A6]=0x03A6, [0x03A7]=0x03A7, [0x03A8]=0x03A8, [0x03A9]=0x03A9, + }, + lcgreek = { + [0x03B1]=0x03B1, [0x03B2]=0x03B2, [0x03B3]=0x03B3, [0x03B4]=0x03B4, [0x03B5]=0x03B5, + [0x03B6]=0x03B6, [0x03B7]=0x03B7, [0x03B8]=0x03B8, [0x03B9]=0x03B9, [0x03BA]=0x03BA, + [0x03BB]=0x03BB, [0x03BC]=0x03BC, [0x03BD]=0x03BD, [0x03BE]=0x03BE, [0x03BF]=0x03BF, + [0x03C0]=0x03C0, [0x03C1]=0x03C1, [0x03C2]=0x03C2, [0x03C3]=0x03C3, [0x03C4]=0x03C4, + [0x03C5]=0x03C5, [0x03C6]=0x03C6, [0x03C7]=0x03C7, [0x03C8]=0x03C8, [0x03C9]=0x03C9, + [0x03D1]=0x03D1, [0x03D5]=0x03D5, [0x03D6]=0x03D6, [0x03F0]=0x03F0, [0x03F1]=0x03F1, + [0x03F4]=0x03F4, [0x03F5]=0x03F5, + }, + symbols = { + [0x2202]=0x2202, [0x2207]=0x2207, + }, +} + +local regular_it = { + digits = regular_tf.digits, + ucletters = toupper(0x1D434), + lcletters = { -- H + [0x00061]=0x1D44E, [0x00062]=0x1D44F, [0x00063]=0x1D450, [0x00064]=0x1D451, [0x00065]=0x1D452, + [0x00066]=0x1D453, [0x00067]=0x1D454, [0x00068]=0x0210E, [0x00069]=0x1D456, [0x0006A]=0x1D457, + [0x0006B]=0x1D458, [0x0006C]=0x1D459, [0x0006D]=0x1D45A, [0x0006E]=0x1D45B, [0x0006F]=0x1D45C, + [0x00070]=0x1D45D, [0x00071]=0x1D45E, [0x00072]=0x1D45F, [0x00073]=0x1D460, [0x00074]=0x1D461, + [0x00075]=0x1D462, [0x00076]=0x1D463, [0x00077]=0x1D464, [0x00078]=0x1D465, [0x00079]=0x1D466, + [0x0007A]=0x1D467, + }, + ucgreek = { + [0x0391]=0x1D6E2, [0x0392]=0x1D6E3, [0x0393]=0x1D6E4, [0x0394]=0x1D6E5, [0x0395]=0x1D6E6, + [0x0396]=0x1D6E7, [0x0397]=0x1D6E8, [0x0398]=0x1D6E9, [0x0399]=0x1D6EA, [0x039A]=0x1D6EB, + [0x039B]=0x1D6EC, [0x039C]=0x1D6ED, [0x039D]=0x1D6EE, [0x039E]=0x1D6EF, [0x039F]=0x1D6F0, + [0x03A0]=0x1D6F1, [0x03A1]=0x1D6F2, [0x03A3]=0x1D6F4, [0x03A4]=0x1D6F5, [0x03A5]=0x1D6F6, + [0x03A6]=0x1D6F7, [0x03A7]=0x1D6F8, [0x03A8]=0x1D6F9, [0x03A9]=0x1D6FA, + }, + lcgreek = { + [0x03B1]=0x1D6FC, [0x03B2]=0x1D6FD, [0x03B3]=0x1D6FE, [0x03B4]=0x1D6FF, [0x03B5]=0x1D700, + [0x03B6]=0x1D701, [0x03B7]=0x1D702, [0x03B8]=0x1D703, [0x03B9]=0x1D704, [0x03BA]=0x1D705, + [0x03BB]=0x1D706, [0x03BC]=0x1D707, [0x03BD]=0x1D708, [0x03BE]=0x1D709, [0x03BF]=0x1D70A, + [0x03C0]=0x1D70B, [0x03C1]=0x1D70C, [0x03C2]=0x1D70D, [0x03C3]=0x1D70E, [0x03C4]=0x1D70F, + [0x03C5]=0x1D710, [0x03C6]=0x1D711, [0x03C7]=0x1D712, [0x03C8]=0x1D713, [0x03C9]=0x1D714, + [0x03D1]=0x1D717, [0x03D5]=0x1D719, [0x03D6]=0x1D71B, [0x03F0]=0x1D718, [0x03F1]=0x1D71A, + [0x03F4]=0x1D6F3, [0x03F5]=0x1D716, + }, + symbols = { + [0x2202]=0x1D715, [0x2207]=0x1D6FB, + }, +} + +local regular_bf= { + digits = todigit(0x1D7CE), + ucletters = toupper(0x1D400), + lcletters = tolower(0x1D41A), + ucgreek = { + [0x0391]=0x1D6A8, [0x0392]=0x1D6A9, [0x0393]=0x1D6AA, [0x0394]=0x1D6AB, [0x0395]=0x1D6AC, + [0x0396]=0x1D6AD, [0x0397]=0x1D6AE, [0x0398]=0x1D6AF, [0x0399]=0x1D6B0, [0x039A]=0x1D6B1, + [0x039B]=0x1D6B2, [0x039C]=0x1D6B3, [0x039D]=0x1D6B4, [0x039E]=0x1D6B5, [0x039F]=0x1D6B6, + [0x03A0]=0x1D6B7, [0x03A1]=0x1D6B8, [0x03A3]=0x1D6BA, [0x03A4]=0x1D6BB, [0x03A5]=0x1D6BC, + [0x03A6]=0x1D6BD, [0x03A7]=0x1D6BE, [0x03A8]=0x1D6BF, [0x03A9]=0x1D6C0, + }, + lcgreek = { + [0x03B1]=0x1D6C2, [0x03B2]=0x1D6C3, [0x03B3]=0x1D6C4, [0x03B4]=0x1D6C5, [0x03B5]=0x1D6C6, + [0x03B6]=0x1D6C7, [0x03B7]=0x1D6C8, [0x03B8]=0x1D6C9, [0x03B9]=0x1D6CA, 
[0x03BA]=0x1D6CB, + [0x03BB]=0x1D6CC, [0x03BC]=0x1D6CD, [0x03BD]=0x1D6CE, [0x03BE]=0x1D6CF, [0x03BF]=0x1D6D0, + [0x03C0]=0x1D6D1, [0x03C1]=0x1D6D2, [0x03C2]=0x1D6D3, [0x03C3]=0x1D6D4, [0x03C4]=0x1D6D5, + [0x03C5]=0x1D6D6, [0x03C6]=0x1D6D7, [0x03C7]=0x1D6D8, [0x03C8]=0x1D6D9, [0x03C9]=0x1D6DA, + [0x03D1]=0x1D6DD, [0x03D5]=0x1D6DF, [0x03D6]=0x1D6E1, [0x03F0]=0x1D6DE, [0x03F1]=0x1D6E0, + [0x03F4]=0x1D6B9, [0x03F5]=0x1D6DC, + }, + symbols = { + [0x2202]=0x1D6DB, [0x2207]=0x1D6C1, + }, +} + +local regular_bi = { + digits = regular_bf.digits, + ucletters = toupper(0x1D468), + lcletters = tolower(0x1D482), + ucgreek = { + [0x0391]=0x1D71C, [0x0392]=0x1D71D, [0x0393]=0x1D71E, [0x0394]=0x1D71F, [0x0395]=0x1D720, + [0x0396]=0x1D721, [0x0397]=0x1D722, [0x0398]=0x1D723, [0x0399]=0x1D724, [0x039A]=0x1D725, + [0x039B]=0x1D726, [0x039C]=0x1D727, [0x039D]=0x1D728, [0x039E]=0x1D729, [0x039F]=0x1D72A, + [0x03A0]=0x1D72B, [0x03A1]=0x1D72C, [0x03A3]=0x1D72E, [0x03A4]=0x1D72F, [0x03A5]=0x1D730, + [0x03A6]=0x1D731, [0x03A7]=0x1D732, [0x03A8]=0x1D733, [0x03A9]=0x1D734, + }, + lcgreek = { + [0x03B1]=0x1D736, [0x03B2]=0x1D737, [0x03B3]=0x1D738, [0x03B4]=0x1D739, [0x03B5]=0x1D73A, + [0x03B6]=0x1D73B, [0x03B7]=0x1D73C, [0x03B8]=0x1D73D, [0x03B9]=0x1D73E, [0x03BA]=0x1D73F, + [0x03BB]=0x1D740, [0x03BC]=0x1D741, [0x03BD]=0x1D742, [0x03BE]=0x1D743, [0x03BF]=0x1D744, + [0x03C0]=0x1D745, [0x03C1]=0x1D746, [0x03C2]=0x1D747, [0x03C3]=0x1D748, [0x03C4]=0x1D749, + [0x03C5]=0x1D74A, [0x03C6]=0x1D74B, [0x03C7]=0x1D74C, [0x03C8]=0x1D74D, [0x03C9]=0x1D74E, + [0x03D1]=0x1D751, [0x03D5]=0x1D753, [0x03D6]=0x1D755, [0x03F0]=0x1D752, [0x03F1]=0x1D754, + [0x03F4]=0x1D72D, [0x03F5]=0x1D750, + }, + symbols = { + [0x2202]=0x1D74F, [0x2207]=0x1D735, + }, +} + +local regular = { + tf = regular_tf, + it = regular_it, + bf = regular_bf, + bi = regular_bi, +} + +local sansserif_tf = { + digits = todigit(0x1D7E2), + ucletters = toupper(0x1D5A0), + lcletters = tolower(0x1D5BA), + lcgreek = regular_tf.lcgreek, + ucgreek = regular_tf.ucgreek, + symbols = regular_tf.symbols, +} + +local sansserif_it = { + digits = regular_tf.digits, + ucletters = toupper(0x1D608), + lcletters = tolower(0x1D622), + lcgreek = regular_tf.lcgreek, + ucgreek = regular_tf.ucgreek, + symbols = regular_tf.symbols, +} + +local sansserif_bf = { + digits = todigit(0x1D7EC), + ucletters = toupper(0x1D5D4), + lcletters = tolower(0x1D5EE), + ucgreek = { + [0x0391]=0x1D756, [0x0392]=0x1D757, [0x0393]=0x1D758, [0x0394]=0x1D759, [0x0395]=0x1D75A, + [0x0396]=0x1D75B, [0x0397]=0x1D75C, [0x0398]=0x1D75D, [0x0399]=0x1D75E, [0x039A]=0x1D75F, + [0x039B]=0x1D760, [0x039C]=0x1D761, [0x039D]=0x1D762, [0x039E]=0x1D763, [0x039F]=0x1D764, + [0x03A0]=0x1D765, [0x03A1]=0x1D766, [0x03A3]=0x1D768, [0x03A4]=0x1D769, [0x03A5]=0x1D76A, + [0x03A6]=0x1D76B, [0x03A7]=0x1D76C, [0x03A8]=0x1D76D, [0x03A9]=0x1D76E, + }, + lcgreek = { + [0x03B1]=0x1D770, [0x03B2]=0x1D771, [0x03B3]=0x1D772, [0x03B4]=0x1D773, [0x03B5]=0x1D774, + [0x03B6]=0x1D775, [0x03B7]=0x1D776, [0x03B8]=0x1D777, [0x03B9]=0x1D778, [0x03BA]=0x1D779, + [0x03BB]=0x1D77A, [0x03BC]=0x1D77B, [0x03BD]=0x1D77C, [0x03BE]=0x1D77D, [0x03BF]=0x1D77E, + [0x03C0]=0x1D77F, [0x03C1]=0x1D780, [0x03C2]=0x1D781, [0x03C3]=0x1D782, [0x03C4]=0x1D783, + [0x03C5]=0x1D784, [0x03C6]=0x1D785, [0x03C7]=0x1D786, [0x03C8]=0x1D787, [0x03C9]=0x1D788, + [0x03D1]=0x1D78B, [0x03D5]=0x1D78D, [0x03D6]=0x1D78F, [0x03F0]=0x1D78C, [0x03F1]=0x1D78E, + [0x03F4]=0x1D767, [0x03F5]=0x1D78A, + }, + symbols = { + [0x2202]=0x1D789, [0x2207]=0x1D76F, + }, +} + +local sansserif_bi = { + 
digits = sansserif_bf.digits, + ucletters = toupper(0x1D63C), + lcletters = tolower(0x1D656), + ucgreek = { + [0x0391]=0x1D790, [0x0392]=0x1D791, [0x0393]=0x1D792, [0x0394]=0x1D793, [0x0395]=0x1D794, + [0x0396]=0x1D795, [0x0397]=0x1D796, [0x0398]=0x1D797, [0x0399]=0x1D798, [0x039A]=0x1D799, + [0x039B]=0x1D79A, [0x039C]=0x1D79B, [0x039D]=0x1D79C, [0x039E]=0x1D79D, [0x039F]=0x1D79E, + [0x03A0]=0x1D79F, [0x03A1]=0x1D7A0, [0x03A3]=0x1D7A2, [0x03A4]=0x1D7A3, [0x03A5]=0x1D7A4, + [0x03A6]=0x1D7A5, [0x03A7]=0x1D7A6, [0x03A8]=0x1D7A7, [0x03A9]=0x1D7A8, + }, + lcgreek = { + [0x03B1]=0x1D7AA, [0x03B2]=0x1D7AB, [0x03B3]=0x1D7AC, [0x03B4]=0x1D7AD, [0x03B5]=0x1D7AE, + [0x03B6]=0x1D7AF, [0x03B7]=0x1D7B0, [0x03B8]=0x1D7B1, [0x03B9]=0x1D7B2, [0x03BA]=0x1D7B3, + [0x03BB]=0x1D7B4, [0x03BC]=0x1D7B5, [0x03BD]=0x1D7B6, [0x03BE]=0x1D7B7, [0x03BF]=0x1D7B8, + [0x03C0]=0x1D7B9, [0x03C1]=0x1D7BA, [0x03C2]=0x1D7BB, [0x03C3]=0x1D7BC, [0x03C4]=0x1D7BD, + [0x03C5]=0x1D7BE, [0x03C6]=0x1D7BF, [0x03C7]=0x1D7C0, [0x03C8]=0x1D7C1, [0x03C9]=0x1D7C2, + [0x03D1]=0x1D7C5, [0x03D5]=0x1D7C7, [0x03D6]=0x1D7C9, [0x03F0]=0x1D7C6, [0x03F1]=0x1D7C8, + [0x03F4]=0x1D7A1, [0x03F5]=0x1D7C4, + }, + symbols = { + [0x2202]=0x1D7C3, [0x2207]=0x1D7A9, + }, +} + +local sansserif = { + tf = sansserif_tf, + it = sansserif_it, + bf = sansserif_bf, + bi = sansserif_bi, +} + +local monospaced_tf = { + digits = todigit(0x1D7F6), + ucletters = toupper(0x1D670), + lcletters = tolower(0x1D68A), + lcgreek = sansserif_tf.lcgreek, + ucgreek = sansserif_tf.ucgreek, + symbols = sansserif_tf.symbols, +} + +local monospaced = { + tf = monospaced_tf, + it = sansserif_tf, + bf = sansserif_tf, + bi = sansserif_bf, +} + +local blackboard_tf = { + digits = todigit(0x1D7D8), + ucletters = { -- C H N P Q R Z + [0x00041]=0x1D538, [0x00042]=0x1D539, [0x00043]=0x02102, [0x00044]=0x1D53B, [0x00045]=0x1D53C, + [0x00046]=0x1D53D, [0x00047]=0x1D53E, [0x00048]=0x0210D, [0x00049]=0x1D540, [0x0004A]=0x1D541, + [0x0004B]=0x1D542, [0x0004C]=0x1D543, [0x0004D]=0x1D544, [0x0004E]=0x02115, [0x0004F]=0x1D546, + [0x00050]=0x02119, [0x00051]=0x0211A, [0x00052]=0x0211D, [0x00053]=0x1D54A, [0x00054]=0x1D54B, + [0x00055]=0x1D54C, [0x00056]=0x1D54D, [0x00057]=0x1D54E, [0x00058]=0x1D54F, [0x00059]=0x1D550, + [0x0005A]=0x02124, + }, + lcletters = tolower(0x1D552), + lcgreek = { -- gamma pi + [0x03B3]=0x0213C, [0x03C0]=0x0213D, + }, + ucgreek = { -- Gamma pi + [0x0393]=0x0213E, [0x03A0]=0x0213F, + }, + symbols = { -- sum + [0x2211]=0x02140, + }, +} + +blackboard_tf.lcgreek = merged(regular_tf.lcgreek, blackboard_tf.lcgreek) +blackboard_tf.ucgreek = merged(regular_tf.ucgreek, blackboard_tf.ucgreek) +blackboard_tf.symbols = merged(regular_tf.symbols, blackboard_tf.symbols) + +local blackboard = { + tf = blackboard_tf, + it = blackboard_tf, + bf = blackboard_tf, + bi = blackboard_tf, +} + +local fraktur_tf= { + digits = regular_tf.digits, + ucletters = { -- C H I R Z + [0x00041]=0x1D504, [0x00042]=0x1D505, [0x00043]=0x0212D, [0x00044]=0x1D507, [0x00045]=0x1D508, + [0x00046]=0x1D509, [0x00047]=0x1D50A, [0x00048]=0x0210C, [0x00049]=0x02111, [0x0004A]=0x1D50D, + [0x0004B]=0x1D50E, [0x0004C]=0x1D50F, [0x0004D]=0x1D510, [0x0004E]=0x1D511, [0x0004F]=0x1D512, + [0x00050]=0x1D513, [0x00051]=0x1D514, [0x00052]=0x0211C, [0x00053]=0x1D516, [0x00054]=0x1D517, + [0x00055]=0x1D518, [0x00056]=0x1D519, [0x00057]=0x1D51A, [0x00058]=0x1D51B, [0x00059]=0x1D51C, + [0x0005A]=0x02128, + }, + lcletters = tolower(0x1D51E), + lcgreek = regular_tf.lcgreek, + ucgreek = regular_tf.ucgreek, + symbols = regular_tf.symbols, 
+} + +local fraktur_bf = { + digits = regular_bf.digits, + ucletters = toupper(0x1D56C), + lcletters = tolower(0x1D586), + lcgreek = regular_bf.lcgreek, + ucgreek = regular_bf.ucgreek, + symbols = regular_bf.symbols, +} + +local fraktur = { -- ok + tf = fraktur_tf, + bf = fraktur_bf, + it = fraktur_tf, + bi = fraktur_bf, +} + +local script_tf = { + digits = regular_tf.digits, + ucletters = { -- B E F H I L M R -- P 2118 + [0x00041]=0x1D49C, [0x00042]=0x0212C, [0x00043]=0x1D49E, [0x00044]=0x1D49F, [0x00045]=0x02130, + [0x00046]=0x02131, [0x00047]=0x1D4A2, [0x00048]=0x0210B, [0x00049]=0x02110, [0x0004A]=0x1D4A5, + [0x0004B]=0x1D4A6, [0x0004C]=0x02112, [0x0004D]=0x02133, [0x0004E]=0x1D4A9, [0x0004F]=0x1D4AA, + [0x00050]=0x1D4AB, [0x00051]=0x1D4AC, [0x00052]=0x0211B, [0x00053]=0x1D4AE, [0x00054]=0x1D4AF, + [0x00055]=0x1D4B0, [0x00056]=0x1D4B1, [0x00057]=0x1D4B2, [0x00058]=0x1D4B3, [0x00059]=0x1D4B4, + [0x0005A]=0x1D4B5, + }, + lcletters = { -- E G O -- L 2113 + [0x00061]=0x1D4B6, [0x00062]=0x1D4B7, [0x00063]=0x1D4B8, [0x00064]=0x1D4B9, [0x00065]=0x0212F, + [0x00066]=0x1D4BB, [0x00067]=0x0210A, [0x00068]=0x1D4BD, [0x00069]=0x1D4BE, [0x0006A]=0x1D4BF, + [0x0006B]=0x1D4C0, [0x0006C]=0x1D4C1, [0x0006D]=0x1D4C2, [0x0006E]=0x1D4C3, [0x0006F]=0x02134, + [0x00070]=0x1D4C5, [0x00071]=0x1D4C6, [0x00072]=0x1D4C7, [0x00073]=0x1D4C8, [0x00074]=0x1D4C9, + [0x00075]=0x1D4CA, [0x00076]=0x1D4CB, [0x00077]=0x1D4CC, [0x00078]=0x1D4CD, [0x00079]=0x1D4CE, + [0x0007A]=0x1D4CF, + }, + lcgreek = regular_tf.lcgreek, + ucgreek = regular_tf.ucgreek, + symbols = regular_tf.symbols, +} + +local script_bf = { + digits = regular_bf.digits, + ucletters = toupper(0x1D4D0), + lcletters = tolower(0x1D4EA), + lcgreek = regular_bf.lcgreek, + ucgreek = regular_bf.ucgreek, + symbols = regular_bf.symbols, +} + +local script = { + tf = script_tf, + bf = script_bf, + it = script_tf, + bi = script_bf, +} + +local alphabets = allocate { + regular = regular, + sansserif = sansserif, + monospaced = monospaced, + blackboard = blackboard, + fraktur = fraktur, + script = script, +} + +mathematics.alphabets = alphabets + +local boldmap = { } + +local function remap(tf,bf) + for _, alphabet in next, alphabets do + local tfdata = alphabet[tf] + local bfdata = alphabet[bf] + if tfdata then + for k, tfd in next, tfdata do + if type(tfd) == "table" then + local bfd = bfdata[k] + if bfd then + for n, u in next, tfd do + local bn = bfd[n] + if bn then + boldmap[u] = bn + end + end + end + end + end + end + end +end + +remap("tf","bf") +remap("it","bi") + +mathematics.boldmap = boldmap + +local mathremap = allocate { } + +for alphabet, styles in next, alphabets do -- per 9/6/2011 we also have attr for missing + for style, data in next, styles do + -- let's keep the long names (for tracing) + local n = #mathremap + 1 + data.attribute = n + data.alphabet = alphabet + data.style = style + mathremap[n] = data + end +end + +mathematics.mapremap = mathremap + +-- beware, these are shared tables (no problem since they're not +-- in unicode) + +alphabets.tt = monospaced +alphabets.ss = sansserif +alphabets.rm = regular +alphabets.bb = blackboard +alphabets.fr = fraktur +alphabets.sr = script + +alphabets.serif = regular +alphabets.type = monospaced +alphabets.teletype = monospaced + +regular.normal = regular_tf +regular.italic = regular_it +regular.bold = regular_bf +regular.bolditalic = regular_bi + +sansserif.normal = sansserif_tf +sansserif.italic = sansserif_it +sansserif.bold = sansserif_bf +sansserif.bolditalic = sansserif_bi + +monospaced.normal = 
monospaced_tf +monospaced.italic = monospaced_it +monospaced.bold = monospaced_bf +monospaced.bolditalic = monospaced_bi + +function mathematics.tostyle(attribute) + local r = mathremap[attribute] + return r and r.style or "tf" +end + +function mathematics.toname(attribute) + local r = mathremap[attribute] + return r and r.alphabet or "regular" +end + +-- of course we could do some div/mod trickery instead + +local mathalphabet = attributes.private("mathalphabet") + +function mathematics.getboth(alphabet,style) + local data = alphabet and alphabets[alphabet] or regular + data = data[style or "tf"] or data.tf + return data and data.attribute +end + +function mathematics.getstyle(style) + local r = mathremap[texattribute[mathalphabet]] + local alphabet = r and r.alphabet or "regular" + local data = alphabets[alphabet][style] + return data and data.attribute +end + +function mathematics.syncboth(alphabet,style) + local data = alphabet and alphabets[alphabet] or regular + data = style and data[style] or data.tf + texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet] +end + +function mathematics.syncstyle(style) + local r = mathremap[texattribute[mathalphabet]] + local alphabet = r and r.alphabet or "regular" + local data = alphabets[alphabet][style] + texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet] +end + +function mathematics.syncname(alphabet) + -- local r = mathremap[mathalphabet] + local r = mathremap[texattribute[mathalphabet]] + local style = r and r.style or "tf" + local data = alphabets[alphabet][style] + texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet] +end + +local islcgreek = regular_tf.lcgreek +local isucgreek = regular_tf.ucgreek +local issygreek = regular_tf.symbols +local isgreek = merged(islcgreek,isucgreek,issygreek) + +local greekremapping = { + [1] = { what = "unchanged" }, -- upright + [2] = { what = "upright", it = "tf", bi = "bf" }, -- upright + [3] = { what = "italic", tf = "it", bf = "bi" }, -- italic +} + +local usedremap = { } + +local function resolver(map) + return function (t,k) + local v = + map.digits [k] or + map.lcletters[k] or map.ucletters[k] or + map.lcgreek [k] or map.ucgreek [k] or + map.symbols [k] or k + t[k] = v + return v + end +end + +for k, v in next, mathremap do + local t = { } + setmetatableindex(t,resolver(v)) + usedremap[k] = t +end + +local function remapgreek(mathalphabet,how,detail,char) + local r = mathremap[mathalphabet] -- what if 0 + local alphabet = r and r.alphabet or "regular" + local style = r and r.style or "tf" + local remapping = greekremapping[how] + if trace_greek then + report_remapping("greek %s, %s char %C, alphabet %a %a, method %a","before",detail,char,alphabet,style,remapping.what) + end + local newstyle = remapping[style] + if newstyle then + local data = alphabets[alphabet][newstyle] -- always something + mathalphabet = data and data.attribute or mathalphabet + style = newstyle + end + if trace_greek then + report_remapping("greek %s, %s char %C, alphabet %a %a, method %a","after",detail,char,alphabet,style,remapping.what) + end + return mathalphabet, style +end + +function mathematics.remapalphabets(char,mathalphabet,mathgreek) + if not mathalphabet then + return + end + if mathgreek and mathgreek > 0 then + if not isgreek[char] then + -- nothing needed + elseif islcgreek[char] then + local lc = extract(mathgreek,4,4) + if lc > 1 then + mathalphabet = remapgreek(mathalphabet,lc,"lowercase",char) + end + elseif 
isucgreek[char] then + local uc = extract(mathgreek,0,4) + if uc > 1 then + mathalphabet = remapgreek(mathalphabet,uc,"uppercase",char) + end + elseif issygreek[char] then + local sy = extract(mathgreek,8,4) + if sy > 1 then + mathalphabet = remapgreek(mathalphabet,sy,"symbol",char) + end + end + end + if mathalphabet > 0 then + local remap = usedremap[mathalphabet] -- redundant check + if remap then + local newchar = remap[char] + return newchar ~= char and newchar + end + end + -- return nil +end + +-- begin of experiment + +local fallback = { + tf = "bf", + it = "bi", + bf = "tf", + bi = "it", +} + +function mathematics.fallbackstyleattr(attribute) + local r = mathremap[attribute] + local alphabet = r.alphabet or "regular" + local style = r.style or "tf" + local fback = fallback[style] + if fback then + local data = alphabets[alphabet][fback] + if data then + local attr = data.attribute + return attribute ~= attr and attr + end + end +end + +-- end of experiment + +local function checkedcopy(characters,child,parent) + for k, v in next, child do + if not characters[v] then + characters[v] = characters[parent[k]] + end + end +end + +function mathematics.addfallbacks(main) + local characters = main.characters + checkedcopy(characters,regular.bf.ucgreek,regular.tf.ucgreek) + checkedcopy(characters,regular.bf.lcgreek,regular.tf.lcgreek) + checkedcopy(characters,regular.bi.ucgreek,regular.it.ucgreek) + checkedcopy(characters,regular.bi.lcgreek,regular.it.lcgreek) +end + +-- interface + +commands.setmathattribute = mathematics.syncboth +commands.setmathalphabet = mathematics.syncname +commands.setmathstyle = mathematics.syncstyle diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua index b309ba077..51c89ea77 100644 --- a/tex/context/base/math-noa.lua +++ b/tex/context/base/math-noa.lua @@ -1,1192 +1,1192 @@ -if not modules then modules = { } end modules ['math-noa'] = { - version = 1.001, - comment = "companion to math-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- beware: this is experimental code and there will be a more --- generic (attribute value driven) interface too but for the --- moment this is ok --- --- we will also make dedicated processors (faster) --- --- beware: names will change as we wil make noads.xxx.handler i.e. 
xxx --- subnamespaces - --- 20D6 -> 2190 --- 20D7 -> 2192 - -local utfchar, utfbyte = utf.char, utf.byte -local formatters = string.formatters - -local fonts, nodes, node, mathematics = fonts, nodes, node, mathematics - -local otf = fonts.handlers.otf -local otffeatures = fonts.constructors.newfeatures("otf") -local registerotffeature = otffeatures.register - -local trace_remapping = false trackers.register("math.remapping", function(v) trace_remapping = v end) -local trace_processing = false trackers.register("math.processing", function(v) trace_processing = v end) -local trace_analyzing = false trackers.register("math.analyzing", function(v) trace_analyzing = v end) -local trace_normalizing = false trackers.register("math.normalizing", function(v) trace_normalizing = v end) -local trace_collapsing = false trackers.register("math.collapsing", function(v) trace_collapsing = v end) -local trace_goodies = false trackers.register("math.goodies", function(v) trace_goodies = v end) -local trace_variants = false trackers.register("math.variants", function(v) trace_variants = v end) -local trace_alternates = false trackers.register("math.alternates", function(v) trace_alternates = v end) -local trace_italics = false trackers.register("math.italics", function(v) trace_italics = v end) -local trace_families = false trackers.register("math.families", function(v) trace_families = v end) - -local check_coverage = true directives.register("math.checkcoverage", function(v) check_coverage = v end) - -local report_processing = logs.reporter("mathematics","processing") -local report_remapping = logs.reporter("mathematics","remapping") -local report_normalizing = logs.reporter("mathematics","normalizing") -local report_collapsing = logs.reporter("mathematics","collapsing") -local report_goodies = logs.reporter("mathematics","goodies") -local report_variants = logs.reporter("mathematics","variants") -local report_alternates = logs.reporter("mathematics","alternates") -local report_italics = logs.reporter("mathematics","italics") -local report_families = logs.reporter("mathematics","families") - -local a_mathrendering = attributes.private("mathrendering") -local a_exportstatus = attributes.private("exportstatus") - -local mlist_to_hlist = node.mlist_to_hlist -local font_of_family = node.family_font -local insert_node_after = node.insert_after -local insert_node_before = node.insert_before -local free_node = node.free -local new_node = node.new -- todo: pool: math_noad math_sub - -local new_kern = nodes.pool.kern -local new_rule = nodes.pool.rule -local concat_nodes = nodes.concat - -local topoints = number.points - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers -local fontcharacters = fonthashes.characters -local fontproperties = fonthashes.properties -local fontitalics = fonthashes.italics -local fontemwidths = fonthashes.emwidths -local fontexheights = fonthashes.exheights - -local variables = interfaces.variables -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue - -local chardata = characters.data - -noads = noads or { } -- todo: only here -local noads = noads - -noads.processors = noads.processors or { } -local processors = noads.processors - -noads.handlers = noads.handlers or { } -local handlers = noads.handlers - -local tasks = nodes.tasks - -local nodecodes = nodes.nodecodes -local noadcodes = nodes.noadcodes - -local noad_ord = noadcodes.ord -local noad_rel = noadcodes.rel -local noad_punct = noadcodes.punct -local noad_opdisplaylimits= 
noadcodes.opdisplaylimits -local noad_oplimits = noadcodes.oplimits -local noad_opnolimits = noadcodes.opnolimits - -local math_noad = nodecodes.noad -- attr nucleus sub sup -local math_accent = nodecodes.accent -- attr nucleus sub sup accent -local math_radical = nodecodes.radical -- attr nucleus sub sup left degree -local math_fraction = nodecodes.fraction -- attr nucleus sub sup left right -local math_box = nodecodes.subbox -- attr list -local math_sub = nodecodes.submlist -- attr list -local math_char = nodecodes.mathchar -- attr fam char -local math_textchar = nodecodes.mathtextchar -- attr fam char -local math_delim = nodecodes.delim -- attr small_fam small_char large_fam large_char -local math_style = nodecodes.style -- attr style -local math_choice = nodecodes.choice -- attr display text script scriptscript -local math_fence = nodecodes.fence -- attr subtype - -local hlist_code = nodecodes.hlist -local glyph_code = nodecodes.glyph - -local left_fence_code = 1 - -local function process(start,what,n,parent) - if n then n = n + 1 else n = 0 end - while start do - local id = start.id - if trace_processing then - if id == math_noad then - report_processing("%w%S, class %a",n*2,start,noadcodes[start.subtype]) - elseif id == math_char then - local char = start.char - local fam = start.fam - local font = font_of_family(fam) - report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,start,fam,font,char,char) - else - report_processing("%w%S",n*2,start) - end - end - local proc = what[id] - if proc then - -- report_processing("start processing") - local done, newstart = proc(start,what,n,parent) -- prev is bugged: or start.prev - if newstart then - start = newstart - -- report_processing("stop processing (new start)") - else - -- report_processing("stop processing") - end - elseif id == math_char or id == math_textchar or id == math_delim then - break - elseif id == math_noad then - local noad = start.nucleus if noad then process(noad,what,n,start) end -- list - noad = start.sup if noad then process(noad,what,n,start) end -- list - noad = start.sub if noad then process(noad,what,n,start) end -- list - elseif id == math_box or id == math_sub then - -- local noad = start.list if noad then process(noad,what,n,start) end -- list - local noad = start.head if noad then process(noad,what,n,start) end -- list - elseif id == math_fraction then - local noad = start.num if noad then process(noad,what,n,start) end -- list - noad = start.denom if noad then process(noad,what,n,start) end -- list - noad = start.left if noad then process(noad,what,n,start) end -- delimiter - noad = start.right if noad then process(noad,what,n,start) end -- delimiter - elseif id == math_choice then - local noad = start.display if noad then process(noad,what,n,start) end -- list - noad = start.text if noad then process(noad,what,n,start) end -- list - noad = start.script if noad then process(noad,what,n,start) end -- list - noad = start.scriptscript if noad then process(noad,what,n,start) end -- list - elseif id == math_fence then - local noad = start.delim if noad then process(noad,what,n,start) end -- delimiter - elseif id == math_radical then - local noad = start.nucleus if noad then process(noad,what,n,start) end -- list - noad = start.sup if noad then process(noad,what,n,start) end -- list - noad = start.sub if noad then process(noad,what,n,start) end -- list - noad = start.left if noad then process(noad,what,n,start) end -- delimiter - noad = start.degree if noad then process(noad,what,n,start) end -- list - 
elseif id == math_accent then - local noad = start.nucleus if noad then process(noad,what,n,start) end -- list - noad = start.sup if noad then process(noad,what,n,start) end -- list - noad = start.sub if noad then process(noad,what,n,start) end -- list - noad = start.accent if noad then process(noad,what,n,start) end -- list - noad = start.bot_accent if noad then process(noad,what,n,start) end -- list - elseif id == math_style then - -- has a next - else - -- glue, penalty, etc - end - start = start.next - end -end - -local function processnoads(head,actions,banner) - if trace_processing then - report_processing("start %a",banner) - process(head,actions) - report_processing("stop %a",banner) - else - process(head,actions) - end -end - -noads.process = processnoads - --- experiment (when not present fall back to fam 0) -- needs documentation - --- 0-2 regular --- 3-5 bold --- 6-8 pseudobold - --- this could best be integrated in the remapper, and if we run into problems, we --- might as well do this - -local families = { } -local a_mathfamily = attributes.private("mathfamily") -local boldmap = mathematics.boldmap - -local familymap = { [0] = - "regular", - "regular", - "regular", - "bold", - "bold", - "bold", - "pseudobold", - "pseudobold", - "pseudobold", -} - -families[math_char] = function(pointer) - if pointer.fam == 0 then - local a = pointer[a_mathfamily] - if a and a > 0 then - pointer[a_mathfamily] = 0 - if a > 5 then - local char = pointer.char - local bold = boldmap[char] - local newa = a - 3 - if bold then - pointer[a_exportstatus] = char - pointer.char = bold - if trace_families then - report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa]) - end - else - if trace_families then - report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa]) - end - end - pointer.fam = newa - else - if trace_families then - local char = pointer.char - report_families("family of %C becomes %s with remap %s",char,a,familymap[a]) - end - pointer.fam = a - end - else - -- pointer.fam = 0 - end - end -end - -families[math_delim] = function(pointer) - if pointer.small_fam == 0 then - local a = pointer[a_mathfamily] - if a and a > 0 then - pointer[a_mathfamily] = 0 - if a > 5 then - -- no bold delimiters in unicode - a = a - 3 - end - pointer.small_fam = a - pointer.large_fam = a - else - pointer.small_fam = 0 - pointer.large_fam = 0 - end - end -end - -families[math_textchar] = families[math_char] - -function handlers.families(head,style,penalties) - processnoads(head,families,"families") - return true -end - --- character remapping - -local a_mathalphabet = attributes.private("mathalphabet") -local a_mathgreek = attributes.private("mathgreek") - -processors.relocate = { } - -local function report_remap(tag,id,old,new,extra) - report_remapping("remapping %s in font %s from %C to %C%s",tag,id,old,new,extra) -end - -local remapalphabets = mathematics.remapalphabets -local fallbackstyleattr = mathematics.fallbackstyleattr -local setnodecolor = nodes.tracers.colors.set - -local function checked(pointer) - local char = pointer.char - local fam = pointer.fam - local id = font_of_family(fam) - local tc = fontcharacters[id] - if not tc[char] then - local specials = characters.data[char].specials - if specials and (specials[1] == "char" or specials[1] == "font") then - newchar = specials[#specials] - if trace_remapping then - 
report_remap("fallback",id,char,newchar) - end - if trace_analyzing then - setnodecolor(pointer,"font:isol") - end - pointer[a_exportstatus] = char -- testcase: exponentiale - pointer.char = newchar - return true - end - end -end - -processors.relocate[math_char] = function(pointer) - local g = pointer[a_mathgreek] or 0 - local a = pointer[a_mathalphabet] or 0 - if a > 0 or g > 0 then - if a > 0 then - pointer[a_mathgreek] = 0 - end - if g > 0 then - pointer[a_mathalphabet] = 0 - end - local char = pointer.char - local newchar = remapalphabets(char,a,g) - if newchar then - local fam = pointer.fam - local id = font_of_family(fam) - local characters = fontcharacters[id] - if characters[newchar] then - if trace_remapping then - report_remap("char",id,char,newchar) - end - if trace_analyzing then - setnodecolor(pointer,"font:isol") - end - pointer.char = newchar - return true - else - local fallback = fallbackstyleattr(a) - if fallback then - local newchar = remapalphabets(char,fallback,g) - if newchar then - if characters[newchar] then - if trace_remapping then - report_remap("char",id,char,newchar," (fallback remapping used)") - end - if trace_analyzing then - setnodecolor(pointer,"font:isol") - end - pointer.char = newchar - return true - elseif trace_remapping then - report_remap("char",id,char,newchar," fails (no fallback character)") - end - elseif trace_remapping then - report_remap("char",id,char,newchar," fails (no fallback remap character)") - end - elseif trace_remapping then - report_remap("char",id,char,newchar," fails (no fallback style)") - end - end - end - end - if trace_analyzing then - setnodecolor(pointer,"font:medi") - end - if check_coverage then - return checked(pointer) - end -end - -processors.relocate[math_textchar] = function(pointer) - if trace_analyzing then - setnodecolor(pointer,"font:init") - end -end - -processors.relocate[math_delim] = function(pointer) - if trace_analyzing then - setnodecolor(pointer,"font:fina") - end -end - -function handlers.relocate(head,style,penalties) - processnoads(head,processors.relocate,"relocate") - return true -end - --- rendering (beware, not exported) - -processors.render = { } - -local rendersets = mathematics.renderings.numbers or { } -- store - -processors.render[math_char] = function(pointer) - local attr = pointer[a_mathrendering] - if attr and attr > 0 then - local char = pointer.char - local renderset = rendersets[attr] - if renderset then - local newchar = renderset[char] - if newchar then - local fam = pointer.fam - local id = font_of_family(fam) - local characters = fontcharacters[id] - if characters and characters[newchar] then - pointer.char = newchar - pointer[a_exportstatus] = char - end - end - end - end -end - -function handlers.render(head,style,penalties) - processnoads(head,processors.render,"render") - return true -end - --- some resize options (this works ok because the content is --- empty and no larger next will be forced) --- --- beware: we don't use \delcode but \Udelcode and as such have --- no large_fam; also, we need to check for subtype and/or --- small_fam not being 0 because \. 
sits in 0,0 by default --- --- todo: just replace the character by an ord noad --- and remove the right delimiter as well - -local mathsize = attributes.private("mathsize") - -local resize = { } processors.resize = resize - -resize[math_fence] = function(pointer) - if pointer.subtype == left_fence_code then - local a = pointer[mathsize] - if a and a > 0 then - pointer[mathsize] = 0 - local d = pointer.delim - local df = d.small_fam - local id = font_of_family(df) - if id > 0 then - local ch = d.small_char - d.small_char = mathematics.big(fontdata[id],ch,a) - end - end - end -end - -function handlers.resize(head,style,penalties) - processnoads(head,resize,"resize") - return true -end - --- respacing - --- local mathpunctuation = attributes.private("mathpunctuation") --- --- local respace = { } processors.respace = respace - --- only [nd,ll,ul][po][nd,ll,ul] - --- respace[math_char] = function(pointer,what,n,parent) -- not math_noad .. math_char ... and then parent --- pointer = parent --- if pointer and pointer.subtype == noad_ord then --- local a = pointer[mathpunctuation] --- if a and a > 0 then --- pointer[mathpunctuation] = 0 --- local current_nucleus = pointer.nucleus --- if current_nucleus.id == math_char then --- local current_char = current_nucleus.char --- local fc = chardata[current_char] --- fc = fc and fc.category --- if fc == "nd" or fc == "ll" or fc == "lu" then --- local next_noad = pointer.next --- if next_noad and next_noad.id == math_noad and next_noad.subtype == noad_punct then --- local next_nucleus = next_noad.nucleus --- if next_nucleus.id == math_char then --- local next_char = next_nucleus.char --- local nc = chardata[next_char] --- nc = nc and nc.category --- if nc == "po" then --- local last_noad = next_noad.next --- if last_noad and last_noad.id == math_noad and last_noad.subtype == noad_ord then --- local last_nucleus = last_noad.nucleus --- if last_nucleus.id == math_char then --- local last_char = last_nucleus.char --- local lc = chardata[last_char] --- lc = lc and lc.category --- if lc == "nd" or lc == "ll" or lc == "lu" then --- local ord = new_node(math_noad) -- todo: pool --- ord.subtype, ord.nucleus, ord.sub, ord.sup, ord.attr = noad_ord, next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr --- -- next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr = nil, nil, nil, nil --- next_noad.nucleus, next_noad.sub, next_noad.sup = nil, nil, nil -- else crash with attributes ref count --- --~ next_noad.attr = nil --- ord.next = last_noad --- pointer.next = ord --- free_node(next_noad) --- end --- end --- end --- end --- end --- end --- end --- end --- end --- end --- end - --- local comma = 0x002C --- local period = 0x002E --- --- respace[math_char] = function(pointer,what,n,parent) --- pointer = parent --- if pointer and pointer.subtype == noad_punct then --- local current_nucleus = pointer.nucleus --- if current_nucleus.id == math_char then --- local current_nucleus = pointer.nucleus --- if current_nucleus.id == math_char then --- local current_char = current_nucleus.char --- local a = pointer[mathpunctuation] --- if not a or a == 0 then --- if current_char == comma then --- -- default tex: 2,5 or 2, 5 --> 2, 5 --- elseif current_char == period then --- -- default tex: 2.5 or 2. 
5 --> 2.5 --- pointer.subtype = noad_ord --- end --- elseif a == 1 then --- local next_noad = pointer.next --- if next_noad and next_noad.id == math_noad then --- local next_nucleus = next_noad.nucleus --- if next_nucleus.id == math_char and next_nucleus.char == 0 then --- nodes.remove(pointer,next_noad,true) --- end --- if current_char == comma then --- -- default tex: 2,5 or 2, 5 --> 2, 5 --- elseif current_char == period then --- -- default tex: 2.5 or 2. 5 --> 2.5 --- pointer.subtype = noad_ord --- end --- end --- elseif a == 2 then --- if current_char == comma or current_char == period then --- local next_noad = pointer.next --- if next_noad and next_noad.id == math_noad then --- local next_nucleus = next_noad.nucleus --- if next_nucleus.id == math_char and next_nucleus.char == 0 then --- if current_char == comma then --- -- adaptive: 2, 5 --> 2, 5 --- elseif current_char == period then --- -- adaptive: 2. 5 --> 2. 5 --- end --- nodes.remove(pointer,next_noad,true) --- else --- if current_char == comma then --- -- adaptive: 2,5 --> 2,5 --- pointer.subtype = noad_ord --- elseif current_char == period then --- -- adaptive: 2.5 --> 2.5 --- pointer.subtype = noad_ord --- end --- end --- end --- end --- end --- end --- end --- end --- end --- --- function handlers.respace(head,style,penalties) --- processnoads(head,respace,"respace") --- return true --- end - --- The following code is dedicated to Luigi Scarso who pointed me --- to the fact that \not= is not producing valid pdf-a code. --- The code does not solve this for virtual characters but it does --- a decent job on collapsing so that fonts that have the right --- glyph will have a decent unicode point. In the meantime this code --- has been moved elsewhere. - -local collapse = { } processors.collapse = collapse - -local mathpairs = characters.mathpairs - -mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034 } -- (prime,prime) (prime,doubleprime) -mathpairs[0x2033] = { [0x2032] = 0x2034 } -- (doubleprime,prime) - -mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D } -mathpairs[0x222C] = { [0x222B] = 0x222D } - -mathpairs[0x007C] = { [0x007C] = 0x2016 } -- double bars - -local validpair = { - [noad_rel] = true, - [noad_ord] = true, - [noad_opdisplaylimits] = true, - [noad_oplimits] = true, - [noad_opnolimits] = true, -} - -local function collapsepair(pointer,what,n,parent) -- todo: switch to turn in on and off - if parent then - if validpair[parent.subtype] then - local current_nucleus = parent.nucleus - if not parent.sub and not parent.sup and current_nucleus.id == math_char then - local current_char = current_nucleus.char - local mathpair = mathpairs[current_char] - if mathpair then - local next_noad = parent.next - if next_noad and next_noad.id == math_noad then - if validpair[next_noad.subtype] then - local next_nucleus = next_noad.nucleus - if next_nucleus.id == math_char then - local next_char = next_nucleus.char - local newchar = mathpair[next_char] - if newchar then - local fam = current_nucleus.fam - local id = font_of_family(fam) - local characters = fontcharacters[id] - if characters and characters[newchar] then - if trace_collapsing then - report_collapsing("%U + %U => %U",current_char,next_char,newchar) - end - current_nucleus.char = newchar - local next_next_noad = next_noad.next - if next_next_noad then - parent.next = next_next_noad - next_next_noad.prev = parent - else - parent.next = nil - end - parent.sup = next_noad.sup - parent.sub = next_noad.sub - next_noad.sup = nil - next_noad.sub = nil - 
free_node(next_noad) - collapsepair(pointer,what,n,parent) - end - end - end - end - end - end - end - end - end -end - -collapse[math_char] = collapsepair - -function noads.handlers.collapse(head,style,penalties) - processnoads(head,collapse,"collapse") - return true -end - --- normalize scripts - -local unscript = { } noads.processors.unscript = unscript - -local superscripts = characters.superscripts -local subscripts = characters.subscripts - -local replaced = { } - -local function replace(pointer,what,n,parent) - pointer = parent -- we're following the parent list (chars trigger this) - local next = pointer.next - local start_super, stop_super, start_sub, stop_sub - local mode = "unset" - while next and next.id == math_noad do - local nextnucleus = next.nucleus - if nextnucleus and nextnucleus.id == math_char and not next.sub and not next.sup then - local char = nextnucleus.char - local s = superscripts[char] - if s then - if not start_super then - start_super = next - mode = "super" - elseif mode == "sub" then - break - end - stop_super = next - next = next.next - nextnucleus.char = s - replaced[char] = (replaced[char] or 0) + 1 - if trace_normalizing then - report_normalizing("superscript %C becomes %C",char,s) - end - else - local s = subscripts[char] - if s then - if not start_sub then - start_sub = next - mode = "sub" - elseif mode == "super" then - break - end - stop_sub = next - next = next.next - nextnucleus.char = s - replaced[char] = (replaced[char] or 0) + 1 - if trace_normalizing then - report_normalizing("subscript %C becomes %C",char,s) - end - else - break - end - end - else - break - end - end - if start_super then - if start_super == stop_super then - pointer.sup = start_super.nucleus - else - local list = new_node(math_sub) -- todo attr - list.head = start_super - pointer.sup = list - end - if mode == "super" then - pointer.next = stop_super.next - end - stop_super.next = nil - end - if start_sub then - if start_sub == stop_sub then - pointer.sub = start_sub.nucleus - else - local list = new_node(math_sub) -- todo attr - list.head = start_sub - pointer.sub = list - end - if mode == "sub" then - pointer.next = stop_sub.next - end - stop_sub.next = nil - end - -- we could return stop -end - -unscript[math_char] = replace -- not noads as we need to recurse - -function handlers.unscript(head,style,penalties) - processnoads(head,unscript,"unscript") - return true -end - -statistics.register("math script replacements", function() - if next(replaced) then - local n, t = 0, { } - for k, v in table.sortedpairs(replaced) do - n = n + v - t[#t+1] = formatters["%C"](k) - end - return formatters["% t (n=%s)"](t,n) - end -end) - --- math alternates: (in xits lgf: $ABC$ $\cal ABC$ $\mathalternate{cal}\cal ABC$) --- math alternates: (in lucidanova lgf: $ABC \mathalternate{italic} ABC$) - --- todo: set alternate for specific symbols - -local function initializemathalternates(tfmdata) - local goodies = tfmdata.goodies - if goodies then - local shared = tfmdata.shared - for i=1,#goodies do - -- first one counts - -- we can consider sharing the attributes ... 
todo (only once scan) - local mathgoodies = goodies[i].mathematics - local alternates = mathgoodies and mathgoodies.alternates - if alternates then - if trace_goodies then - report_goodies("loading alternates for font %a",tfmdata.properties.name) - end - local lastattribute, attributes = 0, { } - for k, v in next, alternates do - lastattribute = lastattribute + 1 - v.attribute = lastattribute - attributes[lastattribute] = v - end - shared.mathalternates = alternates -- to be checked if shared is ok here - shared.mathalternatesattributes = attributes -- to be checked if shared is ok here - return - end - end - end -end - -registerotffeature { - name = "mathalternates", - description = "additional math alternative shapes", - initializers = { - base = initializemathalternates, - node = initializemathalternates, - } -} - -local getalternate = otf.getalternate - -local a_mathalternate = attributes.private("mathalternate") - -local alternate = { } -- processors.alternate = alternate - -function mathematics.setalternate(fam,tag) - local id = font_of_family(fam) - local tfmdata = fontdata[id] - local mathalternates = tfmdata.shared and tfmdata.shared.mathalternates - if mathalternates then - local m = mathalternates[tag] - tex.attribute[a_mathalternate] = m and m.attribute or unsetvalue - end -end - -alternate[math_char] = function(pointer) - local a = pointer[a_mathalternate] - if a and a > 0 then - pointer[a_mathalternate] = 0 - local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata - local mathalternatesattributes = tfmdata.shared.mathalternatesattributes - if mathalternatesattributes then - local what = mathalternatesattributes[a] - local alt = getalternate(tfmdata,pointer.char,what.feature,what.value) - if alt then - if trace_alternates then - report_alternates("alternate %a, value %a, replacing glyph %U by glyph %U", - tostring(what.feature),tostring(what.value),pointer.char,alt) - end - pointer.char = alt - end - end - end -end - -function handlers.check(head,style,penalties) - processnoads(head,alternate,"check") - return true -end - --- italics: we assume that only characters matter --- --- = we check for correction first because accessing nodes is slower --- = the actual glyph is not that important (we can control it with numbers) - -local a_mathitalics = attributes.private("mathitalics") - -local italics = { } -local default_factor = 1/20 - -local function getcorrection(method,font,char) -- -- or character.italic -- (this one is for tex) - - local correction, fromvisual - - if method == 1 then - -- only font data triggered by fontitalics - local italics = fontitalics[font] - if italics then - local character = fontcharacters[font][char] - if character then - correction = character.italic_correction - if correction and correction ~= 0 then - return correction, false - end - end - end - elseif method == 2 then - -- only font data triggered by fontdata - local character = fontcharacters[font][char] - if character then - correction = character.italic_correction - if correction and correction ~= 0 then - return correction, false - end - end - elseif method == 3 then - -- only quad based by selective - local visual = chardata[char].visual - if not visual then - -- skip - elseif visual == "it" or visual == "bi" then - correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontemwidths[font] - if correction and correction ~= 0 then - return correction, true - end - end - elseif method == 4 then - -- combination of 1 and 3 - local italics = 
fontitalics[font] - if italics then - local character = fontcharacters[font][char] - if character then - correction = character.italic_correction - if correction and correction ~= 0 then - return correction, false - end - end - end - if not correction then - local visual = chardata[char].visual - if not visual then - -- skip - elseif visual == "it" or visual == "bi" then - correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontemwidths[font] - if correction and correction ~= 0 then - return correction, true - end - end - end - end - -end - -local function insert_kern(current,kern) - local sub = new_node(math_sub) -- todo: pool - local noad = new_node(math_noad) -- todo: pool - sub.head = kern - kern.next = noad - noad.nucleus = current - return sub -end - -local setcolor = nodes.tracers.colors.set -local italic_kern = new_kern -local c_positive_d = "trace:db" -local c_negative_d = "trace:dr" - -trackers.register("math.italics", function(v) - if v then - italic_kern = function(k,font) - local ex = 1.5 * fontexheights[font] - if k > 0 then - return setcolor(new_rule(k,ex,ex),c_positive_d) - else - return concat_nodes { - old_kern(k), - setcolor(new_rule(-k,ex,ex),c_negative_d), - old_kern(k), - } - end - end - else - italic_kern = new_kern - end -end) - -italics[math_char] = function(pointer,what,n,parent) - local method = pointer[a_mathitalics] - if method and method > 0 then - local char = pointer.char - local font = font_of_family(pointer.fam) -- todo: table - local correction, visual = getcorrection(method,font,char) - if correction then - local pid = parent.id - local sub, sup - if pid == math_noad then - sup = parent.sup - sub = parent.sub - end - if sup or sub then - local subtype = parent.subtype - if subtype == noad_oplimits then - if sup then - parent.sup = insert_kern(sup,italic_kern(correction,font)) - if trace_italics then - report_italics("method %a, adding %p italic correction for upper limit of %C",method,correction,char) - end - end - if sub then - local correction = - correction - parent.sub = insert_kern(sub,italic_kern(correction,font)) - if trace_italics then - report_italics("method %a, adding %p italic correction for lower limit of %C",method,correction,char) - end - end - else - if sup then - parent.sup = insert_kern(sup,italic_kern(correction,font)) - if trace_italics then - report_italics("method %a, adding %p italic correction before superscript after %C",method,correction,char) - end - end - end - else - local next_noad = parent.next - if not next_noad then - if n== 1 then -- only at the outer level .. 
will become an option (always,endonly,none) - if trace_italics then - report_italics("method %a, adding %p italic correction between %C and end math",method,correctio,char) - end - insert_node_after(parent,parent,italic_kern(correction,font)) - end - elseif next_noad.id == math_noad then - local next_subtype = next_noad.subtype - if next_subtype == noad_punct or next_subtype == noad_ord then - local next_nucleus = next_noad.nucleus - if next_nucleus.id == math_char then - local next_char = next_nucleus.char - local next_data = chardata[next_char] - local visual = next_data.visual - if visual == "it" or visual == "bi" then - -- if trace_italics then - -- report_italics("method %a, skipping %p italic correction between italic %C and italic %C",method,correction,char,next_char) - -- end - else - local category = next_data.category - if category == "nd" or category == "ll" or category == "lu" then - if trace_italics then - report_italics("method %a, adding %p italic correction between italic %C and non italic %C",method,correction,char,next_char) - end - insert_node_after(parent,parent,italic_kern(correction,font)) - -- elseif next_data.height > (fontexheights[font]/2) then - -- if trace_italics then - -- report_italics("method %a, adding %p italic correction between %C and ascending %C",method,correction,char,next_char) - -- end - -- insert_node_after(parent,parent,italic_kern(correction,font)) - -- elseif trace_italics then - -- -- report_italics("method %a, skipping %p italic correction between %C and %C",method,correction,char,next_char) - end - end - end - end - end - end - end - end -end - -function handlers.italics(head,style,penalties) - processnoads(head,italics,"italics") - return true -end - -local enable - -enable = function() - tasks.enableaction("math", "noads.handlers.italics") - if trace_italics then - report_italics("enabling math italics") - end - enable = false -end - --- best do this only on math mode (less overhead) - -function mathematics.setitalics(n) - if enable then - enable() - end - if n == variables.reset then - texattribute[a_mathitalics] = unsetvalue - else - texattribute[a_mathitalics] = tonumber(n) or unsetvalue - end -end - -function mathematics.resetitalics() - texattribute[a_mathitalics] = unsetvalue -end - --- variants - -local variants = { } - -local validvariants = { -- fast check on valid - [0x2229] = 0xFE00, [0x222A] = 0xFE00, - [0x2268] = 0xFE00, [0x2269] = 0xFE00, - [0x2272] = 0xFE00, [0x2273] = 0xFE00, - [0x228A] = 0xFE00, [0x228B] = 0xFE00, - [0x2293] = 0xFE00, [0x2294] = 0xFE00, - [0x2295] = 0xFE00, - [0x2297] = 0xFE00, - [0x229C] = 0xFE00, - [0x22DA] = 0xFE00, [0x22DB] = 0xFE00, - [0x2A3C] = 0xFE00, [0x2A3D] = 0xFE00, - [0x2A9D] = 0xFE00, [0x2A9E] = 0xFE00, - [0x2AAC] = 0xFE00, [0x2AAD] = 0xFE00, - [0x2ACB] = 0xFE00, [0x2ACC] = 0xFE00, -} - -variants[math_char] = function(pointer,what,n,parent) -- also set export value - local char = pointer.char - local selector = validvariants[char] - if selector then - local next = parent.next - if next and next.id == math_noad then - local nucleus = next.nucleus - if nucleus and nucleus.id == math_char and nucleus.char == selector then - local variant - local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata - local mathvariants = tfmdata.resources.variants -- and variantdata - if mathvariants then - mathvariants = mathvariants[selector] - if mathvariants then - variant = mathvariants[char] - end - end - if variant then - pointer.char = variant - pointer[a_exportstatus] = char -- we 
don't export the variant as it's visual markup - if trace_variants then - report_variants("variant (%U,%U) replaced by %U",char,selector,variant) - end - else - if trace_variants then - report_variants("no variant (%U,%U)",char,selector) - end - end - next.prev = pointer - parent.next = next.next - free_node(next) - end - end - end -end - -function handlers.variants(head,style,penalties) - processnoads(head,variants,"unicode variant") - return true -end - --- the normal builder - -function builders.kernel.mlist_to_hlist(head,style,penalties) - return mlist_to_hlist(head,style,penalties), true -end - --- function builders.kernel.mlist_to_hlist(head,style,penalties) --- print("!!!!!!! BEFORE",penalties) --- for n in node.traverse(head) do print(n) end --- print("!!!!!!!") --- head = mlist_to_hlist(head,style,penalties) --- print("!!!!!!! AFTER") --- for n in node.traverse(head) do print(n) end --- print("!!!!!!!") --- return head, true --- end - -tasks.new { - name = "math", - arguments = 2, - processor = utilities.sequencers.nodeprocessor, - sequence = { - "before", - "normalizers", - "builders", - "after", - }, -} - -tasks.freezegroup("math", "normalizers") -- experimental -tasks.freezegroup("math", "builders") -- experimental - -local actions = tasks.actions("math") -- head, style, penalties - -local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming - -function processors.mlist_to_hlist(head,style,penalties) - starttiming(noads) - local head, done = actions(head,style,penalties) - stoptiming(noads) - return head, done -end - -callbacks.register('mlist_to_hlist',processors.mlist_to_hlist,"preprocessing math list") - --- tracing - -statistics.register("math processing time", function() - return statistics.elapsedseconds(noads) -end) - --- interface - -commands.setmathalternate = mathematics.setalternate -commands.setmathitalics = mathematics.setitalics -commands.resetmathitalics = mathematics.resetitalics +if not modules then modules = { } end modules ['math-noa'] = { + version = 1.001, + comment = "companion to math-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- beware: this is experimental code and there will be a more +-- generic (attribute value driven) interface too but for the +-- moment this is ok +-- +-- we will also make dedicated processors (faster) +-- +-- beware: names will change as we wil make noads.xxx.handler i.e. 
xxx +-- subnamespaces + +-- 20D6 -> 2190 +-- 20D7 -> 2192 + +local utfchar, utfbyte = utf.char, utf.byte +local formatters = string.formatters + +local fonts, nodes, node, mathematics = fonts, nodes, node, mathematics + +local otf = fonts.handlers.otf +local otffeatures = fonts.constructors.newfeatures("otf") +local registerotffeature = otffeatures.register + +local trace_remapping = false trackers.register("math.remapping", function(v) trace_remapping = v end) +local trace_processing = false trackers.register("math.processing", function(v) trace_processing = v end) +local trace_analyzing = false trackers.register("math.analyzing", function(v) trace_analyzing = v end) +local trace_normalizing = false trackers.register("math.normalizing", function(v) trace_normalizing = v end) +local trace_collapsing = false trackers.register("math.collapsing", function(v) trace_collapsing = v end) +local trace_goodies = false trackers.register("math.goodies", function(v) trace_goodies = v end) +local trace_variants = false trackers.register("math.variants", function(v) trace_variants = v end) +local trace_alternates = false trackers.register("math.alternates", function(v) trace_alternates = v end) +local trace_italics = false trackers.register("math.italics", function(v) trace_italics = v end) +local trace_families = false trackers.register("math.families", function(v) trace_families = v end) + +local check_coverage = true directives.register("math.checkcoverage", function(v) check_coverage = v end) + +local report_processing = logs.reporter("mathematics","processing") +local report_remapping = logs.reporter("mathematics","remapping") +local report_normalizing = logs.reporter("mathematics","normalizing") +local report_collapsing = logs.reporter("mathematics","collapsing") +local report_goodies = logs.reporter("mathematics","goodies") +local report_variants = logs.reporter("mathematics","variants") +local report_alternates = logs.reporter("mathematics","alternates") +local report_italics = logs.reporter("mathematics","italics") +local report_families = logs.reporter("mathematics","families") + +local a_mathrendering = attributes.private("mathrendering") +local a_exportstatus = attributes.private("exportstatus") + +local mlist_to_hlist = node.mlist_to_hlist +local font_of_family = node.family_font +local insert_node_after = node.insert_after +local insert_node_before = node.insert_before +local free_node = node.free +local new_node = node.new -- todo: pool: math_noad math_sub + +local new_kern = nodes.pool.kern +local new_rule = nodes.pool.rule +local concat_nodes = nodes.concat + +local topoints = number.points + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local fontcharacters = fonthashes.characters +local fontproperties = fonthashes.properties +local fontitalics = fonthashes.italics +local fontemwidths = fonthashes.emwidths +local fontexheights = fonthashes.exheights + +local variables = interfaces.variables +local texattribute = tex.attribute +local unsetvalue = attributes.unsetvalue + +local chardata = characters.data + +noads = noads or { } -- todo: only here +local noads = noads + +noads.processors = noads.processors or { } +local processors = noads.processors + +noads.handlers = noads.handlers or { } +local handlers = noads.handlers + +local tasks = nodes.tasks + +local nodecodes = nodes.nodecodes +local noadcodes = nodes.noadcodes + +local noad_ord = noadcodes.ord +local noad_rel = noadcodes.rel +local noad_punct = noadcodes.punct +local noad_opdisplaylimits= 
noadcodes.opdisplaylimits +local noad_oplimits = noadcodes.oplimits +local noad_opnolimits = noadcodes.opnolimits + +local math_noad = nodecodes.noad -- attr nucleus sub sup +local math_accent = nodecodes.accent -- attr nucleus sub sup accent +local math_radical = nodecodes.radical -- attr nucleus sub sup left degree +local math_fraction = nodecodes.fraction -- attr nucleus sub sup left right +local math_box = nodecodes.subbox -- attr list +local math_sub = nodecodes.submlist -- attr list +local math_char = nodecodes.mathchar -- attr fam char +local math_textchar = nodecodes.mathtextchar -- attr fam char +local math_delim = nodecodes.delim -- attr small_fam small_char large_fam large_char +local math_style = nodecodes.style -- attr style +local math_choice = nodecodes.choice -- attr display text script scriptscript +local math_fence = nodecodes.fence -- attr subtype + +local hlist_code = nodecodes.hlist +local glyph_code = nodecodes.glyph + +local left_fence_code = 1 + +local function process(start,what,n,parent) + if n then n = n + 1 else n = 0 end + while start do + local id = start.id + if trace_processing then + if id == math_noad then + report_processing("%w%S, class %a",n*2,start,noadcodes[start.subtype]) + elseif id == math_char then + local char = start.char + local fam = start.fam + local font = font_of_family(fam) + report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,start,fam,font,char,char) + else + report_processing("%w%S",n*2,start) + end + end + local proc = what[id] + if proc then + -- report_processing("start processing") + local done, newstart = proc(start,what,n,parent) -- prev is bugged: or start.prev + if newstart then + start = newstart + -- report_processing("stop processing (new start)") + else + -- report_processing("stop processing") + end + elseif id == math_char or id == math_textchar or id == math_delim then + break + elseif id == math_noad then + local noad = start.nucleus if noad then process(noad,what,n,start) end -- list + noad = start.sup if noad then process(noad,what,n,start) end -- list + noad = start.sub if noad then process(noad,what,n,start) end -- list + elseif id == math_box or id == math_sub then + -- local noad = start.list if noad then process(noad,what,n,start) end -- list + local noad = start.head if noad then process(noad,what,n,start) end -- list + elseif id == math_fraction then + local noad = start.num if noad then process(noad,what,n,start) end -- list + noad = start.denom if noad then process(noad,what,n,start) end -- list + noad = start.left if noad then process(noad,what,n,start) end -- delimiter + noad = start.right if noad then process(noad,what,n,start) end -- delimiter + elseif id == math_choice then + local noad = start.display if noad then process(noad,what,n,start) end -- list + noad = start.text if noad then process(noad,what,n,start) end -- list + noad = start.script if noad then process(noad,what,n,start) end -- list + noad = start.scriptscript if noad then process(noad,what,n,start) end -- list + elseif id == math_fence then + local noad = start.delim if noad then process(noad,what,n,start) end -- delimiter + elseif id == math_radical then + local noad = start.nucleus if noad then process(noad,what,n,start) end -- list + noad = start.sup if noad then process(noad,what,n,start) end -- list + noad = start.sub if noad then process(noad,what,n,start) end -- list + noad = start.left if noad then process(noad,what,n,start) end -- delimiter + noad = start.degree if noad then process(noad,what,n,start) end -- list + 
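-- The handlers defined further down (resize, collapse, unscript, italics and so
-- on) all plug into this walker the same way: a table keyed by node id whose
-- entries receive (pointer,what,n,parent) and that is handed to processnoads. A
-- minimal sketch with invented names, kept commented out in the style of the
-- disabled blocks elsewhere in this file, just to show the shape of such a
-- processor:
--
-- local demo = { }
-- local seen = { }
--
-- demo[math_char] = function(pointer,what,n,parent)
--     -- called once for every math character noad that the walker visits
--     seen[pointer.char] = (seen[pointer.char] or 0) + 1
-- end
--
-- function handlers.demo(head,style,penalties)
--     processnoads(head,demo,"demo")
--     return true
-- end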
elseif id == math_accent then + local noad = start.nucleus if noad then process(noad,what,n,start) end -- list + noad = start.sup if noad then process(noad,what,n,start) end -- list + noad = start.sub if noad then process(noad,what,n,start) end -- list + noad = start.accent if noad then process(noad,what,n,start) end -- list + noad = start.bot_accent if noad then process(noad,what,n,start) end -- list + elseif id == math_style then + -- has a next + else + -- glue, penalty, etc + end + start = start.next + end +end + +local function processnoads(head,actions,banner) + if trace_processing then + report_processing("start %a",banner) + process(head,actions) + report_processing("stop %a",banner) + else + process(head,actions) + end +end + +noads.process = processnoads + +-- experiment (when not present fall back to fam 0) -- needs documentation + +-- 0-2 regular +-- 3-5 bold +-- 6-8 pseudobold + +-- this could best be integrated in the remapper, and if we run into problems, we +-- might as well do this + +local families = { } +local a_mathfamily = attributes.private("mathfamily") +local boldmap = mathematics.boldmap + +local familymap = { [0] = + "regular", + "regular", + "regular", + "bold", + "bold", + "bold", + "pseudobold", + "pseudobold", + "pseudobold", +} + +families[math_char] = function(pointer) + if pointer.fam == 0 then + local a = pointer[a_mathfamily] + if a and a > 0 then + pointer[a_mathfamily] = 0 + if a > 5 then + local char = pointer.char + local bold = boldmap[char] + local newa = a - 3 + if bold then + pointer[a_exportstatus] = char + pointer.char = bold + if trace_families then + report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa]) + end + else + if trace_families then + report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa]) + end + end + pointer.fam = newa + else + if trace_families then + local char = pointer.char + report_families("family of %C becomes %s with remap %s",char,a,familymap[a]) + end + pointer.fam = a + end + else + -- pointer.fam = 0 + end + end +end + +families[math_delim] = function(pointer) + if pointer.small_fam == 0 then + local a = pointer[a_mathfamily] + if a and a > 0 then + pointer[a_mathfamily] = 0 + if a > 5 then + -- no bold delimiters in unicode + a = a - 3 + end + pointer.small_fam = a + pointer.large_fam = a + else + pointer.small_fam = 0 + pointer.large_fam = 0 + end + end +end + +families[math_textchar] = families[math_char] + +function handlers.families(head,style,penalties) + processnoads(head,families,"families") + return true +end + +-- character remapping + +local a_mathalphabet = attributes.private("mathalphabet") +local a_mathgreek = attributes.private("mathgreek") + +processors.relocate = { } + +local function report_remap(tag,id,old,new,extra) + report_remapping("remapping %s in font %s from %C to %C%s",tag,id,old,new,extra) +end + +local remapalphabets = mathematics.remapalphabets +local fallbackstyleattr = mathematics.fallbackstyleattr +local setnodecolor = nodes.tracers.colors.set + +local function checked(pointer) + local char = pointer.char + local fam = pointer.fam + local id = font_of_family(fam) + local tc = fontcharacters[id] + if not tc[char] then + local specials = characters.data[char].specials + if specials and (specials[1] == "char" or specials[1] == "font") then + newchar = specials[#specials] + if trace_remapping then + 
report_remap("fallback",id,char,newchar) + end + if trace_analyzing then + setnodecolor(pointer,"font:isol") + end + pointer[a_exportstatus] = char -- testcase: exponentiale + pointer.char = newchar + return true + end + end +end + +processors.relocate[math_char] = function(pointer) + local g = pointer[a_mathgreek] or 0 + local a = pointer[a_mathalphabet] or 0 + if a > 0 or g > 0 then + if a > 0 then + pointer[a_mathgreek] = 0 + end + if g > 0 then + pointer[a_mathalphabet] = 0 + end + local char = pointer.char + local newchar = remapalphabets(char,a,g) + if newchar then + local fam = pointer.fam + local id = font_of_family(fam) + local characters = fontcharacters[id] + if characters[newchar] then + if trace_remapping then + report_remap("char",id,char,newchar) + end + if trace_analyzing then + setnodecolor(pointer,"font:isol") + end + pointer.char = newchar + return true + else + local fallback = fallbackstyleattr(a) + if fallback then + local newchar = remapalphabets(char,fallback,g) + if newchar then + if characters[newchar] then + if trace_remapping then + report_remap("char",id,char,newchar," (fallback remapping used)") + end + if trace_analyzing then + setnodecolor(pointer,"font:isol") + end + pointer.char = newchar + return true + elseif trace_remapping then + report_remap("char",id,char,newchar," fails (no fallback character)") + end + elseif trace_remapping then + report_remap("char",id,char,newchar," fails (no fallback remap character)") + end + elseif trace_remapping then + report_remap("char",id,char,newchar," fails (no fallback style)") + end + end + end + end + if trace_analyzing then + setnodecolor(pointer,"font:medi") + end + if check_coverage then + return checked(pointer) + end +end + +processors.relocate[math_textchar] = function(pointer) + if trace_analyzing then + setnodecolor(pointer,"font:init") + end +end + +processors.relocate[math_delim] = function(pointer) + if trace_analyzing then + setnodecolor(pointer,"font:fina") + end +end + +function handlers.relocate(head,style,penalties) + processnoads(head,processors.relocate,"relocate") + return true +end + +-- rendering (beware, not exported) + +processors.render = { } + +local rendersets = mathematics.renderings.numbers or { } -- store + +processors.render[math_char] = function(pointer) + local attr = pointer[a_mathrendering] + if attr and attr > 0 then + local char = pointer.char + local renderset = rendersets[attr] + if renderset then + local newchar = renderset[char] + if newchar then + local fam = pointer.fam + local id = font_of_family(fam) + local characters = fontcharacters[id] + if characters and characters[newchar] then + pointer.char = newchar + pointer[a_exportstatus] = char + end + end + end + end +end + +function handlers.render(head,style,penalties) + processnoads(head,processors.render,"render") + return true +end + +-- some resize options (this works ok because the content is +-- empty and no larger next will be forced) +-- +-- beware: we don't use \delcode but \Udelcode and as such have +-- no large_fam; also, we need to check for subtype and/or +-- small_fam not being 0 because \. 
sits in 0,0 by default +-- +-- todo: just replace the character by an ord noad +-- and remove the right delimiter as well + +local mathsize = attributes.private("mathsize") + +local resize = { } processors.resize = resize + +resize[math_fence] = function(pointer) + if pointer.subtype == left_fence_code then + local a = pointer[mathsize] + if a and a > 0 then + pointer[mathsize] = 0 + local d = pointer.delim + local df = d.small_fam + local id = font_of_family(df) + if id > 0 then + local ch = d.small_char + d.small_char = mathematics.big(fontdata[id],ch,a) + end + end + end +end + +function handlers.resize(head,style,penalties) + processnoads(head,resize,"resize") + return true +end + +-- respacing + +-- local mathpunctuation = attributes.private("mathpunctuation") +-- +-- local respace = { } processors.respace = respace + +-- only [nd,ll,ul][po][nd,ll,ul] + +-- respace[math_char] = function(pointer,what,n,parent) -- not math_noad .. math_char ... and then parent +-- pointer = parent +-- if pointer and pointer.subtype == noad_ord then +-- local a = pointer[mathpunctuation] +-- if a and a > 0 then +-- pointer[mathpunctuation] = 0 +-- local current_nucleus = pointer.nucleus +-- if current_nucleus.id == math_char then +-- local current_char = current_nucleus.char +-- local fc = chardata[current_char] +-- fc = fc and fc.category +-- if fc == "nd" or fc == "ll" or fc == "lu" then +-- local next_noad = pointer.next +-- if next_noad and next_noad.id == math_noad and next_noad.subtype == noad_punct then +-- local next_nucleus = next_noad.nucleus +-- if next_nucleus.id == math_char then +-- local next_char = next_nucleus.char +-- local nc = chardata[next_char] +-- nc = nc and nc.category +-- if nc == "po" then +-- local last_noad = next_noad.next +-- if last_noad and last_noad.id == math_noad and last_noad.subtype == noad_ord then +-- local last_nucleus = last_noad.nucleus +-- if last_nucleus.id == math_char then +-- local last_char = last_nucleus.char +-- local lc = chardata[last_char] +-- lc = lc and lc.category +-- if lc == "nd" or lc == "ll" or lc == "lu" then +-- local ord = new_node(math_noad) -- todo: pool +-- ord.subtype, ord.nucleus, ord.sub, ord.sup, ord.attr = noad_ord, next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr +-- -- next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr = nil, nil, nil, nil +-- next_noad.nucleus, next_noad.sub, next_noad.sup = nil, nil, nil -- else crash with attributes ref count +-- --~ next_noad.attr = nil +-- ord.next = last_noad +-- pointer.next = ord +-- free_node(next_noad) +-- end +-- end +-- end +-- end +-- end +-- end +-- end +-- end +-- end +-- end +-- end + +-- local comma = 0x002C +-- local period = 0x002E +-- +-- respace[math_char] = function(pointer,what,n,parent) +-- pointer = parent +-- if pointer and pointer.subtype == noad_punct then +-- local current_nucleus = pointer.nucleus +-- if current_nucleus.id == math_char then +-- local current_nucleus = pointer.nucleus +-- if current_nucleus.id == math_char then +-- local current_char = current_nucleus.char +-- local a = pointer[mathpunctuation] +-- if not a or a == 0 then +-- if current_char == comma then +-- -- default tex: 2,5 or 2, 5 --> 2, 5 +-- elseif current_char == period then +-- -- default tex: 2.5 or 2. 
5 --> 2.5 +-- pointer.subtype = noad_ord +-- end +-- elseif a == 1 then +-- local next_noad = pointer.next +-- if next_noad and next_noad.id == math_noad then +-- local next_nucleus = next_noad.nucleus +-- if next_nucleus.id == math_char and next_nucleus.char == 0 then +-- nodes.remove(pointer,next_noad,true) +-- end +-- if current_char == comma then +-- -- default tex: 2,5 or 2, 5 --> 2, 5 +-- elseif current_char == period then +-- -- default tex: 2.5 or 2. 5 --> 2.5 +-- pointer.subtype = noad_ord +-- end +-- end +-- elseif a == 2 then +-- if current_char == comma or current_char == period then +-- local next_noad = pointer.next +-- if next_noad and next_noad.id == math_noad then +-- local next_nucleus = next_noad.nucleus +-- if next_nucleus.id == math_char and next_nucleus.char == 0 then +-- if current_char == comma then +-- -- adaptive: 2, 5 --> 2, 5 +-- elseif current_char == period then +-- -- adaptive: 2. 5 --> 2. 5 +-- end +-- nodes.remove(pointer,next_noad,true) +-- else +-- if current_char == comma then +-- -- adaptive: 2,5 --> 2,5 +-- pointer.subtype = noad_ord +-- elseif current_char == period then +-- -- adaptive: 2.5 --> 2.5 +-- pointer.subtype = noad_ord +-- end +-- end +-- end +-- end +-- end +-- end +-- end +-- end +-- end +-- +-- function handlers.respace(head,style,penalties) +-- processnoads(head,respace,"respace") +-- return true +-- end + +-- The following code is dedicated to Luigi Scarso who pointed me +-- to the fact that \not= is not producing valid pdf-a code. +-- The code does not solve this for virtual characters but it does +-- a decent job on collapsing so that fonts that have the right +-- glyph will have a decent unicode point. In the meantime this code +-- has been moved elsewhere. + +local collapse = { } processors.collapse = collapse + +local mathpairs = characters.mathpairs + +mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034 } -- (prime,prime) (prime,doubleprime) +mathpairs[0x2033] = { [0x2032] = 0x2034 } -- (doubleprime,prime) + +mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D } +mathpairs[0x222C] = { [0x222B] = 0x222D } + +mathpairs[0x007C] = { [0x007C] = 0x2016 } -- double bars + +local validpair = { + [noad_rel] = true, + [noad_ord] = true, + [noad_opdisplaylimits] = true, + [noad_oplimits] = true, + [noad_opnolimits] = true, +} + +local function collapsepair(pointer,what,n,parent) -- todo: switch to turn in on and off + if parent then + if validpair[parent.subtype] then + local current_nucleus = parent.nucleus + if not parent.sub and not parent.sup and current_nucleus.id == math_char then + local current_char = current_nucleus.char + local mathpair = mathpairs[current_char] + if mathpair then + local next_noad = parent.next + if next_noad and next_noad.id == math_noad then + if validpair[next_noad.subtype] then + local next_nucleus = next_noad.nucleus + if next_nucleus.id == math_char then + local next_char = next_nucleus.char + local newchar = mathpair[next_char] + if newchar then + local fam = current_nucleus.fam + local id = font_of_family(fam) + local characters = fontcharacters[id] + if characters and characters[newchar] then + if trace_collapsing then + report_collapsing("%U + %U => %U",current_char,next_char,newchar) + end + current_nucleus.char = newchar + local next_next_noad = next_noad.next + if next_next_noad then + parent.next = next_next_noad + next_next_noad.prev = parent + else + parent.next = nil + end + parent.sup = next_noad.sup + parent.sub = next_noad.sub + next_noad.sup = nil + next_noad.sub = nil + 
free_node(next_noad) + collapsepair(pointer,what,n,parent) + end + end + end + end + end + end + end + end + end +end + +collapse[math_char] = collapsepair + +function noads.handlers.collapse(head,style,penalties) + processnoads(head,collapse,"collapse") + return true +end + +-- normalize scripts + +local unscript = { } noads.processors.unscript = unscript + +local superscripts = characters.superscripts +local subscripts = characters.subscripts + +local replaced = { } + +local function replace(pointer,what,n,parent) + pointer = parent -- we're following the parent list (chars trigger this) + local next = pointer.next + local start_super, stop_super, start_sub, stop_sub + local mode = "unset" + while next and next.id == math_noad do + local nextnucleus = next.nucleus + if nextnucleus and nextnucleus.id == math_char and not next.sub and not next.sup then + local char = nextnucleus.char + local s = superscripts[char] + if s then + if not start_super then + start_super = next + mode = "super" + elseif mode == "sub" then + break + end + stop_super = next + next = next.next + nextnucleus.char = s + replaced[char] = (replaced[char] or 0) + 1 + if trace_normalizing then + report_normalizing("superscript %C becomes %C",char,s) + end + else + local s = subscripts[char] + if s then + if not start_sub then + start_sub = next + mode = "sub" + elseif mode == "super" then + break + end + stop_sub = next + next = next.next + nextnucleus.char = s + replaced[char] = (replaced[char] or 0) + 1 + if trace_normalizing then + report_normalizing("subscript %C becomes %C",char,s) + end + else + break + end + end + else + break + end + end + if start_super then + if start_super == stop_super then + pointer.sup = start_super.nucleus + else + local list = new_node(math_sub) -- todo attr + list.head = start_super + pointer.sup = list + end + if mode == "super" then + pointer.next = stop_super.next + end + stop_super.next = nil + end + if start_sub then + if start_sub == stop_sub then + pointer.sub = start_sub.nucleus + else + local list = new_node(math_sub) -- todo attr + list.head = start_sub + pointer.sub = list + end + if mode == "sub" then + pointer.next = stop_sub.next + end + stop_sub.next = nil + end + -- we could return stop +end + +unscript[math_char] = replace -- not noads as we need to recurse + +function handlers.unscript(head,style,penalties) + processnoads(head,unscript,"unscript") + return true +end + +statistics.register("math script replacements", function() + if next(replaced) then + local n, t = 0, { } + for k, v in table.sortedpairs(replaced) do + n = n + v + t[#t+1] = formatters["%C"](k) + end + return formatters["% t (n=%s)"](t,n) + end +end) + +-- math alternates: (in xits lgf: $ABC$ $\cal ABC$ $\mathalternate{cal}\cal ABC$) +-- math alternates: (in lucidanova lgf: $ABC \mathalternate{italic} ABC$) + +-- todo: set alternate for specific symbols + +local function initializemathalternates(tfmdata) + local goodies = tfmdata.goodies + if goodies then + local shared = tfmdata.shared + for i=1,#goodies do + -- first one counts + -- we can consider sharing the attributes ... 
todo (only once scan) + local mathgoodies = goodies[i].mathematics + local alternates = mathgoodies and mathgoodies.alternates + if alternates then + if trace_goodies then + report_goodies("loading alternates for font %a",tfmdata.properties.name) + end + local lastattribute, attributes = 0, { } + for k, v in next, alternates do + lastattribute = lastattribute + 1 + v.attribute = lastattribute + attributes[lastattribute] = v + end + shared.mathalternates = alternates -- to be checked if shared is ok here + shared.mathalternatesattributes = attributes -- to be checked if shared is ok here + return + end + end + end +end + +registerotffeature { + name = "mathalternates", + description = "additional math alternative shapes", + initializers = { + base = initializemathalternates, + node = initializemathalternates, + } +} + +local getalternate = otf.getalternate + +local a_mathalternate = attributes.private("mathalternate") + +local alternate = { } -- processors.alternate = alternate + +function mathematics.setalternate(fam,tag) + local id = font_of_family(fam) + local tfmdata = fontdata[id] + local mathalternates = tfmdata.shared and tfmdata.shared.mathalternates + if mathalternates then + local m = mathalternates[tag] + tex.attribute[a_mathalternate] = m and m.attribute or unsetvalue + end +end + +alternate[math_char] = function(pointer) + local a = pointer[a_mathalternate] + if a and a > 0 then + pointer[a_mathalternate] = 0 + local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata + local mathalternatesattributes = tfmdata.shared.mathalternatesattributes + if mathalternatesattributes then + local what = mathalternatesattributes[a] + local alt = getalternate(tfmdata,pointer.char,what.feature,what.value) + if alt then + if trace_alternates then + report_alternates("alternate %a, value %a, replacing glyph %U by glyph %U", + tostring(what.feature),tostring(what.value),pointer.char,alt) + end + pointer.char = alt + end + end + end +end + +function handlers.check(head,style,penalties) + processnoads(head,alternate,"check") + return true +end + +-- italics: we assume that only characters matter +-- +-- = we check for correction first because accessing nodes is slower +-- = the actual glyph is not that important (we can control it with numbers) + +local a_mathitalics = attributes.private("mathitalics") + +local italics = { } +local default_factor = 1/20 + +local function getcorrection(method,font,char) -- -- or character.italic -- (this one is for tex) + + local correction, fromvisual + + if method == 1 then + -- only font data triggered by fontitalics + local italics = fontitalics[font] + if italics then + local character = fontcharacters[font][char] + if character then + correction = character.italic_correction + if correction and correction ~= 0 then + return correction, false + end + end + end + elseif method == 2 then + -- only font data triggered by fontdata + local character = fontcharacters[font][char] + if character then + correction = character.italic_correction + if correction and correction ~= 0 then + return correction, false + end + end + elseif method == 3 then + -- only quad based by selective + local visual = chardata[char].visual + if not visual then + -- skip + elseif visual == "it" or visual == "bi" then + correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontemwidths[font] + if correction and correction ~= 0 then + return correction, true + end + end + elseif method == 4 then + -- combination of 1 and 3 + local italics = 
fontitalics[font] + if italics then + local character = fontcharacters[font][char] + if character then + correction = character.italic_correction + if correction and correction ~= 0 then + return correction, false + end + end + end + if not correction then + local visual = chardata[char].visual + if not visual then + -- skip + elseif visual == "it" or visual == "bi" then + correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontemwidths[font] + if correction and correction ~= 0 then + return correction, true + end + end + end + end + +end + +local function insert_kern(current,kern) + local sub = new_node(math_sub) -- todo: pool + local noad = new_node(math_noad) -- todo: pool + sub.head = kern + kern.next = noad + noad.nucleus = current + return sub +end + +local setcolor = nodes.tracers.colors.set +local italic_kern = new_kern +local c_positive_d = "trace:db" +local c_negative_d = "trace:dr" + +trackers.register("math.italics", function(v) + if v then + italic_kern = function(k,font) + local ex = 1.5 * fontexheights[font] + if k > 0 then + return setcolor(new_rule(k,ex,ex),c_positive_d) + else + return concat_nodes { + old_kern(k), + setcolor(new_rule(-k,ex,ex),c_negative_d), + old_kern(k), + } + end + end + else + italic_kern = new_kern + end +end) + +italics[math_char] = function(pointer,what,n,parent) + local method = pointer[a_mathitalics] + if method and method > 0 then + local char = pointer.char + local font = font_of_family(pointer.fam) -- todo: table + local correction, visual = getcorrection(method,font,char) + if correction then + local pid = parent.id + local sub, sup + if pid == math_noad then + sup = parent.sup + sub = parent.sub + end + if sup or sub then + local subtype = parent.subtype + if subtype == noad_oplimits then + if sup then + parent.sup = insert_kern(sup,italic_kern(correction,font)) + if trace_italics then + report_italics("method %a, adding %p italic correction for upper limit of %C",method,correction,char) + end + end + if sub then + local correction = - correction + parent.sub = insert_kern(sub,italic_kern(correction,font)) + if trace_italics then + report_italics("method %a, adding %p italic correction for lower limit of %C",method,correction,char) + end + end + else + if sup then + parent.sup = insert_kern(sup,italic_kern(correction,font)) + if trace_italics then + report_italics("method %a, adding %p italic correction before superscript after %C",method,correction,char) + end + end + end + else + local next_noad = parent.next + if not next_noad then + if n== 1 then -- only at the outer level .. 
will become an option (always,endonly,none) + if trace_italics then + report_italics("method %a, adding %p italic correction between %C and end math",method,correctio,char) + end + insert_node_after(parent,parent,italic_kern(correction,font)) + end + elseif next_noad.id == math_noad then + local next_subtype = next_noad.subtype + if next_subtype == noad_punct or next_subtype == noad_ord then + local next_nucleus = next_noad.nucleus + if next_nucleus.id == math_char then + local next_char = next_nucleus.char + local next_data = chardata[next_char] + local visual = next_data.visual + if visual == "it" or visual == "bi" then + -- if trace_italics then + -- report_italics("method %a, skipping %p italic correction between italic %C and italic %C",method,correction,char,next_char) + -- end + else + local category = next_data.category + if category == "nd" or category == "ll" or category == "lu" then + if trace_italics then + report_italics("method %a, adding %p italic correction between italic %C and non italic %C",method,correction,char,next_char) + end + insert_node_after(parent,parent,italic_kern(correction,font)) + -- elseif next_data.height > (fontexheights[font]/2) then + -- if trace_italics then + -- report_italics("method %a, adding %p italic correction between %C and ascending %C",method,correction,char,next_char) + -- end + -- insert_node_after(parent,parent,italic_kern(correction,font)) + -- elseif trace_italics then + -- -- report_italics("method %a, skipping %p italic correction between %C and %C",method,correction,char,next_char) + end + end + end + end + end + end + end + end +end + +function handlers.italics(head,style,penalties) + processnoads(head,italics,"italics") + return true +end + +local enable + +enable = function() + tasks.enableaction("math", "noads.handlers.italics") + if trace_italics then + report_italics("enabling math italics") + end + enable = false +end + +-- best do this only on math mode (less overhead) + +function mathematics.setitalics(n) + if enable then + enable() + end + if n == variables.reset then + texattribute[a_mathitalics] = unsetvalue + else + texattribute[a_mathitalics] = tonumber(n) or unsetvalue + end +end + +function mathematics.resetitalics() + texattribute[a_mathitalics] = unsetvalue +end + +-- variants + +local variants = { } + +local validvariants = { -- fast check on valid + [0x2229] = 0xFE00, [0x222A] = 0xFE00, + [0x2268] = 0xFE00, [0x2269] = 0xFE00, + [0x2272] = 0xFE00, [0x2273] = 0xFE00, + [0x228A] = 0xFE00, [0x228B] = 0xFE00, + [0x2293] = 0xFE00, [0x2294] = 0xFE00, + [0x2295] = 0xFE00, + [0x2297] = 0xFE00, + [0x229C] = 0xFE00, + [0x22DA] = 0xFE00, [0x22DB] = 0xFE00, + [0x2A3C] = 0xFE00, [0x2A3D] = 0xFE00, + [0x2A9D] = 0xFE00, [0x2A9E] = 0xFE00, + [0x2AAC] = 0xFE00, [0x2AAD] = 0xFE00, + [0x2ACB] = 0xFE00, [0x2ACC] = 0xFE00, +} + +variants[math_char] = function(pointer,what,n,parent) -- also set export value + local char = pointer.char + local selector = validvariants[char] + if selector then + local next = parent.next + if next and next.id == math_noad then + local nucleus = next.nucleus + if nucleus and nucleus.id == math_char and nucleus.char == selector then + local variant + local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata + local mathvariants = tfmdata.resources.variants -- and variantdata + if mathvariants then + mathvariants = mathvariants[selector] + if mathvariants then + variant = mathvariants[char] + end + end + if variant then + pointer.char = variant + pointer[a_exportstatus] = char -- we 
don't export the variant as it's visual markup + if trace_variants then + report_variants("variant (%U,%U) replaced by %U",char,selector,variant) + end + else + if trace_variants then + report_variants("no variant (%U,%U)",char,selector) + end + end + next.prev = pointer + parent.next = next.next + free_node(next) + end + end + end +end + +function handlers.variants(head,style,penalties) + processnoads(head,variants,"unicode variant") + return true +end + +-- the normal builder + +function builders.kernel.mlist_to_hlist(head,style,penalties) + return mlist_to_hlist(head,style,penalties), true +end + +-- function builders.kernel.mlist_to_hlist(head,style,penalties) +-- print("!!!!!!! BEFORE",penalties) +-- for n in node.traverse(head) do print(n) end +-- print("!!!!!!!") +-- head = mlist_to_hlist(head,style,penalties) +-- print("!!!!!!! AFTER") +-- for n in node.traverse(head) do print(n) end +-- print("!!!!!!!") +-- return head, true +-- end + +tasks.new { + name = "math", + arguments = 2, + processor = utilities.sequencers.nodeprocessor, + sequence = { + "before", + "normalizers", + "builders", + "after", + }, +} + +tasks.freezegroup("math", "normalizers") -- experimental +tasks.freezegroup("math", "builders") -- experimental + +local actions = tasks.actions("math") -- head, style, penalties + +local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming + +function processors.mlist_to_hlist(head,style,penalties) + starttiming(noads) + local head, done = actions(head,style,penalties) + stoptiming(noads) + return head, done +end + +callbacks.register('mlist_to_hlist',processors.mlist_to_hlist,"preprocessing math list") + +-- tracing + +statistics.register("math processing time", function() + return statistics.elapsedseconds(noads) +end) + +-- interface + +commands.setmathalternate = mathematics.setalternate +commands.setmathitalics = mathematics.setitalics +commands.resetmathitalics = mathematics.resetitalics diff --git a/tex/context/base/math-ren.lua b/tex/context/base/math-ren.lua index 2e7dba13d..348d8a2d9 100644 --- a/tex/context/base/math-ren.lua +++ b/tex/context/base/math-ren.lua @@ -1,69 +1,69 @@ -if not modules then modules = { } end modules ['math-ren'] = { - version = 1.001, - comment = "companion to math-ren.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next = next -local gsub = string.gsub - -local settings_to_array = utilities.parsers.settings_to_array -local allocate = storage.allocate - -local renderings = { } -mathematics.renderings = renderings - -local mappings = allocate() -renderings.mappings = mappings - -local numbers = allocate() -renderings.numbers = numbers - -local sets = allocate() -renderings.sets = sets - -mappings["blackboard-to-bold"] = { - [0x1D538] = 0x1D400, [0x1D539] = 0x1D401, [0x02102] = 0x1D402, [0x1D53B] = 0x1D403, [0x1D53C] = 0x1D404, - [0x1D53D] = 0x1D405, [0x1D53E] = 0x1D406, [0x0210D] = 0x1D407, [0x1D540] = 0x1D408, [0x1D541] = 0x1D409, - [0x1D542] = 0x1D40A, [0x1D543] = 0x1D40B, [0x1D544] = 0x1D40C, [0x02115] = 0x1D40D, [0x1D546] = 0x1D40E, - [0x02119] = 0x1D40F, [0x0211A] = 0x1D410, [0x0211D] = 0x1D411, [0x1D54A] = 0x1D412, [0x1D54B] = 0x1D413, - [0x1D54C] = 0x1D414, [0x1D54D] = 0x1D415, [0x1D54E] = 0x1D416, [0x1D54F] = 0x1D417, [0x1D550] = 0x1D418, - [0x02124] = 0x1D419, -} - -local function renderset(list) -- order matters - local tag = gsub(list," ","") - local n = sets[tag] - if not n then - local list 
= settings_to_array(tag) - local mapping = { } - for i=1,#list do - local m = mappings[list[i]] - if m then - for k, v in next, m do - mapping[k] = v - end - end - end - if next(mapping) then - n = #numbers + 1 - numbers[n] = mapping - else - n = attributes.unsetvalue - end - sets[tag] = n - end - return n -end - -mathematics.renderset = renderset - -function commands.mathrenderset(list) - context(renderset(list)) -end - --- function commands.setmatrendering(list) --- tex.setattribute(renderset(list)) --- end +if not modules then modules = { } end modules ['math-ren'] = { + version = 1.001, + comment = "companion to math-ren.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next = next +local gsub = string.gsub + +local settings_to_array = utilities.parsers.settings_to_array +local allocate = storage.allocate + +local renderings = { } +mathematics.renderings = renderings + +local mappings = allocate() +renderings.mappings = mappings + +local numbers = allocate() +renderings.numbers = numbers + +local sets = allocate() +renderings.sets = sets + +mappings["blackboard-to-bold"] = { + [0x1D538] = 0x1D400, [0x1D539] = 0x1D401, [0x02102] = 0x1D402, [0x1D53B] = 0x1D403, [0x1D53C] = 0x1D404, + [0x1D53D] = 0x1D405, [0x1D53E] = 0x1D406, [0x0210D] = 0x1D407, [0x1D540] = 0x1D408, [0x1D541] = 0x1D409, + [0x1D542] = 0x1D40A, [0x1D543] = 0x1D40B, [0x1D544] = 0x1D40C, [0x02115] = 0x1D40D, [0x1D546] = 0x1D40E, + [0x02119] = 0x1D40F, [0x0211A] = 0x1D410, [0x0211D] = 0x1D411, [0x1D54A] = 0x1D412, [0x1D54B] = 0x1D413, + [0x1D54C] = 0x1D414, [0x1D54D] = 0x1D415, [0x1D54E] = 0x1D416, [0x1D54F] = 0x1D417, [0x1D550] = 0x1D418, + [0x02124] = 0x1D419, +} + +local function renderset(list) -- order matters + local tag = gsub(list," ","") + local n = sets[tag] + if not n then + local list = settings_to_array(tag) + local mapping = { } + for i=1,#list do + local m = mappings[list[i]] + if m then + for k, v in next, m do + mapping[k] = v + end + end + end + if next(mapping) then + n = #numbers + 1 + numbers[n] = mapping + else + n = attributes.unsetvalue + end + sets[tag] = n + end + return n +end + +mathematics.renderset = renderset + +function commands.mathrenderset(list) + context(renderset(list)) +end + +-- function commands.setmatrendering(list) +-- tex.setattribute(renderset(list)) +-- end diff --git a/tex/context/base/math-tag.lua b/tex/context/base/math-tag.lua index ab5902dd4..3dafaaa2f 100644 --- a/tex/context/base/math-tag.lua +++ b/tex/context/base/math-tag.lua @@ -1,345 +1,345 @@ -if not modules then modules = { } end modules ['math-tag'] = { - version = 1.001, - comment = "companion to math-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- use lpeg matchers - -local find, match = string.find, string.match -local insert, remove = table.insert, table.remove - -local attributes, nodes = attributes, nodes - -local set_attributes = nodes.setattributes -local traverse_nodes = node.traverse - -local nodecodes = nodes.nodecodes - -local math_noad_code = nodecodes.noad -- attr nucleus sub sup -local math_accent_code = nodecodes.accent -- attr nucleus sub sup accent -local math_radical_code = nodecodes.radical -- attr nucleus sub sup left degree -local math_fraction_code = nodecodes.fraction -- attr nucleus sub sup left right -local math_box_code = nodecodes.subbox -- attr list 
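-- A minimal usage sketch of the renderset mechanism from math-ren.lua above
-- (illustrative only): renderset resolves a comma separated list of mapping names
-- to an index into renderings.numbers, and processors.render in math-noa.lua then
-- remaps characters that carry that index in their "mathrendering" attribute.

local renderset = mathematics.renderset

local n   = renderset("blackboard-to-bold")     -- index into renderings.numbers
local map = mathematics.renderings.numbers[n]   -- the merged character mapping
print(map and map[0x02102])                     -- 0x1D402: blackboard C becomes bold C

-- setting the (private) attribute lets the render handler pick the mapping up
-- for subsequent math:
tex.attribute[attributes.private("mathrendering")] = n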
-local math_sub_code = nodecodes.submlist -- attr list -local math_char_code = nodecodes.mathchar -- attr fam char -local math_textchar_code = nodecodes.mathtextchar -- attr fam char -local math_delim_code = nodecodes.delim -- attr small_fam small_char large_fam large_char -local math_style_code = nodecodes.style -- attr style -local math_choice_code = nodecodes.choice -- attr display text script scriptscript -local math_fence_code = nodecodes.fence -- attr subtype - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glyph_code = nodecodes.glyph -local glue_code = nodecodes.glue - -local a_tagged = attributes.private('tagged') -local a_exportstatus = attributes.private('exportstatus') -local a_mathcategory = attributes.private('mathcategory') -local a_mathmode = attributes.private('mathmode') - -local tags = structures.tags - -local start_tagged = tags.start -local restart_tagged = tags.restart -local stop_tagged = tags.stop -local taglist = tags.taglist - -local chardata = characters.data - -local getmathcode = tex.getmathcode -local mathcodes = mathematics.codes -local ordinary_code = mathcodes.ordinary -local variable_code = mathcodes.variable - -local process - -local function processsubsup(start) - -- At some point we might need to add an attribute signaling the - -- super- and subscripts because TeX and MathML use a different - -- order. - local nucleus, sup, sub = start.nucleus, start.sup, start.sub - if sub then - if sup then - start[a_tagged] = start_tagged("msubsup") - process(nucleus) - process(sub) - process(sup) - stop_tagged() - else - start[a_tagged] = start_tagged("msub") - process(nucleus) - process(sub) - stop_tagged() - end - elseif sup then - start[a_tagged] = start_tagged("msup") - process(nucleus) - process(sup) - stop_tagged() - else - process(nucleus) - end -end - --- todo: check function here and keep attribute the same - --- todo: variants -> original - -local actionstack = { } - -process = function(start) -- we cannot use the processor as we have no finalizers (yet) - while start do - local id = start.id - if id == math_char_code then - local char = start.char - -- check for code - local a = start[a_mathcategory] - if a then - a = { detail = a } - end - local code = getmathcode(char) - if code then - code = code[1] - end - local tag - if code == ordinary_code or code == variable_code then - local ch = chardata[char] - local mc = ch and ch.mathclass - if mc == "number" then - tag = "mn" - elseif mc == "variable" or not mc then -- variable is default - tag = "mi" - else - tag = "mo" - end - else - tag = "mo" - end - start[a_tagged] = start_tagged(tag,a) - stop_tagged() - break -- okay? 
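-- The math_char branch above maps a math character to a MathML element name from
-- its math code and its mathclass. The same decision, pulled out as a small
-- standalone helper for clarity (the name is invented; the logic mirrors the
-- branch above and assumes the locals defined earlier in this file):

local function mathmltag(char)
    local code = getmathcode(char)
    code = code and code[1]
    if code == ordinary_code or code == variable_code then
        local ch = chardata[char]
        local mc = ch and ch.mathclass
        if mc == "number" then
            return "mn"             -- digits and number-like characters
        elseif mc == "variable" or not mc then
            return "mi"             -- identifiers, also the default
        else
            return "mo"             -- everything else acts as an operator
        end
    else
        return "mo"
    end
end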
- elseif id == math_textchar_code then - -- check for code - local a = start[a_mathcategory] - if a then - start[a_tagged] = start_tagged("ms",{ detail = a }) - else - start[a_tagged] = start_tagged("ms") - end - stop_tagged() - break - elseif id == math_delim_code then - -- check for code - start[a_tagged] = start_tagged("mo") - stop_tagged() - break - elseif id == math_style_code then - -- has a next - elseif id == math_noad_code then - processsubsup(start) - elseif id == math_box_code or id == hlist_code or id == vlist_code then - -- keep an eye on math_box_code and see what ends up in there - local attr = start[a_tagged] - local last = attr and taglist[attr] - if last and find(last[#last],"formulacaption[:%-]") then - -- leave alone, will nicely move to the outer level - else - local text = start_tagged("mtext") - start[a_tagged] = text - local list = start.list - if not list then - -- empty list - elseif not attr then - -- box comes from strange place - set_attributes(list,a_tagged,text) - else - -- Beware, the first node in list is the actual list so we definitely - -- need to nest. This approach is a hack, maybe I'll make a proper - -- nesting feature to deal with this at another level. Here we just - -- fake structure by enforcing the inner one. - local tagdata = taglist[attr] - local common = #tagdata + 1 - local function runner(list) -- quite inefficient - local cache = { } -- we can have nested unboxed mess so best local to runner - for n in traverse_nodes(list) do - local id = n.id - local aa = n[a_tagged] - if aa then - local ac = cache[aa] - if not ac then - local tagdata = taglist[aa] - local extra = #tagdata - if common <= extra then - for i=common,extra do - ac = restart_tagged(tagdata[i]) -- can be made faster - end - for i=common,extra do - stop_tagged() -- can be made faster - end - else - ac = text - end - cache[aa] = ac - end - n[a_tagged] = ac - else - n[a_tagged] = text - end - if id == hlist_code or id == vlist_code then - runner(n.list) - end - end - end - runner(list) - end - stop_tagged() - end - elseif id == math_sub_code then - local list = start.list - if list then - local attr = start[a_tagged] - local last = attr and taglist[attr] - local action = last and match(last[#last],"maction:(.-)%-") - if action and action ~= "" then - if actionstack[#actionstack] == action then - start[a_tagged] = start_tagged("mrow") - process(list) - stop_tagged() - else - insert(actionstack,action) - start[a_tagged] = start_tagged("mrow",{ detail = action }) - process(list) - stop_tagged() - remove(actionstack) - end - else - start[a_tagged] = start_tagged("mrow") - process(list) - stop_tagged() - end - end - elseif id == math_fraction_code then - local num, denom, left, right = start.num, start.denom, start.left, start.right - if left then - left[a_tagged] = start_tagged("mo") - process(left) - stop_tagged() - end - start[a_tagged] = start_tagged("mfrac") - process(num) - process(denom) - stop_tagged() - if right then - right[a_tagged] = start_tagged("mo") - process(right) - stop_tagged() - end - elseif id == math_choice_code then - local display, text, script, scriptscript = start.display, start.text, start.script, start.scriptscript - if display then - process(display) - end - if text then - process(text) - end - if script then - process(script) - end - if scriptscript then - process(scriptscript) - end - elseif id == math_fence_code then - local delim = start.delim - local subtype = start.subtype - if subtype == 1 then - -- left - start[a_tagged] = start_tagged("mfenced") - 
if delim then - start[a_tagged] = start_tagged("mleft") - process(delim) - stop_tagged() - end - elseif subtype == 2 then - -- middle - if delim then - start[a_tagged] = start_tagged("mmiddle") - process(delim) - stop_tagged() - end - elseif subtype == 3 then - if delim then - start[a_tagged] = start_tagged("mright") - process(delim) - stop_tagged() - end - stop_tagged() - else - -- can't happen - end - elseif id == math_radical_code then - local left, degree = start.left, start.degree - if left then - start_tagged("") - process(left) -- root symbol, ignored - stop_tagged() - end - if degree then -- not good enough, can be empty mlist - start[a_tagged] = start_tagged("mroot") - processsubsup(start) - process(degree) - stop_tagged() - else - start[a_tagged] = start_tagged("msqrt") - processsubsup(start) - stop_tagged() - end - elseif id == math_accent_code then - local accent, bot_accent = start.accent, start.bot_accent - if bot_accent then - if accent then - start[a_tagged] = start_tagged("munderover",{ detail = "accent" }) - processsubsup(start) - process(bot_accent) - process(accent) - stop_tagged() - else - start[a_tagged] = start_tagged("munder",{ detail = "accent" }) - processsubsup(start) - process(bot_accent) - stop_tagged() - end - elseif accent then - start[a_tagged] = start_tagged("mover",{ detail = "accent" }) - processsubsup(start) - process(accent) - stop_tagged() - else - processsubsup(start) - end - elseif id == glue_code then - start[a_tagged] = start_tagged("mspace") - stop_tagged() - else - start[a_tagged] = start_tagged("merror", { detail = nodecodes[i] }) - stop_tagged() - end - start = start.next - end -end - -function noads.handlers.tags(head,style,penalties) - local v_math = start_tagged("math") - local v_mrow = start_tagged("mrow") - local v_mode = head[a_mathmode] - head[a_tagged] = v_math - head[a_tagged] = v_mrow - tags.setattributehash(v_math,"mode",v_mode == 1 and "display" or "inline") - process(head) - stop_tagged() - stop_tagged() - return true -end +if not modules then modules = { } end modules ['math-tag'] = { + version = 1.001, + comment = "companion to math-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- use lpeg matchers + +local find, match = string.find, string.match +local insert, remove = table.insert, table.remove + +local attributes, nodes = attributes, nodes + +local set_attributes = nodes.setattributes +local traverse_nodes = node.traverse + +local nodecodes = nodes.nodecodes + +local math_noad_code = nodecodes.noad -- attr nucleus sub sup +local math_accent_code = nodecodes.accent -- attr nucleus sub sup accent +local math_radical_code = nodecodes.radical -- attr nucleus sub sup left degree +local math_fraction_code = nodecodes.fraction -- attr nucleus sub sup left right +local math_box_code = nodecodes.subbox -- attr list +local math_sub_code = nodecodes.submlist -- attr list +local math_char_code = nodecodes.mathchar -- attr fam char +local math_textchar_code = nodecodes.mathtextchar -- attr fam char +local math_delim_code = nodecodes.delim -- attr small_fam small_char large_fam large_char +local math_style_code = nodecodes.style -- attr style +local math_choice_code = nodecodes.choice -- attr display text script scriptscript +local math_fence_code = nodecodes.fence -- attr subtype + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glyph_code = nodecodes.glyph +local glue_code = 
nodecodes.glue + +local a_tagged = attributes.private('tagged') +local a_exportstatus = attributes.private('exportstatus') +local a_mathcategory = attributes.private('mathcategory') +local a_mathmode = attributes.private('mathmode') + +local tags = structures.tags + +local start_tagged = tags.start +local restart_tagged = tags.restart +local stop_tagged = tags.stop +local taglist = tags.taglist + +local chardata = characters.data + +local getmathcode = tex.getmathcode +local mathcodes = mathematics.codes +local ordinary_code = mathcodes.ordinary +local variable_code = mathcodes.variable + +local process + +local function processsubsup(start) + -- At some point we might need to add an attribute signaling the + -- super- and subscripts because TeX and MathML use a different + -- order. + local nucleus, sup, sub = start.nucleus, start.sup, start.sub + if sub then + if sup then + start[a_tagged] = start_tagged("msubsup") + process(nucleus) + process(sub) + process(sup) + stop_tagged() + else + start[a_tagged] = start_tagged("msub") + process(nucleus) + process(sub) + stop_tagged() + end + elseif sup then + start[a_tagged] = start_tagged("msup") + process(nucleus) + process(sup) + stop_tagged() + else + process(nucleus) + end +end + +-- todo: check function here and keep attribute the same + +-- todo: variants -> original + +local actionstack = { } + +process = function(start) -- we cannot use the processor as we have no finalizers (yet) + while start do + local id = start.id + if id == math_char_code then + local char = start.char + -- check for code + local a = start[a_mathcategory] + if a then + a = { detail = a } + end + local code = getmathcode(char) + if code then + code = code[1] + end + local tag + if code == ordinary_code or code == variable_code then + local ch = chardata[char] + local mc = ch and ch.mathclass + if mc == "number" then + tag = "mn" + elseif mc == "variable" or not mc then -- variable is default + tag = "mi" + else + tag = "mo" + end + else + tag = "mo" + end + start[a_tagged] = start_tagged(tag,a) + stop_tagged() + break -- okay? + elseif id == math_textchar_code then + -- check for code + local a = start[a_mathcategory] + if a then + start[a_tagged] = start_tagged("ms",{ detail = a }) + else + start[a_tagged] = start_tagged("ms") + end + stop_tagged() + break + elseif id == math_delim_code then + -- check for code + start[a_tagged] = start_tagged("mo") + stop_tagged() + break + elseif id == math_style_code then + -- has a next + elseif id == math_noad_code then + processsubsup(start) + elseif id == math_box_code or id == hlist_code or id == vlist_code then + -- keep an eye on math_box_code and see what ends up in there + local attr = start[a_tagged] + local last = attr and taglist[attr] + if last and find(last[#last],"formulacaption[:%-]") then + -- leave alone, will nicely move to the outer level + else + local text = start_tagged("mtext") + start[a_tagged] = text + local list = start.list + if not list then + -- empty list + elseif not attr then + -- box comes from strange place + set_attributes(list,a_tagged,text) + else + -- Beware, the first node in list is the actual list so we definitely + -- need to nest. This approach is a hack, maybe I'll make a proper + -- nesting feature to deal with this at another level. Here we just + -- fake structure by enforcing the inner one. 
+ local tagdata = taglist[attr] + local common = #tagdata + 1 + local function runner(list) -- quite inefficient + local cache = { } -- we can have nested unboxed mess so best local to runner + for n in traverse_nodes(list) do + local id = n.id + local aa = n[a_tagged] + if aa then + local ac = cache[aa] + if not ac then + local tagdata = taglist[aa] + local extra = #tagdata + if common <= extra then + for i=common,extra do + ac = restart_tagged(tagdata[i]) -- can be made faster + end + for i=common,extra do + stop_tagged() -- can be made faster + end + else + ac = text + end + cache[aa] = ac + end + n[a_tagged] = ac + else + n[a_tagged] = text + end + if id == hlist_code or id == vlist_code then + runner(n.list) + end + end + end + runner(list) + end + stop_tagged() + end + elseif id == math_sub_code then + local list = start.list + if list then + local attr = start[a_tagged] + local last = attr and taglist[attr] + local action = last and match(last[#last],"maction:(.-)%-") + if action and action ~= "" then + if actionstack[#actionstack] == action then + start[a_tagged] = start_tagged("mrow") + process(list) + stop_tagged() + else + insert(actionstack,action) + start[a_tagged] = start_tagged("mrow",{ detail = action }) + process(list) + stop_tagged() + remove(actionstack) + end + else + start[a_tagged] = start_tagged("mrow") + process(list) + stop_tagged() + end + end + elseif id == math_fraction_code then + local num, denom, left, right = start.num, start.denom, start.left, start.right + if left then + left[a_tagged] = start_tagged("mo") + process(left) + stop_tagged() + end + start[a_tagged] = start_tagged("mfrac") + process(num) + process(denom) + stop_tagged() + if right then + right[a_tagged] = start_tagged("mo") + process(right) + stop_tagged() + end + elseif id == math_choice_code then + local display, text, script, scriptscript = start.display, start.text, start.script, start.scriptscript + if display then + process(display) + end + if text then + process(text) + end + if script then + process(script) + end + if scriptscript then + process(scriptscript) + end + elseif id == math_fence_code then + local delim = start.delim + local subtype = start.subtype + if subtype == 1 then + -- left + start[a_tagged] = start_tagged("mfenced") + if delim then + start[a_tagged] = start_tagged("mleft") + process(delim) + stop_tagged() + end + elseif subtype == 2 then + -- middle + if delim then + start[a_tagged] = start_tagged("mmiddle") + process(delim) + stop_tagged() + end + elseif subtype == 3 then + if delim then + start[a_tagged] = start_tagged("mright") + process(delim) + stop_tagged() + end + stop_tagged() + else + -- can't happen + end + elseif id == math_radical_code then + local left, degree = start.left, start.degree + if left then + start_tagged("") + process(left) -- root symbol, ignored + stop_tagged() + end + if degree then -- not good enough, can be empty mlist + start[a_tagged] = start_tagged("mroot") + processsubsup(start) + process(degree) + stop_tagged() + else + start[a_tagged] = start_tagged("msqrt") + processsubsup(start) + stop_tagged() + end + elseif id == math_accent_code then + local accent, bot_accent = start.accent, start.bot_accent + if bot_accent then + if accent then + start[a_tagged] = start_tagged("munderover",{ detail = "accent" }) + processsubsup(start) + process(bot_accent) + process(accent) + stop_tagged() + else + start[a_tagged] = start_tagged("munder",{ detail = "accent" }) + processsubsup(start) + process(bot_accent) + stop_tagged() + end + elseif accent 
then + start[a_tagged] = start_tagged("mover",{ detail = "accent" }) + processsubsup(start) + process(accent) + stop_tagged() + else + processsubsup(start) + end + elseif id == glue_code then + start[a_tagged] = start_tagged("mspace") + stop_tagged() + else + start[a_tagged] = start_tagged("merror", { detail = nodecodes[i] }) + stop_tagged() + end + start = start.next + end +end + +function noads.handlers.tags(head,style,penalties) + local v_math = start_tagged("math") + local v_mrow = start_tagged("mrow") + local v_mode = head[a_mathmode] + head[a_tagged] = v_math + head[a_tagged] = v_mrow + tags.setattributehash(v_math,"mode",v_mode == 1 and "display" or "inline") + process(head) + stop_tagged() + stop_tagged() + return true +end diff --git a/tex/context/base/math-ttv.lua b/tex/context/base/math-ttv.lua index 1f644e788..e5548c730 100644 --- a/tex/context/base/math-ttv.lua +++ b/tex/context/base/math-ttv.lua @@ -1,801 +1,801 @@ -if not modules then modules = { } end modules ['math-ttv'] = { - version = 1.001, - comment = "traditional tex vectors, companion to math-vfu.lua", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", - dataonly = true, -} - -local vfmath = fonts.handlers.vf.math -local setletters = vfmath.setletters -local setdigits = vfmath.setdigits - -local mathencodings = fonts.encodings.math - --- varphi is part of the alphabet, contrary to the other var*s' - -mathencodings["large-to-small"] = { - [0x00028] = 0x00, -- ( - [0x00029] = 0x01, -- ) - [0x0005B] = 0x02, -- [ - [0x0005D] = 0x03, -- ] - [0x0230A] = 0x04, -- lfloor - [0x0230B] = 0x05, -- rfloor - [0x02308] = 0x06, -- lceil - [0x02309] = 0x07, -- rceil - [0x0007B] = 0x08, -- { - [0x0007D] = 0x09, -- } - [0x027E8] = 0x0A, -- < - [0x027E9] = 0x0B, -- > - [0x0007C] = 0x0C, -- | - -- [0x0] = 0x0D, -- lVert rVert Vert - -- [0x0002F] = 0x0E, -- / - [0x0005C] = 0x0F, -- \ - -- [0x0] = 0x3A, -- lgroup - -- [0x0] = 0x3B, -- rgroup - -- [0x0] = 0x3C, -- arrowvert - -- [0x0] = 0x3D, -- Arrowvert - [0x02195] = 0x3F, -- updownarrow - -- [0x0] = 0x40, -- lmoustache - -- [0x0] = 0x41, -- rmoustache - [0x0221A] = 0x70, -- sqrt - [0x021D5] = 0x77, -- Updownarrow - [0x02191] = 0x78, -- uparrow - [0x02193] = 0x79, -- downarrow - [0x021D1] = 0x7E, -- Uparrow - [0x021D3] = 0x7F, -- Downarrow - [0x0220F] = 0x59, -- prod - [0x02210] = 0x61, -- coprod - [0x02211] = 0x58, -- sum - [0x0222B] = 0x5A, -- intop - [0x0222E] = 0x49, -- ointop - -- [0xFE302] = 0x62, -- widehat - -- [0xFE303] = 0x65, -- widetilde - [0x00302] = 0x62, -- widehat - [0x00303] = 0x65, -- widetilde - [0x022C0] = 0x5E, -- bigwedge - [0x022C1] = 0x5F, -- bigvee - [0x022C2] = 0x5C, -- bigcap - [0x022C3] = 0x5B, -- bigcup - [0x02044] = 0x0E, -- / -} - --- Beware: these are (in cm/lm) below the baseline due to limitations --- in the tfm format bu the engien (combined with the mathclass) takes --- care of it. If we need them in textmode, we should make them virtual --- and move them up but we're in no hurry with that. 
- -mathencodings["tex-ex"] = { - [0x0220F] = 0x51, -- prod - [0x02210] = 0x60, -- coprod - [0x02211] = 0x50, -- sum - [0x0222B] = 0x52, -- intop - [0x0222E] = 0x48, -- ointop - [0x022C0] = 0x56, -- bigwedge - [0x022C1] = 0x57, -- bigvee - [0x022C2] = 0x54, -- bigcap - [0x022C3] = 0x53, -- bigcup - [0x02A00] = 0x4A, -- bigodot -- fixed BJ - [0x02A01] = 0x4C, -- bigoplus - [0x02A02] = 0x4E, -- bigotimes - -- [0x02A03] = , -- bigudot -- - [0x02A04] = 0x55, -- biguplus - [0x02A06] = 0x46, -- bigsqcup -} - --- only math stuff is needed, since we always use an lm or gyre --- font as main font - -mathencodings["tex-mr"] = { - [0x00393] = 0x00, -- Gamma - [0x00394] = 0x01, -- Delta - [0x00398] = 0x02, -- Theta - [0x0039B] = 0x03, -- Lambda - [0x0039E] = 0x04, -- Xi - [0x003A0] = 0x05, -- Pi - [0x003A3] = 0x06, -- Sigma - [0x003A5] = 0x07, -- Upsilon - [0x003A6] = 0x08, -- Phi - [0x003A8] = 0x09, -- Psi - [0x003A9] = 0x0A, -- Omega --- [0x00060] = 0x12, -- [math]grave --- [0x000B4] = 0x13, -- [math]acute --- [0x002C7] = 0x14, -- [math]check --- [0x002D8] = 0x15, -- [math]breve --- [0x000AF] = 0x16, -- [math]bar --- [0x00021] = 0x21, -- ! --- [0x00028] = 0x28, -- ( --- [0x00029] = 0x29, -- ) --- [0x0002B] = 0x2B, -- + --- [0x0002F] = 0x2F, -- / --- [0x0003A] = 0x3A, -- : --- [0x02236] = 0x3A, -- colon --- [0x0003B] = 0x3B, -- ; --- [0x0003C] = 0x3C, -- < --- [0x0003D] = 0x3D, -- = --- [0x0003E] = 0x3E, -- > --- [0x0003F] = 0x3F, -- ? - [0x00391] = 0x41, -- Alpha - [0x00392] = 0x42, -- Beta - [0x02145] = 0x44, - [0x00395] = 0x45, -- Epsilon - [0x00397] = 0x48, -- Eta - [0x00399] = 0x49, -- Iota - [0x0039A] = 0x4B, -- Kappa - [0x0039C] = 0x4D, -- Mu - [0x0039D] = 0x4E, -- Nu - [0x0039F] = 0x4F, -- Omicron - [0x003A1] = 0x52, -- Rho - [0x003A4] = 0x54, -- Tau - [0x003A7] = 0x58, -- Chi - [0x00396] = 0x5A, -- Zeta --- [0x0005B] = 0x5B, -- [ --- [0x0005D] = 0x5D, -- ] --- [0x0005E] = 0x5E, -- [math]hat -- the text one - [0x00302] = 0x5E, -- [math]hat -- the real math one --- [0x002D9] = 0x5F, -- [math]dot - [0x02146] = 0x64, - [0x02147] = 0x65, --- [0x002DC] = 0x7E, -- [math]tilde -- the text one - [0x00303] = 0x7E, -- [math]tilde -- the real one --- [0x000A8] = 0x7F, -- [math]ddot -} - -mathencodings["tex-mr-missing"] = { - [0x02236] = 0x3A, -- colon -} - -mathencodings["tex-mi"] = { - [0x1D6E4] = 0x00, -- Gamma - [0x1D6E5] = 0x01, -- Delta - [0x1D6E9] = 0x02, -- Theta - [0x1D6F3] = 0x02, -- varTheta (not present in TeX) - [0x1D6EC] = 0x03, -- Lambda - [0x1D6EF] = 0x04, -- Xi - [0x1D6F1] = 0x05, -- Pi - [0x1D6F4] = 0x06, -- Sigma - [0x1D6F6] = 0x07, -- Upsilon - [0x1D6F7] = 0x08, -- Phi - [0x1D6F9] = 0x09, -- Psi - [0x1D6FA] = 0x0A, -- Omega - [0x1D6FC] = 0x0B, -- alpha - [0x1D6FD] = 0x0C, -- beta - [0x1D6FE] = 0x0D, -- gamma - [0x1D6FF] = 0x0E, -- delta - [0x1D716] = 0x0F, -- epsilon TODO: 1D716 - [0x1D701] = 0x10, -- zeta - [0x1D702] = 0x11, -- eta - [0x1D703] = 0x12, -- theta TODO: 1D703 - [0x1D704] = 0x13, -- iota - [0x1D705] = 0x14, -- kappa - [0x1D718] = 0x14, -- varkappa, not in tex fonts - [0x1D706] = 0x15, -- lambda - [0x1D707] = 0x16, -- mu - [0x1D708] = 0x17, -- nu - [0x1D709] = 0x18, -- xi - [0x1D70B] = 0x19, -- pi - [0x1D70C] = 0x1A, -- rho - [0x1D70E] = 0x1B, -- sigma - [0x1D70F] = 0x1C, -- tau - [0x1D710] = 0x1D, -- upsilon - [0x1D719] = 0x1E, -- phi - [0x1D712] = 0x1F, -- chi - [0x1D713] = 0x20, -- psi - [0x1D714] = 0x21, -- omega - [0x1D700] = 0x22, -- varepsilon (the other way around) - [0x1D717] = 0x23, -- vartheta - [0x1D71B] = 0x24, -- varpi - [0x1D71A] = 0x25, -- varrho - [0x1D70D] 
= 0x26, -- varsigma - [0x1D711] = 0x27, -- varphi (the other way around) - [0x021BC] = 0x28, -- leftharpoonup - [0x021BD] = 0x29, -- leftharpoondown - [0x021C0] = 0x2A, -- rightharpoonup - [0x021C1] = 0x2B, -- rightharpoondown - [0xFE322] = 0x2C, -- lhook (hook for combining arrows) - [0xFE323] = 0x2D, -- rhook (hook for combining arrows) - [0x025B7] = 0x2E, -- triangleright : cf lmmath / BJ - [0x025C1] = 0x2F, -- triangleleft : cf lmmath / BJ - [0x022B3] = 0x2E, -- triangleright : cf lmmath this a cramped triangles / BJ / see * - [0x022B2] = 0x2F, -- triangleleft : cf lmmath this a cramped triangles / BJ / see * --- [0x00041] = 0x30, -- 0 --- [0x00041] = 0x31, -- 1 --- [0x00041] = 0x32, -- 2 --- [0x00041] = 0x33, -- 3 --- [0x00041] = 0x34, -- 4 --- [0x00041] = 0x35, -- 5 --- [0x00041] = 0x36, -- 6 --- [0x00041] = 0x37, -- 7 --- [0x00041] = 0x38, -- 8 --- [0x00041] = 0x39, -- 9 ---~ [0x0002E] = 0x3A, -- . - [0x0002C] = 0x3B, -- , - [0x0003C] = 0x3C, -- < --- [0x0002F] = 0x3D, -- /, slash, solidus - [0x02044] = 0x3D, -- / AM: Not sure - [0x0003E] = 0x3E, -- > - [0x022C6] = 0x3F, -- star - [0x02202] = 0x40, -- partial --- - [0x0266D] = 0x5B, -- flat - [0x0266E] = 0x5C, -- natural - [0x0266F] = 0x5D, -- sharp - [0x02323] = 0x5E, -- smile - [0x02322] = 0x5F, -- frown - [0x02113] = 0x60, -- ell --- - [0x1D6A4] = 0x7B, -- imath (TODO: also 0131) - [0x1D6A5] = 0x7C, -- jmath (TODO: also 0237) - [0x02118] = 0x7D, -- wp - [0x020D7] = 0x7E, -- vec (TODO: not sure) --- 0x7F, -- (no idea what that could be) -} - -mathencodings["tex-it"] = { --- [0x1D434] = 0x41, -- A - [0x1D6E2] = 0x41, -- Alpha --- [0x1D435] = 0x42, -- B - [0x1D6E3] = 0x42, -- Beta --- [0x1D436] = 0x43, -- C --- [0x1D437] = 0x44, -- D --- [0x1D438] = 0x45, -- E - [0x1D6E6] = 0x45, -- Epsilon --- [0x1D439] = 0x46, -- F --- [0x1D43A] = 0x47, -- G --- [0x1D43B] = 0x48, -- H - [0x1D6E8] = 0x48, -- Eta --- [0x1D43C] = 0x49, -- I - [0x1D6EA] = 0x49, -- Iota --- [0x1D43D] = 0x4A, -- J --- [0x1D43E] = 0x4B, -- K - [0x1D6EB] = 0x4B, -- Kappa --- [0x1D43F] = 0x4C, -- L --- [0x1D440] = 0x4D, -- M - [0x1D6ED] = 0x4D, -- Mu --- [0x1D441] = 0x4E, -- N - [0x1D6EE] = 0x4E, -- Nu --- [0x1D442] = 0x4F, -- O - [0x1D6F0] = 0x4F, -- Omicron --- [0x1D443] = 0x50, -- P - [0x1D6F2] = 0x50, -- Rho --- [0x1D444] = 0x51, -- Q --- [0x1D445] = 0x52, -- R --- [0x1D446] = 0x53, -- S --- [0x1D447] = 0x54, -- T - [0x1D6F5] = 0x54, -- Tau --- [0x1D448] = 0x55, -- U --- [0x1D449] = 0x56, -- V --- [0x1D44A] = 0x57, -- W --- [0x1D44B] = 0x58, -- X - [0x1D6F8] = 0x58, -- Chi --- [0x1D44C] = 0x59, -- Y --- [0x1D44D] = 0x5A, -- Z --- --- [0x1D44E] = 0x61, -- a --- [0x1D44F] = 0x62, -- b --- [0x1D450] = 0x63, -- c --- [0x1D451] = 0x64, -- d --- [0x1D452] = 0x65, -- e --- [0x1D453] = 0x66, -- f --- [0x1D454] = 0x67, -- g --- [0x1D455] = 0x68, -- h - [0x0210E] = 0x68, -- Planck constant (h) --- [0x1D456] = 0x69, -- i --- [0x1D457] = 0x6A, -- j --- [0x1D458] = 0x6B, -- k --- [0x1D459] = 0x6C, -- l --- [0x1D45A] = 0x6D, -- m --- [0x1D45B] = 0x6E, -- n --- [0x1D45C] = 0x6F, -- o - [0x1D70A] = 0x6F, -- omicron --- [0x1D45D] = 0x70, -- p --- [0x1D45E] = 0x71, -- q --- [0x1D45F] = 0x72, -- r --- [0x1D460] = 0x73, -- s --- [0x1D461] = 0x74, -- t --- [0x1D462] = 0x75, -- u --- [0x1D463] = 0x76, -- v --- [0x1D464] = 0x77, -- w --- [0x1D465] = 0x78, -- x --- [0x1D466] = 0x79, -- y --- [0x1D467] = 0x7A, -- z -} - -mathencodings["tex-ss"] = { } -mathencodings["tex-tt"] = { } -mathencodings["tex-bf"] = { } -mathencodings["tex-bi"] = { } -mathencodings["tex-fraktur"] = { } 
-mathencodings["tex-fraktur-bold"] = { } - -mathencodings["tex-sy"] = { - [0x0002D] = 0x00, -- - - [0x02212] = 0x00, -- - --- [0x02201] = 0x00, -- complement --- [0x02206] = 0x00, -- increment --- [0x02204] = 0x00, -- not exists --- [0x000B7] = 0x01, -- cdot - [0x022C5] = 0x01, -- cdot - [0x000D7] = 0x02, -- times - [0x0002A] = 0x03, -- * - [0x02217] = 0x03, -- * - [0x000F7] = 0x04, -- div - [0x022C4] = 0x05, -- diamond - [0x000B1] = 0x06, -- pm - [0x02213] = 0x07, -- mp - [0x02295] = 0x08, -- oplus - [0x02296] = 0x09, -- ominus - [0x02297] = 0x0A, -- otimes - [0x02298] = 0x0B, -- oslash - [0x02299] = 0x0C, -- odot - [0x025EF] = 0x0D, -- bigcirc, Orb (either 25EF or 25CB) -- todo - [0x02218] = 0x0E, -- circ - [0x02219] = 0x0F, -- bullet - [0x02022] = 0x0F, -- bullet - [0x0224D] = 0x10, -- asymp - [0x02261] = 0x11, -- equiv - [0x02286] = 0x12, -- subseteq - [0x02287] = 0x13, -- supseteq - [0x02264] = 0x14, -- leq - [0x02265] = 0x15, -- geq - [0x02AAF] = 0x16, -- preceq --- [0x0227C] = 0x16, -- preceq, AM:No see 2AAF - [0x02AB0] = 0x17, -- succeq --- [0x0227D] = 0x17, -- succeq, AM:No see 2AB0 - [0x0223C] = 0x18, -- sim - [0x02248] = 0x19, -- approx - [0x02282] = 0x1A, -- subset - [0x02283] = 0x1B, -- supset - [0x0226A] = 0x1C, -- ll - [0x0226B] = 0x1D, -- gg - [0x0227A] = 0x1E, -- prec - [0x0227B] = 0x1F, -- succ - [0x02190] = 0x20, -- leftarrow - [0x02192] = 0x21, -- rightarrow ---~ [0xFE190] = 0x20, -- leftarrow ---~ [0xFE192] = 0x21, -- rightarrow - [0x02191] = 0x22, -- uparrow - [0x02193] = 0x23, -- downarrow - [0x02194] = 0x24, -- leftrightarrow - [0x02197] = 0x25, -- nearrow - [0x02198] = 0x26, -- searrow - [0x02243] = 0x27, -- simeq - [0x021D0] = 0x28, -- Leftarrow - [0x021D2] = 0x29, -- Rightarrow - [0x021D1] = 0x2A, -- Uparrow - [0x021D3] = 0x2B, -- Downarrow - [0x021D4] = 0x2C, -- Leftrightarrow - [0x02196] = 0x2D, -- nwarrow - [0x02199] = 0x2E, -- swarrow - [0x0221D] = 0x2F, -- propto - [0x02032] = 0x30, -- prime - [0x0221E] = 0x31, -- infty - [0x02208] = 0x32, -- in - [0x0220B] = 0x33, -- ni - [0x025B3] = 0x34, -- triangle, bigtriangleup - [0x025BD] = 0x35, -- bigtriangledown - [0x00338] = 0x36, -- not --- 0x37, -- (beginning of arrow) - [0x02200] = 0x38, -- forall - [0x02203] = 0x39, -- exists - [0x000AC] = 0x3A, -- neg, lnot - [0x02205] = 0x3B, -- empty set - [0x0211C] = 0x3C, -- Re - [0x02111] = 0x3D, -- Im - [0x022A4] = 0x3E, -- top - [0x022A5] = 0x3F, -- bot, perp - [0x02135] = 0x40, -- aleph - [0x1D49C] = 0x41, -- script A - [0x0212C] = 0x42, -- script B - [0x1D49E] = 0x43, -- script C - [0x1D49F] = 0x44, -- script D - [0x02130] = 0x45, -- script E - [0x02131] = 0x46, -- script F - [0x1D4A2] = 0x47, -- script G - [0x0210B] = 0x48, -- script H - [0x02110] = 0x49, -- script I - [0x1D4A5] = 0x4A, -- script J - [0x1D4A6] = 0x4B, -- script K - [0x02112] = 0x4C, -- script L - [0x02133] = 0x4D, -- script M - [0x1D4A9] = 0x4E, -- script N - [0x1D4AA] = 0x4F, -- script O - [0x1D4AB] = 0x50, -- script P - [0x1D4AC] = 0x51, -- script Q - [0x0211B] = 0x52, -- script R - [0x1D4AE] = 0x53, -- script S - [0x1D4AF] = 0x54, -- script T - [0x1D4B0] = 0x55, -- script U - [0x1D4B1] = 0x56, -- script V - [0x1D4B2] = 0x57, -- script W - [0x1D4B3] = 0x58, -- script X - [0x1D4B4] = 0x59, -- script Y - [0x1D4B5] = 0x5A, -- script Z - [0x0222A] = 0x5B, -- cup - [0x02229] = 0x5C, -- cap - [0x0228E] = 0x5D, -- uplus - [0x02227] = 0x5E, -- wedge, land - [0x02228] = 0x5F, -- vee, lor - [0x022A2] = 0x60, -- vdash - [0x022A3] = 0x61, -- dashv - [0x0230A] = 0x62, -- lfloor - [0x0230B] = 0x63, -- rfloor - 
[0x02308] = 0x64, -- lceil - [0x02309] = 0x65, -- rceil - [0x0007B] = 0x66, -- {, lbrace - [0x0007D] = 0x67, -- }, rbrace - [0x027E8] = 0x68, -- <, langle - [0x027E9] = 0x69, -- >, rangle - [0x0007C] = 0x6A, -- |, mid, lvert, rvert - [0x02225] = 0x6B, -- parallel - -- [0x0 ] = 0x00, -- Vert, lVert, rVert, arrowvert, Arrowvert - [0x02195] = 0x6C, -- updownarrow - [0x021D5] = 0x6D, -- Updownarrow - [0x0005C] = 0x6E, -- \, backslash, setminus - [0x02216] = 0x6E, -- setminus - [0x02240] = 0x6F, -- wr - [0x0221A] = 0x70, -- sqrt. AM: Check surd?? - [0x02A3F] = 0x71, -- amalg - [0x1D6FB] = 0x72, -- nabla --- [0x0222B] = 0x73, -- smallint (TODO: what about intop?) - [0x02294] = 0x74, -- sqcup - [0x02293] = 0x75, -- sqcap - [0x02291] = 0x76, -- sqsubseteq - [0x02292] = 0x77, -- sqsupseteq - [0x000A7] = 0x78, -- S - [0x02020] = 0x79, -- dagger, dag - [0x02021] = 0x7A, -- ddagger, ddag - [0x000B6] = 0x7B, -- P - [0x02663] = 0x7C, -- clubsuit - [0x02662] = 0x7D, -- diamondsuit - [0x02661] = 0x7E, -- heartsuit - [0x02660] = 0x7F, -- spadesuit - [0xFE321] = 0x37, -- mapstochar - - [0xFE325] = 0x30, -- prime 0x02032 -} - --- The names in masm10.enc can be trusted best and are shown in the first --- column, while in the second column we show the tex/ams names. As usual --- it costs hours to figure out such a table. - -mathencodings["tex-ma"] = { - [0x022A1] = 0x00, -- squaredot \boxdot - [0x0229E] = 0x01, -- squareplus \boxplus - [0x022A0] = 0x02, -- squaremultiply \boxtimes - [0x025A1] = 0x03, -- square \square \Box - [0x025A0] = 0x04, -- squaresolid \blacksquare - [0x025AA] = 0x05, -- squaresmallsolid \centerdot - [0x022C4] = 0x06, -- diamond \Diamond \lozenge - [0x02666] = 0x07, -- diamondsolid \blacklozenge - [0x021BB] = 0x08, -- clockwise \circlearrowright - [0x021BA] = 0x09, -- anticlockwise \circlearrowleft - [0x021CC] = 0x0A, -- harpoonleftright \rightleftharpoons - [0x021CB] = 0x0B, -- harpoonrightleft \leftrightharpoons - [0x0229F] = 0x0C, -- squareminus \boxminus - [0x022A9] = 0x0D, -- forces \Vdash - [0x022AA] = 0x0E, -- forcesbar \Vvdash - [0x022A8] = 0x0F, -- satisfies \vDash - [0x021A0] = 0x10, -- dblarrowheadright \twoheadrightarrow - [0x0219E] = 0x11, -- dblarrowheadleft \twoheadleftarrow - [0x021C7] = 0x12, -- dblarrowleft \leftleftarrows - [0x021C9] = 0x13, -- dblarrowright \rightrightarrows - [0x021C8] = 0x14, -- dblarrowup \upuparrows - [0x021CA] = 0x15, -- dblarrowdwn \downdownarrows - [0x021BE] = 0x16, -- harpoonupright \upharpoonright \restriction - [0x021C2] = 0x17, -- harpoondownright \downharpoonright - [0x021BF] = 0x18, -- harpoonupleft \upharpoonleft - [0x021C3] = 0x19, -- harpoondownleft \downharpoonleft - [0x021A3] = 0x1A, -- arrowtailright \rightarrowtail - [0x021A2] = 0x1B, -- arrowtailleft \leftarrowtail - [0x021C6] = 0x1C, -- arrowparrleftright \leftrightarrows --- [0x021C5] = 0x00, -- \updownarrows (missing in lm) - [0x021C4] = 0x1D, -- arrowparrrightleft \rightleftarrows - [0x021B0] = 0x1E, -- shiftleft \Lsh - [0x021B1] = 0x1F, -- shiftright \Rsh - [0x021DD] = 0x20, -- squiggleright \leadsto \rightsquigarrow - [0x021AD] = 0x21, -- squiggleleftright \leftrightsquigarrow - [0x021AB] = 0x22, -- curlyleft \looparrowleft - [0x021AC] = 0x23, -- curlyright \looparrowright - [0x02257] = 0x24, -- circleequal \circeq - [0x0227F] = 0x25, -- followsorequal \succsim - [0x02273] = 0x26, -- greaterorsimilar \gtrsim - [0x02A86] = 0x27, -- greaterorapproxeql \gtrapprox - [0x022B8] = 0x28, -- multimap \multimap - [0x02234] = 0x29, -- therefore \therefore - [0x02235] = 0x2A, -- because 
\because - [0x02251] = 0x2B, -- equalsdots \Doteq \doteqdot - [0x0225C] = 0x2C, -- defines \triangleq - [0x0227E] = 0x2D, -- precedesorequal \precsim - [0x02272] = 0x2E, -- lessorsimilar \lesssim - [0x02A85] = 0x2F, -- lessorapproxeql \lessapprox - [0x02A95] = 0x30, -- equalorless \eqslantless - [0x02A96] = 0x31, -- equalorgreater \eqslantgtr - [0x022DE] = 0x32, -- equalorprecedes \curlyeqprec - [0x022DF] = 0x33, -- equalorfollows \curlyeqsucc - [0x0227C] = 0x34, -- precedesorcurly \preccurlyeq - [0x02266] = 0x35, -- lessdblequal \leqq - [0x02A7D] = 0x36, -- lessorequalslant \leqslant - [0x02276] = 0x37, -- lessorgreater \lessgtr - [0x02035] = 0x38, -- primereverse \backprime - -- [0x0] = 0x39, -- axisshort \dabar - [0x02253] = 0x3A, -- equaldotrightleft \risingdotseq - [0x02252] = 0x3B, -- equaldotleftright \fallingdotseq - [0x0227D] = 0x3C, -- followsorcurly \succcurlyeq - [0x02267] = 0x3D, -- greaterdblequal \geqq - [0x02A7E] = 0x3E, -- greaterorequalslant \geqslant - [0x02277] = 0x3F, -- greaterorless \gtrless - [0x0228F] = 0x40, -- squareimage \sqsubset - [0x02290] = 0x41, -- squareoriginal \sqsupset - -- wrong: see ** - -- [0x022B3] = 0x42, -- triangleright \rhd \vartriangleright - -- [0x022B2] = 0x43, -- triangleleft \lhd \vartriangleleft - -- cf lm - [0x022B5] = 0x44, -- trianglerightequal \unrhd \trianglerighteq - [0x022B4] = 0x45, -- triangleleftequal \unlhd \trianglelefteq - -- - [0x02605] = 0x46, -- star \bigstar - [0x0226C] = 0x47, -- between \between - [0x025BC] = 0x48, -- triangledownsld \blacktriangledown - [0x025B6] = 0x49, -- trianglerightsld \blacktriangleright - [0x025C0] = 0x4A, -- triangleleftsld \blacktriangleleft - -- [0x0] = 0x4B, -- arrowaxisright - -- [0x0] = 0x4C, -- arrowaxisleft - [0x025B2] = 0x4D, -- triangle \triangleup \vartriangle - [0x025B2] = 0x4E, -- trianglesolid \blacktriangle - [0x025BD] = 0x4F, -- triangleinv \triangledown - [0x02256] = 0x50, -- ringinequal \eqcirc - [0x022DA] = 0x51, -- lessequalgreater \lesseqgtr - [0x022DB] = 0x52, -- greaterlessequal \gtreqless - [0x02A8B] = 0x53, -- lessdbleqlgreater \lesseqqgtr - [0x02A8C] = 0x54, -- greaterdbleqlless \gtreqqless - [0x000A5] = 0x55, -- Yen \yen - [0x021DB] = 0x56, -- arrowtripleright \Rrightarrow - [0x021DA] = 0x57, -- arrowtripleleft \Lleftarrow - [0x02713] = 0x58, -- check \checkmark - [0x022BB] = 0x59, -- orunderscore \veebar - [0x022BC] = 0x5A, -- nand \barwedge - [0x02306] = 0x5B, -- perpcorrespond \doublebarwedge - [0x02220] = 0x5C, -- angle \angle - [0x02221] = 0x5D, -- measuredangle \measuredangle - [0x02222] = 0x5E, -- sphericalangle \sphericalangle - -- [0x0] = 0x5F, -- proportional \varpropto - -- [0x0] = 0x60, -- smile \smallsmile - -- [0x0] = 0x61, -- frown \smallfrown - [0x022D0] = 0x62, -- subsetdbl \Subset - [0x022D1] = 0x63, -- supersetdbl \Supset - [0x022D3] = 0x64, -- uniondbl \doublecup \Cup - [0x022D2] = 0x65, -- intersectiondbl \doublecap \Cap - [0x022CF] = 0x66, -- uprise \curlywedge - [0x022CE] = 0x67, -- downfall \curlyvee - [0x022CB] = 0x68, -- multiopenleft \leftthreetimes - [0x022CC] = 0x69, -- multiopenright \rightthreetimes - [0x02AC5] = 0x6A, -- subsetdblequal \subseteqq - [0x02AC6] = 0x6B, -- supersetdblequal \supseteqq - [0x0224F] = 0x6C, -- difference \bumpeq - [0x0224E] = 0x6D, -- geomequivalent \Bumpeq - [0x022D8] = 0x6E, -- muchless \lll \llless - [0x022D9] = 0x6F, -- muchgreater \ggg \gggtr - [0x0231C] = 0x70, -- rightanglenw \ulcorner - [0x0231D] = 0x71, -- rightanglene \urcorner - [0x024C7] = 0x72, -- circleR \circledR - [0x024C8] = 0x73, -- circleS 
\circledS - [0x022D4] = 0x74, -- fork \pitchfork - [0x02214] = 0x75, -- dotplus \dotplus - [0x0223D] = 0x76, -- revsimilar \backsim - [0x022CD] = 0x77, -- revasymptequal \backsimeq -- AM: Check this! I mapped it to simeq. - [0x0231E] = 0x78, -- rightanglesw \llcorner - [0x0231F] = 0x79, -- rightanglese \lrcorner - [0x02720] = 0x7A, -- maltesecross \maltese - [0x02201] = 0x7B, -- complement \complement - [0x022BA] = 0x7C, -- intercal \intercal - [0x0229A] = 0x7D, -- circlering \circledcirc - [0x0229B] = 0x7E, -- circleasterisk \circledast - [0x0229D] = 0x7F, -- circleminus \circleddash -} - -mathencodings["tex-mb"] = { - -- [0x0] = 0x00, -- lessornotequal \lvertneqq - -- [0x0] = 0x01, -- greaterornotequal \gvertneqq - [0x02270] = 0x02, -- notlessequal \nleq - [0x02271] = 0x03, -- notgreaterequal \ngeq - [0x0226E] = 0x04, -- notless \nless - [0x0226F] = 0x05, -- notgreater \ngtr - [0x02280] = 0x06, -- notprecedes \nprec - [0x02281] = 0x07, -- notfollows \nsucc - [0x02268] = 0x08, -- lessornotdbleql \lneqq - [0x02269] = 0x09, -- greaterornotdbleql \gneqq - -- [0x0] = 0x0A, -- notlessorslnteql \nleqslant - -- [0x0] = 0x0B, -- notgreaterorslnteql \ngeqslant - [0x02A87] = 0x0C, -- lessnotequal \lneq - [0x02A88] = 0x0D, -- greaternotequal \gneq - -- [0x0] = 0x0E, -- notprecedesoreql \npreceq - -- [0x0] = 0x0F, -- notfollowsoreql \nsucceq - [0x022E8] = 0x10, -- precedeornoteqvlnt \precnsim - [0x022E9] = 0x11, -- followornoteqvlnt \succnsim - [0x022E6] = 0x12, -- lessornotsimilar \lnsim - [0x022E7] = 0x13, -- greaterornotsimilar \gnsim - -- [0x0] = 0x14, -- notlessdblequal \nleqq - -- [0x0] = 0x15, -- notgreaterdblequal \ngeqq - [0x02AB5] = 0x16, -- precedenotslnteql \precneqq - [0x02AB6] = 0x17, -- follownotslnteql \succneqq - [0x02AB9] = 0x18, -- precedenotdbleqv \precnapprox - [0x02ABA] = 0x19, -- follownotdbleqv \succnapprox - [0x02A89] = 0x1A, -- lessnotdblequal \lnapprox - [0x02A8A] = 0x1B, -- greaternotdblequal \gnapprox - [0x02241] = 0x1C, -- notsimilar \nsim - [0x02247] = 0x1D, -- notapproxequal \ncong - -- [0x0] = 0x1E, -- upslope \diagup - -- [0x0] = 0x1F, -- downslope \diagdown - -- [0x0] = 0x20, -- notsubsetoreql \varsubsetneq - -- [0x0] = 0x21, -- notsupersetoreql \varsupsetneq - -- [0x0] = 0x22, -- notsubsetordbleql \nsubseteqq - -- [0x0] = 0x23, -- notsupersetordbleql \nsupseteqq - [0x02ACB] = 0x24, -- subsetornotdbleql \subsetneqq - [0x02ACC] = 0x25, -- supersetornotdbleql \supsetneqq - -- [0x0] = 0x26, -- subsetornoteql \varsubsetneqq - -- [0x0] = 0x27, -- supersetornoteql \varsupsetneqq - [0x0228A] = 0x28, -- subsetnoteql \subsetneq - [0x0228B] = 0x29, -- supersetnoteql \supsetneq - [0x02288] = 0x2A, -- notsubseteql \nsubseteq - [0x02289] = 0x2B, -- notsuperseteql \nsupseteq - [0x02226] = 0x2C, -- notparallel \nparallel - [0x02224] = 0x2D, -- notbar \nmid \ndivides - -- [0x0] = 0x2E, -- notshortbar \nshortmid - -- [0x0] = 0x2F, -- notshortparallel \nshortparallel - [0x022AC] = 0x30, -- notturnstile \nvdash - [0x022AE] = 0x31, -- notforces \nVdash - [0x022AD] = 0x32, -- notsatisfies \nvDash - [0x022AF] = 0x33, -- notforcesextra \nVDash - [0x022ED] = 0x34, -- nottriangeqlright \ntrianglerighteq - [0x022EC] = 0x35, -- nottriangeqlleft \ntrianglelefteq - [0x022EA] = 0x36, -- nottriangleleft \ntriangleleft - [0x022EB] = 0x37, -- nottriangleright \ntriangleright - [0x0219A] = 0x38, -- notarrowleft \nleftarrow - [0x0219B] = 0x39, -- notarrowright \nrightarrow - [0x021CD] = 0x3A, -- notdblarrowleft \nLeftarrow - [0x021CF] = 0x3B, -- notdblarrowright \nRightarrow - [0x021CE] = 0x3C, -- 
notdblarrowboth \nLeftrightarrow - [0x021AE] = 0x3D, -- notarrowboth \nleftrightarrow - [0x022C7] = 0x3E, -- dividemultiply \divideontimes - [0x02300] = 0x3F, -- diametersign \varnothing - [0x02204] = 0x40, -- notexistential \nexists - [0x1D538] = 0x41, -- A (blackboard A) - [0x1D539] = 0x42, -- B - [0x02102] = 0x43, -- C - [0x1D53B] = 0x44, -- D - [0x1D53C] = 0x45, -- E - [0x1D53D] = 0x46, -- F - [0x1D53E] = 0x47, -- G - [0x0210D] = 0x48, -- H - [0x1D540] = 0x49, -- I - [0x1D541] = 0x4A, -- J - [0x1D542] = 0x4B, -- K - [0x1D543] = 0x4C, -- L - [0x1D544] = 0x4D, -- M - [0x02115] = 0x4E, -- N - [0x1D546] = 0x4F, -- O - [0x02119] = 0x50, -- P - [0x0211A] = 0x51, -- Q - [0x0211D] = 0x52, -- R - [0x1D54A] = 0x53, -- S - [0x1D54B] = 0x54, -- T - [0x1D54C] = 0x55, -- U - [0x1D54D] = 0x56, -- V - [0x1D54E] = 0x57, -- W - [0x1D54F] = 0x58, -- X - [0x1D550] = 0x59, -- Y - [0x02124] = 0x5A, -- Z (blackboard Z) - [0x02132] = 0x60, -- finv \Finv - [0x02141] = 0x61, -- fmir \Game - -- [0x0] = 0x62, tildewide - -- [0x0] = 0x63, tildewider - -- [0x0] = 0x64, Finv - -- [0x0] = 0x65, Gmir - [0x02127] = 0x66, -- Omegainv \mho - [0x000F0] = 0x67, -- eth \eth - [0x02242] = 0x68, -- equalorsimilar \eqsim - [0x02136] = 0x69, -- beth \beth - [0x02137] = 0x6A, -- gimel \gimel - [0x02138] = 0x6B, -- daleth \daleth - [0x022D6] = 0x6C, -- lessdot \lessdot - [0x022D7] = 0x6D, -- greaterdot \gtrdot - [0x022C9] = 0x6E, -- multicloseleft \ltimes - [0x022CA] = 0x6F, -- multicloseright \rtimes - -- [0x0] = 0x70, -- barshort \shortmid - -- [0x0] = 0x71, -- parallelshort \shortparallel - -- [0x02216] = 0x72, -- integerdivide \smallsetminus (2216 already part of tex-sy - -- [0x0] = 0x73, -- similar \thicksim - -- [0x0] = 0x74, -- approxequal \thickapprox - [0x0224A] = 0x75, -- approxorequal \approxeq - [0x02AB8] = 0x76, -- followsorequal \succapprox - [0x02AB7] = 0x77, -- precedesorequal \precapprox - [0x021B6] = 0x78, -- archleftdown \curvearrowleft - [0x021B7] = 0x79, -- archrightdown \curvearrowright - [0x003DC] = 0x7A, -- Digamma \digamma - [0x003F0] = 0x7B, -- kappa \varkappa - [0x1D55C] = 0x7C, -- k \Bbbk (blackboard k) - [0x0210F] = 0x7D, -- planckover2pi \hslash % 0x7D - [0x00127] = 0x7E, -- planckover2pi1 \hbar % 0x7E - [0x003F6] = 0x7F, -- epsiloninv \backepsilon -} - -mathencodings["tex-mc"] = { - -- this file has no tfm so it gets mapped in the private space - [0xFE324] = "mapsfromchar", -} - -mathencodings["tex-fraktur"] = { --- [0x1D504] = 0x41, -- A (fraktur A) --- [0x1D505] = 0x42, -- B - [0x0212D] = 0x43, -- C --- [0x1D507] = 0x44, -- D --- [0x1D508] = 0x45, -- E --- [0x1D509] = 0x46, -- F --- [0x1D50A] = 0x47, -- G - [0x0210C] = 0x48, -- H - [0x02111] = 0x49, -- I --- [0x1D50D] = 0x4A, -- J --- [0x1D50E] = 0x4B, -- K --- [0x1D50F] = 0x4C, -- L --- [0x1D510] = 0x4D, -- M --- [0x1D511] = 0x4E, -- N --- [0x1D512] = 0x4F, -- O --- [0x1D513] = 0x50, -- P --- [0x1D514] = 0x51, -- Q - [0x0211C] = 0x52, -- R --- [0x1D516] = 0x53, -- S --- [0x1D517] = 0x54, -- T --- [0x1D518] = 0x55, -- U --- [0x1D519] = 0x56, -- V --- [0x1D51A] = 0x57, -- W --- [0x1D51B] = 0x58, -- X --- [0x1D51C] = 0x59, -- Y - [0x02128] = 0x5A, -- Z (fraktur Z) --- [0x1D51E] = 0x61, -- a (fraktur a) --- [0x1D51F] = 0x62, -- b --- [0x1D520] = 0x63, -- c --- [0x1D521] = 0x64, -- d --- [0x1D522] = 0x65, -- e --- [0x1D523] = 0x66, -- f --- [0x1D524] = 0x67, -- g --- [0x1D525] = 0x68, -- h --- [0x1D526] = 0x69, -- i --- [0x1D527] = 0x6A, -- j --- [0x1D528] = 0x6B, -- k --- [0x1D529] = 0x6C, -- l --- [0x1D52A] = 0x6D, -- m --- [0x1D52B] = 0x6E, -- n 
--- [0x1D52C] = 0x6F, -- o --- [0x1D52D] = 0x70, -- p --- [0x1D52E] = 0x71, -- q --- [0x1D52F] = 0x72, -- r --- [0x1D530] = 0x73, -- s --- [0x1D531] = 0x74, -- t --- [0x1D532] = 0x75, -- u --- [0x1D533] = 0x76, -- v --- [0x1D534] = 0x77, -- w --- [0x1D535] = 0x78, -- x --- [0x1D536] = 0x79, -- y --- [0x1D537] = 0x7A, -- z -} - --- now that all other vectors are defined ... - -setletters(mathencodings, "tex-it", 0x1D434, 0x1D44E) -setletters(mathencodings, "tex-ss", 0x1D5A0, 0x1D5BA) -setletters(mathencodings, "tex-tt", 0x1D670, 0x1D68A) -setletters(mathencodings, "tex-bf", 0x1D400, 0x1D41A) -setletters(mathencodings, "tex-bi", 0x1D468, 0x1D482) -setletters(mathencodings, "tex-fraktur", 0x1D504, 0x1D51E) -setletters(mathencodings, "tex-fraktur-bold", 0x1D56C, 0x1D586) - -setdigits (mathencodings, "tex-ss", 0x1D7E2) -setdigits (mathencodings, "tex-tt", 0x1D7F6) -setdigits (mathencodings, "tex-bf", 0x1D7CE) - --- setdigits (mathencodings, "tex-bi", 0x1D7CE) - --- todo: add ss, tt, bf etc vectors --- todo: we can make ss tt etc an option +if not modules then modules = { } end modules ['math-ttv'] = { + version = 1.001, + comment = "traditional tex vectors, companion to math-vfu.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", + dataonly = true, +} + +local vfmath = fonts.handlers.vf.math +local setletters = vfmath.setletters +local setdigits = vfmath.setdigits + +local mathencodings = fonts.encodings.math + +-- varphi is part of the alphabet, contrary to the other var*s' + +mathencodings["large-to-small"] = { + [0x00028] = 0x00, -- ( + [0x00029] = 0x01, -- ) + [0x0005B] = 0x02, -- [ + [0x0005D] = 0x03, -- ] + [0x0230A] = 0x04, -- lfloor + [0x0230B] = 0x05, -- rfloor + [0x02308] = 0x06, -- lceil + [0x02309] = 0x07, -- rceil + [0x0007B] = 0x08, -- { + [0x0007D] = 0x09, -- } + [0x027E8] = 0x0A, -- < + [0x027E9] = 0x0B, -- > + [0x0007C] = 0x0C, -- | + -- [0x0] = 0x0D, -- lVert rVert Vert + -- [0x0002F] = 0x0E, -- / + [0x0005C] = 0x0F, -- \ + -- [0x0] = 0x3A, -- lgroup + -- [0x0] = 0x3B, -- rgroup + -- [0x0] = 0x3C, -- arrowvert + -- [0x0] = 0x3D, -- Arrowvert + [0x02195] = 0x3F, -- updownarrow + -- [0x0] = 0x40, -- lmoustache + -- [0x0] = 0x41, -- rmoustache + [0x0221A] = 0x70, -- sqrt + [0x021D5] = 0x77, -- Updownarrow + [0x02191] = 0x78, -- uparrow + [0x02193] = 0x79, -- downarrow + [0x021D1] = 0x7E, -- Uparrow + [0x021D3] = 0x7F, -- Downarrow + [0x0220F] = 0x59, -- prod + [0x02210] = 0x61, -- coprod + [0x02211] = 0x58, -- sum + [0x0222B] = 0x5A, -- intop + [0x0222E] = 0x49, -- ointop + -- [0xFE302] = 0x62, -- widehat + -- [0xFE303] = 0x65, -- widetilde + [0x00302] = 0x62, -- widehat + [0x00303] = 0x65, -- widetilde + [0x022C0] = 0x5E, -- bigwedge + [0x022C1] = 0x5F, -- bigvee + [0x022C2] = 0x5C, -- bigcap + [0x022C3] = 0x5B, -- bigcup + [0x02044] = 0x0E, -- / +} + +-- Beware: these are (in cm/lm) below the baseline due to limitations +-- in the tfm format bu the engien (combined with the mathclass) takes +-- care of it. If we need them in textmode, we should make them virtual +-- and move them up but we're in no hurry with that. 
+ +mathencodings["tex-ex"] = { + [0x0220F] = 0x51, -- prod + [0x02210] = 0x60, -- coprod + [0x02211] = 0x50, -- sum + [0x0222B] = 0x52, -- intop + [0x0222E] = 0x48, -- ointop + [0x022C0] = 0x56, -- bigwedge + [0x022C1] = 0x57, -- bigvee + [0x022C2] = 0x54, -- bigcap + [0x022C3] = 0x53, -- bigcup + [0x02A00] = 0x4A, -- bigodot -- fixed BJ + [0x02A01] = 0x4C, -- bigoplus + [0x02A02] = 0x4E, -- bigotimes + -- [0x02A03] = , -- bigudot -- + [0x02A04] = 0x55, -- biguplus + [0x02A06] = 0x46, -- bigsqcup +} + +-- only math stuff is needed, since we always use an lm or gyre +-- font as main font + +mathencodings["tex-mr"] = { + [0x00393] = 0x00, -- Gamma + [0x00394] = 0x01, -- Delta + [0x00398] = 0x02, -- Theta + [0x0039B] = 0x03, -- Lambda + [0x0039E] = 0x04, -- Xi + [0x003A0] = 0x05, -- Pi + [0x003A3] = 0x06, -- Sigma + [0x003A5] = 0x07, -- Upsilon + [0x003A6] = 0x08, -- Phi + [0x003A8] = 0x09, -- Psi + [0x003A9] = 0x0A, -- Omega +-- [0x00060] = 0x12, -- [math]grave +-- [0x000B4] = 0x13, -- [math]acute +-- [0x002C7] = 0x14, -- [math]check +-- [0x002D8] = 0x15, -- [math]breve +-- [0x000AF] = 0x16, -- [math]bar +-- [0x00021] = 0x21, -- ! +-- [0x00028] = 0x28, -- ( +-- [0x00029] = 0x29, -- ) +-- [0x0002B] = 0x2B, -- + +-- [0x0002F] = 0x2F, -- / +-- [0x0003A] = 0x3A, -- : +-- [0x02236] = 0x3A, -- colon +-- [0x0003B] = 0x3B, -- ; +-- [0x0003C] = 0x3C, -- < +-- [0x0003D] = 0x3D, -- = +-- [0x0003E] = 0x3E, -- > +-- [0x0003F] = 0x3F, -- ? + [0x00391] = 0x41, -- Alpha + [0x00392] = 0x42, -- Beta + [0x02145] = 0x44, + [0x00395] = 0x45, -- Epsilon + [0x00397] = 0x48, -- Eta + [0x00399] = 0x49, -- Iota + [0x0039A] = 0x4B, -- Kappa + [0x0039C] = 0x4D, -- Mu + [0x0039D] = 0x4E, -- Nu + [0x0039F] = 0x4F, -- Omicron + [0x003A1] = 0x52, -- Rho + [0x003A4] = 0x54, -- Tau + [0x003A7] = 0x58, -- Chi + [0x00396] = 0x5A, -- Zeta +-- [0x0005B] = 0x5B, -- [ +-- [0x0005D] = 0x5D, -- ] +-- [0x0005E] = 0x5E, -- [math]hat -- the text one + [0x00302] = 0x5E, -- [math]hat -- the real math one +-- [0x002D9] = 0x5F, -- [math]dot + [0x02146] = 0x64, + [0x02147] = 0x65, +-- [0x002DC] = 0x7E, -- [math]tilde -- the text one + [0x00303] = 0x7E, -- [math]tilde -- the real one +-- [0x000A8] = 0x7F, -- [math]ddot +} + +mathencodings["tex-mr-missing"] = { + [0x02236] = 0x3A, -- colon +} + +mathencodings["tex-mi"] = { + [0x1D6E4] = 0x00, -- Gamma + [0x1D6E5] = 0x01, -- Delta + [0x1D6E9] = 0x02, -- Theta + [0x1D6F3] = 0x02, -- varTheta (not present in TeX) + [0x1D6EC] = 0x03, -- Lambda + [0x1D6EF] = 0x04, -- Xi + [0x1D6F1] = 0x05, -- Pi + [0x1D6F4] = 0x06, -- Sigma + [0x1D6F6] = 0x07, -- Upsilon + [0x1D6F7] = 0x08, -- Phi + [0x1D6F9] = 0x09, -- Psi + [0x1D6FA] = 0x0A, -- Omega + [0x1D6FC] = 0x0B, -- alpha + [0x1D6FD] = 0x0C, -- beta + [0x1D6FE] = 0x0D, -- gamma + [0x1D6FF] = 0x0E, -- delta + [0x1D716] = 0x0F, -- epsilon TODO: 1D716 + [0x1D701] = 0x10, -- zeta + [0x1D702] = 0x11, -- eta + [0x1D703] = 0x12, -- theta TODO: 1D703 + [0x1D704] = 0x13, -- iota + [0x1D705] = 0x14, -- kappa + [0x1D718] = 0x14, -- varkappa, not in tex fonts + [0x1D706] = 0x15, -- lambda + [0x1D707] = 0x16, -- mu + [0x1D708] = 0x17, -- nu + [0x1D709] = 0x18, -- xi + [0x1D70B] = 0x19, -- pi + [0x1D70C] = 0x1A, -- rho + [0x1D70E] = 0x1B, -- sigma + [0x1D70F] = 0x1C, -- tau + [0x1D710] = 0x1D, -- upsilon + [0x1D719] = 0x1E, -- phi + [0x1D712] = 0x1F, -- chi + [0x1D713] = 0x20, -- psi + [0x1D714] = 0x21, -- omega + [0x1D700] = 0x22, -- varepsilon (the other way around) + [0x1D717] = 0x23, -- vartheta + [0x1D71B] = 0x24, -- varpi + [0x1D71A] = 0x25, -- varrho + [0x1D70D] 
= 0x26, -- varsigma + [0x1D711] = 0x27, -- varphi (the other way around) + [0x021BC] = 0x28, -- leftharpoonup + [0x021BD] = 0x29, -- leftharpoondown + [0x021C0] = 0x2A, -- rightharpoonup + [0x021C1] = 0x2B, -- rightharpoondown + [0xFE322] = 0x2C, -- lhook (hook for combining arrows) + [0xFE323] = 0x2D, -- rhook (hook for combining arrows) + [0x025B7] = 0x2E, -- triangleright : cf lmmath / BJ + [0x025C1] = 0x2F, -- triangleleft : cf lmmath / BJ + [0x022B3] = 0x2E, -- triangleright : cf lmmath this a cramped triangles / BJ / see * + [0x022B2] = 0x2F, -- triangleleft : cf lmmath this a cramped triangles / BJ / see * +-- [0x00041] = 0x30, -- 0 +-- [0x00041] = 0x31, -- 1 +-- [0x00041] = 0x32, -- 2 +-- [0x00041] = 0x33, -- 3 +-- [0x00041] = 0x34, -- 4 +-- [0x00041] = 0x35, -- 5 +-- [0x00041] = 0x36, -- 6 +-- [0x00041] = 0x37, -- 7 +-- [0x00041] = 0x38, -- 8 +-- [0x00041] = 0x39, -- 9 +--~ [0x0002E] = 0x3A, -- . + [0x0002C] = 0x3B, -- , + [0x0003C] = 0x3C, -- < +-- [0x0002F] = 0x3D, -- /, slash, solidus + [0x02044] = 0x3D, -- / AM: Not sure + [0x0003E] = 0x3E, -- > + [0x022C6] = 0x3F, -- star + [0x02202] = 0x40, -- partial +-- + [0x0266D] = 0x5B, -- flat + [0x0266E] = 0x5C, -- natural + [0x0266F] = 0x5D, -- sharp + [0x02323] = 0x5E, -- smile + [0x02322] = 0x5F, -- frown + [0x02113] = 0x60, -- ell +-- + [0x1D6A4] = 0x7B, -- imath (TODO: also 0131) + [0x1D6A5] = 0x7C, -- jmath (TODO: also 0237) + [0x02118] = 0x7D, -- wp + [0x020D7] = 0x7E, -- vec (TODO: not sure) +-- 0x7F, -- (no idea what that could be) +} + +mathencodings["tex-it"] = { +-- [0x1D434] = 0x41, -- A + [0x1D6E2] = 0x41, -- Alpha +-- [0x1D435] = 0x42, -- B + [0x1D6E3] = 0x42, -- Beta +-- [0x1D436] = 0x43, -- C +-- [0x1D437] = 0x44, -- D +-- [0x1D438] = 0x45, -- E + [0x1D6E6] = 0x45, -- Epsilon +-- [0x1D439] = 0x46, -- F +-- [0x1D43A] = 0x47, -- G +-- [0x1D43B] = 0x48, -- H + [0x1D6E8] = 0x48, -- Eta +-- [0x1D43C] = 0x49, -- I + [0x1D6EA] = 0x49, -- Iota +-- [0x1D43D] = 0x4A, -- J +-- [0x1D43E] = 0x4B, -- K + [0x1D6EB] = 0x4B, -- Kappa +-- [0x1D43F] = 0x4C, -- L +-- [0x1D440] = 0x4D, -- M + [0x1D6ED] = 0x4D, -- Mu +-- [0x1D441] = 0x4E, -- N + [0x1D6EE] = 0x4E, -- Nu +-- [0x1D442] = 0x4F, -- O + [0x1D6F0] = 0x4F, -- Omicron +-- [0x1D443] = 0x50, -- P + [0x1D6F2] = 0x50, -- Rho +-- [0x1D444] = 0x51, -- Q +-- [0x1D445] = 0x52, -- R +-- [0x1D446] = 0x53, -- S +-- [0x1D447] = 0x54, -- T + [0x1D6F5] = 0x54, -- Tau +-- [0x1D448] = 0x55, -- U +-- [0x1D449] = 0x56, -- V +-- [0x1D44A] = 0x57, -- W +-- [0x1D44B] = 0x58, -- X + [0x1D6F8] = 0x58, -- Chi +-- [0x1D44C] = 0x59, -- Y +-- [0x1D44D] = 0x5A, -- Z +-- +-- [0x1D44E] = 0x61, -- a +-- [0x1D44F] = 0x62, -- b +-- [0x1D450] = 0x63, -- c +-- [0x1D451] = 0x64, -- d +-- [0x1D452] = 0x65, -- e +-- [0x1D453] = 0x66, -- f +-- [0x1D454] = 0x67, -- g +-- [0x1D455] = 0x68, -- h + [0x0210E] = 0x68, -- Planck constant (h) +-- [0x1D456] = 0x69, -- i +-- [0x1D457] = 0x6A, -- j +-- [0x1D458] = 0x6B, -- k +-- [0x1D459] = 0x6C, -- l +-- [0x1D45A] = 0x6D, -- m +-- [0x1D45B] = 0x6E, -- n +-- [0x1D45C] = 0x6F, -- o + [0x1D70A] = 0x6F, -- omicron +-- [0x1D45D] = 0x70, -- p +-- [0x1D45E] = 0x71, -- q +-- [0x1D45F] = 0x72, -- r +-- [0x1D460] = 0x73, -- s +-- [0x1D461] = 0x74, -- t +-- [0x1D462] = 0x75, -- u +-- [0x1D463] = 0x76, -- v +-- [0x1D464] = 0x77, -- w +-- [0x1D465] = 0x78, -- x +-- [0x1D466] = 0x79, -- y +-- [0x1D467] = 0x7A, -- z +} + +mathencodings["tex-ss"] = { } +mathencodings["tex-tt"] = { } +mathencodings["tex-bf"] = { } +mathencodings["tex-bi"] = { } +mathencodings["tex-fraktur"] = { } 
+mathencodings["tex-fraktur-bold"] = { } + +mathencodings["tex-sy"] = { + [0x0002D] = 0x00, -- - + [0x02212] = 0x00, -- - +-- [0x02201] = 0x00, -- complement +-- [0x02206] = 0x00, -- increment +-- [0x02204] = 0x00, -- not exists +-- [0x000B7] = 0x01, -- cdot + [0x022C5] = 0x01, -- cdot + [0x000D7] = 0x02, -- times + [0x0002A] = 0x03, -- * + [0x02217] = 0x03, -- * + [0x000F7] = 0x04, -- div + [0x022C4] = 0x05, -- diamond + [0x000B1] = 0x06, -- pm + [0x02213] = 0x07, -- mp + [0x02295] = 0x08, -- oplus + [0x02296] = 0x09, -- ominus + [0x02297] = 0x0A, -- otimes + [0x02298] = 0x0B, -- oslash + [0x02299] = 0x0C, -- odot + [0x025EF] = 0x0D, -- bigcirc, Orb (either 25EF or 25CB) -- todo + [0x02218] = 0x0E, -- circ + [0x02219] = 0x0F, -- bullet + [0x02022] = 0x0F, -- bullet + [0x0224D] = 0x10, -- asymp + [0x02261] = 0x11, -- equiv + [0x02286] = 0x12, -- subseteq + [0x02287] = 0x13, -- supseteq + [0x02264] = 0x14, -- leq + [0x02265] = 0x15, -- geq + [0x02AAF] = 0x16, -- preceq +-- [0x0227C] = 0x16, -- preceq, AM:No see 2AAF + [0x02AB0] = 0x17, -- succeq +-- [0x0227D] = 0x17, -- succeq, AM:No see 2AB0 + [0x0223C] = 0x18, -- sim + [0x02248] = 0x19, -- approx + [0x02282] = 0x1A, -- subset + [0x02283] = 0x1B, -- supset + [0x0226A] = 0x1C, -- ll + [0x0226B] = 0x1D, -- gg + [0x0227A] = 0x1E, -- prec + [0x0227B] = 0x1F, -- succ + [0x02190] = 0x20, -- leftarrow + [0x02192] = 0x21, -- rightarrow +--~ [0xFE190] = 0x20, -- leftarrow +--~ [0xFE192] = 0x21, -- rightarrow + [0x02191] = 0x22, -- uparrow + [0x02193] = 0x23, -- downarrow + [0x02194] = 0x24, -- leftrightarrow + [0x02197] = 0x25, -- nearrow + [0x02198] = 0x26, -- searrow + [0x02243] = 0x27, -- simeq + [0x021D0] = 0x28, -- Leftarrow + [0x021D2] = 0x29, -- Rightarrow + [0x021D1] = 0x2A, -- Uparrow + [0x021D3] = 0x2B, -- Downarrow + [0x021D4] = 0x2C, -- Leftrightarrow + [0x02196] = 0x2D, -- nwarrow + [0x02199] = 0x2E, -- swarrow + [0x0221D] = 0x2F, -- propto + [0x02032] = 0x30, -- prime + [0x0221E] = 0x31, -- infty + [0x02208] = 0x32, -- in + [0x0220B] = 0x33, -- ni + [0x025B3] = 0x34, -- triangle, bigtriangleup + [0x025BD] = 0x35, -- bigtriangledown + [0x00338] = 0x36, -- not +-- 0x37, -- (beginning of arrow) + [0x02200] = 0x38, -- forall + [0x02203] = 0x39, -- exists + [0x000AC] = 0x3A, -- neg, lnot + [0x02205] = 0x3B, -- empty set + [0x0211C] = 0x3C, -- Re + [0x02111] = 0x3D, -- Im + [0x022A4] = 0x3E, -- top + [0x022A5] = 0x3F, -- bot, perp + [0x02135] = 0x40, -- aleph + [0x1D49C] = 0x41, -- script A + [0x0212C] = 0x42, -- script B + [0x1D49E] = 0x43, -- script C + [0x1D49F] = 0x44, -- script D + [0x02130] = 0x45, -- script E + [0x02131] = 0x46, -- script F + [0x1D4A2] = 0x47, -- script G + [0x0210B] = 0x48, -- script H + [0x02110] = 0x49, -- script I + [0x1D4A5] = 0x4A, -- script J + [0x1D4A6] = 0x4B, -- script K + [0x02112] = 0x4C, -- script L + [0x02133] = 0x4D, -- script M + [0x1D4A9] = 0x4E, -- script N + [0x1D4AA] = 0x4F, -- script O + [0x1D4AB] = 0x50, -- script P + [0x1D4AC] = 0x51, -- script Q + [0x0211B] = 0x52, -- script R + [0x1D4AE] = 0x53, -- script S + [0x1D4AF] = 0x54, -- script T + [0x1D4B0] = 0x55, -- script U + [0x1D4B1] = 0x56, -- script V + [0x1D4B2] = 0x57, -- script W + [0x1D4B3] = 0x58, -- script X + [0x1D4B4] = 0x59, -- script Y + [0x1D4B5] = 0x5A, -- script Z + [0x0222A] = 0x5B, -- cup + [0x02229] = 0x5C, -- cap + [0x0228E] = 0x5D, -- uplus + [0x02227] = 0x5E, -- wedge, land + [0x02228] = 0x5F, -- vee, lor + [0x022A2] = 0x60, -- vdash + [0x022A3] = 0x61, -- dashv + [0x0230A] = 0x62, -- lfloor + [0x0230B] = 0x63, -- rfloor + 
[0x02308] = 0x64, -- lceil + [0x02309] = 0x65, -- rceil + [0x0007B] = 0x66, -- {, lbrace + [0x0007D] = 0x67, -- }, rbrace + [0x027E8] = 0x68, -- <, langle + [0x027E9] = 0x69, -- >, rangle + [0x0007C] = 0x6A, -- |, mid, lvert, rvert + [0x02225] = 0x6B, -- parallel + -- [0x0 ] = 0x00, -- Vert, lVert, rVert, arrowvert, Arrowvert + [0x02195] = 0x6C, -- updownarrow + [0x021D5] = 0x6D, -- Updownarrow + [0x0005C] = 0x6E, -- \, backslash, setminus + [0x02216] = 0x6E, -- setminus + [0x02240] = 0x6F, -- wr + [0x0221A] = 0x70, -- sqrt. AM: Check surd?? + [0x02A3F] = 0x71, -- amalg + [0x1D6FB] = 0x72, -- nabla +-- [0x0222B] = 0x73, -- smallint (TODO: what about intop?) + [0x02294] = 0x74, -- sqcup + [0x02293] = 0x75, -- sqcap + [0x02291] = 0x76, -- sqsubseteq + [0x02292] = 0x77, -- sqsupseteq + [0x000A7] = 0x78, -- S + [0x02020] = 0x79, -- dagger, dag + [0x02021] = 0x7A, -- ddagger, ddag + [0x000B6] = 0x7B, -- P + [0x02663] = 0x7C, -- clubsuit + [0x02662] = 0x7D, -- diamondsuit + [0x02661] = 0x7E, -- heartsuit + [0x02660] = 0x7F, -- spadesuit + [0xFE321] = 0x37, -- mapstochar + + [0xFE325] = 0x30, -- prime 0x02032 +} + +-- The names in masm10.enc can be trusted best and are shown in the first +-- column, while in the second column we show the tex/ams names. As usual +-- it costs hours to figure out such a table. + +mathencodings["tex-ma"] = { + [0x022A1] = 0x00, -- squaredot \boxdot + [0x0229E] = 0x01, -- squareplus \boxplus + [0x022A0] = 0x02, -- squaremultiply \boxtimes + [0x025A1] = 0x03, -- square \square \Box + [0x025A0] = 0x04, -- squaresolid \blacksquare + [0x025AA] = 0x05, -- squaresmallsolid \centerdot + [0x022C4] = 0x06, -- diamond \Diamond \lozenge + [0x02666] = 0x07, -- diamondsolid \blacklozenge + [0x021BB] = 0x08, -- clockwise \circlearrowright + [0x021BA] = 0x09, -- anticlockwise \circlearrowleft + [0x021CC] = 0x0A, -- harpoonleftright \rightleftharpoons + [0x021CB] = 0x0B, -- harpoonrightleft \leftrightharpoons + [0x0229F] = 0x0C, -- squareminus \boxminus + [0x022A9] = 0x0D, -- forces \Vdash + [0x022AA] = 0x0E, -- forcesbar \Vvdash + [0x022A8] = 0x0F, -- satisfies \vDash + [0x021A0] = 0x10, -- dblarrowheadright \twoheadrightarrow + [0x0219E] = 0x11, -- dblarrowheadleft \twoheadleftarrow + [0x021C7] = 0x12, -- dblarrowleft \leftleftarrows + [0x021C9] = 0x13, -- dblarrowright \rightrightarrows + [0x021C8] = 0x14, -- dblarrowup \upuparrows + [0x021CA] = 0x15, -- dblarrowdwn \downdownarrows + [0x021BE] = 0x16, -- harpoonupright \upharpoonright \restriction + [0x021C2] = 0x17, -- harpoondownright \downharpoonright + [0x021BF] = 0x18, -- harpoonupleft \upharpoonleft + [0x021C3] = 0x19, -- harpoondownleft \downharpoonleft + [0x021A3] = 0x1A, -- arrowtailright \rightarrowtail + [0x021A2] = 0x1B, -- arrowtailleft \leftarrowtail + [0x021C6] = 0x1C, -- arrowparrleftright \leftrightarrows +-- [0x021C5] = 0x00, -- \updownarrows (missing in lm) + [0x021C4] = 0x1D, -- arrowparrrightleft \rightleftarrows + [0x021B0] = 0x1E, -- shiftleft \Lsh + [0x021B1] = 0x1F, -- shiftright \Rsh + [0x021DD] = 0x20, -- squiggleright \leadsto \rightsquigarrow + [0x021AD] = 0x21, -- squiggleleftright \leftrightsquigarrow + [0x021AB] = 0x22, -- curlyleft \looparrowleft + [0x021AC] = 0x23, -- curlyright \looparrowright + [0x02257] = 0x24, -- circleequal \circeq + [0x0227F] = 0x25, -- followsorequal \succsim + [0x02273] = 0x26, -- greaterorsimilar \gtrsim + [0x02A86] = 0x27, -- greaterorapproxeql \gtrapprox + [0x022B8] = 0x28, -- multimap \multimap + [0x02234] = 0x29, -- therefore \therefore + [0x02235] = 0x2A, -- because 
\because + [0x02251] = 0x2B, -- equalsdots \Doteq \doteqdot + [0x0225C] = 0x2C, -- defines \triangleq + [0x0227E] = 0x2D, -- precedesorequal \precsim + [0x02272] = 0x2E, -- lessorsimilar \lesssim + [0x02A85] = 0x2F, -- lessorapproxeql \lessapprox + [0x02A95] = 0x30, -- equalorless \eqslantless + [0x02A96] = 0x31, -- equalorgreater \eqslantgtr + [0x022DE] = 0x32, -- equalorprecedes \curlyeqprec + [0x022DF] = 0x33, -- equalorfollows \curlyeqsucc + [0x0227C] = 0x34, -- precedesorcurly \preccurlyeq + [0x02266] = 0x35, -- lessdblequal \leqq + [0x02A7D] = 0x36, -- lessorequalslant \leqslant + [0x02276] = 0x37, -- lessorgreater \lessgtr + [0x02035] = 0x38, -- primereverse \backprime + -- [0x0] = 0x39, -- axisshort \dabar + [0x02253] = 0x3A, -- equaldotrightleft \risingdotseq + [0x02252] = 0x3B, -- equaldotleftright \fallingdotseq + [0x0227D] = 0x3C, -- followsorcurly \succcurlyeq + [0x02267] = 0x3D, -- greaterdblequal \geqq + [0x02A7E] = 0x3E, -- greaterorequalslant \geqslant + [0x02277] = 0x3F, -- greaterorless \gtrless + [0x0228F] = 0x40, -- squareimage \sqsubset + [0x02290] = 0x41, -- squareoriginal \sqsupset + -- wrong: see ** + -- [0x022B3] = 0x42, -- triangleright \rhd \vartriangleright + -- [0x022B2] = 0x43, -- triangleleft \lhd \vartriangleleft + -- cf lm + [0x022B5] = 0x44, -- trianglerightequal \unrhd \trianglerighteq + [0x022B4] = 0x45, -- triangleleftequal \unlhd \trianglelefteq + -- + [0x02605] = 0x46, -- star \bigstar + [0x0226C] = 0x47, -- between \between + [0x025BC] = 0x48, -- triangledownsld \blacktriangledown + [0x025B6] = 0x49, -- trianglerightsld \blacktriangleright + [0x025C0] = 0x4A, -- triangleleftsld \blacktriangleleft + -- [0x0] = 0x4B, -- arrowaxisright + -- [0x0] = 0x4C, -- arrowaxisleft + [0x025B2] = 0x4D, -- triangle \triangleup \vartriangle + [0x025B2] = 0x4E, -- trianglesolid \blacktriangle + [0x025BD] = 0x4F, -- triangleinv \triangledown + [0x02256] = 0x50, -- ringinequal \eqcirc + [0x022DA] = 0x51, -- lessequalgreater \lesseqgtr + [0x022DB] = 0x52, -- greaterlessequal \gtreqless + [0x02A8B] = 0x53, -- lessdbleqlgreater \lesseqqgtr + [0x02A8C] = 0x54, -- greaterdbleqlless \gtreqqless + [0x000A5] = 0x55, -- Yen \yen + [0x021DB] = 0x56, -- arrowtripleright \Rrightarrow + [0x021DA] = 0x57, -- arrowtripleleft \Lleftarrow + [0x02713] = 0x58, -- check \checkmark + [0x022BB] = 0x59, -- orunderscore \veebar + [0x022BC] = 0x5A, -- nand \barwedge + [0x02306] = 0x5B, -- perpcorrespond \doublebarwedge + [0x02220] = 0x5C, -- angle \angle + [0x02221] = 0x5D, -- measuredangle \measuredangle + [0x02222] = 0x5E, -- sphericalangle \sphericalangle + -- [0x0] = 0x5F, -- proportional \varpropto + -- [0x0] = 0x60, -- smile \smallsmile + -- [0x0] = 0x61, -- frown \smallfrown + [0x022D0] = 0x62, -- subsetdbl \Subset + [0x022D1] = 0x63, -- supersetdbl \Supset + [0x022D3] = 0x64, -- uniondbl \doublecup \Cup + [0x022D2] = 0x65, -- intersectiondbl \doublecap \Cap + [0x022CF] = 0x66, -- uprise \curlywedge + [0x022CE] = 0x67, -- downfall \curlyvee + [0x022CB] = 0x68, -- multiopenleft \leftthreetimes + [0x022CC] = 0x69, -- multiopenright \rightthreetimes + [0x02AC5] = 0x6A, -- subsetdblequal \subseteqq + [0x02AC6] = 0x6B, -- supersetdblequal \supseteqq + [0x0224F] = 0x6C, -- difference \bumpeq + [0x0224E] = 0x6D, -- geomequivalent \Bumpeq + [0x022D8] = 0x6E, -- muchless \lll \llless + [0x022D9] = 0x6F, -- muchgreater \ggg \gggtr + [0x0231C] = 0x70, -- rightanglenw \ulcorner + [0x0231D] = 0x71, -- rightanglene \urcorner + [0x024C7] = 0x72, -- circleR \circledR + [0x024C8] = 0x73, -- circleS 
\circledS + [0x022D4] = 0x74, -- fork \pitchfork + [0x02214] = 0x75, -- dotplus \dotplus + [0x0223D] = 0x76, -- revsimilar \backsim + [0x022CD] = 0x77, -- revasymptequal \backsimeq -- AM: Check this! I mapped it to simeq. + [0x0231E] = 0x78, -- rightanglesw \llcorner + [0x0231F] = 0x79, -- rightanglese \lrcorner + [0x02720] = 0x7A, -- maltesecross \maltese + [0x02201] = 0x7B, -- complement \complement + [0x022BA] = 0x7C, -- intercal \intercal + [0x0229A] = 0x7D, -- circlering \circledcirc + [0x0229B] = 0x7E, -- circleasterisk \circledast + [0x0229D] = 0x7F, -- circleminus \circleddash +} + +mathencodings["tex-mb"] = { + -- [0x0] = 0x00, -- lessornotequal \lvertneqq + -- [0x0] = 0x01, -- greaterornotequal \gvertneqq + [0x02270] = 0x02, -- notlessequal \nleq + [0x02271] = 0x03, -- notgreaterequal \ngeq + [0x0226E] = 0x04, -- notless \nless + [0x0226F] = 0x05, -- notgreater \ngtr + [0x02280] = 0x06, -- notprecedes \nprec + [0x02281] = 0x07, -- notfollows \nsucc + [0x02268] = 0x08, -- lessornotdbleql \lneqq + [0x02269] = 0x09, -- greaterornotdbleql \gneqq + -- [0x0] = 0x0A, -- notlessorslnteql \nleqslant + -- [0x0] = 0x0B, -- notgreaterorslnteql \ngeqslant + [0x02A87] = 0x0C, -- lessnotequal \lneq + [0x02A88] = 0x0D, -- greaternotequal \gneq + -- [0x0] = 0x0E, -- notprecedesoreql \npreceq + -- [0x0] = 0x0F, -- notfollowsoreql \nsucceq + [0x022E8] = 0x10, -- precedeornoteqvlnt \precnsim + [0x022E9] = 0x11, -- followornoteqvlnt \succnsim + [0x022E6] = 0x12, -- lessornotsimilar \lnsim + [0x022E7] = 0x13, -- greaterornotsimilar \gnsim + -- [0x0] = 0x14, -- notlessdblequal \nleqq + -- [0x0] = 0x15, -- notgreaterdblequal \ngeqq + [0x02AB5] = 0x16, -- precedenotslnteql \precneqq + [0x02AB6] = 0x17, -- follownotslnteql \succneqq + [0x02AB9] = 0x18, -- precedenotdbleqv \precnapprox + [0x02ABA] = 0x19, -- follownotdbleqv \succnapprox + [0x02A89] = 0x1A, -- lessnotdblequal \lnapprox + [0x02A8A] = 0x1B, -- greaternotdblequal \gnapprox + [0x02241] = 0x1C, -- notsimilar \nsim + [0x02247] = 0x1D, -- notapproxequal \ncong + -- [0x0] = 0x1E, -- upslope \diagup + -- [0x0] = 0x1F, -- downslope \diagdown + -- [0x0] = 0x20, -- notsubsetoreql \varsubsetneq + -- [0x0] = 0x21, -- notsupersetoreql \varsupsetneq + -- [0x0] = 0x22, -- notsubsetordbleql \nsubseteqq + -- [0x0] = 0x23, -- notsupersetordbleql \nsupseteqq + [0x02ACB] = 0x24, -- subsetornotdbleql \subsetneqq + [0x02ACC] = 0x25, -- supersetornotdbleql \supsetneqq + -- [0x0] = 0x26, -- subsetornoteql \varsubsetneqq + -- [0x0] = 0x27, -- supersetornoteql \varsupsetneqq + [0x0228A] = 0x28, -- subsetnoteql \subsetneq + [0x0228B] = 0x29, -- supersetnoteql \supsetneq + [0x02288] = 0x2A, -- notsubseteql \nsubseteq + [0x02289] = 0x2B, -- notsuperseteql \nsupseteq + [0x02226] = 0x2C, -- notparallel \nparallel + [0x02224] = 0x2D, -- notbar \nmid \ndivides + -- [0x0] = 0x2E, -- notshortbar \nshortmid + -- [0x0] = 0x2F, -- notshortparallel \nshortparallel + [0x022AC] = 0x30, -- notturnstile \nvdash + [0x022AE] = 0x31, -- notforces \nVdash + [0x022AD] = 0x32, -- notsatisfies \nvDash + [0x022AF] = 0x33, -- notforcesextra \nVDash + [0x022ED] = 0x34, -- nottriangeqlright \ntrianglerighteq + [0x022EC] = 0x35, -- nottriangeqlleft \ntrianglelefteq + [0x022EA] = 0x36, -- nottriangleleft \ntriangleleft + [0x022EB] = 0x37, -- nottriangleright \ntriangleright + [0x0219A] = 0x38, -- notarrowleft \nleftarrow + [0x0219B] = 0x39, -- notarrowright \nrightarrow + [0x021CD] = 0x3A, -- notdblarrowleft \nLeftarrow + [0x021CF] = 0x3B, -- notdblarrowright \nRightarrow + [0x021CE] = 0x3C, -- 
notdblarrowboth \nLeftrightarrow + [0x021AE] = 0x3D, -- notarrowboth \nleftrightarrow + [0x022C7] = 0x3E, -- dividemultiply \divideontimes + [0x02300] = 0x3F, -- diametersign \varnothing + [0x02204] = 0x40, -- notexistential \nexists + [0x1D538] = 0x41, -- A (blackboard A) + [0x1D539] = 0x42, -- B + [0x02102] = 0x43, -- C + [0x1D53B] = 0x44, -- D + [0x1D53C] = 0x45, -- E + [0x1D53D] = 0x46, -- F + [0x1D53E] = 0x47, -- G + [0x0210D] = 0x48, -- H + [0x1D540] = 0x49, -- I + [0x1D541] = 0x4A, -- J + [0x1D542] = 0x4B, -- K + [0x1D543] = 0x4C, -- L + [0x1D544] = 0x4D, -- M + [0x02115] = 0x4E, -- N + [0x1D546] = 0x4F, -- O + [0x02119] = 0x50, -- P + [0x0211A] = 0x51, -- Q + [0x0211D] = 0x52, -- R + [0x1D54A] = 0x53, -- S + [0x1D54B] = 0x54, -- T + [0x1D54C] = 0x55, -- U + [0x1D54D] = 0x56, -- V + [0x1D54E] = 0x57, -- W + [0x1D54F] = 0x58, -- X + [0x1D550] = 0x59, -- Y + [0x02124] = 0x5A, -- Z (blackboard Z) + [0x02132] = 0x60, -- finv \Finv + [0x02141] = 0x61, -- fmir \Game + -- [0x0] = 0x62, tildewide + -- [0x0] = 0x63, tildewider + -- [0x0] = 0x64, Finv + -- [0x0] = 0x65, Gmir + [0x02127] = 0x66, -- Omegainv \mho + [0x000F0] = 0x67, -- eth \eth + [0x02242] = 0x68, -- equalorsimilar \eqsim + [0x02136] = 0x69, -- beth \beth + [0x02137] = 0x6A, -- gimel \gimel + [0x02138] = 0x6B, -- daleth \daleth + [0x022D6] = 0x6C, -- lessdot \lessdot + [0x022D7] = 0x6D, -- greaterdot \gtrdot + [0x022C9] = 0x6E, -- multicloseleft \ltimes + [0x022CA] = 0x6F, -- multicloseright \rtimes + -- [0x0] = 0x70, -- barshort \shortmid + -- [0x0] = 0x71, -- parallelshort \shortparallel + -- [0x02216] = 0x72, -- integerdivide \smallsetminus (2216 already part of tex-sy + -- [0x0] = 0x73, -- similar \thicksim + -- [0x0] = 0x74, -- approxequal \thickapprox + [0x0224A] = 0x75, -- approxorequal \approxeq + [0x02AB8] = 0x76, -- followsorequal \succapprox + [0x02AB7] = 0x77, -- precedesorequal \precapprox + [0x021B6] = 0x78, -- archleftdown \curvearrowleft + [0x021B7] = 0x79, -- archrightdown \curvearrowright + [0x003DC] = 0x7A, -- Digamma \digamma + [0x003F0] = 0x7B, -- kappa \varkappa + [0x1D55C] = 0x7C, -- k \Bbbk (blackboard k) + [0x0210F] = 0x7D, -- planckover2pi \hslash % 0x7D + [0x00127] = 0x7E, -- planckover2pi1 \hbar % 0x7E + [0x003F6] = 0x7F, -- epsiloninv \backepsilon +} + +mathencodings["tex-mc"] = { + -- this file has no tfm so it gets mapped in the private space + [0xFE324] = "mapsfromchar", +} + +mathencodings["tex-fraktur"] = { +-- [0x1D504] = 0x41, -- A (fraktur A) +-- [0x1D505] = 0x42, -- B + [0x0212D] = 0x43, -- C +-- [0x1D507] = 0x44, -- D +-- [0x1D508] = 0x45, -- E +-- [0x1D509] = 0x46, -- F +-- [0x1D50A] = 0x47, -- G + [0x0210C] = 0x48, -- H + [0x02111] = 0x49, -- I +-- [0x1D50D] = 0x4A, -- J +-- [0x1D50E] = 0x4B, -- K +-- [0x1D50F] = 0x4C, -- L +-- [0x1D510] = 0x4D, -- M +-- [0x1D511] = 0x4E, -- N +-- [0x1D512] = 0x4F, -- O +-- [0x1D513] = 0x50, -- P +-- [0x1D514] = 0x51, -- Q + [0x0211C] = 0x52, -- R +-- [0x1D516] = 0x53, -- S +-- [0x1D517] = 0x54, -- T +-- [0x1D518] = 0x55, -- U +-- [0x1D519] = 0x56, -- V +-- [0x1D51A] = 0x57, -- W +-- [0x1D51B] = 0x58, -- X +-- [0x1D51C] = 0x59, -- Y + [0x02128] = 0x5A, -- Z (fraktur Z) +-- [0x1D51E] = 0x61, -- a (fraktur a) +-- [0x1D51F] = 0x62, -- b +-- [0x1D520] = 0x63, -- c +-- [0x1D521] = 0x64, -- d +-- [0x1D522] = 0x65, -- e +-- [0x1D523] = 0x66, -- f +-- [0x1D524] = 0x67, -- g +-- [0x1D525] = 0x68, -- h +-- [0x1D526] = 0x69, -- i +-- [0x1D527] = 0x6A, -- j +-- [0x1D528] = 0x6B, -- k +-- [0x1D529] = 0x6C, -- l +-- [0x1D52A] = 0x6D, -- m +-- [0x1D52B] = 0x6E, -- n 
+-- [0x1D52C] = 0x6F, -- o +-- [0x1D52D] = 0x70, -- p +-- [0x1D52E] = 0x71, -- q +-- [0x1D52F] = 0x72, -- r +-- [0x1D530] = 0x73, -- s +-- [0x1D531] = 0x74, -- t +-- [0x1D532] = 0x75, -- u +-- [0x1D533] = 0x76, -- v +-- [0x1D534] = 0x77, -- w +-- [0x1D535] = 0x78, -- x +-- [0x1D536] = 0x79, -- y +-- [0x1D537] = 0x7A, -- z +} + +-- now that all other vectors are defined ... + +setletters(mathencodings, "tex-it", 0x1D434, 0x1D44E) +setletters(mathencodings, "tex-ss", 0x1D5A0, 0x1D5BA) +setletters(mathencodings, "tex-tt", 0x1D670, 0x1D68A) +setletters(mathencodings, "tex-bf", 0x1D400, 0x1D41A) +setletters(mathencodings, "tex-bi", 0x1D468, 0x1D482) +setletters(mathencodings, "tex-fraktur", 0x1D504, 0x1D51E) +setletters(mathencodings, "tex-fraktur-bold", 0x1D56C, 0x1D586) + +setdigits (mathencodings, "tex-ss", 0x1D7E2) +setdigits (mathencodings, "tex-tt", 0x1D7F6) +setdigits (mathencodings, "tex-bf", 0x1D7CE) + +-- setdigits (mathencodings, "tex-bi", 0x1D7CE) + +-- todo: add ss, tt, bf etc vectors +-- todo: we can make ss tt etc an option diff --git a/tex/context/base/meta-fun.lua b/tex/context/base/meta-fun.lua index 78ee25baf..e12298e8b 100644 --- a/tex/context/base/meta-fun.lua +++ b/tex/context/base/meta-fun.lua @@ -1,57 +1,57 @@ -if not modules then modules = { } end modules ['meta-fun'] = { - version = 1.001, - comment = "companion to meta-fun.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- very experimental, actually a joke ... see metafun manual for usage - -local format, load, type = string.format, load, type - -local metapost = metapost - -metapost.metafun = metapost.metafun or { } -local metafun = metapost.metafun - -function metafun.topath(t,connector) - context("(") - if #t > 0 then - for i=1,#t do - if i > 1 then - context(connector or "..") - end - local ti = t[i] - if type(ti) == "string" then - context(ti) - else - context("(%s,%s)",ti.x or ti[1] or 0,ti.y or ti[2] or 0) - end - end - else - context("origin") - end - context(")") -end - -function metafun.interpolate(f,b,e,s,c) - local done = false - context("(") - for i=b,e,(e-b)/s do - local d = load(format("return function(x) return %s end",f)) - if d then - d = d() - if done then - context(c or "...") - else - done = true - end - context("(%s,%s)",i,d(i)) - end - end - if not done then - context("origin") - end - context(")") -end +if not modules then modules = { } end modules ['meta-fun'] = { + version = 1.001, + comment = "companion to meta-fun.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- very experimental, actually a joke ... 
see metafun manual for usage + +local format, load, type = string.format, load, type + +local metapost = metapost + +metapost.metafun = metapost.metafun or { } +local metafun = metapost.metafun + +function metafun.topath(t,connector) + context("(") + if #t > 0 then + for i=1,#t do + if i > 1 then + context(connector or "..") + end + local ti = t[i] + if type(ti) == "string" then + context(ti) + else + context("(%s,%s)",ti.x or ti[1] or 0,ti.y or ti[2] or 0) + end + end + else + context("origin") + end + context(")") +end + +function metafun.interpolate(f,b,e,s,c) + local done = false + context("(") + for i=b,e,(e-b)/s do + local d = load(format("return function(x) return %s end",f)) + if d then + d = d() + if done then + context(c or "...") + else + done = true + end + context("(%s,%s)",i,d(i)) + end + end + if not done then + context("origin") + end + context(")") +end diff --git a/tex/context/base/meta-ini.lua b/tex/context/base/meta-ini.lua index 713ba3d5d..460738930 100644 --- a/tex/context/base/meta-ini.lua +++ b/tex/context/base/meta-ini.lua @@ -1,165 +1,165 @@ -if not modules then modules = { } end modules ['meta-ini'] = { - version = 1.001, - comment = "companion to meta-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local tonumber = tonumber -local format = string.format -local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns -local P, Cs, R, S, C, Cc = lpeg.P, lpeg.Cs, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc - -local context = context - -metapost = metapost or { } - --- for the moment downward compatible - -local report_metapost = logs.reporter ("metapost") -local status_metapost = logs.messenger("metapost") - -local patterns = { "meta-imp-%s.mkiv", "meta-imp-%s.tex", "meta-%s.mkiv", "meta-%s.tex" } -- we are compatible - -local function action(name,foundname) - status_metapost("library %a is loaded",name) - context.startreadingfile() - context.input(foundname) - context.stopreadingfile() -end - -local function failure(name) - report_metapost("library %a is unknown or invalid",name) -end - -function commands.useMPlibrary(name) - commands.uselibrary { - name = name, - patterns = patterns, - action = action, - failure = failure, - onlyonce = true, - } -end - --- experimental - -local colorhash = attributes.list[attributes.private('color')] - -local textype = tex.type -local MPcolor = context.MPcolor - --- local validdimen = lpegpatterns.validdimen * P(-1) --- --- function commands.prepareMPvariable(v) -- slow but ok --- if v == "" then --- MPcolor("black") --- else --- local typ, var = match(v,"(.):(.*)") --- if not typ then --- -- parse --- if colorhash[v] then --- MPcolor(v) --- elseif tonumber(v) then --- context(v) --- elseif lpegmatch(validdimen,v) then --- return context("\\the\\dimexpr %s",v) --- else --- for s in gmatch(v,"\\([a-zA-Z]+)") do -- can have trailing space --- local t = textype(s) --- if t == "dimen" then --- return context("\\the\\dimexpr %s",v) --- elseif t == "count" then --- return context("\\the\\numexpr %s",v) --- end --- end --- context("\\number %s",v) -- 0.4 ... 
--- end --- elseif typ == "d" then -- to be documented --- -- dimension --- context("\\the\\dimexpr %s",var) --- elseif typ == "n" then -- to be documented --- -- number --- context("\\the\\numexpr %s",var) --- elseif typ == "s" then -- to be documented --- -- string --- context(var) --- elseif typ == "c" then -- to be documented --- -- color --- MPcolor(var) --- else --- context(var) --- end --- end --- end - --- we can actually get the dimen/count values here - -local dimenorname = - lpegpatterns.validdimen / function(s) - context("\\the\\dimexpr %s",s) - end - + (C(lpegpatterns.float) + Cc(1)) * lpegpatterns.space^0 * P("\\") * C(lpegpatterns.letter^1) / function(f,s) - local t = textype(s) - if t == "dimen" then - context("\\the\\dimexpr %s\\%s",f,s) - elseif t == "count" then - context("\\the\\numexpr \\%s * %s\\relax",s,f) -- \scratchcounter is not permitted - end - end - -local splitter = lpeg.splitat(":",true) - -function commands.prepareMPvariable(v) -- slow but ok - if v == "" then - MPcolor("black") - else - local typ, var = lpegmatch(splitter,v) - if not var then - -- parse - if colorhash[v] then - MPcolor(v) - elseif tonumber(v) then - context(v) - elseif not lpegmatch(dimenorname,v) then - context("\\number %s",v) -- 0.4 ... - end - elseif typ == "d" then -- to be documented - -- dimension - context("\\the\\dimexpr %s",var) - elseif typ == "n" then -- to be documented - -- number - context("\\the\\numexpr %s",var) - elseif typ == "s" then -- to be documented - -- string - context(var) - elseif typ == "c" then -- to be documented - -- color - MPcolor(var) - else - context(var) - end - end -end - --- function metapost.formatnumber(f,n) -- just lua format --- f = gsub(f,"@(%d)","%%.%1") --- f = gsub(f,"@","%%") --- f = format(f,tonumber(n) or 0) --- f = gsub(f,"e([%+%-%d]+)",function(s) --- return format("\\times10^{%s}",tonumber(s) or s) -- strips leading zeros --- end) --- context.mathematics(f) --- end - --- formatters["\\times10^{%N}"](s) -- strips leading zeros too - -local one = Cs((P("@")/"%%." 
* (R("09")^1) + P("@")/"%%" + 1)^0) -local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / function(s) return format("\\times10^{%s}",tonumber(s) or s) end) + 1)^1) - --- local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / formatters["\\times10^{%N}"]) + 1)^1) - -function metapost.formatnumber(fmt,n) -- just lua format - context.mathematics(lpegmatch(two,format(lpegmatch(one,fmt),n))) -end +if not modules then modules = { } end modules ['meta-ini'] = { + version = 1.001, + comment = "companion to meta-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local tonumber = tonumber +local format = string.format +local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns +local P, Cs, R, S, C, Cc = lpeg.P, lpeg.Cs, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc + +local context = context + +metapost = metapost or { } + +-- for the moment downward compatible + +local report_metapost = logs.reporter ("metapost") +local status_metapost = logs.messenger("metapost") + +local patterns = { "meta-imp-%s.mkiv", "meta-imp-%s.tex", "meta-%s.mkiv", "meta-%s.tex" } -- we are compatible + +local function action(name,foundname) + status_metapost("library %a is loaded",name) + context.startreadingfile() + context.input(foundname) + context.stopreadingfile() +end + +local function failure(name) + report_metapost("library %a is unknown or invalid",name) +end + +function commands.useMPlibrary(name) + commands.uselibrary { + name = name, + patterns = patterns, + action = action, + failure = failure, + onlyonce = true, + } +end + +-- experimental + +local colorhash = attributes.list[attributes.private('color')] + +local textype = tex.type +local MPcolor = context.MPcolor + +-- local validdimen = lpegpatterns.validdimen * P(-1) +-- +-- function commands.prepareMPvariable(v) -- slow but ok +-- if v == "" then +-- MPcolor("black") +-- else +-- local typ, var = match(v,"(.):(.*)") +-- if not typ then +-- -- parse +-- if colorhash[v] then +-- MPcolor(v) +-- elseif tonumber(v) then +-- context(v) +-- elseif lpegmatch(validdimen,v) then +-- return context("\\the\\dimexpr %s",v) +-- else +-- for s in gmatch(v,"\\([a-zA-Z]+)") do -- can have trailing space +-- local t = textype(s) +-- if t == "dimen" then +-- return context("\\the\\dimexpr %s",v) +-- elseif t == "count" then +-- return context("\\the\\numexpr %s",v) +-- end +-- end +-- context("\\number %s",v) -- 0.4 ... 
+-- end +-- elseif typ == "d" then -- to be documented +-- -- dimension +-- context("\\the\\dimexpr %s",var) +-- elseif typ == "n" then -- to be documented +-- -- number +-- context("\\the\\numexpr %s",var) +-- elseif typ == "s" then -- to be documented +-- -- string +-- context(var) +-- elseif typ == "c" then -- to be documented +-- -- color +-- MPcolor(var) +-- else +-- context(var) +-- end +-- end +-- end + +-- we can actually get the dimen/count values here + +local dimenorname = + lpegpatterns.validdimen / function(s) + context("\\the\\dimexpr %s",s) + end + + (C(lpegpatterns.float) + Cc(1)) * lpegpatterns.space^0 * P("\\") * C(lpegpatterns.letter^1) / function(f,s) + local t = textype(s) + if t == "dimen" then + context("\\the\\dimexpr %s\\%s",f,s) + elseif t == "count" then + context("\\the\\numexpr \\%s * %s\\relax",s,f) -- \scratchcounter is not permitted + end + end + +local splitter = lpeg.splitat(":",true) + +function commands.prepareMPvariable(v) -- slow but ok + if v == "" then + MPcolor("black") + else + local typ, var = lpegmatch(splitter,v) + if not var then + -- parse + if colorhash[v] then + MPcolor(v) + elseif tonumber(v) then + context(v) + elseif not lpegmatch(dimenorname,v) then + context("\\number %s",v) -- 0.4 ... + end + elseif typ == "d" then -- to be documented + -- dimension + context("\\the\\dimexpr %s",var) + elseif typ == "n" then -- to be documented + -- number + context("\\the\\numexpr %s",var) + elseif typ == "s" then -- to be documented + -- string + context(var) + elseif typ == "c" then -- to be documented + -- color + MPcolor(var) + else + context(var) + end + end +end + +-- function metapost.formatnumber(f,n) -- just lua format +-- f = gsub(f,"@(%d)","%%.%1") +-- f = gsub(f,"@","%%") +-- f = format(f,tonumber(n) or 0) +-- f = gsub(f,"e([%+%-%d]+)",function(s) +-- return format("\\times10^{%s}",tonumber(s) or s) -- strips leading zeros +-- end) +-- context.mathematics(f) +-- end + +-- formatters["\\times10^{%N}"](s) -- strips leading zeros too + +local one = Cs((P("@")/"%%." * (R("09")^1) + P("@")/"%%" + 1)^0) +local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / function(s) return format("\\times10^{%s}",tonumber(s) or s) end) + 1)^1) + +-- local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / formatters["\\times10^{%N}"]) + 1)^1) + +function metapost.formatnumber(fmt,n) -- just lua format + context.mathematics(lpegmatch(two,format(lpegmatch(one,fmt),n))) +end diff --git a/tex/context/base/meta-pdf.lua b/tex/context/base/meta-pdf.lua index 32e48902a..15211b560 100644 --- a/tex/context/base/meta-pdf.lua +++ b/tex/context/base/meta-pdf.lua @@ -1,567 +1,567 @@ -if not modules then modules = { } end modules ['meta-pdf'] = { - version = 1.001, - comment = "companion to meta-pdf.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Finally we used an optimized version. The test code can be found in --- meta-pdh.lua but since we no longer want to overload functione we use --- more locals now. This module keeps changing as it is also a testbed. --- --- We can make it even more efficient if needed, but as we don't use this --- code often in \MKIV\ it makes no sense. 
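-- Editorial sketch, not part of the patch: the converter below collects path
-- segments and, in flushpath, pushes every point through the inverse of the
-- pending {sx,rx,ry,sy,tx,ty} matrix before the matching "cm" is flushed.
-- A standalone version of that inversion, with a hypothetical helper name:

local function invertpoint(m, px, py)
    local sx, rx, ry, sy, tx, ty = m[1], m[2], m[3], m[4], m[5], m[6]
    local d = sx*sy - rx*ry                         -- determinant of the 2x2 part
    return (sy*(px-tx) - ry*(py-ty)) / d,           -- same expressions as in flushpath
           (sx*(py-ty) - rx*(px-tx)) / d
end

print(invertpoint({ 2, 0, 0, 2, 10, 10 }, 14, 16))  -- 2  3: undoes scale 2 plus shift 10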
- -local concat, unpack = table.concat, table.unpack -local gsub, find, byte, gmatch, match = string.gsub, string.find, string.byte, string.gmatch, string.match -local lpegmatch = lpeg.match -local round = math.round -local formatters, format = string.formatters, string.format - -local report_mptopdf = logs.reporter("graphics","mptopdf") - -local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context - -local pdfrgbcode = lpdf.rgbcode -local pdfcmykcode = lpdf.cmykcode -local pdfgraycode = lpdf.graycode -local pdfspotcode = lpdf.spotcode -local pdftransparencycode = lpdf.transparencycode -local pdffinishtransparencycode = lpdf.finishtransparencycode -local pdfliteral = nodes.pool.pdfliteral - -metapost.mptopdf = metapost.mptopdf or { } -local mptopdf = metapost.mptopdf - -mptopdf.nofconverted = 0 - -local f_translate = formatters["1 0 0 0 1 %f %f cm"] -- no %s due to 1e-035 issues -local f_concat = formatters["%f %f %f %f %f %f cm"] -- no %s due to 1e-035 issues - -local m_path, m_stack, m_texts, m_version, m_date, m_shortcuts = { }, { }, { }, 0, 0, false - -local m_stack_close, m_stack_path, m_stack_concat = false, { }, nil -local extra_path_code, ignore_path = nil, false -local specials = { } - -local function resetpath() - m_stack_close, m_stack_path, m_stack_concat = false, { }, nil -end - -local function resetall() - m_path, m_stack, m_texts, m_version, m_shortcuts = { }, { }, { }, 0, false - extra_path_code, ignore_path = nil, false - specials = { } - resetpath() -end - -resetall() - --- -- this does not work as expected (displacement of text) beware, needs another --- -- comment hack --- --- local function pdfcode(str) --- context(pdfliteral(str)) --- end - -local pdfcode = context.pdfliteral - -local function mpscode(str) - if ignore_path then - pdfcode("h W n") - if extra_path_code then - pdfcode(extra_path_code) - extra_path_code = nil - end - ignore_path = false - else - pdfcode(str) - end -end - --- auxiliary functions - -local function flushconcat() - if m_stack_concat then - mpscode(f_concatm(unpack(m_stack_concat))) - m_stack_concat = nil - end -end - -local function flushpath(cmd) - if #m_stack_path > 0 then - local path = { } - if m_stack_concat then - local sx, sy = m_stack_concat[1], m_stack_concat[4] - local rx, ry = m_stack_concat[2], m_stack_concat[3] - local tx, ty = m_stack_concat[5], m_stack_concat[6] - local d = (sx*sy) - (rx*ry) - for k=1,#m_stack_path do - local v = m_stack_path[k] - local px, py = v[1], v[2] ; v[1], v[2] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[1],v[2]) - if #v == 7 then - local px, py = v[3], v[4] ; v[3], v[4] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[3],v[4]) - local px, py = v[5], v[6] ; v[5], v[6] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[5],v[6]) - end - path[k] = concat(v," ") - end - else - for k=1,#m_stack_path do - path[k] = concat(m_stack_path[k]," ") - end - end - flushconcat() - pdfcode(concat(path," ")) - if m_stack_close then - mpscode("h " .. 
cmd) - else - mpscode(cmd) - end - end - resetpath() -end - --- mp interface - -local mps = { } - -function mps.creator(a, b, c) - m_version = tonumber(b) -end - -function mps.creationdate(a) - m_date = a -end - -function mps.newpath() - m_stack_path = { } -end - -function mps.boundingbox(llx, lly, urx, ury) - context.setMPboundingbox(llx,lly,urx,ury) -end - -function mps.moveto(x,y) - m_stack_path[#m_stack_path+1] = { x, y, "m" } -end - -function mps.curveto(ax, ay, bx, by, cx, cy) - m_stack_path[#m_stack_path+1] = { ax, ay, bx, by, cx, cy, "c" } -end - -function mps.lineto(x,y) - m_stack_path[#m_stack_path+1] = { x, y, "l" } -end - -function mps.rlineto(x,y) - local dx, dy = 0, 0 - local topofstack = #m_stack_path - if topofstack > 0 then - local msp = m_stack_path[topofstack] - dx = msp[1] - dy = msp[2] - end - m_stack_path[topofstack+1] = {dx,dy,"l"} -end - -function mps.translate(tx,ty) - mpscode(f_translate(tx,ty) -end - -function mps.scale(sx,sy) - m_stack_concat = {sx,0,0,sy,0,0} -end - -function mps.concat(sx, rx, ry, sy, tx, ty) - m_stack_concat = {sx,rx,ry,sy,tx,ty} -end - -function mps.setlinejoin(d) - mpscode(d .. " j") -end - -function mps.setlinecap(d) - mpscode(d .. " J") -end - -function mps.setmiterlimit(d) - mpscode(d .. " M") -end - -function mps.gsave() - mpscode("q") -end - -function mps.grestore() - mpscode("Q") -end - -function mps.setdash(...) -- can be made faster, operate on t = { ... } - local n = select("#",...) - mpscode("[" .. concat({...}," ",1,n-1) .. "] " .. select(n,...) .. " d") - -- mpscode("[" .. concat({select(1,n-1)}," ") .. "] " .. select(n,...) .. " d") -end - -function mps.resetdash() - mpscode("[ ] 0 d") -end - -function mps.setlinewidth(d) - mpscode(d .. " w") -end - -function mps.closepath() - m_stack_close = true -end - -function mps.fill() - flushpath('f') -end - -function mps.stroke() - flushpath('S') -end - -function mps.both() - flushpath('B') -end - -function mps.clip() - flushpath('W n') -end - -function mps.textext(font, scale, str) -- old parser - local dx, dy = 0, 0 - if #m_stack_path > 0 then - dx, dy = m_stack_path[1][1], m_stack_path[1][2] - end - flushconcat() - context.MPtextext(font,scale,str,dx,dy) - resetpath() -end - -local handlers = { } - -handlers[1] = function(s) - pdfcode(pdffinishtransparencycode()) - pdfcode(pdfcmykcode(mps.colormodel,s[3],s[4],s[5],s[6])) -end -handlers[2] = function(s) - pdfcode(pdffinishtransparencycode()) - pdfcode(pdfspotcode(mps.colormodel,s[3],s[4],s[5],s[6])) -end -handlers[3] = function(s) - pdfcode(pdfrgbcode(mps.colormodel,s[4],s[5],s[6])) - pdfcode(pdftransparencycode(s[2],s[3])) -end -handlers[4] = function(s) - pdfcode(pdfcmykcode(mps.colormodel,s[4],s[5],s[6],s[7])) - pdfcode(pdftransparencycode(s[2],s[3])) -end -handlers[5] = function(s) - pdfcode(pdfspotcode(mps.colormodel,s[4],s[5],s[6],s[7])) - pdfcode(pdftransparencycode(s[2],s[3])) -end - --- todo: color conversion - -local nofshades, tn = 0, tonumber - -local function linearshade(colorspace,domain,ca,cb,coordinates) - pdfcode(pdffinishtransparencycode()) - nofshades = nofshades + 1 - local name = formatters["MpsSh%s"](nofshades) - lpdf.linearshade(name,domain,ca,cb,1,colorspace,coordinates) - extra_path_code, ignore_path = formatters["/%s sh Q"](name), true - pdfcode("q /Pattern cs") -end - -local function circularshade(colorspace,domain,ca,cb,coordinates) - pdfcode(pdffinishtransparencycode()) - nofshades = nofshades + 1 - local name = formatters["MpsSh%s"](nofshades) - 
lpdf.circularshade(name,domain,ca,cb,1,colorspace,coordinates) - extra_path_code, ignore_path = formatters["/%s sh Q"](name), true - pdfcode("q /Pattern cs") -end - -handlers[30] = function(s) - linearshade("DeviceRGB", { tn(s[ 2]), tn(s[ 3]) }, - { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]) }, { tn(s[10]), tn(s[11]), tn(s[12]) }, - { tn(s[ 8]), tn(s[ 9]), tn(s[13]), tn(s[14]) } ) -end - -handlers[31] = function(s) - circularshade("DeviceRGB", { tn(s[ 2]), tn(s[ 3]) }, - { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]) }, { tn(s[11]), tn(s[12]), tn(s[13]) }, - { tn(s[ 8]), tn(s[ 9]), tn(s[10]), tn(s[14]), tn(s[15]), tn(s[16]) } ) -end - -handlers[32] = function(s) - linearshade("DeviceCMYK", { tn(s[ 2]), tn(s[ 3]) }, - { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]), tn(s[ 8]) }, { tn(s[11]), tn(s[12]), tn(s[13]), tn(s[14]) }, - { tn(s[ 9]), tn(s[10]), tn(s[15]), tn(s[16]) } ) -end - -handlers[33] = function(s) - circularshade("DeviceCMYK", { tn(s[ 2]), tn(s[ 3]) }, - { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]), tn(s[ 8]) }, { tn(s[12]), tn(s[13]), tn(s[14]), tn(s[15]) }, - { tn(s[ 9]), tn(s[10]), tn(s[11]), tn(s[16]), tn(s[17]), tn(s[18]) } ) -end - -handlers[34] = function(s) -- todo (after further cleanup) - linearshade("DeviceGray", { tn(s[ 2]), tn(s[ 3]) }, { 0 }, { 1 }, { tn(s[9]), tn(s[10]), tn(s[15]), tn(s[16]) } ) -end - -handlers[35] = function(s) -- todo (after further cleanup) - circularshade("DeviceGray", { tn(s[ 2]), tn(s[ 3]) }, { 0 }, { 1 }, { tn(s[9]), tn(s[10]), tn(s[15]), tn(s[16]) } ) -end - --- not supported in mkiv , use mplib instead - -handlers[10] = function() report_mptopdf("skipping special %s",10) end -handlers[20] = function() report_mptopdf("skipping special %s",20) end -handlers[50] = function() report_mptopdf("skipping special %s",50) end - ---end of not supported - -function mps.setrgbcolor(r,g,b) -- extra check - r, g = tonumber(r), tonumber(g) -- needed when we use lpeg - if r == 0.0123 and g < 0.1 then - g, b = round(g*10000), round(b*10000) - local s = specials[b] - local h = round(s[#s]) - local handler = handlers[h] - if handler then - handler(s) - else - report_mptopdf("unknown special handler %s (1)",h) - end - elseif r == 0.123 and g < 0.1 then - g, b = round(g*1000), round(b*1000) - local s = specials[b] - local h = round(s[#s]) - local handler = handlers[h] - if handler then - handler(s) - else - report_mptopdf("unknown special handler %s (2)",h) - end - else - pdfcode(pdffinishtransparencycode()) - pdfcode(pdfrgbcode(mps.colormodel,r,g,b)) - end -end - -function mps.setcmykcolor(c,m,y,k) - pdfcode(pdffinishtransparencycode()) - pdfcode(pdfcmykcode(mps.colormodel,c,m,y,k)) -end - -function mps.setgray(s) - pdfcode(pdffinishtransparencycode()) - pdfcode(pdfgraycode(mps.colormodel,s)) -end - -function mps.specials(version,signal,factor) -- 2.0 123 1000 -end - -function mps.special(...) -- 7 1 0.5 1 0 0 1 3 - local t = { ... } - local n = tonumber(t[#t-1]) - specials[n] = t -end - -function mps.begindata() -end - -function mps.enddata() -end - -function mps.showpage() -end - --- lpeg parser - --- The lpeg based parser is rather optimized for the kind of output --- that MetaPost produces. It's my first real lpeg code, which may --- show. Because the parser binds to functions, we define it last. 
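-- Editorial sketch, not part of the patch: the grammar below binds captures
-- directly to the mps.* handlers with lpeg's "/" operator, so matching the
-- MetaPost output is also the dispatch. Reduced to a single operator, with
-- illustrative names only:

local lpeg = lpeg or require("lpeg")
local P, S, C = lpeg.P, lpeg.S, lpeg.C

local sp     = P(" ")^1
local number = C(S("0123456789.-+")^1)

local function moveto(x, y)
    print("moveto", x, y)                           -- a real handler emits "x y m"
end

local rule = ((number * sp)^2 * P("moveto")) / moveto

lpeg.match(rule, "100 200 moveto")                  -- prints: moveto  100  200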
- -local lpegP, lpegR, lpegS, lpegC, lpegCc, lpegCs = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs - -local digit = lpegR("09") -local eol = lpegS('\r\n')^1 -local sp = lpegP(' ')^1 -local space = lpegS(' \r\n')^1 -local number = lpegS('0123456789.-+')^1 -local nonspace = lpegP(1-lpegS(' \r\n'))^1 - -local spec = digit^2 * lpegP("::::") * digit^2 -local text = lpegCc("{") * ( - lpegP("\\") * ( (digit * digit * digit) / function(n) return "c" .. tonumber(n,8) end) + - lpegP(" ") / function(n) return "\\c32" end + -- never in new mp - lpegP(1) / function(n) return "\\c" .. byte(n) end - ) * lpegCc("}") -local package = lpegCs(spec + text^0) - -function mps.fshow(str,font,scale) -- lpeg parser - mps.textext(font,scale,lpegmatch(package,str)) -end - -local cnumber = lpegC(number) -local cstring = lpegC(nonspace) - -local specials = (lpegP("%%MetaPostSpecials:") * sp * (cstring * sp^0)^0 * eol) / mps.specials -local special = (lpegP("%%MetaPostSpecial:") * sp * (cstring * sp^0)^0 * eol) / mps.special -local boundingbox = (lpegP("%%BoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox -local highresboundingbox = (lpegP("%%HiResBoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox - -local setup = lpegP("%%BeginSetup") * (1 - lpegP("%%EndSetup") )^1 -local prolog = lpegP("%%BeginProlog") * (1 - lpegP("%%EndProlog"))^1 -local comment = lpegP('%')^1 * (1 - eol)^1 - -local curveto = ((cnumber * sp)^6 * lpegP("curveto") ) / mps.curveto -local lineto = ((cnumber * sp)^2 * lpegP("lineto") ) / mps.lineto -local rlineto = ((cnumber * sp)^2 * lpegP("rlineto") ) / mps.rlineto -local moveto = ((cnumber * sp)^2 * lpegP("moveto") ) / mps.moveto -local setrgbcolor = ((cnumber * sp)^3 * lpegP("setrgbcolor") ) / mps.setrgbcolor -local setcmykcolor = ((cnumber * sp)^4 * lpegP("setcmykcolor") ) / mps.setcmykcolor -local setgray = ((cnumber * sp)^1 * lpegP("setgray") ) / mps.setgray -local newpath = ( lpegP("newpath") ) / mps.newpath -local closepath = ( lpegP("closepath") ) / mps.closepath -local fill = ( lpegP("fill") ) / mps.fill -local stroke = ( lpegP("stroke") ) / mps.stroke -local clip = ( lpegP("clip") ) / mps.clip -local both = ( lpegP("gsave fill grestore")) / mps.both -local showpage = ( lpegP("showpage") ) -local setlinejoin = ((cnumber * sp)^1 * lpegP("setlinejoin") ) / mps.setlinejoin -local setlinecap = ((cnumber * sp)^1 * lpegP("setlinecap") ) / mps.setlinecap -local setmiterlimit = ((cnumber * sp)^1 * lpegP("setmiterlimit") ) / mps.setmiterlimit -local gsave = ( lpegP("gsave") ) / mps.gsave -local grestore = ( lpegP("grestore") ) / mps.grestore - -local setdash = (lpegP("[") * (cnumber * sp^0)^0 * lpegP("]") * sp * cnumber * sp * lpegP("setdash")) / mps.setdash -local concat = (lpegP("[") * (cnumber * sp^0)^6 * lpegP("]") * sp * lpegP("concat") ) / mps.concat -local scale = ( (cnumber * sp^0)^6 * sp * lpegP("concat") ) / mps.concat - -local fshow = (lpegP("(") * lpegC((1-lpegP(")"))^1) * lpegP(")") * space * cstring * space * cnumber * space * lpegP("fshow")) / mps.fshow -local fshow = (lpegP("(") * lpegCs( ( lpegP("\\(")/"\\050" + lpegP("\\)")/"\\051" + (1-lpegP(")")) )^1 ) - * lpegP(")") * space * cstring * space * cnumber * space * lpegP("fshow")) / mps.fshow - -local setlinewidth_x = (lpegP("0") * sp * cnumber * sp * lpegP("dtransform truncate idtransform setlinewidth pop")) / mps.setlinewidth -local setlinewidth_y = (cnumber * sp * lpegP("0 dtransform exch truncate exch idtransform pop setlinewidth") ) / mps.setlinewidth - -local c = ((cnumber * sp)^6 * lpegP("c") ) 
/ mps.curveto -- ^6 very inefficient, ^1 ok too -local l = ((cnumber * sp)^2 * lpegP("l") ) / mps.lineto -local r = ((cnumber * sp)^2 * lpegP("r") ) / mps.rlineto -local m = ((cnumber * sp)^2 * lpegP("m") ) / mps.moveto -local vlw = ((cnumber * sp)^1 * lpegP("vlw")) / mps.setlinewidth -local hlw = ((cnumber * sp)^1 * lpegP("hlw")) / mps.setlinewidth - -local R = ((cnumber * sp)^3 * lpegP("R") ) / mps.setrgbcolor -local C = ((cnumber * sp)^4 * lpegP("C") ) / mps.setcmykcolor -local G = ((cnumber * sp)^1 * lpegP("G") ) / mps.setgray - -local lj = ((cnumber * sp)^1 * lpegP("lj") ) / mps.setlinejoin -local ml = ((cnumber * sp)^1 * lpegP("ml") ) / mps.setmiterlimit -local lc = ((cnumber * sp)^1 * lpegP("lc") ) / mps.setlinecap - -local n = lpegP("n") / mps.newpath -local p = lpegP("p") / mps.closepath -local S = lpegP("S") / mps.stroke -local F = lpegP("F") / mps.fill -local B = lpegP("B") / mps.both -local W = lpegP("W") / mps.clip -local P = lpegP("P") / mps.showpage - -local q = lpegP("q") / mps.gsave -local Q = lpegP("Q") / mps.grestore - -local sd = (lpegP("[") * (cnumber * sp^0)^0 * lpegP("]") * sp * cnumber * sp * lpegP("sd")) / mps.setdash -local rd = ( lpegP("rd")) / mps.resetdash - -local s = ( (cnumber * sp^0)^2 * lpegP("s") ) / mps.scale -local t = (lpegP("[") * (cnumber * sp^0)^6 * lpegP("]") * sp * lpegP("t") ) / mps.concat - --- experimental - -local preamble = ( - prolog + setup + - boundingbox + highresboundingbox + specials + special + - comment -) - -local procset = ( - lj + ml + lc + - c + l + m + n + p + r + - R + C + G + - S + F + B + W + - vlw + hlw + - Q + q + - sd + rd + - t + s + - fshow + - P -) - -local verbose = ( - curveto + lineto + moveto + newpath + closepath + rlineto + - setrgbcolor + setcmykcolor + setgray + - setlinejoin + setmiterlimit + setlinecap + - stroke + fill + clip + both + - setlinewidth_x + setlinewidth_y + - gsave + grestore + - concat + scale + - fshow + - setdash + -- no resetdash - showpage -) - --- order matters in terms of speed / we could check for procset first - -local captures_old = ( space + verbose + preamble )^0 -local captures_new = ( space + verbose + procset + preamble )^0 - -local function parse(m_data) - if find(m_data,"%%%%BeginResource: procset mpost") then - lpegmatch(captures_new,m_data) - else - lpegmatch(captures_old,m_data) - end -end - --- main converter - -local a_colorspace = attributes.private('colormodel') - -function mptopdf.convertmpstopdf(name) - resetall() - local ok, m_data, n = resolvers.loadbinfile(name, 'tex') -- we need a binary load ! 
- if ok then - mps.colormodel = tex.attribute[a_colorspace] - statistics.starttiming(mptopdf) - mptopdf.nofconverted = mptopdf.nofconverted + 1 - pdfcode(formatters["\\letterpercent\\space mptopdf begin: n=%s, file=%s"](mptopdf.nofconverted,file.basename(name))) - pdfcode("q 1 0 0 1 0 0 cm") - parse(m_data) - pdfcode(pdffinishtransparencycode()) - pdfcode("Q") - pdfcode("\\letterpercent\\space mptopdf end") - resetall() - statistics.stoptiming(mptopdf) - else - report_mptopdf("file %a not found",name) - end -end - --- status info - -statistics.register("mps conversion time",function() - local n = mptopdf.nofconverted - if n > 0 then - return format("%s seconds, %s conversions", statistics.elapsedtime(mptopdf),n) - else - return nil - end -end) +if not modules then modules = { } end modules ['meta-pdf'] = { + version = 1.001, + comment = "companion to meta-pdf.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Finally we used an optimized version. The test code can be found in +-- meta-pdh.lua but since we no longer want to overload functione we use +-- more locals now. This module keeps changing as it is also a testbed. +-- +-- We can make it even more efficient if needed, but as we don't use this +-- code often in \MKIV\ it makes no sense. + +local concat, unpack = table.concat, table.unpack +local gsub, find, byte, gmatch, match = string.gsub, string.find, string.byte, string.gmatch, string.match +local lpegmatch = lpeg.match +local round = math.round +local formatters, format = string.formatters, string.format + +local report_mptopdf = logs.reporter("graphics","mptopdf") + +local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context + +local pdfrgbcode = lpdf.rgbcode +local pdfcmykcode = lpdf.cmykcode +local pdfgraycode = lpdf.graycode +local pdfspotcode = lpdf.spotcode +local pdftransparencycode = lpdf.transparencycode +local pdffinishtransparencycode = lpdf.finishtransparencycode +local pdfliteral = nodes.pool.pdfliteral + +metapost.mptopdf = metapost.mptopdf or { } +local mptopdf = metapost.mptopdf + +mptopdf.nofconverted = 0 + +local f_translate = formatters["1 0 0 0 1 %f %f cm"] -- no %s due to 1e-035 issues +local f_concat = formatters["%f %f %f %f %f %f cm"] -- no %s due to 1e-035 issues + +local m_path, m_stack, m_texts, m_version, m_date, m_shortcuts = { }, { }, { }, 0, 0, false + +local m_stack_close, m_stack_path, m_stack_concat = false, { }, nil +local extra_path_code, ignore_path = nil, false +local specials = { } + +local function resetpath() + m_stack_close, m_stack_path, m_stack_concat = false, { }, nil +end + +local function resetall() + m_path, m_stack, m_texts, m_version, m_shortcuts = { }, { }, { }, 0, false + extra_path_code, ignore_path = nil, false + specials = { } + resetpath() +end + +resetall() + +-- -- this does not work as expected (displacement of text) beware, needs another +-- -- comment hack +-- +-- local function pdfcode(str) +-- context(pdfliteral(str)) +-- end + +local pdfcode = context.pdfliteral + +local function mpscode(str) + if ignore_path then + pdfcode("h W n") + if extra_path_code then + pdfcode(extra_path_code) + extra_path_code = nil + end + ignore_path = false + else + pdfcode(str) + end +end + +-- auxiliary functions + +local function flushconcat() + if m_stack_concat then + mpscode(f_concatm(unpack(m_stack_concat))) + m_stack_concat = nil + end +end + +local function flushpath(cmd) + if #m_stack_path > 0 then + 
local path = { } + if m_stack_concat then + local sx, sy = m_stack_concat[1], m_stack_concat[4] + local rx, ry = m_stack_concat[2], m_stack_concat[3] + local tx, ty = m_stack_concat[5], m_stack_concat[6] + local d = (sx*sy) - (rx*ry) + for k=1,#m_stack_path do + local v = m_stack_path[k] + local px, py = v[1], v[2] ; v[1], v[2] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[1],v[2]) + if #v == 7 then + local px, py = v[3], v[4] ; v[3], v[4] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[3],v[4]) + local px, py = v[5], v[6] ; v[5], v[6] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[5],v[6]) + end + path[k] = concat(v," ") + end + else + for k=1,#m_stack_path do + path[k] = concat(m_stack_path[k]," ") + end + end + flushconcat() + pdfcode(concat(path," ")) + if m_stack_close then + mpscode("h " .. cmd) + else + mpscode(cmd) + end + end + resetpath() +end + +-- mp interface + +local mps = { } + +function mps.creator(a, b, c) + m_version = tonumber(b) +end + +function mps.creationdate(a) + m_date = a +end + +function mps.newpath() + m_stack_path = { } +end + +function mps.boundingbox(llx, lly, urx, ury) + context.setMPboundingbox(llx,lly,urx,ury) +end + +function mps.moveto(x,y) + m_stack_path[#m_stack_path+1] = { x, y, "m" } +end + +function mps.curveto(ax, ay, bx, by, cx, cy) + m_stack_path[#m_stack_path+1] = { ax, ay, bx, by, cx, cy, "c" } +end + +function mps.lineto(x,y) + m_stack_path[#m_stack_path+1] = { x, y, "l" } +end + +function mps.rlineto(x,y) + local dx, dy = 0, 0 + local topofstack = #m_stack_path + if topofstack > 0 then + local msp = m_stack_path[topofstack] + dx = msp[1] + dy = msp[2] + end + m_stack_path[topofstack+1] = {dx,dy,"l"} +end + +function mps.translate(tx,ty) + mpscode(f_translate(tx,ty) +end + +function mps.scale(sx,sy) + m_stack_concat = {sx,0,0,sy,0,0} +end + +function mps.concat(sx, rx, ry, sy, tx, ty) + m_stack_concat = {sx,rx,ry,sy,tx,ty} +end + +function mps.setlinejoin(d) + mpscode(d .. " j") +end + +function mps.setlinecap(d) + mpscode(d .. " J") +end + +function mps.setmiterlimit(d) + mpscode(d .. " M") +end + +function mps.gsave() + mpscode("q") +end + +function mps.grestore() + mpscode("Q") +end + +function mps.setdash(...) -- can be made faster, operate on t = { ... } + local n = select("#",...) + mpscode("[" .. concat({...}," ",1,n-1) .. "] " .. select(n,...) .. " d") + -- mpscode("[" .. concat({select(1,n-1)}," ") .. "] " .. select(n,...) .. " d") +end + +function mps.resetdash() + mpscode("[ ] 0 d") +end + +function mps.setlinewidth(d) + mpscode(d .. 
" w") +end + +function mps.closepath() + m_stack_close = true +end + +function mps.fill() + flushpath('f') +end + +function mps.stroke() + flushpath('S') +end + +function mps.both() + flushpath('B') +end + +function mps.clip() + flushpath('W n') +end + +function mps.textext(font, scale, str) -- old parser + local dx, dy = 0, 0 + if #m_stack_path > 0 then + dx, dy = m_stack_path[1][1], m_stack_path[1][2] + end + flushconcat() + context.MPtextext(font,scale,str,dx,dy) + resetpath() +end + +local handlers = { } + +handlers[1] = function(s) + pdfcode(pdffinishtransparencycode()) + pdfcode(pdfcmykcode(mps.colormodel,s[3],s[4],s[5],s[6])) +end +handlers[2] = function(s) + pdfcode(pdffinishtransparencycode()) + pdfcode(pdfspotcode(mps.colormodel,s[3],s[4],s[5],s[6])) +end +handlers[3] = function(s) + pdfcode(pdfrgbcode(mps.colormodel,s[4],s[5],s[6])) + pdfcode(pdftransparencycode(s[2],s[3])) +end +handlers[4] = function(s) + pdfcode(pdfcmykcode(mps.colormodel,s[4],s[5],s[6],s[7])) + pdfcode(pdftransparencycode(s[2],s[3])) +end +handlers[5] = function(s) + pdfcode(pdfspotcode(mps.colormodel,s[4],s[5],s[6],s[7])) + pdfcode(pdftransparencycode(s[2],s[3])) +end + +-- todo: color conversion + +local nofshades, tn = 0, tonumber + +local function linearshade(colorspace,domain,ca,cb,coordinates) + pdfcode(pdffinishtransparencycode()) + nofshades = nofshades + 1 + local name = formatters["MpsSh%s"](nofshades) + lpdf.linearshade(name,domain,ca,cb,1,colorspace,coordinates) + extra_path_code, ignore_path = formatters["/%s sh Q"](name), true + pdfcode("q /Pattern cs") +end + +local function circularshade(colorspace,domain,ca,cb,coordinates) + pdfcode(pdffinishtransparencycode()) + nofshades = nofshades + 1 + local name = formatters["MpsSh%s"](nofshades) + lpdf.circularshade(name,domain,ca,cb,1,colorspace,coordinates) + extra_path_code, ignore_path = formatters["/%s sh Q"](name), true + pdfcode("q /Pattern cs") +end + +handlers[30] = function(s) + linearshade("DeviceRGB", { tn(s[ 2]), tn(s[ 3]) }, + { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]) }, { tn(s[10]), tn(s[11]), tn(s[12]) }, + { tn(s[ 8]), tn(s[ 9]), tn(s[13]), tn(s[14]) } ) +end + +handlers[31] = function(s) + circularshade("DeviceRGB", { tn(s[ 2]), tn(s[ 3]) }, + { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]) }, { tn(s[11]), tn(s[12]), tn(s[13]) }, + { tn(s[ 8]), tn(s[ 9]), tn(s[10]), tn(s[14]), tn(s[15]), tn(s[16]) } ) +end + +handlers[32] = function(s) + linearshade("DeviceCMYK", { tn(s[ 2]), tn(s[ 3]) }, + { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]), tn(s[ 8]) }, { tn(s[11]), tn(s[12]), tn(s[13]), tn(s[14]) }, + { tn(s[ 9]), tn(s[10]), tn(s[15]), tn(s[16]) } ) +end + +handlers[33] = function(s) + circularshade("DeviceCMYK", { tn(s[ 2]), tn(s[ 3]) }, + { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]), tn(s[ 8]) }, { tn(s[12]), tn(s[13]), tn(s[14]), tn(s[15]) }, + { tn(s[ 9]), tn(s[10]), tn(s[11]), tn(s[16]), tn(s[17]), tn(s[18]) } ) +end + +handlers[34] = function(s) -- todo (after further cleanup) + linearshade("DeviceGray", { tn(s[ 2]), tn(s[ 3]) }, { 0 }, { 1 }, { tn(s[9]), tn(s[10]), tn(s[15]), tn(s[16]) } ) +end + +handlers[35] = function(s) -- todo (after further cleanup) + circularshade("DeviceGray", { tn(s[ 2]), tn(s[ 3]) }, { 0 }, { 1 }, { tn(s[9]), tn(s[10]), tn(s[15]), tn(s[16]) } ) +end + +-- not supported in mkiv , use mplib instead + +handlers[10] = function() report_mptopdf("skipping special %s",10) end +handlers[20] = function() report_mptopdf("skipping special %s",20) end +handlers[50] = function() report_mptopdf("skipping special %s",50) end + +--end of not supported + +function 
mps.setrgbcolor(r,g,b) -- extra check + r, g = tonumber(r), tonumber(g) -- needed when we use lpeg + if r == 0.0123 and g < 0.1 then + g, b = round(g*10000), round(b*10000) + local s = specials[b] + local h = round(s[#s]) + local handler = handlers[h] + if handler then + handler(s) + else + report_mptopdf("unknown special handler %s (1)",h) + end + elseif r == 0.123 and g < 0.1 then + g, b = round(g*1000), round(b*1000) + local s = specials[b] + local h = round(s[#s]) + local handler = handlers[h] + if handler then + handler(s) + else + report_mptopdf("unknown special handler %s (2)",h) + end + else + pdfcode(pdffinishtransparencycode()) + pdfcode(pdfrgbcode(mps.colormodel,r,g,b)) + end +end + +function mps.setcmykcolor(c,m,y,k) + pdfcode(pdffinishtransparencycode()) + pdfcode(pdfcmykcode(mps.colormodel,c,m,y,k)) +end + +function mps.setgray(s) + pdfcode(pdffinishtransparencycode()) + pdfcode(pdfgraycode(mps.colormodel,s)) +end + +function mps.specials(version,signal,factor) -- 2.0 123 1000 +end + +function mps.special(...) -- 7 1 0.5 1 0 0 1 3 + local t = { ... } + local n = tonumber(t[#t-1]) + specials[n] = t +end + +function mps.begindata() +end + +function mps.enddata() +end + +function mps.showpage() +end + +-- lpeg parser + +-- The lpeg based parser is rather optimized for the kind of output +-- that MetaPost produces. It's my first real lpeg code, which may +-- show. Because the parser binds to functions, we define it last. + +local lpegP, lpegR, lpegS, lpegC, lpegCc, lpegCs = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs + +local digit = lpegR("09") +local eol = lpegS('\r\n')^1 +local sp = lpegP(' ')^1 +local space = lpegS(' \r\n')^1 +local number = lpegS('0123456789.-+')^1 +local nonspace = lpegP(1-lpegS(' \r\n'))^1 + +local spec = digit^2 * lpegP("::::") * digit^2 +local text = lpegCc("{") * ( + lpegP("\\") * ( (digit * digit * digit) / function(n) return "c" .. tonumber(n,8) end) + + lpegP(" ") / function(n) return "\\c32" end + -- never in new mp + lpegP(1) / function(n) return "\\c" .. 
byte(n) end + ) * lpegCc("}") +local package = lpegCs(spec + text^0) + +function mps.fshow(str,font,scale) -- lpeg parser + mps.textext(font,scale,lpegmatch(package,str)) +end + +local cnumber = lpegC(number) +local cstring = lpegC(nonspace) + +local specials = (lpegP("%%MetaPostSpecials:") * sp * (cstring * sp^0)^0 * eol) / mps.specials +local special = (lpegP("%%MetaPostSpecial:") * sp * (cstring * sp^0)^0 * eol) / mps.special +local boundingbox = (lpegP("%%BoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox +local highresboundingbox = (lpegP("%%HiResBoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox + +local setup = lpegP("%%BeginSetup") * (1 - lpegP("%%EndSetup") )^1 +local prolog = lpegP("%%BeginProlog") * (1 - lpegP("%%EndProlog"))^1 +local comment = lpegP('%')^1 * (1 - eol)^1 + +local curveto = ((cnumber * sp)^6 * lpegP("curveto") ) / mps.curveto +local lineto = ((cnumber * sp)^2 * lpegP("lineto") ) / mps.lineto +local rlineto = ((cnumber * sp)^2 * lpegP("rlineto") ) / mps.rlineto +local moveto = ((cnumber * sp)^2 * lpegP("moveto") ) / mps.moveto +local setrgbcolor = ((cnumber * sp)^3 * lpegP("setrgbcolor") ) / mps.setrgbcolor +local setcmykcolor = ((cnumber * sp)^4 * lpegP("setcmykcolor") ) / mps.setcmykcolor +local setgray = ((cnumber * sp)^1 * lpegP("setgray") ) / mps.setgray +local newpath = ( lpegP("newpath") ) / mps.newpath +local closepath = ( lpegP("closepath") ) / mps.closepath +local fill = ( lpegP("fill") ) / mps.fill +local stroke = ( lpegP("stroke") ) / mps.stroke +local clip = ( lpegP("clip") ) / mps.clip +local both = ( lpegP("gsave fill grestore")) / mps.both +local showpage = ( lpegP("showpage") ) +local setlinejoin = ((cnumber * sp)^1 * lpegP("setlinejoin") ) / mps.setlinejoin +local setlinecap = ((cnumber * sp)^1 * lpegP("setlinecap") ) / mps.setlinecap +local setmiterlimit = ((cnumber * sp)^1 * lpegP("setmiterlimit") ) / mps.setmiterlimit +local gsave = ( lpegP("gsave") ) / mps.gsave +local grestore = ( lpegP("grestore") ) / mps.grestore + +local setdash = (lpegP("[") * (cnumber * sp^0)^0 * lpegP("]") * sp * cnumber * sp * lpegP("setdash")) / mps.setdash +local concat = (lpegP("[") * (cnumber * sp^0)^6 * lpegP("]") * sp * lpegP("concat") ) / mps.concat +local scale = ( (cnumber * sp^0)^6 * sp * lpegP("concat") ) / mps.concat + +local fshow = (lpegP("(") * lpegC((1-lpegP(")"))^1) * lpegP(")") * space * cstring * space * cnumber * space * lpegP("fshow")) / mps.fshow +local fshow = (lpegP("(") * lpegCs( ( lpegP("\\(")/"\\050" + lpegP("\\)")/"\\051" + (1-lpegP(")")) )^1 ) + * lpegP(")") * space * cstring * space * cnumber * space * lpegP("fshow")) / mps.fshow + +local setlinewidth_x = (lpegP("0") * sp * cnumber * sp * lpegP("dtransform truncate idtransform setlinewidth pop")) / mps.setlinewidth +local setlinewidth_y = (cnumber * sp * lpegP("0 dtransform exch truncate exch idtransform pop setlinewidth") ) / mps.setlinewidth + +local c = ((cnumber * sp)^6 * lpegP("c") ) / mps.curveto -- ^6 very inefficient, ^1 ok too +local l = ((cnumber * sp)^2 * lpegP("l") ) / mps.lineto +local r = ((cnumber * sp)^2 * lpegP("r") ) / mps.rlineto +local m = ((cnumber * sp)^2 * lpegP("m") ) / mps.moveto +local vlw = ((cnumber * sp)^1 * lpegP("vlw")) / mps.setlinewidth +local hlw = ((cnumber * sp)^1 * lpegP("hlw")) / mps.setlinewidth + +local R = ((cnumber * sp)^3 * lpegP("R") ) / mps.setrgbcolor +local C = ((cnumber * sp)^4 * lpegP("C") ) / mps.setcmykcolor +local G = ((cnumber * sp)^1 * lpegP("G") ) / mps.setgray + +local lj = ((cnumber * sp)^1 * 
lpegP("lj") ) / mps.setlinejoin +local ml = ((cnumber * sp)^1 * lpegP("ml") ) / mps.setmiterlimit +local lc = ((cnumber * sp)^1 * lpegP("lc") ) / mps.setlinecap + +local n = lpegP("n") / mps.newpath +local p = lpegP("p") / mps.closepath +local S = lpegP("S") / mps.stroke +local F = lpegP("F") / mps.fill +local B = lpegP("B") / mps.both +local W = lpegP("W") / mps.clip +local P = lpegP("P") / mps.showpage + +local q = lpegP("q") / mps.gsave +local Q = lpegP("Q") / mps.grestore + +local sd = (lpegP("[") * (cnumber * sp^0)^0 * lpegP("]") * sp * cnumber * sp * lpegP("sd")) / mps.setdash +local rd = ( lpegP("rd")) / mps.resetdash + +local s = ( (cnumber * sp^0)^2 * lpegP("s") ) / mps.scale +local t = (lpegP("[") * (cnumber * sp^0)^6 * lpegP("]") * sp * lpegP("t") ) / mps.concat + +-- experimental + +local preamble = ( + prolog + setup + + boundingbox + highresboundingbox + specials + special + + comment +) + +local procset = ( + lj + ml + lc + + c + l + m + n + p + r + + R + C + G + + S + F + B + W + + vlw + hlw + + Q + q + + sd + rd + + t + s + + fshow + + P +) + +local verbose = ( + curveto + lineto + moveto + newpath + closepath + rlineto + + setrgbcolor + setcmykcolor + setgray + + setlinejoin + setmiterlimit + setlinecap + + stroke + fill + clip + both + + setlinewidth_x + setlinewidth_y + + gsave + grestore + + concat + scale + + fshow + + setdash + -- no resetdash + showpage +) + +-- order matters in terms of speed / we could check for procset first + +local captures_old = ( space + verbose + preamble )^0 +local captures_new = ( space + verbose + procset + preamble )^0 + +local function parse(m_data) + if find(m_data,"%%%%BeginResource: procset mpost") then + lpegmatch(captures_new,m_data) + else + lpegmatch(captures_old,m_data) + end +end + +-- main converter + +local a_colorspace = attributes.private('colormodel') + +function mptopdf.convertmpstopdf(name) + resetall() + local ok, m_data, n = resolvers.loadbinfile(name, 'tex') -- we need a binary load ! + if ok then + mps.colormodel = tex.attribute[a_colorspace] + statistics.starttiming(mptopdf) + mptopdf.nofconverted = mptopdf.nofconverted + 1 + pdfcode(formatters["\\letterpercent\\space mptopdf begin: n=%s, file=%s"](mptopdf.nofconverted,file.basename(name))) + pdfcode("q 1 0 0 1 0 0 cm") + parse(m_data) + pdfcode(pdffinishtransparencycode()) + pdfcode("Q") + pdfcode("\\letterpercent\\space mptopdf end") + resetall() + statistics.stoptiming(mptopdf) + else + report_mptopdf("file %a not found",name) + end +end + +-- status info + +statistics.register("mps conversion time",function() + local n = mptopdf.nofconverted + if n > 0 then + return format("%s seconds, %s conversions", statistics.elapsedtime(mptopdf),n) + else + return nil + end +end) diff --git a/tex/context/base/meta-pdh.lua b/tex/context/base/meta-pdh.lua index 5040715c4..10fbad141 100644 --- a/tex/context/base/meta-pdh.lua +++ b/tex/context/base/meta-pdh.lua @@ -1,610 +1,610 @@ -if not modules then modules = { } end modules ['meta-pdf'] = { - version = 1.001, - comment = "companion to meta-pdf.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if true then - return -- or os.exit() -end - --- This file contains the history of the converter. We keep it around as it --- relates to the development of luatex. - --- This is the third version. 
Version 1 converted to Lua code, --- version 2 gsubbed the file into TeX code, and version 3 uses --- the new lpeg functionality and streams the result into TeX. - --- We will move old stuff to edu. - ---~ old lpeg 0.4 lpeg 0.5 ---~ 100 times test graphic 2.45 (T:1.07) 0.72 (T:0.24) 0.580 (0.560 no table) -- 0.54 optimized for one space (T:0.19) ---~ 100 times big graphic 10.44 4.30/3.35 nogb 2.914 (2.050 no table) -- 1.99 optimized for one space (T:0.85) ---~ 500 times test graphic T:1.29 T:1.16 (T:1.10 no table) -- T:1.10 - --- only needed for mp output on disk - -local concat, format, find, gsub, gmatch = table.concat, string.format, string.find, string.gsub, string.gmatch -local tostring, tonumber, select = tostring, tonumber, select -local lpegmatch = lpeg.match - -local metapost = metapost - -metapost.mptopdf = metapost.mptopdf or { } -local mptopdf = metapost.mptopdf - -mptopdf.parsers = { } -mptopdf.parser = 'none' -mptopdf.nofconverted = 0 - -function mptopdf.reset() - mptopdf.data = "" - mptopdf.path = { } - mptopdf.stack = { } - mptopdf.texts = { } - mptopdf.version = 0 - mptopdf.shortcuts = false - mptopdf.resetpath() -end - -function mptopdf.resetpath() - mptopdf.stack.close = false - mptopdf.stack.path = { } - mptopdf.stack.concat = nil - mptopdf.stack.special = false -end - -mptopdf.reset() - -function mptopdf.parsers.none() - -- no parser set -end - -function mptopdf.parse() - mptopdf.parsers[mptopdf.parser]() -end - --- old code - -mptopdf.steps = { } - -mptopdf.descapes = { - ['('] = "\\\\char40 ", - [')'] = "\\\\char41 ", - ['"'] = "\\\\char92 " -} - -function mptopdf.descape(str) - str = gsub(str,"\\(%d%d%d)",function(n) - return "\\char" .. tonumber(n,8) .. " " - end) - return gsub(str,"\\([%(%)\\])",mptopdf.descapes) -end - -function mptopdf.steps.descape(str) - str = gsub(str,"\\(%d%d%d)",function(n) - return "\\\\char" .. tonumber(n,8) .. " " - end) - return gsub(str,"\\([%(%)\\])",mptopdf.descapes) -end - -function mptopdf.steps.strip() -- .3 per expr - mptopdf.data = gsub(mptopdf.data,"^(.-)%%+Page:.-%c+(.*)%s+%a+%s+%%+EOF.*$", function(preamble, graphic) - local bbox = "0 0 0 0" - for b in gmatch(preamble,"%%%%%a+oundingBox: +(.-)%c+") do - bbox = b - end - local name, version = gmatch(preamble,"%%%%Creator: +(.-) +(.-) ") - mptopdf.version = tostring(version or "0") - if find(preamble,"/hlw{0 dtransform") then - mptopdf.shortcuts = true - end - -- the boundingbox specification needs to come before data, well, not really - return bbox .. " boundingbox\n" .. "\nbegindata\n" .. graphic .. "\nenddata\n" - end, 1) - mptopdf.data = gsub(mptopdf.data,"%%%%MetaPostSpecials: +(.-)%c+", "%1 specials\n", 1) - mptopdf.data = gsub(mptopdf.data,"%%%%MetaPostSpecial: +(.-)%c+", "%1 special\n") - mptopdf.data = gsub(mptopdf.data,"%%.-%c+", "") -end - -function mptopdf.steps.cleanup() - if not mptopdf.shortcuts then - mptopdf.data = gsub(mptopdf.data,"gsave%s+fill%s+grestore%s+stroke", "both") - mptopdf.data = gsub(mptopdf.data,"([%d%.]+)%s+([%d%.]+)%s+dtransform%s+exch%s+truncate%s+exch%s+idtransform%s+pop%s+setlinewidth", function(wx,wy) - if tonumber(wx) > 0 then return wx .. " setlinewidth" else return wy .. " setlinewidth" end - end) - mptopdf.data = gsub(mptopdf.data,"([%d%.]+)%s+([%d%.]+)%s+dtransform%s+truncate%s+idtransform%s+setlinewidth%s+pop", function(wx,wy) - if tonumber(wx) > 0 then return wx .. " setlinewidth" else return wy .. 
" setlinewidth" end - end) - end -end - -function mptopdf.steps.convert() - mptopdf.data = gsub(mptopdf.data,"%c%((.-)%) (.-) (.-) fshow", function(str,font,scale) - mptopdf.texts[mptopdf.texts+1] = {mptopdf.steps.descape(str), font, scale} - return "\n" .. #mptopdf.texts .. " textext" - end) - mptopdf.data = gsub(mptopdf.data,"%[%s*(.-)%s*%]", function(str) - return gsub(str,"%s+"," ") - end) - local t - mptopdf.data = gsub(mptopdf.data,"%s*([^%a]-)%s*(%a+)", function(args,cmd) - if cmd == "textext" then - t = mptopdf.texts[tonumber(args)] - return "metapost.mps.textext(" .. "\"" .. t[2] .. "\"," .. t[3] .. ",\"" .. t[1] .. "\")\n" - else - return "metapost.mps." .. cmd .. "(" .. gsub(args," +",",") .. ")\n" - end - end) -end - -function mptopdf.steps.process() - assert(loadstring(mptopdf.data))() -- () runs the loaded chunk -end - -function mptopdf.parsers.gsub() - mptopdf.steps.strip() - mptopdf.steps.cleanup() - mptopdf.steps.convert() - mptopdf.steps.process() -end - --- end of old code - --- from lua to tex - -function mptopdf.pdfcode(str) - context.pdfliteral(str) -- \\MPScode -end - -function mptopdf.texcode(str) - context(str) -end - --- auxiliary functions - -function mptopdf.flushconcat() - if mptopdf.stack.concat then - mptopdf.pdfcode(concat(mptopdf.stack.concat," ") .. " cm") - mptopdf.stack.concat = nil - end -end - -function mptopdf.flushpath(cmd) - -- faster: no local function and loop - if #mptopdf.stack.path > 0 then - local path = { } - if mptopdf.stack.concat then - local sx, sy = mptopdf.stack.concat[1], mptopdf.stack.concat[4] - local rx, ry = mptopdf.stack.concat[2], mptopdf.stack.concat[3] - local tx, ty = mptopdf.stack.concat[5], mptopdf.stack.concat[6] - local d = (sx*sy) - (rx*ry) - local function mpconcat(px, py) - return (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d - end - local stackpath = mptopdf.stack.path - for k=1,#stackpath do - local v = stackpath[k] - v[1],v[2] = mpconcat(v[1],v[2]) - if #v == 7 then - v[3],v[4] = mpconcat(v[3],v[4]) - v[5],v[6] = mpconcat(v[5],v[6]) - end - path[#path+1] = concat(v," ") - end - else - local stackpath = mptopdf.stack.path - for k=1,#stackpath do - path[#path+1] = concat(stackpath[k]," ") - end - end - mptopdf.flushconcat() - mptopdf.texcode("\\MPSpath{" .. concat(path," ") .. "}") - if mptopdf.stack.close then - mptopdf.texcode("\\MPScode{h " .. cmd .. "}") - else - mptopdf.texcode("\\MPScode{" .. cmd .."}") - end - end - mptopdf.resetpath() -end - -function mptopdf.loaded(name) - local ok, n - mptopdf.reset() - ok, mptopdf.data, n = resolvers.loadbinfile(name, 'tex') -- we need a binary load ! - return ok -end - -if not mptopdf.parse then - function mptopdf.parse() end -- forward declaration -end - -function mptopdf.convertmpstopdf(name) - if mptopdf.loaded(name) then - mptopdf.nofconverted = mptopdf.nofconverted + 1 - statistics.starttiming(mptopdf) - mptopdf.parse() - mptopdf.reset() - statistics.stoptiming(mptopdf) - else - context("file " .. name .. " not found") - end -end - --- mp interface - -metapost.mps = metapost.mps or { } -local mps = metapost.mps or { } - -function mps.creator(a, b, c) - mptopdf.version = tonumber(b) -end - -function mps.creationdate(a) - mptopdf.date= a -end - -function mps.newpath() - mptopdf.stack.path = { } -end - -function mps.boundingbox(llx, lly, urx, ury) - mptopdf.texcode("\\MPSboundingbox{" .. llx .. "}{" .. lly .. "}{" .. urx .. "}{" .. ury .. 
"}") -end - -function mps.moveto(x,y) - mptopdf.stack.path[#mptopdf.stack.path+1] = {x,y,"m"} -end - -function mps.curveto(ax, ay, bx, by, cx, cy) - mptopdf.stack.path[#mptopdf.stack.path+1] = {ax,ay,bx,by,cx,cy,"c"} -end - -function mps.lineto(x,y) - mptopdf.stack.path[#mptopdf.stack.path+1] = {x,y,"l"} -end - -function mps.rlineto(x,y) - local dx, dy = 0, 0 - if #mptopdf.stack.path > 0 then - dx, dy = mptopdf.stack.path[#mptopdf.stack.path][1], mptopdf.stack.path[#mptopdf.stack.path][2] - end - mptopdf.stack.path[#mptopdf.stack.path+1] = {dx,dy,"l"} -end - -function mps.translate(tx,ty) - mptopdf.pdfcode("1 0 0 0 1 " .. tx .. " " .. ty .. " cm") -end - -function mps.scale(sx,sy) - mptopdf.stack.concat = {sx,0,0,sy,0,0} -end - -function mps.concat(sx, rx, ry, sy, tx, ty) - mptopdf.stack.concat = {sx,rx,ry,sy,tx,ty} -end - -function mps.setlinejoin(d) - mptopdf.pdfcode(d .. " j") -end - -function mps.setlinecap(d) - mptopdf.pdfcode(d .. " J") -end - -function mps.setmiterlimit(d) - mptopdf.pdfcode(d .. " M") -end - -function mps.gsave() - mptopdf.pdfcode("q") -end - -function mps.grestore() - mptopdf.pdfcode("Q") -end - -function mps.setdash(...) - local n = select("#",...) - mptopdf.pdfcode("[" .. concat({...}," ",1,n-1) .. "] " .. select(n,...) .. " d") -end - -function mps.resetdash() - mptopdf.pdfcode("[ ] 0 d") -end - -function mps.setlinewidth(d) - mptopdf.pdfcode(d .. " w") -end - -function mps.closepath() - mptopdf.stack.close = true -end - -function mps.fill() - mptopdf.flushpath('f') -end - -function mps.stroke() - mptopdf.flushpath('S') -end - -function mps.both() - mptopdf.flushpath('B') -end - -function mps.clip() - mptopdf.flushpath('W n') -end - -function mps.textext(font, scale, str) -- old parser - local dx, dy = 0, 0 - if #mptopdf.stack.path > 0 then - dx, dy = mptopdf.stack.path[1][1], mptopdf.stack.path[1][2] - end - mptopdf.flushconcat() - mptopdf.texcode("\\MPStextext{"..font.."}{"..scale.."}{"..str.."}{"..dx.."}{"..dy.."}") - mptopdf.resetpath() -end - ---~ function mps.handletext(font,scale.str,dx,dy) ---~ local one, two = string.match(str, "^(%d+)::::(%d+)") ---~ if one and two then ---~ mptopdf.texcode("\\MPTOPDFtextext{"..font.."}{"..scale.."}{"..one.."}{"..two.."}{"..dx.."}{"..dy.."}") ---~ else ---~ mptopdf.texcode("\\MPTOPDFtexcode{"..font.."}{"..scale.."}{"..str.."}{"..dx.."}{"..dy.."}") ---~ end ---~ end - -function mps.setrgbcolor(r,g,b) -- extra check - r, g = tonumber(r), tonumber(g) -- needed when we use lpeg - if r == 0.0123 and g < 0.1 then - mptopdf.texcode("\\MPSspecial{" .. g*10000 .. "}{" .. b*10000 .. "}") - elseif r == 0.123 and g < 0.1 then - mptopdf.texcode("\\MPSspecial{" .. g* 1000 .. "}{" .. b* 1000 .. "}") - else - mptopdf.texcode("\\MPSrgb{" .. r .. "}{" .. g .. "}{" .. b .. "}") - end -end - -function mps.setcmykcolor(c,m,y,k) - mptopdf.texcode("\\MPScmyk{" .. c .. "}{" .. m .. "}{" .. y .. "}{" .. k .. "}") -end - -function mps.setgray(s) - mptopdf.texcode("\\MPSgray{" .. s .. "}") -end - -function mps.specials(version,signal,factor) -- 2.0 123 1000 -end - -function mps.special(...) -- 7 1 0.5 1 0 0 1 3 - local n = select("#",...) - mptopdf.texcode("\\MPSbegin\\MPSset{" .. concat({...},"}\\MPSset{",2,n) .. 
"}\\MPSend") -end - -function mps.begindata() -end - -function mps.enddata() -end - -function mps.showpage() -end - -mps.n = mps.newpath -- n -mps.p = mps.closepath -- h -mps.l = mps.lineto -- l -mps.r = mps.rlineto -- r -mps.m = mps.moveto -- m -mps.c = mps.curveto -- c -mps.hlw = mps.setlinewidth -mps.vlw = mps.setlinewidth - -mps.C = mps.setcmykcolor -- k -mps.G = mps.setgray -- g -mps.R = mps.setrgbcolor -- rg - -mps.lj = mps.setlinejoin -- j -mps.ml = mps.setmiterlimit -- M -mps.lc = mps.setlinecap -- J -mps.sd = mps.setdash -- d -mps.rd = mps.resetdash - -mps.S = mps.stroke -- S -mps.F = mps.fill -- f -mps.B = mps.both -- B -mps.W = mps.clip -- W - -mps.q = mps.gsave -- q -mps.Q = mps.grestore -- Q - -mps.s = mps.scale -- (not in pdf) -mps.t = mps.concat -- (not the same as pdf anyway) - -mps.P = mps.showpage - --- experimental - -function mps.attribute(id,value) - mptopdf.texcode("\\attribute " .. id .. "=" .. value .. " ") --- mptopdf.texcode("\\dompattribute{" .. id .. "}{" .. value .. "}") -end - --- lpeg parser - --- The lpeg based parser is rather optimized for the kind of output --- that MetaPost produces. It's my first real lpeg code, which may --- show. Because the parser binds to functions, we define it last. - -do -- assumes \let\c\char - - local byte = string.byte - local digit = lpeg.R("09") - local spec = digit^2 * lpeg.P("::::") * digit^2 - local text = lpeg.Cc("{") * ( - lpeg.P("\\") * ( (digit * digit * digit) / function(n) return "c" .. tonumber(n,8) end) + - lpeg.P(" ") / function(n) return "\\c32" end + -- never in new mp - lpeg.P(1) / function(n) return "\\c" .. byte(n) end - ) * lpeg.Cc("}") - local package = lpeg.Cs(spec + text^0) - - function mps.fshow(str,font,scale) -- lpeg parser - mps.textext(font,scale,lpegmatch(package,str)) - end - -end - -do - - local eol = lpeg.S('\r\n')^1 - local sp = lpeg.P(' ')^1 - local space = lpeg.S(' \r\n')^1 - local number = lpeg.S('0123456789.-+')^1 - local nonspace = lpeg.P(1-lpeg.S(' \r\n'))^1 - - local cnumber = lpeg.C(number) - local cstring = lpeg.C(nonspace) - - local specials = (lpeg.P("%%MetaPostSpecials:") * sp * (cstring * sp^0)^0 * eol) / mps.specials - local special = (lpeg.P("%%MetaPostSpecial:") * sp * (cstring * sp^0)^0 * eol) / mps.special - local boundingbox = (lpeg.P("%%BoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox - local highresboundingbox = (lpeg.P("%%HiResBoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox - - local setup = lpeg.P("%%BeginSetup") * (1 - lpeg.P("%%EndSetup") )^1 - local prolog = lpeg.P("%%BeginProlog") * (1 - lpeg.P("%%EndProlog"))^1 - local comment = lpeg.P('%')^1 * (1 - eol)^1 - - local curveto = ((cnumber * sp)^6 * lpeg.P("curveto") ) / mps.curveto - local lineto = ((cnumber * sp)^2 * lpeg.P("lineto") ) / mps.lineto - local rlineto = ((cnumber * sp)^2 * lpeg.P("rlineto") ) / mps.rlineto - local moveto = ((cnumber * sp)^2 * lpeg.P("moveto") ) / mps.moveto - local setrgbcolor = ((cnumber * sp)^3 * lpeg.P("setrgbcolor") ) / mps.setrgbcolor - local setcmykcolor = ((cnumber * sp)^4 * lpeg.P("setcmykcolor") ) / mps.setcmykcolor - local setgray = ((cnumber * sp)^1 * lpeg.P("setgray") ) / mps.setgray - local newpath = ( lpeg.P("newpath") ) / mps.newpath - local closepath = ( lpeg.P("closepath") ) / mps.closepath - local fill = ( lpeg.P("fill") ) / mps.fill - local stroke = ( lpeg.P("stroke") ) / mps.stroke - local clip = ( lpeg.P("clip") ) / mps.clip - local both = ( lpeg.P("gsave fill grestore")) / mps.both - local showpage = ( lpeg.P("showpage") ) - local 
setlinejoin = ((cnumber * sp)^1 * lpeg.P("setlinejoin") ) / mps.setlinejoin - local setlinecap = ((cnumber * sp)^1 * lpeg.P("setlinecap") ) / mps.setlinecap - local setmiterlimit = ((cnumber * sp)^1 * lpeg.P("setmiterlimit") ) / mps.setmiterlimit - local gsave = ( lpeg.P("gsave") ) / mps.gsave - local grestore = ( lpeg.P("grestore") ) / mps.grestore - - local setdash = (lpeg.P("[") * (cnumber * sp^0)^0 * lpeg.P("]") * sp * cnumber * sp * lpeg.P("setdash")) / mps.setdash - local concat = (lpeg.P("[") * (cnumber * sp^0)^6 * lpeg.P("]") * sp * lpeg.P("concat") ) / mps.concat - local scale = ( (cnumber * sp^0)^6 * sp * lpeg.P("concat") ) / mps.concat - - local fshow = (lpeg.P("(") * lpeg.C((1-lpeg.P(")"))^1) * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mps.fshow - local fshow = (lpeg.P("(") * - lpeg.Cs( ( lpeg.P("\\(")/"\\050" + lpeg.P("\\)")/"\\051" + (1-lpeg.P(")")) )^1 ) - * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mps.fshow - - local setlinewidth_x = (lpeg.P("0") * sp * cnumber * sp * lpeg.P("dtransform truncate idtransform setlinewidth pop")) / mps.setlinewidth - local setlinewidth_y = (cnumber * sp * lpeg.P("0 dtransform exch truncate exch idtransform pop setlinewidth") ) / mps.setlinewidth - - local c = ((cnumber * sp)^6 * lpeg.P("c") ) / mps.curveto -- ^6 very inefficient, ^1 ok too - local l = ((cnumber * sp)^2 * lpeg.P("l") ) / mps.lineto - local r = ((cnumber * sp)^2 * lpeg.P("r") ) / mps.rlineto - local m = ((cnumber * sp)^2 * lpeg.P("m") ) / mps.moveto - local vlw = ((cnumber * sp)^1 * lpeg.P("vlw")) / mps.setlinewidth - local hlw = ((cnumber * sp)^1 * lpeg.P("hlw")) / mps.setlinewidth - - local R = ((cnumber * sp)^3 * lpeg.P("R") ) / mps.setrgbcolor - local C = ((cnumber * sp)^4 * lpeg.P("C") ) / mps.setcmykcolor - local G = ((cnumber * sp)^1 * lpeg.P("G") ) / mps.setgray - - local lj = ((cnumber * sp)^1 * lpeg.P("lj") ) / mps.setlinejoin - local ml = ((cnumber * sp)^1 * lpeg.P("ml") ) / mps.setmiterlimit - local lc = ((cnumber * sp)^1 * lpeg.P("lc") ) / mps.setlinecap - - local n = lpeg.P("n") / mps.newpath - local p = lpeg.P("p") / mps.closepath - local S = lpeg.P("S") / mps.stroke - local F = lpeg.P("F") / mps.fill - local B = lpeg.P("B") / mps.both - local W = lpeg.P("W") / mps.clip - local P = lpeg.P("P") / mps.showpage - - local q = lpeg.P("q") / mps.gsave - local Q = lpeg.P("Q") / mps.grestore - - local sd = (lpeg.P("[") * (cnumber * sp^0)^0 * lpeg.P("]") * sp * cnumber * sp * lpeg.P("sd")) / mps.setdash - local rd = ( lpeg.P("rd")) / mps.resetdash - - local s = ( (cnumber * sp^0)^2 * lpeg.P("s") ) / mps.scale - local t = (lpeg.P("[") * (cnumber * sp^0)^6 * lpeg.P("]") * sp * lpeg.P("t") ) / mps.concat - - -- experimental - - local attribute = ((cnumber * sp)^2 * lpeg.P("attribute")) / mps.attribute - local A = ((cnumber * sp)^2 * lpeg.P("A")) / mps.attribute - - local preamble = ( - prolog + setup + - boundingbox + highresboundingbox + specials + special + - comment - ) - - local procset = ( - lj + ml + lc + - c + l + m + n + p + r + - A + - R + C + G + - S + F + B + W + - vlw + hlw + - Q + q + - sd + rd + - t + s + - fshow + - P - ) - - local verbose = ( - curveto + lineto + moveto + newpath + closepath + rlineto + - setrgbcolor + setcmykcolor + setgray + - attribute + - setlinejoin + setmiterlimit + setlinecap + - stroke + fill + clip + both + - setlinewidth_x + setlinewidth_y + - gsave + grestore + - concat + scale + - fshow + - setdash + -- no resetdash - showpage - ) - - -- order matters in terms 
of speed / we could check for procset first - - local captures_old = ( space + verbose + preamble )^0 - local captures_new = ( space + procset + preamble + verbose )^0 - - function mptopdf.parsers.lpeg() - if find(mptopdf.data,"%%%%BeginResource: procset mpost") then - lpegmatch(captures_new,mptopdf.data) - else - lpegmatch(captures_old,mptopdf.data) - end - end - -end - -mptopdf.parser = 'lpeg' - --- status info - -statistics.register("mps conversion time",function() - local n = mptopdf.nofconverted - if n > 0 then - return format("%s seconds, %s conversions", statistics.elapsedtime(mptopdf),n) - else - return nil - end -end) +if not modules then modules = { } end modules ['meta-pdf'] = { + version = 1.001, + comment = "companion to meta-pdf.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if true then + return -- or os.exit() +end + +-- This file contains the history of the converter. We keep it around as it +-- relates to the development of luatex. + +-- This is the third version. Version 1 converted to Lua code, +-- version 2 gsubbed the file into TeX code, and version 3 uses +-- the new lpeg functionality and streams the result into TeX. + +-- We will move old stuff to edu. + +--~ old lpeg 0.4 lpeg 0.5 +--~ 100 times test graphic 2.45 (T:1.07) 0.72 (T:0.24) 0.580 (0.560 no table) -- 0.54 optimized for one space (T:0.19) +--~ 100 times big graphic 10.44 4.30/3.35 nogb 2.914 (2.050 no table) -- 1.99 optimized for one space (T:0.85) +--~ 500 times test graphic T:1.29 T:1.16 (T:1.10 no table) -- T:1.10 + +-- only needed for mp output on disk + +local concat, format, find, gsub, gmatch = table.concat, string.format, string.find, string.gsub, string.gmatch +local tostring, tonumber, select = tostring, tonumber, select +local lpegmatch = lpeg.match + +local metapost = metapost + +metapost.mptopdf = metapost.mptopdf or { } +local mptopdf = metapost.mptopdf + +mptopdf.parsers = { } +mptopdf.parser = 'none' +mptopdf.nofconverted = 0 + +function mptopdf.reset() + mptopdf.data = "" + mptopdf.path = { } + mptopdf.stack = { } + mptopdf.texts = { } + mptopdf.version = 0 + mptopdf.shortcuts = false + mptopdf.resetpath() +end + +function mptopdf.resetpath() + mptopdf.stack.close = false + mptopdf.stack.path = { } + mptopdf.stack.concat = nil + mptopdf.stack.special = false +end + +mptopdf.reset() + +function mptopdf.parsers.none() + -- no parser set +end + +function mptopdf.parse() + mptopdf.parsers[mptopdf.parser]() +end + +-- old code + +mptopdf.steps = { } + +mptopdf.descapes = { + ['('] = "\\\\char40 ", + [')'] = "\\\\char41 ", + ['"'] = "\\\\char92 " +} + +function mptopdf.descape(str) + str = gsub(str,"\\(%d%d%d)",function(n) + return "\\char" .. tonumber(n,8) .. " " + end) + return gsub(str,"\\([%(%)\\])",mptopdf.descapes) +end + +function mptopdf.steps.descape(str) + str = gsub(str,"\\(%d%d%d)",function(n) + return "\\\\char" .. tonumber(n,8) .. 
" " + end) + return gsub(str,"\\([%(%)\\])",mptopdf.descapes) +end + +function mptopdf.steps.strip() -- .3 per expr + mptopdf.data = gsub(mptopdf.data,"^(.-)%%+Page:.-%c+(.*)%s+%a+%s+%%+EOF.*$", function(preamble, graphic) + local bbox = "0 0 0 0" + for b in gmatch(preamble,"%%%%%a+oundingBox: +(.-)%c+") do + bbox = b + end + local name, version = gmatch(preamble,"%%%%Creator: +(.-) +(.-) ") + mptopdf.version = tostring(version or "0") + if find(preamble,"/hlw{0 dtransform") then + mptopdf.shortcuts = true + end + -- the boundingbox specification needs to come before data, well, not really + return bbox .. " boundingbox\n" .. "\nbegindata\n" .. graphic .. "\nenddata\n" + end, 1) + mptopdf.data = gsub(mptopdf.data,"%%%%MetaPostSpecials: +(.-)%c+", "%1 specials\n", 1) + mptopdf.data = gsub(mptopdf.data,"%%%%MetaPostSpecial: +(.-)%c+", "%1 special\n") + mptopdf.data = gsub(mptopdf.data,"%%.-%c+", "") +end + +function mptopdf.steps.cleanup() + if not mptopdf.shortcuts then + mptopdf.data = gsub(mptopdf.data,"gsave%s+fill%s+grestore%s+stroke", "both") + mptopdf.data = gsub(mptopdf.data,"([%d%.]+)%s+([%d%.]+)%s+dtransform%s+exch%s+truncate%s+exch%s+idtransform%s+pop%s+setlinewidth", function(wx,wy) + if tonumber(wx) > 0 then return wx .. " setlinewidth" else return wy .. " setlinewidth" end + end) + mptopdf.data = gsub(mptopdf.data,"([%d%.]+)%s+([%d%.]+)%s+dtransform%s+truncate%s+idtransform%s+setlinewidth%s+pop", function(wx,wy) + if tonumber(wx) > 0 then return wx .. " setlinewidth" else return wy .. " setlinewidth" end + end) + end +end + +function mptopdf.steps.convert() + mptopdf.data = gsub(mptopdf.data,"%c%((.-)%) (.-) (.-) fshow", function(str,font,scale) + mptopdf.texts[mptopdf.texts+1] = {mptopdf.steps.descape(str), font, scale} + return "\n" .. #mptopdf.texts .. " textext" + end) + mptopdf.data = gsub(mptopdf.data,"%[%s*(.-)%s*%]", function(str) + return gsub(str,"%s+"," ") + end) + local t + mptopdf.data = gsub(mptopdf.data,"%s*([^%a]-)%s*(%a+)", function(args,cmd) + if cmd == "textext" then + t = mptopdf.texts[tonumber(args)] + return "metapost.mps.textext(" .. "\"" .. t[2] .. "\"," .. t[3] .. ",\"" .. t[1] .. "\")\n" + else + return "metapost.mps." .. cmd .. "(" .. gsub(args," +",",") .. ")\n" + end + end) +end + +function mptopdf.steps.process() + assert(loadstring(mptopdf.data))() -- () runs the loaded chunk +end + +function mptopdf.parsers.gsub() + mptopdf.steps.strip() + mptopdf.steps.cleanup() + mptopdf.steps.convert() + mptopdf.steps.process() +end + +-- end of old code + +-- from lua to tex + +function mptopdf.pdfcode(str) + context.pdfliteral(str) -- \\MPScode +end + +function mptopdf.texcode(str) + context(str) +end + +-- auxiliary functions + +function mptopdf.flushconcat() + if mptopdf.stack.concat then + mptopdf.pdfcode(concat(mptopdf.stack.concat," ") .. 
" cm") + mptopdf.stack.concat = nil + end +end + +function mptopdf.flushpath(cmd) + -- faster: no local function and loop + if #mptopdf.stack.path > 0 then + local path = { } + if mptopdf.stack.concat then + local sx, sy = mptopdf.stack.concat[1], mptopdf.stack.concat[4] + local rx, ry = mptopdf.stack.concat[2], mptopdf.stack.concat[3] + local tx, ty = mptopdf.stack.concat[5], mptopdf.stack.concat[6] + local d = (sx*sy) - (rx*ry) + local function mpconcat(px, py) + return (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d + end + local stackpath = mptopdf.stack.path + for k=1,#stackpath do + local v = stackpath[k] + v[1],v[2] = mpconcat(v[1],v[2]) + if #v == 7 then + v[3],v[4] = mpconcat(v[3],v[4]) + v[5],v[6] = mpconcat(v[5],v[6]) + end + path[#path+1] = concat(v," ") + end + else + local stackpath = mptopdf.stack.path + for k=1,#stackpath do + path[#path+1] = concat(stackpath[k]," ") + end + end + mptopdf.flushconcat() + mptopdf.texcode("\\MPSpath{" .. concat(path," ") .. "}") + if mptopdf.stack.close then + mptopdf.texcode("\\MPScode{h " .. cmd .. "}") + else + mptopdf.texcode("\\MPScode{" .. cmd .."}") + end + end + mptopdf.resetpath() +end + +function mptopdf.loaded(name) + local ok, n + mptopdf.reset() + ok, mptopdf.data, n = resolvers.loadbinfile(name, 'tex') -- we need a binary load ! + return ok +end + +if not mptopdf.parse then + function mptopdf.parse() end -- forward declaration +end + +function mptopdf.convertmpstopdf(name) + if mptopdf.loaded(name) then + mptopdf.nofconverted = mptopdf.nofconverted + 1 + statistics.starttiming(mptopdf) + mptopdf.parse() + mptopdf.reset() + statistics.stoptiming(mptopdf) + else + context("file " .. name .. " not found") + end +end + +-- mp interface + +metapost.mps = metapost.mps or { } +local mps = metapost.mps or { } + +function mps.creator(a, b, c) + mptopdf.version = tonumber(b) +end + +function mps.creationdate(a) + mptopdf.date= a +end + +function mps.newpath() + mptopdf.stack.path = { } +end + +function mps.boundingbox(llx, lly, urx, ury) + mptopdf.texcode("\\MPSboundingbox{" .. llx .. "}{" .. lly .. "}{" .. urx .. "}{" .. ury .. "}") +end + +function mps.moveto(x,y) + mptopdf.stack.path[#mptopdf.stack.path+1] = {x,y,"m"} +end + +function mps.curveto(ax, ay, bx, by, cx, cy) + mptopdf.stack.path[#mptopdf.stack.path+1] = {ax,ay,bx,by,cx,cy,"c"} +end + +function mps.lineto(x,y) + mptopdf.stack.path[#mptopdf.stack.path+1] = {x,y,"l"} +end + +function mps.rlineto(x,y) + local dx, dy = 0, 0 + if #mptopdf.stack.path > 0 then + dx, dy = mptopdf.stack.path[#mptopdf.stack.path][1], mptopdf.stack.path[#mptopdf.stack.path][2] + end + mptopdf.stack.path[#mptopdf.stack.path+1] = {dx,dy,"l"} +end + +function mps.translate(tx,ty) + mptopdf.pdfcode("1 0 0 0 1 " .. tx .. " " .. ty .. " cm") +end + +function mps.scale(sx,sy) + mptopdf.stack.concat = {sx,0,0,sy,0,0} +end + +function mps.concat(sx, rx, ry, sy, tx, ty) + mptopdf.stack.concat = {sx,rx,ry,sy,tx,ty} +end + +function mps.setlinejoin(d) + mptopdf.pdfcode(d .. " j") +end + +function mps.setlinecap(d) + mptopdf.pdfcode(d .. " J") +end + +function mps.setmiterlimit(d) + mptopdf.pdfcode(d .. " M") +end + +function mps.gsave() + mptopdf.pdfcode("q") +end + +function mps.grestore() + mptopdf.pdfcode("Q") +end + +function mps.setdash(...) + local n = select("#",...) + mptopdf.pdfcode("[" .. concat({...}," ",1,n-1) .. "] " .. select(n,...) .. " d") +end + +function mps.resetdash() + mptopdf.pdfcode("[ ] 0 d") +end + +function mps.setlinewidth(d) + mptopdf.pdfcode(d .. 
" w") +end + +function mps.closepath() + mptopdf.stack.close = true +end + +function mps.fill() + mptopdf.flushpath('f') +end + +function mps.stroke() + mptopdf.flushpath('S') +end + +function mps.both() + mptopdf.flushpath('B') +end + +function mps.clip() + mptopdf.flushpath('W n') +end + +function mps.textext(font, scale, str) -- old parser + local dx, dy = 0, 0 + if #mptopdf.stack.path > 0 then + dx, dy = mptopdf.stack.path[1][1], mptopdf.stack.path[1][2] + end + mptopdf.flushconcat() + mptopdf.texcode("\\MPStextext{"..font.."}{"..scale.."}{"..str.."}{"..dx.."}{"..dy.."}") + mptopdf.resetpath() +end + +--~ function mps.handletext(font,scale.str,dx,dy) +--~ local one, two = string.match(str, "^(%d+)::::(%d+)") +--~ if one and two then +--~ mptopdf.texcode("\\MPTOPDFtextext{"..font.."}{"..scale.."}{"..one.."}{"..two.."}{"..dx.."}{"..dy.."}") +--~ else +--~ mptopdf.texcode("\\MPTOPDFtexcode{"..font.."}{"..scale.."}{"..str.."}{"..dx.."}{"..dy.."}") +--~ end +--~ end + +function mps.setrgbcolor(r,g,b) -- extra check + r, g = tonumber(r), tonumber(g) -- needed when we use lpeg + if r == 0.0123 and g < 0.1 then + mptopdf.texcode("\\MPSspecial{" .. g*10000 .. "}{" .. b*10000 .. "}") + elseif r == 0.123 and g < 0.1 then + mptopdf.texcode("\\MPSspecial{" .. g* 1000 .. "}{" .. b* 1000 .. "}") + else + mptopdf.texcode("\\MPSrgb{" .. r .. "}{" .. g .. "}{" .. b .. "}") + end +end + +function mps.setcmykcolor(c,m,y,k) + mptopdf.texcode("\\MPScmyk{" .. c .. "}{" .. m .. "}{" .. y .. "}{" .. k .. "}") +end + +function mps.setgray(s) + mptopdf.texcode("\\MPSgray{" .. s .. "}") +end + +function mps.specials(version,signal,factor) -- 2.0 123 1000 +end + +function mps.special(...) -- 7 1 0.5 1 0 0 1 3 + local n = select("#",...) + mptopdf.texcode("\\MPSbegin\\MPSset{" .. concat({...},"}\\MPSset{",2,n) .. "}\\MPSend") +end + +function mps.begindata() +end + +function mps.enddata() +end + +function mps.showpage() +end + +mps.n = mps.newpath -- n +mps.p = mps.closepath -- h +mps.l = mps.lineto -- l +mps.r = mps.rlineto -- r +mps.m = mps.moveto -- m +mps.c = mps.curveto -- c +mps.hlw = mps.setlinewidth +mps.vlw = mps.setlinewidth + +mps.C = mps.setcmykcolor -- k +mps.G = mps.setgray -- g +mps.R = mps.setrgbcolor -- rg + +mps.lj = mps.setlinejoin -- j +mps.ml = mps.setmiterlimit -- M +mps.lc = mps.setlinecap -- J +mps.sd = mps.setdash -- d +mps.rd = mps.resetdash + +mps.S = mps.stroke -- S +mps.F = mps.fill -- f +mps.B = mps.both -- B +mps.W = mps.clip -- W + +mps.q = mps.gsave -- q +mps.Q = mps.grestore -- Q + +mps.s = mps.scale -- (not in pdf) +mps.t = mps.concat -- (not the same as pdf anyway) + +mps.P = mps.showpage + +-- experimental + +function mps.attribute(id,value) + mptopdf.texcode("\\attribute " .. id .. "=" .. value .. " ") +-- mptopdf.texcode("\\dompattribute{" .. id .. "}{" .. value .. "}") +end + +-- lpeg parser + +-- The lpeg based parser is rather optimized for the kind of output +-- that MetaPost produces. It's my first real lpeg code, which may +-- show. Because the parser binds to functions, we define it last. + +do -- assumes \let\c\char + + local byte = string.byte + local digit = lpeg.R("09") + local spec = digit^2 * lpeg.P("::::") * digit^2 + local text = lpeg.Cc("{") * ( + lpeg.P("\\") * ( (digit * digit * digit) / function(n) return "c" .. tonumber(n,8) end) + + lpeg.P(" ") / function(n) return "\\c32" end + -- never in new mp + lpeg.P(1) / function(n) return "\\c" .. 
byte(n) end + ) * lpeg.Cc("}") + local package = lpeg.Cs(spec + text^0) + + function mps.fshow(str,font,scale) -- lpeg parser + mps.textext(font,scale,lpegmatch(package,str)) + end + +end + +do + + local eol = lpeg.S('\r\n')^1 + local sp = lpeg.P(' ')^1 + local space = lpeg.S(' \r\n')^1 + local number = lpeg.S('0123456789.-+')^1 + local nonspace = lpeg.P(1-lpeg.S(' \r\n'))^1 + + local cnumber = lpeg.C(number) + local cstring = lpeg.C(nonspace) + + local specials = (lpeg.P("%%MetaPostSpecials:") * sp * (cstring * sp^0)^0 * eol) / mps.specials + local special = (lpeg.P("%%MetaPostSpecial:") * sp * (cstring * sp^0)^0 * eol) / mps.special + local boundingbox = (lpeg.P("%%BoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox + local highresboundingbox = (lpeg.P("%%HiResBoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox + + local setup = lpeg.P("%%BeginSetup") * (1 - lpeg.P("%%EndSetup") )^1 + local prolog = lpeg.P("%%BeginProlog") * (1 - lpeg.P("%%EndProlog"))^1 + local comment = lpeg.P('%')^1 * (1 - eol)^1 + + local curveto = ((cnumber * sp)^6 * lpeg.P("curveto") ) / mps.curveto + local lineto = ((cnumber * sp)^2 * lpeg.P("lineto") ) / mps.lineto + local rlineto = ((cnumber * sp)^2 * lpeg.P("rlineto") ) / mps.rlineto + local moveto = ((cnumber * sp)^2 * lpeg.P("moveto") ) / mps.moveto + local setrgbcolor = ((cnumber * sp)^3 * lpeg.P("setrgbcolor") ) / mps.setrgbcolor + local setcmykcolor = ((cnumber * sp)^4 * lpeg.P("setcmykcolor") ) / mps.setcmykcolor + local setgray = ((cnumber * sp)^1 * lpeg.P("setgray") ) / mps.setgray + local newpath = ( lpeg.P("newpath") ) / mps.newpath + local closepath = ( lpeg.P("closepath") ) / mps.closepath + local fill = ( lpeg.P("fill") ) / mps.fill + local stroke = ( lpeg.P("stroke") ) / mps.stroke + local clip = ( lpeg.P("clip") ) / mps.clip + local both = ( lpeg.P("gsave fill grestore")) / mps.both + local showpage = ( lpeg.P("showpage") ) + local setlinejoin = ((cnumber * sp)^1 * lpeg.P("setlinejoin") ) / mps.setlinejoin + local setlinecap = ((cnumber * sp)^1 * lpeg.P("setlinecap") ) / mps.setlinecap + local setmiterlimit = ((cnumber * sp)^1 * lpeg.P("setmiterlimit") ) / mps.setmiterlimit + local gsave = ( lpeg.P("gsave") ) / mps.gsave + local grestore = ( lpeg.P("grestore") ) / mps.grestore + + local setdash = (lpeg.P("[") * (cnumber * sp^0)^0 * lpeg.P("]") * sp * cnumber * sp * lpeg.P("setdash")) / mps.setdash + local concat = (lpeg.P("[") * (cnumber * sp^0)^6 * lpeg.P("]") * sp * lpeg.P("concat") ) / mps.concat + local scale = ( (cnumber * sp^0)^6 * sp * lpeg.P("concat") ) / mps.concat + + local fshow = (lpeg.P("(") * lpeg.C((1-lpeg.P(")"))^1) * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mps.fshow + local fshow = (lpeg.P("(") * + lpeg.Cs( ( lpeg.P("\\(")/"\\050" + lpeg.P("\\)")/"\\051" + (1-lpeg.P(")")) )^1 ) + * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mps.fshow + + local setlinewidth_x = (lpeg.P("0") * sp * cnumber * sp * lpeg.P("dtransform truncate idtransform setlinewidth pop")) / mps.setlinewidth + local setlinewidth_y = (cnumber * sp * lpeg.P("0 dtransform exch truncate exch idtransform pop setlinewidth") ) / mps.setlinewidth + + local c = ((cnumber * sp)^6 * lpeg.P("c") ) / mps.curveto -- ^6 very inefficient, ^1 ok too + local l = ((cnumber * sp)^2 * lpeg.P("l") ) / mps.lineto + local r = ((cnumber * sp)^2 * lpeg.P("r") ) / mps.rlineto + local m = ((cnumber * sp)^2 * lpeg.P("m") ) / mps.moveto + local vlw = ((cnumber * sp)^1 * lpeg.P("vlw")) / 
mps.setlinewidth + local hlw = ((cnumber * sp)^1 * lpeg.P("hlw")) / mps.setlinewidth + + local R = ((cnumber * sp)^3 * lpeg.P("R") ) / mps.setrgbcolor + local C = ((cnumber * sp)^4 * lpeg.P("C") ) / mps.setcmykcolor + local G = ((cnumber * sp)^1 * lpeg.P("G") ) / mps.setgray + + local lj = ((cnumber * sp)^1 * lpeg.P("lj") ) / mps.setlinejoin + local ml = ((cnumber * sp)^1 * lpeg.P("ml") ) / mps.setmiterlimit + local lc = ((cnumber * sp)^1 * lpeg.P("lc") ) / mps.setlinecap + + local n = lpeg.P("n") / mps.newpath + local p = lpeg.P("p") / mps.closepath + local S = lpeg.P("S") / mps.stroke + local F = lpeg.P("F") / mps.fill + local B = lpeg.P("B") / mps.both + local W = lpeg.P("W") / mps.clip + local P = lpeg.P("P") / mps.showpage + + local q = lpeg.P("q") / mps.gsave + local Q = lpeg.P("Q") / mps.grestore + + local sd = (lpeg.P("[") * (cnumber * sp^0)^0 * lpeg.P("]") * sp * cnumber * sp * lpeg.P("sd")) / mps.setdash + local rd = ( lpeg.P("rd")) / mps.resetdash + + local s = ( (cnumber * sp^0)^2 * lpeg.P("s") ) / mps.scale + local t = (lpeg.P("[") * (cnumber * sp^0)^6 * lpeg.P("]") * sp * lpeg.P("t") ) / mps.concat + + -- experimental + + local attribute = ((cnumber * sp)^2 * lpeg.P("attribute")) / mps.attribute + local A = ((cnumber * sp)^2 * lpeg.P("A")) / mps.attribute + + local preamble = ( + prolog + setup + + boundingbox + highresboundingbox + specials + special + + comment + ) + + local procset = ( + lj + ml + lc + + c + l + m + n + p + r + + A + + R + C + G + + S + F + B + W + + vlw + hlw + + Q + q + + sd + rd + + t + s + + fshow + + P + ) + + local verbose = ( + curveto + lineto + moveto + newpath + closepath + rlineto + + setrgbcolor + setcmykcolor + setgray + + attribute + + setlinejoin + setmiterlimit + setlinecap + + stroke + fill + clip + both + + setlinewidth_x + setlinewidth_y + + gsave + grestore + + concat + scale + + fshow + + setdash + -- no resetdash + showpage + ) + + -- order matters in terms of speed / we could check for procset first + + local captures_old = ( space + verbose + preamble )^0 + local captures_new = ( space + procset + preamble + verbose )^0 + + function mptopdf.parsers.lpeg() + if find(mptopdf.data,"%%%%BeginResource: procset mpost") then + lpegmatch(captures_new,mptopdf.data) + else + lpegmatch(captures_old,mptopdf.data) + end + end + +end + +mptopdf.parser = 'lpeg' + +-- status info + +statistics.register("mps conversion time",function() + local n = mptopdf.nofconverted + if n > 0 then + return format("%s seconds, %s conversions", statistics.elapsedtime(mptopdf),n) + else + return nil + end +end) diff --git a/tex/context/base/meta-tex.lua b/tex/context/base/meta-tex.lua index c29498ad1..117d604b3 100644 --- a/tex/context/base/meta-tex.lua +++ b/tex/context/base/meta-tex.lua @@ -1,38 +1,38 @@ -if not modules then modules = { } end modules ['meta-tex'] = { - version = 1.001, - comment = "companion to meta-tex.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---~ local P, C, lpegmatch = lpeg.P, lpeg.C, lpeg.match - --- local left = P("[") --- local right = P("]") --- local space = P(" ") --- local argument = left * C((1-right)^1) * right --- local pattern = (argument + space)^0 - --- function metapost.sometxt(optional,str) --- if optional == "" then --- context.sometxta(str) --- else --- local one, two = lpegmatch(pattern,optional) --- if two then --- context.sometxtc(one,two,str) --- elseif one then --- context.sometxtb(one,str) --- else --- 
context.sometxta(str) --- end --- end --- end - -local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match - -local pattern = Cs((P([[\"]]) + P([["]])/"\\quotedbl{}" + P(1))^0) -- or \char - -function metapost.escaped(str) - context(lpegmatch(pattern,str)) -end +if not modules then modules = { } end modules ['meta-tex'] = { + version = 1.001, + comment = "companion to meta-tex.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--~ local P, C, lpegmatch = lpeg.P, lpeg.C, lpeg.match + +-- local left = P("[") +-- local right = P("]") +-- local space = P(" ") +-- local argument = left * C((1-right)^1) * right +-- local pattern = (argument + space)^0 + +-- function metapost.sometxt(optional,str) +-- if optional == "" then +-- context.sometxta(str) +-- else +-- local one, two = lpegmatch(pattern,optional) +-- if two then +-- context.sometxtc(one,two,str) +-- elseif one then +-- context.sometxtb(one,str) +-- else +-- context.sometxta(str) +-- end +-- end +-- end + +local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match + +local pattern = Cs((P([[\"]]) + P([["]])/"\\quotedbl{}" + P(1))^0) -- or \char + +function metapost.escaped(str) + context(lpegmatch(pattern,str)) +end diff --git a/tex/context/base/mlib-ctx.lua b/tex/context/base/mlib-ctx.lua index 04e0efcb4..8d6d7aa3e 100644 --- a/tex/context/base/mlib-ctx.lua +++ b/tex/context/base/mlib-ctx.lua @@ -1,178 +1,178 @@ -if not modules then modules = { } end modules ['mlib-ctx'] = { - version = 1.001, - comment = "companion to mlib-ctx.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- todo - -local format, concat = string.format, table.concat -local settings_to_hash = utilities.parsers.settings_to_hash - -local report_metapost = logs.reporter("metapost") - -local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming - -local mplib = mplib - -metapost = metapost or {} -local metapost = metapost - -local v_no = interfaces.variables.no - -metapost.defaultformat = "metafun" -metapost.defaultinstance = "metafun" -metapost.defaultmethod = "default" - -local function setmpsformat(specification) - local instance = specification.instance - local format = specification.format - local method = specification.method - if not instance or instance == "" then - instance = metapost.defaultinstance - specification.instance = instance - end - if not format or format == "" then - format = metapost.defaultformat - specification.format = format - end - if not method or method == "" then - method = metapost.defaultmethod - specification.method = method - end - specification.mpx = metapost.format(instance,format,method) -end - -local extensiondata = metapost.extensiondata or storage.allocate { } -metapost.extensiondata = extensiondata - -storage.register("metapost/extensiondata",extensiondata,"metapost.extensiondata") - -function metapost.setextensions(instances,data) - if data and data ~= "" then - extensiondata[#extensiondata+1] = { - usedinall = not instances or instances == "", - instances = settings_to_hash(instances or ""), - extensions = data, - } - end -end - -function metapost.getextensions(instance,state) - if state and state == v_no then - return "" - else - local t = { } - for i=1,#extensiondata do - local e = extensiondata[i] - local status = e.instances[instance] - if (status ~= true) and (e.usedinall or status) then - 
t[#t+1] = e.extensions - e.instances[instance] = true - end - end - return concat(t," ") - end -end - -function commands.getmpextensions(instance,state) - context(metapost.getextensions(instance,state)) -end - -function metapost.graphic(specification) - setmpsformat(specification) - metapost.graphic_base_pass(specification) -end - -function metapost.getclippath(specification) -- why not a special instance for this - setmpsformat(specification) - local mpx = specification.mpx - local data = specification.data or "" - if mpx and data ~= "" then - starttiming(metapost) - starttiming(metapost.exectime) - local result = mpx:execute ( format ( "%s;%s;beginfig(1);%s;%s;endfig;", - specification.extensions or "", - specification.inclusions or "", - specification.initializations or "", - data - ) ) - stoptiming(metapost.exectime) - if result.status > 0 then - report_metapost("%s: %s", result.status, result.error or result.term or result.log) - result = nil - else - result = metapost.filterclippath(result) - end - stoptiming(metapost) - return result - end -end - -function metapost.filterclippath(result) - if result then - local figures = result.fig - if figures and #figures > 0 then - local figure = figures[1] - local objects = figure:objects() - if objects then - local lastclippath - for o=1,#objects do - local object = objects[o] - if object.type == "start_clip" then - lastclippath = object.path - end - end - return lastclippath - end - end - end -end - -function metapost.theclippath(...) - local result = metapost.getclippath(...) - if result then -- we could just print the table - result = concat(metapost.flushnormalpath(result),"\n") - context(result) - end -end - -statistics.register("metapost processing time", function() - local n = metapost.n - if n and n > 0 then - local nofconverted = metapost.makempy.nofconverted - local elapsedtime = statistics.elapsedtime - local elapsed = statistics.elapsed - local str = format("%s seconds, loading: %s, execution: %s, n: %s, average: %s", - elapsedtime(metapost), elapsedtime(mplib), elapsedtime(metapost.exectime), n, - elapsedtime((elapsed(metapost) + elapsed(mplib) + elapsed(metapost.exectime)) / n)) - if nofconverted > 0 then - return format("%s, external: %s (%s calls)", - str, elapsedtime(metapost.makempy), nofconverted) - else - return str - end - else - return nil - end -end) - --- only used in graphictexts - -metapost.tex = metapost.tex or { } - -local environments = { } - -function metapost.tex.set(str) - environments[#environments+1] = str -end - -function metapost.tex.reset() - environments = { } -end - -function metapost.tex.get() - return concat(environments,"\n") -end +if not modules then modules = { } end modules ['mlib-ctx'] = { + version = 1.001, + comment = "companion to mlib-ctx.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- todo + +local format, concat = string.format, table.concat +local settings_to_hash = utilities.parsers.settings_to_hash + +local report_metapost = logs.reporter("metapost") + +local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming + +local mplib = mplib + +metapost = metapost or {} +local metapost = metapost + +local v_no = interfaces.variables.no + +metapost.defaultformat = "metafun" +metapost.defaultinstance = "metafun" +metapost.defaultmethod = "default" + +local function setmpsformat(specification) + local instance = specification.instance + local format = 
specification.format + local method = specification.method + if not instance or instance == "" then + instance = metapost.defaultinstance + specification.instance = instance + end + if not format or format == "" then + format = metapost.defaultformat + specification.format = format + end + if not method or method == "" then + method = metapost.defaultmethod + specification.method = method + end + specification.mpx = metapost.format(instance,format,method) +end + +local extensiondata = metapost.extensiondata or storage.allocate { } +metapost.extensiondata = extensiondata + +storage.register("metapost/extensiondata",extensiondata,"metapost.extensiondata") + +function metapost.setextensions(instances,data) + if data and data ~= "" then + extensiondata[#extensiondata+1] = { + usedinall = not instances or instances == "", + instances = settings_to_hash(instances or ""), + extensions = data, + } + end +end + +function metapost.getextensions(instance,state) + if state and state == v_no then + return "" + else + local t = { } + for i=1,#extensiondata do + local e = extensiondata[i] + local status = e.instances[instance] + if (status ~= true) and (e.usedinall or status) then + t[#t+1] = e.extensions + e.instances[instance] = true + end + end + return concat(t," ") + end +end + +function commands.getmpextensions(instance,state) + context(metapost.getextensions(instance,state)) +end + +function metapost.graphic(specification) + setmpsformat(specification) + metapost.graphic_base_pass(specification) +end + +function metapost.getclippath(specification) -- why not a special instance for this + setmpsformat(specification) + local mpx = specification.mpx + local data = specification.data or "" + if mpx and data ~= "" then + starttiming(metapost) + starttiming(metapost.exectime) + local result = mpx:execute ( format ( "%s;%s;beginfig(1);%s;%s;endfig;", + specification.extensions or "", + specification.inclusions or "", + specification.initializations or "", + data + ) ) + stoptiming(metapost.exectime) + if result.status > 0 then + report_metapost("%s: %s", result.status, result.error or result.term or result.log) + result = nil + else + result = metapost.filterclippath(result) + end + stoptiming(metapost) + return result + end +end + +function metapost.filterclippath(result) + if result then + local figures = result.fig + if figures and #figures > 0 then + local figure = figures[1] + local objects = figure:objects() + if objects then + local lastclippath + for o=1,#objects do + local object = objects[o] + if object.type == "start_clip" then + lastclippath = object.path + end + end + return lastclippath + end + end + end +end + +function metapost.theclippath(...) + local result = metapost.getclippath(...) 
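-- Note on the call above: getclippath runs the snippet through mplib (wrapped
-- in a beginfig/endfig pair) and filterclippath returns the path of the last
-- start_clip object, so what ends up being flushed here is just that clipping
-- path, rendered as pdf path operators by flushnormalpath.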
+ if result then -- we could just print the table + result = concat(metapost.flushnormalpath(result),"\n") + context(result) + end +end + +statistics.register("metapost processing time", function() + local n = metapost.n + if n and n > 0 then + local nofconverted = metapost.makempy.nofconverted + local elapsedtime = statistics.elapsedtime + local elapsed = statistics.elapsed + local str = format("%s seconds, loading: %s, execution: %s, n: %s, average: %s", + elapsedtime(metapost), elapsedtime(mplib), elapsedtime(metapost.exectime), n, + elapsedtime((elapsed(metapost) + elapsed(mplib) + elapsed(metapost.exectime)) / n)) + if nofconverted > 0 then + return format("%s, external: %s (%s calls)", + str, elapsedtime(metapost.makempy), nofconverted) + else + return str + end + else + return nil + end +end) + +-- only used in graphictexts + +metapost.tex = metapost.tex or { } + +local environments = { } + +function metapost.tex.set(str) + environments[#environments+1] = str +end + +function metapost.tex.reset() + environments = { } +end + +function metapost.tex.get() + return concat(environments,"\n") +end diff --git a/tex/context/base/mlib-pdf.lua b/tex/context/base/mlib-pdf.lua index 963309951..6ca50a12f 100644 --- a/tex/context/base/mlib-pdf.lua +++ b/tex/context/base/mlib-pdf.lua @@ -1,530 +1,530 @@ -if not modules then modules = { } end modules ['mlib-pdf'] = { - version = 1.001, - comment = "companion to mlib-ctx.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- maybe %s is better than %f - -local format, concat, gsub = string.format, table.concat, string.gsub -local abs, sqrt, round = math.abs, math.sqrt, math.round -local setmetatable = setmetatable -local Cf, C, Cg, Ct, P, S, lpegmatch = lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.match -local formatters = string.formatters - -local report_metapost = logs.reporter("metapost") - -local mplib, context = mplib, context - -local allocate = utilities.storage.allocate - -local copy_node = node.copy -local write_node = node.write - -metapost = metapost or { } -local metapost = metapost - -metapost.flushers = metapost.flushers or { } -local pdfflusher = { } -metapost.flushers.pdf = pdfflusher - -metapost.multipass = false -metapost.n = 0 -metapost.optimize = true -- false - -local experiment = true -- uses context(node) that already does delayed nodes - -local savedliterals = nil -- needs checking -local mpsliteral = nodes.pool.register(node.new("whatsit",nodes.whatsitcodes.pdfliteral)) -- pdfliteral.mode = 1 - -local pdfliteral = function(s) - local literal = copy_node(mpsliteral) - literal.data = s - return literal -end - --- Because in MKiV we always have two passes, we save the objects. When an extra --- mp run is done (due to for instance texts identifier in the parse pass), we --- get a new result table and the stored objects are forgotten. Otherwise they --- are reused. 
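-- A minimal standalone sketch of the caching idea described in the comment
-- above, in plain Lua; the names cachedobjects, result, figure and f are
-- illustrative only and follow the conventions used elsewhere in this file
-- (figure:objects() is the expensive mplib call being memoized per figure on
-- the shared result table, so the trial pass and the final pass reuse it):

local function cachedobjects(result,figure,f)
    local cache = result.objects
    if not cache then
        cache = { }
        result.objects = cache
    end
    local objects = cache[f]
    if not objects then
        objects = figure:objects() -- the expensive call, done once and reused
        cache[f] = objects
    end
    return objects
end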
- -local function getobjects(result,figure,f) - if metapost.optimize then - local objects = result.objects - if not objects then - result.objects = { } - end - objects = result.objects[f] - if not objects then - objects = figure:objects() - result.objects[f] = objects - end - return objects - else - return figure:objects() - end -end - -function metapost.convert(result, trialrun, flusher, multipass, askedfig) - if trialrun then - metapost.multipass = false - metapost.parse(result, askedfig) - if multipass and not metapost.multipass and metapost.optimize then - metapost.flush(result, flusher, askedfig) -- saves a run - else - return false - end - else - metapost.flush(result, flusher, askedfig) - end - return true -- done -end - -function metapost.flushliteral(d) - if savedliterals then - local literal = copy_node(mpsliteral) - literal.data = savedliterals[d] - write_node(literal) - else - report_metapost("problem flushing literal %a",d) - end -end - -function metapost.flushreset() -- will become obsolete and internal - savedliterals = nil -end - -function pdfflusher.comment(message) - if message then - message = formatters["%% mps graphic %s: %s"](metapost.n,message) - if experiment then - context(pdfliteral(message)) - else - if savedliterals then - local last = #savedliterals + 1 - savedliterals[last] = message - context.MPLIBtoPDF(last) - else - savedliterals = { message } - context.MPLIBtoPDF(1) - end - end - end -end - -function pdfflusher.startfigure(n,llx,lly,urx,ury,message) - savedliterals = nil - metapost.n = metapost.n + 1 - context.startMPLIBtoPDF(llx,lly,urx,ury) - if message then pdfflusher.comment(message) end -end - -function pdfflusher.stopfigure(message) - if message then pdfflusher.comment(message) end - context.stopMPLIBtoPDF() - context.MPLIBflushreset() -- maybe just at the beginning -end - -function pdfflusher.flushfigure(pdfliterals) -- table - if #pdfliterals > 0 then - pdfliterals = concat(pdfliterals,"\n") - if experiment then - context(pdfliteral(pdfliterals)) - else - if savedliterals then - local last = #savedliterals + 1 - savedliterals[last] = pdfliterals - context.MPLIBtoPDF(last) - else - savedliterals = { pdfliterals } - context.MPLIBtoPDF(1) - end - end - end -end - -function pdfflusher.textfigure(font,size,text,width,height,depth) -- we could save the factor - text = gsub(text,".","\\hbox{%1}") -- kerning happens in metapost (i have to check if this is true for mplib) - context.MPtextext(font,size,text,0,-number.dimenfactors.bp*depth) -end - -local bend_tolerance = 131/65536 - -local rx, sx, sy, ry, tx, ty, divider = 1, 0, 0, 1, 0, 0, 1 - -local pen_info = mplib.pen_info - -local function pen_characteristics(object) - local t = pen_info(object) - rx, ry, sx, sy, tx, ty = t.rx, t.ry, t.sx, t.sy, t.tx, t.ty - divider = sx*sy - rx*ry - return not (sx==1 and rx==0 and ry==0 and sy==1 and tx==0 and ty==0), t.width -end - -local function mpconcat(px, py) -- no tx, ty here / we can move this one inline if needed - return (sy*px-ry*py)/divider,(sx*py-rx*px)/divider -end - -local function curved(ith,pth) - local d = pth.left_x - ith.right_x - if abs(ith.right_x - ith.x_coord - d) <= bend_tolerance and abs(pth.x_coord - pth.left_x - d) <= bend_tolerance then - d = pth.left_y - ith.right_y - if abs(ith.right_y - ith.y_coord - d) <= bend_tolerance and abs(pth.y_coord - pth.left_y - d) <= bend_tolerance then - return false - end - end - return true -end - -local function flushnormalpath(path, t, open) - local pth, ith, nt - if t then - nt = #t - else - t = { } - nt 
= 0 - end - for i=1,#path do - nt = nt + 1 - pth = path[i] - if not ith then - t[nt] = formatters["%f %f m"](pth.x_coord,pth.y_coord) - elseif curved(ith,pth) then - t[nt] = formatters["%f %f %f %f %f %f c"](ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord) - else - t[nt] = formatters["%f %f l"](pth.x_coord,pth.y_coord) - end - ith = pth - end - if not open then - nt = nt + 1 - local one = path[1] - if curved(pth,one) then - t[nt] = formatters["%f %f %f %f %f %f c"](pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord ) - else - t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord) - end - elseif #path == 1 then - -- special case .. draw point - local one = path[1] - nt = nt + 1 - t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord) - end - return t -end - -local function flushconcatpath(path, t, open) - local pth, ith, nt - if t then - nt = #t - else - t = { } - nt = 0 - end - nt = nt + 1 - t[nt] = formatters["%f %f %f %f %f %f cm"](sx,rx,ry,sy,tx,ty) - for i=1,#path do - nt = nt + 1 - pth = path[i] - if not ith then - t[nt] = formatters["%f %f m"](mpconcat(pth.x_coord,pth.y_coord)) - elseif curved(ith,pth) then - local a, b = mpconcat(ith.right_x,ith.right_y) - local c, d = mpconcat(pth.left_x,pth.left_y) - t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(pth.x_coord,pth.y_coord)) - else - t[nt] = formatters["%f %f l"](mpconcat(pth.x_coord, pth.y_coord)) - end - ith = pth - end - if not open then - nt = nt + 1 - local one = path[1] - if curved(pth,one) then - local a, b = mpconcat(pth.right_x,pth.right_y) - local c, d = mpconcat(one.left_x,one.left_y) - t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(one.x_coord, one.y_coord)) - else - t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord)) - end - elseif #path == 1 then - -- special case .. draw point - nt = nt + 1 - local one = path[1] - t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord)) - end - return t -end - -metapost.flushnormalpath = flushnormalpath - --- The flusher is pdf based, if another backend is used, we need to overload the --- flusher; this is beta code, the organization will change (already upgraded in --- sync with mplib) --- --- We can avoid the before table but I like symmetry. There is of course a small --- performance penalty, but so is passing extra arguments (result, flusher, after) --- and returning stuff. 
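-- A minimal sketch of what overloading the flusher amounts to, assuming only
-- the entry points that metapost.flush uses (startfigure, flushfigure,
-- textfigure, stopfigure): any table providing them can be passed instead of
-- the pdf flusher. The names collected and myflusher are illustrative only.

local collected = { } -- hypothetical collector, for illustration only

local myflusher = {
    startfigure = function(n,llx,lly,urx,ury,message)
        collected = { } -- start a fresh bucket per figure
    end,
    flushfigure = function(literals) -- receives a table of pdf literal strings
        for i=1,#literals do
            collected[#collected+1] = literals[i]
        end
    end,
    textfigure = function(font,size,text,width,height,depth)
        -- a real backend would typeset the text snippet here
    end,
    stopfigure = function(message)
        -- collected now holds the pdf code of one figure
    end,
}

-- usage sketch: metapost.flush(result,myflusher,askedfig)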
- -local function ignore() end - -function metapost.flush(result,flusher,askedfig) - if result then - local figures = result.fig - if figures then - flusher = flusher or pdfflusher - local resetplugins = metapost.resetplugins or ignore -- before figure - local processplugins = metapost.processplugins or ignore -- each object - local synchronizeplugins = metapost.synchronizeplugins or ignore - local pluginactions = metapost.pluginactions or ignore -- before / after - local startfigure = flusher.startfigure - local stopfigure = flusher.stopfigure - local flushfigure = flusher.flushfigure - local textfigure = flusher.textfigure - for f=1, #figures do - local figure = figures[f] - local objects = getobjects(result,figure,f) - local fignum = figure:charcode() or 0 - if askedfig == "direct" or askedfig == "all" or askedfig == fignum then - local t = { } - local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false - local bbox = figure:boundingbox() - local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4] - metapost.llx = llx - metapost.lly = lly - metapost.urx = urx - metapost.ury = ury - if urx < llx then - -- invalid - startfigure(fignum,0,0,0,0,"invalid",figure) - stopfigure() - else - startfigure(fignum,llx,lly,urx,ury,"begin",figure) - t[#t+1] = "q" - if objects then - resetplugins(t) -- we should move the colorinitializer here - for o=1,#objects do - local object = objects[o] - local objecttype = object.type - if objecttype == "start_bounds" or objecttype == "stop_bounds" or objecttype == "special" then - -- skip - elseif objecttype == "start_clip" then - t[#t+1] = "q" - flushnormalpath(object.path,t,false) - t[#t+1] = "W n" - elseif objecttype == "stop_clip" then - t[#t+1] = "Q" - miterlimit, linecap, linejoin, dashed = -1, -1, -1, false - elseif objecttype == "text" then - t[#t+1] = "q" - local ot = object.transform -- 3,4,5,6,1,2 - t[#t+1] = formatters["%f %f %f %f %f %f cm"](ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%f %f m %f %f %f %f 0 0 cm"](unpack(ot)) - flushfigure(t) -- flush accumulated literals - t = { } - textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth) - t[#t+1] = "Q" - else - -- we use an indirect table as we want to overload - -- entries but this is not possible in userdata - -- - -- can be optimized if no path - -- - local original = object - local object = { } - setmetatable(object, { - __index = original - }) - -- first we analyze - local before, after = processplugins(object) - local objecttype = object.type -- can have changed - if before then - t = pluginactions(before,t,flushfigure) - end - local ml = object.miterlimit - if ml and ml ~= miterlimit then - miterlimit = ml - t[#t+1] = formatters["%f M"](ml) - end - local lj = object.linejoin - if lj and lj ~= linejoin then - linejoin = lj - t[#t+1] = formatters["%i j"](lj) - end - local lc = object.linecap - if lc and lc ~= linecap then - linecap = lc - t[#t+1] = formatters["%i J"](lc) - end - local dl = object.dash - if dl then - local d = formatters["[%s] %f d"](concat(dl.dashes or {}," "),dl.offset) - if d ~= dashed then - dashed = d - t[#t+1] = dashed - end - elseif dashed then - t[#t+1] = "[] 0 d" - dashed = false - end - local path = object.path -- newpath - local transformed, penwidth = false, 1 - local open = path and path[1].left_type and path[#path].right_type -- at this moment only "end_point" - local pen = object.pen - if pen then - if pen.type == 'elliptical' then - transformed, penwidth = pen_characteristics(original) -- boolean, value - 
t[#t+1] = formatters["%f w"](penwidth) -- todo: only if changed - if objecttype == 'fill' then - objecttype = 'both' - end - else -- calculated by mplib itself - objecttype = 'fill' - end - end - if transformed then - t[#t+1] = "q" - end - if path then - if transformed then - flushconcatpath(path,t,open) - else - flushnormalpath(path,t,open) - end - if objecttype == "fill" then - t[#t+1] = "h f" - elseif objecttype == "outline" then - t[#t+1] = (open and "S") or "h S" - elseif objecttype == "both" then - t[#t+1] = "h B" - end - end - if transformed then - t[#t+1] = "Q" - end - local path = object.htap - if path then - if transformed then - t[#t+1] = "q" - end - if transformed then - flushconcatpath(path,t,open) - else - flushnormalpath(path,t,open) - end - if objecttype == "fill" then - t[#t+1] = "h f" - elseif objecttype == "outline" then - t[#t+1] = (open and "S") or "h S" - elseif objecttype == "both" then - t[#t+1] = "h B" - end - if transformed then - t[#t+1] = "Q" - end - end - if after then - t = pluginactions(after,t,flushfigure) - end - if object.grouped then - -- can be qQ'd so changes can end up in groups - miterlimit, linecap, linejoin, dashed = -1, -1, -1, false - end - end - end - end - t[#t+1] = "Q" - flushfigure(t) - stopfigure("end") - end - if askedfig ~= "all" then - break - end - end - end - end - end -end - -function metapost.parse(result,askedfig) - if result then - local figures = result.fig - if figures then - local analyzeplugins = metapost.analyzeplugins -- each object - for f=1,#figures do - local figure = figures[f] - local fignum = figure:charcode() or 0 - if askedfig == "direct" or askedfig == "all" or askedfig == fignum then - local bbox = figure:boundingbox() - metapost.llx = bbox[1] - metapost.lly = bbox[2] - metapost.urx = bbox[3] - metapost.ury = bbox[4] - local objects = getobjects(result,figure,f) - if objects then - for o=1,#objects do - analyzeplugins(objects[o]) - end - end - if askedfig ~= "all" then - break - end - end - end - end - end -end - --- tracing: - -local t = { } - -local flusher = { - startfigure = function() - t = { } - context.startnointerference() - end, - flushfigure = function(literals) - local n = #t - for i=1, #literals do - n = n + 1 - t[n] = literals[i] - end - end, - stopfigure = function() - context.stopnointerference() - end -} - -function metapost.pdfliterals(result) - metapost.flush(result,flusher) - return t -end - --- so far - -function metapost.totable(result) - local figure = result and result.fig and result.fig[1] - if figure then - local t = { } - local objects = figure:objects() - for o=1,#objects do - local object = objects[o] - local tt = { } - local fields = mplib.fields(object) - for f=1,#fields do - local field = fields[f] - tt[field] = object[field] - end - t[o] = tt - end - local b = figure:boundingbox() - return { - boundingbox = { llx = b[1], lly = b[2], urx = b[3], ury = b[4] }, - objects = t - } - else - return nil - end -end +if not modules then modules = { } end modules ['mlib-pdf'] = { + version = 1.001, + comment = "companion to mlib-ctx.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- maybe %s is better than %f + +local format, concat, gsub = string.format, table.concat, string.gsub +local abs, sqrt, round = math.abs, math.sqrt, math.round +local setmetatable = setmetatable +local Cf, C, Cg, Ct, P, S, lpegmatch = lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.match +local 
formatters = string.formatters + +local report_metapost = logs.reporter("metapost") + +local mplib, context = mplib, context + +local allocate = utilities.storage.allocate + +local copy_node = node.copy +local write_node = node.write + +metapost = metapost or { } +local metapost = metapost + +metapost.flushers = metapost.flushers or { } +local pdfflusher = { } +metapost.flushers.pdf = pdfflusher + +metapost.multipass = false +metapost.n = 0 +metapost.optimize = true -- false + +local experiment = true -- uses context(node) that already does delayed nodes + +local savedliterals = nil -- needs checking +local mpsliteral = nodes.pool.register(node.new("whatsit",nodes.whatsitcodes.pdfliteral)) -- pdfliteral.mode = 1 + +local pdfliteral = function(s) + local literal = copy_node(mpsliteral) + literal.data = s + return literal +end + +-- Because in MKiV we always have two passes, we save the objects. When an extra +-- mp run is done (due to for instance texts identifier in the parse pass), we +-- get a new result table and the stored objects are forgotten. Otherwise they +-- are reused. + +local function getobjects(result,figure,f) + if metapost.optimize then + local objects = result.objects + if not objects then + result.objects = { } + end + objects = result.objects[f] + if not objects then + objects = figure:objects() + result.objects[f] = objects + end + return objects + else + return figure:objects() + end +end + +function metapost.convert(result, trialrun, flusher, multipass, askedfig) + if trialrun then + metapost.multipass = false + metapost.parse(result, askedfig) + if multipass and not metapost.multipass and metapost.optimize then + metapost.flush(result, flusher, askedfig) -- saves a run + else + return false + end + else + metapost.flush(result, flusher, askedfig) + end + return true -- done +end + +function metapost.flushliteral(d) + if savedliterals then + local literal = copy_node(mpsliteral) + literal.data = savedliterals[d] + write_node(literal) + else + report_metapost("problem flushing literal %a",d) + end +end + +function metapost.flushreset() -- will become obsolete and internal + savedliterals = nil +end + +function pdfflusher.comment(message) + if message then + message = formatters["%% mps graphic %s: %s"](metapost.n,message) + if experiment then + context(pdfliteral(message)) + else + if savedliterals then + local last = #savedliterals + 1 + savedliterals[last] = message + context.MPLIBtoPDF(last) + else + savedliterals = { message } + context.MPLIBtoPDF(1) + end + end + end +end + +function pdfflusher.startfigure(n,llx,lly,urx,ury,message) + savedliterals = nil + metapost.n = metapost.n + 1 + context.startMPLIBtoPDF(llx,lly,urx,ury) + if message then pdfflusher.comment(message) end +end + +function pdfflusher.stopfigure(message) + if message then pdfflusher.comment(message) end + context.stopMPLIBtoPDF() + context.MPLIBflushreset() -- maybe just at the beginning +end + +function pdfflusher.flushfigure(pdfliterals) -- table + if #pdfliterals > 0 then + pdfliterals = concat(pdfliterals,"\n") + if experiment then + context(pdfliteral(pdfliterals)) + else + if savedliterals then + local last = #savedliterals + 1 + savedliterals[last] = pdfliterals + context.MPLIBtoPDF(last) + else + savedliterals = { pdfliterals } + context.MPLIBtoPDF(1) + end + end + end +end + +function pdfflusher.textfigure(font,size,text,width,height,depth) -- we could save the factor + text = gsub(text,".","\\hbox{%1}") -- kerning happens in metapost (i have to check if this is true for mplib) + 
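-- each character is boxed so tex itself adds no kerning between them; the call
-- below passes the depth, negated and converted to big points, presumably as a
-- baseline correction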
context.MPtextext(font,size,text,0,-number.dimenfactors.bp*depth) +end + +local bend_tolerance = 131/65536 + +local rx, sx, sy, ry, tx, ty, divider = 1, 0, 0, 1, 0, 0, 1 + +local pen_info = mplib.pen_info + +local function pen_characteristics(object) + local t = pen_info(object) + rx, ry, sx, sy, tx, ty = t.rx, t.ry, t.sx, t.sy, t.tx, t.ty + divider = sx*sy - rx*ry + return not (sx==1 and rx==0 and ry==0 and sy==1 and tx==0 and ty==0), t.width +end + +local function mpconcat(px, py) -- no tx, ty here / we can move this one inline if needed + return (sy*px-ry*py)/divider,(sx*py-rx*px)/divider +end + +local function curved(ith,pth) + local d = pth.left_x - ith.right_x + if abs(ith.right_x - ith.x_coord - d) <= bend_tolerance and abs(pth.x_coord - pth.left_x - d) <= bend_tolerance then + d = pth.left_y - ith.right_y + if abs(ith.right_y - ith.y_coord - d) <= bend_tolerance and abs(pth.y_coord - pth.left_y - d) <= bend_tolerance then + return false + end + end + return true +end + +local function flushnormalpath(path, t, open) + local pth, ith, nt + if t then + nt = #t + else + t = { } + nt = 0 + end + for i=1,#path do + nt = nt + 1 + pth = path[i] + if not ith then + t[nt] = formatters["%f %f m"](pth.x_coord,pth.y_coord) + elseif curved(ith,pth) then + t[nt] = formatters["%f %f %f %f %f %f c"](ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord) + else + t[nt] = formatters["%f %f l"](pth.x_coord,pth.y_coord) + end + ith = pth + end + if not open then + nt = nt + 1 + local one = path[1] + if curved(pth,one) then + t[nt] = formatters["%f %f %f %f %f %f c"](pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord ) + else + t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord) + end + elseif #path == 1 then + -- special case .. draw point + local one = path[1] + nt = nt + 1 + t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord) + end + return t +end + +local function flushconcatpath(path, t, open) + local pth, ith, nt + if t then + nt = #t + else + t = { } + nt = 0 + end + nt = nt + 1 + t[nt] = formatters["%f %f %f %f %f %f cm"](sx,rx,ry,sy,tx,ty) + for i=1,#path do + nt = nt + 1 + pth = path[i] + if not ith then + t[nt] = formatters["%f %f m"](mpconcat(pth.x_coord,pth.y_coord)) + elseif curved(ith,pth) then + local a, b = mpconcat(ith.right_x,ith.right_y) + local c, d = mpconcat(pth.left_x,pth.left_y) + t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(pth.x_coord,pth.y_coord)) + else + t[nt] = formatters["%f %f l"](mpconcat(pth.x_coord, pth.y_coord)) + end + ith = pth + end + if not open then + nt = nt + 1 + local one = path[1] + if curved(pth,one) then + local a, b = mpconcat(pth.right_x,pth.right_y) + local c, d = mpconcat(one.left_x,one.left_y) + t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(one.x_coord, one.y_coord)) + else + t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord)) + end + elseif #path == 1 then + -- special case .. draw point + nt = nt + 1 + local one = path[1] + t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord)) + end + return t +end + +metapost.flushnormalpath = flushnormalpath + +-- The flusher is pdf based, if another backend is used, we need to overload the +-- flusher; this is beta code, the organization will change (already upgraded in +-- sync with mplib) +-- +-- We can avoid the before table but I like symmetry. There is of course a small +-- performance penalty, but so is passing extra arguments (result, flusher, after) +-- and returning stuff. 
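-- For orientation, a hedged example (made up coordinates, not the output of an
-- actual run) of what flushnormalpath above collects for a closed three point
-- path with straight segments:
--
--   "0.000000 0.000000 m"
--   "100.000000 0.000000 l"
--   "50.000000 80.000000 l"
--   "0.000000 0.000000 l"
--
-- a moveto for the first knot, a lineto (or a curveto when curved() detects a
-- real bend) per following knot, and a closing segment back to knot one because
-- the path is not open; metapost.flush then appends "h f", "S", "h S" or "h B"
-- depending on the object type.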
+ +local function ignore() end + +function metapost.flush(result,flusher,askedfig) + if result then + local figures = result.fig + if figures then + flusher = flusher or pdfflusher + local resetplugins = metapost.resetplugins or ignore -- before figure + local processplugins = metapost.processplugins or ignore -- each object + local synchronizeplugins = metapost.synchronizeplugins or ignore + local pluginactions = metapost.pluginactions or ignore -- before / after + local startfigure = flusher.startfigure + local stopfigure = flusher.stopfigure + local flushfigure = flusher.flushfigure + local textfigure = flusher.textfigure + for f=1, #figures do + local figure = figures[f] + local objects = getobjects(result,figure,f) + local fignum = figure:charcode() or 0 + if askedfig == "direct" or askedfig == "all" or askedfig == fignum then + local t = { } + local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false + local bbox = figure:boundingbox() + local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4] + metapost.llx = llx + metapost.lly = lly + metapost.urx = urx + metapost.ury = ury + if urx < llx then + -- invalid + startfigure(fignum,0,0,0,0,"invalid",figure) + stopfigure() + else + startfigure(fignum,llx,lly,urx,ury,"begin",figure) + t[#t+1] = "q" + if objects then + resetplugins(t) -- we should move the colorinitializer here + for o=1,#objects do + local object = objects[o] + local objecttype = object.type + if objecttype == "start_bounds" or objecttype == "stop_bounds" or objecttype == "special" then + -- skip + elseif objecttype == "start_clip" then + t[#t+1] = "q" + flushnormalpath(object.path,t,false) + t[#t+1] = "W n" + elseif objecttype == "stop_clip" then + t[#t+1] = "Q" + miterlimit, linecap, linejoin, dashed = -1, -1, -1, false + elseif objecttype == "text" then + t[#t+1] = "q" + local ot = object.transform -- 3,4,5,6,1,2 + t[#t+1] = formatters["%f %f %f %f %f %f cm"](ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%f %f m %f %f %f %f 0 0 cm"](unpack(ot)) + flushfigure(t) -- flush accumulated literals + t = { } + textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth) + t[#t+1] = "Q" + else + -- we use an indirect table as we want to overload + -- entries but this is not possible in userdata + -- + -- can be optimized if no path + -- + local original = object + local object = { } + setmetatable(object, { + __index = original + }) + -- first we analyze + local before, after = processplugins(object) + local objecttype = object.type -- can have changed + if before then + t = pluginactions(before,t,flushfigure) + end + local ml = object.miterlimit + if ml and ml ~= miterlimit then + miterlimit = ml + t[#t+1] = formatters["%f M"](ml) + end + local lj = object.linejoin + if lj and lj ~= linejoin then + linejoin = lj + t[#t+1] = formatters["%i j"](lj) + end + local lc = object.linecap + if lc and lc ~= linecap then + linecap = lc + t[#t+1] = formatters["%i J"](lc) + end + local dl = object.dash + if dl then + local d = formatters["[%s] %f d"](concat(dl.dashes or {}," "),dl.offset) + if d ~= dashed then + dashed = d + t[#t+1] = dashed + end + elseif dashed then + t[#t+1] = "[] 0 d" + dashed = false + end + local path = object.path -- newpath + local transformed, penwidth = false, 1 + local open = path and path[1].left_type and path[#path].right_type -- at this moment only "end_point" + local pen = object.pen + if pen then + if pen.type == 'elliptical' then + transformed, penwidth = pen_characteristics(original) -- boolean, value + 
t[#t+1] = formatters["%f w"](penwidth) -- todo: only if changed + if objecttype == 'fill' then + objecttype = 'both' + end + else -- calculated by mplib itself + objecttype = 'fill' + end + end + if transformed then + t[#t+1] = "q" + end + if path then + if transformed then + flushconcatpath(path,t,open) + else + flushnormalpath(path,t,open) + end + if objecttype == "fill" then + t[#t+1] = "h f" + elseif objecttype == "outline" then + t[#t+1] = (open and "S") or "h S" + elseif objecttype == "both" then + t[#t+1] = "h B" + end + end + if transformed then + t[#t+1] = "Q" + end + local path = object.htap + if path then + if transformed then + t[#t+1] = "q" + end + if transformed then + flushconcatpath(path,t,open) + else + flushnormalpath(path,t,open) + end + if objecttype == "fill" then + t[#t+1] = "h f" + elseif objecttype == "outline" then + t[#t+1] = (open and "S") or "h S" + elseif objecttype == "both" then + t[#t+1] = "h B" + end + if transformed then + t[#t+1] = "Q" + end + end + if after then + t = pluginactions(after,t,flushfigure) + end + if object.grouped then + -- can be qQ'd so changes can end up in groups + miterlimit, linecap, linejoin, dashed = -1, -1, -1, false + end + end + end + end + t[#t+1] = "Q" + flushfigure(t) + stopfigure("end") + end + if askedfig ~= "all" then + break + end + end + end + end + end +end + +function metapost.parse(result,askedfig) + if result then + local figures = result.fig + if figures then + local analyzeplugins = metapost.analyzeplugins -- each object + for f=1,#figures do + local figure = figures[f] + local fignum = figure:charcode() or 0 + if askedfig == "direct" or askedfig == "all" or askedfig == fignum then + local bbox = figure:boundingbox() + metapost.llx = bbox[1] + metapost.lly = bbox[2] + metapost.urx = bbox[3] + metapost.ury = bbox[4] + local objects = getobjects(result,figure,f) + if objects then + for o=1,#objects do + analyzeplugins(objects[o]) + end + end + if askedfig ~= "all" then + break + end + end + end + end + end +end + +-- tracing: + +local t = { } + +local flusher = { + startfigure = function() + t = { } + context.startnointerference() + end, + flushfigure = function(literals) + local n = #t + for i=1, #literals do + n = n + 1 + t[n] = literals[i] + end + end, + stopfigure = function() + context.stopnointerference() + end +} + +function metapost.pdfliterals(result) + metapost.flush(result,flusher) + return t +end + +-- so far + +function metapost.totable(result) + local figure = result and result.fig and result.fig[1] + if figure then + local t = { } + local objects = figure:objects() + for o=1,#objects do + local object = objects[o] + local tt = { } + local fields = mplib.fields(object) + for f=1,#fields do + local field = fields[f] + tt[field] = object[field] + end + t[o] = tt + end + local b = figure:boundingbox() + return { + boundingbox = { llx = b[1], lly = b[2], urx = b[3], ury = b[4] }, + objects = t + } + else + return nil + end +end diff --git a/tex/context/base/mlib-pps.lua b/tex/context/base/mlib-pps.lua index 93bddc2dd..217625bcb 100644 --- a/tex/context/base/mlib-pps.lua +++ b/tex/context/base/mlib-pps.lua @@ -1,1216 +1,1216 @@ -if not modules then modules = { } end modules ['mlib-pps'] = { - version = 1.001, - comment = "companion to mlib-ctx.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- todo: make a hashed textext variant where we only process the text once (normally --- we cannot assume 
that no macros are involved which influence a next textext - -local format, gmatch, match, split = string.format, string.gmatch, string.match, string.split -local tonumber, type = tonumber, type -local round = math.round -local insert, concat = table.insert, table.concat -local Cs, Cf, C, Cg, Ct, P, S, V, Carg = lpeg.Cs, lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.V, lpeg.Carg -local lpegmatch = lpeg.match -local formatters = string.formatters - -local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context - -local texbox = tex.box -local copy_list = node.copy_list -local free_list = node.flush_list -local setmetatableindex = table.setmetatableindex -local sortedhash = table.sortedhash - -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming - -local trace_runs = false trackers.register("metapost.runs", function(v) trace_runs = v end) -local trace_textexts = false trackers.register("metapost.textexts", function(v) trace_textexts = v end) -local trace_scripts = false trackers.register("metapost.scripts", function(v) trace_scripts = v end) - -local report_metapost = logs.reporter("metapost") -local report_textexts = logs.reporter("metapost","textexts") -local report_scripts = logs.reporter("metapost","scripts") - -local colors = attributes.colors - -local rgbtocmyk = colors.rgbtocmyk or function() return 0,0,0,1 end -local cmyktorgb = colors.cmyktorgb or function() return 0,0,0 end -local rgbtogray = colors.rgbtogray or function() return 0 end -local cmyktogray = colors.cmyktogray or function() return 0 end - -metapost.makempy = metapost.makempy or { nofconverted = 0 } -local makempy = metapost.makempy - -local nooutercolor = "0 g 0 G" -local nooutertransparency = "/Tr0 gs" -- only when set -local outercolormode = 0 -local outercolor = nooutercolor -local outertransparency = nooutertransparency -local innercolor = nooutercolor -local innertransparency = nooutertransparency - -local pdfcolor = lpdf.color -local pdftransparency = lpdf.transparency -local registercolor = colors.register -local registerspotcolor = colors.registerspotcolor - -local transparencies = attributes.transparencies -local registertransparency = transparencies.register - -function metapost.setoutercolor(mode,colormodel,colorattribute,transparencyattribute) - -- has always to be called before conversion - -- todo: transparency (not in the mood now) - outercolormode = mode - if mode == 1 or mode == 3 then - -- inherit from outer (registered color) - outercolor = pdfcolor(colormodel,colorattribute) or nooutercolor - outertransparency = pdftransparency(transparencyattribute) or nooutertransparency - elseif mode == 2 then - -- stand alone (see m-punk.tex) - outercolor = "" - outertransparency = "" - else -- 0 - outercolor = nooutercolor - outertransparency = nooutertransparency - end - innercolor = outercolor - innertransparency = outertransparency -- not yet used -end - -local f_gray = formatters["%.3f g %.3f G"] -local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"] -local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K"] -local f_cm = formatters["q %f %f %f %f %f %f cm"] -local f_shade = formatters["MpSh%s"] - -local function checked_color_pair(color,...) - if not color then - return innercolor, outercolor - end - if outercolormode == 3 then - innercolor = color(...) 
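-- in mode 3 the graphic color also becomes the running inner color, and the
-- same value is returned for both the before and the after slot, so the state
-- is not reset to the outer document color afterwards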
- return innercolor, innercolor - else - return color(...), outercolor - end -end - -function metapost.colorinitializer() - innercolor = outercolor - innertransparency = outertransparency - return outercolor, outertransparency -end - ---~ - -local specificationsplitter = lpeg.tsplitat(" ") -local colorsplitter = lpeg.tsplitter(":",tonumber) -- no need for : -local domainsplitter = lpeg.tsplitter(" ",tonumber) -local centersplitter = domainsplitter -local coordinatesplitter = domainsplitter - --- thanks to taco's reading of the postscript manual: --- --- x' = sx * x + ry * y + tx --- y' = rx * x + sy * y + ty - -local nofshades = 0 -- todo: hash resources, start at 1000 in order not to clash with older - -local function normalize(ca,cb) - if #cb == 1 then - if #ca == 4 then - cb[1], cb[2], cb[3], cb[4] = 0, 0, 0, 1-cb[1] - else - cb[1], cb[2], cb[3] = cb[1], cb[1], cb[1] - end - elseif #cb == 3 then - if #ca == 4 then - cb[1], cb[2], cb[3], cb[4] = rgbtocmyk(cb[1],cb[2],cb[3]) - else - cb[1], cb[2], cb[3] = cmyktorgb(cb[1],cb[2],cb[3],cb[4]) - end - end -end - --- todo: check for the same colorspace (actually a backend issue), now we can --- have several similar resources --- --- normalize(ca,cb) fails for spotcolors - -local function spotcolorconverter(parent, n, d, p) - registerspotcolor(parent) - return pdfcolor(colors.model,registercolor(nil,'spot',parent,n,d,p)), outercolor -end - -local commasplitter = lpeg.tsplitat(",") - -local function checkandconvertspot(n_a,f_a,c_a,v_a,n_b,f_b,c_b,v_b) - -- must be the same but we don't check - local name = f_shade(nofshades) - local ca = lpegmatch(commasplitter,v_a) - local cb = lpegmatch(commasplitter,v_b) - if #ca == 0 or #cb == 0 then - return { 0 }, { 1 }, "DeviceGray", name - else - for i=1,#ca do ca[i] = tonumber(ca[i]) or 0 end - for i=1,#cb do cb[i] = tonumber(cb[i]) or 1 end - --~ spotcolorconverter(n_a,f_a,c_a,v_a) -- not really needed - return ca, cb, n_a or n_b, name - end -end - -local function checkandconvert(ca,cb) - local name = f_shade(nofshades) - if not ca or not cb or type(ca) == "string" then - return { 0 }, { 1 }, "DeviceGray", name - else - if #ca > #cb then - normalize(ca,cb) - elseif #ca < #cb then - normalize(cb,ca) - end - local model = colors.model - if model == "all" then - model= (#ca == 4 and "cmyk") or (#ca == 3 and "rgb") or "gray" - end - if model == "rgb" then - if #ca == 4 then - ca = { cmyktorgb(ca[1],ca[2],ca[3],ca[4]) } - cb = { cmyktorgb(cb[1],cb[2],cb[3],cb[4]) } - elseif #ca == 1 then - local a, b = 1-ca[1], 1-cb[1] - ca = { a, a, a } - cb = { b, b, b } - end - return ca, cb, "DeviceRGB", name - elseif model == "cmyk" then - if #ca == 3 then - ca = { rgbtocmyk(ca[1],ca[2],ca[3]) } - cb = { rgbtocmyk(cb[1],cb[2],cb[3]) } - elseif #ca == 1 then - ca = { 0, 0, 0, ca[1] } - cb = { 0, 0, 0, ca[1] } - end - return ca, cb, "DeviceCMYK", name - else - if #ca == 4 then - ca = { cmyktogray(ca[1],ca[2],ca[3],ca[4]) } - cb = { cmyktogray(cb[1],cb[2],cb[3],cb[4]) } - elseif #ca == 3 then - ca = { rgbtogray(ca[1],ca[2],ca[3]) } - cb = { rgbtogray(cb[1],cb[2],cb[3]) } - end - -- backend specific (will be renamed) - return ca, cb, "DeviceGray", name - end - end -end - -local current_format, current_graphic, current_initializations - -metapost.multipass = false - -local textexts = { } -- all boxes, optionally with a different color -local texslots = { } -- references to textexts in order or usage -local texorder = { } -- references to textexts by mp index -local textrial = 0 -local texfinal = 0 -local scratchbox = 0 - 
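-- textrial is bumped each time a textext is met in the trial (or extra) pass
-- and texfinal each time one is met in the final pass; both index texslots in
-- the same order, so a text analyzed in the first pass can be found back in
-- the final pass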
-local function freeboxes() - for n, box in next, textexts do - local tn = textexts[n] - if tn then - free_list(tn) - -- texbox[scratchbox] = tn - -- texbox[scratchbox] = nil -- this frees too - if trace_textexts then - report_textexts("freeing box %s",n) - end - end - end - textexts = { } - texslots = { } - texorder = { } - textrial = 0 - texfinal = 0 -end - -metapost.resettextexts = freeboxes - -function metapost.settext(box,slot) - textexts[slot] = copy_list(texbox[box]) - texbox[box] = nil - -- this will become - -- textexts[slot] = texbox[box] - -- unsetbox(box) -end - -function metapost.gettext(box,slot) - texbox[box] = copy_list(textexts[slot]) - if trace_textexts then - report_textexts("putting text %s in box %s",slot,box) - end - -- textexts[slot] = nil -- no, pictures can be placed several times -end - --- rather generic pdf, so use this elsewhere too it no longer pays --- off to distinguish between outline and fill (we now have both --- too, e.g. in arrows) - -metapost.reducetogray = true - -local models = { } - -function models.all(cr) - local n = #cr - if n == 0 then - return checked_color_pair() - elseif metapost.reducetogray then - if n == 1 then - local s = cr[1] - return checked_color_pair(f_gray,s,s) - elseif n == 3 then - local r, g, b = cr[1], cr[2], cr[3] - if r == g and g == b then - return checked_color_pair(f_gray,r,r) - else - return checked_color_pair(f_rgb,r,g,b,r,g,b) - end - else - local c, m, y, k = cr[1], cr[2], cr[3], cr[4] - if c == m and m == y and y == 0 then - k = 1 - k - return checked_color_pair(f_gray,k,k) - else - return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) - end - end - elseif n == 1 then - local s = cr[1] - return checked_color_pair(f_gray,s,s) - elseif n == 3 then - local r, g, b = cr[1], cr[2], cr[3] - return checked_color_pair(f_rgb,r,g,b,r,g,b) - else - local c, m, y, k = cr[1], cr[2], cr[3], cr[4] - return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) - end -end - -function models.rgb(cr) - local n = #cr - if n == 0 then - return checked_color_pair() - elseif metapost.reducetogray then - if n == 1 then - local s = cr[1] - checked_color_pair(f_gray,s,s) - elseif n == 3 then - local r, g, b = cr[1], cr[2], cr[3] - if r == g and g == b then - return checked_color_pair(f_gray,r,r) - else - return checked_color_pair(f_rgb,r,g,b,r,g,b) - end - else - local c, m, y, k = cr[1], cr[2], cr[3], cr[4] - if c == m and m == y and y == 0 then - k = 1 - k - return checked_color_pair(f_gray,k,k) - else - local r, g, b = cmyktorgb(c,m,y,k) - return checked_color_pair(f_rgb,r,g,b,r,g,b) - end - end - elseif n == 1 then - local s = cr[1] - return checked_color_pair(f_gray,s,s) - else - local r, g, b - if n == 3 then - r, g, b = cmyktorgb(cr[1],cr[2],cr[3],cr[4]) - else - r, g, b = cr[1], cr[2], cr[3] - end - return checked_color_pair(f_rgb,r,g,b,r,g,b) - end -end - -function models.cmyk(cr) - local n = #cr - if n == 0 then - return checked_color_pair() - elseif metapost.reducetogray then - if n == 1 then - local s = cr[1] - return checked_color_pair(f_gray,s,s) - elseif n == 3 then - local r, g, b = cr[1], cr[2], cr[3] - if r == g and g == b then - return checked_color_pair(f_gray,r,r) - else - local c, m, y, k = rgbtocmyk(r,g,b) - return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) - end - else - local c, m, y, k = cr[1], cr[2], cr[3], cr[4] - if c == m and m == y and y == 0 then - k = k - 1 - return checked_color_pair(f_gray,k,k) - else - return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) - end - end - elseif n == 1 then - local s = cr[1] - return 
checked_color_pair(f_gray,s,s) - else - local c, m, y, k - if n == 3 then - c, m, y, k = rgbtocmyk(cr[1],cr[2],cr[3]) - else - c, m, y, k = cr[1], cr[2], cr[3], cr[4] - end - return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) - end -end - -function models.gray(cr) - local n, s = #cr, 0 - if n == 0 then - return checked_color_pair() - elseif n == 4 then - s = cmyktogray(cr[1],cr[2],cr[3],cr[4]) - elseif n == 3 then - s = rgbtogray(cr[1],cr[2],cr[3]) - else - s = cr[1] - end - return checked_color_pair(f_gray,s,s) -end - -setmetatableindex(models, function(t,k) - local v = models.gray - t[k] = v - return v -end) - -local function colorconverter(cs) - return models[colors.model](cs) -end - -local btex = P("btex") -local etex = P(" etex") -local vtex = P("verbatimtex") -local ttex = P("textext") -local gtex = P("graphictext") -local multipass = P("forcemultipass") -local spacing = S(" \n\r\t\v")^0 -local dquote = P('"') - -local found, forced = false, false - -local function convert(str) - found = true - return "rawtextext(\"" .. str .. "\")" -- centered -end -local function ditto(str) - return "\" & ditto & \"" -end -local function register() - found = true -end -local function force() - forced = true -end - -local texmess = (dquote/ditto + (1 - etex))^0 - -local function ignore(s) - report_metapost("ignoring verbatim tex: %s",s) - return "" -end - --- local parser = P { --- [1] = Cs((V(2)/register + V(4)/ignore + V(3)/convert + V(5)/force + 1)^0), --- [2] = ttex + gtex, --- [3] = btex * spacing * Cs(texmess) * etex, --- [4] = vtex * spacing * Cs(texmess) * etex, --- [5] = multipass, -- experimental, only for testing --- } - --- currently a a one-liner produces less code - --- textext.*(".*") can have "'s but tricky parsing as we can have concatenated strings --- so this is something for a boring plain or train trip and we might assume proper mp --- input anyway - -local parser = Cs(( - (ttex + gtex)/register - + (btex * spacing * Cs(texmess) * etex)/convert - + (vtex * spacing * Cs(texmess) * etex)/ignore - + 1 -)^0) - -local function checktexts(str) - found, forced = false, false - return lpegmatch(parser,str), found, forced -end - -metapost.checktexts = checktexts - -local factor = 65536*(7227/7200) - -function metapost.edefsxsy(wd,ht,dp) -- helper for figure - local hd = ht + dp - context.setvalue("sx",wd ~= 0 and factor/wd or 0) - context.setvalue("sy",hd ~= 0 and factor/hd or 0) -end - -local function sxsy(wd,ht,dp) -- helper for text - local hd = ht + dp - return (wd ~= 0 and factor/wd) or 0, (hd ~= 0 and factor/hd) or 0 -end - -local no_first_run = "mfun_first_run := false ;" -local do_first_run = "mfun_first_run := true ;" -local no_trial_run = "mfun_trial_run := false ;" -local do_trial_run = "mfun_trial_run := true ;" -local do_begin_fig = "; beginfig(1) ; " -local do_end_fig = "; endfig ;" -local do_safeguard = ";" - -local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"] - -function metapost.textextsdata() - local t, nt, n = { }, 0, 0 - for n=1,#texorder do - local box = textexts[texorder[n]] - if box then - local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor - if trace_textexts then - report_textexts("passed data item %s: (%p,%p,%p)",n,wd,ht,dp) - end - nt = nt + 1 - t[nt] = f_text_data(n,wd,n,ht,n,dp) - else - break - end - end --- inspect(t) - return t -end - -metapost.intermediate = metapost.intermediate or {} -metapost.intermediate.actions = metapost.intermediate.actions or {} -metapost.intermediate.needed = 
false - -metapost.method = 1 -- 1:dumb 2:clever - --- maybe we can latelua the texts some day - -local nofruns = 0 -- askedfig: "all", "first", number - -local function checkaskedfig(askedfig) -- return askedfig, wrappit - if not askedfig then - return "direct", true - elseif askedfig == "all" then - return "all", false - elseif askedfig == "direct" then - return "all", true - else - askedfig = tonumber(askedfig) - if askedfig then - return askedfig, false - else - return "direct", true - end - end -end - -function metapost.graphic_base_pass(specification) - local mpx = specification.mpx -- mandate - local data = specification.data or "" - local definitions = specification.definitions or "" --- local extensions = metapost.getextensions(specification.instance,specification.useextensions) - local extensions = specification.extensions or "" - local inclusions = specification.inclusions or "" - local initializations = specification.initializations or "" - local askedfig = specification.figure -- no default else no wrapper - -- - nofruns = nofruns + 1 - local askedfig, wrappit = checkaskedfig(askedfig) - local done_1, done_2, done_3, forced_1, forced_2, forced_3 - data, done_1, forced_1 = checktexts(data) - -- we had preamble = extensions + inclusions - if extensions == "" then - extensions, done_2, forced_2 = "", false, false - else - extensions, done_2, forced_2 = checktexts(extensions) - end - if inclusions == "" then - inclusions, done_3, forced_3 = "", false, false - else - inclusions, done_3, forced_3 = checktexts(inclusions) - end - metapost.intermediate.needed = false - metapost.multipass = false -- no needed here - current_format = mpx - current_graphic = data - current_initializations = initializations - local method = metapost.method - if trace_runs then - if method == 1 then - report_metapost("forcing two runs due to library configuration") - elseif method ~= 2 then - report_metapost("ignoring run due to library configuration") - elseif not (done_1 or done_2 or done_3) then - report_metapost("forcing one run only due to analysis") - elseif done_1 then - report_metapost("forcing at max two runs due to main code") - elseif done_2 then - report_metapost("forcing at max two runs due to extensions") - else - report_metapost("forcing at max two runs due to inclusions") - end - end - if method == 1 or (method == 2 and (done_1 or done_2 or done_3)) then - if trace_runs then - report_metapost("first run of job %s, asked figure %a",nofruns,askedfig) - end - -- first true means: trialrun, second true means: avoid extra run if no multipass - local flushed = metapost.process(mpx, { - definitions, - extensions, - inclusions, - wrappit and do_begin_fig or "", - do_first_run, - do_trial_run, - current_initializations, - do_safeguard, - current_graphic, - wrappit and do_end_fig or "", - }, true, nil, not (forced_1 or forced_2 or forced_3), false, askedfig) - if metapost.intermediate.needed then - for _, action in next, metapost.intermediate.actions do - action() - end - end - if not flushed or not metapost.optimize then - -- tricky, we can only ask once for objects and therefore - -- we really need a second run when not optimized - context.MPLIBextrapass(askedfig) - end - else - if trace_runs then - report_metapost("running job %s, asked figure %a",nofruns,askedfig) - end - metapost.process(mpx, { - preamble, - wrappit and do_begin_fig or "", - do_first_run, - no_trial_run, - current_initializations, - do_safeguard, - current_graphic, - wrappit and do_end_fig or "", - }, false, nil, false, false, 
askedfig) - end -end - -function metapost.graphic_extra_pass(askedfig) - if trace_runs then - report_metapost("second run of job %s, asked figure %a",nofruns,askedfig) - end - local askedfig, wrappit = checkaskedfig(askedfig) - metapost.process(current_format, { - wrappit and do_begin_fig or "", - no_trial_run, - concat(metapost.textextsdata()," ;\n"), - current_initializations, - do_safeguard, - current_graphic, - wrappit and do_end_fig or "", - }, false, nil, false, true, askedfig) - context.MPLIBresettexts() -- must happen afterwards -end - -local start = [[\starttext]] -local preamble = [[\long\def\MPLIBgraphictext#1{\startTEXpage[scale=10000]#1\stopTEXpage}]] -local stop = [[\stoptext]] - -function makempy.processgraphics(graphics) - if #graphics > 0 then - makempy.nofconverted = makempy.nofconverted + 1 - starttiming(makempy) - local mpofile = tex.jobname .. "-mpgraph" - local mpyfile = file.replacesuffix(mpofile,"mpy") - local pdffile = file.replacesuffix(mpofile,"pdf") - local texfile = file.replacesuffix(mpofile,"tex") - io.savedata(texfile, { start, preamble, metapost.tex.get(), concat(graphics,"\n"), stop }, "\n") - local command = format("context --once %s %s", (tex.interactionmode == 0 and "--batchmode") or "", texfile) - os.execute(command) - if io.exists(pdffile) then - command = format("pstoedit -ssp -dt -f mpost %s %s", pdffile, mpyfile) - os.execute(command) - local result, r = { }, 0 - if io.exists(mpyfile) then - local data = io.loaddata(mpyfile) - for figure in gmatch(data,"beginfig(.-)endfig") do - r = r + 1 - result[r] = formatters["begingraphictextfig%sendgraphictextfig ;\n"](figure) - end - io.savedata(mpyfile,concat(result,"")) - end - end - stoptiming(makempy) - end -end - --- -- the new plugin handler -- -- - -local sequencers = utilities.sequencers -local appendgroup = sequencers.appendgroup -local appendaction = sequencers.appendaction - -local resetter = nil -local analyzer = nil -local processor = nil - -local resetteractions = sequencers.new { arguments = "t" } -local analyzeractions = sequencers.new { arguments = "object,prescript" } -local processoractions = sequencers.new { arguments = "object,prescript,before,after" } - -appendgroup(resetteractions, "system") -appendgroup(analyzeractions, "system") -appendgroup(processoractions, "system") - --- later entries come first - ---~ local scriptsplitter = Cf(Ct("") * ( ---~ Cg(C((1-S("= "))^1) * S("= ")^1 * C((1-S("\n\r"))^0) * S("\n\r")^0) ---~ )^0, rawset) - -local scriptsplitter = Ct ( Ct ( - C((1-S("= "))^1) * S("= ")^1 * C((1-S("\n\r"))^0) * S("\n\r")^0 -)^0 ) - -local function splitprescript(script) - local hash = lpegmatch(scriptsplitter,script) - for i=#hash,1,-1 do - local h = hash[i] - hash[h[1]] = h[2] - end - if trace_scripts then - report_scripts(table.serialize(hash,"prescript")) - end - return hash -end - --- -- not used: --- --- local function splitpostscript(script) --- local hash = lpegmatch(scriptsplitter,script) --- for i=1,#hash do --- local h = hash[i] --- hash[h[1]] = h[2] --- end --- if trace_scripts then --- report_scripts(table.serialize(hash,"postscript")) --- end --- return hash --- end - -function metapost.pluginactions(what,t,flushfigure) -- before/after object, depending on what - for i=1,#what do - local wi = what[i] - if type(wi) == "function" then - -- assume injection - flushfigure(t) -- to be checked: too many 0 g 0 G - t = { } - wi() - else - t[#t+1] = wi - end - end - return t -end - -function metapost.resetplugins(t) -- intialize plugins, before figure - -- plugins can 
have been added - resetter = resetteractions .runner - analyzer = analyzeractions .runner - processor = processoractions .runner - -- let's apply one runner - resetter(t) -end - -function metapost.analyzeplugins(object) -- each object (first pass) - local prescript = object.prescript -- specifications - if prescript and #prescript > 0 then - return analyzer(object,splitprescript(prescript)) - end -end - -function metapost.processplugins(object) -- each object (second pass) - local prescript = object.prescript -- specifications - if prescript and #prescript > 0 then - local before = { } - local after = { } - processor(object,splitprescript(prescript),before,after) - return #before > 0 and before, #after > 0 and after - else - local c = object.color - if c and #c > 0 then - local b, a = colorconverter(c) - return { b }, { a } - end - end -end - --- helpers - -local basepoints = number.dimenfactors["bp"] - -local function cm(object) - local op = object.path - if op then - local first, second, fourth = op[1], op[2], op[4] - local tx, ty = first.x_coord , first.y_coord - local sx, sy = second.x_coord - tx, fourth.y_coord - ty - local rx, ry = second.y_coord - ty, fourth.x_coord - tx - if sx == 0 then sx = 0.00001 end - if sy == 0 then sy = 0.00001 end - return sx, rx, ry, sy, tx, ty - else - return 1, 0, 0, 1, 0, 0 -- weird case - end -end - --- color - -local function cl_reset(t) - t[#t+1] = metapost.colorinitializer() -- only color -end - -local tx_hash = { } -local tx_last = 0 - -local function tx_reset() - tx_hash = { } - tx_last = 0 -end - -local fmt = formatters["%s %s %s % t"] - -local function tx_analyze(object,prescript) -- todo: hash content and reuse them - local tx_stage = prescript.tx_stage - if tx_stage == "trial" then - textrial = textrial + 1 - local tx_number = tonumber(prescript.tx_number) - local s = object.postscript or "" - local c = object.color -- only simple ones, no transparency - local a = prescript.tr_alternative - local t = prescript.tr_transparency - local h = fmt(tx_number,a or "?",t or "?",c) - local n = tx_hash[h] -- todo: hashed variant with s (nicer for similar labels) - if not n then - tx_last = tx_last + 1 - if not c then - -- no color - elseif #c == 1 then - if a and t then - s = formatters["\\directcolored[s=%f,a=%f,t=%f]%s"](c[1],a,t,s) - else - s = formatters["\\directcolored[s=%f]%s"](c[1],s) - end - elseif #c == 3 then - if a and t then - s = formatters["\\directcolored[r=%f,g=%f,b=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],a,t,s) - else - s = formatters["\\directcolored[r=%f,g=%f,b=%f]%s"](c[1],c[2],c[3],s) - end - elseif #c == 4 then - if a and t then - s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],c[4],a,t,s) - else - s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f]%s"](c[1],c[2],c[3],c[4],s) - end - end - context.MPLIBsettext(tx_last,s) - metapost.multipass = true - tx_hash[h] = tx_last - texslots[textrial] = tx_last - texorder[tx_number] = tx_last - if trace_textexts then - report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,tx_last,h) - end - else - texslots[textrial] = n - if trace_textexts then - report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,n,h) - end - end - elseif tx_stage == "extra" then - textrial = textrial + 1 - local tx_number = tonumber(prescript.tx_number) - if not texorder[tx_number] then - local s = object.postscript or "" - tx_last = tx_last + 1 - context.MPLIBsettext(tx_last,s) - metapost.multipass = true - 
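-- a text that only turns up in an extra run is typeset here and registered
-- below under its usage slot and its mp number, just like in the trial stage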
texslots[textrial] = tx_last - texorder[tx_number] = tx_last - if trace_textexts then - report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,textrial,tx_number,tx_last) - end - end - end -end - -local function tx_process(object,prescript,before,after) - local tx_number = prescript.tx_number - if tx_number then - tx_number = tonumber(tx_number) - local tx_stage = prescript.tx_stage - if tx_stage == "final" then - texfinal = texfinal + 1 - local n = texslots[texfinal] - if trace_textexts then - report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,texfinal,tx_number,n) - end - local sx, rx, ry, sy, tx, ty = cm(object) -- needs to be frozen outside the function - local box = textexts[n] - if box then - before[#before+1] = function() - -- flush always happens, we can have a special flush function injected before - context.MPLIBgettextscaledcm(n, - format("%f",sx), -- bah ... %s no longer checks - format("%f",rx), -- bah ... %s no longer checks - format("%f",ry), -- bah ... %s no longer checks - format("%f",sy), -- bah ... %s no longer checks - format("%f",tx), -- bah ... %s no longer checks - format("%f",ty), -- bah ... %s no longer checks - sxsy(box.width,box.height,box.depth)) - end - else - before[#before+1] = function() - report_textexts("unknown %s",tx_number) - end - end - if not trace_textexts then - object.path = false -- else: keep it - end - object.color = false - object.grouped = true - end - end -end - --- graphics - -local graphics = { } - -function metapost.intermediate.actions.makempy() - if #graphics > 0 then - makempy.processgraphics(graphics) - graphics = { } -- ? - end -end - -local function gt_analyze(object,prescript) - local gt_stage = prescript.gt_stage - if gt_stage == "trial" then - graphics[#graphics+1] = formatters["\\MPLIBgraphictext{%s}"](object.postscript or "") - metapost.intermediate.needed = true - metapost.multipass = true - end -end - --- local function gt_process(object,prescript,before,after) --- local gt_stage = prescript.gt_stage --- if gt_stage == "final" then --- end --- end - --- shades - -local function sh_process(object,prescript,before,after) - local sh_type = prescript.sh_type - if sh_type then - nofshades = nofshades + 1 - local domain = lpegmatch(domainsplitter,prescript.sh_domain) - local centera = lpegmatch(centersplitter,prescript.sh_center_a) - local centerb = lpegmatch(centersplitter,prescript.sh_center_b) - -- - local sh_color_a = prescript.sh_color_a or "1" - local sh_color_b = prescript.sh_color_b or "1" - local ca, cb, colorspace, name, separation - if prescript.sh_color == "into" and prescript.sp_name then - -- some spotcolor - local value_a, components_a, fractions_a, name_a - local value_b, components_b, fractions_b, name_b - for i=1,#prescript do - -- { "sh_color_a", "1" }, - -- { "sh_color", "into" }, - -- { "sh_radius_b", "0" }, - -- { "sh_radius_a", "141.73225" }, - -- { "sh_center_b", "425.19676 141.73225" }, - -- { "sh_center_a", "425.19676 0" }, - -- { "sh_factor", "1" }, - local tag = prescript[i][1] - if not name_a and tag == "sh_color_a" then - value_a = prescript[i-5][2] - components_a = prescript[i-4][2] - fractions_a = prescript[i-3][2] - name_a = prescript[i-2][2] - elseif not name_b and tag == "sh_color_b" then - value_b = prescript[i-5][2] - components_b = prescript[i-4][2] - fractions_b = prescript[i-3][2] - name_b = prescript[i-2][2] - end - if name_a and name_b then - break - end - end - ca, cb, separation, name = checkandconvertspot( - name_a,fractions_a,components_a,value_a, - 
name_b,fractions_b,components_b,value_b - ) - else - local colora = lpegmatch(colorsplitter,sh_color_a) - local colorb = lpegmatch(colorsplitter,sh_color_b) - ca, cb, colorspace, name = checkandconvert(colora,colorb) - end - if not ca or not cb then - ca, cb, colorspace, name = checkandconvert() - end - if sh_type == "linear" then - local coordinates = { centera[1], centera[2], centerb[1], centerb[2] } - lpdf.linearshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed) - elseif sh_type == "circular" then - local radiusa = tonumber(prescript.sh_radius_a) - local radiusb = tonumber(prescript.sh_radius_b) - local coordinates = { centera[1], centera[2], radiusa, centerb[1], centerb[2], radiusb } - lpdf.circularshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed) - else - -- fatal error - end - before[#before+1], after[#after+1] = "q /Pattern cs", formatters["W n /%s sh Q"](name) - -- false, not nil, else mt triggered - object.colored = false -- hm, not object.color ? - object.type = false - object.grouped = true - end -end - --- bitmaps - -local function bm_process(object,prescript,before,after) - local bm_xresolution = prescript.bm_xresolution - if bm_xresolution then - before[#before+1] = f_cm(cm(object)) - before[#before+1] = function() - figures.bitmapimage { - xresolution = tonumber(bm_xresolution), - yresolution = tonumber(prescript.bm_yresolution), - width = 1/basepoints, - height = 1/basepoints, - data = object.postscript - } - end - before[#before+1] = "Q" - object.path = false - object.color = false - object.grouped = true - end -end - --- positions - -local function ps_process(object,prescript,before,after) - local ps_label = prescript.ps_label - if ps_label then - local op = object.path - local first, third = op[1], op[3] - local x, y = first.x_coord, first.y_coord - local w, h = third.x_coord - x, third.y_coord - y - x = x - metapost.llx - y = metapost.ury - y - before[#before+1] = function() - context.MPLIBpositionwhd(ps_label,x,y,w,h) - end - object.path = false - end -end - --- figures - -local function fg_process(object,prescript,before,after) - local fg_name = prescript.fg_name - if fg_name then - before[#before+1] = f_cm(cm(object)) -- beware: does not use the cm stack - before[#before+1] = function() - context.MPLIBfigure(fg_name,prescript.fg_mask or "") - end - before[#before+1] = "Q" - object.path = false - object.grouped = true - end -end - --- color and transparency - -local value = Cs ( ( - (Carg(1) * C((1-P(","))^1)) / function(a,b) return format("%0.3f",a * tonumber(b)) end - + P(","))^1 -) - --- should be codeinjections - -local t_list = attributes.list[attributes.private('transparency')] -local c_list = attributes.list[attributes.private('color')] - -local function tr_process(object,prescript,before,after) - -- before can be shortcut to t - local tr_alternative = prescript.tr_alternative - if tr_alternative then - tr_alternative = tonumber(tr_alternative) - local tr_transparency = tonumber(prescript.tr_transparency) - before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,tr_alternative,tr_transparency,true)) - after[#after+1] = "/Tr0 gs" -- outertransparency - end - local cs = object.color - if cs and #cs > 0 then - local c_b, c_a - local sp_type = prescript.sp_type - if not sp_type then - c_b, c_a = colorconverter(cs) - elseif sp_type == "spot" or sp_type == "multitone" then - local sp_name = prescript.sp_name or "black" - local sp_fractions = 
prescript.sp_fractions or 1 - local sp_components = prescript.sp_components or "" - local sp_value = prescript.sp_value or "1" - local cf = cs[1] - if cf ~= 1 then - -- beware, we do scale the spotcolors but not the alternative representation - sp_value = lpegmatch(value,sp_value,1,cf) or sp_value - end - c_b, c_a = spotcolorconverter(sp_name,sp_fractions,sp_components,sp_value) - elseif sp_type == "named" then - -- we might move this to another namespace .. also, named can be a spotcolor - -- so we need to check for that too ... also we need to resolve indirect - -- colors so we might need the second pass for this (draw dots with \MPcolor) - local sp_name = prescript.sp_name or "black" - if not tr_alternative then - -- todo: sp_name is not yet registered at this time - local t = t_list[sp_name] -- string or attribute - local v = t and attributes.transparencies.value(t) - if v then - before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,v[1],v[2],true)) - after[#after+1] = "/Tr0 gs" -- outertransparency - end - end - local c = c_list[sp_name] -- string or attribute - local v = c and attributes.colors.value(c) - if v then - -- all=1 gray=2 rgb=3 cmyk=4 - local colorspace = v[1] - local f = cs[1] - if colorspace == 2 then - local s = f*v[2] - c_b, c_a = checked_color_pair(f_gray,s,s) - elseif colorspace == 3 then - local r, g, b = f*v[3], f*v[4], f*v[5] - c_b, c_a = checked_color_pair(f_rgb,r,g,b,r,g,b) - elseif colorspace == 4 or colorspace == 1 then - local c, m, y, k = f*v[6], f*v[7], f*v[8], f*v[9] - c_b, c_a = checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) - else - local s = f*v[2] - c_b, c_a = checked_color_pair(f_gray,s,s) - end - end - -- - end - if c_a and c_b then - before[#before+1] = c_b - after[#after+1] = c_a - end - end -end - --- layers (nasty: we need to keep the 'grouping' right - -local function la_process(object,prescript,before,after) - local la_name = prescript.la_name - if la_name then - before[#before+1] = backends.codeinjections.startlayer(la_name) - insert(after,1,backends.codeinjections.stoplayer()) - end -end - --- groups - -local types = { - isolated -} - -local function gr_process(object,prescript,before,after) - local gr_state = prescript.gr_state - if gr_state then - if gr_state == "start" then - local gr_type = utilities.parsers.settings_to_hash(prescript.gr_type) - before[#before+1] = function() - context.MPLIBstartgroup( - gr_type.isolated and 1 or 0, - gr_type.knockout and 1 or 0, - prescript.gr_llx, - prescript.gr_lly, - prescript.gr_urx, - prescript.gr_ury - ) - end - elseif gr_state == "stop" then - after[#after+1] = function() - context.MPLIBstopgroup() - end - end - object.path = false - object.color = false - object.grouped = true - end -end - --- definitions - -appendaction(resetteractions, "system",cl_reset) -appendaction(resetteractions, "system",tx_reset) - -appendaction(processoractions,"system",gr_process) - -appendaction(analyzeractions, "system",tx_analyze) -appendaction(analyzeractions, "system",gt_analyze) - -appendaction(processoractions,"system",sh_process) --- (processoractions,"system",gt_process) -appendaction(processoractions,"system",bm_process) -appendaction(processoractions,"system",tx_process) -appendaction(processoractions,"system",ps_process) -appendaction(processoractions,"system",fg_process) -appendaction(processoractions,"system",tr_process) -- last, as color can be reset - -appendaction(processoractions,"system",la_process) - --- we're nice and set them already - -resetter = resetteractions .runner -analyzer = 
analyzeractions .runner -processor = processoractions.runner +if not modules then modules = { } end modules ['mlib-pps'] = { + version = 1.001, + comment = "companion to mlib-ctx.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- todo: make a hashed textext variant where we only process the text once (normally +-- we cannot assume that no macros are involved which influence a next textext + +local format, gmatch, match, split = string.format, string.gmatch, string.match, string.split +local tonumber, type = tonumber, type +local round = math.round +local insert, concat = table.insert, table.concat +local Cs, Cf, C, Cg, Ct, P, S, V, Carg = lpeg.Cs, lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.V, lpeg.Carg +local lpegmatch = lpeg.match +local formatters = string.formatters + +local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context + +local texbox = tex.box +local copy_list = node.copy_list +local free_list = node.flush_list +local setmetatableindex = table.setmetatableindex +local sortedhash = table.sortedhash + +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming + +local trace_runs = false trackers.register("metapost.runs", function(v) trace_runs = v end) +local trace_textexts = false trackers.register("metapost.textexts", function(v) trace_textexts = v end) +local trace_scripts = false trackers.register("metapost.scripts", function(v) trace_scripts = v end) + +local report_metapost = logs.reporter("metapost") +local report_textexts = logs.reporter("metapost","textexts") +local report_scripts = logs.reporter("metapost","scripts") + +local colors = attributes.colors + +local rgbtocmyk = colors.rgbtocmyk or function() return 0,0,0,1 end +local cmyktorgb = colors.cmyktorgb or function() return 0,0,0 end +local rgbtogray = colors.rgbtogray or function() return 0 end +local cmyktogray = colors.cmyktogray or function() return 0 end + +metapost.makempy = metapost.makempy or { nofconverted = 0 } +local makempy = metapost.makempy + +local nooutercolor = "0 g 0 G" +local nooutertransparency = "/Tr0 gs" -- only when set +local outercolormode = 0 +local outercolor = nooutercolor +local outertransparency = nooutertransparency +local innercolor = nooutercolor +local innertransparency = nooutertransparency + +local pdfcolor = lpdf.color +local pdftransparency = lpdf.transparency +local registercolor = colors.register +local registerspotcolor = colors.registerspotcolor + +local transparencies = attributes.transparencies +local registertransparency = transparencies.register + +function metapost.setoutercolor(mode,colormodel,colorattribute,transparencyattribute) + -- has always to be called before conversion + -- todo: transparency (not in the mood now) + outercolormode = mode + if mode == 1 or mode == 3 then + -- inherit from outer (registered color) + outercolor = pdfcolor(colormodel,colorattribute) or nooutercolor + outertransparency = pdftransparency(transparencyattribute) or nooutertransparency + elseif mode == 2 then + -- stand alone (see m-punk.tex) + outercolor = "" + outertransparency = "" + else -- 0 + outercolor = nooutercolor + outertransparency = nooutertransparency + end + innercolor = outercolor + innertransparency = outertransparency -- not yet used +end + +local f_gray = formatters["%.3f g %.3f G"] +local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"] +local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f 
%.3f %.3f %.3f K"] +local f_cm = formatters["q %f %f %f %f %f %f cm"] +local f_shade = formatters["MpSh%s"] + +local function checked_color_pair(color,...) + if not color then + return innercolor, outercolor + end + if outercolormode == 3 then + innercolor = color(...) + return innercolor, innercolor + else + return color(...), outercolor + end +end + +function metapost.colorinitializer() + innercolor = outercolor + innertransparency = outertransparency + return outercolor, outertransparency +end + +--~ + +local specificationsplitter = lpeg.tsplitat(" ") +local colorsplitter = lpeg.tsplitter(":",tonumber) -- no need for : +local domainsplitter = lpeg.tsplitter(" ",tonumber) +local centersplitter = domainsplitter +local coordinatesplitter = domainsplitter + +-- thanks to taco's reading of the postscript manual: +-- +-- x' = sx * x + ry * y + tx +-- y' = rx * x + sy * y + ty + +local nofshades = 0 -- todo: hash resources, start at 1000 in order not to clash with older + +local function normalize(ca,cb) + if #cb == 1 then + if #ca == 4 then + cb[1], cb[2], cb[3], cb[4] = 0, 0, 0, 1-cb[1] + else + cb[1], cb[2], cb[3] = cb[1], cb[1], cb[1] + end + elseif #cb == 3 then + if #ca == 4 then + cb[1], cb[2], cb[3], cb[4] = rgbtocmyk(cb[1],cb[2],cb[3]) + else + cb[1], cb[2], cb[3] = cmyktorgb(cb[1],cb[2],cb[3],cb[4]) + end + end +end + +-- todo: check for the same colorspace (actually a backend issue), now we can +-- have several similar resources +-- +-- normalize(ca,cb) fails for spotcolors + +local function spotcolorconverter(parent, n, d, p) + registerspotcolor(parent) + return pdfcolor(colors.model,registercolor(nil,'spot',parent,n,d,p)), outercolor +end + +local commasplitter = lpeg.tsplitat(",") + +local function checkandconvertspot(n_a,f_a,c_a,v_a,n_b,f_b,c_b,v_b) + -- must be the same but we don't check + local name = f_shade(nofshades) + local ca = lpegmatch(commasplitter,v_a) + local cb = lpegmatch(commasplitter,v_b) + if #ca == 0 or #cb == 0 then + return { 0 }, { 1 }, "DeviceGray", name + else + for i=1,#ca do ca[i] = tonumber(ca[i]) or 0 end + for i=1,#cb do cb[i] = tonumber(cb[i]) or 1 end + --~ spotcolorconverter(n_a,f_a,c_a,v_a) -- not really needed + return ca, cb, n_a or n_b, name + end +end + +local function checkandconvert(ca,cb) + local name = f_shade(nofshades) + if not ca or not cb or type(ca) == "string" then + return { 0 }, { 1 }, "DeviceGray", name + else + if #ca > #cb then + normalize(ca,cb) + elseif #ca < #cb then + normalize(cb,ca) + end + local model = colors.model + if model == "all" then + model= (#ca == 4 and "cmyk") or (#ca == 3 and "rgb") or "gray" + end + if model == "rgb" then + if #ca == 4 then + ca = { cmyktorgb(ca[1],ca[2],ca[3],ca[4]) } + cb = { cmyktorgb(cb[1],cb[2],cb[3],cb[4]) } + elseif #ca == 1 then + local a, b = 1-ca[1], 1-cb[1] + ca = { a, a, a } + cb = { b, b, b } + end + return ca, cb, "DeviceRGB", name + elseif model == "cmyk" then + if #ca == 3 then + ca = { rgbtocmyk(ca[1],ca[2],ca[3]) } + cb = { rgbtocmyk(cb[1],cb[2],cb[3]) } + elseif #ca == 1 then + ca = { 0, 0, 0, ca[1] } + cb = { 0, 0, 0, ca[1] } + end + return ca, cb, "DeviceCMYK", name + else + if #ca == 4 then + ca = { cmyktogray(ca[1],ca[2],ca[3],ca[4]) } + cb = { cmyktogray(cb[1],cb[2],cb[3],cb[4]) } + elseif #ca == 3 then + ca = { rgbtogray(ca[1],ca[2],ca[3]) } + cb = { rgbtogray(cb[1],cb[2],cb[3]) } + end + -- backend specific (will be renamed) + return ca, cb, "DeviceGray", name + end + end +end + +local current_format, current_graphic, current_initializations + +metapost.multipass = 
false + +local textexts = { } -- all boxes, optionally with a different color +local texslots = { } -- references to textexts in order or usage +local texorder = { } -- references to textexts by mp index +local textrial = 0 +local texfinal = 0 +local scratchbox = 0 + +local function freeboxes() + for n, box in next, textexts do + local tn = textexts[n] + if tn then + free_list(tn) + -- texbox[scratchbox] = tn + -- texbox[scratchbox] = nil -- this frees too + if trace_textexts then + report_textexts("freeing box %s",n) + end + end + end + textexts = { } + texslots = { } + texorder = { } + textrial = 0 + texfinal = 0 +end + +metapost.resettextexts = freeboxes + +function metapost.settext(box,slot) + textexts[slot] = copy_list(texbox[box]) + texbox[box] = nil + -- this will become + -- textexts[slot] = texbox[box] + -- unsetbox(box) +end + +function metapost.gettext(box,slot) + texbox[box] = copy_list(textexts[slot]) + if trace_textexts then + report_textexts("putting text %s in box %s",slot,box) + end + -- textexts[slot] = nil -- no, pictures can be placed several times +end + +-- rather generic pdf, so use this elsewhere too it no longer pays +-- off to distinguish between outline and fill (we now have both +-- too, e.g. in arrows) + +metapost.reducetogray = true + +local models = { } + +function models.all(cr) + local n = #cr + if n == 0 then + return checked_color_pair() + elseif metapost.reducetogray then + if n == 1 then + local s = cr[1] + return checked_color_pair(f_gray,s,s) + elseif n == 3 then + local r, g, b = cr[1], cr[2], cr[3] + if r == g and g == b then + return checked_color_pair(f_gray,r,r) + else + return checked_color_pair(f_rgb,r,g,b,r,g,b) + end + else + local c, m, y, k = cr[1], cr[2], cr[3], cr[4] + if c == m and m == y and y == 0 then + k = 1 - k + return checked_color_pair(f_gray,k,k) + else + return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) + end + end + elseif n == 1 then + local s = cr[1] + return checked_color_pair(f_gray,s,s) + elseif n == 3 then + local r, g, b = cr[1], cr[2], cr[3] + return checked_color_pair(f_rgb,r,g,b,r,g,b) + else + local c, m, y, k = cr[1], cr[2], cr[3], cr[4] + return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) + end +end + +function models.rgb(cr) + local n = #cr + if n == 0 then + return checked_color_pair() + elseif metapost.reducetogray then + if n == 1 then + local s = cr[1] + checked_color_pair(f_gray,s,s) + elseif n == 3 then + local r, g, b = cr[1], cr[2], cr[3] + if r == g and g == b then + return checked_color_pair(f_gray,r,r) + else + return checked_color_pair(f_rgb,r,g,b,r,g,b) + end + else + local c, m, y, k = cr[1], cr[2], cr[3], cr[4] + if c == m and m == y and y == 0 then + k = 1 - k + return checked_color_pair(f_gray,k,k) + else + local r, g, b = cmyktorgb(c,m,y,k) + return checked_color_pair(f_rgb,r,g,b,r,g,b) + end + end + elseif n == 1 then + local s = cr[1] + return checked_color_pair(f_gray,s,s) + else + local r, g, b + if n == 3 then + r, g, b = cmyktorgb(cr[1],cr[2],cr[3],cr[4]) + else + r, g, b = cr[1], cr[2], cr[3] + end + return checked_color_pair(f_rgb,r,g,b,r,g,b) + end +end + +function models.cmyk(cr) + local n = #cr + if n == 0 then + return checked_color_pair() + elseif metapost.reducetogray then + if n == 1 then + local s = cr[1] + return checked_color_pair(f_gray,s,s) + elseif n == 3 then + local r, g, b = cr[1], cr[2], cr[3] + if r == g and g == b then + return checked_color_pair(f_gray,r,r) + else + local c, m, y, k = rgbtocmyk(r,g,b) + return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) + end + else + 
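-- Illustrative sketch only (not part of this changeset): with metapost.reducetogray
-- enabled, the model converters above collapse colors whose components are all
-- equal into a gray pair, while other colors keep their colorspace. The sample
-- values below are made up; the returned strings come from f_gray and f_rgb and
-- are normally paired with the outer color.
local gray_pair = { models.all { 0.5, 0.5, 0.5 } } -- gray fill/stroke literals plus the outer color
local rgb_pair  = { models.all { 1, 0, 0 } }       -- rgb fill/stroke literals plus the outer color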
local c, m, y, k = cr[1], cr[2], cr[3], cr[4] + if c == m and m == y and y == 0 then + k = k - 1 + return checked_color_pair(f_gray,k,k) + else + return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) + end + end + elseif n == 1 then + local s = cr[1] + return checked_color_pair(f_gray,s,s) + else + local c, m, y, k + if n == 3 then + c, m, y, k = rgbtocmyk(cr[1],cr[2],cr[3]) + else + c, m, y, k = cr[1], cr[2], cr[3], cr[4] + end + return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) + end +end + +function models.gray(cr) + local n, s = #cr, 0 + if n == 0 then + return checked_color_pair() + elseif n == 4 then + s = cmyktogray(cr[1],cr[2],cr[3],cr[4]) + elseif n == 3 then + s = rgbtogray(cr[1],cr[2],cr[3]) + else + s = cr[1] + end + return checked_color_pair(f_gray,s,s) +end + +setmetatableindex(models, function(t,k) + local v = models.gray + t[k] = v + return v +end) + +local function colorconverter(cs) + return models[colors.model](cs) +end + +local btex = P("btex") +local etex = P(" etex") +local vtex = P("verbatimtex") +local ttex = P("textext") +local gtex = P("graphictext") +local multipass = P("forcemultipass") +local spacing = S(" \n\r\t\v")^0 +local dquote = P('"') + +local found, forced = false, false + +local function convert(str) + found = true + return "rawtextext(\"" .. str .. "\")" -- centered +end +local function ditto(str) + return "\" & ditto & \"" +end +local function register() + found = true +end +local function force() + forced = true +end + +local texmess = (dquote/ditto + (1 - etex))^0 + +local function ignore(s) + report_metapost("ignoring verbatim tex: %s",s) + return "" +end + +-- local parser = P { +-- [1] = Cs((V(2)/register + V(4)/ignore + V(3)/convert + V(5)/force + 1)^0), +-- [2] = ttex + gtex, +-- [3] = btex * spacing * Cs(texmess) * etex, +-- [4] = vtex * spacing * Cs(texmess) * etex, +-- [5] = multipass, -- experimental, only for testing +-- } + +-- currently a a one-liner produces less code + +-- textext.*(".*") can have "'s but tricky parsing as we can have concatenated strings +-- so this is something for a boring plain or train trip and we might assume proper mp +-- input anyway + +local parser = Cs(( + (ttex + gtex)/register + + (btex * spacing * Cs(texmess) * etex)/convert + + (vtex * spacing * Cs(texmess) * etex)/ignore + + 1 +)^0) + +local function checktexts(str) + found, forced = false, false + return lpegmatch(parser,str), found, forced +end + +metapost.checktexts = checktexts + +local factor = 65536*(7227/7200) + +function metapost.edefsxsy(wd,ht,dp) -- helper for figure + local hd = ht + dp + context.setvalue("sx",wd ~= 0 and factor/wd or 0) + context.setvalue("sy",hd ~= 0 and factor/hd or 0) +end + +local function sxsy(wd,ht,dp) -- helper for text + local hd = ht + dp + return (wd ~= 0 and factor/wd) or 0, (hd ~= 0 and factor/hd) or 0 +end + +local no_first_run = "mfun_first_run := false ;" +local do_first_run = "mfun_first_run := true ;" +local no_trial_run = "mfun_trial_run := false ;" +local do_trial_run = "mfun_trial_run := true ;" +local do_begin_fig = "; beginfig(1) ; " +local do_end_fig = "; endfig ;" +local do_safeguard = ";" + +local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"] + +function metapost.textextsdata() + local t, nt, n = { }, 0, 0 + for n=1,#texorder do + local box = textexts[texorder[n]] + if box then + local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor + if trace_textexts then + report_textexts("passed data item %s: (%p,%p,%p)",n,wd,ht,dp) + end + nt = nt + 
1 + t[nt] = f_text_data(n,wd,n,ht,n,dp) + else + break + end + end +-- inspect(t) + return t +end + +metapost.intermediate = metapost.intermediate or {} +metapost.intermediate.actions = metapost.intermediate.actions or {} +metapost.intermediate.needed = false + +metapost.method = 1 -- 1:dumb 2:clever + +-- maybe we can latelua the texts some day + +local nofruns = 0 -- askedfig: "all", "first", number + +local function checkaskedfig(askedfig) -- return askedfig, wrappit + if not askedfig then + return "direct", true + elseif askedfig == "all" then + return "all", false + elseif askedfig == "direct" then + return "all", true + else + askedfig = tonumber(askedfig) + if askedfig then + return askedfig, false + else + return "direct", true + end + end +end + +function metapost.graphic_base_pass(specification) + local mpx = specification.mpx -- mandate + local data = specification.data or "" + local definitions = specification.definitions or "" +-- local extensions = metapost.getextensions(specification.instance,specification.useextensions) + local extensions = specification.extensions or "" + local inclusions = specification.inclusions or "" + local initializations = specification.initializations or "" + local askedfig = specification.figure -- no default else no wrapper + -- + nofruns = nofruns + 1 + local askedfig, wrappit = checkaskedfig(askedfig) + local done_1, done_2, done_3, forced_1, forced_2, forced_3 + data, done_1, forced_1 = checktexts(data) + -- we had preamble = extensions + inclusions + if extensions == "" then + extensions, done_2, forced_2 = "", false, false + else + extensions, done_2, forced_2 = checktexts(extensions) + end + if inclusions == "" then + inclusions, done_3, forced_3 = "", false, false + else + inclusions, done_3, forced_3 = checktexts(inclusions) + end + metapost.intermediate.needed = false + metapost.multipass = false -- no needed here + current_format = mpx + current_graphic = data + current_initializations = initializations + local method = metapost.method + if trace_runs then + if method == 1 then + report_metapost("forcing two runs due to library configuration") + elseif method ~= 2 then + report_metapost("ignoring run due to library configuration") + elseif not (done_1 or done_2 or done_3) then + report_metapost("forcing one run only due to analysis") + elseif done_1 then + report_metapost("forcing at max two runs due to main code") + elseif done_2 then + report_metapost("forcing at max two runs due to extensions") + else + report_metapost("forcing at max two runs due to inclusions") + end + end + if method == 1 or (method == 2 and (done_1 or done_2 or done_3)) then + if trace_runs then + report_metapost("first run of job %s, asked figure %a",nofruns,askedfig) + end + -- first true means: trialrun, second true means: avoid extra run if no multipass + local flushed = metapost.process(mpx, { + definitions, + extensions, + inclusions, + wrappit and do_begin_fig or "", + do_first_run, + do_trial_run, + current_initializations, + do_safeguard, + current_graphic, + wrappit and do_end_fig or "", + }, true, nil, not (forced_1 or forced_2 or forced_3), false, askedfig) + if metapost.intermediate.needed then + for _, action in next, metapost.intermediate.actions do + action() + end + end + if not flushed or not metapost.optimize then + -- tricky, we can only ask once for objects and therefore + -- we really need a second run when not optimized + context.MPLIBextrapass(askedfig) + end + else + if trace_runs then + report_metapost("running job %s, asked figure 
%a",nofruns,askedfig) + end + metapost.process(mpx, { + preamble, + wrappit and do_begin_fig or "", + do_first_run, + no_trial_run, + current_initializations, + do_safeguard, + current_graphic, + wrappit and do_end_fig or "", + }, false, nil, false, false, askedfig) + end +end + +function metapost.graphic_extra_pass(askedfig) + if trace_runs then + report_metapost("second run of job %s, asked figure %a",nofruns,askedfig) + end + local askedfig, wrappit = checkaskedfig(askedfig) + metapost.process(current_format, { + wrappit and do_begin_fig or "", + no_trial_run, + concat(metapost.textextsdata()," ;\n"), + current_initializations, + do_safeguard, + current_graphic, + wrappit and do_end_fig or "", + }, false, nil, false, true, askedfig) + context.MPLIBresettexts() -- must happen afterwards +end + +local start = [[\starttext]] +local preamble = [[\long\def\MPLIBgraphictext#1{\startTEXpage[scale=10000]#1\stopTEXpage}]] +local stop = [[\stoptext]] + +function makempy.processgraphics(graphics) + if #graphics > 0 then + makempy.nofconverted = makempy.nofconverted + 1 + starttiming(makempy) + local mpofile = tex.jobname .. "-mpgraph" + local mpyfile = file.replacesuffix(mpofile,"mpy") + local pdffile = file.replacesuffix(mpofile,"pdf") + local texfile = file.replacesuffix(mpofile,"tex") + io.savedata(texfile, { start, preamble, metapost.tex.get(), concat(graphics,"\n"), stop }, "\n") + local command = format("context --once %s %s", (tex.interactionmode == 0 and "--batchmode") or "", texfile) + os.execute(command) + if io.exists(pdffile) then + command = format("pstoedit -ssp -dt -f mpost %s %s", pdffile, mpyfile) + os.execute(command) + local result, r = { }, 0 + if io.exists(mpyfile) then + local data = io.loaddata(mpyfile) + for figure in gmatch(data,"beginfig(.-)endfig") do + r = r + 1 + result[r] = formatters["begingraphictextfig%sendgraphictextfig ;\n"](figure) + end + io.savedata(mpyfile,concat(result,"")) + end + end + stoptiming(makempy) + end +end + +-- -- the new plugin handler -- -- + +local sequencers = utilities.sequencers +local appendgroup = sequencers.appendgroup +local appendaction = sequencers.appendaction + +local resetter = nil +local analyzer = nil +local processor = nil + +local resetteractions = sequencers.new { arguments = "t" } +local analyzeractions = sequencers.new { arguments = "object,prescript" } +local processoractions = sequencers.new { arguments = "object,prescript,before,after" } + +appendgroup(resetteractions, "system") +appendgroup(analyzeractions, "system") +appendgroup(processoractions, "system") + +-- later entries come first + +--~ local scriptsplitter = Cf(Ct("") * ( +--~ Cg(C((1-S("= "))^1) * S("= ")^1 * C((1-S("\n\r"))^0) * S("\n\r")^0) +--~ )^0, rawset) + +local scriptsplitter = Ct ( Ct ( + C((1-S("= "))^1) * S("= ")^1 * C((1-S("\n\r"))^0) * S("\n\r")^0 +)^0 ) + +local function splitprescript(script) + local hash = lpegmatch(scriptsplitter,script) + for i=#hash,1,-1 do + local h = hash[i] + hash[h[1]] = h[2] + end + if trace_scripts then + report_scripts(table.serialize(hash,"prescript")) + end + return hash +end + +-- -- not used: +-- +-- local function splitpostscript(script) +-- local hash = lpegmatch(scriptsplitter,script) +-- for i=1,#hash do +-- local h = hash[i] +-- hash[h[1]] = h[2] +-- end +-- if trace_scripts then +-- report_scripts(table.serialize(hash,"postscript")) +-- end +-- return hash +-- end + +function metapost.pluginactions(what,t,flushfigure) -- before/after object, depending on what + for i=1,#what do + local wi = what[i] + if 
type(wi) == "function" then + -- assume injection + flushfigure(t) -- to be checked: too many 0 g 0 G + t = { } + wi() + else + t[#t+1] = wi + end + end + return t +end + +function metapost.resetplugins(t) -- intialize plugins, before figure + -- plugins can have been added + resetter = resetteractions .runner + analyzer = analyzeractions .runner + processor = processoractions .runner + -- let's apply one runner + resetter(t) +end + +function metapost.analyzeplugins(object) -- each object (first pass) + local prescript = object.prescript -- specifications + if prescript and #prescript > 0 then + return analyzer(object,splitprescript(prescript)) + end +end + +function metapost.processplugins(object) -- each object (second pass) + local prescript = object.prescript -- specifications + if prescript and #prescript > 0 then + local before = { } + local after = { } + processor(object,splitprescript(prescript),before,after) + return #before > 0 and before, #after > 0 and after + else + local c = object.color + if c and #c > 0 then + local b, a = colorconverter(c) + return { b }, { a } + end + end +end + +-- helpers + +local basepoints = number.dimenfactors["bp"] + +local function cm(object) + local op = object.path + if op then + local first, second, fourth = op[1], op[2], op[4] + local tx, ty = first.x_coord , first.y_coord + local sx, sy = second.x_coord - tx, fourth.y_coord - ty + local rx, ry = second.y_coord - ty, fourth.x_coord - tx + if sx == 0 then sx = 0.00001 end + if sy == 0 then sy = 0.00001 end + return sx, rx, ry, sy, tx, ty + else + return 1, 0, 0, 1, 0, 0 -- weird case + end +end + +-- color + +local function cl_reset(t) + t[#t+1] = metapost.colorinitializer() -- only color +end + +local tx_hash = { } +local tx_last = 0 + +local function tx_reset() + tx_hash = { } + tx_last = 0 +end + +local fmt = formatters["%s %s %s % t"] + +local function tx_analyze(object,prescript) -- todo: hash content and reuse them + local tx_stage = prescript.tx_stage + if tx_stage == "trial" then + textrial = textrial + 1 + local tx_number = tonumber(prescript.tx_number) + local s = object.postscript or "" + local c = object.color -- only simple ones, no transparency + local a = prescript.tr_alternative + local t = prescript.tr_transparency + local h = fmt(tx_number,a or "?",t or "?",c) + local n = tx_hash[h] -- todo: hashed variant with s (nicer for similar labels) + if not n then + tx_last = tx_last + 1 + if not c then + -- no color + elseif #c == 1 then + if a and t then + s = formatters["\\directcolored[s=%f,a=%f,t=%f]%s"](c[1],a,t,s) + else + s = formatters["\\directcolored[s=%f]%s"](c[1],s) + end + elseif #c == 3 then + if a and t then + s = formatters["\\directcolored[r=%f,g=%f,b=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],a,t,s) + else + s = formatters["\\directcolored[r=%f,g=%f,b=%f]%s"](c[1],c[2],c[3],s) + end + elseif #c == 4 then + if a and t then + s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],c[4],a,t,s) + else + s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f]%s"](c[1],c[2],c[3],c[4],s) + end + end + context.MPLIBsettext(tx_last,s) + metapost.multipass = true + tx_hash[h] = tx_last + texslots[textrial] = tx_last + texorder[tx_number] = tx_last + if trace_textexts then + report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,tx_last,h) + end + else + texslots[textrial] = n + if trace_textexts then + report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,n,h) + end + end + elseif tx_stage 
== "extra" then + textrial = textrial + 1 + local tx_number = tonumber(prescript.tx_number) + if not texorder[tx_number] then + local s = object.postscript or "" + tx_last = tx_last + 1 + context.MPLIBsettext(tx_last,s) + metapost.multipass = true + texslots[textrial] = tx_last + texorder[tx_number] = tx_last + if trace_textexts then + report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,textrial,tx_number,tx_last) + end + end + end +end + +local function tx_process(object,prescript,before,after) + local tx_number = prescript.tx_number + if tx_number then + tx_number = tonumber(tx_number) + local tx_stage = prescript.tx_stage + if tx_stage == "final" then + texfinal = texfinal + 1 + local n = texslots[texfinal] + if trace_textexts then + report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,texfinal,tx_number,n) + end + local sx, rx, ry, sy, tx, ty = cm(object) -- needs to be frozen outside the function + local box = textexts[n] + if box then + before[#before+1] = function() + -- flush always happens, we can have a special flush function injected before + context.MPLIBgettextscaledcm(n, + format("%f",sx), -- bah ... %s no longer checks + format("%f",rx), -- bah ... %s no longer checks + format("%f",ry), -- bah ... %s no longer checks + format("%f",sy), -- bah ... %s no longer checks + format("%f",tx), -- bah ... %s no longer checks + format("%f",ty), -- bah ... %s no longer checks + sxsy(box.width,box.height,box.depth)) + end + else + before[#before+1] = function() + report_textexts("unknown %s",tx_number) + end + end + if not trace_textexts then + object.path = false -- else: keep it + end + object.color = false + object.grouped = true + end + end +end + +-- graphics + +local graphics = { } + +function metapost.intermediate.actions.makempy() + if #graphics > 0 then + makempy.processgraphics(graphics) + graphics = { } -- ? 
+ end +end + +local function gt_analyze(object,prescript) + local gt_stage = prescript.gt_stage + if gt_stage == "trial" then + graphics[#graphics+1] = formatters["\\MPLIBgraphictext{%s}"](object.postscript or "") + metapost.intermediate.needed = true + metapost.multipass = true + end +end + +-- local function gt_process(object,prescript,before,after) +-- local gt_stage = prescript.gt_stage +-- if gt_stage == "final" then +-- end +-- end + +-- shades + +local function sh_process(object,prescript,before,after) + local sh_type = prescript.sh_type + if sh_type then + nofshades = nofshades + 1 + local domain = lpegmatch(domainsplitter,prescript.sh_domain) + local centera = lpegmatch(centersplitter,prescript.sh_center_a) + local centerb = lpegmatch(centersplitter,prescript.sh_center_b) + -- + local sh_color_a = prescript.sh_color_a or "1" + local sh_color_b = prescript.sh_color_b or "1" + local ca, cb, colorspace, name, separation + if prescript.sh_color == "into" and prescript.sp_name then + -- some spotcolor + local value_a, components_a, fractions_a, name_a + local value_b, components_b, fractions_b, name_b + for i=1,#prescript do + -- { "sh_color_a", "1" }, + -- { "sh_color", "into" }, + -- { "sh_radius_b", "0" }, + -- { "sh_radius_a", "141.73225" }, + -- { "sh_center_b", "425.19676 141.73225" }, + -- { "sh_center_a", "425.19676 0" }, + -- { "sh_factor", "1" }, + local tag = prescript[i][1] + if not name_a and tag == "sh_color_a" then + value_a = prescript[i-5][2] + components_a = prescript[i-4][2] + fractions_a = prescript[i-3][2] + name_a = prescript[i-2][2] + elseif not name_b and tag == "sh_color_b" then + value_b = prescript[i-5][2] + components_b = prescript[i-4][2] + fractions_b = prescript[i-3][2] + name_b = prescript[i-2][2] + end + if name_a and name_b then + break + end + end + ca, cb, separation, name = checkandconvertspot( + name_a,fractions_a,components_a,value_a, + name_b,fractions_b,components_b,value_b + ) + else + local colora = lpegmatch(colorsplitter,sh_color_a) + local colorb = lpegmatch(colorsplitter,sh_color_b) + ca, cb, colorspace, name = checkandconvert(colora,colorb) + end + if not ca or not cb then + ca, cb, colorspace, name = checkandconvert() + end + if sh_type == "linear" then + local coordinates = { centera[1], centera[2], centerb[1], centerb[2] } + lpdf.linearshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed) + elseif sh_type == "circular" then + local radiusa = tonumber(prescript.sh_radius_a) + local radiusb = tonumber(prescript.sh_radius_b) + local coordinates = { centera[1], centera[2], radiusa, centerb[1], centerb[2], radiusb } + lpdf.circularshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed) + else + -- fatal error + end + before[#before+1], after[#after+1] = "q /Pattern cs", formatters["W n /%s sh Q"](name) + -- false, not nil, else mt triggered + object.colored = false -- hm, not object.color ? 
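-- A minimal sketch (hypothetical, not part of this changeset) of an additional
-- prescript driven processor following the same pattern as the sh/bm/tx handlers
-- in this file: it checks its own prescript key and pushes pdf literals or
-- callbacks into the before/after tables. The key "xx_label" is made up for
-- illustration only.
local function xx_process(object,prescript,before,after)
    local xx_label = prescript.xx_label
    if xx_label then
        before[#before+1] = function()
            report_metapost("object tagged with label %a",xx_label)
        end
    end
end
-- such a handler would be hooked in with: appendaction(processoractions,"system",xx_process)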
+ object.type = false + object.grouped = true + end +end + +-- bitmaps + +local function bm_process(object,prescript,before,after) + local bm_xresolution = prescript.bm_xresolution + if bm_xresolution then + before[#before+1] = f_cm(cm(object)) + before[#before+1] = function() + figures.bitmapimage { + xresolution = tonumber(bm_xresolution), + yresolution = tonumber(prescript.bm_yresolution), + width = 1/basepoints, + height = 1/basepoints, + data = object.postscript + } + end + before[#before+1] = "Q" + object.path = false + object.color = false + object.grouped = true + end +end + +-- positions + +local function ps_process(object,prescript,before,after) + local ps_label = prescript.ps_label + if ps_label then + local op = object.path + local first, third = op[1], op[3] + local x, y = first.x_coord, first.y_coord + local w, h = third.x_coord - x, third.y_coord - y + x = x - metapost.llx + y = metapost.ury - y + before[#before+1] = function() + context.MPLIBpositionwhd(ps_label,x,y,w,h) + end + object.path = false + end +end + +-- figures + +local function fg_process(object,prescript,before,after) + local fg_name = prescript.fg_name + if fg_name then + before[#before+1] = f_cm(cm(object)) -- beware: does not use the cm stack + before[#before+1] = function() + context.MPLIBfigure(fg_name,prescript.fg_mask or "") + end + before[#before+1] = "Q" + object.path = false + object.grouped = true + end +end + +-- color and transparency + +local value = Cs ( ( + (Carg(1) * C((1-P(","))^1)) / function(a,b) return format("%0.3f",a * tonumber(b)) end + + P(","))^1 +) + +-- should be codeinjections + +local t_list = attributes.list[attributes.private('transparency')] +local c_list = attributes.list[attributes.private('color')] + +local function tr_process(object,prescript,before,after) + -- before can be shortcut to t + local tr_alternative = prescript.tr_alternative + if tr_alternative then + tr_alternative = tonumber(tr_alternative) + local tr_transparency = tonumber(prescript.tr_transparency) + before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,tr_alternative,tr_transparency,true)) + after[#after+1] = "/Tr0 gs" -- outertransparency + end + local cs = object.color + if cs and #cs > 0 then + local c_b, c_a + local sp_type = prescript.sp_type + if not sp_type then + c_b, c_a = colorconverter(cs) + elseif sp_type == "spot" or sp_type == "multitone" then + local sp_name = prescript.sp_name or "black" + local sp_fractions = prescript.sp_fractions or 1 + local sp_components = prescript.sp_components or "" + local sp_value = prescript.sp_value or "1" + local cf = cs[1] + if cf ~= 1 then + -- beware, we do scale the spotcolors but not the alternative representation + sp_value = lpegmatch(value,sp_value,1,cf) or sp_value + end + c_b, c_a = spotcolorconverter(sp_name,sp_fractions,sp_components,sp_value) + elseif sp_type == "named" then + -- we might move this to another namespace .. also, named can be a spotcolor + -- so we need to check for that too ... 
also we need to resolve indirect + -- colors so we might need the second pass for this (draw dots with \MPcolor) + local sp_name = prescript.sp_name or "black" + if not tr_alternative then + -- todo: sp_name is not yet registered at this time + local t = t_list[sp_name] -- string or attribute + local v = t and attributes.transparencies.value(t) + if v then + before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,v[1],v[2],true)) + after[#after+1] = "/Tr0 gs" -- outertransparency + end + end + local c = c_list[sp_name] -- string or attribute + local v = c and attributes.colors.value(c) + if v then + -- all=1 gray=2 rgb=3 cmyk=4 + local colorspace = v[1] + local f = cs[1] + if colorspace == 2 then + local s = f*v[2] + c_b, c_a = checked_color_pair(f_gray,s,s) + elseif colorspace == 3 then + local r, g, b = f*v[3], f*v[4], f*v[5] + c_b, c_a = checked_color_pair(f_rgb,r,g,b,r,g,b) + elseif colorspace == 4 or colorspace == 1 then + local c, m, y, k = f*v[6], f*v[7], f*v[8], f*v[9] + c_b, c_a = checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k) + else + local s = f*v[2] + c_b, c_a = checked_color_pair(f_gray,s,s) + end + end + -- + end + if c_a and c_b then + before[#before+1] = c_b + after[#after+1] = c_a + end + end +end + +-- layers (nasty: we need to keep the 'grouping' right + +local function la_process(object,prescript,before,after) + local la_name = prescript.la_name + if la_name then + before[#before+1] = backends.codeinjections.startlayer(la_name) + insert(after,1,backends.codeinjections.stoplayer()) + end +end + +-- groups + +local types = { + isolated +} + +local function gr_process(object,prescript,before,after) + local gr_state = prescript.gr_state + if gr_state then + if gr_state == "start" then + local gr_type = utilities.parsers.settings_to_hash(prescript.gr_type) + before[#before+1] = function() + context.MPLIBstartgroup( + gr_type.isolated and 1 or 0, + gr_type.knockout and 1 or 0, + prescript.gr_llx, + prescript.gr_lly, + prescript.gr_urx, + prescript.gr_ury + ) + end + elseif gr_state == "stop" then + after[#after+1] = function() + context.MPLIBstopgroup() + end + end + object.path = false + object.color = false + object.grouped = true + end +end + +-- definitions + +appendaction(resetteractions, "system",cl_reset) +appendaction(resetteractions, "system",tx_reset) + +appendaction(processoractions,"system",gr_process) + +appendaction(analyzeractions, "system",tx_analyze) +appendaction(analyzeractions, "system",gt_analyze) + +appendaction(processoractions,"system",sh_process) +-- (processoractions,"system",gt_process) +appendaction(processoractions,"system",bm_process) +appendaction(processoractions,"system",tx_process) +appendaction(processoractions,"system",ps_process) +appendaction(processoractions,"system",fg_process) +appendaction(processoractions,"system",tr_process) -- last, as color can be reset + +appendaction(processoractions,"system",la_process) + +-- we're nice and set them already + +resetter = resetteractions .runner +analyzer = analyzeractions .runner +processor = processoractions.runner diff --git a/tex/context/base/mlib-run.lua b/tex/context/base/mlib-run.lua index 1fc36dd80..3915196b0 100644 --- a/tex/context/base/mlib-run.lua +++ b/tex/context/base/mlib-run.lua @@ -1,591 +1,591 @@ -if not modules then modules = { } end modules ['mlib-run'] = { - version = 1.001, - comment = "companion to mlib-ctx.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", 
-} - ---~ cmyk -> done, native ---~ spot -> done, but needs reworking (simpler) ---~ multitone -> ---~ shade -> partly done, todo: cm ---~ figure -> done ---~ hyperlink -> low priority, easy - --- new * run --- or --- new * execute^1 * finish - --- a*[b,c] == b + a * (c-b) - ---[[ldx-- -

    The directional helpers and pen analysis are more or less translated from the -C code. It really helps that Taco knows that source so well. Taco and I spent -quite some time on speeding up the Lua and C code. There is not -much to gain, especially if one keeps in mind that when integrated in TeX -only a part of the time is spent in MetaPost. Of course an integrated -approach is way faster than an external MetaPost run and processing time -nears zero.

    ---ldx]]-- - -local type, tostring, tonumber = type, tostring, tonumber -local format, gsub, match, find = string.format, string.gsub, string.match, string.find -local concat = table.concat -local emptystring = string.is_empty -local lpegmatch, P = lpeg.match, lpeg.P - -local trace_graphics = false trackers.register("metapost.graphics", function(v) trace_graphics = v end) -local trace_tracingall = false trackers.register("metapost.tracingall", function(v) trace_tracingall = v end) - -local report_metapost = logs.reporter("metapost") -local texerrormessage = logs.texerrormessage - -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming - -local mplib = mplib -metapost = metapost or { } -local metapost = metapost - -local mplibone = tonumber(mplib.version()) <= 1.50 - -metapost.showlog = false -metapost.lastlog = "" -metapost.collapse = true -- currently mplib cannot deal with begingroup/endgroup mismatch in stepwise processing -metapost.texerrors = false -metapost.exectime = metapost.exectime or { } -- hack - --- metapost.collapse = false - -directives.register("mplib.texerrors", function(v) metapost.texerrors = v end) -trackers.register ("metapost.showlog", function(v) metapost.showlog = v end) - -function metapost.resetlastlog() - metapost.lastlog = "" -end - ------ mpbasepath = lpeg.instringchecker(lpeg.append { "/metapost/context/", "/metapost/base/" }) -local mpbasepath = lpeg.instringchecker(P("/metapost/") * (P("context") + P("base")) * P("/")) - --- local function i_finder(askedname,mode,ftype) -- fake message for mpost.map and metafun.mpvi --- local foundname = file.is_qualified_path(askedname) and askedname or resolvers.findfile(askedname,ftype) --- if not mpbasepath(foundname) then --- -- we could use the via file but we don't have a complete io interface yet --- local data, found, forced = metapost.checktexts(io.loaddata(foundname) or "") --- if found then --- local tempname = luatex.registertempfile(foundname,true) --- io.savedata(tempname,data) --- foundname = tempname --- end --- end --- return foundname --- end - --- mplib has no real io interface so we have a different mechanism than --- tex (as soon as we have more control, we will use the normal code) - -local finders = { } -mplib.finders = finders - --- for some reason mp sometimes calls this function twice which is inefficient --- but we cannot catch this - -local function preprocessed(name) - if not mpbasepath(name) then - -- we could use the via file but we don't have a complete io interface yet - local data, found, forced = metapost.checktexts(io.loaddata(name) or "") - if found then - local temp = luatex.registertempfile(name,true) - io.savedata(temp,data) - return temp - end - end - return name -end - -mplib.preprocessed = preprocessed -- helper - -finders.file = function(specification,name,mode,ftype) - return preprocessed(resolvers.findfile(name,ftype)) -end - -local function i_finder(name,mode,ftype) -- fake message for mpost.map and metafun.mpvi - local specification = url.hashed(name) - local finder = finders[specification.scheme] or finders.file - return finder(specification,name,mode,ftype) -end - -local function o_finder(name,mode,ftype) - -- report_metapost("output file %a, mode %a, ftype %a",name,mode,ftype) - return name -end - -local function finder(name, mode, ftype) - if mode == "w" then - return o_finder(name,mode,ftype) - else - return i_finder(name,mode,ftype) - end -end - -local i_limited = false -local o_limited = false - -directives.register("system.inputmode", 
function(v) - if not i_limited then - local i_limiter = io.i_limiter(v) - if i_limiter then - i_finder = i_limiter.protect(i_finder) - i_limited = true - end - end -end) - -directives.register("system.outputmode", function(v) - if not o_limited then - local o_limiter = io.o_limiter(v) - if o_limiter then - o_finder = o_limiter.protect(o_finder) - o_limited = true - end - end -end) - --- -- -- - -metapost.finder = finder - -function metapost.reporterror(result) - if not result then - report_metapost("error: no result object returned") - elseif result.status > 0 then - local t, e, l = result.term, result.error, result.log - if t and t ~= "" then - (metapost.texerrors and texerrormessage or report_metapost)("terminal: %s",t) - end - if e == "" or e == "no-error" then - e = nil - end - if e then - (metapost.texerrors and texerrormessage or report_metapost)("error: %s",e) - end - if not t and not e and l then - metapost.lastlog = metapost.lastlog .. "\n" .. l - report_metapost("log: %s",l) - else - report_metapost("error: unknown, no error, terminal or log messages") - end - else - return false - end - return true -end - -if mplibone then - - report_metapost("fatal error: mplib is too old") - - os.exit() - - -- local preamble = [[ - -- boolean mplib ; mplib := true ; - -- string mp_parent_version ; mp_parent_version := "%s" ; - -- input "%s" ; dump ; - -- ]] - -- - -- metapost.parameters = { - -- hash_size = 100000, - -- main_memory = 4000000, - -- max_in_open = 50, - -- param_size = 100000, - -- } - -- - -- function metapost.make(name, target, version) - -- starttiming(mplib) - -- target = file.replacesuffix(target or name, "mem") -- redundant - -- local mpx = mplib.new ( table.merged ( - -- metapost.parameters, - -- { - -- ini_version = true, - -- find_file = finder, - -- job_name = file.removesuffix(target), - -- } - -- ) ) - -- if mpx then - -- starttiming(metapost.exectime) - -- local result = mpx:execute(format(preamble,version or "unknown",name)) - -- stoptiming(metapost.exectime) - -- mpx:finish() - -- end - -- stoptiming(mplib) - -- end - -- - -- function metapost.load(name) - -- starttiming(mplib) - -- local mpx = mplib.new ( table.merged ( - -- metapost.parameters, - -- { - -- ini_version = false, - -- mem_name = file.replacesuffix(name,"mem"), - -- find_file = finder, - -- -- job_name = "mplib", - -- } - -- ) ) - -- local result - -- if not mpx then - -- result = { status = 99, error = "out of memory"} - -- end - -- stoptiming(mplib) - -- return mpx, result - -- end - -- - -- function metapost.checkformat(mpsinput) - -- local mpsversion = environment.version or "unset version" - -- local mpsinput = file.addsuffix(mpsinput or "metafun", "mp") - -- local mpsformat = file.removesuffix(file.basename(texconfig.formatname or (tex and tex.formatname) or mpsinput)) - -- local mpsbase = file.removesuffix(file.basename(mpsinput)) - -- if mpsbase ~= mpsformat then - -- mpsformat = mpsformat .. "-" .. 
mpsbase - -- end - -- mpsformat = file.addsuffix(mpsformat, "mem") - -- local mpsformatfullname = caches.getfirstreadablefile(mpsformat,"formats","metapost") or "" - -- if mpsformatfullname ~= "" then - -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname) - -- local mpx, result = metapost.load(mpsformatfullname) - -- if mpx then - -- local result = mpx:execute("show mp_parent_version ;") - -- if not result.log then - -- metapost.reporterror(result) - -- else - -- local version = match(result.log,">> *(.-)[\n\r]") or "unknown" - -- version = gsub(version,"[\'\"]","") - -- if version ~= mpsversion then - -- report_metapost("version mismatch: %s <> %s", version or "unknown", mpsversion) - -- else - -- return mpx - -- end - -- end - -- else - -- report_metapost("error in loading %a from %a", mpsinput, mpsformatfullname) - -- metapost.reporterror(result) - -- end - -- end - -- local mpsformatfullname = caches.setfirstwritablefile(mpsformat,"formats") - -- report_metapost("making %a into %a", mpsinput, mpsformatfullname) - -- metapost.make(mpsinput,mpsformatfullname,mpsversion) -- somehow return ... fails here - -- if lfs.isfile(mpsformatfullname) then - -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname) - -- return metapost.load(mpsformatfullname) - -- else - -- report_metapost("problems with %a from %a", mpsinput, mpsformatfullname) - -- end - -- end - -else - - local preamble = [[ - boolean mplib ; mplib := true ; - let dump = endinput ; - input "%s" ; - ]] - - local methods = { - double = "double", - scaled = "scaled", - default = "scaled", - decimal = false, -- for the moment - } - - function metapost.load(name,method) - starttiming(mplib) - method = method and methods[method] or "scaled" - local mpx = mplib.new { - ini_version = true, - find_file = finder, - math_mode = method, - } - report_metapost("initializing number mode %a",method) - local result - if not mpx then - result = { status = 99, error = "out of memory"} - else - result = mpx:execute(format(preamble, file.addsuffix(name,"mp"))) -- addsuffix is redundant - end - stoptiming(mplib) - metapost.reporterror(result) - return mpx, result - end - - function metapost.checkformat(mpsinput,method) - local mpsversion = environment.version or "unset version" - local mpsinput = mpsinput or "metafun" - local foundfile = "" - if file.suffix(mpsinput) ~= "" then - foundfile = finder(mpsinput) or "" - end - if foundfile == "" then - foundfile = finder(file.replacesuffix(mpsinput,"mpvi")) or "" - end - if foundfile == "" then - foundfile = finder(file.replacesuffix(mpsinput,"mpiv")) or "" - end - if foundfile == "" then - foundfile = finder(file.replacesuffix(mpsinput,"mp")) or "" - end - if foundfile == "" then - report_metapost("loading %a fails, format not found",mpsinput) - else - report_metapost("loading %a as %a using method %a",mpsinput,foundfile,method or "default") - local mpx, result = metapost.load(foundfile,method) - if mpx then - return mpx - else - report_metapost("error in loading %a",mpsinput) - metapost.reporterror(result) - end - end - end - -end - -function metapost.unload(mpx) - starttiming(mplib) - if mpx then - mpx:finish() - end - stoptiming(mplib) -end - -local mpxformats = { } - -function metapost.format(instance,name,method) - if not instance or instance == "" then - instance = "metafun" -- brrr - end - name = name or instance - local mpx = mpxformats[instance] - if not mpx then - report_metapost("initializing instance %a using format %a",instance,name) - mpx = 
metapost.checkformat(name,method) - mpxformats[instance] = mpx - end - return mpx -end - -function metapost.instance(instance) - return mpxformats[instance] -end - -function metapost.reset(mpx) - if not mpx then - -- nothing - elseif type(mpx) == "string" then - if mpxformats[mpx] then - mpxformats[mpx]:finish() - mpxformats[mpx] = nil - end - else - for name, instance in next, mpxformats do - if instance == mpx then - mpx:finish() - mpxformats[name] = nil - break - end - end - end -end - -local mp_inp, mp_log, mp_tag = { }, { }, 0 - --- key/values - -function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass, askedfig) - local converted, result = false, { } - if type(mpx) == "string" then - mpx = metapost.format(mpx) -- goody - end - if mpx and data then - starttiming(metapost) - if trace_graphics then - if not mp_inp[mpx] then - mp_tag = mp_tag + 1 - local jobname = tex.jobname - mp_inp[mpx] = io.open(format("%s-mplib-run-%03i.mp", jobname,mp_tag),"w") - mp_log[mpx] = io.open(format("%s-mplib-run-%03i.log",jobname,mp_tag),"w") - end - local banner = format("%% begin graphic: n=%s, trialrun=%s, multipass=%s, isextrapass=%s\n\n", metapost.n, tostring(trialrun), tostring(multipass), tostring(isextrapass)) - mp_inp[mpx]:write(banner) - mp_log[mpx]:write(banner) - end - if type(data) == "table" then - -- this hack is needed because the library currently barks on \n\n - -- eventually we can text for "" in the next loop - local n = 0 - local nofsnippets = #data - for i=1,nofsnippets do - local d = data[i] - if d ~= "" then - n = n + 1 - data[n] = d - end - end - for i=nofsnippets,n+1,-1 do - data[i] = nil - end - -- and this one because mp cannot handle snippets due to grouping issues - if metapost.collapse then - if #data > 1 then - data = concat(data,"\n") - else - data = data[1] - end - end - -- end of hacks - end - if type(data) == "table" then - if trace_tracingall then - mpx:execute("tracingall;") - end - -- table.insert(data,2,"") - for i=1,#data do - local d = data[i] - -- d = string.gsub(d,"\r","") - if d then - if trace_graphics then - mp_inp[mpx]:write(format("\n%% begin snippet %s\n",i)) - mp_inp[mpx]:write(d) - mp_inp[mpx]:write(format("\n%% end snippet %s\n",i)) - end - starttiming(metapost.exectime) - result = mpx:execute(d) - stoptiming(metapost.exectime) - if trace_graphics and result then - local str = result.log or result.error - if str and str ~= "" then - mp_log[mpx]:write(str) - end - end - if not metapost.reporterror(result) then - if metapost.showlog then - local str = result.term ~= "" and result.term or "no terminal output" - if not emptystring(str) then - metapost.lastlog = metapost.lastlog .. "\n" .. str - report_metapost("log: %s",str) - end - end - if result.fig then - converted = metapost.convert(result, trialrun, flusher, multipass, askedfig) - end - end - else - report_metapost("error: invalid graphic component %s",i) - end - end - else - if trace_tracingall then - data = "tracingall;" .. data - end - if trace_graphics then - mp_inp[mpx]:write(data) - end - starttiming(metapost.exectime) - result = mpx:execute(data) - stoptiming(metapost.exectime) - if trace_graphics and result then - local str = result.log or result.error - if str and str ~= "" then - mp_log[mpx]:write(str) - end - end - -- todo: error message - if not result then - report_metapost("error: no result object returned") - elseif result.status > 0 then - report_metapost("error: %s",(result.term or "no-term") .. "\n" .. 
(result.error or "no-error")) - else - if metapost.showlog then - metapost.lastlog = metapost.lastlog .. "\n" .. result.term - report_metapost("info: %s",result.term or "no-term") - end - if result.fig then - converted = metapost.convert(result, trialrun, flusher, multipass, askedfig) - end - end - end - if trace_graphics then - local banner = "\n% end graphic\n\n" - mp_inp[mpx]:write(banner) - mp_log[mpx]:write(banner) - end - stoptiming(metapost) - end - return converted, result -end - -function metapost.convert() - report_metapost('warning: no converter set') -end - --- handy - -function metapost.directrun(formatname,filename,outputformat,astable,mpdata) - local fullname = file.addsuffix(filename,"mp") - local data = mpdata or io.loaddata(fullname) - if outputformat ~= "svg" then - outputformat = "mps" - end - if not data then - report_metapost("unknown file %a",filename) - else - local mpx = metapost.checkformat(formatname) - if not mpx then - report_metapost("unknown format %a",formatname) - else - report_metapost("processing %a",(mpdata and (filename or "data")) or fullname) - local result = mpx:execute(data) - if not result then - report_metapost("error: no result object returned") - elseif result.status > 0 then - report_metapost("error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error")) - else - if metapost.showlog then - metapost.lastlog = metapost.lastlog .. "\n" .. result.term - report_metapost("info: %s",result.term or "no-term") - end - local figures = result.fig - if figures then - local sorted = table.sortedkeys(figures) - if astable then - local result = { } - report_metapost("storing %s figures in table",#sorted) - for k=1,#sorted do - local v = sorted[k] - if outputformat == "mps" then - result[v] = figures[v]:postscript() - else - result[v] = figures[v]:svg() -- (3) for prologues - end - end - return result - else - local basename = file.removesuffix(file.basename(filename)) - for k=1,#sorted do - local v = sorted[k] - local output - if outputformat == "mps" then - output = figures[v]:postscript() - else - output = figures[v]:svg() -- (3) for prologues - end - local outname = format("%s-%s.%s",basename,v,outputformat) - report_metapost("saving %s bytes in %a",#output,outname) - io.savedata(outname,output) - end - return #sorted - end - end - end - end - end -end +if not modules then modules = { } end modules ['mlib-run'] = { + version = 1.001, + comment = "companion to mlib-ctx.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +--~ cmyk -> done, native +--~ spot -> done, but needs reworking (simpler) +--~ multitone -> +--~ shade -> partly done, todo: cm +--~ figure -> done +--~ hyperlink -> low priority, easy + +-- new * run +-- or +-- new * execute^1 * finish + +-- a*[b,c] == b + a * (c-b) + +--[[ldx-- +

    The directional helpers and pen analysis are more or less translated from the +C code. It really helps that Taco knows that source so well. Taco and I spent +quite some time on speeding up the Lua and C code. There is not +much to gain, especially if one keeps in mind that when integrated in TeX +only a part of the time is spent in MetaPost. Of course an integrated +approach is way faster than an external MetaPost run and processing time +nears zero.

    +--ldx]]-- + +local type, tostring, tonumber = type, tostring, tonumber +local format, gsub, match, find = string.format, string.gsub, string.match, string.find +local concat = table.concat +local emptystring = string.is_empty +local lpegmatch, P = lpeg.match, lpeg.P + +local trace_graphics = false trackers.register("metapost.graphics", function(v) trace_graphics = v end) +local trace_tracingall = false trackers.register("metapost.tracingall", function(v) trace_tracingall = v end) + +local report_metapost = logs.reporter("metapost") +local texerrormessage = logs.texerrormessage + +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming + +local mplib = mplib +metapost = metapost or { } +local metapost = metapost + +local mplibone = tonumber(mplib.version()) <= 1.50 + +metapost.showlog = false +metapost.lastlog = "" +metapost.collapse = true -- currently mplib cannot deal with begingroup/endgroup mismatch in stepwise processing +metapost.texerrors = false +metapost.exectime = metapost.exectime or { } -- hack + +-- metapost.collapse = false + +directives.register("mplib.texerrors", function(v) metapost.texerrors = v end) +trackers.register ("metapost.showlog", function(v) metapost.showlog = v end) + +function metapost.resetlastlog() + metapost.lastlog = "" +end + +----- mpbasepath = lpeg.instringchecker(lpeg.append { "/metapost/context/", "/metapost/base/" }) +local mpbasepath = lpeg.instringchecker(P("/metapost/") * (P("context") + P("base")) * P("/")) + +-- local function i_finder(askedname,mode,ftype) -- fake message for mpost.map and metafun.mpvi +-- local foundname = file.is_qualified_path(askedname) and askedname or resolvers.findfile(askedname,ftype) +-- if not mpbasepath(foundname) then +-- -- we could use the via file but we don't have a complete io interface yet +-- local data, found, forced = metapost.checktexts(io.loaddata(foundname) or "") +-- if found then +-- local tempname = luatex.registertempfile(foundname,true) +-- io.savedata(tempname,data) +-- foundname = tempname +-- end +-- end +-- return foundname +-- end + +-- mplib has no real io interface so we have a different mechanism than +-- tex (as soon as we have more control, we will use the normal code) + +local finders = { } +mplib.finders = finders + +-- for some reason mp sometimes calls this function twice which is inefficient +-- but we cannot catch this + +local function preprocessed(name) + if not mpbasepath(name) then + -- we could use the via file but we don't have a complete io interface yet + local data, found, forced = metapost.checktexts(io.loaddata(name) or "") + if found then + local temp = luatex.registertempfile(name,true) + io.savedata(temp,data) + return temp + end + end + return name +end + +mplib.preprocessed = preprocessed -- helper + +finders.file = function(specification,name,mode,ftype) + return preprocessed(resolvers.findfile(name,ftype)) +end + +local function i_finder(name,mode,ftype) -- fake message for mpost.map and metafun.mpvi + local specification = url.hashed(name) + local finder = finders[specification.scheme] or finders.file + return finder(specification,name,mode,ftype) +end + +local function o_finder(name,mode,ftype) + -- report_metapost("output file %a, mode %a, ftype %a",name,mode,ftype) + return name +end + +local function finder(name, mode, ftype) + if mode == "w" then + return o_finder(name,mode,ftype) + else + return i_finder(name,mode,ftype) + end +end + +local i_limited = false +local o_limited = false + +directives.register("system.inputmode", 
function(v) + if not i_limited then + local i_limiter = io.i_limiter(v) + if i_limiter then + i_finder = i_limiter.protect(i_finder) + i_limited = true + end + end +end) + +directives.register("system.outputmode", function(v) + if not o_limited then + local o_limiter = io.o_limiter(v) + if o_limiter then + o_finder = o_limiter.protect(o_finder) + o_limited = true + end + end +end) + +-- -- -- + +metapost.finder = finder + +function metapost.reporterror(result) + if not result then + report_metapost("error: no result object returned") + elseif result.status > 0 then + local t, e, l = result.term, result.error, result.log + if t and t ~= "" then + (metapost.texerrors and texerrormessage or report_metapost)("terminal: %s",t) + end + if e == "" or e == "no-error" then + e = nil + end + if e then + (metapost.texerrors and texerrormessage or report_metapost)("error: %s",e) + end + if not t and not e and l then + metapost.lastlog = metapost.lastlog .. "\n" .. l + report_metapost("log: %s",l) + else + report_metapost("error: unknown, no error, terminal or log messages") + end + else + return false + end + return true +end + +if mplibone then + + report_metapost("fatal error: mplib is too old") + + os.exit() + + -- local preamble = [[ + -- boolean mplib ; mplib := true ; + -- string mp_parent_version ; mp_parent_version := "%s" ; + -- input "%s" ; dump ; + -- ]] + -- + -- metapost.parameters = { + -- hash_size = 100000, + -- main_memory = 4000000, + -- max_in_open = 50, + -- param_size = 100000, + -- } + -- + -- function metapost.make(name, target, version) + -- starttiming(mplib) + -- target = file.replacesuffix(target or name, "mem") -- redundant + -- local mpx = mplib.new ( table.merged ( + -- metapost.parameters, + -- { + -- ini_version = true, + -- find_file = finder, + -- job_name = file.removesuffix(target), + -- } + -- ) ) + -- if mpx then + -- starttiming(metapost.exectime) + -- local result = mpx:execute(format(preamble,version or "unknown",name)) + -- stoptiming(metapost.exectime) + -- mpx:finish() + -- end + -- stoptiming(mplib) + -- end + -- + -- function metapost.load(name) + -- starttiming(mplib) + -- local mpx = mplib.new ( table.merged ( + -- metapost.parameters, + -- { + -- ini_version = false, + -- mem_name = file.replacesuffix(name,"mem"), + -- find_file = finder, + -- -- job_name = "mplib", + -- } + -- ) ) + -- local result + -- if not mpx then + -- result = { status = 99, error = "out of memory"} + -- end + -- stoptiming(mplib) + -- return mpx, result + -- end + -- + -- function metapost.checkformat(mpsinput) + -- local mpsversion = environment.version or "unset version" + -- local mpsinput = file.addsuffix(mpsinput or "metafun", "mp") + -- local mpsformat = file.removesuffix(file.basename(texconfig.formatname or (tex and tex.formatname) or mpsinput)) + -- local mpsbase = file.removesuffix(file.basename(mpsinput)) + -- if mpsbase ~= mpsformat then + -- mpsformat = mpsformat .. "-" .. 
mpsbase + -- end + -- mpsformat = file.addsuffix(mpsformat, "mem") + -- local mpsformatfullname = caches.getfirstreadablefile(mpsformat,"formats","metapost") or "" + -- if mpsformatfullname ~= "" then + -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname) + -- local mpx, result = metapost.load(mpsformatfullname) + -- if mpx then + -- local result = mpx:execute("show mp_parent_version ;") + -- if not result.log then + -- metapost.reporterror(result) + -- else + -- local version = match(result.log,">> *(.-)[\n\r]") or "unknown" + -- version = gsub(version,"[\'\"]","") + -- if version ~= mpsversion then + -- report_metapost("version mismatch: %s <> %s", version or "unknown", mpsversion) + -- else + -- return mpx + -- end + -- end + -- else + -- report_metapost("error in loading %a from %a", mpsinput, mpsformatfullname) + -- metapost.reporterror(result) + -- end + -- end + -- local mpsformatfullname = caches.setfirstwritablefile(mpsformat,"formats") + -- report_metapost("making %a into %a", mpsinput, mpsformatfullname) + -- metapost.make(mpsinput,mpsformatfullname,mpsversion) -- somehow return ... fails here + -- if lfs.isfile(mpsformatfullname) then + -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname) + -- return metapost.load(mpsformatfullname) + -- else + -- report_metapost("problems with %a from %a", mpsinput, mpsformatfullname) + -- end + -- end + +else + + local preamble = [[ + boolean mplib ; mplib := true ; + let dump = endinput ; + input "%s" ; + ]] + + local methods = { + double = "double", + scaled = "scaled", + default = "scaled", + decimal = false, -- for the moment + } + + function metapost.load(name,method) + starttiming(mplib) + method = method and methods[method] or "scaled" + local mpx = mplib.new { + ini_version = true, + find_file = finder, + math_mode = method, + } + report_metapost("initializing number mode %a",method) + local result + if not mpx then + result = { status = 99, error = "out of memory"} + else + result = mpx:execute(format(preamble, file.addsuffix(name,"mp"))) -- addsuffix is redundant + end + stoptiming(mplib) + metapost.reporterror(result) + return mpx, result + end + + function metapost.checkformat(mpsinput,method) + local mpsversion = environment.version or "unset version" + local mpsinput = mpsinput or "metafun" + local foundfile = "" + if file.suffix(mpsinput) ~= "" then + foundfile = finder(mpsinput) or "" + end + if foundfile == "" then + foundfile = finder(file.replacesuffix(mpsinput,"mpvi")) or "" + end + if foundfile == "" then + foundfile = finder(file.replacesuffix(mpsinput,"mpiv")) or "" + end + if foundfile == "" then + foundfile = finder(file.replacesuffix(mpsinput,"mp")) or "" + end + if foundfile == "" then + report_metapost("loading %a fails, format not found",mpsinput) + else + report_metapost("loading %a as %a using method %a",mpsinput,foundfile,method or "default") + local mpx, result = metapost.load(foundfile,method) + if mpx then + return mpx + else + report_metapost("error in loading %a",mpsinput) + metapost.reporterror(result) + end + end + end + +end + +function metapost.unload(mpx) + starttiming(mplib) + if mpx then + mpx:finish() + end + stoptiming(mplib) +end + +local mpxformats = { } + +function metapost.format(instance,name,method) + if not instance or instance == "" then + instance = "metafun" -- brrr + end + name = name or instance + local mpx = mpxformats[instance] + if not mpx then + report_metapost("initializing instance %a using format %a",instance,name) + mpx = 
metapost.checkformat(name,method) + mpxformats[instance] = mpx + end + return mpx +end + +function metapost.instance(instance) + return mpxformats[instance] +end + +function metapost.reset(mpx) + if not mpx then + -- nothing + elseif type(mpx) == "string" then + if mpxformats[mpx] then + mpxformats[mpx]:finish() + mpxformats[mpx] = nil + end + else + for name, instance in next, mpxformats do + if instance == mpx then + mpx:finish() + mpxformats[name] = nil + break + end + end + end +end + +local mp_inp, mp_log, mp_tag = { }, { }, 0 + +-- key/values + +function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass, askedfig) + local converted, result = false, { } + if type(mpx) == "string" then + mpx = metapost.format(mpx) -- goody + end + if mpx and data then + starttiming(metapost) + if trace_graphics then + if not mp_inp[mpx] then + mp_tag = mp_tag + 1 + local jobname = tex.jobname + mp_inp[mpx] = io.open(format("%s-mplib-run-%03i.mp", jobname,mp_tag),"w") + mp_log[mpx] = io.open(format("%s-mplib-run-%03i.log",jobname,mp_tag),"w") + end + local banner = format("%% begin graphic: n=%s, trialrun=%s, multipass=%s, isextrapass=%s\n\n", metapost.n, tostring(trialrun), tostring(multipass), tostring(isextrapass)) + mp_inp[mpx]:write(banner) + mp_log[mpx]:write(banner) + end + if type(data) == "table" then + -- this hack is needed because the library currently barks on \n\n + -- eventually we can text for "" in the next loop + local n = 0 + local nofsnippets = #data + for i=1,nofsnippets do + local d = data[i] + if d ~= "" then + n = n + 1 + data[n] = d + end + end + for i=nofsnippets,n+1,-1 do + data[i] = nil + end + -- and this one because mp cannot handle snippets due to grouping issues + if metapost.collapse then + if #data > 1 then + data = concat(data,"\n") + else + data = data[1] + end + end + -- end of hacks + end + if type(data) == "table" then + if trace_tracingall then + mpx:execute("tracingall;") + end + -- table.insert(data,2,"") + for i=1,#data do + local d = data[i] + -- d = string.gsub(d,"\r","") + if d then + if trace_graphics then + mp_inp[mpx]:write(format("\n%% begin snippet %s\n",i)) + mp_inp[mpx]:write(d) + mp_inp[mpx]:write(format("\n%% end snippet %s\n",i)) + end + starttiming(metapost.exectime) + result = mpx:execute(d) + stoptiming(metapost.exectime) + if trace_graphics and result then + local str = result.log or result.error + if str and str ~= "" then + mp_log[mpx]:write(str) + end + end + if not metapost.reporterror(result) then + if metapost.showlog then + local str = result.term ~= "" and result.term or "no terminal output" + if not emptystring(str) then + metapost.lastlog = metapost.lastlog .. "\n" .. str + report_metapost("log: %s",str) + end + end + if result.fig then + converted = metapost.convert(result, trialrun, flusher, multipass, askedfig) + end + end + else + report_metapost("error: invalid graphic component %s",i) + end + end + else + if trace_tracingall then + data = "tracingall;" .. data + end + if trace_graphics then + mp_inp[mpx]:write(data) + end + starttiming(metapost.exectime) + result = mpx:execute(data) + stoptiming(metapost.exectime) + if trace_graphics and result then + local str = result.log or result.error + if str and str ~= "" then + mp_log[mpx]:write(str) + end + end + -- todo: error message + if not result then + report_metapost("error: no result object returned") + elseif result.status > 0 then + report_metapost("error: %s",(result.term or "no-term") .. "\n" .. 
(result.error or "no-error")) + else + if metapost.showlog then + metapost.lastlog = metapost.lastlog .. "\n" .. result.term + report_metapost("info: %s",result.term or "no-term") + end + if result.fig then + converted = metapost.convert(result, trialrun, flusher, multipass, askedfig) + end + end + end + if trace_graphics then + local banner = "\n% end graphic\n\n" + mp_inp[mpx]:write(banner) + mp_log[mpx]:write(banner) + end + stoptiming(metapost) + end + return converted, result +end + +function metapost.convert() + report_metapost('warning: no converter set') +end + +-- handy + +function metapost.directrun(formatname,filename,outputformat,astable,mpdata) + local fullname = file.addsuffix(filename,"mp") + local data = mpdata or io.loaddata(fullname) + if outputformat ~= "svg" then + outputformat = "mps" + end + if not data then + report_metapost("unknown file %a",filename) + else + local mpx = metapost.checkformat(formatname) + if not mpx then + report_metapost("unknown format %a",formatname) + else + report_metapost("processing %a",(mpdata and (filename or "data")) or fullname) + local result = mpx:execute(data) + if not result then + report_metapost("error: no result object returned") + elseif result.status > 0 then + report_metapost("error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error")) + else + if metapost.showlog then + metapost.lastlog = metapost.lastlog .. "\n" .. result.term + report_metapost("info: %s",result.term or "no-term") + end + local figures = result.fig + if figures then + local sorted = table.sortedkeys(figures) + if astable then + local result = { } + report_metapost("storing %s figures in table",#sorted) + for k=1,#sorted do + local v = sorted[k] + if outputformat == "mps" then + result[v] = figures[v]:postscript() + else + result[v] = figures[v]:svg() -- (3) for prologues + end + end + return result + else + local basename = file.removesuffix(file.basename(filename)) + for k=1,#sorted do + local v = sorted[k] + local output + if outputformat == "mps" then + output = figures[v]:postscript() + else + output = figures[v]:svg() -- (3) for prologues + end + local outname = format("%s-%s.%s",basename,v,outputformat) + report_metapost("saving %s bytes in %a",#output,outname) + io.savedata(outname,output) + end + return #sorted + end + end + end + end + end +end diff --git a/tex/context/base/mult-aux.lua b/tex/context/base/mult-aux.lua index 3c4cbcc0f..05dd112a8 100644 --- a/tex/context/base/mult-aux.lua +++ b/tex/context/base/mult-aux.lua @@ -1,154 +1,154 @@ -if not modules then modules = { } end modules ['mult-aux'] = { - version = 1.001, - comment = "companion to mult-aux.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local find = string.find - -interfaces.namespaces = interfaces.namespaces or { } -local namespaces = interfaces.namespaces -local variables = interfaces.variables - -local trace_namespaces = false trackers.register("interfaces.namespaces", function(v) trace_namespaces = v end) - -local report_namespaces = logs.reporter("interface","namespaces") - -local v_yes, v_list = variables.yes, variables.list - -local prefix = "????" 
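-- [editor's note, not part of the patch] A minimal usage sketch of the mlib-run
-- functions defined in the hunk above (metapost.format, metapost.process,
-- metapost.directrun). The instance name, MetaPost code and file name are
-- illustrative assumptions, not taken from the commit.

local mpx = metapost.format("metafun")                 -- fetch or build an instance
if mpx then
    -- data may be a single string or a table of snippets; the optional
    -- trialrun/flusher/multipass arguments are left at their defaults here
    metapost.process(mpx, "draw fullcircle scaled 1cm withcolor red ;")
end

-- alternatively, without instance bookkeeping, convert a file straight to mps or svg:
-- metapost.directrun("metafun", "somefigure.mp", "mps")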
-local meaning = "@@@@" - -local data = { } - -function namespaces.define(namespace,settings) - if trace_namespaces then - report_namespaces("installing namespace %a with settings %a",namespace,settings) - end - if data[namespace] then - report_namespaces("namespace %a is already taken",namespace) - end - if #namespace < 2 then - report_namespaces("namespace %a should have more than 1 character",namespace) - end - local ns = { } - data[namespace] = ns - utilities.parsers.settings_to_hash(settings,ns) - local name = ns.name - if not name or name == "" then - report_namespaces("provide a (command) name in namespace %a",namespace) - end - local self = "\\" .. prefix .. namespace - context.unprotect() - -- context.installnamespace(namespace) - context("\\def\\%s%s{%s%s}",prefix,namespace,meaning,namespace) -- or context.setvalue - if trace_namespaces then - report_namespaces("using namespace %a for %a",namespace,name) - end - local parent = ns.parent or "" - if parent ~= "" then - if trace_namespaces then - report_namespaces("namespace %a for %a uses parent %a",namespace,name,parent) - end - if not find(parent,"\\") then - parent = "\\" .. prefix .. parent - -- todo: check if defined - end - end - context.installparameterhandler(self,name) - if trace_namespaces then - report_namespaces("installing parameter handler for %a",name) - end - context.installparameterhashhandler(self,name) - if trace_namespaces then - report_namespaces("installing parameterhash handler for %a",name) - end - local style = ns.style - if style == v_yes then - context.installstyleandcolorhandler(self,name) - if trace_namespaces then - report_namespaces("installing attribute handler for %a",name) - end - end - local command = ns.command - if command == v_yes then - context.installdefinehandler(self,name,parent) - if trace_namespaces then - report_namespaces("installing definition command for %a (single)",name) - end - elseif command == v_list then - context.installdefinehandler(self,name,parent) - if trace_namespaces then - report_namespaces("installing definition command for %a (multiple)",name) - end - end - local setup = ns.setup - if setup == v_yes then - context.installsetuphandler(self,name) - if trace_namespaces then - report_namespaces("installing setup command for %a (%s)",name,"single") - end - elseif setup == v_list then - context.installsetuphandler(self,name) - if trace_namespaces then - report_namespaces("installing setup command for %a (%s)",name,"multiple") - end - end - local set = ns.set - if set == v_yes then - context.installparametersethandler(self,name) - if trace_namespaces then - report_namespaces("installing set/let/reset command for %a (%s)",name,"single") - end - elseif set == v_list then - context.installparametersethandler(self,name) - if trace_namespaces then - report_namespaces("installing set/let/reset command for %a (%s)",name,"multiple") - end - end - local frame = ns.frame - if frame == v_yes then - context.installinheritedframed(name) - if trace_namespaces then - report_namespaces("installing framed command for %a",name) - end - end - context.protect() -end - -function utilities.formatters.list(data,key,keys) - if not keys then - keys = { } - for _, v in next, data do - for k, _ in next, v do - keys[k] = true - end - end - keys = table.sortedkeys(keys) - end - context.starttabulate { "|"..string.rep("l|",#keys+1) } - context.NC() - context(key) - for i=1,#keys do - context.NC() - context(keys[i]) - end context.NR() - context.HL() - for k, v in table.sortedhash(data) do - context.NC() - 
context(k) - for i=1,#keys do - context.NC() - context(v[keys[i]]) - end context.NR() - end - context.stoptabulate() -end - -function namespaces.list() - -- utilities.formatters.list(data,"namespace") - local keys = { "type", "name", "comment", "version", "parent", "definition", "setup", "style" } - utilities.formatters.list(data,"namespace",keys) -end +if not modules then modules = { } end modules ['mult-aux'] = { + version = 1.001, + comment = "companion to mult-aux.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local find = string.find + +interfaces.namespaces = interfaces.namespaces or { } +local namespaces = interfaces.namespaces +local variables = interfaces.variables + +local trace_namespaces = false trackers.register("interfaces.namespaces", function(v) trace_namespaces = v end) + +local report_namespaces = logs.reporter("interface","namespaces") + +local v_yes, v_list = variables.yes, variables.list + +local prefix = "????" +local meaning = "@@@@" + +local data = { } + +function namespaces.define(namespace,settings) + if trace_namespaces then + report_namespaces("installing namespace %a with settings %a",namespace,settings) + end + if data[namespace] then + report_namespaces("namespace %a is already taken",namespace) + end + if #namespace < 2 then + report_namespaces("namespace %a should have more than 1 character",namespace) + end + local ns = { } + data[namespace] = ns + utilities.parsers.settings_to_hash(settings,ns) + local name = ns.name + if not name or name == "" then + report_namespaces("provide a (command) name in namespace %a",namespace) + end + local self = "\\" .. prefix .. namespace + context.unprotect() + -- context.installnamespace(namespace) + context("\\def\\%s%s{%s%s}",prefix,namespace,meaning,namespace) -- or context.setvalue + if trace_namespaces then + report_namespaces("using namespace %a for %a",namespace,name) + end + local parent = ns.parent or "" + if parent ~= "" then + if trace_namespaces then + report_namespaces("namespace %a for %a uses parent %a",namespace,name,parent) + end + if not find(parent,"\\") then + parent = "\\" .. prefix .. 
parent + -- todo: check if defined + end + end + context.installparameterhandler(self,name) + if trace_namespaces then + report_namespaces("installing parameter handler for %a",name) + end + context.installparameterhashhandler(self,name) + if trace_namespaces then + report_namespaces("installing parameterhash handler for %a",name) + end + local style = ns.style + if style == v_yes then + context.installstyleandcolorhandler(self,name) + if trace_namespaces then + report_namespaces("installing attribute handler for %a",name) + end + end + local command = ns.command + if command == v_yes then + context.installdefinehandler(self,name,parent) + if trace_namespaces then + report_namespaces("installing definition command for %a (single)",name) + end + elseif command == v_list then + context.installdefinehandler(self,name,parent) + if trace_namespaces then + report_namespaces("installing definition command for %a (multiple)",name) + end + end + local setup = ns.setup + if setup == v_yes then + context.installsetuphandler(self,name) + if trace_namespaces then + report_namespaces("installing setup command for %a (%s)",name,"single") + end + elseif setup == v_list then + context.installsetuphandler(self,name) + if trace_namespaces then + report_namespaces("installing setup command for %a (%s)",name,"multiple") + end + end + local set = ns.set + if set == v_yes then + context.installparametersethandler(self,name) + if trace_namespaces then + report_namespaces("installing set/let/reset command for %a (%s)",name,"single") + end + elseif set == v_list then + context.installparametersethandler(self,name) + if trace_namespaces then + report_namespaces("installing set/let/reset command for %a (%s)",name,"multiple") + end + end + local frame = ns.frame + if frame == v_yes then + context.installinheritedframed(name) + if trace_namespaces then + report_namespaces("installing framed command for %a",name) + end + end + context.protect() +end + +function utilities.formatters.list(data,key,keys) + if not keys then + keys = { } + for _, v in next, data do + for k, _ in next, v do + keys[k] = true + end + end + keys = table.sortedkeys(keys) + end + context.starttabulate { "|"..string.rep("l|",#keys+1) } + context.NC() + context(key) + for i=1,#keys do + context.NC() + context(keys[i]) + end context.NR() + context.HL() + for k, v in table.sortedhash(data) do + context.NC() + context(k) + for i=1,#keys do + context.NC() + context(v[keys[i]]) + end context.NR() + end + context.stoptabulate() +end + +function namespaces.list() + -- utilities.formatters.list(data,"namespace") + local keys = { "type", "name", "comment", "version", "parent", "definition", "setup", "style" } + utilities.formatters.list(data,"namespace",keys) +end diff --git a/tex/context/base/mult-chk.lua b/tex/context/base/mult-chk.lua index 2a2dfcd4b..06e7aa8e6 100644 --- a/tex/context/base/mult-chk.lua +++ b/tex/context/base/mult-chk.lua @@ -1,76 +1,76 @@ -if not modules then modules = { } end modules ['mult-chk'] = { - version = 1.001, - comment = "companion to mult-chk.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format -local lpegmatch = lpeg.match -local type = type - -local make_settings_to_hash_pattern = utilities.parsers.make_settings_to_hash_pattern -local settings_to_set = utilities.parsers.settings_to_set -local allocate = utilities.storage.allocate - -local report_interface = 
logs.reporter("interface","checking") - -interfaces = interfaces or { } - -interfaces.syntax = allocate { - test = { keys = table.tohash { "a","b","c","d","e","f","g" } } -} - -function interfaces.invalidkey(category,key) - report_interface("invalid key %a for %a in line %a",key,category,tex.inputlineno) -end - -function interfaces.setvalidkeys(category,list) - local s = interfaces.syntax[category] - if not s then - interfaces.syntax[category] = { - keys = settings_to_set(list) - } - else - s.keys = settings_to_set(list) - end -end - -function interfaces.addvalidkeys(category,list) - local s = interfaces.syntax[category] - if not s then - interfaces.syntax[category] = { - keys = settings_to_set(list) - } - else - settings_to_set(list,s.keys) - end -end - --- weird code, looks incomplete ... probably an experiment - -local prefix, category, keys - -local setsomevalue = context.setsomevalue -local invalidkey = interfaces.invalidkey - -local function set(key,value) - if keys and not keys[key] then - invalidkey(category,key) - else - setsomevalue(prefix,key,value) - end -end - -local pattern = make_settings_to_hash_pattern(set,"tolerant") - -function interfaces.getcheckedparameters(k,p,s) - if s and s ~= "" then - prefix, category = p, k - keys = k and k ~= "" and interfaces.syntax[k].keys - lpegmatch(pattern,s) - end -end - --- _igcp_ = interfaces.getcheckedparameters +if not modules then modules = { } end modules ['mult-chk'] = { + version = 1.001, + comment = "companion to mult-chk.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format +local lpegmatch = lpeg.match +local type = type + +local make_settings_to_hash_pattern = utilities.parsers.make_settings_to_hash_pattern +local settings_to_set = utilities.parsers.settings_to_set +local allocate = utilities.storage.allocate + +local report_interface = logs.reporter("interface","checking") + +interfaces = interfaces or { } + +interfaces.syntax = allocate { + test = { keys = table.tohash { "a","b","c","d","e","f","g" } } +} + +function interfaces.invalidkey(category,key) + report_interface("invalid key %a for %a in line %a",key,category,tex.inputlineno) +end + +function interfaces.setvalidkeys(category,list) + local s = interfaces.syntax[category] + if not s then + interfaces.syntax[category] = { + keys = settings_to_set(list) + } + else + s.keys = settings_to_set(list) + end +end + +function interfaces.addvalidkeys(category,list) + local s = interfaces.syntax[category] + if not s then + interfaces.syntax[category] = { + keys = settings_to_set(list) + } + else + settings_to_set(list,s.keys) + end +end + +-- weird code, looks incomplete ... 
probably an experiment + +local prefix, category, keys + +local setsomevalue = context.setsomevalue +local invalidkey = interfaces.invalidkey + +local function set(key,value) + if keys and not keys[key] then + invalidkey(category,key) + else + setsomevalue(prefix,key,value) + end +end + +local pattern = make_settings_to_hash_pattern(set,"tolerant") + +function interfaces.getcheckedparameters(k,p,s) + if s and s ~= "" then + prefix, category = p, k + keys = k and k ~= "" and interfaces.syntax[k].keys + lpegmatch(pattern,s) + end +end + +-- _igcp_ = interfaces.getcheckedparameters diff --git a/tex/context/base/mult-fun.lua b/tex/context/base/mult-fun.lua index a661c53bb..e263c3559 100644 --- a/tex/context/base/mult-fun.lua +++ b/tex/context/base/mult-fun.lua @@ -1,101 +1,101 @@ -return { - internals = { - -- - "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel", - "shadefactor", - "textextoffset", - "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent", - "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent", - "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent", --- "originlength", "tickstep ", "ticklength", --- "autoarrows", "ahfactor", --- "angleoffset", anglelength", anglemethod", - "metapostversion", - "maxdimensions", - }, - commands = { - -- - "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", - "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", - "invsin", "invcos", "acosh", "asinh", "sinh", "cosh", - "paired", "tripled", - "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", - -- "halfcircle", "quartercircle", - "llcircle", "lrcircle", "urcircle", "ulcircle", - "tcircle", "bcircle", "lcircle", "rcircle", - "lltriangle", "lrtriangle", "urtriangle", "ultriangle", - "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", - "punked", "curved", "unspiked", "simplified", "blownup", "stretched", - "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", - "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", - "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", - "llmoved", "lrmoved", "urmoved", "ulmoved", - "rightarrow", "leftarrow", "centerarrow", - "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", - "bottomboundary", "leftboundary", "topboundary", "rightboundary", - "xsized", "ysized", "xysized", "sized", "xyscaled", - "intersection_point", "intersection_found", "penpoint", - "bbwidth", "bbheight", - "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", - "withcircularshade", "withlinearshade", - "cmyk", "spotcolor", "multitonecolor", "namedcolor", - "drawfill", "undrawfill", - "inverted", "uncolored", "softened", "grayed", "greyed", - "onlayer", - "along", - "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", - "colordecimals", "ddecimal", "dddecimal", "ddddecimal", - "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", - "transparent", "withtransparency", - "property", "properties", "withproperties", - "asgroup", - "infont", -- redefined usign textext - -- "property", "withproperties", "properties", -- not yet - 
"set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", - "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", - "space", "CRLF", - "grayscale", "greyscale", "withgray", "withgrey", - "colorpart", - "readfile", - "clearxy", "unitvector", "center", -- redefined - "epsed", "anchored", - "originpath", "infinite", - "break", - "xstretched", "ystretched", "snapped", - -- - "pathconnectors", "function", "constructedpath", "constructedpairs", - "punkedfunction", "curvedfunction", "tightfunction", - "punkedpath", "curvedpath", "tightpath", - "punkedpairs", "curvedpairs", "tightpairs", - -- - "evenly", "oddly", - -- - "condition", - -- - "pushcurrentpicture", "popcurrentpicture", - -- - "arrowpath", --- "colorlike", "dowithpath", "rangepath", "straightpath", "addbackground", --- "cleanstring", "asciistring", "setunstringed", "getunstringed", "unstringed", --- "showgrid", --- "phantom", --- "xshifted", "yshifted", --- "drawarrowpath", "midarrowhead", "arrowheadonpath", --- "drawxticks", "drawyticks", "drawticks", --- "pointarrow", --- "thefreelabel", "freelabel", "freedotlabel", --- "anglebetween", "colorcircle", --- "remapcolors", "normalcolors", "resetcolormap", "remapcolor", "remappedcolor", --- "recolor", "refill", "redraw", "retext", "untext", "restroke", "reprocess", "repathed", - "tensecircle", "roundedsquare", - "colortype", "whitecolor", "blackcolor", - -- --- "swappointlabels", - "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", - "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", - "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", - "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", - -- - "decorated", "redecorated", "undecorated", - }, -} +return { + internals = { + -- + "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel", + "shadefactor", + "textextoffset", + "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent", + "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent", + "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent", +-- "originlength", "tickstep ", "ticklength", +-- "autoarrows", "ahfactor", +-- "angleoffset", anglelength", anglemethod", + "metapostversion", + "maxdimensions", + }, + commands = { + -- + "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", + "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", + "invsin", "invcos", "acosh", "asinh", "sinh", "cosh", + "paired", "tripled", + "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", + -- "halfcircle", "quartercircle", + "llcircle", "lrcircle", "urcircle", "ulcircle", + "tcircle", "bcircle", "lcircle", "rcircle", + "lltriangle", "lrtriangle", "urtriangle", "ultriangle", + "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", + "punked", "curved", "unspiked", "simplified", "blownup", "stretched", + "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", + "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", 
+ "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", + "llmoved", "lrmoved", "urmoved", "ulmoved", + "rightarrow", "leftarrow", "centerarrow", + "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", + "bottomboundary", "leftboundary", "topboundary", "rightboundary", + "xsized", "ysized", "xysized", "sized", "xyscaled", + "intersection_point", "intersection_found", "penpoint", + "bbwidth", "bbheight", + "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", + "withcircularshade", "withlinearshade", + "cmyk", "spotcolor", "multitonecolor", "namedcolor", + "drawfill", "undrawfill", + "inverted", "uncolored", "softened", "grayed", "greyed", + "onlayer", + "along", + "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", + "colordecimals", "ddecimal", "dddecimal", "ddddecimal", + "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", + "transparent", "withtransparency", + "property", "properties", "withproperties", + "asgroup", + "infont", -- redefined usign textext + -- "property", "withproperties", "properties", -- not yet + "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", + "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", + "space", "CRLF", + "grayscale", "greyscale", "withgray", "withgrey", + "colorpart", + "readfile", + "clearxy", "unitvector", "center", -- redefined + "epsed", "anchored", + "originpath", "infinite", + "break", + "xstretched", "ystretched", "snapped", + -- + "pathconnectors", "function", "constructedpath", "constructedpairs", + "punkedfunction", "curvedfunction", "tightfunction", + "punkedpath", "curvedpath", "tightpath", + "punkedpairs", "curvedpairs", "tightpairs", + -- + "evenly", "oddly", + -- + "condition", + -- + "pushcurrentpicture", "popcurrentpicture", + -- + "arrowpath", +-- "colorlike", "dowithpath", "rangepath", "straightpath", "addbackground", +-- "cleanstring", "asciistring", "setunstringed", "getunstringed", "unstringed", +-- "showgrid", +-- "phantom", +-- "xshifted", "yshifted", +-- "drawarrowpath", "midarrowhead", "arrowheadonpath", +-- "drawxticks", "drawyticks", "drawticks", +-- "pointarrow", +-- "thefreelabel", "freelabel", "freedotlabel", +-- "anglebetween", "colorcircle", +-- "remapcolors", "normalcolors", "resetcolormap", "remapcolor", "remappedcolor", +-- "recolor", "refill", "redraw", "retext", "untext", "restroke", "reprocess", "repathed", + "tensecircle", "roundedsquare", + "colortype", "whitecolor", "blackcolor", + -- +-- "swappointlabels", + "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", + "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", + "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", + "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", + -- + "decorated", "redecorated", "undecorated", + }, +} diff --git a/tex/context/base/mult-ini.lua b/tex/context/base/mult-ini.lua index 3b18738de..491557446 100644 --- a/tex/context/base/mult-ini.lua +++ b/tex/context/base/mult-ini.lua @@ -1,333 +1,333 @@ -if not modules then modules = { } end modules ['mult-ini'] = { - 
version = 1.001, - comment = "companion to mult-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, gmatch, match = string.format, string.gmatch, string.match -local lpegmatch = lpeg.match -local serialize = table.serialize - -local allocate = utilities.storage.allocate -local mark = utilities.storage.mark -local prtcatcodes = catcodes.numbers.prtcatcodes -local contextsprint = context.sprint -local setmetatableindex = table.setmetatableindex -local formatters = string.formatters - -local report_interface = logs.reporter("interface","initialization") - -interfaces = interfaces or { } -interfaces.constants = mark(interfaces.constants or { }) -interfaces.variables = mark(interfaces.variables or { }) -interfaces.elements = mark(interfaces.elements or { }) -interfaces.formats = mark(interfaces.formats or { }) -interfaces.translations = mark(interfaces.translations or { }) -interfaces.corenamespaces = mark(interfaces.corenamespaces or { }) - -local registerstorage = storage.register -local sharedstorage = storage.shared - -local constants = interfaces.constants -local variables = interfaces.variables -local elements = interfaces.elements -local formats = interfaces.formats -local translations = interfaces.translations -local corenamespaces = interfaces.corenamespaces -local reporters = { } -- just an optimization - -registerstorage("interfaces/constants", constants, "interfaces.constants") -registerstorage("interfaces/variables", variables, "interfaces.variables") -registerstorage("interfaces/elements", elements, "interfaces.elements") -registerstorage("interfaces/formats", formats, "interfaces.formats") -registerstorage("interfaces/translations", translations, "interfaces.translations") -registerstorage("interfaces/corenamespaces", corenamespaces, "interfaces.corenamespaces") - -interfaces.interfaces = { - "cs", "de", "en", "fr", "it", "nl", "ro", "pe", -} - -sharedstorage.currentinterface = sharedstorage.currentinterface or "en" -sharedstorage.currentresponse = sharedstorage.currentresponse or "en" - -local currentinterface = sharedstorage.currentinterface -local currentresponse = sharedstorage.currentresponse - -local complete = allocate() -interfaces.complete = complete - -local function resolve(t,k) -- one access needed to get loaded (not stored!) - report_interface("loading interface definitions from 'mult-def.lua'") - complete = dofile(resolvers.findfile("mult-def.lua")) - report_interface("loading interface messages from 'mult-mes.lua'") - complete.messages = dofile(resolvers.findfile("mult-mes.lua")) - interfaces.complete = complete - return rawget(complete,k) -end - -setmetatableindex(complete, resolve) - -local function valueiskey(t,k) -- will be helper - t[k] = k - return k -end - -setmetatableindex(variables, valueiskey) -setmetatableindex(constants, valueiskey) -setmetatableindex(elements, valueiskey) -setmetatableindex(formats, valueiskey) -setmetatableindex(translations, valueiskey) - -function interfaces.registernamespace(n,namespace) - corenamespaces[n] = namespace -end - -local function resolve(t,k) - local v = logs.reporter(k) - t[k] = v - return v -end - -setmetatableindex(reporters,resolve) - -for category, _ in next, translations do - -- We pre-create reporters for already defined messages - -- because otherwise listing is incomplete and we want - -- to use that for checking so delaying makes not much - -- sense there. 
- local r = reporters[category] -end - --- adding messages - -local function add(target,tag,values) - local t = target[tag] - if not f then - target[tag] = values - else - for k, v in next, values do - if f[k] then - -- error - else - f[k] = v - end - end - end -end - -function interfaces.settranslation(tag,values) - add(translations,tag,values) -end - -function interfaces.setformat(tag,values) - add(formats,tag,values) -end - --- the old method: - -local replacer = lpeg.replacer { { "--", "%%a" } } - -local function fulltag(category,tag) - return formatters["%s:%s"](category,lpegmatch(replacer,tag)) -end - -function interfaces.setmessages(category,str) - for tag, message in gmatch(str,"(%S+) *: *(.-) *[\n\r]") do - if tag == "title" then - translations[tag] = translations[tag] or tag - else - formats[fulltag(category,tag)] = lpegmatch(replacer,message) - end - end -end - -function interfaces.setmessage(category,tag,message) - formats[fulltag(category,tag)] = lpegmatch(replacer,message) -end - -function interfaces.getmessage(category,tag,default) - return formats[fulltag(category,tag)] or default or "unknown message" -end - -function interfaces.doifelsemessage(category,tag) - return formats[fulltag(category,tag)] -end - -local splitter = lpeg.splitat(",") - -function interfaces.showmessage(category,tag,arguments) - local r = reporters[category] - local f = formats[fulltag(category,tag)] - local t = type(arguments) - if t == "string" and #arguments > 0 then - r(f,lpegmatch(splitter,arguments)) - elseif t == "table" then - r(f,unpack(arguments)) - elseif arguments then - r(f,arguments) - else - r(f) - end -end - --- till here - -function interfaces.setvariable(variable,given) - variables[given] = variable -end - -function interfaces.setconstant(constant,given) - constants[given] = constant -end - -function interfaces.setelement(element,given) - elements[given] = element -end - --- the real thing: - -logs.setmessenger(context.verbatim.ctxreport) - --- initialization - -function interfaces.setuserinterface(interface,response) - sharedstorage.currentinterface, currentinterface = interface, interface - sharedstorage.currentresponse, currentresponse = response, response - if environment.initex then - local nofconstants = 0 - for given, constant in next, complete.constants do - constant = constant[interface] or constant.en or given - constants[constant] = given -- breedte -> width - contextsprint(prtcatcodes,"\\ui_c{",given,"}{",constant,"}") -- user interface constant - nofconstants = nofconstants + 1 - end - local nofvariables = 0 - for given, variable in next, complete.variables do - variable = variable[interface] or variable.en or given - variables[given] = variable -- ja -> yes - contextsprint(prtcatcodes,"\\ui_v{",given,"}{",variable,"}") -- user interface variable - nofvariables = nofvariables + 1 - end - local nofelements = 0 - for given, element in next, complete.elements do - element = element[interface] or element.en or given - elements[element] = given - contextsprint(prtcatcodes,"\\ui_e{",given,"}{",element,"}") -- user interface element - nofelements = nofelements + 1 - end - local nofcommands = 0 - for given, command in next, complete.commands do - command = command[interface] or command.en or given - if command ~= given then - contextsprint(prtcatcodes,"\\ui_m{",given,"}{",command,"}") -- user interface macro - end - nofcommands = nofcommands + 1 - end - local nofformats = 0 - for given, format in next, complete.messages.formats do - formats[given] = format[interface] or format.en 
or given - nofformats = nofformats + 1 - end - local noftranslations = 0 - for given, translation in next, complete.messages.translations do - translations[given] = translation[interface] or translation.en or given - noftranslations = noftranslations + 1 - end - report_interface("definitions: %a constants, %a variables, %a elements, %a commands, %a formats, %a translations", - nofconstants,nofvariables,nofelements,nofcommands,nofformats,noftranslations) - end -end - -interfaces.cachedsetups = interfaces.cachedsetups or { } -interfaces.hashedsetups = interfaces.hashedsetups or { } - -local cachedsetups = interfaces.cachedsetups -local hashedsetups = interfaces.hashedsetups - -storage.register("interfaces/cachedsetups", cachedsetups, "interfaces.cachedsetups") -storage.register("interfaces/hashedsetups", hashedsetups, "interfaces.hashedsetups") - -function interfaces.cachesetup(t) - local hash = serialize(t) - local done = hashedsetups[hash] - if done then - return cachedsetups[done] - else - done = #cachedsetups + 1 - cachedsetups[done] = t - hashedsetups[hash] = done - return t - end -end - -function interfaces.is_command(str) - return (str and str ~= "" and token.csname_name(token.create(str)) ~= "") or false -- there will be a proper function for this -end - -function interfaces.interfacedcommand(name) - local command = complete.commands[name] - return command and command[currentinterface] or name -end - --- interface - -function commands.writestatus(category,message,...) - local r = reporters[category] - if r then - r(message,...) - end -end - -commands.registernamespace = interfaces.registernamespace -commands.setinterfaceconstant = interfaces.setconstant -commands.setinterfacevariable = interfaces.setvariable -commands.setinterfaceelement = interfaces.setelement -commands.setinterfacemessage = interfaces.setmessage -commands.setinterfacemessages = interfaces.setmessages -commands.showmessage = interfaces.showmessage - -function commands.doifelsemessage(category,tag) - commands.doifelse(interfaces.doifelsemessage(category,tag)) -end - -function commands.getmessage(category,tag,default) - context(interfaces.getmessage(category,tag,default)) -end - -function commands.showassignerror(namespace,key,value,line) - local ns, instance = match(namespace,"^(%d+)[^%a]+(%a+)") - if ns then - namespace = corenamespaces[tonumber(ns)] or ns - end - if instance then - context.writestatus("setup",formatters["error in line %a, namespace %a, instance %a, key %a"](line,namespace,instance,key)) - else - context.writestatus("setup",formatters["error in line %a, namespace %a, key %a"](line,namespace,key)) - end -end - --- a simple helper - -local settings_to_hash = utilities.parsers.settings_to_hash - -local makesparse = function(t) - for k, v in next, t do - if not v or v == "" then - t[k] = nil - end - end - return t -end - -function interfaces.checkedspecification(specification) - local kind = type(specification) - if kind == "table" then - return makesparse(specification) - elseif kind == "string" and specification ~= "" then - return makesparse(settings_to_hash(specification)) - else - return { } - end -end +if not modules then modules = { } end modules ['mult-ini'] = { + version = 1.001, + comment = "companion to mult-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, gmatch, match = string.format, string.gmatch, string.match +local lpegmatch = lpeg.match +local serialize = 
table.serialize + +local allocate = utilities.storage.allocate +local mark = utilities.storage.mark +local prtcatcodes = catcodes.numbers.prtcatcodes +local contextsprint = context.sprint +local setmetatableindex = table.setmetatableindex +local formatters = string.formatters + +local report_interface = logs.reporter("interface","initialization") + +interfaces = interfaces or { } +interfaces.constants = mark(interfaces.constants or { }) +interfaces.variables = mark(interfaces.variables or { }) +interfaces.elements = mark(interfaces.elements or { }) +interfaces.formats = mark(interfaces.formats or { }) +interfaces.translations = mark(interfaces.translations or { }) +interfaces.corenamespaces = mark(interfaces.corenamespaces or { }) + +local registerstorage = storage.register +local sharedstorage = storage.shared + +local constants = interfaces.constants +local variables = interfaces.variables +local elements = interfaces.elements +local formats = interfaces.formats +local translations = interfaces.translations +local corenamespaces = interfaces.corenamespaces +local reporters = { } -- just an optimization + +registerstorage("interfaces/constants", constants, "interfaces.constants") +registerstorage("interfaces/variables", variables, "interfaces.variables") +registerstorage("interfaces/elements", elements, "interfaces.elements") +registerstorage("interfaces/formats", formats, "interfaces.formats") +registerstorage("interfaces/translations", translations, "interfaces.translations") +registerstorage("interfaces/corenamespaces", corenamespaces, "interfaces.corenamespaces") + +interfaces.interfaces = { + "cs", "de", "en", "fr", "it", "nl", "ro", "pe", +} + +sharedstorage.currentinterface = sharedstorage.currentinterface or "en" +sharedstorage.currentresponse = sharedstorage.currentresponse or "en" + +local currentinterface = sharedstorage.currentinterface +local currentresponse = sharedstorage.currentresponse + +local complete = allocate() +interfaces.complete = complete + +local function resolve(t,k) -- one access needed to get loaded (not stored!) + report_interface("loading interface definitions from 'mult-def.lua'") + complete = dofile(resolvers.findfile("mult-def.lua")) + report_interface("loading interface messages from 'mult-mes.lua'") + complete.messages = dofile(resolvers.findfile("mult-mes.lua")) + interfaces.complete = complete + return rawget(complete,k) +end + +setmetatableindex(complete, resolve) + +local function valueiskey(t,k) -- will be helper + t[k] = k + return k +end + +setmetatableindex(variables, valueiskey) +setmetatableindex(constants, valueiskey) +setmetatableindex(elements, valueiskey) +setmetatableindex(formats, valueiskey) +setmetatableindex(translations, valueiskey) + +function interfaces.registernamespace(n,namespace) + corenamespaces[n] = namespace +end + +local function resolve(t,k) + local v = logs.reporter(k) + t[k] = v + return v +end + +setmetatableindex(reporters,resolve) + +for category, _ in next, translations do + -- We pre-create reporters for already defined messages + -- because otherwise listing is incomplete and we want + -- to use that for checking so delaying makes not much + -- sense there. 
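-- [editor's note, not part of the patch] The 'complete' table above is filled
-- lazily: table.setmetatableindex installs an __index handler that loads
-- mult-def.lua and mult-mes.lua on first access. A standalone sketch of that
-- pattern in plain Lua, where the literal data table stands in for the dofile
-- calls and the key name is made up:

local lazy = { }
setmetatable(lazy, {
    __index = function(t,k)
        local data = { answer = 42 }        -- pretend this came from dofile(...)
        for key, value in next, data do
            t[key] = value                  -- cache everything on first access
        end
        return rawget(t,k)                  -- nil if the key is still unknown
    end
})
print(lazy.answer)  -- triggers the resolver once; later lookups are plain indexing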
+ local r = reporters[category] +end + +-- adding messages + +local function add(target,tag,values) + local t = target[tag] + if not f then + target[tag] = values + else + for k, v in next, values do + if f[k] then + -- error + else + f[k] = v + end + end + end +end + +function interfaces.settranslation(tag,values) + add(translations,tag,values) +end + +function interfaces.setformat(tag,values) + add(formats,tag,values) +end + +-- the old method: + +local replacer = lpeg.replacer { { "--", "%%a" } } + +local function fulltag(category,tag) + return formatters["%s:%s"](category,lpegmatch(replacer,tag)) +end + +function interfaces.setmessages(category,str) + for tag, message in gmatch(str,"(%S+) *: *(.-) *[\n\r]") do + if tag == "title" then + translations[tag] = translations[tag] or tag + else + formats[fulltag(category,tag)] = lpegmatch(replacer,message) + end + end +end + +function interfaces.setmessage(category,tag,message) + formats[fulltag(category,tag)] = lpegmatch(replacer,message) +end + +function interfaces.getmessage(category,tag,default) + return formats[fulltag(category,tag)] or default or "unknown message" +end + +function interfaces.doifelsemessage(category,tag) + return formats[fulltag(category,tag)] +end + +local splitter = lpeg.splitat(",") + +function interfaces.showmessage(category,tag,arguments) + local r = reporters[category] + local f = formats[fulltag(category,tag)] + local t = type(arguments) + if t == "string" and #arguments > 0 then + r(f,lpegmatch(splitter,arguments)) + elseif t == "table" then + r(f,unpack(arguments)) + elseif arguments then + r(f,arguments) + else + r(f) + end +end + +-- till here + +function interfaces.setvariable(variable,given) + variables[given] = variable +end + +function interfaces.setconstant(constant,given) + constants[given] = constant +end + +function interfaces.setelement(element,given) + elements[given] = element +end + +-- the real thing: + +logs.setmessenger(context.verbatim.ctxreport) + +-- initialization + +function interfaces.setuserinterface(interface,response) + sharedstorage.currentinterface, currentinterface = interface, interface + sharedstorage.currentresponse, currentresponse = response, response + if environment.initex then + local nofconstants = 0 + for given, constant in next, complete.constants do + constant = constant[interface] or constant.en or given + constants[constant] = given -- breedte -> width + contextsprint(prtcatcodes,"\\ui_c{",given,"}{",constant,"}") -- user interface constant + nofconstants = nofconstants + 1 + end + local nofvariables = 0 + for given, variable in next, complete.variables do + variable = variable[interface] or variable.en or given + variables[given] = variable -- ja -> yes + contextsprint(prtcatcodes,"\\ui_v{",given,"}{",variable,"}") -- user interface variable + nofvariables = nofvariables + 1 + end + local nofelements = 0 + for given, element in next, complete.elements do + element = element[interface] or element.en or given + elements[element] = given + contextsprint(prtcatcodes,"\\ui_e{",given,"}{",element,"}") -- user interface element + nofelements = nofelements + 1 + end + local nofcommands = 0 + for given, command in next, complete.commands do + command = command[interface] or command.en or given + if command ~= given then + contextsprint(prtcatcodes,"\\ui_m{",given,"}{",command,"}") -- user interface macro + end + nofcommands = nofcommands + 1 + end + local nofformats = 0 + for given, format in next, complete.messages.formats do + formats[given] = format[interface] or format.en 
or given + nofformats = nofformats + 1 + end + local noftranslations = 0 + for given, translation in next, complete.messages.translations do + translations[given] = translation[interface] or translation.en or given + noftranslations = noftranslations + 1 + end + report_interface("definitions: %a constants, %a variables, %a elements, %a commands, %a formats, %a translations", + nofconstants,nofvariables,nofelements,nofcommands,nofformats,noftranslations) + end +end + +interfaces.cachedsetups = interfaces.cachedsetups or { } +interfaces.hashedsetups = interfaces.hashedsetups or { } + +local cachedsetups = interfaces.cachedsetups +local hashedsetups = interfaces.hashedsetups + +storage.register("interfaces/cachedsetups", cachedsetups, "interfaces.cachedsetups") +storage.register("interfaces/hashedsetups", hashedsetups, "interfaces.hashedsetups") + +function interfaces.cachesetup(t) + local hash = serialize(t) + local done = hashedsetups[hash] + if done then + return cachedsetups[done] + else + done = #cachedsetups + 1 + cachedsetups[done] = t + hashedsetups[hash] = done + return t + end +end + +function interfaces.is_command(str) + return (str and str ~= "" and token.csname_name(token.create(str)) ~= "") or false -- there will be a proper function for this +end + +function interfaces.interfacedcommand(name) + local command = complete.commands[name] + return command and command[currentinterface] or name +end + +-- interface + +function commands.writestatus(category,message,...) + local r = reporters[category] + if r then + r(message,...) + end +end + +commands.registernamespace = interfaces.registernamespace +commands.setinterfaceconstant = interfaces.setconstant +commands.setinterfacevariable = interfaces.setvariable +commands.setinterfaceelement = interfaces.setelement +commands.setinterfacemessage = interfaces.setmessage +commands.setinterfacemessages = interfaces.setmessages +commands.showmessage = interfaces.showmessage + +function commands.doifelsemessage(category,tag) + commands.doifelse(interfaces.doifelsemessage(category,tag)) +end + +function commands.getmessage(category,tag,default) + context(interfaces.getmessage(category,tag,default)) +end + +function commands.showassignerror(namespace,key,value,line) + local ns, instance = match(namespace,"^(%d+)[^%a]+(%a+)") + if ns then + namespace = corenamespaces[tonumber(ns)] or ns + end + if instance then + context.writestatus("setup",formatters["error in line %a, namespace %a, instance %a, key %a"](line,namespace,instance,key)) + else + context.writestatus("setup",formatters["error in line %a, namespace %a, key %a"](line,namespace,key)) + end +end + +-- a simple helper + +local settings_to_hash = utilities.parsers.settings_to_hash + +local makesparse = function(t) + for k, v in next, t do + if not v or v == "" then + t[k] = nil + end + end + return t +end + +function interfaces.checkedspecification(specification) + local kind = type(specification) + if kind == "table" then + return makesparse(specification) + elseif kind == "string" and specification ~= "" then + return makesparse(settings_to_hash(specification)) + else + return { } + end +end diff --git a/tex/context/base/mult-low.lua b/tex/context/base/mult-low.lua index 47e31978b..46c2c24d6 100644 --- a/tex/context/base/mult-low.lua +++ b/tex/context/base/mult-low.lua @@ -1,347 +1,347 @@ -if not modules then modules = { } end modules ['mult-low'] = { - version = 1.001, - comment = "companion to mult-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / 
ConTeXt Development Team", - license = "see context related readme files" -} - --- for syntax highlighters, only the ones that are for users (boring to collect them) - -return { - ["constants"] = { - -- - "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", - "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", - "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", - "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", - "points", "halfpoint", - "zeroskip", - "zeromuskip", "onemuskip", - "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", - "normalpagebox", - -- -- - "endoflinetoken", "outputnewlinechar", - -- - "emptytoks", "empty", "undefined", - -- - "voidbox", "emptybox", "emptyvbox", "emptyhbox", - -- - "bigskipamount", "medskipamount", "smallskipamount", - -- - "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", - "luatexengine", "pdftexengine", "xetexengine", "unknownengine", - "etexversion", "pdftexversion", "xetexversion", "xetexrevision", - -- - "activecatcode", - -- - "bgroup", "egroup", - "endline", - -- - "conditionaltrue", "conditionalfalse", - -- - "attributeunsetvalue", - -- - "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", - -- - "inicatcodes", - "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", - "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", - "xmlcatcodes", - -- - "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", - "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", - "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", - -- - "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", - "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", - "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", - "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", - "lessthanasciicode", "morethanasciicode", "doublecommentsignal", - "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", - "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", - "primeasciicode", - -- - "activemathcharcode", - -- - "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", - -- - "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", - -- - "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", - "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", - "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", - "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", - -- - "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", - "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", - "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", - -- - "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", - "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", 
"eofifcode", - "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", - -- - "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", - "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", - "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", - "mathsupdisplay", "mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", - -- - -- maybe a different class - -- - "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", - "doifmode", "doifmodeelse", "doifnotmode", - "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes", - "startenvironment", "stopenvironment", "environment", - "startcomponent", "stopcomponent", "component", - "startproduct", "stopproduct", "product", - "startproject", "stopproject", "project", - "starttext", "stoptext", "startnotext", "stopnotext","startdocument", "stopdocument", "documentvariable", "setupdocument", - "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", - -- - "startTEXpage", "stopTEXpage", - -- "startMPpage", "stopMPpage", -- already catched by nested lexer - -- - "enablemode", "disablemode", "preventmode", - "globalenablemode", "globaldisablemode", "globalpreventmode", - "pushmode", "popmode", - -- - "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", - -- - "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", - "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", - "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", - -- - "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", - -- - "continueifinputfile", - -- - "luastringsep", "!!bs", "!!es", - }, - ["helpers"] = { - -- - "startsetups", "stopsetups", - "startxmlsetups", "stopxmlsetups", - "startluasetups", "stopluasetups", - "starttexsetups", "stoptexsetups", - "startrawsetups", "stoprawsetups", - "startlocalsetups", "stoplocalsetups", - "starttexdefinition", "stoptexdefinition", - "starttexcode", "stoptexcode", - "startcontextcode", "stopcontextcode", - -- - "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", - "doifelsecommandhandler","doifnotcommandhandler","doifcommandhandler", - -- - "newmode", "setmode", "resetmode", - "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", - "booleanmodevalue", - -- - "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", - "newlanguage", "newfamily", "newfam", "newhelp", -- not used - -- - "then", - "begcsname", - -- - "strippedcsname", - -- - "firstargumentfalse", "firstargumenttrue", - "secondargumentfalse", "secondargumenttrue", - "thirdargumentfalse", "thirdargumenttrue", - "fourthargumentfalse", "fourthargumenttrue", - "fifthargumentfalse", "fifthsargumenttrue", - "sixthargumentfalse", "sixtsargumenttrue", - -- - "doglobal", "dodoglobal", "redoglobal", "resetglobal", - -- - "donothing", "dontcomplain", "forgetall", - -- - "donetrue", "donefalse", - -- - "htdp", - "unvoidbox", - "hfilll", "vfilll", - -- - "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", - -- - "currentcatcodetable", "defaultcatcodetable", 
"catcodetablename", - "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", - "pushcatcodetable", "popcatcodetable", "restorecatcodes", - "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", - -- - "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", - -- - "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", - "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", - "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", - "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", - "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", - "ruledpenalty", - -- - "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", - -- - "scratchcounter", "globalscratchcounter", - "scratchdimen", "globalscratchdimen", - "scratchskip", "globalscratchskip", - "scratchmuskip", "globalscratchmuskip", - "scratchtoks", "globalscratchtoks", - "scratchbox", "globalscratchbox", - -- - "availablehsize", "localhsize", "setlocalhsize", - -- - "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", - -- - "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", - "scratchhsize", "scratchvsize", - "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", - "scratchxposition", "scratchyposition", - "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", - -- - "scratchcounterone", "scratchcountertwo", "scratchcounterthree", - "scratchdimenone", "scratchdimentwo", "scratchdimenthree", - "scratchskipone", "scratchskiptwo", "scratchskipthree", - "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", - "scratchtoksone", "scratchtokstwo", "scratchtoksthree", - "scratchboxone", "scratchboxtwo", "scratchboxthree", - "scratchnx", "scratchny", "scratchmx", "scratchmy", - "scratchunicode", - -- - "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", - -- - "doif", "doifnot", "doifelse", - "doifinset", "doifnotinset", "doifinsetelse", - "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", - "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", - "doifelsevalue", "doifvalue", "doifnotvalue", - "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", - "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", - "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber", - "doifcommonelse", "doifcommon", "doifnotcommon", - "doifinstring", "doifnotinstring", "doifinstringelse", - "doifassignmentelse", "docheckassignment", - -- - "tracingall", "tracingnone", "loggingall", - -- - "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", - -- - "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", - "obeyspaces", "obeylines", "obeyedspace", "obeyedline", - "normalspace", - -- - "executeifdefined", - -- - "singleexpandafter", "doubleexpandafter", "tripleexpandafter", - -- - "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", - -- - "wait", "writestatus", "define", "defineexpandable", "redefine", - -- - "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", - -- - 
"installcorenamespace", - -- - "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", - "resetvalue", "undefinevalue", "ignorevalue", - "setuvalue", "setuevalue", "setugvalue", "setuxvalue", - -- - "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", - -- - "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", - -- - "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", - "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", - -- - "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", - "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", - -- - "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", - -- - "firstofoneargument", - "firstoftwoarguments", "secondoftwoarguments", - "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", - "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", - "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", - "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", - -- - "firstofoneunexpanded", - -- - "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", - "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", - -- - "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", - -- - "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant", - "newmacro", "setnewmacro", "newfraction", - "newsignal", - -- - "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", - "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", - "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", - "permitspacesbetweengroups", "dontpermitspacesbetweengroups", - -- - "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", - -- - "modulonumber", "dividenumber", - -- - "getfirstcharacter", "doiffirstcharelse", - -- - "startnointerference", "stopnointerference", - -- - "twodigits","threedigits", - -- - "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", - -- - "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", - "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", - -- - "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", - "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", - -- - "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", - "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", - -- - "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", - "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", - -- - 
"openordspacing", "openopspacing", "openbinspacing", "openrelspacing", - "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", - -- - "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", - "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", - -- - "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", - "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", - -- - "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", - "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", - -- - "normalreqno", - -- - "startimath", "stopimath", "normalstartimath", "normalstopimath", - "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", - -- - "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", - "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", - "mathtext", "setmathsmalltextbox", "setmathtextbox", - -- - "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", - "triggeruncrampedstyle", "triggercrampedstyle", - "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", - "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", - -- - "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse", - -- - "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", - "ctxlua", "luacode", "lateluacode", "directluacode", - "registerctxluafile", "ctxloadluafile", - "luaversion", "luamajorversion", "luaminorversion", - "ctxluacode", "luaconditional", "luaexpanded", - "startluaparameterset", "stopluaparameterset", "luaparameterset", - "definenamedlua", - "obeylualines", "obeyluatokens", - "startluacode", "stopluacode", "startlua", "stoplua", - -- - "carryoverpar", - -- - "Umathbotaccent", - } -} +if not modules then modules = { } end modules ['mult-low'] = { + version = 1.001, + comment = "companion to mult-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- for syntax highlighters, only the ones that are for users (boring to collect them) + +return { + ["constants"] = { + -- + "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", + "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", + "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", + "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", + "points", "halfpoint", + "zeroskip", + "zeromuskip", "onemuskip", + "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", + "normalpagebox", + -- -- + "endoflinetoken", "outputnewlinechar", + -- + "emptytoks", "empty", "undefined", + -- + "voidbox", "emptybox", "emptyvbox", "emptyhbox", + -- + "bigskipamount", "medskipamount", "smallskipamount", + -- + "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", + "luatexengine", "pdftexengine", "xetexengine", "unknownengine", + "etexversion", "pdftexversion", "xetexversion", "xetexrevision", + -- + "activecatcode", + -- + "bgroup", "egroup", + "endline", + -- + "conditionaltrue", 
"conditionalfalse", + -- + "attributeunsetvalue", + -- + "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", + -- + "inicatcodes", + "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", + "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", + "xmlcatcodes", + -- + "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", + "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", + "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", + -- + "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", + "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", + "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", + "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", + "lessthanasciicode", "morethanasciicode", "doublecommentsignal", + "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", + "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", + "primeasciicode", + -- + "activemathcharcode", + -- + "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", + -- + "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", + -- + "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", + "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", + "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", + "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", + -- + "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", + "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", + "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", + -- + "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", + "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", + "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", + -- + "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", + "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", + "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", + "mathsupdisplay", "mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", + -- + -- maybe a different class + -- + "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", + "doifmode", "doifmodeelse", "doifnotmode", + "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes", + "startenvironment", "stopenvironment", "environment", + "startcomponent", "stopcomponent", "component", + "startproduct", "stopproduct", "product", + "startproject", "stopproject", "project", + "starttext", "stoptext", "startnotext", "stopnotext","startdocument", "stopdocument", "documentvariable", "setupdocument", + "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", + -- + "startTEXpage", "stopTEXpage", + -- "startMPpage", 
"stopMPpage", -- already catched by nested lexer + -- + "enablemode", "disablemode", "preventmode", + "globalenablemode", "globaldisablemode", "globalpreventmode", + "pushmode", "popmode", + -- + "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", + -- + "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", + "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", + "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", + -- + "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", + -- + "continueifinputfile", + -- + "luastringsep", "!!bs", "!!es", + }, + ["helpers"] = { + -- + "startsetups", "stopsetups", + "startxmlsetups", "stopxmlsetups", + "startluasetups", "stopluasetups", + "starttexsetups", "stoptexsetups", + "startrawsetups", "stoprawsetups", + "startlocalsetups", "stoplocalsetups", + "starttexdefinition", "stoptexdefinition", + "starttexcode", "stoptexcode", + "startcontextcode", "stopcontextcode", + -- + "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", + "doifelsecommandhandler","doifnotcommandhandler","doifcommandhandler", + -- + "newmode", "setmode", "resetmode", + "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", + "booleanmodevalue", + -- + "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", + "newlanguage", "newfamily", "newfam", "newhelp", -- not used + -- + "then", + "begcsname", + -- + "strippedcsname", + -- + "firstargumentfalse", "firstargumenttrue", + "secondargumentfalse", "secondargumenttrue", + "thirdargumentfalse", "thirdargumenttrue", + "fourthargumentfalse", "fourthargumenttrue", + "fifthargumentfalse", "fifthsargumenttrue", + "sixthargumentfalse", "sixtsargumenttrue", + -- + "doglobal", "dodoglobal", "redoglobal", "resetglobal", + -- + "donothing", "dontcomplain", "forgetall", + -- + "donetrue", "donefalse", + -- + "htdp", + "unvoidbox", + "hfilll", "vfilll", + -- + "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", + -- + "currentcatcodetable", "defaultcatcodetable", "catcodetablename", + "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", + "pushcatcodetable", "popcatcodetable", "restorecatcodes", + "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", + -- + "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", + -- + "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", + "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", + "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", + "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", + "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", + "ruledpenalty", + -- + "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", + -- + "scratchcounter", "globalscratchcounter", + "scratchdimen", "globalscratchdimen", + "scratchskip", "globalscratchskip", + "scratchmuskip", "globalscratchmuskip", + "scratchtoks", "globalscratchtoks", + "scratchbox", "globalscratchbox", + -- + "availablehsize", "localhsize", "setlocalhsize", + -- + "nextbox", "dowithnextbox", "dowithnextboxcs", 
"dowithnextboxcontent", "dowithnextboxcontentcs", + -- + "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", + "scratchhsize", "scratchvsize", + "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", + "scratchxposition", "scratchyposition", + "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", + -- + "scratchcounterone", "scratchcountertwo", "scratchcounterthree", + "scratchdimenone", "scratchdimentwo", "scratchdimenthree", + "scratchskipone", "scratchskiptwo", "scratchskipthree", + "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", + "scratchtoksone", "scratchtokstwo", "scratchtoksthree", + "scratchboxone", "scratchboxtwo", "scratchboxthree", + "scratchnx", "scratchny", "scratchmx", "scratchmy", + "scratchunicode", + -- + "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", + -- + "doif", "doifnot", "doifelse", + "doifinset", "doifnotinset", "doifinsetelse", + "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", + "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", + "doifelsevalue", "doifvalue", "doifnotvalue", + "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", + "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", + "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber", + "doifcommonelse", "doifcommon", "doifnotcommon", + "doifinstring", "doifnotinstring", "doifinstringelse", + "doifassignmentelse", "docheckassignment", + -- + "tracingall", "tracingnone", "loggingall", + -- + "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", + -- + "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", + "obeyspaces", "obeylines", "obeyedspace", "obeyedline", + "normalspace", + -- + "executeifdefined", + -- + "singleexpandafter", "doubleexpandafter", "tripleexpandafter", + -- + "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", + -- + "wait", "writestatus", "define", "defineexpandable", "redefine", + -- + "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", + -- + "installcorenamespace", + -- + "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", + "resetvalue", "undefinevalue", "ignorevalue", + "setuvalue", "setuevalue", "setugvalue", "setuxvalue", + -- + "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", + -- + "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", + -- + "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", + "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", + -- + "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", + "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", + -- + "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", + -- + "firstofoneargument", + "firstoftwoarguments", "secondoftwoarguments", + "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", + "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", + "firstoffivearguments", "secondoffivearguments", 
"thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", + "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", + -- + "firstofoneunexpanded", + -- + "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", + "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", + -- + "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", + -- + "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant", + "newmacro", "setnewmacro", "newfraction", + "newsignal", + -- + "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", + "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", + "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", + "permitspacesbetweengroups", "dontpermitspacesbetweengroups", + -- + "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", + -- + "modulonumber", "dividenumber", + -- + "getfirstcharacter", "doiffirstcharelse", + -- + "startnointerference", "stopnointerference", + -- + "twodigits","threedigits", + -- + "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", + -- + "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", + "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", + -- + "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", + "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", + -- + "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", + "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", + -- + "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", + "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", + -- + "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", + "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", + -- + "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", + "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", + -- + "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", + "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", + -- + "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", + "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", + -- + "normalreqno", + -- + "startimath", "stopimath", "normalstartimath", "normalstopimath", + "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", + -- + "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", + "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", + "mathtext", "setmathsmalltextbox", "setmathtextbox", + -- + "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", 
"triggerscriptscriptstyle", + "triggeruncrampedstyle", "triggercrampedstyle", + "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", + "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", + -- + "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse", + -- + "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", + "ctxlua", "luacode", "lateluacode", "directluacode", + "registerctxluafile", "ctxloadluafile", + "luaversion", "luamajorversion", "luaminorversion", + "ctxluacode", "luaconditional", "luaexpanded", + "startluaparameterset", "stopluaparameterset", "luaparameterset", + "definenamedlua", + "obeylualines", "obeyluatokens", + "startluacode", "stopluacode", "startlua", "stoplua", + -- + "carryoverpar", + -- + "Umathbotaccent", + } +} diff --git a/tex/context/base/mult-mps.lua b/tex/context/base/mult-mps.lua index 59411cd97..f599111e8 100644 --- a/tex/context/base/mult-mps.lua +++ b/tex/context/base/mult-mps.lua @@ -1,115 +1,115 @@ -return { - tex = { - "btex", "etex", "verbatimtex", - }, - shortcuts = { - "..", "...", "--", "---", "&", - }, - primitives = { -- to be checked - "charcode", "day", "linecap", "linejoin", "miterlimit", "month", "pausing", - "prologues", "showstopping", "time", "tracingcapsules", "tracingchoices", "mpprocset", - "tracingcommands", "tracingequations", "tracinglostchars", - "tracingmacros", "tracingonline", "tracingoutput", "tracingrestores", - "tracingspecs", "tracingstats", "tracingtitles", "truecorners", - "warningcheck", "year", - "false", "nullpicture", "pencircle", "true", - "and", "angle", "arclength", "arctime", "ASCII", "boolean", "bot", - "char", "color", "cosd", "cycle", "decimal", "directiontime", "floor", "fontsize", - "hex", "infont", "intersectiontimes", "known", "length", "llcorner", - "lrcorner", "makepath", "makepen", "mexp", "mlog", "normaldeviate", "not", - "numeric", "oct", "odd", "or", "path", "pair", "pen", "penoffset", "picture", "point", - "postcontrol", "precontrol", "reverse", "rotated", "scaled", - "shifted", "sind", "slanted", "sqrt", "str", "string", "subpath", "substring", - "transform", "transformed", "ulcorner", "uniformdeviate", "unknown", - "urcorner", "xpart", "xscaled", "xxpart", "xypart", "ypart", "yscaled", "yxpart", - "yypart", "zscaled", - "addto", "clip", "input", "interim", "let", "newinternal", "save", "setbounds", - "shipout", "show", "showdependencies", "showtoken", "showvariable", - "special", - "begingroup", "endgroup", "of", "curl", "tension", "and", "controls", - "interpath", "on", "off", - "def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary", - "tertiary", "primarydef", "secondarydef", "tertiarydef", - "randomseed", "also", "contour", "doublepath", - "withcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within", - "forsuffixes", "downto", "upto", "step", "until", - "charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable", - "boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize", - "fontmaking", "charexists", - "cullit", "currenttransform", "gfcorners", "grayfont", "hround", - "imagerules", "lowres_fix", "nodisplays", "notransforms", "openit", - "displaying", "currentwindow", "screen_rows", "screen_cols", - "pixels_per_inch", "cull", "display", "openwindow", "numspecial", - "totalweight", "autorounding", "fillin", "proofing", "tracingpens", - "xoffset", 
"chardx", "granularity", "smoothing", "turningcheck", "yoffset", - "chardy", "hppp", "tracingedges", "vppp", - "extra_beginfig", "extra_endfig", "mpxbreak", - "endinput", - "message", "delimiters", "turningnumber", "errmessage", - "readstring", "scantokens", "end", "outer", "inner", "write", "to", "readfrom", - "withprescript", "withpostscript", - "top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt", - -- - "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart", - "rgbcolor", "cmykcolor", "greycolor", "graycolor", - "colormodel", "graypart", - "dashpart", "penpart", --- "colorpart", - "stroked", "filled", "textual", "clipped", "bounded", - "expandafter", - }, - commands = { - "beginfig", "endfig", - "rotatedaround", "reflectedabout", - "arrowhead", - "currentpen", "currentpicture", "cuttings", - "defaultfont", "extra_beginfig", "extra_endfig", - "ditto", "EOF", "down", - "evenly", "fullcircle", "halfcircle", "identity", "in", "left", - "origin", "pensquare", "quartercircle", "right", - "unitsquare", "up", "withdots", - "abs", "bbox", "ceiling", "center", "cutafter", "cutbefore", "dir", - "directionpoint", "div", "dotprod", "intersectionpoint", "inverse", "mod", - "round", "unitvector", "whatever", - "cutdraw", "draw", "drawarrow", "drawdblarrow", "fill", "filldraw", "drawdot", - "loggingall", "interact", "tracingall", "tracingnone", - "pickup", - "undraw", "unfill", "unfilldraw", - "buildcycle", "dashpattern", "decr", "dotlabel", "dotlabels", "drawoptions", - "incr", "label", "labels", "max", "min", "thelabel", "z", - "beginchar", "blacker", "capsule_end", "change_width", - "define_blacker_pixels", "define_corrected_pixels", - "define_good_x_pixels", "define_good_y_pixels", - "define_horizontal_corrected_pixels", "define_pixels", - "define_whole_blacker_pixels", "define_whole_pixels", - "define_whole_vertical_blacker_pixels", - "define_whole_vertical_pixels", "endchar", "extra_beginchar", - "extra_endchar", "extra_setup", "font_coding_scheme", - "clearxy", "clearit", "clearpen", "shipit", - "font_extra_space", - "exitunless", - "relax", "hide", "gobble", "gobbled", "stop", - "blankpicture", - "counterclockwise", "tensepath", "takepower", "direction", - "softjoin", -- "magstep", - "makelabel", -- "laboff", - "rotatedabout", "flex", "superellipse", "erase", "image", - "nullpen", "savepen", "clearpen", "penpos", "penlabels", -- "clear_pen_memory", - "range", "numtok", "thru", - "z", "laboff", - "bye", - -- - "red", "green", "blue", "cyan", "magenta", "yellow", "black", "white", "background", - "graypart", "graycolor", - -- - "mm", "pt", "dd", "bp", "cm", "pc", "cc", "in", - }, - internals = { -- we need to remove duplicates above - -- - "mitered", "rounded", "beveled", "butt", "squared", - "eps", "epsilon", "infinity", - "bboxmargin", "ahlength", "ahangle", "labeloffset", "dotlabeldiam", "defaultpen", "defaultscale", "join_radius", - -- - "pen_lft", "pen_rt", "pen_top", "pen_bot", -- "pen_count_", - }, -} +return { + tex = { + "btex", "etex", "verbatimtex", + }, + shortcuts = { + "..", "...", "--", "---", "&", + }, + primitives = { -- to be checked + "charcode", "day", "linecap", "linejoin", "miterlimit", "month", "pausing", + "prologues", "showstopping", "time", "tracingcapsules", "tracingchoices", "mpprocset", + "tracingcommands", "tracingequations", "tracinglostchars", + "tracingmacros", "tracingonline", "tracingoutput", "tracingrestores", + "tracingspecs", "tracingstats", "tracingtitles", "truecorners", + "warningcheck", "year", + "false", 
"nullpicture", "pencircle", "true", + "and", "angle", "arclength", "arctime", "ASCII", "boolean", "bot", + "char", "color", "cosd", "cycle", "decimal", "directiontime", "floor", "fontsize", + "hex", "infont", "intersectiontimes", "known", "length", "llcorner", + "lrcorner", "makepath", "makepen", "mexp", "mlog", "normaldeviate", "not", + "numeric", "oct", "odd", "or", "path", "pair", "pen", "penoffset", "picture", "point", + "postcontrol", "precontrol", "reverse", "rotated", "scaled", + "shifted", "sind", "slanted", "sqrt", "str", "string", "subpath", "substring", + "transform", "transformed", "ulcorner", "uniformdeviate", "unknown", + "urcorner", "xpart", "xscaled", "xxpart", "xypart", "ypart", "yscaled", "yxpart", + "yypart", "zscaled", + "addto", "clip", "input", "interim", "let", "newinternal", "save", "setbounds", + "shipout", "show", "showdependencies", "showtoken", "showvariable", + "special", + "begingroup", "endgroup", "of", "curl", "tension", "and", "controls", + "interpath", "on", "off", + "def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary", + "tertiary", "primarydef", "secondarydef", "tertiarydef", + "randomseed", "also", "contour", "doublepath", + "withcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within", + "forsuffixes", "downto", "upto", "step", "until", + "charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable", + "boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize", + "fontmaking", "charexists", + "cullit", "currenttransform", "gfcorners", "grayfont", "hround", + "imagerules", "lowres_fix", "nodisplays", "notransforms", "openit", + "displaying", "currentwindow", "screen_rows", "screen_cols", + "pixels_per_inch", "cull", "display", "openwindow", "numspecial", + "totalweight", "autorounding", "fillin", "proofing", "tracingpens", + "xoffset", "chardx", "granularity", "smoothing", "turningcheck", "yoffset", + "chardy", "hppp", "tracingedges", "vppp", + "extra_beginfig", "extra_endfig", "mpxbreak", + "endinput", + "message", "delimiters", "turningnumber", "errmessage", + "readstring", "scantokens", "end", "outer", "inner", "write", "to", "readfrom", + "withprescript", "withpostscript", + "top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt", + -- + "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart", + "rgbcolor", "cmykcolor", "greycolor", "graycolor", + "colormodel", "graypart", + "dashpart", "penpart", +-- "colorpart", + "stroked", "filled", "textual", "clipped", "bounded", + "expandafter", + }, + commands = { + "beginfig", "endfig", + "rotatedaround", "reflectedabout", + "arrowhead", + "currentpen", "currentpicture", "cuttings", + "defaultfont", "extra_beginfig", "extra_endfig", + "ditto", "EOF", "down", + "evenly", "fullcircle", "halfcircle", "identity", "in", "left", + "origin", "pensquare", "quartercircle", "right", + "unitsquare", "up", "withdots", + "abs", "bbox", "ceiling", "center", "cutafter", "cutbefore", "dir", + "directionpoint", "div", "dotprod", "intersectionpoint", "inverse", "mod", + "round", "unitvector", "whatever", + "cutdraw", "draw", "drawarrow", "drawdblarrow", "fill", "filldraw", "drawdot", + "loggingall", "interact", "tracingall", "tracingnone", + "pickup", + "undraw", "unfill", "unfilldraw", + "buildcycle", "dashpattern", "decr", "dotlabel", "dotlabels", "drawoptions", + "incr", "label", "labels", "max", "min", "thelabel", "z", + "beginchar", "blacker", "capsule_end", "change_width", 
+ "define_blacker_pixels", "define_corrected_pixels", + "define_good_x_pixels", "define_good_y_pixels", + "define_horizontal_corrected_pixels", "define_pixels", + "define_whole_blacker_pixels", "define_whole_pixels", + "define_whole_vertical_blacker_pixels", + "define_whole_vertical_pixels", "endchar", "extra_beginchar", + "extra_endchar", "extra_setup", "font_coding_scheme", + "clearxy", "clearit", "clearpen", "shipit", + "font_extra_space", + "exitunless", + "relax", "hide", "gobble", "gobbled", "stop", + "blankpicture", + "counterclockwise", "tensepath", "takepower", "direction", + "softjoin", -- "magstep", + "makelabel", -- "laboff", + "rotatedabout", "flex", "superellipse", "erase", "image", + "nullpen", "savepen", "clearpen", "penpos", "penlabels", -- "clear_pen_memory", + "range", "numtok", "thru", + "z", "laboff", + "bye", + -- + "red", "green", "blue", "cyan", "magenta", "yellow", "black", "white", "background", + "graypart", "graycolor", + -- + "mm", "pt", "dd", "bp", "cm", "pc", "cc", "in", + }, + internals = { -- we need to remove duplicates above + -- + "mitered", "rounded", "beveled", "butt", "squared", + "eps", "epsilon", "infinity", + "bboxmargin", "ahlength", "ahangle", "labeloffset", "dotlabeldiam", "defaultpen", "defaultscale", "join_radius", + -- + "pen_lft", "pen_rt", "pen_top", "pen_bot", -- "pen_count_", + }, +} diff --git a/tex/context/base/node-acc.lua b/tex/context/base/node-acc.lua index 4380ec3a4..c2675b970 100644 --- a/tex/context/base/node-acc.lua +++ b/tex/context/base/node-acc.lua @@ -1,140 +1,140 @@ -if not modules then modules = { } end modules ['node-acc'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local nodes, node = nodes, node - -local nodecodes = nodes.nodecodes -local tasks = nodes.tasks - -local traverse_nodes = node.traverse -local traverse_id = node.traverse_id -local copy_node = node.copy -local free_nodelist = node.flush_list - -local glue_code = nodecodes.glue -local kern_code = nodecodes.kern -local glyph_code = nodecodes.glyph -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist - -local a_characters = attributes.private("characters") - -local threshold = 65536 - --- todo: nbsp etc --- todo: collapse kerns - -local function injectspaces(head) - local p - local n = head - while n do - local id = n.id - if id == glue_code then -- todo: check for subtype related to spacing (13/14 but most seems to be 0) ---~ if n.spec.width > 0 then -- threshold - if p and p.id == glyph_code then - local g = copy_node(p) - local c = g.components - if c then -- it happens that we copied a ligature - free_nodelist(c) - g.components = nil - g.subtype = 256 - end - local a = n[a_characters] - local s = copy_node(n.spec) - g.char, n.spec = 32, s - p.next, g.prev = g, p - g.next, n.prev = n, g - s.width = s.width - g.width - if a then - g[a_characters] = a - end - s[a_characters] = 0 - n[a_characters] = 0 - end ---~ end - elseif id == hlist_code or id == vlist_code then - injectspaces(n.list,attribute) - -- elseif id == kern_code then -- the backend already collapses - -- local first = n - -- while true do - -- local nn = n.next - -- if nn and nn.id == kern_code then - -- -- maybe we should delete kerns but who cares at this stage - -- first.kern = first.kern + nn.kern - -- nn.kern = 0 - -- n = nn - -- else - -- break - -- end - -- end - end - p = n - n = n.next - end - 
return head, true -end - -nodes.handlers.accessibility = injectspaces - --- todo: - ---~ local a_hyphenated = attributes.private('hyphenated') ---~ ---~ local hyphenated, codes = { }, { } ---~ ---~ local function compact(n) ---~ local t = { } ---~ for n in traverse_id(glyph_code,n) do ---~ t[#t+1] = utfchar(n.char) -- check for unicode ---~ end ---~ return concat(t,"") ---~ end ---~ ---~ local function injectspans(head) ---~ for n in traverse_nodes(head) do ---~ local id = n.id ---~ if id == disc then ---~ local r, p = n.replace, n.pre ---~ if r and p then ---~ local str = compact(r) ---~ local hsh = hyphenated[str] ---~ if not hsh then ---~ hsh = #codes + 1 ---~ hyphenated[str] = hsh ---~ codes[hsh] = str ---~ end ---~ n[a_hyphenated] = hsh ---~ end ---~ elseif id == hlist_code or id == vlist_code then ---~ injectspans(n.list) ---~ end ---~ end ---~ return head, true ---~ end ---~ ---~ nodes.injectspans = injectspans ---~ ---~ tasks.appendaction("processors", "words", "nodes.injectspans") ---~ ---~ local function injectspans(head) ---~ for n in traverse_nodes(head) do ---~ local id = n.id ---~ if id == disc then ---~ local a = n[a_hyphenated] ---~ if a then ---~ local str = codes[a] ---~ local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str))) ---~ local e = new_pdfliteral("EMC") ---~ node.insert_before(head,n,b) ---~ node.insert_after(head,n,e) ---~ end ---~ elseif id == hlist_code or id == vlist_code then ---~ injectspans(n.list) ---~ end ---~ end ---~ end +if not modules then modules = { } end modules ['node-acc'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local nodes, node = nodes, node + +local nodecodes = nodes.nodecodes +local tasks = nodes.tasks + +local traverse_nodes = node.traverse +local traverse_id = node.traverse_id +local copy_node = node.copy +local free_nodelist = node.flush_list + +local glue_code = nodecodes.glue +local kern_code = nodecodes.kern +local glyph_code = nodecodes.glyph +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist + +local a_characters = attributes.private("characters") + +local threshold = 65536 + +-- todo: nbsp etc +-- todo: collapse kerns + +local function injectspaces(head) + local p + local n = head + while n do + local id = n.id + if id == glue_code then -- todo: check for subtype related to spacing (13/14 but most seems to be 0) +--~ if n.spec.width > 0 then -- threshold + if p and p.id == glyph_code then + local g = copy_node(p) + local c = g.components + if c then -- it happens that we copied a ligature + free_nodelist(c) + g.components = nil + g.subtype = 256 + end + local a = n[a_characters] + local s = copy_node(n.spec) + g.char, n.spec = 32, s + p.next, g.prev = g, p + g.next, n.prev = n, g + s.width = s.width - g.width + if a then + g[a_characters] = a + end + s[a_characters] = 0 + n[a_characters] = 0 + end +--~ end + elseif id == hlist_code or id == vlist_code then + injectspaces(n.list,attribute) + -- elseif id == kern_code then -- the backend already collapses + -- local first = n + -- while true do + -- local nn = n.next + -- if nn and nn.id == kern_code then + -- -- maybe we should delete kerns but who cares at this stage + -- first.kern = first.kern + nn.kern + -- nn.kern = 0 + -- n = nn + -- else + -- break + -- end + -- end + end + p = n + n = n.next + end + return head, true +end + 
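-- A minimal sketch of the pass that ends above: whenever glue follows a glyph, an
-- explicit space character (char 32) is linked in before the glue so that text
-- extraction from the shipped-out PDF sees real spaces (the real code also shrinks the
-- glue by the width of the injected glyph, which is omitted here for brevity). The
-- table-based node layout, the field names and the injectspaces_sketch name are
-- assumptions for illustration only, not the LuaTeX node API used by node-acc.lua.

local function injectspaces_sketch(head)
    local prev = nil
    local n    = head
    while n do
        if n.id == "glue" and prev and prev.id == "glyph" then
            -- link an explicit space character in front of the glue, as the real pass does
            prev.next = { id = "glyph", char = 32, font = prev.font, next = n }
        end
        prev, n = n, n.next
    end
    return head
end

-- usage: a tiny two-word list, "a" <glue> "b", gains a space glyph before the glue
local b = { id = "glyph", char = string.byte("b"), font = 1 }
local g = { id = "glue",  width = 65536, next = b }
local a = { id = "glyph", char = string.byte("a"), font = 1, next = g }
injectspaces_sketch(a)
assert(a.next.char == 32)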
+nodes.handlers.accessibility = injectspaces + +-- todo: + +--~ local a_hyphenated = attributes.private('hyphenated') +--~ +--~ local hyphenated, codes = { }, { } +--~ +--~ local function compact(n) +--~ local t = { } +--~ for n in traverse_id(glyph_code,n) do +--~ t[#t+1] = utfchar(n.char) -- check for unicode +--~ end +--~ return concat(t,"") +--~ end +--~ +--~ local function injectspans(head) +--~ for n in traverse_nodes(head) do +--~ local id = n.id +--~ if id == disc then +--~ local r, p = n.replace, n.pre +--~ if r and p then +--~ local str = compact(r) +--~ local hsh = hyphenated[str] +--~ if not hsh then +--~ hsh = #codes + 1 +--~ hyphenated[str] = hsh +--~ codes[hsh] = str +--~ end +--~ n[a_hyphenated] = hsh +--~ end +--~ elseif id == hlist_code or id == vlist_code then +--~ injectspans(n.list) +--~ end +--~ end +--~ return head, true +--~ end +--~ +--~ nodes.injectspans = injectspans +--~ +--~ tasks.appendaction("processors", "words", "nodes.injectspans") +--~ +--~ local function injectspans(head) +--~ for n in traverse_nodes(head) do +--~ local id = n.id +--~ if id == disc then +--~ local a = n[a_hyphenated] +--~ if a then +--~ local str = codes[a] +--~ local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str))) +--~ local e = new_pdfliteral("EMC") +--~ node.insert_before(head,n,b) +--~ node.insert_after(head,n,e) +--~ end +--~ elseif id == hlist_code or id == vlist_code then +--~ injectspans(n.list) +--~ end +--~ end +--~ end diff --git a/tex/context/base/node-aux.lua b/tex/context/base/node-aux.lua index e3fc7ad6f..21737a43b 100644 --- a/tex/context/base/node-aux.lua +++ b/tex/context/base/node-aux.lua @@ -1,389 +1,389 @@ -if not modules then modules = { } end modules ['node-aux'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: n1 .. n2 : __concat metatable - -local type, tostring = type, tostring - -local nodes, node = nodes, node - -local utfvalues = utf.values - -local nodecodes = nodes.nodecodes - -local glyph_code = nodecodes.glyph -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local attributelist_code = nodecodes.attributelist -- temporary -local math_code = nodecodes.math - -local nodepool = nodes.pool - -local new_glue = nodepool.glue -local new_glyph = nodepool.glyph - -local traverse_nodes = node.traverse -local traverse_id = node.traverse_id -local free_node = node.free -local hpack_nodes = node.hpack -local unset_attribute = node.unset_attribute -local first_glyph = node.first_glyph or node.first_character -local copy_node = node.copy -local copy_node_list = node.copy_list -local slide_nodes = node.slide -local insert_node_after = node.insert_after -local isnode = node.is_node - -local unsetvalue = attributes.unsetvalue - -local current_font = font.current - -local texbox = tex.box - -local report_error = logs.reporter("node-aux:error") - -function nodes.repackhlist(list,...) ---~ nodes.showsimplelist(list) - local temp, b = hpack_nodes(list,...) 
- list = temp.list - temp.list = nil - free_node(temp) - return list, b -end - -local function set_attributes(head,attr,value) - for n in traverse_nodes(head) do - n[attr] = value - local id = n.id - if id == hlist_node or id == vlist_node then - set_attributes(n.list,attr,value) - end - end -end - -local function set_unset_attributes(head,attr,value) - for n in traverse_nodes(head) do - if not n[attr] then - n[attr] = value - end - local id = n.id - if id == hlist_code or id == vlist_code then - set_unset_attributes(n.list,attr,value) - end - end -end - -local function unset_attributes(head,attr) - for n in traverse_nodes(head) do - n[attr] = unsetvalue - local id = n.id - if id == hlist_code or id == vlist_code then - unset_attributes(n.list,attr) - end - end -end - -nodes.setattribute = node.set_attribute -nodes.getattribute = node.has_attribute -nodes.unsetattribute = node.unset_attribute -nodes.has_attribute = node.has_attribute - -nodes.firstglyph = first_glyph -nodes.setattributes = set_attributes -nodes.setunsetattributes = set_unset_attributes -nodes.unsetattributes = unset_attributes - --- function nodes.is_skipable(a,id) -- skipable nodes at the margins during character protrusion --- return ( --- id ~= glyph_node --- or id == ins_node --- or id == mark_node --- or id == adjust_node --- or id == penalty_node --- or (id == glue_node and a.spec.writable) --- or (id == disc_node and a.pre == nil and a.post == nil and a.replace == nil) --- or (id == math_node and a.surround == 0) --- or (id == kern_node and (a.kern == 0 or a.subtype == NORMAL)) --- or (id == hlist_node and a.width == 0 and a.height == 0 and a.depth == 0 and a.list == nil) --- or (id == whatsit_node and a.subtype ~= pdf_refximage_node and a.subtype ~= pdf_refxform_node) --- ) --- end - --- history: --- --- --- local function glyph_width(a) --- local ch = chardata[a.font][a.char] --- return (ch and ch.width) or 0 --- end --- --- local function glyph_total(a) --- local ch = chardata[a.font][a.char] --- return (ch and (ch.height+ch.depth)) or 0 --- end --- --- local function non_discardable(a) -- inline --- return a.id < math_node -- brrrr --- end --- --- local function calculate_badness(t,s) --- if t == 0 then --- return 0 --- elseif s <= 0 then --- return INF_BAD --- else --- local r --- if t <= 7230584 then --- r = t * 297 / s --- elseif s >= 1663497 then --- r = t / floor(s / 297) --- else --- r = t --- end --- r = floor(r) --- if r > 1290 then --- return INF_BAD --- else --- return floor((r * r * r + 0x20000) / 0x40000) -- 0400000 / 01000000 --- end --- end --- end --- --- left-overs --- --- local function round_xn_over_d(x, n, d) --- local positive -- was x >= 0 --- if x >= 0 then --- positive = true --- else --- x = -x --- positive = false --- end --- local t = floor(x % 0x8000) * n -- 0100000 --- local f = floor(t / 0x8000) -- 0100000 --- local u = floor(x / 0x8000) * n + f -- 0100000 --- local v = floor(u % d) * 0x8000 + f -- 0100000 --- if floor(u / d) >= 0x8000 then -- 0100000 --- report_parbuilders('arith_error') --- else --- u = 0x8000 * floor(u / d) + floor(v / d) -- 0100000 --- end --- v = floor(v % d) --- if 2*v >= d then --- u = u + 1 --- end --- if positive then --- return u --- else --- return -u --- end --- end - -function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255 - if untagged then - return first_glyph(n) - else - for g in traverse_id(glyph_code,n) do - return g - end - end -end - -function nodes.firstcharinbox(n) - local l = texbox[n].list - if l then - for g in 
traverse_id(glyph_code,l) do - return g.char - end - end - return 0 -end - -if not node.end_of_math then - function node.end_of_math(n) - for n in traverse_id(math_code,n.next) do - return n - end - end -end - -nodes.endofmath = node.end_of_math - --- local function firstline(n) --- while n do --- local id = n.id --- if id == hlist_code then --- if n.subtype == line_code then --- return n --- else --- return firstline(n.list) --- end --- elseif id == vlist_code then --- return firstline(n.list) --- end --- n = n.next --- end --- end - --- nodes.firstline = firstline - --- this depends on fonts, so we have a funny dependency ... will be --- sorted out .. we could make tonodes a plugin into this - -local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-ini - if not str or str == "" then - return - end - local head, tail, space, fnt, template = nil, nil, nil, nil, nil - if not fnt then - fnt = current_font() - elseif type(fnt) ~= "number" and fnt.id == "glyph" then - fnt, template = nil, fnt - -- else - -- already a number - end - for s in utfvalues(str) do - local n - if s == 32 then - if space then - n = copy_node(space) - elseif fonts then -- depedency - local parameters = fonts.hashes.identifiers[fnt].parameters - space = new_glue(parameters.space,parameters.space_stretch,parameters.space_shrink) - n = space - end - elseif template then - n = copy_node(template) - n.char = s - else - n = new_glyph(fnt,s) - end - if attr then -- normally false when template - n.attr = copy_node_list(attr) - end - if head then - insert_node_after(head,tail,n) - else - head = n - end - tail = n - end - return head, tail -end - -nodes.tonodes = tonodes - -local function link(list,currentfont,currentattr,head,tail) - for i=1,#list do - local n = list[i] - if n then - local tn = isnode(n) - if not tn then - local tn = type(n) - if tn == "number" then - if not currentfont then - currentfont = current_font() - end - local h, t = tonodes(tostring(n),currentfont,currentattr) - if not h then - -- skip - elseif not head then - head, tail = h, t - else - tail.next, h.prev, tail = h, t, t - end - elseif tn == "string" then - if #tn > 0 then - if not currentfont then - currentfont = current_font() - end - local h, t = tonodes(n,currentfont,currentattr) - if not h then - -- skip - elseif not head then - head, tail = h, t - else - tail.next, h.prev, tail = h, t, t - end - end - elseif tn == "table" then - if #tn > 0 then - if not currentfont then - currentfont = current_font() - end - head, tail = link(n,currentfont,currentattr,head,tail) - end - end - elseif not head then - head = n - if n.next then - tail = slide_nodes(n) - else - tail = n - end - elseif n.id == attributelist_code then - -- weird case - report_error("weird node type in list at index %s:",i) - for i=1,#list do - local l = list[i] - report_error("%3i: %s %S",i,l.id == attributelist_code and "!" 
or ">",l) - end - os.exit() - else - tail.next = n - n.prev = tail - if n.next then - tail = slide_nodes(n) - else - tail = n - end - end - else - -- permitting nil is convenient - end - end - return head, tail -end - -nodes.link = link - -local function locate(start,wantedid,wantedsubtype) - for n in traverse_nodes(start) do - local id = n.id - if id == wantedid then - if not wantedsubtype or n.subtype == wantedsubtype then - return n - end - elseif id == hlist_code or id == vlist_code then - local found = locate(n.list,wantedid,wantedsubtype) - if found then - return found - end - end - end -end - -nodes.locate = locate - -function nodes.concat(list) - local head, tail - for i=1,#list do - local li = list[i] - if not li then - -- skip - elseif head then - tail.next = li - li.prev = tail - tail = li.next and slide_nodes(li) or li - else - head = li - tail = li.next and slide_nodes(li) or li - end - end - return head, tail -end +if not modules then modules = { } end modules ['node-aux'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: n1 .. n2 : __concat metatable + +local type, tostring = type, tostring + +local nodes, node = nodes, node + +local utfvalues = utf.values + +local nodecodes = nodes.nodecodes + +local glyph_code = nodecodes.glyph +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local attributelist_code = nodecodes.attributelist -- temporary +local math_code = nodecodes.math + +local nodepool = nodes.pool + +local new_glue = nodepool.glue +local new_glyph = nodepool.glyph + +local traverse_nodes = node.traverse +local traverse_id = node.traverse_id +local free_node = node.free +local hpack_nodes = node.hpack +local unset_attribute = node.unset_attribute +local first_glyph = node.first_glyph or node.first_character +local copy_node = node.copy +local copy_node_list = node.copy_list +local slide_nodes = node.slide +local insert_node_after = node.insert_after +local isnode = node.is_node + +local unsetvalue = attributes.unsetvalue + +local current_font = font.current + +local texbox = tex.box + +local report_error = logs.reporter("node-aux:error") + +function nodes.repackhlist(list,...) +--~ nodes.showsimplelist(list) + local temp, b = hpack_nodes(list,...) 
+ list = temp.list + temp.list = nil + free_node(temp) + return list, b +end + +local function set_attributes(head,attr,value) + for n in traverse_nodes(head) do + n[attr] = value + local id = n.id + if id == hlist_node or id == vlist_node then + set_attributes(n.list,attr,value) + end + end +end + +local function set_unset_attributes(head,attr,value) + for n in traverse_nodes(head) do + if not n[attr] then + n[attr] = value + end + local id = n.id + if id == hlist_code or id == vlist_code then + set_unset_attributes(n.list,attr,value) + end + end +end + +local function unset_attributes(head,attr) + for n in traverse_nodes(head) do + n[attr] = unsetvalue + local id = n.id + if id == hlist_code or id == vlist_code then + unset_attributes(n.list,attr) + end + end +end + +nodes.setattribute = node.set_attribute +nodes.getattribute = node.has_attribute +nodes.unsetattribute = node.unset_attribute +nodes.has_attribute = node.has_attribute + +nodes.firstglyph = first_glyph +nodes.setattributes = set_attributes +nodes.setunsetattributes = set_unset_attributes +nodes.unsetattributes = unset_attributes + +-- function nodes.is_skipable(a,id) -- skipable nodes at the margins during character protrusion +-- return ( +-- id ~= glyph_node +-- or id == ins_node +-- or id == mark_node +-- or id == adjust_node +-- or id == penalty_node +-- or (id == glue_node and a.spec.writable) +-- or (id == disc_node and a.pre == nil and a.post == nil and a.replace == nil) +-- or (id == math_node and a.surround == 0) +-- or (id == kern_node and (a.kern == 0 or a.subtype == NORMAL)) +-- or (id == hlist_node and a.width == 0 and a.height == 0 and a.depth == 0 and a.list == nil) +-- or (id == whatsit_node and a.subtype ~= pdf_refximage_node and a.subtype ~= pdf_refxform_node) +-- ) +-- end + +-- history: +-- +-- +-- local function glyph_width(a) +-- local ch = chardata[a.font][a.char] +-- return (ch and ch.width) or 0 +-- end +-- +-- local function glyph_total(a) +-- local ch = chardata[a.font][a.char] +-- return (ch and (ch.height+ch.depth)) or 0 +-- end +-- +-- local function non_discardable(a) -- inline +-- return a.id < math_node -- brrrr +-- end +-- +-- local function calculate_badness(t,s) +-- if t == 0 then +-- return 0 +-- elseif s <= 0 then +-- return INF_BAD +-- else +-- local r +-- if t <= 7230584 then +-- r = t * 297 / s +-- elseif s >= 1663497 then +-- r = t / floor(s / 297) +-- else +-- r = t +-- end +-- r = floor(r) +-- if r > 1290 then +-- return INF_BAD +-- else +-- return floor((r * r * r + 0x20000) / 0x40000) -- 0400000 / 01000000 +-- end +-- end +-- end +-- +-- left-overs +-- +-- local function round_xn_over_d(x, n, d) +-- local positive -- was x >= 0 +-- if x >= 0 then +-- positive = true +-- else +-- x = -x +-- positive = false +-- end +-- local t = floor(x % 0x8000) * n -- 0100000 +-- local f = floor(t / 0x8000) -- 0100000 +-- local u = floor(x / 0x8000) * n + f -- 0100000 +-- local v = floor(u % d) * 0x8000 + f -- 0100000 +-- if floor(u / d) >= 0x8000 then -- 0100000 +-- report_parbuilders('arith_error') +-- else +-- u = 0x8000 * floor(u / d) + floor(v / d) -- 0100000 +-- end +-- v = floor(v % d) +-- if 2*v >= d then +-- u = u + 1 +-- end +-- if positive then +-- return u +-- else +-- return -u +-- end +-- end + +function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255 + if untagged then + return first_glyph(n) + else + for g in traverse_id(glyph_code,n) do + return g + end + end +end + +function nodes.firstcharinbox(n) + local l = texbox[n].list + if l then + for g in 
traverse_id(glyph_code,l) do + return g.char + end + end + return 0 +end + +if not node.end_of_math then + function node.end_of_math(n) + for n in traverse_id(math_code,n.next) do + return n + end + end +end + +nodes.endofmath = node.end_of_math + +-- local function firstline(n) +-- while n do +-- local id = n.id +-- if id == hlist_code then +-- if n.subtype == line_code then +-- return n +-- else +-- return firstline(n.list) +-- end +-- elseif id == vlist_code then +-- return firstline(n.list) +-- end +-- n = n.next +-- end +-- end + +-- nodes.firstline = firstline + +-- this depends on fonts, so we have a funny dependency ... will be +-- sorted out .. we could make tonodes a plugin into this + +local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-ini + if not str or str == "" then + return + end + local head, tail, space, fnt, template = nil, nil, nil, nil, nil + if not fnt then + fnt = current_font() + elseif type(fnt) ~= "number" and fnt.id == "glyph" then + fnt, template = nil, fnt + -- else + -- already a number + end + for s in utfvalues(str) do + local n + if s == 32 then + if space then + n = copy_node(space) + elseif fonts then -- depedency + local parameters = fonts.hashes.identifiers[fnt].parameters + space = new_glue(parameters.space,parameters.space_stretch,parameters.space_shrink) + n = space + end + elseif template then + n = copy_node(template) + n.char = s + else + n = new_glyph(fnt,s) + end + if attr then -- normally false when template + n.attr = copy_node_list(attr) + end + if head then + insert_node_after(head,tail,n) + else + head = n + end + tail = n + end + return head, tail +end + +nodes.tonodes = tonodes + +local function link(list,currentfont,currentattr,head,tail) + for i=1,#list do + local n = list[i] + if n then + local tn = isnode(n) + if not tn then + local tn = type(n) + if tn == "number" then + if not currentfont then + currentfont = current_font() + end + local h, t = tonodes(tostring(n),currentfont,currentattr) + if not h then + -- skip + elseif not head then + head, tail = h, t + else + tail.next, h.prev, tail = h, t, t + end + elseif tn == "string" then + if #tn > 0 then + if not currentfont then + currentfont = current_font() + end + local h, t = tonodes(n,currentfont,currentattr) + if not h then + -- skip + elseif not head then + head, tail = h, t + else + tail.next, h.prev, tail = h, t, t + end + end + elseif tn == "table" then + if #tn > 0 then + if not currentfont then + currentfont = current_font() + end + head, tail = link(n,currentfont,currentattr,head,tail) + end + end + elseif not head then + head = n + if n.next then + tail = slide_nodes(n) + else + tail = n + end + elseif n.id == attributelist_code then + -- weird case + report_error("weird node type in list at index %s:",i) + for i=1,#list do + local l = list[i] + report_error("%3i: %s %S",i,l.id == attributelist_code and "!" 
or ">",l) + end + os.exit() + else + tail.next = n + n.prev = tail + if n.next then + tail = slide_nodes(n) + else + tail = n + end + end + else + -- permitting nil is convenient + end + end + return head, tail +end + +nodes.link = link + +local function locate(start,wantedid,wantedsubtype) + for n in traverse_nodes(start) do + local id = n.id + if id == wantedid then + if not wantedsubtype or n.subtype == wantedsubtype then + return n + end + elseif id == hlist_code or id == vlist_code then + local found = locate(n.list,wantedid,wantedsubtype) + if found then + return found + end + end + end +end + +nodes.locate = locate + +function nodes.concat(list) + local head, tail + for i=1,#list do + local li = list[i] + if not li then + -- skip + elseif head then + tail.next = li + li.prev = tail + tail = li.next and slide_nodes(li) or li + else + head = li + tail = li.next and slide_nodes(li) or li + end + end + return head, tail +end diff --git a/tex/context/base/node-bck.lua b/tex/context/base/node-bck.lua index feaa2c684..44fed5e17 100644 --- a/tex/context/base/node-bck.lua +++ b/tex/context/base/node-bck.lua @@ -1,161 +1,161 @@ -if not modules then modules = { } end modules ['node-bck'] = { - version = 1.001, - comment = "companion to node-bck.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- beware, this one takes quite some runtime, so we need a status flag --- maybe some page related state - -local attributes, nodes, node = attributes, nodes, node - -local nodecodes = nodes.nodecodes -local listcodes = nodes.listcodes - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glyph_code = nodecodes.glyph -local cell_code = listcodes.cell - -local traverse = node.traverse -local traverse_id = node.traverse_id - -local nodepool = nodes.pool -local tasks = nodes.tasks - -local new_rule = nodepool.rule -local new_glue = nodepool.glue - -local a_color = attributes.private('color') -local a_transparency = attributes.private('transparency') -local a_colorspace = attributes.private('colormodel') -local a_background = attributes.private('background') -local a_alignbackground = attributes.private('alignbackground') - -local function add_backgrounds(head) -- rather old code .. 
to be redone - local current = head - while current do - local id = current.id - if id == hlist_code or id == vlist_code then - local list = current.list - if list then - local head = add_backgrounds(list) - if head then - current.list = head - list = head - end - end - local width = current.width - if width > 0 then - local background = current[a_background] - if background then - -- direct to hbox - -- colorspace is already set so we can omit that and stick to color - local mode = current[a_colorspace] - if mode then - local height = current.height - local depth = current.depth - local skip = id == hlist_code and width or (height + depth) - local glue = new_glue(-skip) - local rule = new_rule(width,height,depth) - local color = current[a_color] - local transparency = current[a_transparency] - rule[a_colorspace] = mode - if color then - rule[a_color] = color - end - if transparency then - rule[a_transparency] = transparency - end - rule.next = glue - glue.prev = rule - if list then - glue.next = list - list.prev = glue - end - current.list = rule - end - end - end - end - current = current.next - end - return head, true -end - -local function add_alignbackgrounds(head) - local current = head - while current do - local id = current.id - if id == hlist_code then - local list = current.list - if not list then - -- no need to look - elseif current.subtype == cell_code then - local background = nil - local found = nil - -- for l in traverse(list) do - -- background = l[a_alignbackground] - -- if background then - -- found = l - -- break - -- end - -- end - -- we know that it's a fake hlist (could be user node) - -- but we cannot store tables in user nodes yet - for l in traverse_id(hpack_code,list) do - background = l[a_alignbackground] - if background then - found = l - end - break - end - -- - if background then - -- current has subtype 5 (cell) - local width = current.width - if width > 0 then - local mode = found[a_colorspace] - if mode then - local glue = new_glue(-width) - local rule = new_rule(width,current.height,current.depth) - local color = found[a_color] - local transparency = found[a_transparency] - rule[a_colorspace] = mode - if color then - rule[a_color] = color - end - if transparency then - rule[a_transparency] = transparency - end - rule.next = glue - glue.prev = rule - if list then - glue.next = list - list.prev = glue - end - current.list = rule - end - end - end - else - add_alignbackgrounds(list) - end - elseif id == vlist_code then - local list = current.list - if list then - add_alignbackgrounds(list) - end - end - current = current.next - end - return head, true -end - -nodes.handlers.backgrounds = add_backgrounds -nodes.handlers.alignbackgrounds = add_alignbackgrounds - -tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds") -tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds") +if not modules then modules = { } end modules ['node-bck'] = { + version = 1.001, + comment = "companion to node-bck.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- beware, this one takes quite some runtime, so we need a status flag +-- maybe some page related state + +local attributes, nodes, node = attributes, nodes, node + +local nodecodes = nodes.nodecodes +local listcodes = nodes.listcodes + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glyph_code = nodecodes.glyph +local cell_code = 
listcodes.cell + +local traverse = node.traverse +local traverse_id = node.traverse_id + +local nodepool = nodes.pool +local tasks = nodes.tasks + +local new_rule = nodepool.rule +local new_glue = nodepool.glue + +local a_color = attributes.private('color') +local a_transparency = attributes.private('transparency') +local a_colorspace = attributes.private('colormodel') +local a_background = attributes.private('background') +local a_alignbackground = attributes.private('alignbackground') + +local function add_backgrounds(head) -- rather old code .. to be redone + local current = head + while current do + local id = current.id + if id == hlist_code or id == vlist_code then + local list = current.list + if list then + local head = add_backgrounds(list) + if head then + current.list = head + list = head + end + end + local width = current.width + if width > 0 then + local background = current[a_background] + if background then + -- direct to hbox + -- colorspace is already set so we can omit that and stick to color + local mode = current[a_colorspace] + if mode then + local height = current.height + local depth = current.depth + local skip = id == hlist_code and width or (height + depth) + local glue = new_glue(-skip) + local rule = new_rule(width,height,depth) + local color = current[a_color] + local transparency = current[a_transparency] + rule[a_colorspace] = mode + if color then + rule[a_color] = color + end + if transparency then + rule[a_transparency] = transparency + end + rule.next = glue + glue.prev = rule + if list then + glue.next = list + list.prev = glue + end + current.list = rule + end + end + end + end + current = current.next + end + return head, true +end + +local function add_alignbackgrounds(head) + local current = head + while current do + local id = current.id + if id == hlist_code then + local list = current.list + if not list then + -- no need to look + elseif current.subtype == cell_code then + local background = nil + local found = nil + -- for l in traverse(list) do + -- background = l[a_alignbackground] + -- if background then + -- found = l + -- break + -- end + -- end + -- we know that it's a fake hlist (could be user node) + -- but we cannot store tables in user nodes yet + for l in traverse_id(hpack_code,list) do + background = l[a_alignbackground] + if background then + found = l + end + break + end + -- + if background then + -- current has subtype 5 (cell) + local width = current.width + if width > 0 then + local mode = found[a_colorspace] + if mode then + local glue = new_glue(-width) + local rule = new_rule(width,current.height,current.depth) + local color = found[a_color] + local transparency = found[a_transparency] + rule[a_colorspace] = mode + if color then + rule[a_color] = color + end + if transparency then + rule[a_transparency] = transparency + end + rule.next = glue + glue.prev = rule + if list then + glue.next = list + list.prev = glue + end + current.list = rule + end + end + end + else + add_alignbackgrounds(list) + end + elseif id == vlist_code then + local list = current.list + if list then + add_alignbackgrounds(list) + end + end + current = current.next + end + return head, true +end + +nodes.handlers.backgrounds = add_backgrounds +nodes.handlers.alignbackgrounds = add_alignbackgrounds + +tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds") +tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds") diff --git a/tex/context/base/node-dir.lua b/tex/context/base/node-dir.lua index 6ee5cd4b8..9a1f4e30c 
100644 --- a/tex/context/base/node-dir.lua +++ b/tex/context/base/node-dir.lua @@ -1,309 +1,309 @@ -if not modules then modules = { } end modules ['node-dir'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Taco Hoekwater and Hans Hagen", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ -

    In the process of cleaning up the lua variant of the parbuilder -we ran into a couple of functions (translated c macros) that were -somewhat inefficient. More convenient is to use hashes although at -the c-end still macros are used. In the process directions.h was -adapted and now has the mappings as comments. This lua file is -based on that file. -]]-- - -local allocate = utilities.storage.allocate - -local nodes = nodes - -nodes.is_mirrored = allocate { - -- TLT = false, - -- TRT = false, - -- LTL = false, - -- RTT = false, -} - -nodes.is_rotated = allocate { - -- TLT = false, - -- TRT = false, - -- LTL = false, - RTT = true, ["+RTT"] = true, -} - -nodes.textdir_is_parallel = allocate { - TLT = { - TLT = true, ["+TLT"] = true, - TRT = true, ["+TRT"] = true, - -- LTL = false, - -- RTT = false, - }, - TRT= { - TLT = true, ["+TLT"] = true, - TRT = true, ["+TRT"] = true, - -- LTL = false, - -- RTT = false, - }, - LTL = { - -- TLT = false, - -- TRT = false, - LTL = true, ["+LTL"] = true, - RTT = true, ["+RTT"] = true, - }, - RTT = { - -- TLT = false, - -- TRT = false, - LTL = true, ["+LTL"] = true, - RTT = true, ["+RTT"] = true, - } -} - -nodes.pardir_is_parallel = allocate { - TLT = { - TLT = true, ["+TLT"] = true, - TRT = true, ["+TRT"] = true, - -- LTL = false, - -- RTT = false, - }, - TRT = { - TLT = true, ["+TLT"] = true, - TRT = true, ["+TRT"] = true, - -- LTL = false, - -- RTT = false, - }, - LTL = { - -- TLT = false, - -- TRT = false, - LTL = true, ["+LTL"] = true, - RTT = true, ["+RTT"] = true, - }, - RTT = { - -- TLT = false, - -- TRT = false, - LTL = true, ["+LTL"] = true, - RTT = true, ["+RTT"] = true, - }, -} - -nodes.pardir_is_opposite = allocate { - TLT = { - -- TLT = false, - -- TRT = false, - -- LTL = false, - -- RTT = false, - }, - TRT = { - -- TLT = false, - -- TRT = false, - -- LTL = false, - -- RTT = false, - }, - LTL = { - -- TLT = false, - -- TRT = false, - -- LTL = false, - RTT = true, ["+RTT"] = true, - }, - RTT = { - -- TLT = false, - -- TRT = false, - LTL = true, ["+LTL"] = true, - -- RTT = false, - }, -} - -nodes.textdir_is_opposite = allocate { - TLT = { - -- TLT = false, - TRT = true, ["+TRT"] = true, - -- LTL = false, - -- RTT = false, - }, - TRT= { - TLT = true, ["+TLT"] = true, - -- TRT = false, - -- LTL = false, - -- RTT = false, - }, - LTL = { - -- TLT = false, - -- TRT = false, - -- LTL = false, - -- RTT = false, - }, - RTT = { - -- TLT = false, - -- TRT = false, - -- LTL = false, - -- RTT = false, - }, -} - -nodes.glyphdir_is_opposite = allocate { - TLT = { - -- TLT = false, - -- TRT = false, - -- LTL = false, - -- RTT = false, - }, - TRT= { - -- TLT = false, - -- TRT = false, - -- LTL = false, - -- RTT = false, - }, - LTL = { - -- TLT = false, - -- TRT = false, - -- LTL = false, - -- RTT = false, - }, - RTT = { - -- TLT = false, - -- TRT = false, - -- LTL = false, - -- RTT = false, - }, -} - -nodes.pardir_is_equal = allocate { - TLT = { - TLT = true, ["+TLT"] = true, - TRT = true, ["+TRT"] = true, - -- LTL = false, - -- RTT = false, - }, - TRT= { - TLT = true, ["+TLT"] = true, - TRT = true, ["+TRT"] = true, - -- LTL = false, - -- RTT = false, - }, - LTL= { - -- TLT = false, - -- TRT = false, - LTL = true, ["+LTL"] = true, - -- RTT = false, - }, - RTT= { - -- TLT = false, - -- TRT = false, - -- LTL = false, - RTT = true, ["+RTT"] = true, - }, -} - -nodes.textdir_is_equal = allocate { - TLT = { - TLT = true, ["+TLT"] = true, - -- TRT = false, - -- LTL = false, - -- RTT = false, - }, - TRT= { - -- TLT = false, - TRT = true, ["+TRT"] = true, - -- LTL = 
false, - -- RTT = false, - }, - LTL = { - -- TLT = false, - -- TRT = false, - LTL = true, ["+LTL"] = true, - RTT = true, ["+RTT"] = true, - }, - RTT = { - -- TLT = false, - -- TRT = false, - LTL = true, ["+LTL"] = true, - RTT = true, ["+RTT"] = true, - }, -} - -nodes.glyphdir_is_equal = allocate { - TLT = { - TLT = true, ["+TLT"] = true, - TRT = true, ["+TRT"] = true, - -- LTL = false, - RTT = true, ["+RTT"] = true, - }, - TRT= { - TLT = true, ["+TLT"] = true, - TRT = true, ["+TRT"] = true, - -- LTL = false, - RTT = true, ["+RTT"] = true, - }, - LTL = { - -- TLT = false, - -- TRT = false, - LTL = true, ["+LTL"] = true, - -- RTT = false, - }, - RTT = { - TLT = true, ["+TLT"] = true, - TRT = true, ["+TRT"] = true, - -- LTL = false, - RTT = true, ["+RTT"] = true, - }, -} - -nodes.partextdir_is_equal = allocate { - TLT = { - -- TLT = false, - -- TRT = false, - LTL = true, ["+LTL"] = true, - RTT = true, ["+RTT"] = true, - }, - TRT= { - -- TLT = false, - -- TRT = false, - LTL = true, ["+LTL"] = true, - RTT = true, ["+RTT"] = true, - }, - LTL = { - TLT = true, ["+TLT"] = true, - -- TRT = false, - -- LTL = false, - -- RTT = false, - }, - RTT = { - -- TLT = false, - TRT = true, ["+TRT"] = true, - -- LTL = false, - -- RTT = false, - }, -} - -nodes.textdir_is_is = allocate { - TLT = true, ["+TLT"] = true, - -- TRT = false, - -- LTL = false, - -- RTT = false, -} - -nodes.glyphdir_is_orthogonal = allocate { - TLT = true, ["+TLT"] = true, - TRT = true, ["+TRT"] = true, - LTL = true, ["+LTL"] = true, - -- RTT = false -} - -nodes.dir_is_pop = allocate { - ["-TRT"] = true, - ["-TLT"] = true, - ["-LTL"] = true, - ["-RTT"] = true, -} - -nodes.dir_negation = allocate { - ["-TRT"] = "+TRT", - ["-TLT"] = "+TLT", - ["-LTL"] = "+LTL", - ["-RTT"] = "+RTT", - ["+TRT"] = "-TRT", - ["+TLT"] = "-TLT", - ["+LTL"] = "-LTL", - ["+RTT"] = "-RTT", -} +if not modules then modules = { } end modules ['node-dir'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Taco Hoekwater and Hans Hagen", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ +

    In the process of cleaning up the lua variant of the parbuilder +we ran into a couple of functions (translated c macros) that were +somewhat inefficient. More convenient is to use hashes although at +the c-end still macros are used. In the process directions.h was +adapted and now has the mappings as comments. This lua file is +based on that file. +]]-- + +local allocate = utilities.storage.allocate + +local nodes = nodes + +nodes.is_mirrored = allocate { + -- TLT = false, + -- TRT = false, + -- LTL = false, + -- RTT = false, +} + +nodes.is_rotated = allocate { + -- TLT = false, + -- TRT = false, + -- LTL = false, + RTT = true, ["+RTT"] = true, +} + +nodes.textdir_is_parallel = allocate { + TLT = { + TLT = true, ["+TLT"] = true, + TRT = true, ["+TRT"] = true, + -- LTL = false, + -- RTT = false, + }, + TRT= { + TLT = true, ["+TLT"] = true, + TRT = true, ["+TRT"] = true, + -- LTL = false, + -- RTT = false, + }, + LTL = { + -- TLT = false, + -- TRT = false, + LTL = true, ["+LTL"] = true, + RTT = true, ["+RTT"] = true, + }, + RTT = { + -- TLT = false, + -- TRT = false, + LTL = true, ["+LTL"] = true, + RTT = true, ["+RTT"] = true, + } +} + +nodes.pardir_is_parallel = allocate { + TLT = { + TLT = true, ["+TLT"] = true, + TRT = true, ["+TRT"] = true, + -- LTL = false, + -- RTT = false, + }, + TRT = { + TLT = true, ["+TLT"] = true, + TRT = true, ["+TRT"] = true, + -- LTL = false, + -- RTT = false, + }, + LTL = { + -- TLT = false, + -- TRT = false, + LTL = true, ["+LTL"] = true, + RTT = true, ["+RTT"] = true, + }, + RTT = { + -- TLT = false, + -- TRT = false, + LTL = true, ["+LTL"] = true, + RTT = true, ["+RTT"] = true, + }, +} + +nodes.pardir_is_opposite = allocate { + TLT = { + -- TLT = false, + -- TRT = false, + -- LTL = false, + -- RTT = false, + }, + TRT = { + -- TLT = false, + -- TRT = false, + -- LTL = false, + -- RTT = false, + }, + LTL = { + -- TLT = false, + -- TRT = false, + -- LTL = false, + RTT = true, ["+RTT"] = true, + }, + RTT = { + -- TLT = false, + -- TRT = false, + LTL = true, ["+LTL"] = true, + -- RTT = false, + }, +} + +nodes.textdir_is_opposite = allocate { + TLT = { + -- TLT = false, + TRT = true, ["+TRT"] = true, + -- LTL = false, + -- RTT = false, + }, + TRT= { + TLT = true, ["+TLT"] = true, + -- TRT = false, + -- LTL = false, + -- RTT = false, + }, + LTL = { + -- TLT = false, + -- TRT = false, + -- LTL = false, + -- RTT = false, + }, + RTT = { + -- TLT = false, + -- TRT = false, + -- LTL = false, + -- RTT = false, + }, +} + +nodes.glyphdir_is_opposite = allocate { + TLT = { + -- TLT = false, + -- TRT = false, + -- LTL = false, + -- RTT = false, + }, + TRT= { + -- TLT = false, + -- TRT = false, + -- LTL = false, + -- RTT = false, + }, + LTL = { + -- TLT = false, + -- TRT = false, + -- LTL = false, + -- RTT = false, + }, + RTT = { + -- TLT = false, + -- TRT = false, + -- LTL = false, + -- RTT = false, + }, +} + +nodes.pardir_is_equal = allocate { + TLT = { + TLT = true, ["+TLT"] = true, + TRT = true, ["+TRT"] = true, + -- LTL = false, + -- RTT = false, + }, + TRT= { + TLT = true, ["+TLT"] = true, + TRT = true, ["+TRT"] = true, + -- LTL = false, + -- RTT = false, + }, + LTL= { + -- TLT = false, + -- TRT = false, + LTL = true, ["+LTL"] = true, + -- RTT = false, + }, + RTT= { + -- TLT = false, + -- TRT = false, + -- LTL = false, + RTT = true, ["+RTT"] = true, + }, +} + +nodes.textdir_is_equal = allocate { + TLT = { + TLT = true, ["+TLT"] = true, + -- TRT = false, + -- LTL = false, + -- RTT = false, + }, + TRT= { + -- TLT = false, + TRT = true, ["+TRT"] = true, + -- LTL = 
false, + -- RTT = false, + }, + LTL = { + -- TLT = false, + -- TRT = false, + LTL = true, ["+LTL"] = true, + RTT = true, ["+RTT"] = true, + }, + RTT = { + -- TLT = false, + -- TRT = false, + LTL = true, ["+LTL"] = true, + RTT = true, ["+RTT"] = true, + }, +} + +nodes.glyphdir_is_equal = allocate { + TLT = { + TLT = true, ["+TLT"] = true, + TRT = true, ["+TRT"] = true, + -- LTL = false, + RTT = true, ["+RTT"] = true, + }, + TRT= { + TLT = true, ["+TLT"] = true, + TRT = true, ["+TRT"] = true, + -- LTL = false, + RTT = true, ["+RTT"] = true, + }, + LTL = { + -- TLT = false, + -- TRT = false, + LTL = true, ["+LTL"] = true, + -- RTT = false, + }, + RTT = { + TLT = true, ["+TLT"] = true, + TRT = true, ["+TRT"] = true, + -- LTL = false, + RTT = true, ["+RTT"] = true, + }, +} + +nodes.partextdir_is_equal = allocate { + TLT = { + -- TLT = false, + -- TRT = false, + LTL = true, ["+LTL"] = true, + RTT = true, ["+RTT"] = true, + }, + TRT= { + -- TLT = false, + -- TRT = false, + LTL = true, ["+LTL"] = true, + RTT = true, ["+RTT"] = true, + }, + LTL = { + TLT = true, ["+TLT"] = true, + -- TRT = false, + -- LTL = false, + -- RTT = false, + }, + RTT = { + -- TLT = false, + TRT = true, ["+TRT"] = true, + -- LTL = false, + -- RTT = false, + }, +} + +nodes.textdir_is_is = allocate { + TLT = true, ["+TLT"] = true, + -- TRT = false, + -- LTL = false, + -- RTT = false, +} + +nodes.glyphdir_is_orthogonal = allocate { + TLT = true, ["+TLT"] = true, + TRT = true, ["+TRT"] = true, + LTL = true, ["+LTL"] = true, + -- RTT = false +} + +nodes.dir_is_pop = allocate { + ["-TRT"] = true, + ["-TLT"] = true, + ["-LTL"] = true, + ["-RTT"] = true, +} + +nodes.dir_negation = allocate { + ["-TRT"] = "+TRT", + ["-TLT"] = "+TLT", + ["-LTL"] = "+LTL", + ["-RTT"] = "+RTT", + ["+TRT"] = "-TRT", + ["+TLT"] = "-TLT", + ["+LTL"] = "-LTL", + ["+RTT"] = "-RTT", +} diff --git a/tex/context/base/node-ext.lua b/tex/context/base/node-ext.lua index 82ec04ee5..df2a37650 100644 --- a/tex/context/base/node-ext.lua +++ b/tex/context/base/node-ext.lua @@ -1,30 +1,30 @@ -if not modules then modules = { } end modules ['node-ext'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

    Serializing nodes can be handy for tracing. Also, saving and -loading node lists can come in handy as soon as we are going to -use external applications to process node lists.

    ---ldx]]-- - -function nodes.show(stack) --- logs.writer(table.serialize(stack)) -end - -function nodes.save(stack,name) -- *.ltn : luatex node file --- if name then --- file.savedata(name,table.serialize(stack)) --- else --- logs.writer(table.serialize(stack)) --- end -end - -function nodes.load(name) --- return file.loaddata(name) --- -- todo -end +if not modules then modules = { } end modules ['node-ext'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

    Serializing nodes can be handy for tracing. Also, saving and +loading node lists can come in handy as soon as we are going to +use external applications to process node lists.
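The nodes.show, nodes.save and nodes.load functions in this file are stubs whose intended behaviour is only kept as commented-out calls (see the definitions just below in the patch). A minimal sketch of that intent, assuming the ConTeXt helpers file.savedata, file.loaddata, table.serialize and logs.writer that the commented lines refer to; this is an illustration, not the patch's code:

function nodes.save(stack,name) -- *.ltn : luatex node file
    if name then
        file.savedata(name,table.serialize(stack))
    else
        logs.writer(table.serialize(stack))
    end
end

function nodes.load(name)
    -- returns the raw serialized data; turning it back into a node list
    -- is still the "todo" mentioned in the stub
    return file.loaddata(name)
end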

    +--ldx]]-- + +function nodes.show(stack) +-- logs.writer(table.serialize(stack)) +end + +function nodes.save(stack,name) -- *.ltn : luatex node file +-- if name then +-- file.savedata(name,table.serialize(stack)) +-- else +-- logs.writer(table.serialize(stack)) +-- end +end + +function nodes.load(name) +-- return file.loaddata(name) +-- -- todo +end diff --git a/tex/context/base/node-fin.lua b/tex/context/base/node-fin.lua index 2e62ebcb5..e95725d29 100644 --- a/tex/context/base/node-fin.lua +++ b/tex/context/base/node-fin.lua @@ -1,1222 +1,1222 @@ -if not modules then modules = { } end modules ['node-fin'] = { - version = 1.001, - comment = "companion to node-fin.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- this module is being reconstructed --- local functions, only slightly slower - -local next, type, format = next, type, string.format - -local attributes, nodes, node = attributes, nodes, node - -local copy_node = node.copy -local find_tail = node.slide - -local nodecodes = nodes.nodecodes -local whatcodes = nodes.whatcodes - -local glyph_code = nodecodes.glyph -local disc_code = nodecodes.disc -local glue_code = nodecodes.glue -local rule_code = nodecodes.rule -local whatsit_code = nodecodes.whatsit -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist - -local pdfliteral_code = whatcodes.pdfliteral - -local states = attributes.states -local numbers = attributes.numbers -local a_trigger = attributes.private('trigger') -local triggering = false - -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming -local loadstripped = utilities.lua.loadstripped -local unsetvalue = attributes.unsetvalue - --- these two will be like trackers - -function states.enabletriggering() - triggering = true -end -function states.disabletriggering() - triggering = false -end - --- the following code is no longer needed due to the new backend --- but we keep it around for a while as an example --- --- states.collected = states.collected or { } --- --- storage.register("states/collected", states.collected, "states.collected") --- --- local collected = states.collected --- --- function states.collect(str) --- collected[#collected+1] = str --- end --- --- function states.flush() --- if #collected > 0 then --- for i=1,#collected do --- context(collected[i]) -- we're in context mode anyway --- end --- collected = { } --- states.collected = collected --- end --- end --- --- function states.check() --- logs.report("states",concat(collected,"\n")) --- end - --- we used to do the main processor loop here and call processor for each node --- but eventually this was too much a slow down (1 sec on 23 for 120 pages mk) --- so that we moved looping to the processor itself; this may lead to a bit of --- duplicate code once that we have more state handlers - --- local function process_attribute(head,plugin) -- head,attribute,enabled,initializer,resolver,processor,finalizer --- local namespace = plugin.namespace --- if namespace.enabled ~= false then -- this test will go away --- starttiming(attributes) -- in principle we could delegate this to the main caller --- local done, used, ok = false, nil, false --- local attribute = namespace.attribute or numbers[plugin.name] -- todo: plugin.attribute --- local processor = plugin.processor --- if processor then --- local initializer = plugin.initializer --- local resolver = plugin.resolver --- local inheritance = 
(resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip ! --- if initializer then --- initializer(namespace,attribute,head) --- end --- head, ok = processor(namespace,attribute,head,inheritance) --- if ok then --- local finalizer = plugin.finalizer --- if finalizer then --- head, ok, used = finalizer(namespace,attribute,head) --- if used then --- local flusher = plugin.flusher --- if flusher then --- head = flusher(namespace,attribute,head,used) --- end --- end --- end --- done = true --- end --- end --- stoptiming(attributes) --- return head, done --- else --- return head, false --- end --- end --- --- function nodes.installattributehandler(plugin) -- we need to avoid this nested function --- return function(head) --- return process_attribute(head,plugin) --- end --- end - --- An experiment: lean and mean functions. It is not really faster but --- with upcoming functionality it might make a difference, e.g. features --- like 'casing' and 'italics' can be called a lot so there it makes sense. - -nodes.plugindata = nil - -local template = [[ -local plugin = nodes.plugindata -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming -local namespace = plugin.namespace -local attribute = namespace.attribute or attributes.numbers[plugin.name] -local processor = plugin.processor -local initializer = plugin.initializer -local resolver = plugin.resolver -local finalizer = plugin.finalizer -local flusher = plugin.flusher -if not processor then - return function(head) - return head, false - end -elseif initializer or finalizer or resolver then - return function(head) - starttiming(attributes) - local done, used, ok = false, nil, false - local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip ! - if initializer then - initializer(namespace,attribute,head) - end - head, ok = processor(namespace,attribute,head,inheritance) - if ok then - if finalizer then - head, ok, used = finalizer(namespace,attribute,head) - if used and flusher then - head = flusher(namespace,attribute,head,used) - end - end - done = true - end - stoptiming(attributes) - return head, done - end -else - return function(head) - starttiming(attributes) - local head, done = processor(namespace,attribute,head) - stoptiming(attributes) - return head, done - end -end -nodes.plugindata = nil -]] - -function nodes.installattributehandler(plugin) - nodes.plugindata = plugin - return loadstripped(template)() -end - --- the injectors - -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after - -local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger -local current, current_selector, done = 0, 0, false -- nb, stack has a local current ! -local nsbegin, nsend - -function states.initialize(namespace,attribute,head) - nsdata = namespace.data - nsnone = namespace.none - nsforced = namespace.forced - nsselector = namespace.selector - nslistwise = namespace.listwise - nstrigger = triggering and namespace.triggering and a_trigger - current = 0 - current_selector = 0 - done = false -- todo: done cleanup - nsstep = namespace.resolve_step - if nsstep then - nsbegin = namespace.resolve_begin - nsend = namespace.resolve_end - nspush = namespace.push - nspop = namespace.pop - end -end - -function states.finalize(namespace,attribute,head) -- is this one ok? 
- if current > 0 and nsnone then - local id = head.id - if id == hlist_code or id == vlist_code then - local list = head.list - if list then - head.list = insert_node_before(list,list,copy_node(nsnone)) - end - else - head = insert_node_before(head,head,copy_node(nsnone)) - end - return head, true, true - end - return head, false, false -end - --- disc nodes can be ignored --- we need to deal with literals too (reset as well as oval) --- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then - --- local function process(namespace,attribute,head,inheritance,default) -- one attribute --- local stack, done = head, false --- while stack do --- local id = stack.id --- if id == glyph_code or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current ~= c then --- head = insert_node_before(head,stack,copy_node(nsdata[c])) --- current = c --- done = true --- end --- -- here ? compare selective --- if id == glue_code then --leader --- -- same as *list --- local content = stack.leader --- if content then --- local savedcurrent = current --- local ci = content.id --- if ci == hlist_code or ci == vlist_code then --- -- else we reset inside a box unneeded, okay, the downside is --- -- that we trigger color in each repeated box, so there is room --- -- for improvement here --- current = 0 --- end --- local ok = false --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.leader, ok = process(namespace,attribute,content,inheritance,outer) --- else --- stack.leader, ok = process(namespace,attribute,content,inheritance,default) --- end --- else --- stack.leader, ok = process(namespace,attribute,content,inheritance,default) --- end --- current = savedcurrent --- done = done or ok --- end --- end --- elseif default and inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current = 0 --- done = true --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then --- local ok = false --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.list, ok = process(namespace,attribute,content,inheritance,outer) --- else --- stack.list, ok = process(namespace,attribute,content,inheritance,default) --- end --- else --- stack.list, ok = process(namespace,attribute,content,inheritance,default) --- end --- done = done or ok --- end --- end --- stack = stack.next --- end --- return head, done --- end - --- local function process(namespace,attribute,head,inheritance,default) -- one attribute --- local stack, done = head, false - --- local function check() --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current ~= c then --- head = 
insert_node_before(head,stack,copy_node(nsdata[c])) --- current = c --- done = true --- end --- elseif default and inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current = 0 --- done = true --- end --- return c --- end - --- local function nested(content) --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- return process(namespace,attribute,content,inheritance,outer) --- else --- return process(namespace,attribute,content,inheritance,default) --- end --- else --- return process(namespace,attribute,content,inheritance,default) --- end --- end - --- while stack do --- local id = stack.id --- if id == glyph_code then --- check() --- elseif id == glue_code then --- local content = stack.leader --- if content and check() then --- local savedcurrent = current --- local ci = content.id --- if ci == hlist_code or ci == vlist_code then --- -- else we reset inside a box unneeded, okay, the downside is --- -- that we trigger color in each repeated box, so there is room --- -- for improvement here --- current = 0 --- end - --- local ok = false --- stack.leader, ok = nested(content) --- done = done or ok - --- current = savedcurrent --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then - --- local ok = false --- stack.list, ok = nested(content) --- done = done or ok - --- end --- elseif id == rule_code then --- if stack.width ~= 0 then --- check() --- end --- end --- stack = stack.next --- end --- return head, done --- end - --- local function process(namespace,attribute,head,inheritance,default) -- one attribute --- local stack, done = head, false --- while stack do --- local id = stack.id --- if id == glyph_code then --- -- begin of check --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current ~= c then --- head = insert_node_before(head,stack,copy_node(nsdata[c])) --- current = c --- done = true --- end --- elseif default and inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current = 0 --- done = true --- end --- -- end of check --- elseif id == glue_code then --- local content = stack.leader --- if content then --- -- begin of check --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current ~= c then --- head = insert_node_before(head,stack,copy_node(nsdata[c])) --- current = c --- done = true --- end --- -- begin special to this check --- local savedcurrent = current --- local ci = content.id --- if ci == hlist_code or ci == vlist_code then --- -- else we reset inside a box unneeded, okay, the downside is --- -- that we trigger color in each repeated box, so there is room --- -- for improvement here --- current = 0 --- end --- -- begin nested -- --- local ok = false --- if nstrigger and 
stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.leader, ok = process(namespace,attribute,content,inheritance,outer) --- else --- stack.leader, ok = process(namespace,attribute,content,inheritance,default) --- end --- else --- stack.leader, ok = process(namespace,attribute,content,inheritance,default) --- end --- -- end nested -- --- done = done or ok --- current = savedcurrent --- -- end special to this check --- elseif default and inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current = 0 --- done = true --- end --- -- end of check --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then --- -- begin nested -- --- local ok --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.list, ok = process(namespace,attribute,content,inheritance,outer) --- else --- stack.list, ok = process(namespace,attribute,content,inheritance,default) --- end --- else --- stack.list, ok = process(namespace,attribute,content,inheritance,default) --- end --- -- end nested -- --- done = done or ok --- end --- elseif id == rule_code then --- if stack.width ~= 0 then --- -- begin of check --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current ~= c then --- head = insert_node_before(head,stack,copy_node(nsdata[c])) --- current = c --- done = true --- end --- elseif default and inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current = 0 --- done = true --- end --- -- end of check --- end --- end --- stack = stack.next --- end --- return head, done --- end - -local function process(namespace,attribute,head,inheritance,default) -- one attribute - local stack = head - local done = false - local check = false - local leader = nil - while stack do - local id = stack.id - if id == glyph_code then - check = true - elseif id == glue_code then - leader = stack.leader - if leader then - check = true - end - elseif id == hlist_code or id == vlist_code then - local content = stack.list - if content then - -- begin nested -- - local ok - if nstrigger and stack[nstrigger] then - local outer = stack[attribute] - if outer ~= inheritance then - stack.list, ok = process(namespace,attribute,content,inheritance,outer) - else - stack.list, ok = process(namespace,attribute,content,inheritance,default) - end - else - stack.list, ok = process(namespace,attribute,content,inheritance,default) - end - -- end nested -- - done = done or ok - end - elseif id == rule_code then - check = stack.width ~= 0 - end - -- much faster this way than using a check() and nested() function - if check then - local c = stack[attribute] - if c then - if default and c == inheritance then - if current ~= default then - head = insert_node_before(head,stack,copy_node(nsdata[default])) - current = default - done = true - end - elseif current ~= c then - head = 
insert_node_before(head,stack,copy_node(nsdata[c])) - current = c - done = true - end - if leader then - local savedcurrent = current - local ci = leader.id - if ci == hlist_code or ci == vlist_code then - -- else we reset inside a box unneeded, okay, the downside is - -- that we trigger color in each repeated box, so there is room - -- for improvement here - current = 0 - end - -- begin nested -- - local ok = false - if nstrigger and stack[nstrigger] then - local outer = stack[attribute] - if outer ~= inheritance then - stack.leader, ok = process(namespace,attribute,leader,inheritance,outer) - else - stack.leader, ok = process(namespace,attribute,leader,inheritance,default) - end - else - stack.leader, ok = process(namespace,attribute,leader,inheritance,default) - end - -- end nested -- - done = done or ok - current = savedcurrent - leader = false - end - elseif default and inheritance then - if current ~= default then - head = insert_node_before(head,stack,copy_node(nsdata[default])) - current = default - done = true - end - elseif current > 0 then - head = insert_node_before(head,stack,copy_node(nsnone)) - current = 0 - done = true - end - check = false - end - stack = stack.next - end - return head, done -end - -states.process = process - --- we can force a selector, e.g. document wide color spaces, saves a little --- watch out, we need to check both the selector state (like colorspace) and --- the main state (like color), otherwise we get into troubles when a selector --- state changes while the main state stays the same (like two glyphs following --- each other with the same color but different color spaces e.g. \showcolor) - --- local function selective(namespace,attribute,head,inheritance,default) -- two attributes --- local stack, done = head, false --- while stack do --- local id = stack.id --- -- we need to deal with literals too (reset as well as oval) --- -- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code --- if id == glyph_code -- or id == disc_code --- or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- local data = nsdata[default] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = default --- done = true --- end --- else --- local s = stack[nsselector] --- if current ~= c or current_selector ~= s then --- local data = nsdata[c] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = c --- current_selector = s --- done = true --- end --- end --- elseif default and inheritance then --- if current ~= default then --- local data = nsdata[default] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current, current_selector, done = 0, 0, true --- end --- if id == glue_code then -- leader --- -- same as *list --- local content = stack.leader --- if content then --- local savedcurrent = current --- local ci = content.id --- if ci == hlist_code or ci == vlist_code then --- -- else we reset inside a box unneeded, okay, the downside is --- -- that we trigger color in each 
repeated box, so there is room --- -- for improvement here --- current = 0 --- end --- local ok = false --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.leader, ok = selective(namespace,attribute,content,inheritance,outer) --- else --- stack.leader, ok = selective(namespace,attribute,content,inheritance,default) --- end --- else --- stack.leader, ok = selective(namespace,attribute,content,inheritance,default) --- end --- current = savedcurrent --- done = done or ok --- end --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then --- local ok = false --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.list, ok = selective(namespace,attribute,content,inheritance,outer) --- else --- stack.list, ok = selective(namespace,attribute,content,inheritance,default) --- end --- else --- stack.list, ok = selective(namespace,attribute,content,inheritance,default) --- end --- done = done or ok --- end --- end --- stack = stack.next --- end --- return head, done --- end - --- local function selective(namespace,attribute,head,inheritance,default) -- two attributes --- local stack, done = head, false - --- local function check() --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- local data = nsdata[default] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = default --- done = true --- end --- else --- local s = stack[nsselector] --- if current ~= c or current_selector ~= s then --- local data = nsdata[c] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = c --- current_selector = s --- done = true --- end --- end --- elseif default and inheritance then --- if current ~= default then --- local data = nsdata[default] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current, current_selector, done = 0, 0, true --- end --- return c --- end - --- local function nested(content) --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- return selective(namespace,attribute,content,inheritance,outer) --- else --- return selective(namespace,attribute,content,inheritance,default) --- end --- else --- return selective(namespace,attribute,content,inheritance,default) --- end --- end - --- while stack do --- local id = stack.id --- if id == glyph_code then --- check() --- elseif id == glue_code then --- local content = stack.leader --- if content and check() then --- -- local savedcurrent = current --- -- local ci = content.id --- -- if ci == hlist_code or ci == vlist_code then --- -- -- else we reset inside a box unneeded, okay, the downside is --- -- -- that we trigger color in each repeated box, so there is room --- -- -- for improvement here --- -- current = 0 --- -- end - --- local ok = false --- stack.leader, ok = nested(content) --- done = done or ok - --- -- current = savedcurrent --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then - --- local ok = false --- stack.list, ok = nested(content) --- done = done or ok - --- 
end --- elseif id == rule_code then --- if stack.width ~= 0 then --- check() --- end --- end --- stack = stack.next --- end --- return head, done --- end - -local function selective(namespace,attribute,head,inheritance,default) -- two attributes - local stack = head - local done = false - local check = false - local leader = nil - while stack do - local id = stack.id - if id == glyph_code then - check = true - elseif id == glue_code then - leader = stack.leader - if leader then - check = true - end - elseif id == hlist_code or id == vlist_code then - local content = stack.list - if content then - local ok = false - -- begin nested - if nstrigger and stack[nstrigger] then - local outer = stack[attribute] - if outer ~= inheritance then - stack.list, ok = selective(namespace,attribute,content,inheritance,outer) - else - stack.list, ok = selective(namespace,attribute,content,inheritance,default) - end - else - stack.list, ok = selective(namespace,attribute,content,inheritance,default) - end - -- end nested - done = done or ok - end - elseif id == rule_code then - check = stack.width ~= 0 - end - - if check then - local c = stack[attribute] - if c then - if default and c == inheritance then - if current ~= default then - local data = nsdata[default] - head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) - current = default - done = true - end - else - local s = stack[nsselector] - if current ~= c or current_selector ~= s then - local data = nsdata[c] - head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) - current = c - current_selector = s - done = true - end - end - if leader then - local ok = false - -- begin nested - if nstrigger and stack[nstrigger] then - local outer = stack[attribute] - if outer ~= inheritance then - stack.leader, ok = selective(namespace,attribute,leader,inheritance,outer) - else - stack.leader, ok = selective(namespace,attribute,leader,inheritance,default) - end - else - stack.leader, ok = selective(namespace,attribute,leader,inheritance,default) - end - -- end nested - done = done or ok - leader = false - end - elseif default and inheritance then - if current ~= default then - local data = nsdata[default] - head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) - current = default - done = true - end - elseif current > 0 then - head = insert_node_before(head,stack,copy_node(nsnone)) - current, current_selector, done = 0, 0, true - end - check = false - end - - stack = stack.next - end - return head, done -end - -states.selective = selective - --- Ideally the next one should be merged with the previous but keeping it separate is --- safer. We deal with two situations: efficient boxwise (layoutareas) and mixed layers --- (as used in the stepper). In the stepper we cannot use the box branch as it involves --- paragraph lines and then gets mixed up. A messy business (esp since we want to be --- efficient). --- --- Todo: make a better stacker. Keep track (in attribute) about nesting level. Not --- entirely trivial and a generic solution is nicer (compares to the exporter). 
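The comment above separates the efficient boxwise case (layoutareas) from the mixed layers of the stepper, and the listwise branch of the code that follows brackets a whole box with a start node and a stop node whenever its attribute is flagged in nslistwise. A minimal, simplified sketch of just that wrapping step, reusing the nsdata, nsnone and nslistwise names and the node helpers from this file; leaders, triggering and the depth bookkeeping of the real code are left out, so this is an illustration rather than the patch's implementation:

local function wrap_listwise(head,attribute,current)
    local n = head
    while n do
        local id = n.id
        if id == hlist_code or id == vlist_code then
            local a = n[attribute]
            if a and a ~= current and nslistwise[a] then
                -- open before the box, close after it, recurse with the new state
                head   = insert_node_before(head,n,copy_node(nsdata[a]))
                n.list = wrap_listwise(n.list,attribute,a)
                head, n = insert_node_after(head,n,copy_node(nsnone))
            elseif n.list then
                n.list = wrap_listwise(n.list,attribute,current)
            end
        end
        n = n.next
    end
    return head
end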
- --- local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise --- local stack, done = head, false --- local current, depth = default or 0, 0 --- --- local function check() --- local a = stack[attribute] --- if a then --- if current ~= a then --- head = insert_node_before(head,stack,copy_node(nsdata[a])) --- depth = depth + 1 --- current, done = a, true --- end --- elseif default > 0 then --- -- --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- depth = depth - 1 --- current, done = 0, true --- end --- return a --- end --- --- while stack do --- local id = stack.id --- if id == glyph_code then --- check() --- elseif id == glue_code then --- local content = stack.leader --- if content and check() then --- local ok = false --- stack.leader, ok = stacked(namespace,attribute,content,current) --- done = done or ok --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then --- -- the problem is that broken lines gets the attribute which can be a later one --- if nslistwise then --- local a = stack[attribute] --- if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below --- local p = current --- current, done = a, true --- head = insert_node_before(head,stack,copy_node(nsdata[a])) --- stack.list = stacked(namespace,attribute,content,current) --- head, stack = insert_node_after(head,stack,copy_node(nsnone)) --- current = p --- else --- local ok = false --- stack.list, ok = stacked(namespace,attribute,content,current) --- done = done or ok --- end --- else --- local ok = false --- stack.list, ok = stacked(namespace,attribute,content,current) --- done = done or ok --- end --- end --- elseif id == rule_code then --- if stack.width ~= 0 then --- check() --- end --- end --- stack = stack.next --- end --- while depth > 0 do --- head = insert_node_after(head,stack,copy_node(nsnone)) --- depth = depth - 1 --- end --- return head, done --- end - -local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise - local stack = head - local done = false - local current = default or 0 - local depth = 0 - local check = false - local leader = false - while stack do - local id = stack.id - if id == glyph_code then - check = true - elseif id == glue_code then - leader = stack.leader - if leader then - check = true - end - elseif id == hlist_code or id == vlist_code then - local content = stack.list - if content then - -- the problem is that broken lines gets the attribute which can be a later one - if nslistwise then - local a = stack[attribute] - if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below - local p = current - current, done = a, true - head = insert_node_before(head,stack,copy_node(nsdata[a])) - stack.list = stacked(namespace,attribute,content,current) - head, stack = insert_node_after(head,stack,copy_node(nsnone)) - current = p - else - local ok = false - stack.list, ok = stacked(namespace,attribute,content,current) - done = done or ok - end - else - local ok = false - stack.list, ok = stacked(namespace,attribute,content,current) - done = done or ok - end - end - elseif id == rule_code then - check = stack.width ~= 0 - end - - if check then - local a = stack[attribute] - if a then - if current ~= a then - head = insert_node_before(head,stack,copy_node(nsdata[a])) - depth = depth + 1 - current, done = a, true - end - if leader then - local ok = false - 
stack.leader, ok = stacked(namespace,attribute,content,current) - done = done or ok - leader = false - end - elseif default > 0 then - -- - elseif current > 0 then - head = insert_node_before(head,stack,copy_node(nsnone)) - depth = depth - 1 - current, done = 0, true - end - check = false - end - - stack = stack.next - end - while depth > 0 do - head = insert_node_after(head,stack,copy_node(nsnone)) - depth = depth - 1 - end - return head, done -end - -states.stacked = stacked - --- experimental - --- local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise --- nsbegin() --- local current, previous, done, okay = head, head, false, false --- local attrib = default or unsetvalue --- --- local function check() --- local a = current[attribute] or unsetvalue --- if a ~= attrib then --- local n = nsstep(a) --- if n then --- -- !!!! TEST CODE !!!! --- -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a --- head = insert_node_before(head,current,n) -- a --- end --- attrib, done, okay = a, true, true --- end --- return a --- end --- --- while current do --- local id = current.id --- if id == glyph_code then --- check() --- elseif id == glue_code then --- local content = current.leader --- if content and check() then --- -- tricky as a leader has to be a list so we cannot inject before --- local _, ok = stacker(namespace,attribute,content,attrib) --- done = done or ok --- end --- elseif id == hlist_code or id == vlist_code then --- local content = current.list --- if not content then --- -- skip --- elseif nslistwise then --- local a = current[attribute] --- if a and attrib ~= a and nslistwise[a] then -- viewerlayer --- done = true --- head = insert_node_before(head,current,copy_node(nsdata[a])) --- current.list = stacker(namespace,attribute,content,a) --- head, current = insert_node_after(head,current,copy_node(nsnone)) --- else --- local ok = false --- current.list, ok = stacker(namespace,attribute,content,attrib) --- done = done or ok --- end --- else --- local ok = false --- current.list, ok = stacker(namespace,attribute,content,default) --- done = done or ok --- end --- elseif id == rule_code then --- if current.width ~= 0 then --- check() --- end --- end --- previous = current --- current = current.next --- end --- if okay then --- local n = nsend() --- if n then --- -- !!!! TEST CODE !!!! 
--- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)])) --- head = insert_node_after(head,previous,n) --- end --- end --- return head, done --- end - -local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise - nsbegin() - local current = head - local previous = head - local done = false - local okay = false - local attrib = default or unsetvalue - local check = false - local leader = false - while current do - local id = current.id - if id == glyph_code then - check = true - elseif id == glue_code then - leader = current.leader - if leader then - check = true - end - elseif id == hlist_code or id == vlist_code then - local content = current.list - if not content then - -- skip - elseif nslistwise then - local a = current[attribute] - if a and attrib ~= a and nslistwise[a] then -- viewerlayer - done = true - head = insert_node_before(head,current,copy_node(nsdata[a])) - current.list = stacker(namespace,attribute,content,a) - head, current = insert_node_after(head,current,copy_node(nsnone)) - else - local ok = false - current.list, ok = stacker(namespace,attribute,content,attrib) - done = done or ok - end - else - local ok = false - current.list, ok = stacker(namespace,attribute,content,default) - done = done or ok - end - elseif id == rule_code then - check = current.width ~= 0 - end - - if check then - local a = current[attribute] or unsetvalue - if a ~= attrib then - local n = nsstep(a) - if n then - -- !!!! TEST CODE !!!! - -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a - head = insert_node_before(head,current,n) -- a - end - attrib, done, okay = a, true, true - if leader then - -- tricky as a leader has to be a list so we cannot inject before - local _, ok = stacker(namespace,attribute,leader,attrib) - done = done or ok - leader = false - end - end - check = false - end - - previous = current - current = current.next - end - if okay then - local n = nsend() - if n then - -- !!!! TEST CODE !!!! 
- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)])) - head = insert_node_after(head,previous,n) - end - end - return head, done -end - -states.stacker = stacker - --- -- -- - -statistics.register("attribute processing time", function() - return statistics.elapsedseconds(attributes,"front- and backend") -end) +if not modules then modules = { } end modules ['node-fin'] = { + version = 1.001, + comment = "companion to node-fin.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- this module is being reconstructed +-- local functions, only slightly slower + +local next, type, format = next, type, string.format + +local attributes, nodes, node = attributes, nodes, node + +local copy_node = node.copy +local find_tail = node.slide + +local nodecodes = nodes.nodecodes +local whatcodes = nodes.whatcodes + +local glyph_code = nodecodes.glyph +local disc_code = nodecodes.disc +local glue_code = nodecodes.glue +local rule_code = nodecodes.rule +local whatsit_code = nodecodes.whatsit +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist + +local pdfliteral_code = whatcodes.pdfliteral + +local states = attributes.states +local numbers = attributes.numbers +local a_trigger = attributes.private('trigger') +local triggering = false + +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming +local loadstripped = utilities.lua.loadstripped +local unsetvalue = attributes.unsetvalue + +-- these two will be like trackers + +function states.enabletriggering() + triggering = true +end +function states.disabletriggering() + triggering = false +end + +-- the following code is no longer needed due to the new backend +-- but we keep it around for a while as an example +-- +-- states.collected = states.collected or { } +-- +-- storage.register("states/collected", states.collected, "states.collected") +-- +-- local collected = states.collected +-- +-- function states.collect(str) +-- collected[#collected+1] = str +-- end +-- +-- function states.flush() +-- if #collected > 0 then +-- for i=1,#collected do +-- context(collected[i]) -- we're in context mode anyway +-- end +-- collected = { } +-- states.collected = collected +-- end +-- end +-- +-- function states.check() +-- logs.report("states",concat(collected,"\n")) +-- end + +-- we used to do the main processor loop here and call processor for each node +-- but eventually this was too much a slow down (1 sec on 23 for 120 pages mk) +-- so that we moved looping to the processor itself; this may lead to a bit of +-- duplicate code once that we have more state handlers + +-- local function process_attribute(head,plugin) -- head,attribute,enabled,initializer,resolver,processor,finalizer +-- local namespace = plugin.namespace +-- if namespace.enabled ~= false then -- this test will go away +-- starttiming(attributes) -- in principle we could delegate this to the main caller +-- local done, used, ok = false, nil, false +-- local attribute = namespace.attribute or numbers[plugin.name] -- todo: plugin.attribute +-- local processor = plugin.processor +-- if processor then +-- local initializer = plugin.initializer +-- local resolver = plugin.resolver +-- local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip ! 
+-- if initializer then +-- initializer(namespace,attribute,head) +-- end +-- head, ok = processor(namespace,attribute,head,inheritance) +-- if ok then +-- local finalizer = plugin.finalizer +-- if finalizer then +-- head, ok, used = finalizer(namespace,attribute,head) +-- if used then +-- local flusher = plugin.flusher +-- if flusher then +-- head = flusher(namespace,attribute,head,used) +-- end +-- end +-- end +-- done = true +-- end +-- end +-- stoptiming(attributes) +-- return head, done +-- else +-- return head, false +-- end +-- end +-- +-- function nodes.installattributehandler(plugin) -- we need to avoid this nested function +-- return function(head) +-- return process_attribute(head,plugin) +-- end +-- end + +-- An experiment: lean and mean functions. It is not really faster but +-- with upcoming functionality it might make a difference, e.g. features +-- like 'casing' and 'italics' can be called a lot so there it makes sense. + +nodes.plugindata = nil + +local template = [[ +local plugin = nodes.plugindata +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming +local namespace = plugin.namespace +local attribute = namespace.attribute or attributes.numbers[plugin.name] +local processor = plugin.processor +local initializer = plugin.initializer +local resolver = plugin.resolver +local finalizer = plugin.finalizer +local flusher = plugin.flusher +if not processor then + return function(head) + return head, false + end +elseif initializer or finalizer or resolver then + return function(head) + starttiming(attributes) + local done, used, ok = false, nil, false + local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip ! + if initializer then + initializer(namespace,attribute,head) + end + head, ok = processor(namespace,attribute,head,inheritance) + if ok then + if finalizer then + head, ok, used = finalizer(namespace,attribute,head) + if used and flusher then + head = flusher(namespace,attribute,head,used) + end + end + done = true + end + stoptiming(attributes) + return head, done + end +else + return function(head) + starttiming(attributes) + local head, done = processor(namespace,attribute,head) + stoptiming(attributes) + return head, done + end +end +nodes.plugindata = nil +]] + +function nodes.installattributehandler(plugin) + nodes.plugindata = plugin + return loadstripped(template)() +end + +-- the injectors + +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after + +local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger +local current, current_selector, done = 0, 0, false -- nb, stack has a local current ! +local nsbegin, nsend + +function states.initialize(namespace,attribute,head) + nsdata = namespace.data + nsnone = namespace.none + nsforced = namespace.forced + nsselector = namespace.selector + nslistwise = namespace.listwise + nstrigger = triggering and namespace.triggering and a_trigger + current = 0 + current_selector = 0 + done = false -- todo: done cleanup + nsstep = namespace.resolve_step + if nsstep then + nsbegin = namespace.resolve_begin + nsend = namespace.resolve_end + nspush = namespace.push + nspop = namespace.pop + end +end + +function states.finalize(namespace,attribute,head) -- is this one ok? 
+ if current > 0 and nsnone then + local id = head.id + if id == hlist_code or id == vlist_code then + local list = head.list + if list then + head.list = insert_node_before(list,list,copy_node(nsnone)) + end + else + head = insert_node_before(head,head,copy_node(nsnone)) + end + return head, true, true + end + return head, false, false +end + +-- disc nodes can be ignored +-- we need to deal with literals too (reset as well as oval) +-- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then + +-- local function process(namespace,attribute,head,inheritance,default) -- one attribute +-- local stack, done = head, false +-- while stack do +-- local id = stack.id +-- if id == glyph_code or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code +-- local c = stack[attribute] +-- if c then +-- if default and c == inheritance then +-- if current ~= default then +-- head = insert_node_before(head,stack,copy_node(nsdata[default])) +-- current = default +-- done = true +-- end +-- elseif current ~= c then +-- head = insert_node_before(head,stack,copy_node(nsdata[c])) +-- current = c +-- done = true +-- end +-- -- here ? compare selective +-- if id == glue_code then --leader +-- -- same as *list +-- local content = stack.leader +-- if content then +-- local savedcurrent = current +-- local ci = content.id +-- if ci == hlist_code or ci == vlist_code then +-- -- else we reset inside a box unneeded, okay, the downside is +-- -- that we trigger color in each repeated box, so there is room +-- -- for improvement here +-- current = 0 +-- end +-- local ok = false +-- if nstrigger and stack[nstrigger] then +-- local outer = stack[attribute] +-- if outer ~= inheritance then +-- stack.leader, ok = process(namespace,attribute,content,inheritance,outer) +-- else +-- stack.leader, ok = process(namespace,attribute,content,inheritance,default) +-- end +-- else +-- stack.leader, ok = process(namespace,attribute,content,inheritance,default) +-- end +-- current = savedcurrent +-- done = done or ok +-- end +-- end +-- elseif default and inheritance then +-- if current ~= default then +-- head = insert_node_before(head,stack,copy_node(nsdata[default])) +-- current = default +-- done = true +-- end +-- elseif current > 0 then +-- head = insert_node_before(head,stack,copy_node(nsnone)) +-- current = 0 +-- done = true +-- end +-- elseif id == hlist_code or id == vlist_code then +-- local content = stack.list +-- if content then +-- local ok = false +-- if nstrigger and stack[nstrigger] then +-- local outer = stack[attribute] +-- if outer ~= inheritance then +-- stack.list, ok = process(namespace,attribute,content,inheritance,outer) +-- else +-- stack.list, ok = process(namespace,attribute,content,inheritance,default) +-- end +-- else +-- stack.list, ok = process(namespace,attribute,content,inheritance,default) +-- end +-- done = done or ok +-- end +-- end +-- stack = stack.next +-- end +-- return head, done +-- end + +-- local function process(namespace,attribute,head,inheritance,default) -- one attribute +-- local stack, done = head, false + +-- local function check() +-- local c = stack[attribute] +-- if c then +-- if default and c == inheritance then +-- if current ~= default then +-- head = insert_node_before(head,stack,copy_node(nsdata[default])) +-- current = default +-- done = true +-- end +-- elseif current ~= c then +-- head = 
insert_node_before(head,stack,copy_node(nsdata[c])) +-- current = c +-- done = true +-- end +-- elseif default and inheritance then +-- if current ~= default then +-- head = insert_node_before(head,stack,copy_node(nsdata[default])) +-- current = default +-- done = true +-- end +-- elseif current > 0 then +-- head = insert_node_before(head,stack,copy_node(nsnone)) +-- current = 0 +-- done = true +-- end +-- return c +-- end + +-- local function nested(content) +-- if nstrigger and stack[nstrigger] then +-- local outer = stack[attribute] +-- if outer ~= inheritance then +-- return process(namespace,attribute,content,inheritance,outer) +-- else +-- return process(namespace,attribute,content,inheritance,default) +-- end +-- else +-- return process(namespace,attribute,content,inheritance,default) +-- end +-- end + +-- while stack do +-- local id = stack.id +-- if id == glyph_code then +-- check() +-- elseif id == glue_code then +-- local content = stack.leader +-- if content and check() then +-- local savedcurrent = current +-- local ci = content.id +-- if ci == hlist_code or ci == vlist_code then +-- -- else we reset inside a box unneeded, okay, the downside is +-- -- that we trigger color in each repeated box, so there is room +-- -- for improvement here +-- current = 0 +-- end + +-- local ok = false +-- stack.leader, ok = nested(content) +-- done = done or ok + +-- current = savedcurrent +-- end +-- elseif id == hlist_code or id == vlist_code then +-- local content = stack.list +-- if content then + +-- local ok = false +-- stack.list, ok = nested(content) +-- done = done or ok + +-- end +-- elseif id == rule_code then +-- if stack.width ~= 0 then +-- check() +-- end +-- end +-- stack = stack.next +-- end +-- return head, done +-- end + +-- local function process(namespace,attribute,head,inheritance,default) -- one attribute +-- local stack, done = head, false +-- while stack do +-- local id = stack.id +-- if id == glyph_code then +-- -- begin of check +-- local c = stack[attribute] +-- if c then +-- if default and c == inheritance then +-- if current ~= default then +-- head = insert_node_before(head,stack,copy_node(nsdata[default])) +-- current = default +-- done = true +-- end +-- elseif current ~= c then +-- head = insert_node_before(head,stack,copy_node(nsdata[c])) +-- current = c +-- done = true +-- end +-- elseif default and inheritance then +-- if current ~= default then +-- head = insert_node_before(head,stack,copy_node(nsdata[default])) +-- current = default +-- done = true +-- end +-- elseif current > 0 then +-- head = insert_node_before(head,stack,copy_node(nsnone)) +-- current = 0 +-- done = true +-- end +-- -- end of check +-- elseif id == glue_code then +-- local content = stack.leader +-- if content then +-- -- begin of check +-- local c = stack[attribute] +-- if c then +-- if default and c == inheritance then +-- if current ~= default then +-- head = insert_node_before(head,stack,copy_node(nsdata[default])) +-- current = default +-- done = true +-- end +-- elseif current ~= c then +-- head = insert_node_before(head,stack,copy_node(nsdata[c])) +-- current = c +-- done = true +-- end +-- -- begin special to this check +-- local savedcurrent = current +-- local ci = content.id +-- if ci == hlist_code or ci == vlist_code then +-- -- else we reset inside a box unneeded, okay, the downside is +-- -- that we trigger color in each repeated box, so there is room +-- -- for improvement here +-- current = 0 +-- end +-- -- begin nested -- +-- local ok = false +-- if nstrigger and 
stack[nstrigger] then +-- local outer = stack[attribute] +-- if outer ~= inheritance then +-- stack.leader, ok = process(namespace,attribute,content,inheritance,outer) +-- else +-- stack.leader, ok = process(namespace,attribute,content,inheritance,default) +-- end +-- else +-- stack.leader, ok = process(namespace,attribute,content,inheritance,default) +-- end +-- -- end nested -- +-- done = done or ok +-- current = savedcurrent +-- -- end special to this check +-- elseif default and inheritance then +-- if current ~= default then +-- head = insert_node_before(head,stack,copy_node(nsdata[default])) +-- current = default +-- done = true +-- end +-- elseif current > 0 then +-- head = insert_node_before(head,stack,copy_node(nsnone)) +-- current = 0 +-- done = true +-- end +-- -- end of check +-- end +-- elseif id == hlist_code or id == vlist_code then +-- local content = stack.list +-- if content then +-- -- begin nested -- +-- local ok +-- if nstrigger and stack[nstrigger] then +-- local outer = stack[attribute] +-- if outer ~= inheritance then +-- stack.list, ok = process(namespace,attribute,content,inheritance,outer) +-- else +-- stack.list, ok = process(namespace,attribute,content,inheritance,default) +-- end +-- else +-- stack.list, ok = process(namespace,attribute,content,inheritance,default) +-- end +-- -- end nested -- +-- done = done or ok +-- end +-- elseif id == rule_code then +-- if stack.width ~= 0 then +-- -- begin of check +-- local c = stack[attribute] +-- if c then +-- if default and c == inheritance then +-- if current ~= default then +-- head = insert_node_before(head,stack,copy_node(nsdata[default])) +-- current = default +-- done = true +-- end +-- elseif current ~= c then +-- head = insert_node_before(head,stack,copy_node(nsdata[c])) +-- current = c +-- done = true +-- end +-- elseif default and inheritance then +-- if current ~= default then +-- head = insert_node_before(head,stack,copy_node(nsdata[default])) +-- current = default +-- done = true +-- end +-- elseif current > 0 then +-- head = insert_node_before(head,stack,copy_node(nsnone)) +-- current = 0 +-- done = true +-- end +-- -- end of check +-- end +-- end +-- stack = stack.next +-- end +-- return head, done +-- end + +local function process(namespace,attribute,head,inheritance,default) -- one attribute + local stack = head + local done = false + local check = false + local leader = nil + while stack do + local id = stack.id + if id == glyph_code then + check = true + elseif id == glue_code then + leader = stack.leader + if leader then + check = true + end + elseif id == hlist_code or id == vlist_code then + local content = stack.list + if content then + -- begin nested -- + local ok + if nstrigger and stack[nstrigger] then + local outer = stack[attribute] + if outer ~= inheritance then + stack.list, ok = process(namespace,attribute,content,inheritance,outer) + else + stack.list, ok = process(namespace,attribute,content,inheritance,default) + end + else + stack.list, ok = process(namespace,attribute,content,inheritance,default) + end + -- end nested -- + done = done or ok + end + elseif id == rule_code then + check = stack.width ~= 0 + end + -- much faster this way than using a check() and nested() function + if check then + local c = stack[attribute] + if c then + if default and c == inheritance then + if current ~= default then + head = insert_node_before(head,stack,copy_node(nsdata[default])) + current = default + done = true + end + elseif current ~= c then + head = 
insert_node_before(head,stack,copy_node(nsdata[c])) + current = c + done = true + end + if leader then + local savedcurrent = current + local ci = leader.id + if ci == hlist_code or ci == vlist_code then + -- else we reset inside a box unneeded, okay, the downside is + -- that we trigger color in each repeated box, so there is room + -- for improvement here + current = 0 + end + -- begin nested -- + local ok = false + if nstrigger and stack[nstrigger] then + local outer = stack[attribute] + if outer ~= inheritance then + stack.leader, ok = process(namespace,attribute,leader,inheritance,outer) + else + stack.leader, ok = process(namespace,attribute,leader,inheritance,default) + end + else + stack.leader, ok = process(namespace,attribute,leader,inheritance,default) + end + -- end nested -- + done = done or ok + current = savedcurrent + leader = false + end + elseif default and inheritance then + if current ~= default then + head = insert_node_before(head,stack,copy_node(nsdata[default])) + current = default + done = true + end + elseif current > 0 then + head = insert_node_before(head,stack,copy_node(nsnone)) + current = 0 + done = true + end + check = false + end + stack = stack.next + end + return head, done +end + +states.process = process + +-- we can force a selector, e.g. document wide color spaces, saves a little +-- watch out, we need to check both the selector state (like colorspace) and +-- the main state (like color), otherwise we get into troubles when a selector +-- state changes while the main state stays the same (like two glyphs following +-- each other with the same color but different color spaces e.g. \showcolor) + +-- local function selective(namespace,attribute,head,inheritance,default) -- two attributes +-- local stack, done = head, false +-- while stack do +-- local id = stack.id +-- -- we need to deal with literals too (reset as well as oval) +-- -- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code +-- if id == glyph_code -- or id == disc_code +-- or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code +-- local c = stack[attribute] +-- if c then +-- if default and c == inheritance then +-- if current ~= default then +-- local data = nsdata[default] +-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) +-- current = default +-- done = true +-- end +-- else +-- local s = stack[nsselector] +-- if current ~= c or current_selector ~= s then +-- local data = nsdata[c] +-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) +-- current = c +-- current_selector = s +-- done = true +-- end +-- end +-- elseif default and inheritance then +-- if current ~= default then +-- local data = nsdata[default] +-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) +-- current = default +-- done = true +-- end +-- elseif current > 0 then +-- head = insert_node_before(head,stack,copy_node(nsnone)) +-- current, current_selector, done = 0, 0, true +-- end +-- if id == glue_code then -- leader +-- -- same as *list +-- local content = stack.leader +-- if content then +-- local savedcurrent = current +-- local ci = content.id +-- if ci == hlist_code or ci == vlist_code then +-- -- else we reset inside a box unneeded, okay, the downside is +-- -- that we trigger color in each 
repeated box, so there is room +-- -- for improvement here +-- current = 0 +-- end +-- local ok = false +-- if nstrigger and stack[nstrigger] then +-- local outer = stack[attribute] +-- if outer ~= inheritance then +-- stack.leader, ok = selective(namespace,attribute,content,inheritance,outer) +-- else +-- stack.leader, ok = selective(namespace,attribute,content,inheritance,default) +-- end +-- else +-- stack.leader, ok = selective(namespace,attribute,content,inheritance,default) +-- end +-- current = savedcurrent +-- done = done or ok +-- end +-- end +-- elseif id == hlist_code or id == vlist_code then +-- local content = stack.list +-- if content then +-- local ok = false +-- if nstrigger and stack[nstrigger] then +-- local outer = stack[attribute] +-- if outer ~= inheritance then +-- stack.list, ok = selective(namespace,attribute,content,inheritance,outer) +-- else +-- stack.list, ok = selective(namespace,attribute,content,inheritance,default) +-- end +-- else +-- stack.list, ok = selective(namespace,attribute,content,inheritance,default) +-- end +-- done = done or ok +-- end +-- end +-- stack = stack.next +-- end +-- return head, done +-- end + +-- local function selective(namespace,attribute,head,inheritance,default) -- two attributes +-- local stack, done = head, false + +-- local function check() +-- local c = stack[attribute] +-- if c then +-- if default and c == inheritance then +-- if current ~= default then +-- local data = nsdata[default] +-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) +-- current = default +-- done = true +-- end +-- else +-- local s = stack[nsselector] +-- if current ~= c or current_selector ~= s then +-- local data = nsdata[c] +-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) +-- current = c +-- current_selector = s +-- done = true +-- end +-- end +-- elseif default and inheritance then +-- if current ~= default then +-- local data = nsdata[default] +-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) +-- current = default +-- done = true +-- end +-- elseif current > 0 then +-- head = insert_node_before(head,stack,copy_node(nsnone)) +-- current, current_selector, done = 0, 0, true +-- end +-- return c +-- end + +-- local function nested(content) +-- if nstrigger and stack[nstrigger] then +-- local outer = stack[attribute] +-- if outer ~= inheritance then +-- return selective(namespace,attribute,content,inheritance,outer) +-- else +-- return selective(namespace,attribute,content,inheritance,default) +-- end +-- else +-- return selective(namespace,attribute,content,inheritance,default) +-- end +-- end + +-- while stack do +-- local id = stack.id +-- if id == glyph_code then +-- check() +-- elseif id == glue_code then +-- local content = stack.leader +-- if content and check() then +-- -- local savedcurrent = current +-- -- local ci = content.id +-- -- if ci == hlist_code or ci == vlist_code then +-- -- -- else we reset inside a box unneeded, okay, the downside is +-- -- -- that we trigger color in each repeated box, so there is room +-- -- -- for improvement here +-- -- current = 0 +-- -- end + +-- local ok = false +-- stack.leader, ok = nested(content) +-- done = done or ok + +-- -- current = savedcurrent +-- end +-- elseif id == hlist_code or id == vlist_code then +-- local content = stack.list +-- if content then + +-- local ok = false +-- stack.list, ok = nested(content) +-- done = done or ok + +-- 
end +-- elseif id == rule_code then +-- if stack.width ~= 0 then +-- check() +-- end +-- end +-- stack = stack.next +-- end +-- return head, done +-- end + +local function selective(namespace,attribute,head,inheritance,default) -- two attributes + local stack = head + local done = false + local check = false + local leader = nil + while stack do + local id = stack.id + if id == glyph_code then + check = true + elseif id == glue_code then + leader = stack.leader + if leader then + check = true + end + elseif id == hlist_code or id == vlist_code then + local content = stack.list + if content then + local ok = false + -- begin nested + if nstrigger and stack[nstrigger] then + local outer = stack[attribute] + if outer ~= inheritance then + stack.list, ok = selective(namespace,attribute,content,inheritance,outer) + else + stack.list, ok = selective(namespace,attribute,content,inheritance,default) + end + else + stack.list, ok = selective(namespace,attribute,content,inheritance,default) + end + -- end nested + done = done or ok + end + elseif id == rule_code then + check = stack.width ~= 0 + end + + if check then + local c = stack[attribute] + if c then + if default and c == inheritance then + if current ~= default then + local data = nsdata[default] + head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) + current = default + done = true + end + else + local s = stack[nsselector] + if current ~= c or current_selector ~= s then + local data = nsdata[c] + head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) + current = c + current_selector = s + done = true + end + end + if leader then + local ok = false + -- begin nested + if nstrigger and stack[nstrigger] then + local outer = stack[attribute] + if outer ~= inheritance then + stack.leader, ok = selective(namespace,attribute,leader,inheritance,outer) + else + stack.leader, ok = selective(namespace,attribute,leader,inheritance,default) + end + else + stack.leader, ok = selective(namespace,attribute,leader,inheritance,default) + end + -- end nested + done = done or ok + leader = false + end + elseif default and inheritance then + if current ~= default then + local data = nsdata[default] + head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) + current = default + done = true + end + elseif current > 0 then + head = insert_node_before(head,stack,copy_node(nsnone)) + current, current_selector, done = 0, 0, true + end + check = false + end + + stack = stack.next + end + return head, done +end + +states.selective = selective + +-- Ideally the next one should be merged with the previous but keeping it separate is +-- safer. We deal with two situations: efficient boxwise (layoutareas) and mixed layers +-- (as used in the stepper). In the stepper we cannot use the box branch as it involves +-- paragraph lines and then gets mixed up. A messy business (esp since we want to be +-- efficient). +-- +-- Todo: make a better stacker. Keep track (in attribute) about nesting level. Not +-- entirely trivial and a generic solution is nicer (compares to the exporter). 
+ +-- local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise +-- local stack, done = head, false +-- local current, depth = default or 0, 0 +-- +-- local function check() +-- local a = stack[attribute] +-- if a then +-- if current ~= a then +-- head = insert_node_before(head,stack,copy_node(nsdata[a])) +-- depth = depth + 1 +-- current, done = a, true +-- end +-- elseif default > 0 then +-- -- +-- elseif current > 0 then +-- head = insert_node_before(head,stack,copy_node(nsnone)) +-- depth = depth - 1 +-- current, done = 0, true +-- end +-- return a +-- end +-- +-- while stack do +-- local id = stack.id +-- if id == glyph_code then +-- check() +-- elseif id == glue_code then +-- local content = stack.leader +-- if content and check() then +-- local ok = false +-- stack.leader, ok = stacked(namespace,attribute,content,current) +-- done = done or ok +-- end +-- elseif id == hlist_code or id == vlist_code then +-- local content = stack.list +-- if content then +-- -- the problem is that broken lines gets the attribute which can be a later one +-- if nslistwise then +-- local a = stack[attribute] +-- if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below +-- local p = current +-- current, done = a, true +-- head = insert_node_before(head,stack,copy_node(nsdata[a])) +-- stack.list = stacked(namespace,attribute,content,current) +-- head, stack = insert_node_after(head,stack,copy_node(nsnone)) +-- current = p +-- else +-- local ok = false +-- stack.list, ok = stacked(namespace,attribute,content,current) +-- done = done or ok +-- end +-- else +-- local ok = false +-- stack.list, ok = stacked(namespace,attribute,content,current) +-- done = done or ok +-- end +-- end +-- elseif id == rule_code then +-- if stack.width ~= 0 then +-- check() +-- end +-- end +-- stack = stack.next +-- end +-- while depth > 0 do +-- head = insert_node_after(head,stack,copy_node(nsnone)) +-- depth = depth - 1 +-- end +-- return head, done +-- end + +local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise + local stack = head + local done = false + local current = default or 0 + local depth = 0 + local check = false + local leader = false + while stack do + local id = stack.id + if id == glyph_code then + check = true + elseif id == glue_code then + leader = stack.leader + if leader then + check = true + end + elseif id == hlist_code or id == vlist_code then + local content = stack.list + if content then + -- the problem is that broken lines gets the attribute which can be a later one + if nslistwise then + local a = stack[attribute] + if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below + local p = current + current, done = a, true + head = insert_node_before(head,stack,copy_node(nsdata[a])) + stack.list = stacked(namespace,attribute,content,current) + head, stack = insert_node_after(head,stack,copy_node(nsnone)) + current = p + else + local ok = false + stack.list, ok = stacked(namespace,attribute,content,current) + done = done or ok + end + else + local ok = false + stack.list, ok = stacked(namespace,attribute,content,current) + done = done or ok + end + end + elseif id == rule_code then + check = stack.width ~= 0 + end + + if check then + local a = stack[attribute] + if a then + if current ~= a then + head = insert_node_before(head,stack,copy_node(nsdata[a])) + depth = depth + 1 + current, done = a, true + end + if leader then + local ok = false + 
stack.leader, ok = stacked(namespace,attribute,content,current) + done = done or ok + leader = false + end + elseif default > 0 then + -- + elseif current > 0 then + head = insert_node_before(head,stack,copy_node(nsnone)) + depth = depth - 1 + current, done = 0, true + end + check = false + end + + stack = stack.next + end + while depth > 0 do + head = insert_node_after(head,stack,copy_node(nsnone)) + depth = depth - 1 + end + return head, done +end + +states.stacked = stacked + +-- experimental + +-- local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise +-- nsbegin() +-- local current, previous, done, okay = head, head, false, false +-- local attrib = default or unsetvalue +-- +-- local function check() +-- local a = current[attribute] or unsetvalue +-- if a ~= attrib then +-- local n = nsstep(a) +-- if n then +-- -- !!!! TEST CODE !!!! +-- -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a +-- head = insert_node_before(head,current,n) -- a +-- end +-- attrib, done, okay = a, true, true +-- end +-- return a +-- end +-- +-- while current do +-- local id = current.id +-- if id == glyph_code then +-- check() +-- elseif id == glue_code then +-- local content = current.leader +-- if content and check() then +-- -- tricky as a leader has to be a list so we cannot inject before +-- local _, ok = stacker(namespace,attribute,content,attrib) +-- done = done or ok +-- end +-- elseif id == hlist_code or id == vlist_code then +-- local content = current.list +-- if not content then +-- -- skip +-- elseif nslistwise then +-- local a = current[attribute] +-- if a and attrib ~= a and nslistwise[a] then -- viewerlayer +-- done = true +-- head = insert_node_before(head,current,copy_node(nsdata[a])) +-- current.list = stacker(namespace,attribute,content,a) +-- head, current = insert_node_after(head,current,copy_node(nsnone)) +-- else +-- local ok = false +-- current.list, ok = stacker(namespace,attribute,content,attrib) +-- done = done or ok +-- end +-- else +-- local ok = false +-- current.list, ok = stacker(namespace,attribute,content,default) +-- done = done or ok +-- end +-- elseif id == rule_code then +-- if current.width ~= 0 then +-- check() +-- end +-- end +-- previous = current +-- current = current.next +-- end +-- if okay then +-- local n = nsend() +-- if n then +-- -- !!!! TEST CODE !!!! 
+-- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)])) +-- head = insert_node_after(head,previous,n) +-- end +-- end +-- return head, done +-- end + +local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise + nsbegin() + local current = head + local previous = head + local done = false + local okay = false + local attrib = default or unsetvalue + local check = false + local leader = false + while current do + local id = current.id + if id == glyph_code then + check = true + elseif id == glue_code then + leader = current.leader + if leader then + check = true + end + elseif id == hlist_code or id == vlist_code then + local content = current.list + if not content then + -- skip + elseif nslistwise then + local a = current[attribute] + if a and attrib ~= a and nslistwise[a] then -- viewerlayer + done = true + head = insert_node_before(head,current,copy_node(nsdata[a])) + current.list = stacker(namespace,attribute,content,a) + head, current = insert_node_after(head,current,copy_node(nsnone)) + else + local ok = false + current.list, ok = stacker(namespace,attribute,content,attrib) + done = done or ok + end + else + local ok = false + current.list, ok = stacker(namespace,attribute,content,default) + done = done or ok + end + elseif id == rule_code then + check = current.width ~= 0 + end + + if check then + local a = current[attribute] or unsetvalue + if a ~= attrib then + local n = nsstep(a) + if n then + -- !!!! TEST CODE !!!! + -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a + head = insert_node_before(head,current,n) -- a + end + attrib, done, okay = a, true, true + if leader then + -- tricky as a leader has to be a list so we cannot inject before + local _, ok = stacker(namespace,attribute,leader,attrib) + done = done or ok + leader = false + end + end + check = false + end + + previous = current + current = current.next + end + if okay then + local n = nsend() + if n then + -- !!!! TEST CODE !!!! 
+ -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)])) + head = insert_node_after(head,previous,n) + end + end + return head, done +end + +states.stacker = stacker + +-- -- -- + +statistics.register("attribute processing time", function() + return statistics.elapsedseconds(attributes,"front- and backend") +end) diff --git a/tex/context/base/node-fnt.lua b/tex/context/base/node-fnt.lua index 54359117e..edc1c990e 100644 --- a/tex/context/base/node-fnt.lua +++ b/tex/context/base/node-fnt.lua @@ -1,226 +1,226 @@ -if not modules then modules = { } end modules ['node-fnt'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - -if not context then os.exit() end -- generic function in node-dum - -local next, type = next, type -local concat, keys = table.concat, table.keys - -local nodes, node, fonts = nodes, node, fonts - -local trace_characters = false trackers.register("nodes.characters", function(v) trace_characters = v end) -local trace_fontrun = false trackers.register("nodes.fontrun", function(v) trace_fontrun = v end) - -local report_fonts = logs.reporter("fonts","processing") - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers - -local otf = fonts.handlers.otf - -local traverse_id = node.traverse_id -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming -local nodecodes = nodes.nodecodes -local handlers = nodes.handlers - -local glyph_code = nodecodes.glyph - -local setmetatableindex = table.setmetatableindex - --- some tests with using an array of dynamics[id] and processes[id] demonstrated --- that there was nothing to gain (unless we also optimize other parts) --- --- maybe getting rid of the intermediate shared can save some time - --- potential speedup: check for subtype < 256 so that we can remove that test --- elsewhere, danger: injected nodes will not be dealt with but that does not --- happen often; we could consider processing sublists but that might need more --- checking later on; the current approach also permits variants - -local run = 0 - -local setfontdynamics = { } -local fontprocesses = { } - -setmetatableindex(setfontdynamics, function(t,font) - local tfmdata = fontdata[font] - local shared = tfmdata.shared - local v = shared and shared.dynamics and otf.setdynamics or false - t[font] = v - return v -end) - -setmetatableindex(fontprocesses, function(t,font) - local tfmdata = fontdata[font] - local shared = tfmdata.shared -- we need to check shared, only when same features - local processes = shared and shared.processes - if processes and #processes > 0 then - t[font] = processes - return processes - else - t[font] = false - return false - end -end) - -fonts.hashes.setdynamics = setfontdynamics -fonts.hashes.processes = fontprocesses - -function handlers.characters(head) - -- either next or not, but definitely no already processed list - starttiming(nodes) - local usedfonts, attrfonts, done = { }, { }, false - local a, u, prevfont, prevattr = 0, 0, nil, 0 - if trace_fontrun then - run = run + 1 - report_fonts() - report_fonts("checking node list, run %s",run) - report_fonts() - local n = head - while n do - local id = n.id - if id == glyph_code then - local font = n.font - local attr = n[0] or 0 - report_fonts("font %03i, dynamic %03i, glyph %s",font,attr,utf.char(n.char)) - else - report_fonts("[%s]",nodecodes[n.id]) - end - n = 
n.next - end - end - for n in traverse_id(glyph_code,head) do - -- if n.subtype<256 then -- all are 1 - local font = n.font - local attr = n[0] or 0 -- zero attribute is reserved for fonts in context - if font ~= prevfont or attr ~= prevattr then - if attr > 0 then - local used = attrfonts[font] - if not used then - used = { } - attrfonts[font] = used - end - if not used[attr] then - local sd = setfontdynamics[font] - if sd then -- always true ? - local d = sd(font,attr) -- can we cache this one? - if d then - used[attr] = d - a = a + 1 - else - -- can't happen ... otherwise best use nil/false distinction - end - end - end - else - local used = usedfonts[font] - if not used then - local fp = fontprocesses[font] - if fp then - usedfonts[font] = fp - u = u + 1 - else - -- can't happen ... otherwise best use nil/false distinction - end - end - end - prevfont = font - prevattr = attr - end - -- end - end - if trace_fontrun then - report_fonts() - report_fonts("statics : %s",(u > 0 and concat(keys(usedfonts)," ")) or "none") - report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none") - report_fonts() - end - if u == 0 then - -- skip - elseif u == 1 then - local font, processors = next(usedfonts) - local n = #processors - if n > 0 then - local h, d = processors[1](head,font,0) - head = h or head - done = done or d - if n > 1 then - for i=2,n do - local h, d = processors[i](head,font,0) - head = h or head - done = done or d - end - end - end - else - for font, processors in next, usedfonts do - local n = #processors - local h, d = processors[1](head,font,0) - head = h or head - done = done or d - if n > 1 then - for i=2,n do - local h, d = processors[i](head,font,0) - head = h or head - done = done or d - end - end - end - end - if a == 0 then - -- skip - elseif a == 1 then - local font, dynamics = next(attrfonts) - for attribute, processors in next, dynamics do -- attr can switch in between - local n = #processors - if n == 0 then - report_fonts("no processors associated with dynamic %s",attribute) - else - local h, d = processors[1](head,font,attribute) - head = h or head - done = done or d - if n > 1 then - for i=2,n do - local h, d = processors[i](head,font,attribute) - head = h or head - done = done or d - end - end - end - end - else - for font, dynamics in next, attrfonts do - for attribute, processors in next, dynamics do -- attr can switch in between - local n = #processors - if n == 0 then - report_fonts("no processors associated with dynamic %s",attribute) - else - local h, d = processors[1](head,font,attribute) - head = h or head - done = done or d - if n > 1 then - for i=2,n do - local h, d = processors[i](head,font,attribute) - head = h or head - done = done or d - end - end - end - end - end - end - stoptiming(nodes) - if trace_characters then - nodes.report(head,done) - end - return head, true -end - -handlers.protectglyphs = node.protect_glyphs -handlers.unprotectglyphs = node.unprotect_glyphs +if not modules then modules = { } end modules ['node-fnt'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +if not context then os.exit() end -- generic function in node-dum + +local next, type = next, type +local concat, keys = table.concat, table.keys + +local nodes, node, fonts = nodes, node, fonts + +local trace_characters = false trackers.register("nodes.characters", function(v) 
trace_characters = v end) +local trace_fontrun = false trackers.register("nodes.fontrun", function(v) trace_fontrun = v end) + +local report_fonts = logs.reporter("fonts","processing") + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers + +local otf = fonts.handlers.otf + +local traverse_id = node.traverse_id +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming +local nodecodes = nodes.nodecodes +local handlers = nodes.handlers + +local glyph_code = nodecodes.glyph + +local setmetatableindex = table.setmetatableindex + +-- some tests with using an array of dynamics[id] and processes[id] demonstrated +-- that there was nothing to gain (unless we also optimize other parts) +-- +-- maybe getting rid of the intermediate shared can save some time + +-- potential speedup: check for subtype < 256 so that we can remove that test +-- elsewhere, danger: injected nodes will not be dealt with but that does not +-- happen often; we could consider processing sublists but that might need more +-- checking later on; the current approach also permits variants + +local run = 0 + +local setfontdynamics = { } +local fontprocesses = { } + +setmetatableindex(setfontdynamics, function(t,font) + local tfmdata = fontdata[font] + local shared = tfmdata.shared + local v = shared and shared.dynamics and otf.setdynamics or false + t[font] = v + return v +end) + +setmetatableindex(fontprocesses, function(t,font) + local tfmdata = fontdata[font] + local shared = tfmdata.shared -- we need to check shared, only when same features + local processes = shared and shared.processes + if processes and #processes > 0 then + t[font] = processes + return processes + else + t[font] = false + return false + end +end) + +fonts.hashes.setdynamics = setfontdynamics +fonts.hashes.processes = fontprocesses + +function handlers.characters(head) + -- either next or not, but definitely no already processed list + starttiming(nodes) + local usedfonts, attrfonts, done = { }, { }, false + local a, u, prevfont, prevattr = 0, 0, nil, 0 + if trace_fontrun then + run = run + 1 + report_fonts() + report_fonts("checking node list, run %s",run) + report_fonts() + local n = head + while n do + local id = n.id + if id == glyph_code then + local font = n.font + local attr = n[0] or 0 + report_fonts("font %03i, dynamic %03i, glyph %s",font,attr,utf.char(n.char)) + else + report_fonts("[%s]",nodecodes[n.id]) + end + n = n.next + end + end + for n in traverse_id(glyph_code,head) do + -- if n.subtype<256 then -- all are 1 + local font = n.font + local attr = n[0] or 0 -- zero attribute is reserved for fonts in context + if font ~= prevfont or attr ~= prevattr then + if attr > 0 then + local used = attrfonts[font] + if not used then + used = { } + attrfonts[font] = used + end + if not used[attr] then + local sd = setfontdynamics[font] + if sd then -- always true ? + local d = sd(font,attr) -- can we cache this one? + if d then + used[attr] = d + a = a + 1 + else + -- can't happen ... otherwise best use nil/false distinction + end + end + end + else + local used = usedfonts[font] + if not used then + local fp = fontprocesses[font] + if fp then + usedfonts[font] = fp + u = u + 1 + else + -- can't happen ... 
otherwise best use nil/false distinction + end + end + end + prevfont = font + prevattr = attr + end + -- end + end + if trace_fontrun then + report_fonts() + report_fonts("statics : %s",(u > 0 and concat(keys(usedfonts)," ")) or "none") + report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none") + report_fonts() + end + if u == 0 then + -- skip + elseif u == 1 then + local font, processors = next(usedfonts) + local n = #processors + if n > 0 then + local h, d = processors[1](head,font,0) + head = h or head + done = done or d + if n > 1 then + for i=2,n do + local h, d = processors[i](head,font,0) + head = h or head + done = done or d + end + end + end + else + for font, processors in next, usedfonts do + local n = #processors + local h, d = processors[1](head,font,0) + head = h or head + done = done or d + if n > 1 then + for i=2,n do + local h, d = processors[i](head,font,0) + head = h or head + done = done or d + end + end + end + end + if a == 0 then + -- skip + elseif a == 1 then + local font, dynamics = next(attrfonts) + for attribute, processors in next, dynamics do -- attr can switch in between + local n = #processors + if n == 0 then + report_fonts("no processors associated with dynamic %s",attribute) + else + local h, d = processors[1](head,font,attribute) + head = h or head + done = done or d + if n > 1 then + for i=2,n do + local h, d = processors[i](head,font,attribute) + head = h or head + done = done or d + end + end + end + end + else + for font, dynamics in next, attrfonts do + for attribute, processors in next, dynamics do -- attr can switch in between + local n = #processors + if n == 0 then + report_fonts("no processors associated with dynamic %s",attribute) + else + local h, d = processors[1](head,font,attribute) + head = h or head + done = done or d + if n > 1 then + for i=2,n do + local h, d = processors[i](head,font,attribute) + head = h or head + done = done or d + end + end + end + end + end + end + stoptiming(nodes) + if trace_characters then + nodes.report(head,done) + end + return head, true +end + +handlers.protectglyphs = node.protect_glyphs +handlers.unprotectglyphs = node.unprotect_glyphs diff --git a/tex/context/base/node-ini.lua b/tex/context/base/node-ini.lua index 5a3986c3a..1de6fbddd 100644 --- a/tex/context/base/node-ini.lua +++ b/tex/context/base/node-ini.lua @@ -1,421 +1,421 @@ -if not modules then modules = { } end modules ['node-ini'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

Most of the code that had accumulated here is now separated in -modules.
    ---ldx]]-- - --- this module is being reconstructed - -local next, type = next, type -local format, match, gsub = string.format, string.match, string.gsub -local concat, remove = table.concat, table.remove -local sortedhash, sortedkeys, swapped, tohash = table.sortedhash, table.sortedkeys, table.swapped, table.tohash -local utfchar = utf.char -local lpegmatch = lpeg.match -local formatcolumns = utilities.formatters.formatcolumns - ---[[ldx-- -

Access to nodes is what gives LuaTeX its power. Here we -implement a few helper functions. These functions are rather optimized.
    ---ldx]]-- - ---[[ldx-- -

When manipulating node lists in ConTeXt, we will remove -nodes and insert new ones. While node access was implemented, we did -quite some experiments in order to find out if manipulating nodes -in Lua was feasible from the perspective of performance. - -
First of all, we noticed that the bottleneck is more with excessive -callbacks (some get called very often) and the conversion from and to -TeX's datastructures. However, at the end, we -found that inserting and deleting nodes in a table could become a -bottleneck. - -
This resulted in two special situations in passing nodes back to -TeX: a table entry with value false is ignored, -and when instead of a table true is returned, the -original table is used. - -
Insertion is handled (at least in MkIV) as follows. When -we need to insert a node at a certain position, we change the node at -that position by a dummy node, tagged inline which itself -has_attribute the original node and one or more new nodes. Before we pass -back the list we collapse the list. Of course collapsing could be built -into the TeX engine, but this is a not so natural extension. - -
When we collapse (something that we only do when really needed), we -also ignore the empty nodes. [This is obsolete!]
    ---ldx]]-- - -local traverse = node.traverse -local traverse_id = node.traverse_id -local free_node = node.free -local remove_node = node.remove -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after -local node_fields = node.fields - -local allocate = utilities.storage.allocate - -nodes = nodes or { } -local nodes = nodes - -nodes.handlers = nodes.handlers or { } - --- there will be more of this: - -local skipcodes = allocate { - [ 0] = "userskip", - [ 1] = "lineskip", - [ 2] = "baselineskip", - [ 3] = "parskip", - [ 4] = "abovedisplayskip", - [ 5] = "belowdisplayskip", - [ 6] = "abovedisplayshortskip", - [ 7] = "belowdisplayshortskip", - [ 8] = "leftskip", - [ 9] = "rightskip", - [ 10] = "topskip", - [ 11] = "splittopskip", - [ 12] = "tabskip", - [ 13] = "spaceskip", - [ 14] = "xspaceskip", - [ 15] = "parfillskip", - [ 16] = "thinmuskip", - [ 17] = "medmuskip", - [ 18] = "thickmuskip", - [100] = "leaders", - [101] = "cleaders", - [102] = "xleaders", - [103] = "gleaders", -} - -local penaltycodes = allocate { -- unfortunately not used - [ 0] = "userpenalty", -} - -table.setmetatableindex(penaltycodes,function(t,k) return "userpenalty" end) -- not used anyway - -local noadcodes = allocate { - [ 0] = "ord", - [ 1] = "opdisplaylimits", - [ 2] = "oplimits", - [ 3] = "opnolimits", - [ 4] = "bin", - [ 5] = "rel", - [ 6] = "open", - [ 7] = "close", - [ 8] = "punct", - [ 9] = "inner", - [10] = "under", - [11] = "over", - [12] = "vcenter", -} - -local listcodes = allocate { - [ 0] = "unknown", - [ 1] = "line", - [ 2] = "box", - [ 3] = "indent", - [ 4] = "alignment", -- row or column - [ 5] = "cell", -} - -local glyphcodes = allocate { - [0] = "character", - [1] = "glyph", - [2] = "ligature", - [3] = "ghost", - [4] = "left", - [5] = "right", -} - -local kerncodes = allocate { - [0] = "fontkern", - [1] = "userkern", - [2] = "accentkern", -} - -local mathcodes = allocate { - [0] = "beginmath", - [1] = "endmath", -} - -local fillcodes = allocate { - [0] = "stretch", - [1] = "fi", - [2] = "fil", - [3] = "fill", - [4] = "filll", -} - -local margincodes = allocate { - [0] = "left", - [1] = "right", -} - -local disccodes = allocate { - [0] = "discretionary", -- \discretionary - [1] = "explicit", -- \- - [2] = "automatic", -- following a - - [3] = "regular", -- simple - [4] = "first", -- hard first item - [5] = "second", -- hard second item -} - -local function simplified(t) - local r = { } - for k, v in next, t do - r[k] = gsub(v,"_","") - end - return r -end - -local nodecodes = simplified(node.types()) -local whatcodes = simplified(node.whatsits()) - -skipcodes = allocate(swapped(skipcodes,skipcodes)) -noadcodes = allocate(swapped(noadcodes,noadcodes)) -nodecodes = allocate(swapped(nodecodes,nodecodes)) -whatcodes = allocate(swapped(whatcodes,whatcodes)) -listcodes = allocate(swapped(listcodes,listcodes)) -glyphcodes = allocate(swapped(glyphcodes,glyphcodes)) -kerncodes = allocate(swapped(kerncodes,kerncodes)) -penaltycodes = allocate(swapped(penaltycodes,penaltycodes)) -mathcodes = allocate(swapped(mathcodes,mathcodes)) -fillcodes = allocate(swapped(fillcodes,fillcodes)) -margincodes = allocate(swapped(margincodes,margincodes)) -disccodes = allocate(swapped(disccodes,disccodes)) - -nodes.skipcodes = skipcodes nodes.gluecodes = skipcodes -- more official -nodes.noadcodes = noadcodes -nodes.nodecodes = nodecodes -nodes.whatcodes = whatcodes nodes.whatsitcodes = whatcodes -- more official -nodes.listcodes = listcodes -nodes.glyphcodes = glyphcodes -nodes.kerncodes 
= kerncodes -nodes.penaltycodes = kerncodes -nodes.mathcodes = mathcodes -nodes.fillcodes = fillcodes -nodes.margincodes = margincodes -nodes.disccodes = disccodes nodes.discretionarycodes = disccodes - -listcodes.row = listcodes.alignment -listcodes.column = listcodes.alignment - -kerncodes.italiccorrection = kerncodes.userkern -kerncodes.kerning = kerncodes.fontkern - -nodes.codes = allocate { -- mostly for listing - glue = skipcodes, - noad = noadcodes, - node = nodecodes, - hlist = listcodes, - vlist = listcodes, - glyph = glyphcodes, - kern = kerncodes, - penalty = penaltycodes, - math = mathnodes, - fill = fillcodes, - margin = margincodes, - disc = disccodes, - whatsit = whatcodes, -} - -local report_codes = logs.reporter("nodes","codes") - -function nodes.showcodes() - local t = { } - for name, codes in sortedhash(nodes.codes) do - local sorted = sortedkeys(codes) - for i=1,#sorted do - local s = sorted[i] - if type(s) ~= "number" then - t[#t+1] = { name, s, codes[s] } - end - end - end - formatcolumns(t) - for k=1,#t do - report_codes (t[k]) - end -end - -local whatsit_node = nodecodes.whatsit - -local messyhack = tohash { -- temporary solution - nodecodes.attributelist, - nodecodes.attribute, - nodecodes.gluespec, - nodecodes.action, -} - -function nodes.fields(n) - local id = n.id - if id == whatsit_node then - return node_fields(id,n.subtype) - else - local t = node_fields(id) - if messyhack[id] then - for i=1,#t do - if t[i] == "subtype" then - remove(t,i) - break - end - end - end - return t - end -end - -trackers.register("system.showcodes", nodes.showcodes) - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glue_code = nodecodes.glue - --- if t.id == glue_code then --- local s = t.spec --- print(t) --- print(s,s and s.writable) --- if s and s.writable then --- free_node(s) --- end --- t.spec = nil --- end - -local function remove(head, current, free_too) - local t = current - head, current = remove_node(head,current) - if t then - if free_too then - free_node(t) - t = nil - else - t.next = nil - t.prev = nil - end - end - return head, current, t -end - -nodes.remove = remove - -function nodes.delete(head,current) - return remove(head,current,true) -end - -nodes.before = insert_node_before -nodes.after = insert_node_after - --- we need to test this, as it might be fixed now - -function nodes.before(h,c,n) - if c then - if c == h then - n.next = h - n.prev = nil - h.prev = n - else - local cp = c.prev - n.next = c - n.prev = cp - if cp then - cp.next = n - end - c.prev = n - return h, n - end - end - return n, n -end - -function nodes.after(h,c,n) - if c then - local cn = c.next - if cn then - n.next = cn - cn.prev = n - else - n.next = nil - end - c.next = n - n.prev = c - return h, n - end - return n, n -end - --- local h, c = nodes.replace(head,current,new) --- local c = nodes.replace(false,current,new) --- local c = nodes.replace(current,new) - -function nodes.replace(head,current,new) -- no head returned if false - if not new then - head, current, new = false, head, current - end - local prev, next = current.prev, current.next - if next then - new.next = next - next.prev = new - end - if prev then - new.prev = prev - prev.next = new - end - if head then - if head == current then - head = new - end - free_node(current) - return head, new - else - free_node(current) - return new - end -end - --- will move - -local function count(stack,flat) - local n = 0 - while stack do - local id = stack.id - if not flat and id == hlist_code or id == 
vlist_code then - local list = stack.list - if list then - n = n + 1 + count(list) -- self counts too - else - n = n + 1 - end - else - n = n + 1 - end - stack = stack.next - end - return n -end - -nodes.count = count - -local left, space = lpeg.P("<"), lpeg.P(" ") - -local reference = left * (1-left)^0 * left * space^0 * lpeg.C((1-space)^0) - -function nodes.reference(n) - return lpegmatch(reference,tostring(n)) -end - -if not node.next then - - function node.next(n) return n and n.next end - function node.prev(n) return n and n.prev end - -end +if not modules then modules = { } end modules ['node-ini'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

Most of the code that had accumulated here is now separated in
+modules.

+--ldx]]--
+
+-- this module is being reconstructed
+
+local next, type = next, type
+local format, match, gsub = string.format, string.match, string.gsub
+local concat, remove = table.concat, table.remove
+local sortedhash, sortedkeys, swapped, tohash = table.sortedhash, table.sortedkeys, table.swapped, table.tohash
+local utfchar = utf.char
+local lpegmatch = lpeg.match
+local formatcolumns = utilities.formatters.formatcolumns
+
+--[[ldx--
+

Access to nodes is what gives LuaTeX its power. Here we
+implement a few helper functions. These functions are rather optimized.

+--ldx]]--
+
+--[[ldx--
+

When manipulating node lists in ConTeXt, we will remove
+nodes and insert new ones. While node access was implemented, we did
+quite some experiments in order to find out if manipulating nodes
+in Lua was feasible from the perspective of performance.

+
+

First of all, we noticed that the bottleneck is more with excessive
+callbacks (some get called very often) and the conversion from and to
+TeX's datastructures. However, at the end, we
+found that inserting and deleting nodes in a table could become a
+bottleneck.

+
+

This resulted in two special situations in passing nodes back to
+TeX: a table entry with value false is ignored,
+and when instead of a table true is returned, the
+original table is used.

+
+

Insertion is handled (at least in ConTeXt) as follows. When
+we need to insert a node at a certain position, we change the node at
+that position by a dummy node, tagged inline, which itself
+has_attribute the original node and one or more new nodes. Before we pass
+back the list we collapse the list. Of course collapsing could be built
+into the TeX engine, but this is a not so natural extension.

+
+

When we collapse (something that we only do when really needed), we
+also ignore the empty nodes. [This is obsolete!]

    +--ldx]]-- + +local traverse = node.traverse +local traverse_id = node.traverse_id +local free_node = node.free +local remove_node = node.remove +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after +local node_fields = node.fields + +local allocate = utilities.storage.allocate + +nodes = nodes or { } +local nodes = nodes + +nodes.handlers = nodes.handlers or { } + +-- there will be more of this: + +local skipcodes = allocate { + [ 0] = "userskip", + [ 1] = "lineskip", + [ 2] = "baselineskip", + [ 3] = "parskip", + [ 4] = "abovedisplayskip", + [ 5] = "belowdisplayskip", + [ 6] = "abovedisplayshortskip", + [ 7] = "belowdisplayshortskip", + [ 8] = "leftskip", + [ 9] = "rightskip", + [ 10] = "topskip", + [ 11] = "splittopskip", + [ 12] = "tabskip", + [ 13] = "spaceskip", + [ 14] = "xspaceskip", + [ 15] = "parfillskip", + [ 16] = "thinmuskip", + [ 17] = "medmuskip", + [ 18] = "thickmuskip", + [100] = "leaders", + [101] = "cleaders", + [102] = "xleaders", + [103] = "gleaders", +} + +local penaltycodes = allocate { -- unfortunately not used + [ 0] = "userpenalty", +} + +table.setmetatableindex(penaltycodes,function(t,k) return "userpenalty" end) -- not used anyway + +local noadcodes = allocate { + [ 0] = "ord", + [ 1] = "opdisplaylimits", + [ 2] = "oplimits", + [ 3] = "opnolimits", + [ 4] = "bin", + [ 5] = "rel", + [ 6] = "open", + [ 7] = "close", + [ 8] = "punct", + [ 9] = "inner", + [10] = "under", + [11] = "over", + [12] = "vcenter", +} + +local listcodes = allocate { + [ 0] = "unknown", + [ 1] = "line", + [ 2] = "box", + [ 3] = "indent", + [ 4] = "alignment", -- row or column + [ 5] = "cell", +} + +local glyphcodes = allocate { + [0] = "character", + [1] = "glyph", + [2] = "ligature", + [3] = "ghost", + [4] = "left", + [5] = "right", +} + +local kerncodes = allocate { + [0] = "fontkern", + [1] = "userkern", + [2] = "accentkern", +} + +local mathcodes = allocate { + [0] = "beginmath", + [1] = "endmath", +} + +local fillcodes = allocate { + [0] = "stretch", + [1] = "fi", + [2] = "fil", + [3] = "fill", + [4] = "filll", +} + +local margincodes = allocate { + [0] = "left", + [1] = "right", +} + +local disccodes = allocate { + [0] = "discretionary", -- \discretionary + [1] = "explicit", -- \- + [2] = "automatic", -- following a - + [3] = "regular", -- simple + [4] = "first", -- hard first item + [5] = "second", -- hard second item +} + +local function simplified(t) + local r = { } + for k, v in next, t do + r[k] = gsub(v,"_","") + end + return r +end + +local nodecodes = simplified(node.types()) +local whatcodes = simplified(node.whatsits()) + +skipcodes = allocate(swapped(skipcodes,skipcodes)) +noadcodes = allocate(swapped(noadcodes,noadcodes)) +nodecodes = allocate(swapped(nodecodes,nodecodes)) +whatcodes = allocate(swapped(whatcodes,whatcodes)) +listcodes = allocate(swapped(listcodes,listcodes)) +glyphcodes = allocate(swapped(glyphcodes,glyphcodes)) +kerncodes = allocate(swapped(kerncodes,kerncodes)) +penaltycodes = allocate(swapped(penaltycodes,penaltycodes)) +mathcodes = allocate(swapped(mathcodes,mathcodes)) +fillcodes = allocate(swapped(fillcodes,fillcodes)) +margincodes = allocate(swapped(margincodes,margincodes)) +disccodes = allocate(swapped(disccodes,disccodes)) + +nodes.skipcodes = skipcodes nodes.gluecodes = skipcodes -- more official +nodes.noadcodes = noadcodes +nodes.nodecodes = nodecodes +nodes.whatcodes = whatcodes nodes.whatsitcodes = whatcodes -- more official +nodes.listcodes = listcodes +nodes.glyphcodes = glyphcodes +nodes.kerncodes 
= kerncodes +nodes.penaltycodes = kerncodes +nodes.mathcodes = mathcodes +nodes.fillcodes = fillcodes +nodes.margincodes = margincodes +nodes.disccodes = disccodes nodes.discretionarycodes = disccodes + +listcodes.row = listcodes.alignment +listcodes.column = listcodes.alignment + +kerncodes.italiccorrection = kerncodes.userkern +kerncodes.kerning = kerncodes.fontkern + +nodes.codes = allocate { -- mostly for listing + glue = skipcodes, + noad = noadcodes, + node = nodecodes, + hlist = listcodes, + vlist = listcodes, + glyph = glyphcodes, + kern = kerncodes, + penalty = penaltycodes, + math = mathnodes, + fill = fillcodes, + margin = margincodes, + disc = disccodes, + whatsit = whatcodes, +} + +local report_codes = logs.reporter("nodes","codes") + +function nodes.showcodes() + local t = { } + for name, codes in sortedhash(nodes.codes) do + local sorted = sortedkeys(codes) + for i=1,#sorted do + local s = sorted[i] + if type(s) ~= "number" then + t[#t+1] = { name, s, codes[s] } + end + end + end + formatcolumns(t) + for k=1,#t do + report_codes (t[k]) + end +end + +local whatsit_node = nodecodes.whatsit + +local messyhack = tohash { -- temporary solution + nodecodes.attributelist, + nodecodes.attribute, + nodecodes.gluespec, + nodecodes.action, +} + +function nodes.fields(n) + local id = n.id + if id == whatsit_node then + return node_fields(id,n.subtype) + else + local t = node_fields(id) + if messyhack[id] then + for i=1,#t do + if t[i] == "subtype" then + remove(t,i) + break + end + end + end + return t + end +end + +trackers.register("system.showcodes", nodes.showcodes) + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glue_code = nodecodes.glue + +-- if t.id == glue_code then +-- local s = t.spec +-- print(t) +-- print(s,s and s.writable) +-- if s and s.writable then +-- free_node(s) +-- end +-- t.spec = nil +-- end + +local function remove(head, current, free_too) + local t = current + head, current = remove_node(head,current) + if t then + if free_too then + free_node(t) + t = nil + else + t.next = nil + t.prev = nil + end + end + return head, current, t +end + +nodes.remove = remove + +function nodes.delete(head,current) + return remove(head,current,true) +end + +nodes.before = insert_node_before +nodes.after = insert_node_after + +-- we need to test this, as it might be fixed now + +function nodes.before(h,c,n) + if c then + if c == h then + n.next = h + n.prev = nil + h.prev = n + else + local cp = c.prev + n.next = c + n.prev = cp + if cp then + cp.next = n + end + c.prev = n + return h, n + end + end + return n, n +end + +function nodes.after(h,c,n) + if c then + local cn = c.next + if cn then + n.next = cn + cn.prev = n + else + n.next = nil + end + c.next = n + n.prev = c + return h, n + end + return n, n +end + +-- local h, c = nodes.replace(head,current,new) +-- local c = nodes.replace(false,current,new) +-- local c = nodes.replace(current,new) + +function nodes.replace(head,current,new) -- no head returned if false + if not new then + head, current, new = false, head, current + end + local prev, next = current.prev, current.next + if next then + new.next = next + next.prev = new + end + if prev then + new.prev = prev + prev.next = new + end + if head then + if head == current then + head = new + end + free_node(current) + return head, new + else + free_node(current) + return new + end +end + +-- will move + +local function count(stack,flat) + local n = 0 + while stack do + local id = stack.id + if not flat and id == hlist_code or id == 
vlist_code then + local list = stack.list + if list then + n = n + 1 + count(list) -- self counts too + else + n = n + 1 + end + else + n = n + 1 + end + stack = stack.next + end + return n +end + +nodes.count = count + +local left, space = lpeg.P("<"), lpeg.P(" ") + +local reference = left * (1-left)^0 * left * space^0 * lpeg.C((1-space)^0) + +function nodes.reference(n) + return lpegmatch(reference,tostring(n)) +end + +if not node.next then + + function node.next(n) return n and n.next end + function node.prev(n) return n and n.prev end + +end diff --git a/tex/context/base/node-inj.lua b/tex/context/base/node-inj.lua index 697370cfb..d6a851cfb 100644 --- a/tex/context/base/node-inj.lua +++ b/tex/context/base/node-inj.lua @@ -1,519 +1,519 @@ -if not modules then modules = { } end modules ['node-inj'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- This is very experimental (this will change when we have luatex > .50 and --- a few pending thingies are available. Also, Idris needs to make a few more --- test fonts. Btw, future versions of luatex will have extended glyph properties --- that can be of help. Some optimizations can go away when we have faster machines. - -local next = next -local utfchar = utf.char - -local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end) - -local report_injections = logs.reporter("nodes","injections") - -local attributes, nodes, node = attributes, nodes, node - -fonts = fonts -local fontdata = fonts.hashes.identifiers - -nodes.injections = nodes.injections or { } -local injections = nodes.injections - -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph -local kern_code = nodecodes.kern -local nodepool = nodes.pool -local newkern = nodepool.kern - -local traverse_id = node.traverse_id -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after - -local a_kernpair = attributes.private('kernpair') -local a_ligacomp = attributes.private('ligacomp') -local a_markbase = attributes.private('markbase') -local a_markmark = attributes.private('markmark') -local a_markdone = attributes.private('markdone') -local a_cursbase = attributes.private('cursbase') -local a_curscurs = attributes.private('curscurs') -local a_cursdone = attributes.private('cursdone') - --- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as --- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner --- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure --- that this code is not 100% okay but examples are needed to figure things out. - -function injections.installnewkern(nk) - newkern = nk or newkern -end - -local cursives = { } -local marks = { } -local kerns = { } - --- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in --- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we --- can share tables. - --- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs --- checking with husayni (volt and fontforge). 
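
The helpers that node-ini.lua sets up above (nodes.remove, nodes.delete, nodes.before, nodes.after, nodes.count and the *codes lookup tables) are the building blocks used by the other node modules in this patch. A minimal usage sketch, illustrative only and not part of the patch itself; "head" and "n" are assumed to be a node list head and a node somewhere in that list:

local function demo(head,n)
    local nodecodes  = nodes.nodecodes
    local glyph_code = nodecodes.glyph       -- the code tables map names to ids ...
    local glyph_name = nodecodes[glyph_code] -- ... and ids back to names ("glyph")
    local removed
    head, n, removed = nodes.remove(head,n)  -- unlink n but keep it for reuse
    if n and removed then
        head = nodes.before(head,n,removed)  -- reinsert it before its old successor
    end
    return head, nodes.count(head)           -- count descends into nested h/vlists
end
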
- -function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) - local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2]) - local ws, wn = tfmstart.width, tfmnext.width - local bound = #cursives + 1 - start[a_cursbase] = bound - nxt[a_curscurs] = bound - cursives[bound] = { rlmode, dx, dy, ws, wn } - return dx, dy, bound -end - -function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) - local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4] - -- dy = y - h - if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then - local bound = current[a_kernpair] - if bound then - local kb = kerns[bound] - -- inefficient but singles have less, but weird anyway, needs checking - kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h - else - bound = #kerns + 1 - current[a_kernpair] = bound - kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width } - end - return x, y, w, h, bound - end - return x, y, w, h -- no bound -end - -function injections.setkern(current,factor,rlmode,x,tfmchr) - local dx = factor*x - if dx ~= 0 then - local bound = #kerns + 1 - current[a_kernpair] = bound - kerns[bound] = { rlmode, dx } - return dx, bound - else - return 0, 0 - end -end - -function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanchor, ma=markanchor - local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this - local bound = base[a_markbase] -- fails again we should pass it - local index = 1 - if bound then - local mb = marks[bound] - if mb then - -- if not index then index = #mb + 1 end - index = #mb + 1 - mb[index] = { dx, dy, rlmode } - start[a_markmark] = bound - start[a_markdone] = index - return dx, dy, bound - else - report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound) - end - end --- index = index or 1 - index = index or 1 - bound = #marks + 1 - base[a_markbase] = bound - start[a_markmark] = bound - start[a_markdone] = index - marks[bound] = { [index] = { dx, dy, rlmode } } - return dx, dy, bound -end - -local function dir(n) - return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" -end - -local function trace(head) - report_injections("begin run") - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - local kp = n[a_kernpair] - local mb = n[a_markbase] - local mm = n[a_markmark] - local md = n[a_markdone] - local cb = n[a_cursbase] - local cc = n[a_curscurs] - local char = n.char - report_injections("font %s, char %U, glyph %c",n.font,char,char) - if kp then - local k = kerns[kp] - if k[3] then - report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) - else - report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) - end - end - if mb then - report_injections(" markbase: bound %a",mb) - end - if mm then - local m = marks[mm] - if mb then - local m = m[mb] - if m then - report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) - else - report_injections(" markmark: bound %a, missing index",mm) - end - else - m = m[1] - report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) - end - end - if cb then - report_injections(" cursbase: bound %a",cb) - end - if cc then - local c = cursives[cc] - report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) - end - end - end - report_injections("end run") -end - --- todo: reuse tables (i.e. 
no collection), but will be extra fields anyway --- todo: check for attribute - --- We can have a fast test on a font being processed, so we can check faster for marks etc --- but I'll make a context variant anyway. - -local function show_result(head) - local current = head - local skipping = false - while current do - local id = current.id - if id == glyph_code then - report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset) - skipping = false - elseif id == kern_code then - report_injections("kern: %p",current.kern) - skipping = false - elseif not skipping then - report_injections() - skipping = true - end - current = current.next - end -end - -function injections.handler(head,where,keep) - local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns) - if has_marks or has_cursives then - if trace_injections then - trace(head) - end - -- in the future variant we will not copy items but refs to tables - local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0 - if has_kerns then -- move outside loop - local nf, tm = nil, nil - for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts - if n.subtype < 256 then - nofvalid = nofvalid + 1 - valid[nofvalid] = n - if n.font ~= nf then - nf = n.font - tm = fontdata[nf].resources.marks - end - if tm then - mk[n] = tm[n.char] - end - local k = n[a_kernpair] - if k then - local kk = kerns[k] - if kk then - local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0 - local dy = y - h - if dy ~= 0 then - ky[n] = dy - end - if w ~= 0 or x ~= 0 then - wx[n] = kk - end - rl[n] = kk[1] -- could move in test - end - end - end - end - else - local nf, tm = nil, nil - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - nofvalid = nofvalid + 1 - valid[nofvalid] = n - if n.font ~= nf then - nf = n.font - tm = fontdata[nf].resources.marks - end - if tm then - mk[n] = tm[n.char] - end - end - end - end - if nofvalid > 0 then - -- we can assume done == true because we have cursives and marks - local cx = { } - if has_kerns and next(ky) then - for n, k in next, ky do - n.yoffset = k - end - end - -- todo: reuse t and use maxt - if has_cursives then - local p_cursbase, p = nil, nil - -- since we need valid[n+1] we can also use a "while true do" - local t, d, maxt = { }, { }, 0 - for i=1,nofvalid do -- valid == glyphs - local n = valid[i] - if not mk[n] then - local n_cursbase = n[a_cursbase] - if p_cursbase then - local n_curscurs = n[a_curscurs] - if p_cursbase == n_curscurs then - local c = cursives[n_curscurs] - if c then - local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5] - if rlmode >= 0 then - dx = dx - ws - else - dx = dx + wn - end - if dx ~= 0 then - cx[n] = dx - rl[n] = rlmode - end - -- if rlmode and rlmode < 0 then - dy = -dy - -- end - maxt = maxt + 1 - t[maxt] = p - d[maxt] = dy - else - maxt = 0 - end - end - elseif maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ti.yoffset + ny - end - maxt = 0 - end - if not n_cursbase and maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ny - end - maxt = 0 - end - p_cursbase, p = n_cursbase, n - end - end - if maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ny - end - maxt = 0 - end - if not keep then - cursives = { } - end - end - if has_marks then - for i=1,nofvalid do 
- local p = valid[i] - local p_markbase = p[a_markbase] - if p_markbase then - local mrks = marks[p_markbase] - local nofmarks = #mrks - for n in traverse_id(glyph_code,p.next) do - local n_markmark = n[a_markmark] - if p_markbase == n_markmark then - local index = n[a_markdone] or 1 - local d = mrks[index] - if d then - local rlmode = d[3] - -- - local k = wx[p] - if k then - local x = k[2] - local w = k[4] - if w then - if rlmode and rlmode >= 0 then - -- kern(x) glyph(p) kern(w-x) mark(n) - n.xoffset = p.xoffset - p.width + d[1] - (w-x) - else - -- kern(w-x) glyph(p) kern(x) mark(n) - n.xoffset = p.xoffset - d[1] - x - end - else - if rlmode and rlmode >= 0 then - -- okay for husayni - n.xoffset = p.xoffset - p.width + d[1] - else - -- needs checking: is x ok here? - n.xoffset = p.xoffset - d[1] - x - end - end - else - if rlmode and rlmode >= 0 then - n.xoffset = p.xoffset - p.width + d[1] - else - n.xoffset = p.xoffset - d[1] - end - end - -- -- - if mk[p] then - n.yoffset = p.yoffset + d[2] - else - n.yoffset = n.yoffset + p.yoffset + d[2] - end - -- - if nofmarks == 1 then - break - else - nofmarks = nofmarks - 1 - end - end - else - -- KE: there can be sequences in ligatures - end - end - end - end - if not keep then - marks = { } - end - end - -- todo : combine - if next(wx) then - for n, k in next, wx do - -- only w can be nil (kernclasses), can be sped up when w == nil - local x = k[2] - local w = k[4] - if w then - local rl = k[1] -- r2l = k[6] - local wx = w - x - if rl < 0 then -- KE: don't use r2l here - if wx ~= 0 then - insert_node_before(head,n,newkern(wx)) -- type 0/2 - end - if x ~= 0 then - insert_node_after (head,n,newkern(x)) -- type 0/2 - end - else - if x ~= 0 then - insert_node_before(head,n,newkern(x)) -- type 0/2 - end - if wx ~= 0 then - insert_node_after (head,n,newkern(wx)) -- type 0/2 - end - end - elseif x ~= 0 then - -- this needs checking for rl < 0 but it is unlikely that a r2l script - -- uses kernclasses between glyphs so we're probably safe (KE has a - -- problematic font where marks interfere with rl < 0 in the previous - -- case) - insert_node_before(head,n,newkern(x)) -- a real font kern, type 0 - end - end - end - if next(cx) then - for n, k in next, cx do - if k ~= 0 then - local rln = rl[n] - if rln and rln < 0 then - insert_node_before(head,n,newkern(-k)) -- type 0/2 - else - insert_node_before(head,n,newkern(k)) -- type 0/2 - end - end - end - end - if not keep then - kerns = { } - end - -- if trace_injections then - -- show_result(head) - -- end - return head, true - elseif not keep then - kerns, cursives, marks = { }, { }, { } - end - elseif has_kerns then - if trace_injections then - trace(head) - end - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - local k = n[a_kernpair] - if k then - local kk = kerns[k] - if kk then - local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4] - if y and y ~= 0 then - n.yoffset = y -- todo: h ? - end - if w then - -- copied from above - -- local r2l = kk[6] - local wx = w - x - if rl < 0 then -- KE: don't use r2l here - if wx ~= 0 then - insert_node_before(head,n,newkern(wx)) - end - if x ~= 0 then - insert_node_after (head,n,newkern(x)) - end - else - if x ~= 0 then - insert_node_before(head,n,newkern(x)) - end - if wx ~= 0 then - insert_node_after(head,n,newkern(wx)) - end - end - else - -- simple (e.g. 
kernclass kerns) - if x ~= 0 then - insert_node_before(head,n,newkern(x)) - end - end - end - end - end - end - if not keep then - kerns = { } - end - -- if trace_injections then - -- show_result(head) - -- end - return head, true - else - -- no tracing needed - end - return head, false -end +if not modules then modules = { } end modules ['node-inj'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- This is very experimental (this will change when we have luatex > .50 and +-- a few pending thingies are available. Also, Idris needs to make a few more +-- test fonts. Btw, future versions of luatex will have extended glyph properties +-- that can be of help. Some optimizations can go away when we have faster machines. + +local next = next +local utfchar = utf.char + +local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end) + +local report_injections = logs.reporter("nodes","injections") + +local attributes, nodes, node = attributes, nodes, node + +fonts = fonts +local fontdata = fonts.hashes.identifiers + +nodes.injections = nodes.injections or { } +local injections = nodes.injections + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local kern_code = nodecodes.kern +local nodepool = nodes.pool +local newkern = nodepool.kern + +local traverse_id = node.traverse_id +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after + +local a_kernpair = attributes.private('kernpair') +local a_ligacomp = attributes.private('ligacomp') +local a_markbase = attributes.private('markbase') +local a_markmark = attributes.private('markmark') +local a_markdone = attributes.private('markdone') +local a_cursbase = attributes.private('cursbase') +local a_curscurs = attributes.private('curscurs') +local a_cursdone = attributes.private('cursdone') + +-- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as +-- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner +-- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure +-- that this code is not 100% okay but examples are needed to figure things out. + +function injections.installnewkern(nk) + newkern = nk or newkern +end + +local cursives = { } +local marks = { } +local kerns = { } + +-- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in +-- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we +-- can share tables. + +-- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs +-- checking with husayni (volt and fontforge). 
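
The injector registers the "nodes.injections" tracker (see the registration above), which makes the trace() pass dump kerns, marks and cursives per run. A hedged sketch of switching it on through the usual ConTeXt setter interface, for illustration only:

trackers.enable("nodes.injections")   -- from the Lua end
-- \enabletrackers[nodes.injections]  -- or from the TeX end
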
+ +function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) + local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2]) + local ws, wn = tfmstart.width, tfmnext.width + local bound = #cursives + 1 + start[a_cursbase] = bound + nxt[a_curscurs] = bound + cursives[bound] = { rlmode, dx, dy, ws, wn } + return dx, dy, bound +end + +function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) + local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4] + -- dy = y - h + if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then + local bound = current[a_kernpair] + if bound then + local kb = kerns[bound] + -- inefficient but singles have less, but weird anyway, needs checking + kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h + else + bound = #kerns + 1 + current[a_kernpair] = bound + kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width } + end + return x, y, w, h, bound + end + return x, y, w, h -- no bound +end + +function injections.setkern(current,factor,rlmode,x,tfmchr) + local dx = factor*x + if dx ~= 0 then + local bound = #kerns + 1 + current[a_kernpair] = bound + kerns[bound] = { rlmode, dx } + return dx, bound + else + return 0, 0 + end +end + +function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanchor, ma=markanchor + local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this + local bound = base[a_markbase] -- fails again we should pass it + local index = 1 + if bound then + local mb = marks[bound] + if mb then + -- if not index then index = #mb + 1 end + index = #mb + 1 + mb[index] = { dx, dy, rlmode } + start[a_markmark] = bound + start[a_markdone] = index + return dx, dy, bound + else + report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound) + end + end +-- index = index or 1 + index = index or 1 + bound = #marks + 1 + base[a_markbase] = bound + start[a_markmark] = bound + start[a_markdone] = index + marks[bound] = { [index] = { dx, dy, rlmode } } + return dx, dy, bound +end + +local function dir(n) + return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" +end + +local function trace(head) + report_injections("begin run") + for n in traverse_id(glyph_code,head) do + if n.subtype < 256 then + local kp = n[a_kernpair] + local mb = n[a_markbase] + local mm = n[a_markmark] + local md = n[a_markdone] + local cb = n[a_cursbase] + local cc = n[a_curscurs] + local char = n.char + report_injections("font %s, char %U, glyph %c",n.font,char,char) + if kp then + local k = kerns[kp] + if k[3] then + report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) + else + report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) + end + end + if mb then + report_injections(" markbase: bound %a",mb) + end + if mm then + local m = marks[mm] + if mb then + local m = m[mb] + if m then + report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) + else + report_injections(" markmark: bound %a, missing index",mm) + end + else + m = m[1] + report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) + end + end + if cb then + report_injections(" cursbase: bound %a",cb) + end + if cc then + local c = cursives[cc] + report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) + end + end + end + report_injections("end run") +end + +-- todo: reuse tables (i.e. 
no collection), but will be extra fields anyway +-- todo: check for attribute + +-- We can have a fast test on a font being processed, so we can check faster for marks etc +-- but I'll make a context variant anyway. + +local function show_result(head) + local current = head + local skipping = false + while current do + local id = current.id + if id == glyph_code then + report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset) + skipping = false + elseif id == kern_code then + report_injections("kern: %p",current.kern) + skipping = false + elseif not skipping then + report_injections() + skipping = true + end + current = current.next + end +end + +function injections.handler(head,where,keep) + local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns) + if has_marks or has_cursives then + if trace_injections then + trace(head) + end + -- in the future variant we will not copy items but refs to tables + local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0 + if has_kerns then -- move outside loop + local nf, tm = nil, nil + for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts + if n.subtype < 256 then + nofvalid = nofvalid + 1 + valid[nofvalid] = n + if n.font ~= nf then + nf = n.font + tm = fontdata[nf].resources.marks + end + if tm then + mk[n] = tm[n.char] + end + local k = n[a_kernpair] + if k then + local kk = kerns[k] + if kk then + local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0 + local dy = y - h + if dy ~= 0 then + ky[n] = dy + end + if w ~= 0 or x ~= 0 then + wx[n] = kk + end + rl[n] = kk[1] -- could move in test + end + end + end + end + else + local nf, tm = nil, nil + for n in traverse_id(glyph_code,head) do + if n.subtype < 256 then + nofvalid = nofvalid + 1 + valid[nofvalid] = n + if n.font ~= nf then + nf = n.font + tm = fontdata[nf].resources.marks + end + if tm then + mk[n] = tm[n.char] + end + end + end + end + if nofvalid > 0 then + -- we can assume done == true because we have cursives and marks + local cx = { } + if has_kerns and next(ky) then + for n, k in next, ky do + n.yoffset = k + end + end + -- todo: reuse t and use maxt + if has_cursives then + local p_cursbase, p = nil, nil + -- since we need valid[n+1] we can also use a "while true do" + local t, d, maxt = { }, { }, 0 + for i=1,nofvalid do -- valid == glyphs + local n = valid[i] + if not mk[n] then + local n_cursbase = n[a_cursbase] + if p_cursbase then + local n_curscurs = n[a_curscurs] + if p_cursbase == n_curscurs then + local c = cursives[n_curscurs] + if c then + local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5] + if rlmode >= 0 then + dx = dx - ws + else + dx = dx + wn + end + if dx ~= 0 then + cx[n] = dx + rl[n] = rlmode + end + -- if rlmode and rlmode < 0 then + dy = -dy + -- end + maxt = maxt + 1 + t[maxt] = p + d[maxt] = dy + else + maxt = 0 + end + end + elseif maxt > 0 then + local ny = n.yoffset + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + ti.yoffset = ti.yoffset + ny + end + maxt = 0 + end + if not n_cursbase and maxt > 0 then + local ny = n.yoffset + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + ti.yoffset = ny + end + maxt = 0 + end + p_cursbase, p = n_cursbase, n + end + end + if maxt > 0 then + local ny = n.yoffset + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + ti.yoffset = ny + end + maxt = 0 + end + if not keep then + cursives = { } + end + end + if has_marks then + for i=1,nofvalid do 
+ local p = valid[i] + local p_markbase = p[a_markbase] + if p_markbase then + local mrks = marks[p_markbase] + local nofmarks = #mrks + for n in traverse_id(glyph_code,p.next) do + local n_markmark = n[a_markmark] + if p_markbase == n_markmark then + local index = n[a_markdone] or 1 + local d = mrks[index] + if d then + local rlmode = d[3] + -- + local k = wx[p] + if k then + local x = k[2] + local w = k[4] + if w then + if rlmode and rlmode >= 0 then + -- kern(x) glyph(p) kern(w-x) mark(n) + n.xoffset = p.xoffset - p.width + d[1] - (w-x) + else + -- kern(w-x) glyph(p) kern(x) mark(n) + n.xoffset = p.xoffset - d[1] - x + end + else + if rlmode and rlmode >= 0 then + -- okay for husayni + n.xoffset = p.xoffset - p.width + d[1] + else + -- needs checking: is x ok here? + n.xoffset = p.xoffset - d[1] - x + end + end + else + if rlmode and rlmode >= 0 then + n.xoffset = p.xoffset - p.width + d[1] + else + n.xoffset = p.xoffset - d[1] + end + end + -- -- + if mk[p] then + n.yoffset = p.yoffset + d[2] + else + n.yoffset = n.yoffset + p.yoffset + d[2] + end + -- + if nofmarks == 1 then + break + else + nofmarks = nofmarks - 1 + end + end + else + -- KE: there can be sequences in ligatures + end + end + end + end + if not keep then + marks = { } + end + end + -- todo : combine + if next(wx) then + for n, k in next, wx do + -- only w can be nil (kernclasses), can be sped up when w == nil + local x = k[2] + local w = k[4] + if w then + local rl = k[1] -- r2l = k[6] + local wx = w - x + if rl < 0 then -- KE: don't use r2l here + if wx ~= 0 then + insert_node_before(head,n,newkern(wx)) -- type 0/2 + end + if x ~= 0 then + insert_node_after (head,n,newkern(x)) -- type 0/2 + end + else + if x ~= 0 then + insert_node_before(head,n,newkern(x)) -- type 0/2 + end + if wx ~= 0 then + insert_node_after (head,n,newkern(wx)) -- type 0/2 + end + end + elseif x ~= 0 then + -- this needs checking for rl < 0 but it is unlikely that a r2l script + -- uses kernclasses between glyphs so we're probably safe (KE has a + -- problematic font where marks interfere with rl < 0 in the previous + -- case) + insert_node_before(head,n,newkern(x)) -- a real font kern, type 0 + end + end + end + if next(cx) then + for n, k in next, cx do + if k ~= 0 then + local rln = rl[n] + if rln and rln < 0 then + insert_node_before(head,n,newkern(-k)) -- type 0/2 + else + insert_node_before(head,n,newkern(k)) -- type 0/2 + end + end + end + end + if not keep then + kerns = { } + end + -- if trace_injections then + -- show_result(head) + -- end + return head, true + elseif not keep then + kerns, cursives, marks = { }, { }, { } + end + elseif has_kerns then + if trace_injections then + trace(head) + end + for n in traverse_id(glyph_code,head) do + if n.subtype < 256 then + local k = n[a_kernpair] + if k then + local kk = kerns[k] + if kk then + local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4] + if y and y ~= 0 then + n.yoffset = y -- todo: h ? + end + if w then + -- copied from above + -- local r2l = kk[6] + local wx = w - x + if rl < 0 then -- KE: don't use r2l here + if wx ~= 0 then + insert_node_before(head,n,newkern(wx)) + end + if x ~= 0 then + insert_node_after (head,n,newkern(x)) + end + else + if x ~= 0 then + insert_node_before(head,n,newkern(x)) + end + if wx ~= 0 then + insert_node_after(head,n,newkern(wx)) + end + end + else + -- simple (e.g. 
kernclass kerns) + if x ~= 0 then + insert_node_before(head,n,newkern(x)) + end + end + end + end + end + end + if not keep then + kerns = { } + end + -- if trace_injections then + -- show_result(head) + -- end + return head, true + else + -- no tracing needed + end + return head, false +end diff --git a/tex/context/base/node-mig.lua b/tex/context/base/node-mig.lua index 9fc35a048..fd14fc43f 100644 --- a/tex/context/base/node-mig.lua +++ b/tex/context/base/node-mig.lua @@ -1,138 +1,138 @@ -if not modules then modules = { } end modules ['node-mig'] = { - version = 1.001, - comment = "companion to node-mig.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format - -local attributes, nodes, node = attributes, nodes, node - -local remove_nodes = nodes.remove - -local nodecodes = nodes.nodecodes -local tasks = nodes.tasks - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local insert_code = nodecodes.ins -local mark_code = nodecodes.mark - -local a_migrated = attributes.private("migrated") - -local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end) - -local report_nodes = logs.reporter("nodes","migrations") - -local migrate_inserts, migrate_marks, inserts_too - -local t_inserts, t_marks, t_sweeps = 0, 0, 0 - -local function locate(head,first,last,ni,nm) - local current = head - while current do - local id = current.id - if id == vlist_code or id == hlist_code then - current.list, first, last, ni, nm = locate(current.list,first,last,ni,nm) - current = current.next - elseif migrate_inserts and id == insert_code then - local insert - head, current, insert = remove_nodes(head,current) - insert.next = nil - if first then - insert.prev, last.next = last, insert - else - insert.prev, first = nil, insert - end - last, ni = insert, ni + 1 - elseif migrate_marks and id == mark_code then - local mark - head, current, mark = remove_nodes(head,current) - mark.next = nil - if first then - mark.prev, last.next = last, mark - else - mark.prev, first = nil, mark - end - last, nm = mark, nm + 1 - else - current= current.next - end - end - return head, first, last, ni, nm -end - -function nodes.handlers.migrate(head,where) - local done = false - if head then - if trace_migrations then - report_nodes("migration sweep %a",where) - end - local current = head - while current do - local id = current.id - -- inserts_too is a temp hack, we should only do them when it concerns - -- newly placed (flushed) inserts - if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not current[a_migrated] then - current[a_migrated] = 1 - t_sweeps = t_sweeps + 1 - local h = current.list - local first, last, ni, nm - while h do - local id = h.id - if id == vlist_code or id == hlist_code then - h, first, last, ni, nm = locate(h,first,last,0,0) - end - h = h.next - end - if first then - t_inserts, t_marks = t_inserts + ni, t_marks + nm - if trace_migrations and (ni > 0 or nm > 0) then - report_nodes("sweep %a, container %a, %s inserts and %s marks migrated outwards during %a", - t_sweeps,nodecodes[id],ni,nm,where) - end - -- inserts after head - local n = current.next - if n then - last.next, n.prev = n, last - end - current.next, first.prev = first, current - done, current = true, last - end - end - current = current.next - end - return head, done - end -end - --- for the moment this way, this will 
disappear - -experiments.register("marks.migrate", function(v) - if v then - tasks.enableaction("mvlbuilders", "nodes.handlers.migrate") - end - migrate_marks = v -end) - -experiments.register("inserts.migrate", function(v) - if v then - tasks.enableaction("mvlbuilders", "nodes.handlers.migrate") - end - migrate_inserts = v -end) - -experiments.register("inserts.migrate.nested", function(v) - if v then - tasks.enableaction("mvlbuilders", "nodes.handlers.migrate") - end - inserts_too = v -end) - -statistics.register("node migrations", function() - if trace_migrations and t_sweeps > 0 then - return format("%s sweeps, %s inserts moved, %s marks moved",t_sweeps,t_inserts,t_marks) - end -end) +if not modules then modules = { } end modules ['node-mig'] = { + version = 1.001, + comment = "companion to node-mig.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format + +local attributes, nodes, node = attributes, nodes, node + +local remove_nodes = nodes.remove + +local nodecodes = nodes.nodecodes +local tasks = nodes.tasks + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local insert_code = nodecodes.ins +local mark_code = nodecodes.mark + +local a_migrated = attributes.private("migrated") + +local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end) + +local report_nodes = logs.reporter("nodes","migrations") + +local migrate_inserts, migrate_marks, inserts_too + +local t_inserts, t_marks, t_sweeps = 0, 0, 0 + +local function locate(head,first,last,ni,nm) + local current = head + while current do + local id = current.id + if id == vlist_code or id == hlist_code then + current.list, first, last, ni, nm = locate(current.list,first,last,ni,nm) + current = current.next + elseif migrate_inserts and id == insert_code then + local insert + head, current, insert = remove_nodes(head,current) + insert.next = nil + if first then + insert.prev, last.next = last, insert + else + insert.prev, first = nil, insert + end + last, ni = insert, ni + 1 + elseif migrate_marks and id == mark_code then + local mark + head, current, mark = remove_nodes(head,current) + mark.next = nil + if first then + mark.prev, last.next = last, mark + else + mark.prev, first = nil, mark + end + last, nm = mark, nm + 1 + else + current= current.next + end + end + return head, first, last, ni, nm +end + +function nodes.handlers.migrate(head,where) + local done = false + if head then + if trace_migrations then + report_nodes("migration sweep %a",where) + end + local current = head + while current do + local id = current.id + -- inserts_too is a temp hack, we should only do them when it concerns + -- newly placed (flushed) inserts + if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not current[a_migrated] then + current[a_migrated] = 1 + t_sweeps = t_sweeps + 1 + local h = current.list + local first, last, ni, nm + while h do + local id = h.id + if id == vlist_code or id == hlist_code then + h, first, last, ni, nm = locate(h,first,last,0,0) + end + h = h.next + end + if first then + t_inserts, t_marks = t_inserts + ni, t_marks + nm + if trace_migrations and (ni > 0 or nm > 0) then + report_nodes("sweep %a, container %a, %s inserts and %s marks migrated outwards during %a", + t_sweeps,nodecodes[id],ni,nm,where) + end + -- inserts after head + local n = current.next + if n then + last.next, n.prev = n, 
last + end + current.next, first.prev = first, current + done, current = true, last + end + end + current = current.next + end + return head, done + end +end + +-- for the moment this way, this will disappear + +experiments.register("marks.migrate", function(v) + if v then + tasks.enableaction("mvlbuilders", "nodes.handlers.migrate") + end + migrate_marks = v +end) + +experiments.register("inserts.migrate", function(v) + if v then + tasks.enableaction("mvlbuilders", "nodes.handlers.migrate") + end + migrate_inserts = v +end) + +experiments.register("inserts.migrate.nested", function(v) + if v then + tasks.enableaction("mvlbuilders", "nodes.handlers.migrate") + end + inserts_too = v +end) + +statistics.register("node migrations", function() + if trace_migrations and t_sweeps > 0 then + return format("%s sweeps, %s inserts moved, %s marks moved",t_sweeps,t_inserts,t_marks) + end +end) diff --git a/tex/context/base/node-pag.lua b/tex/context/base/node-pag.lua index 9b8202042..47eba4eeb 100644 --- a/tex/context/base/node-pag.lua +++ b/tex/context/base/node-pag.lua @@ -1,30 +1,30 @@ -if not modules then modules = { } end modules ['node-pag'] = { - version = 1.001, - comment = "companion to node-pag.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this callback might disappear and come back in the same way --- as par builders - -pagebuilders = pagebuilders or { } - -local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming - -local actions = nodes.tasks.actions("pagebuilders") - -local function processor(head,groupcode,size,packtype,maxdepth,direction) - starttiming(pagebuilders) - local _, done = actions(head,groupcode,size,packtype,maxdepth,direction) - stoptiming(pagebuilders) - return (done and head) or true --- return vpack(head) -end - ---~ callbacks.register('pre_output_filter', processor, "preparing output box") - ---~ statistics.register("output preparation time", function() ---~ return statistics.elapsedseconds(pagebuilders) ---~ end) +if not modules then modules = { } end modules ['node-pag'] = { + version = 1.001, + comment = "companion to node-pag.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this callback might disappear and come back in the same way +-- as par builders + +pagebuilders = pagebuilders or { } + +local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming + +local actions = nodes.tasks.actions("pagebuilders") + +local function processor(head,groupcode,size,packtype,maxdepth,direction) + starttiming(pagebuilders) + local _, done = actions(head,groupcode,size,packtype,maxdepth,direction) + stoptiming(pagebuilders) + return (done and head) or true +-- return vpack(head) +end + +--~ callbacks.register('pre_output_filter', processor, "preparing output box") + +--~ statistics.register("output preparation time", function() +--~ return statistics.elapsedseconds(pagebuilders) +--~ end) diff --git a/tex/context/base/node-pro.lua b/tex/context/base/node-pro.lua index 60f2d8a72..6b0829e5e 100644 --- a/tex/context/base/node-pro.lua +++ b/tex/context/base/node-pro.lua @@ -1,165 +1,165 @@ -if not modules then modules = { } end modules ['node-pro'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development 
Team", - license = "see context related readme files" -} - -local utfchar = utf.char -local format, concat = string.format, table.concat - -local trace_callbacks = false trackers.register("nodes.callbacks", function(v) trace_callbacks = v end) - -local report_nodes = logs.reporter("nodes","processors") - -local nodes, node = nodes, node - -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph -local tasks = nodes.tasks - -local free_node = node.free -local first_glyph = node.first_glyph or node.first_character -local has_attribute = node.has_attribute - -nodes.processors = nodes.processors or { } -local processors = nodes.processors - --- vbox: grouptype: vbox vtop output split_off split_keep | box_type: exactly|aditional --- hbox: grouptype: hbox adjusted_hbox(=hbox_in_vmode) | box_type: exactly|aditional - -local actions = tasks.actions("processors") - -local n = 0 - -local function reconstruct(head) -- we probably have a better one - local t, n, h = { }, 0, head - while h do - n = n + 1 - local id = h.id - if id == glyph_code then -- todo: disc etc - t[n] = utfchar(h.char) - else - t[n] = "[]" - end - h = h.next - end - return concat(t) -end - -local function tracer(what,state,head,groupcode,before,after,show) - if not groupcode then - groupcode = "unknown" - elseif groupcode == "" then - groupcode = "mvl" - end - n = n + 1 - if show then - report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head)) - else - report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after) - end -end - -processors.tracer = tracer - -processors.enabled = true -- this will become a proper state (like trackers) - -function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction) - local first, found = first_glyph(head) -- they really need to be glyphs - if found then - if trace_callbacks then - local before = nodes.count(head,true) - local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first - local after = nodes.count(head,true) - if done then - tracer("pre_linebreak","changed",head,groupcode,before,after,true) - else - tracer("pre_linebreak","unchanged",head,groupcode,before,after,true) - end - return done and head or true - else - local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first - return done and head or true - end - elseif trace_callbacks then - local n = nodes.count(head,false) - tracer("pre_linebreak","no chars",head,groupcode,n,n) - end - return true -end - -local enabled = true - -function processors.hpack_filter(head,groupcode,size,packtype,direction) - if enabled then - local first, found = first_glyph(head) -- they really need to be glyphs - if found then - if trace_callbacks then - local before = nodes.count(head,true) - local head, done = actions(head,groupcode,size,packtype,direction) - local after = nodes.count(head,true) - if done then - tracer("hpack","changed",head,groupcode,before,after,true) - else - tracer("hpack","unchanged",head,groupcode,before,after,true) - end - return done and head or true - else - local head, done = actions(head,groupcode,size,packtype,direction) - return done and head or true - end - elseif trace_callbacks then - local n = nodes.count(head,false) - tracer("hpack","no chars",head,groupcode,n,n) - end - end - return true -end - -local hpack = node.hpack - -function nodes.fasthpack(...) 
-- todo: pass explicit arguments - enabled = false - local hp, b = hpack(...) - hp.prev = nil - hp.next = nil - enabled = true - return hp, b -end - -callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter, "all kind of horizontal manipulations (before par break)") -callbacks.register('hpack_filter' , processors.hpack_filter, "all kind of horizontal manipulations (before hbox creation)") - -local actions = tasks.actions("finalizers") -- head, where - --- beware, these are packaged boxes so no first_glyph test --- maybe some day a hash with valid groupcodes --- --- beware, much can pass twice, for instance vadjust passes two times --- --- something weird here .. group mvl when making a vbox - -function processors.post_linebreak_filter(head,groupcode) - if trace_callbacks then - local before = nodes.count(head,true) - local head, done = actions(head,groupcode) - local after = nodes.count(head,true) - if done then - tracer("post_linebreak","changed",head,groupcode,before,after,true) - else - tracer("post_linebreak","unchanged",head,groupcode,before,after,true) - end - return done and head or true - else - local head, done = actions(head,groupcode) - return done and head or true - end -end - -callbacks.register('post_linebreak_filter', processors.post_linebreak_filter,"all kind of horizontal manipulations (after par break)") - -statistics.register("h-node processing time", function() - return statistics.elapsedseconds(nodes,"including kernel") -- hm, ok here? -end) +if not modules then modules = { } end modules ['node-pro'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local utfchar = utf.char +local format, concat = string.format, table.concat + +local trace_callbacks = false trackers.register("nodes.callbacks", function(v) trace_callbacks = v end) + +local report_nodes = logs.reporter("nodes","processors") + +local nodes, node = nodes, node + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local tasks = nodes.tasks + +local free_node = node.free +local first_glyph = node.first_glyph or node.first_character +local has_attribute = node.has_attribute + +nodes.processors = nodes.processors or { } +local processors = nodes.processors + +-- vbox: grouptype: vbox vtop output split_off split_keep | box_type: exactly|aditional +-- hbox: grouptype: hbox adjusted_hbox(=hbox_in_vmode) | box_type: exactly|aditional + +local actions = tasks.actions("processors") + +local n = 0 + +local function reconstruct(head) -- we probably have a better one + local t, n, h = { }, 0, head + while h do + n = n + 1 + local id = h.id + if id == glyph_code then -- todo: disc etc + t[n] = utfchar(h.char) + else + t[n] = "[]" + end + h = h.next + end + return concat(t) +end + +local function tracer(what,state,head,groupcode,before,after,show) + if not groupcode then + groupcode = "unknown" + elseif groupcode == "" then + groupcode = "mvl" + end + n = n + 1 + if show then + report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head)) + else + report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after) + end +end + +processors.tracer = tracer + +processors.enabled = true -- this will become a proper state (like trackers) + +function 
processors.pre_linebreak_filter(head,groupcode,size,packtype,direction) + local first, found = first_glyph(head) -- they really need to be glyphs + if found then + if trace_callbacks then + local before = nodes.count(head,true) + local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first + local after = nodes.count(head,true) + if done then + tracer("pre_linebreak","changed",head,groupcode,before,after,true) + else + tracer("pre_linebreak","unchanged",head,groupcode,before,after,true) + end + return done and head or true + else + local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first + return done and head or true + end + elseif trace_callbacks then + local n = nodes.count(head,false) + tracer("pre_linebreak","no chars",head,groupcode,n,n) + end + return true +end + +local enabled = true + +function processors.hpack_filter(head,groupcode,size,packtype,direction) + if enabled then + local first, found = first_glyph(head) -- they really need to be glyphs + if found then + if trace_callbacks then + local before = nodes.count(head,true) + local head, done = actions(head,groupcode,size,packtype,direction) + local after = nodes.count(head,true) + if done then + tracer("hpack","changed",head,groupcode,before,after,true) + else + tracer("hpack","unchanged",head,groupcode,before,after,true) + end + return done and head or true + else + local head, done = actions(head,groupcode,size,packtype,direction) + return done and head or true + end + elseif trace_callbacks then + local n = nodes.count(head,false) + tracer("hpack","no chars",head,groupcode,n,n) + end + end + return true +end + +local hpack = node.hpack + +function nodes.fasthpack(...) -- todo: pass explicit arguments + enabled = false + local hp, b = hpack(...) + hp.prev = nil + hp.next = nil + enabled = true + return hp, b +end + +callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter, "all kind of horizontal manipulations (before par break)") +callbacks.register('hpack_filter' , processors.hpack_filter, "all kind of horizontal manipulations (before hbox creation)") + +local actions = tasks.actions("finalizers") -- head, where + +-- beware, these are packaged boxes so no first_glyph test +-- maybe some day a hash with valid groupcodes +-- +-- beware, much can pass twice, for instance vadjust passes two times +-- +-- something weird here .. group mvl when making a vbox + +function processors.post_linebreak_filter(head,groupcode) + if trace_callbacks then + local before = nodes.count(head,true) + local head, done = actions(head,groupcode) + local after = nodes.count(head,true) + if done then + tracer("post_linebreak","changed",head,groupcode,before,after,true) + else + tracer("post_linebreak","unchanged",head,groupcode,before,after,true) + end + return done and head or true + else + local head, done = actions(head,groupcode) + return done and head or true + end +end + +callbacks.register('post_linebreak_filter', processors.post_linebreak_filter,"all kind of horizontal manipulations (after par break)") + +statistics.register("h-node processing time", function() + return statistics.elapsedseconds(nodes,"including kernel") -- hm, ok here? 
+end) diff --git a/tex/context/base/node-ref.lua b/tex/context/base/node-ref.lua index 09e066434..cd46cd2dd 100644 --- a/tex/context/base/node-ref.lua +++ b/tex/context/base/node-ref.lua @@ -1,585 +1,585 @@ -if not modules then modules = { } end modules ['node-ref'] = { - version = 1.001, - comment = "companion to node-ref.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- We supported pdf right from the start and in mkii this has resulted in --- extensive control over the links. Nowadays pdftex provides a lot more --- control over margins but as mkii supports multiple backends we stuck to --- our own mechanisms. In mkiv again we implement our own handling. Eventually --- we will even disable the pdf primitives. - --- helper, will end up in luatex - --- is grouplevel still used? - -local format = string.format - -local allocate, mark = utilities.storage.allocate, utilities.storage.mark - -local cleanupreferences, cleanupdestinations = false, true - -local attributes, nodes, node = attributes, nodes, node - -local nodeinjections = backends.nodeinjections -local codeinjections = backends.codeinjections - -local transparencies = attributes.transparencies -local colors = attributes.colors -local references = structures.references -local tasks = nodes.tasks - -local hpack_list = node.hpack -local list_dimensions = node.dimensions - --- current.glue_set current.glue_sign - -local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end) -local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end) -local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end) - -local report_reference = logs.reporter("backend","references") -local report_destination = logs.reporter("backend","destinations") -local report_area = logs.reporter("backend","areas") - -local nodecodes = nodes.nodecodes -local skipcodes = nodes.skipcodes -local whatcodes = nodes.whatcodes -local listcodes = nodes.listcodes - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glue_code = nodecodes.glue -local whatsit_code = nodecodes.whatsit - -local leftskip_code = skipcodes.leftskip -local rightskip_code = skipcodes.rightskip -local parfillskip_code = skipcodes.parfillskip - -local localpar_code = whatcodes.localpar -local dir_code = whatcodes.dir - -local line_code = listcodes.line - -local nodepool = nodes.pool - -local new_kern = nodepool.kern - -local traverse = node.traverse -local find_node_tail = node.tail or node.slide -local tosequence = nodes.tosequence - --- local function dimensions(parent,start,stop) --- stop = stop and stop.next --- if parent then --- if stop then --- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop) --- else --- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start) --- end --- else --- if stop then --- return list_dimensions(start,stop) --- else --- return list_dimensions(start) --- end --- end --- end --- --- -- more compact - -local function dimensions(parent,start,stop) - if parent then - return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop and stop.next) - else - return list_dimensions(start,stop and stop.next) - end -end - --- is pardir important at all? 
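-- [editorial sketch, not part of the patch] The compact dimensions() helper above measures an
-- inclusive node range: node.dimensions() stops just before its last argument, hence the
-- "stop and stop.next". A minimal illustration, assuming 'box' is an already packed hlist and
-- 'measured' is a hypothetical local name:
local function measured(box,first,last)
    return node.dimensions(box.glue_set,box.glue_sign,box.glue_order,first,last and last.next)
end
local w, h, d = measured(box,box.list,node.tail(box.list)) -- dimensions of the whole content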
- -local function inject_range(head,first,last,reference,make,stack,parent,pardir,txtdir) - local width, height, depth = dimensions(parent,first,last) - if txtdir == "+TRT" or (txtdir == "===" and pardir == "TRT") then -- KH: textdir == "===" test added - width = - width - end - local result, resolved = make(width,height,depth,reference) - if result and resolved then - if head == first then - if trace_backend then - report_area("head: %04i %s %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved) - end - result.next = first - first.prev = result - return result, last - else - if trace_backend then - report_area("middle: %04i %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved) - end - local prev = first.prev - if prev then - result.next = first - result.prev = prev - prev.next = result - first.prev = result - else - result.next = first - first.prev = result - end - if first == head.next then - head.next = result -- hm, weird - end - return head, last - end - else - return head, last - end -end - -local function inject_list(id,current,reference,make,stack,pardir,txtdir) - local width, height, depth, correction = current.width, current.height, current.depth, 0 - local moveright = false - local first = current.list - if id == hlist_code then -- box_code line_code - -- can be either an explicit hbox or a line and there is no way - -- to recognize this; anyway only if ht/dp (then inline) - local sr = stack[reference] - if first then - if sr and sr[2] then - local last = find_node_tail(first) - if last.id == glue_code and last.subtype == rightskip_code then - local prev = last.prev - moveright = first.id == glue_code and first.subtype == leftskip_code - if prev and prev.id == glue_code and prev.subtype == parfillskip_code then - width = dimensions(current,first,prev.prev) -- maybe not current as we already take care of it - else - if moveright and first.writable then - width = width - first.spec.stretch*current.glue_set * current.glue_sign - end - if last.writable then - width = width - last.spec.stretch*current.glue_set * current.glue_sign - end - end - end - else - -- also weird - end - else - -- ok - end - correction = width - else - correction = height + depth - height, depth = depth, height -- ugly hack, needed because pdftex backend does something funny - end - if pardir == "TRT" then - width = - width - end - local result, resolved = make(width,height,depth,reference) - -- todo: only when width is ok - if result and resolved then - if trace_backend then - report_area("box: %04i %s %s: w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved) - end - if not first then - current.list = result - elseif moveright then -- brr no prevs done - -- result after first - local n = first.next - result.next = n - first.next = result - result.prev = first - if n then n.prev = result end - else - -- first after result - result.next = first - first.prev = result - current.list = result - end - end -end - --- skip is somewhat messy - -local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,txtdir) -- main - if head then - local current, first, last, firstdir, reference = head, nil, nil, nil, nil - pardir = pardir or "===" - txtdir = txtdir or "===" - while current do - local id = current.id - if id == hlist_code or id == vlist_code then - local r = current[attribute] - -- somehow reference 
is true so the following fails (second one not done) in - -- test \goto{test}[page(2)] test \gotobox{test}[page(2)] - -- so let's wait till this fails again - -- if not reference and r and (not skip or r > skip) then -- > or ~= - if r and (not skip or r > skip) then -- > or ~= - inject_list(id,current,r,make,stack,pardir,txtdir) - end - if r then - done[r] = (done[r] or 0) + 1 - end - local list = current.list - if list then - local _ - current.list, _, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir) - end - if r then - done[r] = done[r] - 1 - end - elseif id == whatsit_code then - local subtype = current.subtype - if subtype == localpar_code then - pardir = current.dir - elseif subtype == dir_code then - txtdir = current.dir - end - elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left? - -- - else - local r = current[attribute] - if not r then - -- just go on, can be kerns - elseif not reference then - reference, first, last, firstdir = r, current, current, txtdir - elseif r == reference then - last = current - elseif (done[reference] or 0) == 0 then -- or id == glue_code and current.subtype == right_skip_code - if not skip or r > skip then -- maybe no > test - head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir) - reference, first, last, firstdir = nil, nil, nil, nil - end - else - reference, first, last, firstdir = r, current, current, txtdir - end - end - current = current.next - end - if reference and (done[reference] or 0) == 0 then - head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir) - end - end - return head, true, pardir, txtdir -end - -local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir) -- singular ! 
- if head then - pardir = pardir or "===" - txtdir = txtdir or "===" - local current = head - while current do - local id = current.id - if id == hlist_code or id == vlist_code then - local r = current[attribute] - if r and not done[r] then - done[r] = true - inject_list(id,current,r,make,stack,pardir,txtdir) - end - local list = current.list - if list then - current.list = inject_area(list,attribute,make,stack,done,current,pardir,txtdir) - end - elseif id == whatsit_code then - local subtype = current.subtype - if subtype == localpar_code then - pardir = current.dir - elseif subtype == dir_code then - txtdir = current.dir - end - else - local r = current[attribute] - if r and not done[r] then - done[r] = true - head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir) - end - end - current = current.next - end - end - return head, true -end - --- tracing - -local nodepool = nodes.pool - -local new_rule = nodepool.rule -local new_kern = nodepool.kern - -local set_attribute = node.set_attribute -local register_color = colors.register - -local a_color = attributes.private('color') -local a_colormodel = attributes.private('colormodel') -local a_transparency = attributes.private('transparency') -local u_transparency = nil -local u_colors = { } -local force_gray = true - -local function colorize(width,height,depth,n,reference,what) - if force_gray then n = 0 end - u_transparency = u_transparency or transparencies.register(nil,2,.65) - local ucolor = u_colors[n] - if not ucolor then - if n == 1 then - u_color = register_color(nil,'rgb',.75,0,0) - elseif n == 2 then - u_color = register_color(nil,'rgb',0,.75,0) - elseif n == 3 then - u_color = register_color(nil,'rgb',0,0,.75) - else - n = 0 - u_color = register_color(nil,'gray',.5) - end - u_colors[n] = u_color - end - if width == 0 then - -- probably a strut as placeholder - report_area("%s %s has no %s dimensions, width %p, height %p, depth %p",what,reference,"horizontal",width,height,depth) - width = 65536 - end - if height + depth <= 0 then - report_area("%s %s has no %s dimensions, width %p, height %p, depth %p",what,reference,"vertical",width,height,depth) - height = 65536/2 - depth = height - end - local rule = new_rule(width,height,depth) - rule[a_colormodel] = 1 -- gray color model - rule[a_color] = u_color - rule[a_transparency] = u_transparency - if width < 0 then - local kern = new_kern(width) - rule.width = -width - kern.next = rule - rule.prev = kern - return kern - else - return rule - end -end - -local nodepool = nodes.pool - -local new_kern = nodepool.kern - -local texattribute = tex.attribute -local texcount = tex.count - --- references: - -local stack = { } -local done = { } -local attribute = attributes.private('reference') -local nofreferences = 0 -local topofstack = 0 - -nodes.references = { - attribute = attribute, - stack = stack, - done = done, -} - --- todo: get rid of n (n is just a number, can be used for tracing, obsolete) - -local function setreference(h,d,r) - topofstack = topofstack + 1 - -- the preroll permits us to determine samepage (but delayed also has some advantages) - -- so some part of the backend work is already done here - stack[topofstack] = { r, h, d, codeinjections.prerollreference(r) } - -- texattribute[attribute] = topofstack -- todo -> at tex end - texcount.lastreferenceattribute = topofstack -end - -function references.get(n) -- not public so functionality can change - local sn = stack[n] - return sn and sn[1] -end - -local function 
makereference(width,height,depth,reference) - local sr = stack[reference] - if sr then - if trace_references then - report_reference("resolving attribute %a",reference) - end - local resolved, ht, dp, set, n = sr[1], sr[2], sr[3], sr[4], sr[5] - if ht then - if height < ht then height = ht end - if depth < dp then depth = dp end - end - local annot = nodeinjections.reference(width,height,depth,set) - if annot then - nofreferences = nofreferences + 1 - local result, current - if trace_references then - local step = 65536 - result = hpack_list(colorize(width,height-step,depth-step,2,reference,"reference")) -- step subtracted so that we can see seperate links - result.width = 0 - current = result - end - if current then - current.next = annot - else - result = annot - end - references.registerpage(n) - result = hpack_list(result,0) - result.width, result.height, result.depth = 0, 0, 0 - if cleanupreferences then stack[reference] = nil end - return result, resolved - elseif trace_references then - report_reference("unable to resolve annotation %a",reference) - end - elseif trace_references then - report_reference("unable to resolve attribute %a",reference) - end -end - -function nodes.references.handler(head) - if topofstack > 0 then - return inject_areas(head,attribute,makereference,stack,done) - else - return head, false - end -end - --- destinations (we can clean up once set, unless tagging!) - -local stack = { } -local done = { } -local attribute = attributes.private('destination') -local nofdestinations = 0 -local topofstack = 0 - -nodes.destinations = { - attribute = attribute, - stack = stack, - done = done, -} - -local function setdestination(n,h,d,name,view) -- n = grouplevel, name == table - topofstack = topofstack + 1 - stack[topofstack] = { n, h, d, name, view } - return topofstack -end - -local function makedestination(width,height,depth,reference) - local sr = stack[reference] - if sr then - if trace_destinations then - report_destination("resolving attribute %a",reference) - end - local resolved, ht, dp, name, view = sr[1], sr[2], sr[3], sr[4], sr[5] - if ht then - if height < ht then height = ht end - if depth < dp then depth = dp end - end - local result, current - if trace_destinations then - local step = 0 - if width == 0 then - step = 4*65536 - width, height, depth = 5*step, 5*step, 0 - end - for n=1,#name do - local rule = hpack_list(colorize(width,height,depth,3,reference,"destination")) - rule.width = 0 - if not result then - result, current = rule, rule - else - current.next = rule - rule.prev = current - current = rule - end - width, height = width - step, height - step - end - end - nofdestinations = nofdestinations + 1 - for n=1,#name do - local annot = nodeinjections.destination(width,height,depth,name[n],view) - if not result then - result = annot - else - current.next = annot - annot.prev = current - end - current = find_node_tail(annot) - end - if result then - -- some internal error - result = hpack_list(result,0) - result.width, result.height, result.depth = 0, 0, 0 - end - if cleanupdestinations then stack[reference] = nil end - return result, resolved - elseif trace_destinations then - report_destination("unable to resolve attribute %a",reference) - end -end - -function nodes.destinations.handler(head) - if topofstack > 0 then - return inject_area(head,attribute,makedestination,stack,done) -- singular - else - return head, false - end -end - --- will move - -function references.mark(reference,h,d,view) - return 
setdestination(tex.currentgrouplevel,h,d,reference,view) -end - -function references.inject(prefix,reference,h,d,highlight,newwindow,layer) -- todo: use currentreference is possible - local set, bug = references.identify(prefix,reference) - if bug or #set == 0 then - -- unknown ref, just don't set it and issue an error - else - -- check - set.highlight, set.newwindow,set.layer = highlight, newwindow, layer - setreference(h,d,set) -- sets attribute / todo: for set[*].error - end -end - -function references.injectcurrentset(h,d) -- used inside doifelse - local currentset = references.currentset - if currentset then - setreference(h,d,currentset) -- sets attribute / todo: for set[*].error - end -end - -commands.injectreference = references.inject -commands.injectcurrentreference = references.injectcurrentset - --- - -local function checkboth(open,close) - if open and open ~= "" then - local set, bug = references.identify("",open) - open = not bug and #set > 0 and set - end - if close and close ~= "" then - local set, bug = references.identify("",close) - close = not bug and #set > 0 and set - end - return open, close -end - --- end temp hack - -statistics.register("interactive elements", function() - if nofreferences > 0 or nofdestinations > 0 then - return format("%s references, %s destinations",nofreferences,nofdestinations) - else - return nil - end -end) - -function references.enableinteraction() - tasks.enableaction("shipouts","nodes.references.handler") - tasks.enableaction("shipouts","nodes.destinations.handler") -end +if not modules then modules = { } end modules ['node-ref'] = { + version = 1.001, + comment = "companion to node-ref.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- We supported pdf right from the start and in mkii this has resulted in +-- extensive control over the links. Nowadays pdftex provides a lot more +-- control over margins but as mkii supports multiple backends we stuck to +-- our own mechanisms. In mkiv again we implement our own handling. Eventually +-- we will even disable the pdf primitives. + +-- helper, will end up in luatex + +-- is grouplevel still used? 
+ +local format = string.format + +local allocate, mark = utilities.storage.allocate, utilities.storage.mark + +local cleanupreferences, cleanupdestinations = false, true + +local attributes, nodes, node = attributes, nodes, node + +local nodeinjections = backends.nodeinjections +local codeinjections = backends.codeinjections + +local transparencies = attributes.transparencies +local colors = attributes.colors +local references = structures.references +local tasks = nodes.tasks + +local hpack_list = node.hpack +local list_dimensions = node.dimensions + +-- current.glue_set current.glue_sign + +local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end) +local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end) +local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end) + +local report_reference = logs.reporter("backend","references") +local report_destination = logs.reporter("backend","destinations") +local report_area = logs.reporter("backend","areas") + +local nodecodes = nodes.nodecodes +local skipcodes = nodes.skipcodes +local whatcodes = nodes.whatcodes +local listcodes = nodes.listcodes + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glue_code = nodecodes.glue +local whatsit_code = nodecodes.whatsit + +local leftskip_code = skipcodes.leftskip +local rightskip_code = skipcodes.rightskip +local parfillskip_code = skipcodes.parfillskip + +local localpar_code = whatcodes.localpar +local dir_code = whatcodes.dir + +local line_code = listcodes.line + +local nodepool = nodes.pool + +local new_kern = nodepool.kern + +local traverse = node.traverse +local find_node_tail = node.tail or node.slide +local tosequence = nodes.tosequence + +-- local function dimensions(parent,start,stop) +-- stop = stop and stop.next +-- if parent then +-- if stop then +-- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop) +-- else +-- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start) +-- end +-- else +-- if stop then +-- return list_dimensions(start,stop) +-- else +-- return list_dimensions(start) +-- end +-- end +-- end +-- +-- -- more compact + +local function dimensions(parent,start,stop) + if parent then + return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop and stop.next) + else + return list_dimensions(start,stop and stop.next) + end +end + +-- is pardir important at all? 
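-- [editorial sketch, not part of the patch] The reference/destination injectors defined further
-- down wrap the backend annotation nodes in a box that is packed and then forced to zero
-- dimensions, so the annotation occupies no space in the typeset result. A stripped-down version
-- of that final step ('invisiblewrap' is a hypothetical name):
local function invisiblewrap(list)
    local box = node.hpack(list,0)               -- same call pattern as hpack_list(result,0) below
    box.width, box.height, box.depth = 0, 0, 0   -- keep the annotation out of the metrics
    return box
end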
+ +local function inject_range(head,first,last,reference,make,stack,parent,pardir,txtdir) + local width, height, depth = dimensions(parent,first,last) + if txtdir == "+TRT" or (txtdir == "===" and pardir == "TRT") then -- KH: textdir == "===" test added + width = - width + end + local result, resolved = make(width,height,depth,reference) + if result and resolved then + if head == first then + if trace_backend then + report_area("head: %04i %s %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved) + end + result.next = first + first.prev = result + return result, last + else + if trace_backend then + report_area("middle: %04i %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved) + end + local prev = first.prev + if prev then + result.next = first + result.prev = prev + prev.next = result + first.prev = result + else + result.next = first + first.prev = result + end + if first == head.next then + head.next = result -- hm, weird + end + return head, last + end + else + return head, last + end +end + +local function inject_list(id,current,reference,make,stack,pardir,txtdir) + local width, height, depth, correction = current.width, current.height, current.depth, 0 + local moveright = false + local first = current.list + if id == hlist_code then -- box_code line_code + -- can be either an explicit hbox or a line and there is no way + -- to recognize this; anyway only if ht/dp (then inline) + local sr = stack[reference] + if first then + if sr and sr[2] then + local last = find_node_tail(first) + if last.id == glue_code and last.subtype == rightskip_code then + local prev = last.prev + moveright = first.id == glue_code and first.subtype == leftskip_code + if prev and prev.id == glue_code and prev.subtype == parfillskip_code then + width = dimensions(current,first,prev.prev) -- maybe not current as we already take care of it + else + if moveright and first.writable then + width = width - first.spec.stretch*current.glue_set * current.glue_sign + end + if last.writable then + width = width - last.spec.stretch*current.glue_set * current.glue_sign + end + end + end + else + -- also weird + end + else + -- ok + end + correction = width + else + correction = height + depth + height, depth = depth, height -- ugly hack, needed because pdftex backend does something funny + end + if pardir == "TRT" then + width = - width + end + local result, resolved = make(width,height,depth,reference) + -- todo: only when width is ok + if result and resolved then + if trace_backend then + report_area("box: %04i %s %s: w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved) + end + if not first then + current.list = result + elseif moveright then -- brr no prevs done + -- result after first + local n = first.next + result.next = n + first.next = result + result.prev = first + if n then n.prev = result end + else + -- first after result + result.next = first + first.prev = result + current.list = result + end + end +end + +-- skip is somewhat messy + +local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,txtdir) -- main + if head then + local current, first, last, firstdir, reference = head, nil, nil, nil, nil + pardir = pardir or "===" + txtdir = txtdir or "===" + while current do + local id = current.id + if id == hlist_code or id == vlist_code then + local r = current[attribute] + -- somehow reference 
is true so the following fails (second one not done) in + -- test \goto{test}[page(2)] test \gotobox{test}[page(2)] + -- so let's wait till this fails again + -- if not reference and r and (not skip or r > skip) then -- > or ~= + if r and (not skip or r > skip) then -- > or ~= + inject_list(id,current,r,make,stack,pardir,txtdir) + end + if r then + done[r] = (done[r] or 0) + 1 + end + local list = current.list + if list then + local _ + current.list, _, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir) + end + if r then + done[r] = done[r] - 1 + end + elseif id == whatsit_code then + local subtype = current.subtype + if subtype == localpar_code then + pardir = current.dir + elseif subtype == dir_code then + txtdir = current.dir + end + elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left? + -- + else + local r = current[attribute] + if not r then + -- just go on, can be kerns + elseif not reference then + reference, first, last, firstdir = r, current, current, txtdir + elseif r == reference then + last = current + elseif (done[reference] or 0) == 0 then -- or id == glue_code and current.subtype == right_skip_code + if not skip or r > skip then -- maybe no > test + head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir) + reference, first, last, firstdir = nil, nil, nil, nil + end + else + reference, first, last, firstdir = r, current, current, txtdir + end + end + current = current.next + end + if reference and (done[reference] or 0) == 0 then + head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir) + end + end + return head, true, pardir, txtdir +end + +local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir) -- singular ! 
+ if head then + pardir = pardir or "===" + txtdir = txtdir or "===" + local current = head + while current do + local id = current.id + if id == hlist_code or id == vlist_code then + local r = current[attribute] + if r and not done[r] then + done[r] = true + inject_list(id,current,r,make,stack,pardir,txtdir) + end + local list = current.list + if list then + current.list = inject_area(list,attribute,make,stack,done,current,pardir,txtdir) + end + elseif id == whatsit_code then + local subtype = current.subtype + if subtype == localpar_code then + pardir = current.dir + elseif subtype == dir_code then + txtdir = current.dir + end + else + local r = current[attribute] + if r and not done[r] then + done[r] = true + head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir) + end + end + current = current.next + end + end + return head, true +end + +-- tracing + +local nodepool = nodes.pool + +local new_rule = nodepool.rule +local new_kern = nodepool.kern + +local set_attribute = node.set_attribute +local register_color = colors.register + +local a_color = attributes.private('color') +local a_colormodel = attributes.private('colormodel') +local a_transparency = attributes.private('transparency') +local u_transparency = nil +local u_colors = { } +local force_gray = true + +local function colorize(width,height,depth,n,reference,what) + if force_gray then n = 0 end + u_transparency = u_transparency or transparencies.register(nil,2,.65) + local ucolor = u_colors[n] + if not ucolor then + if n == 1 then + u_color = register_color(nil,'rgb',.75,0,0) + elseif n == 2 then + u_color = register_color(nil,'rgb',0,.75,0) + elseif n == 3 then + u_color = register_color(nil,'rgb',0,0,.75) + else + n = 0 + u_color = register_color(nil,'gray',.5) + end + u_colors[n] = u_color + end + if width == 0 then + -- probably a strut as placeholder + report_area("%s %s has no %s dimensions, width %p, height %p, depth %p",what,reference,"horizontal",width,height,depth) + width = 65536 + end + if height + depth <= 0 then + report_area("%s %s has no %s dimensions, width %p, height %p, depth %p",what,reference,"vertical",width,height,depth) + height = 65536/2 + depth = height + end + local rule = new_rule(width,height,depth) + rule[a_colormodel] = 1 -- gray color model + rule[a_color] = u_color + rule[a_transparency] = u_transparency + if width < 0 then + local kern = new_kern(width) + rule.width = -width + kern.next = rule + rule.prev = kern + return kern + else + return rule + end +end + +local nodepool = nodes.pool + +local new_kern = nodepool.kern + +local texattribute = tex.attribute +local texcount = tex.count + +-- references: + +local stack = { } +local done = { } +local attribute = attributes.private('reference') +local nofreferences = 0 +local topofstack = 0 + +nodes.references = { + attribute = attribute, + stack = stack, + done = done, +} + +-- todo: get rid of n (n is just a number, can be used for tracing, obsolete) + +local function setreference(h,d,r) + topofstack = topofstack + 1 + -- the preroll permits us to determine samepage (but delayed also has some advantages) + -- so some part of the backend work is already done here + stack[topofstack] = { r, h, d, codeinjections.prerollreference(r) } + -- texattribute[attribute] = topofstack -- todo -> at tex end + texcount.lastreferenceattribute = topofstack +end + +function references.get(n) -- not public so functionality can change + local sn = stack[n] + return sn and sn[1] +end + +local function 
makereference(width,height,depth,reference) + local sr = stack[reference] + if sr then + if trace_references then + report_reference("resolving attribute %a",reference) + end + local resolved, ht, dp, set, n = sr[1], sr[2], sr[3], sr[4], sr[5] + if ht then + if height < ht then height = ht end + if depth < dp then depth = dp end + end + local annot = nodeinjections.reference(width,height,depth,set) + if annot then + nofreferences = nofreferences + 1 + local result, current + if trace_references then + local step = 65536 + result = hpack_list(colorize(width,height-step,depth-step,2,reference,"reference")) -- step subtracted so that we can see seperate links + result.width = 0 + current = result + end + if current then + current.next = annot + else + result = annot + end + references.registerpage(n) + result = hpack_list(result,0) + result.width, result.height, result.depth = 0, 0, 0 + if cleanupreferences then stack[reference] = nil end + return result, resolved + elseif trace_references then + report_reference("unable to resolve annotation %a",reference) + end + elseif trace_references then + report_reference("unable to resolve attribute %a",reference) + end +end + +function nodes.references.handler(head) + if topofstack > 0 then + return inject_areas(head,attribute,makereference,stack,done) + else + return head, false + end +end + +-- destinations (we can clean up once set, unless tagging!) + +local stack = { } +local done = { } +local attribute = attributes.private('destination') +local nofdestinations = 0 +local topofstack = 0 + +nodes.destinations = { + attribute = attribute, + stack = stack, + done = done, +} + +local function setdestination(n,h,d,name,view) -- n = grouplevel, name == table + topofstack = topofstack + 1 + stack[topofstack] = { n, h, d, name, view } + return topofstack +end + +local function makedestination(width,height,depth,reference) + local sr = stack[reference] + if sr then + if trace_destinations then + report_destination("resolving attribute %a",reference) + end + local resolved, ht, dp, name, view = sr[1], sr[2], sr[3], sr[4], sr[5] + if ht then + if height < ht then height = ht end + if depth < dp then depth = dp end + end + local result, current + if trace_destinations then + local step = 0 + if width == 0 then + step = 4*65536 + width, height, depth = 5*step, 5*step, 0 + end + for n=1,#name do + local rule = hpack_list(colorize(width,height,depth,3,reference,"destination")) + rule.width = 0 + if not result then + result, current = rule, rule + else + current.next = rule + rule.prev = current + current = rule + end + width, height = width - step, height - step + end + end + nofdestinations = nofdestinations + 1 + for n=1,#name do + local annot = nodeinjections.destination(width,height,depth,name[n],view) + if not result then + result = annot + else + current.next = annot + annot.prev = current + end + current = find_node_tail(annot) + end + if result then + -- some internal error + result = hpack_list(result,0) + result.width, result.height, result.depth = 0, 0, 0 + end + if cleanupdestinations then stack[reference] = nil end + return result, resolved + elseif trace_destinations then + report_destination("unable to resolve attribute %a",reference) + end +end + +function nodes.destinations.handler(head) + if topofstack > 0 then + return inject_area(head,attribute,makedestination,stack,done) -- singular + else + return head, false + end +end + +-- will move + +function references.mark(reference,h,d,view) + return 
setdestination(tex.currentgrouplevel,h,d,reference,view) +end + +function references.inject(prefix,reference,h,d,highlight,newwindow,layer) -- todo: use currentreference is possible + local set, bug = references.identify(prefix,reference) + if bug or #set == 0 then + -- unknown ref, just don't set it and issue an error + else + -- check + set.highlight, set.newwindow,set.layer = highlight, newwindow, layer + setreference(h,d,set) -- sets attribute / todo: for set[*].error + end +end + +function references.injectcurrentset(h,d) -- used inside doifelse + local currentset = references.currentset + if currentset then + setreference(h,d,currentset) -- sets attribute / todo: for set[*].error + end +end + +commands.injectreference = references.inject +commands.injectcurrentreference = references.injectcurrentset + +-- + +local function checkboth(open,close) + if open and open ~= "" then + local set, bug = references.identify("",open) + open = not bug and #set > 0 and set + end + if close and close ~= "" then + local set, bug = references.identify("",close) + close = not bug and #set > 0 and set + end + return open, close +end + +-- end temp hack + +statistics.register("interactive elements", function() + if nofreferences > 0 or nofdestinations > 0 then + return format("%s references, %s destinations",nofreferences,nofdestinations) + else + return nil + end +end) + +function references.enableinteraction() + tasks.enableaction("shipouts","nodes.references.handler") + tasks.enableaction("shipouts","nodes.destinations.handler") +end diff --git a/tex/context/base/node-res.lua b/tex/context/base/node-res.lua index 768aac404..6ec6895c8 100644 --- a/tex/context/base/node-res.lua +++ b/tex/context/base/node-res.lua @@ -1,406 +1,406 @@ -if not modules then modules = { } end modules ['node-res'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local gmatch, format = string.gmatch, string.format -local tonumber, round = tonumber, math.round - ---[[ldx-- -

The next function is not that much needed but we use it -for debugging node management.

    ---ldx]]-- - -local report_nodes = logs.reporter("nodes","housekeeping") - -local nodes, node = nodes, node - -local copy_node = node.copy -local free_node = node.free -local free_list = node.flush_list -local new_node = node.new - -nodes.pool = nodes.pool or { } -local pool = nodes.pool - -local whatsitcodes = nodes.whatsitcodes -local skipcodes = nodes.skipcodes -local kerncodes = nodes.kerncodes -local nodecodes = nodes.nodecodes - -local glyph_code = nodecodes.glyph - -local allocate = utilities.storage.allocate - -local reserved, nofreserved = { }, 0 - -local function register_node(n) - nofreserved = nofreserved + 1 - reserved[nofreserved] = n - return n -end - -pool.register = register_node - -function pool.cleanup(nofboxes) -- todo - if nodes.tracers.steppers then -- to be resolved - nodes.tracers.steppers.reset() -- todo: make a registration subsystem - end - local nl, nr = 0, nofreserved - for i=1,nofreserved do - local ri = reserved[i] - -- if not (ri.id == glue_spec and not ri.is_writable) then - free_node(reserved[i]) - -- end - end - if nofboxes then - local tb = tex.box - for i=0,nofboxes do - local l = tb[i] - if l then - free_node(tb[i]) - nl = nl + 1 - end - end - end - reserved = { } - nofreserved = 0 - return nr, nl, nofboxes -- can be nil -end - -function pool.usage() - local t = { } - for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do - t[tag] = n - end - return t -end - -local disc = register_node(new_node("disc")) -local kern = register_node(new_node("kern",kerncodes.userkern)) -local fontkern = register_node(new_node("kern",kerncodes.fontkern)) -local penalty = register_node(new_node("penalty")) -local glue = register_node(new_node("glue")) -- glue.spec = nil -local glue_spec = register_node(new_node("glue_spec")) -local glyph = register_node(new_node("glyph",0)) -local textdir = register_node(new_node("whatsit",whatsitcodes.dir)) -local latelua = register_node(new_node("whatsit",whatsitcodes.latelua)) -local special = register_node(new_node("whatsit",whatsitcodes.special)) -local user_n = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_n.type = 100 -- 44 -local user_l = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_l.type = 110 -- 44 -local user_s = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_s.type = 115 -- 44 -local user_t = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_t.type = 116 -- 44 -local left_margin_kern = register_node(new_node("margin_kern",0)) -local right_margin_kern = register_node(new_node("margin_kern",1)) -local lineskip = register_node(new_node("glue",skipcodes.lineskip)) -local baselineskip = register_node(new_node("glue",skipcodes.baselineskip)) -local leftskip = register_node(new_node("glue",skipcodes.leftskip)) -local rightskip = register_node(new_node("glue",skipcodes.rightskip)) -local temp = register_node(new_node("temp",0)) -local noad = register_node(new_node("noad")) - --- the dir field needs to be set otherwise crash: - -local rule = register_node(new_node("rule")) rule .dir = "TLT" -local hlist = register_node(new_node("hlist")) hlist.dir = "TLT" -local vlist = register_node(new_node("vlist")) vlist.dir = "TLT" - -function pool.zeroglue(n) - local s = n.spec - return not writable or ( - s.width == 0 - and s.stretch == 0 - and s.shrink == 0 - and s.stretch_order == 0 - and s.shrink_order == 0 - ) -end - -function pool.glyph(fnt,chr) - local n = copy_node(glyph) - if fnt then n.font = fnt end - if chr then n.char = chr end - return n -end - 
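-- [editorial sketch, not part of the patch] Each prototype above is created once with new_node,
-- kept in 'reserved', and freed again by pool.cleanup at the end of the run; real nodes are
-- always obtained by copying a prototype. A hypothetical extra prototype would follow the same
-- lifecycle via the exported pool.register:
local mypenalty = nodes.pool.register(node.new("penalty")) -- freed later by pool.cleanup()
mypenalty.penalty = 10000
local p = node.copy(mypenalty) -- hand out copies, never the registered prototype itself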
-function pool.penalty(p) - local n = copy_node(penalty) - n.penalty = p - return n -end - -function pool.kern(k) - local n = copy_node(kern) - n.kern = k - return n -end - -function pool.fontkern(k) - local n = copy_node(fontkern) - n.kern = k - return n -end - -function pool.gluespec(width,stretch,shrink,stretch_order,shrink_order) - local s = copy_node(glue_spec) - if width then s.width = width end - if stretch then s.stretch = stretch end - if shrink then s.shrink = shrink end - if stretch_order then s.stretch_order = stretch_order end - if shrink_order then s.shrink_order = shrink_order end - return s -end - -local function someskip(skip,width,stretch,shrink,stretch_order,shrink_order) - local n = copy_node(skip) - if not width then - -- no spec - elseif width == false or tonumber(width) then - local s = copy_node(glue_spec) - if width then s.width = width end - if stretch then s.stretch = stretch end - if shrink then s.shrink = shrink end - if stretch_order then s.stretch_order = stretch_order end - if shrink_order then s.shrink_order = shrink_order end - n.spec = s - else - -- shared - n.spec = copy_node(width) - end - return n -end - -function pool.stretch(a,b) - local n = copy_node(glue) - local s = copy_node(glue_spec) - if b then - s.stretch = a - s.stretch_order = b - else - s.stretch = 1 - s.stretch_order = a or 1 - end - n.spec = s - return n -end - -function pool.shrink(a,b) - local n = copy_node(glue) - local s = copy_node(glue_spec) - if b then - s.shrink = a - s.shrink_order = b - else - s.shrink = 1 - s.shrink_order = a or 1 - end - n.spec = s - return n -end - - -function pool.glue(width,stretch,shrink,stretch_order,shrink_order) - return someskip(glue,width,stretch,shrink,stretch_order,shrink_order) -end - -function pool.leftskip(width,stretch,shrink,stretch_order,shrink_order) - return someskip(leftskip,width,stretch,shrink,stretch_order,shrink_order) -end - -function pool.rightskip(width,stretch,shrink,stretch_order,shrink_order) - return someskip(rightskip,width,stretch,shrink,stretch_order,shrink_order) -end - -function pool.lineskip(width,stretch,shrink,stretch_order,shrink_order) - return someskip(lineskip,width,stretch,shrink,stretch_order,shrink_order) -end - -function pool.baselineskip(width,stretch,shrink) - return someskip(baselineskip,width,stretch,shrink) -end - -function pool.disc() - return copy_node(disc) -end - -function pool.textdir(dir) - local t = copy_node(textdir) - t.dir = dir - return t -end - -function pool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt - local n = copy_node(rule) - if width then n.width = width end - if height then n.height = height end - if depth then n.depth = depth end - if dir then n.dir = dir end - return n -end - -if node.has_field(latelua,'string') then - function pool.latelua(code) - local n = copy_node(latelua) - n.string = code - return n - end -else - function pool.latelua(code) - local n = copy_node(latelua) - n.data = code - return n - end -end - -function pool.leftmarginkern(glyph,width) - local n = copy_node(left_margin_kern) - if not glyph then - report_nodes("invalid pointer to left margin glyph node") - elseif glyph.id ~= glyph_code then - report_nodes("invalid node type %a for %s margin glyph node",nodecodes[glyph],"left") - else - n.glyph = glyph - end - if width then - n.width = width - end - return n -end - -function pool.rightmarginkern(glyph,width) - local n = copy_node(right_margin_kern) - if not glyph then - report_nodes("invalid pointer to right margin glyph node") - elseif glyph.id 
~= glyph_code then - report_nodes("invalid node type %a for %s margin glyph node",nodecodes[p],"right") - else - n.glyph = glyph - end - if width then - n.width = width - end - return n -end - -function pool.temp() - return copy_node(temp) -end - -function pool.noad() - return copy_node(noad) -end - -function pool.hlist() - return copy_node(hlist) -end - -function pool.vlist() - return copy_node(vlist) -end - ---[[ -

At some point we ran into a problem that the glue specification -of the zeropoint dimension was overwritten when adapting a glue spec -node. This is a side effect of glue specs being shared. After a -couple of hours tracing and debugging Taco and I came to the -conclusion that it made no sense to complicate the spec allocator -and settled on a writable flag. This all is a side effect of the -fact that some glues use reserved memory slots (with the zeropoint -glue being a noticeable one). So, next we wrap this into a function -and hide it for the user. And yes, LuaTeX now gives a warning as -well.

    -]]-- - -function nodes.writable_spec(n) -- not pool - local spec = n.spec - if not spec then - spec = copy_node(glue_spec) - n.spec = spec - elseif not spec.writable then - spec = copy_node(spec) - n.spec = spec - end - return spec -end - --- local num = userids["my id"] --- local str = userids[num] - -local userids = allocate() pool.userids = userids -local lastid = 0 - -setmetatable(userids, { - __index = function(t,k) - if type(k) == "string" then - lastid = lastid + 1 - rawset(userids,lastid,k) - rawset(userids,k,lastid) - return lastid - else - rawset(userids,k,k) - return k - end - end, - __call = function(t,k) - return t[k] - end -} ) - -function pool.usernumber(id,num) - local n = copy_node(user_n) - if num then - n.user_id, n.value = id, num - elseif id then - n.value = id - end - return n -end - -function pool.userlist(id,list) - local n = copy_node(user_l) - if list then - n.user_id, n.value = id, list - else - n.value = id - end - return n -end - -function pool.userstring(id,str) - local n = copy_node(user_s) - if str then - n.user_id, n.value = id, str - else - n.value = id - end - return n -end - -function pool.usertokens(id,tokens) - local n = copy_node(user_t) - if tokens then - n.user_id, n.value = id, tokens - else - n.value = id - end - return n -end - -function pool.special(str) - local n = copy_node(special) - n.data = str - return n -end - -statistics.register("cleaned up reserved nodes", function() - return format("%s nodes, %s lists of %s", pool.cleanup(tex.count["c_syst_last_allocated_box"])) -end) -- \topofboxstack - -statistics.register("node memory usage", function() -- comes after cleanup ! - return status.node_mem_usage -end) - -lua.registerfinalizer(pool.cleanup, "cleanup reserved nodes") +if not modules then modules = { } end modules ['node-res'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local gmatch, format = string.gmatch, string.format +local tonumber, round = tonumber, math.round + +--[[ldx-- +

The next function is not that much needed but we use it +for debugging node management.

    +--ldx]]-- + +local report_nodes = logs.reporter("nodes","housekeeping") + +local nodes, node = nodes, node + +local copy_node = node.copy +local free_node = node.free +local free_list = node.flush_list +local new_node = node.new + +nodes.pool = nodes.pool or { } +local pool = nodes.pool + +local whatsitcodes = nodes.whatsitcodes +local skipcodes = nodes.skipcodes +local kerncodes = nodes.kerncodes +local nodecodes = nodes.nodecodes + +local glyph_code = nodecodes.glyph + +local allocate = utilities.storage.allocate + +local reserved, nofreserved = { }, 0 + +local function register_node(n) + nofreserved = nofreserved + 1 + reserved[nofreserved] = n + return n +end + +pool.register = register_node + +function pool.cleanup(nofboxes) -- todo + if nodes.tracers.steppers then -- to be resolved + nodes.tracers.steppers.reset() -- todo: make a registration subsystem + end + local nl, nr = 0, nofreserved + for i=1,nofreserved do + local ri = reserved[i] + -- if not (ri.id == glue_spec and not ri.is_writable) then + free_node(reserved[i]) + -- end + end + if nofboxes then + local tb = tex.box + for i=0,nofboxes do + local l = tb[i] + if l then + free_node(tb[i]) + nl = nl + 1 + end + end + end + reserved = { } + nofreserved = 0 + return nr, nl, nofboxes -- can be nil +end + +function pool.usage() + local t = { } + for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do + t[tag] = n + end + return t +end + +local disc = register_node(new_node("disc")) +local kern = register_node(new_node("kern",kerncodes.userkern)) +local fontkern = register_node(new_node("kern",kerncodes.fontkern)) +local penalty = register_node(new_node("penalty")) +local glue = register_node(new_node("glue")) -- glue.spec = nil +local glue_spec = register_node(new_node("glue_spec")) +local glyph = register_node(new_node("glyph",0)) +local textdir = register_node(new_node("whatsit",whatsitcodes.dir)) +local latelua = register_node(new_node("whatsit",whatsitcodes.latelua)) +local special = register_node(new_node("whatsit",whatsitcodes.special)) +local user_n = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_n.type = 100 -- 44 +local user_l = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_l.type = 110 -- 44 +local user_s = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_s.type = 115 -- 44 +local user_t = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_t.type = 116 -- 44 +local left_margin_kern = register_node(new_node("margin_kern",0)) +local right_margin_kern = register_node(new_node("margin_kern",1)) +local lineskip = register_node(new_node("glue",skipcodes.lineskip)) +local baselineskip = register_node(new_node("glue",skipcodes.baselineskip)) +local leftskip = register_node(new_node("glue",skipcodes.leftskip)) +local rightskip = register_node(new_node("glue",skipcodes.rightskip)) +local temp = register_node(new_node("temp",0)) +local noad = register_node(new_node("noad")) + +-- the dir field needs to be set otherwise crash: + +local rule = register_node(new_node("rule")) rule .dir = "TLT" +local hlist = register_node(new_node("hlist")) hlist.dir = "TLT" +local vlist = register_node(new_node("vlist")) vlist.dir = "TLT" + +function pool.zeroglue(n) + local s = n.spec + return not writable or ( + s.width == 0 + and s.stretch == 0 + and s.shrink == 0 + and s.stretch_order == 0 + and s.shrink_order == 0 + ) +end + +function pool.glyph(fnt,chr) + local n = copy_node(glyph) + if fnt then n.font = fnt end + if chr then n.char = chr end + return n +end + 
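-- [editorial sketch, not part of the patch] Callers normally go through the pool constructors
-- instead of node.new, so every returned node is a copy of a registered prototype. A small
-- hypothetical usage (character code 120 = "x", 65536sp = 1pt assumed for illustration):
local g = nodes.pool.glyph(font.current(),120) -- glyph in the current font
local k = nodes.pool.kern(65536)               -- one point of user kern
g.next, k.prev = k, g                          -- link into a two-node list
node.flush_list(g)                             -- discard the list when done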
+function pool.penalty(p) + local n = copy_node(penalty) + n.penalty = p + return n +end + +function pool.kern(k) + local n = copy_node(kern) + n.kern = k + return n +end + +function pool.fontkern(k) + local n = copy_node(fontkern) + n.kern = k + return n +end + +function pool.gluespec(width,stretch,shrink,stretch_order,shrink_order) + local s = copy_node(glue_spec) + if width then s.width = width end + if stretch then s.stretch = stretch end + if shrink then s.shrink = shrink end + if stretch_order then s.stretch_order = stretch_order end + if shrink_order then s.shrink_order = shrink_order end + return s +end + +local function someskip(skip,width,stretch,shrink,stretch_order,shrink_order) + local n = copy_node(skip) + if not width then + -- no spec + elseif width == false or tonumber(width) then + local s = copy_node(glue_spec) + if width then s.width = width end + if stretch then s.stretch = stretch end + if shrink then s.shrink = shrink end + if stretch_order then s.stretch_order = stretch_order end + if shrink_order then s.shrink_order = shrink_order end + n.spec = s + else + -- shared + n.spec = copy_node(width) + end + return n +end + +function pool.stretch(a,b) + local n = copy_node(glue) + local s = copy_node(glue_spec) + if b then + s.stretch = a + s.stretch_order = b + else + s.stretch = 1 + s.stretch_order = a or 1 + end + n.spec = s + return n +end + +function pool.shrink(a,b) + local n = copy_node(glue) + local s = copy_node(glue_spec) + if b then + s.shrink = a + s.shrink_order = b + else + s.shrink = 1 + s.shrink_order = a or 1 + end + n.spec = s + return n +end + + +function pool.glue(width,stretch,shrink,stretch_order,shrink_order) + return someskip(glue,width,stretch,shrink,stretch_order,shrink_order) +end + +function pool.leftskip(width,stretch,shrink,stretch_order,shrink_order) + return someskip(leftskip,width,stretch,shrink,stretch_order,shrink_order) +end + +function pool.rightskip(width,stretch,shrink,stretch_order,shrink_order) + return someskip(rightskip,width,stretch,shrink,stretch_order,shrink_order) +end + +function pool.lineskip(width,stretch,shrink,stretch_order,shrink_order) + return someskip(lineskip,width,stretch,shrink,stretch_order,shrink_order) +end + +function pool.baselineskip(width,stretch,shrink) + return someskip(baselineskip,width,stretch,shrink) +end + +function pool.disc() + return copy_node(disc) +end + +function pool.textdir(dir) + local t = copy_node(textdir) + t.dir = dir + return t +end + +function pool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt + local n = copy_node(rule) + if width then n.width = width end + if height then n.height = height end + if depth then n.depth = depth end + if dir then n.dir = dir end + return n +end + +if node.has_field(latelua,'string') then + function pool.latelua(code) + local n = copy_node(latelua) + n.string = code + return n + end +else + function pool.latelua(code) + local n = copy_node(latelua) + n.data = code + return n + end +end + +function pool.leftmarginkern(glyph,width) + local n = copy_node(left_margin_kern) + if not glyph then + report_nodes("invalid pointer to left margin glyph node") + elseif glyph.id ~= glyph_code then + report_nodes("invalid node type %a for %s margin glyph node",nodecodes[glyph],"left") + else + n.glyph = glyph + end + if width then + n.width = width + end + return n +end + +function pool.rightmarginkern(glyph,width) + local n = copy_node(right_margin_kern) + if not glyph then + report_nodes("invalid pointer to right margin glyph node") + elseif glyph.id 
~= glyph_code then + report_nodes("invalid node type %a for %s margin glyph node",nodecodes[p],"right") + else + n.glyph = glyph + end + if width then + n.width = width + end + return n +end + +function pool.temp() + return copy_node(temp) +end + +function pool.noad() + return copy_node(noad) +end + +function pool.hlist() + return copy_node(hlist) +end + +function pool.vlist() + return copy_node(vlist) +end + +--[[ +

At some point we ran into a problem that the glue specification +of the zeropoint dimension was overwritten when adapting a glue spec +node. This is a side effect of glue specs being shared. After a +couple of hours tracing and debugging Taco and I came to the +conclusion that it made no sense to complicate the spec allocator +and settled on a writable flag. This all is a side effect of the +fact that some glues use reserved memory slots (with the zeropoint +glue being a noticeable one). So, next we wrap this into a function +and hide it for the user. And yes, LuaTeX now gives a warning as +well.

    +]]-- + +function nodes.writable_spec(n) -- not pool + local spec = n.spec + if not spec then + spec = copy_node(glue_spec) + n.spec = spec + elseif not spec.writable then + spec = copy_node(spec) + n.spec = spec + end + return spec +end + +-- local num = userids["my id"] +-- local str = userids[num] + +local userids = allocate() pool.userids = userids +local lastid = 0 + +setmetatable(userids, { + __index = function(t,k) + if type(k) == "string" then + lastid = lastid + 1 + rawset(userids,lastid,k) + rawset(userids,k,lastid) + return lastid + else + rawset(userids,k,k) + return k + end + end, + __call = function(t,k) + return t[k] + end +} ) + +function pool.usernumber(id,num) + local n = copy_node(user_n) + if num then + n.user_id, n.value = id, num + elseif id then + n.value = id + end + return n +end + +function pool.userlist(id,list) + local n = copy_node(user_l) + if list then + n.user_id, n.value = id, list + else + n.value = id + end + return n +end + +function pool.userstring(id,str) + local n = copy_node(user_s) + if str then + n.user_id, n.value = id, str + else + n.value = id + end + return n +end + +function pool.usertokens(id,tokens) + local n = copy_node(user_t) + if tokens then + n.user_id, n.value = id, tokens + else + n.value = id + end + return n +end + +function pool.special(str) + local n = copy_node(special) + n.data = str + return n +end + +statistics.register("cleaned up reserved nodes", function() + return format("%s nodes, %s lists of %s", pool.cleanup(tex.count["c_syst_last_allocated_box"])) +end) -- \topofboxstack + +statistics.register("node memory usage", function() -- comes after cleanup ! + return status.node_mem_usage +end) + +lua.registerfinalizer(pool.cleanup, "cleanup reserved nodes") diff --git a/tex/context/base/node-rul.lua b/tex/context/base/node-rul.lua index 09300964e..00039550c 100644 --- a/tex/context/base/node-rul.lua +++ b/tex/context/base/node-rul.lua @@ -1,389 +1,389 @@ -if not modules then modules = { } end modules ['node-rul'] = { - version = 1.001, - comment = "companion to node-rul.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this will go to an auxiliary module --- beware: rules now have a dir field --- --- todo: make robust for layers ... 
order matters - -local attributes, nodes, node = attributes, nodes, node - -local nodecodes = nodes.nodecodes -local tasks = nodes.tasks - -local glyph_code = nodecodes.glyph -local disc_code = nodecodes.disc -local rule_code = nodecodes.rule - -function nodes.striprange(first,last) -- todo: dir - if first and last then -- just to be sure - if first == last then - return first, last - end - while first and first ~= last do - local id = first.id - if id == glyph_code or id == disc_code then -- or id == rule_code - break - else - first = first.next - end - end - if not first then - return nil, nil - elseif first == last then - return first, last - end - while last and last ~= first do - local id = last.id - if id == glyph_code or id == disc_code then -- or id == rule_code - break - else - local prev = last.prev -- luatex < 0.70 has italic correction kern not prev'd - if prev then - last = last.prev - else - break - end - end - end - if not last then - return nil, nil - end - end - return first, last -end - --- todo: order and maybe other dimensions - -local floor = math.floor - -local trace_ruled = false trackers.register("nodes.rules", function(v) trace_ruled = v end) -local report_ruled = logs.reporter("nodes","rules") - -local n_tostring = nodes.idstostring -local n_tosequence = nodes.tosequence - -local a_ruled = attributes.private('ruled') -local a_color = attributes.private('color') -local a_transparency = attributes.private('transparency') -local a_colorspace = attributes.private('colormodel') - -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after -local striprange = nodes.striprange -local list_dimensions = node.dimensions - -local hpack_nodes = node.hpack - -local fontdata = fonts.hashes.identifiers -local variables = interfaces.variables -local dimenfactor = fonts.helpers.dimenfactor -local splitdimen = number.splitdimen - -local nodecodes = nodes.nodecodes -local skipcodes = nodes.skipcodes -local whatcodes = nodes.whatcodes -local kerncodes = nodes.kerncodes - -local glyph_code = nodecodes.glyph -local disc_code = nodecodes.disc -local glue_code = nodecodes.glue -local penalty_code = nodecodes.penalty -local kern_code = nodecodes.kern -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local rule_code = nodecodes.rule -local whatsit_code = nodecodes.whatsit - -local userskip_code = skipcodes.userskip -local spaceskip_code = skipcodes.spaceskip -local xspaceskip_code = skipcodes.xspaceskip - -local dir_code = whatcodes.dir - -local kerning_code = kerncodes.kern - -local nodepool = nodes.pool - -local new_rule = nodepool.rule -local new_kern = nodepool.kern -local new_glue = nodepool.glue - --- we can use this one elsewhere too --- --- todo: functions: word, sentence --- --- glyph rule unset whatsit glue margin_kern kern math disc - -local checkdir = true - --- we assume {glyphruns} and no funny extra kerning, ok, maybe we need --- a dummy character as start and end; anyway we only collect glyphs --- --- this one needs to take layers into account (i.e. 
we need a list of --- critical attributes) - --- omkeren class en level -> scheelt functie call in analyze - --- todo: switching inside math - -local function processwords(attribute,data,flush,head,parent) -- we have hlistdir and local dir - local n = head - if n then - local f, l, a, d, i, class - local continue, done, strip, level = false, false, true, -1 - while n do - local id = n.id - if id == glyph_code or id == rule_code then - local aa = n[attribute] - if aa then - if aa == a then - if not f then -- ? - f = n - end - l = n - else - -- possible extensions: when in same class then keep spanning - local newlevel, newclass = floor(aa/1000), aa%1000 ---~ strip = not continue or level == 1 -- 0 - if f then - if class == newclass then -- and newlevel > level then - head, done = flush(head,f,l,d,level,parent,false), true - else - head, done = flush(head,f,l,d,level,parent,strip), true - end - end - f, l, a = n, n, aa - level, class = newlevel, newclass - d = data[class] - continue = d.continue == variables.yes - end - else - if f then - head, done = flush(head,f,l,d,level,parent,strip), true - end - f, l, a = nil, nil, nil - end - elseif f and (id == disc_code or (id == kern_code and n.subtype == kerning_code)) then - l = n - elseif id == hlist_code or id == vlist_code then - if f then - head, done = flush(head,f,l,d,level,parent,strip), true - f, l, a = nil, nil, nil - end - local list = n.list - if list then - n.list = processwords(attribute,data,flush,list,n) - end - elseif checkdir and id == whatsit_code and n.subtype == dir_code then -- only changes in dir, we assume proper boundaries - if f and a then - l = n - end - elseif f then - if continue then - if id == penalty_code then - l = n - elseif id == kern_code then - l = n - elseif id == glue_code then - -- catch \underbar{a} \underbar{a} (subtype test is needed) - local subtype = n.subtype - if continue and n[attribute] and - (subtype == userskip_code or subtype == spaceskip_code or subskip == xspaceskip_code) then - l = n - else - head, done = flush(head,f,l,d,level,parent,strip), true - f, l, a = nil, nil, nil - end - end - else - head, done = flush(head,f,l,d,level,parent,strip), true - f, l, a = nil, nil, nil - end - end - n = n.next - end - if f then - head, done = flush(head,f,l,d,level,parent,strip), true - end - return head, true -- todo: done - else - return head, false - end -end - -nodes.processwords = processwords - --- - -nodes.rules = nodes.rules or { } -nodes.rules.data = nodes.rules.data or { } - -storage.register("nodes/rules/data", nodes.rules.data, "nodes.rules.data") - -local data = nodes.rules.data - -function nodes.rules.define(settings) - data[#data+1] = settings - context(#data) -end - -local a_viewerlayer = attributes.private("viewerlayer") - -local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but acceptable for this purpose --- check for f and l - if f.id ~= glyph_code then - -- saveguard ... 
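-- Worked example (illustrative, not from the patch): processwords above decodes the
-- attribute value as level*1000 + class, so a value of 2004 means level 2, class 4;
-- data[class] then supplies the settings (method, offset, continue, ...), and a run
-- keeps spanning intervening kerns and glue when continue equals interfaces.variables.yes.
local aa    = 2004                 -- hypothetical packed attribute value
local level = math.floor(aa/1000)  -- 2
local class = aa % 1000            -- 4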
we need to deal with rules and so (math) - return head - end - local r, m - if strip then - if trace_ruled then - local before = n_tosequence(f,l,true) - f, l = striprange(f,l) - local after = n_tosequence(f,l,true) - report_ruled("range stripper, before %a, after %a",before,after) - else - f, l = striprange(f,l) - end - end - if not f then - return head - end - local w = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,f,l.next) - local method, offset, continue, dy, order, max = d.method, d.offset, d.continue, d.dy, d.order, d.max - local rulethickness, unit = d.rulethickness, d.unit - local ma, ca, ta = d.ma, d.ca, d.ta - local colorspace = (ma > 0 and ma) or f[a_colorspace] or 1 - local color = (ca > 0 and ca) or f[a_color] - local transparency = (ta > 0 and ta) or f[a_transparency] - local foreground = order == variables.foreground - - local e = dimenfactor(unit,fontdata[f.font]) -- what if no glyph node - - local rt = tonumber(rulethickness) - if rt then - rulethickness = e * rulethickness / 2 - else - local n, u = splitdimen(rulethickness) - if n and u then -- we need to intercept ex and em and % and ... - rulethickness = n * dimenfactor(u,fontdata[f.font]) / 2 - else - rulethickness = 1/5 - end - end - - if level > max then - level = max - end - if method == 0 then -- center - offset = 2*offset --- m = (offset+(level-1)*dy+rulethickness)*e/2 - m = (offset+(level-1)*dy)*e/2 + rulethickness/2 - else - m = 0 - end - for i=1,level do --- local ht = (offset+(i-1)*dy+rulethickness)*e - m --- local dp = -(offset+(i-1)*dy-rulethickness)*e + m - local ht = (offset+(i-1)*dy)*e + rulethickness - m - local dp = -(offset+(i-1)*dy)*e + rulethickness + m - local r = new_rule(w,ht,dp) - local v = f[a_viewerlayer] - -- quick hack - if v then - r[a_viewerlayer] = v - end - -- - if color then - r[a_colorspace] = colorspace - r[a_color] = color - end - if transparency then - r[a_transparency] = transparency - end - local k = new_kern(-w) - if foreground then - insert_node_after(head,l,k) - insert_node_after(head,k,r) - l = r - else - head = insert_node_before(head,f,r) - insert_node_after(head,r,k) - end - if trace_ruled then - report_ruled("level %a, width %p, height %p, depth %p, nodes %a, text %a", - level,w,ht,dp,n_tostring(f,l),n_tosequence(f,l,true)) - end - end - return head -end - -local process = nodes.processwords - -nodes.rules.handler = function(head) return process(a_ruled,data,flush_ruled,head) end - -function nodes.rules.enable() - tasks.enableaction("shipouts","nodes.rules.handler") -end - --- elsewhere: --- --- tasks.appendaction ("shipouts", "normalizers", "nodes.rules.handler") --- tasks.disableaction("shipouts", "nodes.rules.handler") -- only kick in when used - -local trace_shifted = false trackers.register("nodes.shifting", function(v) trace_shifted = v end) - -local report_shifted = logs.reporter("nodes","shifting") - -local a_shifted = attributes.private('shifted') - -nodes.shifts = nodes.shifts or { } -nodes.shifts.data = nodes.shifts.data or { } - -storage.register("nodes/shifts/data", nodes.shifts.data, "nodes.shifts.data") - -local data = nodes.shifts.data - -function nodes.shifts.define(settings) - data[#data+1] = settings - context(#data) -end - -local function flush_shifted(head,first,last,data,level,parent,strip) -- not that fast but acceptable for this purpose - if true then - first, last = striprange(first,last) - end - local prev, next = first.prev, last.next - first.prev, last.next = nil, nil - local width, height, depth = 
list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,first,next) - local list = hpack_nodes(first,width,"exactly") - if first == head then - head = list - end - if prev then - prev.next, list.prev = list, prev - end - if next then - next.prev, list.next = list, next - end - local raise = data.dy * dimenfactor(data.unit,fontdata[first.font]) - list.shift, list.height, list.depth = raise, height, depth - if trace_shifted then - report_shifted("width %p, nodes %a, text %a",width,n_tostring(first,last),n_tosequence(first,last,true)) - end - return head -end - -local process = nodes.processwords - -nodes.shifts.handler = function(head) return process(a_shifted,data,flush_shifted,head) end - -function nodes.shifts.enable() - tasks.enableaction("shipouts","nodes.shifts.handler") -end +if not modules then modules = { } end modules ['node-rul'] = { + version = 1.001, + comment = "companion to node-rul.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this will go to an auxiliary module +-- beware: rules now have a dir field +-- +-- todo: make robust for layers ... order matters + +local attributes, nodes, node = attributes, nodes, node + +local nodecodes = nodes.nodecodes +local tasks = nodes.tasks + +local glyph_code = nodecodes.glyph +local disc_code = nodecodes.disc +local rule_code = nodecodes.rule + +function nodes.striprange(first,last) -- todo: dir + if first and last then -- just to be sure + if first == last then + return first, last + end + while first and first ~= last do + local id = first.id + if id == glyph_code or id == disc_code then -- or id == rule_code + break + else + first = first.next + end + end + if not first then + return nil, nil + elseif first == last then + return first, last + end + while last and last ~= first do + local id = last.id + if id == glyph_code or id == disc_code then -- or id == rule_code + break + else + local prev = last.prev -- luatex < 0.70 has italic correction kern not prev'd + if prev then + last = last.prev + else + break + end + end + end + if not last then + return nil, nil + end + end + return first, last +end + +-- todo: order and maybe other dimensions + +local floor = math.floor + +local trace_ruled = false trackers.register("nodes.rules", function(v) trace_ruled = v end) +local report_ruled = logs.reporter("nodes","rules") + +local n_tostring = nodes.idstostring +local n_tosequence = nodes.tosequence + +local a_ruled = attributes.private('ruled') +local a_color = attributes.private('color') +local a_transparency = attributes.private('transparency') +local a_colorspace = attributes.private('colormodel') + +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after +local striprange = nodes.striprange +local list_dimensions = node.dimensions + +local hpack_nodes = node.hpack + +local fontdata = fonts.hashes.identifiers +local variables = interfaces.variables +local dimenfactor = fonts.helpers.dimenfactor +local splitdimen = number.splitdimen + +local nodecodes = nodes.nodecodes +local skipcodes = nodes.skipcodes +local whatcodes = nodes.whatcodes +local kerncodes = nodes.kerncodes + +local glyph_code = nodecodes.glyph +local disc_code = nodecodes.disc +local glue_code = nodecodes.glue +local penalty_code = nodecodes.penalty +local kern_code = nodecodes.kern +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local rule_code = nodecodes.rule +local whatsit_code = 
nodecodes.whatsit + +local userskip_code = skipcodes.userskip +local spaceskip_code = skipcodes.spaceskip +local xspaceskip_code = skipcodes.xspaceskip + +local dir_code = whatcodes.dir + +local kerning_code = kerncodes.kern + +local nodepool = nodes.pool + +local new_rule = nodepool.rule +local new_kern = nodepool.kern +local new_glue = nodepool.glue + +-- we can use this one elsewhere too +-- +-- todo: functions: word, sentence +-- +-- glyph rule unset whatsit glue margin_kern kern math disc + +local checkdir = true + +-- we assume {glyphruns} and no funny extra kerning, ok, maybe we need +-- a dummy character as start and end; anyway we only collect glyphs +-- +-- this one needs to take layers into account (i.e. we need a list of +-- critical attributes) + +-- omkeren class en level -> scheelt functie call in analyze + +-- todo: switching inside math + +local function processwords(attribute,data,flush,head,parent) -- we have hlistdir and local dir + local n = head + if n then + local f, l, a, d, i, class + local continue, done, strip, level = false, false, true, -1 + while n do + local id = n.id + if id == glyph_code or id == rule_code then + local aa = n[attribute] + if aa then + if aa == a then + if not f then -- ? + f = n + end + l = n + else + -- possible extensions: when in same class then keep spanning + local newlevel, newclass = floor(aa/1000), aa%1000 +--~ strip = not continue or level == 1 -- 0 + if f then + if class == newclass then -- and newlevel > level then + head, done = flush(head,f,l,d,level,parent,false), true + else + head, done = flush(head,f,l,d,level,parent,strip), true + end + end + f, l, a = n, n, aa + level, class = newlevel, newclass + d = data[class] + continue = d.continue == variables.yes + end + else + if f then + head, done = flush(head,f,l,d,level,parent,strip), true + end + f, l, a = nil, nil, nil + end + elseif f and (id == disc_code or (id == kern_code and n.subtype == kerning_code)) then + l = n + elseif id == hlist_code or id == vlist_code then + if f then + head, done = flush(head,f,l,d,level,parent,strip), true + f, l, a = nil, nil, nil + end + local list = n.list + if list then + n.list = processwords(attribute,data,flush,list,n) + end + elseif checkdir and id == whatsit_code and n.subtype == dir_code then -- only changes in dir, we assume proper boundaries + if f and a then + l = n + end + elseif f then + if continue then + if id == penalty_code then + l = n + elseif id == kern_code then + l = n + elseif id == glue_code then + -- catch \underbar{a} \underbar{a} (subtype test is needed) + local subtype = n.subtype + if continue and n[attribute] and + (subtype == userskip_code or subtype == spaceskip_code or subskip == xspaceskip_code) then + l = n + else + head, done = flush(head,f,l,d,level,parent,strip), true + f, l, a = nil, nil, nil + end + end + else + head, done = flush(head,f,l,d,level,parent,strip), true + f, l, a = nil, nil, nil + end + end + n = n.next + end + if f then + head, done = flush(head,f,l,d,level,parent,strip), true + end + return head, true -- todo: done + else + return head, false + end +end + +nodes.processwords = processwords + +-- + +nodes.rules = nodes.rules or { } +nodes.rules.data = nodes.rules.data or { } + +storage.register("nodes/rules/data", nodes.rules.data, "nodes.rules.data") + +local data = nodes.rules.data + +function nodes.rules.define(settings) + data[#data+1] = settings + context(#data) +end + +local a_viewerlayer = attributes.private("viewerlayer") + +local function 
flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but acceptable for this purpose +-- check for f and l + if f.id ~= glyph_code then + -- saveguard ... we need to deal with rules and so (math) + return head + end + local r, m + if strip then + if trace_ruled then + local before = n_tosequence(f,l,true) + f, l = striprange(f,l) + local after = n_tosequence(f,l,true) + report_ruled("range stripper, before %a, after %a",before,after) + else + f, l = striprange(f,l) + end + end + if not f then + return head + end + local w = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,f,l.next) + local method, offset, continue, dy, order, max = d.method, d.offset, d.continue, d.dy, d.order, d.max + local rulethickness, unit = d.rulethickness, d.unit + local ma, ca, ta = d.ma, d.ca, d.ta + local colorspace = (ma > 0 and ma) or f[a_colorspace] or 1 + local color = (ca > 0 and ca) or f[a_color] + local transparency = (ta > 0 and ta) or f[a_transparency] + local foreground = order == variables.foreground + + local e = dimenfactor(unit,fontdata[f.font]) -- what if no glyph node + + local rt = tonumber(rulethickness) + if rt then + rulethickness = e * rulethickness / 2 + else + local n, u = splitdimen(rulethickness) + if n and u then -- we need to intercept ex and em and % and ... + rulethickness = n * dimenfactor(u,fontdata[f.font]) / 2 + else + rulethickness = 1/5 + end + end + + if level > max then + level = max + end + if method == 0 then -- center + offset = 2*offset +-- m = (offset+(level-1)*dy+rulethickness)*e/2 + m = (offset+(level-1)*dy)*e/2 + rulethickness/2 + else + m = 0 + end + for i=1,level do +-- local ht = (offset+(i-1)*dy+rulethickness)*e - m +-- local dp = -(offset+(i-1)*dy-rulethickness)*e + m + local ht = (offset+(i-1)*dy)*e + rulethickness - m + local dp = -(offset+(i-1)*dy)*e + rulethickness + m + local r = new_rule(w,ht,dp) + local v = f[a_viewerlayer] + -- quick hack + if v then + r[a_viewerlayer] = v + end + -- + if color then + r[a_colorspace] = colorspace + r[a_color] = color + end + if transparency then + r[a_transparency] = transparency + end + local k = new_kern(-w) + if foreground then + insert_node_after(head,l,k) + insert_node_after(head,k,r) + l = r + else + head = insert_node_before(head,f,r) + insert_node_after(head,r,k) + end + if trace_ruled then + report_ruled("level %a, width %p, height %p, depth %p, nodes %a, text %a", + level,w,ht,dp,n_tostring(f,l),n_tosequence(f,l,true)) + end + end + return head +end + +local process = nodes.processwords + +nodes.rules.handler = function(head) return process(a_ruled,data,flush_ruled,head) end + +function nodes.rules.enable() + tasks.enableaction("shipouts","nodes.rules.handler") +end + +-- elsewhere: +-- +-- tasks.appendaction ("shipouts", "normalizers", "nodes.rules.handler") +-- tasks.disableaction("shipouts", "nodes.rules.handler") -- only kick in when used + +local trace_shifted = false trackers.register("nodes.shifting", function(v) trace_shifted = v end) + +local report_shifted = logs.reporter("nodes","shifting") + +local a_shifted = attributes.private('shifted') + +nodes.shifts = nodes.shifts or { } +nodes.shifts.data = nodes.shifts.data or { } + +storage.register("nodes/shifts/data", nodes.shifts.data, "nodes.shifts.data") + +local data = nodes.shifts.data + +function nodes.shifts.define(settings) + data[#data+1] = settings + context(#data) +end + +local function flush_shifted(head,first,last,data,level,parent,strip) -- not that fast but acceptable for this purpose + if true then + first, last = 
striprange(first,last) + end + local prev, next = first.prev, last.next + first.prev, last.next = nil, nil + local width, height, depth = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,first,next) + local list = hpack_nodes(first,width,"exactly") + if first == head then + head = list + end + if prev then + prev.next, list.prev = list, prev + end + if next then + next.prev, list.next = list, next + end + local raise = data.dy * dimenfactor(data.unit,fontdata[first.font]) + list.shift, list.height, list.depth = raise, height, depth + if trace_shifted then + report_shifted("width %p, nodes %a, text %a",width,n_tostring(first,last),n_tosequence(first,last,true)) + end + return head +end + +local process = nodes.processwords + +nodes.shifts.handler = function(head) return process(a_shifted,data,flush_shifted,head) end + +function nodes.shifts.enable() + tasks.enableaction("shipouts","nodes.shifts.handler") +end diff --git a/tex/context/base/node-ser.lua b/tex/context/base/node-ser.lua index b0a6e9952..f4ae1e2b2 100644 --- a/tex/context/base/node-ser.lua +++ b/tex/context/base/node-ser.lua @@ -1,286 +1,286 @@ -if not modules then modules = { } end modules ['node-ser'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- beware, some field names will change in a next releases --- of luatex; this is pretty old code that needs an overhaul - -local type, format, rep = type, string.format, string.rep -local concat, tohash, sortedkeys, printtable = table.concat, table.tohash, table.sortedkeys, table.print - -local allocate = utilities.storage.allocate - -local nodes, node = nodes, node - -local traverse = node.traverse -local is_node = node.is_node - -local nodecodes = nodes.nodecodes -local noadcodes = nodes.noadcodes -local nodefields = nodes.fields - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist - -local expand = allocate ( tohash { - "list", -- list_ptr & ins_ptr & adjust_ptr - "pre", -- - "post", -- - "spec", -- glue_ptr - "top_skip", -- - "attr", -- - "replace", -- nobreak - "components", -- lig_ptr - "box_left", -- - "box_right", -- - "glyph", -- margin_char - "leader", -- leader_ptr - "action", -- action_ptr - "value", -- user_defined nodes with subtype 'a' en 'n' - "head", -} ) - --- page_insert: "height", "last_ins_ptr", "best_ins_ptr" --- split_insert: "height", "last_ins_ptr", "best_ins_ptr", "broken_ptr", "broken_ins" - -local ignore = allocate ( tohash { - "page_insert", - "split_insert", - "ref_count", -} ) - -local dimension = allocate ( tohash { - "width", "height", "depth", "shift", - "stretch", "shrink", - "xoffset", "yoffset", - "surround", - "kern", - "box_left_width", "box_right_width" -} ) - --- flat: don't use next, but indexes --- verbose: also add type --- can be sped up - -nodes.dimensionfields = dimension -nodes.listablefields = expand -nodes.ignorablefields = ignore - --- not ok yet: - -local function astable(n,sparse) -- not yet ok - local f, t = nodefields(n), { } - for i=1,#f do - local v = f[i] - local d = n[v] - if d then - if ignore[v] or v == "id" then - -- skip - elseif expand[v] then -- or: type(n[v]) ~= "string" or type(n[v]) ~= "number" or type(n[v]) ~= "table" - t[v] = "pointer to list" - elseif sparse then - if (type(d) == "number" and d ~= 0) or (type(d) == "string" and d ~= "") then - t[v] = d - end - else - t[v] = d - end - end - end - t.type 
= nodecodes[n.id] - return t -end - -nodes.astable = astable - -setinspector(function(v) if is_node(v) then printtable(astable(v),tostring(v)) return true end end) - --- under construction: - -local function totable(n,flat,verbose,noattributes) - -- todo: no local function - local function to_table(n,flat,verbose,noattributes) -- no need to pass - local f = nodefields(n) - local tt = { } - for k=1,#f do - local v = f[k] - local nv = v and n[v] - if nv then - if ignore[v] then - -- skip - elseif noattributes and v == "attr" then - -- skip - elseif expand[v] then - if type(nv) == "number" or type(nv) == "string" then - tt[v] = nv - else - tt[v] = totable(nv,flat,verbose) - end - elseif type(nv) == "table" then - tt[v] = nv -- totable(nv,flat,verbose) -- data - else - tt[v] = nv - end - end - end - if verbose then - tt.type = nodecodes[tt.id] - end - return tt - end - if n then - if flat then - local t, tn = { }, 0 - while n do - tn = tn + 1 - t[tn] = to_table(n,flat,verbose,noattributes) - n = n.next - end - return t - else - local t = to_table(n) - if n.next then - t.next = totable(n.next,flat,verbose,noattributes) - end - return t - end - else - return { } - end -end - -nodes.totable = totable - -local function key(k) - return ((type(k) == "number") and "["..k.."]") or k -end - --- not ok yet; this will become a module - --- todo: adapt to nodecodes etc - -local function serialize(root,name,handle,depth,m,noattributes) - handle = handle or print - if depth then - depth = depth .. " " - handle(format("%s%s={",depth,key(name))) - else - depth = "" - local tname = type(name) - if tname == "string" then - if name == "return" then - handle("return {") - else - handle(name .. "={") - end - elseif tname == "number" then - handle("[" .. name .. "]={") - else - handle("t={") - end - end - if root then - local fld - if root.id then - fld = nodefields(root) -- we can cache these (todo) - else - fld = sortedkeys(root) - end - if type(root) == 'table' and root['type'] then -- userdata or table - handle(format("%s %s=%q,",depth,'type',root['type'])) - end - for f=1,#fld do - local k = fld[f] - if k == "ref_count" then - -- skip - elseif noattributes and k == "attr" then - -- skip - elseif k == "id" then - local v = root[k] - handle(format("%s id=%s,",depth,nodecodes[v] or noadcodes[v] or v)) - elseif k then - local v = root[k] - local t = type(v) - if t == "number" then - if v == 0 then - -- skip - else - handle(format("%s %s=%s,",depth,key(k),v)) - end - elseif t == "string" then - if v == "" then - -- skip - else - handle(format("%s %s=%q,",depth,key(k),v)) - end - elseif t == "boolean" then - handle(format("%s %s=%q,",depth,key(k),tostring(v))) - elseif v then -- userdata or table - serialize(v,k,handle,depth,m+1,noattributes) - end - end - end - if root['next'] then -- userdata or table - serialize(root['next'],'next',handle,depth,m+1,noattributes) - end - end - if m and m > 0 then - handle(format("%s},",depth)) - else - handle(format("%s}",depth)) - end -end - -function nodes.serialize(root,name,noattributes) - local t, n = { }, 0 - local function flush(s) - n = n + 1 - t[n] = s - end - serialize(root,name,flush,nil,0,noattributes) - return concat(t,"\n") -end - -function nodes.serializebox(n,flat,verbose,name) - return nodes.serialize(nodes.totable(tex.box[n],flat,verbose),name) -end - -function nodes.visualizebox(...) -- to be checked .. 
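-- Usage sketch (not from the patch): the serializer above as one might call it while
-- debugging a run. Box register 0 is arbitrary and assumed to be set.
local b = tex.box[0]
local t = nodes.totable(b,true,true)   -- flat array of node tables, with type names added
local s = nodes.serialize(b,"mybox")   -- printable dump; "mybox" is just a label
print(s)
-- nodes.serializebox(0,true,true,"mybox") combines both steps for a box register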
will move to module anyway - context.starttyping() - context.pushcatcodes("verbatim") - context(nodes.serializebox(...)) - context.stoptyping() - context.popcatcodes() -end - -function nodes.list(head,n) -- name might change to nodes.type -- to be checked .. will move to module anyway - if not n then - context.starttyping(true) - end - while head do - local id = head.id - context(rep(" ",n or 0) .. tostring(head) .. "\n") - if id == hlist_code or id == vlist_code then - nodes.list(head.list,(n or 0)+1) - end - head = head.next - end - if not n then - context.stoptyping(true) - end -end - -function nodes.print(head,n) - while head do - local id = head.id - logs.writer(string.formatters["%w%S"],n or 0,head) - if id == hlist_code or id == vlist_code then - nodes.print(head.list,(n or 0)+1) - end - head = head.next - end -end +if not modules then modules = { } end modules ['node-ser'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- beware, some field names will change in a next releases +-- of luatex; this is pretty old code that needs an overhaul + +local type, format, rep = type, string.format, string.rep +local concat, tohash, sortedkeys, printtable = table.concat, table.tohash, table.sortedkeys, table.print + +local allocate = utilities.storage.allocate + +local nodes, node = nodes, node + +local traverse = node.traverse +local is_node = node.is_node + +local nodecodes = nodes.nodecodes +local noadcodes = nodes.noadcodes +local nodefields = nodes.fields + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist + +local expand = allocate ( tohash { + "list", -- list_ptr & ins_ptr & adjust_ptr + "pre", -- + "post", -- + "spec", -- glue_ptr + "top_skip", -- + "attr", -- + "replace", -- nobreak + "components", -- lig_ptr + "box_left", -- + "box_right", -- + "glyph", -- margin_char + "leader", -- leader_ptr + "action", -- action_ptr + "value", -- user_defined nodes with subtype 'a' en 'n' + "head", +} ) + +-- page_insert: "height", "last_ins_ptr", "best_ins_ptr" +-- split_insert: "height", "last_ins_ptr", "best_ins_ptr", "broken_ptr", "broken_ins" + +local ignore = allocate ( tohash { + "page_insert", + "split_insert", + "ref_count", +} ) + +local dimension = allocate ( tohash { + "width", "height", "depth", "shift", + "stretch", "shrink", + "xoffset", "yoffset", + "surround", + "kern", + "box_left_width", "box_right_width" +} ) + +-- flat: don't use next, but indexes +-- verbose: also add type +-- can be sped up + +nodes.dimensionfields = dimension +nodes.listablefields = expand +nodes.ignorablefields = ignore + +-- not ok yet: + +local function astable(n,sparse) -- not yet ok + local f, t = nodefields(n), { } + for i=1,#f do + local v = f[i] + local d = n[v] + if d then + if ignore[v] or v == "id" then + -- skip + elseif expand[v] then -- or: type(n[v]) ~= "string" or type(n[v]) ~= "number" or type(n[v]) ~= "table" + t[v] = "pointer to list" + elseif sparse then + if (type(d) == "number" and d ~= 0) or (type(d) == "string" and d ~= "") then + t[v] = d + end + else + t[v] = d + end + end + end + t.type = nodecodes[n.id] + return t +end + +nodes.astable = astable + +setinspector(function(v) if is_node(v) then printtable(astable(v),tostring(v)) return true end end) + +-- under construction: + +local function totable(n,flat,verbose,noattributes) + -- todo: no local function + local function 
to_table(n,flat,verbose,noattributes) -- no need to pass + local f = nodefields(n) + local tt = { } + for k=1,#f do + local v = f[k] + local nv = v and n[v] + if nv then + if ignore[v] then + -- skip + elseif noattributes and v == "attr" then + -- skip + elseif expand[v] then + if type(nv) == "number" or type(nv) == "string" then + tt[v] = nv + else + tt[v] = totable(nv,flat,verbose) + end + elseif type(nv) == "table" then + tt[v] = nv -- totable(nv,flat,verbose) -- data + else + tt[v] = nv + end + end + end + if verbose then + tt.type = nodecodes[tt.id] + end + return tt + end + if n then + if flat then + local t, tn = { }, 0 + while n do + tn = tn + 1 + t[tn] = to_table(n,flat,verbose,noattributes) + n = n.next + end + return t + else + local t = to_table(n) + if n.next then + t.next = totable(n.next,flat,verbose,noattributes) + end + return t + end + else + return { } + end +end + +nodes.totable = totable + +local function key(k) + return ((type(k) == "number") and "["..k.."]") or k +end + +-- not ok yet; this will become a module + +-- todo: adapt to nodecodes etc + +local function serialize(root,name,handle,depth,m,noattributes) + handle = handle or print + if depth then + depth = depth .. " " + handle(format("%s%s={",depth,key(name))) + else + depth = "" + local tname = type(name) + if tname == "string" then + if name == "return" then + handle("return {") + else + handle(name .. "={") + end + elseif tname == "number" then + handle("[" .. name .. "]={") + else + handle("t={") + end + end + if root then + local fld + if root.id then + fld = nodefields(root) -- we can cache these (todo) + else + fld = sortedkeys(root) + end + if type(root) == 'table' and root['type'] then -- userdata or table + handle(format("%s %s=%q,",depth,'type',root['type'])) + end + for f=1,#fld do + local k = fld[f] + if k == "ref_count" then + -- skip + elseif noattributes and k == "attr" then + -- skip + elseif k == "id" then + local v = root[k] + handle(format("%s id=%s,",depth,nodecodes[v] or noadcodes[v] or v)) + elseif k then + local v = root[k] + local t = type(v) + if t == "number" then + if v == 0 then + -- skip + else + handle(format("%s %s=%s,",depth,key(k),v)) + end + elseif t == "string" then + if v == "" then + -- skip + else + handle(format("%s %s=%q,",depth,key(k),v)) + end + elseif t == "boolean" then + handle(format("%s %s=%q,",depth,key(k),tostring(v))) + elseif v then -- userdata or table + serialize(v,k,handle,depth,m+1,noattributes) + end + end + end + if root['next'] then -- userdata or table + serialize(root['next'],'next',handle,depth,m+1,noattributes) + end + end + if m and m > 0 then + handle(format("%s},",depth)) + else + handle(format("%s}",depth)) + end +end + +function nodes.serialize(root,name,noattributes) + local t, n = { }, 0 + local function flush(s) + n = n + 1 + t[n] = s + end + serialize(root,name,flush,nil,0,noattributes) + return concat(t,"\n") +end + +function nodes.serializebox(n,flat,verbose,name) + return nodes.serialize(nodes.totable(tex.box[n],flat,verbose),name) +end + +function nodes.visualizebox(...) -- to be checked .. will move to module anyway + context.starttyping() + context.pushcatcodes("verbatim") + context(nodes.serializebox(...)) + context.stoptyping() + context.popcatcodes() +end + +function nodes.list(head,n) -- name might change to nodes.type -- to be checked .. will move to module anyway + if not n then + context.starttyping(true) + end + while head do + local id = head.id + context(rep(" ",n or 0) .. tostring(head) .. 
"\n") + if id == hlist_code or id == vlist_code then + nodes.list(head.list,(n or 0)+1) + end + head = head.next + end + if not n then + context.stoptyping(true) + end +end + +function nodes.print(head,n) + while head do + local id = head.id + logs.writer(string.formatters["%w%S"],n or 0,head) + if id == hlist_code or id == vlist_code then + nodes.print(head.list,(n or 0)+1) + end + head = head.next + end +end diff --git a/tex/context/base/node-shp.lua b/tex/context/base/node-shp.lua index 8f7a411a7..42084a135 100644 --- a/tex/context/base/node-shp.lua +++ b/tex/context/base/node-shp.lua @@ -1,148 +1,148 @@ -if not modules then modules = { } end modules ['node-shp'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local nodes, node = nodes, node - -local next, type = next, type -local format = string.format -local concat, sortedpairs = table.concat, table.sortedpairs -local setmetatableindex = table.setmetatableindex - -local nodecodes = nodes.nodecodes -local tasks = nodes.tasks -local handlers = nodes.handlers - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local disc_code = nodecodes.disc -local mark_code = nodecodes.mark -local kern_code = nodecodes.kern -local glue_code = nodecodes.glue - -local texbox = tex.box - -local free_node = node.free -local remove_node = node.remove -local traverse_nodes = node.traverse - -local function cleanup(head) -- rough - local start = head - while start do - local id = start.id - if id == disc_code or (id == glue_code and not start.writable) or (id == kern_code and start.kern == 0) or id == mark_code then - head, start, tmp = remove_node(head,start) - free_node(tmp) - elseif id == hlist_code or id == vlist_code then - local sl = start.list - if sl then - start.list = cleanup(sl) - start = start.next - else - head, start, tmp = remove_node(head,start) - free_node(tmp) - end - else - start = start.next - end - end - return head -end - -directives.register("backend.cleanup", function() - tasks.enableaction("shipouts","nodes.handlers.cleanuppage") -end) - -function handlers.cleanuppage(head) - -- about 10% of the nodes make no sense for the backend - return cleanup(head), true -end - -local actions = tasks.actions("shipouts") -- no extra arguments - -function handlers.finalize(head) -- problem, attr loaded before node, todo ... - return actions(head) -end - --- handlers.finalize = actions - --- interface - -function commands.finalizebox(n) - actions(texbox[n]) -end - --- just in case we want to optimize lookups: - -local frequencies = { } - -nodes.tracers.frequencies = frequencies - -local data = { } -local done = false - -setmetatableindex(data,function(t,k) - local v = { } - setmetatableindex(v,function(t,k) - local v = { } - t[k] = v - setmetatableindex(v,function(t,k) - t[k] = 0 - return 0 - end) - return v - end) - t[k] = v - return v -end) - -local function count(head,data,subcategory) - -- no components, pre, post, replace .. can maybe an option .. 
but - -- we use this for optimization so it makes sense to look the the - -- main node only - for n in traverse_nodes(head) do - local id = n.id - local dn = data[nodecodes[n.id]] - dn[subcategory] = dn[subcategory] + 1 - if id == hlist_code or id == vlist_code then - count(n.list,data,subcategory) - end - end -end - -local function register(category,subcategory) - return function(head) - done = true - count(head,data[category],subcategory) - return head, false - end -end - -frequencies.register = register -frequencies.filename = nil - -trackers.register("nodes.frequencies",function(v) - if type(v) == "string" then - frequencies.filename = v - end - handlers.frequencies_shipouts_before = register("shipouts", "begin") - handlers.frequencies_shipouts_after = register("shipouts", "end") - handlers.frequencies_processors_before = register("processors", "begin") - handlers.frequencies_processors_after = register("processors", "end") - tasks.prependaction("shipouts", "before", "nodes.handlers.frequencies_shipouts_before") - tasks.appendaction ("shipouts", "after", "nodes.handlers.frequencies_shipouts_after") - tasks.prependaction("processors", "before", "nodes.handlers.frequencies_processors_before") - tasks.appendaction ("processors", "after", "nodes.handlers.frequencies_processors_after") -end) - -statistics.register("node frequencies", function() - if done then - local filename = frequencies.filename or (tex.jobname .. "-frequencies.lua") - io.savedata(filename,table.serialize(data,true)) - return format("saved in %q",filename) - end -end) +if not modules then modules = { } end modules ['node-shp'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local nodes, node = nodes, node + +local next, type = next, type +local format = string.format +local concat, sortedpairs = table.concat, table.sortedpairs +local setmetatableindex = table.setmetatableindex + +local nodecodes = nodes.nodecodes +local tasks = nodes.tasks +local handlers = nodes.handlers + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local disc_code = nodecodes.disc +local mark_code = nodecodes.mark +local kern_code = nodecodes.kern +local glue_code = nodecodes.glue + +local texbox = tex.box + +local free_node = node.free +local remove_node = node.remove +local traverse_nodes = node.traverse + +local function cleanup(head) -- rough + local start = head + while start do + local id = start.id + if id == disc_code or (id == glue_code and not start.writable) or (id == kern_code and start.kern == 0) or id == mark_code then + head, start, tmp = remove_node(head,start) + free_node(tmp) + elseif id == hlist_code or id == vlist_code then + local sl = start.list + if sl then + start.list = cleanup(sl) + start = start.next + else + head, start, tmp = remove_node(head,start) + free_node(tmp) + end + else + start = start.next + end + end + return head +end + +directives.register("backend.cleanup", function() + tasks.enableaction("shipouts","nodes.handlers.cleanuppage") +end) + +function handlers.cleanuppage(head) + -- about 10% of the nodes make no sense for the backend + return cleanup(head), true +end + +local actions = tasks.actions("shipouts") -- no extra arguments + +function handlers.finalize(head) -- problem, attr loaded before node, todo ... 
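-- Note (illustrative, not from the patch): the cleanup pass above drops nodes that the
-- backend never looks at (discretionaries, non-writable glue, zero kerns, marks). It is
-- off by default; in a document one would enable the directive, e.g.
--
--   \enabledirectives[backend.cleanup]
--
-- which switches on the "nodes.handlers.cleanuppage" action in the shipout task list.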
+ return actions(head) +end + +-- handlers.finalize = actions + +-- interface + +function commands.finalizebox(n) + actions(texbox[n]) +end + +-- just in case we want to optimize lookups: + +local frequencies = { } + +nodes.tracers.frequencies = frequencies + +local data = { } +local done = false + +setmetatableindex(data,function(t,k) + local v = { } + setmetatableindex(v,function(t,k) + local v = { } + t[k] = v + setmetatableindex(v,function(t,k) + t[k] = 0 + return 0 + end) + return v + end) + t[k] = v + return v +end) + +local function count(head,data,subcategory) + -- no components, pre, post, replace .. can maybe an option .. but + -- we use this for optimization so it makes sense to look the the + -- main node only + for n in traverse_nodes(head) do + local id = n.id + local dn = data[nodecodes[n.id]] + dn[subcategory] = dn[subcategory] + 1 + if id == hlist_code or id == vlist_code then + count(n.list,data,subcategory) + end + end +end + +local function register(category,subcategory) + return function(head) + done = true + count(head,data[category],subcategory) + return head, false + end +end + +frequencies.register = register +frequencies.filename = nil + +trackers.register("nodes.frequencies",function(v) + if type(v) == "string" then + frequencies.filename = v + end + handlers.frequencies_shipouts_before = register("shipouts", "begin") + handlers.frequencies_shipouts_after = register("shipouts", "end") + handlers.frequencies_processors_before = register("processors", "begin") + handlers.frequencies_processors_after = register("processors", "end") + tasks.prependaction("shipouts", "before", "nodes.handlers.frequencies_shipouts_before") + tasks.appendaction ("shipouts", "after", "nodes.handlers.frequencies_shipouts_after") + tasks.prependaction("processors", "before", "nodes.handlers.frequencies_processors_before") + tasks.appendaction ("processors", "after", "nodes.handlers.frequencies_processors_after") +end) + +statistics.register("node frequencies", function() + if done then + local filename = frequencies.filename or (tex.jobname .. 
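-- Usage sketch (not from the patch): the frequency tracer above counts node types per
-- category during shipout and processing. Assuming the tracker accepts a filename value,
-- as the registration code suggests:
--
--   \enabletrackers[nodes.frequencies=myrun-frequencies.lua]
--
-- When no filename is given, the collected data is written to <jobname>-frequencies.lua
-- at the end of the run by the "node frequencies" statistics handler.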
"-frequencies.lua") + io.savedata(filename,table.serialize(data,true)) + return format("saved in %q",filename) + end +end) diff --git a/tex/context/base/node-snp.lua b/tex/context/base/node-snp.lua index 31c7771ac..3a764e90a 100644 --- a/tex/context/base/node-snp.lua +++ b/tex/context/base/node-snp.lua @@ -1,66 +1,66 @@ -if not modules then modules = { } end modules ['node-snp'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if not nodes then - nodes = { } -- also loaded in mtx-timing -end - -local snapshots = { } -nodes.snapshots = snapshots - -local nodeusage = nodes.pool and nodes.pool.usage -local clock = os.gettimeofday or os.clock -- should go in environment -local lasttime = clock() -local samples = { } - -local parameters = { - "cs_count", - "dyn_used", - "elapsed_time", - "luabytecode_bytes", - "luastate_bytes", - "max_buf_stack", - "obj_ptr", - "pdf_mem_ptr", - "pdf_mem_size", - "pdf_os_cntr", --- "pool_ptr", -- obsolete - "str_ptr", -} - -function snapshots.takesample(comment) - if nodeusage then - local c = clock() - local t = { - elapsed_time = c - lasttime, - node_memory = nodeusage(), - comment = comment, - } - for i=1,#parameters do - local parameter = parameters[i] - local ps = status[parameter] - if ps then - t[parameter] = ps - end - end - samples[#samples+1] = t - lasttime = c - end -end - -function snapshots.getsamples() - return samples -- one return value ! -end - -function snapshots.resetsamples() - samples = { } -end - -function snapshots.getparameters() - return parameters -end +if not modules then modules = { } end modules ['node-snp'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if not nodes then + nodes = { } -- also loaded in mtx-timing +end + +local snapshots = { } +nodes.snapshots = snapshots + +local nodeusage = nodes.pool and nodes.pool.usage +local clock = os.gettimeofday or os.clock -- should go in environment +local lasttime = clock() +local samples = { } + +local parameters = { + "cs_count", + "dyn_used", + "elapsed_time", + "luabytecode_bytes", + "luastate_bytes", + "max_buf_stack", + "obj_ptr", + "pdf_mem_ptr", + "pdf_mem_size", + "pdf_os_cntr", +-- "pool_ptr", -- obsolete + "str_ptr", +} + +function snapshots.takesample(comment) + if nodeusage then + local c = clock() + local t = { + elapsed_time = c - lasttime, + node_memory = nodeusage(), + comment = comment, + } + for i=1,#parameters do + local parameter = parameters[i] + local ps = status[parameter] + if ps then + t[parameter] = ps + end + end + samples[#samples+1] = t + lasttime = c + end +end + +function snapshots.getsamples() + return samples -- one return value ! 
+end + +function snapshots.resetsamples() + samples = { } +end + +function snapshots.getparameters() + return parameters +end diff --git a/tex/context/base/node-tex.lua b/tex/context/base/node-tex.lua index 2170e0603..9393eaf79 100644 --- a/tex/context/base/node-tex.lua +++ b/tex/context/base/node-tex.lua @@ -1,41 +1,41 @@ -if not modules then modules = { } end modules ['node-tex'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format - -builders = builders or { } -builders.kernel = builders.kernel or { } -local kernel = builders.kernel - -local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming -local hyphenate, ligaturing, kerning = lang.hyphenate, node.ligaturing, node.kerning - -function kernel.hyphenation(head) - -- starttiming(kernel) - local done = hyphenate(head) - -- stoptiming(kernel) - return head, done -end - -function kernel.ligaturing(head) - -- starttiming(kernel) - local head, tail, done = ligaturing(head) -- todo: check what is returned - -- stoptiming(kernel) - return head, done -end - -function kernel.kerning(head) - -- starttiming(kernel) - local head, tail, done = kerning(head) -- todo: check what is returned - -- stoptiming(kernel) - return head, done -end - -callbacks.register('hyphenate' , false, "normal hyphenation routine, called elsewhere") -callbacks.register('ligaturing', false, "normal ligaturing routine, called elsewhere") -callbacks.register('kerning' , false, "normal kerning routine, called elsewhere") +if not modules then modules = { } end modules ['node-tex'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format + +builders = builders or { } +builders.kernel = builders.kernel or { } +local kernel = builders.kernel + +local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming +local hyphenate, ligaturing, kerning = lang.hyphenate, node.ligaturing, node.kerning + +function kernel.hyphenation(head) + -- starttiming(kernel) + local done = hyphenate(head) + -- stoptiming(kernel) + return head, done +end + +function kernel.ligaturing(head) + -- starttiming(kernel) + local head, tail, done = ligaturing(head) -- todo: check what is returned + -- stoptiming(kernel) + return head, done +end + +function kernel.kerning(head) + -- starttiming(kernel) + local head, tail, done = kerning(head) -- todo: check what is returned + -- stoptiming(kernel) + return head, done +end + +callbacks.register('hyphenate' , false, "normal hyphenation routine, called elsewhere") +callbacks.register('ligaturing', false, "normal ligaturing routine, called elsewhere") +callbacks.register('kerning' , false, "normal kerning routine, called elsewhere") diff --git a/tex/context/base/node-tra.lua b/tex/context/base/node-tra.lua index 916b2143d..f194239bb 100644 --- a/tex/context/base/node-tra.lua +++ b/tex/context/base/node-tra.lua @@ -1,529 +1,529 @@ -if not modules then modules = { } end modules ['node-tra'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

<p>This is rather experimental. We need more control and some of this -might become a runtime module instead. This module will be cleaned up!</p>
    ---ldx]]-- - -local utfchar = utf.char -local format, match, gmatch, concat, rep = string.format, string.match, string.gmatch, table.concat, string.rep -local lpegmatch = lpeg.match -local clock = os.gettimeofday or os.clock -- should go in environment - -local report_nodes = logs.reporter("nodes","tracing") - -nodes = nodes or { } - -local nodes, node, context = nodes, node, context - -local tracers = nodes.tracers or { } -nodes.tracers = tracers - -local tasks = nodes.tasks or { } -nodes.tasks = tasks - -local handlers = nodes.handlers or {} -nodes.handlers = handlers - -local injections = nodes.injections or { } -nodes.injections = injections - -local traverse_nodes = node.traverse -local traverse_by_id = node.traverse_id -local count_nodes = nodes.count - -local nodecodes = nodes.nodecodes -local whatcodes = nodes.whatcodes -local skipcodes = nodes.skipcodes -local fillcodes = nodes.fillcodes - -local glyph_code = nodecodes.glyph -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local disc_code = nodecodes.disc -local glue_code = nodecodes.glue -local kern_code = nodecodes.kern -local rule_code = nodecodes.rule -local whatsit_code = nodecodes.whatsit -local spec_code = nodecodes.glue_spec - -local localpar_code = whatcodes.localpar -local dir_code = whatcodes.dir - -local nodepool = nodes.pool - -local dimenfactors = number.dimenfactors -local formatters = string.formatters - --- this will be reorganized: - -function nodes.showlist(head, message) - if message then - report_nodes(message) - end - for n in traverse_nodes(head) do - report_nodes(tostring(n)) - end -end - -function nodes.handlers.checkglyphs(head,message) - local t = { } - for g in traverse_by_id(glyph_code,head) do - t[#t+1] = formatters["%U:%s"](g.char,g.subtype) - end - if #t > 0 then - if message and message ~= "" then - report_nodes("%s, %s glyphs: % t",message,#t,t) - else - report_nodes("%s glyphs: % t",#t,t) - end - end - return false -end - -function nodes.handlers.checkforleaks(sparse) - local l = { } - local q = node.usedlist() - for p in traverse(q) do - local s = table.serialize(nodes.astable(p,sparse),nodecodes[p.id]) - l[s] = (l[s] or 0) + 1 - end - node.flush_list(q) - for k, v in next, l do - write_nl(formatters["%s * %s"](v,k)) - end -end - -local f_sequence = formatters["U+%04X:%s"] - -local function tosequence(start,stop,compact) - if start then - local t = { } - while start do - local id = start.id - if id == glyph_code then - local c = start.char - if compact then - if start.components then - t[#t+1] = tosequence(start.components,nil,compact) - else - t[#t+1] = utfchar(c) - end - else - t[#t+1] = f_sequence(c,utfchar(c)) - end - elseif id == whatsit_code and start.subtype == localpar_code or start.subtype == dir_code then - t[#t+1] = "[" .. start.dir .. 
"]" - elseif id == rule_code then - if compact then - t[#t+1] = "|" - else - t[#t+1] = nodecodes[id] - end - else - if compact then - t[#t+1] = "[]" - else - t[#t+1] = nodecodes[id] - end - end - if start == stop then - break - else - start = start.next - end - end - if compact then - return concat(t) - else - return concat(t," ") - end - else - return "[empty]" - end -end - -nodes.tosequence = tosequence - -function nodes.report(t,done) - report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(t)) -end - -function nodes.packlist(head) - local t = { } - for n in traverse(head) do - t[#t+1] = tostring(n) - end - return t -end - -function nodes.idstostring(head,tail) - local t, last_id, last_n = { }, nil, 0 - for n in traverse_nodes(head,tail) do -- hm, does not stop at tail - local id = n.id - if not last_id then - last_id, last_n = id, 1 - elseif last_id == id then - last_n = last_n + 1 - else - if last_n > 1 then - t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?") - else - t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?") - end - last_id, last_n = id, 1 - end - if n == tail then - break - end - end - if not last_id then - t[#t+1] = "no nodes" - elseif last_n > 1 then - t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?") - else - t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?") - end - return concat(t," ") -end - --- function nodes.xidstostring(head,tail) -- only for special tracing of backlinks --- local n = head --- while n.next do --- n = n.next --- end --- local t, last_id, last_n = { }, nil, 0 --- while n do --- local id = n.id --- if not last_id then --- last_id, last_n = id, 1 --- elseif last_id == id then --- last_n = last_n + 1 --- else --- if last_n > 1 then --- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?") --- else --- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?") --- end --- last_id, last_n = id, 1 --- end --- if n == head then --- break --- end --- n = n.prev --- end --- if not last_id then --- t[#t+1] = "no nodes" --- elseif last_n > 1 then --- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?") --- else --- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?") --- end --- return table.concat(table.reversed(t)," ") --- end - -local function showsimplelist(h,depth,n) - while h do - write_nl(rep(" ",n) .. 
tostring(h)) - if not depth or n < depth then - local id = h.id - if id == hlist_code or id == vlist_code then - showsimplelist(h.list,depth,n+1) - end - end - h = h.next - end -end - ---~ \startluacode ---~ callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end) ---~ \stopluacode ---~ \vbox{b\footnote{n}a} ---~ \startluacode ---~ callback.register('buildpage_filter',nil) ---~ \stopluacode - -nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end - -local function listtoutf(h,joiner,textonly,last) - local joiner = (joiner == true and utfchar(0x200C)) or joiner -- zwnj - local w = { } - while h do - local id = h.id - if id == glyph_code then -- always true - w[#w+1] = utfchar(h.char) - if joiner then - w[#w+1] = joiner - end - elseif id == disc_code then - local pre, rep, pos = h.pre, h.replace, h.post - w[#w+1] = formatters["[%s|%s|%s]"] ( - pre and listtoutf(pre,joiner,textonly) or "", - rep and listtoutf(rep,joiner,textonly) or "", - mid and listtoutf(mid,joiner,textonly) or "" - ) - elseif textonly then - if id == glue_code and h.spec and h.spec.width > 0 then - w[#w+1] = " " - end - else - w[#w+1] = "[-]" - end - if h == last then - break - else - h = h.next - end - end - return concat(w) -end - -nodes.listtoutf = listtoutf - -local what = { [0] = "unknown", "line", "box", "indent", "row", "cell" } - -local function showboxes(n,symbol,depth) - depth, symbol = depth or 0, symbol or "." - for n in traverse_nodes(n) do - local id = n.id - if id == hlist_code or id == vlist_code then - local s = n.subtype - report_nodes(rep(symbol,depth) .. what[s] or s) - showboxes(n.list,symbol,depth+1) - end - end -end - -nodes.showboxes = showboxes - -local ptfactor = dimenfactors.pt -local bpfactor = dimenfactors.bp -local stripper = lpeg.patterns.stripzeros - --- start redefinition --- --- -- if fmt then --- -- return formatters[fmt](n*dimenfactors[unit],unit) --- -- else --- -- return match(formatters["%.20f"](n*dimenfactors[unit]),"(.-0?)0*$") .. unit --- -- end --- --- redefined: - -local dimenfactors = number.dimenfactors - -local function numbertodimen(d,unit,fmt,strip) - if not d then - local str = formatters[fmt](0,unit) - return strip and lpegmatch(stripper,str) or str - end - local t = type(d) - if t == 'string' then - return d - end - if unit == true then - unit = "pt" - fmt = "%0.5f%s" - else - unit = unit or 'pt' - if not fmt then - fmt = "%s%s" - elseif fmt == true then - fmt = "%0.5f%s" - end - end - if t == "number" then - local str = formatters[fmt](d*dimenfactors[unit],unit) - return strip and lpegmatch(stripper,str) or str - end - local id = node.id - if id == kern_code then - local str = formatters[fmt](d.width*dimenfactors[unit],unit) - return strip and lpegmatch(stripper,str) or str - end - if id == glue_code then - d = d.spec - end - if not d or not d.id == spec_code then - local str = formatters[fmt](0,unit) - return strip and lpegmatch(stripper,str) or str - end - local width = d.width - local plus = d.stretch_order - local minus = d.shrink_order - local stretch = d.stretch - local shrink = d.shrink - if plus ~= 0 then - plus = " plus " .. stretch/65536 .. fillcodes[plus] - elseif stretch ~= 0 then - plus = formatters[fmt](stretch*dimenfactors[unit],unit) - plus = " plus " .. (strip and lpegmatch(stripper,plus) or plus) - else - plus = "" - end - if minus ~= 0 then - minus = " minus " .. shrink/65536 .. 
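-- Usage sketch (not from the patch): quick textual views of a node list with the tracing
-- helpers above, e.g. from inside a ConTeXt run. Box 0 is arbitrary and assumed non-void.
local head = tex.box[0].list
print(nodes.idstostring(head))           -- e.g. "[glyph*4] [glue] [glyph*2]"
print(nodes.tosequence(head,nil,true))   -- compact: glyphs as characters, rules as "|"
print(nodes.listtoutf(head,true,true))   -- text only, with a zwnj joiner between glyphs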
fillcodes[minus] - elseif shrink ~= 0 then - minus = formatters[fmt](shrink*dimenfactors[unit],unit) - minus = " minus " .. (strip and lpegmatch(stripper,minus) or minus) - else - minus = "" - end - local str = formatters[fmt](d.width*dimenfactors[unit],unit) - return (strip and lpegmatch(stripper,str) or str) .. plus .. minus -end - -number.todimen = numbertodimen - -function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end -function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end -function number.tocentimeters (n,fmt) return numbertodimen(n,"cm",fmt) end -function number.tomillimeters (n,fmt) return numbertodimen(n,"mm",fmt) end -function number.toscaledpoints(n,fmt) return numbertodimen(n,"sp",fmt) end -function number.toscaledpoints(n) return n .. "sp" end -function number.tobasepoints (n,fmt) return numbertodimen(n,"bp",fmt) end -function number.topicas (n,fmt) return numbertodimen(n "pc",fmt) end -function number.todidots (n,fmt) return numbertodimen(n,"dd",fmt) end -function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end -function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end -function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end - --- stop redefinition - -local points = function(n) - if not n or n == 0 then - return "0pt" - elseif type(n) == "number" then - return lpegmatch(stripper,format("%.5fpt",n*ptfactor)) -- faster than formatter - else - return numbertodimen(n,"pt",true,true) -- also deals with nodes - end -end - -local basepoints = function(n) - if not n or n == 0 then - return "0bp" - elseif type(n) == "number" then - return lpegmatch(stripper,format("%.5fbp",n*bpfactor)) -- faster than formatter - else - return numbertodimen(n,"bp",true,true) -- also deals with nodes - end -end - -local pts = function(n) - if not n or n == 0 then - return "0pt" - elseif type(n) == "number" then - return format("%.5fpt",n*ptfactor) -- faster than formatter - else - return numbertodimen(n,"pt",true) -- also deals with nodes - end -end - -local nopts = function(n) - if not n or n == 0 then - return "0" - else - return format("%.5f",n*ptfactor) -- faster than formatter - end -end - -number.points = points -number.basepoints = basepoints -number.pts = pts -number.nopts = nopts - -local colors = { } -tracers.colors = colors - -local unsetvalue = attributes.unsetvalue - -local a_color = attributes.private('color') -local a_colormodel = attributes.private('colormodel') -local m_color = attributes.list[a_color] or { } - -function colors.set(n,c,s) - local mc = m_color[c] - if not mc then - n[a_color] = unsetvalue - else - if not n[a_colormodel] then - n[a_colormodel] = s or 1 - end - n[a_color] = mc - end - return n -end - -function colors.setlist(n,c,s) - local f = n - while n do - local mc = m_color[c] - if not mc then - n[a_color] = unsetvalue - else - if not n[a_colormodel] then - n[a_colormodel] = s or 1 - end - n[a_color] = mc - end - n = n.next - end - return f -end - -function colors.reset(n) - n[a_color] = unsetvalue - return n -end - --- maybe - -local transparencies = { } -tracers.transparencies = transparencies - -local a_transparency = attributes.private('transparency') -local m_transparency = attributes.list[a_transparency] or { } - -function transparencies.set(n,t) - local mt = m_transparency[t] - if not mt then - n[a_transparency] = unsetvalue - else - n[a_transparency] = mt - end - return n -end - -function transparencies.setlist(n,c,s) - local f = n - while n do - local mt = m_transparency[c] - if not 
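-- Usage sketch (not from the patch): the dimension helpers above turn scaled points
-- (65536sp = 1pt) into strings:
print(number.topoints(65536))        -- "1pt"
print(number.points(65536 * 12.5))   -- "12.5pt", trailing zeros stripped
print(number.tobasepoints(65536))    -- about 0.996bp (1pt = 72/72.27 bp)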
mt then - n[a_transparency] = unsetvalue - else - n[a_transparency] = mt - end - n = n.next - end - return f -end - -function transparencies.reset(n) - n[a_transparency] = unsetvalue - return n -end - --- for the moment here - -nodes.visualizers = { } - -function nodes.visualizers.handler(head) - return head, false -end +if not modules then modules = { } end modules ['node-tra'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

    This is rather experimental. We need more control and some of this +might become a runtime module instead. This module will be cleaned up!

    +--ldx]]-- + +local utfchar = utf.char +local format, match, gmatch, concat, rep = string.format, string.match, string.gmatch, table.concat, string.rep +local lpegmatch = lpeg.match +local clock = os.gettimeofday or os.clock -- should go in environment + +local report_nodes = logs.reporter("nodes","tracing") + +nodes = nodes or { } + +local nodes, node, context = nodes, node, context + +local tracers = nodes.tracers or { } +nodes.tracers = tracers + +local tasks = nodes.tasks or { } +nodes.tasks = tasks + +local handlers = nodes.handlers or {} +nodes.handlers = handlers + +local injections = nodes.injections or { } +nodes.injections = injections + +local traverse_nodes = node.traverse +local traverse_by_id = node.traverse_id +local count_nodes = nodes.count + +local nodecodes = nodes.nodecodes +local whatcodes = nodes.whatcodes +local skipcodes = nodes.skipcodes +local fillcodes = nodes.fillcodes + +local glyph_code = nodecodes.glyph +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local disc_code = nodecodes.disc +local glue_code = nodecodes.glue +local kern_code = nodecodes.kern +local rule_code = nodecodes.rule +local whatsit_code = nodecodes.whatsit +local spec_code = nodecodes.glue_spec + +local localpar_code = whatcodes.localpar +local dir_code = whatcodes.dir + +local nodepool = nodes.pool + +local dimenfactors = number.dimenfactors +local formatters = string.formatters + +-- this will be reorganized: + +function nodes.showlist(head, message) + if message then + report_nodes(message) + end + for n in traverse_nodes(head) do + report_nodes(tostring(n)) + end +end + +function nodes.handlers.checkglyphs(head,message) + local t = { } + for g in traverse_by_id(glyph_code,head) do + t[#t+1] = formatters["%U:%s"](g.char,g.subtype) + end + if #t > 0 then + if message and message ~= "" then + report_nodes("%s, %s glyphs: % t",message,#t,t) + else + report_nodes("%s glyphs: % t",#t,t) + end + end + return false +end + +function nodes.handlers.checkforleaks(sparse) + local l = { } + local q = node.usedlist() + for p in traverse(q) do + local s = table.serialize(nodes.astable(p,sparse),nodecodes[p.id]) + l[s] = (l[s] or 0) + 1 + end + node.flush_list(q) + for k, v in next, l do + write_nl(formatters["%s * %s"](v,k)) + end +end + +local f_sequence = formatters["U+%04X:%s"] + +local function tosequence(start,stop,compact) + if start then + local t = { } + while start do + local id = start.id + if id == glyph_code then + local c = start.char + if compact then + if start.components then + t[#t+1] = tosequence(start.components,nil,compact) + else + t[#t+1] = utfchar(c) + end + else + t[#t+1] = f_sequence(c,utfchar(c)) + end + elseif id == whatsit_code and start.subtype == localpar_code or start.subtype == dir_code then + t[#t+1] = "[" .. start.dir .. 
"]" + elseif id == rule_code then + if compact then + t[#t+1] = "|" + else + t[#t+1] = nodecodes[id] + end + else + if compact then + t[#t+1] = "[]" + else + t[#t+1] = nodecodes[id] + end + end + if start == stop then + break + else + start = start.next + end + end + if compact then + return concat(t) + else + return concat(t," ") + end + else + return "[empty]" + end +end + +nodes.tosequence = tosequence + +function nodes.report(t,done) + report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(t)) +end + +function nodes.packlist(head) + local t = { } + for n in traverse(head) do + t[#t+1] = tostring(n) + end + return t +end + +function nodes.idstostring(head,tail) + local t, last_id, last_n = { }, nil, 0 + for n in traverse_nodes(head,tail) do -- hm, does not stop at tail + local id = n.id + if not last_id then + last_id, last_n = id, 1 + elseif last_id == id then + last_n = last_n + 1 + else + if last_n > 1 then + t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?") + else + t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?") + end + last_id, last_n = id, 1 + end + if n == tail then + break + end + end + if not last_id then + t[#t+1] = "no nodes" + elseif last_n > 1 then + t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?") + else + t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?") + end + return concat(t," ") +end + +-- function nodes.xidstostring(head,tail) -- only for special tracing of backlinks +-- local n = head +-- while n.next do +-- n = n.next +-- end +-- local t, last_id, last_n = { }, nil, 0 +-- while n do +-- local id = n.id +-- if not last_id then +-- last_id, last_n = id, 1 +-- elseif last_id == id then +-- last_n = last_n + 1 +-- else +-- if last_n > 1 then +-- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?") +-- else +-- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?") +-- end +-- last_id, last_n = id, 1 +-- end +-- if n == head then +-- break +-- end +-- n = n.prev +-- end +-- if not last_id then +-- t[#t+1] = "no nodes" +-- elseif last_n > 1 then +-- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?") +-- else +-- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?") +-- end +-- return table.concat(table.reversed(t)," ") +-- end + +local function showsimplelist(h,depth,n) + while h do + write_nl(rep(" ",n) .. 
tostring(h)) + if not depth or n < depth then + local id = h.id + if id == hlist_code or id == vlist_code then + showsimplelist(h.list,depth,n+1) + end + end + h = h.next + end +end + +--~ \startluacode +--~ callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end) +--~ \stopluacode +--~ \vbox{b\footnote{n}a} +--~ \startluacode +--~ callback.register('buildpage_filter',nil) +--~ \stopluacode + +nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end + +local function listtoutf(h,joiner,textonly,last) + local joiner = (joiner == true and utfchar(0x200C)) or joiner -- zwnj + local w = { } + while h do + local id = h.id + if id == glyph_code then -- always true + w[#w+1] = utfchar(h.char) + if joiner then + w[#w+1] = joiner + end + elseif id == disc_code then + local pre, rep, pos = h.pre, h.replace, h.post + w[#w+1] = formatters["[%s|%s|%s]"] ( + pre and listtoutf(pre,joiner,textonly) or "", + rep and listtoutf(rep,joiner,textonly) or "", + mid and listtoutf(mid,joiner,textonly) or "" + ) + elseif textonly then + if id == glue_code and h.spec and h.spec.width > 0 then + w[#w+1] = " " + end + else + w[#w+1] = "[-]" + end + if h == last then + break + else + h = h.next + end + end + return concat(w) +end + +nodes.listtoutf = listtoutf + +local what = { [0] = "unknown", "line", "box", "indent", "row", "cell" } + +local function showboxes(n,symbol,depth) + depth, symbol = depth or 0, symbol or "." + for n in traverse_nodes(n) do + local id = n.id + if id == hlist_code or id == vlist_code then + local s = n.subtype + report_nodes(rep(symbol,depth) .. what[s] or s) + showboxes(n.list,symbol,depth+1) + end + end +end + +nodes.showboxes = showboxes + +local ptfactor = dimenfactors.pt +local bpfactor = dimenfactors.bp +local stripper = lpeg.patterns.stripzeros + +-- start redefinition +-- +-- -- if fmt then +-- -- return formatters[fmt](n*dimenfactors[unit],unit) +-- -- else +-- -- return match(formatters["%.20f"](n*dimenfactors[unit]),"(.-0?)0*$") .. unit +-- -- end +-- +-- redefined: + +local dimenfactors = number.dimenfactors + +local function numbertodimen(d,unit,fmt,strip) + if not d then + local str = formatters[fmt](0,unit) + return strip and lpegmatch(stripper,str) or str + end + local t = type(d) + if t == 'string' then + return d + end + if unit == true then + unit = "pt" + fmt = "%0.5f%s" + else + unit = unit or 'pt' + if not fmt then + fmt = "%s%s" + elseif fmt == true then + fmt = "%0.5f%s" + end + end + if t == "number" then + local str = formatters[fmt](d*dimenfactors[unit],unit) + return strip and lpegmatch(stripper,str) or str + end + local id = node.id + if id == kern_code then + local str = formatters[fmt](d.width*dimenfactors[unit],unit) + return strip and lpegmatch(stripper,str) or str + end + if id == glue_code then + d = d.spec + end + if not d or not d.id == spec_code then + local str = formatters[fmt](0,unit) + return strip and lpegmatch(stripper,str) or str + end + local width = d.width + local plus = d.stretch_order + local minus = d.shrink_order + local stretch = d.stretch + local shrink = d.shrink + if plus ~= 0 then + plus = " plus " .. stretch/65536 .. fillcodes[plus] + elseif stretch ~= 0 then + plus = formatters[fmt](stretch*dimenfactors[unit],unit) + plus = " plus " .. (strip and lpegmatch(stripper,plus) or plus) + else + plus = "" + end + if minus ~= 0 then + minus = " minus " .. shrink/65536 .. 
fillcodes[minus] + elseif shrink ~= 0 then + minus = formatters[fmt](shrink*dimenfactors[unit],unit) + minus = " minus " .. (strip and lpegmatch(stripper,minus) or minus) + else + minus = "" + end + local str = formatters[fmt](d.width*dimenfactors[unit],unit) + return (strip and lpegmatch(stripper,str) or str) .. plus .. minus +end + +number.todimen = numbertodimen + +function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end +function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end +function number.tocentimeters (n,fmt) return numbertodimen(n,"cm",fmt) end +function number.tomillimeters (n,fmt) return numbertodimen(n,"mm",fmt) end +function number.toscaledpoints(n,fmt) return numbertodimen(n,"sp",fmt) end +function number.toscaledpoints(n) return n .. "sp" end +function number.tobasepoints (n,fmt) return numbertodimen(n,"bp",fmt) end +function number.topicas (n,fmt) return numbertodimen(n "pc",fmt) end +function number.todidots (n,fmt) return numbertodimen(n,"dd",fmt) end +function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end +function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end +function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end + +-- stop redefinition + +local points = function(n) + if not n or n == 0 then + return "0pt" + elseif type(n) == "number" then + return lpegmatch(stripper,format("%.5fpt",n*ptfactor)) -- faster than formatter + else + return numbertodimen(n,"pt",true,true) -- also deals with nodes + end +end + +local basepoints = function(n) + if not n or n == 0 then + return "0bp" + elseif type(n) == "number" then + return lpegmatch(stripper,format("%.5fbp",n*bpfactor)) -- faster than formatter + else + return numbertodimen(n,"bp",true,true) -- also deals with nodes + end +end + +local pts = function(n) + if not n or n == 0 then + return "0pt" + elseif type(n) == "number" then + return format("%.5fpt",n*ptfactor) -- faster than formatter + else + return numbertodimen(n,"pt",true) -- also deals with nodes + end +end + +local nopts = function(n) + if not n or n == 0 then + return "0" + else + return format("%.5f",n*ptfactor) -- faster than formatter + end +end + +number.points = points +number.basepoints = basepoints +number.pts = pts +number.nopts = nopts + +local colors = { } +tracers.colors = colors + +local unsetvalue = attributes.unsetvalue + +local a_color = attributes.private('color') +local a_colormodel = attributes.private('colormodel') +local m_color = attributes.list[a_color] or { } + +function colors.set(n,c,s) + local mc = m_color[c] + if not mc then + n[a_color] = unsetvalue + else + if not n[a_colormodel] then + n[a_colormodel] = s or 1 + end + n[a_color] = mc + end + return n +end + +function colors.setlist(n,c,s) + local f = n + while n do + local mc = m_color[c] + if not mc then + n[a_color] = unsetvalue + else + if not n[a_colormodel] then + n[a_colormodel] = s or 1 + end + n[a_color] = mc + end + n = n.next + end + return f +end + +function colors.reset(n) + n[a_color] = unsetvalue + return n +end + +-- maybe + +local transparencies = { } +tracers.transparencies = transparencies + +local a_transparency = attributes.private('transparency') +local m_transparency = attributes.list[a_transparency] or { } + +function transparencies.set(n,t) + local mt = m_transparency[t] + if not mt then + n[a_transparency] = unsetvalue + else + n[a_transparency] = mt + end + return n +end + +function transparencies.setlist(n,c,s) + local f = n + while n do + local mt = m_transparency[c] + if not 
mt then + n[a_transparency] = unsetvalue + else + n[a_transparency] = mt + end + n = n.next + end + return f +end + +function transparencies.reset(n) + n[a_transparency] = unsetvalue + return n +end + +-- for the moment here + +nodes.visualizers = { } + +function nodes.visualizers.handler(head) + return head, false +end diff --git a/tex/context/base/node-tsk.lua b/tex/context/base/node-tsk.lua index 596ac765a..d2686d4d8 100644 --- a/tex/context/base/node-tsk.lua +++ b/tex/context/base/node-tsk.lua @@ -1,402 +1,402 @@ -if not modules then modules = { } end modules ['node-tsk'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This might move to task-* and become less code as in sequencers --- we already have dirty flags as well. On the other hand, nodes are --- rather specialized and here we focus on node related tasks. - -local format = string.format - -local trace_tasks = false trackers.register("tasks.creation", function(v) trace_tasks = v end) - -local report_tasks = logs.reporter("tasks") - -local allocate = utilities.storage.allocate - -local nodes = nodes - -nodes.tasks = nodes.tasks or { } -local tasks = nodes.tasks - -local tasksdata = { } -- no longer public - -local sequencers = utilities.sequencers -local compile = sequencers.compile -local nodeprocessor = sequencers.nodeprocessor - -local frozengroups = "no" - -function tasks.freeze(kind) - frozengroups = kind or "tolerant" -- todo: hook into jobname -end - -function tasks.new(specification) -- was: name,arguments,list - local name = specification.name - local arguments = specification.arguments or 0 - local sequence = specification.sequence - if name and sequence then - local tasklist = sequencers.new { - -- we can move more to the sequencer now .. 
todo - } - tasksdata[name] = { - list = tasklist, - runner = false, - arguments = arguments, - -- sequence = sequence, - frozen = { }, - processor = specification.processor or nodeprocessor - } - for l=1,#sequence do - sequencers.appendgroup(tasklist,sequence[l]) - end - end -end - -local function valid(name) - local data = tasksdata[name] - if not data then - report_tasks("unknown task %a",name) - else - return data - end -end - -local function validgroup(name,group,what) - local data = tasksdata[name] - if not data then - report_tasks("unknown task %a",name) - else - local frozen = data.frozen[group] - if frozen then - if frozengroup == "no" then - -- default - elseif frozengroup == "strict" then - report_tasks("warning: group %a of task %a is frozen, %a applied but not supported",group,name,what) - return - else -- if frozengroup == "tolerant" then - report_tasks("warning: group %a of task %a is frozen, %a ignored",group,name,what) - end - end - return data - end -end - -function tasks.freezegroup(name,group) - local data = valid(name) - if data then - data.frozen[group] = true - end -end - -function tasks.restart(name) - local data = valid(name) - if data then - data.runner = false - end -end - -function tasks.enableaction(name,action) - local data = valid(name) - if data then - sequencers.enableaction(data.list,action) - data.runner = false - end -end - -function tasks.disableaction(name,action) - local data = valid(name) - if data then - sequencers.disableaction(data.list,action) - data.runner = false - end -end - -function tasks.enablegroup(name,group) - local data = validgroup(name,"enable group") - if data then - sequencers.enablegroup(data.list,group) - data.runner = false - end -end - -function tasks.disablegroup(name,group) - local data = validgroup(name,"disable group") - if data then - sequencers.disablegroup(data.list,group) - data.runner = false - end -end - -function tasks.appendaction(name,group,action,where,kind) - local data = validgroup(name,"append action") - if data then - sequencers.appendaction(data.list,group,action,where,kind) - data.runner = false - end -end - -function tasks.prependaction(name,group,action,where,kind) - local data = validgroup(name,"prepend action") - if data then - sequencers.prependaction(data.list,group,action,where,kind) - data.runner = false - end -end - -function tasks.removeaction(name,group,action) - local data = validgroup(name,"remove action") - if data then - sequencers.removeaction(data.list,group,action) - data.runner = false - end -end - -function tasks.showactions(name,group,action,where,kind) - local data = valid(name) - if data then - report_tasks("task %a, list:\n%s",name,nodeprocessor(data.list)) - end -end - --- Optimizing for the number of arguments makes sense, but getting rid of --- the nested call (no problem but then we also need to register the --- callback with this mechanism so that it gets updated) does not save --- much time (24K calls on mk.tex). - -local created, total = 0, 0 - -statistics.register("node list callback tasks", function() - if total > 0 then - return format("%s unique task lists, %s instances (re)created, %s calls",table.count(tasksdata),created,total) - else - return nil - end -end) - -function tasks.actions(name) -- we optimize for the number or arguments (no ...) 
- local data = tasksdata[name] - if data then - local n = data.arguments or 0 - if n == 0 then - return function(head) - total = total + 1 -- will go away - local runner = data.runner - if not runner then - created = created + 1 - if trace_tasks then - report_tasks("creating runner %a",name) - end - runner = compile(data.list,data.processor,0) - data.runner = runner - end - return runner(head) - end - elseif n == 1 then - return function(head,one) - total = total + 1 -- will go away - local runner = data.runner - if not runner then - created = created + 1 - if trace_tasks then - report_tasks("creating runner %a with %s extra arguments",name,1) - end - runner = compile(data.list,data.processor,1) - data.runner = runner - end - return runner(head,one) - end - elseif n == 2 then - return function(head,one,two) - total = total + 1 -- will go away - local runner = data.runner - if not runner then - created = created + 1 - if trace_tasks then - report_tasks("creating runner %a with %s extra arguments",name,2) - end - runner = compile(data.list,data.processor,2) - data.runner = runner - end - return runner(head,one,two) - end - elseif n == 3 then - return function(head,one,two,three) - total = total + 1 -- will go away - local runner = data.runner - if not runner then - created = created + 1 - if trace_tasks then - report_tasks("creating runner %a with %s extra arguments",name,3) - end - runner = compile(data.list,data.processor,3) - data.runner = runner - end - return runner(head,one,two,three) - end - elseif n == 4 then - return function(head,one,two,three,four) - total = total + 1 -- will go away - local runner = data.runner - if not runner then - created = created + 1 - if trace_tasks then - report_tasks("creating runner %a with %s extra arguments",name,4) - end - runner = compile(data.list,data.processor,4) - data.runner = runner - end - return runner(head,one,two,three,four) - end - elseif n == 5 then - return function(head,one,two,three,four,five) - total = total + 1 -- will go away - local runner = data.runner - if not runner then - created = created + 1 - if trace_tasks then - report_tasks("creating runner %a with %s extra arguments",name,5) - end - runner = compile(data.list,data.processor,5) - data.runner = runner - end - return runner(head,one,two,three,four,five) - end - else - return function(head,...) - total = total + 1 -- will go away - local runner = data.runner - if not runner then - created = created + 1 - if trace_tasks then - report_tasks("creating runner %a with %s extra arguments",name,n) - end - runner = compile(data.list,data.processor,"n") - data.runner = runner - end - return runner(head,...) 
- end - end - else - return nil - end -end - -function tasks.table(name) --maybe move this to task-deb.lua - local tsk = tasksdata[name] - local lst = tsk and tsk.list - local HL, NC, NR, bold, type = context.HL, context.NC, context.NR, context.bold, context.type - if lst then - local list, order = lst.list, lst.order - if list and order then - context.starttabulate { "|l|l|" } - NC() bold("category") NC() bold("function") NC() NR() - for i=1,#order do - HL() - local o = order[i] - local l = list[o] - if #l == 0 then - NC() type(o) NC() context("unset") NC() NR() - else - local done = false - for k, v in table.sortedhash(l) do - NC() if not done then type(o) done = true end NC() type(v) NC() NR() - end - end - end - context.stoptabulate() - end - end -end - --- this will move - -tasks.new { - name = "processors", - arguments = 4, - processor = nodeprocessor, - sequence = { - "before", -- for users - "normalizers", - "characters", - "words", - "fonts", - "lists", - "after", -- for users - } -} - -tasks.new { - name = "finalizers", - arguments = 1, - processor = nodeprocessor, - sequence = { - "before", -- for users - "normalizers", --- "characters", --- "finishers", - "fonts", - "lists", - "after", -- for users - } -} - -tasks.new { - name = "shipouts", - arguments = 0, - processor = nodeprocessor, - sequence = { - "before", -- for users - "normalizers", - "finishers", - "after", -- for users - } -} - -tasks.new { - name = "mvlbuilders", - arguments = 1, - processor = nodeprocessor, - sequence = { - "before", -- for users - "normalizers", - "after", -- for users - } -} - -tasks.new { - name = "vboxbuilders", - arguments = 5, - processor = nodeprocessor, - sequence = { - "before", -- for users - "normalizers", - "after", -- for users - } -} - --- tasks.new { --- name = "parbuilders", --- arguments = 1, --- processor = nodeprocessor, --- sequence = { --- "before", -- for users --- "lists", --- "after", -- for users --- } --- } - --- tasks.new { --- name = "pagebuilders", --- arguments = 5, --- processor = nodeprocessor, --- sequence = { --- "before", -- for users --- "lists", --- "after", -- for users --- } --- } +if not modules then modules = { } end modules ['node-tsk'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This might move to task-* and become less code as in sequencers +-- we already have dirty flags as well. On the other hand, nodes are +-- rather specialized and here we focus on node related tasks. + +local format = string.format + +local trace_tasks = false trackers.register("tasks.creation", function(v) trace_tasks = v end) + +local report_tasks = logs.reporter("tasks") + +local allocate = utilities.storage.allocate + +local nodes = nodes + +nodes.tasks = nodes.tasks or { } +local tasks = nodes.tasks + +local tasksdata = { } -- no longer public + +local sequencers = utilities.sequencers +local compile = sequencers.compile +local nodeprocessor = sequencers.nodeprocessor + +local frozengroups = "no" + +function tasks.freeze(kind) + frozengroups = kind or "tolerant" -- todo: hook into jobname +end + +function tasks.new(specification) -- was: name,arguments,list + local name = specification.name + local arguments = specification.arguments or 0 + local sequence = specification.sequence + if name and sequence then + local tasklist = sequencers.new { + -- we can move more to the sequencer now .. 
todo + } + tasksdata[name] = { + list = tasklist, + runner = false, + arguments = arguments, + -- sequence = sequence, + frozen = { }, + processor = specification.processor or nodeprocessor + } + for l=1,#sequence do + sequencers.appendgroup(tasklist,sequence[l]) + end + end +end + +local function valid(name) + local data = tasksdata[name] + if not data then + report_tasks("unknown task %a",name) + else + return data + end +end + +local function validgroup(name,group,what) + local data = tasksdata[name] + if not data then + report_tasks("unknown task %a",name) + else + local frozen = data.frozen[group] + if frozen then + if frozengroup == "no" then + -- default + elseif frozengroup == "strict" then + report_tasks("warning: group %a of task %a is frozen, %a applied but not supported",group,name,what) + return + else -- if frozengroup == "tolerant" then + report_tasks("warning: group %a of task %a is frozen, %a ignored",group,name,what) + end + end + return data + end +end + +function tasks.freezegroup(name,group) + local data = valid(name) + if data then + data.frozen[group] = true + end +end + +function tasks.restart(name) + local data = valid(name) + if data then + data.runner = false + end +end + +function tasks.enableaction(name,action) + local data = valid(name) + if data then + sequencers.enableaction(data.list,action) + data.runner = false + end +end + +function tasks.disableaction(name,action) + local data = valid(name) + if data then + sequencers.disableaction(data.list,action) + data.runner = false + end +end + +function tasks.enablegroup(name,group) + local data = validgroup(name,"enable group") + if data then + sequencers.enablegroup(data.list,group) + data.runner = false + end +end + +function tasks.disablegroup(name,group) + local data = validgroup(name,"disable group") + if data then + sequencers.disablegroup(data.list,group) + data.runner = false + end +end + +function tasks.appendaction(name,group,action,where,kind) + local data = validgroup(name,"append action") + if data then + sequencers.appendaction(data.list,group,action,where,kind) + data.runner = false + end +end + +function tasks.prependaction(name,group,action,where,kind) + local data = validgroup(name,"prepend action") + if data then + sequencers.prependaction(data.list,group,action,where,kind) + data.runner = false + end +end + +function tasks.removeaction(name,group,action) + local data = validgroup(name,"remove action") + if data then + sequencers.removeaction(data.list,group,action) + data.runner = false + end +end + +function tasks.showactions(name,group,action,where,kind) + local data = valid(name) + if data then + report_tasks("task %a, list:\n%s",name,nodeprocessor(data.list)) + end +end + +-- Optimizing for the number of arguments makes sense, but getting rid of +-- the nested call (no problem but then we also need to register the +-- callback with this mechanism so that it gets updated) does not save +-- much time (24K calls on mk.tex). + +local created, total = 0, 0 + +statistics.register("node list callback tasks", function() + if total > 0 then + return format("%s unique task lists, %s instances (re)created, %s calls",table.count(tasksdata),created,total) + else + return nil + end +end) + +function tasks.actions(name) -- we optimize for the number or arguments (no ...) 
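    -- A minimal usage sketch, in the spirit of the commented examples elsewhere
    -- in these files (the head variable and the extra arguments are hypothetical
    -- placeholders): a caller fetches the compiled runner once and then applies
    -- it per node list; the "processors" list declared below takes 4 extra
    -- arguments and the runner hands back the (possibly new) head plus a done flag.
    --
    --   local process = nodes.tasks.actions("processors")
    --   local head, done = process(somehead,one,two,three,four)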
+ local data = tasksdata[name] + if data then + local n = data.arguments or 0 + if n == 0 then + return function(head) + total = total + 1 -- will go away + local runner = data.runner + if not runner then + created = created + 1 + if trace_tasks then + report_tasks("creating runner %a",name) + end + runner = compile(data.list,data.processor,0) + data.runner = runner + end + return runner(head) + end + elseif n == 1 then + return function(head,one) + total = total + 1 -- will go away + local runner = data.runner + if not runner then + created = created + 1 + if trace_tasks then + report_tasks("creating runner %a with %s extra arguments",name,1) + end + runner = compile(data.list,data.processor,1) + data.runner = runner + end + return runner(head,one) + end + elseif n == 2 then + return function(head,one,two) + total = total + 1 -- will go away + local runner = data.runner + if not runner then + created = created + 1 + if trace_tasks then + report_tasks("creating runner %a with %s extra arguments",name,2) + end + runner = compile(data.list,data.processor,2) + data.runner = runner + end + return runner(head,one,two) + end + elseif n == 3 then + return function(head,one,two,three) + total = total + 1 -- will go away + local runner = data.runner + if not runner then + created = created + 1 + if trace_tasks then + report_tasks("creating runner %a with %s extra arguments",name,3) + end + runner = compile(data.list,data.processor,3) + data.runner = runner + end + return runner(head,one,two,three) + end + elseif n == 4 then + return function(head,one,two,three,four) + total = total + 1 -- will go away + local runner = data.runner + if not runner then + created = created + 1 + if trace_tasks then + report_tasks("creating runner %a with %s extra arguments",name,4) + end + runner = compile(data.list,data.processor,4) + data.runner = runner + end + return runner(head,one,two,three,four) + end + elseif n == 5 then + return function(head,one,two,three,four,five) + total = total + 1 -- will go away + local runner = data.runner + if not runner then + created = created + 1 + if trace_tasks then + report_tasks("creating runner %a with %s extra arguments",name,5) + end + runner = compile(data.list,data.processor,5) + data.runner = runner + end + return runner(head,one,two,three,four,five) + end + else + return function(head,...) + total = total + 1 -- will go away + local runner = data.runner + if not runner then + created = created + 1 + if trace_tasks then + report_tasks("creating runner %a with %s extra arguments",name,n) + end + runner = compile(data.list,data.processor,"n") + data.runner = runner + end + return runner(head,...) 
+ end + end + else + return nil + end +end + +function tasks.table(name) --maybe move this to task-deb.lua + local tsk = tasksdata[name] + local lst = tsk and tsk.list + local HL, NC, NR, bold, type = context.HL, context.NC, context.NR, context.bold, context.type + if lst then + local list, order = lst.list, lst.order + if list and order then + context.starttabulate { "|l|l|" } + NC() bold("category") NC() bold("function") NC() NR() + for i=1,#order do + HL() + local o = order[i] + local l = list[o] + if #l == 0 then + NC() type(o) NC() context("unset") NC() NR() + else + local done = false + for k, v in table.sortedhash(l) do + NC() if not done then type(o) done = true end NC() type(v) NC() NR() + end + end + end + context.stoptabulate() + end + end +end + +-- this will move + +tasks.new { + name = "processors", + arguments = 4, + processor = nodeprocessor, + sequence = { + "before", -- for users + "normalizers", + "characters", + "words", + "fonts", + "lists", + "after", -- for users + } +} + +tasks.new { + name = "finalizers", + arguments = 1, + processor = nodeprocessor, + sequence = { + "before", -- for users + "normalizers", +-- "characters", +-- "finishers", + "fonts", + "lists", + "after", -- for users + } +} + +tasks.new { + name = "shipouts", + arguments = 0, + processor = nodeprocessor, + sequence = { + "before", -- for users + "normalizers", + "finishers", + "after", -- for users + } +} + +tasks.new { + name = "mvlbuilders", + arguments = 1, + processor = nodeprocessor, + sequence = { + "before", -- for users + "normalizers", + "after", -- for users + } +} + +tasks.new { + name = "vboxbuilders", + arguments = 5, + processor = nodeprocessor, + sequence = { + "before", -- for users + "normalizers", + "after", -- for users + } +} + +-- tasks.new { +-- name = "parbuilders", +-- arguments = 1, +-- processor = nodeprocessor, +-- sequence = { +-- "before", -- for users +-- "lists", +-- "after", -- for users +-- } +-- } + +-- tasks.new { +-- name = "pagebuilders", +-- arguments = 5, +-- processor = nodeprocessor, +-- sequence = { +-- "before", -- for users +-- "lists", +-- "after", -- for users +-- } +-- } diff --git a/tex/context/base/node-tst.lua b/tex/context/base/node-tst.lua index bfe0051bd..98743ca0d 100644 --- a/tex/context/base/node-tst.lua +++ b/tex/context/base/node-tst.lua @@ -1,120 +1,120 @@ -if not modules then modules = { } end modules ['node-tst'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local nodes, node = nodes, node - -local chardata = characters.data -local nodecodes = nodes.nodecodes -local skipcodes = nodes.skipcodes - -local glue_code = nodecodes.glue -local penalty_code = nodecodes.penalty -local kern_code = nodecodes.kern -local glyph_code = nodecodes.glyph -local whatsit_code = nodecodes.whatsit -local hlist_code = nodecodes.hlist - -local leftskip_code = skipcodes.leftskip -local rightskip_code = skipcodes.rightskip -local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip -local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip - -local find_node_tail = node.tail or node.slide - -function nodes.leftmarginwidth(n) -- todo: three values - while n do - local id = n.id - if id == glue_code then - return n.subtype == leftskip_code and n.spec.width or 0 - elseif id == whatsit_code then - n = n.next - elseif id == hlist_code then - return n.width - else - break - end 
- end - return 0 -end - -function nodes.rightmarginwidth(n) - if n then - n = find_node_tail(n) - while n do - local id = n.id - if id == glue_code then - return n.subtype == rightskip_code and n.spec.width or 0 - elseif id == whatsit_code then - n = n.prev - else - break - end - end - end - return false -end - -function nodes.somespace(n,all) - if n then - local id = n.id - if id == glue_code then - return (all or (n.spec.width ~= 0)) and glue_code - elseif id == kern_code then - return (all or (n.kern ~= 0)) and kern - elseif id == glyph_code then - local category = chardata[n.char].category - -- maybe more category checks are needed - return (category == "zs") and glyph_code - end - end - return false -end - -function nodes.somepenalty(n,value) - if n then - local id = n.id - if id == penalty_code then - if value then - return n.penalty == value - else - return true - end - end - end - return false -end - -function nodes.is_display_math(head) - local n = head.prev - while n do - local id = n.id - if id == penalty_code then - elseif id == glue_code then - if n.subtype == abovedisplayshortskip_code then - return true - end - else - break - end - n = n.prev - end - n = head.next - while n do - local id = n.id - if id == penalty_code then - elseif id == glue_code then - if n.subtype == belowdisplayshortskip_code then - return true - end - else - break - end - n = n.next - end - return false -end +if not modules then modules = { } end modules ['node-tst'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local nodes, node = nodes, node + +local chardata = characters.data +local nodecodes = nodes.nodecodes +local skipcodes = nodes.skipcodes + +local glue_code = nodecodes.glue +local penalty_code = nodecodes.penalty +local kern_code = nodecodes.kern +local glyph_code = nodecodes.glyph +local whatsit_code = nodecodes.whatsit +local hlist_code = nodecodes.hlist + +local leftskip_code = skipcodes.leftskip +local rightskip_code = skipcodes.rightskip +local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip +local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip + +local find_node_tail = node.tail or node.slide + +function nodes.leftmarginwidth(n) -- todo: three values + while n do + local id = n.id + if id == glue_code then + return n.subtype == leftskip_code and n.spec.width or 0 + elseif id == whatsit_code then + n = n.next + elseif id == hlist_code then + return n.width + else + break + end + end + return 0 +end + +function nodes.rightmarginwidth(n) + if n then + n = find_node_tail(n) + while n do + local id = n.id + if id == glue_code then + return n.subtype == rightskip_code and n.spec.width or 0 + elseif id == whatsit_code then + n = n.prev + else + break + end + end + end + return false +end + +function nodes.somespace(n,all) + if n then + local id = n.id + if id == glue_code then + return (all or (n.spec.width ~= 0)) and glue_code + elseif id == kern_code then + return (all or (n.kern ~= 0)) and kern + elseif id == glyph_code then + local category = chardata[n.char].category + -- maybe more category checks are needed + return (category == "zs") and glyph_code + end + end + return false +end + +function nodes.somepenalty(n,value) + if n then + local id = n.id + if id == penalty_code then + if value then + return n.penalty == value + else + return true + end + end + end + return false +end + +function 
nodes.is_display_math(head) + local n = head.prev + while n do + local id = n.id + if id == penalty_code then + elseif id == glue_code then + if n.subtype == abovedisplayshortskip_code then + return true + end + else + break + end + n = n.prev + end + n = head.next + while n do + local id = n.id + if id == penalty_code then + elseif id == glue_code then + if n.subtype == belowdisplayshortskip_code then + return true + end + else + break + end + n = n.next + end + return false +end diff --git a/tex/context/base/node-typ.lua b/tex/context/base/node-typ.lua index 6e1a31643..25ad31f83 100644 --- a/tex/context/base/node-typ.lua +++ b/tex/context/base/node-typ.lua @@ -1,79 +1,79 @@ -if not modules then modules = { } end modules ['node-typ'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local utfvalues = utf.values - -local currentfont = font.current -local fontparameters = fonts.hashes.parameters - -local hpack = node.hpack -local vpack = node.vpack -local fast_hpack = nodes.fasthpack - -local nodepool = nodes.pool - -local newglyph = nodepool.glyph -local newglue = nodepool.glue - -typesetters = typesetters or { } - -local function tonodes(str,fontid,spacing) -- quick and dirty - local head, prev = nil, nil - if not fontid then - fontid = currentfont() - end - local fp = fontparameters[fontid] - local s, p, m - if spacing then - s, p, m = spacing, 0, 0 - else - s, p, m = fp.space, fp.space_stretch, fp,space_shrink - end - local spacedone = false - for c in utfvalues(str) do - local next - if c == 32 then - if not spacedone then - next = newglue(s,p,m) - spacedone = true - end - else - next = newglyph(fontid or 1,c) - spacedone = false - end - if not next then - -- nothing - elseif not head then - head = next - else - prev.next = next - next.prev = prev - end - prev = next - end - return head -end - -typesetters.tonodes = tonodes - -function typesetters.hpack(str,fontid,spacing) - return hpack(tonodes(str,fontid,spacing),"exactly") -end - -function typesetters.fast_hpack(str,fontid,spacing) - return fast_hpack(tonodes(str,fontid,spacing),"exactly") -end - -function typesetters.vpack(str,fontid,spacing) - -- vpack is just a hack, and a proper implentation is on the agenda - -- as it needs more info etc than currently available - return vpack(tonodes(str,fontid,spacing)) -end - ---~ node.write(typesetters.hpack("Hello World!")) ---~ node.write(typesetters.hpack("Hello World!",1,100*1024*10)) +if not modules then modules = { } end modules ['node-typ'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local utfvalues = utf.values + +local currentfont = font.current +local fontparameters = fonts.hashes.parameters + +local hpack = node.hpack +local vpack = node.vpack +local fast_hpack = nodes.fasthpack + +local nodepool = nodes.pool + +local newglyph = nodepool.glyph +local newglue = nodepool.glue + +typesetters = typesetters or { } + +local function tonodes(str,fontid,spacing) -- quick and dirty + local head, prev = nil, nil + if not fontid then + fontid = currentfont() + end + local fp = fontparameters[fontid] + local s, p, m + if spacing then + s, p, m = spacing, 0, 0 + else + s, p, m = fp.space, fp.space_stretch, fp,space_shrink + end + local spacedone = 
false + for c in utfvalues(str) do + local next + if c == 32 then + if not spacedone then + next = newglue(s,p,m) + spacedone = true + end + else + next = newglyph(fontid or 1,c) + spacedone = false + end + if not next then + -- nothing + elseif not head then + head = next + else + prev.next = next + next.prev = prev + end + prev = next + end + return head +end + +typesetters.tonodes = tonodes + +function typesetters.hpack(str,fontid,spacing) + return hpack(tonodes(str,fontid,spacing),"exactly") +end + +function typesetters.fast_hpack(str,fontid,spacing) + return fast_hpack(tonodes(str,fontid,spacing),"exactly") +end + +function typesetters.vpack(str,fontid,spacing) + -- vpack is just a hack, and a proper implentation is on the agenda + -- as it needs more info etc than currently available + return vpack(tonodes(str,fontid,spacing)) +end + +--~ node.write(typesetters.hpack("Hello World!")) +--~ node.write(typesetters.hpack("Hello World!",1,100*1024*10)) diff --git a/tex/context/base/pack-obj.lua b/tex/context/base/pack-obj.lua index 1e4e0f59e..b218a0a5c 100644 --- a/tex/context/base/pack-obj.lua +++ b/tex/context/base/pack-obj.lua @@ -1,77 +1,77 @@ -if not modules then modules = { } end modules ['pack-obj'] = { - version = 1.001, - comment = "companion to pack-obj.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

    We save object references in the main utility table. jobobjects are -reusable components.

    ---ldx]]-- - -local commands, context = commands, context - -local texcount = tex.count -local allocate = utilities.storage.allocate - -local collected = allocate() -local tobesaved = allocate() - -local jobobjects = { - collected = collected, - tobesaved = tobesaved, -} - -job.objects = jobobjects - -local function initializer() - collected = jobobjects.collected - tobesaved = jobobjects.tobesaved -end - -job.register('job.objects.collected', tobesaved, initializer, nil) - -function jobobjects.save(tag,number,page) - local t = { number, page } - tobesaved[tag], collected[tag] = t, t -end - -function jobobjects.set(tag,number,page) - collected[tag] = { number, page } -end - -function jobobjects.get(tag) - return collected[tag] or tobesaved[tag] -end - -function jobobjects.number(tag,default) - local o = collected[tag] or tobesaved[tag] - return o and o[1] or default -end - -function jobobjects.page(tag,default) - local o = collected[tag] or tobesaved[tag] - return o and o[2] or default -end - --- interface - -commands.saveobject = jobobjects.save -commands.setobject = jobobjects.set - -function commands.objectnumber(tag,default) - local o = collected[tag] or tobesaved[tag] - context(o and o[1] or default) -end - -function commands.objectpage(tag,default) - local o = collected[tag] or tobesaved[tag] - context(o and o[2] or default) -end - -function commands.doifobjectreferencefoundelse(tag) - commands.doifelse(collected[tag] or tobesaved[tag]) -end - +if not modules then modules = { } end modules ['pack-obj'] = { + version = 1.001, + comment = "companion to pack-obj.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

    We save object references in the main utility table. jobobjects are +reusable components.
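    A minimal usage sketch (the tag and the stored values are arbitrary, picked
    only for illustration): save couples a tag to a (number, page) pair, and the
    getters fall back to the given default when the tag is unknown.

        jobobjects.save("demo",12,3)          -- stored in tobesaved and collected
        local n = jobobjects.number("demo",0) -- 12
        local p = jobobjects.page("demo",0)   -- 3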

    +--ldx]]-- + +local commands, context = commands, context + +local texcount = tex.count +local allocate = utilities.storage.allocate + +local collected = allocate() +local tobesaved = allocate() + +local jobobjects = { + collected = collected, + tobesaved = tobesaved, +} + +job.objects = jobobjects + +local function initializer() + collected = jobobjects.collected + tobesaved = jobobjects.tobesaved +end + +job.register('job.objects.collected', tobesaved, initializer, nil) + +function jobobjects.save(tag,number,page) + local t = { number, page } + tobesaved[tag], collected[tag] = t, t +end + +function jobobjects.set(tag,number,page) + collected[tag] = { number, page } +end + +function jobobjects.get(tag) + return collected[tag] or tobesaved[tag] +end + +function jobobjects.number(tag,default) + local o = collected[tag] or tobesaved[tag] + return o and o[1] or default +end + +function jobobjects.page(tag,default) + local o = collected[tag] or tobesaved[tag] + return o and o[2] or default +end + +-- interface + +commands.saveobject = jobobjects.save +commands.setobject = jobobjects.set + +function commands.objectnumber(tag,default) + local o = collected[tag] or tobesaved[tag] + context(o and o[1] or default) +end + +function commands.objectpage(tag,default) + local o = collected[tag] or tobesaved[tag] + context(o and o[2] or default) +end + +function commands.doifobjectreferencefoundelse(tag) + commands.doifelse(collected[tag] or tobesaved[tag]) +end + diff --git a/tex/context/base/pack-rul.lua b/tex/context/base/pack-rul.lua index a990936e7..3dcabc3da 100644 --- a/tex/context/base/pack-rul.lua +++ b/tex/context/base/pack-rul.lua @@ -1,109 +1,109 @@ -if not modules then modules = { } end modules ['pack-rul'] = { - version = 1.001, - comment = "companion to pack-rul.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

    An explanation is given in the history document mk.

    ---ldx]]-- - -local texsetdimen, texsetcount, texbox = tex.setdimen, tex.setcount, tex.box -local hpack, free, copy, traverse_id = node.hpack, node.free, node.copy_list, node.traverse_id -local texdimen, texcount = tex.dimen, tex.count - -local hlist_code = nodes.nodecodes.hlist -local box_code = nodes.listcodes.box -local node_dimensions = node.dimensions - -function commands.doreshapeframedbox(n) - local box = texbox[n] - local noflines = 0 - local firstheight = nil - local lastdepth = nil - local lastlinelength = 0 - local minwidth = 0 - local maxwidth = 0 - local totalwidth = 0 - if box.width ~= 0 then - local list = box.list - if list then - for h in traverse_id(hlist_code,list) do -- no dir etc needed - if not firstheight then - firstheight = h.height - end - lastdepth = h.depth - noflines = noflines + 1 - local l = h.list - if l then - if h.subtype == box_code then -- maybe more - lastlinelength = h.width - else - lastlinelength = node_dimensions(l) -- used to be: hpack(copy(l)).width - end - if lastlinelength > maxwidth then - maxwidth = lastlinelength - end - if lastlinelength < minwidth or minwidth == 0 then - minwidth = lastlinelength - end - totalwidth = totalwidth + lastlinelength - end - end - if firstheight then - if maxwidth ~= 0 then - for h in traverse_id(hlist_code,list) do - local l = h.list - if l then - if h.subtype == box_code then - -- explicit box, no 'line' - else - -- if h.width ~= maxwidth then -- else no display math handling (uses shift) - -- challenge: adapt glue_set - -- h.glue_set = h.glue_set * h.width/maxwidth -- interesting ... doesn't matter much - -- h.width = maxwidth - h.list = hpack(l,maxwidth,'exactly',h.dir) - h.shift = 0 -- needed for display math - h.width = maxwidth - -- end - end - end - end - end - box.width = maxwidth - end - end - end - -- print("reshape", noflines, firstheight or 0, lastdepth or 0) - texsetcount("global","framednoflines", noflines) - texsetdimen("global","framedfirstheight", firstheight or 0) - texsetdimen("global","framedlastdepth", lastdepth or 0) - texsetdimen("global","framedminwidth", minwidth) - texsetdimen("global","framedmaxwidth", maxwidth) - texsetdimen("global","framedaveragewidth", noflines > 0 and totalwidth/noflines or 0) -end - -function commands.doanalyzeframedbox(n) - local box = texbox[n] - local noflines = 0 - local firstheight = nil - local lastdepth = nil - if box.width ~= 0 then - local list = box.list - if list then - for h in traverse_id(hlist_code,list) do - if not firstheight then - firstheight = h.height - end - lastdepth = h.depth - noflines = noflines + 1 - end - end - end - -- print("analyze", noflines, firstheight or 0, lastdepth or 0) - texsetcount("global","framednoflines", noflines) - texsetdimen("global","framedfirstheight", firstheight or 0) - texsetdimen("global","framedlastdepth", lastdepth or 0) -end +if not modules then modules = { } end modules ['pack-rul'] = { + version = 1.001, + comment = "companion to pack-rul.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

    An explanation is given in the history document mk.

    +--ldx]]-- + +local texsetdimen, texsetcount, texbox = tex.setdimen, tex.setcount, tex.box +local hpack, free, copy, traverse_id = node.hpack, node.free, node.copy_list, node.traverse_id +local texdimen, texcount = tex.dimen, tex.count + +local hlist_code = nodes.nodecodes.hlist +local box_code = nodes.listcodes.box +local node_dimensions = node.dimensions + +function commands.doreshapeframedbox(n) + local box = texbox[n] + local noflines = 0 + local firstheight = nil + local lastdepth = nil + local lastlinelength = 0 + local minwidth = 0 + local maxwidth = 0 + local totalwidth = 0 + if box.width ~= 0 then + local list = box.list + if list then + for h in traverse_id(hlist_code,list) do -- no dir etc needed + if not firstheight then + firstheight = h.height + end + lastdepth = h.depth + noflines = noflines + 1 + local l = h.list + if l then + if h.subtype == box_code then -- maybe more + lastlinelength = h.width + else + lastlinelength = node_dimensions(l) -- used to be: hpack(copy(l)).width + end + if lastlinelength > maxwidth then + maxwidth = lastlinelength + end + if lastlinelength < minwidth or minwidth == 0 then + minwidth = lastlinelength + end + totalwidth = totalwidth + lastlinelength + end + end + if firstheight then + if maxwidth ~= 0 then + for h in traverse_id(hlist_code,list) do + local l = h.list + if l then + if h.subtype == box_code then + -- explicit box, no 'line' + else + -- if h.width ~= maxwidth then -- else no display math handling (uses shift) + -- challenge: adapt glue_set + -- h.glue_set = h.glue_set * h.width/maxwidth -- interesting ... doesn't matter much + -- h.width = maxwidth + h.list = hpack(l,maxwidth,'exactly',h.dir) + h.shift = 0 -- needed for display math + h.width = maxwidth + -- end + end + end + end + end + box.width = maxwidth + end + end + end + -- print("reshape", noflines, firstheight or 0, lastdepth or 0) + texsetcount("global","framednoflines", noflines) + texsetdimen("global","framedfirstheight", firstheight or 0) + texsetdimen("global","framedlastdepth", lastdepth or 0) + texsetdimen("global","framedminwidth", minwidth) + texsetdimen("global","framedmaxwidth", maxwidth) + texsetdimen("global","framedaveragewidth", noflines > 0 and totalwidth/noflines or 0) +end + +function commands.doanalyzeframedbox(n) + local box = texbox[n] + local noflines = 0 + local firstheight = nil + local lastdepth = nil + if box.width ~= 0 then + local list = box.list + if list then + for h in traverse_id(hlist_code,list) do + if not firstheight then + firstheight = h.height + end + lastdepth = h.depth + noflines = noflines + 1 + end + end + end + -- print("analyze", noflines, firstheight or 0, lastdepth or 0) + texsetcount("global","framednoflines", noflines) + texsetdimen("global","framedfirstheight", firstheight or 0) + texsetdimen("global","framedlastdepth", lastdepth or 0) +end diff --git a/tex/context/base/page-flt.lua b/tex/context/base/page-flt.lua index ab7a534eb..cd78b9356 100644 --- a/tex/context/base/page-flt.lua +++ b/tex/context/base/page-flt.lua @@ -1,289 +1,289 @@ -if not modules then modules = { } end modules ['page-flt'] = { - version = 1.001, - comment = "companion to page-flt.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- floats -> managers.floats --- some functions are a tex/lua mix so we need a separation - -local insert, remove = table.insert, table.remove -local find = string.find -local setdimen, setcount, texbox = 
tex.setdimen, tex.setcount, tex.box - -local copy_node_list = node.copy_list - -local trace_floats = false trackers.register("graphics.floats", function(v) trace_floats = v end) -- name might change - -local report_floats = logs.reporter("structure","floats") - -local C, S, P, lpegmatch = lpeg.C, lpeg.S, lpeg.P, lpeg.match - --- we use floatbox, floatwidth, floatheight --- text page leftpage rightpage (todo: top, bottom, margin, order) - -floats = floats or { } -local floats = floats - -local noffloats, last, default, pushed = 0, nil, "text", { } - -local function initialize() - return { - text = { }, - page = { }, - leftpage = { }, - rightpage = { }, - somewhere = { }, - } -end - -local stacks = initialize() - --- list location - -function floats.stacked(which) -- floats.thenofstacked - return #stacks[which or default] -end - -function floats.push() - insert(pushed,stacks) - stacks = initialize() - setcount("global","savednoffloats",0) -end - -function floats.pop() - local popped = remove(pushed) - if popped then - for which, stack in next, stacks do - for i=1,#stack do - insert(popped[which],stack[i]) - end - end - stacks = popped - setcount("global","savednoffloats",#stacks[default]) - end -end - -local function setdimensions(b) - local w, h, d = 0, 0, 0 - if b then - w, h, d = b.width, b.height, b.depth - end - setdimen("global","floatwidth", w) - setdimen("global","floatheight", h+d) - return w, h, d -end - -local function get(stack,n,bylabel) - if bylabel then - for i=1,#stack do - local s = stack[i] - local n = string.topattern(tostring(n)) -- to be sure - if find(s.data.label,n) then - return s, s.box, i - end - end - else - n = n or #stack - if n > 0 then - local t = stack[n] - if t then - return t, t.box, n - end - end - end -end - -function floats.save(which,data) - which = which or default - local b = texbox.floatbox - if b then - local stack = stacks[which] - noffloats = noffloats + 1 - local w, h, d = b.width, b.height, b.depth - local t = { - n = noffloats, - data = data or { }, - box = copy_node_list(b), - } - texbox.floatbox = nil - insert(stack,t) - setcount("global","savednoffloats",#stacks[default]) - if trace_floats then - report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","saving",which,noffloats,#stack,w,h,d) - else - interfaces.showmessage("floatblocks",2,noffloats) - end - else - report_floats("ignoring empty, category %a, number %a",which,noffloats) - end -end - -function floats.resave(which) - if last then - which = which or default - local stack = stacks[which] - local b = texbox.floatbox - local w, h, d = b.width, b.height, b.depth - last.box = copy_node_list(b) - texbox.floatbox = nil - insert(stack,1,last) - setcount("global","savednoffloats",#stacks[default]) - if trace_floats then - report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","resaving",which,noffloats,#stack,w,h,d) - else - interfaces.showmessage("floatblocks",2,noffloats) - end - else - report_floats("unable to resave float") - end -end - -function floats.flush(which,n,bylabel) - which = which or default - local stack = stacks[which] - local t, b, n = get(stack,n or 1,bylabel) - if t then - local w, h, d = setdimensions(b) - if trace_floats then - report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","flushing",which,t.n,n,w,h,d) - else - interfaces.showmessage("floatblocks",3,t.n) - end - texbox.floatbox = b - last = remove(stack,n) - last.box = nil - setcount("global","savednoffloats",#stacks[default]) -- 
default? - else - setdimensions() - end -end - -function floats.consult(which,n) - which = which or default - local stack = stacks[which] - local t, b, n = get(stack,n) - if t then - local w, h, d = setdimensions(b) - if trace_floats then - report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","consulting",which,t.n,n,w,h,d) - end - return t, b, n - else - if trace_floats then - report_floats("nothing to consult") - end - setdimensions() - end -end - -function floats.collect(which,maxwidth,distance) - which = which or default - local stack = stacks[which] - local n, m = #stack, 0 - for i=1,n do - local t, b, n = get(stack,i) - if t then - local w, h, d = setdimensions(b) - if w + distance < maxwidth then - m = m + 1 - maxwidth = maxwidth - w - distance - else - break - end - else - break - end - end - if m == 0 then - m = 1 - end - setcount("global","nofcollectedfloats",m) -end - -function floats.getvariable(name,default) - local value = last and last.data[name] or default - return value ~= "" and value -end - -function floats.checkedpagefloat(packed) - if structures.pages.is_odd() then - if #stacks.rightpage > 0 then - return "rightpage" - elseif #stacks.page > 0 then - return "page" - elseif #stacks.leftpage > 0 then - if packed then - return "leftpage" - end - end - else - if #stacks.leftpage > 0 then - return "leftpage" - elseif #stacks.page > 0 then - return "page" - elseif #stacks.rightpage > 0 then - if packed then - return "rightpage" - end - end - end -end - -function floats.nofstacked() - return #stacks[which or default] or 0 -end - --- todo: check for digits ! - -local method = C((1-S(", :"))^1) -local position = P(":") * C((1-S("*,"))^1) * (P("*") * C((1-S(","))^1))^0 -local label = P(":") * C((1-S(",*: "))^0) - -local pattern = method * ( - label * position * C("") - + C("") * position * C("") - + label * C("") * C("") - + C("") * C("") * C("") -) + C("") * C("") * C("") * C("") - --- inspect { lpegmatch(pattern,"somewhere:blabla,crap") } --- inspect { lpegmatch(pattern,"somewhere:1*2") } --- inspect { lpegmatch(pattern,"somewhere:blabla:1*2") } --- inspect { lpegmatch(pattern,"somewhere::1*2") } --- inspect { lpegmatch(pattern,"somewhere,") } --- inspect { lpegmatch(pattern,"somewhere") } --- inspect { lpegmatch(pattern,"") } - -function floats.analysemethod(str) -- will become a more extensive parser - return lpegmatch(pattern,str or "") -end - --- interface - -local context = context -local setvalue = context.setvalue - -commands.flushfloat = floats.flush -commands.savefloat = floats.save -commands.resavefloat = floats.resave -commands.pushfloat = floats.push -commands.popfloat = floats.pop -commands.consultfloat = floats.consult -commands.collectfloat = floats.collect - -function commands.getfloatvariable (...) local v = floats.getvariable(...) if v then context(v) end end -function commands.checkedpagefloat (...) local v = floats.checkedpagefloat(...) if v then context(v) end end - -function commands.nofstackedfloats (...) context(floats.nofstacked(...)) end -function commands.doifelsesavedfloat(...) 
commands.doifelse(floats.nofstacked(...)>0) end - -function commands.analysefloatmethod(str) -- currently only one method - local method, label, row, column = floats.analysemethod(str) - setvalue("floatmethod",method or "") - setvalue("floatlabel", label or "") - setvalue("floatrow", row or "") - setvalue("floatcolumn",column or "") -end +if not modules then modules = { } end modules ['page-flt'] = { + version = 1.001, + comment = "companion to page-flt.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- floats -> managers.floats +-- some functions are a tex/lua mix so we need a separation + +local insert, remove = table.insert, table.remove +local find = string.find +local setdimen, setcount, texbox = tex.setdimen, tex.setcount, tex.box + +local copy_node_list = node.copy_list + +local trace_floats = false trackers.register("graphics.floats", function(v) trace_floats = v end) -- name might change + +local report_floats = logs.reporter("structure","floats") + +local C, S, P, lpegmatch = lpeg.C, lpeg.S, lpeg.P, lpeg.match + +-- we use floatbox, floatwidth, floatheight +-- text page leftpage rightpage (todo: top, bottom, margin, order) + +floats = floats or { } +local floats = floats + +local noffloats, last, default, pushed = 0, nil, "text", { } + +local function initialize() + return { + text = { }, + page = { }, + leftpage = { }, + rightpage = { }, + somewhere = { }, + } +end + +local stacks = initialize() + +-- list location + +function floats.stacked(which) -- floats.thenofstacked + return #stacks[which or default] +end + +function floats.push() + insert(pushed,stacks) + stacks = initialize() + setcount("global","savednoffloats",0) +end + +function floats.pop() + local popped = remove(pushed) + if popped then + for which, stack in next, stacks do + for i=1,#stack do + insert(popped[which],stack[i]) + end + end + stacks = popped + setcount("global","savednoffloats",#stacks[default]) + end +end + +local function setdimensions(b) + local w, h, d = 0, 0, 0 + if b then + w, h, d = b.width, b.height, b.depth + end + setdimen("global","floatwidth", w) + setdimen("global","floatheight", h+d) + return w, h, d +end + +local function get(stack,n,bylabel) + if bylabel then + for i=1,#stack do + local s = stack[i] + local n = string.topattern(tostring(n)) -- to be sure + if find(s.data.label,n) then + return s, s.box, i + end + end + else + n = n or #stack + if n > 0 then + local t = stack[n] + if t then + return t, t.box, n + end + end + end +end + +function floats.save(which,data) + which = which or default + local b = texbox.floatbox + if b then + local stack = stacks[which] + noffloats = noffloats + 1 + local w, h, d = b.width, b.height, b.depth + local t = { + n = noffloats, + data = data or { }, + box = copy_node_list(b), + } + texbox.floatbox = nil + insert(stack,t) + setcount("global","savednoffloats",#stacks[default]) + if trace_floats then + report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","saving",which,noffloats,#stack,w,h,d) + else + interfaces.showmessage("floatblocks",2,noffloats) + end + else + report_floats("ignoring empty, category %a, number %a",which,noffloats) + end +end + +function floats.resave(which) + if last then + which = which or default + local stack = stacks[which] + local b = texbox.floatbox + local w, h, d = b.width, b.height, b.depth + last.box = copy_node_list(b) + texbox.floatbox = nil + insert(stack,1,last) + 
setcount("global","savednoffloats",#stacks[default]) + if trace_floats then + report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","resaving",which,noffloats,#stack,w,h,d) + else + interfaces.showmessage("floatblocks",2,noffloats) + end + else + report_floats("unable to resave float") + end +end + +function floats.flush(which,n,bylabel) + which = which or default + local stack = stacks[which] + local t, b, n = get(stack,n or 1,bylabel) + if t then + local w, h, d = setdimensions(b) + if trace_floats then + report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","flushing",which,t.n,n,w,h,d) + else + interfaces.showmessage("floatblocks",3,t.n) + end + texbox.floatbox = b + last = remove(stack,n) + last.box = nil + setcount("global","savednoffloats",#stacks[default]) -- default? + else + setdimensions() + end +end + +function floats.consult(which,n) + which = which or default + local stack = stacks[which] + local t, b, n = get(stack,n) + if t then + local w, h, d = setdimensions(b) + if trace_floats then + report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","consulting",which,t.n,n,w,h,d) + end + return t, b, n + else + if trace_floats then + report_floats("nothing to consult") + end + setdimensions() + end +end + +function floats.collect(which,maxwidth,distance) + which = which or default + local stack = stacks[which] + local n, m = #stack, 0 + for i=1,n do + local t, b, n = get(stack,i) + if t then + local w, h, d = setdimensions(b) + if w + distance < maxwidth then + m = m + 1 + maxwidth = maxwidth - w - distance + else + break + end + else + break + end + end + if m == 0 then + m = 1 + end + setcount("global","nofcollectedfloats",m) +end + +function floats.getvariable(name,default) + local value = last and last.data[name] or default + return value ~= "" and value +end + +function floats.checkedpagefloat(packed) + if structures.pages.is_odd() then + if #stacks.rightpage > 0 then + return "rightpage" + elseif #stacks.page > 0 then + return "page" + elseif #stacks.leftpage > 0 then + if packed then + return "leftpage" + end + end + else + if #stacks.leftpage > 0 then + return "leftpage" + elseif #stacks.page > 0 then + return "page" + elseif #stacks.rightpage > 0 then + if packed then + return "rightpage" + end + end + end +end + +function floats.nofstacked() + return #stacks[which or default] or 0 +end + +-- todo: check for digits ! 
+ +local method = C((1-S(", :"))^1) +local position = P(":") * C((1-S("*,"))^1) * (P("*") * C((1-S(","))^1))^0 +local label = P(":") * C((1-S(",*: "))^0) + +local pattern = method * ( + label * position * C("") + + C("") * position * C("") + + label * C("") * C("") + + C("") * C("") * C("") +) + C("") * C("") * C("") * C("") + +-- inspect { lpegmatch(pattern,"somewhere:blabla,crap") } +-- inspect { lpegmatch(pattern,"somewhere:1*2") } +-- inspect { lpegmatch(pattern,"somewhere:blabla:1*2") } +-- inspect { lpegmatch(pattern,"somewhere::1*2") } +-- inspect { lpegmatch(pattern,"somewhere,") } +-- inspect { lpegmatch(pattern,"somewhere") } +-- inspect { lpegmatch(pattern,"") } + +function floats.analysemethod(str) -- will become a more extensive parser + return lpegmatch(pattern,str or "") +end + +-- interface + +local context = context +local setvalue = context.setvalue + +commands.flushfloat = floats.flush +commands.savefloat = floats.save +commands.resavefloat = floats.resave +commands.pushfloat = floats.push +commands.popfloat = floats.pop +commands.consultfloat = floats.consult +commands.collectfloat = floats.collect + +function commands.getfloatvariable (...) local v = floats.getvariable(...) if v then context(v) end end +function commands.checkedpagefloat (...) local v = floats.checkedpagefloat(...) if v then context(v) end end + +function commands.nofstackedfloats (...) context(floats.nofstacked(...)) end +function commands.doifelsesavedfloat(...) commands.doifelse(floats.nofstacked(...)>0) end + +function commands.analysefloatmethod(str) -- currently only one method + local method, label, row, column = floats.analysemethod(str) + setvalue("floatmethod",method or "") + setvalue("floatlabel", label or "") + setvalue("floatrow", row or "") + setvalue("floatcolumn",column or "") +end diff --git a/tex/context/base/page-inj.lua b/tex/context/base/page-inj.lua index 5b450d60e..205f8d397 100644 --- a/tex/context/base/page-inj.lua +++ b/tex/context/base/page-inj.lua @@ -1,101 +1,101 @@ -if not modules then modules = { } end modules ["page-inj"] = { - version = 1.000, - comment = "Page injections", - author = "Wolfgang Schuster & Hans Hagen", - copyright = "Wolfgang Schuster & Hans Hagen", - license = "see context related readme files", -} - --- Adapted a bit by HH: numbered states, tracking, delayed, order, etc. 
- -local injections = pagebuilders.injections or { } -pagebuilders.injections = injections - -local report = logs.reporter("pagebuilder","injections") -local trace = false trackers.register("pagebuilder.injections",function(v) trace = v end) - -local variables = interfaces.variables - -local v_yes = variables.yes -local v_previous = variables.previous -local v_next = variables.next - -local order = 0 -local cache = { } - -function injections.save(specification) -- maybe not public, just commands.* - order = order + 1 - cache[#cache+1] = { - order = order, - name = specification.name, - state = tonumber(specification.state) or specification.state, - parameters = specification.userdata, - } - tex.setcount("global","c_page_boxes_flush_n",#cache) -end - -function injections.flushbefore() -- maybe not public, just commands.* - if #cache > 0 then - local delayed = { } - context.unprotect() - for i=1,#cache do - local c = cache[i] - local oldstate = c.state - if oldstate == v_previous then - if trace then - report("entry %a, order %a, flushing due to state %a",i,c.order,oldstate) - end - context.page_injections_flush_saved(c.name,c.parameters) - elseif type(oldstate) == "number" and oldstate < 0 then - local newstate = oldstate + 1 - if newstate >= 0 then - newstate = v_previous - end - if trace then - report("entry %a, order %a, changing state from %a to %a",i,c.order,oldstate,newstate) - end - c.state = newstate - delayed[#delayed+1] = c - else - delayed[#delayed+1] = c - end - end - context.unprotect() - cache = delayed - tex.setcount("global","c_page_boxes_flush_n",#cache) - end -end - -function injections.flushafter() -- maybe not public, just commands.* - if #cache > 0 then - local delayed = { } - context.unprotect() - for i=1,#cache do - local c = cache[i] - local oldstate = c.state - if oldstate == v_next then - if trace then - report("entry %a, order %a, flushing due to state %a",i,c.order,oldstate) - end - context.page_injections_flush_saved(c.name,c.parameters) - elseif type(oldstate) == "number" and oldstate> 0 then - local newstate = oldstate- 1 - if newstate <= 0 then - newstate = v_next - end - if trace then - report("entry %a, order %a, changing state from %a to %a",i,c.order,oldstate,newstate) - end - c.state = newstate - delayed[#delayed+1] = c - end - end - context.protect() - cache = delayed - tex.setcount("global","c_page_boxes_flush_n",#cache) - end -end - -commands.page_injections_save = injections.save -commands.page_injections_flush_after = injections.flushafter -commands.page_injections_flush_before = injections.flushbefore +if not modules then modules = { } end modules ["page-inj"] = { + version = 1.000, + comment = "Page injections", + author = "Wolfgang Schuster & Hans Hagen", + copyright = "Wolfgang Schuster & Hans Hagen", + license = "see context related readme files", +} + +-- Adapted a bit by HH: numbered states, tracking, delayed, order, etc. 
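
A rough sketch of how the numbered states mentioned above behave; the injection name is hypothetical, and the calls assume a ConTeXt run where the supporting counters and macros exist (normally the entries are created from the TeX end through commands.page_injections_save):

    local v_previous = interfaces.variables.previous
    local v_next     = interfaces.variables.next

    injections.save { name = "demo", state = v_previous } -- flushed by the next flushbefore
    injections.save { name = "demo", state = v_next }     -- flushed by the next flushafter
    injections.save { name = "demo", state = 2 }          -- counts down one per flushafter, then acts like "next"
    injections.save { name = "demo", state = -2 }         -- counts up one per flushbefore, then acts like "previous"
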
+ +local injections = pagebuilders.injections or { } +pagebuilders.injections = injections + +local report = logs.reporter("pagebuilder","injections") +local trace = false trackers.register("pagebuilder.injections",function(v) trace = v end) + +local variables = interfaces.variables + +local v_yes = variables.yes +local v_previous = variables.previous +local v_next = variables.next + +local order = 0 +local cache = { } + +function injections.save(specification) -- maybe not public, just commands.* + order = order + 1 + cache[#cache+1] = { + order = order, + name = specification.name, + state = tonumber(specification.state) or specification.state, + parameters = specification.userdata, + } + tex.setcount("global","c_page_boxes_flush_n",#cache) +end + +function injections.flushbefore() -- maybe not public, just commands.* + if #cache > 0 then + local delayed = { } + context.unprotect() + for i=1,#cache do + local c = cache[i] + local oldstate = c.state + if oldstate == v_previous then + if trace then + report("entry %a, order %a, flushing due to state %a",i,c.order,oldstate) + end + context.page_injections_flush_saved(c.name,c.parameters) + elseif type(oldstate) == "number" and oldstate < 0 then + local newstate = oldstate + 1 + if newstate >= 0 then + newstate = v_previous + end + if trace then + report("entry %a, order %a, changing state from %a to %a",i,c.order,oldstate,newstate) + end + c.state = newstate + delayed[#delayed+1] = c + else + delayed[#delayed+1] = c + end + end + context.unprotect() + cache = delayed + tex.setcount("global","c_page_boxes_flush_n",#cache) + end +end + +function injections.flushafter() -- maybe not public, just commands.* + if #cache > 0 then + local delayed = { } + context.unprotect() + for i=1,#cache do + local c = cache[i] + local oldstate = c.state + if oldstate == v_next then + if trace then + report("entry %a, order %a, flushing due to state %a",i,c.order,oldstate) + end + context.page_injections_flush_saved(c.name,c.parameters) + elseif type(oldstate) == "number" and oldstate> 0 then + local newstate = oldstate- 1 + if newstate <= 0 then + newstate = v_next + end + if trace then + report("entry %a, order %a, changing state from %a to %a",i,c.order,oldstate,newstate) + end + c.state = newstate + delayed[#delayed+1] = c + end + end + context.protect() + cache = delayed + tex.setcount("global","c_page_boxes_flush_n",#cache) + end +end + +commands.page_injections_save = injections.save +commands.page_injections_flush_after = injections.flushafter +commands.page_injections_flush_before = injections.flushbefore diff --git a/tex/context/base/page-ins.lua b/tex/context/base/page-ins.lua index 7f870735d..15656a231 100644 --- a/tex/context/base/page-ins.lua +++ b/tex/context/base/page-ins.lua @@ -1,97 +1,97 @@ -if not modules then modules = { } end modules ['page-ins'] = { - version = 1.001, - comment = "companion to page-mix.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", - -- public = { - -- functions = { - -- "inserts.define", - -- "inserts.getdata", - -- }, - -- commands = { - -- "defineinsertion", - -- "inserttionnumber", - -- } - -- } -} - --- Maybe we should only register in lua and forget about the tex end. 
- -structures = structures or { } -structures.inserts = structures.inserts or { } -local inserts = structures.inserts - -local report_inserts = logs.reporter("inserts") - -local allocate = utilities.storage.allocate - -inserts.stored = inserts.stored or allocate { } -- combining them in one is inefficient in the -inserts.data = inserts.data or allocate { } -- bytecode storage pool - -local variables = interfaces.variables -local v_page = variables.page -local v_columns = variables.columns -local v_firstcolumn = variables.firstcolumn -local v_lastcolumn = variables.lastcolumn -local v_text = variables.text - -storage.register("structures/inserts/stored", inserts.stored, "structures.inserts.stored") - -local data = inserts.data -local stored = inserts.stored - -for name, specification in next, stored do - data[specification.number] = specification - data[name] = specification -end - -function inserts.define(name,specification) - specification.name= name - local number = specification.number or 0 - data[name] = specification - data[number] = specification - -- only needed at runtime as this get stored in a bytecode register - stored[name] = specification - if not specification.location then - specification.location = v_page - end - return specification -end - -function inserts.setup(name,settings) - local specification = data[name] - for k, v in next, settings do - -- maybe trace change - specification[k] = v - end - return specification -end - -function inserts.setlocation(name,location) -- a practical fast one - data[name].location = location -end - -function inserts.getlocation(name,location) - return data[name].location or v_page -end - -function inserts.getdata(name) -- or number - return data[name] -end - -function inserts.getname(number) - return data[name].name -end - -function inserts.getnumber(name) - return data[name].number -end - --- interface - -commands.defineinsertion = inserts.define -commands.setupinsertion = inserts.setup -commands.setinsertionlocation = inserts.setlocation -commands.insertionnumber = function(name) context(data[name].number or 0) end - +if not modules then modules = { } end modules ['page-ins'] = { + version = 1.001, + comment = "companion to page-mix.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", + -- public = { + -- functions = { + -- "inserts.define", + -- "inserts.getdata", + -- }, + -- commands = { + -- "defineinsertion", + -- "inserttionnumber", + -- } + -- } +} + +-- Maybe we should only register in lua and forget about the tex end. 
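
A small usage sketch of the registration interface defined below; the class name and number are hypothetical, and at the TeX end this is normally driven through \defineinsertion and the other commands.* wrappers at the end of the file:

    inserts.define("sidenote", { number = 3 })                           -- location defaults to variables.page
    inserts.setup("sidenote", { location = interfaces.variables.text })  -- adjust the stored settings later on
    local location = inserts.getlocation("sidenote")                     -- "text"
    local number   = inserts.getnumber("sidenote")                       -- 3
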
+
+structures = structures or { }
+structures.inserts = structures.inserts or { }
+local inserts = structures.inserts
+
+local report_inserts = logs.reporter("inserts")
+
+local allocate = utilities.storage.allocate
+
+inserts.stored = inserts.stored or allocate { } -- combining them in one is inefficient in the
+inserts.data = inserts.data or allocate { } -- bytecode storage pool
+
+local variables = interfaces.variables
+local v_page = variables.page
+local v_columns = variables.columns
+local v_firstcolumn = variables.firstcolumn
+local v_lastcolumn = variables.lastcolumn
+local v_text = variables.text
+
+storage.register("structures/inserts/stored", inserts.stored, "structures.inserts.stored")
+
+local data = inserts.data
+local stored = inserts.stored
+
+for name, specification in next, stored do
+    data[specification.number] = specification
+    data[name] = specification
+end
+
+function inserts.define(name,specification)
+    specification.name = name
+    local number = specification.number or 0
+    data[name] = specification
+    data[number] = specification
+    -- only needed at runtime as this gets stored in a bytecode register
+    stored[name] = specification
+    if not specification.location then
+        specification.location = v_page
+    end
+    return specification
+end
+
+function inserts.setup(name,settings)
+    local specification = data[name]
+    for k, v in next, settings do
+        -- maybe trace change
+        specification[k] = v
+    end
+    return specification
+end
+
+function inserts.setlocation(name,location) -- a practical fast one
+    data[name].location = location
+end
+
+function inserts.getlocation(name,location)
+    return data[name].location or v_page
+end
+
+function inserts.getdata(name) -- or number
+    return data[name]
+end
+
+function inserts.getname(number)
+    return data[number].name
+end
+
+function inserts.getnumber(name)
+    return data[name].number
+end
+
+-- interface
+
+commands.defineinsertion = inserts.define
+commands.setupinsertion = inserts.setup
+commands.setinsertionlocation = inserts.setlocation
+commands.insertionnumber = function(name) context(data[name].number or 0) end
+
diff --git a/tex/context/base/page-lin.lua b/tex/context/base/page-lin.lua
index e6b500e8b..5f7ea7eed 100644
--- a/tex/context/base/page-lin.lua
+++ b/tex/context/base/page-lin.lua
@@ -1,290 +1,290 @@
-if not modules then modules = { } end modules ['page-lin'] = {
-    version = 1.001,
-    comment = "companion to page-lin.mkiv",
-    author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
-    copyright = "PRAGMA ADE / ConTeXt Development Team",
-    license = "see context related readme files"
-}
-
--- experimental -> will become builders
-
-local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
-
-local report_lines = logs.reporter("lines")
-
-local texbox = tex.box
-
-local attributes, nodes, node, context = attributes, nodes, node, context
-
-nodes.lines = nodes.lines or { }
-local lines = nodes.lines
-
-lines.data = lines.data or { } -- start step tag
-local data = lines.data
-local last = #data
-
-lines.scratchbox = lines.scratchbox or 0
-
-local leftmarginwidth = nodes.leftmarginwidth
-
-storage.register("lines/data", lines.data, "nodes.lines.data")
-
--- if there is demand for it, we can support multiple numbering streams
--- and use more than one attibute
-
-local variables = interfaces.variables
-
-local nodecodes = nodes.nodecodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local whatsit_code = nodecodes.whatsit
-
-local a_displaymath =
attributes.private('displaymath') -local a_linenumber = attributes.private('linenumber') -local a_linereference = attributes.private('linereference') -local a_verbatimline = attributes.private('verbatimline') - -local current_list = { } -local cross_references = { } -local chunksize = 250 -- not used in boxed - -local traverse_id = node.traverse_id -local traverse = node.traverse -local copy_node = node.copy -local hpack_node = node.hpack -local insert_node_after = node.insert_after -local insert_node_before = node.insert_before - --- cross referencing - -function lines.number(n) - n = tonumber(n) - local cr = cross_references[n] or 0 - cross_references[n] = nil - return cr -end - -local function resolve(n,m) -- we can now check the 'line' flag (todo) - while n do - local id = n.id - if id == whatsit_code then -- why whatsit - local a = n[a_linereference] - if a then - cross_references[a] = m - end - elseif id == hlist_code or id == vlist_code then - resolve(n.list,m) - end - n = n.next - end -end - -function lines.finalize(t) - local getnumber = lines.number - for _,p in next, t do - for _,r in next, p do - local m = r.metadata - if m and m.kind == "line" then - local e = r.entries - local u = r.userdata - e.linenumber = getnumber(e.text or 0) -- we can nil e.text - e.conversion = u and u.conversion - r.userdata = nil -- hack - end - end - end -end - -local filters = structures.references.filters -local helpers = structures.helpers - -structures.references.registerfinalizer(lines.finalize) - -filters.line = filters.line or { } - -function filters.line.default(data) --- helpers.title(data.entries.linenumber or "?",data.metadata) - context.convertnumber(data.entries.conversion or "numbers",data.entries.linenumber or "0") -end - -function filters.line.page(data,prefixspec,pagespec) -- redundant - helpers.prefixpage(data,prefixspec,pagespec) -end - -function filters.line.linenumber(data) -- raw - context(data.entries.linenumber or "0") -end - --- boxed variant, todo: use number mechanism - -lines.boxed = { } -local boxed = lines.boxed - --- todo: cache setups, and free id no longer used --- use interfaces.cachesetup(t) - -function boxed.register(configuration) - last = last + 1 - data[last] = configuration - if trace_numbers then - report_lines("registering setup %a",last) - end - return last -end - -function commands.registerlinenumbering(configuration) - context(boxed.register(configuration)) -end - -function boxed.setup(n,configuration) - local d = data[n] - if d then - if trace_numbers then - report_lines("updating setup %a",n) - end - for k,v in next, configuration do - d[k] = v - end - else - if trace_numbers then - report_lines("registering setup %a (br)",n) - end - data[n] = configuration - end - return n -end - -commands.setuplinenumbering = boxed.setup - -local function check_number(n,a,skip,sameline) - local d = data[a] - if d then - local tag, skipflag, s = d.tag or "", 0, d.start or 1 - current_list[#current_list+1] = { n, s } - if sameline then - skipflag = 0 - if trace_numbers then - report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no") - end - elseif not skip and s % d.step == 0 then - skipflag, d.start = 1, s + 1 -- (d.step or 1) - if trace_numbers then - report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no") - end - else - skipflag, d.start = 0, s + 1 -- (d.step or 1) - if trace_numbers then - report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or 
"no") - end - end - context.makelinenumber(tag,skipflag,s,n.shift,n.width,leftmarginwidth(n.list),n.dir) - end -end - --- xlist --- xlist --- hlist - -local function identify(list) - if list then - for n in traverse_id(hlist_code,list) do - if n[a_linenumber] then - return list - end - end - local n = list - while n do - local id = n.id - if id == hlist_code or id == vlist_code then - local ok = identify(n.list) - if ok then - return ok - end - end - n = n.next - end - end -end - -function boxed.stage_zero(n) - return identify(texbox[n].list) -end - --- reset ranges per page --- store first and last per page --- maybe just set marks directly - -function boxed.stage_one(n,nested) - current_list = { } - local head = texbox[n] - if head then - local list = head.list - if nested then - list = identify(list) - end - local last_a, last_v, skip = nil, -1, false - for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found - if n.height == 0 and n.depth == 0 then - -- skip funny hlists -- todo: check line subtype - else - local list = n.list - local a = list[a_linenumber] - if a and a > 0 then - if last_a ~= a then - local da = data[a] - local ma = da.method - if ma == variables.next then - skip = true - elseif ma == variables.page then - da.start = 1 -- eventually we will have a normal counter - end - last_a = a - if trace_numbers then - report_lines("starting line number range %s: start %s, continue",a,da.start,da.continue or "no") - end - end - if n[a_displaymath] then - if nodes.is_display_math(n) then - check_number(n,a,skip) - end - else - local v = list[a_verbatimline] - if not v or v ~= last_v then - last_v = v - check_number(n,a,skip) - else - check_number(n,a,skip,true) - end - end - skip = false - end - end - end - end -end - -function boxed.stage_two(n,m) - if #current_list > 0 then - m = m or lines.scratchbox - local t, tn = { }, 0 - for l in traverse_id(hlist_code,texbox[m].list) do - tn = tn + 1 - t[tn] = copy_node(l) - end - for i=1,#current_list do - local li = current_list[i] - local n, m, ti = li[1], li[2], t[i] - if ti then - ti.next, n.list = n.list, ti - resolve(n,m) - else - report_lines("error in linenumbering (1)") - return - end - end - end -end - -commands.linenumbersstageone = boxed.stage_one -commands.linenumbersstagetwo = boxed.stage_two +if not modules then modules = { } end modules ['page-lin'] = { + version = 1.001, + comment = "companion to page-lin.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- experimental -> will become builders + +local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end) + +local report_lines = logs.reporter("lines") + +local texbox = tex.box + +local attributes, nodes, node, context = attributes, nodes, node, context + +nodes.lines = nodes.lines or { } +local lines = nodes.lines + +lines.data = lines.data or { } -- start step tag +local data = lines.data +local last = #data + +lines.scratchbox = lines.scratchbox or 0 + +local leftmarginwidth = nodes.leftmarginwidth + +storage.register("lines/data", lines.data, "nodes.lines.data") + +-- if there is demand for it, we can support multiple numbering streams +-- and use more than one attibute + +local variables = interfaces.variables + +local nodecodes = nodes.nodecodes + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local whatsit_code = nodecodes.whatsit + +local a_displaymath = 
attributes.private('displaymath') +local a_linenumber = attributes.private('linenumber') +local a_linereference = attributes.private('linereference') +local a_verbatimline = attributes.private('verbatimline') + +local current_list = { } +local cross_references = { } +local chunksize = 250 -- not used in boxed + +local traverse_id = node.traverse_id +local traverse = node.traverse +local copy_node = node.copy +local hpack_node = node.hpack +local insert_node_after = node.insert_after +local insert_node_before = node.insert_before + +-- cross referencing + +function lines.number(n) + n = tonumber(n) + local cr = cross_references[n] or 0 + cross_references[n] = nil + return cr +end + +local function resolve(n,m) -- we can now check the 'line' flag (todo) + while n do + local id = n.id + if id == whatsit_code then -- why whatsit + local a = n[a_linereference] + if a then + cross_references[a] = m + end + elseif id == hlist_code or id == vlist_code then + resolve(n.list,m) + end + n = n.next + end +end + +function lines.finalize(t) + local getnumber = lines.number + for _,p in next, t do + for _,r in next, p do + local m = r.metadata + if m and m.kind == "line" then + local e = r.entries + local u = r.userdata + e.linenumber = getnumber(e.text or 0) -- we can nil e.text + e.conversion = u and u.conversion + r.userdata = nil -- hack + end + end + end +end + +local filters = structures.references.filters +local helpers = structures.helpers + +structures.references.registerfinalizer(lines.finalize) + +filters.line = filters.line or { } + +function filters.line.default(data) +-- helpers.title(data.entries.linenumber or "?",data.metadata) + context.convertnumber(data.entries.conversion or "numbers",data.entries.linenumber or "0") +end + +function filters.line.page(data,prefixspec,pagespec) -- redundant + helpers.prefixpage(data,prefixspec,pagespec) +end + +function filters.line.linenumber(data) -- raw + context(data.entries.linenumber or "0") +end + +-- boxed variant, todo: use number mechanism + +lines.boxed = { } +local boxed = lines.boxed + +-- todo: cache setups, and free id no longer used +-- use interfaces.cachesetup(t) + +function boxed.register(configuration) + last = last + 1 + data[last] = configuration + if trace_numbers then + report_lines("registering setup %a",last) + end + return last +end + +function commands.registerlinenumbering(configuration) + context(boxed.register(configuration)) +end + +function boxed.setup(n,configuration) + local d = data[n] + if d then + if trace_numbers then + report_lines("updating setup %a",n) + end + for k,v in next, configuration do + d[k] = v + end + else + if trace_numbers then + report_lines("registering setup %a (br)",n) + end + data[n] = configuration + end + return n +end + +commands.setuplinenumbering = boxed.setup + +local function check_number(n,a,skip,sameline) + local d = data[a] + if d then + local tag, skipflag, s = d.tag or "", 0, d.start or 1 + current_list[#current_list+1] = { n, s } + if sameline then + skipflag = 0 + if trace_numbers then + report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no") + end + elseif not skip and s % d.step == 0 then + skipflag, d.start = 1, s + 1 -- (d.step or 1) + if trace_numbers then + report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no") + end + else + skipflag, d.start = 0, s + 1 -- (d.step or 1) + if trace_numbers then + report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or 
"no") + end + end + context.makelinenumber(tag,skipflag,s,n.shift,n.width,leftmarginwidth(n.list),n.dir) + end +end + +-- xlist +-- xlist +-- hlist + +local function identify(list) + if list then + for n in traverse_id(hlist_code,list) do + if n[a_linenumber] then + return list + end + end + local n = list + while n do + local id = n.id + if id == hlist_code or id == vlist_code then + local ok = identify(n.list) + if ok then + return ok + end + end + n = n.next + end + end +end + +function boxed.stage_zero(n) + return identify(texbox[n].list) +end + +-- reset ranges per page +-- store first and last per page +-- maybe just set marks directly + +function boxed.stage_one(n,nested) + current_list = { } + local head = texbox[n] + if head then + local list = head.list + if nested then + list = identify(list) + end + local last_a, last_v, skip = nil, -1, false + for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found + if n.height == 0 and n.depth == 0 then + -- skip funny hlists -- todo: check line subtype + else + local list = n.list + local a = list[a_linenumber] + if a and a > 0 then + if last_a ~= a then + local da = data[a] + local ma = da.method + if ma == variables.next then + skip = true + elseif ma == variables.page then + da.start = 1 -- eventually we will have a normal counter + end + last_a = a + if trace_numbers then + report_lines("starting line number range %s: start %s, continue",a,da.start,da.continue or "no") + end + end + if n[a_displaymath] then + if nodes.is_display_math(n) then + check_number(n,a,skip) + end + else + local v = list[a_verbatimline] + if not v or v ~= last_v then + last_v = v + check_number(n,a,skip) + else + check_number(n,a,skip,true) + end + end + skip = false + end + end + end + end +end + +function boxed.stage_two(n,m) + if #current_list > 0 then + m = m or lines.scratchbox + local t, tn = { }, 0 + for l in traverse_id(hlist_code,texbox[m].list) do + tn = tn + 1 + t[tn] = copy_node(l) + end + for i=1,#current_list do + local li = current_list[i] + local n, m, ti = li[1], li[2], t[i] + if ti then + ti.next, n.list = n.list, ti + resolve(n,m) + else + report_lines("error in linenumbering (1)") + return + end + end + end +end + +commands.linenumbersstageone = boxed.stage_one +commands.linenumbersstagetwo = boxed.stage_two diff --git a/tex/context/base/page-mix.lua b/tex/context/base/page-mix.lua index cf0094787..999427b8f 100644 --- a/tex/context/base/page-mix.lua +++ b/tex/context/base/page-mix.lua @@ -1,695 +1,695 @@ -if not modules then modules = { } end modules ["page-mix"] = { - version = 1.001, - comment = "companion to page-mix.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- inserts.getname(name) - --- local node, tex = node, tex --- local nodes, interfaces, utilities = nodes, interfaces, utilities --- local trackers, logs, storage = trackers, logs, storage --- local number, table = number, table - -local concat = table.concat - -local nodecodes = nodes.nodecodes -local gluecodes = nodes.gluecodes -local nodepool = nodes.pool - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local kern_code = nodecodes.kern -local glue_code = nodecodes.glue -local penalty_code = nodecodes.penalty -local insert_code = nodecodes.ins -local mark_code = nodecodes.mark - -local new_hlist = nodepool.hlist -local new_vlist = nodepool.vlist -local new_glue = nodepool.glue - -local hpack = node.hpack 
-local vpack = node.vpack -local freenode = node.free - -local texbox = tex.box -local texskip = tex.skip -local texdimen = tex.dimen -local points = number.points -local settings_to_hash = utilities.parsers.settings_to_hash - -local variables = interfaces.variables -local v_yes = variables.yes -local v_global = variables["global"] -local v_local = variables["local"] -local v_columns = variables.columns - -local trace_state = false trackers.register("mixedcolumns.trace", function(v) trace_state = v end) -local trace_detail = false trackers.register("mixedcolumns.detail", function(v) trace_detail = v end) - -local report_state = logs.reporter("mixed columns") - -pagebuilders = pagebuilders or { } -pagebuilders.mixedcolumns = pagebuilders.mixedcolumns or { } -local mixedcolumns = pagebuilders.mixedcolumns - -local forcedbreak = -123 - --- initializesplitter(specification) --- cleanupsplitter() - --- Inserts complicate matters a lot. In order to deal with them well, we need to --- distinguish several cases. --- --- (1) full page columns: firstcolumn, columns, lastcolumn, page --- (2) mid page columns : firstcolumn, columns, lastcolumn, page --- --- We need to collect them accordingly. - -local function collectinserts(result,nxt,nxtid) - local inserts, currentskips, nextskips, inserttotal = { }, 0, 0, 0 - while nxt do - if nxtid == insert_code then - inserttotal = inserttotal + nxt.height + nxt.depth - local s = nxt.subtype - local c = inserts[s] - if not c then - c = { } - inserts[s] = c - local width = texskip[s].width - if not result.inserts[s] then - currentskips = currentskips + width - end - nextskips = nextskips + width - end - c[#c+1] = nxt - if trace_detail then - report_state("insert of class %s found",s) - end - elseif nxtid == mark_code then - if trace_detail then - report_state("mark found") - end - else - break - end - nxt = nxt.next - if nxt then - nxtid = nxt.id - else - break - end - end - return nxt, inserts, currentskips, nextskips, inserttotal -end - -local function appendinserts(ri,inserts) - for class, collected in next, inserts do - local ric = ri[class] - if not ric then - -- assign to collected - ri[class] = collected - else - -- append to collected - for j=1,#collected do - ric[#ric+1] = collected[j] - end - end - end -end - -local function discardtopglue(current,discarded) - local size = 0 - while current do - local id = current.id - if id == glue_code then - size = size + current.spec.width - discarded[#discarded+1] = current - current = current.next - elseif id == penalty_code then - if current.penalty == forcedbreak then - discarded[#discarded+1] = current - current = current.next - while current do - local id = current.id - if id == glue_code then - size = size + current.spec.width - discarded[#discarded+1] = current - current = current.next - else - break - end - end - else - discarded[#discarded+1] = current - current = current.next - end - else - break - end - end - return current, size -end - -local function stripbottomglue(results,discarded) - local height = 0 - for i=1,#results do - local r = results[i] - local t = r.tail - while t and t ~= r.head do - local prev = t.prev - if not prev then - break - end - local id = t.id - if id == penalty_code then - if t.penalty == forcedbreak then - break - else - discarded[#discarded+1] = t - r.tail = prev - t = prev - end - elseif id == glue_code then - discarded[#discarded+1] = t - local width = t.spec.width - if trace_state then - report_state("columns %s, discarded bottom glue %p",i,width) - end - r.height = 
r.height - width - r.tail = prev - t = prev - else - break - end - end - if r.height > height then - height = r.height - end - end - return height -end - -local function setsplit(specification) -- a rather large function - local box = specification.box - if not box then - report_state("fatal error, no box") - return - end - local list = texbox[box] - if not list then - report_state("fatal error, no list") - return - end - local head = list.head or specification.originalhead - if not head then - report_state("fatal error, no head") - return - end - local discarded = { } - local originalhead = head - local originalwidth = specification.originalwidth or list.width - local originalheight = specification.originalheight or list.height - local current = head - local skipped = 0 - local height = 0 - local depth = 0 - local skip = 0 - local options = settings_to_hash(specification.option or "") - local stripbottom = specification.alternative == v_local - local cycle = specification.cycle or 1 - local nofcolumns = specification.nofcolumns or 1 - if nofcolumns == 0 then - nofcolumns = 1 - end - local preheight = specification.preheight or 0 - local extra = specification.extra or 0 - local maxheight = specification.maxheight - local optimal = originalheight/nofcolumns - if specification.balance ~= v_yes then - optimal = maxheight - end - local target = optimal + extra - local overflow = target > maxheight - preheight - local threshold = specification.threshold or 0 - if overflow then - target = maxheight - preheight - end - if trace_state then - report_state("cycle %s, maxheight %p, preheight %p, target %p, overflow %a, extra %p", - cycle, maxheight, preheight , target, overflow, extra) - end - local results = { } - for i=1,nofcolumns do - results[i] = { - head = false, - tail = false, - height = 0, - depth = 0, - inserts = { }, - delta = 0, - } - end - local column = 1 - local line = 0 - local result = results[column] - local lasthead = nil - local rest = nil - local function gotonext() - if head == lasthead then - if trace_state then - report_state("empty column %s, needs more work",column) - end - rest = current - return false, 0 - else - lasthead = head - result.head = head - if current == head then - result.tail = head - else - result.tail = current.prev - end - result.height = height - result.depth = depth - end - head = current - height = 0 - depth = 0 - if column == nofcolumns then - column = 0 -- nicer in trace - rest = head - -- lasthead = head - return false, 0 - else - local skipped - column = column + 1 - result = results[column] - current, skipped = discardtopglue(current,discarded) - head = current - -- lasthead = head - return true, skipped - end - end - local function checked(advance,where) - local total = skip + height + depth + advance - local delta = total - target - local state = "same" - local okay = false - local skipped = 0 - local curcol = column - if delta > threshold then - result.delta = delta - okay, skipped = gotonext() - if okay then - state = "next" - else - state = "quit" - end - end - if trace_detail then - report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p, discarded %p => %a (height %p, depth %p, skip %p)", - where,curcol,delta,threshold,advance,total,target,state,skipped,height,depth,skip) - end - return state, skipped - end - current, skipped = discardtopglue(current,discarded) - if trace_detail and skipped ~= 0 then - report_state("check > column 1, discarded %p",skipped) - end - head = current - while current do - local id = 
current.id - local nxt = current.next -local lastcolumn = column - if id == hlist_code or id == vlist_code then - line = line + 1 - local nxtid = nxt and nxt.id - local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0 - local advance = current.height -- + current.depth - if nxt and (nxtid == insert_code or nxtid == mark_code) then - nxt, inserts, localskips, insertskips, inserttotal = collectinserts(result,nxt,nxtid) - end - local state, skipped = checked(advance+inserttotal+currentskips,"line") - if trace_state then - report_state("%-7s > column %s, state %a, line %s, advance %p, insert %p, height %p","line",column,state,line,advance,inserttotal,height) - if skipped ~= 0 then - report_state("%-7s > column %s, discarded %p","line",column,skipped) - end - end - if state == "quit" then - break - else - height = height + depth + skip + advance + inserttotal - if state == "next" then - height = height + nextskips - else - height = height + currentskips - end - end - depth = current.depth - skip = 0 - if inserts then - appendinserts(result.inserts,inserts) - end - elseif id == glue_code then - local advance = current.spec.width - if advance ~= 0 then - local state, skipped = checked(advance,"glue") - if trace_state then - report_state("%-7s > column %s, state %a, advance %p, height %p","glue",column,state,advance,height) - if skipped ~= 0 then - report_state("%-7s > column %s, discarded %p","glue",column,skipped) - end - end - if state == "quit" then - break - end - height = height + depth + skip - depth = 0 - skip = height > 0 and advance or 0 - end - elseif id == kern_code then - local advance = current.kern - if advance ~= 0 then - local state, skipped = checked(advance,"kern") - if trace_state then - report_state("%-7s > column %s, state %a, advance %p, height %p, state %a","kern",column,state,advance,height) - if skipped ~= 0 then - report_state("%-7s > column %s, discarded %p","kern",column,skipped) - end - end - if state == "quit" then - break - end - height = height + depth + skip + advance - depth = 0 - skip = 0 - end - elseif id == penalty_code then - local penalty = current.penalty - if penalty == 0 then - -- don't bother - elseif penalty == forcedbreak then - local okay, skipped = gotonext() - if okay then - if trace_state then - report_state("cycle: %s, forced column break (same page)",cycle) - if skipped ~= 0 then - report_state("%-7s > column %s, discarded %p","penalty",column,skipped) - end - end - else - if trace_state then - report_state("cycle: %s, forced column break (next page)",cycle) - if skipped ~= 0 then - report_state("%-7s > column %s, discarded %p","penalty",column,skipped) - end - end - break - end - else - -- todo: nobreak etc ... we might need to backtrack so we need to remember - -- the last acceptable break - -- club and widow and such i.e. 
resulting penalties (if we care) - end - end -if lastcolumn == column then - nxt = current.next -- can have changed -end - if nxt then - current = nxt - elseif head == lasthead then - -- to be checked but break needed as otherwise we have a loop - if trace_state then - report_state("quit as head is lasthead") - end - break - else - local r = results[column] - r.head = head - r.tail = current - r.height = height - r.depth = depth - break - end - end - if not current then - if trace_state then - report_state("nilling rest") - end - rest = nil - elseif rest == lasthead then - if trace_state then - report_state("nilling rest as rest is lasthead") - end - rest = nil - end - - if stripbottom then - local height = stripbottomglue(results,discarded) - if height > 0 then - target = height - end - end - - specification.results = results - specification.height = target - specification.originalheight = originalheight - specification.originalwidth = originalwidth - specification.originalhead = originalhead - specification.targetheight = target or 0 - specification.rest = rest - specification.overflow = overflow - specification.discarded = discarded - - texbox[specification.box].head = nil - - return specification -end - -function mixedcolumns.finalize(result) - if result then - local results = result.results - for i=1,result.nofcolumns do - local r = results[i] - local h = r.head - if h then - h.prev = nil - local t = r.tail - if t then - t.next = nil - else - h.next = nil - r.tail = h - end - for c, list in next, r.inserts do - local t = { } - for i=1,#list do - local l = list[i] - local h = new_hlist() - t[i] = h - h.head = l.head - h.height = l.height - h.depth = l.depth - l.head = nil - end - t[1].prev = nil -- needs checking - t[#t].next = nil -- needs checking - r.inserts[c] = t - end - end - end - end -end - -local splitruns = 0 - -local function report_deltas(result,str) - local t = { } - for i=1,result.nofcolumns do - t[#t+1] = points(result.results[i].delta or 0) - end - report_state("%s, cycles %s, deltas % | t",str,result.cycle or 1,t) -end - -function mixedcolumns.setsplit(specification) - splitruns = splitruns + 1 - if trace_state then - report_state("split run %s",splitruns) - end - local result = setsplit(specification) - if result then - if result.overflow then - if trace_state then - report_deltas(result,"overflow") - end - -- we might have some rest - elseif result.rest and specification.balance == v_yes then - local step = specification.step or 65536*2 - local cycle = 1 - local cycles = specification.cycles or 100 - while result.rest and cycle <= cycles do - specification.extra = cycle * step - result = setsplit(specification) or result - if trace_state then - report_state("cycle: %s.%s, original height %p, total height %p", - splitruns,cycle,result.originalheight,result.nofcolumns*result.targetheight) - end - cycle = cycle + 1 - specification.cycle = cycle - end - if cycle > cycles then - report_deltas(result,"too many balancing cycles") - elseif trace_state then - report_deltas(result,"balanced") - end - elseif trace_state then - report_deltas(result,"done") - end - return result - elseif trace_state then - report_state("no result") - end -end - -local topskip_code = gluecodes.topskip -local baselineskip_code = gluecodes.baselineskip - -function mixedcolumns.getsplit(result,n) - if not result then - report_state("flush, column %s, no result",n) - return - end - local r = result.results[n] - if not r then - report_state("flush, column %s, empty",n) - end - local h = r.head - if not 
h then - return new_glue(result.originalwidth) - end - - h.prev = nil -- move up - local strutht = result.strutht - local strutdp = result.strutdp - local lineheight = strutht + strutdp - - local v = new_vlist() - v.head = h - - -- local v = vpack(h,"exactly",height) - - if result.alternative == v_global then -- option - result.height = result.maxheight - end - - local ht = 0 - local dp = 0 - local wd = result.originalwidth - - local grid = result.grid - - if grid then - ht = lineheight * math.ceil(result.height/lineheight) - strutdp - dp = strutdp - else - ht = result.height - dp = result.depth - end - - v.width = wd - v.height = ht - v.depth = dp - - if trace_state then - local id = h.id - if id == hlist_code then - report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",nodes.toutf(h.list)) - else - report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"head node",nodecodes[id]) - end - end - - for c, list in next, r.inserts do - -- tex.setbox("global",c,vpack(nodes.concat(list))) - -- tex.setbox(c,vpack(nodes.concat(list))) - texbox[c] = vpack(nodes.concat(list)) - r.inserts[c] = nil - end - - return v -end - -function mixedcolumns.getrest(result) - local rest = result and result.rest - result.rest = nil -- to be sure - return rest -end - -function mixedcolumns.getlist(result) - local originalhead = result and result.originalhead - result.originalhead = nil -- to be sure - return originalhead -end - -function mixedcolumns.cleanup(result) - local discarded = result.discarded - for i=1,#discarded do - freenode(discarded[i]) - end - result.discarded = { } -end - --- interface -- - -local result - -function commands.mixsetsplit(specification) - if result then - for k, v in next, specification do - result[k] = v - end - result = mixedcolumns.setsplit(result) - else - result = mixedcolumns.setsplit(specification) - end -end - -function commands.mixgetsplit(n) - if result then - context(mixedcolumns.getsplit(result,n)) - end -end - -function commands.mixfinalize() - if result then - mixedcolumns.finalize(result) - end -end - -function commands.mixflushrest() - if result then - context(mixedcolumns.getrest(result)) - end -end - -function commands.mixflushlist() - if result then - context(mixedcolumns.getlist(result)) - end -end - -function commands.mixstate() - context(result and result.rest and 1 or 0) -end - -function commands.mixcleanup() - if result then - mixedcolumns.cleanup(result) - result = nil - end -end +if not modules then modules = { } end modules ["page-mix"] = { + version = 1.001, + comment = "companion to page-mix.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- inserts.getname(name) + +-- local node, tex = node, tex +-- local nodes, interfaces, utilities = nodes, interfaces, utilities +-- local trackers, logs, storage = trackers, logs, storage +-- local number, table = number, table + +local concat = table.concat + +local nodecodes = nodes.nodecodes +local gluecodes = nodes.gluecodes +local nodepool = nodes.pool + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local kern_code = nodecodes.kern +local glue_code = nodecodes.glue +local penalty_code = nodecodes.penalty +local insert_code = nodecodes.ins +local mark_code = nodecodes.mark + +local new_hlist = nodepool.hlist +local new_vlist = nodepool.vlist +local new_glue = nodepool.glue + +local 
hpack = node.hpack +local vpack = node.vpack +local freenode = node.free + +local texbox = tex.box +local texskip = tex.skip +local texdimen = tex.dimen +local points = number.points +local settings_to_hash = utilities.parsers.settings_to_hash + +local variables = interfaces.variables +local v_yes = variables.yes +local v_global = variables["global"] +local v_local = variables["local"] +local v_columns = variables.columns + +local trace_state = false trackers.register("mixedcolumns.trace", function(v) trace_state = v end) +local trace_detail = false trackers.register("mixedcolumns.detail", function(v) trace_detail = v end) + +local report_state = logs.reporter("mixed columns") + +pagebuilders = pagebuilders or { } +pagebuilders.mixedcolumns = pagebuilders.mixedcolumns or { } +local mixedcolumns = pagebuilders.mixedcolumns + +local forcedbreak = -123 + +-- initializesplitter(specification) +-- cleanupsplitter() + +-- Inserts complicate matters a lot. In order to deal with them well, we need to +-- distinguish several cases. +-- +-- (1) full page columns: firstcolumn, columns, lastcolumn, page +-- (2) mid page columns : firstcolumn, columns, lastcolumn, page +-- +-- We need to collect them accordingly. + +local function collectinserts(result,nxt,nxtid) + local inserts, currentskips, nextskips, inserttotal = { }, 0, 0, 0 + while nxt do + if nxtid == insert_code then + inserttotal = inserttotal + nxt.height + nxt.depth + local s = nxt.subtype + local c = inserts[s] + if not c then + c = { } + inserts[s] = c + local width = texskip[s].width + if not result.inserts[s] then + currentskips = currentskips + width + end + nextskips = nextskips + width + end + c[#c+1] = nxt + if trace_detail then + report_state("insert of class %s found",s) + end + elseif nxtid == mark_code then + if trace_detail then + report_state("mark found") + end + else + break + end + nxt = nxt.next + if nxt then + nxtid = nxt.id + else + break + end + end + return nxt, inserts, currentskips, nextskips, inserttotal +end + +local function appendinserts(ri,inserts) + for class, collected in next, inserts do + local ric = ri[class] + if not ric then + -- assign to collected + ri[class] = collected + else + -- append to collected + for j=1,#collected do + ric[#ric+1] = collected[j] + end + end + end +end + +local function discardtopglue(current,discarded) + local size = 0 + while current do + local id = current.id + if id == glue_code then + size = size + current.spec.width + discarded[#discarded+1] = current + current = current.next + elseif id == penalty_code then + if current.penalty == forcedbreak then + discarded[#discarded+1] = current + current = current.next + while current do + local id = current.id + if id == glue_code then + size = size + current.spec.width + discarded[#discarded+1] = current + current = current.next + else + break + end + end + else + discarded[#discarded+1] = current + current = current.next + end + else + break + end + end + return current, size +end + +local function stripbottomglue(results,discarded) + local height = 0 + for i=1,#results do + local r = results[i] + local t = r.tail + while t and t ~= r.head do + local prev = t.prev + if not prev then + break + end + local id = t.id + if id == penalty_code then + if t.penalty == forcedbreak then + break + else + discarded[#discarded+1] = t + r.tail = prev + t = prev + end + elseif id == glue_code then + discarded[#discarded+1] = t + local width = t.spec.width + if trace_state then + report_state("columns %s, discarded bottom glue %p",i,width) + end 
+ r.height = r.height - width + r.tail = prev + t = prev + else + break + end + end + if r.height > height then + height = r.height + end + end + return height +end + +local function setsplit(specification) -- a rather large function + local box = specification.box + if not box then + report_state("fatal error, no box") + return + end + local list = texbox[box] + if not list then + report_state("fatal error, no list") + return + end + local head = list.head or specification.originalhead + if not head then + report_state("fatal error, no head") + return + end + local discarded = { } + local originalhead = head + local originalwidth = specification.originalwidth or list.width + local originalheight = specification.originalheight or list.height + local current = head + local skipped = 0 + local height = 0 + local depth = 0 + local skip = 0 + local options = settings_to_hash(specification.option or "") + local stripbottom = specification.alternative == v_local + local cycle = specification.cycle or 1 + local nofcolumns = specification.nofcolumns or 1 + if nofcolumns == 0 then + nofcolumns = 1 + end + local preheight = specification.preheight or 0 + local extra = specification.extra or 0 + local maxheight = specification.maxheight + local optimal = originalheight/nofcolumns + if specification.balance ~= v_yes then + optimal = maxheight + end + local target = optimal + extra + local overflow = target > maxheight - preheight + local threshold = specification.threshold or 0 + if overflow then + target = maxheight - preheight + end + if trace_state then + report_state("cycle %s, maxheight %p, preheight %p, target %p, overflow %a, extra %p", + cycle, maxheight, preheight , target, overflow, extra) + end + local results = { } + for i=1,nofcolumns do + results[i] = { + head = false, + tail = false, + height = 0, + depth = 0, + inserts = { }, + delta = 0, + } + end + local column = 1 + local line = 0 + local result = results[column] + local lasthead = nil + local rest = nil + local function gotonext() + if head == lasthead then + if trace_state then + report_state("empty column %s, needs more work",column) + end + rest = current + return false, 0 + else + lasthead = head + result.head = head + if current == head then + result.tail = head + else + result.tail = current.prev + end + result.height = height + result.depth = depth + end + head = current + height = 0 + depth = 0 + if column == nofcolumns then + column = 0 -- nicer in trace + rest = head + -- lasthead = head + return false, 0 + else + local skipped + column = column + 1 + result = results[column] + current, skipped = discardtopglue(current,discarded) + head = current + -- lasthead = head + return true, skipped + end + end + local function checked(advance,where) + local total = skip + height + depth + advance + local delta = total - target + local state = "same" + local okay = false + local skipped = 0 + local curcol = column + if delta > threshold then + result.delta = delta + okay, skipped = gotonext() + if okay then + state = "next" + else + state = "quit" + end + end + if trace_detail then + report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p, discarded %p => %a (height %p, depth %p, skip %p)", + where,curcol,delta,threshold,advance,total,target,state,skipped,height,depth,skip) + end + return state, skipped + end + current, skipped = discardtopglue(current,discarded) + if trace_detail and skipped ~= 0 then + report_state("check > column 1, discarded %p",skipped) + end + head = current + while current do 
+ local id = current.id + local nxt = current.next +local lastcolumn = column + if id == hlist_code or id == vlist_code then + line = line + 1 + local nxtid = nxt and nxt.id + local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0 + local advance = current.height -- + current.depth + if nxt and (nxtid == insert_code or nxtid == mark_code) then + nxt, inserts, localskips, insertskips, inserttotal = collectinserts(result,nxt,nxtid) + end + local state, skipped = checked(advance+inserttotal+currentskips,"line") + if trace_state then + report_state("%-7s > column %s, state %a, line %s, advance %p, insert %p, height %p","line",column,state,line,advance,inserttotal,height) + if skipped ~= 0 then + report_state("%-7s > column %s, discarded %p","line",column,skipped) + end + end + if state == "quit" then + break + else + height = height + depth + skip + advance + inserttotal + if state == "next" then + height = height + nextskips + else + height = height + currentskips + end + end + depth = current.depth + skip = 0 + if inserts then + appendinserts(result.inserts,inserts) + end + elseif id == glue_code then + local advance = current.spec.width + if advance ~= 0 then + local state, skipped = checked(advance,"glue") + if trace_state then + report_state("%-7s > column %s, state %a, advance %p, height %p","glue",column,state,advance,height) + if skipped ~= 0 then + report_state("%-7s > column %s, discarded %p","glue",column,skipped) + end + end + if state == "quit" then + break + end + height = height + depth + skip + depth = 0 + skip = height > 0 and advance or 0 + end + elseif id == kern_code then + local advance = current.kern + if advance ~= 0 then + local state, skipped = checked(advance,"kern") + if trace_state then + report_state("%-7s > column %s, state %a, advance %p, height %p, state %a","kern",column,state,advance,height) + if skipped ~= 0 then + report_state("%-7s > column %s, discarded %p","kern",column,skipped) + end + end + if state == "quit" then + break + end + height = height + depth + skip + advance + depth = 0 + skip = 0 + end + elseif id == penalty_code then + local penalty = current.penalty + if penalty == 0 then + -- don't bother + elseif penalty == forcedbreak then + local okay, skipped = gotonext() + if okay then + if trace_state then + report_state("cycle: %s, forced column break (same page)",cycle) + if skipped ~= 0 then + report_state("%-7s > column %s, discarded %p","penalty",column,skipped) + end + end + else + if trace_state then + report_state("cycle: %s, forced column break (next page)",cycle) + if skipped ~= 0 then + report_state("%-7s > column %s, discarded %p","penalty",column,skipped) + end + end + break + end + else + -- todo: nobreak etc ... we might need to backtrack so we need to remember + -- the last acceptable break + -- club and widow and such i.e. 
resulting penalties (if we care) + end + end +if lastcolumn == column then + nxt = current.next -- can have changed +end + if nxt then + current = nxt + elseif head == lasthead then + -- to be checked but break needed as otherwise we have a loop + if trace_state then + report_state("quit as head is lasthead") + end + break + else + local r = results[column] + r.head = head + r.tail = current + r.height = height + r.depth = depth + break + end + end + if not current then + if trace_state then + report_state("nilling rest") + end + rest = nil + elseif rest == lasthead then + if trace_state then + report_state("nilling rest as rest is lasthead") + end + rest = nil + end + + if stripbottom then + local height = stripbottomglue(results,discarded) + if height > 0 then + target = height + end + end + + specification.results = results + specification.height = target + specification.originalheight = originalheight + specification.originalwidth = originalwidth + specification.originalhead = originalhead + specification.targetheight = target or 0 + specification.rest = rest + specification.overflow = overflow + specification.discarded = discarded + + texbox[specification.box].head = nil + + return specification +end + +function mixedcolumns.finalize(result) + if result then + local results = result.results + for i=1,result.nofcolumns do + local r = results[i] + local h = r.head + if h then + h.prev = nil + local t = r.tail + if t then + t.next = nil + else + h.next = nil + r.tail = h + end + for c, list in next, r.inserts do + local t = { } + for i=1,#list do + local l = list[i] + local h = new_hlist() + t[i] = h + h.head = l.head + h.height = l.height + h.depth = l.depth + l.head = nil + end + t[1].prev = nil -- needs checking + t[#t].next = nil -- needs checking + r.inserts[c] = t + end + end + end + end +end + +local splitruns = 0 + +local function report_deltas(result,str) + local t = { } + for i=1,result.nofcolumns do + t[#t+1] = points(result.results[i].delta or 0) + end + report_state("%s, cycles %s, deltas % | t",str,result.cycle or 1,t) +end + +function mixedcolumns.setsplit(specification) + splitruns = splitruns + 1 + if trace_state then + report_state("split run %s",splitruns) + end + local result = setsplit(specification) + if result then + if result.overflow then + if trace_state then + report_deltas(result,"overflow") + end + -- we might have some rest + elseif result.rest and specification.balance == v_yes then + local step = specification.step or 65536*2 + local cycle = 1 + local cycles = specification.cycles or 100 + while result.rest and cycle <= cycles do + specification.extra = cycle * step + result = setsplit(specification) or result + if trace_state then + report_state("cycle: %s.%s, original height %p, total height %p", + splitruns,cycle,result.originalheight,result.nofcolumns*result.targetheight) + end + cycle = cycle + 1 + specification.cycle = cycle + end + if cycle > cycles then + report_deltas(result,"too many balancing cycles") + elseif trace_state then + report_deltas(result,"balanced") + end + elseif trace_state then + report_deltas(result,"done") + end + return result + elseif trace_state then + report_state("no result") + end +end + +local topskip_code = gluecodes.topskip +local baselineskip_code = gluecodes.baselineskip + +function mixedcolumns.getsplit(result,n) + if not result then + report_state("flush, column %s, no result",n) + return + end + local r = result.results[n] + if not r then + report_state("flush, column %s, empty",n) + end + local h = r.head + if not 
h then + return new_glue(result.originalwidth) + end + + h.prev = nil -- move up + local strutht = result.strutht + local strutdp = result.strutdp + local lineheight = strutht + strutdp + + local v = new_vlist() + v.head = h + + -- local v = vpack(h,"exactly",height) + + if result.alternative == v_global then -- option + result.height = result.maxheight + end + + local ht = 0 + local dp = 0 + local wd = result.originalwidth + + local grid = result.grid + + if grid then + ht = lineheight * math.ceil(result.height/lineheight) - strutdp + dp = strutdp + else + ht = result.height + dp = result.depth + end + + v.width = wd + v.height = ht + v.depth = dp + + if trace_state then + local id = h.id + if id == hlist_code then + report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",nodes.toutf(h.list)) + else + report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"head node",nodecodes[id]) + end + end + + for c, list in next, r.inserts do + -- tex.setbox("global",c,vpack(nodes.concat(list))) + -- tex.setbox(c,vpack(nodes.concat(list))) + texbox[c] = vpack(nodes.concat(list)) + r.inserts[c] = nil + end + + return v +end + +function mixedcolumns.getrest(result) + local rest = result and result.rest + result.rest = nil -- to be sure + return rest +end + +function mixedcolumns.getlist(result) + local originalhead = result and result.originalhead + result.originalhead = nil -- to be sure + return originalhead +end + +function mixedcolumns.cleanup(result) + local discarded = result.discarded + for i=1,#discarded do + freenode(discarded[i]) + end + result.discarded = { } +end + +-- interface -- + +local result + +function commands.mixsetsplit(specification) + if result then + for k, v in next, specification do + result[k] = v + end + result = mixedcolumns.setsplit(result) + else + result = mixedcolumns.setsplit(specification) + end +end + +function commands.mixgetsplit(n) + if result then + context(mixedcolumns.getsplit(result,n)) + end +end + +function commands.mixfinalize() + if result then + mixedcolumns.finalize(result) + end +end + +function commands.mixflushrest() + if result then + context(mixedcolumns.getrest(result)) + end +end + +function commands.mixflushlist() + if result then + context(mixedcolumns.getlist(result)) + end +end + +function commands.mixstate() + context(result and result.rest and 1 or 0) +end + +function commands.mixcleanup() + if result then + mixedcolumns.cleanup(result) + result = nil + end +end diff --git a/tex/context/base/page-pst.lua b/tex/context/base/page-pst.lua index 8586830cf..1256d4067 100644 --- a/tex/context/base/page-pst.lua +++ b/tex/context/base/page-pst.lua @@ -1,78 +1,78 @@ -if not modules then modules = { } end modules ['page-pst'] = { - version = 1.001, - comment = "companion to page-pst.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: adapt message - -local format, validstring = string.format, string.valid -local sortedkeys = table.sortedkeys - -local cache = { } - -local function flush(page) - local c = cache[page] - if c then - for i=1,#c do - context.viafile(c[i],format("page.%s",validstring(page,"nopage"))) - end - cache[page] = nil - end -end - -local function setnextpage() - local n = next(cache) and sortedkeys(cache)[1] - if not n then - n = 0 -- nothing in the cache - elseif n == 0 then - n = -1 -- generic buffer (0) - elseif n > 0 
then - -- upcoming page (realpageno) - end - tex.setcount("global","c_page_postponed_blocks_next_page",n) -end - -function commands.flushpostponedblocks(page) - -- we need to flush previously pending pages as well and the zero - -- slot is the generic one so that one is always flushed - local t = sortedkeys(cache) - local p = tonumber(page) or tex.count.realpageno or 0 - for i=1,#t do - local ti = t[i] - if ti <= p then - flush(ti) - else - break - end - end - setnextpage() -end - -function commands.registerpostponedblock(page) - if type(page) == "string" then - if string.find(page,"^+") then - page = tex.count.realpageno + (tonumber(page) or 1) -- future delta page - else - page = tonumber(page) or 0 -- preferred page or otherwise first possible occasion - end - end - if not page then - page = 0 - end - local c = cache[page] - if not c then - c = { } - cache[page] = c - end - c[#c+1] = buffers.raw("postponedblock") - buffers.erase("postponedblock") - if page == 0 then - interfaces.showmessage("layouts",3,#c) - else - interfaces.showmessage("layouts",3,string.format("%s (realpage: %s)",#c,page)) - end - setnextpage() -end +if not modules then modules = { } end modules ['page-pst'] = { + version = 1.001, + comment = "companion to page-pst.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: adapt message + +local format, validstring = string.format, string.valid +local sortedkeys = table.sortedkeys + +local cache = { } + +local function flush(page) + local c = cache[page] + if c then + for i=1,#c do + context.viafile(c[i],format("page.%s",validstring(page,"nopage"))) + end + cache[page] = nil + end +end + +local function setnextpage() + local n = next(cache) and sortedkeys(cache)[1] + if not n then + n = 0 -- nothing in the cache + elseif n == 0 then + n = -1 -- generic buffer (0) + elseif n > 0 then + -- upcoming page (realpageno) + end + tex.setcount("global","c_page_postponed_blocks_next_page",n) +end + +function commands.flushpostponedblocks(page) + -- we need to flush previously pending pages as well and the zero + -- slot is the generic one so that one is always flushed + local t = sortedkeys(cache) + local p = tonumber(page) or tex.count.realpageno or 0 + for i=1,#t do + local ti = t[i] + if ti <= p then + flush(ti) + else + break + end + end + setnextpage() +end + +function commands.registerpostponedblock(page) + if type(page) == "string" then + if string.find(page,"^+") then + page = tex.count.realpageno + (tonumber(page) or 1) -- future delta page + else + page = tonumber(page) or 0 -- preferred page or otherwise first possible occasion + end + end + if not page then + page = 0 + end + local c = cache[page] + if not c then + c = { } + cache[page] = c + end + c[#c+1] = buffers.raw("postponedblock") + buffers.erase("postponedblock") + if page == 0 then + interfaces.showmessage("layouts",3,#c) + else + interfaces.showmessage("layouts",3,string.format("%s (realpage: %s)",#c,page)) + end + setnextpage() +end diff --git a/tex/context/base/page-str.lua b/tex/context/base/page-str.lua index f6314657f..b9b5086cf 100644 --- a/tex/context/base/page-str.lua +++ b/tex/context/base/page-str.lua @@ -1,232 +1,232 @@ -if not modules then modules = { } end modules ['page-str'] = { - version = 1.001, - comment = "companion to page-str.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related 
readme files" -} - --- streams -> managers.streams - --- work in progresss .. unfinished - -local concat, insert, remove = table.concat, table.insert, table.remove - -local find_tail, write_node, free_node, copy_nodelist = node.slide, node.write, node.free, node.copy_list -local vpack_nodelist, hpack_nodelist = node.vpack, node.hpack -local texdimen, texbox = tex.dimen, tex.box -local settings_to_array = utilities.parsers.settings_to_array - -local nodes, node = nodes, node - -local nodepool = nodes.pool -local tasks = nodes.tasks - -local new_kern = nodepool.kern -local new_glyph = nodepool.glyph - -local trace_collecting = false trackers.register("streams.collecting", function(v) trace_collecting = v end) -local trace_flushing = false trackers.register("streams.flushing", function(v) trace_flushing = v end) - -local report_streams = logs.reporter("streams") - -streams = streams or { } -- might move to the builders namespace -local streams = streams - -local data, name, stack = { }, nil, { } - -function streams.enable(newname) - if newname == "default" then - name = nil - else - name = newname - end -end - -function streams.disable() - name = stack[#stack] -end - -function streams.start(newname) - insert(stack,name) - name = newname -end - -function streams.stop(newname) - name = remove(stack) -end - -function streams.collect(head,where) - if name and head and name ~= "default" then - local tail = node.slide(head) - local dana = data[name] - if not dana then - dana = { } - data[name] = dana - end - local last = dana[#dana] - if last then - local tail = find_tail(last) - tail.next, head.prev = head, tail - elseif last == false then - dana[#dana] = head - else - dana[1] = head - end - if trace_collecting then - report_streams("appending snippet %a to slot %s",name,#dana) - end - return nil, true - else - return head, false - end -end - -function streams.push(thename) - if not thename or thename == "" then - thename = name - end - if thename and thename ~= "" then - local dana = data[thename] - if dana then - dana[#dana+1] = false - if trace_collecting then - report_streams("pushing snippet %a",thename) - end - end - end -end - -function streams.flush(name,copy) -- problem: we need to migrate afterwards - local dana = data[name] - if dana then - local dn = #dana - if dn == 0 then - -- nothing to flush - elseif copy then - if trace_flushing then - report_streams("flushing copies of %s slots of %a",dn,name) - end - for i=1,dn do - local di = dana[i] - if di then - write_node(copy_nodelist(di.list)) -- list, will be option - end - end - if copy then - data[name] = nil - end - else - if trace_flushing then - report_streams("flushing %s slots of %a",dn,name) - end - for i=1,dn do - local di = dana[i] - if di then - write_node(di.list) -- list, will be option - di.list = nil - free_node(di) - end - end - end - end -end - -function streams.synchronize(list) -- this is an experiment ! 
- -- we don't optimize this as we want to trace in detail - list = settings_to_array(list) - local max = 0 - if trace_flushing then - report_streams("synchronizing list: % t",list) - end - for i=1,#list do - local dana = data[list[i]] - if dana then - local n = #dana - if n > max then - max = n - end - end - end - if trace_flushing then - report_streams("maximum number of slots: %s",max) - end - for m=1,max do - local height, depth = 0, 0 - for i=1,#list do - local name = list[i] - local dana = data[name] - local slot = dana[m] - if slot then - local vbox = vpack_nodelist(slot) - local ht, dp = vbox.height, vbox.depth - if ht > height then - height = ht - end - if dp > depth then - depth = dp - end - dana[m] = vbox - if trace_flushing then - report_streams("slot %s of %a is packed to height %p and depth %p",m,name,ht,dp) - end - end - end - if trace_flushing then - report_streams("slot %s has max height %p and max depth %p",m,height,depth) - end - local strutht, strutdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth - local struthtdp = strutht + strutdp - for i=1,#list do - local name = list[i] - local dana = data[name] - local vbox = dana[m] - if vbox then - local delta_height = height - vbox.height - local delta_depth = depth - vbox.depth - if delta_height > 0 or delta_depth > 0 then - if false then - -- actually we need to add glue and repack - vbox.height, vbox.depth = height, depth - if trace_flushing then - report_streams("slot %s of %a with delta (%p,%p) is compensated",m,i,delta_height,delta_depth) - end - else - -- this is not yet ok as we also need to keep an eye on vertical spacing - -- so we might need to do some splitting or whatever - local tail = vbox.list and find_tail(vbox.list) - local n, delta = 0, delta_height -- for tracing - while delta > 0 do - -- we need to add some interline penalties - local line = copy_nodelist(tex.box.strutbox) - line.height, line.depth = strutht, strutdp - if tail then - tail.next, line.prev = line, tail - end - tail = line - n, delta = n +1, delta - struthtdp - end - dana[m] = vpack_nodelist(vbox.list) - vbox.list = nil - free_node(vbox) - if trace_flushing then - report_streams("slot %s:%s with delta (%p,%p) is compensated by %s lines",m,i,delta_height,delta_depth,n) - end - end - end - else - -- make dummy - end - end - end -end - -tasks.appendaction("mvlbuilders", "normalizers", "streams.collect") - -tasks.disableaction("mvlbuilders", "streams.collect") - -function streams.initialize() - tasks.enableaction ("mvlbuilders", "streams.collect") -end - --- todo: remove empty last { }'s +if not modules then modules = { } end modules ['page-str'] = { + version = 1.001, + comment = "companion to page-str.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- streams -> managers.streams + +-- work in progresss .. 
unfinished + +local concat, insert, remove = table.concat, table.insert, table.remove + +local find_tail, write_node, free_node, copy_nodelist = node.slide, node.write, node.free, node.copy_list +local vpack_nodelist, hpack_nodelist = node.vpack, node.hpack +local texdimen, texbox = tex.dimen, tex.box +local settings_to_array = utilities.parsers.settings_to_array + +local nodes, node = nodes, node + +local nodepool = nodes.pool +local tasks = nodes.tasks + +local new_kern = nodepool.kern +local new_glyph = nodepool.glyph + +local trace_collecting = false trackers.register("streams.collecting", function(v) trace_collecting = v end) +local trace_flushing = false trackers.register("streams.flushing", function(v) trace_flushing = v end) + +local report_streams = logs.reporter("streams") + +streams = streams or { } -- might move to the builders namespace +local streams = streams + +local data, name, stack = { }, nil, { } + +function streams.enable(newname) + if newname == "default" then + name = nil + else + name = newname + end +end + +function streams.disable() + name = stack[#stack] +end + +function streams.start(newname) + insert(stack,name) + name = newname +end + +function streams.stop(newname) + name = remove(stack) +end + +function streams.collect(head,where) + if name and head and name ~= "default" then + local tail = node.slide(head) + local dana = data[name] + if not dana then + dana = { } + data[name] = dana + end + local last = dana[#dana] + if last then + local tail = find_tail(last) + tail.next, head.prev = head, tail + elseif last == false then + dana[#dana] = head + else + dana[1] = head + end + if trace_collecting then + report_streams("appending snippet %a to slot %s",name,#dana) + end + return nil, true + else + return head, false + end +end + +function streams.push(thename) + if not thename or thename == "" then + thename = name + end + if thename and thename ~= "" then + local dana = data[thename] + if dana then + dana[#dana+1] = false + if trace_collecting then + report_streams("pushing snippet %a",thename) + end + end + end +end + +function streams.flush(name,copy) -- problem: we need to migrate afterwards + local dana = data[name] + if dana then + local dn = #dana + if dn == 0 then + -- nothing to flush + elseif copy then + if trace_flushing then + report_streams("flushing copies of %s slots of %a",dn,name) + end + for i=1,dn do + local di = dana[i] + if di then + write_node(copy_nodelist(di.list)) -- list, will be option + end + end + if copy then + data[name] = nil + end + else + if trace_flushing then + report_streams("flushing %s slots of %a",dn,name) + end + for i=1,dn do + local di = dana[i] + if di then + write_node(di.list) -- list, will be option + di.list = nil + free_node(di) + end + end + end + end +end + +function streams.synchronize(list) -- this is an experiment ! 
+ -- we don't optimize this as we want to trace in detail + list = settings_to_array(list) + local max = 0 + if trace_flushing then + report_streams("synchronizing list: % t",list) + end + for i=1,#list do + local dana = data[list[i]] + if dana then + local n = #dana + if n > max then + max = n + end + end + end + if trace_flushing then + report_streams("maximum number of slots: %s",max) + end + for m=1,max do + local height, depth = 0, 0 + for i=1,#list do + local name = list[i] + local dana = data[name] + local slot = dana[m] + if slot then + local vbox = vpack_nodelist(slot) + local ht, dp = vbox.height, vbox.depth + if ht > height then + height = ht + end + if dp > depth then + depth = dp + end + dana[m] = vbox + if trace_flushing then + report_streams("slot %s of %a is packed to height %p and depth %p",m,name,ht,dp) + end + end + end + if trace_flushing then + report_streams("slot %s has max height %p and max depth %p",m,height,depth) + end + local strutht, strutdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth + local struthtdp = strutht + strutdp + for i=1,#list do + local name = list[i] + local dana = data[name] + local vbox = dana[m] + if vbox then + local delta_height = height - vbox.height + local delta_depth = depth - vbox.depth + if delta_height > 0 or delta_depth > 0 then + if false then + -- actually we need to add glue and repack + vbox.height, vbox.depth = height, depth + if trace_flushing then + report_streams("slot %s of %a with delta (%p,%p) is compensated",m,i,delta_height,delta_depth) + end + else + -- this is not yet ok as we also need to keep an eye on vertical spacing + -- so we might need to do some splitting or whatever + local tail = vbox.list and find_tail(vbox.list) + local n, delta = 0, delta_height -- for tracing + while delta > 0 do + -- we need to add some interline penalties + local line = copy_nodelist(tex.box.strutbox) + line.height, line.depth = strutht, strutdp + if tail then + tail.next, line.prev = line, tail + end + tail = line + n, delta = n +1, delta - struthtdp + end + dana[m] = vpack_nodelist(vbox.list) + vbox.list = nil + free_node(vbox) + if trace_flushing then + report_streams("slot %s:%s with delta (%p,%p) is compensated by %s lines",m,i,delta_height,delta_depth,n) + end + end + end + else + -- make dummy + end + end + end +end + +tasks.appendaction("mvlbuilders", "normalizers", "streams.collect") + +tasks.disableaction("mvlbuilders", "streams.collect") + +function streams.initialize() + tasks.enableaction ("mvlbuilders", "streams.collect") +end + +-- todo: remove empty last { }'s diff --git a/tex/context/base/regi-8859-1.lua b/tex/context/base/regi-8859-1.lua index ff2182afa..2a3caea54 100644 --- a/tex/context/base/regi-8859-1.lua +++ b/tex/context/base/regi-8859-1.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-1'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 
0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, - 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, - 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF, - 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, - 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF -} +if not modules then modules = { } end modules ['regi-8859-1'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, + 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 
0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, + 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF, + 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, + 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF +} diff --git a/tex/context/base/regi-8859-10.lua b/tex/context/base/regi-8859-10.lua index f23744b4a..1d3888c9e 100644 --- a/tex/context/base/regi-8859-10.lua +++ b/tex/context/base/regi-8859-10.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-10'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x0104, 0x0112, 0x0122, 0x012A, 0x0128, 0x0136, 0x00A7, 0x013B, 0x0110, 0x0160, 0x0166, 0x017D, 0x00AD, 0x016A, 0x014A, - 0x00B0, 0x0105, 0x0113, 0x0123, 0x012B, 0x0129, 0x0137, 0x00B7, 0x013C, 0x0111, 0x0161, 0x0167, 0x017E, 0x2015, 0x016B, 0x014B, - 0x0100, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x012E, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x0116, 0x00CD, 0x00CE, 0x00CF, - 0x00D0, 0x0145, 0x014C, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x0168, 0x00D8, 0x0172, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF, - 0x0101, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x012F, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x0117, 0x00ED, 0x00EE, 0x00EF, - 0x00F0, 0x0146, 0x014D, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x0169, 0x00F8, 0x0173, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x0138 -} +if not modules then modules = { } end modules ['regi-8859-10'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 
0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x0104, 0x0112, 0x0122, 0x012A, 0x0128, 0x0136, 0x00A7, 0x013B, 0x0110, 0x0160, 0x0166, 0x017D, 0x00AD, 0x016A, 0x014A, + 0x00B0, 0x0105, 0x0113, 0x0123, 0x012B, 0x0129, 0x0137, 0x00B7, 0x013C, 0x0111, 0x0161, 0x0167, 0x017E, 0x2015, 0x016B, 0x014B, + 0x0100, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x012E, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x0116, 0x00CD, 0x00CE, 0x00CF, + 0x00D0, 0x0145, 0x014C, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x0168, 0x00D8, 0x0172, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF, + 0x0101, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x012F, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x0117, 0x00ED, 0x00EE, 0x00EF, + 0x00F0, 0x0146, 0x014D, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x0169, 0x00F8, 0x0173, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x0138 +} diff --git a/tex/context/base/regi-8859-11.lua b/tex/context/base/regi-8859-11.lua index 54e5626c2..f7a87efe9 100644 --- a/tex/context/base/regi-8859-11.lua +++ b/tex/context/base/regi-8859-11.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-11'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 
0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x0E01, 0x0E02, 0x0E03, 0x0E04, 0x0E05, 0x0E06, 0x0E07, 0x0E08, 0x0E09, 0x0E0A, 0x0E0B, 0x0E0C, 0x0E0D, 0x0E0E, 0x0E0F, - 0x0E10, 0x0E11, 0x0E12, 0x0E13, 0x0E14, 0x0E15, 0x0E16, 0x0E17, 0x0E18, 0x0E19, 0x0E1A, 0x0E1B, 0x0E1C, 0x0E1D, 0x0E1E, 0x0E1F, - 0x0E20, 0x0E21, 0x0E22, 0x0E23, 0x0E24, 0x0E25, 0x0E26, 0x0E27, 0x0E28, 0x0E29, 0x0E2A, 0x0E2B, 0x0E2C, 0x0E2D, 0x0E2E, 0x0E2F, - 0x0E30, 0x0E31, 0x0E32, 0x0E33, 0x0E34, 0x0E35, 0x0E36, 0x0E37, 0x0E38, 0x0E39, 0x0E3A, 0x0000, 0x0000, 0x0000, 0x0000, 0x0E3F, - 0x0E40, 0x0E41, 0x0E42, 0x0E43, 0x0E44, 0x0E45, 0x0E46, 0x0E47, 0x0E48, 0x0E49, 0x0E4A, 0x0E4B, 0x0E4C, 0x0E4D, 0x0E4E, 0x0E4F, - 0x0E50, 0x0E51, 0x0E52, 0x0E53, 0x0E54, 0x0E55, 0x0E56, 0x0E57, 0x0E58, 0x0E59, 0x0E5A, 0x0E5B, 0x0000, 0x0000, 0x0000, 0x0000 -} +if not modules then modules = { } end modules ['regi-8859-11'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x0E01, 0x0E02, 0x0E03, 0x0E04, 0x0E05, 0x0E06, 0x0E07, 0x0E08, 0x0E09, 0x0E0A, 0x0E0B, 0x0E0C, 0x0E0D, 0x0E0E, 0x0E0F, + 0x0E10, 0x0E11, 0x0E12, 0x0E13, 0x0E14, 0x0E15, 0x0E16, 0x0E17, 0x0E18, 0x0E19, 0x0E1A, 0x0E1B, 0x0E1C, 0x0E1D, 0x0E1E, 0x0E1F, + 0x0E20, 0x0E21, 0x0E22, 0x0E23, 0x0E24, 0x0E25, 0x0E26, 0x0E27, 0x0E28, 0x0E29, 0x0E2A, 0x0E2B, 0x0E2C, 0x0E2D, 0x0E2E, 0x0E2F, + 0x0E30, 0x0E31, 0x0E32, 0x0E33, 0x0E34, 0x0E35, 0x0E36, 0x0E37, 0x0E38, 0x0E39, 0x0E3A, 0x0000, 0x0000, 0x0000, 0x0000, 0x0E3F, + 0x0E40, 0x0E41, 0x0E42, 0x0E43, 0x0E44, 0x0E45, 0x0E46, 0x0E47, 0x0E48, 0x0E49, 0x0E4A, 0x0E4B, 0x0E4C, 0x0E4D, 0x0E4E, 0x0E4F, + 0x0E50, 0x0E51, 0x0E52, 0x0E53, 0x0E54, 0x0E55, 0x0E56, 0x0E57, 0x0E58, 0x0E59, 0x0E5A, 0x0E5B, 0x0000, 0x0000, 0x0000, 0x0000 +} diff --git a/tex/context/base/regi-8859-13.lua b/tex/context/base/regi-8859-13.lua index 1646133b5..163b441c7 100644 --- a/tex/context/base/regi-8859-13.lua +++ b/tex/context/base/regi-8859-13.lua @@ -1,26 +1,26 @@ -if 
not modules then modules = { } end modules ['regi-8859-13'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x201D, 0x00A2, 0x00A3, 0x00A4, 0x201E, 0x00A6, 0x00A7, 0x00D8, 0x00A9, 0x0156, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00C6, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x201C, 0x00B5, 0x00B6, 0x00B7, 0x00F8, 0x00B9, 0x0157, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00E6, - 0x0104, 0x012E, 0x0100, 0x0106, 0x00C4, 0x00C5, 0x0118, 0x0112, 0x010C, 0x00C9, 0x0179, 0x0116, 0x0122, 0x0136, 0x012A, 0x013B, - 0x0160, 0x0143, 0x0145, 0x00D3, 0x014C, 0x00D5, 0x00D6, 0x00D7, 0x0172, 0x0141, 0x015A, 0x016A, 0x00DC, 0x017B, 0x017D, 0x00DF, - 0x0105, 0x012F, 0x0101, 0x0107, 0x00E4, 0x00E5, 0x0119, 0x0113, 0x010D, 0x00E9, 0x017A, 0x0117, 0x0123, 0x0137, 0x012B, 0x013C, - 0x0161, 0x0144, 0x0146, 0x00F3, 0x014D, 0x00F5, 0x00F6, 0x00F7, 0x0173, 0x0142, 0x015B, 0x016B, 0x00FC, 0x017C, 0x017E, 0x2019 -} +if not modules then modules = { } end modules ['regi-8859-13'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 
0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x201D, 0x00A2, 0x00A3, 0x00A4, 0x201E, 0x00A6, 0x00A7, 0x00D8, 0x00A9, 0x0156, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00C6, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x201C, 0x00B5, 0x00B6, 0x00B7, 0x00F8, 0x00B9, 0x0157, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00E6, + 0x0104, 0x012E, 0x0100, 0x0106, 0x00C4, 0x00C5, 0x0118, 0x0112, 0x010C, 0x00C9, 0x0179, 0x0116, 0x0122, 0x0136, 0x012A, 0x013B, + 0x0160, 0x0143, 0x0145, 0x00D3, 0x014C, 0x00D5, 0x00D6, 0x00D7, 0x0172, 0x0141, 0x015A, 0x016A, 0x00DC, 0x017B, 0x017D, 0x00DF, + 0x0105, 0x012F, 0x0101, 0x0107, 0x00E4, 0x00E5, 0x0119, 0x0113, 0x010D, 0x00E9, 0x017A, 0x0117, 0x0123, 0x0137, 0x012B, 0x013C, + 0x0161, 0x0144, 0x0146, 0x00F3, 0x014D, 0x00F5, 0x00F6, 0x00F7, 0x0173, 0x0142, 0x015B, 0x016B, 0x00FC, 0x017C, 0x017E, 0x2019 +} diff --git a/tex/context/base/regi-8859-14.lua b/tex/context/base/regi-8859-14.lua index 2b0c68814..b69eaecea 100644 --- a/tex/context/base/regi-8859-14.lua +++ b/tex/context/base/regi-8859-14.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-14'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x1E02, 0x1E03, 0x00A3, 0x010A, 0x010B, 0x1E0A, 0x00A7, 0x1E80, 0x00A9, 0x1E82, 0x1E0B, 0x1EF2, 0x00AD, 0x00AE, 0x0178, - 0x1E1E, 0x1E1F, 0x0120, 0x0121, 0x1E40, 0x1E41, 0x00B6, 0x1E56, 0x1E81, 0x1E57, 0x1E83, 0x1E60, 0x1EF3, 0x1E84, 0x1E85, 0x1E61, - 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, - 0x0174, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x1E6A, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x0176, 0x00DF, - 
0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, - 0x0175, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x1E6B, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x0177, 0x00FF -} +if not modules then modules = { } end modules ['regi-8859-14'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x1E02, 0x1E03, 0x00A3, 0x010A, 0x010B, 0x1E0A, 0x00A7, 0x1E80, 0x00A9, 0x1E82, 0x1E0B, 0x1EF2, 0x00AD, 0x00AE, 0x0178, + 0x1E1E, 0x1E1F, 0x0120, 0x0121, 0x1E40, 0x1E41, 0x00B6, 0x1E56, 0x1E81, 0x1E57, 0x1E83, 0x1E60, 0x1EF3, 0x1E84, 0x1E85, 0x1E61, + 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, + 0x0174, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x1E6A, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x0176, 0x00DF, + 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, + 0x0175, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x1E6B, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x0177, 0x00FF +} diff --git a/tex/context/base/regi-8859-15.lua b/tex/context/base/regi-8859-15.lua index 48861f396..3bc1d527a 100644 --- a/tex/context/base/regi-8859-15.lua +++ b/tex/context/base/regi-8859-15.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-15'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 
0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x20AC, 0x00A5, 0x0160, 0x00A7, 0x0161, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x017D, 0x00B5, 0x00B6, 0x00B7, 0x017E, 0x00B9, 0x00BA, 0x00BB, 0x0152, 0x0153, 0x0178, 0x00BF, - 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, - 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF, - 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, - 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF -} +if not modules then modules = { } end modules ['regi-8859-15'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x20AC, 0x00A5, 0x0160, 0x00A7, 0x0161, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x017D, 0x00B5, 0x00B6, 0x00B7, 0x017E, 0x00B9, 0x00BA, 0x00BB, 0x0152, 0x0153, 0x0178, 0x00BF, + 
0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, + 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF, + 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, + 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF +} diff --git a/tex/context/base/regi-8859-16.lua b/tex/context/base/regi-8859-16.lua index e122a2042..c2a235363 100644 --- a/tex/context/base/regi-8859-16.lua +++ b/tex/context/base/regi-8859-16.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-16'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x0104, 0x0105, 0x0141, 0x20AC, 0x201E, 0x0160, 0x00A7, 0x0161, 0x00A9, 0x0218, 0x00AB, 0x0179, 0x00AD, 0x017A, 0x017B, - 0x00B0, 0x00B1, 0x010C, 0x0142, 0x017D, 0x201D, 0x00B6, 0x00B7, 0x017E, 0x010D, 0x0219, 0x00BB, 0x0152, 0x0153, 0x0178, 0x017C, - 0x00C0, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0106, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, - 0x0110, 0x0143, 0x00D2, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x015A, 0x0170, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0118, 0x021A, 0x00DF, - 0x00E0, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x0107, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, - 0x0111, 0x0144, 0x00F2, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x015B, 0x0171, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0119, 0x021B, 0x00FF -} +if not modules then modules = { } end modules ['regi-8859-16'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 
0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x0104, 0x0105, 0x0141, 0x20AC, 0x201E, 0x0160, 0x00A7, 0x0161, 0x00A9, 0x0218, 0x00AB, 0x0179, 0x00AD, 0x017A, 0x017B, + 0x00B0, 0x00B1, 0x010C, 0x0142, 0x017D, 0x201D, 0x00B6, 0x00B7, 0x017E, 0x010D, 0x0219, 0x00BB, 0x0152, 0x0153, 0x0178, 0x017C, + 0x00C0, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0106, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, + 0x0110, 0x0143, 0x00D2, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x015A, 0x0170, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0118, 0x021A, 0x00DF, + 0x00E0, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x0107, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, + 0x0111, 0x0144, 0x00F2, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x015B, 0x0171, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0119, 0x021B, 0x00FF +} diff --git a/tex/context/base/regi-8859-2.lua b/tex/context/base/regi-8859-2.lua index affd6c3ca..f0fe5f404 100644 --- a/tex/context/base/regi-8859-2.lua +++ b/tex/context/base/regi-8859-2.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-2'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 
0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x0104, 0x02D8, 0x0141, 0x00A4, 0x013D, 0x015A, 0x00A7, 0x00A8, 0x0160, 0x015E, 0x0164, 0x0179, 0x00AD, 0x017D, 0x017B, - 0x00B0, 0x0105, 0x02DB, 0x0142, 0x00B4, 0x013E, 0x015B, 0x02C7, 0x00B8, 0x0161, 0x015F, 0x0165, 0x017A, 0x02DD, 0x017E, 0x017C, - 0x0154, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0139, 0x0106, 0x00C7, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x011A, 0x00CD, 0x00CE, 0x010E, - 0x0110, 0x0143, 0x0147, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x00D7, 0x0158, 0x016E, 0x00DA, 0x0170, 0x00DC, 0x00DD, 0x0162, 0x00DF, - 0x0155, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x013A, 0x0107, 0x00E7, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x011B, 0x00ED, 0x00EE, 0x010F, - 0x0111, 0x0144, 0x0148, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x00F7, 0x0159, 0x016F, 0x00FA, 0x0171, 0x00FC, 0x00FD, 0x0163, 0x02D9 -} +if not modules then modules = { } end modules ['regi-8859-2'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x0104, 0x02D8, 0x0141, 0x00A4, 0x013D, 0x015A, 0x00A7, 0x00A8, 0x0160, 0x015E, 0x0164, 0x0179, 0x00AD, 0x017D, 0x017B, + 0x00B0, 0x0105, 0x02DB, 0x0142, 0x00B4, 0x013E, 0x015B, 0x02C7, 0x00B8, 0x0161, 0x015F, 0x0165, 0x017A, 0x02DD, 0x017E, 0x017C, + 0x0154, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0139, 0x0106, 0x00C7, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x011A, 0x00CD, 0x00CE, 0x010E, + 0x0110, 0x0143, 0x0147, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x00D7, 0x0158, 0x016E, 0x00DA, 0x0170, 0x00DC, 0x00DD, 0x0162, 0x00DF, + 0x0155, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x013A, 0x0107, 0x00E7, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x011B, 0x00ED, 0x00EE, 0x010F, + 0x0111, 0x0144, 0x0148, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x00F7, 0x0159, 0x016F, 0x00FA, 0x0171, 0x00FC, 0x00FD, 0x0163, 0x02D9 +} diff --git a/tex/context/base/regi-8859-3.lua b/tex/context/base/regi-8859-3.lua index 4b5c54b4f..e84220bde 100644 --- a/tex/context/base/regi-8859-3.lua +++ 
b/tex/context/base/regi-8859-3.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-3'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x0126, 0x02D8, 0x00A3, 0x00A4, 0x0000, 0x0124, 0x00A7, 0x00A8, 0x0130, 0x015E, 0x011E, 0x0134, 0x00AD, 0x0000, 0x017B, - 0x00B0, 0x0127, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x0125, 0x00B7, 0x00B8, 0x0131, 0x015F, 0x011F, 0x0135, 0x00BD, 0x0000, 0x017C, - 0x00C0, 0x00C1, 0x00C2, 0x0000, 0x00C4, 0x010A, 0x0108, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, - 0x0000, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x0120, 0x00D6, 0x00D7, 0x011C, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x016C, 0x015C, 0x00DF, - 0x00E0, 0x00E1, 0x00E2, 0x0000, 0x00E4, 0x010B, 0x0109, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, - 0x0000, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x0121, 0x00F6, 0x00F7, 0x011D, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x016D, 0x015D, 0x02D9 -} +if not modules then modules = { } end modules ['regi-8859-3'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 
0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x0126, 0x02D8, 0x00A3, 0x00A4, 0x0000, 0x0124, 0x00A7, 0x00A8, 0x0130, 0x015E, 0x011E, 0x0134, 0x00AD, 0x0000, 0x017B, + 0x00B0, 0x0127, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x0125, 0x00B7, 0x00B8, 0x0131, 0x015F, 0x011F, 0x0135, 0x00BD, 0x0000, 0x017C, + 0x00C0, 0x00C1, 0x00C2, 0x0000, 0x00C4, 0x010A, 0x0108, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, + 0x0000, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x0120, 0x00D6, 0x00D7, 0x011C, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x016C, 0x015C, 0x00DF, + 0x00E0, 0x00E1, 0x00E2, 0x0000, 0x00E4, 0x010B, 0x0109, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, + 0x0000, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x0121, 0x00F6, 0x00F7, 0x011D, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x016D, 0x015D, 0x02D9 +} diff --git a/tex/context/base/regi-8859-4.lua b/tex/context/base/regi-8859-4.lua index 774ec2e10..9fdc39a40 100644 --- a/tex/context/base/regi-8859-4.lua +++ b/tex/context/base/regi-8859-4.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-4'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x0104, 0x0138, 0x0156, 0x00A4, 0x0128, 0x013B, 0x00A7, 0x00A8, 0x0160, 0x0112, 0x0122, 0x0166, 0x00AD, 0x017D, 0x00AF, - 0x00B0, 0x0105, 0x02DB, 0x0157, 0x00B4, 0x0129, 0x013C, 0x02C7, 0x00B8, 0x0161, 0x0113, 0x0123, 0x0167, 0x014A, 0x017E, 0x014B, - 0x0100, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x012E, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x0116, 0x00CD, 0x00CE, 0x012A, - 0x0110, 0x0145, 0x014C, 0x0136, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x0172, 
0x00DA, 0x00DB, 0x00DC, 0x0168, 0x016A, 0x00DF, - 0x0101, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x012F, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x0117, 0x00ED, 0x00EE, 0x012B, - 0x0111, 0x0146, 0x014D, 0x0137, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x0173, 0x00FA, 0x00FB, 0x00FC, 0x0169, 0x016B, 0x02D9 -} +if not modules then modules = { } end modules ['regi-8859-4'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x0104, 0x0138, 0x0156, 0x00A4, 0x0128, 0x013B, 0x00A7, 0x00A8, 0x0160, 0x0112, 0x0122, 0x0166, 0x00AD, 0x017D, 0x00AF, + 0x00B0, 0x0105, 0x02DB, 0x0157, 0x00B4, 0x0129, 0x013C, 0x02C7, 0x00B8, 0x0161, 0x0113, 0x0123, 0x0167, 0x014A, 0x017E, 0x014B, + 0x0100, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x012E, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x0116, 0x00CD, 0x00CE, 0x012A, + 0x0110, 0x0145, 0x014C, 0x0136, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x0172, 0x00DA, 0x00DB, 0x00DC, 0x0168, 0x016A, 0x00DF, + 0x0101, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x012F, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x0117, 0x00ED, 0x00EE, 0x012B, + 0x0111, 0x0146, 0x014D, 0x0137, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x0173, 0x00FA, 0x00FB, 0x00FC, 0x0169, 0x016B, 0x02D9 +} diff --git a/tex/context/base/regi-8859-5.lua b/tex/context/base/regi-8859-5.lua index 1137f37bb..af35a71b8 100644 --- a/tex/context/base/regi-8859-5.lua +++ b/tex/context/base/regi-8859-5.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-5'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 
0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x0401, 0x0402, 0x0403, 0x0404, 0x0405, 0x0406, 0x0407, 0x0408, 0x0409, 0x040A, 0x040B, 0x040C, 0x00AD, 0x040E, 0x040F, - 0x0410, 0x0411, 0x0412, 0x0413, 0x0414, 0x0415, 0x0416, 0x0417, 0x0418, 0x0419, 0x041A, 0x041B, 0x041C, 0x041D, 0x041E, 0x041F, - 0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427, 0x0428, 0x0429, 0x042A, 0x042B, 0x042C, 0x042D, 0x042E, 0x042F, - 0x0430, 0x0431, 0x0432, 0x0433, 0x0434, 0x0435, 0x0436, 0x0437, 0x0438, 0x0439, 0x043A, 0x043B, 0x043C, 0x043D, 0x043E, 0x043F, - 0x0440, 0x0441, 0x0442, 0x0443, 0x0444, 0x0445, 0x0446, 0x0447, 0x0448, 0x0449, 0x044A, 0x044B, 0x044C, 0x044D, 0x044E, 0x044F, - 0x2116, 0x0451, 0x0452, 0x0453, 0x0454, 0x0455, 0x0456, 0x0457, 0x0458, 0x0459, 0x045A, 0x045B, 0x045C, 0x00A7, 0x045E, 0x045F -} +if not modules then modules = { } end modules ['regi-8859-5'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x0401, 0x0402, 0x0403, 0x0404, 0x0405, 0x0406, 0x0407, 0x0408, 0x0409, 0x040A, 0x040B, 0x040C, 0x00AD, 0x040E, 0x040F, + 0x0410, 0x0411, 0x0412, 0x0413, 0x0414, 0x0415, 0x0416, 0x0417, 0x0418, 0x0419, 0x041A, 
0x041B, 0x041C, 0x041D, 0x041E, 0x041F, + 0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427, 0x0428, 0x0429, 0x042A, 0x042B, 0x042C, 0x042D, 0x042E, 0x042F, + 0x0430, 0x0431, 0x0432, 0x0433, 0x0434, 0x0435, 0x0436, 0x0437, 0x0438, 0x0439, 0x043A, 0x043B, 0x043C, 0x043D, 0x043E, 0x043F, + 0x0440, 0x0441, 0x0442, 0x0443, 0x0444, 0x0445, 0x0446, 0x0447, 0x0448, 0x0449, 0x044A, 0x044B, 0x044C, 0x044D, 0x044E, 0x044F, + 0x2116, 0x0451, 0x0452, 0x0453, 0x0454, 0x0455, 0x0456, 0x0457, 0x0458, 0x0459, 0x045A, 0x045B, 0x045C, 0x00A7, 0x045E, 0x045F +} diff --git a/tex/context/base/regi-8859-6.lua b/tex/context/base/regi-8859-6.lua index 651ae79ff..89ca3ce7f 100644 --- a/tex/context/base/regi-8859-6.lua +++ b/tex/context/base/regi-8859-6.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-6'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x0000, 0x0000, 0x0000, 0x00A4, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x060C, 0x00AD, 0x0000, 0x0000, - 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x061B, 0x0000, 0x0000, 0x0000, 0x061F, - 0x0000, 0x0621, 0x0622, 0x0623, 0x0624, 0x0625, 0x0626, 0x0627, 0x0628, 0x0629, 0x062A, 0x062B, 0x062C, 0x062D, 0x062E, 0x062F, - 0x0630, 0x0631, 0x0632, 0x0633, 0x0634, 0x0635, 0x0636, 0x0637, 0x0638, 0x0639, 0x063A, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, - 0x0640, 0x0641, 0x0642, 0x0643, 0x0644, 0x0645, 0x0646, 0x0647, 0x0648, 0x0649, 0x064A, 0x064B, 0x064C, 0x064D, 0x064E, 0x064F, - 0x0650, 0x0651, 0x0652, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000 -} +if not modules then modules = { } end modules ['regi-8859-6'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 
0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x0000, 0x0000, 0x0000, 0x00A4, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x060C, 0x00AD, 0x0000, 0x0000, + 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x061B, 0x0000, 0x0000, 0x0000, 0x061F, + 0x0000, 0x0621, 0x0622, 0x0623, 0x0624, 0x0625, 0x0626, 0x0627, 0x0628, 0x0629, 0x062A, 0x062B, 0x062C, 0x062D, 0x062E, 0x062F, + 0x0630, 0x0631, 0x0632, 0x0633, 0x0634, 0x0635, 0x0636, 0x0637, 0x0638, 0x0639, 0x063A, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, + 0x0640, 0x0641, 0x0642, 0x0643, 0x0644, 0x0645, 0x0646, 0x0647, 0x0648, 0x0649, 0x064A, 0x064B, 0x064C, 0x064D, 0x064E, 0x064F, + 0x0650, 0x0651, 0x0652, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000 +} diff --git a/tex/context/base/regi-8859-7.lua b/tex/context/base/regi-8859-7.lua index 08cbbab6e..8769b0483 100644 --- a/tex/context/base/regi-8859-7.lua +++ b/tex/context/base/regi-8859-7.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-7'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 
0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x2018, 0x2019, 0x00A3, 0x20AC, 0x20AF, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x037A, 0x00AB, 0x00AC, 0x00AD, 0x0000, 0x2015, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x0384, 0x0385, 0x0386, 0x00B7, 0x0388, 0x0389, 0x038A, 0x00BB, 0x038C, 0x00BD, 0x038E, 0x038F, - 0x0390, 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, 0x0396, 0x0397, 0x0398, 0x0399, 0x039A, 0x039B, 0x039C, 0x039D, 0x039E, 0x039F, - 0x03A0, 0x03A1, 0x0000, 0x03A3, 0x03A4, 0x03A5, 0x03A6, 0x03A7, 0x03A8, 0x03A9, 0x03AA, 0x03AB, 0x03AC, 0x03AD, 0x03AE, 0x03AF, - 0x03B0, 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5, 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA, 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF, - 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4, 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9, 0x03CA, 0x03CB, 0x03CC, 0x03CD, 0x03CE, 0x0000 -} +if not modules then modules = { } end modules ['regi-8859-7'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x2018, 0x2019, 0x00A3, 0x20AC, 0x20AF, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x037A, 0x00AB, 0x00AC, 0x00AD, 0x0000, 0x2015, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x0384, 0x0385, 0x0386, 0x00B7, 0x0388, 0x0389, 0x038A, 0x00BB, 0x038C, 0x00BD, 0x038E, 0x038F, + 0x0390, 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, 0x0396, 0x0397, 0x0398, 0x0399, 0x039A, 0x039B, 0x039C, 0x039D, 0x039E, 0x039F, + 0x03A0, 0x03A1, 0x0000, 0x03A3, 0x03A4, 0x03A5, 0x03A6, 0x03A7, 0x03A8, 0x03A9, 0x03AA, 0x03AB, 0x03AC, 0x03AD, 0x03AE, 0x03AF, + 0x03B0, 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5, 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA, 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF, + 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4, 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9, 0x03CA, 0x03CB, 0x03CC, 0x03CD, 0x03CE, 0x0000 +} diff --git a/tex/context/base/regi-8859-8.lua b/tex/context/base/regi-8859-8.lua index b69609991..e72d7c7fb 100644 --- 
a/tex/context/base/regi-8859-8.lua +++ b/tex/context/base/regi-8859-8.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-8'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x0000, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00D7, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00F7, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x0000, - 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, - 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x2017, - 0x05D0, 0x05D1, 0x05D2, 0x05D3, 0x05D4, 0x05D5, 0x05D6, 0x05D7, 0x05D8, 0x05D9, 0x05DA, 0x05DB, 0x05DC, 0x05DD, 0x05DE, 0x05DF, - 0x05E0, 0x05E1, 0x05E2, 0x05E3, 0x05E4, 0x05E5, 0x05E6, 0x05E7, 0x05E8, 0x05E9, 0x05EA, 0x0000, 0x0000, 0x200E, 0x200F, 0x0000 -} +if not modules then modules = { } end modules ['regi-8859-8'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, 
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x0000, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00D7, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00F7, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x0000, + 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, + 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x2017, + 0x05D0, 0x05D1, 0x05D2, 0x05D3, 0x05D4, 0x05D5, 0x05D6, 0x05D7, 0x05D8, 0x05D9, 0x05DA, 0x05DB, 0x05DC, 0x05DD, 0x05DE, 0x05DF, + 0x05E0, 0x05E1, 0x05E2, 0x05E3, 0x05E4, 0x05E5, 0x05E6, 0x05E7, 0x05E8, 0x05E9, 0x05EA, 0x0000, 0x0000, 0x200E, 0x200F, 0x0000 +} diff --git a/tex/context/base/regi-8859-9.lua b/tex/context/base/regi-8859-9.lua index 773307fff..eb9515af9 100644 --- a/tex/context/base/regi-8859-9.lua +++ b/tex/context/base/regi-8859-9.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-8859-9'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, - 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, - 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, - 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, - 0x011E, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 
0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0130, 0x015E, 0x00DF, - 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, - 0x011F, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0131, 0x015F, 0x00FF -} +if not modules then modules = { } end modules ['regi-8859-9'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F, + 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F, + 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, + 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, + 0x011E, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0130, 0x015E, 0x00DF, + 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, + 0x011F, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0131, 0x015F, 0x00FF +} diff --git a/tex/context/base/regi-cp1250.lua b/tex/context/base/regi-cp1250.lua index 00d55d1b8..80a4b8639 100644 --- a/tex/context/base/regi-cp1250.lua +++ b/tex/context/base/regi-cp1250.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-cp1250'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 
0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x20AC, 0x0000, 0x201A, 0x0000, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0160, 0x2039, 0x015A, 0x0164, 0x017D, 0x0179, - 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0161, 0x203A, 0x015B, 0x0165, 0x017E, 0x017A, - 0x00A0, 0x02C7, 0x02D8, 0x0141, 0x00A4, 0x0104, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x015E, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x017B, - 0x00B0, 0x00B1, 0x02DB, 0x0142, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x0105, 0x015F, 0x00BB, 0x013D, 0x02DD, 0x013E, 0x017C, - 0x0154, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0139, 0x0106, 0x00C7, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x011A, 0x00CD, 0x00CE, 0x010E, - 0x0110, 0x0143, 0x0147, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x00D7, 0x0158, 0x016E, 0x00DA, 0x0170, 0x00DC, 0x00DD, 0x0162, 0x00DF, - 0x0155, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x013A, 0x0107, 0x00E7, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x011B, 0x00ED, 0x00EE, 0x010F, - 0x0111, 0x0144, 0x0148, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x00F7, 0x0159, 0x016F, 0x00FA, 0x0171, 0x00FC, 0x00FD, 0x0163, 0x02D9 -} +if not modules then modules = { } end modules ['regi-cp1250'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x20AC, 0x0000, 0x201A, 0x0000, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0160, 0x2039, 0x015A, 0x0164, 0x017D, 0x0179, + 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0161, 0x203A, 0x015B, 0x0165, 0x017E, 0x017A, + 0x00A0, 0x02C7, 0x02D8, 0x0141, 0x00A4, 0x0104, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x015E, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x017B, + 0x00B0, 0x00B1, 0x02DB, 0x0142, 0x00B4, 0x00B5, 0x00B6, 
0x00B7, 0x00B8, 0x0105, 0x015F, 0x00BB, 0x013D, 0x02DD, 0x013E, 0x017C, + 0x0154, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0139, 0x0106, 0x00C7, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x011A, 0x00CD, 0x00CE, 0x010E, + 0x0110, 0x0143, 0x0147, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x00D7, 0x0158, 0x016E, 0x00DA, 0x0170, 0x00DC, 0x00DD, 0x0162, 0x00DF, + 0x0155, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x013A, 0x0107, 0x00E7, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x011B, 0x00ED, 0x00EE, 0x010F, + 0x0111, 0x0144, 0x0148, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x00F7, 0x0159, 0x016F, 0x00FA, 0x0171, 0x00FC, 0x00FD, 0x0163, 0x02D9 +} diff --git a/tex/context/base/regi-cp1251.lua b/tex/context/base/regi-cp1251.lua index 7bb72e0cc..07f1d81ad 100644 --- a/tex/context/base/regi-cp1251.lua +++ b/tex/context/base/regi-cp1251.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-cp1251'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x0402, 0x0403, 0x201A, 0x0453, 0x201E, 0x2026, 0x2020, 0x2021, 0x20AC, 0x2030, 0x0409, 0x2039, 0x040A, 0x040C, 0x040B, 0x040F, - 0x0452, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0459, 0x203A, 0x045A, 0x045C, 0x045B, 0x045F, - 0x00A0, 0x040E, 0x045E, 0x0408, 0x00A4, 0x0490, 0x00A6, 0x00A7, 0x0401, 0x00A9, 0x0404, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x0407, - 0x00B0, 0x00B1, 0x0406, 0x0456, 0x0491, 0x00B5, 0x00B6, 0x00B7, 0x0451, 0x2116, 0x0454, 0x00BB, 0x0458, 0x0405, 0x0455, 0x0457, - 0x0410, 0x0411, 0x0412, 0x0413, 0x0414, 0x0415, 0x0416, 0x0417, 0x0418, 0x0419, 0x041A, 0x041B, 0x041C, 0x041D, 0x041E, 0x041F, - 0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427, 0x0428, 0x0429, 0x042A, 0x042B, 0x042C, 0x042D, 0x042E, 0x042F, - 0x0430, 0x0431, 0x0432, 0x0433, 0x0434, 0x0435, 0x0436, 0x0437, 0x0438, 0x0439, 0x043A, 0x043B, 0x043C, 0x043D, 0x043E, 0x043F, - 0x0440, 0x0441, 0x0442, 0x0443, 0x0444, 0x0445, 0x0446, 0x0447, 0x0448, 0x0449, 0x044A, 0x044B, 0x044C, 0x044D, 0x044E, 0x044F -} +if not modules then modules = { } end modules ['regi-cp1251'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 
0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x0402, 0x0403, 0x201A, 0x0453, 0x201E, 0x2026, 0x2020, 0x2021, 0x20AC, 0x2030, 0x0409, 0x2039, 0x040A, 0x040C, 0x040B, 0x040F, + 0x0452, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0459, 0x203A, 0x045A, 0x045C, 0x045B, 0x045F, + 0x00A0, 0x040E, 0x045E, 0x0408, 0x00A4, 0x0490, 0x00A6, 0x00A7, 0x0401, 0x00A9, 0x0404, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x0407, + 0x00B0, 0x00B1, 0x0406, 0x0456, 0x0491, 0x00B5, 0x00B6, 0x00B7, 0x0451, 0x2116, 0x0454, 0x00BB, 0x0458, 0x0405, 0x0455, 0x0457, + 0x0410, 0x0411, 0x0412, 0x0413, 0x0414, 0x0415, 0x0416, 0x0417, 0x0418, 0x0419, 0x041A, 0x041B, 0x041C, 0x041D, 0x041E, 0x041F, + 0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427, 0x0428, 0x0429, 0x042A, 0x042B, 0x042C, 0x042D, 0x042E, 0x042F, + 0x0430, 0x0431, 0x0432, 0x0433, 0x0434, 0x0435, 0x0436, 0x0437, 0x0438, 0x0439, 0x043A, 0x043B, 0x043C, 0x043D, 0x043E, 0x043F, + 0x0440, 0x0441, 0x0442, 0x0443, 0x0444, 0x0445, 0x0446, 0x0447, 0x0448, 0x0449, 0x044A, 0x044B, 0x044C, 0x044D, 0x044E, 0x044F +} diff --git a/tex/context/base/regi-cp1252.lua b/tex/context/base/regi-cp1252.lua index 86954c9af..08bd22bf6 100644 --- a/tex/context/base/regi-cp1252.lua +++ b/tex/context/base/regi-cp1252.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-cp1252'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 
0x007E, 0x007F, - 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0160, 0x2039, 0x0152, 0x0000, 0x017D, 0x0000, - 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0161, 0x203A, 0x0153, 0x0000, 0x017E, 0x0178, - 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, - 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, - 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF, - 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, - 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF -} +if not modules then modules = { } end modules ['regi-cp1252'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0160, 0x2039, 0x0152, 0x0000, 0x017D, 0x0000, + 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0161, 0x203A, 0x0153, 0x0000, 0x017E, 0x0178, + 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, + 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, + 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF, + 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, + 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF +} diff --git a/tex/context/base/regi-cp1253.lua b/tex/context/base/regi-cp1253.lua index 31a411efe..d272692cf 100644 
--- a/tex/context/base/regi-cp1253.lua +++ b/tex/context/base/regi-cp1253.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-cp1253'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0000, 0x2039, 0x0000, 0x0000, 0x0000, 0x0000, - 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0000, 0x203A, 0x0000, 0x0000, 0x0000, 0x0000, - 0x00A0, 0x0385, 0x0386, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x0000, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x2015, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x0384, 0x00B5, 0x00B6, 0x00B7, 0x0388, 0x0389, 0x038A, 0x00BB, 0x038C, 0x00BD, 0x038E, 0x038F, - 0x0390, 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, 0x0396, 0x0397, 0x0398, 0x0399, 0x039A, 0x039B, 0x039C, 0x039D, 0x039E, 0x039F, - 0x03A0, 0x03A1, 0x0000, 0x03A3, 0x03A4, 0x03A5, 0x03A6, 0x03A7, 0x03A8, 0x03A9, 0x03AA, 0x03AB, 0x03AC, 0x03AD, 0x03AE, 0x03AF, - 0x03B0, 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5, 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA, 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF, - 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4, 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9, 0x03CA, 0x03CB, 0x03CC, 0x03CD, 0x03CE, 0x0000 -} +if not modules then modules = { } end modules ['regi-cp1253'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 
0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0000, 0x2039, 0x0000, 0x0000, 0x0000, 0x0000, + 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0000, 0x203A, 0x0000, 0x0000, 0x0000, 0x0000, + 0x00A0, 0x0385, 0x0386, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x0000, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x2015, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x0384, 0x00B5, 0x00B6, 0x00B7, 0x0388, 0x0389, 0x038A, 0x00BB, 0x038C, 0x00BD, 0x038E, 0x038F, + 0x0390, 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, 0x0396, 0x0397, 0x0398, 0x0399, 0x039A, 0x039B, 0x039C, 0x039D, 0x039E, 0x039F, + 0x03A0, 0x03A1, 0x0000, 0x03A3, 0x03A4, 0x03A5, 0x03A6, 0x03A7, 0x03A8, 0x03A9, 0x03AA, 0x03AB, 0x03AC, 0x03AD, 0x03AE, 0x03AF, + 0x03B0, 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5, 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA, 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF, + 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4, 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9, 0x03CA, 0x03CB, 0x03CC, 0x03CD, 0x03CE, 0x0000 +} diff --git a/tex/context/base/regi-cp1254.lua b/tex/context/base/regi-cp1254.lua index 73b9927c6..c8ef03da9 100644 --- a/tex/context/base/regi-cp1254.lua +++ b/tex/context/base/regi-cp1254.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-cp1254'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0160, 0x2039, 0x0152, 0x0000, 0x0000, 0x0000, - 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0161, 0x203A, 0x0153, 0x0000, 0x0000, 0x0178, - 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, - 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, - 0x011E, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 
0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0130, 0x015E, 0x00DF, - 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, - 0x011F, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0131, 0x015F, 0x00FF -} +if not modules then modules = { } end modules ['regi-cp1254'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0160, 0x2039, 0x0152, 0x0000, 0x0000, 0x0000, + 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0161, 0x203A, 0x0153, 0x0000, 0x0000, 0x0178, + 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, + 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF, + 0x011E, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0130, 0x015E, 0x00DF, + 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF, + 0x011F, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0131, 0x015F, 0x00FF +} diff --git a/tex/context/base/regi-cp1255.lua b/tex/context/base/regi-cp1255.lua index 2abb16b54..7f33b67a8 100644 --- a/tex/context/base/regi-cp1255.lua +++ b/tex/context/base/regi-cp1255.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-cp1255'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 
0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0000, 0x2039, 0x0000, 0x0000, 0x0000, 0x0000, - 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0000, 0x203A, 0x0000, 0x0000, 0x0000, 0x0000, - 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x20AA, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00D7, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00F7, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, - 0x05B0, 0x05B1, 0x05B2, 0x05B3, 0x05B4, 0x05B5, 0x05B6, 0x05B7, 0x05B8, 0x05B9, 0x0000, 0x05BB, 0x05BC, 0x05BD, 0x05BE, 0x05BF, - 0x05C0, 0x05C1, 0x05C2, 0x05C3, 0x05F0, 0x05F1, 0x05F2, 0x05F3, 0x05F4, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, - 0x05D0, 0x05D1, 0x05D2, 0x05D3, 0x05D4, 0x05D5, 0x05D6, 0x05D7, 0x05D8, 0x05D9, 0x05DA, 0x05DB, 0x05DC, 0x05DD, 0x05DE, 0x05DF, - 0x05E0, 0x05E1, 0x05E2, 0x05E3, 0x05E4, 0x05E5, 0x05E6, 0x05E7, 0x05E8, 0x05E9, 0x05EA, 0x0000, 0x0000, 0x200E, 0x200F, 0x0000 -} +if not modules then modules = { } end modules ['regi-cp1255'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0000, 0x2039, 0x0000, 0x0000, 0x0000, 0x0000, + 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0000, 0x203A, 0x0000, 0x0000, 0x0000, 0x0000, + 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x20AA, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00D7, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 
0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00F7, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, + 0x05B0, 0x05B1, 0x05B2, 0x05B3, 0x05B4, 0x05B5, 0x05B6, 0x05B7, 0x05B8, 0x05B9, 0x0000, 0x05BB, 0x05BC, 0x05BD, 0x05BE, 0x05BF, + 0x05C0, 0x05C1, 0x05C2, 0x05C3, 0x05F0, 0x05F1, 0x05F2, 0x05F3, 0x05F4, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, + 0x05D0, 0x05D1, 0x05D2, 0x05D3, 0x05D4, 0x05D5, 0x05D6, 0x05D7, 0x05D8, 0x05D9, 0x05DA, 0x05DB, 0x05DC, 0x05DD, 0x05DE, 0x05DF, + 0x05E0, 0x05E1, 0x05E2, 0x05E3, 0x05E4, 0x05E5, 0x05E6, 0x05E7, 0x05E8, 0x05E9, 0x05EA, 0x0000, 0x0000, 0x200E, 0x200F, 0x0000 +} diff --git a/tex/context/base/regi-cp1256.lua b/tex/context/base/regi-cp1256.lua index a0697c321..e9a4363c7 100644 --- a/tex/context/base/regi-cp1256.lua +++ b/tex/context/base/regi-cp1256.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-cp1256'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x20AC, 0x067E, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0679, 0x2039, 0x0152, 0x0686, 0x0698, 0x0688, - 0x06AF, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x06A9, 0x2122, 0x0691, 0x203A, 0x0153, 0x200C, 0x200D, 0x06BA, - 0x00A0, 0x060C, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x06BE, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x061B, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x061F, - 0x06C1, 0x0621, 0x0622, 0x0623, 0x0624, 0x0625, 0x0626, 0x0627, 0x0628, 0x0629, 0x062A, 0x062B, 0x062C, 0x062D, 0x062E, 0x062F, - 0x0630, 0x0631, 0x0632, 0x0633, 0x0634, 0x0635, 0x0636, 0x00D7, 0x0637, 0x0638, 0x0639, 0x063A, 0x0640, 0x0641, 0x0642, 0x0643, - 0x00E0, 0x0644, 0x00E2, 0x0645, 0x0646, 0x0647, 0x0648, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x0649, 0x064A, 0x00EE, 0x00EF, - 0x064B, 0x064C, 0x064D, 0x064E, 0x00F4, 0x064F, 0x0650, 0x00F7, 0x0651, 0x00F9, 0x0652, 0x00FB, 0x00FC, 0x200E, 0x200F, 0x06D2 -} +if not modules then modules = { } end modules ['regi-cp1256'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 
0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x20AC, 0x067E, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0679, 0x2039, 0x0152, 0x0686, 0x0698, 0x0688, + 0x06AF, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x06A9, 0x2122, 0x0691, 0x203A, 0x0153, 0x200C, 0x200D, 0x06BA, + 0x00A0, 0x060C, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x06BE, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x061B, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x061F, + 0x06C1, 0x0621, 0x0622, 0x0623, 0x0624, 0x0625, 0x0626, 0x0627, 0x0628, 0x0629, 0x062A, 0x062B, 0x062C, 0x062D, 0x062E, 0x062F, + 0x0630, 0x0631, 0x0632, 0x0633, 0x0634, 0x0635, 0x0636, 0x00D7, 0x0637, 0x0638, 0x0639, 0x063A, 0x0640, 0x0641, 0x0642, 0x0643, + 0x00E0, 0x0644, 0x00E2, 0x0645, 0x0646, 0x0647, 0x0648, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x0649, 0x064A, 0x00EE, 0x00EF, + 0x064B, 0x064C, 0x064D, 0x064E, 0x00F4, 0x064F, 0x0650, 0x00F7, 0x0651, 0x00F9, 0x0652, 0x00FB, 0x00FC, 0x200E, 0x200F, 0x06D2 +} diff --git a/tex/context/base/regi-cp1257.lua b/tex/context/base/regi-cp1257.lua index 6e39c10d4..a4a492a13 100644 --- a/tex/context/base/regi-cp1257.lua +++ b/tex/context/base/regi-cp1257.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-cp1257'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 
0x007D, 0x007E, 0x007F, - 0x20AC, 0x0000, 0x201A, 0x0000, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0000, 0x2039, 0x0000, 0x00A8, 0x02C7, 0x00B8, - 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0000, 0x203A, 0x0000, 0x00AF, 0x02DB, 0x0000, - 0x00A0, 0x0000, 0x00A2, 0x00A3, 0x00A4, 0x0000, 0x00A6, 0x00A7, 0x00D8, 0x00A9, 0x0156, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00C6, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00F8, 0x00B9, 0x0157, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00E6, - 0x0104, 0x012E, 0x0100, 0x0106, 0x00C4, 0x00C5, 0x0118, 0x0112, 0x010C, 0x00C9, 0x0179, 0x0116, 0x0122, 0x0136, 0x012A, 0x013B, - 0x0160, 0x0143, 0x0145, 0x00D3, 0x014C, 0x00D5, 0x00D6, 0x00D7, 0x0172, 0x0141, 0x015A, 0x016A, 0x00DC, 0x017B, 0x017D, 0x00DF, - 0x0105, 0x012F, 0x0101, 0x0107, 0x00E4, 0x00E5, 0x0119, 0x0113, 0x010D, 0x00E9, 0x017A, 0x0117, 0x0123, 0x0137, 0x012B, 0x013C, - 0x0161, 0x0144, 0x0146, 0x00F3, 0x014D, 0x00F5, 0x00F6, 0x00F7, 0x0173, 0x0142, 0x015B, 0x016B, 0x00FC, 0x017C, 0x017E, 0x02D9 -} +if not modules then modules = { } end modules ['regi-cp1257'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x20AC, 0x0000, 0x201A, 0x0000, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0000, 0x2039, 0x0000, 0x00A8, 0x02C7, 0x00B8, + 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0000, 0x203A, 0x0000, 0x00AF, 0x02DB, 0x0000, + 0x00A0, 0x0000, 0x00A2, 0x00A3, 0x00A4, 0x0000, 0x00A6, 0x00A7, 0x00D8, 0x00A9, 0x0156, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00C6, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00F8, 0x00B9, 0x0157, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00E6, + 0x0104, 0x012E, 0x0100, 0x0106, 0x00C4, 0x00C5, 0x0118, 0x0112, 0x010C, 0x00C9, 0x0179, 0x0116, 0x0122, 0x0136, 0x012A, 0x013B, + 0x0160, 0x0143, 0x0145, 0x00D3, 0x014C, 0x00D5, 0x00D6, 0x00D7, 0x0172, 0x0141, 0x015A, 0x016A, 0x00DC, 0x017B, 0x017D, 0x00DF, + 0x0105, 0x012F, 0x0101, 0x0107, 0x00E4, 0x00E5, 0x0119, 0x0113, 0x010D, 0x00E9, 0x017A, 0x0117, 0x0123, 0x0137, 0x012B, 0x013C, + 0x0161, 0x0144, 0x0146, 0x00F3, 0x014D, 0x00F5, 0x00F6, 0x00F7, 0x0173, 0x0142, 0x015B, 0x016B, 0x00FC, 0x017C, 0x017E, 0x02D9 +} diff --git a/tex/context/base/regi-cp1258.lua b/tex/context/base/regi-cp1258.lua index 
cf64d2ab6..a4630e7e9 100644 --- a/tex/context/base/regi-cp1258.lua +++ b/tex/context/base/regi-cp1258.lua @@ -1,26 +1,26 @@ -if not modules then modules = { } end modules ['regi-cp1258'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -return { [0] = - 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, - 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, - 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, - 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, - 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, - 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F, - 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, - 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, - 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0000, 0x2039, 0x0152, 0x0000, 0x0000, 0x0000, - 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0000, 0x203A, 0x0153, 0x0000, 0x0000, 0x0178, - 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, - 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, - 0x00C0, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x0300, 0x00CD, 0x00CE, 0x00CF, - 0x0110, 0x00D1, 0x0309, 0x00D3, 0x00D4, 0x01A0, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x01AF, 0x0303, 0x00DF, - 0x00E0, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x0301, 0x00ED, 0x00EE, 0x00EF, - 0x0111, 0x00F1, 0x0323, 0x00F3, 0x00F4, 0x01A1, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x01B0, 0x20AB, 0x00FF -} +if not modules then modules = { } end modules ['regi-cp1258'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +return { [0] = + 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, + 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F, + 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F, + 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F, + 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F, + 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 
0x005C, 0x005D, 0x005E, 0x005F, + 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F, + 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F, + 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0000, 0x2039, 0x0152, 0x0000, 0x0000, 0x0000, + 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0000, 0x203A, 0x0153, 0x0000, 0x0000, 0x0178, + 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF, + 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF, + 0x00C0, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x0300, 0x00CD, 0x00CE, 0x00CF, + 0x0110, 0x00D1, 0x0309, 0x00D3, 0x00D4, 0x01A0, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x01AF, 0x0303, 0x00DF, + 0x00E0, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x0301, 0x00ED, 0x00EE, 0x00EF, + 0x0111, 0x00F1, 0x0323, 0x00F3, 0x00F4, 0x01A1, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x01B0, 0x20AB, 0x00FF +} diff --git a/tex/context/base/regi-demo.lua b/tex/context/base/regi-demo.lua index 689f44e32..f709a11aa 100644 --- a/tex/context/base/regi-demo.lua +++ b/tex/context/base/regi-demo.lua @@ -1,22 +1,22 @@ -if not modules then modules = { } end modules ['regi-demo'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- digits -> * - -return { - [0x0030] = 0x002A, - [0x0031] = 0x002A, - [0x0032] = 0x002A, - [0x0033] = 0x002A, - [0x0034] = 0x002A, - [0x0035] = 0x002A, - [0x0036] = 0x002A, - [0x0037] = 0x002A, - [0x0038] = 0x002A, - [0x0039] = 0x002A, -} +if not modules then modules = { } end modules ['regi-demo'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- digits -> * + +return { + [0x0030] = 0x002A, + [0x0031] = 0x002A, + [0x0032] = 0x002A, + [0x0033] = 0x002A, + [0x0034] = 0x002A, + [0x0035] = 0x002A, + [0x0036] = 0x002A, + [0x0037] = 0x002A, + [0x0038] = 0x002A, + [0x0039] = 0x002A, +} diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua index d5d278b16..784a1ed46 100644 --- a/tex/context/base/regi-ini.lua +++ b/tex/context/base/regi-ini.lua @@ -1,388 +1,388 @@ -if not modules then modules = { } end modules ['regi-ini'] = { - version = 1.001, - comment = "companion to regi-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

    Regimes take care of converting the input characters into UTF sequences. The conversion tables are loaded at runtime.

    ---ldx]]-- - -local commands, context = commands, context - -local utfchar = utf.char -local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match -local char, gsub, format, gmatch, byte, match = string.char, string.gsub, string.format, string.gmatch, string.byte, string.match -local next = next -local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy -local concat = table.concat -local totable = string.totable - -local allocate = utilities.storage.allocate -local sequencers = utilities.sequencers -local textlineactions = resolvers.openers.helpers.textlineactions -local setmetatableindex = table.setmetatableindex - ---[[ldx-- -

    We will hook regime handling code into the input methods.

    ---ldx]]-- - -local trace_translating = false trackers.register("regimes.translating", function(v) trace_translating = v end) - -local report_loading = logs.reporter("regimes","loading") -local report_translating = logs.reporter("regimes","translating") - -regimes = regimes or { } -local regimes = regimes - -local mapping = allocate { - utf = false -} - -local backmapping = allocate { -} - --- regimes.mapping = mapping - -local synonyms = { -- backward compatibility list - - ["windows-1250"] = "cp1250", - ["windows-1251"] = "cp1251", - ["windows-1252"] = "cp1252", - ["windows-1253"] = "cp1253", - ["windows-1254"] = "cp1254", - ["windows-1255"] = "cp1255", - ["windows-1256"] = "cp1256", - ["windows-1257"] = "cp1257", - ["windows-1258"] = "cp1258", - - ["il1"] = "8859-1", - ["il2"] = "8859-2", - ["il3"] = "8859-3", - ["il4"] = "8859-4", - ["il5"] = "8859-9", - ["il6"] = "8859-10", - ["il7"] = "8859-13", - ["il8"] = "8859-14", - ["il9"] = "8859-15", - ["il10"] = "8859-16", - - ["iso-8859-1"] = "8859-1", - ["iso-8859-2"] = "8859-2", - ["iso-8859-3"] = "8859-3", - ["iso-8859-4"] = "8859-4", - ["iso-8859-9"] = "8859-9", - ["iso-8859-10"] = "8859-10", - ["iso-8859-13"] = "8859-13", - ["iso-8859-14"] = "8859-14", - ["iso-8859-15"] = "8859-15", - ["iso-8859-16"] = "8859-16", - - ["latin1"] = "8859-1", - ["latin2"] = "8859-2", - ["latin3"] = "8859-3", - ["latin4"] = "8859-4", - ["latin5"] = "8859-9", - ["latin6"] = "8859-10", - ["latin7"] = "8859-13", - ["latin8"] = "8859-14", - ["latin9"] = "8859-15", - ["latin10"] = "8859-16", - - ["utf-8"] = "utf", - ["utf8"] = "utf", - [""] = "utf", - - ["windows"] = "cp1252", - -} - -local currentregime = "utf" - -local function loadregime(mapping,regime) - local name = resolvers.findfile(format("regi-%s.lua",regime)) or "" - local data = name ~= "" and dofile(name) - if data then - vector = { } - for eightbit, unicode in next, data do - vector[char(eightbit)] = utfchar(unicode) - end - report_loading("vector %a is loaded",regime) - else - vector = false - report_loading("vector %a is unknown",regime) - end - mapping[regime] = vector - return vector -end - -local function loadreverse(t,k) - local t = { } - for k, v in next, mapping[k] do - t[v] = k - end - backmapping[k] = t - return t -end - -setmetatableindex(mapping, loadregime) -setmetatableindex(backmapping,loadreverse) - -local function translate(line,regime) - if line and #line > 0 then - local map = mapping[regime and synonyms[regime] or regime or currentregime] - if map then - line = gsub(line,".",map) - end - end - return line -end - --- local remappers = { } --- --- local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?") --- local t = backmapping[vector] --- local remapper = remappers[vector] --- if not remapper then --- remapper = utf.remapper(t) --- remappers[t] = remapper --- end --- local m = getmetatable(t) --- setmetatableindex(t, function(t,k) --- local v = default or "?" --- t[k] = v --- return v --- end) --- str = remapper(str) --- setmetatable(t,m) --- return str --- end --- --- -- much faster (but only matters when we have > 10K calls - -local cache = { } -- if really needed we can copy vectors and hash defaults - -setmetatableindex(cache, function(t,k) - local v = { remappers = { } } - t[k] = v - return v -end) - -local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?") - local d = default or "?" 
- local c = cache[vector].remappers - local r = c[d] - if not r then - local t = fastcopy(backmapping[vector]) - setmetatableindex(t, function(t,k) - local v = d - t[k] = v - return v - end) - r = utf.remapper(t) - c[d] = r - end - return r(str) -end - -local function disable() - currentregime = "utf" - sequencers.disableaction(textlineactions,"regimes.process") -end - -local function enable(regime) - regime = synonyms[regime] or regime - if mapping[regime] == false then - disable() - else - currentregime = regime - sequencers.enableaction(textlineactions,"regimes.process") - end -end - -regimes.toregime = toregime -regimes.translate = translate -regimes.enable = enable -regimes.disable = disable - --- The following function can be used when we want to make sure that --- utf gets passed unharmed. This is needed for modules. - -local level = 0 - -function regimes.process(str,filename,currentline,noflines,coding) - if level == 0 and coding ~= "utf-8" then - str = translate(str,currentregime) - if trace_translating then - report_translating("utf: %s",str) - end - end - return str -end - -local function push() - level = level + 1 - if trace_translating then - report_translating("pushing level %s",level) - end -end - -local function pop() - if level > 0 then - if trace_translating then - report_translating("popping level %s",level) - end - level = level - 1 - end -end - -regimes.push = push -regimes.pop = pop - -sequencers.prependaction(textlineactions,"system","regimes.process") -sequencers.disableaction(textlineactions,"regimes.process") - --- interface: - -commands.enableregime = enable -commands.disableregime = disable - -commands.pushregime = push -commands.popregime = pop - -function commands.currentregime() - context(currentregime) -end - -local stack = { } - -function commands.startregime(regime) - insert(stack,currentregime) - if trace_translating then - report_translating("start using %a",regime) - end - enable(regime) -end - -function commands.stopregime() - if #stack > 0 then - local regime = remove(stack) - if trace_translating then - report_translating("stop using %a",regime) - end - enable(regime) - end -end - --- Next we provide some hacks. 
Unfortunately we run into crappy encoded --- (read : mixed) encoded xml files that have these ë ä ö ü sequences --- instead of ë ä ö ü - -local patterns = { } - --- function regimes.cleanup(regime,str) --- local p = patterns[regime] --- if p == nil then --- regime = regime and synonyms[regime] or regime or currentregime --- local vector = regime ~= "utf" and mapping[regime] --- if vector then --- local list = { } --- for k, uchar in next, vector do --- local stream = totable(uchar) --- for i=1,#stream do --- stream[i] = vector[stream[i]] --- end --- list[concat(stream)] = uchar --- end --- p = lpeg.append(list,nil,true) --- p = Cs((p+1)^0) --- -- lpeg.print(p) -- size 1604 --- else --- p = false --- end --- patterns[vector] = p --- end --- return p and lpegmatch(p,str) or str --- end --- --- twice as fast and much less lpeg bytecode - -function regimes.cleanup(regime,str) - local p = patterns[regime] - if p == nil then - regime = regime and synonyms[regime] or regime or currentregime - local vector = regime ~= "utf" and mapping[regime] - if vector then - local utfchars = { } - local firsts = { } - for k, uchar in next, vector do - local stream = { } - local split = totable(uchar) - local nofsplits = #split - if nofsplits > 1 then - local first - for i=1,nofsplits do - local u = vector[split[i]] - if not first then - first = firsts[u] - if not first then - first = { } - firsts[u] = first - end - end - stream[i] = u - end - local nofstream = #stream - if nofstream > 1 then - first[#first+1] = concat(stream,2,nofstream) - utfchars[concat(stream)] = uchar - end - end - end - p = P(false) - for k, v in next, firsts do - local q = P(false) - for i=1,#v do - q = q + P(v[i]) - end - p = p + P(k) * q - end - p = Cs(((p+1)/utfchars)^1) - -- lpeg.print(p) -- size: 1042 - else - p = false - end - patterns[regime] = p - end - return p and lpegmatch(p,str) or str -end - --- local map = require("regi-cp1252") --- local old = [[test ë ä ö ü crap]] --- local new = correctencoding(map,old) --- --- print(old,new) - --- obsolete: --- --- function regimes.setsynonym(synonym,target) --- synonyms[synonym] = target --- end --- --- function regimes.truename(regime) --- return regime and synonyms[regime] or regime or currentregime --- end --- --- commands.setregimesynonym = regimes.setsynonym --- --- function commands.trueregimename(regime) --- context(regimes.truename(regime)) --- end --- --- function regimes.load(regime) --- return mapping[synonyms[regime] or regime] --- end +if not modules then modules = { } end modules ['regi-ini'] = { + version = 1.001, + comment = "companion to regi-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

    Regimes take care of converting the input characters into UTF sequences. The conversion tables are loaded at runtime.
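    For instance, with this module loaded a single 8-bit line can be converted explicitly, or a regime can be enabled so that every subsequent non-utf input line is translated; a minimal sketch (the sample string is made up; the synonym table below maps "windows-1252" to the cp1252 vector):

        -- one-shot conversion of a cp1252 encoded string to utf
        local utfline = regimes.translate("d\233j\224 vu", "windows-1252") -- "déjà vu"

        -- or make it the current regime for subsequent file input
        regimes.enable("cp1252")
        -- ... lines read from non-utf files are now translated ...
        regimes.disable() -- back to utf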

    +--ldx]]-- + +local commands, context = commands, context + +local utfchar = utf.char +local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match +local char, gsub, format, gmatch, byte, match = string.char, string.gsub, string.format, string.gmatch, string.byte, string.match +local next = next +local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy +local concat = table.concat +local totable = string.totable + +local allocate = utilities.storage.allocate +local sequencers = utilities.sequencers +local textlineactions = resolvers.openers.helpers.textlineactions +local setmetatableindex = table.setmetatableindex + +--[[ldx-- +

    We will hook regime handling code into the input methods.
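    Concretely, the hook is the regimes.process action defined below, prepended to the text line actions; modules that must read their files as utf regardless of the active regime can raise the level around their reads (sketch only):

        -- every non "utf-8" coded input line passes through
        --   regimes.process(str, filename, currentline, noflines, coding)

        regimes.push() -- level > 0: translation is skipped
        -- ... load utf encoded module files here ...
        regimes.pop()  -- restore normal regime translation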

    +--ldx]]-- + +local trace_translating = false trackers.register("regimes.translating", function(v) trace_translating = v end) + +local report_loading = logs.reporter("regimes","loading") +local report_translating = logs.reporter("regimes","translating") + +regimes = regimes or { } +local regimes = regimes + +local mapping = allocate { + utf = false +} + +local backmapping = allocate { +} + +-- regimes.mapping = mapping + +local synonyms = { -- backward compatibility list + + ["windows-1250"] = "cp1250", + ["windows-1251"] = "cp1251", + ["windows-1252"] = "cp1252", + ["windows-1253"] = "cp1253", + ["windows-1254"] = "cp1254", + ["windows-1255"] = "cp1255", + ["windows-1256"] = "cp1256", + ["windows-1257"] = "cp1257", + ["windows-1258"] = "cp1258", + + ["il1"] = "8859-1", + ["il2"] = "8859-2", + ["il3"] = "8859-3", + ["il4"] = "8859-4", + ["il5"] = "8859-9", + ["il6"] = "8859-10", + ["il7"] = "8859-13", + ["il8"] = "8859-14", + ["il9"] = "8859-15", + ["il10"] = "8859-16", + + ["iso-8859-1"] = "8859-1", + ["iso-8859-2"] = "8859-2", + ["iso-8859-3"] = "8859-3", + ["iso-8859-4"] = "8859-4", + ["iso-8859-9"] = "8859-9", + ["iso-8859-10"] = "8859-10", + ["iso-8859-13"] = "8859-13", + ["iso-8859-14"] = "8859-14", + ["iso-8859-15"] = "8859-15", + ["iso-8859-16"] = "8859-16", + + ["latin1"] = "8859-1", + ["latin2"] = "8859-2", + ["latin3"] = "8859-3", + ["latin4"] = "8859-4", + ["latin5"] = "8859-9", + ["latin6"] = "8859-10", + ["latin7"] = "8859-13", + ["latin8"] = "8859-14", + ["latin9"] = "8859-15", + ["latin10"] = "8859-16", + + ["utf-8"] = "utf", + ["utf8"] = "utf", + [""] = "utf", + + ["windows"] = "cp1252", + +} + +local currentregime = "utf" + +local function loadregime(mapping,regime) + local name = resolvers.findfile(format("regi-%s.lua",regime)) or "" + local data = name ~= "" and dofile(name) + if data then + vector = { } + for eightbit, unicode in next, data do + vector[char(eightbit)] = utfchar(unicode) + end + report_loading("vector %a is loaded",regime) + else + vector = false + report_loading("vector %a is unknown",regime) + end + mapping[regime] = vector + return vector +end + +local function loadreverse(t,k) + local t = { } + for k, v in next, mapping[k] do + t[v] = k + end + backmapping[k] = t + return t +end + +setmetatableindex(mapping, loadregime) +setmetatableindex(backmapping,loadreverse) + +local function translate(line,regime) + if line and #line > 0 then + local map = mapping[regime and synonyms[regime] or regime or currentregime] + if map then + line = gsub(line,".",map) + end + end + return line +end + +-- local remappers = { } +-- +-- local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?") +-- local t = backmapping[vector] +-- local remapper = remappers[vector] +-- if not remapper then +-- remapper = utf.remapper(t) +-- remappers[t] = remapper +-- end +-- local m = getmetatable(t) +-- setmetatableindex(t, function(t,k) +-- local v = default or "?" +-- t[k] = v +-- return v +-- end) +-- str = remapper(str) +-- setmetatable(t,m) +-- return str +-- end +-- +-- -- much faster (but only matters when we have > 10K calls + +local cache = { } -- if really needed we can copy vectors and hash defaults + +setmetatableindex(cache, function(t,k) + local v = { remappers = { } } + t[k] = v + return v +end) + +local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?") + local d = default or "?" 
+ local c = cache[vector].remappers + local r = c[d] + if not r then + local t = fastcopy(backmapping[vector]) + setmetatableindex(t, function(t,k) + local v = d + t[k] = v + return v + end) + r = utf.remapper(t) + c[d] = r + end + return r(str) +end + +local function disable() + currentregime = "utf" + sequencers.disableaction(textlineactions,"regimes.process") +end + +local function enable(regime) + regime = synonyms[regime] or regime + if mapping[regime] == false then + disable() + else + currentregime = regime + sequencers.enableaction(textlineactions,"regimes.process") + end +end + +regimes.toregime = toregime +regimes.translate = translate +regimes.enable = enable +regimes.disable = disable + +-- The following function can be used when we want to make sure that +-- utf gets passed unharmed. This is needed for modules. + +local level = 0 + +function regimes.process(str,filename,currentline,noflines,coding) + if level == 0 and coding ~= "utf-8" then + str = translate(str,currentregime) + if trace_translating then + report_translating("utf: %s",str) + end + end + return str +end + +local function push() + level = level + 1 + if trace_translating then + report_translating("pushing level %s",level) + end +end + +local function pop() + if level > 0 then + if trace_translating then + report_translating("popping level %s",level) + end + level = level - 1 + end +end + +regimes.push = push +regimes.pop = pop + +sequencers.prependaction(textlineactions,"system","regimes.process") +sequencers.disableaction(textlineactions,"regimes.process") + +-- interface: + +commands.enableregime = enable +commands.disableregime = disable + +commands.pushregime = push +commands.popregime = pop + +function commands.currentregime() + context(currentregime) +end + +local stack = { } + +function commands.startregime(regime) + insert(stack,currentregime) + if trace_translating then + report_translating("start using %a",regime) + end + enable(regime) +end + +function commands.stopregime() + if #stack > 0 then + local regime = remove(stack) + if trace_translating then + report_translating("stop using %a",regime) + end + enable(regime) + end +end + +-- Next we provide some hacks. 
Unfortunately we run into crappy encoded +-- (read : mixed) encoded xml files that have these ë ä ö ü sequences +-- instead of ë ä ö ü + +local patterns = { } + +-- function regimes.cleanup(regime,str) +-- local p = patterns[regime] +-- if p == nil then +-- regime = regime and synonyms[regime] or regime or currentregime +-- local vector = regime ~= "utf" and mapping[regime] +-- if vector then +-- local list = { } +-- for k, uchar in next, vector do +-- local stream = totable(uchar) +-- for i=1,#stream do +-- stream[i] = vector[stream[i]] +-- end +-- list[concat(stream)] = uchar +-- end +-- p = lpeg.append(list,nil,true) +-- p = Cs((p+1)^0) +-- -- lpeg.print(p) -- size 1604 +-- else +-- p = false +-- end +-- patterns[vector] = p +-- end +-- return p and lpegmatch(p,str) or str +-- end +-- +-- twice as fast and much less lpeg bytecode + +function regimes.cleanup(regime,str) + local p = patterns[regime] + if p == nil then + regime = regime and synonyms[regime] or regime or currentregime + local vector = regime ~= "utf" and mapping[regime] + if vector then + local utfchars = { } + local firsts = { } + for k, uchar in next, vector do + local stream = { } + local split = totable(uchar) + local nofsplits = #split + if nofsplits > 1 then + local first + for i=1,nofsplits do + local u = vector[split[i]] + if not first then + first = firsts[u] + if not first then + first = { } + firsts[u] = first + end + end + stream[i] = u + end + local nofstream = #stream + if nofstream > 1 then + first[#first+1] = concat(stream,2,nofstream) + utfchars[concat(stream)] = uchar + end + end + end + p = P(false) + for k, v in next, firsts do + local q = P(false) + for i=1,#v do + q = q + P(v[i]) + end + p = p + P(k) * q + end + p = Cs(((p+1)/utfchars)^1) + -- lpeg.print(p) -- size: 1042 + else + p = false + end + patterns[regime] = p + end + return p and lpegmatch(p,str) or str +end + +-- local map = require("regi-cp1252") +-- local old = [[test ë ä ö ü crap]] +-- local new = correctencoding(map,old) +-- +-- print(old,new) + +-- obsolete: +-- +-- function regimes.setsynonym(synonym,target) +-- synonyms[synonym] = target +-- end +-- +-- function regimes.truename(regime) +-- return regime and synonyms[regime] or regime or currentregime +-- end +-- +-- commands.setregimesynonym = regimes.setsynonym +-- +-- function commands.trueregimename(regime) +-- context(regimes.truename(regime)) +-- end +-- +-- function regimes.load(regime) +-- return mapping[synonyms[regime] or regime] +-- end diff --git a/tex/context/base/s-fonts-coverage.lua b/tex/context/base/s-fonts-coverage.lua index db47e57c4..668c430a9 100644 --- a/tex/context/base/s-fonts-coverage.lua +++ b/tex/context/base/s-fonts-coverage.lua @@ -1,113 +1,113 @@ -if not modules then modules = { } end modules ['s-fonts-coverage'] = { - version = 1.001, - comment = "companion to s-fonts-coverage.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -moduledata.fonts = moduledata.fonts or { } -moduledata.fonts.coverage = moduledata.fonts.coverage or { } - -local upper, format = string.upper, string.format -local lpegmatch = lpeg.match -local concat = table.concat - -local context = context -local NC, NR, HL = context.NC, context.NR, context.HL -local char, bold, getvalue = context.char, context.bold, context.getvalue - -local chardata = characters.data - -function moduledata.fonts.coverage.showcomparison(specification) - - specification = 
interfaces.checkedspecification(specification) - - local fontfiles = utilities.parsers.settings_to_array(specification.list or "") - local pattern = upper(specification.pattern or "") - - local present = { } - local names = { } - local files = { } - - if not pattern then - -- skip - elseif pattern == "" then - pattern = nil - elseif tonumber(pattern) then - pattern = tonumber(pattern) - else - pattern = lpeg.oneof(utilities.parsers.settings_to_array(pattern)) - pattern = (1-pattern)^0 * pattern - end - - for i=1,#fontfiles do - local fontname = format("testfont-%s",i) - local fontfile = fontfiles[i] - local fontsize = tex.dimen.bodyfontsize - local id, fontdata = fonts.definers.define { - name = fontfile, - size = fontsize, - cs = fontname, - } - if id and fontdata then - for k, v in next, fontdata.characters do - present[k] = true - end - names[#names+1] = fontname - files[#files+1] = fontfile - end - end - - local t = { } - - context.starttabulate { "|Tr" .. string.rep("|l",#names) .. "|" } - for i=1,#files do - local file = files[i] - t[#t+1] = i .. "=" .. file - NC() - context(i) - NC() - context(file) - NC() - NR() - end - context.stoptabulate() - - context.setupfootertexts { - table.concat(t," ") - } - - context.starttabulate { "|Tl" .. string.rep("|c",#names) .. "|Tl|" } - NC() - bold("unicode") - NC() - for i=1,#names do - bold(i) - NC() - end - bold("description") - NC() - NR() - HL() - for k, v in table.sortedpairs(present) do - if k > 0 then - local description = chardata[k].description - if not pattern or (pattern == k) or (description and lpegmatch(pattern,description)) then - NC() - context("%05X",k) - NC() - for i=1,#names do - getvalue(names[i]) - char(k) - NC() - end - context(description) - NC() - NR() - end - end - end - context.stoptabulate() - -end +if not modules then modules = { } end modules ['s-fonts-coverage'] = { + version = 1.001, + comment = "companion to s-fonts-coverage.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.fonts = moduledata.fonts or { } +moduledata.fonts.coverage = moduledata.fonts.coverage or { } + +local upper, format = string.upper, string.format +local lpegmatch = lpeg.match +local concat = table.concat + +local context = context +local NC, NR, HL = context.NC, context.NR, context.HL +local char, bold, getvalue = context.char, context.bold, context.getvalue + +local chardata = characters.data + +function moduledata.fonts.coverage.showcomparison(specification) + + specification = interfaces.checkedspecification(specification) + + local fontfiles = utilities.parsers.settings_to_array(specification.list or "") + local pattern = upper(specification.pattern or "") + + local present = { } + local names = { } + local files = { } + + if not pattern then + -- skip + elseif pattern == "" then + pattern = nil + elseif tonumber(pattern) then + pattern = tonumber(pattern) + else + pattern = lpeg.oneof(utilities.parsers.settings_to_array(pattern)) + pattern = (1-pattern)^0 * pattern + end + + for i=1,#fontfiles do + local fontname = format("testfont-%s",i) + local fontfile = fontfiles[i] + local fontsize = tex.dimen.bodyfontsize + local id, fontdata = fonts.definers.define { + name = fontfile, + size = fontsize, + cs = fontname, + } + if id and fontdata then + for k, v in next, fontdata.characters do + present[k] = true + end + names[#names+1] = fontname + files[#files+1] = fontfile + end + end + + local t = { } + + 
context.starttabulate { "|Tr" .. string.rep("|l",#names) .. "|" } + for i=1,#files do + local file = files[i] + t[#t+1] = i .. "=" .. file + NC() + context(i) + NC() + context(file) + NC() + NR() + end + context.stoptabulate() + + context.setupfootertexts { + table.concat(t," ") + } + + context.starttabulate { "|Tl" .. string.rep("|c",#names) .. "|Tl|" } + NC() + bold("unicode") + NC() + for i=1,#names do + bold(i) + NC() + end + bold("description") + NC() + NR() + HL() + for k, v in table.sortedpairs(present) do + if k > 0 then + local description = chardata[k].description + if not pattern or (pattern == k) or (description and lpegmatch(pattern,description)) then + NC() + context("%05X",k) + NC() + for i=1,#names do + getvalue(names[i]) + char(k) + NC() + end + context(description) + NC() + NR() + end + end + end + context.stoptabulate() + +end diff --git a/tex/context/base/s-fonts-features.lua b/tex/context/base/s-fonts-features.lua index 0a7cf8b13..a45195df7 100644 --- a/tex/context/base/s-fonts-features.lua +++ b/tex/context/base/s-fonts-features.lua @@ -1,161 +1,161 @@ -if not modules then modules = { } end modules ['s-fonts-features'] = { - version = 1.001, - comment = "companion to s-fonts-features.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -moduledata.fonts = moduledata.fonts or { } -moduledata.fonts.features = moduledata.fonts.features or { } - --- for the moment only otf - -local sortedhash = table.sortedhash - -local NC, NR, bold = context.NC, context.NR, context.bold - -function moduledata.fonts.features.showused(specification) - - specification = interfaces.checkedspecification(specification) - - -- local list = utilities.parsers.settings_to_set(specification.list or "all") - - context.starttabulate { "|T|T|T|T|T|" } - - context.HL() - - NC() bold("feature") - NC() - NC() bold("description") - NC() bold("value") - NC() bold("internal") - NC() NR() - - context.HL() - - local usedfeatures = fonts.handlers.otf.statistics.usedfeatures - local features = fonts.handlers.otf.tables.features - local descriptions = fonts.handlers.otf.features.descriptions - - for feature, keys in sortedhash(usedfeatures) do - -- if list.all or (list.otf and rawget(features,feature)) or (list.extra and rawget(descriptions,feature)) then - local done = false - for k, v in sortedhash(keys) do - if done then - NC() - NC() - NC() - elseif rawget(descriptions,feature) then - NC() context(feature) - NC() context("+") -- extra - NC() context(descriptions[feature]) - done = true - elseif rawget(features,feature) then - NC() context(feature) - NC() -- otf - NC() context(features[feature]) - done = true - else - NC() context(feature) - NC() context("-") -- unknown - NC() - done = true - end - NC() context(k) - NC() context(tostring(v)) - NC() NR() - end - -- end - end - - context.HL() - - context.stoptabulate() - -end - -local function collectkerns(tfmdata,feature) - local combinations = { } - local resources = tfmdata.resources - local characters = tfmdata.characters - local sequences = resources.sequences - local lookuphash = resources.lookuphash - local feature = feature or "kern" - if sequences then - for i=1,#sequences do - local sequence = sequences[i] - if sequence.features and sequence.features[feature] then - local lookuplist = sequence.subtables - if lookuplist then - for l=1,#lookuplist do - local lookupname = lookuplist[l] - local lookupdata = lookuphash[lookupname] - for unicode, 
data in next, lookupdata do - local kerns = combinations[unicode] - if not kerns then - kerns = { } - combinations[unicode] = kerns - end - for otherunicode, kern in next, data do - if not kerns[otherunicode] and kern ~= 0 then - kerns[otherunicode] = kern - end - end - end - end - end - end - end - end - return combinations -end - -local showkernpair = context.showkernpair - -function moduledata.fonts.features.showbasekerns(specification) - -- assumes that the font is loaded in base mode - specification = interfaces.checkedspecification(specification) - local id, cs = fonts.definers.internal(specification,"") - local tfmdata = fonts.hashes.identifiers[id] - local done = false - for unicode, character in sortedhash(tfmdata.characters) do - local kerns = character.kerns - if kerns then - context.par() - for othercode, kern in sortedhash(kerns) do - showkernpair(unicode,kern,othercode) - end - context.par() - done = true - end - end - if not done then - context("no kern pairs found") - context.par() - end -end - -function moduledata.fonts.features.showallkerns(specification) - specification = interfaces.checkedspecification(specification) - local id, cs = fonts.definers.internal(specification,"") - local tfmdata = fonts.hashes.identifiers[id] - local allkerns = collectkerns(tfmdata) - local characters = tfmdata.characters - if next(allkerns) then - for first, pairs in sortedhash(allkerns) do - context.par() - for second, kern in sortedhash(pairs) do - -- local kerns = characters[first].kerns - -- if not kerns and pairs[second] then - -- -- weird - -- end - showkernpair(first,kern,second,0) - end - context.par() - end - else - context("no kern pairs found") - context.par() - end -end +if not modules then modules = { } end modules ['s-fonts-features'] = { + version = 1.001, + comment = "companion to s-fonts-features.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.fonts = moduledata.fonts or { } +moduledata.fonts.features = moduledata.fonts.features or { } + +-- for the moment only otf + +local sortedhash = table.sortedhash + +local NC, NR, bold = context.NC, context.NR, context.bold + +function moduledata.fonts.features.showused(specification) + + specification = interfaces.checkedspecification(specification) + + -- local list = utilities.parsers.settings_to_set(specification.list or "all") + + context.starttabulate { "|T|T|T|T|T|" } + + context.HL() + + NC() bold("feature") + NC() + NC() bold("description") + NC() bold("value") + NC() bold("internal") + NC() NR() + + context.HL() + + local usedfeatures = fonts.handlers.otf.statistics.usedfeatures + local features = fonts.handlers.otf.tables.features + local descriptions = fonts.handlers.otf.features.descriptions + + for feature, keys in sortedhash(usedfeatures) do + -- if list.all or (list.otf and rawget(features,feature)) or (list.extra and rawget(descriptions,feature)) then + local done = false + for k, v in sortedhash(keys) do + if done then + NC() + NC() + NC() + elseif rawget(descriptions,feature) then + NC() context(feature) + NC() context("+") -- extra + NC() context(descriptions[feature]) + done = true + elseif rawget(features,feature) then + NC() context(feature) + NC() -- otf + NC() context(features[feature]) + done = true + else + NC() context(feature) + NC() context("-") -- unknown + NC() + done = true + end + NC() context(k) + NC() context(tostring(v)) + NC() NR() + end + -- end + end + + context.HL() + + 
context.stoptabulate() + +end + +local function collectkerns(tfmdata,feature) + local combinations = { } + local resources = tfmdata.resources + local characters = tfmdata.characters + local sequences = resources.sequences + local lookuphash = resources.lookuphash + local feature = feature or "kern" + if sequences then + for i=1,#sequences do + local sequence = sequences[i] + if sequence.features and sequence.features[feature] then + local lookuplist = sequence.subtables + if lookuplist then + for l=1,#lookuplist do + local lookupname = lookuplist[l] + local lookupdata = lookuphash[lookupname] + for unicode, data in next, lookupdata do + local kerns = combinations[unicode] + if not kerns then + kerns = { } + combinations[unicode] = kerns + end + for otherunicode, kern in next, data do + if not kerns[otherunicode] and kern ~= 0 then + kerns[otherunicode] = kern + end + end + end + end + end + end + end + end + return combinations +end + +local showkernpair = context.showkernpair + +function moduledata.fonts.features.showbasekerns(specification) + -- assumes that the font is loaded in base mode + specification = interfaces.checkedspecification(specification) + local id, cs = fonts.definers.internal(specification,"") + local tfmdata = fonts.hashes.identifiers[id] + local done = false + for unicode, character in sortedhash(tfmdata.characters) do + local kerns = character.kerns + if kerns then + context.par() + for othercode, kern in sortedhash(kerns) do + showkernpair(unicode,kern,othercode) + end + context.par() + done = true + end + end + if not done then + context("no kern pairs found") + context.par() + end +end + +function moduledata.fonts.features.showallkerns(specification) + specification = interfaces.checkedspecification(specification) + local id, cs = fonts.definers.internal(specification,"") + local tfmdata = fonts.hashes.identifiers[id] + local allkerns = collectkerns(tfmdata) + local characters = tfmdata.characters + if next(allkerns) then + for first, pairs in sortedhash(allkerns) do + context.par() + for second, kern in sortedhash(pairs) do + -- local kerns = characters[first].kerns + -- if not kerns and pairs[second] then + -- -- weird + -- end + showkernpair(first,kern,second,0) + end + context.par() + end + else + context("no kern pairs found") + context.par() + end +end diff --git a/tex/context/base/s-fonts-goodies.lua b/tex/context/base/s-fonts-goodies.lua index 381fc45ea..5b83760d7 100644 --- a/tex/context/base/s-fonts-goodies.lua +++ b/tex/context/base/s-fonts-goodies.lua @@ -1,117 +1,117 @@ -if not modules then modules = { } end modules['s-fonts-goodies'] = { - version = 1.001, - comment = "companion to s-fonts-goodies.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -moduledata.fonts = moduledata.fonts or { } -moduledata.fonts.goodies = moduledata.fonts.goodies or { } - -local NC, NR, HL = context.NC, context.NR, context.HL - -local function initialized(specification) - specification = interfaces.checkedspecification(specification) - local name = specification.name - if name then - local goodies = fonts.goodies.load(name) - if goodies then - return specification, goodies - end - end -end - -function moduledata.fonts.goodies.showstylistics(specification) - local specification, goodies = initialized(specification) - if goodies then - local stylistics = goodies.stylistics - if stylistics then - context.starttabulate { "|Tl|Tpl|" } - HL() - NC() 
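A minimal usage sketch for the feature and kerning viewers defined above, as they could be called from Lua code in a running ConTeXt document; the font name below is only an example value, not something taken from this patch.

-- tabulate which OpenType features were actually used so far
moduledata.fonts.features.showused { }

-- list the base mode kern pairs, respectively all collected kern
-- pairs, for a font defined from this (example) specification
moduledata.fonts.features.showbasekerns { name = "dejavuserif" }
moduledata.fonts.features.showallkerns  { name = "dejavuserif" }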
context.bold("feature") - NC() context.bold("meaning") - NC() NR() - HL() - for feature, meaning in table.sortedpairs(stylistics) do - NC() context(feature) - NC() context(string.lower(meaning)) - NC() NR() - end - HL() - context.stoptabulate() - end - end -end - -function moduledata.fonts.goodies.showfeaturesets(specification) - local specification, goodies = initialized(specification) - if goodies then - local featuresets = goodies.featuresets - if featuresets then - context.starttabulate { "|Tl|Tpl|" } - HL() - NC() context.bold("featureset") - NC() context.bold("definitions") - NC() NR() - HL() - for featureset, definitions in table.sortedpairs(featuresets) do - NC() context.type(featureset) NC() - for k, v in table.sortedpairs(definitions) do - context("%s=%S",k,v) - context.quad() - end - NC() NR() - end - HL() - context.stoptabulate() - end - end -end - -function moduledata.fonts.goodies.showcolorschemes(specification) - local specification, goodies = initialized(specification) - if goodies then - local colorschemes = goodies.colorschemes - if colorschemes then - context.starttabulate { "|Tl|Tpl|" } - HL() - NC() context.bold("colorscheme") - NC() context.bold("numbers") - NC() NR() - HL() - for colorscheme, numbers in table.sortedpairs(colorschemes) do - NC() context.type(colorscheme) NC() - for i=1,#numbers do - context(i) - context.quad() - end - NC() NR() - end - HL() - context.stoptabulate() - end - end -end - -function moduledata.fonts.goodies.showfiles(specification) - local specification, goodies = initialized(specification) - if goodies then - local files = goodies.files - if files and files.list then - for filename, specification in table.sortedpairs(files.list) do - context.start() - context.dontleavehmode() - context.definedfont{ filename .. 
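A sketch of how the goodies inspectors in this file might be driven; the name refers to a font goodies file and "dejavu" is only an example value.

-- each call loads the named goodies file and typesets one aspect of it
moduledata.fonts.goodies.showstylistics   { name = "dejavu" }
moduledata.fonts.goodies.showfeaturesets  { name = "dejavu" }
moduledata.fonts.goodies.showcolorschemes { name = "dejavu" }
moduledata.fonts.goodies.showfiles        { name = "dejavu" }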
"*default" } - context("%s-%s-%s-%s-%s", - specification.name or files.name, - specification.weight or "normal", - specification.style or "normal", - specification.width or "normal", - specification.variant or "normal") - context.par() - context.stop() - end - end - end -end +if not modules then modules = { } end modules['s-fonts-goodies'] = { + version = 1.001, + comment = "companion to s-fonts-goodies.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.fonts = moduledata.fonts or { } +moduledata.fonts.goodies = moduledata.fonts.goodies or { } + +local NC, NR, HL = context.NC, context.NR, context.HL + +local function initialized(specification) + specification = interfaces.checkedspecification(specification) + local name = specification.name + if name then + local goodies = fonts.goodies.load(name) + if goodies then + return specification, goodies + end + end +end + +function moduledata.fonts.goodies.showstylistics(specification) + local specification, goodies = initialized(specification) + if goodies then + local stylistics = goodies.stylistics + if stylistics then + context.starttabulate { "|Tl|Tpl|" } + HL() + NC() context.bold("feature") + NC() context.bold("meaning") + NC() NR() + HL() + for feature, meaning in table.sortedpairs(stylistics) do + NC() context(feature) + NC() context(string.lower(meaning)) + NC() NR() + end + HL() + context.stoptabulate() + end + end +end + +function moduledata.fonts.goodies.showfeaturesets(specification) + local specification, goodies = initialized(specification) + if goodies then + local featuresets = goodies.featuresets + if featuresets then + context.starttabulate { "|Tl|Tpl|" } + HL() + NC() context.bold("featureset") + NC() context.bold("definitions") + NC() NR() + HL() + for featureset, definitions in table.sortedpairs(featuresets) do + NC() context.type(featureset) NC() + for k, v in table.sortedpairs(definitions) do + context("%s=%S",k,v) + context.quad() + end + NC() NR() + end + HL() + context.stoptabulate() + end + end +end + +function moduledata.fonts.goodies.showcolorschemes(specification) + local specification, goodies = initialized(specification) + if goodies then + local colorschemes = goodies.colorschemes + if colorschemes then + context.starttabulate { "|Tl|Tpl|" } + HL() + NC() context.bold("colorscheme") + NC() context.bold("numbers") + NC() NR() + HL() + for colorscheme, numbers in table.sortedpairs(colorschemes) do + NC() context.type(colorscheme) NC() + for i=1,#numbers do + context(i) + context.quad() + end + NC() NR() + end + HL() + context.stoptabulate() + end + end +end + +function moduledata.fonts.goodies.showfiles(specification) + local specification, goodies = initialized(specification) + if goodies then + local files = goodies.files + if files and files.list then + for filename, specification in table.sortedpairs(files.list) do + context.start() + context.dontleavehmode() + context.definedfont{ filename .. 
"*default" } + context("%s-%s-%s-%s-%s", + specification.name or files.name, + specification.weight or "normal", + specification.style or "normal", + specification.width or "normal", + specification.variant or "normal") + context.par() + context.stop() + end + end + end +end diff --git a/tex/context/base/s-fonts-missing.lua b/tex/context/base/s-fonts-missing.lua index 829fed45f..331e73715 100644 --- a/tex/context/base/s-fonts-missing.lua +++ b/tex/context/base/s-fonts-missing.lua @@ -1,101 +1,101 @@ -if not modules then modules = { } end modules ['s-fonts-missing'] = { - version = 1.001, - comment = "companion to s-fonts-missing.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -moduledata.fonts = moduledata.fonts or { } -moduledata.fonts.missing = moduledata.fonts.missing or { } - -local function legend(id) - local c = fonts.hashes.identifiers[id] - local privates = c.properties.privates - if privates then - local categories = table.swapped(fonts.loggers.category_to_placeholder) - -- context.starttabulate { "|l|c|c|l|" } - context.starttabulate { "|l|c|l|" } - context.HL() - context.NC() - context.bold("name") - context.NC() - context.bold("symbol") - context.NC() - -- context.bold("node") - -- context.NC() - context.bold("category") - context.NC() - context.NR() - context.HL() - for k, v in table.sortedhash(privates) do - local tag = characters.categorytags[categories[k]] - if tag and tag ~= "" then - context.NC() - context(k) - context.NC() - context.dontleavehmode() - context.char(v) - context.NC() - -- context.dontleavehmode() - -- commands.getprivatechar(k) - -- context.NC() - context(string.lower(tag)) - context.NC() - context.NR() - end - end - context.HL() - context.stoptabulate() - end -end - -function moduledata.fonts.missing.showlegend(specification) - specification = interfaces.checkedspecification(specification) - context.begingroup() - context.definedfont { "Mono*missing" } -- otherwise no privates added - context(function() legend(specification.id or font.current()) end) - context.endgroup() -end - -local function missings() - local collected = fonts.checkers.getmissing() - for filename, list in table.sortedhash(collected) do - if #list > 0 then - context.starttabulate { "|l|l|" } - context.NC() - context.bold("filename") - context.NC() - context(file.basename(filename)) - context.NC() - context.NR() - context.NC() - context.bold("missing") - context.NC() - context(#list) - context.NC() - context.NR() - context.stoptabulate() - context.starttabulate { "|l|c|l|" } - for i=1,#list do - local u = list[i] - context.NC() - context("%U",u) - context.NC() - context.char(u) - context.NC() - context(characters.data[u].description) - context.NC() - context.NR() - end - context.stoptabulate() - end - end -end - -function moduledata.fonts.missing.showcharacters(specification) - context.begingroup() - context.definedfont { "Mono*missing" } -- otherwise no privates added - context(function() missings() end) - context.endgroup() -end +if not modules then modules = { } end modules ['s-fonts-missing'] = { + version = 1.001, + comment = "companion to s-fonts-missing.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.fonts = moduledata.fonts or { } +moduledata.fonts.missing = moduledata.fonts.missing or { } + +local function legend(id) + local c = 
fonts.hashes.identifiers[id] + local privates = c.properties.privates + if privates then + local categories = table.swapped(fonts.loggers.category_to_placeholder) + -- context.starttabulate { "|l|c|c|l|" } + context.starttabulate { "|l|c|l|" } + context.HL() + context.NC() + context.bold("name") + context.NC() + context.bold("symbol") + context.NC() + -- context.bold("node") + -- context.NC() + context.bold("category") + context.NC() + context.NR() + context.HL() + for k, v in table.sortedhash(privates) do + local tag = characters.categorytags[categories[k]] + if tag and tag ~= "" then + context.NC() + context(k) + context.NC() + context.dontleavehmode() + context.char(v) + context.NC() + -- context.dontleavehmode() + -- commands.getprivatechar(k) + -- context.NC() + context(string.lower(tag)) + context.NC() + context.NR() + end + end + context.HL() + context.stoptabulate() + end +end + +function moduledata.fonts.missing.showlegend(specification) + specification = interfaces.checkedspecification(specification) + context.begingroup() + context.definedfont { "Mono*missing" } -- otherwise no privates added + context(function() legend(specification.id or font.current()) end) + context.endgroup() +end + +local function missings() + local collected = fonts.checkers.getmissing() + for filename, list in table.sortedhash(collected) do + if #list > 0 then + context.starttabulate { "|l|l|" } + context.NC() + context.bold("filename") + context.NC() + context(file.basename(filename)) + context.NC() + context.NR() + context.NC() + context.bold("missing") + context.NC() + context(#list) + context.NC() + context.NR() + context.stoptabulate() + context.starttabulate { "|l|c|l|" } + for i=1,#list do + local u = list[i] + context.NC() + context("%U",u) + context.NC() + context.char(u) + context.NC() + context(characters.data[u].description) + context.NC() + context.NR() + end + context.stoptabulate() + end + end +end + +function moduledata.fonts.missing.showcharacters(specification) + context.begingroup() + context.definedfont { "Mono*missing" } -- otherwise no privates added + context(function() missings() end) + context.endgroup() +end diff --git a/tex/context/base/s-fonts-shapes.lua b/tex/context/base/s-fonts-shapes.lua index b387c11dd..5898b5469 100644 --- a/tex/context/base/s-fonts-shapes.lua +++ b/tex/context/base/s-fonts-shapes.lua @@ -1,328 +1,328 @@ -if not modules then modules = { } end modules['s-fonts-shapes'] = { - version = 1.001, - comment = "companion to s-fonts-shapes.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -moduledata.fonts = moduledata.fonts or { } -moduledata.fonts.shapes = moduledata.fonts.shapes or { } - -local fontdata = fonts.hashes.identifiers - -local context = context -local NC, NR = context.NC, context.NR -local space, dontleavehmode, glyph, getvalue = context.space, context.dontleavehmode, context.glyph, context.getvalue -local formatters = string.formatters - -function moduledata.fonts.shapes.showlist(specification) -- todo: ranges - specification = interfaces.checkedspecification(specification) - local id, cs = fonts.definers.internal(specification,"") - local chrs = fontdata[id].characters - function char(k) - dontleavehmode() - glyph(id,k) - end - local function special(v) - local specials = v.specials - if specials and #specials > 1 then - context("%s:",specials[1]) - for i=2,#specials do - space() - char(specials[i]) - end - end - end - 
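The missing glyph reporters above can be invoked without arguments; showlegend falls back to the current font when no id is given. A minimal sketch:

-- legend of the placeholder symbols used for missing glyphs
moduledata.fonts.missing.showlegend { }

-- per file overview of characters that were requested but not present
moduledata.fonts.missing.showcharacters { }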
context.begingroup() - context.tt() - context.starttabulate { "|l|c|c|c|c|l|l|" } - context.FL() - NC() context.bold("unicode") - NC() context.bold("glyph") - NC() context.bold("shape") - NC() context.bold("lower") - NC() context.bold("upper") - NC() context.bold("specials") - NC() context.bold("description") - NC() NR() - context.TL() - for k, v in next, characters.data do - if chrs[k] then - NC() context("0x%05X",k) - NC() char(k) -- getvalue(cs) context.char(k) - NC() char(v.shcode) - NC() char(v.lccode or k) - NC() char(v.uccode or k) - NC() special(v) - NC() context.tx(v.description) - NC() NR() - end - end - context.stoptabulate() - context.endgroup() -end - -function moduledata.fonts.shapes.showlist(specification) -- todo: ranges - specification = interfaces.checkedspecification(specification) - local id, cs = fonts.definers.internal(specification,"") - local chrs = fontdata[id].characters - function char(k) - dontleavehmode() - glyph(id,k) - end - local function special(v) - local specials = v.specials - if specials and #specials > 1 then - context("%s:",specials[1]) - for i=2,#specials do - space() - char(specials[i]) - end - end - end - context.begingroup() - context.tt() - context.starttabulate { "|l|c|c|c|c|l|l|" } - context.FL() - NC() context.bold("unicode") - NC() context.bold("glyph") - NC() context.bold("shape") - NC() context.bold("lower") - NC() context.bold("upper") - NC() context.bold("specials") - NC() context.bold("description") - NC() NR() - context.TL() - for k, v in next, characters.data do - if chrs[k] then - NC() context("0x%05X",k) - NC() char(k) - NC() char(v.shcode) - NC() char(v.lccode or k) - NC() char(v.uccode or k) - NC() special(v) - NC() context.tx(v.description) - NC() NR() - end - end - context.stoptabulate() - context.endgroup() -end - -local descriptions = nil -local characters = nil - -local function showglyphshape(specification) - specification = interfaces.checkedspecification(specification) - local id, cs = fonts.definers.internal(specification,"") - local tfmdata = fontdata[id] - local charnum = tonumber(specification.character) - if not charnum then - charnum = fonts.helpers.nametoslot(n) - end - context.start() - context.dontleavehmode() - context.obeyMPboxdepth() - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - local parameters = tfmdata.parameters - local c = characters[charnum] - local d = descriptions[charnum] - if d then - local factor = (parameters.size/parameters.units)*((7200/7227)/65536) - local llx, lly, urx, ury = unpack(d.boundingbox) - llx, lly, urx, ury = llx*factor, lly*factor, urx*factor, ury*factor - local width, italic = (d.width or 0)*factor, (d.italic or 0)*factor - local top_accent, bot_accent = (d.top_accent or 0)*factor, (d.bot_accent or 0)*factor - local anchors, math = d.anchors, d.math - context.startMPcode() - context("pickup pencircle scaled .25bp ;") - context('picture p ; p := image(draw textext.drt("\\getuvalue{%s}\\gray\\char%s");); draw p ;',cs,charnum) - context('draw (%s,%s)--(%s,%s)--(%s,%s)--(%s,%s)--cycle withcolor green ;',llx,lly,urx,lly,urx,ury,llx,ury) - context('draw (%s,%s)--(%s,%s) withcolor green ;',llx,0,urx,0) - context('draw boundingbox p withcolor .2white withpen pencircle scaled .065bp ;') - context("defaultscale := 0.05 ; ") - -- inefficient but non critical - local function slant_1(v,dx,dy,txt,xsign,ysign,loc,labloc) - if #v > 0 then - local l = { } - for kk, vv in ipairs(v) do - local h, k = vv.height, vv.kern - if h and k then - l[#l+1] = 
formatters["((%s,%s) shifted (%s,%s))"](xsign*k*factor,ysign*h*factor,dx,dy) - end - end - context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled .25) withcolor .5white;", xsign*v[1].kern*factor,lly,dx,dy,l[1]) - context("draw laddered (%s) withcolor .5white ;",table.concat(l,"..")) - context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled .25) withcolor .5white;", xsign*v[#v].kern*factor,ury,dx,dy,l[#l]) - for k, v in ipairs(l) do - context("draw %s withcolor blue withpen pencircle scaled 1bp;",v) - end - end - end - local function slant_2(v,dx,dy,txt,xsign,ysign,loc,labloc) - if #v > 0 then - local l = { } - for kk, vv in ipairs(v) do - local h, k = vv.height, vv.kern - if h and k then - l[#l+1] = formatters["((%s,%s) shifted (%s,%s))"](xsign*k*factor,ysign*h*factor,dx,dy) - end - end - if loc == "top" then - context('label.%s("\\type{%s}",%s shifted (0,-1bp)) ;',loc,txt,l[#l]) - else - context('label.%s("\\type{%s}",%s shifted (0,2bp)) ;',loc,txt,l[1]) - end - for kk, vv in ipairs(v) do - local h, k = vv.height, vv.kern - if h and k then - context('label.top("(%s,%s)",%s shifted (0,-2bp));',k,h,l[kk]) - end - end - end - end - if math then - local kerns = math.kerns - if kerns then - for _, slant in ipairs { slant_1, slant_2 } do - for k,v in pairs(kerns) do - if k == "top_right" then - slant(v,width+italic,0,k,1,1,"top","ulft") - elseif k == "bottom_right" then - slant(v,width,0,k,1,1,"bot","lrt") - elseif k == "top_left" then - slant(v,0,0,k,-1,1,"top","ulft") - elseif k == "bottom_left" then - slant(v,0,0,k,-1,1,"bot","lrt") - end - end - end - end - end - local function show(x,y,txt) - local xx, yy = x*factor, y*factor - context("draw (%s,%s) withcolor blue withpen pencircle scaled 1bp;",xx,yy) - context('label.top("\\type{%s}",(%s,%s-2bp)) ;',txt,xx,yy) - context('label.bot("(%s,%s)",(%s,%s+2bp)) ;',x,y,xx,yy) - end - if anchors then - local a = anchors.baselig - if a then - for k, v in pairs(a) do - for kk, vv in ipairs(v) do - show(vv[1],vv[2],k .. ":" .. 
kk) - end - end - end - local a = anchors.mark - if a then - for k, v in pairs(a) do - show(v[1],v[2],k) - end - end - local a = anchors.basechar - if a then - for k, v in pairs(a) do - show(v[1],v[2],k) - end - end - local ba = anchors.centry - if a then - for k, v in pairs(a) do - show(v[1],v[2],k) - end - end - local a = anchors.cexit - if a then - for k, v in pairs(a) do - show(v[1],v[2],k) - end - end - end - if italic ~= 0 then - context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width,ury,width,ury) - context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width+italic,ury,width+italic,ury) - context('draw (%s,%s-1bp)--(%s,%s-1bp) withcolor blue;',width,ury,width+italic,ury) - context('label.lft("\\type{%s}",(%s+2bp,%s-1bp));',"italic",width,ury) - context('label.rt("%s",(%s-2bp,%s-1bp));',d.italic,width+italic,ury) - end - if top_accent ~= 0 then - context('draw (%s,%s+1bp)--(%s,%s-1bp) withcolor blue;',top_accent,ury,top_accent,ury) - context('label.bot("\\type{%s}",(%s,%s+1bp));',"top_accent",top_accent,ury) - context('label.top("%s",(%s,%s-1bp));',d.top_accent,top_accent,ury) - end - if bot_accent ~= 0 then - context('draw (%s,%s+1bp)--(%s,%s-1bp) withcolor blue;',bot_accent,lly,bot_accent,lly) - context('label.top("\\type{%s}",(%s,%s-1bp));',"bot_accent",top_accent,ury) - context('label.bot("%s",(%s,%s+1bp));',d.bot_accent,bot_accent,lly) - end - context('draw origin withcolor red withpen pencircle scaled 1bp;') - context("setbounds currentpicture to boundingbox currentpicture enlarged 1bp ;") - context("currentpicture := currentpicture scaled 8 ;") - context.stopMPcode() - -- elseif c then - -- lastdata, lastunicode = nil, nil - -- local factor = (7200/7227)/65536 - -- context.startMPcode() - -- context("pickup pencircle scaled .25bp ; ") - -- context('picture p ; p := image(draw textext.drt("\\gray\\char%s");); draw p ;',charnum) - -- context('draw boundingbox p withcolor .2white withpen pencircle scaled .065bp ;') - -- context("defaultscale := 0.05 ; ") - -- local italic, top_accent, bot_accent = (c.italic or 0)*factor, (c.top_accent or 0)*factor, (c.bot_accent or 0)*factor - -- local width, height, depth = (c.width or 0)*factor, (c.height or 0)*factor, (c.depth or 0)*factor - -- local ury = height - -- if italic ~= 0 then - -- context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width,ury,width,ury) - -- context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width+italic,ury,width+italic,ury) - -- context('draw (%s,%s-1bp)--(%s,%s-1bp) withcolor blue;',width,ury,width+italic,height) - -- context('label.lft("\\type{%s}",(%s+2bp,%s-1bp));',"italic",width,height) - -- context('label.rt("%6.3f bp",(%s-2bp,%s-1bp));',italic,width+italic,height) - -- end - -- if top_accent ~= 0 then - -- context('draw (%s,%s+1bp)--(%s,%s-1bp) withcolor blue;',top_accent,ury,top_accent,height) - -- context('label.bot("\\type{%s}",(%s,%s+1bp));',"top_accent",top_accent,height) - -- context('label.top("%6.3f bp",(%s,%s-1bp));',top_accent,top_accent,height) - -- end - -- if bot_accent ~= 0 then - -- context('draw (%s,%s+1bp)--(%s,%s-1bp) withcolor blue;',bot_accent,lly,bot_accent,height) - -- context('label.top("\\type{%s}",(%s,%s-1bp));',"bot_accent",top_accent,height) - -- context('label.bot("%6.3f bp",(%s,%s+1bp));',bot_accent,bot_accent,height) - -- end - -- context('draw origin withcolor red withpen pencircle scaled 1bp;') - -- context("setbounds currentpicture to boundingbox currentpicture enlarged 1bp ;") - -- context("currentpicture := currentpicture scaled 8 ;") - -- 
context.stopMPcode() - else - lastdata, lastunicode = nil, nil - context("no such shape: 0x%05X",charnum) - end - context.stop() -end - -moduledata.fonts.shapes.showglyphshape = showglyphshape - -function moduledata.fonts.shapes.showallglypshapes(specification) - specification = interfaces.checkedspecification(specification) - local id, cs = fonts.definers.internal(specification,"") - local descriptions = fontdata[id].descriptions - for unicode, description in fonts.iterators.descriptions(tfmdata) do - context.modulefontsstartshowglyphshape(unicode,description.name) - showglyphshape { number = id, character = unicode } - context.modulefontsstopshowglyphshape() - end -end - -function moduledata.fonts.shapes.showlastglyphshapefield(unicode,name) - if not descriptions then - -- bad news - elseif name == "unicode" then - context("U+%05X",descriptions.unicode) - else - local d = descriptions[name] - if d then - context(d) - end - end -end +if not modules then modules = { } end modules['s-fonts-shapes'] = { + version = 1.001, + comment = "companion to s-fonts-shapes.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.fonts = moduledata.fonts or { } +moduledata.fonts.shapes = moduledata.fonts.shapes or { } + +local fontdata = fonts.hashes.identifiers + +local context = context +local NC, NR = context.NC, context.NR +local space, dontleavehmode, glyph, getvalue = context.space, context.dontleavehmode, context.glyph, context.getvalue +local formatters = string.formatters + +function moduledata.fonts.shapes.showlist(specification) -- todo: ranges + specification = interfaces.checkedspecification(specification) + local id, cs = fonts.definers.internal(specification,"") + local chrs = fontdata[id].characters + function char(k) + dontleavehmode() + glyph(id,k) + end + local function special(v) + local specials = v.specials + if specials and #specials > 1 then + context("%s:",specials[1]) + for i=2,#specials do + space() + char(specials[i]) + end + end + end + context.begingroup() + context.tt() + context.starttabulate { "|l|c|c|c|c|l|l|" } + context.FL() + NC() context.bold("unicode") + NC() context.bold("glyph") + NC() context.bold("shape") + NC() context.bold("lower") + NC() context.bold("upper") + NC() context.bold("specials") + NC() context.bold("description") + NC() NR() + context.TL() + for k, v in next, characters.data do + if chrs[k] then + NC() context("0x%05X",k) + NC() char(k) -- getvalue(cs) context.char(k) + NC() char(v.shcode) + NC() char(v.lccode or k) + NC() char(v.uccode or k) + NC() special(v) + NC() context.tx(v.description) + NC() NR() + end + end + context.stoptabulate() + context.endgroup() +end + +function moduledata.fonts.shapes.showlist(specification) -- todo: ranges + specification = interfaces.checkedspecification(specification) + local id, cs = fonts.definers.internal(specification,"") + local chrs = fontdata[id].characters + function char(k) + dontleavehmode() + glyph(id,k) + end + local function special(v) + local specials = v.specials + if specials and #specials > 1 then + context("%s:",specials[1]) + for i=2,#specials do + space() + char(specials[i]) + end + end + end + context.begingroup() + context.tt() + context.starttabulate { "|l|c|c|c|c|l|l|" } + context.FL() + NC() context.bold("unicode") + NC() context.bold("glyph") + NC() context.bold("shape") + NC() context.bold("lower") + NC() context.bold("upper") + NC() context.bold("specials") + NC() 
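A small sketch of the shape viewers in this file; the font name and the character number are example values.

-- tabulated overview of the characters in an (example) font
moduledata.fonts.shapes.showlist { name = "dejavuserif" }

-- metapost rendering of one glyph with its bounding box, anchors
-- and math kerns; the character is passed as a number
moduledata.fonts.shapes.showglyphshape { name = "dejavuserif", character = 0x66 }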
context.bold("description") + NC() NR() + context.TL() + for k, v in next, characters.data do + if chrs[k] then + NC() context("0x%05X",k) + NC() char(k) + NC() char(v.shcode) + NC() char(v.lccode or k) + NC() char(v.uccode or k) + NC() special(v) + NC() context.tx(v.description) + NC() NR() + end + end + context.stoptabulate() + context.endgroup() +end + +local descriptions = nil +local characters = nil + +local function showglyphshape(specification) + specification = interfaces.checkedspecification(specification) + local id, cs = fonts.definers.internal(specification,"") + local tfmdata = fontdata[id] + local charnum = tonumber(specification.character) + if not charnum then + charnum = fonts.helpers.nametoslot(n) + end + context.start() + context.dontleavehmode() + context.obeyMPboxdepth() + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local parameters = tfmdata.parameters + local c = characters[charnum] + local d = descriptions[charnum] + if d then + local factor = (parameters.size/parameters.units)*((7200/7227)/65536) + local llx, lly, urx, ury = unpack(d.boundingbox) + llx, lly, urx, ury = llx*factor, lly*factor, urx*factor, ury*factor + local width, italic = (d.width or 0)*factor, (d.italic or 0)*factor + local top_accent, bot_accent = (d.top_accent or 0)*factor, (d.bot_accent or 0)*factor + local anchors, math = d.anchors, d.math + context.startMPcode() + context("pickup pencircle scaled .25bp ;") + context('picture p ; p := image(draw textext.drt("\\getuvalue{%s}\\gray\\char%s");); draw p ;',cs,charnum) + context('draw (%s,%s)--(%s,%s)--(%s,%s)--(%s,%s)--cycle withcolor green ;',llx,lly,urx,lly,urx,ury,llx,ury) + context('draw (%s,%s)--(%s,%s) withcolor green ;',llx,0,urx,0) + context('draw boundingbox p withcolor .2white withpen pencircle scaled .065bp ;') + context("defaultscale := 0.05 ; ") + -- inefficient but non critical + local function slant_1(v,dx,dy,txt,xsign,ysign,loc,labloc) + if #v > 0 then + local l = { } + for kk, vv in ipairs(v) do + local h, k = vv.height, vv.kern + if h and k then + l[#l+1] = formatters["((%s,%s) shifted (%s,%s))"](xsign*k*factor,ysign*h*factor,dx,dy) + end + end + context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled .25) withcolor .5white;", xsign*v[1].kern*factor,lly,dx,dy,l[1]) + context("draw laddered (%s) withcolor .5white ;",table.concat(l,"..")) + context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled .25) withcolor .5white;", xsign*v[#v].kern*factor,ury,dx,dy,l[#l]) + for k, v in ipairs(l) do + context("draw %s withcolor blue withpen pencircle scaled 1bp;",v) + end + end + end + local function slant_2(v,dx,dy,txt,xsign,ysign,loc,labloc) + if #v > 0 then + local l = { } + for kk, vv in ipairs(v) do + local h, k = vv.height, vv.kern + if h and k then + l[#l+1] = formatters["((%s,%s) shifted (%s,%s))"](xsign*k*factor,ysign*h*factor,dx,dy) + end + end + if loc == "top" then + context('label.%s("\\type{%s}",%s shifted (0,-1bp)) ;',loc,txt,l[#l]) + else + context('label.%s("\\type{%s}",%s shifted (0,2bp)) ;',loc,txt,l[1]) + end + for kk, vv in ipairs(v) do + local h, k = vv.height, vv.kern + if h and k then + context('label.top("(%s,%s)",%s shifted (0,-2bp));',k,h,l[kk]) + end + end + end + end + if math then + local kerns = math.kerns + if kerns then + for _, slant in ipairs { slant_1, slant_2 } do + for k,v in pairs(kerns) do + if k == "top_right" then + slant(v,width+italic,0,k,1,1,"top","ulft") + elseif k == "bottom_right" then + slant(v,width,0,k,1,1,"bot","lrt") + elseif k == 
"top_left" then + slant(v,0,0,k,-1,1,"top","ulft") + elseif k == "bottom_left" then + slant(v,0,0,k,-1,1,"bot","lrt") + end + end + end + end + end + local function show(x,y,txt) + local xx, yy = x*factor, y*factor + context("draw (%s,%s) withcolor blue withpen pencircle scaled 1bp;",xx,yy) + context('label.top("\\type{%s}",(%s,%s-2bp)) ;',txt,xx,yy) + context('label.bot("(%s,%s)",(%s,%s+2bp)) ;',x,y,xx,yy) + end + if anchors then + local a = anchors.baselig + if a then + for k, v in pairs(a) do + for kk, vv in ipairs(v) do + show(vv[1],vv[2],k .. ":" .. kk) + end + end + end + local a = anchors.mark + if a then + for k, v in pairs(a) do + show(v[1],v[2],k) + end + end + local a = anchors.basechar + if a then + for k, v in pairs(a) do + show(v[1],v[2],k) + end + end + local ba = anchors.centry + if a then + for k, v in pairs(a) do + show(v[1],v[2],k) + end + end + local a = anchors.cexit + if a then + for k, v in pairs(a) do + show(v[1],v[2],k) + end + end + end + if italic ~= 0 then + context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width,ury,width,ury) + context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width+italic,ury,width+italic,ury) + context('draw (%s,%s-1bp)--(%s,%s-1bp) withcolor blue;',width,ury,width+italic,ury) + context('label.lft("\\type{%s}",(%s+2bp,%s-1bp));',"italic",width,ury) + context('label.rt("%s",(%s-2bp,%s-1bp));',d.italic,width+italic,ury) + end + if top_accent ~= 0 then + context('draw (%s,%s+1bp)--(%s,%s-1bp) withcolor blue;',top_accent,ury,top_accent,ury) + context('label.bot("\\type{%s}",(%s,%s+1bp));',"top_accent",top_accent,ury) + context('label.top("%s",(%s,%s-1bp));',d.top_accent,top_accent,ury) + end + if bot_accent ~= 0 then + context('draw (%s,%s+1bp)--(%s,%s-1bp) withcolor blue;',bot_accent,lly,bot_accent,lly) + context('label.top("\\type{%s}",(%s,%s-1bp));',"bot_accent",top_accent,ury) + context('label.bot("%s",(%s,%s+1bp));',d.bot_accent,bot_accent,lly) + end + context('draw origin withcolor red withpen pencircle scaled 1bp;') + context("setbounds currentpicture to boundingbox currentpicture enlarged 1bp ;") + context("currentpicture := currentpicture scaled 8 ;") + context.stopMPcode() + -- elseif c then + -- lastdata, lastunicode = nil, nil + -- local factor = (7200/7227)/65536 + -- context.startMPcode() + -- context("pickup pencircle scaled .25bp ; ") + -- context('picture p ; p := image(draw textext.drt("\\gray\\char%s");); draw p ;',charnum) + -- context('draw boundingbox p withcolor .2white withpen pencircle scaled .065bp ;') + -- context("defaultscale := 0.05 ; ") + -- local italic, top_accent, bot_accent = (c.italic or 0)*factor, (c.top_accent or 0)*factor, (c.bot_accent or 0)*factor + -- local width, height, depth = (c.width or 0)*factor, (c.height or 0)*factor, (c.depth or 0)*factor + -- local ury = height + -- if italic ~= 0 then + -- context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width,ury,width,ury) + -- context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width+italic,ury,width+italic,ury) + -- context('draw (%s,%s-1bp)--(%s,%s-1bp) withcolor blue;',width,ury,width+italic,height) + -- context('label.lft("\\type{%s}",(%s+2bp,%s-1bp));',"italic",width,height) + -- context('label.rt("%6.3f bp",(%s-2bp,%s-1bp));',italic,width+italic,height) + -- end + -- if top_accent ~= 0 then + -- context('draw (%s,%s+1bp)--(%s,%s-1bp) withcolor blue;',top_accent,ury,top_accent,height) + -- context('label.bot("\\type{%s}",(%s,%s+1bp));',"top_accent",top_accent,height) + -- context('label.top("%6.3f 
bp",(%s,%s-1bp));',top_accent,top_accent,height) + -- end + -- if bot_accent ~= 0 then + -- context('draw (%s,%s+1bp)--(%s,%s-1bp) withcolor blue;',bot_accent,lly,bot_accent,height) + -- context('label.top("\\type{%s}",(%s,%s-1bp));',"bot_accent",top_accent,height) + -- context('label.bot("%6.3f bp",(%s,%s+1bp));',bot_accent,bot_accent,height) + -- end + -- context('draw origin withcolor red withpen pencircle scaled 1bp;') + -- context("setbounds currentpicture to boundingbox currentpicture enlarged 1bp ;") + -- context("currentpicture := currentpicture scaled 8 ;") + -- context.stopMPcode() + else + lastdata, lastunicode = nil, nil + context("no such shape: 0x%05X",charnum) + end + context.stop() +end + +moduledata.fonts.shapes.showglyphshape = showglyphshape + +function moduledata.fonts.shapes.showallglypshapes(specification) + specification = interfaces.checkedspecification(specification) + local id, cs = fonts.definers.internal(specification,"") + local descriptions = fontdata[id].descriptions + for unicode, description in fonts.iterators.descriptions(tfmdata) do + context.modulefontsstartshowglyphshape(unicode,description.name) + showglyphshape { number = id, character = unicode } + context.modulefontsstopshowglyphshape() + end +end + +function moduledata.fonts.shapes.showlastglyphshapefield(unicode,name) + if not descriptions then + -- bad news + elseif name == "unicode" then + context("U+%05X",descriptions.unicode) + else + local d = descriptions[name] + if d then + context(d) + end + end +end diff --git a/tex/context/base/s-fonts-system.lua b/tex/context/base/s-fonts-system.lua index 0c0ad4d86..a8b6ddaa9 100644 --- a/tex/context/base/s-fonts-system.lua +++ b/tex/context/base/s-fonts-system.lua @@ -1,68 +1,68 @@ -if not modules then modules = { } end modules ['s-fonts-system'] = { - version = 1.001, - comment = "companion to s-fonts-system.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- ["zapfinoforteltpro"]={ --- ["designsize"]=0, --- ["familyname"]="zapfinoforteltpro", --- ["filename"]="zapfinoforteltpro.otf", --- ["fontname"]="zapfinoforteltpro", --- ["fontweight"]="regular", --- ["format"]="otf", --- ["fullname"]="zapfinoforteltpro", --- ["maxsize"]=0, --- ["minsize"]=0, --- ["modification"]=1105543074, --- ["modifiers"]="regular", --- ["rawname"]="ZapfinoForteLTPro", --- ["style"]="normal", --- ["subfamily"]="regular", --- ["variant"]="normal", --- ["weight"]="normal", --- ["width"]="normal", --- } - -moduledata.fonts = moduledata.fonts or { } -moduledata.fonts.system = moduledata.fonts.system or { } - -local lower = string.lower - -local context = context -local NC, NR, HL = context.NC, context.NR, context.HL -local bold = context.bold - -function moduledata.fonts.system.showinstalled(specification) - specification = interfaces.checkedspecification(specification) - local pattern = lower(specification.pattern or "") - local list = fonts.names.list(pattern,false,true) - if list then - local files = { } - for k, v in next, list do - files[file.basename(string.lower(v.filename))] = v - end - context.starttabulate { "|Tl|Tl|Tl|Tl|Tl|Tl|" } - HL() - NC() bold("filename") - NC() bold("fontname") - NC() bold("subfamily") - NC() bold("variant") - NC() bold("weight") - NC() bold("width") - NC() NR() - HL() - for filename, data in table.sortedpairs(files) do - NC() context(filename) - NC() context(data.fontname) - NC() context(data.subfamily) - NC() context(data.variant) 
- NC() context(data.weight) - NC() context(data.width) - NC() NR() - end - context.stoptabulate() - end -end +if not modules then modules = { } end modules ['s-fonts-system'] = { + version = 1.001, + comment = "companion to s-fonts-system.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- ["zapfinoforteltpro"]={ +-- ["designsize"]=0, +-- ["familyname"]="zapfinoforteltpro", +-- ["filename"]="zapfinoforteltpro.otf", +-- ["fontname"]="zapfinoforteltpro", +-- ["fontweight"]="regular", +-- ["format"]="otf", +-- ["fullname"]="zapfinoforteltpro", +-- ["maxsize"]=0, +-- ["minsize"]=0, +-- ["modification"]=1105543074, +-- ["modifiers"]="regular", +-- ["rawname"]="ZapfinoForteLTPro", +-- ["style"]="normal", +-- ["subfamily"]="regular", +-- ["variant"]="normal", +-- ["weight"]="normal", +-- ["width"]="normal", +-- } + +moduledata.fonts = moduledata.fonts or { } +moduledata.fonts.system = moduledata.fonts.system or { } + +local lower = string.lower + +local context = context +local NC, NR, HL = context.NC, context.NR, context.HL +local bold = context.bold + +function moduledata.fonts.system.showinstalled(specification) + specification = interfaces.checkedspecification(specification) + local pattern = lower(specification.pattern or "") + local list = fonts.names.list(pattern,false,true) + if list then + local files = { } + for k, v in next, list do + files[file.basename(string.lower(v.filename))] = v + end + context.starttabulate { "|Tl|Tl|Tl|Tl|Tl|Tl|" } + HL() + NC() bold("filename") + NC() bold("fontname") + NC() bold("subfamily") + NC() bold("variant") + NC() bold("weight") + NC() bold("width") + NC() NR() + HL() + for filename, data in table.sortedpairs(files) do + NC() context(filename) + NC() context(data.fontname) + NC() context(data.subfamily) + NC() context(data.variant) + NC() context(data.weight) + NC() context(data.width) + NC() NR() + end + context.stoptabulate() + end +end diff --git a/tex/context/base/s-fonts-tables.lua b/tex/context/base/s-fonts-tables.lua index 5c91d5ee7..4f147f3e0 100644 --- a/tex/context/base/s-fonts-tables.lua +++ b/tex/context/base/s-fonts-tables.lua @@ -1,312 +1,312 @@ -if not modules then modules = { } end modules ['s-fonts-tables'] = { - version = 1.001, - comment = "companion to s-fonts-tables.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -moduledata.fonts = moduledata.fonts or { } -moduledata.fonts.tables = moduledata.fonts.tables or { } - -local setmetatableindex = table.setmetatableindex -local sortedhash = table.sortedhash -local sortedkeys = table.sortedkeys -local format = string.format -local concat = table.concat - -local tabletracers = moduledata.fonts.tables - -local digits = { - dflt = { - dflt = "1234567890 1/2", - }, -} - -local punctuation = { - dflt = { - dflt = ". , : ; ? ! ‹ › « »", - }, -} - -local symbols = { - dflt = { - dflt = "@ # $ % & * () [] {} <> + - = / |", - }, -} - -local LATN = "abcdefghijklmnopqrstuvwxyz" - -local uppercase = { - latn = { - dflt = LATN, - fra = LATN .. " ÀÃÂÈÉÊÒÓÔÙÚÛÆÇ", - }, - grek = { - dftl = "ΑΒΓΔΕΗΘΙΚΛΜÎΞΟΠΡΣΤΥΦΧΨΩ", - }, - cyrl= { - dflt = "ÐБВГДЕЖЗИІЙКЛМÐОПРСТУФХЦЧШЩЪЫЬѢЭЮЯѲ" - }, -} - -local latn = "abcdefghijklmnopqrstuvwxyz" - -local lowercase = { - latn = { - dftl = latn, - nld = latn .. " ïèéë", - deu = latn .. " äöüß", - fra = latn .. 
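The installed font overview above filters the names database with a lowercased pattern (the pattern defaults to an empty string). A sketch with an example pattern:

-- all installed fonts whose name matches the (example) pattern
moduledata.fonts.system.showinstalled { pattern = "dejavu" }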
" àáâèéêòóôùúûæç", - }, - grek = { - dftl = "αβγδεηθικλμνξοπÏστυφχψω", - }, - cyrl= { - dflt = "абвгдежзиійклмнопрÑтуфхцчшщъыьѣÑÑŽÑѳ" - }, -} - -local samples = { - digits = digits, - punctuation = punctuation, - symbols = symbols, - uppercase = uppercase, - lowercase = lowercase, -} - -tabletracers.samples = samples - -setmetatableindex(uppercase, function(t,k) return rawget(t,"latn") end) -setmetatableindex(lowercase, function(t,k) return rawget(t,"latn") end) -setmetatableindex(digits, function(t,k) return rawget(t,"dflt") end) -setmetatableindex(symbols, function(t,k) return rawget(t,"dflt") end) -setmetatableindex(punctuation, function(t,k) return rawget(t,"dflt") end) - -setmetatableindex(uppercase.latn, function(t,k) return rawget(t,"dflt") end) -setmetatableindex(uppercase.grek, function(t,k) return rawget(t,"dflt") end) -setmetatableindex(uppercase.cyrl, function(t,k) return rawget(t,"dflt") end) - -setmetatableindex(lowercase.latn, function(t,k) return rawget(t,"dflt") end) -setmetatableindex(lowercase.grek, function(t,k) return rawget(t,"dflt") end) -setmetatableindex(lowercase.cyrl, function(t,k) return rawget(t,"dflt") end) - -setmetatableindex(digits.dflt, function(t,k) return rawget(t,"dflt") end) -setmetatableindex(symbols.dflt, function(t,k) return rawget(t,"dflt") end) -setmetatableindex(punctuation.dflt, function(t,k) return rawget(t,"dflt") end) - -local function typesettable(t,keys,synonyms,nesting,prefix) - if t then - if not prefix then - context.starttabulate { "|Tl|Tl|Tl|" } - end - for k, v in sortedhash(keys) do - if k == "synonyms" then - elseif type(v) ~= "table" then - context.NC() - if prefix then - context("%s.%s",prefix,k) - else - context(k) - end - context.NC() - local tk = t[k] - if v == "boolean" then - context(tostring(tk or false)) - elseif not tk then - context("") - elseif v == "filename" then - context(file.basename(tk)) - elseif v == "basepoints" then - context("%sbp",tk) - elseif v == "scaledpoints" then - context("%p",tk) - elseif v == "table" then - context("") - else -- if v == "integerscale" then - context(tostring(tk)) - end - context.NC() - local synonym = (not prefix and synonyms[k]) or (prefix and synonyms[format("%s.%s",prefix,k)]) - if synonym then - context(format("(%s)",concat(synonym," "))) - end - context.NC() - context.NR() - elseif nesting == false then - context("
    ") - else -- true or nil - typesettable(t[k],v,synonyms,nesting,k) - end - end - if not prefix then - context.stoptabulate() - end - end -end - -local function typeset(t,keys,nesting,prefix) - local synonyms = keys.synonyms or { } - local collected = { } - for k, v in next, synonyms do - local c = collected[v] - if not c then - c = { } - collected[v] = c - end - c[#c+1] = k - end - for k, v in next, collected do - table.sort(v) - end - typesettable(t,keys,collected,nesting,prefix) -end - -tabletracers.typeset = typeset - -function tabletracers.showproperties(nesting) - local tfmdata = fonts.hashes.identifiers[font.current()] - typeset(tfmdata.properties,fonts.constructors.keys.properties,nesting) -end - -function tabletracers.showparameters(nesting) - local tfmdata = fonts.hashes.identifiers[font.current()] - typeset(tfmdata.parameters,fonts.constructors.keys.parameters,nesting) -end - -function tabletracers.showpositionings() - local tfmdata = fonts.hashes.identifiers[font.current()] - local resources = tfmdata.resources - if resources then - local features = resources.features - if features then - local gpos = features.gpos - if gpos and next(gpos) then - context.starttabulate { "|Tl|Tl|Tlp|" } - for feature, scripts in sortedhash(gpos) do - for script, languages in sortedhash(scripts) do - context.NC() - context(feature) - context.NC() - context(script) - context.NC() - context(concat(sortedkeys(languages)," ")) - context.NC() - context.NR() - end - end - context.stoptabulate() - else - context("no entries") - context.par() - end - end - end -end - -local dynamics = true - -function tabletracers.showsubstitutions() - local tfmdata = fonts.hashes.identifiers[font.current()] - local resources = tfmdata.resources - if resources then - local features = resources.features - if features then - local gsub = features.gsub - if gsub then - local makes_sense = { } - for feature, scripts in sortedhash(gsub) do - for script, languages in sortedhash(scripts) do - for language in sortedhash(languages) do - local tag = format("dummy-%s-%s-%s",feature,script,language) - local fnt = format("file:%s*%s",file.basename(tfmdata.properties.filename),tag) - context.definefontfeature ( - { tag }, - { - mode = "node", - script = script, - language = language, - [feature] = "yes" - } - ) - if not dynamics then - context.definefont( { fnt }, { fnt } ) - end - makes_sense[#makes_sense+1] = { - feature = feature, - tag = tag, - script = script, - language = language, - fontname = fnt, - } - end - end - end - if #makes_sense > 0 then - context.starttabulate { "|Tl|Tl|Tl|p|" } - for i=1,#makes_sense do - local data = makes_sense[i] - local script = data.script - local language = data.language - context.NC() - context(data.feature) - context.NC() - context(script) - context.NC() - context(language) - context.NC() - if not dynamics then - context.startfont { data.fontname } - else - context.addff(data.tag) - end - context.verbatim(samples.lowercase [script][language]) context.par() - context.verbatim(samples.uppercase [script][language]) context.par() - context.verbatim(samples.digits [script][language]) context.par() - context.verbatim(samples.punctuation[script][language]) context.quad() - context.verbatim(samples.symbols [script][language]) - if not dynamics then - context.stopfont() - end - context.NC() - context.NR() - end - context.stoptabulate() - else - context("no entries") - context.par() - end - end - end - end -end - -function tabletracers.showall(specification) -- not interfaced - - specification = 
interfaces.checkedspecification(specification) - - if specification.title then - context.starttitle { title = specification.title } - end - - context.startsubject { title = "Properties" } - tabletracers.showproperties() - context.stopsubject() - - context.startsubject { title = "Parameters" } - tabletracers.showparameters() - context.stopsubject() - - context.startsubject { title = "Positioning features" } - tabletracers.showpositionings() - context.stopsubject() - - context.startsubject { title = "Substitution features" } - tabletracers.showsubstitutions() - context.stopsubject() - - if title then - context.stoptitle() - end - -end +if not modules then modules = { } end modules ['s-fonts-tables'] = { + version = 1.001, + comment = "companion to s-fonts-tables.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.fonts = moduledata.fonts or { } +moduledata.fonts.tables = moduledata.fonts.tables or { } + +local setmetatableindex = table.setmetatableindex +local sortedhash = table.sortedhash +local sortedkeys = table.sortedkeys +local format = string.format +local concat = table.concat + +local tabletracers = moduledata.fonts.tables + +local digits = { + dflt = { + dflt = "1234567890 1/2", + }, +} + +local punctuation = { + dflt = { + dflt = ". , : ; ? ! ‹ › « »", + }, +} + +local symbols = { + dflt = { + dflt = "@ # $ % & * () [] {} <> + - = / |", + }, +} + +local LATN = "abcdefghijklmnopqrstuvwxyz" + +local uppercase = { + latn = { + dflt = LATN, + fra = LATN .. " ÀÃÂÈÉÊÒÓÔÙÚÛÆÇ", + }, + grek = { + dftl = "ΑΒΓΔΕΗΘΙΚΛΜÎΞΟΠΡΣΤΥΦΧΨΩ", + }, + cyrl= { + dflt = "ÐБВГДЕЖЗИІЙКЛМÐОПРСТУФХЦЧШЩЪЫЬѢЭЮЯѲ" + }, +} + +local latn = "abcdefghijklmnopqrstuvwxyz" + +local lowercase = { + latn = { + dftl = latn, + nld = latn .. " ïèéë", + deu = latn .. " äöüß", + fra = latn .. 
" àáâèéêòóôùúûæç", + }, + grek = { + dftl = "αβγδεηθικλμνξοπÏστυφχψω", + }, + cyrl= { + dflt = "абвгдежзиійклмнопрÑтуфхцчшщъыьѣÑÑŽÑѳ" + }, +} + +local samples = { + digits = digits, + punctuation = punctuation, + symbols = symbols, + uppercase = uppercase, + lowercase = lowercase, +} + +tabletracers.samples = samples + +setmetatableindex(uppercase, function(t,k) return rawget(t,"latn") end) +setmetatableindex(lowercase, function(t,k) return rawget(t,"latn") end) +setmetatableindex(digits, function(t,k) return rawget(t,"dflt") end) +setmetatableindex(symbols, function(t,k) return rawget(t,"dflt") end) +setmetatableindex(punctuation, function(t,k) return rawget(t,"dflt") end) + +setmetatableindex(uppercase.latn, function(t,k) return rawget(t,"dflt") end) +setmetatableindex(uppercase.grek, function(t,k) return rawget(t,"dflt") end) +setmetatableindex(uppercase.cyrl, function(t,k) return rawget(t,"dflt") end) + +setmetatableindex(lowercase.latn, function(t,k) return rawget(t,"dflt") end) +setmetatableindex(lowercase.grek, function(t,k) return rawget(t,"dflt") end) +setmetatableindex(lowercase.cyrl, function(t,k) return rawget(t,"dflt") end) + +setmetatableindex(digits.dflt, function(t,k) return rawget(t,"dflt") end) +setmetatableindex(symbols.dflt, function(t,k) return rawget(t,"dflt") end) +setmetatableindex(punctuation.dflt, function(t,k) return rawget(t,"dflt") end) + +local function typesettable(t,keys,synonyms,nesting,prefix) + if t then + if not prefix then + context.starttabulate { "|Tl|Tl|Tl|" } + end + for k, v in sortedhash(keys) do + if k == "synonyms" then + elseif type(v) ~= "table" then + context.NC() + if prefix then + context("%s.%s",prefix,k) + else + context(k) + end + context.NC() + local tk = t[k] + if v == "boolean" then + context(tostring(tk or false)) + elseif not tk then + context("") + elseif v == "filename" then + context(file.basename(tk)) + elseif v == "basepoints" then + context("%sbp",tk) + elseif v == "scaledpoints" then + context("%p",tk) + elseif v == "table" then + context("
    ") + else -- if v == "integerscale" then + context(tostring(tk)) + end + context.NC() + local synonym = (not prefix and synonyms[k]) or (prefix and synonyms[format("%s.%s",prefix,k)]) + if synonym then + context(format("(%s)",concat(synonym," "))) + end + context.NC() + context.NR() + elseif nesting == false then + context("
    ") + else -- true or nil + typesettable(t[k],v,synonyms,nesting,k) + end + end + if not prefix then + context.stoptabulate() + end + end +end + +local function typeset(t,keys,nesting,prefix) + local synonyms = keys.synonyms or { } + local collected = { } + for k, v in next, synonyms do + local c = collected[v] + if not c then + c = { } + collected[v] = c + end + c[#c+1] = k + end + for k, v in next, collected do + table.sort(v) + end + typesettable(t,keys,collected,nesting,prefix) +end + +tabletracers.typeset = typeset + +function tabletracers.showproperties(nesting) + local tfmdata = fonts.hashes.identifiers[font.current()] + typeset(tfmdata.properties,fonts.constructors.keys.properties,nesting) +end + +function tabletracers.showparameters(nesting) + local tfmdata = fonts.hashes.identifiers[font.current()] + typeset(tfmdata.parameters,fonts.constructors.keys.parameters,nesting) +end + +function tabletracers.showpositionings() + local tfmdata = fonts.hashes.identifiers[font.current()] + local resources = tfmdata.resources + if resources then + local features = resources.features + if features then + local gpos = features.gpos + if gpos and next(gpos) then + context.starttabulate { "|Tl|Tl|Tlp|" } + for feature, scripts in sortedhash(gpos) do + for script, languages in sortedhash(scripts) do + context.NC() + context(feature) + context.NC() + context(script) + context.NC() + context(concat(sortedkeys(languages)," ")) + context.NC() + context.NR() + end + end + context.stoptabulate() + else + context("no entries") + context.par() + end + end + end +end + +local dynamics = true + +function tabletracers.showsubstitutions() + local tfmdata = fonts.hashes.identifiers[font.current()] + local resources = tfmdata.resources + if resources then + local features = resources.features + if features then + local gsub = features.gsub + if gsub then + local makes_sense = { } + for feature, scripts in sortedhash(gsub) do + for script, languages in sortedhash(scripts) do + for language in sortedhash(languages) do + local tag = format("dummy-%s-%s-%s",feature,script,language) + local fnt = format("file:%s*%s",file.basename(tfmdata.properties.filename),tag) + context.definefontfeature ( + { tag }, + { + mode = "node", + script = script, + language = language, + [feature] = "yes" + } + ) + if not dynamics then + context.definefont( { fnt }, { fnt } ) + end + makes_sense[#makes_sense+1] = { + feature = feature, + tag = tag, + script = script, + language = language, + fontname = fnt, + } + end + end + end + if #makes_sense > 0 then + context.starttabulate { "|Tl|Tl|Tl|p|" } + for i=1,#makes_sense do + local data = makes_sense[i] + local script = data.script + local language = data.language + context.NC() + context(data.feature) + context.NC() + context(script) + context.NC() + context(language) + context.NC() + if not dynamics then + context.startfont { data.fontname } + else + context.addff(data.tag) + end + context.verbatim(samples.lowercase [script][language]) context.par() + context.verbatim(samples.uppercase [script][language]) context.par() + context.verbatim(samples.digits [script][language]) context.par() + context.verbatim(samples.punctuation[script][language]) context.quad() + context.verbatim(samples.symbols [script][language]) + if not dynamics then + context.stopfont() + end + context.NC() + context.NR() + end + context.stoptabulate() + else + context("no entries") + context.par() + end + end + end + end +end + +function tabletracers.showall(specification) -- not interfaced + + specification = 
interfaces.checkedspecification(specification) + + if specification.title then + context.starttitle { title = specification.title } + end + + context.startsubject { title = "Properties" } + tabletracers.showproperties() + context.stopsubject() + + context.startsubject { title = "Parameters" } + tabletracers.showparameters() + context.stopsubject() + + context.startsubject { title = "Positioning features" } + tabletracers.showpositionings() + context.stopsubject() + + context.startsubject { title = "Substitution features" } + tabletracers.showsubstitutions() + context.stopsubject() + + if title then + context.stoptitle() + end + +end diff --git a/tex/context/base/s-fonts-vectors.lua b/tex/context/base/s-fonts-vectors.lua index 1bac0ae8b..436f3e63d 100644 --- a/tex/context/base/s-fonts-vectors.lua +++ b/tex/context/base/s-fonts-vectors.lua @@ -1,104 +1,104 @@ -if not modules then modules = { } end modules ['s-fonts-vectors'] = { - version = 1.001, - comment = "companion to s-fonts-vectors.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -moduledata.fonts = moduledata.fonts or { } -moduledata.fonts.protrusions = moduledata.fonts.protrusions or { } -moduledata.fonts.expansions = moduledata.fonts.expansions or { } - -local NC, NR = context.NC, context.NR - -local classes = fonts.protrusions.classes -local vectors = fonts.protrusions.vectors - -function moduledata.fonts.protrusions.showvector(specification) - specification = interfaces.checkedspecification(specification) - local vector = vectors[specification.name or "?"] - if vector then - context.blank() - context.startcolumns { n = specification.columns or 3 } - context.starttabulate { "|T||cw(.5em)||" } - for unicode, values in table.sortedhash(vector) do - NC() context("%U",unicode) - NC() context("%.02f",values[1]) - NC() context("%c",unicode) - NC() context("%.02f",values[2]) - NC() NR() - end - context.stoptabulate() - context.stopcolumns() - context.blank() - end -end - -function moduledata.fonts.protrusions.showclass(specification) - specification = interfaces.checkedspecification(specification) - local class = specification.name and classes[specification.name] - local classes = class and { class} or classes - context.starttabulate { "|l|l|r|r|r|" } - NC() context.bold("name") - NC() context.bold("vector") - NC() context.bold("factor") - NC() context.bold("left") - NC() context.bold("right") - NC() NR() - for name, class in table.sortedhash(classes) do - NC() context(name) - NC() context(class.vector) - NC() context("%.02f",class.factor) - NC() context("%.02f",class.left) - NC() context("%.02f",class.right) - NC() NR() - end - context.stoptabulate() -end - -local classes = fonts.expansions.classes -local vectors = fonts.expansions.vectors - -function moduledata.fonts.expansions.showvector(specification) - specification = interfaces.checkedspecification(specification) - local vector = vectors[specification.name or "?"] - if vector then - context.blank() - context.startcolumns { n = specification.columns or 3 } - context.starttabulate { "|T|cw(.5em)||" } - for unicode, value in table.sortedhash(vector) do - NC() context("%U",unicode) - NC() context("%c",unicode) - NC() context("%.02f",value) - NC() NR() - end - context.stoptabulate() - context.stopcolumns() - context.blank() - end -end - -function moduledata.fonts.expansions.showclass(specification) - specification = interfaces.checkedspecification(specification) - local 
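The tracers above inspect the font that is current at the moment of the call, so a typical use is inside a group after a font switch; showall simply combines them. A minimal sketch, with an example title:

-- individual tables
moduledata.fonts.tables.showproperties()
moduledata.fonts.tables.showparameters()
moduledata.fonts.tables.showpositionings()
moduledata.fonts.tables.showsubstitutions()

-- or everything at once
moduledata.fonts.tables.showall { title = "tables of the current font" }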
class = specification.name and classes[specification.name] - local classes = class and { class} or classes - context.starttabulate { "|l|l|r|r|r|" } - NC() context.bold("name") - NC() context.bold("vector") - NC() context.bold("step") - NC() context.bold("factor") - NC() context.bold("stretch") - NC() context.bold("shrink") - NC() NR() - for name, class in table.sortedhash(classes) do - NC() context(name) - NC() context(class.vector) - NC() context("%.02f",class.step) - NC() context("%.02f",class.factor) - NC() context("% 2i",class.stretch) - NC() context("% 2i",class.shrink) - NC() NR() - end - context.stoptabulate() -end +if not modules then modules = { } end modules ['s-fonts-vectors'] = { + version = 1.001, + comment = "companion to s-fonts-vectors.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.fonts = moduledata.fonts or { } +moduledata.fonts.protrusions = moduledata.fonts.protrusions or { } +moduledata.fonts.expansions = moduledata.fonts.expansions or { } + +local NC, NR = context.NC, context.NR + +local classes = fonts.protrusions.classes +local vectors = fonts.protrusions.vectors + +function moduledata.fonts.protrusions.showvector(specification) + specification = interfaces.checkedspecification(specification) + local vector = vectors[specification.name or "?"] + if vector then + context.blank() + context.startcolumns { n = specification.columns or 3 } + context.starttabulate { "|T||cw(.5em)||" } + for unicode, values in table.sortedhash(vector) do + NC() context("%U",unicode) + NC() context("%.02f",values[1]) + NC() context("%c",unicode) + NC() context("%.02f",values[2]) + NC() NR() + end + context.stoptabulate() + context.stopcolumns() + context.blank() + end +end + +function moduledata.fonts.protrusions.showclass(specification) + specification = interfaces.checkedspecification(specification) + local class = specification.name and classes[specification.name] + local classes = class and { class} or classes + context.starttabulate { "|l|l|r|r|r|" } + NC() context.bold("name") + NC() context.bold("vector") + NC() context.bold("factor") + NC() context.bold("left") + NC() context.bold("right") + NC() NR() + for name, class in table.sortedhash(classes) do + NC() context(name) + NC() context(class.vector) + NC() context("%.02f",class.factor) + NC() context("%.02f",class.left) + NC() context("%.02f",class.right) + NC() NR() + end + context.stoptabulate() +end + +local classes = fonts.expansions.classes +local vectors = fonts.expansions.vectors + +function moduledata.fonts.expansions.showvector(specification) + specification = interfaces.checkedspecification(specification) + local vector = vectors[specification.name or "?"] + if vector then + context.blank() + context.startcolumns { n = specification.columns or 3 } + context.starttabulate { "|T|cw(.5em)||" } + for unicode, value in table.sortedhash(vector) do + NC() context("%U",unicode) + NC() context("%c",unicode) + NC() context("%.02f",value) + NC() NR() + end + context.stoptabulate() + context.stopcolumns() + context.blank() + end +end + +function moduledata.fonts.expansions.showclass(specification) + specification = interfaces.checkedspecification(specification) + local class = specification.name and classes[specification.name] + local classes = class and { class} or classes + context.starttabulate { "|l|l|r|r|r|" } + NC() context.bold("name") + NC() context.bold("vector") + NC() context.bold("step") + NC() 
context.bold("factor") + NC() context.bold("stretch") + NC() context.bold("shrink") + NC() NR() + for name, class in table.sortedhash(classes) do + NC() context(name) + NC() context(class.vector) + NC() context("%.02f",class.step) + NC() context("%.02f",class.factor) + NC() context("% 2i",class.stretch) + NC() context("% 2i",class.shrink) + NC() NR() + end + context.stoptabulate() +end diff --git a/tex/context/base/s-lan-03.mkiv b/tex/context/base/s-lan-03.mkiv index a490261f4..6b46a49b1 100644 --- a/tex/context/base/s-lan-03.mkiv +++ b/tex/context/base/s-lan-03.mkiv @@ -36,5 +36,5 @@ function languages.words.tracers.showwords(filename) end \stopluacode -% \ctxlua{languages.words.tracers.showwords("words-003.words")} +\ctxlua{languages.words.tracers.showwords("words-003.words")} diff --git a/tex/context/base/s-languages-sorting.lua b/tex/context/base/s-languages-sorting.lua new file mode 100644 index 000000000..b7d75f8b8 --- /dev/null +++ b/tex/context/base/s-languages-sorting.lua @@ -0,0 +1,118 @@ +if not modules then modules = { } end modules ['s-languages-system'] = { + version = 1.001, + comment = "companion to s-languages-system.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.languages = moduledata.languages or { } +moduledata.languages.sorting = moduledata.languages.sorting or { } + +local formatters = string.formatters +local utfbyte, utfcharacters = utf.byte, utf.characters +local sortedpairs = table.sortedpairs + +local definitions = sorters.definitions +local constants = sorters.constants +local replacementoffset = constants.replacementoffset + +local currentfont = font.current +local fontchars = fonts.hashes.characters + +local c_darkblue = { "darkblue" } +local c_darkred = { "darkred" } +local f_chr = formatters["\\tttf%H"] + +local function chr(str,done) + if done then + context.space() + end + local c = fontchars[currentfont()] + for s in utfcharacters(str) do + local u = utfbyte(s) + if c[u] then + context(s) + elseif u > replacementoffset then + context.color(c_darkblue, f_chr(u)) + else + context.color(c_darkred, f_chr(u)) + end + end + return true +end + +local function map(a,b,done) + if done then + context.space() + end + -- context.tttf() + chr(a) + context("=") + chr(b) + return true +end + +local function nop() + -- context.tttf() + context("none") +end + +local function key(data,field) + context.NC() + context(field) + context.NC() + context(data[field]) + context.NC() + context.NR() +end + +function moduledata.languages.sorting.showinstalled(tag) + if not tag or tag == "" or tag == interfaces.variables.all then + for tag, data in sortedpairs(definitions) do + moduledata.languages.sorting.showinstalled (tag) + end + else + sorters.update() -- syncs data + local data = definitions[tag] + if data then + context.starttabulate { "|lB|pl|" } + key(data,"language") + key(data,"parent") + key(data,"method") + context.NC() + context("replacements") + context.NC() + local replacements = data.replacements + if #replacements == 0 then + nop() + else + for i=1,#replacements do + local r = replacements[i] + map(r[1],r[2],i > 1) + end + end + context.NC() + context.NR() + context.NC() + context("order") + context.NC() + local orders = data.orders + for i=1,#orders do + chr(orders[i],i > 1) + end + context.NC() + context.NR() + context.NC() + context("entries") + context.NC() + local done = false + for k, e in sortedpairs(data.entries) do + done = 
map(k,e,done) + end + context.NC() + context.NR() + context.stoptabulate() + end + end +end diff --git a/tex/context/base/s-languages-sorting.mkiv b/tex/context/base/s-languages-sorting.mkiv new file mode 100644 index 000000000..67acda6f9 --- /dev/null +++ b/tex/context/base/s-languages-sorting.mkiv @@ -0,0 +1,30 @@ +%D \module +%D [ file=s-languages-sorting, % s-lan-02.mkiv +%D version=2010.09.21, +%D title=\CONTEXT\ Style File, +%D subtitle=Language Sorting, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +\startmodule[languages-sorting] + +\registerctxluafile{s-languages-sorting}{} + +\installmodulecommandluasingle \showinstalledsorting {moduledata.languages.sorting.showinstalled} + +\stopmodule + +\continueifinputfile{s-languages-sorting.mkiv} + +\usemodule[art-01] + +\starttext + + \showinstalledsorting + +\stoptext diff --git a/tex/context/base/s-languages-system.lua b/tex/context/base/s-languages-system.lua new file mode 100644 index 000000000..4c27b5b2a --- /dev/null +++ b/tex/context/base/s-languages-system.lua @@ -0,0 +1,35 @@ +if not modules then modules = { } end modules ['s-languages-system'] = { + version = 1.001, + comment = "companion to s-languages-system.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.languages = moduledata.languages or { } +moduledata.languages.system = moduledata.languages.system or { } + +local NC, NR, HL = context.NC, context.NR, context.HL + +function moduledata.languages.system.showinstalled() + local numbers = languages.numbers + local registered = languages.registered + context.starttabulate { "|r|l|l|l|l|" } + NC() context("id") + NC() context("tag") + NC() context("synonyms") + NC() context("parent") + NC() context("loaded") + NC() NR() HL() + for i=1,#numbers do + local tag = numbers[i] + local data = registered[tag] + NC() context(data.number) + NC() context(tag) + NC() context("% t",table.sortedkeys(data.synonyms)) + NC() context(data.parent) + NC() context("%+t",table.sortedkeys(data.used)) + NC() NR() + end + context.stoptabulate() +end diff --git a/tex/context/base/s-languages-system.mkiv b/tex/context/base/s-languages-system.mkiv new file mode 100644 index 000000000..363720374 --- /dev/null +++ b/tex/context/base/s-languages-system.mkiv @@ -0,0 +1,30 @@ +%D \module +%D [ file=s-languages-system, % moved from local s-lan-01 +%D version=2013.05.19, +%D title=\CONTEXT\ Style File, +%D subtitle=Installed Languages, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. 
+ +\startmodule[languages-system] + +\registerctxluafile{s-languages-system}{} + +\installmodulecommandluasingle \showinstalledlanguages {moduledata.languages.system.showinstalled} + +\stopmodule + +\continueifinputfile{s-languages-system.mkiv} + +\usemodule[art-01] + +\starttext + + \showinstalledlanguages + +\stoptext diff --git a/tex/context/base/s-math-coverage.lua b/tex/context/base/s-math-coverage.lua index 258019c9d..52e9b777c 100644 --- a/tex/context/base/s-math-coverage.lua +++ b/tex/context/base/s-math-coverage.lua @@ -1,180 +1,180 @@ -if not modules then modules = { } end modules ['s-math-coverage'] = { - version = 1.001, - comment = "companion to s-math-coverage.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -moduledata.math = moduledata.math or { } -moduledata.math.coverage = moduledata.math.coverage or { } - -local utfchar, utfbyte = utf.char, utf.byte -local formatters, lower = string.formatters, string.lower -local concat = table.concat - -local context = context -local NC, NR, HL = context.NC, context.NR, context.HL -local char, getglyph, bold = context.char, context.getglyph, context.bold - -local ucgreek = { - 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, - 0x0396, 0x0397, 0x0398, 0x0399, 0x039A, - 0x039B, 0x039C, 0x039D, 0x039E, 0x039F, - 0x03A0, 0x03A1, 0x03A3, 0x03A4, 0x03A5, - 0x03A6, 0x03A7, 0x03A8, 0x03A9 -} - -local lcgreek = { - 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5, - 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA, - 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF, - 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4, - 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9, - 0x03D1, 0x03D5, 0x03D6, 0x03F0, 0x03F1, - 0x03F4, 0x03F5 -} - -local ucletters = { - 0x00041, 0x00042, 0x00043, 0x00044, 0x00045, - 0x00046, 0x00047, 0x00048, 0x00049, 0x0004A, - 0x0004B, 0x0004C, 0x0004D, 0x0004E, 0x0004F, - 0x00050, 0x00051, 0x00052, 0x00053, 0x00054, - 0x00055, 0x00056, 0x00057, 0x00058, 0x00059, - 0x0005A, -} - -local lcletters = { - 0x00061, 0x00062, 0x00063, 0x00064, 0x00065, - 0x00066, 0x00067, 0x00068, 0x00069, 0x0006A, - 0x0006B, 0x0006C, 0x0006D, 0x0006E, 0x0006F, - 0x00070, 0x00071, 0x00072, 0x00073, 0x00074, - 0x00075, 0x00076, 0x00077, 0x00078, 0x00079, - 0x0007A, -} - -local digits = { - 0x00030, 0x00031, 0x00032, 0x00033, 0x00034, - 0x00035, 0x00036, 0x00037, 0x00038, 0x00039, -} - -local styles = { - "regular", "sansserif", "monospaced", "fraktur", "script", "blackboard" -} - -local alternatives = { - "normal", "bold", "italic", "bolditalic" -} - -local alphabets = { - ucletters, lcletters, ucgreek, lcgreek, digits, -} - -local getboth = mathematics.getboth -local remapalphabets = mathematics.remapalphabets - -local chardata = characters.data -local superscripts = characters.superscripts -local subscripts = characters.subscripts - -function moduledata.math.coverage.showalphabets() - context.starttabulate { "|lT|l|Tl|" } - for i=1,#styles do - local style = styles[i] - for i=1,#alternatives do - local alternative = alternatives[i] - for i=1,#alphabets do - local alphabet = alphabets[i] - NC() - if i == 1 then - context("%s %s",style,alternative) - end - NC() - context.startimath() - context.setmathattribute(style,alternative) - for i=1,#alphabet do - local letter = alphabet[i] - local id = getboth(style,alternative) - local unicode = remapalphabets(letter,id) - if not unicode then - context.underbar(utfchar(letter)) - elseif unicode == letter then - context(utfchar(unicode)) - else - 
context(utfchar(unicode)) - end - end - context.stopimath() - NC() - local first = alphabet[1] - local last = alphabet[#alphabet] - local id = getboth(style,alternative) - local f_unicode = remapalphabets(first,id) or utfbyte(first) - local l_unicode = remapalphabets(last,id) or utfbyte(last) - context("%05X - %05X",f_unicode,l_unicode) - NC() - NR() - end - end - end - context.stoptabulate() -end - -function moduledata.math.coverage.showcharacters() - context.startcolumns() - context.setupalign { "nothyphenated" } - context.starttabulate { "|T|i2|Tpl|" } - for u, d in table.sortedpairs(chardata) do - local mathclass = d.mathclass - local mathspec = d.mathspec - if mathclass or mathspec then - NC() - context("%05X",u) - NC() - getglyph("MathRoman",u) - NC() - if mathspec then - local t = { } - for i=1,#mathspec do - t[mathspec[i].class] = true - end - t = table.sortedkeys(t) - context("% t",t) - else - context(mathclass) - end - NC() - NR() - end - end - context.stoptabulate() - context.stopcolumns() -end - --- This is a somewhat tricky table as we need to bypass the math machinery. - -function moduledata.math.coverage.showscripts() - context.starttabulate { "|cT|c|cT|c|c|c|l|" } - for k, v in table.sortedpairs(table.merged(superscripts,subscripts)) do - local ck = utfchar(k) - local cv = utfchar(v) - local ss = superscripts[k] and "^" or "_" - NC() - context("%05X",k) - NC() - context(ck) - NC() - context("%05X",v) - NC() - context(cv) - NC() - context.formatted.rawmathematics("x%s = x%s%s",ck,ss,cv) - NC() - context.formatted.mathematics("x%s = x%s%s",ck,ss,cv) - NC() - context(lower(chardata[k].description)) - NC() - NR() - end - context.stoptabulate() -end +if not modules then modules = { } end modules ['s-math-coverage'] = { + version = 1.001, + comment = "companion to s-math-coverage.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.math = moduledata.math or { } +moduledata.math.coverage = moduledata.math.coverage or { } + +local utfchar, utfbyte = utf.char, utf.byte +local formatters, lower = string.formatters, string.lower +local concat = table.concat + +local context = context +local NC, NR, HL = context.NC, context.NR, context.HL +local char, getglyph, bold = context.char, context.getglyph, context.bold + +local ucgreek = { + 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, + 0x0396, 0x0397, 0x0398, 0x0399, 0x039A, + 0x039B, 0x039C, 0x039D, 0x039E, 0x039F, + 0x03A0, 0x03A1, 0x03A3, 0x03A4, 0x03A5, + 0x03A6, 0x03A7, 0x03A8, 0x03A9 +} + +local lcgreek = { + 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5, + 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA, + 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF, + 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4, + 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9, + 0x03D1, 0x03D5, 0x03D6, 0x03F0, 0x03F1, + 0x03F4, 0x03F5 +} + +local ucletters = { + 0x00041, 0x00042, 0x00043, 0x00044, 0x00045, + 0x00046, 0x00047, 0x00048, 0x00049, 0x0004A, + 0x0004B, 0x0004C, 0x0004D, 0x0004E, 0x0004F, + 0x00050, 0x00051, 0x00052, 0x00053, 0x00054, + 0x00055, 0x00056, 0x00057, 0x00058, 0x00059, + 0x0005A, +} + +local lcletters = { + 0x00061, 0x00062, 0x00063, 0x00064, 0x00065, + 0x00066, 0x00067, 0x00068, 0x00069, 0x0006A, + 0x0006B, 0x0006C, 0x0006D, 0x0006E, 0x0006F, + 0x00070, 0x00071, 0x00072, 0x00073, 0x00074, + 0x00075, 0x00076, 0x00077, 0x00078, 0x00079, + 0x0007A, +} + +local digits = { + 0x00030, 0x00031, 0x00032, 0x00033, 0x00034, + 0x00035, 0x00036, 0x00037, 0x00038, 0x00039, 
+} + +local styles = { + "regular", "sansserif", "monospaced", "fraktur", "script", "blackboard" +} + +local alternatives = { + "normal", "bold", "italic", "bolditalic" +} + +local alphabets = { + ucletters, lcletters, ucgreek, lcgreek, digits, +} + +local getboth = mathematics.getboth +local remapalphabets = mathematics.remapalphabets + +local chardata = characters.data +local superscripts = characters.superscripts +local subscripts = characters.subscripts + +function moduledata.math.coverage.showalphabets() + context.starttabulate { "|lT|l|Tl|" } + for i=1,#styles do + local style = styles[i] + for i=1,#alternatives do + local alternative = alternatives[i] + for i=1,#alphabets do + local alphabet = alphabets[i] + NC() + if i == 1 then + context("%s %s",style,alternative) + end + NC() + context.startimath() + context.setmathattribute(style,alternative) + for i=1,#alphabet do + local letter = alphabet[i] + local id = getboth(style,alternative) + local unicode = remapalphabets(letter,id) + if not unicode then + context.underbar(utfchar(letter)) + elseif unicode == letter then + context(utfchar(unicode)) + else + context(utfchar(unicode)) + end + end + context.stopimath() + NC() + local first = alphabet[1] + local last = alphabet[#alphabet] + local id = getboth(style,alternative) + local f_unicode = remapalphabets(first,id) or utfbyte(first) + local l_unicode = remapalphabets(last,id) or utfbyte(last) + context("%05X - %05X",f_unicode,l_unicode) + NC() + NR() + end + end + end + context.stoptabulate() +end + +function moduledata.math.coverage.showcharacters() + context.startcolumns() + context.setupalign { "nothyphenated" } + context.starttabulate { "|T|i2|Tpl|" } + for u, d in table.sortedpairs(chardata) do + local mathclass = d.mathclass + local mathspec = d.mathspec + if mathclass or mathspec then + NC() + context("%05X",u) + NC() + getglyph("MathRoman",u) + NC() + if mathspec then + local t = { } + for i=1,#mathspec do + t[mathspec[i].class] = true + end + t = table.sortedkeys(t) + context("% t",t) + else + context(mathclass) + end + NC() + NR() + end + end + context.stoptabulate() + context.stopcolumns() +end + +-- This is a somewhat tricky table as we need to bypass the math machinery. 
+ +function moduledata.math.coverage.showscripts() + context.starttabulate { "|cT|c|cT|c|c|c|l|" } + for k, v in table.sortedpairs(table.merged(superscripts,subscripts)) do + local ck = utfchar(k) + local cv = utfchar(v) + local ss = superscripts[k] and "^" or "_" + NC() + context("%05X",k) + NC() + context(ck) + NC() + context("%05X",v) + NC() + context(cv) + NC() + context.formatted.rawmathematics("x%s = x%s%s",ck,ss,cv) + NC() + context.formatted.mathematics("x%s = x%s%s",ck,ss,cv) + NC() + context(lower(chardata[k].description)) + NC() + NR() + end + context.stoptabulate() +end diff --git a/tex/context/base/s-math-parameters.lua b/tex/context/base/s-math-parameters.lua index 8e8c15a2d..50500466a 100644 --- a/tex/context/base/s-math-parameters.lua +++ b/tex/context/base/s-math-parameters.lua @@ -1,135 +1,135 @@ -if not modules then modules = { } end modules ['s-math-coverage'] = { - version = 1.001, - comment = "companion to s-math-coverage.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -moduledata.math = moduledata.math or { } -moduledata.math.parameters = moduledata.math.parameters or { } - -local tables = utilities.tables.definedtable("math","tracing","spacing","tables") - -tables.styleaxis = { - "ord", "op", "bin", "rel", "open", "close", "punct", "inner", -} - -tables.parameters = { - "quad", "axis", "operatorsize", - "overbarkern", "overbarrule", "overbarvgap", - "underbarkern", "underbarrule", "underbarvgap", - "radicalkern", "radicalrule", "radicalvgap", - "radicaldegreebefore", "radicaldegreeafter", "radicaldegreeraise", - "stackvgap", "stacknumup", "stackdenomdown", - "fractionrule", "fractionnumvgap", "fractionnumup", - "fractiondenomvgap", "fractiondenomdown", "fractiondelsize", - "limitabovevgap", "limitabovebgap", "limitabovekern", - "limitbelowvgap", "limitbelowbgap", "limitbelowkern", - "underdelimitervgap", "underdelimiterbgap", - "overdelimitervgap", "overdelimiterbgap", - "subshiftdrop", "supshiftdrop", "subshiftdown", - "subsupshiftdown", "subtopmax", "supshiftup", - "supbottommin", "supsubbottommax", "subsupvgap", - "spaceafterscript", "connectoroverlapmin", -} - -tables.styles = { - "display", - "text", - "script", - "scriptscript", -} - -function tables.stripmu(str) - str = string.gsub(str,"mu","") - str = string.gsub(str," ","") - str = string.gsub(str,"plus","+") - str = string.gsub(str,"minus","-") - return str -end - -function tables.strippt(old) - local new = string.gsub(old,"pt","") - if new ~= old then - new = string.format("%0.4f",tonumber(new)) - end - return new -end - -function moduledata.math.parameters.showspacing() - - local styles = tables.styles - local styleaxis = tables.styleaxis - - context.starttabulate { "|Tl|Tl|" .. 
string.rep("Tc|",(#styles*2)) } - context.HL() - context.NC() - context.NC() - context.NC() - for i=1,#styles do - context.bold(styles[i]) - context.NC() - context.bold("(cramped)") - context.NC() - end - context.NR() - context.HL() - for i=1,#styleaxis do - -- print(key,tex.getmath(key,"text")) - local one = styleaxis[i] - for j=1,#styleaxis do - local two = styleaxis[j] - context.NC() - if j == 1 then - context.bold(one) - end - context.NC() - context.bold(two) - context.NC() - for i=1,#styles do - context("\\ctxlua{context(math.tracing.spacing.tables.stripmu('\\the\\Umath%s%sspacing\\%sstyle'))}",one,two,styles[i]) - context.NC() - context("\\ctxlua{context(math.tracing.spacing.tables.stripmu('\\the\\Umath%s%sspacing\\cramped%sstyle'))}",one,two,styles[i]) - context.NC() - end - context.NR() - end - end - context.stoptabulate() -end - -function moduledata.math.parameters.showparameters() - - local styles = tables.styles - local parameters = tables.parameters - - context.starttabulate { "|l|" .. string.rep("Tc|",(#styles*2)) } - context.HL() - context.NC() - context.NC() - for i=1,#styles do - context.bold(styles[i]) - context.NC() - context.bold("(cramped)") - context.NC() - end - context.NR() - context.HL() - for i=1,#parameters do - local parameter = parameters[i] - -- print(parameter,tex.getmath(parameter,"text")) - context.NC() - context.type(parameter) - context.NC() - for i=1,#styles do - context("\\ctxlua{context(math.tracing.spacing.tables.strippt('\\the\\Umath%s\\%sstyle'))}",parameter,styles[i]) - context.NC() - context("\\ctxlua{context(math.tracing.spacing.tables.strippt('\\the\\Umath%s\\cramped%sstyle'))}",parameter,styles[i]) - context.NC() - end - context.NR() - end - context.stoptabulate() - -end +if not modules then modules = { } end modules ['s-math-coverage'] = { + version = 1.001, + comment = "companion to s-math-coverage.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.math = moduledata.math or { } +moduledata.math.parameters = moduledata.math.parameters or { } + +local tables = utilities.tables.definedtable("math","tracing","spacing","tables") + +tables.styleaxis = { + "ord", "op", "bin", "rel", "open", "close", "punct", "inner", +} + +tables.parameters = { + "quad", "axis", "operatorsize", + "overbarkern", "overbarrule", "overbarvgap", + "underbarkern", "underbarrule", "underbarvgap", + "radicalkern", "radicalrule", "radicalvgap", + "radicaldegreebefore", "radicaldegreeafter", "radicaldegreeraise", + "stackvgap", "stacknumup", "stackdenomdown", + "fractionrule", "fractionnumvgap", "fractionnumup", + "fractiondenomvgap", "fractiondenomdown", "fractiondelsize", + "limitabovevgap", "limitabovebgap", "limitabovekern", + "limitbelowvgap", "limitbelowbgap", "limitbelowkern", + "underdelimitervgap", "underdelimiterbgap", + "overdelimitervgap", "overdelimiterbgap", + "subshiftdrop", "supshiftdrop", "subshiftdown", + "subsupshiftdown", "subtopmax", "supshiftup", + "supbottommin", "supsubbottommax", "subsupvgap", + "spaceafterscript", "connectoroverlapmin", +} + +tables.styles = { + "display", + "text", + "script", + "scriptscript", +} + +function tables.stripmu(str) + str = string.gsub(str,"mu","") + str = string.gsub(str," ","") + str = string.gsub(str,"plus","+") + str = string.gsub(str,"minus","-") + return str +end + +function tables.strippt(old) + local new = string.gsub(old,"pt","") + if new ~= old then + new = 
string.format("%0.4f",tonumber(new)) + end + return new +end + +function moduledata.math.parameters.showspacing() + + local styles = tables.styles + local styleaxis = tables.styleaxis + + context.starttabulate { "|Tl|Tl|" .. string.rep("Tc|",(#styles*2)) } + context.HL() + context.NC() + context.NC() + context.NC() + for i=1,#styles do + context.bold(styles[i]) + context.NC() + context.bold("(cramped)") + context.NC() + end + context.NR() + context.HL() + for i=1,#styleaxis do + -- print(key,tex.getmath(key,"text")) + local one = styleaxis[i] + for j=1,#styleaxis do + local two = styleaxis[j] + context.NC() + if j == 1 then + context.bold(one) + end + context.NC() + context.bold(two) + context.NC() + for i=1,#styles do + context("\\ctxlua{context(math.tracing.spacing.tables.stripmu('\\the\\Umath%s%sspacing\\%sstyle'))}",one,two,styles[i]) + context.NC() + context("\\ctxlua{context(math.tracing.spacing.tables.stripmu('\\the\\Umath%s%sspacing\\cramped%sstyle'))}",one,two,styles[i]) + context.NC() + end + context.NR() + end + end + context.stoptabulate() +end + +function moduledata.math.parameters.showparameters() + + local styles = tables.styles + local parameters = tables.parameters + + context.starttabulate { "|l|" .. string.rep("Tc|",(#styles*2)) } + context.HL() + context.NC() + context.NC() + for i=1,#styles do + context.bold(styles[i]) + context.NC() + context.bold("(cramped)") + context.NC() + end + context.NR() + context.HL() + for i=1,#parameters do + local parameter = parameters[i] + -- print(parameter,tex.getmath(parameter,"text")) + context.NC() + context.type(parameter) + context.NC() + for i=1,#styles do + context("\\ctxlua{context(math.tracing.spacing.tables.strippt('\\the\\Umath%s\\%sstyle'))}",parameter,styles[i]) + context.NC() + context("\\ctxlua{context(math.tracing.spacing.tables.strippt('\\the\\Umath%s\\cramped%sstyle'))}",parameter,styles[i]) + context.NC() + end + context.NR() + end + context.stoptabulate() + +end diff --git a/tex/context/base/s-pre-71.lua b/tex/context/base/s-pre-71.lua index bfa45a705..7d5c011f1 100644 --- a/tex/context/base/s-pre-71.lua +++ b/tex/context/base/s-pre-71.lua @@ -1,63 +1,63 @@ -if not modules then modules = { } end modules ['steps'] = { - version = 1.001, - comment = "companion to steps.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -moduledata.steps = moduledata.steps or { } -local steps = moduledata.steps - -local locations = { - 'lefttop', - 'middletop', - 'righttop', - 'middleleft', - 'middle', - 'middleright', - 'leftbottom', - 'middlebottom', - 'rightbottom', -} - -local done, current, previous, n - -function steps.reset_locations() - done, current, previous, n = table.tohash(locations,false), 0, 0, 0 -end - -function steps.next_location(loc) - previous = current - n = n + 1 - loc = loc and loc ~= "" and tonumber(loc) - while true do - current = loc or math.random(1,#locations) - if not done[current] then - done[current] = true - break - end - end -end - -function steps.current_location() - context(locations[current] or "") -end - -function steps.previous_location() - context(locations[previous] or "") -end - -function steps.current_n() - context(current) -end - -function steps.previous_n() - context(previous) -end - -function steps.step() - context(n) -end - -steps.reset_locations() +if not modules then modules = { } end modules ['steps'] = { + version = 1.001, + comment = "companion to steps.mkiv", + author = 
"Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +moduledata.steps = moduledata.steps or { } +local steps = moduledata.steps + +local locations = { + 'lefttop', + 'middletop', + 'righttop', + 'middleleft', + 'middle', + 'middleright', + 'leftbottom', + 'middlebottom', + 'rightbottom', +} + +local done, current, previous, n + +function steps.reset_locations() + done, current, previous, n = table.tohash(locations,false), 0, 0, 0 +end + +function steps.next_location(loc) + previous = current + n = n + 1 + loc = loc and loc ~= "" and tonumber(loc) + while true do + current = loc or math.random(1,#locations) + if not done[current] then + done[current] = true + break + end + end +end + +function steps.current_location() + context(locations[current] or "") +end + +function steps.previous_location() + context(locations[previous] or "") +end + +function steps.current_n() + context(current) +end + +function steps.previous_n() + context(previous) +end + +function steps.step() + context(n) +end + +steps.reset_locations() diff --git a/tex/context/base/scrn-but.lua b/tex/context/base/scrn-but.lua index e49372ce9..4766df9d7 100644 --- a/tex/context/base/scrn-but.lua +++ b/tex/context/base/scrn-but.lua @@ -1,19 +1,19 @@ -if not modules then modules = { } end modules ['scrn-but'] = { - version = 1.001, - comment = "companion to scrn-but.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local f_two_colon = string.formatters["%s:%s"] - -function commands.registerbuttons(tag,register,language) - local data = sorters.definitions[language] - local orders = daya and data.orders or sorters.definitions.default.orders - local tag = tag == "" and { "" } or { tag } - for i=1,#orders do - local order = orders[i] - context.menubutton(tag,f_two_colon(register,order),order) - end -end +if not modules then modules = { } end modules ['scrn-but'] = { + version = 1.001, + comment = "companion to scrn-but.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local f_two_colon = string.formatters["%s:%s"] + +function commands.registerbuttons(tag,register,language) + local data = sorters.definitions[language] + local orders = daya and data.orders or sorters.definitions.default.orders + local tag = tag == "" and { "" } or { tag } + for i=1,#orders do + local order = orders[i] + context.menubutton(tag,f_two_colon(register,order),order) + end +end diff --git a/tex/context/base/scrn-fld.lua b/tex/context/base/scrn-fld.lua index 9836cbebe..846385686 100644 --- a/tex/context/base/scrn-fld.lua +++ b/tex/context/base/scrn-fld.lua @@ -1,85 +1,85 @@ -if not modules then modules = { } end modules ['scrn-fld'] = { - version = 1.001, - comment = "companion to scrn-fld.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- we should move some code from lpdf-fld to here - -local variables = interfaces.variables -local v_yes = variables.yes - -local fields = { } -interactions.fields = fields - -local codeinjections = backends.codeinjections -local nodeinjections = backends.nodeinjections - -local function define(specification) - codeinjections.definefield(specification) -end - -local function defineset(name,set) - 
codeinjections.definefield(name,set) -end - -local function clone(specification) - codeinjections.clonefield(specification) -end - -local function insert(name,specification) - return nodeinjections.typesetfield(name,specification) -end - -fields.define = define -fields.defineset = defineset -fields.clone = clone -fields.insert = insert - -commands.definefield = define -commands.definefieldset = defineset -commands.clonefield = clone - -function commands.insertfield(name,specification) - tex.box["b_scrn_field_body"] = insert(name,specification) -end - --- (for the monent) only tex interface - -function commands.getfieldcategory(name) - local g = codeinjections.getfieldcategory(name) - if g then - context(g) - end -end - -function commands.getdefaultfieldvalue(name) - local d = codeinjections.getdefaultfieldvalue(name) - if d then - context(d) - end -end - -function commands.exportformdata(export) - if export == v_yes then - codeinjections.exportformdata() - end -end - -function commands.setformsmethod(method) - codeinjections.setformsmethod(method) -end - -function commands.doiffieldcategoryelse(name) - commands.doifelse(codeinjections.validfieldcategory(name)) -end - -function commands.doiffieldsetelse(tag) - commands.doifelse(codeinjections.validfieldset(name)) -end - -function commands.doiffieldelse(name) - commands.doifelse(codeinjections.validfield(name)) -end +if not modules then modules = { } end modules ['scrn-fld'] = { + version = 1.001, + comment = "companion to scrn-fld.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- we should move some code from lpdf-fld to here + +local variables = interfaces.variables +local v_yes = variables.yes + +local fields = { } +interactions.fields = fields + +local codeinjections = backends.codeinjections +local nodeinjections = backends.nodeinjections + +local function define(specification) + codeinjections.definefield(specification) +end + +local function defineset(name,set) + codeinjections.definefield(name,set) +end + +local function clone(specification) + codeinjections.clonefield(specification) +end + +local function insert(name,specification) + return nodeinjections.typesetfield(name,specification) +end + +fields.define = define +fields.defineset = defineset +fields.clone = clone +fields.insert = insert + +commands.definefield = define +commands.definefieldset = defineset +commands.clonefield = clone + +function commands.insertfield(name,specification) + tex.box["b_scrn_field_body"] = insert(name,specification) +end + +-- (for the monent) only tex interface + +function commands.getfieldcategory(name) + local g = codeinjections.getfieldcategory(name) + if g then + context(g) + end +end + +function commands.getdefaultfieldvalue(name) + local d = codeinjections.getdefaultfieldvalue(name) + if d then + context(d) + end +end + +function commands.exportformdata(export) + if export == v_yes then + codeinjections.exportformdata() + end +end + +function commands.setformsmethod(method) + codeinjections.setformsmethod(method) +end + +function commands.doiffieldcategoryelse(name) + commands.doifelse(codeinjections.validfieldcategory(name)) +end + +function commands.doiffieldsetelse(tag) + commands.doifelse(codeinjections.validfieldset(name)) +end + +function commands.doiffieldelse(name) + commands.doifelse(codeinjections.validfield(name)) +end diff --git a/tex/context/base/scrn-hlp.lua b/tex/context/base/scrn-hlp.lua index 
5f8368c6d..06abb3237 100644 --- a/tex/context/base/scrn-hlp.lua +++ b/tex/context/base/scrn-hlp.lua @@ -1,119 +1,119 @@ -if not modules then modules = { } end modules ['scrn-hlp'] = { - version = 1.001, - comment = "companion to scrn-hlp.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format - -local help = { } -interactions.help = help - -local a_help = attributes.private("help") - -local copy_nodelist = node.copy_list -local hpack_nodelist = node.hpack - -local register_list = nodes.pool.register - -local nodecodes = nodes.nodecodes - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist - -local data, references = { }, { } - -local helpscript = [[ - function Hide_All_Help(prefix) { - var n = 0 - while (true) { - n += 1 ; - v = this.getField(prefix + n) ; - if (v) { - v.hidden = true ; - this.dirty = false ; - } else { - return ; - } - } - } -]] - -local template = "javascript(Hide_All_Help{help:}),action(show{help:%s})" - -function help.register(number,name,box) - if helpscript then - interactions.javascripts.setpreamble("HelpTexts",helpscript) - helpscript = false - end - local b = copy_nodelist(tex.box[box]) - register_list(b) - data[number] = b - if name and name ~= "" then - references[name] = number - structures.references.define("",name,format(template,number)) - end -end - -local function collect(head,used) - while head do - local id = head.id - if id == hlist_code then - local a = head[a_help] - if a then - if not used then - used = { a } - else - used[#used+1] = a - end - else - used = collect(head.list,used) - end - elseif id == vlist_code then - used = collect(head.list,used) - end - head = head.next - end - return used -end - -function help.collect(box) - if next(data) then - return collect(tex.box[box].list) - end -end - -commands.registerhelp = help.register - -function commands.collecthelp(box) - local used = help.collect(box) - if used then - local done = { } - context.startoverlay() - for i=1,#used do - local d = data[used[i]] - if d and not done[d] then - local box = hpack_nodelist(copy_nodelist(d)) - context(false,box) - done[d] = true - else - -- error - end - end - context.stopoverlay() - end -end - -function help.reference(name) - return references[name] or tonumber(name) or 0 -end - -function commands.helpreference(name) - context(references[name] or tonumber(name) or 0) -end - -function commands.helpaction(name) - context(template,references[name] or tonumber(name) or 0) -end +if not modules then modules = { } end modules ['scrn-hlp'] = { + version = 1.001, + comment = "companion to scrn-hlp.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format + +local help = { } +interactions.help = help + +local a_help = attributes.private("help") + +local copy_nodelist = node.copy_list +local hpack_nodelist = node.hpack + +local register_list = nodes.pool.register + +local nodecodes = nodes.nodecodes + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist + +local data, references = { }, { } + +local helpscript = [[ + function Hide_All_Help(prefix) { + var n = 0 + while (true) { + n += 1 ; + v = this.getField(prefix + n) ; + if (v) { + v.hidden = true ; + this.dirty = false ; + } else { + return ; + } + } + } +]] + +local template = 
"javascript(Hide_All_Help{help:}),action(show{help:%s})" + +function help.register(number,name,box) + if helpscript then + interactions.javascripts.setpreamble("HelpTexts",helpscript) + helpscript = false + end + local b = copy_nodelist(tex.box[box]) + register_list(b) + data[number] = b + if name and name ~= "" then + references[name] = number + structures.references.define("",name,format(template,number)) + end +end + +local function collect(head,used) + while head do + local id = head.id + if id == hlist_code then + local a = head[a_help] + if a then + if not used then + used = { a } + else + used[#used+1] = a + end + else + used = collect(head.list,used) + end + elseif id == vlist_code then + used = collect(head.list,used) + end + head = head.next + end + return used +end + +function help.collect(box) + if next(data) then + return collect(tex.box[box].list) + end +end + +commands.registerhelp = help.register + +function commands.collecthelp(box) + local used = help.collect(box) + if used then + local done = { } + context.startoverlay() + for i=1,#used do + local d = data[used[i]] + if d and not done[d] then + local box = hpack_nodelist(copy_nodelist(d)) + context(false,box) + done[d] = true + else + -- error + end + end + context.stopoverlay() + end +end + +function help.reference(name) + return references[name] or tonumber(name) or 0 +end + +function commands.helpreference(name) + context(references[name] or tonumber(name) or 0) +end + +function commands.helpaction(name) + context(template,references[name] or tonumber(name) or 0) +end diff --git a/tex/context/base/scrn-ini.lua b/tex/context/base/scrn-ini.lua index 4831408f9..deca9cbbb 100644 --- a/tex/context/base/scrn-ini.lua +++ b/tex/context/base/scrn-ini.lua @@ -1,32 +1,32 @@ -if not modules then modules = { } end modules ['scrn-ini'] = { - version = 1.001, - comment = "companion to scrn-int.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next = next - -interactions = { } -interactions.general = interactions.general or { } -local general = interactions.general - -local codeinjections = backends.codeinjections - -local identitydata = { } - -local function setupidentity(specification) - for k, v in next, specification do - identitydata[k] = v - end - codeinjections.setupidentity(specification) -end - -function general.getidentity() - return identitydata -end - -general.setupidentity = setupidentity - -commands.setupidentity = setupidentity +if not modules then modules = { } end modules ['scrn-ini'] = { + version = 1.001, + comment = "companion to scrn-int.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next = next + +interactions = { } +interactions.general = interactions.general or { } +local general = interactions.general + +local codeinjections = backends.codeinjections + +local identitydata = { } + +local function setupidentity(specification) + for k, v in next, specification do + identitydata[k] = v + end + codeinjections.setupidentity(specification) +end + +function general.getidentity() + return identitydata +end + +general.setupidentity = setupidentity + +commands.setupidentity = setupidentity diff --git a/tex/context/base/scrn-pag.lua b/tex/context/base/scrn-pag.lua index 7003d0285..2a44ffbcd 100644 --- a/tex/context/base/scrn-pag.lua +++ b/tex/context/base/scrn-pag.lua @@ -1,27 +1,27 
@@ -if not modules then modules = { } end modules ['scrn-pag'] = { - version = 1.001, - comment = "companion to scrn-pag.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -interactions = interactions or { } -interactions.pages = interactions.pages or { } -local pages = interactions.pages - -local codeinjections = backends.codeinjections - -local function setupcanvas(specification) - codeinjections.setupcanvas(specification) -end - -local function setpagetransition(specification) - codeinjections.setpagetransition(specification) -end - -pages.setupcanvas = setupcanvas -pages.setpagetransition = setpagetransition - -commands.setupcanvas = setupcanvas -commands.setpagetransition = setpagetransition +if not modules then modules = { } end modules ['scrn-pag'] = { + version = 1.001, + comment = "companion to scrn-pag.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +interactions = interactions or { } +interactions.pages = interactions.pages or { } +local pages = interactions.pages + +local codeinjections = backends.codeinjections + +local function setupcanvas(specification) + codeinjections.setupcanvas(specification) +end + +local function setpagetransition(specification) + codeinjections.setpagetransition(specification) +end + +pages.setupcanvas = setupcanvas +pages.setpagetransition = setpagetransition + +commands.setupcanvas = setupcanvas +commands.setpagetransition = setpagetransition diff --git a/tex/context/base/scrn-ref.lua b/tex/context/base/scrn-ref.lua index df71b6a97..fb79ff6d8 100644 --- a/tex/context/base/scrn-ref.lua +++ b/tex/context/base/scrn-ref.lua @@ -1,65 +1,65 @@ -if not modules then modules = { } end modules ['scrn-ref'] = { - version = 1.001, - comment = "companion to scrn-int.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -interactions = interactions or { } -interactions.references = interactions.references or { } -local references = interactions.references - -local codeinjections = backends.codeinjections - -local expandcurrent = structures.references.expandcurrent -local identify = structures.references.identify - -local function check(what) - if what and what ~= "" then - local set, bug = identify("",what) - return not bug and #set > 0 and set - end -end - -local function setopendocumentaction(open) - local opendocument = check(open) - if opendocument then - codeinjections.registerdocumentopenaction(opendocument) - expandcurrent() - end -end - -local function setclosedocumentaction(close) - local closedocument = check(close) - if closedocument then - codeinjections.registerdocumentcloseaction(closedocument) - expandcurrent() - end -end - -local function setopenpageaction(open) - local openpage = check(open) - if openpage then - codeinjections.registerpageopenaction(openpage) - expandcurrent() - end -end - -local function setclosepageaction(close) - local closepage = check(close) - if closepage then - codeinjections.registerpagecloseaction(closepage) - expandcurrent() - end -end - -references.setopendocument = setopendocumentaction -references.setclosedocument = setclosedocumentaction -references.setopenpage = setopenpageaction -references.setclosepage = setclosepageaction - -commands.setopendocumentaction = setopendocumentaction 
-commands.setclosedocumentaction = setclosedocumentaction -commands.setopenpageaction = setopenpageaction -commands.setclosepageaction = setclosepageaction +if not modules then modules = { } end modules ['scrn-ref'] = { + version = 1.001, + comment = "companion to scrn-int.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +interactions = interactions or { } +interactions.references = interactions.references or { } +local references = interactions.references + +local codeinjections = backends.codeinjections + +local expandcurrent = structures.references.expandcurrent +local identify = structures.references.identify + +local function check(what) + if what and what ~= "" then + local set, bug = identify("",what) + return not bug and #set > 0 and set + end +end + +local function setopendocumentaction(open) + local opendocument = check(open) + if opendocument then + codeinjections.registerdocumentopenaction(opendocument) + expandcurrent() + end +end + +local function setclosedocumentaction(close) + local closedocument = check(close) + if closedocument then + codeinjections.registerdocumentcloseaction(closedocument) + expandcurrent() + end +end + +local function setopenpageaction(open) + local openpage = check(open) + if openpage then + codeinjections.registerpageopenaction(openpage) + expandcurrent() + end +end + +local function setclosepageaction(close) + local closepage = check(close) + if closepage then + codeinjections.registerpagecloseaction(closepage) + expandcurrent() + end +end + +references.setopendocument = setopendocumentaction +references.setclosedocument = setclosedocumentaction +references.setopenpage = setopenpageaction +references.setclosepage = setclosepageaction + +commands.setopendocumentaction = setopendocumentaction +commands.setclosedocumentaction = setclosedocumentaction +commands.setopenpageaction = setopenpageaction +commands.setclosepageaction = setclosepageaction diff --git a/tex/context/base/scrn-wid.lua b/tex/context/base/scrn-wid.lua index 4ad46761e..e0c3d54b6 100644 --- a/tex/context/base/scrn-wid.lua +++ b/tex/context/base/scrn-wid.lua @@ -1,214 +1,214 @@ -if not modules then modules = { } end modules ['scrn-wid'] = { - version = 1.001, - comment = "companion to scrn-wid.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -interactions = interactions or { } -local interactions = interactions - -local attachments = { } -local comments = { } -local soundclips = { } -local renderings = { } -local linkedlists = { } - -interactions.attachments = attachments -interactions.soundclips = soundclips -interactions.renderings = renderings -interactions.linkedlists = linkedlists - -local jobpasses = job.passes - -local codeinjections = backends.codeinjections -local nodeinjections = backends.nodeinjections - -local variables = interfaces.variables -local v_auto = variables.auto - -local trace_attachments = false trackers.register("widgets.attachments", function(v) trace_attachments = v end) - -local report_attachments = logs.reporter("widgets","attachments") - --- Symbols - -function commands.presetsymbollist(list) - codeinjections.presetsymbollist(list) -end - --- Attachments --- --- registered : unique id --- tag : used at the tex end --- file : name that the file has on the filesystem --- name : name that the file will get in the output --- title : up 
to the backend --- subtitle : up to the backend --- author : up to the backend --- method : up to the backend (hidden == no rendering) - -local nofautoattachments, lastregistered = 0, nil - -local function checkregistered(specification) - local registered = specification.registered - if not registered or registered == "" or registered == v_auto then - nofautoattachments = nofautoattachments + 1 - lastregistered = "attachment-" .. nofautoattachments - specification.registered = lastregistered - return lastregistered - else - return registered - end -end - -local function checkbuffer(specification) - local buffer = specification.buffer - if buffer ~= "" then - specification.data = buffers.getcontent(buffer) or "" - end -end - -function attachments.register(specification) -- beware of tag/registered mixup(tag is namespace) - local registered = checkregistered(specification) - checkbuffer(specification) - attachments[registered] = specification - if trace_attachments then - report_attachments("registering %a",registered) - end - return specification -end - -function attachments.insert(specification) - local registered = checkregistered(specification) - local r = attachments[registered] - if r then - if trace_attachments then - report_attachments("including registered %a",registered) - end - for k, v in next, r do - local s = specification[k] - if s == "" then - specification[k] = v - end - end - elseif trace_attachments then - report_attachments("including unregistered %a",registered) - end - checkbuffer(specification) - return nodeinjections.attachfile(specification) -end - -commands.registerattachment = attachments.register - -function commands.insertattachment(specification) - tex.box["b_scrn_attachment_link"] = attachments.insert(specification) -end - --- Comment - -function comments.insert(specification) - local buffer = specification.buffer - if buffer ~= "" then - specification.data = buffers.getcontent(buffer) or "" - end - return nodeinjections.comment(specification) -end - -function commands.insertcomment(specification) - tex.box["b_scrn_comment_link"] = comments.insert(specification) -end - --- Soundclips - -function soundclips.register(specification) - local tag = specification.tag - if tag and tag ~= "" then - local filename = specification.file - if not filename or filename == "" then - filename = tag - specification.file = filename - end - soundclips[tag] = specification - return specification - end -end - -function soundclips.insert(tag) - local sc = soundclips[tag] - if not sc then - -- todo: message - return soundclips.register { tag = tag } - else - return sc - end -end - -commands.registersoundclip = soundclips.register -commands.insertsoundclip = soundclips.insert - --- Renderings - -function renderings.register(specification) - if specification.label then - renderings[specification.label] = specification - return specification - end -end - -function renderings.rendering(label) - local rn = renderings[label] - if not rn then - -- todo: message - return renderings.register { label = label } - else - return rn - end -end - -local function var(label,key) - local rn = renderings[label] - return rn and rn[key] or "" -end - -renderings.var = var - -function commands.renderingvar(label,key) - context(var(label,key)) -end - -commands.registerrendering = renderings.register - --- Rendering: - -function commands.insertrenderingwindow(specification) - codeinjections.insertrenderingwindow(specification) -end - --- Linkedlists (only a context interface) - -function 
commands.definelinkedlist(tag) - -- no need -end - -function commands.enhancelinkedlist(tag,n) - local ll = jobpasses.gettobesaved(tag) - if ll then - ll[n] = texcount.realpageno - end -end - -function commands.addlinklistelement(tag) - local tobesaved = jobpasses.gettobesaved(tag) - local collected = jobpasses.getcollected(tag) or { } - local currentlink = #tobesaved + 1 - local noflinks = #collected - tobesaved[currentlink] = 0 - local f = collected[1] or 0 - local l = collected[noflinks] or 0 - local p = collected[currentlink-1] or f - local n = collected[currentlink+1] or l - context.setlinkedlistproperties(currentlink,noflinks,f,p,n,l) - -- context.ctxlatelua(function() commands.enhancelinkedlist(tag,currentlink) end) -end +if not modules then modules = { } end modules ['scrn-wid'] = { + version = 1.001, + comment = "companion to scrn-wid.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +interactions = interactions or { } +local interactions = interactions + +local attachments = { } +local comments = { } +local soundclips = { } +local renderings = { } +local linkedlists = { } + +interactions.attachments = attachments +interactions.soundclips = soundclips +interactions.renderings = renderings +interactions.linkedlists = linkedlists + +local jobpasses = job.passes + +local codeinjections = backends.codeinjections +local nodeinjections = backends.nodeinjections + +local variables = interfaces.variables +local v_auto = variables.auto + +local trace_attachments = false trackers.register("widgets.attachments", function(v) trace_attachments = v end) + +local report_attachments = logs.reporter("widgets","attachments") + +-- Symbols + +function commands.presetsymbollist(list) + codeinjections.presetsymbollist(list) +end + +-- Attachments +-- +-- registered : unique id +-- tag : used at the tex end +-- file : name that the file has on the filesystem +-- name : name that the file will get in the output +-- title : up to the backend +-- subtitle : up to the backend +-- author : up to the backend +-- method : up to the backend (hidden == no rendering) + +local nofautoattachments, lastregistered = 0, nil + +local function checkregistered(specification) + local registered = specification.registered + if not registered or registered == "" or registered == v_auto then + nofautoattachments = nofautoattachments + 1 + lastregistered = "attachment-" .. 
nofautoattachments + specification.registered = lastregistered + return lastregistered + else + return registered + end +end + +local function checkbuffer(specification) + local buffer = specification.buffer + if buffer ~= "" then + specification.data = buffers.getcontent(buffer) or "" + end +end + +function attachments.register(specification) -- beware of tag/registered mixup(tag is namespace) + local registered = checkregistered(specification) + checkbuffer(specification) + attachments[registered] = specification + if trace_attachments then + report_attachments("registering %a",registered) + end + return specification +end + +function attachments.insert(specification) + local registered = checkregistered(specification) + local r = attachments[registered] + if r then + if trace_attachments then + report_attachments("including registered %a",registered) + end + for k, v in next, r do + local s = specification[k] + if s == "" then + specification[k] = v + end + end + elseif trace_attachments then + report_attachments("including unregistered %a",registered) + end + checkbuffer(specification) + return nodeinjections.attachfile(specification) +end + +commands.registerattachment = attachments.register + +function commands.insertattachment(specification) + tex.box["b_scrn_attachment_link"] = attachments.insert(specification) +end + +-- Comment + +function comments.insert(specification) + local buffer = specification.buffer + if buffer ~= "" then + specification.data = buffers.getcontent(buffer) or "" + end + return nodeinjections.comment(specification) +end + +function commands.insertcomment(specification) + tex.box["b_scrn_comment_link"] = comments.insert(specification) +end + +-- Soundclips + +function soundclips.register(specification) + local tag = specification.tag + if tag and tag ~= "" then + local filename = specification.file + if not filename or filename == "" then + filename = tag + specification.file = filename + end + soundclips[tag] = specification + return specification + end +end + +function soundclips.insert(tag) + local sc = soundclips[tag] + if not sc then + -- todo: message + return soundclips.register { tag = tag } + else + return sc + end +end + +commands.registersoundclip = soundclips.register +commands.insertsoundclip = soundclips.insert + +-- Renderings + +function renderings.register(specification) + if specification.label then + renderings[specification.label] = specification + return specification + end +end + +function renderings.rendering(label) + local rn = renderings[label] + if not rn then + -- todo: message + return renderings.register { label = label } + else + return rn + end +end + +local function var(label,key) + local rn = renderings[label] + return rn and rn[key] or "" +end + +renderings.var = var + +function commands.renderingvar(label,key) + context(var(label,key)) +end + +commands.registerrendering = renderings.register + +-- Rendering: + +function commands.insertrenderingwindow(specification) + codeinjections.insertrenderingwindow(specification) +end + +-- Linkedlists (only a context interface) + +function commands.definelinkedlist(tag) + -- no need +end + +function commands.enhancelinkedlist(tag,n) + local ll = jobpasses.gettobesaved(tag) + if ll then + ll[n] = texcount.realpageno + end +end + +function commands.addlinklistelement(tag) + local tobesaved = jobpasses.gettobesaved(tag) + local collected = jobpasses.getcollected(tag) or { } + local currentlink = #tobesaved + 1 + local noflinks = #collected + tobesaved[currentlink] = 0 + local f = 
collected[1] or 0 + local l = collected[noflinks] or 0 + local p = collected[currentlink-1] or f + local n = collected[currentlink+1] or l + context.setlinkedlistproperties(currentlink,noflinks,f,p,n,l) + -- context.ctxlatelua(function() commands.enhancelinkedlist(tag,currentlink) end) +end diff --git a/tex/context/base/scrp-cjk.lua b/tex/context/base/scrp-cjk.lua index f7167b45c..083fc4e53 100644 --- a/tex/context/base/scrp-cjk.lua +++ b/tex/context/base/scrp-cjk.lua @@ -1,951 +1,951 @@ -if not modules then modules = { } end modules ['scrp-cjk'] = { - version = 1.001, - comment = "companion to scrp-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- We can speed this up by preallocating nodes and copying them but the --- gain is not that large. - --- The input line endings: there is no way to distinguish between --- inline spaces and endofline turned into spaces (would not make --- sense either because otherwise a wanted space at the end of a --- line would have to be a hard coded ones. - -local utfchar = utf.char - -local insert_node_after = node.insert_after -local insert_node_before = node.insert_before -local remove_node = nodes.remove - -local nodepool = nodes.pool -local new_glue = nodepool.glue -local new_kern = nodepool.kern -local new_penalty = nodepool.penalty - -local nodecodes = nodes.nodecodes -local skipcodes = nodes.skipcodes -local glyph_code = nodecodes.glyph -local glue_code = nodecodes.glue -local userskip_code = skipcodes.userskip - -local a_scriptstatus = attributes.private('scriptstatus') -local a_scriptinjection = attributes.private('scriptinjection') - -local categorytonumber = scripts.categorytonumber -local numbertocategory = scripts.numbertocategory -local hash = scripts.hash -local numbertodataset = scripts.numbertodataset - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers -local quaddata = fonthashes.quads -local spacedata = fonthashes.spaces - -local trace_details = false trackers.register("scripts.details", function(v) trace_details = v end) - -local report_details = logs.reporter("scripts","detail") - --- raggedleft is controlled by leftskip and we might end up with a situation where --- the intercharacter spacing interferes with this; the solution is to patch the --- nodelist but better is to use veryraggedleft - -local inter_char_shrink = 0 -local inter_char_stretch = 0 -local inter_char_half_shrink = 0 -local inter_char_half_stretch = 0 -local inter_char_quarter_shrink = 0 -local inter_char_quarter_stretch = 0 - -local full_char_width = 0 -local half_char_width = 0 -local quarter_char_width = 0 - -local inter_char_hangul_penalty = 0 - -local function set_parameters(font,data) - -- beware: parameters can be nil in e.g. 
punk variants - local quad = quaddata[font] - full_char_width = quad - half_char_width = quad/2 - quarter_char_width = quad/4 - inter_char_shrink = data.inter_char_shrink_factor * quad - inter_char_stretch = data.inter_char_stretch_factor * quad - inter_char_half_shrink = data.inter_char_half_shrink_factor * quad - inter_char_half_stretch = data.inter_char_half_stretch_factor * quad - inter_char_quarter_shrink = data.inter_char_quarter_shrink_factor * quad - inter_char_quarter_stretch = data.inter_char_quarter_stretch_factor * quad - inter_char_hangul_penalty = data.inter_char_hangul_penalty -end - --- a test version did compensate for crappy halfwidth but we can best do that --- at font definition time and/or just assume a correct font - -local function trace_detail(current,what) - local prev = current.prev - local c_id = current.id - local p_id = prev and prev.id - if c_id == glyph_code then - local c_ch = current.char - if p_id == glyph_code then - local p_ch = p_id and prev.char - report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,c_ch,hash[c_ch]) - else - report_details("[%s] [%C %a]",what,c_ch,hash[c_ch]) - end - else - if p_id == glyph_code then - local p_ch = p_id and prev.char - report_details("[%C %a] [%s]",p_ch,hash[p_ch],what) - else - report_details("[%s]",what) - end - end -end - -local function trace_detail_between(p,n,what) - local p_ch = p.char - local n_ch = n.char - report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,n_ch,hash[n_ch]) -end - -local function nobreak(head,current) - if trace_details then - trace_detail(current,"break") - end - insert_node_before(head,current,new_penalty(10000)) -end - -local function stretch_break(head,current) - if trace_details then - trace_detail(current,"stretch break") - end - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) -end - -local function shrink_break(head,current) - if trace_details then - trace_detail(current,"shrink break") - end - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) -end - -local function nobreak_stretch(head,current) - if trace_details then - trace_detail(current,"no break stretch") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) -end - -local function korean_break(head,current) - if trace_details then - trace_detail(current,"korean break") - end - insert_node_before(head,current,new_penalty(inter_char_hangul_penalty)) -end - -local function nobreak_shrink(head,current) - if trace_details then - trace_detail(current,"nobreak shrink") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) -end - -local function nobreak_autoshrink(head,current) - if trace_details then - trace_detail(current,"nobreak autoshrink") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) -end - -local function nobreak_stretch_nobreak_shrink(head,current) - if trace_details then - trace_detail(current,"nobreak stretch nobreak shrink") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) -end - -local function nobreak_stretch_nobreak_autoshrink(head,current) - if trace_details then - trace_detail(current,"nobreak stretch nobreak autoshrink") - end - 
insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) -end - -local function nobreak_shrink_nobreak_stretch(head,current) - if trace_details then - trace_detail(current,"nobreak shrink nobreak stretch") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) -end - -local function nobreak_autoshrink_nobreak_stretch(head,current) - if trace_details then - trace_detail(current,"nobreak autoshrink nobreak stretch") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) -end - -local function nobreak_shrink_break_stretch(head,current) - if trace_details then - trace_detail(current,"nobreak shrink break stretch") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) -end - -local function nobreak_autoshrink_break_stretch(head,current) - if trace_details then - trace_detail(current,"nobreak autoshrink break stretch") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) -end - -local function nobreak_shrink_break_stretch_nobreak_shrink(head,current) - if trace_details then - trace_detail(current,"nobreak shrink break stretch nobreak shrink") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) -end - -local function japanese_between_full_close_open(head,current) -- todo: check width - if trace_details then - trace_detail(current,"japanese between full close open") - end - insert_node_before(head,current,new_kern(-half_char_width)) - insert_node_before(head,current,new_glue(half_char_width,0,inter_char_half_shrink)) - insert_node_before(head,current,new_kern(-half_char_width)) -end - -local function japanese_between_full_close_full_close(head,current) -- todo: check width - if trace_details then - trace_detail(current,"japanese between full close full close") - end - insert_node_before(head,current,new_kern(-half_char_width)) - -- insert_node_before(head,current,new_glue(half_char_width,0,inter_char_half_shrink)) -end - -local function japanese_before_full_width_punct(head,current) -- todo: check width - if trace_details then - trace_detail(current,"japanese before full width punct") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(quarter_char_width,0,inter_char_quarter_shrink)) - insert_node_before(head,current,new_kern(-quarter_char_width)) -end - -local function japanese_after_full_width_punct(head,current) -- todo: check width - if trace_details then - trace_detail(current,"japanese after full width punct") - 
end - insert_node_before(head,current,new_kern(-quarter_char_width)) - insert_node_before(head,current,new_glue(quarter_char_width,0,inter_char_quarter_shrink)) -end - -local function nobreak_autoshrink_break_stretch_nobreak_autoshrink(head,current) - if trace_details then - trace_detail(current,"nobreak autoshrink break stretch nobreak autoshrink") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) -end - -local function nobreak_autoshrink_break_stretch_nobreak_shrink(head,current) - if trace_details then - trace_detail(current,"nobreak autoshrink break stretch nobreak shrink") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) -end - -local function nobreak_shrink_break_stretch_nobreak_autoshrink(head,current) - if trace_details then - trace_detail(current,"nobreak shrink break stretch nobreak autoshrink") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) -end - -local function nobreak_stretch_break_shrink(head,current) - if trace_details then - trace_detail(current,"nobreak stretch break shrink") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) -end - -local function nobreak_stretch_break_autoshrink(head,current) - if trace_details then - trace_detail(current,"nobreak stretch break autoshrink") - end - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) - insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) -end - --- Korean: hangul - -local korean_0 = { -} - -local korean_1 = { - jamo_initial = korean_break, - korean = korean_break, - chinese = korean_break, - hiragana = korean_break, - katakana = korean_break, - half_width_open = stretch_break, - half_width_close = nobreak, - full_width_open = stretch_break, - full_width_close = nobreak, - full_width_punct = nobreak, --- hyphen = nil, - non_starter = korean_break, - other = korean_break, -} - -local korean_2 = { - jamo_initial = stretch_break, - korean = stretch_break, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = stretch_break, - half_width_close = nobreak, - full_width_open = stretch_break, - full_width_close = nobreak, - full_width_punct = nobreak, --- hyphen = nil, - non_starter = stretch_break, - other = stretch_break, -} - -local korean_3 = { - jamo_initial = stretch_break, - korean = stretch_break, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = stretch_break, - half_width_close = nobreak, - full_width_open = stretch_break, - full_width_close = nobreak, - full_width_punct = 
nobreak, --- hyphen = nil, - non_starter = nobreak, - other = nobreak, -} - -local korean_4 = { - jamo_initial = nobreak, - korean = nobreak, - chinese = nobreak, - hiragana = nobreak, - katakana = nobreak, - half_width_open = nobreak, - half_width_close = nobreak, - full_width_open = nobreak, - full_width_close = nobreak, - full_width_punct = nobreak, - hyphen = nobreak, - non_starter = nobreak, - other = nobreak, -} - -local korean_5 = { - jamo_initial = stretch_break, - korean = stretch_break, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = stretch_break, - half_width_close = nobreak_stretch, - full_width_open = stretch_break, - full_width_close = nobreak_stretch, - full_width_punct = nobreak_stretch, - hyphen = nobreak_stretch, - non_starter = nobreak_stretch, - other = stretch_break, -} - -local injectors = { -- [previous] [current] - jamo_final = korean_1, - korean = korean_1, - chinese = korean_1, - hiragana = korean_1, - katakana = korean_1, - hyphen = korean_2, - start = korean_0, - other = korean_2, - non_starter = korean_3, - full_width_open = korean_4, - half_width_open = korean_4, - full_width_close = korean_5, - full_width_punct = korean_5, - half_width_close = korean_5, -} - -local function process(head,first,last) - if first ~= last then - local lastfont, previous, last = nil, "start", nil - while true do - local upcoming, id = first.next, first.id - if id == glyph_code then - local a = first[a_scriptstatus] - local current = numbertocategory[a] - local action = injectors[previous] - if action then - action = action[current] - if action then - local font = first.font - if font ~= lastfont then - lastfont = font - set_parameters(font,numbertodataset[first[a_scriptinjection]]) - end - action(head,first) - end - end - previous = current - else -- glue - local p, n = first.prev, upcoming - if p and n then - local pid, nid = p.id, n.id - if pid == glyph_code and nid == glyph_code then - local pa, na = p[a_scriptstatus], n[a_scriptstatus] - local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na] - if not pcjk or not ncjk - or pcjk == "korean" or ncjk == "korean" - or pcjk == "other" or ncjk == "other" - or pcjk == "jamo_final" or ncjk == "jamo_initial" then - previous = "start" - else -- if head ~= first then - remove_node(head,first,true) - previous = pcjk - -- else - -- previous = pcjk - end - else - previous = "start" - end - else - previous = "start" - end - end - if upcoming == last then -- was stop - break - else - first = upcoming - end - end - end -end - -scripts.installmethod { - name = "hangul", - injector = process, - datasets = { -- todo: metatables - default = { - inter_char_shrink_factor = 0.50, -- of quad - inter_char_stretch_factor = 0.50, -- of quad - inter_char_half_shrink_factor = 0.50, -- of quad - inter_char_half_stretch_factor = 0.50, -- of quad - inter_char_quarter_shrink_factor = 0.50, -- of quad - inter_char_quarter_stretch_factor = 0.50, -- of quad - inter_char_hangul_penalty = 50, - }, - }, -} - --- Chinese: hanzi - -local chinese_0 = { -} - -local chinese_1 = { - jamo_initial = korean_break, - korean = korean_break, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = nobreak_stretch_break_autoshrink, - half_width_close = nobreak_stretch, - full_width_open = nobreak_stretch_break_shrink, - full_width_close = nobreak_stretch, - full_width_punct = nobreak_stretch, --- hyphen = nil, - non_starter = nobreak_stretch, - other = 
stretch_break, -} - -local chinese_2 = { - jamo_initial = korean_break, - korean = stretch_break, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = nobreak_stretch_break_autoshrink, - half_width_close = nobreak_stretch, - full_width_open = nobreak_stretch_break_shrink, - full_width_close = nobreak_stretch, - full_width_punct = nobreak_stretch, - hyphen = nobreak_stretch, - non_starter = nobreak_stretch, - other = stretch_break, -} - -local chinese_3 = { - jamo_initial = korean_break, - korean = stretch_break, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = nobreak_stretch_break_autoshrink, - half_width_close = nobreak_stretch, - full_width_open = nobreak_stretch_break_shrink, - full_width_close = nobreak_stretch, - full_width_punct = nobreak_stretch, --- hyphen = nil, - non_starter = nobreak_stretch, - other = stretch_break, -} - -local chinese_4 = { --- jamo_initial = nil, --- korean = nil, --- chinese = nil, --- hiragana = nil, --- katakana = nil, - half_width_open = nobreak_autoshrink, - half_width_close = nil, - full_width_open = nobreak_shrink, - full_width_close = nobreak, - full_width_punct = nobreak, --- hyphen = nil, - non_starter = nobreak, --- other = nil, -} - -local chinese_5 = { - jamo_initial = stretch_break, - korean = stretch_break, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = nobreak_stretch_break_autoshrink, - half_width_close = nobreak_stretch, - full_width_open = nobreak_stretch_break_shrink, - full_width_close = nobreak_stretch, - full_width_punct = nobreak_stretch, --- hyphen = nil, - non_starter = nobreak_stretch, - other = stretch_break, -} - -local chinese_6 = { - jamo_initial = nobreak_stretch, - korean = nobreak_stretch, - chinese = nobreak_stretch, - hiragana = nobreak_stretch, - katakana = nobreak_stretch, - half_width_open = nobreak_stretch_break_autoshrink, - half_width_close = nobreak_stretch, - full_width_open = nobreak_stretch_break_shrink, - full_width_close = nobreak_stretch, - full_width_punct = nobreak_stretch, - hyphen = nobreak_stretch, - non_starter = nobreak_stretch, - other = nobreak_stretch, -} - -local chinese_7 = { - jami_initial = nobreak_shrink_break_stretch, - korean = nobreak_shrink_break_stretch, - chinese = stretch_break, -- nobreak_shrink_break_stretch, - hiragana = stretch_break, -- nobreak_shrink_break_stretch, - katakana = stretch_break, -- nobreak_shrink_break_stretch, - half_width_open = nobreak_shrink_break_stretch_nobreak_autoshrink, - half_width_close = nobreak_shrink_nobreak_stretch, - full_width_open = nobreak_shrink_break_stretch_nobreak_shrink, - full_width_close = nobreak_shrink_nobreak_stretch, - full_width_punct = nobreak_shrink_nobreak_stretch, - hyphen = nobreak_shrink_break_stretch, - non_starter = nobreak_shrink_break_stretch, - other = nobreak_shrink_break_stretch, -} - -local chinese_8 = { - jami_initial = nobreak_shrink_break_stretch, - korean = nobreak_autoshrink_break_stretch, - chinese = stretch_break, -- nobreak_autoshrink_break_stretch, - hiragana = stretch_break, -- nobreak_autoshrink_break_stretch, - katakana = stretch_break, -- nobreak_autoshrink_break_stretch, - half_width_open = nobreak_autoshrink_break_stretch_nobreak_autoshrink, - half_width_close = nobreak_autoshrink_nobreak_stretch, - full_width_open = nobreak_autoshrink_break_stretch_nobreak_shrink, - full_width_close = nobreak_autoshrink_nobreak_stretch, - full_width_punct = 
nobreak_autoshrink_nobreak_stretch, - hyphen = nobreak_autoshrink_break_stretch, - non_starter = nobreak_autoshrink_break_stretch, - other = nobreak_autoshrink_break_stretch, -} - -local injectors = { -- [previous] [current] - jamo_final = chinese_1, - korean = chinese_1, - chinese = chinese_2, - hiragana = chinese_2, - katakana = chinese_2, - hyphen = chinese_3, - start = chinese_4, - other = chinese_5, - non_starter = chinese_5, - full_width_open = chinese_6, - half_width_open = chinese_6, - full_width_close = chinese_7, - full_width_punct = chinese_7, - half_width_close = chinese_8, -} - -local function process(head,first,last) - if first ~= last then - local lastfont, previous, last = nil, "start", nil - while true do - local upcoming, id = first.next, first.id - if id == glyph_code then - local a = first[a_scriptstatus] - local current = numbertocategory[a] - local action = injectors[previous] - if action then - action = action[current] - if action then - local font = first.font - if font ~= lastfont then - lastfont = font - set_parameters(font,numbertodataset[first[a_scriptinjection]]) - end - action(head,first) - end - end - previous = current - else -- glue - local p, n = first.prev, upcoming - if p and n then - local pid, nid = p.id, n.id - if pid == glyph_code and nid == glyph_code then - local pa, na = p[a_scriptstatus], n[a_scriptstatus] - local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na] - if not pcjk or not ncjk - or pcjk == "korean" or ncjk == "korean" - or pcjk == "other" or ncjk == "other" - or pcjk == "jamo_final" or ncjk == "jamo_initial" - or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean - previous = "start" - else -- if head ~= first then - remove_node(head,first,true) - previous = pcjk - -- else - -- previous = pcjk - end - else - previous = "start" - end - else - previous = "start" - end - end - if upcoming == last then -- was stop - break - else - first = upcoming - end - end - end -end - -scripts.installmethod { - name = "hanzi", - injector = process, - datasets = { - default = { - inter_char_shrink_factor = 0.50, -- of quad - inter_char_stretch_factor = 0.50, -- of quad - inter_char_half_shrink_factor = 0.50, -- of quad - inter_char_half_stretch_factor = 0.50, -- of quad - inter_char_quarter_shrink_factor = 0.50, -- of quad - inter_char_quarter_stretch_factor = 0.50, -- of quad - inter_char_hangul_penalty = 50, - }, - }, -} - --- Japanese: idiographic, hiragana, katakana, romanji / jis - -local japanese_0 = { -} - -local japanese_1 = { - jamo_initial = korean_break, - korean = korean_break, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = nobreak_stretch_break_autoshrink, - half_width_close = nobreak_stretch, - full_width_open = nobreak_stretch_break_shrink, - full_width_close = nobreak_stretch, - full_width_punct = nobreak_stretch, --- hyphen = nil, - non_starter = nobreak_stretch, - other = stretch_break, -} - -local japanese_2 = { - jamo_initial = korean_break, - korean = stretch_break, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = nobreak_stretch_break_autoshrink, - half_width_close = nobreak_stretch, - full_width_open = nobreak_stretch_break_shrink, - full_width_close = nobreak_stretch, - full_width_punct = japanese_before_full_width_punct, -- nobreak_stretch, - hyphen = nobreak_stretch, - non_starter = nobreak_stretch, - other = stretch_break, -} - -local japanese_3 = { - 
jamo_initial = korean_break, - korean = stretch_break, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = nobreak_stretch_break_autoshrink, - half_width_close = nobreak_stretch, - full_width_open = nobreak_stretch_break_shrink, - full_width_close = nobreak_stretch, - full_width_punct = nobreak_stretch, --- hyphen = nil, - non_starter = nobreak_stretch, - other = stretch_break, -} - -local japanese_4 = { --- jamo_initial = nil, --- korean = nil, --- chinese = nil, --- hiragana = nil, --- katakana = nil, - half_width_open = nobreak_autoshrink, - half_width_close = nil, - full_width_open = nobreak_shrink, - full_width_close = nobreak, - full_width_punct = nobreak, --- hyphen = nil, - non_starter = nobreak, --- other = nil, -} - -local japanese_5 = { - jamo_initial = stretch_break, - korean = stretch_break, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = nobreak_stretch_break_autoshrink, - half_width_close = nobreak_stretch, - full_width_open = nobreak_stretch_break_shrink, - full_width_close = nobreak_stretch, - full_width_punct = nobreak_stretch, --- hyphen = nil, - non_starter = nobreak_stretch, - other = stretch_break, -} - -local japanese_6 = { - jamo_initial = nobreak_stretch, - korean = nobreak_stretch, - chinese = nobreak_stretch, - hiragana = nobreak_stretch, - katakana = nobreak_stretch, - half_width_open = nobreak_stretch_break_autoshrink, - half_width_close = nobreak_stretch, - full_width_open = nobreak_stretch_break_shrink, - full_width_close = nobreak_stretch, - full_width_punct = nobreak_stretch, - hyphen = nobreak_stretch, - non_starter = nobreak_stretch, - other = nobreak_stretch, -} - -local japanese_7 = { - jami_initial = nobreak_shrink_break_stretch, - korean = nobreak_shrink_break_stretch, - chinese = japanese_after_full_width_punct, -- stretch_break - hiragana = japanese_after_full_width_punct, -- stretch_break - katakana = japanese_after_full_width_punct, -- stretch_break - half_width_open = nobreak_shrink_break_stretch_nobreak_autoshrink, - half_width_close = nobreak_shrink_nobreak_stretch, - full_width_open = japanese_between_full_close_open, -- !! 
- full_width_close = japanese_between_full_close_full_close, -- nobreak_shrink_nobreak_stretch, - full_width_punct = nobreak_shrink_nobreak_stretch, - hyphen = nobreak_shrink_break_stretch, - non_starter = nobreak_shrink_break_stretch, - other = nobreak_shrink_break_stretch, -} - -local japanese_8 = { - jami_initial = nobreak_shrink_break_stretch, - korean = nobreak_autoshrink_break_stretch, - chinese = stretch_break, - hiragana = stretch_break, - katakana = stretch_break, - half_width_open = nobreak_autoshrink_break_stretch_nobreak_autoshrink, - half_width_close = nobreak_autoshrink_nobreak_stretch, - full_width_open = nobreak_autoshrink_break_stretch_nobreak_shrink, - full_width_close = nobreak_autoshrink_nobreak_stretch, - full_width_punct = nobreak_autoshrink_nobreak_stretch, - hyphen = nobreak_autoshrink_break_stretch, - non_starter = nobreak_autoshrink_break_stretch, - other = nobreak_autoshrink_break_stretch, -} - -local injectors = { -- [previous] [current] - jamo_final = japanese_1, - korean = japanese_1, - chinese = japanese_2, - hiragana = japanese_2, - katakana = japanese_2, - hyphen = japanese_3, - start = japanese_4, - other = japanese_5, - non_starter = japanese_5, - full_width_open = japanese_6, - half_width_open = japanese_6, - full_width_close = japanese_7, - full_width_punct = japanese_7, - half_width_close = japanese_8, -} - -local function process(head,first,last) - if first ~= last then - local lastfont, previous, last = nil, "start", nil - while true do - local upcoming, id = first.next, first.id - if id == glyph_code then - local a = first[a_scriptstatus] - local current = numbertocategory[a] - local action = injectors[previous] - if action then - action = action[current] - if action then - local font = first.font - if font ~= lastfont then - lastfont = font - set_parameters(font,numbertodataset[first[a_scriptinjection]]) - end - action(head,first) - end - end - previous = current - --- elseif id == math_code then --- upcoming = end_of_math(current).next --- previous = "start" - - else -- glue - local p, n = first.prev, upcoming -- we should remember prev - if p and n then - local pid, nid = p.id, n.id - if pid == glyph_code and nid == glyph_code then - local pa, na = p[a_scriptstatus], n[a_scriptstatus] - local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na] - if not pcjk or not ncjk - or pcjk == "korean" or ncjk == "korean" - or pcjk == "other" or ncjk == "other" - or pcjk == "jamo_final" or ncjk == "jamo_initial" - or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean - previous = "start" - else -- if head ~= first then -if id == glue_code and first.subtype == userskip_code then -- also scriptstatus check? - -- for the moment no distinction possible between space and userskip - local w = first.spec.width - local s = spacedata[p.font] - if w == s then -- could be option - if trace_details then - trace_detail_between(p,n,"space removed") - end - remove_node(head,first,true) - end -end - previous = pcjk - -- else - -- previous = pcjk - end - else - previous = "start" - end - else - previous = "start" - end - end - if upcoming == last then -- was stop - break - else - first = upcoming - end - end - end -end - -scripts.installmethod { - name = "nihongo", -- what name to use? 
- injector = process, - datasets = { - default = { - inter_char_shrink_factor = 0.50, -- of quad - inter_char_stretch_factor = 0.50, -- of quad - inter_char_half_shrink_factor = 0.50, -- of quad - inter_char_half_stretch_factor = 0.50, -- of quad - inter_char_quarter_shrink_factor = 0.25, -- of quad - inter_char_quarter_stretch_factor = 0.25, -- of quad - inter_char_hangul_penalty = 50, - }, - }, -} - +if not modules then modules = { } end modules ['scrp-cjk'] = { + version = 1.001, + comment = "companion to scrp-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- We can speed this up by preallocating nodes and copying them but the +-- gain is not that large. + +-- The input line endings: there is no way to distinguish between +-- inline spaces and endofline turned into spaces (would not make +-- sense either because otherwise a wanted space at the end of a +-- line would have to be a hard coded ones. + +local utfchar = utf.char + +local insert_node_after = node.insert_after +local insert_node_before = node.insert_before +local remove_node = nodes.remove + +local nodepool = nodes.pool +local new_glue = nodepool.glue +local new_kern = nodepool.kern +local new_penalty = nodepool.penalty + +local nodecodes = nodes.nodecodes +local skipcodes = nodes.skipcodes +local glyph_code = nodecodes.glyph +local glue_code = nodecodes.glue +local userskip_code = skipcodes.userskip + +local a_scriptstatus = attributes.private('scriptstatus') +local a_scriptinjection = attributes.private('scriptinjection') + +local categorytonumber = scripts.categorytonumber +local numbertocategory = scripts.numbertocategory +local hash = scripts.hash +local numbertodataset = scripts.numbertodataset + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local quaddata = fonthashes.quads +local spacedata = fonthashes.spaces + +local trace_details = false trackers.register("scripts.details", function(v) trace_details = v end) + +local report_details = logs.reporter("scripts","detail") + +-- raggedleft is controlled by leftskip and we might end up with a situation where +-- the intercharacter spacing interferes with this; the solution is to patch the +-- nodelist but better is to use veryraggedleft + +local inter_char_shrink = 0 +local inter_char_stretch = 0 +local inter_char_half_shrink = 0 +local inter_char_half_stretch = 0 +local inter_char_quarter_shrink = 0 +local inter_char_quarter_stretch = 0 + +local full_char_width = 0 +local half_char_width = 0 +local quarter_char_width = 0 + +local inter_char_hangul_penalty = 0 + +local function set_parameters(font,data) + -- beware: parameters can be nil in e.g. 
punk variants + local quad = quaddata[font] + full_char_width = quad + half_char_width = quad/2 + quarter_char_width = quad/4 + inter_char_shrink = data.inter_char_shrink_factor * quad + inter_char_stretch = data.inter_char_stretch_factor * quad + inter_char_half_shrink = data.inter_char_half_shrink_factor * quad + inter_char_half_stretch = data.inter_char_half_stretch_factor * quad + inter_char_quarter_shrink = data.inter_char_quarter_shrink_factor * quad + inter_char_quarter_stretch = data.inter_char_quarter_stretch_factor * quad + inter_char_hangul_penalty = data.inter_char_hangul_penalty +end + +-- a test version did compensate for crappy halfwidth but we can best do that +-- at font definition time and/or just assume a correct font + +local function trace_detail(current,what) + local prev = current.prev + local c_id = current.id + local p_id = prev and prev.id + if c_id == glyph_code then + local c_ch = current.char + if p_id == glyph_code then + local p_ch = p_id and prev.char + report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,c_ch,hash[c_ch]) + else + report_details("[%s] [%C %a]",what,c_ch,hash[c_ch]) + end + else + if p_id == glyph_code then + local p_ch = p_id and prev.char + report_details("[%C %a] [%s]",p_ch,hash[p_ch],what) + else + report_details("[%s]",what) + end + end +end + +local function trace_detail_between(p,n,what) + local p_ch = p.char + local n_ch = n.char + report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,n_ch,hash[n_ch]) +end + +local function nobreak(head,current) + if trace_details then + trace_detail(current,"break") + end + insert_node_before(head,current,new_penalty(10000)) +end + +local function stretch_break(head,current) + if trace_details then + trace_detail(current,"stretch break") + end + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) +end + +local function shrink_break(head,current) + if trace_details then + trace_detail(current,"shrink break") + end + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) +end + +local function nobreak_stretch(head,current) + if trace_details then + trace_detail(current,"no break stretch") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) +end + +local function korean_break(head,current) + if trace_details then + trace_detail(current,"korean break") + end + insert_node_before(head,current,new_penalty(inter_char_hangul_penalty)) +end + +local function nobreak_shrink(head,current) + if trace_details then + trace_detail(current,"nobreak shrink") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) +end + +local function nobreak_autoshrink(head,current) + if trace_details then + trace_detail(current,"nobreak autoshrink") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) +end + +local function nobreak_stretch_nobreak_shrink(head,current) + if trace_details then + trace_detail(current,"nobreak stretch nobreak shrink") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) +end + +local function nobreak_stretch_nobreak_autoshrink(head,current) + if trace_details then + trace_detail(current,"nobreak stretch nobreak autoshrink") + end + 
insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) +end + +local function nobreak_shrink_nobreak_stretch(head,current) + if trace_details then + trace_detail(current,"nobreak shrink nobreak stretch") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) +end + +local function nobreak_autoshrink_nobreak_stretch(head,current) + if trace_details then + trace_detail(current,"nobreak autoshrink nobreak stretch") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) +end + +local function nobreak_shrink_break_stretch(head,current) + if trace_details then + trace_detail(current,"nobreak shrink break stretch") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) +end + +local function nobreak_autoshrink_break_stretch(head,current) + if trace_details then + trace_detail(current,"nobreak autoshrink break stretch") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) +end + +local function nobreak_shrink_break_stretch_nobreak_shrink(head,current) + if trace_details then + trace_detail(current,"nobreak shrink break stretch nobreak shrink") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) +end + +local function japanese_between_full_close_open(head,current) -- todo: check width + if trace_details then + trace_detail(current,"japanese between full close open") + end + insert_node_before(head,current,new_kern(-half_char_width)) + insert_node_before(head,current,new_glue(half_char_width,0,inter_char_half_shrink)) + insert_node_before(head,current,new_kern(-half_char_width)) +end + +local function japanese_between_full_close_full_close(head,current) -- todo: check width + if trace_details then + trace_detail(current,"japanese between full close full close") + end + insert_node_before(head,current,new_kern(-half_char_width)) + -- insert_node_before(head,current,new_glue(half_char_width,0,inter_char_half_shrink)) +end + +local function japanese_before_full_width_punct(head,current) -- todo: check width + if trace_details then + trace_detail(current,"japanese before full width punct") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(quarter_char_width,0,inter_char_quarter_shrink)) + insert_node_before(head,current,new_kern(-quarter_char_width)) +end + +local function japanese_after_full_width_punct(head,current) -- todo: check width + if trace_details then + trace_detail(current,"japanese after full width punct") + 
end + insert_node_before(head,current,new_kern(-quarter_char_width)) + insert_node_before(head,current,new_glue(quarter_char_width,0,inter_char_quarter_shrink)) +end + +local function nobreak_autoshrink_break_stretch_nobreak_autoshrink(head,current) + if trace_details then + trace_detail(current,"nobreak autoshrink break stretch nobreak autoshrink") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) +end + +local function nobreak_autoshrink_break_stretch_nobreak_shrink(head,current) + if trace_details then + trace_detail(current,"nobreak autoshrink break stretch nobreak shrink") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) +end + +local function nobreak_shrink_break_stretch_nobreak_autoshrink(head,current) + if trace_details then + trace_detail(current,"nobreak shrink break stretch nobreak autoshrink") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) +end + +local function nobreak_stretch_break_shrink(head,current) + if trace_details then + trace_detail(current,"nobreak stretch break shrink") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) +end + +local function nobreak_stretch_break_autoshrink(head,current) + if trace_details then + trace_detail(current,"nobreak stretch break autoshrink") + end + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,new_glue(0,inter_char_stretch,0)) + insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink)) +end + +-- Korean: hangul + +local korean_0 = { +} + +local korean_1 = { + jamo_initial = korean_break, + korean = korean_break, + chinese = korean_break, + hiragana = korean_break, + katakana = korean_break, + half_width_open = stretch_break, + half_width_close = nobreak, + full_width_open = stretch_break, + full_width_close = nobreak, + full_width_punct = nobreak, +-- hyphen = nil, + non_starter = korean_break, + other = korean_break, +} + +local korean_2 = { + jamo_initial = stretch_break, + korean = stretch_break, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = stretch_break, + half_width_close = nobreak, + full_width_open = stretch_break, + full_width_close = nobreak, + full_width_punct = nobreak, +-- hyphen = nil, + non_starter = stretch_break, + other = stretch_break, +} + +local korean_3 = { + jamo_initial = stretch_break, + korean = stretch_break, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = stretch_break, + half_width_close = nobreak, + full_width_open = stretch_break, + full_width_close = nobreak, + full_width_punct = 
nobreak, +-- hyphen = nil, + non_starter = nobreak, + other = nobreak, +} + +local korean_4 = { + jamo_initial = nobreak, + korean = nobreak, + chinese = nobreak, + hiragana = nobreak, + katakana = nobreak, + half_width_open = nobreak, + half_width_close = nobreak, + full_width_open = nobreak, + full_width_close = nobreak, + full_width_punct = nobreak, + hyphen = nobreak, + non_starter = nobreak, + other = nobreak, +} + +local korean_5 = { + jamo_initial = stretch_break, + korean = stretch_break, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = stretch_break, + half_width_close = nobreak_stretch, + full_width_open = stretch_break, + full_width_close = nobreak_stretch, + full_width_punct = nobreak_stretch, + hyphen = nobreak_stretch, + non_starter = nobreak_stretch, + other = stretch_break, +} + +local injectors = { -- [previous] [current] + jamo_final = korean_1, + korean = korean_1, + chinese = korean_1, + hiragana = korean_1, + katakana = korean_1, + hyphen = korean_2, + start = korean_0, + other = korean_2, + non_starter = korean_3, + full_width_open = korean_4, + half_width_open = korean_4, + full_width_close = korean_5, + full_width_punct = korean_5, + half_width_close = korean_5, +} + +local function process(head,first,last) + if first ~= last then + local lastfont, previous, last = nil, "start", nil + while true do + local upcoming, id = first.next, first.id + if id == glyph_code then + local a = first[a_scriptstatus] + local current = numbertocategory[a] + local action = injectors[previous] + if action then + action = action[current] + if action then + local font = first.font + if font ~= lastfont then + lastfont = font + set_parameters(font,numbertodataset[first[a_scriptinjection]]) + end + action(head,first) + end + end + previous = current + else -- glue + local p, n = first.prev, upcoming + if p and n then + local pid, nid = p.id, n.id + if pid == glyph_code and nid == glyph_code then + local pa, na = p[a_scriptstatus], n[a_scriptstatus] + local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na] + if not pcjk or not ncjk + or pcjk == "korean" or ncjk == "korean" + or pcjk == "other" or ncjk == "other" + or pcjk == "jamo_final" or ncjk == "jamo_initial" then + previous = "start" + else -- if head ~= first then + remove_node(head,first,true) + previous = pcjk + -- else + -- previous = pcjk + end + else + previous = "start" + end + else + previous = "start" + end + end + if upcoming == last then -- was stop + break + else + first = upcoming + end + end + end +end + +scripts.installmethod { + name = "hangul", + injector = process, + datasets = { -- todo: metatables + default = { + inter_char_shrink_factor = 0.50, -- of quad + inter_char_stretch_factor = 0.50, -- of quad + inter_char_half_shrink_factor = 0.50, -- of quad + inter_char_half_stretch_factor = 0.50, -- of quad + inter_char_quarter_shrink_factor = 0.50, -- of quad + inter_char_quarter_stretch_factor = 0.50, -- of quad + inter_char_hangul_penalty = 50, + }, + }, +} + +-- Chinese: hanzi + +local chinese_0 = { +} + +local chinese_1 = { + jamo_initial = korean_break, + korean = korean_break, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = nobreak_stretch_break_autoshrink, + half_width_close = nobreak_stretch, + full_width_open = nobreak_stretch_break_shrink, + full_width_close = nobreak_stretch, + full_width_punct = nobreak_stretch, +-- hyphen = nil, + non_starter = nobreak_stretch, + other = 
stretch_break, +} + +local chinese_2 = { + jamo_initial = korean_break, + korean = stretch_break, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = nobreak_stretch_break_autoshrink, + half_width_close = nobreak_stretch, + full_width_open = nobreak_stretch_break_shrink, + full_width_close = nobreak_stretch, + full_width_punct = nobreak_stretch, + hyphen = nobreak_stretch, + non_starter = nobreak_stretch, + other = stretch_break, +} + +local chinese_3 = { + jamo_initial = korean_break, + korean = stretch_break, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = nobreak_stretch_break_autoshrink, + half_width_close = nobreak_stretch, + full_width_open = nobreak_stretch_break_shrink, + full_width_close = nobreak_stretch, + full_width_punct = nobreak_stretch, +-- hyphen = nil, + non_starter = nobreak_stretch, + other = stretch_break, +} + +local chinese_4 = { +-- jamo_initial = nil, +-- korean = nil, +-- chinese = nil, +-- hiragana = nil, +-- katakana = nil, + half_width_open = nobreak_autoshrink, + half_width_close = nil, + full_width_open = nobreak_shrink, + full_width_close = nobreak, + full_width_punct = nobreak, +-- hyphen = nil, + non_starter = nobreak, +-- other = nil, +} + +local chinese_5 = { + jamo_initial = stretch_break, + korean = stretch_break, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = nobreak_stretch_break_autoshrink, + half_width_close = nobreak_stretch, + full_width_open = nobreak_stretch_break_shrink, + full_width_close = nobreak_stretch, + full_width_punct = nobreak_stretch, +-- hyphen = nil, + non_starter = nobreak_stretch, + other = stretch_break, +} + +local chinese_6 = { + jamo_initial = nobreak_stretch, + korean = nobreak_stretch, + chinese = nobreak_stretch, + hiragana = nobreak_stretch, + katakana = nobreak_stretch, + half_width_open = nobreak_stretch_break_autoshrink, + half_width_close = nobreak_stretch, + full_width_open = nobreak_stretch_break_shrink, + full_width_close = nobreak_stretch, + full_width_punct = nobreak_stretch, + hyphen = nobreak_stretch, + non_starter = nobreak_stretch, + other = nobreak_stretch, +} + +local chinese_7 = { + jami_initial = nobreak_shrink_break_stretch, + korean = nobreak_shrink_break_stretch, + chinese = stretch_break, -- nobreak_shrink_break_stretch, + hiragana = stretch_break, -- nobreak_shrink_break_stretch, + katakana = stretch_break, -- nobreak_shrink_break_stretch, + half_width_open = nobreak_shrink_break_stretch_nobreak_autoshrink, + half_width_close = nobreak_shrink_nobreak_stretch, + full_width_open = nobreak_shrink_break_stretch_nobreak_shrink, + full_width_close = nobreak_shrink_nobreak_stretch, + full_width_punct = nobreak_shrink_nobreak_stretch, + hyphen = nobreak_shrink_break_stretch, + non_starter = nobreak_shrink_break_stretch, + other = nobreak_shrink_break_stretch, +} + +local chinese_8 = { + jami_initial = nobreak_shrink_break_stretch, + korean = nobreak_autoshrink_break_stretch, + chinese = stretch_break, -- nobreak_autoshrink_break_stretch, + hiragana = stretch_break, -- nobreak_autoshrink_break_stretch, + katakana = stretch_break, -- nobreak_autoshrink_break_stretch, + half_width_open = nobreak_autoshrink_break_stretch_nobreak_autoshrink, + half_width_close = nobreak_autoshrink_nobreak_stretch, + full_width_open = nobreak_autoshrink_break_stretch_nobreak_shrink, + full_width_close = nobreak_autoshrink_nobreak_stretch, + full_width_punct = 
nobreak_autoshrink_nobreak_stretch, + hyphen = nobreak_autoshrink_break_stretch, + non_starter = nobreak_autoshrink_break_stretch, + other = nobreak_autoshrink_break_stretch, +} + +local injectors = { -- [previous] [current] + jamo_final = chinese_1, + korean = chinese_1, + chinese = chinese_2, + hiragana = chinese_2, + katakana = chinese_2, + hyphen = chinese_3, + start = chinese_4, + other = chinese_5, + non_starter = chinese_5, + full_width_open = chinese_6, + half_width_open = chinese_6, + full_width_close = chinese_7, + full_width_punct = chinese_7, + half_width_close = chinese_8, +} + +local function process(head,first,last) + if first ~= last then + local lastfont, previous, last = nil, "start", nil + while true do + local upcoming, id = first.next, first.id + if id == glyph_code then + local a = first[a_scriptstatus] + local current = numbertocategory[a] + local action = injectors[previous] + if action then + action = action[current] + if action then + local font = first.font + if font ~= lastfont then + lastfont = font + set_parameters(font,numbertodataset[first[a_scriptinjection]]) + end + action(head,first) + end + end + previous = current + else -- glue + local p, n = first.prev, upcoming + if p and n then + local pid, nid = p.id, n.id + if pid == glyph_code and nid == glyph_code then + local pa, na = p[a_scriptstatus], n[a_scriptstatus] + local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na] + if not pcjk or not ncjk + or pcjk == "korean" or ncjk == "korean" + or pcjk == "other" or ncjk == "other" + or pcjk == "jamo_final" or ncjk == "jamo_initial" + or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean + previous = "start" + else -- if head ~= first then + remove_node(head,first,true) + previous = pcjk + -- else + -- previous = pcjk + end + else + previous = "start" + end + else + previous = "start" + end + end + if upcoming == last then -- was stop + break + else + first = upcoming + end + end + end +end + +scripts.installmethod { + name = "hanzi", + injector = process, + datasets = { + default = { + inter_char_shrink_factor = 0.50, -- of quad + inter_char_stretch_factor = 0.50, -- of quad + inter_char_half_shrink_factor = 0.50, -- of quad + inter_char_half_stretch_factor = 0.50, -- of quad + inter_char_quarter_shrink_factor = 0.50, -- of quad + inter_char_quarter_stretch_factor = 0.50, -- of quad + inter_char_hangul_penalty = 50, + }, + }, +} + +-- Japanese: idiographic, hiragana, katakana, romanji / jis + +local japanese_0 = { +} + +local japanese_1 = { + jamo_initial = korean_break, + korean = korean_break, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = nobreak_stretch_break_autoshrink, + half_width_close = nobreak_stretch, + full_width_open = nobreak_stretch_break_shrink, + full_width_close = nobreak_stretch, + full_width_punct = nobreak_stretch, +-- hyphen = nil, + non_starter = nobreak_stretch, + other = stretch_break, +} + +local japanese_2 = { + jamo_initial = korean_break, + korean = stretch_break, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = nobreak_stretch_break_autoshrink, + half_width_close = nobreak_stretch, + full_width_open = nobreak_stretch_break_shrink, + full_width_close = nobreak_stretch, + full_width_punct = japanese_before_full_width_punct, -- nobreak_stretch, + hyphen = nobreak_stretch, + non_starter = nobreak_stretch, + other = stretch_break, +} + +local japanese_3 = { + 
jamo_initial = korean_break, + korean = stretch_break, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = nobreak_stretch_break_autoshrink, + half_width_close = nobreak_stretch, + full_width_open = nobreak_stretch_break_shrink, + full_width_close = nobreak_stretch, + full_width_punct = nobreak_stretch, +-- hyphen = nil, + non_starter = nobreak_stretch, + other = stretch_break, +} + +local japanese_4 = { +-- jamo_initial = nil, +-- korean = nil, +-- chinese = nil, +-- hiragana = nil, +-- katakana = nil, + half_width_open = nobreak_autoshrink, + half_width_close = nil, + full_width_open = nobreak_shrink, + full_width_close = nobreak, + full_width_punct = nobreak, +-- hyphen = nil, + non_starter = nobreak, +-- other = nil, +} + +local japanese_5 = { + jamo_initial = stretch_break, + korean = stretch_break, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = nobreak_stretch_break_autoshrink, + half_width_close = nobreak_stretch, + full_width_open = nobreak_stretch_break_shrink, + full_width_close = nobreak_stretch, + full_width_punct = nobreak_stretch, +-- hyphen = nil, + non_starter = nobreak_stretch, + other = stretch_break, +} + +local japanese_6 = { + jamo_initial = nobreak_stretch, + korean = nobreak_stretch, + chinese = nobreak_stretch, + hiragana = nobreak_stretch, + katakana = nobreak_stretch, + half_width_open = nobreak_stretch_break_autoshrink, + half_width_close = nobreak_stretch, + full_width_open = nobreak_stretch_break_shrink, + full_width_close = nobreak_stretch, + full_width_punct = nobreak_stretch, + hyphen = nobreak_stretch, + non_starter = nobreak_stretch, + other = nobreak_stretch, +} + +local japanese_7 = { + jami_initial = nobreak_shrink_break_stretch, + korean = nobreak_shrink_break_stretch, + chinese = japanese_after_full_width_punct, -- stretch_break + hiragana = japanese_after_full_width_punct, -- stretch_break + katakana = japanese_after_full_width_punct, -- stretch_break + half_width_open = nobreak_shrink_break_stretch_nobreak_autoshrink, + half_width_close = nobreak_shrink_nobreak_stretch, + full_width_open = japanese_between_full_close_open, -- !! 
+ full_width_close = japanese_between_full_close_full_close, -- nobreak_shrink_nobreak_stretch, + full_width_punct = nobreak_shrink_nobreak_stretch, + hyphen = nobreak_shrink_break_stretch, + non_starter = nobreak_shrink_break_stretch, + other = nobreak_shrink_break_stretch, +} + +local japanese_8 = { + jami_initial = nobreak_shrink_break_stretch, + korean = nobreak_autoshrink_break_stretch, + chinese = stretch_break, + hiragana = stretch_break, + katakana = stretch_break, + half_width_open = nobreak_autoshrink_break_stretch_nobreak_autoshrink, + half_width_close = nobreak_autoshrink_nobreak_stretch, + full_width_open = nobreak_autoshrink_break_stretch_nobreak_shrink, + full_width_close = nobreak_autoshrink_nobreak_stretch, + full_width_punct = nobreak_autoshrink_nobreak_stretch, + hyphen = nobreak_autoshrink_break_stretch, + non_starter = nobreak_autoshrink_break_stretch, + other = nobreak_autoshrink_break_stretch, +} + +local injectors = { -- [previous] [current] + jamo_final = japanese_1, + korean = japanese_1, + chinese = japanese_2, + hiragana = japanese_2, + katakana = japanese_2, + hyphen = japanese_3, + start = japanese_4, + other = japanese_5, + non_starter = japanese_5, + full_width_open = japanese_6, + half_width_open = japanese_6, + full_width_close = japanese_7, + full_width_punct = japanese_7, + half_width_close = japanese_8, +} + +local function process(head,first,last) + if first ~= last then + local lastfont, previous, last = nil, "start", nil + while true do + local upcoming, id = first.next, first.id + if id == glyph_code then + local a = first[a_scriptstatus] + local current = numbertocategory[a] + local action = injectors[previous] + if action then + action = action[current] + if action then + local font = first.font + if font ~= lastfont then + lastfont = font + set_parameters(font,numbertodataset[first[a_scriptinjection]]) + end + action(head,first) + end + end + previous = current + +-- elseif id == math_code then +-- upcoming = end_of_math(current).next +-- previous = "start" + + else -- glue + local p, n = first.prev, upcoming -- we should remember prev + if p and n then + local pid, nid = p.id, n.id + if pid == glyph_code and nid == glyph_code then + local pa, na = p[a_scriptstatus], n[a_scriptstatus] + local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na] + if not pcjk or not ncjk + or pcjk == "korean" or ncjk == "korean" + or pcjk == "other" or ncjk == "other" + or pcjk == "jamo_final" or ncjk == "jamo_initial" + or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean + previous = "start" + else -- if head ~= first then +if id == glue_code and first.subtype == userskip_code then -- also scriptstatus check? + -- for the moment no distinction possible between space and userskip + local w = first.spec.width + local s = spacedata[p.font] + if w == s then -- could be option + if trace_details then + trace_detail_between(p,n,"space removed") + end + remove_node(head,first,true) + end +end + previous = pcjk + -- else + -- previous = pcjk + end + else + previous = "start" + end + else + previous = "start" + end + end + if upcoming == last then -- was stop + break + else + first = upcoming + end + end + end +end + +scripts.installmethod { + name = "nihongo", -- what name to use? 
+ injector = process, + datasets = { + default = { + inter_char_shrink_factor = 0.50, -- of quad + inter_char_stretch_factor = 0.50, -- of quad + inter_char_half_shrink_factor = 0.50, -- of quad + inter_char_half_stretch_factor = 0.50, -- of quad + inter_char_quarter_shrink_factor = 0.25, -- of quad + inter_char_quarter_stretch_factor = 0.25, -- of quad + inter_char_hangul_penalty = 50, + }, + }, +} + diff --git a/tex/context/base/scrp-eth.lua b/tex/context/base/scrp-eth.lua index 597afa1b5..20b00a0ec 100644 --- a/tex/context/base/scrp-eth.lua +++ b/tex/context/base/scrp-eth.lua @@ -1,150 +1,150 @@ -if not modules then modules = { } end modules ['scrp-eth'] = { - version = 1.001, - comment = "companion to scrp-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- at some point I will review the script code but for the moment we --- do it this way; so space settings like with cjk yet - -local insert_node_before = node.insert_before - -local nodepool = nodes.pool - -local new_glue = nodepool.glue -local new_penalty = nodepool.penalty - -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph - -local a_scriptstatus = attributes.private('scriptstatus') -local a_scriptinjection = attributes.private('scriptinjection') - -local categorytonumber = scripts.categorytonumber -local numbertocategory = scripts.numbertocategory -local hash = scripts.hash -local numbertodataset = scripts.numbertodataset - -local fonthashes = fonts.hashes -local parameters = fonthashes.parameters - -local space, stretch, shrink, lastfont - -local inter_character_space_factor = 1 -local inter_character_stretch_factor = 1 -local inter_character_shrink_factor = 1 - -local function space_glue(current) - local data = numbertodataset[current[a_scriptinjection]] - if data then - inter_character_space_factor = data.inter_character_space_factor or 1 - inter_character_stretch_factor = data.inter_character_stretch_factor or 1 - inter_character_shrink_factor = data.inter_character_shrink_factor or 1 - end - local font = current.font - if lastfont ~= font then - local pf = parameters[font] - space = pf.space - stretch = pf.space_stretch - shrink = pf.space_shrink - lastfont = font - end - return new_glue( - inter_character_space_factor * space, - inter_character_stretch_factor * stretch, - inter_character_shrink_factor * shrink - ) -end - -local function insert_space(head,current) - insert_node_before(head,current,space_glue(current)) -end - -local function insert_zerowidthspace(head,current) - insert_node_before(head,current,new_glue(0)) -end - -local function insert_nobreakspace(head,current) - insert_node_before(head,current,new_penalty(10000)) - insert_node_before(head,current,space_glue(current)) -end - --- syllable [zerowidthspace] syllable --- syllable [zerowidthspace] word --- syllable [zerowidthspace] sentence --- word [nobreakspace] syllable --- word [space] word --- word [space] sentence --- sentence [nobreakspace] syllable --- sentence [space] word --- sentence [space] sentence - -local injectors = { -- [previous] [current] - ethiopic_syllable = { - ethiopic_syllable = insert_zerowidthspace, - ethiopic_word = insert_nobreakspace, - ethiopic_sentence = insert_nobreakspace, - }, - ethiopic_word = { - ethiopic_syllable = insert_space, - ethiopic_word = insert_space, - ethiopic_sentence = insert_space, - }, - ethiopic_sentence = { - ethiopic_syllable = insert_space, - ethiopic_word = insert_space, 
- ethiopic_sentence = insert_space, - }, -} - -local function process(head,first,last) - if first ~= last then - local injector = false - local current = first - while current do - local id = current.id - if id == glyph_code then - local scriptstatus = current[a_scriptstatus] - local category = numbertocategory[scriptstatus] - if injector then - local action = injector[category] - if action then - action(head,current) - end - end - injector = injectors[category] - else - -- nothing yet - end - if current == last then - break - else - current = current.next - end - end - end -end - -scripts.installmethod { - name = "ethiopic", - injector = process, - datasets = { - default = { - inter_character_space_factor = 1, - inter_character_stretch_factor = 1, - inter_character_shrink_factor = 1, - }, - half = { - inter_character_space_factor = 0.5, - inter_character_stretch_factor = 0.5, - inter_character_shrink_factor = 0.5, - }, - quarter = { - inter_character_space_factor = 0.25, - inter_character_stretch_factor = 0.25, - inter_character_shrink_factor = 0.25, - }, - }, -} +if not modules then modules = { } end modules ['scrp-eth'] = { + version = 1.001, + comment = "companion to scrp-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- at some point I will review the script code but for the moment we +-- do it this way; so space settings like with cjk yet + +local insert_node_before = node.insert_before + +local nodepool = nodes.pool + +local new_glue = nodepool.glue +local new_penalty = nodepool.penalty + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph + +local a_scriptstatus = attributes.private('scriptstatus') +local a_scriptinjection = attributes.private('scriptinjection') + +local categorytonumber = scripts.categorytonumber +local numbertocategory = scripts.numbertocategory +local hash = scripts.hash +local numbertodataset = scripts.numbertodataset + +local fonthashes = fonts.hashes +local parameters = fonthashes.parameters + +local space, stretch, shrink, lastfont + +local inter_character_space_factor = 1 +local inter_character_stretch_factor = 1 +local inter_character_shrink_factor = 1 + +local function space_glue(current) + local data = numbertodataset[current[a_scriptinjection]] + if data then + inter_character_space_factor = data.inter_character_space_factor or 1 + inter_character_stretch_factor = data.inter_character_stretch_factor or 1 + inter_character_shrink_factor = data.inter_character_shrink_factor or 1 + end + local font = current.font + if lastfont ~= font then + local pf = parameters[font] + space = pf.space + stretch = pf.space_stretch + shrink = pf.space_shrink + lastfont = font + end + return new_glue( + inter_character_space_factor * space, + inter_character_stretch_factor * stretch, + inter_character_shrink_factor * shrink + ) +end + +local function insert_space(head,current) + insert_node_before(head,current,space_glue(current)) +end + +local function insert_zerowidthspace(head,current) + insert_node_before(head,current,new_glue(0)) +end + +local function insert_nobreakspace(head,current) + insert_node_before(head,current,new_penalty(10000)) + insert_node_before(head,current,space_glue(current)) +end + +-- syllable [zerowidthspace] syllable +-- syllable [zerowidthspace] word +-- syllable [zerowidthspace] sentence +-- word [nobreakspace] syllable +-- word [space] word +-- word [space] sentence +-- sentence [nobreakspace] syllable +-- 
sentence [space] word +-- sentence [space] sentence + +local injectors = { -- [previous] [current] + ethiopic_syllable = { + ethiopic_syllable = insert_zerowidthspace, + ethiopic_word = insert_nobreakspace, + ethiopic_sentence = insert_nobreakspace, + }, + ethiopic_word = { + ethiopic_syllable = insert_space, + ethiopic_word = insert_space, + ethiopic_sentence = insert_space, + }, + ethiopic_sentence = { + ethiopic_syllable = insert_space, + ethiopic_word = insert_space, + ethiopic_sentence = insert_space, + }, +} + +local function process(head,first,last) + if first ~= last then + local injector = false + local current = first + while current do + local id = current.id + if id == glyph_code then + local scriptstatus = current[a_scriptstatus] + local category = numbertocategory[scriptstatus] + if injector then + local action = injector[category] + if action then + action(head,current) + end + end + injector = injectors[category] + else + -- nothing yet + end + if current == last then + break + else + current = current.next + end + end + end +end + +scripts.installmethod { + name = "ethiopic", + injector = process, + datasets = { + default = { + inter_character_space_factor = 1, + inter_character_stretch_factor = 1, + inter_character_shrink_factor = 1, + }, + half = { + inter_character_space_factor = 0.5, + inter_character_stretch_factor = 0.5, + inter_character_shrink_factor = 0.5, + }, + quarter = { + inter_character_space_factor = 0.25, + inter_character_stretch_factor = 0.25, + inter_character_shrink_factor = 0.25, + }, + }, +} diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua index 18f86475f..fbe673db9 100644 --- a/tex/context/base/scrp-ini.lua +++ b/tex/context/base/scrp-ini.lua @@ -1,634 +1,634 @@ -if not modules then modules = { } end modules ['scrp-ini'] = { - version = 1.001, - comment = "companion to scrp-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- We need to rewrite this a bit ... rather old code ... will be done when japanese --- is finished. 
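A side note on the scrp-eth.lua chunk above: the half and quarter datasets only scale the current font's interword glue, so their effect is easy to predict. A minimal sketch of what space_glue() computes, assuming invented font parameters (10pt space, 5pt stretch, 3pt shrink; these numbers are not from the patch):

local pf = { space = 10, space_stretch = 5, space_shrink = 3 } -- pretend font parameters, in pt

local dataset = { -- mirrors the "half" preset installed above
    inter_character_space_factor   = 0.5,
    inter_character_stretch_factor = 0.5,
    inter_character_shrink_factor  = 0.5,
}

-- the glue that would be inserted between two ethiopic words:
print(
    dataset.inter_character_space_factor   * pf.space,         -- 5
    dataset.inter_character_stretch_factor * pf.space_stretch, -- 2.5
    dataset.inter_character_shrink_factor  * pf.space_shrink   -- 1.5
)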
- -local attributes, nodes, node = attributes, nodes, node - -local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end) -local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end) - -local report_preprocessing = logs.reporter("scripts","preprocessing") - -local utfchar = utf.char - -local first_glyph = node.first_glyph or node.first_character -local traverse_id = node.traverse_id - -local texsetattribute = tex.setattribute - -local nodecodes = nodes.nodecodes -local unsetvalue = attributes.unsetvalue - -local glyph_code = nodecodes.glyph -local glue_code = nodecodes.glue - -local a_scriptinjection = attributes.private('scriptinjection') -local a_scriptsplitting = attributes.private('scriptsplitting') -local a_scriptstatus = attributes.private('scriptstatus') - -local fontdata = fonts.hashes.identifiers -local allocate = utilities.storage.allocate -local setnodecolor = nodes.tracers.colors.set -local setmetatableindex = table.setmetatableindex - -local enableaction = nodes.tasks.enableaction -local disableaction = nodes.tasks.disableaction - -scripts = scripts or { } -local scripts = scripts - -scripts.hash = scripts.hash or { } -local hash = scripts.hash - -local handlers = allocate() -scripts.handlers = handlers - -local injectors = allocate() -scripts.injectors = handlers - -local splitters = allocate() -scripts.splitters = splitters - -local hash = { -- we could put these presets in char-def.lua - -- - -- half width opening parenthesis - -- - [0x0028] = "half_width_open", - [0x005B] = "half_width_open", - [0x007B] = "half_width_open", - [0x2018] = "half_width_open", -- ‘ - [0x201C] = "half_width_open", -- “ - -- - -- full width opening parenthesis - -- - [0x3008] = "full_width_open", -- 〈 Left book quote - [0x300A] = "full_width_open", -- 《 Left double book quote - [0x300C] = "full_width_open", -- 「 left quote - [0x300E] = "full_width_open", -- 『 left double quote - [0x3010] = "full_width_open", -- 〠left double book quote - [0x3014] = "full_width_open", -- 〔 left book quote - [0x3016] = "full_width_open", --〖 left double book quote - [0x3018] = "full_width_open", -- left tortoise bracket - [0x301A] = "full_width_open", -- left square bracket - [0x301D] = "full_width_open", -- reverse double prime qm - [0xFF08] = "full_width_open", -- ( left parenthesis - [0xFF3B] = "full_width_open", -- ï¼» left square brackets - [0xFF5B] = "full_width_open", -- ï½› left curve bracket - -- - -- half width closing parenthesis - -- - [0x0029] = "half_width_close", - [0x005D] = "half_width_close", - [0x007D] = "half_width_close", - [0x2019] = "half_width_close", -- ’ right quote, right - [0x201D] = "half_width_close", -- †right double quote - -- - -- full width closing parenthesis - -- - [0x3009] = "full_width_close", -- 〉 book quote - [0x300B] = "full_width_close", -- 》 double book quote - [0x300D] = "full_width_close", -- 〠right quote, right - [0x300F] = "full_width_close", -- 〠right double quote - [0x3011] = "full_width_close", -- 】 right double book quote - [0x3015] = "full_width_close", -- 〕 right book quote - [0x3017] = "full_width_close", -- 〗 right double book quote - [0x3019] = "full_width_close", -- right tortoise bracket - [0x301B] = "full_width_close", -- right square bracket - [0x301E] = "full_width_close", -- double prime qm - [0x301F] = "full_width_close", -- low double prime qm - [0xFF09] = "full_width_close", -- ) right parenthesis - [0xFF3D] = "full_width_close", -- ï¼½ right square brackets 
- [0xFF5D] = "full_width_close", -- ï½ right curve brackets - -- - [0xFF62] = "half_width_open", -- left corner bracket - [0xFF63] = "half_width_close", -- right corner bracket - -- - -- vertical opening vertical - -- - -- 0xFE35, 0xFE37, 0xFE39, 0xFE3B, 0xFE3D, 0xFE3F, 0xFE41, 0xFE43, 0xFE47, - -- - -- vertical closing - -- - -- 0xFE36, 0xFE38, 0xFE3A, 0xFE3C, 0xFE3E, 0xFE40, 0xFE42, 0xFE44, 0xFE48, - -- - -- half width opening punctuation - -- - -- - -- - -- full width opening punctuation - -- - -- 0x2236, -- ∶ - -- 0xFF0C, -- , - -- - -- half width closing punctuation_hw - -- - [0x0021] = "half_width_close", -- ! - [0x002C] = "half_width_close", -- , - [0x002E] = "half_width_close", -- . - [0x003A] = "half_width_close", -- : - [0x003B] = "half_width_close", -- ; - [0x003F] = "half_width_close", -- ? - [0xFF61] = "half_width_close", -- hw full stop - -- - -- full width closing punctuation - -- - [0x3001] = "full_width_close", -- 〠- [0x3002] = "full_width_close", -- 。 - [0xFF0C] = "full_width_close", -- , - [0xFF0E] = "full_width_close", -- - -- - -- depends on font - -- - [0xFF01] = "full_width_close", -- ï¼ - [0xFF1F] = "full_width_close", -- ? - -- - [0xFF1A] = "full_width_punct", -- : - [0xFF1B] = "full_width_punct", -- ï¼› - -- - -- non starter - -- - [0x3005] = "non_starter", [0x3041] = "non_starter", [0x3043] = "non_starter", [0x3045] = "non_starter", [0x3047] = "non_starter", - [0x3049] = "non_starter", [0x3063] = "non_starter", [0x3083] = "non_starter", [0x3085] = "non_starter", [0x3087] = "non_starter", - [0x308E] = "non_starter", [0x3095] = "non_starter", [0x3096] = "non_starter", [0x309B] = "non_starter", [0x309C] = "non_starter", - [0x309D] = "non_starter", [0x309E] = "non_starter", [0x30A0] = "non_starter", [0x30A1] = "non_starter", [0x30A3] = "non_starter", - [0x30A5] = "non_starter", [0x30A7] = "non_starter", [0x30A9] = "non_starter", [0x30C3] = "non_starter", [0x30E3] = "non_starter", - [0x30E5] = "non_starter", [0x30E7] = "non_starter", [0x30EE] = "non_starter", [0x30F5] = "non_starter", [0x30F6] = "non_starter", - [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31F0] = "non_starter", [0x31F1] = "non_starter", - [0x30F2] = "non_starter", [0x30F3] = "non_starter", [0x30F4] = "non_starter", [0x31F5] = "non_starter", [0x31F6] = "non_starter", - [0x30F7] = "non_starter", [0x30F8] = "non_starter", [0x30F9] = "non_starter", [0x31FA] = "non_starter", [0x31FB] = "non_starter", - [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31FF] = "non_starter", - -- - -- hyphenation - -- - [0x2026] = "hyphen", -- … ellipsis - [0x2014] = "hyphen", -- — hyphen - -- - [0x1361] = "ethiopic_word", - [0x1362] = "ethiopic_sentence", - -- -} - -local function provide(t,k) - local v - if not tonumber(k) then v = false - elseif (k >= 0x03040 and k <= 0x030FF) - or (k >= 0x031F0 and k <= 0x031FF) - or (k >= 0x032D0 and k <= 0x032FE) - or (k >= 0x0FF00 and k <= 0x0FFEF) then v = "katakana" - elseif (k >= 0x03400 and k <= 0x04DFF) - or (k >= 0x04E00 and k <= 0x09FFF) - or (k >= 0x0F900 and k <= 0x0FAFF) - or (k >= 0x20000 and k <= 0x2A6DF) - or (k >= 0x2F800 and k <= 0x2FA1F) then v = "chinese" - elseif (k >= 0x0AC00 and k <= 0x0D7A3) then v = "korean" - elseif (k >= 0x01100 and k <= 0x0115F) then v = "jamo_initial" - elseif (k >= 0x01160 and k <= 0x011A7) then v = "jamo_medial" - elseif (k >= 0x011A8 and k <= 0x011FF) then v = "jamo_final" - elseif (k >= 0x01200 and k <= 0x0139F) then v = "ethiopic_syllable" - else v = false - end - t[k] 
= v - return v -end - -setmetatableindex(hash,provide) - -scripts.hash = hash - -local numbertodataset = allocate() -local numbertohandler = allocate() - ---~ storage.register("scripts/hash", hash, "scripts.hash") - -scripts.numbertodataset = numbertodataset -scripts.numbertohandler = numbertohandler - -local defaults = { - inter_char_shrink_factor = 0, - inter_char_shrink_factor = 0, - inter_char_stretch_factor = 0, - inter_char_half_shrink_factor = 0, - inter_char_half_stretch_factor = 0, - inter_char_quarter_shrink_factor = 0, - inter_char_quarter_stretch_factor = 0, - inter_char_hangul_penalty = 0, - - inter_word_stretch_factor = 0, -} - -scripts.defaults = defaults -- so we can add more - -function scripts.installmethod(handler) - local name = handler.name - handlers[name] = handler - local attributes = { } - local datasets = handler.datasets - if not datasets or not datasets.default then - report_preprocessing("missing (default) dataset in script %a",name) - datasets.default = { } -- slower but an error anyway - end - for k, v in next, datasets do - setmetatableindex(v,defaults) - end - setmetatable(attributes, { - __index = function(t,k) - local v = datasets[k] or datasets.default - local a = unsetvalue - if v then - v.name = name -- for tracing - a = #numbertodataset + 1 - numbertodataset[a] = v - numbertohandler[a] = handler - end - t[k] = a - return a - end - } ) - handler.attributes = attributes -end - -function scripts.installdataset(specification) -- global overload - local method = specification.method - local name = specification.name - local dataset = specification.dataset - if method and name and dataset then - local parent = specification.parent or "" - local handler = handlers[method] - if handler then - local datasets = handler.datasets - if datasets then - local defaultset = datasets.default - if defaultset then - if parent ~= "" then - local p = datasets[parent] - if p then - defaultset = p - else - report_preprocessing("dataset, unknown parent %a for method %a",parent,method) - end - end - setmetatable(dataset,defaultset) - local existing = datasets[name] - if existing then - for k, v in next, existing do - existing[k] = dataset - end - else - datasets[name] = dataset - end - else - report_preprocessing("dataset, no default for method %a",method) - end - else - report_preprocessing("dataset, no datasets for method %a",method) - end - else - report_preprocessing("dataset, no method %a",method) - end - else - report_preprocessing("dataset, invalid specification") -- maybe report table - end -end - -local injectorenabled = false -local splitterenabled = false - -function scripts.set(name,method,preset) - local handler = handlers[method] - if handler then - if handler.injector then - if not injectorenabled then - enableaction("processors","scripts.injectors.handler") - injectorenabled = true - end - texsetattribute(a_scriptinjection,handler.attributes[preset] or unsetvalue) - end - if handler.splitter then - if not splitterenabled then - enableaction("processors","scripts.splitters.handler") - splitterenabled = true - end - texsetattribute(a_scriptsplitting,handler.attributes[preset] or unsetvalue) - end - if handler.initializer then - handler.initializer(handler) - handler.initializer = nil - end - else - texsetattribute(a_scriptinjection,unsetvalue) - texsetattribute(a_scriptsplitting,unsetvalue) - end -end - -function scripts.reset() - texsetattribute(a_scriptinjection,unsetvalue) - texsetattribute(a_scriptsplitting,unsetvalue) -end - --- the following tables will 
become a proper installer (move to cjk/eth) --- --- 0=gray 1=red 2=green 3=blue 4=yellow 5=magenta 6=cyan 7=x-yellow 8=x-magenta 9=x-cyan - -local scriptcolors = allocate { -- todo: just named colors - korean = "trace:0", - chinese = "trace:0", - katakana = "trace:0", - hiragana = "trace:0", - full_width_open = "trace:1", - full_width_close = "trace:2", - half_width_open = "trace:3", - half_width_close = "trace:4", - full_width_punct = "trace:5", - hyphen = "trace:5", - non_starter = "trace:6", - jamo_initial = "trace:7", - jamo_medial = "trace:8", - jamo_final = "trace:9", - ethiopic_syllable = "trace:1", - ethiopic_word = "trace:2", - ethiopic_sentence = "trace:3", -} - -scripts.colors = scriptcolors - -local numbertocategory = allocate { -- rather bound to cjk ... will be generalized - "korean", - "chinese", - "katakana", - "hiragana", - "full_width_open", - "full_width_close", - "half_width_open", - "half_width_close", - "full_width_punct", - "hyphen", - "non_starter", - "jamo_initial", - "jamo_medial", - "jamo_final", - "ethiopic_syllable", - "ethiopic_word", - "ethiopic_sentence", -} - -local categorytonumber = allocate(table.swapped(numbertocategory)) -- could be one table - -scripts.categorytonumber = categorytonumber -scripts.numbertocategory = numbertocategory - -local function colorize(start,stop) - for n in traverse_id(glyph_code,start) do - local kind = numbertocategory[n[a_scriptstatus]] - if kind then - local ac = scriptcolors[kind] - if ac then - setnodecolor(n,ac) - end - end - if n == stop then - break - end - end -end - -local function traced_process(head,first,last,process,a) - if start ~= last then - local f, l = first, last - local name = numbertodataset[a] - name = name and name.name or "?" - report_preprocessing("before %s: %s",name,nodes.tosequence(f,l)) - process(head,first,last) - report_preprocessing("after %s: %s", name,nodes.tosequence(f,l)) - end -end - --- eventually we might end up with more extensive parsing --- todo: pass t[start..stop] == original --- --- one of the time consuming functions: - --- we can have a fonts.hashes.originals - -function scripts.injectors.handler(head) - local start = first_glyph(head) -- we already have glyphs here (subtype 1) - if not start then - return head, false - else - local last_a, normal_process, lastfont, originals = nil, nil, nil, nil - local done, first, last, ok = false, nil, nil, false - while start do - local id = start.id - if id == glyph_code then - local a = start[a_scriptinjection] - if a then - if a ~= last_a then - if first then - if ok then - if trace_analyzing then - colorize(first,last) - end - if trace_injections then - traced_process(head,first,last,normal_process,last_a) - else - normal_process(head,first,last) - end - ok, done = false, true - end - first, last = nil, nil - end - last_a = a - local handler = numbertohandler[a] - normal_process = handler.injector - end - if normal_process then - local f = start.font - if f ~= lastfont then - originals = fontdata[f].resources - if resources then - originals = resources.originals - else - -- can't happen - end - lastfont = f - end - local c = start.char - if originals then - c = originals[c] or c - end - local h = hash[c] - if h then - start[a_scriptstatus] = categorytonumber[h] - if not first then - first, last = start, start - else - last = start - end - -- if cjk == "chinese" or cjk == "korean" then -- we need to prevent too much ( ) processing - ok = true - -- end - elseif first then - if ok then - if trace_analyzing then - colorize(first,last) - end 
- if trace_injections then - traced_process(head,first,last,normal_process,last_a) - else - normal_process(head,first,last) - end - ok, done = false, true - end - first, last = nil, nil - end - end - elseif first then - if ok then - if trace_analyzing then - colorize(first,last) - end - if trace_injections then - traced_process(head,first,last,normal_process,last_a) - else - normal_process(head,first,last) - end - ok, done = false, true - end - first, last = nil, nil - end - elseif id == glue_code then - if ok then - -- continue - elseif first then - -- no chinese or korean - first, last = nil, nil - end - elseif first then - if ok then - -- some chinese or korean - if trace_analyzing then - colorize(first,last) - end - if trace_injections then - traced_process(head,first,last,normal_process,last_a) - else - normal_process(head,first,last) - end - first, last, ok, done = nil, nil, false, true - elseif first then - first, last = nil, nil - end - end - start = start.next - end - if ok then - if trace_analyzing then - colorize(first,last) - end - if trace_injections then - traced_process(head,first,last,normal_process,last_a) - else - normal_process(head,first,last) - end - done = true - end - return head, done - end -end - -function scripts.splitters.handler(head) - return head, false -end - --- new plugin: - -local registercontext = fonts.specifiers.registercontext -local mergecontext = fonts.specifiers.mergecontext - -local otfscripts = characters.otfscripts - -local report_scripts = logs.reporter("scripts","auto feature") -local trace_scripts = false trackers.register("scripts.autofeature",function(v) trace_scripts = v end) - -local autofontfeature = scripts.autofontfeature or { } -scripts.autofontfeature = autofontfeature - -local cache_yes = { } -local cache_nop = { } - -setmetatableindex(cache_yes,function(t,k) local v = { } t[k] = v return v end) -setmetatableindex(cache_nop,function(t,k) local v = { } t[k] = v return v end) - --- beware: we need to tag a done (otherwise too many extra instances ... but how --- often unpack? 
wait till we have a bitmap --- --- we can consider merging this in handlers.characters(head) at some point as there --- already check for the dynamic attribute so it saves a pass, however, then we also --- need to check for a_scriptinjection there which nils the benefit --- --- we can consider cheating: set all glyphs in a word as the first one but it's not --- playing nice - -function autofontfeature.handler(head) - for n in traverse_id(glyph_code,head) do - -- if n[a_scriptinjection] then - -- -- already tagged by script feature, maybe some day adapt - -- else - local char = n.char - local script = otfscripts[char] - if script then - local dynamic = n[0] or 0 - local font = n.font - if dynamic > 0 then - local slot = cache_yes[font] - local attr = slot[script] - if not attr then - attr = mergecontext(dynamic,name,2) - slot[script] = attr - if trace_scripts then - report_scripts("script: %s, trigger %C, dynamic: %a, variant: %a",script,char,attr,"extended") - end - end - if attr ~= 0 then - n[0] = attr - -- maybe set scriptinjection when associated - end - else - local slot = cache_nop[font] - local attr = slot[script] - if not attr then - attr = registercontext(font,script,2) - slot[script] = attr - if trace_scripts then - report_scripts("script: %s, trigger %C, dynamic: %s, variant: %a",script,char,attr,"normal") - end - end - if attr ~= 0 then - n[0] = attr - -- maybe set scriptinjection when associated - end - end - end - -- end - end - return head -end - -function autofontfeature.enable() - report_scripts("globally enabled") - enableaction("processors","scripts.autofontfeature.handler") -end - -function autofontfeature.disable() - report_scripts("globally disabled") - disableaction("processors","scripts.autofontfeature.handler") -end - -commands.enableautofontscript = autofontfeature.enable -commands.disableautofontscript = autofontfeature.disable +if not modules then modules = { } end modules ['scrp-ini'] = { + version = 1.001, + comment = "companion to scrp-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- We need to rewrite this a bit ... rather old code ... will be done when japanese +-- is finished. 
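The installer API in scrp-ini.lua that follows is deliberately small: a handler supplies a name, an injector and a table of datasets, and scripts.set(name,method,preset) later resolves a preset to an attribute value through handler.attributes. A hedged sketch of registering a toy method (the "demo" name and its single factor are invented for illustration):

scripts.installmethod {
    name     = "demo",                               -- hypothetical method name
    injector = function(head,first,last) return end, -- no-op injector
    datasets = {
        default = { inter_char_stretch_factor = 0.25 },
    },
}

-- scripts.set("","demo","default") would now enable the injector pass
-- ("scripts.injectors.handler" in the processors task list) and set the
-- scriptinjection attribute to the number that the attributes metatable
-- registered for the "default" dataset.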
+ +local attributes, nodes, node = attributes, nodes, node + +local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end) +local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end) + +local report_preprocessing = logs.reporter("scripts","preprocessing") + +local utfchar = utf.char + +local first_glyph = node.first_glyph or node.first_character +local traverse_id = node.traverse_id + +local texsetattribute = tex.setattribute + +local nodecodes = nodes.nodecodes +local unsetvalue = attributes.unsetvalue + +local glyph_code = nodecodes.glyph +local glue_code = nodecodes.glue + +local a_scriptinjection = attributes.private('scriptinjection') +local a_scriptsplitting = attributes.private('scriptsplitting') +local a_scriptstatus = attributes.private('scriptstatus') + +local fontdata = fonts.hashes.identifiers +local allocate = utilities.storage.allocate +local setnodecolor = nodes.tracers.colors.set +local setmetatableindex = table.setmetatableindex + +local enableaction = nodes.tasks.enableaction +local disableaction = nodes.tasks.disableaction + +scripts = scripts or { } +local scripts = scripts + +scripts.hash = scripts.hash or { } +local hash = scripts.hash + +local handlers = allocate() +scripts.handlers = handlers + +local injectors = allocate() +scripts.injectors = handlers + +local splitters = allocate() +scripts.splitters = splitters + +local hash = { -- we could put these presets in char-def.lua + -- + -- half width opening parenthesis + -- + [0x0028] = "half_width_open", + [0x005B] = "half_width_open", + [0x007B] = "half_width_open", + [0x2018] = "half_width_open", -- ‘ + [0x201C] = "half_width_open", -- “ + -- + -- full width opening parenthesis + -- + [0x3008] = "full_width_open", -- 〈 Left book quote + [0x300A] = "full_width_open", -- 《 Left double book quote + [0x300C] = "full_width_open", -- 「 left quote + [0x300E] = "full_width_open", -- 『 left double quote + [0x3010] = "full_width_open", -- 〠left double book quote + [0x3014] = "full_width_open", -- 〔 left book quote + [0x3016] = "full_width_open", --〖 left double book quote + [0x3018] = "full_width_open", -- left tortoise bracket + [0x301A] = "full_width_open", -- left square bracket + [0x301D] = "full_width_open", -- reverse double prime qm + [0xFF08] = "full_width_open", -- ( left parenthesis + [0xFF3B] = "full_width_open", -- ï¼» left square brackets + [0xFF5B] = "full_width_open", -- ï½› left curve bracket + -- + -- half width closing parenthesis + -- + [0x0029] = "half_width_close", + [0x005D] = "half_width_close", + [0x007D] = "half_width_close", + [0x2019] = "half_width_close", -- ’ right quote, right + [0x201D] = "half_width_close", -- †right double quote + -- + -- full width closing parenthesis + -- + [0x3009] = "full_width_close", -- 〉 book quote + [0x300B] = "full_width_close", -- 》 double book quote + [0x300D] = "full_width_close", -- 〠right quote, right + [0x300F] = "full_width_close", -- 〠right double quote + [0x3011] = "full_width_close", -- 】 right double book quote + [0x3015] = "full_width_close", -- 〕 right book quote + [0x3017] = "full_width_close", -- 〗 right double book quote + [0x3019] = "full_width_close", -- right tortoise bracket + [0x301B] = "full_width_close", -- right square bracket + [0x301E] = "full_width_close", -- double prime qm + [0x301F] = "full_width_close", -- low double prime qm + [0xFF09] = "full_width_close", -- ) right parenthesis + [0xFF3D] = "full_width_close", -- ï¼½ right square brackets 
+ [0xFF5D] = "full_width_close", -- ï½ right curve brackets + -- + [0xFF62] = "half_width_open", -- left corner bracket + [0xFF63] = "half_width_close", -- right corner bracket + -- + -- vertical opening vertical + -- + -- 0xFE35, 0xFE37, 0xFE39, 0xFE3B, 0xFE3D, 0xFE3F, 0xFE41, 0xFE43, 0xFE47, + -- + -- vertical closing + -- + -- 0xFE36, 0xFE38, 0xFE3A, 0xFE3C, 0xFE3E, 0xFE40, 0xFE42, 0xFE44, 0xFE48, + -- + -- half width opening punctuation + -- + -- + -- + -- full width opening punctuation + -- + -- 0x2236, -- ∶ + -- 0xFF0C, -- , + -- + -- half width closing punctuation_hw + -- + [0x0021] = "half_width_close", -- ! + [0x002C] = "half_width_close", -- , + [0x002E] = "half_width_close", -- . + [0x003A] = "half_width_close", -- : + [0x003B] = "half_width_close", -- ; + [0x003F] = "half_width_close", -- ? + [0xFF61] = "half_width_close", -- hw full stop + -- + -- full width closing punctuation + -- + [0x3001] = "full_width_close", -- 〠+ [0x3002] = "full_width_close", -- 。 + [0xFF0C] = "full_width_close", -- , + [0xFF0E] = "full_width_close", -- + -- + -- depends on font + -- + [0xFF01] = "full_width_close", -- ï¼ + [0xFF1F] = "full_width_close", -- ? + -- + [0xFF1A] = "full_width_punct", -- : + [0xFF1B] = "full_width_punct", -- ï¼› + -- + -- non starter + -- + [0x3005] = "non_starter", [0x3041] = "non_starter", [0x3043] = "non_starter", [0x3045] = "non_starter", [0x3047] = "non_starter", + [0x3049] = "non_starter", [0x3063] = "non_starter", [0x3083] = "non_starter", [0x3085] = "non_starter", [0x3087] = "non_starter", + [0x308E] = "non_starter", [0x3095] = "non_starter", [0x3096] = "non_starter", [0x309B] = "non_starter", [0x309C] = "non_starter", + [0x309D] = "non_starter", [0x309E] = "non_starter", [0x30A0] = "non_starter", [0x30A1] = "non_starter", [0x30A3] = "non_starter", + [0x30A5] = "non_starter", [0x30A7] = "non_starter", [0x30A9] = "non_starter", [0x30C3] = "non_starter", [0x30E3] = "non_starter", + [0x30E5] = "non_starter", [0x30E7] = "non_starter", [0x30EE] = "non_starter", [0x30F5] = "non_starter", [0x30F6] = "non_starter", + [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31F0] = "non_starter", [0x31F1] = "non_starter", + [0x30F2] = "non_starter", [0x30F3] = "non_starter", [0x30F4] = "non_starter", [0x31F5] = "non_starter", [0x31F6] = "non_starter", + [0x30F7] = "non_starter", [0x30F8] = "non_starter", [0x30F9] = "non_starter", [0x31FA] = "non_starter", [0x31FB] = "non_starter", + [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31FF] = "non_starter", + -- + -- hyphenation + -- + [0x2026] = "hyphen", -- … ellipsis + [0x2014] = "hyphen", -- — hyphen + -- + [0x1361] = "ethiopic_word", + [0x1362] = "ethiopic_sentence", + -- +} + +local function provide(t,k) + local v + if not tonumber(k) then v = false + elseif (k >= 0x03040 and k <= 0x030FF) + or (k >= 0x031F0 and k <= 0x031FF) + or (k >= 0x032D0 and k <= 0x032FE) + or (k >= 0x0FF00 and k <= 0x0FFEF) then v = "katakana" + elseif (k >= 0x03400 and k <= 0x04DFF) + or (k >= 0x04E00 and k <= 0x09FFF) + or (k >= 0x0F900 and k <= 0x0FAFF) + or (k >= 0x20000 and k <= 0x2A6DF) + or (k >= 0x2F800 and k <= 0x2FA1F) then v = "chinese" + elseif (k >= 0x0AC00 and k <= 0x0D7A3) then v = "korean" + elseif (k >= 0x01100 and k <= 0x0115F) then v = "jamo_initial" + elseif (k >= 0x01160 and k <= 0x011A7) then v = "jamo_medial" + elseif (k >= 0x011A8 and k <= 0x011FF) then v = "jamo_final" + elseif (k >= 0x01200 and k <= 0x0139F) then v = "ethiopic_syllable" + else v = false + end + t[k] 
= v + return v +end + +setmetatableindex(hash,provide) + +scripts.hash = hash + +local numbertodataset = allocate() +local numbertohandler = allocate() + +--~ storage.register("scripts/hash", hash, "scripts.hash") + +scripts.numbertodataset = numbertodataset +scripts.numbertohandler = numbertohandler + +local defaults = { + inter_char_shrink_factor = 0, + inter_char_shrink_factor = 0, + inter_char_stretch_factor = 0, + inter_char_half_shrink_factor = 0, + inter_char_half_stretch_factor = 0, + inter_char_quarter_shrink_factor = 0, + inter_char_quarter_stretch_factor = 0, + inter_char_hangul_penalty = 0, + + inter_word_stretch_factor = 0, +} + +scripts.defaults = defaults -- so we can add more + +function scripts.installmethod(handler) + local name = handler.name + handlers[name] = handler + local attributes = { } + local datasets = handler.datasets + if not datasets or not datasets.default then + report_preprocessing("missing (default) dataset in script %a",name) + datasets.default = { } -- slower but an error anyway + end + for k, v in next, datasets do + setmetatableindex(v,defaults) + end + setmetatable(attributes, { + __index = function(t,k) + local v = datasets[k] or datasets.default + local a = unsetvalue + if v then + v.name = name -- for tracing + a = #numbertodataset + 1 + numbertodataset[a] = v + numbertohandler[a] = handler + end + t[k] = a + return a + end + } ) + handler.attributes = attributes +end + +function scripts.installdataset(specification) -- global overload + local method = specification.method + local name = specification.name + local dataset = specification.dataset + if method and name and dataset then + local parent = specification.parent or "" + local handler = handlers[method] + if handler then + local datasets = handler.datasets + if datasets then + local defaultset = datasets.default + if defaultset then + if parent ~= "" then + local p = datasets[parent] + if p then + defaultset = p + else + report_preprocessing("dataset, unknown parent %a for method %a",parent,method) + end + end + setmetatable(dataset,defaultset) + local existing = datasets[name] + if existing then + for k, v in next, existing do + existing[k] = dataset + end + else + datasets[name] = dataset + end + else + report_preprocessing("dataset, no default for method %a",method) + end + else + report_preprocessing("dataset, no datasets for method %a",method) + end + else + report_preprocessing("dataset, no method %a",method) + end + else + report_preprocessing("dataset, invalid specification") -- maybe report table + end +end + +local injectorenabled = false +local splitterenabled = false + +function scripts.set(name,method,preset) + local handler = handlers[method] + if handler then + if handler.injector then + if not injectorenabled then + enableaction("processors","scripts.injectors.handler") + injectorenabled = true + end + texsetattribute(a_scriptinjection,handler.attributes[preset] or unsetvalue) + end + if handler.splitter then + if not splitterenabled then + enableaction("processors","scripts.splitters.handler") + splitterenabled = true + end + texsetattribute(a_scriptsplitting,handler.attributes[preset] or unsetvalue) + end + if handler.initializer then + handler.initializer(handler) + handler.initializer = nil + end + else + texsetattribute(a_scriptinjection,unsetvalue) + texsetattribute(a_scriptsplitting,unsetvalue) + end +end + +function scripts.reset() + texsetattribute(a_scriptinjection,unsetvalue) + texsetattribute(a_scriptsplitting,unsetvalue) +end + +-- the following tables will 
become a proper installer (move to cjk/eth) +-- +-- 0=gray 1=red 2=green 3=blue 4=yellow 5=magenta 6=cyan 7=x-yellow 8=x-magenta 9=x-cyan + +local scriptcolors = allocate { -- todo: just named colors + korean = "trace:0", + chinese = "trace:0", + katakana = "trace:0", + hiragana = "trace:0", + full_width_open = "trace:1", + full_width_close = "trace:2", + half_width_open = "trace:3", + half_width_close = "trace:4", + full_width_punct = "trace:5", + hyphen = "trace:5", + non_starter = "trace:6", + jamo_initial = "trace:7", + jamo_medial = "trace:8", + jamo_final = "trace:9", + ethiopic_syllable = "trace:1", + ethiopic_word = "trace:2", + ethiopic_sentence = "trace:3", +} + +scripts.colors = scriptcolors + +local numbertocategory = allocate { -- rather bound to cjk ... will be generalized + "korean", + "chinese", + "katakana", + "hiragana", + "full_width_open", + "full_width_close", + "half_width_open", + "half_width_close", + "full_width_punct", + "hyphen", + "non_starter", + "jamo_initial", + "jamo_medial", + "jamo_final", + "ethiopic_syllable", + "ethiopic_word", + "ethiopic_sentence", +} + +local categorytonumber = allocate(table.swapped(numbertocategory)) -- could be one table + +scripts.categorytonumber = categorytonumber +scripts.numbertocategory = numbertocategory + +local function colorize(start,stop) + for n in traverse_id(glyph_code,start) do + local kind = numbertocategory[n[a_scriptstatus]] + if kind then + local ac = scriptcolors[kind] + if ac then + setnodecolor(n,ac) + end + end + if n == stop then + break + end + end +end + +local function traced_process(head,first,last,process,a) + if start ~= last then + local f, l = first, last + local name = numbertodataset[a] + name = name and name.name or "?" + report_preprocessing("before %s: %s",name,nodes.tosequence(f,l)) + process(head,first,last) + report_preprocessing("after %s: %s", name,nodes.tosequence(f,l)) + end +end + +-- eventually we might end up with more extensive parsing +-- todo: pass t[start..stop] == original +-- +-- one of the time consuming functions: + +-- we can have a fonts.hashes.originals + +function scripts.injectors.handler(head) + local start = first_glyph(head) -- we already have glyphs here (subtype 1) + if not start then + return head, false + else + local last_a, normal_process, lastfont, originals = nil, nil, nil, nil + local done, first, last, ok = false, nil, nil, false + while start do + local id = start.id + if id == glyph_code then + local a = start[a_scriptinjection] + if a then + if a ~= last_a then + if first then + if ok then + if trace_analyzing then + colorize(first,last) + end + if trace_injections then + traced_process(head,first,last,normal_process,last_a) + else + normal_process(head,first,last) + end + ok, done = false, true + end + first, last = nil, nil + end + last_a = a + local handler = numbertohandler[a] + normal_process = handler.injector + end + if normal_process then + local f = start.font + if f ~= lastfont then + originals = fontdata[f].resources + if resources then + originals = resources.originals + else + -- can't happen + end + lastfont = f + end + local c = start.char + if originals then + c = originals[c] or c + end + local h = hash[c] + if h then + start[a_scriptstatus] = categorytonumber[h] + if not first then + first, last = start, start + else + last = start + end + -- if cjk == "chinese" or cjk == "korean" then -- we need to prevent too much ( ) processing + ok = true + -- end + elseif first then + if ok then + if trace_analyzing then + colorize(first,last) + end 
+ if trace_injections then + traced_process(head,first,last,normal_process,last_a) + else + normal_process(head,first,last) + end + ok, done = false, true + end + first, last = nil, nil + end + end + elseif first then + if ok then + if trace_analyzing then + colorize(first,last) + end + if trace_injections then + traced_process(head,first,last,normal_process,last_a) + else + normal_process(head,first,last) + end + ok, done = false, true + end + first, last = nil, nil + end + elseif id == glue_code then + if ok then + -- continue + elseif first then + -- no chinese or korean + first, last = nil, nil + end + elseif first then + if ok then + -- some chinese or korean + if trace_analyzing then + colorize(first,last) + end + if trace_injections then + traced_process(head,first,last,normal_process,last_a) + else + normal_process(head,first,last) + end + first, last, ok, done = nil, nil, false, true + elseif first then + first, last = nil, nil + end + end + start = start.next + end + if ok then + if trace_analyzing then + colorize(first,last) + end + if trace_injections then + traced_process(head,first,last,normal_process,last_a) + else + normal_process(head,first,last) + end + done = true + end + return head, done + end +end + +function scripts.splitters.handler(head) + return head, false +end + +-- new plugin: + +local registercontext = fonts.specifiers.registercontext +local mergecontext = fonts.specifiers.mergecontext + +local otfscripts = characters.otfscripts + +local report_scripts = logs.reporter("scripts","auto feature") +local trace_scripts = false trackers.register("scripts.autofeature",function(v) trace_scripts = v end) + +local autofontfeature = scripts.autofontfeature or { } +scripts.autofontfeature = autofontfeature + +local cache_yes = { } +local cache_nop = { } + +setmetatableindex(cache_yes,function(t,k) local v = { } t[k] = v return v end) +setmetatableindex(cache_nop,function(t,k) local v = { } t[k] = v return v end) + +-- beware: we need to tag a done (otherwise too many extra instances ... but how +-- often unpack? 
wait till we have a bitmap +-- +-- we can consider merging this in handlers.characters(head) at some point as there +-- already check for the dynamic attribute so it saves a pass, however, then we also +-- need to check for a_scriptinjection there which nils the benefit +-- +-- we can consider cheating: set all glyphs in a word as the first one but it's not +-- playing nice + +function autofontfeature.handler(head) + for n in traverse_id(glyph_code,head) do + -- if n[a_scriptinjection] then + -- -- already tagged by script feature, maybe some day adapt + -- else + local char = n.char + local script = otfscripts[char] + if script then + local dynamic = n[0] or 0 + local font = n.font + if dynamic > 0 then + local slot = cache_yes[font] + local attr = slot[script] + if not attr then + attr = mergecontext(dynamic,name,2) + slot[script] = attr + if trace_scripts then + report_scripts("script: %s, trigger %C, dynamic: %a, variant: %a",script,char,attr,"extended") + end + end + if attr ~= 0 then + n[0] = attr + -- maybe set scriptinjection when associated + end + else + local slot = cache_nop[font] + local attr = slot[script] + if not attr then + attr = registercontext(font,script,2) + slot[script] = attr + if trace_scripts then + report_scripts("script: %s, trigger %C, dynamic: %s, variant: %a",script,char,attr,"normal") + end + end + if attr ~= 0 then + n[0] = attr + -- maybe set scriptinjection when associated + end + end + end + -- end + end + return head +end + +function autofontfeature.enable() + report_scripts("globally enabled") + enableaction("processors","scripts.autofontfeature.handler") +end + +function autofontfeature.disable() + report_scripts("globally disabled") + disableaction("processors","scripts.autofontfeature.handler") +end + +commands.enableautofontscript = autofontfeature.enable +commands.disableautofontscript = autofontfeature.disable diff --git a/tex/context/base/sort-ini.lua b/tex/context/base/sort-ini.lua index 479d1c489..a07cbc6d2 100644 --- a/tex/context/base/sort-ini.lua +++ b/tex/context/base/sort-ini.lua @@ -1,665 +1,665 @@ -if not modules then modules = { } end modules ['sort-ini'] = { - version = 1.001, - comment = "companion to sort-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- It took a while to get there, but with Fleetwood Mac's "Don't Stop" --- playing in the background we sort of got it done. - ---[[

The code here evolved from the rather old mkii approach. There
-we concatenate the key and (raw) entry into a new string. Numbers and
-special characters get some treatment so that they sort ok. In
-addition some normalization (lowercasing, accent stripping) takes
-place and again data is appended or prepended. Eventually these
-strings are sorted using a regular string sorter. The relative order
-of characters is dealt with by weighting them. It took a while to
-figure this all out but eventually it worked ok for most languages,
-given that the right datatables were provided.
-
-Here we do follow a similar approach, but this time we don't append
-the manipulated keys and entries but create tables for each of them,
-with entries being tables themselves having different properties. In
-these tables characters are represented by numbers and sorting takes
-place using these numbers. Strings are simplified using lowercasing
-as well as shape codes. Numbers are filtered and, after getting an
-offset, they end up at the right end of the spectrum (a more clever
-parser will be added some day). There are definitely more solutions
-to the problem and it is a nice puzzle to solve.
-
-In the future more methods can be added, as there is practically no
-limit to what goes into the tables. For that we will provide hooks.
-
-Todo: decomposition with a specific order of accents; this is
-relatively easy to do.
-
-Todo: investigate what standards and conventions there are and see
-how they map onto this mechanism. I've learned that users can come up
-with any demand, so nothing here is frozen.
-
-In the future index entries will become more clever, i.e. they will
-have language etc. properties that can then be used.
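To make the description above a bit more concrete: a minimal, self-contained sketch of the weight idea, with invented weights and an ascii-only splitter (the real code uses utf iterators plus case and shape mappings, and keeps several mapping variants per entry):

local weights = { a = { 2 }, b = { 4 }, c = { 6 } }  -- invented order table

local function split(word)                           -- toy version of splitters.utf
    local t = { }
    for chr in word:gmatch(".") do
        local w = weights[chr]
        if w then
            for i=1,#w do t[#t+1] = w[i] end
        end
    end
    return t
end

local function compare(a,b)                          -- toy version of comparers.basic
    local sa, sb = split(a), split(b)
    for i=1,math.min(#sa,#sb) do
        if sa[i] ~= sb[i] then return sa[i] < sb[i] end
    end
    return #sa < #sb                                  -- shorter prefix sorts first
end

local list = { "ba", "ab", "aab", "aa" }
table.sort(list,compare)
-- list is now { "aa", "aab", "ab", "ba" }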

    -]]-- - -local gsub, rep, sub, sort, concat = string.gsub, string.rep, string.sub, table.sort, table.concat -local utfbyte, utfchar, utfcharacters, utfvalues = utf.byte, utf.char, utf.characters, utf.values -local next, type, tonumber, rawget, rawset = next, type, tonumber, rawget, rawset - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex - -local trace_tests = false trackers.register("sorters.tests", function(v) trace_tests = v end) -local trace_methods = false trackers.register("sorters.methods", function(v) trace_methods = v end) - -local report_sorters = logs.reporter("languages","sorters") - -local comparers = { } -local splitters = { } -local definitions = allocate() -local tracers = allocate() -local ignoredoffset = 0x10000 -- frozen -local replacementoffset = 0x10000 -- frozen -local digitsoffset = 0x20000 -- frozen -local digitsmaximum = 0xFFFFF -- frozen - -local lccodes = characters.lccodes -local lcchars = characters.lcchars -local shchars = characters.shchars -local fscodes = characters.fscodes -local fschars = characters.fschars - -local decomposed = characters.decomposed - -local variables = interfaces.variables - -local v_numbers = variables.numbers -local v_default = variables.default -local v_before = variables.before -local v_after = variables.after -local v_first = variables.first -local v_last = variables.last - -local validmethods = table.tohash { - -- "ch", -- raw character - "mm", -- minus mapping - "zm", -- zero mapping - "pm", -- plus mapping - "mc", -- lower case - 1 - "zc", -- lower case - "pc", -- lower case + 1 - "uc", -- unicode -} - -local predefinedmethods = { - [v_default] = "zc,pc,zm,pm,uc", - [v_before] = "mm,mc,uc", - [v_after] = "pm,mc,uc", - [v_first] = "pc,mm,uc", - [v_last] = "mc,mm,uc", -} - -sorters = { - comparers = comparers, - splitters = splitters, - definitions = definitions, - tracers = tracers, - constants = { - ignoredoffset = ignoredoffset, - replacementoffset = replacementoffset, - digitsoffset = digitsoffset, - digitsmaximum = digitsmaximum, - defaultlanguage = v_default, - defaultmethod = v_default, - defaultdigits = v_numbers, - } -} - -local sorters = sorters -local constants = sorters.constants - -local data, language, method, digits -local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence -local thefirstofsplit - -local mte = { -- todo: assign to t - __index = function(t,k) - if k and k ~= "" and utfbyte(k) < digitsoffset then -- k check really needed (see s-lan-02) - local el - if k then - local l = lower[k] or lcchars[k] - el = rawget(t,l) - end - if not el then - local l = shchars[k] - if l and l ~= k then - if #l > 1 then - l = sub(l,1,1) -- todo - end - el = rawget(t,l) - if not el then - l = lower[k] or lcchars[l] - if l then - el = rawget(t,l) - end - end - end - el = el or k - end - -- rawset(t,k,el) - return el - else - -- rawset(t,k,k) - end - end -} - -local noorder = false - -local function preparetables(data) - local orders, lower, m_mappings, z_mappings, p_mappings = data.orders, data.lower, { }, { }, { } - for i=1,#orders do - local oi = orders[i] - local n = { 2 * i } - m_mappings[oi], z_mappings[oi], p_mappings[oi] = n, n, n - end - local mtm = { - __index = function(t,k) - local n, nn - if k then - if trace_tests then - report_sorters("simplifing character %C",k) - end - local l = lower[k] or lcchars[k] - if l then - if trace_tests then - report_sorters(" 1 lower: %C",l) - end - local ml = rawget(t,l) - if ml then - 
n = { } - nn = 0 - for i=1,#ml do - nn = nn + 1 - n[nn] = ml[i] + (t.__delta or 0) - end - if trace_tests then - report_sorters(" 2 order: % t",n) - end - end - end - if not n then - local s = shchars[k] -- maybe all components? - if s and s ~= k then - if trace_tests then - report_sorters(" 3 shape: %C",s) - end - n = { } - nn = 0 - for l in utfcharacters(s) do - local ml = rawget(t,l) - if ml then - if trace_tests then - report_sorters(" 4 keep: %C",l) - end - if ml then - for i=1,#ml do - nn = nn + 1 - n[nn] = ml[i] - end - end - else - l = lower[l] or lcchars[l] - if l then - if trace_tests then - report_sorters(" 5 lower: %C",l) - end - local ml = rawget(t,l) - if ml then - for i=1,#ml do - nn = nn + 1 - n[nn] = ml[i] + (t.__delta or 0) - end - end - end - end - end - else - -- -- we probably never enter this branch - -- -- fschars returns a single char - -- - -- s = fschars[k] - -- if s and s ~= k then - -- if trace_tests then - -- report_sorters(" 6 split: %s",s) - -- end - -- local ml = rawget(t,s) - -- if ml then - -- n = { } - -- nn = 0 - -- for i=1,#ml do - -- nn = nn + 1 - -- n[nn] = ml[i] - -- end - -- end - -- end - local b = utfbyte(k) - n = decomposed[b] or { b } - if trace_tests then - report_sorters(" 6 split: %s",utf.tostring(b)) -- todo - end - end - if n then - if trace_tests then - report_sorters(" 7 order: % t",n) - end - else - n = noorder - if trace_tests then - report_sorters(" 8 order: 0") - end - end - end - else - n = noorder - if trace_tests then - report_sorters(" 9 order: 0") - end - end - rawset(t,k,n) - return n - end - } - data.m_mappings = m_mappings - data.z_mappings = z_mappings - data.p_mappings = p_mappings - m_mappings.__delta = -1 - z_mappings.__delta = 0 - p_mappings.__delta = 1 - setmetatable(data.entries,mte) - setmetatable(data.m_mappings,mtm) - setmetatable(data.z_mappings,mtm) - setmetatable(data.p_mappings,mtm) - thefirstofsplit = data.firstofsplit -end - -local function update() -- prepare parent chains, needed when new languages are added - for language, data in next, definitions do - local parent = data.parent or "default" - if language ~= "default" then - setmetatableindex(data,definitions[parent] or definitions.default) - end - data.language = language - data.parent = parent - data.m_mappings = { } -- free temp data - data.z_mappings = { } -- free temp data - data.p_mappings = { } -- free temp data - end -end - -local function setlanguage(l,m,d,u) - language = (l ~= "" and l) or constants.defaultlanguage - data = definitions[language or constants.defaultlanguage] or definitions[constants.defaultlanguage] - method = (m ~= "" and m) or data.method or constants.defaultmethod - digits = (d ~= "" and d) or data.digits or constants.defaultdigits - if trace_tests then - report_sorters("setting language %a, method %a, digits %a",language,method,digits) - end - replacements = data.replacements - entries = data.entries - orders = data.orders - lower = data.lower - upper = data.upper - preparetables(data) - m_mappings = data.m_mappings - z_mappings = data.z_mappings - p_mappings = data.p_mappings - -- - method = predefinedmethods[variables[method]] or method - data.method = method - -- - data.digits = digits - -- - local seq = utilities.parsers.settings_to_array(method or "") -- check the list - sequence = { } - local nofsequence = 0 - for i=1,#seq do - local s = seq[i] - if validmethods[s] then - nofsequence = nofsequence + 1 - sequence[nofsequence] = s - else - report_sorters("invalid sorter method %a in %a",s,method) - end - end - data.sequence 
= sequence - if trace_tests then - report_sorters("using sort sequence: % t",sequence) - end - -- - return data -end - -function sorters.update() - update() - setlanguage(language,method,numberorder) -- resync current language and method -end - -function sorters.setlanguage(language,method,numberorder) - update() - setlanguage(language,method,numberorder) -- new language and method -end - --- tricky: { 0, 0, 0 } vs { 0, 0, 0, 0 } => longer wins and mm, pm, zm can have them - -local function basicsort(sort_a,sort_b) - if sort_a and sort_b then - local na = #sort_a - local nb = #sort_b - if na > nb then - na = nb - end - for i=1,na do - local ai, bi = sort_a[i], sort_b[i] - if ai > bi then - return 1 - elseif ai < bi then - return -1 - end - end - end - return 0 -end - -function comparers.basic(a,b) -- trace ea and eb - local ea, eb = a.split, b.split - local na, nb = #ea, #eb - if na == 0 and nb == 0 then - -- simple variant (single word) - local result = 0 - for j=1,#sequence do - local m = sequence[j] - result = basicsort(ea[m],eb[m]) - if result ~= 0 then - return result - end - end - if result == 0 then - local la, lb = #ea.uc, #eb.uc - if la > lb then - return 1 - elseif lb > la then - return -1 - else - return 0 - end - else - return result - end - else - -- complex variant, used in register (multiple words) - local result = 0 - for i=1,nb < na and nb or na do - local eai, ebi = ea[i], eb[i] - for j=1,#sequence do - local m = sequence[j] - result = basicsort(eai[m],ebi[m]) - if result ~= 0 then - return result - end - end - if result == 0 then - local la, lb = #eai.uc, #ebi.uc - if la > lb then - return 1 - elseif lb > la then - return -1 - end - else - return result - end - end - if result ~= 0 then - return result - elseif na > nb then - return 1 - elseif nb > na then - return -1 - else - return 0 - end - end -end - -local function numify(s) - s = digitsoffset + tonumber(s) -- alternatively we can create range - if s > digitsmaximum then - s = digitsmaximum - end - return utfchar(s) -end - -function sorters.strip(str) -- todo: only letters and such - if str and str ~= "" then - -- todo: make a decent lpeg - str = gsub(str,"\\[\"\'~^`]*","") -- \"e -- hm, too greedy - str = gsub(str,"\\%S*","") -- the rest - str = gsub(str,"%s","\001") -- can be option - str = gsub(str,"[%s%[%](){}%$\"\']*","") - if digits == v_numbers then - str = gsub(str,"(%d+)",numify) -- sort numbers properly - end - return str - else - return "" - end -end - -local function firstofsplit(entry) - -- numbers are left padded by spaces - local split = entry.split - if #split > 0 then - split = split[1].ch - else - split = split.ch - end - local first = split and split[1] or "" - if thefirstofsplit then - return thefirstofsplit(first,data,entry) -- normally the first one is needed - else - return first, entries[first] or "\000" -- tag - end -end - -sorters.firstofsplit = firstofsplit - --- for the moment we use an inefficient bunch of tables but once --- we know what combinations make sense we can optimize this - -function splitters.utf(str) -- we could append m and u but this is cleaner, s is for tracing - if #replacements > 0 then - -- todo make an lpeg for this - for k=1,#replacements do - local v = replacements[k] - str = gsub(str,v[1],v[2]) - end - end - local m_case, z_case, p_case, m_mapping, z_mapping, p_mapping, char, byte, n = { }, { }, { }, { }, { }, { }, { }, { }, 0 - local nm, nz, np = 0, 0, 0 - for sc in utfcharacters(str) do - local b = utfbyte(sc) - if b >= digitsoffset then - if n == 0 then - -- we 
need to force number to the top - z_case[1] = 0 - m_case[1] = 0 - p_case[1] = 0 - char[1] = sc - byte[1] = 0 - m_mapping[1] = 0 - z_mapping[1] = 0 - p_mapping[1] = 0 - n = 2 - else - n = n + 1 - end - z_case[n] = b - m_case[n] = b - p_case[n] = b - char[n] = sc - byte[n] = b - nm = nm + 1 - nz = nz + 1 - np = np + 1 - m_mapping[nm] = b - z_mapping[nz] = b - p_mapping[np] = b - else - n = n + 1 - local l = lower[sc] - l = l and utfbyte(l) or lccodes[b] - if type(l) == "table" then - l = l[1] -- there are currently no tables in lccodes but it can be some, day - end - z_case[n] = l - if l ~= b then - m_case[n] = l - 1 - p_case[n] = l + 1 - else - m_case[n] = l - p_case[n] = l - end - char[n], byte[n] = sc, b - local fs = fscodes[b] or b - local msc = m_mappings[sc] - if msc ~= noorder then - if not msc then - msc = m_mappings[fs] - end - for i=1,#msc do - nm = nm + 1 - m_mapping[nm] = msc[i] - end - end - local zsc = z_mappings[sc] - if zsc ~= noorder then - if not zsc then - zsc = z_mappings[fs] - end - for i=1,#zsc do - nz = nz + 1 - z_mapping[nz] = zsc[i] - end - end - local psc = p_mappings[sc] - if psc ~= noorder then - if not psc then - psc = p_mappings[fs] - end - for i=1,#psc do - np = np + 1 - p_mapping[np] = psc[i] - end - end - end - end - -- -- only those needed that are part of a sequence - -- - -- local b = byte[1] - -- if b then - -- -- we set them to the first split code (korean) - -- local fs = fscodes[b] or b - -- if #m_mapping == 0 then - -- m_mapping = { m_mappings[fs][1] } - -- end - -- if #z_mapping == 0 then - -- z_mapping = { z_mappings[fs][1] } - -- end - -- if #p_mapping == 0 then - -- p_mapping = { p_mappings[fs][1] } - -- end - -- end - local t = { - ch = char, - uc = byte, - mc = m_case, - zc = z_case, - pc = p_case, - mm = m_mapping, - zm = z_mapping, - pm = p_mapping, - } - - return t -end - -local function packch(entry) - local split = entry.split - if #split > 0 then -- useless test - local t = { } - for i=1,#split do - local tt, li = { }, split[i].ch - for j=1,#li do - local lij = li[j] - tt[j] = utfbyte(lij) > ignoredoffset and "[]" or lij - end - t[i] = concat(tt) - end - return concat(t," + ") - else - local t, li = { }, split.ch - for j=1,#li do - local lij = li[j] - t[j] = utfbyte(lij) > ignoredoffset and "[]" or lij - end - return concat(t) - end -end - -local function packuc(entry) - local split = entry.split - if #split > 0 then -- useless test - local t = { } - for i=1,#split do - t[i] = concat(split[i].uc, " ") - end - return concat(t," + ") - else - return concat(split.uc," ") - end -end - -function sorters.sort(entries,cmp) - if trace_tests or trace_methods then - local nofentries = #entries - report_sorters("entries: %s, language: %s, method: %s, digits: %s",nofentries,language,method,tostring(digits)) - for i=1,nofentries do - report_sorters("entry %s",table.serialize(entries[i].split,i,true,true,true)) - end - end - if trace_tests then - sort(entries,function(a,b) - local r = cmp(a,b) - local e = (not r and "?") or (r<0 and "<") or (r>0 and ">") or "=" - report_sorters("%s %s %s | %s %s %s",packch(a),e,packch(b),packuc(a),e,packuc(b)) - return r == -1 - end) - local s - for i=1,#entries do - local entry = entries[i] - local letter, first = firstofsplit(entry) - if first == s then - first = " " - else - s = first - report_sorters(">> %C (%C)",first,letter) - end - report_sorters(" %s | %s",packch(entry),packuc(entry)) - end - else - sort(entries,function(a,b) - return cmp(a,b) == -1 - end) - end -end +if not modules then modules = { } end 
modules ['sort-ini'] = { + version = 1.001, + comment = "companion to sort-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- It took a while to get there, but with Fleetwood Mac's "Don't Stop" +-- playing in the background we sort of got it done. + +--[[

    The code here evolved from the rather old mkii approach. There +we concatenate the key and (raw) entry into a new string. Numbers and +special characters get some treatment so that they sort ok. In +addition some normalization (lowercasing, accent stripping) takes +place and again data is appended or prepended. Eventually these +strings are sorted using a regular string sorter. The relative order +of characters is dealt with by weighting them. It took a while to +figure this all out but eventually it worked ok for most languages, +given that the right data tables were provided.

    + +
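-- A minimal sketch of that concatenation idea, with invented names and a
-- very crude number treatment; it only illustrates the principle and is
-- not the actual mkii code:

local function oldstylekey(key,entry)
    local s = string.lower(key .. "\001" .. entry)  -- normalize and concatenate
    s = string.gsub(s,"%d+",function(d)
        return string.format("%010d",tonumber(d))   -- pad numbers so "2" < "10"
    end)
    return s
end

local list = {
    { key = "item 10", entry = "ten" },
    { key = "item 2",  entry = "two" },
    { key = "Item 1",  entry = "one" },
}

table.sort(list,function(a,b)
    return oldstylekey(a.key,a.entry) < oldstylekey(b.key,b.entry)
end)
-- sorted order: "Item 1", "item 2", "item 10"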

    Here we do follow a similar approach, but this time we don't append +the manipulated keys and entries; instead we create tables for each of them, +with entries being tables themselves having different properties. In +these tables characters are represented by numbers and sorting takes +place using these numbers. Strings are simplified using lowercasing +as well as shape codes. Numbers are filtered and, after getting an offset, +they end up at the right end of the spectrum (a more clever parser will +be added some day). There are definitely more solutions to the problem +and it is a nice puzzle to solve.

    + +
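-- A tiny numeric variant of the idea sketched above: every character
-- becomes a number, digits get a large offset so that they end up at the
-- far end of the spectrum, and comparison walks the resulting vectors.
-- The names here are invented and the real splitter (splitters.utf below)
-- is far more refined (mappings, shape codes, utf):

local digitsoffset = 0x20000

local function simplesplit(str)
    local codes = { }
    for c in string.gmatch(string.lower(str),".") do -- ascii only in this sketch
        local b = string.byte(c)
        codes[#codes+1] = string.find(c,"%d") and (digitsoffset + b) or b
    end
    return codes
end

local function compare(a,b)
    local ca, cb = simplesplit(a), simplesplit(b)
    for i=1,math.min(#ca,#cb) do
        if ca[i] ~= cb[i] then
            return ca[i] < cb[i]
        end
    end
    return #ca < #cb
end

local words = { "beta2", "Alpha", "beta", "2com" }
table.sort(words,compare)
-- result: "Alpha", "beta", "beta2", "2com" -- case ignored, digits last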

    In the future more methods can be added, as there is practically no +limit to what goes into the tables. For that we will provide hooks.

    + +
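-- Hooking in new data already amounts to filling sorters.definitions,
-- exactly as sort-lan.lua further down does; a made up vector for an
-- imaginary language tag "xx" that reuses the default order but folds
-- "y" into "i" (this assumes the sorter environment set up above):

sorters.definitions["xx"] = {
    parent       = "default",
    replacements = {
        { "y", "i" }, { "Y", "I" },
    },
}

-- after which one can switch to it:
--
--   sorters.setlanguage("xx")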

    Todo: decomposition with a specific order of accents; this is +relatively easy to do.

    + +

    Todo: investigate what standards and conventions there are and see +how they map onto this mechanism. I've learned that users can come up +with any demand, so nothing here is frozen.

    + +

    In the future index entries will become more clever, i.e. they will +have language and other properties that can then be used.

    +]]-- + +local gsub, rep, sub, sort, concat = string.gsub, string.rep, string.sub, table.sort, table.concat +local utfbyte, utfchar, utfcharacters, utfvalues = utf.byte, utf.char, utf.characters, utf.values +local next, type, tonumber, rawget, rawset = next, type, tonumber, rawget, rawset + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex + +local trace_tests = false trackers.register("sorters.tests", function(v) trace_tests = v end) +local trace_methods = false trackers.register("sorters.methods", function(v) trace_methods = v end) + +local report_sorters = logs.reporter("languages","sorters") + +local comparers = { } +local splitters = { } +local definitions = allocate() +local tracers = allocate() +local ignoredoffset = 0x10000 -- frozen +local replacementoffset = 0x10000 -- frozen +local digitsoffset = 0x20000 -- frozen +local digitsmaximum = 0xFFFFF -- frozen + +local lccodes = characters.lccodes +local lcchars = characters.lcchars +local shchars = characters.shchars +local fscodes = characters.fscodes +local fschars = characters.fschars + +local decomposed = characters.decomposed + +local variables = interfaces.variables + +local v_numbers = variables.numbers +local v_default = variables.default +local v_before = variables.before +local v_after = variables.after +local v_first = variables.first +local v_last = variables.last + +local validmethods = table.tohash { + -- "ch", -- raw character + "mm", -- minus mapping + "zm", -- zero mapping + "pm", -- plus mapping + "mc", -- lower case - 1 + "zc", -- lower case + "pc", -- lower case + 1 + "uc", -- unicode +} + +local predefinedmethods = { + [v_default] = "zc,pc,zm,pm,uc", + [v_before] = "mm,mc,uc", + [v_after] = "pm,mc,uc", + [v_first] = "pc,mm,uc", + [v_last] = "mc,mm,uc", +} + +sorters = { + comparers = comparers, + splitters = splitters, + definitions = definitions, + tracers = tracers, + constants = { + ignoredoffset = ignoredoffset, + replacementoffset = replacementoffset, + digitsoffset = digitsoffset, + digitsmaximum = digitsmaximum, + defaultlanguage = v_default, + defaultmethod = v_default, + defaultdigits = v_numbers, + } +} + +local sorters = sorters +local constants = sorters.constants + +local data, language, method, digits +local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence +local thefirstofsplit + +local mte = { -- todo: assign to t + __index = function(t,k) + if k and k ~= "" and utfbyte(k) < digitsoffset then -- k check really needed (see s-lan-02) + local el + if k then + local l = lower[k] or lcchars[k] + el = rawget(t,l) + end + if not el then + local l = shchars[k] + if l and l ~= k then + if #l > 1 then + l = sub(l,1,1) -- todo + end + el = rawget(t,l) + if not el then + l = lower[k] or lcchars[l] + if l then + el = rawget(t,l) + end + end + end + el = el or k + end + -- rawset(t,k,el) + return el + else + -- rawset(t,k,k) + end + end +} + +local noorder = false + +local function preparetables(data) + local orders, lower, m_mappings, z_mappings, p_mappings = data.orders, data.lower, { }, { }, { } + for i=1,#orders do + local oi = orders[i] + local n = { 2 * i } + m_mappings[oi], z_mappings[oi], p_mappings[oi] = n, n, n + end + local mtm = { + __index = function(t,k) + local n, nn + if k then + if trace_tests then + report_sorters("simplifing character %C",k) + end + local l = lower[k] or lcchars[k] + if l then + if trace_tests then + report_sorters(" 1 lower: %C",l) + end + local ml = rawget(t,l) + if ml then + 
n = { } + nn = 0 + for i=1,#ml do + nn = nn + 1 + n[nn] = ml[i] + (t.__delta or 0) + end + if trace_tests then + report_sorters(" 2 order: % t",n) + end + end + end + if not n then + local s = shchars[k] -- maybe all components? + if s and s ~= k then + if trace_tests then + report_sorters(" 3 shape: %C",s) + end + n = { } + nn = 0 + for l in utfcharacters(s) do + local ml = rawget(t,l) + if ml then + if trace_tests then + report_sorters(" 4 keep: %C",l) + end + if ml then + for i=1,#ml do + nn = nn + 1 + n[nn] = ml[i] + end + end + else + l = lower[l] or lcchars[l] + if l then + if trace_tests then + report_sorters(" 5 lower: %C",l) + end + local ml = rawget(t,l) + if ml then + for i=1,#ml do + nn = nn + 1 + n[nn] = ml[i] + (t.__delta or 0) + end + end + end + end + end + else + -- -- we probably never enter this branch + -- -- fschars returns a single char + -- + -- s = fschars[k] + -- if s and s ~= k then + -- if trace_tests then + -- report_sorters(" 6 split: %s",s) + -- end + -- local ml = rawget(t,s) + -- if ml then + -- n = { } + -- nn = 0 + -- for i=1,#ml do + -- nn = nn + 1 + -- n[nn] = ml[i] + -- end + -- end + -- end + local b = utfbyte(k) + n = decomposed[b] or { b } + if trace_tests then + report_sorters(" 6 split: %s",utf.tostring(b)) -- todo + end + end + if n then + if trace_tests then + report_sorters(" 7 order: % t",n) + end + else + n = noorder + if trace_tests then + report_sorters(" 8 order: 0") + end + end + end + else + n = noorder + if trace_tests then + report_sorters(" 9 order: 0") + end + end + rawset(t,k,n) + return n + end + } + data.m_mappings = m_mappings + data.z_mappings = z_mappings + data.p_mappings = p_mappings + m_mappings.__delta = -1 + z_mappings.__delta = 0 + p_mappings.__delta = 1 + setmetatable(data.entries,mte) + setmetatable(data.m_mappings,mtm) + setmetatable(data.z_mappings,mtm) + setmetatable(data.p_mappings,mtm) + thefirstofsplit = data.firstofsplit +end + +local function update() -- prepare parent chains, needed when new languages are added + for language, data in next, definitions do + local parent = data.parent or "default" + if language ~= "default" then + setmetatableindex(data,definitions[parent] or definitions.default) + end + data.language = language + data.parent = parent + data.m_mappings = { } -- free temp data + data.z_mappings = { } -- free temp data + data.p_mappings = { } -- free temp data + end +end + +local function setlanguage(l,m,d,u) + language = (l ~= "" and l) or constants.defaultlanguage + data = definitions[language or constants.defaultlanguage] or definitions[constants.defaultlanguage] + method = (m ~= "" and m) or data.method or constants.defaultmethod + digits = (d ~= "" and d) or data.digits or constants.defaultdigits + if trace_tests then + report_sorters("setting language %a, method %a, digits %a",language,method,digits) + end + replacements = data.replacements + entries = data.entries + orders = data.orders + lower = data.lower + upper = data.upper + preparetables(data) + m_mappings = data.m_mappings + z_mappings = data.z_mappings + p_mappings = data.p_mappings + -- + method = predefinedmethods[variables[method]] or method + data.method = method + -- + data.digits = digits + -- + local seq = utilities.parsers.settings_to_array(method or "") -- check the list + sequence = { } + local nofsequence = 0 + for i=1,#seq do + local s = seq[i] + if validmethods[s] then + nofsequence = nofsequence + 1 + sequence[nofsequence] = s + else + report_sorters("invalid sorter method %a in %a",s,method) + end + end + data.sequence 
= sequence + if trace_tests then + report_sorters("using sort sequence: % t",sequence) + end + -- + return data +end + +function sorters.update() + update() + setlanguage(language,method,numberorder) -- resync current language and method +end + +function sorters.setlanguage(language,method,numberorder) + update() + setlanguage(language,method,numberorder) -- new language and method +end + +-- tricky: { 0, 0, 0 } vs { 0, 0, 0, 0 } => longer wins and mm, pm, zm can have them + +local function basicsort(sort_a,sort_b) + if sort_a and sort_b then + local na = #sort_a + local nb = #sort_b + if na > nb then + na = nb + end + for i=1,na do + local ai, bi = sort_a[i], sort_b[i] + if ai > bi then + return 1 + elseif ai < bi then + return -1 + end + end + end + return 0 +end + +function comparers.basic(a,b) -- trace ea and eb + local ea, eb = a.split, b.split + local na, nb = #ea, #eb + if na == 0 and nb == 0 then + -- simple variant (single word) + local result = 0 + for j=1,#sequence do + local m = sequence[j] + result = basicsort(ea[m],eb[m]) + if result ~= 0 then + return result + end + end + if result == 0 then + local la, lb = #ea.uc, #eb.uc + if la > lb then + return 1 + elseif lb > la then + return -1 + else + return 0 + end + else + return result + end + else + -- complex variant, used in register (multiple words) + local result = 0 + for i=1,nb < na and nb or na do + local eai, ebi = ea[i], eb[i] + for j=1,#sequence do + local m = sequence[j] + result = basicsort(eai[m],ebi[m]) + if result ~= 0 then + return result + end + end + if result == 0 then + local la, lb = #eai.uc, #ebi.uc + if la > lb then + return 1 + elseif lb > la then + return -1 + end + else + return result + end + end + if result ~= 0 then + return result + elseif na > nb then + return 1 + elseif nb > na then + return -1 + else + return 0 + end + end +end + +local function numify(s) + s = digitsoffset + tonumber(s) -- alternatively we can create range + if s > digitsmaximum then + s = digitsmaximum + end + return utfchar(s) +end + +function sorters.strip(str) -- todo: only letters and such + if str and str ~= "" then + -- todo: make a decent lpeg + str = gsub(str,"\\[\"\'~^`]*","") -- \"e -- hm, too greedy + str = gsub(str,"\\%S*","") -- the rest + str = gsub(str,"%s","\001") -- can be option + str = gsub(str,"[%s%[%](){}%$\"\']*","") + if digits == v_numbers then + str = gsub(str,"(%d+)",numify) -- sort numbers properly + end + return str + else + return "" + end +end + +local function firstofsplit(entry) + -- numbers are left padded by spaces + local split = entry.split + if #split > 0 then + split = split[1].ch + else + split = split.ch + end + local first = split and split[1] or "" + if thefirstofsplit then + return thefirstofsplit(first,data,entry) -- normally the first one is needed + else + return first, entries[first] or "\000" -- tag + end +end + +sorters.firstofsplit = firstofsplit + +-- for the moment we use an inefficient bunch of tables but once +-- we know what combinations make sense we can optimize this + +function splitters.utf(str) -- we could append m and u but this is cleaner, s is for tracing + if #replacements > 0 then + -- todo make an lpeg for this + for k=1,#replacements do + local v = replacements[k] + str = gsub(str,v[1],v[2]) + end + end + local m_case, z_case, p_case, m_mapping, z_mapping, p_mapping, char, byte, n = { }, { }, { }, { }, { }, { }, { }, { }, 0 + local nm, nz, np = 0, 0, 0 + for sc in utfcharacters(str) do + local b = utfbyte(sc) + if b >= digitsoffset then + if n == 0 then + -- we 
need to force number to the top + z_case[1] = 0 + m_case[1] = 0 + p_case[1] = 0 + char[1] = sc + byte[1] = 0 + m_mapping[1] = 0 + z_mapping[1] = 0 + p_mapping[1] = 0 + n = 2 + else + n = n + 1 + end + z_case[n] = b + m_case[n] = b + p_case[n] = b + char[n] = sc + byte[n] = b + nm = nm + 1 + nz = nz + 1 + np = np + 1 + m_mapping[nm] = b + z_mapping[nz] = b + p_mapping[np] = b + else + n = n + 1 + local l = lower[sc] + l = l and utfbyte(l) or lccodes[b] + if type(l) == "table" then + l = l[1] -- there are currently no tables in lccodes but it can be some, day + end + z_case[n] = l + if l ~= b then + m_case[n] = l - 1 + p_case[n] = l + 1 + else + m_case[n] = l + p_case[n] = l + end + char[n], byte[n] = sc, b + local fs = fscodes[b] or b + local msc = m_mappings[sc] + if msc ~= noorder then + if not msc then + msc = m_mappings[fs] + end + for i=1,#msc do + nm = nm + 1 + m_mapping[nm] = msc[i] + end + end + local zsc = z_mappings[sc] + if zsc ~= noorder then + if not zsc then + zsc = z_mappings[fs] + end + for i=1,#zsc do + nz = nz + 1 + z_mapping[nz] = zsc[i] + end + end + local psc = p_mappings[sc] + if psc ~= noorder then + if not psc then + psc = p_mappings[fs] + end + for i=1,#psc do + np = np + 1 + p_mapping[np] = psc[i] + end + end + end + end + -- -- only those needed that are part of a sequence + -- + -- local b = byte[1] + -- if b then + -- -- we set them to the first split code (korean) + -- local fs = fscodes[b] or b + -- if #m_mapping == 0 then + -- m_mapping = { m_mappings[fs][1] } + -- end + -- if #z_mapping == 0 then + -- z_mapping = { z_mappings[fs][1] } + -- end + -- if #p_mapping == 0 then + -- p_mapping = { p_mappings[fs][1] } + -- end + -- end + local t = { + ch = char, + uc = byte, + mc = m_case, + zc = z_case, + pc = p_case, + mm = m_mapping, + zm = z_mapping, + pm = p_mapping, + } + + return t +end + +local function packch(entry) + local split = entry.split + if #split > 0 then -- useless test + local t = { } + for i=1,#split do + local tt, li = { }, split[i].ch + for j=1,#li do + local lij = li[j] + tt[j] = utfbyte(lij) > ignoredoffset and "[]" or lij + end + t[i] = concat(tt) + end + return concat(t," + ") + else + local t, li = { }, split.ch + for j=1,#li do + local lij = li[j] + t[j] = utfbyte(lij) > ignoredoffset and "[]" or lij + end + return concat(t) + end +end + +local function packuc(entry) + local split = entry.split + if #split > 0 then -- useless test + local t = { } + for i=1,#split do + t[i] = concat(split[i].uc, " ") + end + return concat(t," + ") + else + return concat(split.uc," ") + end +end + +function sorters.sort(entries,cmp) + if trace_tests or trace_methods then + local nofentries = #entries + report_sorters("entries: %s, language: %s, method: %s, digits: %s",nofentries,language,method,tostring(digits)) + for i=1,nofentries do + report_sorters("entry %s",table.serialize(entries[i].split,i,true,true,true)) + end + end + if trace_tests then + sort(entries,function(a,b) + local r = cmp(a,b) + local e = (not r and "?") or (r<0 and "<") or (r>0 and ">") or "=" + report_sorters("%s %s %s | %s %s %s",packch(a),e,packch(b),packuc(a),e,packuc(b)) + return r == -1 + end) + local s + for i=1,#entries do + local entry = entries[i] + local letter, first = firstofsplit(entry) + if first == s then + first = " " + else + s = first + report_sorters(">> %C (%C)",first,letter) + end + report_sorters(" %s | %s",packch(entry),packuc(entry)) + end + else + sort(entries,function(a,b) + return cmp(a,b) == -1 + end) + end +end diff --git a/tex/context/base/sort-lan.lua 
b/tex/context/base/sort-lan.lua index d2fa276d7..e0c6376dc 100644 --- a/tex/context/base/sort-lan.lua +++ b/tex/context/base/sort-lan.lua @@ -1,925 +1,925 @@ -if not modules then modules = { } end modules ['sort-lan'] = { - version = 1.001, - comment = "companion to sort-lan.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", - dataonly = true, -} - --- todo: look into uts#10 (2012) ... some experiments ... something --- to finish in winter. - --- Many vectors were supplied by Wolfgang Schuster and Philipp --- Gesang. However this is a quite adapted and reformatted variant --- so it needs some checking. Other users provides tables and --- corrections as well. - -local utfchar, utfbyte = utf.char, utf.byte -local sorters = sorters -local definitions = sorters.definitions -local replacementoffset = sorters.constants.replacementoffset -local variables = interfaces.variables - -definitions["default"] = { - method = variables.before, - replacements = { - -- no replacements - }, - entries = { - ["a"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d", ["e"] = "e", - ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", - ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", - ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["t"] = "t", - ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", - ["z"] = "z", - }, - orders = { - "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", - "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", - "u", "v", "w", "x", "y", "z", - }, - lower = { - -- no replacements - }, - upper = { - -- no replacements - } -} - -sorters.setlanguage("default") - --- english - -definitions["en"] = { parent = "default" } - --- dutch - -definitions['nl'] = { - parent = 'default', - replacements = { - { "ij", 'y' }, { "IJ", 'Y' }, - }, -} - --- French - -definitions['fr'] = { parent = 'default' } - --- German (by Wolfgang Schuster) - --- DIN 5007-1 - -definitions['DIN 5007-1'] = { parent = 'default' } - --- DIN 5007-2 - -definitions['DIN 5007-2'] = { - parent = 'default', - replacements = { - { "ä", 'ae' }, { "Ä", 'Ae' }, - { "ö", 'oe' }, { "Ö", 'Oe' }, - { "ü", 'ue' }, { "Ãœ", 'Ue' }, - }, -} - --- Duden - -definitions['Duden'] = { - parent = 'default', - replacements = { { "ß", 's' } }, -} - --- definitions['de'] = { parent = 'default' } -- new german - -definitions['de'] = { - parent = 'default', - replacements = { - { "ä", 'ae' }, { "Ä", 'Ae' }, - { "ö", 'oe' }, { "Ö", 'Oe' }, - { "ü", 'ue' }, { "Ãœ", 'Ue' }, - { "ß", 's' }, - }, -} - -definitions['deo'] = { parent = 'de' } -- old german - -definitions['de-DE'] = { parent = 'de' } -- german - Germany -definitions['de-CH'] = { parent = 'de' } -- german - Swiss - --- german - Austria - -definitions['de-AT'] = { - entries = { - ["a"] = "a", ["ä"] = "ä", ["b"] = "b", ["c"] = "c", ["d"] = "d", - ["e"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", - ["j"] = "j", ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", - ["o"] = "o", ["ö"] = "ö", ["p"] = "p", ["q"] = "q", ["r"] = "r", - ["s"] = "s", ["t"] = "t", ["u"] = "u", ["ü"] = "ü", ["v"] = "v", - ["w"] = "w", ["x"] = "x", ["y"] = "y", ["z"] = "z", - }, - orders = { - "a", "ä", "b", "c", "d", "e", "f", "g", "h", "i", - "j", "k", "l", "m", "n", "o", "ö", "p", "q", "r", - "s", "t", "u", "ü", "v", "w", "x", "y", "z", - }, -} - --- finish (by Wolfgang Schuster) - -definitions['fi'] = { - entries = { - ["a"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = 
"d", ["e"] = "e", - ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", - ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", - ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["t"] = "t", - ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", - ["z"] = "z", ["Ã¥"] = "Ã¥", ["ä"] = "ä", ["ö"] = "ö", - }, - orders = { - "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", - "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", - "u", "v", "w", "x", "y", "z", "Ã¥", "ä", "ö", - } -} - --- slovenian by MM: this will change since we need to add accented vowels - -definitions['sl'] = { - entries = { - ["a"] = "a", ["b"] = "b", ["c"] = "c", ["Ä"] = "Ä", ["ć"] = "ć", ["d"] = "d", - ["Ä‘"] = "Ä‘", ["e"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", - ["j"] = "j", ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", - ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["Å¡"] = "Å¡", ["t"] = "t", - ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", ["z"] = "z", - ["ž"] = "ž", - }, - orders = { - "a", "b", "c", "Ä", "ć", "d", "Ä‘", "e", "f", "g", "h", "i", - "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "Å¡", "t", - "u", "v", "w", "x", "y", "z", "ž", - } -} - --- The following data was provided by Philipp Gesang. - -definitions["ru"] = { - entries = { - ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д", - ["е"] = "е", ["Ñ‘"] = "е", ["ж"] = "ж", ["з"] = "з", ["и"] = "и", - ["Ñ–"] = "и", ["й"] = "й", ["к"] = "к", ["л"] = "л", ["м"] = "м", - ["н"] = "н", ["о"] = "о", ["п"] = "п", ["Ñ€"] = "Ñ€", ["Ñ"] = "Ñ", - ["Ñ‚"] = "Ñ‚", ["у"] = "у", ["Ñ„"] = "Ñ„", ["Ñ…"] = "Ñ…", ["ц"] = "ц", - ["ч"] = "ч", ["ш"] = "ш", ["щ"] = "щ", ["ÑŠ"] = "ÑŠ", ["Ñ‹"] = "Ñ‹", - ["ÑŒ"] = "ÑŒ", ["Ñ£"] = "Ñ£", ["Ñ"] = "Ñ", ["ÑŽ"] = "ÑŽ", ["Ñ"] = "Ñ", - ["ѳ"] = "ѳ", ["ѵ"] = "ѵ", - }, - orders = { - "а", "б", "в", "г", "д", "е", "Ñ‘", "ж", "з", "и", - "Ñ–", "й", "к", "л", "м", "н", "о", "п", "Ñ€", "Ñ", - "Ñ‚", "у", "Ñ„", "Ñ…", "ц", "ч", "ш", "щ", "ÑŠ", "Ñ‹", - "ÑŒ", "Ñ£", "Ñ", "ÑŽ", "Ñ", "ѳ", "ѵ", - } -} - ---- Basic Ukrainian - -definitions["uk"] = { - entries = { - ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["Ò‘"] = "Ò‘", - ["д"] = "д", ["е"] = "е", ["Ñ”"] = "Ñ”", ["ж"] = "ж", ["з"] = "з", - ["и"] = "и", ["Ñ–"] = "Ñ–", ["Ñ—"] = "Ñ—", ["й"] = "й", ["к"] = "к", - ["л"] = "л", ["м"] = "м", ["н"] = "н", ["о"] = "о", ["п"] = "п", - ["Ñ€"] = "Ñ€", ["Ñ"] = "Ñ", ["Ñ‚"] = "Ñ‚", ["у"] = "у", ["Ñ„"] = "Ñ„", - ["Ñ…"] = "Ñ…", ["ц"] = "ц", ["ч"] = "ч", ["ш"] = "ш", ["щ"] = "щ", - ["ÑŒ"] = "ÑŒ", ["ÑŽ"] = "ÑŽ", ["Ñ"] = "Ñ", - }, - orders = { - "а", "б", "в", "г", "Ò‘", "д", "е", "Ñ”", "ж", "з", "и", "Ñ–", - "Ñ—", "й", "к", "л", "м", "н", "о", "п", "Ñ€", "Ñ", "Ñ‚", "у", - "Ñ„", "Ñ…", "ц", "ч", "ш", "щ", "ÑŒ", "ÑŽ", "Ñ", - } -} - ---- Belarusian - -definitions["be"] = { - entries = { - ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д", - ["е"] = "е", ["Ñ‘"] = "е", ["ж"] = "ж", ["з"] = "з", ["Ñ–"] = "Ñ–", - ["й"] = "й", ["к"] = "к", ["л"] = "л", ["м"] = "м", ["н"] = "н", - ["о"] = "о", ["п"] = "п", ["Ñ€"] = "Ñ€", ["Ñ"] = "Ñ", ["Ñ‚"] = "Ñ‚", - ["у"] = "у", ["Ñž"] = "Ñž", ["Ñ„"] = "Ñ„", ["Ñ…"] = "Ñ…", ["ц"] = "ц", - ["ч"] = "ч", ["ш"] = "ш", ["Ñ‹"] = "Ñ‹", ["ÑŒ"] = "ÑŒ", ["Ñ"] = "Ñ", - ["ÑŽ"] = "ÑŽ", ["Ñ"] = "Ñ", - }, - orders = { - "а", "б", "в", "г", "д", "е", "Ñ‘", "ж", "з", "Ñ–", - "й", "к", "л", "м", "н", "о", "п", "Ñ€", "Ñ", "Ñ‚", - "у", "Ñž", "Ñ„", "Ñ…", "ц", "ч", "ш", "Ñ‹", "ÑŒ", "Ñ", - "ÑŽ", "Ñ", - } -} - ---- Bulgarian - 
-definitions["bg"] = { - entries = { - ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д", - ["е"] = "е", ["ж"] = "ж", ["з"] = "з", ["и"] = "и", ["й"] = "й", - ["к"] = "к", ["a"] = "a", ["л"] = "л", ["a"] = "a", ["м"] = "м", - ["н"] = "н", ["о"] = "о", ["п"] = "п", ["Ñ€"] = "Ñ€", ["Ñ"] = "Ñ", - ["Ñ‚"] = "Ñ‚", ["у"] = "у", ["Ñ„"] = "Ñ„", ["Ñ…"] = "Ñ…", ["ц"] = "ц", - ["ч"] = "ч", ["ш"] = "ш", ["щ"] = "щ", ["ÑŠ"] = "ÑŠ", ["ÑŒ"] = "ÑŒ", - ["ÑŽ"] = "ÑŽ", ["Ñ"] = "Ñ", - }, - orders = { - "а", "б", "в", "г", "д", "е", "ж", "з","и", "й", - "к", "a", "л", "a", "м", "н", "о", "п", "Ñ€", "Ñ", - "Ñ‚", "у", "Ñ„", "Ñ…", "ц", "ч", "ш", "щ", "ÑŠ", "ÑŒ", - "ÑŽ", "Ñ", - } -} - ---- Old Church Slavonic - --- The language symbol “cu†is taken from the Wikipedia subdomain --- cu.wikipedia.org. - -local uk, UK = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) - -definitions["cu"] = { - replacements = { - { "оу", uk }, { "ОУ", UK }, - }, - entries = { - ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д", - ["Ñ”"] = "Ñ”", ["ж"] = "ж", ["Ñ•"] = "Ñ•", ["ꙃ"] = "Ñ•", ["з"] = "з", - ["ê™"] = "з", ["и"] = "и", ["Ñ–"] = "и", ["Ñ—"] = "и", ["Ñ›"] = "Ñ›", - ["к"] = "к", ["л"] = "л", ["м"] = "м", ["н"] = "н", ["о"] = "о", - ["п"] = "п", ["Ñ€"] = "Ñ€", ["Ñ"] = "Ñ", ["Ñ‚"] = "Ñ‚", ["у"] = "у", - ["ѹ"] = "у", ["ꙋ"] = "у", [uk] = "у", ["Ñ„"] = "Ñ„", ["Ñ…"] = "Ñ…", - ["Ñ¡"] = "Ñ¡", ["Ñ¿"] = "Ñ¡", ["ѽ"] = "Ñ¡", ["ê™"] = "Ñ¡", ["ц"] = "ц", - ["ч"] = "ч", ["ш"] = "ш", ["щ"] = "щ", ["ÑŠ"] = "ÑŠ", ["Ñ‹"] = "Ñ‹", - ["ꙑ"] = "Ñ‹", ["ÑŒ"] = "ÑŒ", ["Ñ£"] = "Ñ£", ["ÑŽ"] = "ÑŽ", ["ê™—"] = "ê™—", - ["Ñ¥"] = "Ñ¥", ["ѧ"] = "ѧ", ["Ñ©"] = "Ñ©", ["Ñ«"] = "Ñ«", ["Ñ­"] = "Ñ­", - ["ѯ"] = "ѯ", ["ѱ"] = "ѱ", ["ѳ"] = "ѳ", ["ѵ"] = "ѵ", ["Ñ·"] = "ѵ", - }, - orders = { - "а", "б", "в", "г", "д", "Ñ”", "ж", "Ñ•", "ꙃ", "з", -- DzÄ›lo, U+0292, alternative: dz U+01f3 - "ê™", "и", "Ñ–", "Ñ—", "Ñ›", "к", "л", "м", "н", "о", -- Zemlja - "п", "Ñ€", "Ñ", "Ñ‚", "у", "ѹ", "ꙋ", uk, "Ñ„", "Ñ…", -- U+0478 uk, horizontal ligature, U+0479 uk, vertical ligature - "Ñ¡", "Ñ¿", "ѽ", "ê™", "ц", "ч", "ш", "щ", "ÑŠ", "Ñ‹", -- "Å", U+047f \, U+047d > Omega variants, U+064D / - "ꙑ", "ÑŒ", "Ñ£", "ÑŽ", "ê™—", "Ñ¥", "ѧ", "Ñ©", "Ñ«", "Ñ­", -- Old jery (U+a651) as used e.g. by the OCS Wikipedia. IOTIFIED A - "ѯ", "ѱ", "ѳ", "ѵ", "Ñ·", - }, - upper = { - uk = UK, - }, - lower = { - UK = uk, - } -} - ---- Polish (including the letters q, v, x) Cf. ftp://ftp.gust.org.pl/pub/GUST/bulletin/03/02-bl.pdf. - -definitions["pl"] = { - entries = { - ["a"] = "a", ["Ä…"] = "Ä…", ["b"] = "b", ["c"] = "c", ["ć"] = "ć", - ["d"] = "d", ["e"] = "e", ["Ä™"] = "Ä™", ["f"] = "f", ["g"] = "g", - ["h"] = "h", ["i"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", - ["Å‚"] = "Å‚", ["m"] = "m", ["n"] = "n", ["Å„"] = "Å„", ["o"] = "o", - ["ó"] = "ó", ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", - ["Å›"] = "Å›", ["t"] = "t", ["u"] = "u", ["v"] = "v", ["w"] = "w", - ["x"] = "x", ["y"] = "y", ["z"] = "z", ["ź"] = "ź", ["ż"] = "ż", - }, - orders = { - "a", "Ä…", "b", "c", "ć", "d", "e", "Ä™", "f", "g", - "h", "i", "j", "k", "l", "Å‚", "m", "n", "Å„", "o", - "ó", "p", "q", "r", "s", "Å›", "t", "u", "v", "w", - "x", "y", "z", "ź", "ż", - }, -} - --- Czech, modified to treat quantities and other secondary characteristics indifferently. Cf. 
--- http://racek.vlada.cz/usneseni/usneseni_webtest.nsf/WebGovRes/0AD8FEF4CC04B7A4C12571B6006D69D0?OpenDocument --- (2.4.3; via ) - -local ch, CH = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) - -definitions["cz"] = { - replacements = { - { "ch", ch }, { "CH", CH } - }, - entries = { - ["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["Ä"] = "Ä", - ["d"] = "d", ["Ä"] = "d", ["e"] = "e", ["é"] = "e", ["Ä›"] = "e", - ["f"] = "f", ["g"] = "g", ["h"] = "h", [ch] = "ch", ["i"] = "i", - ["í"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", ["m"] = "m", - ["n"] = "n", ["ň"] = "n", ["o"] = "o", ["ó"] = "o", ["p"] = "p", - ["q"] = "q", ["r"] = "r", ["Å™"] = "Å™", ["s"] = "s", ["Å¡"] = "Å¡", - ["t"] = "t", ["Å¥"] = "t", ["u"] = "u", ["ú"] = "u", ["ů"] = "u", - ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", ["ý"] = "y", - ["z"] = "z", ["ž"] = "ž", - }, - orders = { - "a", "á", "b", "c", "Ä", "d", "Ä", "e", "é", "Ä›", - "f", "g", "h", ch, "i", "í", "j", "k", "l", "m", - "n", "ň", "o", "ó", "p", "q", "r", "Å™", "s", "Å¡", - "t", "Å¥", "u", "ú", "ů", "v", "w", "x", "y", "ý", - "z", "ž", - }, - upper = { - ch = CH, - }, - lower = { - CH = ch, - } -} - -definitions["cs"] = { parent = "cz" } - ---- Slovak. - --- Vowel and consonant quantities, "Ä", "ľ", "ň", "Å¥", "ô", and "ä" are treated --- indifferently as their base character, as in my dictionary. If you prefer them --- to affect collation order, then use the values given in the comments. We could --- define an additional vector for that. - -local dz, DZ = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) -local dzh, DZH = utfchar(replacementoffset + 2), utfchar(replacementoffset + 12) -local ch, CH = utfchar(replacementoffset + 3), utfchar(replacementoffset + 13) - -definitions["sk"] = { - replacements = { - { "dz", dz }, { "dz", DZ }, - { "dž", dzh }, { "dž", DZH }, - { "ch", ch }, { "ch", CH }, - }, - entries = { - ["a"] = "a", ["á"] = "a", ["ä"] = "a", ["b"] = "b", ["c"] = "c", - ["Ä"] = "Ä", ["d"] = "d", ["Ä"] = "d", [dz] = "dz", [dzh] = "dž", - ["e"] = "e", ["é"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", - [ch] = "ch", ["i"] = "i", ["í"] = "i", ["j"] = "j", ["k"] = "k", - ["l"] = "l", ["ĺ"] = "l", ["ľ"] = "l", ["m"] = "m", ["n"] = "n", - ["ň"] = "n", ["o"] = "o", ["ó"] = "o", ["ô"] = "o", ["p"] = "p", - ["q"] = "q", ["r"] = "r", ["Å•"] = "r", ["s"] = "s", ["Å¡"] = "Å¡", - ["t"] = "t", ["Å¥"] = "t", ["u"] = "u", ["ú"] = "u", ["v"] = "v", - ["w"] = "w", ["x"] = "x", ["y"] = "y", ["ý"] = "y", ["z"] = "z", - ["ž"] = "ž", - }, - orders = { - "a", "á", "ä", "b", "c", "Ä", "d", "Ä", dz, dzh, - "e", "é", "f", "g", "h", ch, "i", "í", "j", "k", - "l", "ĺ", "ľ", "m", "n", "ň", "o", "ó", "ô", "p", - "q", "r", "Å•", "s", "Å¡", "t", "Å¥", "u", "ú", "v", - "w", "x", "y", "ý", "z", "ž", - }, - upper = { - dz = DZ, dzh = DZH, ch = CH, - }, - lower = { - DZ = dz, DZH = dzh, CH = ch, - } -} - ---- Croatian - -local dzh, DZH = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) -local lj, LJ = utfchar(replacementoffset + 2), utfchar(replacementoffset + 12) -local nj, NJ = utfchar(replacementoffset + 3), utfchar(replacementoffset + 13) - -definitions["hr"] = { - replacements = { - { "dž", dzh }, { "DŽ", DZH }, - { "lj", lj }, { "LJ", LJ }, - { "nj", nj }, { "NJ", NJ }, - }, - entries = { - ["a"] = "a", ["b"] = "b", ["c"] = "c", ["Ä"] = "Ä", ["ć"] = "ć", - ["d"] = "d", [dzh] = "dž", ["Ä‘"] = "Ä‘", ["e"] = "e", ["f"] = "f", - ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", ["k"] = "k", - ["l"] = "l", [lj] = 
"lj", ["m"] = "m", ["n"] = "n", [nj] = "nj", - ["o"] = "o", ["p"] = "p", ["r"] = "r", ["s"] = "s", ["Å¡"] = "Å¡", - ["t"] = "t", ["u"] = "u", ["v"] = "v", ["z"] = "z", ["ž"] = "ž", - }, - orders = { - "a", "b", "c", "Ä", "ć", "d", dzh, "Ä‘", "e", "f", - "g", "h", "i", "j", "k", "l", lj, "m", "n", nj, - "o", "p", "r", "s", "Å¡", "t", "u", "v", "z", "ž", - }, - upper = { - dzh = DZH, lj = LJ, nj = NJ, - }, - lower = { - DZH = dzh, LJ = lj, NJ = nj, - } -} - - ---- Serbian - -definitions["sr"] = { - entries = { - ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д", - ["Ñ’"] = "Ñ’", ["е"] = "е", ["ж"] = "ж", ["з"] = "з", ["и"] = "и", - ["ј"] = "ј", ["к"] = "к", ["л"] = "л", ["Ñ™"] = "Ñ™", ["м"] = "м", - ["н"] = "н", ["Ñš"] = "Ñš", ["о"] = "о", ["п"] = "п", ["Ñ€"] = "Ñ€", - ["Ñ"] = "Ñ", ["Ñ‚"] = "Ñ‚", ["Ñ›"] = "Ñ›", ["у"] = "у", ["Ñ„"] = "Ñ„", - ["Ñ…"] = "Ñ…", ["ц"] = "ц", ["ч"] = "ч", ["ÑŸ"] = "ÑŸ", - ["ш"] = "ш", - }, - orders = { - "а", "б", "в", "г", "д", "Ñ’", "е", "ж", "з", "и", - "ј", "к", "л", "Ñ™", "м", "н", "Ñš", "о", "п", "Ñ€", - "Ñ", "Ñ‚", "Ñ›", "у", "Ñ„", "Ñ…", "ц", "ч", "ÑŸ", "ш", - } -} - ---- Transliteration: Russian|ISO9-1995 - --- Keeping the same collation order as Russian (v.s.). --- Matches the tables from: --- http://bitbucket.org/phg/transliterator/src/tip/tex/context/third/transliterator/trans_tables_iso9.lua - -local yer = utfchar(replacementoffset + 1) - -definitions["ru-iso9"] = { - replacements = { - { "''", yer }, - }, - entries = { - ["a"] = "a", ["b"] = "b", ["v"] = "v", ["g"] = "g", ["d"] = "d", - ["e"] = "e", ["ë"] = "ë", ["ž"] = "ž", ["z"] = "z", ["i"] = "i", - ["ì"] = "ì", ["j"] = "j", ["k"] = "k", ["l"] = "l", ["m"] = "m", - ["n"] = "n", ["o"] = "o", ["p"] = "p", ["r"] = "r", ["s"] = "s", - ["t"] = "t", ["u"] = "u", ["f"] = "f", ["h"] = "h", ["c"] = "c", - ["Ä"] = "Ä", ["Å¡"] = "Å¡", ["Å"] = "Å", ["ʺ"] = "ʺ", [yer] = "ʺ", - ["y"] = "y", ["ʹ"] = "ʹ", ["'"] = "ʹ", ["Ä›"] = "Ä›", ["è"] = "è", - ["û"] = "û", ["â"] = "â", ["û"] = "û", ["â"] = "â", - }, - orders = { - "a", "b", "v", "g", "d", "e", "ë", "ž", "z", "i", - "ì", "j", "k", "l", "m", "n", "o", "p", "r", "s", - "t", "u", "f", "h", "c", "Ä", "Å¡", "Å", "ʺ", yer, - "y", "ʹ", "'", "Ä›", "è", "û", "â", "û", "â", - } -} - ---- Transliteration: Old Slavonic|scientific - --- Matches the tables from: --- http://bitbucket.org/phg/transliterator/src/tip/tex/context/third/transliterator/trans_tables_scntfc.lua - -local uk, UK = utfchar(replacementoffset + 1), utfchar(replacementoffset + 21) -local tshe, TSHE = utfchar(replacementoffset + 2), utfchar(replacementoffset + 22) -local sht, SHT = utfchar(replacementoffset + 3), utfchar(replacementoffset + 23) -local ju, JU = utfchar(replacementoffset + 4), utfchar(replacementoffset + 24) -local ja, JA = utfchar(replacementoffset + 5), utfchar(replacementoffset + 25) -local je, JE = utfchar(replacementoffset + 6), utfchar(replacementoffset + 26) -local ijus, IJUS = utfchar(replacementoffset + 7), utfchar(replacementoffset + 27) -local ibigjus, IBIGJUS = utfchar(replacementoffset + 8), utfchar(replacementoffset + 28) -local xi, XI = utfchar(replacementoffset + 9), utfchar(replacementoffset + 29) -local psi, PSI = utfchar(replacementoffset + 10), utfchar(replacementoffset + 30) -local theta, THETA = utfchar(replacementoffset + 11), utfchar(replacementoffset + 31) -local shch, SHCH = utfchar(replacementoffset + 12), utfchar(replacementoffset + 32) - -definitions["ocs-scn"] = { - replacements = { - { "ou", uk }, { "OU", UK }, - { "g’", tshe }, { "G’", TSHE }, - { 
"Å¡t", sht }, { "Å T", SHT }, - { "ju", ju }, { "JU", JU }, - { "ja", ja }, { "JA", JA }, - { "je", je }, { "JE", JE }, - { "jÄ™", ijus }, { "JĘ", IJUS }, - { "jÇ«", ibigjus }, { "JǪ", IBIGJUS }, - { "ks", xi }, { "KS", XI }, - { "ps", psi }, { "PS", PSI }, - { "th", theta }, { "TH", THETA }, - { "Å¡Ä", shch }, { "Å ÄŒ", SHCH }, - }, - entries = { - ["a"] = "a", ["b"] = "b", ["v"] = "v", ["g"] = "g", ["d"] = "d", - ["e"] = "e", ["ž"] = "ž", ["Ê’"] = "Ê’", ["z"] = "z", ["i"] = "i", - ["ï"] = "ï", [tshe] = "g’", ["k"] = "k", ["l"] = "l", ["m"] = "m", - ["n"] = "n", ["o"] = "o", ["p"] = "p", ["r"] = "r", ["s"] = "s", - ["t"] = "t", ["u"] = "u", ["f"] = "f", ["x"] = "x", ["o"] = "o", - ["c"] = "c", ["Ä"] = "Ä", ["Å¡"] = "Å¡", [sht] = "Å¡t", [shch] = "Å¡Ä", - ["ÑŠ"] = "ÑŠ", ["y"] = "y", [uk] = "y", ["ÑŒ"] = "ÑŒ", ["Ä›"] = "Ä›", - [ju] = "ju", [ja] = "ja", [je] = "je", ["Ä™"] = "Ä™", [ijus] = "jÄ™", - ["Ç«"] = "Ç«", [ibigjus] = "jÇ«", [xi] = "ks", [psi] = "ps", [theta] = "th", - ["ü"] = "ü", - }, - orders = { - "a", "b", "v", "g", "d", "e", "ž", "Ê’", "z", "i", "ï", - tshe, "k", "l", "m", "n", "o", "p", "r", "s", "t", "u", - "f", "x", "o", "c", "Ä", "Å¡", sht, shch, "ÑŠ", "y", uk, - "ÑŒ", "Ä›", ju, ja, je, "Ä™", ijus, "Ç«", ibigjus, xi, psi, - theta, "ü", - }, - upper = { - uk = UK, tshe = TSHE, sht = SHT, ju = JU, ja = JA, je = JE, ijus = IJUS, ibigjus = IBIGJUS, xi = XI, psi = PSI, theta = THETA, shch = SHCH, - }, - lower = { - UK = uk, TSHE = tshe, SHT = sht, JU = ju, JA = ja, JE = je, IJUS = ijus, IBIGJUS = ibigjus, XI = xi, PSI = psi, THETA = theta, SHCH = shch, - }, -} - - ---- Norwegian (bokmÃ¥l). - -definitions["no"] = { - entries = { - ["a"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d", ["e"] = "e", - ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", - ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", - ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["t"] = "t", - ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", - ["z"] = "z", ["æ"] = "æ", ["ø"] = "ø", ["Ã¥"] = "Ã¥", - }, - orders = { - "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", - "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", - "u", "v", "w", "x", "y", "z", "æ", "ø", "Ã¥", - } -} - ---- Danish (-> Norwegian). - -definitions["da"] = { parent = "no" } - ---- Swedish - -definitions["sv"] = { - entries = { - ["a"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d", ["e"] = "e", - ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", - ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", - ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["t"] = "t", - ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", - ["z"] = "z", ["Ã¥"] = "Ã¥", ["ä"] = "ä", ["ö"] = "ö", - }, - orders = { - "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", - "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", - "u", "v", "w", "x", "y", "z", "Ã¥", "ä", "ö", - } -} - ---- Icelandic - --- Treating quantities as allographs. 
- -definitions["is"] = { - entries = { - ["a"] = "a", ["á"] = "a", ["b"] = "b", ["d"] = "d", ["ð"] = "ð", - ["e"] = "e", ["é"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", - ["i"] = "i", ["í"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", - ["m"] = "m", ["n"] = "n", ["o"] = "o", ["ó"] = "o", ["p"] = "p", - ["r"] = "r", ["s"] = "s", ["t"] = "t", ["u"] = "u", ["ú"] = "u", - ["v"] = "v", ["x"] = "x", ["y"] = "y", ["ý"] = "y", ["þ"] = "þ", - ["æ"] = "æ", ["ö"] = "ö", - }, - orders = { - "a", "á", "b", "d", "ð", "e", "é", "f", "g", "h", - "i", "í", "j", "k", "l", "m", "n", "o", "ó", "p", - "r", "s", "t", "u", "ú", "v", "x", "y", "ý", "þ", - "æ", "ö", - }, -} - ---- Greek - -definitions["gr"] = { - entries = { - ["α"] = "α", ["ά"] = "α", ["á½°"] = "α", ["ᾶ"] = "α", ["á¾³"] = "α", - ["á¼€"] = "α", ["á¼"] = "α", ["ἄ"] = "α", ["ἂ"] = "α", ["ἆ"] = "α", - ["á¼"] = "α", ["á¼…"] = "α", ["ἃ"] = "α", ["ἇ"] = "α", ["á¾"] = "α", - ["á¾´"] = "α", ["á¾²"] = "α", ["á¾·"] = "α", ["ᾄ"] = "α", ["ᾂ"] = "α", - ["á¾…"] = "α", ["ᾃ"] = "α", ["ᾆ"] = "α", ["ᾇ"] = "α", ["β"] = "β", - ["γ"] = "γ", ["δ"] = "δ", ["ε"] = "ε", ["έ"] = "ε", ["á½²"] = "ε", - ["á¼"] = "ε", ["á¼”"] = "ε", ["á¼’"] = "ε", ["ἑ"] = "ε", ["ἕ"] = "ε", - ["ἓ"] = "ε", ["ζ"] = "ζ", ["η"] = "η", ["η"] = "η", ["ή"] = "η", - ["á½´"] = "η", ["ῆ"] = "η", ["ῃ"] = "η", ["á¼ "] = "η", ["ἤ"] = "η", - ["á¼¢"] = "η", ["ἦ"] = "η", ["á¾"] = "η", ["ἡ"] = "η", ["á¼¥"] = "η", - ["á¼£"] = "η", ["ἧ"] = "η", ["ᾑ"] = "η", ["á¿„"] = "η", ["á¿‚"] = "η", - ["ῇ"] = "η", ["á¾”"] = "η", ["á¾’"] = "η", ["ᾕ"] = "η", ["ᾓ"] = "η", - ["á¾–"] = "η", ["á¾—"] = "η", ["θ"] = "θ", ["ι"] = "ι", ["ί"] = "ι", - ["ὶ"] = "ι", ["á¿–"] = "ι", ["á¼°"] = "ι", ["á¼´"] = "ι", ["á¼²"] = "ι", - ["ἶ"] = "ι", ["á¼±"] = "ι", ["á¼µ"] = "ι", ["á¼³"] = "ι", ["á¼·"] = "ι", - ["ÏŠ"] = "ι", ["Î"] = "ι", ["á¿’"] = "ι", ["á¿—"] = "ι", ["κ"] = "κ", - ["λ"] = "λ", ["μ"] = "μ", ["ν"] = "ν", ["ξ"] = "ξ", ["ο"] = "ο", - ["ÏŒ"] = "ο", ["ὸ"] = "ο", ["á½€"] = "ο", ["ὄ"] = "ο", ["ὂ"] = "ο", - ["á½"] = "ο", ["á½…"] = "ο", ["ὃ"] = "ο", ["Ï€"] = "Ï€", ["Ï"] = "Ï", - ["ῤ"] = "ῤ", ["á¿¥"] = "á¿¥", ["σ"] = "σ", ["Ï‚"] = "Ï‚", ["Ï„"] = "Ï„", - ["Ï…"] = "Ï…", ["Ï"] = "Ï…", ["ὺ"] = "Ï…", ["ῦ"] = "Ï…", ["á½"] = "Ï…", - ["á½”"] = "Ï…", ["á½’"] = "Ï…", ["á½–"] = "Ï…", ["ὑ"] = "Ï…", ["ὕ"] = "Ï…", - ["ὓ"] = "Ï…", ["á½—"] = "Ï…", ["Ï‹"] = "Ï…", ["ΰ"] = "Ï…", ["á¿¢"] = "Ï…", - ["ῧ"] = "Ï…", ["φ"] = "φ", ["χ"] = "χ", ["ψ"] = "ω", ["ω"] = "ω", - ["ÏŽ"] = "ω", ["á½¼"] = "ω", ["ῶ"] = "ω", ["ῳ"] = "ω", ["á½ "] = "ω", - ["ὤ"] = "ω", ["á½¢"] = "ω", ["ὦ"] = "ω", ["á¾ "] = "ω", ["ὡ"] = "ω", - ["á½¥"] = "ω", ["á½£"] = "ω", ["ὧ"] = "ω", ["ᾡ"] = "ω", ["á¿´"] = "ω", - ["ῲ"] = "ω", ["á¿·"] = "ω", ["ᾤ"] = "ω", ["á¾¢"] = "ω", ["á¾¥"] = "ω", - ["á¾£"] = "ω", ["ᾦ"] = "ω", ["ᾧ"] = "ω", - }, - orders = { - "α", "ά", "á½°", "ᾶ", "á¾³", "á¼€", "á¼", "ἄ", "ἂ", "ἆ", - "á¼", "á¼…", "ἃ", "ἇ", "á¾", "á¾´", "á¾²", "á¾·", "ᾄ", "ᾂ", - "á¾…", "ᾃ", "ᾆ", "ᾇ", "β", "γ", "δ", "ε", "έ", "á½²", - "á¼", "á¼”", "á¼’", "ἑ", "ἕ", "ἓ", "ζ", "η", "η", "ή", - "á½´", "ῆ", "ῃ", "á¼ ", "ἤ", "á¼¢", "ἦ", "á¾", "ἡ", "á¼¥", - "á¼£", "ἧ", "ᾑ", "á¿„", "á¿‚", "ῇ", "á¾”", "á¾’", "ᾕ", "ᾓ", - "á¾–", "á¾—", "θ", "ι", "ί", "ὶ", "á¿–", "á¼°", "á¼´", "á¼²", - "ἶ", "á¼±", "á¼µ", "á¼³", "á¼·", "ÏŠ", "Î", "á¿’", "á¿—", "κ", - "λ", "μ", "ν", "ξ", "ο", "ÏŒ", "ὸ", "á½€", "ὄ", "ὂ", - "á½", "á½…", "ὃ", "Ï€", "Ï", "ῤ", "á¿¥", "σ", "Ï‚", "Ï„", - "Ï…", "Ï", "ὺ", "ῦ", "á½", "á½”", "á½’", "á½–", "ὑ", "ὕ", - "ὓ", "á½—", "Ï‹", "ΰ", "á¿¢", "ῧ", "φ", "χ", "ψ", "ω", - "ÏŽ", "á½¼", "ῶ", "ῳ", "á½ ", "ὤ", "á½¢", "ὦ", 
"á¾ ", "ὡ", - "á½¥", "á½£", "ὧ", "ᾡ", "á¿´", "ῲ", "á¿·", "ᾤ", "á¾¢", "á¾¥", - "á¾£", "ᾦ", "ᾧ", - }, -} - ---- Latin - --- Treating the post-classical fricatives “j†and “v†as “i†and “u†--- respectively. - -definitions["la"] = { - replacements = { - { "æ", "ae" }, { "Æ", "AE" }, - }, - entries = { - ["a"] = "a", ["Ä"] = "a", ["ă"] = "a", ["b"] = "b", ["c"] = "c", - ["d"] = "d", ["e"] = "e", ["Ä“"] = "e", ["Ä•"] = "e", ["f"] = "f", - ["g"] = "g", ["h"] = "h", ["i"] = "i", ["Ä«"] = "i", ["Ä­"] = "i", - ["j"] = "i", ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", - ["o"] = "o", ["Å"] = "o", ["Å"] = "o", ["p"] = "p", ["q"] = "q", - ["r"] = "r", ["s"] = "s", ["t"] = "t", ["u"] = "u", ["Å«"] = "u", - ["Å­"] = "u", ["v"] = "u", ["w"] = "w", ["x"] = "x", ["y"] = "y", - ["ȳ"] = "y", ["y̆"] = "y", ["z"] = "z", - }, - orders = { - "a", "Ä", "ă", "b", "c", "d", "e", "Ä“", "Ä•", "f", - "g", "h", "i", "Ä«", "Ä­", "j", "k", "l", "m", "n", - "o", "Å", "Å", "p", "q", "r", "s", "t", "u", "Å«", - "Å­", "v", "w", "x", "y", "ȳ", "y̆", "z", - } -} - ---- Italian - -definitions["it"] = { - entries = { - ["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d", - ["e"] = "e", ["é"] = "e", ["è"] = "e", ["f"] = "f", ["g"] = "g", - ["h"] = "h", ["i"] = "i", ["í"] = "i", ["ì"] = "i", ["j"] = "i", - ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", - ["ó"] = "o", ["ò"] = "o", ["p"] = "p", ["q"] = "q", ["r"] = "r", - ["s"] = "s", ["t"] = "t", ["u"] = "u", ["ú"] = "u", ["ù"] = "u", - ["v"] = "u", ["w"] = "w", ["x"] = "x", ["y"] = "y", ["z"] = "z", - }, - orders = { - "a", "á", "b", "c", "d", "e", "é", "è", "f", "g", - "h", "i", "í", "ì", "j", "k", "l", "m", "n", "o", - "ó", "ò", "p", "q", "r", "s", "t", "u", "ú", "ù", - "v", "w", "x", "y", "z", - } -} - ---- Romanian - -definitions["ro"] = { - entries = { - ["a"] = "a", ["ă"] = "ă", ["â"] = "â", ["b"] = "b", ["c"] = "c", - ["d"] = "d", ["e"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", - ["i"] = "i", ["î"] = "î", ["j"] = "j", ["k"] = "k", ["l"] = "l", - ["m"] = "m", ["n"] = "n", ["o"] = "o", ["p"] = "p", ["q"] = "q", - ["r"] = "r", ["s"] = "s", ["È™"] = "È™", ["t"] = "t", ["È›"] = "È›", - ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", - ["z"] = "z", - }, - orders = { - "a", "ă", "â", "b", "c", "d", "e", "f", "g", "h", - "i", "î", "j", "k", "l", "m", "n", "o", "p", "q", - "r", "s", "È™", "t", "È›", "u", "v", "w", "x", "y", - "z", - } -} - ---- Spanish - -definitions["es"] = { - entries = { - ["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d", - ["e"] = "e", ["é"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", - ["i"] = "i", ["í"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", - ["m"] = "m", ["n"] = "n", ["ñ"] = "ñ", ["o"] = "o", ["ó"] = "o", - ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["t"] = "t", - ["u"] = "u", ["ú"] = "u", ["ü"] = "u", ["v"] = "v", ["w"] = "w", - ["x"] = "x", ["y"] = "y", ["z"] = "z", - }, - orders = { - "a", "á", "b", "c", "d", "e", "é", "f", "g", "h", - "i", "í", "j", "k", "l", "m", "n", "ñ", "o", "ó", - "p", "q", "r", "s", "t", "u", "ú", "ü", "v", "w", - "x", "y", "z", - } -} - ---- Portuguese - -definitions["pt"] = { - entries = { - ["a"] = "a", ["á"] = "a", ["â"] = "a", ["ã"] = "a", ["à"] = "a", - ["b"] = "b", ["c"] = "c", ["ç"] = "c", ["d"] = "d", ["e"] = "e", - ["é"] = "e", ["ê"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", - ["i"] = "i", ["í"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", - ["m"] = "m", ["n"] = "n", ["o"] = "o", ["ó"] = "o", ["ô"] = "o", - ["õ"] = "o", ["p"] = 
"p", ["q"] = "q", ["r"] = "r", ["s"] = "s", - ["t"] = "t", ["u"] = "u", ["ú"] = "u", ["ü"] = "u", ["v"] = "v", - ["w"] = "w", ["x"] = "x", ["y"] = "y", ["z"] = "z", - }, - orders = { - "a", "á", "â", "ã", "à", "b", "c", "ç", "d", "e", - "é", "ê", "f", "g", "h", "i", "í", "j", "k", "l", - "m", "n", "o", "ó", "ô", "õ", "p", "q", "r", "s", - "t", "u", "ú", "ü", "v", "w", "x", "y", "z", - } -} - ---- Lithuanian - -local ch, CH = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) - -definitions["lt"] = { - replacements = { - { "ch", ch }, { "CH", CH} - }, - entries = { - ["a"] = "a", ["Ä…"] = "a", ["b"] = "b", ["c"] = "c", [ch ] = "c", - ["Ä"] = "Ä", ["d"] = "d", ["e"] = "e", ["Ä™"] = "e", ["Ä—"] = "e", - ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", ["į"] = "i", - ["y"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", ["m"] = "m", - ["n"] = "n", ["o"] = "o", ["p"] = "p", ["r"] = "r", ["s"] = "s", - ["Å¡"] = "Å¡", ["t"] = "t", ["u"] = "u", ["ų"] = "u", ["Å«"] = "u", - ["v"] = "v", ["z"] = "z", ["ž"] = "ž", - }, - orders = { - "a", "Ä…", "b", "c", ch, "Ä", "d", "e", "Ä™", "Ä—", - "f", "g", "h", "i", "į", "y", "j", "k", "l", "m", - "n", "o", "p", "r", "s", "Å¡", "t", "u", "ų", "Å«", - "v", "z", "ž", - }, - lower = { - ch = CH, - }, - upper = { - CH = ch, - }, -} - ---- Latvian - -definitions["lv"] = { - entries = { - ["a"] = "a", ["Ä"] = "a", ["b"] = "b", ["c"] = "c", ["Ä"] = "Ä", - ["d"] = "d", ["e"] = "e", ["Ä“"] = "e", ["f"] = "f", ["g"] = "g", - ["Ä£"] = "Ä£", ["h"] = "h", ["i"] = "i", ["Ä«"] = "i", ["j"] = "j", - ["k"] = "k", ["Ä·"] = "Ä·", ["l"] = "l", ["ļ"] = "ļ", ["m"] = "m", - ["n"] = "n", ["ņ"] = "ņ", ["o"] = "o", ["Å"] = "o", ["p"] = "p", - ["r"] = "r", ["Å—"] = "Å—", ["s"] = "s", ["Å¡"] = "Å¡", ["t"] = "t", - ["u"] = "u", ["Å«"] = "u", ["v"] = "v", ["z"] = "z", ["ž"] = "ž", - }, - orders = { - "a", "Ä", "b", "c", "Ä", "d", "e", "Ä“", "f", "g", - "Ä£", "h", "i", "Ä«", "j", "k", "Ä·", "l", "ļ", "m", - "n", "ņ", "o", "Å", "p", "r", "Å—", "s", "Å¡", "t", - "u", "Å«", "v", "z", "ž", - } -} - ---- Hungarian - --- Helpful but disturbing: --- http://en.wikipedia.org/wiki/Hungarian_alphabet#Alphabetical_ordering_.28collation.29 --- (In short: you'd have to analyse word-compounds to realize a correct order --- for sequences like “nnyâ€, “sszâ€, and “zszâ€. 
This is left as an exercise to --- the reader…) - -local cs, CS = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) -local dz, DZ = utfchar(replacementoffset + 2), utfchar(replacementoffset + 12) -local dzs, DZS = utfchar(replacementoffset + 3), utfchar(replacementoffset + 13) -local gy, GY = utfchar(replacementoffset + 4), utfchar(replacementoffset + 14) -local ly, LY = utfchar(replacementoffset + 5), utfchar(replacementoffset + 15) -local ny, NY = utfchar(replacementoffset + 6), utfchar(replacementoffset + 16) -local sz, SZ = utfchar(replacementoffset + 7), utfchar(replacementoffset + 17) -local ty, TY = utfchar(replacementoffset + 8), utfchar(replacementoffset + 18) -local zs, ZS = utfchar(replacementoffset + 9), utfchar(replacementoffset + 19) - -definitions["hu"] = { - replacements = { - { "cs", cs }, { "CS", CS }, - { "dz", dz }, { "DZ", DZ }, - { "dzs", dzs }, { "DZS", DZS }, - { "gy", gy }, { "GY", GY }, - { "ly", ly }, { "LY", LY }, - { "ny", ny }, { "NY", NY }, - { "sz", sz }, { "SZ", SZ }, - { "ty", ty }, { "TY", TY }, - { "zs", zs }, { "ZS", ZS }, - }, - entries = { - ["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", [cs ] = "cs", - ["d"] = "d", [dz ] = "dz", [dzs] = "dzs", ["e"] = "e", ["é"] = "e", - ["f"] = "f", ["g"] = "g", [gy ] = "gy", ["h"] = "h", ["i"] = "i", - ["í"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", [ly ] = "ly", - ["m"] = "m", ["n"] = "n", [ny ] = "ny", ["o"] = "o", ["ó"] = "o", - ["ö"] = "ö", ["Å‘"] = "ö", ["p"] = "p", ["q"] = "q", ["r"] = "r", - ["s"] = "s", [sz ] = "sz", ["t"] = "t", [ty ] = "ty", ["u"] = "u", - ["ú"] = "u", ["ü"] = "ü", ["ű"] = "ü", ["v"] = "v", ["w"] = "w", - ["x"] = "x", ["y"] = "y", ["z"] = "z", [zs ] = "zs", - }, - orders = { - "a", "á", "b", "c", cs, "d", dz, dzs, "e", "é", - "f", "g", gy, "h", "i", "í", "j", "k", "l", ly, - "m", "n", ny, "o", "ó", "ö", "Å‘", "p", "q", "r", - "s", sz, "t", ty, "u", "ú", "ü", "ű", "v", "w", - "x", "y", "z", zs, - }, - lower = { - CS = cs, DZ = dz, DZS = dzs, GY = gy, LY = ly, NY = ny, SZ = sz, TY = ty, ZS = zs, - }, - upper = { - cs = CS, dz = DZ, dzs = DZS, gy = GY, ly = LY, ny = NY, sz = SZ, ty = TY, zs = ZS, - }, -} - ---- Estonian - -definitions["et"] = { - entries = { -- w x y are used for foreign words only - ["a"] = "a", ["b"] = "b", ["d"] = "d", ["e"] = "e", ["f"] = "f", - ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", ["k"] = "k", - ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", ["p"] = "p", - ["r"] = "r", ["s"] = "s", ["Å¡"] = "Å¡", ["z"] = "z", ["ž"] = "ž", - ["t"] = "t", ["u"] = "u", ["v"] = "v", ["w"] = "v", ["õ"] = "õ", - ["ä"] = "ä", ["ö"] = "ö", ["ü"] = "ü", ["x"] = "x", ["y"] = "y", - }, - orders = { - "a", "b", "d", "e", "f", "g", "h", "i", "j", "k", - "l", "m", "n", "o", "p", "r", "s", "Å¡", "z", "ž", - "t", "u", "v", "w", "õ", "ä", "ö", "ü", "x", "y", - } -} - ---- Korean - -local fschars = characters.fschars - -local function firstofsplit(first) - local fs = fschars[first] or first -- leadconsonant - return fs, fs -- entry, tag -end - -definitions["kr"] = { - firstofsplit = firstofsplit, - orders = { - "ㄱ", "ã„´", "ã„·", "ㄹ", "ã…", "ã…‚", "ã……", "ã…‡", "ã…ˆ", "ã…Š", "ã…‹", "ã…Œ", "ã…", "ã…Ž", - "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", - "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", - } -} - --- Japanese - -definitions["jp"] = { - replacements = { - { "ã", "ã‚" }, { "ãƒ", "ã„" }, - { "ã…", "ã†" }, { "ã‡", "ãˆ" }, - { "ã‰", "ãŠ" }, { "ã£", "ã¤" }, - { "ゃ", "ã‚„" }, { "ã‚…", "ゆ" }, - { "ょ", "よ" }, - }, - entries = 
{ - ["ã‚"] = "ã‚", ["ã„"] = "ã„", ["ã†"] = "ã†", ["ãˆ"] = "ãˆ", ["ãŠ"] = "ãŠ", - ["ã‹"] = "ã‹", ["ã"] = "ã", ["ã"] = "ã", ["ã‘"] = "ã‘", ["ã“"] = "ã“", - ["ã•"] = "ã•", ["ã—"] = "ã—", ["ã™"] = "ã™", ["ã›"] = "ã›", ["ã"] = "ã", - ["ãŸ"] = "ãŸ", ["ã¡"] = "ã¡", ["ã¤"] = "ã¤", ["ã¦"] = "ã¦", ["ã¨"] = "ã¨", - ["ãª"] = "ãª", ["ã«"] = "ã«", ["ã¬"] = "ã¬", ["ã­"] = "ã­", ["ã®"] = "ã®", - ["ã¯"] = "ã¯", ["ã²"] = "ã²", ["ãµ"] = "ãµ", ["ã¸"] = "ã¸", ["ã»"] = "ã»", - ["ã¾"] = "ã¾", ["ã¿"] = "ã¿", ["ã‚€"] = "ã‚€", ["ã‚"] = "ã‚", ["ã‚‚"] = "ã‚‚", - ["ã‚„"] = "ã‚„", ["ゆ"] = "ゆ", ["よ"] = "よ", - ["ら"] = "ら", ["ã‚Š"] = "ã‚Š", ["ã‚‹"] = "ã‚‹", ["ã‚Œ"] = "ã‚Œ", ["ã‚"] = "ã‚", - ["ã‚"] = "ã‚", ["ã‚"] = "ã‚", ["ã‚‘"] = "ã‚‘", ["ã‚’"] = "ã‚’", ["ã‚“"] = "ã‚“", - }, - orders = { - "ã‚", "ã„", "ã†", "ãˆ", "ãŠ", "ã‹", "ã", "ã", "ã‘", "ã“", - "ã•", "ã—", "ã™", "ã›", "ã", "ãŸ", "ã¡", "ã¤", "ã¦", "ã¨", - "ãª", "ã«", "ã¬", "ã­", "ã®", "ã¯", "ã²", "ãµ", "ã¸", "ã»", - "ã¾", "ã¿", "ã‚€", "ã‚", "ã‚‚", "ã‚„", "ゆ", "よ", - "ら", "ã‚Š", "ã‚‹", "ã‚Œ", "ã‚", "ã‚", "ã‚", "ã‚‘", "ã‚’", "ã‚“", - } -} +if not modules then modules = { } end modules ['sort-lan'] = { + version = 1.001, + comment = "companion to sort-lan.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", + dataonly = true, +} + +-- todo: look into uts#10 (2012) ... some experiments ... something +-- to finish in winter. + +-- Many vectors were supplied by Wolfgang Schuster and Philipp +-- Gesang. However this is a quite adapted and reformatted variant +-- so it needs some checking. Other users provides tables and +-- corrections as well. + +local utfchar, utfbyte = utf.char, utf.byte +local sorters = sorters +local definitions = sorters.definitions +local replacementoffset = sorters.constants.replacementoffset +local variables = interfaces.variables + +definitions["default"] = { + method = variables.before, + replacements = { + -- no replacements + }, + entries = { + ["a"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d", ["e"] = "e", + ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", + ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", + ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["t"] = "t", + ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", + ["z"] = "z", + }, + orders = { + "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", + "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", + "u", "v", "w", "x", "y", "z", + }, + lower = { + -- no replacements + }, + upper = { + -- no replacements + } +} + +sorters.setlanguage("default") + +-- english + +definitions["en"] = { parent = "default" } + +-- dutch + +definitions['nl'] = { + parent = 'default', + replacements = { + { "ij", 'y' }, { "IJ", 'Y' }, + }, +} + +-- French + +definitions['fr'] = { parent = 'default' } + +-- German (by Wolfgang Schuster) + +-- DIN 5007-1 + +definitions['DIN 5007-1'] = { parent = 'default' } + +-- DIN 5007-2 + +definitions['DIN 5007-2'] = { + parent = 'default', + replacements = { + { "ä", 'ae' }, { "Ä", 'Ae' }, + { "ö", 'oe' }, { "Ö", 'Oe' }, + { "ü", 'ue' }, { "Ãœ", 'Ue' }, + }, +} + +-- Duden + +definitions['Duden'] = { + parent = 'default', + replacements = { { "ß", 's' } }, +} + +-- definitions['de'] = { parent = 'default' } -- new german + +definitions['de'] = { + parent = 'default', + replacements = { + { "ä", 'ae' }, { "Ä", 'Ae' }, + { "ö", 'oe' }, { "Ö", 'Oe' }, + { "ü", 'ue' }, { "Ãœ", 'Ue' }, + { "ß", 's' }, + }, +} + 
+definitions['deo'] = { parent = 'de' } -- old german + +definitions['de-DE'] = { parent = 'de' } -- german - Germany +definitions['de-CH'] = { parent = 'de' } -- german - Swiss + +-- german - Austria + +definitions['de-AT'] = { + entries = { + ["a"] = "a", ["ä"] = "ä", ["b"] = "b", ["c"] = "c", ["d"] = "d", + ["e"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", + ["j"] = "j", ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", + ["o"] = "o", ["ö"] = "ö", ["p"] = "p", ["q"] = "q", ["r"] = "r", + ["s"] = "s", ["t"] = "t", ["u"] = "u", ["ü"] = "ü", ["v"] = "v", + ["w"] = "w", ["x"] = "x", ["y"] = "y", ["z"] = "z", + }, + orders = { + "a", "ä", "b", "c", "d", "e", "f", "g", "h", "i", + "j", "k", "l", "m", "n", "o", "ö", "p", "q", "r", + "s", "t", "u", "ü", "v", "w", "x", "y", "z", + }, +} + +-- finish (by Wolfgang Schuster) + +definitions['fi'] = { + entries = { + ["a"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d", ["e"] = "e", + ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", + ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", + ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["t"] = "t", + ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", + ["z"] = "z", ["Ã¥"] = "Ã¥", ["ä"] = "ä", ["ö"] = "ö", + }, + orders = { + "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", + "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", + "u", "v", "w", "x", "y", "z", "Ã¥", "ä", "ö", + } +} + +-- slovenian by MM: this will change since we need to add accented vowels + +definitions['sl'] = { + entries = { + ["a"] = "a", ["b"] = "b", ["c"] = "c", ["Ä"] = "Ä", ["ć"] = "ć", ["d"] = "d", + ["Ä‘"] = "Ä‘", ["e"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", + ["j"] = "j", ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", + ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["Å¡"] = "Å¡", ["t"] = "t", + ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", ["z"] = "z", + ["ž"] = "ž", + }, + orders = { + "a", "b", "c", "Ä", "ć", "d", "Ä‘", "e", "f", "g", "h", "i", + "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "Å¡", "t", + "u", "v", "w", "x", "y", "z", "ž", + } +} + +-- The following data was provided by Philipp Gesang. 
+ +definitions["ru"] = { + entries = { + ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д", + ["е"] = "е", ["Ñ‘"] = "е", ["ж"] = "ж", ["з"] = "з", ["и"] = "и", + ["Ñ–"] = "и", ["й"] = "й", ["к"] = "к", ["л"] = "л", ["м"] = "м", + ["н"] = "н", ["о"] = "о", ["п"] = "п", ["Ñ€"] = "Ñ€", ["Ñ"] = "Ñ", + ["Ñ‚"] = "Ñ‚", ["у"] = "у", ["Ñ„"] = "Ñ„", ["Ñ…"] = "Ñ…", ["ц"] = "ц", + ["ч"] = "ч", ["ш"] = "ш", ["щ"] = "щ", ["ÑŠ"] = "ÑŠ", ["Ñ‹"] = "Ñ‹", + ["ÑŒ"] = "ÑŒ", ["Ñ£"] = "Ñ£", ["Ñ"] = "Ñ", ["ÑŽ"] = "ÑŽ", ["Ñ"] = "Ñ", + ["ѳ"] = "ѳ", ["ѵ"] = "ѵ", + }, + orders = { + "а", "б", "в", "г", "д", "е", "Ñ‘", "ж", "з", "и", + "Ñ–", "й", "к", "л", "м", "н", "о", "п", "Ñ€", "Ñ", + "Ñ‚", "у", "Ñ„", "Ñ…", "ц", "ч", "ш", "щ", "ÑŠ", "Ñ‹", + "ÑŒ", "Ñ£", "Ñ", "ÑŽ", "Ñ", "ѳ", "ѵ", + } +} + +--- Basic Ukrainian + +definitions["uk"] = { + entries = { + ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["Ò‘"] = "Ò‘", + ["д"] = "д", ["е"] = "е", ["Ñ”"] = "Ñ”", ["ж"] = "ж", ["з"] = "з", + ["и"] = "и", ["Ñ–"] = "Ñ–", ["Ñ—"] = "Ñ—", ["й"] = "й", ["к"] = "к", + ["л"] = "л", ["м"] = "м", ["н"] = "н", ["о"] = "о", ["п"] = "п", + ["Ñ€"] = "Ñ€", ["Ñ"] = "Ñ", ["Ñ‚"] = "Ñ‚", ["у"] = "у", ["Ñ„"] = "Ñ„", + ["Ñ…"] = "Ñ…", ["ц"] = "ц", ["ч"] = "ч", ["ш"] = "ш", ["щ"] = "щ", + ["ÑŒ"] = "ÑŒ", ["ÑŽ"] = "ÑŽ", ["Ñ"] = "Ñ", + }, + orders = { + "а", "б", "в", "г", "Ò‘", "д", "е", "Ñ”", "ж", "з", "и", "Ñ–", + "Ñ—", "й", "к", "л", "м", "н", "о", "п", "Ñ€", "Ñ", "Ñ‚", "у", + "Ñ„", "Ñ…", "ц", "ч", "ш", "щ", "ÑŒ", "ÑŽ", "Ñ", + } +} + +--- Belarusian + +definitions["be"] = { + entries = { + ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д", + ["е"] = "е", ["Ñ‘"] = "е", ["ж"] = "ж", ["з"] = "з", ["Ñ–"] = "Ñ–", + ["й"] = "й", ["к"] = "к", ["л"] = "л", ["м"] = "м", ["н"] = "н", + ["о"] = "о", ["п"] = "п", ["Ñ€"] = "Ñ€", ["Ñ"] = "Ñ", ["Ñ‚"] = "Ñ‚", + ["у"] = "у", ["Ñž"] = "Ñž", ["Ñ„"] = "Ñ„", ["Ñ…"] = "Ñ…", ["ц"] = "ц", + ["ч"] = "ч", ["ш"] = "ш", ["Ñ‹"] = "Ñ‹", ["ÑŒ"] = "ÑŒ", ["Ñ"] = "Ñ", + ["ÑŽ"] = "ÑŽ", ["Ñ"] = "Ñ", + }, + orders = { + "а", "б", "в", "г", "д", "е", "Ñ‘", "ж", "з", "Ñ–", + "й", "к", "л", "м", "н", "о", "п", "Ñ€", "Ñ", "Ñ‚", + "у", "Ñž", "Ñ„", "Ñ…", "ц", "ч", "ш", "Ñ‹", "ÑŒ", "Ñ", + "ÑŽ", "Ñ", + } +} + +--- Bulgarian + +definitions["bg"] = { + entries = { + ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д", + ["е"] = "е", ["ж"] = "ж", ["з"] = "з", ["и"] = "и", ["й"] = "й", + ["к"] = "к", ["a"] = "a", ["л"] = "л", ["a"] = "a", ["м"] = "м", + ["н"] = "н", ["о"] = "о", ["п"] = "п", ["Ñ€"] = "Ñ€", ["Ñ"] = "Ñ", + ["Ñ‚"] = "Ñ‚", ["у"] = "у", ["Ñ„"] = "Ñ„", ["Ñ…"] = "Ñ…", ["ц"] = "ц", + ["ч"] = "ч", ["ш"] = "ш", ["щ"] = "щ", ["ÑŠ"] = "ÑŠ", ["ÑŒ"] = "ÑŒ", + ["ÑŽ"] = "ÑŽ", ["Ñ"] = "Ñ", + }, + orders = { + "а", "б", "в", "г", "д", "е", "ж", "з","и", "й", + "к", "a", "л", "a", "м", "н", "о", "п", "Ñ€", "Ñ", + "Ñ‚", "у", "Ñ„", "Ñ…", "ц", "ч", "ш", "щ", "ÑŠ", "ÑŒ", + "ÑŽ", "Ñ", + } +} + +--- Old Church Slavonic + +-- The language symbol “cu†is taken from the Wikipedia subdomain +-- cu.wikipedia.org. 
+ +local uk, UK = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) + +definitions["cu"] = { + replacements = { + { "оу", uk }, { "ОУ", UK }, + }, + entries = { + ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д", + ["Ñ”"] = "Ñ”", ["ж"] = "ж", ["Ñ•"] = "Ñ•", ["ꙃ"] = "Ñ•", ["з"] = "з", + ["ê™"] = "з", ["и"] = "и", ["Ñ–"] = "и", ["Ñ—"] = "и", ["Ñ›"] = "Ñ›", + ["к"] = "к", ["л"] = "л", ["м"] = "м", ["н"] = "н", ["о"] = "о", + ["п"] = "п", ["Ñ€"] = "Ñ€", ["Ñ"] = "Ñ", ["Ñ‚"] = "Ñ‚", ["у"] = "у", + ["ѹ"] = "у", ["ꙋ"] = "у", [uk] = "у", ["Ñ„"] = "Ñ„", ["Ñ…"] = "Ñ…", + ["Ñ¡"] = "Ñ¡", ["Ñ¿"] = "Ñ¡", ["ѽ"] = "Ñ¡", ["ê™"] = "Ñ¡", ["ц"] = "ц", + ["ч"] = "ч", ["ш"] = "ш", ["щ"] = "щ", ["ÑŠ"] = "ÑŠ", ["Ñ‹"] = "Ñ‹", + ["ꙑ"] = "Ñ‹", ["ÑŒ"] = "ÑŒ", ["Ñ£"] = "Ñ£", ["ÑŽ"] = "ÑŽ", ["ê™—"] = "ê™—", + ["Ñ¥"] = "Ñ¥", ["ѧ"] = "ѧ", ["Ñ©"] = "Ñ©", ["Ñ«"] = "Ñ«", ["Ñ­"] = "Ñ­", + ["ѯ"] = "ѯ", ["ѱ"] = "ѱ", ["ѳ"] = "ѳ", ["ѵ"] = "ѵ", ["Ñ·"] = "ѵ", + }, + orders = { + "а", "б", "в", "г", "д", "Ñ”", "ж", "Ñ•", "ꙃ", "з", -- DzÄ›lo, U+0292, alternative: dz U+01f3 + "ê™", "и", "Ñ–", "Ñ—", "Ñ›", "к", "л", "м", "н", "о", -- Zemlja + "п", "Ñ€", "Ñ", "Ñ‚", "у", "ѹ", "ꙋ", uk, "Ñ„", "Ñ…", -- U+0478 uk, horizontal ligature, U+0479 uk, vertical ligature + "Ñ¡", "Ñ¿", "ѽ", "ê™", "ц", "ч", "ш", "щ", "ÑŠ", "Ñ‹", -- "Å", U+047f \, U+047d > Omega variants, U+064D / + "ꙑ", "ÑŒ", "Ñ£", "ÑŽ", "ê™—", "Ñ¥", "ѧ", "Ñ©", "Ñ«", "Ñ­", -- Old jery (U+a651) as used e.g. by the OCS Wikipedia. IOTIFIED A + "ѯ", "ѱ", "ѳ", "ѵ", "Ñ·", + }, + upper = { + uk = UK, + }, + lower = { + UK = uk, + } +} + +--- Polish (including the letters q, v, x) Cf. ftp://ftp.gust.org.pl/pub/GUST/bulletin/03/02-bl.pdf. + +definitions["pl"] = { + entries = { + ["a"] = "a", ["Ä…"] = "Ä…", ["b"] = "b", ["c"] = "c", ["ć"] = "ć", + ["d"] = "d", ["e"] = "e", ["Ä™"] = "Ä™", ["f"] = "f", ["g"] = "g", + ["h"] = "h", ["i"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", + ["Å‚"] = "Å‚", ["m"] = "m", ["n"] = "n", ["Å„"] = "Å„", ["o"] = "o", + ["ó"] = "ó", ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", + ["Å›"] = "Å›", ["t"] = "t", ["u"] = "u", ["v"] = "v", ["w"] = "w", + ["x"] = "x", ["y"] = "y", ["z"] = "z", ["ź"] = "ź", ["ż"] = "ż", + }, + orders = { + "a", "Ä…", "b", "c", "ć", "d", "e", "Ä™", "f", "g", + "h", "i", "j", "k", "l", "Å‚", "m", "n", "Å„", "o", + "ó", "p", "q", "r", "s", "Å›", "t", "u", "v", "w", + "x", "y", "z", "ź", "ż", + }, +} + +-- Czech, modified to treat quantities and other secondary characteristics indifferently. Cf. 
+-- http://racek.vlada.cz/usneseni/usneseni_webtest.nsf/WebGovRes/0AD8FEF4CC04B7A4C12571B6006D69D0?OpenDocument +-- (2.4.3; via ) + +local ch, CH = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) + +definitions["cz"] = { + replacements = { + { "ch", ch }, { "CH", CH } + }, + entries = { + ["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["Ä"] = "Ä", + ["d"] = "d", ["Ä"] = "d", ["e"] = "e", ["é"] = "e", ["Ä›"] = "e", + ["f"] = "f", ["g"] = "g", ["h"] = "h", [ch] = "ch", ["i"] = "i", + ["í"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", ["m"] = "m", + ["n"] = "n", ["ň"] = "n", ["o"] = "o", ["ó"] = "o", ["p"] = "p", + ["q"] = "q", ["r"] = "r", ["Å™"] = "Å™", ["s"] = "s", ["Å¡"] = "Å¡", + ["t"] = "t", ["Å¥"] = "t", ["u"] = "u", ["ú"] = "u", ["ů"] = "u", + ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", ["ý"] = "y", + ["z"] = "z", ["ž"] = "ž", + }, + orders = { + "a", "á", "b", "c", "Ä", "d", "Ä", "e", "é", "Ä›", + "f", "g", "h", ch, "i", "í", "j", "k", "l", "m", + "n", "ň", "o", "ó", "p", "q", "r", "Å™", "s", "Å¡", + "t", "Å¥", "u", "ú", "ů", "v", "w", "x", "y", "ý", + "z", "ž", + }, + upper = { + ch = CH, + }, + lower = { + CH = ch, + } +} + +definitions["cs"] = { parent = "cz" } + +--- Slovak. + +-- Vowel and consonant quantities, "Ä", "ľ", "ň", "Å¥", "ô", and "ä" are treated +-- indifferently as their base character, as in my dictionary. If you prefer them +-- to affect collation order, then use the values given in the comments. We could +-- define an additional vector for that. + +local dz, DZ = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) +local dzh, DZH = utfchar(replacementoffset + 2), utfchar(replacementoffset + 12) +local ch, CH = utfchar(replacementoffset + 3), utfchar(replacementoffset + 13) + +definitions["sk"] = { + replacements = { + { "dz", dz }, { "dz", DZ }, + { "dž", dzh }, { "dž", DZH }, + { "ch", ch }, { "ch", CH }, + }, + entries = { + ["a"] = "a", ["á"] = "a", ["ä"] = "a", ["b"] = "b", ["c"] = "c", + ["Ä"] = "Ä", ["d"] = "d", ["Ä"] = "d", [dz] = "dz", [dzh] = "dž", + ["e"] = "e", ["é"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", + [ch] = "ch", ["i"] = "i", ["í"] = "i", ["j"] = "j", ["k"] = "k", + ["l"] = "l", ["ĺ"] = "l", ["ľ"] = "l", ["m"] = "m", ["n"] = "n", + ["ň"] = "n", ["o"] = "o", ["ó"] = "o", ["ô"] = "o", ["p"] = "p", + ["q"] = "q", ["r"] = "r", ["Å•"] = "r", ["s"] = "s", ["Å¡"] = "Å¡", + ["t"] = "t", ["Å¥"] = "t", ["u"] = "u", ["ú"] = "u", ["v"] = "v", + ["w"] = "w", ["x"] = "x", ["y"] = "y", ["ý"] = "y", ["z"] = "z", + ["ž"] = "ž", + }, + orders = { + "a", "á", "ä", "b", "c", "Ä", "d", "Ä", dz, dzh, + "e", "é", "f", "g", "h", ch, "i", "í", "j", "k", + "l", "ĺ", "ľ", "m", "n", "ň", "o", "ó", "ô", "p", + "q", "r", "Å•", "s", "Å¡", "t", "Å¥", "u", "ú", "v", + "w", "x", "y", "ý", "z", "ž", + }, + upper = { + dz = DZ, dzh = DZH, ch = CH, + }, + lower = { + DZ = dz, DZH = dzh, CH = ch, + } +} + +--- Croatian + +local dzh, DZH = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) +local lj, LJ = utfchar(replacementoffset + 2), utfchar(replacementoffset + 12) +local nj, NJ = utfchar(replacementoffset + 3), utfchar(replacementoffset + 13) + +definitions["hr"] = { + replacements = { + { "dž", dzh }, { "DŽ", DZH }, + { "lj", lj }, { "LJ", LJ }, + { "nj", nj }, { "NJ", NJ }, + }, + entries = { + ["a"] = "a", ["b"] = "b", ["c"] = "c", ["Ä"] = "Ä", ["ć"] = "ć", + ["d"] = "d", [dzh] = "dž", ["Ä‘"] = "Ä‘", ["e"] = "e", ["f"] = "f", + ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", ["k"] = "k", + ["l"] = "l", [lj] = 
"lj", ["m"] = "m", ["n"] = "n", [nj] = "nj", + ["o"] = "o", ["p"] = "p", ["r"] = "r", ["s"] = "s", ["Å¡"] = "Å¡", + ["t"] = "t", ["u"] = "u", ["v"] = "v", ["z"] = "z", ["ž"] = "ž", + }, + orders = { + "a", "b", "c", "Ä", "ć", "d", dzh, "Ä‘", "e", "f", + "g", "h", "i", "j", "k", "l", lj, "m", "n", nj, + "o", "p", "r", "s", "Å¡", "t", "u", "v", "z", "ž", + }, + upper = { + dzh = DZH, lj = LJ, nj = NJ, + }, + lower = { + DZH = dzh, LJ = lj, NJ = nj, + } +} + + +--- Serbian + +definitions["sr"] = { + entries = { + ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д", + ["Ñ’"] = "Ñ’", ["е"] = "е", ["ж"] = "ж", ["з"] = "з", ["и"] = "и", + ["ј"] = "ј", ["к"] = "к", ["л"] = "л", ["Ñ™"] = "Ñ™", ["м"] = "м", + ["н"] = "н", ["Ñš"] = "Ñš", ["о"] = "о", ["п"] = "п", ["Ñ€"] = "Ñ€", + ["Ñ"] = "Ñ", ["Ñ‚"] = "Ñ‚", ["Ñ›"] = "Ñ›", ["у"] = "у", ["Ñ„"] = "Ñ„", + ["Ñ…"] = "Ñ…", ["ц"] = "ц", ["ч"] = "ч", ["ÑŸ"] = "ÑŸ", + ["ш"] = "ш", + }, + orders = { + "а", "б", "в", "г", "д", "Ñ’", "е", "ж", "з", "и", + "ј", "к", "л", "Ñ™", "м", "н", "Ñš", "о", "п", "Ñ€", + "Ñ", "Ñ‚", "Ñ›", "у", "Ñ„", "Ñ…", "ц", "ч", "ÑŸ", "ш", + } +} + +--- Transliteration: Russian|ISO9-1995 + +-- Keeping the same collation order as Russian (v.s.). +-- Matches the tables from: +-- http://bitbucket.org/phg/transliterator/src/tip/tex/context/third/transliterator/trans_tables_iso9.lua + +local yer = utfchar(replacementoffset + 1) + +definitions["ru-iso9"] = { + replacements = { + { "''", yer }, + }, + entries = { + ["a"] = "a", ["b"] = "b", ["v"] = "v", ["g"] = "g", ["d"] = "d", + ["e"] = "e", ["ë"] = "ë", ["ž"] = "ž", ["z"] = "z", ["i"] = "i", + ["ì"] = "ì", ["j"] = "j", ["k"] = "k", ["l"] = "l", ["m"] = "m", + ["n"] = "n", ["o"] = "o", ["p"] = "p", ["r"] = "r", ["s"] = "s", + ["t"] = "t", ["u"] = "u", ["f"] = "f", ["h"] = "h", ["c"] = "c", + ["Ä"] = "Ä", ["Å¡"] = "Å¡", ["Å"] = "Å", ["ʺ"] = "ʺ", [yer] = "ʺ", + ["y"] = "y", ["ʹ"] = "ʹ", ["'"] = "ʹ", ["Ä›"] = "Ä›", ["è"] = "è", + ["û"] = "û", ["â"] = "â", ["û"] = "û", ["â"] = "â", + }, + orders = { + "a", "b", "v", "g", "d", "e", "ë", "ž", "z", "i", + "ì", "j", "k", "l", "m", "n", "o", "p", "r", "s", + "t", "u", "f", "h", "c", "Ä", "Å¡", "Å", "ʺ", yer, + "y", "ʹ", "'", "Ä›", "è", "û", "â", "û", "â", + } +} + +--- Transliteration: Old Slavonic|scientific + +-- Matches the tables from: +-- http://bitbucket.org/phg/transliterator/src/tip/tex/context/third/transliterator/trans_tables_scntfc.lua + +local uk, UK = utfchar(replacementoffset + 1), utfchar(replacementoffset + 21) +local tshe, TSHE = utfchar(replacementoffset + 2), utfchar(replacementoffset + 22) +local sht, SHT = utfchar(replacementoffset + 3), utfchar(replacementoffset + 23) +local ju, JU = utfchar(replacementoffset + 4), utfchar(replacementoffset + 24) +local ja, JA = utfchar(replacementoffset + 5), utfchar(replacementoffset + 25) +local je, JE = utfchar(replacementoffset + 6), utfchar(replacementoffset + 26) +local ijus, IJUS = utfchar(replacementoffset + 7), utfchar(replacementoffset + 27) +local ibigjus, IBIGJUS = utfchar(replacementoffset + 8), utfchar(replacementoffset + 28) +local xi, XI = utfchar(replacementoffset + 9), utfchar(replacementoffset + 29) +local psi, PSI = utfchar(replacementoffset + 10), utfchar(replacementoffset + 30) +local theta, THETA = utfchar(replacementoffset + 11), utfchar(replacementoffset + 31) +local shch, SHCH = utfchar(replacementoffset + 12), utfchar(replacementoffset + 32) + +definitions["ocs-scn"] = { + replacements = { + { "ou", uk }, { "OU", UK }, + { "g’", tshe }, { "G’", TSHE }, + { 
"Å¡t", sht }, { "Å T", SHT }, + { "ju", ju }, { "JU", JU }, + { "ja", ja }, { "JA", JA }, + { "je", je }, { "JE", JE }, + { "jÄ™", ijus }, { "JĘ", IJUS }, + { "jÇ«", ibigjus }, { "JǪ", IBIGJUS }, + { "ks", xi }, { "KS", XI }, + { "ps", psi }, { "PS", PSI }, + { "th", theta }, { "TH", THETA }, + { "Å¡Ä", shch }, { "Å ÄŒ", SHCH }, + }, + entries = { + ["a"] = "a", ["b"] = "b", ["v"] = "v", ["g"] = "g", ["d"] = "d", + ["e"] = "e", ["ž"] = "ž", ["Ê’"] = "Ê’", ["z"] = "z", ["i"] = "i", + ["ï"] = "ï", [tshe] = "g’", ["k"] = "k", ["l"] = "l", ["m"] = "m", + ["n"] = "n", ["o"] = "o", ["p"] = "p", ["r"] = "r", ["s"] = "s", + ["t"] = "t", ["u"] = "u", ["f"] = "f", ["x"] = "x", ["o"] = "o", + ["c"] = "c", ["Ä"] = "Ä", ["Å¡"] = "Å¡", [sht] = "Å¡t", [shch] = "Å¡Ä", + ["ÑŠ"] = "ÑŠ", ["y"] = "y", [uk] = "y", ["ÑŒ"] = "ÑŒ", ["Ä›"] = "Ä›", + [ju] = "ju", [ja] = "ja", [je] = "je", ["Ä™"] = "Ä™", [ijus] = "jÄ™", + ["Ç«"] = "Ç«", [ibigjus] = "jÇ«", [xi] = "ks", [psi] = "ps", [theta] = "th", + ["ü"] = "ü", + }, + orders = { + "a", "b", "v", "g", "d", "e", "ž", "Ê’", "z", "i", "ï", + tshe, "k", "l", "m", "n", "o", "p", "r", "s", "t", "u", + "f", "x", "o", "c", "Ä", "Å¡", sht, shch, "ÑŠ", "y", uk, + "ÑŒ", "Ä›", ju, ja, je, "Ä™", ijus, "Ç«", ibigjus, xi, psi, + theta, "ü", + }, + upper = { + uk = UK, tshe = TSHE, sht = SHT, ju = JU, ja = JA, je = JE, ijus = IJUS, ibigjus = IBIGJUS, xi = XI, psi = PSI, theta = THETA, shch = SHCH, + }, + lower = { + UK = uk, TSHE = tshe, SHT = sht, JU = ju, JA = ja, JE = je, IJUS = ijus, IBIGJUS = ibigjus, XI = xi, PSI = psi, THETA = theta, SHCH = shch, + }, +} + + +--- Norwegian (bokmÃ¥l). + +definitions["no"] = { + entries = { + ["a"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d", ["e"] = "e", + ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", + ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", + ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["t"] = "t", + ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", + ["z"] = "z", ["æ"] = "æ", ["ø"] = "ø", ["Ã¥"] = "Ã¥", + }, + orders = { + "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", + "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", + "u", "v", "w", "x", "y", "z", "æ", "ø", "Ã¥", + } +} + +--- Danish (-> Norwegian). + +definitions["da"] = { parent = "no" } + +--- Swedish + +definitions["sv"] = { + entries = { + ["a"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d", ["e"] = "e", + ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", + ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", + ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["t"] = "t", + ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", + ["z"] = "z", ["Ã¥"] = "Ã¥", ["ä"] = "ä", ["ö"] = "ö", + }, + orders = { + "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", + "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", + "u", "v", "w", "x", "y", "z", "Ã¥", "ä", "ö", + } +} + +--- Icelandic + +-- Treating quantities as allographs. 
+ +definitions["is"] = { + entries = { + ["a"] = "a", ["á"] = "a", ["b"] = "b", ["d"] = "d", ["ð"] = "ð", + ["e"] = "e", ["é"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", + ["i"] = "i", ["í"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", + ["m"] = "m", ["n"] = "n", ["o"] = "o", ["ó"] = "o", ["p"] = "p", + ["r"] = "r", ["s"] = "s", ["t"] = "t", ["u"] = "u", ["ú"] = "u", + ["v"] = "v", ["x"] = "x", ["y"] = "y", ["ý"] = "y", ["þ"] = "þ", + ["æ"] = "æ", ["ö"] = "ö", + }, + orders = { + "a", "á", "b", "d", "ð", "e", "é", "f", "g", "h", + "i", "í", "j", "k", "l", "m", "n", "o", "ó", "p", + "r", "s", "t", "u", "ú", "v", "x", "y", "ý", "þ", + "æ", "ö", + }, +} + +--- Greek + +definitions["gr"] = { + entries = { + ["α"] = "α", ["ά"] = "α", ["á½°"] = "α", ["ᾶ"] = "α", ["á¾³"] = "α", + ["á¼€"] = "α", ["á¼"] = "α", ["ἄ"] = "α", ["ἂ"] = "α", ["ἆ"] = "α", + ["á¼"] = "α", ["á¼…"] = "α", ["ἃ"] = "α", ["ἇ"] = "α", ["á¾"] = "α", + ["á¾´"] = "α", ["á¾²"] = "α", ["á¾·"] = "α", ["ᾄ"] = "α", ["ᾂ"] = "α", + ["á¾…"] = "α", ["ᾃ"] = "α", ["ᾆ"] = "α", ["ᾇ"] = "α", ["β"] = "β", + ["γ"] = "γ", ["δ"] = "δ", ["ε"] = "ε", ["έ"] = "ε", ["á½²"] = "ε", + ["á¼"] = "ε", ["á¼”"] = "ε", ["á¼’"] = "ε", ["ἑ"] = "ε", ["ἕ"] = "ε", + ["ἓ"] = "ε", ["ζ"] = "ζ", ["η"] = "η", ["η"] = "η", ["ή"] = "η", + ["á½´"] = "η", ["ῆ"] = "η", ["ῃ"] = "η", ["á¼ "] = "η", ["ἤ"] = "η", + ["á¼¢"] = "η", ["ἦ"] = "η", ["á¾"] = "η", ["ἡ"] = "η", ["á¼¥"] = "η", + ["á¼£"] = "η", ["ἧ"] = "η", ["ᾑ"] = "η", ["á¿„"] = "η", ["á¿‚"] = "η", + ["ῇ"] = "η", ["á¾”"] = "η", ["á¾’"] = "η", ["ᾕ"] = "η", ["ᾓ"] = "η", + ["á¾–"] = "η", ["á¾—"] = "η", ["θ"] = "θ", ["ι"] = "ι", ["ί"] = "ι", + ["ὶ"] = "ι", ["á¿–"] = "ι", ["á¼°"] = "ι", ["á¼´"] = "ι", ["á¼²"] = "ι", + ["ἶ"] = "ι", ["á¼±"] = "ι", ["á¼µ"] = "ι", ["á¼³"] = "ι", ["á¼·"] = "ι", + ["ÏŠ"] = "ι", ["Î"] = "ι", ["á¿’"] = "ι", ["á¿—"] = "ι", ["κ"] = "κ", + ["λ"] = "λ", ["μ"] = "μ", ["ν"] = "ν", ["ξ"] = "ξ", ["ο"] = "ο", + ["ÏŒ"] = "ο", ["ὸ"] = "ο", ["á½€"] = "ο", ["ὄ"] = "ο", ["ὂ"] = "ο", + ["á½"] = "ο", ["á½…"] = "ο", ["ὃ"] = "ο", ["Ï€"] = "Ï€", ["Ï"] = "Ï", + ["ῤ"] = "ῤ", ["á¿¥"] = "á¿¥", ["σ"] = "σ", ["Ï‚"] = "Ï‚", ["Ï„"] = "Ï„", + ["Ï…"] = "Ï…", ["Ï"] = "Ï…", ["ὺ"] = "Ï…", ["ῦ"] = "Ï…", ["á½"] = "Ï…", + ["á½”"] = "Ï…", ["á½’"] = "Ï…", ["á½–"] = "Ï…", ["ὑ"] = "Ï…", ["ὕ"] = "Ï…", + ["ὓ"] = "Ï…", ["á½—"] = "Ï…", ["Ï‹"] = "Ï…", ["ΰ"] = "Ï…", ["á¿¢"] = "Ï…", + ["ῧ"] = "Ï…", ["φ"] = "φ", ["χ"] = "χ", ["ψ"] = "ω", ["ω"] = "ω", + ["ÏŽ"] = "ω", ["á½¼"] = "ω", ["ῶ"] = "ω", ["ῳ"] = "ω", ["á½ "] = "ω", + ["ὤ"] = "ω", ["á½¢"] = "ω", ["ὦ"] = "ω", ["á¾ "] = "ω", ["ὡ"] = "ω", + ["á½¥"] = "ω", ["á½£"] = "ω", ["ὧ"] = "ω", ["ᾡ"] = "ω", ["á¿´"] = "ω", + ["ῲ"] = "ω", ["á¿·"] = "ω", ["ᾤ"] = "ω", ["á¾¢"] = "ω", ["á¾¥"] = "ω", + ["á¾£"] = "ω", ["ᾦ"] = "ω", ["ᾧ"] = "ω", + }, + orders = { + "α", "ά", "á½°", "ᾶ", "á¾³", "á¼€", "á¼", "ἄ", "ἂ", "ἆ", + "á¼", "á¼…", "ἃ", "ἇ", "á¾", "á¾´", "á¾²", "á¾·", "ᾄ", "ᾂ", + "á¾…", "ᾃ", "ᾆ", "ᾇ", "β", "γ", "δ", "ε", "έ", "á½²", + "á¼", "á¼”", "á¼’", "ἑ", "ἕ", "ἓ", "ζ", "η", "η", "ή", + "á½´", "ῆ", "ῃ", "á¼ ", "ἤ", "á¼¢", "ἦ", "á¾", "ἡ", "á¼¥", + "á¼£", "ἧ", "ᾑ", "á¿„", "á¿‚", "ῇ", "á¾”", "á¾’", "ᾕ", "ᾓ", + "á¾–", "á¾—", "θ", "ι", "ί", "ὶ", "á¿–", "á¼°", "á¼´", "á¼²", + "ἶ", "á¼±", "á¼µ", "á¼³", "á¼·", "ÏŠ", "Î", "á¿’", "á¿—", "κ", + "λ", "μ", "ν", "ξ", "ο", "ÏŒ", "ὸ", "á½€", "ὄ", "ὂ", + "á½", "á½…", "ὃ", "Ï€", "Ï", "ῤ", "á¿¥", "σ", "Ï‚", "Ï„", + "Ï…", "Ï", "ὺ", "ῦ", "á½", "á½”", "á½’", "á½–", "ὑ", "ὕ", + "ὓ", "á½—", "Ï‹", "ΰ", "á¿¢", "ῧ", "φ", "χ", "ψ", "ω", + "ÏŽ", "á½¼", "ῶ", "ῳ", "á½ ", "ὤ", "á½¢", "ὦ", 
"á¾ ", "ὡ", + "á½¥", "á½£", "ὧ", "ᾡ", "á¿´", "ῲ", "á¿·", "ᾤ", "á¾¢", "á¾¥", + "á¾£", "ᾦ", "ᾧ", + }, +} + +--- Latin + +-- Treating the post-classical fricatives “j†and “v†as “i†and “u†+-- respectively. + +definitions["la"] = { + replacements = { + { "æ", "ae" }, { "Æ", "AE" }, + }, + entries = { + ["a"] = "a", ["Ä"] = "a", ["ă"] = "a", ["b"] = "b", ["c"] = "c", + ["d"] = "d", ["e"] = "e", ["Ä“"] = "e", ["Ä•"] = "e", ["f"] = "f", + ["g"] = "g", ["h"] = "h", ["i"] = "i", ["Ä«"] = "i", ["Ä­"] = "i", + ["j"] = "i", ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", + ["o"] = "o", ["Å"] = "o", ["Å"] = "o", ["p"] = "p", ["q"] = "q", + ["r"] = "r", ["s"] = "s", ["t"] = "t", ["u"] = "u", ["Å«"] = "u", + ["Å­"] = "u", ["v"] = "u", ["w"] = "w", ["x"] = "x", ["y"] = "y", + ["ȳ"] = "y", ["y̆"] = "y", ["z"] = "z", + }, + orders = { + "a", "Ä", "ă", "b", "c", "d", "e", "Ä“", "Ä•", "f", + "g", "h", "i", "Ä«", "Ä­", "j", "k", "l", "m", "n", + "o", "Å", "Å", "p", "q", "r", "s", "t", "u", "Å«", + "Å­", "v", "w", "x", "y", "ȳ", "y̆", "z", + } +} + +--- Italian + +definitions["it"] = { + entries = { + ["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d", + ["e"] = "e", ["é"] = "e", ["è"] = "e", ["f"] = "f", ["g"] = "g", + ["h"] = "h", ["i"] = "i", ["í"] = "i", ["ì"] = "i", ["j"] = "i", + ["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", + ["ó"] = "o", ["ò"] = "o", ["p"] = "p", ["q"] = "q", ["r"] = "r", + ["s"] = "s", ["t"] = "t", ["u"] = "u", ["ú"] = "u", ["ù"] = "u", + ["v"] = "u", ["w"] = "w", ["x"] = "x", ["y"] = "y", ["z"] = "z", + }, + orders = { + "a", "á", "b", "c", "d", "e", "é", "è", "f", "g", + "h", "i", "í", "ì", "j", "k", "l", "m", "n", "o", + "ó", "ò", "p", "q", "r", "s", "t", "u", "ú", "ù", + "v", "w", "x", "y", "z", + } +} + +--- Romanian + +definitions["ro"] = { + entries = { + ["a"] = "a", ["ă"] = "ă", ["â"] = "â", ["b"] = "b", ["c"] = "c", + ["d"] = "d", ["e"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", + ["i"] = "i", ["î"] = "î", ["j"] = "j", ["k"] = "k", ["l"] = "l", + ["m"] = "m", ["n"] = "n", ["o"] = "o", ["p"] = "p", ["q"] = "q", + ["r"] = "r", ["s"] = "s", ["È™"] = "È™", ["t"] = "t", ["È›"] = "È›", + ["u"] = "u", ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", + ["z"] = "z", + }, + orders = { + "a", "ă", "â", "b", "c", "d", "e", "f", "g", "h", + "i", "î", "j", "k", "l", "m", "n", "o", "p", "q", + "r", "s", "È™", "t", "È›", "u", "v", "w", "x", "y", + "z", + } +} + +--- Spanish + +definitions["es"] = { + entries = { + ["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d", + ["e"] = "e", ["é"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", + ["i"] = "i", ["í"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", + ["m"] = "m", ["n"] = "n", ["ñ"] = "ñ", ["o"] = "o", ["ó"] = "o", + ["p"] = "p", ["q"] = "q", ["r"] = "r", ["s"] = "s", ["t"] = "t", + ["u"] = "u", ["ú"] = "u", ["ü"] = "u", ["v"] = "v", ["w"] = "w", + ["x"] = "x", ["y"] = "y", ["z"] = "z", + }, + orders = { + "a", "á", "b", "c", "d", "e", "é", "f", "g", "h", + "i", "í", "j", "k", "l", "m", "n", "ñ", "o", "ó", + "p", "q", "r", "s", "t", "u", "ú", "ü", "v", "w", + "x", "y", "z", + } +} + +--- Portuguese + +definitions["pt"] = { + entries = { + ["a"] = "a", ["á"] = "a", ["â"] = "a", ["ã"] = "a", ["à"] = "a", + ["b"] = "b", ["c"] = "c", ["ç"] = "c", ["d"] = "d", ["e"] = "e", + ["é"] = "e", ["ê"] = "e", ["f"] = "f", ["g"] = "g", ["h"] = "h", + ["i"] = "i", ["í"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", + ["m"] = "m", ["n"] = "n", ["o"] = "o", ["ó"] = "o", ["ô"] = "o", + ["õ"] = "o", ["p"] = 
"p", ["q"] = "q", ["r"] = "r", ["s"] = "s", + ["t"] = "t", ["u"] = "u", ["ú"] = "u", ["ü"] = "u", ["v"] = "v", + ["w"] = "w", ["x"] = "x", ["y"] = "y", ["z"] = "z", + }, + orders = { + "a", "á", "â", "ã", "à", "b", "c", "ç", "d", "e", + "é", "ê", "f", "g", "h", "i", "í", "j", "k", "l", + "m", "n", "o", "ó", "ô", "õ", "p", "q", "r", "s", + "t", "u", "ú", "ü", "v", "w", "x", "y", "z", + } +} + +--- Lithuanian + +local ch, CH = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) + +definitions["lt"] = { + replacements = { + { "ch", ch }, { "CH", CH} + }, + entries = { + ["a"] = "a", ["Ä…"] = "a", ["b"] = "b", ["c"] = "c", [ch ] = "c", + ["Ä"] = "Ä", ["d"] = "d", ["e"] = "e", ["Ä™"] = "e", ["Ä—"] = "e", + ["f"] = "f", ["g"] = "g", ["h"] = "h", ["i"] = "i", ["į"] = "i", + ["y"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", ["m"] = "m", + ["n"] = "n", ["o"] = "o", ["p"] = "p", ["r"] = "r", ["s"] = "s", + ["Å¡"] = "Å¡", ["t"] = "t", ["u"] = "u", ["ų"] = "u", ["Å«"] = "u", + ["v"] = "v", ["z"] = "z", ["ž"] = "ž", + }, + orders = { + "a", "Ä…", "b", "c", ch, "Ä", "d", "e", "Ä™", "Ä—", + "f", "g", "h", "i", "į", "y", "j", "k", "l", "m", + "n", "o", "p", "r", "s", "Å¡", "t", "u", "ų", "Å«", + "v", "z", "ž", + }, + lower = { + ch = CH, + }, + upper = { + CH = ch, + }, +} + +--- Latvian + +definitions["lv"] = { + entries = { + ["a"] = "a", ["Ä"] = "a", ["b"] = "b", ["c"] = "c", ["Ä"] = "Ä", + ["d"] = "d", ["e"] = "e", ["Ä“"] = "e", ["f"] = "f", ["g"] = "g", + ["Ä£"] = "Ä£", ["h"] = "h", ["i"] = "i", ["Ä«"] = "i", ["j"] = "j", + ["k"] = "k", ["Ä·"] = "Ä·", ["l"] = "l", ["ļ"] = "ļ", ["m"] = "m", + ["n"] = "n", ["ņ"] = "ņ", ["o"] = "o", ["Å"] = "o", ["p"] = "p", + ["r"] = "r", ["Å—"] = "Å—", ["s"] = "s", ["Å¡"] = "Å¡", ["t"] = "t", + ["u"] = "u", ["Å«"] = "u", ["v"] = "v", ["z"] = "z", ["ž"] = "ž", + }, + orders = { + "a", "Ä", "b", "c", "Ä", "d", "e", "Ä“", "f", "g", + "Ä£", "h", "i", "Ä«", "j", "k", "Ä·", "l", "ļ", "m", + "n", "ņ", "o", "Å", "p", "r", "Å—", "s", "Å¡", "t", + "u", "Å«", "v", "z", "ž", + } +} + +--- Hungarian + +-- Helpful but disturbing: +-- http://en.wikipedia.org/wiki/Hungarian_alphabet#Alphabetical_ordering_.28collation.29 +-- (In short: you'd have to analyse word-compounds to realize a correct order +-- for sequences like “nnyâ€, “sszâ€, and “zszâ€. 
This is left as an exercise to +-- the reader…) + +local cs, CS = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11) +local dz, DZ = utfchar(replacementoffset + 2), utfchar(replacementoffset + 12) +local dzs, DZS = utfchar(replacementoffset + 3), utfchar(replacementoffset + 13) +local gy, GY = utfchar(replacementoffset + 4), utfchar(replacementoffset + 14) +local ly, LY = utfchar(replacementoffset + 5), utfchar(replacementoffset + 15) +local ny, NY = utfchar(replacementoffset + 6), utfchar(replacementoffset + 16) +local sz, SZ = utfchar(replacementoffset + 7), utfchar(replacementoffset + 17) +local ty, TY = utfchar(replacementoffset + 8), utfchar(replacementoffset + 18) +local zs, ZS = utfchar(replacementoffset + 9), utfchar(replacementoffset + 19) + +definitions["hu"] = { + replacements = { + { "cs", cs }, { "CS", CS }, + { "dz", dz }, { "DZ", DZ }, + { "dzs", dzs }, { "DZS", DZS }, + { "gy", gy }, { "GY", GY }, + { "ly", ly }, { "LY", LY }, + { "ny", ny }, { "NY", NY }, + { "sz", sz }, { "SZ", SZ }, + { "ty", ty }, { "TY", TY }, + { "zs", zs }, { "ZS", ZS }, + }, + entries = { + ["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", [cs ] = "cs", + ["d"] = "d", [dz ] = "dz", [dzs] = "dzs", ["e"] = "e", ["é"] = "e", + ["f"] = "f", ["g"] = "g", [gy ] = "gy", ["h"] = "h", ["i"] = "i", + ["í"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l", [ly ] = "ly", + ["m"] = "m", ["n"] = "n", [ny ] = "ny", ["o"] = "o", ["ó"] = "o", + ["ö"] = "ö", ["Å‘"] = "ö", ["p"] = "p", ["q"] = "q", ["r"] = "r", + ["s"] = "s", [sz ] = "sz", ["t"] = "t", [ty ] = "ty", ["u"] = "u", + ["ú"] = "u", ["ü"] = "ü", ["ű"] = "ü", ["v"] = "v", ["w"] = "w", + ["x"] = "x", ["y"] = "y", ["z"] = "z", [zs ] = "zs", + }, + orders = { + "a", "á", "b", "c", cs, "d", dz, dzs, "e", "é", + "f", "g", gy, "h", "i", "í", "j", "k", "l", ly, + "m", "n", ny, "o", "ó", "ö", "Å‘", "p", "q", "r", + "s", sz, "t", ty, "u", "ú", "ü", "ű", "v", "w", + "x", "y", "z", zs, + }, + lower = { + CS = cs, DZ = dz, DZS = dzs, GY = gy, LY = ly, NY = ny, SZ = sz, TY = ty, ZS = zs, + }, + upper = { + cs = CS, dz = DZ, dzs = DZS, gy = GY, ly = LY, ny = NY, sz = SZ, ty = TY, zs = ZS, + }, +} + +--- Estonian + +definitions["et"] = { + entries = { -- w x y are used for foreign words only + ["a"] = "a", ["b"] = "b", ["d"] = "d", ["e"] = "e", ["f"] = "f", + ["g"] = "g", ["h"] = "h", ["i"] = "i", ["j"] = "j", ["k"] = "k", + ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o", ["p"] = "p", + ["r"] = "r", ["s"] = "s", ["Å¡"] = "Å¡", ["z"] = "z", ["ž"] = "ž", + ["t"] = "t", ["u"] = "u", ["v"] = "v", ["w"] = "v", ["õ"] = "õ", + ["ä"] = "ä", ["ö"] = "ö", ["ü"] = "ü", ["x"] = "x", ["y"] = "y", + }, + orders = { + "a", "b", "d", "e", "f", "g", "h", "i", "j", "k", + "l", "m", "n", "o", "p", "r", "s", "Å¡", "z", "ž", + "t", "u", "v", "w", "õ", "ä", "ö", "ü", "x", "y", + } +} + +--- Korean + +local fschars = characters.fschars + +local function firstofsplit(first) + local fs = fschars[first] or first -- leadconsonant + return fs, fs -- entry, tag +end + +definitions["kr"] = { + firstofsplit = firstofsplit, + orders = { + "ㄱ", "ã„´", "ã„·", "ㄹ", "ã…", "ã…‚", "ã……", "ã…‡", "ã…ˆ", "ã…Š", "ã…‹", "ã…Œ", "ã…", "ã…Ž", + "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", + "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", + } +} + +-- Japanese + +definitions["jp"] = { + replacements = { + { "ã", "ã‚" }, { "ãƒ", "ã„" }, + { "ã…", "ã†" }, { "ã‡", "ãˆ" }, + { "ã‰", "ãŠ" }, { "ã£", "ã¤" }, + { "ゃ", "ã‚„" }, { "ã‚…", "ゆ" }, + { "ょ", "よ" }, + }, + entries = 
{ + ["ã‚"] = "ã‚", ["ã„"] = "ã„", ["ã†"] = "ã†", ["ãˆ"] = "ãˆ", ["ãŠ"] = "ãŠ", + ["ã‹"] = "ã‹", ["ã"] = "ã", ["ã"] = "ã", ["ã‘"] = "ã‘", ["ã“"] = "ã“", + ["ã•"] = "ã•", ["ã—"] = "ã—", ["ã™"] = "ã™", ["ã›"] = "ã›", ["ã"] = "ã", + ["ãŸ"] = "ãŸ", ["ã¡"] = "ã¡", ["ã¤"] = "ã¤", ["ã¦"] = "ã¦", ["ã¨"] = "ã¨", + ["ãª"] = "ãª", ["ã«"] = "ã«", ["ã¬"] = "ã¬", ["ã­"] = "ã­", ["ã®"] = "ã®", + ["ã¯"] = "ã¯", ["ã²"] = "ã²", ["ãµ"] = "ãµ", ["ã¸"] = "ã¸", ["ã»"] = "ã»", + ["ã¾"] = "ã¾", ["ã¿"] = "ã¿", ["ã‚€"] = "ã‚€", ["ã‚"] = "ã‚", ["ã‚‚"] = "ã‚‚", + ["ã‚„"] = "ã‚„", ["ゆ"] = "ゆ", ["よ"] = "よ", + ["ら"] = "ら", ["ã‚Š"] = "ã‚Š", ["ã‚‹"] = "ã‚‹", ["ã‚Œ"] = "ã‚Œ", ["ã‚"] = "ã‚", + ["ã‚"] = "ã‚", ["ã‚"] = "ã‚", ["ã‚‘"] = "ã‚‘", ["ã‚’"] = "ã‚’", ["ã‚“"] = "ã‚“", + }, + orders = { + "ã‚", "ã„", "ã†", "ãˆ", "ãŠ", "ã‹", "ã", "ã", "ã‘", "ã“", + "ã•", "ã—", "ã™", "ã›", "ã", "ãŸ", "ã¡", "ã¤", "ã¦", "ã¨", + "ãª", "ã«", "ã¬", "ã­", "ã®", "ã¯", "ã²", "ãµ", "ã¸", "ã»", + "ã¾", "ã¿", "ã‚€", "ã‚", "ã‚‚", "ã‚„", "ゆ", "よ", + "ら", "ã‚Š", "ã‚‹", "ã‚Œ", "ã‚", "ã‚", "ã‚", "ã‚‘", "ã‚’", "ã‚“", + } +} diff --git a/tex/context/base/spac-adj.lua b/tex/context/base/spac-adj.lua index c87a9d17f..6dff0dede 100644 --- a/tex/context/base/spac-adj.lua +++ b/tex/context/base/spac-adj.lua @@ -1,58 +1,58 @@ -if not modules then modules = { } end modules ['spac-adj'] = { - version = 1.001, - comment = "companion to spac-adj.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- sort of obsolete code - -local a_vadjust = attributes.private('graphicvadjust') - -local nodecodes = nodes.nodecodes - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist - -local remove_node = nodes.remove -local hpack_node = node.hpack -local vpack_node = node.vpack - -function nodes.handlers.graphicvadjust(head,groupcode) -- we can make an actionchain for mvl only - if groupcode == "" then -- mvl only - local h, p, done = head, nil, false - while h do - local id = h.id - if id == hlist_code or id == vlist_code then - local a = h[a_vadjust] - if a then - if p then - local n - head, h, n = remove_node(head,h) - local pl = p.list - if n.width ~= 0 then - n = hpack_node(n,0,'exactly') -- todo: dir - end - if pl then - pl.prev = n - n.next = pl - end - p.list = n - done = true - else - -- can't happen - end - else - p = h - h = h.next - end - else - h = h.next - end - end - return head, done - else - return head, false - end -end +if not modules then modules = { } end modules ['spac-adj'] = { + version = 1.001, + comment = "companion to spac-adj.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- sort of obsolete code + +local a_vadjust = attributes.private('graphicvadjust') + +local nodecodes = nodes.nodecodes + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist + +local remove_node = nodes.remove +local hpack_node = node.hpack +local vpack_node = node.vpack + +function nodes.handlers.graphicvadjust(head,groupcode) -- we can make an actionchain for mvl only + if groupcode == "" then -- mvl only + local h, p, done = head, nil, false + while h do + local id = h.id + if id == hlist_code or id == vlist_code then + local a = h[a_vadjust] + if a then + if p then + local n + head, h, n = remove_node(head,h) + local pl = p.list + if n.width ~= 0 then + n = hpack_node(n,0,'exactly') -- todo: dir + end + if pl then + 
pl.prev = n + n.next = pl + end + p.list = n + done = true + else + -- can't happen + end + else + p = h + h = h.next + end + else + h = h.next + end + end + return head, done + else + return head, false + end +end diff --git a/tex/context/base/spac-ali.lua b/tex/context/base/spac-ali.lua index ceb278433..6357f0f15 100644 --- a/tex/context/base/spac-ali.lua +++ b/tex/context/base/spac-ali.lua @@ -1,134 +1,134 @@ -if not modules then modules = { } end modules ['spac-ali'] = { - version = 1.001, - comment = "companion to spac-ali.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local div = math.div -local format = string.format - -local tasks = nodes.tasks -local appendaction = tasks.appendaction -local prependaction = tasks.prependaction -local disableaction = tasks.disableaction -local enableaction = tasks.enableaction - -local slide_nodes = node.slide -local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here - -local unsetvalue = attributes.unsetvalue - -local concat_nodes = nodes.concat - -local nodecodes = nodes.nodecodes -local listcodes = nodes.listcodes - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local line_code = listcodes.line - -local nodepool = nodes.pool - -local new_stretch = nodepool.stretch - -local a_realign = attributes.private("realign") - -local texattribute = tex.attribute -local texcount = tex.count - -local isleftpage = layouts.status.isleftpage - -typesetters = typesetters or { } -local alignments = { } -typesetters.alignments = alignments - -local report_realign = logs.reporter("typesetters","margindata") -local trace_realign = trackers.register("typesetters.margindata", function(v) trace_margindata = v end) - -local nofrealigned = 0 - --- leftskip rightskip parfillskip --- raggedleft 0 + 0 - --- raggedright 0 0 fil --- raggedcenter 0 + 0 + - - -local function handler(head,leftpage,realpageno) - local current = head - local done = false - while current do - local id = current.id - if id == hlist_code then - if current.subtype == line_code then - local a = current[a_realign] - if not a or a == 0 then - -- skip - else - local align = a % 10 - local pageno = div(a,10) - if pageno == realpageno then - -- already ok - else - local action = 0 - if align == 1 then -- flushright - action = leftpage and 1 or 2 - elseif align == 2 then -- flushleft - action = leftpage and 2 or 1 - end - if action == 1 then - current.list = hpack_nodes(concat_nodes{current.list,new_stretch(3)},current.width,"exactly") - if trace_realign then - report_realign("flushing left, align %a, page %a, realpage %a",align,pageno,realpageno) - end - elseif action == 2 then - current.list = hpack_nodes(concat_nodes{new_stretch(3),current.list},current.width,"exactly") - if trace_realign then - report_realign("flushing right. 
align %a, page %a, realpage %a",align,pageno,realpageno) - end - elseif trace_realign then - report_realign("invalid flushing, align %a, page %a, realpage %a",align,pageno,realpageno) - end - done = true - nofrealigned = nofrealigned + 1 - end - current[a_realign] = unsetvalue - end - end - handler(current.list,leftpage,realpageno) - elseif id == vlist_code then - handler(current.list,leftpage,realpageno) - end - current = current.next - end - return head, done -end - -function alignments.handler(head) - local leftpage = isleftpage(true,false) - local realpageno = texcount.realpageno - return handler(head,leftpage,realpageno) -end - -local enabled = false - -function alignments.set(n) - if not enabled then - enableaction("shipouts","typesetters.alignments.handler") - enabled = true - if trace_realign then - report_realign("enabled") - end - end - texattribute[a_realign] = texcount.realpageno * 10 + n -end - -commands.setrealign = alignments.set - -statistics.register("realigning", function() - if nofrealigned > 0 then - return format("%s processed",nofrealigned) - else - return nil - end -end) +if not modules then modules = { } end modules ['spac-ali'] = { + version = 1.001, + comment = "companion to spac-ali.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local div = math.div +local format = string.format + +local tasks = nodes.tasks +local appendaction = tasks.appendaction +local prependaction = tasks.prependaction +local disableaction = tasks.disableaction +local enableaction = tasks.enableaction + +local slide_nodes = node.slide +local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here + +local unsetvalue = attributes.unsetvalue + +local concat_nodes = nodes.concat + +local nodecodes = nodes.nodecodes +local listcodes = nodes.listcodes + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local line_code = listcodes.line + +local nodepool = nodes.pool + +local new_stretch = nodepool.stretch + +local a_realign = attributes.private("realign") + +local texattribute = tex.attribute +local texcount = tex.count + +local isleftpage = layouts.status.isleftpage + +typesetters = typesetters or { } +local alignments = { } +typesetters.alignments = alignments + +local report_realign = logs.reporter("typesetters","margindata") +local trace_realign = trackers.register("typesetters.margindata", function(v) trace_margindata = v end) + +local nofrealigned = 0 + +-- leftskip rightskip parfillskip +-- raggedleft 0 + 0 - +-- raggedright 0 0 fil +-- raggedcenter 0 + 0 + - + +local function handler(head,leftpage,realpageno) + local current = head + local done = false + while current do + local id = current.id + if id == hlist_code then + if current.subtype == line_code then + local a = current[a_realign] + if not a or a == 0 then + -- skip + else + local align = a % 10 + local pageno = div(a,10) + if pageno == realpageno then + -- already ok + else + local action = 0 + if align == 1 then -- flushright + action = leftpage and 1 or 2 + elseif align == 2 then -- flushleft + action = leftpage and 2 or 1 + end + if action == 1 then + current.list = hpack_nodes(concat_nodes{current.list,new_stretch(3)},current.width,"exactly") + if trace_realign then + report_realign("flushing left, align %a, page %a, realpage %a",align,pageno,realpageno) + end + elseif action == 2 then + current.list = 
hpack_nodes(concat_nodes{new_stretch(3),current.list},current.width,"exactly") + if trace_realign then + report_realign("flushing right. align %a, page %a, realpage %a",align,pageno,realpageno) + end + elseif trace_realign then + report_realign("invalid flushing, align %a, page %a, realpage %a",align,pageno,realpageno) + end + done = true + nofrealigned = nofrealigned + 1 + end + current[a_realign] = unsetvalue + end + end + handler(current.list,leftpage,realpageno) + elseif id == vlist_code then + handler(current.list,leftpage,realpageno) + end + current = current.next + end + return head, done +end + +function alignments.handler(head) + local leftpage = isleftpage(true,false) + local realpageno = texcount.realpageno + return handler(head,leftpage,realpageno) +end + +local enabled = false + +function alignments.set(n) + if not enabled then + enableaction("shipouts","typesetters.alignments.handler") + enabled = true + if trace_realign then + report_realign("enabled") + end + end + texattribute[a_realign] = texcount.realpageno * 10 + n +end + +commands.setrealign = alignments.set + +statistics.register("realigning", function() + if nofrealigned > 0 then + return format("%s processed",nofrealigned) + else + return nil + end +end) diff --git a/tex/context/base/spac-chr.lua b/tex/context/base/spac-chr.lua index 24364978a..6c9cb82df 100644 --- a/tex/context/base/spac-chr.lua +++ b/tex/context/base/spac-chr.lua @@ -1,200 +1,200 @@ -if not modules then modules = { } end modules ['spac-chr'] = { - version = 1.001, - comment = "companion to spac-chr.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local byte, lower = string.byte, string.lower - --- beware: attribute copying is bugged ... there will be a proper luatex helper --- for this - --- to be redone: characters will become tagged spaces instead as then we keep track of --- spaceskip etc - -trace_characters = false trackers.register("typesetters.characters", function(v) trace_characters = v end) - -report_characters = logs.reporter("typesetting","characters") - -local nodes, node = nodes, node - -local insert_node_after = node.insert_after -local remove_node = nodes.remove -- ! 
nodes -local copy_node_list = node.copy_list - -local nodepool = nodes.pool -local tasks = nodes.tasks - -local new_penalty = nodepool.penalty -local new_glue = nodepool.glue - -local nodecodes = nodes.nodecodes -local skipcodes = nodes.skipcodes -local glyph_code = nodecodes.glyph -local glue_code = nodecodes.glue - -local space_skip_code = skipcodes["spaceskip"] - -local chardata = characters.data - -local typesetters = typesetters - -local characters = { } -typesetters.characters = characters - -local fonthashes = fonts.hashes -local fontparameters = fonthashes.parameters -local fontcharacters = fonthashes.characters -local fontquads = fonthashes.quads - -local a_character = attributes.private("characters") -local a_alignstate = attributes.private("alignstate") - -local c_zero = byte('0') -local c_period = byte('.') - -local function inject_quad_space(unicode,head,current,fraction) - local attr = current.attr - if fraction ~= 0 then - fraction = fraction * fontquads[current.font] - end - local glue = new_glue(fraction) --- glue.attr = copy_node_list(attr) - glue.attr = attr - current.attr = nil - glue[a_character] = unicode - head, current = insert_node_after(head,current,glue) - return head, current -end - -local function inject_char_space(unicode,head,current,parent) - local attr = current.attr - local font = current.font - local char = fontcharacters[font][parent] - local glue = new_glue(char and char.width or fontparameters[font].space) - -- glue.attr = copy_node_list(current.attr) - glue.attr = current.attr - current.attr = nil - glue[a_character] = unicode - head, current = insert_node_after(head,current,glue) - return head, current -end - -local function inject_nobreak_space(unicode,head,current,space,spacestretch,spaceshrink) - local attr = current.attr - local glue = new_glue(space,spacestretch,spaceshrink) - local penalty = new_penalty(10000) - -- glue.attr = copy_node_list(attr) - glue.attr = attr - current.attr = nil - -- penalty.attr = attr - glue[a_character] = unicode - head, current = insert_node_after(head,current,penalty) - head, current = insert_node_after(head,current,glue) - return head, current -end - -local methods = { - - -- The next one uses an attribute assigned to the character but still we - -- don't have the 'local' value. 
- - [0x00A0] = function(head,current) -- nbsp - local para = fontparameters[current.font] - if current[a_alignstate] == 1 then -- flushright - head, current = inject_nobreak_space(0x00A0,head,current,para.space,0,0) - current.subtype = space_skip_code - else - head, current = inject_nobreak_space(0x00A0,head,current,para.space,para.spacestretch,para.spaceshrink) - end - return head, current - end, - - [0x2000] = function(head,current) -- enquad - return inject_quad_space(0x2000,head,current,1/2) - end, - - [0x2001] = function(head,current) -- emquad - return inject_quad_space(0x2001,head,current,1) - end, - - [0x2002] = function(head,current) -- enspace - return inject_quad_space(0x2002,head,current,1/2) - end, - - [0x2003] = function(head,current) -- emspace - return inject_quad_space(0x2003,head,current,1) - end, - - [0x2004] = function(head,current) -- threeperemspace - return inject_quad_space(0x2004,head,current,1/3) - end, - - [0x2005] = function(head,current) -- fourperemspace - return inject_quad_space(0x2005,head,current,1/4) - end, - - [0x2006] = function(head,current) -- sixperemspace - return inject_quad_space(0x2006,head,current,1/6) - end, - - [0x2007] = function(head,current) -- figurespace - return inject_char_space(0x2007,head,current,c_zero) - end, - - [0x2008] = function(head,current) -- punctuationspace - return inject_char_space(0x2008,head,current,c_period) - end, - - [0x2009] = function(head,current) -- breakablethinspace - return inject_quad_space(0x2009,head,current,1/8) -- same as next - end, - - [0x200A] = function(head,current) -- hairspace - return inject_quad_space(0x200A,head,current,1/8) -- same as previous (todo) - end, - - [0x200B] = function(head,current) -- zerowidthspace - return inject_quad_space(0x200B,head,current,0) - end, - - [0x202F] = function(head,current) -- narrownobreakspace - return inject_nobreak_space(0x202F,head,current,fontquads[current.font]/8) - end, - - [0x205F] = function(head,current) -- math thinspace - return inject_nobreak_space(0x205F,head,current,fontparameters[current.font].space/8) - end, - - -- [0xFEFF] = function(head,current) -- zerowidthnobreakspace - -- return head, current - -- end, - -} - -function characters.handler(head) - local current = head - local done = false - while current do - local id = current.id - if id == glyph_code then - local next = current.next - local char = current.char - local method = methods[char] - if method then - if trace_characters then - report_characters("replacing character %C, description %a",char,lower(chardata[char].description)) - end - head = method(head,current) - head = remove_node(head,current,true) - done = true - end - current = next - else - current = current.next - end - end - return head, done -end +if not modules then modules = { } end modules ['spac-chr'] = { + version = 1.001, + comment = "companion to spac-chr.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local byte, lower = string.byte, string.lower + +-- beware: attribute copying is bugged ... 
there will be a proper luatex helper +-- for this + +-- to be redone: characters will become tagged spaces instead as then we keep track of +-- spaceskip etc + +trace_characters = false trackers.register("typesetters.characters", function(v) trace_characters = v end) + +report_characters = logs.reporter("typesetting","characters") + +local nodes, node = nodes, node + +local insert_node_after = node.insert_after +local remove_node = nodes.remove -- ! nodes +local copy_node_list = node.copy_list + +local nodepool = nodes.pool +local tasks = nodes.tasks + +local new_penalty = nodepool.penalty +local new_glue = nodepool.glue + +local nodecodes = nodes.nodecodes +local skipcodes = nodes.skipcodes +local glyph_code = nodecodes.glyph +local glue_code = nodecodes.glue + +local space_skip_code = skipcodes["spaceskip"] + +local chardata = characters.data + +local typesetters = typesetters + +local characters = { } +typesetters.characters = characters + +local fonthashes = fonts.hashes +local fontparameters = fonthashes.parameters +local fontcharacters = fonthashes.characters +local fontquads = fonthashes.quads + +local a_character = attributes.private("characters") +local a_alignstate = attributes.private("alignstate") + +local c_zero = byte('0') +local c_period = byte('.') + +local function inject_quad_space(unicode,head,current,fraction) + local attr = current.attr + if fraction ~= 0 then + fraction = fraction * fontquads[current.font] + end + local glue = new_glue(fraction) +-- glue.attr = copy_node_list(attr) + glue.attr = attr + current.attr = nil + glue[a_character] = unicode + head, current = insert_node_after(head,current,glue) + return head, current +end + +local function inject_char_space(unicode,head,current,parent) + local attr = current.attr + local font = current.font + local char = fontcharacters[font][parent] + local glue = new_glue(char and char.width or fontparameters[font].space) + -- glue.attr = copy_node_list(current.attr) + glue.attr = current.attr + current.attr = nil + glue[a_character] = unicode + head, current = insert_node_after(head,current,glue) + return head, current +end + +local function inject_nobreak_space(unicode,head,current,space,spacestretch,spaceshrink) + local attr = current.attr + local glue = new_glue(space,spacestretch,spaceshrink) + local penalty = new_penalty(10000) + -- glue.attr = copy_node_list(attr) + glue.attr = attr + current.attr = nil + -- penalty.attr = attr + glue[a_character] = unicode + head, current = insert_node_after(head,current,penalty) + head, current = insert_node_after(head,current,glue) + return head, current +end + +local methods = { + + -- The next one uses an attribute assigned to the character but still we + -- don't have the 'local' value. 
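-- Aside on the table being built here: every key is a Unicode space character
-- and every value is a function that replaces the glyph by glue, using the
-- helpers defined above. As a worked example, for U+2002 (enspace) the call
-- inject_quad_space(0x2002,head,current,1/2) scales the fraction by the quad
-- of the current font, so with a quad of (say) 12pt the injected glue is
--
--   1/2 * 12pt = 6pt
--
-- The original attribute list is carried over to the glue and the glue is
-- tagged with the character code via the private 'characters' attribute; the
-- glyph node itself is removed afterwards in characters.handler below.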
+ + [0x00A0] = function(head,current) -- nbsp + local para = fontparameters[current.font] + if current[a_alignstate] == 1 then -- flushright + head, current = inject_nobreak_space(0x00A0,head,current,para.space,0,0) + current.subtype = space_skip_code + else + head, current = inject_nobreak_space(0x00A0,head,current,para.space,para.spacestretch,para.spaceshrink) + end + return head, current + end, + + [0x2000] = function(head,current) -- enquad + return inject_quad_space(0x2000,head,current,1/2) + end, + + [0x2001] = function(head,current) -- emquad + return inject_quad_space(0x2001,head,current,1) + end, + + [0x2002] = function(head,current) -- enspace + return inject_quad_space(0x2002,head,current,1/2) + end, + + [0x2003] = function(head,current) -- emspace + return inject_quad_space(0x2003,head,current,1) + end, + + [0x2004] = function(head,current) -- threeperemspace + return inject_quad_space(0x2004,head,current,1/3) + end, + + [0x2005] = function(head,current) -- fourperemspace + return inject_quad_space(0x2005,head,current,1/4) + end, + + [0x2006] = function(head,current) -- sixperemspace + return inject_quad_space(0x2006,head,current,1/6) + end, + + [0x2007] = function(head,current) -- figurespace + return inject_char_space(0x2007,head,current,c_zero) + end, + + [0x2008] = function(head,current) -- punctuationspace + return inject_char_space(0x2008,head,current,c_period) + end, + + [0x2009] = function(head,current) -- breakablethinspace + return inject_quad_space(0x2009,head,current,1/8) -- same as next + end, + + [0x200A] = function(head,current) -- hairspace + return inject_quad_space(0x200A,head,current,1/8) -- same as previous (todo) + end, + + [0x200B] = function(head,current) -- zerowidthspace + return inject_quad_space(0x200B,head,current,0) + end, + + [0x202F] = function(head,current) -- narrownobreakspace + return inject_nobreak_space(0x202F,head,current,fontquads[current.font]/8) + end, + + [0x205F] = function(head,current) -- math thinspace + return inject_nobreak_space(0x205F,head,current,fontparameters[current.font].space/8) + end, + + -- [0xFEFF] = function(head,current) -- zerowidthnobreakspace + -- return head, current + -- end, + +} + +function characters.handler(head) + local current = head + local done = false + while current do + local id = current.id + if id == glyph_code then + local next = current.next + local char = current.char + local method = methods[char] + if method then + if trace_characters then + report_characters("replacing character %C, description %a",char,lower(chardata[char].description)) + end + head = method(head,current) + head = remove_node(head,current,true) + done = true + end + current = next + else + current = current.next + end + end + return head, done +end diff --git a/tex/context/base/spac-hor.lua b/tex/context/base/spac-hor.lua index 09920bd46..36802bfbb 100644 --- a/tex/context/base/spac-hor.lua +++ b/tex/context/base/spac-hor.lua @@ -1,31 +1,31 @@ -if not modules then modules = { } end modules ['spac-hor'] = { - version = 1.001, - comment = "companion to spac-hor.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local match = string.match -local utfbyte = utf.byte -local chardata = characters.data - -local can_have_space = table.tohash { - "lu", "ll", "lt", "lm", "lo", -- letters - -- "mn", "mc", "me", -- marks - "nd", "nl", "no", -- numbers - "ps", "pi", -- initial - -- "pe", "pf", -- final - -- "pc", "pd", "po", -- 
punctuation - "sm", "sc", "sk", "so", -- symbols - -- "zs", "zl", "zp", -- separators - -- "cc", "cf", "cs", "co", "cn", -- others -} - -function commands.autonextspace(str) -- todo: use nexttoken - local ch = match(str,"the letter (.)") or match(str,"the character (.)") - ch = ch and chardata[utfbyte(ch)] - if ch and can_have_space[ch.category] then - context.space() - end -end +if not modules then modules = { } end modules ['spac-hor'] = { + version = 1.001, + comment = "companion to spac-hor.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local match = string.match +local utfbyte = utf.byte +local chardata = characters.data + +local can_have_space = table.tohash { + "lu", "ll", "lt", "lm", "lo", -- letters + -- "mn", "mc", "me", -- marks + "nd", "nl", "no", -- numbers + "ps", "pi", -- initial + -- "pe", "pf", -- final + -- "pc", "pd", "po", -- punctuation + "sm", "sc", "sk", "so", -- symbols + -- "zs", "zl", "zp", -- separators + -- "cc", "cf", "cs", "co", "cn", -- others +} + +function commands.autonextspace(str) -- todo: use nexttoken + local ch = match(str,"the letter (.)") or match(str,"the character (.)") + ch = ch and chardata[utfbyte(ch)] + if ch and can_have_space[ch.category] then + context.space() + end +end diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua index 7d030ab1a..a042945f2 100644 --- a/tex/context/base/spac-ver.lua +++ b/tex/context/base/spac-ver.lua @@ -1,1358 +1,1358 @@ -if not modules then modules = { } end modules ['spac-ver'] = { - version = 1.001, - comment = "companion to spac-ver.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- we also need to call the spacer for inserts! 
- --- todo: directly set skips - --- this code dates from the beginning and is kind of experimental; it --- will be optimized and improved soon --- --- the collapser will be redone with user nodes; also, we might get make --- parskip into an attribute and appy it explicitly thereby getting rid --- of automated injections; eventually i want to get rid of the currently --- still needed tex -> lua -> tex > lua chain (needed because we can have --- expandable settings at the tex end - --- todo: strip baselineskip around display math - -local next, type, tonumber = next, type, tonumber -local gmatch, concat = string.gmatch, table.concat -local ceil, floor, max, min, round, abs = math.ceil, math.floor, math.max, math.min, math.round, math.abs -local texlists, texdimen, texbox = tex.lists, tex.dimen, tex.box -local lpegmatch = lpeg.match -local unpack = unpack or table.unpack -local allocate = utilities.storage.allocate -local todimen = string.todimen -local formatters = string.formatters - -local P, C, R, S, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc - -local nodes, node, trackers, attributes, context = nodes, node, trackers, attributes, context - -local variables = interfaces.variables - -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming - --- vertical space handler - -local trace_vbox_vspacing = false trackers.register("vspacing.vbox", function(v) trace_vbox_vspacing = v end) -local trace_page_vspacing = false trackers.register("vspacing.page", function(v) trace_page_vspacing = v end) -local trace_page_builder = false trackers.register("builders.page", function(v) trace_page_builder = v end) -local trace_collect_vspacing = false trackers.register("vspacing.collect", function(v) trace_collect_vspacing = v end) -local trace_vspacing = false trackers.register("vspacing.spacing", function(v) trace_vspacing = v end) -local trace_vsnapping = false trackers.register("vspacing.snapping", function(v) trace_vsnapping = v end) -local trace_vpacking = false trackers.register("vspacing.packing", function(v) trace_vpacking = v end) - -local report_vspacing = logs.reporter("vspacing","spacing") -local report_collapser = logs.reporter("vspacing","collapsing") -local report_snapper = logs.reporter("vspacing","snapping") -local report_page_builder = logs.reporter("builders","page") - -local a_skipcategory = attributes.private('skipcategory') -local a_skippenalty = attributes.private('skippenalty') -local a_skiporder = attributes.private('skiporder') ------ snap_category = attributes.private('snapcategory') -local a_snapmethod = attributes.private('snapmethod') -local a_snapvbox = attributes.private('snapvbox') - -local find_node_tail = node.tail -local free_node = node.free -local free_node_list = node.flush_list -local copy_node = node.copy -local traverse_nodes = node.traverse -local traverse_nodes_id = node.traverse_id -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after -local remove_node = nodes.remove -local count_nodes = nodes.count -local nodeidstostring = nodes.idstostring -local hpack_node = node.hpack -local vpack_node = node.vpack -local writable_spec = nodes.writable_spec -local listtoutf = nodes.listtoutf - -local nodepool = nodes.pool - -local new_penalty = nodepool.penalty -local new_kern = nodepool.kern -local new_rule = nodepool.rule -local new_gluespec = nodepool.gluespec - -local nodecodes = nodes.nodecodes -local skipcodes = nodes.skipcodes -local fillcodes = nodes.fillcodes - -local penalty_code = nodecodes.penalty 
-local kern_code = nodecodes.kern -local glue_code = nodecodes.glue -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local whatsit_code = nodecodes.whatsit - -local userskip_code = skipcodes.userskip - -local vspacing = builders.vspacing or { } -builders.vspacing = vspacing - -local vspacingdata = vspacing.data or { } -vspacing.data = vspacingdata - -vspacingdata.snapmethods = vspacingdata.snapmethods or { } -local snapmethods = vspacingdata.snapmethods --maybe some older code can go - -storage.register("builders/vspacing/data/snapmethods", snapmethods, "builders.vspacing.data.snapmethods") - -local default = { - maxheight = true, - maxdepth = true, - strut = true, - hfraction = 1, - dfraction = 1, -} - -local fractions = { - minheight = "hfraction", maxheight = "hfraction", - mindepth = "dfraction", maxdepth = "dfraction", - top = "tlines", bottom = "blines", -} - -local values = { - offset = "offset" -} - -local colonsplitter = lpeg.splitat(":") - -local function listtohash(str) - local t = { } - for s in gmatch(str,"[^, ]+") do - local key, detail = lpegmatch(colonsplitter,s) - local v = variables[key] - if v then - t[v] = true - if detail then - local k = fractions[key] - if k then - detail = tonumber("0" .. detail) - if detail then - t[k] = detail - end - else - k = values[key] - if k then - detail = todimen(detail) - if detail then - t[k] = detail - end - end - end - end - else - detail = tonumber("0" .. key) - if detail then - t.hfraction, t.dfraction = detail, detail - end - end - end - if next(t) then - t.hfraction = t.hfraction or 1 - t.dfraction = t.dfraction or 1 - return t - else - return default - end -end - -function vspacing.definesnapmethod(name,method) - local n = #snapmethods + 1 - local t = listtohash(method) - snapmethods[n] = t - t.name, t.specification = name, method - context(n) -end - --- local rule_id = nodecodes.rule --- local vlist_id = nodecodes.vlist --- function nodes.makevtop(n) --- if n.id == vlist_id then --- local list = n.list --- local height = (list and list.id <= rule_id and list.height) or 0 --- n.depth = n.depth - height + n.height --- n.height = height --- end --- end - -local reference = nodes.reference - -local function validvbox(parentid,list) - if parentid == hlist_code then - local id = list.id - if id == whatsit_code then -- check for initial par subtype - list = list.next - if not next then - return nil - end - end - local done = nil - for n in traverse_nodes(list) do - local id = n.id - if id == vlist_code or id == hlist_code then - if done then - return nil - else - done = n - end - elseif id == glue_code or id == penalty_code then - -- go on - else - return nil -- whatever - end - end - if done then - local id = done.id - if id == hlist_code then - return validvbox(id,done.list) - end - end - return done -- only one vbox - end -end - -local function already_done(parentid,list,a_snapmethod) -- todo: done when only boxes and all snapped - -- problem: any snapped vbox ends up in a line - if list and parentid == hlist_code then - local id = list.id - if id == whatsit_code then -- check for initial par subtype - list = list.next - if not next then - return false - end - end ---~ local i = 0 - for n in traverse_nodes(list) do - local id = n.id ---~ i = i + 1 print(i,nodecodes[id],n[a_snapmethod]) - if id == hlist_code or id == vlist_code then - local a = n[a_snapmethod] - if not a then - -- return true -- not snapped at all - elseif a == 0 then - return true -- already snapped - end - elseif id == glue_code or id == 
penalty_code then -- whatsit is weak spot - -- go on - else - return false -- whatever - end - end - end - return false -end - - --- quite tricky: ceil(-something) => -0 - -local function ceiled(n) - if n < 0 or n < 0.01 then - return 0 - else - return ceil(n) - end -end - -local function floored(n) - if n < 0 or n < 0.01 then - return 0 - else - return floor(n) - end -end - --- check variables.none etc - -local function snap_hlist(where,current,method,height,depth) -- method.strut is default - local list = current.list - local t = trace_vsnapping and { } - if t then - t[#t+1] = formatters["list content: %s"](nodes.toutf(list)) - t[#t+1] = formatters["parent id: %s"](reference(current)) - t[#t+1] = formatters["snap method: %s"](method.name) - t[#t+1] = formatters["specification: %s"](method.specification) - end - local snapht, snapdp - if method["local"] then - -- snapping is done immediately here - snapht, snapdp = texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth - if t then - t[#t+1] = formatters["local: snapht %p snapdp %p"](snapht,snapdp) - end - elseif method["global"] then - snapht, snapdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth - if t then - t[#t+1] = formatters["global: snapht %p snapdp %p"](snapht,snapdp) - end - else - -- maybe autolocal - -- snapping might happen later in the otr - snapht, snapdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth - local lsnapht, lsnapdp = texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth - if snapht ~= lsnapht and snapdp ~= lsnapdp then - snapht, snapdp = lsnapht, lsnapdp - end - if t then - t[#t+1] = formatters["auto: snapht %p snapdp %p"](snapht,snapdp) - end - end - local h, d = height or current.height, depth or current.depth - local hr, dr, ch, cd = method.hfraction or 1, method.dfraction or 1, h, d - local tlines, blines = method.tlines or 1, method.blines or 1 - local done, plusht, plusdp = false, snapht, snapdp - local snaphtdp = snapht + snapdp - - if method.none then - plusht, plusdp = 0, 0 - if t then - t[#t+1] = "none: plusht 0pt plusdp 0pt" - end - end - if method.halfline then -- extra halfline - plusht, plusdp = plusht + snaphtdp/2, plusdp + snaphtdp/2 - if t then - t[#t+1] = formatters["halfline: plusht %p plusdp %p"](plusht,plusdp) - end - end - if method.line then -- extra line - plusht, plusdp = plusht + snaphtdp, plusdp + snaphtdp - if t then - t[#t+1] = formatters["line: plusht %p plusdp %p"](plusht,plusdp) - end - end - - if method.first then - local thebox = current - local id = thebox.id - if id == hlist_code then - thebox = validvbox(id,thebox.list) - id = thebox and thebox.id - end - if thebox and id == vlist_code then - local list, lh, ld = thebox.list - for n in traverse_nodes_id(hlist_code,list) do - lh, ld = n.height, n.depth - break - end - if lh then - local ht, dp = thebox.height, thebox.depth - if t then - t[#t+1] = formatters["first line: height %p depth %p"](lh,ld) - t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp) - end - local delta = h - lh - ch, cd = lh, delta + d - h, d = ch, cd - local shifted = hpack_node(current.list) - shifted.shift = delta - current.list = shifted - done = true - if t then - t[#t+1] = formatters["first: height %p depth %p shift %p"](ch,cd,delta) - end - elseif t then - t[#t+1] = "first: not done, no content" - end - elseif t then - t[#t+1] = "first: not done, no vbox" - end - elseif method.last then - local thebox = current - local id = thebox.id - if id == hlist_code then - thebox = 
validvbox(id,thebox.list) - id = thebox and thebox.id - end - if thebox and id == vlist_code then - local list, lh, ld = thebox.list - for n in traverse_nodes_id(hlist_code,list) do - lh, ld = n.height, n.depth - end - if lh then - local ht, dp = thebox.height, thebox.depth - if t then - t[#t+1] = formatters["last line: height %p depth %p" ](lh,ld) - t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp) - end - local delta = d - ld - cd, ch = ld, delta + h - h, d = ch, cd - local shifted = hpack_node(current.list) - shifted.shift = delta - current.list = shifted - done = true - if t then - t[#t+1] = formatters["last: height %p depth %p shift %p"](ch,cd,delta) - end - elseif t then - t[#t+1] = "last: not done, no content" - end - elseif t then - t[#t+1] = "last: not done, no vbox" - end - end - if method.minheight then - ch = floored((h-hr*snapht)/snaphtdp)*snaphtdp + plusht - if t then - t[#t+1] = formatters["minheight: %p"](ch) - end - elseif method.maxheight then - ch = ceiled((h-hr*snapht)/snaphtdp)*snaphtdp + plusht - if t then - t[#t+1] = formatters["maxheight: %p"](ch) - end - else - ch = plusht - if t then - t[#t+1] = formatters["set height: %p"](ch) - end - end - if method.mindepth then - cd = floored((d-dr*snapdp)/snaphtdp)*snaphtdp + plusdp - if t then - t[#t+1] = formatters["mindepth: %p"](cd) - end - elseif method.maxdepth then - cd = ceiled((d-dr*snapdp)/snaphtdp)*snaphtdp + plusdp - if t then - t[#t+1] = formatters["maxdepth: %p"](cd) - end - else - cd = plusdp - if t then - t[#t+1] = formatters["set depth: %p"](cd) - end - end - if method.top then - ch = ch + tlines * snaphtdp - if t then - t[#t+1] = formatters["top height: %p"](ch) - end - end - if method.bottom then - cd = cd + blines * snaphtdp - if t then - t[#t+1] = formatters["bottom depth: %p"](cd) - end - end - - local offset = method.offset - if offset then - -- we need to set the attr - if t then - t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth) - end - local shifted = hpack_node(current.list) - shifted.shift = offset - current.list = shifted - if t then - t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth) - end - shifted[a_snapmethod] = 0 - current[a_snapmethod] = 0 - end - if not height then - current.height = ch - if t then - t[#t+1] = formatters["forced height: %p"](ch) - end - end - if not depth then - current.depth = cd - if t then - t[#t+1] = formatters["forced depth: %p"](cd) - end - end - local lines = (ch+cd)/snaphtdp - if t then - local original = (h+d)/snaphtdp - local whatever = (ch+cd)/(texdimen.globalbodyfontstrutheight + texdimen.globalbodyfontstrutdepth) - t[#t+1] = formatters["final lines: %s -> %s (%s)"](original,lines,whatever) - t[#t+1] = formatters["final height: %p -> %p"](h,ch) - t[#t+1] = formatters["final depth: %p -> %p"](d,cd) - end - if t then - report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[current.id],t) - end - return h, d, ch, cd, lines -end - -local function snap_topskip(current,method) - local spec = current.spec - local w = spec.width - local wd = w - if spec.writable then - spec.width, wd = 0, 0 - end - return w, wd -end - -local categories = allocate { - [0] = 'discard', - [1] = 'largest', - [2] = 'force' , - [3] = 'penalty', - [4] = 'add' , - [5] = 'disable', - [6] = 'nowhite', - [7] = 'goback', - [8] = 'together' -} - -vspacing.categories = categories - -function vspacing.tocategories(str) - local t = { } - for 
s in gmatch(str,"[^, ]") do - local n = tonumber(s) - if n then - t[categories[n]] = true - else - t[b] = true - end - end - return t -end - -function vspacing.tocategory(str) - if type(str) == "string" then - return set.tonumber(vspacing.tocategories(str)) - else - return set.tonumber({ [categories[str]] = true }) - end -end - -vspacingdata.map = vspacingdata.map or { } -- allocate ? -vspacingdata.skip = vspacingdata.skip or { } -- allocate ? - -storage.register("builders/vspacing/data/map", vspacingdata.map, "builders.vspacing.data.map") -storage.register("builders/vspacing/data/skip", vspacingdata.skip, "builders.vspacing.data.skip") - -do -- todo: interface.variables - - vspacing.fixed = false - - local map = vspacingdata.map - local skip = vspacingdata.skip - - local multiplier = C(S("+-")^0 * R("09")^1) * P("*") - local category = P(":") * C(P(1)^1) - local keyword = C((1-category)^1) - local splitter = (multiplier + Cc(1)) * keyword * (category + Cc(false)) - - local k_fixed, k_flexible, k_category, k_penalty, k_order = variables.fixed, variables.flexible, "category", "penalty", "order" - - -- This will change: just node.write and we can store the values in skips which - -- then obeys grouping - - local fixedblankskip = context.fixedblankskip - local flexibleblankskip = context.flexibleblankskip - local setblankcategory = context.setblankcategory - local setblankorder = context.setblankorder - local setblankpenalty = context.setblankpenalty - local setblankhandling = context.setblankhandling - local flushblankhandling = context.flushblankhandling - local addpredefinedblankskip = context.addpredefinedblankskip - local addaskedblankskip = context.addaskedblankskip - - local function analyze(str,oldcategory) -- we could use shorter names - for s in gmatch(str,"([^ ,]+)") do - local amount, keyword, detail = lpegmatch(splitter,s) -- the comma splitter can be merged - if not keyword then - report_vspacing("unknown directive %a",s) - else - local mk = map[keyword] - if mk then - category = analyze(mk,category) - elseif keyword == k_fixed then - fixedblankskip() - elseif keyword == k_flexible then - flexibleblankskip() - elseif keyword == k_category then - local category = tonumber(detail) - if category then - setblankcategory(category) - if category ~= oldcategory then - flushblankhandling() - oldcategory = category - end - end - elseif keyword == k_order and detail then - local order = tonumber(detail) - if order then - setblankorder(order) - end - elseif keyword == k_penalty and detail then - local penalty = tonumber(detail) - if penalty then - setblankpenalty(penalty) - end - else - amount = tonumber(amount) or 1 - local sk = skip[keyword] - if sk then - addpredefinedblankskip(amount,keyword) - else -- no check - addaskedblankskip(amount,keyword) - end - end - end - end - return category - end - - local pushlogger = context.pushlogger - local startblankhandling = context.startblankhandling - local stopblankhandling = context.stopblankhandling - local poplogger = context.poplogger - - function vspacing.analyze(str) - if trace_vspacing then - pushlogger(report_vspacing) - startblankhandling() - analyze(str,1) - stopblankhandling() - poplogger() - else - startblankhandling() - analyze(str,1) - stopblankhandling() - end - end - - -- - - function vspacing.setmap(from,to) - map[from] = to - end - - function vspacing.setskip(key,value,grid) - if value ~= "" then - if grid == "" then grid = value end - skip[key] = { value, grid } - end - end - -end - --- implementation - -local trace_list, 
tracing_info, before, after = { }, false, "", "" - -local function nodes_to_string(head) - local current, t = head, { } - while current do - local id = current.id - local ty = nodecodes[id] - if id == penalty_code then - t[#t+1] = formatters["%s:%s"](ty,current.penalty) - elseif id == glue_code then -- or id == kern_code then -- to be tested - t[#t+1] = formatters["%s:%p"](ty,current) - elseif id == kern_code then - t[#t+1] = formatters["%s:%p"](ty,current.kern) - else - t[#t+1] = ty - end - current = current.next - end - return concat(t," + ") -end - -local function reset_tracing(head) - trace_list, tracing_info, before, after = { }, false, nodes_to_string(head), "" -end - -local function trace_skip(str,sc,so,sp,data) - trace_list[#trace_list+1] = { "skip", formatters["%s | %p | category %s | order %s | penalty %s"](str, data, sc or "-", so or "-", sp or "-") } - tracing_info = true -end - -local function trace_natural(str,data) - trace_list[#trace_list+1] = { "skip", formatters["%s | %p"](str, data) } - tracing_info = true -end - -local function trace_info(message, where, what) - trace_list[#trace_list+1] = { "info", formatters["%s: %s/%s"](message,where,what) } -end - -local function trace_node(what) - local nt = nodecodes[what.id] - local tl = trace_list[#trace_list] - if tl and tl[1] == "node" then - trace_list[#trace_list] = { "node", formatters["%s + %s"](tl[2],nt) } - else - trace_list[#trace_list+1] = { "node", nt } - end -end - -local function trace_done(str,data) - if data.id == penalty_code then - trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,data.penalty) } - else - trace_list[#trace_list+1] = { "glue", formatters["%s | %p"](str,data) } - end - tracing_info = true -end - -local function show_tracing(head) - if tracing_info then - after = nodes_to_string(head) - for i=1,#trace_list do - local tag, text = unpack(trace_list[i]) - if tag == "info" then - report_collapser(text) - else - report_collapser(" %s: %s",tag,text) - end - end - report_collapser("before: %s",before) - report_collapser("after : %s",after) - end -end - --- alignment box begin_of_par vmode_par hmode_par insert penalty before_display after_display - -local skipcodes = nodes.skipcodes - -local userskip_code = skipcodes.userskip -local lineskip_code = skipcodes.lineskip -local baselineskip_code = skipcodes.baselineskip -local parskip_code = skipcodes.parskip -local abovedisplayskip_code = skipcodes.abovedisplayskip -local belowdisplayskip_code = skipcodes.belowdisplayskip -local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip -local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip -local topskip_code = skipcodes.topskip -local splittopskip_code = skipcodes.splittopskip - -local free_glue_node = free_node -local discard, largest, force, penalty, add, disable, nowhite, goback, together = 0, 1, 2, 3, 4, 5, 6, 7, 8 - --- local function free_glue_node(n) --- -- free_node(n.spec) --- print("before",n) --- logs.flush() --- free_node(n) --- print("after") --- logs.flush() --- end - -function vspacing.snapbox(n,how) - local sv = snapmethods[how] - if sv then - local box = texbox[n] - local list = box.list - if list then - local s = list[a_snapmethod] - if s == 0 then - if trace_vsnapping then - -- report_snapper("box list not snapped, already done") - end - else - local ht, dp = box.height, box.depth - if false then -- todo: already_done - -- assume that the box is already snapped - if trace_vsnapping then - report_snapper("box list already snapped at (%p,%p): %s", - 
ht,dp,listtoutf(list)) - end - else - local h, d, ch, cd, lines = snap_hlist("box",box,sv,ht,dp) - box.height, box.depth = ch, cd - if trace_vsnapping then - report_snapper("box list snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s", - h,d,ch,cd,sv.name,sv.specification,"direct",lines,listtoutf(list)) - end - box[a_snapmethod] = 0 -- - list[a_snapmethod] = 0 -- yes or no - end - end - end - end -end - -local function forced_skip(head,current,width,where,trace) - if where == "after" then - head, current = insert_node_after(head,current,new_rule(0,0,0)) - head, current = insert_node_after(head,current,new_kern(width)) - head, current = insert_node_after(head,current,new_rule(0,0,0)) - else - local c = current - head, current = insert_node_before(head,current,new_rule(0,0,0)) - head, current = insert_node_before(head,current,new_kern(width)) - head, current = insert_node_before(head,current,new_rule(0,0,0)) - current = c - end - if trace then - report_vspacing("inserting forced skip of %p",width) - end - return head, current -end - --- penalty only works well when before skip - -local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also pass tail - if trace then - reset_tracing(head) - end - local current, oldhead = head, head - local glue_order, glue_data, force_glue = 0, nil, false - local penalty_order, penalty_data, natural_penalty = 0, nil, nil - local parskip, ignore_parskip, ignore_following, ignore_whitespace, keep_together = nil, false, false, false, false - -- - -- todo: keep_together: between headers - -- - local function flush(why) - if penalty_data then - local p = new_penalty(penalty_data) - if trace then trace_done("flushed due to " .. why,p) end - head = insert_node_before(head,current,p) - end - if glue_data then - if force_glue then - if trace then trace_done("flushed due to " .. why,glue_data) end - head = forced_skip(head,current,glue_data.spec.width,"before",trace) - free_glue_node(glue_data) - elseif glue_data.spec.writable then - if trace then trace_done("flushed due to " .. 
why,glue_data) end - head = insert_node_before(head,current,glue_data) - else - free_glue_node(glue_data) - end - end - if trace then trace_node(current) end - glue_order, glue_data, force_glue = 0, nil, false - penalty_order, penalty_data, natural_penalty = 0, nil, nil - parskip, ignore_parskip, ignore_following, ignore_whitespace = nil, false, false, false - end - if trace_vsnapping then - report_snapper("global ht/dp = %p/%p, local ht/dp = %p/%p", - texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth, - texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth) - end - if trace then trace_info("start analyzing",where,what) end - while current do - local id = current.id - if id == hlist_code or id == vlist_code then - -- needs checking, why so many calls - if snap then - local list = current.list - local s = current[a_snapmethod] - if not s then - -- if trace_vsnapping then - -- report_snapper("mvl list not snapped") - -- end - elseif s == 0 then - if trace_vsnapping then - report_snapper("mvl %a not snapped, already done: %s",nodecodes[id],listtoutf(list)) - end - else - local sv = snapmethods[s] - if sv then - -- check if already snapped - if list and already_done(id,list,a_snapmethod) then - local ht, dp = current.height, current.depth - -- assume that the box is already snapped - if trace_vsnapping then - report_snapper("mvl list already snapped at (%p,%p): %s",ht,dp,listtoutf(list)) - end - else - local h, d, ch, cd, lines = snap_hlist("mvl",current,sv) - if trace_vsnapping then - report_snapper("mvl %a snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s", - nodecodes[id],h,d,ch,cd,sv.name,sv.specification,where,lines,listtoutf(list)) - end - end - elseif trace_vsnapping then - report_snapper("mvl %a not snapped due to unknown snap specification: %s",nodecodes[id],listtoutf(list)) - end - current[a_snapmethod] = 0 - end - else - -- - end - -- tex.prevdepth = 0 - flush("list") - current = current.next - elseif id == penalty_code then - -- natural_penalty = current.penalty - -- if trace then trace_done("removed penalty",current) end - -- head, current = remove_node(head, current, true) - current = current.next - elseif id == kern_code then - if snap and trace_vsnapping and current.kern ~= 0 then - report_snapper("kern of %p kept",current.kern) - end - flush("kern") - current = current.next - elseif id == glue_code then - local subtype = current.subtype - if subtype == userskip_code then - local sc = current[a_skipcategory] -- has no default, no unset (yet) - local so = current[a_skiporder] or 1 -- has 1 default, no unset (yet) - local sp = current[a_skippenalty] -- has no default, no unset (yet) - if sp and sc == penalty then - if not penalty_data then - penalty_data = sp - elseif penalty_order < so then - penalty_order, penalty_data = so, sp - elseif penalty_order == so and sp > penalty_data then - penalty_data = sp - end - if trace then trace_skip("penalty in skip",sc,so,sp,current) end - head, current = remove_node(head, current, true) - elseif not sc then -- if not sc then - if glue_data then - if trace then trace_done("flush",glue_data) end - head = insert_node_before(head,current,glue_data) - if trace then trace_natural("natural",current) end - current = current.next - else - -- not look back across head - local previous = current.prev - if previous and previous.id == glue_code and previous.subtype == userskip_code then - local ps = previous.spec - if ps.writable then - local cs = current.spec - if cs.writable and ps.stretch_order == 0 
and ps.shrink_order == 0 and cs.stretch_order == 0 and cs.shrink_order == 0 then - local pw, pp, pm = ps.width, ps.stretch, ps.shrink - local cw, cp, cm = cs.width, cs.stretch, cs.shrink - -- ps = writable_spec(previous) -- no writable needed here - -- ps.width, ps.stretch, ps.shrink = pw + cw, pp + cp, pm + cm - previous.spec = new_gluespec(pw + cw, pp + cp, pm + cm) -- else topskip can disappear - if trace then trace_natural("removed",current) end - head, current = remove_node(head, current, true) - -- current = previous - if trace then trace_natural("collapsed",previous) end - -- current = current.next - else - if trace then trace_natural("filler",current) end - current = current.next - end - else - if trace then trace_natural("natural (no prev spec)",current) end - current = current.next - end - else - if trace then trace_natural("natural (no prev)",current) end - current = current.next - end - end - glue_order, glue_data = 0, nil - elseif sc == disable then - ignore_following = true - if trace then trace_skip("disable",sc,so,sp,current) end - head, current = remove_node(head, current, true) - elseif sc == together then - keep_together = true - if trace then trace_skip("together",sc,so,sp,current) end - head, current = remove_node(head, current, true) - elseif sc == nowhite then - ignore_whitespace = true - head, current = remove_node(head, current, true) - elseif sc == discard then - if trace then trace_skip("discard",sc,so,sp,current) end - head, current = remove_node(head, current, true) - elseif ignore_following then - if trace then trace_skip("disabled",sc,so,sp,current) end - head, current = remove_node(head, current, true) - elseif not glue_data then - if trace then trace_skip("assign",sc,so,sp,current) end - glue_order = so - head, current, glue_data = remove_node(head, current) - elseif glue_order < so then - if trace then trace_skip("force",sc,so,sp,current) end - glue_order = so - free_glue_node(glue_data) - head, current, glue_data = remove_node(head, current) - elseif glue_order == so then - -- is now exclusive, maybe support goback as combi, else why a set - if sc == largest then - local cs, gs = current.spec, glue_data.spec - local cw, gw = cs.width, gs.width - if cw > gw then - if trace then trace_skip("largest",sc,so,sp,current) end - free_glue_node(glue_data) -- also free spec - head, current, glue_data = remove_node(head, current) - else - if trace then trace_skip("remove smallest",sc,so,sp,current) end - head, current = remove_node(head, current, true) - end - elseif sc == goback then - if trace then trace_skip("goback",sc,so,sp,current) end - free_glue_node(glue_data) -- also free spec - head, current, glue_data = remove_node(head, current) - elseif sc == force then - -- last one counts, some day we can provide an accumulator and largest etc - -- but not now - if trace then trace_skip("force",sc,so,sp,current) end - free_glue_node(glue_data) -- also free spec - head, current, glue_data = remove_node(head, current) - elseif sc == penalty then - if trace then trace_skip("penalty",sc,so,sp,current) end - free_glue_node(glue_data) -- also free spec - glue_data = nil - head, current = remove_node(head, current, true) - elseif sc == add then - if trace then trace_skip("add",sc,so,sp,current) end - -- local old, new = glue_data.spec, current.spec - local old, new = writable_spec(glue_data), current.spec - old.width = old.width + new.width - old.stretch = old.stretch + new.stretch - old.shrink = old.shrink + new.shrink - -- toto: order - head, current = remove_node(head, 
current, true) - else - if trace then trace_skip("unknown",sc,so,sp,current) end - head, current = remove_node(head, current, true) - end - else - if trace then trace_skip("unknown",sc,so,sp,current) end - head, current = remove_node(head, current, true) - end - if sc == force then - force_glue = true - end - elseif subtype == lineskip_code then - if snap then - local s = current[a_snapmethod] - if s and s ~= 0 then - current[a_snapmethod] = 0 - if current.spec.writable then - local spec = writable_spec(current) - spec.width = 0 - if trace_vsnapping then - report_snapper("lineskip set to zero") - end - end - else - if trace then trace_skip("lineskip",sc,so,sp,current) end - flush("lineskip") - end - else - if trace then trace_skip("lineskip",sc,so,sp,current) end - flush("lineskip") - end - current = current.next - elseif subtype == baselineskip_code then - if snap then - local s = current[a_snapmethod] - if s and s ~= 0 then - current[a_snapmethod] = 0 - if current.spec.writable then - local spec = writable_spec(current) - spec.width = 0 - if trace_vsnapping then - report_snapper("baselineskip set to zero") - end - end - else - if trace then trace_skip("baselineskip",sc,so,sp,current) end - flush("baselineskip") - end - else - if trace then trace_skip("baselineskip",sc,so,sp,current) end - flush("baselineskip") - end - current = current.next - elseif subtype == parskip_code then - -- parskip always comes later - if ignore_whitespace then - if trace then trace_natural("ignored parskip",current) end - head, current = remove_node(head, current, true) - elseif glue_data then - local ps, gs = current.spec, glue_data.spec - if ps.writable and gs.writable and ps.width > gs.width then - glue_data.spec = copy_node(ps) - if trace then trace_natural("taking parskip",current) end - else - if trace then trace_natural("removed parskip",current) end - end - head, current = remove_node(head, current, true) - else - if trace then trace_natural("honored parskip",current) end - head, current, glue_data = remove_node(head, current) - end - elseif subtype == topskip_code or subtype == splittopskip_code then - if snap then - local s = current[a_snapmethod] - if s and s ~= 0 then - current[a_snapmethod] = 0 - local sv = snapmethods[s] - local w, cw = snap_topskip(current,sv) - if trace_vsnapping then - report_snapper("topskip snapped from %p to %p for %a",w,cw,where) - end - else - if trace then trace_skip("topskip",sc,so,sp,current) end - flush("topskip") - end - else - if trace then trace_skip("topskip",sc,so,sp,current) end - flush("topskip") - end - current = current.next - elseif subtype == abovedisplayskip_code then - -- - if trace then trace_skip("above display skip (normal)",sc,so,sp,current) end - flush("above display skip (normal)") - current = current.next - -- - elseif subtype == belowdisplayskip_code then - -- - if trace then trace_skip("below display skip (normal)",sc,so,sp,current) end - flush("below display skip (normal)") - current = current.next - -- - elseif subtype == abovedisplayshortskip_code then - -- - if trace then trace_skip("above display skip (short)",sc,so,sp,current) end - flush("above display skip (short)") - current = current.next - -- - elseif subtype == belowdisplayshortskip_code then - -- - if trace then trace_skip("below display skip (short)",sc,so,sp,current) end - flush("below display skip (short)") - current = current.next - -- - else -- other glue - if snap and trace_vsnapping and current.spec.writable and current.spec.width ~= 0 then - report_snapper("glue %p of type %a 
kept",current.spec.width,skipcodes[subtype]) - --~ current.spec.width = 0 - end - if trace then trace_skip(formatted["glue of type %a"](subtype),sc,so,sp,current) end - flush("some glue") - current = current.next - end - else - flush("something else") - current = current.next - end - end - if trace then trace_info("stop analyzing",where,what) end - -- if natural_penalty and (not penalty_data or natural_penalty > penalty_data) then - -- penalty_data = natural_penalty - -- end - if trace and (glue_data or penalty_data) then - trace_info("start flushing",where,what) - end - local tail - if penalty_data then - tail = find_node_tail(head) - local p = new_penalty(penalty_data) - if trace then trace_done("result",p) end - head, tail = insert_node_after(head,tail,p) - end - if glue_data then - if not tail then tail = find_node_tail(head) end - if trace then trace_done("result",glue_data) end - if force_glue then - head, tail = forced_skip(head,tail,glue_data.spec.width,"after",trace) - free_glue_node(glue_data) - else - head, tail = insert_node_after(head,tail,glue_data) - end - end - if trace then - if glue_data or penalty_data then - trace_info("stop flushing",where,what) - end - show_tracing(head) - if oldhead ~= head then - trace_info("head has been changed from %a to %a",nodecodes[oldhead.id],nodecodes[head.id]) - end - end - return head, true -end - --- alignment after_output end box new_graf vmode_par hmode_par insert penalty before_display after_display --- \par -> vmode_par --- --- status.best_page_break --- tex.lists.best_page_break --- tex.lists.best_size (natural size to best_page_break) --- tex.lists.least_page_cost (badness of best_page_break) --- tex.lists.page_head --- tex.lists.contrib_head - -local stackhead, stacktail, stackhack = nil, nil, false - -local function report(message,lst) - report_vspacing(message,count_nodes(lst,true),nodeidstostring(lst)) -end - -function vspacing.pagehandler(newhead,where) - -- local newhead = texlists.contrib_head - if newhead then - local newtail = find_node_tail(newhead) -- best pass that tail, known anyway - local flush = false - stackhack = true -- todo: only when grid snapping once enabled - for n in traverse_nodes(newhead) do -- we could just look for glue nodes - local id = n.id - if id ~= glue_code then - flush = true - elseif n.subtype == userskip_code then - if n[a_skipcategory] then - stackhack = true - else - flush = true - end - else - -- tricky - end - end - if flush then - if stackhead then - if trace_collect_vspacing then report("appending %s nodes to stack (final): %s",newhead) end - stacktail.next = newhead - newhead.prev = stacktail - newhead = stackhead - stackhead, stacktail = nil, nil - end - if stackhack then - stackhack = false - if trace_collect_vspacing then report("processing %s nodes: %s",newhead) end ---~ texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod) - newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod) - else - if trace_collect_vspacing then report("flushing %s nodes: %s",newhead) end ---~ texlists.contrib_head = newhead - end - else - if stackhead then - if trace_collect_vspacing then report("appending %s nodes to stack (intermediate): %s",newhead) end - stacktail.next = newhead - newhead.prev = stacktail - else - if trace_collect_vspacing then report("storing %s nodes in stack (initial): %s",newhead) end - stackhead = newhead - end - stacktail = newtail - -- texlists.contrib_head = nil - newhead = nil - end - end - return newhead -end - 
-local ignore = table.tohash { - "split_keep", - "split_off", - -- "vbox", -} - -function vspacing.vboxhandler(head,where) - if head and not ignore[where] and head.next then - -- starttiming(vspacing) - head = collapser(head,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper - -- stoptiming(vspacing) - end - return head -end - -function vspacing.collapsevbox(n) -- for boxes but using global a_snapmethod - local list = texbox[n].list - if list then - -- starttiming(vspacing) - texbox[n].list = vpack_node(collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod)) - -- stoptiming(vspacing) - end -end - --- We will split this module so a few locals are repeated. Also this will be --- rewritten. - -nodes.builders = nodes.builder or { } -local builders = nodes.builders - -local actions = nodes.tasks.actions("vboxbuilders") - -function builders.vpack_filter(head,groupcode,size,packtype,maxdepth,direction) - local done = false - if head then - starttiming(builders) - if trace_vpacking then - local before = nodes.count(head) - head, done = actions(head,groupcode,size,packtype,maxdepth,direction) - local after = nodes.count(head) - if done then - nodes.processors.tracer("vpack","changed",head,groupcode,before,after,true) - else - nodes.processors.tracer("vpack","unchanged",head,groupcode,before,after,true) - end - else - head, done = actions(head,groupcode) - end - stoptiming(builders) - end - return head, done -end - --- This one is special in the sense that it has no head and we operate on the mlv. Also, --- we need to do the vspacing last as it removes items from the mvl. - -local actions = nodes.tasks.actions("mvlbuilders") - -local function report(groupcode,head) - report_page_builder("trigger: %s",groupcode) - report_page_builder(" vsize : %p",tex.vsize) - report_page_builder(" pagegoal : %p",tex.pagegoal) - report_page_builder(" pagetotal: %p",tex.pagetotal) - report_page_builder(" list : %s",head and nodeidstostring(head) or "") -end - -function builders.buildpage_filter(groupcode) - local head, done = texlists.contrib_head, false - -- if head and head.next and head.next.id == hlist_code and head.next.width == 1 then - -- report_page_builder("trigger otr calculations") - -- free_node_list(head) - -- head = nil - -- end - if head then - starttiming(builders) - if trace_page_builder then - report(groupcode,head) - end - head, done = actions(head,groupcode) - stoptiming(builders) - -- -- doesn't work here (not passed on?) 
- -- tex.pagegoal = tex.vsize - tex.dimen.d_page_floats_inserted_top - tex.dimen.d_page_floats_inserted_bottom - texlists.contrib_head = head - return done and head or true - else - if trace_page_builder then - report(groupcode) - end - return nil, false - end -end - -callbacks.register('vpack_filter', builders.vpack_filter, "vertical spacing etc") -callbacks.register('buildpage_filter', builders.buildpage_filter, "vertical spacing etc (mvl)") - -statistics.register("v-node processing time", function() - return statistics.elapsedseconds(builders) -end) - --- interface - -commands.vspacing = vspacing.analyze -commands.vspacingsetamount = vspacing.setskip -commands.vspacingdefine = vspacing.setmap -commands.vspacingcollapse = vspacing.collapsevbox -commands.vspacingsnap = vspacing.snapbox +if not modules then modules = { } end modules ['spac-ver'] = { + version = 1.001, + comment = "companion to spac-ver.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- we also need to call the spacer for inserts! + +-- todo: directly set skips + +-- this code dates from the beginning and is kind of experimental; it +-- will be optimized and improved soon +-- +-- the collapser will be redone with user nodes; also, we might get make +-- parskip into an attribute and appy it explicitly thereby getting rid +-- of automated injections; eventually i want to get rid of the currently +-- still needed tex -> lua -> tex > lua chain (needed because we can have +-- expandable settings at the tex end + +-- todo: strip baselineskip around display math + +local next, type, tonumber = next, type, tonumber +local gmatch, concat = string.gmatch, table.concat +local ceil, floor, max, min, round, abs = math.ceil, math.floor, math.max, math.min, math.round, math.abs +local texlists, texdimen, texbox = tex.lists, tex.dimen, tex.box +local lpegmatch = lpeg.match +local unpack = unpack or table.unpack +local allocate = utilities.storage.allocate +local todimen = string.todimen +local formatters = string.formatters + +local P, C, R, S, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc + +local nodes, node, trackers, attributes, context = nodes, node, trackers, attributes, context + +local variables = interfaces.variables + +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming + +-- vertical space handler + +local trace_vbox_vspacing = false trackers.register("vspacing.vbox", function(v) trace_vbox_vspacing = v end) +local trace_page_vspacing = false trackers.register("vspacing.page", function(v) trace_page_vspacing = v end) +local trace_page_builder = false trackers.register("builders.page", function(v) trace_page_builder = v end) +local trace_collect_vspacing = false trackers.register("vspacing.collect", function(v) trace_collect_vspacing = v end) +local trace_vspacing = false trackers.register("vspacing.spacing", function(v) trace_vspacing = v end) +local trace_vsnapping = false trackers.register("vspacing.snapping", function(v) trace_vsnapping = v end) +local trace_vpacking = false trackers.register("vspacing.packing", function(v) trace_vpacking = v end) + +local report_vspacing = logs.reporter("vspacing","spacing") +local report_collapser = logs.reporter("vspacing","collapsing") +local report_snapper = logs.reporter("vspacing","snapping") +local report_page_builder = logs.reporter("builders","page") + +local a_skipcategory = attributes.private('skipcategory') +local a_skippenalty = 
attributes.private('skippenalty') +local a_skiporder = attributes.private('skiporder') +----- snap_category = attributes.private('snapcategory') +local a_snapmethod = attributes.private('snapmethod') +local a_snapvbox = attributes.private('snapvbox') + +local find_node_tail = node.tail +local free_node = node.free +local free_node_list = node.flush_list +local copy_node = node.copy +local traverse_nodes = node.traverse +local traverse_nodes_id = node.traverse_id +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after +local remove_node = nodes.remove +local count_nodes = nodes.count +local nodeidstostring = nodes.idstostring +local hpack_node = node.hpack +local vpack_node = node.vpack +local writable_spec = nodes.writable_spec +local listtoutf = nodes.listtoutf + +local nodepool = nodes.pool + +local new_penalty = nodepool.penalty +local new_kern = nodepool.kern +local new_rule = nodepool.rule +local new_gluespec = nodepool.gluespec + +local nodecodes = nodes.nodecodes +local skipcodes = nodes.skipcodes +local fillcodes = nodes.fillcodes + +local penalty_code = nodecodes.penalty +local kern_code = nodecodes.kern +local glue_code = nodecodes.glue +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local whatsit_code = nodecodes.whatsit + +local userskip_code = skipcodes.userskip + +local vspacing = builders.vspacing or { } +builders.vspacing = vspacing + +local vspacingdata = vspacing.data or { } +vspacing.data = vspacingdata + +vspacingdata.snapmethods = vspacingdata.snapmethods or { } +local snapmethods = vspacingdata.snapmethods --maybe some older code can go + +storage.register("builders/vspacing/data/snapmethods", snapmethods, "builders.vspacing.data.snapmethods") + +local default = { + maxheight = true, + maxdepth = true, + strut = true, + hfraction = 1, + dfraction = 1, +} + +local fractions = { + minheight = "hfraction", maxheight = "hfraction", + mindepth = "dfraction", maxdepth = "dfraction", + top = "tlines", bottom = "blines", +} + +local values = { + offset = "offset" +} + +local colonsplitter = lpeg.splitat(":") + +local function listtohash(str) + local t = { } + for s in gmatch(str,"[^, ]+") do + local key, detail = lpegmatch(colonsplitter,s) + local v = variables[key] + if v then + t[v] = true + if detail then + local k = fractions[key] + if k then + detail = tonumber("0" .. detail) + if detail then + t[k] = detail + end + else + k = values[key] + if k then + detail = todimen(detail) + if detail then + t[k] = detail + end + end + end + end + else + detail = tonumber("0" .. 
key) + if detail then + t.hfraction, t.dfraction = detail, detail + end + end + end + if next(t) then + t.hfraction = t.hfraction or 1 + t.dfraction = t.dfraction or 1 + return t + else + return default + end +end + +function vspacing.definesnapmethod(name,method) + local n = #snapmethods + 1 + local t = listtohash(method) + snapmethods[n] = t + t.name, t.specification = name, method + context(n) +end + +-- local rule_id = nodecodes.rule +-- local vlist_id = nodecodes.vlist +-- function nodes.makevtop(n) +-- if n.id == vlist_id then +-- local list = n.list +-- local height = (list and list.id <= rule_id and list.height) or 0 +-- n.depth = n.depth - height + n.height +-- n.height = height +-- end +-- end + +local reference = nodes.reference + +local function validvbox(parentid,list) + if parentid == hlist_code then + local id = list.id + if id == whatsit_code then -- check for initial par subtype + list = list.next + if not next then + return nil + end + end + local done = nil + for n in traverse_nodes(list) do + local id = n.id + if id == vlist_code or id == hlist_code then + if done then + return nil + else + done = n + end + elseif id == glue_code or id == penalty_code then + -- go on + else + return nil -- whatever + end + end + if done then + local id = done.id + if id == hlist_code then + return validvbox(id,done.list) + end + end + return done -- only one vbox + end +end + +local function already_done(parentid,list,a_snapmethod) -- todo: done when only boxes and all snapped + -- problem: any snapped vbox ends up in a line + if list and parentid == hlist_code then + local id = list.id + if id == whatsit_code then -- check for initial par subtype + list = list.next + if not next then + return false + end + end +--~ local i = 0 + for n in traverse_nodes(list) do + local id = n.id +--~ i = i + 1 print(i,nodecodes[id],n[a_snapmethod]) + if id == hlist_code or id == vlist_code then + local a = n[a_snapmethod] + if not a then + -- return true -- not snapped at all + elseif a == 0 then + return true -- already snapped + end + elseif id == glue_code or id == penalty_code then -- whatsit is weak spot + -- go on + else + return false -- whatever + end + end + end + return false +end + + +-- quite tricky: ceil(-something) => -0 + +local function ceiled(n) + if n < 0 or n < 0.01 then + return 0 + else + return ceil(n) + end +end + +local function floored(n) + if n < 0 or n < 0.01 then + return 0 + else + return floor(n) + end +end + +-- check variables.none etc + +local function snap_hlist(where,current,method,height,depth) -- method.strut is default + local list = current.list + local t = trace_vsnapping and { } + if t then + t[#t+1] = formatters["list content: %s"](nodes.toutf(list)) + t[#t+1] = formatters["parent id: %s"](reference(current)) + t[#t+1] = formatters["snap method: %s"](method.name) + t[#t+1] = formatters["specification: %s"](method.specification) + end + local snapht, snapdp + if method["local"] then + -- snapping is done immediately here + snapht, snapdp = texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth + if t then + t[#t+1] = formatters["local: snapht %p snapdp %p"](snapht,snapdp) + end + elseif method["global"] then + snapht, snapdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth + if t then + t[#t+1] = formatters["global: snapht %p snapdp %p"](snapht,snapdp) + end + else + -- maybe autolocal + -- snapping might happen later in the otr + snapht, snapdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth + local lsnapht, 
lsnapdp = texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth + if snapht ~= lsnapht and snapdp ~= lsnapdp then + snapht, snapdp = lsnapht, lsnapdp + end + if t then + t[#t+1] = formatters["auto: snapht %p snapdp %p"](snapht,snapdp) + end + end + local h, d = height or current.height, depth or current.depth + local hr, dr, ch, cd = method.hfraction or 1, method.dfraction or 1, h, d + local tlines, blines = method.tlines or 1, method.blines or 1 + local done, plusht, plusdp = false, snapht, snapdp + local snaphtdp = snapht + snapdp + + if method.none then + plusht, plusdp = 0, 0 + if t then + t[#t+1] = "none: plusht 0pt plusdp 0pt" + end + end + if method.halfline then -- extra halfline + plusht, plusdp = plusht + snaphtdp/2, plusdp + snaphtdp/2 + if t then + t[#t+1] = formatters["halfline: plusht %p plusdp %p"](plusht,plusdp) + end + end + if method.line then -- extra line + plusht, plusdp = plusht + snaphtdp, plusdp + snaphtdp + if t then + t[#t+1] = formatters["line: plusht %p plusdp %p"](plusht,plusdp) + end + end + + if method.first then + local thebox = current + local id = thebox.id + if id == hlist_code then + thebox = validvbox(id,thebox.list) + id = thebox and thebox.id + end + if thebox and id == vlist_code then + local list, lh, ld = thebox.list + for n in traverse_nodes_id(hlist_code,list) do + lh, ld = n.height, n.depth + break + end + if lh then + local ht, dp = thebox.height, thebox.depth + if t then + t[#t+1] = formatters["first line: height %p depth %p"](lh,ld) + t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp) + end + local delta = h - lh + ch, cd = lh, delta + d + h, d = ch, cd + local shifted = hpack_node(current.list) + shifted.shift = delta + current.list = shifted + done = true + if t then + t[#t+1] = formatters["first: height %p depth %p shift %p"](ch,cd,delta) + end + elseif t then + t[#t+1] = "first: not done, no content" + end + elseif t then + t[#t+1] = "first: not done, no vbox" + end + elseif method.last then + local thebox = current + local id = thebox.id + if id == hlist_code then + thebox = validvbox(id,thebox.list) + id = thebox and thebox.id + end + if thebox and id == vlist_code then + local list, lh, ld = thebox.list + for n in traverse_nodes_id(hlist_code,list) do + lh, ld = n.height, n.depth + end + if lh then + local ht, dp = thebox.height, thebox.depth + if t then + t[#t+1] = formatters["last line: height %p depth %p" ](lh,ld) + t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp) + end + local delta = d - ld + cd, ch = ld, delta + h + h, d = ch, cd + local shifted = hpack_node(current.list) + shifted.shift = delta + current.list = shifted + done = true + if t then + t[#t+1] = formatters["last: height %p depth %p shift %p"](ch,cd,delta) + end + elseif t then + t[#t+1] = "last: not done, no content" + end + elseif t then + t[#t+1] = "last: not done, no vbox" + end + end + if method.minheight then + ch = floored((h-hr*snapht)/snaphtdp)*snaphtdp + plusht + if t then + t[#t+1] = formatters["minheight: %p"](ch) + end + elseif method.maxheight then + ch = ceiled((h-hr*snapht)/snaphtdp)*snaphtdp + plusht + if t then + t[#t+1] = formatters["maxheight: %p"](ch) + end + else + ch = plusht + if t then + t[#t+1] = formatters["set height: %p"](ch) + end + end + if method.mindepth then + cd = floored((d-dr*snapdp)/snaphtdp)*snaphtdp + plusdp + if t then + t[#t+1] = formatters["mindepth: %p"](cd) + end + elseif method.maxdepth then + cd = ceiled((d-dr*snapdp)/snaphtdp)*snaphtdp + plusdp + if t then + t[#t+1] = formatters["maxdepth: 
%p"](cd) + end + else + cd = plusdp + if t then + t[#t+1] = formatters["set depth: %p"](cd) + end + end + if method.top then + ch = ch + tlines * snaphtdp + if t then + t[#t+1] = formatters["top height: %p"](ch) + end + end + if method.bottom then + cd = cd + blines * snaphtdp + if t then + t[#t+1] = formatters["bottom depth: %p"](cd) + end + end + + local offset = method.offset + if offset then + -- we need to set the attr + if t then + t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth) + end + local shifted = hpack_node(current.list) + shifted.shift = offset + current.list = shifted + if t then + t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth) + end + shifted[a_snapmethod] = 0 + current[a_snapmethod] = 0 + end + if not height then + current.height = ch + if t then + t[#t+1] = formatters["forced height: %p"](ch) + end + end + if not depth then + current.depth = cd + if t then + t[#t+1] = formatters["forced depth: %p"](cd) + end + end + local lines = (ch+cd)/snaphtdp + if t then + local original = (h+d)/snaphtdp + local whatever = (ch+cd)/(texdimen.globalbodyfontstrutheight + texdimen.globalbodyfontstrutdepth) + t[#t+1] = formatters["final lines: %s -> %s (%s)"](original,lines,whatever) + t[#t+1] = formatters["final height: %p -> %p"](h,ch) + t[#t+1] = formatters["final depth: %p -> %p"](d,cd) + end + if t then + report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[current.id],t) + end + return h, d, ch, cd, lines +end + +local function snap_topskip(current,method) + local spec = current.spec + local w = spec.width + local wd = w + if spec.writable then + spec.width, wd = 0, 0 + end + return w, wd +end + +local categories = allocate { + [0] = 'discard', + [1] = 'largest', + [2] = 'force' , + [3] = 'penalty', + [4] = 'add' , + [5] = 'disable', + [6] = 'nowhite', + [7] = 'goback', + [8] = 'together' +} + +vspacing.categories = categories + +function vspacing.tocategories(str) + local t = { } + for s in gmatch(str,"[^, ]") do + local n = tonumber(s) + if n then + t[categories[n]] = true + else + t[b] = true + end + end + return t +end + +function vspacing.tocategory(str) + if type(str) == "string" then + return set.tonumber(vspacing.tocategories(str)) + else + return set.tonumber({ [categories[str]] = true }) + end +end + +vspacingdata.map = vspacingdata.map or { } -- allocate ? +vspacingdata.skip = vspacingdata.skip or { } -- allocate ? 
+ +storage.register("builders/vspacing/data/map", vspacingdata.map, "builders.vspacing.data.map") +storage.register("builders/vspacing/data/skip", vspacingdata.skip, "builders.vspacing.data.skip") + +do -- todo: interface.variables + + vspacing.fixed = false + + local map = vspacingdata.map + local skip = vspacingdata.skip + + local multiplier = C(S("+-")^0 * R("09")^1) * P("*") + local category = P(":") * C(P(1)^1) + local keyword = C((1-category)^1) + local splitter = (multiplier + Cc(1)) * keyword * (category + Cc(false)) + + local k_fixed, k_flexible, k_category, k_penalty, k_order = variables.fixed, variables.flexible, "category", "penalty", "order" + + -- This will change: just node.write and we can store the values in skips which + -- then obeys grouping + + local fixedblankskip = context.fixedblankskip + local flexibleblankskip = context.flexibleblankskip + local setblankcategory = context.setblankcategory + local setblankorder = context.setblankorder + local setblankpenalty = context.setblankpenalty + local setblankhandling = context.setblankhandling + local flushblankhandling = context.flushblankhandling + local addpredefinedblankskip = context.addpredefinedblankskip + local addaskedblankskip = context.addaskedblankskip + + local function analyze(str,oldcategory) -- we could use shorter names + for s in gmatch(str,"([^ ,]+)") do + local amount, keyword, detail = lpegmatch(splitter,s) -- the comma splitter can be merged + if not keyword then + report_vspacing("unknown directive %a",s) + else + local mk = map[keyword] + if mk then + category = analyze(mk,category) + elseif keyword == k_fixed then + fixedblankskip() + elseif keyword == k_flexible then + flexibleblankskip() + elseif keyword == k_category then + local category = tonumber(detail) + if category then + setblankcategory(category) + if category ~= oldcategory then + flushblankhandling() + oldcategory = category + end + end + elseif keyword == k_order and detail then + local order = tonumber(detail) + if order then + setblankorder(order) + end + elseif keyword == k_penalty and detail then + local penalty = tonumber(detail) + if penalty then + setblankpenalty(penalty) + end + else + amount = tonumber(amount) or 1 + local sk = skip[keyword] + if sk then + addpredefinedblankskip(amount,keyword) + else -- no check + addaskedblankskip(amount,keyword) + end + end + end + end + return category + end + + local pushlogger = context.pushlogger + local startblankhandling = context.startblankhandling + local stopblankhandling = context.stopblankhandling + local poplogger = context.poplogger + + function vspacing.analyze(str) + if trace_vspacing then + pushlogger(report_vspacing) + startblankhandling() + analyze(str,1) + stopblankhandling() + poplogger() + else + startblankhandling() + analyze(str,1) + stopblankhandling() + end + end + + -- + + function vspacing.setmap(from,to) + map[from] = to + end + + function vspacing.setskip(key,value,grid) + if value ~= "" then + if grid == "" then grid = value end + skip[key] = { value, grid } + end + end + +end + +-- implementation + +local trace_list, tracing_info, before, after = { }, false, "", "" + +local function nodes_to_string(head) + local current, t = head, { } + while current do + local id = current.id + local ty = nodecodes[id] + if id == penalty_code then + t[#t+1] = formatters["%s:%s"](ty,current.penalty) + elseif id == glue_code then -- or id == kern_code then -- to be tested + t[#t+1] = formatters["%s:%p"](ty,current) + elseif id == kern_code then + t[#t+1] = 
formatters["%s:%p"](ty,current.kern) + else + t[#t+1] = ty + end + current = current.next + end + return concat(t," + ") +end + +local function reset_tracing(head) + trace_list, tracing_info, before, after = { }, false, nodes_to_string(head), "" +end + +local function trace_skip(str,sc,so,sp,data) + trace_list[#trace_list+1] = { "skip", formatters["%s | %p | category %s | order %s | penalty %s"](str, data, sc or "-", so or "-", sp or "-") } + tracing_info = true +end + +local function trace_natural(str,data) + trace_list[#trace_list+1] = { "skip", formatters["%s | %p"](str, data) } + tracing_info = true +end + +local function trace_info(message, where, what) + trace_list[#trace_list+1] = { "info", formatters["%s: %s/%s"](message,where,what) } +end + +local function trace_node(what) + local nt = nodecodes[what.id] + local tl = trace_list[#trace_list] + if tl and tl[1] == "node" then + trace_list[#trace_list] = { "node", formatters["%s + %s"](tl[2],nt) } + else + trace_list[#trace_list+1] = { "node", nt } + end +end + +local function trace_done(str,data) + if data.id == penalty_code then + trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,data.penalty) } + else + trace_list[#trace_list+1] = { "glue", formatters["%s | %p"](str,data) } + end + tracing_info = true +end + +local function show_tracing(head) + if tracing_info then + after = nodes_to_string(head) + for i=1,#trace_list do + local tag, text = unpack(trace_list[i]) + if tag == "info" then + report_collapser(text) + else + report_collapser(" %s: %s",tag,text) + end + end + report_collapser("before: %s",before) + report_collapser("after : %s",after) + end +end + +-- alignment box begin_of_par vmode_par hmode_par insert penalty before_display after_display + +local skipcodes = nodes.skipcodes + +local userskip_code = skipcodes.userskip +local lineskip_code = skipcodes.lineskip +local baselineskip_code = skipcodes.baselineskip +local parskip_code = skipcodes.parskip +local abovedisplayskip_code = skipcodes.abovedisplayskip +local belowdisplayskip_code = skipcodes.belowdisplayskip +local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip +local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip +local topskip_code = skipcodes.topskip +local splittopskip_code = skipcodes.splittopskip + +local free_glue_node = free_node +local discard, largest, force, penalty, add, disable, nowhite, goback, together = 0, 1, 2, 3, 4, 5, 6, 7, 8 + +-- local function free_glue_node(n) +-- -- free_node(n.spec) +-- print("before",n) +-- logs.flush() +-- free_node(n) +-- print("after") +-- logs.flush() +-- end + +function vspacing.snapbox(n,how) + local sv = snapmethods[how] + if sv then + local box = texbox[n] + local list = box.list + if list then + local s = list[a_snapmethod] + if s == 0 then + if trace_vsnapping then + -- report_snapper("box list not snapped, already done") + end + else + local ht, dp = box.height, box.depth + if false then -- todo: already_done + -- assume that the box is already snapped + if trace_vsnapping then + report_snapper("box list already snapped at (%p,%p): %s", + ht,dp,listtoutf(list)) + end + else + local h, d, ch, cd, lines = snap_hlist("box",box,sv,ht,dp) + box.height, box.depth = ch, cd + if trace_vsnapping then + report_snapper("box list snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s", + h,d,ch,cd,sv.name,sv.specification,"direct",lines,listtoutf(list)) + end + box[a_snapmethod] = 0 -- + list[a_snapmethod] = 0 -- yes or no + end + end + end + end +end + +local 
function forced_skip(head,current,width,where,trace) + if where == "after" then + head, current = insert_node_after(head,current,new_rule(0,0,0)) + head, current = insert_node_after(head,current,new_kern(width)) + head, current = insert_node_after(head,current,new_rule(0,0,0)) + else + local c = current + head, current = insert_node_before(head,current,new_rule(0,0,0)) + head, current = insert_node_before(head,current,new_kern(width)) + head, current = insert_node_before(head,current,new_rule(0,0,0)) + current = c + end + if trace then + report_vspacing("inserting forced skip of %p",width) + end + return head, current +end + +-- penalty only works well when before skip + +local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also pass tail + if trace then + reset_tracing(head) + end + local current, oldhead = head, head + local glue_order, glue_data, force_glue = 0, nil, false + local penalty_order, penalty_data, natural_penalty = 0, nil, nil + local parskip, ignore_parskip, ignore_following, ignore_whitespace, keep_together = nil, false, false, false, false + -- + -- todo: keep_together: between headers + -- + local function flush(why) + if penalty_data then + local p = new_penalty(penalty_data) + if trace then trace_done("flushed due to " .. why,p) end + head = insert_node_before(head,current,p) + end + if glue_data then + if force_glue then + if trace then trace_done("flushed due to " .. why,glue_data) end + head = forced_skip(head,current,glue_data.spec.width,"before",trace) + free_glue_node(glue_data) + elseif glue_data.spec.writable then + if trace then trace_done("flushed due to " .. why,glue_data) end + head = insert_node_before(head,current,glue_data) + else + free_glue_node(glue_data) + end + end + if trace then trace_node(current) end + glue_order, glue_data, force_glue = 0, nil, false + penalty_order, penalty_data, natural_penalty = 0, nil, nil + parskip, ignore_parskip, ignore_following, ignore_whitespace = nil, false, false, false + end + if trace_vsnapping then + report_snapper("global ht/dp = %p/%p, local ht/dp = %p/%p", + texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth, + texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth) + end + if trace then trace_info("start analyzing",where,what) end + while current do + local id = current.id + if id == hlist_code or id == vlist_code then + -- needs checking, why so many calls + if snap then + local list = current.list + local s = current[a_snapmethod] + if not s then + -- if trace_vsnapping then + -- report_snapper("mvl list not snapped") + -- end + elseif s == 0 then + if trace_vsnapping then + report_snapper("mvl %a not snapped, already done: %s",nodecodes[id],listtoutf(list)) + end + else + local sv = snapmethods[s] + if sv then + -- check if already snapped + if list and already_done(id,list,a_snapmethod) then + local ht, dp = current.height, current.depth + -- assume that the box is already snapped + if trace_vsnapping then + report_snapper("mvl list already snapped at (%p,%p): %s",ht,dp,listtoutf(list)) + end + else + local h, d, ch, cd, lines = snap_hlist("mvl",current,sv) + if trace_vsnapping then + report_snapper("mvl %a snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s", + nodecodes[id],h,d,ch,cd,sv.name,sv.specification,where,lines,listtoutf(list)) + end + end + elseif trace_vsnapping then + report_snapper("mvl %a not snapped due to unknown snap specification: %s",nodecodes[id],listtoutf(list)) + end + current[a_snapmethod] = 0 + end + else + -- + 
end + -- tex.prevdepth = 0 + flush("list") + current = current.next + elseif id == penalty_code then + -- natural_penalty = current.penalty + -- if trace then trace_done("removed penalty",current) end + -- head, current = remove_node(head, current, true) + current = current.next + elseif id == kern_code then + if snap and trace_vsnapping and current.kern ~= 0 then + report_snapper("kern of %p kept",current.kern) + end + flush("kern") + current = current.next + elseif id == glue_code then + local subtype = current.subtype + if subtype == userskip_code then + local sc = current[a_skipcategory] -- has no default, no unset (yet) + local so = current[a_skiporder] or 1 -- has 1 default, no unset (yet) + local sp = current[a_skippenalty] -- has no default, no unset (yet) + if sp and sc == penalty then + if not penalty_data then + penalty_data = sp + elseif penalty_order < so then + penalty_order, penalty_data = so, sp + elseif penalty_order == so and sp > penalty_data then + penalty_data = sp + end + if trace then trace_skip("penalty in skip",sc,so,sp,current) end + head, current = remove_node(head, current, true) + elseif not sc then -- if not sc then + if glue_data then + if trace then trace_done("flush",glue_data) end + head = insert_node_before(head,current,glue_data) + if trace then trace_natural("natural",current) end + current = current.next + else + -- not look back across head + local previous = current.prev + if previous and previous.id == glue_code and previous.subtype == userskip_code then + local ps = previous.spec + if ps.writable then + local cs = current.spec + if cs.writable and ps.stretch_order == 0 and ps.shrink_order == 0 and cs.stretch_order == 0 and cs.shrink_order == 0 then + local pw, pp, pm = ps.width, ps.stretch, ps.shrink + local cw, cp, cm = cs.width, cs.stretch, cs.shrink + -- ps = writable_spec(previous) -- no writable needed here + -- ps.width, ps.stretch, ps.shrink = pw + cw, pp + cp, pm + cm + previous.spec = new_gluespec(pw + cw, pp + cp, pm + cm) -- else topskip can disappear + if trace then trace_natural("removed",current) end + head, current = remove_node(head, current, true) + -- current = previous + if trace then trace_natural("collapsed",previous) end + -- current = current.next + else + if trace then trace_natural("filler",current) end + current = current.next + end + else + if trace then trace_natural("natural (no prev spec)",current) end + current = current.next + end + else + if trace then trace_natural("natural (no prev)",current) end + current = current.next + end + end + glue_order, glue_data = 0, nil + elseif sc == disable then + ignore_following = true + if trace then trace_skip("disable",sc,so,sp,current) end + head, current = remove_node(head, current, true) + elseif sc == together then + keep_together = true + if trace then trace_skip("together",sc,so,sp,current) end + head, current = remove_node(head, current, true) + elseif sc == nowhite then + ignore_whitespace = true + head, current = remove_node(head, current, true) + elseif sc == discard then + if trace then trace_skip("discard",sc,so,sp,current) end + head, current = remove_node(head, current, true) + elseif ignore_following then + if trace then trace_skip("disabled",sc,so,sp,current) end + head, current = remove_node(head, current, true) + elseif not glue_data then + if trace then trace_skip("assign",sc,so,sp,current) end + glue_order = so + head, current, glue_data = remove_node(head, current) + elseif glue_order < so then + if trace then trace_skip("force",sc,so,sp,current) end + 
glue_order = so + free_glue_node(glue_data) + head, current, glue_data = remove_node(head, current) + elseif glue_order == so then + -- is now exclusive, maybe support goback as combi, else why a set + if sc == largest then + local cs, gs = current.spec, glue_data.spec + local cw, gw = cs.width, gs.width + if cw > gw then + if trace then trace_skip("largest",sc,so,sp,current) end + free_glue_node(glue_data) -- also free spec + head, current, glue_data = remove_node(head, current) + else + if trace then trace_skip("remove smallest",sc,so,sp,current) end + head, current = remove_node(head, current, true) + end + elseif sc == goback then + if trace then trace_skip("goback",sc,so,sp,current) end + free_glue_node(glue_data) -- also free spec + head, current, glue_data = remove_node(head, current) + elseif sc == force then + -- last one counts, some day we can provide an accumulator and largest etc + -- but not now + if trace then trace_skip("force",sc,so,sp,current) end + free_glue_node(glue_data) -- also free spec + head, current, glue_data = remove_node(head, current) + elseif sc == penalty then + if trace then trace_skip("penalty",sc,so,sp,current) end + free_glue_node(glue_data) -- also free spec + glue_data = nil + head, current = remove_node(head, current, true) + elseif sc == add then + if trace then trace_skip("add",sc,so,sp,current) end + -- local old, new = glue_data.spec, current.spec + local old, new = writable_spec(glue_data), current.spec + old.width = old.width + new.width + old.stretch = old.stretch + new.stretch + old.shrink = old.shrink + new.shrink + -- toto: order + head, current = remove_node(head, current, true) + else + if trace then trace_skip("unknown",sc,so,sp,current) end + head, current = remove_node(head, current, true) + end + else + if trace then trace_skip("unknown",sc,so,sp,current) end + head, current = remove_node(head, current, true) + end + if sc == force then + force_glue = true + end + elseif subtype == lineskip_code then + if snap then + local s = current[a_snapmethod] + if s and s ~= 0 then + current[a_snapmethod] = 0 + if current.spec.writable then + local spec = writable_spec(current) + spec.width = 0 + if trace_vsnapping then + report_snapper("lineskip set to zero") + end + end + else + if trace then trace_skip("lineskip",sc,so,sp,current) end + flush("lineskip") + end + else + if trace then trace_skip("lineskip",sc,so,sp,current) end + flush("lineskip") + end + current = current.next + elseif subtype == baselineskip_code then + if snap then + local s = current[a_snapmethod] + if s and s ~= 0 then + current[a_snapmethod] = 0 + if current.spec.writable then + local spec = writable_spec(current) + spec.width = 0 + if trace_vsnapping then + report_snapper("baselineskip set to zero") + end + end + else + if trace then trace_skip("baselineskip",sc,so,sp,current) end + flush("baselineskip") + end + else + if trace then trace_skip("baselineskip",sc,so,sp,current) end + flush("baselineskip") + end + current = current.next + elseif subtype == parskip_code then + -- parskip always comes later + if ignore_whitespace then + if trace then trace_natural("ignored parskip",current) end + head, current = remove_node(head, current, true) + elseif glue_data then + local ps, gs = current.spec, glue_data.spec + if ps.writable and gs.writable and ps.width > gs.width then + glue_data.spec = copy_node(ps) + if trace then trace_natural("taking parskip",current) end + else + if trace then trace_natural("removed parskip",current) end + end + head, current = remove_node(head, 
current, true) + else + if trace then trace_natural("honored parskip",current) end + head, current, glue_data = remove_node(head, current) + end + elseif subtype == topskip_code or subtype == splittopskip_code then + if snap then + local s = current[a_snapmethod] + if s and s ~= 0 then + current[a_snapmethod] = 0 + local sv = snapmethods[s] + local w, cw = snap_topskip(current,sv) + if trace_vsnapping then + report_snapper("topskip snapped from %p to %p for %a",w,cw,where) + end + else + if trace then trace_skip("topskip",sc,so,sp,current) end + flush("topskip") + end + else + if trace then trace_skip("topskip",sc,so,sp,current) end + flush("topskip") + end + current = current.next + elseif subtype == abovedisplayskip_code then + -- + if trace then trace_skip("above display skip (normal)",sc,so,sp,current) end + flush("above display skip (normal)") + current = current.next + -- + elseif subtype == belowdisplayskip_code then + -- + if trace then trace_skip("below display skip (normal)",sc,so,sp,current) end + flush("below display skip (normal)") + current = current.next + -- + elseif subtype == abovedisplayshortskip_code then + -- + if trace then trace_skip("above display skip (short)",sc,so,sp,current) end + flush("above display skip (short)") + current = current.next + -- + elseif subtype == belowdisplayshortskip_code then + -- + if trace then trace_skip("below display skip (short)",sc,so,sp,current) end + flush("below display skip (short)") + current = current.next + -- + else -- other glue + if snap and trace_vsnapping and current.spec.writable and current.spec.width ~= 0 then + report_snapper("glue %p of type %a kept",current.spec.width,skipcodes[subtype]) + --~ current.spec.width = 0 + end + if trace then trace_skip(formatted["glue of type %a"](subtype),sc,so,sp,current) end + flush("some glue") + current = current.next + end + else + flush("something else") + current = current.next + end + end + if trace then trace_info("stop analyzing",where,what) end + -- if natural_penalty and (not penalty_data or natural_penalty > penalty_data) then + -- penalty_data = natural_penalty + -- end + if trace and (glue_data or penalty_data) then + trace_info("start flushing",where,what) + end + local tail + if penalty_data then + tail = find_node_tail(head) + local p = new_penalty(penalty_data) + if trace then trace_done("result",p) end + head, tail = insert_node_after(head,tail,p) + end + if glue_data then + if not tail then tail = find_node_tail(head) end + if trace then trace_done("result",glue_data) end + if force_glue then + head, tail = forced_skip(head,tail,glue_data.spec.width,"after",trace) + free_glue_node(glue_data) + else + head, tail = insert_node_after(head,tail,glue_data) + end + end + if trace then + if glue_data or penalty_data then + trace_info("stop flushing",where,what) + end + show_tracing(head) + if oldhead ~= head then + trace_info("head has been changed from %a to %a",nodecodes[oldhead.id],nodecodes[head.id]) + end + end + return head, true +end + +-- alignment after_output end box new_graf vmode_par hmode_par insert penalty before_display after_display +-- \par -> vmode_par +-- +-- status.best_page_break +-- tex.lists.best_page_break +-- tex.lists.best_size (natural size to best_page_break) +-- tex.lists.least_page_cost (badness of best_page_break) +-- tex.lists.page_head +-- tex.lists.contrib_head + +local stackhead, stacktail, stackhack = nil, nil, false + +local function report(message,lst) + report_vspacing(message,count_nodes(lst,true),nodeidstostring(lst)) +end + 
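
For reference, a simplified stand-alone model of the merge policy that the collapser above applies when a user skip meets an already collected skip of the same order. This is a sketch only: plain numbers stand in for glue specs, the helper name merge is made up for the example, and only the discard, largest, force and add categories defined earlier are modelled.

local discard, largest, force, add = 0, 1, 2, 4

local function merge(kept,incoming,category)
    if category == discard then
        return kept                                   -- the incoming skip is dropped
    elseif category == largest then
        return incoming.width > kept.width and incoming or kept
    elseif category == force then
        return incoming                               -- last one counts
    elseif category == add then
        return {                                      -- accumulate amount and flexibility
            width   = kept.width   + incoming.width,
            stretch = kept.stretch + incoming.stretch,
            shrink  = kept.shrink  + incoming.shrink,
        }
    else
        return kept                                   -- other categories keep the collected skip
    end
end

-- merge({ width = 10, stretch = 0, shrink = 0 }, { width = 6, stretch = 0, shrink = 0 }, largest).width --> 10
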
+function vspacing.pagehandler(newhead,where) + -- local newhead = texlists.contrib_head + if newhead then + local newtail = find_node_tail(newhead) -- best pass that tail, known anyway + local flush = false + stackhack = true -- todo: only when grid snapping once enabled + for n in traverse_nodes(newhead) do -- we could just look for glue nodes + local id = n.id + if id ~= glue_code then + flush = true + elseif n.subtype == userskip_code then + if n[a_skipcategory] then + stackhack = true + else + flush = true + end + else + -- tricky + end + end + if flush then + if stackhead then + if trace_collect_vspacing then report("appending %s nodes to stack (final): %s",newhead) end + stacktail.next = newhead + newhead.prev = stacktail + newhead = stackhead + stackhead, stacktail = nil, nil + end + if stackhack then + stackhack = false + if trace_collect_vspacing then report("processing %s nodes: %s",newhead) end +--~ texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod) + newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod) + else + if trace_collect_vspacing then report("flushing %s nodes: %s",newhead) end +--~ texlists.contrib_head = newhead + end + else + if stackhead then + if trace_collect_vspacing then report("appending %s nodes to stack (intermediate): %s",newhead) end + stacktail.next = newhead + newhead.prev = stacktail + else + if trace_collect_vspacing then report("storing %s nodes in stack (initial): %s",newhead) end + stackhead = newhead + end + stacktail = newtail + -- texlists.contrib_head = nil + newhead = nil + end + end + return newhead +end + +local ignore = table.tohash { + "split_keep", + "split_off", + -- "vbox", +} + +function vspacing.vboxhandler(head,where) + if head and not ignore[where] and head.next then + -- starttiming(vspacing) + head = collapser(head,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper + -- stoptiming(vspacing) + end + return head +end + +function vspacing.collapsevbox(n) -- for boxes but using global a_snapmethod + local list = texbox[n].list + if list then + -- starttiming(vspacing) + texbox[n].list = vpack_node(collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod)) + -- stoptiming(vspacing) + end +end + +-- We will split this module so a few locals are repeated. Also this will be +-- rewritten. + +nodes.builders = nodes.builder or { } +local builders = nodes.builders + +local actions = nodes.tasks.actions("vboxbuilders") + +function builders.vpack_filter(head,groupcode,size,packtype,maxdepth,direction) + local done = false + if head then + starttiming(builders) + if trace_vpacking then + local before = nodes.count(head) + head, done = actions(head,groupcode,size,packtype,maxdepth,direction) + local after = nodes.count(head) + if done then + nodes.processors.tracer("vpack","changed",head,groupcode,before,after,true) + else + nodes.processors.tracer("vpack","unchanged",head,groupcode,before,after,true) + end + else + head, done = actions(head,groupcode) + end + stoptiming(builders) + end + return head, done +end + +-- This one is special in the sense that it has no head and we operate on the mlv. Also, +-- we need to do the vspacing last as it removes items from the mvl. 
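
The directive strings that end up in vspacing.analyze (exposed as commands.vspacing further down) are split by the lpeg grammar defined earlier in this module. Below is a stand-alone restatement with a few worked inputs, assuming only that the lpeg library is available (it ships with LuaTeX; plain Lua needs the lpeg module installed).

local lpeg = require("lpeg")
local P, R, S, C, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc

-- same patterns as earlier: optional "amount*" prefix, keyword, optional ":detail" suffix
local multiplier = C(S("+-")^0 * R("09")^1) * P("*")
local category   = P(":") * C(P(1)^1)
local keyword    = C((1-category)^1)
local splitter   = (multiplier + Cc(1)) * keyword * (category + Cc(false))

print(splitter:match("big"))         --> 1  big       false  (predefined skip, amount 1)
print(splitter:match("2*big"))       --> 2  big       false  (amount 2 of the predefined skip)
print(splitter:match("category:4"))  --> 1  category  4      (drives setblankcategory)
print(splitter:match("penalty:200")) --> 1  penalty   200    (drives setblankpenalty)
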
+ +local actions = nodes.tasks.actions("mvlbuilders") + +local function report(groupcode,head) + report_page_builder("trigger: %s",groupcode) + report_page_builder(" vsize : %p",tex.vsize) + report_page_builder(" pagegoal : %p",tex.pagegoal) + report_page_builder(" pagetotal: %p",tex.pagetotal) + report_page_builder(" list : %s",head and nodeidstostring(head) or "") +end + +function builders.buildpage_filter(groupcode) + local head, done = texlists.contrib_head, false + -- if head and head.next and head.next.id == hlist_code and head.next.width == 1 then + -- report_page_builder("trigger otr calculations") + -- free_node_list(head) + -- head = nil + -- end + if head then + starttiming(builders) + if trace_page_builder then + report(groupcode,head) + end + head, done = actions(head,groupcode) + stoptiming(builders) + -- -- doesn't work here (not passed on?) + -- tex.pagegoal = tex.vsize - tex.dimen.d_page_floats_inserted_top - tex.dimen.d_page_floats_inserted_bottom + texlists.contrib_head = head + return done and head or true + else + if trace_page_builder then + report(groupcode) + end + return nil, false + end +end + +callbacks.register('vpack_filter', builders.vpack_filter, "vertical spacing etc") +callbacks.register('buildpage_filter', builders.buildpage_filter, "vertical spacing etc (mvl)") + +statistics.register("v-node processing time", function() + return statistics.elapsedseconds(builders) +end) + +-- interface + +commands.vspacing = vspacing.analyze +commands.vspacingsetamount = vspacing.setskip +commands.vspacingdefine = vspacing.setmap +commands.vspacingcollapse = vspacing.collapsevbox +commands.vspacingsnap = vspacing.snapbox diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf index 679c052b5..043d5740e 100644 Binary files a/tex/context/base/status-files.pdf and b/tex/context/base/status-files.pdf differ diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf index cf1f4f0e3..41276909d 100644 Binary files a/tex/context/base/status-lua.pdf and b/tex/context/base/status-lua.pdf differ diff --git a/tex/context/base/status-mkiv.lua b/tex/context/base/status-mkiv.lua index de35a4c0a..a112d644b 100644 --- a/tex/context/base/status-mkiv.lua +++ b/tex/context/base/status-mkiv.lua @@ -4300,6 +4300,16 @@ return { filename = "s-fonts-vectors", status = "okay", }, + { + category = "lua", + filename = "s-languages-sorting", + status = "okay", + }, + { + category = "lua", + filename = "s-languages-system", + status = "okay", + }, { category = "lua", filename = "s-math-characters", @@ -5389,6 +5399,16 @@ return { filename = "s-inf-04", status = "todo", }, + { + category = "mkiv", + filename = "s-languages-sorting", + status = "okay", + }, + { + category = "mkiv", + filename = "s-languages-system", + status = "okay", + }, { category = "mkiv", filename = "s-lan-03", diff --git a/tex/context/base/strc-bkm.lua b/tex/context/base/strc-bkm.lua index d9c268ce4..76049f6cf 100644 --- a/tex/context/base/strc-bkm.lua +++ b/tex/context/base/strc-bkm.lua @@ -1,196 +1,196 @@ -if not modules then modules = { } end modules ['strc-bkm'] = { - version = 0.200, - comment = "companion to strc-bkm.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Future version will support adding arbitrary bookmarks with --- associated complex actions (rather trivial to implement). 
- --- this should become proper separated backend code - --- we should hook the placement into everystoptext ... needs checking - -local format, concat, gsub = string.format, table.concat, string.gsub -local utfvalues = utf.values -local settings_to_hash = utilities.parsers.settings_to_hash - -local codeinjections = backends.codeinjections - -local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end) - -local report_bookmarks = logs.reporter("structure","bookmarks") - -local structures = structures - -structures.bookmarks = structures.bookmarks or { } - -local bookmarks = structures.bookmarks -local sections = structures.sections -local lists = structures.lists - -local levelmap = sections.levelmap -local variables = interfaces.variables - -bookmarks.method = "internal" -- or "page" - -local names, opened, forced, numbered = { }, { }, { }, { } - -function bookmarks.register(settings) - local force = settings.force == variables.yes - local number = settings.number == variables.yes - local allopen = settings.opened == variables.all - for k, v in next, settings_to_hash(settings.names or "") do - names[k] = true - if force then - forced[k] = true - if allopen then - opened[k] = true - end - end - if number then - numbered[k] = true - end - end - if not allopen then - for k, v in next, settings_to_hash(settings.opened or "") do - opened[k] = true - end - end -end - -function bookmarks.overload(name,text) - local l, ls = lists.tobesaved, nil - if #l == 0 then - -- no entries - elseif name == "" then - ls = l[#l] - else - for i=#l,0,-1 do - local li = l[i] - local metadata = li.metadata - if metadata and not metadata.nolist and metadata.name == name then - ls = li - break - end - end - end - if ls then - ls.titledata.bookmark = text - end -end - -local function stripped(str) -- kind of generic - str = gsub(str,"\\([A-Z]+)","%1") -- \LOGO - str = gsub(str,"\\ "," ") -- \ - str = gsub(str,"\\([A-Za-z]+) *{(.-)}","%1") -- \bla{...} - str = gsub(str," +"," ") -- spaces - return str -end - --- todo: collect specs and collect later i.e. multiple places - -local numberspec = { } - -function bookmarks.setup(spec) - -- table.merge(numberspec,spec) - for k, v in next, spec do - numberspec[k] = v - end -end - -function bookmarks.place() - if next(names) then - local list = lists.filtercollected(names,"all",nil,lists.collected,forced) - if #list > 0 then - local levels, noflevels, lastlevel = { }, 0, 1 - for i=1,#list do - local li = list[i] - local metadata = li.metadata - local name = metadata.name - if not metadata.nolist or forced[name] then -- and levelmap[name] then - local titledata = li.titledata - if titledata then - local structural = levelmap[name] - lastlevel = structural or lastlevel - local title = titledata.bookmark - if not title or title == "" then - -- We could typeset the title and then convert it. - if not structural then - -- placeholder, todo: bookmarklabel - title = name .. ": " .. (titledata.title or "?") - else - title = titledata.title or "?" - end - end - if numbered[name] then - local sectiondata = sections.collected[li.references.section] - local numberdata = li.numberdata - if sectiondata and numberdata and not numberdata.hidenumber then - -- we could typeset the number and convert it - title = concat(sections.typesetnumber(sectiondata,"direct",numberspec,sectiondata)) .. " " .. 
title - end - end - noflevels = noflevels + 1 - levels[noflevels] = { - lastlevel, - stripped(title), -- can be replaced by converter - li.references, -- has internal and realpage - allopen or opened[name] - } - end - end - end - bookmarks.finalize(levels) - end - function bookmarks.place() end -- prevent second run - end -end - -function bookmarks.flatten(levels) - -- This function promotes leading structurelements with a higher level - -- to the next lower level. Such situations are the result of lack of - -- structure: a subject preceding a chapter in a sectionblock. So, the - -- following code runs over section blocks as well. (bookmarks-007.tex) - local noflevels = #levels - if noflevels > 1 then - local skip, start, one = false, 1, levels[1] - local first, block = one[1], one[3].block - for i=2,noflevels do - local li = levels[i] - local new, newblock = li[1], li[3].block - if newblock ~= block then - first, block, start, skip = new, newblock, i, false - elseif skip then - -- go on - elseif new > first then - skip = true - elseif new < first then - for j=start,i-1 do - local lj = levels[j] - local old = lj[1] - lj[1] = new - if trace_bookmarks then - report_bookmarks("promoting entry %a from level %a to %a: %s",j,old,new,lj[2]) - end - end - skip = true - end - end - end -end - -function bookmarks.finalize(levels) - -- This function can be overloaded by an optional converter - -- that uses nodes.toutf on a typeset stream. This is something - -- that we will support when the main loop has become a coroutine. - codeinjections.addbookmarks(levels,bookmarks.method) -end - --- interface - -commands.overloadbookmark = bookmarks.overload -commands.registerbookmark = bookmarks.register -commands.setupbookmarks = bookmarks.setup +if not modules then modules = { } end modules ['strc-bkm'] = { + version = 0.200, + comment = "companion to strc-bkm.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Future version will support adding arbitrary bookmarks with +-- associated complex actions (rather trivial to implement). + +-- this should become proper separated backend code + +-- we should hook the placement into everystoptext ... 
needs checking + +local format, concat, gsub = string.format, table.concat, string.gsub +local utfvalues = utf.values +local settings_to_hash = utilities.parsers.settings_to_hash + +local codeinjections = backends.codeinjections + +local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end) + +local report_bookmarks = logs.reporter("structure","bookmarks") + +local structures = structures + +structures.bookmarks = structures.bookmarks or { } + +local bookmarks = structures.bookmarks +local sections = structures.sections +local lists = structures.lists + +local levelmap = sections.levelmap +local variables = interfaces.variables + +bookmarks.method = "internal" -- or "page" + +local names, opened, forced, numbered = { }, { }, { }, { } + +function bookmarks.register(settings) + local force = settings.force == variables.yes + local number = settings.number == variables.yes + local allopen = settings.opened == variables.all + for k, v in next, settings_to_hash(settings.names or "") do + names[k] = true + if force then + forced[k] = true + if allopen then + opened[k] = true + end + end + if number then + numbered[k] = true + end + end + if not allopen then + for k, v in next, settings_to_hash(settings.opened or "") do + opened[k] = true + end + end +end + +function bookmarks.overload(name,text) + local l, ls = lists.tobesaved, nil + if #l == 0 then + -- no entries + elseif name == "" then + ls = l[#l] + else + for i=#l,0,-1 do + local li = l[i] + local metadata = li.metadata + if metadata and not metadata.nolist and metadata.name == name then + ls = li + break + end + end + end + if ls then + ls.titledata.bookmark = text + end +end + +local function stripped(str) -- kind of generic + str = gsub(str,"\\([A-Z]+)","%1") -- \LOGO + str = gsub(str,"\\ "," ") -- \ + str = gsub(str,"\\([A-Za-z]+) *{(.-)}","%1") -- \bla{...} + str = gsub(str," +"," ") -- spaces + return str +end + +-- todo: collect specs and collect later i.e. multiple places + +local numberspec = { } + +function bookmarks.setup(spec) + -- table.merge(numberspec,spec) + for k, v in next, spec do + numberspec[k] = v + end +end + +function bookmarks.place() + if next(names) then + local list = lists.filtercollected(names,"all",nil,lists.collected,forced) + if #list > 0 then + local levels, noflevels, lastlevel = { }, 0, 1 + for i=1,#list do + local li = list[i] + local metadata = li.metadata + local name = metadata.name + if not metadata.nolist or forced[name] then -- and levelmap[name] then + local titledata = li.titledata + if titledata then + local structural = levelmap[name] + lastlevel = structural or lastlevel + local title = titledata.bookmark + if not title or title == "" then + -- We could typeset the title and then convert it. + if not structural then + -- placeholder, todo: bookmarklabel + title = name .. ": " .. (titledata.title or "?") + else + title = titledata.title or "?" + end + end + if numbered[name] then + local sectiondata = sections.collected[li.references.section] + local numberdata = li.numberdata + if sectiondata and numberdata and not numberdata.hidenumber then + -- we could typeset the number and convert it + title = concat(sections.typesetnumber(sectiondata,"direct",numberspec,sectiondata)) .. " " .. 
title + end + end + noflevels = noflevels + 1 + levels[noflevels] = { + lastlevel, + stripped(title), -- can be replaced by converter + li.references, -- has internal and realpage + allopen or opened[name] + } + end + end + end + bookmarks.finalize(levels) + end + function bookmarks.place() end -- prevent second run + end +end + +function bookmarks.flatten(levels) + -- This function promotes leading structurelements with a higher level + -- to the next lower level. Such situations are the result of lack of + -- structure: a subject preceding a chapter in a sectionblock. So, the + -- following code runs over section blocks as well. (bookmarks-007.tex) + local noflevels = #levels + if noflevels > 1 then + local skip, start, one = false, 1, levels[1] + local first, block = one[1], one[3].block + for i=2,noflevels do + local li = levels[i] + local new, newblock = li[1], li[3].block + if newblock ~= block then + first, block, start, skip = new, newblock, i, false + elseif skip then + -- go on + elseif new > first then + skip = true + elseif new < first then + for j=start,i-1 do + local lj = levels[j] + local old = lj[1] + lj[1] = new + if trace_bookmarks then + report_bookmarks("promoting entry %a from level %a to %a: %s",j,old,new,lj[2]) + end + end + skip = true + end + end + end +end + +function bookmarks.finalize(levels) + -- This function can be overloaded by an optional converter + -- that uses nodes.toutf on a typeset stream. This is something + -- that we will support when the main loop has become a coroutine. + codeinjections.addbookmarks(levels,bookmarks.method) +end + +-- interface + +commands.overloadbookmark = bookmarks.overload +commands.registerbookmark = bookmarks.register +commands.setupbookmarks = bookmarks.setup diff --git a/tex/context/base/strc-blk.lua b/tex/context/base/strc-blk.lua index 791f8f99b..16a621ad4 100644 --- a/tex/context/base/strc-blk.lua +++ b/tex/context/base/strc-blk.lua @@ -1,152 +1,152 @@ -if not modules then modules = { } end modules ['strc-blk'] = { - version = 1.001, - comment = "companion to strc-blk.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this one runs on top of buffers and structure - -local type = type -local find, format, validstring = string.find, string.format, string.valid -local settings_to_set, settings_to_array = utilities.parsers.settings_to_set, utilities.parsers.settings_to_array -local allocate = utilities.storage.allocate - -local structures, context = structures, context - -structures.blocks = structures.blocks or { } - -local blocks = structures.blocks -local sections = structures.sections -local lists = structures.lists - -local collected = allocate() -local tobesaved = allocate() -local states = allocate() - -blocks.collected = collected -blocks.tobesaved = tobesaved -blocks.states = states - -local function initializer() - collected = blocks.collected - tobesaved = blocks.tobesaved -end - -job.register('structures.blocks.collected', tobesaved, initializer) - -local listitem = utilities.parsers.listitem - -function blocks.print(name,data,hide) - if hide then - context.dostarthiddenblock(name) - else - context.dostartnormalblock(name) - end - context.viafile(data,format("block.%s",validstring(name,"noname"))) - if hide then - context.dostophiddenblock() - else - context.dostopnormalblock() - end -end - -function blocks.define(name) - states[name] = { all = "hide" } -end - -function 
blocks.setstate(state,name,tag) - local all = tag == "" - local tags = not all and settings_to_array(tag) - for n in listitem(name) do - local sn = states[n] - if not sn then - -- error - elseif all then - sn.all = state - else - for _, tag in next, tags do - sn[tag] = state - end - end - end -end - -function blocks.select(state,name,tag,criterium) - criterium = criterium or "text" - if find(tag,"=") then tag = "" end - local names = settings_to_set(name) - local all = tag == "" - local tags = not all and settings_to_set(tag) - local hide = state == "process" - local n = sections.numberatdepth(criterium) - local result = lists.filtercollected("all", criterium, n, collected, { }) - for i=1,#result do - local ri = result[i] - local metadata = ri.metadata - if names[metadata.name] then - if all then - blocks.print(name,ri.data,hide) - else - local mtags = metadata.tags - for tag, sta in next, tags do - if mtags[tag] then - blocks.print(name,ri.data,hide) - break - end - end - end - end - end -end - -function blocks.save(name,tag,buffer) -- wrong, not yet adapted - local data = buffers.getcontent(buffer) - local tags = settings_to_set(tag) - local plus, minus = false, false - if tags['+'] then plus = true tags['+'] = nil end - if tags['-'] then minus = true tags['-'] = nil end - tobesaved[#tobesaved+1] = { - metadata = { - name = name, - tags = tags, - plus = plus, - minus = minus, - }, - references = { - section = sections.currentid(), - }, - data = data or "error", - } - local allstate = states[name].all - if not next(tags) then - if allstate ~= "hide" then - blocks.print(name,data) - elseif plus then - blocks.print(name,data,true) - end - else - local sn = states[name] - for tag, _ in next, tags do - if sn[tag] == nil then - if allstate ~= "hide" then - blocks.print(name,data) - break - end - elseif sn[tag] ~= "hide" then - blocks.print(name,data) - break - end - end - end - buffers.erase(buffer) -end - --- interface - - -commands.definestructureblock = blocks.define -commands.savestructureblock = blocks.save -commands.selectstructureblock = blocks.select -commands.setstructureblockstate = blocks.setstate +if not modules then modules = { } end modules ['strc-blk'] = { + version = 1.001, + comment = "companion to strc-blk.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this one runs on top of buffers and structure + +local type = type +local find, format, validstring = string.find, string.format, string.valid +local settings_to_set, settings_to_array = utilities.parsers.settings_to_set, utilities.parsers.settings_to_array +local allocate = utilities.storage.allocate + +local structures, context = structures, context + +structures.blocks = structures.blocks or { } + +local blocks = structures.blocks +local sections = structures.sections +local lists = structures.lists + +local collected = allocate() +local tobesaved = allocate() +local states = allocate() + +blocks.collected = collected +blocks.tobesaved = tobesaved +blocks.states = states + +local function initializer() + collected = blocks.collected + tobesaved = blocks.tobesaved +end + +job.register('structures.blocks.collected', tobesaved, initializer) + +local listitem = utilities.parsers.listitem + +function blocks.print(name,data,hide) + if hide then + context.dostarthiddenblock(name) + else + context.dostartnormalblock(name) + end + context.viafile(data,format("block.%s",validstring(name,"noname"))) + if hide then + 
context.dostophiddenblock() + else + context.dostopnormalblock() + end +end + +function blocks.define(name) + states[name] = { all = "hide" } +end + +function blocks.setstate(state,name,tag) + local all = tag == "" + local tags = not all and settings_to_array(tag) + for n in listitem(name) do + local sn = states[n] + if not sn then + -- error + elseif all then + sn.all = state + else + for _, tag in next, tags do + sn[tag] = state + end + end + end +end + +function blocks.select(state,name,tag,criterium) + criterium = criterium or "text" + if find(tag,"=") then tag = "" end + local names = settings_to_set(name) + local all = tag == "" + local tags = not all and settings_to_set(tag) + local hide = state == "process" + local n = sections.numberatdepth(criterium) + local result = lists.filtercollected("all", criterium, n, collected, { }) + for i=1,#result do + local ri = result[i] + local metadata = ri.metadata + if names[metadata.name] then + if all then + blocks.print(name,ri.data,hide) + else + local mtags = metadata.tags + for tag, sta in next, tags do + if mtags[tag] then + blocks.print(name,ri.data,hide) + break + end + end + end + end + end +end + +function blocks.save(name,tag,buffer) -- wrong, not yet adapted + local data = buffers.getcontent(buffer) + local tags = settings_to_set(tag) + local plus, minus = false, false + if tags['+'] then plus = true tags['+'] = nil end + if tags['-'] then minus = true tags['-'] = nil end + tobesaved[#tobesaved+1] = { + metadata = { + name = name, + tags = tags, + plus = plus, + minus = minus, + }, + references = { + section = sections.currentid(), + }, + data = data or "error", + } + local allstate = states[name].all + if not next(tags) then + if allstate ~= "hide" then + blocks.print(name,data) + elseif plus then + blocks.print(name,data,true) + end + else + local sn = states[name] + for tag, _ in next, tags do + if sn[tag] == nil then + if allstate ~= "hide" then + blocks.print(name,data) + break + end + elseif sn[tag] ~= "hide" then + blocks.print(name,data) + break + end + end + end + buffers.erase(buffer) +end + +-- interface + + +commands.definestructureblock = blocks.define +commands.savestructureblock = blocks.save +commands.selectstructureblock = blocks.select +commands.setstructureblockstate = blocks.setstate diff --git a/tex/context/base/strc-con.lua b/tex/context/base/strc-con.lua index 29a1c0cb3..42da72f64 100644 --- a/tex/context/base/strc-con.lua +++ b/tex/context/base/strc-con.lua @@ -1,9 +1,9 @@ -if not modules then modules = { } end modules ['strc-con'] = { - version = 1.001, - comment = "companion to strc-con.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- empty +if not modules then modules = { } end modules ['strc-con'] = { + version = 1.001, + comment = "companion to strc-con.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- empty diff --git a/tex/context/base/strc-doc.lua b/tex/context/base/strc-doc.lua index 50a9e67a0..37a16c414 100644 --- a/tex/context/base/strc-doc.lua +++ b/tex/context/base/strc-doc.lua @@ -1,956 +1,956 @@ -if not modules then modules = { } end modules ['strc-doc'] = { - version = 1.001, - comment = "companion to strc-doc.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme 
files" -} - --- todo: associate counter with head --- we need to better split the lua/tex end --- we need to freeze and document this module - --- keep this as is: --- --- in section titles by default a zero aborts, so there we need: sectionset=bagger with \definestructureprefixset [bagger] [section-2,section-4] [] --- in lists however zero's are ignored, so there numbersegments=2:4 gives result - -local next, type, tonumber, select = next, type, tonumber, select -local format, gsub, find, gmatch, match = string.format, string.gsub, string.find, string.gmatch, string.match -local concat, fastcopy = table.concat, table.fastcopy -local max, min = math.max, math.min -local allocate, mark, accesstable = utilities.storage.allocate, utilities.storage.mark, utilities.tables.accesstable -local setmetatableindex = table.setmetatableindex - -local catcodenumbers = catcodes.numbers -local ctxcatcodes = catcodenumbers.ctxcatcodes -local variables = interfaces.variables - -local v_last = variables.last -local v_first = variables.first -local v_previous = variables.previous -local v_next = variables.next -local v_auto = variables.auto -local v_strict = variables.strict -local v_all = variables.all -local v_positive = variables.positive -local v_by = variables.by - -local trace_sectioning = false trackers.register("structures.sectioning", function(v) trace_sectioning = v end) -local trace_detail = false trackers.register("structures.detail", function(v) trace_detail = v end) - -local report_structure = logs.reporter("structure","sectioning") - -local structures = structures -local context = context - -local helpers = structures.helpers -local documents = structures.documents -local sections = structures.sections -local lists = structures.lists -local counters = structures.counters -local sets = structures.sets -local tags = structures.tags - -local processors = typesetters.processors -local applyprocessor = processors.apply -local startapplyprocessor = processors.startapply -local stopapplyprocessor = processors.stopapply -local strippedprocessor = processors.stripped - -local a_internal = attributes.private('internal') - --- -- -- document -- -- -- - -local data -- the current state - -function documents.initialize() - data = allocate { -- whole data is marked - numbers = { }, - forced = { }, - ownnumbers = { }, - status = { }, - checkers = { }, - depth = 0, - blocks = { }, - block = "", - } - documents.data = data -end - -function documents.reset() - data.numbers = { } - data.forced = { } - data.ownnumbers = { } - data.status = { } - -- data.checkers = { } - data.depth = 0 -end - -documents.initialize() - --- -- -- components -- -- -- - -function documents.preset(numbers) - local nofnumbers = #numbers - local ownnumbers = { } - data.numbers = numbers - data.ownnumbers = ownnumbers - data.depth = nofnumbers - for i=1,nofnumbers do - ownnumbers[i] = "" - end - sections.setnumber(nofnumbers,"-1") -end - --- -- -- sections -- -- -- - -local collected = allocate() -local tobesaved = allocate() - -sections.collected = collected -sections.tobesaved = tobesaved - --- local function initializer() --- collected = sections.collected --- tobesaved = sections.tobesaved --- end --- --- job.register('structures.sections.collected', tobesaved, initializer) - -sections.registered = sections.registered or allocate() -local registered = sections.registered - -storage.register("structures/sections/registered", registered, "structures.sections.registered") - -function sections.register(name,specification) - 
registered[name] = specification -end - -function sections.currentid() - return #tobesaved -end - -function sections.save(sectiondata) --- local sectionnumber = helpers.simplify(section.sectiondata) -- maybe done earlier - local numberdata = sectiondata.numberdata - local ntobesaved = #tobesaved - if not numberdata or sectiondata.metadata.nolist then - return ntobesaved - else - ntobesaved = ntobesaved + 1 - tobesaved[ntobesaved] = numberdata - if not collected[ntobesaved] then - collected[ntobesaved] = numberdata - end - return ntobesaved - end -end - -function sections.load() - setmetatableindex(collected,nil) - local lists = lists.collected - for i=1,#lists do - local list = lists[i] - local metadata = list.metadata - if metadata and metadata.kind == "section" and not metadata.nolist then - local numberdata = list.numberdata - if numberdata then - collected[#collected+1] = numberdata - end - end - end - sections.load = functions.dummy -end - -table.setmetatableindex(collected, function(t,i) - sections.load() - return collected[i] or { } -end) - --- - -sections.levelmap = sections.levelmap or { } - -local levelmap = sections.levelmap - -storage.register("structures/sections/levelmap", sections.levelmap, "structures.sections.levelmap") - -sections.verbose = true - -levelmap.block = -1 - -function sections.setlevel(name,level) -- level can be number or parent (=string) - local l = tonumber(level) - if not l then - l = levelmap[level] - end - if l and l > 0 then - levelmap[name] = l - else - -- error - end -end - -function sections.getlevel(name) - return levelmap[name] or 0 -end - -function sections.setblock(name) - local block = name or data.block or "unknown" -- can be used to set the default - data.block = block - return block -end - -function sections.pushblock(name) - counters.check(0) -- we assume sane usage of \page between blocks - local block = name or data.block - data.blocks[#data.blocks+1] = block - data.block = block - documents.reset() - return block -end - -function sections.popblock() - data.blocks[#data.blocks] = nil - local block = data.blocks[#data.blocks] or data.block - data.block = block - documents.reset() - return block -end - -function sections.currentblock() - return data.block or data.blocks[#data.blocks] or "unknown" -end - -function sections.currentlevel() - return data.depth -end - -function sections.getcurrentlevel() - context(data.depth) -end - -local saveset = { } -- experiment, see sections/tricky-001.tex - -function sections.somelevel(given) - -- old number - local numbers = data.numbers - - local ownnumbers = data.ownnumbers - local forced = data.forced - local status = data.status - local olddepth = data.depth - local givenname = given.metadata.name - local mappedlevel = levelmap[givenname] - local newdepth = tonumber(mappedlevel or (olddepth > 0 and olddepth) or 1) -- hm, levelmap only works for section-* - local directives = given.directives - local resetset = directives and directives.resetset or "" - -- local resetter = sets.getall("structure:resets",data.block,resetset) - -- a trick to permit userdata to overload title, ownnumber and reference - -- normally these are passed as argument but nowadays we provide several - -- interfaces (we need this because we want to be compatible) - if trace_detail then - report_structure("name %a, mapped level %a, old depth %a, new depth %a, reset set %a", - givenname,mappedlevel,olddepth,newdepth,resetset) - end - local u = given.userdata - if u then - -- kind of obsolete as we can pass them directly anyway - if 
u.reference and u.reference ~= "" then given.metadata.reference = u.reference ; u.reference = nil end - if u.ownnumber and u.ownnumber ~= "" then given.numberdata.ownnumber = u.ownnumber ; u.ownnumber = nil end - if u.title and u.title ~= "" then given.titledata.title = u.title ; u.title = nil end - if u.bookmark and u.bookmark ~= "" then given.titledata.bookmark = u.bookmark ; u.bookmark = nil end - if u.label and u.label ~= "" then given.titledata.label = u.label ; u.label = nil end - end - -- so far for the trick - if saveset then - saveset[newdepth] = (resetset ~= "" and resetset) or saveset[newdepth] or "" - end - if newdepth > olddepth then - for i=olddepth+1,newdepth do - local s = tonumber(sets.get("structure:resets",data.block,saveset and saveset[i] or resetset,i)) - if trace_detail then - report_structure("new depth %s, old depth %s, reset set %a, reset value %a, current %a",olddepth,newdepth,resetset,s,numbers[i]) - end - if not s or s == 0 then - numbers[i] = numbers[i] or 0 - ownnumbers[i] = ownnumbers[i] or "" - else - numbers[i] = s - 1 - ownnumbers[i] = "" - end - status[i] = { } - end - elseif newdepth < olddepth then - for i=olddepth,newdepth+1,-1 do - local s = tonumber(sets.get("structure:resets",data.block,saveset and saveset[i] or resetset,i)) - if trace_detail then - report_structure("new depth %s, old depth %s, reset set %a, reset value %a, current %a",olddepth,newdepth,resetset,s,numbers[i]) - end - if not s or s == 0 then - numbers[i] = numbers[i] or 0 - ownnumbers[i] = ownnumbers[i] or "" - else - numbers[i] = s - 1 - ownnumbers[i] = "" - end - status[i] = nil - end - end - counters.check(newdepth) - ownnumbers[newdepth] = given.numberdata.ownnumber or "" - given.numberdata.ownnumber = nil - data.depth = newdepth - -- new number - olddepth = newdepth - if given.metadata.increment then - local oldn, newn = numbers[newdepth] or 0, 0 - local fd = forced[newdepth] - if fd then - if fd[1] == "add" then - newn = oldn + fd[2] + 1 - else - newn = fd[2] + 1 - end - if newn < 0 then - newn = 1 -- maybe zero is nicer - end - forced[newdepth] = nil - if trace_detail then - report_structure("old depth %a, new depth %a, old n %a, new n %a, forced %t",olddepth,newdepth,oldn,newn,fd) - end - else - newn = oldn + 1 - if trace_detail then - report_structure("old depth %a, new depth %a, old n %a, new n %a, increment",olddepth,newdepth,oldn,newn) - end - end - numbers[newdepth] = newn - end - status[newdepth] = given or { } - for k, v in next, data.checkers do - if v[1] == newdepth and v[2] then - v[2](k) - end - end - local numberdata= given.numberdata - if not numberdata then - -- probably simplified to nothing - numberdata = { } - given.numberdata = numberdata - end - - local n = { } - for i=1,newdepth do - n[i] = numbers[i] - end - numberdata.numbers = n --- numberdata.numbers = fastcopy(numbers) - - if #ownnumbers > 0 then - numberdata.ownnumbers = fastcopy(ownnumbers) - end - if trace_detail then - report_structure("name %a, numbers % a, own numbers % a",givenname,numberdata.numbers,numberdata.ownnumbers) - end - - local metadata = given.metadata - local references = given.references - - local tag = references.tag or tags.getid(metadata.kind,metadata.name) - if tag and tag ~= "" and tag ~= "?" 
then - references.tag = tag - end - - local setcomponent = structures.references.setcomponent - if setcomponent then - setcomponent(given) -- might move to the tex end - end - - references.section = sections.save(given) - -- given.numberdata = nil -end - -function sections.reportstructure() - if sections.verbose then - local numbers, ownnumbers, status, depth = data.numbers, data.ownnumbers, data.status, data.depth - local d = status[depth] - local o = concat(ownnumbers,".",1,depth) - local n = (numbers and concat(numbers,".",1,min(depth,#numbers))) or 0 - local l = d.titledata.title or "" - local t = (l ~= "" and l) or d.titledata.title or "[no title]" - local m = d.metadata.name - if o and not find(o,"^%.*$") then - report_structure("%s @ level %i : (%s) %s -> %s",m,depth,n,o,t) - elseif d.directives and d.directives.hidenumber then - report_structure("%s @ level %i : (%s) -> %s",m,depth,n,t) - else - report_structure("%s @ level %i : %s -> %s",m,depth,n,t) - end - end -end - -function sections.setnumber(depth,n) - local forced, depth, new = data.forced, depth or data.depth, tonumber(n) - if type(n) == "string" then - if find(n,"^[%+%-]") then - forced[depth] = { "add", new } - else - forced[depth] = { "set", new } - end - else - forced[depth] = { "set", new } - end -end - -function sections.numberatdepth(depth) - return data.numbers[tonumber(depth) or sections.getlevel(depth) or 0] or 0 -end - -function sections.numbers() - return data.numbers -end - -function sections.matchingtilldepth(depth,numbers,parentnumbers) - local dn = parentnumbers or data.numbers - local ok = false - for i=1,depth do - if dn[i] == numbers[i] then - ok = true - else - return false - end - end - return ok -end - -function sections.getnumber(depth) -- redefined later ... 
- context(data.numbers[depth] or 0) -end - -function sections.set(key,value) - data.status[data.depth][key] = value -- may be nil for a reset -end - -function sections.cct() - local metadata = data.status[data.depth].metadata - context(metadata and metadata.catcodes or ctxcatcodes) -end - --- this one will become: return catcode, d (etc) - -function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: spec table and then also depth - if depth then - depth = levelmap[depth] or tonumber(depth) - end - if not depth or depth == 0 then - depth = data.depth - end - local data = data.status[depth] - local d - if data then - if find(key,"%.") then - d = accesstable(key,data) - else - d = data.titledata - d = d and d[key] - end - end - if d and type(d) ~= "table" then - if honorcatcodetable == true or honorcatcodetable == v_auto then - local metadata = data.metadata - local catcodes = metadata and metadata.catcodes - if catcodes then - context.sprint(catcodes,d) - else - context(d) - end - elseif not honorcatcodetable or honorcatcodetable == "" then - context(d) - else - local catcodes = catcodenumbers[honorcatcodetable] - if catcodes then - context.sprint(catcodes,d) - else - context(d) - end - end - elseif default then - context(default) - end -end - -function sections.userdata(depth,key,default) - if depth then - depth = levelmap[depth] or tonumber(depth) - end - if not depth or depth == 0 then - depth = data.depth - end - if depth > 0 then - local userdata = data.status[depth] - userdata = userdata and userdata.userdata - userdata = (userdata and userdata[key]) or default - if userdata then - context(userdata) - end - end -end - -function sections.setchecker(name,level,command) -- hm, checkers are not saved - data.checkers[name] = (name and command and level >= 0 and { level, command }) or nil -end - -function sections.current() - return data.status[data.depth] -end - -function sections.depthnumber(n) - local depth = data.depth - if not n or n == 0 then - n = depth - elseif n < 0 then - n = depth + n - end - return context(data.numbers[n] or 0) -end - -function sections.autodepth(numbers) - for i=#numbers,1,-1 do - if numbers[i] ~= 0 then - return i - end - end - return 0 -end - --- - -function structures.currentsectionnumber() -- brr, namespace wrong - local sc = sections.current() - return sc and sc.numberdata -end - --- \dorecurse{3} { --- \chapter{Blabla} \subsection{bla 1 1} \subsection{bla 1 2} --- \section{bla 2} \subsection{bla 2 1} \subsection{bla 2 2} --- } - --- sign=all => also zero and negative --- sign=positive => also zero --- sign=hang => llap sign - ---~ todo: test this ---~ - -local function process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done) - -- todo: too much (100 steps) - local number = numbers and (numbers[index] or 0) - local ownnumber = ownnumbers and ownnumbers[index] or "" - if number > criterium or (ownnumber ~= "") then - local block = (entry.block ~= "" and entry.block) or sections.currentblock() -- added - if preceding then - local separator = sets.get("structure:separators",block,separatorset,preceding,".") - if separator then - if result then - result[#result+1] = strippedprocessor(separator) - else - applyprocessor(separator) - end - end - preceding = false - end - if result then - if ownnumber ~= "" then - result[#result+1] = ownnumber - elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups) .. inherited! 
- result[#result+1] = converters.convert(conversion,number) - else - local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers") - result[#result+1] = converters.convert(theconversion,number) - end - else - if ownnumber ~= "" then - applyprocessor(ownnumber) - elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups) - context.convertnumber(conversion,number) - else - local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers") - local data = startapplyprocessor(theconversion) - context.convertnumber(data or "numbers",number) - stopapplyprocessor() - end - end - return index, true - else - return preceding or false, done - end -end - -function sections.typesetnumber(entry,kind,...) -- kind='section','number','prefix' - if entry and entry.hidenumber ~= true then -- can be nil - local separatorset = "" - local conversionset = "" - local conversion = "" - local groupsuffix = "" - local stopper = "" - local starter = "" - local connector = "" - local set = "" - local segments = "" - local criterium = "" - for d=1,select("#",...) do - local data = select(d,...) -- can be multiple parametersets - if data then - if separatorset == "" then separatorset = data.separatorset or "" end - if conversionset == "" then conversionset = data.conversionset or "" end - if conversion == "" then conversion = data.conversion or "" end - if groupsuffix == "" then groupsuffix = data.groupsuffix or "" end - if stopper == "" then stopper = data.stopper or "" end - if starter == "" then starter = data.starter or "" end - if connector == "" then connector = data.connector or "" end - if set == "" then set = data.set or "" end - if segments == "" then segments = data.segments or "" end - if criterium == "" then criterium = data.criterium or "" end - end - end - if separatorset == "" then separatorset = "default" end - if conversionset == "" then conversionset = "default" end -- not used - if conversion == "" then conversion = nil end - if groupsuffix == "" then groupsuffix = nil end - if stopper == "" then stopper = nil end - if starter == "" then starter = nil end - if connector == "" then connector = nil end - if set == "" then set = "default" end - if segments == "" then segments = nil end - -- - if criterium == v_strict then - criterium = 0 - elseif criterium == v_positive then - criterium = -1 - elseif criterium == v_all then - criterium = -1000000 - else - criterium = 0 - end - -- - local firstprefix, lastprefix = 0, 16 - if segments then - local f, l = match(tostring(segments),"^(.-):(.+)$") - if l == "*" then - l = 100 -- new - end - if f and l then - -- 0:100, chapter:subsubsection - firstprefix = tonumber(f) or sections.getlevel(f) or 0 - lastprefix = tonumber(l) or sections.getlevel(l) or 100 - else - -- 3, section - local fl = tonumber(segments) or sections.getlevel(segments) -- generalize - if fl then - firstprefix = fl - lastprefix = fl - end - end - end - -- - local numbers, ownnumbers = entry.numbers, entry.ownnumbers - if numbers then - local done, preceding = false, false - -- - local result = kind == "direct" and { } - if result then - connector = false - end - -- - local prefixlist = set and sets.getall("structure:prefixes","",set) -- "" == block - if starter then - if result then - result[#result+1] = strippedprocessor(starter) - else - applyprocessor(starter) - end - end - if prefixlist and (kind == 'section' or kind == 'prefix' or kind == 'direct') then - -- find valid set (problem: for sectionnumber we 
should pass the level) - -- no holes - local b, e, bb, ee = 1, #prefixlist, 0, 0 - -- find last valid number - for k=e,b,-1 do - local prefix = prefixlist[k] - local index = sections.getlevel(prefix) or k - if index >= firstprefix and index <= lastprefix then - local number = numbers and numbers[index] - if number then - local ownnumber = ownnumbers and ownnumbers[index] or "" - if number > 0 or (ownnumber ~= "") then - break - else - e = k -1 - end - end - end - end - -- find valid range - for k=b,e do - local prefix = prefixlist[k] - local index = sections.getlevel(prefix) or k - if index >= firstprefix and index <= lastprefix then - local number = numbers and numbers[index] - if number then - local ownnumber = ownnumbers and ownnumbers[index] or "" - if number > 0 or (ownnumber ~= "") then - if bb == 0 then bb = k end - ee = k - else - bb, ee = 0, 0 - end - else - break - end - end - end - -- print valid range - for k=bb,ee do - local prefix = prefixlist[k] - local index = sections.getlevel(prefix) or k - if index >= firstprefix and index <= lastprefix then - -- process(index,result) - preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done) - end - end - else - -- also holes check - for index=firstprefix,lastprefix do - -- process(index,result) - preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done) - end - end - -- - if done then - if connector and kind == 'prefix' then - if result then - -- can't happen as we're in 'direct' - else - applyprocessor(connector) - end - else -if groupsuffix and kind ~= "prefix" then - if result then - result[#result+1] = strippedprocessor(groupsuffix) - else - applyprocessor(groupsuffix) - end -end - if stopper then - if result then - result[#result+1] = strippedprocessor(stopper) - else - applyprocessor(stopper) - end - end - end - end - return result -- a table ! 
- else - -- report_structure("error: no numbers") - end - end -end - -function sections.title() - local sc = sections.current() - if sc then - helpers.title(sc.titledata.title,sc.metadata) - end -end - -function sections.findnumber(depth,what) -- needs checking (looks wrong and slow too) - local data = data.status[depth or data.depth] - if data then - local index = data.references.section - local collected = sections.collected - local sectiondata = collected[index] - if sectiondata and sectiondata.hidenumber ~= true then -- can be nil - local quit = what == v_previous or what == v_next - if what == v_first or what == v_previous then - for i=index,1,-1 do - local s = collected[i] - if s then - local n = s.numbers - if #n == depth and n[depth] and n[depth] ~= 0 then - sectiondata = s - if quit then - break - end - elseif #n < depth then - break - end - end - end - elseif what == v_last or what == v_next then - for i=index,#collected do - local s = collected[i] - if s then - local n = s.numbers - if #n == depth and n[depth] and n[depth] ~= 0 then - sectiondata = s - if quit then - break - end - elseif #n < depth then - break - end - end - end - end - return sectiondata - end - end -end - -function sections.finddata(depth,what) - local data = data.status[depth or data.depth] - if data then - -- if sectiondata and sectiondata.hidenumber ~= true then -- can be nil - local index = data.references.listindex - if index then - local collected = structures.lists.collected - local quit = what == v_previous or what == v_next - if what == v_first or what == v_previous then - for i=index-1,1,-1 do - local s = collected[i] - if not s then - break - elseif s.metadata.kind == "section" then -- maybe check on name - local n = s.numberdata.numbers - if #n == depth and n[depth] and n[depth] ~= 0 then - data = s - if quit then - break - end - elseif #n < depth then - break - end - end - end - elseif what == v_last or what == v_next then - for i=index+1,#collected do - local s = collected[i] - if not s then - break - elseif s.metadata.kind == "section" then -- maybe check on name - local n = s.numberdata.numbers - if #n == depth and n[depth] and n[depth] ~= 0 then - data = s - if quit then - break - end - elseif #n < depth then - break - end - end - end - end - end - return data - end -end - -function sections.internalreference(sectionname,what) -- to be used in pagebuilder (no marks used) - local r = type(sectionname) == "number" and sectionname or registered[sectionname] - if r then - local data = sections.finddata(r.level,what) - return data and data.references and data.references.internal - end -end - -function sections.fullnumber(depth,what) - local sectiondata = sections.findnumber(depth,what) - if sectiondata then - sections.typesetnumber(sectiondata,'section',sectiondata) - end -end - -function sections.getnumber(depth,what) -- redefined here - local sectiondata = sections.findnumber(depth,what) - context((sectiondata and sectiondata.numbers[depth]) or 0) -end - --- experimental - -local levels = { } - ---~ function commands.autonextstructurelevel(level) ---~ if level > #levels then ---~ for i=#levels+1,level do ---~ levels[i] = "" ---~ end ---~ end ---~ local finish = concat(levels,"\n",level) or "" ---~ for i=level+1,#levels do ---~ levels[i] = "" ---~ end ---~ levels[level] = [[\finalizeautostructurelevel]] ---~ context(finish) ---~ end - ---~ function commands.autofinishstructurelevels() ---~ local finish = concat(levels,"\n") or "" ---~ levels = { } ---~ context(finish) ---~ end - -function 
commands.autonextstructurelevel(level) - if level > #levels then - for i=#levels+1,level do - levels[i] = false - end - else - for i=level,#levels do - if levels[i] then - context.finalizeautostructurelevel() - levels[i] = false - end - end - end - levels[level] = true -end - -function commands.autofinishstructurelevels() - for i=1,#levels do - if levels[i] then - context.finalizeautostructurelevel() - end - end - levels = { } -end - --- interface (some are actually already commands, like sections.fullnumber) - -commands.structurenumber = function() sections.fullnumber() end -commands.structuretitle = function() sections.title () end - -commands.structurevariable = function(name) sections.structuredata(nil,name) end -commands.structureuservariable = function(name) sections.userdata (nil,name) end -commands.structurecatcodedget = function(name) sections.structuredata(nil,name,nil,true) end -commands.structuregivencatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end -commands.structureautocatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end - -commands.namedstructurevariable = function(depth,name) sections.structuredata(depth,name) end -commands.namedstructureuservariable = function(depth,name) sections.userdata (depth,name) end - --- - -function commands.setsectionblock (name) context(sections.setblock(name)) end -function commands.pushsectionblock(name) context(sections.pushblock(name)) end -function commands.popsectionblock () context(sections.popblock()) end - --- - -local byway = "^" .. v_by -- ugly but downward compatible - -function commands.way(way) - context((gsub(way,byway,""))) -end +if not modules then modules = { } end modules ['strc-doc'] = { + version = 1.001, + comment = "companion to strc-doc.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: associate counter with head +-- we need to better split the lua/tex end +-- we need to freeze and document this module + +-- keep this as is: +-- +-- in section titles by default a zero aborts, so there we need: sectionset=bagger with \definestructureprefixset [bagger] [section-2,section-4] [] +-- in lists however zero's are ignored, so there numbersegments=2:4 gives result + +local next, type, tonumber, select = next, type, tonumber, select +local format, gsub, find, gmatch, match = string.format, string.gsub, string.find, string.gmatch, string.match +local concat, fastcopy = table.concat, table.fastcopy +local max, min = math.max, math.min +local allocate, mark, accesstable = utilities.storage.allocate, utilities.storage.mark, utilities.tables.accesstable +local setmetatableindex = table.setmetatableindex + +local catcodenumbers = catcodes.numbers +local ctxcatcodes = catcodenumbers.ctxcatcodes +local variables = interfaces.variables + +local v_last = variables.last +local v_first = variables.first +local v_previous = variables.previous +local v_next = variables.next +local v_auto = variables.auto +local v_strict = variables.strict +local v_all = variables.all +local v_positive = variables.positive +local v_by = variables.by + +local trace_sectioning = false trackers.register("structures.sectioning", function(v) trace_sectioning = v end) +local trace_detail = false trackers.register("structures.detail", function(v) trace_detail = v end) + +local report_structure = logs.reporter("structure","sectioning") + +local structures = structures +local context 
= context + +local helpers = structures.helpers +local documents = structures.documents +local sections = structures.sections +local lists = structures.lists +local counters = structures.counters +local sets = structures.sets +local tags = structures.tags + +local processors = typesetters.processors +local applyprocessor = processors.apply +local startapplyprocessor = processors.startapply +local stopapplyprocessor = processors.stopapply +local strippedprocessor = processors.stripped + +local a_internal = attributes.private('internal') + +-- -- -- document -- -- -- + +local data -- the current state + +function documents.initialize() + data = allocate { -- whole data is marked + numbers = { }, + forced = { }, + ownnumbers = { }, + status = { }, + checkers = { }, + depth = 0, + blocks = { }, + block = "", + } + documents.data = data +end + +function documents.reset() + data.numbers = { } + data.forced = { } + data.ownnumbers = { } + data.status = { } + -- data.checkers = { } + data.depth = 0 +end + +documents.initialize() + +-- -- -- components -- -- -- + +function documents.preset(numbers) + local nofnumbers = #numbers + local ownnumbers = { } + data.numbers = numbers + data.ownnumbers = ownnumbers + data.depth = nofnumbers + for i=1,nofnumbers do + ownnumbers[i] = "" + end + sections.setnumber(nofnumbers,"-1") +end + +-- -- -- sections -- -- -- + +local collected = allocate() +local tobesaved = allocate() + +sections.collected = collected +sections.tobesaved = tobesaved + +-- local function initializer() +-- collected = sections.collected +-- tobesaved = sections.tobesaved +-- end +-- +-- job.register('structures.sections.collected', tobesaved, initializer) + +sections.registered = sections.registered or allocate() +local registered = sections.registered + +storage.register("structures/sections/registered", registered, "structures.sections.registered") + +function sections.register(name,specification) + registered[name] = specification +end + +function sections.currentid() + return #tobesaved +end + +function sections.save(sectiondata) +-- local sectionnumber = helpers.simplify(section.sectiondata) -- maybe done earlier + local numberdata = sectiondata.numberdata + local ntobesaved = #tobesaved + if not numberdata or sectiondata.metadata.nolist then + return ntobesaved + else + ntobesaved = ntobesaved + 1 + tobesaved[ntobesaved] = numberdata + if not collected[ntobesaved] then + collected[ntobesaved] = numberdata + end + return ntobesaved + end +end + +function sections.load() + setmetatableindex(collected,nil) + local lists = lists.collected + for i=1,#lists do + local list = lists[i] + local metadata = list.metadata + if metadata and metadata.kind == "section" and not metadata.nolist then + local numberdata = list.numberdata + if numberdata then + collected[#collected+1] = numberdata + end + end + end + sections.load = functions.dummy +end + +table.setmetatableindex(collected, function(t,i) + sections.load() + return collected[i] or { } +end) + +-- + +sections.levelmap = sections.levelmap or { } + +local levelmap = sections.levelmap + +storage.register("structures/sections/levelmap", sections.levelmap, "structures.sections.levelmap") + +sections.verbose = true + +levelmap.block = -1 + +function sections.setlevel(name,level) -- level can be number or parent (=string) + local l = tonumber(level) + if not l then + l = levelmap[level] + end + if l and l > 0 then + levelmap[name] = l + else + -- error + end +end + +function sections.getlevel(name) + return levelmap[name] or 0 +end + 
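-- Editor's sketch (not part of the patch): the level map above ties head names to
-- numeric section levels. sections.setlevel accepts either a number or the name of
-- an already registered head (whose level is then reused), and sections.getlevel
-- falls back to 0 for unknown names. A minimal standalone imitation, with made-up
-- head names:

local levelmap = { block = -1 }

local function setlevel(name,level)                 -- level: number or parent name
    local l = tonumber(level) or levelmap[level]
    if l and l > 0 then
        levelmap[name] = l
    end
end

local function getlevel(name)
    return levelmap[name] or 0
end

setlevel("chapter",1)
setlevel("section",2)
setlevel("summary","section")                       -- reuses the level of "section"

print(getlevel("section"), getlevel("summary"), getlevel("unknown"))  -- 2  2  0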
+function sections.setblock(name) + local block = name or data.block or "unknown" -- can be used to set the default + data.block = block + return block +end + +function sections.pushblock(name) + counters.check(0) -- we assume sane usage of \page between blocks + local block = name or data.block + data.blocks[#data.blocks+1] = block + data.block = block + documents.reset() + return block +end + +function sections.popblock() + data.blocks[#data.blocks] = nil + local block = data.blocks[#data.blocks] or data.block + data.block = block + documents.reset() + return block +end + +function sections.currentblock() + return data.block or data.blocks[#data.blocks] or "unknown" +end + +function sections.currentlevel() + return data.depth +end + +function sections.getcurrentlevel() + context(data.depth) +end + +local saveset = { } -- experiment, see sections/tricky-001.tex + +function sections.somelevel(given) + -- old number + local numbers = data.numbers + + local ownnumbers = data.ownnumbers + local forced = data.forced + local status = data.status + local olddepth = data.depth + local givenname = given.metadata.name + local mappedlevel = levelmap[givenname] + local newdepth = tonumber(mappedlevel or (olddepth > 0 and olddepth) or 1) -- hm, levelmap only works for section-* + local directives = given.directives + local resetset = directives and directives.resetset or "" + -- local resetter = sets.getall("structure:resets",data.block,resetset) + -- a trick to permit userdata to overload title, ownnumber and reference + -- normally these are passed as argument but nowadays we provide several + -- interfaces (we need this because we want to be compatible) + if trace_detail then + report_structure("name %a, mapped level %a, old depth %a, new depth %a, reset set %a", + givenname,mappedlevel,olddepth,newdepth,resetset) + end + local u = given.userdata + if u then + -- kind of obsolete as we can pass them directly anyway + if u.reference and u.reference ~= "" then given.metadata.reference = u.reference ; u.reference = nil end + if u.ownnumber and u.ownnumber ~= "" then given.numberdata.ownnumber = u.ownnumber ; u.ownnumber = nil end + if u.title and u.title ~= "" then given.titledata.title = u.title ; u.title = nil end + if u.bookmark and u.bookmark ~= "" then given.titledata.bookmark = u.bookmark ; u.bookmark = nil end + if u.label and u.label ~= "" then given.titledata.label = u.label ; u.label = nil end + end + -- so far for the trick + if saveset then + saveset[newdepth] = (resetset ~= "" and resetset) or saveset[newdepth] or "" + end + if newdepth > olddepth then + for i=olddepth+1,newdepth do + local s = tonumber(sets.get("structure:resets",data.block,saveset and saveset[i] or resetset,i)) + if trace_detail then + report_structure("new depth %s, old depth %s, reset set %a, reset value %a, current %a",olddepth,newdepth,resetset,s,numbers[i]) + end + if not s or s == 0 then + numbers[i] = numbers[i] or 0 + ownnumbers[i] = ownnumbers[i] or "" + else + numbers[i] = s - 1 + ownnumbers[i] = "" + end + status[i] = { } + end + elseif newdepth < olddepth then + for i=olddepth,newdepth+1,-1 do + local s = tonumber(sets.get("structure:resets",data.block,saveset and saveset[i] or resetset,i)) + if trace_detail then + report_structure("new depth %s, old depth %s, reset set %a, reset value %a, current %a",olddepth,newdepth,resetset,s,numbers[i]) + end + if not s or s == 0 then + numbers[i] = numbers[i] or 0 + ownnumbers[i] = ownnumbers[i] or "" + else + numbers[i] = s - 1 + ownnumbers[i] = "" + end + status[i] = 
nil + end + end + counters.check(newdepth) + ownnumbers[newdepth] = given.numberdata.ownnumber or "" + given.numberdata.ownnumber = nil + data.depth = newdepth + -- new number + olddepth = newdepth + if given.metadata.increment then + local oldn, newn = numbers[newdepth] or 0, 0 + local fd = forced[newdepth] + if fd then + if fd[1] == "add" then + newn = oldn + fd[2] + 1 + else + newn = fd[2] + 1 + end + if newn < 0 then + newn = 1 -- maybe zero is nicer + end + forced[newdepth] = nil + if trace_detail then + report_structure("old depth %a, new depth %a, old n %a, new n %a, forced %t",olddepth,newdepth,oldn,newn,fd) + end + else + newn = oldn + 1 + if trace_detail then + report_structure("old depth %a, new depth %a, old n %a, new n %a, increment",olddepth,newdepth,oldn,newn) + end + end + numbers[newdepth] = newn + end + status[newdepth] = given or { } + for k, v in next, data.checkers do + if v[1] == newdepth and v[2] then + v[2](k) + end + end + local numberdata= given.numberdata + if not numberdata then + -- probably simplified to nothing + numberdata = { } + given.numberdata = numberdata + end + + local n = { } + for i=1,newdepth do + n[i] = numbers[i] + end + numberdata.numbers = n +-- numberdata.numbers = fastcopy(numbers) + + if #ownnumbers > 0 then + numberdata.ownnumbers = fastcopy(ownnumbers) + end + if trace_detail then + report_structure("name %a, numbers % a, own numbers % a",givenname,numberdata.numbers,numberdata.ownnumbers) + end + + local metadata = given.metadata + local references = given.references + + local tag = references.tag or tags.getid(metadata.kind,metadata.name) + if tag and tag ~= "" and tag ~= "?" then + references.tag = tag + end + + local setcomponent = structures.references.setcomponent + if setcomponent then + setcomponent(given) -- might move to the tex end + end + + references.section = sections.save(given) + -- given.numberdata = nil +end + +function sections.reportstructure() + if sections.verbose then + local numbers, ownnumbers, status, depth = data.numbers, data.ownnumbers, data.status, data.depth + local d = status[depth] + local o = concat(ownnumbers,".",1,depth) + local n = (numbers and concat(numbers,".",1,min(depth,#numbers))) or 0 + local l = d.titledata.title or "" + local t = (l ~= "" and l) or d.titledata.title or "[no title]" + local m = d.metadata.name + if o and not find(o,"^%.*$") then + report_structure("%s @ level %i : (%s) %s -> %s",m,depth,n,o,t) + elseif d.directives and d.directives.hidenumber then + report_structure("%s @ level %i : (%s) -> %s",m,depth,n,t) + else + report_structure("%s @ level %i : %s -> %s",m,depth,n,t) + end + end +end + +function sections.setnumber(depth,n) + local forced, depth, new = data.forced, depth or data.depth, tonumber(n) + if type(n) == "string" then + if find(n,"^[%+%-]") then + forced[depth] = { "add", new } + else + forced[depth] = { "set", new } + end + else + forced[depth] = { "set", new } + end +end + +function sections.numberatdepth(depth) + return data.numbers[tonumber(depth) or sections.getlevel(depth) or 0] or 0 +end + +function sections.numbers() + return data.numbers +end + +function sections.matchingtilldepth(depth,numbers,parentnumbers) + local dn = parentnumbers or data.numbers + local ok = false + for i=1,depth do + if dn[i] == numbers[i] then + ok = true + else + return false + end + end + return ok +end + +function sections.getnumber(depth) -- redefined later ... 
+ context(data.numbers[depth] or 0) +end + +function sections.set(key,value) + data.status[data.depth][key] = value -- may be nil for a reset +end + +function sections.cct() + local metadata = data.status[data.depth].metadata + context(metadata and metadata.catcodes or ctxcatcodes) +end + +-- this one will become: return catcode, d (etc) + +function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: spec table and then also depth + if depth then + depth = levelmap[depth] or tonumber(depth) + end + if not depth or depth == 0 then + depth = data.depth + end + local data = data.status[depth] + local d + if data then + if find(key,"%.") then + d = accesstable(key,data) + else + d = data.titledata + d = d and d[key] + end + end + if d and type(d) ~= "table" then + if honorcatcodetable == true or honorcatcodetable == v_auto then + local metadata = data.metadata + local catcodes = metadata and metadata.catcodes + if catcodes then + context.sprint(catcodes,d) + else + context(d) + end + elseif not honorcatcodetable or honorcatcodetable == "" then + context(d) + else + local catcodes = catcodenumbers[honorcatcodetable] + if catcodes then + context.sprint(catcodes,d) + else + context(d) + end + end + elseif default then + context(default) + end +end + +function sections.userdata(depth,key,default) + if depth then + depth = levelmap[depth] or tonumber(depth) + end + if not depth or depth == 0 then + depth = data.depth + end + if depth > 0 then + local userdata = data.status[depth] + userdata = userdata and userdata.userdata + userdata = (userdata and userdata[key]) or default + if userdata then + context(userdata) + end + end +end + +function sections.setchecker(name,level,command) -- hm, checkers are not saved + data.checkers[name] = (name and command and level >= 0 and { level, command }) or nil +end + +function sections.current() + return data.status[data.depth] +end + +function sections.depthnumber(n) + local depth = data.depth + if not n or n == 0 then + n = depth + elseif n < 0 then + n = depth + n + end + return context(data.numbers[n] or 0) +end + +function sections.autodepth(numbers) + for i=#numbers,1,-1 do + if numbers[i] ~= 0 then + return i + end + end + return 0 +end + +-- + +function structures.currentsectionnumber() -- brr, namespace wrong + local sc = sections.current() + return sc and sc.numberdata +end + +-- \dorecurse{3} { +-- \chapter{Blabla} \subsection{bla 1 1} \subsection{bla 1 2} +-- \section{bla 2} \subsection{bla 2 1} \subsection{bla 2 2} +-- } + +-- sign=all => also zero and negative +-- sign=positive => also zero +-- sign=hang => llap sign + +--~ todo: test this +--~ + +local function process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done) + -- todo: too much (100 steps) + local number = numbers and (numbers[index] or 0) + local ownnumber = ownnumbers and ownnumbers[index] or "" + if number > criterium or (ownnumber ~= "") then + local block = (entry.block ~= "" and entry.block) or sections.currentblock() -- added + if preceding then + local separator = sets.get("structure:separators",block,separatorset,preceding,".") + if separator then + if result then + result[#result+1] = strippedprocessor(separator) + else + applyprocessor(separator) + end + end + preceding = false + end + if result then + if ownnumber ~= "" then + result[#result+1] = ownnumber + elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups) .. inherited! 
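                -- editor's note (not in the patch): in both the collecting and the
                -- typesetting branch the priority is: an explicit own number first,
                -- then an explicit 'conversion', and only as a last resort the
                -- conversion set resolved per block and level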
+ result[#result+1] = converters.convert(conversion,number) + else + local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers") + result[#result+1] = converters.convert(theconversion,number) + end + else + if ownnumber ~= "" then + applyprocessor(ownnumber) + elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups) + context.convertnumber(conversion,number) + else + local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers") + local data = startapplyprocessor(theconversion) + context.convertnumber(data or "numbers",number) + stopapplyprocessor() + end + end + return index, true + else + return preceding or false, done + end +end + +function sections.typesetnumber(entry,kind,...) -- kind='section','number','prefix' + if entry and entry.hidenumber ~= true then -- can be nil + local separatorset = "" + local conversionset = "" + local conversion = "" + local groupsuffix = "" + local stopper = "" + local starter = "" + local connector = "" + local set = "" + local segments = "" + local criterium = "" + for d=1,select("#",...) do + local data = select(d,...) -- can be multiple parametersets + if data then + if separatorset == "" then separatorset = data.separatorset or "" end + if conversionset == "" then conversionset = data.conversionset or "" end + if conversion == "" then conversion = data.conversion or "" end + if groupsuffix == "" then groupsuffix = data.groupsuffix or "" end + if stopper == "" then stopper = data.stopper or "" end + if starter == "" then starter = data.starter or "" end + if connector == "" then connector = data.connector or "" end + if set == "" then set = data.set or "" end + if segments == "" then segments = data.segments or "" end + if criterium == "" then criterium = data.criterium or "" end + end + end + if separatorset == "" then separatorset = "default" end + if conversionset == "" then conversionset = "default" end -- not used + if conversion == "" then conversion = nil end + if groupsuffix == "" then groupsuffix = nil end + if stopper == "" then stopper = nil end + if starter == "" then starter = nil end + if connector == "" then connector = nil end + if set == "" then set = "default" end + if segments == "" then segments = nil end + -- + if criterium == v_strict then + criterium = 0 + elseif criterium == v_positive then + criterium = -1 + elseif criterium == v_all then + criterium = -1000000 + else + criterium = 0 + end + -- + local firstprefix, lastprefix = 0, 16 + if segments then + local f, l = match(tostring(segments),"^(.-):(.+)$") + if l == "*" then + l = 100 -- new + end + if f and l then + -- 0:100, chapter:subsubsection + firstprefix = tonumber(f) or sections.getlevel(f) or 0 + lastprefix = tonumber(l) or sections.getlevel(l) or 100 + else + -- 3, section + local fl = tonumber(segments) or sections.getlevel(segments) -- generalize + if fl then + firstprefix = fl + lastprefix = fl + end + end + end + -- + local numbers, ownnumbers = entry.numbers, entry.ownnumbers + if numbers then + local done, preceding = false, false + -- + local result = kind == "direct" and { } + if result then + connector = false + end + -- + local prefixlist = set and sets.getall("structure:prefixes","",set) -- "" == block + if starter then + if result then + result[#result+1] = strippedprocessor(starter) + else + applyprocessor(starter) + end + end + if prefixlist and (kind == 'section' or kind == 'prefix' or kind == 'direct') then + -- find valid set (problem: for sectionnumber we 
should pass the level) + -- no holes + local b, e, bb, ee = 1, #prefixlist, 0, 0 + -- find last valid number + for k=e,b,-1 do + local prefix = prefixlist[k] + local index = sections.getlevel(prefix) or k + if index >= firstprefix and index <= lastprefix then + local number = numbers and numbers[index] + if number then + local ownnumber = ownnumbers and ownnumbers[index] or "" + if number > 0 or (ownnumber ~= "") then + break + else + e = k -1 + end + end + end + end + -- find valid range + for k=b,e do + local prefix = prefixlist[k] + local index = sections.getlevel(prefix) or k + if index >= firstprefix and index <= lastprefix then + local number = numbers and numbers[index] + if number then + local ownnumber = ownnumbers and ownnumbers[index] or "" + if number > 0 or (ownnumber ~= "") then + if bb == 0 then bb = k end + ee = k + else + bb, ee = 0, 0 + end + else + break + end + end + end + -- print valid range + for k=bb,ee do + local prefix = prefixlist[k] + local index = sections.getlevel(prefix) or k + if index >= firstprefix and index <= lastprefix then + -- process(index,result) + preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done) + end + end + else + -- also holes check + for index=firstprefix,lastprefix do + -- process(index,result) + preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done) + end + end + -- + if done then + if connector and kind == 'prefix' then + if result then + -- can't happen as we're in 'direct' + else + applyprocessor(connector) + end + else +if groupsuffix and kind ~= "prefix" then + if result then + result[#result+1] = strippedprocessor(groupsuffix) + else + applyprocessor(groupsuffix) + end +end + if stopper then + if result then + result[#result+1] = strippedprocessor(stopper) + else + applyprocessor(stopper) + end + end + end + end + return result -- a table ! 
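            -- editor's note (not in the patch): only kind == "direct" builds and
            -- returns this table of preprocessed strings; for 'section', 'number'
            -- and 'prefix' the pieces are typeset immediately via the processors
            -- (the return value then being false)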
+ else + -- report_structure("error: no numbers") + end + end +end + +function sections.title() + local sc = sections.current() + if sc then + helpers.title(sc.titledata.title,sc.metadata) + end +end + +function sections.findnumber(depth,what) -- needs checking (looks wrong and slow too) + local data = data.status[depth or data.depth] + if data then + local index = data.references.section + local collected = sections.collected + local sectiondata = collected[index] + if sectiondata and sectiondata.hidenumber ~= true then -- can be nil + local quit = what == v_previous or what == v_next + if what == v_first or what == v_previous then + for i=index,1,-1 do + local s = collected[i] + if s then + local n = s.numbers + if #n == depth and n[depth] and n[depth] ~= 0 then + sectiondata = s + if quit then + break + end + elseif #n < depth then + break + end + end + end + elseif what == v_last or what == v_next then + for i=index,#collected do + local s = collected[i] + if s then + local n = s.numbers + if #n == depth and n[depth] and n[depth] ~= 0 then + sectiondata = s + if quit then + break + end + elseif #n < depth then + break + end + end + end + end + return sectiondata + end + end +end + +function sections.finddata(depth,what) + local data = data.status[depth or data.depth] + if data then + -- if sectiondata and sectiondata.hidenumber ~= true then -- can be nil + local index = data.references.listindex + if index then + local collected = structures.lists.collected + local quit = what == v_previous or what == v_next + if what == v_first or what == v_previous then + for i=index-1,1,-1 do + local s = collected[i] + if not s then + break + elseif s.metadata.kind == "section" then -- maybe check on name + local n = s.numberdata.numbers + if #n == depth and n[depth] and n[depth] ~= 0 then + data = s + if quit then + break + end + elseif #n < depth then + break + end + end + end + elseif what == v_last or what == v_next then + for i=index+1,#collected do + local s = collected[i] + if not s then + break + elseif s.metadata.kind == "section" then -- maybe check on name + local n = s.numberdata.numbers + if #n == depth and n[depth] and n[depth] ~= 0 then + data = s + if quit then + break + end + elseif #n < depth then + break + end + end + end + end + end + return data + end +end + +function sections.internalreference(sectionname,what) -- to be used in pagebuilder (no marks used) + local r = type(sectionname) == "number" and sectionname or registered[sectionname] + if r then + local data = sections.finddata(r.level,what) + return data and data.references and data.references.internal + end +end + +function sections.fullnumber(depth,what) + local sectiondata = sections.findnumber(depth,what) + if sectiondata then + sections.typesetnumber(sectiondata,'section',sectiondata) + end +end + +function sections.getnumber(depth,what) -- redefined here + local sectiondata = sections.findnumber(depth,what) + context((sectiondata and sectiondata.numbers[depth]) or 0) +end + +-- experimental + +local levels = { } + +--~ function commands.autonextstructurelevel(level) +--~ if level > #levels then +--~ for i=#levels+1,level do +--~ levels[i] = "" +--~ end +--~ end +--~ local finish = concat(levels,"\n",level) or "" +--~ for i=level+1,#levels do +--~ levels[i] = "" +--~ end +--~ levels[level] = [[\finalizeautostructurelevel]] +--~ context(finish) +--~ end + +--~ function commands.autofinishstructurelevels() +--~ local finish = concat(levels,"\n") or "" +--~ levels = { } +--~ context(finish) +--~ end + +function 
commands.autonextstructurelevel(level) + if level > #levels then + for i=#levels+1,level do + levels[i] = false + end + else + for i=level,#levels do + if levels[i] then + context.finalizeautostructurelevel() + levels[i] = false + end + end + end + levels[level] = true +end + +function commands.autofinishstructurelevels() + for i=1,#levels do + if levels[i] then + context.finalizeautostructurelevel() + end + end + levels = { } +end + +-- interface (some are actually already commands, like sections.fullnumber) + +commands.structurenumber = function() sections.fullnumber() end +commands.structuretitle = function() sections.title () end + +commands.structurevariable = function(name) sections.structuredata(nil,name) end +commands.structureuservariable = function(name) sections.userdata (nil,name) end +commands.structurecatcodedget = function(name) sections.structuredata(nil,name,nil,true) end +commands.structuregivencatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end +commands.structureautocatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end + +commands.namedstructurevariable = function(depth,name) sections.structuredata(depth,name) end +commands.namedstructureuservariable = function(depth,name) sections.userdata (depth,name) end + +-- + +function commands.setsectionblock (name) context(sections.setblock(name)) end +function commands.pushsectionblock(name) context(sections.pushblock(name)) end +function commands.popsectionblock () context(sections.popblock()) end + +-- + +local byway = "^" .. v_by -- ugly but downward compatible + +function commands.way(way) + context((gsub(way,byway,""))) +end diff --git a/tex/context/base/strc-flt.lua b/tex/context/base/strc-flt.lua index 466fd515e..0fdadc583 100644 --- a/tex/context/base/strc-flt.lua +++ b/tex/context/base/strc-flt.lua @@ -1,9 +1,9 @@ -if not modules then modules = { } end modules ['strc-flt'] = { - version = 1.001, - comment = "companion to strc-flt.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- nothing +if not modules then modules = { } end modules ['strc-flt'] = { + version = 1.001, + comment = "companion to strc-flt.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- nothing diff --git a/tex/context/base/strc-ini.lua b/tex/context/base/strc-ini.lua index fd7c10f79..5c72f3158 100644 --- a/tex/context/base/strc-ini.lua +++ b/tex/context/base/strc-ini.lua @@ -1,338 +1,338 @@ -if not modules then modules = { } end modules ['strc-ini'] = { - version = 1.001, - comment = "companion to strc-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ -The restructuring is the (intermediate) result of quite some experiments. I started -with the basic structure, followed by lists, numbers, enumerations, itemgroups -and floats. All these have something in common, like pagenumbers and section -prefixes. I played with some generic datastructure (in order to save space) but -the code at both the lua and tex end then quickly becomes messy due to the fact -that access to variables is too different. So, eventually I ended up with -dedicated structures combined with sharing data. In lua this is quite efficient -because tables are referenced. 
However, some precautions are to be taken in -order to keep the utility file small. Utility data and process data share much -but it does not make sense to store all processdata. - -]]-- - -local formatters = string.formatters -local lpegmatch = lpeg.match -local count = tex.count -local type, next, tonumber, select = type, next, tonumber, select -local settings_to_array, settings_to_hash = utilities.parsers.settings_to_array, utilities.parsers.settings_to_hash -local allocate = utilities.storage.allocate - -local catcodenumbers = catcodes.numbers -- better use the context(...) way to switch - -local ctxcatcodes = catcodenumbers.ctxcatcodes -local xmlcatcodes = catcodenumbers.xmlcatcodes -local notcatcodes = catcodenumbers.notcatcodes -local txtcatcodes = catcodenumbers.txtcatcodes - -local context, commands = context, commands - -local pushcatcodes = context.pushcatcodes -local popcatcodes = context.popcatcodes - -local trace_processors = false -local report_processors = logs.reporter("processors","structure") - -trackers.register("typesetters.processors", function(v) trace_processors = v end) - --- -- -- namespace -- -- -- - --- This is tricky: we have stored and initialized already some of --- the job.registered tables so we have a forward reference! - -structures = structures or { } -local structures = structures - -structures.blocks = structures.blocks or { } -structures.sections = structures.sections or { } -structures.pages = structures.pages or { } -structures.registers = structures.registers or { } -structures.references = structures.references or { } -structures.lists = structures.lists or { } -structures.helpers = structures.helpers or { } -structures.documents = structures.documents or { } -structures.notes = structures.notes or { } -structures.descriptions = structures.descriptions or { } -structures.itemgroups = structures.itemgroups or { } -structures.specials = structures.specials or { } -structures.counters = structures.counters or { } -structures.tags = structures.tags or { } -structures.formulas = structures.formulas or { } -structures.sets = structures.sets or { } -structures.marks = structures.marks or { } -structures.floats = structures.floats or { } -structures.synonyms = structures.synonyms or { } - ---~ table.print(structures) - -local processors = typesetters.processors - --- -- -- specials -- -- -- - --- we can store information and get back a reference; this permits --- us to store rather raw data in references - -local specials = structures.specials - -local collected = allocate() -local tobesaved = allocate() - -specials.collected = collected -specials.tobesaved = tobesaved - -local function initializer() - collected = specials.collected - tobesaved = specials.tobesaved -end - -if job then - job.register('structures.specials.collected', tobesaved, initializer) -end - -function specials.store(class,data) - if class and data then - local s = tobesaved[class] - if not s then - s = { } - tobesaved[class] = s - end - s[#s+1] = data - context(#s) - else - context(0) - end -end - -function specials.retrieve(class,n) - if class and n then - local c = collected[class] - return c and c[n] - end -end - --- -- -- helpers -- -- -- - -local helpers = structures.helpers - --- function helpers.touserdata(str) --- local hash = str and str ~= "" and settings_to_hash(str) --- if hash and next(hash) then --- return hash --- end --- end - -function helpers.touserdata(data) - if type(data) == "string" then - if data == "" then - return nil - else - data = settings_to_hash(data) - 
end - end - if data and next(data) then - return data - end -end - -local function simplify(d,nodefault) - if d then - local t = { } - for k, v in next, d do - local tv = type(v) - if tv == "table" then - if next(v) then t[k] = simplify(v) end - elseif tv == "string" then - if v ~= "" and v ~= "default" then t[k] = v end - elseif tv == "boolean" then - if v then t[k] = v end - else - t[k] = v - end - end - return next(t) and t - elseif nodefault then - return nil - else - return { } - end -end - -helpers.simplify = simplify - -function helpers.merged(...) - local t = { } - for k=1, select("#",...) do - local v = select(k,...) - if v and v ~= "" and not t[k] then - t[k] = v - end - end - return t -end - -local tags = { - generic = "ctx:genericentry", - section = "ctx:sectionentry", - entry = "ctx:registerentry", -} - --- We had the following but it overloads the main document so it's a no-go as we --- no longer push and pop. So now we use the tag as buffername, namespace and also --- (optionally) as a setups to be applied but keep in mind that document setups --- also get applied (when they use #1's). --- --- local command = formatters["\\xmlprocessbuffer{%s}{%s}{}"](metadata.xmlroot or "main",tag) - -local experiment = true - -function helpers.title(title,metadata) -- coding is xml is rather old and not that much needed now - if title and title ~= "" then -- so it might disappear - if metadata then - local xmlsetup = metadata.xmlsetup - if metadata.coding == "xml" then - -- title can contain raw xml - local tag = tags[metadata.kind] or tags.generic - local xmldata = formatters["<%s>%s"](tag,title,tag) - if not experiment then - buffers.assign(tag,xmldata) - end - if trace_processors then - report_processors("putting xml data in buffer: %s",xmldata) - report_processors("processing buffer with setup %a and tag %a",xmlsetup,tag) - end - if experiment then - -- the question is: will this be forgotten ... 
better store in a via file - local xmltable = lxml.convert("temp",xmldata or "") - lxml.store("temp",xmltable) - context.xmlsetup("temp",xmlsetup or "") - else - context.xmlprocessbuffer("dummy",tag,xmlsetup or "") - end - elseif xmlsetup then -- title is reference to node (so \xmlraw should have been used) - if trace_processors then - report_processors("feeding xmlsetup %a using node %a",xmlsetup,title) - end - context.xmlsetup(title,metadata.xmlsetup) - else - local catcodes = metadata.catcodes - if catcodes == notcatcodes or catcodes == xmlcatcodes then - if trace_processors then - report_processors("catcodetable %a, overloads %a, text %a",ctxcatcodes,catcodes,title) - end - context(title) -- nasty - else - if trace_processors then - report_processors("catcodetable %a, text %a",catcodes,title) - end - -- - -- context.sprint(catcodes,title) - -- - -- doesn't work when a newline is in there \section{Test\ A} so we do - -- it this way: - -- - pushcatcodes(catcodes) - context(title) - popcatcodes() - end - end - else - context(title) -- no catcode switch, was: texsprint(title) - end - end -end - --- -- -- sets -- -- -- - -local sets = structures.sets - -sets.setlist = sets.setlist or { } - -storage.register("structures/sets/setlist", structures.sets.setlist, "structures.sets.setlist") - -local setlist = sets.setlist - -function sets.define(namespace,name,values,default,numbers) - local dn = setlist[namespace] - if not dn then - dn = { } - setlist[namespace] = dn - end - if values == "" then - dn[name] = { { }, default } - else - local split = settings_to_array(values) - if numbers then - -- convert to numbers (e.g. for reset) - for i=1,#split do - split[i] = tonumber(split[i]) or 0 - end - end - dn[name] = { split, default } - end -end - -function sets.getall(namespace,block,name) - local ds = setlist[namespace] - if not ds then - return { } - else - local dn - if block and block ~= "" then - dn = ds[block..":"..name] or ds[name] or ds[block] or ds.default - else - dn = ds[name] or ds.default - end - return (dn and dn[1]) or { } - end -end - --- messy (will be another keyword, fixedconversion) - -local splitter = lpeg.splitat("::") - -function sets.get(namespace,block,name,level,default) -- check if name is passed - --fixed::R:a: ... - local kind, rest = lpegmatch(splitter,name) - if rest and kind == "fixed" then -- fixed::n,a,i - local s = settings_to_array(rest) - return s[level] or s[#s] or default - end - -- - local ds = setlist[namespace] - if not ds then - return default - end - local dn - if name and name ~= "" then - if block and block ~= "" then - dn = ds[block..":"..name] or ds[name] or ds[block] or ds.default - else - dn = ds[name] or ds.default - end - else - if block and block ~= "" then - dn = ds[block] or ds[block..":default"] or ds.default - else - dn = ds.default - end - end - if not dn then - return default - end --- inspect(dn) - local dl = dn[1][level] - return dl or dn[2] or default -end - --- interface - -commands.definestructureset = sets.define +if not modules then modules = { } end modules ['strc-ini'] = { + version = 1.001, + comment = "companion to strc-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ +The restructuring is the (intermediate) result of quite some experiments. I started +with the basic structure, followed by lists, numbers, enumerations, itemgroups +and floats. 
All these have something in common, like pagenumbers and section +prefixes. I played with some generic datastructure (in order to save space) but +the code at both the lua and tex end then quickly becomes messy due to the fact +that access to variables is too different. So, eventually I ended up with +dedicated structures combined with sharing data. In lua this is quite efficient +because tables are referenced. However, some precautions are to be taken in +order to keep the utility file small. Utility data and process data share much +but it does not make sense to store all processdata. + +]]-- + +local formatters = string.formatters +local lpegmatch = lpeg.match +local count = tex.count +local type, next, tonumber, select = type, next, tonumber, select +local settings_to_array, settings_to_hash = utilities.parsers.settings_to_array, utilities.parsers.settings_to_hash +local allocate = utilities.storage.allocate + +local catcodenumbers = catcodes.numbers -- better use the context(...) way to switch + +local ctxcatcodes = catcodenumbers.ctxcatcodes +local xmlcatcodes = catcodenumbers.xmlcatcodes +local notcatcodes = catcodenumbers.notcatcodes +local txtcatcodes = catcodenumbers.txtcatcodes + +local context, commands = context, commands + +local pushcatcodes = context.pushcatcodes +local popcatcodes = context.popcatcodes + +local trace_processors = false +local report_processors = logs.reporter("processors","structure") + +trackers.register("typesetters.processors", function(v) trace_processors = v end) + +-- -- -- namespace -- -- -- + +-- This is tricky: we have stored and initialized already some of +-- the job.registered tables so we have a forward reference! + +structures = structures or { } +local structures = structures + +structures.blocks = structures.blocks or { } +structures.sections = structures.sections or { } +structures.pages = structures.pages or { } +structures.registers = structures.registers or { } +structures.references = structures.references or { } +structures.lists = structures.lists or { } +structures.helpers = structures.helpers or { } +structures.documents = structures.documents or { } +structures.notes = structures.notes or { } +structures.descriptions = structures.descriptions or { } +structures.itemgroups = structures.itemgroups or { } +structures.specials = structures.specials or { } +structures.counters = structures.counters or { } +structures.tags = structures.tags or { } +structures.formulas = structures.formulas or { } +structures.sets = structures.sets or { } +structures.marks = structures.marks or { } +structures.floats = structures.floats or { } +structures.synonyms = structures.synonyms or { } + +--~ table.print(structures) + +local processors = typesetters.processors + +-- -- -- specials -- -- -- + +-- we can store information and get back a reference; this permits +-- us to store rather raw data in references + +local specials = structures.specials + +local collected = allocate() +local tobesaved = allocate() + +specials.collected = collected +specials.tobesaved = tobesaved + +local function initializer() + collected = specials.collected + tobesaved = specials.tobesaved +end + +if job then + job.register('structures.specials.collected', tobesaved, initializer) +end + +function specials.store(class,data) + if class and data then + local s = tobesaved[class] + if not s then + s = { } + tobesaved[class] = s + end + s[#s+1] = data + context(#s) + else + context(0) + end +end + +function specials.retrieve(class,n) + if class and n then + local c = 
collected[class] + return c and c[n] + end +end + +-- -- -- helpers -- -- -- + +local helpers = structures.helpers + +-- function helpers.touserdata(str) +-- local hash = str and str ~= "" and settings_to_hash(str) +-- if hash and next(hash) then +-- return hash +-- end +-- end + +function helpers.touserdata(data) + if type(data) == "string" then + if data == "" then + return nil + else + data = settings_to_hash(data) + end + end + if data and next(data) then + return data + end +end + +local function simplify(d,nodefault) + if d then + local t = { } + for k, v in next, d do + local tv = type(v) + if tv == "table" then + if next(v) then t[k] = simplify(v) end + elseif tv == "string" then + if v ~= "" and v ~= "default" then t[k] = v end + elseif tv == "boolean" then + if v then t[k] = v end + else + t[k] = v + end + end + return next(t) and t + elseif nodefault then + return nil + else + return { } + end +end + +helpers.simplify = simplify + +function helpers.merged(...) + local t = { } + for k=1, select("#",...) do + local v = select(k,...) + if v and v ~= "" and not t[k] then + t[k] = v + end + end + return t +end + +local tags = { + generic = "ctx:genericentry", + section = "ctx:sectionentry", + entry = "ctx:registerentry", +} + +-- We had the following but it overloads the main document so it's a no-go as we +-- no longer push and pop. So now we use the tag as buffername, namespace and also +-- (optionally) as a setups to be applied but keep in mind that document setups +-- also get applied (when they use #1's). +-- +-- local command = formatters["\\xmlprocessbuffer{%s}{%s}{}"](metadata.xmlroot or "main",tag) + +local experiment = true + +function helpers.title(title,metadata) -- coding is xml is rather old and not that much needed now + if title and title ~= "" then -- so it might disappear + if metadata then + local xmlsetup = metadata.xmlsetup + if metadata.coding == "xml" then + -- title can contain raw xml + local tag = tags[metadata.kind] or tags.generic + local xmldata = formatters["<%s>%s"](tag,title,tag) + if not experiment then + buffers.assign(tag,xmldata) + end + if trace_processors then + report_processors("putting xml data in buffer: %s",xmldata) + report_processors("processing buffer with setup %a and tag %a",xmlsetup,tag) + end + if experiment then + -- the question is: will this be forgotten ... 
better store in a via file + local xmltable = lxml.convert("temp",xmldata or "") + lxml.store("temp",xmltable) + context.xmlsetup("temp",xmlsetup or "") + else + context.xmlprocessbuffer("dummy",tag,xmlsetup or "") + end + elseif xmlsetup then -- title is reference to node (so \xmlraw should have been used) + if trace_processors then + report_processors("feeding xmlsetup %a using node %a",xmlsetup,title) + end + context.xmlsetup(title,metadata.xmlsetup) + else + local catcodes = metadata.catcodes + if catcodes == notcatcodes or catcodes == xmlcatcodes then + if trace_processors then + report_processors("catcodetable %a, overloads %a, text %a",ctxcatcodes,catcodes,title) + end + context(title) -- nasty + else + if trace_processors then + report_processors("catcodetable %a, text %a",catcodes,title) + end + -- + -- context.sprint(catcodes,title) + -- + -- doesn't work when a newline is in there \section{Test\ A} so we do + -- it this way: + -- + pushcatcodes(catcodes) + context(title) + popcatcodes() + end + end + else + context(title) -- no catcode switch, was: texsprint(title) + end + end +end + +-- -- -- sets -- -- -- + +local sets = structures.sets + +sets.setlist = sets.setlist or { } + +storage.register("structures/sets/setlist", structures.sets.setlist, "structures.sets.setlist") + +local setlist = sets.setlist + +function sets.define(namespace,name,values,default,numbers) + local dn = setlist[namespace] + if not dn then + dn = { } + setlist[namespace] = dn + end + if values == "" then + dn[name] = { { }, default } + else + local split = settings_to_array(values) + if numbers then + -- convert to numbers (e.g. for reset) + for i=1,#split do + split[i] = tonumber(split[i]) or 0 + end + end + dn[name] = { split, default } + end +end + +function sets.getall(namespace,block,name) + local ds = setlist[namespace] + if not ds then + return { } + else + local dn + if block and block ~= "" then + dn = ds[block..":"..name] or ds[name] or ds[block] or ds.default + else + dn = ds[name] or ds.default + end + return (dn and dn[1]) or { } + end +end + +-- messy (will be another keyword, fixedconversion) + +local splitter = lpeg.splitat("::") + +function sets.get(namespace,block,name,level,default) -- check if name is passed + --fixed::R:a: ... 
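    -- editor's note (not in the patch): a name of the form "fixed::n,a,i" bypasses
    -- the registered sets: the part after "::" is split on commas and the element
    -- matching the requested level is returned, falling back to the last element
    -- and then to the default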
+ local kind, rest = lpegmatch(splitter,name) + if rest and kind == "fixed" then -- fixed::n,a,i + local s = settings_to_array(rest) + return s[level] or s[#s] or default + end + -- + local ds = setlist[namespace] + if not ds then + return default + end + local dn + if name and name ~= "" then + if block and block ~= "" then + dn = ds[block..":"..name] or ds[name] or ds[block] or ds.default + else + dn = ds[name] or ds.default + end + else + if block and block ~= "" then + dn = ds[block] or ds[block..":default"] or ds.default + else + dn = ds.default + end + end + if not dn then + return default + end +-- inspect(dn) + local dl = dn[1][level] + return dl or dn[2] or default +end + +-- interface + +commands.definestructureset = sets.define diff --git a/tex/context/base/strc-itm.lua b/tex/context/base/strc-itm.lua index 8a745f356..75e77767f 100644 --- a/tex/context/base/strc-itm.lua +++ b/tex/context/base/strc-itm.lua @@ -1,38 +1,38 @@ -if not modules then modules = { } end modules ['strc-itm'] = { - version = 1.001, - comment = "companion to strc-itm.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local structures = structures -local itemgroups = structures.itemgroups -local jobpasses = job.passes - -local setfield = jobpasses.save -local getfield = jobpasses.getfield - -function itemgroups.register(name,nofitems,maxwidth) - setfield("itemgroup", { nofitems, maxwidth }) -end - -function itemgroups.nofitems(name,index) - return getfield("itemgroup", index, 1, 0) -end - -function itemgroups.maxwidth(name,index) - return getfield("itemgroup", index, 2, 0) -end - --- interface (might become counter/dimension) - -commands.registeritemgroup = itemgroups.register - -function commands.nofitems(name,index) - context(getfield("itemgroup", index, 1, 0)) -end - -function commands.maxitemwidth(name,index) - context(getfield("itemgroup", index, 2, 0)) -end +if not modules then modules = { } end modules ['strc-itm'] = { + version = 1.001, + comment = "companion to strc-itm.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local structures = structures +local itemgroups = structures.itemgroups +local jobpasses = job.passes + +local setfield = jobpasses.save +local getfield = jobpasses.getfield + +function itemgroups.register(name,nofitems,maxwidth) + setfield("itemgroup", { nofitems, maxwidth }) +end + +function itemgroups.nofitems(name,index) + return getfield("itemgroup", index, 1, 0) +end + +function itemgroups.maxwidth(name,index) + return getfield("itemgroup", index, 2, 0) +end + +-- interface (might become counter/dimension) + +commands.registeritemgroup = itemgroups.register + +function commands.nofitems(name,index) + context(getfield("itemgroup", index, 1, 0)) +end + +function commands.maxitemwidth(name,index) + context(getfield("itemgroup", index, 2, 0)) +end diff --git a/tex/context/base/strc-lev.lua b/tex/context/base/strc-lev.lua index 50a63c938..016aa2039 100644 --- a/tex/context/base/strc-lev.lua +++ b/tex/context/base/strc-lev.lua @@ -1,51 +1,51 @@ -if not modules then modules = { } end modules ['strc-lev'] = { - version = 1.001, - comment = "companion to strc-lev.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local insert, remove = table.insert, 
table.remove - -local sections = structures.sections -local default = interfaces.variables.default - -sections.levels = sections.levels or { } - -local level, levels, categories = 0, sections.levels, { } - -storage.register("structures/sections/levels", levels, "structures.sections.levels") - -local f_two_colon = string.formatters["%s:%s"] - -function commands.definesectionlevels(category,list) - levels[category] = utilities.parsers.settings_to_array(list) -end - -function commands.startsectionlevel(category) - category = category ~= "" and category or default - level = level + 1 - local lc = levels[category] - if not lc or level > #lc then - context.nostarthead { f_two_colon(category,level) } - else - context.dostarthead { lc[level] } - end - insert(categories,category) -end - -function commands.stopsectionlevel() - local category = remove(categories) - if category then - local lc = levels[category] - if not lc or level > #lc then - context.nostophead { f_two_colon(category,level) } - else - context.dostophead { lc[level] } - end - level = level - 1 - else - -- error - end -end +if not modules then modules = { } end modules ['strc-lev'] = { + version = 1.001, + comment = "companion to strc-lev.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local insert, remove = table.insert, table.remove + +local sections = structures.sections +local default = interfaces.variables.default + +sections.levels = sections.levels or { } + +local level, levels, categories = 0, sections.levels, { } + +storage.register("structures/sections/levels", levels, "structures.sections.levels") + +local f_two_colon = string.formatters["%s:%s"] + +function commands.definesectionlevels(category,list) + levels[category] = utilities.parsers.settings_to_array(list) +end + +function commands.startsectionlevel(category) + category = category ~= "" and category or default + level = level + 1 + local lc = levels[category] + if not lc or level > #lc then + context.nostarthead { f_two_colon(category,level) } + else + context.dostarthead { lc[level] } + end + insert(categories,category) +end + +function commands.stopsectionlevel() + local category = remove(categories) + if category then + local lc = levels[category] + if not lc or level > #lc then + context.nostophead { f_two_colon(category,level) } + else + context.dostophead { lc[level] } + end + level = level - 1 + else + -- error + end +end diff --git a/tex/context/base/strc-lst.lua b/tex/context/base/strc-lst.lua index ad7dc0f54..2395abb62 100644 --- a/tex/context/base/strc-lst.lua +++ b/tex/context/base/strc-lst.lua @@ -1,845 +1,845 @@ -if not modules then modules = { } end modules ['strc-lst'] = { - version = 1.001, - comment = "companion to strc-lst.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- when all datastructures are stable a packer will be added which will --- bring down memory consumption a bit; we can use for instance a pagenumber, --- section, metadata cache (internal then has to move up one level) or a --- shared cache [we can use a fast and stupid serializer] - --- todo: tag entry in list is crap --- --- move more to commands - -local format, gmatch, gsub = string.format, string.gmatch, string.gsub -local tonumber = tonumber -local texcount = tex.count -local concat, insert, remove = table.concat, table.insert, table.remove -local 
lpegmatch = lpeg.match -local simple_hash_to_string, settings_to_hash = utilities.parsers.simple_hash_to_string, utilities.parsers.settings_to_hash -local allocate, checked = utilities.storage.allocate, utilities.storage.checked - -local trace_lists = false trackers.register("structures.lists", function(v) trace_lists = v end) - -local report_lists = logs.reporter("structure","lists") - -local structures = structures -local lists = structures.lists -local sections = structures.sections -local helpers = structures.helpers -local documents = structures.documents -local pages = structures.pages -local tags = structures.tags -local references = structures.references - -local collected = allocate() -local tobesaved = allocate() -local cached = allocate() -local pushed = allocate() - -lists.collected = collected -lists.tobesaved = tobesaved - -lists.enhancers = lists.enhancers or { } -lists.internals = allocate(lists.internals or { }) -- to be checked -lists.ordered = allocate(lists.ordered or { }) -- to be checked -lists.cached = cached -lists.pushed = pushed - -references.specials = references.specials or { } - -local variables = interfaces.variables -local matchingtilldepth = sections.matchingtilldepth -local numberatdepth = sections.numberatdepth - --- -- -- -- -- -- - -local function zerostrippedconcat(t,separator) -- for the moment not public - local f, l = 1, #t - for i=f,l do - if t[i] == 0 then - f = f + 1 - end - end - for i=l,f,-1 do - if t[i] == 0 then - l = l - 1 - end - end - return concat(t,separator,f,l) -end - --- -- -- -- -- -- - -local function initializer() - -- create a cross reference between internal references - -- and list entries - local collected = lists.collected - local internals = checked(references.internals) - local ordered = lists.ordered - for i=1,#collected do - local c = collected[i] - local m = c.metadata - local r = c.references - if m then - -- access by internal reference - local internal = r and r.internal - if internal then - internals[internal] = c - end - -- access by order in list - local kind, name = m.kind, m.name - if kind and name then - local ok = ordered[kind] - if ok then - local on = ok[name] - if on then - on[#on+1] = c - else - ok[name] = { c } - end - else - ordered[kind] = { [name] = { c } } - end - end - end - if r then - r.listindex = i -- handy to have - end - end -end - -job.register('structures.lists.collected', tobesaved, initializer) - -local groupindices = table.setmetatableindex("table") - -function lists.groupindex(name,group) - local groupindex = groupindices[name] - return groupindex and groupindex[group] or 0 -end - -function lists.addto(t) - local m = t.metadata - local u = t.userdata - if u and type(u) == "string" then - t.userdata = helpers.touserdata(u) -- nicer at the tex end - end - local numberdata = t.numberdata - local group = numberdata and numberdata.group - if not group then - -- forget about it - elseif group == "" then - group, numberdata.group = nil, nil - else - local groupindex = groupindices[m.name][group] - if groupindex then - numberdata.numbers = cached[groupindex].numberdata.numbers - end - end - local r = t.references - local i = r and r.internal or 0 -- brrr - local p = pushed[i] - if not p then - p = #cached + 1 - cached[p] = helpers.simplify(t) - pushed[i] = p - r.listindex = p - end - local setcomponent = references.setcomponent - if setcomponent then - setcomponent(t) -- might move to the tex end - end - if group then - groupindices[m.name][group] = p - end - return p -end - -function 
lists.discard(n) - n = tonumber(n) - if not n then - -- maybe an error message - elseif n == #cached then - cached[n] = nil - n = n -1 - while n > 0 and cached[n] == false do - cached[n] = nil -- collect garbage - n = n - 1 - end - else - cached[n] = false - end -end - -function lists.iscached(n) - return cached[tonumber(n)] -end - --- this is the main pagenumber enhancer - -function lists.enhance(n) - -- todo: symbolic names for counters - local l = cached[n] - if l then - local metadata = l.metadata - local references = l.references - -- - l.directives = nil -- might change - -- save in the right order (happens at shipout) - lists.tobesaved[#lists.tobesaved+1] = l - -- default enhancer (cross referencing) - references.realpage = texcount.realpageno - -- tags - local kind = metadata.kind - local name = metadata.name - if references then - -- is this used ? - local tag = tags.getid(kind,name) - if tag and tag ~= "?" then - references.tag = tag - end - --~ references.listindex = n - end - -- specific enhancer (kind of obsolete) - local enhancer = kind and lists.enhancers[kind] - if enhancer then - enhancer(l) - end - return l - end -end - --- we can use level instead but we can also decide to remove level from the metadata - -local nesting = { } - -function lists.pushnesting(i) - local parent = lists.result[i] - local name = parent.metadata.name - local numberdata = parent and parent.numberdata - local numbers = numberdata and numberdata.numbers - local number = numbers and numbers[sections.getlevel(name)] or 0 - insert(nesting, { number = number, name = name, result = lists.result, parent = parent }) -end - -function lists.popnesting() - local old = remove(nesting) - lists.result = old.result -end - --- will be split - --- Historically we had blocks but in the mkiv approach that could as well be a level --- which would simplify things a bit. - -local splitter = lpeg.splitat(":") - --- this will become filtercollected(specification) and then we'll also have sectionblock as key - -local sorters = { - [variables.command] = function(a,b) - if a.metadata.kind == "command" or b.metadata.kind == "command" then - return a.references.internal < b.references.internal - else - return a.references.order < b.references.order - end - end, - [variables.all] = function(a,b) - return a.references.internal < b.references.internal - end, -} - --- some day soon we will pass a table .. 
also split the function - -local function filtercollected(names, criterium, number, collected, forced, nested, sortorder) -- names is hash or string - local numbers, depth = documents.data.numbers, documents.data.depth - local result, nofresult, detail = { }, 0, nil - local block = false -- all - criterium = gsub(criterium or ""," ","") -- not needed - -- new, will be applied stepwise - local wantedblock, wantedcriterium = lpegmatch(splitter,criterium) -- block:criterium - if wantedblock == "" or wantedblock == variables.all or wantedblock == variables.text then - criterium = wantedcriterium ~= "" and wantedcriterium or criterium - elseif not wantedcriterium then - block = documents.data.block - else - block, criterium = wantedblock, wantedcriterium - end - if block == "" then - block = false - end --- print(">>",block,criterium) - -- - forced = forced or { } -- todo: also on other branched, for the moment only needed for bookmarks - if type(names) == "string" then - names = settings_to_hash(names) - end - local all = not next(names) or names[variables.all] or false - if trace_lists then - report_lists("filtering names %a, criterium %a, block %a, number %a",names,criterium,block or "*",number) - end - if criterium == variables.intro then - -- special case, no structure yet - for i=1,#collected do - local v = collected[i] - local r = v.references - if r and r.section == 0 then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - elseif all or criterium == variables.all or criterium == variables.text then - for i=1,#collected do - local v = collected[i] - local r = v.references - if r and (not block or not r.block or block == r.block) then - local metadata = v.metadata - if metadata then - local name = metadata.name or false - local sectionnumber = (r.section == 0) or sections.collected[r.section] - if forced[name] or (sectionnumber and not metadata.nolist and (all or names[name])) then -- and not sectionnumber.hidenumber then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - end - end - elseif criterium == variables.current then - if depth == 0 then - return filtercollected(names,variables.intro,number,collected,forced,false,sortorder) - else - for i=1,#collected do - local v = collected[i] - local r = v.references - if r and (not block or not r.block or block == r.block) then - local sectionnumber = sections.collected[r.section] - if sectionnumber then -- and not sectionnumber.hidenumber then - local cnumbers = sectionnumber.numbers - local metadata = v.metadata - if cnumbers then - if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers > depth then - local ok = true - for d=1,depth do - local cnd = cnumbers[d] - if not (cnd == 0 or cnd == numbers[d]) then - ok = false - break - end - end - if ok then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - end - end - end - end - end - elseif criterium == variables.here then - -- this is quite dirty ... 
as cnumbers is not sparse we can misuse #cnumbers - if depth == 0 then - return filtercollected(names,variables.intro,number,collected,forced,false,sortorder) - else - for i=1,#collected do - local v = collected[i] - local r = v.references - if r then -- and (not block or not r.block or block == r.block) then - local sectionnumber = sections.collected[r.section] - if sectionnumber then -- and not sectionnumber.hidenumber then - local cnumbers = sectionnumber.numbers - local metadata = v.metadata - if cnumbers then - if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then - local ok = true - for d=1,depth do - local cnd = cnumbers[d] - if not (cnd == 0 or cnd == numbers[d]) then - ok = false - break - end - end - if ok then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - end - end - end - end - end - elseif criterium == variables.previous then - if depth == 0 then - return filtercollected(names,variables.intro,number,collected,forced,false,sortorder) - else - for i=1,#collected do - local v = collected[i] - local r = v.references - if r and (not block or not r.block or block == r.block) then - local sectionnumber = sections.collected[r.section] - if sectionnumber then -- and not sectionnumber.hidenumber then - local cnumbers = sectionnumber.numbers - local metadata = v.metadata - if cnumbers then - if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then - local ok = true - for d=1,depth-1 do - local cnd = cnumbers[d] - if not (cnd == 0 or cnd == numbers[d]) then - ok = false - break - end - end - if ok then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - end - end - end - end - end - elseif criterium == variables["local"] then -- not yet ok - local nested = nesting[#nesting] - if nested then - return filtercollected(names,nested.name,nested.number,collected,forced,nested,sortorder) - elseif sections.autodepth(documents.data.numbers) == 0 then - return filtercollected(names,variables.all,number,collected,forced,false,sortorder) - else - return filtercollected(names,variables.current,number,collected,forced,false,sortorder) - end - elseif criterium == variables.component then - -- special case, no structure yet - local component = resolvers.jobs.currentcomponent() or "" - if component ~= "" then - for i=1,#collected do - local v = collected[i] - local r = v.references - local m = v.metadata - if r and r.component == component and (m and names[m.name] or all) then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - end - else -- sectionname, number - -- not the same as register - local depth = sections.getlevel(criterium) - local number = tonumber(number) or numberatdepth(depth) or 0 - if trace_lists then - local t = sections.numbers() - detail = format("depth %s, number %s, numbers %s, startset %s",depth,number,(#t>0 and concat(t,".",1,depth)) or "?",#collected) - end - if number > 0 then - local pnumbers = nil - local pblock = block - local parent = nested and nested.parent - if parent then - pnumbers = parent.numberdata.numbers or pnumbers -- so local as well as nested - pblock = parent.references.block or pblock - end - for i=1,#collected do - local v = collected[i] - local r = v.references - if r and (not block or not r.block or pblock == r.block) then - local sectionnumber = sections.collected[r.section] - if sectionnumber then - local metadata = v.metadata - local cnumbers = sectionnumber.numbers - if cnumbers then - if (all or 
names[metadata.name or false]) and #cnumbers >= depth and matchingtilldepth(depth,cnumbers,pnumbers) then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - end - end - end - end - end - if trace_lists then - report_lists("criterium %a, block %a, found %a, detail %a",criterium,block or "*",#result,detail) - end - - if sortorder then -- experiment - local sorter = sorters[sortorder] - if sorter then - if trace_lists then - report_lists("sorting list using method %a",sortorder) - end - for i=1,#result do - result[i].references.order = i - end - table.sort(result,sorter) - end - end - - return result -end - -lists.filtercollected = filtercollected - -function lists.filter(specification) - return filtercollected( - specification.names, - specification.criterium, - specification.number, - lists.collected, - specification.forced, - false, - specification.order - ) -end - -lists.result = { } - -function lists.process(specification) - lists.result = lists.filter(specification) - local specials = utilities.parsers.settings_to_hash(specification.extras or "") - specials = next(specials) and specials or nil - for i=1,#lists.result do - local r = lists.result[i] - local m = r.metadata - local s = specials and r.numberdata and specials[zerostrippedconcat(r.numberdata.numbers,".")] or "" - context.strclistsentryprocess(m.name,m.kind,i,s) - end -end - -function lists.analyze(specification) - lists.result = lists.filter(specification) -end - -function lists.userdata(name,r,tag) -- to tex (todo: xml) - local result = lists.result[r] - if result then - local userdata, metadata = result.userdata, result.metadata - local str = userdata and userdata[tag] - if str then - return str, metadata - end - end -end - -function lists.uservalue(name,r,tag,default) -- to lua - local str = lists.result[r] - str = str and str.userdata - str = str and str[tag] - return str or default -end - -function lists.size() - return #lists.result -end - -function lists.location(n) - local l = lists.result[n] - return l and l.references.internal or n -end - -function lists.label(n,default) - local l = lists.result[n] - local t = l.titledata - return t and t.label or default or "" -end - -function lists.sectionnumber(name,n,spec) - local data = lists.result[n] - local sectiondata = sections.collected[data.references.section] - -- hm, prefixnumber? 
- sections.typesetnumber(sectiondata,"prefix",spec,sectiondata) -- data happens to contain the spec too -end - --- some basics (todo: helpers for pages) - -function lists.title(name,n,tag) -- tag becomes obsolete - local data = lists.result[n] - if data then - local titledata = data.titledata - if titledata then - helpers.title(titledata[tag] or titledata.list or titledata.title or "",data.metadata) - end - end -end - -function lists.hastitledata(name,n,tag) - local data = cached[tonumber(n)] - if data then - local titledata = data.titledata - if titledata then - return (titledata[tag] or titledata.title or "") == "" - end - end - return false -end - -function lists.haspagedata(name,n) - local data = lists.result[n] - if data then - local references = data.references - if references and references.realpage then -- or references.pagedata - return true - end - end - return false -end - -function lists.hasnumberdata(name,n) - local data = lists.result[n] - if data then - local numberdata = data.numberdata - if numberdata and not numberdata.hidenumber then -- th ehide number is true - return true - end - end - return false -end - -function lists.prefix(name,n,spec) - helpers.prefix(lists.result[n],spec) -end - -function lists.page(name,n,pagespec) - helpers.page(lists.result[n],pagespec) -end - -function lists.prefixedpage(name,n,prefixspec,pagespec) - helpers.prefixpage(lists.result[n],prefixspec,pagespec) -end - -function lists.realpage(name,n) - local data = lists.result[n] - if data then - local references = data.references - return references and references.realpage or 0 - else - return 0 - end -end - --- numbers stored in entry.numberdata + entry.numberprefix - -function lists.number(name,n,spec) - local data = lists.result[n] - if data then - local numberdata = data.numberdata - if numberdata then - sections.typesetnumber(numberdata,"number",spec or false,numberdata or false) - end - end -end - -function lists.prefixednumber(name,n,prefixspec,numberspec) - local data = lists.result[n] - if data then - helpers.prefix(data,prefixspec) - local numberdata = data.numberdata - if numberdata then - sections.typesetnumber(numberdata,"number",numberspec or false,numberdata or false) - end - end -end - --- todo, do this in references namespace ordered instead (this is an experiment) --- --- also see lpdf-ano (maybe move this there) - -local splitter = lpeg.splitat(":") - -function references.specials.order(var,actions) -- references.specials ! - local operation = var.operation - if operation then - local kind, name, n = lpegmatch(splitter,operation) - local order = lists.ordered[kind] - order = order and order[name] - local v = order[tonumber(n)] - local r = v and v.references.realpage - if r then - actions.realpage = r - var.operation = r -- brrr, but test anyway - return references.specials.page(var,actions) - end - end -end - --- interface (maybe strclistpush etc) - -commands.pushlist = lists.pushnesting -commands.poplist = lists.popnesting -commands.enhancelist = lists.enhance -commands.processlist = lists.process -commands.analyzelist = lists.analyze -commands.listtitle = lists.title -commands.listprefixednumber = lists.prefixednumber -commands.listprefixedpage = lists.prefixedpage - - -function commands.addtolist (...) context(lists.addto (...)) end -- we could use variables instead of print -function commands.listsize (...) context(lists.size (...)) end -function commands.listlocation (...) context(lists.location (...)) end -function commands.listlabel (...) 
context(lists.label (...)) end -function commands.listrealpage (...) context(lists.realpage (...)) end -function commands.listgroupindex(...) context(lists.groupindex(...)) end - -function commands.listuserdata(...) - local str, metadata = lists.userdata(...) - if str then - -- local catcodes = metadata and metadata.catcodes - -- if catcodes then - -- context.sprint(catcodes,str) - -- else - -- context(str) - -- end - helpers.title(str,metadata) - end -end - --- we could also set variables .. names will change (when this module is done) --- maybe strc_lists_savedtitle etc - -function commands.doiflisthastitleelse (...) commands.doifelse(lists.hastitledata (...)) end -function commands.doiflisthaspageelse (...) commands.doifelse(lists.haspagedata (...)) end -function commands.doiflisthasnumberelse(...) commands.doifelse(lists.hasnumberdata(...)) end -function commands.doiflisthasentry (n) commands.doifelse(lists.iscached (n )) end - -function commands.savedlistnumber(name,n) - local data = cached[tonumber(n)] - if data then - local numberdata = data.numberdata - if numberdata then - sections.typesetnumber(numberdata,"number",numberdata or false) - end - end -end - -function commands.savedlisttitle(name,n,tag) - local data = cached[tonumber(n)] - if data then - local titledata = data.titledata - if titledata then - helpers.title(titledata[tag] or titledata.title or "",data.metadata) - end - end -end - --- function commands.savedlistprefixednumber(name,n) --- local data = cached[tonumber(n)] --- if data then --- local numberdata = data.numberdata --- if numberdata then --- helpers.prefix(data,data.prefixdata) --- sections.typesetnumber(numberdata,"number",numberdata or false) --- end --- end --- end - -if not lists.reordered then - function lists.reordered(data) - return data.numberdata - end -end - -function commands.savedlistprefixednumber(name,n) - local data = cached[tonumber(n)] - if data then - local numberdata = lists.reordered(data) - if numberdata then - helpers.prefix(data,data.prefixdata) - sections.typesetnumber(numberdata,"number",numberdata or false) - end - end -end - -commands.discardfromlist = lists.discard - --- new and experimental and therefore off by default - -local sort, setmetatableindex = table.sort, table.setmetatableindex - -lists.autoreorder = false -- true - -local function addlevel(t,k) - local v = { } - setmetatableindex(v,function(t,k) - local v = { } - t[k] = v - return v - end) - t[k] = v - return v -end - -local internals = setmetatableindex({ }, function(t,k) - - local sublists = setmetatableindex({ },addlevel) - - local collected = lists.collected or { } - - for i=1,#collected do - local entry = collected[i] - local numberdata = entry.numberdata - if numberdata then - local metadata = entry.metadata - if metadata then - local references = entry.references - if references then - local kind = metadata.kind - local name = numberdata.counter or metadata.name - local internal = references.internal - if kind and name and internal then - local sublist = sublists[kind][name] - sublist[#sublist + 1] = { internal, numberdata } - end - end - end - end - end - - for k, v in next, sublists do - for k, v in next, v do - local tmp = { } - for i=1,#v do - tmp[i] = v[i] - end - sort(v,function(a,b) return a[1] < b[1] end) - for i=1,#v do - t[v[i][1]] = tmp[i][2] - end - end - end - - setmetatableindex(t,nil) - - return t[k] - -end) - -function lists.reordered(entry) - local numberdata = entry.numberdata - if lists.autoreorder then - if numberdata then - local metadata = 
entry.metadata - if metadata then - local references = entry.references - if references then - local kind = metadata.kind - local name = numberdata.counter or metadata.name - local internal = references.internal - if kind and name and internal then - return internals[internal] or numberdata - end - end - end - end - else - function lists.reordered(entry) - return entry.numberdata - end - end - return numberdata -end +if not modules then modules = { } end modules ['strc-lst'] = { + version = 1.001, + comment = "companion to strc-lst.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- when all datastructures are stable a packer will be added which will +-- bring down memory consumption a bit; we can use for instance a pagenumber, +-- section, metadata cache (internal then has to move up one level) or a +-- shared cache [we can use a fast and stupid serializer] + +-- todo: tag entry in list is crap +-- +-- move more to commands + +local format, gmatch, gsub = string.format, string.gmatch, string.gsub +local tonumber = tonumber +local texcount = tex.count +local concat, insert, remove = table.concat, table.insert, table.remove +local lpegmatch = lpeg.match +local simple_hash_to_string, settings_to_hash = utilities.parsers.simple_hash_to_string, utilities.parsers.settings_to_hash +local allocate, checked = utilities.storage.allocate, utilities.storage.checked + +local trace_lists = false trackers.register("structures.lists", function(v) trace_lists = v end) + +local report_lists = logs.reporter("structure","lists") + +local structures = structures +local lists = structures.lists +local sections = structures.sections +local helpers = structures.helpers +local documents = structures.documents +local pages = structures.pages +local tags = structures.tags +local references = structures.references + +local collected = allocate() +local tobesaved = allocate() +local cached = allocate() +local pushed = allocate() + +lists.collected = collected +lists.tobesaved = tobesaved + +lists.enhancers = lists.enhancers or { } +lists.internals = allocate(lists.internals or { }) -- to be checked +lists.ordered = allocate(lists.ordered or { }) -- to be checked +lists.cached = cached +lists.pushed = pushed + +references.specials = references.specials or { } + +local variables = interfaces.variables +local matchingtilldepth = sections.matchingtilldepth +local numberatdepth = sections.numberatdepth + +-- -- -- -- -- -- + +local function zerostrippedconcat(t,separator) -- for the moment not public + local f, l = 1, #t + for i=f,l do + if t[i] == 0 then + f = f + 1 + end + end + for i=l,f,-1 do + if t[i] == 0 then + l = l - 1 + end + end + return concat(t,separator,f,l) +end + +-- -- -- -- -- -- + +local function initializer() + -- create a cross reference between internal references + -- and list entries + local collected = lists.collected + local internals = checked(references.internals) + local ordered = lists.ordered + for i=1,#collected do + local c = collected[i] + local m = c.metadata + local r = c.references + if m then + -- access by internal reference + local internal = r and r.internal + if internal then + internals[internal] = c + end + -- access by order in list + local kind, name = m.kind, m.name + if kind and name then + local ok = ordered[kind] + if ok then + local on = ok[name] + if on then + on[#on+1] = c + else + ok[name] = { c } + end + else + ordered[kind] = { [name] = { c } } + end + 
end + end + if r then + r.listindex = i -- handy to have + end + end +end + +job.register('structures.lists.collected', tobesaved, initializer) + +local groupindices = table.setmetatableindex("table") + +function lists.groupindex(name,group) + local groupindex = groupindices[name] + return groupindex and groupindex[group] or 0 +end + +function lists.addto(t) + local m = t.metadata + local u = t.userdata + if u and type(u) == "string" then + t.userdata = helpers.touserdata(u) -- nicer at the tex end + end + local numberdata = t.numberdata + local group = numberdata and numberdata.group + if not group then + -- forget about it + elseif group == "" then + group, numberdata.group = nil, nil + else + local groupindex = groupindices[m.name][group] + if groupindex then + numberdata.numbers = cached[groupindex].numberdata.numbers + end + end + local r = t.references + local i = r and r.internal or 0 -- brrr + local p = pushed[i] + if not p then + p = #cached + 1 + cached[p] = helpers.simplify(t) + pushed[i] = p + r.listindex = p + end + local setcomponent = references.setcomponent + if setcomponent then + setcomponent(t) -- might move to the tex end + end + if group then + groupindices[m.name][group] = p + end + return p +end + +function lists.discard(n) + n = tonumber(n) + if not n then + -- maybe an error message + elseif n == #cached then + cached[n] = nil + n = n -1 + while n > 0 and cached[n] == false do + cached[n] = nil -- collect garbage + n = n - 1 + end + else + cached[n] = false + end +end + +function lists.iscached(n) + return cached[tonumber(n)] +end + +-- this is the main pagenumber enhancer + +function lists.enhance(n) + -- todo: symbolic names for counters + local l = cached[n] + if l then + local metadata = l.metadata + local references = l.references + -- + l.directives = nil -- might change + -- save in the right order (happens at shipout) + lists.tobesaved[#lists.tobesaved+1] = l + -- default enhancer (cross referencing) + references.realpage = texcount.realpageno + -- tags + local kind = metadata.kind + local name = metadata.name + if references then + -- is this used ? + local tag = tags.getid(kind,name) + if tag and tag ~= "?" then + references.tag = tag + end + --~ references.listindex = n + end + -- specific enhancer (kind of obsolete) + local enhancer = kind and lists.enhancers[kind] + if enhancer then + enhancer(l) + end + return l + end +end + +-- we can use level instead but we can also decide to remove level from the metadata + +local nesting = { } + +function lists.pushnesting(i) + local parent = lists.result[i] + local name = parent.metadata.name + local numberdata = parent and parent.numberdata + local numbers = numberdata and numberdata.numbers + local number = numbers and numbers[sections.getlevel(name)] or 0 + insert(nesting, { number = number, name = name, result = lists.result, parent = parent }) +end + +function lists.popnesting() + local old = remove(nesting) + lists.result = old.result +end + +-- will be split + +-- Historically we had blocks but in the mkiv approach that could as well be a level +-- which would simplify things a bit. 
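+-- An illustrative sketch of how the block:criterium split just below behaves;
+-- the values "bodypart" and "chapter" are example input only, not taken from
+-- this patch:
+--
+--   lpeg.match(lpeg.splitat(":"),"bodypart:chapter")  --> "bodypart", "chapter"
+--   lpeg.match(lpeg.splitat(":"),"chapter")           --> "chapter" (no block part)
+--
+-- In filtercollected an absent block part falls back to documents.data.block,
+-- an empty one (or "all"/"text") disables block filtering, and otherwise each
+-- entry is matched against its references.block.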
+ +local splitter = lpeg.splitat(":") + +-- this will become filtercollected(specification) and then we'll also have sectionblock as key + +local sorters = { + [variables.command] = function(a,b) + if a.metadata.kind == "command" or b.metadata.kind == "command" then + return a.references.internal < b.references.internal + else + return a.references.order < b.references.order + end + end, + [variables.all] = function(a,b) + return a.references.internal < b.references.internal + end, +} + +-- some day soon we will pass a table .. also split the function + +local function filtercollected(names, criterium, number, collected, forced, nested, sortorder) -- names is hash or string + local numbers, depth = documents.data.numbers, documents.data.depth + local result, nofresult, detail = { }, 0, nil + local block = false -- all + criterium = gsub(criterium or ""," ","") -- not needed + -- new, will be applied stepwise + local wantedblock, wantedcriterium = lpegmatch(splitter,criterium) -- block:criterium + if wantedblock == "" or wantedblock == variables.all or wantedblock == variables.text then + criterium = wantedcriterium ~= "" and wantedcriterium or criterium + elseif not wantedcriterium then + block = documents.data.block + else + block, criterium = wantedblock, wantedcriterium + end + if block == "" then + block = false + end +-- print(">>",block,criterium) + -- + forced = forced or { } -- todo: also on other branched, for the moment only needed for bookmarks + if type(names) == "string" then + names = settings_to_hash(names) + end + local all = not next(names) or names[variables.all] or false + if trace_lists then + report_lists("filtering names %a, criterium %a, block %a, number %a",names,criterium,block or "*",number) + end + if criterium == variables.intro then + -- special case, no structure yet + for i=1,#collected do + local v = collected[i] + local r = v.references + if r and r.section == 0 then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + elseif all or criterium == variables.all or criterium == variables.text then + for i=1,#collected do + local v = collected[i] + local r = v.references + if r and (not block or not r.block or block == r.block) then + local metadata = v.metadata + if metadata then + local name = metadata.name or false + local sectionnumber = (r.section == 0) or sections.collected[r.section] + if forced[name] or (sectionnumber and not metadata.nolist and (all or names[name])) then -- and not sectionnumber.hidenumber then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + end + end + elseif criterium == variables.current then + if depth == 0 then + return filtercollected(names,variables.intro,number,collected,forced,false,sortorder) + else + for i=1,#collected do + local v = collected[i] + local r = v.references + if r and (not block or not r.block or block == r.block) then + local sectionnumber = sections.collected[r.section] + if sectionnumber then -- and not sectionnumber.hidenumber then + local cnumbers = sectionnumber.numbers + local metadata = v.metadata + if cnumbers then + if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers > depth then + local ok = true + for d=1,depth do + local cnd = cnumbers[d] + if not (cnd == 0 or cnd == numbers[d]) then + ok = false + break + end + end + if ok then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + end + end + end + end + end + elseif criterium == variables.here then + -- this is quite dirty ... 
as cnumbers is not sparse we can misuse #cnumbers + if depth == 0 then + return filtercollected(names,variables.intro,number,collected,forced,false,sortorder) + else + for i=1,#collected do + local v = collected[i] + local r = v.references + if r then -- and (not block or not r.block or block == r.block) then + local sectionnumber = sections.collected[r.section] + if sectionnumber then -- and not sectionnumber.hidenumber then + local cnumbers = sectionnumber.numbers + local metadata = v.metadata + if cnumbers then + if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then + local ok = true + for d=1,depth do + local cnd = cnumbers[d] + if not (cnd == 0 or cnd == numbers[d]) then + ok = false + break + end + end + if ok then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + end + end + end + end + end + elseif criterium == variables.previous then + if depth == 0 then + return filtercollected(names,variables.intro,number,collected,forced,false,sortorder) + else + for i=1,#collected do + local v = collected[i] + local r = v.references + if r and (not block or not r.block or block == r.block) then + local sectionnumber = sections.collected[r.section] + if sectionnumber then -- and not sectionnumber.hidenumber then + local cnumbers = sectionnumber.numbers + local metadata = v.metadata + if cnumbers then + if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then + local ok = true + for d=1,depth-1 do + local cnd = cnumbers[d] + if not (cnd == 0 or cnd == numbers[d]) then + ok = false + break + end + end + if ok then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + end + end + end + end + end + elseif criterium == variables["local"] then -- not yet ok + local nested = nesting[#nesting] + if nested then + return filtercollected(names,nested.name,nested.number,collected,forced,nested,sortorder) + elseif sections.autodepth(documents.data.numbers) == 0 then + return filtercollected(names,variables.all,number,collected,forced,false,sortorder) + else + return filtercollected(names,variables.current,number,collected,forced,false,sortorder) + end + elseif criterium == variables.component then + -- special case, no structure yet + local component = resolvers.jobs.currentcomponent() or "" + if component ~= "" then + for i=1,#collected do + local v = collected[i] + local r = v.references + local m = v.metadata + if r and r.component == component and (m and names[m.name] or all) then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + end + else -- sectionname, number + -- not the same as register + local depth = sections.getlevel(criterium) + local number = tonumber(number) or numberatdepth(depth) or 0 + if trace_lists then + local t = sections.numbers() + detail = format("depth %s, number %s, numbers %s, startset %s",depth,number,(#t>0 and concat(t,".",1,depth)) or "?",#collected) + end + if number > 0 then + local pnumbers = nil + local pblock = block + local parent = nested and nested.parent + if parent then + pnumbers = parent.numberdata.numbers or pnumbers -- so local as well as nested + pblock = parent.references.block or pblock + end + for i=1,#collected do + local v = collected[i] + local r = v.references + if r and (not block or not r.block or pblock == r.block) then + local sectionnumber = sections.collected[r.section] + if sectionnumber then + local metadata = v.metadata + local cnumbers = sectionnumber.numbers + if cnumbers then + if (all or 
names[metadata.name or false]) and #cnumbers >= depth and matchingtilldepth(depth,cnumbers,pnumbers) then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + end + end + end + end + end + if trace_lists then + report_lists("criterium %a, block %a, found %a, detail %a",criterium,block or "*",#result,detail) + end + + if sortorder then -- experiment + local sorter = sorters[sortorder] + if sorter then + if trace_lists then + report_lists("sorting list using method %a",sortorder) + end + for i=1,#result do + result[i].references.order = i + end + table.sort(result,sorter) + end + end + + return result +end + +lists.filtercollected = filtercollected + +function lists.filter(specification) + return filtercollected( + specification.names, + specification.criterium, + specification.number, + lists.collected, + specification.forced, + false, + specification.order + ) +end + +lists.result = { } + +function lists.process(specification) + lists.result = lists.filter(specification) + local specials = utilities.parsers.settings_to_hash(specification.extras or "") + specials = next(specials) and specials or nil + for i=1,#lists.result do + local r = lists.result[i] + local m = r.metadata + local s = specials and r.numberdata and specials[zerostrippedconcat(r.numberdata.numbers,".")] or "" + context.strclistsentryprocess(m.name,m.kind,i,s) + end +end + +function lists.analyze(specification) + lists.result = lists.filter(specification) +end + +function lists.userdata(name,r,tag) -- to tex (todo: xml) + local result = lists.result[r] + if result then + local userdata, metadata = result.userdata, result.metadata + local str = userdata and userdata[tag] + if str then + return str, metadata + end + end +end + +function lists.uservalue(name,r,tag,default) -- to lua + local str = lists.result[r] + str = str and str.userdata + str = str and str[tag] + return str or default +end + +function lists.size() + return #lists.result +end + +function lists.location(n) + local l = lists.result[n] + return l and l.references.internal or n +end + +function lists.label(n,default) + local l = lists.result[n] + local t = l.titledata + return t and t.label or default or "" +end + +function lists.sectionnumber(name,n,spec) + local data = lists.result[n] + local sectiondata = sections.collected[data.references.section] + -- hm, prefixnumber? 
+ sections.typesetnumber(sectiondata,"prefix",spec,sectiondata) -- data happens to contain the spec too +end + +-- some basics (todo: helpers for pages) + +function lists.title(name,n,tag) -- tag becomes obsolete + local data = lists.result[n] + if data then + local titledata = data.titledata + if titledata then + helpers.title(titledata[tag] or titledata.list or titledata.title or "",data.metadata) + end + end +end + +function lists.hastitledata(name,n,tag) + local data = cached[tonumber(n)] + if data then + local titledata = data.titledata + if titledata then + return (titledata[tag] or titledata.title or "") == "" + end + end + return false +end + +function lists.haspagedata(name,n) + local data = lists.result[n] + if data then + local references = data.references + if references and references.realpage then -- or references.pagedata + return true + end + end + return false +end + +function lists.hasnumberdata(name,n) + local data = lists.result[n] + if data then + local numberdata = data.numberdata + if numberdata and not numberdata.hidenumber then -- th ehide number is true + return true + end + end + return false +end + +function lists.prefix(name,n,spec) + helpers.prefix(lists.result[n],spec) +end + +function lists.page(name,n,pagespec) + helpers.page(lists.result[n],pagespec) +end + +function lists.prefixedpage(name,n,prefixspec,pagespec) + helpers.prefixpage(lists.result[n],prefixspec,pagespec) +end + +function lists.realpage(name,n) + local data = lists.result[n] + if data then + local references = data.references + return references and references.realpage or 0 + else + return 0 + end +end + +-- numbers stored in entry.numberdata + entry.numberprefix + +function lists.number(name,n,spec) + local data = lists.result[n] + if data then + local numberdata = data.numberdata + if numberdata then + sections.typesetnumber(numberdata,"number",spec or false,numberdata or false) + end + end +end + +function lists.prefixednumber(name,n,prefixspec,numberspec) + local data = lists.result[n] + if data then + helpers.prefix(data,prefixspec) + local numberdata = data.numberdata + if numberdata then + sections.typesetnumber(numberdata,"number",numberspec or false,numberdata or false) + end + end +end + +-- todo, do this in references namespace ordered instead (this is an experiment) +-- +-- also see lpdf-ano (maybe move this there) + +local splitter = lpeg.splitat(":") + +function references.specials.order(var,actions) -- references.specials ! + local operation = var.operation + if operation then + local kind, name, n = lpegmatch(splitter,operation) + local order = lists.ordered[kind] + order = order and order[name] + local v = order[tonumber(n)] + local r = v and v.references.realpage + if r then + actions.realpage = r + var.operation = r -- brrr, but test anyway + return references.specials.page(var,actions) + end + end +end + +-- interface (maybe strclistpush etc) + +commands.pushlist = lists.pushnesting +commands.poplist = lists.popnesting +commands.enhancelist = lists.enhance +commands.processlist = lists.process +commands.analyzelist = lists.analyze +commands.listtitle = lists.title +commands.listprefixednumber = lists.prefixednumber +commands.listprefixedpage = lists.prefixedpage + + +function commands.addtolist (...) context(lists.addto (...)) end -- we could use variables instead of print +function commands.listsize (...) context(lists.size (...)) end +function commands.listlocation (...) context(lists.location (...)) end +function commands.listlabel (...) 
context(lists.label (...)) end +function commands.listrealpage (...) context(lists.realpage (...)) end +function commands.listgroupindex(...) context(lists.groupindex(...)) end + +function commands.listuserdata(...) + local str, metadata = lists.userdata(...) + if str then + -- local catcodes = metadata and metadata.catcodes + -- if catcodes then + -- context.sprint(catcodes,str) + -- else + -- context(str) + -- end + helpers.title(str,metadata) + end +end + +-- we could also set variables .. names will change (when this module is done) +-- maybe strc_lists_savedtitle etc + +function commands.doiflisthastitleelse (...) commands.doifelse(lists.hastitledata (...)) end +function commands.doiflisthaspageelse (...) commands.doifelse(lists.haspagedata (...)) end +function commands.doiflisthasnumberelse(...) commands.doifelse(lists.hasnumberdata(...)) end +function commands.doiflisthasentry (n) commands.doifelse(lists.iscached (n )) end + +function commands.savedlistnumber(name,n) + local data = cached[tonumber(n)] + if data then + local numberdata = data.numberdata + if numberdata then + sections.typesetnumber(numberdata,"number",numberdata or false) + end + end +end + +function commands.savedlisttitle(name,n,tag) + local data = cached[tonumber(n)] + if data then + local titledata = data.titledata + if titledata then + helpers.title(titledata[tag] or titledata.title or "",data.metadata) + end + end +end + +-- function commands.savedlistprefixednumber(name,n) +-- local data = cached[tonumber(n)] +-- if data then +-- local numberdata = data.numberdata +-- if numberdata then +-- helpers.prefix(data,data.prefixdata) +-- sections.typesetnumber(numberdata,"number",numberdata or false) +-- end +-- end +-- end + +if not lists.reordered then + function lists.reordered(data) + return data.numberdata + end +end + +function commands.savedlistprefixednumber(name,n) + local data = cached[tonumber(n)] + if data then + local numberdata = lists.reordered(data) + if numberdata then + helpers.prefix(data,data.prefixdata) + sections.typesetnumber(numberdata,"number",numberdata or false) + end + end +end + +commands.discardfromlist = lists.discard + +-- new and experimental and therefore off by default + +local sort, setmetatableindex = table.sort, table.setmetatableindex + +lists.autoreorder = false -- true + +local function addlevel(t,k) + local v = { } + setmetatableindex(v,function(t,k) + local v = { } + t[k] = v + return v + end) + t[k] = v + return v +end + +local internals = setmetatableindex({ }, function(t,k) + + local sublists = setmetatableindex({ },addlevel) + + local collected = lists.collected or { } + + for i=1,#collected do + local entry = collected[i] + local numberdata = entry.numberdata + if numberdata then + local metadata = entry.metadata + if metadata then + local references = entry.references + if references then + local kind = metadata.kind + local name = numberdata.counter or metadata.name + local internal = references.internal + if kind and name and internal then + local sublist = sublists[kind][name] + sublist[#sublist + 1] = { internal, numberdata } + end + end + end + end + end + + for k, v in next, sublists do + for k, v in next, v do + local tmp = { } + for i=1,#v do + tmp[i] = v[i] + end + sort(v,function(a,b) return a[1] < b[1] end) + for i=1,#v do + t[v[i][1]] = tmp[i][2] + end + end + end + + setmetatableindex(t,nil) + + return t[k] + +end) + +function lists.reordered(entry) + local numberdata = entry.numberdata + if lists.autoreorder then + if numberdata then + local metadata = 
entry.metadata + if metadata then + local references = entry.references + if references then + local kind = metadata.kind + local name = numberdata.counter or metadata.name + local internal = references.internal + if kind and name and internal then + return internals[internal] or numberdata + end + end + end + end + else + function lists.reordered(entry) + return entry.numberdata + end + end + return numberdata +end diff --git a/tex/context/base/strc-mar.lua b/tex/context/base/strc-mar.lua index 7b3ac11e1..4aa867992 100644 --- a/tex/context/base/strc-mar.lua +++ b/tex/context/base/strc-mar.lua @@ -1,696 +1,696 @@ -if not modules then modules = { } end modules ['strc-mar'] = { - version = 1.001, - comment = "companion to strc-mar.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: cleanup stack (structures.marks.reset(v_all) also does the job) --- todo: only commands.* print to tex, native marks return values - -local insert, concat = table.insert, table.concat -local tostring, next, rawget = tostring, next, rawget -local lpegmatch = lpeg.match -local match = string.match - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex - -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist - -local traversenodes = node.traverse -local texsetattribute = tex.setattribute -local texbox = tex.box - -local a_marks = attributes.private("structure","marks") - -local trace_marks_set = false trackers.register("marks.set", function(v) trace_marks_set = v end) -local trace_marks_get = false trackers.register("marks.get", function(v) trace_marks_get = v end) -local trace_marks_all = false trackers.register("marks.detail", function(v) trace_marks_all = v end) - -local report_marks = logs.reporter("structure","marks") - -local variables = interfaces.variables - -local v_first = variables.first -local v_last = variables.last -local v_previous = variables.previous -local v_next = variables.next -local v_top = variables.top -local v_bottom = variables.bottom -local v_current = variables.current -local v_default = variables.default -local v_page = variables.page -local v_all = variables.all -local v_keep = variables.keep - -local v_nocheck_suffix = ":" .. variables.nocheck - -local v_first_nocheck = variables.first .. v_nocheck_suffix -local v_last_nocheck = variables.last .. v_nocheck_suffix -local v_previous_nocheck = variables.previous .. v_nocheck_suffix -local v_next_nocheck = variables.next .. v_nocheck_suffix -local v_top_nocheck = variables.top .. v_nocheck_suffix -local v_bottom_nocheck = variables.bottom .. 
v_nocheck_suffix - -local structures = structures -local marks = structures.marks -local lists = structures.lists - -local settings_to_array = utilities.parsers.settings_to_array - -marks.data = marks.data or allocate() - -storage.register("structures/marks/data", marks.data, "structures.marks.data") - -local data = marks.data -local stack, topofstack = { }, 0 - -local ranges = { - [v_page] = { - first = 0, - last = 0, - }, -} - -local function resolve(t,k) - if k then - if trace_marks_set or trace_marks_get then - report_marks("undefined mark, name %a",k) - end - local crap = { autodefined = true } -- maybe set = 0 and reset = 0 - t[k] = crap - return crap - else - -- weird: k is nil - end -end - -setmetatableindex(data, resolve) - -function marks.exists(name) - return rawget(data,name) ~= nil -end - --- identify range - -local function sweep(head,first,last) - for n in traversenodes(head) do - local id = n.id - if id == glyph_code then - local a = n[a_marks] - if not a then - -- next - elseif first == 0 then - first, last = a, a - elseif a > last then - last = a - end - elseif id == hlist_code or id == vlist_code then - local a = n[a_marks] - if not a then - -- next - elseif first == 0 then - first, last = a, a - elseif a > last then - last = a - end - local list = n.list - if list then - first, last = sweep(list, first, last) - end - end - end - return first, last -end - -local classes = { } - -setmetatableindex(classes, function(t,k) local s = settings_to_array(k) t[k] = s return s end) - -local lasts = { } - -function marks.synchronize(class,n,option) - local box = texbox[n] - if box then - local first, last = sweep(box.list,0,0) - if option == v_keep and first == 0 and last == 0 then - if trace_marks_get or trace_marks_set then - report_marks("action %a, class %a, box %a","retain at synchronize",class,n) - end - -- todo: check if still valid firts/last in range - first = lasts[class] or 0 - last = first - else - lasts[class] = last - local classlist = classes[class] - for i=1,#classlist do - local class = classlist[i] - local range = ranges[class] - if not range then - range = { } - ranges[class] = range - end - range.first, range.last = first, last - if trace_marks_get or trace_marks_set then - report_marks("action %a, class %a, first %a, last %a","synchronize",class,range.first,range.last) - end - end - end - elseif trace_marks_get or trace_marks_set then - report_marks("action %s, class %a, box %a","synchronize without content",class,n) - end -end - --- define etc - -local function resolve(t,k) - if k == "fullchain" then - local fullchain = { } - local chain = t.chain - while chain and chain ~= "" do - insert(fullchain,1,chain) - chain = data[chain].chain - end - t[k] = fullchain - return fullchain - elseif k == "chain" then - t[k] = "" - return "" - elseif k == "reset" or k == "set" then - t[k] = 0 - return 0 - elseif k == "parent" then - t[k] = false - return false - end -end - -function marks.define(name,settings) - settings = settings or { } - data[name] = settings - local parent = settings.parent - if parent == nil or parent == "" or parent == name then - settings.parent = false - else - local dp = data[parent] - if not dp then - settings.parent = false - elseif dp.parent then - settings.parent = dp.parent - end - end - setmetatableindex(settings, resolve) -end - -for k, v in next, data do - setmetatableindex(v,resolve) -- runtime loaded table -end - -local function parentname(name) - local dn = data[name] - return dn and dn.parent or name -end - -function 
marks.relate(name,chain) - local dn = data[name] - if dn and not dn.parent then - if chain and chain ~= "" then - dn.chain = chain - local dc = data[chain] - if dc then - local children = dc.children - if not children then - children = { } - dc.children = children - end - children[#children+1] = name - end - elseif trace_marks_set then - report_marks("error: invalid relation, name %a, chain %a",name,chain) - end - end -end - -local function resetchildren(new,name) - local dn = data[name] - if dn and not dn.parent then - local children = dn.children - if children then - for i=1,#children do - local ci = children[i] - new[ci] = false - if trace_marks_set then - report_marks("action %a, parent %a, child %a","reset",name,ci) - end - resetchildren(new,ci) - end - end - end -end - -function marks.set(name,value) - local dn = data[name] - if dn then - local child = name - local parent = dn.parent - if parent then - name = parent - dn = data[name] - end - dn.set = topofstack - if not dn.reset then - dn.reset = 0 -- in case of selfdefined - end - local top = stack[topofstack] - local new = { } - if top then - for k, v in next, top do - local d = data[k] - local r = d.reset or 0 - local s = d.set or 0 - if r <= topofstack and s < r then - new[k] = false - else - new[k] = v - end - end - end - resetchildren(new,name) - new[name] = value - topofstack = topofstack + 1 - stack[topofstack] = new - if trace_marks_set then - if name == child then - report_marks("action %a, name %a, index %a, value %a","set",name,topofstack,value) - else - report_marks("action %a, parent %a, child %a, index %a, value %a","set",parent,child,topofstack,value) - end - end - texsetattribute("global",a_marks,topofstack) - end -end - -local function reset(name) - if v_all then - if trace_marks_set then - report_marks("action %a","reset all") - end - stack = { } - for name, dn in next, data do - local parent = dn.parent - if parent then - dn.reset = 0 - dn.set = 0 - end - end - else - local dn = data[name] - if dn then - local parent = dn.parent - if parent then - name = parent - dn = data[name] - end - if trace_marks_set then - report_marks("action %a, name %a, index %a","reset",name,topofstack) - end - dn.reset = topofstack - local children = dn.children - if children then - for i=1,#children do - local ci = children[i] - reset(ci) - end - end - end - end -end - -marks.reset = reset - -function marks.get(n,name,value) - local dn = data[name] - if dn then - name = dn.parent or name - local top = stack[n] - if top then - context(top[name]) - end - end -end - -function marks.show(first,last) - if first and last then - for k=first,last do - local v = stack[k] - if v then - report_marks("% 4i: %s",k,table.sequenced(v)) - end - end - else - for k, v in table.sortedpairs(stack) do - report_marks("% 4i: %s",k,table.sequenced(v)) - end - end -end - -local function resolve(name,first,last,strict,quitonfalse,notrace) - local dn = data[name] - if dn then - local child = name - local parent = dn.parent - name = parent or child - dn = data[name] - local step, method - if first > last then - step, method = -1, "bottom-up" - else - step, method = 1, "top-down" - end - if trace_marks_get and not notrace then - report_marks("action %a, strategy %a, name %a, parent %a, strict %a","request",method,child,parent,strict or false) - end - if trace_marks_all and not notrace then - marks.show(first,last) - end - local r = dn.reset - local s = dn.set - if first <= last and first <= r then - if trace_marks_get and not notrace then - report_marks("action 
%a, name %a, first %a, last %a, reset %a, index %a","reset first",name,first,last,r,first) - end - elseif first >= last and last <= r then - if trace_marks_get and not notrace then - report_marks("action %a, name %a, first %a, last %a, reset %a, index %a","reset last",name,first,last,r,last) - end - elseif not stack[first] or not stack[last] then - if trace_marks_get and not notrace then - -- a previous or next method can give an out of range, which is valid - report_marks("error: out of range, name %a, reset %a, index %a",name,r,first) - end - elseif strict then - local top = stack[first] - local fullchain = dn.fullchain - if not fullchain or #fullchain == 0 then - if trace_marks_get and not notrace then - report_marks("warning: no full chain, trying again, name %a, first %a, last %a",name,first,last) - end - return resolve(name,first,last) - else - if trace_marks_get and not notrace then - report_marks("found chain [ % => T ]",fullchain) - end - local chaindata, chainlength = { }, #fullchain - for i=1,chainlength do - local cname = fullchain[i] - if data[cname].set > 0 then - local value = resolve(cname,first,last,false,false,true) - if value == "" then - if trace_marks_get and not notrace then - report_marks("quitting chain, name %a, reset %a, start %a",name,r,first) - end - return "" - else - chaindata[i] = value - end - end - end - if trace_marks_get and not notrace then - report_marks("using chain [ % => T ]",chaindata) - end - local value, index, found = resolve(name,first,last,false,false,true) - if value ~= "" then - if trace_marks_get and not notrace then - report_marks("following chain [ % => T ]",chaindata) - end - for i=1,chainlength do - local cname = fullchain[i] - if data[cname].set > 0 and chaindata[i] ~= found[cname] then - if trace_marks_get and not notrace then - report_marks("quiting chain, name %a, reset %a, index %a",name,r,first) - end - return "" - end - end - if trace_marks_get and not notrace then - report_marks("found in chain, name %a, reset %a, start %a, index %a, value %a",name,r,first,index,value) - end - return value, index, found - elseif trace_marks_get and not notrace then - report_marks("not found, name %a, reset %a",name,r) - end - end - else - for i=first,last,step do - local current = stack[i] - local value = current and current[name] - if value == nil then - -- search on - elseif value == false then - if quitonfalse then - return "" - end - elseif value == true then - if trace_marks_get and not notrace then - report_marks("quitting steps, name %a, reset %a, start %a, index %a",name,r,first,i) - end - return "" - elseif value ~= "" then - if trace_marks_get and not notrace then - report_marks("found in steps, name %a, reset %a, start %a, index %a, value %a",name,r,first,i,value) - end - return value, i, current - end - end - if trace_marks_get and not notrace then - report_marks("not found in steps, name %a, reset %a",name,r) - end - end - end - return "" -end - --- todo: column:first column:last - -local methods = { } - -local function doresolve(name,rangename,swap,df,dl,strict) - local range = ranges[rangename] or ranges[v_page] - local first, last = range.first, range.last - if trace_marks_get then - report_marks("action %a, name %a, range %a, swap %a, first %a, last %a, df %a, dl %a, strict %a", - "resolving",name,rangename,swap or false,first,last,df,dl,strict or false) - end - if swap then - first, last = last + df, first + dl - else - first, last = first + df, last + dl - end - local value, index, found = resolve(name,first,last,strict) - -- 
maybe something more - return value, index, found -end - --- previous : last before sync --- next : first after sync - --- top : first in sync --- bottom : last in sync - --- first : first not top in sync --- last : last not bottom in sync - -methods[v_previous] = function(name,range) return doresolve(name,range,false,-1,0,true ) end -- strict -methods[v_top] = function(name,range) return doresolve(name,range,false, 0,0,true ) end -- strict -methods[v_bottom] = function(name,range) return doresolve(name,range,true , 0,0,true ) end -- strict -methods[v_next] = function(name,range) return doresolve(name,range,true , 0,1,true ) end -- strict - -methods[v_previous_nocheck] = function(name,range) return doresolve(name,range,false,-1,0,false) end -methods[v_top_nocheck] = function(name,range) return doresolve(name,range,false, 0,0,false) end -methods[v_bottom_nocheck] = function(name,range) return doresolve(name,range,true , 0,0,false) end -methods[v_next_nocheck] = function(name,range) return doresolve(name,range,true , 0,1,false) end - -local function do_first(name,range,check) - if trace_marks_get then - report_marks("action %a, name %a, range %a","resolving first",name,range) - end - local f_value, f_index, f_found = doresolve(name,range,false,0,0,check) - if trace_marks_get then - report_marks("action %a, name %a, range %a","resolving last",name,range) - end - local l_value, l_index, l_found = doresolve(name,range,true ,0,0,check) - if f_found and l_found and l_index > f_index then - local name = parentname(name) - for i=f_index,l_index,1 do - local si = stack[i] - local sn = si[name] - if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= f_value then - if trace_marks_get then - report_marks("action %a, name %a, range %a, index %a, value %a","resolving",name,range,i,sn) - end - return sn, i, si - end - end - end - if trace_marks_get then - report_marks("resolved, name %a, range %a, using first",name,range) - end - return f_value, f_index, f_found -end - -local function do_last(name,range,check) - if trace_marks_get then - report_marks("action %a, name %a, range %a","resolving first",name,range) - end - local f_value, f_index, f_found = doresolve(name,range,false,0,0,check) - if trace_marks_get then - report_marks("action %a, name %a, range %a","resolving last",name,range) - end - local l_value, l_index, l_found = doresolve(name,range,true ,0,0,check) - if f_found and l_found and l_index > f_index then - local name = parentname(name) - for i=l_index,f_index,-1 do - local si = stack[i] - local sn = si[name] - if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= l_value then - if trace_marks_get then - report_marks("action %a, name %a, range %a, index %a, value %a","resolving",name,range,i,sn) - end - return sn, i, si - end - end - end - if trace_marks_get then - report_marks("resolved, name %a, range %a, using first",name,range) - end - return l_value, l_index, l_found -end - -methods[v_first ] = function(name,range) return do_first(name,range,true ) end -methods[v_last ] = function(name,range) return do_last (name,range,true ) end -methods[v_first_nocheck] = function(name,range) return do_first(name,range,false) end -methods[v_last_nocheck ] = function(name,range) return do_last (name,range,false) end - -methods[v_current] = function(name,range) -- range is ignored here - local top = stack[topofstack] - return top and top[parentname(name)] or "" -end - -local function fetched(name,range,method) - local value = (methods[method] or methods[v_first])(name,range) or "" - if not 
trace_marks_get then - -- no report - elseif value == "" then - report_marks("nothing fetched, name %a, range %a, method %a",name,range,method) - else - report_marks("marking fetched, name %a, range %a, method %a, value %a",name,range,method,value) - end - return value or "" -end - --- can be used at the lua end: - -marks.fetched = fetched - --- this will move to a separate runtime modules - -marks.tracers = marks.tracers or { } - -function marks.tracers.showtable() - context.starttabulate { "|l|l|l|lp|lp|" } - context.tabulaterowbold("name","parent","chain","children","fullchain") - context.ML() - for k, v in table.sortedpairs(data) do - local parent, chain, children, fullchain = v.parent or "", v.chain or "", v.children or { }, v.fullchain or { } - table.sort(children) -- in-place but harmless - context.tabulaterowtyp(k,parent,chain,concat(children," "),concat(fullchain," ")) - end - context.stoptabulate() -end - --- pushing to context: - -local separator = context.nested.markingseparator -local command = context.nested.markingcommand -local ctxconcat = context.concat - -local function fetchonemark(name,range,method) - context(command(name,fetched(name,range,method))) -end - -local function fetchtwomarks(name,range) - ctxconcat( { - command(name,fetched(name,range,v_first)), - command(name,fetched(name,range,v_last)), - }, separator(name)) -end - -local function fetchallmarks(name,range) - ctxconcat( { - command(name,fetched(name,range,v_previous)), - command(name,fetched(name,range,v_first)), - command(name,fetched(name,range,v_last)), - }, separator(name)) -end - -function marks.fetch(name,range,method) -- chapter page first | chapter column:1 first - if trace_marks_get then - report_marks("marking requested, name %a, range %a, method %a",name,range,method) - end - if method == "" or method == v_default then - fetchonemark(name,range,v_first) - elseif method == v_both then - fetchtwomarks(name,range) - elseif method == v_all then - fetchallmarks(name,range) - else - fetchonemark(name,range,method) - end -end - -function marks.fetchonemark (name,range,method) fetchonemark (name,range,method) end -function marks.fetchtwomarks(name,range) fetchtwomarks(name,range ) end -function marks.fetchallmarks(name,range) fetchallmarks(name,range ) end - --- here we have a few helpers .. 
will become commands.* - -function marks.title(tag,n) - local listindex = match(n,"^li::(.-)$") - if listindex then - commands.savedlisttitle(tag,listindex,"marking") - else - context(n) - end -end - -function marks.number(tag,n) -- no spec - local listindex = match(n,"^li::(.-)$") - if listindex then - commands.savedlistnumber(tag,listindex) - else - -- no prefix (as it is the prefix) - context(n) - end -end - --- interface - -commands.definemarking = marks.define -commands.relatemarking = marks.relate -commands.setmarking = marks.set -commands.resetmarking = marks.reset -commands.synchronizemarking = marks.synchronize -commands.getmarking = marks.fetch -commands.fetchonemark = marks.fetchonemark -commands.fetchtwomarks = marks.fetchtwomarks -commands.fetchallmarks = marks.fetchallmarks - -function commands.doifelsemarking(str) -- can be shortcut - commands.doifelse(marks.exists(str)) -end - +if not modules then modules = { } end modules ['strc-mar'] = { + version = 1.001, + comment = "companion to strc-mar.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: cleanup stack (structures.marks.reset(v_all) also does the job) +-- todo: only commands.* print to tex, native marks return values + +local insert, concat = table.insert, table.concat +local tostring, next, rawget = tostring, next, rawget +local lpegmatch = lpeg.match +local match = string.match + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist + +local traversenodes = node.traverse +local texsetattribute = tex.setattribute +local texbox = tex.box + +local a_marks = attributes.private("structure","marks") + +local trace_marks_set = false trackers.register("marks.set", function(v) trace_marks_set = v end) +local trace_marks_get = false trackers.register("marks.get", function(v) trace_marks_get = v end) +local trace_marks_all = false trackers.register("marks.detail", function(v) trace_marks_all = v end) + +local report_marks = logs.reporter("structure","marks") + +local variables = interfaces.variables + +local v_first = variables.first +local v_last = variables.last +local v_previous = variables.previous +local v_next = variables.next +local v_top = variables.top +local v_bottom = variables.bottom +local v_current = variables.current +local v_default = variables.default +local v_page = variables.page +local v_all = variables.all +local v_keep = variables.keep + +local v_nocheck_suffix = ":" .. variables.nocheck + +local v_first_nocheck = variables.first .. v_nocheck_suffix +local v_last_nocheck = variables.last .. v_nocheck_suffix +local v_previous_nocheck = variables.previous .. v_nocheck_suffix +local v_next_nocheck = variables.next .. v_nocheck_suffix +local v_top_nocheck = variables.top .. v_nocheck_suffix +local v_bottom_nocheck = variables.bottom .. 
v_nocheck_suffix + +local structures = structures +local marks = structures.marks +local lists = structures.lists + +local settings_to_array = utilities.parsers.settings_to_array + +marks.data = marks.data or allocate() + +storage.register("structures/marks/data", marks.data, "structures.marks.data") + +local data = marks.data +local stack, topofstack = { }, 0 + +local ranges = { + [v_page] = { + first = 0, + last = 0, + }, +} + +local function resolve(t,k) + if k then + if trace_marks_set or trace_marks_get then + report_marks("undefined mark, name %a",k) + end + local crap = { autodefined = true } -- maybe set = 0 and reset = 0 + t[k] = crap + return crap + else + -- weird: k is nil + end +end + +setmetatableindex(data, resolve) + +function marks.exists(name) + return rawget(data,name) ~= nil +end + +-- identify range + +local function sweep(head,first,last) + for n in traversenodes(head) do + local id = n.id + if id == glyph_code then + local a = n[a_marks] + if not a then + -- next + elseif first == 0 then + first, last = a, a + elseif a > last then + last = a + end + elseif id == hlist_code or id == vlist_code then + local a = n[a_marks] + if not a then + -- next + elseif first == 0 then + first, last = a, a + elseif a > last then + last = a + end + local list = n.list + if list then + first, last = sweep(list, first, last) + end + end + end + return first, last +end + +local classes = { } + +setmetatableindex(classes, function(t,k) local s = settings_to_array(k) t[k] = s return s end) + +local lasts = { } + +function marks.synchronize(class,n,option) + local box = texbox[n] + if box then + local first, last = sweep(box.list,0,0) + if option == v_keep and first == 0 and last == 0 then + if trace_marks_get or trace_marks_set then + report_marks("action %a, class %a, box %a","retain at synchronize",class,n) + end + -- todo: check if still valid firts/last in range + first = lasts[class] or 0 + last = first + else + lasts[class] = last + local classlist = classes[class] + for i=1,#classlist do + local class = classlist[i] + local range = ranges[class] + if not range then + range = { } + ranges[class] = range + end + range.first, range.last = first, last + if trace_marks_get or trace_marks_set then + report_marks("action %a, class %a, first %a, last %a","synchronize",class,range.first,range.last) + end + end + end + elseif trace_marks_get or trace_marks_set then + report_marks("action %s, class %a, box %a","synchronize without content",class,n) + end +end + +-- define etc + +local function resolve(t,k) + if k == "fullchain" then + local fullchain = { } + local chain = t.chain + while chain and chain ~= "" do + insert(fullchain,1,chain) + chain = data[chain].chain + end + t[k] = fullchain + return fullchain + elseif k == "chain" then + t[k] = "" + return "" + elseif k == "reset" or k == "set" then + t[k] = 0 + return 0 + elseif k == "parent" then + t[k] = false + return false + end +end + +function marks.define(name,settings) + settings = settings or { } + data[name] = settings + local parent = settings.parent + if parent == nil or parent == "" or parent == name then + settings.parent = false + else + local dp = data[parent] + if not dp then + settings.parent = false + elseif dp.parent then + settings.parent = dp.parent + end + end + setmetatableindex(settings, resolve) +end + +for k, v in next, data do + setmetatableindex(v,resolve) -- runtime loaded table +end + +local function parentname(name) + local dn = data[name] + return dn and dn.parent or name +end + +function 
marks.relate(name,chain) + local dn = data[name] + if dn and not dn.parent then + if chain and chain ~= "" then + dn.chain = chain + local dc = data[chain] + if dc then + local children = dc.children + if not children then + children = { } + dc.children = children + end + children[#children+1] = name + end + elseif trace_marks_set then + report_marks("error: invalid relation, name %a, chain %a",name,chain) + end + end +end + +local function resetchildren(new,name) + local dn = data[name] + if dn and not dn.parent then + local children = dn.children + if children then + for i=1,#children do + local ci = children[i] + new[ci] = false + if trace_marks_set then + report_marks("action %a, parent %a, child %a","reset",name,ci) + end + resetchildren(new,ci) + end + end + end +end + +function marks.set(name,value) + local dn = data[name] + if dn then + local child = name + local parent = dn.parent + if parent then + name = parent + dn = data[name] + end + dn.set = topofstack + if not dn.reset then + dn.reset = 0 -- in case of selfdefined + end + local top = stack[topofstack] + local new = { } + if top then + for k, v in next, top do + local d = data[k] + local r = d.reset or 0 + local s = d.set or 0 + if r <= topofstack and s < r then + new[k] = false + else + new[k] = v + end + end + end + resetchildren(new,name) + new[name] = value + topofstack = topofstack + 1 + stack[topofstack] = new + if trace_marks_set then + if name == child then + report_marks("action %a, name %a, index %a, value %a","set",name,topofstack,value) + else + report_marks("action %a, parent %a, child %a, index %a, value %a","set",parent,child,topofstack,value) + end + end + texsetattribute("global",a_marks,topofstack) + end +end + +local function reset(name) + if v_all then + if trace_marks_set then + report_marks("action %a","reset all") + end + stack = { } + for name, dn in next, data do + local parent = dn.parent + if parent then + dn.reset = 0 + dn.set = 0 + end + end + else + local dn = data[name] + if dn then + local parent = dn.parent + if parent then + name = parent + dn = data[name] + end + if trace_marks_set then + report_marks("action %a, name %a, index %a","reset",name,topofstack) + end + dn.reset = topofstack + local children = dn.children + if children then + for i=1,#children do + local ci = children[i] + reset(ci) + end + end + end + end +end + +marks.reset = reset + +function marks.get(n,name,value) + local dn = data[name] + if dn then + name = dn.parent or name + local top = stack[n] + if top then + context(top[name]) + end + end +end + +function marks.show(first,last) + if first and last then + for k=first,last do + local v = stack[k] + if v then + report_marks("% 4i: %s",k,table.sequenced(v)) + end + end + else + for k, v in table.sortedpairs(stack) do + report_marks("% 4i: %s",k,table.sequenced(v)) + end + end +end + +local function resolve(name,first,last,strict,quitonfalse,notrace) + local dn = data[name] + if dn then + local child = name + local parent = dn.parent + name = parent or child + dn = data[name] + local step, method + if first > last then + step, method = -1, "bottom-up" + else + step, method = 1, "top-down" + end + if trace_marks_get and not notrace then + report_marks("action %a, strategy %a, name %a, parent %a, strict %a","request",method,child,parent,strict or false) + end + if trace_marks_all and not notrace then + marks.show(first,last) + end + local r = dn.reset + local s = dn.set + if first <= last and first <= r then + if trace_marks_get and not notrace then + report_marks("action 
%a, name %a, first %a, last %a, reset %a, index %a","reset first",name,first,last,r,first) + end + elseif first >= last and last <= r then + if trace_marks_get and not notrace then + report_marks("action %a, name %a, first %a, last %a, reset %a, index %a","reset last",name,first,last,r,last) + end + elseif not stack[first] or not stack[last] then + if trace_marks_get and not notrace then + -- a previous or next method can give an out of range, which is valid + report_marks("error: out of range, name %a, reset %a, index %a",name,r,first) + end + elseif strict then + local top = stack[first] + local fullchain = dn.fullchain + if not fullchain or #fullchain == 0 then + if trace_marks_get and not notrace then + report_marks("warning: no full chain, trying again, name %a, first %a, last %a",name,first,last) + end + return resolve(name,first,last) + else + if trace_marks_get and not notrace then + report_marks("found chain [ % => T ]",fullchain) + end + local chaindata, chainlength = { }, #fullchain + for i=1,chainlength do + local cname = fullchain[i] + if data[cname].set > 0 then + local value = resolve(cname,first,last,false,false,true) + if value == "" then + if trace_marks_get and not notrace then + report_marks("quitting chain, name %a, reset %a, start %a",name,r,first) + end + return "" + else + chaindata[i] = value + end + end + end + if trace_marks_get and not notrace then + report_marks("using chain [ % => T ]",chaindata) + end + local value, index, found = resolve(name,first,last,false,false,true) + if value ~= "" then + if trace_marks_get and not notrace then + report_marks("following chain [ % => T ]",chaindata) + end + for i=1,chainlength do + local cname = fullchain[i] + if data[cname].set > 0 and chaindata[i] ~= found[cname] then + if trace_marks_get and not notrace then + report_marks("quiting chain, name %a, reset %a, index %a",name,r,first) + end + return "" + end + end + if trace_marks_get and not notrace then + report_marks("found in chain, name %a, reset %a, start %a, index %a, value %a",name,r,first,index,value) + end + return value, index, found + elseif trace_marks_get and not notrace then + report_marks("not found, name %a, reset %a",name,r) + end + end + else + for i=first,last,step do + local current = stack[i] + local value = current and current[name] + if value == nil then + -- search on + elseif value == false then + if quitonfalse then + return "" + end + elseif value == true then + if trace_marks_get and not notrace then + report_marks("quitting steps, name %a, reset %a, start %a, index %a",name,r,first,i) + end + return "" + elseif value ~= "" then + if trace_marks_get and not notrace then + report_marks("found in steps, name %a, reset %a, start %a, index %a, value %a",name,r,first,i,value) + end + return value, i, current + end + end + if trace_marks_get and not notrace then + report_marks("not found in steps, name %a, reset %a",name,r) + end + end + end + return "" +end + +-- todo: column:first column:last + +local methods = { } + +local function doresolve(name,rangename,swap,df,dl,strict) + local range = ranges[rangename] or ranges[v_page] + local first, last = range.first, range.last + if trace_marks_get then + report_marks("action %a, name %a, range %a, swap %a, first %a, last %a, df %a, dl %a, strict %a", + "resolving",name,rangename,swap or false,first,last,df,dl,strict or false) + end + if swap then + first, last = last + df, first + dl + else + first, last = first + df, last + dl + end + local value, index, found = resolve(name,first,last,strict) + -- 
maybe something more + return value, index, found +end + +-- previous : last before sync +-- next : first after sync + +-- top : first in sync +-- bottom : last in sync + +-- first : first not top in sync +-- last : last not bottom in sync + +methods[v_previous] = function(name,range) return doresolve(name,range,false,-1,0,true ) end -- strict +methods[v_top] = function(name,range) return doresolve(name,range,false, 0,0,true ) end -- strict +methods[v_bottom] = function(name,range) return doresolve(name,range,true , 0,0,true ) end -- strict +methods[v_next] = function(name,range) return doresolve(name,range,true , 0,1,true ) end -- strict + +methods[v_previous_nocheck] = function(name,range) return doresolve(name,range,false,-1,0,false) end +methods[v_top_nocheck] = function(name,range) return doresolve(name,range,false, 0,0,false) end +methods[v_bottom_nocheck] = function(name,range) return doresolve(name,range,true , 0,0,false) end +methods[v_next_nocheck] = function(name,range) return doresolve(name,range,true , 0,1,false) end + +local function do_first(name,range,check) + if trace_marks_get then + report_marks("action %a, name %a, range %a","resolving first",name,range) + end + local f_value, f_index, f_found = doresolve(name,range,false,0,0,check) + if trace_marks_get then + report_marks("action %a, name %a, range %a","resolving last",name,range) + end + local l_value, l_index, l_found = doresolve(name,range,true ,0,0,check) + if f_found and l_found and l_index > f_index then + local name = parentname(name) + for i=f_index,l_index,1 do + local si = stack[i] + local sn = si[name] + if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= f_value then + if trace_marks_get then + report_marks("action %a, name %a, range %a, index %a, value %a","resolving",name,range,i,sn) + end + return sn, i, si + end + end + end + if trace_marks_get then + report_marks("resolved, name %a, range %a, using first",name,range) + end + return f_value, f_index, f_found +end + +local function do_last(name,range,check) + if trace_marks_get then + report_marks("action %a, name %a, range %a","resolving first",name,range) + end + local f_value, f_index, f_found = doresolve(name,range,false,0,0,check) + if trace_marks_get then + report_marks("action %a, name %a, range %a","resolving last",name,range) + end + local l_value, l_index, l_found = doresolve(name,range,true ,0,0,check) + if f_found and l_found and l_index > f_index then + local name = parentname(name) + for i=l_index,f_index,-1 do + local si = stack[i] + local sn = si[name] + if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= l_value then + if trace_marks_get then + report_marks("action %a, name %a, range %a, index %a, value %a","resolving",name,range,i,sn) + end + return sn, i, si + end + end + end + if trace_marks_get then + report_marks("resolved, name %a, range %a, using first",name,range) + end + return l_value, l_index, l_found +end + +methods[v_first ] = function(name,range) return do_first(name,range,true ) end +methods[v_last ] = function(name,range) return do_last (name,range,true ) end +methods[v_first_nocheck] = function(name,range) return do_first(name,range,false) end +methods[v_last_nocheck ] = function(name,range) return do_last (name,range,false) end + +methods[v_current] = function(name,range) -- range is ignored here + local top = stack[topofstack] + return top and top[parentname(name)] or "" +end + +local function fetched(name,range,method) + local value = (methods[method] or methods[v_first])(name,range) or "" + if not 
trace_marks_get then + -- no report + elseif value == "" then + report_marks("nothing fetched, name %a, range %a, method %a",name,range,method) + else + report_marks("marking fetched, name %a, range %a, method %a, value %a",name,range,method,value) + end + return value or "" +end + +-- can be used at the lua end: + +marks.fetched = fetched + +-- this will move to a separate runtime modules + +marks.tracers = marks.tracers or { } + +function marks.tracers.showtable() + context.starttabulate { "|l|l|l|lp|lp|" } + context.tabulaterowbold("name","parent","chain","children","fullchain") + context.ML() + for k, v in table.sortedpairs(data) do + local parent, chain, children, fullchain = v.parent or "", v.chain or "", v.children or { }, v.fullchain or { } + table.sort(children) -- in-place but harmless + context.tabulaterowtyp(k,parent,chain,concat(children," "),concat(fullchain," ")) + end + context.stoptabulate() +end + +-- pushing to context: + +local separator = context.nested.markingseparator +local command = context.nested.markingcommand +local ctxconcat = context.concat + +local function fetchonemark(name,range,method) + context(command(name,fetched(name,range,method))) +end + +local function fetchtwomarks(name,range) + ctxconcat( { + command(name,fetched(name,range,v_first)), + command(name,fetched(name,range,v_last)), + }, separator(name)) +end + +local function fetchallmarks(name,range) + ctxconcat( { + command(name,fetched(name,range,v_previous)), + command(name,fetched(name,range,v_first)), + command(name,fetched(name,range,v_last)), + }, separator(name)) +end + +function marks.fetch(name,range,method) -- chapter page first | chapter column:1 first + if trace_marks_get then + report_marks("marking requested, name %a, range %a, method %a",name,range,method) + end + if method == "" or method == v_default then + fetchonemark(name,range,v_first) + elseif method == v_both then + fetchtwomarks(name,range) + elseif method == v_all then + fetchallmarks(name,range) + else + fetchonemark(name,range,method) + end +end + +function marks.fetchonemark (name,range,method) fetchonemark (name,range,method) end +function marks.fetchtwomarks(name,range) fetchtwomarks(name,range ) end +function marks.fetchallmarks(name,range) fetchallmarks(name,range ) end + +-- here we have a few helpers .. 
will become commands.* + +function marks.title(tag,n) + local listindex = match(n,"^li::(.-)$") + if listindex then + commands.savedlisttitle(tag,listindex,"marking") + else + context(n) + end +end + +function marks.number(tag,n) -- no spec + local listindex = match(n,"^li::(.-)$") + if listindex then + commands.savedlistnumber(tag,listindex) + else + -- no prefix (as it is the prefix) + context(n) + end +end + +-- interface + +commands.definemarking = marks.define +commands.relatemarking = marks.relate +commands.setmarking = marks.set +commands.resetmarking = marks.reset +commands.synchronizemarking = marks.synchronize +commands.getmarking = marks.fetch +commands.fetchonemark = marks.fetchonemark +commands.fetchtwomarks = marks.fetchtwomarks +commands.fetchallmarks = marks.fetchallmarks + +function commands.doifelsemarking(str) -- can be shortcut + commands.doifelse(marks.exists(str)) +end + diff --git a/tex/context/base/strc-not.lua b/tex/context/base/strc-not.lua index 882e00a44..a699fd8d5 100644 --- a/tex/context/base/strc-not.lua +++ b/tex/context/base/strc-not.lua @@ -1,447 +1,447 @@ -if not modules then modules = { } end modules ['strc-not'] = { - version = 1.001, - comment = "companion to strc-not.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format -local next = next -local texcount = tex.count - -local trace_notes = false trackers.register("structures.notes", function(v) trace_notes = v end) -local trace_references = false trackers.register("structures.notes.references", function(v) trace_references = v end) - -local report_notes = logs.reporter("structure","notes") - -local structures = structures -local helpers = structures.helpers -local lists = structures.lists -local sections = structures.sections -local counters = structures.counters -local notes = structures.notes -local references = structures.references -local counterspecials = counters.specials - -notes.states = notes.states or { } -lists.enhancers = lists.enhancers or { } - -storage.register("structures/notes/states", notes.states, "structures.notes.states") - -local notestates = notes.states -local notedata = { } - -local variables = interfaces.variables -local context = context -local commands = commands - --- state: store, insert, postpone - -local function store(tag,n) - -- somewhat weird but this is a cheap hook spot - if not counterspecials[tag] then - counterspecials[tag] = function(tag) - context.doresetlinenotecompression(tag) -- maybe flag that controls it - end - end - -- - local nd = notedata[tag] - if not nd then - nd = { } - notedata[tag] = nd - end - local nnd = #nd + 1 - nd[nnd] = n - local state = notestates[tag] - if not state then - report_notes("unknown state for %a",tag) - elseif state.kind ~= "insert" then - if trace_notes then - report_notes("storing %a with state %a as %a",tag,state.kind,nnd) - end - state.start = state.start or nnd - end - return #nd -end - -notes.store = store - -function commands.storenote(tag,n) - context(store(tag,n)) -end - -local function get(tag,n) -- tricky ... only works when defined - local nd = notedata[tag] - if nd then - n = n or #nd - nd = nd[n] - if nd then - if trace_notes then - report_notes("getting note %a of %a with listindex %a",n,tag,nd) - end - -- is this right? 
--- local newdata = lists.collected[nd] - local newdata = lists.cached[nd] --- local newdata = lists.tobesaved[nd] - return newdata - end - end -end - -local function getn(tag) - local nd = notedata[tag] - return (nd and #nd) or 0 -end - -notes.get = get -notes.getn = getn - --- we could make a special enhancer - -local function listindex(tag,n) - local ndt = notedata[tag] - return ndt and ndt[n] -end - -notes.listindex = listindex - -function commands.notelistindex(tag,n) - context(listindex(tag,n)) -end - -local function setstate(tag,newkind) - local state = notestates[tag] - if trace_notes then - report_notes("setting state of %a from %s to %s",tag,(state and state.kind) or "unset",newkind) - end - if not state then - state = { - kind = newkind - } - notestates[tag] = state - elseif newkind == "insert" then - if not state.start then - state.kind = newkind - end - else --- if newkind == "postpone" and state.kind == "store" then --- else - state.kind = newkind --- end - end - -- state.start can already be set and will be set when an entry is added or flushed - return state -end - -local function getstate(tag) - local state = notestates[tag] - return state and state.kind or "unknown" -end - -notes.setstate = setstate -notes.getstate = getstate - -commands.setnotestate = setstate - -function commands.getnotestate(tag) - context(getstate(tag)) -end - -function notes.define(tag,kind,number) - local state = setstate(tag,kind) - state.number = number -end - -commands.definenote = notes.define - -function notes.save(tag,newkind) - local state = notestates[tag] - if state and not state.saved then - if trace_notes then - report_notes("saving state of %a, old: %a, new %a",tag,state.kind,newkind or state.kind) - end - state.saveddata = notedata[tag] - state.savedkind = state.kind - state.kind = newkind or state.kind - state.saved = true - notedata[tag] = { } - end -end - -function notes.restore(tag,forcedstate) - local state = notestates[tag] - if state and state.saved then - if trace_notes then - report_notes("restoring state of %a, old: %a, new: %a",tag,state.kind,state.savedkind) - end - notedata[tag] = state.saveddata - state.kind = forcedstate or state.savedkind - state.saveddata = nil - state.saved = false - end -end - -commands.savenote = notes.save -commands.restorenote = notes.restore - -local function hascontent(tag) - local ok = notestates[tag] - if ok then - if ok.kind == "insert" then - ok = tex.box[ok.number] - if ok then - ok = tbs.list - ok = lst and lst.next - end - else - ok = ok.start - end - end - return ok and true or false -end - -notes.hascontent = hascontent - -function commands.doifnotecontent(tag) - commands.doif(hascontent(tag)) -end - -local function internal(tag,n) - local nd = get(tag,n) - if nd then - local r = nd.references - if r then - local i = r.internal - return i and references.internals[i] -- dependency on references - end - end - return nil -end - -local function ordered(kind,name,n) - local o = lists.ordered[kind] - o = o and o[name] - return o and o[n] -end - -notes.internal = internal -notes.ordered = ordered - -local function onsamepageasprevious(tag) - local same = false - local n = getn(tag,n) - local current, previous = get(tag,n), get(tag,n-1) - if current and previous then - local cr, pr = current.references, previous.references - same = cr and pr and cr.realpage == pr.realpage - end - return same and true or false -end - -notes.doifonsamepageasprevious = onsamepageasprevious - -function commands.doifnoteonsamepageasprevious(tag) - 
commands.doifelse(onsamepageasprevious(tag)) -end - -function notes.checkpagechange(tag) -- called before increment ! - local nd = notedata[tag] -- can be unset at first entry - if nd then - local current = ordered("note",tag,#nd) - local nextone = ordered("note",tag,#nd+1) - if nextone then - -- we can use data from the previous pass - if nextone.pagenumber.number > current.pagenumber.number then - counters.reset(tag) - end - elseif current then - -- we need to locate the next one, best guess - if texcount.realpageno > current.pagenumber.number then - counters.reset(tag) - end - end - end -end - -function notes.postpone() - if trace_notes then - report_notes("postponing all insert notes") - end - for tag, state in next, notestates do - if state.kind ~= "store" then - setstate(tag,"postpone") - end - end -end - -commands.postponenotes = notes.postpone - -function notes.setsymbolpage(tag,n,l) - local l = l or listindex(tag,n) - if l then - local p = texcount.realpageno - if trace_notes or trace_references then - report_notes("note %a of %a with list index %a gets symbol page %a",n,tag,l,p) - end - local entry = lists.cached[l] - if entry then - entry.references.symbolpage = p - else - report_notes("internal error: note %a of %a is not flushed",n,tag) - end - else - report_notes("internal error: note %a of %a is not initialized",n,tag) - end -end - -commands.setnotesymbolpage = notes.setsymbolpage - -local function getsymbolpage(tag,n) - local li = internal(tag,n) - li = li and li.references - li = li and (li.symbolpage or li.realpage) or 0 - if trace_notes or trace_references then - report_notes("page number of note symbol %a of %a is %a",n,tag,li) - end - return li -end - -local function getnumberpage(tag,n) - local li = internal(tag,n) - li = li and li.references - li = li and li.realpage or 0 - if trace_notes or trace_references then - report_notes("page number of note number %s of %a is %a",n,tag,li) - end - return li -end - -local function getdeltapage(tag,n) - -- 0:unknown 1:textbefore, 2:textafter, 3:samepage - local what = 0 - -- references.internals[lists.tobesaved[nd].internal] - local li = internal(tag,n) - if li then - local references = li.references - if references then - local symbolpage = references.symbolpage or 0 - local notepage = references.realpage or 0 - if trace_references then - report_notes("note number %a of %a points from page %a to page %a",n,tag,symbolpage,notepage) - end - if notepage < symbolpage then - what = 3 -- after - elseif notepage > symbolpage then - what = 2 -- before - elseif notepage > 0 then - what = 1 -- same - end - else - -- might be a note that is not flushed due to to deep - -- nesting in a vbox - end - end - return what -end - -notes.getsymbolpage = getsymbolpage -notes.getnumberpage = getnumberpage -notes.getdeltapage = getdeltapage - -function commands.notesymbolpage(tag,n) context(getsymbolpage(tag,n)) end -function commands.notenumberpage(tag,n) context(getnumberpage(tag,n)) end -function commands.notedeltapage (tag,n) context(getdeltapage (tag,n)) end - -function commands.flushnotes(tag,whatkind,how) -- store and postpone - local state = notestates[tag] - local kind = state.kind - if kind == whatkind then - local nd = notedata[tag] - local ns = state.start -- first index - if kind == "postpone" then - if nd and ns then - if trace_notes then - report_notes("flushing state %a of %a from %a to %a",whatkind,tag,ns,#nd) - end - for i=ns,#nd do - context.handlenoteinsert(tag,i) - end - end - state.start = nil - state.kind = "insert" - elseif 
kind == "store" then - if nd and ns then - if trace_notes then - report_notes("flushing state %a of %a from %a to %a",whatkind,tag,ns,#nd) - end - -- todo: as registers: start, stop, inbetween - for i=ns,#nd do - -- tricky : trialtypesetting - if how == variables.page then - local rp = get(tag,i) - rp = rp and rp.references - rp = rp and rp.symbolpage or 0 - if rp > texcount.realpageno then - state.start = i - return - end - end - if i > ns then - context.betweennoteitself(tag) - end - context.handlenoteitself(tag,i) - end - end - state.start = nil - elseif kind == "reset" then - if nd and ns then - if trace_notes then - report_notes("flushing state %a of %a from %a to %a",whatkind,tag,ns,#nd) - end - end - state.start = nil - elseif trace_notes then - report_notes("not flushing state %a of %a",whatkind,tag) - end - elseif trace_notes then - report_notes("not flushing state %a of %a",whatkind,tag) - end -end - -function commands.flushpostponednotes() - if trace_notes then - report_notes("flushing all postponed notes") - end - for tag, _ in next, notestates do - commands.flushnotes(tag,"postpone") - end -end - -function notes.resetpostponed() - if trace_notes then - report_notes("resetting all postponed notes") - end - for tag, state in next, notestates do - if state.kind == "postpone" then - state.start = nil - state.kind = "insert" - end - end -end - -function commands.notetitle(tag,n) - command.savedlisttitle(tag,notedata[tag][n]) -end - -function commands.noteprefixednumber(tag,n,spec) - commands.savedlistprefixednumber(tag,notedata[tag][n]) -end - -function notes.internalid(tag,n) - local nd = get(tag,n) - if nd then - local r = nd.references - return r.internal - end -end +if not modules then modules = { } end modules ['strc-not'] = { + version = 1.001, + comment = "companion to strc-not.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format +local next = next +local texcount = tex.count + +local trace_notes = false trackers.register("structures.notes", function(v) trace_notes = v end) +local trace_references = false trackers.register("structures.notes.references", function(v) trace_references = v end) + +local report_notes = logs.reporter("structure","notes") + +local structures = structures +local helpers = structures.helpers +local lists = structures.lists +local sections = structures.sections +local counters = structures.counters +local notes = structures.notes +local references = structures.references +local counterspecials = counters.specials + +notes.states = notes.states or { } +lists.enhancers = lists.enhancers or { } + +storage.register("structures/notes/states", notes.states, "structures.notes.states") + +local notestates = notes.states +local notedata = { } + +local variables = interfaces.variables +local context = context +local commands = commands + +-- state: store, insert, postpone + +local function store(tag,n) + -- somewhat weird but this is a cheap hook spot + if not counterspecials[tag] then + counterspecials[tag] = function(tag) + context.doresetlinenotecompression(tag) -- maybe flag that controls it + end + end + -- + local nd = notedata[tag] + if not nd then + nd = { } + notedata[tag] = nd + end + local nnd = #nd + 1 + nd[nnd] = n + local state = notestates[tag] + if not state then + report_notes("unknown state for %a",tag) + elseif state.kind ~= "insert" then + if trace_notes then + report_notes("storing %a with state %a as 
%a",tag,state.kind,nnd) + end + state.start = state.start or nnd + end + return #nd +end + +notes.store = store + +function commands.storenote(tag,n) + context(store(tag,n)) +end + +local function get(tag,n) -- tricky ... only works when defined + local nd = notedata[tag] + if nd then + n = n or #nd + nd = nd[n] + if nd then + if trace_notes then + report_notes("getting note %a of %a with listindex %a",n,tag,nd) + end + -- is this right? +-- local newdata = lists.collected[nd] + local newdata = lists.cached[nd] +-- local newdata = lists.tobesaved[nd] + return newdata + end + end +end + +local function getn(tag) + local nd = notedata[tag] + return (nd and #nd) or 0 +end + +notes.get = get +notes.getn = getn + +-- we could make a special enhancer + +local function listindex(tag,n) + local ndt = notedata[tag] + return ndt and ndt[n] +end + +notes.listindex = listindex + +function commands.notelistindex(tag,n) + context(listindex(tag,n)) +end + +local function setstate(tag,newkind) + local state = notestates[tag] + if trace_notes then + report_notes("setting state of %a from %s to %s",tag,(state and state.kind) or "unset",newkind) + end + if not state then + state = { + kind = newkind + } + notestates[tag] = state + elseif newkind == "insert" then + if not state.start then + state.kind = newkind + end + else +-- if newkind == "postpone" and state.kind == "store" then +-- else + state.kind = newkind +-- end + end + -- state.start can already be set and will be set when an entry is added or flushed + return state +end + +local function getstate(tag) + local state = notestates[tag] + return state and state.kind or "unknown" +end + +notes.setstate = setstate +notes.getstate = getstate + +commands.setnotestate = setstate + +function commands.getnotestate(tag) + context(getstate(tag)) +end + +function notes.define(tag,kind,number) + local state = setstate(tag,kind) + state.number = number +end + +commands.definenote = notes.define + +function notes.save(tag,newkind) + local state = notestates[tag] + if state and not state.saved then + if trace_notes then + report_notes("saving state of %a, old: %a, new %a",tag,state.kind,newkind or state.kind) + end + state.saveddata = notedata[tag] + state.savedkind = state.kind + state.kind = newkind or state.kind + state.saved = true + notedata[tag] = { } + end +end + +function notes.restore(tag,forcedstate) + local state = notestates[tag] + if state and state.saved then + if trace_notes then + report_notes("restoring state of %a, old: %a, new: %a",tag,state.kind,state.savedkind) + end + notedata[tag] = state.saveddata + state.kind = forcedstate or state.savedkind + state.saveddata = nil + state.saved = false + end +end + +commands.savenote = notes.save +commands.restorenote = notes.restore + +local function hascontent(tag) + local ok = notestates[tag] + if ok then + if ok.kind == "insert" then + ok = tex.box[ok.number] + if ok then + ok = tbs.list + ok = lst and lst.next + end + else + ok = ok.start + end + end + return ok and true or false +end + +notes.hascontent = hascontent + +function commands.doifnotecontent(tag) + commands.doif(hascontent(tag)) +end + +local function internal(tag,n) + local nd = get(tag,n) + if nd then + local r = nd.references + if r then + local i = r.internal + return i and references.internals[i] -- dependency on references + end + end + return nil +end + +local function ordered(kind,name,n) + local o = lists.ordered[kind] + o = o and o[name] + return o and o[n] +end + +notes.internal = internal +notes.ordered = ordered + +local 
function onsamepageasprevious(tag) + local same = false + local n = getn(tag,n) + local current, previous = get(tag,n), get(tag,n-1) + if current and previous then + local cr, pr = current.references, previous.references + same = cr and pr and cr.realpage == pr.realpage + end + return same and true or false +end + +notes.doifonsamepageasprevious = onsamepageasprevious + +function commands.doifnoteonsamepageasprevious(tag) + commands.doifelse(onsamepageasprevious(tag)) +end + +function notes.checkpagechange(tag) -- called before increment ! + local nd = notedata[tag] -- can be unset at first entry + if nd then + local current = ordered("note",tag,#nd) + local nextone = ordered("note",tag,#nd+1) + if nextone then + -- we can use data from the previous pass + if nextone.pagenumber.number > current.pagenumber.number then + counters.reset(tag) + end + elseif current then + -- we need to locate the next one, best guess + if texcount.realpageno > current.pagenumber.number then + counters.reset(tag) + end + end + end +end + +function notes.postpone() + if trace_notes then + report_notes("postponing all insert notes") + end + for tag, state in next, notestates do + if state.kind ~= "store" then + setstate(tag,"postpone") + end + end +end + +commands.postponenotes = notes.postpone + +function notes.setsymbolpage(tag,n,l) + local l = l or listindex(tag,n) + if l then + local p = texcount.realpageno + if trace_notes or trace_references then + report_notes("note %a of %a with list index %a gets symbol page %a",n,tag,l,p) + end + local entry = lists.cached[l] + if entry then + entry.references.symbolpage = p + else + report_notes("internal error: note %a of %a is not flushed",n,tag) + end + else + report_notes("internal error: note %a of %a is not initialized",n,tag) + end +end + +commands.setnotesymbolpage = notes.setsymbolpage + +local function getsymbolpage(tag,n) + local li = internal(tag,n) + li = li and li.references + li = li and (li.symbolpage or li.realpage) or 0 + if trace_notes or trace_references then + report_notes("page number of note symbol %a of %a is %a",n,tag,li) + end + return li +end + +local function getnumberpage(tag,n) + local li = internal(tag,n) + li = li and li.references + li = li and li.realpage or 0 + if trace_notes or trace_references then + report_notes("page number of note number %s of %a is %a",n,tag,li) + end + return li +end + +local function getdeltapage(tag,n) + -- 0:unknown 1:textbefore, 2:textafter, 3:samepage + local what = 0 + -- references.internals[lists.tobesaved[nd].internal] + local li = internal(tag,n) + if li then + local references = li.references + if references then + local symbolpage = references.symbolpage or 0 + local notepage = references.realpage or 0 + if trace_references then + report_notes("note number %a of %a points from page %a to page %a",n,tag,symbolpage,notepage) + end + if notepage < symbolpage then + what = 3 -- after + elseif notepage > symbolpage then + what = 2 -- before + elseif notepage > 0 then + what = 1 -- same + end + else + -- might be a note that is not flushed due to to deep + -- nesting in a vbox + end + end + return what +end + +notes.getsymbolpage = getsymbolpage +notes.getnumberpage = getnumberpage +notes.getdeltapage = getdeltapage + +function commands.notesymbolpage(tag,n) context(getsymbolpage(tag,n)) end +function commands.notenumberpage(tag,n) context(getnumberpage(tag,n)) end +function commands.notedeltapage (tag,n) context(getdeltapage (tag,n)) end + +function commands.flushnotes(tag,whatkind,how) -- store and 
postpone + local state = notestates[tag] + local kind = state.kind + if kind == whatkind then + local nd = notedata[tag] + local ns = state.start -- first index + if kind == "postpone" then + if nd and ns then + if trace_notes then + report_notes("flushing state %a of %a from %a to %a",whatkind,tag,ns,#nd) + end + for i=ns,#nd do + context.handlenoteinsert(tag,i) + end + end + state.start = nil + state.kind = "insert" + elseif kind == "store" then + if nd and ns then + if trace_notes then + report_notes("flushing state %a of %a from %a to %a",whatkind,tag,ns,#nd) + end + -- todo: as registers: start, stop, inbetween + for i=ns,#nd do + -- tricky : trialtypesetting + if how == variables.page then + local rp = get(tag,i) + rp = rp and rp.references + rp = rp and rp.symbolpage or 0 + if rp > texcount.realpageno then + state.start = i + return + end + end + if i > ns then + context.betweennoteitself(tag) + end + context.handlenoteitself(tag,i) + end + end + state.start = nil + elseif kind == "reset" then + if nd and ns then + if trace_notes then + report_notes("flushing state %a of %a from %a to %a",whatkind,tag,ns,#nd) + end + end + state.start = nil + elseif trace_notes then + report_notes("not flushing state %a of %a",whatkind,tag) + end + elseif trace_notes then + report_notes("not flushing state %a of %a",whatkind,tag) + end +end + +function commands.flushpostponednotes() + if trace_notes then + report_notes("flushing all postponed notes") + end + for tag, _ in next, notestates do + commands.flushnotes(tag,"postpone") + end +end + +function notes.resetpostponed() + if trace_notes then + report_notes("resetting all postponed notes") + end + for tag, state in next, notestates do + if state.kind == "postpone" then + state.start = nil + state.kind = "insert" + end + end +end + +function commands.notetitle(tag,n) + commands.savedlisttitle(tag,notedata[tag][n]) -- was 'command.savedlisttitle', an undefined global +end + +function commands.noteprefixednumber(tag,n,spec) + commands.savedlistprefixednumber(tag,notedata[tag][n]) +end + +function notes.internalid(tag,n) + local nd = get(tag,n) + if nd then + local r = nd.references + return r.internal + end +end diff --git a/tex/context/base/strc-num.lua b/tex/context/base/strc-num.lua index b0eae6b78..6245a537e 100644 --- a/tex/context/base/strc-num.lua +++ b/tex/context/base/strc-num.lua @@ -1,649 +1,649 @@ -if not modules then modules = { } end modules ['strc-num'] = { - version = 1.001, - comment = "companion to strc-num.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format -local next, type = next, type -local min, max = math.min, math.max -local texcount, texsetcount = tex.count, tex.setcount - --- Counters are managed here. They can have multiple levels which makes it easier to synchronize --- them. Synchronization is sort of special anyway, as it relates to document structuring.
- -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex - -local trace_counters = false trackers.register("structures.counters", function(v) trace_counters = v end) -local report_counters = logs.reporter("structure","counters") - -local structures = structures -local helpers = structures.helpers -local sections = structures.sections -local counters = structures.counters -local documents = structures.documents - -local variables = interfaces.variables -local v_start = variables.start -local v_page = variables.page -local v_reverse = variables.reverse -local v_first = variables.first -local v_next = variables.next -local v_previous = variables.previous -local v_prev = variables.prev -local v_last = variables.last ------ v_no = variables.no -local v_backward = variables.backward -local v_forward = variables.forward ------ v_subs = variables.subs or "subs" - --- states: start stop none reset - --- specials are used for counters that are set and incremented in special ways, like --- pagecounters that get this treatment in the page builder - -counters.specials = counters.specials or { } -local counterspecials = counters.specials - -local counterranges, tbs = { }, 0 - -counters.collected = allocate() -counters.tobesaved = counters.tobesaved or { } -counters.data = counters.data or { } - -storage.register("structures/counters/data", counters.data, "structures.counters.data") -storage.register("structures/counters/tobesaved", counters.tobesaved, "structures.counters.tobesaved") - -local collected = counters.collected -local tobesaved = counters.tobesaved -local counterdata = counters.data - -local function initializer() -- not really needed - collected = counters.collected - tobesaved = counters.tobesaved - counterdata = counters.data -end - -local function finalizer() - for name, cd in next, counterdata do - local cs = tobesaved[name] - local data = cd.data - for i=1,#data do - local d = data[i] - local r = d.range - cs[i][r] = d.number - d.range = r + 1 - end - end -end - -job.register('structures.counters.collected', tobesaved, initializer, finalizer) - -local constructor = { -- maybe some day we will provide an installer for more variants - - last = function(t,name,i) - local cc = collected[name] - local stop = (cc and cc[i] and cc[i][t.range]) or 0 -- stop is available for diagnostics purposes only - t.stop = stop - if t.offset then - return stop - t.step - else - return stop - end - end, - - first = function(t,name,i) - local start = t.start - if start > 0 then - return start -- brrr - elseif t.offset then - return start + t.step + 1 - else - return start + 1 - end - end, - - prev = function(t,name,i) - return max(t.first,t.number-1) -- todo: step - end, - - previous = function(t,name,i) - return max(t.first,t.number-1) -- todo: step - end, - - next = function(t,name,i) - return min(t.last,t.number+1) -- todo: step - end, - - backward =function(t,name,i) - if t.number - 1 < t.first then - return t.last - else - return t.previous - end - end, - - forward = function(t,name,i) - if t.number + 1 > t.last then - return t.first - else - return t.next - end - end, - - subs = function(t,name,i) - local cc = collected[name] - t.subs = (cc and cc[i+1] and cc[i+1][t.range]) or 0 - return t.subs - end, - -} - -local function dummyconstructor(t,name,i) - return nil -- was 0, but that is fuzzy in testing for e.g. 
own -end - -setmetatableindex(constructor,function(t,k) - if trace_counters then - report_counters("unknown constructor %a",k) - end - return dummyconstructor -end) - -local function enhance() - for name, cd in next, counterdata do - local data = cd.data - for i=1,#data do - local ci = data[i] - setmetatableindex(ci, function(t,s) return constructor[s](t,name,i) end) - end - end - enhance = nil -end - -local function allocate(name,i) -- can be metatable - local cd = counterdata[name] - if not cd then - cd = { - level = 1, - -- block = "", -- todo - numbers = nil, - state = v_start, -- true - data = { }, - saved = { }, - } - tobesaved[name] = { } - counterdata[name] = cd - end - cd = cd.data - local ci = cd[i] - if not ci then - ci = { - number = 0, - start = 0, - saved = 0, - step = 1, - range = 1, - offset = false, - stop = 0, -- via metatable: last, first, stop only for tracing - } - setmetatableindex(ci, function(t,s) return constructor[s](t,name,i) end) - cd[i] = ci - tobesaved[name][i] = { } - else - if enhance then enhance() end -- not stored in bytecode - end - return ci -end - -function counters.record(name,i) - return allocate(name,i or 1) -end - -local function savevalue(name,i) - if name then - local cd = counterdata[name].data[i] - local cs = tobesaved[name][i] - local cc = collected[name] - if trace_counters then - report_counters("action %a, counter %s, value %s","save",name,cd.number) - end - local cr = cd.range - local old = (cc and cc[i] and cc[i][cr]) or 0 - local number = cd.number - if cd.method == v_page then - -- we can be one page ahead - number = number - 1 - end - cs[cr] = (number >= 0) and number or 0 - cd.range = cr + 1 - return old - else - return 0 - end -end - -function counters.define(specification) - local name = specification.name - if name and name ~= "" then - -- todo: step - local d = allocate(name,1) - d.start = tonumber(specification.start) or 0 - d.state = v_state or "" - local counter = specification.counter - if counter and counter ~= "" then - d.counter = counter -- only for special purposes, cannot be false - d.method = specification.method -- frozen at define time - end - end -end - -function counters.raw(name) - return counterdata[name] -end - -function counters.compact(name,level,onlynumbers) - local cd = counterdata[name] - if cd then - local data = cd.data - local compact = { } - for i=1,level or #data do - local d = data[i] - if d.number ~= 0 then - compact[i] = (onlynumbers and d.number) or d - end - end - return compact - end -end - --- depends on when incremented, before or after (driven by d.offset) - -function counters.previous(name,n) - return allocate(name,n).previous -end - -function counters.next(name,n) - return allocate(name,n).next -end - -counters.prev = counters.previous - -function counters.currentvalue(name,n) - return allocate(name,n).number -end - -function counters.first(name,n) - return allocate(name,n).first -end - -function counters.last(name,n) - return allocate(name,n).last -end - -function counters.subs(name,n) - return counterdata[name].data[n].subs or 0 -end - -local function setvalue(name,tag,value) - local cd = counterdata[name] - if cd then - cd[tag] = value - end -end - -counters.setvalue = setvalue - -function counters.setstate(name,value) -- true/false - value = variables[value] - if value then - setvalue(name,"state",value) - end -end - -function counters.setlevel(name,value) - setvalue(name,"level",value) -end - -function counters.setoffset(name,value) - setvalue(name,"offset",value) -end - -local function 
synchronize(name,d) - local dc = d.counter - if dc then - if trace_counters then - report_counters("action %a, name %a, counter %a, value %a","synchronize",name,dc,d.number) - end - texsetcount("global",dc,d.number) - end - local cs = counterspecials[name] - if cs then - if trace_counters then - report_counters("action %a, name %a, counter %a","synccommand",name,dc) - end - cs(name) - end -end - -local function reset(name,n) - local cd = counterdata[name] - if cd then - for i=n or 1,#cd.data do - local d = cd.data[i] - savevalue(name,i) - local number = d.start or 0 - d.number = number - d.own = nil - if trace_counters then - report_counters("action %a, name %a, sub %a, value %a","reset",name,i,number) - end - synchronize(name,d) - end - cd.numbers = nil - else - end -end - -local function set(name,n,value) - local cd = counterdata[name] - if cd then - local d = allocate(name,n) - local number = value or 0 - d.number = number - d.own = nil - if trace_counters then - report_counters("action %a, name %a, sub %a, value %a","set",name,"no",number) - end - synchronize(name,d) - end -end - -local function check(name,data,start,stop) - for i=start or 1,stop or #data do - local d = data[i] - savevalue(name,i) - local number = d.start or 0 - d.number = number - d.own = nil - if trace_counters then - report_counters("action %a, name %a, sub %a, value %a","check",name,i,number) - end - synchronize(name,d) - end -end - -counters.reset = reset -counters.set = set - -function counters.setown(name,n,value) - local cd = counterdata[name] - if cd then - local d = allocate(name,n) - d.own = value - d.number = (d.number or d.start or 0) + (d.step or 0) - local level = cd.level - if not level or level == -1 then - -- -1 is signal that we reset manually - elseif level > 0 or level == -3 then - check(name,d,n+1) - elseif level == 0 then - -- happens elsewhere, check this for block - end - synchronize(name,d) - end -end - -function counters.restart(name,n,newstart,noreset) - local cd = counterdata[name] - if cd then - newstart = tonumber(newstart) - if newstart then - local d = allocate(name,n) - d.start = newstart - if not noreset then - reset(name,n) -- hm - end - end - end -end - -function counters.save(name) -- or just number - local cd = counterdata[name] - if cd then - table.insert(cd.saved,table.copy(cd.data)) - end -end - -function counters.restore(name) - local cd = counterdata[name] - if cd and cd.saved then - cd.data = table.remove(cd.saved) - end -end - -function counters.add(name,n,delta) - local cd = counterdata[name] - if cd and (cd.state == v_start or cd.state == "") then - local data = cd.data - local d = allocate(name,n) - d.number = (d.number or d.start or 0) + delta*(d.step or 0) - -- d.own = nil - local level = cd.level - if not level or level == -1 then - -- -1 is signal that we reset manually - if trace_counters then - report_counters("action %a, name %a, sub %a, how %a","add",name,"no","no checking") - end - elseif level == -2 then - -- -2 is signal that we work per text - if trace_counters then - report_counters("action %a, name %a, sub %a, how %a","add",name,"text","checking") - end - check(name,data,n+1) - elseif level > 0 or level == -3 then - -- within countergroup - if trace_counters then - report_counters("action %a, name %a, sub %a, how %a","add",name,level,"checking within group") - end - check(name,data,n+1) - elseif level == 0 then - -- happens elsewhere - if trace_counters then - report_counters("action %a, name %a, sub %a, how %a","add",name,level,"no checking") - end - else 
- if trace_counters then - report_counters("action %a, name %a, sub %a, how %a","add",name,"unknown","no checking") - end - end - synchronize(name,d) - return d.number -- not needed - end - return 0 -end - -function counters.check(level) - for name, cd in next, counterdata do - if level > 0 and cd.level == -3 then -- could become an option - if trace_counters then - report_counters("action %a, name %a, sub %a, detail %a","reset",name,level,"head") - end - reset(name) - elseif cd.level == level then - if trace_counters then - report_counters("action %a, name %a, sub %a, detail %a","reset",name,level,"normal") - end - reset(name) - end - end -end - -local function get(name,n,key) - local d = allocate(name,n) - d = d and d[key] - if not d then - return 0 - elseif type(d) == "function" then - return d() - else - return d - end -end - -counters.get = get - -function counters.value(name,n) -- what to do with own - return get(name,n or 1,'number') or 0 -end - -function counters.converted(name,spec) -- name can be number and reference to storage - local cd - if type(name) == "number" then - cd = specials.retrieve("counter",name) - cd = cd and cd.counter - else - cd = counterdata[name] - end - if cd then - local spec = spec or { } - local numbers, ownnumbers = { }, { } - local reverse = spec.order == v_reverse - local kind = spec.type or "number" - local data = cd.data - for k=1,#data do - local v = data[k] - -- somewhat messy, what if subnr? only last must honour kind? - local vn - if v.own then - numbers[k], ownnumbers[k] = v.number, v.own - else - if kind == v_first then - vn = v.first - elseif kind == v_next then - vn = v.next - elseif kind == v_prev or kind == v_previous then - vn = v.prev - elseif kind == v_last then - vn = v.last - else - vn = v.number - if reverse then - local vf = v.first - local vl = v.last - if vl > 0 then - -- vn = vl - vn + 1 + vf - vn = vl - vn + vf -- see testbed for test - end - end - end - numbers[k], ownnumbers[k] = vn or v.number, nil - end - end - cd.numbers = numbers - cd.ownnumbers = ownnumbers - sections.typesetnumber(cd,'number',spec) - cd.numbers = nil - cd.ownnumbers = nil - end -end - --- interfacing - -commands.definecounter = counters.define -commands.setcounter = counters.set -commands.setowncounter = counters.setown -commands.resetcounter = counters.reset -commands.restartcounter = counters.restart -commands.savecounter = counters.save -commands.restorecounter = counters.restore -commands.addcounter = counters.add - -commands.rawcountervalue = function(...) context(counters.raw (...)) end -commands.countervalue = function(...) context(counters.value (...)) end -commands.lastcountervalue = function(...) context(counters.last (...)) end -commands.firstcountervalue = function(...) context(counters.first (...)) end -commands.nextcountervalue = function(...) context(counters.next (...)) end -commands.prevcountervalue = function(...) context(counters.previous(...)) end -commands.subcountervalues = function(...) 
context(counters.subs (...)) end - -function commands.showcounter(name) - local cd = counterdata[name] - if cd then - context("[%s:",name) - local data = cd.data - for i=1,#data do - local d = data[i] - context(" (%s: %s,%s,%s s:%s r:%s)",i,d.start or 0,d.number or 0,d.last,d.step or 0,d.range or 0) - end - context("]") - end -end - -function commands.doifelsecounter(name) commands.doifelse(counterdata[name]) end -function commands.doifcounter (name) commands.doif (counterdata[name]) end -function commands.doifnotcounter (name) commands.doifnot (counterdata[name]) end - -function commands.incrementedcounter(...) context(counters.add(...)) end - -function commands.checkcountersetup(name,level,start,state) - counters.restart(name,1,start,true) -- no reset - counters.setstate(name,state) - counters.setlevel(name,level) - sections.setchecker(name,level,counters.reset) -end - --- -- move to strc-pag.lua --- --- function counters.analyze(name,counterspecification) --- local cd = counterdata[name] --- -- safeguard --- if not cd then --- return false, false, "no counter data" --- end --- -- section data --- local sectiondata = sections.current() --- if not sectiondata then --- return cd, false, "not in section" --- end --- local references = sectiondata.references --- if not references then --- return cd, false, "no references" --- end --- local section = references.section --- if not section then --- return cd, false, "no section" --- end --- sectiondata = sections.collected[references.section] --- if not sectiondata then --- return cd, false, "no section data" --- end --- -- local preferences --- local no = v_no --- if counterspecification and counterspecification.prefix == no then --- return cd, false, "current spec blocks prefix" --- end --- -- stored preferences (not used) --- if cd.prefix == no then --- return cd, false, "entry blocks prefix" --- end --- -- sectioning --- -- if sectiondata.prefix == no then --- -- return false, false, "sectiondata blocks prefix" --- -- end --- -- final verdict --- return cd, sectiondata, "okay" --- end --- --- function counters.prefixedconverted(name,prefixspec,numberspec) --- local cd, prefixdata, result = counters.analyze(name,prefixspec) --- if cd then --- if prefixdata then --- sections.typesetnumber(prefixdata,"prefix",prefixspec or false,cd or false) --- end --- counters.converted(name,numberspec) --- end --- end +if not modules then modules = { } end modules ['strc-num'] = { + version = 1.001, + comment = "companion to strc-num.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format +local next, type = next, type +local min, max = math.min, math.max +local texcount, texsetcount = tex.count, tex.setcount + +-- Counters are managed here. They can have multiple levels which makes it easier to synchronize +-- them. Synchronization is sort of special anyway, as it relates to document structuring. 
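-- [editor's sketch, not part of the patch] "Multiple levels" here means that one logical
-- counter carries a list of subnumbers (think 1.2.3) and that bumping a given level resets
-- the deeper ones, which is what the reset()/check() calls further down take care of. A
-- minimal, self-contained illustration of that idea in plain Lua; the names newcounter and
-- add are made up for this example and do not occur in strc-num.lua:

local function newcounter(levels)
    local data = { }
    for i=1,levels do
        data[i] = 0
    end
    return data
end

local function add(counter,level,delta)
    counter[level] = counter[level] + (delta or 1)
    for i=level+1,#counter do
        counter[i] = 0 -- deeper levels restart after a bump higher up
    end
end

local chapter = newcounter(3)
add(chapter,1) -- 1.0.0
add(chapter,2) -- 1.1.0
add(chapter,3) -- 1.1.1
add(chapter,2) -- 1.2.0
print(table.concat(chapter,".")) -- prints 1.2.0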
+ +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex + +local trace_counters = false trackers.register("structures.counters", function(v) trace_counters = v end) +local report_counters = logs.reporter("structure","counters") + +local structures = structures +local helpers = structures.helpers +local sections = structures.sections +local counters = structures.counters +local documents = structures.documents + +local variables = interfaces.variables +local v_start = variables.start +local v_page = variables.page +local v_reverse = variables.reverse +local v_first = variables.first +local v_next = variables.next +local v_previous = variables.previous +local v_prev = variables.prev +local v_last = variables.last +----- v_no = variables.no +local v_backward = variables.backward +local v_forward = variables.forward +----- v_subs = variables.subs or "subs" + +-- states: start stop none reset + +-- specials are used for counters that are set and incremented in special ways, like +-- pagecounters that get this treatment in the page builder + +counters.specials = counters.specials or { } +local counterspecials = counters.specials + +local counterranges, tbs = { }, 0 + +counters.collected = allocate() +counters.tobesaved = counters.tobesaved or { } +counters.data = counters.data or { } + +storage.register("structures/counters/data", counters.data, "structures.counters.data") +storage.register("structures/counters/tobesaved", counters.tobesaved, "structures.counters.tobesaved") + +local collected = counters.collected +local tobesaved = counters.tobesaved +local counterdata = counters.data + +local function initializer() -- not really needed + collected = counters.collected + tobesaved = counters.tobesaved + counterdata = counters.data +end + +local function finalizer() + for name, cd in next, counterdata do + local cs = tobesaved[name] + local data = cd.data + for i=1,#data do + local d = data[i] + local r = d.range + cs[i][r] = d.number + d.range = r + 1 + end + end +end + +job.register('structures.counters.collected', tobesaved, initializer, finalizer) + +local constructor = { -- maybe some day we will provide an installer for more variants + + last = function(t,name,i) + local cc = collected[name] + local stop = (cc and cc[i] and cc[i][t.range]) or 0 -- stop is available for diagnostics purposes only + t.stop = stop + if t.offset then + return stop - t.step + else + return stop + end + end, + + first = function(t,name,i) + local start = t.start + if start > 0 then + return start -- brrr + elseif t.offset then + return start + t.step + 1 + else + return start + 1 + end + end, + + prev = function(t,name,i) + return max(t.first,t.number-1) -- todo: step + end, + + previous = function(t,name,i) + return max(t.first,t.number-1) -- todo: step + end, + + next = function(t,name,i) + return min(t.last,t.number+1) -- todo: step + end, + + backward =function(t,name,i) + if t.number - 1 < t.first then + return t.last + else + return t.previous + end + end, + + forward = function(t,name,i) + if t.number + 1 > t.last then + return t.first + else + return t.next + end + end, + + subs = function(t,name,i) + local cc = collected[name] + t.subs = (cc and cc[i+1] and cc[i+1][t.range]) or 0 + return t.subs + end, + +} + +local function dummyconstructor(t,name,i) + return nil -- was 0, but that is fuzzy in testing for e.g. 
own +end + +setmetatableindex(constructor,function(t,k) + if trace_counters then + report_counters("unknown constructor %a",k) + end + return dummyconstructor +end) + +local function enhance() + for name, cd in next, counterdata do + local data = cd.data + for i=1,#data do + local ci = data[i] + setmetatableindex(ci, function(t,s) return constructor[s](t,name,i) end) + end + end + enhance = nil +end + +local function allocate(name,i) -- can be metatable + local cd = counterdata[name] + if not cd then + cd = { + level = 1, + -- block = "", -- todo + numbers = nil, + state = v_start, -- true + data = { }, + saved = { }, + } + tobesaved[name] = { } + counterdata[name] = cd + end + cd = cd.data + local ci = cd[i] + if not ci then + ci = { + number = 0, + start = 0, + saved = 0, + step = 1, + range = 1, + offset = false, + stop = 0, -- via metatable: last, first, stop only for tracing + } + setmetatableindex(ci, function(t,s) return constructor[s](t,name,i) end) + cd[i] = ci + tobesaved[name][i] = { } + else + if enhance then enhance() end -- not stored in bytecode + end + return ci +end + +function counters.record(name,i) + return allocate(name,i or 1) +end + +local function savevalue(name,i) + if name then + local cd = counterdata[name].data[i] + local cs = tobesaved[name][i] + local cc = collected[name] + if trace_counters then + report_counters("action %a, counter %s, value %s","save",name,cd.number) + end + local cr = cd.range + local old = (cc and cc[i] and cc[i][cr]) or 0 + local number = cd.number + if cd.method == v_page then + -- we can be one page ahead + number = number - 1 + end + cs[cr] = (number >= 0) and number or 0 + cd.range = cr + 1 + return old + else + return 0 + end +end + +function counters.define(specification) + local name = specification.name + if name and name ~= "" then + -- todo: step + local d = allocate(name,1) + d.start = tonumber(specification.start) or 0 + d.state = v_state or "" + local counter = specification.counter + if counter and counter ~= "" then + d.counter = counter -- only for special purposes, cannot be false + d.method = specification.method -- frozen at define time + end + end +end + +function counters.raw(name) + return counterdata[name] +end + +function counters.compact(name,level,onlynumbers) + local cd = counterdata[name] + if cd then + local data = cd.data + local compact = { } + for i=1,level or #data do + local d = data[i] + if d.number ~= 0 then + compact[i] = (onlynumbers and d.number) or d + end + end + return compact + end +end + +-- depends on when incremented, before or after (driven by d.offset) + +function counters.previous(name,n) + return allocate(name,n).previous +end + +function counters.next(name,n) + return allocate(name,n).next +end + +counters.prev = counters.previous + +function counters.currentvalue(name,n) + return allocate(name,n).number +end + +function counters.first(name,n) + return allocate(name,n).first +end + +function counters.last(name,n) + return allocate(name,n).last +end + +function counters.subs(name,n) + return counterdata[name].data[n].subs or 0 +end + +local function setvalue(name,tag,value) + local cd = counterdata[name] + if cd then + cd[tag] = value + end +end + +counters.setvalue = setvalue + +function counters.setstate(name,value) -- true/false + value = variables[value] + if value then + setvalue(name,"state",value) + end +end + +function counters.setlevel(name,value) + setvalue(name,"level",value) +end + +function counters.setoffset(name,value) + setvalue(name,"offset",value) +end + +local function 
synchronize(name,d) + local dc = d.counter + if dc then + if trace_counters then + report_counters("action %a, name %a, counter %a, value %a","synchronize",name,dc,d.number) + end + texsetcount("global",dc,d.number) + end + local cs = counterspecials[name] + if cs then + if trace_counters then + report_counters("action %a, name %a, counter %a","synccommand",name,dc) + end + cs(name) + end +end + +local function reset(name,n) + local cd = counterdata[name] + if cd then + for i=n or 1,#cd.data do + local d = cd.data[i] + savevalue(name,i) + local number = d.start or 0 + d.number = number + d.own = nil + if trace_counters then + report_counters("action %a, name %a, sub %a, value %a","reset",name,i,number) + end + synchronize(name,d) + end + cd.numbers = nil + else + end +end + +local function set(name,n,value) + local cd = counterdata[name] + if cd then + local d = allocate(name,n) + local number = value or 0 + d.number = number + d.own = nil + if trace_counters then + report_counters("action %a, name %a, sub %a, value %a","set",name,"no",number) + end + synchronize(name,d) + end +end + +local function check(name,data,start,stop) + for i=start or 1,stop or #data do + local d = data[i] + savevalue(name,i) + local number = d.start or 0 + d.number = number + d.own = nil + if trace_counters then + report_counters("action %a, name %a, sub %a, value %a","check",name,i,number) + end + synchronize(name,d) + end +end + +counters.reset = reset +counters.set = set + +function counters.setown(name,n,value) + local cd = counterdata[name] + if cd then + local d = allocate(name,n) + d.own = value + d.number = (d.number or d.start or 0) + (d.step or 0) + local level = cd.level + if not level or level == -1 then + -- -1 is signal that we reset manually + elseif level > 0 or level == -3 then + check(name,d,n+1) + elseif level == 0 then + -- happens elsewhere, check this for block + end + synchronize(name,d) + end +end + +function counters.restart(name,n,newstart,noreset) + local cd = counterdata[name] + if cd then + newstart = tonumber(newstart) + if newstart then + local d = allocate(name,n) + d.start = newstart + if not noreset then + reset(name,n) -- hm + end + end + end +end + +function counters.save(name) -- or just number + local cd = counterdata[name] + if cd then + table.insert(cd.saved,table.copy(cd.data)) + end +end + +function counters.restore(name) + local cd = counterdata[name] + if cd and cd.saved then + cd.data = table.remove(cd.saved) + end +end + +function counters.add(name,n,delta) + local cd = counterdata[name] + if cd and (cd.state == v_start or cd.state == "") then + local data = cd.data + local d = allocate(name,n) + d.number = (d.number or d.start or 0) + delta*(d.step or 0) + -- d.own = nil + local level = cd.level + if not level or level == -1 then + -- -1 is signal that we reset manually + if trace_counters then + report_counters("action %a, name %a, sub %a, how %a","add",name,"no","no checking") + end + elseif level == -2 then + -- -2 is signal that we work per text + if trace_counters then + report_counters("action %a, name %a, sub %a, how %a","add",name,"text","checking") + end + check(name,data,n+1) + elseif level > 0 or level == -3 then + -- within countergroup + if trace_counters then + report_counters("action %a, name %a, sub %a, how %a","add",name,level,"checking within group") + end + check(name,data,n+1) + elseif level == 0 then + -- happens elsewhere + if trace_counters then + report_counters("action %a, name %a, sub %a, how %a","add",name,level,"no checking") + end + else 
+ if trace_counters then + report_counters("action %a, name %a, sub %a, how %a","add",name,"unknown","no checking") + end + end + synchronize(name,d) + return d.number -- not needed + end + return 0 +end + +function counters.check(level) + for name, cd in next, counterdata do + if level > 0 and cd.level == -3 then -- could become an option + if trace_counters then + report_counters("action %a, name %a, sub %a, detail %a","reset",name,level,"head") + end + reset(name) + elseif cd.level == level then + if trace_counters then + report_counters("action %a, name %a, sub %a, detail %a","reset",name,level,"normal") + end + reset(name) + end + end +end + +local function get(name,n,key) + local d = allocate(name,n) + d = d and d[key] + if not d then + return 0 + elseif type(d) == "function" then + return d() + else + return d + end +end + +counters.get = get + +function counters.value(name,n) -- what to do with own + return get(name,n or 1,'number') or 0 +end + +function counters.converted(name,spec) -- name can be number and reference to storage + local cd + if type(name) == "number" then + cd = specials.retrieve("counter",name) + cd = cd and cd.counter + else + cd = counterdata[name] + end + if cd then + local spec = spec or { } + local numbers, ownnumbers = { }, { } + local reverse = spec.order == v_reverse + local kind = spec.type or "number" + local data = cd.data + for k=1,#data do + local v = data[k] + -- somewhat messy, what if subnr? only last must honour kind? + local vn + if v.own then + numbers[k], ownnumbers[k] = v.number, v.own + else + if kind == v_first then + vn = v.first + elseif kind == v_next then + vn = v.next + elseif kind == v_prev or kind == v_previous then + vn = v.prev + elseif kind == v_last then + vn = v.last + else + vn = v.number + if reverse then + local vf = v.first + local vl = v.last + if vl > 0 then + -- vn = vl - vn + 1 + vf + vn = vl - vn + vf -- see testbed for test + end + end + end + numbers[k], ownnumbers[k] = vn or v.number, nil + end + end + cd.numbers = numbers + cd.ownnumbers = ownnumbers + sections.typesetnumber(cd,'number',spec) + cd.numbers = nil + cd.ownnumbers = nil + end +end + +-- interfacing + +commands.definecounter = counters.define +commands.setcounter = counters.set +commands.setowncounter = counters.setown +commands.resetcounter = counters.reset +commands.restartcounter = counters.restart +commands.savecounter = counters.save +commands.restorecounter = counters.restore +commands.addcounter = counters.add + +commands.rawcountervalue = function(...) context(counters.raw (...)) end +commands.countervalue = function(...) context(counters.value (...)) end +commands.lastcountervalue = function(...) context(counters.last (...)) end +commands.firstcountervalue = function(...) context(counters.first (...)) end +commands.nextcountervalue = function(...) context(counters.next (...)) end +commands.prevcountervalue = function(...) context(counters.previous(...)) end +commands.subcountervalues = function(...) 
context(counters.subs (...)) end + +function commands.showcounter(name) + local cd = counterdata[name] + if cd then + context("[%s:",name) + local data = cd.data + for i=1,#data do + local d = data[i] + context(" (%s: %s,%s,%s s:%s r:%s)",i,d.start or 0,d.number or 0,d.last,d.step or 0,d.range or 0) + end + context("]") + end +end + +function commands.doifelsecounter(name) commands.doifelse(counterdata[name]) end +function commands.doifcounter (name) commands.doif (counterdata[name]) end +function commands.doifnotcounter (name) commands.doifnot (counterdata[name]) end + +function commands.incrementedcounter(...) context(counters.add(...)) end + +function commands.checkcountersetup(name,level,start,state) + counters.restart(name,1,start,true) -- no reset + counters.setstate(name,state) + counters.setlevel(name,level) + sections.setchecker(name,level,counters.reset) +end + +-- -- move to strc-pag.lua +-- +-- function counters.analyze(name,counterspecification) +-- local cd = counterdata[name] +-- -- safeguard +-- if not cd then +-- return false, false, "no counter data" +-- end +-- -- section data +-- local sectiondata = sections.current() +-- if not sectiondata then +-- return cd, false, "not in section" +-- end +-- local references = sectiondata.references +-- if not references then +-- return cd, false, "no references" +-- end +-- local section = references.section +-- if not section then +-- return cd, false, "no section" +-- end +-- sectiondata = sections.collected[references.section] +-- if not sectiondata then +-- return cd, false, "no section data" +-- end +-- -- local preferences +-- local no = v_no +-- if counterspecification and counterspecification.prefix == no then +-- return cd, false, "current spec blocks prefix" +-- end +-- -- stored preferences (not used) +-- if cd.prefix == no then +-- return cd, false, "entry blocks prefix" +-- end +-- -- sectioning +-- -- if sectiondata.prefix == no then +-- -- return false, false, "sectiondata blocks prefix" +-- -- end +-- -- final verdict +-- return cd, sectiondata, "okay" +-- end +-- +-- function counters.prefixedconverted(name,prefixspec,numberspec) +-- local cd, prefixdata, result = counters.analyze(name,prefixspec) +-- if cd then +-- if prefixdata then +-- sections.typesetnumber(prefixdata,"prefix",prefixspec or false,cd or false) +-- end +-- counters.converted(name,numberspec) +-- end +-- end diff --git a/tex/context/base/strc-pag.lua b/tex/context/base/strc-pag.lua index f70d37d63..63c77c1e6 100644 --- a/tex/context/base/strc-pag.lua +++ b/tex/context/base/strc-pag.lua @@ -1,313 +1,313 @@ -if not modules then modules = { } end modules ['strc-pag'] = { - version = 1.001, - comment = "companion to strc-pag.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local texcount = tex.count - -local allocate, mark = utilities.storage.allocate, utilities.storage.mark - -local trace_pages = false trackers.register("structures.pages", function(v) trace_pages = v end) - -local report_pages = logs.reporter("structure","pages") - -local structures = structures - -local helpers = structures.helpers -local sections = structures.sections -local pages = structures.pages -local sets = structures.sets -local counters = structures.counters - -local counterdata = counters.data - -local variables = interfaces.variables -local context = context - -local processors = typesetters.processors -local applyprocessor = processors.apply -local 
startapplyprocessor = processors.startapply -local stopapplyprocessor = processors.stopapply - --- storage - -local collected, tobesaved = allocate(), allocate() - -pages.collected = collected -pages.tobesaved = tobesaved - -local function initializer() - collected = pages.collected - tobesaved = pages.tobesaved -end - -job.register('structures.pages.collected', tobesaved, initializer) - -local specification = { } -- to be checked - -function pages.save(prefixdata,numberdata) - local realpage, userpage = texcount.realpageno, texcount.userpageno - if realpage > 0 then - if trace_pages then - report_pages("saving page %s.%s",realpage,userpage) - end - local data = { - number = userpage, - block = sections.currentblock(), - prefixdata = prefixdata and helpers.simplify(prefixdata), - numberdata = numberdata and helpers.simplify(numberdata), - } - tobesaved[realpage] = data - if not collected[realpage] then - collected[realpage] = data - end - elseif trace_pages then - report_pages("not saving page %s.%s",realpage,userpage) - end -end - --- We can set the pagenumber but as it only get incremented in the page --- builder we have to make sure it starts at least at 1. - -function counters.specials.userpage() - local r = texcount.realpageno - if r > 0 then - local t = tobesaved[r] - if t then - t.number = texcount.userpageno - if trace_pages then - report_pages("forcing pagenumber of realpage %s to %s",r,t.number) - end - return - end - end - local u = texcount.userpageno - if u == 0 then - if trace_pages then - report_pages("forcing pagenumber of realpage %s to %s (probably a bug)",r,1) - end - counters.setvalue("userpage",1) - texcount.userpageno = 1 - end -end - -local f_convert = string.formatters["\\convertnumber{%s}{%s}"] - -local function convertnumber(str,n) - return f_convert(str or "numbers",n) -end - -function pages.number(realdata,pagespec) - local userpage, block = realdata.number, realdata.block or "" -- sections.currentblock() - local numberspec = realdata.numberdata - local conversionset = (pagespec and pagespec.conversionset ~= "" and pagespec.conversionset) or (numberspec and numberspec.conversionset ~= "" and numberspec.conversionset) or "" - local conversion = (pagespec and pagespec.conversion ~= "" and pagespec.conversion ) or (numberspec and numberspec.conversion ~= "" and numberspec.conversion ) or "" - local starter = (pagespec and pagespec.starter ~= "" and pagespec.starter ) or (numberspec and numberspec.starter ~= "" and numberspec.starter ) or "" - local stopper = (pagespec and pagespec.stopper ~= "" and pagespec.stopper ) or (numberspec and numberspec.stopper ~= "" and numberspec.stopper ) or "" - if starter ~= "" then - applyprocessor(starter) - end - if conversion ~= "" then - context.convertnumber(conversion,userpage) - else - if conversionset == "" then conversionset = "default" end - local theconversion = sets.get("structure:conversions",block,conversionset,1,"numbers") -- to be checked: 1 - local data = startapplyprocessor(theconversion) - context.convertnumber(data or "number",userpage) - stopapplyprocessor() - end - if stopper ~= "" then - applyprocessors(stopper) - end -end - --- (pagespec.prefix == yes|unset) and (pages.prefix == yes) => prefix - -function pages.analyze(entry,pagespecification) - -- safeguard - if not entry then - return false, false, "no entry" - end - local references = entry.references - if not references then - return false, false, "no references" - end - local pagedata = references.pagedata -- sometimes resolved (external) - if not 
pagedata then - local realpage = references.realpage - if realpage then - pagedata = collected[realpage] - else - return false, false, "no realpage" - end - end - if not pagedata then - return false, false, "no pagedata" - end - local sectiondata = references.sectiondata -- sometimes resolved (external) - if not sectiondata then - local section = references.section - if section then - sectiondata = sections.collected[section] - else - return pagedata, false, "no section" - end - end - if not sectiondata then - return pagedata, false, "no sectiondata" - end - local no = variables.no - -- local preferences - if pagespecification and pagespecification.prefix == no then - return pagedata, false, "current spec blocks prefix" - end - -- stored preferences - -- if entry.prefix == no then - -- return pagedata, false, "entry blocks prefix" - -- end - -- stored page state - pagespecification = pagedata.prefixdata - if pagespecification and pagespecification.prefix == no then - return pagedata, false, "pagedata blocks prefix" - end - -- final verdict - return pagedata, sectiondata, "okay" -end - -function helpers.page(data,pagespec) - if data then - local pagedata = pages.analyze(data,pagespec) - if pagedata then - pages.number(pagedata,pagespec) - end - end -end - -function helpers.prefixpage(data,prefixspec,pagespec) - if data then - local pagedata, prefixdata, e = pages.analyze(data,pagespec) - if pagedata then - if prefixdata then - sections.typesetnumber(prefixdata,"prefix",prefixspec or false,prefixdata or false,pagedata.prefixdata or false) - end - pages.number(pagedata,pagespec) - end - end -end - -function helpers.prefixlastpage(data,prefixspec,pagespec) - if data then - local r = data.references - local ls, lr = r.section, r.realpage - r.section, r.realpage = r.lastsection or r.section, r.lastrealpage or r.realpage - helpers.prefixpage(data,prefixspec,pagespec) - r.section, r.realpage = ls, lr - end -end - --- - -function helpers.analyze(entry,specification) - -- safeguard - if not entry then - return false, false, "no entry" - end - local yes, no = variables.yes, variables.no - -- section data - local references = entry.references - if not references then - return entry, false, "no references" - end - local section = references.section - if not section then - return entry, false, "no section" - end - local sectiondata = sections.collected[references.section] - if not sectiondata then - return entry, false, "no section data" - end - -- local preferences - if specification and specification.prefix == no then - return entry, false, "current spec blocks prefix" - end - -- stored preferences (not used) - local prefixdata = entry.prefixdata - if prefixdata and prefixdata.prefix == no then - return entry, false, "entry blocks prefix" - end - -- final verdict - return entry, sectiondata, "okay" -end - -function helpers.prefix(data,prefixspec) - if data then - local _, prefixdata, status = helpers.analyze(data,prefixspec) - if prefixdata then - sections.typesetnumber(prefixdata,"prefix",prefixspec or false,data.prefixdata or false,prefixdata or false) - end - end -end - -function pages.is_odd(n) - n = n or texcount.realpageno - if texcount.pagenoshift % 2 == 0 then - return n % 2 == 0 - else - return n % 2 ~= 0 - end -end - --- move to strc-pag.lua - -function counters.analyze(name,counterspecification) - local cd = counterdata[name] - -- safeguard - if not cd then - return false, false, "no counter data" - end - -- section data - local sectiondata = sections.current() - if not sectiondata then - 
return cd, false, "not in section" - end - local references = sectiondata.references - if not references then - return cd, false, "no references" - end - local section = references.section - if not section then - return cd, false, "no section" - end - sectiondata = sections.collected[references.section] - if not sectiondata then - return cd, false, "no section data" - end - -- local preferences - local no = variables.no - if counterspecification and counterspecification.prefix == no then - return cd, false, "current spec blocks prefix" - end - -- stored preferences (not used) - if cd.prefix == no then - return cd, false, "entry blocks prefix" - end - -- sectioning - -- if sectiondata.prefix == no then - -- return false, false, "sectiondata blocks prefix" - -- end - -- final verdict - return cd, sectiondata, "okay" -end - -function sections.prefixedconverted(name,prefixspec,numberspec) - local cd, prefixdata, result = counters.analyze(name,prefixspec) - if cd then - if prefixdata then - sections.typesetnumber(prefixdata,"prefix",prefixspec or false,cd or false) - end - counters.converted(name,numberspec) - end -end +if not modules then modules = { } end modules ['strc-pag'] = { + version = 1.001, + comment = "companion to strc-pag.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local texcount = tex.count + +local allocate, mark = utilities.storage.allocate, utilities.storage.mark + +local trace_pages = false trackers.register("structures.pages", function(v) trace_pages = v end) + +local report_pages = logs.reporter("structure","pages") + +local structures = structures + +local helpers = structures.helpers +local sections = structures.sections +local pages = structures.pages +local sets = structures.sets +local counters = structures.counters + +local counterdata = counters.data + +local variables = interfaces.variables +local context = context + +local processors = typesetters.processors +local applyprocessor = processors.apply +local startapplyprocessor = processors.startapply +local stopapplyprocessor = processors.stopapply + +-- storage + +local collected, tobesaved = allocate(), allocate() + +pages.collected = collected +pages.tobesaved = tobesaved + +local function initializer() + collected = pages.collected + tobesaved = pages.tobesaved +end + +job.register('structures.pages.collected', tobesaved, initializer) + +local specification = { } -- to be checked + +function pages.save(prefixdata,numberdata) + local realpage, userpage = texcount.realpageno, texcount.userpageno + if realpage > 0 then + if trace_pages then + report_pages("saving page %s.%s",realpage,userpage) + end + local data = { + number = userpage, + block = sections.currentblock(), + prefixdata = prefixdata and helpers.simplify(prefixdata), + numberdata = numberdata and helpers.simplify(numberdata), + } + tobesaved[realpage] = data + if not collected[realpage] then + collected[realpage] = data + end + elseif trace_pages then + report_pages("not saving page %s.%s",realpage,userpage) + end +end + +-- We can set the pagenumber but as it only get incremented in the page +-- builder we have to make sure it starts at least at 1. 
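-- [editor's sketch, not part of the patch] The counters.specials table defined in
-- strc-num.lua lets a counter register a hook that runs whenever that counter is
-- synchronized; the userpage special below uses it to force the user page number to at
-- least 1, because the page builder only increments it afterwards. A self-contained
-- approximation of that interplay, with the TeX count registers replaced by a plain
-- table (an assumption for illustration only):

local texcount = { userpageno = 0 } -- stand-in for tex.count
local counterspecials = { }

counterspecials.userpage = function(name)
    if texcount.userpageno == 0 then
        texcount.userpageno = 1 -- never leave the user page number at 0
    end
end

local function synchronize(name,value)
    texcount.userpageno = value -- the real code goes through texsetcount
    local special = counterspecials[name]
    if special then
        special(name) -- counter specific fixup hook
    end
end

synchronize("userpage",0)
print(texcount.userpageno) -- prints 1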
+ +function counters.specials.userpage() + local r = texcount.realpageno + if r > 0 then + local t = tobesaved[r] + if t then + t.number = texcount.userpageno + if trace_pages then + report_pages("forcing pagenumber of realpage %s to %s",r,t.number) + end + return + end + end + local u = texcount.userpageno + if u == 0 then + if trace_pages then + report_pages("forcing pagenumber of realpage %s to %s (probably a bug)",r,1) + end + counters.setvalue("userpage",1) + texcount.userpageno = 1 + end +end + +local f_convert = string.formatters["\\convertnumber{%s}{%s}"] + +local function convertnumber(str,n) + return f_convert(str or "numbers",n) +end + +function pages.number(realdata,pagespec) + local userpage, block = realdata.number, realdata.block or "" -- sections.currentblock() + local numberspec = realdata.numberdata + local conversionset = (pagespec and pagespec.conversionset ~= "" and pagespec.conversionset) or (numberspec and numberspec.conversionset ~= "" and numberspec.conversionset) or "" + local conversion = (pagespec and pagespec.conversion ~= "" and pagespec.conversion ) or (numberspec and numberspec.conversion ~= "" and numberspec.conversion ) or "" + local starter = (pagespec and pagespec.starter ~= "" and pagespec.starter ) or (numberspec and numberspec.starter ~= "" and numberspec.starter ) or "" + local stopper = (pagespec and pagespec.stopper ~= "" and pagespec.stopper ) or (numberspec and numberspec.stopper ~= "" and numberspec.stopper ) or "" + if starter ~= "" then + applyprocessor(starter) + end + if conversion ~= "" then + context.convertnumber(conversion,userpage) + else + if conversionset == "" then conversionset = "default" end + local theconversion = sets.get("structure:conversions",block,conversionset,1,"numbers") -- to be checked: 1 + local data = startapplyprocessor(theconversion) + context.convertnumber(data or "number",userpage) + stopapplyprocessor() + end + if stopper ~= "" then + applyprocessors(stopper) + end +end + +-- (pagespec.prefix == yes|unset) and (pages.prefix == yes) => prefix + +function pages.analyze(entry,pagespecification) + -- safeguard + if not entry then + return false, false, "no entry" + end + local references = entry.references + if not references then + return false, false, "no references" + end + local pagedata = references.pagedata -- sometimes resolved (external) + if not pagedata then + local realpage = references.realpage + if realpage then + pagedata = collected[realpage] + else + return false, false, "no realpage" + end + end + if not pagedata then + return false, false, "no pagedata" + end + local sectiondata = references.sectiondata -- sometimes resolved (external) + if not sectiondata then + local section = references.section + if section then + sectiondata = sections.collected[section] + else + return pagedata, false, "no section" + end + end + if not sectiondata then + return pagedata, false, "no sectiondata" + end + local no = variables.no + -- local preferences + if pagespecification and pagespecification.prefix == no then + return pagedata, false, "current spec blocks prefix" + end + -- stored preferences + -- if entry.prefix == no then + -- return pagedata, false, "entry blocks prefix" + -- end + -- stored page state + pagespecification = pagedata.prefixdata + if pagespecification and pagespecification.prefix == no then + return pagedata, false, "pagedata blocks prefix" + end + -- final verdict + return pagedata, sectiondata, "okay" +end + +function helpers.page(data,pagespec) + if data then + local pagedata = 
pages.analyze(data,pagespec) + if pagedata then + pages.number(pagedata,pagespec) + end + end +end + +function helpers.prefixpage(data,prefixspec,pagespec) + if data then + local pagedata, prefixdata, e = pages.analyze(data,pagespec) + if pagedata then + if prefixdata then + sections.typesetnumber(prefixdata,"prefix",prefixspec or false,prefixdata or false,pagedata.prefixdata or false) + end + pages.number(pagedata,pagespec) + end + end +end + +function helpers.prefixlastpage(data,prefixspec,pagespec) + if data then + local r = data.references + local ls, lr = r.section, r.realpage + r.section, r.realpage = r.lastsection or r.section, r.lastrealpage or r.realpage + helpers.prefixpage(data,prefixspec,pagespec) + r.section, r.realpage = ls, lr + end +end + +-- + +function helpers.analyze(entry,specification) + -- safeguard + if not entry then + return false, false, "no entry" + end + local yes, no = variables.yes, variables.no + -- section data + local references = entry.references + if not references then + return entry, false, "no references" + end + local section = references.section + if not section then + return entry, false, "no section" + end + local sectiondata = sections.collected[references.section] + if not sectiondata then + return entry, false, "no section data" + end + -- local preferences + if specification and specification.prefix == no then + return entry, false, "current spec blocks prefix" + end + -- stored preferences (not used) + local prefixdata = entry.prefixdata + if prefixdata and prefixdata.prefix == no then + return entry, false, "entry blocks prefix" + end + -- final verdict + return entry, sectiondata, "okay" +end + +function helpers.prefix(data,prefixspec) + if data then + local _, prefixdata, status = helpers.analyze(data,prefixspec) + if prefixdata then + sections.typesetnumber(prefixdata,"prefix",prefixspec or false,data.prefixdata or false,prefixdata or false) + end + end +end + +function pages.is_odd(n) + n = n or texcount.realpageno + if texcount.pagenoshift % 2 == 0 then + return n % 2 == 0 + else + return n % 2 ~= 0 + end +end + +-- move to strc-pag.lua + +function counters.analyze(name,counterspecification) + local cd = counterdata[name] + -- safeguard + if not cd then + return false, false, "no counter data" + end + -- section data + local sectiondata = sections.current() + if not sectiondata then + return cd, false, "not in section" + end + local references = sectiondata.references + if not references then + return cd, false, "no references" + end + local section = references.section + if not section then + return cd, false, "no section" + end + sectiondata = sections.collected[references.section] + if not sectiondata then + return cd, false, "no section data" + end + -- local preferences + local no = variables.no + if counterspecification and counterspecification.prefix == no then + return cd, false, "current spec blocks prefix" + end + -- stored preferences (not used) + if cd.prefix == no then + return cd, false, "entry blocks prefix" + end + -- sectioning + -- if sectiondata.prefix == no then + -- return false, false, "sectiondata blocks prefix" + -- end + -- final verdict + return cd, sectiondata, "okay" +end + +function sections.prefixedconverted(name,prefixspec,numberspec) + local cd, prefixdata, result = counters.analyze(name,prefixspec) + if cd then + if prefixdata then + sections.typesetnumber(prefixdata,"prefix",prefixspec or false,cd or false) + end + counters.converted(name,numberspec) + end +end diff --git a/tex/context/base/strc-ref.lua 
b/tex/context/base/strc-ref.lua index 284418c48..54484fabe 100644 --- a/tex/context/base/strc-ref.lua +++ b/tex/context/base/strc-ref.lua @@ -1,2158 +1,2158 @@ -if not modules then modules = { } end modules ['strc-ref'] = { - version = 1.001, - comment = "companion to strc-ref.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- beware, this is a first step in the rewrite (just getting rid of --- the tuo file); later all access and parsing will also move to lua - --- the useddata and pagedata names might change --- todo: pack exported data - --- todo: autoload components when ::: - -local format, find, gmatch, match, concat = string.format, string.find, string.gmatch, string.match, table.concat -local texcount, texsetcount = tex.count, tex.setcount -local rawget, tonumber = rawget, tonumber -local lpegmatch = lpeg.match -local copytable = table.copy -local formatters = string.formatters - -local allocate = utilities.storage.allocate -local mark = utilities.storage.mark -local setmetatableindex = table.setmetatableindex - -local trace_referencing = false trackers.register("structures.referencing", function(v) trace_referencing = v end) -local trace_analyzing = false trackers.register("structures.referencing.analyzing", function(v) trace_analyzing = v end) -local trace_identifying = false trackers.register("structures.referencing.identifying", function(v) trace_identifying = v end) -local trace_importing = false trackers.register("structures.referencing.importing", function(v) trace_importing = v end) -local trace_empty = false trackers.register("structures.referencing.empty", function(v) trace_empty = v end) - -local check_duplicates = true - -directives.register("structures.referencing.checkduplicates", function(v) - check_duplicates = v -end) - -local report_references = logs.reporter("references") -local report_unknown = logs.reporter("references","unknown") -local report_identifying = logs.reporter("references","identifying") -local report_importing = logs.reporter("references","importing") -local report_empty = logs.reporter("references","empty") - -local variables = interfaces.variables -local constants = interfaces.constants -local context = context - -local v_default = variables.default -local v_url = variables.url -local v_file = variables.file -local v_unknown = variables.unknown -local v_yes = variables.yes - -local texcount = tex.count -local texconditionals = tex.conditionals - -local productcomponent = resolvers.jobs.productcomponent -local justacomponent = resolvers.jobs.justacomponent - -local logsnewline = logs.newline -local logspushtarget = logs.pushtarget -local logspoptarget = logs.poptarget - -local settings_to_array = utilities.parsers.settings_to_array -local unsetvalue = attributes.unsetvalue - -local structures = structures -local helpers = structures.helpers -local sections = structures.sections -local references = structures.references -local lists = structures.lists -local counters = structures.counters - --- some might become local - -references.defined = references.defined or allocate() - -local defined = references.defined -local derived = allocate() -local specials = allocate() -local runners = allocate() -local internals = allocate() -local filters = allocate() -local executers = allocate() -local handlers = allocate() -local tobesaved = allocate() -local collected = allocate() -local tobereferred = allocate() -local referred = allocate() - 
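-- [editor's sketch, not part of the patch] The tobesaved/collected pair declared above
-- follows the usual multipass pattern: tobesaved is filled during the current run, a
-- finalizer writes it to the utility (tuc) file, and on the next run an initializer
-- presents that data as collected. A stand-alone imitation of that round trip, with the
-- job machinery reduced to one plain table acting as the saved file (all names here are
-- made up for the example):

local store = { } -- plays the role of the tuc file

local tobesaved = { }
local collected = { }

local function finalizer() -- end of a run: save what was gathered
    store.references = tobesaved
end

local function initializer() -- start of the next run: load last run's data
    collected = store.references or { }
end

-- run 1: nothing collected yet, but a reference gets registered
tobesaved.intro = { realpage = 3 }
finalizer()

-- run 2: the previous run's data is now available, e.g. for forward references
initializer()
print(collected.intro.realpage) -- prints 3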
-references.derived = derived -references.specials = specials -references.runners = runners -references.internals = internals -references.filters = filters -references.executers = executers -references.handlers = handlers -references.tobesaved = tobesaved -references.collected = collected -references.tobereferred = tobereferred -references.referred = referred - -local splitreference = references.splitreference -local splitprefix = references.splitcomponent -- replaces: references.splitprefix -local prefixsplitter = references.prefixsplitter -local componentsplitter = references.componentsplitter - -local currentreference = nil - -storage.register("structures/references/defined", references.defined, "structures.references.defined") - -local initializers = { } -local finalizers = { } - -function references.registerinitializer(func) -- we could use a token register instead - initializers[#initializers+1] = func -end -function references.registerfinalizer(func) -- we could use a token register instead - finalizers[#finalizers+1] = func -end - -local function initializer() -- can we use a tobesaved as metatable for collected? - tobesaved = references.tobesaved - collected = references.collected - for i=1,#initializers do - initializers[i](tobesaved,collected) - end -end - -local function finalizer() - for i=1,#finalizers do - finalizers[i](tobesaved) - end -end - -job.register('structures.references.collected', tobesaved, initializer, finalizer) - -local maxreferred = 1 -local nofreferred = 0 - --- local function initializer() -- can we use a tobesaved as metatable for collected? --- tobereferred = references.tobereferred --- referred = references.referred --- nofreferred = #referred --- end - -local function initializer() -- can we use a tobesaved as metatable for collected? - tobereferred = references.tobereferred - referred = references.referred - setmetatableindex(referred,get) -- hm, what is get ? -end - --- We make the array sparse (maybe a finalizer should optionally return a table) because --- there can be quite some page links involved. We only store one action number per page --- which is normally good enough for what we want (e.g. see above/below) and we do --- a combination of a binary search and traverse backwards. A previous implementation --- always did a traverse and was pretty slow on a large number of links (given that this --- methods was used). It took me about a day to locate this as a bottleneck in processing --- a 2500 page interactive document with 60 links per page. In that case, traversing --- thousands of slots per link then brings processing to a grinding halt (especially when --- there are no slots at all, which is the case in a first run). 
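-- [editor's sketch, not part of the patch] The comment above describes compressing the
-- per-page table into runs (one pair per stretch of equal values, keeping the value and
-- the first index of the run) and locating the entry for a page with a binary search
-- plus a backward step. A self-contained approximation of that idea in plain Lua; the
-- data and the names compress/lookup are invented for this example:

local floor = math.floor

local function compress(full,maxindex)
    local runs, n, last = { }, 0, nil
    for i=1,maxindex do
        local v = full[i]
        if v ~= nil and v ~= last then
            n = n + 1
            runs[n] = { v, i } -- value and first page of the run
            last = v
        end
    end
    return runs
end

local function lookup(runs,page)
    local min, max = 1, #runs
    while min <= max do
        local mid = floor((min+max)/2)
        local first = runs[mid][2]
        if first == page then
            return runs[mid][1]
        elseif first < page then
            min = mid + 1
        else
            max = mid - 1
        end
    end
    return max > 0 and runs[max][1] or nil -- nearest run starting before this page
end

local runs = compress({ 10, 10, 10, 12, 12, 12, 12, 12, 12 },9)
print(lookup(runs,2),lookup(runs,7)) -- prints 10  12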
- -local sparsetobereferred = { } - -local function finalizer() - local lastr, lasti - local n = 0 - for i=1,maxreferred do - local r = tobereferred[i] - if not lastr then - lastr = r - lasti = i - elseif r ~= lastr then - n = n + 1 - sparsetobereferred[n] = { lastr, lasti } - lastr = r - lasti = i - end - end - if lastr then - n = n + 1 - sparsetobereferred[n] = { lastr, lasti } - end -end - -job.register('structures.references.referred', sparsetobereferred, initializer, finalizer) - -local function referredpage(n) - local max = nofreferred - if max > 0 then - -- find match - local min = 1 - while true do - local mid = floor((min+max)/2) - local r = referred[mid] - local m = r[2] - if n == m then - return r[1] - elseif n > m then - min = mid + 1 - else - max = mid - 1 - end - if min > max then - break - end - end - -- find first previous - for i=min,1,-1 do - local r = referred[i] - if r and r[2] < n then - return r[1] - end - end - end - -- fallback - return texcount.realpageno -end - -references.referredpage = referredpage - -function references.registerpage(n) -- called in the backend code - if not tobereferred[n] then - if n > maxreferred then - maxreferred = n - end - tobereferred[n] = texcount.realpageno - end -end - --- todo: delay split till later as in destinations we split anyway - -local orders, lastorder = { }, 0 - -local function setnextorder(kind,name) - lastorder = 0 - if kind and name then - local ok = orders[kind] - if not ok then - ok = { } - orders[kind] = ok - end - lastorder = (ok[name] or 0) + 1 - ok[name] = lastorder - end - texsetcount("global","locationorder",lastorder) -end - -references.setnextorder = setnextorder - -function references.setnextinternal(kind,name) - setnextorder(kind,name) -- always incremented with internal - local n = texcount.locationcount + 1 - texsetcount("global","locationcount",n) - return n -end - -function references.currentorder(kind,name) - return orders[kind] and orders[kind][name] or lastorder -end - -local function setcomponent(data) - -- we might consider doing this at the tex end, just like prefix - local component = productcomponent() - if component then - local references = data and data.references - if references then - references.component = component - end - return component - end - -- but for the moment we do it here (experiment) -end - -commands.setnextinternalreference = references.setnextinternal - -function commands.currentreferenceorder(kind,name) - context(references.currentorder(kind,name)) -end - -references.setcomponent = setcomponent - -function references.set(kind,prefix,tag,data) --- setcomponent(data) - local pd = tobesaved[prefix] -- nicer is a metatable - if not pd then - pd = { } - tobesaved[prefix] = pd - end - local n = 0 - for ref in gmatch(tag,"[^,]+") do - if ref ~= "" then - if check_duplicates and pd[ref] then - if prefix and prefix ~= "" then - report_references("redundant reference %a in namespace %a",ref,prefix) - else - report_references("redundant reference %a",ref) - end - else - n = n + 1 - pd[ref] = data - context.dofinishsomereference(kind,prefix,ref) - end - end - end - return n > 0 -end - -function references.enhance(prefix,tag) - local l = tobesaved[prefix][tag] - if l then - l.references.realpage = texcount.realpageno - end -end - -commands.enhancereference = references.enhance - --- -- -- related to strc-ini.lua -- -- -- - -references.resolvers = references.resolvers or { } -local resolvers = references.resolvers - -local function getfromlist(var) - local vi = var.i - if vi then - vi = 
vi[3] or lists.collected[vi[2]] - if vi then - local r = vi.references and vi.references - if r then - r = r.realpage - end - if not r then - r = vi.pagedata and vi.pagedata - if r then - r = r.realpage - end - end - var.i = vi - var.r = r or 1 - else - var.i = nil - var.r = 1 - end - else - var.i = nil - var.r = 1 - end -end - --- resolvers.section = getfromlist --- resolvers.float = getfromlist --- resolvers.description = getfromlist --- resolvers.formula = getfromlist --- resolvers.note = getfromlist - -setmetatableindex(resolvers,function(t,k) - local v = getfromlist - resolvers[k] = v - return v -end) - -function resolvers.reference(var) - local vi = var.i[2] -- check - if vi then - var.i = vi - var.r = (vi.references and vi.references.realpage) or (vi.pagedata and vi.pagedata.realpage) or 1 - else - var.i = nil - var.r = 1 - end -end - -local function register_from_lists(collected,derived,pages,sections) - local g = derived[""] if not g then g = { } derived[""] = g end -- global - for i=1,#collected do - local entry = collected[i] - local m, r = entry.metadata, entry.references - if m and r then - local reference = r.reference or "" - local prefix = r.referenceprefix or "" - local component = r.component and r.component or "" - if reference ~= "" then - local kind, realpage = m.kind, r.realpage - if kind and realpage then - local d = derived[prefix] - if not d then - d = { } - derived[prefix] = d - end - local c = derived[component] - if not c then - c = { } - derived[component] = c - end - local t = { kind, i, entry } - for s in gmatch(reference,"%s*([^,]+)") do - if trace_referencing then - report_references("list entry %a provides %a reference %a on realpage %a",i,kind,s,realpage) - end - c[s] = c[s] or t -- share them - d[s] = d[s] or t -- share them - g[s] = g[s] or t -- first wins - end - end - end - end - end --- inspect(derived) -end - -references.registerinitializer(function() register_from_lists(lists.collected,derived) end) - --- urls - -references.urls = references.urls or { } -references.urls.data = references.urls.data or { } - -local urls = references.urls.data - -function references.urls.define(name,url,file,description) - if name and name ~= "" then - urls[name] = { url or "", file or "", description or url or file or ""} - end -end - -local pushcatcodes = context.pushcatcodes -local popcatcodes = context.popcatcodes -local txtcatcodes = catcodes.numbers.txtcatcodes -- or just use "txtcatcodes" - -function references.urls.get(name) - local u = urls[name] - if u then - local url, file = u[1], u[2] - if file and file ~= "" then - return formatters["%s/%s"](url,file) - else - return url - end - end -end - -function commands.geturl(name) - local url = references.urls.get(name) - if url and url ~= "" then - pushcatcodes(txtcatcodes) - context(url) - popcatcodes() - end -end - --- function commands.gethyphenatedurl(name,...) --- local url = references.urls.get(name) --- if url and url ~= "" then --- hyphenatedurl(url,...) 
--- end --- end - -function commands.doifurldefinedelse(name) - commands.doifelse(urls[name]) -end - -commands.useurl= references.urls.define - --- files - -references.files = references.files or { } -references.files.data = references.files.data or { } - -local files = references.files.data - -function references.files.define(name,file,description) - if name and name ~= "" then - files[name] = { file or "", description or file or "" } - end -end - -function references.files.get(name,method,space) -- method: none, before, after, both, space: yes/no - local f = files[name] - if f then - context(f[1]) - end -end - -function commands.doiffiledefinedelse(name) - commands.doifelse(files[name]) -end - -commands.usefile= references.files.define - --- helpers - -function references.checkedfile(whatever) -- return whatever if not resolved - if whatever then - local w = files[whatever] - if w then - return w[1] - else - return whatever - end - end -end - -function references.checkedurl(whatever) -- return whatever if not resolved - if whatever then - local w = urls[whatever] - if w then - local u, f = w[1], w[2] - if f and f ~= "" then - return u .. "/" .. f - else - return u - end - else - return whatever - end - end -end - -function references.checkedfileorurl(whatever,default) -- return nil, nil if not resolved - if whatever then - local w = files[whatever] - if w then - return w[1], nil - else - local w = urls[whatever] - if w then - local u, f = w[1], w[2] - if f and f ~= "" then - return nil, u .. "/" .. f - else - return nil, u - end - end - end - end - return default -end - --- programs - -references.programs = references.programs or { } -references.programs.data = references.programs.data or { } - -local programs = references.programs.data - -function references.programs.define(name,file,description) - if name and name ~= "" then - programs[name] = { file or "", description or file or ""} - end -end - -function references.programs.get(name) - local f = programs[name] - return f and f[1] -end - -function references.checkedprogram(whatever) -- return whatever if not resolved - if whatever then - local w = programs[whatever] - if w then - return w[1] - else - return whatever - end - end -end - -commands.defineprogram = references.programs.define - -function commands.getprogram(name) - local f = programs[name] - if f then - context(f[1]) - end -end - --- shared by urls and files - -function references.whatfrom(name) - context((urls[name] and v_url) or (files[name] and v_file) or v_unknown) -end - -function references.from(name) - local u = urls[name] - if u then - local url, file, description = u[1], u[2], u[3] - if description ~= "" then - return description - -- ok - elseif file and file ~= "" then - return url .. "/" .. file - else - return url - end - else - local f = files[name] - if f then - local file, description = f[1], f[2] - if description ~= "" then - return description - else - return file - end - end - end -end - -function commands.from(name) - local u = urls[name] - if u then - local url, file, description = u[1], u[2], u[3] - if description ~= "" then - context.dofromurldescription(description) - -- ok - elseif file and file ~= "" then - context.dofromurlliteral(url .. "/" .. 
file) - else - context.dofromurlliteral(url) - end - else - local f = files[name] - if f then - local file, description = f[1], f[2] - if description ~= "" then - context.dofromfiledescription(description) - else - context.dofromfileliteral(file) - end - end - end -end - -function references.define(prefix,reference,list) - local d = defined[prefix] if not d then d = { } defined[prefix] = d end - d[reference] = { "defined", list } -end - -function references.reset(prefix,reference) - local d = defined[prefix] - if d then - d[reference] = nil - end -end - -commands.definereference = references.define -commands.resetreference = references.reset - --- \primaryreferencefoundaction --- \secondaryreferencefoundaction --- \referenceunknownaction - --- t.special t.operation t.arguments t.outer t.inner - --- to what extend do we check the non prefixed variant - -local strict = false - -local function resolve(prefix,reference,args,set) -- we start with prefix,reference - if reference and reference ~= "" then - if not set then - set = { prefix = prefix, reference = reference } - else - set.reference = set.reference or reference - set.prefix = set.prefix or prefix - end - local r = settings_to_array(reference) - for i=1,#r do - local ri = r[i] - local d - if strict then - d = defined[prefix] or defined[""] - d = d and d[ri] - else - d = defined[prefix] - d = d and d[ri] - if not d then - d = defined[""] - d = d and d[ri] - end - end - if d then - resolve(prefix,d[2],nil,set) - else - local var = splitreference(ri) - if var then - var.reference = ri - local vo, vi = var.outer, var.inner - if not vo and vi then - -- to be checked - if strict then - d = defined[prefix] or defined[""] - d = d and d[vi] - else - d = defined[prefix] - d = d and d[vi] - if not d then - d = defined[""] - d = d and d[vi] - end - end - -- - if d then - resolve(prefix,d[2],var.arguments,set) -- args can be nil - else - if args then var.arguments = args end - set[#set+1] = var - end - else - if args then var.arguments = args end - set[#set+1] = var - end - if var.has_tex then - set.has_tex = true - end - else - -- report_references("funny pattern %a",ri) - end - end - end - return set - else - return { } - end -end - --- prefix == "" is valid prefix which saves multistep lookup - -references.currentset = nil - -function commands.setreferenceoperation(k,v) - references.currentset[k].operation = v -end - -function commands.setreferencearguments(k,v) - references.currentset[k].arguments = v -end - -local expandreferenceoperation = context.expandreferenceoperation -local expandreferencearguments = context.expandreferencearguments - -function references.expandcurrent() -- todo: two booleans: o_has_tex& a_has_tex - local currentset = references.currentset - if currentset and currentset.has_tex then - for i=1,#currentset do - local ci = currentset[i] - local operation = ci.operation - if operation and find(operation,"\\") then -- if o_has_tex then - expandreferenceoperation(i,operation) - end - local arguments = ci.arguments - if arguments and find(arguments,"\\") then -- if a_has_tex then - expandreferencearguments(i,arguments) - end - end - end -end - -commands.expandcurrentreference = references.expandcurrent -- for the moment the same - -local externals = { } - --- we have prefixes but also components: --- --- : prefix --- :: always external --- ::: internal (for products) or external (for components) - -local function loadexternalreferences(name,utilitydata) - local struc = utilitydata.structures - if struc then - local external = 
struc.references.collected -- direct references - local lists = struc.lists.collected -- indirect references (derived) - local pages = struc.pages.collected -- pagenumber data - -- a bit weird one, as we don't have the externals in the collected - for prefix, set in next, external do - for reference, data in next, set do - if trace_importing then - report_importing("registering %a reference, kind %a, name %a, prefix %a, reference %a", - "external","regular",name,prefix,reference) - end - local section = reference.section - local realpage = reference.realpage - if section then - reference.sectiondata = lists[section] - end - if realpage then - reference.pagedata = pages[realpage] - end - end - end - for i=1,#lists do - local entry = lists[i] - local metadata = entry.metadata - local references = entry.references - if metadata and references then - local reference = references.reference - if reference and reference ~= "" then - local kind = metadata.kind - local realpage = references.realpage - if kind and realpage then - references.pagedata = pages[realpage] - local prefix = references.referenceprefix or "" - local target = external[prefix] - if not target then - target = { } - external[prefix] = target - end - for s in gmatch(reference,"%s*([^,]+)") do - if trace_importing then - report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a", - "external",kind,name,prefix,s) - end - target[s] = target[s] or entry - end - end - end - end - end - externals[name] = external - return external - end -end - -local externalfiles = { } - -table.setmetatableindex(externalfiles, function(t,k) - local v = files[k] - if not v then - v = { k, k } - end - externalfiles[k] = v - return v -end) - -table.setmetatableindex(externals,function(t,k) -- either or not automatically - local filename = externalfiles[k][1] -- filename - local fullname = file.replacesuffix(filename,"tuc") - if lfs.isfile(fullname) then -- todo: use other locator - local utilitydata = job.loadother(fullname) - if utilitydata then - local external = loadexternalreferences(k,utilitydata) - t[k] = external or false - return external - end - end - t[k] = false - return false -end) - -local productdata = allocate { - productreferences = { }, - componentreferences = { }, - components = { }, -} - -references.productdata = productdata - -local function loadproductreferences(productname,componentname,utilitydata) - local struc = utilitydata.structures - if struc then - local productreferences = struc.references.collected -- direct references - local lists = struc.lists.collected -- indirect references (derived) - local pages = struc.pages.collected -- pagenumber data - -- we use indirect tables to save room but as they are eventually - -- just references we resolve them to data here (the mechanisms - -- that use this data check for indirectness) - for prefix, set in next, productreferences do - for reference, data in next, set do - if trace_importing then - report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a", - "product","regular",productname,prefix,reference) - end - local section = reference.section - local realpage = reference.realpage - if section then - reference.sectiondata = lists[section] - end - if realpage then - reference.pagedata = pages[realpage] - end - end - end - -- - local componentreferences = { } - for i=1,#lists do - local entry = lists[i] - local metadata = entry.metadata - local references = entry.references - if metadata and references then - local reference = 
references.reference - if reference and reference ~= "" then - local kind = metadata.kind - local realpage = references.realpage - if kind and realpage then - references.pagedata = pages[realpage] - local prefix = references.referenceprefix or "" - local component = references.component - local ctarget, ptarget - if not component or component == componentname then - -- skip - else - -- one level up - local external = componentreferences[component] - if not external then - external = { } - componentreferences[component] = external - end - if component == prefix then - prefix = "" - end - ctarget = external[prefix] - if not ctarget then - ctarget = { } - external[prefix] = ctarget - end - end - ptarget = productreferences[prefix] - if not ptarget then - ptarget = { } - productreferences[prefix] = ptarget - end - for s in gmatch(reference,"%s*([^,]+)") do - if ptarget then - if trace_importing then - report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a", - "product",kind,productname,prefix,s) - end - ptarget[s] = ptarget[s] or entry - end - if ctarget then - if trace_importing then - report_importing("registering %s reference, kind %a, name %a, prefix %a, referenc %a", - "component",kind,productname,prefix,s) - end - ctarget[s] = ctarget[s] or entry - end - end - end - end - end - end - productdata.productreferences = productreferences -- not yet used - productdata.componentreferences = componentreferences - end -end - -local function loadproductvariables(product,component,utilitydata) - local struc = utilitydata.structures - if struc then - local lists = struc.lists and struc.lists.collected - if lists then - local pages = struc.pages and struc.pages.collected - for i=1,#lists do - local li = lists[i] - if li.metadata.kind == "section" and li.references.component == component then - local firstsection = li - if firstsection.numberdata then - local numbers = firstsection.numberdata.numbers - if numbers then - if trace_importing then - report_importing("initializing section number to %:t",numbers) - end - productdata.firstsection = firstsection - structures.documents.preset(numbers) - end - end - if pages and firstsection.references then - local firstpage = pages[firstsection.references.realpage] - local number = firstpage and firstpage.number - if number then - if trace_importing then - report_importing("initializing page number to %a",number) - end - productdata.firstpage = firstpage - counters.set("userpage",1,number) - end - end - break - end - end - end - end -end - -local function componentlist(tree,target) - local branches = tree and tree.branches - if branches then - for i=1,#branches do - local branch = branches[i] - local type = branch.type - if type == "component" then - if target then - target[#target+1] = branch.name - else - target = { branch.name } - end - elseif type == "product" or type == "component" then - target = componentlist(branch,target) - end - end - end - return target -end - -local function loadproductcomponents(product,component,utilitydata) - local job = utilitydata.job - productdata.components = componentlist(job and job.structure and job.structure.collected) or { } -end - -references.registerinitializer(function(tobesaved,collected) - -- not that much related to tobesaved or collected - productdata.components = componentlist(job.structure.collected) or { } -end) - -function structures.references.loadpresets(product,component) -- we can consider a special components hash - if product and component and product~= "" and component ~= 
"" and not productdata.product then -- maybe: productdata.filename ~= filename - productdata.product = product - productdata.component = component - local fullname = file.replacesuffix(product,"tuc") - if lfs.isfile(fullname) then -- todo: use other locator - local utilitydata = job.loadother(fullname) - if utilitydata then - if trace_importing then - report_importing("loading references for component %a of product %a from %a",component,product,fullname) - end - loadproductvariables (product,component,utilitydata) - loadproductreferences(product,component,utilitydata) - loadproductcomponents(product,component,utilitydata) - -- inspect(productdata) - end - end - end -end - -structures.references.productdata = productdata - -local useproduct = commands.useproduct - -if useproduct then - - function commands.useproduct(product) - useproduct(product) - if texconditionals.autocrossfilereferences then - local component = justacomponent() - if component then - if trace_referencing or trace_importing then - report_references("loading presets for component %a of product %a",component,product) - end - structures.references.loadpresets(product,component) - end - end - end - -end - --- productdata.firstsection.numberdata.numbers --- productdata.firstpage.number - -local function report_identify_special(set,var,i,type) - local reference = set.reference - local prefix = set.prefix or "" - local special = var.special - local error = var.error - local kind = var.kind - if error then - report_identifying("type %a, reference %a, index %a, prefix %a, special %a, error %a",type,reference,i,prefix,special,error) - else - report_identifying("type %a, reference %a, index %a, prefix %a, special %a, kind %a",type,reference,i,prefix,special,kind) - end -end - -local function report_identify_arguments(set,var,i,type) - local reference = set.reference - local prefix = set.prefix or "" - local arguments = var.arguments - local error = var.error - local kind = var.kind - if error then - report_identifying("type %a, reference %a, index %a, prefix %a, arguments %a, error %a",type,reference,i,prefix,arguments,error) - else - report_identifying("type %a, reference %a, index %a, prefix %a, arguments %a, kind %a",type,reference,i,prefix,arguments,kind) - end -end - -local function report_identify_outer(set,var,i,type) - local reference = set.reference - local prefix = set.prefix or "" - local outer = var.outer - local error = var.error - local kind = var.kind - if outer then - if error then - report_identifying("type %a, reference %a, index %a, prefix %a, outer %a, error %a",type,reference,i,prefix,outer,error) - else - report_identifying("type %a, reference %a, index %a, prefix %a, outer %a, kind %a",type,reference,i,prefix,outer,kind) - end - else - if error then - report_identifying("type %a, reference %a, index %a, prefix %a, error %a",type,reference,i,prefix,error) - else - report_identifying("type %a, reference %a, index %a, prefix %a, kind %a",type,reference,i,prefix,kind) - end - end -end - -local function identify_special(set,var,i) - local special = var.special - local s = specials[special] - if s then - local outer = var.outer - local operation = var.operation - local arguments = var.arguments - if outer then - if operation then - -- special(outer::operation) - var.kind = "special outer with operation" - else - -- special() - var.kind = "special outer" - end - var.f = outer - elseif operation then - if arguments then - -- special(operation{argument,argument}) - var.kind = "special operation with arguments" - else - 
-- special(operation) - var.kind = "special operation" - end - else - -- special() - var.kind = "special" - end - if trace_identifying then - report_identify_special(set,var,i,"1a") - end - else - var.error = "unknown special" - end - return var -end - -local function identify_arguments(set,var,i) - local s = specials[var.inner] - if s then - -- inner{argument} - var.kind = "special with arguments" - else - var.error = "unknown inner or special" - end - if trace_identifying then - report_identify_arguments(set,var,i,"3a") - end - return var -end - -local function identify_inner(set,var,prefix,collected,derived,tobesaved) - local inner = var.inner - local outer = var.outer - -- inner ... we could move the prefix logic into the parser so that we have 'm for each entry - -- foo:bar -> foo == prefix (first we try the global one) - -- -:bar -> ignore prefix - local p, i = prefix, nil - local splitprefix, splitinner - -- the next test is a safeguard when references are auto loaded from outer - if inner then - splitprefix, splitinner = lpegmatch(prefixsplitter,inner) - end - -- these are taken from other anonymous references - if splitprefix and splitinner then - if splitprefix == "-" then - i = collected[""] - i = i and i[splitinner] - if i then - p = "" - end - else - i = collected[splitprefix] - i = i and i[splitinner] - if i then - p = splitprefix - end - end - end - -- todo: strict here - if not i then - i = collected[prefix] - i = i and i[inner] - if i then - p = prefix - end - end - if not i and prefix ~= "" then - i = collected[""] - i = i and i[inner] - if i then - p = "" - end - end - if i then - var.i = { "reference", i } - resolvers.reference(var) - var.kind = "inner" - var.p = p - elseif derived then - -- these are taken from other data structures (like lists) - if splitprefix and splitinner then - if splitprefix == "-" then - i = derived[""] - i = i and i[splitinner] - if i then - p = "" - end - else - i = derived[splitprefix] - i = i and i[splitinner] - if i then - p = splitprefix - end - end - end - if not i then - i = derived[prefix] - i = i and i[inner] - if i then - p = prefix - end - end - if not i and prefix ~= "" then - i = derived[""] - i = i and i[inner] - if i then - p = "" - end - end - if i then - var.kind = "inner" - var.i = i - var.p = p - local ri = resolvers[i[1]] - if ri then - ri(var) - else - -- can't happen as we catch it with a metatable now - report_references("unknown inner resolver for %a",i[1]) - end - else - -- no prefixes here - local s = specials[inner] - if s then - var.kind = "special" - else - i = (collected and collected[""] and collected[""][inner]) or - (derived and derived [""] and derived [""][inner]) or - (tobesaved and tobesaved[""] and tobesaved[""][inner]) - if i then - var.kind = "inner" - var.i = { "reference", i } - resolvers.reference(var) - var.p = "" - else - var.error = "unknown inner or special" - end - end - end - end - return var -end - -local function identify_outer(set,var,i) - local outer = var.outer - local inner = var.inner - local external = externals[outer] - if external then - local v = copytable(var) - v = identify_inner(set,v,nil,external) - if v.i and not v.error then - v.kind = "outer with inner" - set.external = true - if trace_identifying then - report_identify_outer(set,v,i,"2a") - end - return v - end - v = copytable(var) - local v = identify_inner(set,v,v.outer,external) - if v.i and not v.error then - v.kind = "outer with inner" - set.external = true - if trace_identifying then - report_identify_outer(set,v,i,"2b") 
- end - return v - end - end - local external = productdata.componentreferences[outer] - if external then - local v = identify_inner(set,copytable(var),nil,external) - if v.i and not v.error then - v.kind = "outer with inner" - set.external = true - if trace_identifying then - report_identify_outer(set,v,i,"2c") - end - return v - end - end - local external = productdata.productreferences[outer] - if external then - local vi = external[inner] - if vi then - var.kind = "outer with inner" - var.i = vi - set.external = true - if trace_identifying then - report_identify_outer(set,var,i,"2d") - end - return var - end - end - -- the rest - local special = var.special - local arguments = var.arguments - local operation = var.operation - if inner then - if arguments then - -- outer::inner{argument} - var.kind = "outer with inner with arguments" - else - -- outer::inner - var.kind = "outer with inner" - end - var.i = { "reference", inner } - resolvers.reference(var) - var.f = outer - if trace_identifying then - report_identify_outer(set,var,i,"2e") - end - elseif special then - local s = specials[special] - if s then - if operation then - if arguments then - -- outer::special(operation{argument,argument}) - var.kind = "outer with special and operation and arguments" - else - -- outer::special(operation) - var.kind = "outer with special and operation" - end - else - -- outer::special() - var.kind = "outer with special" - end - var.f = outer - else - var.error = "unknown outer with special" - end - if trace_identifying then - report_identify_outer(set,var,i,"2f") - end - else - -- outer:: - var.kind = "outer" - var.f = outer - if trace_identifying then - report_identify_outer(set,var,i,"2g") - end - end - return var -end - -local function identify_inner_or_outer(set,var,i) - -- here we fall back on product data - local inner = var.inner - if inner and inner ~= "" then - local v = identify_inner(set,copytable(var),set.prefix,collected,derived,tobesaved) - if v.i and not v.error then - v.kind = "inner" -- check this - if trace_identifying then - report_identify_outer(set,v,i,"4a") - end - return v - end - -local components = job.structure.components - -if components then - for i=1,#components do - local component = components[i] - local data = collected[component] - local vi = data and data[inner] - if vi then - var.outer = component - var.i = vi - var.kind = "outer with inner" - set.external = true - if trace_identifying then - report_identify_outer(set,var,i,"4x") - end - return var - end - end -end - - local componentreferences = productdata.componentreferences - local productreferences = productdata.productreferences - local components = productdata.components - if components and componentreferences then - -- for component, data in next, productdata.componentreferences do -- better do this in order of processing: - for i=1,#components do - local component = components[i] - local data = componentreferences[component] - if data then - local d = data[""] - local vi = d and d[inner] - if vi then - var.outer = component - var.i = vi - var.kind = "outer with inner" - set.external = true - if trace_identifying then - report_identify_outer(set,var,i,"4b") - end - return var - end - end - end - end - local component, inner = lpegmatch(componentsplitter,inner) - if component then - local data = componentreferences and componentreferences[component] - if data then - local d = data[""] - local vi = d and d[inner] - if vi then - var.inner = inner - var.outer = component - var.i = vi - var.kind = "outer with 
inner" - set.external = true - if trace_identifying then - report_identify_outer(set,var,i,"4c") - end - return var - end - end - local data = productreferences and productreferences[component] - if data then - local vi = data[inner] - if vi then - var.inner = inner - var.outer = component - var.i = vi - var.kind = "outer with inner" - set.external = true - if trace_identifying then - report_identify_outer(set,var,i,"4d") - end - return var - end - end - end - var.error = "unknown inner" - else - var.error = "no inner" - end - if trace_identifying then - report_identify_outer(set,var,i,"4e") - end - return var -end - --- local function identify_inner_or_outer(set,var,i) --- -- we might consider first checking with a prefix prepended and then without --- -- which is better for fig:oeps --- local var = do_identify_inner_or_outer(set,var,i) --- if var.error then --- local prefix = set.prefix --- if prefix and prefix ~= "" then --- var.inner = prefix .. ':' .. var.inner --- var.error = nil --- return do_identify_inner_or_outer(set,var,i) --- end --- end --- return var --- end - -local function identify_inner_component(set,var,i) - -- we're in a product (maybe ignore when same as component) - local component = var.component - identify_inner(set,var,component,collected,derived,tobesaved) - if trace_identifying then - report_identify_outer(set,var,i,"5a") - end - return var -end - -local function identify_outer_component(set,var,i) - local component = var.component - local inner = var.inner - local data = productdata.componentreferences[component] - if data then - local d = data[""] - local vi = d and d[inner] - if vi then - var.inner = inner - var.outer = component - var.i = vi - var.kind = "outer with inner" - set.external = true - if trace_identifying then - report_identify_outer(set,var,i,"6a") - end - return var - end - end - local data = productdata.productreferences[component] - if data then - local vi = data[inner] - if vi then - var.inner = inner - var.outer = component - var.i = vi - var.kind = "outer with inner" - set.external = true - if trace_identifying then - report_identify_outer(set,var,i,"6b") - end - return var - end - end - var.error = "unknown component" - if trace_identifying then - report_identify_outer(set,var,i,"6c") - end - return var -end - -local nofidentified = 0 - -local function identify(prefix,reference) - if not reference then - prefix = "" - reference = prefix - end - local set = resolve(prefix,reference) - local bug = false - texcount.referencehastexstate = set.has_tex and 1 or 0 - nofidentified = nofidentified + 1 - set.n = nofidentified - for i=1,#set do - local var = set[i] - if var.special then - var = identify_special(set,var,i) - elseif var.outer then - var = identify_outer(set,var,i) - elseif var.arguments then - var = identify_arguments(set,var,i) - elseif not var.component then - var = identify_inner_or_outer(set,var,i) - elseif productcomponent() then - var = identify_inner_component(set,var,i) - else - var = identify_outer_component(set,var,i) - end - set[i] = var - bug = bug or var.error - end - references.currentset = mark(set) -- mark, else in api doc - if trace_analyzing then - report_references(table.serialize(set,reference)) - end - return set, bug -end - -references.identify = identify - -local unknowns, nofunknowns, f_valid = { }, 0, formatters["[%s][%s]"] - -function references.valid(prefix,reference,highlight,newwindow,layer) - local set, bug = identify(prefix,reference) - local unknown = bug or #set == 0 - if unknown then - currentreference 
= nil -- will go away - local str = f_valid(prefix,reference) - local u = unknowns[str] - if not u then - interfaces.showmessage("references",1,str) -- 1 = unknown, 4 = illegal - unknowns[str] = 1 - nofunknowns = nofunknowns + 1 - else - unknowns[str] = u + 1 - end - else - set.highlight, set.newwindow, set.layer = highlight, newwindow, layer - currentreference = set[1] - end - -- we can do the expansion here which saves a call - return not unknown -end - -function commands.doifelsereference(prefix,reference,highlight,newwindow,layer) - commands.doifelse(references.valid(prefix,reference,highlight,newwindow,layer)) -end - -function references.reportproblems() -- might become local - if nofunknowns > 0 then - statistics.register("cross referencing", function() - return format("%s identified, %s unknown",nofidentified,nofunknowns) - end) - logspushtarget("logfile") - logsnewline() - report_references("start problematic references") - logsnewline() - for k, v in table.sortedpairs(unknowns) do - report_unknown("%4i: %s",v,k) - end - logsnewline() - report_references("stop problematic references") - logsnewline() - logspoptarget() - end -end - -luatex.registerstopactions(references.reportproblems) - -local innermethod = "names" - -function references.setinnermethod(m) - if m then - if m == "page" or m == "mixed" or m == "names" then - innermethod = m - elseif m == true or m == v_yes then - innermethod = "page" - end - end - function references.setinnermethod() - report_references("inner method is already set and frozen to %a",innermethod) - end -end - -function references.getinnermethod() - return innermethod or "names" -end - -directives.register("references.linkmethod", function(v) -- page mixed names - references.setinnermethod(v) -end) - --- this is inconsistent - -function references.setinternalreference(prefix,tag,internal,view) -- needs checking - if innermethod == "page" then - return unsetvalue - else - local t, tn = { }, 0 -- maybe add to current - if tag then - if prefix and prefix ~= "" then - prefix = prefix .. ":" -- watch out, : here - for ref in gmatch(tag,"[^,]+") do - tn = tn + 1 - t[tn] = prefix .. ref - end - else - for ref in gmatch(tag,"[^,]+") do - tn = tn + 1 - t[tn] = ref - end - end - end - if internal and innermethod == "names" then -- mixed or page - tn = tn + 1 - t[tn] = "aut:" .. 
internal - end - local destination = references.mark(t,nil,nil,view) -- returns an attribute - texcount.lastdestinationattribute = destination - return destination - end -end - -function references.setandgetattribute(kind,prefix,tag,data,view) -- maybe do internal automatically here - local attr = references.set(kind,prefix,tag,data) and references.setinternalreference(prefix,tag,nil,view) or unsetvalue - texcount.lastdestinationattribute = attr - return attr -end - -commands.setreferenceattribute = references.setandgetattribute - -function references.getinternalreference(n) -- n points into list (todo: registers) - local l = lists.collected[n] - return l and l.references.internal or n -end - -function commands.setinternalreference(prefix,tag,internal,view) -- needs checking - context(references.setinternalreference(prefix,tag,internal,view)) -end - -function commands.getinternalreference(n) -- this will also be a texcount - local l = lists.collected[n] - context(l and l.references.internal or n) -end - --- - -function references.getcurrentmetadata(tag) - local data = currentreference and currentreference.i - return data and data.metadata and data.metadata[tag] -end - -function commands.getcurrentreferencemetadata(tag) - local data = references.getcurrentmetadata(tag) - if data then - context(data) - end -end - -local function currentmetadata(tag) - local data = currentreference and currentreference.i - return data and data.metadata and data.metadata[tag] -end - -references.currentmetadata = currentmetadata - -local function getcurrentprefixspec(default) - -- todo: message - return currentmetadata("kind") or "?", currentmetadata("name") or "?", default or "?" -end - -references.getcurrentprefixspec = getcurrentprefixspec - -function commands.getcurrentprefixspec(default) - context.getreferencestructureprefix(getcurrentprefixspec(default)) -end - -function references.filter(name,...) -- number page title ... - local data = currentreference and currentreference.i -- maybe we should take realpage from here - if data then - if name == "realpage" then - local cs = references.analyze() -- normally already analyzed but also sets state - context(tonumber(cs.realpage) or 0) -- todo, return and in command namespace - else -- assumes data is table - local kind = type(data) == "table" and data.metadata and data.metadata.kind - if kind then - local filter = filters[kind] or filters.generic - filter = filter and (filter[name] or filter.unknown or filters.generic[name] or filters.generic.unknown) - if filter then - if trace_referencing then - report_references("name %a, kind %a, using dedicated filter",name,kind) - end - filter(data,name,...) 
- elseif trace_referencing then - report_references("name %a, kind %a, using generic filter",name,kind) - end - elseif trace_referencing then - report_references("name %a, unknown kind",name) - end - end - elseif name == "realpage" then - context(0) - elseif trace_referencing then - report_references("name %a, no reference",name) - end -end - -function references.filterdefault() - return references.filter("default",getcurrentprefixspec(v_default)) -end - -function commands.currentreferencedefault(tag) - if not tag then tag = "default" end - references.filter(tag,context.delayed(getcurrentprefixspec(tag))) -end - -filters.generic = { } - -function filters.generic.title(data) - if data then - local titledata = data.titledata or data.useddata - if titledata then - helpers.title(titledata.title or "?",data.metadata) - end - end -end - -function filters.generic.text(data) - if data then - local entries = data.entries or data.useddata - if entries then - helpers.title(entries.text or "?",data.metadata) - end - end -end - -function filters.generic.number(data,what,prefixspec) -- todo: spec and then no stopper - if data then - numberdata = lists.reordered(data) -- data.numberdata - if numberdata then - helpers.prefix(data,prefixspec) - sections.typesetnumber(numberdata,"number",numberdata) - else - local useddata = data.useddata - if useddata and useddsta.number then - context(useddata.number) - end - end - end -end - -filters.generic.default = filters.generic.text - -function filters.generic.page(data,prefixspec,pagespec) - local pagedata = data.pagedata - if pagedata then - local number, conversion = pagedata.number, pagedata.conversion - if not number then - -- error - elseif conversion then - context.convertnumber(conversion,number) - else - context(number) - end - else - helpers.prefixpage(data,prefixspec,pagespec) - end -end - -filters.user = { } - -function filters.user.unknown(data,name) - if data then - local userdata = data.userdata - local userkind = userdata and userdata.kind - if userkind then - local filter = filters[userkind] or filters.generic - filter = filter and (filter[name] or filter.unknown) - if filter then - filter(data,name) - return - end - end - local namedata = userdata and userdata[name] - if namedata then - context(namedata) - end - end -end - -filters.text = { } - -function filters.text.title(data) - helpers.title(data.entries.text or "?",data.metadata) -end - --- no longer considered useful: --- --- function filters.text.number(data) --- helpers.title(data.entries.text or "?",data.metadata) --- end - -function filters.text.page(data,prefixspec,pagespec) - helpers.prefixpage(data,prefixspec,pagespec) -end - -filters.full = { } - -filters.full.title = filters.text.title -filters.full.page = filters.text.page - -filters.section = { } - -function filters.section.number(data,what,prefixspec) - if data then - local numberdata = data.numberdata - if not numberdata then - local useddata = data.useddata - if useddata and useddata.number then - context(useddata.number) - end - elseif numberdata.hidenumber then - local references = data.references - if trace_empty then - report_empty("reference %a has a hidden number",references.reference) - context.emptyreference() -- maybe an option - end - else - sections.typesetnumber(numberdata,"number",prefixspec,numberdata) - end - end -end - -filters.section.title = filters.generic.title -filters.section.page = filters.generic.page -filters.section.default = filters.section.number - --- filters.note = { default = filters.generic.number 
} --- filters.formula = { default = filters.generic.number } --- filters.float = { default = filters.generic.number } --- filters.description = { default = filters.generic.number } --- filters.item = { default = filters.generic.number } - -setmetatableindex(filters, function(t,k) -- beware, test with rawget - local v = { default = filters.generic.number } -- not copy as it might be extended differently - t[k] = v - return v -end) - --- function references.sectiontitle(n) --- helpers.sectiontitle(lists.collected[tonumber(n) or 0]) --- end - --- function references.sectionnumber(n) --- helpers.sectionnumber(lists.collected[tonumber(n) or 0]) --- end - --- function references.sectionpage(n,prefixspec,pagespec) --- helpers.prefixedpage(lists.collected[tonumber(n) or 0],prefixspec,pagespec) --- end - --- analyze - -references.testrunners = references.testrunners or { } -references.testspecials = references.testspecials or { } - -local runners = references.testrunners -local specials = references.testspecials - --- We need to prevent ending up in the 'relative location' analyzer as it is --- pretty slow (progressively). In the pagebody one can best check the reference --- real page to determine if we need contrastlocation as that is more lightweight. - -local function checkedpagestate(n,page) - local r, p = referredpage(n), tonumber(page) - if not p then - return 0 - elseif p > r then - return 3 -- after - elseif p < r then - return 2 -- before - else - return 1 -- same - end -end - -local function setreferencerealpage(actions) - actions = actions or references.currentset - if not actions then - return 0 - else - local realpage = actions.realpage - if realpage then - return realpage - end - local nofactions = #actions - if nofactions > 0 then - for i=1,nofactions do - local a = actions[i] - local what = runners[a.kind] - if what then - what = what(a,actions) -- needs documentation - end - end - realpage = actions.realpage - if realpage then - return realpage - end - end - actions.realpage = 0 - return 0 - end -end - --- we store some analysis data alongside the indexed array --- at this moment only the real reference page is analyzed --- normally such an analysis happens in the backend code - -function references.analyze(actions) - actions = actions or references.currentset - if not actions then - actions = { realpage = 0, pagestate = 0 } - elseif actions.pagestate then - -- already done - else - local realpage = actions.realpage or setreferencerealpage(actions) - if realpage == 0 then - actions.pagestate = 0 - elseif actions.external then - actions.pagestate = 0 - else - actions.pagestate = checkedpagestate(actions.n,realpage) - end - end - return actions -end - -function commands.referencepagestate(actions) - actions = actions or references.currentset - if not actions then - context(0) - else - if not actions.pagestate then - references.analyze(actions) -- delayed unless explicitly asked for - end - context(actions.pagestate) - end -end - -function commands.referencerealpage(actions) - actions = actions or references.currentset - context(not actions and 0 or actions.realpage or setreferencerealpage(actions)) -end - -local plist, nofrealpages - -local function realpageofpage(p) -- the last one counts ! 
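-- [editorial note, not part of the patch] plist maps user page numbers to real
-- pages; because the loop below overwrites earlier entries that share the same
-- user number, the last real page with a given number wins (hence "the last one
-- counts" in the header above).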
- if not plist then - local pages = structures.pages.collected - nofrealpages = #pages - plist = { } - for rp=1,nofrealpages do - plist[pages[rp].number] = rp - end - references.nofrealpages = nofrealpages - end - return plist[p] -end - -references.realpageofpage = realpageofpage - -function references.checkedrealpage(r) - if not plist then - realpageofpage(r) -- just initialize - end - if not r then - return texcount.realpageno - elseif r < 1 then - return 1 - elseif r > nofrealpages then - return nofrealpages - else - return r - end -end - --- use local ? - -local pages = allocate { - [variables.firstpage] = function() return counters.record("realpage")["first"] end, - [variables.previouspage] = function() return counters.record("realpage")["previous"] end, - [variables.nextpage] = function() return counters.record("realpage")["next"] end, - [variables.lastpage] = function() return counters.record("realpage")["last"] end, - - [variables.firstsubpage] = function() return counters.record("subpage" )["first"] end, - [variables.previoussubpage] = function() return counters.record("subpage" )["previous"] end, - [variables.nextsubpage] = function() return counters.record("subpage" )["next"] end, - [variables.lastsubpage] = function() return counters.record("subpage" )["last"] end, - - [variables.forward] = function() return counters.record("realpage")["forward"] end, - [variables.backward] = function() return counters.record("realpage")["backward"] end, -} - -references.pages = pages - --- maybe some day i will merge this in the backend code with a testmode (so each --- runner then implements a branch) - -runners["inner"] = function(var,actions) - local r = var.r - if r then - actions.realpage = r - end -end - -runners["special"] = function(var,actions) - local handler = specials[var.special] - return handler and handler(var,actions) -end - -runners["special operation"] = runners["special"] -runners["special operation with arguments"] = runners["special"] - --- These are the testspecials not the real ones. They are used to --- check the validity. 
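Editorial sketch, not part of the patch: how one of the test specials that follow is exercised. A reference of the form special(operation) is parsed elsewhere into a variable with special and operation fields; only names visible in this file are used, and the literal values are illustrative.

    -- "realpage(10)" arrives here as { special = "realpage", operation = "10" }
    local var, actions = { special = "realpage", operation = "10" }, { }
    specials.realpage(var,actions)
    -- now var.r == 10 and actions.realpage == 10 ("first wins" if already set);
    -- symbolic pages such as page(...) go through the pages table defined above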
-function specials.internal(var,actions)
-    local v = references.internals[tonumber(var.operation)]
-    local r = v and v.references.realpage
-    if r then
-        actions.realpage = r
-    end
-end
-
-specials.i = specials.internal
-
-function specials.page(var,actions)
-    local o = var.operation
-    local p = pages[o]
-    if type(p) == "function" then
-        p = p()
-    else
-        p = tonumber(realpageofpage(tonumber(o)))
-    end
-    if p then
-        var.r = p
-        actions.realpage = actions.realpage or p -- first wins
-    end
-end
-
-function specials.realpage(var,actions)
-    local p = tonumber(var.operation)
-    if p then
-        var.r = p
-        actions.realpage = actions.realpage or p -- first wins
-    end
-end
-
-function specials.userpage(var,actions)
-    local p = tonumber(realpageofpage(var.operation))
-    if p then
-        var.r = p
-        actions.realpage = actions.realpage or p -- first wins
-    end
-end
-
-function specials.deltapage(var,actions)
-    local p = tonumber(var.operation)
-    if p then
-        p = references.checkedrealpage(p + texcount.realpageno)
-        var.r = p
-        actions.realpage = actions.realpage or p -- first wins
-    end
-end
-
-function specials.section(var,actions)
-    local sectionname = var.arguments
-    local destination = var.operation
-    local internal = structures.sections.internalreference(sectionname,destination)
-    if internal then
-        var.special = "internal"
-        var.operation = internal
-        var.arguments = nil
-        specials.internal(var,actions)
-    end
-end
-
--- needs a better split ^^^
-
-commands.filterreference = references.filter
-commands.filterdefaultreference = references.filterdefault
-
--- done differently now:
-
-function references.export(usedname) end
-function references.import(usedname) end
-function references.load (usedname) end
-
-commands.exportreferences = references.export
+if not modules then modules = { } end modules ['strc-ref'] = {
+    version   = 1.001,
+    comment   = "companion to strc-ref.mkiv",
+    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+    copyright = "PRAGMA ADE / ConTeXt Development Team",
+    license   = "see context related readme files"
+}
+
+-- beware, this is a first step in the rewrite (just getting rid of
+-- the tuo file); later all access and parsing will also move to lua
+
+-- the useddata and pagedata names might change
+-- todo: pack exported data
+
+-- todo: autoload components when :::
+
+local format, find, gmatch, match, concat = string.format, string.find, string.gmatch, string.match, table.concat
+local texcount, texsetcount = tex.count, tex.setcount
+local rawget, tonumber = rawget, tonumber
+local lpegmatch = lpeg.match
+local copytable = table.copy
+local formatters = string.formatters
+
+local allocate = utilities.storage.allocate
+local mark = utilities.storage.mark
+local setmetatableindex = table.setmetatableindex
+
+local trace_referencing = false  trackers.register("structures.referencing",             function(v) trace_referencing = v end)
+local trace_analyzing   = false  trackers.register("structures.referencing.analyzing",   function(v) trace_analyzing   = v end)
+local trace_identifying = false  trackers.register("structures.referencing.identifying", function(v) trace_identifying = v end)
+local trace_importing   = false  trackers.register("structures.referencing.importing",   function(v) trace_importing   = v end)
+local trace_empty       = false  trackers.register("structures.referencing.empty",       function(v) trace_empty       = v end)
+
+local check_duplicates = true
+
+directives.register("structures.referencing.checkduplicates", function(v)
+    check_duplicates = v
+end)
+
+local report_references = logs.reporter("references")
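Editorial note, not part of the patch: the trackers and the directive registered above are switched at run time through the usual ConTeXt mechanisms; the calls below are the customary ones and are given as an assumption rather than something this patch introduces.

    -- from Lua, e.g. in a module or job setup:
    trackers.enable("structures.referencing.identifying")
    -- the checkduplicates directive above is toggled the same way via the
    -- directives mechanism; from the TeX end the equivalents are
    -- \enabletrackers[...] and \enabledirectives[...]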
+local report_unknown = logs.reporter("references","unknown")
+local report_identifying = logs.reporter("references","identifying")
+local report_importing = logs.reporter("references","importing")
+local report_empty = logs.reporter("references","empty")
+
+local variables = interfaces.variables
+local constants = interfaces.constants
+local context = context
+
+local v_default = variables.default
+local v_url = variables.url
+local v_file = variables.file
+local v_unknown = variables.unknown
+local v_yes = variables.yes
+
+local texcount = tex.count
+local texconditionals = tex.conditionals
+
+local productcomponent = resolvers.jobs.productcomponent
+local justacomponent = resolvers.jobs.justacomponent
+
+local logsnewline = logs.newline
+local logspushtarget = logs.pushtarget
+local logspoptarget = logs.poptarget
+
+local settings_to_array = utilities.parsers.settings_to_array
+local unsetvalue = attributes.unsetvalue
+
+local structures = structures
+local helpers = structures.helpers
+local sections = structures.sections
+local references = structures.references
+local lists = structures.lists
+local counters = structures.counters
+
+-- some might become local
+
+references.defined = references.defined or allocate()
+
+local defined = references.defined
+local derived = allocate()
+local specials = allocate()
+local runners = allocate()
+local internals = allocate()
+local filters = allocate()
+local executers = allocate()
+local handlers = allocate()
+local tobesaved = allocate()
+local collected = allocate()
+local tobereferred = allocate()
+local referred = allocate()
+
+references.derived = derived
+references.specials = specials
+references.runners = runners
+references.internals = internals
+references.filters = filters
+references.executers = executers
+references.handlers = handlers
+references.tobesaved = tobesaved
+references.collected = collected
+references.tobereferred = tobereferred
+references.referred = referred
+
+local splitreference = references.splitreference
+local splitprefix = references.splitcomponent -- replaces: references.splitprefix
+local prefixsplitter = references.prefixsplitter
+local componentsplitter = references.componentsplitter
+
+local currentreference = nil
+
+storage.register("structures/references/defined", references.defined, "structures.references.defined")
+
+local initializers = { }
+local finalizers = { }
+
+function references.registerinitializer(func) -- we could use a token register instead
+    initializers[#initializers+1] = func
+end
+function references.registerfinalizer(func) -- we could use a token register instead
+    finalizers[#finalizers+1] = func
+end
+
+local function initializer() -- can we use a tobesaved as metatable for collected?
+    tobesaved = references.tobesaved
+    collected = references.collected
+    for i=1,#initializers do
+        initializers[i](tobesaved,collected)
+    end
+end
+
+local function finalizer()
+    for i=1,#finalizers do
+        finalizers[i](tobesaved)
+    end
+end
+
+job.register('structures.references.collected', tobesaved, initializer, finalizer)
+
+local maxreferred = 1
+local nofreferred = 0
+
+-- local function initializer() -- can we use a tobesaved as metatable for collected?
+--     tobereferred = references.tobereferred
+--     referred = references.referred
+--     nofreferred = #referred
+-- end
+
+local function initializer() -- can we use a tobesaved as metatable for collected?
+    tobereferred = references.tobereferred
+    referred = references.referred
+    setmetatableindex(referred,get) -- hm, what is get ?
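-- [editorial observation, not a fix] no local or global named get appears to be
-- in scope at this point in the new file, so this call most likely passes nil as
-- the index function; the author's trailing remark asks the same question.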
+end + +-- We make the array sparse (maybe a finalizer should optionally return a table) because +-- there can be quite some page links involved. We only store one action number per page +-- which is normally good enough for what we want (e.g. see above/below) and we do +-- a combination of a binary search and traverse backwards. A previous implementation +-- always did a traverse and was pretty slow on a large number of links (given that this +-- methods was used). It took me about a day to locate this as a bottleneck in processing +-- a 2500 page interactive document with 60 links per page. In that case, traversing +-- thousands of slots per link then brings processing to a grinding halt (especially when +-- there are no slots at all, which is the case in a first run). + +local sparsetobereferred = { } + +local function finalizer() + local lastr, lasti + local n = 0 + for i=1,maxreferred do + local r = tobereferred[i] + if not lastr then + lastr = r + lasti = i + elseif r ~= lastr then + n = n + 1 + sparsetobereferred[n] = { lastr, lasti } + lastr = r + lasti = i + end + end + if lastr then + n = n + 1 + sparsetobereferred[n] = { lastr, lasti } + end +end + +job.register('structures.references.referred', sparsetobereferred, initializer, finalizer) + +local function referredpage(n) + local max = nofreferred + if max > 0 then + -- find match + local min = 1 + while true do + local mid = floor((min+max)/2) + local r = referred[mid] + local m = r[2] + if n == m then + return r[1] + elseif n > m then + min = mid + 1 + else + max = mid - 1 + end + if min > max then + break + end + end + -- find first previous + for i=min,1,-1 do + local r = referred[i] + if r and r[2] < n then + return r[1] + end + end + end + -- fallback + return texcount.realpageno +end + +references.referredpage = referredpage + +function references.registerpage(n) -- called in the backend code + if not tobereferred[n] then + if n > maxreferred then + maxreferred = n + end + tobereferred[n] = texcount.realpageno + end +end + +-- todo: delay split till later as in destinations we split anyway + +local orders, lastorder = { }, 0 + +local function setnextorder(kind,name) + lastorder = 0 + if kind and name then + local ok = orders[kind] + if not ok then + ok = { } + orders[kind] = ok + end + lastorder = (ok[name] or 0) + 1 + ok[name] = lastorder + end + texsetcount("global","locationorder",lastorder) +end + +references.setnextorder = setnextorder + +function references.setnextinternal(kind,name) + setnextorder(kind,name) -- always incremented with internal + local n = texcount.locationcount + 1 + texsetcount("global","locationcount",n) + return n +end + +function references.currentorder(kind,name) + return orders[kind] and orders[kind][name] or lastorder +end + +local function setcomponent(data) + -- we might consider doing this at the tex end, just like prefix + local component = productcomponent() + if component then + local references = data and data.references + if references then + references.component = component + end + return component + end + -- but for the moment we do it here (experiment) +end + +commands.setnextinternalreference = references.setnextinternal + +function commands.currentreferenceorder(kind,name) + context(references.currentorder(kind,name)) +end + +references.setcomponent = setcomponent + +function references.set(kind,prefix,tag,data) +-- setcomponent(data) + local pd = tobesaved[prefix] -- nicer is a metatable + if not pd then + pd = { } + tobesaved[prefix] = pd + end + local n = 0 + for ref in 
gmatch(tag,"[^,]+") do + if ref ~= "" then + if check_duplicates and pd[ref] then + if prefix and prefix ~= "" then + report_references("redundant reference %a in namespace %a",ref,prefix) + else + report_references("redundant reference %a",ref) + end + else + n = n + 1 + pd[ref] = data + context.dofinishsomereference(kind,prefix,ref) + end + end + end + return n > 0 +end + +function references.enhance(prefix,tag) + local l = tobesaved[prefix][tag] + if l then + l.references.realpage = texcount.realpageno + end +end + +commands.enhancereference = references.enhance + +-- -- -- related to strc-ini.lua -- -- -- + +references.resolvers = references.resolvers or { } +local resolvers = references.resolvers + +local function getfromlist(var) + local vi = var.i + if vi then + vi = vi[3] or lists.collected[vi[2]] + if vi then + local r = vi.references and vi.references + if r then + r = r.realpage + end + if not r then + r = vi.pagedata and vi.pagedata + if r then + r = r.realpage + end + end + var.i = vi + var.r = r or 1 + else + var.i = nil + var.r = 1 + end + else + var.i = nil + var.r = 1 + end +end + +-- resolvers.section = getfromlist +-- resolvers.float = getfromlist +-- resolvers.description = getfromlist +-- resolvers.formula = getfromlist +-- resolvers.note = getfromlist + +setmetatableindex(resolvers,function(t,k) + local v = getfromlist + resolvers[k] = v + return v +end) + +function resolvers.reference(var) + local vi = var.i[2] -- check + if vi then + var.i = vi + var.r = (vi.references and vi.references.realpage) or (vi.pagedata and vi.pagedata.realpage) or 1 + else + var.i = nil + var.r = 1 + end +end + +local function register_from_lists(collected,derived,pages,sections) + local g = derived[""] if not g then g = { } derived[""] = g end -- global + for i=1,#collected do + local entry = collected[i] + local m, r = entry.metadata, entry.references + if m and r then + local reference = r.reference or "" + local prefix = r.referenceprefix or "" + local component = r.component and r.component or "" + if reference ~= "" then + local kind, realpage = m.kind, r.realpage + if kind and realpage then + local d = derived[prefix] + if not d then + d = { } + derived[prefix] = d + end + local c = derived[component] + if not c then + c = { } + derived[component] = c + end + local t = { kind, i, entry } + for s in gmatch(reference,"%s*([^,]+)") do + if trace_referencing then + report_references("list entry %a provides %a reference %a on realpage %a",i,kind,s,realpage) + end + c[s] = c[s] or t -- share them + d[s] = d[s] or t -- share them + g[s] = g[s] or t -- first wins + end + end + end + end + end +-- inspect(derived) +end + +references.registerinitializer(function() register_from_lists(lists.collected,derived) end) + +-- urls + +references.urls = references.urls or { } +references.urls.data = references.urls.data or { } + +local urls = references.urls.data + +function references.urls.define(name,url,file,description) + if name and name ~= "" then + urls[name] = { url or "", file or "", description or url or file or ""} + end +end + +local pushcatcodes = context.pushcatcodes +local popcatcodes = context.popcatcodes +local txtcatcodes = catcodes.numbers.txtcatcodes -- or just use "txtcatcodes" + +function references.urls.get(name) + local u = urls[name] + if u then + local url, file = u[1], u[2] + if file and file ~= "" then + return formatters["%s/%s"](url,file) + else + return url + end + end +end + +function commands.geturl(name) + local url = references.urls.get(name) + if url and url ~= "" 
then + pushcatcodes(txtcatcodes) + context(url) + popcatcodes() + end +end + +-- function commands.gethyphenatedurl(name,...) +-- local url = references.urls.get(name) +-- if url and url ~= "" then +-- hyphenatedurl(url,...) +-- end +-- end + +function commands.doifurldefinedelse(name) + commands.doifelse(urls[name]) +end + +commands.useurl= references.urls.define + +-- files + +references.files = references.files or { } +references.files.data = references.files.data or { } + +local files = references.files.data + +function references.files.define(name,file,description) + if name and name ~= "" then + files[name] = { file or "", description or file or "" } + end +end + +function references.files.get(name,method,space) -- method: none, before, after, both, space: yes/no + local f = files[name] + if f then + context(f[1]) + end +end + +function commands.doiffiledefinedelse(name) + commands.doifelse(files[name]) +end + +commands.usefile= references.files.define + +-- helpers + +function references.checkedfile(whatever) -- return whatever if not resolved + if whatever then + local w = files[whatever] + if w then + return w[1] + else + return whatever + end + end +end + +function references.checkedurl(whatever) -- return whatever if not resolved + if whatever then + local w = urls[whatever] + if w then + local u, f = w[1], w[2] + if f and f ~= "" then + return u .. "/" .. f + else + return u + end + else + return whatever + end + end +end + +function references.checkedfileorurl(whatever,default) -- return nil, nil if not resolved + if whatever then + local w = files[whatever] + if w then + return w[1], nil + else + local w = urls[whatever] + if w then + local u, f = w[1], w[2] + if f and f ~= "" then + return nil, u .. "/" .. f + else + return nil, u + end + end + end + end + return default +end + +-- programs + +references.programs = references.programs or { } +references.programs.data = references.programs.data or { } + +local programs = references.programs.data + +function references.programs.define(name,file,description) + if name and name ~= "" then + programs[name] = { file or "", description or file or ""} + end +end + +function references.programs.get(name) + local f = programs[name] + return f and f[1] +end + +function references.checkedprogram(whatever) -- return whatever if not resolved + if whatever then + local w = programs[whatever] + if w then + return w[1] + else + return whatever + end + end +end + +commands.defineprogram = references.programs.define + +function commands.getprogram(name) + local f = programs[name] + if f then + context(f[1]) + end +end + +-- shared by urls and files + +function references.whatfrom(name) + context((urls[name] and v_url) or (files[name] and v_file) or v_unknown) +end + +function references.from(name) + local u = urls[name] + if u then + local url, file, description = u[1], u[2], u[3] + if description ~= "" then + return description + -- ok + elseif file and file ~= "" then + return url .. "/" .. file + else + return url + end + else + local f = files[name] + if f then + local file, description = f[1], f[2] + if description ~= "" then + return description + else + return file + end + end + end +end + +function commands.from(name) + local u = urls[name] + if u then + local url, file, description = u[1], u[2], u[3] + if description ~= "" then + context.dofromurldescription(description) + -- ok + elseif file and file ~= "" then + context.dofromurlliteral(url .. "/" .. 
file) + else + context.dofromurlliteral(url) + end + else + local f = files[name] + if f then + local file, description = f[1], f[2] + if description ~= "" then + context.dofromfiledescription(description) + else + context.dofromfileliteral(file) + end + end + end +end + +function references.define(prefix,reference,list) + local d = defined[prefix] if not d then d = { } defined[prefix] = d end + d[reference] = { "defined", list } +end + +function references.reset(prefix,reference) + local d = defined[prefix] + if d then + d[reference] = nil + end +end + +commands.definereference = references.define +commands.resetreference = references.reset + +-- \primaryreferencefoundaction +-- \secondaryreferencefoundaction +-- \referenceunknownaction + +-- t.special t.operation t.arguments t.outer t.inner + +-- to what extend do we check the non prefixed variant + +local strict = false + +local function resolve(prefix,reference,args,set) -- we start with prefix,reference + if reference and reference ~= "" then + if not set then + set = { prefix = prefix, reference = reference } + else + set.reference = set.reference or reference + set.prefix = set.prefix or prefix + end + local r = settings_to_array(reference) + for i=1,#r do + local ri = r[i] + local d + if strict then + d = defined[prefix] or defined[""] + d = d and d[ri] + else + d = defined[prefix] + d = d and d[ri] + if not d then + d = defined[""] + d = d and d[ri] + end + end + if d then + resolve(prefix,d[2],nil,set) + else + local var = splitreference(ri) + if var then + var.reference = ri + local vo, vi = var.outer, var.inner + if not vo and vi then + -- to be checked + if strict then + d = defined[prefix] or defined[""] + d = d and d[vi] + else + d = defined[prefix] + d = d and d[vi] + if not d then + d = defined[""] + d = d and d[vi] + end + end + -- + if d then + resolve(prefix,d[2],var.arguments,set) -- args can be nil + else + if args then var.arguments = args end + set[#set+1] = var + end + else + if args then var.arguments = args end + set[#set+1] = var + end + if var.has_tex then + set.has_tex = true + end + else + -- report_references("funny pattern %a",ri) + end + end + end + return set + else + return { } + end +end + +-- prefix == "" is valid prefix which saves multistep lookup + +references.currentset = nil + +function commands.setreferenceoperation(k,v) + references.currentset[k].operation = v +end + +function commands.setreferencearguments(k,v) + references.currentset[k].arguments = v +end + +local expandreferenceoperation = context.expandreferenceoperation +local expandreferencearguments = context.expandreferencearguments + +function references.expandcurrent() -- todo: two booleans: o_has_tex& a_has_tex + local currentset = references.currentset + if currentset and currentset.has_tex then + for i=1,#currentset do + local ci = currentset[i] + local operation = ci.operation + if operation and find(operation,"\\") then -- if o_has_tex then + expandreferenceoperation(i,operation) + end + local arguments = ci.arguments + if arguments and find(arguments,"\\") then -- if a_has_tex then + expandreferencearguments(i,arguments) + end + end + end +end + +commands.expandcurrentreference = references.expandcurrent -- for the moment the same + +local externals = { } + +-- we have prefixes but also components: +-- +-- : prefix +-- :: always external +-- ::: internal (for products) or external (for components) + +local function loadexternalreferences(name,utilitydata) + local struc = utilitydata.structures + if struc then + local external = 
struc.references.collected -- direct references + local lists = struc.lists.collected -- indirect references (derived) + local pages = struc.pages.collected -- pagenumber data + -- a bit weird one, as we don't have the externals in the collected + for prefix, set in next, external do + for reference, data in next, set do + if trace_importing then + report_importing("registering %a reference, kind %a, name %a, prefix %a, reference %a", + "external","regular",name,prefix,reference) + end + local section = reference.section + local realpage = reference.realpage + if section then + reference.sectiondata = lists[section] + end + if realpage then + reference.pagedata = pages[realpage] + end + end + end + for i=1,#lists do + local entry = lists[i] + local metadata = entry.metadata + local references = entry.references + if metadata and references then + local reference = references.reference + if reference and reference ~= "" then + local kind = metadata.kind + local realpage = references.realpage + if kind and realpage then + references.pagedata = pages[realpage] + local prefix = references.referenceprefix or "" + local target = external[prefix] + if not target then + target = { } + external[prefix] = target + end + for s in gmatch(reference,"%s*([^,]+)") do + if trace_importing then + report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a", + "external",kind,name,prefix,s) + end + target[s] = target[s] or entry + end + end + end + end + end + externals[name] = external + return external + end +end + +local externalfiles = { } + +table.setmetatableindex(externalfiles, function(t,k) + local v = files[k] + if not v then + v = { k, k } + end + externalfiles[k] = v + return v +end) + +table.setmetatableindex(externals,function(t,k) -- either or not automatically + local filename = externalfiles[k][1] -- filename + local fullname = file.replacesuffix(filename,"tuc") + if lfs.isfile(fullname) then -- todo: use other locator + local utilitydata = job.loadother(fullname) + if utilitydata then + local external = loadexternalreferences(k,utilitydata) + t[k] = external or false + return external + end + end + t[k] = false + return false +end) + +local productdata = allocate { + productreferences = { }, + componentreferences = { }, + components = { }, +} + +references.productdata = productdata + +local function loadproductreferences(productname,componentname,utilitydata) + local struc = utilitydata.structures + if struc then + local productreferences = struc.references.collected -- direct references + local lists = struc.lists.collected -- indirect references (derived) + local pages = struc.pages.collected -- pagenumber data + -- we use indirect tables to save room but as they are eventually + -- just references we resolve them to data here (the mechanisms + -- that use this data check for indirectness) + for prefix, set in next, productreferences do + for reference, data in next, set do + if trace_importing then + report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a", + "product","regular",productname,prefix,reference) + end + local section = reference.section + local realpage = reference.realpage + if section then + reference.sectiondata = lists[section] + end + if realpage then + reference.pagedata = pages[realpage] + end + end + end + -- + local componentreferences = { } + for i=1,#lists do + local entry = lists[i] + local metadata = entry.metadata + local references = entry.references + if metadata and references then + local reference = 
references.reference + if reference and reference ~= "" then + local kind = metadata.kind + local realpage = references.realpage + if kind and realpage then + references.pagedata = pages[realpage] + local prefix = references.referenceprefix or "" + local component = references.component + local ctarget, ptarget + if not component or component == componentname then + -- skip + else + -- one level up + local external = componentreferences[component] + if not external then + external = { } + componentreferences[component] = external + end + if component == prefix then + prefix = "" + end + ctarget = external[prefix] + if not ctarget then + ctarget = { } + external[prefix] = ctarget + end + end + ptarget = productreferences[prefix] + if not ptarget then + ptarget = { } + productreferences[prefix] = ptarget + end + for s in gmatch(reference,"%s*([^,]+)") do + if ptarget then + if trace_importing then + report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a", + "product",kind,productname,prefix,s) + end + ptarget[s] = ptarget[s] or entry + end + if ctarget then + if trace_importing then + report_importing("registering %s reference, kind %a, name %a, prefix %a, referenc %a", + "component",kind,productname,prefix,s) + end + ctarget[s] = ctarget[s] or entry + end + end + end + end + end + end + productdata.productreferences = productreferences -- not yet used + productdata.componentreferences = componentreferences + end +end + +local function loadproductvariables(product,component,utilitydata) + local struc = utilitydata.structures + if struc then + local lists = struc.lists and struc.lists.collected + if lists then + local pages = struc.pages and struc.pages.collected + for i=1,#lists do + local li = lists[i] + if li.metadata.kind == "section" and li.references.component == component then + local firstsection = li + if firstsection.numberdata then + local numbers = firstsection.numberdata.numbers + if numbers then + if trace_importing then + report_importing("initializing section number to %:t",numbers) + end + productdata.firstsection = firstsection + structures.documents.preset(numbers) + end + end + if pages and firstsection.references then + local firstpage = pages[firstsection.references.realpage] + local number = firstpage and firstpage.number + if number then + if trace_importing then + report_importing("initializing page number to %a",number) + end + productdata.firstpage = firstpage + counters.set("userpage",1,number) + end + end + break + end + end + end + end +end + +local function componentlist(tree,target) + local branches = tree and tree.branches + if branches then + for i=1,#branches do + local branch = branches[i] + local type = branch.type + if type == "component" then + if target then + target[#target+1] = branch.name + else + target = { branch.name } + end + elseif type == "product" or type == "component" then + target = componentlist(branch,target) + end + end + end + return target +end + +local function loadproductcomponents(product,component,utilitydata) + local job = utilitydata.job + productdata.components = componentlist(job and job.structure and job.structure.collected) or { } +end + +references.registerinitializer(function(tobesaved,collected) + -- not that much related to tobesaved or collected + productdata.components = componentlist(job.structure.collected) or { } +end) + +function structures.references.loadpresets(product,component) -- we can consider a special components hash + if product and component and product~= "" and component ~= 
"" and not productdata.product then -- maybe: productdata.filename ~= filename + productdata.product = product + productdata.component = component + local fullname = file.replacesuffix(product,"tuc") + if lfs.isfile(fullname) then -- todo: use other locator + local utilitydata = job.loadother(fullname) + if utilitydata then + if trace_importing then + report_importing("loading references for component %a of product %a from %a",component,product,fullname) + end + loadproductvariables (product,component,utilitydata) + loadproductreferences(product,component,utilitydata) + loadproductcomponents(product,component,utilitydata) + -- inspect(productdata) + end + end + end +end + +structures.references.productdata = productdata + +local useproduct = commands.useproduct + +if useproduct then + + function commands.useproduct(product) + useproduct(product) + if texconditionals.autocrossfilereferences then + local component = justacomponent() + if component then + if trace_referencing or trace_importing then + report_references("loading presets for component %a of product %a",component,product) + end + structures.references.loadpresets(product,component) + end + end + end + +end + +-- productdata.firstsection.numberdata.numbers +-- productdata.firstpage.number + +local function report_identify_special(set,var,i,type) + local reference = set.reference + local prefix = set.prefix or "" + local special = var.special + local error = var.error + local kind = var.kind + if error then + report_identifying("type %a, reference %a, index %a, prefix %a, special %a, error %a",type,reference,i,prefix,special,error) + else + report_identifying("type %a, reference %a, index %a, prefix %a, special %a, kind %a",type,reference,i,prefix,special,kind) + end +end + +local function report_identify_arguments(set,var,i,type) + local reference = set.reference + local prefix = set.prefix or "" + local arguments = var.arguments + local error = var.error + local kind = var.kind + if error then + report_identifying("type %a, reference %a, index %a, prefix %a, arguments %a, error %a",type,reference,i,prefix,arguments,error) + else + report_identifying("type %a, reference %a, index %a, prefix %a, arguments %a, kind %a",type,reference,i,prefix,arguments,kind) + end +end + +local function report_identify_outer(set,var,i,type) + local reference = set.reference + local prefix = set.prefix or "" + local outer = var.outer + local error = var.error + local kind = var.kind + if outer then + if error then + report_identifying("type %a, reference %a, index %a, prefix %a, outer %a, error %a",type,reference,i,prefix,outer,error) + else + report_identifying("type %a, reference %a, index %a, prefix %a, outer %a, kind %a",type,reference,i,prefix,outer,kind) + end + else + if error then + report_identifying("type %a, reference %a, index %a, prefix %a, error %a",type,reference,i,prefix,error) + else + report_identifying("type %a, reference %a, index %a, prefix %a, kind %a",type,reference,i,prefix,kind) + end + end +end + +local function identify_special(set,var,i) + local special = var.special + local s = specials[special] + if s then + local outer = var.outer + local operation = var.operation + local arguments = var.arguments + if outer then + if operation then + -- special(outer::operation) + var.kind = "special outer with operation" + else + -- special() + var.kind = "special outer" + end + var.f = outer + elseif operation then + if arguments then + -- special(operation{argument,argument}) + var.kind = "special operation with arguments" + else + 
-- special(operation) + var.kind = "special operation" + end + else + -- special() + var.kind = "special" + end + if trace_identifying then + report_identify_special(set,var,i,"1a") + end + else + var.error = "unknown special" + end + return var +end + +local function identify_arguments(set,var,i) + local s = specials[var.inner] + if s then + -- inner{argument} + var.kind = "special with arguments" + else + var.error = "unknown inner or special" + end + if trace_identifying then + report_identify_arguments(set,var,i,"3a") + end + return var +end + +local function identify_inner(set,var,prefix,collected,derived,tobesaved) + local inner = var.inner + local outer = var.outer + -- inner ... we could move the prefix logic into the parser so that we have 'm for each entry + -- foo:bar -> foo == prefix (first we try the global one) + -- -:bar -> ignore prefix + local p, i = prefix, nil + local splitprefix, splitinner + -- the next test is a safeguard when references are auto loaded from outer + if inner then + splitprefix, splitinner = lpegmatch(prefixsplitter,inner) + end + -- these are taken from other anonymous references + if splitprefix and splitinner then + if splitprefix == "-" then + i = collected[""] + i = i and i[splitinner] + if i then + p = "" + end + else + i = collected[splitprefix] + i = i and i[splitinner] + if i then + p = splitprefix + end + end + end + -- todo: strict here + if not i then + i = collected[prefix] + i = i and i[inner] + if i then + p = prefix + end + end + if not i and prefix ~= "" then + i = collected[""] + i = i and i[inner] + if i then + p = "" + end + end + if i then + var.i = { "reference", i } + resolvers.reference(var) + var.kind = "inner" + var.p = p + elseif derived then + -- these are taken from other data structures (like lists) + if splitprefix and splitinner then + if splitprefix == "-" then + i = derived[""] + i = i and i[splitinner] + if i then + p = "" + end + else + i = derived[splitprefix] + i = i and i[splitinner] + if i then + p = splitprefix + end + end + end + if not i then + i = derived[prefix] + i = i and i[inner] + if i then + p = prefix + end + end + if not i and prefix ~= "" then + i = derived[""] + i = i and i[inner] + if i then + p = "" + end + end + if i then + var.kind = "inner" + var.i = i + var.p = p + local ri = resolvers[i[1]] + if ri then + ri(var) + else + -- can't happen as we catch it with a metatable now + report_references("unknown inner resolver for %a",i[1]) + end + else + -- no prefixes here + local s = specials[inner] + if s then + var.kind = "special" + else + i = (collected and collected[""] and collected[""][inner]) or + (derived and derived [""] and derived [""][inner]) or + (tobesaved and tobesaved[""] and tobesaved[""][inner]) + if i then + var.kind = "inner" + var.i = { "reference", i } + resolvers.reference(var) + var.p = "" + else + var.error = "unknown inner or special" + end + end + end + end + return var +end + +local function identify_outer(set,var,i) + local outer = var.outer + local inner = var.inner + local external = externals[outer] + if external then + local v = copytable(var) + v = identify_inner(set,v,nil,external) + if v.i and not v.error then + v.kind = "outer with inner" + set.external = true + if trace_identifying then + report_identify_outer(set,v,i,"2a") + end + return v + end + v = copytable(var) + local v = identify_inner(set,v,v.outer,external) + if v.i and not v.error then + v.kind = "outer with inner" + set.external = true + if trace_identifying then + report_identify_outer(set,v,i,"2b") 
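-- [editor's note, not part of the original patch] the quoted tags such as
-- "2a" and "2b" only label the trace messages emitted by the
-- report_identify_* helpers, so a trace run shows which identification
-- branch matched: "2a" above is the anonymous lookup in the external file,
-- "2b" this retry that uses the outer name itself as the prefix.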
+ end + return v + end + end + local external = productdata.componentreferences[outer] + if external then + local v = identify_inner(set,copytable(var),nil,external) + if v.i and not v.error then + v.kind = "outer with inner" + set.external = true + if trace_identifying then + report_identify_outer(set,v,i,"2c") + end + return v + end + end + local external = productdata.productreferences[outer] + if external then + local vi = external[inner] + if vi then + var.kind = "outer with inner" + var.i = vi + set.external = true + if trace_identifying then + report_identify_outer(set,var,i,"2d") + end + return var + end + end + -- the rest + local special = var.special + local arguments = var.arguments + local operation = var.operation + if inner then + if arguments then + -- outer::inner{argument} + var.kind = "outer with inner with arguments" + else + -- outer::inner + var.kind = "outer with inner" + end + var.i = { "reference", inner } + resolvers.reference(var) + var.f = outer + if trace_identifying then + report_identify_outer(set,var,i,"2e") + end + elseif special then + local s = specials[special] + if s then + if operation then + if arguments then + -- outer::special(operation{argument,argument}) + var.kind = "outer with special and operation and arguments" + else + -- outer::special(operation) + var.kind = "outer with special and operation" + end + else + -- outer::special() + var.kind = "outer with special" + end + var.f = outer + else + var.error = "unknown outer with special" + end + if trace_identifying then + report_identify_outer(set,var,i,"2f") + end + else + -- outer:: + var.kind = "outer" + var.f = outer + if trace_identifying then + report_identify_outer(set,var,i,"2g") + end + end + return var +end + +local function identify_inner_or_outer(set,var,i) + -- here we fall back on product data + local inner = var.inner + if inner and inner ~= "" then + local v = identify_inner(set,copytable(var),set.prefix,collected,derived,tobesaved) + if v.i and not v.error then + v.kind = "inner" -- check this + if trace_identifying then + report_identify_outer(set,v,i,"4a") + end + return v + end + +local components = job.structure.components + +if components then + for i=1,#components do + local component = components[i] + local data = collected[component] + local vi = data and data[inner] + if vi then + var.outer = component + var.i = vi + var.kind = "outer with inner" + set.external = true + if trace_identifying then + report_identify_outer(set,var,i,"4x") + end + return var + end + end +end + + local componentreferences = productdata.componentreferences + local productreferences = productdata.productreferences + local components = productdata.components + if components and componentreferences then + -- for component, data in next, productdata.componentreferences do -- better do this in order of processing: + for i=1,#components do + local component = components[i] + local data = componentreferences[component] + if data then + local d = data[""] + local vi = d and d[inner] + if vi then + var.outer = component + var.i = vi + var.kind = "outer with inner" + set.external = true + if trace_identifying then + report_identify_outer(set,var,i,"4b") + end + return var + end + end + end + end + local component, inner = lpegmatch(componentsplitter,inner) + if component then + local data = componentreferences and componentreferences[component] + if data then + local d = data[""] + local vi = d and d[inner] + if vi then + var.inner = inner + var.outer = component + var.i = vi + var.kind = "outer with 
inner" + set.external = true + if trace_identifying then + report_identify_outer(set,var,i,"4c") + end + return var + end + end + local data = productreferences and productreferences[component] + if data then + local vi = data[inner] + if vi then + var.inner = inner + var.outer = component + var.i = vi + var.kind = "outer with inner" + set.external = true + if trace_identifying then + report_identify_outer(set,var,i,"4d") + end + return var + end + end + end + var.error = "unknown inner" + else + var.error = "no inner" + end + if trace_identifying then + report_identify_outer(set,var,i,"4e") + end + return var +end + +-- local function identify_inner_or_outer(set,var,i) +-- -- we might consider first checking with a prefix prepended and then without +-- -- which is better for fig:oeps +-- local var = do_identify_inner_or_outer(set,var,i) +-- if var.error then +-- local prefix = set.prefix +-- if prefix and prefix ~= "" then +-- var.inner = prefix .. ':' .. var.inner +-- var.error = nil +-- return do_identify_inner_or_outer(set,var,i) +-- end +-- end +-- return var +-- end + +local function identify_inner_component(set,var,i) + -- we're in a product (maybe ignore when same as component) + local component = var.component + identify_inner(set,var,component,collected,derived,tobesaved) + if trace_identifying then + report_identify_outer(set,var,i,"5a") + end + return var +end + +local function identify_outer_component(set,var,i) + local component = var.component + local inner = var.inner + local data = productdata.componentreferences[component] + if data then + local d = data[""] + local vi = d and d[inner] + if vi then + var.inner = inner + var.outer = component + var.i = vi + var.kind = "outer with inner" + set.external = true + if trace_identifying then + report_identify_outer(set,var,i,"6a") + end + return var + end + end + local data = productdata.productreferences[component] + if data then + local vi = data[inner] + if vi then + var.inner = inner + var.outer = component + var.i = vi + var.kind = "outer with inner" + set.external = true + if trace_identifying then + report_identify_outer(set,var,i,"6b") + end + return var + end + end + var.error = "unknown component" + if trace_identifying then + report_identify_outer(set,var,i,"6c") + end + return var +end + +local nofidentified = 0 + +local function identify(prefix,reference) + if not reference then + prefix = "" + reference = prefix + end + local set = resolve(prefix,reference) + local bug = false + texcount.referencehastexstate = set.has_tex and 1 or 0 + nofidentified = nofidentified + 1 + set.n = nofidentified + for i=1,#set do + local var = set[i] + if var.special then + var = identify_special(set,var,i) + elseif var.outer then + var = identify_outer(set,var,i) + elseif var.arguments then + var = identify_arguments(set,var,i) + elseif not var.component then + var = identify_inner_or_outer(set,var,i) + elseif productcomponent() then + var = identify_inner_component(set,var,i) + else + var = identify_outer_component(set,var,i) + end + set[i] = var + bug = bug or var.error + end + references.currentset = mark(set) -- mark, else in api doc + if trace_analyzing then + report_references(table.serialize(set,reference)) + end + return set, bug +end + +references.identify = identify + +local unknowns, nofunknowns, f_valid = { }, 0, formatters["[%s][%s]"] + +function references.valid(prefix,reference,highlight,newwindow,layer) + local set, bug = identify(prefix,reference) + local unknown = bug or #set == 0 + if unknown then + currentreference 
= nil -- will go away + local str = f_valid(prefix,reference) + local u = unknowns[str] + if not u then + interfaces.showmessage("references",1,str) -- 1 = unknown, 4 = illegal + unknowns[str] = 1 + nofunknowns = nofunknowns + 1 + else + unknowns[str] = u + 1 + end + else + set.highlight, set.newwindow, set.layer = highlight, newwindow, layer + currentreference = set[1] + end + -- we can do the expansion here which saves a call + return not unknown +end + +function commands.doifelsereference(prefix,reference,highlight,newwindow,layer) + commands.doifelse(references.valid(prefix,reference,highlight,newwindow,layer)) +end + +function references.reportproblems() -- might become local + if nofunknowns > 0 then + statistics.register("cross referencing", function() + return format("%s identified, %s unknown",nofidentified,nofunknowns) + end) + logspushtarget("logfile") + logsnewline() + report_references("start problematic references") + logsnewline() + for k, v in table.sortedpairs(unknowns) do + report_unknown("%4i: %s",v,k) + end + logsnewline() + report_references("stop problematic references") + logsnewline() + logspoptarget() + end +end + +luatex.registerstopactions(references.reportproblems) + +local innermethod = "names" + +function references.setinnermethod(m) + if m then + if m == "page" or m == "mixed" or m == "names" then + innermethod = m + elseif m == true or m == v_yes then + innermethod = "page" + end + end + function references.setinnermethod() + report_references("inner method is already set and frozen to %a",innermethod) + end +end + +function references.getinnermethod() + return innermethod or "names" +end + +directives.register("references.linkmethod", function(v) -- page mixed names + references.setinnermethod(v) +end) + +-- this is inconsistent + +function references.setinternalreference(prefix,tag,internal,view) -- needs checking + if innermethod == "page" then + return unsetvalue + else + local t, tn = { }, 0 -- maybe add to current + if tag then + if prefix and prefix ~= "" then + prefix = prefix .. ":" -- watch out, : here + for ref in gmatch(tag,"[^,]+") do + tn = tn + 1 + t[tn] = prefix .. ref + end + else + for ref in gmatch(tag,"[^,]+") do + tn = tn + 1 + t[tn] = ref + end + end + end + if internal and innermethod == "names" then -- mixed or page + tn = tn + 1 + t[tn] = "aut:" .. 
internal + end + local destination = references.mark(t,nil,nil,view) -- returns an attribute + texcount.lastdestinationattribute = destination + return destination + end +end + +function references.setandgetattribute(kind,prefix,tag,data,view) -- maybe do internal automatically here + local attr = references.set(kind,prefix,tag,data) and references.setinternalreference(prefix,tag,nil,view) or unsetvalue + texcount.lastdestinationattribute = attr + return attr +end + +commands.setreferenceattribute = references.setandgetattribute + +function references.getinternalreference(n) -- n points into list (todo: registers) + local l = lists.collected[n] + return l and l.references.internal or n +end + +function commands.setinternalreference(prefix,tag,internal,view) -- needs checking + context(references.setinternalreference(prefix,tag,internal,view)) +end + +function commands.getinternalreference(n) -- this will also be a texcount + local l = lists.collected[n] + context(l and l.references.internal or n) +end + +-- + +function references.getcurrentmetadata(tag) + local data = currentreference and currentreference.i + return data and data.metadata and data.metadata[tag] +end + +function commands.getcurrentreferencemetadata(tag) + local data = references.getcurrentmetadata(tag) + if data then + context(data) + end +end + +local function currentmetadata(tag) + local data = currentreference and currentreference.i + return data and data.metadata and data.metadata[tag] +end + +references.currentmetadata = currentmetadata + +local function getcurrentprefixspec(default) + -- todo: message + return currentmetadata("kind") or "?", currentmetadata("name") or "?", default or "?" +end + +references.getcurrentprefixspec = getcurrentprefixspec + +function commands.getcurrentprefixspec(default) + context.getreferencestructureprefix(getcurrentprefixspec(default)) +end + +function references.filter(name,...) -- number page title ... + local data = currentreference and currentreference.i -- maybe we should take realpage from here + if data then + if name == "realpage" then + local cs = references.analyze() -- normally already analyzed but also sets state + context(tonumber(cs.realpage) or 0) -- todo, return and in command namespace + else -- assumes data is table + local kind = type(data) == "table" and data.metadata and data.metadata.kind + if kind then + local filter = filters[kind] or filters.generic + filter = filter and (filter[name] or filter.unknown or filters.generic[name] or filters.generic.unknown) + if filter then + if trace_referencing then + report_references("name %a, kind %a, using dedicated filter",name,kind) + end + filter(data,name,...) 
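-- [editor's note, not part of the original patch] the filter call above is
-- resolved a few lines earlier as filters[kind][name], falling back to that
-- table's "unknown" entry and then to filters.generic[name] or
-- filters.generic.unknown before giving up.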
+ elseif trace_referencing then + report_references("name %a, kind %a, using generic filter",name,kind) + end + elseif trace_referencing then + report_references("name %a, unknown kind",name) + end + end + elseif name == "realpage" then + context(0) + elseif trace_referencing then + report_references("name %a, no reference",name) + end +end + +function references.filterdefault() + return references.filter("default",getcurrentprefixspec(v_default)) +end + +function commands.currentreferencedefault(tag) + if not tag then tag = "default" end + references.filter(tag,context.delayed(getcurrentprefixspec(tag))) +end + +filters.generic = { } + +function filters.generic.title(data) + if data then + local titledata = data.titledata or data.useddata + if titledata then + helpers.title(titledata.title or "?",data.metadata) + end + end +end + +function filters.generic.text(data) + if data then + local entries = data.entries or data.useddata + if entries then + helpers.title(entries.text or "?",data.metadata) + end + end +end + +function filters.generic.number(data,what,prefixspec) -- todo: spec and then no stopper + if data then + numberdata = lists.reordered(data) -- data.numberdata + if numberdata then + helpers.prefix(data,prefixspec) + sections.typesetnumber(numberdata,"number",numberdata) + else + local useddata = data.useddata + if useddata and useddsta.number then + context(useddata.number) + end + end + end +end + +filters.generic.default = filters.generic.text + +function filters.generic.page(data,prefixspec,pagespec) + local pagedata = data.pagedata + if pagedata then + local number, conversion = pagedata.number, pagedata.conversion + if not number then + -- error + elseif conversion then + context.convertnumber(conversion,number) + else + context(number) + end + else + helpers.prefixpage(data,prefixspec,pagespec) + end +end + +filters.user = { } + +function filters.user.unknown(data,name) + if data then + local userdata = data.userdata + local userkind = userdata and userdata.kind + if userkind then + local filter = filters[userkind] or filters.generic + filter = filter and (filter[name] or filter.unknown) + if filter then + filter(data,name) + return + end + end + local namedata = userdata and userdata[name] + if namedata then + context(namedata) + end + end +end + +filters.text = { } + +function filters.text.title(data) + helpers.title(data.entries.text or "?",data.metadata) +end + +-- no longer considered useful: +-- +-- function filters.text.number(data) +-- helpers.title(data.entries.text or "?",data.metadata) +-- end + +function filters.text.page(data,prefixspec,pagespec) + helpers.prefixpage(data,prefixspec,pagespec) +end + +filters.full = { } + +filters.full.title = filters.text.title +filters.full.page = filters.text.page + +filters.section = { } + +function filters.section.number(data,what,prefixspec) + if data then + local numberdata = data.numberdata + if not numberdata then + local useddata = data.useddata + if useddata and useddata.number then + context(useddata.number) + end + elseif numberdata.hidenumber then + local references = data.references + if trace_empty then + report_empty("reference %a has a hidden number",references.reference) + context.emptyreference() -- maybe an option + end + else + sections.typesetnumber(numberdata,"number",prefixspec,numberdata) + end + end +end + +filters.section.title = filters.generic.title +filters.section.page = filters.generic.page +filters.section.default = filters.section.number + +-- filters.note = { default = filters.generic.number 
} +-- filters.formula = { default = filters.generic.number } +-- filters.float = { default = filters.generic.number } +-- filters.description = { default = filters.generic.number } +-- filters.item = { default = filters.generic.number } + +setmetatableindex(filters, function(t,k) -- beware, test with rawget + local v = { default = filters.generic.number } -- not copy as it might be extended differently + t[k] = v + return v +end) + +-- function references.sectiontitle(n) +-- helpers.sectiontitle(lists.collected[tonumber(n) or 0]) +-- end + +-- function references.sectionnumber(n) +-- helpers.sectionnumber(lists.collected[tonumber(n) or 0]) +-- end + +-- function references.sectionpage(n,prefixspec,pagespec) +-- helpers.prefixedpage(lists.collected[tonumber(n) or 0],prefixspec,pagespec) +-- end + +-- analyze + +references.testrunners = references.testrunners or { } +references.testspecials = references.testspecials or { } + +local runners = references.testrunners +local specials = references.testspecials + +-- We need to prevent ending up in the 'relative location' analyzer as it is +-- pretty slow (progressively). In the pagebody one can best check the reference +-- real page to determine if we need contrastlocation as that is more lightweight. + +local function checkedpagestate(n,page) + local r, p = referredpage(n), tonumber(page) + if not p then + return 0 + elseif p > r then + return 3 -- after + elseif p < r then + return 2 -- before + else + return 1 -- same + end +end + +local function setreferencerealpage(actions) + actions = actions or references.currentset + if not actions then + return 0 + else + local realpage = actions.realpage + if realpage then + return realpage + end + local nofactions = #actions + if nofactions > 0 then + for i=1,nofactions do + local a = actions[i] + local what = runners[a.kind] + if what then + what = what(a,actions) -- needs documentation + end + end + realpage = actions.realpage + if realpage then + return realpage + end + end + actions.realpage = 0 + return 0 + end +end + +-- we store some analysis data alongside the indexed array +-- at this moment only the real reference page is analyzed +-- normally such an analysis happens in the backend code + +function references.analyze(actions) + actions = actions or references.currentset + if not actions then + actions = { realpage = 0, pagestate = 0 } + elseif actions.pagestate then + -- already done + else + local realpage = actions.realpage or setreferencerealpage(actions) + if realpage == 0 then + actions.pagestate = 0 + elseif actions.external then + actions.pagestate = 0 + else + actions.pagestate = checkedpagestate(actions.n,realpage) + end + end + return actions +end + +function commands.referencepagestate(actions) + actions = actions or references.currentset + if not actions then + context(0) + else + if not actions.pagestate then + references.analyze(actions) -- delayed unless explicitly asked for + end + context(actions.pagestate) + end +end + +function commands.referencerealpage(actions) + actions = actions or references.currentset + context(not actions and 0 or actions.realpage or setreferencerealpage(actions)) +end + +local plist, nofrealpages + +local function realpageofpage(p) -- the last one counts ! 
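-- [editor's note, not part of the original patch] p is a user page number;
-- plist is built lazily from structures.pages.collected and maps user page
-- numbers to real (physical) page numbers, later entries overwriting earlier
-- ones, which is why "the last one counts".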
+ if not plist then + local pages = structures.pages.collected + nofrealpages = #pages + plist = { } + for rp=1,nofrealpages do + plist[pages[rp].number] = rp + end + references.nofrealpages = nofrealpages + end + return plist[p] +end + +references.realpageofpage = realpageofpage + +function references.checkedrealpage(r) + if not plist then + realpageofpage(r) -- just initialize + end + if not r then + return texcount.realpageno + elseif r < 1 then + return 1 + elseif r > nofrealpages then + return nofrealpages + else + return r + end +end + +-- use local ? + +local pages = allocate { + [variables.firstpage] = function() return counters.record("realpage")["first"] end, + [variables.previouspage] = function() return counters.record("realpage")["previous"] end, + [variables.nextpage] = function() return counters.record("realpage")["next"] end, + [variables.lastpage] = function() return counters.record("realpage")["last"] end, + + [variables.firstsubpage] = function() return counters.record("subpage" )["first"] end, + [variables.previoussubpage] = function() return counters.record("subpage" )["previous"] end, + [variables.nextsubpage] = function() return counters.record("subpage" )["next"] end, + [variables.lastsubpage] = function() return counters.record("subpage" )["last"] end, + + [variables.forward] = function() return counters.record("realpage")["forward"] end, + [variables.backward] = function() return counters.record("realpage")["backward"] end, +} + +references.pages = pages + +-- maybe some day i will merge this in the backend code with a testmode (so each +-- runner then implements a branch) + +runners["inner"] = function(var,actions) + local r = var.r + if r then + actions.realpage = r + end +end + +runners["special"] = function(var,actions) + local handler = specials[var.special] + return handler and handler(var,actions) +end + +runners["special operation"] = runners["special"] +runners["special operation with arguments"] = runners["special"] + +-- These are the testspecials not the real ones. They are used to +-- check the validity. 
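-- [editor's sketch, not part of the original patch] a validity-check special
-- only has to derive a real page from its operation and record it on var and
-- actions; a hypothetical "midpage" special (jump to the middle of the
-- document) could follow the same pattern as specials.page below:

local function midpage(var,actions) -- hypothetical helper, for illustration only
    local pages = structures.pages.collected
    local n = pages and #pages or 0
    if n > 0 then
        local p = math.ceil(n/2)
        var.r = p
        actions.realpage = actions.realpage or p -- first wins, as in the real specials
    end
end

-- specials.midpage = midpage -- it would be registered like this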
+ +function specials.internal(var,actions) + local v = references.internals[tonumber(var.operation)] + local r = v and v.references.realpage + if r then + actions.realpage = r + end +end + +specials.i = specials.internal + +function specials.page(var,actions) + local o = var.operation + local p = pages[o] + if type(p) == "function" then + p = p() + else + p = tonumber(realpageofpage(tonumber(o))) + end + if p then + var.r = p + actions.realpage = actions.realpage or p -- first wins + end +end + +function specials.realpage(var,actions) + local p = tonumber(var.operation) + if p then + var.r = p + actions.realpage = actions.realpage or p -- first wins + end +end + +function specials.userpage(var,actions) + local p = tonumber(realpageofpage(var.operation)) + if p then + var.r = p + actions.realpage = actions.realpage or p -- first wins + end +end + +function specials.deltapage(var,actions) + local p = tonumber(var.operation) + if p then + p = references.checkedrealpage(p + texcount.realpageno) + var.r = p + actions.realpage = actions.realpage or p -- first wins + end +end + +function specials.section(var,actions) + local sectionname = var.arguments + local destination = var.operation + local internal = structures.sections.internalreference(sectionname,destination) + if internal then + var.special = "internal" + var.operation = internal + var.arguments = nil + specials.internal(var,actions) + end +end + +-- needs a better split ^^^ + +commands.filterreference = references.filter +commands.filterdefaultreference = references.filterdefault + +-- done differently now: + +function references.export(usedname) end +function references.import(usedname) end +function references.load (usedname) end + +commands.exportreferences = references.export diff --git a/tex/context/base/strc-reg.lua b/tex/context/base/strc-reg.lua index 40cd3455b..61d18a5d5 100644 --- a/tex/context/base/strc-reg.lua +++ b/tex/context/base/strc-reg.lua @@ -1,862 +1,862 @@ -if not modules then modules = { } end modules ['strc-reg'] = { - version = 1.001, - comment = "companion to strc-reg.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next, type = next, type -local texcount = tex.count -local format, gmatch = string.format, string.gmatch -local equal, concat, remove = table.are_equal, table.concat, table.remove -local utfchar = utf.char -local lpegmatch = lpeg.match -local allocate = utilities.storage.allocate - -local trace_registers = false trackers.register("structures.registers", function(v) trace_registers = v end) - -local report_registers = logs.reporter("structure","registers") - -local structures = structures -local registers = structures.registers -local helpers = structures.helpers -local sections = structures.sections -local documents = structures.documents -local pages = structures.pages -local references = structures.references - -local mappings = sorters.mappings -local entries = sorters.entries -local replacements = sorters.replacements - -local processors = typesetters.processors -local splitprocessor = processors.split - -local variables = interfaces.variables -local context = context - -local matchingtilldepth, numberatdepth = sections.matchingtilldepth, sections.numberatdepth - --- some day we will share registers and lists (although there are some conceptual --- differences in the application of keywords) - -local function filtercollected(names,criterium,number,collected,prevmode) - if not 
criterium or criterium == "" then criterium = variables.all end - local data = documents.data - local numbers, depth = data.numbers, data.depth - local hash, result, nofresult, all, detail = { }, { }, 0, not names or names == "" or names == variables.all, nil - if not all then - for s in gmatch(names,"[^, ]+") do - hash[s] = true - end - end - if criterium == variables.all or criterium == variables.text then - for i=1,#collected do - local v = collected[i] - if all then - nofresult = nofresult + 1 - result[nofresult] = v - else - local vmn = v.metadata and v.metadata.name - if hash[vmn] then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - end - elseif criterium == variables.current then - for i=1,#collected do - local v = collected[i] - local sectionnumber = sections.collected[v.references.section] - if sectionnumber then - local cnumbers = sectionnumber.numbers - if prevmode then - if (all or hash[v.metadata.name]) and #cnumbers >= depth then -- is the = ok for lists as well? - local ok = true - for d=1,depth do - if not (cnumbers[d] == numbers[d]) then -- no zero test - ok = false - break - end - end - if ok then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - else - if (all or hash[v.metadata.name]) and #cnumbers > depth then - local ok = true - for d=1,depth do - local cnd = cnumbers[d] - if not (cnd == 0 or cnd == numbers[d]) then - ok = false - break - end - end - if ok then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - end - end - end - elseif criterium == variables.previous then - for i=1,#collected do - local v = collected[i] - local sectionnumber = sections.collected[v.references.section] - if sectionnumber then - local cnumbers = sectionnumber.numbers - if (all or hash[v.metadata.name]) and #cnumbers >= depth then - local ok = true - if prevmode then - for d=1,depth do - if not (cnumbers[d] == numbers[d]) then - ok = false - break - end - end - else - for d=1,depth do - local cnd = cnumbers[d] - if not (cnd == 0 or cnd == numbers[d]) then - ok = false - break - end - end - end - if ok then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - end - end - elseif criterium == variables["local"] then - if sections.autodepth(data.numbers) == 0 then - return filtercollected(names,variables.all,number,collected,prevmode) - else - return filtercollected(names,variables.current,number,collected,prevmode) - end - else -- sectionname, number - -- beware, this works ok for registers - local depth = sections.getlevel(criterium) - local number = tonumber(number) or numberatdepth(depth) or 0 - if trace_registers then - detail = format("depth: %s, number: %s, numbers: %s, startset: %s",depth,number,concat(sections.numbers(),".",1,depth),#collected) - end - if number > 0 then - for i=1,#collected do - local v = collected[i] - local r = v.references - if r then - local sectionnumber = sections.collected[r.section] - if sectionnumber then - local metadata = v.metadata - local cnumbers = sectionnumber.numbers - if cnumbers then - if (all or hash[metadata.name or false]) and #cnumbers >= depth and matchingtilldepth(depth,cnumbers) then - nofresult = nofresult + 1 - result[nofresult] = v - end - end - end - end - end - end - end - if trace_registers then - if detail then - report_registers("criterium %a, detail %a, found %a",criterium,detail,#result) - else - report_registers("criterium %a, detail %a, found %a",criterium,nil,#result) - end - end - return result -end - -local tobesaved = allocate() -local collected = allocate() - 
-registers.collected = collected -registers.tobesaved = tobesaved -registers.filtercollected = filtercollected - --- we follow a different strategy than by lists, where we have a global --- result table; we might do that here as well but since sorting code is --- older we delay that decision - -local function initializer() - tobesaved = registers.tobesaved - collected = registers.collected - local internals = references.internals - for name, list in next, collected do - local entries = list.entries - for e=1,#entries do - local entry = entries[e] - local r = entry.references - if r then - local internal = r and r.internal - if internal then - internals[internal] = entry - end - end - end - end -end - -job.register('structures.registers.collected', tobesaved, initializer) - -local function allocate(class) - local d = tobesaved[class] - if not d then - d = { - metadata = { - language = 'en', - sorted = false, - class = class - }, - entries = { }, - } - tobesaved[class] = d - end - return d -end - -registers.define = allocate - -local entrysplitter = lpeg.tsplitat('+') -- & obsolete in mkiv - -local tagged = { } - -local function preprocessentries(rawdata) - local entries = rawdata.entries - if entries then ---~ table.print(rawdata) - local e, k = entries[1] or "", entries[2] or "" - local et, kt, entryproc, pageproc - if type(e) == "table" then - et = e - else - entryproc, e = splitprocessor(e) - et = lpegmatch(entrysplitter,e) - end - if type(k) == "table" then - kt = k - else - pageproc, k = splitprocessor(k) - kt = lpegmatch(entrysplitter,k) - end - entries = { } - for k=1,#et do - entries[k] = { et[k] or "", kt[k] or "" } - end - for k=#et,1,-1 do - if entries[k][1] ~= "" then - break - else - entries[k] = nil - end - end - rawdata.list = entries - if pageproc or entryproc then - rawdata.processors = { entryproc, pageproc } - end - rawdata.entries = nil - end - local seeword = rawdata.seeword - if seeword then - seeword.processor, seeword.text = splitprocessor(seeword.text or "") - end -end - -function registers.store(rawdata) -- metadata, references, entries - local data = allocate(rawdata.metadata.name).entries - local references = rawdata.references - references.realpage = references.realpage or 0 -- just to be sure as it can be refered to - preprocessentries(rawdata) - data[#data+1] = rawdata - local label = references.label - if label and label ~= "" then tagged[label] = #data end - context(#data) -end - -function registers.enhance(name,n) - local r = tobesaved[name].entries[n] - if r then - r.references.realpage = texcount.realpageno - end -end - -function registers.extend(name,tag,rawdata) -- maybe do lastsection internally - if type(tag) == "string" then - tag = tagged[tag] - end - if tag then - local r = tobesaved[name].entries[tag] - if r then - local rr = r.references - rr.lastrealpage = texcount.realpageno - rr.lastsection = sections.currentid() - if rawdata then - if rawdata.entries then - preprocessentries(rawdata) - end - for k,v in next, rawdata do - if not r[k] then - r[k] = v - else - local rk = r[k] - for kk,vv in next, v do - if type(vv) == "table" then - if next(vv) then - rk[kk] = vv - end - elseif vv ~= "" then - rk[kk] = vv - end - end - end - end - end - end - end -end - --- sorting and rendering - -local compare = sorters.comparers.basic - -function registers.compare(a,b) - local result = compare(a,b) - if result ~= 0 then - return result - else - local ka, kb = a.metadata.kind, b.metadata.kind - if ka == kb then - local page_a, page_b = a.references.realpage, 
b.references.realpage - if not page_a or not page_b then - return 0 - elseif page_a < page_b then - return -1 - elseif page_a > page_b then - return 1 - end - elseif ka == "see" then - return 1 - elseif kb == "see" then - return -1 - end - end - return 0 -end - -function registers.filter(data,options) - data.result = registers.filtercollected(nil,options.criterium,options.number,data.entries,true) -end - -local seeindex = 0 - --- meerdere loops, seewords, dan words, an seewords - -local function crosslinkseewords(result) -- all words - -- collect all seewords - local seewords = { } - for i=1,#result do - local data = result[i] - local seeword = data.seeword - if seeword then - local seetext = seeword.text - if seetext and not seewords[seetext] then - seeindex = seeindex + 1 - seewords[seetext] = seeindex - if trace_registers then - report_registers("see word %03i: %s",seeindex,seetext) - end - end - end - end - -- mark seeparents - local seeparents = { } - for i=1,#result do - local data = result[i] - local word = data.list[1] - word = word and word[1] - if word then - local seeindex = seewords[word] - if seeindex then - seeparents[word] = data - data.references.seeparent = seeindex - if trace_registers then - report_registers("see parent %03i: %s",seeindex,word) - end - end - end - end - -- mark seewords and extend sort list - for i=1,#result do - local data = result[i] - local seeword = data.seeword - if seeword then - local text = seeword.text - if text then - local seeparent = seeparents[text] - if seeparent then - local seeindex = seewords[text] - local s, ns, d, w, l = { }, 0, data.split, seeparent.split, data.list - -- trick: we influence sorting by adding fake subentries - for i=1,#d do - ns = ns + 1 - s[ns] = d[i] -- parent - end - for i=1,#w do - ns = ns + 1 - s[ns] = w[i] -- see - end - data.split = s - -- we also register a fake extra list entry so that the - -- collapser works okay - l[#l+1] = { text, "" } - data.references.seeindex = seeindex - if trace_registers then - report_registers("see crosslink %03i: %s",seeindex,text) - end - end - end - end - end -end - -local function removeemptyentries(result) - local i, n, m = 1, #result, 0 - while i <= n do - local entry = result[i] - if #entry.list == 0 or #entry.split == 0 then - remove(result,i) - n = n - 1 - m = m + 1 - else - i = i + 1 - end - end - if m > 0 then - report_registers("%s empty entries removed in register",m) - end -end - -function registers.prepare(data) - -- data has 'list' table - local strip = sorters.strip - local splitter = sorters.splitters.utf - local result = data.result - if result then - for i=1, #result do - local entry, split = result[i], { } - local list = entry.list - if list then - for l=1,#list do - local ll = list[l] - local word, key = ll[1], ll[2] - if not key or key == "" then - key = word - end - split[l] = splitter(strip(key)) - end - end - entry.split = split - end - removeemptyentries(result) - crosslinkseewords(result) - end -end - -function registers.sort(data,options) - sorters.sort(data.result,registers.compare) -end - -function registers.unique(data,options) - local result, nofresult, prev = { }, 0, nil - local dataresult = data.result - for k=1,#dataresult do - local v = dataresult[k] - if prev then - local pr, vr = prev.references, v.references - if not equal(prev.list,v.list) then - -- ok - elseif pr.realpage ~= vr.realpage then - -- ok - else - local pl, vl = pr.lastrealpage, vr.lastrealpage - if pl or vl then - if not vl then - -- ok - elseif not pl then - -- ok - elseif pl ~= 
vl then - -- ok - else - v = nil - end - else - v = nil - end - end - end - if v then - nofresult = nofresult + 1 - result[nofresult] = v - prev = v - end - end - data.result = result -end - -function registers.finalize(data,options) -- maps character to index (order) - local result = data.result - data.metadata.nofsorted = #result - local split, nofsplit, lasttag, done, nofdone = { }, 0, nil, nil, 0 - local firstofsplit = sorters.firstofsplit - for k=1,#result do - local v = result[k] - local entry, tag = firstofsplit(v) - if tag ~= lasttag then - if trace_registers then - report_registers("splitting at %a",tag) - end - done, nofdone = { }, 0 - nofsplit = nofsplit + 1 - split[nofsplit] = { tag = tag, data = done } - lasttag = tag - end - nofdone = nofdone + 1 - done[nofdone] = v - end - data.result = split -end - -function registers.analyzed(class,options) - local data = collected[class] - if data and data.entries then - options = options or { } - sorters.setlanguage(options.language,options.method,options.numberorder) - registers.filter(data,options) -- filter entries into results (criteria) - registers.prepare(data,options) -- adds split table parallel to list table - registers.sort(data,options) -- sorts results - registers.unique(data,options) -- get rid of duplicates - registers.finalize(data,options) -- split result in ranges - data.metadata.sorted = true - return data.metadata.nofsorted or 0 - else - return 0 - end -end - --- todo take conversion from index - -function registers.userdata(index,name) - local data = references.internals[tonumber(index)] - data = data and data.userdata and data.userdata[name] - if data then - context(data) - end -end - --- todo: ownnumber - -local function pagerange(f_entry,t_entry,is_last,prefixspec,pagespec) - local fer, ter = f_entry.references, t_entry.references - context.registerpagerange( - f_entry.processors and f_entry.processors[2] or "", - fer.internal or 0, - fer.realpage or 0, - function() - helpers.prefixpage(f_entry,prefixspec,pagespec) - end, - ter.internal or 0, - ter.lastrealpage or ter.realpage or 0, - function() - if is_last then - helpers.prefixlastpage(t_entry,prefixspec,pagespec) -- swaps page and realpage keys - else - helpers.prefixpage (t_entry,prefixspec,pagespec) - end - end - ) -end - -local function pagenumber(entry,prefixspec,pagespec) - local er = entry.references - context.registeronepage( - entry.processors and entry.processors[2] or "", - er.internal or 0, - er.realpage or 0, - function() helpers.prefixpage(entry,prefixspec,pagespec) end - ) -end - -local function collapsedpage(pages) - for i=2,#pages do - local first, second = pages[i-1], pages[i] - local first_first, first_last, second_first, second_last = first[1], first[2], second[1], second[2] - local first_last_pn = first_last .references.realpage - local second_first_pn = second_first.references.realpage - local second_last_pn = second_last .references.realpage - local first_last_last = first_last .references.lastrealpage - local second_first_last = second_first.references.lastrealpage - if first_last_last then - first_last_pn = first_last_last - if second_first == second_last and second_first_pn <= first_last_pn then - -- 2=8, 5 -> 12=8 - remove(pages,i) - return true - elseif second_first == second_last and second_first_pn > first_last_pn then - -- 2=8, 9 -> 2-9 - pages[i-1] = { first_first, second_last } - remove(pages,i) - return true - elseif second_last_pn < first_last_pn then - -- 2=8, 3-4 -> 2=8 - remove(pages,i) - return true - elseif first_last_pn < 
second_last_pn then - -- 2=8, 3-9 -> 2-9 - pages[i-1] = { first_first, second_last } - remove(pages,i) - return true - elseif first_last_pn + 1 == second_first_pn and second_last_pn > first_last_pn then - -- 2=8, 9-11 -> 2-11 - pages[i-1] = { first_first, second_last } - remove(pages,i) - return true - elseif second_first.references.lastrealpage then - -- 2=8, 9=11 -> 2-11 - pages[i-1] = { first_first, second_last } - remove(pages,i) - return true - end - elseif second_first_last then - second_first_pn = second_first_last - if first_last_pn == second_first_pn then - -- 2-4, 5=9 -> 2-9 - pages[i-1] = { first_first, second_last } - remove(pages,i) - return true - end - elseif first_last_pn == second_first_pn then - -- 2-3, 3-4 -> 2-4 - pages[i-1] = { first_last, second_last } - remove(pages,i) - return true - end - end - return false -end - -local function collapsepages(pages) - while collapsedpage(pages) do end - return #pages -end - -function registers.flush(data,options,prefixspec,pagespec) - local collapse_singles = options.compress == variables.yes - local collapse_ranges = options.compress == variables.all - local result = data.result - context.startregisteroutput() - for i=1,#result do - -- ranges need checking ! - local sublist = result[i] - local done = { false, false, false, false } - local data = sublist.data - local d, n = 0, 0 - context.startregistersection(sublist.tag) - for d=1,#data do - local entry = data[d] - if entry.metadata.kind == "see" then - local list = entry.list - if #list > 1 then - list[#list] = nil - else - -- we have an \seeindex{Foo}{Bar} without Foo being defined anywhere - report_registers("invalid see entry in register %a, reference %a",entry.metadata.name,list[1][1]) - end - end - end - while d < #data do - d = d + 1 - local entry = data[d] - local e = { false, false, false, false } - local metadata = entry.metadata - local kind = metadata.kind - local list = entry.list - for i=1,4 do -- max 4 - if list[i] then - e[i] = list[i][1] - end - if e[i] ~= done[i] then - if e[i] and e[i] ~= "" then - done[i] = e[i] - if n == i then - context.stopregisterentries() - context.startregisterentries(n) - else - while n > i do - n = n - 1 - context.stopregisterentries() - end - while n < i do - n = n + 1 - context.startregisterentries(n) - end - end - local internal = entry.references.internal or 0 - local seeparent = entry.references.seeparent or "" - local processor = entry.processors and entry.processors[1] or "" - if metadata then - context.registerentry(processor,internal,seeparent,function() helpers.title(e[i],metadata) end) - else -- ? 
- context.registerentry(processor,internal,seeindex,e[i]) - end - else - done[i] = false - end - end - end - if kind == 'entry' then - context.startregisterpages() - if collapse_singles or collapse_ranges then - -- we collapse ranges and keep existing ranges as they are - -- so we get prebuilt as well as built ranges - local first, last, prev, pages, dd, nofpages = entry, nil, entry, { }, d, 0 - while dd < #data do - dd = dd + 1 - local next = data[dd] - if next and next.metadata.kind == "see" then - dd = dd - 1 - break - else - local el, nl = entry.list, next.list - if not equal(el,nl) then - dd = dd - 1 - --~ first = nil - break - elseif next.references.lastrealpage then - nofpages = nofpages + 1 - pages[nofpages] = first and { first, last or first } or { entry, entry } - nofpages = nofpages + 1 - pages[nofpages] = { next, next } - first, last, prev = nil, nil, nil - elseif not first then - first, prev = next, next - elseif next.references.realpage - prev.references.realpage == 1 then -- 1 ? - last, prev = next, next - else - nofpages = nofpages + 1 - pages[nofpages] = { first, last or first } - first, last, prev = next, nil, next - end - end - end - if first then - nofpages = nofpages + 1 - pages[nofpages] = { first, last or first } - end - if collapse_ranges and nofpages > 1 then - nofpages = collapsepages(pages) - end - if nofpages > 0 then -- or 0 - d = dd - for p=1,nofpages do - local first, last = pages[p][1], pages[p][2] - if first == last then - if first.references.lastrealpage then - pagerange(first,first,true,prefixspec,pagespec) - else - pagenumber(first,prefixspec,pagespec) - end - elseif last.references.lastrealpage then - pagerange(first,last,true,prefixspec,pagespec) - else - pagerange(first,last,false,prefixspec,pagespec) - end - end - elseif entry.references.lastrealpage then - pagerange(entry,entry,true,prefixspec,pagespec) - else - pagenumber(entry,prefixspec,pagespec) - end - else - while true do - if entry.references.lastrealpage then - pagerange(entry,entry,true,prefixspec,pagespec) - else - pagenumber(entry,prefixspec,pagespec) - end - if d == #data then - break - else - d = d + 1 - local next = data[d] - if next.metadata.kind == "see" or not equal(entry.list,next.list) then - d = d - 1 - break - else - entry = next - end - end - end - end - context.stopregisterpages() - elseif kind == 'see' then - local t, nt = { }, 0 - while true do - nt = nt + 1 - t[nt] = entry - if d == #data then - break - else - d = d + 1 - local next = data[d] - if next.metadata.kind ~= "see" or not equal(entry.list,next.list) then - d = d - 1 - break - else - entry = next - end - end - end - context.startregisterseewords() - for i=1,nt do - local entry = t[i] - local seeword = entry.seeword - local seetext = seeword.text or "" - local processor = seeword.processor or (entry.processors and entry.processors[1]) or "" - local seeindex = entry.references.seeindex or "" - context.registerseeword(i,n,processor,0,seeindex,seetext) - end - context.stopregisterseewords() - end - end - while n > 0 do - context.stopregisterentries() - n = n - 1 - end - context.stopregistersection() - end - context.stopregisteroutput() - -- for now, maybe at some point we will do a multipass or so - data.result = nil - data.metadata.sorted = false -end - -function registers.analyze(class,options) - context(registers.analyzed(class,options)) -end - -function registers.process(class,...) - if registers.analyzed(class,...) > 0 then - registers.flush(collected[class],...) 
- end -end - +if not modules then modules = { } end modules ['strc-reg'] = { + version = 1.001, + comment = "companion to strc-reg.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next, type = next, type +local texcount = tex.count +local format, gmatch = string.format, string.gmatch +local equal, concat, remove = table.are_equal, table.concat, table.remove +local utfchar = utf.char +local lpegmatch = lpeg.match +local allocate = utilities.storage.allocate + +local trace_registers = false trackers.register("structures.registers", function(v) trace_registers = v end) + +local report_registers = logs.reporter("structure","registers") + +local structures = structures +local registers = structures.registers +local helpers = structures.helpers +local sections = structures.sections +local documents = structures.documents +local pages = structures.pages +local references = structures.references + +local mappings = sorters.mappings +local entries = sorters.entries +local replacements = sorters.replacements + +local processors = typesetters.processors +local splitprocessor = processors.split + +local variables = interfaces.variables +local context = context + +local matchingtilldepth, numberatdepth = sections.matchingtilldepth, sections.numberatdepth + +-- some day we will share registers and lists (although there are some conceptual +-- differences in the application of keywords) + +local function filtercollected(names,criterium,number,collected,prevmode) + if not criterium or criterium == "" then criterium = variables.all end + local data = documents.data + local numbers, depth = data.numbers, data.depth + local hash, result, nofresult, all, detail = { }, { }, 0, not names or names == "" or names == variables.all, nil + if not all then + for s in gmatch(names,"[^, ]+") do + hash[s] = true + end + end + if criterium == variables.all or criterium == variables.text then + for i=1,#collected do + local v = collected[i] + if all then + nofresult = nofresult + 1 + result[nofresult] = v + else + local vmn = v.metadata and v.metadata.name + if hash[vmn] then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + end + elseif criterium == variables.current then + for i=1,#collected do + local v = collected[i] + local sectionnumber = sections.collected[v.references.section] + if sectionnumber then + local cnumbers = sectionnumber.numbers + if prevmode then + if (all or hash[v.metadata.name]) and #cnumbers >= depth then -- is the = ok for lists as well? 
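-- [editor's note, not part of the original patch] in prevmode every section
-- number up to the current depth must match exactly; in the non-prevmode
-- branch below a zero in the candidate's numbers acts as a wildcard and the
-- candidate must sit strictly deeper than the current depth.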
+ local ok = true + for d=1,depth do + if not (cnumbers[d] == numbers[d]) then -- no zero test + ok = false + break + end + end + if ok then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + else + if (all or hash[v.metadata.name]) and #cnumbers > depth then + local ok = true + for d=1,depth do + local cnd = cnumbers[d] + if not (cnd == 0 or cnd == numbers[d]) then + ok = false + break + end + end + if ok then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + end + end + end + elseif criterium == variables.previous then + for i=1,#collected do + local v = collected[i] + local sectionnumber = sections.collected[v.references.section] + if sectionnumber then + local cnumbers = sectionnumber.numbers + if (all or hash[v.metadata.name]) and #cnumbers >= depth then + local ok = true + if prevmode then + for d=1,depth do + if not (cnumbers[d] == numbers[d]) then + ok = false + break + end + end + else + for d=1,depth do + local cnd = cnumbers[d] + if not (cnd == 0 or cnd == numbers[d]) then + ok = false + break + end + end + end + if ok then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + end + end + elseif criterium == variables["local"] then + if sections.autodepth(data.numbers) == 0 then + return filtercollected(names,variables.all,number,collected,prevmode) + else + return filtercollected(names,variables.current,number,collected,prevmode) + end + else -- sectionname, number + -- beware, this works ok for registers + local depth = sections.getlevel(criterium) + local number = tonumber(number) or numberatdepth(depth) or 0 + if trace_registers then + detail = format("depth: %s, number: %s, numbers: %s, startset: %s",depth,number,concat(sections.numbers(),".",1,depth),#collected) + end + if number > 0 then + for i=1,#collected do + local v = collected[i] + local r = v.references + if r then + local sectionnumber = sections.collected[r.section] + if sectionnumber then + local metadata = v.metadata + local cnumbers = sectionnumber.numbers + if cnumbers then + if (all or hash[metadata.name or false]) and #cnumbers >= depth and matchingtilldepth(depth,cnumbers) then + nofresult = nofresult + 1 + result[nofresult] = v + end + end + end + end + end + end + end + if trace_registers then + if detail then + report_registers("criterium %a, detail %a, found %a",criterium,detail,#result) + else + report_registers("criterium %a, detail %a, found %a",criterium,nil,#result) + end + end + return result +end + +local tobesaved = allocate() +local collected = allocate() + +registers.collected = collected +registers.tobesaved = tobesaved +registers.filtercollected = filtercollected + +-- we follow a different strategy than by lists, where we have a global +-- result table; we might do that here as well but since sorting code is +-- older we delay that decision + +local function initializer() + tobesaved = registers.tobesaved + collected = registers.collected + local internals = references.internals + for name, list in next, collected do + local entries = list.entries + for e=1,#entries do + local entry = entries[e] + local r = entry.references + if r then + local internal = r and r.internal + if internal then + internals[internal] = entry + end + end + end + end +end + +job.register('structures.registers.collected', tobesaved, initializer) + +local function allocate(class) + local d = tobesaved[class] + if not d then + d = { + metadata = { + language = 'en', + sorted = false, + class = class + }, + entries = { }, + } + tobesaved[class] = d + end + return d +end + 
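-- A quick orientation sketch (hand-written here, with a made-up class name and
-- entry values): the allocator above yields one table of this shape per register
-- class, and its entries are what filtercollected() walks for a given criterium.

local sample_register = {                    -- e.g. tobesaved["index"], hypothetically
    metadata = { language = "en", sorted = false, class = "index" },
    entries  = {
        {
            metadata   = { name = "index", kind = "entry" },
            references = { realpage = 12, internal = 1 },
            list       = { { "word", "sortkey" } },          -- entry text plus sort key
        },
    },
}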
+registers.define = allocate + +local entrysplitter = lpeg.tsplitat('+') -- & obsolete in mkiv + +local tagged = { } + +local function preprocessentries(rawdata) + local entries = rawdata.entries + if entries then +--~ table.print(rawdata) + local e, k = entries[1] or "", entries[2] or "" + local et, kt, entryproc, pageproc + if type(e) == "table" then + et = e + else + entryproc, e = splitprocessor(e) + et = lpegmatch(entrysplitter,e) + end + if type(k) == "table" then + kt = k + else + pageproc, k = splitprocessor(k) + kt = lpegmatch(entrysplitter,k) + end + entries = { } + for k=1,#et do + entries[k] = { et[k] or "", kt[k] or "" } + end + for k=#et,1,-1 do + if entries[k][1] ~= "" then + break + else + entries[k] = nil + end + end + rawdata.list = entries + if pageproc or entryproc then + rawdata.processors = { entryproc, pageproc } + end + rawdata.entries = nil + end + local seeword = rawdata.seeword + if seeword then + seeword.processor, seeword.text = splitprocessor(seeword.text or "") + end +end + +function registers.store(rawdata) -- metadata, references, entries + local data = allocate(rawdata.metadata.name).entries + local references = rawdata.references + references.realpage = references.realpage or 0 -- just to be sure as it can be refered to + preprocessentries(rawdata) + data[#data+1] = rawdata + local label = references.label + if label and label ~= "" then tagged[label] = #data end + context(#data) +end + +function registers.enhance(name,n) + local r = tobesaved[name].entries[n] + if r then + r.references.realpage = texcount.realpageno + end +end + +function registers.extend(name,tag,rawdata) -- maybe do lastsection internally + if type(tag) == "string" then + tag = tagged[tag] + end + if tag then + local r = tobesaved[name].entries[tag] + if r then + local rr = r.references + rr.lastrealpage = texcount.realpageno + rr.lastsection = sections.currentid() + if rawdata then + if rawdata.entries then + preprocessentries(rawdata) + end + for k,v in next, rawdata do + if not r[k] then + r[k] = v + else + local rk = r[k] + for kk,vv in next, v do + if type(vv) == "table" then + if next(vv) then + rk[kk] = vv + end + elseif vv ~= "" then + rk[kk] = vv + end + end + end + end + end + end + end +end + +-- sorting and rendering + +local compare = sorters.comparers.basic + +function registers.compare(a,b) + local result = compare(a,b) + if result ~= 0 then + return result + else + local ka, kb = a.metadata.kind, b.metadata.kind + if ka == kb then + local page_a, page_b = a.references.realpage, b.references.realpage + if not page_a or not page_b then + return 0 + elseif page_a < page_b then + return -1 + elseif page_a > page_b then + return 1 + end + elseif ka == "see" then + return 1 + elseif kb == "see" then + return -1 + end + end + return 0 +end + +function registers.filter(data,options) + data.result = registers.filtercollected(nil,options.criterium,options.number,data.entries,true) +end + +local seeindex = 0 + +-- meerdere loops, seewords, dan words, an seewords + +local function crosslinkseewords(result) -- all words + -- collect all seewords + local seewords = { } + for i=1,#result do + local data = result[i] + local seeword = data.seeword + if seeword then + local seetext = seeword.text + if seetext and not seewords[seetext] then + seeindex = seeindex + 1 + seewords[seetext] = seeindex + if trace_registers then + report_registers("see word %03i: %s",seeindex,seetext) + end + end + end + end + -- mark seeparents + local seeparents = { } + for i=1,#result do + local data = 
result[i] + local word = data.list[1] + word = word and word[1] + if word then + local seeindex = seewords[word] + if seeindex then + seeparents[word] = data + data.references.seeparent = seeindex + if trace_registers then + report_registers("see parent %03i: %s",seeindex,word) + end + end + end + end + -- mark seewords and extend sort list + for i=1,#result do + local data = result[i] + local seeword = data.seeword + if seeword then + local text = seeword.text + if text then + local seeparent = seeparents[text] + if seeparent then + local seeindex = seewords[text] + local s, ns, d, w, l = { }, 0, data.split, seeparent.split, data.list + -- trick: we influence sorting by adding fake subentries + for i=1,#d do + ns = ns + 1 + s[ns] = d[i] -- parent + end + for i=1,#w do + ns = ns + 1 + s[ns] = w[i] -- see + end + data.split = s + -- we also register a fake extra list entry so that the + -- collapser works okay + l[#l+1] = { text, "" } + data.references.seeindex = seeindex + if trace_registers then + report_registers("see crosslink %03i: %s",seeindex,text) + end + end + end + end + end +end + +local function removeemptyentries(result) + local i, n, m = 1, #result, 0 + while i <= n do + local entry = result[i] + if #entry.list == 0 or #entry.split == 0 then + remove(result,i) + n = n - 1 + m = m + 1 + else + i = i + 1 + end + end + if m > 0 then + report_registers("%s empty entries removed in register",m) + end +end + +function registers.prepare(data) + -- data has 'list' table + local strip = sorters.strip + local splitter = sorters.splitters.utf + local result = data.result + if result then + for i=1, #result do + local entry, split = result[i], { } + local list = entry.list + if list then + for l=1,#list do + local ll = list[l] + local word, key = ll[1], ll[2] + if not key or key == "" then + key = word + end + split[l] = splitter(strip(key)) + end + end + entry.split = split + end + removeemptyentries(result) + crosslinkseewords(result) + end +end + +function registers.sort(data,options) + sorters.sort(data.result,registers.compare) +end + +function registers.unique(data,options) + local result, nofresult, prev = { }, 0, nil + local dataresult = data.result + for k=1,#dataresult do + local v = dataresult[k] + if prev then + local pr, vr = prev.references, v.references + if not equal(prev.list,v.list) then + -- ok + elseif pr.realpage ~= vr.realpage then + -- ok + else + local pl, vl = pr.lastrealpage, vr.lastrealpage + if pl or vl then + if not vl then + -- ok + elseif not pl then + -- ok + elseif pl ~= vl then + -- ok + else + v = nil + end + else + v = nil + end + end + end + if v then + nofresult = nofresult + 1 + result[nofresult] = v + prev = v + end + end + data.result = result +end + +function registers.finalize(data,options) -- maps character to index (order) + local result = data.result + data.metadata.nofsorted = #result + local split, nofsplit, lasttag, done, nofdone = { }, 0, nil, nil, 0 + local firstofsplit = sorters.firstofsplit + for k=1,#result do + local v = result[k] + local entry, tag = firstofsplit(v) + if tag ~= lasttag then + if trace_registers then + report_registers("splitting at %a",tag) + end + done, nofdone = { }, 0 + nofsplit = nofsplit + 1 + split[nofsplit] = { tag = tag, data = done } + lasttag = tag + end + nofdone = nofdone + 1 + done[nofdone] = v + end + data.result = split +end + +function registers.analyzed(class,options) + local data = collected[class] + if data and data.entries then + options = options or { } + 
sorters.setlanguage(options.language,options.method,options.numberorder) + registers.filter(data,options) -- filter entries into results (criteria) + registers.prepare(data,options) -- adds split table parallel to list table + registers.sort(data,options) -- sorts results + registers.unique(data,options) -- get rid of duplicates + registers.finalize(data,options) -- split result in ranges + data.metadata.sorted = true + return data.metadata.nofsorted or 0 + else + return 0 + end +end + +-- todo take conversion from index + +function registers.userdata(index,name) + local data = references.internals[tonumber(index)] + data = data and data.userdata and data.userdata[name] + if data then + context(data) + end +end + +-- todo: ownnumber + +local function pagerange(f_entry,t_entry,is_last,prefixspec,pagespec) + local fer, ter = f_entry.references, t_entry.references + context.registerpagerange( + f_entry.processors and f_entry.processors[2] or "", + fer.internal or 0, + fer.realpage or 0, + function() + helpers.prefixpage(f_entry,prefixspec,pagespec) + end, + ter.internal or 0, + ter.lastrealpage or ter.realpage or 0, + function() + if is_last then + helpers.prefixlastpage(t_entry,prefixspec,pagespec) -- swaps page and realpage keys + else + helpers.prefixpage (t_entry,prefixspec,pagespec) + end + end + ) +end + +local function pagenumber(entry,prefixspec,pagespec) + local er = entry.references + context.registeronepage( + entry.processors and entry.processors[2] or "", + er.internal or 0, + er.realpage or 0, + function() helpers.prefixpage(entry,prefixspec,pagespec) end + ) +end + +local function collapsedpage(pages) + for i=2,#pages do + local first, second = pages[i-1], pages[i] + local first_first, first_last, second_first, second_last = first[1], first[2], second[1], second[2] + local first_last_pn = first_last .references.realpage + local second_first_pn = second_first.references.realpage + local second_last_pn = second_last .references.realpage + local first_last_last = first_last .references.lastrealpage + local second_first_last = second_first.references.lastrealpage + if first_last_last then + first_last_pn = first_last_last + if second_first == second_last and second_first_pn <= first_last_pn then + -- 2=8, 5 -> 12=8 + remove(pages,i) + return true + elseif second_first == second_last and second_first_pn > first_last_pn then + -- 2=8, 9 -> 2-9 + pages[i-1] = { first_first, second_last } + remove(pages,i) + return true + elseif second_last_pn < first_last_pn then + -- 2=8, 3-4 -> 2=8 + remove(pages,i) + return true + elseif first_last_pn < second_last_pn then + -- 2=8, 3-9 -> 2-9 + pages[i-1] = { first_first, second_last } + remove(pages,i) + return true + elseif first_last_pn + 1 == second_first_pn and second_last_pn > first_last_pn then + -- 2=8, 9-11 -> 2-11 + pages[i-1] = { first_first, second_last } + remove(pages,i) + return true + elseif second_first.references.lastrealpage then + -- 2=8, 9=11 -> 2-11 + pages[i-1] = { first_first, second_last } + remove(pages,i) + return true + end + elseif second_first_last then + second_first_pn = second_first_last + if first_last_pn == second_first_pn then + -- 2-4, 5=9 -> 2-9 + pages[i-1] = { first_first, second_last } + remove(pages,i) + return true + end + elseif first_last_pn == second_first_pn then + -- 2-3, 3-4 -> 2-4 + pages[i-1] = { first_last, second_last } + remove(pages,i) + return true + end + end + return false +end + +local function collapsepages(pages) + while collapsedpage(pages) do end + return #pages +end + +function 
registers.flush(data,options,prefixspec,pagespec) + local collapse_singles = options.compress == variables.yes + local collapse_ranges = options.compress == variables.all + local result = data.result + context.startregisteroutput() + for i=1,#result do + -- ranges need checking ! + local sublist = result[i] + local done = { false, false, false, false } + local data = sublist.data + local d, n = 0, 0 + context.startregistersection(sublist.tag) + for d=1,#data do + local entry = data[d] + if entry.metadata.kind == "see" then + local list = entry.list + if #list > 1 then + list[#list] = nil + else + -- we have an \seeindex{Foo}{Bar} without Foo being defined anywhere + report_registers("invalid see entry in register %a, reference %a",entry.metadata.name,list[1][1]) + end + end + end + while d < #data do + d = d + 1 + local entry = data[d] + local e = { false, false, false, false } + local metadata = entry.metadata + local kind = metadata.kind + local list = entry.list + for i=1,4 do -- max 4 + if list[i] then + e[i] = list[i][1] + end + if e[i] ~= done[i] then + if e[i] and e[i] ~= "" then + done[i] = e[i] + if n == i then + context.stopregisterentries() + context.startregisterentries(n) + else + while n > i do + n = n - 1 + context.stopregisterentries() + end + while n < i do + n = n + 1 + context.startregisterentries(n) + end + end + local internal = entry.references.internal or 0 + local seeparent = entry.references.seeparent or "" + local processor = entry.processors and entry.processors[1] or "" + if metadata then + context.registerentry(processor,internal,seeparent,function() helpers.title(e[i],metadata) end) + else -- ? + context.registerentry(processor,internal,seeindex,e[i]) + end + else + done[i] = false + end + end + end + if kind == 'entry' then + context.startregisterpages() + if collapse_singles or collapse_ranges then + -- we collapse ranges and keep existing ranges as they are + -- so we get prebuilt as well as built ranges + local first, last, prev, pages, dd, nofpages = entry, nil, entry, { }, d, 0 + while dd < #data do + dd = dd + 1 + local next = data[dd] + if next and next.metadata.kind == "see" then + dd = dd - 1 + break + else + local el, nl = entry.list, next.list + if not equal(el,nl) then + dd = dd - 1 + --~ first = nil + break + elseif next.references.lastrealpage then + nofpages = nofpages + 1 + pages[nofpages] = first and { first, last or first } or { entry, entry } + nofpages = nofpages + 1 + pages[nofpages] = { next, next } + first, last, prev = nil, nil, nil + elseif not first then + first, prev = next, next + elseif next.references.realpage - prev.references.realpage == 1 then -- 1 ? 
+ last, prev = next, next + else + nofpages = nofpages + 1 + pages[nofpages] = { first, last or first } + first, last, prev = next, nil, next + end + end + end + if first then + nofpages = nofpages + 1 + pages[nofpages] = { first, last or first } + end + if collapse_ranges and nofpages > 1 then + nofpages = collapsepages(pages) + end + if nofpages > 0 then -- or 0 + d = dd + for p=1,nofpages do + local first, last = pages[p][1], pages[p][2] + if first == last then + if first.references.lastrealpage then + pagerange(first,first,true,prefixspec,pagespec) + else + pagenumber(first,prefixspec,pagespec) + end + elseif last.references.lastrealpage then + pagerange(first,last,true,prefixspec,pagespec) + else + pagerange(first,last,false,prefixspec,pagespec) + end + end + elseif entry.references.lastrealpage then + pagerange(entry,entry,true,prefixspec,pagespec) + else + pagenumber(entry,prefixspec,pagespec) + end + else + while true do + if entry.references.lastrealpage then + pagerange(entry,entry,true,prefixspec,pagespec) + else + pagenumber(entry,prefixspec,pagespec) + end + if d == #data then + break + else + d = d + 1 + local next = data[d] + if next.metadata.kind == "see" or not equal(entry.list,next.list) then + d = d - 1 + break + else + entry = next + end + end + end + end + context.stopregisterpages() + elseif kind == 'see' then + local t, nt = { }, 0 + while true do + nt = nt + 1 + t[nt] = entry + if d == #data then + break + else + d = d + 1 + local next = data[d] + if next.metadata.kind ~= "see" or not equal(entry.list,next.list) then + d = d - 1 + break + else + entry = next + end + end + end + context.startregisterseewords() + for i=1,nt do + local entry = t[i] + local seeword = entry.seeword + local seetext = seeword.text or "" + local processor = seeword.processor or (entry.processors and entry.processors[1]) or "" + local seeindex = entry.references.seeindex or "" + context.registerseeword(i,n,processor,0,seeindex,seetext) + end + context.stopregisterseewords() + end + end + while n > 0 do + context.stopregisterentries() + n = n - 1 + end + context.stopregistersection() + end + context.stopregisteroutput() + -- for now, maybe at some point we will do a multipass or so + data.result = nil + data.metadata.sorted = false +end + +function registers.analyze(class,options) + context(registers.analyzed(class,options)) +end + +function registers.process(class,...) + if registers.analyzed(class,...) > 0 then + registers.flush(collected[class],...) + end +end + diff --git a/tex/context/base/strc-rsc.lua b/tex/context/base/strc-rsc.lua index a90f577e3..34a532928 100644 --- a/tex/context/base/strc-rsc.lua +++ b/tex/context/base/strc-rsc.lua @@ -1,154 +1,154 @@ -if not modules then modules = { } end modules ['strc-rsc'] = { - version = 1.001, - comment = "companion to strc-ref.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- The scanner is in a separate module so that we can test without too --- many dependencies. - --- The scanner accepts nested outer, but we don't care too much, maybe --- some day we will have both but currently the innermost wins. 
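-- For orientation, a hedged sketch of what the splitter defined below is meant
-- to return (the keys come from the lpeg captures: component, outer, special,
-- operation, arguments, inner, has_tex); the sample values are illustrative only:
--
--   splitreference([[outer::special(operation{argument})]])
--     --> { outer = "outer", special = "special",
--           operation = "operation", arguments = "argument" }
--
--   splitreference([[inner{\argument}]])
--     --> { inner = "inner", arguments = [[\argument]], has_tex = true }
--
--   splitprefix([[component:inner]])  --> "component", "inner"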
- -local lpegmatch, lpegP, lpegS, lpegCs, lpegCt, lpegCf, lpegCc, lpegC, lpegCg = lpeg.match, lpeg.P, lpeg.S, lpeg.Cs, lpeg.Ct, lpeg.Cf, lpeg.Cc, lpeg.C, lpeg.Cg -local find = string.find - -local spaces = lpegP(" ")^0 -local lparent = lpegP("(") -local rparent = lpegP(")") -local lbrace = lpegP("{") -local rbrace = lpegP("}") -local tcolon = lpegP(":::") -- component or outer -local dcolon = lpegP("::") -- outer -local scolon = lpegP(":") -- prefix -local backslash = lpegP("\\") - - lparent = spaces * lparent * spaces - rparent = spaces * rparent * spaces - lbrace = spaces * lbrace * spaces - rbrace = spaces * rbrace * spaces - tcolon = spaces * tcolon * spaces - dcolon = spaces * dcolon * spaces - -local endofall = spaces * lpegP(-1) - -local o_token = 1 - rparent - rbrace - lparent - lbrace -- can be made more efficient -local a_token = 1 - rbrace -local s_token = 1 - lparent - lbrace -local i_token = 1 - lparent - lbrace - endofall -local f_token = 1 - lparent - lbrace - dcolon -local c_token = 1 - lparent - lbrace - tcolon - -local hastexcode = lpegCg(lpegCc("has_tex") * lpegCc(true)) -- cannot be made to work -local component = lpegCg(lpegCc("component") * lpegCs(c_token^1)) -local outer = lpegCg(lpegCc("outer") * lpegCs(f_token^1)) -local operation = lpegCg(lpegCc("operation") * lpegCs(o_token^1)) -local arguments = lpegCg(lpegCc("arguments") * lpegCs(a_token^0)) -local special = lpegCg(lpegCc("special") * lpegCs(s_token^1)) -local inner = lpegCg(lpegCc("inner") * lpegCs(i_token^1)) - - arguments = (lbrace * arguments * rbrace)^-1 - component = component * tcolon - outer = outer * dcolon - operation = outer^-1 * operation -- special case: page(file::1) and file::page(1) - inner = inner * arguments - special = special * lparent * (operation * arguments)^-1 * rparent - -local referencesplitter = spaces * lpegCf (lpegCt("") * (component + outer)^-1 * (special + inner)^-1 * endofall, rawset) -local prefixsplitter = lpegCs(lpegP((1-scolon)^1 * scolon)) * #-scolon * lpegCs(lpegP(1)^1) -local componentsplitter = lpegCs(lpegP((1-scolon)^1)) * scolon * #-scolon * lpegCs(lpegP(1)^1) - -prefixsplitter = componentsplitter - -local function splitreference(str) - if str and str ~= "" then - local t = lpegmatch(referencesplitter,str) - if t then - local a = t.arguments - if a and find(a,"\\") then - t.has_tex = true - else - local o = t.arguments - if o and find(o,"\\") then - t.has_tex = true - end - end - return t - end - end -end - -local function splitprefix(str) - return lpegmatch(prefixsplitter,str) -end - -local function splitcomponent(str) - return lpegmatch(componentsplitter,str) -end - --- register in the right namespace - -structures = structures or { } -structures.references = structures.references or { } -local references = structures.references - -references.referencesplitter = referencesplitter -references.splitreference = splitreference -references.prefixsplitter = prefixsplitter -references.splitprefix = splitprefix -references.componentsplitter = componentsplitter -references.splitcomponent = splitcomponent - --- test code: - --- inspect(splitreference([[component:::inner]])) --- inspect(splitprefix([[component:::inner]])) --- inspect(splitprefix([[component:inner]])) - --- inspect(splitreference([[ ]])) --- inspect(splitreference([[ inner ]])) --- inspect(splitreference([[ special ( operation { argument, argument } ) ]])) --- inspect(splitreference([[ special ( operation { argument } ) ]])) --- inspect(splitreference([[ special ( operation { argument, \argument } ) ]])) --- 
inspect(splitreference([[ special ( operation { \argument } ) ]])) --- inspect(splitreference([[ special ( operation ) ]])) --- inspect(splitreference([[ special ( \operation ) ]])) --- inspect(splitreference([[ special ( o\peration ) ]])) --- inspect(splitreference([[ special ( ) ]])) --- inspect(splitreference([[ inner { argument } ]])) --- inspect(splitreference([[ inner { \argument } ]])) --- inspect(splitreference([[ inner { ar\gument } ]])) --- inspect(splitreference([[inner{a\rgument}]])) --- inspect(splitreference([[ inner { argument, argument } ]])) --- inspect(splitreference([[ inner { argument, \argument } ]])) -- fails: bug in lpeg? --- inspect(splitreference([[ inner { \argument, \argument } ]])) --- inspect(splitreference([[ outer :: ]])) --- inspect(splitreference([[ outer :: inner]])) --- inspect(splitreference([[ outer :: special (operation { argument,argument } ) ]])) --- inspect(splitreference([[ outer :: special (operation { } )]])) --- inspect(splitreference([[ outer :: special ( operation { argument, \argument } ) ]])) --- inspect(splitreference([[ outer :: special ( operation ) ]])) --- inspect(splitreference([[ outer :: special ( \operation ) ]])) --- inspect(splitreference([[ outer :: special ( ) ]])) --- inspect(splitreference([[ outer :: inner { argument } ]])) --- inspect(splitreference([[ special ( outer :: operation ) ]])) - --- inspect(splitreference([[]])) --- inspect(splitreference([[inner]])) --- inspect(splitreference([[special(operation{argument,argument})]])) --- inspect(splitreference([[special(operation)]])) --- inspect(splitreference([[special(\operation)]])) --- inspect(splitreference([[special()]])) --- inspect(splitreference([[inner{argument}]])) --- inspect(splitreference([[inner{\argument}]])) --- inspect(splitreference([[outer::]])) --- inspect(splitreference([[outer::inner]])) --- inspect(splitreference([[outer::special(operation{argument,argument})]])) --- inspect(splitreference([[outer::special(operation{argument,\argument})]])) --- inspect(splitreference([[outer::special(operation)]])) --- inspect(splitreference([[outer::special(\operation)]])) --- inspect(splitreference([[outer::special()]])) --- inspect(splitreference([[outer::inner{argument}]])) --- inspect(splitreference([[special(outer::operation)]])) +if not modules then modules = { } end modules ['strc-rsc'] = { + version = 1.001, + comment = "companion to strc-ref.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- The scanner is in a separate module so that we can test without too +-- many dependencies. + +-- The scanner accepts nested outer, but we don't care too much, maybe +-- some day we will have both but currently the innermost wins. 
+ +local lpegmatch, lpegP, lpegS, lpegCs, lpegCt, lpegCf, lpegCc, lpegC, lpegCg = lpeg.match, lpeg.P, lpeg.S, lpeg.Cs, lpeg.Ct, lpeg.Cf, lpeg.Cc, lpeg.C, lpeg.Cg +local find = string.find + +local spaces = lpegP(" ")^0 +local lparent = lpegP("(") +local rparent = lpegP(")") +local lbrace = lpegP("{") +local rbrace = lpegP("}") +local tcolon = lpegP(":::") -- component or outer +local dcolon = lpegP("::") -- outer +local scolon = lpegP(":") -- prefix +local backslash = lpegP("\\") + + lparent = spaces * lparent * spaces + rparent = spaces * rparent * spaces + lbrace = spaces * lbrace * spaces + rbrace = spaces * rbrace * spaces + tcolon = spaces * tcolon * spaces + dcolon = spaces * dcolon * spaces + +local endofall = spaces * lpegP(-1) + +local o_token = 1 - rparent - rbrace - lparent - lbrace -- can be made more efficient +local a_token = 1 - rbrace +local s_token = 1 - lparent - lbrace +local i_token = 1 - lparent - lbrace - endofall +local f_token = 1 - lparent - lbrace - dcolon +local c_token = 1 - lparent - lbrace - tcolon + +local hastexcode = lpegCg(lpegCc("has_tex") * lpegCc(true)) -- cannot be made to work +local component = lpegCg(lpegCc("component") * lpegCs(c_token^1)) +local outer = lpegCg(lpegCc("outer") * lpegCs(f_token^1)) +local operation = lpegCg(lpegCc("operation") * lpegCs(o_token^1)) +local arguments = lpegCg(lpegCc("arguments") * lpegCs(a_token^0)) +local special = lpegCg(lpegCc("special") * lpegCs(s_token^1)) +local inner = lpegCg(lpegCc("inner") * lpegCs(i_token^1)) + + arguments = (lbrace * arguments * rbrace)^-1 + component = component * tcolon + outer = outer * dcolon + operation = outer^-1 * operation -- special case: page(file::1) and file::page(1) + inner = inner * arguments + special = special * lparent * (operation * arguments)^-1 * rparent + +local referencesplitter = spaces * lpegCf (lpegCt("") * (component + outer)^-1 * (special + inner)^-1 * endofall, rawset) +local prefixsplitter = lpegCs(lpegP((1-scolon)^1 * scolon)) * #-scolon * lpegCs(lpegP(1)^1) +local componentsplitter = lpegCs(lpegP((1-scolon)^1)) * scolon * #-scolon * lpegCs(lpegP(1)^1) + +prefixsplitter = componentsplitter + +local function splitreference(str) + if str and str ~= "" then + local t = lpegmatch(referencesplitter,str) + if t then + local a = t.arguments + if a and find(a,"\\") then + t.has_tex = true + else + local o = t.arguments + if o and find(o,"\\") then + t.has_tex = true + end + end + return t + end + end +end + +local function splitprefix(str) + return lpegmatch(prefixsplitter,str) +end + +local function splitcomponent(str) + return lpegmatch(componentsplitter,str) +end + +-- register in the right namespace + +structures = structures or { } +structures.references = structures.references or { } +local references = structures.references + +references.referencesplitter = referencesplitter +references.splitreference = splitreference +references.prefixsplitter = prefixsplitter +references.splitprefix = splitprefix +references.componentsplitter = componentsplitter +references.splitcomponent = splitcomponent + +-- test code: + +-- inspect(splitreference([[component:::inner]])) +-- inspect(splitprefix([[component:::inner]])) +-- inspect(splitprefix([[component:inner]])) + +-- inspect(splitreference([[ ]])) +-- inspect(splitreference([[ inner ]])) +-- inspect(splitreference([[ special ( operation { argument, argument } ) ]])) +-- inspect(splitreference([[ special ( operation { argument } ) ]])) +-- inspect(splitreference([[ special ( operation { argument, \argument } ) ]])) +-- 
inspect(splitreference([[ special ( operation { \argument } ) ]])) +-- inspect(splitreference([[ special ( operation ) ]])) +-- inspect(splitreference([[ special ( \operation ) ]])) +-- inspect(splitreference([[ special ( o\peration ) ]])) +-- inspect(splitreference([[ special ( ) ]])) +-- inspect(splitreference([[ inner { argument } ]])) +-- inspect(splitreference([[ inner { \argument } ]])) +-- inspect(splitreference([[ inner { ar\gument } ]])) +-- inspect(splitreference([[inner{a\rgument}]])) +-- inspect(splitreference([[ inner { argument, argument } ]])) +-- inspect(splitreference([[ inner { argument, \argument } ]])) -- fails: bug in lpeg? +-- inspect(splitreference([[ inner { \argument, \argument } ]])) +-- inspect(splitreference([[ outer :: ]])) +-- inspect(splitreference([[ outer :: inner]])) +-- inspect(splitreference([[ outer :: special (operation { argument,argument } ) ]])) +-- inspect(splitreference([[ outer :: special (operation { } )]])) +-- inspect(splitreference([[ outer :: special ( operation { argument, \argument } ) ]])) +-- inspect(splitreference([[ outer :: special ( operation ) ]])) +-- inspect(splitreference([[ outer :: special ( \operation ) ]])) +-- inspect(splitreference([[ outer :: special ( ) ]])) +-- inspect(splitreference([[ outer :: inner { argument } ]])) +-- inspect(splitreference([[ special ( outer :: operation ) ]])) + +-- inspect(splitreference([[]])) +-- inspect(splitreference([[inner]])) +-- inspect(splitreference([[special(operation{argument,argument})]])) +-- inspect(splitreference([[special(operation)]])) +-- inspect(splitreference([[special(\operation)]])) +-- inspect(splitreference([[special()]])) +-- inspect(splitreference([[inner{argument}]])) +-- inspect(splitreference([[inner{\argument}]])) +-- inspect(splitreference([[outer::]])) +-- inspect(splitreference([[outer::inner]])) +-- inspect(splitreference([[outer::special(operation{argument,argument})]])) +-- inspect(splitreference([[outer::special(operation{argument,\argument})]])) +-- inspect(splitreference([[outer::special(operation)]])) +-- inspect(splitreference([[outer::special(\operation)]])) +-- inspect(splitreference([[outer::special()]])) +-- inspect(splitreference([[outer::inner{argument}]])) +-- inspect(splitreference([[special(outer::operation)]])) diff --git a/tex/context/base/strc-syn.lua b/tex/context/base/strc-syn.lua index ca4b3ac18..a6d49715a 100644 --- a/tex/context/base/strc-syn.lua +++ b/tex/context/base/strc-syn.lua @@ -1,198 +1,198 @@ -if not modules then modules = { } end modules ['strc-syn'] = { - version = 1.001, - comment = "companion to str-syn.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next, type = next, type -local format = string.format -local allocate = utilities.storage.allocate - --- interface to tex end - -local structures = structures -local synonyms = structures.synonyms -local tags = structures.tags - -local collected = allocate() -local tobesaved = allocate() - -synonyms.collected = collected -synonyms.tobesaved = tobesaved - -local function initializer() - collected = synonyms.collected - tobesaved = synonyms.tobesaved -end - -local function finalizer() - for entry, data in next, tobesaved do - data.hash = nil - end -end - -job.register('structures.synonyms.collected', tobesaved, initializer, finalizer) - --- todo: allocate becomes metatable - -local function allocate(class) - local d = tobesaved[class] - if not d then - d = { - 
metadata = { - language = 'en', - sorted = false, - class = class - }, - entries = { - }, - hash = { - } - } - tobesaved[class] = d - end - return d -end - -function synonyms.define(class,kind) - local data = allocate(class) - data.metadata.kind = kind -end - -function synonyms.register(class,kind,spec) - local data = allocate(class) - data.metadata.kind = kind -- runtime, not saved in format (yet) - if not data.hash[spec.definition.tag or ""] then - data.entries[#data.entries+1] = spec - data.hash[spec.definition.tag or ""] = spec - end -end - -function synonyms.registerused(class,tag) - local data = allocate(class) - local dht = data.hash[tag] - if dht then - dht.definition.used = true - end -end - -function synonyms.synonym(class,tag) - local data = allocate(class).hash - local d = data[tag] - if d then - local de = d.definition - de.used = true - context(de.synonym) - end -end - -function synonyms.meaning(class,tag) - local data = allocate(class).hash - local d = data[tag] - if d then - local de = d.definition - de.used = true - context(de.meaning) - end -end - -synonyms.compare = sorters.comparers.basic -- (a,b) - -function synonyms.filter(data,options) - local result = { } - local entries = data.entries - local all = options and options.criterium == interfaces.variables.all - for i=1,#entries do - local entry = entries[i] - if all or entry.definition.used then - result[#result+1] = entry - end - end - data.result = result -end - -function synonyms.prepare(data) - local strip = sorters.strip - local splitter = sorters.splitters.utf - local result = data.result - if result then - for i=1, #result do - local r = result[i] - local rd = r.definition - if rd then - local rt = rd.tag - local sortkey = (rt and rt ~= "" and rt) or rd.synonym - r.split = splitter(strip(sortkey)) - end - end - end -end - -function synonyms.sort(data,options) - sorters.sort(data.result,synonyms.compare) -end - -function synonyms.finalize(data,options) - local result = data.result - data.metadata.nofsorted = #result - local split = { } - for k=1,#result do - local v = result[k] - local entry, tag = sorters.firstofsplit(v) - local s = split[entry] -- keeps track of change - if not s then - s = { tag = tag, data = { } } - split[entry] = s - end - s.data[#s.data+1] = v - end - data.result = split -end - --- for now, maybe at some point we will do a multipass or so --- maybe pass the settings differently - -function synonyms.flush(data,options) - local kind = data.metadata.kind -- hack, will be done better - -- context[format("\\start%soutput",kind)]() - local result = data.result - local sorted = table.sortedkeys(result) - for k=1,#sorted do - local letter = sorted[k] - local sublist = result[letter] - local data = sublist.data - -- context[format("\\start%ssection",kind)](sublist.tag) - for d=1,#data do - local entry = data[d].definition - -- context[format("\\%sentry",kind)](d,entry.tag,entry.synonym,entry.meaning or "") - context("\\%sentry{%s}{%s}{%s}{%s}",kind,d,entry.tag,entry.synonym,entry.meaning or "") - end - -- context[format("\\stop%ssection",kind)]() - end - -- context[format("\\stop%soutput",kind)]() - data.result = nil - data.metadata.sorted = false -end - -function synonyms.analyzed(class,options) - local data = synonyms.collected[class] - if data and data.entries then - options = options or { } - sorters.setlanguage(options.language) - synonyms.filter(data,options) -- filters entries to result - synonyms.prepare(data,options) -- adds split table parallel to list table - synonyms.sort(data,options) 
-- sorts entries in result - synonyms.finalize(data,options) -- do things with data.entries - data.metadata.sorted = true - end - return data and data.metadata.sorted and data.result and next(data.result) -end - -function synonyms.process(class,options) - if synonyms.analyzed(class,options) then - synonyms.flush(synonyms.collected[class],options) - end -end - +if not modules then modules = { } end modules ['strc-syn'] = { + version = 1.001, + comment = "companion to str-syn.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next, type = next, type +local format = string.format +local allocate = utilities.storage.allocate + +-- interface to tex end + +local structures = structures +local synonyms = structures.synonyms +local tags = structures.tags + +local collected = allocate() +local tobesaved = allocate() + +synonyms.collected = collected +synonyms.tobesaved = tobesaved + +local function initializer() + collected = synonyms.collected + tobesaved = synonyms.tobesaved +end + +local function finalizer() + for entry, data in next, tobesaved do + data.hash = nil + end +end + +job.register('structures.synonyms.collected', tobesaved, initializer, finalizer) + +-- todo: allocate becomes metatable + +local function allocate(class) + local d = tobesaved[class] + if not d then + d = { + metadata = { + language = 'en', + sorted = false, + class = class + }, + entries = { + }, + hash = { + } + } + tobesaved[class] = d + end + return d +end + +function synonyms.define(class,kind) + local data = allocate(class) + data.metadata.kind = kind +end + +function synonyms.register(class,kind,spec) + local data = allocate(class) + data.metadata.kind = kind -- runtime, not saved in format (yet) + if not data.hash[spec.definition.tag or ""] then + data.entries[#data.entries+1] = spec + data.hash[spec.definition.tag or ""] = spec + end +end + +function synonyms.registerused(class,tag) + local data = allocate(class) + local dht = data.hash[tag] + if dht then + dht.definition.used = true + end +end + +function synonyms.synonym(class,tag) + local data = allocate(class).hash + local d = data[tag] + if d then + local de = d.definition + de.used = true + context(de.synonym) + end +end + +function synonyms.meaning(class,tag) + local data = allocate(class).hash + local d = data[tag] + if d then + local de = d.definition + de.used = true + context(de.meaning) + end +end + +synonyms.compare = sorters.comparers.basic -- (a,b) + +function synonyms.filter(data,options) + local result = { } + local entries = data.entries + local all = options and options.criterium == interfaces.variables.all + for i=1,#entries do + local entry = entries[i] + if all or entry.definition.used then + result[#result+1] = entry + end + end + data.result = result +end + +function synonyms.prepare(data) + local strip = sorters.strip + local splitter = sorters.splitters.utf + local result = data.result + if result then + for i=1, #result do + local r = result[i] + local rd = r.definition + if rd then + local rt = rd.tag + local sortkey = (rt and rt ~= "" and rt) or rd.synonym + r.split = splitter(strip(sortkey)) + end + end + end +end + +function synonyms.sort(data,options) + sorters.sort(data.result,synonyms.compare) +end + +function synonyms.finalize(data,options) + local result = data.result + data.metadata.nofsorted = #result + local split = { } + for k=1,#result do + local v = result[k] + local entry, tag = sorters.firstofsplit(v) 
+ local s = split[entry] -- keeps track of change + if not s then + s = { tag = tag, data = { } } + split[entry] = s + end + s.data[#s.data+1] = v + end + data.result = split +end + +-- for now, maybe at some point we will do a multipass or so +-- maybe pass the settings differently + +function synonyms.flush(data,options) + local kind = data.metadata.kind -- hack, will be done better + -- context[format("\\start%soutput",kind)]() + local result = data.result + local sorted = table.sortedkeys(result) + for k=1,#sorted do + local letter = sorted[k] + local sublist = result[letter] + local data = sublist.data + -- context[format("\\start%ssection",kind)](sublist.tag) + for d=1,#data do + local entry = data[d].definition + -- context[format("\\%sentry",kind)](d,entry.tag,entry.synonym,entry.meaning or "") + context("\\%sentry{%s}{%s}{%s}{%s}",kind,d,entry.tag,entry.synonym,entry.meaning or "") + end + -- context[format("\\stop%ssection",kind)]() + end + -- context[format("\\stop%soutput",kind)]() + data.result = nil + data.metadata.sorted = false +end + +function synonyms.analyzed(class,options) + local data = synonyms.collected[class] + if data and data.entries then + options = options or { } + sorters.setlanguage(options.language) + synonyms.filter(data,options) -- filters entries to result + synonyms.prepare(data,options) -- adds split table parallel to list table + synonyms.sort(data,options) -- sorts entries in result + synonyms.finalize(data,options) -- do things with data.entries + data.metadata.sorted = true + end + return data and data.metadata.sorted and data.result and next(data.result) +end + +function synonyms.process(class,options) + if synonyms.analyzed(class,options) then + synonyms.flush(synonyms.collected[class],options) + end +end + diff --git a/tex/context/base/strc-tag.lua b/tex/context/base/strc-tag.lua index 7e5c6f993..bb083786b 100644 --- a/tex/context/base/strc-tag.lua +++ b/tex/context/base/strc-tag.lua @@ -1,354 +1,354 @@ -if not modules then modules = { } end modules ['strc-tag'] = { - version = 1.001, - comment = "companion to strc-tag.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This is rather experimental code. 
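-- A rough usage sketch (hypothetical element names; the mechanics follow the
-- tags.start/tags.stop definitions further down): each start call appends a
-- "tag[:detail]-n" item to the current chain and stores a snapshot of that chain
-- in taglist, indexed by the attribute value that gets set on the nodes.
--
--   tags.start("document")                        -- chain: { "document-1" }
--   tags.start("section", { detail = "chapter" }) -- chain: { "document-1", "section:chapter-1" }
--   tags.start("sectiontitle")                    -- chain: { ..., "sectiontitle-1" }
--   tags.stop()                                   -- back at the section level
--   tags.stop()
--   tags.stop()                                   -- chain empty again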
- -local insert, remove, unpack, concat = table.insert, table.remove, table.unpack, table.concat -local gsub, find, topattern, format = string.gsub, string.find, string.topattern, string.format -local lpegmatch = lpeg.match -local texattribute = tex.attribute -local allocate = utilities.storage.allocate -local settings_to_hash = utilities.parsers.settings_to_hash - -local trace_tags = false trackers.register("structures.tags", function(v) trace_tags = v end) - -local report_tags = logs.reporter("structure","tags") - -local attributes, structures = attributes, structures - -local a_tagged = attributes.private('tagged') - -local unsetvalue = attributes.unsetvalue -local codeinjections = backends.codeinjections - -local taglist = allocate() -local properties = allocate() -local labels = allocate() -local stack = { } -local chain = { } -local ids = { } -local enabled = false -local tagdata = { } -- used in export -local tagmetadata = { } -- used in export - -local tags = structures.tags -tags.taglist = taglist -- can best be hidden -tags.labels = labels -tags.data = tagdata -tags.metadata = tagmetadata - -local properties = allocate { - - document = { pdf = "Div", nature = "display" }, - - division = { pdf = "Div", nature = "display" }, - paragraph = { pdf = "P", nature = "mixed" }, - p = { pdf = "P", nature = "mixed" }, - construct = { pdf = "Span", nature = "inline" }, - highlight = { pdf = "Span", nature = "inline" }, - - section = { pdf = "Sect", nature = "display" }, - sectiontitle = { pdf = "H", nature = "mixed" }, - sectionnumber = { pdf = "H", nature = "mixed" }, - sectioncontent = { pdf = "Div", nature = "display" }, - - itemgroup = { pdf = "L", nature = "display" }, - item = { pdf = "Li", nature = "display" }, - itemtag = { pdf = "Lbl", nature = "mixed" }, - itemcontent = { pdf = "LBody", nature = "mixed" }, - - description = { pdf = "Div", nature = "display" }, - descriptiontag = { pdf = "Div", nature = "mixed" }, - descriptioncontent = { pdf = "Div", nature = "mixed" }, - descriptionsymbol = { pdf = "Span", nature = "inline" }, -- note reference - - verbatimblock = { pdf = "Code", nature = "display" }, - verbatimlines = { pdf = "Code", nature = "display" }, - verbatimline = { pdf = "Code", nature = "mixed" }, - verbatim = { pdf = "Code", nature = "inline" }, - - lines = { pdf = "Code", nature = "display" }, - line = { pdf = "Code", nature = "mixed" }, - - synonym = { pdf = "Span", nature = "inline" }, - sorting = { pdf = "Span", nature = "inline" }, - - register = { pdf = "Div", nature = "display" }, - registersection = { pdf = "Div", nature = "display" }, - registertag = { pdf = "Span", nature = "mixed" }, - registerentries = { pdf = "Div", nature = "display" }, - registerentry = { pdf = "Span", nature = "mixed" }, - registersee = { pdf = "Span", nature = "mixed" }, - registerpages = { pdf = "Span", nature = "mixed" }, - registerpage = { pdf = "Span", nature = "inline" }, - registerpagerange = { pdf = "Span", nature = "mixed" }, - - table = { pdf = "Table", nature = "display" }, - tablerow = { pdf = "TR", nature = "display" }, - tablecell = { pdf = "TD", nature = "mixed" }, - - tabulate = { pdf = "Table", nature = "display" }, - tabulaterow = { pdf = "TR", nature = "display" }, - tabulatecell = { pdf = "TD", nature = "mixed" }, - - list = { pdf = "TOC", nature = "display" }, - listitem = { pdf = "TOCI", nature = "display" }, - listtag = { pdf = "Lbl", nature = "mixed" }, - listcontent = { pdf = "P", nature = "mixed" }, - listdata = { pdf = "P", nature = "mixed" }, - listpage = { 
pdf = "Reference", nature = "mixed" }, - - delimitedblock = { pdf = "BlockQuote", nature = "display" }, - delimited = { pdf = "Quote", nature = "inline" }, - subsentence = { pdf = "Span", nature = "inline" }, - - label = { pdf = "Span", nature = "mixed" }, - number = { pdf = "Span", nature = "mixed" }, - - float = { pdf = "Div", nature = "display" }, -- Figure - floatcaption = { pdf = "Caption", nature = "mixed" }, - floatlabel = { pdf = "Span", nature = "inline" }, - floatnumber = { pdf = "Span", nature = "inline" }, - floattext = { pdf = "Span", nature = "mixed" }, - floatcontent = { pdf = "P", nature = "mixed" }, - - image = { pdf = "P", nature = "mixed" }, - mpgraphic = { pdf = "P", nature = "mixed" }, - - formulaset = { pdf = "Div", nature = "display" }, - formula = { pdf = "Div", nature = "display" }, -- Formula - formulacaption = { pdf = "Span", nature = "mixed" }, - formulalabel = { pdf = "Span", nature = "mixed" }, - formulanumber = { pdf = "Span", nature = "mixed" }, - formulacontent = { pdf = "P", nature = "display" }, - subformula = { pdf = "Div", nature = "display" }, - - link = { pdf = "Link", nature = "inline" }, - - margintextblock = { pdf = "Span", nature = "inline" }, - margintext = { pdf = "Span", nature = "inline" }, - - math = { pdf = "Div", nature = "inline" }, -- no display - mn = { pdf = "Span", nature = "mixed" }, - mi = { pdf = "Span", nature = "mixed" }, - mo = { pdf = "Span", nature = "mixed" }, - ms = { pdf = "Span", nature = "mixed" }, - mrow = { pdf = "Span", nature = "display" }, - msubsup = { pdf = "Span", nature = "display" }, - msub = { pdf = "Span", nature = "display" }, - msup = { pdf = "Span", nature = "display" }, - merror = { pdf = "Span", nature = "mixed" }, - munderover = { pdf = "Span", nature = "display" }, - munder = { pdf = "Span", nature = "display" }, - mover = { pdf = "Span", nature = "display" }, - mtext = { pdf = "Span", nature = "mixed" }, - mfrac = { pdf = "Span", nature = "display" }, - mroot = { pdf = "Span", nature = "display" }, - msqrt = { pdf = "Span", nature = "display" }, - mfenced = { pdf = "Span", nature = "display" }, - maction = { pdf = "Span", nature = "display" }, - - mtable = { pdf = "Table", nature = "display" }, -- might change - mtr = { pdf = "TR", nature = "display" }, -- might change - mtd = { pdf = "TD", nature = "display" }, -- might change - - ignore = { pdf = "Span", nature = "mixed" }, - metadata = { pdf = "Div", nature = "display" }, - metavariable = { pdf = "Span", nature = "mixed" }, - - mid = { pdf = "Span", nature = "inline" }, - sub = { pdf = "Span", nature = "inline" }, - sup = { pdf = "Span", nature = "inline" }, - subsup = { pdf = "Span", nature = "inline" }, - - combination = { pdf = "Span", nature = "display" }, - combinationpair = { pdf = "Span", nature = "display" }, - combinationcontent = { pdf = "Span", nature = "mixed" }, - combinationcaption = { pdf = "Span", nature = "mixed" }, -} - -function tags.detailedtag(tag,detail,attribute) - if not attribute then - attribute = texattribute[a_tagged] - end - if attribute >= 0 then - local tl = taglist[attribute] - if tl then - local pattern - if detail and detail ~= "" then - pattern = "^" .. tag .. ":".. detail .. "%-" - else - pattern = "^" .. tag .. 
"%-" - end - for i=#tl,1,-1 do - local tli = tl[i] - if find(tli,pattern) then - return tli - end - end - end - else - -- enabled but not auto - end - return false -- handy as bogus index -end - -tags.properties = properties - -local lasttags = { } -local userdata = { } - -tags.userdata = userdata - -function tags.setproperty(tag,key,value) - local p = properties[tag] - if p then - p[key] = value - else - properties[tag] = { [key] = value } - end -end - -function tags.registerdata(data) - local fulltag = chain[nstack] - if fulltag then - tagdata[fulltag] = data - end -end - -local metadata - -function tags.registermetadata(data) - local d = settings_to_hash(data) - if metadata then - table.merge(metadata,d) - else - metadata = d - end -end - -local nstack = 0 - -function tags.start(tag,specification) - local label, detail, user - if specification then - label, detail, user = specification.label, specification.detail, specification.userdata - end - if not enabled then - codeinjections.enabletags() - enabled = true - end - -- ---~ labels[tag] = label ~= "" and label or tag ---~ local fulltag ---~ if detail and detail ~= "" then ---~ fulltag = tag .. ":" .. detail ---~ else ---~ fulltag = tag ---~ end - -- - local fulltag = label ~= "" and label or tag - labels[tag] = fulltag - if detail and detail ~= "" then - fulltag = fulltag .. ":" .. detail - end - -- - local t = #taglist + 1 - local n = (ids[fulltag] or 0) + 1 - ids[fulltag] = n - lasttags[tag] = n - local completetag = fulltag .. "-" .. n - nstack = nstack + 1 - chain[nstack] = completetag - stack[nstack] = t - -- a copy as we can add key values for alt and actualtext if needed: - taglist[t] = { unpack(chain,1,nstack) } - -- - if user and user ~= "" then - -- maybe we should merge this into taglist or whatever ... anyway there is room to optimize - -- taglist.userdata = settings_to_hash(user) - userdata[completetag] = settings_to_hash(user) - end - if metadata then - tagmetadata[completetag] = metadata - metadata = nil - end - texattribute[a_tagged] = t - return t -end - -function tags.restart(completetag) - local t = #taglist + 1 - nstack = nstack + 1 - chain[nstack] = completetag - stack[nstack] = t - taglist[t] = { unpack(chain,1,nstack) } - texattribute[a_tagged] = t - return t -end - -function tags.stop() - if nstack > 0 then - nstack = nstack -1 - end - local t = stack[nstack] - if not t then - if trace_tags then - report_tags("ignoring end tag, previous chain: %s",nstack > 0 and concat(chain[nstack],"",1,nstack) or "none") - end - t = unsetvalue - end - texattribute[a_tagged] = t - return t -end - -function tags.getid(tag,detail) - if detail and detail ~= "" then - return ids[tag .. ":" .. detail] or "?" - else - return ids[tag] or "?" 
- end -end - -function tags.last(tag) - return lasttags[tag] -- or false -end - -function tags.lastinchain() - return chain[nstack] -end - -function structures.atlocation(str) - local location = gsub(concat(taglist[texattribute[a_tagged]],"-"),"%-%d+","") - return find(location,topattern(str)) ~= nil -end - -function tags.handler(head) -- we need a dummy - return head, false -end - -statistics.register("structure elements", function() - if enabled then - if nstack > 0 then - return format("%s element chains identified, open chain: %s ",#taglist,concat(chain," => ",1,nstack)) - else - return format("%s element chains identified",#taglist) - end - end -end) - -directives.register("backend.addtags", function(v) - if not enabled then - codeinjections.enabletags() - enabled = true - end -end) - -commands.starttag = tags.start -commands.stoptag = tags.stop -commands.settagproperty = tags.setproperty +if not modules then modules = { } end modules ['strc-tag'] = { + version = 1.001, + comment = "companion to strc-tag.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This is rather experimental code. + +local insert, remove, unpack, concat = table.insert, table.remove, table.unpack, table.concat +local gsub, find, topattern, format = string.gsub, string.find, string.topattern, string.format +local lpegmatch = lpeg.match +local texattribute = tex.attribute +local allocate = utilities.storage.allocate +local settings_to_hash = utilities.parsers.settings_to_hash + +local trace_tags = false trackers.register("structures.tags", function(v) trace_tags = v end) + +local report_tags = logs.reporter("structure","tags") + +local attributes, structures = attributes, structures + +local a_tagged = attributes.private('tagged') + +local unsetvalue = attributes.unsetvalue +local codeinjections = backends.codeinjections + +local taglist = allocate() +local properties = allocate() +local labels = allocate() +local stack = { } +local chain = { } +local ids = { } +local enabled = false +local tagdata = { } -- used in export +local tagmetadata = { } -- used in export + +local tags = structures.tags +tags.taglist = taglist -- can best be hidden +tags.labels = labels +tags.data = tagdata +tags.metadata = tagmetadata + +local properties = allocate { + + document = { pdf = "Div", nature = "display" }, + + division = { pdf = "Div", nature = "display" }, + paragraph = { pdf = "P", nature = "mixed" }, + p = { pdf = "P", nature = "mixed" }, + construct = { pdf = "Span", nature = "inline" }, + highlight = { pdf = "Span", nature = "inline" }, + + section = { pdf = "Sect", nature = "display" }, + sectiontitle = { pdf = "H", nature = "mixed" }, + sectionnumber = { pdf = "H", nature = "mixed" }, + sectioncontent = { pdf = "Div", nature = "display" }, + + itemgroup = { pdf = "L", nature = "display" }, + item = { pdf = "Li", nature = "display" }, + itemtag = { pdf = "Lbl", nature = "mixed" }, + itemcontent = { pdf = "LBody", nature = "mixed" }, + + description = { pdf = "Div", nature = "display" }, + descriptiontag = { pdf = "Div", nature = "mixed" }, + descriptioncontent = { pdf = "Div", nature = "mixed" }, + descriptionsymbol = { pdf = "Span", nature = "inline" }, -- note reference + + verbatimblock = { pdf = "Code", nature = "display" }, + verbatimlines = { pdf = "Code", nature = "display" }, + verbatimline = { pdf = "Code", nature = "mixed" }, + verbatim = { pdf = "Code", nature = "inline" }, + + lines = { pdf = 
"Code", nature = "display" }, + line = { pdf = "Code", nature = "mixed" }, + + synonym = { pdf = "Span", nature = "inline" }, + sorting = { pdf = "Span", nature = "inline" }, + + register = { pdf = "Div", nature = "display" }, + registersection = { pdf = "Div", nature = "display" }, + registertag = { pdf = "Span", nature = "mixed" }, + registerentries = { pdf = "Div", nature = "display" }, + registerentry = { pdf = "Span", nature = "mixed" }, + registersee = { pdf = "Span", nature = "mixed" }, + registerpages = { pdf = "Span", nature = "mixed" }, + registerpage = { pdf = "Span", nature = "inline" }, + registerpagerange = { pdf = "Span", nature = "mixed" }, + + table = { pdf = "Table", nature = "display" }, + tablerow = { pdf = "TR", nature = "display" }, + tablecell = { pdf = "TD", nature = "mixed" }, + + tabulate = { pdf = "Table", nature = "display" }, + tabulaterow = { pdf = "TR", nature = "display" }, + tabulatecell = { pdf = "TD", nature = "mixed" }, + + list = { pdf = "TOC", nature = "display" }, + listitem = { pdf = "TOCI", nature = "display" }, + listtag = { pdf = "Lbl", nature = "mixed" }, + listcontent = { pdf = "P", nature = "mixed" }, + listdata = { pdf = "P", nature = "mixed" }, + listpage = { pdf = "Reference", nature = "mixed" }, + + delimitedblock = { pdf = "BlockQuote", nature = "display" }, + delimited = { pdf = "Quote", nature = "inline" }, + subsentence = { pdf = "Span", nature = "inline" }, + + label = { pdf = "Span", nature = "mixed" }, + number = { pdf = "Span", nature = "mixed" }, + + float = { pdf = "Div", nature = "display" }, -- Figure + floatcaption = { pdf = "Caption", nature = "mixed" }, + floatlabel = { pdf = "Span", nature = "inline" }, + floatnumber = { pdf = "Span", nature = "inline" }, + floattext = { pdf = "Span", nature = "mixed" }, + floatcontent = { pdf = "P", nature = "mixed" }, + + image = { pdf = "P", nature = "mixed" }, + mpgraphic = { pdf = "P", nature = "mixed" }, + + formulaset = { pdf = "Div", nature = "display" }, + formula = { pdf = "Div", nature = "display" }, -- Formula + formulacaption = { pdf = "Span", nature = "mixed" }, + formulalabel = { pdf = "Span", nature = "mixed" }, + formulanumber = { pdf = "Span", nature = "mixed" }, + formulacontent = { pdf = "P", nature = "display" }, + subformula = { pdf = "Div", nature = "display" }, + + link = { pdf = "Link", nature = "inline" }, + + margintextblock = { pdf = "Span", nature = "inline" }, + margintext = { pdf = "Span", nature = "inline" }, + + math = { pdf = "Div", nature = "inline" }, -- no display + mn = { pdf = "Span", nature = "mixed" }, + mi = { pdf = "Span", nature = "mixed" }, + mo = { pdf = "Span", nature = "mixed" }, + ms = { pdf = "Span", nature = "mixed" }, + mrow = { pdf = "Span", nature = "display" }, + msubsup = { pdf = "Span", nature = "display" }, + msub = { pdf = "Span", nature = "display" }, + msup = { pdf = "Span", nature = "display" }, + merror = { pdf = "Span", nature = "mixed" }, + munderover = { pdf = "Span", nature = "display" }, + munder = { pdf = "Span", nature = "display" }, + mover = { pdf = "Span", nature = "display" }, + mtext = { pdf = "Span", nature = "mixed" }, + mfrac = { pdf = "Span", nature = "display" }, + mroot = { pdf = "Span", nature = "display" }, + msqrt = { pdf = "Span", nature = "display" }, + mfenced = { pdf = "Span", nature = "display" }, + maction = { pdf = "Span", nature = "display" }, + + mtable = { pdf = "Table", nature = "display" }, -- might change + mtr = { pdf = "TR", nature = "display" }, -- might change + mtd = { pdf = "TD", nature = 
"display" }, -- might change + + ignore = { pdf = "Span", nature = "mixed" }, + metadata = { pdf = "Div", nature = "display" }, + metavariable = { pdf = "Span", nature = "mixed" }, + + mid = { pdf = "Span", nature = "inline" }, + sub = { pdf = "Span", nature = "inline" }, + sup = { pdf = "Span", nature = "inline" }, + subsup = { pdf = "Span", nature = "inline" }, + + combination = { pdf = "Span", nature = "display" }, + combinationpair = { pdf = "Span", nature = "display" }, + combinationcontent = { pdf = "Span", nature = "mixed" }, + combinationcaption = { pdf = "Span", nature = "mixed" }, +} + +function tags.detailedtag(tag,detail,attribute) + if not attribute then + attribute = texattribute[a_tagged] + end + if attribute >= 0 then + local tl = taglist[attribute] + if tl then + local pattern + if detail and detail ~= "" then + pattern = "^" .. tag .. ":".. detail .. "%-" + else + pattern = "^" .. tag .. "%-" + end + for i=#tl,1,-1 do + local tli = tl[i] + if find(tli,pattern) then + return tli + end + end + end + else + -- enabled but not auto + end + return false -- handy as bogus index +end + +tags.properties = properties + +local lasttags = { } +local userdata = { } + +tags.userdata = userdata + +function tags.setproperty(tag,key,value) + local p = properties[tag] + if p then + p[key] = value + else + properties[tag] = { [key] = value } + end +end + +function tags.registerdata(data) + local fulltag = chain[nstack] + if fulltag then + tagdata[fulltag] = data + end +end + +local metadata + +function tags.registermetadata(data) + local d = settings_to_hash(data) + if metadata then + table.merge(metadata,d) + else + metadata = d + end +end + +local nstack = 0 + +function tags.start(tag,specification) + local label, detail, user + if specification then + label, detail, user = specification.label, specification.detail, specification.userdata + end + if not enabled then + codeinjections.enabletags() + enabled = true + end + -- +--~ labels[tag] = label ~= "" and label or tag +--~ local fulltag +--~ if detail and detail ~= "" then +--~ fulltag = tag .. ":" .. detail +--~ else +--~ fulltag = tag +--~ end + -- + local fulltag = label ~= "" and label or tag + labels[tag] = fulltag + if detail and detail ~= "" then + fulltag = fulltag .. ":" .. detail + end + -- + local t = #taglist + 1 + local n = (ids[fulltag] or 0) + 1 + ids[fulltag] = n + lasttags[tag] = n + local completetag = fulltag .. "-" .. n + nstack = nstack + 1 + chain[nstack] = completetag + stack[nstack] = t + -- a copy as we can add key values for alt and actualtext if needed: + taglist[t] = { unpack(chain,1,nstack) } + -- + if user and user ~= "" then + -- maybe we should merge this into taglist or whatever ... 
anyway there is room to optimize + -- taglist.userdata = settings_to_hash(user) + userdata[completetag] = settings_to_hash(user) + end + if metadata then + tagmetadata[completetag] = metadata + metadata = nil + end + texattribute[a_tagged] = t + return t +end + +function tags.restart(completetag) + local t = #taglist + 1 + nstack = nstack + 1 + chain[nstack] = completetag + stack[nstack] = t + taglist[t] = { unpack(chain,1,nstack) } + texattribute[a_tagged] = t + return t +end + +function tags.stop() + if nstack > 0 then + nstack = nstack -1 + end + local t = stack[nstack] + if not t then + if trace_tags then + report_tags("ignoring end tag, previous chain: %s",nstack > 0 and concat(chain[nstack],"",1,nstack) or "none") + end + t = unsetvalue + end + texattribute[a_tagged] = t + return t +end + +function tags.getid(tag,detail) + if detail and detail ~= "" then + return ids[tag .. ":" .. detail] or "?" + else + return ids[tag] or "?" + end +end + +function tags.last(tag) + return lasttags[tag] -- or false +end + +function tags.lastinchain() + return chain[nstack] +end + +function structures.atlocation(str) + local location = gsub(concat(taglist[texattribute[a_tagged]],"-"),"%-%d+","") + return find(location,topattern(str)) ~= nil +end + +function tags.handler(head) -- we need a dummy + return head, false +end + +statistics.register("structure elements", function() + if enabled then + if nstack > 0 then + return format("%s element chains identified, open chain: %s ",#taglist,concat(chain," => ",1,nstack)) + else + return format("%s element chains identified",#taglist) + end + end +end) + +directives.register("backend.addtags", function(v) + if not enabled then + codeinjections.enabletags() + enabled = true + end +end) + +commands.starttag = tags.start +commands.stoptag = tags.stop +commands.settagproperty = tags.setproperty diff --git a/tex/context/base/supp-box.lua b/tex/context/base/supp-box.lua index c7382834a..f564723ed 100644 --- a/tex/context/base/supp-box.lua +++ b/tex/context/base/supp-box.lua @@ -1,112 +1,112 @@ -if not modules then modules = { } end modules ['supp-box'] = { - version = 1.001, - comment = "companion to supp-box.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this is preliminary code - -local report_hyphenation = logs.reporter("languages","hyphenation") - -local tex, node = tex, node -local context, commands, nodes = context, commands, nodes - -local nodecodes = nodes.nodecodes - -local disc_code = nodecodes.disc -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glue_code = nodecodes.glue -local glyph_code = nodecodes.glyph - -local new_penalty = nodes.pool.penalty - -local free_node = node.free -local copynodelist = node.copy_list -local copynode = node.copy -local texbox = tex.box - -local function hyphenatedlist(list) - while list do - local id, next, prev = list.id, list.next, list.prev - if id == disc_code then - local hyphen = list.pre - if hyphen then - local penalty = new_penalty(-500) - hyphen.next, penalty.prev = penalty, hyphen - prev.next, next.prev = hyphen, penalty - penalty.next, hyphen.prev = next, prev - list.pre = nil - free_node(list) - end - elseif id == vlist_code or id == hlist_code then - hyphenatedlist(list.list) - end - list = next - end -end - -commands.hyphenatedlist = hyphenatedlist - -function commands.showhyphenatedinlist(list) - report_hyphenation("show: %s",nodes.listtoutf(list,false,true)) 
-end - -local function checkedlist(list) - if type(list) == "number" then - return texbox[list].list - else - return list - end -end - -local function applytochars(list,what,nested) - local doaction = context[what or "ruledhbox"] - local noaction = context - local current = checkedlist(list) - while current do - local id = current.id - if nested and (id == hlist_code or id == vlist_code) then - context.beginhbox() - applytochars(current.list,what,nested) - context.endhbox() - elseif id ~= glyph_code then - noaction(copynode(current)) - else - doaction(copynode(current)) - end - current = current.next - end -end - -local function applytowords(list,what,nested) - local doaction = context[what or "ruledhbox"] - local noaction = context - local current = checkedlist(list) - local start - while current do - local id = current.id - if id == glue_code then - if start then - doaction(copynodelist(start,current)) - start = nil - end - noaction(copynode(current)) - elseif nested and (id == hlist_code or id == vlist_code) then - context.beginhbox() - applytowords(current.list,what,nested) - context.egroup() - elseif not start then - start = current - end - current = current.next - end - if start then - doaction(copynodelist(start)) - end -end - -commands.applytochars = applytochars -commands.applytowords = applytowords +if not modules then modules = { } end modules ['supp-box'] = { + version = 1.001, + comment = "companion to supp-box.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this is preliminary code + +local report_hyphenation = logs.reporter("languages","hyphenation") + +local tex, node = tex, node +local context, commands, nodes = context, commands, nodes + +local nodecodes = nodes.nodecodes + +local disc_code = nodecodes.disc +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glue_code = nodecodes.glue +local glyph_code = nodecodes.glyph + +local new_penalty = nodes.pool.penalty + +local free_node = node.free +local copynodelist = node.copy_list +local copynode = node.copy +local texbox = tex.box + +local function hyphenatedlist(list) + while list do + local id, next, prev = list.id, list.next, list.prev + if id == disc_code then + local hyphen = list.pre + if hyphen then + local penalty = new_penalty(-500) + hyphen.next, penalty.prev = penalty, hyphen + prev.next, next.prev = hyphen, penalty + penalty.next, hyphen.prev = next, prev + list.pre = nil + free_node(list) + end + elseif id == vlist_code or id == hlist_code then + hyphenatedlist(list.list) + end + list = next + end +end + +commands.hyphenatedlist = hyphenatedlist + +function commands.showhyphenatedinlist(list) + report_hyphenation("show: %s",nodes.listtoutf(list,false,true)) +end + +local function checkedlist(list) + if type(list) == "number" then + return texbox[list].list + else + return list + end +end + +local function applytochars(list,what,nested) + local doaction = context[what or "ruledhbox"] + local noaction = context + local current = checkedlist(list) + while current do + local id = current.id + if nested and (id == hlist_code or id == vlist_code) then + context.beginhbox() + applytochars(current.list,what,nested) + context.endhbox() + elseif id ~= glyph_code then + noaction(copynode(current)) + else + doaction(copynode(current)) + end + current = current.next + end +end + +local function applytowords(list,what,nested) + local doaction = context[what or "ruledhbox"] + local 
noaction = context + local current = checkedlist(list) + local start + while current do + local id = current.id + if id == glue_code then + if start then + doaction(copynodelist(start,current)) + start = nil + end + noaction(copynode(current)) + elseif nested and (id == hlist_code or id == vlist_code) then + context.beginhbox() + applytowords(current.list,what,nested) + context.egroup() + elseif not start then + start = current + end + current = current.next + end + if start then + doaction(copynodelist(start)) + end +end + +commands.applytochars = applytochars +commands.applytowords = applytowords diff --git a/tex/context/base/supp-ran.lua b/tex/context/base/supp-ran.lua index 7997db8f6..57f041c69 100644 --- a/tex/context/base/supp-ran.lua +++ b/tex/context/base/supp-ran.lua @@ -1,73 +1,73 @@ -if not modules then modules = { } end modules ['supp-ran'] = { - version = 1.001, - comment = "companion to supp-ran.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- We cannot ask for the current seed, so we need some messy hack here. - -local report_system = logs.reporter("system","randomizer") - -local math = math -local context, commands = context, commands - -local random, randomseed, round, seed, last = math.random, math.randomseed, math.round, false, 1 - -local maxcount = 2^30-1 -- 1073741823 - -local function setrandomseedi(n,comment) - if not n then - -- n = 0.5 -- hack - end - if n <= 1 then - n = n * maxcount - end - n = round(n) - if false then - report_system("setting seed to %s (%s)",n,comment or "normal") - end - randomseed(n) - last = random(0,maxcount) -- we need an initial value -end - -math.setrandomseedi = setrandomseedi - -function commands.getrandomcounta(min,max) - last = random(min,max) - context(last) -end - -function commands.getrandomcountb(min,max) - last = random(min,max)/65536 - context(last) -end - -function commands.setrandomseed(n) - last = n - setrandomseedi(n) -end - -function commands.getrandomseed(n) - context(last) -end - --- maybe stack - -function commands.freezerandomseed(n) - if seed == false or seed == nil then - seed = last - setrandomseedi(seed,"freeze",seed) - end - if n then - randomseed(n) - end -end - -function commands.defrostrandomseed() - if seed ~= false then - setrandomseedi(seed,"defrost",seed) -- was last (bug) - seed = false - end -end +if not modules then modules = { } end modules ['supp-ran'] = { + version = 1.001, + comment = "companion to supp-ran.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- We cannot ask for the current seed, so we need some messy hack here. 
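An illustrative aside, not part of the patch itself: the freeze/defrost hack of this hunk in isolation. Lua gives no way to read the current seed back, so the last value drawn is cached and later replayed through math.randomseed. The helper names and the 2^30-1 bound mirror the code below; everything else is simplified.

    local random, randomseed = math.random, math.randomseed
    local maxcount = 2^30 - 1
    local last, seed = 1, false

    local function freezerandomseed()
        if not seed then
            seed = last            -- remember the last value drawn ...
            randomseed(seed)       -- ... and replay it as a (pseudo) seed
        end
    end

    local function defrostrandomseed()
        if seed then
            randomseed(seed)       -- restore the remembered state
            seed = false
        end
    end

    last = random(0, maxcount)     -- every draw refreshes the cache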
+ +local report_system = logs.reporter("system","randomizer") + +local math = math +local context, commands = context, commands + +local random, randomseed, round, seed, last = math.random, math.randomseed, math.round, false, 1 + +local maxcount = 2^30-1 -- 1073741823 + +local function setrandomseedi(n,comment) + if not n then + -- n = 0.5 -- hack + end + if n <= 1 then + n = n * maxcount + end + n = round(n) + if false then + report_system("setting seed to %s (%s)",n,comment or "normal") + end + randomseed(n) + last = random(0,maxcount) -- we need an initial value +end + +math.setrandomseedi = setrandomseedi + +function commands.getrandomcounta(min,max) + last = random(min,max) + context(last) +end + +function commands.getrandomcountb(min,max) + last = random(min,max)/65536 + context(last) +end + +function commands.setrandomseed(n) + last = n + setrandomseedi(n) +end + +function commands.getrandomseed(n) + context(last) +end + +-- maybe stack + +function commands.freezerandomseed(n) + if seed == false or seed == nil then + seed = last + setrandomseedi(seed,"freeze",seed) + end + if n then + randomseed(n) + end +end + +function commands.defrostrandomseed() + if seed ~= false then + setrandomseedi(seed,"defrost",seed) -- was last (bug) + seed = false + end +end diff --git a/tex/context/base/symb-ini.lua b/tex/context/base/symb-ini.lua index deeef667a..1be423b92 100644 --- a/tex/context/base/symb-ini.lua +++ b/tex/context/base/symb-ini.lua @@ -1,50 +1,50 @@ -if not modules then modules = { } end modules ['symb-ini'] = { - version = 1.001, - comment = "companion to symb-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - - -local variables = interfaces.variables - -fonts = fonts or { } -- brrrr - -local symbols = fonts.symbols or { } -fonts.symbols = symbols - -local report_symbols = logs.reporter ("fonts","symbols") -local status_symbols = logs.messenger("fonts","symbols") - -local patterns = { "symb-imp-%s.mkiv", "symb-imp-%s.tex", "symb-%s.mkiv", "symb-%s.tex" } -local listitem = utilities.parsers.listitem - -local function action(name,foundname) - -- context.startnointerference() - context.startreadingfile() - context.input(foundname) - status_symbols("library %a loaded",name) - context.stopreadingfile() - -- context.stopnointerference() -end - -local function failure(name) - report_symbols("library %a is unknown",name) -end - -function symbols.uselibrary(name) - if name ~= variables.reset then - for name in listitem(name) do - commands.uselibrary { - name = name, - patterns = patterns, - action = action, - failure = failure, - onlyonce = true, - } - end - end -end - -commands.usesymbols = symbols.uselibrary +if not modules then modules = { } end modules ['symb-ini'] = { + version = 1.001, + comment = "companion to symb-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + + +local variables = interfaces.variables + +fonts = fonts or { } -- brrrr + +local symbols = fonts.symbols or { } +fonts.symbols = symbols + +local report_symbols = logs.reporter ("fonts","symbols") +local status_symbols = logs.messenger("fonts","symbols") + +local patterns = { "symb-imp-%s.mkiv", "symb-imp-%s.tex", "symb-%s.mkiv", "symb-%s.tex" } +local listitem = utilities.parsers.listitem + +local function action(name,foundname) + -- context.startnointerference() + context.startreadingfile() 
+ context.input(foundname) + status_symbols("library %a loaded",name) + context.stopreadingfile() + -- context.stopnointerference() +end + +local function failure(name) + report_symbols("library %a is unknown",name) +end + +function symbols.uselibrary(name) + if name ~= variables.reset then + for name in listitem(name) do + commands.uselibrary { + name = name, + patterns = patterns, + action = action, + failure = failure, + onlyonce = true, + } + end + end +end + +commands.usesymbols = symbols.uselibrary diff --git a/tex/context/base/syst-aux.lua b/tex/context/base/syst-aux.lua index b0fb8483b..d7250d239 100644 --- a/tex/context/base/syst-aux.lua +++ b/tex/context/base/syst-aux.lua @@ -1,80 +1,80 @@ -if not modules then modules = { } end modules ['syst-aux'] = { - version = 1.001, - comment = "companion to syst-aux.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- slower than lpeg: --- --- utfmatch(str,"(.?)(.*)$") --- utf.sub(str,1,1) - -local commands, context = commands, context - -local settings_to_array = utilities.parsers.settings_to_array -local format = string.format -local utfsub = utf.sub -local P, C, Carg, lpegmatch, utf8char = lpeg.P, lpeg.C, lpeg.Carg, lpeg.match, lpeg.patterns.utf8char - -local setvalue = context.setvalue - -local pattern = C(utf8char^-1) * C(P(1)^0) - -function commands.getfirstcharacter(str) - local first, rest = lpegmatch(pattern,str) - setvalue("firstcharacter",first) - setvalue("remainingcharacters",rest) -end - -local pattern = C(utf8char^-1) - -function commands.doiffirstcharelse(chr,str) - commands.doifelse(lpegmatch(pattern,str) == chr) -end - -function commands.getsubstring(str,first,last) - context(utfsub(str,tonumber(first),tonumber(last))) -end - --- function commands.addtocommalist(list,item) --- if list == "" then --- context(item) --- else --- context("%s,%s",list,item) -- using tex.print is some 10% faster --- end --- end --- --- function commands.removefromcommalist(list,item) --- if list == "" then --- context(item) --- else --- -- okay, using a proper lpeg is probably faster --- -- we could also check for #l = 1 --- local l = settings_to_array(list) --- local t, n = { } --- for i=1,#l do --- if l[i] ~= item then --- n = n + 1 --- t[n] = item --- end --- end --- if n == 0 then --- context(item) --- else --- context(concat(list,",")) --- end --- end --- end - -local pattern = (C((1-P("%"))^1) * Carg(1)) /function(n,d) return format("%.0fsp",d * tonumber(n)/100) end * P("%") * P(-1) - --- commands.percentageof("10%",65536*10) - -function commands.percentageof(str,dim) - context(lpegmatch(pattern,str,1,dim) or str) -end - --- \gdef\setpercentdimen#1#2% --- {#1=\ctxcommand{percentageof("#2",\number#1)}\relax} +if not modules then modules = { } end modules ['syst-aux'] = { + version = 1.001, + comment = "companion to syst-aux.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- slower than lpeg: +-- +-- utfmatch(str,"(.?)(.*)$") +-- utf.sub(str,1,1) + +local commands, context = commands, context + +local settings_to_array = utilities.parsers.settings_to_array +local format = string.format +local utfsub = utf.sub +local P, C, Carg, lpegmatch, utf8char = lpeg.P, lpeg.C, lpeg.Carg, lpeg.match, lpeg.patterns.utf8char + +local setvalue = context.setvalue + +local pattern = C(utf8char^-1) * C(P(1)^0) + +function 
commands.getfirstcharacter(str) + local first, rest = lpegmatch(pattern,str) + setvalue("firstcharacter",first) + setvalue("remainingcharacters",rest) +end + +local pattern = C(utf8char^-1) + +function commands.doiffirstcharelse(chr,str) + commands.doifelse(lpegmatch(pattern,str) == chr) +end + +function commands.getsubstring(str,first,last) + context(utfsub(str,tonumber(first),tonumber(last))) +end + +-- function commands.addtocommalist(list,item) +-- if list == "" then +-- context(item) +-- else +-- context("%s,%s",list,item) -- using tex.print is some 10% faster +-- end +-- end +-- +-- function commands.removefromcommalist(list,item) +-- if list == "" then +-- context(item) +-- else +-- -- okay, using a proper lpeg is probably faster +-- -- we could also check for #l = 1 +-- local l = settings_to_array(list) +-- local t, n = { } +-- for i=1,#l do +-- if l[i] ~= item then +-- n = n + 1 +-- t[n] = item +-- end +-- end +-- if n == 0 then +-- context(item) +-- else +-- context(concat(list,",")) +-- end +-- end +-- end + +local pattern = (C((1-P("%"))^1) * Carg(1)) /function(n,d) return format("%.0fsp",d * tonumber(n)/100) end * P("%") * P(-1) + +-- commands.percentageof("10%",65536*10) + +function commands.percentageof(str,dim) + context(lpegmatch(pattern,str,1,dim) or str) +end + +-- \gdef\setpercentdimen#1#2% +-- {#1=\ctxcommand{percentageof("#2",\number#1)}\relax} diff --git a/tex/context/base/syst-con.lua b/tex/context/base/syst-con.lua index 48f02da3a..103aea2d4 100644 --- a/tex/context/base/syst-con.lua +++ b/tex/context/base/syst-con.lua @@ -1,62 +1,62 @@ -if not modules then modules = { } end modules ['syst-con'] = { - version = 1.001, - comment = "companion to syst-con.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -converters = converters or { } - ---[[ldx-- -

    For raw 8 bit characters, the offset is 0x110000 (bottom of plane 18) at -the top of LuaTeX's char range but outside the unicode range.

    ---ldx]]-- - -local tonumber = tonumber -local utfchar = utf.char -local gsub, format = string.gsub, string.format - -function converters.hexstringtonumber(n) tonumber(n,16) end -function converters.octstringtonumber(n) tonumber(n, 8) end -function converters.rawcharacter (n) utfchar(0x110000+n) end -function converters.lchexnumber (n) format("%x" ,n) end -function converters.uchexnumber (n) format("%X" ,n) end -function converters.lchexnumbers (n) format("%02x",n) end -function converters.uchexnumbers (n) format("%02X",n) end -function converters.octnumber (n) format("%03o",n) end - -function commands.hexstringtonumber(n) context(tonumber(n,16)) end -function commands.octstringtonumber(n) context(tonumber(n, 8)) end -function commands.rawcharacter (n) context(utfchar(0x110000+n)) end -function commands.lchexnumber (n) context("%x" ,n) end -function commands.uchexnumber (n) context("%X" ,n) end -function commands.lchexnumbers (n) context("%02x",n) end -function commands.uchexnumbers (n) context("%02X",n) end -function commands.octnumber (n) context("%03o",n) end - -function commands.format(fmt,...) -- used ? - fmt = gsub(fmt,"@","%%") - context(fmt,...) -end - -local cosd, sind, tand = math.cosd, math.sind, math.tand -local cos, sin, tan = math.cos, math.sin, math.tan - --- unfortunately %s spits out: 6.1230317691119e-017 --- --- function commands.sind(n) context(sind(n)) end --- function commands.cosd(n) context(cosd(n)) end --- function commands.tand(n) context(tand(n)) end --- --- function commands.sin (n) context(sin (n)) end --- function commands.cos (n) context(cos (n)) end --- function commands.tan (n) context(tan (n)) end - -function commands.sind(n) context("%0.6f",sind(n)) end -function commands.cosd(n) context("%0.6f",cosd(n)) end -function commands.tand(n) context("%0.6f",tand(n)) end - -function commands.sin (n) context("%0.6f",sin (n)) end -function commands.cos (n) context("%0.6f",cos (n)) end -function commands.tan (n) context("%0.6f",tan (n)) end +if not modules then modules = { } end modules ['syst-con'] = { + version = 1.001, + comment = "companion to syst-con.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +converters = converters or { } + +--[[ldx-- +

    For raw 8 bit characters, the offset is 0x110000 (bottom of plane 18) at +the top of LuaTeX's char range but outside the unicode range.
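A sketch of what that offset buys, illustrative only and not part of the patch: a raw byte 0..255 is parked just above the Unicode range, so it can travel through LuaTeX as a single character without clashing with any real code point, and subtracting the same offset recovers the byte. The helper name rawslot is made up here; the utf.char call is the same one converters.rawcharacter uses below.

    local utfchar = utf.char
    local rawoffset = 0x110000                 -- bottom of plane 18

    local function rawslot(b)                  -- hypothetical helper
        return utfchar(rawoffset + b)          -- same call as converters.rawcharacter(n)
    end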

    +--ldx]]-- + +local tonumber = tonumber +local utfchar = utf.char +local gsub, format = string.gsub, string.format + +function converters.hexstringtonumber(n) tonumber(n,16) end +function converters.octstringtonumber(n) tonumber(n, 8) end +function converters.rawcharacter (n) utfchar(0x110000+n) end +function converters.lchexnumber (n) format("%x" ,n) end +function converters.uchexnumber (n) format("%X" ,n) end +function converters.lchexnumbers (n) format("%02x",n) end +function converters.uchexnumbers (n) format("%02X",n) end +function converters.octnumber (n) format("%03o",n) end + +function commands.hexstringtonumber(n) context(tonumber(n,16)) end +function commands.octstringtonumber(n) context(tonumber(n, 8)) end +function commands.rawcharacter (n) context(utfchar(0x110000+n)) end +function commands.lchexnumber (n) context("%x" ,n) end +function commands.uchexnumber (n) context("%X" ,n) end +function commands.lchexnumbers (n) context("%02x",n) end +function commands.uchexnumbers (n) context("%02X",n) end +function commands.octnumber (n) context("%03o",n) end + +function commands.format(fmt,...) -- used ? + fmt = gsub(fmt,"@","%%") + context(fmt,...) +end + +local cosd, sind, tand = math.cosd, math.sind, math.tand +local cos, sin, tan = math.cos, math.sin, math.tan + +-- unfortunately %s spits out: 6.1230317691119e-017 +-- +-- function commands.sind(n) context(sind(n)) end +-- function commands.cosd(n) context(cosd(n)) end +-- function commands.tand(n) context(tand(n)) end +-- +-- function commands.sin (n) context(sin (n)) end +-- function commands.cos (n) context(cos (n)) end +-- function commands.tan (n) context(tan (n)) end + +function commands.sind(n) context("%0.6f",sind(n)) end +function commands.cosd(n) context("%0.6f",cosd(n)) end +function commands.tand(n) context("%0.6f",tand(n)) end + +function commands.sin (n) context("%0.6f",sin (n)) end +function commands.cos (n) context("%0.6f",cos (n)) end +function commands.tan (n) context("%0.6f",tan (n)) end diff --git a/tex/context/base/syst-lua.lua b/tex/context/base/syst-lua.lua index ef524c339..4795efe68 100644 --- a/tex/context/base/syst-lua.lua +++ b/tex/context/base/syst-lua.lua @@ -1,123 +1,123 @@ -if not modules then modules = { } end modules ['syst-lua'] = { - version = 1.001, - comment = "companion to syst-lua.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, find, match, rep = string.format, string.find, string.match, string.rep -local tonumber = tonumber -local S, lpegmatch, lpegtsplitat = lpeg.S, lpeg.match, lpeg.tsplitat - -local context = context - -commands = commands or { } - -function commands.writestatus(...) logs.status(...) 
end -- overloaded later - -local firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" ) -local secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments") -local firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" ) -local gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" ) - --- contextsprint(prtcatcodes,[[\ui_fo]]) -- firstofonearguments --- contextsprint(prtcatcodes,[[\ui_go]]) -- gobbleonearguments --- contextsprint(prtcatcodes,[[\ui_ft]]) -- firstoftwoarguments --- contextsprint(prtcatcodes,[[\ui_st]]) -- secondoftwoarguments - -function commands.doifelse(b) - if b then - firstoftwoarguments() - else - secondoftwoarguments() - end -end - -function commands.doif(b) - if b then - firstofoneargument() - else - gobbleoneargument() - end -end - -function commands.doifnot(b) - if b then - gobbleoneargument() - else - firstofoneargument() - end -end - -commands.testcase = commands.doifelse -- obsolete - -function commands.boolcase(b) - context(b and 1 or 0) -end - -function commands.doifelsespaces(str) - if find(str,"^ +$") then - firstoftwoarguments() - else - secondoftwoarguments() - end -end - -local s = lpegtsplitat(",") -local h = { } - -function commands.doifcommonelse(a,b) -- often the same test - local ha = h[a] - local hb = h[b] - if not ha then - ha = lpegmatch(s,a) - h[a] = ha - end - if not hb then - hb = lpegmatch(s,b) - h[b] = hb - end - local na = #ha - local nb = #hb - for i=1,na do - for j=1,nb do - if ha[i] == hb[j] then - firstoftwoarguments() - return - end - end - end - secondoftwoarguments() -end - -function commands.doifinsetelse(a,b) - local hb = h[b] - if not hb then hb = lpegmatch(s,b) h[b] = hb end - for i=1,#hb do - if a == hb[i] then - firstoftwoarguments() - return - end - end - secondoftwoarguments() -end - -local pattern = lpeg.patterns.validdimen - -function commands.doifdimenstringelse(str) - if lpegmatch(pattern,str) then - firstoftwoarguments() - else - secondoftwoarguments() - end -end - -function commands.firstinset(str) - local first = match(str,"^([^,]+),") - context(first or str) -end - -function commands.ntimes(str,n) - context(rep(str,n or 1)) -end +if not modules then modules = { } end modules ['syst-lua'] = { + version = 1.001, + comment = "companion to syst-lua.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, find, match, rep = string.format, string.find, string.match, string.rep +local tonumber = tonumber +local S, lpegmatch, lpegtsplitat = lpeg.S, lpeg.match, lpeg.tsplitat + +local context = context + +commands = commands or { } + +function commands.writestatus(...) logs.status(...) 
end -- overloaded later + +local firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" ) +local secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments") +local firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" ) +local gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" ) + +-- contextsprint(prtcatcodes,[[\ui_fo]]) -- firstofonearguments +-- contextsprint(prtcatcodes,[[\ui_go]]) -- gobbleonearguments +-- contextsprint(prtcatcodes,[[\ui_ft]]) -- firstoftwoarguments +-- contextsprint(prtcatcodes,[[\ui_st]]) -- secondoftwoarguments + +function commands.doifelse(b) + if b then + firstoftwoarguments() + else + secondoftwoarguments() + end +end + +function commands.doif(b) + if b then + firstofoneargument() + else + gobbleoneargument() + end +end + +function commands.doifnot(b) + if b then + gobbleoneargument() + else + firstofoneargument() + end +end + +commands.testcase = commands.doifelse -- obsolete + +function commands.boolcase(b) + context(b and 1 or 0) +end + +function commands.doifelsespaces(str) + if find(str,"^ +$") then + firstoftwoarguments() + else + secondoftwoarguments() + end +end + +local s = lpegtsplitat(",") +local h = { } + +function commands.doifcommonelse(a,b) -- often the same test + local ha = h[a] + local hb = h[b] + if not ha then + ha = lpegmatch(s,a) + h[a] = ha + end + if not hb then + hb = lpegmatch(s,b) + h[b] = hb + end + local na = #ha + local nb = #hb + for i=1,na do + for j=1,nb do + if ha[i] == hb[j] then + firstoftwoarguments() + return + end + end + end + secondoftwoarguments() +end + +function commands.doifinsetelse(a,b) + local hb = h[b] + if not hb then hb = lpegmatch(s,b) h[b] = hb end + for i=1,#hb do + if a == hb[i] then + firstoftwoarguments() + return + end + end + secondoftwoarguments() +end + +local pattern = lpeg.patterns.validdimen + +function commands.doifdimenstringelse(str) + if lpegmatch(pattern,str) then + firstoftwoarguments() + else + secondoftwoarguments() + end +end + +function commands.firstinset(str) + local first = match(str,"^([^,]+),") + context(first or str) +end + +function commands.ntimes(str,n) + context(rep(str,n or 1)) +end diff --git a/tex/context/base/tabl-tbl.lua b/tex/context/base/tabl-tbl.lua index 19548e7b3..224b2fb99 100644 --- a/tex/context/base/tabl-tbl.lua +++ b/tex/context/base/tabl-tbl.lua @@ -1,41 +1,41 @@ -if not modules then modules = { } end modules ['tabl-tbl'] = { - version = 1.001, - comment = "companion to tabl-tbl.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- A couple of hacks ... easier to do in Lua than in regular TeX. More will --- follow. 
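One of those hacks, commands.presettabulate in this hunk, pre-expands "*{n}{spec}" repetitions in a tabulate preamble before splitting it into columns. A standalone sketch of just that expansion step, illustrative only, using the same gsub as the function below; the example preamble is made up:

    local gsub, rep, sub, tonumber = string.gsub, string.rep, string.sub, tonumber
    local preamble = "|l|*{3}{c|}r|"                     -- hypothetical input
    preamble = gsub(preamble, "%*(%b{})(%b{})", function(n, p)
        return rep(sub(p, 2, -2), tonumber(sub(n, 2, -2)) or 1)
    end)
    -- preamble is now "|l|c|c|c|r|", ready for the "|" splitter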
- -local context, commands = context, commands - -local tonumber = tonumber -local gsub, rep, sub, find = string.gsub, string.rep, string.sub, string.find -local P, C, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Ct, lpeg.match - -local settexcount = tex.setcount - -local separator = P("|") -local nested = lpeg.patterns.nested -local pattern = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0) - -function commands.presettabulate(preamble) - preamble = gsub(preamble,"~","d") -- let's get rid of ~ mess here - if find(preamble,"%*") then - -- todo: lpeg but not now - preamble = gsub(preamble, "%*(%b{})(%b{})", function(n,p) - return rep(sub(p,2,-2),tonumber(sub(n,2,-2)) or 1) - end) - end - local t = lpegmatch(pattern,preamble) - local m = #t - 2 - settexcount("global","c_tabl_tabulate_nofcolumns", m/2) - settexcount("global","c_tabl_tabulate_has_rule_spec_first", t[1] == "" and 0 or 1) - settexcount("global","c_tabl_tabulate_has_rule_spec_last", t[m+1] == "" and 0 or 1) - for i=1,m,2 do - context.settabulateentry(t[i],t[i+1]) - end - context.settabulatelastentry(t[m+1]) -end +if not modules then modules = { } end modules ['tabl-tbl'] = { + version = 1.001, + comment = "companion to tabl-tbl.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- A couple of hacks ... easier to do in Lua than in regular TeX. More will +-- follow. + +local context, commands = context, commands + +local tonumber = tonumber +local gsub, rep, sub, find = string.gsub, string.rep, string.sub, string.find +local P, C, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Ct, lpeg.match + +local settexcount = tex.setcount + +local separator = P("|") +local nested = lpeg.patterns.nested +local pattern = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0) + +function commands.presettabulate(preamble) + preamble = gsub(preamble,"~","d") -- let's get rid of ~ mess here + if find(preamble,"%*") then + -- todo: lpeg but not now + preamble = gsub(preamble, "%*(%b{})(%b{})", function(n,p) + return rep(sub(p,2,-2),tonumber(sub(n,2,-2)) or 1) + end) + end + local t = lpegmatch(pattern,preamble) + local m = #t - 2 + settexcount("global","c_tabl_tabulate_nofcolumns", m/2) + settexcount("global","c_tabl_tabulate_has_rule_spec_first", t[1] == "" and 0 or 1) + settexcount("global","c_tabl_tabulate_has_rule_spec_last", t[m+1] == "" and 0 or 1) + for i=1,m,2 do + context.settabulateentry(t[i],t[i+1]) + end + context.settabulatelastentry(t[m+1]) +end diff --git a/tex/context/base/tabl-xtb.lua b/tex/context/base/tabl-xtb.lua index 3ffe8a219..5b47bf705 100644 --- a/tex/context/base/tabl-xtb.lua +++ b/tex/context/base/tabl-xtb.lua @@ -1,988 +1,988 @@ -if not modules then modules = { } end modules ['tabl-xtb'] = { - version = 1.001, - comment = "companion to tabl-xtb.mkvi", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ - -This table mechanism is a combination between TeX and Lua. We do process -cells at the TeX end and inspect them at the Lua end. After some analysis -we have a second pass using the calculated widths, and if needed cells -will go through a third pass to get the heights right. This last pass is -avoided when possible which is why some code below looks a bit more -complex than needed. 
The reason for such optimizations is that each cells -is actually a framed instance and because tables like this can be hundreds -of pages we want to keep processing time reasonable. - -To a large extend the behaviour is comparable with the way bTABLE/eTABLE -works and there is a module that maps that one onto this one. Eventually -this mechamism will be improved so that it can replace its older cousin. - -]]-- - --- todo: use linked list instead of r/c array - -local commands, context, tex, node = commands, context, tex, node - -local texdimen = tex.dimen -local texcount = tex.count -local texbox = tex.box -local texsetcount = tex.setcount -local texsetdimen = tex.setdimen - -local format = string.format -local concat = table.concat -local points = number.points - -local context = context -local context_beginvbox = context.beginvbox -local context_endvbox = context.endvbox -local context_blank = context.blank -local context_nointerlineskip = context.nointerlineskip - -local variables = interfaces.variables - -local setmetatableindex = table.setmetatableindex -local settings_to_hash = utilities.parsers.settings_to_hash - -local copy_node_list = node.copy_list -local hpack_node_list = node.hpack -local vpack_node_list = node.vpack -local slide_node_list = node.slide -local flush_node_list = node.flush_list - -local nodepool = nodes.pool - -local new_glue = nodepool.glue -local new_kern = nodepool.kern -local new_penalty = nodepool.penalty -local new_hlist = nodepool.hlist - -local v_stretch = variables.stretch -local v_normal = variables.normal -local v_width = variables.width -local v_height = variables.height -local v_repeat = variables["repeat"] -local v_max = variables.max -local v_fixed = variables.fixed - -local xtables = { } -typesetters.xtables = xtables - -local trace_xtable = false -local report_xtable = logs.reporter("xtable") - -trackers.register("xtable.construct", function(v) trace_xtable = v end) - -local null_mode = 0 -local head_mode = 1 -local foot_mode = 2 -local more_mode = 3 -local body_mode = 4 - -local namedmodes = { [0] = - "null", - "head", - "foot", - "next", - "body", -} - -local stack, data = { }, nil - -function xtables.create(settings) - table.insert(stack,data) - local rows = { } - local widths = { } - local heights = { } - local depths = { } - local spans = { } - local distances = { } - local autowidths = { } - local modes = { } - local fixedrows = { } - local fixedcolumns = { } - local frozencolumns = { } - local options = { } - data = { - rows = rows, - widths = widths, - heights = heights, - depths = depths, - spans = spans, - distances = distances, - modes = modes, - autowidths = autowidths, - fixedrows = fixedrows, - fixedcolumns = fixedcolumns, - frozencolumns = frozencolumns, - options = options, - nofrows = 0, - nofcolumns = 0, - currentrow = 0, - currentcolumn = 0, - settings = settings or { }, - } - local function add_zero(t,k) - t[k] = 0 - return 0 - end - local function add_table(t,k) - local v = { } - t[k] = v - return v - end - local function add_cell(row,c) - local cell = { - nx = 0, - ny = 0, - list = false, - } - row[c] = cell - if c > data.nofcolumns then - data.nofcolumns = c - end - return cell - end - local function add_row(rows,r) - local row = { } - setmetatableindex(row,add_cell) - rows[r] = row - if r > data.nofrows then - data.nofrows = r - end - return row - end - setmetatableindex(rows,add_row) - setmetatableindex(widths,add_zero) - setmetatableindex(heights,add_zero) - setmetatableindex(depths,add_zero) - 
setmetatableindex(distances,add_zero) - setmetatableindex(modes,add_zero) - setmetatableindex(fixedrows,add_zero) - setmetatableindex(fixedcolumns,add_zero) - setmetatableindex(options,add_table) - -- - settings.columndistance = tonumber(settings.columndistance) or 0 - settings.rowdistance = tonumber(settings.rowdistance) or 0 - settings.leftmargindistance = tonumber(settings.leftmargindistance) or 0 - settings.rightmargindistance = tonumber(settings.rightmargindistance) or 0 - settings.options = settings_to_hash(settings.option) - settings.textwidth = tonumber(settings.textwidth) or tex.hsize - settings.lineheight = tonumber(settings.lineheight) or texdimen.lineheight - settings.maxwidth = tonumber(settings.maxwidth) or settings.textwidth/8 - -- if #stack > 0 then - -- settings.textwidth = tex.hsize - -- end - data.criterium_v = 2 * data.settings.lineheight - data.criterium_h = .75 * data.settings.textwidth - -end - -function xtables.initialize_reflow_width(option) - local r = data.currentrow - local c = data.currentcolumn + 1 - local drc = data.rows[r][c] - drc.nx = texcount.c_tabl_x_nx - drc.ny = texcount.c_tabl_x_ny - local distances = data.distances - local distance = texdimen.d_tabl_x_distance - if distance > distances[c] then - distances[c] = distance - end - if option and option ~= "" then - local options = settings_to_hash(option) - data.options[r][c] = options - if options[v_fixed] then - data.frozencolumns[c] = true - end - end - data.currentcolumn = c -end - --- local function rather_fixed(n) --- for n in node. - -function xtables.set_reflow_width() - local r = data.currentrow - local c = data.currentcolumn - local rows = data.rows - local row = rows[r] - while row[c].span do -- can also be previous row ones - c = c + 1 - end - local tb = texbox.b_tabl_x - local drc = row[c] - -- - drc.list = true -- we don't need to keep the content around as we're in trial mode (no: copy_node_list(tb)) - -- - local widths, width = data.widths, tb.width - if width > widths[c] then - widths[c] = width - end - local heights, height = data.heights, tb.height - if height > heights[r] then - heights[r] = height - end - local depths, depth = data.depths, tb.depth - if depth > depths[r] then - depths[r] = depth - end - -- - local dimensionstate = texcount.frameddimensionstate - local fixedcolumns = data.fixedcolumns - local fixedrows = data.fixedrows - if dimensionstate == 1 then - if width > fixedcolumns[c] then -- how about a span here? - fixedcolumns[c] = width - end - elseif dimensionstate == 2 then - fixedrows[r] = height - elseif dimensionstate == 3 then - fixedrows[r] = height -- width - fixedcolumns[c] = width -- height - else -- probably something frozen, like an image -- we could parse the list - if width <= data.criterium_h and height >= data.criterium_v then - if width > fixedcolumns[c] then -- how about a span here? 
- fixedcolumns[c] = width - end - end - end - drc.dimensionstate = dimensionstate - -- - local nx, ny = drc.nx, drc.ny - if nx > 1 or ny > 1 then - local spans = data.spans - local self = true - for y=1,ny do - for x=1,nx do - if self then - self = false - else - local ry = r + y - 1 - local cx = c + x - 1 - if y > 1 then - spans[ry] = true - end - rows[ry][cx].span = true - end - end - end - c = c + nx - 1 - end - if c > data.nofcolumns then - data.nofcolumns = c - end - data.currentcolumn = c -end - -function xtables.initialize_reflow_height() - local r = data.currentrow - local c = data.currentcolumn + 1 - local rows = data.rows - local row = rows[r] - while row[c].span do -- can also be previous row ones - c = c + 1 - end - data.currentcolumn = c - local widths = data.widths - local w = widths[c] - local drc = row[c] - for x=1,drc.nx-1 do - w = w + widths[c+x] - end - texdimen.d_tabl_x_width = w - local dimensionstate = drc.dimensionstate or 0 - if dimensionstate == 1 or dimensionstate == 3 then - -- width was fixed so height is known - texcount.c_tabl_x_skip_mode = 1 - elseif dimensionstate == 2 then - -- height is enforced - texcount.c_tabl_x_skip_mode = 1 - elseif data.autowidths[c] then - -- width has changed so we need to recalculate the height - texcount.c_tabl_x_skip_mode = 0 - else - texcount.c_tabl_x_skip_mode = 1 - end -end - -function xtables.set_reflow_height() - local r = data.currentrow - local c = data.currentcolumn - local rows = data.rows - local row = rows[r] --- while row[c].span do -- we could adapt drc.nx instead --- c = c + 1 --- end - local tb = texbox.b_tabl_x - local drc = row[c] - if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2 - local heights, height = data.heights, tb.height - if height > heights[r] then - heights[r] = height - end - local depths, depth = data.depths, tb.depth - if depth > depths[r] then - depths[r] = depth - end - end --- c = c + drc.nx - 1 --- data.currentcolumn = c -end - -function xtables.initialize_construct() - local r = data.currentrow - local c = data.currentcolumn + 1 - local rows = data.rows - local row = rows[r] - while row[c].span do -- can also be previous row ones - c = c + 1 - end - data.currentcolumn = c - local widths = data.widths - local heights = data.heights - local depths = data.depths - local w = widths[c] - local h = heights[r] - local d = depths[r] - local drc = row[c] - for x=1,drc.nx-1 do - w = w + widths[c+x] - end - for y=1,drc.ny-1 do - h = h + heights[r+y] - d = d + depths[r+y] - end - texdimen.d_tabl_x_width = w - texdimen.d_tabl_x_height = h + d - texdimen.d_tabl_x_depth = 0 -end - -function xtables.set_construct() - local r = data.currentrow - local c = data.currentcolumn - local rows = data.rows - local row = rows[r] --- while row[c].span do -- can also be previous row ones --- c = c + 1 --- end - local drc = row[c] - -- this will change as soon as in luatex we can reset a box list without freeing - drc.list = copy_node_list(texbox.b_tabl_x) --- c = c + drc.nx - 1 --- data.currentcolumn = c -end - -local function showwidths(where,widths,autowidths) - local result = { } - for i=1,#widths do - result[#result+1] = format("%12s%s",points(widths[i]),autowidths[i] and "*" or " ") - end - return report_xtable("%s : %s",where,concat(result," ")) -end - -function xtables.reflow_width() - local nofrows = data.nofrows - local nofcolumns = data.nofcolumns - local rows = data.rows - for r=1,nofrows do - local row = rows[r] - for c=1,nofcolumns do - local drc = row[c] - if drc.list then - -- 
flush_node_list(drc.list) - drc.list = false - end - end - end - -- spread - local settings = data.settings - local options = settings.options - local maxwidth = settings.maxwidth - -- calculate width - local widths = data.widths - local distances = data.distances - local autowidths = data.autowidths - local fixedcolumns = data.fixedcolumns - local frozencolumns = data.frozencolumns - local width = 0 - local distance = 0 - local nofwide = 0 - local widetotal = 0 - local available = settings.textwidth - settings.leftmargindistance - settings.rightmargindistance - if trace_xtable then - showwidths("stage 1",widths,autowidths) - end - local noffrozen = 0 - if options[v_max] then - for c=1,nofcolumns do - width = width + widths[c] - if width > maxwidth then - autowidths[c] = true - nofwide = nofwide + 1 - widetotal = widetotal + widths[c] - end - if c < nofcolumns then - distance = distance + distances[c] - end - if frozencolumns[c] then - noffrozen = noffrozen + 1 -- brr, should be nx or so - end - end - else - for c=1,nofcolumns do -- also keep track of forced - local fixedwidth = fixedcolumns[c] - if fixedwidth > 0 then - widths[c] = fixedwidth - width = width + fixedwidth - else - width = width + widths[c] - if width > maxwidth then - autowidths[c] = true - nofwide = nofwide + 1 - widetotal = widetotal + widths[c] - end - end - if c < nofcolumns then - distance = distance + distances[c] - end - if frozencolumns[c] then - noffrozen = noffrozen + 1 -- brr, should be nx or so - end - end - end - if trace_xtable then - showwidths("stage 2",widths,autowidths) - end - local delta = available - width - distance - (nofcolumns-1) * settings.columndistance - if delta == 0 then - -- nothing to be done - if trace_xtable then - report_xtable("perfect fit") - end - elseif delta > 0 then - -- we can distribute some - if not options[v_stretch] then - -- not needed - if trace_xtable then - report_xtable("too wide but no stretch, delta %p",delta) - end - elseif options[v_width] then - local factor = delta / width - if trace_xtable then - report_xtable("proportional stretch, delta %p, width %p, factor %a",delta,width,factor) - end - for c=1,nofcolumns do - widths[c] = widths[c] + factor * widths[c] - end - else - -- frozen -> a column with option=fixed will not stretch - local extra = delta / (nofcolumns - noffrozen) - if trace_xtable then - report_xtable("normal stretch, delta %p, extra %p",delta,extra) - end - for c=1,nofcolumns do - if not frozencolumns[c] then - widths[c] = widths[c] + extra - end - end - end - elseif nofwide > 0 then - while true do - done = false - local available = (widetotal + delta) / nofwide - if trace_xtable then - report_xtable("shrink check, total %p, delta %p, columns %s, fixed %p",widetotal,delta,nofwide,available) - end - for c=1,nofcolumns do - if autowidths[c] and available >= widths[c] then - autowidths[c] = nil - nofwide = nofwide - 1 - widetotal = widetotal - widths[c] - done = true - end - end - if not done then - break - end - end - -- maybe also options[v_width] here but tricky as width does not say - -- much about amount - if options[v_width] then -- not that much (we could have a clever vpack loop balancing .. 
no fun) - local factor = (widetotal + delta) / width - if trace_xtable then - report_xtable("proportional shrink used, total %p, delta %p, columns %s, factor %s",widetotal,delta,nofwide,factor) - end - for c=1,nofcolumns do - if autowidths[c] then - widths[c] = factor * widths[c] - end - end - else - local available = (widetotal + delta) / nofwide - if trace_xtable then - report_xtable("normal shrink used, total %p, delta %p, columns %s, fixed %p",widetotal,delta,nofwide,available) - end - for c=1,nofcolumns do - if autowidths[c] then - widths[c] = available - end - end - end - end - if trace_xtable then - showwidths("stage 3",widths,autowidths) - end - -- - data.currentrow = 0 - data.currentcolumn = 0 -end - -function xtables.reflow_height() - data.currentrow = 0 - data.currentcolumn = 0 - local settings = data.settings - if settings.options[v_height] then - local heights = data.heights - local depths = data.depths - local nofrows = data.nofrows - local totalheight = 0 - local totaldepth = 0 - for i=1,nofrows do - totalheight = totalheight + heights[i] - totalheight = totalheight + depths [i] - end - local total = totalheight + totaldepth - local leftover = settings.textheight - total - if leftover > 0 then - local leftheight = (totalheight / total ) * leftover / #heights - local leftdepth = (totaldepth / total ) * leftover / #depths - for i=1,nofrows do - heights[i] = heights[i] + leftheight - depths [i] = depths [i] + leftdepth - end - end - end -end - -local function showspans(data) - local rows = data.rows - local modes = data.modes - local nofcolumns = data.nofcolumns - local nofrows = data.nofrows - for r=1,nofrows do - local line = { } - local row = rows[r] - for c=1,nofcolumns do - local cell =row[c] - if cell.list then - line[#line+1] = "list" - elseif cell.span then - line[#line+1] = "span" - else - line[#line+1] = "none" - end - end - report_xtable("%3d : %s : % t",r,namedmodes[modes[r]] or "----",line) - end -end - -function xtables.construct() - local rows = data.rows - local heights = data.heights - local depths = data.depths - local widths = data.widths - local spans = data.spans - local distances = data.distances - local modes = data.modes - local settings = data.settings - local nofcolumns = data.nofcolumns - local nofrows = data.nofrows - local columndistance = settings.columndistance - local rowdistance = settings.rowdistance - local leftmargindistance = settings.leftmargindistance - local rightmargindistance = settings.rightmargindistance - -- ranges can be mixes so we collect - - if trace_xtable then - showspans(data) - end - - local ranges = { - [head_mode] = { }, - [foot_mode] = { }, - [more_mode] = { }, - [body_mode] = { }, - } - for r=1,nofrows do - local m = modes[r] - if m == 0 then - m = body_mode - end - local range = ranges[m] - range[#range+1] = r - end - -- todo: hook in the splitter ... the splitter can ask for a chunk of - -- a certain size ... no longer a split memory issue then and header - -- footer then has to happen here too .. 
target height - local function packaged_column(r) - local row = rows[r] - local start = nil - local stop = nil - if leftmargindistance > 0 then - start = new_kern(leftmargindistance) - stop = start - end - local hasspan = false - for c=1,nofcolumns do - local drc = row[c] - if not hasspan then - hasspan = drc.span - end - local list = drc.list - if list then - list.shift = list.height + list.depth - -- list = hpack_node_list(list) -- is somehow needed - -- list.width = 0 - -- list.height = 0 - -- list.depth = 0 - -- faster: - local h = new_hlist() - h.list = list - list = h - -- - if start then - stop.next = list - list.prev = stop - else - start = list - end - stop = list -- one node anyway, so not needed: slide_node_list(list) - end - local step = widths[c] - if c < nofcolumns then - step = step + columndistance + distances[c] - end - local kern = new_kern(step) - if stop then - stop.prev = kern - stop.next = kern - else -- can be first spanning next row (ny=...) - start = kern - end - stop = kern - end - if start then - if rightmargindistance > 0 then - local kern = new_kern(rightmargindistance) - stop.next = kern - kern.prev = stop - -- stop = kern - end - return start, heights[r] + depths[r], hasspan - end - end - local function collect_range(range) - local result, nofr = { }, 0 - local nofrange = #range - for i=1,#range do - local r = range[i] - -- local row = rows[r] - local list, size, hasspan = packaged_column(r) - if list then - if hasspan and nofr > 0 then - result[nofr][4] = true - end - nofr = nofr + 1 - result[nofr] = { - hpack_node_list(list), - size, - i < nofrange and rowdistance > 0 and rowdistance or false, -- might move - false - } - end - end - return result - end - local body = collect_range(ranges[body_mode]) - data.results = { - [head_mode] = collect_range(ranges[head_mode]), - [foot_mode] = collect_range(ranges[foot_mode]), - [more_mode] = collect_range(ranges[more_mode]), - [body_mode] = body, - } - if #body == 0 then - texsetcount("global","c_tabl_x_state",0) - texsetdimen("global","d_tabl_x_final_width",0) - else - texsetcount("global","c_tabl_x_state",1) - texsetdimen("global","d_tabl_x_final_width",body[1][1].width) - end -end - -local function inject(row,copy,package) - local list = row[1] - if copy then - row[1] = copy_node_list(list) - end - if package then - context_beginvbox() - context(list) - context(new_kern(row[2])) - context_endvbox() - context_nointerlineskip() -- figure out a better way - if row[4] then - -- nothing as we have a span - elseif row[3] then - context_blank(row[3] .. "sp") -- why blank ? - else - context(new_glue(0)) - end - else - context(list) - context(new_kern(row[2])) - if row[3] then - context(new_glue(row[3])) - end - end -end - -local function total(row,distance) - local n = #row > 0 and rowdistance or 0 - for i=1,#row do - local ri = row[i] - n = n + ri[2] + (ri[3] or 0) - end - return n -end - --- local function append(list,what) --- for i=1,#what do --- local l = what[i] --- list[#list+1] = l[1] --- local k = l[2] + (l[3] or 0) --- if k ~= 0 then --- list[#list+1] = new_kern(k) --- end --- end --- end - -local function spanheight(body,i) - local height, n = 0, 1 - while true do - local bi = body[i] - if bi then - height = height + bi[2] + (bi[3] or 0) - if bi[4] then - n = n + 1 - i = i + 1 - else - break - end - else - break - end - end - return height, n -end - -function xtables.flush(directives) -- todo split by size / no inbetween then .. 
glue list kern blank - local vsize = directives.vsize - local method = directives.method or v_normal - local settings = data.settings - local results = data.results - local rowdistance = settings.rowdistance - local head = results[head_mode] - local foot = results[foot_mode] - local more = results[more_mode] - local body = results[body_mode] - local repeatheader = settings.header == v_repeat - local repeatfooter = settings.footer == v_repeat - if vsize and vsize > 0 then - context_beginvbox() - local bodystart = data.bodystart or 1 - local bodystop = data.bodystop or #body - if bodystart > 0 and bodystart <= bodystop then - local bodysize = vsize - local footsize = total(foot,rowdistance) - local headsize = total(head,rowdistance) - local moresize = total(more,rowdistance) - local firstsize, firstspans = spanheight(body,bodystart) - if bodystart == 1 then -- first chunk gets head - bodysize = bodysize - headsize - footsize - if headsize > 0 and bodysize >= firstsize then - for i=1,#head do - inject(head[i],repeatheader) - end - if rowdistance > 0 then - context(new_glue(rowdistance)) - end - if not repeatheader then - results[head_mode] = { } - end - end - elseif moresize > 0 then -- following chunk gets next - bodysize = bodysize - footsize - moresize - if bodysize >= firstsize then - for i=1,#more do - inject(more[i],true) - end - if rowdistance > 0 then - context(new_glue(rowdistance)) - end - end - elseif headsize > 0 and repeatheader then -- following chunk gets head - bodysize = bodysize - footsize - headsize - if bodysize >= firstsize then - for i=1,#head do - inject(head[i],true) - end - if rowdistance > 0 then - context(new_glue(rowdistance)) - end - end - else -- following chunk gets nothing - bodysize = bodysize - footsize - end - if bodysize >= firstsize then - local i = bodystart - while i <= bodystop do -- room for improvement - local total, spans = spanheight(body,i) - local bs = bodysize - total - if bs > 0 then - bodysize = bs - for s=1,spans do - inject(body[i]) - body[i] = nil - i = i + 1 - end - bodystart = i - else - break - end - end - if bodystart > bodystop then - -- all is flushed and footer fits - if footsize > 0 then - if rowdistance > 0 then - context(new_glue(rowdistance)) - end - for i=1,#foot do - inject(foot[i]) - end - results[foot_mode] = { } - end - results[body_mode] = { } - texsetcount("global","c_tabl_x_state",0) - else - -- some is left so footer is delayed - -- todo: try to flush a few more lines - if repeatfooter and footsize > 0 then - if rowdistance > 0 then - context(new_glue(rowdistance)) - end - for i=1,#foot do - inject(foot[i],true) - end - else - -- todo: try to fit more of body - end - texsetcount("global","c_tabl_x_state",2) - end - else - if firstsize > vsize then - -- get rid of the too large cell - for s=1,firstspans do - inject(body[bodystart]) - body[bodystart] = nil - bodystart = bodystart + 1 - end - end - texsetcount("global","c_tabl_x_state",2) -- 1 - end - else - texsetcount("global","c_tabl_x_state",0) - end - data.bodystart = bodystart - data.bodystop = bodystop - context_endvbox() - else - if method == variables.split then - -- maybe also a non float mode with header/footer repeat although - -- we can also use a float without caption - for i=1,#head do - inject(head[i],false,true) - end - if #head > 0 and rowdistance > 0 then - context_blank(rowdistance .. "sp") - end - for i=1,#body do - inject(body[i],false,true) - end - if #foot > 0 and rowdistance > 0 then - context_blank(rowdistance .. 
"sp") - end - for i=1,#foot do - inject(foot[i],false,true) - end - else -- normal - context_beginvbox() - for i=1,#head do - inject(head[i]) - end - if #head > 0 and rowdistance > 0 then - context(new_glue(rowdistance)) - end - for i=1,#body do - inject(body[i]) - end - if #foot > 0 and rowdistance > 0 then - context(new_glue(rowdistance)) - end - for i=1,#foot do - inject(foot[i]) - end - context_endvbox() - end - results[head_mode] = { } - results[body_mode] = { } - results[foot_mode] = { } - texsetcount("global","c_tabl_x_state",0) - end -end - -function xtables.cleanup() - for mode, result in next, data.results do - for _, r in next, result do - flush_node_list(r[1]) - end - end - data = table.remove(stack) -end - -function xtables.next_row() - local r = data.currentrow + 1 - data.modes[r] = texcount.c_tabl_x_mode - data.currentrow = r - data.currentcolumn = 0 -end - --- eventually we might only have commands - -commands.x_table_create = xtables.create -commands.x_table_reflow_width = xtables.reflow_width -commands.x_table_reflow_height = xtables.reflow_height -commands.x_table_construct = xtables.construct -commands.x_table_flush = xtables.flush -commands.x_table_cleanup = xtables.cleanup -commands.x_table_next_row = xtables.next_row -commands.x_table_init_reflow_width = xtables.initialize_reflow_width -commands.x_table_init_reflow_height = xtables.initialize_reflow_height -commands.x_table_init_construct = xtables.initialize_construct -commands.x_table_set_reflow_width = xtables.set_reflow_width -commands.x_table_set_reflow_height = xtables.set_reflow_height -commands.x_table_set_construct = xtables.set_construct +if not modules then modules = { } end modules ['tabl-xtb'] = { + version = 1.001, + comment = "companion to tabl-xtb.mkvi", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ + +This table mechanism is a combination between TeX and Lua. We do process +cells at the TeX end and inspect them at the Lua end. After some analysis +we have a second pass using the calculated widths, and if needed cells +will go through a third pass to get the heights right. This last pass is +avoided when possible which is why some code below looks a bit more +complex than needed. The reason for such optimizations is that each cells +is actually a framed instance and because tables like this can be hundreds +of pages we want to keep processing time reasonable. + +To a large extend the behaviour is comparable with the way bTABLE/eTABLE +works and there is a module that maps that one onto this one. Eventually +this mechamism will be improved so that it can replace its older cousin. 
+ +]]-- + +-- todo: use linked list instead of r/c array + +local commands, context, tex, node = commands, context, tex, node + +local texdimen = tex.dimen +local texcount = tex.count +local texbox = tex.box +local texsetcount = tex.setcount +local texsetdimen = tex.setdimen + +local format = string.format +local concat = table.concat +local points = number.points + +local context = context +local context_beginvbox = context.beginvbox +local context_endvbox = context.endvbox +local context_blank = context.blank +local context_nointerlineskip = context.nointerlineskip + +local variables = interfaces.variables + +local setmetatableindex = table.setmetatableindex +local settings_to_hash = utilities.parsers.settings_to_hash + +local copy_node_list = node.copy_list +local hpack_node_list = node.hpack +local vpack_node_list = node.vpack +local slide_node_list = node.slide +local flush_node_list = node.flush_list + +local nodepool = nodes.pool + +local new_glue = nodepool.glue +local new_kern = nodepool.kern +local new_penalty = nodepool.penalty +local new_hlist = nodepool.hlist + +local v_stretch = variables.stretch +local v_normal = variables.normal +local v_width = variables.width +local v_height = variables.height +local v_repeat = variables["repeat"] +local v_max = variables.max +local v_fixed = variables.fixed + +local xtables = { } +typesetters.xtables = xtables + +local trace_xtable = false +local report_xtable = logs.reporter("xtable") + +trackers.register("xtable.construct", function(v) trace_xtable = v end) + +local null_mode = 0 +local head_mode = 1 +local foot_mode = 2 +local more_mode = 3 +local body_mode = 4 + +local namedmodes = { [0] = + "null", + "head", + "foot", + "next", + "body", +} + +local stack, data = { }, nil + +function xtables.create(settings) + table.insert(stack,data) + local rows = { } + local widths = { } + local heights = { } + local depths = { } + local spans = { } + local distances = { } + local autowidths = { } + local modes = { } + local fixedrows = { } + local fixedcolumns = { } + local frozencolumns = { } + local options = { } + data = { + rows = rows, + widths = widths, + heights = heights, + depths = depths, + spans = spans, + distances = distances, + modes = modes, + autowidths = autowidths, + fixedrows = fixedrows, + fixedcolumns = fixedcolumns, + frozencolumns = frozencolumns, + options = options, + nofrows = 0, + nofcolumns = 0, + currentrow = 0, + currentcolumn = 0, + settings = settings or { }, + } + local function add_zero(t,k) + t[k] = 0 + return 0 + end + local function add_table(t,k) + local v = { } + t[k] = v + return v + end + local function add_cell(row,c) + local cell = { + nx = 0, + ny = 0, + list = false, + } + row[c] = cell + if c > data.nofcolumns then + data.nofcolumns = c + end + return cell + end + local function add_row(rows,r) + local row = { } + setmetatableindex(row,add_cell) + rows[r] = row + if r > data.nofrows then + data.nofrows = r + end + return row + end + setmetatableindex(rows,add_row) + setmetatableindex(widths,add_zero) + setmetatableindex(heights,add_zero) + setmetatableindex(depths,add_zero) + setmetatableindex(distances,add_zero) + setmetatableindex(modes,add_zero) + setmetatableindex(fixedrows,add_zero) + setmetatableindex(fixedcolumns,add_zero) + setmetatableindex(options,add_table) + -- + settings.columndistance = tonumber(settings.columndistance) or 0 + settings.rowdistance = tonumber(settings.rowdistance) or 0 + settings.leftmargindistance = tonumber(settings.leftmargindistance) or 0 + 
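-- The add_row/add_cell/add_zero defaults wired up just above rely on __index
-- metamethods (table.setmetatableindex is, in essence, setmetatable plus such a
-- handler), so rows[r][c] always yields a usable cell without bounds checks.
-- A minimal stand-alone sketch of that idea, in plain Lua with illustrative
-- names only (not this module's code):

local function autotable(maker)
    return setmetatable({ }, {
        __index = function(t,k)
            local v = maker(t,k)
            t[k] = v -- cache, so the handler runs only once per key
            return v
        end,
    })
end

local grid = autotable(function()            -- rows appear on demand
    return autotable(function()              -- and so do cells
        return { nx = 0, ny = 0, list = false }
    end)
end)

grid[3][2].nx = 1                            -- row 3 and cell (3,2) now exist
print(grid[3][2].nx)                         --> 1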
settings.rightmargindistance = tonumber(settings.rightmargindistance) or 0 + settings.options = settings_to_hash(settings.option) + settings.textwidth = tonumber(settings.textwidth) or tex.hsize + settings.lineheight = tonumber(settings.lineheight) or texdimen.lineheight + settings.maxwidth = tonumber(settings.maxwidth) or settings.textwidth/8 + -- if #stack > 0 then + -- settings.textwidth = tex.hsize + -- end + data.criterium_v = 2 * data.settings.lineheight + data.criterium_h = .75 * data.settings.textwidth + +end + +function xtables.initialize_reflow_width(option) + local r = data.currentrow + local c = data.currentcolumn + 1 + local drc = data.rows[r][c] + drc.nx = texcount.c_tabl_x_nx + drc.ny = texcount.c_tabl_x_ny + local distances = data.distances + local distance = texdimen.d_tabl_x_distance + if distance > distances[c] then + distances[c] = distance + end + if option and option ~= "" then + local options = settings_to_hash(option) + data.options[r][c] = options + if options[v_fixed] then + data.frozencolumns[c] = true + end + end + data.currentcolumn = c +end + +-- local function rather_fixed(n) +-- for n in node. + +function xtables.set_reflow_width() + local r = data.currentrow + local c = data.currentcolumn + local rows = data.rows + local row = rows[r] + while row[c].span do -- can also be previous row ones + c = c + 1 + end + local tb = texbox.b_tabl_x + local drc = row[c] + -- + drc.list = true -- we don't need to keep the content around as we're in trial mode (no: copy_node_list(tb)) + -- + local widths, width = data.widths, tb.width + if width > widths[c] then + widths[c] = width + end + local heights, height = data.heights, tb.height + if height > heights[r] then + heights[r] = height + end + local depths, depth = data.depths, tb.depth + if depth > depths[r] then + depths[r] = depth + end + -- + local dimensionstate = texcount.frameddimensionstate + local fixedcolumns = data.fixedcolumns + local fixedrows = data.fixedrows + if dimensionstate == 1 then + if width > fixedcolumns[c] then -- how about a span here? + fixedcolumns[c] = width + end + elseif dimensionstate == 2 then + fixedrows[r] = height + elseif dimensionstate == 3 then + fixedrows[r] = height -- width + fixedcolumns[c] = width -- height + else -- probably something frozen, like an image -- we could parse the list + if width <= data.criterium_h and height >= data.criterium_v then + if width > fixedcolumns[c] then -- how about a span here? 
+ fixedcolumns[c] = width + end + end + end + drc.dimensionstate = dimensionstate + -- + local nx, ny = drc.nx, drc.ny + if nx > 1 or ny > 1 then + local spans = data.spans + local self = true + for y=1,ny do + for x=1,nx do + if self then + self = false + else + local ry = r + y - 1 + local cx = c + x - 1 + if y > 1 then + spans[ry] = true + end + rows[ry][cx].span = true + end + end + end + c = c + nx - 1 + end + if c > data.nofcolumns then + data.nofcolumns = c + end + data.currentcolumn = c +end + +function xtables.initialize_reflow_height() + local r = data.currentrow + local c = data.currentcolumn + 1 + local rows = data.rows + local row = rows[r] + while row[c].span do -- can also be previous row ones + c = c + 1 + end + data.currentcolumn = c + local widths = data.widths + local w = widths[c] + local drc = row[c] + for x=1,drc.nx-1 do + w = w + widths[c+x] + end + texdimen.d_tabl_x_width = w + local dimensionstate = drc.dimensionstate or 0 + if dimensionstate == 1 or dimensionstate == 3 then + -- width was fixed so height is known + texcount.c_tabl_x_skip_mode = 1 + elseif dimensionstate == 2 then + -- height is enforced + texcount.c_tabl_x_skip_mode = 1 + elseif data.autowidths[c] then + -- width has changed so we need to recalculate the height + texcount.c_tabl_x_skip_mode = 0 + else + texcount.c_tabl_x_skip_mode = 1 + end +end + +function xtables.set_reflow_height() + local r = data.currentrow + local c = data.currentcolumn + local rows = data.rows + local row = rows[r] +-- while row[c].span do -- we could adapt drc.nx instead +-- c = c + 1 +-- end + local tb = texbox.b_tabl_x + local drc = row[c] + if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2 + local heights, height = data.heights, tb.height + if height > heights[r] then + heights[r] = height + end + local depths, depth = data.depths, tb.depth + if depth > depths[r] then + depths[r] = depth + end + end +-- c = c + drc.nx - 1 +-- data.currentcolumn = c +end + +function xtables.initialize_construct() + local r = data.currentrow + local c = data.currentcolumn + 1 + local rows = data.rows + local row = rows[r] + while row[c].span do -- can also be previous row ones + c = c + 1 + end + data.currentcolumn = c + local widths = data.widths + local heights = data.heights + local depths = data.depths + local w = widths[c] + local h = heights[r] + local d = depths[r] + local drc = row[c] + for x=1,drc.nx-1 do + w = w + widths[c+x] + end + for y=1,drc.ny-1 do + h = h + heights[r+y] + d = d + depths[r+y] + end + texdimen.d_tabl_x_width = w + texdimen.d_tabl_x_height = h + d + texdimen.d_tabl_x_depth = 0 +end + +function xtables.set_construct() + local r = data.currentrow + local c = data.currentcolumn + local rows = data.rows + local row = rows[r] +-- while row[c].span do -- can also be previous row ones +-- c = c + 1 +-- end + local drc = row[c] + -- this will change as soon as in luatex we can reset a box list without freeing + drc.list = copy_node_list(texbox.b_tabl_x) +-- c = c + drc.nx - 1 +-- data.currentcolumn = c +end + +local function showwidths(where,widths,autowidths) + local result = { } + for i=1,#widths do + result[#result+1] = format("%12s%s",points(widths[i]),autowidths[i] and "*" or " ") + end + return report_xtable("%s : %s",where,concat(result," ")) +end + +function xtables.reflow_width() + local nofrows = data.nofrows + local nofcolumns = data.nofcolumns + local rows = data.rows + for r=1,nofrows do + local row = rows[r] + for c=1,nofcolumns do + local drc = row[c] + if drc.list then + -- 
flush_node_list(drc.list) + drc.list = false + end + end + end + -- spread + local settings = data.settings + local options = settings.options + local maxwidth = settings.maxwidth + -- calculate width + local widths = data.widths + local distances = data.distances + local autowidths = data.autowidths + local fixedcolumns = data.fixedcolumns + local frozencolumns = data.frozencolumns + local width = 0 + local distance = 0 + local nofwide = 0 + local widetotal = 0 + local available = settings.textwidth - settings.leftmargindistance - settings.rightmargindistance + if trace_xtable then + showwidths("stage 1",widths,autowidths) + end + local noffrozen = 0 + if options[v_max] then + for c=1,nofcolumns do + width = width + widths[c] + if width > maxwidth then + autowidths[c] = true + nofwide = nofwide + 1 + widetotal = widetotal + widths[c] + end + if c < nofcolumns then + distance = distance + distances[c] + end + if frozencolumns[c] then + noffrozen = noffrozen + 1 -- brr, should be nx or so + end + end + else + for c=1,nofcolumns do -- also keep track of forced + local fixedwidth = fixedcolumns[c] + if fixedwidth > 0 then + widths[c] = fixedwidth + width = width + fixedwidth + else + width = width + widths[c] + if width > maxwidth then + autowidths[c] = true + nofwide = nofwide + 1 + widetotal = widetotal + widths[c] + end + end + if c < nofcolumns then + distance = distance + distances[c] + end + if frozencolumns[c] then + noffrozen = noffrozen + 1 -- brr, should be nx or so + end + end + end + if trace_xtable then + showwidths("stage 2",widths,autowidths) + end + local delta = available - width - distance - (nofcolumns-1) * settings.columndistance + if delta == 0 then + -- nothing to be done + if trace_xtable then + report_xtable("perfect fit") + end + elseif delta > 0 then + -- we can distribute some + if not options[v_stretch] then + -- not needed + if trace_xtable then + report_xtable("too wide but no stretch, delta %p",delta) + end + elseif options[v_width] then + local factor = delta / width + if trace_xtable then + report_xtable("proportional stretch, delta %p, width %p, factor %a",delta,width,factor) + end + for c=1,nofcolumns do + widths[c] = widths[c] + factor * widths[c] + end + else + -- frozen -> a column with option=fixed will not stretch + local extra = delta / (nofcolumns - noffrozen) + if trace_xtable then + report_xtable("normal stretch, delta %p, extra %p",delta,extra) + end + for c=1,nofcolumns do + if not frozencolumns[c] then + widths[c] = widths[c] + extra + end + end + end + elseif nofwide > 0 then + while true do + done = false + local available = (widetotal + delta) / nofwide + if trace_xtable then + report_xtable("shrink check, total %p, delta %p, columns %s, fixed %p",widetotal,delta,nofwide,available) + end + for c=1,nofcolumns do + if autowidths[c] and available >= widths[c] then + autowidths[c] = nil + nofwide = nofwide - 1 + widetotal = widetotal - widths[c] + done = true + end + end + if not done then + break + end + end + -- maybe also options[v_width] here but tricky as width does not say + -- much about amount + if options[v_width] then -- not that much (we could have a clever vpack loop balancing .. 
no fun) + local factor = (widetotal + delta) / width + if trace_xtable then + report_xtable("proportional shrink used, total %p, delta %p, columns %s, factor %s",widetotal,delta,nofwide,factor) + end + for c=1,nofcolumns do + if autowidths[c] then + widths[c] = factor * widths[c] + end + end + else + local available = (widetotal + delta) / nofwide + if trace_xtable then + report_xtable("normal shrink used, total %p, delta %p, columns %s, fixed %p",widetotal,delta,nofwide,available) + end + for c=1,nofcolumns do + if autowidths[c] then + widths[c] = available + end + end + end + end + if trace_xtable then + showwidths("stage 3",widths,autowidths) + end + -- + data.currentrow = 0 + data.currentcolumn = 0 +end + +function xtables.reflow_height() + data.currentrow = 0 + data.currentcolumn = 0 + local settings = data.settings + if settings.options[v_height] then + local heights = data.heights + local depths = data.depths + local nofrows = data.nofrows + local totalheight = 0 + local totaldepth = 0 + for i=1,nofrows do + totalheight = totalheight + heights[i] + totalheight = totalheight + depths [i] + end + local total = totalheight + totaldepth + local leftover = settings.textheight - total + if leftover > 0 then + local leftheight = (totalheight / total ) * leftover / #heights + local leftdepth = (totaldepth / total ) * leftover / #depths + for i=1,nofrows do + heights[i] = heights[i] + leftheight + depths [i] = depths [i] + leftdepth + end + end + end +end + +local function showspans(data) + local rows = data.rows + local modes = data.modes + local nofcolumns = data.nofcolumns + local nofrows = data.nofrows + for r=1,nofrows do + local line = { } + local row = rows[r] + for c=1,nofcolumns do + local cell =row[c] + if cell.list then + line[#line+1] = "list" + elseif cell.span then + line[#line+1] = "span" + else + line[#line+1] = "none" + end + end + report_xtable("%3d : %s : % t",r,namedmodes[modes[r]] or "----",line) + end +end + +function xtables.construct() + local rows = data.rows + local heights = data.heights + local depths = data.depths + local widths = data.widths + local spans = data.spans + local distances = data.distances + local modes = data.modes + local settings = data.settings + local nofcolumns = data.nofcolumns + local nofrows = data.nofrows + local columndistance = settings.columndistance + local rowdistance = settings.rowdistance + local leftmargindistance = settings.leftmargindistance + local rightmargindistance = settings.rightmargindistance + -- ranges can be mixes so we collect + + if trace_xtable then + showspans(data) + end + + local ranges = { + [head_mode] = { }, + [foot_mode] = { }, + [more_mode] = { }, + [body_mode] = { }, + } + for r=1,nofrows do + local m = modes[r] + if m == 0 then + m = body_mode + end + local range = ranges[m] + range[#range+1] = r + end + -- todo: hook in the splitter ... the splitter can ask for a chunk of + -- a certain size ... no longer a split memory issue then and header + -- footer then has to happen here too .. 
target height + local function packaged_column(r) + local row = rows[r] + local start = nil + local stop = nil + if leftmargindistance > 0 then + start = new_kern(leftmargindistance) + stop = start + end + local hasspan = false + for c=1,nofcolumns do + local drc = row[c] + if not hasspan then + hasspan = drc.span + end + local list = drc.list + if list then + list.shift = list.height + list.depth + -- list = hpack_node_list(list) -- is somehow needed + -- list.width = 0 + -- list.height = 0 + -- list.depth = 0 + -- faster: + local h = new_hlist() + h.list = list + list = h + -- + if start then + stop.next = list + list.prev = stop + else + start = list + end + stop = list -- one node anyway, so not needed: slide_node_list(list) + end + local step = widths[c] + if c < nofcolumns then + step = step + columndistance + distances[c] + end + local kern = new_kern(step) + if stop then + stop.prev = kern + stop.next = kern + else -- can be first spanning next row (ny=...) + start = kern + end + stop = kern + end + if start then + if rightmargindistance > 0 then + local kern = new_kern(rightmargindistance) + stop.next = kern + kern.prev = stop + -- stop = kern + end + return start, heights[r] + depths[r], hasspan + end + end + local function collect_range(range) + local result, nofr = { }, 0 + local nofrange = #range + for i=1,#range do + local r = range[i] + -- local row = rows[r] + local list, size, hasspan = packaged_column(r) + if list then + if hasspan and nofr > 0 then + result[nofr][4] = true + end + nofr = nofr + 1 + result[nofr] = { + hpack_node_list(list), + size, + i < nofrange and rowdistance > 0 and rowdistance or false, -- might move + false + } + end + end + return result + end + local body = collect_range(ranges[body_mode]) + data.results = { + [head_mode] = collect_range(ranges[head_mode]), + [foot_mode] = collect_range(ranges[foot_mode]), + [more_mode] = collect_range(ranges[more_mode]), + [body_mode] = body, + } + if #body == 0 then + texsetcount("global","c_tabl_x_state",0) + texsetdimen("global","d_tabl_x_final_width",0) + else + texsetcount("global","c_tabl_x_state",1) + texsetdimen("global","d_tabl_x_final_width",body[1][1].width) + end +end + +local function inject(row,copy,package) + local list = row[1] + if copy then + row[1] = copy_node_list(list) + end + if package then + context_beginvbox() + context(list) + context(new_kern(row[2])) + context_endvbox() + context_nointerlineskip() -- figure out a better way + if row[4] then + -- nothing as we have a span + elseif row[3] then + context_blank(row[3] .. "sp") -- why blank ? + else + context(new_glue(0)) + end + else + context(list) + context(new_kern(row[2])) + if row[3] then + context(new_glue(row[3])) + end + end +end + +local function total(row,distance) + local n = #row > 0 and rowdistance or 0 + for i=1,#row do + local ri = row[i] + n = n + ri[2] + (ri[3] or 0) + end + return n +end + +-- local function append(list,what) +-- for i=1,#what do +-- local l = what[i] +-- list[#list+1] = l[1] +-- local k = l[2] + (l[3] or 0) +-- if k ~= 0 then +-- list[#list+1] = new_kern(k) +-- end +-- end +-- end + +local function spanheight(body,i) + local height, n = 0, 1 + while true do + local bi = body[i] + if bi then + height = height + bi[2] + (bi[3] or 0) + if bi[4] then + n = n + 1 + i = i + 1 + else + break + end + else + break + end + end + return height, n +end + +function xtables.flush(directives) -- todo split by size / no inbetween then .. 
glue list kern blank + local vsize = directives.vsize + local method = directives.method or v_normal + local settings = data.settings + local results = data.results + local rowdistance = settings.rowdistance + local head = results[head_mode] + local foot = results[foot_mode] + local more = results[more_mode] + local body = results[body_mode] + local repeatheader = settings.header == v_repeat + local repeatfooter = settings.footer == v_repeat + if vsize and vsize > 0 then + context_beginvbox() + local bodystart = data.bodystart or 1 + local bodystop = data.bodystop or #body + if bodystart > 0 and bodystart <= bodystop then + local bodysize = vsize + local footsize = total(foot,rowdistance) + local headsize = total(head,rowdistance) + local moresize = total(more,rowdistance) + local firstsize, firstspans = spanheight(body,bodystart) + if bodystart == 1 then -- first chunk gets head + bodysize = bodysize - headsize - footsize + if headsize > 0 and bodysize >= firstsize then + for i=1,#head do + inject(head[i],repeatheader) + end + if rowdistance > 0 then + context(new_glue(rowdistance)) + end + if not repeatheader then + results[head_mode] = { } + end + end + elseif moresize > 0 then -- following chunk gets next + bodysize = bodysize - footsize - moresize + if bodysize >= firstsize then + for i=1,#more do + inject(more[i],true) + end + if rowdistance > 0 then + context(new_glue(rowdistance)) + end + end + elseif headsize > 0 and repeatheader then -- following chunk gets head + bodysize = bodysize - footsize - headsize + if bodysize >= firstsize then + for i=1,#head do + inject(head[i],true) + end + if rowdistance > 0 then + context(new_glue(rowdistance)) + end + end + else -- following chunk gets nothing + bodysize = bodysize - footsize + end + if bodysize >= firstsize then + local i = bodystart + while i <= bodystop do -- room for improvement + local total, spans = spanheight(body,i) + local bs = bodysize - total + if bs > 0 then + bodysize = bs + for s=1,spans do + inject(body[i]) + body[i] = nil + i = i + 1 + end + bodystart = i + else + break + end + end + if bodystart > bodystop then + -- all is flushed and footer fits + if footsize > 0 then + if rowdistance > 0 then + context(new_glue(rowdistance)) + end + for i=1,#foot do + inject(foot[i]) + end + results[foot_mode] = { } + end + results[body_mode] = { } + texsetcount("global","c_tabl_x_state",0) + else + -- some is left so footer is delayed + -- todo: try to flush a few more lines + if repeatfooter and footsize > 0 then + if rowdistance > 0 then + context(new_glue(rowdistance)) + end + for i=1,#foot do + inject(foot[i],true) + end + else + -- todo: try to fit more of body + end + texsetcount("global","c_tabl_x_state",2) + end + else + if firstsize > vsize then + -- get rid of the too large cell + for s=1,firstspans do + inject(body[bodystart]) + body[bodystart] = nil + bodystart = bodystart + 1 + end + end + texsetcount("global","c_tabl_x_state",2) -- 1 + end + else + texsetcount("global","c_tabl_x_state",0) + end + data.bodystart = bodystart + data.bodystop = bodystop + context_endvbox() + else + if method == variables.split then + -- maybe also a non float mode with header/footer repeat although + -- we can also use a float without caption + for i=1,#head do + inject(head[i],false,true) + end + if #head > 0 and rowdistance > 0 then + context_blank(rowdistance .. "sp") + end + for i=1,#body do + inject(body[i],false,true) + end + if #foot > 0 and rowdistance > 0 then + context_blank(rowdistance .. 
"sp") + end + for i=1,#foot do + inject(foot[i],false,true) + end + else -- normal + context_beginvbox() + for i=1,#head do + inject(head[i]) + end + if #head > 0 and rowdistance > 0 then + context(new_glue(rowdistance)) + end + for i=1,#body do + inject(body[i]) + end + if #foot > 0 and rowdistance > 0 then + context(new_glue(rowdistance)) + end + for i=1,#foot do + inject(foot[i]) + end + context_endvbox() + end + results[head_mode] = { } + results[body_mode] = { } + results[foot_mode] = { } + texsetcount("global","c_tabl_x_state",0) + end +end + +function xtables.cleanup() + for mode, result in next, data.results do + for _, r in next, result do + flush_node_list(r[1]) + end + end + data = table.remove(stack) +end + +function xtables.next_row() + local r = data.currentrow + 1 + data.modes[r] = texcount.c_tabl_x_mode + data.currentrow = r + data.currentcolumn = 0 +end + +-- eventually we might only have commands + +commands.x_table_create = xtables.create +commands.x_table_reflow_width = xtables.reflow_width +commands.x_table_reflow_height = xtables.reflow_height +commands.x_table_construct = xtables.construct +commands.x_table_flush = xtables.flush +commands.x_table_cleanup = xtables.cleanup +commands.x_table_next_row = xtables.next_row +commands.x_table_init_reflow_width = xtables.initialize_reflow_width +commands.x_table_init_reflow_height = xtables.initialize_reflow_height +commands.x_table_init_construct = xtables.initialize_construct +commands.x_table_set_reflow_width = xtables.set_reflow_width +commands.x_table_set_reflow_height = xtables.set_reflow_height +commands.x_table_set_construct = xtables.set_construct diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua index 0f477cb6e..41f045ac9 100644 --- a/tex/context/base/task-ini.lua +++ b/tex/context/base/task-ini.lua @@ -1,191 +1,191 @@ -if not modules then modules = { } end modules ['task-ini'] = { - version = 1.001, - comment = "companion to task-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this is a temporary solution, we need to isolate some modules and then --- the load order can determine the trickery to be applied to node lists --- --- we can disable more handlers and enable then when really used (*) --- --- todo: two finalizers: real shipout (can be imposed page) and page shipout (individual page) - -local tasks = nodes.tasks -local appendaction = tasks.appendaction -local disableaction = tasks.disableaction -local freezegroup = tasks.freezegroup -local freezecallbacks = callbacks.freeze - -appendaction("processors", "normalizers", "typesetters.characters.handler") -- always on -appendaction("processors", "normalizers", "fonts.collections.process") -- disabled -appendaction("processors", "normalizers", "fonts.checkers.missing") -- disabled - -appendaction("processors", "characters", "scripts.autofontfeature.handler") -appendaction("processors", "characters", "scripts.splitters.handler") -- disabled -appendaction("processors", "characters", "typesetters.cleaners.handler") -- disabled -appendaction("processors", "characters", "typesetters.directions.handler") -- disabled -appendaction("processors", "characters", "typesetters.cases.handler") -- disabled -appendaction("processors", "characters", "typesetters.breakpoints.handler") -- disabled -appendaction("processors", "characters", "scripts.injectors.handler") -- disabled - -appendaction("processors", "words", 
"builders.kernel.hyphenation") -- always on -appendaction("processors", "words", "languages.words.check") -- disabled - -appendaction("processors", "fonts", "builders.paragraphs.solutions.splitters.split") -- experimental -appendaction("processors", "fonts", "nodes.handlers.characters") -- maybe todo -appendaction("processors", "fonts", "nodes.injections.handler") -- maybe todo -appendaction("processors", "fonts", "nodes.handlers.protectglyphs", nil, "nohead") -- maybe todo -appendaction("processors", "fonts", "builders.kernel.ligaturing") -- always on (could be selective: if only node mode) -appendaction("processors", "fonts", "builders.kernel.kerning") -- always on (could be selective: if only node mode) -appendaction("processors", "fonts", "nodes.handlers.stripping") -- disabled (might move) -------------("processors", "fonts", "typesetters.italics.handler") -- disabled (after otf/kern handling) - -appendaction("processors", "lists", "typesetters.spacings.handler") -- disabled -appendaction("processors", "lists", "typesetters.kerns.handler") -- disabled -appendaction("processors", "lists", "typesetters.digits.handler") -- disabled (after otf handling) -appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling) -appendaction("processors", "lists", "typesetters.paragraphs.handler") -- disabled - -appendaction("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled -appendaction("shipouts", "normalizers", "typesetters.alignments.handler") -appendaction("shipouts", "normalizers", "nodes.references.handler") -- disabled -appendaction("shipouts", "normalizers", "nodes.destinations.handler") -- disabled -appendaction("shipouts", "normalizers", "nodes.rules.handler") -- disabled -appendaction("shipouts", "normalizers", "nodes.shifts.handler") -- disabled -appendaction("shipouts", "normalizers", "structures.tags.handler") -- disabled -appendaction("shipouts", "normalizers", "nodes.handlers.accessibility") -- disabled -appendaction("shipouts", "normalizers", "nodes.handlers.backgrounds") -- disabled -appendaction("shipouts", "normalizers", "nodes.handlers.alignbackgrounds") -- disabled -------------("shipouts", "normalizers", "nodes.handlers.export") -- disabled - -appendaction("shipouts", "finishers", "nodes.visualizers.handler") -- disabled -appendaction("shipouts", "finishers", "attributes.colors.handler") -- disabled -appendaction("shipouts", "finishers", "attributes.transparencies.handler") -- disabled -appendaction("shipouts", "finishers", "attributes.colorintents.handler") -- disabled -appendaction("shipouts", "finishers", "attributes.negatives.handler") -- disabled -appendaction("shipouts", "finishers", "attributes.effects.handler") -- disabled -appendaction("shipouts", "finishers", "attributes.viewerlayers.handler") -- disabled - ---maybe integrate relocate and families - -appendaction("math", "normalizers", "noads.handlers.unscript", nil, "nohead") -- always on (maybe disabled) -appendaction("math", "normalizers", "noads.handlers.variants", nil, "nohead") -- always on -appendaction("math", "normalizers", "noads.handlers.relocate", nil, "nohead") -- always on -appendaction("math", "normalizers", "noads.handlers.families", nil, "nohead") -- always on - -appendaction("math", "normalizers", "noads.handlers.render", nil, "nohead") -- always on -appendaction("math", "normalizers", "noads.handlers.collapse", nil, "nohead") -- always on -appendaction("math", "normalizers", "noads.handlers.resize", nil, "nohead") -- always on 
-------------("math", "normalizers", "noads.handlers.respace", nil, "nohead") -- always on -appendaction("math", "normalizers", "noads.handlers.check", nil, "nohead") -- always on -appendaction("math", "normalizers", "noads.handlers.tags", nil, "nohead") -- disabled -appendaction("math", "normalizers", "noads.handlers.italics", nil, "nohead") -- disabled - -appendaction("math", "builders", "builders.kernel.mlist_to_hlist") -- always on -------------("math", "builders", "noads.handlers.italics", nil, "nohead") -- disabled - --- quite experimental (nodes.handlers.graphicvadjust might go away) - -appendaction("finalizers", "lists", "builders.paragraphs.keeptogether") -appendaction("finalizers", "lists", "nodes.handlers.graphicvadjust") -- todo -appendaction("finalizers", "fonts", "builders.paragraphs.solutions.splitters.optimize") -- experimental -appendaction("finalizers", "lists", "builders.paragraphs.tag") - --- still experimental - -appendaction("mvlbuilders", "normalizers", "nodes.handlers.migrate") -- -appendaction("mvlbuilders", "normalizers", "builders.vspacing.pagehandler") -- last ! - -appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler") -- - --- experimental too - -appendaction("mvlbuilders","normalizers","typesetters.checkers.handler") -appendaction("vboxbuilders","normalizers","typesetters.checkers.handler") - --- speedup: only kick in when used - -disableaction("processors", "scripts.autofontfeature.handler") -disableaction("processors", "scripts.splitters.handler") -disableaction("processors", "scripts.injectors.handler") -- was enabled -disableaction("processors", "fonts.collections.process") -disableaction("processors", "fonts.checkers.missing") -disableaction("processors", "chars.handle_breakpoints") -disableaction("processors", "typesetters.cleaners.handler") -disableaction("processors", "typesetters.cases.handler") -disableaction("processors", "typesetters.digits.handler") -disableaction("processors", "typesetters.breakpoints.handler") -disableaction("processors", "typesetters.directions.handler") -disableaction("processors", "languages.words.check") -disableaction("processors", "typesetters.spacings.handler") -disableaction("processors", "typesetters.kerns.handler") -disableaction("processors", "typesetters.italics.handler") -disableaction("processors", "nodes.handlers.stripping") -disableaction("processors", "typesetters.paragraphs.handler") - -disableaction("shipouts", "typesetters.alignments.handler") -disableaction("shipouts", "nodes.rules.handler") -disableaction("shipouts", "nodes.shifts.handler") -disableaction("shipouts", "attributes.colors.handler") -disableaction("shipouts", "attributes.transparencies.handler") -disableaction("shipouts", "attributes.colorintents.handler") -disableaction("shipouts", "attributes.effects.handler") -disableaction("shipouts", "attributes.negatives.handler") -disableaction("shipouts", "attributes.viewerlayers.handler") -disableaction("shipouts", "structures.tags.handler") -disableaction("shipouts", "nodes.visualizers.handler") -disableaction("shipouts", "nodes.handlers.accessibility") -disableaction("shipouts", "nodes.handlers.backgrounds") -disableaction("shipouts", "nodes.handlers.alignbackgrounds") -disableaction("shipouts", "nodes.handlers.cleanuppage") - -disableaction("shipouts", "nodes.references.handler") -disableaction("shipouts", "nodes.destinations.handler") - ---~ disableaction("shipouts", "nodes.handlers.export") - -disableaction("mvlbuilders", "nodes.handlers.migrate") - 
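-- The appendaction/disableaction pairs around here register every handler up
-- front and then switch most of them off until a feature actually needs them,
-- so an unused handler costs little more than a skipped flag test. The core of
-- that pattern, as a stand-alone sketch with invented names (not the actual
-- nodes.tasks implementation):

local actions = { }                          -- ordered list of handlers

local function append(name,fn)
    actions[#actions+1] = { name = name, enabled = true, fn = fn }
end

local function disable(name)
    for i=1,#actions do
        if actions[i].name == name then
            actions[i].enabled = false
        end
    end
end

local function process(head)                 -- 'head' is a node list in the real system
    for i=1,#actions do
        local a = actions[i]
        if a.enabled then
            head = a.fn(head) or head
        end
    end
    return head
end

append("uppercase", function(s) return s:upper() end)
append("trim",      function(s) return (s:gsub("^%s+","")) end)
disable("uppercase")                         -- registered, but inert until enabled
print(process("  hello"))                    --> hello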
-disableaction("processors", "builders.paragraphs.solutions.splitters.split") - -disableaction("finalizers", "builders.paragraphs.keeptogether") -disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize") -disableaction("finalizers", "nodes.handlers.graphicvadjust") -- sort of obsolete -disableaction("finalizers", "builders.paragraphs.tag") - -disableaction("math", "noads.handlers.tags") -disableaction("math", "noads.handlers.italics") - -disableaction("mvlbuilders", "typesetters.checkers.handler") -disableaction("vboxbuilders","typesetters.checkers.handler") - -freezecallbacks("find_.*_file", "find file using resolver") -freezecallbacks("read_.*_file", "read file at once") -freezecallbacks("open_.*_file", "open file for reading") - --- experimental: - -freezegroup("processors", "normalizers") -freezegroup("processors", "characters") -freezegroup("processors", "words") -freezegroup("processors", "fonts") -freezegroup("processors", "lists") - -freezegroup("finalizers", "normalizers") -freezegroup("finalizers", "fonts") -freezegroup("finalizers", "lists") - -freezegroup("shipouts", "normalizers") -freezegroup("shipouts", "finishers") - -freezegroup("mvlbuilders", "normalizers") -freezegroup("vboxbuilders", "normalizers") - ------------("parbuilders", "lists") ------------("pagebuilders", "lists") - -freezegroup("math", "normalizers") -freezegroup("math", "builders") +if not modules then modules = { } end modules ['task-ini'] = { + version = 1.001, + comment = "companion to task-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this is a temporary solution, we need to isolate some modules and then +-- the load order can determine the trickery to be applied to node lists +-- +-- we can disable more handlers and enable then when really used (*) +-- +-- todo: two finalizers: real shipout (can be imposed page) and page shipout (individual page) + +local tasks = nodes.tasks +local appendaction = tasks.appendaction +local disableaction = tasks.disableaction +local freezegroup = tasks.freezegroup +local freezecallbacks = callbacks.freeze + +appendaction("processors", "normalizers", "typesetters.characters.handler") -- always on +appendaction("processors", "normalizers", "fonts.collections.process") -- disabled +appendaction("processors", "normalizers", "fonts.checkers.missing") -- disabled + +appendaction("processors", "characters", "scripts.autofontfeature.handler") +appendaction("processors", "characters", "scripts.splitters.handler") -- disabled +appendaction("processors", "characters", "typesetters.cleaners.handler") -- disabled +appendaction("processors", "characters", "typesetters.directions.handler") -- disabled +appendaction("processors", "characters", "typesetters.cases.handler") -- disabled +appendaction("processors", "characters", "typesetters.breakpoints.handler") -- disabled +appendaction("processors", "characters", "scripts.injectors.handler") -- disabled + +appendaction("processors", "words", "builders.kernel.hyphenation") -- always on +appendaction("processors", "words", "languages.words.check") -- disabled + +appendaction("processors", "fonts", "builders.paragraphs.solutions.splitters.split") -- experimental +appendaction("processors", "fonts", "nodes.handlers.characters") -- maybe todo +appendaction("processors", "fonts", "nodes.injections.handler") -- maybe todo +appendaction("processors", "fonts", "nodes.handlers.protectglyphs", nil, 
"nohead") -- maybe todo +appendaction("processors", "fonts", "builders.kernel.ligaturing") -- always on (could be selective: if only node mode) +appendaction("processors", "fonts", "builders.kernel.kerning") -- always on (could be selective: if only node mode) +appendaction("processors", "fonts", "nodes.handlers.stripping") -- disabled (might move) +------------("processors", "fonts", "typesetters.italics.handler") -- disabled (after otf/kern handling) + +appendaction("processors", "lists", "typesetters.spacings.handler") -- disabled +appendaction("processors", "lists", "typesetters.kerns.handler") -- disabled +appendaction("processors", "lists", "typesetters.digits.handler") -- disabled (after otf handling) +appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling) +appendaction("processors", "lists", "typesetters.paragraphs.handler") -- disabled + +appendaction("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled +appendaction("shipouts", "normalizers", "typesetters.alignments.handler") +appendaction("shipouts", "normalizers", "nodes.references.handler") -- disabled +appendaction("shipouts", "normalizers", "nodes.destinations.handler") -- disabled +appendaction("shipouts", "normalizers", "nodes.rules.handler") -- disabled +appendaction("shipouts", "normalizers", "nodes.shifts.handler") -- disabled +appendaction("shipouts", "normalizers", "structures.tags.handler") -- disabled +appendaction("shipouts", "normalizers", "nodes.handlers.accessibility") -- disabled +appendaction("shipouts", "normalizers", "nodes.handlers.backgrounds") -- disabled +appendaction("shipouts", "normalizers", "nodes.handlers.alignbackgrounds") -- disabled +------------("shipouts", "normalizers", "nodes.handlers.export") -- disabled + +appendaction("shipouts", "finishers", "nodes.visualizers.handler") -- disabled +appendaction("shipouts", "finishers", "attributes.colors.handler") -- disabled +appendaction("shipouts", "finishers", "attributes.transparencies.handler") -- disabled +appendaction("shipouts", "finishers", "attributes.colorintents.handler") -- disabled +appendaction("shipouts", "finishers", "attributes.negatives.handler") -- disabled +appendaction("shipouts", "finishers", "attributes.effects.handler") -- disabled +appendaction("shipouts", "finishers", "attributes.viewerlayers.handler") -- disabled + +--maybe integrate relocate and families + +appendaction("math", "normalizers", "noads.handlers.unscript", nil, "nohead") -- always on (maybe disabled) +appendaction("math", "normalizers", "noads.handlers.variants", nil, "nohead") -- always on +appendaction("math", "normalizers", "noads.handlers.relocate", nil, "nohead") -- always on +appendaction("math", "normalizers", "noads.handlers.families", nil, "nohead") -- always on + +appendaction("math", "normalizers", "noads.handlers.render", nil, "nohead") -- always on +appendaction("math", "normalizers", "noads.handlers.collapse", nil, "nohead") -- always on +appendaction("math", "normalizers", "noads.handlers.resize", nil, "nohead") -- always on +------------("math", "normalizers", "noads.handlers.respace", nil, "nohead") -- always on +appendaction("math", "normalizers", "noads.handlers.check", nil, "nohead") -- always on +appendaction("math", "normalizers", "noads.handlers.tags", nil, "nohead") -- disabled +appendaction("math", "normalizers", "noads.handlers.italics", nil, "nohead") -- disabled + +appendaction("math", "builders", "builders.kernel.mlist_to_hlist") -- always on +------------("math", 
"builders", "noads.handlers.italics", nil, "nohead") -- disabled + +-- quite experimental (nodes.handlers.graphicvadjust might go away) + +appendaction("finalizers", "lists", "builders.paragraphs.keeptogether") +appendaction("finalizers", "lists", "nodes.handlers.graphicvadjust") -- todo +appendaction("finalizers", "fonts", "builders.paragraphs.solutions.splitters.optimize") -- experimental +appendaction("finalizers", "lists", "builders.paragraphs.tag") + +-- still experimental + +appendaction("mvlbuilders", "normalizers", "nodes.handlers.migrate") -- +appendaction("mvlbuilders", "normalizers", "builders.vspacing.pagehandler") -- last ! + +appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler") -- + +-- experimental too + +appendaction("mvlbuilders","normalizers","typesetters.checkers.handler") +appendaction("vboxbuilders","normalizers","typesetters.checkers.handler") + +-- speedup: only kick in when used + +disableaction("processors", "scripts.autofontfeature.handler") +disableaction("processors", "scripts.splitters.handler") +disableaction("processors", "scripts.injectors.handler") -- was enabled +disableaction("processors", "fonts.collections.process") +disableaction("processors", "fonts.checkers.missing") +disableaction("processors", "chars.handle_breakpoints") +disableaction("processors", "typesetters.cleaners.handler") +disableaction("processors", "typesetters.cases.handler") +disableaction("processors", "typesetters.digits.handler") +disableaction("processors", "typesetters.breakpoints.handler") +disableaction("processors", "typesetters.directions.handler") +disableaction("processors", "languages.words.check") +disableaction("processors", "typesetters.spacings.handler") +disableaction("processors", "typesetters.kerns.handler") +disableaction("processors", "typesetters.italics.handler") +disableaction("processors", "nodes.handlers.stripping") +disableaction("processors", "typesetters.paragraphs.handler") + +disableaction("shipouts", "typesetters.alignments.handler") +disableaction("shipouts", "nodes.rules.handler") +disableaction("shipouts", "nodes.shifts.handler") +disableaction("shipouts", "attributes.colors.handler") +disableaction("shipouts", "attributes.transparencies.handler") +disableaction("shipouts", "attributes.colorintents.handler") +disableaction("shipouts", "attributes.effects.handler") +disableaction("shipouts", "attributes.negatives.handler") +disableaction("shipouts", "attributes.viewerlayers.handler") +disableaction("shipouts", "structures.tags.handler") +disableaction("shipouts", "nodes.visualizers.handler") +disableaction("shipouts", "nodes.handlers.accessibility") +disableaction("shipouts", "nodes.handlers.backgrounds") +disableaction("shipouts", "nodes.handlers.alignbackgrounds") +disableaction("shipouts", "nodes.handlers.cleanuppage") + +disableaction("shipouts", "nodes.references.handler") +disableaction("shipouts", "nodes.destinations.handler") + +--~ disableaction("shipouts", "nodes.handlers.export") + +disableaction("mvlbuilders", "nodes.handlers.migrate") + +disableaction("processors", "builders.paragraphs.solutions.splitters.split") + +disableaction("finalizers", "builders.paragraphs.keeptogether") +disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize") +disableaction("finalizers", "nodes.handlers.graphicvadjust") -- sort of obsolete +disableaction("finalizers", "builders.paragraphs.tag") + +disableaction("math", "noads.handlers.tags") +disableaction("math", "noads.handlers.italics") + 
+disableaction("mvlbuilders", "typesetters.checkers.handler") +disableaction("vboxbuilders","typesetters.checkers.handler") + +freezecallbacks("find_.*_file", "find file using resolver") +freezecallbacks("read_.*_file", "read file at once") +freezecallbacks("open_.*_file", "open file for reading") + +-- experimental: + +freezegroup("processors", "normalizers") +freezegroup("processors", "characters") +freezegroup("processors", "words") +freezegroup("processors", "fonts") +freezegroup("processors", "lists") + +freezegroup("finalizers", "normalizers") +freezegroup("finalizers", "fonts") +freezegroup("finalizers", "lists") + +freezegroup("shipouts", "normalizers") +freezegroup("shipouts", "finishers") + +freezegroup("mvlbuilders", "normalizers") +freezegroup("vboxbuilders", "normalizers") + +-----------("parbuilders", "lists") +-----------("pagebuilders", "lists") + +freezegroup("math", "normalizers") +freezegroup("math", "builders") diff --git a/tex/context/base/toks-ini.lua b/tex/context/base/toks-ini.lua index ef4b5406b..0136f274f 100644 --- a/tex/context/base/toks-ini.lua +++ b/tex/context/base/toks-ini.lua @@ -1,341 +1,341 @@ -if not modules then modules = { } end modules ['toks-ini'] = { - version = 1.001, - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values -local format, gsub = string.format, string.gsub - ---[[ldx-- -

-<p>This code is experimental and needs a cleanup. The visualizers will move to
-a module.</p>
    ---ldx]]-- - --- 1 = command, 2 = modifier (char), 3 = controlsequence id --- --- callback.register('token_filter', token.get_next) --- --- token.get_next() --- token.expand() --- token.create() --- token.csname_id() --- token.csname_name(v) --- token.command_id() --- token.command_name(v) --- token.is_expandable() --- token.is_activechar() --- token.lookup(v) - --- actually, we can use token registers to store tokens - -local token, tex = token, tex - -local createtoken = token.create -local csname_id = token.csname_id -local command_id = token.command_id -local command_name = token.command_name -local get_next = token.get_next -local expand = token.expand -local is_activechar = token.is_activechar -local csname_name = token.csname_name - -tokens = tokens or { } -local tokens = tokens - -tokens.vbox = createtoken("vbox") -tokens.hbox = createtoken("hbox") -tokens.vtop = createtoken("vtop") -tokens.bgroup = createtoken(utfbyte("{"), 1) -tokens.egroup = createtoken(utfbyte("}"), 2) - -tokens.letter = function(chr) return createtoken(utfbyte(chr), 11) end -tokens.other = function(chr) return createtoken(utfbyte(chr), 12) end - -tokens.letters = function(str) - local t, n = { }, 0 - for chr in utfvalues(str) do - n = n + 1 - t[n] = createtoken(chr, 11) - end - return t -end - -tokens.collectors = tokens.collectors or { } -local collectors = tokens.collectors - -collectors.data = collectors.data or { } -local collectordata = collectors.data - -collectors.registered = collectors.registered or { } -local registered = collectors.registered - -local function printlist(data) - callbacks.push('token_filter', function () - callbacks.pop('token_filter') -- tricky but the nil assignment helps - return data - end) -end - -tex.printlist = printlist -- will change to another namespace - -function collectors.flush(tag) - printlist(collectordata[tag]) -end - -function collectors.test(tag) - printlist(collectordata[tag]) -end - -function collectors.register(name) - registered[csname_id(name)] = name -end - -local call = command_id("call") -local letter = command_id("letter") -local other = command_id("other_char") - -function collectors.install(tag,end_cs) - local data, d = { }, 0 - collectordata[tag] = data - local endcs = csname_id(end_cs) - while true do - local t = get_next() - local a, b = t[1], t[3] - if b == endcs then - context["end_cs"]() - return - elseif a == call and registered[b] then - expand() - else - d = d + 1 - data[d] = t - end - end -end - -function collectors.handle(tag,handle,flush) - collectordata[tag] = handle(collectordata[tag]) - if flush then - collectors.flush(tag) - end -end - -local show_methods = { } -collectors.show_methods = show_methods - -function collectors.show(tag, method) - if type(tag) == "table" then - show_methods[method or 'a'](tag) - else - show_methods[method or 'a'](collectordata[tag]) - end -end - -function collectors.defaultwords(t,str) - local n = #t - n = n + 1 - t[n] = tokens.bgroup - n = n + 1 - t[n] = createtoken("red") - for i=1,#str do - n = n + 1 - t[n] = tokens.other('*') - end - n = n + 1 - t[n] = tokens.egroup -end - -function collectors.dowithwords(tag,handle) - local t, w, tn, wn = { }, { }, 0, 0 - handle = handle or collectors.defaultwords - local tagdata = collectordata[tag] - for k=1,#tagdata do - local v = tagdata[k] - if v[1] == letter then - wn = wn + 1 - w[wn] = v[2] - else - if wn > 0 then - handle(t,w) - wn = 0 - end - tn = tn + 1 - t[tn] = v - end - end - if wn > 0 then - handle(t,w) - end - collectordata[tag] = t -end - -local 
function showtoken(t) - if t then - local cmd, chr, id, cs, name = t[1], t[2], t[3], nil, command_name(t) or "" - if cmd == letter or cmd == other then - return format("%s-> %s -> %s", name, chr, utfchar(chr)) - elseif id > 0 then - cs = csname_name(t) or nil - if cs then - return format("%s-> %s", name, cs) - elseif tonumber(chr) < 0 then - return format("%s-> %s", name, id) - else - return format("%s-> (%s,%s)", name, chr, id) - end - else - return format("%s", name) - end - else - return "no node" - end -end - -collectors.showtoken = showtoken - -function collectors.trace() - local t = get_next() - logs.report("tokenlist",showtoken(t)) - return t -end - --- these might move to a runtime module - -show_methods.a = function(data) -- no need to store the table, just pass directly - local function row(one,two,three,four,five) - context.NC() context(one) - context.NC() context(two) - context.NC() context(three) - context.NC() context(four) - context.NC() context(five) - context.NC() context.NR() - end - context.starttabulate { "|T|Tr|cT|Tr|T|" } - row("cmd","chr","","id","name") - context.HL() - for _,v in next, data do - local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", "" - local name = gsub(command_name(v) or "","_","\\_") - if id > 0 then - cs = csname_name(v) or "" - if cs ~= "" then cs = "\\string " .. cs end - else - id = "" - end - if cmd == letter or cmd == other then - sym = "\\char " .. chr - end - if tonumber(chr) < 0 then - row(name,"",sym,id,cs) - else - row(name,chr,sym,id,cs) - end - end - context.stoptabulate() -end - -local function show_b_c(data,swap) -- no need to store the table, just pass directly - local function row(one,two,three) - context.NC() context(one) - context.NC() context(two) - context.NC() context(three) - context.NC() context.NR() - end - if swap then - context.starttabulate { "|Tl|Tl|Tr|" } - else - context.starttabulate { "|Tl|Tr|Tl|" } - end - row("cmd","chr","name") - context.HL() - for _,v in next, data do - local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", "" - local name = gsub(command_name(v) or "","_","\\_") - if id > 0 then - cs = csname_name(v) or "" - end - if cmd == letter or cmd == other then - sym = "\\char " .. chr - elseif cs == "" then - -- okay - elseif is_activechar(v) then - sym = "\\string " .. cs - else - sym = "\\string\\" .. cs - end - if swap then - row(name,sym,chr) - elseif tonumber(chr) < 0 then - row(name,"",sym) - else - row(name,chr,sym) - end - end - context.stoptabulate() -end - --- Even more experimental ... 
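-- The collectors above (collectors.install in particular) read tokens one by
-- one with get_next() until the registered end control sequence shows up, store
-- them, and later flush the stored list back through the token_filter callback.
-- The shape of that loop, as a self-contained sketch over a plain iterator
-- (illustrative names, not this module's api):

local function collect(nextitem,sentinel)
    local data, d = { }, 0
    while true do
        local t = nextitem()
        if t == nil or t == sentinel then
            return data                      -- stop collecting, hand back the list
        end
        d = d + 1
        data[d] = t
    end
end

local input = { "a", "b", "stop", "c" }
local i = 0
local collected = collect(function() i = i + 1 return input[i] end, "stop")
print(table.concat(collected,","))           --> a,b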
- -show_methods.b = function(data) show_b_c(data,false) end -show_methods.c = function(data) show_b_c(data,true ) end - -local remapper = { } -- namespace -collectors.remapper = remapper - -local remapperdata = { } -- user mappings -remapper.data = remapperdata - -function remapper.store(tag,class,key) - local s = remapperdata[class] - if not s then - s = { } - remapperdata[class] = s - end - s[key] = collectordata[tag] - collectordata[tag] = nil -end - -function remapper.convert(tag,toks) - local data = remapperdata[tag] - local leftbracket, rightbracket = utfbyte('['), utfbyte(']') - local skipping = 0 - -- todo: math - if data then - local t, n = { }, 0 - for s=1,#toks do - local tok = toks[s] - local one, two = tok[1], tok[2] - if one == 11 or one == 12 then - if two == leftbracket then - skipping = skipping + 1 - n = n + 1 ; t[n] = tok - elseif two == rightbracket then - skipping = skipping - 1 - n = n + 1 ; t[n] = tok - elseif skipping == 0 then - local new = data[two] - if new then - if #new > 1 then - for n=1,#new do - n = n + 1 ; t[n] = new[n] - end - else - n = n + 1 ; t[n] = new[1] - end - else - n = n + 1 ; t[n] = tok - end - else - n = n + 1 ; t[n] = tok - end - else - n = n + 1 ; t[n] = tok - end - end - return t - else - return toks - end -end +if not modules then modules = { } end modules ['toks-ini'] = { + version = 1.001, + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values +local format, gsub = string.format, string.gsub + +--[[ldx-- +

+<p>This code is experimental and needs a cleanup. The visualizers will move to
+a module.</p>
    +--ldx]]-- + +-- 1 = command, 2 = modifier (char), 3 = controlsequence id +-- +-- callback.register('token_filter', token.get_next) +-- +-- token.get_next() +-- token.expand() +-- token.create() +-- token.csname_id() +-- token.csname_name(v) +-- token.command_id() +-- token.command_name(v) +-- token.is_expandable() +-- token.is_activechar() +-- token.lookup(v) + +-- actually, we can use token registers to store tokens + +local token, tex = token, tex + +local createtoken = token.create +local csname_id = token.csname_id +local command_id = token.command_id +local command_name = token.command_name +local get_next = token.get_next +local expand = token.expand +local is_activechar = token.is_activechar +local csname_name = token.csname_name + +tokens = tokens or { } +local tokens = tokens + +tokens.vbox = createtoken("vbox") +tokens.hbox = createtoken("hbox") +tokens.vtop = createtoken("vtop") +tokens.bgroup = createtoken(utfbyte("{"), 1) +tokens.egroup = createtoken(utfbyte("}"), 2) + +tokens.letter = function(chr) return createtoken(utfbyte(chr), 11) end +tokens.other = function(chr) return createtoken(utfbyte(chr), 12) end + +tokens.letters = function(str) + local t, n = { }, 0 + for chr in utfvalues(str) do + n = n + 1 + t[n] = createtoken(chr, 11) + end + return t +end + +tokens.collectors = tokens.collectors or { } +local collectors = tokens.collectors + +collectors.data = collectors.data or { } +local collectordata = collectors.data + +collectors.registered = collectors.registered or { } +local registered = collectors.registered + +local function printlist(data) + callbacks.push('token_filter', function () + callbacks.pop('token_filter') -- tricky but the nil assignment helps + return data + end) +end + +tex.printlist = printlist -- will change to another namespace + +function collectors.flush(tag) + printlist(collectordata[tag]) +end + +function collectors.test(tag) + printlist(collectordata[tag]) +end + +function collectors.register(name) + registered[csname_id(name)] = name +end + +local call = command_id("call") +local letter = command_id("letter") +local other = command_id("other_char") + +function collectors.install(tag,end_cs) + local data, d = { }, 0 + collectordata[tag] = data + local endcs = csname_id(end_cs) + while true do + local t = get_next() + local a, b = t[1], t[3] + if b == endcs then + context["end_cs"]() + return + elseif a == call and registered[b] then + expand() + else + d = d + 1 + data[d] = t + end + end +end + +function collectors.handle(tag,handle,flush) + collectordata[tag] = handle(collectordata[tag]) + if flush then + collectors.flush(tag) + end +end + +local show_methods = { } +collectors.show_methods = show_methods + +function collectors.show(tag, method) + if type(tag) == "table" then + show_methods[method or 'a'](tag) + else + show_methods[method or 'a'](collectordata[tag]) + end +end + +function collectors.defaultwords(t,str) + local n = #t + n = n + 1 + t[n] = tokens.bgroup + n = n + 1 + t[n] = createtoken("red") + for i=1,#str do + n = n + 1 + t[n] = tokens.other('*') + end + n = n + 1 + t[n] = tokens.egroup +end + +function collectors.dowithwords(tag,handle) + local t, w, tn, wn = { }, { }, 0, 0 + handle = handle or collectors.defaultwords + local tagdata = collectordata[tag] + for k=1,#tagdata do + local v = tagdata[k] + if v[1] == letter then + wn = wn + 1 + w[wn] = v[2] + else + if wn > 0 then + handle(t,w) + wn = 0 + end + tn = tn + 1 + t[tn] = v + end + end + if wn > 0 then + handle(t,w) + end + collectordata[tag] = t +end + +local 
function showtoken(t) + if t then + local cmd, chr, id, cs, name = t[1], t[2], t[3], nil, command_name(t) or "" + if cmd == letter or cmd == other then + return format("%s-> %s -> %s", name, chr, utfchar(chr)) + elseif id > 0 then + cs = csname_name(t) or nil + if cs then + return format("%s-> %s", name, cs) + elseif tonumber(chr) < 0 then + return format("%s-> %s", name, id) + else + return format("%s-> (%s,%s)", name, chr, id) + end + else + return format("%s", name) + end + else + return "no node" + end +end + +collectors.showtoken = showtoken + +function collectors.trace() + local t = get_next() + logs.report("tokenlist",showtoken(t)) + return t +end + +-- these might move to a runtime module + +show_methods.a = function(data) -- no need to store the table, just pass directly + local function row(one,two,three,four,five) + context.NC() context(one) + context.NC() context(two) + context.NC() context(three) + context.NC() context(four) + context.NC() context(five) + context.NC() context.NR() + end + context.starttabulate { "|T|Tr|cT|Tr|T|" } + row("cmd","chr","","id","name") + context.HL() + for _,v in next, data do + local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", "" + local name = gsub(command_name(v) or "","_","\\_") + if id > 0 then + cs = csname_name(v) or "" + if cs ~= "" then cs = "\\string " .. cs end + else + id = "" + end + if cmd == letter or cmd == other then + sym = "\\char " .. chr + end + if tonumber(chr) < 0 then + row(name,"",sym,id,cs) + else + row(name,chr,sym,id,cs) + end + end + context.stoptabulate() +end + +local function show_b_c(data,swap) -- no need to store the table, just pass directly + local function row(one,two,three) + context.NC() context(one) + context.NC() context(two) + context.NC() context(three) + context.NC() context.NR() + end + if swap then + context.starttabulate { "|Tl|Tl|Tr|" } + else + context.starttabulate { "|Tl|Tr|Tl|" } + end + row("cmd","chr","name") + context.HL() + for _,v in next, data do + local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", "" + local name = gsub(command_name(v) or "","_","\\_") + if id > 0 then + cs = csname_name(v) or "" + end + if cmd == letter or cmd == other then + sym = "\\char " .. chr + elseif cs == "" then + -- okay + elseif is_activechar(v) then + sym = "\\string " .. cs + else + sym = "\\string\\" .. cs + end + if swap then + row(name,sym,chr) + elseif tonumber(chr) < 0 then + row(name,"",sym) + else + row(name,chr,sym) + end + end + context.stoptabulate() +end + +-- Even more experimental ... 
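-- An illustrative usage sketch, not taken from this file: the tag "demo", the
-- control sequence names and the handler below are made-up names, shown only to
-- indicate how the collector interface above could be driven.
--
-- collectors.register("demomacro")             -- expand \demomacro while grabbing
-- collectors.install("demo","stopdemotokens")  -- grab tokens until \stopdemotokens
-- collectors.handle("demo",function(data)
--     return data                              -- inspect or rewrite token triplets here
-- end,true)                                    -- true: flush the collected tokens back
-- collectors.show("demo","a")                  -- tabulated overview (show_methods.a)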
+ +show_methods.b = function(data) show_b_c(data,false) end +show_methods.c = function(data) show_b_c(data,true ) end + +local remapper = { } -- namespace +collectors.remapper = remapper + +local remapperdata = { } -- user mappings +remapper.data = remapperdata + +function remapper.store(tag,class,key) + local s = remapperdata[class] + if not s then + s = { } + remapperdata[class] = s + end + s[key] = collectordata[tag] + collectordata[tag] = nil +end + +function remapper.convert(tag,toks) + local data = remapperdata[tag] + local leftbracket, rightbracket = utfbyte('['), utfbyte(']') + local skipping = 0 + -- todo: math + if data then + local t, n = { }, 0 + for s=1,#toks do + local tok = toks[s] + local one, two = tok[1], tok[2] + if one == 11 or one == 12 then + if two == leftbracket then + skipping = skipping + 1 + n = n + 1 ; t[n] = tok + elseif two == rightbracket then + skipping = skipping - 1 + n = n + 1 ; t[n] = tok + elseif skipping == 0 then + local new = data[two] + if new then + if #new > 1 then + for n=1,#new do + n = n + 1 ; t[n] = new[n] + end + else + n = n + 1 ; t[n] = new[1] + end + else + n = n + 1 ; t[n] = tok + end + else + n = n + 1 ; t[n] = tok + end + else + n = n + 1 ; t[n] = tok + end + end + return t + else + return toks + end +end diff --git a/tex/context/base/trac-ctx.lua b/tex/context/base/trac-ctx.lua index 706e7a244..8153d079a 100644 --- a/tex/context/base/trac-ctx.lua +++ b/tex/context/base/trac-ctx.lua @@ -1,48 +1,48 @@ -if not modules then modules = { } end modules ['trac-ctx'] = { - version = 1.001, - comment = "companion to trac-ctx.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local commands = commands -local context = context -local register = trackers.register - -local textrackers = tex.trackers or { } -local texdirectives = tex.directives or { } - -tex.trackers = textrackers -tex.directives = texdirectives - -storage.register("tex/trackers", textrackers, "tex.trackers") -storage.register("tex/directives",texdirectives,"tex.directives") - -local function doit(category,tag,v) - local tt = category[tag] - if tt then - context.unprotect() - context(v and tt[1] or tt[2]) -- could be one call - context.protect() - end -end - -local function initialize(category,register) - for tag, commands in next, category do - register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller - end -end - -local function install(category,register,tag,enable,disable) - category[tag] = { enable, disable } - register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller -end - -function commands.initializetextrackers () initialize(textrackers ,trackers .register ) end -function commands.initializetexdirectives() initialize(texdirectives,directives.register) end - --- commands.install(tag,enable,disable): - -function commands.installtextracker (...) install(textrackers ,trackers .register,...) end -function commands.installtexdirective(...) install(texdirectives,directives.register,...) 
end +if not modules then modules = { } end modules ['trac-ctx'] = { + version = 1.001, + comment = "companion to trac-ctx.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local commands = commands +local context = context +local register = trackers.register + +local textrackers = tex.trackers or { } +local texdirectives = tex.directives or { } + +tex.trackers = textrackers +tex.directives = texdirectives + +storage.register("tex/trackers", textrackers, "tex.trackers") +storage.register("tex/directives",texdirectives,"tex.directives") + +local function doit(category,tag,v) + local tt = category[tag] + if tt then + context.unprotect() + context(v and tt[1] or tt[2]) -- could be one call + context.protect() + end +end + +local function initialize(category,register) + for tag, commands in next, category do + register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller + end +end + +local function install(category,register,tag,enable,disable) + category[tag] = { enable, disable } + register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller +end + +function commands.initializetextrackers () initialize(textrackers ,trackers .register ) end +function commands.initializetexdirectives() initialize(texdirectives,directives.register) end + +-- commands.install(tag,enable,disable): + +function commands.installtextracker (...) install(textrackers ,trackers .register,...) end +function commands.installtexdirective(...) install(texdirectives,directives.register,...) end diff --git a/tex/context/base/trac-deb.lua b/tex/context/base/trac-deb.lua index fe167c343..b2a86df88 100644 --- a/tex/context/base/trac-deb.lua +++ b/tex/context/base/trac-deb.lua @@ -1,248 +1,248 @@ -if not modules then modules = { } end modules ['trac-deb'] = { - version = 1.001, - comment = "companion to trac-deb.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local lpeg, status = lpeg, status - -local lpegmatch = lpeg.match -local format, concat, match = string.format, table.concat, string.match -local tonumber, tostring = tonumber, tostring -local texdimen, textoks, texcount = tex.dimen, tex.toks, tex.count - --- maybe tracers -> tracers.tex (and tracers.lua for current debugger) - -local report_system = logs.reporter("system","tex") - -tracers = tracers or { } -local tracers = tracers - -tracers.lists = { } -local lists = tracers.lists - -tracers.strings = { } -local strings = tracers.strings - -strings.undefined = "undefined" - -lists.scratch = { - 0, 2, 4, 6, 8 -} - -lists.internals = { - 'p:hsize', 'p:parindent', 'p:leftskip','p:rightskip', - 'p:vsize', 'p:parskip', 'p:baselineskip', 'p:lineskip', 'p:topskip' -} - -lists.context = { - 'd:lineheight', - 'c:realpageno', 'c:userpageno', 'c:pageno', 'c:subpageno' -} - -local types = { - ['d'] = tracers.dimen, - ['c'] = tracers.count, - ['t'] = tracers.toks, - ['p'] = tracers.primitive -} - -local splitboth = lpeg.splitat(":") -local splittype = lpeg.firstofsplit(":") -local splitname = lpeg.secondofsplit(":") - -function tracers.type(csname) - return lpegmatch(splittype,csname) -end - -function tracers.name(csname) - return lpegmatch(splitname,csname) or csname -end - -function tracers.cs(csname) - local tag, name = lpegmatch(splitboth,csname) - if name and types[tag] then - return types[tag](name) - else - return 
tracers.primitive(csname) - end -end - -function tracers.dimen(name) - local d = texdimen[name] - return d and number.topoints(d) or strings.undefined -end - -function tracers.count(name) - return texcount[name] or strings.undefined -end - -function tracers.toks(name,limit) - local t = textoks[name] - return t and string.limit(t,tonumber(limit) or 40) or strings.undefined -end - -function tracers.primitive(name) - return tex[name] or strings.undefined -end - -function tracers.knownlist(name) - local l = lists[name] - return l and #l > 0 -end - -function tracers.showlines(filename,linenumber,offset,errorstr) - local data = io.loaddata(filename) - if not data or data == "" then - local hash = url.hashed(filename) - if not hash.noscheme then - local ok, d, n = resolvers.loaders.byscheme(hash.scheme,filename) - if ok and n > 0 then - data = d - end - end - end - local lines = data and string.splitlines(data) - if lines and #lines > 0 then - -- This does not work completely as we cannot access the last Lua error using - -- table.print(status.list()). This is on the agenda. Eventually we will - -- have a sequence of checks here (tex, lua, mp) at this end. - -- - -- Actually, in 0.75+ the lua error message is even weirder as you can - -- get: - -- - -- LuaTeX error [string "\directlua "]:3: unexpected symbol near '1' ... - -- - -- \endgroup \directlua { - -- - -- So there is some work to be done in the LuaTeX engine. - -- - local what, where = match(errorstr,[[LuaTeX error
    :(%d+)]]) - or match(errorstr,[[LuaTeX error %[string "\\(.-lua) "%]:(%d+)]]) -- buglet - if where then - -- lua error: linenumber points to last line - local start = "\\startluacode" - local stop = "\\stopluacode" - local where = tonumber(where) - if lines[linenumber] == start then - local n = linenumber - for i=n,1,-1 do - if lines[i] == start then - local n = i + where - if n <= linenumber then - linenumber = n - end - end - end - end - end - offset = tonumber(offset) or 10 - linenumber = tonumber(linenumber) or 10 - local start = math.max(linenumber - offset,1) - local stop = math.min(linenumber + offset,#lines) - if stop > #lines then - return "" - else - local result, fmt = { }, "%" .. #tostring(stop) .. "d %s %s" - for n=start,stop do - result[#result+1] = format(fmt,n,n == linenumber and ">>" or " ",lines[n]) - end - return concat(result,"\n") - end - else - return "" - end -end - -function tracers.printerror(offset) - local inputstack = resolvers.inputstack - local filename = inputstack[#inputstack] or status.filename - local linenumber = tonumber(status.linenumber) or 0 - if not filename then - report_system("error not related to input file: %s ...",status.lasterrorstring) - elseif type(filename) == "number" then - report_system("error on line %s of filehandle %s: %s ...",linenumber,filename,status.lasterrorstring) - else - -- currently we still get the error message printed to the log/console so we - -- add a bit of spacing around our variant - texio.write_nl("\n") - local errorstr = status.lasterrorstring or "?" - -- inspect(status.list()) - report_system("error on line %s in file %s: %s ...\n",linenumber,filename,errorstr) -- lua error? - texio.write_nl(tracers.showlines(filename,linenumber,offset,errorstr),"\n") - end -end - -directives.register("system.errorcontext", function(v) - if v then - callback.register('show_error_hook', function() tracers.printerror(v) end) - else - callback.register('show_error_hook', nil) - end -end) - --- this might move - -lmx = lmx or { } - -lmx.htmfile = function(name) return environment.jobname .. 
"-status.html" end -lmx.lmxfile = function(name) return resolvers.findfile(name,'tex') end - -function lmx.showdebuginfo(lmxname) - local variables = { - ['title'] = 'ConTeXt Debug Information', - ['color-background-one'] = lmx.get('color-background-green'), - ['color-background-two'] = lmx.get('color-background-blue'), - } - if lmxname == false then - return variables - else - lmx.show(lmxname or 'context-debug.lmx',variables) - end -end - -function lmx.showerror(lmxname) - local filename, linenumber, errorcontext = status.filename, tonumber(status.linenumber) or 0, "" - if not filename then - filename, errorcontext = 'unknown', 'error in filename' - elseif type(filename) == "number" then - filename, errorcontext = format("",filename), 'unknown error' - else - errorcontext = tracers.showlines(filename,linenumber,offset) - end - local variables = { - ['title'] = 'ConTeXt Error Information', - ['errormessage'] = status.lasterrorstring, - ['linenumber'] = linenumber, - ['color-background-one'] = lmx.get('color-background-yellow'), - ['color-background-two'] = lmx.get('color-background-purple'), - ['filename'] = filename, - ['errorcontext'] = errorcontext, - } - if lmxname == false then - return variables - else - lmx.show(lmxname or 'context-error.lmx',variables) - end -end - -function lmx.overloaderror() - callback.register('show_error_hook', function() lmx.showerror() end) -- prevents arguments being passed -end - -directives.register("system.showerror", lmx.overloaderror) - -local debugger = utilities.debugger - -local function trace_calls(n) - debugger.enable() - luatex.registerstopactions(function() - debugger.disable() - debugger.savestats(tex.jobname .. "-luacalls.log",tonumber(n)) - end) - trace_calls = function() end -end - -directives.register("system.tracecalls", function(n) trace_calls(n) end) -- indirect is needed for nilling +if not modules then modules = { } end modules ['trac-deb'] = { + version = 1.001, + comment = "companion to trac-deb.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local lpeg, status = lpeg, status + +local lpegmatch = lpeg.match +local format, concat, match = string.format, table.concat, string.match +local tonumber, tostring = tonumber, tostring +local texdimen, textoks, texcount = tex.dimen, tex.toks, tex.count + +-- maybe tracers -> tracers.tex (and tracers.lua for current debugger) + +local report_system = logs.reporter("system","tex") + +tracers = tracers or { } +local tracers = tracers + +tracers.lists = { } +local lists = tracers.lists + +tracers.strings = { } +local strings = tracers.strings + +strings.undefined = "undefined" + +lists.scratch = { + 0, 2, 4, 6, 8 +} + +lists.internals = { + 'p:hsize', 'p:parindent', 'p:leftskip','p:rightskip', + 'p:vsize', 'p:parskip', 'p:baselineskip', 'p:lineskip', 'p:topskip' +} + +lists.context = { + 'd:lineheight', + 'c:realpageno', 'c:userpageno', 'c:pageno', 'c:subpageno' +} + +local types = { + ['d'] = tracers.dimen, + ['c'] = tracers.count, + ['t'] = tracers.toks, + ['p'] = tracers.primitive +} + +local splitboth = lpeg.splitat(":") +local splittype = lpeg.firstofsplit(":") +local splitname = lpeg.secondofsplit(":") + +function tracers.type(csname) + return lpegmatch(splittype,csname) +end + +function tracers.name(csname) + return lpegmatch(splitname,csname) or csname +end + +function tracers.cs(csname) + local tag, name = lpegmatch(splitboth,csname) + if name and types[tag] then + return 
types[tag](name) + else + return tracers.primitive(csname) + end +end + +function tracers.dimen(name) + local d = texdimen[name] + return d and number.topoints(d) or strings.undefined +end + +function tracers.count(name) + return texcount[name] or strings.undefined +end + +function tracers.toks(name,limit) + local t = textoks[name] + return t and string.limit(t,tonumber(limit) or 40) or strings.undefined +end + +function tracers.primitive(name) + return tex[name] or strings.undefined +end + +function tracers.knownlist(name) + local l = lists[name] + return l and #l > 0 +end + +function tracers.showlines(filename,linenumber,offset,errorstr) + local data = io.loaddata(filename) + if not data or data == "" then + local hash = url.hashed(filename) + if not hash.noscheme then + local ok, d, n = resolvers.loaders.byscheme(hash.scheme,filename) + if ok and n > 0 then + data = d + end + end + end + local lines = data and string.splitlines(data) + if lines and #lines > 0 then + -- This does not work completely as we cannot access the last Lua error using + -- table.print(status.list()). This is on the agenda. Eventually we will + -- have a sequence of checks here (tex, lua, mp) at this end. + -- + -- Actually, in 0.75+ the lua error message is even weirder as you can + -- get: + -- + -- LuaTeX error [string "\directlua "]:3: unexpected symbol near '1' ... + -- + -- \endgroup \directlua { + -- + -- So there is some work to be done in the LuaTeX engine. + -- + local what, where = match(errorstr,[[LuaTeX error
    :(%d+)]]) + or match(errorstr,[[LuaTeX error %[string "\\(.-lua) "%]:(%d+)]]) -- buglet + if where then + -- lua error: linenumber points to last line + local start = "\\startluacode" + local stop = "\\stopluacode" + local where = tonumber(where) + if lines[linenumber] == start then + local n = linenumber + for i=n,1,-1 do + if lines[i] == start then + local n = i + where + if n <= linenumber then + linenumber = n + end + end + end + end + end + offset = tonumber(offset) or 10 + linenumber = tonumber(linenumber) or 10 + local start = math.max(linenumber - offset,1) + local stop = math.min(linenumber + offset,#lines) + if stop > #lines then + return "" + else + local result, fmt = { }, "%" .. #tostring(stop) .. "d %s %s" + for n=start,stop do + result[#result+1] = format(fmt,n,n == linenumber and ">>" or " ",lines[n]) + end + return concat(result,"\n") + end + else + return "" + end +end + +function tracers.printerror(offset) + local inputstack = resolvers.inputstack + local filename = inputstack[#inputstack] or status.filename + local linenumber = tonumber(status.linenumber) or 0 + if not filename then + report_system("error not related to input file: %s ...",status.lasterrorstring) + elseif type(filename) == "number" then + report_system("error on line %s of filehandle %s: %s ...",linenumber,filename,status.lasterrorstring) + else + -- currently we still get the error message printed to the log/console so we + -- add a bit of spacing around our variant + texio.write_nl("\n") + local errorstr = status.lasterrorstring or "?" + -- inspect(status.list()) + report_system("error on line %s in file %s: %s ...\n",linenumber,filename,errorstr) -- lua error? + texio.write_nl(tracers.showlines(filename,linenumber,offset,errorstr),"\n") + end +end + +directives.register("system.errorcontext", function(v) + if v then + callback.register('show_error_hook', function() tracers.printerror(v) end) + else + callback.register('show_error_hook', nil) + end +end) + +-- this might move + +lmx = lmx or { } + +lmx.htmfile = function(name) return environment.jobname .. 
"-status.html" end +lmx.lmxfile = function(name) return resolvers.findfile(name,'tex') end + +function lmx.showdebuginfo(lmxname) + local variables = { + ['title'] = 'ConTeXt Debug Information', + ['color-background-one'] = lmx.get('color-background-green'), + ['color-background-two'] = lmx.get('color-background-blue'), + } + if lmxname == false then + return variables + else + lmx.show(lmxname or 'context-debug.lmx',variables) + end +end + +function lmx.showerror(lmxname) + local filename, linenumber, errorcontext = status.filename, tonumber(status.linenumber) or 0, "" + if not filename then + filename, errorcontext = 'unknown', 'error in filename' + elseif type(filename) == "number" then + filename, errorcontext = format("",filename), 'unknown error' + else + errorcontext = tracers.showlines(filename,linenumber,offset) + end + local variables = { + ['title'] = 'ConTeXt Error Information', + ['errormessage'] = status.lasterrorstring, + ['linenumber'] = linenumber, + ['color-background-one'] = lmx.get('color-background-yellow'), + ['color-background-two'] = lmx.get('color-background-purple'), + ['filename'] = filename, + ['errorcontext'] = errorcontext, + } + if lmxname == false then + return variables + else + lmx.show(lmxname or 'context-error.lmx',variables) + end +end + +function lmx.overloaderror() + callback.register('show_error_hook', function() lmx.showerror() end) -- prevents arguments being passed +end + +directives.register("system.showerror", lmx.overloaderror) + +local debugger = utilities.debugger + +local function trace_calls(n) + debugger.enable() + luatex.registerstopactions(function() + debugger.disable() + debugger.savestats(tex.jobname .. "-luacalls.log",tonumber(n)) + end) + trace_calls = function() end +end + +directives.register("system.tracecalls", function(n) trace_calls(n) end) -- indirect is needed for nilling diff --git a/tex/context/base/trac-exp.lua b/tex/context/base/trac-exp.lua index 5879f1b7b..9daf86357 100644 --- a/tex/context/base/trac-exp.lua +++ b/tex/context/base/trac-exp.lua @@ -1,229 +1,229 @@ -if not modules then modules = { } end modules ['trac-exp'] = { - version = 1.001, - comment = "companion to trac-log.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local formatters = string.formatters -local reporters = logs.reporters -local xmlserialize = xml.serialize -local xmlcollected = xml.collected -local xmltext = xml.text -local xmlfirst = xml.first -local xmlfilter = xml.filter - --- there is no need for a newhandlers { name = "help", parent = "string" } - -local function flagdata(flag) - local name = flag.at.name or "" - local value = flag.at.value or "" - -- local short = xmlfirst(s,"/short") - -- local short = xmlserialize(short,xs) - local short = xmltext(xmlfirst(flag,"/short")) or "" - return name, value, short -end - -local function exampledata(example) - local command = xmltext(xmlfirst(example,"/command")) or "" - local comment = xmltext(xmlfirst(example,"/comment")) or "" - return command, comment -end - -local function categorytitle(category) - return xmltext(xmlfirst(category,"/title")) or "" -end - -local exporters = logs.exporters - -function exporters.man(specification,...) 
- local root = xml.convert(specification.helpinfo or "") - if not root then - return - end - local xs = xml.gethandlers("string") - xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end) - xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end) - local wantedcategories = select("#",...) == 0 and true or table.tohash { ... } - local nofcategories = xml.count(root,"/application/flags/category") - local name = xmlfilter(root,"/application/metadata/entry[@name='name']/text()") - local detail = xmlfilter(root,"/application/metadata/entry[@name='detail']/text()") or name - local version = xmlfilter(root,"/application/metadata/entry[@name='version']/text()") or "0.00" - local banner = specification.banner or detail or name - -- - local result = { } - -- - -- .TH "context" "1" "some date" "version" "ConTeXt" -- we use a fake date as I don't want to polute the git repos - -- - local runner = string.match(name,"^mtx%-(.*)") - if runner then - runner = formatters["mtxrun --script %s"](runner) - else - runner = name - end - -- - result[#result+1] = formatters['.TH "%s" "1" "%s" "version %s" "%s"'](name,os.date("01-01-%Y"),version,detail) - result[#result+1] = formatters[".SH NAME\n.B %s"](name) - result[#result+1] = formatters[".SH SYNOPSIS\n.B %s [\n.I OPTIONS ...\n.B ] [\n.I FILENAMES\n.B ]"](runner) - result[#result+1] = formatters[".SH DESCRIPTION\n.B %s"](detail) - -- - for category in xmlcollected(root,"/application/flags/category") do - if nofcategories > 1 then - result[#result+1] = formatters['.SH OPTIONS: %s'](string.upper(category.at.name or "all")) - else - result[#result+1] = ".SH OPTIONS" - end - for subcategory in xmlcollected(category,"/subcategory") do - for flag in xmlcollected(subcategory,"/flag") do - local name, value, short = flagdata(flag) - if value == "" then - result[#result+1] = formatters[".TP\n.B --%s\n%s"](name,short) - else - result[#result+1] = formatters[".TP\n.B --%s=%s\n%s"](name,value,short) - end - end - end - end - local moreinfo = specification.moreinfo - if moreinfo and moreinfo ~= "" then - moreinfo = string.gsub(moreinfo,"[\n\r]([%a]+)%s*:%s*",'\n\n.B "%1:"\n') - result[#result+1] = formatters[".SH AUTHOR\n%s"](moreinfo) - end - return table.concat(result,"\n") -end - -local craptemplate = [[ - - - -%s - - -%s - -]] - -function exporters.xml(specification,...) - local helpinfo = specification.helpinfo - if type(helpinfo) == "string" then - if string.find(helpinfo,"^<%?xml") then - return helpinfo - end - elseif type(helpinfo) == "table" then - helpinfo = table.concat(helpinfo,"\n\n") - else - helpinfo = "no help" - end - return formatters[craptemplate](specification.banner or "?",helpinfo) -end - --- the following template is optimized a bit for space - --- local bodytemplate = [[ ---

---    [commented-out HTML body template: a "Command line options" heading and a
---     per-category table with flag / value / description columns (flags rendered
---     as --name); the HTML tags and embedded <?lua ... ?> instructions were lost
---     in extraction]
--- ]]
-
-local bodytemplate = [[
-    [HTML body template: a "Command line options" heading and a per-category
-     table with flag / value / description columns (flags rendered as --name);
-     the HTML tags and embedded <?lua ... ?> instructions were lost in extraction]
    -]] - -function exporters.html(specification,...) - local root = xml.convert(specification.helpinfo or "") - if not root then - return - end - local xs = xml.gethandlers("string") - xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end) - xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end) - local wantedcategories = select("#",...) == 0 and true or table.tohash { ... } - local nofcategories = xml.count(root,"/application/flags/category") - local name = xmlfilter(root,"/application/metadata/entry[@name='name']/text()") - local detail = xmlfilter(root,"/application/metadata/entry[@name='detail']/text()") or name - local version = xmlfilter(root,"/application/metadata/entry[@name='version']/text()") or "0.00" - local banner = specification.banner or detail or name - -- - dofile(resolvers.findfile("trac-lmx.lua","tex")) - -- - local htmltemplate = io.loaddata(resolvers.findfile("context-base.lmx","tex")) or "no template" - -- - local body = lmx.convertstring(bodytemplate, { - nofcategories = nofcategories, - wantedcategories = wantedcategories, - root = root, - -- moreinfo = specification.moreinfo, - flagdata = flagdata, - exampledata = exampledata, - categorytitle = categorytitle, - }) - local html = lmx.convertstring(htmltemplate, { - maintext = body, - title = banner, - bottomtext = "wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl", - }) - -- - return html -end +if not modules then modules = { } end modules ['trac-exp'] = { + version = 1.001, + comment = "companion to trac-log.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local formatters = string.formatters +local reporters = logs.reporters +local xmlserialize = xml.serialize +local xmlcollected = xml.collected +local xmltext = xml.text +local xmlfirst = xml.first +local xmlfilter = xml.filter + +-- there is no need for a newhandlers { name = "help", parent = "string" } + +local function flagdata(flag) + local name = flag.at.name or "" + local value = flag.at.value or "" + -- local short = xmlfirst(s,"/short") + -- local short = xmlserialize(short,xs) + local short = xmltext(xmlfirst(flag,"/short")) or "" + return name, value, short +end + +local function exampledata(example) + local command = xmltext(xmlfirst(example,"/command")) or "" + local comment = xmltext(xmlfirst(example,"/comment")) or "" + return command, comment +end + +local function categorytitle(category) + return xmltext(xmlfirst(category,"/title")) or "" +end + +local exporters = logs.exporters + +function exporters.man(specification,...) + local root = xml.convert(specification.helpinfo or "") + if not root then + return + end + local xs = xml.gethandlers("string") + xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end) + xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end) + local wantedcategories = select("#",...) == 0 and true or table.tohash { ... 
} + local nofcategories = xml.count(root,"/application/flags/category") + local name = xmlfilter(root,"/application/metadata/entry[@name='name']/text()") + local detail = xmlfilter(root,"/application/metadata/entry[@name='detail']/text()") or name + local version = xmlfilter(root,"/application/metadata/entry[@name='version']/text()") or "0.00" + local banner = specification.banner or detail or name + -- + local result = { } + -- + -- .TH "context" "1" "some date" "version" "ConTeXt" -- we use a fake date as I don't want to polute the git repos + -- + local runner = string.match(name,"^mtx%-(.*)") + if runner then + runner = formatters["mtxrun --script %s"](runner) + else + runner = name + end + -- + result[#result+1] = formatters['.TH "%s" "1" "%s" "version %s" "%s"'](name,os.date("01-01-%Y"),version,detail) + result[#result+1] = formatters[".SH NAME\n.B %s"](name) + result[#result+1] = formatters[".SH SYNOPSIS\n.B %s [\n.I OPTIONS ...\n.B ] [\n.I FILENAMES\n.B ]"](runner) + result[#result+1] = formatters[".SH DESCRIPTION\n.B %s"](detail) + -- + for category in xmlcollected(root,"/application/flags/category") do + if nofcategories > 1 then + result[#result+1] = formatters['.SH OPTIONS: %s'](string.upper(category.at.name or "all")) + else + result[#result+1] = ".SH OPTIONS" + end + for subcategory in xmlcollected(category,"/subcategory") do + for flag in xmlcollected(subcategory,"/flag") do + local name, value, short = flagdata(flag) + if value == "" then + result[#result+1] = formatters[".TP\n.B --%s\n%s"](name,short) + else + result[#result+1] = formatters[".TP\n.B --%s=%s\n%s"](name,value,short) + end + end + end + end + local moreinfo = specification.moreinfo + if moreinfo and moreinfo ~= "" then + moreinfo = string.gsub(moreinfo,"[\n\r]([%a]+)%s*:%s*",'\n\n.B "%1:"\n') + result[#result+1] = formatters[".SH AUTHOR\n%s"](moreinfo) + end + return table.concat(result,"\n") +end + +local craptemplate = [[ + + + +%s + + +%s + +]] + +function exporters.xml(specification,...) + local helpinfo = specification.helpinfo + if type(helpinfo) == "string" then + if string.find(helpinfo,"^<%?xml") then + return helpinfo + end + elseif type(helpinfo) == "table" then + helpinfo = table.concat(helpinfo,"\n\n") + else + helpinfo = "no help" + end + return formatters[craptemplate](specification.banner or "?",helpinfo) +end + +-- the following template is optimized a bit for space + +-- local bodytemplate = [[ +--

+--    [commented-out HTML body template: a "Command line options" heading and a
+--     per-category table with flag / value / description columns (flags rendered
+--     as --name); the HTML tags and embedded <?lua ... ?> instructions were lost
+--     in extraction]
+-- ]]
+
+local bodytemplate = [[
+    [HTML body template: a "Command line options" heading and a per-category
+     table with flag / value / description columns (flags rendered as --name);
+     the HTML tags and embedded <?lua ... ?> instructions were lost in extraction]
    +]] + +function exporters.html(specification,...) + local root = xml.convert(specification.helpinfo or "") + if not root then + return + end + local xs = xml.gethandlers("string") + xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end) + xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end) + local wantedcategories = select("#",...) == 0 and true or table.tohash { ... } + local nofcategories = xml.count(root,"/application/flags/category") + local name = xmlfilter(root,"/application/metadata/entry[@name='name']/text()") + local detail = xmlfilter(root,"/application/metadata/entry[@name='detail']/text()") or name + local version = xmlfilter(root,"/application/metadata/entry[@name='version']/text()") or "0.00" + local banner = specification.banner or detail or name + -- + dofile(resolvers.findfile("trac-lmx.lua","tex")) + -- + local htmltemplate = io.loaddata(resolvers.findfile("context-base.lmx","tex")) or "no template" + -- + local body = lmx.convertstring(bodytemplate, { + nofcategories = nofcategories, + wantedcategories = wantedcategories, + root = root, + -- moreinfo = specification.moreinfo, + flagdata = flagdata, + exampledata = exampledata, + categorytitle = categorytitle, + }) + local html = lmx.convertstring(htmltemplate, { + maintext = body, + title = banner, + bottomtext = "wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl", + }) + -- + return html +end diff --git a/tex/context/base/trac-fil.lua b/tex/context/base/trac-fil.lua index 8cc903e2a..d6d40356d 100644 --- a/tex/context/base/trac-fil.lua +++ b/tex/context/base/trac-fil.lua @@ -1,181 +1,181 @@ -if not modules then modules = { } end modules ['trac-fil'] = { - version = 1.001, - comment = "for the moment for myself", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local rawset, tonumber, type, pcall = rawset, tonumber, type, pcall -local format, concat = string.format, table.concat -local openfile = io.open -local date = os.date -local sortedpairs = table.sortedpairs - -local P, C, Cc, Cg, Cf, Ct, Cs, Carg = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cs, lpeg.Carg -local lpegmatch = lpeg.match - -local patterns = lpeg.patterns -local cardinal = patterns.cardinal -local whitespace = patterns.whitespace^0 - -local timestamp = Cf(Ct("") * ( - Cg (Cc("year") * (cardinal/tonumber)) * P("-") - * Cg (Cc("month") * (cardinal/tonumber)) * P("-") - * Cg (Cc("day") * (cardinal/tonumber)) * P(" ") - * Cg (Cc("hour") * (cardinal/tonumber)) * P(":") - * Cg (Cc("minute") * (cardinal/tonumber)) * P(":") - * Cg (Cc("second") * (cardinal/tonumber)) * P("+") - * Cg (Cc("thour") * (cardinal/tonumber)) * P(":") - * Cg (Cc("tminute") * (cardinal/tonumber)) -)^0, rawset) - -local keysvalues = Cf(Ct("") * ( - Cg(C(patterns.letter^0) * whitespace * "=" * whitespace * Cs(patterns.unquoted) * whitespace) -)^0, rawset) - -local statusline = Cf(Ct("") * ( - whitespace * P("[") * Cg(Cc("timestamp") * timestamp ) * P("]") - * whitespace * Cg(Cc("status" ) * keysvalues) -),rawset) - -patterns.keysvalues = keysvalues -patterns.statusline = statusline -patterns.timestamp = timestamp - -loggers = loggers or { } - -local timeformat = format("[%%s%s]",os.timezone(true)) -local dateformat = "!%Y-%m-%d %H:%M:%S" - -function loggers.makeline(t) - local result = { } -- minimize time that file is open - result[#result+1] = 
format(timeformat,date(dateformat)) - for k, v in sortedpairs(t) do - local tv = type(v) - if tv == "string" then - if v ~= "password" then - result[#result+1] = format(" %s=%q",k,v) - end - elseif tv == "number" or tv == "boolean" then - result[#result+1] = format(" %s=%q",k,tostring(v)) - end - end - return concat(result," ") -end - -local function append(filename,...) - local f = openfile(filename,"a+") - if not f then - dir.mkdirs(file.dirname(filename)) - f = openfile(filename,"a+") - end - if f then - f:write(...) - f:close() - return true - else - return false - end -end - -function loggers.store(filename,data) -- a log service is nicer - if type(data) == "table"then - data = loggers.makeline(data) - end - pcall(append,filename,data,"\n") -end - -function loggers.collect(filename,result) - if lfs.isfile(filename) then - local r = lpegmatch(Ct(statusline^0),io.loaddata(filename)) - if result then -- append - local nofresult = #result - for i=1,#r do - nofresult = nofresult + 1 - result[nofresult] = r[i] - end - return result - else - return r - end - else - return result or { } - end -end - -function loggers.fields(results) -- returns hash of fields with counts so that we can decide on importance - local fields = { } - if results then - for i=1,#results do - local r = results[i] - for k, v in next, r do - local f = fields[k] - if not f then - fields[k] = 1 - else - fields[k] = f + 1 - end - end - end - end - return fields -end - -local template = [[ - -%s -%s -
    - -]] - -function loggers.tohtml(entries,fields) - if not fields or #fields == 0 then - return "" - end - if type(entries) == "string" then - entries = loggers.collect(entries) - end - local scratch, lines = { }, { } - for i=1,#entries do - local entry = entries[i] - local status = entry.status - for i=1,#fields do - local field = fields[i] - local v = status[field.name] - if v ~= nil then - v = tostring(v) - local f = field.format - if f then - v = format(f,v) - end - scratch[i] = format("%s",field.align or "left",v) - else - scratch[i] = "" - end - end - lines[i] = format("%s",concat(scratch)) - end - for i=1,#fields do - local field = fields[i] - scratch[i] = format("%s", field.label or field.name) - end - local result = format(template,concat(scratch),concat(lines,"\n")) - return result, entries -end - --- loggers.store("test.log", { name = "whatever", more = math.random(1,100) }) - --- local fields = { --- { name = "name", align = "left" }, --- { name = "more", align = "right" }, --- } - --- local entries = loggers.collect("test.log") --- local html = loggers.tohtml(entries,fields) - --- inspect(entries) --- inspect(fields) --- inspect(html) - +if not modules then modules = { } end modules ['trac-fil'] = { + version = 1.001, + comment = "for the moment for myself", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local rawset, tonumber, type, pcall = rawset, tonumber, type, pcall +local format, concat = string.format, table.concat +local openfile = io.open +local date = os.date +local sortedpairs = table.sortedpairs + +local P, C, Cc, Cg, Cf, Ct, Cs, Carg = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cs, lpeg.Carg +local lpegmatch = lpeg.match + +local patterns = lpeg.patterns +local cardinal = patterns.cardinal +local whitespace = patterns.whitespace^0 + +local timestamp = Cf(Ct("") * ( + Cg (Cc("year") * (cardinal/tonumber)) * P("-") + * Cg (Cc("month") * (cardinal/tonumber)) * P("-") + * Cg (Cc("day") * (cardinal/tonumber)) * P(" ") + * Cg (Cc("hour") * (cardinal/tonumber)) * P(":") + * Cg (Cc("minute") * (cardinal/tonumber)) * P(":") + * Cg (Cc("second") * (cardinal/tonumber)) * P("+") + * Cg (Cc("thour") * (cardinal/tonumber)) * P(":") + * Cg (Cc("tminute") * (cardinal/tonumber)) +)^0, rawset) + +local keysvalues = Cf(Ct("") * ( + Cg(C(patterns.letter^0) * whitespace * "=" * whitespace * Cs(patterns.unquoted) * whitespace) +)^0, rawset) + +local statusline = Cf(Ct("") * ( + whitespace * P("[") * Cg(Cc("timestamp") * timestamp ) * P("]") + * whitespace * Cg(Cc("status" ) * keysvalues) +),rawset) + +patterns.keysvalues = keysvalues +patterns.statusline = statusline +patterns.timestamp = timestamp + +loggers = loggers or { } + +local timeformat = format("[%%s%s]",os.timezone(true)) +local dateformat = "!%Y-%m-%d %H:%M:%S" + +function loggers.makeline(t) + local result = { } -- minimize time that file is open + result[#result+1] = format(timeformat,date(dateformat)) + for k, v in sortedpairs(t) do + local tv = type(v) + if tv == "string" then + if v ~= "password" then + result[#result+1] = format(" %s=%q",k,v) + end + elseif tv == "number" or tv == "boolean" then + result[#result+1] = format(" %s=%q",k,tostring(v)) + end + end + return concat(result," ") +end + +local function append(filename,...) + local f = openfile(filename,"a+") + if not f then + dir.mkdirs(file.dirname(filename)) + f = openfile(filename,"a+") + end + if f then + f:write(...) 
+ f:close() + return true + else + return false + end +end + +function loggers.store(filename,data) -- a log service is nicer + if type(data) == "table"then + data = loggers.makeline(data) + end + pcall(append,filename,data,"\n") +end + +function loggers.collect(filename,result) + if lfs.isfile(filename) then + local r = lpegmatch(Ct(statusline^0),io.loaddata(filename)) + if result then -- append + local nofresult = #result + for i=1,#r do + nofresult = nofresult + 1 + result[nofresult] = r[i] + end + return result + else + return r + end + else + return result or { } + end +end + +function loggers.fields(results) -- returns hash of fields with counts so that we can decide on importance + local fields = { } + if results then + for i=1,#results do + local r = results[i] + for k, v in next, r do + local f = fields[k] + if not f then + fields[k] = 1 + else + fields[k] = f + 1 + end + end + end + end + return fields +end + +local template = [[ + +%s +%s +
    + +]] + +function loggers.tohtml(entries,fields) + if not fields or #fields == 0 then + return "" + end + if type(entries) == "string" then + entries = loggers.collect(entries) + end + local scratch, lines = { }, { } + for i=1,#entries do + local entry = entries[i] + local status = entry.status + for i=1,#fields do + local field = fields[i] + local v = status[field.name] + if v ~= nil then + v = tostring(v) + local f = field.format + if f then + v = format(f,v) + end + scratch[i] = format("%s",field.align or "left",v) + else + scratch[i] = "" + end + end + lines[i] = format("%s",concat(scratch)) + end + for i=1,#fields do + local field = fields[i] + scratch[i] = format("%s", field.label or field.name) + end + local result = format(template,concat(scratch),concat(lines,"\n")) + return result, entries +end + +-- loggers.store("test.log", { name = "whatever", more = math.random(1,100) }) + +-- local fields = { +-- { name = "name", align = "left" }, +-- { name = "more", align = "right" }, +-- } + +-- local entries = loggers.collect("test.log") +-- local html = loggers.tohtml(entries,fields) + +-- inspect(entries) +-- inspect(fields) +-- inspect(html) + diff --git a/tex/context/base/trac-inf.lua b/tex/context/base/trac-inf.lua index eefc15a6f..aa7704d3f 100644 --- a/tex/context/base/trac-inf.lua +++ b/tex/context/base/trac-inf.lua @@ -1,193 +1,193 @@ -if not modules then modules = { } end modules ['trac-inf'] = { - version = 1.001, - comment = "companion to trac-inf.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- As we want to protect the global tables, we no longer store the timing --- in the tables themselves but in a hidden timers table so that we don't --- get warnings about assignments. This is more efficient than using rawset --- and rawget. 
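-- A generic illustration of that idea in plain Lua (illustrative only, not part
-- of this file): an __index metamethod hands out a fresh default record on first
-- access, so callers never assign into the protected table themselves.
--
-- local timers = setmetatable({ }, { __index = function(t,k)
--     local v = { timing = 0, loadtime = 0 }
--     t[k] = v
--     return v
-- end })
--
-- local t = timers["whatever"] -- created on demand with default fields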
- -local type, tonumber = type, tonumber -local format, lower = string.format, string.lower -local concat = table.concat -local clock = os.gettimeofday or os.clock -- should go in environment - -statistics = statistics or { } -local statistics = statistics - -statistics.enable = true -statistics.threshold = 0.01 - -local statusinfo, n, registered, timers = { }, 0, { }, { } - -table.setmetatableindex(timers,function(t,k) - local v = { timing = 0, loadtime = 0 } - t[k] = v - return v -end) - -local function hastiming(instance) - return instance and timers[instance] -end - -local function resettiming(instance) - timers[instance or "notimer"] = { timing = 0, loadtime = 0 } -end - -local function starttiming(instance) - local timer = timers[instance or "notimer"] - local it = timer.timing or 0 - if it == 0 then - timer.starttime = clock() - if not timer.loadtime then - timer.loadtime = 0 - end - end - timer.timing = it + 1 -end - -local function stoptiming(instance) - local timer = timers[instance or "notimer"] - local it = timer.timing - if it > 1 then - timer.timing = it - 1 - else - local starttime = timer.starttime - if starttime then - local stoptime = clock() - local loadtime = stoptime - starttime - timer.stoptime = stoptime - timer.loadtime = timer.loadtime + loadtime - timer.timing = 0 - return loadtime - end - end - return 0 -end - -local function elapsed(instance) - if type(instance) == "number" then - return instance or 0 - else - local timer = timers[instance or "notimer"] - return timer and timer.loadtime or 0 - end -end - -local function elapsedtime(instance) - return format("%0.3f",elapsed(instance)) -end - -local function elapsedindeed(instance) - return elapsed(instance) > statistics.threshold -end - -local function elapsedseconds(instance,rest) -- returns nil if 0 seconds - if elapsedindeed(instance) then - return format("%0.3f seconds %s", elapsed(instance),rest or "") - end -end - -statistics.hastiming = hastiming -statistics.resettiming = resettiming -statistics.starttiming = starttiming -statistics.stoptiming = stoptiming -statistics.elapsed = elapsed -statistics.elapsedtime = elapsedtime -statistics.elapsedindeed = elapsedindeed -statistics.elapsedseconds = elapsedseconds - --- general function .. 
we might split this module - -function statistics.register(tag,fnc) - if statistics.enable and type(fnc) == "function" then - local rt = registered[tag] or (#statusinfo + 1) - statusinfo[rt] = { tag, fnc } - registered[tag] = rt - if #tag > n then n = #tag end - end -end - -local report = logs.reporter("mkiv lua stats") - -function statistics.show() - if statistics.enable then - -- this code will move - local register = statistics.register - register("luatex banner", function() - return lower(status.banner) - end) - register("control sequences", function() - return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra) - end) - register("callbacks", function() - local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0 - return format("%s direct, %s indirect, %s total", total-indirect, indirect, total) - end) - if jit then - local status = { jit.status() } - if status[1] then - register("luajit status", function() - return concat(status," ",2) - end) - end - end - -- so far - -- collectgarbage("collect") - register("current memory usage",statistics.memused) - register("runtime",statistics.runtime) - logs.newline() -- initial newline - for i=1,#statusinfo do - local s = statusinfo[i] - local r = s[2]() - if r then - report("%s: %s",s[1],r) - end - end - -- logs.newline() -- final newline - statistics.enable = false - end -end - -function statistics.memused() -- no math.round yet -) - local round = math.round or math.floor - return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000)) -end - -starttiming(statistics) - -function statistics.formatruntime(runtime) -- indirect so it can be overloaded and - return format("%s seconds", runtime) -- indeed that happens in cure-uti.lua -end - -function statistics.runtime() - stoptiming(statistics) - return statistics.formatruntime(elapsedtime(statistics)) -end - -local report = logs.reporter("system") - -function statistics.timed(action) - starttiming("run") - action() - stoptiming("run") - report("total runtime: %s",elapsedtime("run")) -end - --- where, not really the best spot for this: - -commands = commands or { } - -function commands.resettimer(name) - resettiming(name or "whatever") - starttiming(name or "whatever") -end - -function commands.elapsedtime(name) - stoptiming(name or "whatever") - context(elapsedtime(name or "whatever")) -end +if not modules then modules = { } end modules ['trac-inf'] = { + version = 1.001, + comment = "companion to trac-inf.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- As we want to protect the global tables, we no longer store the timing +-- in the tables themselves but in a hidden timers table so that we don't +-- get warnings about assignments. This is more efficient than using rawset +-- and rawget. 
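-- Typical use of the timing interface that follows (illustrative only; the
-- instance name "myfeature" is made up):
--
-- statistics.starttiming("myfeature")
-- -- ... the work being measured ...
-- statistics.stoptiming("myfeature")
-- statistics.register("myfeature", function()
--     return statistics.elapsedseconds("myfeature") -- nil when below the threshold
-- end)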
+ +local type, tonumber = type, tonumber +local format, lower = string.format, string.lower +local concat = table.concat +local clock = os.gettimeofday or os.clock -- should go in environment + +statistics = statistics or { } +local statistics = statistics + +statistics.enable = true +statistics.threshold = 0.01 + +local statusinfo, n, registered, timers = { }, 0, { }, { } + +table.setmetatableindex(timers,function(t,k) + local v = { timing = 0, loadtime = 0 } + t[k] = v + return v +end) + +local function hastiming(instance) + return instance and timers[instance] +end + +local function resettiming(instance) + timers[instance or "notimer"] = { timing = 0, loadtime = 0 } +end + +local function starttiming(instance) + local timer = timers[instance or "notimer"] + local it = timer.timing or 0 + if it == 0 then + timer.starttime = clock() + if not timer.loadtime then + timer.loadtime = 0 + end + end + timer.timing = it + 1 +end + +local function stoptiming(instance) + local timer = timers[instance or "notimer"] + local it = timer.timing + if it > 1 then + timer.timing = it - 1 + else + local starttime = timer.starttime + if starttime then + local stoptime = clock() + local loadtime = stoptime - starttime + timer.stoptime = stoptime + timer.loadtime = timer.loadtime + loadtime + timer.timing = 0 + return loadtime + end + end + return 0 +end + +local function elapsed(instance) + if type(instance) == "number" then + return instance or 0 + else + local timer = timers[instance or "notimer"] + return timer and timer.loadtime or 0 + end +end + +local function elapsedtime(instance) + return format("%0.3f",elapsed(instance)) +end + +local function elapsedindeed(instance) + return elapsed(instance) > statistics.threshold +end + +local function elapsedseconds(instance,rest) -- returns nil if 0 seconds + if elapsedindeed(instance) then + return format("%0.3f seconds %s", elapsed(instance),rest or "") + end +end + +statistics.hastiming = hastiming +statistics.resettiming = resettiming +statistics.starttiming = starttiming +statistics.stoptiming = stoptiming +statistics.elapsed = elapsed +statistics.elapsedtime = elapsedtime +statistics.elapsedindeed = elapsedindeed +statistics.elapsedseconds = elapsedseconds + +-- general function .. 
we might split this module + +function statistics.register(tag,fnc) + if statistics.enable and type(fnc) == "function" then + local rt = registered[tag] or (#statusinfo + 1) + statusinfo[rt] = { tag, fnc } + registered[tag] = rt + if #tag > n then n = #tag end + end +end + +local report = logs.reporter("mkiv lua stats") + +function statistics.show() + if statistics.enable then + -- this code will move + local register = statistics.register + register("luatex banner", function() + return lower(status.banner) + end) + register("control sequences", function() + return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra) + end) + register("callbacks", function() + local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0 + return format("%s direct, %s indirect, %s total", total-indirect, indirect, total) + end) + if jit then + local status = { jit.status() } + if status[1] then + register("luajit status", function() + return concat(status," ",2) + end) + end + end + -- so far + -- collectgarbage("collect") + register("current memory usage",statistics.memused) + register("runtime",statistics.runtime) + logs.newline() -- initial newline + for i=1,#statusinfo do + local s = statusinfo[i] + local r = s[2]() + if r then + report("%s: %s",s[1],r) + end + end + -- logs.newline() -- final newline + statistics.enable = false + end +end + +function statistics.memused() -- no math.round yet -) + local round = math.round or math.floor + return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000)) +end + +starttiming(statistics) + +function statistics.formatruntime(runtime) -- indirect so it can be overloaded and + return format("%s seconds", runtime) -- indeed that happens in cure-uti.lua +end + +function statistics.runtime() + stoptiming(statistics) + return statistics.formatruntime(elapsedtime(statistics)) +end + +local report = logs.reporter("system") + +function statistics.timed(action) + starttiming("run") + action() + stoptiming("run") + report("total runtime: %s",elapsedtime("run")) +end + +-- where, not really the best spot for this: + +commands = commands or { } + +function commands.resettimer(name) + resettiming(name or "whatever") + starttiming(name or "whatever") +end + +function commands.elapsedtime(name) + stoptiming(name or "whatever") + context(elapsedtime(name or "whatever")) +end diff --git a/tex/context/base/trac-jus.lua b/tex/context/base/trac-jus.lua index 9d99f059d..4be9b30f8 100644 --- a/tex/context/base/trac-jus.lua +++ b/tex/context/base/trac-jus.lua @@ -1,136 +1,136 @@ -if not modules then modules = { } end modules ['trac-jus'] = { - version = 1.001, - comment = "companion to trac-jus.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local checkers = typesetters.checkers or { } -typesetters.checkers = checkers - ------ report_justification = logs.reporter("visualize","justification") - -local a_alignstate = attributes.private("alignstate") -local a_justification = attributes.private("justification") - -local tracers = nodes.tracers -local setcolor = tracers.colors.set -local settransparency = tracers.transparencies.set - -local new_rule = nodes.pool.rule -local new_glue = nodes.pool.glue -local new_kern = nodes.pool.kern -local concat_nodes = nodes.concat -local hpack_nodes = node.hpack -local copy_node = node.copy -local get_list_dimensions = node.dimensions -local hlist_code 
= nodes.nodecodes.hlist - -local tex_set_attribute = tex.setattribute -local unsetvalue = attributes.unsetvalue - -local min_threshold = 0 -local max_threshold = 0 - -local function set(n) - nodes.tasks.enableaction("mvlbuilders", "typesetters.checkers.handler") - nodes.tasks.enableaction("vboxbuilders","typesetters.checkers.handler") - tex_set_attribute(a_justification,n or 1) - function typesetters.checkers.set(n) - tex_set_attribute(a_justification,n or 1) - end -end - -local function reset() - tex_set_attribute(a_justification,unsetvalue) -end - -checkers.set = set -checkers.reset = reset - -function commands.showjustification(n) - set(n) -end - -trackers.register("visualizers.justification", function(v) - if v then - set(1) - else - reset() - end -end) - -function checkers.handler(head) - for current in node.traverse_id(hlist_code,head) do - if current[a_justification] == 1 then - current[a_justification] = 0 - local width = current.width - if width > 0 then - local list = current.list - if list then - local naturalwidth, naturalheight, naturaldepth = get_list_dimensions(list) - local delta = naturalwidth - width - if naturalwidth == 0 or delta == 0 then - -- special box - elseif delta >= max_threshold then - local rule = new_rule(delta,naturalheight,naturaldepth) - list = hpack_nodes(list,width,"exactly") - if list.glue_set == 1 then - setcolor(rule,"trace:dr") - settransparency(rule,"trace:dr") - else - setcolor(rule,"trace:db") - settransparency(rule,"trace:db") - end - rule = hpack_nodes(rule) - rule.width = 0 - rule.height = 0 - rule.depth = 0 - current.list = concat_nodes { list, rule } - -- current.list = concat_nodes { list, new_kern(-naturalwidth+width), rule } - elseif delta <= min_threshold then - local alignstate = list[a_alignstate] - if alignstate == 1 then - local rule = new_rule(-delta,naturalheight,naturaldepth) - setcolor(rule,"trace:dc") - settransparency(rule,"trace:dc") - rule = hpack_nodes(rule) - rule.height = 0 - rule.depth = 0 - rule.width = 0 - current.list = nodes.concat { rule, list } - elseif alignstate == 2 then - local rule = new_rule(-delta/2,naturalheight,naturaldepth) - setcolor(rule,"trace:dy") - settransparency(rule,"trace:dy") - rule = hpack_nodes(rule) - rule.width = 0 - rule.height = 0 - rule.depth = 0 - current.list = concat_nodes { copy_node(rule), list, new_kern(delta/2), rule } - elseif alignstate == 3 then - local rule = new_rule(-delta,naturalheight,naturaldepth) - setcolor(rule,"trace:dm") - settransparency(rule,"trace:dm") - rule = hpack_nodes(rule) - rule.height = 0 - rule.depth = 0 - current.list = concat_nodes { list, new_kern(delta), rule } - else - local rule = new_rule(-delta,naturalheight,naturaldepth) - setcolor(rule,"trace:dg") - settransparency(rule,"trace:dg") - rule = hpack_nodes(rule) - rule.height = 0 - rule.depth = 0 - rule.width = 0 - current.list = concat_nodes { list, new_kern(delta), rule } - end - end - end - end - end - end - return head -end +if not modules then modules = { } end modules ['trac-jus'] = { + version = 1.001, + comment = "companion to trac-jus.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local checkers = typesetters.checkers or { } +typesetters.checkers = checkers + +----- report_justification = logs.reporter("visualize","justification") + +local a_alignstate = attributes.private("alignstate") +local a_justification = attributes.private("justification") + +local tracers = nodes.tracers 
+local setcolor = tracers.colors.set +local settransparency = tracers.transparencies.set + +local new_rule = nodes.pool.rule +local new_glue = nodes.pool.glue +local new_kern = nodes.pool.kern +local concat_nodes = nodes.concat +local hpack_nodes = node.hpack +local copy_node = node.copy +local get_list_dimensions = node.dimensions +local hlist_code = nodes.nodecodes.hlist + +local tex_set_attribute = tex.setattribute +local unsetvalue = attributes.unsetvalue + +local min_threshold = 0 +local max_threshold = 0 + +local function set(n) + nodes.tasks.enableaction("mvlbuilders", "typesetters.checkers.handler") + nodes.tasks.enableaction("vboxbuilders","typesetters.checkers.handler") + tex_set_attribute(a_justification,n or 1) + function typesetters.checkers.set(n) + tex_set_attribute(a_justification,n or 1) + end +end + +local function reset() + tex_set_attribute(a_justification,unsetvalue) +end + +checkers.set = set +checkers.reset = reset + +function commands.showjustification(n) + set(n) +end + +trackers.register("visualizers.justification", function(v) + if v then + set(1) + else + reset() + end +end) + +function checkers.handler(head) + for current in node.traverse_id(hlist_code,head) do + if current[a_justification] == 1 then + current[a_justification] = 0 + local width = current.width + if width > 0 then + local list = current.list + if list then + local naturalwidth, naturalheight, naturaldepth = get_list_dimensions(list) + local delta = naturalwidth - width + if naturalwidth == 0 or delta == 0 then + -- special box + elseif delta >= max_threshold then + local rule = new_rule(delta,naturalheight,naturaldepth) + list = hpack_nodes(list,width,"exactly") + if list.glue_set == 1 then + setcolor(rule,"trace:dr") + settransparency(rule,"trace:dr") + else + setcolor(rule,"trace:db") + settransparency(rule,"trace:db") + end + rule = hpack_nodes(rule) + rule.width = 0 + rule.height = 0 + rule.depth = 0 + current.list = concat_nodes { list, rule } + -- current.list = concat_nodes { list, new_kern(-naturalwidth+width), rule } + elseif delta <= min_threshold then + local alignstate = list[a_alignstate] + if alignstate == 1 then + local rule = new_rule(-delta,naturalheight,naturaldepth) + setcolor(rule,"trace:dc") + settransparency(rule,"trace:dc") + rule = hpack_nodes(rule) + rule.height = 0 + rule.depth = 0 + rule.width = 0 + current.list = nodes.concat { rule, list } + elseif alignstate == 2 then + local rule = new_rule(-delta/2,naturalheight,naturaldepth) + setcolor(rule,"trace:dy") + settransparency(rule,"trace:dy") + rule = hpack_nodes(rule) + rule.width = 0 + rule.height = 0 + rule.depth = 0 + current.list = concat_nodes { copy_node(rule), list, new_kern(delta/2), rule } + elseif alignstate == 3 then + local rule = new_rule(-delta,naturalheight,naturaldepth) + setcolor(rule,"trace:dm") + settransparency(rule,"trace:dm") + rule = hpack_nodes(rule) + rule.height = 0 + rule.depth = 0 + current.list = concat_nodes { list, new_kern(delta), rule } + else + local rule = new_rule(-delta,naturalheight,naturaldepth) + setcolor(rule,"trace:dg") + settransparency(rule,"trace:dg") + rule = hpack_nodes(rule) + rule.height = 0 + rule.depth = 0 + rule.width = 0 + current.list = concat_nodes { list, new_kern(delta), rule } + end + end + end + end + end + end + return head +end diff --git a/tex/context/base/trac-lmx.lua b/tex/context/base/trac-lmx.lua index 18c7f6020..1a12d2078 100644 --- a/tex/context/base/trac-lmx.lua +++ b/tex/context/base/trac-lmx.lua @@ -1,732 +1,732 @@ -if not modules then modules = { 
} end modules ['trac-lmx'] = { - version = 1.002, - comment = "companion to trac-lmx.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this one will be adpated to the latest helpers - -local type, tostring, rawget, loadstring, pcall = type, tostring, rawget, loadstring, pcall -local format, sub, gsub = string.format, string.sub, string.gsub -local concat = table.concat -local collapsespaces = string.collapsespaces -local P, Cc, Cs, C, Carg, lpegmatch = lpeg.P, lpeg.Cc, lpeg.Cs, lpeg.C, lpeg.Carg, lpeg.match -local joinpath, replacesuffix, pathpart, filesuffix = file.join, file.replacesuffix, file.pathpart, file.suffix - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex - ------ trace_templates = false trackers .register("lmx.templates", function(v) trace_templates = v end) -local trace_variables = false trackers .register("lmx.variables", function(v) trace_variables = v end) - -local cache_templates = true directives.register("lmx.cache.templates",function(v) cache_templates = v end) -local cache_files = true directives.register("lmx.cache.files", function(v) cache_files = v end) - -local report_lmx = logs.reporter("lmx") -local report_error = logs.reporter("lmx","error") - -lmx = lmx or { } -local lmx = lmx - --- This will change: we will just pass the global defaults as argument, but then we need --- to rewrite some older code or come up with an ugly trick. - -local lmxvariables = { - ['title-default'] = 'ConTeXt LMX File', - ['color-background-green'] = '#4F6F6F', - ['color-background-blue'] = '#6F6F8F', - ['color-background-yellow'] = '#8F8F6F', - ['color-background-purple'] = '#8F6F8F', - ['color-background-body'] = '#808080', - ['color-background-main'] = '#3F3F3F', -} - -local lmxinherited = { - ['title'] = 'title-default', - ['color-background-one'] = 'color-background-green', - ['color-background-two'] = 'color-background-blue', - ['color-background-three'] = 'color-background-one', - ['color-background-four'] = 'color-background-two', -} - -lmx.variables = lmxvariables -lmx.inherited = lmxinherited - -setmetatableindex(lmxvariables,function(t,k) - k = lmxinherited[k] - while k do - local v = rawget(lmxvariables,k) - if v then - return v - end - k = lmxinherited[k] - end -end) - -function lmx.set(key,value) - lmxvariables[key] = value -end - -function lmx.get(key) - return lmxvariables[key] or "" -end - -lmx.report = report_lmx - --- helpers - --- the variables table is an empty one that gets linked to a defaults table --- that gets passed with a creation (first time only) and that itself links --- to one that gets passed to the converter - -local variables = { } -- we assume no nesting -local result = { } -- we assume no nesting - -local function do_print(one,two,...) - if two then - result[#result+1] = concat { one, two, ... } - else - result[#result+1] = one - end -end - --- Although it does not make much sense for most elements, we provide a mechanism --- to print wrapped content, something that is more efficient when we are constructing --- tables. - -local html = { } -lmx.html = html - -function html.td(str) - if type(str) == "table" then - for i=1,#str do -- spoils t ! - str[i] = format("%s",str[i] or "") - end - result[#result+1] = concat(str) - else - result[#result+1] = format("%s",str or "") - end -end - -function html.th(str) - if type(str) == "table" then - for i=1,#str do -- spoils t ! 
- str[i] = format("%s",str[i]) - end - result[#result+1] = concat(str) - else - result[#result+1] = format("%s",str or "") - end -end - -function html.a(text,url) - result[#result+1] = format("%s",url,text) -end - -setmetatableindex(html,function(t,k) - local f = format("<%s>%%s",k,k) - local v = function(str) result[#result+1] = format(f,str or "") end - t[k] = v - return v -end) - --- Loading templates: - -local function loadedfile(name) - name = resolvers and resolvers.findfile and resolvers.findfile(name) or name - local data = io.loaddata(name) - if not data or data == "" then - report_lmx("file %a is empty",name) - end - return data -end - -local function loadedsubfile(name) - return io.loaddata(resolvers and resolvers.findfile and resolvers.findfile(name) or name) -end - -lmx.loadedfile = loadedfile - --- A few helpers (the next one could end up in l-lpeg): - -local usedpaths = { } -local givenpath = nil - -local do_nested_include = nil - -local pattern = lpeg.replacer { - ["&"] = "&", - [">"] = ">", - ["<"] = "<", - ['"'] = """, -} - -local function do_escape(str) - return lpegmatch(pattern,str) or str -end - -local function do_variable(str) - local value = variables[str] - if not trace_variables then - -- nothing - elseif type(value) == "string" then - if #value > 80 then - report_lmx("variable %a is set to: %s ...",str,collapsespaces(sub(value,1,80))) - else - report_lmx("variable %a is set to: %s",str,collapsespaces(value)) - end - elseif type(value) == "nil" then - report_lmx("variable %a is set to: %s",str,"") - else - report_lmx("variable %a is set to: %S",str,value) - end - if type(value) == "function" then -- obsolete ... will go away - return value(str) - else - return value - end -end - -local function do_type(str) - if str and str ~= "" then - result[#result+1] = format("%s",do_escape(str)) - end -end - -local function do_fprint(str,...) - if str and str ~= "" then - result[#result+1] = format(str,...) - end -end - -local function do_eprint(str,...) - if str and str ~= "" then - result[#result+1] = lpegmatch(pattern,format(str,...)) - end -end - -local function do_print_variable(str) - local str = do_variable(str) -- variables[str] - if str and str ~= "" then - result[#result+1] = str - end -end - -local function do_type_variable(str) - local str = do_variable(str) -- variables[str] - if str and str ~= "" then - result[#result+1] = format("%s",do_escape(str)) - end -end - -local function do_include(filename,option) - local data = loadedsubfile(filename) - if (not data or data == "") and givenpath then - data = loadedsubfile(joinpath(givenpath,filename)) - end - if (not data or data == "") and type(usedpaths) == "table" then - for i=1,#usedpaths do - data = loadedsubfile(joinpath(usedpaths[i],filename)) - if data and data ~= "" then - break - end - end - end - if not data or data == "" then - data = format("",filename) - report_lmx("include file %a is empty",filename) - else - -- report_lmx("included file: %s",filename) - data = do_nested_include(data) - end - if filesuffix(filename,"css") and option == "strip" then -- new - data = lmx.stripcss(data) - end - return data -end - --- Flushers: - -lmx.print = do_print -lmx.type = do_type -lmx.eprint = do_eprint -lmx.fprint = do_fprint - -lmx.escape = do_escape -lmx.urlescape = url.escape -lmx.variable = do_variable -lmx.include = do_include - -lmx.inject = do_print -lmx.finject = do_fprint -lmx.einject = do_eprint - -lmx.pv = do_print_variable -lmx.tv = do_type_variable - --- The next functions set up the closure. 
- -function lmx.initialize(d,v) - if not v then - setmetatableindex(d,lmxvariables) - if variables ~= d then - setmetatableindex(variables,d) - if trace_variables then - report_lmx("using chain: variables => given defaults => lmx variables") - end - elseif trace_variables then - report_lmx("using chain: variables == given defaults => lmx variables") - end - elseif d ~= v then - setmetatableindex(v,d) - if d ~= lmxvariables then - setmetatableindex(d,lmxvariables) - if variables ~= v then - setmetatableindex(variables,v) - if trace_variables then - report_lmx("using chain: variables => given variables => given defaults => lmx variables") - end - elseif trace_variables then - report_lmx("using chain: variables == given variables => given defaults => lmx variables") - end - else - if variables ~= v then - setmetatableindex(variables,v) - if trace_variables then - report_lmx("using chain: variabes => given variables => given defaults") - end - elseif trace_variables then - report_lmx("using chain: variables == given variables => given defaults") - end - end - else - setmetatableindex(v,lmxvariables) - if variables ~= v then - setmetatableindex(variables,v) - if trace_variables then - report_lmx("using chain: variables => given variables => lmx variables") - end - elseif trace_variables then - report_lmx("using chain: variables == given variables => lmx variables") - end - end - result = { } -end - -function lmx.finalized() - local collapsed = concat(result) - result = { } -- free memory - return collapsed -end - -function lmx.getvariables() - return variables -end - -function lmx.reset() - -- obsolete -end - --- Creation: (todo: strip ) - --- local template = [[ --- return function(defaults,variables) --- --- -- initialize --- --- lmx.initialize(defaults,variables) --- --- -- interface --- --- local definitions = { } --- local variables = lmx.getvariables() --- local html = lmx.html --- local inject = lmx.print --- local finject = lmx.fprint --- local einject = lmx.eprint --- local escape = lmx.escape --- local verbose = lmx.type --- --- -- shortcuts (sort of obsolete as there is no gain) --- --- local p = lmx.print --- local f = lmx.fprint --- local v = lmx.variable --- local e = lmx.escape --- local t = lmx.type --- local pv = lmx.pv --- local tv = lmx.tv --- --- -- generator --- --- %s --- --- -- finalize --- --- return lmx.finalized() --- --- end --- ]] - -local template = [[ --- interface - -local html = lmx.html -local inject = lmx.print -local finject = lmx.fprint -- better use the following -local einject = lmx.eprint -- better use the following -local injectf = lmx.fprint -local injecte = lmx.eprint -local injectfmt = lmx.fprint -local injectesc = lmx.eprint -local escape = lmx.escape -local verbose = lmx.type - -local i_n_j_e_c_t = lmx.print - --- shortcuts (sort of obsolete as there is no gain) - -local p = lmx.print -local f = lmx.fprint -local v = lmx.variable -local e = lmx.escape -local t = lmx.type -local pv = lmx.pv -local tv = lmx.tv - -local lmx_initialize = lmx.initialize -local lmx_finalized = lmx.finalized -local lmx_getvariables = lmx.getvariables - --- generator - -return function(defaults,variables) - - lmx_initialize(defaults,variables) - - local definitions = { } - local variables = lmx_getvariables() - - %s -- the action: appends to result - - return lmx_finalized() - -end -]] - -local function savedefinition(definitions,tag,content) - definitions[tag] = content - return "" -end - -local function getdefinition(definitions,tag) - return definitions[tag] or "" -end - 
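-- Sketch of what the conversion patterns below do to a template chunk,
-- assuming the usual lmx conventions where Lua is embedded as <?lua ... ?>
-- in markup and as /*lua ... */ in css:
--
--   <b><?lua pv('title') ?></b>
--
-- ends up, after pattern_3, as roughly
--
--   i_n_j_e_c_t[==[<b>]==]  pv('title')  i_n_j_e_c_t[==[</b>]==]
--
-- literal text is wrapped in inject calls that append to the result table,
-- while the embedded code is kept as-is inside the generated converter.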
-local whitespace = lpeg.patterns.whitespace -local optionalspaces = whitespace^0 - -local dquote = P('"') - -local begincomment = P("") - -local beginembedxml = P("") - -local beginembedcss = P("/*") -local endembedcss = P("*/") - -local gobbledendxml = (optionalspaces * endembedxml) / "" ------ argumentxml = (1-gobbledendxml)^0 -local argumentxml = (whitespace^1 + dquote * C((1-dquote)^1) * dquote + C((1-gobbledendxml-whitespace)^1))^0 - -local gobbledendcss = (optionalspaces * endembedcss) / "" ------ argumentcss = (1-gobbledendcss)^0 -local argumentcss = (whitespace^1 + dquote * C((1-dquote)^1) * dquote + C((1-gobbledendcss-whitespace)^1))^0 - -local commentxml = (begincomment * (1-endcomment)^0 * endcomment) / "" - -local beginluaxml = (beginembedxml * P("lua")) / "" -local endluaxml = endembedxml / "" - -local luacodexml = beginluaxml - * (1-endluaxml)^1 - * endluaxml - -local beginluacss = (beginembedcss * P("lua")) / "" -local endluacss = endembedcss / "" - -local luacodecss = beginluacss - * (1-endluacss)^1 - * endluacss - -local othercode = (1-beginluaxml-beginluacss)^1 / " i_n_j_e_c_t[==[%0]==] " - -local includexml = ((beginembedxml * P("lmx-include") * optionalspaces) / "") - * (argumentxml / do_include) - * gobbledendxml - -local includecss = ((beginembedcss * P("lmx-include") * optionalspaces) / "") - * (argumentcss / do_include) - * gobbledendcss - -local definexml_b = ((beginembedxml * P("lmx-define-begin") * optionalspaces) / "") - * argumentxml - * gobbledendxml - -local definexml_e = ((beginembedxml * P("lmx-define-end") * optionalspaces) / "") - * argumentxml - * gobbledendxml - -local definexml_c = C((1-definexml_e)^0) - -local definexml = (Carg(1) * C(definexml_b) * definexml_c * definexml_e) / savedefinition - -local resolvexml = ((beginembedxml * P("lmx-resolve") * optionalspaces) / "") - * ((Carg(1) * C(argumentxml)) / getdefinition) - * gobbledendxml - -local definecss_b = ((beginembedcss * P("lmx-define-begin") * optionalspaces) / "") - * argumentcss - * gobbledendcss - -local definecss_e = ((beginembedcss * P("lmx-define-end") * optionalspaces) / "") - * argumentcss - * gobbledendcss - -local definecss_c = C((1-definecss_e)^0) - -local definecss = (Carg(1) * C(definecss_b) * definecss_c * definecss_e) / savedefinition - -local resolvecss = ((beginembedcss * P("lmx-resolve") * optionalspaces) / "") - * ((Carg(1) * C(argumentcss)) / getdefinition) - * gobbledendcss - -local pattern_1 = Cs((commentxml + includexml + includecss + P(1))^0) -- get rid of xml comments asap -local pattern_2 = Cs((definexml + resolvexml + definecss + resolvecss + P(1))^0) -local pattern_3 = Cs((luacodexml + luacodecss + othercode)^0) - -local cache = { } - -local function lmxerror(str) - report_error(str) - return html.tt(str) -end - -local function wrapper(converter,defaults,variables) - local outcome, message = pcall(converter,defaults,variables) - if not outcome then - return lmxerror(format("error in conversion: %s",message)) - else - return message - end -end - -do_nested_include = function(data) -- also used in include - return lpegmatch(pattern_1,data) -end - -function lmxnew(data,defaults,nocache,path) -- todo: use defaults in calling routines - data = data or "" - local known = cache[data] - if not known then - givenpath = path - usedpaths = lmxvariables.includepath or { } - if type(usedpaths) == "string" then - usedpaths = { usedpaths } - end - data = lpegmatch(pattern_1,data) - data = lpegmatch(pattern_2,data,1,{}) - data = lpegmatch(pattern_3,data) - local converted = 
loadstring(format(template,data)) - if converted then - converted = converted() - end - defaults = defaults or { } - local converter - if converted then - converter = function(variables) - return wrapper(converted,defaults,variables) - end - else - report_error("error in:\n%s\n:",data) - converter = function() lmxerror("error in template") end - end - known = { - data = defaults.trace and data or "", - variables = defaults, - converter = converter, - } - if cache_templates and nocache ~= false then - cache[data] = known - end - elseif variables then - known.variables = variables - end - return known, known.variables -end - -local function lmxresult(self,variables) - if self then - local converter = self.converter - if converter then - local converted = converter(variables) - if trace_variables then -- will become templates - report_lmx("converted size: %s",#converted) - end - return converted or lmxerror("no result from converter") - else - return lmxerror("invalid converter") - end - else - return lmxerror("invalid specification") - end -end - -lmx.new = lmxnew -lmx.result = lmxresult - -local loadedfiles = { } - -function lmx.convertstring(templatestring,variables,nocache,path) - return lmxresult(lmxnew(templatestring,nil,nocache,path),variables) -end - -function lmx.convertfile(templatefile,variables,nocache) - if trace_variables then -- will become templates - report_lmx("converting file %a",templatefile) - end - local converter = loadedfiles[templatefile] - if not converter then - converter = lmxnew(loadedfile(templatefile),nil,nocache,pathpart(templatefile)) - loadedfiles[templatefile] = converter - end - return lmxresult(converter,variables) -end - -function lmxconvert(templatefile,resultfile,variables,nocache) -- or (templatefile,variables) - if trace_variables then -- will become templates - report_lmx("converting file %a",templatefile) - end - if not variables and type(resultfile) == "table" then - variables = resultfile - end - local converter = loadedfiles[templatefile] - if not converter then - converter = lmxnew(loadedfile(templatefile),nil,nocache,pathpart(templatefile)) - if cache_files then - loadedfiles[templatefile] = converter - end - end - local result = lmxresult(converter,variables) - if resultfile then - io.savedata(resultfile,result) - else - return result - end -end - -lmx.convert = lmxconvert - --- helpers - -local nocomment = (beginembedcss * (1 - endembedcss)^1 * endembedcss) / "" -local nowhitespace = whitespace^1 / " " -- "" -local semistripped = whitespace^1 / "" * P(";") -local stripper = Cs((nocomment + semistripped + nowhitespace + 1)^1) - -function lmx.stripcss(str) - return lpegmatch(stripper,str) -end - -function lmx.color(r,g,b,a) - if r > 1 then - r = 1 - end - if g > 1 then - g = 1 - end - if b > 1 then - b = 1 - end - if not a then - a= 0 - elseif a > 1 then - a = 1 - end - if a > 0 then - return format("rgba(%s%%,%s%%,%s%%,%s)",r*100,g*100,b*100,a) - else - return format("rgb(%s%%,%s%%,%s%%)",r*100,g*100,b*100) - end -end - - --- these can be overloaded - -lmx.lmxfile = string.itself -lmx.htmfile = string.itself -lmx.popupfile = os.launch - -function lmxmake(name,variables) - local lmxfile = lmx.lmxfile(name) - local htmfile = lmx.htmfile(name) - if lmxfile == htmfile then - htmfile = replacesuffix(lmxfile,"html") - end - lmxconvert(lmxfile,htmfile,variables) - return htmfile -end - -lmxmake = lmx.make - -function lmx.show(name,variables) - local htmfile = lmxmake(name,variables) - lmx.popupfile(htmfile) - return htmfile -end - --- Command line 
(will become mtx-lmx): - -if arg then - if arg[1] == "--show" then if arg[2] then lmx.show (arg[2]) end - elseif arg[1] == "--convert" then if arg[2] then lmx.convert(arg[2], arg[3] or "temp.html") end - end -end - --- Test 1: - --- inspect(lmx.result(lmx.new(io.loaddata("t:/sources/context-timing.lmx")))) - --- Test 2: - --- local str = [[ --- --- --- some content a --- some content b --- --- --- --- --- --- --- --- --- --- --- --- ]] - --- local defaults = { trace = true, a = 3, b = 3 } --- local result = lmx.new(str,defaults) --- inspect(result.data) --- inspect(result.converter(defaults)) --- inspect(result.converter { a = 1 }) --- inspect(lmx.result(result, { b = 2 })) --- inspect(lmx.result(result, { a = 20000, b = 40000 })) +if not modules then modules = { } end modules ['trac-lmx'] = { + version = 1.002, + comment = "companion to trac-lmx.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this one will be adpated to the latest helpers + +local type, tostring, rawget, loadstring, pcall = type, tostring, rawget, loadstring, pcall +local format, sub, gsub = string.format, string.sub, string.gsub +local concat = table.concat +local collapsespaces = string.collapsespaces +local P, Cc, Cs, C, Carg, lpegmatch = lpeg.P, lpeg.Cc, lpeg.Cs, lpeg.C, lpeg.Carg, lpeg.match +local joinpath, replacesuffix, pathpart, filesuffix = file.join, file.replacesuffix, file.pathpart, file.suffix + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex + +----- trace_templates = false trackers .register("lmx.templates", function(v) trace_templates = v end) +local trace_variables = false trackers .register("lmx.variables", function(v) trace_variables = v end) + +local cache_templates = true directives.register("lmx.cache.templates",function(v) cache_templates = v end) +local cache_files = true directives.register("lmx.cache.files", function(v) cache_files = v end) + +local report_lmx = logs.reporter("lmx") +local report_error = logs.reporter("lmx","error") + +lmx = lmx or { } +local lmx = lmx + +-- This will change: we will just pass the global defaults as argument, but then we need +-- to rewrite some older code or come up with an ugly trick. 
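-- A quick sketch of the fallback mechanism set up below, using values from
-- the two tables that follow:
--
--   lmx.get("color-background-green")  -->  "#4F6F6F"   (direct entry)
--   lmx.get("color-background-three")  -->  "#4F6F6F"   (three -> one -> green)
--   lmx.get("does-not-exist")          -->  ""          (no entry, no parent)
--
-- lmxinherited only records parent keys; the metatable index walks that chain
-- until a real value is found in lmxvariables.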
+ +local lmxvariables = { + ['title-default'] = 'ConTeXt LMX File', + ['color-background-green'] = '#4F6F6F', + ['color-background-blue'] = '#6F6F8F', + ['color-background-yellow'] = '#8F8F6F', + ['color-background-purple'] = '#8F6F8F', + ['color-background-body'] = '#808080', + ['color-background-main'] = '#3F3F3F', +} + +local lmxinherited = { + ['title'] = 'title-default', + ['color-background-one'] = 'color-background-green', + ['color-background-two'] = 'color-background-blue', + ['color-background-three'] = 'color-background-one', + ['color-background-four'] = 'color-background-two', +} + +lmx.variables = lmxvariables +lmx.inherited = lmxinherited + +setmetatableindex(lmxvariables,function(t,k) + k = lmxinherited[k] + while k do + local v = rawget(lmxvariables,k) + if v then + return v + end + k = lmxinherited[k] + end +end) + +function lmx.set(key,value) + lmxvariables[key] = value +end + +function lmx.get(key) + return lmxvariables[key] or "" +end + +lmx.report = report_lmx + +-- helpers + +-- the variables table is an empty one that gets linked to a defaults table +-- that gets passed with a creation (first time only) and that itself links +-- to one that gets passed to the converter + +local variables = { } -- we assume no nesting +local result = { } -- we assume no nesting + +local function do_print(one,two,...) + if two then + result[#result+1] = concat { one, two, ... } + else + result[#result+1] = one + end +end + +-- Although it does not make much sense for most elements, we provide a mechanism +-- to print wrapped content, something that is more efficient when we are constructing +-- tables. + +local html = { } +lmx.html = html + +function html.td(str) + if type(str) == "table" then + for i=1,#str do -- spoils t ! + str[i] = format("%s",str[i] or "") + end + result[#result+1] = concat(str) + else + result[#result+1] = format("%s",str or "") + end +end + +function html.th(str) + if type(str) == "table" then + for i=1,#str do -- spoils t ! 
+ str[i] = format("%s",str[i]) + end + result[#result+1] = concat(str) + else + result[#result+1] = format("%s",str or "") + end +end + +function html.a(text,url) + result[#result+1] = format("%s",url,text) +end + +setmetatableindex(html,function(t,k) + local f = format("<%s>%%s",k,k) + local v = function(str) result[#result+1] = format(f,str or "") end + t[k] = v + return v +end) + +-- Loading templates: + +local function loadedfile(name) + name = resolvers and resolvers.findfile and resolvers.findfile(name) or name + local data = io.loaddata(name) + if not data or data == "" then + report_lmx("file %a is empty",name) + end + return data +end + +local function loadedsubfile(name) + return io.loaddata(resolvers and resolvers.findfile and resolvers.findfile(name) or name) +end + +lmx.loadedfile = loadedfile + +-- A few helpers (the next one could end up in l-lpeg): + +local usedpaths = { } +local givenpath = nil + +local do_nested_include = nil + +local pattern = lpeg.replacer { + ["&"] = "&", + [">"] = ">", + ["<"] = "<", + ['"'] = """, +} + +local function do_escape(str) + return lpegmatch(pattern,str) or str +end + +local function do_variable(str) + local value = variables[str] + if not trace_variables then + -- nothing + elseif type(value) == "string" then + if #value > 80 then + report_lmx("variable %a is set to: %s ...",str,collapsespaces(sub(value,1,80))) + else + report_lmx("variable %a is set to: %s",str,collapsespaces(value)) + end + elseif type(value) == "nil" then + report_lmx("variable %a is set to: %s",str,"") + else + report_lmx("variable %a is set to: %S",str,value) + end + if type(value) == "function" then -- obsolete ... will go away + return value(str) + else + return value + end +end + +local function do_type(str) + if str and str ~= "" then + result[#result+1] = format("%s",do_escape(str)) + end +end + +local function do_fprint(str,...) + if str and str ~= "" then + result[#result+1] = format(str,...) + end +end + +local function do_eprint(str,...) + if str and str ~= "" then + result[#result+1] = lpegmatch(pattern,format(str,...)) + end +end + +local function do_print_variable(str) + local str = do_variable(str) -- variables[str] + if str and str ~= "" then + result[#result+1] = str + end +end + +local function do_type_variable(str) + local str = do_variable(str) -- variables[str] + if str and str ~= "" then + result[#result+1] = format("%s",do_escape(str)) + end +end + +local function do_include(filename,option) + local data = loadedsubfile(filename) + if (not data or data == "") and givenpath then + data = loadedsubfile(joinpath(givenpath,filename)) + end + if (not data or data == "") and type(usedpaths) == "table" then + for i=1,#usedpaths do + data = loadedsubfile(joinpath(usedpaths[i],filename)) + if data and data ~= "" then + break + end + end + end + if not data or data == "" then + data = format("",filename) + report_lmx("include file %a is empty",filename) + else + -- report_lmx("included file: %s",filename) + data = do_nested_include(data) + end + if filesuffix(filename,"css") and option == "strip" then -- new + data = lmx.stripcss(data) + end + return data +end + +-- Flushers: + +lmx.print = do_print +lmx.type = do_type +lmx.eprint = do_eprint +lmx.fprint = do_fprint + +lmx.escape = do_escape +lmx.urlescape = url.escape +lmx.variable = do_variable +lmx.include = do_include + +lmx.inject = do_print +lmx.finject = do_fprint +lmx.einject = do_eprint + +lmx.pv = do_print_variable +lmx.tv = do_type_variable + +-- The next functions set up the closure. 
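-- A small sketch of the chain that lmx.initialize builds, assuming a call
-- like lmx.convertstring(template, { title = "test" }) where lmxnew passes an
-- empty defaults table along:
--
--   variables  ->  given variables  ->  given defaults  ->  lmx variables
--
-- so v('title') in a template first sees the value that was passed in and
-- only falls back to the 'title-default' entry when nothing was provided.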
+ +function lmx.initialize(d,v) + if not v then + setmetatableindex(d,lmxvariables) + if variables ~= d then + setmetatableindex(variables,d) + if trace_variables then + report_lmx("using chain: variables => given defaults => lmx variables") + end + elseif trace_variables then + report_lmx("using chain: variables == given defaults => lmx variables") + end + elseif d ~= v then + setmetatableindex(v,d) + if d ~= lmxvariables then + setmetatableindex(d,lmxvariables) + if variables ~= v then + setmetatableindex(variables,v) + if trace_variables then + report_lmx("using chain: variables => given variables => given defaults => lmx variables") + end + elseif trace_variables then + report_lmx("using chain: variables == given variables => given defaults => lmx variables") + end + else + if variables ~= v then + setmetatableindex(variables,v) + if trace_variables then + report_lmx("using chain: variabes => given variables => given defaults") + end + elseif trace_variables then + report_lmx("using chain: variables == given variables => given defaults") + end + end + else + setmetatableindex(v,lmxvariables) + if variables ~= v then + setmetatableindex(variables,v) + if trace_variables then + report_lmx("using chain: variables => given variables => lmx variables") + end + elseif trace_variables then + report_lmx("using chain: variables == given variables => lmx variables") + end + end + result = { } +end + +function lmx.finalized() + local collapsed = concat(result) + result = { } -- free memory + return collapsed +end + +function lmx.getvariables() + return variables +end + +function lmx.reset() + -- obsolete +end + +-- Creation: (todo: strip ) + +-- local template = [[ +-- return function(defaults,variables) +-- +-- -- initialize +-- +-- lmx.initialize(defaults,variables) +-- +-- -- interface +-- +-- local definitions = { } +-- local variables = lmx.getvariables() +-- local html = lmx.html +-- local inject = lmx.print +-- local finject = lmx.fprint +-- local einject = lmx.eprint +-- local escape = lmx.escape +-- local verbose = lmx.type +-- +-- -- shortcuts (sort of obsolete as there is no gain) +-- +-- local p = lmx.print +-- local f = lmx.fprint +-- local v = lmx.variable +-- local e = lmx.escape +-- local t = lmx.type +-- local pv = lmx.pv +-- local tv = lmx.tv +-- +-- -- generator +-- +-- %s +-- +-- -- finalize +-- +-- return lmx.finalized() +-- +-- end +-- ]] + +local template = [[ +-- interface + +local html = lmx.html +local inject = lmx.print +local finject = lmx.fprint -- better use the following +local einject = lmx.eprint -- better use the following +local injectf = lmx.fprint +local injecte = lmx.eprint +local injectfmt = lmx.fprint +local injectesc = lmx.eprint +local escape = lmx.escape +local verbose = lmx.type + +local i_n_j_e_c_t = lmx.print + +-- shortcuts (sort of obsolete as there is no gain) + +local p = lmx.print +local f = lmx.fprint +local v = lmx.variable +local e = lmx.escape +local t = lmx.type +local pv = lmx.pv +local tv = lmx.tv + +local lmx_initialize = lmx.initialize +local lmx_finalized = lmx.finalized +local lmx_getvariables = lmx.getvariables + +-- generator + +return function(defaults,variables) + + lmx_initialize(defaults,variables) + + local definitions = { } + local variables = lmx_getvariables() + + %s -- the action: appends to result + + return lmx_finalized() + +end +]] + +local function savedefinition(definitions,tag,content) + definitions[tag] = content + return "" +end + +local function getdefinition(definitions,tag) + return definitions[tag] or "" +end + 
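-- Sketch of the three preprocessing passes defined below:
--
--   pattern_1 : drop comments and resolve lmx-include, pulling other template
--               or css files in via do_include (searching givenpath and the
--               usedpaths list)
--   pattern_2 : collect lmx-define-begin/lmx-define-end blocks with
--               savedefinition and paste them back where lmx-resolve asks for
--               them via getdefinition
--   pattern_3 : wrap the remaining literal text and embedded Lua into the
--               body of the generated converter function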
+local whitespace = lpeg.patterns.whitespace +local optionalspaces = whitespace^0 + +local dquote = P('"') + +local begincomment = P("") + +local beginembedxml = P("") + +local beginembedcss = P("/*") +local endembedcss = P("*/") + +local gobbledendxml = (optionalspaces * endembedxml) / "" +----- argumentxml = (1-gobbledendxml)^0 +local argumentxml = (whitespace^1 + dquote * C((1-dquote)^1) * dquote + C((1-gobbledendxml-whitespace)^1))^0 + +local gobbledendcss = (optionalspaces * endembedcss) / "" +----- argumentcss = (1-gobbledendcss)^0 +local argumentcss = (whitespace^1 + dquote * C((1-dquote)^1) * dquote + C((1-gobbledendcss-whitespace)^1))^0 + +local commentxml = (begincomment * (1-endcomment)^0 * endcomment) / "" + +local beginluaxml = (beginembedxml * P("lua")) / "" +local endluaxml = endembedxml / "" + +local luacodexml = beginluaxml + * (1-endluaxml)^1 + * endluaxml + +local beginluacss = (beginembedcss * P("lua")) / "" +local endluacss = endembedcss / "" + +local luacodecss = beginluacss + * (1-endluacss)^1 + * endluacss + +local othercode = (1-beginluaxml-beginluacss)^1 / " i_n_j_e_c_t[==[%0]==] " + +local includexml = ((beginembedxml * P("lmx-include") * optionalspaces) / "") + * (argumentxml / do_include) + * gobbledendxml + +local includecss = ((beginembedcss * P("lmx-include") * optionalspaces) / "") + * (argumentcss / do_include) + * gobbledendcss + +local definexml_b = ((beginembedxml * P("lmx-define-begin") * optionalspaces) / "") + * argumentxml + * gobbledendxml + +local definexml_e = ((beginembedxml * P("lmx-define-end") * optionalspaces) / "") + * argumentxml + * gobbledendxml + +local definexml_c = C((1-definexml_e)^0) + +local definexml = (Carg(1) * C(definexml_b) * definexml_c * definexml_e) / savedefinition + +local resolvexml = ((beginembedxml * P("lmx-resolve") * optionalspaces) / "") + * ((Carg(1) * C(argumentxml)) / getdefinition) + * gobbledendxml + +local definecss_b = ((beginembedcss * P("lmx-define-begin") * optionalspaces) / "") + * argumentcss + * gobbledendcss + +local definecss_e = ((beginembedcss * P("lmx-define-end") * optionalspaces) / "") + * argumentcss + * gobbledendcss + +local definecss_c = C((1-definecss_e)^0) + +local definecss = (Carg(1) * C(definecss_b) * definecss_c * definecss_e) / savedefinition + +local resolvecss = ((beginembedcss * P("lmx-resolve") * optionalspaces) / "") + * ((Carg(1) * C(argumentcss)) / getdefinition) + * gobbledendcss + +local pattern_1 = Cs((commentxml + includexml + includecss + P(1))^0) -- get rid of xml comments asap +local pattern_2 = Cs((definexml + resolvexml + definecss + resolvecss + P(1))^0) +local pattern_3 = Cs((luacodexml + luacodecss + othercode)^0) + +local cache = { } + +local function lmxerror(str) + report_error(str) + return html.tt(str) +end + +local function wrapper(converter,defaults,variables) + local outcome, message = pcall(converter,defaults,variables) + if not outcome then + return lmxerror(format("error in conversion: %s",message)) + else + return message + end +end + +do_nested_include = function(data) -- also used in include + return lpegmatch(pattern_1,data) +end + +function lmxnew(data,defaults,nocache,path) -- todo: use defaults in calling routines + data = data or "" + local known = cache[data] + if not known then + givenpath = path + usedpaths = lmxvariables.includepath or { } + if type(usedpaths) == "string" then + usedpaths = { usedpaths } + end + data = lpegmatch(pattern_1,data) + data = lpegmatch(pattern_2,data,1,{}) + data = lpegmatch(pattern_3,data) + local converted = 
loadstring(format(template,data)) + if converted then + converted = converted() + end + defaults = defaults or { } + local converter + if converted then + converter = function(variables) + return wrapper(converted,defaults,variables) + end + else + report_error("error in:\n%s\n:",data) + converter = function() lmxerror("error in template") end + end + known = { + data = defaults.trace and data or "", + variables = defaults, + converter = converter, + } + if cache_templates and nocache ~= false then + cache[data] = known + end + elseif variables then + known.variables = variables + end + return known, known.variables +end + +local function lmxresult(self,variables) + if self then + local converter = self.converter + if converter then + local converted = converter(variables) + if trace_variables then -- will become templates + report_lmx("converted size: %s",#converted) + end + return converted or lmxerror("no result from converter") + else + return lmxerror("invalid converter") + end + else + return lmxerror("invalid specification") + end +end + +lmx.new = lmxnew +lmx.result = lmxresult + +local loadedfiles = { } + +function lmx.convertstring(templatestring,variables,nocache,path) + return lmxresult(lmxnew(templatestring,nil,nocache,path),variables) +end + +function lmx.convertfile(templatefile,variables,nocache) + if trace_variables then -- will become templates + report_lmx("converting file %a",templatefile) + end + local converter = loadedfiles[templatefile] + if not converter then + converter = lmxnew(loadedfile(templatefile),nil,nocache,pathpart(templatefile)) + loadedfiles[templatefile] = converter + end + return lmxresult(converter,variables) +end + +function lmxconvert(templatefile,resultfile,variables,nocache) -- or (templatefile,variables) + if trace_variables then -- will become templates + report_lmx("converting file %a",templatefile) + end + if not variables and type(resultfile) == "table" then + variables = resultfile + end + local converter = loadedfiles[templatefile] + if not converter then + converter = lmxnew(loadedfile(templatefile),nil,nocache,pathpart(templatefile)) + if cache_files then + loadedfiles[templatefile] = converter + end + end + local result = lmxresult(converter,variables) + if resultfile then + io.savedata(resultfile,result) + else + return result + end +end + +lmx.convert = lmxconvert + +-- helpers + +local nocomment = (beginembedcss * (1 - endembedcss)^1 * endembedcss) / "" +local nowhitespace = whitespace^1 / " " -- "" +local semistripped = whitespace^1 / "" * P(";") +local stripper = Cs((nocomment + semistripped + nowhitespace + 1)^1) + +function lmx.stripcss(str) + return lpegmatch(stripper,str) +end + +function lmx.color(r,g,b,a) + if r > 1 then + r = 1 + end + if g > 1 then + g = 1 + end + if b > 1 then + b = 1 + end + if not a then + a= 0 + elseif a > 1 then + a = 1 + end + if a > 0 then + return format("rgba(%s%%,%s%%,%s%%,%s)",r*100,g*100,b*100,a) + else + return format("rgb(%s%%,%s%%,%s%%)",r*100,g*100,b*100) + end +end + + +-- these can be overloaded + +lmx.lmxfile = string.itself +lmx.htmfile = string.itself +lmx.popupfile = os.launch + +function lmxmake(name,variables) + local lmxfile = lmx.lmxfile(name) + local htmfile = lmx.htmfile(name) + if lmxfile == htmfile then + htmfile = replacesuffix(lmxfile,"html") + end + lmxconvert(lmxfile,htmfile,variables) + return htmfile +end + +lmxmake = lmx.make + +function lmx.show(name,variables) + local htmfile = lmxmake(name,variables) + lmx.popupfile(htmfile) + return htmfile +end + +-- Command line 
(will become mtx-lmx): + +if arg then + if arg[1] == "--show" then if arg[2] then lmx.show (arg[2]) end + elseif arg[1] == "--convert" then if arg[2] then lmx.convert(arg[2], arg[3] or "temp.html") end + end +end + +-- Test 1: + +-- inspect(lmx.result(lmx.new(io.loaddata("t:/sources/context-timing.lmx")))) + +-- Test 2: + +-- local str = [[ +-- +-- +-- some content a +-- some content b +-- +-- +-- +-- +-- +-- +-- +-- +-- +-- +-- +-- ]] + +-- local defaults = { trace = true, a = 3, b = 3 } +-- local result = lmx.new(str,defaults) +-- inspect(result.data) +-- inspect(result.converter(defaults)) +-- inspect(result.converter { a = 1 }) +-- inspect(lmx.result(result, { b = 2 })) +-- inspect(lmx.result(result, { a = 20000, b = 40000 })) diff --git a/tex/context/base/trac-log.lua b/tex/context/base/trac-log.lua index 1f2520130..73e302e26 100644 --- a/tex/context/base/trac-log.lua +++ b/tex/context/base/trac-log.lua @@ -1,816 +1,816 @@ -if not modules then modules = { } end modules ['trac-log'] = { - version = 1.001, - comment = "companion to trac-log.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- if tex and (tex.jobname or tex.formatname) then --- --- -- quick hack, awaiting speedup in engine (8 -> 6.4 sec for --make with console2) --- -- still needed for luajittex --- --- local texio_write_nl = texio.write_nl --- local texio_write = texio.write --- local io_write = io.write - --- local write_nl = function(target,...) --- if not io_write then --- io_write = io.write --- end --- if target == "term and log" then --- texio_write_nl("log",...) --- texio_write_nl("term","") --- io_write(...) --- elseif target == "log" then --- texio_write_nl("log",...) --- elseif target == "term" then --- texio_write_nl("term","") --- io_write(...) --- else --- texio_write_nl("log",target,...) --- texio_write_nl("term","") --- io_write(target,...) --- end --- end - --- local write = function(target,...) --- if not io_write then --- io_write = io.write --- end --- if target == "term and log" then --- texio_write("log",...) --- io_write(...) --- elseif target == "log" then --- texio_write("log",...) --- elseif target == "term" then --- io_write(...) --- else --- texio_write("log",target,...) --- io_write(target,...) --- end --- end - --- texio.write = write --- texio.write_nl = write_nl --- --- else --- --- -- texlua or just lua --- --- end - --- todo: less categories, more subcategories (e.g. nodes) --- todo: split into basics and ctx specific - -local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write -local format, gmatch, find = string.format, string.gmatch, string.find -local concat, insert, remove = table.concat, table.insert, table.remove -local topattern = string.topattern -local texcount = tex and tex.count -local next, type, select = next, type, select -local utfchar = utf.char - -local setmetatableindex = table.setmetatableindex -local formatters = string.formatters - ---[[ldx-- -

-<p>This is a prelude to a more extensive logging module. We no longer
-provide <l n='xml'/> based logging as parsing is relatively easy anyway.</p>

    ---ldx]]-- - -logs = logs or { } -local logs = logs - -local moreinfo = [[ -More information about ConTeXt and the tools that come with it can be found at: -]] .. "\n" .. [[ -maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context -webpage : http://www.pragma-ade.nl / http://tex.aanhet.net -wiki : http://contextgarden.net -]] - --- -- we extend the formatters: --- --- function utilities.strings.unichr(s) return "U+" .. format("%05X",s) .. " (" .. utfchar(s) .. ")" end --- function utilities.strings.chruni(s) return utfchar(s) .. " (U+" .. format("%05X",s) .. ")" end --- --- utilities.strings.formatters.add ( --- string.formatters, "uni", --- [[unichr(%s)]], --- [[local unichr = utilities.strings.unichr]] --- ) --- --- utilities.strings.formatters.add ( --- string.formatters, "chr", --- [[chruni(%s)]], --- [[local chruni = utilities.strings.chruni]] --- ) - -utilities.strings.formatters.add ( - formatters, "unichr", - [["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]] -) - -utilities.strings.formatters.add ( - formatters, "chruni", - [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]] -) - --- print(formatters["Missing character %!chruni! in font."](234)) --- print(formatters["Missing character %!unichr! in font."](234)) - --- basic loggers - -local function ignore() end - -setmetatableindex(logs, function(t,k) t[k] = ignore ; return ignore end) - -local report, subreport, status, settarget, setformats, settranslations - -local direct, subdirect, writer, pushtarget, poptarget - -if tex and (tex.jobname or tex.formatname) then - - -- local format = string.formatter - - local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper - - local target = "term and log" - - logs.flush = io.flush - - local formats = { } setmetatable(formats, valueiskey) - local translations = { } setmetatable(translations,valueiskey) - - writer = function(...) - write_nl(target,...) - end - - newline = function() - write_nl(target,"\n") - end - - local f_one = formatters["%-15s > %s\n"] - local f_two = formatters["%-15s >\n"] - - -- we can use formatters but best check for % then because for simple messages - -- we con't want this overhead for single messages (not that there are that - -- many; we could have a special weak table) - - report = function(a,b,c,...) - if c then - write_nl(target,f_one(translations[a],formatters[formats[b]](c,...))) - elseif b then - write_nl(target,f_one(translations[a],formats[b])) - elseif a then - write_nl(target,f_two(translations[a])) - else - write_nl(target,"\n") - end - end - - local f_one = formatters["%-15s > %s"] - local f_two = formatters["%-15s >"] - - direct = function(a,b,c,...) - if c then - return f_one(translations[a],formatters[formats[b]](c,...)) - elseif b then - return f_one(translations[a],formats[b]) - elseif a then - return f_two(translations[a]) - else - return "" - end - end - - local f_one = formatters["%-15s > %s > %s\n"] - local f_two = formatters["%-15s > %s >\n"] - - subreport = function(a,s,b,c,...) - if c then - write_nl(target,f_one(translations[a],translations[s],formatters[formats[b]](c,...))) - elseif b then - write_nl(target,f_one(translations[a],translations[s],formats[b])) - elseif a then - write_nl(target,f_two(translations[a],translations[s])) - else - write_nl(target,"\n") - end - end - - local f_one = formatters["%-15s > %s > %s"] - local f_two = formatters["%-15s > %s >"] - - subdirect = function(a,s,b,c,...) 
- if c then - return f_one(translations[a],translations[s],formatters[formats[b]](c,...)) - elseif b then - return f_one(translations[a],translations[s],formats[b]) - elseif a then - return f_two(translations[a],translations[s]) - else - return "" - end - end - - local f_one = formatters["%-15s : %s\n"] - local f_two = formatters["%-15s :\n"] - - status = function(a,b,c,...) - if c then - write_nl(target,f_one(translations[a],formatters[formats[b]](c,...))) - elseif b then - write_nl(target,f_one(translations[a],formats[b])) - elseif a then - write_nl(target,f_two(translations[a])) - else - write_nl(target,"\n") - end - end - - local targets = { - logfile = "log", - log = "log", - file = "log", - console = "term", - terminal = "term", - both = "term and log", - } - - settarget = function(whereto) - target = targets[whereto or "both"] or targets.both - if target == "term" or target == "term and log" then - logs.flush = io.flush - else - logs.flush = ignore - end - end - - local stack = { } - - pushtarget = function(newtarget) - insert(stack,target) - settarget(newtarget) - end - - poptarget = function() - if #stack > 0 then - settarget(remove(stack)) - end - end - - setformats = function(f) - formats = f - end - - settranslations = function(t) - translations = t - end - -else - - logs.flush = ignore - - writer = write_nl - - newline = function() - write_nl("\n") - end - - local f_one = formatters["%-15s | %s"] - local f_two = formatters["%-15s |"] - - report = function(a,b,c,...) - if c then - write_nl(f_one(a,formatters[b](c,...))) - elseif b then - write_nl(f_one(a,b)) - elseif a then - write_nl(f_two(a)) - else - write_nl("") - end - end - - local f_one = formatters["%-15s | %s | %s"] - local f_two = formatters["%-15s | %s |"] - - subreport = function(a,sub,b,c,...) - if c then - write_nl(f_one(a,sub,formatters[b](c,...))) - elseif b then - write_nl(f_one(a,sub,b)) - elseif a then - write_nl(f_two(a,sub)) - else - write_nl("") - end - end - - local f_one = formatters["%-15s : %s\n"] - local f_two = formatters["%-15s :\n"] - - status = function(a,b,c,...) -- not to be used in lua anyway - if c then - write_nl(f_one(a,formatters[b](c,...))) - elseif b then - write_nl(f_one(a,b)) -- b can have %'s - elseif a then - write_nl(f_two(a)) - else - write_nl("\n") - end - end - - direct = ignore - subdirect = ignore - - settarget = ignore - pushtarget = ignore - poptarget = ignore - setformats = ignore - settranslations = ignore - -end - -logs.report = report -logs.subreport = subreport -logs.status = status -logs.settarget = settarget -logs.pushtarget = pushtarget -logs.poptarget = poptarget -logs.setformats = setformats -logs.settranslations = settranslations - -logs.direct = direct -logs.subdirect = subdirect -logs.writer = writer -logs.newline = newline - --- installer - --- todo: renew (un) locks when a new one is added and wildcard - -local data, states = { }, nil - -function logs.reporter(category,subcategory) - local logger = data[category] - if not logger then - local state = false - if states == true then - state = true - elseif type(states) == "table" then - for c, _ in next, states do - if find(category,c) then - state = true - break - end - end - end - logger = { - reporters = { }, - state = state, - } - data[category] = logger - end - local reporter = logger.reporters[subcategory or "default"] - if not reporter then - if subcategory then - reporter = function(...) - if not logger.state then - subreport(category,subcategory,...) 
- end - end - logger.reporters[subcategory] = reporter - else - local tag = category - reporter = function(...) - if not logger.state then - report(category,...) - end - end - logger.reporters.default = reporter - end - end - return reporter -end - -logs.new = logs.reporter -- for old times sake - --- context specicific: this ends up in the macro stream - -local ctxreport = logs.writer - -function logs.setmessenger(m) - ctxreport = m -end - -function logs.messenger(category,subcategory) - -- we need to avoid catcode mess (todo: fast context) - if subcategory then - return function(...) - ctxreport(subdirect(category,subcategory,...)) - end - else - return function(...) - ctxreport(direct(category,...)) - end - end -end - --- so far - -local function setblocked(category,value) - if category == true then - -- lock all - category, value = "*", true - elseif category == false then - -- unlock all - category, value = "*", false - elseif value == nil then - -- lock selective - value = true - end - if category == "*" then - states = value - for k, v in next, data do - v.state = value - end - else - states = utilities.parsers.settings_to_hash(category) - for c, _ in next, states do - if data[c] then - v.state = value - else - c = topattern(c,true,true) - for k, v in next, data do - if find(k,c) then - v.state = value - end - end - end - end - end -end - -function logs.disable(category,value) - setblocked(category,value == nil and true or value) -end - -function logs.enable(category) - setblocked(category,false) -end - -function logs.categories() - return table.sortedkeys(data) -end - -function logs.show() - local n, c, s, max = 0, 0, 0, 0 - for category, v in table.sortedpairs(data) do - n = n + 1 - local state = v.state - local reporters = v.reporters - local nc = #category - if nc > c then - c = nc - end - for subcategory, _ in next, reporters do - local ns = #subcategory - if ns > c then - s = ns - end - local m = nc + ns - if m > max then - max = m - end - end - local subcategories = concat(table.sortedkeys(reporters),", ") - if state == true then - state = "disabled" - elseif state == false then - state = "enabled" - else - state = "unknown" - end - -- no new here - report("logging","category %a, subcategories %a, state %a",category,subcategories,state) - end - report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max) -end - -local delayed_reporters = { } - -setmetatableindex(delayed_reporters,function(t,k) - local v = logs.reporter(k.name) - t[k] = v - return v -end) - -function utilities.setters.report(setter,...) - delayed_reporters[setter](...) 
-end - -directives.register("logs.blocked", function(v) - setblocked(v,true) -end) - -directives.register("logs.target", function(v) - settarget(v) -end) - --- tex specific loggers (might move elsewhere) - -local report_pages = logs.reporter("pages") -- not needed but saves checking when we grep for it - -local real, user, sub - -function logs.start_page_number() - real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno --- real, user, sub = 0, 0, 0 -end - -local timing = false -local starttime = nil -local lasttime = nil - -trackers.register("pages.timing", function(v) -- only for myself (diagnostics) - starttime = os.clock() - timing = true -end) - -function logs.stop_page_number() -- the first page can includes the initialization so we omit this in average - if timing then - local elapsed, average - local stoptime = os.clock() - if not lasttime or real < 2 then - elapsed = stoptime - average = stoptime - starttime = stoptime - else - elapsed = stoptime - lasttime - average = (stoptime - starttime) / (real - 1) - end - lasttime = stoptime - if real <= 0 then - report_pages("flushing page, time %0.04f / %0.04f",elapsed,average) - elseif user <= 0 then - report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average) - elseif sub <= 0 then - report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average) - else - report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average) - end - else - if real <= 0 then - report_pages("flushing page") - elseif user <= 0 then - report_pages("flushing realpage %s",real) - elseif sub <= 0 then - report_pages("flushing realpage %s, userpage %s",real,user) - else - report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub) - end - end - logs.flush() -end - --- we don't have show_open and show_close callbacks yet - -local report_files = logs.reporter("files") -local nesting = 0 -local verbose = false -local hasscheme = url.hasscheme - -function logs.show_open(name) - -- if hasscheme(name) ~= "virtual" then - -- if verbose then - -- nesting = nesting + 1 - -- report_files("level %s, opening %s",nesting,name) - -- else - -- write(formatters["(%s"](name)) -- tex adds a space - -- end - -- end -end - -function logs.show_close(name) - -- if hasscheme(name) ~= "virtual" then - -- if verbose then - -- report_files("level %s, closing %s",nesting,name) - -- nesting = nesting - 1 - -- else - -- write(")") -- tex adds a space - -- end - -- end -end - -function logs.show_load(name) - -- if hasscheme(name) ~= "virtual" then - -- if verbose then - -- report_files("level %s, loading %s",nesting+1,name) - -- else - -- write(formatters["(%s)"](name)) - -- end - -- end -end - --- there may be scripts out there using this: - -local simple = logs.reporter("comment") - -logs.simple = simple -logs.simpleline = simple - --- obsolete - -function logs.setprogram () end -- obsolete -function logs.extendbanner() end -- obsolete -function logs.reportlines () end -- obsolete -function logs.reportbanner() end -- obsolete -function logs.reportline () end -- obsolete -function logs.simplelines () end -- obsolete -function logs.help () end -- obsolete - --- applications - --- local function reportlines(t,str) --- if str then --- for line in gmatch(str,"([^\n\r]*)[\n\r]") do --- t.report(line) --- end --- end --- end - -local Carg, C, lpegmatch = lpeg.Carg, lpeg.C, lpeg.match -local p_newline = lpeg.patterns.newline - -local linewise = ( - Carg(1) * 
C((1-p_newline)^1) / function(t,s) t.report(s) end - + Carg(1) * p_newline^2 / function(t) t.report() end - + p_newline -)^1 - -local function reportlines(t,str) - if str then - lpegmatch(linewise,str,1,t) - end -end - -local function reportbanner(t) - local banner = t.banner - if banner then - t.report(banner) - t.report() - end -end - -local function reportversion(t) - local banner = t.banner - if banner then - t.report(banner) - end -end - -local function reporthelp(t,...) - local helpinfo = t.helpinfo - if type(helpinfo) == "string" then - reportlines(t,helpinfo) - elseif type(helpinfo) == "table" then - for i=1,select("#",...) do - reportlines(t,t.helpinfo[select(i,...)]) - if i < n then - t.report() - end - end - end -end - -local function reportinfo(t) - t.report() - reportlines(t,t.moreinfo) -end - -local function reportexport(t,method) - report(t.helpinfo) -end - -local reporters = { - lines = reportlines, -- not to be overloaded - banner = reportbanner, - version = reportversion, - help = reporthelp, - info = reportinfo, - export = reportexport, -} - -local exporters = { - -- empty -} - -logs.reporters = reporters -logs.exporters = exporters - -function logs.application(t) - t.name = t.name or "unknown" - t.banner = t.banner - t.moreinfo = moreinfo - t.report = logs.reporter(t.name) - t.help = function(...) - reporters.banner(t) - reporters.help(t,...) - reporters.info(t) - end - t.export = function(...) - reporters.export(t,...) - end - t.identify = function() - reporters.banner(t) - end - t.version = function() - reporters.version(t) - end - return t -end - --- somewhat special .. will be redone (already a better solution in place in lmx) - --- logging to a file - --- local syslogname = "oeps.xxx" --- --- for i=1,10 do --- logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123") --- end - -function logs.system(whereto,process,jobname,category,...) - local message = formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...)) - for i=1,10 do - local f = io.open(whereto,"a") -- we can consider keeping the file open - if f then - f:write(message) - f:close() - break - else - sleep(0.1) - end - end -end - -local report_system = logs.reporter("system","logs") - -function logs.obsolete(old,new) - local o = loadstring("return " .. new)() - if type(o) == "function" then - return function(...) - report_system("function %a is obsolete, use %a",old,new) - loadstring(old .. "=" .. new .. " return ".. old)()(...) - end - elseif type(o) == "table" then - local t, m = { }, { } - m.__index = function(t,k) - report_system("table %a is obsolete, use %a",old,new) - m.__index, m.__newindex = o, o - return o[k] - end - m.__newindex = function(t,k,v) - report_system("table %a is obsolete, use %a",old,new) - m.__index, m.__newindex = o, o - o[k] = v - end - if libraries then - libraries.obsolete[old] = t -- true - end - setmetatable(t,m) - return t - end -end - -if utilities then - utilities.report = report_system -end - -if tex and tex.error then - function logs.texerrormessage(...) -- for the moment we put this function here - tex.error(format(...), { }) - end -else - function logs.texerrormessage(...) 
- print(format(...)) - end -end - --- this is somewhat slower but prevents out-of-order messages when print is mixed --- with texio.write - -io.stdout:setvbuf('no') -io.stderr:setvbuf('no') - --- windows: > nul 2>&1 --- unix : > null 2>&1 - -if package.helpers.report then - package.helpers.report = logs.reporter("package loader") -- when used outside mtxrun -end +if not modules then modules = { } end modules ['trac-log'] = { + version = 1.001, + comment = "companion to trac-log.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- if tex and (tex.jobname or tex.formatname) then +-- +-- -- quick hack, awaiting speedup in engine (8 -> 6.4 sec for --make with console2) +-- -- still needed for luajittex +-- +-- local texio_write_nl = texio.write_nl +-- local texio_write = texio.write +-- local io_write = io.write + +-- local write_nl = function(target,...) +-- if not io_write then +-- io_write = io.write +-- end +-- if target == "term and log" then +-- texio_write_nl("log",...) +-- texio_write_nl("term","") +-- io_write(...) +-- elseif target == "log" then +-- texio_write_nl("log",...) +-- elseif target == "term" then +-- texio_write_nl("term","") +-- io_write(...) +-- else +-- texio_write_nl("log",target,...) +-- texio_write_nl("term","") +-- io_write(target,...) +-- end +-- end + +-- local write = function(target,...) +-- if not io_write then +-- io_write = io.write +-- end +-- if target == "term and log" then +-- texio_write("log",...) +-- io_write(...) +-- elseif target == "log" then +-- texio_write("log",...) +-- elseif target == "term" then +-- io_write(...) +-- else +-- texio_write("log",target,...) +-- io_write(target,...) +-- end +-- end + +-- texio.write = write +-- texio.write_nl = write_nl +-- +-- else +-- +-- -- texlua or just lua +-- +-- end + +-- todo: less categories, more subcategories (e.g. nodes) +-- todo: split into basics and ctx specific + +local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write +local format, gmatch, find = string.format, string.gmatch, string.find +local concat, insert, remove = table.concat, table.insert, table.remove +local topattern = string.topattern +local texcount = tex and tex.count +local next, type, select = next, type, select +local utfchar = utf.char + +local setmetatableindex = table.setmetatableindex +local formatters = string.formatters + +--[[ldx-- +

    This is a prelude to a more extensive logging module. We no longer +provide <l n='xml'/> based logging as parsing is relatively easy anyway.

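    As an editorial illustration only (not part of this patch), and with a made-up
    "demo" category: the reporter factory defined below is the public face of this
    module, so client code typically looks roughly like this:

        local report_demo = logs.reporter("demo")            -- category only
        local report_load = logs.reporter("demo","loading")  -- category plus subcategory

        report_demo("initializing %s entries",123)
        report_load("file %a not found","whatever.tex")

        logs.disable("demo")   -- silence the whole category (simple wildcards work too)
        logs.enable ("demo")

    Inside a TeX run such messages go to "term and log" in the "category > message"
    layout; in a plain texlua run the fallback writer uses the "category | message"
    layout instead.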
    +--ldx]]-- + +logs = logs or { } +local logs = logs + +local moreinfo = [[ +More information about ConTeXt and the tools that come with it can be found at: +]] .. "\n" .. [[ +maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context +webpage : http://www.pragma-ade.nl / http://tex.aanhet.net +wiki : http://contextgarden.net +]] + +-- -- we extend the formatters: +-- +-- function utilities.strings.unichr(s) return "U+" .. format("%05X",s) .. " (" .. utfchar(s) .. ")" end +-- function utilities.strings.chruni(s) return utfchar(s) .. " (U+" .. format("%05X",s) .. ")" end +-- +-- utilities.strings.formatters.add ( +-- string.formatters, "uni", +-- [[unichr(%s)]], +-- [[local unichr = utilities.strings.unichr]] +-- ) +-- +-- utilities.strings.formatters.add ( +-- string.formatters, "chr", +-- [[chruni(%s)]], +-- [[local chruni = utilities.strings.chruni]] +-- ) + +utilities.strings.formatters.add ( + formatters, "unichr", + [["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]] +) + +utilities.strings.formatters.add ( + formatters, "chruni", + [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]] +) + +-- print(formatters["Missing character %!chruni! in font."](234)) +-- print(formatters["Missing character %!unichr! in font."](234)) + +-- basic loggers + +local function ignore() end + +setmetatableindex(logs, function(t,k) t[k] = ignore ; return ignore end) + +local report, subreport, status, settarget, setformats, settranslations + +local direct, subdirect, writer, pushtarget, poptarget + +if tex and (tex.jobname or tex.formatname) then + + -- local format = string.formatter + + local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper + + local target = "term and log" + + logs.flush = io.flush + + local formats = { } setmetatable(formats, valueiskey) + local translations = { } setmetatable(translations,valueiskey) + + writer = function(...) + write_nl(target,...) + end + + newline = function() + write_nl(target,"\n") + end + + local f_one = formatters["%-15s > %s\n"] + local f_two = formatters["%-15s >\n"] + + -- we can use formatters but best check for % then because for simple messages + -- we con't want this overhead for single messages (not that there are that + -- many; we could have a special weak table) + + report = function(a,b,c,...) + if c then + write_nl(target,f_one(translations[a],formatters[formats[b]](c,...))) + elseif b then + write_nl(target,f_one(translations[a],formats[b])) + elseif a then + write_nl(target,f_two(translations[a])) + else + write_nl(target,"\n") + end + end + + local f_one = formatters["%-15s > %s"] + local f_two = formatters["%-15s >"] + + direct = function(a,b,c,...) + if c then + return f_one(translations[a],formatters[formats[b]](c,...)) + elseif b then + return f_one(translations[a],formats[b]) + elseif a then + return f_two(translations[a]) + else + return "" + end + end + + local f_one = formatters["%-15s > %s > %s\n"] + local f_two = formatters["%-15s > %s >\n"] + + subreport = function(a,s,b,c,...) + if c then + write_nl(target,f_one(translations[a],translations[s],formatters[formats[b]](c,...))) + elseif b then + write_nl(target,f_one(translations[a],translations[s],formats[b])) + elseif a then + write_nl(target,f_two(translations[a],translations[s])) + else + write_nl(target,"\n") + end + end + + local f_one = formatters["%-15s > %s > %s"] + local f_two = formatters["%-15s > %s >"] + + subdirect = function(a,s,b,c,...) 
+ if c then + return f_one(translations[a],translations[s],formatters[formats[b]](c,...)) + elseif b then + return f_one(translations[a],translations[s],formats[b]) + elseif a then + return f_two(translations[a],translations[s]) + else + return "" + end + end + + local f_one = formatters["%-15s : %s\n"] + local f_two = formatters["%-15s :\n"] + + status = function(a,b,c,...) + if c then + write_nl(target,f_one(translations[a],formatters[formats[b]](c,...))) + elseif b then + write_nl(target,f_one(translations[a],formats[b])) + elseif a then + write_nl(target,f_two(translations[a])) + else + write_nl(target,"\n") + end + end + + local targets = { + logfile = "log", + log = "log", + file = "log", + console = "term", + terminal = "term", + both = "term and log", + } + + settarget = function(whereto) + target = targets[whereto or "both"] or targets.both + if target == "term" or target == "term and log" then + logs.flush = io.flush + else + logs.flush = ignore + end + end + + local stack = { } + + pushtarget = function(newtarget) + insert(stack,target) + settarget(newtarget) + end + + poptarget = function() + if #stack > 0 then + settarget(remove(stack)) + end + end + + setformats = function(f) + formats = f + end + + settranslations = function(t) + translations = t + end + +else + + logs.flush = ignore + + writer = write_nl + + newline = function() + write_nl("\n") + end + + local f_one = formatters["%-15s | %s"] + local f_two = formatters["%-15s |"] + + report = function(a,b,c,...) + if c then + write_nl(f_one(a,formatters[b](c,...))) + elseif b then + write_nl(f_one(a,b)) + elseif a then + write_nl(f_two(a)) + else + write_nl("") + end + end + + local f_one = formatters["%-15s | %s | %s"] + local f_two = formatters["%-15s | %s |"] + + subreport = function(a,sub,b,c,...) + if c then + write_nl(f_one(a,sub,formatters[b](c,...))) + elseif b then + write_nl(f_one(a,sub,b)) + elseif a then + write_nl(f_two(a,sub)) + else + write_nl("") + end + end + + local f_one = formatters["%-15s : %s\n"] + local f_two = formatters["%-15s :\n"] + + status = function(a,b,c,...) -- not to be used in lua anyway + if c then + write_nl(f_one(a,formatters[b](c,...))) + elseif b then + write_nl(f_one(a,b)) -- b can have %'s + elseif a then + write_nl(f_two(a)) + else + write_nl("\n") + end + end + + direct = ignore + subdirect = ignore + + settarget = ignore + pushtarget = ignore + poptarget = ignore + setformats = ignore + settranslations = ignore + +end + +logs.report = report +logs.subreport = subreport +logs.status = status +logs.settarget = settarget +logs.pushtarget = pushtarget +logs.poptarget = poptarget +logs.setformats = setformats +logs.settranslations = settranslations + +logs.direct = direct +logs.subdirect = subdirect +logs.writer = writer +logs.newline = newline + +-- installer + +-- todo: renew (un) locks when a new one is added and wildcard + +local data, states = { }, nil + +function logs.reporter(category,subcategory) + local logger = data[category] + if not logger then + local state = false + if states == true then + state = true + elseif type(states) == "table" then + for c, _ in next, states do + if find(category,c) then + state = true + break + end + end + end + logger = { + reporters = { }, + state = state, + } + data[category] = logger + end + local reporter = logger.reporters[subcategory or "default"] + if not reporter then + if subcategory then + reporter = function(...) + if not logger.state then + subreport(category,subcategory,...) 
+ end + end + logger.reporters[subcategory] = reporter + else + local tag = category + reporter = function(...) + if not logger.state then + report(category,...) + end + end + logger.reporters.default = reporter + end + end + return reporter +end + +logs.new = logs.reporter -- for old times sake + +-- context specicific: this ends up in the macro stream + +local ctxreport = logs.writer + +function logs.setmessenger(m) + ctxreport = m +end + +function logs.messenger(category,subcategory) + -- we need to avoid catcode mess (todo: fast context) + if subcategory then + return function(...) + ctxreport(subdirect(category,subcategory,...)) + end + else + return function(...) + ctxreport(direct(category,...)) + end + end +end + +-- so far + +local function setblocked(category,value) + if category == true then + -- lock all + category, value = "*", true + elseif category == false then + -- unlock all + category, value = "*", false + elseif value == nil then + -- lock selective + value = true + end + if category == "*" then + states = value + for k, v in next, data do + v.state = value + end + else + states = utilities.parsers.settings_to_hash(category) + for c, _ in next, states do + if data[c] then + v.state = value + else + c = topattern(c,true,true) + for k, v in next, data do + if find(k,c) then + v.state = value + end + end + end + end + end +end + +function logs.disable(category,value) + setblocked(category,value == nil and true or value) +end + +function logs.enable(category) + setblocked(category,false) +end + +function logs.categories() + return table.sortedkeys(data) +end + +function logs.show() + local n, c, s, max = 0, 0, 0, 0 + for category, v in table.sortedpairs(data) do + n = n + 1 + local state = v.state + local reporters = v.reporters + local nc = #category + if nc > c then + c = nc + end + for subcategory, _ in next, reporters do + local ns = #subcategory + if ns > c then + s = ns + end + local m = nc + ns + if m > max then + max = m + end + end + local subcategories = concat(table.sortedkeys(reporters),", ") + if state == true then + state = "disabled" + elseif state == false then + state = "enabled" + else + state = "unknown" + end + -- no new here + report("logging","category %a, subcategories %a, state %a",category,subcategories,state) + end + report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max) +end + +local delayed_reporters = { } + +setmetatableindex(delayed_reporters,function(t,k) + local v = logs.reporter(k.name) + t[k] = v + return v +end) + +function utilities.setters.report(setter,...) + delayed_reporters[setter](...) 
+end + +directives.register("logs.blocked", function(v) + setblocked(v,true) +end) + +directives.register("logs.target", function(v) + settarget(v) +end) + +-- tex specific loggers (might move elsewhere) + +local report_pages = logs.reporter("pages") -- not needed but saves checking when we grep for it + +local real, user, sub + +function logs.start_page_number() + real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +-- real, user, sub = 0, 0, 0 +end + +local timing = false +local starttime = nil +local lasttime = nil + +trackers.register("pages.timing", function(v) -- only for myself (diagnostics) + starttime = os.clock() + timing = true +end) + +function logs.stop_page_number() -- the first page can includes the initialization so we omit this in average + if timing then + local elapsed, average + local stoptime = os.clock() + if not lasttime or real < 2 then + elapsed = stoptime + average = stoptime + starttime = stoptime + else + elapsed = stoptime - lasttime + average = (stoptime - starttime) / (real - 1) + end + lasttime = stoptime + if real <= 0 then + report_pages("flushing page, time %0.04f / %0.04f",elapsed,average) + elseif user <= 0 then + report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average) + elseif sub <= 0 then + report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average) + else + report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average) + end + else + if real <= 0 then + report_pages("flushing page") + elseif user <= 0 then + report_pages("flushing realpage %s",real) + elseif sub <= 0 then + report_pages("flushing realpage %s, userpage %s",real,user) + else + report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub) + end + end + logs.flush() +end + +-- we don't have show_open and show_close callbacks yet + +local report_files = logs.reporter("files") +local nesting = 0 +local verbose = false +local hasscheme = url.hasscheme + +function logs.show_open(name) + -- if hasscheme(name) ~= "virtual" then + -- if verbose then + -- nesting = nesting + 1 + -- report_files("level %s, opening %s",nesting,name) + -- else + -- write(formatters["(%s"](name)) -- tex adds a space + -- end + -- end +end + +function logs.show_close(name) + -- if hasscheme(name) ~= "virtual" then + -- if verbose then + -- report_files("level %s, closing %s",nesting,name) + -- nesting = nesting - 1 + -- else + -- write(")") -- tex adds a space + -- end + -- end +end + +function logs.show_load(name) + -- if hasscheme(name) ~= "virtual" then + -- if verbose then + -- report_files("level %s, loading %s",nesting+1,name) + -- else + -- write(formatters["(%s)"](name)) + -- end + -- end +end + +-- there may be scripts out there using this: + +local simple = logs.reporter("comment") + +logs.simple = simple +logs.simpleline = simple + +-- obsolete + +function logs.setprogram () end -- obsolete +function logs.extendbanner() end -- obsolete +function logs.reportlines () end -- obsolete +function logs.reportbanner() end -- obsolete +function logs.reportline () end -- obsolete +function logs.simplelines () end -- obsolete +function logs.help () end -- obsolete + +-- applications + +-- local function reportlines(t,str) +-- if str then +-- for line in gmatch(str,"([^\n\r]*)[\n\r]") do +-- t.report(line) +-- end +-- end +-- end + +local Carg, C, lpegmatch = lpeg.Carg, lpeg.C, lpeg.match +local p_newline = lpeg.patterns.newline + +local linewise = ( + Carg(1) * 
C((1-p_newline)^1) / function(t,s) t.report(s) end + + Carg(1) * p_newline^2 / function(t) t.report() end + + p_newline +)^1 + +local function reportlines(t,str) + if str then + lpegmatch(linewise,str,1,t) + end +end + +local function reportbanner(t) + local banner = t.banner + if banner then + t.report(banner) + t.report() + end +end + +local function reportversion(t) + local banner = t.banner + if banner then + t.report(banner) + end +end + +local function reporthelp(t,...) + local helpinfo = t.helpinfo + if type(helpinfo) == "string" then + reportlines(t,helpinfo) + elseif type(helpinfo) == "table" then + for i=1,select("#",...) do + reportlines(t,t.helpinfo[select(i,...)]) + if i < n then + t.report() + end + end + end +end + +local function reportinfo(t) + t.report() + reportlines(t,t.moreinfo) +end + +local function reportexport(t,method) + report(t.helpinfo) +end + +local reporters = { + lines = reportlines, -- not to be overloaded + banner = reportbanner, + version = reportversion, + help = reporthelp, + info = reportinfo, + export = reportexport, +} + +local exporters = { + -- empty +} + +logs.reporters = reporters +logs.exporters = exporters + +function logs.application(t) + t.name = t.name or "unknown" + t.banner = t.banner + t.moreinfo = moreinfo + t.report = logs.reporter(t.name) + t.help = function(...) + reporters.banner(t) + reporters.help(t,...) + reporters.info(t) + end + t.export = function(...) + reporters.export(t,...) + end + t.identify = function() + reporters.banner(t) + end + t.version = function() + reporters.version(t) + end + return t +end + +-- somewhat special .. will be redone (already a better solution in place in lmx) + +-- logging to a file + +-- local syslogname = "oeps.xxx" +-- +-- for i=1,10 do +-- logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123") +-- end + +function logs.system(whereto,process,jobname,category,...) + local message = formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...)) + for i=1,10 do + local f = io.open(whereto,"a") -- we can consider keeping the file open + if f then + f:write(message) + f:close() + break + else + sleep(0.1) + end + end +end + +local report_system = logs.reporter("system","logs") + +function logs.obsolete(old,new) + local o = loadstring("return " .. new)() + if type(o) == "function" then + return function(...) + report_system("function %a is obsolete, use %a",old,new) + loadstring(old .. "=" .. new .. " return ".. old)()(...) + end + elseif type(o) == "table" then + local t, m = { }, { } + m.__index = function(t,k) + report_system("table %a is obsolete, use %a",old,new) + m.__index, m.__newindex = o, o + return o[k] + end + m.__newindex = function(t,k,v) + report_system("table %a is obsolete, use %a",old,new) + m.__index, m.__newindex = o, o + o[k] = v + end + if libraries then + libraries.obsolete[old] = t -- true + end + setmetatable(t,m) + return t + end +end + +if utilities then + utilities.report = report_system +end + +if tex and tex.error then + function logs.texerrormessage(...) -- for the moment we put this function here + tex.error(format(...), { }) + end +else + function logs.texerrormessage(...) 
+ print(format(...)) + end +end + +-- this is somewhat slower but prevents out-of-order messages when print is mixed +-- with texio.write + +io.stdout:setvbuf('no') +io.stderr:setvbuf('no') + +-- windows: > nul 2>&1 +-- unix : > null 2>&1 + +if package.helpers.report then + package.helpers.report = logs.reporter("package loader") -- when used outside mtxrun +end diff --git a/tex/context/base/trac-pro.lua b/tex/context/base/trac-pro.lua index d6e0d0339..401fa9275 100644 --- a/tex/context/base/trac-pro.lua +++ b/tex/context/base/trac-pro.lua @@ -1,208 +1,208 @@ -if not modules then modules = { } end modules ['trac-pro'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type - --- The protection implemented here is probably not that tight but good enough to catch --- problems due to naive usage. --- --- There's a more extensive version (trac-xxx.lua) that supports nesting. --- --- This will change when we have _ENV in lua 5.2+ - -local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end) - -local report_system = logs.reporter("system","protection") - -namespaces = namespaces or { } -local namespaces = namespaces - -local registered = { } - -local function report_index(k,name) - if trace_namespaces then - report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback()) - else - report_system("reference to %a in protected namespace %a",k,name) - end -end - -local function report_newindex(k,name) - if trace_namespaces then - report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback()) - else - report_system("assignment to %a in protected namespace %a",k,name) - end -end - -local function register(name) - local data = name == "global" and _G or _G[name] - if not data then - return -- error - end - registered[name] = data - local m = getmetatable(data) - if not m then - m = { } - setmetatable(data,m) - end - local index, newindex = { }, { } - m.__saved__index = m.__index - m.__no__index = function(t,k) - if not index[k] then - index[k] = true - report_index(k,name) - end - return nil - end - m.__saved__newindex = m.__newindex - m.__no__newindex = function(t,k,v) - if not newindex[k] then - newindex[k] = true - report_newindex(k,name) - end - rawset(t,k,v) - end - m.__protection__depth = 0 -end - -local function private(name) -- maybe save name - local data = registered[name] - if not data then - data = _G[name] - if not data then - data = { } - _G[name] = data - end - register(name) - end - return data -end - -local function protect(name) - local data = registered[name] - if not data then - return - end - local m = getmetatable(data) - local pd = m.__protection__depth - if pd > 0 then - m.__protection__depth = pd + 1 - else - m.__save_d_index, m.__saved__newindex = m.__index, m.__newindex - m.__index, m.__newindex = m.__no__index, m.__no__newindex - m.__protection__depth = 1 - end -end - -local function unprotect(name) - local data = registered[name] - if not data then - return - end - local m = getmetatable(data) - local pd = m.__protection__depth - if pd > 1 then - m.__protection__depth = pd - 1 - else - m.__index, m.__newindex = m.__saved__index, m.__saved__newindex - m.__protection__depth = 0 - end -end - -local function protectall() - for 
name, _ in next, registered do - if name ~= "global" then - protect(name) - end - end -end - -local function unprotectall() - for name, _ in next, registered do - if name ~= "global" then - unprotect(name) - end - end -end - -namespaces.register = register -- register when defined -namespaces.private = private -- allocate and register if needed -namespaces.protect = protect -namespaces.unprotect = unprotect -namespaces.protectall = protectall -namespaces.unprotectall = unprotectall - -namespaces.private("namespaces") registered = { } register("global") -- unreachable - -directives.register("system.protect", function(v) - if v then - protectall() - else - unprotectall() - end -end) - -directives.register("system.checkglobals", function(v) - if v then - report_system("enabling global namespace guard") - protect("global") - else - report_system("disabling global namespace guard") - unprotect("global") - end -end) - --- dummy section (will go to luat-dum.lua) - ---~ if not namespaces.private then ---~ -- somewhat protected ---~ local registered = { } ---~ function namespaces.private(name) ---~ local data = registered[name] ---~ if data then ---~ return data ---~ end ---~ local data = _G[name] ---~ if not data then ---~ data = { } ---~ _G[name] = data ---~ end ---~ registered[name] = data ---~ return data ---~ end ---~ function namespaces.protectall(list) ---~ for name, data in next, list or registered do ---~ setmetatable(data, { __newindex = function() print(string.format("table %s is protected",name)) end }) ---~ end ---~ end ---~ namespaces.protectall { namespaces = namespaces } ---~ end - ---~ directives.enable("system.checkglobals") - ---~ namespaces.register("resolvers","trackers") ---~ namespaces.protect("resolvers") ---~ namespaces.protect("resolvers") ---~ namespaces.protect("resolvers") ---~ namespaces.unprotect("resolvers") ---~ namespaces.unprotect("resolvers") ---~ namespaces.unprotect("resolvers") ---~ namespaces.protect("trackers") - ---~ resolvers.x = true ---~ resolvers.y = true ---~ trackers.a = "" ---~ resolvers.z = true ---~ oeps = { } - ---~ resolvers = namespaces.private("resolvers") ---~ fonts = namespaces.private("fonts") ---~ directives.enable("system.protect") ---~ namespaces.protectall() ---~ resolvers.xx = { } +if not modules then modules = { } end modules ['trac-pro'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type + +-- The protection implemented here is probably not that tight but good enough to catch +-- problems due to naive usage. +-- +-- There's a more extensive version (trac-xxx.lua) that supports nesting. 
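    A minimal editorial sketch (not part of this patch) of what the protection described
    here amounts to in practice; the key names are made up:

        local fonts = namespaces.private("fonts")   -- allocate the table if needed and register it
        namespaces.protect("fonts")

        local x = fonts.unknownkey    -- reported once as a stray reference, returns nil
        fonts.newkey = 123            -- reported once as a stray assignment, but still stored

        namespaces.unprotect("fonts") -- protect/unprotect calls nest via __protection__depth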
+-- +-- This will change when we have _ENV in lua 5.2+ + +local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end) + +local report_system = logs.reporter("system","protection") + +namespaces = namespaces or { } +local namespaces = namespaces + +local registered = { } + +local function report_index(k,name) + if trace_namespaces then + report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback()) + else + report_system("reference to %a in protected namespace %a",k,name) + end +end + +local function report_newindex(k,name) + if trace_namespaces then + report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback()) + else + report_system("assignment to %a in protected namespace %a",k,name) + end +end + +local function register(name) + local data = name == "global" and _G or _G[name] + if not data then + return -- error + end + registered[name] = data + local m = getmetatable(data) + if not m then + m = { } + setmetatable(data,m) + end + local index, newindex = { }, { } + m.__saved__index = m.__index + m.__no__index = function(t,k) + if not index[k] then + index[k] = true + report_index(k,name) + end + return nil + end + m.__saved__newindex = m.__newindex + m.__no__newindex = function(t,k,v) + if not newindex[k] then + newindex[k] = true + report_newindex(k,name) + end + rawset(t,k,v) + end + m.__protection__depth = 0 +end + +local function private(name) -- maybe save name + local data = registered[name] + if not data then + data = _G[name] + if not data then + data = { } + _G[name] = data + end + register(name) + end + return data +end + +local function protect(name) + local data = registered[name] + if not data then + return + end + local m = getmetatable(data) + local pd = m.__protection__depth + if pd > 0 then + m.__protection__depth = pd + 1 + else + m.__save_d_index, m.__saved__newindex = m.__index, m.__newindex + m.__index, m.__newindex = m.__no__index, m.__no__newindex + m.__protection__depth = 1 + end +end + +local function unprotect(name) + local data = registered[name] + if not data then + return + end + local m = getmetatable(data) + local pd = m.__protection__depth + if pd > 1 then + m.__protection__depth = pd - 1 + else + m.__index, m.__newindex = m.__saved__index, m.__saved__newindex + m.__protection__depth = 0 + end +end + +local function protectall() + for name, _ in next, registered do + if name ~= "global" then + protect(name) + end + end +end + +local function unprotectall() + for name, _ in next, registered do + if name ~= "global" then + unprotect(name) + end + end +end + +namespaces.register = register -- register when defined +namespaces.private = private -- allocate and register if needed +namespaces.protect = protect +namespaces.unprotect = unprotect +namespaces.protectall = protectall +namespaces.unprotectall = unprotectall + +namespaces.private("namespaces") registered = { } register("global") -- unreachable + +directives.register("system.protect", function(v) + if v then + protectall() + else + unprotectall() + end +end) + +directives.register("system.checkglobals", function(v) + if v then + report_system("enabling global namespace guard") + protect("global") + else + report_system("disabling global namespace guard") + unprotect("global") + end +end) + +-- dummy section (will go to luat-dum.lua) + +--~ if not namespaces.private then +--~ -- somewhat protected +--~ local registered = { } +--~ function namespaces.private(name) +--~ local data = registered[name] +--~ 
if data then +--~ return data +--~ end +--~ local data = _G[name] +--~ if not data then +--~ data = { } +--~ _G[name] = data +--~ end +--~ registered[name] = data +--~ return data +--~ end +--~ function namespaces.protectall(list) +--~ for name, data in next, list or registered do +--~ setmetatable(data, { __newindex = function() print(string.format("table %s is protected",name)) end }) +--~ end +--~ end +--~ namespaces.protectall { namespaces = namespaces } +--~ end + +--~ directives.enable("system.checkglobals") + +--~ namespaces.register("resolvers","trackers") +--~ namespaces.protect("resolvers") +--~ namespaces.protect("resolvers") +--~ namespaces.protect("resolvers") +--~ namespaces.unprotect("resolvers") +--~ namespaces.unprotect("resolvers") +--~ namespaces.unprotect("resolvers") +--~ namespaces.protect("trackers") + +--~ resolvers.x = true +--~ resolvers.y = true +--~ trackers.a = "" +--~ resolvers.z = true +--~ oeps = { } + +--~ resolvers = namespaces.private("resolvers") +--~ fonts = namespaces.private("fonts") +--~ directives.enable("system.protect") +--~ namespaces.protectall() +--~ resolvers.xx = { } diff --git a/tex/context/base/trac-set.lua b/tex/context/base/trac-set.lua index 95fdc43b3..5ab189f55 100644 --- a/tex/context/base/trac-set.lua +++ b/tex/context/base/trac-set.lua @@ -1,379 +1,379 @@ -if not modules then modules = { } end modules ['trac-set'] = { -- might become util-set.lua - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- maybe this should be util-set.lua - -local type, next, tostring = type, next, tostring -local concat = table.concat -local format, find, lower, gsub, topattern = string.format, string.find, string.lower, string.gsub, string.topattern -local is_boolean = string.is_boolean -local settings_to_hash = utilities.parsers.settings_to_hash -local allocate = utilities.storage.allocate - -utilities = utilities or { } -local utilities = utilities - -local setters = utilities.setters or { } -utilities.setters = setters - -local data = { } - --- We can initialize from the cnf file. This is sort of tricky as --- later defined setters also need to be initialized then. If set --- this way, we need to ensure that they are not reset later on. - -local trace_initialize = false -- only for testing during development - -function setters.initialize(filename,name,values) -- filename only for diagnostics - local setter = data[name] - if setter then - frozen = true -- don't permitoverload - -- trace_initialize = true - local data = setter.data - if data then - for key, newvalue in next, values do - local newvalue = is_boolean(newvalue,newvalue) - local functions = data[key] - if functions then - local oldvalue = functions.value - if functions.frozen then - if trace_initialize then - setter.report("%s: %a is %s to %a",filename,key,"frozen",oldvalue) - end - elseif #functions > 0 and not oldvalue then --- elseif #functions > 0 and oldvalue == nil then - if trace_initialize then - setter.report("%s: %a is %s to %a",filename,key,"set",newvalue) - end - for i=1,#functions do - functions[i](newvalue) - end - functions.value = newvalue - functions.frozen = functions.frozen or frozen - else - if trace_initialize then - setter.report("%s: %a is %s as %a",filename,key,"kept",oldvalue) - end - end - else - -- we do a simple preregistration i.e. 
not in the - -- list as it might be an obsolete entry - functions = { default = newvalue, frozen = frozen } - data[key] = functions - if trace_initialize then - setter.report("%s: %a is %s to %a",filename,key,"defaulted",newvalue) - end - end - end - return true - end - end -end - --- user interface code - -local function set(t,what,newvalue) - local data = t.data - if not data.frozen then - local done = t.done - if type(what) == "string" then - what = settings_to_hash(what) -- inefficient but ok - end - if type(what) ~= "table" then - return - end - if not done then -- catch ... why not set? - done = { } - t.done = done - end - for w, value in next, what do - if value == "" then - value = newvalue - elseif not value then - value = false -- catch nil - else - value = is_boolean(value,value) - end - w = topattern(w,true,true) - for name, functions in next, data do - if done[name] then - -- prevent recursion due to wildcards - elseif find(name,w) then - done[name] = true - for i=1,#functions do - functions[i](value) - end - functions.value = value - end - end - end - end -end - -local function reset(t) - local data = t.data - if not data.frozen then - for name, functions in next, data do - for i=1,#functions do - functions[i](false) - end - functions.value = false - end - end -end - -local function enable(t,what) - set(t,what,true) -end - -local function disable(t,what) - local data = t.data - if not what or what == "" then - t.done = { } - reset(t) - else - set(t,what,false) - end -end - -function setters.register(t,what,...) - local data = t.data - what = lower(what) - local functions = data[what] - if not functions then - functions = { } - data[what] = functions - if trace_initialize then - t.report("defining %a",what) - end - end - local default = functions.default -- can be set from cnf file - for i=1,select("#",...) do - local fnc = select(i,...) 
- local typ = type(fnc) - if typ == "string" then - if trace_initialize then - t.report("coupling %a to %a",what,fnc) - end - local s = fnc -- else wrong reference - fnc = function(value) set(t,s,value) end - elseif typ ~= "function" then - fnc = nil - end - if fnc then - functions[#functions+1] = fnc - -- default: set at command line or in cnf file - -- value : set in tex run (needed when loading runtime) - local value = functions.value or default - if value ~= nil then - fnc(value) - functions.value = value - end - end - end - return false -- so we can use it in an assignment -end - -function setters.enable(t,what) - local e = t.enable - t.enable, t.done = enable, { } - enable(t,what) - t.enable, t.done = e, { } -end - -function setters.disable(t,what) - local e = t.disable - t.disable, t.done = disable, { } - disable(t,what) - t.disable, t.done = e, { } -end - -function setters.reset(t) - t.done = { } - reset(t) -end - -function setters.list(t) -- pattern - local list = table.sortedkeys(t.data) - local user, system = { }, { } - for l=1,#list do - local what = list[l] - if find(what,"^%*") then - system[#system+1] = what - else - user[#user+1] = what - end - end - return user, system -end - -function setters.show(t) - local category = t.name - local list = setters.list(t) - t.report() - for k=1,#list do - local name = list[k] - local functions = t.data[name] - if functions then - local value, default, modules = functions.value, functions.default, #functions - value = value == nil and "unset" or tostring(value) - default = default == nil and "unset" or tostring(default) - t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value) - end - end - t.report() -end - --- we could have used a bit of oo and the trackers:enable syntax but --- there is already a lot of code around using the singular tracker - --- we could make this into a module but we also want the rest avaliable - -local enable, disable, register, list, show = setters.enable, setters.disable, setters.register, setters.list, setters.show - -function setters.report(setter,...) - print(format("%-15s : %s\n",setter.name,format(...))) -end - -local function default(setter,name) - local d = setter.data[name] - return d and d.default -end - -local function value(setter,name) - local d = setter.data[name] - return d and (d.value or d.default) -end - -function setters.new(name) -- we could use foo:bar syntax (but not used that often) - local setter -- we need to access it in setter itself - setter = { - data = allocate(), -- indexed, but also default and value fields - name = name, - report = function(...) setters.report (setter,...) end, - enable = function(...) enable (setter,...) end, - disable = function(...) disable (setter,...) end, - register = function(...) register(setter,...) end, - list = function(...) list (setter,...) end, - show = function(...) show (setter,...) end, - default = function(...) return default (setter,...) end, - value = function(...) return value (setter,...) end, - } - data[name] = setter - return setter -end - -trackers = setters.new("trackers") -directives = setters.new("directives") -experiments = setters.new("experiments") - -local t_enable, t_disable = trackers .enable, trackers .disable -local d_enable, d_disable = directives .enable, directives .disable -local e_enable, e_disable = experiments.enable, experiments.disable - --- nice trick: we overload two of the directives related functions with variants that --- do tracing (itself using a tracker) .. 
proof of concept - -local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end) -local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end) - -function directives.enable(...) - if trace_directives then - directives.report("enabling: % t",{...}) - end - d_enable(...) -end - -function directives.disable(...) - if trace_directives then - directives.report("disabling: % t",{...}) - end - d_disable(...) -end - -function experiments.enable(...) - if trace_experiments then - experiments.report("enabling: % t",{...}) - end - e_enable(...) -end - -function experiments.disable(...) - if trace_experiments then - experiments.report("disabling: % t",{...}) - end - e_disable(...) -end - --- a useful example - -directives.register("system.nostatistics", function(v) - if statistics then - statistics.enable = not v - else - -- forget about it - end -end) - -directives.register("system.nolibraries", function(v) - if libraries then - libraries = nil -- we discard this tracing for security - else - -- no libraries defined - end -end) - --- experiment - -if environment then - - -- The engineflags are known earlier than environment.arguments but maybe we - -- need to handle them both as the later are parsed differently. The c: prefix - -- is used by mtx-context to isolate the flags from those that concern luatex. - - local engineflags = environment.engineflags - - if engineflags then - local list = engineflags["c:trackers"] or engineflags["trackers"] - if type(list) == "string" then - setters.initialize("commandline flags","trackers",settings_to_hash(list)) - -- t_enable(list) - end - local list = engineflags["c:directives"] or engineflags["directives"] - if type(list) == "string" then - setters.initialize("commandline flags","directives", settings_to_hash(list)) - -- d_enable(list) - end - end - -end - --- here - -if texconfig then - - -- this happens too late in ini mode but that is no problem - - local function set(k,v) - v = tonumber(v) - if v then - texconfig[k] = v - end - end - - directives.register("luatex.expanddepth", function(v) set("expand_depth",v) end) - directives.register("luatex.hashextra", function(v) set("hash_extra",v) end) - directives.register("luatex.nestsize", function(v) set("nest_size",v) end) - directives.register("luatex.maxinopen", function(v) set("max_in_open",v) end) - directives.register("luatex.maxprintline", function(v) set("max_print_line",v) end) - directives.register("luatex.maxstrings", function(v) set("max_strings",v) end) - directives.register("luatex.paramsize", function(v) set("param_size",v) end) - directives.register("luatex.savesize", function(v) set("save_size",v) end) - directives.register("luatex.stacksize", function(v) set("stack_size",v) end) - -end +if not modules then modules = { } end modules ['trac-set'] = { -- might become util-set.lua + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- maybe this should be util-set.lua + +local type, next, tostring = type, next, tostring +local concat = table.concat +local format, find, lower, gsub, topattern = string.format, string.find, string.lower, string.gsub, string.topattern +local is_boolean = string.is_boolean +local settings_to_hash = utilities.parsers.settings_to_hash +local allocate = 
utilities.storage.allocate + +utilities = utilities or { } +local utilities = utilities + +local setters = utilities.setters or { } +utilities.setters = setters + +local data = { } + +-- We can initialize from the cnf file. This is sort of tricky as +-- later defined setters also need to be initialized then. If set +-- this way, we need to ensure that they are not reset later on. + +local trace_initialize = false -- only for testing during development + +function setters.initialize(filename,name,values) -- filename only for diagnostics + local setter = data[name] + if setter then + frozen = true -- don't permitoverload + -- trace_initialize = true + local data = setter.data + if data then + for key, newvalue in next, values do + local newvalue = is_boolean(newvalue,newvalue) + local functions = data[key] + if functions then + local oldvalue = functions.value + if functions.frozen then + if trace_initialize then + setter.report("%s: %a is %s to %a",filename,key,"frozen",oldvalue) + end + elseif #functions > 0 and not oldvalue then +-- elseif #functions > 0 and oldvalue == nil then + if trace_initialize then + setter.report("%s: %a is %s to %a",filename,key,"set",newvalue) + end + for i=1,#functions do + functions[i](newvalue) + end + functions.value = newvalue + functions.frozen = functions.frozen or frozen + else + if trace_initialize then + setter.report("%s: %a is %s as %a",filename,key,"kept",oldvalue) + end + end + else + -- we do a simple preregistration i.e. not in the + -- list as it might be an obsolete entry + functions = { default = newvalue, frozen = frozen } + data[key] = functions + if trace_initialize then + setter.report("%s: %a is %s to %a",filename,key,"defaulted",newvalue) + end + end + end + return true + end + end +end + +-- user interface code + +local function set(t,what,newvalue) + local data = t.data + if not data.frozen then + local done = t.done + if type(what) == "string" then + what = settings_to_hash(what) -- inefficient but ok + end + if type(what) ~= "table" then + return + end + if not done then -- catch ... why not set? + done = { } + t.done = done + end + for w, value in next, what do + if value == "" then + value = newvalue + elseif not value then + value = false -- catch nil + else + value = is_boolean(value,value) + end + w = topattern(w,true,true) + for name, functions in next, data do + if done[name] then + -- prevent recursion due to wildcards + elseif find(name,w) then + done[name] = true + for i=1,#functions do + functions[i](value) + end + functions.value = value + end + end + end + end +end + +local function reset(t) + local data = t.data + if not data.frozen then + for name, functions in next, data do + for i=1,#functions do + functions[i](false) + end + functions.value = false + end + end +end + +local function enable(t,what) + set(t,what,true) +end + +local function disable(t,what) + local data = t.data + if not what or what == "" then + t.done = { } + reset(t) + else + set(t,what,false) + end +end + +function setters.register(t,what,...) + local data = t.data + what = lower(what) + local functions = data[what] + if not functions then + functions = { } + data[what] = functions + if trace_initialize then + t.report("defining %a",what) + end + end + local default = functions.default -- can be set from cnf file + for i=1,select("#",...) do + local fnc = select(i,...) 
+ local typ = type(fnc) + if typ == "string" then + if trace_initialize then + t.report("coupling %a to %a",what,fnc) + end + local s = fnc -- else wrong reference + fnc = function(value) set(t,s,value) end + elseif typ ~= "function" then + fnc = nil + end + if fnc then + functions[#functions+1] = fnc + -- default: set at command line or in cnf file + -- value : set in tex run (needed when loading runtime) + local value = functions.value or default + if value ~= nil then + fnc(value) + functions.value = value + end + end + end + return false -- so we can use it in an assignment +end + +function setters.enable(t,what) + local e = t.enable + t.enable, t.done = enable, { } + enable(t,what) + t.enable, t.done = e, { } +end + +function setters.disable(t,what) + local e = t.disable + t.disable, t.done = disable, { } + disable(t,what) + t.disable, t.done = e, { } +end + +function setters.reset(t) + t.done = { } + reset(t) +end + +function setters.list(t) -- pattern + local list = table.sortedkeys(t.data) + local user, system = { }, { } + for l=1,#list do + local what = list[l] + if find(what,"^%*") then + system[#system+1] = what + else + user[#user+1] = what + end + end + return user, system +end + +function setters.show(t) + local category = t.name + local list = setters.list(t) + t.report() + for k=1,#list do + local name = list[k] + local functions = t.data[name] + if functions then + local value, default, modules = functions.value, functions.default, #functions + value = value == nil and "unset" or tostring(value) + default = default == nil and "unset" or tostring(default) + t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value) + end + end + t.report() +end + +-- we could have used a bit of oo and the trackers:enable syntax but +-- there is already a lot of code around using the singular tracker + +-- we could make this into a module but we also want the rest avaliable + +local enable, disable, register, list, show = setters.enable, setters.disable, setters.register, setters.list, setters.show + +function setters.report(setter,...) + print(format("%-15s : %s\n",setter.name,format(...))) +end + +local function default(setter,name) + local d = setter.data[name] + return d and d.default +end + +local function value(setter,name) + local d = setter.data[name] + return d and (d.value or d.default) +end + +function setters.new(name) -- we could use foo:bar syntax (but not used that often) + local setter -- we need to access it in setter itself + setter = { + data = allocate(), -- indexed, but also default and value fields + name = name, + report = function(...) setters.report (setter,...) end, + enable = function(...) enable (setter,...) end, + disable = function(...) disable (setter,...) end, + register = function(...) register(setter,...) end, + list = function(...) list (setter,...) end, + show = function(...) show (setter,...) end, + default = function(...) return default (setter,...) end, + value = function(...) return value (setter,...) end, + } + data[name] = setter + return setter +end + +trackers = setters.new("trackers") +directives = setters.new("directives") +experiments = setters.new("experiments") + +local t_enable, t_disable = trackers .enable, trackers .disable +local d_enable, d_disable = directives .enable, directives .disable +local e_enable, e_disable = experiments.enable, experiments.disable + +-- nice trick: we overload two of the directives related functions with variants that +-- do tracing (itself using a tracker) .. 
proof of concept + +local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end) +local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end) + +function directives.enable(...) + if trace_directives then + directives.report("enabling: % t",{...}) + end + d_enable(...) +end + +function directives.disable(...) + if trace_directives then + directives.report("disabling: % t",{...}) + end + d_disable(...) +end + +function experiments.enable(...) + if trace_experiments then + experiments.report("enabling: % t",{...}) + end + e_enable(...) +end + +function experiments.disable(...) + if trace_experiments then + experiments.report("disabling: % t",{...}) + end + e_disable(...) +end + +-- a useful example + +directives.register("system.nostatistics", function(v) + if statistics then + statistics.enable = not v + else + -- forget about it + end +end) + +directives.register("system.nolibraries", function(v) + if libraries then + libraries = nil -- we discard this tracing for security + else + -- no libraries defined + end +end) + +-- experiment + +if environment then + + -- The engineflags are known earlier than environment.arguments but maybe we + -- need to handle them both as the later are parsed differently. The c: prefix + -- is used by mtx-context to isolate the flags from those that concern luatex. + + local engineflags = environment.engineflags + + if engineflags then + local list = engineflags["c:trackers"] or engineflags["trackers"] + if type(list) == "string" then + setters.initialize("commandline flags","trackers",settings_to_hash(list)) + -- t_enable(list) + end + local list = engineflags["c:directives"] or engineflags["directives"] + if type(list) == "string" then + setters.initialize("commandline flags","directives", settings_to_hash(list)) + -- d_enable(list) + end + end + +end + +-- here + +if texconfig then + + -- this happens too late in ini mode but that is no problem + + local function set(k,v) + v = tonumber(v) + if v then + texconfig[k] = v + end + end + + directives.register("luatex.expanddepth", function(v) set("expand_depth",v) end) + directives.register("luatex.hashextra", function(v) set("hash_extra",v) end) + directives.register("luatex.nestsize", function(v) set("nest_size",v) end) + directives.register("luatex.maxinopen", function(v) set("max_in_open",v) end) + directives.register("luatex.maxprintline", function(v) set("max_print_line",v) end) + directives.register("luatex.maxstrings", function(v) set("max_strings",v) end) + directives.register("luatex.paramsize", function(v) set("param_size",v) end) + directives.register("luatex.savesize", function(v) set("save_size",v) end) + directives.register("luatex.stacksize", function(v) set("stack_size",v) end) + +end diff --git a/tex/context/base/trac-tex.lua b/tex/context/base/trac-tex.lua index 7e3406073..aecf1799b 100644 --- a/tex/context/base/trac-tex.lua +++ b/tex/context/base/trac-tex.lua @@ -1,75 +1,75 @@ -if not modules then modules = { } end modules ['trac-tex'] = { - version = 1.001, - comment = "companion to trac-deb.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- moved from trac-deb.lua - -local format = string.format - -local texhashtokens = tex.hashtokens - -local trackers = trackers - -local saved = { } - -function trackers.savehash() - 
saved = texhashtokens() -end - -function trackers.dumphashtofile(filename,delta) - local list, hash, command_name = { }, texhashtokens(), token.command_name - for name, token in next, hash do - if not delta or not saved[name] then - -- token: cmd, chr, csid -- combination cmd,chr determines name - local category = command_name(token) - local dk = list[category] - if not dk then - -- a bit funny names but this sorts better (easier to study) - dk = { names = { }, found = 0, code = token[1] } - list[category] = dk - end - dk.names[name] = { token[2], token[3] } - dk.found = dk.found + 1 - end - end - io.savedata(filename or tex.jobname .. "-hash.log",table.serialize(list,true)) -end - -local delta = nil - -local function dump_hash(wanteddelta) - if delta == nil then - saved = saved or texhashtokens() -- no need for trackers.dump_hash - luatex.registerstopactions(1,function() dump_hash(nil,wanteddelta) end) -- at front - end - delta = wanteddelta -end - -directives.register("system.dumphash", function() dump_hash(false) end) -directives.register("system.dumpdelta", function() dump_hash(true ) end) - -local report_dump = logs.reporter("resolvers","dump") - -local function saveusedfilesintrees(format) - local data = { - jobname = environment.jobname or "?", - version = environment.version or "?", - kind = environment.kind or "?", - files = resolvers.instance.foundintrees - } - local filename = file.replacesuffix(environment.jobname or "context-job",'jlg') - if format == "lua" then - io.savedata(filename,table.serialize(data,true)) - else - io.savedata(filename,table.toxml(data,"job")) - end -end - -directives.register("system.dumpfiles", function(v) - luatex.registerstopactions(function() saveusedfilesintrees(v) end) -end) - +if not modules then modules = { } end modules ['trac-tex'] = { + version = 1.001, + comment = "companion to trac-deb.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- moved from trac-deb.lua + +local format = string.format + +local texhashtokens = tex.hashtokens + +local trackers = trackers + +local saved = { } + +function trackers.savehash() + saved = texhashtokens() +end + +function trackers.dumphashtofile(filename,delta) + local list, hash, command_name = { }, texhashtokens(), token.command_name + for name, token in next, hash do + if not delta or not saved[name] then + -- token: cmd, chr, csid -- combination cmd,chr determines name + local category = command_name(token) + local dk = list[category] + if not dk then + -- a bit funny names but this sorts better (easier to study) + dk = { names = { }, found = 0, code = token[1] } + list[category] = dk + end + dk.names[name] = { token[2], token[3] } + dk.found = dk.found + 1 + end + end + io.savedata(filename or tex.jobname .. 
"-hash.log",table.serialize(list,true)) +end + +local delta = nil + +local function dump_hash(wanteddelta) + if delta == nil then + saved = saved or texhashtokens() -- no need for trackers.dump_hash + luatex.registerstopactions(1,function() dump_hash(nil,wanteddelta) end) -- at front + end + delta = wanteddelta +end + +directives.register("system.dumphash", function() dump_hash(false) end) +directives.register("system.dumpdelta", function() dump_hash(true ) end) + +local report_dump = logs.reporter("resolvers","dump") + +local function saveusedfilesintrees(format) + local data = { + jobname = environment.jobname or "?", + version = environment.version or "?", + kind = environment.kind or "?", + files = resolvers.instance.foundintrees + } + local filename = file.replacesuffix(environment.jobname or "context-job",'jlg') + if format == "lua" then + io.savedata(filename,table.serialize(data,true)) + else + io.savedata(filename,table.toxml(data,"job")) + end +end + +directives.register("system.dumpfiles", function(v) + luatex.registerstopactions(function() saveusedfilesintrees(v) end) +end) + diff --git a/tex/context/base/trac-tim.lua b/tex/context/base/trac-tim.lua index 15ac9bf1b..e62e7e149 100644 --- a/tex/context/base/trac-tim.lua +++ b/tex/context/base/trac-tim.lua @@ -1,138 +1,138 @@ -if not modules then modules = { } end modules ['trac-tim'] = { - version = 1.001, - comment = "companion to m-timing.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, gsub = string.format, string.gsub -local concat, sort = table.concat, table.sort -local next, tonumber = next, tonumber - -moduledata = moduledata or { } -local progress = moduledata.progress or { } -moduledata.progress = progress - -local report_timing = logs.reporter("timing") - -if not nodes then nodes = { } end -- when loaded in mtxrun - -progress.parameters = nodes and nodes.snapshots.getparameters -progress.defaultfilename = ((tex and tex.jobname) or "whatever") .. "-luatex-progress" - --- storage - -function progress.store() - nodes.snapshots.takesample() -end - -function progress.save(name) - local filename = (name or progress.defaultfilename) .. ".lut" - report_timing("saving data in %a",filename) - table.save(filename,nodes.snapshots.getsamples()) - nodes.snapshots.resetsamples() -end - --- conversion - -local processed = { } -local parameters = progress.parameters() - -local function convert(name) - name = name ~= "" and name or progress.defaultfilename - if not processed[name] then - local names, top, bot, pages, paths, keys = { }, { }, { }, 0, { }, { } - local data = table.load(name .. 
".lut") - if data then - pages = #data - if pages > 1 then - local factor = 100 - for k=1,#data do - for k, v in next, data[k].node_memory do - keys[k] = true - end - end - for k=1,#data do - local m = data[k].node_memory - for k, v in next, keys do - if not m[k] then m[k] = 0 end - end - end - local function path(tag,subtag) - local b, t, s = nil, nil, { } - for k=1,#data do - local v = data[k][tag] - v = v and (subtag and v[subtag]) or v - if v then - v = tonumber(v) - if b then - if v > t then t = v end - if v < b then b = v end - else - t = v - b = v - end - s[k] = v - else - s[k] = 0 - end - end - local tagname = subtag or tag - top[tagname] = gsub(format("%.3f",t),"%.000$","") - bot[tagname] = gsub(format("%.3f",b),"%.000$","") - local delta = t-b - if delta == 0 then - delta = 1 - else - delta = factor/delta - end - for k=1,#s do - s[k] = format("(%s,%s)",k,(s[k]-b)*delta) - end - paths[tagname] = concat(s,"--") - end - for i=1,#parameters do - path(parameters[i]) - end - for tag, _ in next, keys do - path("node_memory",tag) - names[#names+1] = tag - end - pages = pages - 1 - end - end - sort(names) - processed[name] = { - names = names, - top = top, - bot = bot, - pages = pages, - paths = paths, - } - end - return processed[name] -end - -progress.convert = convert - -function progress.bot(name,tag) - return convert(name).bot[tag] or 0 -end - -function progress.top(name,tag) - return convert(name).top[tag] or 0 -end - -function progress.pages(name,tag) - return convert(name).pages or 0 -end - -function progress.path(name,tag) - return convert(name).paths[tag] or "origin" -end - -function progress.nodes(name) - return convert(name).names or { } -end - +if not modules then modules = { } end modules ['trac-tim'] = { + version = 1.001, + comment = "companion to m-timing.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, gsub = string.format, string.gsub +local concat, sort = table.concat, table.sort +local next, tonumber = next, tonumber + +moduledata = moduledata or { } +local progress = moduledata.progress or { } +moduledata.progress = progress + +local report_timing = logs.reporter("timing") + +if not nodes then nodes = { } end -- when loaded in mtxrun + +progress.parameters = nodes and nodes.snapshots.getparameters +progress.defaultfilename = ((tex and tex.jobname) or "whatever") .. "-luatex-progress" + +-- storage + +function progress.store() + nodes.snapshots.takesample() +end + +function progress.save(name) + local filename = (name or progress.defaultfilename) .. ".lut" + report_timing("saving data in %a",filename) + table.save(filename,nodes.snapshots.getsamples()) + nodes.snapshots.resetsamples() +end + +-- conversion + +local processed = { } +local parameters = progress.parameters() + +local function convert(name) + name = name ~= "" and name or progress.defaultfilename + if not processed[name] then + local names, top, bot, pages, paths, keys = { }, { }, { }, 0, { }, { } + local data = table.load(name .. 
".lut") + if data then + pages = #data + if pages > 1 then + local factor = 100 + for k=1,#data do + for k, v in next, data[k].node_memory do + keys[k] = true + end + end + for k=1,#data do + local m = data[k].node_memory + for k, v in next, keys do + if not m[k] then m[k] = 0 end + end + end + local function path(tag,subtag) + local b, t, s = nil, nil, { } + for k=1,#data do + local v = data[k][tag] + v = v and (subtag and v[subtag]) or v + if v then + v = tonumber(v) + if b then + if v > t then t = v end + if v < b then b = v end + else + t = v + b = v + end + s[k] = v + else + s[k] = 0 + end + end + local tagname = subtag or tag + top[tagname] = gsub(format("%.3f",t),"%.000$","") + bot[tagname] = gsub(format("%.3f",b),"%.000$","") + local delta = t-b + if delta == 0 then + delta = 1 + else + delta = factor/delta + end + for k=1,#s do + s[k] = format("(%s,%s)",k,(s[k]-b)*delta) + end + paths[tagname] = concat(s,"--") + end + for i=1,#parameters do + path(parameters[i]) + end + for tag, _ in next, keys do + path("node_memory",tag) + names[#names+1] = tag + end + pages = pages - 1 + end + end + sort(names) + processed[name] = { + names = names, + top = top, + bot = bot, + pages = pages, + paths = paths, + } + end + return processed[name] +end + +progress.convert = convert + +function progress.bot(name,tag) + return convert(name).bot[tag] or 0 +end + +function progress.top(name,tag) + return convert(name).top[tag] or 0 +end + +function progress.pages(name,tag) + return convert(name).pages or 0 +end + +function progress.path(name,tag) + return convert(name).paths[tag] or "origin" +end + +function progress.nodes(name) + return convert(name).names or { } +end + diff --git a/tex/context/base/trac-vis.lua b/tex/context/base/trac-vis.lua index df4909c3e..3dc7aa9d2 100644 --- a/tex/context/base/trac-vis.lua +++ b/tex/context/base/trac-vis.lua @@ -1,926 +1,926 @@ -if not modules then modules = { } end modules ['trac-vis'] = { - version = 1.001, - comment = "companion to trac-vis.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local string, number, table = string, number, table -local node, nodes, attributes, fonts, tex = node, nodes, attributes, fonts, tex -local type = type -local format = string.format -local formatters = string.formatters - --- This module started out in the early days of mkiv and luatex with --- visualizing kerns related to fonts. In the process of cleaning up the --- visual debugger code it made sense to integrate some other code that --- I had laying around and replace the old supp-vis debugging code. As --- only a subset of the old visual debugger makes sense it has become a --- different implementation. Soms of the m-visual functionality will also --- be ported. The code is rather trivial. The caching is not really needed --- but saves upto 50% of the time needed to add visualization. Of course --- the overall runtime is larger because of color and layer processing in --- the backend (can be times as much) so the runtime is somewhat larger --- with full visualization enabled. In practice this will never happen --- unless one is demoing. - --- We could use pdf literals and re stream codes but it's not worth the --- trouble because we would end up in color etc mess. Maybe one day I'll --- make a nodeinjection variant. 
- --- todo: global switch (so no attributes) --- todo: maybe also xoffset, yoffset of glyph --- todo: inline concat (more efficient) - -local nodecodes = nodes.nodecodes -local disc_code = nodecodes.disc -local kern_code = nodecodes.kern -local glyph_code = nodecodes.glyph -local disc_code = nodecodes.disc -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glue_code = nodecodes.glue -local penalty_code = nodecodes.penalty -local whatsit_code = nodecodes.whatsit -local user_code = nodecodes.user -local gluespec_code = nodecodes.gluespec - -local kerncodes = nodes.kerncodes -local font_kern_code = kerncodes.fontkern -local user_kern_code = kerncodes.userkern - -local gluecodes = nodes.gluecodes -local cleaders_code = gluecodes.cleaders -local userskip_code = gluecodes.userskip -local space_code = gluecodes.space -local xspace_code = gluecodes.xspace -local leftskip_code = gluecodes.leftskip -local rightskip_code = gluecodes.rightskip - -local whatsitcodes = nodes.whatsitcodes - -local concat_nodes = nodes.concat -local hpack_nodes = node.hpack -local vpack_nodes = node.vpack -local hpack_string = typesetters.hpack -local fast_hpack_string = typesetters.fast_hpack -local copy_node = node.copy -local copy_list = node.copy_list -local free_node = node.free -local free_node_list = node.flush_list -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after -local fast_hpack = nodes.fasthpack -local traverse_nodes = node.traverse - -local tex_attribute = tex.attribute -local tex_box = tex.box -local unsetvalue = attributes.unsetvalue - -local current_font = font.current - -local exheights = fonts.hashes.exheights -local emwidths = fonts.hashes.emwidths -local pt_factor = number.dimenfactors.pt - -local nodepool = nodes.pool -local new_rule = nodepool.rule -local new_kern = nodepool.kern -local new_glue = nodepool.glue -local new_penalty = nodepool.penalty - -local tracers = nodes.tracers -local visualizers = nodes.visualizers - -local setcolor = tracers.colors.set -local setlistcolor = tracers.colors.setlist -local settransparency = tracers.transparencies.set -local setlisttransparency = tracers.transparencies.setlist - -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming - -local a_visual = attributes.private("visual") -local a_fontkern = attributes.private("fontkern") -local a_layer = attributes.private("viewerlayer") - -local hasbit = number.hasbit -local bit = number.bit -local setbit = number.setbit -local clearbit = number.clearbit - -local trace_hbox -local trace_vbox -local trace_vtop -local trace_kern -local trace_glue -local trace_penalty -local trace_fontkern -local trace_strut -local trace_whatsit -local trace_user - -local report_visualize = logs.reporter("visualize") - -local modes = { - hbox = 1, - vbox = 2, - vtop = 4, - kern = 8, - glue = 16, - penalty = 32, - fontkern = 64, - strut = 128, - whatsit = 256, - glyph = 512, - simple = 1024, - simplehbox = 1024 + 1, - simplevbox = 1024 + 2, - simplevtop = 1024 + 4, - user = 2048, -} - -local modes_makeup = { "hbox", "vbox", "kern", "glue", "penalty" } -local modes_boxes = { "hbox", "vbox" } -local modes_all = { "hbox", "vbox", "kern", "glue", "penalty", "fontkern", "whatsit", "glyph", "user" } - -local usedfont, exheight, emwidth -local l_penalty, l_glue, l_kern, l_fontkern, l_hbox, l_vbox, l_vtop, l_strut, l_whatsit, l_glyph, l_user - -local enabled = false -local layers = { } - -local preset_boxes = modes.hbox + modes.vbox -local 
preset_makeup = preset_boxes + modes.kern + modes.glue + modes.penalty -local preset_all = preset_makeup + modes.fontkern + modes.whatsit + modes.glyph + modes.user - -function visualizers.setfont(id) - usedfont = id or current_font() - exheight = exheights[usedfont] - emwidth = emwidths[usedfont] -end - --- we can preset a bunch of bits - -local function enable() - if not usedfont then - -- we use a narrow monospaced font - visualizers.setfont(fonts.definers.define { name = "lmmonoltcond10regular", size = tex.sp("4pt") }) - end - for mode, value in next, modes do - local tag = formatters["v_%s"](mode) - attributes.viewerlayers.define { - tag = tag, - title = formatters["visualizer %s"](mode), - visible = "start", - editable = "yes", - printable = "yes" - } - layers[mode] = attributes.viewerlayers.register(tag,true) - end - l_hbox = layers.hbox - l_vbox = layers.vbox - l_vtop = layers.vtop - l_glue = layers.glue - l_kern = layers.kern - l_penalty = layers.penalty - l_fontkern = layers.fontkern - l_strut = layers.strut - l_whatsit = layers.whatsit - l_glyph = layers.glyph - l_user = layers.user - nodes.tasks.enableaction("shipouts","nodes.visualizers.handler") - report_visualize("enabled") - enabled = true - tex.setcount("global","c_syst_visualizers_state",1) -- so that we can optimize at the tex end -end - -local function setvisual(n,a,what) -- this will become more efficient when we have the bit lib linked in - if not n or n == "reset" then - return unsetvalue - elseif n == "makeup" then - if not a or a == 0 or a == unsetvalue then - a = preset_makeup - else - a = setbit(a,preset_makeup) - -- for i=1,#modes_makeup do - -- a = setvisual(modes_makeup[i],a) - -- end - end - elseif n == "boxes" then - if not a or a == 0 or a == unsetvalue then - a = preset_boxes - else - a = setbit(a,preset_boxes) - -- for i=1,#modes_boxes do - -- a = setvisual(modes_boxes[i],a) - -- end - end - elseif n == "all" then - if what == false then - return unsetvalue - elseif not a or a == 0 or a == unsetvalue then - a = preset_all - else - a = setbit(a,preset_all) - -- for i=1,#modes_all do - -- a = setvisual(modes_all[i],a) - -- end - end - else - local m = modes[n] - if not m then - -- go on - elseif a == unsetvalue then - if what == false then - return unsetvalue - else - -- a = setbit(0,m) - a = m - end - elseif what == false then - a = clearbit(a,m) - elseif not a or a == 0 then - a = m - else - a = setbit(a,m) - end - end - if not a or a == 0 or a == unsetvalue then - return unsetvalue - elseif not enabled then -- must happen at runtime (as we don't store layers yet) - enable() - end - return a -end - -function visualizers.setvisual(n) - tex_attribute[a_visual] = setvisual(n,tex_attribute[a_visual]) -end - -function visualizers.setlayer(n) - tex_attribute[a_layer] = layers[n] or unsetvalue -end - -commands.setvisual = visualizers.setvisual -commands.setlayer = visualizers.setlayer - -function commands.visual(n) - context(setvisual(n)) -end - -local function set(mode,v) - tex_attribute[a_visual] = setvisual(mode,tex_attribute[a_visual],v) -end - -for mode, value in next, modes do - trackers.register(formatters["visualizers.%s"](mode), function(v) set(mode,v) end) -end - -trackers.register("visualizers.reset", function(v) set("reset", v) end) -trackers.register("visualizers.all", function(v) set("all", v) end) -trackers.register("visualizers.makeup",function(v) set("makeup",v) end) -trackers.register("visualizers.boxes", function(v) set("boxes", v) end) - -local c_positive = "trace:b" -local c_negative = 
"trace:r" -local c_zero = "trace:g" -local c_text = "trace:s" -local c_space = "trace:y" -local c_skip_a = "trace:c" -local c_skip_b = "trace:m" -local c_glyph = "trace:o" -local c_white = "trace:w" - -local c_positive_d = "trace:db" -local c_negative_d = "trace:dr" -local c_zero_d = "trace:dg" -local c_text_d = "trace:ds" -local c_space_d = "trace:dy" -local c_skip_a_d = "trace:dc" -local c_skip_b_d = "trace:dm" -local c_glyph_d = "trace:do" -local c_white_d = "trace:dw" - -local function sometext(str,layer,color,textcolor) -- we can just paste verbatim together .. no typesteting needed - local text = fast_hpack_string(str,usedfont) - local size = text.width - local rule = new_rule(size,2*exheight,exheight/2) - local kern = new_kern(-size) - if color then - setcolor(rule,color) - end - if textcolor then - setlistcolor(text.list,textcolor) - end - local info = concat_nodes { - rule, - kern, - text, - } - setlisttransparency(info,c_zero) - info = fast_hpack(info) - if layer then - info[a_layer] = layer - end - local width = info.width - info.width = 0 - info.height = 0 - info.depth = 0 - return info, width -end - -local f_cache = { } - -local function fontkern(head,current) - local kern = current.kern - local info = f_cache[kern] - if info then - -- print("hit fontkern") - else - local text = fast_hpack_string(formatters[" %0.3f"](kern*pt_factor),usedfont) - local rule = new_rule(emwidth/10,6*exheight,2*exheight) - local list = text.list - if kern > 0 then - setlistcolor(list,c_positive_d) - elseif kern < 0 then - setlistcolor(list,c_negative_d) - else - setlistcolor(list,c_zero_d) - end - setlisttransparency(list,c_text_d) - settransparency(rule,c_text_d) - text.shift = -5 * exheight - info = concat_nodes { - rule, - text, - } - info = fast_hpack(info) - info[a_layer] = l_fontkern - info.width = 0 - info.height = 0 - info.depth = 0 - f_cache[kern] = info - end - head = insert_node_before(head,current,copy_list(info)) - return head, current -end - -local w_cache = { } - -local tags = { - open = "FIC", - write = "FIW", - close = "FIC", - special = "SPE", - localpar = "PAR", - dir = "DIR", - pdfliteral = "PDF", - pdfrefobj = "PDF", - pdfrefxform = "PDF", - pdfrefximage = "PDF", - pdfannot = "PDF", - pdfstartlink = "PDF", - pdfendlink = "PDF", - pdfdest = "PDF", - pdfthread = "PDF", - pdfstartthread = "PDF", - pdfendthread = "PDF", - pdfsavepos = "PDF", - pdfthreaddata = "PDF", - pdflinkdata = "PDF", - pdfcolorstack = "PDF", - pdfsetmatrix = "PDF", - pdfsave = "PDF", - pdfrestore = "PDF", - latelua = "LUA", - closelua = "LUA", - cancelboundary = "CBD", - userdefined = "USR", -} - -local function whatsit(head,current) - local what = current.subtype - local info = w_cache[what] - if info then - -- print("hit whatsit") - else - local tag = whatsitcodes[what] - -- maybe different text colors per tag - info = sometext(formatters["W:%s"](tag and tags[tag] or what),usedfont,nil,c_white) - info[a_layer] = l_whatsit - w_cache[what] = info - end - head, current = insert_node_after(head,current,copy_list(info)) - return head, current -end - -local function user(head,current) - local what = current.subtype - local info = w_cache[what] - if info then - -- print("hit user") - else - info = sometext(formatters["U:%s"](what),usedfont) - info[a_layer] = l_user - w_cache[what] = info - end - head, current = insert_node_after(head,current,copy_list(info)) - return head, current -end - -local b_cache = { } - -local function ruledbox(head,current,vertical,layer,what,simple) - local wd = current.width - if wd ~= 0 
then - local ht, dp = current.height, current.depth - local next, prev = current.next, current.prev - current.next, current.prev = nil, nil - local linewidth = emwidth/10 - local baseline, baseskip - if dp ~= 0 and ht ~= 0 then - if wd > 20*linewidth then - baseline = b_cache.baseline - if not baseline then - -- due to an optimized leader color/transparency we need to set the glue node in order - -- to trigger this mechanism - local leader = concat_nodes { - new_glue(2*linewidth), -- 2.5 - new_rule(6*linewidth,linewidth,0), -- 5.0 - new_glue(2*linewidth), -- 2.5 - } - -- setlisttransparency(leader,c_text) - leader = fast_hpack(leader) - -- setlisttransparency(leader,c_text) - baseline = new_glue(0) - baseline.leader = leader - baseline.subtype = cleaders_code - baseline.spec.stretch = 65536 - baseline.spec.stretch_order = 2 - setlisttransparency(baseline,c_text) - b_cache.baseline = baseline - end - baseline = copy_list(baseline) - baseline = fast_hpack(baseline,wd-2*linewidth) - -- or new hpack node, set head and also: - -- baseline.width = wd - -- baseline.glue_set = wd/65536 - -- baseline.glue_order = 2 - -- baseline.glue_sign = 1 - baseskip = new_kern(-wd+linewidth) - else - baseline = new_rule(wd-2*linewidth,linewidth,0) - baseskip = new_kern(-wd+2*linewidth) - end - end - local this - if not simple then - this = b_cache[what] - if not this then - local text = fast_hpack_string(what,usedfont) - this = concat_nodes { - new_kern(-text.width), - text, - } - setlisttransparency(this,c_text) - this = fast_hpack(this) - this.width = 0 - this.height = 0 - this.depth = 0 - b_cache[what] = this - end - end - local info = concat_nodes { - this and copy_list(this) or nil, -- this also triggers the right mode (else sometimes no whatits) - new_rule(linewidth,ht,dp), - new_rule(wd-2*linewidth,-dp+linewidth,dp), - new_rule(linewidth,ht,dp), - new_kern(-wd+linewidth), - new_rule(wd-2*linewidth,ht,-ht+linewidth), - baseskip, - baseline, - } - setlisttransparency(info,c_text) - info = fast_hpack(info) - info.width = 0 - info.height = 0 - info.depth = 0 - info[a_layer] = layer - local info = concat_nodes { - current, - new_kern(-wd), - info, - } - info = fast_hpack(info,wd) - if vertical then - info = vpack_nodes(info) - end - if next then - info.next = next - next.prev = info - end - if prev then -if prev.id == gluespec_code then - -- weird, how can this happen, an inline glue-spec -else - info.prev = prev - prev.next = info -end - end - if head == current then - return info, info - else - return head, info - end - else - return head, current - end -end - -local function ruledglyph(head,current) - local wd = current.width - if wd ~= 0 then - local ht, dp = current.height, current.depth - local next, prev = current.next, current.prev - current.next, current.prev = nil, nil - local linewidth = emwidth/20 - local baseline - if dp ~= 0 and ht ~= 0 then - baseline = new_rule(wd-2*linewidth,linewidth,0) - end - local doublelinewidth = 2*linewidth - local info = concat_nodes { - new_rule(linewidth,ht,dp), - new_rule(wd-doublelinewidth,-dp+linewidth,dp), - new_rule(linewidth,ht,dp), - new_kern(-wd+linewidth), - new_rule(wd-doublelinewidth,ht,-ht+linewidth), - new_kern(-wd+doublelinewidth), - baseline, - } - setlistcolor(info,c_glyph) - setlisttransparency(info,c_glyph_d) - info = fast_hpack(info) - info.width = 0 - info.height = 0 - info.depth = 0 - info[a_layer] = l_glyph - local info = concat_nodes { - current, - new_kern(-wd), - info, - } - info = fast_hpack(info) - info.width = wd - if next then - 
info.next = next - next.prev = info - end - if prev then - info.prev = prev - prev.next = info - end - if head == current then - return info, info - else - return head, info - end - else - return head, current - end -end - -local g_cache = { } - -local tags = { - -- userskip = "US", - lineskip = "LS", - baselineskip = "BS", - parskip = "PS", - abovedisplayskip = "DA", - belowdisplayskip = "DB", - abovedisplayshortskip = "SA", - belowdisplayshortskip = "SB", - leftskip = "LS", - rightskip = "RS", - topskip = "TS", - splittopskip = "ST", - tabskip = "AS", - spaceskip = "SS", - xspaceskip = "XS", - parfillskip = "PF", - thinmuskip = "MS", - medmuskip = "MM", - thickmuskip = "ML", - leaders = "NL", - cleaders = "CL", - xleaders = "XL", - gleaders = "GL", - -- true = "VS", - -- false = "HS", -} - -local function ruledglue(head,current,vertical) - local spec = current.spec - local width = spec.width - local subtype = current.subtype - local amount = formatters["%s:%0.3f"](tags[subtype] or (vertical and "VS") or "HS",width*pt_factor) - local info = g_cache[amount] - if info then - -- print("glue hit") - else - if subtype == space_code or subtype == xspace_code then -- not yet all space - info = sometext(amount,l_glue,c_space) - elseif subtype == leftskip_code or subtype == rightskip_code then - info = sometext(amount,l_glue,c_skip_a) - elseif subtype == userskip_code then - if width > 0 then - info = sometext(amount,l_glue,c_positive) - elseif width < 0 then - info = sometext(amount,l_glue,c_negative) - else - info = sometext(amount,l_glue,c_zero) - end - else - info = sometext(amount,l_glue,c_skip_b) - end - g_cache[amount] = info - end - info = copy_list(info) - if vertical then - info = vpack_nodes(info) - end - head, current = insert_node_before(head,current,info) - return head, current.next -end - -local k_cache = { } - -local function ruledkern(head,current,vertical) - local kern = current.kern - local info = k_cache[kern] - if info then - -- print("kern hit") - else - local amount = formatters["%s:%0.3f"](vertical and "VK" or "HK",kern*pt_factor) - if kern > 0 then - info = sometext(amount,l_kern,c_positive) - elseif kern < 0 then - info = sometext(amount,l_kern,c_negative) - else - info = sometext(amount,l_kern,c_zero) - end - k_cache[kern] = info - end - info = copy_list(info) - if vertical then - info = vpack_nodes(info) - end - head, current = insert_node_before(head,current,info) - return head, current.next -end - -local p_cache = { } - -local function ruledpenalty(head,current,vertical) - local penalty = current.penalty - local info = p_cache[penalty] - if info then - -- print("penalty hit") - else - local amount = formatters["%s:%s"](vertical and "VP" or "HP",penalty) - if penalty > 0 then - info = sometext(amount,l_penalty,c_positive) - elseif penalty < 0 then - info = sometext(amount,l_penalty,c_negative) - else - info = sometext(amount,l_penalty,c_zero) - end - p_cache[penalty] = info - end - info = copy_list(info) - if vertical then - info = vpack_nodes(info) - end - head, current = insert_node_before(head,current,info) - return head, current.next -end - -local function visualize(head,vertical) - local trace_hbox = false - local trace_vbox = false - local trace_vtop = false - local trace_kern = false - local trace_glue = false - local trace_penalty = false - local trace_fontkern = false - local trace_strut = false - local trace_whatsit = false - local trace_glyph = false - local trace_simple = false - local trace_user = false - local current = head - local prev_trace_fontkern = 
nil - local attr = unsetvalue - while current do - local id = current.id - local a = current[a_visual] or unsetvalue - if a ~= attr then - prev_trace_fontkern = trace_fontkern - if a == unsetvalue then - trace_hbox = false - trace_vbox = false - trace_vtop = false - trace_kern = false - trace_glue = false - trace_penalty = false - trace_fontkern = false - trace_strut = false - trace_whatsit = false - trace_glyph = false - trace_simple = false - trace_user = false - else -- dead slow: - trace_hbox = hasbit(a, 1) - trace_vbox = hasbit(a, 2) - trace_vtop = hasbit(a, 4) - trace_kern = hasbit(a, 8) - trace_glue = hasbit(a, 16) - trace_penalty = hasbit(a, 32) - trace_fontkern = hasbit(a, 64) - trace_strut = hasbit(a, 128) - trace_whatsit = hasbit(a, 256) - trace_glyph = hasbit(a, 512) - trace_simple = hasbit(a,1024) - trace_user = hasbit(a,2048) - end - attr = a - end - if trace_strut then - current[a_layer] = l_strut - elseif id == glyph_code then - if trace_glyph then - head, current = ruledglyph(head,current) - end - elseif id == disc_code then - if trace_glyph then - local pre = current.pre - if pre then - current.pre = ruledglyph(pre,pre) - end - local post = current.post - if post then - current.post = ruledglyph(post,post) - end - local replace = current.replace - if replace then - current.replace = ruledglyph(replace,replace) - end - end - elseif id == kern_code then - local subtype = current.subtype - -- tricky ... we don't copy the trace attribute in node-inj (yet) - if subtype == font_kern_code or current[a_fontkern] then - if trace_fontkern or prev_trace_fontkern then - head, current = fontkern(head,current) - end - elseif subtype == user_kern_code then - if trace_kern then - head, current = ruledkern(head,current,vertical) - end - end - elseif id == glue_code then - local content = current.leader - if content then - current.leader = visualize(content,false) - elseif trace_glue then - head, current = ruledglue(head,current,vertical) - end - elseif id == penalty_code then - if trace_penalty then - head, current = ruledpenalty(head,current,vertical) - end - elseif id == disc_code then - current.pre = visualize(current.pre) - current.post = visualize(current.post) - current.replace = visualize(current.replace) - elseif id == hlist_code then - local content = current.list - if content then - current.list = visualize(content,false) - end - if trace_hbox then - head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple) - end - elseif id == vlist_code then - local content = current.list - if content then - current.list = visualize(content,true) - end - if trace_vtop then - head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple) - elseif trace_vbox then - head, current = ruledbox(head,current,true,l_vbox,"__V",trace_simple) - end - elseif id == whatsit_code then - if trace_whatsit then - head, current = whatsit(head,current) - end - elseif id == user_code then - if trace_whatsit then - head, current = user(head,current) - end - end - current = current.next - end - return head -end - -local function freed(cache) - local n = 0 - for k, v in next, cache do - free_node_list(v) - n = n + 1 - end - if n == 0 then - return 0, cache - else - return n, { } - end -end - -local function cleanup() - local hf, ng, np, nk, nw - nf, f_cache = freed(f_cache) - ng, g_cache = freed(g_cache) - np, p_cache = freed(p_cache) - nk, k_cache = freed(k_cache) - nw, w_cache = freed(w_cache) - nb, b_cache = freed(b_cache) - -- report_visualize("cache: %s fontkerns, %s skips, %s penalties, %s 
kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb) -end - -function visualizers.handler(head) - if usedfont then - starttiming(visualizers) - -- local l = tex_attribute[a_layer] - -- local v = tex_attribute[a_visual] - -- tex_attribute[a_layer] = unsetvalue - -- tex_attribute[a_visual] = unsetvalue - head = visualize(head) - -- tex_attribute[a_layer] = l - -- tex_attribute[a_visual] = v - -- -- cleanup() - stoptiming(visualizers) - end - return head, false -end - -function visualizers.box(n) - tex_box[n].list = visualizers.handler(tex_box[n].list) -end - -local last = nil -local used = nil - -local mark = { - "trace:1", "trace:2", "trace:3", - "trace:4", "trace:5", "trace:6", - "trace:7", -} - -local function markfonts(list) - for n in traverse_nodes(list) do - local id = n.id - if id == glyph_code then - local font = n.font - local okay = used[font] - if not okay then - last = last + 1 - okay = mark[last] - used[font] = okay - end - setcolor(n,okay) - elseif id == hlist_code or id == vlist_code then - markfonts(n.list) - end - end -end - -function visualizers.markfonts(list) - last, used = 0, { } - markfonts(type(n) == "number" and tex_box[n].list or n) -end - -function commands.markfonts(n) - visualizers.markfonts(n) -end - -statistics.register("visualization time",function() - if enabled then - cleanup() -- in case we don't don't do it each time - return format("%s seconds",statistics.elapsedtime(visualizers)) - end -end) +if not modules then modules = { } end modules ['trac-vis'] = { + version = 1.001, + comment = "companion to trac-vis.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local string, number, table = string, number, table +local node, nodes, attributes, fonts, tex = node, nodes, attributes, fonts, tex +local type = type +local format = string.format +local formatters = string.formatters + +-- This module started out in the early days of mkiv and luatex with +-- visualizing kerns related to fonts. In the process of cleaning up the +-- visual debugger code it made sense to integrate some other code that +-- I had laying around and replace the old supp-vis debugging code. As +-- only a subset of the old visual debugger makes sense it has become a +-- different implementation. Soms of the m-visual functionality will also +-- be ported. The code is rather trivial. The caching is not really needed +-- but saves upto 50% of the time needed to add visualization. Of course +-- the overall runtime is larger because of color and layer processing in +-- the backend (can be times as much) so the runtime is somewhat larger +-- with full visualization enabled. In practice this will never happen +-- unless one is demoing. + +-- We could use pdf literals and re stream codes but it's not worth the +-- trouble because we would end up in color etc mess. Maybe one day I'll +-- make a nodeinjection variant. 
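[editor's note] The visualizer modes defined a bit further down in this file (hbox = 1, vbox = 2, ... user = 2048) are plain powers of two that setvisual() folds into a single attribute value with hasbit, setbit and clearbit. A minimal standalone sketch of that arithmetic is shown below; the helper definitions are an assumption matching the usual pure Lua idiom, not imports from the real l-number module.

    -- sketch of the bit arithmetic behind setvisual(); helpers defined
    -- locally so the snippet runs on its own
    local function hasbit(x, p)   return x % (p + p) >= p end
    local function setbit(x, p)   return hasbit(x, p) and x or x + p end
    local function clearbit(x, p) return hasbit(x, p) and x - p or x end

    local hbox, vbox, glue = 1, 2, 16         -- a few of the mode values used below

    local a = 0
    a = setbit(a, hbox)                       -- enable hbox tracing
    a = setbit(a, glue)                       -- enable glue tracing
    print(hasbit(a, hbox), hasbit(a, vbox))   -- true  false
    a = clearbit(a, glue)                     -- switch glue tracing off again
    print(a)                                  -- 1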
+ +-- todo: global switch (so no attributes) +-- todo: maybe also xoffset, yoffset of glyph +-- todo: inline concat (more efficient) + +local nodecodes = nodes.nodecodes +local disc_code = nodecodes.disc +local kern_code = nodecodes.kern +local glyph_code = nodecodes.glyph +local disc_code = nodecodes.disc +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glue_code = nodecodes.glue +local penalty_code = nodecodes.penalty +local whatsit_code = nodecodes.whatsit +local user_code = nodecodes.user +local gluespec_code = nodecodes.gluespec + +local kerncodes = nodes.kerncodes +local font_kern_code = kerncodes.fontkern +local user_kern_code = kerncodes.userkern + +local gluecodes = nodes.gluecodes +local cleaders_code = gluecodes.cleaders +local userskip_code = gluecodes.userskip +local space_code = gluecodes.space +local xspace_code = gluecodes.xspace +local leftskip_code = gluecodes.leftskip +local rightskip_code = gluecodes.rightskip + +local whatsitcodes = nodes.whatsitcodes + +local concat_nodes = nodes.concat +local hpack_nodes = node.hpack +local vpack_nodes = node.vpack +local hpack_string = typesetters.hpack +local fast_hpack_string = typesetters.fast_hpack +local copy_node = node.copy +local copy_list = node.copy_list +local free_node = node.free +local free_node_list = node.flush_list +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after +local fast_hpack = nodes.fasthpack +local traverse_nodes = node.traverse + +local tex_attribute = tex.attribute +local tex_box = tex.box +local unsetvalue = attributes.unsetvalue + +local current_font = font.current + +local exheights = fonts.hashes.exheights +local emwidths = fonts.hashes.emwidths +local pt_factor = number.dimenfactors.pt + +local nodepool = nodes.pool +local new_rule = nodepool.rule +local new_kern = nodepool.kern +local new_glue = nodepool.glue +local new_penalty = nodepool.penalty + +local tracers = nodes.tracers +local visualizers = nodes.visualizers + +local setcolor = tracers.colors.set +local setlistcolor = tracers.colors.setlist +local settransparency = tracers.transparencies.set +local setlisttransparency = tracers.transparencies.setlist + +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming + +local a_visual = attributes.private("visual") +local a_fontkern = attributes.private("fontkern") +local a_layer = attributes.private("viewerlayer") + +local hasbit = number.hasbit +local bit = number.bit +local setbit = number.setbit +local clearbit = number.clearbit + +local trace_hbox +local trace_vbox +local trace_vtop +local trace_kern +local trace_glue +local trace_penalty +local trace_fontkern +local trace_strut +local trace_whatsit +local trace_user + +local report_visualize = logs.reporter("visualize") + +local modes = { + hbox = 1, + vbox = 2, + vtop = 4, + kern = 8, + glue = 16, + penalty = 32, + fontkern = 64, + strut = 128, + whatsit = 256, + glyph = 512, + simple = 1024, + simplehbox = 1024 + 1, + simplevbox = 1024 + 2, + simplevtop = 1024 + 4, + user = 2048, +} + +local modes_makeup = { "hbox", "vbox", "kern", "glue", "penalty" } +local modes_boxes = { "hbox", "vbox" } +local modes_all = { "hbox", "vbox", "kern", "glue", "penalty", "fontkern", "whatsit", "glyph", "user" } + +local usedfont, exheight, emwidth +local l_penalty, l_glue, l_kern, l_fontkern, l_hbox, l_vbox, l_vtop, l_strut, l_whatsit, l_glyph, l_user + +local enabled = false +local layers = { } + +local preset_boxes = modes.hbox + modes.vbox +local 
preset_makeup = preset_boxes + modes.kern + modes.glue + modes.penalty +local preset_all = preset_makeup + modes.fontkern + modes.whatsit + modes.glyph + modes.user + +function visualizers.setfont(id) + usedfont = id or current_font() + exheight = exheights[usedfont] + emwidth = emwidths[usedfont] +end + +-- we can preset a bunch of bits + +local function enable() + if not usedfont then + -- we use a narrow monospaced font + visualizers.setfont(fonts.definers.define { name = "lmmonoltcond10regular", size = tex.sp("4pt") }) + end + for mode, value in next, modes do + local tag = formatters["v_%s"](mode) + attributes.viewerlayers.define { + tag = tag, + title = formatters["visualizer %s"](mode), + visible = "start", + editable = "yes", + printable = "yes" + } + layers[mode] = attributes.viewerlayers.register(tag,true) + end + l_hbox = layers.hbox + l_vbox = layers.vbox + l_vtop = layers.vtop + l_glue = layers.glue + l_kern = layers.kern + l_penalty = layers.penalty + l_fontkern = layers.fontkern + l_strut = layers.strut + l_whatsit = layers.whatsit + l_glyph = layers.glyph + l_user = layers.user + nodes.tasks.enableaction("shipouts","nodes.visualizers.handler") + report_visualize("enabled") + enabled = true + tex.setcount("global","c_syst_visualizers_state",1) -- so that we can optimize at the tex end +end + +local function setvisual(n,a,what) -- this will become more efficient when we have the bit lib linked in + if not n or n == "reset" then + return unsetvalue + elseif n == "makeup" then + if not a or a == 0 or a == unsetvalue then + a = preset_makeup + else + a = setbit(a,preset_makeup) + -- for i=1,#modes_makeup do + -- a = setvisual(modes_makeup[i],a) + -- end + end + elseif n == "boxes" then + if not a or a == 0 or a == unsetvalue then + a = preset_boxes + else + a = setbit(a,preset_boxes) + -- for i=1,#modes_boxes do + -- a = setvisual(modes_boxes[i],a) + -- end + end + elseif n == "all" then + if what == false then + return unsetvalue + elseif not a or a == 0 or a == unsetvalue then + a = preset_all + else + a = setbit(a,preset_all) + -- for i=1,#modes_all do + -- a = setvisual(modes_all[i],a) + -- end + end + else + local m = modes[n] + if not m then + -- go on + elseif a == unsetvalue then + if what == false then + return unsetvalue + else + -- a = setbit(0,m) + a = m + end + elseif what == false then + a = clearbit(a,m) + elseif not a or a == 0 then + a = m + else + a = setbit(a,m) + end + end + if not a or a == 0 or a == unsetvalue then + return unsetvalue + elseif not enabled then -- must happen at runtime (as we don't store layers yet) + enable() + end + return a +end + +function visualizers.setvisual(n) + tex_attribute[a_visual] = setvisual(n,tex_attribute[a_visual]) +end + +function visualizers.setlayer(n) + tex_attribute[a_layer] = layers[n] or unsetvalue +end + +commands.setvisual = visualizers.setvisual +commands.setlayer = visualizers.setlayer + +function commands.visual(n) + context(setvisual(n)) +end + +local function set(mode,v) + tex_attribute[a_visual] = setvisual(mode,tex_attribute[a_visual],v) +end + +for mode, value in next, modes do + trackers.register(formatters["visualizers.%s"](mode), function(v) set(mode,v) end) +end + +trackers.register("visualizers.reset", function(v) set("reset", v) end) +trackers.register("visualizers.all", function(v) set("all", v) end) +trackers.register("visualizers.makeup",function(v) set("makeup",v) end) +trackers.register("visualizers.boxes", function(v) set("boxes", v) end) + +local c_positive = "trace:b" +local c_negative = 
"trace:r" +local c_zero = "trace:g" +local c_text = "trace:s" +local c_space = "trace:y" +local c_skip_a = "trace:c" +local c_skip_b = "trace:m" +local c_glyph = "trace:o" +local c_white = "trace:w" + +local c_positive_d = "trace:db" +local c_negative_d = "trace:dr" +local c_zero_d = "trace:dg" +local c_text_d = "trace:ds" +local c_space_d = "trace:dy" +local c_skip_a_d = "trace:dc" +local c_skip_b_d = "trace:dm" +local c_glyph_d = "trace:do" +local c_white_d = "trace:dw" + +local function sometext(str,layer,color,textcolor) -- we can just paste verbatim together .. no typesteting needed + local text = fast_hpack_string(str,usedfont) + local size = text.width + local rule = new_rule(size,2*exheight,exheight/2) + local kern = new_kern(-size) + if color then + setcolor(rule,color) + end + if textcolor then + setlistcolor(text.list,textcolor) + end + local info = concat_nodes { + rule, + kern, + text, + } + setlisttransparency(info,c_zero) + info = fast_hpack(info) + if layer then + info[a_layer] = layer + end + local width = info.width + info.width = 0 + info.height = 0 + info.depth = 0 + return info, width +end + +local f_cache = { } + +local function fontkern(head,current) + local kern = current.kern + local info = f_cache[kern] + if info then + -- print("hit fontkern") + else + local text = fast_hpack_string(formatters[" %0.3f"](kern*pt_factor),usedfont) + local rule = new_rule(emwidth/10,6*exheight,2*exheight) + local list = text.list + if kern > 0 then + setlistcolor(list,c_positive_d) + elseif kern < 0 then + setlistcolor(list,c_negative_d) + else + setlistcolor(list,c_zero_d) + end + setlisttransparency(list,c_text_d) + settransparency(rule,c_text_d) + text.shift = -5 * exheight + info = concat_nodes { + rule, + text, + } + info = fast_hpack(info) + info[a_layer] = l_fontkern + info.width = 0 + info.height = 0 + info.depth = 0 + f_cache[kern] = info + end + head = insert_node_before(head,current,copy_list(info)) + return head, current +end + +local w_cache = { } + +local tags = { + open = "FIC", + write = "FIW", + close = "FIC", + special = "SPE", + localpar = "PAR", + dir = "DIR", + pdfliteral = "PDF", + pdfrefobj = "PDF", + pdfrefxform = "PDF", + pdfrefximage = "PDF", + pdfannot = "PDF", + pdfstartlink = "PDF", + pdfendlink = "PDF", + pdfdest = "PDF", + pdfthread = "PDF", + pdfstartthread = "PDF", + pdfendthread = "PDF", + pdfsavepos = "PDF", + pdfthreaddata = "PDF", + pdflinkdata = "PDF", + pdfcolorstack = "PDF", + pdfsetmatrix = "PDF", + pdfsave = "PDF", + pdfrestore = "PDF", + latelua = "LUA", + closelua = "LUA", + cancelboundary = "CBD", + userdefined = "USR", +} + +local function whatsit(head,current) + local what = current.subtype + local info = w_cache[what] + if info then + -- print("hit whatsit") + else + local tag = whatsitcodes[what] + -- maybe different text colors per tag + info = sometext(formatters["W:%s"](tag and tags[tag] or what),usedfont,nil,c_white) + info[a_layer] = l_whatsit + w_cache[what] = info + end + head, current = insert_node_after(head,current,copy_list(info)) + return head, current +end + +local function user(head,current) + local what = current.subtype + local info = w_cache[what] + if info then + -- print("hit user") + else + info = sometext(formatters["U:%s"](what),usedfont) + info[a_layer] = l_user + w_cache[what] = info + end + head, current = insert_node_after(head,current,copy_list(info)) + return head, current +end + +local b_cache = { } + +local function ruledbox(head,current,vertical,layer,what,simple) + local wd = current.width + if wd ~= 0 
then + local ht, dp = current.height, current.depth + local next, prev = current.next, current.prev + current.next, current.prev = nil, nil + local linewidth = emwidth/10 + local baseline, baseskip + if dp ~= 0 and ht ~= 0 then + if wd > 20*linewidth then + baseline = b_cache.baseline + if not baseline then + -- due to an optimized leader color/transparency we need to set the glue node in order + -- to trigger this mechanism + local leader = concat_nodes { + new_glue(2*linewidth), -- 2.5 + new_rule(6*linewidth,linewidth,0), -- 5.0 + new_glue(2*linewidth), -- 2.5 + } + -- setlisttransparency(leader,c_text) + leader = fast_hpack(leader) + -- setlisttransparency(leader,c_text) + baseline = new_glue(0) + baseline.leader = leader + baseline.subtype = cleaders_code + baseline.spec.stretch = 65536 + baseline.spec.stretch_order = 2 + setlisttransparency(baseline,c_text) + b_cache.baseline = baseline + end + baseline = copy_list(baseline) + baseline = fast_hpack(baseline,wd-2*linewidth) + -- or new hpack node, set head and also: + -- baseline.width = wd + -- baseline.glue_set = wd/65536 + -- baseline.glue_order = 2 + -- baseline.glue_sign = 1 + baseskip = new_kern(-wd+linewidth) + else + baseline = new_rule(wd-2*linewidth,linewidth,0) + baseskip = new_kern(-wd+2*linewidth) + end + end + local this + if not simple then + this = b_cache[what] + if not this then + local text = fast_hpack_string(what,usedfont) + this = concat_nodes { + new_kern(-text.width), + text, + } + setlisttransparency(this,c_text) + this = fast_hpack(this) + this.width = 0 + this.height = 0 + this.depth = 0 + b_cache[what] = this + end + end + local info = concat_nodes { + this and copy_list(this) or nil, -- this also triggers the right mode (else sometimes no whatits) + new_rule(linewidth,ht,dp), + new_rule(wd-2*linewidth,-dp+linewidth,dp), + new_rule(linewidth,ht,dp), + new_kern(-wd+linewidth), + new_rule(wd-2*linewidth,ht,-ht+linewidth), + baseskip, + baseline, + } + setlisttransparency(info,c_text) + info = fast_hpack(info) + info.width = 0 + info.height = 0 + info.depth = 0 + info[a_layer] = layer + local info = concat_nodes { + current, + new_kern(-wd), + info, + } + info = fast_hpack(info,wd) + if vertical then + info = vpack_nodes(info) + end + if next then + info.next = next + next.prev = info + end + if prev then +if prev.id == gluespec_code then + -- weird, how can this happen, an inline glue-spec +else + info.prev = prev + prev.next = info +end + end + if head == current then + return info, info + else + return head, info + end + else + return head, current + end +end + +local function ruledglyph(head,current) + local wd = current.width + if wd ~= 0 then + local ht, dp = current.height, current.depth + local next, prev = current.next, current.prev + current.next, current.prev = nil, nil + local linewidth = emwidth/20 + local baseline + if dp ~= 0 and ht ~= 0 then + baseline = new_rule(wd-2*linewidth,linewidth,0) + end + local doublelinewidth = 2*linewidth + local info = concat_nodes { + new_rule(linewidth,ht,dp), + new_rule(wd-doublelinewidth,-dp+linewidth,dp), + new_rule(linewidth,ht,dp), + new_kern(-wd+linewidth), + new_rule(wd-doublelinewidth,ht,-ht+linewidth), + new_kern(-wd+doublelinewidth), + baseline, + } + setlistcolor(info,c_glyph) + setlisttransparency(info,c_glyph_d) + info = fast_hpack(info) + info.width = 0 + info.height = 0 + info.depth = 0 + info[a_layer] = l_glyph + local info = concat_nodes { + current, + new_kern(-wd), + info, + } + info = fast_hpack(info) + info.width = wd + if next then + 
info.next = next + next.prev = info + end + if prev then + info.prev = prev + prev.next = info + end + if head == current then + return info, info + else + return head, info + end + else + return head, current + end +end + +local g_cache = { } + +local tags = { + -- userskip = "US", + lineskip = "LS", + baselineskip = "BS", + parskip = "PS", + abovedisplayskip = "DA", + belowdisplayskip = "DB", + abovedisplayshortskip = "SA", + belowdisplayshortskip = "SB", + leftskip = "LS", + rightskip = "RS", + topskip = "TS", + splittopskip = "ST", + tabskip = "AS", + spaceskip = "SS", + xspaceskip = "XS", + parfillskip = "PF", + thinmuskip = "MS", + medmuskip = "MM", + thickmuskip = "ML", + leaders = "NL", + cleaders = "CL", + xleaders = "XL", + gleaders = "GL", + -- true = "VS", + -- false = "HS", +} + +local function ruledglue(head,current,vertical) + local spec = current.spec + local width = spec.width + local subtype = current.subtype + local amount = formatters["%s:%0.3f"](tags[subtype] or (vertical and "VS") or "HS",width*pt_factor) + local info = g_cache[amount] + if info then + -- print("glue hit") + else + if subtype == space_code or subtype == xspace_code then -- not yet all space + info = sometext(amount,l_glue,c_space) + elseif subtype == leftskip_code or subtype == rightskip_code then + info = sometext(amount,l_glue,c_skip_a) + elseif subtype == userskip_code then + if width > 0 then + info = sometext(amount,l_glue,c_positive) + elseif width < 0 then + info = sometext(amount,l_glue,c_negative) + else + info = sometext(amount,l_glue,c_zero) + end + else + info = sometext(amount,l_glue,c_skip_b) + end + g_cache[amount] = info + end + info = copy_list(info) + if vertical then + info = vpack_nodes(info) + end + head, current = insert_node_before(head,current,info) + return head, current.next +end + +local k_cache = { } + +local function ruledkern(head,current,vertical) + local kern = current.kern + local info = k_cache[kern] + if info then + -- print("kern hit") + else + local amount = formatters["%s:%0.3f"](vertical and "VK" or "HK",kern*pt_factor) + if kern > 0 then + info = sometext(amount,l_kern,c_positive) + elseif kern < 0 then + info = sometext(amount,l_kern,c_negative) + else + info = sometext(amount,l_kern,c_zero) + end + k_cache[kern] = info + end + info = copy_list(info) + if vertical then + info = vpack_nodes(info) + end + head, current = insert_node_before(head,current,info) + return head, current.next +end + +local p_cache = { } + +local function ruledpenalty(head,current,vertical) + local penalty = current.penalty + local info = p_cache[penalty] + if info then + -- print("penalty hit") + else + local amount = formatters["%s:%s"](vertical and "VP" or "HP",penalty) + if penalty > 0 then + info = sometext(amount,l_penalty,c_positive) + elseif penalty < 0 then + info = sometext(amount,l_penalty,c_negative) + else + info = sometext(amount,l_penalty,c_zero) + end + p_cache[penalty] = info + end + info = copy_list(info) + if vertical then + info = vpack_nodes(info) + end + head, current = insert_node_before(head,current,info) + return head, current.next +end + +local function visualize(head,vertical) + local trace_hbox = false + local trace_vbox = false + local trace_vtop = false + local trace_kern = false + local trace_glue = false + local trace_penalty = false + local trace_fontkern = false + local trace_strut = false + local trace_whatsit = false + local trace_glyph = false + local trace_simple = false + local trace_user = false + local current = head + local prev_trace_fontkern = 
nil + local attr = unsetvalue + while current do + local id = current.id + local a = current[a_visual] or unsetvalue + if a ~= attr then + prev_trace_fontkern = trace_fontkern + if a == unsetvalue then + trace_hbox = false + trace_vbox = false + trace_vtop = false + trace_kern = false + trace_glue = false + trace_penalty = false + trace_fontkern = false + trace_strut = false + trace_whatsit = false + trace_glyph = false + trace_simple = false + trace_user = false + else -- dead slow: + trace_hbox = hasbit(a, 1) + trace_vbox = hasbit(a, 2) + trace_vtop = hasbit(a, 4) + trace_kern = hasbit(a, 8) + trace_glue = hasbit(a, 16) + trace_penalty = hasbit(a, 32) + trace_fontkern = hasbit(a, 64) + trace_strut = hasbit(a, 128) + trace_whatsit = hasbit(a, 256) + trace_glyph = hasbit(a, 512) + trace_simple = hasbit(a,1024) + trace_user = hasbit(a,2048) + end + attr = a + end + if trace_strut then + current[a_layer] = l_strut + elseif id == glyph_code then + if trace_glyph then + head, current = ruledglyph(head,current) + end + elseif id == disc_code then + if trace_glyph then + local pre = current.pre + if pre then + current.pre = ruledglyph(pre,pre) + end + local post = current.post + if post then + current.post = ruledglyph(post,post) + end + local replace = current.replace + if replace then + current.replace = ruledglyph(replace,replace) + end + end + elseif id == kern_code then + local subtype = current.subtype + -- tricky ... we don't copy the trace attribute in node-inj (yet) + if subtype == font_kern_code or current[a_fontkern] then + if trace_fontkern or prev_trace_fontkern then + head, current = fontkern(head,current) + end + elseif subtype == user_kern_code then + if trace_kern then + head, current = ruledkern(head,current,vertical) + end + end + elseif id == glue_code then + local content = current.leader + if content then + current.leader = visualize(content,false) + elseif trace_glue then + head, current = ruledglue(head,current,vertical) + end + elseif id == penalty_code then + if trace_penalty then + head, current = ruledpenalty(head,current,vertical) + end + elseif id == disc_code then + current.pre = visualize(current.pre) + current.post = visualize(current.post) + current.replace = visualize(current.replace) + elseif id == hlist_code then + local content = current.list + if content then + current.list = visualize(content,false) + end + if trace_hbox then + head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple) + end + elseif id == vlist_code then + local content = current.list + if content then + current.list = visualize(content,true) + end + if trace_vtop then + head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple) + elseif trace_vbox then + head, current = ruledbox(head,current,true,l_vbox,"__V",trace_simple) + end + elseif id == whatsit_code then + if trace_whatsit then + head, current = whatsit(head,current) + end + elseif id == user_code then + if trace_whatsit then + head, current = user(head,current) + end + end + current = current.next + end + return head +end + +local function freed(cache) + local n = 0 + for k, v in next, cache do + free_node_list(v) + n = n + 1 + end + if n == 0 then + return 0, cache + else + return n, { } + end +end + +local function cleanup() + local hf, ng, np, nk, nw + nf, f_cache = freed(f_cache) + ng, g_cache = freed(g_cache) + np, p_cache = freed(p_cache) + nk, k_cache = freed(k_cache) + nw, w_cache = freed(w_cache) + nb, b_cache = freed(b_cache) + -- report_visualize("cache: %s fontkerns, %s skips, %s penalties, %s 
kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb) +end + +function visualizers.handler(head) + if usedfont then + starttiming(visualizers) + -- local l = tex_attribute[a_layer] + -- local v = tex_attribute[a_visual] + -- tex_attribute[a_layer] = unsetvalue + -- tex_attribute[a_visual] = unsetvalue + head = visualize(head) + -- tex_attribute[a_layer] = l + -- tex_attribute[a_visual] = v + -- -- cleanup() + stoptiming(visualizers) + end + return head, false +end + +function visualizers.box(n) + tex_box[n].list = visualizers.handler(tex_box[n].list) +end + +local last = nil +local used = nil + +local mark = { + "trace:1", "trace:2", "trace:3", + "trace:4", "trace:5", "trace:6", + "trace:7", +} + +local function markfonts(list) + for n in traverse_nodes(list) do + local id = n.id + if id == glyph_code then + local font = n.font + local okay = used[font] + if not okay then + last = last + 1 + okay = mark[last] + used[font] = okay + end + setcolor(n,okay) + elseif id == hlist_code or id == vlist_code then + markfonts(n.list) + end + end +end + +function visualizers.markfonts(list) + last, used = 0, { } + markfonts(type(n) == "number" and tex_box[n].list or n) +end + +function commands.markfonts(n) + visualizers.markfonts(n) +end + +statistics.register("visualization time",function() + if enabled then + cleanup() -- in case we don't don't do it each time + return format("%s seconds",statistics.elapsedtime(visualizers)) + end +end) diff --git a/tex/context/base/trac-xml.lua b/tex/context/base/trac-xml.lua index cd8b8c0a5..aba82ef52 100644 --- a/tex/context/base/trac-xml.lua +++ b/tex/context/base/trac-xml.lua @@ -1,183 +1,183 @@ -if not modules then modules = { } end modules ['trac-xml'] = { - version = 1.001, - comment = "companion to trac-log.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Application helpinfo can be defined in several ways: --- --- helpinfo = "big blob of help" --- --- helpinfo = { basic = "blob of basic help", extra = "blob of extra help" } --- --- helpinfo = "..." --- --- helpinfo = "somefile.xml" --- --- In the case of an xml file, the file should be either present on the same path --- as the script, or we should be be able to locate it using the resolver. - -local formatters = string.formatters -local reporters = logs.reporters -local xmlserialize = xml.serialize -local xmlcollected = xml.collected -local xmltext = xml.text -local xmlfirst = xml.first - --- there is no need for a newhandlers { name = "help", parent = "string" } - -local function showhelp(specification,...) - local root = xml.convert(specification.helpinfo or "") - if not root then - return - end - local xs = xml.gethandlers("string") - xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end) - xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end) - local wantedcategories = select("#",...) == 0 and true or table.tohash { ... 
} - local nofcategories = xml.count(root,"/application/flags/category") - local report = specification.report - for category in xmlcollected(root,"/application/flags/category") do - local categoryname = category.at.name or "" - if wantedcategories == true or wantedcategories[categoryname] then - if nofcategories > 1 then - report("%s options:",categoryname) - report() - end - for subcategory in xmlcollected(category,"/subcategory") do - for flag in xmlcollected(subcategory,"/flag") do - local name = flag.at.name - local value = flag.at.value - -- local short = xmlfirst(s,"/short") - -- local short = xmlserialize(short,xs) - local short = xmltext(xmlfirst(flag,"/short")) - if value then - report("--%-20s %s",formatters["%s=%s"](name,value),short) - else - report("--%-20s %s",name,short) - end - end - report() - end - end - end - for category in xmlcollected(root,"/application/examples/category") do - local title = xmltext(xmlfirst(category,"/title")) - if title and title ~= "" then - report() - report(title) - report() - end - for subcategory in xmlcollected(category,"/subcategory") do - for example in xmlcollected(subcategory,"/example") do - local command = xmltext(xmlfirst(example,"/command")) - local comment = xmltext(xmlfirst(example,"/comment")) - report(command) - end - report() - end - end - for comment in xmlcollected(root,"/application/comments/comment") do - local comment = xmltext(comment) - report() - report(comment) - report() - end -end - -local reporthelp = reporters.help -local exporthelp = reporters.export - -local function xmlfound(t) - local helpinfo = t.helpinfo - if type(helpinfo) == "table" then - return false - end - if type(helpinfo) ~= "string" then - helpinfo = "Warning: no helpinfo found." - t.helpinfo = helpinfo - return false - end - if string.find(helpinfo,".xml$") then - local ownscript = environment.ownscript - local helpdata = false - if ownscript then - local helpfile = file.join(file.pathpart(ownscript),helpinfo) - helpdata = io.loaddata(helpfile) - if helpdata == "" then - helpdata = false - end - end - if not helpdata then - local helpfile = resolvers.findfile(helpinfo,"tex") - helpdata = helpfile and io.loaddata(helpfile) - end - if helpdata and helpdata ~= "" then - helpinfo = helpdata - else - helpinfo = formatters["Warning: help file %a is not found."](helpinfo) - end - end - t.helpinfo = helpinfo - return string.find(t.helpinfo,"^<%?xml") and true or false -end - -function reporters.help(t,...) - if xmlfound(t) then - showhelp(t,...) - else - reporthelp(t,...) 
- end -end - -function reporters.export(t,methods,filename) - if not xmlfound(t) then - return exporthelp(t) - end - if not methods or methods == "" then - methods = environment.arguments["exporthelp"] - end - if not filename or filename == "" then - filename = environment.files[1] - end - dofile(resolvers.findfile("trac-exp.lua","tex")) - local exporters = logs.exporters - if not exporters or not methods then - return exporthelp(t) - end - if methods == "all" then - methods = table.keys(exporters) - elseif type(methods) == "string" then - methods = utilities.parsers.settings_to_array(methods) - else - return exporthelp(t) - end - if type(filename) ~= "string" or filename == "" then - filename = false - elseif file.pathpart(filename) == "" then - t.report("export file %a will not be saved on the current path (safeguard)",filename) - return - end - for i=1,#methods do - local method = methods[i] - local exporter = exporters[method] - if exporter then - local result = exporter(t,method) - if result and result ~= "" then - if filename then - local fullname = file.replacesuffix(filename,method) - t.report("saving export in %a",fullname) - io.savedata(fullname,result) - else - reporters.lines(t,result) - end - else - t.report("no output from exporter %a",method) - end - else - t.report("unknown exporter %a",method) - end - end -end +if not modules then modules = { } end modules ['trac-xml'] = { + version = 1.001, + comment = "companion to trac-log.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Application helpinfo can be defined in several ways: +-- +-- helpinfo = "big blob of help" +-- +-- helpinfo = { basic = "blob of basic help", extra = "blob of extra help" } +-- +-- helpinfo = "..." +-- +-- helpinfo = "somefile.xml" +-- +-- In the case of an xml file, the file should be either present on the same path +-- as the script, or we should be be able to locate it using the resolver. + +local formatters = string.formatters +local reporters = logs.reporters +local xmlserialize = xml.serialize +local xmlcollected = xml.collected +local xmltext = xml.text +local xmlfirst = xml.first + +-- there is no need for a newhandlers { name = "help", parent = "string" } + +local function showhelp(specification,...) + local root = xml.convert(specification.helpinfo or "") + if not root then + return + end + local xs = xml.gethandlers("string") + xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end) + xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end) + local wantedcategories = select("#",...) == 0 and true or table.tohash { ... 
} + local nofcategories = xml.count(root,"/application/flags/category") + local report = specification.report + for category in xmlcollected(root,"/application/flags/category") do + local categoryname = category.at.name or "" + if wantedcategories == true or wantedcategories[categoryname] then + if nofcategories > 1 then + report("%s options:",categoryname) + report() + end + for subcategory in xmlcollected(category,"/subcategory") do + for flag in xmlcollected(subcategory,"/flag") do + local name = flag.at.name + local value = flag.at.value + -- local short = xmlfirst(s,"/short") + -- local short = xmlserialize(short,xs) + local short = xmltext(xmlfirst(flag,"/short")) + if value then + report("--%-20s %s",formatters["%s=%s"](name,value),short) + else + report("--%-20s %s",name,short) + end + end + report() + end + end + end + for category in xmlcollected(root,"/application/examples/category") do + local title = xmltext(xmlfirst(category,"/title")) + if title and title ~= "" then + report() + report(title) + report() + end + for subcategory in xmlcollected(category,"/subcategory") do + for example in xmlcollected(subcategory,"/example") do + local command = xmltext(xmlfirst(example,"/command")) + local comment = xmltext(xmlfirst(example,"/comment")) + report(command) + end + report() + end + end + for comment in xmlcollected(root,"/application/comments/comment") do + local comment = xmltext(comment) + report() + report(comment) + report() + end +end + +local reporthelp = reporters.help +local exporthelp = reporters.export + +local function xmlfound(t) + local helpinfo = t.helpinfo + if type(helpinfo) == "table" then + return false + end + if type(helpinfo) ~= "string" then + helpinfo = "Warning: no helpinfo found." + t.helpinfo = helpinfo + return false + end + if string.find(helpinfo,".xml$") then + local ownscript = environment.ownscript + local helpdata = false + if ownscript then + local helpfile = file.join(file.pathpart(ownscript),helpinfo) + helpdata = io.loaddata(helpfile) + if helpdata == "" then + helpdata = false + end + end + if not helpdata then + local helpfile = resolvers.findfile(helpinfo,"tex") + helpdata = helpfile and io.loaddata(helpfile) + end + if helpdata and helpdata ~= "" then + helpinfo = helpdata + else + helpinfo = formatters["Warning: help file %a is not found."](helpinfo) + end + end + t.helpinfo = helpinfo + return string.find(t.helpinfo,"^<%?xml") and true or false +end + +function reporters.help(t,...) + if xmlfound(t) then + showhelp(t,...) + else + reporthelp(t,...) 
+ end +end + +function reporters.export(t,methods,filename) + if not xmlfound(t) then + return exporthelp(t) + end + if not methods or methods == "" then + methods = environment.arguments["exporthelp"] + end + if not filename or filename == "" then + filename = environment.files[1] + end + dofile(resolvers.findfile("trac-exp.lua","tex")) + local exporters = logs.exporters + if not exporters or not methods then + return exporthelp(t) + end + if methods == "all" then + methods = table.keys(exporters) + elseif type(methods) == "string" then + methods = utilities.parsers.settings_to_array(methods) + else + return exporthelp(t) + end + if type(filename) ~= "string" or filename == "" then + filename = false + elseif file.pathpart(filename) == "" then + t.report("export file %a will not be saved on the current path (safeguard)",filename) + return + end + for i=1,#methods do + local method = methods[i] + local exporter = exporters[method] + if exporter then + local result = exporter(t,method) + if result and result ~= "" then + if filename then + local fullname = file.replacesuffix(filename,method) + t.report("saving export in %a",fullname) + io.savedata(fullname,result) + else + reporters.lines(t,result) + end + else + t.report("no output from exporter %a",method) + end + else + t.report("unknown exporter %a",method) + end + end +end diff --git a/tex/context/base/type-ini.lua b/tex/context/base/type-ini.lua index 9ee97acae..fd9aa1e6d 100644 --- a/tex/context/base/type-ini.lua +++ b/tex/context/base/type-ini.lua @@ -1,76 +1,76 @@ -if not modules then modules = { } end modules ['type-ini'] = { - version = 1.001, - comment = "companion to type-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- more code will move here - -local commands, context = commands, context - -local gsub = string.gsub - -local report_typescripts = logs.reporter("fonts","typescripts") - -local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex", "type-%s.mkiv", "type-%s.tex" } -- this will be imp only - -local function action(name,foundname) - -- context.startreadingfile() - -- context.unprotect() - -- context.pushendofline() - -- context.input(foundname) - -- context.popendofline() - -- context.protect() - -- context.stopreadingfile() - context.loadfoundtypescriptfile(foundname) -end - -local name_one, name_two - -local function failure_two(name) - report_typescripts("unknown library %a or %a",name_one,name_two) -end - -local function failure_one(name) - name_two = gsub(name,"%-.*$","") - if name_two == name then - report_typescripts("unknown library %a",name_one) - else - commands.uselibrary { - name = name_two, - patterns = patterns, - action = action, - failure = failure_two, - onlyonce = false, -- will become true - } - end -end - -function commands.doprocesstypescriptfile(name) - name_one = gsub(name,"^type%-","") - commands.uselibrary { - name = name_one, - patterns = patterns, - action = action, - failure = failure_one, - onlyonce = false, -- will become true - } -end - -local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex" } - -local function failure(name) - report_typescripts("unknown library %a",name) -end - -function commands.loadtypescriptfile(name) -- a more specific name - commands.uselibrary { - name = gsub(name,"^type%-",""), - patterns = patterns, - action = action, - failure = failure, - onlyonce = false, -- will become true - } -end +if not modules then modules = { } end modules 
['type-ini'] = { + version = 1.001, + comment = "companion to type-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- more code will move here + +local commands, context = commands, context + +local gsub = string.gsub + +local report_typescripts = logs.reporter("fonts","typescripts") + +local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex", "type-%s.mkiv", "type-%s.tex" } -- this will be imp only + +local function action(name,foundname) + -- context.startreadingfile() + -- context.unprotect() + -- context.pushendofline() + -- context.input(foundname) + -- context.popendofline() + -- context.protect() + -- context.stopreadingfile() + context.loadfoundtypescriptfile(foundname) +end + +local name_one, name_two + +local function failure_two(name) + report_typescripts("unknown library %a or %a",name_one,name_two) +end + +local function failure_one(name) + name_two = gsub(name,"%-.*$","") + if name_two == name then + report_typescripts("unknown library %a",name_one) + else + commands.uselibrary { + name = name_two, + patterns = patterns, + action = action, + failure = failure_two, + onlyonce = false, -- will become true + } + end +end + +function commands.doprocesstypescriptfile(name) + name_one = gsub(name,"^type%-","") + commands.uselibrary { + name = name_one, + patterns = patterns, + action = action, + failure = failure_one, + onlyonce = false, -- will become true + } +end + +local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex" } + +local function failure(name) + report_typescripts("unknown library %a",name) +end + +function commands.loadtypescriptfile(name) -- a more specific name + commands.uselibrary { + name = gsub(name,"^type%-",""), + patterns = patterns, + action = action, + failure = failure, + onlyonce = false, -- will become true + } +end diff --git a/tex/context/base/typo-bld.lua b/tex/context/base/typo-bld.lua index ed700add7..125b9946c 100644 --- a/tex/context/base/typo-bld.lua +++ b/tex/context/base/typo-bld.lua @@ -1,185 +1,185 @@ -if not modules then modules = { } end modules ['typo-bld'] = { -- was node-par - version = 1.001, - comment = "companion to typo-bld.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local insert, remove = table.insert, table.remove - -local builders, nodes, node = builders, nodes, node - -builders.paragraphs = builders.paragraphs or { } -local parbuilders = builders.paragraphs - -parbuilders.constructors = parbuilders.constructors or { } -local constructors = parbuilders.constructors - -constructors.names = constructors.names or { } -local names = constructors.names - -constructors.numbers = constructors.numbers or { } -local numbers = constructors.numbers - -constructors.methods = constructors.methods or { } -local methods = constructors.methods - -local a_parbuilder = attributes.numbers['parbuilder'] or 999 -- why 999 -constructors.attribute = a_parbuilder - -local unsetvalue = attributes.unsetvalue -local texsetattribute = tex.setattribute -local texnest = tex.nest - -local nodepool = nodes.pool -local new_baselineskip = nodepool.baselineskip -local new_lineskip = nodepool.lineskip -local insert_node_before = node.insert_before -local hpack_node = node.hpack - -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming - -storage.register("builders/paragraphs/constructors/names", 
names, "builders.paragraphs.constructors.names") -storage.register("builders/paragraphs/constructors/numbers", numbers, "builders.paragraphs.constructors.numbers") - -local report_parbuilders = logs.reporter("parbuilders") - -local mainconstructor = nil -- not stored in format -local nofconstructors = 0 -local stack = { } - -function constructors.define(name) - nofconstructors = nofconstructors + 1 - names[nofconstructors] = name - numbers[name] = nofconstructors -end - -function constructors.set(name) --- will go - if name then - mainconstructor = numbers[name] or unsetvalue - else - mainconstructor = stack[#stack] or unsetvalue - end - texsetattribute(a_parbuilder,mainconstructor) - if mainconstructor ~= unsetvalue then - constructors.enable() - end -end - -function constructors.start(name) - local number = numbers[name] - insert(stack,number) - mainconstructor = number or unsetvalue - texsetattribute(a_parbuilder,mainconstructor) - if mainconstructor ~= unsetvalue then - constructors.enable() - end - -- report_parbuilders("start %a",name) -end - -function constructors.stop() - remove(stack) - mainconstructor = stack[#stack] or unsetvalue - texsetattribute(a_parbuilder,mainconstructor) - if mainconstructor == unsetvalue then - constructors.disable() - end - -- report_parbuilders("stop") -end - --- return values: --- --- true : tex will break itself --- false : idem but dangerous --- head : list of valid vmode nodes with last being hlist - -function constructors.handler(head,followed_by_display) - if type(head) == "boolean" then - return head - else - local attribute = head[a_parbuilder] -- or mainconstructor - if attribute then - local method = names[attribute] - if method then - local handler = methods[method] - if handler then - return handler(head,followed_by_display) - else - report_parbuilders("contructor method %a is not defined",tostring(method)) - return true -- let tex break - end - end - end - return true -- let tex break - end -end - --- just for testing - -function constructors.methods.default(head,followed_by_display) - return true -- let tex break -end - --- also for testing (now also surrounding spacing done) - -function builders.paragraphs.constructors.methods.oneline(head,followed_by_display) - -- when needed we will turn this into a helper - local t = texnest[texnest.ptr] - local h = hpack_node(head) - local d = tex.baselineskip.width - t.prevdepth - h.height - t.prevdepth = h.depth - t.prevgraf = 1 - if d < tex.lineskiplimit then - return insert_node_before(h,h,new_lineskip(tex.lineskip)) - else - return insert_node_before(h,h,new_baselineskip(d)) - end -end - --- It makes no sense to have a sequence here as we already have --- pre and post hooks and only one parbuilder makes sense, so no: --- --- local actions = nodes.tasks.actions("parbuilders") --- --- yet ... maybe some day. 
- -local actions = constructors.handler -local enabled = false - -local function processor(head,followed_by_display) - -- todo: not again in otr so we need to flag - if enabled then - starttiming(parbuilders) - local head = actions(head,followed_by_display) - stoptiming(parbuilders) - return head - else - return true -- let tex do the work - end -end - -function constructors.enable() - enabled = true -end - -function constructors.disable() - enabled = false -end - - -callbacks.register('linebreak_filter', processor, "breaking paragraps into lines") - -statistics.register("linebreak processing time", function() - return statistics.elapsedseconds(parbuilders) -end) - --- interface - -commands.defineparbuilder = constructors.define -commands.startparbuilder = constructors.start -commands.stopparbuilder = constructors.stop -commands.setparbuilder = constructors.set -commands.enableparbuilder = constructors.enable -commands.disableparbuilder = constructors.disable +if not modules then modules = { } end modules ['typo-bld'] = { -- was node-par + version = 1.001, + comment = "companion to typo-bld.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local insert, remove = table.insert, table.remove + +local builders, nodes, node = builders, nodes, node + +builders.paragraphs = builders.paragraphs or { } +local parbuilders = builders.paragraphs + +parbuilders.constructors = parbuilders.constructors or { } +local constructors = parbuilders.constructors + +constructors.names = constructors.names or { } +local names = constructors.names + +constructors.numbers = constructors.numbers or { } +local numbers = constructors.numbers + +constructors.methods = constructors.methods or { } +local methods = constructors.methods + +local a_parbuilder = attributes.numbers['parbuilder'] or 999 -- why 999 +constructors.attribute = a_parbuilder + +local unsetvalue = attributes.unsetvalue +local texsetattribute = tex.setattribute +local texnest = tex.nest + +local nodepool = nodes.pool +local new_baselineskip = nodepool.baselineskip +local new_lineskip = nodepool.lineskip +local insert_node_before = node.insert_before +local hpack_node = node.hpack + +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming + +storage.register("builders/paragraphs/constructors/names", names, "builders.paragraphs.constructors.names") +storage.register("builders/paragraphs/constructors/numbers", numbers, "builders.paragraphs.constructors.numbers") + +local report_parbuilders = logs.reporter("parbuilders") + +local mainconstructor = nil -- not stored in format +local nofconstructors = 0 +local stack = { } + +function constructors.define(name) + nofconstructors = nofconstructors + 1 + names[nofconstructors] = name + numbers[name] = nofconstructors +end + +function constructors.set(name) --- will go + if name then + mainconstructor = numbers[name] or unsetvalue + else + mainconstructor = stack[#stack] or unsetvalue + end + texsetattribute(a_parbuilder,mainconstructor) + if mainconstructor ~= unsetvalue then + constructors.enable() + end +end + +function constructors.start(name) + local number = numbers[name] + insert(stack,number) + mainconstructor = number or unsetvalue + texsetattribute(a_parbuilder,mainconstructor) + if mainconstructor ~= unsetvalue then + constructors.enable() + end + -- report_parbuilders("start %a",name) +end + +function constructors.stop() + remove(stack) + mainconstructor = 
stack[#stack] or unsetvalue + texsetattribute(a_parbuilder,mainconstructor) + if mainconstructor == unsetvalue then + constructors.disable() + end + -- report_parbuilders("stop") +end + +-- return values: +-- +-- true : tex will break itself +-- false : idem but dangerous +-- head : list of valid vmode nodes with last being hlist + +function constructors.handler(head,followed_by_display) + if type(head) == "boolean" then + return head + else + local attribute = head[a_parbuilder] -- or mainconstructor + if attribute then + local method = names[attribute] + if method then + local handler = methods[method] + if handler then + return handler(head,followed_by_display) + else + report_parbuilders("contructor method %a is not defined",tostring(method)) + return true -- let tex break + end + end + end + return true -- let tex break + end +end + +-- just for testing + +function constructors.methods.default(head,followed_by_display) + return true -- let tex break +end + +-- also for testing (now also surrounding spacing done) + +function builders.paragraphs.constructors.methods.oneline(head,followed_by_display) + -- when needed we will turn this into a helper + local t = texnest[texnest.ptr] + local h = hpack_node(head) + local d = tex.baselineskip.width - t.prevdepth - h.height + t.prevdepth = h.depth + t.prevgraf = 1 + if d < tex.lineskiplimit then + return insert_node_before(h,h,new_lineskip(tex.lineskip)) + else + return insert_node_before(h,h,new_baselineskip(d)) + end +end + +-- It makes no sense to have a sequence here as we already have +-- pre and post hooks and only one parbuilder makes sense, so no: +-- +-- local actions = nodes.tasks.actions("parbuilders") +-- +-- yet ... maybe some day. + +local actions = constructors.handler +local enabled = false + +local function processor(head,followed_by_display) + -- todo: not again in otr so we need to flag + if enabled then + starttiming(parbuilders) + local head = actions(head,followed_by_display) + stoptiming(parbuilders) + return head + else + return true -- let tex do the work + end +end + +function constructors.enable() + enabled = true +end + +function constructors.disable() + enabled = false +end + + +callbacks.register('linebreak_filter', processor, "breaking paragraps into lines") + +statistics.register("linebreak processing time", function() + return statistics.elapsedseconds(parbuilders) +end) + +-- interface + +commands.defineparbuilder = constructors.define +commands.startparbuilder = constructors.start +commands.stopparbuilder = constructors.stop +commands.setparbuilder = constructors.set +commands.enableparbuilder = constructors.enable +commands.disableparbuilder = constructors.disable diff --git a/tex/context/base/typo-brk.lua b/tex/context/base/typo-brk.lua index d6326ebeb..532909a30 100644 --- a/tex/context/base/typo-brk.lua +++ b/tex/context/base/typo-brk.lua @@ -1,302 +1,302 @@ -if not modules then modules = { } end modules ['typo-brk'] = { - version = 1.001, - comment = "companion to typo-brk.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this code dates from the beginning and is kind of experimental; it --- will be optimized and improved soon - -local next, type, tonumber = next, type, tonumber -local utfbyte, utfchar = utf.byte, utf.char -local format = string.format - -local trace_breakpoints = false trackers.register("typesetters.breakpoints", function(v) trace_breakpoints = v end) - -local 
report_breakpoints = logs.reporter("typesetting","breakpoints") - -local nodes, node = nodes, node - -local settings_to_array = utilities.parsers.settings_to_array -local copy_node = node.copy -local copy_nodelist = node.copy_list -local free_node = node.free -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after -local remove_node = nodes.remove -- ! nodes - -local tonodes = nodes.tonodes - -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue - -local nodepool = nodes.pool -local tasks = nodes.tasks - -local v_reset = interfaces.variables.reset - -local new_penalty = nodepool.penalty -local new_glue = nodepool.glue -local new_disc = nodepool.disc - -local nodecodes = nodes.nodecodes -local kerncodes = nodes.kerncodes - -local glyph_code = nodecodes.glyph -local kern_code = nodecodes.kern - -local kerning_code = kerncodes.kerning - -local typesetters = typesetters - -typesetters.breakpoints = typesetters.breakpoints or {} -local breakpoints = typesetters.breakpoints - -breakpoints.mapping = breakpoints.mapping or { } -breakpoints.numbers = breakpoints.numbers or { } - -breakpoints.methods = breakpoints.methods or { } -local methods = breakpoints.methods - -local a_breakpoints = attributes.private("breakpoint") -breakpoints.attribute = a_breakpoints - -storage.register("typesetters/breakpoints/mapping", breakpoints.mapping, "typesetters.breakpoints.mapping") - -local mapping = breakpoints.mapping -local numbers = breakpoints.mapping - -for i=1,#mapping do - local m = mapping[i] - numbers[m.name] = m -end - -local function insert_break(head,start,before,after) - insert_node_before(head,start,new_penalty(before)) - insert_node_before(head,start,new_glue(0)) - insert_node_after(head,start,new_glue(0)) - insert_node_after(head,start,new_penalty(after)) -end - -methods[1] = function(head,start) - if start.prev and start.next then - insert_break(head,start,10000,0) - end - return head, start -end - -methods[2] = function(head,start) -- ( => (- - if start.prev and start.next then - local tmp - head, start, tmp = remove_node(head,start) - head, start = insert_node_before(head,start,new_disc()) - start.attr = copy_nodelist(tmp.attr) -- todo: critical only - start.replace = tmp - local tmp, hyphen = copy_node(tmp), copy_node(tmp) - hyphen.char = languages.prehyphenchar(tmp.lang) - tmp.next, hyphen.prev = hyphen, tmp - start.post = tmp - insert_break(head,start,10000,10000) - end - return head, start -end - -methods[3] = function(head,start) -- ) => -) - if start.prev and start.next then - local tmp - head, start, tmp = remove_node(head,start) - head, start = insert_node_before(head,start,new_disc()) - start.attr = copy_nodelist(tmp.attr) -- todo: critical only - start.replace = tmp - local tmp, hyphen = copy_node(tmp), copy_node(tmp) - hyphen.char = languages.prehyphenchar(tmp.lang) - tmp.prev, hyphen.next = hyphen, tmp - start.pre = hyphen - insert_break(head,start,10000,10000) - end - return head, start -end - -methods[4] = function(head,start) -- - => - - - - if start.prev and start.next then - local tmp - head, start, tmp = remove_node(head,start) - head, start = insert_node_before(head,start,new_disc()) - start.attr = copy_nodelist(tmp.attr) -- todo: critical only - start.pre, start.post, start.replace = copy_node(tmp), copy_node(tmp), tmp - insert_break(head,start,10000,10000) - end - return head, start -end - -methods[5] = function(head,start,settings) -- x => p q r - if start.prev and start.next then - local tmp - head, start, tmp 
= remove_node(head,start) - head, start = insert_node_before(head,start,new_disc()) - local attr = tmp.attr - local font = tmp.font - start.attr = copy_nodelist(attr) -- todo: critical only - local left, right, middle = settings.left, settings.right, settings.middle - if left then - start.pre = tonodes(tostring(left),font,attr) -- was right - end - if right then - start.post = tonodes(tostring(right),font,attr) -- was left - end - if middle then - start.replace = tonodes(tostring(middle),font,attr) - end - free_node(tmp) - insert_break(head,start,10000,10000) - end - return head, start -end - -local function process(namespace,attribute,head) - local done, numbers = false, languages.numbers - local start, n = head, 0 - while start do - local id = start.id - if id == glyph_code then - local attr = start[a_breakpoints] - if attr and attr > 0 then - start[a_breakpoints] = unsetvalue -- maybe test for subtype > 256 (faster) - -- look ahead and back n chars - local data = mapping[attr] - if data then - local map = data.characters - local cmap = map[start.char] - if cmap then - local lang = start.lang - -- we do a sanity check for language - local smap = lang and lang >= 0 and lang < 0x7FFF and (cmap[numbers[lang]] or cmap[""]) - if smap then - if n >= smap.nleft then - local m = smap.nright - local next = start.next - while next do -- gamble on same attribute (not that important actually) - local id = next.id - if id == glyph_code then -- gamble on same attribute (not that important actually) - if map[next.char] then - break - elseif m == 1 then - local method = methods[smap.type] - if method then - head, start = method(head,start,smap) - done = true - end - break - else - m = m - 1 - next = next.next - end - elseif id == kern_code and next.subtype == kerning_code then - next = next.next - -- ignore intercharacter kerning, will go way - else - -- we can do clever and set n and jump ahead but ... 
not now - break - end - end - end - n = 0 - else - n = n + 1 - end - else - n = n + 1 - end - else - n = 0 - end - else - -- n = n + 1 -- if we want single char handling (|-|) then we will use grouping and then we need this - end - elseif id == kern_code and start.subtype == kerning_code then - -- ignore intercharacter kerning, will go way - else - n = 0 - end - start = start.next - end - return head, done -end - -local enabled = false - -function breakpoints.define(name) - local data = numbers[name] - if data then - -- error - else - local number = #mapping + 1 - local data = { - name = name, - number = number, - characters = { }, - } - mapping[number] = data - numbers[name] = data - end -end - -function breakpoints.setreplacement(name,char,language,settings) - char = utfbyte(char) - local data = numbers[name] - if data then - local characters = data.characters - local cmap = characters[char] - if not cmap then - cmap = { } - characters[char] = cmap - end - local left, right, middle = settings.left, settings.right, settings.middle - cmap[language or ""] = { - type = tonumber(settings.type) or 1, - nleft = tonumber(settings.nleft) or 1, - nright = tonumber(settings.nright) or 1, - left = left ~= "" and left or nil, - right = right ~= "" and right or nil, - middle = middle ~= "" and middle or nil, - } -- was { type or 1, before or 1, after or 1 } - end -end - -function breakpoints.set(n) - if n == v_reset then - n = unsetvalue - else - n = mapping[n] - if not n then - n = unsetvalue - else - if not enabled then - if trace_breakpoints then - report_breakpoints("enabling breakpoints handler") - end - tasks.enableaction("processors","typesetters.breakpoints.handler") - end - n = n.number - end - end - texattribute[a_breakpoints] = n -end - -breakpoints.handler = nodes.installattributehandler { - name = "breakpoint", - namespace = breakpoints, - processor = process, -} - --- function breakpoints.enable() --- tasks.enableaction("processors","typesetters.breakpoints.handler") --- end - --- interface - -commands.definebreakpoints = breakpoints.define -commands.definebreakpoint = breakpoints.setreplacement -commands.setbreakpoints = breakpoints.set +if not modules then modules = { } end modules ['typo-brk'] = { + version = 1.001, + comment = "companion to typo-brk.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this code dates from the beginning and is kind of experimental; it +-- will be optimized and improved soon + +local next, type, tonumber = next, type, tonumber +local utfbyte, utfchar = utf.byte, utf.char +local format = string.format + +local trace_breakpoints = false trackers.register("typesetters.breakpoints", function(v) trace_breakpoints = v end) + +local report_breakpoints = logs.reporter("typesetting","breakpoints") + +local nodes, node = nodes, node + +local settings_to_array = utilities.parsers.settings_to_array +local copy_node = node.copy +local copy_nodelist = node.copy_list +local free_node = node.free +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after +local remove_node = nodes.remove -- ! 
nodes + +local tonodes = nodes.tonodes + +local texattribute = tex.attribute +local unsetvalue = attributes.unsetvalue + +local nodepool = nodes.pool +local tasks = nodes.tasks + +local v_reset = interfaces.variables.reset + +local new_penalty = nodepool.penalty +local new_glue = nodepool.glue +local new_disc = nodepool.disc + +local nodecodes = nodes.nodecodes +local kerncodes = nodes.kerncodes + +local glyph_code = nodecodes.glyph +local kern_code = nodecodes.kern + +local kerning_code = kerncodes.kerning + +local typesetters = typesetters + +typesetters.breakpoints = typesetters.breakpoints or {} +local breakpoints = typesetters.breakpoints + +breakpoints.mapping = breakpoints.mapping or { } +breakpoints.numbers = breakpoints.numbers or { } + +breakpoints.methods = breakpoints.methods or { } +local methods = breakpoints.methods + +local a_breakpoints = attributes.private("breakpoint") +breakpoints.attribute = a_breakpoints + +storage.register("typesetters/breakpoints/mapping", breakpoints.mapping, "typesetters.breakpoints.mapping") + +local mapping = breakpoints.mapping +local numbers = breakpoints.mapping + +for i=1,#mapping do + local m = mapping[i] + numbers[m.name] = m +end + +local function insert_break(head,start,before,after) + insert_node_before(head,start,new_penalty(before)) + insert_node_before(head,start,new_glue(0)) + insert_node_after(head,start,new_glue(0)) + insert_node_after(head,start,new_penalty(after)) +end + +methods[1] = function(head,start) + if start.prev and start.next then + insert_break(head,start,10000,0) + end + return head, start +end + +methods[2] = function(head,start) -- ( => (- + if start.prev and start.next then + local tmp + head, start, tmp = remove_node(head,start) + head, start = insert_node_before(head,start,new_disc()) + start.attr = copy_nodelist(tmp.attr) -- todo: critical only + start.replace = tmp + local tmp, hyphen = copy_node(tmp), copy_node(tmp) + hyphen.char = languages.prehyphenchar(tmp.lang) + tmp.next, hyphen.prev = hyphen, tmp + start.post = tmp + insert_break(head,start,10000,10000) + end + return head, start +end + +methods[3] = function(head,start) -- ) => -) + if start.prev and start.next then + local tmp + head, start, tmp = remove_node(head,start) + head, start = insert_node_before(head,start,new_disc()) + start.attr = copy_nodelist(tmp.attr) -- todo: critical only + start.replace = tmp + local tmp, hyphen = copy_node(tmp), copy_node(tmp) + hyphen.char = languages.prehyphenchar(tmp.lang) + tmp.prev, hyphen.next = hyphen, tmp + start.pre = hyphen + insert_break(head,start,10000,10000) + end + return head, start +end + +methods[4] = function(head,start) -- - => - - - + if start.prev and start.next then + local tmp + head, start, tmp = remove_node(head,start) + head, start = insert_node_before(head,start,new_disc()) + start.attr = copy_nodelist(tmp.attr) -- todo: critical only + start.pre, start.post, start.replace = copy_node(tmp), copy_node(tmp), tmp + insert_break(head,start,10000,10000) + end + return head, start +end + +methods[5] = function(head,start,settings) -- x => p q r + if start.prev and start.next then + local tmp + head, start, tmp = remove_node(head,start) + head, start = insert_node_before(head,start,new_disc()) + local attr = tmp.attr + local font = tmp.font + start.attr = copy_nodelist(attr) -- todo: critical only + local left, right, middle = settings.left, settings.right, settings.middle + if left then + start.pre = tonodes(tostring(left),font,attr) -- was right + end + if right then + start.post = 
tonodes(tostring(right),font,attr) -- was left + end + if middle then + start.replace = tonodes(tostring(middle),font,attr) + end + free_node(tmp) + insert_break(head,start,10000,10000) + end + return head, start +end + +local function process(namespace,attribute,head) + local done, numbers = false, languages.numbers + local start, n = head, 0 + while start do + local id = start.id + if id == glyph_code then + local attr = start[a_breakpoints] + if attr and attr > 0 then + start[a_breakpoints] = unsetvalue -- maybe test for subtype > 256 (faster) + -- look ahead and back n chars + local data = mapping[attr] + if data then + local map = data.characters + local cmap = map[start.char] + if cmap then + local lang = start.lang + -- we do a sanity check for language + local smap = lang and lang >= 0 and lang < 0x7FFF and (cmap[numbers[lang]] or cmap[""]) + if smap then + if n >= smap.nleft then + local m = smap.nright + local next = start.next + while next do -- gamble on same attribute (not that important actually) + local id = next.id + if id == glyph_code then -- gamble on same attribute (not that important actually) + if map[next.char] then + break + elseif m == 1 then + local method = methods[smap.type] + if method then + head, start = method(head,start,smap) + done = true + end + break + else + m = m - 1 + next = next.next + end + elseif id == kern_code and next.subtype == kerning_code then + next = next.next + -- ignore intercharacter kerning, will go way + else + -- we can do clever and set n and jump ahead but ... not now + break + end + end + end + n = 0 + else + n = n + 1 + end + else + n = n + 1 + end + else + n = 0 + end + else + -- n = n + 1 -- if we want single char handling (|-|) then we will use grouping and then we need this + end + elseif id == kern_code and start.subtype == kerning_code then + -- ignore intercharacter kerning, will go way + else + n = 0 + end + start = start.next + end + return head, done +end + +local enabled = false + +function breakpoints.define(name) + local data = numbers[name] + if data then + -- error + else + local number = #mapping + 1 + local data = { + name = name, + number = number, + characters = { }, + } + mapping[number] = data + numbers[name] = data + end +end + +function breakpoints.setreplacement(name,char,language,settings) + char = utfbyte(char) + local data = numbers[name] + if data then + local characters = data.characters + local cmap = characters[char] + if not cmap then + cmap = { } + characters[char] = cmap + end + local left, right, middle = settings.left, settings.right, settings.middle + cmap[language or ""] = { + type = tonumber(settings.type) or 1, + nleft = tonumber(settings.nleft) or 1, + nright = tonumber(settings.nright) or 1, + left = left ~= "" and left or nil, + right = right ~= "" and right or nil, + middle = middle ~= "" and middle or nil, + } -- was { type or 1, before or 1, after or 1 } + end +end + +function breakpoints.set(n) + if n == v_reset then + n = unsetvalue + else + n = mapping[n] + if not n then + n = unsetvalue + else + if not enabled then + if trace_breakpoints then + report_breakpoints("enabling breakpoints handler") + end + tasks.enableaction("processors","typesetters.breakpoints.handler") + end + n = n.number + end + end + texattribute[a_breakpoints] = n +end + +breakpoints.handler = nodes.installattributehandler { + name = "breakpoint", + namespace = breakpoints, + processor = process, +} + +-- function breakpoints.enable() +-- tasks.enableaction("processors","typesetters.breakpoints.handler") +-- end 
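-- [editor's sketch, not part of this patch] How the pieces above fit together
-- when driven from lua; the set name "compound" and the settings values are
-- assumptions for illustration only:

local breakpoints = typesetters.breakpoints

breakpoints.define("compound")                     -- new, empty character map
breakpoints.setreplacement("compound", "-", nil, { -- method 4: - => - / - / -
    type   = 4,                                    -- pre, post and replace copies
    nleft  = 3,                                    -- require 3 glyphs before
    nright = 3,                                    -- and 3 glyphs after the char
})
-- breakpoints.set("compound") -- would enable the node handler and apply the
--                                mapping within the current attribute scope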
+ +-- interface + +commands.definebreakpoints = breakpoints.define +commands.definebreakpoint = breakpoints.setreplacement +commands.setbreakpoints = breakpoints.set diff --git a/tex/context/base/typo-cap.lua b/tex/context/base/typo-cap.lua index fdbf2e353..304d133c9 100644 --- a/tex/context/base/typo-cap.lua +++ b/tex/context/base/typo-cap.lua @@ -1,331 +1,331 @@ -if not modules then modules = { } end modules ['typo-cap'] = { - version = 1.001, - comment = "companion to typo-cap.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" - } - -local next, type = next, type -local format, insert = string.format, table.insert -local div = math.div - -local trace_casing = false trackers.register("typesetters.casing", function(v) trace_casing = v end) - -local report_casing = logs.reporter("typesetting","casing") - -local nodes, node = nodes, node - -local traverse_id = node.traverse_id -local copy_node = node.copy -local end_of_math = node.end_of_math - -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue - -local nodecodes = nodes.nodecodes -local skipcodes = nodes.skipcodes -local kerncodes = nodes.kerncodes - -local glyph_code = nodecodes.glyph -local kern_code = nodecodes.kern -local math_code = nodecodes.math - -local kerning_code = kerncodes.kerning -local userskip_code = skipcodes.userskip - -local tasks = nodes.tasks - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers -local fontchar = fonthashes.characters - -local variables = interfaces.variables -local v_reset = variables.reset - -local chardata = characters.data - -typesetters = typesetters or { } -local typesetters = typesetters - -typesetters.cases = typesetters.cases or { } -local cases = typesetters.cases - -cases.actions = { } -local actions = cases.actions -cases.attribute = c_cases -- no longer needed -local a_cases = attributes.private("case") - -local lastfont = nil - --- we use char(0) as placeholder for the larger font, so we need to remove it --- before it can do further harm --- --- we could do the whole glyph run here (till no more attributes match) but --- then we end up with more code .. maybe i will clean this up anyway as the --- lastfont hack is somewhat ugly .. 
on the other hand, we need to deal with --- cases like: --- --- \WORD {far too \Word{many \WORD{more \word{pushed} in between} useless} words} - -local uccodes = characters.uccodes -local lccodes = characters.lccodes - -local function helper(start, codes, special, attribute, once) - local char = start.char - local dc = codes[char] - if dc then - local fnt = start.font - if special then - -- will become function - if start.char == 0 then - lastfont = fnt - local prev, next = start.prev, start.next - prev.next = next - if next then - next.prev = prev - end - return prev, true - elseif lastfont and start.prev.id ~= glyph_code then - fnt = lastfont - start.font = lastfont - end - end - local ifc = fontchar[fnt] - if type(dc) == "table" then - local ok = true - for i=1,#dc do - ok = ok and ifc[dc[i]] - end - if ok then - -- tood; use generic injector - local prev, original = start, start - for i=1,#dc do - local chr = dc[i] - prev = start - if i == 1 then - start.char = chr - else - local g = copy_node(original) - g.char = chr - local next = start.next - g.prev = start - if next then - g.next = next - start.next = g - next.prev = g - end - start = g - end - end - if once then lastfont = nil end - return prev, true - end - if once then lastfont = nil end - return start, false - elseif ifc[dc] then - start.char = dc - if once then lastfont = nil end - return start, true - end - end - if once then lastfont = nil end - return start, false -end - -local registered, n = { }, 0 - -local function register(name,f) - if type(f) == "function" then - n = n + 1 - actions[n] = f - registered[name] = n - return n - else - local n = registered[f] - registered[name] = n - return n - end -end - -cases.register = register - -local function WORD(start,attribute) - lastfont = nil - return helper(start,uccodes) -end - -local function word(start,attribute) - lastfont = nil - return helper(start,lccodes) -end - -local function Word(start,attribute,attr) - lastfont = nil - local prev = start.prev - if prev and prev.id == kern_code and prev.subtype == kerning_code then - prev = prev.prev - end - if not prev or prev.id ~= glyph_code then - --- only the first character is treated - for n in traverse_id(glyph_code,start.next) do - if n[attribute] == attr then - n[attribute] = unsetvalue - else - -- break -- we can have nested mess - end - end - -- we could return the last in the range and save some scanning - -- but why bother - return helper(start,uccodes) - else - return start, false - end -end - -local function Words(start,attribute) - lastfont = nil - local prev = start.prev - if prev and prev.id == kern_code and prev.subtype == kerning_code then - prev = prev.prev - end - if not prev or prev.id ~= glyph_code then - return helper(start,uccodes) - else - return start, false - end -end - -local function capital(start,attribute) -- 3 - return helper(start,uccodes,true,attribute,true) -end - -local function Capital(start,attribute) -- 4 - return helper(start,uccodes,true,attribute,false) -end - -local function none(start) - return start, false -end - -local function random(start) - lastfont = nil - local ch = start.char - local mr = math.random - -- local tfm = fontdata[start.font].characters - local tfm = fontchar[start.font] - if lccodes[ch] then - while true do - local d = chardata[mr(1,0xFFFF)] - if d then - local uc = uccodes[d] - if uc and tfm[uc] then -- this also intercepts tables - start.char = uc - return start, true - end - end - end - elseif uccodes[ch] then - while true do - local d = chardata[mr(1,0xFFFF)] - 
if d then - local lc = lccodes[d] - if lc and tfm[lc] then -- this also intercepts tables - start.char = lc - return start, true - end - end - end - end - return start, false -end - -register(variables.WORD, WORD) -- 1 -register(variables.word, word) -- 2 -register(variables.Word, Word) -- 3 -register(variables.Words, Words) -- 4 -register(variables.capital, capital) -- 5 -register(variables.Capital, Capital) -- 6 -register(variables.none, none) -- 7 (dummy) -register(variables.random, random) -- 8 - -register(variables.cap, variables.capital) -- clone -register(variables.Cap, variables.Capital) -- clone - --- node.traverse_id_attr - -local function process(namespace,attribute,head) -- not real fast but also not used on much data - lastfont = nil - local lastattr = nil - local done = false - local start = head - while start do -- while because start can jump ahead - local id = start.id - if id == glyph_code then - local attr = start[attribute] - if attr and attr > 0 then - if attr ~= lastattr then - lastfont = nil - lastattr = attr - end - start[attribute] = unsetvalue - local action = actions[attr%100] -- map back to low number - if action then - start, ok = action(start,attribute,attr) - done = done and ok - if trace_casing then - report_casing("case trigger %a, instance %a, result %a",attr%100,div(attr,100),ok) - end - elseif trace_casing then - report_casing("unknown case trigger %a",attr) - end - end - elseif id == math_code then - start = end_of_math(start) - end - if start then -- why test - start = start.next - end - end - lastfont = nil - return head, done -end - -local m, enabled = 0, false -- a trick to make neighbouring ranges work - -function cases.set(n) - if n == v_reset then - n = unsetvalue - else - n = registered[n] or tonumber(n) - if n then - if not enabled then - tasks.enableaction("processors","typesetters.cases.handler") - if trace_casing then - report_casing("enabling case handler") - end - enabled = true - end - if m == 100 then - m = 1 - else - m = m + 1 - end - n = m * 100 + n - else - n = unsetvalue - end - end - texattribute[a_cases] = n - -- return n -- bonus -end - -cases.handler = nodes.installattributehandler { - name = "case", - namespace = cases, - processor = process, -} - --- interface - -commands.setcharactercasing = cases.set +if not modules then modules = { } end modules ['typo-cap'] = { + version = 1.001, + comment = "companion to typo-cap.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" + } + +local next, type = next, type +local format, insert = string.format, table.insert +local div = math.div + +local trace_casing = false trackers.register("typesetters.casing", function(v) trace_casing = v end) + +local report_casing = logs.reporter("typesetting","casing") + +local nodes, node = nodes, node + +local traverse_id = node.traverse_id +local copy_node = node.copy +local end_of_math = node.end_of_math + +local texattribute = tex.attribute +local unsetvalue = attributes.unsetvalue + +local nodecodes = nodes.nodecodes +local skipcodes = nodes.skipcodes +local kerncodes = nodes.kerncodes + +local glyph_code = nodecodes.glyph +local kern_code = nodecodes.kern +local math_code = nodecodes.math + +local kerning_code = kerncodes.kerning +local userskip_code = skipcodes.userskip + +local tasks = nodes.tasks + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local fontchar = fonthashes.characters + +local variables = 
interfaces.variables +local v_reset = variables.reset + +local chardata = characters.data + +typesetters = typesetters or { } +local typesetters = typesetters + +typesetters.cases = typesetters.cases or { } +local cases = typesetters.cases + +cases.actions = { } +local actions = cases.actions +cases.attribute = c_cases -- no longer needed +local a_cases = attributes.private("case") + +local lastfont = nil + +-- we use char(0) as placeholder for the larger font, so we need to remove it +-- before it can do further harm +-- +-- we could do the whole glyph run here (till no more attributes match) but +-- then we end up with more code .. maybe i will clean this up anyway as the +-- lastfont hack is somewhat ugly .. on the other hand, we need to deal with +-- cases like: +-- +-- \WORD {far too \Word{many \WORD{more \word{pushed} in between} useless} words} + +local uccodes = characters.uccodes +local lccodes = characters.lccodes + +local function helper(start, codes, special, attribute, once) + local char = start.char + local dc = codes[char] + if dc then + local fnt = start.font + if special then + -- will become function + if start.char == 0 then + lastfont = fnt + local prev, next = start.prev, start.next + prev.next = next + if next then + next.prev = prev + end + return prev, true + elseif lastfont and start.prev.id ~= glyph_code then + fnt = lastfont + start.font = lastfont + end + end + local ifc = fontchar[fnt] + if type(dc) == "table" then + local ok = true + for i=1,#dc do + ok = ok and ifc[dc[i]] + end + if ok then + -- tood; use generic injector + local prev, original = start, start + for i=1,#dc do + local chr = dc[i] + prev = start + if i == 1 then + start.char = chr + else + local g = copy_node(original) + g.char = chr + local next = start.next + g.prev = start + if next then + g.next = next + start.next = g + next.prev = g + end + start = g + end + end + if once then lastfont = nil end + return prev, true + end + if once then lastfont = nil end + return start, false + elseif ifc[dc] then + start.char = dc + if once then lastfont = nil end + return start, true + end + end + if once then lastfont = nil end + return start, false +end + +local registered, n = { }, 0 + +local function register(name,f) + if type(f) == "function" then + n = n + 1 + actions[n] = f + registered[name] = n + return n + else + local n = registered[f] + registered[name] = n + return n + end +end + +cases.register = register + +local function WORD(start,attribute) + lastfont = nil + return helper(start,uccodes) +end + +local function word(start,attribute) + lastfont = nil + return helper(start,lccodes) +end + +local function Word(start,attribute,attr) + lastfont = nil + local prev = start.prev + if prev and prev.id == kern_code and prev.subtype == kerning_code then + prev = prev.prev + end + if not prev or prev.id ~= glyph_code then + --- only the first character is treated + for n in traverse_id(glyph_code,start.next) do + if n[attribute] == attr then + n[attribute] = unsetvalue + else + -- break -- we can have nested mess + end + end + -- we could return the last in the range and save some scanning + -- but why bother + return helper(start,uccodes) + else + return start, false + end +end + +local function Words(start,attribute) + lastfont = nil + local prev = start.prev + if prev and prev.id == kern_code and prev.subtype == kerning_code then + prev = prev.prev + end + if not prev or prev.id ~= glyph_code then + return helper(start,uccodes) + else + return start, false + end +end + +local function 
capital(start,attribute) -- 3 + return helper(start,uccodes,true,attribute,true) +end + +local function Capital(start,attribute) -- 4 + return helper(start,uccodes,true,attribute,false) +end + +local function none(start) + return start, false +end + +local function random(start) + lastfont = nil + local ch = start.char + local mr = math.random + -- local tfm = fontdata[start.font].characters + local tfm = fontchar[start.font] + if lccodes[ch] then + while true do + local d = chardata[mr(1,0xFFFF)] + if d then + local uc = uccodes[d] + if uc and tfm[uc] then -- this also intercepts tables + start.char = uc + return start, true + end + end + end + elseif uccodes[ch] then + while true do + local d = chardata[mr(1,0xFFFF)] + if d then + local lc = lccodes[d] + if lc and tfm[lc] then -- this also intercepts tables + start.char = lc + return start, true + end + end + end + end + return start, false +end + +register(variables.WORD, WORD) -- 1 +register(variables.word, word) -- 2 +register(variables.Word, Word) -- 3 +register(variables.Words, Words) -- 4 +register(variables.capital, capital) -- 5 +register(variables.Capital, Capital) -- 6 +register(variables.none, none) -- 7 (dummy) +register(variables.random, random) -- 8 + +register(variables.cap, variables.capital) -- clone +register(variables.Cap, variables.Capital) -- clone + +-- node.traverse_id_attr + +local function process(namespace,attribute,head) -- not real fast but also not used on much data + lastfont = nil + local lastattr = nil + local done = false + local start = head + while start do -- while because start can jump ahead + local id = start.id + if id == glyph_code then + local attr = start[attribute] + if attr and attr > 0 then + if attr ~= lastattr then + lastfont = nil + lastattr = attr + end + start[attribute] = unsetvalue + local action = actions[attr%100] -- map back to low number + if action then + start, ok = action(start,attribute,attr) + done = done and ok + if trace_casing then + report_casing("case trigger %a, instance %a, result %a",attr%100,div(attr,100),ok) + end + elseif trace_casing then + report_casing("unknown case trigger %a",attr) + end + end + elseif id == math_code then + start = end_of_math(start) + end + if start then -- why test + start = start.next + end + end + lastfont = nil + return head, done +end + +local m, enabled = 0, false -- a trick to make neighbouring ranges work + +function cases.set(n) + if n == v_reset then + n = unsetvalue + else + n = registered[n] or tonumber(n) + if n then + if not enabled then + tasks.enableaction("processors","typesetters.cases.handler") + if trace_casing then + report_casing("enabling case handler") + end + enabled = true + end + if m == 100 then + m = 1 + else + m = m + 1 + end + n = m * 100 + n + else + n = unsetvalue + end + end + texattribute[a_cases] = n + -- return n -- bonus +end + +cases.handler = nodes.installattributehandler { + name = "case", + namespace = cases, + processor = process, +} + +-- interface + +commands.setcharactercasing = cases.set diff --git a/tex/context/base/typo-cln.lua b/tex/context/base/typo-cln.lua index be00ac10d..70d2f7b60 100644 --- a/tex/context/base/typo-cln.lua +++ b/tex/context/base/typo-cln.lua @@ -1,102 +1,102 @@ -if not modules then modules = { } end modules ['typo-cln'] = { - version = 1.001, - comment = "companion to typo-cln.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This quick and dirty hack took 
less time than listening to a CD (In --- this case Dream Theaters' Octavium. Of course extensions will take --- more time. - -local utfbyte = utf.byte - -local trace_cleaners = false trackers.register("typesetters.cleaners", function(v) trace_cleaners = v end) -local trace_autocase = false trackers.register("typesetters.cleaners.autocase",function(v) trace_autocase = v end) - -local report_cleaners = logs.reporter("nodes","cleaners") -local report_autocase = logs.reporter("nodes","autocase") - -typesetters.cleaners = typesetters.cleaners or { } -local cleaners = typesetters.cleaners - -local variables = interfaces.variables - -local nodecodes = nodes.nodecodes -local tasks = nodes.tasks - -local texattribute = tex.attribute - -local traverse_id = node.traverse_id - -local unsetvalue = attributes.unsetvalue - -local glyph_code = nodecodes.glyph -local uccodes = characters.uccodes - -local a_cleaner = attributes.private("cleaner") - -local resetter = { -- this will become an entry in char-def - [utfbyte(".")] = true -} - --- Contrary to the casing code we need to keep track of a state. --- We could extend the casing code with a status tracker but on --- the other hand we might want to apply casing afterwards. So, --- cleaning comes first. - -local function process(namespace,attribute,head) - local inline, done = false, false - for n in traverse_id(glyph_code,head) do - local char = n.char - if resetter[char] then - inline = false - elseif not inline then - local a = n[attribute] - if a == 1 then -- currently only one cleaner so no need to be fancy - local upper = uccodes[char] - if type(upper) == "table" then - -- some day, not much change that \SS ends up here - else - n.char = upper - done = true - if trace_autocase then - report_autocase("") - end - end - end - inline = true - end - end - return head, done -end - --- see typo-cap for a more advanced settings handler .. not needed now - -local enabled = false - -function cleaners.set(n) - if n == variables.reset or not tonumber(n) or n == 0 then - texattribute[a_cleaner] = unsetvalue - else - if not enabled then - tasks.enableaction("processors","typesetters.cleaners.handler") - if trace_cleaners then - report_cleaners("enabling cleaners") - end - enabled = true - end - texattribute[a_cleaner] = n - end -end - -cleaners.handler = nodes.installattributehandler { - name = "cleaner", - namespace = cleaners, - processor = process, -} - --- interface - -commands.setcharactercleaning = cleaners.set +if not modules then modules = { } end modules ['typo-cln'] = { + version = 1.001, + comment = "companion to typo-cln.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This quick and dirty hack took less time than listening to a CD (In +-- this case Dream Theaters' Octavium. Of course extensions will take +-- more time. 
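-- [editor's sketch, not part of this patch] In practice the cleaner is driven
-- by an attribute value; assuming the api defined below, a call sequence could
-- look like this (only value 1 is interpreted at the moment):

local cleaners = typesetters.cleaners

cleaners.set(1)        -- uppercase the first glyph after a sentence-ending
                       -- character (currently only "." is a resetter)
cleaners.set("reset")  -- any non-number or 0 unsets the attribute again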
+ +local utfbyte = utf.byte + +local trace_cleaners = false trackers.register("typesetters.cleaners", function(v) trace_cleaners = v end) +local trace_autocase = false trackers.register("typesetters.cleaners.autocase",function(v) trace_autocase = v end) + +local report_cleaners = logs.reporter("nodes","cleaners") +local report_autocase = logs.reporter("nodes","autocase") + +typesetters.cleaners = typesetters.cleaners or { } +local cleaners = typesetters.cleaners + +local variables = interfaces.variables + +local nodecodes = nodes.nodecodes +local tasks = nodes.tasks + +local texattribute = tex.attribute + +local traverse_id = node.traverse_id + +local unsetvalue = attributes.unsetvalue + +local glyph_code = nodecodes.glyph +local uccodes = characters.uccodes + +local a_cleaner = attributes.private("cleaner") + +local resetter = { -- this will become an entry in char-def + [utfbyte(".")] = true +} + +-- Contrary to the casing code we need to keep track of a state. +-- We could extend the casing code with a status tracker but on +-- the other hand we might want to apply casing afterwards. So, +-- cleaning comes first. + +local function process(namespace,attribute,head) + local inline, done = false, false + for n in traverse_id(glyph_code,head) do + local char = n.char + if resetter[char] then + inline = false + elseif not inline then + local a = n[attribute] + if a == 1 then -- currently only one cleaner so no need to be fancy + local upper = uccodes[char] + if type(upper) == "table" then + -- some day, not much change that \SS ends up here + else + n.char = upper + done = true + if trace_autocase then + report_autocase("") + end + end + end + inline = true + end + end + return head, done +end + +-- see typo-cap for a more advanced settings handler .. not needed now + +local enabled = false + +function cleaners.set(n) + if n == variables.reset or not tonumber(n) or n == 0 then + texattribute[a_cleaner] = unsetvalue + else + if not enabled then + tasks.enableaction("processors","typesetters.cleaners.handler") + if trace_cleaners then + report_cleaners("enabling cleaners") + end + enabled = true + end + texattribute[a_cleaner] = n + end +end + +cleaners.handler = nodes.installattributehandler { + name = "cleaner", + namespace = cleaners, + processor = process, +} + +-- interface + +commands.setcharactercleaning = cleaners.set diff --git a/tex/context/base/typo-dig.lua b/tex/context/base/typo-dig.lua index 62d17fa3b..9cf8417b8 100644 --- a/tex/context/base/typo-dig.lua +++ b/tex/context/base/typo-dig.lua @@ -1,162 +1,162 @@ -if not modules then modules = { } end modules ['typo-dig'] = { - version = 1.001, - comment = "companion to typo-dig.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- we might consider doing this after the otf pass because now osf do not work --- out well in node mode. 
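
Editor's note (illustrative, not part of the patch): the casing handler above and the digit handler in this file both pack an instance counter together with the low action number into a single attribute value (n = m * 100 + n, with m cycling from 1 to 100). Neighbouring ranges that ask for the same action therefore still receive different attribute values, which is what the "trick to make neighbouring ranges work" comment refers to; the handler later recovers the action with attr % 100 and the instance with div(attr,100). A minimal standalone sketch of the packing, with math.floor standing in for the div helper used in the modules and the names packattr/unpackattr invented for the example:

    local m = 0

    local function packattr(n)                  -- n: low action number (1..99)
      m = (m == 100) and 1 or m + 1             -- bump the instance counter, wrapping at 100
      return m * 100 + n                        -- packed attribute value
    end

    local function unpackattr(attr)
      return attr % 100, math.floor(attr/100)   -- action, instance
    end

    local a, b = packattr(2), packattr(2)       -- same action requested for two adjacent ranges
    print(a, b)                                 --> 102    202
    print(unpackattr(a))                        --> 2      1
    print(unpackattr(b))                        --> 2      2
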
- -local next, type = next, type -local format, insert = string.format, table.insert -local round, div = math.round, math.div - -local trace_digits = false trackers.register("typesetters.digits", function(v) trace_digits = v end) - -local report_digits = logs.reporter("typesetting","digits") - -local nodes, node = nodes, node - -local hpack_node = node.hpack -local traverse_id = node.traverse_id -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after - -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue - -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph - -local nodepool = nodes.pool -local tasks = nodes.tasks - -local new_glue = nodepool.glue - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers -local chardata = fonthashes.characters -local quaddata = fonthashes.quads - -local v_reset = interfaces.variables.reset - -local charbase = characters.data -local getdigitwidth = fonts.helpers.getdigitwidth - -typesetters = typesetters or { } -local typesetters = typesetters - -typesetters.digits = typesetters.digits or { } -local digits = typesetters.digits - -digits.actions = { } -local actions = digits.actions - -local a_digits = attributes.private("digits") -digits.attribute = a_digits - --- at some point we can manipulate the glyph node so then i need --- to rewrite this then - -function nodes.aligned(head,start,stop,width,how) - if how == "flushright" or how == "middle" then - head, start = insert_node_before(head,start,new_glue(0,65536,65536)) - end - if how == "flushleft" or how == "middle" then - head, stop = insert_node_after(head,stop,new_glue(0,65536,65536)) - end - local prv, nxt = start.prev, stop.next - start.prev, stop.next = nil, nil - local packed = hpack_node(start,width,"exactly") -- no directional mess here, just lr - if prv then - prv.next, packed.prev = packed, prv - end - if nxt then - nxt.prev, packed.next = packed, nxt - end - if packed.prev then - return head, packed - else - return packed, packed - end -end - -actions[1] = function(head,start,attribute,attr) - local font = start.font - local char = start.char - local unic = chardata[font][char].tounicode - local what = unic and tonumber(unic,16) or char - if charbase[what].category == "nd" then - local oldwidth, newwidth = start.width, getdigitwidth(font) - if newwidth ~= oldwidth then - if trace_digits then - report_digits("digit trigger %a, instance %a, char %C, unicode %U, delta %s", - attr%100,div(attr,100),char,what,newwidth-oldwidth) - end - head, start = nodes.aligned(head,start,start,newwidth,"middle") - return head, start, true - end - end - return head, start, false -end - -local function process(namespace,attribute,head) - local done, current, ok = false, head, false - while current do - if current.id == glyph_code then - local attr = current[attribute] - if attr and attr > 0 then - current[attribute] = unsetvalue - local action = actions[attr%100] -- map back to low number - if action then - head, current, ok = action(head,current,attribute,attr) - done = done and ok - elseif trace_digits then - report_digits("unknown digit trigger %a",attr) - end - end - end - current = current and current.next - end - return head, done -end - -local m, enabled = 0, false -- a trick to make neighbouring ranges work - -function digits.set(n) -- number or 'reset' - if n == v_reset then - n = unsetvalue - else - n = tonumber(n) - if n then - if not enabled then - 
tasks.enableaction("processors","typesetters.digits.handler") - if trace_digits then - report_digits("enabling digit handler") - end - enabled = true - end - if m == 100 then - m = 1 - else - m = m + 1 - end - n = m * 100 + n - else - n = unsetvalue - end - end - texattribute[a_digits] = n -end - -digits.handler = nodes.installattributehandler { -- we could avoid this wrapper - name = "digits", - namespace = digits, - processor = process, -} - --- interface - -commands.setdigitsmanipulation = digits.set +if not modules then modules = { } end modules ['typo-dig'] = { + version = 1.001, + comment = "companion to typo-dig.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- we might consider doing this after the otf pass because now osf do not work +-- out well in node mode. + +local next, type = next, type +local format, insert = string.format, table.insert +local round, div = math.round, math.div + +local trace_digits = false trackers.register("typesetters.digits", function(v) trace_digits = v end) + +local report_digits = logs.reporter("typesetting","digits") + +local nodes, node = nodes, node + +local hpack_node = node.hpack +local traverse_id = node.traverse_id +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after + +local texattribute = tex.attribute +local unsetvalue = attributes.unsetvalue + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph + +local nodepool = nodes.pool +local tasks = nodes.tasks + +local new_glue = nodepool.glue + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local chardata = fonthashes.characters +local quaddata = fonthashes.quads + +local v_reset = interfaces.variables.reset + +local charbase = characters.data +local getdigitwidth = fonts.helpers.getdigitwidth + +typesetters = typesetters or { } +local typesetters = typesetters + +typesetters.digits = typesetters.digits or { } +local digits = typesetters.digits + +digits.actions = { } +local actions = digits.actions + +local a_digits = attributes.private("digits") +digits.attribute = a_digits + +-- at some point we can manipulate the glyph node so then i need +-- to rewrite this then + +function nodes.aligned(head,start,stop,width,how) + if how == "flushright" or how == "middle" then + head, start = insert_node_before(head,start,new_glue(0,65536,65536)) + end + if how == "flushleft" or how == "middle" then + head, stop = insert_node_after(head,stop,new_glue(0,65536,65536)) + end + local prv, nxt = start.prev, stop.next + start.prev, stop.next = nil, nil + local packed = hpack_node(start,width,"exactly") -- no directional mess here, just lr + if prv then + prv.next, packed.prev = packed, prv + end + if nxt then + nxt.prev, packed.next = packed, nxt + end + if packed.prev then + return head, packed + else + return packed, packed + end +end + +actions[1] = function(head,start,attribute,attr) + local font = start.font + local char = start.char + local unic = chardata[font][char].tounicode + local what = unic and tonumber(unic,16) or char + if charbase[what].category == "nd" then + local oldwidth, newwidth = start.width, getdigitwidth(font) + if newwidth ~= oldwidth then + if trace_digits then + report_digits("digit trigger %a, instance %a, char %C, unicode %U, delta %s", + attr%100,div(attr,100),char,what,newwidth-oldwidth) + end + head, start = nodes.aligned(head,start,start,newwidth,"middle") + return head, start, 
true + end + end + return head, start, false +end + +local function process(namespace,attribute,head) + local done, current, ok = false, head, false + while current do + if current.id == glyph_code then + local attr = current[attribute] + if attr and attr > 0 then + current[attribute] = unsetvalue + local action = actions[attr%100] -- map back to low number + if action then + head, current, ok = action(head,current,attribute,attr) + done = done and ok + elseif trace_digits then + report_digits("unknown digit trigger %a",attr) + end + end + end + current = current and current.next + end + return head, done +end + +local m, enabled = 0, false -- a trick to make neighbouring ranges work + +function digits.set(n) -- number or 'reset' + if n == v_reset then + n = unsetvalue + else + n = tonumber(n) + if n then + if not enabled then + tasks.enableaction("processors","typesetters.digits.handler") + if trace_digits then + report_digits("enabling digit handler") + end + enabled = true + end + if m == 100 then + m = 1 + else + m = m + 1 + end + n = m * 100 + n + else + n = unsetvalue + end + end + texattribute[a_digits] = n +end + +digits.handler = nodes.installattributehandler { -- we could avoid this wrapper + name = "digits", + namespace = digits, + processor = process, +} + +-- interface + +commands.setdigitsmanipulation = digits.set diff --git a/tex/context/base/typo-dir.lua b/tex/context/base/typo-dir.lua index 7e5f8c2d3..f02395475 100644 --- a/tex/context/base/typo-dir.lua +++ b/tex/context/base/typo-dir.lua @@ -1,463 +1,463 @@ -if not modules then modules = { } end modules ['typo-dir'] = { - version = 1.001, - comment = "companion to typo-dir.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: also use end_of_math here? - -local next, type = next, type -local format, insert, sub, find, match = string.format, table.insert, string.sub, string.find, string.match -local utfchar = utf.char - --- vertical space handler - -local nodes, node = nodes, node - -local trace_directions = false trackers.register("typesetters.directions", function(v) trace_directions = v end) - -local report_directions = logs.reporter("typesetting","directions") - -local traverse_id = node.traverse_id -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after -local remove_node = nodes.remove - -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue - -local nodecodes = nodes.nodecodes -local whatcodes = nodes.whatcodes -local mathcodes = nodes.mathcodes - -local tasks = nodes.tasks - -local glyph_code = nodecodes.glyph -local whatsit_code = nodecodes.whatsit -local math_code = nodecodes.math - -local localpar_code = whatcodes.localpar -local dir_code = whatcodes.dir - -local nodepool = nodes.pool - -local new_textdir = nodepool.textdir - -local beginmath_code = mathcodes.beginmath -local endmath_code = mathcodes.endmath - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers -local fontchar = fonthashes.characters - -local chardata = characters.data -local chardirs = characters.directions -- maybe make a special mirror table - ---~ Analysis by Idris: ---~ ---~ 1. Assuming the reading- vs word-order distinction (bidi-char types) is governing; ---~ 2. Assuming that 'ARAB' represents an actual arabic string in raw input order, not word-order; ---~ 3. 
Assuming that 'BARA' represent the correct RL word order; ---~ ---~ Then we have, with input: LATIN ARAB ---~ ---~ \textdir TLT LATIN ARAB => LATIN BARA ---~ \textdir TRT LATIN ARAB => LATIN BARA ---~ \textdir TRT LRO LATIN ARAB => LATIN ARAB ---~ \textdir TLT LRO LATIN ARAB => LATIN ARAB ---~ \textdir TLT RLO LATIN ARAB => NITAL ARAB ---~ \textdir TRT RLO LATIN ARAB => NITAL ARAB - --- elseif d == "es" then -- European Number Separator --- elseif d == "et" then -- European Number Terminator --- elseif d == "cs" then -- Common Number Separator --- elseif d == "nsm" then -- Non-Spacing Mark --- elseif d == "bn" then -- Boundary Neutral --- elseif d == "b" then -- Paragraph Separator --- elseif d == "s" then -- Segment Separator --- elseif d == "ws" then -- Whitespace --- elseif d == "on" then -- Other Neutrals - -typesetters.directions = typesetters.directions or { } -local directions = typesetters.directions - -local a_state = attributes.private('state') -local a_directions = attributes.private('directions') - -local skipmath = true -local strip = false - --- todo: delayed inserts here --- todo: get rid of local functions here - --- beware, math adds whatsits afterwards so that will mess things up - -local finish, autodir, embedded, override, done = nil, 0, 0, 0, false -local list, glyphs = nil, false -local finished, finidir, finipos = nil, nil, 1 -local head, current, inserted = nil, nil, nil - -local function finish_auto_before() - head, inserted = insert_node_before(head,current,new_textdir("-"..finish)) - finished, finidir = inserted, finish - if trace_directions then - insert(list,#list,format("auto finish inserted before: %s",finish)) - finipos = #list-1 - end - finish, autodir, done = nil, 0, true -end - -local function finish_auto_after() - head, current = insert_node_after(head,current,new_textdir("-"..finish)) - finished, finidir = current, finish - if trace_directions then - list[#list+1] = format("auto finish inserted after: %s",finish) - finipos = #list - end - finish, autodir, done = nil, 0, true -end - -local function force_auto_left_before() - if finish then - finish_auto_before() - end - if embedded >= 0 then - finish, autodir, done = "TLT", 1, true - else - finish, autodir, done = "TRT", -1, true - end - if finidir == finish then - head = remove_node(head,finished,true) - if trace_directions then - list[finipos] = list[finipos] .. " (deleted afterwards)" - insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) - end - else - head, inserted = insert_node_before(head,current,new_textdir("+"..finish)) - if trace_directions then - insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) - end - end -end - -local function force_auto_right_before() - if finish then - finish_auto_before() - end - if embedded <= 0 then - finish, autodir, done = "TRT", -1, true - else - finish, autodir, done = "TLT", 1, true - end - if finidir == finish then - head = remove_node(head,finished,true) - if trace_directions then - list[finipos] = list[finipos] .. 
" (deleted afterwards)" - insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) - end - else - head, inserted = insert_node_before(head,current,new_textdir("+"..finish)) - if trace_directions then - insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) - end - end -end - --- todo: use new dir functions - -local s_isol = fonts.analyzers.states.isol - -function directions.process(namespace,attribute,start) -- todo: make faster - if not start.next then - return start, false - end - head, current, inserted = start, start, nil - finish, autodir, embedded, override, done = nil, 0, 0, 0, false - list, glyphs = trace_directions and { }, false - finished, finidir, finipos = nil, nil, 1 - local stack, top, obsolete = { }, 0, { } - local lro, rlo, prevattr, inmath = false, false, 0, false - while current do - local id = current.id - if skipmath and id == math_code then - local subtype = current.subtype - if subtype == beginmath_code then - inmath = true - elseif subtype == endmath_code then - inmath = false - else - -- todo - end - current = current.next - elseif inmath then - current = current.next - else - local attr = current[attribute] - if attr and attr > 0 then - -- current[attribute] = unsetvalue -- slow, needed? - if attr == 1 then - -- bidi parsing mode - elseif attr ~= prevattr then - -- no pop, grouped driven (2=normal,3=lro,4=rlo) - if attr == 3 then - if trace_directions then - list[#list+1] = format("override right -> left (lro) (bidi=%s)",attr) - end - lro, rlo = true, false - elseif attr == 4 then - if trace_directions then - list[#list+1] = format("override left -> right (rlo) (bidi=%s)",attr) - end - lro, rlo = false, true - else - if trace_directions and - current ~= head then list[#list+1] = format("override reset (bidi=%s)",attr) - end - lro, rlo = false, false - end - prevattr = attr - end - end - if id == glyph_code then - glyphs = true - if attr and attr > 0 then - local char = current.char - local d = chardirs[char] - if rlo or override > 0 then - if d == "l" then - if trace_directions then - list[#list+1] = format("char %s (%s / U+%04X) of class %s overidden to r (bidi=%s)",utfchar(char),char,char,d,attr) - end - d = "r" - elseif trace_directions then - if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal - list[#list+1] = format("override char of class %s (bidi=%s)",d,attr) - else -- todo: rle lre - list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr) - end - end - elseif lro or override < 0 then - if d == "r" or d == "al" then - current[a_state] = s_isol -- maybe better have a special bidi attr value -> override (9) -> todo - if trace_directions then - list[#list+1] = format("char %s (%s / U+%04X) of class %s overidden to l (bidi=%s) (state=isol)",utfchar(char),char,char,d,attr) - end - d = "l" - elseif trace_directions then - if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal - list[#list+1] = format("override char of class %s (bidi=%s)",d,attr) - else -- todo: rle lre - list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr) - end - end - elseif trace_directions then - if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal - list[#list+1] = format("override char of class %s (bidi=%s)",d,attr) - else -- todo: rle lre - list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr) - end - end - if d == "on" then - 
local mirror = chardata[char].mirror -- maybe make a special mirror table - if mirror and fontchar[current.font][mirror] then - -- todo: set attribute - if autodir < 0 then - current.char = mirror - done = true - --~ elseif left or autodir > 0 then - --~ if not is_right(current.prev) then - --~ current.char = mirror - --~ done = true - --~ end - end - end - elseif d == "l" or d == "en" then -- european number - if autodir <= 0 then -- could be option - force_auto_left_before() - end - elseif d == "r" or d == "al" then -- arabic number - if autodir >= 0 then - force_auto_right_before() - end - elseif d == "an" then -- arabic number - -- actually this is language dependent ... --- if autodir <= 0 then --- force_auto_left_before() --- end - if autodir >= 0 then - force_auto_right_before() - end - elseif d == "lro" then -- Left-to-Right Override -> right becomes left - if trace_directions then - list[#list+1] = "override right -> left" - end - top = top + 1 - stack[top] = { override, embedded } - override = -1 - obsolete[#obsolete+1] = current - elseif d == "rlo" then -- Right-to-Left Override -> left becomes right - if trace_directions then - list[#list+1] = "override left -> right" - end - top = top + 1 - stack[top] = { override, embedded } - override = 1 - obsolete[#obsolete+1] = current - elseif d == "lre" then -- Left-to-Right Embedding -> TLT - if trace_directions then - list[#list+1] = "embedding left -> right" - end - top = top + 1 - stack[top] = { override, embedded } - embedded = 1 - obsolete[#obsolete+1] = current - elseif d == "rle" then -- Right-to-Left Embedding -> TRT - if trace_directions then - list[#list+1] = "embedding right -> left" - end - top = top + 1 - stack[top] = { override, embedded } - embedded = -1 -- was 1 - obsolete[#obsolete+1] = current - elseif d == "pdf" then -- Pop Directional Format - -- override = 0 - if top > 0 then - local s = stack[top] - override, embedded = s[1], s[2] - top = top - 1 - if trace_directions then - list[#list+1] = format("state: override: %s, embedded: %s, autodir: %s",override,embedded,autodir) - end - else - if trace_directions then - list[#list+1] = "pop (error, too many pops)" - end - end - obsolete[#obsolete+1] = current - end - elseif trace_directions then - local char = current.char - local d = chardirs[char] - list[#list+1] = format("char %s (%s / U+%04X) of class %s (no bidi)",utfchar(char),char,char,d or "?") - end - elseif id == whatsit_code then - if finish then - finish_auto_before() - end - local subtype = current.subtype - if subtype == localpar_code then - local dir = current.dir - local d = sub(dir,2,2) - if d == 'R' then -- find(dir,".R.") / dir == "TRT" - autodir = -1 - else - autodir = 1 - end - -- embedded = autodir - if trace_directions then - list[#list+1] = format("pardir %s",dir) - end - elseif subtype == dir_code then - local dir = current.dir - -- local sign = sub(dir,1,1) - -- local dire = sub(dir,3,3) - local sign, dire = match(dir,"^(.).(.)") - if dire == "R" then - if sign == "+" then - finish, autodir = "TRT", -1 - else - finish, autodir = nil, 0 - end - else - if sign == "+" then - finish, autodir = "TLT", 1 - else - finish, autodir = nil, 0 - end - end - if trace_directions then - list[#list+1] = format("textdir %s",dir) - end - end - else - if trace_directions then - list[#list+1] = format("node %s (subtype %s)",nodecodes[id],current.subtype) - end - if finish then - finish_auto_before() - end - end - local cn = current.next - if not cn then - if finish then - finish_auto_after() - end - end - current = 
cn - end - end - if trace_directions and glyphs then - report_directions("start log") - for i=1,#list do - report_directions("%02i: %s",i,list[i]) - end - report_directions("stop log") - end - if done and strip then - local n = #obsolete - if n > 0 then - for i=1,n do - remove_node(head,obsolete[i],true) - end - report_directions("%s character nodes removed",n) - end - end - return head, done -end - ---~ local function is_right(n) -- keep ! ---~ if n then ---~ local id = n.id ---~ if id == glyph_code then ---~ local attr = n[attribute] ---~ if attr and attr > 0 then ---~ local d = chardirs[n.char] ---~ if d == "r" or d == "al" then -- override ---~ return true ---~ end ---~ end ---~ end ---~ end ---~ return false ---~ end - ---~ function directions.enable() ---~ tasks.enableaction("processors","directions.handler") ---~ end - -local enabled = false - -function directions.set(n) -- todo: names and numbers - if not enabled then - if trace_directions then - report_breakpoints("enabling directions handler") - end - tasks.enableaction("processors","typesetters.directions.handler") - enabled = true - end - if not n or n == 0 then - n = unsetvalue - -- maybe tracing - end - texattribute[a_directions] = n -end - -commands.setdirection = directions.set - -directions.handler = nodes.installattributehandler { - name = "directions", - namespace = directions, - processor = directions.process, -} +if not modules then modules = { } end modules ['typo-dir'] = { + version = 1.001, + comment = "companion to typo-dir.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: also use end_of_math here? + +local next, type = next, type +local format, insert, sub, find, match = string.format, table.insert, string.sub, string.find, string.match +local utfchar = utf.char + +-- vertical space handler + +local nodes, node = nodes, node + +local trace_directions = false trackers.register("typesetters.directions", function(v) trace_directions = v end) + +local report_directions = logs.reporter("typesetting","directions") + +local traverse_id = node.traverse_id +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after +local remove_node = nodes.remove + +local texattribute = tex.attribute +local unsetvalue = attributes.unsetvalue + +local nodecodes = nodes.nodecodes +local whatcodes = nodes.whatcodes +local mathcodes = nodes.mathcodes + +local tasks = nodes.tasks + +local glyph_code = nodecodes.glyph +local whatsit_code = nodecodes.whatsit +local math_code = nodecodes.math + +local localpar_code = whatcodes.localpar +local dir_code = whatcodes.dir + +local nodepool = nodes.pool + +local new_textdir = nodepool.textdir + +local beginmath_code = mathcodes.beginmath +local endmath_code = mathcodes.endmath + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local fontchar = fonthashes.characters + +local chardata = characters.data +local chardirs = characters.directions -- maybe make a special mirror table + +--~ Analysis by Idris: +--~ +--~ 1. Assuming the reading- vs word-order distinction (bidi-char types) is governing; +--~ 2. Assuming that 'ARAB' represents an actual arabic string in raw input order, not word-order; +--~ 3. 
Assuming that 'BARA' represent the correct RL word order; +--~ +--~ Then we have, with input: LATIN ARAB +--~ +--~ \textdir TLT LATIN ARAB => LATIN BARA +--~ \textdir TRT LATIN ARAB => LATIN BARA +--~ \textdir TRT LRO LATIN ARAB => LATIN ARAB +--~ \textdir TLT LRO LATIN ARAB => LATIN ARAB +--~ \textdir TLT RLO LATIN ARAB => NITAL ARAB +--~ \textdir TRT RLO LATIN ARAB => NITAL ARAB + +-- elseif d == "es" then -- European Number Separator +-- elseif d == "et" then -- European Number Terminator +-- elseif d == "cs" then -- Common Number Separator +-- elseif d == "nsm" then -- Non-Spacing Mark +-- elseif d == "bn" then -- Boundary Neutral +-- elseif d == "b" then -- Paragraph Separator +-- elseif d == "s" then -- Segment Separator +-- elseif d == "ws" then -- Whitespace +-- elseif d == "on" then -- Other Neutrals + +typesetters.directions = typesetters.directions or { } +local directions = typesetters.directions + +local a_state = attributes.private('state') +local a_directions = attributes.private('directions') + +local skipmath = true +local strip = false + +-- todo: delayed inserts here +-- todo: get rid of local functions here + +-- beware, math adds whatsits afterwards so that will mess things up + +local finish, autodir, embedded, override, done = nil, 0, 0, 0, false +local list, glyphs = nil, false +local finished, finidir, finipos = nil, nil, 1 +local head, current, inserted = nil, nil, nil + +local function finish_auto_before() + head, inserted = insert_node_before(head,current,new_textdir("-"..finish)) + finished, finidir = inserted, finish + if trace_directions then + insert(list,#list,format("auto finish inserted before: %s",finish)) + finipos = #list-1 + end + finish, autodir, done = nil, 0, true +end + +local function finish_auto_after() + head, current = insert_node_after(head,current,new_textdir("-"..finish)) + finished, finidir = current, finish + if trace_directions then + list[#list+1] = format("auto finish inserted after: %s",finish) + finipos = #list + end + finish, autodir, done = nil, 0, true +end + +local function force_auto_left_before() + if finish then + finish_auto_before() + end + if embedded >= 0 then + finish, autodir, done = "TLT", 1, true + else + finish, autodir, done = "TRT", -1, true + end + if finidir == finish then + head = remove_node(head,finished,true) + if trace_directions then + list[finipos] = list[finipos] .. " (deleted afterwards)" + insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) + end + else + head, inserted = insert_node_before(head,current,new_textdir("+"..finish)) + if trace_directions then + insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) + end + end +end + +local function force_auto_right_before() + if finish then + finish_auto_before() + end + if embedded <= 0 then + finish, autodir, done = "TRT", -1, true + else + finish, autodir, done = "TLT", 1, true + end + if finidir == finish then + head = remove_node(head,finished,true) + if trace_directions then + list[finipos] = list[finipos] .. 
" (deleted afterwards)" + insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) + end + else + head, inserted = insert_node_before(head,current,new_textdir("+"..finish)) + if trace_directions then + insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) + end + end +end + +-- todo: use new dir functions + +local s_isol = fonts.analyzers.states.isol + +function directions.process(namespace,attribute,start) -- todo: make faster + if not start.next then + return start, false + end + head, current, inserted = start, start, nil + finish, autodir, embedded, override, done = nil, 0, 0, 0, false + list, glyphs = trace_directions and { }, false + finished, finidir, finipos = nil, nil, 1 + local stack, top, obsolete = { }, 0, { } + local lro, rlo, prevattr, inmath = false, false, 0, false + while current do + local id = current.id + if skipmath and id == math_code then + local subtype = current.subtype + if subtype == beginmath_code then + inmath = true + elseif subtype == endmath_code then + inmath = false + else + -- todo + end + current = current.next + elseif inmath then + current = current.next + else + local attr = current[attribute] + if attr and attr > 0 then + -- current[attribute] = unsetvalue -- slow, needed? + if attr == 1 then + -- bidi parsing mode + elseif attr ~= prevattr then + -- no pop, grouped driven (2=normal,3=lro,4=rlo) + if attr == 3 then + if trace_directions then + list[#list+1] = format("override right -> left (lro) (bidi=%s)",attr) + end + lro, rlo = true, false + elseif attr == 4 then + if trace_directions then + list[#list+1] = format("override left -> right (rlo) (bidi=%s)",attr) + end + lro, rlo = false, true + else + if trace_directions and + current ~= head then list[#list+1] = format("override reset (bidi=%s)",attr) + end + lro, rlo = false, false + end + prevattr = attr + end + end + if id == glyph_code then + glyphs = true + if attr and attr > 0 then + local char = current.char + local d = chardirs[char] + if rlo or override > 0 then + if d == "l" then + if trace_directions then + list[#list+1] = format("char %s (%s / U+%04X) of class %s overidden to r (bidi=%s)",utfchar(char),char,char,d,attr) + end + d = "r" + elseif trace_directions then + if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal + list[#list+1] = format("override char of class %s (bidi=%s)",d,attr) + else -- todo: rle lre + list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr) + end + end + elseif lro or override < 0 then + if d == "r" or d == "al" then + current[a_state] = s_isol -- maybe better have a special bidi attr value -> override (9) -> todo + if trace_directions then + list[#list+1] = format("char %s (%s / U+%04X) of class %s overidden to l (bidi=%s) (state=isol)",utfchar(char),char,char,d,attr) + end + d = "l" + elseif trace_directions then + if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal + list[#list+1] = format("override char of class %s (bidi=%s)",d,attr) + else -- todo: rle lre + list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr) + end + end + elseif trace_directions then + if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal + list[#list+1] = format("override char of class %s (bidi=%s)",d,attr) + else -- todo: rle lre + list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr) + end + end + if d == "on" then + 
local mirror = chardata[char].mirror -- maybe make a special mirror table + if mirror and fontchar[current.font][mirror] then + -- todo: set attribute + if autodir < 0 then + current.char = mirror + done = true + --~ elseif left or autodir > 0 then + --~ if not is_right(current.prev) then + --~ current.char = mirror + --~ done = true + --~ end + end + end + elseif d == "l" or d == "en" then -- european number + if autodir <= 0 then -- could be option + force_auto_left_before() + end + elseif d == "r" or d == "al" then -- arabic number + if autodir >= 0 then + force_auto_right_before() + end + elseif d == "an" then -- arabic number + -- actually this is language dependent ... +-- if autodir <= 0 then +-- force_auto_left_before() +-- end + if autodir >= 0 then + force_auto_right_before() + end + elseif d == "lro" then -- Left-to-Right Override -> right becomes left + if trace_directions then + list[#list+1] = "override right -> left" + end + top = top + 1 + stack[top] = { override, embedded } + override = -1 + obsolete[#obsolete+1] = current + elseif d == "rlo" then -- Right-to-Left Override -> left becomes right + if trace_directions then + list[#list+1] = "override left -> right" + end + top = top + 1 + stack[top] = { override, embedded } + override = 1 + obsolete[#obsolete+1] = current + elseif d == "lre" then -- Left-to-Right Embedding -> TLT + if trace_directions then + list[#list+1] = "embedding left -> right" + end + top = top + 1 + stack[top] = { override, embedded } + embedded = 1 + obsolete[#obsolete+1] = current + elseif d == "rle" then -- Right-to-Left Embedding -> TRT + if trace_directions then + list[#list+1] = "embedding right -> left" + end + top = top + 1 + stack[top] = { override, embedded } + embedded = -1 -- was 1 + obsolete[#obsolete+1] = current + elseif d == "pdf" then -- Pop Directional Format + -- override = 0 + if top > 0 then + local s = stack[top] + override, embedded = s[1], s[2] + top = top - 1 + if trace_directions then + list[#list+1] = format("state: override: %s, embedded: %s, autodir: %s",override,embedded,autodir) + end + else + if trace_directions then + list[#list+1] = "pop (error, too many pops)" + end + end + obsolete[#obsolete+1] = current + end + elseif trace_directions then + local char = current.char + local d = chardirs[char] + list[#list+1] = format("char %s (%s / U+%04X) of class %s (no bidi)",utfchar(char),char,char,d or "?") + end + elseif id == whatsit_code then + if finish then + finish_auto_before() + end + local subtype = current.subtype + if subtype == localpar_code then + local dir = current.dir + local d = sub(dir,2,2) + if d == 'R' then -- find(dir,".R.") / dir == "TRT" + autodir = -1 + else + autodir = 1 + end + -- embedded = autodir + if trace_directions then + list[#list+1] = format("pardir %s",dir) + end + elseif subtype == dir_code then + local dir = current.dir + -- local sign = sub(dir,1,1) + -- local dire = sub(dir,3,3) + local sign, dire = match(dir,"^(.).(.)") + if dire == "R" then + if sign == "+" then + finish, autodir = "TRT", -1 + else + finish, autodir = nil, 0 + end + else + if sign == "+" then + finish, autodir = "TLT", 1 + else + finish, autodir = nil, 0 + end + end + if trace_directions then + list[#list+1] = format("textdir %s",dir) + end + end + else + if trace_directions then + list[#list+1] = format("node %s (subtype %s)",nodecodes[id],current.subtype) + end + if finish then + finish_auto_before() + end + end + local cn = current.next + if not cn then + if finish then + finish_auto_after() + end + end + current = 
cn + end + end + if trace_directions and glyphs then + report_directions("start log") + for i=1,#list do + report_directions("%02i: %s",i,list[i]) + end + report_directions("stop log") + end + if done and strip then + local n = #obsolete + if n > 0 then + for i=1,n do + remove_node(head,obsolete[i],true) + end + report_directions("%s character nodes removed",n) + end + end + return head, done +end + +--~ local function is_right(n) -- keep ! +--~ if n then +--~ local id = n.id +--~ if id == glyph_code then +--~ local attr = n[attribute] +--~ if attr and attr > 0 then +--~ local d = chardirs[n.char] +--~ if d == "r" or d == "al" then -- override +--~ return true +--~ end +--~ end +--~ end +--~ end +--~ return false +--~ end + +--~ function directions.enable() +--~ tasks.enableaction("processors","directions.handler") +--~ end + +local enabled = false + +function directions.set(n) -- todo: names and numbers + if not enabled then + if trace_directions then + report_breakpoints("enabling directions handler") + end + tasks.enableaction("processors","typesetters.directions.handler") + enabled = true + end + if not n or n == 0 then + n = unsetvalue + -- maybe tracing + end + texattribute[a_directions] = n +end + +commands.setdirection = directions.set + +directions.handler = nodes.installattributehandler { + name = "directions", + namespace = directions, + processor = directions.process, +} diff --git a/tex/context/base/typo-ini.lua b/tex/context/base/typo-ini.lua index c45d29664..42c752c31 100644 --- a/tex/context/base/typo-ini.lua +++ b/tex/context/base/typo-ini.lua @@ -1,11 +1,11 @@ -if not modules then modules = { } end modules ['typo-ini'] = { - version = 1.001, - comment = "companion to typo-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- nothing yet - -typesetters = typesetters or { } +if not modules then modules = { } end modules ['typo-ini'] = { + version = 1.001, + comment = "companion to typo-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- nothing yet + +typesetters = typesetters or { } diff --git a/tex/context/base/typo-itc.lua b/tex/context/base/typo-itc.lua index b39ea2f23..bee2cf41e 100644 --- a/tex/context/base/typo-itc.lua +++ b/tex/context/base/typo-itc.lua @@ -1,256 +1,256 @@ -if not modules then modules = { } end modules ['typo-itc'] = { - version = 1.001, - comment = "companion to typo-itc.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local utfchar = utf.char - -local trace_italics = false trackers.register("typesetters.italics", function(v) trace_italics = v end) - -local report_italics = logs.reporter("nodes","italics") - -typesetters.italics = typesetters.italics or { } -local italics = typesetters.italics - -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph -local kern_code = nodecodes.kern -local glue_code = nodecodes.glue -local disc_code = nodecodes.disc -local math_code = nodecodes.math - -local tasks = nodes.tasks - -local insert_node_after = node.insert_after -local delete_node = nodes.delete -local end_of_math = node.end_of_math - -local texattribute = tex.attribute -local a_italics = attributes.private("italics") -local unsetvalue = attributes.unsetvalue - -local new_correction_kern = 
nodes.pool.fontkern -local new_correction_glue = nodes.pool.glue - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers -local italicsdata = fonthashes.italics - -local forcedvariant = false - -function typesetters.italics.forcevariant(variant) - forcedvariant = variant -end - -local function setitalicinfont(font,char) - local tfmdata = fontdata[font] - local character = tfmdata.characters[char] - if character then - local italic = character.italic_correction - if not italic then - local autoitalicamount = tfmdata.properties.autoitalicamount or 0 - if autoitalicamount ~= 0 then - local description = tfmdata.descriptions[char] - if description then - italic = description.italic - if not italic then - local boundingbox = description.boundingbox - italic = boundingbox[3] - description.width + autoitalicamount - if italic < 0 then -- < 0 indicates no overshoot or a very small auto italic - italic = 0 - end - end - if italic ~= 0 then - italic = italic * tfmdata.parameters.hfactor - end - end - end - if trace_italics then - report_italics("setting italic correction of %C of font %a to %p",char,font,italic) - end - character.italic_correction = italic or 0 - end - return italic - else - return 0 - end -end - --- todo: clear attribute - -local function process(namespace,attribute,head) - local done = false - local italic = 0 - local lastfont = nil - local lastattr = nil - local previous = nil - local prevchar = nil - local current = head - local inserted = nil - while current do - local id = current.id - if id == glyph_code then - local font = current.font - local char = current.char - local data = italicsdata[font] - if font ~= lastfont then - if italic ~= 0 then - if data then - if trace_italics then - report_italics("ignoring %p between italic %C and italic %C",italic,prevchar,char) - end - else - if trace_italics then - report_italics("inserting %p between italic %C and regular %C",italic,prevchar,char) - end - insert_node_after(head,previous,new_correction_kern(italic)) - done = true - end - elseif inserted and data then - if trace_italics then - report_italics("deleting last correction before %C",char) - end - delete_node(head,inserted) - else - -- nothing - end - lastfont = font - end - if data then - local attr = forcedvariant or current[attribute] - if attr and attr > 0 then - local cd = data[char] - if not cd then - -- this really can happen - italic = 0 - else - italic = cd.italic or cd.italic_correction - if not italic then - italic = setitalicinfont(font,char) -- calculated once - -- italic = 0 - end - if italic ~= 0 then - lastfont = font - lastattr = attr - previous = current - prevchar = char - end - end - else - italic = 0 - end - else - italic = 0 - end - inserted = nil - elseif id == disc_code then - -- skip - elseif id == kern_code then - inserted = nil - italic = 0 - elseif id == glue_code then - if italic ~= 0 then - if trace_italics then - report_italics("inserting %p between italic %C and glue",italic,prevchar) - end - inserted = new_correction_glue(italic) -- maybe just add ? 
else problem with penalties - insert_node_after(head,previous,inserted) - italic = 0 - done = true - end - elseif id == math_code then - current = end_of_math(current) - elseif italic ~= 0 then - if trace_italics then - report_italics("inserting %p between italic %C and whatever",italic,prevchar) - end - inserted = nil - insert_node_after(head,previous,new_correction_kern(italic)) - italic = 0 - done = true - end - current = current.next - end - if italic ~= 0 and lastattr > 1 then -- more control is needed here - if trace_italics then - report_italics("inserting %p between italic %C and end of list",italic,prevchar) - end - insert_node_after(head,previous,new_correction_kern(italic)) - done = true - end - return head, done -end - -local enable - -enable = function() - tasks.enableaction("processors","typesetters.italics.handler") - if trace_italics then - report_italics("enabling text italics") - end - enable = false -end - -function italics.set(n) - if enable then - enable() - end - if n == variables.reset then - texattribute[a_italics] = unsetvalue - else - texattribute[a_italics] = tonumber(n) or unsetvalue - end -end - -function italics.reset() - texattribute[a_italics] = unsetvalue -end - -italics.handler = nodes.installattributehandler { - name = "italics", - namespace = italics, - processor = process, -} - -local variables = interfaces.variables -local settings_to_hash = utilities.parsers.settings_to_hash - -function commands.setupitaliccorrection(option) -- no grouping ! - if enable then - enable() - end - local options = settings_to_hash(option) - local variant = unsetvalue - if options[variables.text] then - variant = 1 - elseif options[variables.always] then - variant = 2 - end - if options[variables.global] then - forcedvariant = variant - texattribute[a_italics] = unsetvalue - else - forcedvariant = false - texattribute[a_italics] = variant - end - if trace_italics then - report_italics("forcing %a, variant %a",forcedvariant,variant ~= unsetvalue and variant) - end -end - --- for manuals: - -local stack = { } - -function commands.pushitaliccorrection() - table.insert(stack,{forcedvariant, texattribute[a_italics] }) -end - -function commands.popitaliccorrection() - local top = table.remove(stack) - forcedvariant = top[1] - texattribute[a_italics] = top[2] -end +if not modules then modules = { } end modules ['typo-itc'] = { + version = 1.001, + comment = "companion to typo-itc.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local utfchar = utf.char + +local trace_italics = false trackers.register("typesetters.italics", function(v) trace_italics = v end) + +local report_italics = logs.reporter("nodes","italics") + +typesetters.italics = typesetters.italics or { } +local italics = typesetters.italics + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local kern_code = nodecodes.kern +local glue_code = nodecodes.glue +local disc_code = nodecodes.disc +local math_code = nodecodes.math + +local tasks = nodes.tasks + +local insert_node_after = node.insert_after +local delete_node = nodes.delete +local end_of_math = node.end_of_math + +local texattribute = tex.attribute +local a_italics = attributes.private("italics") +local unsetvalue = attributes.unsetvalue + +local new_correction_kern = nodes.pool.fontkern +local new_correction_glue = nodes.pool.glue + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local italicsdata = 
fonthashes.italics + +local forcedvariant = false + +function typesetters.italics.forcevariant(variant) + forcedvariant = variant +end + +local function setitalicinfont(font,char) + local tfmdata = fontdata[font] + local character = tfmdata.characters[char] + if character then + local italic = character.italic_correction + if not italic then + local autoitalicamount = tfmdata.properties.autoitalicamount or 0 + if autoitalicamount ~= 0 then + local description = tfmdata.descriptions[char] + if description then + italic = description.italic + if not italic then + local boundingbox = description.boundingbox + italic = boundingbox[3] - description.width + autoitalicamount + if italic < 0 then -- < 0 indicates no overshoot or a very small auto italic + italic = 0 + end + end + if italic ~= 0 then + italic = italic * tfmdata.parameters.hfactor + end + end + end + if trace_italics then + report_italics("setting italic correction of %C of font %a to %p",char,font,italic) + end + character.italic_correction = italic or 0 + end + return italic + else + return 0 + end +end + +-- todo: clear attribute + +local function process(namespace,attribute,head) + local done = false + local italic = 0 + local lastfont = nil + local lastattr = nil + local previous = nil + local prevchar = nil + local current = head + local inserted = nil + while current do + local id = current.id + if id == glyph_code then + local font = current.font + local char = current.char + local data = italicsdata[font] + if font ~= lastfont then + if italic ~= 0 then + if data then + if trace_italics then + report_italics("ignoring %p between italic %C and italic %C",italic,prevchar,char) + end + else + if trace_italics then + report_italics("inserting %p between italic %C and regular %C",italic,prevchar,char) + end + insert_node_after(head,previous,new_correction_kern(italic)) + done = true + end + elseif inserted and data then + if trace_italics then + report_italics("deleting last correction before %C",char) + end + delete_node(head,inserted) + else + -- nothing + end + lastfont = font + end + if data then + local attr = forcedvariant or current[attribute] + if attr and attr > 0 then + local cd = data[char] + if not cd then + -- this really can happen + italic = 0 + else + italic = cd.italic or cd.italic_correction + if not italic then + italic = setitalicinfont(font,char) -- calculated once + -- italic = 0 + end + if italic ~= 0 then + lastfont = font + lastattr = attr + previous = current + prevchar = char + end + end + else + italic = 0 + end + else + italic = 0 + end + inserted = nil + elseif id == disc_code then + -- skip + elseif id == kern_code then + inserted = nil + italic = 0 + elseif id == glue_code then + if italic ~= 0 then + if trace_italics then + report_italics("inserting %p between italic %C and glue",italic,prevchar) + end + inserted = new_correction_glue(italic) -- maybe just add ? 
else problem with penalties + insert_node_after(head,previous,inserted) + italic = 0 + done = true + end + elseif id == math_code then + current = end_of_math(current) + elseif italic ~= 0 then + if trace_italics then + report_italics("inserting %p between italic %C and whatever",italic,prevchar) + end + inserted = nil + insert_node_after(head,previous,new_correction_kern(italic)) + italic = 0 + done = true + end + current = current.next + end + if italic ~= 0 and lastattr > 1 then -- more control is needed here + if trace_italics then + report_italics("inserting %p between italic %C and end of list",italic,prevchar) + end + insert_node_after(head,previous,new_correction_kern(italic)) + done = true + end + return head, done +end + +local enable + +enable = function() + tasks.enableaction("processors","typesetters.italics.handler") + if trace_italics then + report_italics("enabling text italics") + end + enable = false +end + +function italics.set(n) + if enable then + enable() + end + if n == variables.reset then + texattribute[a_italics] = unsetvalue + else + texattribute[a_italics] = tonumber(n) or unsetvalue + end +end + +function italics.reset() + texattribute[a_italics] = unsetvalue +end + +italics.handler = nodes.installattributehandler { + name = "italics", + namespace = italics, + processor = process, +} + +local variables = interfaces.variables +local settings_to_hash = utilities.parsers.settings_to_hash + +function commands.setupitaliccorrection(option) -- no grouping ! + if enable then + enable() + end + local options = settings_to_hash(option) + local variant = unsetvalue + if options[variables.text] then + variant = 1 + elseif options[variables.always] then + variant = 2 + end + if options[variables.global] then + forcedvariant = variant + texattribute[a_italics] = unsetvalue + else + forcedvariant = false + texattribute[a_italics] = variant + end + if trace_italics then + report_italics("forcing %a, variant %a",forcedvariant,variant ~= unsetvalue and variant) + end +end + +-- for manuals: + +local stack = { } + +function commands.pushitaliccorrection() + table.insert(stack,{forcedvariant, texattribute[a_italics] }) +end + +function commands.popitaliccorrection() + local top = table.remove(stack) + forcedvariant = top[1] + texattribute[a_italics] = top[2] +end diff --git a/tex/context/base/typo-krn.lua b/tex/context/base/typo-krn.lua index fb28d3b2d..eac876262 100644 --- a/tex/context/base/typo-krn.lua +++ b/tex/context/base/typo-krn.lua @@ -1,335 +1,335 @@ -if not modules then modules = { } end modules ['typo-krn'] = { - version = 1.001, - comment = "companion to typo-krn.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next, type, tonumber = next, type, tonumber -local utfchar = utf.char - -local nodes, node, fonts = nodes, node, fonts - -local find_node_tail = node.tail or node.slide -local free_node = node.free -local free_nodelist = node.flush_list -local copy_node = node.copy -local copy_nodelist = node.copy_list -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after -local end_of_math = node.end_of_math - -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue - -local nodepool = nodes.pool -local tasks = nodes.tasks - -local new_gluespec = nodepool.gluespec -local new_kern = nodepool.kern -local new_glue = nodepool.glue - -local nodecodes = nodes.nodecodes -local kerncodes = nodes.kerncodes 
-local skipcodes = nodes.skipcodes - -local glyph_code = nodecodes.glyph -local kern_code = nodecodes.kern -local disc_code = nodecodes.disc -local glue_code = nodecodes.glue -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local math_code = nodecodes.math - -local kerning_code = kerncodes.kerning -local userkern_code = kerncodes.userkern -local userskip_code = skipcodes.userskip -local spaceskip_code = skipcodes.spaceskip -local xspaceskip_code = skipcodes.xspaceskip - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers -local chardata = fonthashes.characters -local quaddata = fonthashes.quads -local markdata = fonthashes.marks - -local v_max = interfaces.variables.max - -typesetters = typesetters or { } -local typesetters = typesetters - -typesetters.kerns = typesetters.kerns or { } -local kerns = typesetters.kerns - -kerns.mapping = kerns.mapping or { } -kerns.factors = kerns.factors or { } -local a_kerns = attributes.private("kern") -local a_fontkern = attributes.private('fontkern') -kerns.attribute = kerns.attribute - -storage.register("typesetters/kerns/mapping", kerns.mapping, "typesetters.kerns.mapping") -storage.register("typesetters/kerns/factors", kerns.factors, "typesetters.kerns.factors") - -local mapping = kerns.mapping -local factors = kerns.factors - --- one must use liga=no and mode=base and kern=yes --- use more helpers --- make sure it runs after all others --- there will be a width adaptor field in nodes so this will change --- todo: interchar kerns / disc nodes / can be made faster - -local gluefactor = 4 -- assumes quad = .5 enspace - -kerns.keepligature = false -- just for fun (todo: control setting with key/value) -kerns.keeptogether = false -- just for fun (todo: control setting with key/value) - --- can be optimized .. the prev thing .. but hardly worth the effort - -local function kern_injector(fillup,kern) - if fillup then - local g = new_glue(kern) - local s = g.spec - s.stretch = kern - s.stretch_order = 1 - return g - else - return new_kern(kern) - end -end - -local function spec_injector(fillup,width,stretch,shrink) - if fillup then - local s = new_gluespec(width,2*stretch,2*shrink) - s.stretch_order = 1 - return s - else - return new_gluespec(width,stretch,shrink) - end -end - --- needs checking ... base mode / node mode - -local function do_process(namespace,attribute,head,force) -- todo: glue so that we can fully stretch - local start, done, lastfont = head, false, nil - local keepligature = kerns.keepligature - local keeptogether = kerns.keeptogether - local fillup = false - while start do - -- faster to test for attr first - local attr = force or start[attribute] - if attr and attr > 0 then - start[attribute] = unsetvalue - local krn = mapping[attr] - if krn == v_max then - krn = .25 - fillup = true - else - fillup = false - end - if krn and krn ~= 0 then - local id = start.id - if id == glyph_code then - lastfont = start.font - local c = start.components - if c then - if keepligature and keepligature(start) then - -- keep 'm - else - c = do_process(namespace,attribute,c,attr) - local s = start - local p, n = s.prev, s.next - local tail = find_node_tail(c) - if p then - p.next = c - c.prev = p - else - head = c - end - if n then - n.prev = tail - end - tail.next = n - start = c - s.components = nil - -- we now leak nodes ! 
- -- free_node(s) - done = true - end - end - local prev = start.prev - if not prev then - -- skip - elseif markdata[lastfont][start.char] then - -- skip - else - local pid = prev.id - if not pid then - -- nothing - elseif pid == kern_code then - if prev.subtype == kerning_code or prev[a_fontkern] then - if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then -- we could also pass start - -- keep 'm - else - -- not yet ok, as injected kerns can be overlays (from node-inj.lua) - prev.subtype = userkern_code - prev.kern = prev.kern + quaddata[lastfont]*krn -- here - done = true - end - end - elseif pid == glyph_code then - if prev.font == lastfont then - local prevchar, lastchar = prev.char, start.char - if keeptogether and keeptogether(prev,start) then - -- keep 'm - else - local kerns = chardata[lastfont][prevchar].kerns - local kern = kerns and kerns[lastchar] or 0 - krn = kern + quaddata[lastfont]*krn -- here - insert_node_before(head,start,kern_injector(fillup,krn)) - done = true - end - else - krn = quaddata[lastfont]*krn -- here - insert_node_before(head,start,kern_injector(fillup,krn)) - done = true - end - elseif pid == disc_code then - -- a bit too complicated, we can best not copy and just calculate - -- but we could have multiple glyphs involved so ... - local disc = prev -- disc - local pre, post, replace = disc.pre, disc.post, disc.replace - local prv, nxt = disc.prev, disc.next - if pre and prv then -- must pair with start.prev - -- this one happens in most cases - local before = copy_node(prv) - pre.prev = before - before.next = pre - before.prev = nil - pre = do_process(namespace,attribute,before,attr) - pre = pre.next - pre.prev = nil - disc.pre = pre - free_node(before) - end - if post and nxt then -- must pair with start - local after = copy_node(nxt) - local tail = find_node_tail(post) - tail.next = after - after.prev = tail - after.next = nil - post = do_process(namespace,attribute,post,attr) - tail.next = nil - disc.post = post - free_node(after) - end - if replace and prv and nxt then -- must pair with start and start.prev - local before = copy_node(prv) - local after = copy_node(nxt) - local tail = find_node_tail(replace) - replace.prev = before - before.next = replace - before.prev = nil - tail.next = after - after.prev = tail - after.next = nil - replace = do_process(namespace,attribute,before,attr) - replace = replace.next - replace.prev = nil - after.prev.next = nil - disc.replace = replace - free_node(after) - free_node(before) - else - if prv and prv.id == glyph_code and prv.font == lastfont then - local prevchar, lastchar = prv.char, start.char - local kerns = chardata[lastfont][prevchar].kerns - local kern = kerns and kerns[lastchar] or 0 - krn = kern + quaddata[lastfont]*krn -- here - else - krn = quaddata[lastfont]*krn -- here - end - disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue - end - end - end - elseif id == glue_code then - local subtype = start.subtype - if subtype == userskip_code or subtype == xspaceskip_code or subtype == spaceskip_code then - local s = start.spec - local w = s.width - if w > 0 then - local width, stretch, shrink = w+gluefactor*w*krn, s.stretch, s.shrink - start.spec = spec_injector(fillup,width,stretch*width/w,shrink*width/w) - done = true - end - end - elseif id == kern_code then - -- if start.subtype == kerning_code then -- handle with glyphs - -- local sk = start.kern - -- if sk > 0 then - -- start.kern = sk*krn - -- done = true - -- end - -- end - elseif lastfont and (id 
== hlist_code or id == vlist_code) then -- todo: lookahead - local p = start.prev - if p and p.id ~= glue_code then - insert_node_before(head,start,kern_injector(fillup,quaddata[lastfont]*krn)) - done = true - end - local n = start.next - if n and n.id ~= glue_code then - insert_node_after(head,start,kern_injector(fillup,quaddata[lastfont]*krn)) - done = true - end - elseif id == math_code then - start = end_of_math(start) - end - end - end - if start then - start = start.next - end - end - return head, done -end - -local enabled = false - -function kerns.set(factor) - if factor ~= v_max then - factor = tonumber(factor) or 0 - end - if factor == v_max or factor ~= 0 then - if not enabled then - tasks.enableaction("processors","typesetters.kerns.handler") - enabled = true - end - local a = factors[factor] - if not a then - a = #mapping + 1 - factors[factors], mapping[a] = a, factor - end - factor = a - else - factor = unsetvalue - end - texattribute[a_kerns] = factor - return factor -end - -local function process(namespace,attribute,head) - return do_process(namespace,attribute,head) -- no direct map, because else fourth argument is tail == true -end - -kerns.handler = nodes.installattributehandler { - name = "kern", - namespace = kerns, - processor = process, -} - --- interface - -commands.setcharacterkerning = kerns.set +if not modules then modules = { } end modules ['typo-krn'] = { + version = 1.001, + comment = "companion to typo-krn.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next, type, tonumber = next, type, tonumber +local utfchar = utf.char + +local nodes, node, fonts = nodes, node, fonts + +local find_node_tail = node.tail or node.slide +local free_node = node.free +local free_nodelist = node.flush_list +local copy_node = node.copy +local copy_nodelist = node.copy_list +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after +local end_of_math = node.end_of_math + +local texattribute = tex.attribute +local unsetvalue = attributes.unsetvalue + +local nodepool = nodes.pool +local tasks = nodes.tasks + +local new_gluespec = nodepool.gluespec +local new_kern = nodepool.kern +local new_glue = nodepool.glue + +local nodecodes = nodes.nodecodes +local kerncodes = nodes.kerncodes +local skipcodes = nodes.skipcodes + +local glyph_code = nodecodes.glyph +local kern_code = nodecodes.kern +local disc_code = nodecodes.disc +local glue_code = nodecodes.glue +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local math_code = nodecodes.math + +local kerning_code = kerncodes.kerning +local userkern_code = kerncodes.userkern +local userskip_code = skipcodes.userskip +local spaceskip_code = skipcodes.spaceskip +local xspaceskip_code = skipcodes.xspaceskip + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local chardata = fonthashes.characters +local quaddata = fonthashes.quads +local markdata = fonthashes.marks + +local v_max = interfaces.variables.max + +typesetters = typesetters or { } +local typesetters = typesetters + +typesetters.kerns = typesetters.kerns or { } +local kerns = typesetters.kerns + +kerns.mapping = kerns.mapping or { } +kerns.factors = kerns.factors or { } +local a_kerns = attributes.private("kern") +local a_fontkern = attributes.private('fontkern') +kerns.attribute = kerns.attribute + +storage.register("typesetters/kerns/mapping", kerns.mapping, 
"typesetters.kerns.mapping") +storage.register("typesetters/kerns/factors", kerns.factors, "typesetters.kerns.factors") + +local mapping = kerns.mapping +local factors = kerns.factors + +-- one must use liga=no and mode=base and kern=yes +-- use more helpers +-- make sure it runs after all others +-- there will be a width adaptor field in nodes so this will change +-- todo: interchar kerns / disc nodes / can be made faster + +local gluefactor = 4 -- assumes quad = .5 enspace + +kerns.keepligature = false -- just for fun (todo: control setting with key/value) +kerns.keeptogether = false -- just for fun (todo: control setting with key/value) + +-- can be optimized .. the prev thing .. but hardly worth the effort + +local function kern_injector(fillup,kern) + if fillup then + local g = new_glue(kern) + local s = g.spec + s.stretch = kern + s.stretch_order = 1 + return g + else + return new_kern(kern) + end +end + +local function spec_injector(fillup,width,stretch,shrink) + if fillup then + local s = new_gluespec(width,2*stretch,2*shrink) + s.stretch_order = 1 + return s + else + return new_gluespec(width,stretch,shrink) + end +end + +-- needs checking ... base mode / node mode + +local function do_process(namespace,attribute,head,force) -- todo: glue so that we can fully stretch + local start, done, lastfont = head, false, nil + local keepligature = kerns.keepligature + local keeptogether = kerns.keeptogether + local fillup = false + while start do + -- faster to test for attr first + local attr = force or start[attribute] + if attr and attr > 0 then + start[attribute] = unsetvalue + local krn = mapping[attr] + if krn == v_max then + krn = .25 + fillup = true + else + fillup = false + end + if krn and krn ~= 0 then + local id = start.id + if id == glyph_code then + lastfont = start.font + local c = start.components + if c then + if keepligature and keepligature(start) then + -- keep 'm + else + c = do_process(namespace,attribute,c,attr) + local s = start + local p, n = s.prev, s.next + local tail = find_node_tail(c) + if p then + p.next = c + c.prev = p + else + head = c + end + if n then + n.prev = tail + end + tail.next = n + start = c + s.components = nil + -- we now leak nodes ! + -- free_node(s) + done = true + end + end + local prev = start.prev + if not prev then + -- skip + elseif markdata[lastfont][start.char] then + -- skip + else + local pid = prev.id + if not pid then + -- nothing + elseif pid == kern_code then + if prev.subtype == kerning_code or prev[a_fontkern] then + if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then -- we could also pass start + -- keep 'm + else + -- not yet ok, as injected kerns can be overlays (from node-inj.lua) + prev.subtype = userkern_code + prev.kern = prev.kern + quaddata[lastfont]*krn -- here + done = true + end + end + elseif pid == glyph_code then + if prev.font == lastfont then + local prevchar, lastchar = prev.char, start.char + if keeptogether and keeptogether(prev,start) then + -- keep 'm + else + local kerns = chardata[lastfont][prevchar].kerns + local kern = kerns and kerns[lastchar] or 0 + krn = kern + quaddata[lastfont]*krn -- here + insert_node_before(head,start,kern_injector(fillup,krn)) + done = true + end + else + krn = quaddata[lastfont]*krn -- here + insert_node_before(head,start,kern_injector(fillup,krn)) + done = true + end + elseif pid == disc_code then + -- a bit too complicated, we can best not copy and just calculate + -- but we could have multiple glyphs involved so ... 
+ local disc = prev -- disc + local pre, post, replace = disc.pre, disc.post, disc.replace + local prv, nxt = disc.prev, disc.next + if pre and prv then -- must pair with start.prev + -- this one happens in most cases + local before = copy_node(prv) + pre.prev = before + before.next = pre + before.prev = nil + pre = do_process(namespace,attribute,before,attr) + pre = pre.next + pre.prev = nil + disc.pre = pre + free_node(before) + end + if post and nxt then -- must pair with start + local after = copy_node(nxt) + local tail = find_node_tail(post) + tail.next = after + after.prev = tail + after.next = nil + post = do_process(namespace,attribute,post,attr) + tail.next = nil + disc.post = post + free_node(after) + end + if replace and prv and nxt then -- must pair with start and start.prev + local before = copy_node(prv) + local after = copy_node(nxt) + local tail = find_node_tail(replace) + replace.prev = before + before.next = replace + before.prev = nil + tail.next = after + after.prev = tail + after.next = nil + replace = do_process(namespace,attribute,before,attr) + replace = replace.next + replace.prev = nil + after.prev.next = nil + disc.replace = replace + free_node(after) + free_node(before) + else + if prv and prv.id == glyph_code and prv.font == lastfont then + local prevchar, lastchar = prv.char, start.char + local kerns = chardata[lastfont][prevchar].kerns + local kern = kerns and kerns[lastchar] or 0 + krn = kern + quaddata[lastfont]*krn -- here + else + krn = quaddata[lastfont]*krn -- here + end + disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue + end + end + end + elseif id == glue_code then + local subtype = start.subtype + if subtype == userskip_code or subtype == xspaceskip_code or subtype == spaceskip_code then + local s = start.spec + local w = s.width + if w > 0 then + local width, stretch, shrink = w+gluefactor*w*krn, s.stretch, s.shrink + start.spec = spec_injector(fillup,width,stretch*width/w,shrink*width/w) + done = true + end + end + elseif id == kern_code then + -- if start.subtype == kerning_code then -- handle with glyphs + -- local sk = start.kern + -- if sk > 0 then + -- start.kern = sk*krn + -- done = true + -- end + -- end + elseif lastfont and (id == hlist_code or id == vlist_code) then -- todo: lookahead + local p = start.prev + if p and p.id ~= glue_code then + insert_node_before(head,start,kern_injector(fillup,quaddata[lastfont]*krn)) + done = true + end + local n = start.next + if n and n.id ~= glue_code then + insert_node_after(head,start,kern_injector(fillup,quaddata[lastfont]*krn)) + done = true + end + elseif id == math_code then + start = end_of_math(start) + end + end + end + if start then + start = start.next + end + end + return head, done +end + +local enabled = false + +function kerns.set(factor) + if factor ~= v_max then + factor = tonumber(factor) or 0 + end + if factor == v_max or factor ~= 0 then + if not enabled then + tasks.enableaction("processors","typesetters.kerns.handler") + enabled = true + end + local a = factors[factor] + if not a then + a = #mapping + 1 + factors[factors], mapping[a] = a, factor + end + factor = a + else + factor = unsetvalue + end + texattribute[a_kerns] = factor + return factor +end + +local function process(namespace,attribute,head) + return do_process(namespace,attribute,head) -- no direct map, because else fourth argument is tail == true +end + +kerns.handler = nodes.installattributehandler { + name = "kern", + namespace = kerns, + processor = process, +} + +-- interface + 
+commands.setcharacterkerning = kerns.set diff --git a/tex/context/base/typo-lan.lua b/tex/context/base/typo-lan.lua index 50927f744..a17732900 100644 --- a/tex/context/base/typo-lan.lua +++ b/tex/context/base/typo-lan.lua @@ -1,72 +1,72 @@ -if not modules then modules = { } end modules ['typo-lan'] = { - version = 1.001, - comment = "companion to typo-lan.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local type, next = type, next - -local currentfont = font.current -local setmetatableindex = table.setmetatableindex -local utfbyte = utf.byte - -local hashes = fonts.hashes -local fontdata = hashes.characters -local emwidths = hashes.emwidths - -local frequencies = languages.frequencies or { } -languages.frequencies = frequencies - -local frequencydata = { } -local frequencyfile = string.formatters["lang-frq-%s.lua"] -local frequencycache = { } - -setmetatableindex(frequencydata, function(t,language) - local fullname = resolvers.findfile(frequencyfile(language)) - local v = fullname ~= "" and dofile(fullname) - if not v or not v.frequencies then - v = t.en - end - t[language] = v - return v -end) - -setmetatableindex(frequencycache, function(t,language) - local dataset = frequencydata[language] - local frequencies = dataset.frequencies - if not frequencies then - return t.en - end - local v = { } - setmetatableindex(v, function(t,font) - local average = emwidths[font] / 2 - if frequencies then - local characters = fontdata[font] - local sum, tot = 0, 0 - for k, v in next, frequencies do - local character = characters[k] -- characters[type(k) == "number" and k or utfbyte(k)] - tot = tot + v - sum = sum + v * (character and character.width or average) - end - average = sum / tot -- widths - end - t[font] = average - return average - end) - t[language] = v - return v -end) - -function frequencies.getdata(language) - return frequencydata[language] -end - -function frequencies.averagecharwidth(language,font) - return frequencycache[language or "en"][font or currentfont()] -end - -function commands.averagecharwidth(language,font) - context(frequencycache[language or "en"][font or currentfont()]) -end +if not modules then modules = { } end modules ['typo-lan'] = { + version = 1.001, + comment = "companion to typo-lan.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local type, next = type, next + +local currentfont = font.current +local setmetatableindex = table.setmetatableindex +local utfbyte = utf.byte + +local hashes = fonts.hashes +local fontdata = hashes.characters +local emwidths = hashes.emwidths + +local frequencies = languages.frequencies or { } +languages.frequencies = frequencies + +local frequencydata = { } +local frequencyfile = string.formatters["lang-frq-%s.lua"] +local frequencycache = { } + +setmetatableindex(frequencydata, function(t,language) + local fullname = resolvers.findfile(frequencyfile(language)) + local v = fullname ~= "" and dofile(fullname) + if not v or not v.frequencies then + v = t.en + end + t[language] = v + return v +end) + +setmetatableindex(frequencycache, function(t,language) + local dataset = frequencydata[language] + local frequencies = dataset.frequencies + if not frequencies then + return t.en + end + local v = { } + setmetatableindex(v, function(t,font) + local average = emwidths[font] / 2 + if frequencies then + local 
characters = fontdata[font] + local sum, tot = 0, 0 + for k, v in next, frequencies do + local character = characters[k] -- characters[type(k) == "number" and k or utfbyte(k)] + tot = tot + v + sum = sum + v * (character and character.width or average) + end + average = sum / tot -- widths + end + t[font] = average + return average + end) + t[language] = v + return v +end) + +function frequencies.getdata(language) + return frequencydata[language] +end + +function frequencies.averagecharwidth(language,font) + return frequencycache[language or "en"][font or currentfont()] +end + +function commands.averagecharwidth(language,font) + context(frequencycache[language or "en"][font or currentfont()]) +end diff --git a/tex/context/base/typo-mar.lua b/tex/context/base/typo-mar.lua index ec827883d..65b205098 100644 --- a/tex/context/base/typo-mar.lua +++ b/tex/context/base/typo-mar.lua @@ -1,879 +1,879 @@ -if not modules then modules = { } end modules ['typo-mar'] = { - version = 1.001, - comment = "companion to typo-mar.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: --- --- * autoleft/right depending on available space (or distance to margin) --- * stack across paragraphs, but that is messy and one should reconsider --- using margin data then as also vertical spacing kicks in --- * floating margin data, with close-to-call anchoring - --- -- experiment (does not work, too much interference) --- --- local pdfprint = pdf.print --- local format = string.format --- --- anchors = anchors or { } --- --- local whatever = { } --- local factor = (7200/7227)/65536 --- --- function anchors.set(tag) --- whatever[tag] = { pdf.h, pdf.v } --- end --- --- function anchors.reset(tag) --- whatever[tag] = nil --- end --- --- function anchors.startmove(tag,how) -- save/restore nodes but they don't support moves --- local w = whatever[tag] --- if not w then --- -- error --- elseif how == "horizontal" or how == "h" then --- pdfprint("page",format(" q 1 0 0 1 %f 0 cm ", (w[1] - pdf.h) * factor)) --- elseif how == "vertical" or how == "v" then --- pdfprint("page",format(" q 1 0 0 1 0 %f cm ", (w[2] - pdf.v) * factor)) --- else --- pdfprint("page",format(" q 1 0 0 1 %f %f cm ", (w[1] - pdf.h) * factor, (w[2] - pdf.v) * factor)) --- end --- end --- --- function anchors.stopmove(tag) --- local w = whatever[tag] --- if not w then --- -- error --- else --- pdfprint("page"," Q ") --- end --- end --- --- local latelua = nodes.pool.latelua --- --- function anchors.node_set(tag) --- return latelua(formatters["anchors.set(%q)"](tag)) --- end --- --- function anchors.node_reset(tag) --- return latelua(formatters["anchors.reset(%q)"](tag)) --- end --- --- function anchors.node_start_move(tag,how) --- return latelua(formatters["anchors.startmove(%q,%q)](tag,how)) --- end --- --- function anchors.node_stop_move(tag) --- return latelua(formatters["anchors.stopmove(%q)"](tag)) --- end - --- so far - -local format, validstring = string.format, string.valid -local insert, remove = table.insert, table.remove -local setmetatable, next = setmetatable, next - -local attributes, nodes, node, variables = attributes, nodes, node, variables - -local trace_margindata = false trackers.register("typesetters.margindata", function(v) trace_margindata = v end) -local trace_marginstack = false trackers.register("typesetters.margindata.stack", function(v) trace_marginstack = v end) -local trace_margingroup = false 
trackers.register("typesetters.margindata.group", function(v) trace_margingroup = v end) - -local report_margindata = logs.reporter("typesetters","margindata") - -local tasks = nodes.tasks -local prependaction = tasks.prependaction -local disableaction = tasks.disableaction -local enableaction = tasks.enableaction - -local variables = interfaces.variables - -local conditionals = tex.conditionals -local systemmodes = tex.systemmodes - -local v_top = variables.top -local v_depth = variables.depth -local v_local = variables["local"] -local v_global = variables["global"] -local v_left = variables.left -local v_right = variables.right -local v_flushleft = variables.flushleft -local v_flushright = variables.flushright -local v_inner = variables.inner -local v_outer = variables.outer -local v_margin = variables.margin -local v_edge = variables.edge -local v_default = variables.default -local v_normal = variables.normal -local v_yes = variables.yes -local v_continue = variables.continue -local v_first = variables.first -local v_text = variables.text -local v_column = variables.column - -local copy_node_list = node.copy_list -local slide_nodes = node.slide -local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here -local traverse_id = node.traverse_id -local free_node_list = node.flush_list -local insert_node_after = node.insert_after -local insert_node_before = node.insert_before - -local concat_nodes = nodes.concat - -local nodecodes = nodes.nodecodes -local listcodes = nodes.listcodes -local gluecodes = nodes.gluecodes -local whatsitcodes = nodes.whatsitcodes - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glue_code = nodecodes.glue -local kern_code = nodecodes.kern -local penalty_code = nodecodes.penalty -local whatsit_code = nodecodes.whatsit -local line_code = listcodes.line -local cell_code = listcodes.cell -local alignment_code = listcodes.alignment -local leftskip_code = gluecodes.leftskip -local rightskip_code = gluecodes.rightskip -local userdefined_code = whatsitcodes.userdefined - -local dir_code = whatsitcodes.dir -local localpar_code = whatsitcodes.localpar - -local nodepool = nodes.pool - -local new_kern = nodepool.kern -local new_glue = nodepool.glue -local new_penalty = nodepool.penalty -local new_stretch = nodepool.stretch -local new_usernumber = nodepool.usernumber -local new_latelua = nodepool.latelua - -local texcount = tex.count -local texdimen = tex.dimen -local texbox = tex.box - -local points = number.points - -local isleftpage = layouts.status.isleftpage -local registertogether = builders.paragraphs.registertogether - -local jobpositions = job.positions -local getposition = jobpositions.position - -local a_margindata = attributes.private("margindata") - -local inline_mark = nodepool.userids["margins.inline"] - -local margins = { } -typesetters.margins = margins - -local locations = { v_left, v_right, v_inner, v_outer } -- order might change -local categories = { } -local displaystore = { } -- [category][location][scope] -local inlinestore = { } -- [number] -local nofsaved = 0 -local nofstored = 0 -local nofinlined = 0 -local nofdelayed = 0 -local h_anchors = 0 -local v_anchors = 0 - -local mt1 = { - __index = function(t,location) - local v = { [v_local] = { }, [v_global] = { } } - t[location] = v - return v - end -} - -local mt2 = { - __index = function(stores,category) - categories[#categories+1] = category - local v = { } - setmetatable(v,mt1) - stores[category] = v - return v - end -} - -setmetatable(displaystore,mt2) - 
-local defaults = { - __index = { - location = v_left, - align = v_normal, - method = "", - name = "", - threshold = 0, -- .25ex - margin = v_normal, - scope = v_global, - distance = 0, - hoffset = 0, - voffset = 0, - category = v_default, - line = 0, - vstack = 0, - dy = 0, - baseline = false, - inline = false, - leftskip = 0, - rightskip = 0, - } -} - -local enablelocal, enableglobal -- forward reference (delayed initialization) - -local function showstore(store,banner,location) - if next(store) then - for i, si in table.sortedpairs(store) do - local si =store[i] - report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(si.box.list)) - end - else - report_margindata("%s: nothing stored in location %a",banner,location) - end -end - -function margins.save(t) - setmetatable(t,defaults) - local content = texbox[t.number] - local location = t.location - local category = t.category - local inline = t.inline - local scope = t.scope or v_global - if not content then - report_margindata("ignoring empty margin data %a",location or "unknown") - return - end - local store - if inline then - store = inlinestore - else - store = displaystore[category][location] - if not store then - report_margindata("invalid location %a",location) - return - end - store = store[scope] - end - if not store then - report_margindata("invalid scope %a",scope) - return - end - if enablelocal and scope == v_local then - enablelocal() - if enableglobal then - enableglobal() -- is the fallback - end - elseif enableglobal and scope == v_global then - enableglobal() - end - nofsaved = nofsaved + 1 - nofstored = nofstored + 1 - local name = t.name - if trace_marginstack then - showstore(store,"before",location) - end - if name and name ~= "" then - if inlinestore then -- todo: inline store has to be done differently (not sparse) - local t = table.sortedkeys(store) for j=#t,1,-1 do local i = t[j] - local si = store[i] - if si.name == name then - local s = remove(store,i) - free_node_list(s.box) - end - end - else - for i=#store,1,-1 do - local si = store[i] - if si.name == name then - local s = remove(store,i) - free_node_list(s.box) - end - end - end - if trace_marginstack then - showstore(store,"between",location) - end - end - if t.number then - -- better make a new table and make t entry in t - t.box = copy_node_list(content) - t.n = nofsaved - -- used later (we will clean up this natural mess later) - -- nice is to make a special status table mechanism - local leftmargindistance = texdimen.naturalleftmargindistance - local rightmargindistance = texdimen.naturalrightmargindistance - t.strutdepth = texbox.strutbox.depth - t.strutheight = texbox.strutbox.height - t.leftskip = tex.leftskip.width -- we're not in forgetall - t.rightskip = tex.rightskip.width -- we're not in forgetall - t.leftmargindistance = leftmargindistance -- todo:layoutstatus table - t.rightmargindistance = rightmargindistance - t.leftedgedistance = texdimen.naturalleftedgedistance - + texdimen.leftmarginwidth - + leftmargindistance - t.rightedgedistance = texdimen.naturalrightedgedistance - + texdimen.rightmarginwidth - + rightmargindistance - t.lineheight = texdimen.lineheight - -- - -- t.realpageno = texcount.realpageno - if inline then - context(new_usernumber(inline_mark,nofsaved)) - store[nofsaved] = t -- no insert - nofinlined = nofinlined + 1 - else - insert(store,t) - end - end - if trace_marginstack then - showstore(store,"after",location) - end - if trace_margindata then - 
report_margindata("saved %a, location %a, scope %a, inline %a",nofsaved,location,scope,inline) - end -end - --- Actually it's an advantage to have them all anchored left (tags and such) --- we could keep them in store and flush in stage two but we might want to --- do more before that so we need the content to be there unless we can be --- sure that we flush this first which might not be the case in the future. --- --- When the prototype inner/outer code that was part of this proved to be --- okay it was moved elsewhere. - -local status, nofstatus = { }, 0 - -local function realign(current,candidate) - local location = candidate.location - local margin = candidate.margin - local hoffset = candidate.hoffset - local distance = candidate.distance - local hsize = candidate.hsize - local width = candidate.width - local align = candidate.align - -- local realpageno = candidate.realpageno - local leftpage = isleftpage(false,true) - local delta = 0 - local leftdelta = 0 - local rightdelta = 0 - local leftdistance = distance - local rightdistance = distance - if margin == v_normal then - -- - elseif margin == v_local then - leftdelta = - candidate.leftskip - rightdelta = candidate.rightskip - elseif margin == v_margin then - leftdistance = candidate.leftmargindistance - rightdistance = candidate.rightmargindistance - elseif margin == v_edge then - leftdistance = candidate.leftedgedistance - rightdistance = candidate.rightedgedistance - end - if leftpage then - leftdistance, rightdistance = rightdistance, leftdistance - end - - if location == v_left then - delta = hoffset + width + leftdistance + leftdelta - elseif location == v_right then - delta = -hoffset - hsize - rightdistance + rightdelta - elseif location == v_inner then - if leftpage then - delta = -hoffset - hsize - rightdistance + rightdelta - else - delta = hoffset + width + leftdistance + leftdelta - end - elseif location == v_outer then - if leftpage then - delta = hoffset + width + leftdistance + leftdelta - else - delta = -hoffset - hsize - rightdistance + rightdelta - end - end - - -- we assume that list is a hbox, otherwise we had to take the whole current - -- in order to get it right - - current.width = 0 - local anchornode, move_x - - -- this mess is needed for alignments (combinations) so we use that - -- oportunity to add arbitrary anchoring - - -- always increment anchor is nicer for multipass when we add new .. 
- - local inline = candidate.inline - local anchor = candidate.anchor - if not anchor or anchor == "" then - anchor = v_text - end - if inline or anchor ~= v_text or candidate.psubtype == alignment_code then - -- the alignment_code check catches margintexts ste before a tabulate - h_anchors = h_anchors + 1 - anchornode = new_latelua(format("_plib_.set('md:h',%i,{x=true,c=true})",h_anchors)) - local blob = jobpositions.get('md:h', h_anchors) - if blob then - local reference = jobpositions.getreserved(anchor,blob.c) - if reference then - if location == v_left then - move_x = (reference.x or 0) - (blob.x or 0) - elseif location == v_right then - move_x = (reference.x or 0) - (blob.x or 0) + (reference.w or 0) - hsize - else - -- not yet done - end - end - end - end - - if move_x then - delta = delta - move_x - if trace_margindata then - report_margindata("realigned %a, location %a, margin %a, move %p",candidate.n,location,margin,move_x) - end - else - if trace_margindata then - report_margindata("realigned %a, location %a, margin %a",candidate.n,location,margin) - end - end - - current.list = hpack_nodes(concat_nodes{anchornode,new_kern(-delta),current.list,new_kern(delta)}) - current.width = 0 -end - -local function realigned(current,a) - local candidate = status[a] - realign(current,candidate) - nofdelayed = nofdelayed - 1 - status[a] = nil - return true -end - --- Stacking is done in two ways: the v_yes option stacks per paragraph (or line, --- depending on what gets by) and mostly concerns margin data dat got set at more or --- less the same time. The v_continue option uses position tracking and works on --- larger range. However, crossing pages is not part of it. Anyway, when you have --- such messed up margin data you'd better think twice. --- --- The stacked table keeps track (per location) of the offsets (the v_yes case). This --- table gets saved when the v_continue case is active. We use a special variant --- of position tracking, after all we only need the page number and vertical position. 
- -local stacked = { } -- left/right keys depending on location -local cache = { } - -local function resetstacked() - stacked = { } -end - --- resetstacked() - -function margins.ha(tag) -- maybe l/r keys ipv left/right keys - local p = cache[tag] - p.p = true - p.y = true - jobpositions.set('md:v',tag,p) - cache[tag] = nil -end - -local function markovershoot(current) - v_anchors = v_anchors + 1 - cache[v_anchors] = stacked - local anchor = new_latelua(format("typesetters.margins.ha(%s)",v_anchors)) -- todo: alleen als offset > line - current.list = hpack_nodes(concat_nodes{anchor,current.list}) -end - -local function getovershoot(location) - local p = jobpositions.get("md:v",v_anchors) - local c = jobpositions.get("md:v",v_anchors+1) - if p and c and p.p and p.p == c.p then - local distance = p.y - c.y - local offset = p[location] or 0 - local overshoot = offset - distance - if trace_marginstack then - report_margindata("location %a, distance %p, offset %p, overshoot %p",location,distance,offset,overshoot) - end - if overshoot > 0 then - return overshoot - end - end - return 0 -end - -local function inject(parent,head,candidate) - local box = candidate.box - local width = box.width - local height = box.height - local depth = box.depth - local shift = box.shift - local stack = candidate.stack - local location = candidate.location - local method = candidate.method - local voffset = candidate.voffset - local line = candidate.line - local baseline = candidate.baseline - local strutheight = candidate.strutheight - local strutdepth = candidate.strutdepth - local psubtype = parent.subtype - local offset = stacked[location] - local firstonstack = offset == false or offset == nil - nofstatus = nofstatus + 1 - nofdelayed = nofdelayed + 1 - status[nofstatus] = candidate - -- yet untested - if baseline == true then - baseline = false - -- hbox vtop ---~ for h in traverse_id(hlist_code,box.list.list) do ---~ baseline = h.height ---~ break ---~ end - else - baseline = tonumber(baseline) - if not baseline or baseline <= 0 then - -- in case we have a box of width 0 that is not analyzed - baseline = false -- strutheight -- actually a hack - end - end - candidate.width = width - candidate.hsize = parent.width -- we can also pass textwidth - candidate.psubtype = psubtype - if trace_margindata then - report_margindata("processing, index %s, height %p, depth %p, parent %s",candidate.n,height,depth,listcodes[psubtype]) - end - if firstonstack then - offset = 0 - else --- offset = offset + height - end - if stack == v_yes then - offset = offset + candidate.dy - shift = shift + offset - elseif stack == v_continue then - offset = offset + candidate.dy - if firstonstack then - offset = offset + getovershoot(location) - end - shift = shift + offset - end - -- -- -- - -- Maybe we also need to patch offset when we apply methods, but how ... - -- This needs a bit of playing as it depends on the stack setting of the - -- following which we don't know yet ... so, consider stacking partially - -- experimental. 
- -- -- -- - if method == v_top then - local delta = height - parent.height - if trace_margindata then - report_margindata("top aligned by %p",delta) - end - if delta < candidate.threshold then - shift = shift + voffset + delta - end - elseif method == v_first then - if baseline then - shift = shift + voffset + height - baseline -- option - else - shift = shift + voffset -- normal - end - if trace_margindata then - report_margindata("first aligned") - end - elseif method == v_depth then - local delta = strutdepth - if trace_margindata then - report_margindata("depth aligned by %p",delta) - end - shift = shift + voffset + delta - elseif method == v_height then - local delta = - strutheight - if trace_margindata then - report_margindata("height aligned by %p",delta) - end - shift = shift + voffset + delta - elseif voffset ~= 0 then - if trace_margindata then - report_margindata("voffset %p applied",voffset) - end - shift = shift + voffset - end - -- -- -- - if line ~= 0 then - local delta = line * candidate.lineheight - if trace_margindata then - report_margindata("offset %p applied to line %s",delta,line) - end - shift = shift + delta - offset = offset + delta - end - box.shift = shift - box.width = 0 - if not head then - head = box - elseif head.id == whatsit_code and head.subtype == localpar_code then - -- experimental - if head.dir == "TRT" then - box.list = hpack_nodes(concat_nodes{new_kern(candidate.hsize),box.list,new_kern(-candidate.hsize)}) - end - insert_node_after(head,head,box) - else - head.prev = box - box.next = head - head = box - end - box[a_margindata] = nofstatus - if trace_margindata then - report_margindata("injected, location %a, shift %p",location,shift) - end - -- we need to add line etc to offset as well - offset = offset + depth - local room = { - height = height, - depth = offset, - slack = candidate.bottomspace, -- todo: 'depth' => strutdepth - lineheight = candidate.lineheight, -- only for tracing - } - offset = offset + height - stacked[location] = offset -- weird, no table ? 
- -- todo: if no real depth then zero - if trace_margindata then - report_margindata("status, offset %s",offset) - end - return head, room, stack == v_continue -end - -local function flushinline(parent,head) - local current = head - local done = false - local continue = false - local room, don, con - while current and nofinlined > 0 do - local id = current.id - if id == whatsit_code then - if current.subtype == userdefined_code and current.user_id == inline_mark then - local n = current.value - local candidate = inlinestore[n] - if candidate then -- no vpack, as we want to realign - inlinestore[n] = nil - nofinlined = nofinlined - 1 - head, room, con = inject(parent,head,candidate) -- maybe return applied offset - continue = continue or con - done = true - nofstored = nofstored - 1 - end - end - elseif id == hlist_code or id == vlist_code then - -- optional (but sometimes needed) - current.list, don, con = flushinline(current,current.list) - continue = continue or con - done = done or don - end - current = current.next - end - return head, done, continue -end - -local a_linenumber = attributes.private('linenumber') - -local function flushed(scope,parent) -- current is hlist - local head = parent.list - local done = false - local continue = false - local room, con, don - for c=1,#categories do - local category = categories[c] - for l=1,#locations do - local location = locations[l] - local store = displaystore[category][location][scope] - while true do - local candidate = remove(store,1) -- brr, local stores are sparse - if candidate then -- no vpack, as we want to realign - head, room, con = inject(parent,head,candidate) - done = true - continue = continue or con - nofstored = nofstored - 1 - registertogether(parent,room) - else - break - end - end - end - end - if nofinlined > 0 then - if done then - parent.list = head - end - head, don, con = flushinline(parent,head) - continue = continue or con - done = done or don - end - if done then - local a = head[a_linenumber] -- hack .. we need a more decent critical attribute inheritance mechanism - parent.list = hpack_nodes(head,parent.width,"exactly") - if a then - parent.list[a_linenumber] = a - end - -- resetstacked() - end - return done, continue -end - --- only when group : vbox|vmode_par --- only when subtype : line, box (no indent alignment cell) - -local function handler(scope,head,group) - if nofstored > 0 then - if trace_margindata then - report_margindata("flushing stage one, stored %s, scope %s, delayed %s, group %a",nofstored,scope,nofdelayed,group) - end - local current = head - local done = false - while current do - local id = current.id - if (id == vlist_code or id == hlist_code) and not current[a_margindata] then - local don, continue = flushed(scope,current) - if don then - current[a_margindata] = 0 -- signal to prevent duplicate processing - if continue then - markovershoot(current) - end - if nofstored <= 0 then - break - end - done = true - end - end - current = current.next - end - -- if done then - resetstacked() -- why doesn't done work ok here? 
- -- end - return head, done - else - return head, false - end -end - -function margins.localhandler(head,group) -- sometimes group is "" which is weird - local inhibit = conditionals.inhibitmargindata - if inhibit then - if trace_margingroup then - report_margindata("ignored 3, group %a, stored %s, inhibit %a",group,nofstored,inhibit) - end - return head, false - elseif nofstored > 0 then - return handler(v_local,head,group) - else - if trace_margingroup then - report_margindata("ignored 4, group %a, stored %s, inhibit %a",group,nofstored,inhibit) - end - return head, false - end -end - -function margins.globalhandler(head,group) -- check group - local inhibit = conditionals.inhibitmargindata - if inhibit or nofstored == 0 then - if trace_margingroup then - report_margindata("ignored 1, group %a, stored %s, inhibit %a",group,nofstored,inhibit) - end - return head, false - elseif group == "hmode_par" then - return handler("global",head,group) - elseif group == "vmode_par" then -- experiment (for alignments) - return handler("global",head,group) - -- this needs checking as we then get quite some one liners to process and - -- we cannot look ahead then: - elseif group == "box" then -- experiment (for alignments) - return handler("global",head,group) - elseif group == "alignment" then -- experiment (for alignments) - return handler("global",head,group) - else - if trace_margingroup then - report_margindata("ignored 2, group %a, stored %s, inhibit %a",group,nofstored,inhibit) - end - return head, false - end -end - -local function finalhandler(head) - if nofdelayed > 0 then - local current = head - local done = false - while current do - local id = current.id - if id == hlist_code then - local a = current[a_margindata] - if not a or a == 0 then - finalhandler(current.list) - elseif realigned(current,a) then - done = true - if nofdelayed == 0 then - return head, true - end - end - elseif id == vlist_code then - finalhandler(current.list) - end - current = current.next - end - return head, done - else - return head, false - end -end - -function margins.finalhandler(head) - if nofdelayed > 0 then - -- if trace_margindata then - -- report_margindata("flushing stage two, instore: %s, delayed: %s",nofstored,nofdelayed) - -- end - return finalhandler(head) - else - return head, false - end -end - --- Somehow the vbox builder (in combinations) gets pretty confused and decides to --- go horizontal. So this needs more testing. 
- -prependaction("finalizers", "lists", "typesetters.margins.localhandler") --- ("vboxbuilders", "normalizers", "typesetters.margins.localhandler") -prependaction("mvlbuilders", "normalizers", "typesetters.margins.globalhandler") -prependaction("shipouts", "normalizers", "typesetters.margins.finalhandler") - -disableaction("finalizers", "typesetters.margins.localhandler") --- ("vboxbuilders", "typesetters.margins.localhandler") -disableaction("mvlbuilders", "typesetters.margins.globalhandler") -disableaction("shipouts", "typesetters.margins.finalhandler") - -enablelocal = function() - enableaction("finalizers", "typesetters.margins.localhandler") - -- enableaction("vboxbuilders", "typesetters.margins.localhandler") - enableaction("shipouts", "typesetters.margins.finalhandler") - enablelocal = nil -end - -enableglobal = function() - enableaction("mvlbuilders", "typesetters.margins.globalhandler") - enableaction("shipouts", "typesetters.margins.finalhandler") - enableglobal = nil -end - -statistics.register("margin data", function() - if nofsaved > 0 then - return format("%s entries, %s pending",nofsaved,nofdelayed) - else - return nil - end -end) +if not modules then modules = { } end modules ['typo-mar'] = { + version = 1.001, + comment = "companion to typo-mar.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: +-- +-- * autoleft/right depending on available space (or distance to margin) +-- * stack across paragraphs, but that is messy and one should reconsider +-- using margin data then as also vertical spacing kicks in +-- * floating margin data, with close-to-call anchoring + +-- -- experiment (does not work, too much interference) +-- +-- local pdfprint = pdf.print +-- local format = string.format +-- +-- anchors = anchors or { } +-- +-- local whatever = { } +-- local factor = (7200/7227)/65536 +-- +-- function anchors.set(tag) +-- whatever[tag] = { pdf.h, pdf.v } +-- end +-- +-- function anchors.reset(tag) +-- whatever[tag] = nil +-- end +-- +-- function anchors.startmove(tag,how) -- save/restore nodes but they don't support moves +-- local w = whatever[tag] +-- if not w then +-- -- error +-- elseif how == "horizontal" or how == "h" then +-- pdfprint("page",format(" q 1 0 0 1 %f 0 cm ", (w[1] - pdf.h) * factor)) +-- elseif how == "vertical" or how == "v" then +-- pdfprint("page",format(" q 1 0 0 1 0 %f cm ", (w[2] - pdf.v) * factor)) +-- else +-- pdfprint("page",format(" q 1 0 0 1 %f %f cm ", (w[1] - pdf.h) * factor, (w[2] - pdf.v) * factor)) +-- end +-- end +-- +-- function anchors.stopmove(tag) +-- local w = whatever[tag] +-- if not w then +-- -- error +-- else +-- pdfprint("page"," Q ") +-- end +-- end +-- +-- local latelua = nodes.pool.latelua +-- +-- function anchors.node_set(tag) +-- return latelua(formatters["anchors.set(%q)"](tag)) +-- end +-- +-- function anchors.node_reset(tag) +-- return latelua(formatters["anchors.reset(%q)"](tag)) +-- end +-- +-- function anchors.node_start_move(tag,how) +-- return latelua(formatters["anchors.startmove(%q,%q)](tag,how)) +-- end +-- +-- function anchors.node_stop_move(tag) +-- return latelua(formatters["anchors.stopmove(%q)"](tag)) +-- end + +-- so far + +local format, validstring = string.format, string.valid +local insert, remove = table.insert, table.remove +local setmetatable, next = setmetatable, next + +local attributes, nodes, node, variables = attributes, nodes, node, variables + +local trace_margindata = false 
trackers.register("typesetters.margindata", function(v) trace_margindata = v end) +local trace_marginstack = false trackers.register("typesetters.margindata.stack", function(v) trace_marginstack = v end) +local trace_margingroup = false trackers.register("typesetters.margindata.group", function(v) trace_margingroup = v end) + +local report_margindata = logs.reporter("typesetters","margindata") + +local tasks = nodes.tasks +local prependaction = tasks.prependaction +local disableaction = tasks.disableaction +local enableaction = tasks.enableaction + +local variables = interfaces.variables + +local conditionals = tex.conditionals +local systemmodes = tex.systemmodes + +local v_top = variables.top +local v_depth = variables.depth +local v_local = variables["local"] +local v_global = variables["global"] +local v_left = variables.left +local v_right = variables.right +local v_flushleft = variables.flushleft +local v_flushright = variables.flushright +local v_inner = variables.inner +local v_outer = variables.outer +local v_margin = variables.margin +local v_edge = variables.edge +local v_default = variables.default +local v_normal = variables.normal +local v_yes = variables.yes +local v_continue = variables.continue +local v_first = variables.first +local v_text = variables.text +local v_column = variables.column + +local copy_node_list = node.copy_list +local slide_nodes = node.slide +local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here +local traverse_id = node.traverse_id +local free_node_list = node.flush_list +local insert_node_after = node.insert_after +local insert_node_before = node.insert_before + +local concat_nodes = nodes.concat + +local nodecodes = nodes.nodecodes +local listcodes = nodes.listcodes +local gluecodes = nodes.gluecodes +local whatsitcodes = nodes.whatsitcodes + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glue_code = nodecodes.glue +local kern_code = nodecodes.kern +local penalty_code = nodecodes.penalty +local whatsit_code = nodecodes.whatsit +local line_code = listcodes.line +local cell_code = listcodes.cell +local alignment_code = listcodes.alignment +local leftskip_code = gluecodes.leftskip +local rightskip_code = gluecodes.rightskip +local userdefined_code = whatsitcodes.userdefined + +local dir_code = whatsitcodes.dir +local localpar_code = whatsitcodes.localpar + +local nodepool = nodes.pool + +local new_kern = nodepool.kern +local new_glue = nodepool.glue +local new_penalty = nodepool.penalty +local new_stretch = nodepool.stretch +local new_usernumber = nodepool.usernumber +local new_latelua = nodepool.latelua + +local texcount = tex.count +local texdimen = tex.dimen +local texbox = tex.box + +local points = number.points + +local isleftpage = layouts.status.isleftpage +local registertogether = builders.paragraphs.registertogether + +local jobpositions = job.positions +local getposition = jobpositions.position + +local a_margindata = attributes.private("margindata") + +local inline_mark = nodepool.userids["margins.inline"] + +local margins = { } +typesetters.margins = margins + +local locations = { v_left, v_right, v_inner, v_outer } -- order might change +local categories = { } +local displaystore = { } -- [category][location][scope] +local inlinestore = { } -- [number] +local nofsaved = 0 +local nofstored = 0 +local nofinlined = 0 +local nofdelayed = 0 +local h_anchors = 0 +local v_anchors = 0 + +local mt1 = { + __index = function(t,location) + local v = { [v_local] = { }, [v_global] = { } } + t[location] 
= v + return v + end +} + +local mt2 = { + __index = function(stores,category) + categories[#categories+1] = category + local v = { } + setmetatable(v,mt1) + stores[category] = v + return v + end +} + +setmetatable(displaystore,mt2) + +local defaults = { + __index = { + location = v_left, + align = v_normal, + method = "", + name = "", + threshold = 0, -- .25ex + margin = v_normal, + scope = v_global, + distance = 0, + hoffset = 0, + voffset = 0, + category = v_default, + line = 0, + vstack = 0, + dy = 0, + baseline = false, + inline = false, + leftskip = 0, + rightskip = 0, + } +} + +local enablelocal, enableglobal -- forward reference (delayed initialization) + +local function showstore(store,banner,location) + if next(store) then + for i, si in table.sortedpairs(store) do + local si =store[i] + report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(si.box.list)) + end + else + report_margindata("%s: nothing stored in location %a",banner,location) + end +end + +function margins.save(t) + setmetatable(t,defaults) + local content = texbox[t.number] + local location = t.location + local category = t.category + local inline = t.inline + local scope = t.scope or v_global + if not content then + report_margindata("ignoring empty margin data %a",location or "unknown") + return + end + local store + if inline then + store = inlinestore + else + store = displaystore[category][location] + if not store then + report_margindata("invalid location %a",location) + return + end + store = store[scope] + end + if not store then + report_margindata("invalid scope %a",scope) + return + end + if enablelocal and scope == v_local then + enablelocal() + if enableglobal then + enableglobal() -- is the fallback + end + elseif enableglobal and scope == v_global then + enableglobal() + end + nofsaved = nofsaved + 1 + nofstored = nofstored + 1 + local name = t.name + if trace_marginstack then + showstore(store,"before",location) + end + if name and name ~= "" then + if inlinestore then -- todo: inline store has to be done differently (not sparse) + local t = table.sortedkeys(store) for j=#t,1,-1 do local i = t[j] + local si = store[i] + if si.name == name then + local s = remove(store,i) + free_node_list(s.box) + end + end + else + for i=#store,1,-1 do + local si = store[i] + if si.name == name then + local s = remove(store,i) + free_node_list(s.box) + end + end + end + if trace_marginstack then + showstore(store,"between",location) + end + end + if t.number then + -- better make a new table and make t entry in t + t.box = copy_node_list(content) + t.n = nofsaved + -- used later (we will clean up this natural mess later) + -- nice is to make a special status table mechanism + local leftmargindistance = texdimen.naturalleftmargindistance + local rightmargindistance = texdimen.naturalrightmargindistance + t.strutdepth = texbox.strutbox.depth + t.strutheight = texbox.strutbox.height + t.leftskip = tex.leftskip.width -- we're not in forgetall + t.rightskip = tex.rightskip.width -- we're not in forgetall + t.leftmargindistance = leftmargindistance -- todo:layoutstatus table + t.rightmargindistance = rightmargindistance + t.leftedgedistance = texdimen.naturalleftedgedistance + + texdimen.leftmarginwidth + + leftmargindistance + t.rightedgedistance = texdimen.naturalrightedgedistance + + texdimen.rightmarginwidth + + rightmargindistance + t.lineheight = texdimen.lineheight + -- + -- t.realpageno = texcount.realpageno + if inline then + 
context(new_usernumber(inline_mark,nofsaved)) + store[nofsaved] = t -- no insert + nofinlined = nofinlined + 1 + else + insert(store,t) + end + end + if trace_marginstack then + showstore(store,"after",location) + end + if trace_margindata then + report_margindata("saved %a, location %a, scope %a, inline %a",nofsaved,location,scope,inline) + end +end + +-- Actually it's an advantage to have them all anchored left (tags and such) +-- we could keep them in store and flush in stage two but we might want to +-- do more before that so we need the content to be there unless we can be +-- sure that we flush this first which might not be the case in the future. +-- +-- When the prototype inner/outer code that was part of this proved to be +-- okay it was moved elsewhere. + +local status, nofstatus = { }, 0 + +local function realign(current,candidate) + local location = candidate.location + local margin = candidate.margin + local hoffset = candidate.hoffset + local distance = candidate.distance + local hsize = candidate.hsize + local width = candidate.width + local align = candidate.align + -- local realpageno = candidate.realpageno + local leftpage = isleftpage(false,true) + local delta = 0 + local leftdelta = 0 + local rightdelta = 0 + local leftdistance = distance + local rightdistance = distance + if margin == v_normal then + -- + elseif margin == v_local then + leftdelta = - candidate.leftskip + rightdelta = candidate.rightskip + elseif margin == v_margin then + leftdistance = candidate.leftmargindistance + rightdistance = candidate.rightmargindistance + elseif margin == v_edge then + leftdistance = candidate.leftedgedistance + rightdistance = candidate.rightedgedistance + end + if leftpage then + leftdistance, rightdistance = rightdistance, leftdistance + end + + if location == v_left then + delta = hoffset + width + leftdistance + leftdelta + elseif location == v_right then + delta = -hoffset - hsize - rightdistance + rightdelta + elseif location == v_inner then + if leftpage then + delta = -hoffset - hsize - rightdistance + rightdelta + else + delta = hoffset + width + leftdistance + leftdelta + end + elseif location == v_outer then + if leftpage then + delta = hoffset + width + leftdistance + leftdelta + else + delta = -hoffset - hsize - rightdistance + rightdelta + end + end + + -- we assume that list is a hbox, otherwise we had to take the whole current + -- in order to get it right + + current.width = 0 + local anchornode, move_x + + -- this mess is needed for alignments (combinations) so we use that + -- oportunity to add arbitrary anchoring + + -- always increment anchor is nicer for multipass when we add new .. 
+ + local inline = candidate.inline + local anchor = candidate.anchor + if not anchor or anchor == "" then + anchor = v_text + end + if inline or anchor ~= v_text or candidate.psubtype == alignment_code then + -- the alignment_code check catches margintexts ste before a tabulate + h_anchors = h_anchors + 1 + anchornode = new_latelua(format("_plib_.set('md:h',%i,{x=true,c=true})",h_anchors)) + local blob = jobpositions.get('md:h', h_anchors) + if blob then + local reference = jobpositions.getreserved(anchor,blob.c) + if reference then + if location == v_left then + move_x = (reference.x or 0) - (blob.x or 0) + elseif location == v_right then + move_x = (reference.x or 0) - (blob.x or 0) + (reference.w or 0) - hsize + else + -- not yet done + end + end + end + end + + if move_x then + delta = delta - move_x + if trace_margindata then + report_margindata("realigned %a, location %a, margin %a, move %p",candidate.n,location,margin,move_x) + end + else + if trace_margindata then + report_margindata("realigned %a, location %a, margin %a",candidate.n,location,margin) + end + end + + current.list = hpack_nodes(concat_nodes{anchornode,new_kern(-delta),current.list,new_kern(delta)}) + current.width = 0 +end + +local function realigned(current,a) + local candidate = status[a] + realign(current,candidate) + nofdelayed = nofdelayed - 1 + status[a] = nil + return true +end + +-- Stacking is done in two ways: the v_yes option stacks per paragraph (or line, +-- depending on what gets by) and mostly concerns margin data dat got set at more or +-- less the same time. The v_continue option uses position tracking and works on +-- larger range. However, crossing pages is not part of it. Anyway, when you have +-- such messed up margin data you'd better think twice. +-- +-- The stacked table keeps track (per location) of the offsets (the v_yes case). This +-- table gets saved when the v_continue case is active. We use a special variant +-- of position tracking, after all we only need the page number and vertical position. 
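-- Editor's note: the following sketch is not part of the patch; it is a minimal,
-- hypothetical illustration (plain Lua, no ConTeXt dependencies) of the
-- per-location offset bookkeeping that the comment block above describes.
-- In typo-mar.lua the 'stacked' table keeps one running offset per location
-- (left/right/inner/outer); each stacked candidate adds its own 'dy', and in
-- the v_continue case the first candidate also absorbs the overshoot derived
-- from position tracking. Function and variable names here are simplified.

local stacked = { }                       -- per-location running offsets

local function stackoffset(location,dy,overshoot)
    local offset = stacked[location]
    if not offset then                    -- first candidate on this stack
        offset = overshoot or 0           -- v_continue compensates for overshoot
    end
    offset = offset + dy                  -- add the candidate's own displacement
    stacked[location] = offset
    return offset                         -- becomes the extra shift of the box
end

-- usage: two margin blobs stacked in the left margin (dimensions in scaled points)
-- print(stackoffset("left",65536*12))    --> 786432  (12pt below the first anchor)
-- print(stackoffset("left",65536*12))    --> 1572864 (the second blob sits lower)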
+ +local stacked = { } -- left/right keys depending on location +local cache = { } + +local function resetstacked() + stacked = { } +end + +-- resetstacked() + +function margins.ha(tag) -- maybe l/r keys ipv left/right keys + local p = cache[tag] + p.p = true + p.y = true + jobpositions.set('md:v',tag,p) + cache[tag] = nil +end + +local function markovershoot(current) + v_anchors = v_anchors + 1 + cache[v_anchors] = stacked + local anchor = new_latelua(format("typesetters.margins.ha(%s)",v_anchors)) -- todo: alleen als offset > line + current.list = hpack_nodes(concat_nodes{anchor,current.list}) +end + +local function getovershoot(location) + local p = jobpositions.get("md:v",v_anchors) + local c = jobpositions.get("md:v",v_anchors+1) + if p and c and p.p and p.p == c.p then + local distance = p.y - c.y + local offset = p[location] or 0 + local overshoot = offset - distance + if trace_marginstack then + report_margindata("location %a, distance %p, offset %p, overshoot %p",location,distance,offset,overshoot) + end + if overshoot > 0 then + return overshoot + end + end + return 0 +end + +local function inject(parent,head,candidate) + local box = candidate.box + local width = box.width + local height = box.height + local depth = box.depth + local shift = box.shift + local stack = candidate.stack + local location = candidate.location + local method = candidate.method + local voffset = candidate.voffset + local line = candidate.line + local baseline = candidate.baseline + local strutheight = candidate.strutheight + local strutdepth = candidate.strutdepth + local psubtype = parent.subtype + local offset = stacked[location] + local firstonstack = offset == false or offset == nil + nofstatus = nofstatus + 1 + nofdelayed = nofdelayed + 1 + status[nofstatus] = candidate + -- yet untested + if baseline == true then + baseline = false + -- hbox vtop +--~ for h in traverse_id(hlist_code,box.list.list) do +--~ baseline = h.height +--~ break +--~ end + else + baseline = tonumber(baseline) + if not baseline or baseline <= 0 then + -- in case we have a box of width 0 that is not analyzed + baseline = false -- strutheight -- actually a hack + end + end + candidate.width = width + candidate.hsize = parent.width -- we can also pass textwidth + candidate.psubtype = psubtype + if trace_margindata then + report_margindata("processing, index %s, height %p, depth %p, parent %s",candidate.n,height,depth,listcodes[psubtype]) + end + if firstonstack then + offset = 0 + else +-- offset = offset + height + end + if stack == v_yes then + offset = offset + candidate.dy + shift = shift + offset + elseif stack == v_continue then + offset = offset + candidate.dy + if firstonstack then + offset = offset + getovershoot(location) + end + shift = shift + offset + end + -- -- -- + -- Maybe we also need to patch offset when we apply methods, but how ... + -- This needs a bit of playing as it depends on the stack setting of the + -- following which we don't know yet ... so, consider stacking partially + -- experimental. 
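-- The branches below apply the vertical alignment method: v_top lines the top of
-- the note up with the top of the parent (only when the difference stays within
-- the threshold), v_first aligns on the first baseline when one is known, v_depth
-- and v_height shift by the strut depth respectively the strut height, and
-- otherwise only the plain voffset is applied.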
+ -- -- -- + if method == v_top then + local delta = height - parent.height + if trace_margindata then + report_margindata("top aligned by %p",delta) + end + if delta < candidate.threshold then + shift = shift + voffset + delta + end + elseif method == v_first then + if baseline then + shift = shift + voffset + height - baseline -- option + else + shift = shift + voffset -- normal + end + if trace_margindata then + report_margindata("first aligned") + end + elseif method == v_depth then + local delta = strutdepth + if trace_margindata then + report_margindata("depth aligned by %p",delta) + end + shift = shift + voffset + delta + elseif method == v_height then + local delta = - strutheight + if trace_margindata then + report_margindata("height aligned by %p",delta) + end + shift = shift + voffset + delta + elseif voffset ~= 0 then + if trace_margindata then + report_margindata("voffset %p applied",voffset) + end + shift = shift + voffset + end + -- -- -- + if line ~= 0 then + local delta = line * candidate.lineheight + if trace_margindata then + report_margindata("offset %p applied to line %s",delta,line) + end + shift = shift + delta + offset = offset + delta + end + box.shift = shift + box.width = 0 + if not head then + head = box + elseif head.id == whatsit_code and head.subtype == localpar_code then + -- experimental + if head.dir == "TRT" then + box.list = hpack_nodes(concat_nodes{new_kern(candidate.hsize),box.list,new_kern(-candidate.hsize)}) + end + insert_node_after(head,head,box) + else + head.prev = box + box.next = head + head = box + end + box[a_margindata] = nofstatus + if trace_margindata then + report_margindata("injected, location %a, shift %p",location,shift) + end + -- we need to add line etc to offset as well + offset = offset + depth + local room = { + height = height, + depth = offset, + slack = candidate.bottomspace, -- todo: 'depth' => strutdepth + lineheight = candidate.lineheight, -- only for tracing + } + offset = offset + height + stacked[location] = offset -- weird, no table ? 
+ -- todo: if no real depth then zero + if trace_margindata then + report_margindata("status, offset %s",offset) + end + return head, room, stack == v_continue +end + +local function flushinline(parent,head) + local current = head + local done = false + local continue = false + local room, don, con + while current and nofinlined > 0 do + local id = current.id + if id == whatsit_code then + if current.subtype == userdefined_code and current.user_id == inline_mark then + local n = current.value + local candidate = inlinestore[n] + if candidate then -- no vpack, as we want to realign + inlinestore[n] = nil + nofinlined = nofinlined - 1 + head, room, con = inject(parent,head,candidate) -- maybe return applied offset + continue = continue or con + done = true + nofstored = nofstored - 1 + end + end + elseif id == hlist_code or id == vlist_code then + -- optional (but sometimes needed) + current.list, don, con = flushinline(current,current.list) + continue = continue or con + done = done or don + end + current = current.next + end + return head, done, continue +end + +local a_linenumber = attributes.private('linenumber') + +local function flushed(scope,parent) -- current is hlist + local head = parent.list + local done = false + local continue = false + local room, con, don + for c=1,#categories do + local category = categories[c] + for l=1,#locations do + local location = locations[l] + local store = displaystore[category][location][scope] + while true do + local candidate = remove(store,1) -- brr, local stores are sparse + if candidate then -- no vpack, as we want to realign + head, room, con = inject(parent,head,candidate) + done = true + continue = continue or con + nofstored = nofstored - 1 + registertogether(parent,room) + else + break + end + end + end + end + if nofinlined > 0 then + if done then + parent.list = head + end + head, don, con = flushinline(parent,head) + continue = continue or con + done = done or don + end + if done then + local a = head[a_linenumber] -- hack .. we need a more decent critical attribute inheritance mechanism + parent.list = hpack_nodes(head,parent.width,"exactly") + if a then + parent.list[a_linenumber] = a + end + -- resetstacked() + end + return done, continue +end + +-- only when group : vbox|vmode_par +-- only when subtype : line, box (no indent alignment cell) + +local function handler(scope,head,group) + if nofstored > 0 then + if trace_margindata then + report_margindata("flushing stage one, stored %s, scope %s, delayed %s, group %a",nofstored,scope,nofdelayed,group) + end + local current = head + local done = false + while current do + local id = current.id + if (id == vlist_code or id == hlist_code) and not current[a_margindata] then + local don, continue = flushed(scope,current) + if don then + current[a_margindata] = 0 -- signal to prevent duplicate processing + if continue then + markovershoot(current) + end + if nofstored <= 0 then + break + end + done = true + end + end + current = current.next + end + -- if done then + resetstacked() -- why doesn't done work ok here? 
+ -- end + return head, done + else + return head, false + end +end + +function margins.localhandler(head,group) -- sometimes group is "" which is weird + local inhibit = conditionals.inhibitmargindata + if inhibit then + if trace_margingroup then + report_margindata("ignored 3, group %a, stored %s, inhibit %a",group,nofstored,inhibit) + end + return head, false + elseif nofstored > 0 then + return handler(v_local,head,group) + else + if trace_margingroup then + report_margindata("ignored 4, group %a, stored %s, inhibit %a",group,nofstored,inhibit) + end + return head, false + end +end + +function margins.globalhandler(head,group) -- check group + local inhibit = conditionals.inhibitmargindata + if inhibit or nofstored == 0 then + if trace_margingroup then + report_margindata("ignored 1, group %a, stored %s, inhibit %a",group,nofstored,inhibit) + end + return head, false + elseif group == "hmode_par" then + return handler("global",head,group) + elseif group == "vmode_par" then -- experiment (for alignments) + return handler("global",head,group) + -- this needs checking as we then get quite some one liners to process and + -- we cannot look ahead then: + elseif group == "box" then -- experiment (for alignments) + return handler("global",head,group) + elseif group == "alignment" then -- experiment (for alignments) + return handler("global",head,group) + else + if trace_margingroup then + report_margindata("ignored 2, group %a, stored %s, inhibit %a",group,nofstored,inhibit) + end + return head, false + end +end + +local function finalhandler(head) + if nofdelayed > 0 then + local current = head + local done = false + while current do + local id = current.id + if id == hlist_code then + local a = current[a_margindata] + if not a or a == 0 then + finalhandler(current.list) + elseif realigned(current,a) then + done = true + if nofdelayed == 0 then + return head, true + end + end + elseif id == vlist_code then + finalhandler(current.list) + end + current = current.next + end + return head, done + else + return head, false + end +end + +function margins.finalhandler(head) + if nofdelayed > 0 then + -- if trace_margindata then + -- report_margindata("flushing stage two, instore: %s, delayed: %s",nofstored,nofdelayed) + -- end + return finalhandler(head) + else + return head, false + end +end + +-- Somehow the vbox builder (in combinations) gets pretty confused and decides to +-- go horizontal. So this needs more testing. 
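-- The two stages above talk to each other through the margin data attribute:
-- stage one stores a candidate and marks the injected box, stage two picks the
-- mark up at shipout time and realigns. A bare bones standalone sketch of that
-- handshake (names made up; the real code uses 'status', 'nofdelayed' and a
-- node attribute instead of a plain field):

local pending    = { }                     -- index -> candidate needed again in stage two
local nofpending = 0

local function markbox(box,candidate)      -- stage one, while the page is being built
    nofpending = nofpending + 1
    pending[nofpending] = candidate
    box.mark = nofpending
end

local function flushbox(box,realign)       -- stage two, at shipout time
    local m = box.mark
    if m and pending[m] then
        realign(box,pending[m])            -- consume the stored candidate
        pending[m] = nil
        nofpending = nofpending - 1
    end
end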
+ +prependaction("finalizers", "lists", "typesetters.margins.localhandler") +-- ("vboxbuilders", "normalizers", "typesetters.margins.localhandler") +prependaction("mvlbuilders", "normalizers", "typesetters.margins.globalhandler") +prependaction("shipouts", "normalizers", "typesetters.margins.finalhandler") + +disableaction("finalizers", "typesetters.margins.localhandler") +-- ("vboxbuilders", "typesetters.margins.localhandler") +disableaction("mvlbuilders", "typesetters.margins.globalhandler") +disableaction("shipouts", "typesetters.margins.finalhandler") + +enablelocal = function() + enableaction("finalizers", "typesetters.margins.localhandler") + -- enableaction("vboxbuilders", "typesetters.margins.localhandler") + enableaction("shipouts", "typesetters.margins.finalhandler") + enablelocal = nil +end + +enableglobal = function() + enableaction("mvlbuilders", "typesetters.margins.globalhandler") + enableaction("shipouts", "typesetters.margins.finalhandler") + enableglobal = nil +end + +statistics.register("margin data", function() + if nofsaved > 0 then + return format("%s entries, %s pending",nofsaved,nofdelayed) + else + return nil + end +end) diff --git a/tex/context/base/typo-pag.lua b/tex/context/base/typo-pag.lua index 0dd75ddf9..d39748d26 100644 --- a/tex/context/base/typo-pag.lua +++ b/tex/context/base/typo-pag.lua @@ -1,179 +1,179 @@ -if not modules then modules = { } end modules ['typo-pag'] = { - version = 1.001, - comment = "companion to typo-pag.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local nodecodes = nodes.nodecodes - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glue_code = nodecodes.glue -local kern_code = nodecodes.kern -local penalty_code = nodecodes.penalty - -local insert_node_after = node.insert_after -local new_penalty = nodes.pool.penalty - -local unsetvalue = attributes.unsetvalue - -local a_keeptogether = attributes.private("keeptogether") - -local trace_keeptogether = false -local report_keeptogether = logs.reporter("parbuilders","keeptogether") - -local cache = { } -local last = 0 -local enabled = false - -trackers.register("parbuilders.keeptogether", function(v) trace_keeptogether = v end) - --- todo: also support lines = 3 etc (e.g. dropped caps) but how to set that --- when no hlists are there ? ... 
maybe the local_par - -function builders.paragraphs.registertogether(line,specification) -- might change - if not enabled then - nodes.tasks.enableaction("finalizers","builders.paragraphs.keeptogether") - end - local a = line[a_keeptogether] - local c = a and cache[a] - if c then - local height = specification.height - local depth = specification.depth - local slack = specification.slack - if height and height > c.height then - c.height = height - end - if depth and depth > c.depth then - c.depth = depth - end - if slack and slack > c.slack then - c.slack = slack - end - else - last = last + 1 - cache[last] = specification - if not specification.height then - specification.height = 0 - end - if not specification.depth then - specification.depth = 0 - end - if not specification.slack then - specification.slack = 0 - end - line[a_keeptogether] = last - end - if trace_keeptogether then - local a = a or last - local c = cache[a] - if trace_keeptogether then - local noflines = specification.lineheight - local height = c.height - local depth = c.depth - local slack = c.slack - if not noflines or noflines == 0 then - noflines = "unknown" - else - noflines = math.round((height + depth - slack) / noflines) - end - report_keeptogether("registered, index %s, height %p, depth %p, slack %p, noflines %a",a,height,depth,slack,noflines) - end - end -end - -local function keeptogether(start,a) - if start then - local specification = cache[a] - if a then - local current = start.next - local previous = start - local total = previous.depth - local slack = specification.slack - local threshold = specification.depth - slack - if trace_keeptogether then - report_keeptogether("%s, index %s, total %p, threshold %p, slack %p","list",a,total,threshold,slack) - end - while current do - local id = current.id - if id == vlist_code or id == hlist_code then - total = total + current.height + current.depth - if trace_keeptogether then - report_keeptogether("%s, index %s, total %p, threshold %p","list",a,total,threshold) - end - if total <= threshold then - if previous.id == penalty_code then - previous.penalty = 10000 - else - insert_node_after(head,previous,new_penalty(10000)) - end - else - break - end - elseif id == glue_code then - -- hm, breakpoint, maybe turn this into kern - total = total + current.spec.width - if trace_keeptogether then - report_keeptogether("%s, index %s, total %p, threshold %p","glue",a,total,threshold) - end - if total <= threshold then - if previous.id == penalty_code then - previous.penalty = 10000 - else - insert_node_after(head,previous,new_penalty(10000)) - end - else - break - end - elseif id == kern_code then - total = total + current.kern - if trace_keeptogether then - report_keeptogether("%s, index %s, total %s, threshold %s","kern",a,total,threshold) - end - if total <= threshold then - if previous.id == penalty_code then - previous.penalty = 10000 - else - insert_node_after(head,previous,new_penalty(10000)) - end - else - break - end - elseif id == penalty_code then - if total <= threshold then - if previous.id == penalty_code then - previous.penalty = 10000 - end - current.penalty = 10000 - else - break - end - end - previous = current - current = current.next - end - end - end -end - --- also look at first non glue/kern node e.g for a dropped caps - -function builders.paragraphs.keeptogether(head) - local done = false - local current = head - while current do - if current.id == hlist_code then - local a = current[a_keeptogether] - if a and a > 0 then - keeptogether(current,a) - 
current[a_keeptogether] = unsetvalue - cache[a] = nil - done = true - end - end - current = current.next - end - return head, done -end +if not modules then modules = { } end modules ['typo-pag'] = { + version = 1.001, + comment = "companion to typo-pag.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local nodecodes = nodes.nodecodes + +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glue_code = nodecodes.glue +local kern_code = nodecodes.kern +local penalty_code = nodecodes.penalty + +local insert_node_after = node.insert_after +local new_penalty = nodes.pool.penalty + +local unsetvalue = attributes.unsetvalue + +local a_keeptogether = attributes.private("keeptogether") + +local trace_keeptogether = false +local report_keeptogether = logs.reporter("parbuilders","keeptogether") + +local cache = { } +local last = 0 +local enabled = false + +trackers.register("parbuilders.keeptogether", function(v) trace_keeptogether = v end) + +-- todo: also support lines = 3 etc (e.g. dropped caps) but how to set that +-- when no hlists are there ? ... maybe the local_par + +function builders.paragraphs.registertogether(line,specification) -- might change + if not enabled then + nodes.tasks.enableaction("finalizers","builders.paragraphs.keeptogether") + end + local a = line[a_keeptogether] + local c = a and cache[a] + if c then + local height = specification.height + local depth = specification.depth + local slack = specification.slack + if height and height > c.height then + c.height = height + end + if depth and depth > c.depth then + c.depth = depth + end + if slack and slack > c.slack then + c.slack = slack + end + else + last = last + 1 + cache[last] = specification + if not specification.height then + specification.height = 0 + end + if not specification.depth then + specification.depth = 0 + end + if not specification.slack then + specification.slack = 0 + end + line[a_keeptogether] = last + end + if trace_keeptogether then + local a = a or last + local c = cache[a] + if trace_keeptogether then + local noflines = specification.lineheight + local height = c.height + local depth = c.depth + local slack = c.slack + if not noflines or noflines == 0 then + noflines = "unknown" + else + noflines = math.round((height + depth - slack) / noflines) + end + report_keeptogether("registered, index %s, height %p, depth %p, slack %p, noflines %a",a,height,depth,slack,noflines) + end + end +end + +local function keeptogether(start,a) + if start then + local specification = cache[a] + if a then + local current = start.next + local previous = start + local total = previous.depth + local slack = specification.slack + local threshold = specification.depth - slack + if trace_keeptogether then + report_keeptogether("%s, index %s, total %p, threshold %p, slack %p","list",a,total,threshold,slack) + end + while current do + local id = current.id + if id == vlist_code or id == hlist_code then + total = total + current.height + current.depth + if trace_keeptogether then + report_keeptogether("%s, index %s, total %p, threshold %p","list",a,total,threshold) + end + if total <= threshold then + if previous.id == penalty_code then + previous.penalty = 10000 + else + insert_node_after(head,previous,new_penalty(10000)) + end + else + break + end + elseif id == glue_code then + -- hm, breakpoint, maybe turn this into kern + total = total + current.spec.width + if trace_keeptogether then + 
report_keeptogether("%s, index %s, total %p, threshold %p","glue",a,total,threshold) + end + if total <= threshold then + if previous.id == penalty_code then + previous.penalty = 10000 + else + insert_node_after(head,previous,new_penalty(10000)) + end + else + break + end + elseif id == kern_code then + total = total + current.kern + if trace_keeptogether then + report_keeptogether("%s, index %s, total %s, threshold %s","kern",a,total,threshold) + end + if total <= threshold then + if previous.id == penalty_code then + previous.penalty = 10000 + else + insert_node_after(head,previous,new_penalty(10000)) + end + else + break + end + elseif id == penalty_code then + if total <= threshold then + if previous.id == penalty_code then + previous.penalty = 10000 + end + current.penalty = 10000 + else + break + end + end + previous = current + current = current.next + end + end + end +end + +-- also look at first non glue/kern node e.g for a dropped caps + +function builders.paragraphs.keeptogether(head) + local done = false + local current = head + while current do + if current.id == hlist_code then + local a = current[a_keeptogether] + if a and a > 0 then + keeptogether(current,a) + current[a_keeptogether] = unsetvalue + cache[a] = nil + done = true + end + end + current = current.next + end + return head, done +end diff --git a/tex/context/base/typo-par.lua b/tex/context/base/typo-par.lua index b25ae4a5b..0449becbf 100644 --- a/tex/context/base/typo-par.lua +++ b/tex/context/base/typo-par.lua @@ -1,181 +1,181 @@ -if not modules then modules = { } end modules ['typo-par'] = { - version = 1.001, - comment = "companion to typo-par.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- A playground for experiments. - -local utfbyte = utf.byte -local utfchar = utf.char - -local trace_paragraphs = false trackers.register("typesetters.paragraphs", function(v) trace_paragraphs = v end) -local trace_dropper = false trackers.register("typesetters.paragraphs.dropper",function(v) trace_dropper = v end) - -local report_paragraphs = logs.reporter("nodes","paragraphs") -local report_dropper = logs.reporter("nodes","dropped") - -typesetters.paragraphs = typesetters.paragraphs or { } -local paragraphs = typesetters.paragraphs - -local nodecodes = nodes.nodecodes -local whatsitcodes = nodes.whatsitcodes -local tasks = nodes.tasks - -local variables = interfaces.variables - -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue - -local glyph_code = nodecodes.glyph -local hlist_code = nodecodes.hlist -local kern_node = nodecodes.kern -local whatsit_code = nodecodes.whatsit -local localpar_code = whatsitcodes.localpar - -local a_paragraph = attributes.private("paragraphspecial") -local a_color = attributes.private('color') -local a_transparency = attributes.private('transparency') -local a_colorspace = attributes.private('colormodel') - -local dropper = { - enabled = false, - -- font = 0, - -- n = 0, - -- distance = 0, - -- hoffset = 0, - -- voffset = 0, -} - -local droppers = { } - -typesetters.paragraphs.droppers = droppers - -function droppers.set(specification) - dropper = specification or { } -end - -function droppers.freeze() - if dropper.enabled then - dropper.font = font.current() - end -end - --- dropped caps experiment (will be done properly when luatex --- stores the state in the local par node) .. 
btw, search still --- works with dropped caps, as does an export - --- we need a 'par' attribute and in fact for dropped caps we don't need --- need an attribute ... dropit will become s state counter (or end up --- in the localpar user data - --- for the moment, each paragraph gets a number as id (attribute) ..problem --- with nesting .. or anyhow, needed for tagging anyway - --- todo: prevent linebreak .. but normally a dropper ends up atthe top of --- a page so this has a low priority - -local function process(namespace,attribute,head) - local done = false - if head.id == whatsit_code and head.subtype == localpar_code then - -- begin of par - local a = head[attribute] - if a and a > 0 then - if dropper.enabled then - dropper.enabled = false -- dangerous for e.g. nested || in tufte - local first = head.next - if first and first.id == hlist_code then - -- parbox .. needs to be set at 0 - first = first.next - end - if first and first.id == glyph_code then --- if texattribute[a_paragraph] >= 0 then --- texattribute[a_paragraph] = unsetvalue --- end - local char = first.char - local prev = first.prev - local next = first.next - -- if prev.id == hlist_code then - -- -- set the width to 0 - -- end - if next and next.id == kern_node then - next.kern = 0 - end - first.font = dropper.font or first.font - -- can be a helper - local ma = dropper.ma or 0 - local ca = dropper.ca - local ta = dropper.ta - if ca and ca > 0 then - first[a_colorspace] = ma == 0 and 1 or ma - first[a_color] = ca - end - if ta and ta > 0 then - first[a_transparency] = ta - end - -- - local width = first.width - local height = first.height - local depth = first.depth - local distance = dropper.distance or 0 - local voffset = dropper.voffset or 0 - local hoffset = dropper.hoffset or 0 - first.xoffset = - width - hoffset - distance - first.yoffset = - height - voffset - if true then - -- needed till we can store parindent with localpar - first.prev = nil - first.next = nil - local h = node.hpack(first) - h.width = 0 - h.height = 0 - h.depth = 0 - prev.next = h - next.prev = h - h.next = next - h.prev = prev - end - if dropper.location == variables.margin then - -- okay - else - local lines = tonumber(dropper.n) or 0 - if lines == 0 then -- safeguard, not too precise - lines = math.ceil((height+voffset) / tex.baselineskip.width) - end - tex.hangafter = - lines - tex.hangindent = width + distance - end - done = true - end - end - end - end - return head, done -end - -local enabled = false - -function paragraphs.set(n) - if n == variables.reset or not tonumber(n) or n == 0 then - texattribute[a_paragraph] = unsetvalue - else - if not enabled then - tasks.enableaction("processors","typesetters.paragraphs.handler") - if trace_paragraphs then - report_paragraphs("enabling paragraphs") - end - enabled = true - end - texattribute[a_paragraph] = n - end -end - -paragraphs.attribute = a_paragraph - -paragraphs.handler = nodes.installattributehandler { - name = "paragraphs", - namespace = paragraphs, - processor = process, -} +if not modules then modules = { } end modules ['typo-par'] = { + version = 1.001, + comment = "companion to typo-par.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- A playground for experiments. 
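-- When no explicit number of lines is given, the dropped caps code in this module
-- derives it from the glyph height and the current baselineskip and then reserves
-- the hang; a small standalone sketch of that arithmetic (the name is made up,
-- dimensions are in scaled points):

local function hangfordropper(height,voffset,width,distance,baselineskip)
    local lines = math.ceil((height + voffset) / baselineskip)
    return -lines, width + distance        -- hangafter, hangindent
end

-- a 30pt capital (no voffset), 26pt wide, 2pt distance, 12.65pt baselineskip:
-- hangfordropper(30*65536,0,26*65536,2*65536,12.65*65536)
--   --> hangafter  = -3    (ceil(30/12.65) = 3 lines)
--   --> hangindent = 28pt  (the cap width plus the distance)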
+ +local utfbyte = utf.byte +local utfchar = utf.char + +local trace_paragraphs = false trackers.register("typesetters.paragraphs", function(v) trace_paragraphs = v end) +local trace_dropper = false trackers.register("typesetters.paragraphs.dropper",function(v) trace_dropper = v end) + +local report_paragraphs = logs.reporter("nodes","paragraphs") +local report_dropper = logs.reporter("nodes","dropped") + +typesetters.paragraphs = typesetters.paragraphs or { } +local paragraphs = typesetters.paragraphs + +local nodecodes = nodes.nodecodes +local whatsitcodes = nodes.whatsitcodes +local tasks = nodes.tasks + +local variables = interfaces.variables + +local texattribute = tex.attribute +local unsetvalue = attributes.unsetvalue + +local glyph_code = nodecodes.glyph +local hlist_code = nodecodes.hlist +local kern_node = nodecodes.kern +local whatsit_code = nodecodes.whatsit +local localpar_code = whatsitcodes.localpar + +local a_paragraph = attributes.private("paragraphspecial") +local a_color = attributes.private('color') +local a_transparency = attributes.private('transparency') +local a_colorspace = attributes.private('colormodel') + +local dropper = { + enabled = false, + -- font = 0, + -- n = 0, + -- distance = 0, + -- hoffset = 0, + -- voffset = 0, +} + +local droppers = { } + +typesetters.paragraphs.droppers = droppers + +function droppers.set(specification) + dropper = specification or { } +end + +function droppers.freeze() + if dropper.enabled then + dropper.font = font.current() + end +end + +-- dropped caps experiment (will be done properly when luatex +-- stores the state in the local par node) .. btw, search still +-- works with dropped caps, as does an export + +-- we need a 'par' attribute and in fact for dropped caps we don't need +-- need an attribute ... dropit will become s state counter (or end up +-- in the localpar user data + +-- for the moment, each paragraph gets a number as id (attribute) ..problem +-- with nesting .. or anyhow, needed for tagging anyway + +-- todo: prevent linebreak .. but normally a dropper ends up atthe top of +-- a page so this has a low priority + +local function process(namespace,attribute,head) + local done = false + if head.id == whatsit_code and head.subtype == localpar_code then + -- begin of par + local a = head[attribute] + if a and a > 0 then + if dropper.enabled then + dropper.enabled = false -- dangerous for e.g. nested || in tufte + local first = head.next + if first and first.id == hlist_code then + -- parbox .. 
needs to be set at 0 + first = first.next + end + if first and first.id == glyph_code then +-- if texattribute[a_paragraph] >= 0 then +-- texattribute[a_paragraph] = unsetvalue +-- end + local char = first.char + local prev = first.prev + local next = first.next + -- if prev.id == hlist_code then + -- -- set the width to 0 + -- end + if next and next.id == kern_node then + next.kern = 0 + end + first.font = dropper.font or first.font + -- can be a helper + local ma = dropper.ma or 0 + local ca = dropper.ca + local ta = dropper.ta + if ca and ca > 0 then + first[a_colorspace] = ma == 0 and 1 or ma + first[a_color] = ca + end + if ta and ta > 0 then + first[a_transparency] = ta + end + -- + local width = first.width + local height = first.height + local depth = first.depth + local distance = dropper.distance or 0 + local voffset = dropper.voffset or 0 + local hoffset = dropper.hoffset or 0 + first.xoffset = - width - hoffset - distance + first.yoffset = - height - voffset + if true then + -- needed till we can store parindent with localpar + first.prev = nil + first.next = nil + local h = node.hpack(first) + h.width = 0 + h.height = 0 + h.depth = 0 + prev.next = h + next.prev = h + h.next = next + h.prev = prev + end + if dropper.location == variables.margin then + -- okay + else + local lines = tonumber(dropper.n) or 0 + if lines == 0 then -- safeguard, not too precise + lines = math.ceil((height+voffset) / tex.baselineskip.width) + end + tex.hangafter = - lines + tex.hangindent = width + distance + end + done = true + end + end + end + end + return head, done +end + +local enabled = false + +function paragraphs.set(n) + if n == variables.reset or not tonumber(n) or n == 0 then + texattribute[a_paragraph] = unsetvalue + else + if not enabled then + tasks.enableaction("processors","typesetters.paragraphs.handler") + if trace_paragraphs then + report_paragraphs("enabling paragraphs") + end + enabled = true + end + texattribute[a_paragraph] = n + end +end + +paragraphs.attribute = a_paragraph + +paragraphs.handler = nodes.installattributehandler { + name = "paragraphs", + namespace = paragraphs, + processor = process, +} diff --git a/tex/context/base/typo-prc.lua b/tex/context/base/typo-prc.lua index 5b74abd0b..4fb64d0f5 100644 --- a/tex/context/base/typo-prc.lua +++ b/tex/context/base/typo-prc.lua @@ -1,125 +1,125 @@ -if not modules then modules = { } end modules ['typo-prc'] = { - version = 1.001, - comment = "companion to typo-prc.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- moved from strc-ini.lua - - -local formatters = string.formatters -local lpegmatch, patterns, P, C, Cs = lpeg.match, lpeg.patterns, lpeg.P, lpeg.C, lpeg.Cs - --- processors: syntax: processor->data ... 
not ok yet - -typesetters.processors = typesetters.processors or { } -local processors = typesetters.processors - -local trace_processors = false -local report_processors = logs.reporter("processors") -local registered = { } - -trackers.register("typesetters.processors", function(v) trace_processors = v end) - -function processors.register(p) - registered[p] = true -end - -function processors.reset(p) - registered[p] = nil -end - ---~ local splitter = lpeg.splitat("->",true) -- also support => - -local becomes = P('->') -local processor = (1-becomes)^1 -local splitter = C(processor) * becomes * Cs(patterns.argument + patterns.content) - -function processors.split(str) - local p, s = lpegmatch(splitter,str) - if registered[p] then - return p, s - else - return false, str - end -end - -function processors.apply(p,s) - local str = p - if s == nil then - p, s = lpegmatch(splitter,p) - end - if p and registered[p] then - if trace_processors then - report_processors("applying %s processor %a, argument: %s","known",p,s) - end - context.applyprocessor(p,s) - elseif s then - if trace_processors then - report_processors("applying %s processor %a, argument: %s","unknown",p,s) - end - context(s) - elseif str then - if trace_processors then - report_processors("applying %s processor, data: %s","ignored",str) - end - context(str) - end -end - -function processors.startapply(p,s) - local str = p - if s == nil then - p, s = lpegmatch(splitter,p) - end - if p and registered[p] then - if trace_processors then - report_processors("start applying %s processor %a","known",p) - end - context.applyprocessor(p) - context("{") - return s - elseif p then - if trace_processors then - report_processors("start applying %s processor %a","unknown",p) - end - context.firstofoneargument() - context("{") - return s - else - if trace_processors then - report_processors("start applying %s processor","ignored") - end - context.firstofoneargument() - context("{") - return str - end -end - -function processors.stopapply() - context("}") - if trace_processors then - report_processors("stop applying processor") - end -end - -function processors.tostring(str) - local p, s = lpegmatch(splitter,str) - if registered[p] then - return formatters["\\applyprocessor{%s}{%s}"](p,s) - else - return str - end -end - -function processors.stripped(str) - local p, s = lpegmatch(splitter,str) - return s or str -end - --- interface - -commands.registerstructureprocessor = processors.register -commands.resetstructureprocessor = processors.reset +if not modules then modules = { } end modules ['typo-prc'] = { + version = 1.001, + comment = "companion to typo-prc.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- moved from strc-ini.lua + + +local formatters = string.formatters +local lpegmatch, patterns, P, C, Cs = lpeg.match, lpeg.patterns, lpeg.P, lpeg.C, lpeg.Cs + +-- processors: syntax: processor->data ... 
not ok yet + +typesetters.processors = typesetters.processors or { } +local processors = typesetters.processors + +local trace_processors = false +local report_processors = logs.reporter("processors") +local registered = { } + +trackers.register("typesetters.processors", function(v) trace_processors = v end) + +function processors.register(p) + registered[p] = true +end + +function processors.reset(p) + registered[p] = nil +end + +--~ local splitter = lpeg.splitat("->",true) -- also support => + +local becomes = P('->') +local processor = (1-becomes)^1 +local splitter = C(processor) * becomes * Cs(patterns.argument + patterns.content) + +function processors.split(str) + local p, s = lpegmatch(splitter,str) + if registered[p] then + return p, s + else + return false, str + end +end + +function processors.apply(p,s) + local str = p + if s == nil then + p, s = lpegmatch(splitter,p) + end + if p and registered[p] then + if trace_processors then + report_processors("applying %s processor %a, argument: %s","known",p,s) + end + context.applyprocessor(p,s) + elseif s then + if trace_processors then + report_processors("applying %s processor %a, argument: %s","unknown",p,s) + end + context(s) + elseif str then + if trace_processors then + report_processors("applying %s processor, data: %s","ignored",str) + end + context(str) + end +end + +function processors.startapply(p,s) + local str = p + if s == nil then + p, s = lpegmatch(splitter,p) + end + if p and registered[p] then + if trace_processors then + report_processors("start applying %s processor %a","known",p) + end + context.applyprocessor(p) + context("{") + return s + elseif p then + if trace_processors then + report_processors("start applying %s processor %a","unknown",p) + end + context.firstofoneargument() + context("{") + return s + else + if trace_processors then + report_processors("start applying %s processor","ignored") + end + context.firstofoneargument() + context("{") + return str + end +end + +function processors.stopapply() + context("}") + if trace_processors then + report_processors("stop applying processor") + end +end + +function processors.tostring(str) + local p, s = lpegmatch(splitter,str) + if registered[p] then + return formatters["\\applyprocessor{%s}{%s}"](p,s) + else + return str + end +end + +function processors.stripped(str) + local p, s = lpegmatch(splitter,str) + return s or str +end + +-- interface + +commands.registerstructureprocessor = processors.register +commands.resetstructureprocessor = processors.reset diff --git a/tex/context/base/typo-rep.lua b/tex/context/base/typo-rep.lua index 8451ce52b..e7e11bbf0 100644 --- a/tex/context/base/typo-rep.lua +++ b/tex/context/base/typo-rep.lua @@ -1,128 +1,128 @@ -if not modules then modules = { } end modules ['typo-rep'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This was rather boring to program (more of the same) but I could --- endure it by listening to a couple cd's by The Scene and The Lau --- on the squeezebox on my desk. 
- -local trace_stripping = false trackers.register("nodes.stripping", function(v) trace_stripping = v end) - trackers.register("fonts.stripping", function(v) trace_stripping = v end) - -local report_stripping = logs.reporter("fonts","stripping") - -local nodes, node = nodes, node - -local delete_node = nodes.delete -local replace_node = nodes.replace -local copy_node = node.copy - -local chardata = characters.data -local collected = false -local a_stripping = attributes.private("stripping") -local fontdata = fonts.hashes.identifiers -local tasks = nodes.tasks - -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue - -local v_reset = interfaces.variables.reset - -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph - --- todo: other namespace -> typesetters - -nodes.stripping = nodes.stripping or { } local stripping = nodes.stripping -stripping.glyphs = stripping.glyphs or { } local glyphs = stripping.glyphs - -local function initialize() - for k,v in next, chardata do - if v.category == "cf" and v.visible ~= "yes" then - if not glyphs[k] then - glyphs[k] = true - end - end - end - initialize = nil -end - -local function process(what,head,current,char) - if what == true then - if trace_stripping then - report_stripping("deleting %C from text",char) - end - head, current = delete_node(head,current) - elseif type(what) == "function" then - head, current = what(head,current) - current = current.next - if trace_stripping then - report_stripping("processing %C in text",char) - end - elseif what then -- assume node - head, current = replace_node(head,current,copy_node(what)) - current = current.next - if trace_stripping then - report_stripping("replacing %C in text",char) - end - end - return head, current -end - -function nodes.handlers.stripping(head) - local current, done = head, false - while current do - if current.id == glyph_code then - -- it's more efficient to keep track of what needs to be kept - local todo = current[a_stripping] - if todo == 1 then - local char = current.char - local what = glyphs[char] - if what then - head, current = process(what,head,current,char) - done = true - else -- handling of spacing etc has to be done elsewhere - current = current.next - end - else - current = current.next - end - else - current = current.next - end - end - return head, done -end - -local enabled = false - -function stripping.set(n) -- number or 'reset' - if n == v_reset then - n = unsetvalue - else - n = tonumber(n) - if n then - if not enabled then - if initialize then initialize() end - tasks.enableaction("processors","nodes.handlers.stripping") - enabled = true - end - else - n = unsetvalue - end - end - texattribute[a_stripping] = n -end - --- why not in task-ini? - -tasks.appendaction("processors","fonts","nodes.handlers.stripping",nil,"nodes.handlers.characters") -tasks.disableaction("processors","nodes.handlers.stripping") - --- interface - -commands.setcharacterstripping = stripping.set +if not modules then modules = { } end modules ['typo-rep'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This was rather boring to program (more of the same) but I could +-- endure it by listening to a couple cd's by The Scene and The Lau +-- on the squeezebox on my desk. 
+ +local trace_stripping = false trackers.register("nodes.stripping", function(v) trace_stripping = v end) + trackers.register("fonts.stripping", function(v) trace_stripping = v end) + +local report_stripping = logs.reporter("fonts","stripping") + +local nodes, node = nodes, node + +local delete_node = nodes.delete +local replace_node = nodes.replace +local copy_node = node.copy + +local chardata = characters.data +local collected = false +local a_stripping = attributes.private("stripping") +local fontdata = fonts.hashes.identifiers +local tasks = nodes.tasks + +local texattribute = tex.attribute +local unsetvalue = attributes.unsetvalue + +local v_reset = interfaces.variables.reset + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph + +-- todo: other namespace -> typesetters + +nodes.stripping = nodes.stripping or { } local stripping = nodes.stripping +stripping.glyphs = stripping.glyphs or { } local glyphs = stripping.glyphs + +local function initialize() + for k,v in next, chardata do + if v.category == "cf" and v.visible ~= "yes" then + if not glyphs[k] then + glyphs[k] = true + end + end + end + initialize = nil +end + +local function process(what,head,current,char) + if what == true then + if trace_stripping then + report_stripping("deleting %C from text",char) + end + head, current = delete_node(head,current) + elseif type(what) == "function" then + head, current = what(head,current) + current = current.next + if trace_stripping then + report_stripping("processing %C in text",char) + end + elseif what then -- assume node + head, current = replace_node(head,current,copy_node(what)) + current = current.next + if trace_stripping then + report_stripping("replacing %C in text",char) + end + end + return head, current +end + +function nodes.handlers.stripping(head) + local current, done = head, false + while current do + if current.id == glyph_code then + -- it's more efficient to keep track of what needs to be kept + local todo = current[a_stripping] + if todo == 1 then + local char = current.char + local what = glyphs[char] + if what then + head, current = process(what,head,current,char) + done = true + else -- handling of spacing etc has to be done elsewhere + current = current.next + end + else + current = current.next + end + else + current = current.next + end + end + return head, done +end + +local enabled = false + +function stripping.set(n) -- number or 'reset' + if n == v_reset then + n = unsetvalue + else + n = tonumber(n) + if n then + if not enabled then + if initialize then initialize() end + tasks.enableaction("processors","nodes.handlers.stripping") + enabled = true + end + else + n = unsetvalue + end + end + texattribute[a_stripping] = n +end + +-- why not in task-ini? 
+ +tasks.appendaction("processors","fonts","nodes.handlers.stripping",nil,"nodes.handlers.characters") +tasks.disableaction("processors","nodes.handlers.stripping") + +-- interface + +commands.setcharacterstripping = stripping.set diff --git a/tex/context/base/typo-spa.lua b/tex/context/base/typo-spa.lua index 5eba22889..11de65f7b 100644 --- a/tex/context/base/typo-spa.lua +++ b/tex/context/base/typo-spa.lua @@ -1,229 +1,229 @@ -if not modules then modules = { } end modules ['typo-spa'] = { - version = 1.001, - comment = "companion to typo-spa.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local next, type = next, type -local utfchar = utf.char - -local trace_spacing = false trackers.register("typesetters.spacing", function(v) trace_spacing = v end) - -local report_spacing = logs.reporter("typesetting","spacing") - -local nodes, fonts, node = nodes, fonts, node - -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after -local remove_node = nodes.remove -local end_of_math = node.end_of_math - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers -local quaddata = fonthashes.quads - -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue - -local v_reset = interfaces.variables.reset - -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph -local math_code = nodecodes.math - -local somespace = nodes.somespace -local somepenalty = nodes.somepenalty - -local nodepool = nodes.pool -local tasks = nodes.tasks - -local new_penalty = nodepool.penalty -local new_glue = nodepool.glue - -typesetters = typesetters or { } -local typesetters = typesetters - -typesetters.spacings = typesetters.spacings or { } -local spacings = typesetters.spacings - -spacings.mapping = spacings.mapping or { } -spacings.numbers = spacings.numbers or { } - -local a_spacings = attributes.private("spacing") -spacings.attribute = a_spacings - -storage.register("typesetters/spacings/mapping", spacings.mapping, "typesetters.spacings.mapping") - -local mapping = spacings.mapping -local numbers = spacings.numbers - -for i=1,#mapping do - local m = mapping[i] - numbers[m.name] = m -end - --- todo cache lastattr - -local function process(namespace,attribute,head) - local done = false - local start = head - -- head is always begin of par (whatsit), so we have at least two prev nodes - -- penalty followed by glue - while start do - local id = start.id - if id == glyph_code then - local attr = start[attribute] - if attr and attr > 0 then - local data = mapping[attr] - if data then - local char = start.char - local map = data.characters[char] - start[attribute] = unsetvalue -- needed? 
- if map then - local left = map.left - local right = map.right - local alternative = map.alternative - local quad = quaddata[start.font] - local prev = start.prev - if left and left ~= 0 and prev then - local ok = false - local prevprev = prev.prev - if alternative == 1 then - local somespace = somespace(prev,true) - if somespace then - local somepenalty = somepenalty(prevprev,10000) - if somepenalty then - if trace_spacing then - report_spacing("removing penalty and space before %C (left)",char) - end - head = remove_node(head,prev,true) - head = remove_node(head,prevprev,true) - else - if trace_spacing then - report_spacing("removing space before %C (left)",char) - end - head = remove_node(head,prev,true) - end - end - ok = true - else - ok = not (somespace(prev,true) and somepenalty(prevprev,true)) or somespace(prev,true) - end - if ok then - if trace_spacing then - report_spacing("inserting penalty and space before %C (left)",char) - end - insert_node_before(head,start,new_penalty(10000)) - insert_node_before(head,start,new_glue(left*quad)) - done = true - end - end - local next = start.next - if right and right ~= 0 and next then - local ok = false - local nextnext = next.next - if alternative == 1 then - local somepenalty = somepenalty(next,10000) - if somepenalty then - local somespace = somespace(nextnext,true) - if somespace then - if trace_spacing then - report_spacing("removing penalty and space after %C right",char) - end - head = remove_node(head,next,true) - head = remove_node(head,nextnext,true) - end - else - local somespace = somespace(next,true) - if somespace then - if trace_spacing then - report_spacing("removing space after %C (right)", char) - end - head = remove_node(head,next,true) - end - end - ok = true - else - ok = not (somepenalty(next,10000) and somespace(nextnext,true)) or somespace(next,true) - end - if ok then - if trace_spacing then - report_spacing("inserting penalty and space after %C (right)",char) - end - insert_node_after(head,start,new_glue(right*quad)) - insert_node_after(head,start,new_penalty(10000)) - done = true - end - end - end - end - end - elseif id == math_code then - start = end_of_math(start) -- weird, can return nil .. no math end? 
- end - if start then - start = start.next - end - end - return head, done -end - -local enabled = false - -function spacings.define(name) - local data = numbers[name] - if data then - -- error - else - local number = #mapping + 1 - local data = { - name = name, - number = number, - characters = { }, - } - mapping[number] = data - numbers[name] = data - end -end - -function spacings.setup(name,char,settings) - local data = numbers[name] - if not data then - -- error - else - data.characters[char] = settings - end -end - -function spacings.set(name) - local n = unsetvalue - if name ~= v_reset then - local data = numbers[name] - if data then - if not enabled then - tasks.enableaction("processors","typesetters.spacings.handler") - enabled = true - end - n = data.number or unsetvalue - end - end - texattribute[a_spacings] = n -end - -function spacings.reset() - texattribute[a_spacings] = unsetvalue -end - -spacings.handler = nodes.installattributehandler { - name = "spacing", - namespace = spacings, - processor = process, -} - --- interface - -commands.definecharacterspacing = spacings.define -commands.setupcharacterspacing = spacings.setup -commands.setcharacterspacing = spacings.set +if not modules then modules = { } end modules ['typo-spa'] = { + version = 1.001, + comment = "companion to typo-spa.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next, type = next, type +local utfchar = utf.char + +local trace_spacing = false trackers.register("typesetters.spacing", function(v) trace_spacing = v end) + +local report_spacing = logs.reporter("typesetting","spacing") + +local nodes, fonts, node = nodes, fonts, node + +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after +local remove_node = nodes.remove +local end_of_math = node.end_of_math + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local quaddata = fonthashes.quads + +local texattribute = tex.attribute +local unsetvalue = attributes.unsetvalue + +local v_reset = interfaces.variables.reset + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local math_code = nodecodes.math + +local somespace = nodes.somespace +local somepenalty = nodes.somepenalty + +local nodepool = nodes.pool +local tasks = nodes.tasks + +local new_penalty = nodepool.penalty +local new_glue = nodepool.glue + +typesetters = typesetters or { } +local typesetters = typesetters + +typesetters.spacings = typesetters.spacings or { } +local spacings = typesetters.spacings + +spacings.mapping = spacings.mapping or { } +spacings.numbers = spacings.numbers or { } + +local a_spacings = attributes.private("spacing") +spacings.attribute = a_spacings + +storage.register("typesetters/spacings/mapping", spacings.mapping, "typesetters.spacings.mapping") + +local mapping = spacings.mapping +local numbers = spacings.numbers + +for i=1,#mapping do + local m = mapping[i] + numbers[m.name] = m +end + +-- todo cache lastattr + +local function process(namespace,attribute,head) + local done = false + local start = head + -- head is always begin of par (whatsit), so we have at least two prev nodes + -- penalty followed by glue + while start do + local id = start.id + if id == glyph_code then + local attr = start[attribute] + if attr and attr > 0 then + local data = mapping[attr] + if data then + local char = start.char + local map = data.characters[char] + start[attribute] = unsetvalue -- 
needed? + if map then + local left = map.left + local right = map.right + local alternative = map.alternative + local quad = quaddata[start.font] + local prev = start.prev + if left and left ~= 0 and prev then + local ok = false + local prevprev = prev.prev + if alternative == 1 then + local somespace = somespace(prev,true) + if somespace then + local somepenalty = somepenalty(prevprev,10000) + if somepenalty then + if trace_spacing then + report_spacing("removing penalty and space before %C (left)",char) + end + head = remove_node(head,prev,true) + head = remove_node(head,prevprev,true) + else + if trace_spacing then + report_spacing("removing space before %C (left)",char) + end + head = remove_node(head,prev,true) + end + end + ok = true + else + ok = not (somespace(prev,true) and somepenalty(prevprev,true)) or somespace(prev,true) + end + if ok then + if trace_spacing then + report_spacing("inserting penalty and space before %C (left)",char) + end + insert_node_before(head,start,new_penalty(10000)) + insert_node_before(head,start,new_glue(left*quad)) + done = true + end + end + local next = start.next + if right and right ~= 0 and next then + local ok = false + local nextnext = next.next + if alternative == 1 then + local somepenalty = somepenalty(next,10000) + if somepenalty then + local somespace = somespace(nextnext,true) + if somespace then + if trace_spacing then + report_spacing("removing penalty and space after %C right",char) + end + head = remove_node(head,next,true) + head = remove_node(head,nextnext,true) + end + else + local somespace = somespace(next,true) + if somespace then + if trace_spacing then + report_spacing("removing space after %C (right)", char) + end + head = remove_node(head,next,true) + end + end + ok = true + else + ok = not (somepenalty(next,10000) and somespace(nextnext,true)) or somespace(next,true) + end + if ok then + if trace_spacing then + report_spacing("inserting penalty and space after %C (right)",char) + end + insert_node_after(head,start,new_glue(right*quad)) + insert_node_after(head,start,new_penalty(10000)) + done = true + end + end + end + end + end + elseif id == math_code then + start = end_of_math(start) -- weird, can return nil .. no math end? 
+ end + if start then + start = start.next + end + end + return head, done +end + +local enabled = false + +function spacings.define(name) + local data = numbers[name] + if data then + -- error + else + local number = #mapping + 1 + local data = { + name = name, + number = number, + characters = { }, + } + mapping[number] = data + numbers[name] = data + end +end + +function spacings.setup(name,char,settings) + local data = numbers[name] + if not data then + -- error + else + data.characters[char] = settings + end +end + +function spacings.set(name) + local n = unsetvalue + if name ~= v_reset then + local data = numbers[name] + if data then + if not enabled then + tasks.enableaction("processors","typesetters.spacings.handler") + enabled = true + end + n = data.number or unsetvalue + end + end + texattribute[a_spacings] = n +end + +function spacings.reset() + texattribute[a_spacings] = unsetvalue +end + +spacings.handler = nodes.installattributehandler { + name = "spacing", + namespace = spacings, + processor = process, +} + +-- interface + +commands.definecharacterspacing = spacings.define +commands.setupcharacterspacing = spacings.setup +commands.setcharacterspacing = spacings.set diff --git a/tex/context/base/unic-ini.lua b/tex/context/base/unic-ini.lua index cca1f0617..6a0c387d3 100644 --- a/tex/context/base/unic-ini.lua +++ b/tex/context/base/unic-ini.lua @@ -1,19 +1,19 @@ -if not modules then modules = { } end modules ['unic-ini'] = { - version = 1.001, - comment = "companion to unic-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local utfchar = utf.char - --- Beware, initializing unicodechar happens at first usage and takes --- 0.05 -- 0.1 second (lots of function calls). - -function commands.unicodechar(asked) - local n = characters.unicodechar(asked) - if n then - context(utfchar(n)) - end -end +if not modules then modules = { } end modules ['unic-ini'] = { + version = 1.001, + comment = "companion to unic-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local utfchar = utf.char + +-- Beware, initializing unicodechar happens at first usage and takes +-- 0.05 -- 0.1 second (lots of function calls). 
+ +function commands.unicodechar(asked) + local n = characters.unicodechar(asked) + if n then + context(utfchar(n)) + end +end diff --git a/tex/context/base/util-deb.lua b/tex/context/base/util-deb.lua index 785373f86..9e5233774 100644 --- a/tex/context/base/util-deb.lua +++ b/tex/context/base/util-deb.lua @@ -1,128 +1,128 @@ -if not modules then modules = { } end modules ['util-deb'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- the tag is kind of generic and used for functions that are not --- bound to a variable, like node.new, node.copy etc (contrary to for instance --- node.has_attribute which is bound to a has_attribute local variable in mkiv) - -local debug = require "debug" - -local getinfo = debug.getinfo -local type, next, tostring = type, next, tostring -local format, find = string.format, string.find -local is_boolean = string.is_boolean - -utilities = utilities or { } -local debugger = utilities.debugger or { } -utilities.debugger = debugger - -local counters = { } -local names = { } - -local report = logs.reporter("debugger") - --- one - -local function hook() - local f = getinfo(2) -- "nS" - if f then - local n = "unknown" - if f.what == "C" then - n = f.name or '' - if not names[n] then - names[n] = format("%42s",n) - end - else - -- source short_src linedefined what name namewhat nups func - n = f.name or f.namewhat or f.what - if not n or n == "" then - n = "?" - end - if not names[n] then - names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source") - end - end - counters[n] = (counters[n] or 0) + 1 - end -end - -function debugger.showstats(printer,threshold) -- hm, something has changed, rubish now - printer = printer or report - threshold = threshold or 0 - local total, grandtotal, functions = 0, 0, 0 - local dataset = { } - for name, count in next, counters do - dataset[#dataset+1] = { name, count } - end - table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end) - for i=1,#dataset do - local d = dataset[i] - local name = d[1] - local count = d[2] - if count > threshold and not find(name,"for generator") then -- move up - printer(format("%8i %s\n", count, names[name])) - total = total + count - end - grandtotal = grandtotal + count - functions = functions + 1 - end - printer("\n") - printer(format("functions : % 10i\n", functions)) - printer(format("total : % 10i\n", total)) - printer(format("grand total: % 10i\n", grandtotal)) - printer(format("threshold : % 10i\n", threshold)) -end - -function debugger.savestats(filename,threshold) - local f = io.open(filename,'w') - if f then - debugger.showstats(function(str) f:write(str) end,threshold) - f:close() - end -end - -function debugger.enable() - debug.sethook(hook,"c") -end - -function debugger.disable() - debug.sethook() ---~ counters[debug.getinfo(2,"f").func] = nil -end - ---~ debugger.enable() - ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) - ---~ debugger.disable() - ---~ print("") ---~ debugger.showstats() ---~ print("") ---~ debugger.showstats(print,3) - --- from the lua book: - -function traceback() - local level = 1 - while true do - local info = debug.getinfo(level, "Sl") - if not info then - break - elseif info.what == "C" then - print(format("%3i : C function",level)) - else - 
print(format("%3i : [%s]:%d",level,info.short_src,info.currentline)) - end - level = level + 1 - end -end +if not modules then modules = { } end modules ['util-deb'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- the tag is kind of generic and used for functions that are not +-- bound to a variable, like node.new, node.copy etc (contrary to for instance +-- node.has_attribute which is bound to a has_attribute local variable in mkiv) + +local debug = require "debug" + +local getinfo = debug.getinfo +local type, next, tostring = type, next, tostring +local format, find = string.format, string.find +local is_boolean = string.is_boolean + +utilities = utilities or { } +local debugger = utilities.debugger or { } +utilities.debugger = debugger + +local counters = { } +local names = { } + +local report = logs.reporter("debugger") + +-- one + +local function hook() + local f = getinfo(2) -- "nS" + if f then + local n = "unknown" + if f.what == "C" then + n = f.name or '' + if not names[n] then + names[n] = format("%42s",n) + end + else + -- source short_src linedefined what name namewhat nups func + n = f.name or f.namewhat or f.what + if not n or n == "" then + n = "?" + end + if not names[n] then + names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source") + end + end + counters[n] = (counters[n] or 0) + 1 + end +end + +function debugger.showstats(printer,threshold) -- hm, something has changed, rubish now + printer = printer or report + threshold = threshold or 0 + local total, grandtotal, functions = 0, 0, 0 + local dataset = { } + for name, count in next, counters do + dataset[#dataset+1] = { name, count } + end + table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end) + for i=1,#dataset do + local d = dataset[i] + local name = d[1] + local count = d[2] + if count > threshold and not find(name,"for generator") then -- move up + printer(format("%8i %s\n", count, names[name])) + total = total + count + end + grandtotal = grandtotal + count + functions = functions + 1 + end + printer("\n") + printer(format("functions : % 10i\n", functions)) + printer(format("total : % 10i\n", total)) + printer(format("grand total: % 10i\n", grandtotal)) + printer(format("threshold : % 10i\n", threshold)) +end + +function debugger.savestats(filename,threshold) + local f = io.open(filename,'w') + if f then + debugger.showstats(function(str) f:write(str) end,threshold) + f:close() + end +end + +function debugger.enable() + debug.sethook(hook,"c") +end + +function debugger.disable() + debug.sethook() +--~ counters[debug.getinfo(2,"f").func] = nil +end + +--~ debugger.enable() + +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) + +--~ debugger.disable() + +--~ print("") +--~ debugger.showstats() +--~ print("") +--~ debugger.showstats(print,3) + +-- from the lua book: + +function traceback() + local level = 1 + while true do + local info = debug.getinfo(level, "Sl") + if not info then + break + elseif info.what == "C" then + print(format("%3i : C function",level)) + else + print(format("%3i : [%s]:%d",level,info.short_src,info.currentline)) + end + level = level + 1 + end +end diff --git a/tex/context/base/util-dim.lua b/tex/context/base/util-dim.lua index 47b2706b7..bbfeae7d4 100644 --- 
a/tex/context/base/util-dim.lua +++ b/tex/context/base/util-dim.lua @@ -1,449 +1,449 @@ -if not modules then modules = { } end modules ['util-dim'] = { - version = 1.001, - comment = "support for dimensions", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

    Internally TeX works with scaled points, which are -represented by integers. However, in practice, at least at the -TeX end we work with more generic units like points (pt). Going -from scaled points (numbers) to one of those units can be -done by using the conversion factors collected in the following -table.

    ---ldx]]-- - -local format, match, gsub, type, setmetatable = string.format, string.match, string.gsub, type, setmetatable -local P, S, R, Cc, C, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.Cc, lpeg.C, lpeg.match - -local allocate = utilities.storage.allocate -local setmetatableindex = table.setmetatableindex -local formatters = string.formatters - ---this might become another namespace - -number = number or { } -local number = number - -number.tonumberf = function(n) return match(format("%.20f",n),"(.-0?)0*$") end -- one zero too much but alas -number.tonumberg = function(n) return format("%.20g",n) end - -local dimenfactors = allocate { - ["pt"] = 1/65536, - ["in"] = ( 100/ 7227)/65536, - ["cm"] = ( 254/ 7227)/65536, - ["mm"] = ( 2540/ 7227)/65536, - ["sp"] = 1, -- 65536 sp in 1pt - ["bp"] = ( 7200/ 7227)/65536, - ["pc"] = ( 1/ 12)/65536, - ["dd"] = ( 1157/ 1238)/65536, - ["cc"] = ( 1157/14856)/65536, - ["nd"] = (20320/21681)/65536, - ["nc"] = ( 5080/65043)/65536 -} - ---~ print(table.serialize(dimenfactors)) ---~ ---~ %.99g: ---~ ---~ t={ ---~ ["bp"]=1.5201782378580324e-005, ---~ ["cc"]=1.1883696112892098e-006, ---~ ["cm"]=5.3628510057769479e-007, ---~ ["dd"]=1.4260435335470516e-005, ---~ ["em"]=0.000152587890625, ---~ ["ex"]=6.103515625e-005, ---~ ["in"]=2.1113586636917117e-007, ---~ ["mm"]=5.3628510057769473e-008, ---~ ["nc"]=1.1917446679504327e-006, ---~ ["nd"]=1.4300936015405194e-005, ---~ ["pc"]=1.2715657552083333e-006, ---~ ["pt"]=1.52587890625e-005, ---~ ["sp"]=1, ---~ } ---~ ---~ patched %s and tonumber ---~ ---~ t={ ---~ ["bp"]=0.00001520178238, ---~ ["cc"]=0.00000118836961, ---~ ["cm"]=0.0000005362851, ---~ ["dd"]=0.00001426043534, ---~ ["em"]=0.00015258789063, ---~ ["ex"]=0.00006103515625, ---~ ["in"]=0.00000021113587, ---~ ["mm"]=0.00000005362851, ---~ ["nc"]=0.00000119174467, ---~ ["nd"]=0.00001430093602, ---~ ["pc"]=0.00000127156576, ---~ ["pt"]=0.00001525878906, ---~ ["sp"]=1, ---~ } - ---[[ldx-- -

    A conversion function that takes a number, unit (string) and optional -format (string) is implemented using this table.

    ---ldx]]-- - - -local function numbertodimen(n,unit,fmt) - if type(n) == 'string' then - return n - else - unit = unit or 'pt' - if not fmt then - fmt = "%s%s" - elseif fmt == true then - fmt = "%0.5f%s" - end - return format(fmt,n*dimenfactors[unit],unit) - -- if fmt then - -- return format(fmt,n*dimenfactors[unit],unit) - -- else - -- return match(format("%.20f",n*dimenfactors[unit]),"(.-0?)0*$") .. unit - -- end - end -end - ---[[ldx-- -

    We collect a bunch of converters in the number namespace.

    ---ldx]]-- - -number.maxdimen = 1073741823 -number.todimen = numbertodimen -number.dimenfactors = dimenfactors - -function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end -function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end -function number.tocentimeters (n,fmt) return numbertodimen(n,"cm",fmt) end -function number.tomillimeters (n,fmt) return numbertodimen(n,"mm",fmt) end -function number.toscaledpoints(n,fmt) return numbertodimen(n,"sp",fmt) end -function number.toscaledpoints(n) return n .. "sp" end -function number.tobasepoints (n,fmt) return numbertodimen(n,"bp",fmt) end -function number.topicas (n,fmt) return numbertodimen(n "pc",fmt) end -function number.todidots (n,fmt) return numbertodimen(n,"dd",fmt) end -function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end -function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end -function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end - ---[[ldx-- -

    More interesting is to implement a (sort of) dimen datatype, one -that permits calculations too. First we define a function that -converts a string to scaled points. We use lpeg. We capture -a number and optionally a unit. When no unit is given, a constant -capture takes place.

    ---ldx]]-- - -local amount = (S("+-")^0 * R("09")^0 * P(".")^0 * R("09")^0) + Cc("0") -local unit = R("az")^1 - -local dimenpair = amount/tonumber * (unit^1/dimenfactors + Cc(1)) -- tonumber is new - -lpeg.patterns.dimenpair = dimenpair - -local splitter = amount/tonumber * C(unit^1) - -function number.splitdimen(str) - return lpegmatch(splitter,str) -end - ---[[ldx-- -

    We use a metatable to intercept errors. When no key is found in -the table with factors, the metatable will be consulted for an -alternative index function.

    ---ldx]]-- - -setmetatableindex(dimenfactors, function(t,s) - -- error("wrong dimension: " .. (s or "?")) -- better a message - return false -end) - ---[[ldx-- -

    We redefine the following function later on, so we comment it -here (which saves us some bytecode).

    ---ldx]]-- - --- function string.todimen(str) --- if type(str) == "number" then --- return str --- else --- local value, unit = lpegmatch(dimenpair,str) --- return value/unit --- end --- end --- --- local stringtodimen = string.todimen - -local stringtodimen -- assigned later (commenting saves bytecode) - -local amount = S("+-")^0 * R("09")^0 * S(".,")^0 * R("09")^0 -local unit = P("pt") + P("cm") + P("mm") + P("sp") + P("bp") + P("in") + - P("pc") + P("dd") + P("cc") + P("nd") + P("nc") - -local validdimen = amount * unit - -lpeg.patterns.validdimen = validdimen - ---[[ldx-- -

    This converter accepts calls like:

    - - -string.todimen("10") -string.todimen(".10") -string.todimen("10.0") -string.todimen("10.0pt") -string.todimen("10pt") -string.todimen("10.0pt") - - -

    With this in place, we can now implement a proper datatype for dimensions, one -that permits us to do this:

    - - -s = dimen "10pt" + dimen "20pt" + dimen "200pt" - - dimen "100sp" / 10 + "20pt" + "0pt" - - -

    We create a local metatable for this new type:

    ---ldx]]-- - -local dimensions = { } - ---[[ldx-- -

    The main (and globally visible) representation of a dimen is defined next: it is -a one-element table. The unit that is returned from the match is normally a number -(one of the previously defined factors) but we also accept functions. Later we will -see why. This function is redefined later.

    ---ldx]]-- - --- function dimen(a) --- if a then --- local ta= type(a) --- if ta == "string" then --- local value, unit = lpegmatch(pattern,a) --- if type(unit) == "function" then --- k = value/unit() --- else --- k = value/unit --- end --- a = k --- elseif ta == "table" then --- a = a[1] --- end --- return setmetatable({ a }, dimensions) --- else --- return setmetatable({ 0 }, dimensions) --- end --- end - ---[[ldx-- -

    This function returns a small hash with a metatable attached. It is -through this metatable that we can do the calculations. We could have -shared some of the code but for reasons of speed we don't.

    ---ldx]]-- - -function dimensions.__add(a, b) - local ta, tb = type(a), type(b) - if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end - if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end - return setmetatable({ a + b }, dimensions) -end - -function dimensions.__sub(a, b) - local ta, tb = type(a), type(b) - if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end - if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end - return setmetatable({ a - b }, dimensions) -end - -function dimensions.__mul(a, b) - local ta, tb = type(a), type(b) - if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end - if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end - return setmetatable({ a * b }, dimensions) -end - -function dimensions.__div(a, b) - local ta, tb = type(a), type(b) - if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end - if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end - return setmetatable({ a / b }, dimensions) -end - -function dimensions.__unm(a) - local ta = type(a) - if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end - return setmetatable({ - a }, dimensions) -end - ---[[ldx-- -

    It makes no sense to implement the power and modulo functions, but -the next two do make sense because they permit code like:

    - - -local a, b = dimen "10pt", dimen "11pt" -... -if a > b then - ... -end - ---ldx]]-- - --- makes no sense: dimensions.__pow and dimensions.__mod - -function dimensions.__lt(a, b) - return a[1] < b[1] -end - -function dimensions.__eq(a, b) - return a[1] == b[1] -end - ---[[ldx-- -

    We also need to provide a function for conversion to string (so that -we can print dimensions). We print them as points, just like TeX does.

    ---ldx]]-- - -function dimensions.__tostring(a) - return a[1]/65536 .. "pt" -- instead of todimen(a[1]) -end - ---[[ldx-- -

    Since it does not take much code, we also provide -a few accessors:

    - - -print(dimen().pt) -print(dimen().sp) - ---ldx]]-- - -function dimensions.__index(tab,key) - local d = dimenfactors[key] - if not d then - error("illegal property of dimen: " .. key) - d = 1 - end - return 1/d -end - ---[[ldx-- -

    In the converter from string to dimension we support functions as -factors. This is because in TeX we have a few more units: -ex and em. These are not constant factors but -depend on the current font. They are not defined by default, but need -an explicit function call. This is because at the moment that this code -is loaded, the relevant tables that hold the functions needed may not -yet be available.

    ---ldx]]-- - - dimenfactors["ex"] = 4 * 1/65536 -- 4pt - dimenfactors["em"] = 10 * 1/65536 -- 10pt --- dimenfactors["%"] = 4 * 1/65536 -- 400pt/100 - ---[[ldx-- -

    The previous code is rather efficient (also thanks to lpeg) but we -can speed it up by caching converted dimensions. On my machine (2008) the following -loop takes about 25.5 seconds.

    - - -for i=1,1000000 do - local s = dimen "10pt" + dimen "20pt" + dimen "200pt" - - dimen "100sp" / 10 + "20pt" + "0pt" -end - - -

    When we cache converted strings this becomes 16.3 seconds. In order not -to waste too much memory on it, we tag the values of the cache as being -weak, which means that the garbage collector will collect them in a next -sweep. This means that in most cases the speed up is mostly affecting the -current couple of calculations and as such the speed penalty is small.

    - -

    We redefine two previously defined functions that can benefit from -this:

    ---ldx]]-- - -local known = { } setmetatable(known, { __mode = "v" }) - -function dimen(a) - if a then - local ta= type(a) - if ta == "string" then - local k = known[a] - if k then - a = k - else - local value, unit = lpegmatch(dimenpair,a) - if type(unit) == "function" then - k = value/unit() - else - k = value/unit - end - known[a] = k - a = k - end - elseif ta == "table" then - a = a[1] - end - return setmetatable({ a }, dimensions) - else - return setmetatable({ 0 }, dimensions) - end -end - -function string.todimen(str) -- maybe use tex.sp when available - if type(str) == "number" then - return str - else - local k = known[str] - if not k then - local value, unit = lpegmatch(dimenpair,str) - if value and unit then - k = value/unit -- to be considered: round - else - k = 0 - end - -- print(str,value,unit) - known[str] = k - end - return k - end -end - ---~ local known = { } - ---~ function string.todimen(str) -- maybe use tex.sp ---~ local k = known[str] ---~ if not k then ---~ k = tex.sp(str) ---~ known[str] = k ---~ end ---~ return k ---~ end - -stringtodimen = string.todimen -- local variable defined earlier - -function number.toscaled(d) - return format("%0.5f",d/2^16) -end - ---[[ldx-- -

    In a similar fashion we can define a glue datatype. In that case we -probably use a hash instead of a one-element table.

    ---ldx]]-- - ---[[ldx-- -

    Goodies:

    ---ldx]]-- - -function number.percent(n,d) -- will be cleaned up once luatex 0.30 is out - d = d or tex.hsize - if type(d) == "string" then - d = stringtodimen(d) - end - return (n/100) * d -end - -number["%"] = number.percent +if not modules then modules = { } end modules ['util-dim'] = { + version = 1.001, + comment = "support for dimensions", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

    Internally TeX works with scaled points, which are +represented by integers. However, in practice, at least at the +TeX end we work with more generic units like points (pt). Going +from scaled points (numbers) to one of those units can be +done by using the conversion factors collected in the following +table.

    +--ldx]]-- + +local format, match, gsub, type, setmetatable = string.format, string.match, string.gsub, type, setmetatable +local P, S, R, Cc, C, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.Cc, lpeg.C, lpeg.match + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex +local formatters = string.formatters + +--this might become another namespace + +number = number or { } +local number = number + +number.tonumberf = function(n) return match(format("%.20f",n),"(.-0?)0*$") end -- one zero too much but alas +number.tonumberg = function(n) return format("%.20g",n) end + +local dimenfactors = allocate { + ["pt"] = 1/65536, + ["in"] = ( 100/ 7227)/65536, + ["cm"] = ( 254/ 7227)/65536, + ["mm"] = ( 2540/ 7227)/65536, + ["sp"] = 1, -- 65536 sp in 1pt + ["bp"] = ( 7200/ 7227)/65536, + ["pc"] = ( 1/ 12)/65536, + ["dd"] = ( 1157/ 1238)/65536, + ["cc"] = ( 1157/14856)/65536, + ["nd"] = (20320/21681)/65536, + ["nc"] = ( 5080/65043)/65536 +} + +--~ print(table.serialize(dimenfactors)) +--~ +--~ %.99g: +--~ +--~ t={ +--~ ["bp"]=1.5201782378580324e-005, +--~ ["cc"]=1.1883696112892098e-006, +--~ ["cm"]=5.3628510057769479e-007, +--~ ["dd"]=1.4260435335470516e-005, +--~ ["em"]=0.000152587890625, +--~ ["ex"]=6.103515625e-005, +--~ ["in"]=2.1113586636917117e-007, +--~ ["mm"]=5.3628510057769473e-008, +--~ ["nc"]=1.1917446679504327e-006, +--~ ["nd"]=1.4300936015405194e-005, +--~ ["pc"]=1.2715657552083333e-006, +--~ ["pt"]=1.52587890625e-005, +--~ ["sp"]=1, +--~ } +--~ +--~ patched %s and tonumber +--~ +--~ t={ +--~ ["bp"]=0.00001520178238, +--~ ["cc"]=0.00000118836961, +--~ ["cm"]=0.0000005362851, +--~ ["dd"]=0.00001426043534, +--~ ["em"]=0.00015258789063, +--~ ["ex"]=0.00006103515625, +--~ ["in"]=0.00000021113587, +--~ ["mm"]=0.00000005362851, +--~ ["nc"]=0.00000119174467, +--~ ["nd"]=0.00001430093602, +--~ ["pc"]=0.00000127156576, +--~ ["pt"]=0.00001525878906, +--~ ["sp"]=1, +--~ } + +--[[ldx-- +

    A conversion function that takes a number, unit (string) and optional +format (string) is implemented using this table.

    +--ldx]]-- + + +local function numbertodimen(n,unit,fmt) + if type(n) == 'string' then + return n + else + unit = unit or 'pt' + if not fmt then + fmt = "%s%s" + elseif fmt == true then + fmt = "%0.5f%s" + end + return format(fmt,n*dimenfactors[unit],unit) + -- if fmt then + -- return format(fmt,n*dimenfactors[unit],unit) + -- else + -- return match(format("%.20f",n*dimenfactors[unit]),"(.-0?)0*$") .. unit + -- end + end +end + +--[[ldx-- +

    We collect a bunch of converters in the number namespace.
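    For illustration (an editorial sketch, not part of the patch, and assuming this module is loaded), the converters defined just below can be used like this:

    print(number.topoints(65536))                -- 1pt
    print(number.topoints(65536,true))           -- 1.00000pt
    print(number.tocentimeters(65536,"%0.4f%s")) -- 0.0351cm (approximately)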

    +--ldx]]-- + +number.maxdimen = 1073741823 +number.todimen = numbertodimen +number.dimenfactors = dimenfactors + +function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end +function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end +function number.tocentimeters (n,fmt) return numbertodimen(n,"cm",fmt) end +function number.tomillimeters (n,fmt) return numbertodimen(n,"mm",fmt) end +function number.toscaledpoints(n,fmt) return numbertodimen(n,"sp",fmt) end +function number.toscaledpoints(n) return n .. "sp" end +function number.tobasepoints (n,fmt) return numbertodimen(n,"bp",fmt) end +function number.topicas (n,fmt) return numbertodimen(n "pc",fmt) end +function number.todidots (n,fmt) return numbertodimen(n,"dd",fmt) end +function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end +function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end +function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end + +--[[ldx-- +

    More interesting is to implement a (sort of) dimen datatype, one +that permits calculations too. First we define a function that +converts a string to scaled points. We use lpeg. We capture +a number and optionally a unit. When no unit is given, a constant +capture takes place.
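    As a quick, editorial illustration of the splitter defined just below (not part of the original source):

    print(number.splitdimen("10.5pt")) -- 10.5  pt
    print(number.splitdimen("-3cm"))   -- -3    cm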

    +--ldx]]-- + +local amount = (S("+-")^0 * R("09")^0 * P(".")^0 * R("09")^0) + Cc("0") +local unit = R("az")^1 + +local dimenpair = amount/tonumber * (unit^1/dimenfactors + Cc(1)) -- tonumber is new + +lpeg.patterns.dimenpair = dimenpair + +local splitter = amount/tonumber * C(unit^1) + +function number.splitdimen(str) + return lpegmatch(splitter,str) +end + +--[[ldx-- +

    We use a metatable to intercept errors. When no key is found in +the table with factors, the metatable will be consulted for an +alternative index function.

    +--ldx]]-- + +setmetatableindex(dimenfactors, function(t,s) + -- error("wrong dimension: " .. (s or "?")) -- better a message + return false +end) + +--[[ldx-- +

    We redefine the following function later on, so we comment it +here (which saves us some bytecode).

    +--ldx]]-- + +-- function string.todimen(str) +-- if type(str) == "number" then +-- return str +-- else +-- local value, unit = lpegmatch(dimenpair,str) +-- return value/unit +-- end +-- end +-- +-- local stringtodimen = string.todimen + +local stringtodimen -- assigned later (commenting saves bytecode) + +local amount = S("+-")^0 * R("09")^0 * S(".,")^0 * R("09")^0 +local unit = P("pt") + P("cm") + P("mm") + P("sp") + P("bp") + P("in") + + P("pc") + P("dd") + P("cc") + P("nd") + P("nc") + +local validdimen = amount * unit + +lpeg.patterns.validdimen = validdimen + +--[[ldx-- +

    This converter accepts calls like:

    + + +string.todimen("10") +string.todimen(".10") +string.todimen("10.0") +string.todimen("10.0pt") +string.todimen("10pt") +string.todimen("10.0pt") + + +

    With this in place, we can now implement a proper datatype for dimensions, one +that permits us to do this:

    + + +s = dimen "10pt" + dimen "20pt" + dimen "200pt" + - dimen "100sp" / 10 + "20pt" + "0pt" + + +

    We create a local metatable for this new type:

    +--ldx]]-- + +local dimensions = { } + +--[[ldx-- +

    The main (and globally visible) representation of a dimen is defined next: it is +a one-element table. The unit that is returned from the match is normally a number +(one of the previously defined factors) but we also accept functions. Later we will +see why. This function is redefined later.

    +--ldx]]-- + +-- function dimen(a) +-- if a then +-- local ta= type(a) +-- if ta == "string" then +-- local value, unit = lpegmatch(pattern,a) +-- if type(unit) == "function" then +-- k = value/unit() +-- else +-- k = value/unit +-- end +-- a = k +-- elseif ta == "table" then +-- a = a[1] +-- end +-- return setmetatable({ a }, dimensions) +-- else +-- return setmetatable({ 0 }, dimensions) +-- end +-- end + +--[[ldx-- +

    This function returns a small hash with a metatable attached. It is +through this metatable that we can do the calculations. We could have +shared some of the code but for reasons of speed we don't.
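    For instance (an editorial usage sketch, assuming the complete module, including the cached dimen constructor defined further on, is loaded):

    local d = dimen "10pt" + "5pt" - dimen "2pt"
    print(d)                -- 13pt
    print(d > dimen "10pt") -- true, via the __lt metamethod below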

    +--ldx]]-- + +function dimensions.__add(a, b) + local ta, tb = type(a), type(b) + if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end + if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end + return setmetatable({ a + b }, dimensions) +end + +function dimensions.__sub(a, b) + local ta, tb = type(a), type(b) + if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end + if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end + return setmetatable({ a - b }, dimensions) +end + +function dimensions.__mul(a, b) + local ta, tb = type(a), type(b) + if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end + if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end + return setmetatable({ a * b }, dimensions) +end + +function dimensions.__div(a, b) + local ta, tb = type(a), type(b) + if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end + if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end + return setmetatable({ a / b }, dimensions) +end + +function dimensions.__unm(a) + local ta = type(a) + if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end + return setmetatable({ - a }, dimensions) +end + +--[[ldx-- +

    It makes no sense to implement the power and modulo functions, but +the next two do make sense because they permit code like:

    + + +local a, b = dimen "10pt", dimen "11pt" +... +if a > b then + ... +end + +--ldx]]-- + +-- makes no sense: dimensions.__pow and dimensions.__mod + +function dimensions.__lt(a, b) + return a[1] < b[1] +end + +function dimensions.__eq(a, b) + return a[1] == b[1] +end + +--[[ldx-- +

    We also need to provide a function for conversion to string (so that +we can print dimensions). We print them as points, just like TeX does.

    +--ldx]]-- + +function dimensions.__tostring(a) + return a[1]/65536 .. "pt" -- instead of todimen(a[1]) +end + +--[[ldx-- +

    Since it does not take much code, we also provide +a few accessors:

    + + +print(dimen().pt) +print(dimen().sp) + +--ldx]]-- + +function dimensions.__index(tab,key) + local d = dimenfactors[key] + if not d then + error("illegal property of dimen: " .. key) + d = 1 + end + return 1/d +end + +--[[ldx-- +

    In the converter from string to dimension we support functions as +factors. This is because in TeX we have a few more units: +ex and em. These are not constant factors but +depend on the current font. They are not defined by default, but need +an explicit function call. This is because at the moment that this code +is loaded, the relevant tables that hold the functions needed may not +yet be available.
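    As an illustration only (the helper name below is made up and not part of the patch): a font dependent factor would return "units per scaled point", so for ex we would divide one by the current ex height in scaled points:

    -- hypothetical: currentexheightsp() would query the current font parameters
    -- dimenfactors["ex"] = function() return 1/currentexheightsp() end
    -- dimen "2ex" -- then yields 2/(1/exheight) = 2*exheight scaled points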

    +--ldx]]-- + + dimenfactors["ex"] = 4 * 1/65536 -- 4pt + dimenfactors["em"] = 10 * 1/65536 -- 10pt +-- dimenfactors["%"] = 4 * 1/65536 -- 400pt/100 + +--[[ldx-- +

    The previous code is rather efficient (also thanks to lpeg) but we +can speed it up by caching converted dimensions. On my machine (2008) the following +loop takes about 25.5 seconds.

    + + +for i=1,1000000 do + local s = dimen "10pt" + dimen "20pt" + dimen "200pt" + - dimen "100sp" / 10 + "20pt" + "0pt" +end + + +

    When we cache converted strings this becomes 16.3 seconds. In order not +to waste too much memory on it, we tag the values of the cache as being +weak, which means that the garbage collector will collect them in a next +sweep. This means that in most cases the speed up is mostly affecting the +current couple of calculations and as such the speed penalty is small.

    + +

    We redefine two previously defined functions that can benefit from +this:

    +--ldx]]-- + +local known = { } setmetatable(known, { __mode = "v" }) + +function dimen(a) + if a then + local ta= type(a) + if ta == "string" then + local k = known[a] + if k then + a = k + else + local value, unit = lpegmatch(dimenpair,a) + if type(unit) == "function" then + k = value/unit() + else + k = value/unit + end + known[a] = k + a = k + end + elseif ta == "table" then + a = a[1] + end + return setmetatable({ a }, dimensions) + else + return setmetatable({ 0 }, dimensions) + end +end + +function string.todimen(str) -- maybe use tex.sp when available + if type(str) == "number" then + return str + else + local k = known[str] + if not k then + local value, unit = lpegmatch(dimenpair,str) + if value and unit then + k = value/unit -- to be considered: round + else + k = 0 + end + -- print(str,value,unit) + known[str] = k + end + return k + end +end + +--~ local known = { } + +--~ function string.todimen(str) -- maybe use tex.sp +--~ local k = known[str] +--~ if not k then +--~ k = tex.sp(str) +--~ known[str] = k +--~ end +--~ return k +--~ end + +stringtodimen = string.todimen -- local variable defined earlier + +function number.toscaled(d) + return format("%0.5f",d/2^16) +end + +--[[ldx-- +

    In a similar fashion we can define a glue datatype. In that case we +probably use a hash instead of a one-element table.
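    Purely as an editorial sketch of that idea (not part of the patch), such a glue type could be a hash with width, stretch and shrink in scaled points, added componentwise:

    local glue_mt = { }

    function glue_mt.__add(a,b)
        return setmetatable({
            width   = a.width   + b.width,
            stretch = a.stretch + b.stretch,
            shrink  = a.shrink  + b.shrink,
        }, glue_mt)
    end

    local function glue(width,stretch,shrink)
        return setmetatable({ width = width or 0, stretch = stretch or 0, shrink = shrink or 0 }, glue_mt)
    end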

    +--ldx]]-- + +--[[ldx-- +

    Goodies:
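    A small, editorial usage sketch of the percent goodie defined below (the dimension string is first converted with the cached stringtodimen):

    -- number.percent(10,"100pt") -- 655360 scaled points, i.e. 10pt
    -- number["%"](50)            -- half of tex.hsize (only inside a TeX run)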

    +--ldx]]-- + +function number.percent(n,d) -- will be cleaned up once luatex 0.30 is out + d = d or tex.hsize + if type(d) == "string" then + d = stringtodimen(d) + end + return (n/100) * d +end + +number["%"] = number.percent diff --git a/tex/context/base/util-env.lua b/tex/context/base/util-env.lua index f4f3ef69f..1b1157931 100644 --- a/tex/context/base/util-env.lua +++ b/tex/context/base/util-env.lua @@ -1,287 +1,287 @@ -if not modules then modules = { } end modules ['util-env'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local allocate, mark = utilities.storage.allocate, utilities.storage.mark - -local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find -local unquoted, quoted = string.unquoted, string.quoted -local concat, insert, remove = table.concat, table.insert, table.remove - -environment = environment or { } -local environment = environment - --- precautions - -os.setlocale(nil,nil) -- useless feature and even dangerous in luatex - -function os.setlocale() - -- no way you can mess with it -end - --- dirty tricks (we will replace the texlua call by luatex --luaonly) - -local validengines = allocate { - ["luatex"] = true, - ["luajittex"] = true, - -- ["luatex.exe"] = true, - -- ["luajittex.exe"] = true, -} - -local basicengines = allocate { - ["luatex"] = "luatex", - ["texlua"] = "luatex", - ["texluac"] = "luatex", - ["luajittex"] = "luajittex", - ["texluajit"] = "luajittex", - -- ["texlua.exe"] = "luatex", - -- ["texluajit.exe"] = "luajittex", -} - -local luaengines=allocate { - ["lua"] = true, - ["luajit"] = true, -} - -environment.validengines = validengines -environment.basicengines = basicengines - --- [-1] = binary --- [ 0] = self --- [ 1] = argument 1 ... 
- --- instead we could set ranges - -if not arg then - -- used as library -elseif luaengines[file.removesuffix(arg[-1])] then --- arg[-1] = arg[0] --- arg[ 0] = arg[1] --- for k=2,#arg do --- arg[k-1] = arg[k] --- end --- remove(arg) -- last -elseif validengines[file.removesuffix(arg[0])] then - if arg[1] == "--luaonly" then - arg[-1] = arg[0] - arg[ 0] = arg[2] - for k=3,#arg do - arg[k-2] = arg[k] - end - remove(arg) -- last - remove(arg) -- pre-last - else - -- tex run - end - - -- This is an ugly hack but it permits symlinking a script (say 'context') to 'mtxrun' as in: - -- - -- ln -s /opt/minimals/tex/texmf-linux-64/bin/mtxrun context - -- - -- The special mapping hack is needed because 'luatools' boils down to 'mtxrun --script base' - -- but it's unlikely that there will be more of this - - local originalzero = file.basename(arg[0]) - local specialmapping = { luatools == "base" } - - if originalzero ~= "mtxrun" and originalzero ~= "mtxrun.lua" then - arg[0] = specialmapping[originalzero] or originalzero - insert(arg,0,"--script") - insert(arg,0,"mtxrun") - end - -end - --- environment - -environment.arguments = allocate() -environment.files = allocate() -environment.sortedflags = nil - --- context specific arguments (in order not to confuse the engine) - -function environment.initializearguments(arg) - local arguments, files = { }, { } - environment.arguments, environment.files, environment.sortedflags = arguments, files, nil - for index=1,#arg do - local argument = arg[index] - if index > 0 then - local flag, value = match(argument,"^%-+(.-)=(.-)$") - if flag then - flag = gsub(flag,"^c:","") - arguments[flag] = unquoted(value or "") - else - flag = match(argument,"^%-+(.+)") - if flag then - flag = gsub(flag,"^c:","") - arguments[flag] = true - else - files[#files+1] = argument - end - end - end - end - environment.ownname = file.reslash(environment.ownname or arg[0] or 'unknown.lua') -end - -function environment.setargument(name,value) - environment.arguments[name] = value -end - --- todo: defaults, better checks e.g on type (boolean versus string) --- --- tricky: too many hits when we support partials unless we add --- a registration of arguments so from now on we have 'partial' - -function environment.getargument(name,partial) - local arguments, sortedflags = environment.arguments, environment.sortedflags - if arguments[name] then - return arguments[name] - elseif partial then - if not sortedflags then - sortedflags = allocate(table.sortedkeys(arguments)) - for k=1,#sortedflags do - sortedflags[k] = "^" .. 
sortedflags[k] - end - environment.sortedflags = sortedflags - end - -- example of potential clash: ^mode ^modefile - for k=1,#sortedflags do - local v = sortedflags[k] - if find(name,v) then - return arguments[sub(v,2,#v)] - end - end - end - return nil -end - -environment.argument = environment.getargument - -function environment.splitarguments(separator) -- rather special, cut-off before separator - local done, before, after = false, { }, { } - local originalarguments = environment.originalarguments - for k=1,#originalarguments do - local v = originalarguments[k] - if not done and v == separator then - done = true - elseif done then - after[#after+1] = v - else - before[#before+1] = v - end - end - return before, after -end - -function environment.reconstructcommandline(arg,noquote) - arg = arg or environment.originalarguments - if noquote and #arg == 1 then - -- we could just do: return unquoted(resolvers.resolve(arg[i])) - local a = arg[1] - a = resolvers.resolve(a) - a = unquoted(a) - return a - elseif #arg > 0 then - local result = { } - for i=1,#arg do - -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) - local a = arg[i] - a = resolvers.resolve(a) - a = unquoted(a) - a = gsub(a,'"','\\"') -- tricky - if find(a," ") then - result[#result+1] = quoted(a) - else - result[#result+1] = a - end - end - return concat(result," ") - else - return "" - end -end - --- handy in e.g. package.addluapath(environment.relativepath("scripts")) - -function environment.relativepath(path,root) - if not path then - path = "" - end - if not file.is_rootbased_path(path) then - if not root then - root = file.pathpart(environment.ownscript or environment.ownname or ".") - end - if root == "" then - root = "." - end - path = root .. "/" .. path - end - return file.collapsepath(path,true) -end - --- -- when script lives on e:/tmp we get this: --- --- print(environment.relativepath("x/y/z","c:/w")) -- c:/w/x/y/z --- print(environment.relativepath("x")) -- e:/tmp/x --- print(environment.relativepath("../x")) -- e:/x --- print(environment.relativepath("./x")) -- e:/tmp/x --- print(environment.relativepath("/x")) -- /x --- print(environment.relativepath("c:/x")) -- c:/x --- print(environment.relativepath("//x")) -- //x --- print(environment.relativepath()) -- e:/tmp - --- -- to be tested: --- --- function environment.reconstructcommandline(arg,noquote) --- arg = arg or environment.originalarguments --- if noquote and #arg == 1 then --- return unquoted(resolvers.resolve(arg[1])) --- elseif #arg > 0 then --- local result = { } --- for i=1,#arg do --- result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) -- always quote --- end --- return concat(result," ") --- else --- return "" --- end --- end - -if arg then - - -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later) - local newarg, instring = { }, false - - for index=1,#arg do - local argument = arg[index] - if find(argument,"^\"") then - newarg[#newarg+1] = gsub(argument,"^\"","") - if not find(argument,"\"$") then - instring = true - end - elseif find(argument,"\"$") then - newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","") - instring = false - elseif instring then - newarg[#newarg] = newarg[#newarg] .. " " .. 
argument - else - newarg[#newarg+1] = argument - end - end - for i=1,-5,-1 do - newarg[i] = arg[i] - end - - environment.initializearguments(newarg) - - environment.originalarguments = mark(newarg) - environment.rawarguments = mark(arg) - - arg = { } -- prevent duplicate handling - -end +if not modules then modules = { } end modules ['util-env'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local allocate, mark = utilities.storage.allocate, utilities.storage.mark + +local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find +local unquoted, quoted = string.unquoted, string.quoted +local concat, insert, remove = table.concat, table.insert, table.remove + +environment = environment or { } +local environment = environment + +-- precautions + +os.setlocale(nil,nil) -- useless feature and even dangerous in luatex + +function os.setlocale() + -- no way you can mess with it +end + +-- dirty tricks (we will replace the texlua call by luatex --luaonly) + +local validengines = allocate { + ["luatex"] = true, + ["luajittex"] = true, + -- ["luatex.exe"] = true, + -- ["luajittex.exe"] = true, +} + +local basicengines = allocate { + ["luatex"] = "luatex", + ["texlua"] = "luatex", + ["texluac"] = "luatex", + ["luajittex"] = "luajittex", + ["texluajit"] = "luajittex", + -- ["texlua.exe"] = "luatex", + -- ["texluajit.exe"] = "luajittex", +} + +local luaengines=allocate { + ["lua"] = true, + ["luajit"] = true, +} + +environment.validengines = validengines +environment.basicengines = basicengines + +-- [-1] = binary +-- [ 0] = self +-- [ 1] = argument 1 ... 
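-- Editorial illustration (not part of the patch) of what the argument
-- initialization defined further below produces for a typical call:
--
--   arg = { [0] = "mtxrun", "--mode=draft", "--verbose", "myfile.tex" }
--
-- after environment.initializearguments(arg) we roughly get:
--
--   environment.arguments : { mode = "draft", verbose = true }
--   environment.files     : { "myfile.tex" }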
+ +-- instead we could set ranges + +if not arg then + -- used as library +elseif luaengines[file.removesuffix(arg[-1])] then +-- arg[-1] = arg[0] +-- arg[ 0] = arg[1] +-- for k=2,#arg do +-- arg[k-1] = arg[k] +-- end +-- remove(arg) -- last +elseif validengines[file.removesuffix(arg[0])] then + if arg[1] == "--luaonly" then + arg[-1] = arg[0] + arg[ 0] = arg[2] + for k=3,#arg do + arg[k-2] = arg[k] + end + remove(arg) -- last + remove(arg) -- pre-last + else + -- tex run + end + + -- This is an ugly hack but it permits symlinking a script (say 'context') to 'mtxrun' as in: + -- + -- ln -s /opt/minimals/tex/texmf-linux-64/bin/mtxrun context + -- + -- The special mapping hack is needed because 'luatools' boils down to 'mtxrun --script base' + -- but it's unlikely that there will be more of this + + local originalzero = file.basename(arg[0]) + local specialmapping = { luatools == "base" } + + if originalzero ~= "mtxrun" and originalzero ~= "mtxrun.lua" then + arg[0] = specialmapping[originalzero] or originalzero + insert(arg,0,"--script") + insert(arg,0,"mtxrun") + end + +end + +-- environment + +environment.arguments = allocate() +environment.files = allocate() +environment.sortedflags = nil + +-- context specific arguments (in order not to confuse the engine) + +function environment.initializearguments(arg) + local arguments, files = { }, { } + environment.arguments, environment.files, environment.sortedflags = arguments, files, nil + for index=1,#arg do + local argument = arg[index] + if index > 0 then + local flag, value = match(argument,"^%-+(.-)=(.-)$") + if flag then + flag = gsub(flag,"^c:","") + arguments[flag] = unquoted(value or "") + else + flag = match(argument,"^%-+(.+)") + if flag then + flag = gsub(flag,"^c:","") + arguments[flag] = true + else + files[#files+1] = argument + end + end + end + end + environment.ownname = file.reslash(environment.ownname or arg[0] or 'unknown.lua') +end + +function environment.setargument(name,value) + environment.arguments[name] = value +end + +-- todo: defaults, better checks e.g on type (boolean versus string) +-- +-- tricky: too many hits when we support partials unless we add +-- a registration of arguments so from now on we have 'partial' + +function environment.getargument(name,partial) + local arguments, sortedflags = environment.arguments, environment.sortedflags + if arguments[name] then + return arguments[name] + elseif partial then + if not sortedflags then + sortedflags = allocate(table.sortedkeys(arguments)) + for k=1,#sortedflags do + sortedflags[k] = "^" .. 
sortedflags[k] + end + environment.sortedflags = sortedflags + end + -- example of potential clash: ^mode ^modefile + for k=1,#sortedflags do + local v = sortedflags[k] + if find(name,v) then + return arguments[sub(v,2,#v)] + end + end + end + return nil +end + +environment.argument = environment.getargument + +function environment.splitarguments(separator) -- rather special, cut-off before separator + local done, before, after = false, { }, { } + local originalarguments = environment.originalarguments + for k=1,#originalarguments do + local v = originalarguments[k] + if not done and v == separator then + done = true + elseif done then + after[#after+1] = v + else + before[#before+1] = v + end + end + return before, after +end + +function environment.reconstructcommandline(arg,noquote) + arg = arg or environment.originalarguments + if noquote and #arg == 1 then + -- we could just do: return unquoted(resolvers.resolve(arg[i])) + local a = arg[1] + a = resolvers.resolve(a) + a = unquoted(a) + return a + elseif #arg > 0 then + local result = { } + for i=1,#arg do + -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) + local a = arg[i] + a = resolvers.resolve(a) + a = unquoted(a) + a = gsub(a,'"','\\"') -- tricky + if find(a," ") then + result[#result+1] = quoted(a) + else + result[#result+1] = a + end + end + return concat(result," ") + else + return "" + end +end + +-- handy in e.g. package.addluapath(environment.relativepath("scripts")) + +function environment.relativepath(path,root) + if not path then + path = "" + end + if not file.is_rootbased_path(path) then + if not root then + root = file.pathpart(environment.ownscript or environment.ownname or ".") + end + if root == "" then + root = "." + end + path = root .. "/" .. path + end + return file.collapsepath(path,true) +end + +-- -- when script lives on e:/tmp we get this: +-- +-- print(environment.relativepath("x/y/z","c:/w")) -- c:/w/x/y/z +-- print(environment.relativepath("x")) -- e:/tmp/x +-- print(environment.relativepath("../x")) -- e:/x +-- print(environment.relativepath("./x")) -- e:/tmp/x +-- print(environment.relativepath("/x")) -- /x +-- print(environment.relativepath("c:/x")) -- c:/x +-- print(environment.relativepath("//x")) -- //x +-- print(environment.relativepath()) -- e:/tmp + +-- -- to be tested: +-- +-- function environment.reconstructcommandline(arg,noquote) +-- arg = arg or environment.originalarguments +-- if noquote and #arg == 1 then +-- return unquoted(resolvers.resolve(arg[1])) +-- elseif #arg > 0 then +-- local result = { } +-- for i=1,#arg do +-- result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) -- always quote +-- end +-- return concat(result," ") +-- else +-- return "" +-- end +-- end + +if arg then + + -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later) + local newarg, instring = { }, false + + for index=1,#arg do + local argument = arg[index] + if find(argument,"^\"") then + newarg[#newarg+1] = gsub(argument,"^\"","") + if not find(argument,"\"$") then + instring = true + end + elseif find(argument,"\"$") then + newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","") + instring = false + elseif instring then + newarg[#newarg] = newarg[#newarg] .. " " .. 
argument + else + newarg[#newarg+1] = argument + end + end + for i=1,-5,-1 do + newarg[i] = arg[i] + end + + environment.initializearguments(newarg) + + environment.originalarguments = mark(newarg) + environment.rawarguments = mark(arg) + + arg = { } -- prevent duplicate handling + +end diff --git a/tex/context/base/util-fmt.lua b/tex/context/base/util-fmt.lua index 371a5dfce..8ec7236a9 100644 --- a/tex/context/base/util-fmt.lua +++ b/tex/context/base/util-fmt.lua @@ -1,76 +1,76 @@ -if not modules then modules = { } end modules ['util-fmt'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -utilities = utilities or { } -utilities.formatters = utilities.formatters or { } -local formatters = utilities.formatters - -local concat, format = table.concat, string.format -local tostring, type = tostring, type -local strip = string.strip - -local lpegmatch = lpeg.match -local stripper = lpeg.patterns.stripzeros - -function formatters.stripzeros(str) - return lpegmatch(stripper,str) -end - -function formatters.formatcolumns(result,between) - if result and #result > 0 then - between = between or " " - local widths, numbers = { }, { } - local first = result[1] - local n = #first - for i=1,n do - widths[i] = 0 - end - for i=1,#result do - local r = result[i] - for j=1,n do - local rj = r[j] - local tj = type(rj) - if tj == "number" then - numbers[j] = true - end - if tj ~= "string" then - rj = tostring(rj) - r[j] = rj - end - local w = #rj - if w > widths[j] then - widths[j] = w - end - end - end - for i=1,n do - local w = widths[i] - if numbers[i] then - if w > 80 then - widths[i] = "%s" .. between - else - widths[i] = "%0" .. w .. "i" .. between - end - else - if w > 80 then - widths[i] = "%s" .. between - elseif w > 0 then - widths[i] = "%-" .. w .. "s" .. between - else - widths[i] = "%s" - end - end - end - local template = strip(concat(widths)) - for i=1,#result do - local str = format(template,unpack(result[i])) - result[i] = strip(str) - end - end - return result -end +if not modules then modules = { } end modules ['util-fmt'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +utilities = utilities or { } +utilities.formatters = utilities.formatters or { } +local formatters = utilities.formatters + +local concat, format = table.concat, string.format +local tostring, type = tostring, type +local strip = string.strip + +local lpegmatch = lpeg.match +local stripper = lpeg.patterns.stripzeros + +function formatters.stripzeros(str) + return lpegmatch(stripper,str) +end + +function formatters.formatcolumns(result,between) + if result and #result > 0 then + between = between or " " + local widths, numbers = { }, { } + local first = result[1] + local n = #first + for i=1,n do + widths[i] = 0 + end + for i=1,#result do + local r = result[i] + for j=1,n do + local rj = r[j] + local tj = type(rj) + if tj == "number" then + numbers[j] = true + end + if tj ~= "string" then + rj = tostring(rj) + r[j] = rj + end + local w = #rj + if w > widths[j] then + widths[j] = w + end + end + end + for i=1,n do + local w = widths[i] + if numbers[i] then + if w > 80 then + widths[i] = "%s" .. between + else + widths[i] = "%0" .. w .. "i" .. 
between + end + else + if w > 80 then + widths[i] = "%s" .. between + elseif w > 0 then + widths[i] = "%-" .. w .. "s" .. between + else + widths[i] = "%s" + end + end + end + local template = strip(concat(widths)) + for i=1,#result do + local str = format(template,unpack(result[i])) + result[i] = strip(str) + end + end + return result +end diff --git a/tex/context/base/util-jsn.lua b/tex/context/base/util-jsn.lua index 29587cd38..9870d0896 100644 --- a/tex/context/base/util-jsn.lua +++ b/tex/context/base/util-jsn.lua @@ -1,146 +1,146 @@ -if not modules then modules = { } end modules ['util-jsn'] = { - version = 1.001, - comment = "companion to m-json.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- Of course we could make a nice complete parser with proper error messages but --- as json is generated programmatically errors are systematic and we can assume --- a correct stream. If not, we have some fatal error anyway. So, we can just rely --- on strings being strings (apart from the unicode escape which is not in 5.1) and --- as we first catch known types we just assume that anything else is a number. --- --- Reminder for me: check usage in framework and extend when needed. Also document --- it in the cld lib documentation. - -local P, V, R, S, C, Cc, Cs, Ct, Cf, Cg = lpeg.P, lpeg.V, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cf, lpeg.Cg -local lpegmatch = lpeg.match -local format = string.format -local utfchar = utf.char -local concat = table.concat - -local tonumber, tostring, rawset, type = tonumber, tostring, rawset, type - -local json = utilities.json or { } -utilities.json = json - --- moduledata = moduledata or { } --- moduledata.json = json - --- \\ \/ \b \f \n \r \t \uHHHH - -local lbrace = P("{") -local rbrace = P("}") -local lparent = P("[") -local rparent = P("]") -local comma = P(",") -local colon = P(":") -local dquote = P('"') - -local whitespace = lpeg.patterns.whitespace -local optionalws = whitespace^0 - -local escape = C(P("\\u") / "0x" * S("09","AF","af")) / function(s) return utfchar(tonumber(s)) end -local jstring = dquote * Cs((escape + (1-dquote))^0) * dquote -local jtrue = P("true") * Cc(true) -local jfalse = P("false") * Cc(false) -local jnull = P("null") * Cc(nil) -local jnumber = (1-whitespace-rparent-rbrace-comma)^1 / tonumber - -local key = jstring - -local jsonconverter = { "value", - object = lbrace * Cf(Ct("") * V("pair") * (comma * V("pair"))^0,rawset) * rbrace, - pair = Cg(optionalws * key * optionalws * colon * V("value")), - array = Ct(lparent * V("value") * (comma * V("value"))^0 * rparent), - value = optionalws * (jstring + V("object") + V("array") + jtrue + jfalse + jnull + jnumber + #rparent) * optionalws, -} - --- local jsonconverter = { "value", --- object = lbrace * Cf(Ct("") * V("pair") * (comma * V("pair"))^0,rawset) * rbrace, --- pair = Cg(optionalws * V("string") * optionalws * colon * V("value")), --- array = Ct(lparent * V("value") * (comma * V("value"))^0 * rparent), --- string = jstring, --- value = optionalws * (V("string") + V("object") + V("array") + jtrue + jfalse + jnull + jnumber) * optionalws, --- } - --- lpeg.print(jsonconverter) -- size 181 - -function json.tolua(str) - return lpegmatch(jsonconverter,str) -end - -local function tojson(value,t) -- we could optimize #t - local kind = type(value) - if kind == "table" then - local done = false - local size = #value - if size == 0 then - for k, v in next, 
value do - if done then - t[#t+1] = "," - else - t[#t+1] = "{" - done = true - end - t[#t+1] = format("%q:",k) - tojson(v,t) - end - if done then - t[#t+1] = "}" - else - t[#t+1] = "{}" - end - elseif size == 1 then - -- we can optimize for non tables - t[#t+1] = "[" - tojson(value[1],t) - t[#t+1] = "]" - else - for i=1,size do - if done then - t[#t+1] = "," - else - t[#t+1] = "[" - done = true - end - tojson(value[i],t) - end - t[#t+1] = "]" - end - elseif kind == "string" then - t[#t+1] = format("%q",value) - elseif kind == "number" then - t[#t+1] = value - elseif kind == "boolean" then - t[#t+1] = tostring(value) - end - return t -end - -function json.tostring(value) - -- todo optimize for non table - local kind = type(value) - if kind == "table" then - return concat(tojson(value,{}),"") - elseif kind == "string" or kind == "number" then - return value - else - return tostring(value) - end -end - --- local tmp = [[ { "a" : true, "b" : [ 123 , 456E-10, { "a" : true, "b" : [ 123 , 456 ] } ] } ]] - --- tmp = json.tolua(tmp) --- inspect(tmp) --- tmp = json.tostring(tmp) --- inspect(tmp) --- tmp = json.tolua(tmp) --- inspect(tmp) --- tmp = json.tostring(tmp) --- inspect(tmp) - --- inspect(json.tostring(true)) +if not modules then modules = { } end modules ['util-jsn'] = { + version = 1.001, + comment = "companion to m-json.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Of course we could make a nice complete parser with proper error messages but +-- as json is generated programmatically errors are systematic and we can assume +-- a correct stream. If not, we have some fatal error anyway. So, we can just rely +-- on strings being strings (apart from the unicode escape which is not in 5.1) and +-- as we first catch known types we just assume that anything else is a number. +-- +-- Reminder for me: check usage in framework and extend when needed. Also document +-- it in the cld lib documentation. 
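-- Editorial illustration of the converter defined below (a sketch of the
-- expected behaviour, not part of the patch):
--
--   json.tolua('{ "a" : true, "b" : [ 1, 2.5, "x" ] }')
--
-- yields a Lua table equivalent to { a = true, b = { 1, 2.5, "x" } }, while
-- json.tostring maps such a table back onto a json string.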
+ +local P, V, R, S, C, Cc, Cs, Ct, Cf, Cg = lpeg.P, lpeg.V, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cf, lpeg.Cg +local lpegmatch = lpeg.match +local format = string.format +local utfchar = utf.char +local concat = table.concat + +local tonumber, tostring, rawset, type = tonumber, tostring, rawset, type + +local json = utilities.json or { } +utilities.json = json + +-- moduledata = moduledata or { } +-- moduledata.json = json + +-- \\ \/ \b \f \n \r \t \uHHHH + +local lbrace = P("{") +local rbrace = P("}") +local lparent = P("[") +local rparent = P("]") +local comma = P(",") +local colon = P(":") +local dquote = P('"') + +local whitespace = lpeg.patterns.whitespace +local optionalws = whitespace^0 + +local escape = C(P("\\u") / "0x" * S("09","AF","af")) / function(s) return utfchar(tonumber(s)) end +local jstring = dquote * Cs((escape + (1-dquote))^0) * dquote +local jtrue = P("true") * Cc(true) +local jfalse = P("false") * Cc(false) +local jnull = P("null") * Cc(nil) +local jnumber = (1-whitespace-rparent-rbrace-comma)^1 / tonumber + +local key = jstring + +local jsonconverter = { "value", + object = lbrace * Cf(Ct("") * V("pair") * (comma * V("pair"))^0,rawset) * rbrace, + pair = Cg(optionalws * key * optionalws * colon * V("value")), + array = Ct(lparent * V("value") * (comma * V("value"))^0 * rparent), + value = optionalws * (jstring + V("object") + V("array") + jtrue + jfalse + jnull + jnumber + #rparent) * optionalws, +} + +-- local jsonconverter = { "value", +-- object = lbrace * Cf(Ct("") * V("pair") * (comma * V("pair"))^0,rawset) * rbrace, +-- pair = Cg(optionalws * V("string") * optionalws * colon * V("value")), +-- array = Ct(lparent * V("value") * (comma * V("value"))^0 * rparent), +-- string = jstring, +-- value = optionalws * (V("string") + V("object") + V("array") + jtrue + jfalse + jnull + jnumber) * optionalws, +-- } + +-- lpeg.print(jsonconverter) -- size 181 + +function json.tolua(str) + return lpegmatch(jsonconverter,str) +end + +local function tojson(value,t) -- we could optimize #t + local kind = type(value) + if kind == "table" then + local done = false + local size = #value + if size == 0 then + for k, v in next, value do + if done then + t[#t+1] = "," + else + t[#t+1] = "{" + done = true + end + t[#t+1] = format("%q:",k) + tojson(v,t) + end + if done then + t[#t+1] = "}" + else + t[#t+1] = "{}" + end + elseif size == 1 then + -- we can optimize for non tables + t[#t+1] = "[" + tojson(value[1],t) + t[#t+1] = "]" + else + for i=1,size do + if done then + t[#t+1] = "," + else + t[#t+1] = "[" + done = true + end + tojson(value[i],t) + end + t[#t+1] = "]" + end + elseif kind == "string" then + t[#t+1] = format("%q",value) + elseif kind == "number" then + t[#t+1] = value + elseif kind == "boolean" then + t[#t+1] = tostring(value) + end + return t +end + +function json.tostring(value) + -- todo optimize for non table + local kind = type(value) + if kind == "table" then + return concat(tojson(value,{}),"") + elseif kind == "string" or kind == "number" then + return value + else + return tostring(value) + end +end + +-- local tmp = [[ { "a" : true, "b" : [ 123 , 456E-10, { "a" : true, "b" : [ 123 , 456 ] } ] } ]] + +-- tmp = json.tolua(tmp) +-- inspect(tmp) +-- tmp = json.tostring(tmp) +-- inspect(tmp) +-- tmp = json.tolua(tmp) +-- inspect(tmp) +-- tmp = json.tostring(tmp) +-- inspect(tmp) + +-- inspect(json.tostring(true)) diff --git a/tex/context/base/util-lib.lua b/tex/context/base/util-lib.lua index c5c999113..065f91091 100644 --- 
a/tex/context/base/util-lib.lua +++ b/tex/context/base/util-lib.lua @@ -1,288 +1,288 @@ -if not modules then modules = { } end modules ['util-lib'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- This is experimental code for Hans and Luigi. Don't depend on it! There --- will be a plain variant. - ---[[ - -The problem with library bindings is manyfold. They are of course platform -dependent and while a binary with its directly related libraries are often -easy to maintain and load, additional libraries can each have their demands. - -One important aspect is that loading additional libraries from within the -loaded one is also operating system dependent. There can be shared libraries -elsewhere on the system and as there can be multiple libraries with the same -name but different usage and versioning there can be clashes. So there has to -be some logic in where to look for these sublibraries. - -We found out that for instance on windows libraries are by default sought on -the parents path and then on the binary paths and these of course can be in -an out of our control, thereby enlarging the changes on a clash. A rather -safe solution for that to load the library on the path where it sits. - -Another aspect is initialization. When you ask for a library t.e.x it will -try to initialize luaopen_t_e_x no matter if such an inializer is present. -However, because loading is configurable and in the case of luatex is already -partly under out control, this is easy to deal with. We only have to make -sure that we inform the loader that the library has been loaded so that -it won't load it twice. - -In swiglib we have chosen for a clear organization and although one can use -variants normally in the tex directory structure predictability is more or -less the standard. For instance: - -.../tex/texmf-mswin/bin/lib/luatex/lua/swiglib/mysql/core.dll -.../tex/texmf-mswin/bin/lib/luajittex/lua/swiglib/mysql/core.dll -.../tex/texmf-mswin/bin/lib/luatex/context/lua/swiglib/mysql/core.dll -.../tex/texmf-mswin/bin/lib/swiglib/lua/mysql/core.dll -.../tex/texmf-mswin/bin/lib/swiglib/lua/mysql/5.6/core.dll - -The lookups are determined via an entry in texmfcnf.lua: - -CLUAINPUTS = ".;$SELFAUTOLOC/lib/{$engine,luatex}/lua//", - -A request for t.e.x is converted to t/e/x.dll or t/e/x.so depending on the -platform. Then we use the regular finder to locate the file in the tex -directory structure. Once located we goto the path where it sits, load the -file and return to the original path. We register as t.e.x in order to -prevent reloading and also because the base name is seldom unique. - -The main function is a big one and evolved out of experiments that Luigi -Scarso and I conducted when playing with variants of SwigLib. The function -locates the library using the context mkiv resolver that operates on the -tds tree and if that doesn't work out well, the normal clib path is used. - -The lookups is somewhat clever in the sense that it can deal with (optional) -versions and can fall back on non versioned alternatives if needed, either -or not using a wildcard lookup. - -This code is experimental and by providing a special abstract loader (called -swiglib) we can start using the libraries. - -A complication is that we might end up with a luajittex path matching before a -luatex path due to the path spec. 
One solution is to first check with the engine -prefixed. This could be prevented by a more strict lib pattern but that is not -always under our control. So, we first check for paths with engine in their name -and then without. - -]]-- - --- seems to be clua in recent texlive - -local gsub, find = string.gsub, string.find -local pathpart, nameonly, joinfile = file.pathpart, file.nameonly, file.join -local findfile, findfiles = resolvers and resolvers.findfile, resolvers and resolvers.findfiles - -local loaded = package.loaded - -local report_swiglib = logs.reporter("swiglib") -local trace_swiglib = false trackers.register("resolvers.swiglib", function(v) trace_swiglib = v end) - --- We can check if there are more that one component, and if not, we can --- append 'core'. - -local done = false - -local function requireswiglib(required,version) - local trace_swiglib = trace_swiglib or package.helpers.trace - local library = loaded[required] - if library == nil then - -- initialize a few variables - local required_full = gsub(required,"%.","/") -- package.helpers.lualibfile - local required_path = pathpart(required_full) - local required_base = nameonly(required_full) - local required_name = required_base .. "." .. os.libsuffix - local version = type(version) == "string" and version ~= "" and version or false - local engine = environment.ownmain or false - -- - if trace_swiglib and not done then - local list = resolvers.expandedpathlistfromvariable("lib") -- fresh, no reuse - for i=1,#list do - report_swiglib("tds path %i: %s",i,list[i]) - end - end - -- helpers - local function found(locate,asked_library,how,...) - if trace_swiglib then - report_swiglib("checking %s: %a",how,asked_library) - end - return locate(asked_library,...) - end - local function check(locate,...) - local found = nil - if version then - local asked_library = joinfile(required_path,version,required_name) - if trace_swiglib then - report_swiglib("checking %s: %a","with version",asked_library) - end - found = locate(asked_library,...) - end - if not found or found == "" then - local asked_library = joinfile(required_path,required_name) - if trace_swiglib then - report_swiglib("checking %s: %a","with version",asked_library) - end - found = locate(asked_library,...) - end - return found and found ~= "" and found or false - end - -- Alternatively we could first collect the locations and then do the two attempts - -- on this list but in practice this is not more efficient as we might have a fast - -- match anyway. - local function attempt(checkpattern) - -- check cnf spec using name and version - if trace_swiglib then - report_swiglib("checking tds lib paths strictly") - end - local found = findfile and check(findfile,"lib") - if found and (not checkpattern or find(found,checkpattern)) then - return found - end - -- check cnf spec using wildcard - if trace_swiglib then - report_swiglib("checking tds lib paths with wildcard") - end - local asked_library = joinfile(required_path,".*",required_name) - if trace_swiglib then - report_swiglib("checking %s: %a","latest version",asked_library) - end - local list = findfiles(asked_library,"lib",true) - if list and #list > 0 then - table.sort(list) - local found = list[#list] - if found and (not checkpattern or find(found,checkpattern)) then - return found - end - end - -- Check lib paths using name and version. 
- if trace_swiglib then - report_swiglib("checking lib paths") - end - package.extralibpath(environment.ownpath) - local paths = package.libpaths() - for i=1,#paths do - local found = check(lfs.isfile) - if found and (not checkpattern or find(found,checkpattern)) then - return found - end - end - return false - end - local found_library = nil - if engine then - if trace_swiglib then - report_swiglib("attemp 1, engine %a",engine) - end - found_library = attempt("/"..engine.."/") - if not found_library then - if trace_swiglib then - report_swiglib("attemp 2, no engine",asked_library) - end - found_library = attempt() - end - else - found_library = attempt() - end - -- load and initialize when found - if not found_library then - if trace_swiglib then - report_swiglib("not found: %a",required) - end - library = false - else - local path = pathpart(found_library) - local base = nameonly(found_library) - dir.push(path) - if trace_swiglib then - report_swiglib("found: %a",found_library) - end - local message = nil - local opener = "luaopen_" .. required_base - library, message = package.loadlib(found_library,opener) - local libtype = type(library) - if libtype == "function" then - library = library() - else - report_swiglib("load error: %a returns %a, message %a",opener,libtype,message or "no message") - library = false - end - dir.pop() - end - -- cache result - if not library then - report_swiglib("unknown: %a",required) - elseif trace_swiglib then - report_swiglib("stored: %a",required) - end - loaded[required] = library - else - report_swiglib("reused: %a",required) - end - return library -end - ---[[ - -For convenience we make the require loader function swiglib aware. Alternatively -we could put the specific loader in the global namespace. - -]]-- - -local savedrequire = require - -function require(name,version) - if find(name,"^swiglib%.") then - return requireswiglib(name,version) - else - return savedrequire(name) - end -end - ---[[ - -At the cost of some overhead we provide a specific loader so that we can keep -track of swiglib usage which is handy for development. In context this is the -recommended loader. - -]]-- - -local swiglibs = { } - -function swiglib(name,version) - local library = swiglibs[name] - if not library then - statistics.starttiming(swiglibs) - if trace_swiglib then - report_swiglib("loading %a",name) - end - library = requireswiglib("swiglib." .. name,version) - swiglibs[name] = library - statistics.stoptiming(swiglibs) - end - return library -end - -statistics.register("used swiglibs", function() - if next(swiglibs) then - return string.format("%s, initial load time %s seconds",table.concat(table.sortedkeys(swiglibs)," "),statistics.elapsedtime(swiglibs)) - end -end) - ---[[ - -So, we now have: - -local gm = require("swiglib.gmwand.core") -local gm = swiglib("gmwand.core") -local sq = swiglib("mysql.core") -local sq = swiglib("mysql.core","5.6") - -Watch out, the last one is less explicit and lacks the swiglib prefix. - -]]-- +if not modules then modules = { } end modules ['util-lib'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- This is experimental code for Hans and Luigi. Don't depend on it! There +-- will be a plain variant. + +--[[ + +The problem with library bindings is manyfold. 
They are of course platform +dependent and while a binary with its directly related libraries are often +easy to maintain and load, additional libraries can each have their demands. + +One important aspect is that loading additional libraries from within the +loaded one is also operating system dependent. There can be shared libraries +elsewhere on the system and as there can be multiple libraries with the same +name but different usage and versioning there can be clashes. So there has to +be some logic in where to look for these sublibraries. + +We found out that for instance on windows libraries are by default sought on +the parents path and then on the binary paths and these of course can be in +an out of our control, thereby enlarging the changes on a clash. A rather +safe solution for that to load the library on the path where it sits. + +Another aspect is initialization. When you ask for a library t.e.x it will +try to initialize luaopen_t_e_x no matter if such an inializer is present. +However, because loading is configurable and in the case of luatex is already +partly under out control, this is easy to deal with. We only have to make +sure that we inform the loader that the library has been loaded so that +it won't load it twice. + +In swiglib we have chosen for a clear organization and although one can use +variants normally in the tex directory structure predictability is more or +less the standard. For instance: + +.../tex/texmf-mswin/bin/lib/luatex/lua/swiglib/mysql/core.dll +.../tex/texmf-mswin/bin/lib/luajittex/lua/swiglib/mysql/core.dll +.../tex/texmf-mswin/bin/lib/luatex/context/lua/swiglib/mysql/core.dll +.../tex/texmf-mswin/bin/lib/swiglib/lua/mysql/core.dll +.../tex/texmf-mswin/bin/lib/swiglib/lua/mysql/5.6/core.dll + +The lookups are determined via an entry in texmfcnf.lua: + +CLUAINPUTS = ".;$SELFAUTOLOC/lib/{$engine,luatex}/lua//", + +A request for t.e.x is converted to t/e/x.dll or t/e/x.so depending on the +platform. Then we use the regular finder to locate the file in the tex +directory structure. Once located we goto the path where it sits, load the +file and return to the original path. We register as t.e.x in order to +prevent reloading and also because the base name is seldom unique. + +The main function is a big one and evolved out of experiments that Luigi +Scarso and I conducted when playing with variants of SwigLib. The function +locates the library using the context mkiv resolver that operates on the +tds tree and if that doesn't work out well, the normal clib path is used. + +The lookups is somewhat clever in the sense that it can deal with (optional) +versions and can fall back on non versioned alternatives if needed, either +or not using a wildcard lookup. + +This code is experimental and by providing a special abstract loader (called +swiglib) we can start using the libraries. + +A complication is that we might end up with a luajittex path matching before a +luatex path due to the path spec. One solution is to first check with the engine +prefixed. This could be prevented by a more strict lib pattern but that is not +always under our control. So, we first check for paths with engine in their name +and then without. 
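As a rough illustration (hypothetical names and paths, not a specification), a call
like

local sq = swiglib("mysql.core","5.6")

resolves swiglib.mysql.core to swiglib/mysql/core plus the platform library suffix
and then tries, on the tds lib paths, first the versioned and then the unversioned
name, next a wildcard on the version directory (the hits are sorted so the latest
one wins):

swiglib/mysql/5.6/core.dll
swiglib/mysql/core.dll
swiglib/mysql/.*/core.dll

and only after that the same pair on the regular package lib paths. The whole
sequence is first run demanding that the found path contains the engine name and,
when that fails, once more without that restriction.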
+ +]]-- + +-- seems to be clua in recent texlive + +local gsub, find = string.gsub, string.find +local pathpart, nameonly, joinfile = file.pathpart, file.nameonly, file.join +local findfile, findfiles = resolvers and resolvers.findfile, resolvers and resolvers.findfiles + +local loaded = package.loaded + +local report_swiglib = logs.reporter("swiglib") +local trace_swiglib = false trackers.register("resolvers.swiglib", function(v) trace_swiglib = v end) + +-- We can check if there are more that one component, and if not, we can +-- append 'core'. + +local done = false + +local function requireswiglib(required,version) + local trace_swiglib = trace_swiglib or package.helpers.trace + local library = loaded[required] + if library == nil then + -- initialize a few variables + local required_full = gsub(required,"%.","/") -- package.helpers.lualibfile + local required_path = pathpart(required_full) + local required_base = nameonly(required_full) + local required_name = required_base .. "." .. os.libsuffix + local version = type(version) == "string" and version ~= "" and version or false + local engine = environment.ownmain or false + -- + if trace_swiglib and not done then + local list = resolvers.expandedpathlistfromvariable("lib") -- fresh, no reuse + for i=1,#list do + report_swiglib("tds path %i: %s",i,list[i]) + end + end + -- helpers + local function found(locate,asked_library,how,...) + if trace_swiglib then + report_swiglib("checking %s: %a",how,asked_library) + end + return locate(asked_library,...) + end + local function check(locate,...) + local found = nil + if version then + local asked_library = joinfile(required_path,version,required_name) + if trace_swiglib then + report_swiglib("checking %s: %a","with version",asked_library) + end + found = locate(asked_library,...) + end + if not found or found == "" then + local asked_library = joinfile(required_path,required_name) + if trace_swiglib then + report_swiglib("checking %s: %a","with version",asked_library) + end + found = locate(asked_library,...) + end + return found and found ~= "" and found or false + end + -- Alternatively we could first collect the locations and then do the two attempts + -- on this list but in practice this is not more efficient as we might have a fast + -- match anyway. + local function attempt(checkpattern) + -- check cnf spec using name and version + if trace_swiglib then + report_swiglib("checking tds lib paths strictly") + end + local found = findfile and check(findfile,"lib") + if found and (not checkpattern or find(found,checkpattern)) then + return found + end + -- check cnf spec using wildcard + if trace_swiglib then + report_swiglib("checking tds lib paths with wildcard") + end + local asked_library = joinfile(required_path,".*",required_name) + if trace_swiglib then + report_swiglib("checking %s: %a","latest version",asked_library) + end + local list = findfiles(asked_library,"lib",true) + if list and #list > 0 then + table.sort(list) + local found = list[#list] + if found and (not checkpattern or find(found,checkpattern)) then + return found + end + end + -- Check lib paths using name and version. 
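-- (these are the regular package lib paths, extended with the binary's own path;
-- they serve as fallback when the tds lookups above did not give a match)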
+ if trace_swiglib then + report_swiglib("checking lib paths") + end + package.extralibpath(environment.ownpath) + local paths = package.libpaths() + for i=1,#paths do + local found = check(lfs.isfile) + if found and (not checkpattern or find(found,checkpattern)) then + return found + end + end + return false + end + local found_library = nil + if engine then + if trace_swiglib then + report_swiglib("attemp 1, engine %a",engine) + end + found_library = attempt("/"..engine.."/") + if not found_library then + if trace_swiglib then + report_swiglib("attemp 2, no engine",asked_library) + end + found_library = attempt() + end + else + found_library = attempt() + end + -- load and initialize when found + if not found_library then + if trace_swiglib then + report_swiglib("not found: %a",required) + end + library = false + else + local path = pathpart(found_library) + local base = nameonly(found_library) + dir.push(path) + if trace_swiglib then + report_swiglib("found: %a",found_library) + end + local message = nil + local opener = "luaopen_" .. required_base + library, message = package.loadlib(found_library,opener) + local libtype = type(library) + if libtype == "function" then + library = library() + else + report_swiglib("load error: %a returns %a, message %a",opener,libtype,message or "no message") + library = false + end + dir.pop() + end + -- cache result + if not library then + report_swiglib("unknown: %a",required) + elseif trace_swiglib then + report_swiglib("stored: %a",required) + end + loaded[required] = library + else + report_swiglib("reused: %a",required) + end + return library +end + +--[[ + +For convenience we make the require loader function swiglib aware. Alternatively +we could put the specific loader in the global namespace. + +]]-- + +local savedrequire = require + +function require(name,version) + if find(name,"^swiglib%.") then + return requireswiglib(name,version) + else + return savedrequire(name) + end +end + +--[[ + +At the cost of some overhead we provide a specific loader so that we can keep +track of swiglib usage which is handy for development. In context this is the +recommended loader. + +]]-- + +local swiglibs = { } + +function swiglib(name,version) + local library = swiglibs[name] + if not library then + statistics.starttiming(swiglibs) + if trace_swiglib then + report_swiglib("loading %a",name) + end + library = requireswiglib("swiglib." .. name,version) + swiglibs[name] = library + statistics.stoptiming(swiglibs) + end + return library +end + +statistics.register("used swiglibs", function() + if next(swiglibs) then + return string.format("%s, initial load time %s seconds",table.concat(table.sortedkeys(swiglibs)," "),statistics.elapsedtime(swiglibs)) + end +end) + +--[[ + +So, we now have: + +local gm = require("swiglib.gmwand.core") +local gm = swiglib("gmwand.core") +local sq = swiglib("mysql.core") +local sq = swiglib("mysql.core","5.6") + +Watch out, the last one is less explicit and lacks the swiglib prefix. 
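Because require itself is overloaded above, the version argument also travels
through it for swiglib modules, so (hypothetically)

local sq = require("swiglib.mysql.core","5.6")

ends up in the same requireswiglib call, only without the timing and the extra
cache that the swiglib() wrapper maintains.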
+ +]]-- diff --git a/tex/context/base/util-lua.lua b/tex/context/base/util-lua.lua index f3be9dcd2..a69fa9cdd 100644 --- a/tex/context/base/util-lua.lua +++ b/tex/context/base/util-lua.lua @@ -1,351 +1,351 @@ -if not modules then modules = { } end modules ['util-lua'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - comment = "the strip code is written by Peter Cawley", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- we will remove the 5.1 code some day soon - -local rep, sub, byte, dump, format = string.rep, string.sub, string.byte, string.dump, string.format -local load, loadfile, type = load, loadfile, type - -utilities = utilities or {} -utilities.lua = utilities.lua or { } -local luautilities = utilities.lua - -local report_lua = logs.reporter("system","lua") - -local tracestripping = false -local forcestupidcompile = true -- use internal bytecode compiler -luautilities.stripcode = true -- support stripping when asked for -luautilities.alwaysstripcode = false -- saves 1 meg on 7 meg compressed format file (2012.08.12) -luautilities.nofstrippedchunks = 0 -luautilities.nofstrippedbytes = 0 -local strippedchunks = { } -- allocate() -luautilities.strippedchunks = strippedchunks - -luautilities.suffixes = { - tma = "tma", - tmc = jit and "tmb" or "tmc", - lua = "lua", - luc = jit and "lub" or "luc", - lui = "lui", - luv = "luv", - luj = "luj", - tua = "tua", - tuc = "tuc", -} - --- environment.loadpreprocessedfile can be set to a preprocessor - -if jit or status.luatex_version >= 74 then - - local function register(name) - if tracestripping then - report_lua("stripped bytecode from %a",name or "unknown") - end - strippedchunks[#strippedchunks+1] = name - luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1 - end - - local function stupidcompile(luafile,lucfile,strip) - local code = io.loaddata(luafile) - if code and code ~= "" then - code = load(code) - if code then - code = dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode) - if code and code ~= "" then - register(name) - io.savedata(lucfile,code) - return true, 0 - end - else - report_lua("fatal error %a in file %a",1,luafile) - end - else - report_lua("fatal error %a in file %a",2,luafile) - end - return false, 0 - end - - -- quite subtle ... doing this wrong incidentally can give more bytes - - function luautilities.loadedluacode(fullname,forcestrip,name) - -- quite subtle ... 
doing this wrong incidentally can give more bytes - name = name or fullname - local code = environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname) - if code then - code() - end - if forcestrip and luautilities.stripcode then - if type(forcestrip) == "function" then - forcestrip = forcestrip(fullname) - end - if forcestrip or luautilities.alwaysstripcode then - register(name) - return load(dump(code,true)), 0 - else - return code, 0 - end - elseif luautilities.alwaysstripcode then - register(name) - return load(dump(code,true)), 0 - else - return code, 0 - end - end - - function luautilities.strippedloadstring(code,forcestrip,name) -- not executed - if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then - code = load(code) - if not code then - report_lua("fatal error %a in file %a",3,name) - end - register(name) - code = dump(code,true) - end - return load(code), 0 - end - - function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true - report_lua("compiling %a into %a",luafile,lucfile) - os.remove(lucfile) - local done = stupidcompile(luafile,lucfile,strip ~= false) - if done then - report_lua("dumping %a into %a stripped",luafile,lucfile) - if cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then - report_lua("removing %a",luafile) - os.remove(luafile) - end - end - return done - end - - function luautilities.loadstripped(...) - local l = load(...) - if l then - return load(dump(l,true)) - end - end - -else - - -- The next function was posted by Peter Cawley on the lua list and strips line - -- number information etc. from the bytecode data blob. We only apply this trick - -- when we store data tables. Stripping makes the compressed format file about - -- 1MB smaller (and uncompressed we save at least 6MB). - -- - -- You can consider this feature an experiment, so it might disappear. There is - -- no noticeable gain in runtime although the memory footprint should be somewhat - -- smaller (and the file system has a bit less to deal with). - -- - -- Begin of borrowed code ... works for Lua 5.1 which LuaTeX currently uses ... 
- - local function register(name,before,after) - local delta = before - after - if tracestripping then - report_lua("bytecodes stripped from %a, # before %s, # after %s, delta %s",name,before,after,delta) - end - strippedchunks[#strippedchunks+1] = name - luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1 - luautilities.nofstrippedbytes = luautilities.nofstrippedbytes + delta - return delta - end - - local strip_code_pc - - if _MAJORVERSION == 5 and _MINORVERSION == 1 then - - strip_code_pc = function(dump,name) - local before = #dump - local version, format, endian, int, size, ins, num = byte(dump,5,11) - local subint - if endian == 1 then - subint = function(dump, i, l) - local val = 0 - for n = l, 1, -1 do - val = val * 256 + byte(dump,i + n - 1) - end - return val, i + l - end - else - subint = function(dump, i, l) - local val = 0 - for n = 1, l, 1 do - val = val * 256 + byte(dump,i + n - 1) - end - return val, i + l - end - end - local strip_function - strip_function = function(dump) - local count, offset = subint(dump, 1, size) - local stripped, dirty = rep("\0", size), offset + count - offset = offset + count + int * 2 + 4 - offset = offset + int + subint(dump, offset, int) * ins - count, offset = subint(dump, offset, int) - for n = 1, count do - local t - t, offset = subint(dump, offset, 1) - if t == 1 then - offset = offset + 1 - elseif t == 4 then - offset = offset + size + subint(dump, offset, size) - elseif t == 3 then - offset = offset + num - end - end - count, offset = subint(dump, offset, int) - stripped = stripped .. sub(dump,dirty, offset - 1) - for n = 1, count do - local proto, off = strip_function(sub(dump,offset, -1)) - stripped, offset = stripped .. proto, offset + off - 1 - end - offset = offset + subint(dump, offset, int) * int + int - count, offset = subint(dump, offset, int) - for n = 1, count do - offset = offset + subint(dump, offset, size) + size + int * 2 - end - count, offset = subint(dump, offset, int) - for n = 1, count do - offset = offset + subint(dump, offset, size) + size - end - stripped = stripped .. rep("\0", int * 3) - return stripped, offset - end - dump = sub(dump,1,12) .. strip_function(sub(dump,13,-1)) - local after = #dump - local delta = register(name,before,after) - return dump, delta - end - - else - - strip_code_pc = function(dump,name) - return dump, 0 - end - - end - - -- ... end of borrowed code. - - -- quite subtle ... doing this wrong incidentally can give more bytes - - function luautilities.loadedluacode(fullname,forcestrip,name) - -- quite subtle ... 
doing this wrong incidentally can give more bytes - local code = environment.loadpreprocessedfile and environment.preprocessedloadfile(fullname) or loadfile(fullname) - if code then - code() - end - if forcestrip and luautilities.stripcode then - if type(forcestrip) == "function" then - forcestrip = forcestrip(fullname) - end - if forcestrip then - local code, n = strip_code_pc(dump(code),name) - return load(code), n - elseif luautilities.alwaysstripcode then - return load(strip_code_pc(dump(code),name)) - else - return code, 0 - end - elseif luautilities.alwaysstripcode then - return load(strip_code_pc(dump(code),name)) - else - return code, 0 - end - end - - function luautilities.strippedloadstring(code,forcestrip,name) -- not executed - local n = 0 - if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then - code = load(code) - if not code then - report_lua("fatal error in file %a",name) - end - code, n = strip_code_pc(dump(code),name) - end - return load(code), n - end - - local function stupidcompile(luafile,lucfile,strip) - local code = io.loaddata(luafile) - local n = 0 - if code and code ~= "" then - code = load(code) - if not code then - report_lua("fatal error in file %a",luafile) - end - code = dump(code) - if strip then - code, n = strip_code_pc(code,luautilities.stripcode or luautilities.alwaysstripcode,luafile) -- last one is reported - end - if code and code ~= "" then - io.savedata(lucfile,code) - end - end - return n - end - - local luac_normal = "texluac -o %q %q" - local luac_strip = "texluac -s -o %q %q" - - function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true - report_lua("compiling %a into %a",luafile,lucfile) - os.remove(lucfile) - local done = false - if strip ~= false then - strip = true - end - if forcestupidcompile then - fallback = true - elseif strip then - done = os.spawn(format(luac_strip, lucfile,luafile)) == 0 - else - done = os.spawn(format(luac_normal,lucfile,luafile)) == 0 - end - if not done and fallback then - local n = stupidcompile(luafile,lucfile,strip) - if n > 0 then - report_lua("%a dumped into %a (%i bytes stripped)",luafile,lucfile,n) - else - report_lua("%a dumped into %a (unstripped)",luafile,lucfile) - end - cleanup = false -- better see how bad it is - done = true -- hm - end - if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then - report_lua("removing %a",luafile) - os.remove(luafile) - end - return done - end - - luautilities.loadstripped = loadstring - -end - --- local getmetatable, type = getmetatable, type --- --- local types = { } --- --- function luautilities.registerdatatype(d,name) --- types[getmetatable(d)] = name --- end --- --- function luautilities.datatype(d) --- local t = type(d) --- if t == "userdata" then --- local m = getmetatable(d) --- return m and types[m] or "userdata" --- else --- return t --- end --- end --- --- luautilities.registerdatatype(lpeg.P("!"),"lpeg") --- --- print(luautilities.datatype(lpeg.P("oeps"))) +if not modules then modules = { } end modules ['util-lua'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + comment = "the strip code is written by Peter Cawley", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- we will remove the 5.1 code some day soon + +local rep, sub, byte, dump, format = string.rep, string.sub, string.byte, string.dump, string.format +local load, 
loadfile, type = load, loadfile, type + +utilities = utilities or {} +utilities.lua = utilities.lua or { } +local luautilities = utilities.lua + +local report_lua = logs.reporter("system","lua") + +local tracestripping = false +local forcestupidcompile = true -- use internal bytecode compiler +luautilities.stripcode = true -- support stripping when asked for +luautilities.alwaysstripcode = false -- saves 1 meg on 7 meg compressed format file (2012.08.12) +luautilities.nofstrippedchunks = 0 +luautilities.nofstrippedbytes = 0 +local strippedchunks = { } -- allocate() +luautilities.strippedchunks = strippedchunks + +luautilities.suffixes = { + tma = "tma", + tmc = jit and "tmb" or "tmc", + lua = "lua", + luc = jit and "lub" or "luc", + lui = "lui", + luv = "luv", + luj = "luj", + tua = "tua", + tuc = "tuc", +} + +-- environment.loadpreprocessedfile can be set to a preprocessor + +if jit or status.luatex_version >= 74 then + + local function register(name) + if tracestripping then + report_lua("stripped bytecode from %a",name or "unknown") + end + strippedchunks[#strippedchunks+1] = name + luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1 + end + + local function stupidcompile(luafile,lucfile,strip) + local code = io.loaddata(luafile) + if code and code ~= "" then + code = load(code) + if code then + code = dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode) + if code and code ~= "" then + register(name) + io.savedata(lucfile,code) + return true, 0 + end + else + report_lua("fatal error %a in file %a",1,luafile) + end + else + report_lua("fatal error %a in file %a",2,luafile) + end + return false, 0 + end + + -- quite subtle ... doing this wrong incidentally can give more bytes + + function luautilities.loadedluacode(fullname,forcestrip,name) + -- quite subtle ... doing this wrong incidentally can give more bytes + name = name or fullname + local code = environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname) + if code then + code() + end + if forcestrip and luautilities.stripcode then + if type(forcestrip) == "function" then + forcestrip = forcestrip(fullname) + end + if forcestrip or luautilities.alwaysstripcode then + register(name) + return load(dump(code,true)), 0 + else + return code, 0 + end + elseif luautilities.alwaysstripcode then + register(name) + return load(dump(code,true)), 0 + else + return code, 0 + end + end + + function luautilities.strippedloadstring(code,forcestrip,name) -- not executed + if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then + code = load(code) + if not code then + report_lua("fatal error %a in file %a",3,name) + end + register(name) + code = dump(code,true) + end + return load(code), 0 + end + + function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true + report_lua("compiling %a into %a",luafile,lucfile) + os.remove(lucfile) + local done = stupidcompile(luafile,lucfile,strip ~= false) + if done then + report_lua("dumping %a into %a stripped",luafile,lucfile) + if cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then + report_lua("removing %a",luafile) + os.remove(luafile) + end + end + return done + end + + function luautilities.loadstripped(...) + local l = load(...) + if l then + return load(dump(l,true)) + end + end + +else + + -- The next function was posted by Peter Cawley on the lua list and strips line + -- number information etc. from the bytecode data blob. 
We only apply this trick + -- when we store data tables. Stripping makes the compressed format file about + -- 1MB smaller (and uncompressed we save at least 6MB). + -- + -- You can consider this feature an experiment, so it might disappear. There is + -- no noticeable gain in runtime although the memory footprint should be somewhat + -- smaller (and the file system has a bit less to deal with). + -- + -- Begin of borrowed code ... works for Lua 5.1 which LuaTeX currently uses ... + + local function register(name,before,after) + local delta = before - after + if tracestripping then + report_lua("bytecodes stripped from %a, # before %s, # after %s, delta %s",name,before,after,delta) + end + strippedchunks[#strippedchunks+1] = name + luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1 + luautilities.nofstrippedbytes = luautilities.nofstrippedbytes + delta + return delta + end + + local strip_code_pc + + if _MAJORVERSION == 5 and _MINORVERSION == 1 then + + strip_code_pc = function(dump,name) + local before = #dump + local version, format, endian, int, size, ins, num = byte(dump,5,11) + local subint + if endian == 1 then + subint = function(dump, i, l) + local val = 0 + for n = l, 1, -1 do + val = val * 256 + byte(dump,i + n - 1) + end + return val, i + l + end + else + subint = function(dump, i, l) + local val = 0 + for n = 1, l, 1 do + val = val * 256 + byte(dump,i + n - 1) + end + return val, i + l + end + end + local strip_function + strip_function = function(dump) + local count, offset = subint(dump, 1, size) + local stripped, dirty = rep("\0", size), offset + count + offset = offset + count + int * 2 + 4 + offset = offset + int + subint(dump, offset, int) * ins + count, offset = subint(dump, offset, int) + for n = 1, count do + local t + t, offset = subint(dump, offset, 1) + if t == 1 then + offset = offset + 1 + elseif t == 4 then + offset = offset + size + subint(dump, offset, size) + elseif t == 3 then + offset = offset + num + end + end + count, offset = subint(dump, offset, int) + stripped = stripped .. sub(dump,dirty, offset - 1) + for n = 1, count do + local proto, off = strip_function(sub(dump,offset, -1)) + stripped, offset = stripped .. proto, offset + off - 1 + end + offset = offset + subint(dump, offset, int) * int + int + count, offset = subint(dump, offset, int) + for n = 1, count do + offset = offset + subint(dump, offset, size) + size + int * 2 + end + count, offset = subint(dump, offset, int) + for n = 1, count do + offset = offset + subint(dump, offset, size) + size + end + stripped = stripped .. rep("\0", int * 3) + return stripped, offset + end + dump = sub(dump,1,12) .. strip_function(sub(dump,13,-1)) + local after = #dump + local delta = register(name,before,after) + return dump, delta + end + + else + + strip_code_pc = function(dump,name) + return dump, 0 + end + + end + + -- ... end of borrowed code. + + -- quite subtle ... doing this wrong incidentally can give more bytes + + function luautilities.loadedluacode(fullname,forcestrip,name) + -- quite subtle ... 
doing this wrong incidentally can give more bytes + local code = environment.loadpreprocessedfile and environment.preprocessedloadfile(fullname) or loadfile(fullname) + if code then + code() + end + if forcestrip and luautilities.stripcode then + if type(forcestrip) == "function" then + forcestrip = forcestrip(fullname) + end + if forcestrip then + local code, n = strip_code_pc(dump(code),name) + return load(code), n + elseif luautilities.alwaysstripcode then + return load(strip_code_pc(dump(code),name)) + else + return code, 0 + end + elseif luautilities.alwaysstripcode then + return load(strip_code_pc(dump(code),name)) + else + return code, 0 + end + end + + function luautilities.strippedloadstring(code,forcestrip,name) -- not executed + local n = 0 + if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then + code = load(code) + if not code then + report_lua("fatal error in file %a",name) + end + code, n = strip_code_pc(dump(code),name) + end + return load(code), n + end + + local function stupidcompile(luafile,lucfile,strip) + local code = io.loaddata(luafile) + local n = 0 + if code and code ~= "" then + code = load(code) + if not code then + report_lua("fatal error in file %a",luafile) + end + code = dump(code) + if strip then + code, n = strip_code_pc(code,luautilities.stripcode or luautilities.alwaysstripcode,luafile) -- last one is reported + end + if code and code ~= "" then + io.savedata(lucfile,code) + end + end + return n + end + + local luac_normal = "texluac -o %q %q" + local luac_strip = "texluac -s -o %q %q" + + function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true + report_lua("compiling %a into %a",luafile,lucfile) + os.remove(lucfile) + local done = false + if strip ~= false then + strip = true + end + if forcestupidcompile then + fallback = true + elseif strip then + done = os.spawn(format(luac_strip, lucfile,luafile)) == 0 + else + done = os.spawn(format(luac_normal,lucfile,luafile)) == 0 + end + if not done and fallback then + local n = stupidcompile(luafile,lucfile,strip) + if n > 0 then + report_lua("%a dumped into %a (%i bytes stripped)",luafile,lucfile,n) + else + report_lua("%a dumped into %a (unstripped)",luafile,lucfile) + end + cleanup = false -- better see how bad it is + done = true -- hm + end + if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then + report_lua("removing %a",luafile) + os.remove(luafile) + end + return done + end + + luautilities.loadstripped = loadstring + +end + +-- local getmetatable, type = getmetatable, type +-- +-- local types = { } +-- +-- function luautilities.registerdatatype(d,name) +-- types[getmetatable(d)] = name +-- end +-- +-- function luautilities.datatype(d) +-- local t = type(d) +-- if t == "userdata" then +-- local m = getmetatable(d) +-- return m and types[m] or "userdata" +-- else +-- return t +-- end +-- end +-- +-- luautilities.registerdatatype(lpeg.P("!"),"lpeg") +-- +-- print(luautilities.datatype(lpeg.P("oeps"))) diff --git a/tex/context/base/util-mrg.lua b/tex/context/base/util-mrg.lua index 690188ef8..c50ae8a75 100644 --- a/tex/context/base/util-mrg.lua +++ b/tex/context/base/util-mrg.lua @@ -1,228 +1,228 @@ -if not modules then modules = { } end modules ['util-mrg'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- hm, quite unreadable - 
-local gsub, format = string.gsub, string.format -local concat = table.concat -local type, next = type, next - -local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt, Cb, Cg = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt, lpeg.Cb, lpeg.Cg -local lpegmatch, patterns = lpeg.match, lpeg.patterns - -utilities = utilities or { } -local merger = utilities.merger or { } -utilities.merger = merger -merger.strip_comment = true - -local report = logs.reporter("system","merge") -utilities.report = report - -local m_begin_merge = "begin library merge" -local m_end_merge = "end library merge" -local m_begin_closure = "do -- create closure to overcome 200 locals limit" -local m_end_closure = "end -- of closure" - -local m_pattern = - "%c+" .. - "%-%-%s+" .. m_begin_merge .. - "%c+(.-)%c+" .. - "%-%-%s+" .. m_end_merge .. - "%c+" - -local m_format = - "\n\n-- " .. m_begin_merge .. - "\n%s\n" .. - "-- " .. m_end_merge .. "\n\n" - -local m_faked = - "-- " .. "created merged file" .. "\n\n" .. - "-- " .. m_begin_merge .. "\n\n" .. - "-- " .. m_end_merge .. "\n\n" - -local m_report = [[ --- used libraries : %s --- skipped libraries : %s --- original bytes : %s --- stripped bytes : %s -]] - -local m_preloaded = [[package.loaded[%q] = package.loaded[%q] or true]] - -local function self_fake() - return m_faked -end - -local function self_nothing() - return "" -end - -local function self_load(name) - local data = io.loaddata(name) or "" - if data == "" then - report("unknown file %a",name) - else - report("inserting file %a",name) - end - return data or "" -end - --- -- saves some 20K .. scite comments --- data = gsub(data,"%-%-~[^\n\r]*[\r\n]","") --- -- saves some 20K .. ldx comments --- data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-ldx%]%]%-%-","") - -local space = patterns.space -local eol = patterns.newline -local equals = P("=")^0 -local open = P("[") * Cg(equals,"init") * P("[") * P("\n")^-1 -local close = P("]") * C(equals) * P("]") -local closeeq = Cmt(close * Cb("init"), function(s,i,a,b) return a == b end) -local longstring = open * (1 - closeeq)^0 * close - -local quoted = patterns.quoted -local digit = patterns.digit -local emptyline = space^0 * eol -local operator1 = P("<=") + P(">=") + P("~=") + P("..") + S("/^<>=*+%%") -local operator2 = S("*+/") -local operator3 = S("-") -local operator4 = P("..") -local separator = S(",;") - -local ignore = (P("]") * space^1 * P("=") * space^1 * P("]")) / "]=[" + - (P("=") * space^1 * P("{")) / "={" + - (P("(") * space^1) / "(" + - (P("{") * (space+eol)^1 * P("}")) / "{}" -local strings = quoted -- / function (s) print("<<"..s..">>") return s end -local longcmt = (emptyline^0 * P("--") * longstring * emptyline^0) / "" -local longstr = longstring -local comment = emptyline^0 * P("--") * P("-")^0 * (1-eol)^0 * emptyline^1 / "\n" -local optionalspaces = space^0 / "" -local mandatespaces = space^1 / "" -local optionalspacing = (eol+space)^0 / "" -local mandatespacing = (eol+space)^1 / "" -local pack = digit * space^1 * operator4 * optionalspacing + - optionalspacing * operator1 * optionalspacing + - optionalspacing * operator2 * optionalspaces + - mandatespacing * operator3 * mandatespaces + - optionalspaces * separator * optionalspaces -local lines = emptyline^2 / "\n" -local spaces = (space * space) / " " ------ spaces = ((space+eol)^1 ) / " " - -local compact = Cs ( ( - ignore + - strings + - longcmt + - longstr + - comment + - pack + - lines + - spaces + - 1 -)^1 ) - -local strip = Cs((emptyline^2/"\n" + 1)^0) -local stripreturn = 
Cs((1-P("return") * space^1 * P(1-space-eol)^1 * (space+eol)^0 * P(-1))^1) - -function merger.compact(data) - return lpegmatch(strip,lpegmatch(compact,data)) -end - -local function self_compact(data) - local delta = 0 - if merger.strip_comment then - local before = #data - data = lpegmatch(compact,data) - data = lpegmatch(strip,data) -- also strips in longstrings ... alas - -- data = string.strip(data) - local after = #data - delta = before - after - report("original size %s, compacted to %s, stripped %s",before,after,delta) - data = format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data) - end - return lpegmatch(stripreturn,data) or data, delta -end - -local function self_save(name, data) - if data ~= "" then - io.savedata(name,data) - report("saving %s with size %s",name,#data) - end -end - -local function self_swap(data,code) - return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or "" -end - -local function self_libs(libs,list) - local result, f, frozen, foundpath = { }, nil, false, nil - result[#result+1] = "\n" - if type(libs) == 'string' then libs = { libs } end - if type(list) == 'string' then list = { list } end - for i=1,#libs do - local lib = libs[i] - for j=1,#list do - local pth = gsub(list[j],"\\","/") -- file.clean_path - report("checking library path %a",pth) - local name = pth .. "/" .. lib - if lfs.isfile(name) then - foundpath = pth - end - end - if foundpath then break end - end - if foundpath then - report("using library path %a",foundpath) - local right, wrong, original, stripped = { }, { }, 0, 0 - for i=1,#libs do - local lib = libs[i] - local fullname = foundpath .. "/" .. lib - if lfs.isfile(fullname) then - report("using library %a",fullname) - local preloaded = file.nameonly(lib) - local data = io.loaddata(fullname,true) - original = original + #data - local data, delta = self_compact(data) - right[#right+1] = lib - result[#result+1] = m_begin_closure - result[#result+1] = format(m_preloaded,preloaded,preloaded) - result[#result+1] = data - result[#result+1] = m_end_closure - stripped = stripped + delta - else - report("skipping library %a",fullname) - wrong[#wrong+1] = lib - end - end - right = #right > 0 and concat(right," ") or "-" - wrong = #wrong > 0 and concat(wrong," ") or "-" - report("used libraries: %a",right) - report("skipped libraries: %a",wrong) - report("original bytes: %a",original) - report("stripped bytes: %a",stripped) - result[#result+1] = format(m_report,right,wrong,original,stripped) - else - report("no valid library path found") - end - return concat(result, "\n\n") -end - -function merger.selfcreate(libs,list,target) - if target then - self_save(target,self_swap(self_fake(),self_libs(libs,list))) - end -end - -function merger.selfmerge(name,libs,list,target) - self_save(target or name,self_swap(self_load(name),self_libs(libs,list))) -end - -function merger.selfclean(name) - self_save(name,self_swap(self_load(name),self_nothing())) -end +if not modules then modules = { } end modules ['util-mrg'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- hm, quite unreadable + +local gsub, format = string.gsub, string.format +local concat = table.concat +local type, next = type, next + +local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt, Cb, Cg = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, 
lpeg.Cmt, lpeg.Cb, lpeg.Cg +local lpegmatch, patterns = lpeg.match, lpeg.patterns + +utilities = utilities or { } +local merger = utilities.merger or { } +utilities.merger = merger +merger.strip_comment = true + +local report = logs.reporter("system","merge") +utilities.report = report + +local m_begin_merge = "begin library merge" +local m_end_merge = "end library merge" +local m_begin_closure = "do -- create closure to overcome 200 locals limit" +local m_end_closure = "end -- of closure" + +local m_pattern = + "%c+" .. + "%-%-%s+" .. m_begin_merge .. + "%c+(.-)%c+" .. + "%-%-%s+" .. m_end_merge .. + "%c+" + +local m_format = + "\n\n-- " .. m_begin_merge .. + "\n%s\n" .. + "-- " .. m_end_merge .. "\n\n" + +local m_faked = + "-- " .. "created merged file" .. "\n\n" .. + "-- " .. m_begin_merge .. "\n\n" .. + "-- " .. m_end_merge .. "\n\n" + +local m_report = [[ +-- used libraries : %s +-- skipped libraries : %s +-- original bytes : %s +-- stripped bytes : %s +]] + +local m_preloaded = [[package.loaded[%q] = package.loaded[%q] or true]] + +local function self_fake() + return m_faked +end + +local function self_nothing() + return "" +end + +local function self_load(name) + local data = io.loaddata(name) or "" + if data == "" then + report("unknown file %a",name) + else + report("inserting file %a",name) + end + return data or "" +end + +-- -- saves some 20K .. scite comments +-- data = gsub(data,"%-%-~[^\n\r]*[\r\n]","") +-- -- saves some 20K .. ldx comments +-- data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-ldx%]%]%-%-","") + +local space = patterns.space +local eol = patterns.newline +local equals = P("=")^0 +local open = P("[") * Cg(equals,"init") * P("[") * P("\n")^-1 +local close = P("]") * C(equals) * P("]") +local closeeq = Cmt(close * Cb("init"), function(s,i,a,b) return a == b end) +local longstring = open * (1 - closeeq)^0 * close + +local quoted = patterns.quoted +local digit = patterns.digit +local emptyline = space^0 * eol +local operator1 = P("<=") + P(">=") + P("~=") + P("..") + S("/^<>=*+%%") +local operator2 = S("*+/") +local operator3 = S("-") +local operator4 = P("..") +local separator = S(",;") + +local ignore = (P("]") * space^1 * P("=") * space^1 * P("]")) / "]=[" + + (P("=") * space^1 * P("{")) / "={" + + (P("(") * space^1) / "(" + + (P("{") * (space+eol)^1 * P("}")) / "{}" +local strings = quoted -- / function (s) print("<<"..s..">>") return s end +local longcmt = (emptyline^0 * P("--") * longstring * emptyline^0) / "" +local longstr = longstring +local comment = emptyline^0 * P("--") * P("-")^0 * (1-eol)^0 * emptyline^1 / "\n" +local optionalspaces = space^0 / "" +local mandatespaces = space^1 / "" +local optionalspacing = (eol+space)^0 / "" +local mandatespacing = (eol+space)^1 / "" +local pack = digit * space^1 * operator4 * optionalspacing + + optionalspacing * operator1 * optionalspacing + + optionalspacing * operator2 * optionalspaces + + mandatespacing * operator3 * mandatespaces + + optionalspaces * separator * optionalspaces +local lines = emptyline^2 / "\n" +local spaces = (space * space) / " " +----- spaces = ((space+eol)^1 ) / " " + +local compact = Cs ( ( + ignore + + strings + + longcmt + + longstr + + comment + + pack + + lines + + spaces + + 1 +)^1 ) + +local strip = Cs((emptyline^2/"\n" + 1)^0) +local stripreturn = Cs((1-P("return") * space^1 * P(1-space-eol)^1 * (space+eol)^0 * P(-1))^1) + +function merger.compact(data) + return lpegmatch(strip,lpegmatch(compact,data)) +end + +local function self_compact(data) + local delta = 0 + if merger.strip_comment then 
+ local before = #data + data = lpegmatch(compact,data) + data = lpegmatch(strip,data) -- also strips in longstrings ... alas + -- data = string.strip(data) + local after = #data + delta = before - after + report("original size %s, compacted to %s, stripped %s",before,after,delta) + data = format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data) + end + return lpegmatch(stripreturn,data) or data, delta +end + +local function self_save(name, data) + if data ~= "" then + io.savedata(name,data) + report("saving %s with size %s",name,#data) + end +end + +local function self_swap(data,code) + return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or "" +end + +local function self_libs(libs,list) + local result, f, frozen, foundpath = { }, nil, false, nil + result[#result+1] = "\n" + if type(libs) == 'string' then libs = { libs } end + if type(list) == 'string' then list = { list } end + for i=1,#libs do + local lib = libs[i] + for j=1,#list do + local pth = gsub(list[j],"\\","/") -- file.clean_path + report("checking library path %a",pth) + local name = pth .. "/" .. lib + if lfs.isfile(name) then + foundpath = pth + end + end + if foundpath then break end + end + if foundpath then + report("using library path %a",foundpath) + local right, wrong, original, stripped = { }, { }, 0, 0 + for i=1,#libs do + local lib = libs[i] + local fullname = foundpath .. "/" .. lib + if lfs.isfile(fullname) then + report("using library %a",fullname) + local preloaded = file.nameonly(lib) + local data = io.loaddata(fullname,true) + original = original + #data + local data, delta = self_compact(data) + right[#right+1] = lib + result[#result+1] = m_begin_closure + result[#result+1] = format(m_preloaded,preloaded,preloaded) + result[#result+1] = data + result[#result+1] = m_end_closure + stripped = stripped + delta + else + report("skipping library %a",fullname) + wrong[#wrong+1] = lib + end + end + right = #right > 0 and concat(right," ") or "-" + wrong = #wrong > 0 and concat(wrong," ") or "-" + report("used libraries: %a",right) + report("skipped libraries: %a",wrong) + report("original bytes: %a",original) + report("stripped bytes: %a",stripped) + result[#result+1] = format(m_report,right,wrong,original,stripped) + else + report("no valid library path found") + end + return concat(result, "\n\n") +end + +function merger.selfcreate(libs,list,target) + if target then + self_save(target,self_swap(self_fake(),self_libs(libs,list))) + end +end + +function merger.selfmerge(name,libs,list,target) + self_save(target or name,self_swap(self_load(name),self_libs(libs,list))) +end + +function merger.selfclean(name) + self_save(name,self_swap(self_load(name),self_nothing())) +end diff --git a/tex/context/base/util-pck.lua b/tex/context/base/util-pck.lua index 7be5e8f42..fe9911946 100644 --- a/tex/context/base/util-pck.lua +++ b/tex/context/base/util-pck.lua @@ -1,144 +1,144 @@ -if not modules then modules = { } end modules ['util-pck'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- moved from core-uti - -local next, tostring, type = next, tostring, type -local sort, concat = table.sort, table.concat -local sortedhashkeys, sortedkeys = table.sortedhashkeys, table.sortedkeys - -utilities = utilities or { } -utilities.packers = utilities.packers or { } -local packers = utilities.packers 
-packers.version = 1.00 - -local function hashed(t) - local s, ns = { }, 0 - for k, v in next, t do - ns = ns + 1 - if type(v) == "table" then - s[ns] = k .. "={" .. hashed(v) .. "}" - else - s[ns] = k .. "=" .. tostring(v) - end - end - sort(s) - return concat(s,",") -end - -local function simplehashed(t) - local s, ns = { }, 0 - for k, v in next, t do - ns = ns + 1 - s[ns] = k .. "=" .. v - end - sort(s) - return concat(s,",") -end - -packers.hashed = hashed -packers.simplehashed = simplehashed - --- In luatex < 0.74 (lua 5.1) a next chain was the same for each run so no sort was needed, --- but in the latest greatest versions (lua 5.2) we really need to sort the keys in order --- not to get endless runs due to a difference in tuc files. - -local function pack(t,keys,hash,index) - if t then - -- for k, v in next, t do - -- local sk = sortedkeys(t) - local sk = sortedhashkeys(t) - for i=1,#sk do - local k = sk[i] - local v = t[k] - -- - if type(v) == "table" then - pack(v,keys,hash,index) - if keys[k] then - local h = hashed(v) - local i = hash[h] - if not i then - i = #index + 1 - index[i] = v - hash[h] = i - end - t[k] = i - end - end - end - end -end - -local function unpack(t,keys,index) - if t then - for k, v in next, t do - if keys[k] and type(v) == "number" then - local iv = index[v] - if iv then - v = iv - t[k] = v - end - end - if type(v) == "table" then - unpack(v,keys,index) - end - end - end -end - -function packers.new(keys,version) - return { - version = version or packers.version, - keys = table.tohash(keys), - hash = { }, - index = { }, - } -end - -function packers.pack(t,p,shared) - if shared then - pack(t,p.keys,p.hash,p.index) - elseif not t.packer then - pack(t,p.keys,p.hash,p.index) - if #p.index > 0 then - t.packer = { - version = p.version or packers.version, - keys = p.keys, - index = p.index, - } - end - p.hash = { } - p.index = { } - end -end - -function packers.unpack(t,p,shared) - if shared then - if p then - unpack(t,p.keys,p.index) - end - else - local tp = t.packer - if tp then - if tp.version == (p and p.version or packers.version) then - unpack(t,tp.keys,tp.index) - else - return false - end - t.packer = nil - end - end - return true -end - -function packers.strip(p) - p.hash = nil -end - --- We could have a packer.serialize where we first flush the shared table --- and then use inline a reference . This saves an unpack. +if not modules then modules = { } end modules ['util-pck'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- moved from core-uti + +local next, tostring, type = next, tostring, type +local sort, concat = table.sort, table.concat +local sortedhashkeys, sortedkeys = table.sortedhashkeys, table.sortedkeys + +utilities = utilities or { } +utilities.packers = utilities.packers or { } +local packers = utilities.packers +packers.version = 1.00 + +local function hashed(t) + local s, ns = { }, 0 + for k, v in next, t do + ns = ns + 1 + if type(v) == "table" then + s[ns] = k .. "={" .. hashed(v) .. "}" + else + s[ns] = k .. "=" .. tostring(v) + end + end + sort(s) + return concat(s,",") +end + +local function simplehashed(t) + local s, ns = { }, 0 + for k, v in next, t do + ns = ns + 1 + s[ns] = k .. "=" .. 
v + end + sort(s) + return concat(s,",") +end + +packers.hashed = hashed +packers.simplehashed = simplehashed + +-- In luatex < 0.74 (lua 5.1) a next chain was the same for each run so no sort was needed, +-- but in the latest greatest versions (lua 5.2) we really need to sort the keys in order +-- not to get endless runs due to a difference in tuc files. + +local function pack(t,keys,hash,index) + if t then + -- for k, v in next, t do + -- local sk = sortedkeys(t) + local sk = sortedhashkeys(t) + for i=1,#sk do + local k = sk[i] + local v = t[k] + -- + if type(v) == "table" then + pack(v,keys,hash,index) + if keys[k] then + local h = hashed(v) + local i = hash[h] + if not i then + i = #index + 1 + index[i] = v + hash[h] = i + end + t[k] = i + end + end + end + end +end + +local function unpack(t,keys,index) + if t then + for k, v in next, t do + if keys[k] and type(v) == "number" then + local iv = index[v] + if iv then + v = iv + t[k] = v + end + end + if type(v) == "table" then + unpack(v,keys,index) + end + end + end +end + +function packers.new(keys,version) + return { + version = version or packers.version, + keys = table.tohash(keys), + hash = { }, + index = { }, + } +end + +function packers.pack(t,p,shared) + if shared then + pack(t,p.keys,p.hash,p.index) + elseif not t.packer then + pack(t,p.keys,p.hash,p.index) + if #p.index > 0 then + t.packer = { + version = p.version or packers.version, + keys = p.keys, + index = p.index, + } + end + p.hash = { } + p.index = { } + end +end + +function packers.unpack(t,p,shared) + if shared then + if p then + unpack(t,p.keys,p.index) + end + else + local tp = t.packer + if tp then + if tp.version == (p and p.version or packers.version) then + unpack(t,tp.keys,tp.index) + else + return false + end + t.packer = nil + end + end + return true +end + +function packers.strip(p) + p.hash = nil +end + +-- We could have a packer.serialize where we first flush the shared table +-- and then use inline a reference . This saves an unpack. 
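Editor's aside, not part of the patch: the packer above replaces identical subtables by an index into a shared list, which keeps utility (tuc) files small and their content stable between runs. A minimal usage sketch follows; the key name "traits" and the sample data are invented for illustration, and it assumes the util-pck and table helpers shown above are loaded.

local packers = utilities.packers

local p = packers.new { "traits" }   -- only values stored under the key "traits" get shared

local t = {
    a = { traits = { color = "red",  size = 1 } },
    b = { traits = { color = "red",  size = 1 } },  -- identical to a.traits, so it reuses the same index slot
    c = { traits = { color = "blue", size = 2 } },
}

packers.pack(t,p)     -- duplicate "traits" tables become numbers, the originals move to t.packer.index
packers.unpack(t,p)   -- resolves the numbers back to tables and removes t.packer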
diff --git a/tex/context/base/util-prs.lua b/tex/context/base/util-prs.lua index cdf497588..ed1e32a99 100644 --- a/tex/context/base/util-prs.lua +++ b/tex/context/base/util-prs.lua @@ -1,593 +1,593 @@ -if not modules then modules = { } end modules ['util-prs'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local lpeg, table, string = lpeg, table, string -local P, R, V, S, C, Ct, Cs, Carg, Cc, Cg, Cf, Cp = lpeg.P, lpeg.R, lpeg.V, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Cp -local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns -local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find -local tostring, type, next, rawset = tostring, type, next, rawset - -utilities = utilities or {} -local parsers = utilities.parsers or { } -utilities.parsers = parsers -local patterns = parsers.patterns or { } -parsers.patterns = patterns - -local setmetatableindex = table.setmetatableindex -local sortedhash = table.sortedhash - --- we share some patterns - -local digit = R("09") -local space = P(' ') -local equal = P("=") -local comma = P(",") -local lbrace = P("{") -local rbrace = P("}") -local lparent = P("(") -local rparent = P(")") -local period = S(".") -local punctuation = S(".,:;") -local spacer = lpegpatterns.spacer -local whitespace = lpegpatterns.whitespace -local newline = lpegpatterns.newline -local anything = lpegpatterns.anything -local endofstring = lpegpatterns.endofstring - -local nobrace = 1 - ( lbrace + rbrace ) -local noparent = 1 - ( lparent + rparent) - --- we could use a Cf Cg construct - -local escape, left, right = P("\\"), P('{'), P('}') - -lpegpatterns.balanced = P { - [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0, - [2] = left * V(1) * right -} - -local nestedbraces = P { lbrace * (nobrace + V(1))^0 * rbrace } -local nestedparents = P { lparent * (noparent + V(1))^0 * rparent } -local spaces = space^0 -local argument = Cs((lbrace/"") * ((nobrace + nestedbraces)^0) * (rbrace/"")) -local content = (1-endofstring)^0 - -lpegpatterns.nestedbraces = nestedbraces -- no capture -lpegpatterns.nestedparents = nestedparents -- no capture -lpegpatterns.nested = nestedbraces -- no capture -lpegpatterns.argument = argument -- argument after e.g. 
= -lpegpatterns.content = content -- rest after e.g = - -local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) + C((nestedbraces + (1-comma))^0) - -local key = C((1-equal-comma)^1) -local pattern_a = (space+comma)^0 * (key * equal * value + key * C("")) -local pattern_c = (space+comma)^0 * (key * equal * value) - -local key = C((1-space-equal-comma)^1) -local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces * value) + C(""))) - --- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored - --- todo: rewrite to fold etc --- --- parse = lpeg.Cf(lpeg.Carg(1) * lpeg.Cg(key * equal * value) * separator^0,rawset)^0 -- lpeg.match(parse,"...",1,hash) - -local hash = { } - -local function set(key,value) - hash[key] = value -end - -local pattern_a_s = (pattern_a/set)^1 -local pattern_b_s = (pattern_b/set)^1 -local pattern_c_s = (pattern_c/set)^1 - -patterns.settings_to_hash_a = pattern_a_s -patterns.settings_to_hash_b = pattern_b_s -patterns.settings_to_hash_c = pattern_c_s - -function parsers.make_settings_to_hash_pattern(set,how) - if type(str) == "table" then - return set - elseif how == "strict" then - return (pattern_c/set)^1 - elseif how == "tolerant" then - return (pattern_b/set)^1 - else - return (pattern_a/set)^1 - end -end - -function parsers.settings_to_hash(str,existing) - if type(str) == "table" then - if existing then - for k, v in next, str do - existing[k] = v - end - return exiting - else - return str - end - elseif str and str ~= "" then - hash = existing or { } - lpegmatch(pattern_a_s,str) - return hash - else - return { } - end -end - -function parsers.settings_to_hash_tolerant(str,existing) - if type(str) == "table" then - if existing then - for k, v in next, str do - existing[k] = v - end - return exiting - else - return str - end - elseif str and str ~= "" then - hash = existing or { } - lpegmatch(pattern_b_s,str) - return hash - else - return { } - end -end - -function parsers.settings_to_hash_strict(str,existing) - if type(str) == "table" then - if existing then - for k, v in next, str do - existing[k] = v - end - return exiting - else - return str - end - elseif str and str ~= "" then - hash = existing or { } - lpegmatch(pattern_c_s,str) - return next(hash) and hash - else - return nil - end -end - -local separator = comma * space^0 -local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) - + C((nestedbraces + (1-comma))^0) -local pattern = spaces * Ct(value*(separator*value)^0) - --- "aap, {noot}, mies" : outer {} removes, leading spaces ignored - -patterns.settings_to_array = pattern - --- we could use a weak table as cache - -function parsers.settings_to_array(str,strict) - if type(str) == "table" then - return str - elseif not str or str == "" then - return { } - elseif strict then - if find(str,"{") then - return lpegmatch(pattern,str) - else - return { str } - end - else - return lpegmatch(pattern,str) - end -end - -local function set(t,v) - t[#t+1] = v -end - -local value = P(Carg(1)*value) / set -local pattern = value*(separator*value)^0 * Carg(1) - -function parsers.add_settings_to_array(t,str) - return lpegmatch(pattern,str,nil,t) -end - -function parsers.hash_to_string(h,separator,yes,no,strict,omit) - if h then - local t, tn, s = { }, 0, table.sortedkeys(h) - omit = omit and table.tohash(omit) - for i=1,#s do - local key = s[i] - if not omit or not omit[key] then - local value = h[key] - if type(value) == "boolean" then - if yes and no then - if value then - tn = tn + 1 - 
t[tn] = key .. '=' .. yes - elseif not strict then - tn = tn + 1 - t[tn] = key .. '=' .. no - end - elseif value or not strict then - tn = tn + 1 - t[tn] = key .. '=' .. tostring(value) - end - else - tn = tn + 1 - t[tn] = key .. '=' .. value - end - end - end - return concat(t,separator or ",") - else - return "" - end -end - -function parsers.array_to_string(a,separator) - if a then - return concat(a,separator or ",") - else - return "" - end -end - -function parsers.settings_to_set(str,t) -- tohash? -- todo: lpeg -- duplicate anyway - t = t or { } --- for s in gmatch(str,"%s*([^, ]+)") do -- space added - for s in gmatch(str,"[^, ]+") do -- space added - t[s] = true - end - return t -end - -function parsers.simple_hash_to_string(h, separator) - local t, tn = { }, 0 - for k, v in sortedhash(h) do - if v then - tn = tn + 1 - t[tn] = k - end - end - return concat(t,separator or ",") -end - --- for chem (currently one level) - -local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) - + C(digit^1 * lparent * (noparent + nestedparents)^1 * rparent) - + C((nestedbraces + (1-comma))^1) -local pattern_a = spaces * Ct(value*(separator*value)^0) - -local function repeater(n,str) - if not n then - return str - else - local s = lpegmatch(pattern_a,str) - if n == 1 then - return unpack(s) - else - local t, tn = { }, 0 - for i=1,n do - for j=1,#s do - tn = tn + 1 - t[tn] = s[j] - end - end - return unpack(t) - end - end -end - -local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) - + (C(digit^1)/tonumber * lparent * Cs((noparent + nestedparents)^1) * rparent) / repeater - + C((nestedbraces + (1-comma))^1) -local pattern_b = spaces * Ct(value*(separator*value)^0) - -function parsers.settings_to_array_with_repeat(str,expand) -- beware: "" => { } - if expand then - return lpegmatch(pattern_b,str) or { } - else - return lpegmatch(pattern_a,str) or { } - end -end - --- - -local value = lbrace * C((nobrace + nestedbraces)^0) * rbrace -local pattern = Ct((space + value)^0) - -function parsers.arguments_to_table(str) - return lpegmatch(pattern,str) -end - --- temporary here (unoptimized) - -function parsers.getparameters(self,class,parentclass,settings) - local sc = self[class] - if not sc then - sc = { } - self[class] = sc - if parentclass then - local sp = self[parentclass] - if not sp then - sp = { } - self[parentclass] = sp - end - setmetatableindex(sc,sp) - end - end - parsers.settings_to_hash(settings,sc) -end - -function parsers.listitem(str) - return gmatch(str,"[^, ]+") -end - --- - -local pattern = Cs { "start", - start = V("one") + V("two") + V("three"), - rest = (Cc(",") * V("thousand"))^0 * (P(".") + endofstring) * anything^0, - thousand = digit * digit * digit, - one = digit * V("rest"), - two = digit * digit * V("rest"), - three = V("thousand") * V("rest"), -} - -lpegpatterns.splitthousands = pattern -- maybe better in the parsers namespace ? - -function parsers.splitthousands(str) - return lpegmatch(pattern,str) or str -end - --- print(parsers.splitthousands("11111111111.11")) - -local optionalwhitespace = whitespace^0 - -lpegpatterns.words = Ct((Cs((1-punctuation-whitespace)^1) + anything)^1) -lpegpatterns.sentences = Ct((optionalwhitespace * Cs((1-period)^0 * period))^1) -lpegpatterns.paragraphs = Ct((optionalwhitespace * Cs((whitespace^1*endofstring/"" + 1 - (spacer^0*newline*newline))^1))^1) - --- local str = " Word1 word2. \n Word3 word4. 
\n\n Word5 word6.\n " --- inspect(lpegmatch(lpegpatterns.paragraphs,str)) --- inspect(lpegmatch(lpegpatterns.sentences,str)) --- inspect(lpegmatch(lpegpatterns.words,str)) - --- handy for k="v" [, ] k="v" - -local dquote = P('"') -local equal = P('=') -local escape = P('\\') -local separator = S(' ,') - -local key = C((1-equal)^1) -local value = dquote * C((1-dquote-escape*dquote)^0) * dquote - -local pattern = Cf(Ct("") * Cg(key * equal * value) * separator^0,rawset)^0 * P(-1) - -patterns.keq_to_hash_c = pattern - -function parsers.keq_to_hash(str) - if str and str ~= "" then - return lpegmatch(pattern,str) - else - return { } - end -end - --- inspect(lpeg.match(pattern,[[key="value"]])) - -local defaultspecification = { separator = ",", quote = '"' } - --- this version accepts multiple separators and quotes as used in the --- database module - -function parsers.csvsplitter(specification) - specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification - local separator = specification.separator - local quotechar = specification.quote - local separator = S(separator ~= "" and separator or ",") - local whatever = C((1 - separator - newline)^0) - if quotechar and quotechar ~= "" then - local quotedata = nil - for chr in gmatch(quotechar,".") do - local quotechar = P(chr) - local quoteword = quotechar * C((1 - quotechar)^0) * quotechar - if quotedata then - quotedata = quotedata + quoteword - else - quotedata = quoteword - end - end - whatever = quotedata + whatever - end - local parser = Ct((Ct(whatever * (separator * whatever)^0) * S("\n\r"))^0 ) - return function(data) - return lpegmatch(parser,data) - end -end - --- and this is a slightly patched version of a version posted by Philipp Gesang - --- local mycsvsplitter = utilities.parsers.rfc4180splitter() --- --- local crap = [[ --- first,second,third,fourth --- "1","2","3","4" --- "a","b","c","d" --- "foo","bar""baz","boogie","xyzzy" --- ]] --- --- local list, names = mycsvsplitter(crap,true) inspect(list) inspect(names) --- local list, names = mycsvsplitter(crap) inspect(list) inspect(names) - -function parsers.rfc4180splitter(specification) - specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification - local separator = specification.separator --> rfc: COMMA - local quotechar = P(specification.quote) --> DQUOTE - local dquotechar = quotechar * quotechar --> 2DQUOTE - / specification.quote - local separator = S(separator ~= "" and separator or ",") - local escaped = quotechar - * Cs((dquotechar + (1 - quotechar))^0) - * quotechar - local non_escaped = C((1 - quotechar - newline - separator)^1) - local field = escaped + non_escaped - local record = Ct((field * separator^-1)^1) - local headerline = record * Cp() - local wholeblob = Ct((newline^-1 * record)^0) - return function(data,getheader) - if getheader then - local header, position = lpegmatch(headerline,data) - local data = lpegmatch(wholeblob,data,position) - return data, header - else - return lpegmatch(wholeblob,data) - end - end -end - --- utilities.parsers.stepper("1,7-",9,function(i) print(">>>",i) end) --- utilities.parsers.stepper("1-3,7,8,9") --- utilities.parsers.stepper("1-3,6,7",function(i) print(">>>",i) end) --- utilities.parsers.stepper(" 1 : 3, ,7 ") --- utilities.parsers.stepper("1:4,9:13,24:*",30) - -local function ranger(first,last,n,action) - if not first then - -- forget about it - elseif last == true then - for i=first,n or first do - action(i) - 
end - elseif last then - for i=first,last do - action(i) - end - else - action(first) - end -end - -local cardinal = lpegpatterns.cardinal / tonumber -local spacers = lpegpatterns.spacer^0 -local endofstring = lpegpatterns.endofstring - -local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + Cc(true) ) + Cc(false) ) - * Carg(1) * Carg(2) / ranger * S(", ")^0 )^1 - -local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + (P("*") + endofstring) * Cc(true) ) + Cc(false) ) - * Carg(1) * Carg(2) / ranger * S(", ")^0 )^1 * endofstring -- we're sort of strict (could do without endofstring) - -function parsers.stepper(str,n,action) - if type(n) == "function" then - lpegmatch(stepper,str,1,false,n or print) - else - lpegmatch(stepper,str,1,n,action or print) - end -end - --- - -local pattern_math = Cs((P("%")/"\\percent " + P("^") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0) -local pattern_text = Cs((P("%")/"\\percent " + (P("^")/"\\high") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0) - -patterns.unittotex = pattern - -function parsers.unittotex(str,textmode) - return lpegmatch(textmode and pattern_text or pattern_math,str) -end - -local pattern = Cs((P("^") / "" * lpegpatterns.integer * Cc("") + P(1))^0) - -function parsers.unittoxml(str) - return lpegmatch(pattern,str) -end - --- print(utilities.parsers.unittotex("10^-32 %"),utilities.parsers.unittoxml("10^32 %")) - -local cache = { } -local spaces = lpeg.patterns.space^0 -local dummy = function() end - -table.setmetatableindex(cache,function(t,k) - local separator = P(k) - local value = (1-separator)^0 - local pattern = spaces * C(value) * separator^0 * Cp() - t[k] = pattern - return pattern -end) - -local commalistiterator = cache[","] - -function utilities.parsers.iterator(str,separator) - local n = #str - if n == 0 then - return dummy - else - local pattern = separator and cache[separator] or commalistiterator - local p = 1 - return function() - if p <= n then - local s, e = lpegmatch(pattern,str,p) - if e then - p = e - return s - end - end - end - end -end - --- for s in utilities.parsers.iterator("a b c,b,c") do --- print(s) --- end - -local function initialize(t,name) - local source = t[name] - if source then - local result = { } - for k, v in next, t[name] do - result[k] = v - end - return result - else - return { } - end -end - -local function fetch(t,name) - return t[name] or { } -end - -function process(result,more) - for k, v in next, more do - result[k] = v - end - return result -end - -local name = C((1-S(", "))^1) -local parser = (Carg(1) * name / initialize) * (S(", ")^1 * (Carg(1) * name / fetch))^0 -local merge = Cf(parser,process) - -function utilities.parsers.mergehashes(hash,list) - return lpegmatch(merge,list,1,hash) -end - --- local t = { --- aa = { alpha = 1, beta = 2, gamma = 3, }, --- bb = { alpha = 4, beta = 5, delta = 6, }, --- cc = { epsilon = 3 }, --- } --- --- inspect(utilities.parsers.mergehashes(t,"aa, bb, cc")) +if not modules then modules = { } end modules ['util-prs'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local lpeg, table, string = lpeg, table, string +local P, R, V, S, C, Ct, Cs, Carg, Cc, Cg, Cf, Cp = lpeg.P, lpeg.R, lpeg.V, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Cp +local lpegmatch, lpegpatterns = 
lpeg.match, lpeg.patterns +local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find +local tostring, type, next, rawset = tostring, type, next, rawset + +utilities = utilities or {} +local parsers = utilities.parsers or { } +utilities.parsers = parsers +local patterns = parsers.patterns or { } +parsers.patterns = patterns + +local setmetatableindex = table.setmetatableindex +local sortedhash = table.sortedhash + +-- we share some patterns + +local digit = R("09") +local space = P(' ') +local equal = P("=") +local comma = P(",") +local lbrace = P("{") +local rbrace = P("}") +local lparent = P("(") +local rparent = P(")") +local period = S(".") +local punctuation = S(".,:;") +local spacer = lpegpatterns.spacer +local whitespace = lpegpatterns.whitespace +local newline = lpegpatterns.newline +local anything = lpegpatterns.anything +local endofstring = lpegpatterns.endofstring + +local nobrace = 1 - ( lbrace + rbrace ) +local noparent = 1 - ( lparent + rparent) + +-- we could use a Cf Cg construct + +local escape, left, right = P("\\"), P('{'), P('}') + +lpegpatterns.balanced = P { + [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0, + [2] = left * V(1) * right +} + +local nestedbraces = P { lbrace * (nobrace + V(1))^0 * rbrace } +local nestedparents = P { lparent * (noparent + V(1))^0 * rparent } +local spaces = space^0 +local argument = Cs((lbrace/"") * ((nobrace + nestedbraces)^0) * (rbrace/"")) +local content = (1-endofstring)^0 + +lpegpatterns.nestedbraces = nestedbraces -- no capture +lpegpatterns.nestedparents = nestedparents -- no capture +lpegpatterns.nested = nestedbraces -- no capture +lpegpatterns.argument = argument -- argument after e.g. = +lpegpatterns.content = content -- rest after e.g = + +local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) + C((nestedbraces + (1-comma))^0) + +local key = C((1-equal-comma)^1) +local pattern_a = (space+comma)^0 * (key * equal * value + key * C("")) +local pattern_c = (space+comma)^0 * (key * equal * value) + +local key = C((1-space-equal-comma)^1) +local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces * value) + C(""))) + +-- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored + +-- todo: rewrite to fold etc +-- +-- parse = lpeg.Cf(lpeg.Carg(1) * lpeg.Cg(key * equal * value) * separator^0,rawset)^0 -- lpeg.match(parse,"...",1,hash) + +local hash = { } + +local function set(key,value) + hash[key] = value +end + +local pattern_a_s = (pattern_a/set)^1 +local pattern_b_s = (pattern_b/set)^1 +local pattern_c_s = (pattern_c/set)^1 + +patterns.settings_to_hash_a = pattern_a_s +patterns.settings_to_hash_b = pattern_b_s +patterns.settings_to_hash_c = pattern_c_s + +function parsers.make_settings_to_hash_pattern(set,how) + if type(str) == "table" then + return set + elseif how == "strict" then + return (pattern_c/set)^1 + elseif how == "tolerant" then + return (pattern_b/set)^1 + else + return (pattern_a/set)^1 + end +end + +function parsers.settings_to_hash(str,existing) + if type(str) == "table" then + if existing then + for k, v in next, str do + existing[k] = v + end + return exiting + else + return str + end + elseif str and str ~= "" then + hash = existing or { } + lpegmatch(pattern_a_s,str) + return hash + else + return { } + end +end + +function parsers.settings_to_hash_tolerant(str,existing) + if type(str) == "table" then + if existing then + for k, v in next, str do + existing[k] = v + end + return exiting + 
else + return str + end + elseif str and str ~= "" then + hash = existing or { } + lpegmatch(pattern_b_s,str) + return hash + else + return { } + end +end + +function parsers.settings_to_hash_strict(str,existing) + if type(str) == "table" then + if existing then + for k, v in next, str do + existing[k] = v + end + return exiting + else + return str + end + elseif str and str ~= "" then + hash = existing or { } + lpegmatch(pattern_c_s,str) + return next(hash) and hash + else + return nil + end +end + +local separator = comma * space^0 +local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) + + C((nestedbraces + (1-comma))^0) +local pattern = spaces * Ct(value*(separator*value)^0) + +-- "aap, {noot}, mies" : outer {} removes, leading spaces ignored + +patterns.settings_to_array = pattern + +-- we could use a weak table as cache + +function parsers.settings_to_array(str,strict) + if type(str) == "table" then + return str + elseif not str or str == "" then + return { } + elseif strict then + if find(str,"{") then + return lpegmatch(pattern,str) + else + return { str } + end + else + return lpegmatch(pattern,str) + end +end + +local function set(t,v) + t[#t+1] = v +end + +local value = P(Carg(1)*value) / set +local pattern = value*(separator*value)^0 * Carg(1) + +function parsers.add_settings_to_array(t,str) + return lpegmatch(pattern,str,nil,t) +end + +function parsers.hash_to_string(h,separator,yes,no,strict,omit) + if h then + local t, tn, s = { }, 0, table.sortedkeys(h) + omit = omit and table.tohash(omit) + for i=1,#s do + local key = s[i] + if not omit or not omit[key] then + local value = h[key] + if type(value) == "boolean" then + if yes and no then + if value then + tn = tn + 1 + t[tn] = key .. '=' .. yes + elseif not strict then + tn = tn + 1 + t[tn] = key .. '=' .. no + end + elseif value or not strict then + tn = tn + 1 + t[tn] = key .. '=' .. tostring(value) + end + else + tn = tn + 1 + t[tn] = key .. '=' .. value + end + end + end + return concat(t,separator or ",") + else + return "" + end +end + +function parsers.array_to_string(a,separator) + if a then + return concat(a,separator or ",") + else + return "" + end +end + +function parsers.settings_to_set(str,t) -- tohash? 
-- todo: lpeg -- duplicate anyway + t = t or { } +-- for s in gmatch(str,"%s*([^, ]+)") do -- space added + for s in gmatch(str,"[^, ]+") do -- space added + t[s] = true + end + return t +end + +function parsers.simple_hash_to_string(h, separator) + local t, tn = { }, 0 + for k, v in sortedhash(h) do + if v then + tn = tn + 1 + t[tn] = k + end + end + return concat(t,separator or ",") +end + +-- for chem (currently one level) + +local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) + + C(digit^1 * lparent * (noparent + nestedparents)^1 * rparent) + + C((nestedbraces + (1-comma))^1) +local pattern_a = spaces * Ct(value*(separator*value)^0) + +local function repeater(n,str) + if not n then + return str + else + local s = lpegmatch(pattern_a,str) + if n == 1 then + return unpack(s) + else + local t, tn = { }, 0 + for i=1,n do + for j=1,#s do + tn = tn + 1 + t[tn] = s[j] + end + end + return unpack(t) + end + end +end + +local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) + + (C(digit^1)/tonumber * lparent * Cs((noparent + nestedparents)^1) * rparent) / repeater + + C((nestedbraces + (1-comma))^1) +local pattern_b = spaces * Ct(value*(separator*value)^0) + +function parsers.settings_to_array_with_repeat(str,expand) -- beware: "" => { } + if expand then + return lpegmatch(pattern_b,str) or { } + else + return lpegmatch(pattern_a,str) or { } + end +end + +-- + +local value = lbrace * C((nobrace + nestedbraces)^0) * rbrace +local pattern = Ct((space + value)^0) + +function parsers.arguments_to_table(str) + return lpegmatch(pattern,str) +end + +-- temporary here (unoptimized) + +function parsers.getparameters(self,class,parentclass,settings) + local sc = self[class] + if not sc then + sc = { } + self[class] = sc + if parentclass then + local sp = self[parentclass] + if not sp then + sp = { } + self[parentclass] = sp + end + setmetatableindex(sc,sp) + end + end + parsers.settings_to_hash(settings,sc) +end + +function parsers.listitem(str) + return gmatch(str,"[^, ]+") +end + +-- + +local pattern = Cs { "start", + start = V("one") + V("two") + V("three"), + rest = (Cc(",") * V("thousand"))^0 * (P(".") + endofstring) * anything^0, + thousand = digit * digit * digit, + one = digit * V("rest"), + two = digit * digit * V("rest"), + three = V("thousand") * V("rest"), +} + +lpegpatterns.splitthousands = pattern -- maybe better in the parsers namespace ? + +function parsers.splitthousands(str) + return lpegmatch(pattern,str) or str +end + +-- print(parsers.splitthousands("11111111111.11")) + +local optionalwhitespace = whitespace^0 + +lpegpatterns.words = Ct((Cs((1-punctuation-whitespace)^1) + anything)^1) +lpegpatterns.sentences = Ct((optionalwhitespace * Cs((1-period)^0 * period))^1) +lpegpatterns.paragraphs = Ct((optionalwhitespace * Cs((whitespace^1*endofstring/"" + 1 - (spacer^0*newline*newline))^1))^1) + +-- local str = " Word1 word2. \n Word3 word4. 
\n\n Word5 word6.\n " +-- inspect(lpegmatch(lpegpatterns.paragraphs,str)) +-- inspect(lpegmatch(lpegpatterns.sentences,str)) +-- inspect(lpegmatch(lpegpatterns.words,str)) + +-- handy for k="v" [, ] k="v" + +local dquote = P('"') +local equal = P('=') +local escape = P('\\') +local separator = S(' ,') + +local key = C((1-equal)^1) +local value = dquote * C((1-dquote-escape*dquote)^0) * dquote + +local pattern = Cf(Ct("") * Cg(key * equal * value) * separator^0,rawset)^0 * P(-1) + +patterns.keq_to_hash_c = pattern + +function parsers.keq_to_hash(str) + if str and str ~= "" then + return lpegmatch(pattern,str) + else + return { } + end +end + +-- inspect(lpeg.match(pattern,[[key="value"]])) + +local defaultspecification = { separator = ",", quote = '"' } + +-- this version accepts multiple separators and quotes as used in the +-- database module + +function parsers.csvsplitter(specification) + specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification + local separator = specification.separator + local quotechar = specification.quote + local separator = S(separator ~= "" and separator or ",") + local whatever = C((1 - separator - newline)^0) + if quotechar and quotechar ~= "" then + local quotedata = nil + for chr in gmatch(quotechar,".") do + local quotechar = P(chr) + local quoteword = quotechar * C((1 - quotechar)^0) * quotechar + if quotedata then + quotedata = quotedata + quoteword + else + quotedata = quoteword + end + end + whatever = quotedata + whatever + end + local parser = Ct((Ct(whatever * (separator * whatever)^0) * S("\n\r"))^0 ) + return function(data) + return lpegmatch(parser,data) + end +end + +-- and this is a slightly patched version of a version posted by Philipp Gesang + +-- local mycsvsplitter = utilities.parsers.rfc4180splitter() +-- +-- local crap = [[ +-- first,second,third,fourth +-- "1","2","3","4" +-- "a","b","c","d" +-- "foo","bar""baz","boogie","xyzzy" +-- ]] +-- +-- local list, names = mycsvsplitter(crap,true) inspect(list) inspect(names) +-- local list, names = mycsvsplitter(crap) inspect(list) inspect(names) + +function parsers.rfc4180splitter(specification) + specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification + local separator = specification.separator --> rfc: COMMA + local quotechar = P(specification.quote) --> DQUOTE + local dquotechar = quotechar * quotechar --> 2DQUOTE + / specification.quote + local separator = S(separator ~= "" and separator or ",") + local escaped = quotechar + * Cs((dquotechar + (1 - quotechar))^0) + * quotechar + local non_escaped = C((1 - quotechar - newline - separator)^1) + local field = escaped + non_escaped + local record = Ct((field * separator^-1)^1) + local headerline = record * Cp() + local wholeblob = Ct((newline^-1 * record)^0) + return function(data,getheader) + if getheader then + local header, position = lpegmatch(headerline,data) + local data = lpegmatch(wholeblob,data,position) + return data, header + else + return lpegmatch(wholeblob,data) + end + end +end + +-- utilities.parsers.stepper("1,7-",9,function(i) print(">>>",i) end) +-- utilities.parsers.stepper("1-3,7,8,9") +-- utilities.parsers.stepper("1-3,6,7",function(i) print(">>>",i) end) +-- utilities.parsers.stepper(" 1 : 3, ,7 ") +-- utilities.parsers.stepper("1:4,9:13,24:*",30) + +local function ranger(first,last,n,action) + if not first then + -- forget about it + elseif last == true then + for i=first,n or first do + action(i) + 
end + elseif last then + for i=first,last do + action(i) + end + else + action(first) + end +end + +local cardinal = lpegpatterns.cardinal / tonumber +local spacers = lpegpatterns.spacer^0 +local endofstring = lpegpatterns.endofstring + +local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + Cc(true) ) + Cc(false) ) + * Carg(1) * Carg(2) / ranger * S(", ")^0 )^1 + +local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + (P("*") + endofstring) * Cc(true) ) + Cc(false) ) + * Carg(1) * Carg(2) / ranger * S(", ")^0 )^1 * endofstring -- we're sort of strict (could do without endofstring) + +function parsers.stepper(str,n,action) + if type(n) == "function" then + lpegmatch(stepper,str,1,false,n or print) + else + lpegmatch(stepper,str,1,n,action or print) + end +end + +-- + +local pattern_math = Cs((P("%")/"\\percent " + P("^") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0) +local pattern_text = Cs((P("%")/"\\percent " + (P("^")/"\\high") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0) + +patterns.unittotex = pattern + +function parsers.unittotex(str,textmode) + return lpegmatch(textmode and pattern_text or pattern_math,str) +end + +local pattern = Cs((P("^") / "" * lpegpatterns.integer * Cc("") + P(1))^0) + +function parsers.unittoxml(str) + return lpegmatch(pattern,str) +end + +-- print(utilities.parsers.unittotex("10^-32 %"),utilities.parsers.unittoxml("10^32 %")) + +local cache = { } +local spaces = lpeg.patterns.space^0 +local dummy = function() end + +table.setmetatableindex(cache,function(t,k) + local separator = P(k) + local value = (1-separator)^0 + local pattern = spaces * C(value) * separator^0 * Cp() + t[k] = pattern + return pattern +end) + +local commalistiterator = cache[","] + +function utilities.parsers.iterator(str,separator) + local n = #str + if n == 0 then + return dummy + else + local pattern = separator and cache[separator] or commalistiterator + local p = 1 + return function() + if p <= n then + local s, e = lpegmatch(pattern,str,p) + if e then + p = e + return s + end + end + end + end +end + +-- for s in utilities.parsers.iterator("a b c,b,c") do +-- print(s) +-- end + +local function initialize(t,name) + local source = t[name] + if source then + local result = { } + for k, v in next, t[name] do + result[k] = v + end + return result + else + return { } + end +end + +local function fetch(t,name) + return t[name] or { } +end + +function process(result,more) + for k, v in next, more do + result[k] = v + end + return result +end + +local name = C((1-S(", "))^1) +local parser = (Carg(1) * name / initialize) * (S(", ")^1 * (Carg(1) * name / fetch))^0 +local merge = Cf(parser,process) + +function utilities.parsers.mergehashes(hash,list) + return lpegmatch(merge,list,1,hash) +end + +-- local t = { +-- aa = { alpha = 1, beta = 2, gamma = 3, }, +-- bb = { alpha = 4, beta = 5, delta = 6, }, +-- cc = { epsilon = 3 }, +-- } +-- +-- inspect(utilities.parsers.mergehashes(t,"aa, bb, cc")) diff --git a/tex/context/base/util-ran.lua b/tex/context/base/util-ran.lua index 50d0a7082..7e97be2e6 100644 --- a/tex/context/base/util-ran.lua +++ b/tex/context/base/util-ran.lua @@ -1,107 +1,107 @@ -if not modules then modules = { } end modules ['util-ran'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local random = math.random -local concat = 
table.concat -local sub, upper = string.sub, string.upper - -local randomizers = utilities.randomizers or { } -utilities.randomizers = randomizers - -local l_one = "bcdfghjklmnpqrstvwxz" -local l_two = "aeiouy" - -local u_one = upper(l_one) -local u_two = upper(l_two) - -local n_one = #l_one -local n_two = #l_two - -function randomizers.word(min,max,separator) - local t = { } - for i=1,random(min,max) do - if i % 2 == 0 then - local r = random(1,n_one) - t[i] = sub(l_one,r,r) - else - local r = random(1,n_two) - t[i] = sub(l_two,r,r) - end - end - return concat(t,separator) -end - -function randomizers.initials(min,max) - if not min then - if not max then - min, max = 1, 3 - else - min, max = 1, min - end - elseif not max then - max = min - end - local t = { } - local n = random(min or 1,max or 3) - local m = 0 - for i=1,n do - m = m + 1 - if i % 2 == 0 then - local r = random(1,n_one) - t[m] = sub(u_one,r,r) - else - local r = random(1,n_two) - t[m] = sub(u_two,r,r) - end - m = m + 1 - t[m] = "." - end - return concat(t) -end - -function randomizers.firstname(min,max) - if not min then - if not max then - min, max = 3, 10 - else - min, max = 1, min - end - elseif not max then - max = min - end - local t = { } - local n = random(min,max) - local b = true - if n % 2 == 0 then - local r = random(1,n_two) - t[1] = sub(u_two,r,r) - b = true - else - local r = random(1,n_one) - t[1] = sub(u_one,r,r) - b = false - end - for i=2,n do - if b then - local r = random(1,n_one) - t[i] = sub(l_one,r,r) - b = false - else - local r = random(1,n_two) - t[i] = sub(l_two,r,r) - b = true - end - end - return concat(t,separator) -end - -randomizers.surname = randomizers.firstname - --- for i=1,10 do --- print(randomizers.initials(1,3),randomizers.firstname(5,10),randomizers.surname(5,15)) --- end +if not modules then modules = { } end modules ['util-ran'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local random = math.random +local concat = table.concat +local sub, upper = string.sub, string.upper + +local randomizers = utilities.randomizers or { } +utilities.randomizers = randomizers + +local l_one = "bcdfghjklmnpqrstvwxz" +local l_two = "aeiouy" + +local u_one = upper(l_one) +local u_two = upper(l_two) + +local n_one = #l_one +local n_two = #l_two + +function randomizers.word(min,max,separator) + local t = { } + for i=1,random(min,max) do + if i % 2 == 0 then + local r = random(1,n_one) + t[i] = sub(l_one,r,r) + else + local r = random(1,n_two) + t[i] = sub(l_two,r,r) + end + end + return concat(t,separator) +end + +function randomizers.initials(min,max) + if not min then + if not max then + min, max = 1, 3 + else + min, max = 1, min + end + elseif not max then + max = min + end + local t = { } + local n = random(min or 1,max or 3) + local m = 0 + for i=1,n do + m = m + 1 + if i % 2 == 0 then + local r = random(1,n_one) + t[m] = sub(u_one,r,r) + else + local r = random(1,n_two) + t[m] = sub(u_two,r,r) + end + m = m + 1 + t[m] = "." 
+ end + return concat(t) +end + +function randomizers.firstname(min,max) + if not min then + if not max then + min, max = 3, 10 + else + min, max = 1, min + end + elseif not max then + max = min + end + local t = { } + local n = random(min,max) + local b = true + if n % 2 == 0 then + local r = random(1,n_two) + t[1] = sub(u_two,r,r) + b = true + else + local r = random(1,n_one) + t[1] = sub(u_one,r,r) + b = false + end + for i=2,n do + if b then + local r = random(1,n_one) + t[i] = sub(l_one,r,r) + b = false + else + local r = random(1,n_two) + t[i] = sub(l_two,r,r) + b = true + end + end + return concat(t,separator) +end + +randomizers.surname = randomizers.firstname + +-- for i=1,10 do +-- print(randomizers.initials(1,3),randomizers.firstname(5,10),randomizers.surname(5,15)) +-- end diff --git a/tex/context/base/util-seq.lua b/tex/context/base/util-seq.lua index 27f95f0ee..1b56bbdba 100644 --- a/tex/context/base/util-seq.lua +++ b/tex/context/base/util-seq.lua @@ -1,330 +1,330 @@ -if not modules then modules = { } end modules ['util-seq'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

    Here we implement a mechanism for chaining the special functions -that we use in ConTeXt to deal with mode list processing. We -assume that namespaces for the functions are used, but for speed we -use locals to refer to them when compiling the chain.

    ---ldx]]-- - --- todo: delayed: i.e. we register them in the right order already but delay usage - --- todo: protect groups (as in tasks) - -local format, gsub, concat, gmatch = string.format, string.gsub, table.concat, string.gmatch -local type, load = type, load - -utilities = utilities or { } -local tables = utilities.tables -local allocate = utilities.storage.allocate - -local sequencers = { } -utilities.sequencers = sequencers - -local functions = allocate() -sequencers.functions = functions - -local removevalue = tables.removevalue -local insertaftervalue = tables.insertaftervalue -local insertbeforevalue = tables.insertbeforevalue - -local function validaction(action) - if type(action) == "string" then - local g = _G - for str in gmatch(action,"[^%.]+") do - g = g[str] - if not g then - return false - end - end - end - return true -end - -local compile - -local known = { } -- just a convenience, in case we want public access (only to a few methods) - -function sequencers.new(t) -- was reset - local s = { - list = { }, - order = { }, - kind = { }, - askip = { }, - gskip = { }, - dirty = true, - runner = nil, - } - if t then - s.arguments = t.arguments - s.returnvalues = t.returnvalues - s.results = t.results - local name = t.name - if name and name ~= "" then - s.name = name - known[name] = s - end - end - table.setmetatableindex(s,function(t,k) - -- this will automake a dirty runner - if k == "runner" then - local v = compile(t,t.compiler) - return v - end - end) - known[s] = s -- saves test for string later on - return s -end - -function sequencers.prependgroup(t,group,where) - t = known[t] - if t then - local order = t.order - removevalue(order,group) - insertbeforevalue(order,where,group) - t.list[group] = { } - t.dirty = true - t.runner = nil - end -end - -function sequencers.appendgroup(t,group,where) - t = known[t] - if t then - local order = t.order - removevalue(order,group) - insertaftervalue(order,where,group) - t.list[group] = { } - t.dirty = true - t.runner = nil - end -end - -function sequencers.prependaction(t,group,action,where,kind,force) - t = known[t] - if t then - local g = t.list[group] - if g and (force or validaction(action)) then - removevalue(g,action) - insertbeforevalue(g,where,action) - t.kind[action] = kind - t.dirty = true - t.runner = nil - end - end -end - -function sequencers.appendaction(t,group,action,where,kind,force) - t = known[t] - if t then - local g = t.list[group] - if g and (force or validaction(action)) then - removevalue(g,action) - insertaftervalue(g,where,action) - t.kind[action] = kind - t.dirty = true - t.runner = nil - end - end -end - -function sequencers.enableaction(t,action) - t = known[t] - if t then - t.askip[action] = false - t.dirty = true - t.runner = nil - end -end - -function sequencers.disableaction(t,action) - t = known[t] - if t then - t.askip[action] = true - t.dirty = true - t.runner = nil - end -end - -function sequencers.enablegroup(t,group) - t = known[t] - if t then - t.gskip[action] = false - t.dirty = true - t.runner = nil - end -end - -function sequencers.disablegroup(t,group) - t = known[t] - if t then - t.gskip[action] = true - t.dirty = true - t.runner = nil - end -end - -function sequencers.setkind(t,action,kind) - t = known[t] - if t then - t.kind[action] = kind - t.dirty = true - t.runner = nil - end -end - -function sequencers.removeaction(t,group,action,force) - t = known[t] - local g = t and t.list[group] - if g and (force or validaction(action)) then - removevalue(g,action) - t.dirty = true - 
t.runner = nil - end -end - -local function localize(str) - return (gsub(str,"[%.: ]+","_")) -end - -local function construct(t) - local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip - local arguments, returnvalues, results = t.arguments or "...", t.returnvalues, t.results - local variables, calls, n = { }, { }, 0 - for i=1,#order do - local group = order[i] - if not gskip[group] then - local actions = list[group] - for i=1,#actions do - local action = actions[i] - if not askip[action] then - if type(action) == "function" then - local name = localize(tostring(action)) - functions[name] = action - action = format("utilities.sequencers.functions.%s",name) - end - local localized = localize(action) - n = n + 1 - variables[n] = format("local %s = %s",localized,action) - if not returnvalues then - calls[n] = format("%s(%s)",localized,arguments) - elseif n == 1 then - calls[n] = format("local %s = %s(%s)",returnvalues,localized,arguments) - else - calls[n] = format("%s = %s(%s)",returnvalues,localized,arguments) - end - end - end - end - end - t.dirty = false - if n == 0 then - t.compiled = "" - else - variables = concat(variables,"\n") - calls = concat(calls,"\n") - if results then - t.compiled = format("%s\nreturn function(%s)\n%s\nreturn %s\nend",variables,arguments,calls,results) - else - t.compiled = format("%s\nreturn function(%s)\n%s\nend",variables,arguments,calls) - end - end --- print(t.compiled) - return t.compiled -- also stored so that we can trace -end - -sequencers.tostring = construct -sequencers.localize = localize - -compile = function(t,compiler,n) -- already referred to in sequencers.new - local compiled - if not t or type(t) == "string" then - -- weird ... t.compiled = t .. so - return false - end - if compiler then - compiled = compiler(t,n) - t.compiled = compiled - else - compiled = construct(t,n) - end - local runner - if compiled == "" then - runner = false - else - runner = compiled and load(compiled)() -- we can use loadstripped here - end - t.runner = runner - return runner -end - -sequencers.compile = compile - --- we used to deal with tail as well but now that the lists are always --- double linked and the kernel function no longer expect tail as --- argument we stick to head and done (done can probably also go --- as luatex deals with return values efficiently now .. in the --- past there was some copying involved, but no longer) - --- todo: use sequencer (can have arguments and returnvalues etc now) - -local template_yes = [[ -%s -return function(head%s) - local ok, done = false, false -%s - return head, done -end]] - -local template_nop = [[ -return function() - return false, false -end]] - -function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug into tostring - local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip - local vars, calls, args, n = { }, { }, nil, 0 - if nofarguments == 0 then - args = "" - elseif nofarguments == 1 then - args = ",one" - elseif nofarguments == 2 then - args = ",one,two" - elseif nofarguments == 3 then - args = ",one,two,three" - elseif nofarguments == 4 then - args = ",one,two,three,four" - elseif nofarguments == 5 then - args = ",one,two,three,four,five" - else - args = ",..." 
- end - for i=1,#order do - local group = order[i] - if not gskip[group] then - local actions = list[group] - for i=1,#actions do - local action = actions[i] - if not askip[action] then - local localized = localize(action) - n = n + 1 - vars[n] = format("local %s = %s",localized,action) - -- only difference with tostring is kind and rets (why no return) - if kind[action] == "nohead" then - calls[n] = format(" ok = %s(head%s) done = done or ok",localized,args) - else - calls[n] = format(" head, ok = %s(head%s) done = done or ok",localized,args) - end - end - end - end - end - local processor = #calls > 0 and format(template_yes,concat(vars,"\n"),args,concat(calls,"\n")) or template_nop - return processor -end +if not modules then modules = { } end modules ['util-seq'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

    Here we implement a mechanism for chaining the special functions +that we use in ConTeXt to deal with mode list processing. We +assume that namespaces for the functions are used, but for speed we +use locals to refer to them when compiling the chain.

    +--ldx]]-- + +-- todo: delayed: i.e. we register them in the right order already but delay usage + +-- todo: protect groups (as in tasks) + +local format, gsub, concat, gmatch = string.format, string.gsub, table.concat, string.gmatch +local type, load = type, load + +utilities = utilities or { } +local tables = utilities.tables +local allocate = utilities.storage.allocate + +local sequencers = { } +utilities.sequencers = sequencers + +local functions = allocate() +sequencers.functions = functions + +local removevalue = tables.removevalue +local insertaftervalue = tables.insertaftervalue +local insertbeforevalue = tables.insertbeforevalue + +local function validaction(action) + if type(action) == "string" then + local g = _G + for str in gmatch(action,"[^%.]+") do + g = g[str] + if not g then + return false + end + end + end + return true +end + +local compile + +local known = { } -- just a convenience, in case we want public access (only to a few methods) + +function sequencers.new(t) -- was reset + local s = { + list = { }, + order = { }, + kind = { }, + askip = { }, + gskip = { }, + dirty = true, + runner = nil, + } + if t then + s.arguments = t.arguments + s.returnvalues = t.returnvalues + s.results = t.results + local name = t.name + if name and name ~= "" then + s.name = name + known[name] = s + end + end + table.setmetatableindex(s,function(t,k) + -- this will automake a dirty runner + if k == "runner" then + local v = compile(t,t.compiler) + return v + end + end) + known[s] = s -- saves test for string later on + return s +end + +function sequencers.prependgroup(t,group,where) + t = known[t] + if t then + local order = t.order + removevalue(order,group) + insertbeforevalue(order,where,group) + t.list[group] = { } + t.dirty = true + t.runner = nil + end +end + +function sequencers.appendgroup(t,group,where) + t = known[t] + if t then + local order = t.order + removevalue(order,group) + insertaftervalue(order,where,group) + t.list[group] = { } + t.dirty = true + t.runner = nil + end +end + +function sequencers.prependaction(t,group,action,where,kind,force) + t = known[t] + if t then + local g = t.list[group] + if g and (force or validaction(action)) then + removevalue(g,action) + insertbeforevalue(g,where,action) + t.kind[action] = kind + t.dirty = true + t.runner = nil + end + end +end + +function sequencers.appendaction(t,group,action,where,kind,force) + t = known[t] + if t then + local g = t.list[group] + if g and (force or validaction(action)) then + removevalue(g,action) + insertaftervalue(g,where,action) + t.kind[action] = kind + t.dirty = true + t.runner = nil + end + end +end + +function sequencers.enableaction(t,action) + t = known[t] + if t then + t.askip[action] = false + t.dirty = true + t.runner = nil + end +end + +function sequencers.disableaction(t,action) + t = known[t] + if t then + t.askip[action] = true + t.dirty = true + t.runner = nil + end +end + +function sequencers.enablegroup(t,group) + t = known[t] + if t then + t.gskip[action] = false + t.dirty = true + t.runner = nil + end +end + +function sequencers.disablegroup(t,group) + t = known[t] + if t then + t.gskip[action] = true + t.dirty = true + t.runner = nil + end +end + +function sequencers.setkind(t,action,kind) + t = known[t] + if t then + t.kind[action] = kind + t.dirty = true + t.runner = nil + end +end + +function sequencers.removeaction(t,group,action,force) + t = known[t] + local g = t and t.list[group] + if g and (force or validaction(action)) then + removevalue(g,action) + t.dirty = true + 
t.runner = nil + end +end + +local function localize(str) + return (gsub(str,"[%.: ]+","_")) +end + +local function construct(t) + local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip + local arguments, returnvalues, results = t.arguments or "...", t.returnvalues, t.results + local variables, calls, n = { }, { }, 0 + for i=1,#order do + local group = order[i] + if not gskip[group] then + local actions = list[group] + for i=1,#actions do + local action = actions[i] + if not askip[action] then + if type(action) == "function" then + local name = localize(tostring(action)) + functions[name] = action + action = format("utilities.sequencers.functions.%s",name) + end + local localized = localize(action) + n = n + 1 + variables[n] = format("local %s = %s",localized,action) + if not returnvalues then + calls[n] = format("%s(%s)",localized,arguments) + elseif n == 1 then + calls[n] = format("local %s = %s(%s)",returnvalues,localized,arguments) + else + calls[n] = format("%s = %s(%s)",returnvalues,localized,arguments) + end + end + end + end + end + t.dirty = false + if n == 0 then + t.compiled = "" + else + variables = concat(variables,"\n") + calls = concat(calls,"\n") + if results then + t.compiled = format("%s\nreturn function(%s)\n%s\nreturn %s\nend",variables,arguments,calls,results) + else + t.compiled = format("%s\nreturn function(%s)\n%s\nend",variables,arguments,calls) + end + end +-- print(t.compiled) + return t.compiled -- also stored so that we can trace +end + +sequencers.tostring = construct +sequencers.localize = localize + +compile = function(t,compiler,n) -- already referred to in sequencers.new + local compiled + if not t or type(t) == "string" then + -- weird ... t.compiled = t .. so + return false + end + if compiler then + compiled = compiler(t,n) + t.compiled = compiled + else + compiled = construct(t,n) + end + local runner + if compiled == "" then + runner = false + else + runner = compiled and load(compiled)() -- we can use loadstripped here + end + t.runner = runner + return runner +end + +sequencers.compile = compile + +-- we used to deal with tail as well but now that the lists are always +-- double linked and the kernel function no longer expect tail as +-- argument we stick to head and done (done can probably also go +-- as luatex deals with return values efficiently now .. in the +-- past there was some copying involved, but no longer) + +-- todo: use sequencer (can have arguments and returnvalues etc now) + +local template_yes = [[ +%s +return function(head%s) + local ok, done = false, false +%s + return head, done +end]] + +local template_nop = [[ +return function() + return false, false +end]] + +function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug into tostring + local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip + local vars, calls, args, n = { }, { }, nil, 0 + if nofarguments == 0 then + args = "" + elseif nofarguments == 1 then + args = ",one" + elseif nofarguments == 2 then + args = ",one,two" + elseif nofarguments == 3 then + args = ",one,two,three" + elseif nofarguments == 4 then + args = ",one,two,three,four" + elseif nofarguments == 5 then + args = ",one,two,three,four,five" + else + args = ",..." 
+ end + for i=1,#order do + local group = order[i] + if not gskip[group] then + local actions = list[group] + for i=1,#actions do + local action = actions[i] + if not askip[action] then + local localized = localize(action) + n = n + 1 + vars[n] = format("local %s = %s",localized,action) + -- only difference with tostring is kind and rets (why no return) + if kind[action] == "nohead" then + calls[n] = format(" ok = %s(head%s) done = done or ok",localized,args) + else + calls[n] = format(" head, ok = %s(head%s) done = done or ok",localized,args) + end + end + end + end + end + local processor = #calls > 0 and format(template_yes,concat(vars,"\n"),args,concat(calls,"\n")) or template_nop + return processor +end diff --git a/tex/context/base/util-soc.lua b/tex/context/base/util-soc.lua index 30301c510..ba2f7b507 100644 --- a/tex/context/base/util-soc.lua +++ b/tex/context/base/util-soc.lua @@ -1,93 +1,93 @@ -if not modules then modules = { } end modules ['util-soc'] = { - version = 1.001, - comment = "support for sockets / protocols", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format = string.format - -local smtp = require("socket.smtp") -local ltn12 = require("ltn12") -local mime = require("mime") - -local mail = utilities.mail or { } -utilities.mail = mail - -local report_mail = logs.reporter("mail") - -function mail.send(specification) - local presets = specification.presets - if presets then - table.setmetatableindex(specification,presets) - end - local server = specification.server or "" - if not server then - report_mail("no server specified") - return false - end - local to = specification.to or specification.recepient or "" - if to == "" then - report_mail("no recepient specified") - return false - end - local from = specification.from or specification.sender or "" - if from == "" then - report_mail("no sender specified") - return false - end - local message = { } - local body = specification.body - if body then - message[#message+1] = { - body = body - } - end - local files = specification.files - if files then - for i=1,#files do - local filename = files[i] - local handle = io.open(filename, "rb") - if handle then - report_mail("attaching file %a",filename) - message[#message+1] = { - headers = { - ["content-type"] = format('application/pdf; name="%s"',filename), - ["content-disposition"] = format('attachment; filename="%s"',filename), - ["content-description"] = format('file: %s',filename), - ["content-transfer-encoding"] = "BASE64" - }, - body = ltn12.source.chain( - ltn12.source.file(handle), - ltn12.filter.chain(mime.encode("base64"),mime.wrap()) - ) - } - else - report_mail("file %a not found",filename) - end - end - end - local result, detail = smtp.send { - server = specification.server, - port = specification.port, - user = specification.user, - password = specification.password, - from = from, - rcpt = to, - source = smtp.message { - headers = { - to = to, - from = from, - cc = specification.cc, - subject = specification.subject or "no subject", - }, - body = message - }, - } - if detail then - report_mail("error: %s",detail) - else - report_mail("message sent") - end -end +if not modules then modules = { } end modules ['util-soc'] = { + version = 1.001, + comment = "support for sockets / protocols", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + 
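-- A minimal usage sketch of mail.send as defined in this file (illustrative only;
-- the server, credentials and file name are made-up placeholders):
--
-- utilities.mail.send {
--     server   = "smtp.example.com",
--     port     = 25,
--     user     = "someone",
--     password = "secret",
--     from     = "someone@example.com",
--     to       = "someone.else@example.com",
--     subject  = "test run",
--     body     = "hello from context",
--     files    = { "report.pdf" }, -- attached base64 encoded
-- }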
+local format = string.format + +local smtp = require("socket.smtp") +local ltn12 = require("ltn12") +local mime = require("mime") + +local mail = utilities.mail or { } +utilities.mail = mail + +local report_mail = logs.reporter("mail") + +function mail.send(specification) + local presets = specification.presets + if presets then + table.setmetatableindex(specification,presets) + end + local server = specification.server or "" + if not server then + report_mail("no server specified") + return false + end + local to = specification.to or specification.recepient or "" + if to == "" then + report_mail("no recepient specified") + return false + end + local from = specification.from or specification.sender or "" + if from == "" then + report_mail("no sender specified") + return false + end + local message = { } + local body = specification.body + if body then + message[#message+1] = { + body = body + } + end + local files = specification.files + if files then + for i=1,#files do + local filename = files[i] + local handle = io.open(filename, "rb") + if handle then + report_mail("attaching file %a",filename) + message[#message+1] = { + headers = { + ["content-type"] = format('application/pdf; name="%s"',filename), + ["content-disposition"] = format('attachment; filename="%s"',filename), + ["content-description"] = format('file: %s',filename), + ["content-transfer-encoding"] = "BASE64" + }, + body = ltn12.source.chain( + ltn12.source.file(handle), + ltn12.filter.chain(mime.encode("base64"),mime.wrap()) + ) + } + else + report_mail("file %a not found",filename) + end + end + end + local result, detail = smtp.send { + server = specification.server, + port = specification.port, + user = specification.user, + password = specification.password, + from = from, + rcpt = to, + source = smtp.message { + headers = { + to = to, + from = from, + cc = specification.cc, + subject = specification.subject or "no subject", + }, + body = message + }, + } + if detail then + report_mail("error: %s",detail) + else + report_mail("message sent") + end +end diff --git a/tex/context/base/util-sql-imp-client.lua b/tex/context/base/util-sql-imp-client.lua index e09dfde94..7c713a899 100644 --- a/tex/context/base/util-sql-imp-client.lua +++ b/tex/context/base/util-sql-imp-client.lua @@ -1,256 +1,256 @@ -if not modules then modules = { } end modules ['util-sql-client'] = { - version = 1.001, - comment = "companion to util-sql.lua", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: make a converter - -local rawset, setmetatable = rawset, setmetatable -local P, S, V, C, Cs, Ct, Cc, Cg, Cf, patterns, lpegmatch = lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.patterns, lpeg.match - -local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end) -local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end) -local report_state = logs.reporter("sql","client") - -local sql = utilities.sql -local helpers = sql.helpers -local methods = sql.methods -local validspecification = helpers.validspecification -local preparetemplate = helpers.preparetemplate -local splitdata = helpers.splitdata -local replacetemplate = utilities.templates.replace -local serialize = sql.serialize -local deserialize = sql.deserialize - --- Experiments with an p/action demonstrated that there is not much gain. 
We could do a runtime --- capture but creating all the small tables is not faster and it doesn't work well anyway. - -local separator = P("\t") -local newline = patterns.newline -local empty = Cc("") - -local entry = C((1-separator-newline)^0) -- C 10% faster than Cs - -local unescaped = P("\\n") / "\n" - + P("\\t") / "\t" - + P("\\0") / "\000" - + P("\\\\") / "\\" - -local entry = Cs((unescaped + (1-separator-newline))^0) -- C 10% faster than Cs but Cs needed due to nesting - -local getfirst = Ct( entry * (separator * (entry+empty))^0) + newline -local skipfirst = (1-newline)^1 * newline -local getfirstline = C((1-newline)^0) - -local cache = { } - -local function splitdata(data) -- todo: hash on first line ... maybe move to client module - if data == "" then - if trace_sql then - report_state("no data") - end - return { }, { } - end - local first = lpegmatch(getfirstline,data) - if not first then - if trace_sql then - report_state("no data") - end - return { }, { } - end - local p = cache[first] - if p then - -- report_state("reusing: %s",first) - local entries = lpegmatch(p.parser,data) - return entries or { }, p.keys - elseif p == false then - return { }, { } - elseif p == nil then - local keys = lpegmatch(getfirst,first) or { } - if #keys == 0 then - if trace_sql then - report_state("no banner") - end - cache[first] = false - return { }, { } - end - -- quite generic, could be a helper - local n = #keys - if n == 0 then - report_state("no fields") - cache[first] = false - return { }, { } - end - if n == 1 then - local key = keys[1] - if trace_sql then - report_state("one field with name %a",key) - end - p = Cg(Cc(key) * entry) - else - for i=1,n do - local key = keys[i] - if trace_sql then - report_state("field %s has name %a",i,key) - end - local s = Cg(Cc(key) * entry) - if p then - p = p * separator * s - else - p = s - end - end - end - p = Cf(Ct("") * p,rawset) * newline^1 - p = skipfirst * Ct(p^0) - cache[first] = { parser = p, keys = keys } - local entries = lpegmatch(p,data) - return entries or { }, keys - end -end - -local splitter = skipfirst * Ct((Ct(entry * (separator * entry)^0) * newline^1)^0) - -local function getdata(data) - return lpegmatch(splitter,data) -end - -helpers.splitdata = splitdata -helpers.getdata = getdata - -local function dataprepared(specification) - local query = preparetemplate(specification) - if query then - io.savedata(specification.queryfile,query) - os.remove(specification.resultfile) - if trace_queries then - report_state("query: %s",query) - end - return true - else - -- maybe push an error - os.remove(specification.queryfile) - os.remove(specification.resultfile) - end -end - -local function datafetched(specification,runner) - local command = replacetemplate(runner,specification) - if trace_sql then - local t = osclock() - report_state("command: %s",command) - local okay = os.execute(command) - report_state("fetchtime: %.3f sec",osclock()-t) -- not okay under linux - return okay == 0 - else - return os.execute(command) == 0 - end -end - -local function dataloaded(specification) - if trace_sql then - local t = osclock() - local data = io.loaddata(specification.resultfile) or "" - report_state("datasize: %.3f MB",#data/1024/1024) - report_state("loadtime: %.3f sec",osclock()-t) - return data - else - return io.loaddata(specification.resultfile) or "" - end -end - -local function dataconverted(data,converter) - if converter then - local data = getdata(data) - if data then - data = converter.client(data) - end - return data - elseif trace_sql then 
- local t = osclock() - local data, keys = splitdata(data,target) - report_state("converttime: %.3f",osclock()-t) - report_state("keys: %s ",#keys) - report_state("entries: %s ",#data) - return data, keys - else - return splitdata(data) - end -end - --- todo: new, etc - -local function execute(specification) - if trace_sql then - report_state("executing client") - end - if not validspecification(specification) then - report_state("error in specification") - return - end - if not dataprepared(specification) then - report_state("error in preparation") - return - end - if not datafetched(specification,methods.client.runner) then - report_state("error in fetching, query: %s",string.collapsespaces(io.loaddata(specification.queryfile))) - return - end - local data = dataloaded(specification) - if not data then - report_state("error in loading") - return - end - local data, keys = dataconverted(data,specification.converter) - if not data then - report_state("error in converting or no data") - return - end - local one = data[1] - if one then - setmetatable(data,{ __index = one } ) - end - return data, keys -end - --- The following is not that (memory) efficient but normally we will use --- the lib anyway. Of course we could make a dedicated converter and/or --- hook into the splitter code but ... it makes not much sense because then --- we can as well move the builder to the library modules. --- --- Here we reuse data as the indexes are the same, unless we hash. - -local wraptemplate = [[ -local converters = utilities.sql.converters -local deserialize = utilities.sql.deserialize - -local tostring = tostring -local tonumber = tonumber -local booleanstring = string.booleanstring - -%s - -return function(data) - local target = %s -- data or { } - for i=1,#data do - local cells = data[i] - target[%s] = { - %s - } - end - return target -end -]] - -local celltemplate = "cells[%s]" - -methods.client = { - runner = [[mysql --batch --user="%username%" --password="%password%" --host="%host%" --port=%port% --database="%database%" --default-character-set=utf8 < "%queryfile%" > "%resultfile%"]], - execute = execute, - usesfiles = true, - wraptemplate = wraptemplate, - celltemplate = celltemplate, -} +if not modules then modules = { } end modules ['util-sql-client'] = { + version = 1.001, + comment = "companion to util-sql.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: make a converter + +local rawset, setmetatable = rawset, setmetatable +local P, S, V, C, Cs, Ct, Cc, Cg, Cf, patterns, lpegmatch = lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.patterns, lpeg.match + +local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end) +local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end) +local report_state = logs.reporter("sql","client") + +local sql = utilities.sql +local helpers = sql.helpers +local methods = sql.methods +local validspecification = helpers.validspecification +local preparetemplate = helpers.preparetemplate +local splitdata = helpers.splitdata +local replacetemplate = utilities.templates.replace +local serialize = sql.serialize +local deserialize = sql.deserialize + +-- Experiments with an p/action demonstrated that there is not much gain. We could do a runtime +-- capture but creating all the small tables is not faster and it doesn't work well anyway. 
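-- A small sketch of what the splitter below is fed and what splitdata returns
-- (illustrative only, the sample values are made up): the mysql client runs in
-- batch mode, so the result file starts with a banner line holding the column
-- names, followed by tab separated rows.
--
-- local entries, keys = splitdata("id\tname\n1\tfoo\n2\tbar\n")
-- -- keys       : { "id", "name" }
-- -- entries[1] : { id = "1", name = "foo" }
-- -- entries[2] : { id = "2", name = "bar" }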
+ +local separator = P("\t") +local newline = patterns.newline +local empty = Cc("") + +local entry = C((1-separator-newline)^0) -- C 10% faster than Cs + +local unescaped = P("\\n") / "\n" + + P("\\t") / "\t" + + P("\\0") / "\000" + + P("\\\\") / "\\" + +local entry = Cs((unescaped + (1-separator-newline))^0) -- C 10% faster than Cs but Cs needed due to nesting + +local getfirst = Ct( entry * (separator * (entry+empty))^0) + newline +local skipfirst = (1-newline)^1 * newline +local getfirstline = C((1-newline)^0) + +local cache = { } + +local function splitdata(data) -- todo: hash on first line ... maybe move to client module + if data == "" then + if trace_sql then + report_state("no data") + end + return { }, { } + end + local first = lpegmatch(getfirstline,data) + if not first then + if trace_sql then + report_state("no data") + end + return { }, { } + end + local p = cache[first] + if p then + -- report_state("reusing: %s",first) + local entries = lpegmatch(p.parser,data) + return entries or { }, p.keys + elseif p == false then + return { }, { } + elseif p == nil then + local keys = lpegmatch(getfirst,first) or { } + if #keys == 0 then + if trace_sql then + report_state("no banner") + end + cache[first] = false + return { }, { } + end + -- quite generic, could be a helper + local n = #keys + if n == 0 then + report_state("no fields") + cache[first] = false + return { }, { } + end + if n == 1 then + local key = keys[1] + if trace_sql then + report_state("one field with name %a",key) + end + p = Cg(Cc(key) * entry) + else + for i=1,n do + local key = keys[i] + if trace_sql then + report_state("field %s has name %a",i,key) + end + local s = Cg(Cc(key) * entry) + if p then + p = p * separator * s + else + p = s + end + end + end + p = Cf(Ct("") * p,rawset) * newline^1 + p = skipfirst * Ct(p^0) + cache[first] = { parser = p, keys = keys } + local entries = lpegmatch(p,data) + return entries or { }, keys + end +end + +local splitter = skipfirst * Ct((Ct(entry * (separator * entry)^0) * newline^1)^0) + +local function getdata(data) + return lpegmatch(splitter,data) +end + +helpers.splitdata = splitdata +helpers.getdata = getdata + +local function dataprepared(specification) + local query = preparetemplate(specification) + if query then + io.savedata(specification.queryfile,query) + os.remove(specification.resultfile) + if trace_queries then + report_state("query: %s",query) + end + return true + else + -- maybe push an error + os.remove(specification.queryfile) + os.remove(specification.resultfile) + end +end + +local function datafetched(specification,runner) + local command = replacetemplate(runner,specification) + if trace_sql then + local t = osclock() + report_state("command: %s",command) + local okay = os.execute(command) + report_state("fetchtime: %.3f sec",osclock()-t) -- not okay under linux + return okay == 0 + else + return os.execute(command) == 0 + end +end + +local function dataloaded(specification) + if trace_sql then + local t = osclock() + local data = io.loaddata(specification.resultfile) or "" + report_state("datasize: %.3f MB",#data/1024/1024) + report_state("loadtime: %.3f sec",osclock()-t) + return data + else + return io.loaddata(specification.resultfile) or "" + end +end + +local function dataconverted(data,converter) + if converter then + local data = getdata(data) + if data then + data = converter.client(data) + end + return data + elseif trace_sql then + local t = osclock() + local data, keys = splitdata(data,target) + report_state("converttime: %.3f",osclock()-t) 
+ report_state("keys: %s ",#keys) + report_state("entries: %s ",#data) + return data, keys + else + return splitdata(data) + end +end + +-- todo: new, etc + +local function execute(specification) + if trace_sql then + report_state("executing client") + end + if not validspecification(specification) then + report_state("error in specification") + return + end + if not dataprepared(specification) then + report_state("error in preparation") + return + end + if not datafetched(specification,methods.client.runner) then + report_state("error in fetching, query: %s",string.collapsespaces(io.loaddata(specification.queryfile))) + return + end + local data = dataloaded(specification) + if not data then + report_state("error in loading") + return + end + local data, keys = dataconverted(data,specification.converter) + if not data then + report_state("error in converting or no data") + return + end + local one = data[1] + if one then + setmetatable(data,{ __index = one } ) + end + return data, keys +end + +-- The following is not that (memory) efficient but normally we will use +-- the lib anyway. Of course we could make a dedicated converter and/or +-- hook into the splitter code but ... it makes not much sense because then +-- we can as well move the builder to the library modules. +-- +-- Here we reuse data as the indexes are the same, unless we hash. + +local wraptemplate = [[ +local converters = utilities.sql.converters +local deserialize = utilities.sql.deserialize + +local tostring = tostring +local tonumber = tonumber +local booleanstring = string.booleanstring + +%s + +return function(data) + local target = %s -- data or { } + for i=1,#data do + local cells = data[i] + target[%s] = { + %s + } + end + return target +end +]] + +local celltemplate = "cells[%s]" + +methods.client = { + runner = [[mysql --batch --user="%username%" --password="%password%" --host="%host%" --port=%port% --database="%database%" --default-character-set=utf8 < "%queryfile%" > "%resultfile%"]], + execute = execute, + usesfiles = true, + wraptemplate = wraptemplate, + celltemplate = celltemplate, +} diff --git a/tex/context/base/util-sql-imp-library.lua b/tex/context/base/util-sql-imp-library.lua index 15754e26a..8a83b06d2 100644 --- a/tex/context/base/util-sql-imp-library.lua +++ b/tex/context/base/util-sql-imp-library.lua @@ -1,289 +1,289 @@ -if not modules then modules = { } end modules ['util-sql-library'] = { - version = 1.001, - comment = "companion to util-sql.lua", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- local function pcall(f,...) return true, f(...) end - --- For some reason the sql lib partially fails in luatex when creating hashed row. So far --- we couldn't figure it out (some issue with adapting the table that is passes as first --- argument in the fetch routine. Apart from this it looks like the mysql binding has some --- efficiency issues (like creating a keys and types table for each row) but that could be --- optimized. 
Anyhow, fecthing results can be done as follows: - --- local function collect_1(r) --- local t = { } --- for i=1,r:numrows() do --- t[#t+1] = r:fetch({},"a") --- end --- return t --- end --- --- local function collect_2(r) --- local keys = r:getcolnames() --- local n = #keys --- local t = { } --- for i=1,r:numrows() do --- local v = { r:fetch() } --- local r = { } --- for i=1,n do --- r[keys[i]] = v[i] --- end --- t[#t+1] = r --- end --- return t --- end --- --- local function collect_3(r) --- local keys = r:getcolnames() --- local n = #keys --- local t = { } --- for i=1,r:numrows() do --- local v = r:fetch({},"n") --- local r = { } --- for i=1,n do --- r[keys[i]] = v[i] --- end --- t[#t+1] = r --- end --- return t --- end --- --- On a large table with some 8 columns (mixed text and numbers) we get the following --- timings (the 'a' alternative is already using the more efficient variant in the --- binding). --- --- collect_1 : 1.31 --- collect_2 : 1.39 --- collect_3 : 1.75 --- --- Some, as a workaround for this 'bug' the second alternative can be used. - -local format = string.format -local lpegmatch = lpeg.match -local setmetatable, type = setmetatable, type - -local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end) -local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end) -local report_state = logs.reporter("sql","library") - -local sql = utilities.sql -local mysql = require("luasql.mysql") -local cache = { } -local helpers = sql.helpers -local methods = sql.methods -local validspecification = helpers.validspecification -local querysplitter = helpers.querysplitter -local dataprepared = helpers.preparetemplate -local serialize = sql.serialize -local deserialize = sql.deserialize -local formatters = string.formatters - -local initialize = mysql.mysql - -local function connect(session,specification) - return session:connect( - specification.database or "", - specification.username or "", - specification.password or "", - specification.host or "", - specification.port - ) -end - -local function fetched(specification,query,converter) - if not query or query == "" then - report_state("no valid query") - return false - end - local id = specification.id - local session, connection - if id then - local c = cache[id] - if c then - session = c.session - connection = c.connection - end - if not connection then - session = initialize() - if not session then - return formatters["no session for %a"](id) - end - connection = connect(session,specification) - if not connection then - return formatters["no connection for %a"](id) - end - cache[id] = { session = session, connection = connection } - end - else - session = initialize() - if not session then - return "no session" - end - connection = connect(session,specification) - if not connection then - return "no connection" - end - end - if not connection then - report_state("error in connection: %s@%s to %s:%s", - specification.database or "no database", - specification.username or "no username", - specification.host or "no host", - specification.port or "no port" - ) - return "no connection" - end - query = lpegmatch(querysplitter,query) - local result, okay - for i=1,#query do - local q = query[i] - local r, m = connection:execute(q) - if m then - report_state("error in query to host %a: %s",specification.host,string.collapsespaces(q)) - if m then - report_state("message: %s",m) - end - end - local t = type(r) - if t == "userdata" then - result = r - okay = true - elseif t == 
"number" then - okay = true - end - end - if not okay then -- can go - if session then - session:close() - end - if connection then - connection:close() - end - if id then - cache[id] = nil - end - return "execution error" - end - local data, keys - if result then - if converter then - data = converter.library(result) - else - keys = result:getcolnames() - if keys then - data = { } - local n = result:numrows() or 0 - if n > 0 then - local k = #keys - for i=1,n do - local v = { result:fetch() } - local d = { } - for i=1,k do - d[keys[i]] = v[i] - end - data[#data+1] = d - end - end - end - end - result:close() - end - if not id then - if connection then - connection:close() - end - if session then - session:close() - end - end - return false, data, keys -end - -local function datafetched(specification,query,converter) - local callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter) - if not callokay then - report_state("call error, retrying") - callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter) - elseif connectionerror then - report_state("error: %s, retrying",connectionerror) - callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter) - end - if not callokay then - report_state("persistent call error") - elseif connectionerror then - report_state("persistent error: %s",connectionerror) - end - return data or { }, keys or { } -end - -local function execute(specification) - if trace_sql then - report_state("executing library") - end - if not validspecification(specification) then - report_state("error in specification") - return - end - local query = dataprepared(specification) - if not query then - report_state("error in preparation") - return - end - local data, keys = datafetched(specification,query,specification.converter) - if not data then - report_state("error in fetching") - return - end - local one = data[1] - if one then - setmetatable(data,{ __index = one } ) - end - return data, keys -end - --- Here we build the dataset stepwise so we don't use the data hack that --- is used in the client variant. - -local wraptemplate = [[ -local converters = utilities.sql.converters -local deserialize = utilities.sql.deserialize - -local tostring = tostring -local tonumber = tonumber -local booleanstring = string.booleanstring - -%s - -return function(result) - if not result then - return { } - end - local nofrows = result:numrows() or 0 - if nofrows == 0 then - return { } - end - local target = { } -- no %s needed here - for i=1,nofrows do - local cells = { result:fetch() } - target[%s] = { - %s - } - end - return target -end -]] - -local celltemplate = "cells[%s]" - -methods.library = { - runner = function() end, -- never called - execute = execute, - initialize = initialize, -- returns session - usesfiles = false, - wraptemplate = wraptemplate, - celltemplate = celltemplate, -} +if not modules then modules = { } end modules ['util-sql-library'] = { + version = 1.001, + comment = "companion to util-sql.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- local function pcall(f,...) return true, f(...) end + +-- For some reason the sql lib partially fails in luatex when creating hashed row. So far +-- we couldn't figure it out (some issue with adapting the table that is passes as first +-- argument in the fetch routine. 
Apart from this it looks like the mysql binding has some +-- efficiency issues (like creating a keys and types table for each row) but that could be +-- optimized. Anyhow, fecthing results can be done as follows: + +-- local function collect_1(r) +-- local t = { } +-- for i=1,r:numrows() do +-- t[#t+1] = r:fetch({},"a") +-- end +-- return t +-- end +-- +-- local function collect_2(r) +-- local keys = r:getcolnames() +-- local n = #keys +-- local t = { } +-- for i=1,r:numrows() do +-- local v = { r:fetch() } +-- local r = { } +-- for i=1,n do +-- r[keys[i]] = v[i] +-- end +-- t[#t+1] = r +-- end +-- return t +-- end +-- +-- local function collect_3(r) +-- local keys = r:getcolnames() +-- local n = #keys +-- local t = { } +-- for i=1,r:numrows() do +-- local v = r:fetch({},"n") +-- local r = { } +-- for i=1,n do +-- r[keys[i]] = v[i] +-- end +-- t[#t+1] = r +-- end +-- return t +-- end +-- +-- On a large table with some 8 columns (mixed text and numbers) we get the following +-- timings (the 'a' alternative is already using the more efficient variant in the +-- binding). +-- +-- collect_1 : 1.31 +-- collect_2 : 1.39 +-- collect_3 : 1.75 +-- +-- Some, as a workaround for this 'bug' the second alternative can be used. + +local format = string.format +local lpegmatch = lpeg.match +local setmetatable, type = setmetatable, type + +local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end) +local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end) +local report_state = logs.reporter("sql","library") + +local sql = utilities.sql +local mysql = require("luasql.mysql") +local cache = { } +local helpers = sql.helpers +local methods = sql.methods +local validspecification = helpers.validspecification +local querysplitter = helpers.querysplitter +local dataprepared = helpers.preparetemplate +local serialize = sql.serialize +local deserialize = sql.deserialize +local formatters = string.formatters + +local initialize = mysql.mysql + +local function connect(session,specification) + return session:connect( + specification.database or "", + specification.username or "", + specification.password or "", + specification.host or "", + specification.port + ) +end + +local function fetched(specification,query,converter) + if not query or query == "" then + report_state("no valid query") + return false + end + local id = specification.id + local session, connection + if id then + local c = cache[id] + if c then + session = c.session + connection = c.connection + end + if not connection then + session = initialize() + if not session then + return formatters["no session for %a"](id) + end + connection = connect(session,specification) + if not connection then + return formatters["no connection for %a"](id) + end + cache[id] = { session = session, connection = connection } + end + else + session = initialize() + if not session then + return "no session" + end + connection = connect(session,specification) + if not connection then + return "no connection" + end + end + if not connection then + report_state("error in connection: %s@%s to %s:%s", + specification.database or "no database", + specification.username or "no username", + specification.host or "no host", + specification.port or "no port" + ) + return "no connection" + end + query = lpegmatch(querysplitter,query) + local result, okay + for i=1,#query do + local q = query[i] + local r, m = connection:execute(q) + if m then + report_state("error in query to host %a: 
%s",specification.host,string.collapsespaces(q)) + if m then + report_state("message: %s",m) + end + end + local t = type(r) + if t == "userdata" then + result = r + okay = true + elseif t == "number" then + okay = true + end + end + if not okay then -- can go + if session then + session:close() + end + if connection then + connection:close() + end + if id then + cache[id] = nil + end + return "execution error" + end + local data, keys + if result then + if converter then + data = converter.library(result) + else + keys = result:getcolnames() + if keys then + data = { } + local n = result:numrows() or 0 + if n > 0 then + local k = #keys + for i=1,n do + local v = { result:fetch() } + local d = { } + for i=1,k do + d[keys[i]] = v[i] + end + data[#data+1] = d + end + end + end + end + result:close() + end + if not id then + if connection then + connection:close() + end + if session then + session:close() + end + end + return false, data, keys +end + +local function datafetched(specification,query,converter) + local callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter) + if not callokay then + report_state("call error, retrying") + callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter) + elseif connectionerror then + report_state("error: %s, retrying",connectionerror) + callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter) + end + if not callokay then + report_state("persistent call error") + elseif connectionerror then + report_state("persistent error: %s",connectionerror) + end + return data or { }, keys or { } +end + +local function execute(specification) + if trace_sql then + report_state("executing library") + end + if not validspecification(specification) then + report_state("error in specification") + return + end + local query = dataprepared(specification) + if not query then + report_state("error in preparation") + return + end + local data, keys = datafetched(specification,query,specification.converter) + if not data then + report_state("error in fetching") + return + end + local one = data[1] + if one then + setmetatable(data,{ __index = one } ) + end + return data, keys +end + +-- Here we build the dataset stepwise so we don't use the data hack that +-- is used in the client variant. 
+ +local wraptemplate = [[ +local converters = utilities.sql.converters +local deserialize = utilities.sql.deserialize + +local tostring = tostring +local tonumber = tonumber +local booleanstring = string.booleanstring + +%s + +return function(result) + if not result then + return { } + end + local nofrows = result:numrows() or 0 + if nofrows == 0 then + return { } + end + local target = { } -- no %s needed here + for i=1,nofrows do + local cells = { result:fetch() } + target[%s] = { + %s + } + end + return target +end +]] + +local celltemplate = "cells[%s]" + +methods.library = { + runner = function() end, -- never called + execute = execute, + initialize = initialize, -- returns session + usesfiles = false, + wraptemplate = wraptemplate, + celltemplate = celltemplate, +} diff --git a/tex/context/base/util-sql-imp-swiglib.lua b/tex/context/base/util-sql-imp-swiglib.lua index 719620a6f..f456c9ccb 100644 --- a/tex/context/base/util-sql-imp-swiglib.lua +++ b/tex/context/base/util-sql-imp-swiglib.lua @@ -1,505 +1,505 @@ -if not modules then modules = { } end modules ['util-sql-swiglib'] = { - version = 1.001, - comment = "companion to util-sql.lua", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- As the regular library is flawed (i.e. there are crashes in the table --- construction code) and also not that efficient, Luigi Scarso looked into --- a swig binding. This is a bit more low level approach but as we stay --- closer to the original library it's also less dependant. - -local concat = table.concat -local format = string.format -local lpegmatch = lpeg.match -local setmetatable, type = setmetatable, type -local sleep = os.sleep - -local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end) -local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end) -local report_state = logs.reporter("sql","swiglib") - -local sql = utilities.sql -local mysql = require("swiglib.mysql.core") -- "5.6" - --- inspect(table.sortedkeys(mysql)) - -local nofretries = 5 -local retrydelay = 1 - -local cache = { } -local helpers = sql.helpers -local methods = sql.methods -local validspecification = helpers.validspecification -local querysplitter = helpers.querysplitter -local dataprepared = helpers.preparetemplate -local serialize = sql.serialize -local deserialize = sql.deserialize - -local mysql_initialize = mysql.mysql_init - -local mysql_open_connection = mysql.mysql_real_connect -local mysql_execute_query = mysql.mysql_real_query -local mysql_close_connection = mysql.mysql_close - -local mysql_field_seek = mysql.mysql_field_seek -local mysql_num_fields = mysql.mysql_num_fields -local mysql_fetch_field = mysql.mysql_fetch_field -local mysql_num_rows = mysql.mysql_num_rows -local mysql_fetch_row = mysql.mysql_fetch_row -local mysql_fetch_lengths = mysql.mysql_fetch_lengths -local mysql_init = mysql.mysql_init -local mysql_store_result = mysql.mysql_store_result -local mysql_free_result = mysql.mysql_free_result -local mysql_use_result = mysql.mysql_use_result - -local mysql_error_message = mysql.mysql_error -local mysql_options_argument = mysql.mysql_options_argument - -local instance = mysql.MYSQL() - -local mysql_constant_false = false -local mysql_constant_true = true - --- if mysql_options_argument then --- --- mysql_constant_false = mysql_options_argument(false) -- 0 "\0" --- mysql_constant_true = mysql_options_argument(true) -- 1 "\1" --- 
--- -- print(swig_type(mysql_constant_false)) --- -- print(swig_type(mysql_constant_true)) --- --- mysql.mysql_options(instance,mysql.MYSQL_OPT_RECONNECT,mysql_constant_true); --- --- else --- --- print("") --- print("incomplete swiglib.mysql interface") --- print("") --- --- end - -local typemap = mysql.MYSQL_TYPE_VAR_STRING and { - [mysql.MYSQL_TYPE_VAR_STRING ] = "string", - [mysql.MYSQL_TYPE_STRING ] = "string", - [mysql.MYSQL_TYPE_DECIMAL ] = "number", - [mysql.MYSQL_TYPE_SHORT ] = "number", - [mysql.MYSQL_TYPE_LONG ] = "number", - [mysql.MYSQL_TYPE_FLOAT ] = "number", - [mysql.MYSQL_TYPE_DOUBLE ] = "number", - [mysql.MYSQL_TYPE_LONGLONG ] = "number", - [mysql.MYSQL_TYPE_INT24 ] = "number", - [mysql.MYSQL_TYPE_YEAR ] = "number", - [mysql.MYSQL_TYPE_TINY ] = "number", - [mysql.MYSQL_TYPE_TINY_BLOB ] = "binary", - [mysql.MYSQL_TYPE_MEDIUM_BLOB] = "binary", - [mysql.MYSQL_TYPE_LONG_BLOB ] = "binary", - [mysql.MYSQL_TYPE_BLOB ] = "binary", - [mysql.MYSQL_TYPE_DATE ] = "date", - [mysql.MYSQL_TYPE_NEWDATE ] = "date", - [mysql.MYSQL_TYPE_DATETIME ] = "datetime", - [mysql.MYSQL_TYPE_TIME ] = "time", - [mysql.MYSQL_TYPE_TIMESTAMP ] = "time", - [mysql.MYSQL_TYPE_ENUM ] = "set", - [mysql.MYSQL_TYPE_SET ] = "set", - [mysql.MYSQL_TYPE_NULL ] = "null", -} - --- real_escape_string - -local function finish(t) - local r = t._result_ - if r then - mysql_free_result(r) - end -end - --- will become metatable magic - --- local function analyze(result) --- mysql_field_seek(result,0) --- local nofrows = mysql_num_rows(result) or 0 --- local noffields = mysql_num_fields(result) --- local names = { } --- local types = { } --- for i=1,noffields do --- local field = mysql_fetch_field(result) --- names[i] = field.name --- types[i] = field.type --- end --- return names, types, noffields, nofrows --- end - -local function getcolnames(t) - return t.names -end - -local function getcoltypes(t) - return t.types -end - -local function numrows(t) - return t.nofrows -end - --- swig_type - --- local ulongArray_getitem = mysql.ulongArray_getitem --- local util_getbytearray = mysql.util_getbytearray - --- local function list(t) --- local result = t._result_ --- local row = mysql_fetch_row(result) --- local len = mysql_fetch_lengths(result) --- local result = { } --- for i=1,t.noffields do --- local r = i - 1 -- zero offset --- result[i] = util_getbytearray(row,r,ulongArray_getitem(len,r)) --- end --- return result --- end - --- local function hash(t) --- local list = util_mysql_fetch_fields_from_current_row(t._result_) --- local result = t._result_ --- local fields = t.names --- local row = mysql_fetch_row(result) --- local len = mysql_fetch_lengths(result) --- local result = { } --- for i=1,t.noffields do --- local r = i - 1 -- zero offset --- result[fields[i]] = util_getbytearray(row,r,ulongArray_getitem(len,r)) --- end --- return result --- end - -local util_mysql_fetch_fields_from_current_row = mysql.util_mysql_fetch_fields_from_current_row -local util_mysql_fetch_all_rows = mysql.util_mysql_fetch_all_rows - -local function list(t) - return util_mysql_fetch_fields_from_current_row(t._result_) -end - -local function hash(t) - local list = util_mysql_fetch_fields_from_current_row(t._result_) - local fields = t.names - local data = { } - for i=1,t.noffields do - data[fields[i]] = list[i] - end - return data -end - -local function wholelist(t) - return util_mysql_fetch_all_rows(t._result_) -end - -local mt = { __index = { - -- regular - finish = finish, - list = list, - hash = hash, - wholelist = wholelist, - -- 
compatibility - numrows = numrows, - getcolnames = getcolnames, - getcoltypes = getcoltypes, - -- fallback - _result_ = nil, - names = { }, - types = { }, - noffields = 0, - nofrows = 0, - } -} - -local nt = setmetatable({},mt) - --- session - -local function close(t) - mysql_close_connection(t._connection_) -end - -local function execute(t,query) - if query and query ~= "" then - local connection = t._connection_ - local result = mysql_execute_query(connection,query,#query) - if result == 0 then - local result = mysql_store_result(connection) - if result then - mysql_field_seek(result,0) - local nofrows = mysql_num_rows(result) or 0 - local noffields = mysql_num_fields(result) - local names = { } - local types = { } - for i=1,noffields do - local field = mysql_fetch_field(result) - names[i] = field.name - types[i] = field.type - end - local t = { - _result_ = result, - names = names, - types = types, - noffields = noffields, - nofrows = nofrows, - } - return setmetatable(t,mt) - else - return nt - end - end - end - return false -end - -local mt = { __index = { - close = close, - execute = execute, - } -} - -local function open(t,database,username,password,host,port) - local connection = mysql_open_connection(t._session_,host or "localhost",username or "",password or "",database or "",port or 0,0,0) - if connection then - local t = { - _connection_ = connection, - } - return setmetatable(t,mt) - end -end - -local function message(t) - return mysql_error_message(t._session_) -end - -local function close(t) - -- dummy, as we have a global session -end - -local mt = { - __index = { - connect = open, - close = close, - message = message, - } -} - -local function initialize() - local session = { - _session_ = mysql_initialize(instance) -- maybe share, single thread anyway - } - return setmetatable(session,mt) -end - --- -- -- -- - -local function connect(session,specification) - return session:connect( - specification.database or "", - specification.username or "", - specification.password or "", - specification.host or "", - specification.port - ) -end - -local function error_in_connection(specification,action) - report_state("error in connection: [%s] %s@%s to %s:%s", - action or "unknown", - specification.database or "no database", - specification.username or "no username", - specification.host or "no host", - specification.port or "no port" - ) -end - -local function datafetched(specification,query,converter) - if not query or query == "" then - report_state("no valid query") - return { }, { } - end - local id = specification.id - local session, connection - if id then - local c = cache[id] - if c then - session = c.session - connection = c.connection - end - if not connection then - session = initialize() - connection = connect(session,specification) - if not connection then - for i=1,nofretries do - sleep(retrydelay) - report_state("retrying to connect: [%s.%s] %s@%s to %s:%s", - id,i, - specification.database or "no database", - specification.username or "no username", - specification.host or "no host", - specification.port or "no port" - ) - connection = connect(session,specification) - if connection then - break - end - end - end - if connection then - cache[id] = { session = session, connection = connection } - end - end - else - session = initialize() - connection = connect(session,specification) - if not connection then - for i=1,nofretries do - sleep(retrydelay) - report_state("retrying to connect: [%s] %s@%s to %s:%s", - i, - specification.database or "no database", - 
specification.username or "no username", - specification.host or "no host", - specification.port or "no port" - ) - connection = connect(session,specification) - if connection then - break - end - end - end - end - if not connection then - report_state("error in connection: %s@%s to %s:%s", - specification.database or "no database", - specification.username or "no username", - specification.host or "no host", - specification.port or "no port" - ) - return { }, { } - end - query = lpegmatch(querysplitter,query) - local result, message, okay - for i=1,#query do - local q = query[i] - local r, m = connection:execute(q) - if m then - report_state("error in query, stage: %s",string.collapsespaces(q)) - message = message and format("%s\n%s",message,m) or m - end - if type(r) == "table" then - result = r - okay = true - elseif not m then - okay = true - end - end - local data, keys - if result then - if converter then - data = converter.swiglib(result) - else - keys = result.names - data = { } - for i=1,result.nofrows do - data[i] = result:hash() - end - end - result:finish() -- result:close() - elseif message then - report_state("message %s",message) - end - if not keys then - keys = { } - end - if not data then - data = { } - end - if not id then - connection:close() - session:close() - end - return data, keys -end - -local function execute(specification) - if trace_sql then - report_state("executing library") - end - if not validspecification(specification) then - report_state("error in specification") - return - end - local query = dataprepared(specification) - if not query then - report_state("error in preparation") - return - end - local data, keys = datafetched(specification,query,specification.converter) - if not data then - report_state("error in fetching") - return - end - local one = data[1] - if one then - setmetatable(data,{ __index = one } ) - end - return data, keys -end - -local wraptemplate = [[ -local mysql = require("swigluamysql") -- will be stored in method - ------ mysql_fetch_row = mysql.mysql_fetch_row ------ mysql_fetch_lengths = mysql.mysql_fetch_lengths ------ util_unpackbytearray = mysql.util_unpackbytearray -local util_mysql_fetch_fields_from_current_row - = mysql.util_mysql_fetch_fields_from_current_row - -local converters = utilities.sql.converters -local deserialize = utilities.sql.deserialize - -local tostring = tostring -local tonumber = tonumber -local booleanstring = string.booleanstring - -%s - -return function(result) - if not result then - return { } - end - local nofrows = result.nofrows or 0 - if nofrows == 0 then - return { } - end - local noffields = result.noffields or 0 - local target = { } -- no %s needed here - result = result._result_ - for i=1,nofrows do - -- local row = mysql_fetch_row(result) - -- local len = mysql_fetch_lengths(result) - -- local cells = util_unpackbytearray(row,noffields,len) - local cells = util_mysql_fetch_fields_from_current_row(result) - target[%s] = { - %s - } - end - return target -end -]] - -local celltemplate = "cells[%s]" - -methods.swiglib = { - runner = function() end, -- never called - execute = execute, - initialize = initialize, -- returns session - usesfiles = false, - wraptemplate = wraptemplate, - celltemplate = celltemplate, -} +if not modules then modules = { } end modules ['util-sql-swiglib'] = { + version = 1.001, + comment = "companion to util-sql.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + 
+-- As the regular library is flawed (i.e. there are crashes in the table +-- construction code) and also not that efficient, Luigi Scarso looked into +-- a swig binding. This is a bit more low level approach but as we stay +-- closer to the original library it's also less dependant. + +local concat = table.concat +local format = string.format +local lpegmatch = lpeg.match +local setmetatable, type = setmetatable, type +local sleep = os.sleep + +local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end) +local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end) +local report_state = logs.reporter("sql","swiglib") + +local sql = utilities.sql +local mysql = require("swiglib.mysql.core") -- "5.6" + +-- inspect(table.sortedkeys(mysql)) + +local nofretries = 5 +local retrydelay = 1 + +local cache = { } +local helpers = sql.helpers +local methods = sql.methods +local validspecification = helpers.validspecification +local querysplitter = helpers.querysplitter +local dataprepared = helpers.preparetemplate +local serialize = sql.serialize +local deserialize = sql.deserialize + +local mysql_initialize = mysql.mysql_init + +local mysql_open_connection = mysql.mysql_real_connect +local mysql_execute_query = mysql.mysql_real_query +local mysql_close_connection = mysql.mysql_close + +local mysql_field_seek = mysql.mysql_field_seek +local mysql_num_fields = mysql.mysql_num_fields +local mysql_fetch_field = mysql.mysql_fetch_field +local mysql_num_rows = mysql.mysql_num_rows +local mysql_fetch_row = mysql.mysql_fetch_row +local mysql_fetch_lengths = mysql.mysql_fetch_lengths +local mysql_init = mysql.mysql_init +local mysql_store_result = mysql.mysql_store_result +local mysql_free_result = mysql.mysql_free_result +local mysql_use_result = mysql.mysql_use_result + +local mysql_error_message = mysql.mysql_error +local mysql_options_argument = mysql.mysql_options_argument + +local instance = mysql.MYSQL() + +local mysql_constant_false = false +local mysql_constant_true = true + +-- if mysql_options_argument then +-- +-- mysql_constant_false = mysql_options_argument(false) -- 0 "\0" +-- mysql_constant_true = mysql_options_argument(true) -- 1 "\1" +-- +-- -- print(swig_type(mysql_constant_false)) +-- -- print(swig_type(mysql_constant_true)) +-- +-- mysql.mysql_options(instance,mysql.MYSQL_OPT_RECONNECT,mysql_constant_true); +-- +-- else +-- +-- print("") +-- print("incomplete swiglib.mysql interface") +-- print("") +-- +-- end + +local typemap = mysql.MYSQL_TYPE_VAR_STRING and { + [mysql.MYSQL_TYPE_VAR_STRING ] = "string", + [mysql.MYSQL_TYPE_STRING ] = "string", + [mysql.MYSQL_TYPE_DECIMAL ] = "number", + [mysql.MYSQL_TYPE_SHORT ] = "number", + [mysql.MYSQL_TYPE_LONG ] = "number", + [mysql.MYSQL_TYPE_FLOAT ] = "number", + [mysql.MYSQL_TYPE_DOUBLE ] = "number", + [mysql.MYSQL_TYPE_LONGLONG ] = "number", + [mysql.MYSQL_TYPE_INT24 ] = "number", + [mysql.MYSQL_TYPE_YEAR ] = "number", + [mysql.MYSQL_TYPE_TINY ] = "number", + [mysql.MYSQL_TYPE_TINY_BLOB ] = "binary", + [mysql.MYSQL_TYPE_MEDIUM_BLOB] = "binary", + [mysql.MYSQL_TYPE_LONG_BLOB ] = "binary", + [mysql.MYSQL_TYPE_BLOB ] = "binary", + [mysql.MYSQL_TYPE_DATE ] = "date", + [mysql.MYSQL_TYPE_NEWDATE ] = "date", + [mysql.MYSQL_TYPE_DATETIME ] = "datetime", + [mysql.MYSQL_TYPE_TIME ] = "time", + [mysql.MYSQL_TYPE_TIMESTAMP ] = "time", + [mysql.MYSQL_TYPE_ENUM ] = "set", + [mysql.MYSQL_TYPE_SET ] = "set", + [mysql.MYSQL_TYPE_NULL ] = "null", +} + +-- real_escape_string + +local function finish(t) 
+ local r = t._result_ + if r then + mysql_free_result(r) + end +end + +-- will become metatable magic + +-- local function analyze(result) +-- mysql_field_seek(result,0) +-- local nofrows = mysql_num_rows(result) or 0 +-- local noffields = mysql_num_fields(result) +-- local names = { } +-- local types = { } +-- for i=1,noffields do +-- local field = mysql_fetch_field(result) +-- names[i] = field.name +-- types[i] = field.type +-- end +-- return names, types, noffields, nofrows +-- end + +local function getcolnames(t) + return t.names +end + +local function getcoltypes(t) + return t.types +end + +local function numrows(t) + return t.nofrows +end + +-- swig_type + +-- local ulongArray_getitem = mysql.ulongArray_getitem +-- local util_getbytearray = mysql.util_getbytearray + +-- local function list(t) +-- local result = t._result_ +-- local row = mysql_fetch_row(result) +-- local len = mysql_fetch_lengths(result) +-- local result = { } +-- for i=1,t.noffields do +-- local r = i - 1 -- zero offset +-- result[i] = util_getbytearray(row,r,ulongArray_getitem(len,r)) +-- end +-- return result +-- end + +-- local function hash(t) +-- local list = util_mysql_fetch_fields_from_current_row(t._result_) +-- local result = t._result_ +-- local fields = t.names +-- local row = mysql_fetch_row(result) +-- local len = mysql_fetch_lengths(result) +-- local result = { } +-- for i=1,t.noffields do +-- local r = i - 1 -- zero offset +-- result[fields[i]] = util_getbytearray(row,r,ulongArray_getitem(len,r)) +-- end +-- return result +-- end + +local util_mysql_fetch_fields_from_current_row = mysql.util_mysql_fetch_fields_from_current_row +local util_mysql_fetch_all_rows = mysql.util_mysql_fetch_all_rows + +local function list(t) + return util_mysql_fetch_fields_from_current_row(t._result_) +end + +local function hash(t) + local list = util_mysql_fetch_fields_from_current_row(t._result_) + local fields = t.names + local data = { } + for i=1,t.noffields do + data[fields[i]] = list[i] + end + return data +end + +local function wholelist(t) + return util_mysql_fetch_all_rows(t._result_) +end + +local mt = { __index = { + -- regular + finish = finish, + list = list, + hash = hash, + wholelist = wholelist, + -- compatibility + numrows = numrows, + getcolnames = getcolnames, + getcoltypes = getcoltypes, + -- fallback + _result_ = nil, + names = { }, + types = { }, + noffields = 0, + nofrows = 0, + } +} + +local nt = setmetatable({},mt) + +-- session + +local function close(t) + mysql_close_connection(t._connection_) +end + +local function execute(t,query) + if query and query ~= "" then + local connection = t._connection_ + local result = mysql_execute_query(connection,query,#query) + if result == 0 then + local result = mysql_store_result(connection) + if result then + mysql_field_seek(result,0) + local nofrows = mysql_num_rows(result) or 0 + local noffields = mysql_num_fields(result) + local names = { } + local types = { } + for i=1,noffields do + local field = mysql_fetch_field(result) + names[i] = field.name + types[i] = field.type + end + local t = { + _result_ = result, + names = names, + types = types, + noffields = noffields, + nofrows = nofrows, + } + return setmetatable(t,mt) + else + return nt + end + end + end + return false +end + +local mt = { __index = { + close = close, + execute = execute, + } +} + +local function open(t,database,username,password,host,port) + local connection = mysql_open_connection(t._session_,host or "localhost",username or "",password or "",database or "",port or 0,0,0) + if 
connection then + local t = { + _connection_ = connection, + } + return setmetatable(t,mt) + end +end + +local function message(t) + return mysql_error_message(t._session_) +end + +local function close(t) + -- dummy, as we have a global session +end + +local mt = { + __index = { + connect = open, + close = close, + message = message, + } +} + +local function initialize() + local session = { + _session_ = mysql_initialize(instance) -- maybe share, single thread anyway + } + return setmetatable(session,mt) +end + +-- -- -- -- + +local function connect(session,specification) + return session:connect( + specification.database or "", + specification.username or "", + specification.password or "", + specification.host or "", + specification.port + ) +end + +local function error_in_connection(specification,action) + report_state("error in connection: [%s] %s@%s to %s:%s", + action or "unknown", + specification.database or "no database", + specification.username or "no username", + specification.host or "no host", + specification.port or "no port" + ) +end + +local function datafetched(specification,query,converter) + if not query or query == "" then + report_state("no valid query") + return { }, { } + end + local id = specification.id + local session, connection + if id then + local c = cache[id] + if c then + session = c.session + connection = c.connection + end + if not connection then + session = initialize() + connection = connect(session,specification) + if not connection then + for i=1,nofretries do + sleep(retrydelay) + report_state("retrying to connect: [%s.%s] %s@%s to %s:%s", + id,i, + specification.database or "no database", + specification.username or "no username", + specification.host or "no host", + specification.port or "no port" + ) + connection = connect(session,specification) + if connection then + break + end + end + end + if connection then + cache[id] = { session = session, connection = connection } + end + end + else + session = initialize() + connection = connect(session,specification) + if not connection then + for i=1,nofretries do + sleep(retrydelay) + report_state("retrying to connect: [%s] %s@%s to %s:%s", + i, + specification.database or "no database", + specification.username or "no username", + specification.host or "no host", + specification.port or "no port" + ) + connection = connect(session,specification) + if connection then + break + end + end + end + end + if not connection then + report_state("error in connection: %s@%s to %s:%s", + specification.database or "no database", + specification.username or "no username", + specification.host or "no host", + specification.port or "no port" + ) + return { }, { } + end + query = lpegmatch(querysplitter,query) + local result, message, okay + for i=1,#query do + local q = query[i] + local r, m = connection:execute(q) + if m then + report_state("error in query, stage: %s",string.collapsespaces(q)) + message = message and format("%s\n%s",message,m) or m + end + if type(r) == "table" then + result = r + okay = true + elseif not m then + okay = true + end + end + local data, keys + if result then + if converter then + data = converter.swiglib(result) + else + keys = result.names + data = { } + for i=1,result.nofrows do + data[i] = result:hash() + end + end + result:finish() -- result:close() + elseif message then + report_state("message %s",message) + end + if not keys then + keys = { } + end + if not data then + data = { } + end + if not id then + connection:close() + session:close() + end + return data, keys +end + 
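-- A minimal sketch of the result wrapper used by datafetched above: execute on a
-- connection returns an object with hash, list, numrows, getcolnames and finish
-- (illustrative only; the connection and the query are assumptions):
--
-- local result = connection:execute("SELECT `id`, `name` FROM `users` ;")
-- if result then
--     for i=1,result:numrows() do
--         local row = result:hash() -- current row, hashed on column names
--         print(row.id, row.name)
--     end
--     result:finish() -- frees the mysql result
-- end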
+local function execute(specification) + if trace_sql then + report_state("executing library") + end + if not validspecification(specification) then + report_state("error in specification") + return + end + local query = dataprepared(specification) + if not query then + report_state("error in preparation") + return + end + local data, keys = datafetched(specification,query,specification.converter) + if not data then + report_state("error in fetching") + return + end + local one = data[1] + if one then + setmetatable(data,{ __index = one } ) + end + return data, keys +end + +local wraptemplate = [[ +local mysql = require("swigluamysql") -- will be stored in method + +----- mysql_fetch_row = mysql.mysql_fetch_row +----- mysql_fetch_lengths = mysql.mysql_fetch_lengths +----- util_unpackbytearray = mysql.util_unpackbytearray +local util_mysql_fetch_fields_from_current_row + = mysql.util_mysql_fetch_fields_from_current_row + +local converters = utilities.sql.converters +local deserialize = utilities.sql.deserialize + +local tostring = tostring +local tonumber = tonumber +local booleanstring = string.booleanstring + +%s + +return function(result) + if not result then + return { } + end + local nofrows = result.nofrows or 0 + if nofrows == 0 then + return { } + end + local noffields = result.noffields or 0 + local target = { } -- no %s needed here + result = result._result_ + for i=1,nofrows do + -- local row = mysql_fetch_row(result) + -- local len = mysql_fetch_lengths(result) + -- local cells = util_unpackbytearray(row,noffields,len) + local cells = util_mysql_fetch_fields_from_current_row(result) + target[%s] = { + %s + } + end + return target +end +]] + +local celltemplate = "cells[%s]" + +methods.swiglib = { + runner = function() end, -- never called + execute = execute, + initialize = initialize, -- returns session + usesfiles = false, + wraptemplate = wraptemplate, + celltemplate = celltemplate, +} diff --git a/tex/context/base/util-sql-loggers.lua b/tex/context/base/util-sql-loggers.lua index 7fceb8032..33071f2e3 100644 --- a/tex/context/base/util-sql-loggers.lua +++ b/tex/context/base/util-sql-loggers.lua @@ -1,277 +1,277 @@ -if not modules then modules = { } end modules ['util-sql-loggers'] = { - version = 1.001, - comment = "companion to lmx-*", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This is experimental code and currently part of the base installation simply --- because it's easier to dirtribute this way. Eventually it will be documented --- and the related scripts will show up as well. 
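-- Before the loggers module: the wraptemplate above has three %s slots that
-- the generic converter machinery (defined elsewhere) fills in: injected
-- helper code, the row index for target[%s], and one celltemplate-based
-- assignment per field. A rough sketch of the generated function, with
-- hypothetical field names:

local mysql = require("swigluamysql")
local util_mysql_fetch_fields_from_current_row
      = mysql.util_mysql_fetch_fields_from_current_row

return function(result)
    if not result then
        return { }
    end
    local nofrows = result.nofrows or 0
    if nofrows == 0 then
        return { }
    end
    local target = { }
    result = result._result_
    for i=1,nofrows do
        local cells = util_mysql_fetch_fields_from_current_row(result)
        target[i] = {                  -- the target[%s] slot, filled with the row index
            id   = tonumber(cells[1]), -- hypothetical fields, each one a "cells[%s]" expression
            name = cells[2],
        }
    end
    return target
end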
- -local tonumber = tonumber -local format = string.format -local concat = table.concat -local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime -local random = math.random - -local sql = utilities.sql -local loggers = { } -sql.loggers = loggers - -local trace_sql = false trackers.register("sql.loggers.trace", function(v) trace_sql = v end) -local report = logs.reporter("sql","loggers") - -loggers.newtoken = sql.tokens.new -local makeconverter = sql.makeconverter - -local function checkeddb(presets,datatable) - return sql.usedatabase(presets,datatable or presets.datatable or "loggers") -end - -loggers.usedb = checkeddb - -local totype = { - ["error"] = 1, [1] = 1, ["1"] = 1, - ["warning"] = 2, [2] = 2, ["2"] = 2, - ["debug"] = 3, [3] = 3, ["3"] = 3, - ["info"] = 4, [4] = 4, ["4"] = 4, -} - -local fromtype = { - ["error"] = "error", [1] = "error", ["1"] = "error", - ["warning"] = "warning", [2] = "warning", ["2"] = "warning", - ["debug"] = "debug", [3] = "debug", ["3"] = "debug", - ["info"] = "info", [4] = "info", ["4"] = "info", -} - -table.setmetatableindex(totype, function() return 4 end) -table.setmetatableindex(fromtype,function() return "info" end) - -loggers.totype = totype -loggers.fromtype = fromtype - -local template =[[ - CREATE TABLE IF NOT EXISTS %basename% ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `time` int(11) NOT NULL, - `type` int(11) NOT NULL, - `action` varchar(15) NOT NULL, - `data` longtext, - PRIMARY KEY (`id`), - UNIQUE KEY `id_unique_key` (`id`) - ) - DEFAULT CHARSET = utf8 ; -]] - -function loggers.createdb(presets,datatable) - - local db = checkeddb(presets,datatable) - - db.execute { - template = template, - variables = { - basename = db.basename, - }, - } - - report("datatable %a created in %a",db.name,db.base) - - return db - -end - -local template =[[ - DROP TABLE IF EXISTS %basename% ; -]] - -function loggers.deletedb(presets,datatable) - - local db = checkeddb(presets,datatable) - - db.execute { - template = template, - variables = { - basename = db.basename, - }, - } - - report("datatable %a removed in %a",db.name,db.base) - -end - -local template =[[ - INSERT INTO %basename% ( - `time`, - `type`, - `action`, - `data` - ) VALUES ( - %time%, - %type%, - '%action%', - '%[data]%' - ) ; -]] - -function loggers.save(db,data) -- beware, we pass type and action in the data (saves a table) - - if data then - - local time = ostime() - local kind = totype[data.type] - local action = data.action or "unknown" - - data.type = nil - data.action = nil - - db.execute { - template = template, - variables = { - basename = db.basename, - time = ostime(), - type = kind, - action = action, - data = data and db.serialize(data,"return") or "", - }, - } - - end - -end - --- local template =[[ --- REMOVE FROM --- %basename% --- WHERE --- `token` = '%token%' ; --- ]] --- --- function loggers.remove(db,token) --- --- db.execute { --- template = template, --- variables = { --- basename = db.basename, --- token = token, --- }, --- } --- --- if trace_sql then --- report("removed: %s",token) --- end --- --- end - -local template_nop =[[ - SELECT - `time`, - `type`, - `action`, - `data` - FROM - %basename% - ORDER BY - `time`, `type`, `action` - DESC LIMIT - %limit% ; -]] - -local template_yes =[[ - SELECT - `time`, - `type`, - `action`, - `data` - FROM - %basename% - %WHERE% - ORDER BY - `time`, `type`, `action` - DESC LIMIT - %limit% ; -]] - -local converter = makeconverter { - -- { name = "time", type = os.localtime }, - { name = "time", type = "number" }, - { name = "type", 
type = fromtype }, - { name = "action", type = "string" }, - { name = "data", type = "deserialize" }, -} - -function loggers.collect(db,specification) - - specification = specification or { } - - local start = specification.start - local stop = specification.stop - local limit = specification.limit or 100 - local kind = specification.type - local action = specification.action - - local filtered = start or stop - - local where = { } - - if filtered then - local today = os.date("*t") - - if type(start) ~= "table" then - start = { } - end - start = os.time { - day = start.day or today.day, - month = start.month or today.month, - year = start.year or today.year, - hour = start.hour or 0, - minute = start.minute or 0, - second = start.second or 0, - isdst = true, - } - - if type(stop) ~= "table" then - stop = { } - end - stop = os.time { - day = stop.day or today.day, - month = stop.month or today.month, - year = stop.year or today.year, - hour = stop.hour or 24, - minute = stop.minute or 0, - second = stop.second or 0, - isdst = true, - } - - -- report("filter: %s => %s",start,stop) - - where[#where+1] = format("`time` BETWEEN %s AND %s",start,stop) - - end - - if kind then - where[#where+1] = format("`type` = %s",totype[kind]) - end - - if action then - where[#where+1] = format("`action` = '%s'",action) - end - - local records = db.execute { - template = filtered and template_yes or template_nop, - converter = converter, - variables = { - basename = db.basename, - limit = limit, - WHERE = #where > 0 and format("WHERE\n%s",concat(where," AND ")) or "", - }, - } - - if trace_sql then - report("collected: %s loggers",#records) - end - - return records, keys - -end +if not modules then modules = { } end modules ['util-sql-loggers'] = { + version = 1.001, + comment = "companion to lmx-*", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This is experimental code and currently part of the base installation simply +-- because it's easier to dirtribute this way. Eventually it will be documented +-- and the related scripts will show up as well. 
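-- As a usage sketch for the logger functions listed below: createdb returns a
-- database handle, save takes type and action inside the data table, and
-- collect filters on type, action and an optional start/stop window. The
-- presets fields and all values here are assumptions, not defaults of this
-- module.

local loggers = utilities.sql.loggers

local db = loggers.createdb {
    host     = "localhost",
    database = "test",
    username = "root",
    password = "secret",
}

loggers.save(db, { type = "warning", action = "login", user = "someone" })

local records = loggers.collect(db, {
    limit = 10,
    type  = "warning",
    start = { hour = 0 },              -- missing day/month/year default to today
})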
+ +local tonumber = tonumber +local format = string.format +local concat = table.concat +local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime +local random = math.random + +local sql = utilities.sql +local loggers = { } +sql.loggers = loggers + +local trace_sql = false trackers.register("sql.loggers.trace", function(v) trace_sql = v end) +local report = logs.reporter("sql","loggers") + +loggers.newtoken = sql.tokens.new +local makeconverter = sql.makeconverter + +local function checkeddb(presets,datatable) + return sql.usedatabase(presets,datatable or presets.datatable or "loggers") +end + +loggers.usedb = checkeddb + +local totype = { + ["error"] = 1, [1] = 1, ["1"] = 1, + ["warning"] = 2, [2] = 2, ["2"] = 2, + ["debug"] = 3, [3] = 3, ["3"] = 3, + ["info"] = 4, [4] = 4, ["4"] = 4, +} + +local fromtype = { + ["error"] = "error", [1] = "error", ["1"] = "error", + ["warning"] = "warning", [2] = "warning", ["2"] = "warning", + ["debug"] = "debug", [3] = "debug", ["3"] = "debug", + ["info"] = "info", [4] = "info", ["4"] = "info", +} + +table.setmetatableindex(totype, function() return 4 end) +table.setmetatableindex(fromtype,function() return "info" end) + +loggers.totype = totype +loggers.fromtype = fromtype + +local template =[[ + CREATE TABLE IF NOT EXISTS %basename% ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `time` int(11) NOT NULL, + `type` int(11) NOT NULL, + `action` varchar(15) NOT NULL, + `data` longtext, + PRIMARY KEY (`id`), + UNIQUE KEY `id_unique_key` (`id`) + ) + DEFAULT CHARSET = utf8 ; +]] + +function loggers.createdb(presets,datatable) + + local db = checkeddb(presets,datatable) + + db.execute { + template = template, + variables = { + basename = db.basename, + }, + } + + report("datatable %a created in %a",db.name,db.base) + + return db + +end + +local template =[[ + DROP TABLE IF EXISTS %basename% ; +]] + +function loggers.deletedb(presets,datatable) + + local db = checkeddb(presets,datatable) + + db.execute { + template = template, + variables = { + basename = db.basename, + }, + } + + report("datatable %a removed in %a",db.name,db.base) + +end + +local template =[[ + INSERT INTO %basename% ( + `time`, + `type`, + `action`, + `data` + ) VALUES ( + %time%, + %type%, + '%action%', + '%[data]%' + ) ; +]] + +function loggers.save(db,data) -- beware, we pass type and action in the data (saves a table) + + if data then + + local time = ostime() + local kind = totype[data.type] + local action = data.action or "unknown" + + data.type = nil + data.action = nil + + db.execute { + template = template, + variables = { + basename = db.basename, + time = ostime(), + type = kind, + action = action, + data = data and db.serialize(data,"return") or "", + }, + } + + end + +end + +-- local template =[[ +-- REMOVE FROM +-- %basename% +-- WHERE +-- `token` = '%token%' ; +-- ]] +-- +-- function loggers.remove(db,token) +-- +-- db.execute { +-- template = template, +-- variables = { +-- basename = db.basename, +-- token = token, +-- }, +-- } +-- +-- if trace_sql then +-- report("removed: %s",token) +-- end +-- +-- end + +local template_nop =[[ + SELECT + `time`, + `type`, + `action`, + `data` + FROM + %basename% + ORDER BY + `time`, `type`, `action` + DESC LIMIT + %limit% ; +]] + +local template_yes =[[ + SELECT + `time`, + `type`, + `action`, + `data` + FROM + %basename% + %WHERE% + ORDER BY + `time`, `type`, `action` + DESC LIMIT + %limit% ; +]] + +local converter = makeconverter { + -- { name = "time", type = os.localtime }, + { name = "time", type = "number" }, + { name = "type", 
type = fromtype }, + { name = "action", type = "string" }, + { name = "data", type = "deserialize" }, +} + +function loggers.collect(db,specification) + + specification = specification or { } + + local start = specification.start + local stop = specification.stop + local limit = specification.limit or 100 + local kind = specification.type + local action = specification.action + + local filtered = start or stop + + local where = { } + + if filtered then + local today = os.date("*t") + + if type(start) ~= "table" then + start = { } + end + start = os.time { + day = start.day or today.day, + month = start.month or today.month, + year = start.year or today.year, + hour = start.hour or 0, + minute = start.minute or 0, + second = start.second or 0, + isdst = true, + } + + if type(stop) ~= "table" then + stop = { } + end + stop = os.time { + day = stop.day or today.day, + month = stop.month or today.month, + year = stop.year or today.year, + hour = stop.hour or 24, + minute = stop.minute or 0, + second = stop.second or 0, + isdst = true, + } + + -- report("filter: %s => %s",start,stop) + + where[#where+1] = format("`time` BETWEEN %s AND %s",start,stop) + + end + + if kind then + where[#where+1] = format("`type` = %s",totype[kind]) + end + + if action then + where[#where+1] = format("`action` = '%s'",action) + end + + local records = db.execute { + template = filtered and template_yes or template_nop, + converter = converter, + variables = { + basename = db.basename, + limit = limit, + WHERE = #where > 0 and format("WHERE\n%s",concat(where," AND ")) or "", + }, + } + + if trace_sql then + report("collected: %s loggers",#records) + end + + return records, keys + +end diff --git a/tex/context/base/util-sql-sessions.lua b/tex/context/base/util-sql-sessions.lua index 76bb91962..d13293691 100644 --- a/tex/context/base/util-sql-sessions.lua +++ b/tex/context/base/util-sql-sessions.lua @@ -1,349 +1,349 @@ -if not modules then modules = { } end modules ['util-sql-sessions'] = { - version = 1.001, - comment = "companion to lmx-*", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This is experimental code and currently part of the base installation simply --- because it's easier to dirtribute this way. Eventually it will be documented --- and the related scripts will show up as well. - --- maybe store threshold in session (in seconds) - -local tonumber = tonumber -local format = string.format -local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime -local random = math.random - --- In older frameworks we kept a session table in memory. This time we --- follow a route where we store session data in a sql table. Each session --- has a token (similar to what we do on q2p and pod services), a data --- blob which is just a serialized lua table (we could consider a dump instead) --- and two times: the creation and last accessed time. The first one is handy --- for statistics and the second one for cleanup. Both are just numbers so that --- we don't have to waste code on conversions. Anyhow, we provide variants so that --- we can always choose what is best. 
- -local sql = utilities.sql -local sessions = { } -sql.sessions = sessions - -local trace_sql = false trackers.register("sql.sessions.trace", function(v) trace_sql = v end) -local report = logs.reporter("sql","sessions") - -sessions.newtoken = sql.tokens.new - -local function checkeddb(presets,datatable) - return sql.usedatabase(presets,datatable or presets.datatable or "sessions") -end - -sessions.usedb = checkeddb - -local template =[[ - CREATE TABLE IF NOT EXISTS %basename% ( - `token` varchar(50) NOT NULL, - `data` longtext NOT NULL, - `created` int(11) NOT NULL, - `accessed` int(11) NOT NULL, - UNIQUE KEY `token_unique_key` (`token`) - ) - DEFAULT CHARSET = utf8 ; -]] - -function sessions.createdb(presets,datatable) - - local db = checkeddb(presets,datatable) - - db.execute { - template = template, - variables = { - basename = db.basename, - }, - } - - report("datatable %a created in %a",db.name,db.base) - - return db - -end - -local template =[[ - DROP TABLE IF EXISTS %basename% ; -]] - -function sessions.deletedb(presets,datatable) - - local db = checkeddb(presets,datatable) - - db.execute { - template = template, - variables = { - basename = db.basename, - }, - } - - report("datatable %a removed in %a",db.name,db.base) - -end - -local template =[[ - INSERT INTO %basename% ( - `token`, - `created`, - `accessed`, - `data` - ) VALUES ( - '%token%', - %time%, - %time%, - '%[data]%' - ) ; -]] - -function sessions.create(db,data) - - local token = sessions.newtoken() - local time = ostime() - - db.execute { - template = template, - variables = { - basename = db.basename, - token = token, - time = time, - data = db.serialize(data or { },"return") - }, - } - - if trace_sql then - report("created: %s at %s",token,osfulltime(time)) - end - - return { - token = token, - created = time, - accessed = time, - data = data, - } -end - -local template =[[ - UPDATE - %basename% - SET - `data` = '%[data]%', - `accessed` = %time% - WHERE - `token` = '%token%' ; -]] - -function sessions.save(db,session) - - local time = ostime() - local data = db.serialize(session.data or { },"return") - local token = session.token - - session.accessed = time - - db.execute { - template = template, - variables = { - basename = db.basename, - token = token, - time = ostime(), - data = data, - }, - } - - if trace_sql then - report("saved: %s at %s",token,osfulltime(time)) - end - - return session -end - -local template = [[ - UPDATE - %basename% - SET - `accessed` = %time% - WHERE - `token` = '%token%' ; -]] - -function sessions.touch(db,token) - - db.execute { - template = template, - variables = { - basename = db.basename, - token = token, - time = ostime(), - }, - } - -end - -local template = [[ - UPDATE - %basename% - SET - `accessed` = %time% - WHERE - `token` = '%token%' ; - SELECT - * - FROM - %basename% - WHERE - `token` = '%token%' ; -]] - -function sessions.restore(db,token) - - local records, keys = db.execute { - template = template, - variables = { - basename = db.basename, - token = token, - time = ostime(), - }, - } - - local record = records and records[1] - - if record then - if trace_sql then - report("restored: %s",token) - end - record.data = db.deserialize(record.data or "") - return record, keys - elseif trace_sql then - report("unknown: %s",token) - end - -end - -local template =[[ - DELETE FROM - %basename% - WHERE - `token` = '%token%' ; -]] - -function sessions.remove(db,token) - - db.execute { - template = template, - variables = { - basename = db.basename, - token = token, - }, - } - - if 
trace_sql then - report("removed: %s",token) - end - -end - -local template_collect_yes =[[ - SELECT - * - FROM - %basename% - ORDER BY - `created` ; -]] - -local template_collect_nop =[[ - SELECT - `accessed`, - `created`, - `accessed`, - `token` - FROM - %basename% - ORDER BY - `created` ; -]] - -function sessions.collect(db,nodata) - - local records, keys = db.execute { - template = nodata and template_collect_nop or template_collect_yes, - variables = { - basename = db.basename, - }, - } - - if not nodata then - db.unpackdata(records) - end - - if trace_sql then - report("collected: %s sessions",#records) - end - - return records, keys - -end - -local template_cleanup_yes =[[ - SELECT - * - FROM - %basename% - WHERE - `accessed` < %time% - ORDER BY - `created` ; - DELETE FROM - %basename% - WHERE - `accessed` < %time% ; -]] - -local template_cleanup_nop =[[ - SELECT - `accessed`, - `created`, - `accessed`, - `token` - FROM - %basename% - WHERE - `accessed` < %time% - ORDER BY - `created` ; - DELETE FROM - %basename% - WHERE - `accessed` < %time% ; -]] - -function sessions.cleanupdb(db,delta,nodata) - - local time = ostime() - - local records, keys = db.execute { - template = nodata and template_cleanup_nop or template_cleanup_yes, - variables = { - basename = db.basename, - time = time - delta - }, - } - - if not nodata then - db.unpackdata(records) - end - - if trace_sql then - report("cleaned: %s seconds before %s",delta,osfulltime(time)) - end - - return records, keys - -end +if not modules then modules = { } end modules ['util-sql-sessions'] = { + version = 1.001, + comment = "companion to lmx-*", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This is experimental code and currently part of the base installation simply +-- because it's easier to dirtribute this way. Eventually it will be documented +-- and the related scripts will show up as well. + +-- maybe store threshold in session (in seconds) + +local tonumber = tonumber +local format = string.format +local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime +local random = math.random + +-- In older frameworks we kept a session table in memory. This time we +-- follow a route where we store session data in a sql table. Each session +-- has a token (similar to what we do on q2p and pod services), a data +-- blob which is just a serialized lua table (we could consider a dump instead) +-- and two times: the creation and last accessed time. The first one is handy +-- for statistics and the second one for cleanup. Both are just numbers so that +-- we don't have to waste code on conversions. Anyhow, we provide variants so that +-- we can always choose what is best. 
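-- The comment above describes the design: one row per session, keyed by
-- token, with a serialized data blob plus created and accessed times. A
-- hypothetical round trip with the functions defined below (presets stands
-- for connection presets defined elsewhere):

local sessions = utilities.sql.sessions

local db      = sessions.createdb(presets)            -- presets is an assumption here
local session = sessions.create(db, { user = "someone" })

session.data.visits = 1
sessions.save(db, session)                            -- also bumps the accessed time

local restored = sessions.restore(db, session.token)  -- data comes back deserialized
sessions.cleanupdb(db, 24 * 60 * 60)                  -- drop sessions idle for a day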
+ +local sql = utilities.sql +local sessions = { } +sql.sessions = sessions + +local trace_sql = false trackers.register("sql.sessions.trace", function(v) trace_sql = v end) +local report = logs.reporter("sql","sessions") + +sessions.newtoken = sql.tokens.new + +local function checkeddb(presets,datatable) + return sql.usedatabase(presets,datatable or presets.datatable or "sessions") +end + +sessions.usedb = checkeddb + +local template =[[ + CREATE TABLE IF NOT EXISTS %basename% ( + `token` varchar(50) NOT NULL, + `data` longtext NOT NULL, + `created` int(11) NOT NULL, + `accessed` int(11) NOT NULL, + UNIQUE KEY `token_unique_key` (`token`) + ) + DEFAULT CHARSET = utf8 ; +]] + +function sessions.createdb(presets,datatable) + + local db = checkeddb(presets,datatable) + + db.execute { + template = template, + variables = { + basename = db.basename, + }, + } + + report("datatable %a created in %a",db.name,db.base) + + return db + +end + +local template =[[ + DROP TABLE IF EXISTS %basename% ; +]] + +function sessions.deletedb(presets,datatable) + + local db = checkeddb(presets,datatable) + + db.execute { + template = template, + variables = { + basename = db.basename, + }, + } + + report("datatable %a removed in %a",db.name,db.base) + +end + +local template =[[ + INSERT INTO %basename% ( + `token`, + `created`, + `accessed`, + `data` + ) VALUES ( + '%token%', + %time%, + %time%, + '%[data]%' + ) ; +]] + +function sessions.create(db,data) + + local token = sessions.newtoken() + local time = ostime() + + db.execute { + template = template, + variables = { + basename = db.basename, + token = token, + time = time, + data = db.serialize(data or { },"return") + }, + } + + if trace_sql then + report("created: %s at %s",token,osfulltime(time)) + end + + return { + token = token, + created = time, + accessed = time, + data = data, + } +end + +local template =[[ + UPDATE + %basename% + SET + `data` = '%[data]%', + `accessed` = %time% + WHERE + `token` = '%token%' ; +]] + +function sessions.save(db,session) + + local time = ostime() + local data = db.serialize(session.data or { },"return") + local token = session.token + + session.accessed = time + + db.execute { + template = template, + variables = { + basename = db.basename, + token = token, + time = ostime(), + data = data, + }, + } + + if trace_sql then + report("saved: %s at %s",token,osfulltime(time)) + end + + return session +end + +local template = [[ + UPDATE + %basename% + SET + `accessed` = %time% + WHERE + `token` = '%token%' ; +]] + +function sessions.touch(db,token) + + db.execute { + template = template, + variables = { + basename = db.basename, + token = token, + time = ostime(), + }, + } + +end + +local template = [[ + UPDATE + %basename% + SET + `accessed` = %time% + WHERE + `token` = '%token%' ; + SELECT + * + FROM + %basename% + WHERE + `token` = '%token%' ; +]] + +function sessions.restore(db,token) + + local records, keys = db.execute { + template = template, + variables = { + basename = db.basename, + token = token, + time = ostime(), + }, + } + + local record = records and records[1] + + if record then + if trace_sql then + report("restored: %s",token) + end + record.data = db.deserialize(record.data or "") + return record, keys + elseif trace_sql then + report("unknown: %s",token) + end + +end + +local template =[[ + DELETE FROM + %basename% + WHERE + `token` = '%token%' ; +]] + +function sessions.remove(db,token) + + db.execute { + template = template, + variables = { + basename = db.basename, + token = token, + }, + } + + if 
trace_sql then + report("removed: %s",token) + end + +end + +local template_collect_yes =[[ + SELECT + * + FROM + %basename% + ORDER BY + `created` ; +]] + +local template_collect_nop =[[ + SELECT + `accessed`, + `created`, + `accessed`, + `token` + FROM + %basename% + ORDER BY + `created` ; +]] + +function sessions.collect(db,nodata) + + local records, keys = db.execute { + template = nodata and template_collect_nop or template_collect_yes, + variables = { + basename = db.basename, + }, + } + + if not nodata then + db.unpackdata(records) + end + + if trace_sql then + report("collected: %s sessions",#records) + end + + return records, keys + +end + +local template_cleanup_yes =[[ + SELECT + * + FROM + %basename% + WHERE + `accessed` < %time% + ORDER BY + `created` ; + DELETE FROM + %basename% + WHERE + `accessed` < %time% ; +]] + +local template_cleanup_nop =[[ + SELECT + `accessed`, + `created`, + `accessed`, + `token` + FROM + %basename% + WHERE + `accessed` < %time% + ORDER BY + `created` ; + DELETE FROM + %basename% + WHERE + `accessed` < %time% ; +]] + +function sessions.cleanupdb(db,delta,nodata) + + local time = ostime() + + local records, keys = db.execute { + template = nodata and template_cleanup_nop or template_cleanup_yes, + variables = { + basename = db.basename, + time = time - delta + }, + } + + if not nodata then + db.unpackdata(records) + end + + if trace_sql then + report("cleaned: %s seconds before %s",delta,osfulltime(time)) + end + + return records, keys + +end diff --git a/tex/context/base/util-sql-tickets.lua b/tex/context/base/util-sql-tickets.lua index 5e958299d..65eb69bae 100644 --- a/tex/context/base/util-sql-tickets.lua +++ b/tex/context/base/util-sql-tickets.lua @@ -1,772 +1,772 @@ -if not modules then modules = { } end modules ['util-sql-tickets'] = { - version = 1.001, - comment = "companion to lmx-*", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- TODO: MAKE SOME INTO STORED PROCUDURES - --- This is experimental code and currently part of the base installation simply --- because it's easier to distribute this way. Eventually it will be documented --- and the related scripts will show up as well. - -local tonumber = tonumber -local format = string.format -local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime -local random = math.random -local concat = table.concat - -local sql = utilities.sql -local tickets = { } -sql.tickets = tickets - -local trace_sql = false trackers.register("sql.tickets.trace", function(v) trace_sql = v end) -local report = logs.reporter("sql","tickets") - -local serialize = sql.serialize -local deserialize = sql.deserialize -local execute = sql.execute - -tickets.newtoken = sql.tokens.new - --- Beware as an index can be a string or a number, we will create --- a combination of hash and index. 
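-- The ticket rows defined below carry a numeric status; the numbers follow
-- from the statustags list (0 unknown, 1 pending, 2 busy, 3 finished,
-- 4 dependent, 7 error, 8 deleted), and everything at or above error counts
-- as rubish for the cleanup queries. Purely as illustration:

local tickets = utilities.sql.tickets

print(tickets.status.pending)     -- 1
print(tickets.status.error)       -- 7
print(tickets.statustags[3])      -- finished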
- -local statustags = { [0] = - "unknown", - "pending", - "busy", - "finished", - "dependent", -- same token but different subtoken (so we only need to find the first) - "reserved-1", - "reserved-2", - "error", - "deleted", -} - -local status = table.swapped(statustags) -tickets.status = status -tickets.statustags = statustags - -local s_unknown = status.unknown -local s_pending = status.pending -local s_busy = status.busy -local s_finished = status.finished -local s_dependent = status.dependent -local s_error = status.error -local s_deleted = status.deleted - -local s_rubish = s_error -- and higher - -local function checkeddb(presets,datatable) - return sql.usedatabase(presets,datatable or presets.datatable or "tickets") -end - -tickets.usedb = checkeddb - -local template =[[ - CREATE TABLE IF NOT EXISTS %basename% ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `token` varchar(50) NOT NULL, - `subtoken` INT(11) NOT NULL, - `created` int(11) NOT NULL, - `accessed` int(11) NOT NULL, - `category` int(11) NOT NULL, - `status` int(11) NOT NULL, - `usertoken` varchar(50) NOT NULL, - `data` longtext NOT NULL, - `comment` longtext NOT NULL, - - PRIMARY KEY (`id`), - UNIQUE INDEX `id_unique_index` (`id` ASC), - KEY `token_unique_key` (`token`) - ) - DEFAULT CHARSET = utf8 ; -]] - -function tickets.createdb(presets,datatable) - local db = checkeddb(presets,datatable) - local data, keys = db.execute { - template = template, - variables = { - basename = db.basename, - }, - } - - report("datatable %a created in %a",db.name,db.base) - - return db - -end - -local template =[[ - DROP TABLE IF EXISTS %basename% ; -]] - -function tickets.deletedb(presets,datatable) - - local db = checkeddb(presets,datatable) - - local data, keys = db.execute { - template = template, - variables = { - basename = db.basename, - }, - } - - report("datatable %a removed in %a",db.name,db.base) - -end - -local template_push =[[ - INSERT INTO %basename% ( - `token`, - `subtoken`, - `created`, - `accessed`, - `status`, - `category`, - `usertoken`, - `data`, - `comment` - ) VALUES ( - '%token%', - %subtoken%, - %time%, - %time%, - %status%, - %category%, - '%usertoken%', - '%[data]%', - '%[comment]%' - ) ; -]] - -local template_fetch =[[ - SELECT - * - FROM - %basename% - WHERE - `token` = '%token%' - AND - `subtoken` = '%subtoken%' - ; -]] - -function tickets.create(db,ticket) - - -- We assume a unique token .. if not we're toast anyway. We used to lock and - -- get the last id etc etc but there is no real need for that. - - -- we could check for dependent here but we don't want the lookup - - local token = ticket.token or tickets.newtoken() - local time = ostime() - local status = ticket.status - local category = ticket.category or 0 - local subtoken = ticket.subtoken or 0 - local usertoken = ticket.usertoken or "" - local comment = ticket.comment or "" - - status = not status and subtoken > 1 and s_dependent or s_pending - - local result, message = db.execute { - template = template_push, - variables = { - basename = db.basename, - token = token, - subtoken = subtoken, - time = time, - status = status, - category = category, - usertoken = usertoken, - data = db.serialize(ticket.data or { },"return"), - comment = comment, - }, - } - - -- We could stick to only fetching the id and make the table here - -- but we're not pushing that many tickets so we can as well follow - -- the lazy approach and fetch the whole. 
- - local result, message = db.execute { - template = template_fetch, - variables = { - basename = db.basename, - token = token, - subtoken = subtoken, - }, - } - - if result and #result > 0 then - if trace_sql then - report("created: %s at %s",token,osfulltime(time)) - end - return result[1] - else - report("failed: %s at %s",token,osfulltime(time)) - end - -end - -local template =[[ - UPDATE - %basename% - SET - `data` = '%[data]%', - `status` = %status%, - `accessed` = %time% - WHERE - `id` = %id% ; -]] - -function tickets.save(db,ticket) - - local time = ostime() - local data = db.serialize(ticket.data or { },"return") - local status = ticket.status or s_error - --- print("SETTING") --- inspect(data) - - ticket.status = status - ticket.accessed = time - - db.execute { - template = template, - variables = { - basename = db.basename, - id = ticket.id, - time = ostime(), - status = status, - data = data, - }, - } - - if trace_sql then - report("saved: id %s, time %s",id,osfulltime(time)) - end - - return ticket -end - -local template =[[ - UPDATE - %basename% - SET - `accessed` = %time% - WHERE - `token` = '%token%' ; - - SELECT - * - FROM - %basename% - WHERE - `id` = %id% ; -]] - -function tickets.restore(db,id) - - local record, keys = db.execute { - template = template, - variables = { - basename = db.basename, - id = id, - time = ostime(), - }, - } - - local record = record and record[1] - - if record then - if trace_sql then - report("restored: id %s",id) - end - record.data = db.deserialize(record.data or "") - return record - elseif trace_sql then - report("unknown: id %s",id) - end - -end - -local template =[[ - DELETE FROM - %basename% - WHERE - `id` = %id% ; -]] - -function tickets.remove(db,id) - - db.execute { - template = template, - variables = { - basename = db.basename, - id = id, - }, - } - - if trace_sql then - report("removed: id %s",id) - end - -end - -local template_yes =[[ - SELECT - * - FROM - %basename% - ORDER BY - `id` ; -]] - -local template_nop =[[ - SELECT - `created`, - `usertoken`, - `accessed`, - `status` - FROM - %basename% - ORDER BY - `id` ; -]] - -function tickets.collect(db,nodata) - - local records, keys = db.execute { - template = nodata and template_nop or template_yes, - variables = { - basename = db.basename, - token = token, - }, - } - - if not nodata then - db.unpackdata(records) - end - - if trace_sql then - report("collected: %s tickets",#records) - end - - return records, keys - -end - --- We aleays keep the last select in the execute so one can have --- an update afterwards. - -local template =[[ - DELETE FROM - %basename% - WHERE - `accessed` < %time% OR `status` >= %rubish% ; -]] - -local template_cleanup_yes =[[ - SELECT - * - FROM - %basename% - WHERE - `accessed` < %time% - ORDER BY - `id` ; -]] .. template - -local template_cleanup_nop =[[ - SELECT - `accessed`, - `created`, - `accessed`, - `token` - `usertoken` - FROM - %basename% - WHERE - `accessed` < %time% - ORDER BY - `id` ; -]] .. 
template - -function tickets.cleanupdb(db,delta,nodata) -- maybe delta in db - - local time = delta and (ostime() - delta) or 0 - - local records, keys = db.execute { - template = nodata and template_cleanup_nop or template_cleanup_yes, - variables = { - basename = db.basename, - time = time, - rubish = s_rubish, - }, - } - - if not nodata then - db.unpackdata(records) - end - - if trace_sql then - report("cleaned: %s seconds before %s",delta,osfulltime(time)) - end - - return records, keys - -end - --- status related functions - -local template =[[ - SELECT - `status` - FROM - %basename% - WHERE - `token` = '%token%' - ORDER BY - `id` - ; -]] - -function tickets.getstatus(db,token) - - local record, keys = db.execute { - template = template, - variables = { - basename = db.basename, - token = token, - }, - } - - local record = record and record[1] - - return record and record.status or s_unknown - -end - -local template =[[ - SELECT - `status` - FROM - %basename% - WHERE - `status` >= %rubish% OR `accessed` < %time% - ORDER BY - `id` - ; -]] - -function tickets.getobsolete(db,delta) - - local time = delta and (ostime() - delta) or 0 - - local records = db.execute { - template = template, - variables = { - basename = db.basename, - time = time, - rubish = s_rubish, - }, - } - - db.unpackdata(records) - - return records - -end - -local template =[[ - SELECT - `id` - FROM - %basename% - WHERE - `status` = %status% - LIMIT - 1 ; -]] - -function tickets.hasstatus(db,status) - - local records = db.execute { - template = template, - variables = { - basename = db.basename, - status = status or s_unknown, - }, - } - - return records and #records > 0 or false - -end - -local template =[[ - UPDATE - %basename% - SET - `status` = %status%, - `accessed` = %time% - WHERE - `id` = %id% ; -]] - -function tickets.setstatus(db,id,status) - - db.execute { - template = template, - variables = { - basename = db.basename, - id = id, - time = ostime(), - status = status or s_error, - }, - } - -end - -local template =[[ - DELETE FROM - %basename% - WHERE - `status` IN (%status%) ; -]] - -function tickets.prunedb(db,status) - - if type(status) == "table" then - status = concat(status,",") - end - - local data, keys = db.execute { - template = template, - variables = { - basename = db.basename, - status = status or s_unknown, - }, - } - - if trace_sql then - report("pruned: status %s removed",status) - end - -end - --- START TRANSACTION ; ... COMMIT ; --- LOCK TABLES %basename% WRITE ; ... 
UNLOCK TABLES ; - -local template_a = [[ - SET - @last_ticket_token = '' ; - UPDATE - %basename% - SET - `token` = (@last_ticket_token := `token`), - `status` = %newstatus%, - `accessed` = %time% - WHERE - `status` = %status% - ORDER BY - `id` - LIMIT - 1 - ; - SELECT - * - FROM - %basename% - WHERE - `token` = @last_ticket_token - ORDER BY - `id` - ; -]] - -local template_b = [[ - SELECT - * - FROM - tickets - WHERE - `status` = %status% - ORDER BY - `id` - LIMIT - 1 - ; -]] - -function tickets.getfirstwithstatus(db,status,newstatus) - - local records - - if type(newstatus) == "number" then -- todo: also accept string - - records = db.execute { - template = template_a, - variables = { - basename = db.basename, - status = status or s_pending, - newstatus = newstatus, - time = ostime(), - }, - } - - - else - - records = db.execute { - template = template_b, - variables = { - basename = db.basename, - status = status or s_pending, - }, - } - - end - - if type(records) == "table" and #records > 0 then - - for i=1,#records do - local record = records[i] - record.data = db.deserialize(record.data or "") - record.status = newstatus or s_busy - end - - return records - - end -end - --- The next getter assumes that we have a sheduler running so that there is --- one process in charge of changing the status. - -local template = [[ - SET - @last_ticket_token = '' ; - UPDATE - %basename% - SET - `token` = (@last_ticket_token := `token`), - `status` = %newstatus%, - `accessed` = %time% - WHERE - `status` = %status% - ORDER BY - `id` - LIMIT - 1 - ; - SELECT - @last_ticket_token AS `token` - ; -]] - -function tickets.getfirstinqueue(db,status,newstatus) - - local records = db.execute { - template = template, - variables = { - basename = db.basename, - status = status or s_pending, - newstatus = newstatus or s_busy, - time = ostime(), - }, - } - - local token = type(records) == "table" and #records > 0 and records[1].token - - return token ~= "" and token - -end - -local template =[[ - SELECT - * - FROM - %basename% - WHERE - `token` = '%token%' - ORDER BY - `id` ; -]] - -function tickets.getticketsbytoken(db,token) - - local records, keys = db.execute { - template = template, - variables = { - basename = db.basename, - token = token, - }, - } - - db.unpackdata(records) - - return records - -end - -local template =[[ - SELECT - * - FROM - %basename% - WHERE - `usertoken` = '%usertoken%' AND `status` < %rubish% - ORDER BY - `id` ; -]] - -function tickets.getusertickets(db,usertoken) - - -- todo: update accessed - -- todo: get less fields - -- maybe only data for status changed (hard to check) - - local records, keys = db.execute { - template = template, - variables = { - basename = db.basename, - usertoken = usertoken, - rubish = s_rubish, - }, - } - - db.unpackdata(records) - - return records - -end - -local template =[[ - UPDATE - %basename% - SET - `status` = %deleted% - WHERE - `usertoken` = '%usertoken%' ; -]] - -function tickets.removeusertickets(db,usertoken) - - db.execute { - template = template, - variables = { - basename = db.basename, - usertoken = usertoken, - deleted = s_deleted, - }, - } - - if trace_sql then - report("removed: usertoken %s",usertoken) - end - -end +if not modules then modules = { } end modules ['util-sql-tickets'] = { + version = 1.001, + comment = "companion to lmx-*", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- TODO: MAKE SOME INTO STORED PROCUDURES + +-- 
This is experimental code and currently part of the base installation simply +-- because it's easier to distribute this way. Eventually it will be documented +-- and the related scripts will show up as well. + +local tonumber = tonumber +local format = string.format +local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime +local random = math.random +local concat = table.concat + +local sql = utilities.sql +local tickets = { } +sql.tickets = tickets + +local trace_sql = false trackers.register("sql.tickets.trace", function(v) trace_sql = v end) +local report = logs.reporter("sql","tickets") + +local serialize = sql.serialize +local deserialize = sql.deserialize +local execute = sql.execute + +tickets.newtoken = sql.tokens.new + +-- Beware as an index can be a string or a number, we will create +-- a combination of hash and index. + +local statustags = { [0] = + "unknown", + "pending", + "busy", + "finished", + "dependent", -- same token but different subtoken (so we only need to find the first) + "reserved-1", + "reserved-2", + "error", + "deleted", +} + +local status = table.swapped(statustags) +tickets.status = status +tickets.statustags = statustags + +local s_unknown = status.unknown +local s_pending = status.pending +local s_busy = status.busy +local s_finished = status.finished +local s_dependent = status.dependent +local s_error = status.error +local s_deleted = status.deleted + +local s_rubish = s_error -- and higher + +local function checkeddb(presets,datatable) + return sql.usedatabase(presets,datatable or presets.datatable or "tickets") +end + +tickets.usedb = checkeddb + +local template =[[ + CREATE TABLE IF NOT EXISTS %basename% ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `token` varchar(50) NOT NULL, + `subtoken` INT(11) NOT NULL, + `created` int(11) NOT NULL, + `accessed` int(11) NOT NULL, + `category` int(11) NOT NULL, + `status` int(11) NOT NULL, + `usertoken` varchar(50) NOT NULL, + `data` longtext NOT NULL, + `comment` longtext NOT NULL, + + PRIMARY KEY (`id`), + UNIQUE INDEX `id_unique_index` (`id` ASC), + KEY `token_unique_key` (`token`) + ) + DEFAULT CHARSET = utf8 ; +]] + +function tickets.createdb(presets,datatable) + local db = checkeddb(presets,datatable) + local data, keys = db.execute { + template = template, + variables = { + basename = db.basename, + }, + } + + report("datatable %a created in %a",db.name,db.base) + + return db + +end + +local template =[[ + DROP TABLE IF EXISTS %basename% ; +]] + +function tickets.deletedb(presets,datatable) + + local db = checkeddb(presets,datatable) + + local data, keys = db.execute { + template = template, + variables = { + basename = db.basename, + }, + } + + report("datatable %a removed in %a",db.name,db.base) + +end + +local template_push =[[ + INSERT INTO %basename% ( + `token`, + `subtoken`, + `created`, + `accessed`, + `status`, + `category`, + `usertoken`, + `data`, + `comment` + ) VALUES ( + '%token%', + %subtoken%, + %time%, + %time%, + %status%, + %category%, + '%usertoken%', + '%[data]%', + '%[comment]%' + ) ; +]] + +local template_fetch =[[ + SELECT + * + FROM + %basename% + WHERE + `token` = '%token%' + AND + `subtoken` = '%subtoken%' + ; +]] + +function tickets.create(db,ticket) + + -- We assume a unique token .. if not we're toast anyway. We used to lock and + -- get the last id etc etc but there is no real need for that. 
+ + -- we could check for dependent here but we don't want the lookup + + local token = ticket.token or tickets.newtoken() + local time = ostime() + local status = ticket.status + local category = ticket.category or 0 + local subtoken = ticket.subtoken or 0 + local usertoken = ticket.usertoken or "" + local comment = ticket.comment or "" + + status = not status and subtoken > 1 and s_dependent or s_pending + + local result, message = db.execute { + template = template_push, + variables = { + basename = db.basename, + token = token, + subtoken = subtoken, + time = time, + status = status, + category = category, + usertoken = usertoken, + data = db.serialize(ticket.data or { },"return"), + comment = comment, + }, + } + + -- We could stick to only fetching the id and make the table here + -- but we're not pushing that many tickets so we can as well follow + -- the lazy approach and fetch the whole. + + local result, message = db.execute { + template = template_fetch, + variables = { + basename = db.basename, + token = token, + subtoken = subtoken, + }, + } + + if result and #result > 0 then + if trace_sql then + report("created: %s at %s",token,osfulltime(time)) + end + return result[1] + else + report("failed: %s at %s",token,osfulltime(time)) + end + +end + +local template =[[ + UPDATE + %basename% + SET + `data` = '%[data]%', + `status` = %status%, + `accessed` = %time% + WHERE + `id` = %id% ; +]] + +function tickets.save(db,ticket) + + local time = ostime() + local data = db.serialize(ticket.data or { },"return") + local status = ticket.status or s_error + +-- print("SETTING") +-- inspect(data) + + ticket.status = status + ticket.accessed = time + + db.execute { + template = template, + variables = { + basename = db.basename, + id = ticket.id, + time = ostime(), + status = status, + data = data, + }, + } + + if trace_sql then + report("saved: id %s, time %s",id,osfulltime(time)) + end + + return ticket +end + +local template =[[ + UPDATE + %basename% + SET + `accessed` = %time% + WHERE + `token` = '%token%' ; + + SELECT + * + FROM + %basename% + WHERE + `id` = %id% ; +]] + +function tickets.restore(db,id) + + local record, keys = db.execute { + template = template, + variables = { + basename = db.basename, + id = id, + time = ostime(), + }, + } + + local record = record and record[1] + + if record then + if trace_sql then + report("restored: id %s",id) + end + record.data = db.deserialize(record.data or "") + return record + elseif trace_sql then + report("unknown: id %s",id) + end + +end + +local template =[[ + DELETE FROM + %basename% + WHERE + `id` = %id% ; +]] + +function tickets.remove(db,id) + + db.execute { + template = template, + variables = { + basename = db.basename, + id = id, + }, + } + + if trace_sql then + report("removed: id %s",id) + end + +end + +local template_yes =[[ + SELECT + * + FROM + %basename% + ORDER BY + `id` ; +]] + +local template_nop =[[ + SELECT + `created`, + `usertoken`, + `accessed`, + `status` + FROM + %basename% + ORDER BY + `id` ; +]] + +function tickets.collect(db,nodata) + + local records, keys = db.execute { + template = nodata and template_nop or template_yes, + variables = { + basename = db.basename, + token = token, + }, + } + + if not nodata then + db.unpackdata(records) + end + + if trace_sql then + report("collected: %s tickets",#records) + end + + return records, keys + +end + +-- We aleays keep the last select in the execute so one can have +-- an update afterwards. 
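-- The comment above (every multi-statement template ends in a SELECT so that
-- execute still returns a result set) is what makes the queue getters further
-- down work in one round trip. As an illustration only, a polling worker
-- built on this module could look as follows; presets and the processing
-- step are assumptions:

local tickets = utilities.sql.tickets
local status  = tickets.status

local db = tickets.usedb(presets)

-- claim the oldest pending ticket and mark it busy in one round trip
local token = tickets.getfirstinqueue(db, status.pending, status.busy)
if token then
    local list = tickets.getticketsbytoken(db, token)
    for i=1,#list do
        local ticket = list[i]
        -- ... process ticket.data here ...
        tickets.setstatus(db, ticket.id, status.finished)
    end
end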
+ +local template =[[ + DELETE FROM + %basename% + WHERE + `accessed` < %time% OR `status` >= %rubish% ; +]] + +local template_cleanup_yes =[[ + SELECT + * + FROM + %basename% + WHERE + `accessed` < %time% + ORDER BY + `id` ; +]] .. template + +local template_cleanup_nop =[[ + SELECT + `accessed`, + `created`, + `accessed`, + `token` + `usertoken` + FROM + %basename% + WHERE + `accessed` < %time% + ORDER BY + `id` ; +]] .. template + +function tickets.cleanupdb(db,delta,nodata) -- maybe delta in db + + local time = delta and (ostime() - delta) or 0 + + local records, keys = db.execute { + template = nodata and template_cleanup_nop or template_cleanup_yes, + variables = { + basename = db.basename, + time = time, + rubish = s_rubish, + }, + } + + if not nodata then + db.unpackdata(records) + end + + if trace_sql then + report("cleaned: %s seconds before %s",delta,osfulltime(time)) + end + + return records, keys + +end + +-- status related functions + +local template =[[ + SELECT + `status` + FROM + %basename% + WHERE + `token` = '%token%' + ORDER BY + `id` + ; +]] + +function tickets.getstatus(db,token) + + local record, keys = db.execute { + template = template, + variables = { + basename = db.basename, + token = token, + }, + } + + local record = record and record[1] + + return record and record.status or s_unknown + +end + +local template =[[ + SELECT + `status` + FROM + %basename% + WHERE + `status` >= %rubish% OR `accessed` < %time% + ORDER BY + `id` + ; +]] + +function tickets.getobsolete(db,delta) + + local time = delta and (ostime() - delta) or 0 + + local records = db.execute { + template = template, + variables = { + basename = db.basename, + time = time, + rubish = s_rubish, + }, + } + + db.unpackdata(records) + + return records + +end + +local template =[[ + SELECT + `id` + FROM + %basename% + WHERE + `status` = %status% + LIMIT + 1 ; +]] + +function tickets.hasstatus(db,status) + + local records = db.execute { + template = template, + variables = { + basename = db.basename, + status = status or s_unknown, + }, + } + + return records and #records > 0 or false + +end + +local template =[[ + UPDATE + %basename% + SET + `status` = %status%, + `accessed` = %time% + WHERE + `id` = %id% ; +]] + +function tickets.setstatus(db,id,status) + + db.execute { + template = template, + variables = { + basename = db.basename, + id = id, + time = ostime(), + status = status or s_error, + }, + } + +end + +local template =[[ + DELETE FROM + %basename% + WHERE + `status` IN (%status%) ; +]] + +function tickets.prunedb(db,status) + + if type(status) == "table" then + status = concat(status,",") + end + + local data, keys = db.execute { + template = template, + variables = { + basename = db.basename, + status = status or s_unknown, + }, + } + + if trace_sql then + report("pruned: status %s removed",status) + end + +end + +-- START TRANSACTION ; ... COMMIT ; +-- LOCK TABLES %basename% WRITE ; ... 
UNLOCK TABLES ; + +local template_a = [[ + SET + @last_ticket_token = '' ; + UPDATE + %basename% + SET + `token` = (@last_ticket_token := `token`), + `status` = %newstatus%, + `accessed` = %time% + WHERE + `status` = %status% + ORDER BY + `id` + LIMIT + 1 + ; + SELECT + * + FROM + %basename% + WHERE + `token` = @last_ticket_token + ORDER BY + `id` + ; +]] + +local template_b = [[ + SELECT + * + FROM + tickets + WHERE + `status` = %status% + ORDER BY + `id` + LIMIT + 1 + ; +]] + +function tickets.getfirstwithstatus(db,status,newstatus) + + local records + + if type(newstatus) == "number" then -- todo: also accept string + + records = db.execute { + template = template_a, + variables = { + basename = db.basename, + status = status or s_pending, + newstatus = newstatus, + time = ostime(), + }, + } + + + else + + records = db.execute { + template = template_b, + variables = { + basename = db.basename, + status = status or s_pending, + }, + } + + end + + if type(records) == "table" and #records > 0 then + + for i=1,#records do + local record = records[i] + record.data = db.deserialize(record.data or "") + record.status = newstatus or s_busy + end + + return records + + end +end + +-- The next getter assumes that we have a sheduler running so that there is +-- one process in charge of changing the status. + +local template = [[ + SET + @last_ticket_token = '' ; + UPDATE + %basename% + SET + `token` = (@last_ticket_token := `token`), + `status` = %newstatus%, + `accessed` = %time% + WHERE + `status` = %status% + ORDER BY + `id` + LIMIT + 1 + ; + SELECT + @last_ticket_token AS `token` + ; +]] + +function tickets.getfirstinqueue(db,status,newstatus) + + local records = db.execute { + template = template, + variables = { + basename = db.basename, + status = status or s_pending, + newstatus = newstatus or s_busy, + time = ostime(), + }, + } + + local token = type(records) == "table" and #records > 0 and records[1].token + + return token ~= "" and token + +end + +local template =[[ + SELECT + * + FROM + %basename% + WHERE + `token` = '%token%' + ORDER BY + `id` ; +]] + +function tickets.getticketsbytoken(db,token) + + local records, keys = db.execute { + template = template, + variables = { + basename = db.basename, + token = token, + }, + } + + db.unpackdata(records) + + return records + +end + +local template =[[ + SELECT + * + FROM + %basename% + WHERE + `usertoken` = '%usertoken%' AND `status` < %rubish% + ORDER BY + `id` ; +]] + +function tickets.getusertickets(db,usertoken) + + -- todo: update accessed + -- todo: get less fields + -- maybe only data for status changed (hard to check) + + local records, keys = db.execute { + template = template, + variables = { + basename = db.basename, + usertoken = usertoken, + rubish = s_rubish, + }, + } + + db.unpackdata(records) + + return records + +end + +local template =[[ + UPDATE + %basename% + SET + `status` = %deleted% + WHERE + `usertoken` = '%usertoken%' ; +]] + +function tickets.removeusertickets(db,usertoken) + + db.execute { + template = template, + variables = { + basename = db.basename, + usertoken = usertoken, + deleted = s_deleted, + }, + } + + if trace_sql then + report("removed: usertoken %s",usertoken) + end + +end diff --git a/tex/context/base/util-sql-users.lua b/tex/context/base/util-sql-users.lua index ea8fb4e07..b99bfa58a 100644 --- a/tex/context/base/util-sql-users.lua +++ b/tex/context/base/util-sql-users.lua @@ -1,410 +1,410 @@ -if not modules then modules = { } end modules ['util-sql-users'] = { - version = 1.001, - comment = 
"companion to lmx-*", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This is experimental code and currently part of the base installation simply --- because it's easier to dirtribute this way. Eventually it will be documented --- and the related scripts will show up as well. - --- local sql = sql or (utilities and utilities.sql) or require("util-sql") --- local md5 = md5 or require("md5") - -local sql = utilities.sql - -local format, upper, find, gsub, topattern = string.format, string.upper, string.find, string.gsub, string.topattern -local sumhexa = md5.sumhexa -local booleanstring = string.booleanstring - -local sql = utilities.sql -local users = { } -sql.users = users - -local trace_sql = false trackers.register("sql.users.trace", function(v) trace_sql = v end) -local report = logs.reporter("sql","users") - -local function encryptpassword(str) - if not str or str == "" then - return "" - elseif find(str,"^MD5:") then - return str - else - return upper(format("MD5:%s",sumhexa(str))) - end -end - -local function cleanuppassword(str) - return (gsub(str,"^MD5:","")) -end - -local function samepasswords(one,two) - if not one or not two then - return false - end - if not find(one,"^MD5:") then - one = encryptpassword(one) - end - if not find(two,"^MD5:") then - two = encryptpassword(two) - end - return one == two -end - -local function validaddress(address,addresses) - if address and addresses and address ~= "" and addresses ~= "" then - if find(address,topattern(addresses,true,true)) then - return true, "valid remote address" - end - return false, "invalid remote address" - else - return true, "no remote address check" - end -end - - -users.encryptpassword = encryptpassword -users.cleanuppassword = cleanuppassword -users.samepasswords = samepasswords -users.validaddress = validaddress - --- print(users.encryptpassword("test")) -- MD5:098F6BCD4621D373CADE4E832627B4F6 - -local function checkeddb(presets,datatable) - return sql.usedatabase(presets,datatable or presets.datatable or "users") -end - -users.usedb = checkeddb - -local groupnames = { } -local groupnumbers = { } - -local function registergroup(name) - local n = #groupnames + 1 - groupnames [n] = name - groupnames [tostring(n)] = name - groupnames [name] = name - groupnumbers[n] = n - groupnumbers[tostring(n)] = n - groupnumbers[name] = n - return n -end - -registergroup("superuser") -registergroup("administrator") -registergroup("user") -registergroup("guest") - -users.groupnames = groupnames -users.groupnumbers = groupnumbers - --- password 'test': --- --- INSERT insert into users (`name`,`password`,`group`,`enabled`) values ('...','MD5:098F6BCD4621D373CADE4E832627B4F6',1,1) ; - -local template =[[ - CREATE TABLE `users` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `name` varchar(80) NOT NULL, - `fullname` varchar(80) NOT NULL, - `password` varchar(50) DEFAULT NULL, - `group` int(11) NOT NULL, - `enabled` int(11) DEFAULT '1', - `email` varchar(80) DEFAULT NULL, - `address` varchar(256) DEFAULT NULL, - `theme` varchar(50) DEFAULT NULL, - `data` longtext, - PRIMARY KEY (`id`), - UNIQUE KEY `name_unique` (`name`) - ) DEFAULT CHARSET = utf8 ; -]] - -local converter, fields = sql.makeconverter { - { name = "id", type = "number" }, - { name = "name", type = "string" }, - { name = "fullname", type = "string" }, - { name = "password", type = "string" }, - { name = "group", type = groupnames }, - { name = "enabled", type = 
"boolean" }, - { name = "email", type = "string" }, - { name = "address", type = "string" }, - { name = "theme", type = "string" }, - { name = "data", type = "deserialize" }, -} - -function users.createdb(presets,datatable) - - local db = checkeddb(presets,datatable) - - db.execute { - template = template, - variables = { - basename = db.basename, - }, - } - - report("datatable %a created in %a",db.name,db.base) - - return db - -end - -local template =[[ - SELECT - %fields% - FROM - %basename% - WHERE - `name` = '%[name]%' - AND - `password` = '%[password]%' - ; -]] - -local template =[[ - SELECT - %fields% - FROM - %basename% - WHERE - `name` = '%[name]%' - ; -]] - -function users.valid(db,username,password,address) - - local data = db.execute { - template = template, - converter = converter, - variables = { - basename = db.basename, - fields = fields, - name = username, - }, - } - - local data = data and data[1] - - if not data then - return false, "unknown user" - elseif not data.enabled then - return false, "disabled user" - elseif data.password ~= encryptpassword(password) then - return false, "wrong password" - elseif not validaddress(address,data.address) then - return false, "invalid address" - else - data.password = nil - return data, "okay" - end - -end - -local template =[[ - INSERT INTO %basename% ( - `name`, - `fullname`, - `password`, - `group`, - `enabled`, - `email`, - `address`, - `theme`, - `data` - ) VALUES ( - '%[name]%', - '%[fullname]%', - '%[password]%', - '%[group]%', - '%[enabled]%', - '%[email]%', - '%[address]%', - '%[theme]%', - '%[data]%' - ) ; -]] - -function users.add(db,specification) - - local name = specification.username or specification.name - - if not name or name == "" then - return - end - - local data = specification.data - - db.execute { - template = template, - variables = { - basename = db.basename, - name = name, - fullname = name or fullname, - password = encryptpassword(specification.password or ""), - group = groupnumbers[specification.group] or groupnumbers.guest, - enabled = booleanstring(specification.enabled) and "1" or "0", - email = specification.email, - address = specification.address, - theme = specification.theme, - data = type(data) == "table" and db.serialize(data,"return") or "", - }, - } - -end - -local template =[[ - SELECT - %fields% - FROM - %basename% - WHERE - `name` = '%[name]%' ; -]] - -function users.getbyname(db,name) - - local data = db.execute { - template = template, - converter = converter, - variables = { - basename = db.basename, - fields = fields, - name = name, - }, - } - - return data and data[1] or nil - -end - -local template =[[ - SELECT - %fields% - FROM - %basename% - WHERE - `id` = '%id%' ; -]] - -local function getbyid(db,id) - - local data = db.execute { - template = template, - converter = converter, - variables = { - basename = db.basename, - fields = fields, - id = id, - }, - } - - return data and data[1] or nil - -end - -users.getbyid = getbyid - -local template =[[ - UPDATE - %basename% - SET - `fullname` = '%[fullname]%', - `password` = '%[password]%', - `group` = '%[group]%', - `enabled` = '%[enabled]%', - `email` = '%[email]%', - `address` = '%[address]%', - `theme` = '%[theme]%', - `data` = '%[data]%' - WHERE - `id` = '%id%' - ; -]] - -function users.save(db,id,specification) - - id = tonumber(id) - - if not id then - return - end - - local user = getbyid(db,id) - - if tonumber(user.id) ~= id then - return - end - - local fullname = specification.fullname == nil and user.fulname or 
specification.fullname - local password = specification.password == nil and user.password or specification.password - local group = specification.group == nil and user.group or specification.group - local enabled = specification.enabled == nil and user.enabled or specification.enabled - local email = specification.email == nil and user.email or specification.email - local address = specification.address == nil and user.address or specification.address - local theme = specification.theme == nil and user.theme or specification.theme - local data = specification.data == nil and user.data or specification.data - - db.execute { - template = template, - variables = { - basename = db.basename, - id = id, - fullname = fullname, - password = encryptpassword(password), - group = groupnumbers[group], - enabled = booleanstring(enabled) and "1" or "0", - email = email, - address = address, - theme = theme, - data = type(data) == "table" and db.serialize(data,"return") or "", - }, - } - - return getbyid(db,id) - -end - -local template =[[ - DELETE FROM - %basename% - WHERE - `id` = '%id%' ; -]] - -function users.remove(db,id) - - db.execute { - template = template, - variables = { - basename = db.basename, - id = id, - }, - } - -end - -local template =[[ - SELECT - %fields% - FROM - %basename% - ORDER BY - `name` ; -]] - -function users.collect(db) -- maybe also an id/name only variant - - local records, keys = db.execute { - template = template, - converter = converter, - variables = { - basename = db.basename, - fields = fields, - }, - } - - return records, keys - -end +if not modules then modules = { } end modules ['util-sql-users'] = { + version = 1.001, + comment = "companion to lmx-*", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This is experimental code and currently part of the base installation simply +-- because it's easier to dirtribute this way. Eventually it will be documented +-- and the related scripts will show up as well. 
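-- Usage sketch (not in the original file): a minimal login check with the user
-- layer defined in this module. The host, credentials, database and user name
-- below are invented placeholders; only the functions shown in this file are used.
--
-- local users = utilities.sql.users
-- local db    = users.usedb {
--     host     = "localhost",
--     username = "root",
--     password = "test",
--     database = "test",
-- }                                             -- datatable defaults to "users"
--
-- local data, message = users.valid(db,"hans","test","127.0.0.1")
-- if data then
--     -- data.password has been cleared and data.group resolved to a group name
-- else
--     -- message is "unknown user", "disabled user", "wrong password" or "invalid address"
-- end
--
-- print(users.encryptpassword("test"))          -- MD5:098F6BCD4621D373CADE4E832627B4F6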
+ +-- local sql = sql or (utilities and utilities.sql) or require("util-sql") +-- local md5 = md5 or require("md5") + +local sql = utilities.sql + +local format, upper, find, gsub, topattern = string.format, string.upper, string.find, string.gsub, string.topattern +local sumhexa = md5.sumhexa +local booleanstring = string.booleanstring + +local sql = utilities.sql +local users = { } +sql.users = users + +local trace_sql = false trackers.register("sql.users.trace", function(v) trace_sql = v end) +local report = logs.reporter("sql","users") + +local function encryptpassword(str) + if not str or str == "" then + return "" + elseif find(str,"^MD5:") then + return str + else + return upper(format("MD5:%s",sumhexa(str))) + end +end + +local function cleanuppassword(str) + return (gsub(str,"^MD5:","")) +end + +local function samepasswords(one,two) + if not one or not two then + return false + end + if not find(one,"^MD5:") then + one = encryptpassword(one) + end + if not find(two,"^MD5:") then + two = encryptpassword(two) + end + return one == two +end + +local function validaddress(address,addresses) + if address and addresses and address ~= "" and addresses ~= "" then + if find(address,topattern(addresses,true,true)) then + return true, "valid remote address" + end + return false, "invalid remote address" + else + return true, "no remote address check" + end +end + + +users.encryptpassword = encryptpassword +users.cleanuppassword = cleanuppassword +users.samepasswords = samepasswords +users.validaddress = validaddress + +-- print(users.encryptpassword("test")) -- MD5:098F6BCD4621D373CADE4E832627B4F6 + +local function checkeddb(presets,datatable) + return sql.usedatabase(presets,datatable or presets.datatable or "users") +end + +users.usedb = checkeddb + +local groupnames = { } +local groupnumbers = { } + +local function registergroup(name) + local n = #groupnames + 1 + groupnames [n] = name + groupnames [tostring(n)] = name + groupnames [name] = name + groupnumbers[n] = n + groupnumbers[tostring(n)] = n + groupnumbers[name] = n + return n +end + +registergroup("superuser") +registergroup("administrator") +registergroup("user") +registergroup("guest") + +users.groupnames = groupnames +users.groupnumbers = groupnumbers + +-- password 'test': +-- +-- INSERT insert into users (`name`,`password`,`group`,`enabled`) values ('...','MD5:098F6BCD4621D373CADE4E832627B4F6',1,1) ; + +local template =[[ + CREATE TABLE `users` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(80) NOT NULL, + `fullname` varchar(80) NOT NULL, + `password` varchar(50) DEFAULT NULL, + `group` int(11) NOT NULL, + `enabled` int(11) DEFAULT '1', + `email` varchar(80) DEFAULT NULL, + `address` varchar(256) DEFAULT NULL, + `theme` varchar(50) DEFAULT NULL, + `data` longtext, + PRIMARY KEY (`id`), + UNIQUE KEY `name_unique` (`name`) + ) DEFAULT CHARSET = utf8 ; +]] + +local converter, fields = sql.makeconverter { + { name = "id", type = "number" }, + { name = "name", type = "string" }, + { name = "fullname", type = "string" }, + { name = "password", type = "string" }, + { name = "group", type = groupnames }, + { name = "enabled", type = "boolean" }, + { name = "email", type = "string" }, + { name = "address", type = "string" }, + { name = "theme", type = "string" }, + { name = "data", type = "deserialize" }, +} + +function users.createdb(presets,datatable) + + local db = checkeddb(presets,datatable) + + db.execute { + template = template, + variables = { + basename = db.basename, + }, + } + + report("datatable %a created in 
%a",db.name,db.base) + + return db + +end + +local template =[[ + SELECT + %fields% + FROM + %basename% + WHERE + `name` = '%[name]%' + AND + `password` = '%[password]%' + ; +]] + +local template =[[ + SELECT + %fields% + FROM + %basename% + WHERE + `name` = '%[name]%' + ; +]] + +function users.valid(db,username,password,address) + + local data = db.execute { + template = template, + converter = converter, + variables = { + basename = db.basename, + fields = fields, + name = username, + }, + } + + local data = data and data[1] + + if not data then + return false, "unknown user" + elseif not data.enabled then + return false, "disabled user" + elseif data.password ~= encryptpassword(password) then + return false, "wrong password" + elseif not validaddress(address,data.address) then + return false, "invalid address" + else + data.password = nil + return data, "okay" + end + +end + +local template =[[ + INSERT INTO %basename% ( + `name`, + `fullname`, + `password`, + `group`, + `enabled`, + `email`, + `address`, + `theme`, + `data` + ) VALUES ( + '%[name]%', + '%[fullname]%', + '%[password]%', + '%[group]%', + '%[enabled]%', + '%[email]%', + '%[address]%', + '%[theme]%', + '%[data]%' + ) ; +]] + +function users.add(db,specification) + + local name = specification.username or specification.name + + if not name or name == "" then + return + end + + local data = specification.data + + db.execute { + template = template, + variables = { + basename = db.basename, + name = name, + fullname = name or fullname, + password = encryptpassword(specification.password or ""), + group = groupnumbers[specification.group] or groupnumbers.guest, + enabled = booleanstring(specification.enabled) and "1" or "0", + email = specification.email, + address = specification.address, + theme = specification.theme, + data = type(data) == "table" and db.serialize(data,"return") or "", + }, + } + +end + +local template =[[ + SELECT + %fields% + FROM + %basename% + WHERE + `name` = '%[name]%' ; +]] + +function users.getbyname(db,name) + + local data = db.execute { + template = template, + converter = converter, + variables = { + basename = db.basename, + fields = fields, + name = name, + }, + } + + return data and data[1] or nil + +end + +local template =[[ + SELECT + %fields% + FROM + %basename% + WHERE + `id` = '%id%' ; +]] + +local function getbyid(db,id) + + local data = db.execute { + template = template, + converter = converter, + variables = { + basename = db.basename, + fields = fields, + id = id, + }, + } + + return data and data[1] or nil + +end + +users.getbyid = getbyid + +local template =[[ + UPDATE + %basename% + SET + `fullname` = '%[fullname]%', + `password` = '%[password]%', + `group` = '%[group]%', + `enabled` = '%[enabled]%', + `email` = '%[email]%', + `address` = '%[address]%', + `theme` = '%[theme]%', + `data` = '%[data]%' + WHERE + `id` = '%id%' + ; +]] + +function users.save(db,id,specification) + + id = tonumber(id) + + if not id then + return + end + + local user = getbyid(db,id) + + if tonumber(user.id) ~= id then + return + end + + local fullname = specification.fullname == nil and user.fulname or specification.fullname + local password = specification.password == nil and user.password or specification.password + local group = specification.group == nil and user.group or specification.group + local enabled = specification.enabled == nil and user.enabled or specification.enabled + local email = specification.email == nil and user.email or specification.email + local address = specification.address 
== nil and user.address or specification.address + local theme = specification.theme == nil and user.theme or specification.theme + local data = specification.data == nil and user.data or specification.data + + db.execute { + template = template, + variables = { + basename = db.basename, + id = id, + fullname = fullname, + password = encryptpassword(password), + group = groupnumbers[group], + enabled = booleanstring(enabled) and "1" or "0", + email = email, + address = address, + theme = theme, + data = type(data) == "table" and db.serialize(data,"return") or "", + }, + } + + return getbyid(db,id) + +end + +local template =[[ + DELETE FROM + %basename% + WHERE + `id` = '%id%' ; +]] + +function users.remove(db,id) + + db.execute { + template = template, + variables = { + basename = db.basename, + id = id, + }, + } + +end + +local template =[[ + SELECT + %fields% + FROM + %basename% + ORDER BY + `name` ; +]] + +function users.collect(db) -- maybe also an id/name only variant + + local records, keys = db.execute { + template = template, + converter = converter, + variables = { + basename = db.basename, + fields = fields, + }, + } + + return records, keys + +end diff --git a/tex/context/base/util-sql.lua b/tex/context/base/util-sql.lua index 1c1766edf..cd2c4c2e2 100644 --- a/tex/context/base/util-sql.lua +++ b/tex/context/base/util-sql.lua @@ -1,443 +1,443 @@ -if not modules then modules = { } end modules ['util-sql'] = { - version = 1.001, - comment = "companion to m-sql.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- todo: templates as table (saves splitting) - --- Of course we could use a library but we don't want another depedency and there is --- a bit of flux in these libraries. Also, we want the data back in a way that we --- like. --- --- This is the first of set of sql related modules that are providing functionality --- for a web based framework that we use for typesetting (related) services. We're --- talking of session management, job ticket processing, storage, (xml) file processing --- and dealing with data from databases (often ambitiously called database publishing). --- --- There is no generic solution for such services, but from our perspective, as we use --- context in a regular tds tree (the standard distribution) it makes sense to put shared --- code in the context distribution. That way we don't need to reinvent wheels every time. - --- We use the template mechanism from util-tpl which inturn is just using the dos cq --- windows convention of %whatever% variables that I've used for ages. 
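-- Usage sketch (not in the original file) of the %whatever% template convention
-- mentioned above, together with the usedatabase helper that the tickets and
-- users modules build on. The table name, query and values are invented.
--
-- local sql = utilities.sql
--
-- local db = sql.usedatabase(
--     { host = "localhost", username = "root", password = "test", database = "test" },
--     "tickets"
-- )
--
-- local records = db.execute {
--     template  = "SELECT * FROM %basename% WHERE `status` = %status% ;",
--     variables = { basename = db.basename, status = 1 },
-- }
-- db.unpackdata(records)        -- deserializes the serialized `data` column in place
--
-- -- the same expansion can be done directly with util-tpl:
--
-- local expanded = utilities.templates.replace(
--     "select * from `test` where `id` > %criterium% ;",
--     { criterium = 2 },
--     "sql"
-- )
-- -- expected: select * from `test` where `id` > 2 ;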
- --- util-sql-imp-client.lua --- util-sql-imp-library.lua --- util-sql-imp-swiglib.lua --- util-sql-imp-lmxsql.lua - --- local sql = require("util-sql") --- --- local converter = sql.makeconverter { --- { name = "id", type = "number" }, --- { name = "data",type = "string" }, --- } --- --- local execute = sql.methods.swiglib.execute --- -- local execute = sql.methods.library.execute --- -- local execute = sql.methods.client.execute --- -- local execute = sql.methods.lmxsql.execute --- --- result = execute { --- presets = { --- host = "localhost", --- username = "root", --- password = "test", --- database = "test", --- id = "test", -- forces persistent session --- }, --- template = "select * from `test` where `id` > %criterium% ;", --- variables = { --- criterium = 2, --- }, --- converter = converter --- } --- --- inspect(result) - -local format, match = string.format, string.match -local random = math.random -local rawset, setmetatable, getmetatable, load, type = rawset, setmetatable, getmetatable, load, type -local P, S, V, C, Cs, Ct, Cc, Cg, Cf, patterns, lpegmatch = lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.patterns, lpeg.match -local concat = table.concat - -local osuuid = os.uuid -local osclock = os.clock or os.time -local ostime = os.time -local setmetatableindex = table.setmetatableindex - -local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end) -local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end) -local report_state = logs.reporter("sql") - --- trace_sql = true --- trace_queries = true - -utilities.sql = utilities.sql or { } -local sql = utilities.sql - -local replacetemplate = utilities.templates.replace -local loadtemplate = utilities.templates.load - -local methods = { } -sql.methods = methods - -local helpers = { } -sql.helpers = helpers - -local serialize = table.fastserialize -local deserialize = table.deserialize - -sql.serialize = serialize -sql.deserialize = deserialize - -helpers.serialize = serialize -- bonus -helpers.deserialize = deserialize -- bonus - -local defaults = { __index = - { - resultfile = "result.dat", - templatefile = "template.sql", - queryfile = "query.sql", - variables = { }, - username = "default", - password = "default", - host = "localhost", - port = 3306, - database = "default", - }, -} - -setmetatableindex(sql.methods,function(t,k) - report_state("start loading method %a",k) - require("util-sql-imp-"..k) - report_state("loading method %a done",k) - return rawget(t,k) -end) - --- converters - -local converters = { } -sql.converters = converters - -local function makeconverter(entries,celltemplate,wraptemplate) - local shortcuts = { } - local assignments = { } - local key = false - for i=1,#entries do - local entry = entries[i] - local name = entry.name - local kind = entry.type or entry.kind - local value = format(celltemplate,i,i) - if kind == "boolean" then - assignments[#assignments+1] = format("[%q] = booleanstring(%s),",name,value) - elseif kind == "number" then - assignments[#assignments+1] = format("[%q] = tonumber(%s),",name,value) - elseif type(kind) == "function" then - local c = #converters + 1 - converters[c] = kind - shortcuts[#shortcuts+1] = format("local fun_%s = converters[%s]",c,c) - assignments[#assignments+1] = format("[%q] = fun_%s(%s),",name,c,value) - elseif type(kind) == "table" then - local c = #converters + 1 - converters[c] = kind - shortcuts[#shortcuts+1] = format("local tab_%s = converters[%s]",c,c) - 
assignments[#assignments+1] = format("[%q] = tab_%s[%s],",name,#converters,value) - elseif kind == "deserialize" then - assignments[#assignments+1] = format("[%q] = deserialize(%s),",name,value) - elseif kind == "key" then - -- hashed instead of indexed - key = value - elseif kind == "entry" then - -- so we can (efficiently) extend the hashed table - local default = entry.default or "" - if type(default) == "string" then - assignments[#assignments+1] = format("[%q] = %q,",name,default) - else - assignments[#assignments+1] = format("[%q] = %s,",name,tostring(default)) - end - else - assignments[#assignments+1] = format("[%q] = %s,",name,value) - end - end - local code = format(wraptemplate,concat(shortcuts,"\n"),key and "{ }" or "data",key or "i",concat(assignments,"\n ")) - -- print(code) - local func = load(code) - return func and func() -end - -function sql.makeconverter(entries) - local fields = { } - for i=1,#entries do - fields[i] = format("`%s`",entries[i].name) - end - fields = concat(fields, ", ") - local converter = { - fields = fields - } - setmetatableindex(converter, function(t,k) - local sqlmethod = methods[k] - local v = makeconverter(entries,sqlmethod.celltemplate,sqlmethod.wraptemplate) - t[k] = v - return v - end) - return converter, fields -end - --- helper for libraries: - -local function validspecification(specification) - local presets = specification.presets - if type(presets) == "string" then - presets = dofile(presets) - end - if type(presets) == "table" then - setmetatable(presets,defaults) - setmetatable(specification,{ __index = presets }) - else - setmetatable(specification,defaults) - end - return true -end - -helpers.validspecification = validspecification - -local whitespace = patterns.whitespace^0 -local eol = patterns.eol -local separator = P(";") -local escaped = patterns.escaped -local dquote = patterns.dquote -local squote = patterns.squote -local dsquote = squote * squote ----- quoted = patterns.quoted -local quoted = dquote * (escaped + (1-dquote))^0 * dquote - + squote * (escaped + dsquote + (1-squote))^0 * squote -local comment = P("--") * (1-eol) / "" -local query = whitespace - * Cs((quoted + comment + 1 - separator)^1 * Cc(";")) - * whitespace -local splitter = Ct(query * (separator * query)^0) - -helpers.querysplitter = splitter - --- I will add a bit more checking. - -local function validspecification(specification) - local presets = specification.presets - if type(presets) == "string" then - presets = dofile(presets) - end - if type(presets) == "table" then - local m = getmetatable(presets) - if m then - setmetatable(m,defaults) - else - setmetatable(presets,defaults) - end - setmetatable(specification,{ __index = presets }) - else - setmetatable(specification,defaults) - end - local templatefile = specification.templatefile or "query" - local queryfile = specification.queryfile or presets.queryfile or file.nameonly(templatefile) .. "-temp.sql" - local resultfile = specification.resultfile or presets.resultfile or file.nameonly(templatefile) .. 
"-temp.dat" - specification.queryfile = queryfile - specification.resultfile = resultfile - if trace_sql then - report_state("template file: %s",templatefile or "") - report_state("query file: %s",queryfile) - report_state("result file: %s",resultfile) - end - return true -end - -local function preparetemplate(specification) - local template = specification.template - if template then - local query = replacetemplate(template,specification.variables,'sql') - if not query then - report_state("error in template: %s",template) - elseif trace_queries then - report_state("query from template: %s",query) - end - return query - end - local templatefile = specification.templatefile - if templatefile then - local query = loadtemplate(templatefile,specification.variables,'sql') - if not query then - report_state("error in template file %a",templatefile) - elseif trace_queries then - report_state("query from template file %a: %s",templatefile,query) - end - return query - end - report_state("no query template or templatefile") -end - -helpers.preparetemplate = preparetemplate - --- -- -- we delay setting this -- -- -- - -local currentmethod - -local function firstexecute(...) - local execute = methods[currentmethod].execute - sql.execute = execute - return execute(...) -end - -function sql.setmethod(method) - currentmethod = method - sql.execute = firstexecute -end - -sql.setmethod("library") - --- helper: - -function sql.usedatabase(presets,datatable) - local name = datatable or presets.datatable - if name then - local method = presets.method and sql.methods[presets.method] or sql.methods.client - local base = presets.database or "test" - local basename = format("`%s`.`%s`",base,name) - local execute = nil - local m_execute = method.execute - if method.usesfiles then - local queryfile = presets.queryfile or format("%s-temp.sql",name) - local resultfile = presets.resultfile or format("%s-temp.dat",name) - execute = function(specification) -- variables template - if not specification.presets then specification.presets = presets end - if not specification.queryfile then specification.queryfile = queryfile end - if not specification.resultfile then specification.resultfile = queryfile end - return m_execute(specification) - end - else - execute = function(specification) -- variables template - if not specification.presets then specification.presets = presets end - return m_execute(specification) - end - end - local function unpackdata(records,name) - if records then - name = name or "data" - for i=1,#records do - local record = records[i] - local data = record[name] - if data then - record[name] = deserialize(data) - end - end - end - end - return { - presets = preset, - base = base, - name = name, - basename = basename, - execute = execute, - serialize = serialize, - deserialize = deserialize, - unpackdata = unpackdata, - } - else - report_state("missing name in usedatabase specification") - end -end - --- local data = utilities.sql.prepare { --- templatefile = "test.sql", --- variables = { }, --- host = "...", --- username = "...", --- password = "...", --- database = "...", --- } - --- local presets = { --- host = "...", --- username = "...", --- password = "...", --- database = "...", --- } --- --- local data = utilities.sql.prepare { --- templatefile = "test.sql", --- variables = { }, --- presets = presets, --- } - --- local data = utilities.sql.prepare { --- templatefile = "test.sql", --- variables = { }, --- presets = dofile(...), --- } - --- local data = utilities.sql.prepare { --- templatefile = 
"test.sql", --- variables = { }, --- presets = "...", --- } - --- for i=1,10 do --- local dummy = uuid() -- else same every time, don't ask --- end - -sql.tokens = { - length = 42, -- but in practice we will reserve some 50 characters - new = function() - return format("%s-%x06",osuuid(),random(0xFFFFF)) -- 36 + 1 + 6 = 42 - end, -} - --- -- -- - --- local func, code = sql.makeconverter { --- { name = "a", type = "number" }, --- { name = "b", type = "string" }, --- { name = "c", type = "boolean" }, --- { name = "d", type = { x = "1" } }, --- { name = "e", type = os.fulltime }, --- } --- --- print(code) - --- -- -- - -if tex and tex.systemmodes then - - local droptable = table.drop - local threshold = 16 * 1024 -- use slower but less memory hungry variant - - function sql.prepare(specification,tag) - -- could go into tuc if needed - -- todo: serialize per column - local tag = tag or specification.tag or "last" - local filename = format("%s-sql-result-%s.tuc",tex.jobname,tag) - if tex.systemmodes["first"] then - local data, keys = sql.execute(specification) - if not data then - data = { } - end - if not keys then - keys = { } - end - io.savedata(filename,droptable({ data = data, keys = keys },#keys*#data>threshold)) - return data, keys - else - local result = table.load(filename) - return result.data, result.keys - end - end - -else - - sql.prepare = sql.execute - -end - -return sql +if not modules then modules = { } end modules ['util-sql'] = { + version = 1.001, + comment = "companion to m-sql.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: templates as table (saves splitting) + +-- Of course we could use a library but we don't want another depedency and there is +-- a bit of flux in these libraries. Also, we want the data back in a way that we +-- like. +-- +-- This is the first of set of sql related modules that are providing functionality +-- for a web based framework that we use for typesetting (related) services. We're +-- talking of session management, job ticket processing, storage, (xml) file processing +-- and dealing with data from databases (often ambitiously called database publishing). +-- +-- There is no generic solution for such services, but from our perspective, as we use +-- context in a regular tds tree (the standard distribution) it makes sense to put shared +-- code in the context distribution. That way we don't need to reinvent wheels every time. + +-- We use the template mechanism from util-tpl which inturn is just using the dos cq +-- windows convention of %whatever% variables that I've used for ages. 
+ +-- util-sql-imp-client.lua +-- util-sql-imp-library.lua +-- util-sql-imp-swiglib.lua +-- util-sql-imp-lmxsql.lua + +-- local sql = require("util-sql") +-- +-- local converter = sql.makeconverter { +-- { name = "id", type = "number" }, +-- { name = "data",type = "string" }, +-- } +-- +-- local execute = sql.methods.swiglib.execute +-- -- local execute = sql.methods.library.execute +-- -- local execute = sql.methods.client.execute +-- -- local execute = sql.methods.lmxsql.execute +-- +-- result = execute { +-- presets = { +-- host = "localhost", +-- username = "root", +-- password = "test", +-- database = "test", +-- id = "test", -- forces persistent session +-- }, +-- template = "select * from `test` where `id` > %criterium% ;", +-- variables = { +-- criterium = 2, +-- }, +-- converter = converter +-- } +-- +-- inspect(result) + +local format, match = string.format, string.match +local random = math.random +local rawset, setmetatable, getmetatable, load, type = rawset, setmetatable, getmetatable, load, type +local P, S, V, C, Cs, Ct, Cc, Cg, Cf, patterns, lpegmatch = lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.patterns, lpeg.match +local concat = table.concat + +local osuuid = os.uuid +local osclock = os.clock or os.time +local ostime = os.time +local setmetatableindex = table.setmetatableindex + +local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end) +local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end) +local report_state = logs.reporter("sql") + +-- trace_sql = true +-- trace_queries = true + +utilities.sql = utilities.sql or { } +local sql = utilities.sql + +local replacetemplate = utilities.templates.replace +local loadtemplate = utilities.templates.load + +local methods = { } +sql.methods = methods + +local helpers = { } +sql.helpers = helpers + +local serialize = table.fastserialize +local deserialize = table.deserialize + +sql.serialize = serialize +sql.deserialize = deserialize + +helpers.serialize = serialize -- bonus +helpers.deserialize = deserialize -- bonus + +local defaults = { __index = + { + resultfile = "result.dat", + templatefile = "template.sql", + queryfile = "query.sql", + variables = { }, + username = "default", + password = "default", + host = "localhost", + port = 3306, + database = "default", + }, +} + +setmetatableindex(sql.methods,function(t,k) + report_state("start loading method %a",k) + require("util-sql-imp-"..k) + report_state("loading method %a done",k) + return rawget(t,k) +end) + +-- converters + +local converters = { } +sql.converters = converters + +local function makeconverter(entries,celltemplate,wraptemplate) + local shortcuts = { } + local assignments = { } + local key = false + for i=1,#entries do + local entry = entries[i] + local name = entry.name + local kind = entry.type or entry.kind + local value = format(celltemplate,i,i) + if kind == "boolean" then + assignments[#assignments+1] = format("[%q] = booleanstring(%s),",name,value) + elseif kind == "number" then + assignments[#assignments+1] = format("[%q] = tonumber(%s),",name,value) + elseif type(kind) == "function" then + local c = #converters + 1 + converters[c] = kind + shortcuts[#shortcuts+1] = format("local fun_%s = converters[%s]",c,c) + assignments[#assignments+1] = format("[%q] = fun_%s(%s),",name,c,value) + elseif type(kind) == "table" then + local c = #converters + 1 + converters[c] = kind + shortcuts[#shortcuts+1] = format("local tab_%s = converters[%s]",c,c) + 
assignments[#assignments+1] = format("[%q] = tab_%s[%s],",name,#converters,value) + elseif kind == "deserialize" then + assignments[#assignments+1] = format("[%q] = deserialize(%s),",name,value) + elseif kind == "key" then + -- hashed instead of indexed + key = value + elseif kind == "entry" then + -- so we can (efficiently) extend the hashed table + local default = entry.default or "" + if type(default) == "string" then + assignments[#assignments+1] = format("[%q] = %q,",name,default) + else + assignments[#assignments+1] = format("[%q] = %s,",name,tostring(default)) + end + else + assignments[#assignments+1] = format("[%q] = %s,",name,value) + end + end + local code = format(wraptemplate,concat(shortcuts,"\n"),key and "{ }" or "data",key or "i",concat(assignments,"\n ")) + -- print(code) + local func = load(code) + return func and func() +end + +function sql.makeconverter(entries) + local fields = { } + for i=1,#entries do + fields[i] = format("`%s`",entries[i].name) + end + fields = concat(fields, ", ") + local converter = { + fields = fields + } + setmetatableindex(converter, function(t,k) + local sqlmethod = methods[k] + local v = makeconverter(entries,sqlmethod.celltemplate,sqlmethod.wraptemplate) + t[k] = v + return v + end) + return converter, fields +end + +-- helper for libraries: + +local function validspecification(specification) + local presets = specification.presets + if type(presets) == "string" then + presets = dofile(presets) + end + if type(presets) == "table" then + setmetatable(presets,defaults) + setmetatable(specification,{ __index = presets }) + else + setmetatable(specification,defaults) + end + return true +end + +helpers.validspecification = validspecification + +local whitespace = patterns.whitespace^0 +local eol = patterns.eol +local separator = P(";") +local escaped = patterns.escaped +local dquote = patterns.dquote +local squote = patterns.squote +local dsquote = squote * squote +---- quoted = patterns.quoted +local quoted = dquote * (escaped + (1-dquote))^0 * dquote + + squote * (escaped + dsquote + (1-squote))^0 * squote +local comment = P("--") * (1-eol) / "" +local query = whitespace + * Cs((quoted + comment + 1 - separator)^1 * Cc(";")) + * whitespace +local splitter = Ct(query * (separator * query)^0) + +helpers.querysplitter = splitter + +-- I will add a bit more checking. + +local function validspecification(specification) + local presets = specification.presets + if type(presets) == "string" then + presets = dofile(presets) + end + if type(presets) == "table" then + local m = getmetatable(presets) + if m then + setmetatable(m,defaults) + else + setmetatable(presets,defaults) + end + setmetatable(specification,{ __index = presets }) + else + setmetatable(specification,defaults) + end + local templatefile = specification.templatefile or "query" + local queryfile = specification.queryfile or presets.queryfile or file.nameonly(templatefile) .. "-temp.sql" + local resultfile = specification.resultfile or presets.resultfile or file.nameonly(templatefile) .. 
"-temp.dat" + specification.queryfile = queryfile + specification.resultfile = resultfile + if trace_sql then + report_state("template file: %s",templatefile or "") + report_state("query file: %s",queryfile) + report_state("result file: %s",resultfile) + end + return true +end + +local function preparetemplate(specification) + local template = specification.template + if template then + local query = replacetemplate(template,specification.variables,'sql') + if not query then + report_state("error in template: %s",template) + elseif trace_queries then + report_state("query from template: %s",query) + end + return query + end + local templatefile = specification.templatefile + if templatefile then + local query = loadtemplate(templatefile,specification.variables,'sql') + if not query then + report_state("error in template file %a",templatefile) + elseif trace_queries then + report_state("query from template file %a: %s",templatefile,query) + end + return query + end + report_state("no query template or templatefile") +end + +helpers.preparetemplate = preparetemplate + +-- -- -- we delay setting this -- -- -- + +local currentmethod + +local function firstexecute(...) + local execute = methods[currentmethod].execute + sql.execute = execute + return execute(...) +end + +function sql.setmethod(method) + currentmethod = method + sql.execute = firstexecute +end + +sql.setmethod("library") + +-- helper: + +function sql.usedatabase(presets,datatable) + local name = datatable or presets.datatable + if name then + local method = presets.method and sql.methods[presets.method] or sql.methods.client + local base = presets.database or "test" + local basename = format("`%s`.`%s`",base,name) + local execute = nil + local m_execute = method.execute + if method.usesfiles then + local queryfile = presets.queryfile or format("%s-temp.sql",name) + local resultfile = presets.resultfile or format("%s-temp.dat",name) + execute = function(specification) -- variables template + if not specification.presets then specification.presets = presets end + if not specification.queryfile then specification.queryfile = queryfile end + if not specification.resultfile then specification.resultfile = queryfile end + return m_execute(specification) + end + else + execute = function(specification) -- variables template + if not specification.presets then specification.presets = presets end + return m_execute(specification) + end + end + local function unpackdata(records,name) + if records then + name = name or "data" + for i=1,#records do + local record = records[i] + local data = record[name] + if data then + record[name] = deserialize(data) + end + end + end + end + return { + presets = preset, + base = base, + name = name, + basename = basename, + execute = execute, + serialize = serialize, + deserialize = deserialize, + unpackdata = unpackdata, + } + else + report_state("missing name in usedatabase specification") + end +end + +-- local data = utilities.sql.prepare { +-- templatefile = "test.sql", +-- variables = { }, +-- host = "...", +-- username = "...", +-- password = "...", +-- database = "...", +-- } + +-- local presets = { +-- host = "...", +-- username = "...", +-- password = "...", +-- database = "...", +-- } +-- +-- local data = utilities.sql.prepare { +-- templatefile = "test.sql", +-- variables = { }, +-- presets = presets, +-- } + +-- local data = utilities.sql.prepare { +-- templatefile = "test.sql", +-- variables = { }, +-- presets = dofile(...), +-- } + +-- local data = utilities.sql.prepare { +-- templatefile = 
"test.sql", +-- variables = { }, +-- presets = "...", +-- } + +-- for i=1,10 do +-- local dummy = uuid() -- else same every time, don't ask +-- end + +sql.tokens = { + length = 42, -- but in practice we will reserve some 50 characters + new = function() + return format("%s-%x06",osuuid(),random(0xFFFFF)) -- 36 + 1 + 6 = 42 + end, +} + +-- -- -- + +-- local func, code = sql.makeconverter { +-- { name = "a", type = "number" }, +-- { name = "b", type = "string" }, +-- { name = "c", type = "boolean" }, +-- { name = "d", type = { x = "1" } }, +-- { name = "e", type = os.fulltime }, +-- } +-- +-- print(code) + +-- -- -- + +if tex and tex.systemmodes then + + local droptable = table.drop + local threshold = 16 * 1024 -- use slower but less memory hungry variant + + function sql.prepare(specification,tag) + -- could go into tuc if needed + -- todo: serialize per column + local tag = tag or specification.tag or "last" + local filename = format("%s-sql-result-%s.tuc",tex.jobname,tag) + if tex.systemmodes["first"] then + local data, keys = sql.execute(specification) + if not data then + data = { } + end + if not keys then + keys = { } + end + io.savedata(filename,droptable({ data = data, keys = keys },#keys*#data>threshold)) + return data, keys + else + local result = table.load(filename) + return result.data, result.keys + end + end + +else + + sql.prepare = sql.execute + +end + +return sql diff --git a/tex/context/base/util-sta.lua b/tex/context/base/util-sta.lua index 1a61ec4e6..1ea713a76 100644 --- a/tex/context/base/util-sta.lua +++ b/tex/context/base/util-sta.lua @@ -1,342 +1,342 @@ -if not modules then modules = { } end modules ['util-sta'] = { - version = 1.001, - comment = "companion to util-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat -local format = string.format -local select, tostring = select, tostring - -local trace_stacker = false trackers.register("stacker.resolve", function(v) trace_stacker = v end) - -local stacker = stacker or { } - -utilities.stacker = stacker - -local function start(s,t,first,last) - if s.mode == "switch" then - local n = tostring(t[last]) - if trace_stacker then - s.report("start: %s",n) - end - return n - else - local r = { } - for i=first,last do - r[#r+1] = tostring(t[i]) - end - local n = concat(r," ") - if trace_stacker then - s.report("start: %s",n) - end - return n - end -end - -local function stop(s,t,first,last) - if s.mode == "switch" then - local n = tostring(false) - if trace_stacker then - s.report("stop: %s",n) - end - return n - else - local r = { } - for i=last,first,-1 do - r[#r+1] = tostring(false) - end - local n = concat(r," ") - if trace_stacker then - s.report("stop: %s",n) - end - return n - end -end - -local function change(s,t1,first1,last1,t2,first2,last2) - if s.mode == "switch" then - local n = tostring(t2[last2]) - if trace_stacker then - s.report("change: %s",n) - end - return n - else - local r = { } - for i=last1,first1,-1 do - r[#r+1] = tostring(false) - end - local n = concat(r," ") - for i=first2,last2 do - r[#r+1] = tostring(t2[i]) - end - if trace_stacker then - s.report("change: %s",n) - end - return n - end -end - -function stacker.new(name) - - local s - - local stack = { } - local list = { } - local ids = { } - local hash = { } - - local hashing = true - - local function push(...) 
- for i=1,select("#",...) do - insert(stack,(select(i,...))) -- watch the () - end - if hashing then - local c = concat(stack,"|") - local n = hash[c] - if not n then - n = #list+1 - hash[c] = n - list[n] = fastcopy(stack) - end - insert(ids,n) - return n - else - local n = #list+1 - list[n] = fastcopy(stack) - insert(ids,n) - return n - end - end - - local function pop() - remove(stack) - remove(ids) - return ids[#ids] or s.unset or -1 - end - - local function clean() - if #stack == 0 then - if trace_stacker then - s.report("%s list entries, %s stack entries",#list,#stack) - end - end - end - - local tops = { } - local top, switch - - local function resolve_begin(mode) - if mode then - switch = mode == "switch" - else - switch = s.mode == "switch" - end - top = { switch = switch } - insert(tops,top) - end - - local function resolve_step(ti) -- keep track of changes outside function ! - -- todo: optimize for n=1 etc - local result = nil - local noftop = #top - if ti > 0 then - local current = list[ti] - if current then - local noflist = #current - local nofsame = 0 - if noflist > noftop then - for i=1,noflist do - if current[i] == top[i] then - nofsame = i - else - break - end - end - else - for i=1,noflist do - if current[i] == top[i] then - nofsame = i - else - break - end - end - end - local plus = nofsame + 1 - if plus <= noftop then - if plus <= noflist then - if switch then - result = s.change(s,top,plus,noftop,current,nofsame,noflist) - else - result = s.change(s,top,plus,noftop,current,plus,noflist) - end - else - if switch then - result = s.change(s,top,plus,noftop,current,nofsame,noflist) - else - result = s.stop(s,top,plus,noftop) - end - end - elseif plus <= noflist then - if switch then - result = s.start(s,current,nofsame,noflist) - else - result = s.start(s,current,plus,noflist) - end - end - top = current - else - if 1 <= noftop then - result = s.stop(s,top,1,noftop) - end - top = { } - end - return result - else - if 1 <= noftop then - result = s.stop(s,top,1,noftop) - end - top = { } - return result - end - end - - local function resolve_end() - -- resolve_step(s.unset) - local noftop = #top - if noftop > 0 then - local result = s.stop(s,top,1,#top) - remove(tops) - top = tops[#tops] - switch = top and top.switch - return result - end - end - - local function resolve(t) - resolve_begin() - for i=1,#t do - resolve_step(t[i]) - end - resolve_end() - end - - local report = logs.reporter("stacker",name or nil) - - s = { - name = name or "unknown", - unset = -1, - report = report, - start = start, - stop = stop, - change = change, - push = push, - pop = pop, - clean = clean, - resolve = resolve, - resolve_begin = resolve_begin, - resolve_step = resolve_step, - resolve_end = resolve_end, - } - - return s -- we can overload functions - -end - --- local s = utilities.stacker.new("demo") --- --- local unset = s.unset --- local push = s.push --- local pop = s.pop --- --- local t = { --- unset, --- unset, --- push("a"), -- a --- push("b","c"), -- a b c --- pop(), -- a b --- push("d"), -- a b d --- pop(), -- a b --- unset, --- pop(), -- a --- pop(), -- b --- unset, --- unset, --- } --- --- s.resolve(t) - --- demostacker = utilities.stacker.new("demos") --- --- local whatever = { --- one = "1 0 0 RG 1 0 0 rg", --- two = "1 1 0 RG 1 1 0 rg", --- [false] = "0 G 0 g", --- } --- --- local concat = table.concat --- --- local pdfliteral = nodes.pool.pdfliteral --- --- function demostacker.start(s,t,first,last) --- local n = whatever[t[last]] --- -- s.report("start: %s",n) --- return 
pdfliteral(n) --- end --- --- function demostacker.stop(s,t,first,last) --- local n = whatever[false] --- -- s.report("stop: %s",n) --- return pdfliteral(n) --- end --- --- function demostacker.change(s,t1,first1,last1,t2,first2,last2) --- local n = whatever[t2[last2]] --- -- s.report("change: %s",n) --- return pdfliteral(n) --- end --- --- demostacker.mode = "switch" --- --- local whatever = { --- one = "/OC /test1 BDC", --- two = "/OC /test2 BDC", --- [false] = "EMC", --- } --- --- demostacker = utilities.stacker.new("demos") --- --- function demostacker.start(s,t,first,last) --- local r = { } --- for i=first,last do --- r[#r+1] = whatever[t[i]] --- end --- -- s.report("start: %s",concat(r," ")) --- return pdfliteral(concat(r," ")) --- end --- --- function demostacker.stop(s,t,first,last) --- local r = { } --- for i=last,first,-1 do --- r[#r+1] = whatever[false] --- end --- -- s.report("stop: %s",concat(r," ")) --- return pdfliteral(concat(r," ")) --- end --- --- function demostacker.change(s,t1,first1,last1,t2,first2,last2) --- local r = { } --- for i=last1,first1,-1 do --- r[#r+1] = whatever[false] --- end --- for i=first2,last2 do --- r[#r+1] = whatever[t2[i]] --- end --- -- s.report("change: %s",concat(r," ")) --- return pdfliteral(concat(r," ")) --- end --- --- demostacker.mode = "stack" +if not modules then modules = { } end modules ['util-sta'] = { + version = 1.001, + comment = "companion to util-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat +local format = string.format +local select, tostring = select, tostring + +local trace_stacker = false trackers.register("stacker.resolve", function(v) trace_stacker = v end) + +local stacker = stacker or { } + +utilities.stacker = stacker + +local function start(s,t,first,last) + if s.mode == "switch" then + local n = tostring(t[last]) + if trace_stacker then + s.report("start: %s",n) + end + return n + else + local r = { } + for i=first,last do + r[#r+1] = tostring(t[i]) + end + local n = concat(r," ") + if trace_stacker then + s.report("start: %s",n) + end + return n + end +end + +local function stop(s,t,first,last) + if s.mode == "switch" then + local n = tostring(false) + if trace_stacker then + s.report("stop: %s",n) + end + return n + else + local r = { } + for i=last,first,-1 do + r[#r+1] = tostring(false) + end + local n = concat(r," ") + if trace_stacker then + s.report("stop: %s",n) + end + return n + end +end + +local function change(s,t1,first1,last1,t2,first2,last2) + if s.mode == "switch" then + local n = tostring(t2[last2]) + if trace_stacker then + s.report("change: %s",n) + end + return n + else + local r = { } + for i=last1,first1,-1 do + r[#r+1] = tostring(false) + end + local n = concat(r," ") + for i=first2,last2 do + r[#r+1] = tostring(t2[i]) + end + if trace_stacker then + s.report("change: %s",n) + end + return n + end +end + +function stacker.new(name) + + local s + + local stack = { } + local list = { } + local ids = { } + local hash = { } + + local hashing = true + + local function push(...) + for i=1,select("#",...) 
do + insert(stack,(select(i,...))) -- watch the () + end + if hashing then + local c = concat(stack,"|") + local n = hash[c] + if not n then + n = #list+1 + hash[c] = n + list[n] = fastcopy(stack) + end + insert(ids,n) + return n + else + local n = #list+1 + list[n] = fastcopy(stack) + insert(ids,n) + return n + end + end + + local function pop() + remove(stack) + remove(ids) + return ids[#ids] or s.unset or -1 + end + + local function clean() + if #stack == 0 then + if trace_stacker then + s.report("%s list entries, %s stack entries",#list,#stack) + end + end + end + + local tops = { } + local top, switch + + local function resolve_begin(mode) + if mode then + switch = mode == "switch" + else + switch = s.mode == "switch" + end + top = { switch = switch } + insert(tops,top) + end + + local function resolve_step(ti) -- keep track of changes outside function ! + -- todo: optimize for n=1 etc + local result = nil + local noftop = #top + if ti > 0 then + local current = list[ti] + if current then + local noflist = #current + local nofsame = 0 + if noflist > noftop then + for i=1,noflist do + if current[i] == top[i] then + nofsame = i + else + break + end + end + else + for i=1,noflist do + if current[i] == top[i] then + nofsame = i + else + break + end + end + end + local plus = nofsame + 1 + if plus <= noftop then + if plus <= noflist then + if switch then + result = s.change(s,top,plus,noftop,current,nofsame,noflist) + else + result = s.change(s,top,plus,noftop,current,plus,noflist) + end + else + if switch then + result = s.change(s,top,plus,noftop,current,nofsame,noflist) + else + result = s.stop(s,top,plus,noftop) + end + end + elseif plus <= noflist then + if switch then + result = s.start(s,current,nofsame,noflist) + else + result = s.start(s,current,plus,noflist) + end + end + top = current + else + if 1 <= noftop then + result = s.stop(s,top,1,noftop) + end + top = { } + end + return result + else + if 1 <= noftop then + result = s.stop(s,top,1,noftop) + end + top = { } + return result + end + end + + local function resolve_end() + -- resolve_step(s.unset) + local noftop = #top + if noftop > 0 then + local result = s.stop(s,top,1,#top) + remove(tops) + top = tops[#tops] + switch = top and top.switch + return result + end + end + + local function resolve(t) + resolve_begin() + for i=1,#t do + resolve_step(t[i]) + end + resolve_end() + end + + local report = logs.reporter("stacker",name or nil) + + s = { + name = name or "unknown", + unset = -1, + report = report, + start = start, + stop = stop, + change = change, + push = push, + pop = pop, + clean = clean, + resolve = resolve, + resolve_begin = resolve_begin, + resolve_step = resolve_step, + resolve_end = resolve_end, + } + + return s -- we can overload functions + +end + +-- local s = utilities.stacker.new("demo") +-- +-- local unset = s.unset +-- local push = s.push +-- local pop = s.pop +-- +-- local t = { +-- unset, +-- unset, +-- push("a"), -- a +-- push("b","c"), -- a b c +-- pop(), -- a b +-- push("d"), -- a b d +-- pop(), -- a b +-- unset, +-- pop(), -- a +-- pop(), -- b +-- unset, +-- unset, +-- } +-- +-- s.resolve(t) + +-- demostacker = utilities.stacker.new("demos") +-- +-- local whatever = { +-- one = "1 0 0 RG 1 0 0 rg", +-- two = "1 1 0 RG 1 1 0 rg", +-- [false] = "0 G 0 g", +-- } +-- +-- local concat = table.concat +-- +-- local pdfliteral = nodes.pool.pdfliteral +-- +-- function demostacker.start(s,t,first,last) +-- local n = whatever[t[last]] +-- -- s.report("start: %s",n) +-- return pdfliteral(n) +-- end +-- +-- 
function demostacker.stop(s,t,first,last) +-- local n = whatever[false] +-- -- s.report("stop: %s",n) +-- return pdfliteral(n) +-- end +-- +-- function demostacker.change(s,t1,first1,last1,t2,first2,last2) +-- local n = whatever[t2[last2]] +-- -- s.report("change: %s",n) +-- return pdfliteral(n) +-- end +-- +-- demostacker.mode = "switch" +-- +-- local whatever = { +-- one = "/OC /test1 BDC", +-- two = "/OC /test2 BDC", +-- [false] = "EMC", +-- } +-- +-- demostacker = utilities.stacker.new("demos") +-- +-- function demostacker.start(s,t,first,last) +-- local r = { } +-- for i=first,last do +-- r[#r+1] = whatever[t[i]] +-- end +-- -- s.report("start: %s",concat(r," ")) +-- return pdfliteral(concat(r," ")) +-- end +-- +-- function demostacker.stop(s,t,first,last) +-- local r = { } +-- for i=last,first,-1 do +-- r[#r+1] = whatever[false] +-- end +-- -- s.report("stop: %s",concat(r," ")) +-- return pdfliteral(concat(r," ")) +-- end +-- +-- function demostacker.change(s,t1,first1,last1,t2,first2,last2) +-- local r = { } +-- for i=last1,first1,-1 do +-- r[#r+1] = whatever[false] +-- end +-- for i=first2,last2 do +-- r[#r+1] = whatever[t2[i]] +-- end +-- -- s.report("change: %s",concat(r," ")) +-- return pdfliteral(concat(r," ")) +-- end +-- +-- demostacker.mode = "stack" diff --git a/tex/context/base/util-sto.lua b/tex/context/base/util-sto.lua index 191d6cd73..355f0ecd3 100644 --- a/tex/context/base/util-sto.lua +++ b/tex/context/base/util-sto.lua @@ -1,189 +1,189 @@ -if not modules then modules = { } end modules ['util-sto'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local setmetatable, getmetatable, type = setmetatable, getmetatable, type - -utilities = utilities or { } -utilities.storage = utilities.storage or { } -local storage = utilities.storage - -function storage.mark(t) - if not t then - print("\nfatal error: storage cannot be marked\n") - os.exit() - return - end - local m = getmetatable(t) - if not m then - m = { } - setmetatable(t,m) - end - m.__storage__ = true - return t -end - -function storage.allocate(t) - t = t or { } - local m = getmetatable(t) - if not m then - m = { } - setmetatable(t,m) - end - m.__storage__ = true - return t -end - -function storage.marked(t) - local m = getmetatable(t) - return m and m.__storage__ -end - -function storage.checked(t) - if not t then - report("\nfatal error: storage has not been allocated\n") - os.exit() - return - end - return t -end - --- function utilities.storage.delay(parent,name,filename) --- local m = getmetatable(parent) --- m.__list[name] = filename --- end --- --- function utilities.storage.predefine(parent) --- local list = { } --- local m = getmetatable(parent) or { --- __list = list, --- __index = function(t,k) --- local l = require(list[k]) --- t[k] = l --- return l --- end --- } --- setmetatable(parent,m) --- end --- --- bla = { } --- utilities.storage.predefine(bla) --- utilities.storage.delay(bla,"test","oepsoeps") --- local t = bla.test --- table.print(t) --- print(t.a) - -function storage.setinitializer(data,initialize) - local m = getmetatable(data) or { } - m.__index = function(data,k) - m.__index = nil -- so that we can access the entries during initializing - initialize() - return data[k] - end - setmetatable(data, m) -end - -local keyisvalue = { __index = function(t,k) - t[k] = k - return k -end } - -function storage.sparse(t) - t = t or { 
} - setmetatable(t,keyisvalue) - return t -end - --- table namespace ? - -local function f_empty () return "" end -- t,k -local function f_self (t,k) t[k] = k return k end -local function f_table (t,k) local v = { } t[k] = v return v end -local function f_ignore() end -- t,k,v - -local t_empty = { __index = f_empty } -local t_self = { __index = f_self } -local t_table = { __index = f_table } -local t_ignore = { __newindex = f_ignore } - -function table.setmetatableindex(t,f) - if type(t) ~= "table" then - f, t = t, { } - end - local m = getmetatable(t) - if m then - if f == "empty" then - m.__index = f_empty - elseif f == "key" then - m.__index = f_self - elseif f == "table" then - m.__index = f_table - else - m.__index = f - end - else - if f == "empty" then - setmetatable(t, t_empty) - elseif f == "key" then - setmetatable(t, t_self) - elseif f == "table" then - setmetatable(t, t_table) - else - setmetatable(t,{ __index = f }) - end - end - return t -end - -function table.setmetatablenewindex(t,f) - if type(t) ~= "table" then - f, t = t, { } - end - local m = getmetatable(t) - if m then - if f == "ignore" then - m.__newindex = f_ignore - else - m.__newindex = f - end - else - if f == "ignore" then - setmetatable(t, t_ignore) - else - setmetatable(t,{ __newindex = f }) - end - end - return t -end - -function table.setmetatablecall(t,f) - if type(t) ~= "table" then - f, t = t, { } - end - local m = getmetatable(t) - if m then - m.__call = f - else - setmetatable(t,{ __call = f }) - end - return t -end - -function table.setmetatablekey(t,key,value) - local m = getmetatable(t) - if not m then - m = { } - setmetatable(t,m) - end - m[key] = value - return t -end - -function table.getmetatablekey(t,key,value) - local m = getmetatable(t) - return m and m[key] -end +if not modules then modules = { } end modules ['util-sto'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local setmetatable, getmetatable, type = setmetatable, getmetatable, type + +utilities = utilities or { } +utilities.storage = utilities.storage or { } +local storage = utilities.storage + +function storage.mark(t) + if not t then + print("\nfatal error: storage cannot be marked\n") + os.exit() + return + end + local m = getmetatable(t) + if not m then + m = { } + setmetatable(t,m) + end + m.__storage__ = true + return t +end + +function storage.allocate(t) + t = t or { } + local m = getmetatable(t) + if not m then + m = { } + setmetatable(t,m) + end + m.__storage__ = true + return t +end + +function storage.marked(t) + local m = getmetatable(t) + return m and m.__storage__ +end + +function storage.checked(t) + if not t then + report("\nfatal error: storage has not been allocated\n") + os.exit() + return + end + return t +end + +-- function utilities.storage.delay(parent,name,filename) +-- local m = getmetatable(parent) +-- m.__list[name] = filename +-- end +-- +-- function utilities.storage.predefine(parent) +-- local list = { } +-- local m = getmetatable(parent) or { +-- __list = list, +-- __index = function(t,k) +-- local l = require(list[k]) +-- t[k] = l +-- return l +-- end +-- } +-- setmetatable(parent,m) +-- end +-- +-- bla = { } +-- utilities.storage.predefine(bla) +-- utilities.storage.delay(bla,"test","oepsoeps") +-- local t = bla.test +-- table.print(t) +-- print(t.a) + +function storage.setinitializer(data,initialize) + local m = 
getmetatable(data) or { } + m.__index = function(data,k) + m.__index = nil -- so that we can access the entries during initializing + initialize() + return data[k] + end + setmetatable(data, m) +end + +local keyisvalue = { __index = function(t,k) + t[k] = k + return k +end } + +function storage.sparse(t) + t = t or { } + setmetatable(t,keyisvalue) + return t +end + +-- table namespace ? + +local function f_empty () return "" end -- t,k +local function f_self (t,k) t[k] = k return k end +local function f_table (t,k) local v = { } t[k] = v return v end +local function f_ignore() end -- t,k,v + +local t_empty = { __index = f_empty } +local t_self = { __index = f_self } +local t_table = { __index = f_table } +local t_ignore = { __newindex = f_ignore } + +function table.setmetatableindex(t,f) + if type(t) ~= "table" then + f, t = t, { } + end + local m = getmetatable(t) + if m then + if f == "empty" then + m.__index = f_empty + elseif f == "key" then + m.__index = f_self + elseif f == "table" then + m.__index = f_table + else + m.__index = f + end + else + if f == "empty" then + setmetatable(t, t_empty) + elseif f == "key" then + setmetatable(t, t_self) + elseif f == "table" then + setmetatable(t, t_table) + else + setmetatable(t,{ __index = f }) + end + end + return t +end + +function table.setmetatablenewindex(t,f) + if type(t) ~= "table" then + f, t = t, { } + end + local m = getmetatable(t) + if m then + if f == "ignore" then + m.__newindex = f_ignore + else + m.__newindex = f + end + else + if f == "ignore" then + setmetatable(t, t_ignore) + else + setmetatable(t,{ __newindex = f }) + end + end + return t +end + +function table.setmetatablecall(t,f) + if type(t) ~= "table" then + f, t = t, { } + end + local m = getmetatable(t) + if m then + m.__call = f + else + setmetatable(t,{ __call = f }) + end + return t +end + +function table.setmetatablekey(t,key,value) + local m = getmetatable(t) + if not m then + m = { } + setmetatable(t,m) + end + m[key] = value + return t +end + +function table.getmetatablekey(t,key,value) + local m = getmetatable(t) + return m and m[key] +end diff --git a/tex/context/base/util-str.lua b/tex/context/base/util-str.lua index 4890a11d6..f671b0012 100644 --- a/tex/context/base/util-str.lua +++ b/tex/context/base/util-str.lua @@ -1,766 +1,766 @@ -if not modules then modules = { } end modules ['util-str'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -utilities = utilities or {} -utilities.strings = utilities.strings or { } -local strings = utilities.strings - -local format, gsub, rep, sub = string.format, string.gsub, string.rep, string.sub -local load, dump = load, string.dump -local tonumber, type, tostring = tonumber, type, tostring -local unpack, concat = table.unpack, table.concat -local P, V, C, S, R, Ct, Cs, Cp, Carg, Cc = lpeg.P, lpeg.V, lpeg.C, lpeg.S, lpeg.R, lpeg.Ct, lpeg.Cs, lpeg.Cp, lpeg.Carg, lpeg.Cc -local patterns, lpegmatch = lpeg.patterns, lpeg.match -local utfchar, utfbyte = utf.char, utf.byte ------ loadstripped = utilities.lua.loadstripped ------ setmetatableindex = table.setmetatableindex - -local loadstripped = _LUAVERSION < 5.2 and load or function(str) - return load(dump(load(str),true)) -- it only makes sense in luajit and luatex where we have a stipped load -end - --- todo: make a special namespace for the formatter - -if not number then number = { } end -- temp hack 
for luatex-fonts - -local stripper = patterns.stripzeros - -local function points(n) - return (not n or n == 0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536)) -end - -local function basepoints(n) - return (not n or n == 0) and "0bp" or lpegmatch(stripper,format("%.5fbp", n*(7200/7227)/65536)) -end - -number.points = points -number.basepoints = basepoints - --- str = " \n \ntest \n test\ntest " --- print("["..string.gsub(string.collapsecrlf(str),"\n","+").."]") - -local rubish = patterns.spaceortab^0 * patterns.newline -local anyrubish = patterns.spaceortab + patterns.newline -local anything = patterns.anything -local stripped = (patterns.spaceortab^1 / "") * patterns.newline -local leading = rubish^0 / "" -local trailing = (anyrubish^1 * patterns.endofstring) / "" -local redundant = rubish^3 / "\n" - -local pattern = Cs(leading * (trailing + redundant + stripped + anything)^0) - -function strings.collapsecrlf(str) - return lpegmatch(pattern,str) -end - --- The following functions might end up in another namespace. - -local repeaters = { } -- watch how we also moved the -1 in depth-1 to the creator - -function strings.newrepeater(str,offset) - offset = offset or 0 - local s = repeaters[str] - if not s then - s = { } - repeaters[str] = s - end - local t = s[offset] - if t then - return t - end - t = { } - setmetatable(t, { __index = function(t,k) - if not k then - return "" - end - local n = k + offset - local s = n > 0 and rep(str,n) or "" - t[k] = s - return s - end }) - s[offset] = t - return t -end - --- local dashes = strings.newrepeater("--",-1) --- print(dashes[2],dashes[3],dashes[1]) - -local extra, tab, start = 0, 0, 4, 0 - -local nspaces = strings.newrepeater(" ") - -string.nspaces = nspaces - -local pattern = - Carg(1) / function(t) - extra, tab, start = 0, t or 7, 1 - end - * Cs(( - Cp() * patterns.tab / function(position) - local current = (position - start + 1) + extra - local spaces = tab-(current-1) % tab - if spaces > 0 then - extra = extra + spaces - 1 - return nspaces[spaces] -- rep(" ",spaces) - else - return "" - end - end - + patterns.newline * Cp() / function(position) - extra, start = 0, position - end - + patterns.anything - )^1) - -function strings.tabtospace(str,tab) - return lpegmatch(pattern,str,1,tab or 7) -end - --- local t = { --- "1234567123456712345671234567", --- "\tb\tc", --- "a\tb\tc", --- "aa\tbb\tcc", --- "aaa\tbbb\tccc", --- "aaaa\tbbbb\tcccc", --- "aaaaa\tbbbbb\tccccc", --- "aaaaaa\tbbbbbb\tcccccc\n aaaaaa\tbbbbbb\tcccccc", --- "one\n two\nxxx three\nxx four\nx five\nsix", --- } --- for k=1,#t do --- print(strings.tabtospace(t[k])) --- end - -function strings.striplong(str) -- strips all leading spaces - str = gsub(str,"^%s*","") - str = gsub(str,"[\n\r]+ *","\n") - return str -end - --- local template = string.striplong([[ --- aaaa --- bb --- cccccc --- ]]) - -function strings.nice(str) - str = gsub(str,"[:%-+_]+"," ") -- maybe more - return str -end - --- Work in progress. Interesting is that compared to the built-in this is faster in --- luatex than in luajittex where we have a comparable speed. It only makes sense --- to use the formatter when a (somewhat) complex format is used a lot. Each formatter --- is a function so there is some overhead and not all formatted output is worth that --- overhead. Keep in mind that there is an extra function call involved. In principle --- we end up with a string concatination so one could inline such a sequence but often --- at the cost of less readabinity. So, it's a sort of (visual) compromise. 
Of course --- there is the benefit of more variants. (Concerning the speed: a simple format like --- %05fpt is better off with format than with a formatter, but as soon as you put --- something in front formatters become faster. Passing the pt as extra argument makes --- formatters behave better. Of course this is rather implementation dependent. Also, --- when a specific format is only used a few times the overhead in creating it is not --- compensated by speed.) --- --- More info can be found in cld-mkiv.pdf so here I stick to a simple list. --- --- integer %...i number --- integer %...d number --- unsigned %...u number --- character %...c number --- hexadecimal %...x number --- HEXADECIMAL %...X number --- octal %...o number --- string %...s string number --- float %...f number --- exponential %...e number --- exponential %...E number --- autofloat %...g number --- autofloat %...G number --- utf character %...c number --- force tostring %...S any --- force tostring %Q any --- force tonumber %N number (strip leading zeros) --- signed number %I number --- rounded number %r number --- 0xhexadecimal %...h character number --- 0xHEXADECIMAL %...H character number --- U+hexadecimal %...u character number --- U+HEXADECIMAL %...U character number --- points %p number (scaled points) --- basepoints %b number (scaled points) --- table concat %...t table --- serialize %...T sequenced (no nested tables) --- boolean (logic) %l boolean --- BOOLEAN %L boolean --- whitespace %...w --- automatic %...a 'whatever' (string, table, ...) --- automatic %...a "whatever" (string, table, ...) - -local n = 0 - --- we are somewhat sloppy in parsing prefixes as it's not that critical - --- hard to avoid but we can collect them in a private namespace if needed - --- inline the next two makes no sense as we only use this in logging - -local sequenced = table.sequenced - -function string.autodouble(s,sep) - if s == nil then - return '""' - end - local t = type(s) - if t == "number" then - return tostring(s) -- tostring not really needed - end - if t == "table" then - return ('"' .. sequenced(s,sep or ",") .. '"') - end - return ('"' .. tostring(s) .. '"') -end - -function string.autosingle(s,sep) - if s == nil then - return "''" - end - local t = type(s) - if t == "number" then - return tostring(s) -- tostring not really needed - end - if t == "table" then - return ("'" .. sequenced(s,sep or ",") .. "'") - end - return ("'" .. tostring(s) .. "'") -end - -local tracedchars = { } -string.tracedchars = tracedchars -strings.tracers = tracedchars - -function string.tracedchar(b) - -- todo: table - if type(b) == "number" then - return tracedchars[b] or (utfchar(b) .. " (U+" .. format('%05X',b) .. ")") - else - local c = utfbyte(b) - return tracedchars[c] or (b .. " (U+" .. format('%05X',c) .. 
")") - end -end - -function number.signed(i) - if i > 0 then - return "+", i - else - return "-", -i - end -end - -local preamble = [[ -local type = type -local tostring = tostring -local tonumber = tonumber -local format = string.format -local concat = table.concat -local signed = number.signed -local points = number.points -local basepoints = number.basepoints -local utfchar = utf.char -local utfbyte = utf.byte -local lpegmatch = lpeg.match -local nspaces = string.nspaces -local tracedchar = string.tracedchar -local autosingle = string.autosingle -local autodouble = string.autodouble -local sequenced = table.sequenced -]] - -local template = [[ -%s -%s -return function(%s) return %s end -]] - -local arguments = { "a1" } -- faster than previously used (select(n,...)) - -setmetatable(arguments, { __index = - function(t,k) - local v = t[k-1] .. ",a" .. k - t[k] = v - return v - end -}) - -local prefix_any = C((S("+- .") + R("09"))^0) -local prefix_tab = C((1-R("az","AZ","09","%%"))^0) - --- we've split all cases as then we can optimize them (let's omit the fuzzy u) - --- todo: replace outer formats in next by .. - -local format_s = function(f) - n = n + 1 - if f and f ~= "" then - return format("format('%%%ss',a%s)",f,n) - else -- best no tostring in order to stay compatible (.. does a selective tostring too) - return format("(a%s or '')",n) -- goodie: nil check - end -end - -local format_S = function(f) -- can be optimized - n = n + 1 - if f and f ~= "" then - return format("format('%%%ss',tostring(a%s))",f,n) - else - return format("tostring(a%s)",n) - end -end - -local format_q = function() - n = n + 1 - return format("(a%s and format('%%q',a%s) or '')",n,n) -- goodie: nil check (maybe separate lpeg, not faster) -end - -local format_Q = function() -- can be optimized - n = n + 1 - return format("format('%%q',tostring(a%s))",n) -end - -local format_i = function(f) - n = n + 1 - if f and f ~= "" then - return format("format('%%%si',a%s)",f,n) - else - return format("a%s",n) - end -end - -local format_d = format_i - -local format_I = function(f) - n = n + 1 - return format("format('%%s%%%si',signed(a%s))",f,n) -end - -local format_f = function(f) - n = n + 1 - return format("format('%%%sf',a%s)",f,n) -end - -local format_g = function(f) - n = n + 1 - return format("format('%%%sg',a%s)",f,n) -end - -local format_G = function(f) - n = n + 1 - return format("format('%%%sG',a%s)",f,n) -end - -local format_e = function(f) - n = n + 1 - return format("format('%%%se',a%s)",f,n) -end - -local format_E = function(f) - n = n + 1 - return format("format('%%%sE',a%s)",f,n) -end - -local format_x = function(f) - n = n + 1 - return format("format('%%%sx',a%s)",f,n) -end - -local format_X = function(f) - n = n + 1 - return format("format('%%%sX',a%s)",f,n) -end - -local format_o = function(f) - n = n + 1 - return format("format('%%%so',a%s)",f,n) -end - -local format_c = function() - n = n + 1 - return format("utfchar(a%s)",n) -end - -local format_C = function() - n = n + 1 - return format("tracedchar(a%s)",n) -end - -local format_r = function(f) - n = n + 1 - return format("format('%%%s.0f',a%s)",f,n) -end - -local format_h = function(f) - n = n + 1 - if f == "-" then - f = sub(f,2) - return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) - else - return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) - end -end - -local format_H = function(f) - n = n + 1 - if f == "-" then - f = sub(f,2) - return 
format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) - else - return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) - end -end - -local format_u = function(f) - n = n + 1 - if f == "-" then - f = sub(f,2) - return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) - else - return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) - end -end - -local format_U = function(f) - n = n + 1 - if f == "-" then - f = sub(f,2) - return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) - else - return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) - end -end - -local format_p = function() - n = n + 1 - return format("points(a%s)",n) -end - -local format_b = function() - n = n + 1 - return format("basepoints(a%s)",n) -end - -local format_t = function(f) - n = n + 1 - if f and f ~= "" then - return format("concat(a%s,%q)",n,f) - else - return format("concat(a%s)",n) - end -end - -local format_T = function(f) - n = n + 1 - if f and f ~= "" then - return format("sequenced(a%s,%q)",n,f) - else - return format("sequenced(a%s)",n) - end -end - -local format_l = function() - n = n + 1 - return format("(a%s and 'true' or 'false')",n) -end - -local format_L = function() - n = n + 1 - return format("(a%s and 'TRUE' or 'FALSE')",n) -end - -local format_N = function() -- strips leading zeros - n = n + 1 - return format("tostring(tonumber(a%s) or a%s)",n,n) -end - -local format_a = function(f) - n = n + 1 - if f and f ~= "" then - return format("autosingle(a%s,%q)",n,f) - else - return format("autosingle(a%s)",n) - end -end - -local format_A = function(f) - n = n + 1 - if f and f ~= "" then - return format("autodouble(a%s,%q)",n,f) - else - return format("autodouble(a%s)",n) - end -end - -local format_w = function(f) -- handy when doing depth related indent - n = n + 1 - f = tonumber(f) - if f then -- not that useful - return format("nspaces[%s+a%s]",f,n) -- no real need for tonumber - else - return format("nspaces[a%s]",n) -- no real need for tonumber - end -end - -local format_W = function(f) -- handy when doing depth related indent - return format("nspaces[%s]",tonumber(f) or 0) -end - -local format_rest = function(s) - return format("%q",s) -- catches " and \n and such -end - -local format_extension = function(extensions,f,name) - local extension = extensions[name] or "tostring(%s)" - local f = tonumber(f) or 1 - if f == 0 then - return extension - elseif f == 1 then - n = n + 1 - local a = "a" .. n - return format(extension,a,a) -- maybe more times? - elseif f < 0 then - local a = "a" .. (n + f + 1) - return format(extension,a,a) - else - local t = { } - for i=1,f do - n = n + 1 - t[#t+1] = "a" .. 
n - end - return format(extension,unpack(t)) - end -end - -local builder = Cs { "start", - start = ( - ( - P("%") / "" - * ( - V("!") -- new - + V("s") + V("q") - + V("i") + V("d") - + V("f") + V("g") + V("G") + V("e") + V("E") - + V("x") + V("X") + V("o") - -- - + V("c") - + V("C") - + V("S") -- new - + V("Q") -- new - + V("N") -- new - -- - + V("r") - + V("h") + V("H") + V("u") + V("U") - + V("p") + V("b") - + V("t") + V("T") - + V("l") + V("L") - + V("I") - + V("h") -- new - + V("w") -- new - + V("W") -- new - + V("a") -- new - + V("A") -- new - -- - + V("*") -- ignores probably messed up % - ) - + V("*") - ) - * (P(-1) + Carg(1)) - )^0, - -- - ["s"] = (prefix_any * P("s")) / format_s, -- %s => regular %s (string) - ["q"] = (prefix_any * P("q")) / format_q, -- %q => regular %q (quoted string) - ["i"] = (prefix_any * P("i")) / format_i, -- %i => regular %i (integer) - ["d"] = (prefix_any * P("d")) / format_d, -- %d => regular %d (integer) - ["f"] = (prefix_any * P("f")) / format_f, -- %f => regular %f (float) - ["g"] = (prefix_any * P("g")) / format_g, -- %g => regular %g (float) - ["G"] = (prefix_any * P("G")) / format_G, -- %G => regular %G (float) - ["e"] = (prefix_any * P("e")) / format_e, -- %e => regular %e (float) - ["E"] = (prefix_any * P("E")) / format_E, -- %E => regular %E (float) - ["x"] = (prefix_any * P("x")) / format_x, -- %x => regular %x (hexadecimal) - ["X"] = (prefix_any * P("X")) / format_X, -- %X => regular %X (HEXADECIMAL) - ["o"] = (prefix_any * P("o")) / format_o, -- %o => regular %o (octal) - -- - ["S"] = (prefix_any * P("S")) / format_S, -- %S => %s (tostring) - ["Q"] = (prefix_any * P("Q")) / format_S, -- %Q => %q (tostring) - ["N"] = (prefix_any * P("N")) / format_N, -- %N => tonumber (strips leading zeros) - ["c"] = (prefix_any * P("c")) / format_c, -- %c => utf character (extension to regular) - ["C"] = (prefix_any * P("C")) / format_C, -- %c => U+.... utf character - -- - ["r"] = (prefix_any * P("r")) / format_r, -- %r => round - ["h"] = (prefix_any * P("h")) / format_h, -- %h => 0x0a1b2 (when - no 0x) was v - ["H"] = (prefix_any * P("H")) / format_H, -- %H => 0x0A1B2 (when - no 0x) was V - ["u"] = (prefix_any * P("u")) / format_u, -- %u => u+0a1b2 (when - no u+) - ["U"] = (prefix_any * P("U")) / format_U, -- %U => U+0A1B2 (when - no U+) - ["p"] = (prefix_any * P("p")) / format_p, -- %p => 12.345pt / maybe: P (and more units) - ["b"] = (prefix_any * P("b")) / format_b, -- %b => 12.342bp / maybe: B (and more units) - ["t"] = (prefix_tab * P("t")) / format_t, -- %t => concat - ["T"] = (prefix_tab * P("T")) / format_T, -- %t => sequenced - ["l"] = (prefix_tab * P("l")) / format_l, -- %l => boolean - ["L"] = (prefix_tab * P("L")) / format_L, -- %L => BOOLEAN - ["I"] = (prefix_any * P("I")) / format_I, -- %I => signed integer - -- - ["w"] = (prefix_any * P("w")) / format_w, -- %w => n spaces (optional prefix is added) - ["W"] = (prefix_any * P("W")) / format_W, -- %W => mandate prefix, no specifier - -- - ["a"] = (prefix_any * P("a")) / format_a, -- %a => '...' (forces tostring) - ["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." 
(forces tostring) - -- - ["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%%%")^1) / format_rest, -- rest (including %%) - -- - ["!"] = Carg(2) * prefix_any * P("!") * C((1-P("!"))^1) * P("!") / format_extension, -} - --- we can be clever and only alias what is needed - -local direct = Cs ( - P("%")/"" - * Cc([[local format = string.format return function(str) return format("%]]) - * (S("+- .") + R("09"))^0 - * S("sqidfgGeExXo") - * Cc([[",str) end]]) - * P(-1) - ) - -local function make(t,str) - local f - local p - local p = lpegmatch(direct,str) - if p then - f = loadstripped(p)() - else - n = 0 - p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n - if n > 0 then - p = format(template,preamble,t._preamble_,arguments[n],p) --- print("builder>",p) - f = loadstripped(p)() - else - f = function() return str end - end - end - t[str] = f - return f -end - --- -- collect periodically --- --- local threshold = 1000 -- max nof cached formats --- --- local function make(t,str) --- local f = rawget(t,str) --- if f then --- return f --- end --- local parent = t._t_ --- if parent._n_ > threshold then --- local m = { _t_ = parent } --- getmetatable(parent).__index = m --- setmetatable(m, { __index = make }) --- else --- parent._n_ = parent._n_ + 1 --- end --- local f --- local p = lpegmatch(direct,str) --- if p then --- f = loadstripped(p)() --- else --- n = 0 --- p = lpegmatch(builder,str,1,"..",parent._extensions_) -- after this we know n --- if n > 0 then --- p = format(template,preamble,parent._preamble_,arguments[n],p) --- -- print("builder>",p) --- f = loadstripped(p)() --- else --- f = function() return str end --- end --- end --- t[str] = f --- return f --- end - -local function use(t,fmt,...) - return t[fmt](...) -end - -strings.formatters = { } - --- we cannot make these tables weak, unless we start using an indirect --- table (metatable) in which case we could better keep a count and --- clear that table when a threshold is reached - -function strings.formatters.new() - local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter" } - setmetatable(t, { __index = make, __call = use }) - return t -end - --- function strings.formatters.new() --- local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter", _n_ = 0 } --- local m = { _t_ = t } --- setmetatable(t, { __index = m, __call = use }) --- setmetatable(m, { __index = make }) --- return t --- end - -local formatters = strings.formatters.new() -- the default instance - -string.formatters = formatters -- in the main string namespace -string.formatter = function(str,...) return formatters[str](...) end -- sometimes nicer name - -local function add(t,name,template,preamble) - if type(t) == "table" and t._type_ == "formatter" then - t._extensions_[name] = template or "%s" - if preamble then - t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload ! - end - end -end - -strings.formatters.add = add - --- registered in the default instance (should we fall back on this one?) 
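-- editor's sketch (not part of this patch): the add() helper above hooks a
-- named %!...! extension into a formatter instance. The "csv" name, its
-- escaping pattern and the sample values are hypothetical and assume that
-- util-str.lua has been loaded.
local P, Cs = lpeg.P, lpeg.Cs

lpeg.patterns.csvescape = Cs((P('"')/'""' + P(1))^0)   -- double embedded quotes

local formatters = string.formatters                   -- the default instance defined above

utilities.strings.formatters.add(formatters, "csv",
    [[lpegmatch(csvescape,%s)]],                       -- body: escape argument 1
    [[local csvescape = lpeg.patterns.csvescape]])     -- preamble: local alias

-- formatters['"%!csv!";%p']('say "hi"', 65536)  -->  "say ""hi""";1pt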
- -lpeg.patterns.xmlescape = Cs((P("<")/"<" + P(">")/">" + P("&")/"&" + P('"')/""" + P(1))^0) -lpeg.patterns.texescape = Cs((C(S("#$%\\{}"))/"\\%1" + P(1))^0) - -add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]]) -add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]]) - --- -- yes or no: --- --- local function make(t,str) --- local f --- local p = lpegmatch(direct,str) --- if p then --- f = loadstripped(p)() --- else --- n = 0 --- p = lpegmatch(builder,str,1,",") -- after this we know n --- if n > 0 then --- p = format(template,template_shortcuts,arguments[n],p) --- f = loadstripped(p)() --- else --- f = function() return str end --- end --- end --- t[str] = f --- return f --- end --- --- local formatteds = string.formatteds or { } --- string.formatteds = formatteds --- --- setmetatable(formatteds, { __index = make, __call = use }) +if not modules then modules = { } end modules ['util-str'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +utilities = utilities or {} +utilities.strings = utilities.strings or { } +local strings = utilities.strings + +local format, gsub, rep, sub = string.format, string.gsub, string.rep, string.sub +local load, dump = load, string.dump +local tonumber, type, tostring = tonumber, type, tostring +local unpack, concat = table.unpack, table.concat +local P, V, C, S, R, Ct, Cs, Cp, Carg, Cc = lpeg.P, lpeg.V, lpeg.C, lpeg.S, lpeg.R, lpeg.Ct, lpeg.Cs, lpeg.Cp, lpeg.Carg, lpeg.Cc +local patterns, lpegmatch = lpeg.patterns, lpeg.match +local utfchar, utfbyte = utf.char, utf.byte +----- loadstripped = utilities.lua.loadstripped +----- setmetatableindex = table.setmetatableindex + +local loadstripped = _LUAVERSION < 5.2 and load or function(str) + return load(dump(load(str),true)) -- it only makes sense in luajit and luatex where we have a stipped load +end + +-- todo: make a special namespace for the formatter + +if not number then number = { } end -- temp hack for luatex-fonts + +local stripper = patterns.stripzeros + +local function points(n) + return (not n or n == 0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536)) +end + +local function basepoints(n) + return (not n or n == 0) and "0bp" or lpegmatch(stripper,format("%.5fbp", n*(7200/7227)/65536)) +end + +number.points = points +number.basepoints = basepoints + +-- str = " \n \ntest \n test\ntest " +-- print("["..string.gsub(string.collapsecrlf(str),"\n","+").."]") + +local rubish = patterns.spaceortab^0 * patterns.newline +local anyrubish = patterns.spaceortab + patterns.newline +local anything = patterns.anything +local stripped = (patterns.spaceortab^1 / "") * patterns.newline +local leading = rubish^0 / "" +local trailing = (anyrubish^1 * patterns.endofstring) / "" +local redundant = rubish^3 / "\n" + +local pattern = Cs(leading * (trailing + redundant + stripped + anything)^0) + +function strings.collapsecrlf(str) + return lpegmatch(pattern,str) +end + +-- The following functions might end up in another namespace. 
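-- editor's sketch (not part of this patch): expected behaviour of the helpers
-- defined above, assuming the usual 65536 scaled points per TeX point; the
-- sample values are illustrative only.
--
--   number.points(65536)        -- "1pt"       (trailing zeros stripped)
--   number.points(0)            -- "0pt"
--   number.basepoints(65536)    -- "0.99626bp" (one point scaled by 7200/7227)
--
--   utilities.strings.collapsecrlf("  \n \ntest  \n\n\n\ntest\n  ")
--   -- drops leading and trailing whitespace lines, removes spaces before a
--   -- newline and collapses runs of blank lines into a single newline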
+ +local repeaters = { } -- watch how we also moved the -1 in depth-1 to the creator + +function strings.newrepeater(str,offset) + offset = offset or 0 + local s = repeaters[str] + if not s then + s = { } + repeaters[str] = s + end + local t = s[offset] + if t then + return t + end + t = { } + setmetatable(t, { __index = function(t,k) + if not k then + return "" + end + local n = k + offset + local s = n > 0 and rep(str,n) or "" + t[k] = s + return s + end }) + s[offset] = t + return t +end + +-- local dashes = strings.newrepeater("--",-1) +-- print(dashes[2],dashes[3],dashes[1]) + +local extra, tab, start = 0, 0, 4, 0 + +local nspaces = strings.newrepeater(" ") + +string.nspaces = nspaces + +local pattern = + Carg(1) / function(t) + extra, tab, start = 0, t or 7, 1 + end + * Cs(( + Cp() * patterns.tab / function(position) + local current = (position - start + 1) + extra + local spaces = tab-(current-1) % tab + if spaces > 0 then + extra = extra + spaces - 1 + return nspaces[spaces] -- rep(" ",spaces) + else + return "" + end + end + + patterns.newline * Cp() / function(position) + extra, start = 0, position + end + + patterns.anything + )^1) + +function strings.tabtospace(str,tab) + return lpegmatch(pattern,str,1,tab or 7) +end + +-- local t = { +-- "1234567123456712345671234567", +-- "\tb\tc", +-- "a\tb\tc", +-- "aa\tbb\tcc", +-- "aaa\tbbb\tccc", +-- "aaaa\tbbbb\tcccc", +-- "aaaaa\tbbbbb\tccccc", +-- "aaaaaa\tbbbbbb\tcccccc\n aaaaaa\tbbbbbb\tcccccc", +-- "one\n two\nxxx three\nxx four\nx five\nsix", +-- } +-- for k=1,#t do +-- print(strings.tabtospace(t[k])) +-- end + +function strings.striplong(str) -- strips all leading spaces + str = gsub(str,"^%s*","") + str = gsub(str,"[\n\r]+ *","\n") + return str +end + +-- local template = string.striplong([[ +-- aaaa +-- bb +-- cccccc +-- ]]) + +function strings.nice(str) + str = gsub(str,"[:%-+_]+"," ") -- maybe more + return str +end + +-- Work in progress. Interesting is that compared to the built-in this is faster in +-- luatex than in luajittex where we have a comparable speed. It only makes sense +-- to use the formatter when a (somewhat) complex format is used a lot. Each formatter +-- is a function so there is some overhead and not all formatted output is worth that +-- overhead. Keep in mind that there is an extra function call involved. In principle +-- we end up with a string concatination so one could inline such a sequence but often +-- at the cost of less readabinity. So, it's a sort of (visual) compromise. Of course +-- there is the benefit of more variants. (Concerning the speed: a simple format like +-- %05fpt is better off with format than with a formatter, but as soon as you put +-- something in front formatters become faster. Passing the pt as extra argument makes +-- formatters behave better. Of course this is rather implementation dependent. Also, +-- when a specific format is only used a few times the overhead in creating it is not +-- compensated by speed.) +-- +-- More info can be found in cld-mkiv.pdf so here I stick to a simple list. 
+-- +-- integer %...i number +-- integer %...d number +-- unsigned %...u number +-- character %...c number +-- hexadecimal %...x number +-- HEXADECIMAL %...X number +-- octal %...o number +-- string %...s string number +-- float %...f number +-- exponential %...e number +-- exponential %...E number +-- autofloat %...g number +-- autofloat %...G number +-- utf character %...c number +-- force tostring %...S any +-- force tostring %Q any +-- force tonumber %N number (strip leading zeros) +-- signed number %I number +-- rounded number %r number +-- 0xhexadecimal %...h character number +-- 0xHEXADECIMAL %...H character number +-- U+hexadecimal %...u character number +-- U+HEXADECIMAL %...U character number +-- points %p number (scaled points) +-- basepoints %b number (scaled points) +-- table concat %...t table +-- serialize %...T sequenced (no nested tables) +-- boolean (logic) %l boolean +-- BOOLEAN %L boolean +-- whitespace %...w +-- automatic %...a 'whatever' (string, table, ...) +-- automatic %...a "whatever" (string, table, ...) + +local n = 0 + +-- we are somewhat sloppy in parsing prefixes as it's not that critical + +-- hard to avoid but we can collect them in a private namespace if needed + +-- inline the next two makes no sense as we only use this in logging + +local sequenced = table.sequenced + +function string.autodouble(s,sep) + if s == nil then + return '""' + end + local t = type(s) + if t == "number" then + return tostring(s) -- tostring not really needed + end + if t == "table" then + return ('"' .. sequenced(s,sep or ",") .. '"') + end + return ('"' .. tostring(s) .. '"') +end + +function string.autosingle(s,sep) + if s == nil then + return "''" + end + local t = type(s) + if t == "number" then + return tostring(s) -- tostring not really needed + end + if t == "table" then + return ("'" .. sequenced(s,sep or ",") .. "'") + end + return ("'" .. tostring(s) .. "'") +end + +local tracedchars = { } +string.tracedchars = tracedchars +strings.tracers = tracedchars + +function string.tracedchar(b) + -- todo: table + if type(b) == "number" then + return tracedchars[b] or (utfchar(b) .. " (U+" .. format('%05X',b) .. ")") + else + local c = utfbyte(b) + return tracedchars[c] or (b .. " (U+" .. format('%05X',c) .. ")") + end +end + +function number.signed(i) + if i > 0 then + return "+", i + else + return "-", -i + end +end + +local preamble = [[ +local type = type +local tostring = tostring +local tonumber = tonumber +local format = string.format +local concat = table.concat +local signed = number.signed +local points = number.points +local basepoints = number.basepoints +local utfchar = utf.char +local utfbyte = utf.byte +local lpegmatch = lpeg.match +local nspaces = string.nspaces +local tracedchar = string.tracedchar +local autosingle = string.autosingle +local autodouble = string.autodouble +local sequenced = table.sequenced +]] + +local template = [[ +%s +%s +return function(%s) return %s end +]] + +local arguments = { "a1" } -- faster than previously used (select(n,...)) + +setmetatable(arguments, { __index = + function(t,k) + local v = t[k-1] .. ",a" .. k + t[k] = v + return v + end +}) + +local prefix_any = C((S("+- .") + R("09"))^0) +local prefix_tab = C((1-R("az","AZ","09","%%"))^0) + +-- we've split all cases as then we can optimize them (let's omit the fuzzy u) + +-- todo: replace outer formats in next by .. 
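-- editor's sketch (not part of this patch): roughly what the directive
-- handlers below and the template/preamble above compile a format string
-- into; the exact generated source may differ, but each directive becomes a
-- small code snippet and the snippets are concatenated with "..".
--
--   local fmt = string.formatters["%s: %p (%l)"]
--
-- compiles to something equivalent to:
--
--   local points = number.points   -- pulled in via the shared preamble
--   return function(a1,a2,a3)
--       return (a1 or '') .. ": " .. points(a2) .. " (" ..
--              (a3 and 'true' or 'false') .. ")"
--   end
--
--   fmt("width",65536,true)        -- "width: 1pt (true)"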
+ +local format_s = function(f) + n = n + 1 + if f and f ~= "" then + return format("format('%%%ss',a%s)",f,n) + else -- best no tostring in order to stay compatible (.. does a selective tostring too) + return format("(a%s or '')",n) -- goodie: nil check + end +end + +local format_S = function(f) -- can be optimized + n = n + 1 + if f and f ~= "" then + return format("format('%%%ss',tostring(a%s))",f,n) + else + return format("tostring(a%s)",n) + end +end + +local format_q = function() + n = n + 1 + return format("(a%s and format('%%q',a%s) or '')",n,n) -- goodie: nil check (maybe separate lpeg, not faster) +end + +local format_Q = function() -- can be optimized + n = n + 1 + return format("format('%%q',tostring(a%s))",n) +end + +local format_i = function(f) + n = n + 1 + if f and f ~= "" then + return format("format('%%%si',a%s)",f,n) + else + return format("a%s",n) + end +end + +local format_d = format_i + +local format_I = function(f) + n = n + 1 + return format("format('%%s%%%si',signed(a%s))",f,n) +end + +local format_f = function(f) + n = n + 1 + return format("format('%%%sf',a%s)",f,n) +end + +local format_g = function(f) + n = n + 1 + return format("format('%%%sg',a%s)",f,n) +end + +local format_G = function(f) + n = n + 1 + return format("format('%%%sG',a%s)",f,n) +end + +local format_e = function(f) + n = n + 1 + return format("format('%%%se',a%s)",f,n) +end + +local format_E = function(f) + n = n + 1 + return format("format('%%%sE',a%s)",f,n) +end + +local format_x = function(f) + n = n + 1 + return format("format('%%%sx',a%s)",f,n) +end + +local format_X = function(f) + n = n + 1 + return format("format('%%%sX',a%s)",f,n) +end + +local format_o = function(f) + n = n + 1 + return format("format('%%%so',a%s)",f,n) +end + +local format_c = function() + n = n + 1 + return format("utfchar(a%s)",n) +end + +local format_C = function() + n = n + 1 + return format("tracedchar(a%s)",n) +end + +local format_r = function(f) + n = n + 1 + return format("format('%%%s.0f',a%s)",f,n) +end + +local format_h = function(f) + n = n + 1 + if f == "-" then + f = sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + else + return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + end +end + +local format_H = function(f) + n = n + 1 + if f == "-" then + f = sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + else + return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + end +end + +local format_u = function(f) + n = n + 1 + if f == "-" then + f = sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + else + return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + end +end + +local format_U = function(f) + n = n + 1 + if f == "-" then + f = sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + else + return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + end +end + +local format_p = function() + n = n + 1 + return format("points(a%s)",n) +end + +local format_b = function() + n = n + 1 + return format("basepoints(a%s)",n) +end + +local format_t = function(f) + n = n + 1 + if f and f ~= "" then + return 
format("concat(a%s,%q)",n,f) + else + return format("concat(a%s)",n) + end +end + +local format_T = function(f) + n = n + 1 + if f and f ~= "" then + return format("sequenced(a%s,%q)",n,f) + else + return format("sequenced(a%s)",n) + end +end + +local format_l = function() + n = n + 1 + return format("(a%s and 'true' or 'false')",n) +end + +local format_L = function() + n = n + 1 + return format("(a%s and 'TRUE' or 'FALSE')",n) +end + +local format_N = function() -- strips leading zeros + n = n + 1 + return format("tostring(tonumber(a%s) or a%s)",n,n) +end + +local format_a = function(f) + n = n + 1 + if f and f ~= "" then + return format("autosingle(a%s,%q)",n,f) + else + return format("autosingle(a%s)",n) + end +end + +local format_A = function(f) + n = n + 1 + if f and f ~= "" then + return format("autodouble(a%s,%q)",n,f) + else + return format("autodouble(a%s)",n) + end +end + +local format_w = function(f) -- handy when doing depth related indent + n = n + 1 + f = tonumber(f) + if f then -- not that useful + return format("nspaces[%s+a%s]",f,n) -- no real need for tonumber + else + return format("nspaces[a%s]",n) -- no real need for tonumber + end +end + +local format_W = function(f) -- handy when doing depth related indent + return format("nspaces[%s]",tonumber(f) or 0) +end + +local format_rest = function(s) + return format("%q",s) -- catches " and \n and such +end + +local format_extension = function(extensions,f,name) + local extension = extensions[name] or "tostring(%s)" + local f = tonumber(f) or 1 + if f == 0 then + return extension + elseif f == 1 then + n = n + 1 + local a = "a" .. n + return format(extension,a,a) -- maybe more times? + elseif f < 0 then + local a = "a" .. (n + f + 1) + return format(extension,a,a) + else + local t = { } + for i=1,f do + n = n + 1 + t[#t+1] = "a" .. 
n + end + return format(extension,unpack(t)) + end +end + +local builder = Cs { "start", + start = ( + ( + P("%") / "" + * ( + V("!") -- new + + V("s") + V("q") + + V("i") + V("d") + + V("f") + V("g") + V("G") + V("e") + V("E") + + V("x") + V("X") + V("o") + -- + + V("c") + + V("C") + + V("S") -- new + + V("Q") -- new + + V("N") -- new + -- + + V("r") + + V("h") + V("H") + V("u") + V("U") + + V("p") + V("b") + + V("t") + V("T") + + V("l") + V("L") + + V("I") + + V("h") -- new + + V("w") -- new + + V("W") -- new + + V("a") -- new + + V("A") -- new + -- + + V("*") -- ignores probably messed up % + ) + + V("*") + ) + * (P(-1) + Carg(1)) + )^0, + -- + ["s"] = (prefix_any * P("s")) / format_s, -- %s => regular %s (string) + ["q"] = (prefix_any * P("q")) / format_q, -- %q => regular %q (quoted string) + ["i"] = (prefix_any * P("i")) / format_i, -- %i => regular %i (integer) + ["d"] = (prefix_any * P("d")) / format_d, -- %d => regular %d (integer) + ["f"] = (prefix_any * P("f")) / format_f, -- %f => regular %f (float) + ["g"] = (prefix_any * P("g")) / format_g, -- %g => regular %g (float) + ["G"] = (prefix_any * P("G")) / format_G, -- %G => regular %G (float) + ["e"] = (prefix_any * P("e")) / format_e, -- %e => regular %e (float) + ["E"] = (prefix_any * P("E")) / format_E, -- %E => regular %E (float) + ["x"] = (prefix_any * P("x")) / format_x, -- %x => regular %x (hexadecimal) + ["X"] = (prefix_any * P("X")) / format_X, -- %X => regular %X (HEXADECIMAL) + ["o"] = (prefix_any * P("o")) / format_o, -- %o => regular %o (octal) + -- + ["S"] = (prefix_any * P("S")) / format_S, -- %S => %s (tostring) + ["Q"] = (prefix_any * P("Q")) / format_S, -- %Q => %q (tostring) + ["N"] = (prefix_any * P("N")) / format_N, -- %N => tonumber (strips leading zeros) + ["c"] = (prefix_any * P("c")) / format_c, -- %c => utf character (extension to regular) + ["C"] = (prefix_any * P("C")) / format_C, -- %c => U+.... utf character + -- + ["r"] = (prefix_any * P("r")) / format_r, -- %r => round + ["h"] = (prefix_any * P("h")) / format_h, -- %h => 0x0a1b2 (when - no 0x) was v + ["H"] = (prefix_any * P("H")) / format_H, -- %H => 0x0A1B2 (when - no 0x) was V + ["u"] = (prefix_any * P("u")) / format_u, -- %u => u+0a1b2 (when - no u+) + ["U"] = (prefix_any * P("U")) / format_U, -- %U => U+0A1B2 (when - no U+) + ["p"] = (prefix_any * P("p")) / format_p, -- %p => 12.345pt / maybe: P (and more units) + ["b"] = (prefix_any * P("b")) / format_b, -- %b => 12.342bp / maybe: B (and more units) + ["t"] = (prefix_tab * P("t")) / format_t, -- %t => concat + ["T"] = (prefix_tab * P("T")) / format_T, -- %t => sequenced + ["l"] = (prefix_tab * P("l")) / format_l, -- %l => boolean + ["L"] = (prefix_tab * P("L")) / format_L, -- %L => BOOLEAN + ["I"] = (prefix_any * P("I")) / format_I, -- %I => signed integer + -- + ["w"] = (prefix_any * P("w")) / format_w, -- %w => n spaces (optional prefix is added) + ["W"] = (prefix_any * P("W")) / format_W, -- %W => mandate prefix, no specifier + -- + ["a"] = (prefix_any * P("a")) / format_a, -- %a => '...' (forces tostring) + ["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." 
(forces tostring) + -- + ["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%%%")^1) / format_rest, -- rest (including %%) + -- + ["!"] = Carg(2) * prefix_any * P("!") * C((1-P("!"))^1) * P("!") / format_extension, +} + +-- we can be clever and only alias what is needed + +local direct = Cs ( + P("%")/"" + * Cc([[local format = string.format return function(str) return format("%]]) + * (S("+- .") + R("09"))^0 + * S("sqidfgGeExXo") + * Cc([[",str) end]]) + * P(-1) + ) + +local function make(t,str) + local f + local p + local p = lpegmatch(direct,str) + if p then + f = loadstripped(p)() + else + n = 0 + p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n + if n > 0 then + p = format(template,preamble,t._preamble_,arguments[n],p) +-- print("builder>",p) + f = loadstripped(p)() + else + f = function() return str end + end + end + t[str] = f + return f +end + +-- -- collect periodically +-- +-- local threshold = 1000 -- max nof cached formats +-- +-- local function make(t,str) +-- local f = rawget(t,str) +-- if f then +-- return f +-- end +-- local parent = t._t_ +-- if parent._n_ > threshold then +-- local m = { _t_ = parent } +-- getmetatable(parent).__index = m +-- setmetatable(m, { __index = make }) +-- else +-- parent._n_ = parent._n_ + 1 +-- end +-- local f +-- local p = lpegmatch(direct,str) +-- if p then +-- f = loadstripped(p)() +-- else +-- n = 0 +-- p = lpegmatch(builder,str,1,"..",parent._extensions_) -- after this we know n +-- if n > 0 then +-- p = format(template,preamble,parent._preamble_,arguments[n],p) +-- -- print("builder>",p) +-- f = loadstripped(p)() +-- else +-- f = function() return str end +-- end +-- end +-- t[str] = f +-- return f +-- end + +local function use(t,fmt,...) + return t[fmt](...) +end + +strings.formatters = { } + +-- we cannot make these tables weak, unless we start using an indirect +-- table (metatable) in which case we could better keep a count and +-- clear that table when a threshold is reached + +function strings.formatters.new() + local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter" } + setmetatable(t, { __index = make, __call = use }) + return t +end + +-- function strings.formatters.new() +-- local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter", _n_ = 0 } +-- local m = { _t_ = t } +-- setmetatable(t, { __index = m, __call = use }) +-- setmetatable(m, { __index = make }) +-- return t +-- end + +local formatters = strings.formatters.new() -- the default instance + +string.formatters = formatters -- in the main string namespace +string.formatter = function(str,...) return formatters[str](...) end -- sometimes nicer name + +local function add(t,name,template,preamble) + if type(t) == "table" and t._type_ == "formatter" then + t._extensions_[name] = template or "%s" + if preamble then + t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload ! + end + end +end + +strings.formatters.add = add + +-- registered in the default instance (should we fall back on this one?) 
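-- editor's sketch (not part of this patch): how the two extensions registered
-- just below are meant to be used. In the ConTeXt sources xmlescape maps <, >,
-- & and " to the XML entities &lt; &gt; &amp; &quot; and texescape prefixes
-- #$%\{} with a backslash, which is what the expected output assumes; the
-- sample strings are illustrative only.
--
--   local f = string.formatters
--   f["<title>%!xml!</title>"]("a & b < c")
--   -- <title>a &amp; b &lt; c</title>
--   f["\\starttext %!tex! \\stoptext"]("50% of $x$")
--   -- \starttext 50\% of \$x\$ \stoptext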
+ +lpeg.patterns.xmlescape = Cs((P("<")/"<" + P(">")/">" + P("&")/"&" + P('"')/""" + P(1))^0) +lpeg.patterns.texescape = Cs((C(S("#$%\\{}"))/"\\%1" + P(1))^0) + +add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]]) +add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]]) + +-- -- yes or no: +-- +-- local function make(t,str) +-- local f +-- local p = lpegmatch(direct,str) +-- if p then +-- f = loadstripped(p)() +-- else +-- n = 0 +-- p = lpegmatch(builder,str,1,",") -- after this we know n +-- if n > 0 then +-- p = format(template,template_shortcuts,arguments[n],p) +-- f = loadstripped(p)() +-- else +-- f = function() return str end +-- end +-- end +-- t[str] = f +-- return f +-- end +-- +-- local formatteds = string.formatteds or { } +-- string.formatteds = formatteds +-- +-- setmetatable(formatteds, { __index = make, __call = use }) diff --git a/tex/context/base/util-tab.lua b/tex/context/base/util-tab.lua index ecf36b137..30554015b 100644 --- a/tex/context/base/util-tab.lua +++ b/tex/context/base/util-tab.lua @@ -1,493 +1,493 @@ -if not modules then modules = { } end modules ['util-tab'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -utilities = utilities or {} -utilities.tables = utilities.tables or { } -local tables = utilities.tables - -local format, gmatch, gsub = string.format, string.gmatch, string.gsub -local concat, insert, remove = table.concat, table.insert, table.remove -local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring -local type, next, rawset, tonumber, tostring, load, select = type, next, rawset, tonumber, tostring, load, select -local lpegmatch, P, Cs, Cc = lpeg.match, lpeg.P, lpeg.Cs, lpeg.Cc -local serialize, sortedkeys, sortedpairs = table.serialize, table.sortedkeys, table.sortedpairs -local formatters = string.formatters - -local splitter = lpeg.tsplitat(".") - -function tables.definetable(target,nofirst,nolast) -- defines undefined tables - local composed, shortcut, t = nil, nil, { } - local snippets = lpegmatch(splitter,target) - for i=1,#snippets - (nolast and 1 or 0) do - local name = snippets[i] - if composed then - composed = shortcut .. "." .. name - shortcut = shortcut .. "_" .. name - t[#t+1] = formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut) - else - composed = name - shortcut = name - if not nofirst then - t[#t+1] = formatters["%s = %s or { }"](composed,composed) - end - end - end - if nolast then - composed = shortcut .. "." .. snippets[#snippets] - end - return concat(t,"\n"), composed -end - --- local t = tables.definedtable("a","b","c","d") - -function tables.definedtable(...) - local t = _G - for i=1,select("#",...) do - local li = select(i,...) 
- local tl = t[li] - if not tl then - tl = { } - t[li] = tl - end - t = tl - end - return t -end - -function tables.accesstable(target,root) - local t = root or _G - for name in gmatch(target,"([^%.]+)") do - t = t[name] - if not t then - return - end - end - return t -end - -function tables.migratetable(target,v,root) - local t = root or _G - local names = string.split(target,".") - for i=1,#names-1 do - local name = names[i] - t[name] = t[name] or { } - t = t[name] - if not t then - return - end - end - t[names[#names]] = v -end - -function tables.removevalue(t,value) -- todo: n - if value then - for i=1,#t do - if t[i] == value then - remove(t,i) - -- remove all, so no: return - end - end - end -end - -function tables.insertbeforevalue(t,value,extra) - for i=1,#t do - if t[i] == extra then - remove(t,i) - end - end - for i=1,#t do - if t[i] == value then - insert(t,i,extra) - return - end - end - insert(t,1,extra) -end - -function tables.insertaftervalue(t,value,extra) - for i=1,#t do - if t[i] == extra then - remove(t,i) - end - end - for i=1,#t do - if t[i] == value then - insert(t,i+1,extra) - return - end - end - insert(t,#t+1,extra) -end - --- experimental - -local escape = Cs(Cc('"') * ((P('"')/'""' + P(1))^0) * Cc('"')) - -function table.tocsv(t,specification) - if t and #t > 0 then - local result = { } - local r = { } - specification = specification or { } - local fields = specification.fields - if type(fields) ~= "string" then - fields = sortedkeys(t[1]) - end - local separator = specification.separator or "," - if specification.preamble == true then - for f=1,#fields do - r[f] = lpegmatch(escape,tostring(fields[f])) - end - result[1] = concat(r,separator) - end - for i=1,#t do - local ti = t[i] - for f=1,#fields do - local field = ti[fields[f]] - if type(field) == "string" then - r[f] = lpegmatch(escape,field) - else - r[f] = tostring(field) - end - end - result[#result+1] = concat(r,separator) - end - return concat(result,"\n") - else - return "" - end -end - --- local nspaces = utilities.strings.newrepeater(" ") --- local escape = Cs((P("<")/"<" + P(">")/">" + P("&")/"&" + P(1))^0) --- --- local function toxml(t,d,result,step) --- for k, v in sortedpairs(t) do --- local s = nspaces[d] --- local tk = type(k) --- local tv = type(v) --- if tv == "table" then --- if tk == "number" then --- result[#result+1] = format("%s",s,k) --- toxml(v,d+step,result,step) --- result[#result+1] = format("%s",s,k) --- else --- result[#result+1] = format("%s<%s>",s,k) --- toxml(v,d+step,result,step) --- result[#result+1] = format("%s",s,k) --- end --- elseif tv == "string" then --- if tk == "number" then --- result[#result+1] = format("%s%s",s,k,lpegmatch(escape,v),k) --- else --- result[#result+1] = format("%s<%s>%s",s,k,lpegmatch(escape,v),k) --- end --- elseif tk == "number" then --- result[#result+1] = format("%s%s",s,k,tostring(v),k) --- else --- result[#result+1] = format("%s<%s>%s",s,k,tostring(v),k) --- end --- end --- end --- --- much faster - -local nspaces = utilities.strings.newrepeater(" ") - -local function toxml(t,d,result,step) - for k, v in sortedpairs(t) do - local s = nspaces[d] -- inlining this is somewhat faster but gives more formatters - local tk = type(k) - local tv = type(v) - if tv == "table" then - if tk == "number" then - result[#result+1] = formatters["%s"](s,k) - toxml(v,d+step,result,step) - result[#result+1] = formatters["%s"](s,k) - else - result[#result+1] = formatters["%s<%s>"](s,k) - toxml(v,d+step,result,step) - result[#result+1] = formatters["%s"](s,k) - end 
- elseif tv == "string" then - if tk == "number" then - result[#result+1] = formatters["%s%!xml!"](s,k,v,k) - else - result[#result+1] = formatters["%s<%s>%!xml!"](s,k,v,k) - end - elseif tk == "number" then - result[#result+1] = formatters["%s%S"](s,k,v,k) - else - result[#result+1] = formatters["%s<%s>%S"](s,k,v,k) - end - end -end - --- function table.toxml(t,name,nobanner,indent,spaces) --- local noroot = name == false --- local result = (nobanner or noroot) and { } or { "" } --- local indent = rep(" ",indent or 0) --- local spaces = rep(" ",spaces or 1) --- if noroot then --- toxml( t, inndent, result, spaces) --- else --- toxml( { [name or "root"] = t }, indent, result, spaces) --- end --- return concat(result,"\n") --- end - -function table.toxml(t,specification) - specification = specification or { } - local name = specification.name - local noroot = name == false - local result = (specification.nobanner or noroot) and { } or { "" } - local indent = specification.indent or 0 - local spaces = specification.spaces or 1 - if noroot then - toxml( t, indent, result, spaces) - else - toxml( { [name or "data"] = t }, indent, result, spaces) - end - return concat(result,"\n") -end - --- also experimental - --- encapsulate(table,utilities.tables) --- encapsulate(table,utilities.tables,true) --- encapsulate(table,true) - -function tables.encapsulate(core,capsule,protect) - if type(capsule) ~= "table" then - protect = true - capsule = { } - end - for key, value in next, core do - if capsule[key] then - print(formatters["\ninvalid %s %a in %a"]("inheritance",key,core)) - os.exit() - else - capsule[key] = value - end - end - if protect then - for key, value in next, core do - core[key] = nil - end - setmetatable(core, { - __index = capsule, - __newindex = function(t,key,value) - if capsule[key] then - print(formatters["\ninvalid %s %a' in %a"]("overload",key,core)) - os.exit() - else - rawset(t,key,value) - end - end - } ) - end -end - -local function fastserialize(t,r,outer) -- no mixes - r[#r+1] = "{" - local n = #t - if n > 0 then - for i=1,n do - local v = t[i] - local tv = type(v) - if tv == "string" then - r[#r+1] = formatters["%q,"](v) - elseif tv == "number" then - r[#r+1] = formatters["%s,"](v) - elseif tv == "table" then - fastserialize(v,r) - elseif tv == "boolean" then - r[#r+1] = formatters["%S,"](v) - end - end - else - for k, v in next, t do - local tv = type(v) - if tv == "string" then - r[#r+1] = formatters["[%q]=%q,"](k,v) - elseif tv == "number" then - r[#r+1] = formatters["[%q]=%s,"](k,v) - elseif tv == "table" then - r[#r+1] = formatters["[%q]="](k) - fastserialize(v,r) - elseif tv == "boolean" then - r[#r+1] = formatters["[%q]=%S,"](k,v) - end - end - end - if outer then - r[#r+1] = "}" - else - r[#r+1] = "}," - end - return r -end - --- local f_hashed_string = formatters["[%q]=%q,"] --- local f_hashed_number = formatters["[%q]=%s,"] --- local f_hashed_table = formatters["[%q]="] --- local f_hashed_true = formatters["[%q]=true,"] --- local f_hashed_false = formatters["[%q]=false,"] --- --- local f_indexed_string = formatters["%q,"] --- local f_indexed_number = formatters["%s,"] --- ----- f_indexed_true = formatters["true,"] --- ----- f_indexed_false = formatters["false,"] --- --- local function fastserialize(t,r,outer) -- no mixes --- r[#r+1] = "{" --- local n = #t --- if n > 0 then --- for i=1,n do --- local v = t[i] --- local tv = type(v) --- if tv == "string" then --- r[#r+1] = f_indexed_string(v) --- elseif tv == "number" then --- r[#r+1] = f_indexed_number(v) --- 
elseif tv == "table" then --- fastserialize(v,r) --- elseif tv == "boolean" then --- -- r[#r+1] = v and f_indexed_true(k) or f_indexed_false(k) --- r[#r+1] = v and "true," or "false," --- end --- end --- else --- for k, v in next, t do --- local tv = type(v) --- if tv == "string" then --- r[#r+1] = f_hashed_string(k,v) --- elseif tv == "number" then --- r[#r+1] = f_hashed_number(k,v) --- elseif tv == "table" then --- r[#r+1] = f_hashed_table(k) --- fastserialize(v,r) --- elseif tv == "boolean" then --- r[#r+1] = v and f_hashed_true(k) or f_hashed_false(k) --- end --- end --- end --- if outer then --- r[#r+1] = "}" --- else --- r[#r+1] = "}," --- end --- return r --- end - -function table.fastserialize(t,prefix) -- so prefix should contain the = - return concat(fastserialize(t,{ prefix or "return" },true)) -end - -function table.deserialize(str) - if not str or str == "" then - return - end - local code = load(str) - if not code then - return - end - code = code() - if not code then - return - end - return code -end - --- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } }) - -function table.load(filename) - if filename then - local t = io.loaddata(filename) - if t and t ~= "" then - t = load(t) - if type(t) == "function" then - t = t() - if type(t) == "table" then - return t - end - end - end - end -end - -function table.save(filename,t,n,...) - io.savedata(filename,serialize(t,n == nil and true or n,...)) -end - -local function slowdrop(t) - local r = { } - local l = { } - for i=1,#t do - local ti = t[i] - local j = 0 - for k, v in next, ti do - j = j + 1 - l[j] = formatters["%s=%q"](k,v) - end - r[i] = formatters[" {%t},\n"](l) - end - return formatters["return {\n%st}"](r) -end - -local function fastdrop(t) - local r = { "return {\n" } - for i=1,#t do - local ti = t[i] - r[#r+1] = " {" - for k, v in next, ti do - r[#r+1] = formatters["%s=%q"](k,v) - end - r[#r+1] = "},\n" - end - r[#r+1] = "}" - return concat(r) -end - -function table.drop(t,slow) -- only { { a=2 }, {a=3} } - if #t == 0 then - return "return { }" - elseif slow == true then - return slowdrop(t) -- less memory - else - return fastdrop(t) -- some 15% faster - end -end - -function table.autokey(t,k) - local v = { } - t[k] = v - return v -end - -local selfmapper = { __index = function(t,k) t[k] = k return k end } - -function table.twowaymapper(t) - if not t then - t = { } - else - for i=0,#t do - local ti = t[i] -- t[1] = "one" - if ti then - local i = tostring(i) - t[i] = ti -- t["1"] = "one" - t[ti] = i -- t["one"] = "1" - end - end - t[""] = t[0] or "" - end - -- setmetatableindex(t,"key") - setmetatable(t,selfmapper) - return t -end - +if not modules then modules = { } end modules ['util-tab'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +utilities = utilities or {} +utilities.tables = utilities.tables or { } +local tables = utilities.tables + +local format, gmatch, gsub = string.format, string.gmatch, string.gsub +local concat, insert, remove = table.concat, table.insert, table.remove +local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring +local type, next, rawset, tonumber, tostring, load, select = type, next, rawset, tonumber, tostring, load, select +local lpegmatch, P, Cs, Cc = lpeg.match, lpeg.P, lpeg.Cs, lpeg.Cc +local serialize, sortedkeys, sortedpairs = 
table.serialize, table.sortedkeys, table.sortedpairs +local formatters = string.formatters + +local splitter = lpeg.tsplitat(".") + +function tables.definetable(target,nofirst,nolast) -- defines undefined tables + local composed, shortcut, t = nil, nil, { } + local snippets = lpegmatch(splitter,target) + for i=1,#snippets - (nolast and 1 or 0) do + local name = snippets[i] + if composed then + composed = shortcut .. "." .. name + shortcut = shortcut .. "_" .. name + t[#t+1] = formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut) + else + composed = name + shortcut = name + if not nofirst then + t[#t+1] = formatters["%s = %s or { }"](composed,composed) + end + end + end + if nolast then + composed = shortcut .. "." .. snippets[#snippets] + end + return concat(t,"\n"), composed +end + +-- local t = tables.definedtable("a","b","c","d") + +function tables.definedtable(...) + local t = _G + for i=1,select("#",...) do + local li = select(i,...) + local tl = t[li] + if not tl then + tl = { } + t[li] = tl + end + t = tl + end + return t +end + +function tables.accesstable(target,root) + local t = root or _G + for name in gmatch(target,"([^%.]+)") do + t = t[name] + if not t then + return + end + end + return t +end + +function tables.migratetable(target,v,root) + local t = root or _G + local names = string.split(target,".") + for i=1,#names-1 do + local name = names[i] + t[name] = t[name] or { } + t = t[name] + if not t then + return + end + end + t[names[#names]] = v +end + +function tables.removevalue(t,value) -- todo: n + if value then + for i=1,#t do + if t[i] == value then + remove(t,i) + -- remove all, so no: return + end + end + end +end + +function tables.insertbeforevalue(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i,extra) + return + end + end + insert(t,1,extra) +end + +function tables.insertaftervalue(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i+1,extra) + return + end + end + insert(t,#t+1,extra) +end + +-- experimental + +local escape = Cs(Cc('"') * ((P('"')/'""' + P(1))^0) * Cc('"')) + +function table.tocsv(t,specification) + if t and #t > 0 then + local result = { } + local r = { } + specification = specification or { } + local fields = specification.fields + if type(fields) ~= "string" then + fields = sortedkeys(t[1]) + end + local separator = specification.separator or "," + if specification.preamble == true then + for f=1,#fields do + r[f] = lpegmatch(escape,tostring(fields[f])) + end + result[1] = concat(r,separator) + end + for i=1,#t do + local ti = t[i] + for f=1,#fields do + local field = ti[fields[f]] + if type(field) == "string" then + r[f] = lpegmatch(escape,field) + else + r[f] = tostring(field) + end + end + result[#result+1] = concat(r,separator) + end + return concat(result,"\n") + else + return "" + end +end + +-- local nspaces = utilities.strings.newrepeater(" ") +-- local escape = Cs((P("<")/"<" + P(">")/">" + P("&")/"&" + P(1))^0) +-- +-- local function toxml(t,d,result,step) +-- for k, v in sortedpairs(t) do +-- local s = nspaces[d] +-- local tk = type(k) +-- local tv = type(v) +-- if tv == "table" then +-- if tk == "number" then +-- result[#result+1] = format("%s",s,k) +-- toxml(v,d+step,result,step) +-- result[#result+1] = format("%s",s,k) +-- else +-- result[#result+1] = format("%s<%s>",s,k) +-- 
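-- A minimal usage sketch of two of the helpers above (tables.definetable and
-- table.tocsv); the sample data and the dotted path are illustrative only and
-- not taken from this patch.

local code, accessor = utilities.tables.definetable("interfaces.implement.cache")
print(code)     -- the generated "x = x or { } ..." definition chunk
print(accessor) -- the composed name that reaches the innermost table

local records = {
    { name = "alpha", value = 1 },
    { name = "beta",  value = 2 },
}
print(table.tocsv(records, { preamble = true }))
-- keys are taken (sorted) from the first record, strings get quoted/escaped:
--   "name","value"
--   "alpha",1
--   "beta",2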
toxml(v,d+step,result,step) +-- result[#result+1] = format("%s",s,k) +-- end +-- elseif tv == "string" then +-- if tk == "number" then +-- result[#result+1] = format("%s%s",s,k,lpegmatch(escape,v),k) +-- else +-- result[#result+1] = format("%s<%s>%s",s,k,lpegmatch(escape,v),k) +-- end +-- elseif tk == "number" then +-- result[#result+1] = format("%s%s",s,k,tostring(v),k) +-- else +-- result[#result+1] = format("%s<%s>%s",s,k,tostring(v),k) +-- end +-- end +-- end +-- +-- much faster + +local nspaces = utilities.strings.newrepeater(" ") + +local function toxml(t,d,result,step) + for k, v in sortedpairs(t) do + local s = nspaces[d] -- inlining this is somewhat faster but gives more formatters + local tk = type(k) + local tv = type(v) + if tv == "table" then + if tk == "number" then + result[#result+1] = formatters["%s"](s,k) + toxml(v,d+step,result,step) + result[#result+1] = formatters["%s"](s,k) + else + result[#result+1] = formatters["%s<%s>"](s,k) + toxml(v,d+step,result,step) + result[#result+1] = formatters["%s"](s,k) + end + elseif tv == "string" then + if tk == "number" then + result[#result+1] = formatters["%s%!xml!"](s,k,v,k) + else + result[#result+1] = formatters["%s<%s>%!xml!"](s,k,v,k) + end + elseif tk == "number" then + result[#result+1] = formatters["%s%S"](s,k,v,k) + else + result[#result+1] = formatters["%s<%s>%S"](s,k,v,k) + end + end +end + +-- function table.toxml(t,name,nobanner,indent,spaces) +-- local noroot = name == false +-- local result = (nobanner or noroot) and { } or { "" } +-- local indent = rep(" ",indent or 0) +-- local spaces = rep(" ",spaces or 1) +-- if noroot then +-- toxml( t, inndent, result, spaces) +-- else +-- toxml( { [name or "root"] = t }, indent, result, spaces) +-- end +-- return concat(result,"\n") +-- end + +function table.toxml(t,specification) + specification = specification or { } + local name = specification.name + local noroot = name == false + local result = (specification.nobanner or noroot) and { } or { "" } + local indent = specification.indent or 0 + local spaces = specification.spaces or 1 + if noroot then + toxml( t, indent, result, spaces) + else + toxml( { [name or "data"] = t }, indent, result, spaces) + end + return concat(result,"\n") +end + +-- also experimental + +-- encapsulate(table,utilities.tables) +-- encapsulate(table,utilities.tables,true) +-- encapsulate(table,true) + +function tables.encapsulate(core,capsule,protect) + if type(capsule) ~= "table" then + protect = true + capsule = { } + end + for key, value in next, core do + if capsule[key] then + print(formatters["\ninvalid %s %a in %a"]("inheritance",key,core)) + os.exit() + else + capsule[key] = value + end + end + if protect then + for key, value in next, core do + core[key] = nil + end + setmetatable(core, { + __index = capsule, + __newindex = function(t,key,value) + if capsule[key] then + print(formatters["\ninvalid %s %a' in %a"]("overload",key,core)) + os.exit() + else + rawset(t,key,value) + end + end + } ) + end +end + +local function fastserialize(t,r,outer) -- no mixes + r[#r+1] = "{" + local n = #t + if n > 0 then + for i=1,n do + local v = t[i] + local tv = type(v) + if tv == "string" then + r[#r+1] = formatters["%q,"](v) + elseif tv == "number" then + r[#r+1] = formatters["%s,"](v) + elseif tv == "table" then + fastserialize(v,r) + elseif tv == "boolean" then + r[#r+1] = formatters["%S,"](v) + end + end + else + for k, v in next, t do + local tv = type(v) + if tv == "string" then + r[#r+1] = formatters["[%q]=%q,"](k,v) + elseif tv == "number" then + 
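-- A small sketch of tables.encapsulate as defined above, with made-up data:
-- with protect enabled the keys of core move into capsule and core becomes a
-- guarded proxy, so overloading an inherited key fails loudly.

local core    = { version = 1, report = print }
local capsule = { }
utilities.tables.encapsulate(core, capsule, true)

print(core.version)  -- 1, found in capsule via the __index metamethod
core.extra = true    -- allowed: a fresh key, stored in core itself
-- core.version = 2  -- would report an invalid overload and call os.exit()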
r[#r+1] = formatters["[%q]=%s,"](k,v) + elseif tv == "table" then + r[#r+1] = formatters["[%q]="](k) + fastserialize(v,r) + elseif tv == "boolean" then + r[#r+1] = formatters["[%q]=%S,"](k,v) + end + end + end + if outer then + r[#r+1] = "}" + else + r[#r+1] = "}," + end + return r +end + +-- local f_hashed_string = formatters["[%q]=%q,"] +-- local f_hashed_number = formatters["[%q]=%s,"] +-- local f_hashed_table = formatters["[%q]="] +-- local f_hashed_true = formatters["[%q]=true,"] +-- local f_hashed_false = formatters["[%q]=false,"] +-- +-- local f_indexed_string = formatters["%q,"] +-- local f_indexed_number = formatters["%s,"] +-- ----- f_indexed_true = formatters["true,"] +-- ----- f_indexed_false = formatters["false,"] +-- +-- local function fastserialize(t,r,outer) -- no mixes +-- r[#r+1] = "{" +-- local n = #t +-- if n > 0 then +-- for i=1,n do +-- local v = t[i] +-- local tv = type(v) +-- if tv == "string" then +-- r[#r+1] = f_indexed_string(v) +-- elseif tv == "number" then +-- r[#r+1] = f_indexed_number(v) +-- elseif tv == "table" then +-- fastserialize(v,r) +-- elseif tv == "boolean" then +-- -- r[#r+1] = v and f_indexed_true(k) or f_indexed_false(k) +-- r[#r+1] = v and "true," or "false," +-- end +-- end +-- else +-- for k, v in next, t do +-- local tv = type(v) +-- if tv == "string" then +-- r[#r+1] = f_hashed_string(k,v) +-- elseif tv == "number" then +-- r[#r+1] = f_hashed_number(k,v) +-- elseif tv == "table" then +-- r[#r+1] = f_hashed_table(k) +-- fastserialize(v,r) +-- elseif tv == "boolean" then +-- r[#r+1] = v and f_hashed_true(k) or f_hashed_false(k) +-- end +-- end +-- end +-- if outer then +-- r[#r+1] = "}" +-- else +-- r[#r+1] = "}," +-- end +-- return r +-- end + +function table.fastserialize(t,prefix) -- so prefix should contain the = + return concat(fastserialize(t,{ prefix or "return" },true)) +end + +function table.deserialize(str) + if not str or str == "" then + return + end + local code = load(str) + if not code then + return + end + code = code() + if not code then + return + end + return code +end + +-- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } }) + +function table.load(filename) + if filename then + local t = io.loaddata(filename) + if t and t ~= "" then + t = load(t) + if type(t) == "function" then + t = t() + if type(t) == "table" then + return t + end + end + end + end +end + +function table.save(filename,t,n,...) 
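-- A round-trip sketch in the spirit of the commented inspect() line above;
-- the data is made up. fastserialize produces a loadable "return { ... }"
-- string (indexed or hashed tables, no mixed ones), deserialize loads it back.

local original = { name = "demo", list = { 1, 2, 3 }, ok = true }
local dumped   = table.fastserialize(original)
local restored = table.deserialize(dumped)
print(dumped, restored.name, restored.list[2], restored.ok)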
+ io.savedata(filename,serialize(t,n == nil and true or n,...)) +end + +local function slowdrop(t) + local r = { } + local l = { } + for i=1,#t do + local ti = t[i] + local j = 0 + for k, v in next, ti do + j = j + 1 + l[j] = formatters["%s=%q"](k,v) + end + r[i] = formatters[" {%t},\n"](l) + end + return formatters["return {\n%st}"](r) +end + +local function fastdrop(t) + local r = { "return {\n" } + for i=1,#t do + local ti = t[i] + r[#r+1] = " {" + for k, v in next, ti do + r[#r+1] = formatters["%s=%q"](k,v) + end + r[#r+1] = "},\n" + end + r[#r+1] = "}" + return concat(r) +end + +function table.drop(t,slow) -- only { { a=2 }, {a=3} } + if #t == 0 then + return "return { }" + elseif slow == true then + return slowdrop(t) -- less memory + else + return fastdrop(t) -- some 15% faster + end +end + +function table.autokey(t,k) + local v = { } + t[k] = v + return v +end + +local selfmapper = { __index = function(t,k) t[k] = k return k end } + +function table.twowaymapper(t) + if not t then + t = { } + else + for i=0,#t do + local ti = t[i] -- t[1] = "one" + if ti then + local i = tostring(i) + t[i] = ti -- t["1"] = "one" + t[ti] = i -- t["one"] = "1" + end + end + t[""] = t[0] or "" + end + -- setmetatableindex(t,"key") + setmetatable(t,selfmapper) + return t +end + diff --git a/tex/context/base/util-tpl.lua b/tex/context/base/util-tpl.lua index 7a6abefd6..045faf1d0 100644 --- a/tex/context/base/util-tpl.lua +++ b/tex/context/base/util-tpl.lua @@ -1,174 +1,174 @@ -if not modules then modules = { } end modules ['util-tpl'] = { - version = 1.001, - comment = "companion to luat-lib.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This is experimental code. Coming from dos and windows, I've always used %whatever% --- as template variables so let's stick to it. After all, it's easy to parse and stands --- out well. A double %% is turned into a regular %. - -utilities.templates = utilities.templates or { } -local templates = utilities.templates - -local trace_template = false trackers.register("templates.trace",function(v) trace_template = v end) -local report_template = logs.reporter("template") - -local tostring = tostring -local format, sub = string.format, string.sub -local P, C, Cs, Carg, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.Carg, lpeg.match - --- todo: make installable template.new - -local replacer - -local function replacekey(k,t,how,recursive) - local v = t[k] - if not v then - if trace_template then - report_template("unknown key %a",k) - end - return "" - else - v = tostring(v) - if trace_template then - report_template("setting key %a to value %a",k,v) - end - if recursive then - return lpegmatch(replacer,v,1,t,how,recursive) - else - return v - end - end -end - -local sqlescape = lpeg.replacer { - { "'", "''" }, - { "\\", "\\\\" }, - { "\r\n", "\\n" }, - { "\r", "\\n" }, - -- { "\t", "\\t" }, -} - -local sqlquotedescape = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'")) - --- escapeset : \0\1\2\3\4\5\6\7\8\9\10\11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31\"\\\127 --- test string: [[1\0\31test23"\\]] .. string.char(19) .. "23" --- --- slow: --- --- local luaescape = lpeg.replacer { --- { '"', [[\"]] }, --- { '\\', [[\\]] }, --- { R("\0\9") * #R("09"), function(s) return "\\00" .. byte(s) end }, --- { R("\10\31") * #R("09"), function(s) return "\\0" .. byte(s) end }, --- { R("\0\31") , function(s) return "\\" .. 
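-- A quick sketch of table.twowaymapper defined above; the list is made up.
-- The result maps string indices to names and names back to string indices,
-- while unknown keys fall through to the selfmapper and map to themselves.

local mapper = table.twowaymapper { "one", "two", "three" }
print(mapper["1"], mapper.one) -- one   1
print(mapper.whatever)         -- whatever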
byte(s) end }, --- } --- --- slightly faster: --- --- local luaescape = Cs (( --- P('"' ) / [[\"]] + --- P('\\') / [[\\]] + --- Cc("\\00") * (R("\0\9") / byte) * #R("09") + --- Cc("\\0") * (R("\10\31") / byte) * #R("09") + --- Cc("\\") * (R("\0\31") / byte) + --- P(1) --- )^0) - -local escapers = { - lua = function(s) - return sub(format("%q",s),2,-2) - end, - sql = function(s) - return lpegmatch(sqlescape,s) - end, -} - -local quotedescapers = { - lua = function(s) - return format("%q",s) - end, - sql = function(s) - return lpegmatch(sqlquotedescape,s) - end, -} - -lpeg.patterns.sqlescape = sqlescape -lpeg.patterns.sqlescape = sqlquotedescape - -local luaescaper = escapers.lua -local quotedluaescaper = quotedescapers.lua - -local function replacekeyunquoted(s,t,how,recurse) -- ".. \" " - local escaper = how and escapers[how] or luaescaper - return escaper(replacekey(s,t,how,recurse)) -end - -local function replacekeyquoted(s,t,how,recurse) -- ".. \" " - local escaper = how and quotedescapers[how] or quotedluaescaper - return escaper(replacekey(s,t,how,recurse)) -end - -local single = P("%") -- test %test% test : resolves test -local double = P("%%") -- test 10%% test : %% becomes % -local lquoted = P("%[") -- test '%[test]%' test : resolves to test with escaped "'s -local rquoted = P("]%") -- -local lquotedq = P("%(") -- test %(test)% test : resolves to 'test' with escaped "'s -local rquotedq = P(")%") -- - -local escape = double / '%%' -local nosingle = single / '' -local nodouble = double / '' -local nolquoted = lquoted / '' -local norquoted = rquoted / '' -local nolquotedq = lquotedq / '' -local norquotedq = rquotedq / '' - -local key = nosingle * ((C((1-nosingle )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekey ) * nosingle -local quoted = nolquotedq * ((C((1-norquotedq)^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyquoted ) * norquotedq -local unquoted = nolquoted * ((C((1-norquoted )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyunquoted) * norquoted -local any = P(1) - - replacer = Cs((unquoted + quoted + escape + key + any)^0) - -local function replace(str,mapping,how,recurse) - if mapping and str then - return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str - else - return str - end -end - --- print(replace("test '%[x]%' test",{ x = [[a 'x'  a]] })) --- print(replace("test '%[x]%' test",{ x = true })) --- print(replace("test '%[x]%' test",{ x = [[a 'x'  a]], y = "oeps" },'sql')) --- print(replace("test '%[x]%' test",{ x = [[a '%y%'  a]], y = "oeps" },'sql',true)) --- print(replace([[test %[x]% test]],{ x = [[a "x"  a]]})) --- print(replace([[test %(x)% test]],{ x = [[a "x"  a]]})) - -templates.replace = replace - -function templates.load(filename,mapping,how,recurse) - local data = io.loaddata(filename) or "" - if mapping and next(mapping) then - return replace(data,mapping,how,recurse) - else - return data - end -end - -function templates.resolve(t,mapping,how,recurse) - if not mapping then - mapping = t - end - for k, v in next, t do - t[k] = replace(v,mapping,how,recurse) - end - return t -end - --- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" })) --- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" })) +if not modules then modules = { } end modules ['util-tpl'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- 
This is experimental code. Coming from dos and windows, I've always used %whatever% +-- as template variables so let's stick to it. After all, it's easy to parse and stands +-- out well. A double %% is turned into a regular %. + +utilities.templates = utilities.templates or { } +local templates = utilities.templates + +local trace_template = false trackers.register("templates.trace",function(v) trace_template = v end) +local report_template = logs.reporter("template") + +local tostring = tostring +local format, sub = string.format, string.sub +local P, C, Cs, Carg, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.Carg, lpeg.match + +-- todo: make installable template.new + +local replacer + +local function replacekey(k,t,how,recursive) + local v = t[k] + if not v then + if trace_template then + report_template("unknown key %a",k) + end + return "" + else + v = tostring(v) + if trace_template then + report_template("setting key %a to value %a",k,v) + end + if recursive then + return lpegmatch(replacer,v,1,t,how,recursive) + else + return v + end + end +end + +local sqlescape = lpeg.replacer { + { "'", "''" }, + { "\\", "\\\\" }, + { "\r\n", "\\n" }, + { "\r", "\\n" }, + -- { "\t", "\\t" }, +} + +local sqlquotedescape = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'")) + +-- escapeset : \0\1\2\3\4\5\6\7\8\9\10\11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31\"\\\127 +-- test string: [[1\0\31test23"\\]] .. string.char(19) .. "23" +-- +-- slow: +-- +-- local luaescape = lpeg.replacer { +-- { '"', [[\"]] }, +-- { '\\', [[\\]] }, +-- { R("\0\9") * #R("09"), function(s) return "\\00" .. byte(s) end }, +-- { R("\10\31") * #R("09"), function(s) return "\\0" .. byte(s) end }, +-- { R("\0\31") , function(s) return "\\" .. byte(s) end }, +-- } +-- +-- slightly faster: +-- +-- local luaescape = Cs (( +-- P('"' ) / [[\"]] + +-- P('\\') / [[\\]] + +-- Cc("\\00") * (R("\0\9") / byte) * #R("09") + +-- Cc("\\0") * (R("\10\31") / byte) * #R("09") + +-- Cc("\\") * (R("\0\31") / byte) + +-- P(1) +-- )^0) + +local escapers = { + lua = function(s) + return sub(format("%q",s),2,-2) + end, + sql = function(s) + return lpegmatch(sqlescape,s) + end, +} + +local quotedescapers = { + lua = function(s) + return format("%q",s) + end, + sql = function(s) + return lpegmatch(sqlquotedescape,s) + end, +} + +lpeg.patterns.sqlescape = sqlescape +lpeg.patterns.sqlescape = sqlquotedescape + +local luaescaper = escapers.lua +local quotedluaescaper = quotedescapers.lua + +local function replacekeyunquoted(s,t,how,recurse) -- ".. \" " + local escaper = how and escapers[how] or luaescaper + return escaper(replacekey(s,t,how,recurse)) +end + +local function replacekeyquoted(s,t,how,recurse) -- ".. 
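-- A usage sketch of the %key% templating described above; the table name and
-- values are made up. A plain %key% is looked up in the mapping, %[key]%
-- gives an escaped value and %(key)% a quoted escaped value, here with the
-- sql escapers selected via the third argument.

local template = "insert into %[table]% values ( %(name)%, %age% )"
print(utilities.templates.replace(template,
    { table = "persons", name = "l'ami", age = 42 }, "sql"))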
\" " + local escaper = how and quotedescapers[how] or quotedluaescaper + return escaper(replacekey(s,t,how,recurse)) +end + +local single = P("%") -- test %test% test : resolves test +local double = P("%%") -- test 10%% test : %% becomes % +local lquoted = P("%[") -- test '%[test]%' test : resolves to test with escaped "'s +local rquoted = P("]%") -- +local lquotedq = P("%(") -- test %(test)% test : resolves to 'test' with escaped "'s +local rquotedq = P(")%") -- + +local escape = double / '%%' +local nosingle = single / '' +local nodouble = double / '' +local nolquoted = lquoted / '' +local norquoted = rquoted / '' +local nolquotedq = lquotedq / '' +local norquotedq = rquotedq / '' + +local key = nosingle * ((C((1-nosingle )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekey ) * nosingle +local quoted = nolquotedq * ((C((1-norquotedq)^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyquoted ) * norquotedq +local unquoted = nolquoted * ((C((1-norquoted )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyunquoted) * norquoted +local any = P(1) + + replacer = Cs((unquoted + quoted + escape + key + any)^0) + +local function replace(str,mapping,how,recurse) + if mapping and str then + return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str + else + return str + end +end + +-- print(replace("test '%[x]%' test",{ x = [[a 'x'  a]] })) +-- print(replace("test '%[x]%' test",{ x = true })) +-- print(replace("test '%[x]%' test",{ x = [[a 'x'  a]], y = "oeps" },'sql')) +-- print(replace("test '%[x]%' test",{ x = [[a '%y%'  a]], y = "oeps" },'sql',true)) +-- print(replace([[test %[x]% test]],{ x = [[a "x"  a]]})) +-- print(replace([[test %(x)% test]],{ x = [[a "x"  a]]})) + +templates.replace = replace + +function templates.load(filename,mapping,how,recurse) + local data = io.loaddata(filename) or "" + if mapping and next(mapping) then + return replace(data,mapping,how,recurse) + else + return data + end +end + +function templates.resolve(t,mapping,how,recurse) + if not mapping then + mapping = t + end + for k, v in next, t do + t[k] = replace(v,mapping,how,recurse) + end + return t +end + +-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" })) +-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" })) diff --git a/tex/context/base/x-asciimath.lua b/tex/context/base/x-asciimath.lua index 5ef741ce3..9f4021212 100644 --- a/tex/context/base/x-asciimath.lua +++ b/tex/context/base/x-asciimath.lua @@ -1,270 +1,270 @@ -if not modules then modules = { } end modules ['x-asciimath'] = { - version = 1.001, - comment = "companion to x-asciimath.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx-- -

<p>Some backgrounds are discussed in <type>x-asciimath.mkiv</type>.</p>

    ---ldx]]-- - -local trace_mapping = false if trackers then trackers.register("modules.asciimath.mapping", function(v) trace_mapping = v end) end - -local asciimath = { } -local moduledata = moduledata or { } -moduledata.asciimath = asciimath - -local report_asciimath = logs.reporter("mathematics","asciimath") - -local format = string.format -local lpegmatch = lpeg.match -local S, P, R, C, V, Cc, Ct, Cs = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct, lpeg.Cs - -local letter = lpeg.patterns.utf8 -local space = S(" \n\r\t") -local spaces = space^0/"" -local integer = P("-")^-1 * R("09")^1 -local realpart = P("-")^-1 * R("09")^1 * S(".")^1 * R("09")^1 -local number = integer -- so we can support nice formatting if needed -local real = realpart -- so we can support nice formatting if needed -local float = realpart * P("E") * integer -- so we can support nice formatting if needed -local texnic = P("\\") * (R("az","AZ")^1) - -local premapper = Cs ( ( - - P("@") / "\\degrees " + - P("O/") / "\\varnothing " + - P("o+") / "\\oplus " + - P("o.") / "\\ocirc " + - P("!in") / "\\not\\in " + - P("!=") / "\\neq " + - P("**") / "\\star " + - P("*") / "\\cdot " + - P("//") / "\\slash " + - P("/_") / "\\angle " + - P("\\\\") / "\\backslash " + - P("^^^") / "\\wedge " + - P("^^") / "\\wedge " + - P("<<") / "\\left\\langle " + - P(">>") / "\\right\\rangle " + - P("<=") / "\\leq " + - P(">=") / "\\geq " + - P("-<") / "\\precc " + - P(">-") / "\\succ " + - P("~=") / "\\cong " + - P("~~") / "\\approx " + - P("=>") / "\\Rightarrow " + - P("(:") / "\\left\\langle " + - P(":)") / "\\right\\rangle " + - P(":.") / "\\therefore " + - P("~|") / "\\right\\rceil " + - P("_|_") / "\\bot " + - P("_|") / "\\right\\rfloor " + - P("+-") / "\\pm " + - P("|--") / "\\vdash " + - P("|==") / "\\models " + - P("|_") / "\\left\\lfloor " + - P("|~") / "\\left\\lceil " + - P("-:") / "\\div " + - P("_=") / "\\equiv " + - - P("|") / "\\middle\\| " + - - P("dx") / "(dx)" + - P("dy") / "(dy)" + - P("dz") / "(dz)" + - - letter + P(1) - -)^0 ) - -local reserved = { - ["aleph"] = "\\aleph ", - ["vdots"] = "\\vdots ", - ["ddots"] = "\\ddots ", - ["oint"] = "\\oint ", - ["grad"] = "\\nabla ", - ["prod"] = "\\prod ", - ["prop"] = "\\propto ", - ["sube"] = "\\subseteq ", - ["supe"] = "\\supseteq ", - ["sinh"] = "\\sinh ", - ["cosh"] = "\\cosh ", - ["tanh"] = "\\tanh ", - ["sum"] = "\\sum ", - ["vvv"] = "\\vee ", - ["nnn"] = "\\cap ", - ["uuu"] = "\\cup ", - ["sub"] = "\\subset ", - ["sup"] = "\\supset ", - ["not"] = "\\lnot ", - ["iff"] = "\\Leftrightarrow ", - ["int"] = "\\int ", - ["del"] = "\\partial ", - ["and"] = "\\and ", - ["not"] = "\\not ", - ["sin"] = "\\sin ", - ["cos"] = "\\cos ", - ["tan"] = "\\tan ", - ["csc"] = "\\csc ", - ["sec"] = "\\sec ", - ["cot"] = "\\cot ", - ["log"] = "\\log ", - ["det"] = "\\det ", - ["lim"] = "\\lim ", - ["mod"] = "\\mod ", - ["gcd"] = "\\gcd ", - ["lcm"] = "\\lcm ", - ["min"] = "\\min ", - ["max"] = "\\max ", - ["xx"] = "\\times ", - ["in"] = "\\in ", - ["ox"] = "\\otimes ", - ["vv"] = "\\vee ", - ["nn"] = "\\cap ", - ["uu"] = "\\cup ", - ["oo"] = "\\infty ", - ["ln"] = "\\ln ", - ["or"] = "\\or ", - - ["AA"] = "\\forall ", - ["EE"] = "\\exists ", - ["TT"] = "\\top ", - ["CC"] = "\\Bbb{C}", - ["NN"] = "\\Bbb{N}", - ["QQ"] = "\\Bbb{Q}", - ["RR"] = "\\Bbb{R}", - ["ZZ"] = "\\Bbb{Z}", - -} - -local postmapper = Cs ( ( - - P("\\mathoptext ") * spaces * (P("\\bgroup ")/"{") * (1-P("\\egroup "))^1 * (P("\\egroup ")/"}") + - - (P("\\bgroup ")) / "{" + - (P("\\egroup ")) / "}" + - - P("\\") * 
(R("az","AZ")^2) + - - (R("AZ","az")^2) / reserved + - - P("{:") / "\\left." + - P(":}") / "\\right." + - P("(") / "\\left(" + - P(")") / "\\right)" + - P("[") / "\\left[" + - P("]") / "\\right]" + - P("{") / "\\left\\{" + - P("}") / "\\right\\}" + - - letter + P(1) -)^0 ) - -local parser - -local function converted(original,totex) - local ok, result - if trace_mapping then - report_asciimath("original : %s",original) - end - local premapped = lpegmatch(premapper,original) - if premapped then - if trace_mapping then - report_asciimath("prepared : %s",premapped) - end - local parsed = lpegmatch(parser,premapped) - if parsed then - if trace_mapping then - report_asciimath("parsed : %s",parsed) - end - local postmapped = lpegmatch(postmapper,parsed) - if postmapped then - if trace_mapping then - report_asciimath("finalized: %s",postmapped) - end - result, ok = postmapped, true - else - result = "error in postmapping" - end - else - result = "error in mapping" - end - else - result = "error in premapping" - end - if totex then - if ok then - context.mathematics(result) - else - context.type(result) -- some day monospaced - end - else - return result - end -end - -local function onlyconverted(str) - local parsed = lpegmatch(parser,str) - return parsed or str -end - -local sqrt = P("sqrt") / "\\rootradical \\bgroup \\egroup " -local root = P("root") / "\\rootradical " -local frac = P("frac") / "\\frac " -local stackrel = P("stackrel") / "\\stackrel " -local text = P("text") / "\\mathoptext " -local hat = P("hat") / "\\widehat " -local overbar = P("bar") / "\\overbar " -local underline = P("ul") / "\\underline " -local vec = P("vec") / "\\overrightarrow " -local dot = P("dot") / "\\dot " -local ddot = P("ddot") / "\\ddot " - -local left = P("(:") + P("{:") + P("(") + P("[") + P("{") -local right = P(":)") + P(":}") + P(")") + P("]") + P("}") -local leftnorright = 1 - left - right -local singles = sqrt + text + hat + underline + overbar + vec + ddot + dot -local doubles = root + frac + stackrel -local ignoreleft = (left/"") * spaces * spaces -local ignoreright = spaces * (right/"") * spaces -local ignoreslash = spaces * (P("/")/"") * spaces -local comma = P(",") -local nocomma = 1-comma -local anychar = P(1) -local openmatrix = left * spaces * Cc("\\matrix\\bgroup ") -local closematrix = Cc("\\egroup ") * spaces * right -local nextcolumn = spaces * (comma/"&") * spaces -local nextrow = spaces * (comma/"\\cr ") * spaces -local finishrow = Cc("\\cr ") -local opengroup = left/"\\bgroup " -local closegroup = right/"\\egroup " -local somescript = S("^_") * spaces -local beginargument = Cc("\\bgroup ") -local endargument = Cc("\\egroup ") - -parser = Cs { "main", - - scripts = somescript * V("argument"), - division = Cc("\\frac") * V("argument") * spaces * ignoreslash * spaces * V("argument"), - double = doubles * spaces * V("argument") * spaces * V("argument"), - single = singles * spaces * V("argument"), - - balanced = opengroup * (C((leftnorright + V("balanced"))^0)/onlyconverted) * closegroup, - argument = V("balanced") + V("token"), - - element = (V("step") + (V("argument") + V("step")) - ignoreright - nextcolumn - comma)^1, - commalist = ignoreleft * V("element") * (nextcolumn * spaces * V("element"))^0 * ignoreright, - matrix = openmatrix * spaces * (V("commalist") * (nextrow * V("commalist"))^0) * finishrow * closematrix, - - token = beginargument * (texnic + float + real + number + letter) * endargument, - - step = V("scripts") + V("division") + V("single") + V("double"), - main = (V("matrix") 
+ V("step") + anychar)^0, - -} - -asciimath.reserved = reserved -asciimath.convert = converted +if not modules then modules = { } end modules ['x-asciimath'] = { + version = 1.001, + comment = "companion to x-asciimath.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

<p>Some backgrounds are discussed in <type>x-asciimath.mkiv</type>.</p>

    +--ldx]]-- + +local trace_mapping = false if trackers then trackers.register("modules.asciimath.mapping", function(v) trace_mapping = v end) end + +local asciimath = { } +local moduledata = moduledata or { } +moduledata.asciimath = asciimath + +local report_asciimath = logs.reporter("mathematics","asciimath") + +local format = string.format +local lpegmatch = lpeg.match +local S, P, R, C, V, Cc, Ct, Cs = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct, lpeg.Cs + +local letter = lpeg.patterns.utf8 +local space = S(" \n\r\t") +local spaces = space^0/"" +local integer = P("-")^-1 * R("09")^1 +local realpart = P("-")^-1 * R("09")^1 * S(".")^1 * R("09")^1 +local number = integer -- so we can support nice formatting if needed +local real = realpart -- so we can support nice formatting if needed +local float = realpart * P("E") * integer -- so we can support nice formatting if needed +local texnic = P("\\") * (R("az","AZ")^1) + +local premapper = Cs ( ( + + P("@") / "\\degrees " + + P("O/") / "\\varnothing " + + P("o+") / "\\oplus " + + P("o.") / "\\ocirc " + + P("!in") / "\\not\\in " + + P("!=") / "\\neq " + + P("**") / "\\star " + + P("*") / "\\cdot " + + P("//") / "\\slash " + + P("/_") / "\\angle " + + P("\\\\") / "\\backslash " + + P("^^^") / "\\wedge " + + P("^^") / "\\wedge " + + P("<<") / "\\left\\langle " + + P(">>") / "\\right\\rangle " + + P("<=") / "\\leq " + + P(">=") / "\\geq " + + P("-<") / "\\precc " + + P(">-") / "\\succ " + + P("~=") / "\\cong " + + P("~~") / "\\approx " + + P("=>") / "\\Rightarrow " + + P("(:") / "\\left\\langle " + + P(":)") / "\\right\\rangle " + + P(":.") / "\\therefore " + + P("~|") / "\\right\\rceil " + + P("_|_") / "\\bot " + + P("_|") / "\\right\\rfloor " + + P("+-") / "\\pm " + + P("|--") / "\\vdash " + + P("|==") / "\\models " + + P("|_") / "\\left\\lfloor " + + P("|~") / "\\left\\lceil " + + P("-:") / "\\div " + + P("_=") / "\\equiv " + + + P("|") / "\\middle\\| " + + + P("dx") / "(dx)" + + P("dy") / "(dy)" + + P("dz") / "(dz)" + + + letter + P(1) + +)^0 ) + +local reserved = { + ["aleph"] = "\\aleph ", + ["vdots"] = "\\vdots ", + ["ddots"] = "\\ddots ", + ["oint"] = "\\oint ", + ["grad"] = "\\nabla ", + ["prod"] = "\\prod ", + ["prop"] = "\\propto ", + ["sube"] = "\\subseteq ", + ["supe"] = "\\supseteq ", + ["sinh"] = "\\sinh ", + ["cosh"] = "\\cosh ", + ["tanh"] = "\\tanh ", + ["sum"] = "\\sum ", + ["vvv"] = "\\vee ", + ["nnn"] = "\\cap ", + ["uuu"] = "\\cup ", + ["sub"] = "\\subset ", + ["sup"] = "\\supset ", + ["not"] = "\\lnot ", + ["iff"] = "\\Leftrightarrow ", + ["int"] = "\\int ", + ["del"] = "\\partial ", + ["and"] = "\\and ", + ["not"] = "\\not ", + ["sin"] = "\\sin ", + ["cos"] = "\\cos ", + ["tan"] = "\\tan ", + ["csc"] = "\\csc ", + ["sec"] = "\\sec ", + ["cot"] = "\\cot ", + ["log"] = "\\log ", + ["det"] = "\\det ", + ["lim"] = "\\lim ", + ["mod"] = "\\mod ", + ["gcd"] = "\\gcd ", + ["lcm"] = "\\lcm ", + ["min"] = "\\min ", + ["max"] = "\\max ", + ["xx"] = "\\times ", + ["in"] = "\\in ", + ["ox"] = "\\otimes ", + ["vv"] = "\\vee ", + ["nn"] = "\\cap ", + ["uu"] = "\\cup ", + ["oo"] = "\\infty ", + ["ln"] = "\\ln ", + ["or"] = "\\or ", + + ["AA"] = "\\forall ", + ["EE"] = "\\exists ", + ["TT"] = "\\top ", + ["CC"] = "\\Bbb{C}", + ["NN"] = "\\Bbb{N}", + ["QQ"] = "\\Bbb{Q}", + ["RR"] = "\\Bbb{R}", + ["ZZ"] = "\\Bbb{Z}", + +} + +local postmapper = Cs ( ( + + P("\\mathoptext ") * spaces * (P("\\bgroup ")/"{") * (1-P("\\egroup "))^1 * (P("\\egroup ")/"}") + + + (P("\\bgroup ")) / "{" + + (P("\\egroup ")) / "}" + + + P("\\") * 
(R("az","AZ")^2) + + + (R("AZ","az")^2) / reserved + + + P("{:") / "\\left." + + P(":}") / "\\right." + + P("(") / "\\left(" + + P(")") / "\\right)" + + P("[") / "\\left[" + + P("]") / "\\right]" + + P("{") / "\\left\\{" + + P("}") / "\\right\\}" + + + letter + P(1) +)^0 ) + +local parser + +local function converted(original,totex) + local ok, result + if trace_mapping then + report_asciimath("original : %s",original) + end + local premapped = lpegmatch(premapper,original) + if premapped then + if trace_mapping then + report_asciimath("prepared : %s",premapped) + end + local parsed = lpegmatch(parser,premapped) + if parsed then + if trace_mapping then + report_asciimath("parsed : %s",parsed) + end + local postmapped = lpegmatch(postmapper,parsed) + if postmapped then + if trace_mapping then + report_asciimath("finalized: %s",postmapped) + end + result, ok = postmapped, true + else + result = "error in postmapping" + end + else + result = "error in mapping" + end + else + result = "error in premapping" + end + if totex then + if ok then + context.mathematics(result) + else + context.type(result) -- some day monospaced + end + else + return result + end +end + +local function onlyconverted(str) + local parsed = lpegmatch(parser,str) + return parsed or str +end + +local sqrt = P("sqrt") / "\\rootradical \\bgroup \\egroup " +local root = P("root") / "\\rootradical " +local frac = P("frac") / "\\frac " +local stackrel = P("stackrel") / "\\stackrel " +local text = P("text") / "\\mathoptext " +local hat = P("hat") / "\\widehat " +local overbar = P("bar") / "\\overbar " +local underline = P("ul") / "\\underline " +local vec = P("vec") / "\\overrightarrow " +local dot = P("dot") / "\\dot " +local ddot = P("ddot") / "\\ddot " + +local left = P("(:") + P("{:") + P("(") + P("[") + P("{") +local right = P(":)") + P(":}") + P(")") + P("]") + P("}") +local leftnorright = 1 - left - right +local singles = sqrt + text + hat + underline + overbar + vec + ddot + dot +local doubles = root + frac + stackrel +local ignoreleft = (left/"") * spaces * spaces +local ignoreright = spaces * (right/"") * spaces +local ignoreslash = spaces * (P("/")/"") * spaces +local comma = P(",") +local nocomma = 1-comma +local anychar = P(1) +local openmatrix = left * spaces * Cc("\\matrix\\bgroup ") +local closematrix = Cc("\\egroup ") * spaces * right +local nextcolumn = spaces * (comma/"&") * spaces +local nextrow = spaces * (comma/"\\cr ") * spaces +local finishrow = Cc("\\cr ") +local opengroup = left/"\\bgroup " +local closegroup = right/"\\egroup " +local somescript = S("^_") * spaces +local beginargument = Cc("\\bgroup ") +local endargument = Cc("\\egroup ") + +parser = Cs { "main", + + scripts = somescript * V("argument"), + division = Cc("\\frac") * V("argument") * spaces * ignoreslash * spaces * V("argument"), + double = doubles * spaces * V("argument") * spaces * V("argument"), + single = singles * spaces * V("argument"), + + balanced = opengroup * (C((leftnorright + V("balanced"))^0)/onlyconverted) * closegroup, + argument = V("balanced") + V("token"), + + element = (V("step") + (V("argument") + V("step")) - ignoreright - nextcolumn - comma)^1, + commalist = ignoreleft * V("element") * (nextcolumn * spaces * V("element"))^0 * ignoreright, + matrix = openmatrix * spaces * (V("commalist") * (nextrow * V("commalist"))^0) * finishrow * closematrix, + + token = beginargument * (texnic + float + real + number + letter) * endargument, + + step = V("scripts") + V("division") + V("single") + V("double"), + main = (V("matrix") 
+ V("step") + anychar)^0, + +} + +asciimath.reserved = reserved +asciimath.convert = converted diff --git a/tex/context/base/x-calcmath.lua b/tex/context/base/x-calcmath.lua index 1394f3450..631cd613b 100644 --- a/tex/context/base/x-calcmath.lua +++ b/tex/context/base/x-calcmath.lua @@ -1,362 +1,362 @@ -if not modules then modules = { } end modules ['x-calcmath'] = { - version = 1.001, - comment = "companion to x-calcmath.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- this really needs to be redone - -local format, lower, upper, gsub, sub = string.format, string.lower, string.upper, string.gsub, string.sub -local concat = table.concat -local lpegmatch = lpeg.match - -local calcmath = { } -local moduledata = moduledata or { } -moduledata.calcmath = calcmath - -local list_1 = { - "median", "min", "max", "round", "ln", "log", - "sin", "cos", "tan", "sinh", "cosh", "tanh" -} -local list_2 = { - "int", "sum", "prod" -} -local list_3 = { - "f", "g" -} -local list_4 = { - "pi", "inf" -} - -local list_1_1 = { } -local list_2_1 = { } -local list_2_2 = { } -local list_2_3 = { } -local list_4_1 = { } - -local frozen = false - -local function freeze() - for k=1,#list_1 do - local v = list_1[k] - list_1_1[v] = "\\".. upper(v) .." " - end - for k=1,#list_2 do - local v = list_2[k] - list_2_1[v .. "%((.-),(.-),(.-)%)"] = "\\" .. upper(v) .. "^{%1}_{%2}{%3}" - list_2_2[v .. "%((.-),(.-)%)"] = "\\" .. upper(v) .. "^{%1}{%2}" - list_2_3[v .. "%((.-)%)"] = "\\" .. upper(v) .. "{%1}" - end - for k=1,#list_4 do - local v = list_4[k] - list_4_1[v] = "\\" .. upper(v) - end - frozen = true -end - -local entities = { - ['gt'] = '>', - ['lt'] = '<', -} - -local symbols = { - ["<="] = "\\LE ", - [">="] = "\\GE ", - ["=<"] = "\\LE ", - ["=>"] = "\\GE ", - ["=="] = "\\EQ ", - ["<" ] = "\\LT ", - [">" ] = "\\GT ", - ["="] = "\\EQ ", -} - -local function nsub(str,tag,pre,post) - return (gsub(str,tag .. "(%b())", function(body) - return pre .. nsub(sub(body,2,-2),tag,pre,post) .. post - end)) -end - -local function totex(str,mode) - if not frozen then freeze() end - local n = 0 - -- crap - str = gsub(str,"%s+",' ') - -- xml - str = gsub(str,"&(.-);",entities) - -- ...E... - str = gsub(str,"([%-%+]?[%d%.%+%-]+)E([%-%+]?[%d%.]+)", "{\\SCINOT{%1}{%2}}") - -- ^-.. - str = gsub(str,"%^([%-%+]*%d+)", "^{%1}") - -- ^(...) - str = nsub(str,"%^", "^{", "}") - -- 1/x^2 - repeat - str, n = gsub(str,"([%d%w%.]+)/([%d%w%.]+%^{[%d%w%.]+})", "\\frac{%1}{%2}") - until n == 0 - -- todo: autoparenthesis - -- int(a,b,c) - for k, v in next, list_2_1 do - repeat str, n = gsub(str,k,v) until n == 0 - end - -- int(a,b) - for k, v in next, list_2_2 do - repeat str, n = gsub(str,k,v) until n == 0 - end - -- int(a) - for k, v in next, list_2_3 do - repeat str, n = gsub(str,k,v) until n == 0 - end - -- sin(x) => {\\sin(x)} - for k, v in next, list_1_1 do - repeat str, n = gsub(str,k,v) until n == 0 - end - -- mean - str = nsub(str, "mean", "\\OVERLINE{", "}") - -- (1+x)/(1+x) => \\FRAC{1+x}{1+x} - repeat - str, n = gsub(str,"(%b())/(%b())", function(a,b) - return "\\FRAC{" .. sub(a,2,-2) .. "}{" .. sub(b,2,-2) .. "}" - end ) - until n == 0 - -- (1+x)/x => \\FRAC{1+x}{x} - repeat - str, n = gsub(str,"(%b())/([%+%-]?[%.%d%w]+)", function(a,b) - return "\\FRAC{" .. sub(a,2,-2) .. "}{" .. b .. 
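-- A usage sketch of the asciimath converter above; the formula is made up.
-- Without the second argument the converted TeX string is returned, with a
-- true second argument it is passed to context.mathematics() (or to
-- context.type() when the conversion fails).

print(moduledata.asciimath.convert("x = (-b +- sqrt(b^2 - 4ac))/(2a)"))
-- moduledata.asciimath.convert("x = (-b +- sqrt(b^2 - 4ac))/(2a)", true)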
"}" - end ) - until n == 0 - -- 1/(1+x) => \\FRAC{1}{1+x} - repeat - str, n = gsub(str,"([%.%d%w]+)/(%b())", function(a,b) - return "\\FRAC{" .. a .. "}{" .. sub(b,2,-2) .. "}" - end ) - until n == 0 - -- 1/x => \\FRAC{1}{x} - repeat - str, n = gsub(str,"([%.%d%w]+)/([%+%-]?[%.%d%w]+)", "\\FRAC{%1}{%2}") - until n == 0 - -- times - str = gsub(str,"%*", " ") - -- symbols -- we can use a table substitution here - str = gsub(str,"([<>=][<>=]*)", symbols) - -- functions - str = nsub(str,"sqrt", "\\SQRT{", "}") - str = nsub(str,"exp", "e^{", "}") - str = nsub(str,"abs", "\\left|", "\\right|") - -- d/D - str = nsub(str,"D", "{\\FRAC{\\MBOX{d}}{\\MBOX{d}x}{(", ")}}") - str = gsub(str,"D([xy])", "\\FRAC{{\\RM d}%1}{{\\RM d}x}") - -- f/g - for k,v in next, list_3 do -- todo : prepare k,v - str = nsub(str,"D"..v,"{\\RM "..v.."}^{\\PRIME}(",")") - str = nsub(str,v,"{\\RM "..v.."}(",")") - end - -- more symbols - for k,v in next, list_4_1 do - str = gsub(str,k,v) - end - -- parenthesis (optional) - if mode == 2 then - str = gsub(str,"%(", "\\left(") - str = gsub(str,"%)", "\\right)") - end - -- csnames - str = gsub(str,"(\\[A-Z]+)", lower) - -- report - return str -end - -calcmath.totex = totex - -function calcmath.tex(str,mode) - context(totex(str)) -end - -function calcmath.xml(id,mode) - context(totex(lxml.id(id).dt[1],mode)) -end - --- work in progress ... lpeg variant - -if false then - - -- todo: - - -- maybe rewrite to current lpeg, i.e. string replacement and no Cc's - - -- table approach we have now is less efficient but more flexible - - -- D \frac {\rm d} {{\rm d}x} - -- Dx Dy \frac {{\rm d}y} {{\rm d}x} - -- Df Dg {\rm f}^{\prime} - -- f() g() {\rm f}() - - - -- valid utf8 - - local S, P, R, C, V, Cc, Ct = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct - - local space = S(" \n\r\t")^0 - local integer = P("-")^-1 * R("09")^1 - local realpart = P("-")^-1 * R("09")^1 * S(".")^1 * R("09")^1 - local number = Cc("number") * C(integer) * space - local real = Cc("real") * C(realpart) * space - local float = Cc("float") * C(realpart) * lpeg.P("E") * lpeg.C(integer) * space - local identifier = Cc("identifier") * C(R("az","AZ")) * space - local compareop = Cc("compare") * C(P("<") + P("=") + P(">") + P(">=") + P("<=") + P(">") + P("<")) * space - local factorop = Cc("factor") * C(S("+-^_,")) * space - local termop = Cc("term") * C(S("*/")) * space - local constant = Cc("constant") * C(P("pi") + lpeg.P("inf")) * space - local functionop = Cc("function") * C(R("az")^1) * space - local open = P("(") * space - local close = P(")") * space - - local grammar = P { - "expression", - expression = Ct(V("factor") * ((factorop+compareop) * V("factor"))^0), - factor = Ct(V("term") * (termop * V("term"))^0), - term = Ct( - float + real + number + - (open * V("expression") * close) + - (functionop * open * (V("expression") * (P(",") * V("expression"))^0) * close) + - (functionop * V("term")) + - constant + identifier - ), - } - - local parser = space * grammar * -1 - - local function has_factor(t) - for i=1,#t do - if t[i] == "factor" then - return true - end - end - end - - -- can be sped up if needed ... 
- - function totex(t) - if t then - local one = t[1] - if type(one) == "string" then - local two, three = t[2], t[3] - if one == "number" then - context(two) - elseif one == "real" then - context(two) - elseif one == "float" then - context("\\scinot{",two,"}{",three,"}") - elseif one == "identifier" then - context(two) - elseif one == "constant" then - context("\\"..two) - elseif one == "function" then - if two == "sqrt" then - context("\\sqrt{") - totex(three) - context("}") - elseif two == "exp" then - context(" e^{") - totex(three) - context("}") - elseif two == "abs" then - context("\\left|") - totex(three) - context("\\right|") - elseif two == "mean" then - context("\\overline{") - totex(three) - context("}") - elseif two == "int" or two == "prod" or two == "sum" then - local four, five = t[4], t[5] - if five then - context("\\"..two.."^{") -- context[two]("{") - totex(three) - context("}_{") - totex(four) - context("}") - totex(five) - elseif four then - context("\\"..two.."^{") - totex(three) - context("}") - totex(four) - elseif three then - context("\\"..two.." ") -- " " not needed - totex(three) - else - context("\\"..two) - end - else - context("\\"..two.."(") - totex(three) - context(")") - end - end - else - local nt = #t - local hasfactor = has_factor(t) - if hasfactor then - context("\\left(") - end - totex(one) - for i=2,nt,3 do - local what, how, rest = t[i], t[i+1], t[i+2] - if what == "factor" then - if how == '^' or how == "_" then - context(how) - context("{") - totex(rest) - context("}") - else - context(how) - totex(rest) - end - elseif what == "term" then - if how == '/' then - context("\\frac{") - totex(rest) - context("}{") - totex(t[i+3] or "") - context("}") - elseif how == '*' then - context("\\times") - totex(rest) - else - context(how) - totex(three) - end - elseif what == "compare" then - if two == ">=" then - context("\\ge") - elseif two == "<=" then - context("\\le") - elseif two == ">" then - context(">") - elseif two == "<" then - context("<") - end - totex(three) - end - end - if hasfactor then - context("\\right)") - end - end - end - end - - calcmath = { } - - function calcmath.parse(str) - return lpegmatch(parser,str) - end - - function calcmath.tex(str) - str = totex(lpegmatch(parser,str)) - return (str == "" and "[error]") or str - end - -end +if not modules then modules = { } end modules ['x-calcmath'] = { + version = 1.001, + comment = "companion to x-calcmath.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this really needs to be redone + +local format, lower, upper, gsub, sub = string.format, string.lower, string.upper, string.gsub, string.sub +local concat = table.concat +local lpegmatch = lpeg.match + +local calcmath = { } +local moduledata = moduledata or { } +moduledata.calcmath = calcmath + +local list_1 = { + "median", "min", "max", "round", "ln", "log", + "sin", "cos", "tan", "sinh", "cosh", "tanh" +} +local list_2 = { + "int", "sum", "prod" +} +local list_3 = { + "f", "g" +} +local list_4 = { + "pi", "inf" +} + +local list_1_1 = { } +local list_2_1 = { } +local list_2_2 = { } +local list_2_3 = { } +local list_4_1 = { } + +local frozen = false + +local function freeze() + for k=1,#list_1 do + local v = list_1[k] + list_1_1[v] = "\\".. upper(v) .." " + end + for k=1,#list_2 do + local v = list_2[k] + list_2_1[v .. "%((.-),(.-),(.-)%)"] = "\\" .. upper(v) .. "^{%1}_{%2}{%3}" + list_2_2[v .. "%((.-),(.-)%)"] = "\\" .. 
upper(v) .. "^{%1}{%2}" + list_2_3[v .. "%((.-)%)"] = "\\" .. upper(v) .. "{%1}" + end + for k=1,#list_4 do + local v = list_4[k] + list_4_1[v] = "\\" .. upper(v) + end + frozen = true +end + +local entities = { + ['gt'] = '>', + ['lt'] = '<', +} + +local symbols = { + ["<="] = "\\LE ", + [">="] = "\\GE ", + ["=<"] = "\\LE ", + ["=>"] = "\\GE ", + ["=="] = "\\EQ ", + ["<" ] = "\\LT ", + [">" ] = "\\GT ", + ["="] = "\\EQ ", +} + +local function nsub(str,tag,pre,post) + return (gsub(str,tag .. "(%b())", function(body) + return pre .. nsub(sub(body,2,-2),tag,pre,post) .. post + end)) +end + +local function totex(str,mode) + if not frozen then freeze() end + local n = 0 + -- crap + str = gsub(str,"%s+",' ') + -- xml + str = gsub(str,"&(.-);",entities) + -- ...E... + str = gsub(str,"([%-%+]?[%d%.%+%-]+)E([%-%+]?[%d%.]+)", "{\\SCINOT{%1}{%2}}") + -- ^-.. + str = gsub(str,"%^([%-%+]*%d+)", "^{%1}") + -- ^(...) + str = nsub(str,"%^", "^{", "}") + -- 1/x^2 + repeat + str, n = gsub(str,"([%d%w%.]+)/([%d%w%.]+%^{[%d%w%.]+})", "\\frac{%1}{%2}") + until n == 0 + -- todo: autoparenthesis + -- int(a,b,c) + for k, v in next, list_2_1 do + repeat str, n = gsub(str,k,v) until n == 0 + end + -- int(a,b) + for k, v in next, list_2_2 do + repeat str, n = gsub(str,k,v) until n == 0 + end + -- int(a) + for k, v in next, list_2_3 do + repeat str, n = gsub(str,k,v) until n == 0 + end + -- sin(x) => {\\sin(x)} + for k, v in next, list_1_1 do + repeat str, n = gsub(str,k,v) until n == 0 + end + -- mean + str = nsub(str, "mean", "\\OVERLINE{", "}") + -- (1+x)/(1+x) => \\FRAC{1+x}{1+x} + repeat + str, n = gsub(str,"(%b())/(%b())", function(a,b) + return "\\FRAC{" .. sub(a,2,-2) .. "}{" .. sub(b,2,-2) .. "}" + end ) + until n == 0 + -- (1+x)/x => \\FRAC{1+x}{x} + repeat + str, n = gsub(str,"(%b())/([%+%-]?[%.%d%w]+)", function(a,b) + return "\\FRAC{" .. sub(a,2,-2) .. "}{" .. b .. "}" + end ) + until n == 0 + -- 1/(1+x) => \\FRAC{1}{1+x} + repeat + str, n = gsub(str,"([%.%d%w]+)/(%b())", function(a,b) + return "\\FRAC{" .. a .. "}{" .. sub(b,2,-2) .. "}" + end ) + until n == 0 + -- 1/x => \\FRAC{1}{x} + repeat + str, n = gsub(str,"([%.%d%w]+)/([%+%-]?[%.%d%w]+)", "\\FRAC{%1}{%2}") + until n == 0 + -- times + str = gsub(str,"%*", " ") + -- symbols -- we can use a table substitution here + str = gsub(str,"([<>=][<>=]*)", symbols) + -- functions + str = nsub(str,"sqrt", "\\SQRT{", "}") + str = nsub(str,"exp", "e^{", "}") + str = nsub(str,"abs", "\\left|", "\\right|") + -- d/D + str = nsub(str,"D", "{\\FRAC{\\MBOX{d}}{\\MBOX{d}x}{(", ")}}") + str = gsub(str,"D([xy])", "\\FRAC{{\\RM d}%1}{{\\RM d}x}") + -- f/g + for k,v in next, list_3 do -- todo : prepare k,v + str = nsub(str,"D"..v,"{\\RM "..v.."}^{\\PRIME}(",")") + str = nsub(str,v,"{\\RM "..v.."}(",")") + end + -- more symbols + for k,v in next, list_4_1 do + str = gsub(str,k,v) + end + -- parenthesis (optional) + if mode == 2 then + str = gsub(str,"%(", "\\left(") + str = gsub(str,"%)", "\\right)") + end + -- csnames + str = gsub(str,"(\\[A-Z]+)", lower) + -- report + return str +end + +calcmath.totex = totex + +function calcmath.tex(str,mode) + context(totex(str)) +end + +function calcmath.xml(id,mode) + context(totex(lxml.id(id).dt[1],mode)) +end + +-- work in progress ... lpeg variant + +if false then + + -- todo: + + -- maybe rewrite to current lpeg, i.e. 
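-- A usage sketch of the gsub based converter defined above; the input strings
-- are made up. Mode 2 additionally wraps parentheses in \left( ... \right).

print(moduledata.calcmath.totex("sin(x)/(1+x^2)"))
print(moduledata.calcmath.totex("sqrt(2*pi)", 2))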
string replacement and no Cc's + + -- table approach we have now is less efficient but more flexible + + -- D \frac {\rm d} {{\rm d}x} + -- Dx Dy \frac {{\rm d}y} {{\rm d}x} + -- Df Dg {\rm f}^{\prime} + -- f() g() {\rm f}() + + + -- valid utf8 + + local S, P, R, C, V, Cc, Ct = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct + + local space = S(" \n\r\t")^0 + local integer = P("-")^-1 * R("09")^1 + local realpart = P("-")^-1 * R("09")^1 * S(".")^1 * R("09")^1 + local number = Cc("number") * C(integer) * space + local real = Cc("real") * C(realpart) * space + local float = Cc("float") * C(realpart) * lpeg.P("E") * lpeg.C(integer) * space + local identifier = Cc("identifier") * C(R("az","AZ")) * space + local compareop = Cc("compare") * C(P("<") + P("=") + P(">") + P(">=") + P("<=") + P(">") + P("<")) * space + local factorop = Cc("factor") * C(S("+-^_,")) * space + local termop = Cc("term") * C(S("*/")) * space + local constant = Cc("constant") * C(P("pi") + lpeg.P("inf")) * space + local functionop = Cc("function") * C(R("az")^1) * space + local open = P("(") * space + local close = P(")") * space + + local grammar = P { + "expression", + expression = Ct(V("factor") * ((factorop+compareop) * V("factor"))^0), + factor = Ct(V("term") * (termop * V("term"))^0), + term = Ct( + float + real + number + + (open * V("expression") * close) + + (functionop * open * (V("expression") * (P(",") * V("expression"))^0) * close) + + (functionop * V("term")) + + constant + identifier + ), + } + + local parser = space * grammar * -1 + + local function has_factor(t) + for i=1,#t do + if t[i] == "factor" then + return true + end + end + end + + -- can be sped up if needed ... + + function totex(t) + if t then + local one = t[1] + if type(one) == "string" then + local two, three = t[2], t[3] + if one == "number" then + context(two) + elseif one == "real" then + context(two) + elseif one == "float" then + context("\\scinot{",two,"}{",three,"}") + elseif one == "identifier" then + context(two) + elseif one == "constant" then + context("\\"..two) + elseif one == "function" then + if two == "sqrt" then + context("\\sqrt{") + totex(three) + context("}") + elseif two == "exp" then + context(" e^{") + totex(three) + context("}") + elseif two == "abs" then + context("\\left|") + totex(three) + context("\\right|") + elseif two == "mean" then + context("\\overline{") + totex(three) + context("}") + elseif two == "int" or two == "prod" or two == "sum" then + local four, five = t[4], t[5] + if five then + context("\\"..two.."^{") -- context[two]("{") + totex(three) + context("}_{") + totex(four) + context("}") + totex(five) + elseif four then + context("\\"..two.."^{") + totex(three) + context("}") + totex(four) + elseif three then + context("\\"..two.." 
") -- " " not needed + totex(three) + else + context("\\"..two) + end + else + context("\\"..two.."(") + totex(three) + context(")") + end + end + else + local nt = #t + local hasfactor = has_factor(t) + if hasfactor then + context("\\left(") + end + totex(one) + for i=2,nt,3 do + local what, how, rest = t[i], t[i+1], t[i+2] + if what == "factor" then + if how == '^' or how == "_" then + context(how) + context("{") + totex(rest) + context("}") + else + context(how) + totex(rest) + end + elseif what == "term" then + if how == '/' then + context("\\frac{") + totex(rest) + context("}{") + totex(t[i+3] or "") + context("}") + elseif how == '*' then + context("\\times") + totex(rest) + else + context(how) + totex(three) + end + elseif what == "compare" then + if two == ">=" then + context("\\ge") + elseif two == "<=" then + context("\\le") + elseif two == ">" then + context(">") + elseif two == "<" then + context("<") + end + totex(three) + end + end + if hasfactor then + context("\\right)") + end + end + end + end + + calcmath = { } + + function calcmath.parse(str) + return lpegmatch(parser,str) + end + + function calcmath.tex(str) + str = totex(lpegmatch(parser,str)) + return (str == "" and "[error]") or str + end + +end diff --git a/tex/context/base/x-cals.lua b/tex/context/base/x-cals.lua index 4051dd157..f88800df9 100644 --- a/tex/context/base/x-cals.lua +++ b/tex/context/base/x-cals.lua @@ -1,218 +1,218 @@ -if not modules then modules = { } end modules ['x-cals'] = { - version = 1.001, - comment = "companion to x-cals.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local format, lower = string.format, string.lower -local xmlsprint, xmlcprint, xmlcollected, xmlelements = xml.sprint, xml.cprint, xml.collected, xml.elements -local n_todimen, s_todimen = number.todimen, string.todimen - --- there is room for speedups as well as cleanup (using context functions) - -local cals = { } -moduledata.cals = cals -lxml.mathml = cals -- for the moment - -cals.ignore_widths = false -cals.shrink_widths = false -cals.stretch_widths = false - --- the following flags only apply to columns that have a specified width --- --- proportional : shrink or stretch proportionally to the width --- equal : shrink or stretch equaly distributed --- n < 1 : shrink or stretch proportionally to the width but multiplied by n --- --- more clever things, e.g. 
the same but applied to unspecified widths --- has to happen at the core-ntb level (todo) - -local halignments = { - left = "flushleft", - right = "flushright", - center = "middle", - centre = "middle", - justify = "normal", -} - -local valignments = { - top = "high", - bottom = "low", - middle = "lohi", -} - -local function adapt(widths,b,w,delta,sum,n,what) - if b == "equal" then - delta = delta/n - for k, v in next, w do - widths[k] = n_todimen(v - delta) - end - elseif b == "proportional" then - delta = delta/sum - for k, v in next, w do - widths[k] = n_todimen(v - v*delta) - end - elseif type(b) == "number" and b < 1 then - delta = b*delta/sum - for k, v in next, w do - widths[k] = n_todimen(v - v*delta) - end - end -end - -local function getspecs(root, pattern, names, widths) - -- here, but actually we need this in core-ntb.tex - -- but ideally we need an mkiv enhanced core-ntb.tex - local ignore_widths = cals.ignore_widths - local shrink_widths = cals.shrink_widths - local stretch_widths = cals.stretch_widths - for e in xmlcollected(root,pattern) do - local at = e.at - local column = at.colnum - if column then - if not ignore_widths then - local width = at.colwidth - if width then - widths[tonumber(column)] = lower(width) - end - end - local name = at.colname - if name then - names[name] = tonumber(column) - end - end - end - if ignore_width then - -- forget about it - elseif shrink_widths or stretch_widths then - local sum, n, w = 0, 0, { } - for _, v in next, widths do - n = n + 1 - v = (type(v) == "string" and s_todimen(v)) or v - if v then - w[n] = v - sum = sum + v - end - end - local hsize = tex.hsize - if type(hsize) == "string" then - hsize = s_todimen(hsize) - end - local delta = sum - hsize - if shrink_widths and delta > 0 then - adapt(widths,shrink_widths,w,delta,sum,n,"shrink") - elseif stretch_widths and delta < 0 then - adapt(widths,stretch_widths,w,delta,sum,n,"stretch") - end - end -end - -local function getspans(root, pattern, names, spans) - for e in xmlcollected(root,pattern) do - local at = e.at - local name, namest, nameend = at.colname, names[at.namest or "?"], names[at.nameend or "?"] - if name and namest and nameend then - spans[name] = tonumber(nameend) - tonumber(namest) + 1 - end - end -end - -local bTR, eTR, bTD, eTD = context.bTR, context.eTR, context.bTD, context.eTD - -function cals.table(root,namespace) - - local prefix = (namespace or "cals") .. ":" - - local prefix = namespace and namespace ~= "" and (namespace .. ":") or "" - local p = "/" .. prefix - - local tgroupspec = p .. "tgroup" - local colspec = p .. "colspec" - local spanspec = p .. "spanspec" - local hcolspec = p .. "thead" .. p .. "colspec" - local bcolspec = p .. "tbody" .. p .. "colspec" - local fcolspec = p .. "tfoot" .. p .. "colspec" - local entryspec = p .. "entry" .. "|" .. prefix .. "entrytbl" -- shouldn't that be p ? - local hrowspec = p .. "thead" .. p .. "row" - local browspec = p .. "tbody" .. p .. "row" - local frowspec = p .. "tfoot" .. p .. 
"row" - - local function tablepart(root, xcolspec, xrowspec, before, after) -- move this one outside - before() - local at = root.at - local pphalign, ppvalign = at.align, at.valign - local names, widths, spans = { }, { }, { } - getspecs(root, colspec , names, widths) - getspecs(root, xcolspec, names, widths) - getspans(root, spanspec, names, spans) - for r, d, k in xmlelements(root,xrowspec) do - bTR() - local dk = d[k] - local at = dk.at - local phalign, pvalign = at.align or pphalign, at.valign or ppvalign -- todo: __p__ test - local col = 1 - for rr, dd, kk in xmlelements(dk,entryspec) do - local dk = dd[kk] - if dk.tg == "entrytbl" then - -- bTD(function() cals.table(dk) end) - bTD() - context("{") - cals.table(dk) - context("}") - eTD() - col = col + 1 - else - local at = dk.at - local b, e, s, m = names[at.namest or "?"], names[at.nameend or "?"], spans[at.spanname or "?"], at.morerows - local halign, valign = at.align or phalign, at.valign or pvalign - if b and e then - s = e - b + 1 - end - if halign then - halign = halignments[halign] - end - if valign then - valign = valignments[valign] - end - local width = widths[col] - if s or m or halign or valign or width then -- currently only english interface ! - bTD { - nx = s or 1, - ny = (m or 0) + 1, - align = format("{%s,%s}",halign or "flushleft",valign or "high"), - width = width or "fit", - } - else - bTD { - align = "{flushleft,high}", - width = "fit", -- else problems with vertical material - } - end - xmlcprint(dk) - eTD() - col = col + (s or 1) - end - end - eTR() - end - after() - end - - for tgroup in lxml.collected(root,tgroupspec) do - context.directsetup("cals:table:before") - lxml.directives.before(root,"cdx") -- "cals:table" - context.bgroup() - lxml.directives.setup(root,"cdx") -- "cals:table" - context.bTABLE() - tablepart(tgroup, hcolspec, hrowspec, context.bTABLEhead, context.eTABLEhead) - tablepart(tgroup, bcolspec, browspec, context.bTABLEbody, context.eTABLEbody) - tablepart(tgroup, fcolspec, frowspec, context.bTABLEfoot, context.eTABLEfoot) - context.eTABLE() - context.egroup() - lxml.directives.after(root,"cdx") -- "cals:table" - context.directsetup("cals:table:after") - end - -end +if not modules then modules = { } end modules ['x-cals'] = { + version = 1.001, + comment = "companion to x-cals.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, lower = string.format, string.lower +local xmlsprint, xmlcprint, xmlcollected, xmlelements = xml.sprint, xml.cprint, xml.collected, xml.elements +local n_todimen, s_todimen = number.todimen, string.todimen + +-- there is room for speedups as well as cleanup (using context functions) + +local cals = { } +moduledata.cals = cals +lxml.mathml = cals -- for the moment + +cals.ignore_widths = false +cals.shrink_widths = false +cals.stretch_widths = false + +-- the following flags only apply to columns that have a specified width +-- +-- proportional : shrink or stretch proportionally to the width +-- equal : shrink or stretch equaly distributed +-- n < 1 : shrink or stretch proportionally to the width but multiplied by n +-- +-- more clever things, e.g. 
the same but applied to unspecified widths +-- has to happen at the core-ntb level (todo) + +local halignments = { + left = "flushleft", + right = "flushright", + center = "middle", + centre = "middle", + justify = "normal", +} + +local valignments = { + top = "high", + bottom = "low", + middle = "lohi", +} + +local function adapt(widths,b,w,delta,sum,n,what) + if b == "equal" then + delta = delta/n + for k, v in next, w do + widths[k] = n_todimen(v - delta) + end + elseif b == "proportional" then + delta = delta/sum + for k, v in next, w do + widths[k] = n_todimen(v - v*delta) + end + elseif type(b) == "number" and b < 1 then + delta = b*delta/sum + for k, v in next, w do + widths[k] = n_todimen(v - v*delta) + end + end +end + +local function getspecs(root, pattern, names, widths) + -- here, but actually we need this in core-ntb.tex + -- but ideally we need an mkiv enhanced core-ntb.tex + local ignore_widths = cals.ignore_widths + local shrink_widths = cals.shrink_widths + local stretch_widths = cals.stretch_widths + for e in xmlcollected(root,pattern) do + local at = e.at + local column = at.colnum + if column then + if not ignore_widths then + local width = at.colwidth + if width then + widths[tonumber(column)] = lower(width) + end + end + local name = at.colname + if name then + names[name] = tonumber(column) + end + end + end + if ignore_width then + -- forget about it + elseif shrink_widths or stretch_widths then + local sum, n, w = 0, 0, { } + for _, v in next, widths do + n = n + 1 + v = (type(v) == "string" and s_todimen(v)) or v + if v then + w[n] = v + sum = sum + v + end + end + local hsize = tex.hsize + if type(hsize) == "string" then + hsize = s_todimen(hsize) + end + local delta = sum - hsize + if shrink_widths and delta > 0 then + adapt(widths,shrink_widths,w,delta,sum,n,"shrink") + elseif stretch_widths and delta < 0 then + adapt(widths,stretch_widths,w,delta,sum,n,"stretch") + end + end +end + +local function getspans(root, pattern, names, spans) + for e in xmlcollected(root,pattern) do + local at = e.at + local name, namest, nameend = at.colname, names[at.namest or "?"], names[at.nameend or "?"] + if name and namest and nameend then + spans[name] = tonumber(nameend) - tonumber(namest) + 1 + end + end +end + +local bTR, eTR, bTD, eTD = context.bTR, context.eTR, context.bTD, context.eTD + +function cals.table(root,namespace) + + local prefix = (namespace or "cals") .. ":" + + local prefix = namespace and namespace ~= "" and (namespace .. ":") or "" + local p = "/" .. prefix + + local tgroupspec = p .. "tgroup" + local colspec = p .. "colspec" + local spanspec = p .. "spanspec" + local hcolspec = p .. "thead" .. p .. "colspec" + local bcolspec = p .. "tbody" .. p .. "colspec" + local fcolspec = p .. "tfoot" .. p .. "colspec" + local entryspec = p .. "entry" .. "|" .. prefix .. "entrytbl" -- shouldn't that be p ? + local hrowspec = p .. "thead" .. p .. "row" + local browspec = p .. "tbody" .. p .. "row" + local frowspec = p .. "tfoot" .. p .. 
"row" + + local function tablepart(root, xcolspec, xrowspec, before, after) -- move this one outside + before() + local at = root.at + local pphalign, ppvalign = at.align, at.valign + local names, widths, spans = { }, { }, { } + getspecs(root, colspec , names, widths) + getspecs(root, xcolspec, names, widths) + getspans(root, spanspec, names, spans) + for r, d, k in xmlelements(root,xrowspec) do + bTR() + local dk = d[k] + local at = dk.at + local phalign, pvalign = at.align or pphalign, at.valign or ppvalign -- todo: __p__ test + local col = 1 + for rr, dd, kk in xmlelements(dk,entryspec) do + local dk = dd[kk] + if dk.tg == "entrytbl" then + -- bTD(function() cals.table(dk) end) + bTD() + context("{") + cals.table(dk) + context("}") + eTD() + col = col + 1 + else + local at = dk.at + local b, e, s, m = names[at.namest or "?"], names[at.nameend or "?"], spans[at.spanname or "?"], at.morerows + local halign, valign = at.align or phalign, at.valign or pvalign + if b and e then + s = e - b + 1 + end + if halign then + halign = halignments[halign] + end + if valign then + valign = valignments[valign] + end + local width = widths[col] + if s or m or halign or valign or width then -- currently only english interface ! + bTD { + nx = s or 1, + ny = (m or 0) + 1, + align = format("{%s,%s}",halign or "flushleft",valign or "high"), + width = width or "fit", + } + else + bTD { + align = "{flushleft,high}", + width = "fit", -- else problems with vertical material + } + end + xmlcprint(dk) + eTD() + col = col + (s or 1) + end + end + eTR() + end + after() + end + + for tgroup in lxml.collected(root,tgroupspec) do + context.directsetup("cals:table:before") + lxml.directives.before(root,"cdx") -- "cals:table" + context.bgroup() + lxml.directives.setup(root,"cdx") -- "cals:table" + context.bTABLE() + tablepart(tgroup, hcolspec, hrowspec, context.bTABLEhead, context.eTABLEhead) + tablepart(tgroup, bcolspec, browspec, context.bTABLEbody, context.eTABLEbody) + tablepart(tgroup, fcolspec, frowspec, context.bTABLEfoot, context.eTABLEfoot) + context.eTABLE() + context.egroup() + lxml.directives.after(root,"cdx") -- "cals:table" + context.directsetup("cals:table:after") + end + +end diff --git a/tex/context/base/x-chemml.lua b/tex/context/base/x-chemml.lua index 79c1d9525..46a13a37e 100644 --- a/tex/context/base/x-chemml.lua +++ b/tex/context/base/x-chemml.lua @@ -1,51 +1,51 @@ -if not modules then modules = { } end modules ['x-chemml'] = { - version = 1.001, - comment = "companion to x-chemml.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- not yet acceptable cld - -local format, lower, upper, gsub, sub, match = string.format, string.lower, string.upper, string.gsub, string.sub, string.match -local concat = table.concat - -local chemml = { } -local moduledata = moduledata or { } -moduledata.chemml = chemml - -function chemml.pi(id) - local str = xml.content(lxml.id(id)) - local _, class, key, value = match(str,"^(%S+)%s+(%S+)%s+(%S+)%s+(%S+)%s*$") - if key and value then - context("\\setupCMLappearance[%s][%s=%s]",class, key, value) - end -end - -function chemml.do_graphic(id) - local t = { } - for r, d, k in xml.elements(lxml.id(id),"cml:graphic") do - t[#t+1] = xml.tostring(d[k].dt) - end - context(concat(t,",")) -end - -function chemml.no_graphic(id) - local t = { } - for r, d, k in xml.elements(lxml.id(id),"cml:text|cml:oxidation|cml:annotation") do - local dk = d[k] - if dk.tg == 
"oxidation" then - t[#t+1] = format("\\chemicaloxidation{%s}{%s}{%s}",r.at.sign or "",r.at.n or 1,xml.tostring(dk.dt)) - elseif dk.tg == "annotation" then - local location = r.at.location or "r" - local caption = xml.content(xml.first(dk,"cml:caption")) - local text = xml.content(xml.first(dk,"cml:text")) - t[#t+1] = format("\\doCMLannotation{%s}{%s}{%s}",location,caption,text) - else - t[#t+1] = xml.tostring(dk.dt) or "" - end - end - context(concat(t,",")) -end - +if not modules then modules = { } end modules ['x-chemml'] = { + version = 1.001, + comment = "companion to x-chemml.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- not yet acceptable cld + +local format, lower, upper, gsub, sub, match = string.format, string.lower, string.upper, string.gsub, string.sub, string.match +local concat = table.concat + +local chemml = { } +local moduledata = moduledata or { } +moduledata.chemml = chemml + +function chemml.pi(id) + local str = xml.content(lxml.id(id)) + local _, class, key, value = match(str,"^(%S+)%s+(%S+)%s+(%S+)%s+(%S+)%s*$") + if key and value then + context("\\setupCMLappearance[%s][%s=%s]",class, key, value) + end +end + +function chemml.do_graphic(id) + local t = { } + for r, d, k in xml.elements(lxml.id(id),"cml:graphic") do + t[#t+1] = xml.tostring(d[k].dt) + end + context(concat(t,",")) +end + +function chemml.no_graphic(id) + local t = { } + for r, d, k in xml.elements(lxml.id(id),"cml:text|cml:oxidation|cml:annotation") do + local dk = d[k] + if dk.tg == "oxidation" then + t[#t+1] = format("\\chemicaloxidation{%s}{%s}{%s}",r.at.sign or "",r.at.n or 1,xml.tostring(dk.dt)) + elseif dk.tg == "annotation" then + local location = r.at.location or "r" + local caption = xml.content(xml.first(dk,"cml:caption")) + local text = xml.content(xml.first(dk,"cml:text")) + t[#t+1] = format("\\doCMLannotation{%s}{%s}{%s}",location,caption,text) + else + t[#t+1] = xml.tostring(dk.dt) or "" + end + end + context(concat(t,",")) +end + diff --git a/tex/context/base/x-ct.lua b/tex/context/base/x-ct.lua index 2dee985c3..190da78fc 100644 --- a/tex/context/base/x-ct.lua +++ b/tex/context/base/x-ct.lua @@ -1,165 +1,165 @@ -if not modules then modules = { } end modules ['x-ct'] = { - version = 1.001, - comment = "companion to x-ct.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- needs testing - -local xmlsprint, xmlcprint, xmlfilter, xmlcollected = xml.sprint, xml.cprint, xml.filter, xml.collected -local format, concat, rep, find = string.format, table.concat, string.rep, string.find - -moduledata.ct = moduledata.ct or { } - -local halignments = { - left = 'l', - flushleft = 'l', - right = 'r', - flushright = 'r', - center = 'c', - middle = 'c', - centre = 'c', - justify = '', -} - -local templates = { } - -function moduledata.ct.registertabulatetemplate(name,str) - templates[name] = str -end - -local function roottemplate(root) - local rt = root.at.template - if rt then - local template = templates[rt] - if template then - return template - else - if not find(rt,"|") then - rt = gsub(rt,",","|") - end - if not find(rt,"^|") then rt = "|" .. rt end - if not find(rt,"|$") then rt = rt .. 
"|" end - return rt - end - end -end - -local function specifiedtemplate(root,templatespec) - local template = { } - for e in xmlcollected(root,templatespec) do - local at = e.at - local tm = halignments[at.align] or "" - if toboolean(at.paragraph) then - tm = tm .. "p" - end - template[#template+1] = tm - end - if #template > 0 then - return "|" .. concat(template,"|") .. "|" - else - return nil - end -end - -local function autotemplate(root,rowspec,cellspec) - local max = 0 - for e in xmlcollected(root,rowspec) do - local n = xml.count(e,cellspec) - if n > max then max = n end - end - if max == 2 then - return "|l|p|" - elseif max > 0 then - return "|" .. rep("p|",max) - else - return nil - end -end - -local defaulttemplate = "|l|p|" - -function moduledata.ct.tabulate(root,namespace) - if not root then - return - else - root = lxml.id(root) - end - - local prefix = (namespace or "context") .. ":" - - local templatespec = "/" .. prefix .. "template" .. "/" .. prefix .. "column" - local bodyrowspec = "/" .. prefix .. "body" .. "/" .. prefix .. "row" - local cellspec = "/" .. prefix .. "cell" - - local template = - roottemplate (root) or - specifiedtemplate (root,templatespec) or - autotemplate (root,bodyrowspec,cellspec) or - defaulttemplate - - -- todo: head and foot - - local NC, NR = context.NC, context.NR - - lxml.directives.before(root,'cdx') - context.bgroup() - lxml.directives.setup(root,'cdx') - context.starttabulate { template } - for e in xmlcollected(root,bodyrowspec) do - NC() - for e in xmlcollected(e,cellspec) do - xmlcprint(e) - NC() - end - NR() - end - context.stoptabulate() - context.egroup() - lxml.directives.after(root,'cdx') - -end - -function moduledata.ct.combination(root,namespace) - - if not root then - return - else - root = lxml.id(root) - end - - local prefix = (namespace or "context") .. ":" - - local pairspec = "/" .. prefix .. "pair" - local contentspec = "/" .. prefix .. "content" .. "/text()" - local captionspec = "/" .. prefix .. "caption" .. 
"/text()" - - local nx, ny = root.at.nx, root.at.ny - - if not (nx or ny) then - nx = xml.count(root,pairspec) or 2 - end - local template = format("%s*%s", nx or 1, ny or 1) - - lxml.directives.before(root,'cdx') - context.bgroup() - lxml.directives.setup(root,'cdx') - context.startcombination { template } - for e in xmlcollected(root,pairspec) do - -- context.combination( - -- function() xmlfilter(e,contentspec) end, - -- function() xmlfilter(e,captionspec) end - -- ) - context("{") - xmlfilter(e,contentspec) - context("}{") - xmlfilter(e,captionspec) - context("}") - end - context.stopcombination() - context.egroup() - lxml.directives.after(root,'cdx') - -end +if not modules then modules = { } end modules ['x-ct'] = { + version = 1.001, + comment = "companion to x-ct.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- needs testing + +local xmlsprint, xmlcprint, xmlfilter, xmlcollected = xml.sprint, xml.cprint, xml.filter, xml.collected +local format, concat, rep, find = string.format, table.concat, string.rep, string.find + +moduledata.ct = moduledata.ct or { } + +local halignments = { + left = 'l', + flushleft = 'l', + right = 'r', + flushright = 'r', + center = 'c', + middle = 'c', + centre = 'c', + justify = '', +} + +local templates = { } + +function moduledata.ct.registertabulatetemplate(name,str) + templates[name] = str +end + +local function roottemplate(root) + local rt = root.at.template + if rt then + local template = templates[rt] + if template then + return template + else + if not find(rt,"|") then + rt = gsub(rt,",","|") + end + if not find(rt,"^|") then rt = "|" .. rt end + if not find(rt,"|$") then rt = rt .. "|" end + return rt + end + end +end + +local function specifiedtemplate(root,templatespec) + local template = { } + for e in xmlcollected(root,templatespec) do + local at = e.at + local tm = halignments[at.align] or "" + if toboolean(at.paragraph) then + tm = tm .. "p" + end + template[#template+1] = tm + end + if #template > 0 then + return "|" .. concat(template,"|") .. "|" + else + return nil + end +end + +local function autotemplate(root,rowspec,cellspec) + local max = 0 + for e in xmlcollected(root,rowspec) do + local n = xml.count(e,cellspec) + if n > max then max = n end + end + if max == 2 then + return "|l|p|" + elseif max > 0 then + return "|" .. rep("p|",max) + else + return nil + end +end + +local defaulttemplate = "|l|p|" + +function moduledata.ct.tabulate(root,namespace) + if not root then + return + else + root = lxml.id(root) + end + + local prefix = (namespace or "context") .. ":" + + local templatespec = "/" .. prefix .. "template" .. "/" .. prefix .. "column" + local bodyrowspec = "/" .. prefix .. "body" .. "/" .. prefix .. "row" + local cellspec = "/" .. prefix .. 
"cell" + + local template = + roottemplate (root) or + specifiedtemplate (root,templatespec) or + autotemplate (root,bodyrowspec,cellspec) or + defaulttemplate + + -- todo: head and foot + + local NC, NR = context.NC, context.NR + + lxml.directives.before(root,'cdx') + context.bgroup() + lxml.directives.setup(root,'cdx') + context.starttabulate { template } + for e in xmlcollected(root,bodyrowspec) do + NC() + for e in xmlcollected(e,cellspec) do + xmlcprint(e) + NC() + end + NR() + end + context.stoptabulate() + context.egroup() + lxml.directives.after(root,'cdx') + +end + +function moduledata.ct.combination(root,namespace) + + if not root then + return + else + root = lxml.id(root) + end + + local prefix = (namespace or "context") .. ":" + + local pairspec = "/" .. prefix .. "pair" + local contentspec = "/" .. prefix .. "content" .. "/text()" + local captionspec = "/" .. prefix .. "caption" .. "/text()" + + local nx, ny = root.at.nx, root.at.ny + + if not (nx or ny) then + nx = xml.count(root,pairspec) or 2 + end + local template = format("%s*%s", nx or 1, ny or 1) + + lxml.directives.before(root,'cdx') + context.bgroup() + lxml.directives.setup(root,'cdx') + context.startcombination { template } + for e in xmlcollected(root,pairspec) do + -- context.combination( + -- function() xmlfilter(e,contentspec) end, + -- function() xmlfilter(e,captionspec) end + -- ) + context("{") + xmlfilter(e,contentspec) + context("}{") + xmlfilter(e,captionspec) + context("}") + end + context.stopcombination() + context.egroup() + lxml.directives.after(root,'cdx') + +end diff --git a/tex/context/base/x-ldx.lua b/tex/context/base/x-ldx.lua index 31cbebf13..8a6864033 100644 --- a/tex/context/base/x-ldx.lua +++ b/tex/context/base/x-ldx.lua @@ -1,341 +1,341 @@ -if not modules then modules = { } end modules ['x-ldx'] = { - version = 1.001, - comment = "companion to x-ldx.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- --[[ldx-- --- Introduction --- --ldx]]-- - ---[[ldx-- -Lua Documentation Module - -This file is part of the documentation suite and -itself serves as an example of using in combination -with . - -I will rewrite this using lpeg. On the other hand, we cannot expect proper - and for educational purposed the syntax might be wrong. ---ldx]]-- - --- there is a nice parser on from http://lua-users.org/wiki/LpegRecipes (by --- Patrick Donnelly) but lua crashes when I apply functions to some of the --- matches - -banner = "version 1.0.1 - 2007+ - PRAGMA ADE / CONTEXT" - ---[[ -This script needs a few libraries. Instead of merging the code here -we can use - - -mtxrun --internal x-ldx.lua - - -That way, the libraries included in the runner will be used. -]]-- - --- libraries l-string.lua l-table.lua l-io.lua l-file.lua - --- begin library merge --- end library merge - -local gsub, find, sub = string.gsub, string.find, string.sub -local splitstring, emptystring = string.split, string.is_empty -local concat = table.concat - ---[[ -Just a demo comment line. We will handle such multiline comments but -only when they start and end at the beginning of a line. More rich -comments are tagged differently. -]]-- - ---[[ldx-- -First we define a proper namespace for this module. The l stands for -, the d for documentation and the x for -. ---ldx]]-- - -if not ldx then ldx = { } end - ---[[ldx-- -We load the lua file into a table. 
The entries in this table themselves are -tables and have keys like code and comment. ---ldx]]-- - -function ldx.load(filename) - local data = file.readdata(filename) - local expr = "%s*%-%-%[%[ldx%-*%s*(.-)%s*%-%-ldx%]%]%-*%s*" - local i, j, t = 0, 0, { } - while true do - local comment, ni - ni, j, comment = find(data, expr, j) - if not ni then break end - t[#t+1] = { code = sub(data, i, ni-1) } - t[#t+1] = { comment = comment } - i = j + 1 - end - local str = sub(data, i, #data) - str = gsub(str, "^%s*(.-)%s*$", "%1") - if #str > 0 then - t[#t+1] = { code = str } - end - return t -end - ---[[ldx-- -We will tag keywords so that we can higlight them using a special font -or color. Users can extend this list when needed. ---ldx]]-- - -ldx.keywords = { } - ---[[ldx-- -Here come the reserved words: ---ldx]]-- - -ldx.keywords.reserved = { - ["and"] = 1, - ["break"] = 1, - ["do"] = 1, - ["else"] = 1, - ["elseif"] = 1, - ["end"] = 1, - ["false"] = 1, - ["for"] = 1, - ["function"] = 1, - ["if"] = 1, - ["in"] = 1, - ["local"] = 1, - ["nil"] = 1, - ["not"] = 1, - ["or"] = 1, - ["repeat"] = 1, - ["return"] = 1, - ["then"] = 1, - ["true"] = 1, - ["until"] = 1, - ["while"] = 1 -} - ---[[ldx-- -We need to escape a few tokens. We keep the hash local to the -definition but set it up only once, hence the do -construction. ---ldx]]-- - -do - local e = { [">"] = ">", ["<"] = "<", ["&"] = "&" } - function ldx.escape(str) - return (gsub(str, "([><&])",e)) - end -end - ---[[ldx-- -Enhancing the code is a bit tricky due to the fact that we have to -deal with strings and escaped quotes within these strings. Before we -mess around with the code, we hide the strings, and after that we -insert them again. Single and double quoted strings are tagged so -that we can use a different font to highlight them. ---ldx]]-- - -ldx.make_index = true - -function ldx.enhance(data) -- i need to use lpeg and then we can properly autoindent -) - local e = ldx.escape - for k=1,#data do - local v = data[k] - if v.code then - local dqs, sqs, com, cmt, cod = { }, { }, { }, { }, e(v.code) - cod = gsub(cod, '\\"', "##d##") - cod = gsub(cod, "\\'", "##s##") - cod = gsub(cod, "%-%-%[%[.-%]%]%-%-", function(s) - cmt[#cmt+1] = s - return ">>l>" - end) - cod = gsub(cod, "%-%-([^\n]*)", function(s) - com[#com+1] = s - return ">>c>" - end) - cod = gsub(cod, "(%b\"\")", function(s) - dqs[#dqs+1] = sub(s,2,-2) or "" - return ">>d>" - end) - cod = gsub(cod, "(%b\'\')", function(s) - sqs[#sqs+1] = sub(s,2,-2) or "" - return ">>s>" - end) - cod = gsub(cod, "(%a+)",function(key) - local class = ldx.keywords.reserved[key] - if class then - return "" .. key .. "" - else - return key - end - end) - cod = gsub(cod, ">>s>", function(s) - return "" .. sqs[tonumber(s)] .. "" - end) - cod = gsub(cod, ">>d>", function(s) - return "" .. dqs[tonumber(s)] .. "" - end) - cod = gsub(cod, ">>c>", function(s) - return "" .. com[tonumber(s)] .. "" - end) - cod = gsub(cod, ">>l>", function(s) - return cmt[tonumber(s)] - end) - cod = gsub(cod, "##d##", "\\\"") - cod = gsub(cod, "##s##", "\\\'") - if ldx.make_index then - local lines = splitstring(cod,"\n") - local f = "(function)%s+([%w%.]+)%s*%(" - for k=1,#lines do - local v = lines[k] - -- functies - v = gsub(v,f,function(key, str) - return "" .. str .. "(" - end) - -- variables - v = gsub(v,"^([%w][%w%,%s]-)(=[^=])",function(str, rest) - local t = splitstring(str,",%s*") - for k=1,#t do - t[k] = "" .. t[k] .. "" - end - return concat(t,", ") .. 
rest - end) - -- so far - lines[k] = v - end - v.code = concat(lines,"\n") - else - v.code = cod - end - end - end -end - ---[[ldx-- -We're now ready to save the file in format. This boils -down to wrapping the code and comment as well as the whole document. We tag -lines in the code as such so that we don't need messy CDATA constructs -and by calculating the indentation we also avoid space troubles. It also makes -it possible to change the indentation afterwards. ---ldx]]-- - -function ldx.as_xml(data) -- ldx: not needed - local t, cmode = { }, false - t[#t+1] = "\n" - t[#t+1] = "\n\n" - for k=1,#data do - local v = data[k] - if v.code and not emptystring(v.code) then - t[#t+1] = "\n\n" - local split = splitstring(v.code,"\n") - for k=1,#split do -- make this faster - local v = split[k] - local a, b = find(v,"^(%s+)") - if v then v = gsub(v,"[\n\r ]+$","") end - if a and b then - v = sub(v,b+1,#v) - if cmode then - t[#t+1] = "" .. v .. "\n" - else - t[#t+1] = "" .. v .. "\n" - end - elseif emptystring(v) then - if cmode then - t[#t+1] = "\n" - else - t[#t+1] = "\n" - end - elseif find(v,"^%-%-%[%[") then - t[#t+1] = "" .. v .. "\n" - cmode= true - elseif find(v,"^%]%]%-%-") then - t[#t+1] = "" .. v .. "\n" - cmode= false - elseif cmode then - t[#t+1] = "" .. v .. "\n" - else - t[#t+1] = "" .. v .. "\n" - end - end - t[#t+1] = "\n" - elseif v.comment then - t[#t+1] = "\n\n" .. v.comment .. "\n\n" - else - -- cannot happen - end - end - t[#t+1] = "\n\n" - return concat(t,"") -end - ---[[ldx-- -Saving the result is a trivial effort. ---ldx]]-- - -function ldx.save(filename,data) - file.savedata(filename,ldx.as_xml(data)) -end - ---[[ldx-- -The next function wraps it all in one call: ---ldx]]-- - -function ldx.convert(luaname,ldxname) - if not file.is_readable(luaname) then - luaname = luaname .. ".lua" - end - if file.is_readable(luaname) then - if not ldxname then - ldxname = file.replacesuffix(luaname,"ldx") - end - local data = ldx.load(luaname) - if data then - ldx.enhance(data) - if ldxname ~= luaname then - ldx.save(ldxname,data) - end - end - end -end - ---[[ldx-- -This module can be used directly: - - -mtxrun --internal x-ldx somefile.lua - - -will produce an ldx file that can be processed with -by running: - - -context --use=x-ldx --forcexml somefile.ldx - - -You can do this in one step by saying: - - -context --ctx=x-ldx somefile.lua - - -This will trigger into loading the mentioned - file. That file describes the conversion as well -as the module to be used. - -The main conversion call is: ---ldx]]-- - --- todo: assume usage of "mtxrun --script x-ldx", maybe make it mtx-ldx - -if environment.files and environment.files[1] then - ldx.convert(environment.files[1],environment.files[2]) -end - ---~ exit(1) +if not modules then modules = { } end modules ['x-ldx'] = { + version = 1.001, + comment = "companion to x-ldx.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- --[[ldx-- +-- Introduction +-- --ldx]]-- + +--[[ldx-- +Lua Documentation Module + +This file is part of the documentation suite and +itself serves as an example of using in combination +with . + +I will rewrite this using lpeg. On the other hand, we cannot expect proper + and for educational purposed the syntax might be wrong. 
+--ldx]]-- + +-- there is a nice parser on from http://lua-users.org/wiki/LpegRecipes (by +-- Patrick Donnelly) but lua crashes when I apply functions to some of the +-- matches + +banner = "version 1.0.1 - 2007+ - PRAGMA ADE / CONTEXT" + +--[[ +This script needs a few libraries. Instead of merging the code here +we can use + + +mtxrun --internal x-ldx.lua + + +That way, the libraries included in the runner will be used. +]]-- + +-- libraries l-string.lua l-table.lua l-io.lua l-file.lua + +-- begin library merge +-- end library merge + +local gsub, find, sub = string.gsub, string.find, string.sub +local splitstring, emptystring = string.split, string.is_empty +local concat = table.concat + +--[[ +Just a demo comment line. We will handle such multiline comments but +only when they start and end at the beginning of a line. More rich +comments are tagged differently. +]]-- + +--[[ldx-- +First we define a proper namespace for this module. The l stands for +, the d for documentation and the x for +. +--ldx]]-- + +if not ldx then ldx = { } end + +--[[ldx-- +We load the lua file into a table. The entries in this table themselves are +tables and have keys like code and comment. +--ldx]]-- + +function ldx.load(filename) + local data = file.readdata(filename) + local expr = "%s*%-%-%[%[ldx%-*%s*(.-)%s*%-%-ldx%]%]%-*%s*" + local i, j, t = 0, 0, { } + while true do + local comment, ni + ni, j, comment = find(data, expr, j) + if not ni then break end + t[#t+1] = { code = sub(data, i, ni-1) } + t[#t+1] = { comment = comment } + i = j + 1 + end + local str = sub(data, i, #data) + str = gsub(str, "^%s*(.-)%s*$", "%1") + if #str > 0 then + t[#t+1] = { code = str } + end + return t +end + +--[[ldx-- +We will tag keywords so that we can higlight them using a special font +or color. Users can extend this list when needed. +--ldx]]-- + +ldx.keywords = { } + +--[[ldx-- +Here come the reserved words: +--ldx]]-- + +ldx.keywords.reserved = { + ["and"] = 1, + ["break"] = 1, + ["do"] = 1, + ["else"] = 1, + ["elseif"] = 1, + ["end"] = 1, + ["false"] = 1, + ["for"] = 1, + ["function"] = 1, + ["if"] = 1, + ["in"] = 1, + ["local"] = 1, + ["nil"] = 1, + ["not"] = 1, + ["or"] = 1, + ["repeat"] = 1, + ["return"] = 1, + ["then"] = 1, + ["true"] = 1, + ["until"] = 1, + ["while"] = 1 +} + +--[[ldx-- +We need to escape a few tokens. We keep the hash local to the +definition but set it up only once, hence the do +construction. +--ldx]]-- + +do + local e = { [">"] = ">", ["<"] = "<", ["&"] = "&" } + function ldx.escape(str) + return (gsub(str, "([><&])",e)) + end +end + +--[[ldx-- +Enhancing the code is a bit tricky due to the fact that we have to +deal with strings and escaped quotes within these strings. Before we +mess around with the code, we hide the strings, and after that we +insert them again. Single and double quoted strings are tagged so +that we can use a different font to highlight them. 
+--ldx]]-- + +ldx.make_index = true + +function ldx.enhance(data) -- i need to use lpeg and then we can properly autoindent -) + local e = ldx.escape + for k=1,#data do + local v = data[k] + if v.code then + local dqs, sqs, com, cmt, cod = { }, { }, { }, { }, e(v.code) + cod = gsub(cod, '\\"', "##d##") + cod = gsub(cod, "\\'", "##s##") + cod = gsub(cod, "%-%-%[%[.-%]%]%-%-", function(s) + cmt[#cmt+1] = s + return ">>l>" + end) + cod = gsub(cod, "%-%-([^\n]*)", function(s) + com[#com+1] = s + return ">>c>" + end) + cod = gsub(cod, "(%b\"\")", function(s) + dqs[#dqs+1] = sub(s,2,-2) or "" + return ">>d>" + end) + cod = gsub(cod, "(%b\'\')", function(s) + sqs[#sqs+1] = sub(s,2,-2) or "" + return ">>s>" + end) + cod = gsub(cod, "(%a+)",function(key) + local class = ldx.keywords.reserved[key] + if class then + return "" .. key .. "" + else + return key + end + end) + cod = gsub(cod, ">>s>", function(s) + return "" .. sqs[tonumber(s)] .. "" + end) + cod = gsub(cod, ">>d>", function(s) + return "" .. dqs[tonumber(s)] .. "" + end) + cod = gsub(cod, ">>c>", function(s) + return "" .. com[tonumber(s)] .. "" + end) + cod = gsub(cod, ">>l>", function(s) + return cmt[tonumber(s)] + end) + cod = gsub(cod, "##d##", "\\\"") + cod = gsub(cod, "##s##", "\\\'") + if ldx.make_index then + local lines = splitstring(cod,"\n") + local f = "(function)%s+([%w%.]+)%s*%(" + for k=1,#lines do + local v = lines[k] + -- functies + v = gsub(v,f,function(key, str) + return "" .. str .. "(" + end) + -- variables + v = gsub(v,"^([%w][%w%,%s]-)(=[^=])",function(str, rest) + local t = splitstring(str,",%s*") + for k=1,#t do + t[k] = "" .. t[k] .. "" + end + return concat(t,", ") .. rest + end) + -- so far + lines[k] = v + end + v.code = concat(lines,"\n") + else + v.code = cod + end + end + end +end + +--[[ldx-- +We're now ready to save the file in format. This boils +down to wrapping the code and comment as well as the whole document. We tag +lines in the code as such so that we don't need messy CDATA constructs +and by calculating the indentation we also avoid space troubles. It also makes +it possible to change the indentation afterwards. +--ldx]]-- + +function ldx.as_xml(data) -- ldx: not needed + local t, cmode = { }, false + t[#t+1] = "\n" + t[#t+1] = "\n\n" + for k=1,#data do + local v = data[k] + if v.code and not emptystring(v.code) then + t[#t+1] = "\n\n" + local split = splitstring(v.code,"\n") + for k=1,#split do -- make this faster + local v = split[k] + local a, b = find(v,"^(%s+)") + if v then v = gsub(v,"[\n\r ]+$","") end + if a and b then + v = sub(v,b+1,#v) + if cmode then + t[#t+1] = "" .. v .. "\n" + else + t[#t+1] = "" .. v .. "\n" + end + elseif emptystring(v) then + if cmode then + t[#t+1] = "\n" + else + t[#t+1] = "\n" + end + elseif find(v,"^%-%-%[%[") then + t[#t+1] = "" .. v .. "\n" + cmode= true + elseif find(v,"^%]%]%-%-") then + t[#t+1] = "" .. v .. "\n" + cmode= false + elseif cmode then + t[#t+1] = "" .. v .. "\n" + else + t[#t+1] = "" .. v .. "\n" + end + end + t[#t+1] = "\n" + elseif v.comment then + t[#t+1] = "\n\n" .. v.comment .. "\n\n" + else + -- cannot happen + end + end + t[#t+1] = "\n\n" + return concat(t,"") +end + +--[[ldx-- +Saving the result is a trivial effort. +--ldx]]-- + +function ldx.save(filename,data) + file.savedata(filename,ldx.as_xml(data)) +end + +--[[ldx-- +The next function wraps it all in one call: +--ldx]]-- + +function ldx.convert(luaname,ldxname) + if not file.is_readable(luaname) then + luaname = luaname .. 
".lua" + end + if file.is_readable(luaname) then + if not ldxname then + ldxname = file.replacesuffix(luaname,"ldx") + end + local data = ldx.load(luaname) + if data then + ldx.enhance(data) + if ldxname ~= luaname then + ldx.save(ldxname,data) + end + end + end +end + +--[[ldx-- +This module can be used directly: + + +mtxrun --internal x-ldx somefile.lua + + +will produce an ldx file that can be processed with +by running: + + +context --use=x-ldx --forcexml somefile.ldx + + +You can do this in one step by saying: + + +context --ctx=x-ldx somefile.lua + + +This will trigger into loading the mentioned + file. That file describes the conversion as well +as the module to be used. + +The main conversion call is: +--ldx]]-- + +-- todo: assume usage of "mtxrun --script x-ldx", maybe make it mtx-ldx + +if environment.files and environment.files[1] then + ldx.convert(environment.files[1],environment.files[2]) +end + +--~ exit(1) diff --git a/tex/context/base/x-mathml.lua b/tex/context/base/x-mathml.lua index 31483bbea..676b9a7c6 100644 --- a/tex/context/base/x-mathml.lua +++ b/tex/context/base/x-mathml.lua @@ -1,829 +1,829 @@ -if not modules then modules = { } end modules ['x-mathml'] = { - version = 1.001, - comment = "companion to x-mathml.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- This needs an upgrade to the latest greatest mechanisms. - -local type, next = type, next -local format, lower, find, gsub = string.format, string.lower, string.find, string.gsub -local strip = string.strip -local xmlsprint, xmlcprint, xmltext, xmlcontent = xml.sprint, xml.cprint, xml.text, xml.content -local getid = lxml.getid -local utfchar, utfcharacters, utfvalues = utf.char, utf.characters, utf.values -local lpegmatch = lpeg.match - -local mathml = { } -moduledata.mathml = mathml -lxml.mathml = mathml -- for the moment - --- an alternative is to remap to private codes, where we can have --- different properties .. to be done; this will move and become --- generic; we can then make the private ones active in math mode - --- todo: handle opening/closing mo's here ... presentation mml is such a mess ... 
- -characters.registerentities() - -local doublebar = utfchar(0x2016) - -local n_replacements = { --- [" "] = utfchar(0x2002), -- "&textspace;" -> tricky, no &; in mkiv - ["."] = "{.}", - [","] = "{,}", - [" "] = "", -} - -local l_replacements = { -- in main table - ["|"] = "\\mmlleftdelimiter\\vert", - ["{"] = "\\mmlleftdelimiter\\lbrace", - ["("] = "\\mmlleftdelimiter(", - ["["] = "\\mmlleftdelimiter[", - ["<"] = "\\mmlleftdelimiter<", - [doublebar] = "\\mmlleftdelimiter\\Vert", -} -local r_replacements = { -- in main table - ["|"] = "\\mmlrightdelimiter\\vert", - ["}"] = "\\mmlrightdelimiter\\rbrace", - [")"] = "\\mmlrightdelimiter)", - ["]"] = "\\mmlrightdelimiter]", - [">"] = "\\mmlrightdelimiter>", - [doublebar] = "\\mmlrightdelimiter\\Vert", -} - --- todo: play with asciimode and avoid mmlchar - -local o_replacements = { -- in main table - ["@l"] = "\\mmlleftdelimiter.", - ["@r"] = "\\mmlrightdelimiter.", - ["{"] = "\\mmlleftdelimiter \\lbrace", - ["}"] = "\\mmlrightdelimiter\\rbrace", - ["|"] = "\\mmlleftorrightdelimiter\\vert", - [doublebar] = "\\mmlleftorrightdelimiter\\Vert", - ["("] = "\\mmlleftdelimiter(", - [")"] = "\\mmlrightdelimiter)", - ["["] = "\\mmlleftdelimiter[", - ["]"] = "\\mmlrightdelimiter]", - -- ["<"] = "\\mmlleftdelimiter<", - -- [">"] = "\\mmlrightdelimiter>", - ["#"] = "\\mmlchar{35}", - ["$"] = "\\mmlchar{36}", -- $ - ["%"] = "\\mmlchar{37}", - ["&"] = "\\mmlchar{38}", - ["^"] = "\\mmlchar{94}{}", -- strange, sometimes luatex math sees the char instead of \char - ["_"] = "\\mmlchar{95}{}", -- so we need the {} - ["~"] = "\\mmlchar{126}", - [" "] = "", - ["°"] = "^\\circ", -- hack - - -- [utfchar(0xF103C)] = "\\mmlleftdelimiter<", - [utfchar(0xF1026)] = "\\mmlchar{38}", - -- [utfchar(0xF103E)] = "\\mmlleftdelimiter>", - -} - -local simpleoperatorremapper = utf.remapper(o_replacements) - ---~ languages.data.labels.functions - -local i_replacements = { - ["sin"] = "\\mathopnolimits{sin}", - ["cos"] = "\\mathopnolimits{cos}", - ["abs"] = "\\mathopnolimits{abs}", - ["arg"] = "\\mathopnolimits{arg}", - ["codomain"] = "\\mathopnolimits{codomain}", - ["curl"] = "\\mathopnolimits{curl}", - ["determinant"] = "\\mathopnolimits{det}", - ["divergence"] = "\\mathopnolimits{div}", - ["domain"] = "\\mathopnolimits{domain}", - ["gcd"] = "\\mathopnolimits{gcd}", - ["grad"] = "\\mathopnolimits{grad}", - ["identity"] = "\\mathopnolimits{id}", - ["image"] = "\\mathopnolimits{image}", - ["lcm"] = "\\mathopnolimits{lcm}", - ["lim"] = "\\mathopnolimits{lim}", - ["max"] = "\\mathopnolimits{max}", - ["median"] = "\\mathopnolimits{median}", - ["min"] = "\\mathopnolimits{min}", - ["mode"] = "\\mathopnolimits{mode}", - ["mod"] = "\\mathopnolimits{mod}", - ["polar"] = "\\mathopnolimits{Polar}", - ["exp"] = "\\mathopnolimits{exp}", - ["ln"] = "\\mathopnolimits{ln}", - ["log"] = "\\mathopnolimits{log}", - ["sin"] = "\\mathopnolimits{sin}", - ["arcsin"] = "\\mathopnolimits{arcsin}", - ["sinh"] = "\\mathopnolimits{sinh}", - ["arcsinh"] = "\\mathopnolimits{arcsinh}", - ["cos"] = "\\mathopnolimits{cos}", - ["arccos"] = "\\mathopnolimits{arccos}", - ["cosh"] = "\\mathopnolimits{cosh}", - ["arccosh"] = "\\mathopnolimits{arccosh}", - ["tan"] = "\\mathopnolimits{tan}", - ["arctan"] = "\\mathopnolimits{arctan}", - ["tanh"] = "\\mathopnolimits{tanh}", - ["arctanh"] = "\\mathopnolimits{arctanh}", - ["cot"] = "\\mathopnolimits{cot}", - ["arccot"] = "\\mathopnolimits{arccot}", - ["coth"] = "\\mathopnolimits{coth}", - ["arccoth"] = "\\mathopnolimits{arccoth}", - ["csc"] = "\\mathopnolimits{csc}", - 
["arccsc"] = "\\mathopnolimits{arccsc}", - ["csch"] = "\\mathopnolimits{csch}", - ["arccsch"] = "\\mathopnolimits{arccsch}", - ["sec"] = "\\mathopnolimits{sec}", - ["arcsec"] = "\\mathopnolimits{arcsec}", - ["sech"] = "\\mathopnolimits{sech}", - ["arcsech"] = "\\mathopnolimits{arcsech}", - [" "] = "", - - ["false"] = "{\\mr false}", - ["notanumber"] = "{\\mr NaN}", - ["otherwise"] = "{\\mr otherwise}", - ["true"] = "{\\mr true}", - ["declare"] = "{\\mr declare}", - ["as"] = "{\\mr as}", -} - --- we could use a metatable or when accessing fallback on the --- key but at least we now have an overview - -local csymbols = { - arith1 = { - lcm = "lcm", - big_lcm = "lcm", - gcd = "gcd", - big_gcd = "big_gcd", - plus = "plus", - unary_minus = "minus", - minus = "minus", - times = "times", - divide = "divide", - power = "power", - abs = "abs", - root = "root", - sum = "sum", - product = "product", - }, - fns = { - domain = "domain", - range = "codomain", - image = "image", - identity = "ident", - -- left_inverse = "", - -- right_inverse = "", - inverse = "inverse", - left_compose = "compose", - lambda = "labmda", - }, - linalg1 = { - vectorproduct = "vectorproduct", - scalarproduct = "scalarproduct", - outerproduct = "outerproduct", - transpose = "transpose", - determinant = "determinant", - vector_selector = "selector", - -- matrix_selector = "matrix_selector", - }, - logic1 = { - equivalent = "equivalent", - ["not"] = "not", - ["and"] = "and", - -- big_and = "", - ["xor"] = "xor", - -- big_xor = "", - ["or"] = "or", - -- big-or = "", - implies = "implies", - ["true"] = "true", - ["false"] = "false", - }, - nums1 = { - -- based_integer = "based_integer" - rational = "rational", - inifinity = "infinity", - e = "expenonentiale", - i = "imaginaryi", - pi = "pi", - gamma = "gamma", - NaN = "NaN", - }, - relation1 = { - eq = "eq", - lt = "lt", - gt = "gt", - neq = "neq", - leq = "leq", - geq = "geq", - approx = "approx", - }, - set1 = { - cartesian_product = "cartesianproduct", - empty_set = "emptyset", - map = "map", - size = "card", - -- suchthat = "suchthat", - set = "set", - intersect = "intersect", - -- big_intersect = "", - union = "union", - -- big_union = "", - setdiff = "setdiff", - subset = "subset", - ["in"] = "in", - notin = "notin", - prsubset = "prsubset", - notsubset = "notsubset", - notprsubset = "notprsubset", - }, - veccalc1 = { - divergence = "divergence", - grad = "grad", - curl = "curl", - laplacian = "laplacian", - Laplacian = "laplacian", - }, - calculus1 = { - diff = "diff", - -- nthdiff = "", - partialdiff = "partialdiff", - int = "int", - -- defint = "defint", - }, - integer1 = { - factorof = "factorof", - factorial = "factorial", - quotient = "quotient", - remainder = "rem", - }, - linalg2 = { - vector = "vector", - matrix = "matrix", - matrixrow = "matrixrow", - }, - mathmkeys = { - -- equiv = "", - -- contentequiv = "", - -- contentequiv_strict = "", - }, - rounding1 = { - ceiling = "ceiling", - floor = "floor", - -- trunc = "trunc", - -- round = "round", - }, - setname1 = { - P = "primes", - N = "naturalnumbers", - Z = "integers", - rationals = "rationals", - R = "reals", - complexes = "complexes", - }, - complex1 = { - -- complex_cartesian = "complex_cartesian", -- ci ? - real = "real", - imaginary = "imaginary", - -- complex_polar = "complex_polar", -- ci ? 
- argument = "arg", - conjugate = "conjugate", - }, - interval1 = { -- not an apply - -- integer_interval = "integer_interval", - interval = "interval", - interval_oo = { tag = "interval", closure = "open" }, - interval_cc = { tag = "interval", closure = "closed" }, - interval_oc = { tag = "interval", closure = "open-closed" }, - interval_co = { tag = "interval", closure = "closed-open" }, - }, - linalg3 = { - -- vector = "vector.column", - -- matrixcolumn = "matrixcolumn", - -- matrix = "matrix.column", - }, - minmax1 = { - min = "min", - -- big_min = "", - max = "max", - -- big_max = "", - }, - piece1 = { - piecewise = "piecewise", - piece = "piece", - otherwise = "otherwise", - }, - error1 = { - -- unhandled_symbol = "", - -- unexpected_symbol = "", - -- unsupported_CD = "", - }, - limit1 = { - -- limit = "limit", - -- both_sides = "both_sides", - -- above = "above", - -- below = "below", - -- null = "null", - tendsto = "tendsto", - }, - list1 = { - -- map = "", - -- suchthat = "", - -- list = "list", - }, - multiset1 = { - size = { tag = "card", type = "multiset" }, - cartesian_product = { tag = "cartesianproduct", type = "multiset" }, - empty_set = { tag = "emptyset", type = "multiset" }, - -- multi_set = { tag = "multiset", type = "multiset" }, - intersect = { tag = "intersect", type = "multiset" }, - -- big_intersect = "", - union = { tag = "union", type = "multiset" }, - -- big_union = "", - setdiff = { tag = "setdiff", type = "multiset" }, - subset = { tag = "subset", type = "multiset" }, - ["in"] = { tag = "in", type = "multiset" }, - notin = { tag = "notin", type = "multiset" }, - prsubset = { tag = "prsubset", type = "multiset" }, - notsubset = { tag = "notsubset", type = "multiset" }, - notprsubset = { tag = "notprsubset", type = "multiset" }, - }, - quant1 = { - forall = "forall", - exists = "exists", - }, - s_dist = { - -- mean = "mean.dist", - -- sdev = "sdev.dist", - -- variance = "variance.dist", - -- moment = "moment.dist", - }, - s_data = { - mean = "mean", - sdev = "sdev", - variance = "vriance", - mode = "mode", - median = "median", - moment = "moment", - }, - transc1 = { - log = "log", - ln = "ln", - exp = "exp", - sin = "sin", - cos = "cos", - tan = "tan", - sec = "sec", - csc = "csc", - cot = "cot", - sinh = "sinh", - cosh = "cosh", - tanh = "tanh", - sech = "sech", - csch = "cscs", - coth = "coth", - arcsin = "arcsin", - arccos = "arccos", - arctan = "arctan", - arcsec = "arcsec", - arcscs = "arccsc", - arccot = "arccot", - arcsinh = "arcsinh", - arccosh = "arccosh", - arctanh = "arstanh", - arcsech = "arcsech", - arccsch = "arccsch", - arccoth = "arccoth", - }, -} - -function xml.functions.remapmmlcsymbol(e) - local at = e.at - local cd = at.cd - if cd then - cd = csymbols[cd] - if cd then - local tx = e.dt[1] - if tx and tx ~= "" then - local tg = cd[tx] - if tg then - at.cd = nil - at.cdbase = nil - e.dt = { } - if type(tg) == "table" then - for k, v in next, tg do - if k == "tag" then - e.tg = v - else - at[k] = v - end - end - else - e.tg = tg - end - end - end - end - end -end - -function xml.functions.remapmmlbind(e) - e.tg = "apply" -end - -function xml.functions.remapopenmath(e) - local tg = e.tg - if tg == "OMOBJ" then - e.tg = "math" - elseif tg == "OMA" then - e.tg = "apply" - elseif tg == "OMB" then - e.tg = "apply" - elseif tg == "OMS" then - local at = e.at - e.tg = "csymbol" - e.dt = { at.name or "unknown" } - at.name = nil - elseif tg == "OMV" then - local at = e.at - e.tg = "ci" - e.dt = { at.name or "unknown" } - at.name = nil - elseif tg == 
"OMI" then - e.tg = "ci" - end - e.rn = "mml" -end - -function mathml.checked_operator(str) - context(simpleoperatorremapper(str)) -end - -function mathml.stripped(str) - context(strip(str)) -end - -function mathml.mn(id,pattern) - -- maybe at some point we need to interpret the number, but - -- currently we assume an upright font - local str = xmlcontent(getid(id)) or "" - local rep = gsub(str,"&.-;","") - local rep = gsub(rep,"(%s+)",utfchar(0x205F)) -- medspace e.g.: twenty one (nbsp is not seen) - local rep = gsub(rep,".",n_replacements) - context.mn(rep) -end - -function mathml.mo(id) - local str = xmlcontent(getid(id)) or "" - local rep = gsub(str,"&.-;","") -- todo - context(simpleoperatorremapper(rep)) -end - -function mathml.mi(id) - -- we need to strip comments etc .. todo when reading in tree - local e = getid(id) - local str = e.dt - if type(str) == "string" then - local n = #str - if n == 0 then - -- nothing to do - elseif n == 1 then - local str = gsub(str[1],"&.-;","") -- bah - local rep = i_replacements[str] - if not rep then - rep = gsub(str,".",i_replacements) - end - context(rep) - -- context.mi(rep) - else - context.xmlflush(id) -- xmlsprint or so - end - else - context.xmlflush(id) -- xmlsprint or so - end -end - -function mathml.mfenced(id) -- multiple separators - id = getid(id) - local left, right, separators = id.at.open or "(", id.at.close or ")", id.at.separators or "," - local l, r = l_replacements[left], r_replacements[right] - context.enabledelimiter() - if l then - context(l_replacements[left] or o_replacements[left] or "") - else - context(o_replacements["@l"]) - context(left) - end - context.disabledelimiter() - local collected = lxml.filter(id,"/*") -- check the * - if collected then - local n = #collected - if n == 0 then - -- skip - elseif n == 1 then - xmlsprint(collected[1]) -- to be checked - else - local t = utf.split(separators,true) - for i=1,n do - xmlsprint(collected[i]) -- to be checked - if i < n then - local m = t[i] or t[#t] or "" - if m == "|" then - m = "\\enabledelimiter\\middle|\\relax\\disabledelimiter" - elseif m == doublebar then - m = "\\enabledelimiter\\middle|\\relax\\disabledelimiter" - elseif m == "{" then - m = "\\{" - elseif m == "}" then - m = "\\}" - end - context(m) - end - end - end - end - context.enabledelimiter() - if r then - context(r_replacements[right] or o_replacements[right] or "") - else - context(right) - context(o_replacements["@r"]) - end - context.disabledelimiter() -end - ---~ local function flush(e,tag,toggle) ---~ if toggle then ---~ context("^{") ---~ else ---~ context("_{") ---~ end ---~ if tag == "none" then ---~ context("{}") ---~ else ---~ xmlsprint(e.dt) ---~ end ---~ if not toggle then ---~ context("}") ---~ else ---~ context("}{}") ---~ end ---~ return not toggle ---~ end - -local function flush(e,tag,toggle) - if tag == "none" then - -- if not toggle then - context("{}") -- {} starts a new ^_ set - -- end - elseif toggle then - context("^{") - xmlsprint(e.dt) - context("}{}") -- {} starts a new ^_ set - else - context("_{") - xmlsprint(e.dt) - context("}") - end - return not toggle -end - -function mathml.mmultiscripts(id) - local done, toggle = false, false - for e in lxml.collected(id,"/*") do - local tag = e.tg - if tag == "mprescripts" then - context("{}") - done = true - elseif done then - toggle = flush(e,tag,toggle) - end - end - local done, toggle = false, false - for e in lxml.collected(id,"/*") do - local tag = e.tg - if tag == "mprescripts" then - break - elseif done then - toggle = 
flush(e,tag,toggle) - else - xmlsprint(e.dt) - done = true - end - end -end - -local columnalignments = { - left = "flushleft", - right = "flushright", - center = "middle", -} - -local rowalignments = { - top = "high", - bottom = "low", - center = "lohi", - baseline = "top", - axis = "lohi", -} - -local frametypes = { - none = "off", - solid = "on", - dashed = "on", -} - --- crazy element ... should be a proper structure instead of such a mess - -function mathml.mcolumn(root) - root = getid(root) - local matrix, numbers = { }, 0 - local function collect(m,e) - local tag = e.tg - if tag == "mi" or tag == "mn" or tag == "mo" or tag == "mtext" then - local str = xmltext(e) - str = gsub(str,"&.-;","") - for s in utfcharacters(str) do - m[#m+1] = { tag, s } - end - if tag == "mn" then - local n = utf.len(str) - if n > numbers then - numbers = n - end - end - elseif tag == "mspace" or tag == "mline" then - local str = e.at.spacing or "" - for s in utfcharacters(str) do - m[#m+1] = { tag, s } - end - -- elseif tag == "mline" then - -- m[#m+1] = { tag, e } - end - end - for e in lxml.collected(root,"/*") do - local m = { } - matrix[#matrix+1] = m - if e.tg == "mrow" then - -- only one level - for e in lxml.collected(e,"/*") do - collect(m,e) - end - else - collect(m,e) - end - end - context.halign() - context.bgroup() - context([[\hss\startimath\alignmark\stopimath\aligntab\startimath\alignmark\stopimath\cr]]) - for i=1,#matrix do - local m = matrix[i] - local mline = true - for j=1,#m do - if m[j][1] ~= "mline" then - mline = false - break - end - end - if mline then - context.noalign([[\obeydepth\nointerlineskip]]) - end - for j=1,#m do - local mm = m[j] - local tag, chr = mm[1], mm[2] - if tag == "mline" then - -- This code is under construction ... I need some real motivation - -- to deal with this kind of crap. 
---~ local n, p = true, true ---~ for c=1,#matrix do ---~ local mc = matrix[c][j] ---~ if mc then ---~ mc = mc[2] ---~ if type(mc) ~= "string" then ---~ n, p = false, false ---~ break ---~ elseif find(mc,"^[%d ]$") then -- rangecheck is faster ---~ -- digit ---~ elseif not find(mc,"^[%.%,]$") then -- rangecheck is faster ---~ -- punctuation ---~ else ---~ n = false ---~ break ---~ end ---~ end ---~ end ---~ if n then ---~ chr = "\\mmlmcolumndigitrule" ---~ elseif p then ---~ chr = "\\mmlmcolumnpunctuationrule" ---~ else ---~ chr = "\\mmlmcolumnsymbolrule" -- should be widest char ---~ end - chr = "\\hrulefill" - elseif tag == "mspace" then - chr = "\\mmlmcolumndigitspace" -- utfchar(0x2007) - end - if j == numbers + 1 then - context("\\aligntab") - end - local nchr = n_replacements[chr] - context(nchr or chr) - end - context.crcr() - end - context.egroup() -end - -local spacesplitter = lpeg.tsplitat(" ") - -function mathml.mtable(root) - -- todo: align, rowspacing, columnspacing, rowlines, columnlines - root = getid(root) - local at = root.at - local rowalign = at.rowalign - local columnalign = at.columnalign - local frame = at.frame - local rowaligns = rowalign and lpegmatch(spacesplitter,rowalign) - local columnaligns = columnalign and lpegmatch(spacesplitter,columnalign) - local frames = frame and lpegmatch(spacesplitter,frame) - local framespacing = at.framespacing or "0pt" - local framespacing = at.framespacing or "-\\ruledlinewidth" -- make this an option - - context.bTABLE { frame = frametypes[frame or "none"] or "off", offset = framespacing } - for e in lxml.collected(root,"/(mml:mtr|mml:mlabeledtr)") do - context.bTR() - local at = e.at - local col = 0 - local rfr = at.frame or (frames and frames [#frames]) - local rra = at.rowalign or (rowaligns and rowaligns [#rowaligns]) - local rca = at.columnalign or (columnaligns and columnaligns[#columnaligns]) - local ignorelabel = e.tg == "mlabeledtr" - for e in lxml.collected(e,"/mml:mtd") do -- nested we can use xml.collected - col = col + 1 - if ignorelabel and col == 1 then - -- get rid of label, should happen at the document level - else - local at = e.at - local rowspan, columnspan = at.rowspan or 1, at.columnspan or 1 - local cra = rowalignments [at.rowalign or (rowaligns and rowaligns [col]) or rra or "center"] or "lohi" - local cca = columnalignments[at.columnalign or (columnaligns and columnaligns[col]) or rca or "center"] or "middle" - local cfr = frametypes [at.frame or (frames and frames [col]) or rfr or "none" ] or "off" - context.bTD { align = format("{%s,%s}",cra,cca), frame = cfr, nx = columnspan, ny = rowspan } - context.startimath() - context.ignorespaces() - xmlcprint(e) - context.stopimath() - context.removeunwantedspaces() - context.eTD() - end - end - -- if e.tg == "mlabeledtr" then - -- context.bTD() - -- xmlcprint(xml.first(e,"/!mml:mtd")) - -- context.eTD() - -- end - context.eTR() - end - context.eTABLE() -end - -function mathml.csymbol(root) - root = getid(root) - local at = root.at - local encoding = at.encoding or "" - local hash = url.hashed(lower(at.definitionUrl or "")) - local full = hash.original or "" - local base = hash.path or "" - local text = strip(xmltext(root) or "") - context.mmlapplycsymbol(full,base,encoding,text) -end - -function mathml.menclosepattern(root) - root = getid(root) - local a = root.at.notation - if a and a ~= "" then - context("mml:enclose:",(gsub(a," +",",mml:enclose:"))) - end -end - -function xml.is_element(e,name) - return type(e) == "table" and (not name or e.tg == name) 
-end - -function mathml.cpolar_a(root) - root = getid(root) - local dt = root.dt - context.mathopnolimits("Polar") - context.left(false,"(") - for k=1,#dt do - local dk = dt[k] - if xml.is_element(dk,"sep") then - context(",") - else - xmlsprint(dk) - end - end - context.right(false,")") -end +if not modules then modules = { } end modules ['x-mathml'] = { + version = 1.001, + comment = "companion to x-mathml.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This needs an upgrade to the latest greatest mechanisms. + +local type, next = type, next +local format, lower, find, gsub = string.format, string.lower, string.find, string.gsub +local strip = string.strip +local xmlsprint, xmlcprint, xmltext, xmlcontent = xml.sprint, xml.cprint, xml.text, xml.content +local getid = lxml.getid +local utfchar, utfcharacters, utfvalues = utf.char, utf.characters, utf.values +local lpegmatch = lpeg.match + +local mathml = { } +moduledata.mathml = mathml +lxml.mathml = mathml -- for the moment + +-- an alternative is to remap to private codes, where we can have +-- different properties .. to be done; this will move and become +-- generic; we can then make the private ones active in math mode + +-- todo: handle opening/closing mo's here ... presentation mml is such a mess ... + +characters.registerentities() + +local doublebar = utfchar(0x2016) + +local n_replacements = { +-- [" "] = utfchar(0x2002), -- "&textspace;" -> tricky, no &; in mkiv + ["."] = "{.}", + [","] = "{,}", + [" "] = "", +} + +local l_replacements = { -- in main table + ["|"] = "\\mmlleftdelimiter\\vert", + ["{"] = "\\mmlleftdelimiter\\lbrace", + ["("] = "\\mmlleftdelimiter(", + ["["] = "\\mmlleftdelimiter[", + ["<"] = "\\mmlleftdelimiter<", + [doublebar] = "\\mmlleftdelimiter\\Vert", +} +local r_replacements = { -- in main table + ["|"] = "\\mmlrightdelimiter\\vert", + ["}"] = "\\mmlrightdelimiter\\rbrace", + [")"] = "\\mmlrightdelimiter)", + ["]"] = "\\mmlrightdelimiter]", + [">"] = "\\mmlrightdelimiter>", + [doublebar] = "\\mmlrightdelimiter\\Vert", +} + +-- todo: play with asciimode and avoid mmlchar + +local o_replacements = { -- in main table + ["@l"] = "\\mmlleftdelimiter.", + ["@r"] = "\\mmlrightdelimiter.", + ["{"] = "\\mmlleftdelimiter \\lbrace", + ["}"] = "\\mmlrightdelimiter\\rbrace", + ["|"] = "\\mmlleftorrightdelimiter\\vert", + [doublebar] = "\\mmlleftorrightdelimiter\\Vert", + ["("] = "\\mmlleftdelimiter(", + [")"] = "\\mmlrightdelimiter)", + ["["] = "\\mmlleftdelimiter[", + ["]"] = "\\mmlrightdelimiter]", + -- ["<"] = "\\mmlleftdelimiter<", + -- [">"] = "\\mmlrightdelimiter>", + ["#"] = "\\mmlchar{35}", + ["$"] = "\\mmlchar{36}", -- $ + ["%"] = "\\mmlchar{37}", + ["&"] = "\\mmlchar{38}", + ["^"] = "\\mmlchar{94}{}", -- strange, sometimes luatex math sees the char instead of \char + ["_"] = "\\mmlchar{95}{}", -- so we need the {} + ["~"] = "\\mmlchar{126}", + [" "] = "", + ["°"] = "^\\circ", -- hack + + -- [utfchar(0xF103C)] = "\\mmlleftdelimiter<", + [utfchar(0xF1026)] = "\\mmlchar{38}", + -- [utfchar(0xF103E)] = "\\mmlleftdelimiter>", + +} + +local simpleoperatorremapper = utf.remapper(o_replacements) + +--~ languages.data.labels.functions + +local i_replacements = { + ["sin"] = "\\mathopnolimits{sin}", + ["cos"] = "\\mathopnolimits{cos}", + ["abs"] = "\\mathopnolimits{abs}", + ["arg"] = "\\mathopnolimits{arg}", + ["codomain"] = "\\mathopnolimits{codomain}", + ["curl"] = "\\mathopnolimits{curl}", + 
["determinant"] = "\\mathopnolimits{det}", + ["divergence"] = "\\mathopnolimits{div}", + ["domain"] = "\\mathopnolimits{domain}", + ["gcd"] = "\\mathopnolimits{gcd}", + ["grad"] = "\\mathopnolimits{grad}", + ["identity"] = "\\mathopnolimits{id}", + ["image"] = "\\mathopnolimits{image}", + ["lcm"] = "\\mathopnolimits{lcm}", + ["lim"] = "\\mathopnolimits{lim}", + ["max"] = "\\mathopnolimits{max}", + ["median"] = "\\mathopnolimits{median}", + ["min"] = "\\mathopnolimits{min}", + ["mode"] = "\\mathopnolimits{mode}", + ["mod"] = "\\mathopnolimits{mod}", + ["polar"] = "\\mathopnolimits{Polar}", + ["exp"] = "\\mathopnolimits{exp}", + ["ln"] = "\\mathopnolimits{ln}", + ["log"] = "\\mathopnolimits{log}", + ["sin"] = "\\mathopnolimits{sin}", + ["arcsin"] = "\\mathopnolimits{arcsin}", + ["sinh"] = "\\mathopnolimits{sinh}", + ["arcsinh"] = "\\mathopnolimits{arcsinh}", + ["cos"] = "\\mathopnolimits{cos}", + ["arccos"] = "\\mathopnolimits{arccos}", + ["cosh"] = "\\mathopnolimits{cosh}", + ["arccosh"] = "\\mathopnolimits{arccosh}", + ["tan"] = "\\mathopnolimits{tan}", + ["arctan"] = "\\mathopnolimits{arctan}", + ["tanh"] = "\\mathopnolimits{tanh}", + ["arctanh"] = "\\mathopnolimits{arctanh}", + ["cot"] = "\\mathopnolimits{cot}", + ["arccot"] = "\\mathopnolimits{arccot}", + ["coth"] = "\\mathopnolimits{coth}", + ["arccoth"] = "\\mathopnolimits{arccoth}", + ["csc"] = "\\mathopnolimits{csc}", + ["arccsc"] = "\\mathopnolimits{arccsc}", + ["csch"] = "\\mathopnolimits{csch}", + ["arccsch"] = "\\mathopnolimits{arccsch}", + ["sec"] = "\\mathopnolimits{sec}", + ["arcsec"] = "\\mathopnolimits{arcsec}", + ["sech"] = "\\mathopnolimits{sech}", + ["arcsech"] = "\\mathopnolimits{arcsech}", + [" "] = "", + + ["false"] = "{\\mr false}", + ["notanumber"] = "{\\mr NaN}", + ["otherwise"] = "{\\mr otherwise}", + ["true"] = "{\\mr true}", + ["declare"] = "{\\mr declare}", + ["as"] = "{\\mr as}", +} + +-- we could use a metatable or when accessing fallback on the +-- key but at least we now have an overview + +local csymbols = { + arith1 = { + lcm = "lcm", + big_lcm = "lcm", + gcd = "gcd", + big_gcd = "big_gcd", + plus = "plus", + unary_minus = "minus", + minus = "minus", + times = "times", + divide = "divide", + power = "power", + abs = "abs", + root = "root", + sum = "sum", + product = "product", + }, + fns = { + domain = "domain", + range = "codomain", + image = "image", + identity = "ident", + -- left_inverse = "", + -- right_inverse = "", + inverse = "inverse", + left_compose = "compose", + lambda = "labmda", + }, + linalg1 = { + vectorproduct = "vectorproduct", + scalarproduct = "scalarproduct", + outerproduct = "outerproduct", + transpose = "transpose", + determinant = "determinant", + vector_selector = "selector", + -- matrix_selector = "matrix_selector", + }, + logic1 = { + equivalent = "equivalent", + ["not"] = "not", + ["and"] = "and", + -- big_and = "", + ["xor"] = "xor", + -- big_xor = "", + ["or"] = "or", + -- big-or = "", + implies = "implies", + ["true"] = "true", + ["false"] = "false", + }, + nums1 = { + -- based_integer = "based_integer" + rational = "rational", + inifinity = "infinity", + e = "expenonentiale", + i = "imaginaryi", + pi = "pi", + gamma = "gamma", + NaN = "NaN", + }, + relation1 = { + eq = "eq", + lt = "lt", + gt = "gt", + neq = "neq", + leq = "leq", + geq = "geq", + approx = "approx", + }, + set1 = { + cartesian_product = "cartesianproduct", + empty_set = "emptyset", + map = "map", + size = "card", + -- suchthat = "suchthat", + set = "set", + intersect = "intersect", + -- big_intersect = "", + 
union = "union", + -- big_union = "", + setdiff = "setdiff", + subset = "subset", + ["in"] = "in", + notin = "notin", + prsubset = "prsubset", + notsubset = "notsubset", + notprsubset = "notprsubset", + }, + veccalc1 = { + divergence = "divergence", + grad = "grad", + curl = "curl", + laplacian = "laplacian", + Laplacian = "laplacian", + }, + calculus1 = { + diff = "diff", + -- nthdiff = "", + partialdiff = "partialdiff", + int = "int", + -- defint = "defint", + }, + integer1 = { + factorof = "factorof", + factorial = "factorial", + quotient = "quotient", + remainder = "rem", + }, + linalg2 = { + vector = "vector", + matrix = "matrix", + matrixrow = "matrixrow", + }, + mathmkeys = { + -- equiv = "", + -- contentequiv = "", + -- contentequiv_strict = "", + }, + rounding1 = { + ceiling = "ceiling", + floor = "floor", + -- trunc = "trunc", + -- round = "round", + }, + setname1 = { + P = "primes", + N = "naturalnumbers", + Z = "integers", + rationals = "rationals", + R = "reals", + complexes = "complexes", + }, + complex1 = { + -- complex_cartesian = "complex_cartesian", -- ci ? + real = "real", + imaginary = "imaginary", + -- complex_polar = "complex_polar", -- ci ? + argument = "arg", + conjugate = "conjugate", + }, + interval1 = { -- not an apply + -- integer_interval = "integer_interval", + interval = "interval", + interval_oo = { tag = "interval", closure = "open" }, + interval_cc = { tag = "interval", closure = "closed" }, + interval_oc = { tag = "interval", closure = "open-closed" }, + interval_co = { tag = "interval", closure = "closed-open" }, + }, + linalg3 = { + -- vector = "vector.column", + -- matrixcolumn = "matrixcolumn", + -- matrix = "matrix.column", + }, + minmax1 = { + min = "min", + -- big_min = "", + max = "max", + -- big_max = "", + }, + piece1 = { + piecewise = "piecewise", + piece = "piece", + otherwise = "otherwise", + }, + error1 = { + -- unhandled_symbol = "", + -- unexpected_symbol = "", + -- unsupported_CD = "", + }, + limit1 = { + -- limit = "limit", + -- both_sides = "both_sides", + -- above = "above", + -- below = "below", + -- null = "null", + tendsto = "tendsto", + }, + list1 = { + -- map = "", + -- suchthat = "", + -- list = "list", + }, + multiset1 = { + size = { tag = "card", type = "multiset" }, + cartesian_product = { tag = "cartesianproduct", type = "multiset" }, + empty_set = { tag = "emptyset", type = "multiset" }, + -- multi_set = { tag = "multiset", type = "multiset" }, + intersect = { tag = "intersect", type = "multiset" }, + -- big_intersect = "", + union = { tag = "union", type = "multiset" }, + -- big_union = "", + setdiff = { tag = "setdiff", type = "multiset" }, + subset = { tag = "subset", type = "multiset" }, + ["in"] = { tag = "in", type = "multiset" }, + notin = { tag = "notin", type = "multiset" }, + prsubset = { tag = "prsubset", type = "multiset" }, + notsubset = { tag = "notsubset", type = "multiset" }, + notprsubset = { tag = "notprsubset", type = "multiset" }, + }, + quant1 = { + forall = "forall", + exists = "exists", + }, + s_dist = { + -- mean = "mean.dist", + -- sdev = "sdev.dist", + -- variance = "variance.dist", + -- moment = "moment.dist", + }, + s_data = { + mean = "mean", + sdev = "sdev", + variance = "vriance", + mode = "mode", + median = "median", + moment = "moment", + }, + transc1 = { + log = "log", + ln = "ln", + exp = "exp", + sin = "sin", + cos = "cos", + tan = "tan", + sec = "sec", + csc = "csc", + cot = "cot", + sinh = "sinh", + cosh = "cosh", + tanh = "tanh", + sech = "sech", + csch = "cscs", + coth = "coth", + 
arcsin = "arcsin", + arccos = "arccos", + arctan = "arctan", + arcsec = "arcsec", + arcscs = "arccsc", + arccot = "arccot", + arcsinh = "arcsinh", + arccosh = "arccosh", + arctanh = "arstanh", + arcsech = "arcsech", + arccsch = "arccsch", + arccoth = "arccoth", + }, +} + +function xml.functions.remapmmlcsymbol(e) + local at = e.at + local cd = at.cd + if cd then + cd = csymbols[cd] + if cd then + local tx = e.dt[1] + if tx and tx ~= "" then + local tg = cd[tx] + if tg then + at.cd = nil + at.cdbase = nil + e.dt = { } + if type(tg) == "table" then + for k, v in next, tg do + if k == "tag" then + e.tg = v + else + at[k] = v + end + end + else + e.tg = tg + end + end + end + end + end +end + +function xml.functions.remapmmlbind(e) + e.tg = "apply" +end + +function xml.functions.remapopenmath(e) + local tg = e.tg + if tg == "OMOBJ" then + e.tg = "math" + elseif tg == "OMA" then + e.tg = "apply" + elseif tg == "OMB" then + e.tg = "apply" + elseif tg == "OMS" then + local at = e.at + e.tg = "csymbol" + e.dt = { at.name or "unknown" } + at.name = nil + elseif tg == "OMV" then + local at = e.at + e.tg = "ci" + e.dt = { at.name or "unknown" } + at.name = nil + elseif tg == "OMI" then + e.tg = "ci" + end + e.rn = "mml" +end + +function mathml.checked_operator(str) + context(simpleoperatorremapper(str)) +end + +function mathml.stripped(str) + context(strip(str)) +end + +function mathml.mn(id,pattern) + -- maybe at some point we need to interpret the number, but + -- currently we assume an upright font + local str = xmlcontent(getid(id)) or "" + local rep = gsub(str,"&.-;","") + local rep = gsub(rep,"(%s+)",utfchar(0x205F)) -- medspace e.g.: twenty one (nbsp is not seen) + local rep = gsub(rep,".",n_replacements) + context.mn(rep) +end + +function mathml.mo(id) + local str = xmlcontent(getid(id)) or "" + local rep = gsub(str,"&.-;","") -- todo + context(simpleoperatorremapper(rep)) +end + +function mathml.mi(id) + -- we need to strip comments etc .. 
todo when reading in tree + local e = getid(id) + local str = e.dt + if type(str) == "string" then + local n = #str + if n == 0 then + -- nothing to do + elseif n == 1 then + local str = gsub(str[1],"&.-;","") -- bah + local rep = i_replacements[str] + if not rep then + rep = gsub(str,".",i_replacements) + end + context(rep) + -- context.mi(rep) + else + context.xmlflush(id) -- xmlsprint or so + end + else + context.xmlflush(id) -- xmlsprint or so + end +end + +function mathml.mfenced(id) -- multiple separators + id = getid(id) + local left, right, separators = id.at.open or "(", id.at.close or ")", id.at.separators or "," + local l, r = l_replacements[left], r_replacements[right] + context.enabledelimiter() + if l then + context(l_replacements[left] or o_replacements[left] or "") + else + context(o_replacements["@l"]) + context(left) + end + context.disabledelimiter() + local collected = lxml.filter(id,"/*") -- check the * + if collected then + local n = #collected + if n == 0 then + -- skip + elseif n == 1 then + xmlsprint(collected[1]) -- to be checked + else + local t = utf.split(separators,true) + for i=1,n do + xmlsprint(collected[i]) -- to be checked + if i < n then + local m = t[i] or t[#t] or "" + if m == "|" then + m = "\\enabledelimiter\\middle|\\relax\\disabledelimiter" + elseif m == doublebar then + m = "\\enabledelimiter\\middle|\\relax\\disabledelimiter" + elseif m == "{" then + m = "\\{" + elseif m == "}" then + m = "\\}" + end + context(m) + end + end + end + end + context.enabledelimiter() + if r then + context(r_replacements[right] or o_replacements[right] or "") + else + context(right) + context(o_replacements["@r"]) + end + context.disabledelimiter() +end + +--~ local function flush(e,tag,toggle) +--~ if toggle then +--~ context("^{") +--~ else +--~ context("_{") +--~ end +--~ if tag == "none" then +--~ context("{}") +--~ else +--~ xmlsprint(e.dt) +--~ end +--~ if not toggle then +--~ context("}") +--~ else +--~ context("}{}") +--~ end +--~ return not toggle +--~ end + +local function flush(e,tag,toggle) + if tag == "none" then + -- if not toggle then + context("{}") -- {} starts a new ^_ set + -- end + elseif toggle then + context("^{") + xmlsprint(e.dt) + context("}{}") -- {} starts a new ^_ set + else + context("_{") + xmlsprint(e.dt) + context("}") + end + return not toggle +end + +function mathml.mmultiscripts(id) + local done, toggle = false, false + for e in lxml.collected(id,"/*") do + local tag = e.tg + if tag == "mprescripts" then + context("{}") + done = true + elseif done then + toggle = flush(e,tag,toggle) + end + end + local done, toggle = false, false + for e in lxml.collected(id,"/*") do + local tag = e.tg + if tag == "mprescripts" then + break + elseif done then + toggle = flush(e,tag,toggle) + else + xmlsprint(e.dt) + done = true + end + end +end + +local columnalignments = { + left = "flushleft", + right = "flushright", + center = "middle", +} + +local rowalignments = { + top = "high", + bottom = "low", + center = "lohi", + baseline = "top", + axis = "lohi", +} + +local frametypes = { + none = "off", + solid = "on", + dashed = "on", +} + +-- crazy element ... 
should be a proper structure instead of such a mess + +function mathml.mcolumn(root) + root = getid(root) + local matrix, numbers = { }, 0 + local function collect(m,e) + local tag = e.tg + if tag == "mi" or tag == "mn" or tag == "mo" or tag == "mtext" then + local str = xmltext(e) + str = gsub(str,"&.-;","") + for s in utfcharacters(str) do + m[#m+1] = { tag, s } + end + if tag == "mn" then + local n = utf.len(str) + if n > numbers then + numbers = n + end + end + elseif tag == "mspace" or tag == "mline" then + local str = e.at.spacing or "" + for s in utfcharacters(str) do + m[#m+1] = { tag, s } + end + -- elseif tag == "mline" then + -- m[#m+1] = { tag, e } + end + end + for e in lxml.collected(root,"/*") do + local m = { } + matrix[#matrix+1] = m + if e.tg == "mrow" then + -- only one level + for e in lxml.collected(e,"/*") do + collect(m,e) + end + else + collect(m,e) + end + end + context.halign() + context.bgroup() + context([[\hss\startimath\alignmark\stopimath\aligntab\startimath\alignmark\stopimath\cr]]) + for i=1,#matrix do + local m = matrix[i] + local mline = true + for j=1,#m do + if m[j][1] ~= "mline" then + mline = false + break + end + end + if mline then + context.noalign([[\obeydepth\nointerlineskip]]) + end + for j=1,#m do + local mm = m[j] + local tag, chr = mm[1], mm[2] + if tag == "mline" then + -- This code is under construction ... I need some real motivation + -- to deal with this kind of crap. +--~ local n, p = true, true +--~ for c=1,#matrix do +--~ local mc = matrix[c][j] +--~ if mc then +--~ mc = mc[2] +--~ if type(mc) ~= "string" then +--~ n, p = false, false +--~ break +--~ elseif find(mc,"^[%d ]$") then -- rangecheck is faster +--~ -- digit +--~ elseif not find(mc,"^[%.%,]$") then -- rangecheck is faster +--~ -- punctuation +--~ else +--~ n = false +--~ break +--~ end +--~ end +--~ end +--~ if n then +--~ chr = "\\mmlmcolumndigitrule" +--~ elseif p then +--~ chr = "\\mmlmcolumnpunctuationrule" +--~ else +--~ chr = "\\mmlmcolumnsymbolrule" -- should be widest char +--~ end + chr = "\\hrulefill" + elseif tag == "mspace" then + chr = "\\mmlmcolumndigitspace" -- utfchar(0x2007) + end + if j == numbers + 1 then + context("\\aligntab") + end + local nchr = n_replacements[chr] + context(nchr or chr) + end + context.crcr() + end + context.egroup() +end + +local spacesplitter = lpeg.tsplitat(" ") + +function mathml.mtable(root) + -- todo: align, rowspacing, columnspacing, rowlines, columnlines + root = getid(root) + local at = root.at + local rowalign = at.rowalign + local columnalign = at.columnalign + local frame = at.frame + local rowaligns = rowalign and lpegmatch(spacesplitter,rowalign) + local columnaligns = columnalign and lpegmatch(spacesplitter,columnalign) + local frames = frame and lpegmatch(spacesplitter,frame) + local framespacing = at.framespacing or "0pt" + local framespacing = at.framespacing or "-\\ruledlinewidth" -- make this an option + + context.bTABLE { frame = frametypes[frame or "none"] or "off", offset = framespacing } + for e in lxml.collected(root,"/(mml:mtr|mml:mlabeledtr)") do + context.bTR() + local at = e.at + local col = 0 + local rfr = at.frame or (frames and frames [#frames]) + local rra = at.rowalign or (rowaligns and rowaligns [#rowaligns]) + local rca = at.columnalign or (columnaligns and columnaligns[#columnaligns]) + local ignorelabel = e.tg == "mlabeledtr" + for e in lxml.collected(e,"/mml:mtd") do -- nested we can use xml.collected + col = col + 1 + if ignorelabel and col == 1 then + -- get rid of label, should happen at the 
document level + else + local at = e.at + local rowspan, columnspan = at.rowspan or 1, at.columnspan or 1 + local cra = rowalignments [at.rowalign or (rowaligns and rowaligns [col]) or rra or "center"] or "lohi" + local cca = columnalignments[at.columnalign or (columnaligns and columnaligns[col]) or rca or "center"] or "middle" + local cfr = frametypes [at.frame or (frames and frames [col]) or rfr or "none" ] or "off" + context.bTD { align = format("{%s,%s}",cra,cca), frame = cfr, nx = columnspan, ny = rowspan } + context.startimath() + context.ignorespaces() + xmlcprint(e) + context.stopimath() + context.removeunwantedspaces() + context.eTD() + end + end + -- if e.tg == "mlabeledtr" then + -- context.bTD() + -- xmlcprint(xml.first(e,"/!mml:mtd")) + -- context.eTD() + -- end + context.eTR() + end + context.eTABLE() +end + +function mathml.csymbol(root) + root = getid(root) + local at = root.at + local encoding = at.encoding or "" + local hash = url.hashed(lower(at.definitionUrl or "")) + local full = hash.original or "" + local base = hash.path or "" + local text = strip(xmltext(root) or "") + context.mmlapplycsymbol(full,base,encoding,text) +end + +function mathml.menclosepattern(root) + root = getid(root) + local a = root.at.notation + if a and a ~= "" then + context("mml:enclose:",(gsub(a," +",",mml:enclose:"))) + end +end + +function xml.is_element(e,name) + return type(e) == "table" and (not name or e.tg == name) +end + +function mathml.cpolar_a(root) + root = getid(root) + local dt = root.dt + context.mathopnolimits("Polar") + context.left(false,"(") + for k=1,#dt do + local dk = dt[k] + if xml.is_element(dk,"sep") then + context(",") + else + xmlsprint(dk) + end + end + context.right(false,")") +end diff --git a/tex/context/patterns/lang-af.lua b/tex/context/patterns/lang-af.lua index 6ebda0cea..0ba36eecc 100644 --- a/tex/context/patterns/lang-af.lua +++ b/tex/context/patterns/lang-af.lua @@ -2,7 +2,9 @@ return { ["comment"]="% generated by mtxrun --script pattern --convert", ["exceptions"]={ ["characters"]="adlns", - ["data"]="sandaal", + ["compression"]="zlib", + ["data"]="xÚ+NÌKILÌ\1\0\11y\2Õ", + ["length"]=7, ["n"]=1, }, ["metadata"]={ @@ -38,7 +40,148 @@ return { }, ["patterns"]={ ["characters"]="'-abcdefghijklmnopqrstuvwxyzäèêëîïôöûü’", - ["data"]=".aan5s4 .aä7lawa .a6bc-b .a4b5la .ab7salo .a6b-ja .ac7cra. .a6farm .af7arm. .a6feet .af7eet. .a4f5en .a6fets .af7ets. .a6foes .af7oes. .a4f5oo .a4f5ra .af6ro' .af6ro’ .a7fro's .a7fro’s .af6ro- .a7fro-h .a6fry. .af3s .ag6aam .agte6r5 .a6guur .a9ha. .ah7lers .a3kw .a6leer .al7eer. .alf4 .al7fagr .al5fr .al6lda .a4l3o .al6oïe .a7loïen .al3p .al5st .al7thea .al7twee .al6zhe .amp4s .amps5w .a6naër .an7aëro .an6cpl .and4 .an5dr .ang4 .an5gl .angs5 .a4n5io .an7thro .a3pr .ap7side .a5rag .ara6p. .ar7thur .ar6zbe .as7jas. .a6snog .a6sof. .a5sti .a7straa .a7s6tral .at6hol .a7thol. .a5tsj .atte4 .au7drey .b6aanv .ba6din .ba4d5o .ba7loi. .ba7ragw .ba7rins .ba6sek .ba7tho. .be7deks .be6kaf .bek7af. .be5la .be7lol. .be7skos .be7thel .be7thul .bi7sho. .bli4 .blus5 .bo7kerf .bo7kies .bo7kors .bo7maat .b4on .bo7plaa .bo5ro .bo7sor. .bo5sta .bo7trit .bo7tswa .bo7uit. .bout5j .b4re .bu6eno .bu6lol .bu7thel .by6ldr .by6lho .by6lne .by6lpi .by7port .bys4 .by6tal .ca7thy. .ca7yenn .chlo7e. .ci6rca .ci7trus .cos7ta. .cy6pri .d2 .da7gon. .dag5s .da6kat .da6koo .da7tage .da6tji .dat7jie .da6wki .de6k7laa .de6klo .de6kwe .de5la .de7roga .de6sal .de6sok .de4sp .diep5l .di6jks .di4si .di7thak .do4m5a .do4m5o .dor7ste. 
.dr6oef .dun5s .du6pre .dut5j .dy7spie .e6bcu. .ed5wa .ed7win. .eer6sk .ee4t .e6fron .ef7ron. .eg7gofo .e6indu .ei5st .ek4s5k .ek7sopa .ek7sord .eks7tri .eks7tro .en7dres .enk4 .en5kl .e6noft .en7ofta .en4t5j .en7topt .ep7soms .er4d5a .er6dwo .er6fle .er6foo .er6inv .ern4 .er4t4 .er5te .ert5j .ert7se. .erts5w .e2s .e9sau .e4s3k .e3so .es3p .es8p. .es6pma .es3t .es6tco .es6tni .es5tr .e7tage. .et4sn .eur5a .eu7stac .ex7odus .e6zra. .f2 .fo6chv .fo6wle .f4ri .fy6tji .g2 .ga7lage .ga7lago .ga6lap .ga6loo .g6arbo .ga6sen .ge7dart .ge3g .ge7geks .ge7guil .gekun5 .gekuns6 .ge5la .ge7mopp .ge7muit .ge7nève .ge7rogg .ges4 .ge7sjab .ge7sjar .ge5sk .ge5so .ges7pe. .ge7sper. .ge7steg .gif3 .gi7gagr .gi6sen .gly3 .gly5k .g6lyna .g4oo .gou7da. .gr6äbe .g6ruba .gui7do. .hang5s .he6blu .he6gor .he6gra .h6eind .hek5o .he7rakl .he6r5en .he6wle .hi8v. .ho6fet .ho6laa .ho6loo .hooi5 .ho7taze .ho4t5o .hy6gro .ic7teru .i4gl .ile7us. .i2n1 .i9n8a. .in6ari .i7narie .ind4 .in7dwar .ing4 .in5gr .in5gw .in6iti .in5kl .in6kly .in5kn .in5kw .in6osi .i7nosie .in3s4 .in7snee .in7twyf .i5raa .i3sa .i4sk .i3so .ja6gli .jah7we. .ja6spa .ja7taga .j6äger .je7sopp .jo7dofo .jo7safa .ju6kos .juk7os. .j6ü6rge .jy6sel .k2 .kaar4 .kade4 .kadet5 .k4af .kaï7ro. .ka7nont .ka6pla .ka7plak .k6arbe .ka7thar .ka7thu. .ka6toë .kat7oë. .ka6tui .ke6ple .ker6k5a .ker6k5l .kerk5r .ker6sa .ker6sl .ker6s5p .ke4s5t .kie6st .ki6pli .ki4r .kit7se. .k4la .k6leyn .k4li .klip5 .knik5 .kn6opn .ko6maa .ko6maf .k4op .kope4 .koper7a .ko6pla .kop5o .ko7rag. .kor6st .kors7te. .k6rak. .kr6üge .kryt5 .ku7mon. .k4we .k4wo .ky7otop .l'7etji .l’7etji .la6eti .la6kwa .la5sa .lei5s4 .lek7oë. .le6poo .le5pr .le7shab .le6son .le4sp .les5t .le6suu .lig5e .li4gi .li6gom .li6gre .li7pase .l6loyd .lo6chn .lof7ui. .lo6glê .l6ontd .los5k .lu6gen .lui5sl .l6üder .m2 .m'7etji .m’7etji .ma6cdo .ma6nal .ma6nur .ma7stek .ma7thes .ma6zda .mel6k5a .mel6k5l .mer6k5l .mes5m .me4sw .me6tem .mi6dos .mi6rba .mi7traa .m4ne .mo7djad .mo7flam .mo6sin .mo4sk .mu4e .my6nen .my6n5in .my7unis .n2 .n6aand .na5fl .na6gro .na7groe .na7smaa .na7stor .na7uurs .ne4k5a .ne4k5o .ne6kri .nek7rin .ne6kys .ne4s3 .ne7serh .n6etik .ne4t5j .ne6tru .ne6wca .ne6wfo .ne6wla .ne6wma .ni4e .ni6jho .ni6rva .nix7on. .n6kosa .noet4 .noe5tj .no6gee .no4k .n6ondu .nu4l .ny7lont .oe5kr .oe4r .oe7ralg .oe7rang .oer7os. .oe4s3 .o4gl .oh7rigs .o6klah .ok7laho .ol6ieu .o7lieui .oms4 .o2n1 .ond6ui .on7duit .o6nemo .on6ias .o7nias. .on3k .on7parm .on3s4 .on6she .on6sse .on6t7eer .on6t5er .on4tr .ont7ras .ont5ri .o9nus .on6us. .oon4 .oon7de. .o4op .oor5n .oor5s4 .oo4s .o6peet .op7eet. .o6peg. .o6pein .o6p5erd .o4pof .o4pr .op5ra .ops4 .op7smuk .o7ragie .or6kne .o3ro .orto5 .o4sk .os5ko .os7oog. .ot6hel .o7thell .ou6doo .ou7nôi. .ou5tj .p2 .pa4d3 .pa7die. .pa6vlo .pe5la .pel6sk .per6st .pe4sk .pe4st .pie6tj .pi7laf. .pit5s .p4la .po6dzo .p6oefe .poen4 .p6ontw .po6sad .p4re .pu6tad .py6paa .py6pla .py6pol .pyp5r .py7thon .r2 .r'7etji .r’7etji .r6aard .ra7dart .ra6seg .ras7eg. .re7aumu .re6gru .rek5s .re6mas .rem7as. .re6mco .re4sl .rie4t .riet5j .riet5r .ri6ple .roc7ky. .ro6gak .ron7do. .rond5s .ro5py .ros5t .ro6tre .ro6wli .ru7klip .ruk4o .ru7kope .ru7staa .ru6suu .ry4k5a .ry6ste .s6aans .s4af .s4ag .sa7gopa .s6akty .s4am .sa6vlo .s4ca .se6an. .see5ra .see7ys. .se7khuk .se6laa .se6lop .se7reni .se6sle .ses5t .se6suu .se6tap .se4tr .sex5y .s6fale .s4fi .s4gr .s4ha4 .s4he .s4hi .s4ho .s4hu .s4in .si6nes .si7pho. .si7rag. 
.s4ja .s4ka .s4ke .s4kl .s4ko .s4kr .s4ku .slag5 .s4ma .s4me .s4mi .s4mo .s4mu .s6nags .s4ne .sod4 .so7dafa .so7dwan .so7iets .so6kop .some4 .s6oms. .s4on .so6neg .s4op .so6pek .so7phok .so7ross .s4pe .s4pl .spo4g .s6pren .s4py .s8ri. .s4ta .s6temp .ste7rol .ster6ta .ster6t7j .s4ti .s4to .straf5 .s6trei .s6tuar .stuc5 .su7biet .sub5m .sub5p .su8e. .s4ui .su5kr .su7ther .su7tra. .s4we .s4wi .s4wo .sy1 .sy6lvi .sy7nagr .sy7slag .t2 .t6afsy .t4ag .tee5k .te6flo .te7rafi .te7ragr .ter6tj .tert7ji .te4s5t .te7stud .ti6ene .tie6t5j .ti4k .ti6ner .t6jaai .tjok5 .toe7ys. .to6kla .to7ky7o. .to6lun .to7ront .tou3 .trap5r .trek5 .tre4s .trie4 .tries5 .t4sa .ts4h .ts6jaa .ty6daa .ty6dor .ty6dra .ui6laa .ui4t3 .ui5ti .ui5t6ji .um7hlan .uns4 .un5st .u5raa .u5tra .va6kad .va6kei .va6naf .va4n5o .va7raan .va6sen .va6swa .vas7ys. .ve7cino .ve7laar .ve7lare .ve7lêr. .ve7loer .ve7lome .ve7meng .ve7rema .ve7rena .ve7reve .ve7skaf .ve7tore .vlas5 .vo6gin .vo6lyw .vo6sko .wa7ghri .wa4n .wa7smou .wa6spa .web5m .we4bo .we6b-o .week7lan .wee4t5 .we6kuu .we4l5a .we6lin .wel7ing .we6nan .werk5l .wer6k5r .we4s5k .we6soe .we6swa .w8hê. .w4hi .wi6id. .wins5 .wi4p .wi4t .wî9e. .wy7kwas .wy7nand .wy6net .x2 .y6amin .y6anni .y6asud .yk7loon .ys3 .ys6ere .ys5la .ze5us .z6üric .z4wa .z4wi aa2 aad1 aa4da aa4de aa4do aa4d3r aaf7emme aaf6sat aag5al aag7asem aag7elas aag3r aag7rond aag5s4l aag5sp aag5st aag7swee aai7lag. aak1 aa4ka aa4ko aak3r aak7ster aak3w aal1 aal6dys aald7ys. aal5fe aal6fpo aal5sa aal7sfee aam1 aa4ma aa4me aa4mo aam7smul aan1 aan6dou aand6re aan7dren aan7dros aan7gons aan3k4 aan5kl 5aankon aan7kry. 5aanleg aan7sage aan6see aans7eer aan6sek 5aansig aan7skem aan5sl aan5sn aan6som aan6son aan5sp aan7tuig 5aanva aap1 aa4pa aa4po aap3r aa4pu aar3a aar6dan aard7ang aard7as. aar5de aar7ding aar4du aar3e aar3i aar7kwek aar6lbe aar6lka aar6lva aar6lzi aar6l-o aar3o aar7ser. aar7seri aar6sid aars8teek aars8tell aars6ti 5aartap aar6tin aar7tomo aar7tryb aar3u aas3 aa4so aas7omel aat1 aat7nagt aa4to aat3r aat6sef aat7sfee aat7slim aat6slo aat6sly aat7sonn aat6sow aat6sti aau6wbe aä5ron aba6kas abak7as. aba7komb abare4 aba7ster ab3d aber6sp ab5lau ab5rup 3abso abu7scha ab5wie ac5que a2d a3da a4d3aa adam4 adam7pe. ada4r a3de ades7lan ade7smee a3di adi6eus 5adjud 5admin a3do a4dow a3dr 5adres. ads7erwe ad4sn ads6op. ad5sor ads7teso ad4su a3du ad5uit adu7spel adu5tj 5advert a3dy ady7smit a1e ael7atoo ae4l5ei ae4lo aes5to aes5tr aes7tuur aë1 4afee af5eks afel5aa af4fre af5gha af5inr af3l a4fof af1r af5raa af5ram af5ran a4f3re 3afri a4f5rit a4f3ro a5frod a4f3ru af3s4w 3afva afval5 ag3aa a6gaanv a4gaar ag5adv a4g3ak ag5alg ag5api ag5are 4age. 4ageb 4aged age6ddo a4gei 4agem a4g5erv 4ages a6g5ewen agge7us. a3gi a5ging a4g5ins agi5s6tr ag1l ag5ogg ag5ord ag5ork ag5oud a4g3re ag5rei a4g3ru ag3sa ags7abno ag6sins ag5ska ags7koev ag5skol ag5skr ag5sky ags4lo ag4sn ags6oep ag5som. ags6oom ags6op. ag5spe ag5s6por ags4t ags7taal ag5sti ags6waa ags6wee ag4tu agt7uur. a4g3ui ag5ure ag5uur ag-7lag. aher4 ahe5ri a4hs. ai1 aig6ne. aiï5er ai4lp ain6ste aip6eis ai3s4k ai5sla ais4p ais7prys ais4t ai3tj ai3t4r a4k5aan ak5arb ake6lee ake6lof ak5ess ak5ins akis4 akis7te. 5akkoo a2k3l a5klank ak6leet a2k3n ako6bre ak5oms a1kr a4k3re ak5rig ak4sc ak5sme ak3sp a4k3ui a2kw ak3we ak5win a3ky a4kys a1la ala7gadi a5lagm al5agt ala7kled alan7gaa al5dei a4lef ale6str al4f3e alf6eni al4fh al5fie al4fj alf7olie alf6sko alf6sni alf6sta alfs7tan al4fu alf4- 5algori alien5s ali6gal ali7glas alk7aard alk7laag alk7oond alk5sp al4kui alk7wyk. 
5alleen alm7eier alm7lont a1lo als4a als7agti als7ghaa al6skel als7kelk als5li al4s5oo als7pret als5waa als7werw alt6hea alt6hus alt7rots alt6sas alt6wee a1lu alve5o a2m a3ma ama3k4 aman6t5j ama7rins am5atoo 5ambag 5ambass a3me am5egt ame6sin ame5sm ame6spo a3mi ami7skyw amm6afu 5ammun a3mo amp7arre amp7lag. amp7leer amp7lig. amp7lug. amp7omhe amp7seël amp7sfee amp7sier amp7staf amp7staw ams7esel am6s5kop ams7lend am6smet ams7meti ams6mul am4s3o ams7pels am6swan am6swar ams7wyn. a3mu a4mui am5uit a3my an5agt ana7kwal an5alf ana6spi an4c- and7aans and7adel an6dakt an5dan and7anal an4dap an6dase an6datt and7attr an6degt and7egte and5eks and7emal an6derf and7erf. an6dete and7etes and7eval an6dinw and7inwa and6ja. and6jar an4d5om an4don an4d5op and7oud. an6drak an4dro and6ser and6s7kop and7spaa and7spre and7steg and7swee an7dwing and7wyn. ane6ron ang7aal. ang7adem ang7ghor ang6hai ang6lad an5g4li an6glig ang7lig. ang7lip. ang6nol ang6ons ang7ore. ang7repu ang7sakm ang6ska ang7snee ang6s7te. ang7stem ang6sur ang7ure. anie6t5r an5inl ani5sf ani7slaw an6kase ank7asem ank7refe an4kry ank3w an5ops an5opt an5opv an5ord an5org ano7roei ano7stoe anr6hyn ans7aalw an4s5am an6sass ans7asse an4sc ans7eila ans7eura an6sink ans7ink. an6sjek ans7jekk ans7jord an4ske ans5kei an6skin an6s5kop ans7kous an6slat ans7mada an4sn ans7oran an4s5pa an6sper ans7pet. an4spo ans4ti ans7toil ant5aan ant5aar an6tass ant7asso an4tei an6teks ant7ekst 5antenn ant7ete. an6tins ant5jo an4tol ant7opru ant7rest ant5rin ant7rob. ant6ski ant6sko an4tui 5antwo a1ny a1o a2p a3pa a4p5aan a4p5agt ap5aks ap5arm 5aparth a3pe ape6nop a3pi api6rfa ap3l a3po apo6kaa ap5ond apo5sta 5appar ap1r ap5rol ap3ru a4pry ap5ryk a5prys ap6sall aps7alli ap6seko aps7iden ap6skof ap6s5taa ap5sti aps7toet aps5we a3pu a4pui ap5uit a3py a1ra ara7gwan ara7klee ara6kop ara3p4 ara6ppa ar6d5agt ar6das. ar6datm ar5der ar6deti ar6d5opp 3area aree5s areg7swe ar3ei are7knip ar5fla arg4h ar5gha 5argite ar4gl arg4o arie4f ari6jke ar6k5ana ar6kini ark7leer ar5klo ark6los ark7onvo arko6v. ark7snui ark5sp ar4kw ark5wa arn6avo a1ro aroet6j aroe7tji aroo5h aroom4 aroo5p aroo5s ar3op aro6wva ar5rag arres5t ars5ag ars7elek 5arsena ar5sie ars6kou ars7krap ar6skre ars7kree ar6skro ars7pan. ar4spr ar6stal ars7tall ars7tee. ars6-in ar6taas art7aasv ar6talb art7albu ar4t5as ar4tc ar5te. art7eend ar5teh ar4tei art6hol art6hur art6omo art5oog art5oor ar4tor art7reek art7roep art6ryb art7samb art6slu art6spr art7spyn a1ru arus6o. a1ry ary7taal as3ag as5app as3c as4d. as4dh as5egt ase6rak as5ete as3f asg6hit as4hi asi7freu a4sj. a4sjm a2s3k a5skool a5skri as5kru a2s3l as5laag as3m a4smy a4sna as3no as9of. as5ogi a4s3oo as3op as3p as4por as3t as4t. a5staa as4th ast6les a5stof a7strak. a5s6tran a5s6troo as3w a4sys as5yst at5aar a4tag ata3s4 ata6sse atas7se. ata6wba ate5it 5atelj ate6rar ate6rer ate6ron a6tetes ath7cart a5t4hee ath7kinp ath7lone atie6te a2t3j atk6v-s 5atleet 5atmos at5oog at5ry. ats7alma at6sint ats7inte at4sj at6skin ats6kom at6skop ats7kop. ats7krip at4s5le ats7lykh at4sm ats6maa ats7nood at6somw ats7omwe at4son ats5ond ats7onko ats7onlu ats5op ats7ower ats7tend ats7trek ats8treke at6stro ats3w at5the att6hys at4tu atu6maa 2au aud6rey au5gra aug6sbu aul6spo au3p aure5u auri5s4 aus4t aus7tin. au5str aus7tus. aut6ste ava6lop ave7lott avlo6v. 3avon awa7glas awas4 awe5ga awe4r5a awer6ui aws6han ax5ofo a3ya ay4a. ay5ist ayn6ard ayn6or. a3yo a3yw azoo7ka. azz7agti az4zl azz7orke 1ä 1b2 2b. 
babak4 bab7wiër ba4d5ra bad5sp ba4kin ba3kl ba4kla ba6kleu ba4k3o ba4k3r bak3w bal6kla ba4lo bal7onts bal5or bal7tsas ban4da ban6dek ban4d5r ban7glad bang7ste bangs8te. ban4k5a ban6kre ban4kw bar7kaan bar4s3 bar5th bas7ekst bas7ghit bas7jan. ba4sn ba5spe bas7peer bat5aan ba3t4j 2bb b3ba b3be b3bi b3bl b3bo b3by 2bd b3de b3di b3do bed7slaa bed6sta beds7taa be3dw beel6dr bek7neus bek7wind bel6aga belk6li bel7klik bel6ldo ber6gaa berg7aar ber4gl ber4g5r ber7grys ber6gzi bers7pan ber6spr bers7pre bert6sk bes6aan bes4k be5ska be3sl be3sm be3so be5son be5sôr be3s4t be6s5ter be5sti be6stia bes7tial bes7trol bet4h be5tha bet7hesd be5ton be3tw 2b1f 2b1g b3ge bid3s bi4du bid7ure. bie6dui bie4g bieg5r bi4jl bin6dri bio7sfee bi4rc bis4a bis6ho. bis7scho bi3tr 2bj b3je 2b1k b3kl b3ko b3ku bla4d5a bla6don bla4d5r bla5so 4blau bleem5 ble4s ble7ser. bles5k ble6tji blet7jie blê6rfl blik5o blix7en. blo4k3 blo7kaal blo4m3 blu6sem bly7mare bly3s4 2b1n b3no bob7slee boe6kil boe6kom boe6koo boer6st boers7te boe4s5k bog7gher bog7skut bo2k1 bo3ka bok6aak bok6ale bok6as. bok6erf bok6ies bok3l bo7kleed bok6om. bok6ors bok6ost bok3r bok6rag bo4m5aa bom6aat bo4m3o bon6dam bon6dra bond7raa bon6tel bon4t5r bo9op. bop6laa bor6dak bor6des bor4g5a bor6gri b4ors bor6saa bor4s5k bor4s5l bor4s5t bo2s bos7anem bos7jamb bos7pepe bos7taai bo5s4tr bos7uil. bo5t4ha bot6sto bou6it. bou3s4 b3pr brand5a breek5 br4ei brei5s4 brengs7t brie6kw briek7wa 3bro bro4n bro4sk bro6vni bro6wni bru4l 4brup br4üm 2b1s b3se b3si b3sk b3so b3sp b3st bs4ti b3su 2bt b3te b3ti buc7cleu buik5s bui5t4j bult7af. bul4t5j bul6top bult7op. bul6tui bun7senb bus6had bus7toer but6hel buu7rend 2bv b3ve b3vi 2bw b3we by1 by3d by3k by4lb by4lt by3n4a by3s bys6kot bys4l bys6tek bys7ter. bys6tor bys4w byt7alka byt7eier by3tr 1c2 2c. ca4es cam5ph ca3pr ca3ra car6lto caru7so. cat4h ca5tha 2cc c3ca c3ch c3ci c3co ce4st ces5te 2ch. che6lan 5chemi che6reg che5r4i che7ryl. che7styl che6vvi ch5hoe ch5lei ch5nik cho7rage ch5sia 2cht 2ck c3ke cot7rand cove7ry. 2ct c3ta c3to c3tu cus5to cyp7rian 1d 2d. 4d5aanb d5aand 4d5aank d5aansl daard5u 4dabs 2d1af da2g da5gas dag5et da5gha dag6ham da5gra dag4sk dag5so dag7ster dak7lei. dak7oorh da4k3r dak5wa 4d5alar 4damb dam6plu 3dan dan6k7erk dan6sak dan6sko dans5m dan4so dan4s5t dan4t5r daph7ne. dap4l da5pla 4d3arm 4dart d5arti da4s. das7lag. das7traa da3t4j 2db dby6lvo 2dd dda5kl dda3s4 dde6lee ddel5so dder7aal dde6ras dder7as. d3dh dd4hi deba4t dee4g deeg5r dee4l dee7lig. 4d5eenh deë7skou 4deg. 2dei de3ka dek6aan de6klad de6k5lat de6klei dek7lei. de4kna 6dekono de6krie dek7riet 4deksa dek6ska del7appe del5eeu del5egg d5elekt del7elek 6d5eleme de6leng del7enge del6fer del5fi del6fos delf7os. del5oor del7oper del6ser del4so del7sold del7sone del4sp del6str del7stre delt6ag del7tagt del7weis 4demm dem6pla den6din 4d3eng 4denj den6kar den6kja den4k5l den4kr dens7pre den4t5j den6tri deo7plek deo7sfee 3dep der5ast de6reen der7een. der7emig der7ent. der7flap de6rin. derm7ins der5na de4ro de5rob de5roe der5of de5rol der5on der5ow der5ps der6sjo der5s6kr der6slu der6spu der7thal der6uit de5sag des7alni des5ap de6seng des7enge de4s5in deskat5 de6skor des7leed de4sn des7offe des7oksi de4s5on de4sor de4spa des7pari des7poës des7prik des7taal des7tele de4sti de5stig de4sw des7weë. 4d5ete. deten6te de3tw 4deuro deur5s6w 2d1f 2d1g dge5sp dg4li 2dh dias4 dia7stol dia6zvi dic7kie. dic7tio. die6fal die6kes 5diens die4pl die6tom die4t5u dig6ofa di4gre digs4 dig7skro dig7som. di6kamp dik7amp. di4k3l di4kr dik7ribs di4kw dik7wyn. 
di4l5al din6gas din4gr 4d3inl 4dins 4d3int 4d3inv di6sass dis7assi di4so di5son dis6pne dis7quis dit6hak dit7jies 2dj dja7dji. d4ji. d4jia 2d1k dklo4 2d1l d3la 2dm 2d1n doek5r doe6lon doe6sko does7kop 2dof 4dogi do4l5os dol6sou dols7ou. 3dom dom6pli dom6sap 4d5omse dom7slim dom6swê 4domt don4sk 4d5oord 4dopn dop6rof dop6rys 3dor 4d3org dor7othy dor4sl dor4st dos6tel dou3t do3y doy4e 2dp d2r 3dra dra6gaa 4d5rand dra7stan 4dreë 4d3reg d4rela d4rew 4driff d5rigt d5riss 4driv droë7ys. 4d5rond d5rooi 4d5roos dr4op dro6pan dro5pn dro7sfee 4d3rug d5ruim d5ruit 4d3rus 4d3ry. 4d3rye 4d3ryk 4drym d4ryw 2ds ds5aamb ds5aar d4s3ad d5sakr ds5aks ds5angs d4s3ar ds3as d3se d4s5een ds5eis. d4s5eko dse4l dse7leer d4s5eng d4s5era dser6tj dsert7ji d4s5erv d5sfeer ds3id ds5imp ds5inde d4s5ins ds5int d2s3j d5skee d4skin ds5kind d4skis d5skole d4skom d5skoo d6skraa d6skrit d6skroo d4s3le ds3li ds6luie ds3m d5s4mee ds6moor d4sna ds5neu ds5noo ds5not d2s1o d3soe d5some ds3on d7sonde. d7sondes dson4t ds5oog ds3op ds3ow d5s4pel d5spes d3spi ds5pop ds6prek d7spreker ds7preki d1st ds5taak d4staf d5stand ds5tea d5stel ds5tent d5ster. ds5terr d5sters ds4ti d3str d3stu ds3w d3sy 2dt dter6tj dtert7ji du4e- duns6te dur6rhe dusie5k dus6kap dus6pel 2dv dverdiens9 dverdien8st dve6sid dvie4 2dw d4waal d5waar 5d4wang dwa6nor 4dwarm dwar7se. d3wat d4weil 5dwerg dwerk5o dwe6tar d3wil d4wing 4dwoo d4wyn dys5ag dys6mit dys7tuin 2d- d-r6hod e1a eam6ses eang4 eate4 eau7mont e3ba eb5adr eb9cu. ebou5t ebou6t. eb4re ebrons5 eb5tui ec5cle e4chn ech7tiaa eda7gaat eda5go e3de ed5eis eder7as. ede7ring ede6sap ed5off edors5 ed4ra ed5rep ed6saks eds7kalm ed4sl eds7lafe eds5om ed3sp ed5s4we ed5uit ed2w ed5woo ed5yst ed3yw ee5agt eed6atu eed5we ee4dy ee2f eef7laag eef7lopi eef7rant eef7rek. eeg3l eeg5ru eeg3s4 eeg6sdi e5eila ee2k eek5ass ee5klaa ee5klag eek5lo eek3n eek5og eek7oors eek7rooi eek3w eek6wal eel5ap eel6doo eel7doos eel5een eel7eer. eel5ei eel7indr eeling7s6 ee6lins eel5int ee4l3o ee5lob eel6ood eel6oon eel5sa eel7snag eem7onde eem5ou ee2n1 5eendj een6ema ee5nen 3eenj eenk4 een5kl een7slot een5sm eens6pa een7swee een7topp ee4ny eep7esel eep7leer eep7loog ee4pop eep7roes eep6sam eep7skep eeps5ko eep7skui eep6sti ee2r eer5ap ee5red eer5ee eer5end eer5in ee5rob eer7oes. eer5om eer5on ee5row eer5ps eer7skur eers7lam eer7smed eer6sow eer3u eer6ust eery4 eer5ys ee2s3 ee4s. ees6ala ees6ap. ees6lep ees5me ees7muil ee5sna ee5sny ees6op. ees6pre ee5staa ees6tal ees4tr ee6styd ee4sw ee5syd ees6yfe eet7appe eet7eenh ee4ti eet5in eet7rek. ee6troe eet7roed eet7ruik eet7wiel eeu3g4 eeu5in eeus4 eeu7spoe eeu5tj eeu6ur. ee5yst e3êr eë5aan eëks5t eël7eier eël7yste eë4na e3ër eër7agti eë5ran eër7arm. eë5rod eër6ske eë4sk eët6ste eëts7te. e4faf ef5afs ef5eks 5effek ef5inh e1fl ef5loo e4fly ef5oms ef5oue ef3st efs6tal eg5amp ega5s4k e3ge ege6las egel7as. eges7per eges4t ege6vwo e6ginko eg5ogg eg5rig egs7enti eg6sins eg6s5int eg3sk egs6lot egs6pre egs6pri egs6pyk eg6s7taal egte6re e4g3ui eher6in ehe7rinn eho6kra eib7niz. eid7rok. eid7saam eid7salo eid7sirk eid7skou eid7sku. eid7spa. eid7spek eid7ste. eid7stoo eid7sug. ei1e eie7naan eig6h-n eig7opro eik7aard ei5kno eik7wydt eil6spa 3eind ei4n5ed ein7eed. 
ein7glas ei4non ein7oord ei4n5op ein7otte ein6sad ein6sep eis7angs ei5sei ei6s5ind ei5sja eis7kamm eis6kaw ei5skê eis6kot eis6laa 4eiso eis7ouer eis6pir ei5s6tel ei5s6tre eit2 eit7hand ei3tj eit7klin eit7nisp eit7onde ei5tra eits5ko eits5l eits5o eit7spor eit7stak eit7stra eits5w eï5mit eï4na eïn7klin eï4no eï4nu eï5oni eï4sl eja7stas ek5aan ekaars8te ek5aks e3kan ek5asg e3ke ek5een eke7naar eke6tam e3kê e4k5ins ek3k e4k5les e5kleu e4klê ek5log e3koe eko6mol ekom4s ek5omsl ekoms5t 5ekono ek5ooi e3kop eko6pap e4k5opm ek5opn ekor6da eko7rum. ekou6st ek5owe e1kr ek5rad e4k5rok e5krom ek5rug ek6sapp 5eksemp eks5esk eks7inge eks7logi eks7loks eks7outo eks5pir eks5po eks6poe eks6tel ek6sten ek4sti eks7uur. ek5uit e4kwê ek5wie e4kwu e1ky e1la e6l5aand el5aanh e6laanv el5aard el5adm el5adv el3af el3ag e5lag. ela7klon e4l3al e5lari e4l5arm ela7slan el5asp eld7adel el4dap el5de. el6d5ele eld7erfe eld7evan eld7olie eld7onde eld7smid e3le el5eien eleis6t elei7sta e6lelek el5erts ele7sett ele6too el4faa elf6abr elf6eit elf7en-d elf5erk elf6lan elf6les el4fon elf7onth elf7ontp el6foop elf7oops el6foor elf7oors elf6ron elf7twyf e3li eling8stell el5inh e6linko el5inv elk7nage elks4 el4kwi el4lv elm5agt e1lo el4ob e4lol el5oli el5ond el5ont e4loor e4l5opd el6ope. e4l3or el5phi els7angs el5sfe el6sind els7indr el6skan el6skom els7korr els7krit els7lof. el5smi els7mora els6nag els7nood els7onde els7oork el6stek el6s7tran els7ware el5swee elt7akke e1lu el5uit eluit6j elui7tji e3ly ely6kaa em5app e5masj eme6lek eme6lew eme6ron eme4s eme7sis. emes5m emes5t e5metf em5eva e4moef em3op em5org emp7laag emp6skr ems4p em5spl e4naf ena7glas en5agt en5akk en5alt e4n5art ena6spe en6d5agt ende7ro. end7raak end7rit. ends7oë. end7sons end7ure. end6wer en5eil en4en e4n5ent 5energ ener6tj enert7ji eng6hor eng4la eng6lor e3ni en4ig en4im en5inh e6n5ink. eni7soms 3enji en6kinh enkom4 enkoms5 enk3w e5nomm eno7ryn. en5out ens7adem en6sall ens7are. ens7eise ens7elek ens7elik en5sen ens5erv ens7esse ens6haw en5sie ens7inga en5sji ens7koei ens7kyke ens7luik ens6med ens7nuk. en4son ens7onru ens7onva en6spei ens7pist ens7pot. en6spou ens7pous ens7taak en6stak ens6tam en6steh ens6tei ens6tel ens7tele en7sters en7s6tes. ens6tet ens6teu ens6too ens7toom ens7trek ens7uil. ens7ure. en5sy. ent5akt en6teks ent7inte ent7rif. ent7rok. ent6sin ent6son ent6spa ent6wen en3ui enu6lin enu5sk enu5st e3ny en-7steg e1o eoe4s eo5fag eo3g4n eoi6ste eop6lek eo3ro eo1s eos4t eo3tr e4paf e4p3ag epe6loo ep5emm ep5epi epers7te 5epidem e4p5int ep4la ep5lap ep5ligg ep5lus epoet4 epo6nin ep5ops e4p5rei eps7ameu ep6s5eis eps5id ep4sj ep4sk eps7kano ep5ski eps7kohe ep4slu eps7luik eps6oms eps5on ep4s5pr eps7waar ep5uit e1ra er4a. er5aan er5afd er5afh er5afsk er5aft er5afv er5afw er3ag era7gree era7kles er5aks er5akt er5alb er6ald. er5alt er5ana e5randa e4rapp er3ar era6ser era7uitv erd7ryle erd7slip erd7tree er3dw er4eb er6eenk er5eers er5eff er5eie er5eil er4ek er5ekst er5elm e4r5emm e6rengt e4reni e4renj er5erg er5erv e3r4es er5esel ere7spio eres6ta ere6stp ere7temm e5rewa e1rê er4fh er6flaa erf7leen er6flet erf7lett er5flo erf7lug. erf7lus. erf7omhe erf7oom. er4fp erf7reuk erf7ruik erg7aren erg7lyn. erg7renm erg7rymp erg6rys erg6sho erg7stra erg7uitj er4id eri5fr e3rig eri4g5a e4r5ind e6rink. e6rinna er5ins e4r5int e6r5ital eri7trea erk5aan erk7esel erk6has erk7ink. er6kins erk7inwy er4kj er6klat er5kle erk7onde erk6opn erk6s5on erk7spas erk6sto erk7uurr er6kweë erk7weë. erk7ywer er5lik er5lui erm4a erm7aanh erm7afsl ern7eiwi ern7kwes er4nm er4nn ern7oes. 
er4nr e1ro er5oew er3oë er5ogg e3roï e5rok. e4r5oks e4r5oli er5om. er5omh er5oms er6ona. er5oog e5room er5oond e5roos e4r3op e5ropa er6opla e5ropo e4ror e5rora 5erosi e4r5oss ero7stil er5oud er5oue erou6t. erp7anke er6pinh erp7inho erp6lan erp7ruik erp6sig err6ein ers7assi er6s5eli ers7ete. ers7inda ers7jean ers7kaia er6skaj ers7kaju er7skake ers7kaki ers7kete ers7kiss ers7koet ers7koor ers7kop. er5sky ers7less ers7lone ers7luid ers6mal er7smara er6smat er4s5om ers7onvr ers6opn ers7ower ers7pien ers7put. ers7scen er5ste ers7tele ers6teo ers4ti ers7treg ers7waar ert5aan er6taap ert7aap. er5tap ert7ape. er6tend ert7end. er5tes ert6hal ert7jakk ert7opin ert7orre er6tres ert7rok. erts5l ert7uur. ert6wak er5twi e1ru erug3 er5uin er5uit er3uu e1ry ery7doel ery7salf erys6ma ery7smaa ery7suur ery7trek e5saan. e5sage es5agt es4ak es5all esa6mol es4an es3c es4dh e3se e4s5een e4s5epi es4er e3si es4ia es4ie es4if esi6gei e4sill esin6s5i es4it e2sj e4s5ke. es5kle eskor6s eskors7t e5s4kut e3sla es5lem es4lip e1sm es4me es5me. es5men es9mè. e5smou es4mu es5nie es5noo es4ny esoet6j esoe7tji eso7fagu es4ol e3s4oo es4ou e1sp e5spel es5pen e4sper es5pet es5pir es4pli es4pra ess6opv e1st es4t. e5stad es5tas e4s5te. es5tea es5teli e4s5tes estes5o est6her es6tik. es5toi es5tos e6strak es4tre e5stuk e3sw es4yd e4t3ag eta7stas e3te e4t5eie ete5r6aa etie4l5 5etike et7jie-k etk6ysi eto6nop et5opv e4t5ord e4t5ram e6treke e6treko etre7kor et5rim ets7fyn. et6skat ets7kato ets7kous ets7krie et6s5lap ets6maa ets5ong et6spaa et4spr et6stek ett6re. et5uits et5unie et4wi et5win et4wy et5yst 2eu. eug6rie e3uit euk7inte euk4l eul7eien eu4loo eu5mon eum7uitg eu4na eun6sla eup7aand eu4ra eur5aa eur6aal eu5ral eu4ree eur7eet. eu6reg. eu6regt eur7egth eur7ekst eur7elem eur7spar eu4sa eus7ape. eus7jig. eus6kot eus6tac eute4l eu5tem eu3tr e1uu 2eu- eva7kwaa eva6les evr6ore evu6es. ewal4s5 ew4ar ewee4 ewe7gaan ewe7goed ewe7inde e5weis ewe6nee ewen8stes ewe6res ewe7span ewik4s ewiks7te exy7ste. eyn4o e5yste e3yw e4zka ez9ra. è1r ê1 êe4ro êla7flui ê4rde êre6loe ê4rhe 1ë ë1g ëi3e ëk4sk ëks3p ëks6pek ëk4st ë3laa ël5agt ël5alb ël5as. ë4lei ël5ent ëlf4l ël5fle ë5loop ëls7kuil ën5agt ën4tr ënt5re ëpre4 ër5aan ër5afd ër5off ërog4 ëro3s ër5owe ërs7kent ë1ry ë1s 2f. 1fa f5aanb f3aar f3ad 2f1ag f4agi fah7renh fai6r-n fak6ste faks7te. fan4t5j fan4tr fant6s5t f3ap f3art faru6q. 2fb 2fd f5dein fde4s fde7sake fde7sess fde7skei fde7stor fde7stra fde7sust f3d2w 1fe 4feen 3f4ees f3eie fe4l5ap fel7asem fel7enti fe6loon fel7oond fel5s4m fel6spoo fe4lu fe4ly fel5ys fer6skr fers7kra fer6sku fers7kui f5erts fe2s fes3t fet7ete. f1f ffe6las ffe6ret ffe6tet ffi6eek ffies6m ff5rei f1g fge7sper fg4ha fg4li fg4ly fgod4s5 1fi fi3d fid6ji- fie7ekst fie7lafo fie7smaa fie4s5o fie6tol fi5lag fil4m5a 4finr fi5sto fit4z 2f1k fkom6st fkoms7te fla4p flap5o f2le f5lees f5lese fle4t flet5j flex7or. 2fli 5f4liek 3f4lit 2fm fmo4no f1n fni4s3 1fo f3of 4foff fok4s5t fol4k3 4foms 5fonds fond6sk fond6st fonds7te f5ontb 5fonte f5ontl f5oorl 2fop fo4po fop7spen f5orde f3org for7oksi fo5rom fo3ru fos7feen fout5j fox7hill fox7stra fp4sa 1f2r frag6aa 4fram f4ras f4ren fre4s5k 4f3rig fri6too f4rod fru5ga f3ry 2f1s fs5agt f4s5ank f4s5eko f4skon fs5log f3sm f4sma fs4me fs4mi f3sp fs4pl f4spro f4s5tak fs4ti 2ft ftre4 ftre5d ftrek5 1fu fu4ch fur6ore fu3so 4fuur f5uur. 2fv fva4l 2fw 1fy fyn7goud 2f1ys fyt7appe fyt7jie. 2g. 1ga g4aai gaam6s7te g4aan. g5aanbi g6aandh g5aanl g5aanw g4aat 4g3add 2g1af g4afo 2g1ag g4agr ga5gre gag6rep gak4l ga5kla gal7afsk gal7appe galei5 gal7oog. 
gan6gra 4g5anke gans7ke. g3app ga3ra 4g3arb 4g3arm gar4s3 gars6ti gar7stig g5arti ga6sarm gas7arm. gas6mok ga4so ga5sol gas6pel gas6tre ga4tr gat7ruik gay7nor. 2gb gbys4 2gd gde7roof gdut7jie g3dw gd4wa 1ge ge3d gedi4s g5eenh gee6tal geet7al. geës3 geë6sti ge3f 4ge4ff gef4l ge1g2 4geg. geges5p geg6uil g3eie geï7migr ge7k6lik. 4geks gek4y gel6agk gel6dad gel4do ge5lol ge5loo ge6loon gel7oond gel6s7te. gemi7au. gem6opp 3gen gen4dr gen6dur genes5t 4geng ge4oi gep4a ge5pag geper6st gepon6s 3ger ge5rap ger6ard ger5ete ger7iden ge6roef ger6ogg ger6spo ger5sw ger6uit ge3sa ge5sfe ge5sin ges7jagt ges4k ges7kade ge3sl ge3s4m ge5sne ge3sp ges7perb ges7pers ges7pes. ge3st ges6tas ges6tig ges4w ge5tja ge3tw ge3ui ge5um. gev7woes gewens7te ge1y 2g1f g3fl gf4li 2g1g2 g3ga gga7kwee gga7stre gga5t4j gge6sti 2gh 5g4hoer gho7ghok gho4l 5gholf g4hs. ghu6moe 1gi g5iden gie6far gi4fa 4gimp gin6gaa 4g5inri gip4s gip7siet gis7enti gi5tra git5sw 2g1k gkaar4 5g4lans gla4sa gla6sel gla6ska glas5o g4le. 5g4len. gli6don g4lif gli6gur 4glik g4lim g4lip 3glis g4lob 4glod 3g4loe g4lof 3g4lom g3lop 3g4lot g4lo- g3lus 3g4luu g4ly. 4glyn 2gm gma7skui 2g1n gneem5 gneet5 gnie6ko 1go god6sak god6sid 4g3oef goe7krui 4goes g5oes. g5oeta goe7the. goë7lary 2g1of 3gogi gol4f5o 4g5olie go4mag 4g3ong gon6sto g3ont goo5gl 4g3oor 2g1op go5pla 3g4opo g5orig go3sl gos7pelr g5osse go3s4t gou4d3 goud6a. gou7dief gou7dini gou7dink goud6s. gow7rie. go9ya. 2gp 1g2r g4rab g5rak. gra4ma gra7mado gra4m5o g4ras gra4s5a gra4se 4g3red g4ree4 g5reek greep5 4greë g4ren gren6st g5rese gre4sp gre6sur gre6tji gret7jie g4reu griek6s7t grie6t5j 4grig gri4p g5rit. 4g3rok g4ron 4g5rooi g5rook g5room g5rowe 4grug g5ruim 4g3ryk 4grym gry6ste 2gs g2s1a g5saam gs3ad gs6ade. g4saf gs5agt gs5aks g5sala g5sale gs5ana gs5ant gs5app gs6appe gs3ar gs3as g4s5een g4s3ef gs5eis. g6seise gs5eko gs5eks g5sekt g4s5ele g4s5eli g4s5ene g4s5est g4sew gs5ewe gs3f g5s6feer g4sid gs5ide g6simpa gs5inde gs5ini gs5inl gs5ins g2s3j g2s1k g5skaal gs5kab g5skad g6skapa g5skatt g5sked g5sker g7skeur. gs7keurd g3ski g4s5kin g4skl g5skof g5skole g4skom g5skoo g6s5koor gs5korr g5skot g5skou gs3kr g2s3l g3s4la g5s4lop gs3m g5snel g2s3o gs4ol gso6pro gs4ou gs3p g5s4pel gs5per g5spes g3s4pi g6spill gs4poe gs6pore g7sports g5s4pru g1st g6s5taak g4s5tab gs6tabi g4stak g4stal g4star g4s5tek g6stelg g6stera gs5teri g6sterr gster6s g6stese g4sti g5stigt g4stoe g7stoele gs5toer gs5toet g4s5ton gs5trad g6strak g6stran g6strap g6strib gs5troe gs7troon g6struu g4s3ui gs3w gs6werw 2gt gte7eenh gte7lagi gte6ras gte6rer gte4ro gte7roer gte7rol. gtes4 gte7sfee gte7smee gt5uri 1gu gu2a gu2e gu5ela guid6o. gui6rla 4guit 2gv 2gw g3yst 2g- 1h2 2h. 2ha. hal4f3 hal6m5ag 4halo hal4s5k hal6s5tr ham7pagn ham6skr hams7kra han6dan han7dja. han4dr han4du han6gli han6gor hangs6l hang5s6w han4s5k han6ska har6dop hard7op. har6sel har6sol har6spa har6t5aa har4t5j har4to har7toem ha4wk haw7shan hay6eli heb7lus. hee4l heer8s7te. hees6e. heg7orga heg7rank heg7spyk 5heid. heid7stem he4ko hek7saan he4k3w hel7ange hend4 hen4so he2r her3a he9ra. her6akl he7rald. he5r4an he3re her7egpa he4r5ek he4r5ev herf4 herfs5 her3i her6ib. he5rid he5rod he5rol her5ond her7ontm he5ros her6oss he5rou her5ow her7sche her3u her5yk he4sp hes7peru he4s3t hete5r6o heu6paa he3us hev3 he4vr hewen7st hie4r hier7in. hie7roni hie7rony hië1 hil6lbr 5hings hing6s5t hipe4 hi4rl hi4sp his5pa hi4v- 2hl h3li 2hm h3ma 2hn hode6sl hodes7la hoe7kaai hoe6kys hoe6spi hoe4s5t hoër7op. ho4fa hof5aa hof7amp. hof7uits hog6hok hoi7swer hok7rakk hol7aar. 
holes5 4holf hol5in hol7oog. 4hon. hon6daa hon6dag hon6dro hop7land hop7smaa ho3ro hor4s hor4t5j hos6hol ho4ta hou4t5a hout5j hou6tol hou6tom hou6who 2hr hre6sto hrie4 hries5 hris5t h3te h3to hu9go. hui6daa hui6dui hui6sef huit6ji hui7tjie huk6hun hul4p5a hul6pek hul6ple hul6por hul6ste huls7te. hu3mo hum7oes. humus5 huter6s hut7jie. hut6spo hut6ste huts7te. h5vill 2hw hyg7roma hys3k hys7tang i1a iaan6so iaan6sp iaan6st iaans7te iam7son. ia4nop ias6koo ia5spo ia5sta ia5s4tr i4baf i4bag ibou6s. ic5ky. i2d i3da ida7groe id5agt idde6ra i3de ide7snui i3dê i3di idia5s id4ja i5djan i3do i3dr id2s1 id4s5et ids7inoe id4ska ids7kerm ids5kr ids3l ids7lags ids7nye. ids3o ids3p ids6pa. ids6pek ids6pie id6spil ids6pri id6spry ids7taal ids7tee. ids7teks ids7tele ids5ti ids5toe id6s7trek ids7ure. i3du id3uu ieding6s7 ied7ione ied5rol ied3w i1ee i4eee ie5een ieë7aard ieë6lys ief7alar ie5fie ie4f3r ief7stal ief7uitg ie5gla ieg7loka ieg7riem ie6grit ieg5st iek7asyn ie5ke. ie6k5erv iek7esse ie5kie ie6klaa iek7laai ie4kni ie6k5ond iek7ople ie6kops iek7opse iek5opv ie4kre iek7revu iek7rigt iek7ware iek7wees ie4k5wi iek6wos iek7wyd. ie4laa iel6afo ie6lene iel7oor. iel6san iel6s5on ien7anal ien7glor ien6kro ien7olie ie4n5oo ien4s ien7sakk iens5or ien7sout ien7span iens5t ien7stam ien7stel ien8stele ien7stet ien7stoo ien5suu ie4n5ur ie6poog iep7oog. ie6proo iep7rooi ier7afma ie6rafs ier7engt ier7eter ier7neff ier7omtr ie4r5on ier6oni ier6ony ier7swee ies6amp ie5se. ie6senk ies7enke ies7ents ies7ferw ies7kaf. ie6skon ies7kop. ies7kraa ies6kry ies7laag ie6slep ies7lepe ies5li ies7luik ies7meub ies7mooi ie3so ies7oes. ies5ond ies7oorp ie6sopn ies7opne ies7pane ies5per ies7plig ie6s7taal ies6tas ies7tee. ie6steh ies7tele ies7tent ies6tin ies5uil ie4s3w iet7aans iet7aard iet7alba ie5te. iet7erts iet7omse iet7reke iet7reko iet4sl iet7uie. ie5twi ie4tys i1eu i4eub i4eud i4eug ieu7grie i4e3ui ieu7ing. i4eul i4eum ieu7skot i4euu i4euv i4euw ie5wie ie4w- ie5yst ie-7klik i3èr i1ê iël6sku iën6tji iënt7jie ië4s3t i4fei i4fim i4fin if1l i2f3r i4f3ui i4g5aan i4gap igare4 igaret5 ig5eff ige6naa ig5ete ig5ins ig1l ig5loe ig4op ig5opt ig5org igo7roos ig5res ig5roo i4g3ry ig3sa igs6ins igs7kaps ig7skend igs5ko ig5sku igs6mee igs6ona ig5soo igs7poei ig5sti ig7stoei igu7era. ihu6ahu i1i ike6roe iket5j ik5kli i2k3n iko6nat i1kr ik6sakt iks7akte iks7iden ik4sin iks7inve iks7juk. ik6skom iks7paar iks6pad iks7pare iks6tik iks6tuu iks6wel ik5wan i4k3we i4kwy 2il i1la il5agt ila6too ilbe6st ild7agti ild7smaa ild6stj ild7temm ilet5a ile6tji ilet7jie ilf4l il5fli il5gha ilinde6 illo4w 3illu il4m5at ilm7oper i1lo ilo5sk ils7insp ils7orde il4spr ilt7aar. i1lu i2m i3ma im5agt i3me i4mek im5eks ime7laar iment6s ime4s imes5t i3mê i3mi 5immigr i3mo i4m3o4p imo7theu 5implik i3mu imu6maa in5aard i4naf i4n3ag in5akk in5arg in4d5aa in4das inder7as 5indiv ind6oef in6doog ind7oogm in6d5oor ind7sleu ind5sw 3indu in5dwi inee7tji ine5ra 2inf ing7aars in5gan ing7eter ing6hpa in6gind in6ginf ing7infr ing7inst ing6leb ing6ope ing6opl ing5ou ing7pseu in4g5ru ing7saag ing7sap. ing7see. ing7sekr ing7seku ing7sfer ing7sin. ing7sinj ing7skal ing7skêr ing7skud ing7slep ing7slym ing7sofa ing7som. ing7somm ing6s7pil ings9telle ing7stin ing7suie ing7suil ing7swel ing7uil. i3ni ini6gaa in4ik in5inf 5inisia ink7erts ink7laag ink7ler. ink7nerf ink5nu ink7ogie in4kol ink7olie 5inkom ink7ring in6krol ink5st ink7wit. 
5innam 5innemi innes6t inne7ste in5ong ino7skaa in5rag 5inrig ins7epou in6s5ete 5insets in4sg ins7kaps ins6kin in3sl ins7molt ins7moor ins6ond 5inspek ins7prie ins4t 5insti insti7t. ins6ton ins7twis int7appe int5ess inte6s5t int6he. int6uit int6wyf inu5e. 5invlo i1o io3pr ior6ubr io1s ios4k io5skl ios4p ios4t io3tr i2p i3pa i4p3ag i3pe i3pi i3pl ip4lo i3po ipo4s5t i3pr ipre4 ip4s. ips7kopi i3pu i4pui i3py i1r ir4ch irke4 irkel5o irop4 iro5pr iru4s i4rwa is3ag i5sagi i4s5aks i6sangs is3ar i4sarg is5asp i4sav is3c i4s3ei i4s3et ise5um i5sfeer ish7nie. i4s5int is5inv i2sj i4sj. is5jan is5joe i2s3k is4k. i4ska is6kaaf is5kan is4kê is5kui i2s3l is5laa i5slag is3m i5s4mit i2s3n is5oes is5ond is3or is3p is4p. is3t is4t. is4th isto7pho i5s4tyn i4s3ui i5suik isu6maa is3w i2t i3ta it3ag ita6tis it3b i3te ite7dwal ite7glas itek7te. ite6mas i5tenh it4er ite5ru i4t5ete i3tê i3ti it4in i4t5ins i3to ito5fa ito7plan ito7rowe it1r it3re it3ry it4sc it5ser its5ete its7jood it4s5oo its7perk its6tek its7tori its7uur. i3tu itu6saa it5win i3ty ity7sokk itz7laan i1u ium1 iu4ma iu4me iu4mi iu5mie ium6uur iwel6s5k iwe7mos. iwes4 iwe7spor iwe5st 1ï ï2m ïn5akt ïn3o ïns4t ïn5und ïs5lam ïs3t 1j 3jaa jaar6s7kr 3jac ja4cq 3jag ja4ga jan7ghai jan7knap jap4l ja5pla 3jare 3jari ja4sm jas7pant jas6tas jaz4z jaz7zeri je4kn je4kr jek7rasi je5rop jes7nië. jes4t je4t3r jet6sjn jeu4g 5jie5kn jie6nan jie6nol jien5s 5jieon 5jieop jie6ska jie7skap jies7kat jie6ski jie6skop jie6s5lo jie6slu jie6sol jie6son jie6spa jie6s5tr jie6sui jie7suik ji4eu jin7gope ji4rp job4s3 joe7kwee joen6sk 5joern 3jong jos6afa jou7kuit juit6sp juk7riem 3jun jun6kre 2k. 1ka k4aal k5aand kaan8s7te. k5aanw kaar7se. kaars7te 5kaart kaar6ti kaar6t5j kade6la kade6sl 4k3adv 5kafee kaf6oef 4kafr 4k3afs 2k1ag 5kagem k4agg 5kagge k4ago k4agr 4kaks kal4k5a kal4k5l 5kalko kal6koo kal4kw kal4s5p kal4st 3kam kam6par kam6ple kams4 3kana kaner5o 3kank kan6ont kan6sko kan6ste kans7te. 3kant kan4t5j kant7om. kan4t5r ka4pak 5kapas kap7inte 5kapit kap6lak kap7lat. kapo4 ka5pri kap7seis kap6spr kap6stek 5kapte 3kar. 5karak 4k5arbe k5arm. ka5roo kar6oor kar4st k5arti kar5to 3kas 4kasg kas7laai kas6maa kas7traa ka5s6tro 5kateg kat7etes kat6har kat6hu. ka4too kat7ryk. kats4 kat5sw kat7uil. kay6aku 2kb kbe6kwi kbout7ji 2kd k3de 1ke kede6lo kee2 keel5a 4keen keep6s5t keer6so keer6ste keë6laa keël7aar 4keff kei6dro keids7pr 4keik keis4 kei5st 4k3eks ke4l5ak ke6lane kel7anem kel7assi kel7eenh 4k5elem ke6linb kel7inbr kel6mag ke4l5ou kels8onde kem6afa 4k3emm ken6aar ken6dra ken7eel. 4k3enj 3kenm 3kenn ke4nou ken7son. kep7laai kep7ler. ke4p5lo kep5sk ker7een. ke4rel ker7els. ker7flan 3kerk ker6kal ker6kin ker6k5or ker6kow ker4kr ker7kris ker6kui kerk7uil kerk5wy ker6m7eng ker6naf ker6nei ker6nen ker4n5o ker7oes. ke4r5on ker6pru ker6set ker4sk ker7skil kers7kom ker6slo ker4sn ker4so ker7son. ker6s5pi kers5w 4kerts 6k5ervar kes6el. ke4sn ke6trol ket6ska 5ketti ke4tu keu6ror 3keus keut7jie key7kleu key7nooi kê4rb 2k1f 2k1g kga7laga kge5la 2kh kha7yeli k4hoi khu7khun 1ki ki2e kie6dro kie7laai kie6mas kiem7as. kie6sent kie4s5k kie7skry kies5l kie6slo kies7tan kie4ta kieu5s ki4kl ki4ma 4kimm 5kind. 6k5indel kin5dr 4kindu k3inh 4kinl 4k5inst kio4s kios7ke. kip7ling ki4rc 3kis. kis7obli ki5s4po ki5s6tew kit4s kits5k 2k1k kk4ag kka7smaa kka5str k3ke kke6nee kker5kr kk4li 1k2l k5ladi 4kland k5lang k4lank klas3 klas6e. kla6sin k5leerd 4kleg k5lege k4l4ei 4klel 4k3len 3kle4p klep7as. 
k4ler 5klere kle5us k3lê 4klied k4lier 4k3lig 4k3lik 5klikk kli6kop k4lim kli6moe k4lin 5klink k5linn kli4p3 k4lis kli7sjee 4klog klo6kon k5loos. k4lou klub5h klu6bre 4klug 2k3ly 2km kman7spo 1k2n k4nap 4knav 5kneg. 4knei 4knem kne4t knet5j 3kni kni6kla kni4p3 knoe4 knoe7te. 4k3nom k5noot k4nop knor7os. 1ko kob7rego 3kod 4k3oef 3koek koe6kei koe4l5o koe5pl 3koer koe4s3 koe7sist 3koë 4koë. k4ofi 4kogi kok7onth kok6skr ko5lag kol6for 3koll 3kolo 3kolw 3kom kom7aan. kom7bina 4komg kom7ghad k5omhu kom7miss kom7saal kom4sp kon7atoo 4k5ontl kon5tr 4k5onts 4k3oog ko4op 5koord 3koö ko4pag kop7ape. kope7la. kop7las. 4koplo 3kopm 4kopn 5kopno ko4po 6kopper kop7uits kord7aan kor6doe kor6foo k3org koring7s k3ork ko3ro 3korp kor4s5l kor7sten kor4t5a kor6tji kort7jie 3kos kos7eetp kos7inko kos7juff ko4sk ko5ski kos5ko kos5pe kos5taa ko4t5ak 5kotel kous7te. kou5tj kovi7ev. ko4vk ko4vs k5owerh 2kp kpro6pa kp4si k2r k5raad 3kra4g kra7gers krag5o k5rak. 4krand 5krank 5kredi 5kreet. k4reëe 4k3reg 4k3rek k3rel k5rese 3kret 4kri4f3 4krig kri4k3 kri6moo 3krin kri4p krip7lee 3kris 4k3riv k5roet k5rolp 5kroon kr4or 4k3row 4krub 3krui 4kruim kru6kas kruk6s. kru4l k5rusp kry6fin krygs5t 4k3ryk kry7sket 3kryt 2k1s ks6aan. ks5agt ks4ak ks5chi k4s5een k4s5erv k4s5eti k3si k6singe ks5ins ks6jari k4skan ks5kin k4skon k4slê ks3li k5s6maak ks5moo k5smou k2s3n k5snob k4sob ks5obj k4s5ond ks5onl ks5opk k2sp k5spek k5spel ks5pen ks5per k5spes k3spi k4spir k5spra k5spri ks5pur k6s5taal ks5tant k6steken k6stemp ks5tens k6stera k6s5teri k6sterr kster6t7j ks5tet ks5tip k7strado k6s5trah k5s6trak ks5tur ks5tuu k3sty ks3ui k4s5ure ks3w k5swei ksyn4 2kt kte6rad kte6ron kter6sp ktes4 k4the k3ti k3to kto6rev k3tu kt4wi 1ku kud7aksi 3kuik 4kuim kuin4 kuins5t 4kuit kul6der kul6plo kul6poo 3kult 3kun 4k3uni kun6sin 3kurs 3kus ku6seen kus7lang kus7node kus7taak kut3r kut6slu kuus6te kuu7ste. 2kv kvang6s 1k2w 4k5waar k3wae k4wan 3k4war kwa7skaa k4week 4kwees 4kweg 4k3wer kwê7lafl kwik3 kwi6kwa 3kwis 2k3wo 3k4wot k3wu ky4fa kyk7uit. k1ys 2k- k-5kli 2l. 4laanb 4laanh 4laard laat7slo laat6str lad7onde la4du 4ladv 2laf la4fa l3afd 2lag l4ag. la4ga la5ga. la5gas l4agi la5gie l4agl lag7lag. l4ago lag5ri lag7some lai6rgo lak7albu lak6led lak7okul lak7oore 4l5aksi lak3w lak7ware lamb7da. la4m5oo lam6pli lam6pol lamp7oli lam6sko lam6sle lam6spe 3land lan6daa land7aar lan6dad lan4d5r land6sta land6s7te lan6gaa lan7gnol lan4go lang7ste langs8te. lan6gur lan4k5a lan4k5l lan6kop lank7ope lan4k5r lan6kwi lan4s5k lan4sp lan4s5t lan6taa lan7taat lan4t5j lan6tre 4lantw lap3r 4l3art las7elek las6ie. la4sn la4so la5sol la4sp las5pa late5r6a 5lawaa lba6spe lbe6kne lbo6wvi lb4re ld5amb ldan7ha. ld5apt l4d5een ld5eis lde6rat lder7os. ld5ins ld3of ld5oor ld6oor. ld5ord l4dow ld5owe l5draa l4d3re lds4k lds6maa ld3so lds6ond ld3sp l4d5uit 1le lec5tr lee2 lee4g3 4leep3 leer5a leer7eis 5leerl leer5o leer5s lee4s lees7tra lee7tjie lee7vaar 4l3eff leg7slot 4leien lei7gleu lei6kaa lei6naa lei6not lei7skoo lei6spa leis7pan lei7spir leit5s lek7loti le4kn lek6suu 3leli 5lengt 4lenj len6sel len6ste lens7te. len6tji lent7jie le5pel lep5li lep7oog. lep7ratw lep5sk lep6szy leps7zy. ler4a 5leraa ler6kam lerk7amp lerk5sp 4lerts le4see le4set les5ete les6hab les7insl le4ske les7kes. les7lie. les7onde le4s5oo le3st les7taak le4ste le5stel les6tin les5tra les4ty les7uur. les7wete l5etan le4tc let5em let7oorb let7rol. let6sko 4leuf le3u4m leun5s leur7eg. 
leu4r5o leute4 lew6ein 6l5ewena 4l5ewig 3ley 1lê lê4rw lf5aan lfa7stra lf3ei l4fek lf5eks l4fen l4f3ev lf4ie l4fin lf5ing lf3l l5flap lf5onde l5fone lf5onts lf3op l2f3r lf6skar lfs7karm lfs7kop. lfs7kuil lfs7nier lfs7oog. lfs7perk lft4w l4f3ui l4f3uu l1g lg4ha l4gli lgo7lagn lg6ordy lgs6mee 1li liat6ji lia7tjie 3lid lid7onts 3lied lie6gli lie6kwy lie5la liers5w lie5sme lie7steg lie7stys lie7swak 4l3i4eu lig7inte lig6las lig6ny. lig7omge lig7rekl lig7riet li4gro lig7skag lig7sona lig5s4p lig5s4w lig7ure. lik7aspa 4likk 5likkew li4kl lik7opsi lik6see lik6sju lik6soo li4k5wa lin6gid lin6gin lin6gli lin6goo ling7ooi 4linh lin4k5l lin4kr l5inli 4l3inr l5insp lin4t5j 4linv li4pa li4p3l lip5la li5plo li6poml lip7omly li4p3r lips4 lip7soom 5lisen l5item liter6t7j lit3j litjie6 lit3r lit7sha. lit4sp lit4s5t lit6zdo ljus4 l1k l4kaf lka6tio lkat7ion lk5een lks7emos lks7epos lk6skap lk4sku lk4sl lk4son lks7ower lk5spe lk5spr lk6stel lks7tell lkter6t lktert7j lk5uil lk5wat lk5wit l4kwy lk5wyf 2l1l l3la llat6ji lla7tjie llei5s lle7knop lle6rui lle6swe lleve7ë. llo5sk lls7moor l4maf lmo6kal lmo4no lm3sm l1n lne4s lob7eend loe6dal loe6det loe7dja. loe6don loe4d5r 4loef loe6gos loeg7os. loers7te loe6skr loe4st lof7opri lof6spa 4logig log4o lo5gop log7sot. log4st log7stok lo4k3l lok7onde lok7swin lo5kwi l5olie. lomer4 lomert5 loms4 4lond lon6gaa long7aar lon6spa lon6ste lons7te. 4lont lon4t5j 3lood loofs5w l4oop 5loopb l5oore 5loosh loo7stra lop7emme 4lopl lop6rys lo3ro lo5ryn 5lose. lo4sj lo4sk los7laat los5ta los7trum los7wikk lo4tak lot7riet lot7ruïn lot7swan lot5ui loui7sa. lou3t lou6wna lou6wre lou6wtj lo4wr low5ry lox7era. lö4jd löj6don lp5aan lpe6nin lp4he l4pon lp5ond l1r 2l1s l4s5aar l4sad ls5arm l4s5asp l4s5eko lse4l lse5le lse6mek ls5erva ls5fei lsg6haa lsi6g5aa ls5jas l4s5kin l4skon l6skorr l4skre l4skru l5s6maak l5s4mee l4snaa ls5opw ls6plet l5s4pli l4spu l3st ls4ti l6stoeg ls5waar ls5wet ls5wyn l4t3ag l4t5amp lta7spie lter6sk lt5oond l5t4wak lu4bh lu4bl lub5le lub7loka lu2g1 lug6er. lu5gub 3lui. 4l5uie. lui7masi lui7slan 4luit luk5raa luk7rake luk6s5pa l5unie. luns6a. lur6pag lus7moor lu3t4h lut6zpu luus6te luu7ste. lva7soor lve5ti lwe4r5a lwe6rui 1ly lyce7um. ly4fe ly4fo lyk7aant lyk7lope lyk7lug. ly4kn ly4k3o lyk6ont lyk7rede lyk5sk ly4ma lym5ag lym7uint 3lyn lyn6aaf ly3pl ly3sp lyt7ring 1m 2m. mac7dona made7us. mae4s m1af 4mafd m4afo ma5fro 4mafs mag6sta ma4hd mah5di mak6lot ma3kw ma5lag mal7thus mamat6j mama7tji man7djar man3g4 man7gona man6n-p man7salm man7spen man6spr man6s7taa man6sto man7ure. map4l ma3ra mar6kek mar6kle mar6kom mar6kon mar4k5r mar6lpr mar4s5k mar4s5t mar6tro mary7na. mas6koo mas6kri mas6kui mas6tek ma5s4tr mat6hes mat7thys may7nard 2mb mb4re 2md mdo6poë mdop7oë. md5soo m3dw md4wa 4meder mee7kole mee5kr 5meel. mee5l4o mee5ne mee7reis mee7reke mee5sl mee7spre meest7al mee6tre me3ga megas4 mega5st 4megt mei6nee mel6aar mel7ekwa mel6kal mel6kjo mel6kla mel6kna mel4k5r melk5s mel4k5w mel7spul mel6too mem7phis men7angs men7eise men7opga men6sky men6snu men4s5p men6s5ta men6tin men4t5j men4t5r me5phi me4rak me6rass mer7asse mer5ast mer7dein me4rei me6reng mer7enge mer7esse mering8s9taa mer6kli mer6kna mer7kopn mer4kw mer7kwar mer5oes mer7onth mer7treë mes4a me4sal me4s5ka me6s7koor me6skor mes7kore me6skro mes7kroe me4sl me5slu mes7moss mes7port me6stas me4s5to mes7ware me4t5ee met7emps meter6so meu6las meul7as. 2m1f mfloer6 2m1g mgang4 mgangs5 mgeper6 mges7per 2mh mh4ei 4mid. mid7osea 4mids mids5t mie6kas mie6kwa mie6ret mie4r5y mie6skr mie6taa mie6tji miet7jie mig6re. 
migu7el. mih7rab. mil6taa min7gopl 4m5insp 6minstu mi4rl mi3sf mis7sêr. mis6tkr mis6tok mit7swa. mi4v- 2m1k mkaar4 mkom6st mkoms7te 2m1l m3la 2mm mma5sp mmas6to mma7stor mmat6ji mma7tjie mme7loor mme6res mme4r5o mmi7stok 2m1n 3mod mode4l mod6jad 3moe moed4s moe6nes moe4st mof6lam mok7alba mole4s5 m5olie. mol4m5a mon6dch mon6dop 4m5onts 3moon moor6da 2mop m3opl 4morg mo3ro mor6sju mor6spo mor4s5t mo5saa mos3f mos7fles mos7inen mo4ske mos7keë. mo5sta 3mot mote7us. mot6heu motor5a mou5fl mou7slip mou6ste mous7te. mou6tek mo9ya. 5môre. 2mp m4pag mpa7gne. mp5agt mpe6lys mpen6to mp4her mp5ops mps7kraa mp5sli mps7taal 2m1r 2m1s ms5app m4s5kat m4skon ms7kraal m5slin m3s4me ms3op ms5pen m6s5taal ms4te m5steg m5steo m3sw 2mt mter6t5j muc7klen mues7li. muf7smaa mui6les 4muit 3mul mum7aant muns4 mun5st mun6tou mur4g mur7gie. 3mus 2mv mvi6tra 2mw myl7afst myl6sla 3myn myn7ent. myn7impa myn7inge m1ys 2m- 2n. 1na 3naal 3n4aam 4n3aan 6n5aardi naar6skr n5admi 4n3adv nae6lys nael7yst n1af n4afi naf6lad nag6aand na6gaap na4g5ap na6gemm nag7emme nag6las n4ago nag5ron nag3s 4nagt n5agtig na4gu nai7set. na5kli nak6lip 4nalf 4nalt 3nam na3p4l na3pr nap7roet 4n3arb 4narea na3s4k nas7klip na3s4l nas6maa na3sp nas6pel nas4pr na5s4ta nas6ten nas6tor na5s4tr na5stu nas6tuu nas4w na5swe na5t4ha nat6jie nat7onde 3nav 5nawee na9yl. 2nb nba6chs 4nche 2nd nda7gesk nd5akt nd5app n4d5arb nd5art n4d5ass nda7stoe nde7eier n4d5een nd5eg. nd3ei nd5eksa ndel8s7kor ndel8s7taa nd5emm nde6rad nde6raf nder7af. nde6rar nde6rem nde6r7ent nde6r7ess nde6rim nder7in. n6deros nde7rosi nde7sill nde6zvo nd5ide ndi5go nd5ins nd3of n5dome nd5omt nd5ond nd5ont n4d5opb ndo5st nd5rak nd5rat n4d3re nd6resd nd5riff nd5riv nd5roe nd5rok nd5rot n6druim nds7ertj nds7geru nd6sinl nds7kenn nds7koor nds7kraa nd6s7laag nds6leg nd5s6maa nds6ons nds7oorn nd5sor nd6spre nd5spu nds7taal nd6stek nds7toet nds7troe nds6wee ndt6wis ndu4e n4d5uit ndu7kraa n4d3ys 1ne nebe6st nec7ticu 5neder nee2 nee4l neel5a n4eem 4n5eend 4n5eenh neer5o neer5s nee6tew neeu3 nee7uur. nee7woor 4neff 4neg. ne5gla n3eie 4neil 4neksp n5ekspe nel6lma ne6loon nel7oond nel6spo 3nem nem6afi nen4sl nep7olie 3ner. ner6faf nerf7af. ne4ros ner7psig ner6sle ner5sw ners6we nes7evan nes6tas ne4ste net7omge ne4tri ne5um. neu7moko neu7raal neu6sji neu6ska neu7stoo neu6toë neut7oë. ne4wt 3nez 1nê 2n1f2 n3fl nfy6tap 2ng n4gad n4g3ak n4g5apt n4g5ase ng5ass n4g5een n4g3ei ng5eks nge6r7aap nge6r5al nger6d5r nge6ret nges7per ng5eten n6geter ng5imp n4g5ink n6ginst ng1l ng6lasu n4goë ng5oë. n4g3on n4g5oog n4gou ngp6seu n6g5raad ng5ran ng5rat n4g3ri ng7sade. ngs7agit ng7sappe ng4see ngs7eise ng4sek ng6serk ngs7erke ng6sero ng6serv ngs6fer ngs7impa ng4sin ngs5int ngs5kan ng7skat. ng7skoel ngs7koep ngs7kop. ng7skors ngs7kort ngs7kurw ngs7kuur ng6s5laa ng6slab ngs7labo ngs7ladi ngs6lep ng7s6loop ngs6lym ng4sn ng5sni ngs6oet ngs6ofa ngs6omm ngs7pelo ngs7pill ng5spoe ng7stabi ngs7tal. ng6steh ng6stei ngs7teik ngs7telg ng6stem ngs7tema ng7stemm ngs7temp ng6s5ten ngs7tese ngs6tin ng6stou ngs7tou. ngs7trap ngs7truu ngs6uie ngs7ure. ngs6wel ng5uit 2nh 1ni nie6kaa nie7knik nie6raa nier7aar niers5w nie6uin nig7aard ni4g5ee ni4g5ie niks7py. 
nik7warm 4n3ind 4ninf nin6g7ele nin6get 4ninh 4n3ins 4n3int 4ninv n5invo n2is nis7alma nis6ara ni5see nis5id nis7insp nis6oms ni4son ni3tr nit7sare nje7glas nje7krui njie6st 2n1k nkaar4 nk5aard n4k3af n4kak nk5aks nk5eff n4kei nk5eie nke6las nke6lit nke6ree nkers6w nker7swe n4k3li n2kn nk3na nk5nes nk5neu n4k3of n5kofi nk5psi nk5rig nk5rol n4k5roo nk5ros nk5rye nks6noe nk4s5om nk3sp nks4t nk3s4w nk5uit n2kw nk5wat n5kwen 2n1l n3la nli4ga nlu4s n1n nna6spo nna7tjie nne6pol nne6sev nni4s 1no 4noef 4noes noe4st no4g5al nog7eens nok5as no9ko. nok7riww 3n4oma n4omm nomo7yi. no4n3a 4n3ond 4n3ont noo6dan n3oog noor6di 4nopb no5pla nop6laa 4noplo nop7omhu nop6rod 4norg n5orga nor6kla 3norm nor7tham nos6kaa no3sp 3nota 3note not4r no5tre 2np n1r nroet6j nroe7tji 2n1s n4s5aar ns3ag ns4an ns5angs ns4e. ns4el nse4pr nser6to nser6tr ns3f ns6feer ns5gel n3si ns4ia ns4ie ns4ig nsi6gar ns7inges nsi6tri ns4iu ns3ja n6skafe n5skap n4skar n4s5kel ns5kin n6skous n4s5kra n2sl n3sla ns5lam n6sland ns3le n4s3li n4s3lo ns6lotg ns6lott n3slu ns4mee n5snar n4sne n5snel ns5noo n5soek n4soë ns5oë. ns5ond nson4t5 ns5onw ns3op ns5par ns5pas n4s5per ns7portr n4spot n6sprat ns6prek n6staak n4staf ns5teh ns6tel. ns6tels n5s6ter. ns6ters ns5trak n3s4tu n4s3tw nst6wyf ns5tyd ns3w ns6weer n4syw ns5ywe 2nt nt5ags nt5ark n4t5art nta5tj n4t5eie nte6ram nte5sm nte6sti n5t4hon nti7kwaa nt5inv nt3ja n4tjo nto7fakt nt7oksie n4t5oli nto6nad nt5ond n4top nt5ops n4t5org nt7radin nt5raf n5tref ntre7kor nt5rim nt5roe nt5rom nt5ron nt5room n5troos n5trou nt5row nt5rui nt5ryk nt3sa nts7inge nt6skan nts7kand nt4s5le nt3s4m nts7onde nts5paa nt3st nt5uit ntu4m3 n4t3ys 1nu 3nua nu4e. nu4es. 2nui nuk4w nu5kwa nul7soms 4nuni nu5skr nu6skra nus4t nu3tr 4n3uur 2nv nva6lis 2nw ny4so nza6cs. o1a o4bag ob5agt o3bo ob5vor oby6nro ock7wyn. oda5gr od5een ode7leie ode7spaa od5lui od3op od3re ods7akke ods7kish od6slak ods7lake ods7lopi ods5oo ods6op. ods6org ods7paar ods7rogg ods6uit ods6waa ods6war ods6wyg odu4k oe4d5aa oe4d5ag oe4dei oed7eie. oed7ette oe6dind oed6ja. oed7onde oe4d5oo oe4d5op oe4d5or oe4d5ro oed7stry oe4du oed3w oed7wyn. oe4f5aa oe4f5an 3oefe oe4fek oe4f5lo oe4f3o oef7rit. oeg7aand oeg7laer oeg7lam. oe4gog oeg3s oeg7yska oei1 oeien6a oeis4 oei7sker oe4kaa oek7eier oe6kerf oek7erf. oek7eval oe4k3l oek6lap oek7olie oek7oort oe4kr oek5rak oek5re oek5ro oek7sten oe4k3w oe4lei oe5leie oe6lemm oel7emme oel7eter oeling6 oelings7 oe6lins oel7inst oel7onbe oel7opri oel6ser oel6skr oel7slik oe4nei oen7eike oe6nert oen7ertj oen7esse oenk4 oen5kl oen7knoo oen5kw oe4n3o oens5ko oen5sm oens4o oen7ysbe oep7aang oep7inst oe6pinv oe4pl oe5pla oep5li oe5plo oep6s5ee oep6s5in oep4sl oe4pu oeras5e oer3k oe4r5on oer5ou oer6ske oer7twak oe5ser oe5sie oe5sje oes7kraa oes7limt oe4s5lo oes7medi oes7pil. oe4s5po oes5ter oes7troe oe4swe oet7aanp oe4t5am oet6he. oe4t3j oe6t5oli oe4t5oo oe4t5ri oe4t5ru oets7kra oet6sna oet6spe oet6s5te oet6sti oet5wy oë5rug of3at of5een 5offis o4f3in of3l o4fok of5oks o4f3om of5psa o2f3r of6sant ofs7iden of6sins ofs7insi of4s5le of4s5oo ofs7paar o4fui o1g og4d. oge4s5t ogge6lo oggel7oo ogi7faal 3ogig og1l og4nat ogo7steo o4gry og5rye og4s. og4sg ogs6inf og5ska ogs7last ogs6ot. ogs4p ogs7pad. ogs7pris ogs6uip ogs6wan oi1 oig6aff oi3k oile4 ois4a oi5sag oi5ski ois6kuu oi5sky oi5sla ois4p ois4t ois7teïs ois6wer oi3tj oje4k ojek5l o4k5aas o4kag ok4am ok4an oke4t oket5j okie4 o4k5ins ok5lat ok6leed ok3n oko7seil oko6sol o4kou o1kr o4k3ro ok4s. ok5sig ok6sins ok4sj oks7kraa oks6lip ok3sp ok3st oks6win o2k3w ok4win o1la ol5agt olf7ent. 
ol5fèg olf6lap olf6sku olf6sme ol4gl olg7onde ol4g5or olg7smee 5olieb olies6m olie7sma 5olifa oli7gny. olk6sem olk6sep olk6sow olk6sti o1lo olo5kw o4l5oor olo5sp olp6hta ol4sar ols7are. olt6zha o1lu olyf5o o2m o3ma om5agt oma7pleg o4m5arm omat6ji oma7tjie o3me ome4s ome5us omg6had o3mi omka5s4 5omloop o3mo omos6fe omo7sfee omp7ligs omp7oor. om6pop. oms6aal om5sla oms6lag oms7perk 5omstan oms6tin o4n3ag o5n4age o3n4an ona6skl ona7sten ond7aap. ond5agt ond7ampt ond7dwaa 4ondes onde7us. on4did on4dom ond7rol. ond5sle ond5so ond5sp ond6spl ond7twis on5eff o4n3ei ong5aan on4g5os o3n4ik o4nil onin6gr onk7ert7j on4kj on5kno onk7omge onk7rugh onne5st o4n5oks ono7sfee on4s. onse4p5 ons7iden ons7kepe ons7kori ons7pamp ons7self ons4t on5ste ons7tol. on6t5aar 3ontd ont7elsi 5ontgi ont5raa on4tri ont7rol. on4t5ru ont7slik 3ontw o1ny ony7okol oo2 oo5agt ood3a oo5deb ood5ee oo5dek ood5er ood5et ood5ok ood7onge ood3r ood6san ood7smoo ood7sorg ood7spui ood7suit ood7swaa ood7swar ood5ui oof1 oof6ser oof6sid oog3 oog6enh oog6les 5oogpu oog7sinf oog7suip oog6-lo ooi6spa ook3 oo4ka ook6sst ool1 ool6and ool7snaa oom1 oo4ma oo4me oo4mo oon1 oon6ag. oong4 oons6ko oon5sl oop1 oo4pa oo4pe oop7klik o4opn oo4po oop6swe oor1 oor6daa oor7daad oor6dap oor7darm oor6dca oor6d5om oor7doop oor7frek oor3i oor7klik 5oorlog oor6ot. 4oort. oor6taa oor6t5in oort7ja. oor6tyl oort7yl. oorve7ë. oos3a oos3k oo4so oos7pers oot1 oo4ta oot6aai oot6en. oo4ti oo4to oo4t3r oo4tu oo3v oö5spo o2p o3pa o4paf op5agt opa6les o4p3am 5opdrag o3pe op5een op9eg. 5openi op4er oper7aar ope7rage op6horu o3pi opie6le opk6lik op3l op4lan op6lein 3oplo 1opn 4opno o3po opo7fagi op5off op5ont opo7sfee 5opperv op3r o5p4rot o4pru op6skre ops6maa ops7neus op5son ops5or op3st op3sw ops7wels o3pu o1ra or5afd or5agt ora6lee ora6loo orat6ji ora7tjie ord7akti ord6arm ord7eksa ord7ierl ord7inst ord5oes or4d5oo ord6oop or6dord ord7orde or4d5ri ord7roma ore7ster o3rê orf7oond 4org. or3gh or4glo 4orgp org7ring 3orië orings8ku or5kaa orkes5 ork7lag. ork6lik ork7ney. orkom6s orkoms7t or6maan or4mj or4nj or5ond or5ong o5rot. oro7thy. or3p4h orp6ski orp4sn ors7aar. ors5ag ors7jurk ors5mo ors7pot. ors7teri ors7tery or4sti ors7trek ort7aan. ort5aar ort5akt or4tar ort6ham 5ortod ort7onde ort7rol. ort7ruk. ort5sw or1u o3r4us o3ry. ory4s os3ag osa7phat os5api osas4 osa7tjie os5cen o3se ose7phin ose7phus o3si osi6nen o4sjo os1k o4s3ka os5kee os5kis os5koe os5kop. os5kor os5kou os5kow os3kr o5skri o4sku o2s3l os3m o2s3n os5oli os5oor o4sor os5ord os3p os4pe os5ste os3t os4t. os4ta os5taf os5tak os5tal os5tar os4td os4th ost7impe os4tm o5strat os6trev ost7revo ost6roo os4tw os4t- os3w os-7lond o2t o3ta ot5akk otas4 ota5st ota7tjie o3te oteek5 ote4s5a ote6sno ote6spr ote4s5t o4the oth7nage o3ti ot3j o3to ot5opm oto6ran oto6ren otor5o otos4 oto5sk ot3re o3tro ot5ryk ots7eila ots7karr ot4s5ko ot4sl ots5la ot4s5po ots7rûe. ots7tee. ot6stek ots7toet ot4stu otte6l5o o3tu o4tui otu6set oua6che oud7agti ou6dakt oud7akti oud6ief oud6ini oud6ink oud7oorg oud7styd ou4du oud7uitg oue6rio ou3g ou4gh ou4gl oug4r ou1i ou5ill ouis6a. ou1k oul7ontl ou3m ou5nyw ou5rei ous6kak ous6ken ou5ski ous6lip ous6ouw ou3s4p ous7pan. ou3s4t ous7tert ou7stiek ous7ties out7aar. out7aksy ou4t5as out7ekst out7emme 5outoma out7omhe ou4t5oo out3r out6rap out6rei ou4wb ouw7rens ou4wv ou4-o ove5re over6y. ovie6v. ov5ket owe6nal owen7al. owe6ral ower7al. ower7kon oy4a. oy4eu ô1 ô2i ôi3e ôre5st 1ö öjd7onde ö1l 1p 2p. 5paaie. 4paanv paar7dui 3pad. pa4da pad6ie. pa4d3r pad6-eg p1af pag6ne. 
3pak pa4ke pak5es paki3 pakket5 pa4ko pak5os 4p5akti 3pale pal7esse pal5fr pal6mol pa4nop pan5sp pan4t5j 3pap pa5pri pap7ryp. paps4 pap7saf. pap7smee 3pa3ra pa6ramn par7amne 3park par6kar par4ko park5r 4parm par4sk par6ste pars7te. 5party pa5sja pa4so p5aspi 3pass 5paste pas7til. pa4taa pat7are. 4patel pat4j pa5tji 3patr pats4 pat5sj pav7lov. 2pb pbe6koo pbreng6 2pd p3dw pd4wa pd4wi 3pe. pe4ak pe4ar 3ped pede4r pe5dof pee2 peel5a peel5u 4peen peet3 3pei 4peie p5eien 4p3eis pek7nek. pe4k3r pel7aktr pe4l5oe pel7oond pels7kra pel6tak pel7yste 4pemm pe6nars 3penn pen7opsl pen6slu pen7smed pen6sop 4pepi 3pera pe6raap per7aap. per7admi per6ary per7asid pera5s6t 5perd. per6dag per6dry per6dwy per7enke per5est 3peri pe4rok 3pers pers5ag per6set per6sje pers7med 5perso per6s7ond pers8tel. 5pertj per6top per7tsja per7uran 3pes. 4pesi pe4sl pe4s5te peu6rel 2p1f p3fl pf4li 2p1g pges4 pge5sp pg4ly 2ph phe6ars p2hi phi5s4t p3hit piek5n pie6raf pie6rom pie4ru pie7samp pie6ska pik7erts pik3s4 6p5inges ping6la pin7glas pin5kl 4pinst pipe4 pipet5 pi3s4k pit3j pit4s5k pit4st pits5te pits7tek pit6suu pit6zko 2pj 2p1k pkom4s5 p2l pla6kal pla6kok 5plan. 5p4lant 4p3lap 3p4las 3p4lat pla4t5r plee4 pleet5 p5leie ple4k 5pleks 4p3lep pleu7ra. p3lê pli4g pli4t5e 4plits p5loos p5lose plu6ska 4plyn 2pm 2p1n p4neum poe6doe 3poei poe4s5t poe6tol po4fa pog7rest pog5sk pok5aa pok7olie 3pol 3pom 4pomh 3pone pon7opbr pon4s5k pons7te. 4pont 5pont. p5onts 4poor. 5poort. 3poot poo6tel po4pag 4popd pop6lek 4pops p5ordo po3ro por6tak 6portso por6t5ui 3pos po4sk pos6tim pos6tko pos6tna po4t5as 3pote pou6ste 2pp ppe6las ppel7as. ppe6rad ppe4ro ppie6sl ppoor6t ppoort7j p2r p4raa 4praak p5raam 5praat pra6esi 5prakt 3pren 3pres pre4s5t 4preu 3p4ria p3rib p4rie4 prie7ël. 5pries5 priet5j 5prins 3prio 3prob 3prod pr4oe 3prof 3p4rog 3proj 4p5rok. 3p4rop pro6pop pro5pr pr4or 3pros pro5sa pro7sopa pru4t 3pry pry4st 2p1s p4sad p4s5aks p4sakt 5psalm. p4sas ps5asp p4sat p4sid 5p4sigi 5psigo p4s5ing p4s5int ps3j p5skaa p4s5ket ps5kof p4skon ps5loj ps3m ps4my p4son p5son. p4s3oo p4sor ps7portr p6stera ps4ti p6stoet p4ston p5swar ps5wer 2pt ptos4 3pub pue4b 4puit 3pun pun4t5j pus7tipo put7adde put7emme put5ji put7rioo 2pv 2pw 3pyn 3pyp pyp7aard pyp3l pyp7las. pyp3o py4pr pys3k 4p5yste py5tha pyt6hon 2p- 1q qu2 qua7driv que6str qui7nas. 2r. raa6min 4raan r6aans. 4r5aard 4rabs rac5te 5radio 4r3adv 4rafd ra4fek 4rafh ra4foe 4raft ra4fu raf7urn. 4rafv 4rafw 2rag rag6aal 5rageb ra5gie ra6ginl rag7inli r4agr rag7raad rag7ryer rag6sab rag6sak rag6sin rag7soep ragu5e rag6wan rai7gne. rak6les rak7oper rak7wate 4ralb ral7eer. ral7oor. 4ralt ram7argi 4r5ameu ram6pla r4anda ran6daa ran7dafe ran6d7akk ran6dem ran6dev ran6doe ran4dr ran4g5o rank5l ran4k5r ran6saa ran6seu ran6sjo ran6sko ran6sor ran4s5p ran4s5t ran6tad ran6tet ran4t5j ran4tr 4r3any rapa7da. ra6pas. ra5pes rap7ewen rap6loï ra4pon rap7onge rap7para rap7rem. rap7righ r4ari 4rarm 4rarr 4rart r5asia ras6tan ra5s4to ra5s4tr ra4su ras5ui ra5t4ho 4r5atom rats5o rbo6lol rd5agti r4d5ame rd5eil r6delek rd5esel rde5sm rd5euro r3do r4d5oli r4d5ont r4d5oon rdô6nne rd7raais r6droma rds6lip rd3so rd3sp rdt6ree rdu6sol rd4wa r5dwar r4dwu r4d5yst 1re 3r2e. 3reak reed5a ree4k reek5e ree7kier ree7loon 4r5eend 4r5eenh 4reenv ree6pes ree6ple ree6pro 4reers ree7sala ree7stra 3reë 4reë. 4reff 3reg. reg7ruk. reg6sen reg6skw regs7om. reg7spre 4reie 4reil rei6nar 4reind rei6noo rei6ser rei6sou reit7ze. re4kn rek7naar rek5ne rek4r rek7spoe 4rekst re4kwa rel4d rel7dae. 
rel7diag rel7dopp reld7ran rel7duik rel7oest ren6agt r4end 5rend. r5enig ren7shaw ren4so ren4sp rens7te. ren6sto ren6str ren6tak ren6tcl ren4t5j ren4t5r 4rerg 4r3er4t rert5j 4resel re4s5ka res7lap. res5lo re3s4m res7ore. re4spi re4ste re5stel re4sti res7toet res7toma res7ure. ret5art reu6kin reu4kl reu4k5o re5usg r4ewa rey5no rê4rh r4faa rf5laa rf5lat r3fle r5f4lie r5flui rf5opv r4fre rf5reg rf4sl r1g r4gak rg5akt rgek6li rge7klik rg5eng rgeper6 rge6rid rg4hu rg5hut rg4len rg4let rg5loo r4g3lu rg4ly rgo6wri rgrie4 rg5ros rg3s rg4s. rgs4p rguit6j rgui7tji 1r2i 3rib. rib7file rie6dio rie6dop rieket5 rie6klo rie5me rie6pri ries6e. rie6skr rie6taa ri4fa rif6ree rig6ska rig7smee rig7styf ri4kl rik6sid rik5sj rik7spad ri4kw rim4s 4r3inf rin6gaa rin7gaan ring7aar 6r7ingest rin7gleb rin4g5r rin6gui 4r3inh rin6kar 4r5inko rink5w rin6kwa 4r3inl 4r3inr 4rins r5inst rin7the. r3inv 4rinve rio7rye. rip4s5t ri5s4ko ris4o ri5son ris5op ris6per rit5ji rit7oond rit6rea ri5tro rit6zri 5rivie rix7tont 2r1k r4kaan rk5een rk5eik rke6lap rke4s3 rke7sel. r6kink. rk5inl rk5leie r4klid r5klip r4k5los rk5nei rk5omg rkom6sti r4k5opd r4k5ops rk5rand rk5red rk5rib rk5rok rks6maa rk5spo rks6uid rk5twi rk5uit r4k5ure rk5wag r4k5wat rk5wee r4k5wet rk5wil rk5win r4kwy r1l r3la rli4g rlo6gja rloo7ple rlo6wpa 2rm r6maanh rma5gô rma6gun rma7klot rma7plaa rma7raan rme6raa rmer7aar rme5sa rmi4l rmos4 rmo7stro rm5uit rmy6nim 2r1n r5nagte rna6spl r4n5ele rne4s rne4t5a rne6tom rn5oor rn6stig rns6tin r3nu roduk5 roe6fas roef7as. roe4f5l roe6fri roe4ga roe7glas roe4n5a roe4pa roep5l roe4p5o roe4s5k roe6sla roes7lag roe4s5t roes5w roe6taa 4roew rog7akke 4rogg rog6lis ro5gna ro4kn rok4r rok7slip rok4s5p ro5kyn ro4l5aa rol7gord r6olien rol7mops romp7op. 4roms ron7aar. ron6dag ron6dak ron6d7er6t7 ron6d5et ron4d5o rond6o. ron4d5r ronds4 rond5sw ron4du ron6gaa ron6kaa ronker6 ron6kert ron4k5l ron6kow ron4kr ronk7wa. ron6ske ron6ste rons7te. ron7stel ron6s5ti r5ontp ron4t5r ron6tui 4rontw roo7dewa roo7dist 3roof 4roog roo7gron roo7mens roo7nag. 4roond roop6la 4r3oor roo7taai r4opa rop7aans rop7anys ro5pee rop6een 4r5open r4opi r4opo r1or r4ora ro3ro ros6afr 4r5osea ros7kie. ro3s4p ro5sta ros6til rot6hsc ro5ton ro3tr rot4sa 4roud rou7floe rou5sk rou3t rovi7ch. rov7nik. ro4w- rox9y. r2ö rpe4s3 r4pid rp5ide r4p5lik rp5opd r4p5reg rps7idio rp4sl rps5no rp6spri rp4stu 2r1r2 rre7glob rre7nagt rre4st rre7stau rres5tr rri6gin rron7kaa 2rs r4sakt rs5alm r4s3ar r5scha rs5eila rseuns6 rs3f r5s6feer rs4ie rsi7flag r6sinda rs5inko r6sinsp r6sinst r5sjam r6skaki r5skap r6skapa r6skapi r6skeus r4s5kin rs6kink r6skoet rs6komm rs6koni r4skor r5skors r5skou rs5krib r6skroo r4skur r4slê rs5lis r4s5los rs5lyf rs6maad rs6maai r5s6maak r6smaat rs6magt rs6mak. r6smake rs6mara rs6mede rs4mee r5smeer rs4mel rsnee5m r5snoo r4snot r4s3og rsonde6 rsonder7 rs5ong rs5oog r6soors rs3op rs4op. rs5ord r1sp r6spaar r4s5par r6spien r5spri r1st r6s5tal. r4stb r6stegn rs6terp r5s6ters rs6tigl rs4tik rs4tis r4s5tit r4stj r6s5toet r6streg r3sty rs3un rsu4s rs5wa. rs5wap rs3we rs6werf rs5wyk 2rt rt5afd rt7angel rt5art r6teend rte6loe rtie4s rting4 rti7saan r4t3om rt5ont rt5opr rt5org r6treda rt5reis rt5ren r4t5rie r6trol. r4t3ry rt6s5aar rts5ond rts6pyn rt5uits rt4wis rt4wyf 3rub ru4ga rug6-sk rui6lek rui6moe 5ruimte rui6niv 4ruit5s ru2k3 ru5kaa ruk6lip ruk6opp ruk6-en rul5aa rul7ape. rul7yste r2um rum7grok 4runi rup7lys. ru5spi rus6tak rus6tka rus6tma rus6tvo rus7uur. rut7oond 2ruu ruus6te ruu7ste. rweg5a rwe6gei rwe6skr rwi7sje. rwoes5 rwy6sak rwy6see ry4fa ryf7ink. 
ry4fo ryf7ode. ryf6sch ryf6sin ryf6ska ryg7stek ryg6str ry6kinr ryk3l ry5klu 3rym. ryn4s5l ryp7arm. ry5ple ryp7lus. ryp7nagt ry4s3a rys6alf ry6sinl rys5pi rys5po ry3st rys4ti ry4su ry4ta ry5tra s2 2s. s'9ie. s’9ie. 1sa 3sa. s4aad saa6dui 3saak 3saal 4s3aan 4s3aap 4s5aard 4s3aas 4s3adm s5adre 4s3adv 2s3af 2sag 3s4ag. sa4gal s3age s4agi s4ag4n sa5gne 3s4ago sag6opa 5sagte. 5s6agtew 3sak. s5akad 3sake 3sakk sa6k5rok sa6krus saks4 s3akt 4s5akti sa6lamm sal7ammo 4salb s5albu 3salf sal6fol 4s3alg 4salm sal6mei sal6tro 3sam 4samba same4n 4s3amp sam6swy 4sana s5anal san6d5ag san6dak sand7akk san6dru 3sang san4g5a san6gre sang7ste 4s5anke sap6hat s5appa s5appel sa3pr 4s3arb 4sarea sar7olie 4s3art 4sase s5asem 4s5aspe sa5spr sat6jie 4s3atl 4s5atta sav7lon. 2sb s3ba s3be s3bi s3bl s3bo s3br s3bu s3by s3ca s3ci 2sd s3da s3de s3di s3do s3dr s3du s3dw sd4wa s3dy 1se 3se. s5eed. see3f see3k s5eenhe see6plo see5ram seer7as. see5rei see7roet see3s4 see7soog see5sw seë7kran s5egpa 4segt 4s3eie 4seik s5eik. 4seila sei6nan 4seind 4seis. sek6huk 4seksa s5eksam sek4s5k sek4s5p sek4st 3sel sel5aan sel7anal se4l5el sel4f5a sel7fabr sel6fer self5i sel6fid 6seliks sel5of sel7oor. sel5op sels7kak sel6slo sel7spen sel6s7taa se4m5ag 4s5emal sem7ekst sen6dan sen7ghor sen7sord sen6str sep6hus ser7afse 4serf s5erf. ser6s5in ser6skr ser6sta sers7taa ser7stad sers7tal ser6tuu s5ervar ses6aan 4se4s5ka ses7lett se4s5po se4st se4s5ur ses7uur. se4sw ses7weke s4et. sewes6t sewe7ste sey7stof 1sê 2s1f2 s3fa s3fi s3fl s3fo s3fr s3fu 2s1g s3ga sga4s5e s3ge sges7per s3gi s3gl sg4ly s3go s3gr s3gu 2sh s3ha s4ha. sha7ron. s3he s3hi s3ho sho7shol s3hu 1si 4s5idea s5idee. 4s5ideo 3sie sie7kwos sies6li sies7mee si5fle sif6reu 4si3go si4gro 4simpl 3sin. si5nag s5indek 5sindr 4sindu sin7enti sin7este 4s3inf s4ing sin6gaa sing7aan 6singes sin7gle. sin6gre s5ingry 4s3inh sin6kch sin4kl 4sinko s5inlig 4s3inm 4s3inr sin6see sin6sin 6sinslu sin5sn 4sinv sip6ho. s4ist sit6are 4s5item si3tr si4tre sit7riem sit6sik sit5sl 3situ siu6mur 1sj 2sja sje6ans 5sjoko 4sjuf 1sk2 2sk. 5skaaf skaar6s 6skaart s5kafe 3skak 6skakeb s5kakt 4s5kalf 4skam 4skant 3skap ska6pin ska6pon skap5r 4skar. 4skara 4skas s5kata ska6tel ska6tit 5skawe 3skem 4skenn 3s4kep ske6pla ske4p5r 4skerk 4s5kern ske7smee 5skets 4skett s5kiem ski7klub s4kil 4s5kilj ski6lol s4kip 5skip. 4skis. 2s3kl 2s3kn 4skod 4s5koeë 5s4koen 4skoer sko6kaa sko4ko 4skoll 4skolo 4skolw 4skomb 4skomi 4skomp s3kon s4kone 4skonf 4skong 4skons 4skont 6skoord s4koot 4skoö 4skos sko6see 5skott 4skow 6skraan 4skrag 4skran sk4re 6skreet s5kres 5skrif 4skrin 5s4krip 4skris 4skrui 3s4kry 4s5kryt sku6dak 4skuik s5kuip 5skuld 4skult 4skun 5skurf 4skus sku6tar 2s3kw sky7drin s4kyf s4kyw 1sl s4laa 4s3lad s4la4g 4slam 4s5land s4lang s5lant 4s3las 4s3lat s4law 4s3led 5sleep 4s5leer 4s3leë s4leë. 4s3lei 4s3lek 4slel 4sleng sle6tji slet7jie s4leu 4s5leue s5leus 5sleut 4s3lew 4sley s3lê 2sli slib3 5slier s3li4g 5slinge slo6bee s4lof 4sloj 4slok 4s5loon 4s5loos 4slop slo4t5a s5loter s4low sl4öj 4s3lug 4slui. 4s5luia s5luih 5sluit 4s3lus 4s3lye 4slyf sly6mui 4slyn sly6paf 4s3lys 2sm s3ma s4mad 5s4mart s3me s5melk 5s4melt s3mi 5smid. smi4s smit4h5 3smok s5mol. s3mon s3mu 3smy smy6nin s4myt 1sn s5naai 4snaam s5naat 4s3na4g snag5e snag6s. 4snam sna6pro s4nar s3nas 4snat 4snav snee7tji 4s3nek 3s4nel 4s3nes 4s3net 4sneu sni6kwa sni6tre s4nob 5snoet 4s3nom 4snoo 4snorm s4nui sny3 s4nye 5snyer sny6-ys 1so 4soef 3s4oek soe6kal soe6kev soe6kol soe4k5u s4oen soe6nys 4soes 5soet. 
soets6t soet7ste sof6agu 4soff so3fr sof6ree sog4l soi6ets sok7opho 3sol s5olie. sol6lme solo5s s4om. s4ome 4s3omg 4s3oms 4s3omv 3s4on. s6onde. s7ondern 3s4one so6neek son7eekh son5eg 4son3g son7kwas so4n5op son4so 4s3ont son7uit. s4onv 4sonw 4soog 3s4ool 4s5oond soon4s 4s5oor. 4s5oorb 4s5oord 4s5oore s5oorg 4s5oorl s5oors 5soort. s5oortr 2s1op 3sop. sop7ekst so5phi sop6hok s4opi 5sopie s3opl 3sopo so3pr 5s4op4ra s3ops s3opt s3opv 4sord 5sorg. 4s5orga sor6gee sor6gra sorg7raa s5orke so5ror 4sorto sos4h so5sha so3th sot4ho so3tr 3sou 4soud s5oude sou6spa sou4s5t sou6taa sou6tak 4souto s5outom so9ya. s5paal 5spaan 4spad 4spak 3s4pan span5o spa6noo 4spap 4spark 4s5pas. 4spast spa6tar 4spatr 4spe. 3s4pee spe4k5l spe6kne spe6lak 5speler s4pell 6spelot 4spen s5pen. 4sperd 4sperk 5sperm. 4spers 4spes. 5spesi 4spet s4peu 4s5piek s4pio 2s3pl 5s4plee 5s4plin 5splits 3s4ply 4spoei 4spol 4spom spon6st 5spoor. 4spoot 5spore 6sportr 4spos 4spote s5poti 5spraak 6sprakt 4s5prat spreek5 s5prem 4spres 5spreu 4sprob 5sproei 4sprof 4sprog 5s4pron 4spros 5spruit 4spry s3ps sp4si 4spub 3s4pul 4spun 4spyn 4spyp 2s1r s3ra sra4e s3re s3ri s3ro s3ru s3ry 2s1s s3sa ss4af ss4ag ssa6rol ssay7is. s3se sse6nas sse4n5i sser4s sse5st s3si ssie6l7ei s3sk ss4ko s3sl s3sm ss4ma ss4me s3sn s3so s3sp ss4pl s3st ss4ti s3su s3sw s3sy 2st. 4sta. s4taa st5aard 5staat 3s4tad 4stafe sta6las stal7as. 4stale sta6lee sta6lem 4stali 5s4talt 5s4tam. sta4m5o 5s4tan. stand8s7ta 4stari 3s4tat s5tatr 4stea s4tedd 5s4tede s4tee4k 5s4teen 4steer 4steg ste6gre st4ei ste6kli ste6lek stel7eks 6stelev 5s6telse stel6tj stelt7ji 5s4tem. 5stemm ste6mom ste6nou 4stent s5teny 4steo 6sterap ste6r5ei 5sterkt 4sterm ste6rom ster5sm ste6rys ster7ys. ste6ser ste6sin ste6ska ste6ski ste4sl ste6sma 2sth s3ti 4stie s4tigt 4s5tiku sti6laa s4timu sting5a 5stinkh s4tip 4stir 4s5tite s3tj 4sto. s5toeg s5toek 5stoel. 6stoela s5toen 4stoer 4stoes s5toev s4tof sto6fek sto6fem sto6fen sto4fo 4stog sto4ka sto6kle sto6kre 3s4tom 4stone 4stoon st4op sto6poo 4stos sto4st 3s4tot s4tow s4traa 4strad s6trak. 4s5trei s4trew 4s5tril 6strins 4stroe s5trog str6ont 6stroon 6strosp 4strot s5trots 4strou 4strov s4tru 5struk 4s5trus 3s4try stu4c 3s4tud 4stuig 3s4tuk stu6kin stu7klep stu7stra stu4to stu6tys st4wi 2sty 4styd 5styf. 3s4tyl sty6loo 1su su2b1 sub3a sub7gids sub7hoof sub7nasi sub5oo sub7reko 3suid sui6daf sui6dei s4uik s4uil suip5l sui6pro 4s3uit s4uiw sul6tin sum7aans 2sun s3uni su4su sut6her su9yo. 2sv s3va s3ve s3vi s3vl s3vo s3vr s3vu s3vy svy7kraa 2sw 5s4waai 5swael s5waen 3s4wak 3s4wa4m swam5a s5wand 5swart s3wat 3s4waw 6sweefs s6weeft s5week 5s4weep 5s4weet 4sweg 5sweis s4welg 3s4we4m 5s4werm swe6tre s3wê s3wi s4wik 4swil s3wo 5swoeg swor6st 4swort s3wr s3wu 3swyg 1sy 5syfer sy3k syn6agr syn6sin syn5sm sy5pla sy3sk 4syst s5yster 4sywe s5ywer 2s- 1t 2t. 3ta. taa6nam 4taand taan5s6f 6taansi 4taant 4t5aanv 3tabb 4tadm ta4d5ro tad6ser tad4s5i tad6ska tads5n tads5p tad6ste tad6s5to 4taf. 4tafd 3tafe tafe4l 4ta4fr 4t3afs 4tafv 4t3afw t4age 4t5agen 4tags 4t3agt tai4l ta5inv tai7peis 5take. tak6lep ta5kli ta4kr tak7rol. 3takt tak7wyn. 3tale tal7eenh tal7emme 5talig tal6kaa tal6sor ta6mind tam7inde tan6dat tan4dr tand7rin tand6sto tang5st 4t5anna tan4sk tap5ro 3tari 4tark 4tarm t5arm. tar5oo t5artik ta4s. t5asem tas4p tas6tas tat7isol tat4j tat7jies ta4t5ra 2tb tba6lun tby6tei 2td tdy7ing. teby6s. 3tedo teek5r tee6lee tee7lood tee6mev tee4mo tee4n 4teenh 3teer tee7raad tee7renv tees4 tee5sk tee5sl tee5st tee7suik 4t3eeu tef7lon. 3tegn teg6ori teg7ren. 
5tehui tei6noo tek2 5tekam tek7bak. 5teken teke8n7aap tek7haak tek7limi 5tekor tek7semp tek6sin teks5k teks5t tek7stel tek5vo te6laap tel7aap. te4l5ak te4l5ap tel7dwei te4l5el tel7fles tel7idee 5telik tel6lho tel7oes. tel7oog. te6loon tel7oond tel7smed tem7asse 4temm tem7omva te6moog tem7oog. 5tempo tena6ge 5tend. t4enh ten6kaa ten6koo ten7ouer ten6san ten7slot 5tenso ten4s5u ten4t5j ten4tr tent7reg ten7treu ten7twen 3teo te3p4h ter7adel 5te4r5af ter6afi ter6ago te4rak ter7akro ter7als. ter7aman ter7amer te6ramp ter7amp. 5terap ter5app ter6arg ter6dro terd7roo ter5een te4r5el te4rem 4terft te6rin. ter7ink. terk7wyn 3term ter7omra ter5ond ter5ont 5teror ter5os. te4r5ow ter7raan 5terrei ter7rein 5terri ter6sas ter6sef ter6seg ter6skop ter7sopn ters6we ter7swee ter7syst ter6tap te4rui ter7uie. te6ryst ter7yste ter6-in 3tesi tes7inst tes7loe. tes7lydi tes7mart tes7meto tes7proe tes7teri tes6tud te5sty te4s3w 3teti teun5s4 2t1f tf4li 2t1g tg4af tge6nap tg4li 2th 3tha. t4has 4thei 3t4hen ther6aa the7raan the5ro 3t2hi 4thit t5hitt th5leh t4ho. tho6nat ths7chil t4hy. tib7niet tie4f tief5o tie6gri tie6kap tie4k5l tie6kom tie4k5r tie4k5w tie6roë tie7smoo 4tif. 4tiff 4tigm tike4 ti4kla tik7lug. tik5ro tik6waa 4til. til7aan. 4tild 4til3s 4tilt 4t3ind tin7erts tin4ga tin7gaal ting7aan ting7eg. tin4g5r ting6su tings7uu 4tinkh tink7wa. 4t3inl 4tinv 5tipe. 5tipes ti4rp tis6aan ti3sj ti3tr tive5r t2j 2tja tje6sni tjo4k 2t1k tki6sob tkom4s5 2t1l t3la tla6sin t3li t3lo t5lont 2tm tme6sti 2t1n tne6ywe tnot4s5 3to. toe7eien 4toef 5toef. toe7gly. toe7klap toe5kr 4toel. 5toe5la 4toele toe6let toe6lop 4toelt 4toep. toe7plei 5toern toe6rou 5toeru 3toes toe5sl toe5sm toe7swel toë7roti to4fa tof7ekst tof7emis to4f5io tof7onde 3tog to4gl tok7las. tok7lett tok5ou t5olie. 4tom. 4tond 3tone ton4gr tong5s 4tont t5ontl t5onts 3toon t5oper to5p4he 4topm top7oorl 4topv tor7eien tor7eval to6rint tor7inte tor6m5ag tor6mom to1s to3sf tos6tro to3tr tot6ste tou3s4 tou7tjie tou6wsr to4wn toy7ota. 2tp tpen6sk tpie6tj tpiet7ji tplek5 tpoor6t tpoort7j tp4sa t5psal t2r 4t5raad t5raam 5tradis 4traf tra6fas traf5o tra6foo 5trakta 5transa 5transf tra6paf trap7as. tra6pew tra7ploï 5trapp trat4 tra5tj 4tratu 4trea t3rec t5reda t5redd t4ree 4treek 4t5reen 4treë 4treg. t5regi tr4ei t5reini tre6ink 4treis tre4ka tre4k5l tre4ko tre4kr trek5w tre6kwi t4rer t5rese tre4st tre4t tre7talb tret5j t5reuk t5rewo 5tribu t5rief trie6kl 5tries tri5g4l 4tring trobo5 4t5roer tro6las trol7as. tro6lin tro6naf troo4 4troom tro6ski 3trou 3trov 4trub 4t3rug truit6j trui7tji 4truk tru7kopp trust5r 4tryk 2t1s ts5agt t4sam t4sar ts5arg t4s5eks t4s5eng tse6raf tse6rys tser7ys. t6singr t4s5ins ts5inv tsi7tsik ts5jae t4skar ts5kok t4skon t4skor t6skrie t6skrip t4skru ts5lam t5s6maak t5s4mee ts4mel t4smo ts5mot t5smou ts5nat ts5neu ts5oon t4s5pas ts5pen ts5pot t4s5pro ts5tea t5stel t6stend ts4ti ts5toer ts5tron t3stu t3su ts5waar t5swar t5swen ts6wing ts5wyn 2tt tta5tj tte6loë tte6ral tte6ram tte7ridg tte4ro tte7ruil tte6slo tte6s5ta tte5us tt4he tting5a tt5uur 3tua 3tue 3tuig 3tuin 4tuits tu4kl tum7aanw tur6kna 4turt tus7aart tus7eter tussen5 tu5têr 2tv tv4li 2tw twee5k twee5l twees4 twerp5o twi6sap 3tyd tyd7aanw tyd7lont tyd7orde tyd3r tyd6sat 3tye tyl7oorw 3tyn tyn7spre ty3o t5yster 2tz tze6nel 2t- u1a ua4e4s uahu6a. 
u4b3ag ubas4 uba7slag ub5eko ub3f ub5gro ub5int u3bl u4blu ub5lun ub3or ub5sch ub3t ub3v ub5wyk uck6len u4d3ar udi6top u4d3re u4dri uds6med ud5sor ud6stoe uds6tyd u1e ueb5lo uer7ione ue4ron uer7onde ues7tria ue5uni u4fri uf5rin ufs6maa ug5aan u4gei uge6ska ug1l u2go ugo6mol ug3or u2g3r ug3s ugs4k ugs4l ug4soo ugs4p ug5ste ugs6tek ug5sto ug5sui ug4ub ui4dag uid7arts ui4d3o ui4dr uid7reek uid7simb uid7skat uid7skel uid7skil uid7slui uid5spr uids6to uid7stoe uid7uits ui1e uiers6w uies6ma uie7smaa ui4f3a uif7eend uif5le ui4go ui4g3r uig3s4 ui4k3a ui4k3l ui4k3r uik7sfee uik6sta uik7uitk ui4kw uil7aap. uil5eks uil7esel ui4l5oo uil7tjan ui4ma uim7oes. ui4na ui5nae uin5ar uin7asyn uind4 uin7drek uin7ivoo uin7kole ui4n3o uin7ser. uin5si uin6ska ui4pl ui4po uip5oo uip7ore. uip7roes ui4s3a uis3j ui4s3o uis5ta ui4t3a uit7dein ui4t5ee ui5ter uit3j uit3r uit4sj uit6-as uk4aa ukaar4 uk5loo u4kof uk5off u4kor u5krat uk3ry ukse4s uk4ski uks4m uks7pop. uk4sv uk4th uku7yama u2k3w u1la ul3ag uld5erk ule6sta ulê6r-w ulf6api ul5ins u1lo ul5oog ulp7eksa ulp7oort ulp7orga ul4saa uls6oms ult7inge ult7uit. u1lu u2m u3ma um5agt umat4 uma5tj u3me umg6rok u3mi um4ie um5ond um4s. ums7feld u3mu u4mui umu4s un5arm uner6st 5unifo 3univ unk7reda un2s3 uns6enb un4sid uns6kap un5s6kol un7s6kool uns7lagg uns7taal un5str unt7eenh un6tinn unt7real unt6roe unts6ko unt5sw unug6s. u1o upi6lop u4ply u4pon u1ra ur5aar ur3af ur5agt ur3ak u5ra5s4t ur5atl ur5een u4ref ur5eff ure5um urf7loop urg6h-s urg7laag u4r5int urke5s urk7nael u1ro uro7pesi urp7agti urr7heim urs7agte ur4s5ek urs6fee ur6sloo urs7mous ur4sno ur4s5oo urs7paar urs6par ur4spr urs6wee urs6wie ur6t5oor ur4top urt7room u1ru u1ry us3ag usa7lag. u4sap us5een use5st ush7die. ush7koal usie4k usiek5l u4s5ins u2s3k us4k. us4kok us4kri u2s3l u3s4lu u4sno u2s3o us4ol us4or us3p us4pie u4spo us5pot usse7us. us3t us4t. ust5akt usta6v. us4tb us4tf us4tg us4th us6tink us4to us6trek us5tru us4ts us3w u2t u3ta ut5adm u4t3ag uta7spek u3te ute7ling ute7rago u3ti u3to u4topl ut5org ut4rek uts7luis ut4spr u3tu u3ty uu2 uur1 uur3i uur6s5in uus3 uus6khe uut3j uwees4 uwe7smit u5yste u3yu û1 1ü1 1v2 vaar6st va4kar vak7eie. va4kes va4ki va5kie va4k3o va4kr val7este val7fees val7funk val7isog valk7oë. val7opto val4sa val6spa vals7pan val6spo val6spr val6s5te val6sth va4n5ee van7effe van6gap van6gre van7uit. vari5et var6kja var6kle var4k5n var4k5o var4k5r vas7ent. vas6oor vas7waai ve2 ve3d vee7kong vee5kr vee4l veer5a vee3s4 ve9ga. veis4 vei5st vel6don vel4d5r vel5oo ve5lop vel5sm vel7sple ve5lum ve3na ve3ne ve5nor ven4t5j ve3nu vep7legi 3ver1 ve9ra. verd4 verdien7s8 ve5reb ve5rek ver6ema ver6ena ve5rend ver6eve ver6flu ver6fru ve3ri ve6r5inn verk4 ver5kl ver5kw ve7rona. ver5sa vers6ki ver7skin vers8kop. ver5sl ver5sm vers6mag ver5sp ver7stal ver5s4w vers8waar ver4t5j ver5tw ves3p ves3t ves7taal ve5suv vet5in vet5ji vet7opga vet3r vets4 vet5sm vid5so vie7ring vig4s vi5rag vi4rg vi4r- vi4sar vi4so vis5ol vis7oog. vis5tr vi3tr vit7rate vlag5s vla4k vla7koek vla6sak vle4k vlek5l v4lie vlie6so voe6rek voe6rui voë4l voël7oë. vog7inho vo4gr vo4lei vol4g5a vol6gon vo4l3o vol7song vol7uit. vol7ywer vond6s7te von6klo voo7doo. voor5s4 vor4s5t vor7ster vou7pops vou5tj vra6gry v4re. vree6tj vreet7ji vrie6sp vri6jze vry7duik vry7kyk. vry3s4 vry5st vry7uit. vu4e. vu9yo. vyf7armi vy3s 1w 2w. 4wael wae6lat 4wa4eo 3wag. wag6las wal6ste wand6sk wan6gaa wan7inge wans4 wan5sm 5wapen 3warm war4s3 wars6e. 4wart war6thi war4t5j war6toë wart7oë. war6too war4t5r war6tys wa5shi was6kaa was5la was6mou was7pan. 
wa5str wate6ra water7aa wat5so we4bad we4bm we4b5ru web7taal web7vlie 3wed we4d3r we5dra we5dry we4dy 5weefse weeg6s. wee5ran wee5sa wee7skaa wee5sko wee5sl wee5s4p wee5st wee6tru 3weg we4ga weg7dof. weg3l we4go weg7orde we4g3r weg3s4 weg5st we4gu 4weis we4k5ro wek7uur. wel7aanb we6larg we4l5ee wel7flan wel7fron wel5oor we4l5op wel6ske we4m3o wem6os. we4nak we4n5as wen7eens wen6sad wen6san wen6sar wen6sei wen6ser wen6ses wen4sk wens5ka wen7skud wen4s5l wen4so wen6spr wen6ste wens7tes wen4s5u wer7esse wer6fom wer6gar wer6gre wer6int 3werk wer6kad wer6k5af wer6kes wer4kl werk7laa wer7klan wer7klap wer7klok wer6koms wer6kon wer7kony wer6kre wer6kro wer6kuu wer4kw wer5kwa wer4ky we4r3o wer6paf wer6pan wer6plo wer6pon wer6poo wer6por wer7smed wer7uil. wes6mit wes7oewe we4sp wes7pemi we4s3t we5sta wes4th we5s4tr wes7waar 3wet. wet7regu wet4s5o wet4s5t wet7wysi 3wêr 4wfon wids7tor wiel5a wie4t5j wi4gr wi4kl wi4ko wiks7te. wil6dag wil6sin wil4sk wind7as. win6del win6dop wind7op. win4dr wind7ruk win6kle win4sk win7sky. win4s5t win7ston win6tap win6tes wip7lig. wip7roos wit5el wit7inkb wit5ji wi4t3o wit5ro 2wj wje6tun 4woeg woe4s wo4l3a wol7invo wol6klo wo4l3o woor6dr wo5rum wou6dag w2r wree4 wri6gon 2ws wur4gr wur4m wurm5a 2ww wwe7rint wyd5oo wyd7uite wyk6was wyn6and wyn5sm 3wys wys7aksi wys3k wys3p wys5ta wyt7raak x'9ie. x’9ie. x1a xe1 x3em xerox7e. x2h x1i xys6te. y1a y4ama ybe6lil y2d y3da y3de yden4s ydgele6 ydg6leu y3di yd3of yd3re yd3ro yd4sin yd6skat yds7krit yds7orde y1e y4enn yer2 yer7hof. yer7maat yer4s yer7ton. yer7vill yer7voël yes6agt 3yeu yf3aa y4far yf5as. yf3l y2f3r yf5ren yf4sl yf4su y1g yg4le yg4li yg4ly yg5saa ygs7kans yg5sko ygs5le ygs6tek yg4sto yg4stu ykaar4 yker6st y4k5ins y2kl yk3li yk4lu yk5lui yk5lus ykoms4 y4kor yk3ri yk4s5ad yk4sk yks7kans yk3sp yks4t yk4su y2k3w y1la y1lo y4loe yls7laar y2n1a yn4a. y4nei y4n5ete yn1g2 yn5kli yn5kwa y2n1o y3n4om yns6agt yn4s5am yns5ins yn5sly yns4m yns7maan yn4s5or yn4sp yns7paar yns4t yn3u yo9yo. y2p y3pa y3pe y3pi y5plan yp3li yp5org y3pr y3pu y1r y4sam ys4ig y4s5ind y4s5ins y2sk ys5kar ys3ko y5s4koo y2s3l y2s3n y2s3o ys4ok ys4ou ys5poe ys3t ys4ta y4s5taf y5s4tel y4ster y3s4tr ys4tu ys3ui ys3w y2t y3ta yt3ag y3te yt4ha y3ti y3tj y3to y3tu y3ty yve7sant 1z 2z. 4zbur zee7rust zen7elle zi2c zi5cat 4zman zook6a. 2zz 2-1 -er4t -ert5j -ha-7ha. 
-k4li -na6gew -s4ti -t4we -5twee",
+ ["compression"]="zlib",
+ ["data"]=… (zlib-compressed binary payload of the pattern data; blob omitted, not representable as text)
+\9¢2¶TÌVlˆDKjä}cÇóî¤2ôD%\28˜©Dîô‡Hòåˆvˆ=»à$l±{0çÁˆ,°¡7’ð\26\11g\26ß\20Ôë-Ýcå\15ò\6qSâ\\¯žgù9T_6|–Å\22l‹-ØÜ¢\26W²}´`Ç–õlúÃ̲Äh½‰ÖwŸ;p{邦Ò-¸€Å¼›Xá\5–í*¦fùáÐ\22\\É\12Nëõ0<õ{éú•w¿ªGÖ%T\20‘”CÒ(N$UãŠRÅb_,zm^»–õX¿ô\0«¤ÀÄ\23þ¶ûø×î_m¯ßå|Eýô_ÍžáE\7\9\8\28!ë[Ó Ä\22bÇ\8w¸'Ÿ™ís¨\9ᨯ¨Ì/æÁ#\7é\18º³µ´õ×íª¿|dIâË\9\13.\18Õ‰¦ð¦Ud~wf\\ôÒ}õ\29‡c݉Zïý§’Æ&Ñ×pGçU™gÛKgÃr’´vŸ#kïœS¹|nt\12ªçÈ\0060Ñ\28©H;öÎç8$_†PúÏÆ1á¡Æ@‡Ï\6,©n÷D…@0ºg6/\31äó<\15±G\26™ôÍ£çÑ€3¹ÐÂY,hÞ`J2Æl(£8ƒ\3îˆõ>ƒ&—À«\23\4h) ¤Ž““py”*Q¡5\28M¿SÐq£).Ç\ +*o\0$Ù\0$× ½Ó\309|zóúæU©åbt\22õøÂ`Ð\11XÜ-Ë\25L\4\"§)ýù/^$ƪüá­X\"\11jjDrÿxÈ \1J]‘é «Æe„DB5AŸùÀ\5h½R\21«~Ú(OÐ\21ž\15O\24d¢\19ânÆÃ’¤kü~æÏtÂÏlö®>ã\2L“ž4'Eâ•ŒŸB\18h|µž\9ɦÙ9ãìÇ;QS\\ÊÚ\19;SjÎõÏ¿OóL]\01221“p`ˆPù\26‚[6xI6hI¾VkÀåkóÙT6ƒ›Wæ\8Uùj®\24»…\15°†ó\30Ù#÷ŒÑ¾W£!\19#ã\19c>\14à‰X!\25odV†Î¡\12¯KÉ=\ +O~AЩ\15Š\25¦\"ÙÙg;†t\4\3Žƒ\13Ë\12EÆ\24æ\00282½.¼ó°\17¤\8 AÇ\23´\24c6Û5\27X5ÙzÔÙ]ld”씚ç¤&/hqûESƒÖóo>4àdør¶«pIM\31÷cçÄÙš¥ª\12ᕲô\".…\00952]ƒ\"\3s\5\17\\\6+Â;š#\7\4*\"©w\21lŸóVé<É­ Oª\11çëg \14xB;3¡¶\23\9g¡i£Ù´ú…‰þš\\ÞŸÐfO¨Vç;_RŸ;îœácämÆ{ÙÎi\22*÷){\14OÁd§\28݃H–C\3'ß­\17šÑnÉ>+Ï>\5Ïw{K3µrâGÞ‹ÏËøìSp¾\13<\28à§Z·LÅÑàä\21þ:ÛÉ”ó(†Ä\14\31\5¸gÞ¬\14ófu˜íì8\15·øú\7{#…†\28ïH]o”ãu€\29Âs\15#²²\8jPWÿ«a8H\\`üpª”7ÏÉ\25qÅ‹ðæhcwÊ¡óXã?\28gäðŒ\17\23\1ê—\13¤Jrô·iÇ\15¤±²u6†Ã”š‚P\8Ö0\7è*5ÛÏ0ÿ=ýß“§I,³ôÂ\18‹\ +‚š+F×…\20\19'`üt(öK´ÉlH\8ÚŠœ0‚ʸ|NNþV\7c\ +ÃX\27þÒÈÖ4¼Ï’ 2ÚHª1 E Œ%+\31/P€;E8ÂT£\24øÌ_£\29Èæ1$¥\12hªn{´©\21\6Ý\19ƒ†ig€€Å\25\27ý^?p\19vݨ‹\25Ñ\17Ú\7\17Ÿ\13I4$NY\12›”œlƒ\11áV)‡ú15¾\7^1sV\23šÈ$û>’}$‡)ÈL®Düü¡\12””³\5¹<¥Óý›TÂ#@\24í½–Õ8˜Ð\27\127ˆaúóo¨\6Â\12Õ\7(q)û`ŸÆ*\18LÝ#\20Þóc;Çʘ\9“\30:Ýø\1/›\31†ßг=âa\30ëi–€¤—£\ +ŽØÉ©\29V™Ö%ØéÐ.ƒŸ\16‚î)j—ˆyö\6|f*eîãü>\31f}ÝéÚF}z\9\7â\5z³0•Cç-þà#D¡í\2\9\9žô|t\6G ÙÊS‘bÎKYÌÕè\18d«8ñ\0146I–\12>‡\20Ÿ²\4œžÔ=Ô\15;+\14f\14}YûÓñ>S‡W¯äÕ\ +¼\20)4L`µÒq\\ù\23è;ÊNç;ç\12ù„(<;ãáQeµ‡…õ\23ÅÌ\12À+Û¸yóL\25t\12z6XOÄ9\3ÏɶfPû tvšW–Âq³›/ó4’fëw‹ŽžI³«L9ó—d¤¸ntmð¿Ñ&ÔÈò†è’\127$uZ\29‹S²½^\15üÓâ\26j4èT=\25ùô?kØ^½ü\0161Ò®b7þNwgÞÑ®%ò`Ë™È̹\28\5ø\4“ÕÜ™®<ôwå\8X\"¨ßý\18\30Ò\28™W¿±…/LW1y \27‡m-Åã\"uá\7\21I„åz6\9²\29\24x´AzÆ\8yðÃzo>‡)rö>\1O.±ªü\"»ºéhÑf\0140<š\26ngL㉰=’\13\29“ŸýõEµÏó<0g%Ü£åô×?W%ö\127þÞå%f·naJYl”˜A[g\6\\¶Ij‰ExAуµÌºJzy7†ÿàÌÚødó~—3ŸŽÄ~\"6sÍæ-œÖ£5\"v\11‘ó`8«Ì{fäìa'C\\g[qzÂ\127L>–_’13¾ÃùÌ$•èã\22=Üø/Ôù³•÷s\25ÀW“‘Ê®¾(V&«j-\27E4(ß\23¦5­¦##Ë\26üysõ\29t0åt;o\0006Ùî„Fç›MBWâñ\8:óÖ\13—@m˜UåpàQügV7vBr©Æ¸0ÈlÆe`ÜÂë†\8˹gþëFÝÀ@ÇQ%\0156püÇ}£¼`òùŒH3ÞÀV㕆©@\6\25-ºéÁ'Uæ^ì\7Ùs´\8ÐAú›O¥${7\11šÏ´‰³gHr×&?Í\6T!\19MŽ\19Ïü\17}lÅE\30\9“\2Òžž\2Å6ãD<Û\2EUƒÃ]Sû»;¸öyñãÚP€ŒáNƒ;|Á™DgU¯}¸ps]~\29pÑŒWƒÓª},P\5Ë\27\12­.ïAh3žãl\27™oÎ\\G\13?UP­˜À‘÷ŒqHÞx^µ\5î>sbÈ·V\7¯@Z_î\29$\28F‹)Gï3×\16ó8÷Ì&™NSË\24õht\20S_rÄß”a^Ü\27v¸“ÑFu\23Ö°Ø\16=±K“­¨ÏåÝk\8²\"_5uaóÒÕî§7æ\1Ûå\7\5§\1›\7³«Òó\0ÃÕŒ\30™Û~Ô$Ь°\0004³×\27Q‹=¢o´²Åz\3 \7g)D¼û“÷MÏÛ\30̬ìÆ ÿŸÛržV\22WMH|Cà\0201p¡°®ÐÇFŸ\9\26†µ¹m\12Ü\30;»¼á\27å\31|£üêï,ůþt‡±zm|ò˸òüÏj+²ÆÆ\12”oDܼÞÎÁ»\25òˆdÂ.S‘“&— |àë-øëÕVƒ#‘\16ÍV¾|(Jéy\21³×Ôï+Rë:\27w<³y–q4ûÄî\25Š~Db*\\™ÐD´\8L7ê´¡µÒ¸w0¡mèÎ(Á– W4B\22…Õ¸•e¿Û«Òò?þɺQþïÿü_Ž|hI8\0\30V:æ\13\13HûwV–:ã`¸wn\14\18\21dÙ…)Õ5’ÅäúØ)óŠ\0^¼Ù¶/XëíÑÙÕòÄZY@ùvèjôÜê>8ìfý#\6¦rI)<ì]vèûút° %‹ýQSô\26Øœ\5B¤0e\29쉶\\NÁõ\18Ñ \16\29ÅnèNZ\16¼›«þÐ\127ÓE\19n¹h(>\30i\7pÍ'Õ\21¬ŠÔ­\ +N·\20;Ý]Â\23h>\28Lá7UôÁ_Oõèå!6Ïå\31\11©S\17w¬?Åa\\Æó²uÆQ—#\23\0269Ç&›3²ï=ûo\12ôœ{W?–\31à_\30€Ó´rYN¨·\24ºã\18¤§~Ð\28¼Ü}rgä\23…qÄálú\0Óh¶®1]÷\0226õ\25Ç°lÀ\11\20­“ÿ­•*8+ßõ\ +ÕI\7žõðÙ\19\12\4¼ÇÏD\9\26Áªà\23å~\13ºJ\3\9Ï×\5\0236\5—Ö…\29­‚ûê‚ÛçÂYkñV”¢«F!¯“q)ê*Ò÷Z¤\9GgÍ_=)Cs‹\30±Í*½Ï&ß#ƒw\4r\20ó9×1\4ØZbå¡€8Ï‚Ëh\16ð©ò\14*a\1+ÊÎäè\2d8x\5ðò‹Y†â=¤ÙI¶¸pBÄýpq>’D™¬\24,‚öŒfëƒ.A+¥þ‘\27Û…\14\3ƒcÞø\3“\26ÿq»ÐŸýd;m”_C$Åh\1À;ŒÅ\21¥›‹{³×iîÅ!1ã\11[\18wÒ\20ÈcÎ0n…ni¨5€ù\30nÂ#T1\ 
+.wÜ8œ‘¥ìHaY.Û\6Sé5úø´\12qQÜYZ\1Añ»Ñ†\0286 ŽŒ9(ÃÈè¾5®\21˜^\"gr\17à@Ê挦ôÆç§âØ*ö\\i]¨è\20³ƒN5—ÙŸŒ—“/@ÃJ\7\22Jé_ö\11\9õgTú\21à›\6\26Ð\2\5h\1±Œ¾\27ƒóƾ1BoŒÐ[£Œ\6\14\0cå~¡ËH(ø\1\8PŠ\11Åb\20o\1)ÊåwÿÃå_Šˆû,\29˜¥åKÂ-\31”ò¨é‹k¿tÁ—äدÄÝ¿š\0267ðvÄQ3ªD8wt„‰Gc´`\20ŽËï®Ï4˜žáÓÊpdg¬x¿§éªÁí\9èY¥\30¸\21±“Q•œ™ìçfÔ\4+Ê\20b?7\ +1'\24\"\18\19w}Ä%3Mm{?%¶|æ]øF(.~Ÿø~‡m\2ã\24,¯qù—³Ç_¦l7q71×Å\24³÷(ô\8’]¶ø‘\2hßœ÷䶉ëôµO%ÐieÕB`+C=]¼ü\4’l±­¹ž\30Ó`ý\25>Þ(UøÆá`Ü\29•ZÛé¡\1õQ¾õÒ¾õlßý\9yMŒ>X\22]ùn\12›‘åsdA²kÈbßnew*áò­Ø—›êä#´pÐ\127:3\22¡Ñ`\15\20\26<Åz\127ÑpdL½S:}ö®—ݙ±9¡Gt\28E•P\"!w¡¼7úÈD?›ëÔ‰S÷çïo7 ¹—\12¿Z°öuÑ6|qG6\21]&ºie´!_+Ðt185;…¾‹\20=‚¾\27Œ–$,_èK\17æ\6,“Ê\17HIçõ“ó\6%\0123¨PR¡ÂÚ}\0295r\0300³Ç\7rŠ1Þq¶­|u•÷9\25\1\15ª\2+3#ç\27óö\17)·\28t°]\1?¬7Áw\14…\30¤|\18W\0ýÚQ²øŸêPSë3mjé\17\2ºÙ¯ù\0>†‰‹êå–9ÜŠ@=Õv8µao F ^ã\2Âü €:¯ÌëèÊ78s<®ðô†íx¡¨\29d•\14 ‚‚¡/ü‚\13~ŸA“Óì_ï(5;—­5.eŒ%3\1XmÂ\4`þi& Ý$L0$±^(Ék}Š\3’’´Š+\28NL…\5-!œñ\03082-\27ø â§‰Wi{6nýxwèU’íz“\18äéG`ÁÅ…\4—[W4Ï‘O5@”òˆx:\28¿ˆ\28qá\"z¸;[}Kc0§ZÈèÜ=†ßàoûj¡²§/â{‚Ý¥~Ë1\1M¬¼™ã\27+š¿oôs£×2@­\127\28É)’îKë69Y™‰>Ô\20uËB\127,\27\3™ŒWZlg2Ò¢…Á¥É®\24~‘\11\18].™ãÈÿ\29\18é=gÂêÐO‘ùnl:AK\0\6QJ’{\0314;mí\ +Ú\4ÒQÁ¶!ûÚÊ=Äq‘W`¾Ò¡’ð\9}ñ.èC_•mW®oÈ'¨M!S¥æÝwÒfZMG\23¦âPTV¶q\26\13ƒ0©­K¬\26œ–\17ú{ÁßFAÍ6ANñ\1,\22ž­'ë\13.\127趺\12Ó<ÉjÂ\25<úÿj\29\ +¢Ç\9jO)ÅŠ]L$’ÿ<«-àÏcl?!ÁôƒÖèêí範…\"ä±å\22“òî\\³\11\11œ*ó”n#©!±ö,™ªžêÀç\26\16\9{Í[¸ÊãEË\0219ø(bžê\21kïK“ÌNÿ=Ìú¿ìö\18…XæÒWLi¯\11òÞ«ïoê\24`¢n¼V;Ó£v\4  |NxÕ³oB8¾\23PÔÐë&.×ý\ +»ßa\ ++z¨5¼Ö+Î×ésÄâh‘ÕØÃÇð’Ç\11LÖŠ{#Ä”j\3(’ßØàUÛh蛩Ÿ»ú©…Yßcý\\OÀ_î«\30Ả®¥Z{å\22º¢¿êŽ°X\25â%½†šçÕ\25\15Íמּ\15\23ã裺×\14^‚)j¥gGÐ\1U¤t„h\24Ôpç`R]¦Ø%g=ôbßj\127‹\19‡ƒÏs\21œÀƒ¨è~ÂÊ\2\15×H‡\22gíÇ=yl¨)<\1272Ë\31Ù)á?f\3½ò×\23¬$‘i@äVŒ÷ˆB'˜žµ7\24\11”½{“\26\127³Ë¦ÈñÉíXK)=…TííEr£ñÇ\21ÿ¥uÛ×'ÃðñDØÉåΨHÀiL_ÉùvûîHò{\8„ßúcùEĦ[Õ^ÏÙv77BRòåeg®D+–3¨â\8N)u\27Vœ$Z¥b†\24Ú}(Ú˜º\11çsj~o³Q*È°¯Aû€u\1öÚMQ\0245uql†ï._³ß\"è`\31àƒjoËòjK2õ„jC7úFŽ1ŠMï\20´{\11\23Ä7„#“{\4£B·ˆˆ›ˆ¹á²e,~Ðìnφûv•ùnzª°ÎwÄ\26znƒ_KgÑAù·êjÙ\18¯\6á\30\29ñ¼À“ð-âøâ‘/®&ÞˆU‰|{\"ÔqÞ£v1\31”÷&\"ÿg†ŒS“-#\30\29C\18Óþ¶ÑhwÁ{'\23\20`\23ê\6Á\\\3¯t‹¬þr²Á¹i‹[ØuP°“þ?\14Ù)ø6øY\ +åJ \29\16™’/Pd½Æ_Þ¨r¤gÒâ\3–ˆˆíü+ç)\30€ñàæ\24‡«êÊ\127@õwšU>ôHaRW?ôTwdŸzG¾YHù\15¦\11ŽD\0~gBÿR?ᡈ·ôÅ{¼@£\31\21‰Y\18ë;\21\21»Q\ +E¾ŽSÿE%â%ê\23ÀŠd–ó¯/-¹Ê^•=|âwPùCßÝ\8A\22ª8\3ä‰\12´»øO\4l§ÅwF:oôåü\12\24f5\"\18Üg\29¼˜\1•\14ÓÍ5c¯‰\14UÚ\11‰ð‚¤ˆ—Ìa4´_gëU\5‹@òê\16d\127\17Ö\8ÃóÖa\14KÁŠöìÅé;›\13õçØÅ‘Þ·\8ýZ2Nb²ªÁçZ£BØ|Bk˹êÉÙ’{Äûs¶\14á.]¦\27â\0®\14‡òMȧ>`Q½«ûo½·o5ä\27C~uÝ·\24wÞ±¾Þ\0175z­0›I¡2ÕN”Cët‰™\21TÒ\ +&i=Ú×Ú¾ªÃ\30\1¨©â*1÷'¸–:Û\28_¯›wšŒÐÏ\26Ãv ÂÅ-rïÓ\26‘\17e[Ep*Ô…„B1ìúìùó„ζ)¶¯dW“%êXØÎæª<»\20,½“fÐ;)–­d\3\13.) 
6\3^®\25†ÑosC\9=±\23æÈc(Î?\14É\25áÓ\0™G¹w®\30Ã@Ê\17N]\20\1\27¯n[H@\14gø%ä \ +DS†ˆð?|\11¶ì¯›ï\19\11Jh\0151c‚¿òÅŠ\1ˆBMF³šœÔºœrô­#€&\2®€½\26\25öoY\20á´Ø\17>‹ôÁ3áx¦\26=‚'ó \9 \4Q`\1 \4Qq;Wø¿{q§õœàA÷Uï̶È\26<‹]ã˜Z쮘\30EÂ(±þ\24%VLîôhXãi8òº\3=½nƉH{\28¾T,q\21X]—\4\314}èÎÅp@(·>\28fgÁ\14Ø\7a\9JÓK˜j›®\\¹m½TŸcg\23dC –,ŠÚÀ‰qÅ\31aÏßcÔŽ¥t\13\3Î\26º›\0袑\23tq¶&‘\26PÍõG\13³nN\ +kèSª\20Ì4§=|ÎJø\18/fÅ@äÕ»øn\25øÊ5\28ñTo•Fnv\5l•Õ0Êñ÷È:â\29¿ê\25·º²\7\ +po#tÝvyXÃb\20âº\28¼û_Œ¯™\2\29ÙÈ'^\ +k@gÔ\13&YTRÏõN\4ëb§Ù2 \0ê(¦¸@\4­å\22è‚$Ô\31Ç0úv®Xí\22Û\9\13ûd'\24(Ø°ª\0072;XTŸï§¯·\2\13_WÍfŽÔ1Jˆ€œñ\21ǽըœÎ>my\9Ì°–óYB15âü‡¹,›‘!ãäo\28oMŒWàrð\17Ñlˆ\17å¶ü%ž¨œ\22ÓΓS\14ÏX¦ƒë©6~Óÿ^\0Œ½'6M-k\4ê%£ëµr\9(B½qÞX‰^ˆ#ÊV¾ÎW„\22\"câ]\20dg\19XÖ¶qU\0135&åvã»Ã\20\9Ö\4ƒã!?0ZãYöþÌKÇp( y4Pñ\26‡Ò¬Q\0246¶à‚XêŒÛ@ÉkFɃKd£·q&ùôýê§Ã›Ã»C#[û¹!ÌÚ†Å#Å\1Ž3\11µj\28íYfù`+ÎÿÔ\0û…nö\7Í\21àÞ5±=è*9’ÙãjœÏ5àì8Íaª\11³Ž\6§®«ªÆ¹¦2Ò~A@Ê)P¿·½~ùƒà0\16†Šmk|H൉\127jÍÓ[°/\13U×Ç f\6ÂF;¬m÷ç¿?v\31\127þ‚ç~÷´Êb\21í`cŸ—1¼\7Ö4\11\25\20¢ÿç„úÿéÍó§ñÄjïˆ\6`‰ˆ>oG†¢IJ‘1\28y‘•–ÊÅÌN\16ëWˆ\26@nËH\27ÍAQý‰SMÝ\21Ïj¢óY_˜#§»\6ÞsSç\15OoÏ‹„“žG\18;ø}\9:Q\0\3Öy£i£Üª\4\\¶\"'F>\25>Nzöê þpU€§\13½Äg¸+~ÚÿÛ\19ßn\23\8Kð³ÿç]|ÚÓ>\ +\20ò\25<ûÉ\14ÔŸ€\2p«ÞËêS\28¦>-Rå\1Ñx¡Ên¤q=D謙\22mÛ­d6…ËyBF<<{\6Ý8ûÆùÚ9\28Ø´ýÇÎÈwŸAFþ8áÂÊÔõ\27_ ÒÏÞô6µ \25Š\14ÓskæTe®\29\20*“—\8úËqkŒqŸ½mÚ pB¾¨üȘ.\17\23ú‰Ãâ,ÒK\\€\6¸ÓÝ+.´\24ò\4žÀOž•?øà\9éÜm\29У”ö\0201n/ä›FÔð¸¬Èzª_Kõý\7Ãa?‡>@WŸƒ˜E…จt9Â7BDG\8ÅQú‚ÄÆ:éÊŸ\7‡8ia¼jNÔ„.¢ÙN!nÒ\28±%ÑÓHXÏÉsàÓ€\5Ï\9+}UÏN\19ESøX\127êÃa‰LÛG”\0”ù¢(Û>\ +Ù¨z\26¶ý\2=Ý\25kÊæ+\5¯ÙCV‘ø.\20ñ\17àÓ\0à8ú}bG;Q8\1šL™Ä\27\16\13÷·¢\1dûLílœ*Eà\16ž Üh-xv°eO<œÛǦÍÓÐ\3\11ôôý[7Êk`]\16\25בKV\1275yõ‡¢ìh¤ýÃ<ã<ø¹Þ°‚Ru*»ûxíö¯÷]ç#Þ\23\14VQO|IL\18oñb%zm~R_ž3úÝË\7n#Ô*ê¢Á\ +)¢{¿.>9ÅŒƒ\ +{C{Hx€\24A¡ó!ñ\11×é_ƒÿd0:ÌË¿ñn\"+mEr¤Wšr,¾2ÞÖã÷½\25 —­ffÓBÙþlT*§Ç\20t\ +\")ê\21Çt/»ZoÜíNÅw.¸küܨñ~˜œ™œi¡˜¤[dQ\5Ó;ׂ§ÚÎgè/ï®sÅ\24\6\28/Ÿ©‰-|ñ–³o31û¿|„æ&+‚dùê߀K$\0269Ù\4œÓø\3U>NÉ*‰e4\1\28\13¼Oû<{ù ëºÑy£9¨¾® ?i—ç¨öeÿ)\23×SÆvüã\0204Eñe»Œ‘\0240-ÛõG^Þæ†ýÅ|›\30¦wß8{9ƒÂ ª—óèÔèfŠrpæˆë\0032\2ÂîñÅõŽÑA96Ã#’¶:\8\"²\21Ió\26\25Û=G¿‡lg\26¯pþþòt\127‰¤ÇR>D¡ekÉ\0187´)–iÚÒé'í\7‹“Ë—O§&Þ‰¾‚¡ \25µ\127ù•ÅÉ¥\127\12N\30øÛvìʆ9#Å‘ãËÞß\1à|¡Ø«€\3HÚÌ\6XÚhõ\31¯U¼4Š\24Y\3îFÛ\6{ˆU-ƒQ:EâÃÆ6†.\29ØS}á/\17£Ã×0\25î\0Ê\17†¨\7€í\30\3l\0\24k\ +Ê8ä\15–>EØBpšmK\ +ÀŽ\31\21h\4­QÎ\13&£º‘³\15õLyÕƒýP¼;bg^¯¡\30}K$¼ñÓiM¾/\4àä”^æþ¥çúÖ'‹â¢\21N^@J(Ô:¡GOø\"z&\"^\11^±~¼Œ¨¯v'Í5\26žF-Öµ{eþ¤pðz³\127©.\28qg“\7!z6û—FÎË+¬\30j5‚º\8Ó¼\30~åüK\23¢Ì\".ä\21J,‡\23SæŠO-½±—Ñá\8\23Bdû×Ê2«\15ù¯0YýëÇdõ¯Ëî¯þc÷ס\127(’Ó_gr÷_úgØýµ²ñó¾[Ují\16Â×O|1M»u\127Ý­˜%®˜%®W>O‘{/Y\28zÂ^aÅZqµ[åÕÛa„I¡½¥®Öv©¢`²\14\17ñÄ·~è×a}³öyOpþb–$bÃûÕj$+z\14Dž 5;¯˜]w‡•vÜ\14\23\30à¦a¿ÞŽ\12»\21A«Ý\2­7›¯F9\"Ô\5\31÷Ý\ +D‘ÃÁáªÐ>í×»ýº\21§5Ô•>ºdl’©,›d&ª)6EÖ1Dúus}³îõ¥¬v›¹Ž\8jëh\31i&ú›ù¦£´¾5\21Ë.†]°©n2þ´Á\14nV»Ää¯FÕ’±V¶IV6>Ö\14sùu²³hÕ¶Ÿy™3¨Ì«]c­\8\8\26[ëüqWgÏÖôhꢴª°+KåEŸÚÖúât<\12n*V’3ÏÆÏw)šo¼­uÞÄèÕ>HÖù –&s+ë^\127ã°aÅ\25Ê_\5ëd©ŠÅ\29´XÌ[Ñw_ñ_°~(â\19ŠÕ~\"WËÄ×Ò¹ô\16ëŒÊaå¤úl»Ä?~í`Ư\22ŠUC\26\29¶m¿{.©äÔ‹k\4\26~µ\14ëD\26¦nµ2¢ËpÕ\23‡`»J°]\17lW\11´+rìj§ì+²ëŠNèŠ\0»\"›®È¦ë\19ì6}é\31¿wûßbœ~\127¶¼ûÖÄmE´ÞN\26]¿‡ý/\5Ç_ðn¿Qèø»\17\14\1\127ëÊ·Ý›\1ÉÞ\2ìíëòvF3àmdü¾Y‡åµ{ó)Æ[\21û°{;²Wÿÿ\0Ë-‰a", + ["length"]=76601, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=10349, diff --git a/tex/context/patterns/lang-agr.lua b/tex/context/patterns/lang-agr.lua index 20560268e..24bc74a45 100644 --- a/tex/context/patterns/lang-agr.lua +++ b/tex/context/patterns/lang-agr.lua @@ -54,7 +54,61 @@ return { }, ["patterns"]={ ["characters"]="'ʼÎάέήίΰαβγδεζηθικλμνξοπÏςστυφχψωϊϋόÏώϲἀá¼á¼‚ἃἄἅἆἇá¼á¼‘ἒἓἔἕἠἡἢἣἤἥἦἧἰἱἲἳἴἵἶἷὀá½á½‚ὃὄὅá½á½‘ὒὓὔὕὖὗὠὡὢὣὤὥὦὧὰάὲέὴήὶίὸόὺύὼώᾀá¾á¾‚ᾃᾄᾅᾆᾇá¾á¾‘ᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾲᾳᾴᾶᾷ᾽᾿ῂῃῄῆῇῒΐῖῗῢΰῤῥῦῧῲῳῴῶῷ’", - 
["data"]="α1 ε1 η1 ι1 ο1 Ï…1 ω1 ÏŠ1 Ï‹1 á¼€1 á¼1 ἂ1 ἃ1 ἄ1 á¼…1 ἆ1 ἇ1 á¼1 ἑ1 á¼’1 ἓ1 á¼”1 ἕ1 á¼ 1 ἡ1 á¼¢1 á¼£1 ἤ1 á¼¥1 ἦ1 ἧ1 á¼°1 á¼±1 á¼²1 á¼³1 á¼´1 á¼µ1 ἶ1 á¼·1 á½€1 á½1 ὂ1 ὃ1 ὄ1 á½…1 á½1 ὑ1 á½’1 ὓ1 á½”1 ὕ1 á½–1 á½—1 á½ 1 ὡ1 á½¢1 á½£1 ὤ1 á½¥1 ὦ1 ὧ1 á½°1 á½²1 á½´1 ὶ1 ὸ1 ὺ1 á½¼1 á¾€1 á¾1 ᾂ1 ᾃ1 ᾄ1 á¾…1 ᾆ1 ᾇ1 á¾1 ᾑ1 á¾’1 ᾓ1 á¾”1 ᾕ1 á¾–1 á¾—1 á¾ 1 ᾡ1 á¾¢1 á¾£1 ᾤ1 á¾¥1 ᾦ1 ᾧ1 á¾²1 á¾³1 á¾´1 ᾶ1 á¾·1 á¿‚1 ῃ1 á¿„1 ῆ1 ῇ1 á¿’1 á¿–1 á¿—1 á¿¢1 ῦ1 ῧ1 ῲ1 ῳ1 á¿´1 ῶ1 á¿·1 ά1 έ1 ή1 ί1 ÏŒ1 Ï1 ÏŽ1 Î1 ΰ1 á½±1 á½³1 á½µ1 á½·1 á½¹1 á½»1 á½½1 á¿“1 á¿£1 α2ι α2ί α2á½· α2ὶ α2á¿– α2á¼° α2á¼´ α2á¼² α2ἶ α2á¼± α2á¼µ α2á¼³ α2á¼· ά3ι á½±3ι ᾶ3ι á¼€3ι á¼3ι α2Ï… α2Ï Î±2á½» α2ὺ α2ῦ α2ὠα2á½” α2á½’ α2á½– α2ὑ α2ὕ α2ὓ α2á½— ά3Ï… á½±3Ï… ᾶ3Ï… á¼€3Ï… á¼3Ï… ε2ι ε2ί ε2á½· ε2ὶ ε2á¿– ε2á¼° ε2á¼´ ε2á¼² ε2ἶ ε2á¼± ε2á¼µ ε2á¼³ ε2á¼· έ3ι á½³3ι á¼3ι ἑ3ι ε2Ï… ε2Ï Îµ2á½» ε2ὺ ε2ῦ ε2ὠε2á½” ε2á½’ ε2á½– ε2ὑ ε2ὕ ε2ὓ ε2á½— έ3Ï… á½³3Ï… ἑ3Ï… á¼3Ï… η2Ï… η2Ï Î·2á½» η2ὺ η2ῦ η2ὠη2á½” η2á½’ η2á½– η2ὑ η2ὕ η2ὓ η2á½— ή3Ï… á½µ3Ï… ῆ3Ï… á¼ 3Ï… ἡ3Ï… ο2ι ο2ί ο2á½· ο2ὶ ο2á¿– ο2á¼° ο2á¼´ ο2á¼² ο2ἶ ο2á¼± ο2á¼µ ο2á¼³ ο2á¼· ÏŒ3ι á½¹3ι á½€3ι á½3ι ο2Ï… ο2Ï Î¿2á½» ο2ὺ ο2ῦ ο2ὠο2á½” ο2á½’ ο2á½– ο2ὑ ο2ὕ ο2ὓ ο2á½— ÏŒ3Ï… á½¹3Ï… á½€3Ï… á½3Ï… Ï…2ι Ï…2ί Ï…2á½· Ï…2ὶ Ï…2á¿– Ï…2á¼° Ï…2á¼´ Ï…2á¼² Ï…2ἶ Ï…2á¼± Ï…2á¼µ Ï…2á¼³ Ï…2á¼· Ï3ι á½»3ι ῦ3ι á½3ι ὑ3ι ου3ι όυ4ι όυ4ι ὀυ4ι á½Ï…4ι ο3υί ο3Ï…á½· ο3Ï…á¿– 4β. 4γ. 4δ. 4ζ. 4θ. 4κ. 4λ. 4μ. 4ν. 4ξ. 4Ï€. 4Ï. 4σ. 4ϲ. 4Ï‚. 4Ï„. 4φ. 4χ. 4ψ. 4' 4ʼ 4᾿ 4β' 4βʼ 4β᾿ 4γ' 4γʼ 4γ᾿ 4δ' 4δʼ 4δ᾿ 4ζ' 4ζʼ 4ζ᾿ 4θ' 4θʼ 4θ᾿ 4κ' 4κʼ 4κ᾿ 4λ' 4λʼ 4λ᾿ 4μ' 4μʼ 4μ᾿ 4ν' 4νʼ 4ν᾿ 4ξ' 4ξʼ 4ξ᾿ 4Ï€' 4πʼ 4π᾿ 4Ï' 4Ïʼ 4Ï᾿ 4σ' 4σʼ 4σ᾿ 4ϲ' 4ϲʼ 4ϲ᾿ 4Ï„' 4τʼ 4τ᾿ 4φ' 4φʼ 4φ᾿ 4χ' 4χʼ 4χ᾿ 4ψ' 4ψʼ 4ψ᾿ .β4 .γ4 .δ4 .ζ4 .θ4 .κ4 .λ4 .μ4 .ν4 .ξ4 .Ï€4 .Ï4 .σ4 .ϲ4 .Ï„4 .φ4 .χ4 .ψ4 2β1β 2γ1γ 2δ1δ 2ζ1ζ 2θ1θ 2κ1κ 2λ1λ 2μ1μ 2ν1ν 2Ï€1Ï€ 2Ï1Ï 2ῤ1á¿¥ 2σ1σ 2ϲ1ϲ 2Ï„1Ï„ 2φ1φ 2χ1χ 2ψ1ψ 2β1γ 2β1ζ 2β1θ 2β1κ 2β1ξ 2β1Ï€ 2β1σ 2β1ϲ 2β1Ï„ 2β1φ 2β1χ 2β1ψ 2γ1β 2γ1ζ 2γ1θ 2γ1κ 2γ1ξ 2γ1Ï€ 2γ1σ 2γ1ϲ 2γ1Ï„ 2γ1φ 2γ1χ 2γ1ψ 2δ1β 2δ1γ 2δ1ζ 2δ1θ 2δ1κ 2δ1λ 2δ1ξ 2δ1Ï€ 2δ1σ 2δ1ϲ 2δ1Ï„ 2δ1φ 2δ1χ 2δ1ψ 2ζ1β 2ζ1γ 2ζ1δ 2ζ1θ 2ζ1κ 2ζ1λ 2ζ1μ 2ζ1ν 2ζ1ξ 2ζ1Ï€ 2ζ1Ï 2ζ1σ 2ζ1ϲ 2ζ1Ï„ 2ζ1φ 2ζ1χ 2ζ1ψ 2θ1β 2θ1γ 2θ1δ 2θ1ζ 2θ1κ 2θ1ξ 2θ1Ï€ 2θ1σ 2θ1ϲ 2θ1Ï„ 2θ1φ 2θ1χ 2θ1ψ 2κ1β 2κ1γ 2κ1δ 2κ1ζ 2κ1θ 2κ1ξ 2κ1Ï€ 2κ1σ 2κ1ϲ 2κ1φ 2κ1χ 2κ1ψ 2λ1β 2λ1γ 2λ1δ 2λ1ζ 2λ1θ 2λ1κ 2λ1μ 2λ1ν 2λ1ξ 2λ1Ï€ 2λ1Ï 2λ1σ 2λ1ϲ 2λ1Ï„ 2λ1φ 2λ1χ 2λ1ψ 2μ1β 2μ1γ 2μ1δ 2μ1ζ 2μ1θ 2μ1κ 2μ1λ 2μ1ξ 2μ1Ï€ 2μ1Ï 2μ1σ 2μ1ϲ 2μ1Ï„ 2μ1φ 2μ1χ 2μ1ψ 2ν1β 2ν1γ 2ν1δ 2ν1ζ 2ν1θ 2ν1κ 2ν1λ 2ν1μ 2ν1ξ 2ν1Ï€ 2ν1Ï 2ν1σ 2ν1ϲ 2νς. 2νϲ. 
2ν1Ï„ 2ν1φ 2ν1χ 2ν1ψ 2ξ1β 2ξ1γ 2ξ1δ 2ξ1ζ 2ξ1θ 2ξ1κ 2ξ1λ 2ξ1μ 2ξ1ν 2ξ1Ï€ 2ξ1Ï 2ξ1σ 2ξ1ϲ 2ξ1Ï„ 2ξ1φ 2ξ1χ 2ξ1ψ 2Ï€1β 2Ï€1γ 2Ï€1δ 2Ï€1ζ 2Ï€1θ 2Ï€1κ 2Ï€1ξ 2Ï€1σ 2Ï€1ϲ 2Ï€1φ 2Ï€1χ 2Ï€1ψ 2Ï1β 2Ï1γ 2Ï1δ 2Ï1ζ 2Ï1θ 2Ï1κ 2Ï1λ 2Ï1μ 2Ï1ν 2Ï1ξ 2Ï1Ï€ 2Ï1σ 2Ï1ϲ 2Ï1Ï„ 2Ï1φ 2Ï1χ 2Ï1ψ 2σ1δ 2ϲ1δ 2σ1ζ 2ϲ1ζ 2σ1λ 2ϲ1λ 2σ1ν 2ϲ1ν 2σ1ξ 2ϲ1ξ 2σ1Ï 2ϲ1Ï 2σ1ψ 2ϲ1ψ 2Ï„1β 2Ï„1γ 2Ï„1δ 2Ï„1ζ 2Ï„1θ 2Ï„1κ 2Ï„1ξ 2Ï„1Ï€ 2Ï„1σ 2Ï„1ϲ 2Ï„1φ 2Ï„1χ 2Ï„1ψ 2φ1β 2φ1γ 2φ1δ 2φ1ζ 2φ1κ 2φ1ξ 2φ1Ï€ 2φ1σ 2φ1ϲ 2φ1Ï„ 2φ1χ 2φ1ψ 2χ1β 2χ1γ 2χ1δ 2χ1ζ 2χ1κ 2χ1ξ 2χ1Ï€ 2χ1σ 2χ1ϲ 2χ1Ï„ 2χ1φ 2χ1ψ 2ψ1β 2ψ1γ 2ψ1δ 2ψ1ζ 2ψ1θ 2ψ1κ 2ψ1λ 2ψ1μ 2ψ1ν 2ψ1ξ 2ψ1Ï€ 2ψ1Ï 2ψ1σ 2ψ1ϲ 2ψ1Ï„ 2ψ1φ 2ψ1χ 4βδ' 4βδ’ 4βδʼ 4βδ᾽ 4βδ᾿ 4βλ' 4βλ’ 4βλʼ 4βλ᾽ 4βλ᾿ 4βμ' 4βμ’ 4βμʼ 4βμ᾽ 4βμ᾿ 4βν' 4βν’ 4βνʼ 4βν᾽ 4βν᾿ 4βÏ' 4βÏ’ 4βÏʼ 4βÏá¾½ 4βÏ᾿ 4γδ' 4γδ’ 4γδʼ 4γδ᾽ 4γδ᾿ 4γλ' 4γλ’ 4γλʼ 4γλ᾽ 4γλ᾿ 4γμ' 4γμ’ 4γμʼ 4γμ᾽ 4γμ᾿ 4γν' 4γν’ 4γνʼ 4γν᾽ 4γν᾿ 4γÏ' 4γÏ’ 4γÏʼ 4γÏá¾½ 4γÏ᾿ 4δμ' 4δμ’ 4δμʼ 4δμ᾽ 4δμ᾿ 4δν' 4δν’ 4δνʼ 4δν᾽ 4δν᾿ 4δÏ' 4δÏ’ 4δÏʼ 4δÏá¾½ 4δÏ᾿ 4ζβ' 4ζβ’ 4ζβʼ 4ζβ᾽ 4ζβ᾿ 4θλ' 4θλ’ 4θλʼ 4θλ᾽ 4θλ᾿ 4λμ' 4λμ’ 4λμʼ 4λμ᾽ 4λμ᾿ 4θν' 4θν’ 4θνʼ 4θν᾽ 4θν᾿ 4θÏ' 4θÏ’ 4θÏʼ 4θÏá¾½ 4θÏ᾿ 4κλ' 4κλ’ 4κλʼ 4κλ᾽ 4κλ᾿ 4κμ' 4κμ’ 4κμʼ 4κμ᾽ 4κμ᾿ 4κν' 4κν’ 4κνʼ 4κν᾽ 4κν᾿ 4κÏ' 4κÏ’ 4κÏʼ 4κÏá¾½ 4κÏ᾿ 4κτ' 4κτ’ 4κτʼ 4κτ᾽ 4κτ᾿ 4μν' 4μν’ 4μνʼ 4μν᾽ 4μν᾿ 4πλ' 4πλ’ 4πλʼ 4πλ᾽ 4πλ᾿ 4πμ' 4πμ’ 4πμʼ 4πμ᾽ 4πμ᾿ 4πν' 4πν’ 4πνʼ 4πν᾽ 4πν᾿ 4Ï€Ï' 4πϒ 4Ï€Ïʼ 4Ï€Ïá¾½ 4Ï€Ï᾿ 4πτ' 4πτ’ 4πτʼ 4πτ᾽ 4πτ᾿ 4σβ' 4σβ’ 4σβʼ 4σβ᾽ 4σβ᾿ 4ϲβ' 4ϲβ’ 4ϲβʼ 4ϲβ᾽ 4ϲβ᾿ 4σγ' 4σγ’ 4σγʼ 4σγ᾽ 4σγ᾿ 4ϲγ' 4ϲγ’ 4ϲγʼ 4ϲγ᾽ 4ϲγ᾿ 4σδ' 4σδ’ 4σδʼ 4σδ᾽ 4σδ᾿ 4ϲδ' 4ϲδ’ 4ϲδʼ 4ϲδ᾽ 4ϲδ᾿ 4σθ' 4σθ’ 4σθʼ 4σθ᾽ 4σθ᾿ 4ϲθ' 4ϲθ’ 4ϲθʼ 4ϲθ᾽ 4ϲθ᾿ 4σκ' 4σκ’ 4σκʼ 4σκ᾽ 4σκ᾿ 4ϲκ' 4ϲκ’ 4ϲκʼ 4ϲκ᾽ 4ϲκ᾿ 4σμ' 4σμ’ 4σμʼ 4σμ᾽ 4σμ᾿ 4ϲμ' 4ϲμ’ 4ϲμʼ 4ϲμ᾽ 4ϲμ᾿ 4σπ' 4σπ’ 4σπʼ 4σπ᾽ 4σπ᾿ 4ϲπ' 4ϲπ’ 4ϲπʼ 4ϲπ᾽ 4ϲπ᾿ 4στ' 4στ’ 4στʼ 4στ᾽ 4στ᾿ 4ϲτ' 4ϲτ’ 4ϲτʼ 4ϲτ᾽ 4ϲτ᾿ 4σφ' 4σφ’ 4σφʼ 4σφ᾽ 4σφ᾿ 4ϲφ' 4ϲφ’ 4ϲφʼ 4ϲφ᾽ 4ϲφ᾿ 4σχ' 4σχ’ 4σχʼ 4σχ᾽ 4σχ᾿ 4ϲχ' 4ϲχ’ 4ϲχʼ 4ϲχ᾽ 4ϲχ᾿ 4φθ' 4φθ’ 4φθʼ 4φθ᾽ 4φθ᾿ 4φλ' 4φλ’ 4φλʼ 4φλ᾽ 4φλ᾿ 4φμ' 4φμ’ 4φμʼ 4φμ᾽ 4φμ᾿ 4φν' 4φν’ 4φνʼ 4φν᾽ 4φν᾿ 4φÏ' 4φÏ’ 4φÏʼ 4φÏá¾½ 4φÏ᾿ 4χθ' 4χθ’ 4χθʼ 4χθ᾽ 4χθ᾿ 4χλ' 4χλ’ 4χλʼ 4χλ᾽ 4χλ᾿ 4χμ' 4χμ’ 4χμʼ 4χμ᾽ 4χμ᾿ 4χν' 4χν’ 4χνʼ 4χν᾽ 4χν᾿ 4χÏ' 4χÏ’ 4χÏʼ 4χÏá¾½ 4χÏ᾿ ἀγω2ν1Î¬Ï á¼€Î³Ï‰2ν1á½±Ï á¼€Î³Ï‰2ν1Î±Ï á¼€Î´Î¹Î­2ξ1 ἀδιέ2ξ1 ἀδιε2ξ1 ἀδυ2σ1ÏŽ ἀδυ2σ1á½½ ἀδυ2ϲ1ÏŽ ἀδυ2ϲ1á½½ ἀδυ2σ1ω ἀδυ2ϲ1ω á¼Î»ÏŒ2σ1 á¼Î»á½¹2σ1 á¼Î»ÏŒ2ϲ1 á¼Î»á½¹2ϲ1 á¼Î»Î¿2σ1 á¼Î»Î¿2ϲ1 ἀμπαλί2ν1 ἀμπαλί2ν1 ἀμπαλι2ν1 ἀμφί2σ1β ἀμφί2σ1β ἀμφί2ϲ1β ἀμφί2ϲ1β ἀμφι2σ1β ἀμφι2ϲ1β ἀμφί2σ1ω ἀμφί2σ1ω ἀμφί2ϲ1ω ἀμφί2ϲ1ω ἀμφι2σ1ÏŽ ἀμφι2σ1á½½ ἀμφι2ϲ1ÏŽ ἀμφι2ϲ1á½½ á¼€2ν1αγής. á¼€2ν1αγής. á¼€2ν1αγήϲ. á¼€2ν1αγήϲ. á¼€2ν1αγὴς. á¼€2ν1αγὴϲ. á¼€2ν1αγήσ. á¼€2ν1αγήσ. á¼€2ν1αγὴσ. á¼€2ν1αγο á¼€2ν1αγεῖ. á¼€2ν1αγῆ. á¼€2ν1αγές. á¼€2ν1αγές. á¼€2ν1αγέϲ. á¼€2ν1αγέϲ. á¼€2ν1αγὲς. á¼€2ν1αγὲϲ. á¼€2ν1αγέσ. á¼€2ν1αγέσ. á¼€2ν1αγὲσ. á¼€2ν1αγεῖς. á¼€2ν1αγεῖϲ. á¼€2ν1αγεῖσ. á¼€2ν1αγῶν. 
á¼€2ν1αγέσι á¼€2ν1αγέσι á¼€2ν1αγέϲι á¼€2ν1αγέϲι á¼€2ν1αγῆ á¼€2ν1άγκυ á¼€2ν1άγκυ á¼€2ν1Î±Î³ÎºÏ á¼€2ν1αγκύ ἄ2ν1αγν á¼€2ν1άγν á¼€2ν1άγν á¼€2ν1αγν á¼€3ν2αγνά á¼€3ν2αγνά á¼€3ν2αγνω á¼€3ν2άγνω á¼€3ν2άγνω á¼€3ν2αγνώ á¼€3ν2αγνώ á¼€2ν1αγÏί á¼€2ν1αγÏá½· á¼€2ν1αγÏá¿– á¼€2ν1αγÏι á¼€2ν1άγωγ á¼€2ν1άγωγ á¼€2ν1αγώγ á¼€2ν1αγώγ á¼€3ν2αγώγι á¼€3ν2αγώγι á¼€3ν2αγωγί á¼€3ν2αγωγί á¼€4ν3αγωγία á¼€4ν3αγωγία á¼€2ν1άδελ á¼€2ν1άδελ á¼€2ν1αδέλ á¼€2ν1αδέλ á¼€2ν1άελπ á¼€2ν1άελπ á¼€2ν1αέλπ á¼€2ν1αέλπ ἄ2ν1αθλ á¼€2ν1άθλ á¼€2ν1άθλ á¼€2ν1αίδ á¼€2ν1αίδ á¼€2ν1αιδ ἄ2ν1αιμ á¼€2ν1αίμ á¼€2ν1αίμ á¼€2ν1αιμ á¼€2ν1αίσθ á¼€2ν1αίσθ á¼€2ν1αίϲθ á¼€2ν1αίϲθ á¼€2ν1αισθ á¼€2ν1αιϲθ á¼€2ν1αισι á¼€2ν1αιϲι á¼€2ν1αισί á¼€2ν1αισί á¼€2ν1αιϲί á¼€2ν1αιϲί á¼€2ν1αίσχ á¼€2ν1αίσχ á¼€2ν1αίϲχ á¼€2ν1αίϲχ á¼€2ν1αισχ á¼€2ν1αιϲχ á¼€2ν1αίτ á¼€2ν1αίτ á¼€2ν1αιτ á¼€2ν1άκαν á¼€2ν1άκαν á¼€2ν1ακάν á¼€2ν1ακάν á¼€2ν1ακόλο á¼€2ν1ακόλο á¼€2ν1ακολο á¼€2ν1αλγ á¼€2ν1αλδ á¼€3ν2αλδα á¼€3ν2αλδήσκ á¼€3ν2αλδήσκ á¼€3ν2αλδήϲκ á¼€3ν2αλδήϲκ á¼€2ν1άλειπ á¼€2ν1άλειπ á¼€2ν1αλείπ á¼€2ν1αλείπ á¼€2ν1αλειφ á¼€2ν1άλειφ á¼€2ν1άλειφ á¼€2ν1αλείφ á¼€2ν1αλείφ á¼€2ν1αλήθ á¼€2ν1αλήθ á¼€2ν1αληθ á¼€2ν1άλθ á¼€2ν1άλθ á¼€2ν1αλθ á¼€2ν1άλιπ á¼€2ν1άλιπ á¼€2ν1αλίπ á¼€2ν1αλίπ á¼€2ν1άλιστ á¼€2ν1άλιστ á¼€2ν1άλιϲτ á¼€2ν1άλιϲτ á¼€2ν1αλίστ á¼€2ν1αλίστ á¼€2ν1αλίϲτ á¼€2ν1αλίϲτ á¼€2ν1αλκ ἄ2ν1αλκ á¼€2ν1άλκ á¼€2ν1άλκ á¼€2ν1άλλ á¼€2ν1άλλ á¼€2ν1αλλ á¼€3ν2άλλο á¼€3ν2άλλο á¼€3ν2άλλε á¼€3ν2άλλε ἄ2ν1αλμ á¼€2ν1άλμ á¼€2ν1άλμ á¼€2ν1αλμ ἄ2ν1αλο á¼€2ν1άλου á¼€2ν1άλου á¼€2ν1άλῳ. á¼€2ν1άλῳ. ἄ2ν1αλε. á¼€2ν1άλοι á¼€2ν1άλοι á¼€2ν1άλων. á¼€2ν1άλων. ἄ2ν1αλτ á¼€2ν1άλτ á¼€2ν1άλτ á¼€2ν1αμάξ á¼€2ν1αμάξ á¼€2ν1αμαξ á¼€2ν1αμάÏÏ„ á¼€2ν1αμάÏÏ„ á¼€2ν1αμαÏÏ„ á¼€2ν1αμέλγ á¼€2ν1αμέλγ á¼€2ν1αμελγ á¼€2ν1αμπ á¼€2ν1άμπ á¼€2ν1άμπ á¼€2ν1αμφ ἀναμφι2σ1 ἀναμφι2ϲ1 á¼€2ν1ανάγκ á¼€2ν1ανάγκ á¼€2ν1αναγκ ἄ2ν1ανδ á¼€2ν1άνδ á¼€2ν1άνδ á¼€2ν1ανθ á¼€3ν2ανθέ á¼€3ν2ανθέ á¼€4ν3ανθές. á¼€4ν3ανθές. á¼€4ν3ανθέϲ. á¼€4ν3ανθέϲ. á¼€4ν3ανθὲς. á¼€4ν3ανθὲϲ. á¼€4ν3ανθέσ. á¼€4ν3ανθέσ. á¼€4ν3ανθὲσ. 
á¼€4ν3ανθέσι á¼€4ν3ανθέσι á¼€4ν3ανθέϲι á¼€4ν3ανθέϲι á¼€2ν1άνιο á¼€2ν1άνιο á¼€2ν1ανίο á¼€2ν1ανίο á¼€2ν1ανίω á¼€2ν1ανίω á¼€2ν1ανταγ á¼€2ν1ανταπ á¼€2ν1αντί á¼€2ν1αντί á¼€2ν1αντι ἀνα2ξ1αγ ἀνά2ξ1αν ἀνά2ξ1αν ἀνα2ξ1άν ἀνα2ξ1άν ἀνα2ξ1αν ἀνά2ξ1Î±Ï á¼€Î½á½±2ξ1Î±Ï á¼€Î½Î±2ξ1Î¬Ï á¼€Î½Î±2ξ1á½±Ï á¼€Î½Î¬2ξ1ιπ ἀνά2ξ1ιπ ἀνα2ξ1ίπ ἀνα2ξ1á½·Ï€ á¼€2ν1αξιόλ á¼€2ν1αξιόλ á¼€2ν1αξιολ á¼€2ν1αξιόπ á¼€2ν1αξιόπ á¼€2ν1αξιοπ á¼€2ν1άξιο á¼€2ν1άξιο á¼€2ν1αξίο á¼€2ν1αξίο á¼€2ν1αξίω á¼€2ν1αξίω á¼€2ν1αξία á¼€2ν1αξία á¼€2ν1αξῖα á¼€2ν1απάλλα á¼€2ν1απάλλα á¼€2ν1απαλλά á¼€2ν1απαλλά á¼€2ν1απάÏÏ„ á¼€2ν1απάÏÏ„ á¼€2ν1απαÏÏ„ á¼€2ν1απαÏδ á¼€2ν1απαύδ á¼€2ν1απαυδ á¼€2ν1απόβ á¼€2ν1απόβ á¼€2ν1αποβ á¼€2ν1απόγ á¼€2ν1απόγ á¼€2ν1απογ á¼€2ν1αποδή á¼€2ν1αποδή á¼€2ν1αποδη á¼€2ν1απόδο á¼€2ν1απόδο á¼€2ν1αποδό á¼€2ν1αποδό á¼€2ν1Î±Ï€ÏŒÎ´Ï á¼€2ν1Î±Ï€á½¹Î´Ï á¼€2ν1Î±Ï€Î¿Î´Ï á¼€2ν1απόλαυ á¼€2ν1απόλαυ á¼€2ν1Î±Ï€Î¿Î»Î±Ï á¼€2ν1απολαύ á¼€2ν1απολό á¼€2ν1απολό á¼€2ν1απολο á¼€2ν1απόλυ á¼€2ν1απόλυ á¼€2ν1Î±Ï€Î¿Î»Ï á¼€2ν1απολύ á¼€2ν1απόν á¼€2ν1απόν á¼€2ν1απον á¼€2ν1απόπ á¼€2ν1απόπ á¼€2ν1αποπ á¼€2ν1απόσ á¼€2ν1απόσ á¼€2ν1απόϲ á¼€2ν1απόϲ á¼€2ν1αποσ á¼€2ν1αποϲ á¼€2ν1απότε á¼€2ν1απότε á¼€2ν1αποτε á¼€2ν1απότμ á¼€2ν1απότμ á¼€2ν1αποτμ á¼€2ν1Î±Ï€ÏŒÏ„Ï á¼€2ν1Î±Ï€á½¹Ï„Ï á¼€2ν1Î±Ï€Î¿Ï„Ï á¼€2ν1αÏά á¼€2ν1αÏá½± á¼€2ν1αÏα á¼€2ν1Î¬Ï á¼€2ν1á½±Ï á¼€2ν1Î±Ï á¼„2ν1Î±Ï á¼€3ν2αÏίτ á¼€3ν2αÏá½·Ï„ á¼€3ν2αÏá¿–Ï„ á¼€3ν2αÏιτ á¼€3ν2αÏÏ€ á¼€3ν2άÏÏ á¼€3ν2á½±ÏÏ á¼€3ν2αÏÏ á¼€4ν3αÏÏαγ á¼€3ν2αÏÏ„ á¼€3ν2αÏÏÏ„ á¼€3ν2αÏύτ á¼€2ν1άσκη á¼€2ν1άσκη á¼€2ν1άϲκη á¼€2ν1άϲκη á¼€2ν1ασκή á¼€2ν1ασκή á¼€2ν1αϲκή á¼€2ν1αϲκή ἄ2ν1ασπι ἄ2ν1αϲπι á¼€2ν1ασπί á¼€2ν1ασπί á¼€2ν1αϲπί á¼€2ν1αϲπί á¼€2ν1άσσατ á¼€2ν1άσσατ á¼€2ν1άϲϲατ á¼€2ν1άϲϲατ á¼€2ν1ασσάτ á¼€2ν1ασσάτ á¼€2ν1αϲϲάτ á¼€2ν1αϲϲάτ á¼€2ν1άστει á¼€2ν1άστει á¼€2ν1άϲτει á¼€2ν1άϲτει á¼€2ν1αστεί á¼€2ν1αστεί á¼€2ν1αϲτεί á¼€2ν1αϲτεί á¼€3ν2αστείβ á¼€3ν2αστείβ á¼€3ν2αϲτείβ á¼€3ν2αϲτείβ á¼€3ν2Î¬ÏƒÏ„ÎµÎ¹Ï á¼€3ν2á½±ÏƒÏ„ÎµÎ¹Ï á¼€3ν2Î¬Ï²Ï„ÎµÎ¹Ï á¼€3ν2á½±Ï²Ï„ÎµÎ¹Ï á¼€3ν2Î±ÏƒÏ„ÎµÎ¯Ï á¼€3ν2Î±ÏƒÏ„Îµá½·Ï á¼€3ν2Î±Ï²Ï„ÎµÎ¯Ï á¼€3ν2Î±Ï²Ï„Îµá½·Ï á¼€3ν2άστειχ á¼€3ν2άστειχ á¼€3ν2άϲτειχ á¼€3ν2άϲτειχ á¼€3ν2αστείχ á¼€3ν2αστείχ á¼€3ν2αϲτείχ á¼€3ν2αϲτείχ á¼€2ν1ατεὶ. á¼€2ν1ατεί. á¼€2ν1ατεί. á¼€2ν1ατὶ. á¼€2ν1ατί. á¼€2ν1ατί. ἄ2ν1ατος. ἄ2ν1ατοϲ. ἄ2ν1ατοσ. á¼€2ν1άτου. á¼€2ν1άτου. á¼€2ν1άτω á¼€2ν1άτω ἄ2ν1ατον. ἄ2ν1ατε ἄ2ν1ατοι. á¼€2ν1άτοις. á¼€2ν1άτοις. á¼€2ν1άτοιϲ. á¼€2ν1άτοιϲ. á¼€2ν1άτοισ. á¼€2ν1άτοισ. á¼€2ν1άττ á¼€2ν1άττ á¼€2ν1αττ á¼€2ν1αÏγ á¼€2ν1αύγ á¼€2ν1αυγ á¼€2ν1αÏδ á¼€2ν1αύδ á¼€2ν1αυδ á¼€3ν2αυδί á¼€3ν2αυδί á¼€3ν2αυδι ἄ2ν1αυδ ἄ2ν1αυλ á¼€2ν1αÏλ á¼€2ν1αύλ á¼€2ν1αÏξ á¼€2ν1αύξ á¼€2ν1αυξ á¼€2ν1αÏχ á¼€2ν1αύχ á¼€2ν1αυχ á¼€2ν1Î±Ï†Î±Î¯Ï á¼€2ν1Î±Ï†Î±á½·Ï á¼€2ν1Î±Ï†Î±Î¹Ï á¼€2ν1αφή á¼€2ν1αφή á¼€2ν1αφὴ á¼€2ν1αφοῦ á¼€2ν1αφῆ á¼€2ν1αφεῖ á¼€2ν1αφοῖ á¼€2ν1εφῶν. 
á¼€2ν1αφέ á¼€2ν1αφέ á¼€2ν1αφὲ á¼€3ν2αφῆν á¼€2ν1αφÏόδ á¼€2ν1αφÏόδ á¼€2ν1αφÏοδ ἄ2ν1Î±Ï†Ï á¼€2ν1Î¬Ï†Ï á¼€2ν1á½±Ï†Ï á¼€2ν1αχÏÏ á¼€2ν1Î±Ï‡á½»Ï á¼€2ν1Î±Ï‡Ï…Ï á¼€Î½Î´ÏÏŒ2σ1α ἀνδÏá½¹2σ1α ἀνδÏÏŒ2ϲ1α ἀνδÏá½¹2ϲ1α ἀνδÏο2σ1α ἀνδÏο2ϲ1α á¼€2ν1έγγ á¼€2ν1έγγ á¼€2ν1εγγ á¼€2ν1έγεÏÏ„ á¼€2ν1έγεÏÏ„ á¼€2ν1εγέÏÏ„ á¼€2ν1εγέÏÏ„ á¼€2ν1εγκ á¼€2ν1έγκ á¼€2ν1έγκ á¼€2ν1εγχ á¼€2ν1εδά á¼€2ν1εδά á¼€2ν1εδα á¼€2ν1έδεσ á¼€2ν1έδεσ á¼€2ν1έδεϲ á¼€2ν1έδεϲ á¼€2ν1εδέσ á¼€2ν1εδέσ á¼€2ν1εδέϲ á¼€2ν1εδέϲ á¼€2ν1έδÏασ á¼€2ν1έδÏασ á¼€2ν1έδÏαϲ á¼€2ν1έδÏαϲ á¼€2ν1εδÏάσ á¼€2ν1εδÏάσ á¼€2ν1εδÏάϲ á¼€2ν1εδÏάϲ á¼€2ν1ÎµÎ­Ï á¼€2ν1Îµá½³Ï á¼€2ν1ÎµÎµÏ á¼€2ν1εθέλ á¼€2ν1εθέλ á¼€2ν1εθελ á¼€2ν1έθι á¼€2ν1έθι á¼€2ν1εθί á¼€2ν1εθί á¼€2ν1είδε á¼€2ν1είδε á¼€2ν1ειδέ á¼€2ν1ειδέ á¼€2ν1είδω á¼€2ν1είδω á¼€2ν1ειδώ á¼€2ν1ειδώ á¼€2ν1είκα á¼€2ν1είκα á¼€2ν1εικά á¼€2ν1εικά á¼€2ν1εικό á¼€2ν1εικό á¼€2ν1εικο á¼€2ν1ειλεί á¼€2ν1ειλεί á¼€2ν1ειλει á¼€2ν1είμα á¼€2ν1είμα á¼€2ν1εί2σ1ακ á¼€2ν1εί2σ1ακ á¼€2ν1εί2ϲ1ακ á¼€2ν1εί2ϲ1ακ á¼€2ν1ει2σ1άκ á¼€2ν1ει2σ1άκ á¼€2ν1ει2ϲ1άκ á¼€2ν1ει2ϲ1άκ á¼€2ν1εί2σ1ο á¼€2ν1εί2σ1ο á¼€2ν1εί2ϲ1ο á¼€2ν1εί2ϲ1ο á¼€2ν1ει2σ1ÏŒ á¼€2ν1ει2σ1á½¹ á¼€2ν1ει2ϲ1ÏŒ á¼€2ν1ει2ϲ1á½¹ á¼€2ν1ει2σ1Ï†Î¿Ï á¼€2ν1ει2ϲ1Ï†Î¿Ï á¼€2ν1εί2σ1Ï†Î¿Ï á¼€2ν1εί2σ1Ï†Î¿Ï á¼€2ν1εί2ϲ1Ï†Î¿Ï á¼€2ν1εί2ϲ1Ï†Î¿Ï á¼€2ν1ει2σ1Ï†ÏŒÏ á¼€2ν1ει2σ1Ï†á½¹Ï á¼€2ν1ει2ϲ1Ï†ÏŒÏ á¼€2ν1ει2ϲ1Ï†á½¹Ï á¼€2ν1έκ á¼€2ν1έκ á¼€2ν1εκ á¼€3ν2έκα á¼€3ν2έκα á¼€3ν2εκάς. á¼€3ν2εκάς. á¼€3ν2εκάϲ. á¼€3ν2εκάϲ. á¼€3ν2εκὰς. á¼€3ν2εκὰϲ. á¼€3ν2εκάσ. á¼€3ν2εκάσ. á¼€3ν2εκὰσ. á¼€3ν2εκτ á¼€4ν3έ2κ1τιτ á¼€4ν3á½³2κ1τιτ á¼€4ν3ε2κ1τίτ á¼€4ν3ε2κ1Ï„á½·Ï„ ἀνε2κ1λιπ ἀνε2κ1λό ἀνε2κ1λό ἀνε2κ1λο á¼€2ν1έλαι á¼€2ν1έλαι á¼€2ν1ελαι á¼€2ν1ελάτ á¼€2ν1ελάτ á¼€2ν1ελατ á¼€2ν1έλεγκ á¼€2ν1έλεγκ á¼€2ν1ελέγκ á¼€2ν1ελέγκ á¼€2ν1ελεγξ á¼€2ν1ελέη á¼€2ν1ελέη á¼€2ν1ελεή á¼€2ν1ελεή á¼€2ν1έλεο á¼€2ν1έλεο á¼€2ν1ελέο á¼€2ν1ελέο á¼€2ν1ελέω á¼€2ν1ελέω á¼€2ν1έλεε á¼€2ν1έλεε á¼€2ν1ελκή á¼€2ν1ελκή á¼€2ν1ελκὴ á¼€2ν1ελκο á¼€2ν1ελκῆ á¼€2ν1ελκές. á¼€2ν1ελκές. á¼€2ν1ελκέϲ. á¼€2ν1ελκέϲ. á¼€2ν1ελκὲς. á¼€2ν1ελκὲϲ. á¼€2ν1ελκέσ. á¼€2ν1ελκέσ. á¼€2ν1ελκὲσ. á¼€2ν1ελκε á¼€2ν1ελκῶ á¼€2ν1ελκέσ á¼€2ν1ελκέσ á¼€2ν1ελκέϲ á¼€2ν1ελκέϲ ἄ2ν1ελκτ á¼€2ν1έλκτ á¼€2ν1έλκτ á¼€2ν1έλκω á¼€2ν1έλκω á¼€2ν1ελκώ á¼€2ν1ελκώ á¼€2ν1έλλ á¼€2ν1έλλ á¼€2ν1έλπι á¼€2ν1έλπι á¼€2ν1ελπί á¼€2ν1ελπί á¼€2ν1Î­Î»Ï…Ï„Ï á¼€2ν1á½³Î»Ï…Ï„Ï á¼€2ν1ελÏÏ„Ï á¼€2ν1ÎµÎ»á½»Ï„Ï á¼€2ν1έμβ á¼€2ν1έμβ á¼€2ν1εμβ á¼€2ν1έμετ á¼€2ν1έμετ á¼€2ν1εμέτ á¼€2ν1εμέτ á¼€2ν1έμπ á¼€2ν1έμπ á¼€2ν1εμπ á¼€2ν1έμφ á¼€2ν1έμφ á¼€2ν1εμφ á¼€2ν1έν á¼€2ν1έν á¼€2ν1εν á¼€3ν2ένει á¼€3ν2ένει á¼€3ν2ενή á¼€3ν2ενή á¼€3ν2έντες. á¼€3ν2έντες. á¼€3ν2έντεϲ. á¼€3ν2έντεϲ. á¼€3ν2έντεσ. á¼€3ν2έντεσ. 
á¼€2ν1ε2ξ1 á¼€3ν2ε3ξ2ίκα á¼€3ν2ε3ξ2ίκα á¼€3ν2ε3ξ2ικά á¼€3ν2ε3ξ2ικά á¼€2ν1Î­Î¿Ï á¼€2ν1á½³Î¿Ï á¼€2ν1ÎµÏŒÏ á¼€2ν1Îµá½¹Ï á¼€2ν1επ á¼€3ν2επν á¼€3ν2επτ á¼€2ν1εÏαστ á¼€2ν1εÏαϲτ á¼€2ν1έÏαστ á¼€2ν1á½³Ïαστ á¼€2ν1έÏαϲτ á¼€2ν1á½³Ïαϲτ á¼€2ν1εÏάστ á¼€2ν1εÏάστ á¼€2ν1εÏάϲτ á¼€2ν1εÏάϲτ á¼€2ν1εÏγ ἄ2ν1εÏγ á¼€2ν1έÏγ á¼€2ν1á½³Ïγ á¼€2ν1έÏεικ á¼€2ν1á½³Ïεικ á¼€2ν1εÏείκ á¼€2ν1εÏείκ á¼€2ν1έÏεισ á¼€2ν1á½³Ïεισ á¼€2ν1έÏειϲ á¼€2ν1á½³Ïειϲ á¼€2ν1εÏείσ á¼€2ν1εÏείσ á¼€2ν1εÏείϲ á¼€2ν1εÏείϲ á¼€2ν1εÏεÏνητ á¼€2ν1εÏεύνητ á¼€2ν1εÏευνήτ á¼€2ν1εÏευνήτ á¼€2ν1εÏί á¼€2ν1εÏá½· á¼€2ν1εÏι á¼€2ν1εÏυθÏίαστ á¼€2ν1εÏυθÏίαστ á¼€2ν1εÏυθÏίαϲτ á¼€2ν1εÏυθÏίαϲτ á¼€2ν1εÏυθÏιάστ á¼€2ν1εÏυθÏιάστ á¼€2ν1εÏυθÏιάϲτ á¼€2ν1εÏυθÏιάϲτ á¼€2ν1έστι á¼€2ν1έστι á¼€2ν1έϲτι á¼€2ν1έϲτι á¼€2ν1εστί á¼€2ν1εστί á¼€2ν1εϲτί á¼€2ν1εϲτί á¼€2ν1έται á¼€2ν1έται á¼€2ν1εταί á¼€2ν1εταί á¼€2ν1έτοι á¼€2ν1έτοι á¼€2ν1ετοί á¼€2ν1ετοί á¼€2ν1ετυ á¼€2ν1έτυ á¼€2ν1έτυ á¼€2ν1ÎµÏ„Ï á¼€2ν1ετύ á¼€2ν1εÏθ á¼€2ν1εύθ á¼€2ν1ευθ ἄ2ν1ευκ á¼€2ν1εÏκ á¼€2ν1εύκ á¼€2ν1ευλ á¼€2ν1εÏÏετ á¼€2ν1εύÏετ á¼€2ν1ευÏέτ á¼€2ν1ευÏέτ á¼€2ν1ευφήμητ á¼€2ν1ευφήμητ á¼€2ν1ευφημήτ á¼€2ν1ευφημήτ á¼€2ν1εÏχ á¼€2ν1εύχ á¼€2ν1ευχ á¼€2ν1εÏξ á¼€2ν1εύξ á¼€2ν1ευξ á¼€2ν1ηυξ á¼€2ν1ηῦγ á¼€2ν1ηυγ á¼€2ν1ευκτ á¼€2ν1έφ á¼€2ν1έφ á¼€2ν1εφ á¼€3ν2εφάλ á¼€3ν2εφάλ á¼€3ν2έφελ á¼€3ν2έφελ á¼€3ν2εφέλ á¼€3ν2εφέλ á¼€2ν1εχέ á¼€2ν1εχέ á¼€2ν1εχε á¼€2ν1έψα á¼€2ν1έψα á¼€2ν1εψά á¼€2ν1εψά á¼€2ν1ηγεμ á¼€2ν1ήδ á¼€2ν1ήδ á¼€2ν1ηδ á¼€2ν1ήκεσ á¼€2ν1ήκεσ á¼€2ν1ήκεϲ á¼€2ν1ήκεϲ á¼€2ν1ηκέσ á¼€2ν1ηκέσ á¼€2ν1ηκέϲ á¼€2ν1ηκέϲ á¼€2ν1ήκο á¼€2ν1ήκο á¼€2ν1ηκό á¼€2ν1ηκό á¼€2ν1ηκο á¼€2ν1ηλάκ á¼€2ν1ηλάκ á¼€2ν1ηλακ á¼€2ν1ήλατος. á¼€2ν1ήλατος. á¼€2ν1ήλατοϲ. á¼€2ν1ήλατοϲ. á¼€2ν1ήλατοσ. á¼€2ν1ήλατοσ. á¼€2ν1ηλάτου á¼€2ν1ηλάτου á¼€2ν1ηλάτω á¼€2ν1ηλάτω á¼€2ν1ήλατον. á¼€2ν1ήλατον. á¼€2ν1ήλατε. á¼€2ν1ήλατε. á¼€2ν1ηλάτοι á¼€2ν1ηλάτοι á¼€2ν1ήλατοι á¼€2ν1ήλατοι á¼€2ν1ήλατα á¼€2ν1ήλατα á¼€2ν1ηλεγ á¼€2ν1ηλεή á¼€2ν1ηλεή á¼€2ν1ηλεὴ á¼€2ν1ηλεο á¼€2ν1ηλεε á¼€2ν1ηλεῶ á¼€2ν1ηλεέ á¼€2ν1ηλεέ á¼€2ν1ηλεὲ á¼€2ν1ηλεῆ á¼€2ν1ηλέη á¼€2ν1ηλέη á¼€2ν1ήλειπ á¼€2ν1ήλειπ á¼€2ν1ηλείπ á¼€2ν1ηλείπ á¼€2ν1ηλή á¼€2ν1ηλή á¼€2ν1ηλὴ á¼€2ν1ηλοῦ á¼€2ν1ηλεῖ á¼€2ν1ηλῆ á¼€2ν1ηλέ á¼€2ν1ηλέ á¼€2ν1ηλὲ á¼€2ν1ηλοῖ á¼€2ν1ηλῶ á¼€2ν1ήλικ á¼€2ν1ήλικ á¼€2ν1ηλίκ á¼€2ν1ηλίκ á¼€2ν1ήλιο á¼€2ν1ήλιο á¼€2ν1ηλίο á¼€2ν1ηλίο á¼€2ν1ηλίω á¼€2ν1ηλίω á¼€2ν1ήλια á¼€2ν1ήλια á¼€2ν1ήλιπ á¼€2ν1ήλιπ á¼€2ν1ηλίπ á¼€2ν1ηλίπ á¼€2ν1ηλιφ á¼€2ν1ήμ á¼€2ν1ήμ á¼€2ν1ημ á¼€2ν1ήνυ á¼€2ν1ήνυ á¼€2ν1Î·Î½Ï á¼€2ν1ηνύ á¼€2ν1ήÏει á¼€2ν1á½µÏει á¼€2ν1ηÏεί á¼€2ν1ηÏεί á¼€2ν1ηÏέμ á¼€2ν1ηÏέμ á¼€2ν1ηÏεμ á¼€2ν1ηÏεφ á¼€2ν1ήÏι á¼€2ν1á½µÏι á¼€2ν1ηÏί á¼€2ν1ηÏá½· á¼€2ν1ήÏοτ á¼€2ν1á½µÏοτ á¼€2ν1ηÏότ á¼€2ν1ηÏότ á¼€2ν1ήσσ á¼€2ν1ήσσ á¼€2ν1ήϲϲ á¼€2ν1ήϲϲ á¼€2ν1ησσ á¼€2ν1ηϲϲ á¼€2ν1ήττ á¼€2ν1ήττ á¼€2ν1ηττ á¼€2ν1ήφα á¼€2ν1ήφα á¼€2ν1ηφα á¼€2ν1ίατ á¼€2ν1ίατ á¼€2ν1ιάτ á¼€2ν1ιάτ á¼€2ν1ίδιο á¼€2ν1ίδιο á¼€2ν1ιδίο á¼€2ν1ιδίο á¼€2ν1ιδίω á¼€2ν1ιδίω á¼€2ν1ίδια á¼€2ν1ίδια á¼€2ν1ιδιτ ἄ2ν1ιδÏος ἄ2ν1ιδÏοϲ ἄ2ν1ιδÏοσ á¼€2ν1ίδÏου á¼€2ν1ίδÏου á¼€2ν1ίδÏω á¼€2ν1ίδÏω ἄ2ν1ιδÏον ἄ2ν1ιδÏε á¼€2ν1ίδÏοι á¼€2ν1ίδÏοι ἄ2ν1ιδÏοι á¼€2ν1ίδÏÏ…Ï„ á¼€2ν1ίδÏÏ…Ï„ á¼€2ν1ιδÏÏÏ„ á¼€2ν1ιδÏύτ á¼€2ν1ιδÏωτ á¼€2ν1ιδÏÏŽÏ„ á¼€2ν1ιδÏώτ á¼€2ν1Î¯ÎµÏ á¼€2ν1á½·ÎµÏ á¼€2ν1Î¹Î­Ï á¼€2ν1Î¹á½³Ï á¼€2ν1ιεÏάτ á¼€2ν1ιεÏάτ á¼€3ν2ιέÏω á¼€3ν2ιέÏω á¼€2ν1ίκ á¼€2ν1ίκ á¼€2ν1ικ ἄ2ν1ικ á¼€3ν2ίκη á¼€3ν2ίκη á¼€3ν2ική á¼€3ν2ική á¼€2ν1ίλ á¼€2ν1ίλ á¼€2ν1ιλ á¼€2ν1ίμαστ á¼€2ν1ίμαστ á¼€2ν1ίμαϲτ á¼€2ν1ίμαϲτ á¼€2ν1ιμάστ á¼€2ν1ιμάστ á¼€2ν1ιμάϲτ á¼€2ν1ιμάϲτ á¼€2ν1ίου á¼€2ν1ίου á¼€2ν1Î¹Î¿Ï á¼€2ν1ιού ἄ2ν1ιππ á¼€2ν1ίππ á¼€2ν1ίππ á¼€2ν1ισ á¼€2ν1ιϲ ἄ2ν1ισ ἄ2ν1ιϲ á¼€2ν1ίσ á¼€2ν1ίσ á¼€2ν1ίϲ á¼€2ν1ίϲ á¼€3ν2ισᾶτ á¼€3ν2ιϲᾶτ á¼€3ν2ισάτ á¼€3ν2ισάτ á¼€3ν2ιϲάτ á¼€3ν2ιϲάτ á¼€3ν2ίστ á¼€3ν2ίστ 
á¼€3ν2ίϲτ á¼€3ν2ίϲτ á¼€3ν2ιστ á¼€3ν2ιϲτ á¼€4ν3ιστοÏη á¼€4ν3ιϲτοÏη á¼€4ν3ιστόÏη á¼€4ν3ιστόÏη á¼€4ν3ιϲτόÏη á¼€4ν3ιϲτόÏη á¼€4ν3ιστοÏή á¼€4ν3ιστοÏá½µ á¼€4ν3ιϲτοÏή á¼€4ν3ιϲτοÏá½µ á¼€3ν2ίσχ á¼€3ν2ίσχ á¼€3ν2ίϲχ á¼€3ν2ίϲχ á¼€4ν3ίσχυ á¼€4ν3ίσχυ á¼€4ν3ίϲχυ á¼€4ν3ίϲχυ ἄ2ν1ιχ á¼€2ν1ίχ á¼€2ν1ίχ á¼€2ν1ιχνεÏÏ„ á¼€2ν1ιχνεύτ á¼€2ν1ίψ á¼€2ν1ίψ á¼€2ν1ιψ á¼€2ν1όδε á¼€2ν1όδε á¼€2ν1οδέ á¼€2ν1οδέ ἄ2ν1οζ á¼€2ν1όζ á¼€2ν1όζ á¼€2ν1οικε á¼€2ν1οικον ἄ2ν1οικ á¼€2ν1οίκ á¼€2ν1οίκ á¼€2ν1οικτί á¼€2ν1οικτί ἄ2ν1οικτ á¼€2ν1οίκτ á¼€2ν1οίκτ á¼€2ν1οίμωκ á¼€2ν1οίμωκ á¼€2ν1οιμώκ á¼€2ν1οιμώκ á¼€2ν1οιμωκ á¼€2ν1οιν ἄ2ν1οιν á¼€2ν1οίν á¼€2ν1οίν ἄ2ν1Î¿Î¹ÏƒÏ„Ï á¼„2ν1Î¿Î¹Ï²Ï„Ï á¼€2ν1Î¿Î¯ÏƒÏ„Ï á¼€2ν1Î¿á½·ÏƒÏ„Ï á¼€2ν1Î¿Î¯Ï²Ï„Ï á¼€2ν1Î¿á½·Ï²Ï„Ï á¼€2ν1όλ á¼€2ν1όλ á¼€2ν1ολ ἄ2ν1ολ á¼€3ν2ολκ á¼€3ν2ολο á¼€2ν1ομβÏί á¼€2ν1ομβÏá½· á¼€2ν1ομβÏá¿– ἄ2ν1ομβÏο á¼€2ν1όμβÏο á¼€2ν1όμβÏο á¼€2ν1όμβÏω á¼€2ν1όμβÏω ἄ2ν1ομβÏα á¼€2ν1ομήλ á¼€2ν1ομήλ á¼€2ν1ομηλ á¼€2ν1ομίλ á¼€2ν1ομίλ á¼€2ν1ομιλ á¼€2ν1όμιχ á¼€2ν1όμιχ á¼€2ν1ομιχ á¼€2ν1όμο á¼€2ν1όμο á¼€2ν1ομό á¼€2ν1ομό á¼€2ν1ομο á¼€3ν2ομοθ á¼€3ν2όμου. á¼€3ν2όμου. á¼€3ν2όμῳ. á¼€3ν2όμῳ. á¼€3ν2όμω. á¼€3ν2όμω. á¼€2ν2όμοιν. á¼€2ν2όμοιν. á¼€3ν2όμων. á¼€3ν2όμων. á¼€3ν2όμοις. á¼€3ν2όμοις. á¼€3ν2όμοιϲ. á¼€3ν2όμοιϲ. á¼€3ν2όμοισ. á¼€3ν2όμοισ. á¼€3ν2όμους. á¼€3ν2όμους. á¼€3ν2όμουϲ. á¼€3ν2όμουϲ. á¼€3ν2όμουσ. á¼€3ν2όμουσ. á¼€2ν1όν á¼€2ν1όν á¼€2ν1ον ἄ2ν1οπ á¼€2ν1όπ á¼€2ν1όπ á¼€2ν1ÏŒÏ á¼€2ν1á½¹Ï á¼€2ν1Î¿Ï á¼„2ν1Î¿Ï á¼€3ν2οÏγάζ á¼€3ν2οÏγάζ ἄ3ν2οÏθ á¼€3ν2ÏŒÏθ á¼€3ν2á½¹Ïθ á¼€3ν2οÏμά á¼€3ν2οÏμά á¼€3ν2οÏÏ„ á¼€3ν2οÏÏ á¼€3ν2οÏá½» á¼€2ν1όσι á¼€2ν1όσι á¼€2ν1όϲι á¼€2ν1όϲι á¼€2ν1οσί á¼€2ν1οσί á¼€2ν1οϲί á¼€2ν1οϲί á¼€2ν1οσι á¼€2ν1οϲι ἄ2ν1οσμ ἄ2ν1οϲμ á¼€2ν1όσμ á¼€2ν1όσμ á¼€2ν1όϲμ á¼€2ν1όϲμ á¼€2ν1ÏŒÏƒÏ†Ï á¼€2ν1á½¹ÏƒÏ†Ï á¼€2ν1ÏŒÏ²Ï†Ï á¼€2ν1á½¹Ï²Ï†Ï á¼€2ν1Î¿ÏƒÏ†Ï á¼€2ν1Î¿Ï²Ï†Ï á¼€2ν1οÏα á¼€2ν1ούα á¼€2ν1ουά á¼€2ν1ουά á¼€2ν1οÏσι á¼€2ν1ούσι á¼€2ν1οÏϲι á¼€2ν1ούϲι á¼€2ν1ουσί á¼€2ν1ουσί á¼€2ν1ουϲί á¼€2ν1ουϲί á¼€2ν1οÏÏ„ á¼€2ν1ούτ á¼€2ν1ουτ á¼€2ν1οφθ á¼€2ν1όχευτ á¼€2ν1όχευτ á¼€2ν1οχεÏÏ„ á¼€2ν1οχεύτ ἄ2ν1οχλ á¼€2ν1όχλ á¼€2ν1όχλ á¼€2ν1οψ ἄ2ν1οψ á¼€2ν1όψ á¼€2ν1όψ ἀντα2ν1ισ ἀντα2ν1ιϲ ἀντα2ν1ίσ ἀντα2ν1ίσ ἀντα2ν1ίϲ ἀντα2ν1ίϲ ἀντει2σ1 ἀντει2ϲ1 ἀντε2κ1 ἀντε2ν1 ἀντε2ξ1 ἀντιδυ2σ1 ἀντιδυ2ϲ1 ἀντιπαÏε2κ1 ἀντιπαÏε2ξ1 ἀντιπÏο2σ1 ἀντιπÏο2ϲ1 ἀντιπÏοσ3κ2Ï… ἀντιπÏοϲ3κ2Ï… ἀντισÏ2ν1 ἀντισύ2ν1 ἀντιϲÏ2ν1 ἀντιϲύ2ν1 ἀντισυ2ν1 ἀντιϲυ2ν1 á¼€2ν1Ï á¼€2ν1á½» á¼€2ν1Ï… á¼€3ν2υμ á¼€3ν2Ïσ á¼€3ν2ύσ á¼€3ν2Ïϲ á¼€3ν2ύϲ á¼€3ν2υσ á¼€3ν2υϲ á¼€2ν1υπέ2Ï1 á¼€2ν1υπέ2Ï1 á¼€2ν1υπε2Ï1 ἄ2ν1ῳδ á¼€2ν1ῴδ á¼€2ν1ώδυ á¼€2ν1ώδυ á¼€2ν1Ï‰Î´Ï á¼€2ν1ωδύ á¼€2ν1ώι á¼€2ν1ώι á¼€2ν1ωί á¼€2ν1ωί á¼€2ν1ώλ á¼€2ν1ώλ á¼€2ν1ωλ á¼€2ν1ώμ á¼€2ν1ώμ á¼€2ν1ωμ á¼€2ν1ών á¼€2ν1ών á¼€2ν1ων á¼€2ν1Ï‰Ï á¼„2ν1Ï‰Ï á¼€2ν1ÏŽÏ á¼€2ν1á½½Ï á¼„2ν1ωτο á¼€2ν1ώτο á¼€2ν1ώτο á¼€2ν1ωφέλ á¼€2ν1ωφέλ á¼€2ν1ωφελ á¼€2ν1ώχυ á¼€2ν1ώχυ á¼€2ν1Ï‰Ï‡Ï á¼€2ν1ωχύ ἀπα2ν1αι ἀπά2ν1ου ἀπά2ν1ου ἀπα2ν1οÏÏ á¼€Ï€Î±2ν1Î¿á½»Ï á¼Ï€Î±2ξ1 ἀπε2κ1λ á¼Ï€Îµ2Ï1 ἀποσυ2ν1 ἀποϲυ2ν1 ἀπÏÏŒ2σ1 ἀπÏá½¹2σ1 ἀπÏÏŒ2ϲ1 ἀπÏá½¹2ϲ1 ἀπÏο2σ1 ἀπÏο2ϲ1 ἀπÏÏŒ3σ2κε ἀπÏá½¹3σ2κε ἀπÏÏŒ3ϲ2κε ἀπÏá½¹3ϲ2κε ἀπÏο3σ2κέ ἀπÏο3σ2κέ ἀπÏο3ϲ2κέ ἀπÏο3ϲ2κέ ἀπÏÏŒ3σ2κο ἀπÏá½¹3σ2κο ἀπÏÏŒ3ϲ2κο ἀπÏá½¹3ϲ2κο ἀπÏο3σ2κό ἀπÏο3σ2κό ἀπÏο3ϲ2κό ἀπÏο3ϲ2κό ἀπÏο3σ2Ï„ ἀπÏο3ϲ2Ï„ á¼Ïπα2ξ1 á¼€ÏÏε2ν1ω á¼€Ïχισυ2ν1 á¼€Ïχιϲυ2ν1 ἀστε2Ï1ω ἀϲτε2Ï1ω ἀσÏ2ν1 ἀσύ2ν1 ἀϲÏ2ν1 ἀϲύ2ν1 ἀσυ2ν1 ἀϲυ2ν1 ἀξÏ2ν1 ἀξύ2ν1 ἀξυ2ν1 αá½Ï„έ2κ1μ αá½Ï„á½³2κ1μ αá½Ï„ε2κ1μ αá½Ï„ε2ξ1 ἀω2σ1φ ἀω2ϲ1φ .γεÏα2σ1φ .γεÏα2ϲ1φ .δα2σ1Ï€ .δα2ϲ1Ï€ .διαμφι2σ1β .διαμφι2ϲ1β .διέ2κ1Ïο .διέ2κ1Ïο .διε2κ1ÏÏŒ .διε2κ1Ïá½¹ .διέ2ξ1 .διέ2ξ1 .διε2ξ1 .δικα2σ1Ï€ .δικα2ϲ1Ï€ .διό2σ1κ .διό2σ1κ .διό2ϲ1κ .διό2ϲ1κ .διο2σ1κ .διο2ϲ1κ .διό2σ1Ï€ .διό2σ1Ï€ .διό2ϲ1Ï€ .διό2ϲ1Ï€ .διο2σ1Ï€ .διο2ϲ1Ï€ .δί2σ1α .δί2σ1α .δί2ϲ1α 
.δί2ϲ1α .δι2σ1ά .δι2σ1á½± .δι2ϲ1ά .δι2ϲ1á½± .δί2σ1η .δί2σ1η .δί2ϲ1η .δί2ϲ1η .δι2σ1ή .δι2σ1á½µ .δι2ϲ1ή .δι2ϲ1á½µ .δί2σ1ε .δί2σ1ε .δί2ϲ1ε .δί2ϲ1ε .δι2σ1ε .δι2ϲ1ε .δι2σ1θ .δι2ϲ1θ .δÏ2σ1 .δύ2σ1 .δÏ2ϲ1 .δύ2ϲ1 .δυ2σ1 .δυ2ϲ1 δÏ3σ2ω. δύ3σ2ω. δÏ3ϲ2ω. δύ3ϲ2ω. δÏ3σ2εις. δύ3σ2εις. δÏ3ϲ2ειϲ. δύ3ϲ2ειϲ. δÏ3σ2εισ. δύ3σ2εισ. δÏ3σ2ει. δύ3σ2ει. δÏ3ϲ2ει. δύ3ϲ2ει. .δÏ3σ2ετ .δύ3σ2ετ .δÏ3ϲ2ετ .δύ3ϲ2ετ δÏ3σ2ομεν. δύ3σ2ομεν. δÏ3ϲ2ομεν. δύ3ϲ2ομεν. δÏ3σ2ουσιν. δύ3σ2ουσιν. δÏ3ϲ2ουϲιν. δύ3ϲ2ουϲιν. δÏ3σ2οιμι. δύ3σ2οιμι. δÏ3ϲ2οιμι. δύ3ϲ2οιμι. δÏ3σ2οις. δύ3σ2οις. δÏ3ϲ2οιϲ. δύ3ϲ2οιϲ. δÏ3σ2οισ. δύ3σ2οισ. δÏ3σ2οι. δύ3σ2οι. δÏ3ϲ2οι. δύ3ϲ2οι. δÏ3σ2οιτον. δύ3σ2οιτον. δÏ3ϲ2οιτον. δύ3ϲ2οιτον. δυ3σ2οίτην. δυ3σ2οίτην. δυ3ϲ2οίτην. δυ3ϲ2οίτην. δÏ3σ2οιμεν. δύ3σ2οιμεν. δÏ3ϲ2οιμεν. δύ3ϲ2οιμεν. δÏ3σ2οιτε. δύ3σ2οιτε. δÏ3ϲ2οιτε. δύ3ϲ2οιτε. δÏ3σ2οιεν. δύ3σ2οιεν. δÏ3ϲ2οιεν. δύ3ϲ2οιεν. δÏ3σ2ειν. δύ3σ2ειν. δÏ3ϲ2ειν. δύ3ϲ2ειν. δÏ3σ2ων. δύ3σ2ων. δÏ3ϲ2ων. δύ3ϲ2ων. δÏ3σ2ον δύ3σ2ον δÏ3ϲ2ον δύ3ϲ2ον δυ3σ2όν δυ3σ2όν δυ3ϲ2όν δυ3ϲ2όν δÏ3σ2ουσ δύ3σ2ουσ δÏ3ϲ2ουϲ δύ3ϲ2ουϲ δυ3σ2οÏσ δυ3σ2ούσ δυ3ϲ2οÏϲ δυ3ϲ2ούϲ δÏ3σ2ῃ δύ3σ2ῃ δÏ3ϲ2ῃ δύ3ϲ2ῃ δÏ3σ2ητον. δύ3σ2ητον. δÏ3ϲ2ητον. δύ3ϲ2ητον. δÏ3σ2ωμεν. δύ3σ2ωμεν. δÏ3ϲ2ωμεν. δύ3ϲ2ωμεν. δÏ3σ2ωσι. δύ3σ2ωσι. δÏ3ϲ2ωϲι. δύ3ϲ2ωϲι. δÏ3σ2αιμι. δύ3σ2αιμι. δÏ3ϲ2αιμι. δύ3ϲ2αιμι. δÏ3σ2αις. δύ3σ2αις. δÏ3ϲ2αιϲ. δύ3ϲ2αιϲ. δÏ3σ2ειας. δύ3σ2ειας. δÏ3ϲ2ειαϲ. δύ3ϲ2ειαϲ. δÏ3σ2αισ. δύ3σ2αισ. δÏ3σ2ειασ. δύ3σ2ειασ. δÏ3σ2αι. δύ3σ2αι. δÏ3ϲ2αι. δύ3ϲ2αι. δÏ3σ2ειε. δύ3σ2ειε. δÏ3ϲ2ειε. δύ3ϲ2ειε. δÏ3σ2αιτον. δύ3σ2αιτον. δÏ3ϲ2αιτον. δύ3ϲ2αιτον. δυ3σ2αίτην. δυ3σ2αίτην. δυ3ϲ2αίτην. δυ3ϲ2αίτην. δÏ3σ2αιμεν. δύ3σ2αιμεν. δÏ3ϲ2αιμεν. δύ3ϲ2αιμεν. δÏ3σ2αιτε. δύ3σ2αιτε. δÏ3ϲ2αιτε. δύ3ϲ2αιτε. δÏ3σ2αιεν δύ3σ2αιεν δÏ3ϲ2αιεν δύ3ϲ2αιεν δÏ3σ2ειαν. δύ3σ2ειαν. δÏ3ϲ2ειαν. δύ3ϲ2ειαν. δÏ3σ2ον. δύ3σ2ον. δÏ3ϲ2ον. δύ3ϲ2ον. δυ3σ2άτω. δυ3σ2άτω. δυ3ϲ2άτω. δυ3ϲ2άτω. δÏ3σ2ατον. δύ3σ2ατον. δÏ3ϲ2ατον. δύ3ϲ2ατον. δυ3σ2άτων. δυ3σ2άτων. δυ3ϲ2άτων. δυ3ϲ2άτων. δÏ3σ2ατε. δύ3σ2ατε. δÏ3ϲ2ατε. δύ3ϲ2ατε. δυ3σ2άντων. δυ3σ2άντων. δυ3ϲ2άντων. δυ3ϲ2άντων. δÏ3σ2ας. δύ3σ2ας. δÏ3ϲ2αϲ. δύ3ϲ2αϲ. δÏ3σ2αν. δύ3σ2αν. δÏ3ϲ2αν. δύ3ϲ2αν. δÏ3σ2αντ δύ3σ2αντ δÏ3ϲ2αντ δύ3ϲ2αντ δυ3σ2άντ δυ3σ2άντ δυ3ϲ2άντ δυ3ϲ2άντ δÏ3σ2ασ δύ3σ2ασ δÏ3ϲ2αϲ δύ3ϲ2αϲ δυ3σ2άσ δυ3σ2άσ δυ3ϲ2άϲ δυ3ϲ2άϲ δÏ3σ2ομαι. δύ3σ2ομαι. δÏ3ϲ2ομαι. δύ3ϲ2ομαι. .δÏ3σ2εσ .δύ3σ2εσ .δÏ3ϲ2εϲ .δύ3ϲ2εϲ δυ3σ2όμεθα. δυ3σ2όμεθα. δυ3ϲ2όμεθα. δυ3ϲ2όμεθα. δÏ3σ2ονται. δύ3σ2ονται. δÏ3ϲ2ονται. δύ3ϲ2ονται. δυ3σ2οίμην. δυ3σ2οίμην. δυ3ϲ2οίμην. δυ3ϲ2οίμην. δÏ3σ2οιο. δύ3σ2οιο. δÏ3ϲ2οιο. δύ3ϲ2οιο. δÏ3σ2οιτο. δύ3σ2οιτο. δÏ3ϲ2οιτο. δύ3ϲ2οιτο. δÏ3σ2οισθον. δύ3σ2οισθον. δÏ3ϲ2οιϲθον. δύ3ϲ2οιϲθον. δυ3σ2οίσθην. δυ3σ2οίσθην. δυ3ϲ2οίϲθην. δυ3ϲ2οίϲθην. δυ3σ2οίμεθα. δυ3σ2οίμεθα. δυ3ϲ2οίμεθα. δυ3ϲ2οίμεθα. δÏ3σ2οισθε. δύ3σ2οισθε. δÏ3ϲ2οιϲθε. δύ3ϲ2οιϲθε. δÏ3σ2οιντο. δύ3σ2οιντο. δÏ3ϲ2οιντο. δύ3ϲ2οιντο. δÏ3σ2εσθαι. δύ3σ2εσθαι. δÏ3ϲ2εϲθαι. δύ3ϲ2εϲθαι. .δυ3σ2όμεν .δυ3σ2όμεν .δυ3ϲ2όμεν .δυ3ϲ2όμεν .δυ3σ2ομέν .δυ3σ2ομέν .δυ3ϲ2ομέν .δυ3ϲ2ομέν δÏ3σ2ωμαι. δύ3σ2ωμαι. δÏ3ϲ2ωμαι. δύ3ϲ2ωμαι. δÏ3σ2ηται. δύ3σ2ηται. δÏ3ϲ2ηται. δύ3ϲ2ηται. δυ3σ2ώμεθα δυ3σ2ώμεθα δυ3ϲ2ώμεθα δυ3ϲ2ώμεθα δÏ3σ2ησθε. δύ3σ2ησθε. δÏ3ϲ2ηϲθε. δύ3ϲ2ηϲθε. δυ3σ2αίμην. δυ3σ2αίμην. δυ3ϲ2αίμην. δυ3ϲ2αίμην. δÏ3σ2αιο. δύ3σ2αιο. δÏ3ϲ2αιο. δύ3ϲ2αιο. δÏ3σ2αιτο. δύ3σ2αιτο. δÏ3ϲ2αιτο. δύ3ϲ2αιτο. δÏ3σ2αισθον. δύ3σ2αισθον. δÏ3ϲ2αιϲθον. δύ3ϲ2αιϲθον. δυ3σ2αίσθην. δυ3σ2αίσθην. δυ3ϲ2αίϲθην. δυ3ϲ2αίϲθην. δυ3σ2αίμεθα. δυ3σ2αίμεθα. δυ3ϲ2αίμεθα. δυ3ϲ2αίμεθα. δÏ3σ2αισθαι. δύ3σ2αισθαι. δÏ3ϲ2αιϲθαι. δύ3ϲ2αιϲθαι. δÏ3σ2αιντο. δύ3σ2αιντο. δÏ3ϲ2αιντο. δύ3ϲ2αιντο. δυ3σ2άσθω. δυ3σ2άσθω. δυ3ϲ2άϲθω. δυ3ϲ2άϲθω. δÏ3σ2ασθον. δύ3σ2ασθον. δÏ3ϲ2αϲθον. δύ3ϲ2αϲθον. 
δυ3σ2άσθων. δυ3σ2άσθων. δυ3ϲ2άϲθων. δυ3ϲ2άϲθων. δÏ3σ2ασθε. δύ3σ2ασθε. δÏ3ϲ2αϲθε. δύ3ϲ2αϲθε. δÏ3σ2ασθαι. δύ3σ2ασθαι. δÏ3ϲ2αϲθαι. δύ3ϲ2αϲθαι. δυ3σ2άμεν δυ3σ2άμεν δυ3ϲ2άμεν δυ3ϲ2άμεν δυσ3σ2αμέν δυσ3σ2αμέν δυϲ3ϲ2αμέν δυϲ3ϲ2αμέν δÏ3σ2ατο. δύ3σ2ατο. δÏ3ϲ2ατο. δύ3ϲ2ατο. δÏ3σ2ετο. δύ3σ2ετο. δÏ3ϲ2ετο. δύ3ϲ2ετο. δÏ3σ2αντο. δύ3σ2αντο. δÏ3ϲ2αντο. δύ3ϲ2αντο. δÏ3σ2εο. δύ3σ2εο. δÏ3ϲ2εο. δύ3ϲ2εο. .δυσεί2σ1β .δυσεί2σ1β .δυϲεί2ϲ1β .δυϲεί2ϲ1β .δυσει2σ1β .δυϲει2ϲ1β .δυσέ2κ1 .δυσέ2κ1 .δυϲέ2κ1 .δυϲέ2κ1 .δυσε2κ1 .δυϲε2κ1 .δυσέ2ξ1 .δυσέ2ξ1 .δυϲέ2ξ1 .δυϲέ2ξ1 .δυσε2ξ1 .δυϲε2ξ1 .δυ3σ2ιθ .δυ3ϲ2ιθ δÏ3σ2ις. δύ3σ2ις. δÏ3ϲ2ιϲ. δύ3ϲ2ιϲ. δÏ3σ2ισ. δύ3σ2ισ. δÏ3σ2εω δύ3σ2εω δÏ3ϲ2εω δύ3ϲ2εω δÏ3σ2ιν. δύ3σ2ιν. δÏ3ϲ2ιν. δύ3ϲ2ιν. δÏ3σ2ι. δύ3σ2ι. δÏ3ϲ2ι. δύ3ϲ2ι. δυ3σ2έοιν. δυ3σ2έοιν. δυ3ϲ2έοιν. δυ3ϲ2έοιν. δÏ3σ2εσι. δύ3σ2εσι. δÏ3ϲ2εϲι. δύ3ϲ2εϲι. δÏ3σ2εσιν. δύ3σ2εσιν. δÏ3ϲ2εϲιν. δύ3ϲ2εϲιν. .δÏ3σ2κε .δύ3σ2κε .δÏ3ϲ2κε .δύ3ϲ2κε .δυ3σ2μή. .δυ3σ2μή. .δυ3ϲ2μή. .δυ3ϲ2μή. .δυ3σ2μὴ. .δυ3ϲ2μὴ. .δυ3σ2μῆς. .δυ3ϲ2μῆϲ. .δυ3σ2μῆσ. .δυ3σ2μῇ .δυ3ϲ2μῇ .δυ3σ2μῆ. .δυ3ϲ2μῆ. .δυ3σ2μᾶ .δυ3ϲ2μᾶ .δυ3σ2μα .δυ3ϲ2μα .δυ3σ2μῶ .δυ3ϲ2μῶ .δυσξÏ2ν1 .δυσξύ2ν1 .δυϲξÏ2ν1 .δυϲξύ2ν1 .δυσξυ2ν1 .δυϲξυ2ν1 .δÏ3σ2ταν .δύ3σ2ταν .δÏ3ϲ2ταν .δύ3ϲ2ταν .δυ3σ2τάν .δυ3σ2τάν .δυ3ϲ2τάν .δυ3ϲ2τάν .δυ3σ2την .δυ3ϲ2την .δυ3σ2τήν .δυ3σ2τήν .δυ3ϲ2τήν .δυ3ϲ2τήν á¼Î´Ï…2σ1Ï„ á¼Î´Ï…2ϲ1Ï„ εἰ2ν1όδ εἰ2ν1όδ εἰ2ν1οδ εἰ2σ1 εἰ2ϲ1 εἴ2σ1 εἴ2ϲ1 εἰ3σ2ί. εἰ3σ2á½·. εἰ3ϲ2ί. εἰ3ϲ2á½·. εἰ3σ2ὶ. εἰ3ϲ2ὶ. εἰ3σ2ι. εἰ3ϲ2ι. εἰ3σ2ίν. εἰ3σ2ίν. εἰ3ϲ2ίν. εἰ3ϲ2ίν. εἰ3σ2ὶν. εἰ3ϲ2ὶν. εἰ3σ2ιν. εἰ3ϲ2ιν. εἴ3σ2ομ εἴ3ϲ2ομ εἴ3σ2ῃ. εἴ3ϲ2ῃ. εἴσει. εἴϲει. εἴ3σ2εται. εἴ3ϲ2εται. εἴ3σ2εσθον. εἴ3ϲ2εϲθον. εἰ3σ2όμ εἰ3σ2όμ εἰ3ϲ2όμ εἰ3ϲ2όμ εἴ3σ2εσθε. εἴ3ϲ2εϲθε. εἴ3σ2ονται εἴ3ϲ2ονται εἰ3σ2οίμην εἰ3σ2οίμην εἰ3ϲ2οίμην εἰ3ϲ2οίμην εἴ3σ2οιο εἴ3ϲ2οιο εἴ3σ2οιτο εἴ3ϲ2οιτο εἴ3σ2οισθον εἴ3ϲ2οιϲθον εἰ3σ2οίσθην εἰ3σ2οίσθην εἰ3ϲ2οίϲθην εἰ3ϲ2οίϲθην εἰ3σ2οίμεθα εἰ3σ2οίμεθα εἰ3ϲ2οίμεθα εἰ3ϲ2οίμεθα εἴ3σ2οισθε εἴ3ϲ2οιϲθε εἴ3σ2οιντο εἴ3ϲ2οιντο εἴ3σ2εσθαι εἴ3ϲ2εϲθαι εἰ3σ2όμεν εἰ3σ2όμεν εἰ3ϲ2όμεν εἰ3ϲ2όμεν εἰ3σ2ομέν εἰ3σ2ομέν εἰ3ϲ2ομέν εἰ3ϲ2ομέν εἴ3σ2άμην. εἴ3σ2άμην. εἴ3ϲ2άμην. εἴ3ϲ2άμην. 
εἴ3σ2ω εἴ3ϲ2ω εἴ3σ2ατο εἴ3ϲ2ατο εἴ3σ2ασθον εἴ3ϲ2αϲθον εἰ3σ2άσθην εἰ3σ2άσθην εἰ3ϲ2άϲθην εἰ3ϲ2άϲθην εἰ3σ2άμεθα εἰ3σ2άμεθα εἰ3ϲ2άμεθα εἰ3ϲ2άμεθα εἴ3σ2ασθε εἴ3ϲ2αϲθε εἴ3σ2αντο εἴ3ϲ2αντο εἴ3σ2ωμαι εἴ3ϲ2ωμαι εἴ3σ2ησθον εἴ3ϲ2ηϲθον εἰ3σ2ώμεθα εἰ3σ2ώμεθα εἰ3ϲ2ώμεθα εἰ3ϲ2ώμεθα εἴ3σ2ησθε εἴ3ϲ2ηϲθε εἴ3σ2ωνται εἴ3ϲ2ωνται εἰ3σ2αίμην εἰ3σ2αίμην εἰ3ϲ2αίμην εἰ3ϲ2αίμην εἴ3σ2αιο εἴ3ϲ2αιο εἴ3σ2αιτο εἴ3ϲ2αιτο εἴ3σ2αισθον εἴ3ϲ2αιϲθον εἴ3σ2αίσθην εἴ3σ2αίσθην εἴ3ϲ2αίϲθην εἴ3ϲ2αίϲθην εἰ3σ2αίμεθα εἰ3σ2αίμεθα εἰ3ϲ2αίμεθα εἰ3ϲ2αίμεθα εἴ3σ2αισθε εἴ3ϲ2αιϲθε εἴ3σ2αιντο εἴ3ϲ2αιντο εἰ3σ2άσθω εἰ3σ2άσθω εἰ3ϲ2άϲθω εἰ3ϲ2άϲθω εἰ3σ2άσθων εἰ3σ2άσθων εἰ3ϲ2άϲθων εἰ3ϲ2άϲθων εἴ3σ2ασθαι εἴ3ϲ2αϲθαι εἰ3σ2άμεν εἰ3σ2άμεν εἰ3ϲ2άμεν εἰ3ϲ2άμεν εἰ3σ2αμέν εἰ3σ2αμέν εἰ3ϲ2αμέν εἰ3ϲ2αμέν á¼2κ1λ á¼3κ2λήθη á¼3κ2λήθη á¼3κ2λάζ á¼3κ2λάζ á¼3κ2λάγ á¼3κ2λάγ á¼3κ2λάο á¼3κ2λάο á¼3κ2λάσ á¼3κ2λάσ á¼3κ2λάϲ á¼3κ2λάϲ á¼3κ2λαί á¼3κ2λαί á¼3κ2Î»Î±Ï á¼3κ2λαύ á¼3κ2λεί á¼3κ2λεί á¼4κ3λείπ á¼4κ3λείπ á¼4κ3λείψ á¼4κ3λείψ á¼3κ2λῄ á¼3κ2κλέπ á¼3κ2κλέπ á¼3κ2κλέψ á¼3κ2κλέψ á¼3κ2λάπ á¼3κ2λάπ á¼3κ2λαπ á¼4κ3λάπτ á¼4κ3λάπτ á¼4κ3λαπτ á¼3κ2λέφ á¼3κ2λέφ á¼3κ2λεφ á¼3κ2Î»Î®Ï á¼3κ2Î»á½µÏ á¼3κ2Î»Î·Ï á¼3κ2λίν á¼3κ2λίν á¼3κ2λιν á¼3κ2Î»Ï á¼3κ2λύ á¼4κ3λÏσεω á¼4κ3λύσεω á¼4κ3λÏϲεω á¼4κ3λύϲεω á¼4κ3λÏσει á¼4κ3λύσει á¼4κ3λÏϲει á¼4κ3λύϲει á¼4κ3λÏσεοι á¼4κ3λύσεοι á¼4κ3λÏϲεοι á¼4κ3λύϲεοι á¼4κ3λÏσεσι á¼4κ3λύσεσι á¼4κ3λÏϲεϲι á¼4κ3λύϲεϲι á¼3κ2λόμ á¼3κ2λόμ á¼3κ2κλώσ á¼3κ2κλώσ á¼3κ2κλώϲ á¼3κ2κλώϲ á¼”2κ1λει á¼”3κ2λεισ á¼”3κ2λειϲ á¼”2κ1λυσ á¼”2κ1λυϲ á¼2κ1μ á¼”2κ1μ á¼2κ1ν á¼”2κ1ν á¼”3κ2ναι á¼3κ2ναί á¼3κ2ναί á¼”3κ2νησ á¼”3κ2νηϲ á¼3κ2νήσ á¼3κ2νήσ á¼3κ2νήϲ á¼3κ2νήϲ á¼3κ2νυ á¼2κ1Ï á¼”2κ1Ï á¼3κ2Ïάδ á¼3κ2Ïάδ á¼3κ2Ïαδ á¼”3κ2Ïαζ á¼3κ2Ïάζ á¼3κ2Ïάζ á¼”3κ2Ïαγ á¼3κ2Ïάγ á¼3κ2Ïάγ á¼3κ2Ïάτ á¼3κ2Ïάτ á¼3κ2Ïατ á¼3κ2ÏαÏγ á¼3κ2Ïαύγ á¼3κ2Ïαυγ á¼”3κ2Ïαι á¼3κ2Ïαί á¼3κ2Ïαί á¼”3κ2Ïαν á¼3κ2Ïάν á¼3κ2Ïάν á¼3κ2Ïήη á¼3κ2Ïήη á¼3κ2Ïάα á¼3κ2Ïάα á¼3κ2Ïαά á¼3κ2Ïαά á¼3κ2Ïάθ á¼3κ2Ïάθ á¼3κ2Ïαθ á¼”3κ2Ïεκ á¼3κ2Ïέκ á¼3κ2Ïέκ á¼”3κ2Ïεξ á¼3κ2Ïέξ á¼3κ2Ïέξ á¼3κ2Ïέμ á¼3κ2Ïέμ á¼3κ2Ïεμ á¼3κ2Ïήμ á¼3κ2Ïήμ á¼3κ2Ïημ á¼”3κ2Ïιν á¼3κ2Ïίν á¼3κ2Ïίν á¼3κ2Ïίθ á¼3κ2Ïίθ á¼3κ2Ïότ á¼3κ2Ïότ á¼3κ2Ïοτ á¼”3κ2Ïου á¼3κ2ÏÎ¿Ï á¼3κ2Ïού á¼”3κ2ÏÏ…Ï€ á¼3κ2ÏÏÏ€ á¼3κ2Ïύπ á¼”3κ2Ïυψ á¼3κ2ÏÏψ á¼3κ2Ïύψ á¼3κ2ÏÏβ á¼3κ2Ïύβ á¼3κ2ÏÏφ á¼3κ2Ïύφ á¼3κ2Ïυσ á¼3κ2Ïυϲ á¼”3κ2Ïωζ á¼3κ2Ïώζ á¼3κ2Ïώζ á¼”3κ2Ïωξ á¼3κ2Ïώξ á¼3κ2Ïώξ á¼2κ1ταθ á¼”2κ1ταμε. á¼2κ1τάμν á¼2κ1τάμν á¼2κ1ταν á¼2κ1Ï„Î±Ï á¼2κ1τάσ á¼2κ1τάσ á¼2κ1τάϲ á¼2κ1τάϲ á¼2κ1τε á¼2κ1τέ á¼2κ1Ï„á½³ á¼3κ2τείν á¼3κ2τείν á¼2κ1τήκ á¼2κ1τήκ á¼2κ1τι á¼”2κ1τι á¼2κ1τί á¼2κ1Ï„á½· á¼”3κ2τιζ á¼3κ2τίζ á¼3κ2τίζ á¼”3κ2τισα á¼”3κ2τιϲα á¼3κ2τίσα á¼3κ2τίσα á¼3κ2τίϲα á¼3κ2τίϲα á¼2κ1Ï„ÏŒ á¼2κ1Ï„á½¹ á¼2κ1το á¼”2κ1το á¼3κ2τός. á¼3κ2τός. á¼3κ2τόϲ. á¼3κ2τόϲ. á¼3κ2τὸς. á¼3κ2τὸϲ. á¼3κ2τόσ. á¼3κ2τόσ. á¼3κ2τὸσ. á¼2κ1Ï„Ï á¼”2κ1τυπο á¼2κ1Ï„Ïπου. á¼2κ1τύπου. á¼2κ1Ï„Ïπῳ. á¼2κ1τύπῳ. á¼”2κ1τυπε. á¼2κ1Ï„Ïπω. á¼2κ1τύπω. á¼2κτÏποι. á¼2κτύποι. á¼2κ1Ï„Ïπων. á¼2κ1τύπων. á¼2κ1Ï„Ïποις. á¼2κ1τύποις. á¼2κ1Ï„Ïποιϲ. á¼2κ1τύποιϲ. á¼2κ1Ï„Ïποισ. á¼2κ1τύποισ. á¼2κ1Ï„Ïπους. á¼2κ1τύπους. á¼2κ1Ï„Ïπουϲ. á¼2κ1τύπουϲ. á¼2κ1Ï„Ïπουσ. á¼2κ1τύπουσ. á¼”2κ1τυπα. á¼2κ1Ï„Ï… ἑλλή2σ1Ï€ ἑλλή2σ1Ï€ ἑλλή2ϲ1Ï€ ἑλλή2ϲ1Ï€ ἑλλη2σ1Ï€ ἑλλη2ϲ1Ï€ á¼2ν1 á¼”2ν1 á¼3ν2άκις á¼3ν2άκις á¼3ν2άκιϲ á¼3ν2άκιϲ á¼3ν2ακισ á¼3ν2ακιϲ á¼3ν2ακόσ á¼3ν2ακόσ á¼3ν2ακόϲ á¼3ν2ακόϲ á¼3ν2ακοσ á¼3ν2ακοϲ á¼”3ν2αÏα. á¼3ν2άÏων. á¼3ν2á½±Ïων. á¼3ν2άÏοις. á¼3ν2á½±Ïοις. á¼3ν2άÏοιϲ. á¼3ν2á½±Ïοιϲ. á¼3ν2άÏοισ. á¼3ν2á½±Ïοισ. 
á¼3ν2αÏηφ á¼4ν3αÏÎ±Ï á¼3ν2άÏεε á¼3ν2á½±Ïεε á¼3ν2αÏέω á¼3ν2αÏέω á¼3ν2αÏέα á¼3ν2αÏέα á¼3ν2αÏεά á¼3ν2αÏεά á¼3ν2άÏιε á¼3ν2á½±Ïιε á¼3ν2αÏίω á¼3ν2αÏίω á¼3ν2αÏία á¼3ν2αÏία á¼3ν2αÏιά á¼3ν2αÏιά á¼”3ν2ασσ á¼”3ν2αϲϲ á¼3ν2άσσ á¼3ν2άσσ á¼3ν2άϲϲ á¼3ν2άϲϲ á¼3ν2άσθ á¼3ν2άσθ á¼3ν2άϲθ á¼3ν2άϲθ á¼3ν2ασθ á¼3ν2αϲθ á¼”3ν2ατ á¼3ν2άτ á¼3ν2άτ á¼Î½Î´Ï…2σ1Ï„ á¼Î½Î´Ï…2ϲ1Ï„ á¼3ν2έγκ á¼3ν2έγκ á¼3ν2εγκ á¼”3ν2εικ á¼3ν2εῖκ á¼3ν2εικ á¼3ν2είκ á¼3ν2είκ á¼”3ν2ειμ á¼3ν2είμ á¼3ν2είμ á¼3ν2εμέσσ á¼3ν2εμέσσ á¼3ν2εμέϲϲ á¼3ν2εμέϲϲ á¼3ν2εμήθ á¼3ν2εμήθ á¼3ν2ενή á¼3ν2ενή á¼3ν2εό á¼3ν2εό á¼3ν2εὸ á¼3ν2εο á¼3ν2εῶ á¼3ν2εά á¼3ν2εά á¼3ν2εὰ á¼3ν2εᾶ á¼3ν2έπει á¼3ν2έπει á¼”3ν2εÏθε á¼”3ν2ευσ á¼”3ν2ευϲ á¼3ν2εÏσ á¼3ν2εύσ á¼3ν2εÏϲ á¼3ν2εύϲ á¼3ν2έχθ á¼3ν2έχθ á¼3ν2εχθ á¼”3ν2ησ á¼”3ν2ηϲ á¼3ν2ήσ á¼3ν2ήσ á¼3ν2ήϲ á¼3ν2ήϲ á¼3ν2ηή á¼3ν2ηή á¼3ν2ηὴ á¼”3ν2ην. á¼3ν2ηεί á¼3ν2ηεί á¼3ν2ηο á¼3ν2ηῶ á¼3νηέ á¼3νηέ á¼3ν2ήνο á¼3ν2ήνο á¼3ν2ί á¼3ν2á½· á¼3ν2ι á¼”3ν2ι á¼4ν3Î¹Î±Ï á¼4ν3ιαύ á¼5ν4ιαÏσ á¼5ν4ιαύσ á¼5ν4ιαÏϲ á¼5ν4ιαύϲ á¼5ν4ιαυσ á¼5ν4ιαυϲ á¼4ν3Î¹Î´Ï á¼4ν3Î¯Î´Ï á¼4ν3á½·Î´Ï á¼4ν3ίζ á¼4ν3ίζ á¼4ν3ίη á¼4ν3ίη á¼4ν3ιέτον. á¼4ν3ιέτον. á¼4ν3ίεμεν. á¼4ν3ίεμεν. á¼4ν3ίω. á¼4ν3ίω. á¼4ν3ιππ á¼4ν3ίππ á¼4ν3ίππ á¼4ν3ίπτ á¼4ν3ίπτ á¼4ν3ίψ á¼4ν3ίψ á¼4ν3ίσσ á¼4ν3ίσσ á¼4ν3ίϲϲ á¼4ν3ίϲϲ á¼4ν3ίστ á¼4ν3ίστ á¼4ν3ίϲτ á¼4ν3ίϲτ á¼4ν3ιστ á¼4ν3ιϲτ á¼4ν3ισχ á¼4ν3ιϲχ á¼4ν3ίσχ á¼4ν3ίσχ á¼4ν3ίϲχ á¼4ν3ίϲχ á¼”3ν2ος. á¼”3ν2οϲ. á¼”3ν2οσ. á¼”3ν2ου. á¼”3ν2ον. á¼”3ν2ω á¼”3ν2οι. á¼”3ν2οις. á¼”3ν2οιϲ. á¼”3ν2οισ. á¼”3ν2ης. á¼”3ν2ηϲ. á¼”3ν2ησ. á¼”3ν2ῃ. á¼”3ν2η. á¼”3ν2οσι á¼”3ν2οϲι á¼3ν2όσε á¼3ν2όσε á¼3ν2όϲε á¼3ν2όϲε á¼3ν2υάλ á¼3ν2υάλ á¼3ν2υαλ á¼”3ν2υξ á¼3ν2υξ á¼3ν2Ïξ á¼3ν2ύξ á¼3ν2Ïσ á¼3ν2ύσ á¼3ν2Ïϲ á¼3ν2ύϲ á¼3ν2υσ á¼3ν2υϲ á¼3ν2Ï…ÏŽ á¼3ν2Ï…á½½ á¼3ν2Ï…á½¼ á¼3ν2υόο á¼3ν2υόο á¼3ν2υοῦς á¼3ν2υοῦϲ á¼2ξ1 á¼”2ξ1 á¼3ξ2Î®Ï á¼3ξ2á½µÏ á¼3ξ2Î·Ï á¼3ξ2Ï…2ν1 á¼3ξ2Ï…Ï á¼3ξ2ÏÏ á¼3ξ2á½»Ï á¼”3ξ2υσ á¼”3ξ2υϲ á¼”3ξ2ω. ἑ2ξ1ήÏετμ ἑ2ξ1á½µÏετμ ἑ2ξ1ηÏέτμ ἑ2ξ1ηÏέτμ á¼Ï€ÎµÎ¯2σ1 á¼Ï€Îµá½·2σ1 á¼Ï€ÎµÎ¯2ϲ1 á¼Ï€Îµá½·2ϲ1 á¼Ï€ÎµÎ¹2σ1 á¼Ï€ÎµÎ¹2ϲ1 á¼Ï€ÎµÎ¯3σ2ατον. á¼Ï€Îµá½·3σ2ατον. á¼Ï€ÎµÎ¯3ϲ2ατον. á¼Ï€Îµá½·3ϲ2ατον. á¼Ï€ÎµÎ¹3σ2άτην. á¼Ï€ÎµÎ¹3σ2άτην. á¼Ï€ÎµÎ¹3ϲ2άτην. á¼Ï€ÎµÎ¹3ϲ2άτην. á¼Ï€ÎµÎ¯3σ2αμεν. á¼Ï€Îµá½·3σ2αμεν. á¼Ï€ÎµÎ¯3ϲ2αμεν. á¼Ï€Îµá½·3ϲ2αμεν. á¼Ï€ÎµÎ¯3σ2ατε á¼Ï€Îµá½·3σ2ατε á¼Ï€ÎµÎ¯3ϲ2ατε á¼Ï€Îµá½·3ϲ2ατε á¼Ï€ÎµÎ¹3σ2άμην. á¼Ï€ÎµÎ¹3σ2άμην. á¼Ï€ÎµÎ¹3ϲ2άμην. á¼Ï€ÎµÎ¹3ϲ2άμην. á¼Ï€ÎµÎ¯3σ2ω. á¼Ï€Îµá½·3σ2ω. á¼Ï€ÎµÎ¯3ϲ2ω. á¼Ï€Îµá½·3ϲ2ω. á¼Ï€ÎµÎ¯3σ2ατο á¼Ï€Îµá½·3σ2ατο á¼Ï€ÎµÎ¯3ϲ2ατο á¼Ï€Îµá½·3ϲ2ατο á¼Ï€ÎµÎ¯3σ2ασθον. á¼Ï€Îµá½·3σ2ασθον. á¼Ï€ÎµÎ¯3ϲ2αϲθον. á¼Ï€Îµá½·3ϲ2αϲθον. á¼Ï€ÎµÎ¹3σ2άμεθα. á¼Ï€ÎµÎ¹3σ2άμεθα. á¼Ï€ÎµÎ¹3ϲ2άμεθα. á¼Ï€ÎµÎ¹3ϲ2άμεθα. á¼Ï€ÎµÎ¯3σ2ασθε. á¼Ï€Îµá½·3σ2ασθε. á¼Ï€ÎµÎ¯3ϲ2αϲθε. á¼Ï€Îµá½·3ϲ2αϲθε. á¼Ï€ÎµÎ¯3σ2αντο. á¼Ï€Îµá½·3σ2αντο. á¼Ï€ÎµÎ¯3ϲ2αντο. á¼Ï€Îµá½·3ϲ2αντο. á¼Ï€ÎµÎ¯3σ2θ á¼Ï€Îµá½·3σ2θ á¼Ï€ÎµÎ¯3ϲ2θ á¼Ï€Îµá½·3ϲ2θ á¼Ï€ÎµÎ¹3σ2θ á¼Ï€ÎµÎ¹3ϲ2θ á¼Ï€Îµ2κ1τεί á¼Ï€Îµ2κ1τεί á¼Ï€Î­2κ1τει á¼Ï€á½³2κ1τει á¼Ï€Îµ2κ1Ï„Ï á¼Ï€Î­2κ1Ï„Ï á¼Ï€á½³2κ1Ï„Ï á¼Ï€Îµ2ξ1 á¼Ï€Îµ2σ1β á¼Ï€Îµ2ϲ1β á¼Ï€Î¹Ï€ÏÏŒ2σ1θ á¼Ï€Î¹Ï€Ïá½¹2σ1θ á¼Ï€Î¹Ï€ÏÏŒ2ϲ1θ á¼Ï€Î¹Ï€Ïá½¹2ϲ1θ á¼Ï€Î¹Ï€Ïο2σ1θ á¼Ï€Î¹Ï€Ïο2ϲ1θ á¼Ï€Î¹ÏƒÏ…2ν1 á¼Ï€Î¹Ï²Ï…2ν1 á¼2σ1 á¼2ϲ1 á¼3σ2άω á¼3σ2άω á¼3ϲ2άω á¼3ϲ2άω á¼3σ2ημ á¼3ϲ2ημ á¼3σ2θ á¼3ϲ2θ á¼4σ3θέσ á¼4σ3θέσ á¼4ϲ3θέϲ á¼4ϲ3θέϲ á¼3σ2ιγ á¼3ϲ2ιγ á¼3σ2κ á¼3ϲ2κ á¼4σ3κά á¼4σ3κά á¼4ϲ3κά á¼4ϲ3κά á¼4σ3κα á¼4ϲ3κα á¼3σ2μὲν. á¼3ϲ2μὲν. á¼3σ2μέν. á¼3σ2μέν. á¼3ϲ2μέν. á¼3ϲ2μέν. 
á¼3σ2Ï„ á¼3ϲ2Ï„ á¼3σ2όμεθα á¼3σ2όμεθα á¼3ϲ2όμεθα á¼3ϲ2όμεθα á¼3σ2οίμην á¼3σ2οίμην á¼3ϲ2οίμην á¼3ϲ2οίμην á¼3σ2όμ á¼3σ2όμ á¼3ϲ2όμ á¼3ϲ2όμ á¼3σ2ομ á¼3ϲ2ομ á¼3σ2οῦ á¼3ϲ2οῦ á¼3σ2Î¿Ï á¼3σ2ού á¼3ϲ2Î¿Ï á¼3ϲ2ού á¼3σ2ου á¼3ϲ2ου á¼3σ2Ï… á¼3ϲ2Ï… á¼3σ2Ï á¼3σ2á½» á¼3ϲ2Ï á¼3ϲ2á½» á¼ÏƒÏ2ν1 á¼Ïƒá½»2ν1 á¼Ï²Ï2ν1 á¼Ï²á½»2ν1 á¼ÏƒÏ…2ν1 á¼Ï²Ï…2ν1 á¼3σ2χ á¼3ϲ2χ á¼4σ3χέ á¼4σ3χέ á¼4ϲ3χέ á¼4ϲ3χέ á¼3σ2ÏŽ á¼3σ2á½½ á¼3ϲ2ÏŽ á¼3ϲ2á½½ á¼3σ2ω á¼3ϲ2ω á¼”2σ1οπ á¼”2ϲ1οπ εá½Îµ2ξ1 εá½Îµ3ξ2ί εá½Îµ3ξ2á½· εá½Îµ3ξ2ι εá½Ï€ÏÏŒ2σ1 εá½Ï€Ïá½¹2σ1 εá½Ï€ÏÏŒ2ϲ1 εá½Ï€Ïá½¹2ϲ1 εá½Ï€Ïο2σ1 εá½Ï€Ïο2ϲ1 εá½ÏƒÏ2ν1 εá½Ïƒá½»2ν1 εá½Ï²Ï2ν1 εá½Ï²á½»2ν1 εá½ÏƒÏ…2ν1 εá½Ï²Ï…2ν1 εá½Î¾Ï2ν1 εá½Î¾á½»2ν1 εá½Î¾Ï…2ν1 á¼Ï‰2σ1φ á¼Ï‰2ϲ1φ ἤ2ν1οψ. ἤ2ν1οπ á¼ 2ν1όπ á¼ 2ν1όπ .θεμι2σ1ÎºÏ .θεμι2ϲ1ÎºÏ .θεό2σ1δ .θεό2σ1δ .θεό2ϲ1δ .θεό2ϲ1δ .θεο2σ1δ .θεο2ϲ1δ .θεοι2σ1ÎµÏ‡Î¸Ï .θεοι2ϲ1ÎµÏ‡Î¸Ï .θη2Ï1Î±Î³Ï .θυο2σ1κ .θυο2ϲ1κ .καθυπε2Ï1 .καλω2σ1Î¿Ï .καλω2ϲ1Î¿Ï .καλω2σ1ÏŒÏ .καλω2σ1á½¹Ï .καλω2ϲ1ÏŒÏ .καλω2ϲ1á½¹Ï .κα2ν1είς. .κα2ν1είς. .κα2ν1είϲ. .κα2ν1είϲ. .κα2ν1εὶς. .κα2ν1εὶϲ. .κα2ν1είσ. .κα2ν1είσ. .κα2ν1εὶσ. .κα2ν1εν .κα2ν1έν .κα2ν1έν .καταδυ2σ1ωπ .καταδυ2ϲ1ωπ .κατεδυ2σ1ÏŽÏ€ .κατεδυ2σ1ώπ .κατεδυ2ϲ1ÏŽÏ€ .κατεδυ2ϲ1ώπ .κατει2σ1 .κατει2ϲ1 .κατε2ν1αί .κατε2ν1αί .κατε2ν1ή .κατε2ν1á½µ .κατε2ξ1α2ν1ί .κατε2ξ1α2ν1á½· .κατε2ξ1α2ν1έσ .κατε2ξ1α2ν1έσ .κατε2ξ1α2ν1έϲ .κατε2ξ1α2ν1έϲ .κεÏα2σ1φ .κεÏα2ϲ1φ .κÏά2σ1Ï€ .κÏá½±2σ1Ï€ .κÏά2ϲ1Ï€ .κÏá½±2ϲ1Ï€ .κÏα2σ1Ï€ .κÏα2ϲ1Ï€ .κυνό2σ1α .κυνό2σ1α .κυνό2ϲ1α .κυνό2ϲ1α .κυνό2σ1β .κυνό2σ1β .κυνό2ϲ1β .κυνό2ϲ1β .κυνο2σ1β .κυνο2ϲ1β .κυνό2σ1ο .κυνό2σ1ο .κυνό2ϲ1ο .κυνό2ϲ1ο .κυνο2σ1ο .κυνο2ϲ1ο .κυνο2σ1φ .κυνο2ϲ1φ .μελα2ν1άγ .μελα2ν1άγ .μελα2ν1αγ .μελα2ν1άε .μελα2ν1άε .μελα2ν1αέ .μελα2ν1αέ .μελα2ν1αθ .μελα2ν1αιγ .μελα2ν1αυγ .μελα2ν1είμ .μελα2ν1είμ .μελα2ν1εῖμ .μελά2ν1ιππ .μελά2ν1ιππ .μελα2ν1ίππ .μελα2ν1ίππ .μελα2ν1όμμ .μελα2ν1όμμ .μελα2ν1ομμ .μελά2ν1οσσ .μελά2ν1οσσ .μελά2ν1οϲϲ .μελά2ν1οϲϲ .μελα2ν1όσσ .μελα2ν1όσσ .μελα2ν1όϲϲ .μελα2ν1όϲϲ .μελά2ν1οστ .μελά2ν1οστ .μελά2ν1οϲτ .μελά2ν1οϲτ .μελα2ν1όστ .μελα2ν1όστ .μελα2ν1όϲτ .μελα2ν1όϲτ .μελά2ν1Î¿Ï…Ï .μελά2ν1Î¿Ï…Ï .μελα2ν1οÏÏ .μελα2ν1Î¿á½»Ï .μελα2ν1Î¿Ï…Ï .μελά2ν1Ï… .μελά2ν1Ï… .μελα2ν1Ï .μελα2ν1á½» .μετε2ξ1α .μετε2ξ1έ .μετε2ξ1á½³ .μετε2ξ1ε .μογι2σ1 .μογι2ϲ1 .μογο2σ1Ï„ .μογο2ϲ1Ï„ .μυ2σ1Ï€ .μυ2ϲ1Ï€ .μυ2σ1επ .μυ2ϲ1επ .νεώ2σ1οικ .νεώ2σ1οικ .νεώ2ϲ1οικ .νεώ2ϲ1οικ .νεω2σ1οίκ .νεω2σ1οίκ .νεω2ϲ1οίκ .νεω2ϲ1οίκ .νου2ν1ε .ξυ2ν1αγ .ξυ2ν1ε .ξυ2ν1έ .ξυ2ν1á½³ .ξÏ2ν1ε .ξύ2ν1ε .ξυ3ν2εώ .ξυ3ν2εώ .ξυ3ν2εῶ .ξυ2ν1ῆκ .ξÏ2ν1ι .ξύ2ν1ι .ξυ2ν1ί .ξυ2ν1á½· οἱο2ν1εί. οἱο2ν1εί. οἱο2ν1εὶ. οἱό2σ1 οἱό2σ1 οἱό2ϲ1 οἱό2ϲ1 οἰ2σ1πώτ οἰ2σ1πώτ οἰ2ϲ1πώτ οἰ2ϲ1πώτ οἰ2σ1πωτ οἰ2ϲ1πωτ á½Î»Î¿2ν1έν. á½Î»Î¿2ν1έν. á½Î»Î¿2ν1ὲν. á½Ï€Ï‰2σ1 á½Ï€Ï‰2ϲ1 á½…2σ1γε. á½…2ϲ1γε. á½ÏƒÎ¿2ν1ῶν. á½Ï²Î¿2ν1ῶν. á½…2σ1πεÏ. á½…2ϲ1πεÏ. á½…2σ1τις á½…2ϲ1τιϲ οἷ2σ1τισι οἷ2ϲ1τιϲι οὕ2σ1τινας οὕ2ϲ1τιναϲ ἧ2σ1τινος ἧ2ϲ1τινοϲ αἷ2σ1τισι αἷ2ϲ1τιϲι á¼…2σ1τινας á¼…2ϲ1τιναϲ á½2σ1τι2σ1οῦν. á½2ϲ1τι2ϲ1οῦν. ἡτι2σ1οῦν. ἡτι2ϲ1οῦν. á½Ï€Î¿Î¹Î¿Ï…2σ1τινα2σ1οῦν. á½Ï€Î¿Î¹Î¿Ï…2ϲ1τινα2ϲ1οῦν. οá½Î´ÎµÎ½ÏŒ2σ1ω οá½Î´ÎµÎ½á½¹2σ1ω οá½Î´ÎµÎ½ÏŒ2ϲ1ω οá½Î´ÎµÎ½á½¹2ϲ1ω οá½Î´ÎµÎ½Î¿2σ1ÏŽ οá½Î´ÎµÎ½Î¿2σ1á½½ οá½Î´ÎµÎ½Î¿2ϲ1ÏŽ οá½Î´ÎµÎ½Î¿2ϲ1á½½ .παλι2ν1 .παλί2ν1 .παλί2ν1 .πα2ν1 .πά2ν1 .πά2ν1 .πα3ν2ός. .πα3ν2ός. .πα3ν2όϲ. .πα3ν2όϲ. .πα3ν2ὸς. .πα3ν2ὸϲ. .πα3ν2όσ. .πα3ν2όσ. .πα3ν2ὸσ. .πα3ν2ί. .πα3ν2á½·. .πα3ν2ὶ. .πάνα. .πάνα. .πα3ν2ῶν. .πα3ν2ικ .πα3ν2ίσδ .πα3ν2ίσδ .πα3ν2ίϲδ .πα3ν2ίϲδ .πα3ν2ισδ .πα3ν2ιϲδ .πα3ν2οῦ. .πα3ν2á¿·. .πα3ν2ÏŒ. .πα3ν2á½¹. .πα3ν2ὸ. .πα3ν2όν. .πα3ν2όν. .πα3ν2ὸν. .πα3ν2έ. .πα3ν2á½³. .πα3ν2á½². .πα3ν2οί. .πα3ν2οί. .πα3ν2οὶ. .πα3ν2οῖς. .πα3ν2οῖϲ. .πα3ν2οῖσ. .πα3ν2οÏÏ‚. .πα3ν2ούς. .πα3ν2οÏϲ. .πα3ν2ούϲ. .πα3ν2οὺς. .πα3ν2οὺϲ. .πα3ν2οÏσ. 
.πα3ν2ούσ. .πα3ν2οὺσ. .παÏα2ν1ίσχ .παÏα2ν1ίσχ .παÏα2ν1ίϲχ .παÏα2ν1ίϲχ .παÏεί2σ1 .παÏεί2σ1 .παÏεί2ϲ1 .παÏεί2ϲ1 .παÏει2σ1 .παÏει2ϲ1 .παÏε2κ1λ .παÏε2κ1Ï„Ï .παÏε2ν1εῖ .παÏε2ν1ο .παÏε2ξ1 .παÏέ2ξ1 .παÏá½³2ξ1 παÏέ3ξ2ω. παÏá½³3ξ2ω. παÏέ3ξ2εις. παÏá½³3ξ2εις. παÏέ3ξ2ειϲ. παÏá½³3ξ2ειϲ. παÏέ3ξ2εισ. παÏá½³3ξ2εισ. παÏέ3ξ2ει. παÏá½³3ξ2ει. παÏέ3ξ2ετον. παÏá½³3ξ2ετον. παÏε3ξ2έτην. παÏε3ξ2έτην. παÏέ3ξ2ομεν. παÏá½³3ξ2ομεν. παÏέ3ξ2ετε. παÏá½³3ξ2ετε. παÏέ3ξ2ουσι. παÏá½³3ξ2ουσι. παÏέ3ξ2ουϲι. παÏá½³3ξ2ουϲι. παÏέ3ξ2ομαι παÏá½³3ξ2ομαι παÏέ3ξ2ῃ παÏá½³3ξ2ῃ παÏέ3ξ2εται. παÏá½³3ξ2εται. παÏέ3ξ2εσθον. παÏá½³3ξ2εσθον. παÏέ3ξ2εϲθον. παÏá½³3ξ2εϲθον. παÏε3ξ2όμεθα. παÏε3ξ2όμεθα. παÏέ3ξ2εσθε. παÏá½³3ξ2εσθε. παÏέ3ξ2εϲθε. παÏá½³3ξ2εϲθε. παÏέ3ξ2ονται. παÏá½³3ξ2ονται. .πλεο2ν1έ .πλεο2ν1á½³ .πλεο2ν1ε .Ï€Ïοει2σ1 .Ï€Ïοει2ϲ1 .Ï€Ïοε2κ1 .Ï€Ïοε2ν1 .Ï€Ïοε2ξ1 .Ï€Ïοέ2ξ1 .Ï€Ïοέ2ξ1 .Ï€Ïο2σ1 .Ï€Ïο2ϲ1 .Ï€Ïο3σ2άβ .Ï€Ïο3σ2άβ .Ï€Ïο3ϲ2άβ .Ï€Ïο3ϲ2άβ .Ï€Ïο3σ2αβ .Ï€Ïο3ϲ2αβ .Ï€Ïοσει2σ1 .Ï€Ïοϲει2ϲ1 .Ï€Ïο3σ2εί .Ï€Ïο3σ2εί .Ï€Ïο3ϲ2εί .Ï€Ïο3ϲ2εί .Ï€Ïο3σ2έσει .Ï€Ïο3σ2έσει .Ï€Ïο3ϲ2έϲει .Ï€Ïο3ϲ2έϲει .Ï€Ïο3σ2εσεί .Ï€Ïο3σ2εσεί .Ï€Ïο3ϲ2εϲεί .Ï€Ïο3ϲ2εϲεί .Ï€Ïοσε2ν1 .Ï€Ïοϲε2ν1 .Ï€Ïοσε2ξ1 .Ï€Ïοϲε2ξ1 .Ï€ÏÏŒ3σ2θι .Ï€Ïá½¹3σ2θι .Ï€ÏÏŒ3ϲ2θι .Ï€Ïá½¹3ϲ2θι .Ï€Ïο3σ2θί .Ï€Ïο3σ2θί .Ï€Ïο3ϲ2θί .Ï€Ïο3ϲ2θί .Ï€Ïο4σ3θιγ .Ï€Ïο4ϲ3θιγ .Ï€ÏÏŒ3σ2κοπ .Ï€Ïá½¹3σ2κοπ .Ï€ÏÏŒ3ϲ2κοπ .Ï€Ïá½¹3ϲ2κοπ .Ï€Ïο3σ2κόπ .Ï€Ïο3σ2κόπ .Ï€Ïο3ϲ2κόπ .Ï€Ïο3ϲ2κόπ .Ï€Ïο3σ2τασ .Ï€Ïο3ϲ2ταϲ .Ï€Ïο3σ2τάτ .Ï€Ïο3σ2τάτ .Ï€Ïο3ϲ2τάτ .Ï€Ïο3ϲ2τάτ .Ï€Ïο3σ2τατ .Ï€Ïο3ϲ2τατ .Ï€Ïο3σ2ταυ .Ï€Ïο3ϲ2ταυ .Ï€Ïο3σ2τεί .Ï€Ïο3σ2τεί .Ï€Ïο3ϲ2τεί .Ï€Ïο3ϲ2τεί .Ï€Ïο3σ2τεν .Ï€Ïο3ϲ2τεν .Ï€Ïο3σ2τέν .Ï€Ïο3σ2τέν .Ï€Ïο3ϲ2τέν .Ï€Ïο3ϲ2τέν .Ï€Ïο3σ2τεÏν .Ï€Ïο3ϲ2τεÏν .Ï€ÏÏŒ3σ2τεÏν .Ï€Ïá½¹3σ2τεÏν .Ï€ÏÏŒ3ϲ2τεÏν .Ï€Ïá½¹3ϲ2τεÏν .Ï€Ïο3σ2τέÏν .Ï€Ïο3σ2Ï„á½³Ïν .Ï€Ïο3ϲ2τέÏν .Ï€Ïο3ϲ2Ï„á½³Ïν .Ï€Ïο3σ2τήσ .Ï€Ïο3σ2τήσ .Ï€Ïο3ϲ2τήϲ .Ï€Ïο3ϲ2τήϲ .Ï€Ïο3σ2τόμ .Ï€Ïο3σ2τόμ .Ï€Ïο3ϲ2τόμ .Ï€Ïο3ϲ2τόμ .Ï€Ïο3σ2τομ .Ï€Ïο3ϲ2τομ .Ï€ÏÏŒ3σ2τῳ .Ï€Ïá½¹3σ2τῳ .Ï€ÏÏŒ3ϲ2τῳ .Ï€Ïá½¹3ϲ2τῳ .Ï€Ïο3σ2Ï„á¿´ .Ï€Ïο3ϲ2Ï„á¿´ .Ï€Ïο3σ2υγ .Ï€Ïο3ϲ2υγ .Ï€Ïο3σ2υμ .Ï€Ïο3ϲ2υμ .Ï€Ïο3σ2Ï…2ν1 .Ï€Ïο3ϲ2Ï…2ν1 .Ï€ÏÏŒ3σ2φαγμ .Ï€Ïá½¹3σ2φαγμ .Ï€ÏÏŒ3ϲ2φαγμ .Ï€Ïá½¹3ϲ2φαγμ .Ï€Ïο3σ2φάγμ .Ï€Ïο3σ2φάγμ .Ï€Ïο3ϲ2φάγμ .Ï€Ïο3ϲ2φάγμ .Ï€Ïο3σ2φάζ .Ï€Ïο3σ2φάζ .Ï€Ïο3ϲ2φάζ .Ï€Ïο3ϲ2φάζ .Ï€Ïο3σ2φάττ .Ï€Ïο3σ2φάττ .Ï€Ïο3ϲ2φάττ .Ï€Ïο3ϲ2φάττ .Ï€ÏÏŒ3σ2χημ .Ï€Ïá½¹3σ2χημ .Ï€ÏÏŒ3ϲ2χημ .Ï€Ïá½¹3ϲ2χημ .Ï€Ïο3σ2χήμ .Ï€Ïο3σ2χήμ .Ï€Ïο3ϲ2χήμ .Ï€Ïο3ϲ2χήμ .Ï€ÏÏŒ3σ2ω. .Ï€Ïá½¹3σ2ω. .Ï€ÏÏŒ3ϲ2ω. .Ï€Ïá½¹3ϲ2ω. .Ï€ÏÏŒ3σ2ωθεν. .Ï€Ïá½¹3σ2ωθεν. .Ï€ÏÏŒ3ϲ2ωθεν. .Ï€Ïá½¹3ϲ2ωθεν. 
.Ï€Ïο3σ2ÏŽÏ„ .Ï€Ïο3σ2ώτ .Ï€Ïο3ϲ2ÏŽÏ„ .Ï€Ïο3ϲ2ώτ .Ï€Ïο3σ2ωτ .Ï€Ïο3ϲ2ωτ .Ï€Ïοϋπε2ξ1 .πυ2Ï1άγ .πυ2Ï1άγ .πυ2Ï1αγ .πυ2Ï1αίθ .πυ2Ï1αίθ .πυ2Ï1αιθ .πυ2Ï1ῆθ .πυ2Ï1ηθ .πυ2Ï1ήθ .πυ2Ï1ήθ .πυ2Ï1ακ .Ï€Ï2Ï1αυ .πύ2Ï1αυ .πυ2Ï1Î±Ï .πυ2Ï1αύ .πυ2Ï1αυ .πυ2Ï1ήνεμ .πυ2Ï1ήνεμ .πυ2Ï1ηνέμ .πυ2Ï1ηνέμ .πυ2Ï1ωπ .σελα2σ1φό .σελα2σ1φό .ϲελα2ϲ1φό .ϲελα2ϲ1φό .σελα2σ1φο .ϲελα2ϲ1φο .συμπαÏει2σ1 .ϲυμπαÏει2ϲ1 .συ2ν1 .ϲυ2ν1 .σÏ2ν1 .σύ2ν1 .ϲÏ2ν1 .ϲύ2ν1 .συνδιέ2ξ1 .συνδιέ2ξ1 .ϲυνδιέ2ξ1 .ϲυνδιέ2ξ1 .συνδιε2ξ1 .ϲυνδιε2ξ1 .συνδυ2σ1 .ϲυνδυ2ϲ1 .συνε2ξ1 .ϲυνε2ξ1 .τεσσαÏε2σ1κ .τεϲϲαÏε2ϲ1κ .Ï„Ïει2σ1κ .Ï„Ïει2ϲ1κ .Ï„Ïι2σ1 .Ï„Ïι2ϲ1 .Ï„Ïι3σ2μό .Ï„Ïι3σ2μό .Ï„Ïι3ϲ2μό .Ï„Ïι3ϲ2μό .Ï„Ïι3σ2μο .Ï„Ïι3ϲ2μο .Ï„Ïι3σ2μῶ .Ï„Ïι3ϲ2μῶ .Ï„Ïι3σ2Ï€ .Ï„Ïι3ϲ2Ï€ .Ï„Ïί3σ2Ï„ .Ï„Ïá½·3σ2Ï„ .Ï„Ïί3ϲ2Ï„ .Ï„Ïá½·3ϲ2Ï„ .Ï„Ïι3σ2Ï„ .Ï„Ïι3ϲ2Ï„ .Ï„Ïι3σ2ÏŽ .Ï„Ïι3σ2á½½ .Ï„Ïι3ϲ2ÏŽ .Ï„Ïι3ϲ2á½½ .Ï„Ïι3σ2ω .Ï„Ïι3ϲ2ω ὑο2σ1κ ὑο2ϲ1κ ὑπεί2σ1 ὑπεί2σ1 ὑπεί2ϲ1 ὑπεί2ϲ1 ὑπει2σ1 ὑπει2ϲ1 ὑπεί3σ2ας ὑπεί3σ2ας ὑπεί3ϲ2αϲ ὑπεί3ϲ2αϲ ὑπεί3σ2ασ ὑπεί3σ2ασ ὑπεί3σ2αν ὑπεί3σ2αν ὑπεί3ϲ2αν ὑπεί3ϲ2αν ὑπει3σ2άν ὑπει3σ2άν ὑπει3ϲ2άν ὑπει3ϲ2άν ὑπει3σ2άσ ὑπει3σ2άσ ὑπει3ϲ2άϲ ὑπει3ϲ2άϲ ὑπε2κ1λαμ ὑπε2κ1λήψ ὑπε2κ1λήψ ὑπε2κ1Ï„ ὑπε2ν1 ὑπε2ξ1 ὑπε2Ï1 ὑπέ2Ï1 ὑπέ2Ï1 ὑπέ3Ï2α. ὑπέ3Ï2α. ὑπέ3Ï2ης. ὑπέ3Ï2ης. ὑπέ3Ï2ηϲ. ὑπέ3Ï2ηϲ. ὑπέ3Ï2ησ. ὑπέ3Ï2ησ. ὑπέ3Ï2á¾³. ὑπέ3Ï2á¾³. ὑπέ3Ï2αν. ὑπέ3Ï2αν. ὑπέ3Ï2αι. ὑπέ3Ï2αι. ὑπε3Ï2ῶν. ὑπέ3Ï2αις. ὑπέ3Ï2αις. ὑπέ3Ï2αιϲ. ὑπέ3Ï2αιϲ. ὑπέ3Ï2αισ. ὑπέ3Ï2αισ. ὑπέ3Ï2ας. ὑπέ3Ï2ας. ὑπέ3Ï2αϲ. ὑπέ3Ï2αϲ. ὑπέ3Ï2ασ. ὑπέ3Ï2ασ. ὑπε3Ï2εθ ὑπε3Ï2έθ ὑπε3Ï2έθ ὑπε3Ï2εί ὑπε3Ï2εί ὑπέ3Ï2υθ ὑπέ3Ï2υθ ὑπε3Ï2Ïθ ὑπε3Ï2ύθ ὑπε3Ï2υθ ὑπεÏε2κ1τε ὑπεÏε2κ1τί ὑπεÏε2κ1Ï„á½· ὑπε3Ï2έπτ ὑπε3Ï2έπτ ὑπε3Ï2επτ ὑπε3Ï2έψ ὑπε3Ï2έψ ὑπε3Ï2εψ ὑπε3Ï2έω ὑπε3Ï2έω ὑπε3Ï2ῶ ὑπε3Ï2έε ὑπε3Ï2έε ὑπε3Ï2εῖς. ὑπε3Ï2εῖϲ. ὑπε3Ï2εῖσ. ὑπε3Ï2εῖ. ὑπε3Ï2έο ὑπε3Ï2έο ὑπε3Ï2οῦ ὑπε3Ï2εῖτ ὑπε3Ï2ÏŽ ὑπε3Ï2á½½ ὑπε3Ï2ω ὕ2σ1Ï„Ï á½•2ϲ1Ï„Ï á½‘2σ1Ï„Ï á½‘2ϲ1Ï„Ï .φαε2σ1φ .φαε2ϲ1φ .φω2σ1φ .φω2ϲ1φ .χαÏι2σ1Î±Î½Î´Ï .χαÏι2ϲ1Î±Î½Î´Ï .χαÏι2σ1Î¬Î½Î´Ï .χαÏι2σ1á½±Î½Î´Ï .χαÏι2ϲ1Î¬Î½Î´Ï .χαÏι2ϲ1á½±Î½Î´Ï .χει2Ï1άγ .χει2Ï1άγ .χει2Ï1αγ .χει2Ï1απ .χει2Ï1αψ .χει2Ï1ου .χει2Ï1ῶν .χει2Ï1άν .χει2Ï1άν .χει2Ï1αν .χη2ν1ναλ ὡ2σ1α2ν1εί. ὡ2σ1α2ν1εί. ὡ2ϲ1α2ν1εί. ὡ2ϲ1α2ν1εί. ὡ2σ1α2ν1εὶ. ὡ2ϲ1α2ν1εὶ. ὡ2σ1αÏτως. ὡ2σ1αύτως. ὡ2ϲ1αÏτωϲ. ὡ2ϲ1αύτωϲ. ὡ2σ1αÏτωσ. ὡ2σ1αύτωσ. ὡ2σ1εί. ὡ2σ1εί. ὡ2ϲ1εί. ὡ2ϲ1εί. ὡ2σ1εὶ. ὡ2ϲ1εὶ. á½¥2σ1πεÏ. á½¥2ϲ1πεÏ. ὡ2σ1πε2Ï1εί. ὡ2σ1πε2Ï1εί. ὡ2ϲ1πε2Ï1εί. ὡ2ϲ1πε2Ï1εί. ὡ2σ1πε2Ï1εὶ. ὡ2ϲ1πε2Ï1εὶ. á½¥2σ1τε á½¥2ϲ1τε ι2σ1χίλιοι. ι2σ1χίλιοι. ι2ϲ1χίλιοι. ι2ϲ1χίλιοι. ι2σ1χιλίων. ι2σ1χιλίων. ι2ϲ1χιλίων. ι2ϲ1χιλίων. ι2σ1χιλίοις. ι2σ1χιλίοις. ι2ϲ1χιλίοιϲ. ι2ϲ1χιλίοιϲ. ι2σ1χιλίοισ. ι2σ1χιλίοισ. ι2σ1χιλίους. ι2σ1χιλίους. ι2ϲ1χιλίουϲ. ι2ϲ1χιλίουϲ. ι2σ1χιλίουσ. ι2σ1χιλίουσ. ι2σ1χίλιαι. ι2σ1χίλιαι. ι2ϲ1χίλιαι. ι2ϲ1χίλιαι. ι2σ1χιλίαις. ι2σ1χιλίαις. ι2ϲ1χιλίαιϲ. ι2ϲ1χιλίαιϲ. ι2σ1χιλίαισ. ι2σ1χιλίαισ. ι2σ1χιλίας. ι2σ1χιλίας. ι2ϲ1χιλίαϲ. ι2ϲ1χιλίαϲ. ι2σ1χιλίασ. ι2σ1χιλίασ. ι2σ1χίλια. ι2σ1χίλια. ι2ϲ1χίλια. ι2ϲ1χίλια. ι2σ1μÏÏιοι. ι2σ1μύÏιοι. ι2ϲ1μÏÏιοι. ι2ϲ1μύÏιοι. ι2σ1μυÏίων. ι2σ1μυÏίων. ι2ϲ1μυÏίων. ι2ϲ1μυÏίων. ι2σ1μυÏίοις. ι2σ1μυÏίοις. ι2ϲ1μυÏίοιϲ. ι2ϲ1μυÏίοιϲ. ι2σ1μυÏίοισ. ι2σ1μυÏίοισ. ι2σ1μυÏίους. ι2σ1μυÏίους. ι2ϲ1μυÏίουϲ. ι2ϲ1μυÏίουϲ. ι2σ1μυÏίουσ. ι2σ1μυÏίουσ. ι2σ1μÏÏιαι. ι2σ1μύÏιαι. ι2ϲ1μÏÏιαι. ι2ϲ1μύÏιαι. ι2σ1μυÏίαις. ι2σ1μυÏίαις. ι2ϲ1μυÏίαιϲ. ι2ϲ1μυÏίαιϲ. ι2σ1μυÏίαισ. ι2σ1μυÏίαισ. ι2σ1μυÏίας. ι2σ1μυÏίας. ι2ϲ1μυÏίαϲ. ι2ϲ1μυÏίαϲ. ι2σ1μυÏίασ. ι2σ1μυÏίασ. ι2σ1μÏÏια. ι2σ1μύÏια. ι2ϲ1μÏÏια. ι2ϲ1μύÏια. ι2σ1χιλιοστ ι2ϲ1χιλιοϲτ ι2σ1μυÏιοστ ι2ϲ1μυÏιοϲτ ι2σ1χιλιάκις. ι2σ1χιλιάκις. ι2ϲ1χιλιάκιϲ. ι2ϲ1χιλιάκιϲ. ι2σ1χιλιάκισ. ι2σ1χιλιάκισ. ι2σ1μυÏιάκις. ι2σ1μυÏιάκις. ι2ϲ1μυÏιάκιϲ. ι2ϲ1μυÏιάκιϲ. ι2σ1μυÏιάκισ. 
ι2σ1μυÏιάκισ.", + ["compression"]="zlib", + ["data"]="xÚu}Ë–ë6²å¯xV³\\ë\28»ÿ/ÓTJ™}g¥ïuù®žUu×c’J™(‰\18\31ó\30HßpúKš\17\0b\7àHÅÆFD\0\4@\0\4Á[ûå‡[·üN˯_~ó—\31îÏËïuùýÏå÷\31_~ø><Òá‰\14?Ó¡¡ÃŠ\14ÏtXÓaC‡w:üJ‡ÿ¤Ã\127Ñá7:ü•\14\127£Ãßéð¿éð\127èð\15:ü“\14ÿ¢Ã¿éð\7\29Z:\4:ìép CG‡#\29\22o¿äÕH^äÕH^äÕH^äËH¾ŒäËH¾ŒäËH¾Œ¿Óá¿é@^äÕH^äÕH^äÕH^äÕH^äÐH¾ŒäÆx¦Ã…\14Ãr˜È¡‰\28šÈ¡‰\28šÈ¡‰\28š(›&ʦ‰\\›Èµ‰\\›Èµ‰\\›Èµ‰\\›Èµ‰\\›Èµ‰\\›Èµ‰\\›Èµ‰\\›Èµ‰¼š(›&rm\"×&Ê¡™Ü˜É™Ü˜Éƒ™<˜ÉîL†f24“ú™ôͤo&}3é›IßLúf*\31\31Ëo»ü>—ßn)\27¿,¿·å÷m‘—4Ý8‹èÂ\20y¤Ë5ò•êép¥ÃHº(Áó’š[ûõÖóqGÇ…,§#Ÿæßù4ü!§ƒœ‚œ„2´rêä´—Ó¢åãÇEñâ\11Ÿ¦#Ÿ†G9=ý(FïÏ||\19›W9]Äô¿Dz—ÓorúO9‰[ã¯rú«œþKNÿM¦\23ÅdšN‹i:-¦ùôD§[Ç©î8Õ¤º“Tw’êNRÝIª;Iu'©î$Õ¤º“Tw’ê­¤z/é|—Ó¯?Š5±LÉí$¹$·“äv’ÜN’ÛIr;In'Éí$¹$·“än%¹{Ià¯rzçtž¾Æã\27\29ÙæIlžÄæIlžÄæIlžÄæIlžÄæIlžÄæ§Øìø4¯Åæßäôw6=s\22Ϝųdñ,Y\29ù´¤“NK:ùtS“P–tò©“Ó^N‹–7IÙ•Oó¿D’\0185J‰šïÏt¾ÿr\127þIòDÿ<êŸ'ùs›\23_ùJ,g¹\22ôgqï§[xX\14{:\28èp¤Ã™\14\23:\\é0Ða¤Ã´\28îtx¢CC\7Rpÿ™\14+:¬é°¡ÃËrøË\15?ýßᇟ¾O3™ú\11\29H¾\5Aö„ì\25Ù\11r äÀÈA#!GFŽ‚œ\00993r\22äBÈ…‘‹ WB®Œ\\\5\25\8\25\24\25\4\25\9\25\25\25\5™\8™\24™\24¹?þ…\14„Ü\31\5y\"䉑'A\26B\26F\26A(¥wNé]Rz_\17²bd%Èš5#kA6„l\24Ù\8òBÈ\11#/„<ÜÂOËaO‡\3\29Žt8ÓáB‡+\29\6:Œt˜–Ãý‘\14Othè@\ +î+:¬é°¡ÃËO?|½…/·°œö_nûåtør;,§ã—Ûq9¿ÜÎËéòåvYN×/·ër\26¾Ü†å4~¹?|½?~¹?.§§/÷§\31–’ÿ/ßç\127.róåÞ,§ðå¾(¿¯¾ÜWËiýå¾^N›/÷Írzùr\127\17û{9\29åt–ÓEN\19ŸÈ\6\0269\0059­ä´–ÓFN/’˜˜¦£œÎrºÈiâ\19ë܋νè܋νè܋νè<ˆÎCʧ£œÎrºÈé*§‰Olá \22\14bá \22\14bá \22\14bá(\22ŽbᘮÄYN\0239]å4Èi”ÓÄ'¶wä+A§FN¢“Í\30ÅìQÌ\30ÅìYÌžÅìYÌžS\1¸Èiâ\19[8‹ê³¨>‹ê³¨>‹ê³¨¾ˆê‹¨¾ˆê‹¨¾¤²5ñ‰U_DõET_DçEt^DçUt^EçUt^EçUt^Sy\29ä4Êiâ\19\27ºJ.]ÅÞUì]%)W1{\21³W1;ˆÙAÌ\14bv\16³ƒ˜\29ÄìªÉÄ'¶7ˆ½Aì\13bo\16{ƒØ\27ÄÞ öF±7Š½Qìbo\20{£Ø\27ÅÞ˜ªåÄ'6;ŠÙQÌŽbv¤f›NáAÀ•œÖrÚȉœ˜Ä‰Iœ˜Ä‰Iœ˜Ä‰Iœ˜Ä‰Iœ˜$Ë'qb\18'&qb\18'&1;‰ÙIÌNlviSÈ,ör:Èé(§³œ.rš¤\17jä$ñ¸™y”fæQt>‰Î'Ñù$:ŸDç“è|\18Oœ\20:\13r\26å4I3\23[»FN¢“[·'1û$fŸÄl#†‚œ\26±'m\29IW‘®\"\"\"M\"MÒš>Ikú$Ò‹H/ܶޤ‰å„­ÄÐJ,¬$a+IØJ”­$\13+IÃ*µÐk9mäôÂí5«^‹êµ¨^‹êµè\\‹Îµè\\‹Îµè\\§V\127#§\23nüYçFtnDçFtnDçFtnDçFtnDçFtnÒä…ï$¬óEt¾ˆÎ\23Ñù\"Yð\"ª_$¯_äÚ¾H^¿ˆ½\23±÷\"™ü\"f_Ä싘}\17³/”¢¥3#½–åôÿ\30ÿ—ü‘N\14uaFýÃ= é—,'e^#óªÌ«2\7a\14Ê\28\"sPæ ÌQ˜£2ÇÈ\28•\25{7û.tŠLéÆÐ92c涗\20í5Eû˜¢½¦h¯)ÚKŠöš¢}LÑ^S´×\20í%E{MÑ>¦h¯)ÚkŠö’¢½¦h\31S´×\20í5E{IÑ^S´)ÚkŠöš¢ƒX?¨õC´~Pë\7µ~\16ë\7µ~ˆÖ\15jý Ö\15bý Ö\15ÑúA­\31ÔúQú¾ËI˜ÇØ\13>RGxÔ?Ü·•ü¨õ!Z\31Ôú\16­ß\0319ítb&ý‘\1BL»ü\17æ ÌA™Cd\14Ê\28”9\ +sTæ\24™£2ÕºŒ@\30cÚé0cÚå0WÂ\\)s\21™+eƱHÃå“NÂl¤|ÒY˜M,Ÿ÷ Ì Ì\16™A™A™\13îè¤:÷Qç^uîUç^tîUç>êܫνê<ˆÎƒê<.‚W\30\31},\29<#\127\31[\4n*\31nýmK£\18•¾{+ÒT²J÷gî˜\127³Ò÷qÌb€À€KÌW ’ôt»Þ\127¡0ùÿ}ì³@\1Á\6¨@ó ù\31Ä¿a¹‘´\11²£\4ZäûxòЭ7Èz‰B£‘ 2E\0€\8Á\19,pë]„Þ…ïL\6d\11\25\16\11Ž`\1±\16ó7ÉšÃÉ$\18Ò%k¾ô.?i`l€ïcç…\18\ +ŠG\14…šƒã,jšBMSDBä6ƒÔ}Ÿ\127Ç\24ó\26ùÛ½Oζp~_ ¡P\19|r¶…óû\2\9>9”\0ï\15c¡‚9mó‘&¦\11ò¬\9|@ˆ’[’<4¯“ø±Ä¹Ès¤ØR p\19ù\13ez\2ñ}X%`\4}#jÃ\0188ÊÓ²ñ«H·\15”—(\8H\21`ù\3å¨Ý\16Dþæ5‚\3÷§Û\14ezl€Àü»‹ÑÛôÝ_—A¸M¡\0058Â7”\23\15\4P§ˆ \13\5a\ +B¤U\\µHtö§Ûøc&ÝZ-4\1£Ó‡[w»Z§\1¸‘¼Ey)7\6ø úýÑj\0`‰±E™ã3 Åä\12úÎèÏ\25¬ïn\7«édÅå¦t0ZûÛ\0\17\7Œ8@D R÷\23È\8,„à\9\0Üz\23¡¯„÷.¼wá;”±,R\4$\4$ì¨+çÒ`\1Jƒ'\0@6\28œ‚\21F_\1ye®èeA ê\3p#ù\3e\"\1pÿ…néHé=t›\29r…êv•Â\18+Í\"ÝZ'\127Ò \0051¾;yn…5b\4c²¯KMè±j8è&ÈÎ#”žÔß×…îu©{]è^—º\17ú„²¹ôŠ:\4n'#“é3šE2R‹ôûÔ»´cÊEC“\11“êh |]¹p\22¤\0EòÊubå5\5)”$¯ébš™«»æ\23t\3\3¯\24x\5­Ws?»jIÖû\25\0‘ÐyB\7~\13`z@Ó\3˜\30 â\12\17gì\4\0@„ïóþ\1\8\17Èúº\7T˜Ûº¨\16n£×å65¢FE’J¼üîÒÃÅ\26\22Âde¢\0póò2FB\21Cé\30—ÎÆŒ\25ÚÃ=k\\º*3^²“'HÏÖ\16\16¸¯èšz\4\ +\19!;”±\3A„^K\23?ÕŒ*©TEY†˜T¦\16ˆ\17>PN÷t£qDqî!kT@5¢œg/TC¼»d\13\9ˆ\26v(û\27í´Dø\5šå‰f>z\15QOÃGóŠhqÒ£\6Õ}òeareareaòeareaòeaÒ1€Õ€Àü»\5îñæ\2˜Þ`(ØG‰Å±ZÖˆ%ëóЭ†¼ÙÖ†e|é¡û3\"Ë\5\12h«G`¹xÁEØû\8{\23ÁËK\23Ð#K¿© œƒ½zbÉA\20íþK©»÷š¤äƒ&„XÓ“‹v¥üò\17\11;Ò­\29Û+&ã{\0\11o¯Î[ß-\23G*n”N”.8\7\22M£×3º8#F°•’# 
\0uT\"4>Bã\8¾¼Ý]yÃ\8‹ìÊßÒÖv^…ƒ–H\14áhC\25m(¢\13>ZQvQÆ–$8BP‚æe³\\¯ÞÈ!Ê&|‡2Üë9Â\14eKXR¹xÕB/ÛC”ÒÅ3GrÐM¢}x„¨+°\31\28)Ðü\6šk¨>ÀHÂCìSArÐM£í<âóÈ“‚!Åâ\0175ÉíÅbt×µ`¨\16CIL©Ä\18]€9¥H,Àìc‰Q­{*},1$f\03175\0317\21\03175\0317\21\03175\0317\21\03175\31Íl•@Ç\7Dn»\7O9\1RDÙ¹à“\25•.Á³\12F,\16<`fó?\24x¶c]‡\16%÷ç8œE£\16FÆñ\30aƒ{goÉꟽEÀ\18-Th¡¤5\21\26&\18\27…\21Ôl”ÞlG‹úz¶ßõ\12Ò\27Î\4c·ðÙNõ‘dçËIÆzK\4Û‚>Û‰äEº‚Ý+ÚÅÀ\9\3'pÊJo8ýz…âú\12Òš§[Ÿ\16IµÐz‡ØÛÆ\26ï*‹x\0îL¯‹Øpó,ˆÂ;xø!\0172ÐQ\4|0µP¶hp¢müÈ\28tÔÖ˽÷\23¸¦ô|~ì=D½\\s­Ö&\7>¬D%o\13Ù³Y.\1Èt\17𜆗\7r‡\31å¶\25‰O¥-$Ϧ\11\18BòœÚ#ÊaëÛÛÞ>KÚ[qi¸¬DÔÎŽ„\14\16GÙz„f'<é\2z/¨ób©¦ˆvKýù°’í\31v:Ñ\30µ\30\22×\26«\23€H\8ž\16@ßÖD`s{\4h.Æ\19‚5Aý>ç\5B‰\20J\18¨¦®1\26çèöW’,´5…¯ãKc\2;ÎðL®ãy&\4à!Þv‘{›\12#2y‡ºNFÜQî[¿N\8ÐC7SÏY¶55˜)\9Ñ`\1Š!f­†\0174\\L\9b\13\0,1.Pþz~‚„€\25¦FBï4Ì _¡»\23\17Ì\27!õàéà=\5 .(imMÒE%\0Æ…%%уqÉG‰ñƒ6\4C…\24\ +¢ø8—\30Î…\127séÝìýÀŒ\23Ïzïƒ'…’´hZn:P\15$ªGw5®.¬©C\28ªöHÅ2„® 4Œ®8W’CAÞbCk/F~ʸÕB/=ç½\21‰¸´0Ò‰S€\26\24D¨\17*(\30ù£Pó‡ãPƒW¨iŠHˆÈ}†ÇÝ[~¡i¥ƒxÂxu™\7i\25c»\18Ks\4Ë\13”¡ül1#Rª²\28Ë”‰2›kp¥\30”½\14\0ð£\27/\127ÀÝóŠcTŽ`ä-7\23î¦ê Ž´õHy÷•h“‹vò‘N.Ê'ʶK(ÞÍÞ·Ù™˜½\9G€–\8\6\16\197ц‹s\18fC\"pp1fG0]W!Àr°¨d_`¸$,ÑJ,TÔ…‚‡KÃ’º¦\18Õc>O–¾u©»¢ºLO%9©»Ì+ \23ìGÖ\8¯žðŠFàfÎFGÐpÅø¶ŸrµSWq¡\16V9˜ªbÙÞ“Ióº\20\29\0q¬7ðÄ\0304ÉÃ\13º~F\\øV\"j‡Ù\6\0Ó·(Ó…°ù\ +O~÷ðä·\3i«Ï3u\13Ôµ¡Ž–h{T£¹«ŒÚ‡Ñû\ +\0D–\26©R¬\26›¦\28Ì\29ƒ\0218,ÑB…\22JZS¡Ù\26¢+Å£\31oÓ×ÜE̘é&\26bê*\"f»‹[{×ç&Î\22\21¼Ÿ»›¾™‡îèõ\21+Ù\2 c\2±ëL¶žÃC\2\23më£))8Ý\31ÞZœ£s¤P’¼¦}n>¢”]Ù£\27\24Èl$\0Äú¸³\8_G¯\9Æn\30J¤P’‚7g\27ÍhÎC´(¨$9èþ¶T”“ˬŽæ€*ðý™jU\13¥ed\8ïðŠœ °·Ò\18ÿL\17ʲ%!üì·\30đŸGª\6-WïãOÔõ•òe\"…?„u Ñe\8z\21\1 \27œ'\0@\13\7¬t`\025318B@–WS€‰\21ö\3YÞ¡ì5Ì^Ãì4ÌNÃŒN®Ì³Ñ­•X›\9$ꛕì\3S*¤gÛxÙåzTüΦr?CE|Ñè\21\2í\12iGÓjxûã‰5„hj\13à]‘ õís¹yžíºqÛhŸ \1\31m‡¤³âB\28M‡äD=Ü|¿ù”^¹‰‹ÀrÛ|‚yøSì}\3°µ~q/\2\1¸Û°lSh{Ñâ\0šß¡î\19ÄœíЫC€ø´Ú\ +ãÛWc>iõ‹ßØ\27\26­z±ÁÞ›€z‚°O|âÞÁ3ö\19HŸôØØR׶u\3i‡‹{N0µÙÃLhó ;~_x¶q\1àGH;”±f0ÁT\4!\24@L´ÞD\11\26$çW*óÅú¹@B4Ö\12#Ï`\8!!ÙI²S\2@/Ï\14X¤óvl¹<\25\8ÕôÎò3^&\7I4]Ö–\17\\Ø&¤×\2ùVF\27áJwvæä\4-{xÂÙã\19Î>N@¬\16I\5‰»«¬Á¼Õ): \24\\¬õ‹Qv1ù–Ÿ©ìt1žô—OVä©¢O+Ù›×ÎNcžL\23Ú®\21çÇ8‡sòP$…’\20 7\0067Öîå½”¦ …’\20Àœ+¿¶ìÒ\ +õ7+Ù÷y—\27‰¹Sì¬Ä7\17\19h'cz[¥\24_\25\\•5V•©q¶\9Œs.r9hßÉ£-\28´í$\2i©aŠÅ)/3L\26€^\24‹¥ÃŠùE1\13\12h\11\21›\7X\28FsŠ·“ÁB\5[x4Ûè1žsô‘\11b¨\17Õòg‰ÅÂíÜù,1;ÿ›^3Í9´\28Ú`\14m’:Ž)%Ožÿ\16ØHP@ËÏÆ\20“-&\27Sø64Ÿí`Øâíî/VËa§ÿ¼€ß<¾êa)Ä\12\11!f]\6±ŠâÑh9Z\29G\19§‡G?\"Û\27Å\12}Ò\25z¤36yL…©­ˆH\23fe!ÔheÖé\8ÃýÕ›u\16µ;÷o\30¡WÙ=ÉGäŽ`wD›@åºò\4H€ç9³Ôf‹Ä\26íH¡$\1dß\15Â7ƒä •ù/õ`Ö×@“4›8\3mA\7W‰\17ÛÑTˆ‡/+ˆ7\27·\28BÎý\9ÉöNz\3¡î\22\28ø„”\14<¶\0ÂÉÉ;\31Á\19LÞ±c½­Ä=\2\18!Ë\28Á%\21rÕÌÅ°ñ\0304Íær,R~‹QôÆ\21¸Ònõ\14!Š¾óš\24\22 Âý\21ãü5™ Ò h$aY‹\1¢\30G1‹w\127´š\12–h¡B\11%­©Ðš\7ŸC¥QÀ\18-Th¡¤5\21Zž€²¯ùà+>Ð:æ\30ˆ}Í\7^ñ±ïðáÝlÛ\15³ÀnƒûÛ‡´Ó\25¡\25;‚V\0092…ÈJr\23>CìÁ<ƒdÙîn2ÃK($½YÉŒŸéÝ$;H€}%è½$\12´o¢Î°ÅÄŒ\27LÌ°½ÄŒ›KÌ°yÅ\28µ®RØ`¤\7Þì뀾\14à뀾BL\\ÊÛ#@±=\1\0òËÉ>üÍ6w4ËoÄû³™Ìži;y\27ø†ùAÝ\ +Ì 7Ìx\"à•xv×âÙ_gw=žý\21yÃ{¶íØÌ0ú›i;6“o\27~ܱ²9ç %ÊÆu \24Q+«\4]Aï\21uš¦Ÿ;T+ó_#½Ø(/²R‹æ[í\2àç¤Á\4BIÊPg_¬W9\24™VŽYi\4iÊR¯û›yÄhëåÅW§Õ \19 ºv¼ÄBÝ›\31o—¯Òƒ\6<\20xs\127ƒtðPéê PBIjè»\16.ZBø2¿å‹œ\11Ìsj:Ÿ¥Þóÿ7¹vÒˆ^³@5Ê\6„\28ÛžóÐqùÿxÛÒÆÔ\22¡5‡\14¢•0ÂÅs¹©›GSó!\11Kçö`GÑ£\21—{ô!'“$›Ôo¶M\30Msýš«øýÕTï%ÆÕƸš\24Ví`Iƒ!\13†4ZÒhHæ\127¾\15Æÿ1²m`G ­L\23ó›•xzÈ\4¾æ\7*Ã\3H&t6Y:\6Tm\27›Ïô\14‡•b>Síш¢ü¡m¡È¼=€\5nmjIŸ\16Ñ·Bž\4’úÈ%…WÆ\16-7òºo.óòº¯•õM\18•òÞ†)0``\22síÇz/\17\127¼7_u\0\25#{ˆH¡$!DŸJáh[Ä',\ +…‚\20”|šKŸæ§¹ôiö\30H¿\30}ê½\7H\ +%©ù*·³LaùéŽWùIÚà1n·¸È\27lã\"b¯/¿§È{àKœàdÓÖB+kÛWhYÁž±t›Lƒ<Ù¦xŠ$Z†ò¾˜çõÖK3\16å¸Ö:\3±,ƒ\28sàUÖ¶ë\127þâȃ¼]Dß±jœ\28Ã\0151ìQÿ\7ýßÛÍknÁc!c²Hœ\6¯\15iûQ\11Ä…àKéA™®ôƒÙ¾ôÁn^ú`¶.•ÿ\23p5ÊÙÝøÒ×EÕôVŒ/{ÙÀ,Æ—¼Œ\20 b¶\25µZ“\1\3MöÍàílÂâ«%ô?½R¢\1Á\6¨ 
¯ŽäÿÔ±B0\1!\5ˆ“µq26NÖÆÉØø´6:cãÓÚèŒÎÚèŒÎÚèŒ\13«Öâgƒóÿ¥ÚP#Êj®ú—ÀAù\27»o\15©ÛF\13KAQÞÚJkÜÖZã¶Ò\26·•Ö¸­µÆm¥5nk­q[kÛJkÜÖZã¶Ò\26·ekÜ\22­q[¶ÆmÑ\26·Òª:Ç:Ûž9Jð\20-\19e[Ü\22­q[¶Ç­oWñ΀w\5wG°×ˆ×Á>˜æµµ@p„\0„˜\23•’T–£J)*Ê\16›²H4\6eåÃ#HÊ>uÞ£ÎùÓyo:ë\11Í\12\22Þx0Tˆ¡ ªOØ~aëåÚ.×Þ¸:€¥ß•{,ñ#÷ƒMÔÔ/\14.ØÈ6\15|ò]Ê}¢mz\27Hn\3©ÅÄÚ2ÐÀåoì•\15pÑ\3vð[ß7\29\\›êI\22²ƒŒ\6\7\25\13\0122\2\0142‚í¥\12¼OEû\0\21\7†\ +1\20ÄT«å5\30W·3˜k¸#z0÷™ù!¸ï3#\24*ÄP\16s¿tö½ÒÙõIgß#Ë‘Ae\\PŽ\ +*c\2?|Yò°2Ê0°\29\0129r\9›±\6©¨Œ6\28\28Ç\27¡Dã\14ü^\7g´+7šÕ®”ÔÈ¡BÆÜèjyÑUr¢«åƒ\31ƒŒ•k•AsÍÇÊõB¢Ô.òÚM\9X0Wv¥‡]á_Wz×\21šü\21®]ßêÕ…k«½ÒA‡Q©_j‘à)ÁQîM,\14Òr#”šîç{ˆ…ÁÐ\18„-¼ŒeŠ\17\31ÿ\20£\31¸\19:\13ÓÐy\13]ѶTJ|YÞ+¥½¸+;OÐ\15ç\5‰\15’‡i[®‘ü5:BÚ–Ë ù‹tYS_Dë\29‡\31¾©\20\31½%º\13\12\24ØÄ…,I3„¥\7rIë\4Z'Ô:Ö\9´ªÄ¹ØÇç:RBIJùíf¶p^ËÍj᜖›‡r³P÷W(O¯¶0½BIz5*±a©a71ŒO2 ŠÛª\13UÚVå­.'ÍuyP(HÁ‘rG²èDú\14dÑyì½\0067\25S<ÕéÊg:\9ÊCÈ‹>ÿ»Z1ª°YŒ\0252Ü>\31¬ø}ì\30L©±ÁÁ\5\11ýàÂ\15\24>¯©lYÆ‚„‚Ó8dƒQ6Žï5bìé\8ÁYäô¶¼\0225cL\21—«¢«\ +’¬+\11´ö!!xB“–\31ä\8Y–^ûŠ?ý‘¯c\6b\29\9\22ˆ}ì\21\127]ÄÈòu‘<\12\1Bð\4Öpr\17N.üÓ[è\\„OoA6VzOß5]e)´4ÇÃ\31ú&I–d[Û$ò–¶\"ÑÓçø/È¿CÂ\14\9ûƒ/êî!\11¼3u”‚\13\ +\16DÄ#„\29M\24×ᤣ·!;®¨Æš‘\3\6\7\23Ì6]ø\17Âc3-«xБ¢J\1$~\30ôò$w=\21‚\17R\127 5P(¡4vž3iœ<ƒÿ4\4¶©í³\24 0˜@k£+-t˜\3qú\9òÁb\127à””ÅÒ8ÎäpA\12\5ñg Àª‘óì\0182,‚sFŽ\23\12ú‡Ÿ\28rið0N\14¹tx\24'‡ÊÜ\0018TÉ¡BÆôu•Ôu./Ç2·\16³s6E¡€«&^°äY(\20¤P’Ìì\12 ±[nò\4HÁ‘\14i@q‚ê+ã…\19Ö¢\2*i4³“Ãó\127\29\17\24u­ËÁ¶RÞÚJiû(ËZ\28ì¹2õQ–³8Øs¥ì£,c:`²ôQbžhÓÒ\21)±%«-ÊUëK•N€Ù\28E$Í3a¾Ê|³3Y9•c™¢’\24\ +¢µÜ\21v;HAÑ\ +:ì\15œö²X­\21l+­`[k\5[×\ +¶®\21l‹V°-ZÁ¶Ú\ +¶®\21<ø\9(‹úVðà' ,Zo\5Ûj+ØÖ[Á¶Ú\ +¶õV°­´‚m¥\21l+­ b\127؉˜¢ff(OÂ\20µÒÆËS:\21]e-GÌLé\20u\18[è¶l¡ó܉o\11Æ¢%\24Ëv\0¯›o¡Û²…n‹\22:‘–niZ¾þN¯ƒÈg`ù]ñ(óg`-\16_KÁ­\21éK“\24¸‡À\25\3g\27Èïlä@+Æ]óM \17ã®{I’w&’ȯ\6˜@›Ö\14bÆ\13eÞ\127º]~4»\26%Y·È±„\23Ox1~Ϋ$\\xSäG+Ó®\21Žðâ\9/\9˜\9NëÚGÜg4ÊL¶À-É1ºì<—tï­\24÷ÉI\5„߃ÈÅÊ·\19Hò²v¢ž¬\24ß뎒½Jñ\26‰§ü^]Ç‹æSb®\30âwë\ +R(HMÜT\0085YH4yR(H\20m®éš+Úæš¾¹Ô\24_\24télJñÅA—Ö\8Æ\12ý…_øIYÚ\27q)[÷o¦~qY\27\17X\8Á\19\8ø-î˜.\25ò[ªHò^ž•\13;¾•¥šž!áCÂÆ„Iç\24ßâyOÒÎJRu•z2Î\20\12õÓ¤“·Ym 0` \17ù\29¡wy»@\29Ì¥¶­á\23âDÍ›\17o\"ý–¤#D\1u\4GôØ\6~æ;\11ï(bÅ%f‹1[ðà\3=h!æ\25cž!æÙ8+ŸVа­•ô\27\12™:\1uB*\6\14\24hĸ\13Y΂\1³À\6Êöjê@\15y·ÃŒ=¹À3\6\0261nF–\2{(\25²sÙoIz†°7+IC\29©ôÞ¤±ñf%j¡\30ú\2Ô\23¤Bà-@ \21—˜kŒiÄØæd)\24\7^m[Æ#PÿF[ÿ\22ê\4Ô\9©Sj\20VZ²’4ÈÞ”\26L]º1ËÒ¡\27!>HÒ¸hdIMŠjEí\30å@#òû|ú\127ki¹ºËW\8G+§\2\21c~JÍи\21åV`þ+¾³Qr\19Ao@\31±•ÈðÑRé\11“ \7Ó\16¬xLÔBt\4x\28ä\8\17\16Ïø-Áä¥u\1276Éš³†¥þð\6\23Ia\0152mCà‚Q>»èg\8§m\14\\ôÆÑ\27SªÌ½hů.Ï&ì_;}~°\9¼\22\24ÑâÆ%–\21!«»s±d;\19\27)#j^¾<(H´ž¡¤G¶3AM€©²˜w\26‡&j¨RCÚT©ÍC™™5\7\28š¨¡J\0135jS¥6þ\ +´6.5Í¿òÇ·?ãë€Q^ê'\2D\8ž`ÛÉE8¥ð÷øBéoñü.Ÿñ\\:…=ï›ø\30?á‰@$\4OHÀMäÆÉ\16\30?ºœdýè²%\4O@\11³³0ë} ~A¸}È\30sk?>\24—\29B\20SÜ\18Éa‰\22*´PÒš\ +-cìæInmú½b½9$5²Y°Ña\0Ž _äyÏ\0310Þ#°tYZG@`ÑørêsE\31zïCï|Øy\31N·÷áä|è\15½øð[ú8lc¤`ŠÂG\12{Ï\31¦mL`€‚êcjçiÔ\25\"\27\19\3xCª†©w+£deUÈÓÂÑ==\28ÍóC¨\31ˆz×o¤\0241}Oê·(õ6ìûü;P!0n'§Ô¸\\Ö3\0u@ª\13\28øË\13EäÛ\13#ÙüŽ$\7Ñ´\20R:\0äã4ïøqš(Ê\13^]ì­`UÌ6{Ž&àÃÆh­ð‡\17¦\28eK·Ê[o®L\2b.ÒNP\\9T~6%·Ó\1r”Þl~é\14(90``0nlláÜ[‘wñÏÅñdÌŸŒŠO[i:cøÓV˜ÎÄ8™\11q²—á$›|G\27¶Q=åy8‘Ò\12\30‹³Q‘®Ë‚oÓßÔ•%Ç\22`6®Y1[±\22òeéS\19Û뼡JqNê\127ÜÆŸ$PòBåtU\12!x\2\0:<1r0\6\15Ò¾Ë\22˜VŠ{ùšÀ£\13:š€“\0138\25åÛ¼½~„ôc$\6Ûqµë\12Ä;ù:ŒÚtK0RÚ(V‰FÊ\27ÅæÀ\21\6®Là‹\13z1\1±)É[‡6&0ä\28•í\27\3Ä\\aÌ\21Ä\\aÌ•IT\3’\15Û@Ø\6,nÐâ\6,nÐâ&•Êø]\7\21‚\21\26+<\27A.\17\11|U¸·œ\0304kß$‹FûÉRO–x24^j‘\2ÀQSÍæï3—ÜîÍns\8ƬJ¢\7ã¦V\5\6¼¼_™ÈyϲwmžµaŽæ«þ§¦/\ +Á\4\4\27@˃âh“\23\6é\127½`ùRýD[`žãG¿¢¤ŸüZÄ !Kú¹¯÷¸\12uŸô¥ÿ´è<¡—¤W>*\26ÿÇÁ\"ëL\1Á\6p\12Kj“öe„\27t\20Wšg™×uoAä\5\29†n‚ƒ\11Ž»Þ½§ýîÞñíê|\21z„BA\ +%\9\22œ&$¯ŸR—\28)”¤¸„Özcý\0\15ŒíÁ˜0¸|RJ-© 
ûÐX?¯™¥\1Á\6È>7†ôœ¼M¨ÁÞ²ÿIsÖ\27A³\27à»Ý\13ðÝì\6ønw\3|‡Úej\22ÛÜ$C\27-gñƒ~ñ\127\0280S¡Ë\1!\7Èr¾ìyºF\ +†\0126_sõ”¡\6·\13²\9÷oÜ&Ð\127j\19ßc»\21ÿË\23{­Dͦ\13ìE2›e&Y·Ë\4Bð\4\11ÄM3ANáz\1¢¤9-b€À€z!”j¤ôŽD” bz\31‚®`Þ_ñ=í¯ø}ø‡nŽü`\4ÊÖ¿™½Íÿ–÷6\127 »+íê#[\14.w\11ƒ\4ƒÄ;ÂA$½\23\28r`ÀÀ,Æ–ßH6L·à£¹¥d}ÖÍø\12z¢0iiƒÈ4 IÛ$F)n“x¡§ªfO^A®’c¼A»AB4²Ó; ²ß;Dó¤`IñcÙ;y±'\0014NG„Æñ\5Å#ÇBÍÑqh® PÓ\20‘\16áwPTÚZIߧ¿ÈW\11Ó¼ò+—\24@\3¢]â~«¡´$¨€C•\28\ +r\31·T4r0rÜ·wç\17j!\16úDY¶LÀ2²ŒÛ}×P§-‘·¼%I…¾¯\7p÷áO\"H\0n”zq\27¥^hÍJÚÑóÂkV¬H\1\3³xCª\13{^ºÄ¿¤-@/òaé\30XÝ‘\16\28!¾'\ +\26‚Ó\16¼†\4ÄÊ\13rp\ +¨2{\11³³0{\11³³0;\11.<æu\ +g™:/×x\25iu\21 ²Â\ +I%òÁo\24ºh]\17më\17ºÛzÒ¹@úÒ\3^›…˜<Šq˜<ñàü»\1?Ì7½\"¦»b\3˜vÍ/0Ë\5;Šƒ'ö%È/xwfžÉuî\0˜ˆ+˜#\26PÝÈmþ~DS\16‹Èò\29‰PñqUóqUñqUóqUñqUóqUñqUó±4ýÌ71gÚ€°á¹ÃdêÏ\19mä¸k?ZÈ@ô÷Íûz\21 7–NÞ¢œêI\"Håšoût\31IRÈÒ\28ŸŸ\0269D9Þ×\30õ\1270ÿ¹\7cB¢Dß\15û&Í\12?/}¯‡\30\"R(I\8iÿ…Ÿµ\2\"Ï[\19\20\ +R\0Ò,½HΑأŒ-ÔT\9Ùæÿ’©±‡*¤Ø?Õ\24ñ9á7+Q·ßŠüÆ°jœ×ì”êì­ÎÞx±³^,=þùûÐ.\23'6c\15\8È‹ªˆ\28#¢ƒ\2úŸ\6\4\26\16l@\20þ«Î\31°4²|¾2\2Á\17‚'°†W\23?’9>ÑW½´\11ö`\1\29€[$Dä\0307XÏÿiZd”¢¸ç\9³QJb\20žîMÔ1\31UGpH,àÔÓHñAlxµ\30=Uˆ¼¶Ž\19uJ¡ôÜ&\"Ê\16dü«rhùõ¢%b!c4…òoÒϬþÛph1Îr\27ôöZoïûðì¬1‚¶Æ'åÄŠ4ÿ+æ2c݉øð÷’«X€ø²\26Œ«šñÂ[±¬`XF\23åÑ;}-Pû<ôFRÆt\7øWO\0125¢\7cs÷­Ä¨Ò:0TˆÜIÿáV™Ñë\25ò²~”vV¢Ç-ILÿ>ô_¼\13Hp|ŒGc\30\21ã‚I\27\28\\0Êg\23ý\12á²`\18¢7Žnej`Là\9¤£H\31tå\30bZÒÿH’\ +–õq›žµ/Eø\0\6\16à娎\0\0W\2”1œ\ +\19øs‚ÌÀ¬ÀŒ\0\"¤‚wvF2¦r\11{ìÕ »\21ˆ˜Å$\31Qž\127‡Ë+H(\16¸†Ô_q‘–ÎÊÏž\18\ +ŠG.…š‹ãУàBMSDR„G`ñ#T÷\13búØß\19ChA}\22šd}\26\ +„à\9\22ÐŽ’•mx|\15\16d~à`8zpÐlå)«ÜZIwòÑ0}jœB\17\16‚~\27\1H\8\26b¨\17C…ØÔˆMI¬ÐïÁëê|ùâgèNSo¡PBIbŸfOʈæí÷yï26!š«H\1$y8\31œ7\22 Gð·=\16@n仢\24Žiy¶í 02\18Ó²¦\0096§É\0010¦¨$z0Z]Ó“\7ñ³\7çnA\0125bÔx,õ\29\11mÇR×ÑkZŠÁªÐ…`¨\16ƒ'ÆüÛðb\29È=\3żó$„¢\31\27~{\26\16yƒÚúàH\1Iѧ×\7tHåè\13\4\7\12æ=ˆÎÒ-G\29€ª¦’Z 1-ß0×e\14Ò¦ã\27ÊŽÀ_\19Â\8F¾ÿGZFFÈ3?ÿˆµ…¥øì+\5z‰^17²¼dn\9½•irØ\6#ù\19¨ünÕÄ\19,ôi¾§/zWâÏóeQ©ô¬Áút…Àg09Þ´\20$³\14¢\25¿Ë\15ˆ¼ÏŸ ydÎý³«>㥯Q\"Âߣä¾ÙU\31ü2\9\16!A<\30^\3‰‘†Z®bl\31<Ê=Û&µa!ýÓõ.\15y­ËCZçò׸ˆ¥Ñ~H3#¹k\31\ +R(IYSWDë£_\\TFüc|•è\14ÜÐËß<\21\17—“0\26èoACDs¾¡lÂ5WãÿþËŠ;¾|F–‹\23à\8Á\19x\9ߌ\4+§íYAƒ\5–úýhƒ“´‹KüXÒEÑ«\28\02400‹Që\ +´bØ7+Étq¦~³\18\4ò\"1K}ýáûøkZô\19ÿ‡øß®þ\23)­þÏ\1\3³¨«ÿ³dÂt=óÏ&²‡ò*f !”WF—š\26O¢\21Ž”¡¼¬¹0gHºÜÛ!qC•\12…‚\20JRü\ +ׄPü\30×”¡¸\19PK«,\1ù¤w´,B½O€øaY”Æ/ùÿ”ÿó‡´ùÿ6ÿOßj—€\31ïO_yµz\12\0039\6Ë›K–‘D\9\5%xJSP\26 |ŸöÈÈ€:*\15Š¬«\9I”¾ (rëX«>߃HE\26-fh¡B\11%­©Ð\26G+M\22\6Ks…±ÒT6Äé½ñ{¾FÞ¢L7㳋°C™\23ãg£Ë=ã\01263 1îoÞÄÕ\17L„­æÿ¿\26üׄ?ðh­Óµc*…(¥e»ñ\127Ä7²5,³ãÞË“EC\21¥/p×Py\0Yªø¨¡Hæ{_êÕ'Yûõ™PÈÜ•°ò\11È3w¤³Bj’œÅÑ[\28E‘ù92ïæv]2þï’ey!ˆƒøQ\"c¡¤…\ +ÍF=VxGû¿Ñ÷\4¥Ë\14>¹F\8æêSI\0125bÖˆõ)Q±>\0019\20z±>\1¹©knJ²­O†lë\19knØú\4䦮¹)®X[»bm劵µ+Ö–W\12ëS6ßVó·­ço[Í߶ž¿m5\127Ûª\0195\23ª\14ÔÌW×ò´’£e~VrÓUá>®ï†\6 k¹Á9d\02603Sô¸“š·•vT{ðöâÎj\30O;¬=Ô-4\127fÁ_«Â%Å\11—\\„àñÂ%\23¡ù3\11ÍÃÿ\7!¢\2þ", + ["length"]=63005, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=4296, diff --git a/tex/context/patterns/lang-bg.lua b/tex/context/patterns/lang-bg.lua index 5d14a5438..62ec9f03a 100644 --- a/tex/context/patterns/lang-bg.lua +++ b/tex/context/patterns/lang-bg.lua @@ -55,7 +55,10 @@ return { }, ["patterns"]={ ["characters"]="абвгдежзийклмнопрÑтуфхцчшщъьюÑ", - ["data"]="1а1 1б1 1в1 1г1 1д1 1е1 1ж1 1з1 1и1 1й1 1к1 1л1 1м1 1н1 1о1 1п1 1Ñ€1 1Ñ1 1Ñ‚1 1у1 1Ñ„1 1Ñ…1 1ц1 1ч1 1ш1 1щ1 1ÑŠ1 0ÑŒ0 1ÑŽ1 1Ñ1 б4а б4е б4и б4о б4у б4ÑŠ б4ÑŽ б4Ñ Ð²4а в4е в4и в4о в4у в4ÑŠ в4ÑŽ в4Ñ Ð³4а г4е г4и г4о г4у г4ÑŠ г4ÑŽ г4Ñ Ð´4а д4е д4и д4о д4у д4ÑŠ д4ÑŽ д4Ñ Ð¶4а ж4е ж4и ж4о ж4у ж4ÑŠ ж4ÑŽ ж4Ñ Ð·4а з4е з4и з4о з4у з4ÑŠ з4ÑŽ з4Ñ Ð¹4а й4е й4и й4о й4у й4ÑŠ й4ÑŽ й4Ñ Ðº4а к4е к4и к4о к4у к4ÑŠ к4ÑŽ к4Ñ Ð»4а л4е л4и л4о л4у л4ÑŠ л4ÑŽ л4Ñ Ð¼4а м4е м4и м4о м4у м4ÑŠ 
м4ÑŽ м4Ñ Ð½4а н4е н4и н4о н4у н4ÑŠ н4ÑŽ н4Ñ Ð¿4а п4е п4и п4о п4у п4ÑŠ п4ÑŽ п4Ñ Ñ€4а Ñ€4е Ñ€4и Ñ€4о Ñ€4у Ñ€4ÑŠ Ñ€4ÑŽ Ñ€4Ñ Ñ4а Ñ4е Ñ4и Ñ4о Ñ4у Ñ4ÑŠ Ñ4ÑŽ Ñ4Ñ Ñ‚4а Ñ‚4е Ñ‚4и Ñ‚4о Ñ‚4у Ñ‚4ÑŠ Ñ‚4ÑŽ Ñ‚4Ñ Ñ„4а Ñ„4е Ñ„4и Ñ„4о Ñ„4у Ñ„4ÑŠ Ñ„4ÑŽ Ñ„4Ñ Ñ…4а Ñ…4е Ñ…4и Ñ…4о Ñ…4у Ñ…4ÑŠ Ñ…4ÑŽ Ñ…4Ñ Ñ†4а ц4е ц4и ц4о ц4у ц4ÑŠ ц4ÑŽ ц4Ñ Ñ‡4а ч4е ч4и ч4о ч4у ч4ÑŠ ч4ÑŽ ч4Ñ Ñˆ4а ш4е ш4и ш4о ш4у ш4ÑŠ ш4ÑŽ ш4Ñ Ñ‰4а щ4е щ4и щ4о щ4у щ4ÑŠ щ4ÑŽ щ4Ñ ÑŒ4а ÑŒ4е ÑŒ4и ÑŒ4о ÑŒ4у ÑŒ4ÑŠ ÑŒ4ÑŽ ÑŒ4Ñ 4б3б4 2б3в2 2б3г2 2б3д2 2б3ж2 2б3з2 2б3й2 2б3к2 2б3л2 2б3м2 2б3н2 2б3п2 2б3Ñ€2 2б3Ñ2 2б3Ñ‚2 2б3Ñ„2 2б3Ñ…2 2б3ц2 2б3ч2 2б3ш2 2б3щ2 2в3б2 4в3в4 2в3г2 2в3д2 2в3ж2 2в3з2 2в3й2 2в3к2 2в3л2 2в3м2 2в3н2 2в3п2 2в3Ñ€2 2в3Ñ2 2в3Ñ‚2 2в3Ñ„2 2в3Ñ…2 2в3ц2 2в3ч2 2в3ш2 2в3щ2 2г3б2 2г3в2 4г3г4 2г3д2 2г3ж2 2г3з2 2г3й2 2г3к2 2г3л2 2г3м2 2г3н2 2г3п2 2г3Ñ€2 2г3Ñ2 2г3Ñ‚2 2г3Ñ„2 2г3Ñ…2 2г3ц2 2г3ч2 2г3ш2 2г3щ2 2д3б2 2д3в2 2д3г2 4д3д4 3д4ж 2д3з2 2д3й2 2д3к2 2д3л2 2д3м2 2д3н2 2д3п2 2д3Ñ€2 2д3Ñ2 2д3Ñ‚2 2д3Ñ„2 2д3Ñ…2 2д3ц2 2д3ч2 2д3ш2 2д3щ2 2ж3б2 2ж3в2 2ж3г2 2ж3д2 4ж3ж4 2ж3з2 2ж3й2 2ж3к2 2ж3л2 2ж3м2 2ж3н2 2ж3п2 2ж3Ñ€2 2ж3Ñ2 2ж3Ñ‚2 2ж3Ñ„2 2ж3Ñ…2 2ж3ц2 2ж3ч2 2ж3ш2 2ж3щ2 2з3б2 2з3в2 2з3г2 2з3д2 2з3ж2 4з3з4 2з3й2 2з3к2 2з3л2 2з3м2 2з3н2 2з3п2 2з3Ñ€2 2з3Ñ2 2з3Ñ‚2 2з3Ñ„2 2з3Ñ…2 2з3ц2 2з3ч2 2з3ш2 2з3щ2 2й3б2 2й3в2 2й3г2 2й3д2 2й3ж2 2й3з2 4й3й4 2й3к2 2й3л2 2й3м2 2й3н2 2й3п2 2й3Ñ€2 2й3Ñ2 2й3Ñ‚2 2й3Ñ„2 2й3Ñ…2 2й3ц2 2й3ч2 2й3ш2 2й3щ2 2к3б2 2к3в2 2к3г2 2к3д2 2к3ж2 2к3з2 2к3й2 4к3к4 2к3л2 2к3м2 2к3н2 2к3п2 2к3Ñ€2 2к3Ñ2 2к3Ñ‚2 2к3Ñ„2 2к3Ñ…2 2к3ц2 2к3ч2 2к3ш2 2к3щ2 2л3б2 2л3в2 2л3г2 2л3д2 2л3ж2 2л3з2 2л3й2 2л3к2 4л3л4 2л3м2 2л3н2 2л3п2 2л3Ñ€2 2л3Ñ2 2л3Ñ‚2 2л3Ñ„2 2л3Ñ…2 2л3ц2 2л3ч2 2л3ш2 2л3щ2 2м3б2 2м3в2 2м3г2 2м3д2 2м3ж2 2м3з2 2м3й2 2м3к2 2м3л2 4м3м4 2м3н2 2м3п2 2м3Ñ€2 2м3Ñ2 2м3Ñ‚2 2м3Ñ„2 2м3Ñ…2 2м3ц2 2м3ч2 2м3ш2 2м3щ2 2н3б2 2н3в2 2н3г2 2н3д2 2н3ж2 2н3з2 2н3й2 2н3к2 2н3л2 2н3м2 4н3н4 2н3п2 2н3Ñ€2 2н3Ñ2 2н3Ñ‚2 2н3Ñ„2 2н3Ñ…2 2н3ц2 2н3ч2 2н3ш2 2н3щ2 2п3б2 2п3в2 2п3г2 2п3д2 2п3ж2 2п3з2 2п3й2 2п3к2 2п3л2 2п3м2 2п3н2 4п3п4 2п3Ñ€2 2п3Ñ2 2п3Ñ‚2 2п3Ñ„2 2п3Ñ…2 2п3ц2 2п3ч2 2п3ш2 2п3щ2 2Ñ€3б2 2Ñ€3в2 2Ñ€3г2 2Ñ€3д2 2Ñ€3ж2 2Ñ€3з2 2Ñ€3й2 2Ñ€3к2 2Ñ€3л2 2Ñ€3м2 2Ñ€3н2 2Ñ€3п2 4Ñ€3Ñ€4 2Ñ€3Ñ2 2Ñ€3Ñ‚2 2Ñ€3Ñ„2 2Ñ€3Ñ…2 2Ñ€3ц2 2Ñ€3ч2 2Ñ€3ш2 2Ñ€3щ2 2Ñ3б2 2Ñ3в2 2Ñ3г2 2Ñ3д2 2Ñ3ж2 2Ñ3з2 2Ñ3й2 2Ñ3к2 2Ñ3л2 2Ñ3м2 2Ñ3н2 2Ñ3п2 2Ñ3Ñ€2 4Ñ3Ñ4 2Ñ3Ñ‚2 2Ñ3Ñ„2 2Ñ3Ñ…2 2Ñ3ц2 2Ñ3ч2 2Ñ3ш2 2Ñ3щ2 2Ñ‚3б2 2Ñ‚3в2 2Ñ‚3г2 2Ñ‚3д2 2Ñ‚3ж2 2Ñ‚3з2 2Ñ‚3й2 2Ñ‚3к2 2Ñ‚3л2 2Ñ‚3м2 2Ñ‚3н2 2Ñ‚3п2 2Ñ‚3Ñ€2 2Ñ‚3Ñ2 4Ñ‚3Ñ‚4 2Ñ‚3Ñ„2 2Ñ‚3Ñ…2 2Ñ‚3ц2 2Ñ‚3ч2 2Ñ‚3ш2 2Ñ‚3щ2 2Ñ„3б2 2Ñ„3в2 2Ñ„3г2 2Ñ„3д2 2Ñ„3ж2 2Ñ„3з2 2Ñ„3й2 2Ñ„3к2 2Ñ„3л2 2Ñ„3м2 2Ñ„3н2 2Ñ„3п2 2Ñ„3Ñ€2 2Ñ„3Ñ2 2Ñ„3Ñ‚2 4Ñ„3Ñ„4 2Ñ„3Ñ…2 2Ñ„3ц2 2Ñ„3ч2 2Ñ„3ш2 2Ñ„3щ2 2Ñ…3б2 2Ñ…3в2 2Ñ…3г2 2Ñ…3д2 2Ñ…3ж2 2Ñ…3з2 2Ñ…3й2 2Ñ…3к2 2Ñ…3л2 2Ñ…3м2 2Ñ…3н2 2Ñ…3п2 2Ñ…3Ñ€2 2Ñ…3Ñ2 2Ñ…3Ñ‚2 2Ñ…3Ñ„2 4Ñ…3Ñ…4 2Ñ…3ц2 2Ñ…3ч2 2Ñ…3ш2 2Ñ…3щ2 2ц3б2 2ц3в2 2ц3г2 2ц3д2 2ц3ж2 2ц3з2 2ц3й2 2ц3к2 2ц3л2 2ц3м2 2ц3н2 2ц3п2 2ц3Ñ€2 2ц3Ñ2 2ц3Ñ‚2 2ц3Ñ„2 2ц3Ñ…2 4ц3ц4 2ц3ч2 2ц3ш2 2ц3щ2 2ч3б2 2ч3в2 2ч3г2 2ч3д2 2ч3ж2 2ч3з2 2ч3й2 2ч3к2 2ч3л2 2ч3м2 2ч3н2 2ч3п2 2ч3Ñ€2 2ч3Ñ2 2ч3Ñ‚2 2ч3Ñ„2 2ч3Ñ…2 2ч3ц2 4ч3ч4 2ч3ш2 2ч3щ2 2ш3б2 2ш3в2 2ш3г2 2ш3д2 2ш3ж2 2ш3з2 2ш3й2 2ш3к2 2ш3л2 2ш3м2 2ш3н2 2ш3п2 2ш3Ñ€2 2ш3Ñ2 2ш3Ñ‚2 2ш3Ñ„2 2ш3Ñ…2 2ш3ц2 2ш3ч2 4ш3ш4 2ш3щ2 2щ3б2 2щ3в2 2щ3г2 2щ3д2 2щ3ж2 2щ3з2 2щ3й2 2щ3к2 2щ3л2 2щ3м2 2щ3н2 2щ3п2 2щ3Ñ€2 2щ3Ñ2 2щ3Ñ‚2 2щ3Ñ„2 2щ3Ñ…2 2щ3ц2 2щ3ч2 2щ3ш2 4щ3щ4 ааа4 аае4 ааи4 аао4 аау4 ааъ4 ааю4 ааÑ4 аеа4 аее4 аеи4 аео4 аеу4 аеъ4 аею4 аеÑ4 аиа4 аие4 аии4 аио4 аиу4 аиъ4 аию4 аиÑ4 аоа4 аое4 аои4 аоо4 аоу4 аоъ4 аою4 аоÑ4 ауа4 ауе4 ауи4 ауо4 ауу4 ауъ4 аую4 ауÑ4 аъа4 аъе4 аъи4 аъо4 аъу4 аъъ4 аъю4 аъÑ4 аюа4 аюе4 аюи4 аюо4 аюу4 аюъ4 аюю4 
аюÑ4 аÑа4 аÑе4 аÑи4 аÑо4 аÑу4 аÑÑŠ4 аÑÑŽ4 аÑÑ4 еаа4 еае4 еаи4 еао4 еау4 еаъ4 еаю4 еаÑ4 ееа4 еее4 ееи4 еео4 ееу4 ееъ4 еею4 ееÑ4 еиа4 еие4 еии4 еио4 еиу4 еиъ4 еию4 еиÑ4 еоа4 еое4 еои4 еоо4 еоу4 еоъ4 еою4 еоÑ4 еуа4 еуе4 еуи4 еуо4 еуу4 еуъ4 еую4 еуÑ4 еъа4 еъе4 еъи4 еъо4 еъу4 еъъ4 еъю4 еъÑ4 еюа4 еюе4 еюи4 еюо4 еюу4 еюъ4 еюю4 еюÑ4 еÑа4 еÑе4 еÑи4 еÑо4 еÑу4 еÑÑŠ4 еÑÑŽ4 еÑÑ4 иаа4 иае4 иаи4 иао4 иау4 иаъ4 иаю4 иаÑ4 иеа4 иее4 иеи4 иео4 иеу4 иеъ4 иею4 иеÑ4 ииа4 иие4 иии4 иио4 ииу4 ииъ4 иию4 ииÑ4 иоа4 иое4 иои4 иоо4 иоу4 иоъ4 иою4 иоÑ4 иуа4 иуе4 иуи4 иуо4 иуу4 иуъ4 иую4 иуÑ4 иъа4 иъе4 иъи4 иъо4 иъу4 иъъ4 иъю4 иъÑ4 июа4 июе4 июи4 июо4 июу4 июъ4 июю4 июÑ4 иÑа4 иÑе4 иÑи4 иÑо4 иÑу4 иÑÑŠ4 иÑÑŽ4 иÑÑ4 оаа4 оае4 оаи4 оао4 оау4 оаъ4 оаю4 оаÑ4 оеа4 оее4 оеи4 оео4 оеу4 оеъ4 оею4 оеÑ4 оиа4 оие4 оии4 оио4 оиу4 оиъ4 оию4 оиÑ4 ооа4 оое4 оои4 ооо4 ооу4 ооъ4 оою4 ооÑ4 оуа4 оуе4 оуи4 оуо4 оуу4 оуъ4 оую4 оуÑ4 оъа4 оъе4 оъи4 оъо4 оъу4 оъъ4 оъю4 оъÑ4 оюа4 оюе4 оюи4 оюо4 оюу4 оюъ4 оюю4 оюÑ4 оÑа4 оÑе4 оÑи4 оÑо4 оÑу4 оÑÑŠ4 оÑÑŽ4 оÑÑ4 уаа4 уае4 уаи4 уао4 уау4 уаъ4 уаю4 уаÑ4 уеа4 уее4 уеи4 уео4 уеу4 уеъ4 уею4 уеÑ4 уиа4 уие4 уии4 уио4 уиу4 уиъ4 уию4 уиÑ4 уоа4 уое4 уои4 уоо4 уоу4 уоъ4 уою4 уоÑ4 ууа4 ууе4 ууи4 ууо4 ууу4 ууъ4 уую4 ууÑ4 уъа4 уъе4 уъи4 уъо4 уъу4 уъъ4 уъю4 уъÑ4 уюа4 уюе4 уюи4 уюо4 уюу4 уюъ4 уюю4 уюÑ4 уÑа4 уÑе4 уÑи4 уÑо4 уÑу4 уÑÑŠ4 уÑÑŽ4 уÑÑ4 ъаа4 ъае4 ъаи4 ъао4 ъау4 ъаъ4 ъаю4 ъаÑ4 ъеа4 ъее4 ъеи4 ъео4 ъеу4 ъеъ4 ъею4 ъеÑ4 ъиа4 ъие4 ъии4 ъио4 ъиу4 ъиъ4 ъию4 ъиÑ4 ъоа4 ъое4 ъои4 ъоо4 ъоу4 ъоъ4 ъою4 ъоÑ4 ъуа4 ъуе4 ъуи4 ъуо4 ъуу4 ъуъ4 ъую4 ъуÑ4 ъъа4 ъъе4 ъъи4 ъъо4 ъъу4 ÑŠÑŠÑŠ4 ÑŠÑŠÑŽ4 ÑŠÑŠÑ4 ъюа4 ъюе4 ъюи4 ъюо4 ъюу4 ÑŠÑŽÑŠ4 ÑŠÑŽÑŽ4 ÑŠÑŽÑ4 ÑŠÑа4 ÑŠÑе4 ÑŠÑи4 ÑŠÑо4 ÑŠÑу4 ÑŠÑÑŠ4 ÑŠÑÑŽ4 ÑŠÑÑ4 юаа4 юае4 юаи4 юао4 юау4 юаъ4 юаю4 юаÑ4 юеа4 юее4 юеи4 юео4 юеу4 юеъ4 юею4 юеÑ4 юиа4 юие4 юии4 юио4 юиу4 юиъ4 юию4 юиÑ4 юоа4 юое4 юои4 юоо4 юоу4 юоъ4 юою4 юоÑ4 юуа4 юуе4 юуи4 юуо4 юуу4 юуъ4 юую4 юуÑ4 юъа4 юъе4 юъи4 юъо4 юъу4 ÑŽÑŠÑŠ4 ÑŽÑŠÑŽ4 ÑŽÑŠÑ4 ююа4 ююе4 ююи4 ююо4 ююу4 ÑŽÑŽÑŠ4 ÑŽÑŽÑŽ4 ÑŽÑŽÑ4 ÑŽÑа4 ÑŽÑе4 ÑŽÑи4 ÑŽÑо4 ÑŽÑу4 ÑŽÑÑŠ4 ÑŽÑÑŽ4 ÑŽÑÑ4 Ñаа4 Ñае4 Ñаи4 Ñао4 Ñау4 Ñаъ4 Ñаю4 ÑаÑ4 Ñеа4 Ñее4 Ñеи4 Ñео4 Ñеу4 Ñеъ4 Ñею4 ÑеÑ4 Ñиа4 Ñие4 Ñии4 Ñио4 Ñиу4 Ñиъ4 Ñию4 ÑиÑ4 Ñоа4 Ñое4 Ñои4 Ñоо4 Ñоу4 Ñоъ4 Ñою4 ÑоÑ4 Ñуа4 Ñуе4 Ñуи4 Ñуо4 Ñуу4 Ñуъ4 Ñую4 ÑуÑ4 Ñъа4 Ñъе4 Ñъи4 Ñъо4 Ñъу4 ÑÑŠÑŠ4 ÑÑŠÑŽ4 ÑÑŠÑ4 Ñюа4 Ñюе4 Ñюи4 Ñюо4 Ñюу4 ÑÑŽÑŠ4 ÑÑŽÑŽ4 ÑÑŽÑ4 ÑÑа4 ÑÑе4 ÑÑи4 ÑÑо4 ÑÑу4 ÑÑÑŠ4 ÑÑÑŽ4 ÑÑÑ4 й4бб й4бв й4бг й4бд й4бж й4бз й4бй й4бк й4бл й4бм й4бн й4бп й4бр й4Ð±Ñ Ð¹4бт й4бф й4бх й4бц й4бч й4бш й4бщ й4вб й4вв й4вг й4вд й4вж й4вз й4вй й4вк й4вл й4вм й4вн й4вп й4вр й4Ð²Ñ Ð¹4вт й4вф й4вх й4вц й4вч й4вш й4вщ й4гб й4гв й4гг й4гд й4гж й4гз й4гй й4гк й4гл й4гм й4гн й4гп й4гр й4Ð³Ñ Ð¹4гт й4гф й4гх й4гц й4гч й4гш й4гщ й4дб й4дв й4дг й4дд й4дж й4дз й4дй й4дк й4дл й4дм й4дн й4дп й4др й4Ð´Ñ Ð¹4дт й4дф й4дх й4дц й4дч й4дш й4дщ й4жб й4жв й4жг й4жд й4жж й4жз й4жй й4жк й4жл й4жм й4жн й4жп й4жр й4Ð¶Ñ Ð¹4жт й4жф й4жх й4жц й4жч й4жш й4жщ й4зб й4зв й4зг й4зд й4зж й4зз й4зй й4зк й4зл й4зм й4зн й4зп й4зр й4Ð·Ñ Ð¹4зт й4зф й4зх й4зц й4зч й4зш й4зщ й4йб й4йв й4йг й4йд й4йж й4йз й4йй й4йк й4йл й4йм й4йн й4йп й4йр й4Ð¹Ñ Ð¹4йт й4йф й4йх й4йц й4йч й4йш й4йщ й4кб й4кв й4кг й4кд й4кж й4кз й4кй й4кк й4кл й4км й4кн й4кп й4кр й4ÐºÑ Ð¹4кт й4кф й4кх й4кц й4кч й4кш й4кщ й4лб й4лв й4лг й4лд й4лж й4лз й4лй й4лк й4лл й4лм й4лн й4лп й4лр й4Ð»Ñ Ð¹4лт й4лф й4лх й4лц й4лч й4лш й4лщ й4мб й4мв й4мг й4мд й4мж й4мз й4мй й4мк й4мл й4мм й4мн й4мп й4мр й4Ð¼Ñ Ð¹4мт й4мф й4мх й4мц й4мч й4мш й4мщ й4нб й4нв й4нг й4нд й4нж й4нз й4нй й4нк й4нл й4нм й4нн й4нп й4нр й4Ð½Ñ Ð¹4нт й4нф й4нх й4нц й4нч й4нш й4нщ й4пб й4пв й4пг 
й4пд й4пж й4пз й4пй й4пк й4пл й4пм й4пн й4пп й4пр й4Ð¿Ñ Ð¹4пт й4пф й4пх й4пц й4пч й4пш й4пщ й4рб й4рв й4рг й4рд й4рж й4рз й4рй й4рк й4рл й4рм й4рн й4рп й4рр й4Ñ€Ñ Ð¹4рт й4рф й4рх й4рц й4рч й4рш й4рщ й4Ñб й4Ñв й4Ñг й4Ñд й4Ñж й4Ñз й4Ñй й4Ñк й4Ñл й4Ñм й4Ñн й4Ñп й4ÑÑ€ й4ÑÑ Ð¹4ÑÑ‚ й4ÑÑ„ й4ÑÑ… й4Ñц й4Ñч й4Ñш й4Ñщ й4тб й4тв й4тг й4тд й4тж й4тз й4тй й4тк й4тл й4тм й4тн й4тп й4Ñ‚Ñ€ й4Ñ‚Ñ Ð¹4Ñ‚Ñ‚ й4Ñ‚Ñ„ й4Ñ‚Ñ… й4тц й4тч й4тш й4тщ й4фб й4фв й4фг й4фд й4фж й4фз й4фй й4фк й4фл й4фм й4фн й4фп й4Ñ„Ñ€ й4Ñ„Ñ Ð¹4Ñ„Ñ‚ й4Ñ„Ñ„ й4Ñ„Ñ… й4фц й4фч й4фш й4фщ й4хб й4хв й4хг й4хд й4хж й4хз й4хй й4хк й4хл й4хм й4хн й4хп й4Ñ…Ñ€ й4Ñ…Ñ Ð¹4Ñ…Ñ‚ й4Ñ…Ñ„ й4Ñ…Ñ… й4хц й4хч й4хш й4хщ й4цб й4цв й4цг й4цд й4цж й4цз й4цй й4цк й4цл й4цм й4цн й4цп й4цр й4Ñ†Ñ Ð¹4цт й4цф й4цх й4цц й4цч й4цш й4цщ й4чб й4чв й4чг й4чд й4чж й4чз й4чй й4чк й4чл й4чм й4чн й4чп й4чр й4Ñ‡Ñ Ð¹4чт й4чф й4чх й4чц й4чч й4чш й4чщ й4шб й4шв й4шг й4шд й4шж й4шз й4шй й4шк й4шл й4шм й4шн й4шп й4шр й4ÑˆÑ Ð¹4шт й4шф й4шх й4шц й4шч й4шш й4шщ й4щб й4щв й4щг й4щд й4щж й4щз й4щй й4щк й4щл й4щм й4щн й4щп й4щр й4Ñ‰Ñ Ð¹4щт й4щф й4щх й4щц й4щч й4щш й4щщ б4ÑŒ в4ÑŒ г4ÑŒ д4ÑŒ ж4ÑŒ з4ÑŒ й4ÑŒ к4ÑŒ л4ÑŒ м4ÑŒ н4ÑŒ п4ÑŒ Ñ€4ÑŒ Ñ4ÑŒ Ñ‚4ÑŒ Ñ„4ÑŒ Ñ…4ÑŒ ц4ÑŒ ч4ÑŒ ш4ÑŒ щ4ÑŒ ÑŒ4ÑŒ .дз4в .дж4Ñ€ .дж4л .вг4л .вд4л .вг4Ñ€ .вг4н .вп4л .вк4л .вк4Ñ€ .вт4Ñ€ .Ñг4л .зд4Ñ€ .Ñг4Ñ€ .Ñб4Ñ€ .Ñд4Ñ€ .жд4Ñ€ .Ñк4л .Ñп4л .Ñп4Ñ€ .ÑÑ‚4Ñ€ .Ñк4Ñ€ .шп4Ñ€ .Ñк4в .вз4Ñ€ .вÑ4л .вÑ4м .вÑ4Ñ€ .Ñв4Ñ€ .ÑÑ…4л .ÑÑ…4Ñ€ .хв4Ñ€ .вÑ4Ñ‚ .ÑÑ…4в .Ñм4Ñ€ н4кт. н4кÑ. к4ÑÑ‚.", + ["compression"]="zlib", + ["data"]="xÚMÚ\11®ã\4\18@Ñ­ô\ +ÐÀdmã¿\"{+t\18ÿ\127[èÞÑ<—O‹ÑH®—¼*8ä2\2¤üùóŸ?¿ýùóÇõx]÷õø\\þz\12×c¼\30Óõ˜¯Çr=Öë±]ýz\28×ãüzüú~=’ë‘^¿¯Gv=òëQ\\òzT×£¾\30ÍŸßþóëùŸ¯ŸÚëe÷ç·Ÿ?\30?ÿ‰g\31Ï)žÇõüõw<›x¶ñì¾ý|Åþ+ö_±ÿŠýWì¿bÿ\21û¯Ø\127Çþ;öß±ÿŽýwì¿cÿ\29ûïØÿÄþ'ö?±ÿ‰ýOì\127bÿ\19ûŸØ\31b\127ˆý!ö‡Ø\31b\127ˆý!ö‡Ø\31c\127Œý1öÇØ\31c\127Œý1öÇØŸc\127Žý9öçØŸc\127Žý9öçØ_b\127‰ý%ö—Ø_b\127‰ý%ö—Ø_c\127ý5ö×Ø_c\127ý5ö×Øßb\127‹ý-ö·Øßb\127‹ý-ö·Øßc\127ý=ö÷Øßc\127ý=ö÷Ø?cÿŒý3öÏØ?cÿŒý3öÏkÿ×÷kÿzöñœây\\ϯýëÙijç×~\18ûIì'±ŸÄ~\18ûIì'±ŸÄ~\26ûi짱ŸÆ~\26ûi짱ŸÆ~\22ûYìg±ŸÅ~\22ûYìg±ŸÅ~\30ûyì籟Ç~\30ûyì籟Ç~\17ûEì\23±_Ä~\17ûEì\23±_Ä~\25ûeì—±_Æ~\25ûeì—±_Æ~\21ûUìW±_Å~\21ûUìW±_Å~\29ûuì×±_Ç~\29ûuì×±_Çþ3öŸ±ÿŒýgì?cÿ\25ûÏØ\127^ûŸ?þûõÿüo\127]óõ×=ßæÇ\28ÌÑœÍÅ\\ÍÍÜÍ󞿾›‰™š™™›…Yš•Y_óõåüëËû5_ûõÛû\31s0Gs6\23s57s7Ï{ÞÎ\23ç‹óÅùâ|q¾8_œ/Îw8c¾.ï×|?î×\31ï\15æhÎæb®æfîæyÏÛùæ|s¾9ßœoÎ7ç›óÍùáüèþ‰ÏóqÍÏãÛõø9Üo~=›‹¹š›¹›ç=oæ\7óƒùÁü`~0?˜\31Ì\15æ€9`\14²\15ñq>®9<î×£÷gs1Ws3wó¼çí\0288\7Îsà\0288\7Îsà\0289GΑs”}ŒìkŽûõìýÅ\\ÍÍÜÍóž·sä\0289GΑsä\0289GΑsæœ9gΙsö·ç\28Ÿçãšóã~½x\12757s7Ï{ÞΙsæœ9gΙsæœ9gÎ…sá\\8\23Î…sÑ}‰ÏóqÍåq¿^½¿™»yÞóv.œ\11ç¹p.œ\11ç¹p®œ+çʹr®œ+çªû\26Ÿçãšëã~½y\1277Ï{ÞΕså\\9WΕså\\9WÎsãÜ87ÎsãÜ87Ý·ø<\31×Ü\30÷ëÝûç=oçƹqnœ\27çƹqnœ\27çιsîœ;çιsîœ;ç®û\30Ÿçãšûã~}ÞïßÎsçÜ9wÎsçÜ9wΓóä<9OΓóä<9OΓóÔýŒÏóqÍó\17¯oçÉyržœ'çÉyržœçíüõýv^óe¾Í9˜£9›‹¹š›¹›_Ÿçãk~ý‹_¼\14ç5S33s³0K³2Ùp&œ\9g™p&œ\9g™p&œ\9grwÿš×çù¸fò¸_§ÞÏÌÜ,ÌÒ¬Ìp¦œ)gÊ™r¦œ)gÊ™r¦œ)gÊ™r¦w÷k&—÷k¦ûuæýÜ,ÌÒ¬Ìpfœ\25gÆ™qfœ\25gÆ™qfœ\25gÆ™qfœ™îY|žkfûuîýÂ,ÍÊ\12gΙsæœ9gΙsæœ9gΙsæœ9gΙsæºçñy>®™?î×…÷K³2ÃYp\22œ\5gÁYp\22œ\5gÁYp\22œ\5gÁYp\22œ\5g¡{\17ŸçãšÅã~]z¿2ÃYr–œ%gÉYr–œ%gÉYr–œ%gÉYr–œ%gÉYê^Æçù¸fù¸_WÞ\15gÅYqVœ\21gÅYqVœ\21gÅYqVœ\21gÅYqVœ\21gÅYé^Åçù¸fõ¸_‡³æ¬9kΚ³æ¬9kΚ³æ¬9kΚ³æ¬9kΚ³æ¬9kÝëø<\31׬\31ß~þsýÏìÍÉ<îùëo³1[³‹Ù»ïÝ÷î{÷½ûÞ}ï¾w?¹ŸÜOî'÷“ûÉýä~r\127¸?Ü\31î\15÷‡ûÃýáþ¸ï\127ý}ß\127ÍÞœÌãž÷ý×lÌÖ¼ï\27÷ûÆ}ã¾q߸oÜ7î[÷­ûÖ}ë¾uߺoÝ·î;÷ûÎ}ç¾sß¹ïÜwqßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßëßë?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é?é\127è\127è\127è\127è\127è\127è\127è\127è\127è\1
27è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127è\127Üý¯¿úë>foNæqÏë>fc¶æ}ß»ïÝ÷î{÷½ûÞ}ï¾w?¹ŸÜOî'÷“ûÉýä~r\127¸?Ü\31î\15÷‡ûÃýáþ¸ïïþ1{s2{Þ÷wÿ˜­yß7î\27÷ûÆ}ã¾q߸oÜ·î[÷­ûÖ}ë¾uߺoÝwî;÷ûÎ}ç¾sß¹¿û7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú7ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·ú·úwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwúwþýo~üüñó‡ù2ßæÇ\28ÌÑœÍÅ\\ÍÍÜÍ󞿾›‰™š™™›…Yš•YÇ|q¾8_œ/Î\23ç‹óÅùâ|q¾8_œ/Î\23ç‹óÅùâ|q¾8_œ/Î\23ç›óÍùæ|s¾9ßœoÎ7ç›óÍùæ|s¾9ßœoÎ7ç›óÍùæ|s¾9?œ\31Î\15ç‡óÃùáüp~8?œ\31Î\15ç‡óÃùáüp~8?œ\31Î\15ç‡óÃ9p\14œ\3çÀ9p\14œ\3çÀ9p\14œ\3çÀ9p\14œ\3çÀ9p\14œ\3çÀ9pŽœ#çÈ9rŽœ#çÈ9rŽœ#çÈ9rŽœ#çÈ9rŽœ#çÈ9rŽœ3çÌ9sÎœ3çÌ9sÎœ3çÌ9sÎœ3çÌ9sÎœ3çÌ9sÎœ3ç¹p.œ\11ç¹p.œ\11ç¹p.œ\11ç¹p.œ\11ç¹p.œ\11ç¹r®œ+çʹr®œ+çʹr®œ+çʹr®œ+çʹr®œ+çʹrnœ\27çƹqnœ\27çƹqnœ\27çƹqnœ\27çƹqnœ\27çƹqnœ;çιsîœ;çιsîœ;çιsîœ;çιsîœ;çιsîœ;çÉyržœ'çÉyržœ'çÉyržœ'çÉyržœ'çÉyržœ'çy;\127}¿_óe¾Í9˜£9›‹¹š›¹›ç=oç×LÌÔÌÌÜ,ÌÒ¬ÌÛ™p&œ\9g™p&œ\9g™p&œ\9g™p&œ\9g™p&œ\9g™p¦œ)gÊ™r¦œ)gÊ™r¦œ)gÊ™r¦œ)gÊ™r¦œ)gÊ™r¦œ\25gÆ™qfœ\25gÆ™qfœ\25gÆ™qfœ\25gÆ™qfœ\25gÆ™qfœ\25gΙsæœ9gΙsæœ9gΙsæœ9gΙsæœ9gΙsæœ9gÎYp\22œ\5gÁYp\22œ\5gÁYp\22œ\5gÁYp\22œ\5gÁYp\22œ\5gÁYp–œ%gÉYr–œ%gÉYr–œ%gÉYr–œ%gÉYr–œ%gÉYr–œ\21gÅYqVœ\21gÅYqVœ\21gÅYqVœ\21gÅYqVœ\21gÅYqVœ\21gÍYsÖœ5gÍYsÖœ5gÍYsÖœ5gÍYsÖœ5gÍYsÖœ5g}9\127<~=ã«õÏøÂü3¾\6ÿŒ/·?ã+ëÏØ}Æ×ËŸñ¥ñg|\21ü\25_ð~Æ׶Ÿñeìg|Åú\25_œ~ÆסŸñ%çg|uù\25_H~Æ׌Ÿñåág|%øùíë¿\20¾þ3(~øúó~÷Ã×_þ\31×\127¬ýþáóï;±\19¿Úã‡ó÷¯–\127\127¸w.Ì×\15×?\20î_×_àïwüðã÷\15÷¯®ÿBøýÎý\7¼þYðï\15÷¯þý#ß\127®ëïŸÿ»z\5cüÍHÀ®\0316?X~ýþ\3æþ\20ׇv½“ûÕ½œþÞyÅÕvýêúîý׿~þá‡äÈôEûã\127]Û$¸", + ["length"]=12830, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=1660, diff --git a/tex/context/patterns/lang-ca.lua b/tex/context/patterns/lang-ca.lua index 9e3d35517..175ffcf41 100644 --- a/tex/context/patterns/lang-ca.lua +++ b/tex/context/patterns/lang-ca.lua @@ -2,7 +2,11 @@ return { ["comment"]="% generated by mtxrun --script pattern --convert", ["exceptions"]={ ["characters"]="abcdeghijklnoprstuvwxy", - ["data"]="cu-rie cu-ries gei-sha gei-shes goua-che goua-ches hip-py hip-pies hob-by hob-bies jeep jeeps joule joules klee-nex klee-nexs lar-ghet-ti lar-ghet-to lied lieder nos-al-tres ro-yal-ties ro-yal-ty vos-al-tres whis-ky whis-kies", + ["compression"]="zlib", + ["data"]="xÚMŒÑ\13à \12DWñ\2\30ŠÐSì\6\5„¡-Û\23B\20ñsïÙ:ÛWÎ\ +ò\23Œv(›¸›c\17«c/xÄH4qj\19ãFâÆ[›\24ó\27HWt5`¦Ñ\17\0>ñ{Ä(¸Ì» pÑÅ#\5Åë\ +d:£±\11\\r\127‘#·áºx£ÏÒøŠ\26\31ífïý\1}\26Pò", + ["length"]=225, ["n"]=26, }, ["metadata"]={ @@ -139,7 +143,15 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijlmnopqrstuvxyzàçèéíïòóúü", - ["data"]="1ba 1be 1bi 1bo 1bu 1ca 1ce 1ci 1co 1cu 1da 1de 1di 1do 3du 1fa 1fe 1fi 1fo 1fu 1ga 1ge 1gi 1go 1gu 1ha 1he 1hi 1ho 1hu 1ja 1je 1ji 1jo 1ju 1la 1le 1li 1lo 1lu 1ma 1me 1mi 1mo 1mu 1na 1ne 3ni 1no 1nu 1pa 3pe 3pi 3po 1pu 1qu 1ra 1re 1ri 1ro 1ru 1sa 1se 1si 1so 1su 1ta 1te 1ti 1to 1tu 1va 1ve 1vi 1vo 1vu 1xa 1xe 1xi 1xo 1xu 1za 1ze 1zi 1zo 1zu 1bé 1bí 1bó 1bú 1bà 1bè 1bò 1cé 1cí 1có 1cú 1cà 1cè 1cò 1ço 1ça 1çu 1çó 1çú 1çà 1çò 1dé 1dí 1dó 1dú 1dà 1dè 1dò 1fé 1fí 1fó 1fú 1fà 1fè 1fò 1gé 1gí 1gó 1gú 1gà 1gè 1gò 1gü 1hé 1hí 1hó 1hú 1hà 1hè 1hò 1jé 1jí 1jó 1jú 1jà 1jè 1jò 1lé 1lí 1ló 1lú 1là 1lè 1lò 1mé 1mí 1mó 1mú 1mà 
1mè 1mò 1né 1ní 1nó 1nú 1nà 1nè 1nò 1pé 1pí 1pó 1pú 1pà 1pè 1pò 1qü 1ré 1rí 1ró 1rú 1rà 1rè 1rò 1sé 1sí 1só 1sú 1sà 1sè 1sò 1té 1tí 1tó 1tú 1tà 1tè 1tò 1vé 1ví 1vó 1vú 1và 1vè 1vò 1xé 1xí 1xó 1xú 1xà 1xè 1xò 1zé 1zí 1zó 1zú 1zà 1zè 1zò 3l2la 1l2le 1l2li 3l2lo 1l2lu 1b2la 1b2le 1b2li 1b2lo 1b2lu 1b2ra 1b2re 1b2ri 1b2ro 1b2ru 1c2la 1c2le 1c2li 1c2lo 1c2lu 1c2ra 1c2re 1c2ri 1c2ro 1c2ru 1d2ra 1d2re 1d2ri 1d2ro 1d2ru 1f2la 1f2le 1f2li 1f2lo 1f2lu 1f2ra 1f2re 1f2ri 1f2ro 1f2ru 1g2la 1g2le 1g2li 1g2lo 1g2lu 1g2ra 1g2re 1g2ri 1g2ro 1g2ru 1p2la 1p2le 1p2li 1p2lo 1p2lu 1p2ra 1p2re 1p2ri 1p2ro 1p2ru 1t2ra 1t2re 1t2ri 1t2ro 1t2ru 1n2ya 1n2ye 1n2yi 1n2yo 1n2yu 1l2lé 1l2lí 1l2ló 1l2lú 1l2là 1l2lè 1l2lò 1b2lé 1b2lí 1b2ló 1b2lú 1b2là 1b2lè 1b2lò 1b2ré 1b2rí 1b2ró 1b2rú 1b2rà 1b2rè 1b2rò 1c2lé 1c2lí 1c2ló 1c2lú 1c2là 1c2lè 1c2lò 1c2ré 1c2rí 1c2ró 1c2rú 1c2rà 1c2rè 1c2rò 1d2ré 1d2rí 1d2ró 1d2rú 1d2rà 1d2rè 1d2rò 1f2lé 1f2lí 1f2ló 1f2lú 1f2là 1f2lè 1f2lò 1f2ré 1f2rí 1f2ró 1f2rú 1f2rà 1f2rè 1f2rò 1g2lé 1g2lí 1g2ló 1g2lú 1g2là 1g2lè 1g2lò 1g2ré 1g2rí 1g2ró 1g2rú 1g2rà 1g2rè 1g2rò 1p2lé 1p2lí 1p2ló 1p2lú 1p2là 1p2lè 1p2lò 1p2ré 1p2rí 1p2ró 1p2rú 1p2rà 1p2rè 1p2rò 1t2ré 1t2rí 1t2ró 1t2rú 1t2rà 1t2rè 1t2rò 1n2yé 1n2yí 1n2yó 1n2yú 1n2yà 1n2yè 1n2yò a1a a1e a1o e1a e1e e1o i1a i1e i1o o1a o1e o1o u1a u1e u1o a1é a1í a1ó a1ú a1à a1è a1ò a1ï a1ü e1é e1í e1ó e1ú e1à e1è e1ò e1ï e1ü i1é i1í i1ó i1ú i1à i1è i1ò i1ï i1ü o1é o1í o1ó o1ú o1à o1è o1ò o1ï o1ü u1é u1í u1ó u1ú u1à u1è u1ò u1ï u1ü é1a é1e é1o é1ï é1ü í1a í1e í1o í1ï í1ü ó1a ó1e ó1o ó1ï ó1ü ú1a ú1e ú1o ú1ï ú1ü à1a à1e à1o à1ï à1ü è1a è1e è1o è1ï è1ü ò1a ò1e ò1o ò1ï ò1ü ï1a ï1e ï1o ï1é ï1í ï1ó ï1ú ï1à ï1è ï1ò ï1i ü1a ü1e ü1o ü1é ü1í ü1ó ü1ú ü1à ü1è ü1ò a1i2a a1i2e a1i2o a1i2u a1u2a a1u2e a1u2i a1u2o a1u2u e1i2a e1i2e e1i2o e1i2u e1u2a e1u2e e1u2i e1u2o e1u2u i1i2a i1i2e i1i2o i1i2u i1u2a i1u2e i1u2i i1u2o i1u2u o1i2a o1i2e o1i2o o1i2u o1u2a o1u2e o1u2o o1u2i o1u2u u1i2a u1i2e u1i2o u1i2u u1u2a u1u2e u1u2i u1u2o u1u2u a1i2é a1i2í a1i2ó a1i2ú a1i2à a1i2è a1i2ò a1u2é a1u2í a1u2ó a1u2ú a1u2à a1u2è a1u2ò e1i2é e1i2í e1i2ó e1i2ú e1i2à e1i2è e1i2ò e1u2é e1u2í e1u2ó e1u2ú e1u2à e1u2è e1u2ò i1i2é i1i2í i1i2ó i1i2ú i1i2à i1i2è i1i2ò i1u2é i1u2í i1u2ó i1u2ú i1u2à i1u2è i1u2ò o1i2é o1i2í o1i2ó o1i2ú o1i2à o1i2è o1i2ò o1u2é o1u2í o1u2ó o1u2ú o1u2à o1u2è o1u2ò u1i2é u1i2í u1i2ó u1i2ú u1i2à u1i2è u1i2ò u1u2é u1u2í u1u2ó u1u2ú u1u2à u1u2è u1u2ò é1i2a é1i2e é1i2o é1i2u é1u2a é1u2e é1u2o é1u2i é1u2u í1i2a í1i2e í1i2o í1i2u í1u2a í1u2e í1u2o í1u2i í1u2u ó1i2a ó1i2e ó1i2o ó1i2u ó1u2a ó1u2e ó1u2o ó1u2i ó1u2u ú1i2a ú1i2e ú1i2o ú1i2u ú1u2a ú1u2e ú1u2o ú1u2i ú1u2u à1i2a à1i2e à1i2o à1i2u à1u2a à1u2e à1u2o à1u2i à1u2u è1i2a è1i2e è1i2o è1i2u è1u2a è1u2e è1u2o è1u2i è1u2u ò1i2a ò1i2e ò1i2o ò1i2u ò1u2a ò1u2e ò1u2o ò1u2i ò1u2u ï1i2a ï1i2e ï1i2o ï1i2é ï1i2í ï1i2ó ï1i2ú ï1i2à ï1i2è ï1i2ò ï1i2u ï1u2a ï1u2e ï1u2o ï1u2é ï1u2í ï1u2ó ï1u2ú ï1u2à ï1u2è ï1u2ò ï1u2i ï1u2u ü1i2a ü1i2e ü1i2o ü1i2é ü1i2í ü1i2ó ü1i2ú ü1i2à ü1i2è ü1i2ò ü1i2u ü1u2a ü1u2e ü1u2o ü1u2é ü1u2í ü1u2ó ü1u2ú ü1u2à ü1u2è ü1u2ò ü1u2i ü1u2u .hi2a .hi2e .hi2o .hi2u .hu2a .hu2e .hu2i .hu2o .i2è .i2ò .u2è .u2ò .hi2é .hi2ó .hi2ú .hi2à .hi2è .hi2ò .hu2é .hu2í .hu2ó .hu2à .hu2è .hu2ò gu2a gu2e gu2i gu2o qu2a qu2e qu2i qu2o gu2é gu2í gu2ó gu2à gu2è gu2ò qu2é qu2í qu2ó qu2à qu2è qu2ò gü2e gü2é gü2í gü2è gü2i qü2e qü2é qü2í qü2è qü2i a1isme. e1isme. i1isme. o1isme. u1isme. a1ista. e1ista. i1ista. o1ista. u1ista. a1um. e1um. i1um. o1um. u1um. 
.antihi2 .be2n .be2s .bi2s .ca2p .ce2l .cla2r .co2ll .co2n .co2r .de2s .di2s .en3a .hipe2r .hiperm2n .in3ac .in3ad .in3ap .in3es .in3o .inte2r .ma2l .mal1t2hus .pa2n .pe2r .pe3ri .pos2t .psa2l .rebe2s .re2d .su2b .sub3o .subde2s .supe2r .tran2s g2no p2si p2se p2neu g2nò p2sí .ch2 .th2 ein1s2tein ru1t2herford ni2etz1sc2he 3exp 3nef 3nei 3pr 3ser a3ne a3ri bi3se des3ag des3ar des3av des3enc e3ism e3le e3rio e3ris es3aco es3af es3ap es3arr es3as es3int ig3n in3ex n3si o3ro qui3e s3emp s3esp sub3a ui3et o3gnò", + ["compression"]="zlib", + ["data"]="xÚ5˜AŽâJ\19„¯â\19´ÔæDP.\23…ÀUØ.Äã4Þ=¿\5BÂ;Ë»¾ØŸ\17á\127A¦'2¾Š\4ìî™ù=\29«ß“·W´W²W©~iÎ4gš3͙֤֘֘֘êИ֚֚֚֚¯5-˜\22L\11¦\5Ó‚igÓΦM;›v6íbÚÅ´‹i\23Ó.¦]M»šv5íjÚÕ´›i7Ón¦ÝL»™Ö™ÖùêЙ֙֙–Õ!›–£½L˦ÝíÕ›·7¾7oozoÚ`Ú`Ú`Ú`Ú`ÚhÚhÚhÚhÚhÚô‡i\15Ó\30¦=L{šö4íiÚÓ´§i/Ó^¦½L{™ö2íô÷\31Ê\27eAYQ&”\25åk\31+,\14\22\7‹ƒÅÁâ`q°üý›PŽ(\5\5F«+ëÄj¶\6'58©¡Á¼Á¸ÁI\13,-,-,-,-,-,-,-,\1–\0K€%À\18`\9°\4Y6ûúà;Ãw†ï\12ß\25¾3|gø.°\\`¹Àrå\2Ë\5–\11,WX®°\\a¹Âr…å\ +Ë\21–\27,7Xn°Ü`¹Árƒå\6K\7K\7K\7K\7K\7K\7K\7K†%Ã’aÉ°dX2,\25–;ÞV\15_\15_\15_\15_\15_\15_\15ß\0Ë\0Ë\0Ë\0Ë\0Ë\0Ë\0Ë\8Ë\8Ë\8Ë\8Ë\8Ë\8Ë\8Ë\3–\7,\15X\30°<`yÀò€å\9Ë\19–',OXž°Sµ=UP\18¯q‡rzâô„)jbå´ç´ç´ç´ç\20O#ëÈ:²Ž¬#ëÈ:²Ž¬#ëÈ6œ6œ6œ6œ6œ¶<¹åÉ-OnyrË“[²-Ù–lK¶%\27È\6²l \27È\6²l \27È\6²™l&›Éf²™l&›Éf²™l&;r:r:r:r:rÚÕÿ\28Y=kdM¬…ß\5\31„ZB­‡¡ÖãPë¨õHÔ|(N\2N\2N\2N\2N\2N\2Nÿ\7z\1½€^@/ \23Ð\11à]î”à”à”à”à”à”à”à”à”à”à”à”à”à”Ð\8h\0044\2\26\1€F@# ÕJ­VjµR«•Z­Ôj¥V+µJh•Ð*¡UB«„V\9­\18‚\18‚\18‚\18‚\18‚\18‚\18‚\18‚\18‚\18‚\18‚\18‚\18‚\18‚\18²\18²\18²\18²\18²\18²\18²\18²\18²\18²\18²\18²\18²\18²\18F\1£€QÀ(`\0200\ +\24\5Ø-ÈŸŸÖÞj‹Úª6©Íjßêø{´—·Wª¼]{»öv\29í:Úu´ëd×É®“]\23».v]ìúøkQVÞ(\11ÊŠ2¡Ì(8þ١fö0{˜=Ì\30f\15³‡ÙÃìaŽ0G˜#Ì\17æ\8s„9Â\28aŽ0'˜\19Ì\9æ\4s‚9Áœ`N0'˜\11Ì\5æ\2s¹À\\`.0\23˜\11Ì\127ÿÙ»µâQ\18ŠMPmôÆèÑ\27£7GoŽ\22Œ\22Œ\22Œ\22Ž\22ŽVŒVŒVŒVŽVŽ&Œ&Œ&Œ&Ž&ŽfŒfŒfŒfŽfŽ¾\24}1úbôåèËÑ\7£\15F\31Œ>xç¨oÖ…ueXgÖ/j¬þ6Ð\27è\13ôFz#½‘ÞHo¤7Ò›¾ñX\31Y=kb-V\11õB½Ô‘5±\22ûÖAyRž”'åIyRž”'åIER‘T$\21IER‘T$\21IER‰T\"•H%R‰T\"•èOd\19©Bª*¤\ +©Bª*ô\23²…\20Þ;\31\18koµEmU›Ôfµ/?\14\2E@\17P\4\20\1E@©ùÐ0Á+Á+Á+Á+Á+Á+Á+Á+Á+Á+Á+Á+Á+!*!*!*!*!*!*!*!*!*!*!*!*!*!*!)!)!)!)!)!)!)!)!)!)!)!)!)!)¡(¡(¡(¡(¡(¡(¡(¡(¡(¡(¡(¡(¡(¡(Á~,à\6aójI­ \21ÍŠfE3Ü-l\5?:ˆ¿…¿…¿…¿…¿…¿…¿…¿…/Â\23á‹ðEø\"|\17¾\8_„/ÂWá«ðUø*|\21¾\ +_…¯ÂWá“ðIø$|\18>\9Ÿ„OÂ'á“ðYø,|\22>\11Ÿ…ÏÂgá³ðYøWøWøWøWøWøWøWøWøWøGøGøGøG7Œú{ïËÞ×½O{Ÿ÷®Ÿ:³èÌ¢3‹Î,û™e?³ìg–ý̲ŸYö3Ë~&×ýhÝMënZwӺ۾»íënûºÛ¾î¶¯»íënZwÓº›Öݴ»íënûºÛ¾î¶¯»íënûº›ÖݸîÏ\25Û¢zÖÄ\ +½P/Ô\1 Ú”ûýp»\31žûÃSAÙ2l‹Úª6©Íj_žC'·f[Ô&µYí[\5¬\16°AÀ\2\1ùwhwhwhwh\7\6ž\23x\\ài‡\5žu§çNÏž;=wzîÊûÛe\21Ç¡½Õf6K£á.Ã]†»\12w\26ì—Äpó?øQÎ\30÷žö^ö\14ßx”\15=î=í½ìÝ~‡ÜàB¬‰µ°þ\28»1Ú‡Zýœ|ݱ\14V#ª;ÖÙª¯¯V¯Çº·–êë•­c5©!ÑðÝ7Bö˜ ÷78£éN­QËl~`Ã-ÑdnG¤ÝŽWûËî¹Ø8\31q€\14Ìþ`ÿLûÉi¨Gk\3½½×ν¯íì¡Ô'Ô\19\14µ¦í†¢\3ÆþØÙŸCÝ¥*×CDñV:_ Ú÷g\2î(w¶Od´âc÷kiÖª¾`'ß·©oªÎnöñõ;8Sªƒ\127æêÐù\22\5ÿ“ÕW‡Á÷ÕÑþhÅv>E\19ªÆ\15‡cPëÕ\30l¾s•?Ø÷jÕþ)‹·™X‡\ +&—ØZÖÌÚ÷lœÛgWÅpè*|¢Ïª;Ø\27K‡\0307z<øÊŽ¿eÔÁª}0ö7\25“G³\4{Çÿ\3F\11Ê/", + ["length"]=5195, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=869, diff --git a/tex/context/patterns/lang-cs.lua b/tex/context/patterns/lang-cs.lua index 8dd730b2c..5d4962e5d 100644 --- a/tex/context/patterns/lang-cs.lua +++ b/tex/context/patterns/lang-cs.lua @@ -2,7 +2,9 @@ return { ["comment"]="% generated by mtxrun --script pattern --convert", ["exceptions"]={ ["characters"]="acefhijklnoprtyíúÄÅ™", - ["data"]="koe-fi-ci-ent koe-fi-ci-en-ty pro-jek-Äní úhlo-příÄ-ka úhlo-příÄ-ky", + ["compression"]="zlib", + ["data"]="xÚËÎOÕMËÔMÎÔMÍ+QÈFâé–T*\20\20åëf¥fë\30éÍ;¼Vá𮌜|Ý‚£3\15¯=Ò«›ˆ.P\9\0¡ï!", + ["length"]=77, ["n"]=5, }, ["metadata"]={ @@ -99,7 +101,53 @@ return { }, ["patterns"]={ 
["characters"]="abcdefghijklmnopqrstuvwxyzáéíóúýÄÄěňřšťůž", - ["data"]=".a2 .a4da .a4de .a4di .a4do .a4dé .a4kl .a4ko .a4kr .a4ku .ale3x .a4ra .a4re .a4ri .a4ro .a4ry .a4rá .a4sa .a4se .a4so .as3t3 .a4sy .a4ta .a4te .at3l .a4to .a4tr .a4ty .a4ve .b2 .c2 .ch2 .cyk3 .d2 .dez3 .d4na .dne4 .dneÅ¡4k .d4ny .dos4 .d4ve .d4vÄ› .d4ví .e2 .e4ch .e4ko .es3k .es3t .e4ve .f4ri .g2 .h2 .h4le .h4ne .i2 .i4na .i4ni .i4no .is3l .j2 .j4ak .je4dl .j4se .j4zd .jád4 .k2 .k4li .k4ly .kří3d .l2 .le4gr .li3kv .m2 .mi3st4 .moud3 .na3Ä4 .ne3c .neÄ4 .ne3Å¡ .ni2t .no4s3t .n4vp .ná1 .nář4k .o2 .o4bé .ode3 .od3l .od3rá .o4ka .o4ko .o4na .o4ne .o4ni .o4no .o4nu .o4ny .o4nÄ› .o4ní .o4pe .o4po .o4se .o4sl .os4to .os3t3r .os4tÄ› .ot3rá .ot3v .o4tí .o4tÅ™ .ovÄ›4t .o4za .oz3do .o4zi .o4zo .o4zu .o4Å¡k .o4Å¡l .o4ži .p2 .pa4re .pa3tÅ™ .polk4l .po3Ä4 .p4ro .p4rý .p4se .pu3b .r2 .rej4 .re3s .ro4k .roze3 .roz3r .ru4dl .s2 .s4ch .s4ci .sem4 .se3pn .s4ke .sk4l .s4ká .s4le .s4na .s4ny .s4pe .s4po .st2 .s4tá .s4ži .t2 .u2 .u4ba .u4be .u4bi .u4bo .u4de .u4di .u4do .u4du .u4dí .uh4n .uj4m .u4ko .u4ku .ul4h .u4ma .u4me .u4mi .u4mu .u4ne .u4ni .u4pa .u4pe .u4pi .up4n .u4po .u4pu .u4pá .u4pÄ› .u4pí .u4ra .u4ro .u4rá .us2 .u4so .u4st .u4sy .u4sí .ut2 .u4vi .u4ze .u4Äe .u4Äi .u4Äí .u4Å¡e .u4Å¡i .u4Å¡k .uÅ¡4t .u4ší .u4ži .už4n .u4žo .u4ží .v2 .va4dl .v4po .vy3 .v4zá .vý1 .v4ži .y4or .y4ve .z2 .za3 .zao3s .zar2 .zaÄ2 .zd2 .z4di .z4dr .z4ky .z4mn .z4no .z4nu .z4nÄ› .z4ní .z4pe .z4po .z4tÅ™ .z4ve .z4vi .Ä2 .Ä4te .é2 .í2 .ó2 .Å¡2 .Å¡e3t .Å¡4ka .Å¡4ke .Å¡4ky .Å¡4Å¥o .Å¡4ťá .ú2 .ú4dů .ž2 a1 2a. aa3t2 ab3lon ab4lý ab3ri ab4sb ab2st ac4ci a2d a3da a3de a3di ad2la a4dli a4dlá a4dlé ad4me ad4mu a3do ado4s a3d3ra ad3ri a3drž a3du a4duž 3a3dva ad3vo a3dy a3dá a3dé a3dÄ› a3dí ad4úz ad4úř a3dů a3dý ae4vi afi2a a2g a3ga ag4fa a3go ag3ro a3gu a3gá ah4li ah3v a2i a3in ai4re a3iv a2jd a2jm aj4me aj2o a2k a3ke a3ki a3kl ak4ni a3ko a3kr a3ku a3ky a3ká a3ké a3kó a3ků a3ký al4fb al4kl al4tz al3ží am4bd am4kl am4nu amo3s am4ži a4nae a4name an4dt ane4sk aneu4 an4sc an4sg an4sl an4sm an2sp an4sv an4tÄ an4žh ao4ed ao4hm ao4stÅ™ ao4tÄ ap4r. a4pso ap3t a4pÅ™. a2r a3ra ar4dw a3re a4rer ar4gl a3ri ar4kh a3ro a4rox ar3st a3ru ar2va a3ry a3rá a3ró ar3Å¡2 ar4Å¡r a3rů arůs3 a3rý a2s a3sa a3se a3sh a3sin as3ná a3so as3pi as4tat a4stk as4tm a4stru. as3tv a3su a3sv a3sy a3sá a3sé a3sí a3sů a2t a3ta at4ch a3te a3ti a4tio at4kl at3lo a3to a3tr at3re at3ron at3rov a4tru at4rá at4th a3tu a3tv a3ty a3tá a3té a3tÄ› a3tí a3tó at1Å™ a4tří. a3tů a3tý a2u au4gs a3uj auj4m aus3t a3uÄ 2av av3d av4d. av3lo a4vlu a4vlí av3t av4ti 2ay ay4on az3k az3la az4lé az3ni a3zp a2Ä a3Äa a3Äe a3Äi a3Äl aÄ4má a3Äo a3Äu a3Äá a3Äí a3Äů a2ň a3ňo a3ňu aÅ™e4k a3ří a4Å¡pl a4Å¡py a2Å¥ aú3t 2b. 3ba. ba4br ba4chr ba3ka ba4se 2b1c b1d be4ef be4et bej4m be3p beu4r be2z3 beze3 b1h 1bi bi2b3 bis3 bist4 bi4tr b1j 2bk 3bl. bl4bl b2lem b2les 3blk b4lán b2lém b1m 2bn 1bo bo4et bo4jm bo4ok bo4tr bou3s bo4Å¡k b2ral b2ran 2bri b4rodit b4rou broz4 b2ru b3ru. b3rub b2rán 2b1s2 bs3tr 2b1t btáh4 bu2c bu4en 3by. bys3 by4sm by4tÄ by4zn b2z 1bá 2b1Ä bé4rc 1bÄ›. bÄ›3ta 1bí 3bín bí4rc 2bň b3Å™a b3Å™e. bÅ™e4s b1ří 2bÅ¡2 2c. 1ca cad4l ca4es 2cc 1ce cech4 ced4l celo3 ce4ns ce4ov ce4ps cer4v ce2u 2ch. 1cha 4chalg 3che 4che. 2chl ch4ly ch4mb 2ch3n 2cht 4chte 1chu ch4u. 1chy 1chá 2chÅ™ 1ci cien4c cik4l 2ck2 c4ket ckte4rý 2cl c3la c3lé 2cn 1co co4at co4mm co4žp c2p 2ct c2ti ctis4 ct4la ct2n c3tv c2tÄ› cuk1 1c2v cy2 1cá 1cí cí4pl 2cň 1ců 2d. 1da da3d da4jÅ¡ da4kl da4tr d1b d2ba 4dbat. 
d2bá 2d1c dch4l 3dch4n d1d dd4ha 1de de4bre de3hn de3jd dej4mo de3kl de3kv de2na de2oz de3sl de4sm de4so de2sp des4t de3str de1x de4xt de2z de3zn dez3o de3Ät de4žp 2d1h 1di di4gg 4dind dis3k di4so d1j dj4us 2dk d3kv 3dl. d1la d4lab d4lak d3li 1dln d2lou d3lou. d2lu d3luÄ d4láž d1lé 2d1lí d2lů d1m 1dmd dmýš4 2dn 1do 4dobl 4doboj dob4rat do3by do3bÄ› do3bý do1d 4do4dd 4do4dj dod4n do3h doj4m 4dokn 4doly do3mn domoh4 do3p do4pc dop4n dor2v do1s dos4p dos4tiv do3t do3uk do3uÄ do3z2 doz4n do3Ä 4do4Än doÄ4t do4žp 4dran d4rap d1re d4ren 3drobn d3ros d3rou d3roÅ¡ dr4sc d3ruÅ¡ d3ré d3rý d4rýv 2d1s2 ds4ků ds4po d1t d3tl d3tÅ™ 1du dum3Å™ du3na du3p du4pn 2dur du3si du4í. d2v d4vac d3ve d3vl d3vr d3vy d3vá d3vÄ› d3ví 1dy dy4su d3zb d3zd d3zn 1dá 2d1Ä 1dé 1dÄ› 3dÄ›j 1dí 2dň d1Å™a dÅ™e4k d4Å™ep dÅ™e4pn d4Å™ev d1ří d2řít 2dÅ¡2 d3Å¡k d3Å¡t 1dů 3dů. dů3s 1dý d2ž2 2e. e1a ea3dr e2ar e1b eb4er ebez2 eb4li e2bÅ™ e4ch. e3chl. e4chm e3cho e2chr e3chv e4chÅ¥ ed4be ed4kv ed1l ed2ma e3dmn ed3v ed4ří e1e ee4th ee3xi eg4gi e1ha e1he ehno4 eh4nÄ› e1ho e1hr e1hu e1hy e1há e1hý e1i eilus3 ej3ag e3jas e1je e3jed ej3ele e3jez ej3in e3jis ej1m ej3mo e3jmu ej1o ej1u eju3st ej3v e2k e3ka e3ke e4kly e3ko e3kr e3ku e3ky e3ká e3ké e3kó e3kÅ™ e3ků e1la e4lau el4dv e1le e1lo e1lu e1ly el4ze e1lá e1lé e1lí e1ml e4mlí emo3k e1mr e1my e3má e1mÄ› e1mí e3mÅ™ e3mů e1mý em3že en4dv enitos4 en4sc en4si ent3r e1o eo3by eoch3r eod3l eo4du e4ole eo1s eo2st eo4tÅ™ eo3z eo4zb eo4zd eoÅ¡e3 epa3t e2pl e4pni ep2no e4pný epoÄ3t epro4zÅ™ ep4tl ep4tm ep4tn e4ptu epy3 2er e1ra er4a. e1re e1ri e1ro er3s er4s. er4sn e1ru e1ry e1rá e1ré e1rů e1rý e1s e4sag e2sce e4sin esi4s e2sk es4k. e4s4kn es3ku. es3ky es3ké e2sl e4s3li e4sly es2m e4sp. es4pe e2st e4st. e4ste es3tiž es4tol e4strou es3tán e1t e4tki e4tkr e4tli e4tly et3ri et3ro et3rů et1Å™ et4ún e1u eu3b eu3ct eu3d eu3k eu3m eu4m. eu3n eu3p eu3r eu4r. e4ura eu4ras eu4rg eu3s2 eu3t e4u4t. eu4tra eu4ts eu3v eu3z eu3ž e3vd eve4Å¡ e3v2k e4vsk evy3 evyjad4 evypá4t evy4Äk evÄ›4tr ex4ta e3xu ey4or ey4ov ezaos3 ez4ap ez4bo ez3de ez3dov ez3du ez4dÄ› e3ze ez4ed2 ez4ej ez4el ez4er ez4es ez4ez ez4eÅ¡ ezis4 ez4it ez4le ez4ná ez4nÄ› ez4py ez2t ez4ác ez4áh ez4Äe e3zí e3zÅ™ ez4Å™e e1á eÄ4kat e1Ät eÄ4te e4Äti e4Ätí e2ň e3ňo e3ňu e3ňá e3ón e1Å™ eÅ™e4k eÅ™4ku e3ří e2Å¡ e3Å¡e e3Å¡i e4Å¡ka e3Å¡l eÅ¡4lá e3Å¡o eÅ¡4to eÅ¡tíh4 e3ší eú1 eúmy4 eú3n eú3p eú3t eú3Ä ežíš4 1f 2f. fe4in fene4 fe4ue fi4em fi4fl f2l f3lí fló4r fm4no 2fn 2fr f4ran f4ras 3frek f1ri 2fs fs4te 2ft fu4ch 2fé f2ú 1g 2g. ga4uÄ ge2s ghou4 3gic 3gin gi4ím g4lom 2g1m 2gn g4noi g4nos go1 go4hm 3graf gu4el gu4it gu3m gu4m. gus4t gu3v 2h. ha4ag ha4ar ha4bl ha4br ha3dl ha4dla ha4ke has3t hatos4 ha4yd h2b h2c 2hd he4br he4id hej4s he2s he2u he3x hi4an hi3er hi4gh hi4re 2hk 4hla. h4led h3len 2hli 4h3lo. h3lob h3lop h3lov h3luj 2h1ly 4hlá. h4lás h3lí. 4hlík 2hlý h2m 2h2n h3ne h4ned h3niv h4noj 3hnÄ›d 3hodin ho3str hos4tÄ› 4hove 4hovna 4hovny 4hovná 4hovnÄ› h2r hra4p 2h1t h4tin h2tÄ› h4tít hu4ch hu3mo hu4tň 2h2v hyd1 hy4do hy4ps hys3 hy2t3r hy4zd h1Ä 2hň hÅ™2 hÅ™4by hý4bl h2ž 2i. i1a ia3d ia3g2 i4al. ias4t ia4tr i1b ib2l i2b1r i1ch i4chž i1d id4ge id2l id4lo. i4dlý i1em i1en i1et if1r ig4ne i1h i2hl i3hl. i4hli ih3n ih4na i3im i1j ijed4 ij4me ij4mi i2kl ik3le ik3lo. ik3m ik4ry i4kve ik4úř i1l il4ba iliÄ4n i4lnu ilu3 i1m i4mla i4mly i4mun i2n i3na ina3d in4cm in4dl i3ne 3infe in4gh in4gp in4gs in4gt i3ni i3no i3nu i3ny i3ná i3né i3nÄ› i3ní in4Å¡p i3nů i3ný i1o io4sk i2ps i1r iro4s i1sa is3c is4ch is4k. is3ka is3ke is3ko. 
is3kr is3ku is3kv is3ky i3slav is3lo is3lé is3pl is3po is1t is4tal is4tat is4th ist3v is3tí i1sy i3sá i1t it1r it4rh it4rp it4se it4su i2tv i1um iv3d i1x ix4td i3zp iz1r i1á i1Äl iÄ3t iÄ4tl iÄ4to i2Ä i1é ié4re. i1íc i1ím i1ó i1Å™ iÅ™4kl iÅ™4Äe i2Å¡ i3Å¡e i3Å¡i iÅ¡3k iÅ¡4kr iÅ¡4kv i3Å¡o iÅ¡4to i3Å¡u i3šá i3ší i2ž i3ža i3že i3ži i3žo i3žu i3žá 2j. ja2b2 jac4k ja4cq ja3d ja3g j3akt j1b2 jbyst3 2j1c j2d j3dob j3dok j3dos j3dr j3dá jd4ří j3dů jech4 j3ef j3ex jez3dí jg4ra 2j1h 1ji ji4ch jih3l ji4mž j4ina jis3k jit4ro ji2zv j1j 2jk j3kv 2j1l j2m j3ma j3mi jmou3d 2jmí 2jn jne3 j1ob j1od jod2Å™ j1oh j1op j4ora j1os jo3sv j2ov j3ovl j1o3z2 2jp jpor4 jpo4zv jpříz4 2j1r 2j1s2 j4sem j4si. j4sk. js4ko js4ká j4s4ků j4s4me j3sn j4sou. j4souc js4po j4s4te 2j1t j3tl ju4an ju3na ju3p j1us ju3sp ju3t ju4t. ju3v ju4xt ju3z j1už ju3ži 2jv2 j3vd j3vn 2jz j3zb j3zd j3zk j3zn j3zp jád2r 2j1Ä 2jÄ 1jí j3Å¡t jÅ¡4ti j3Å¡Å¥ 2jú1 jú3n jú3Ä jú3ž 2jž 1k 2k. ka4bl ka4ch ka3dl 3kaj ka3ka 3kami 3kanÄ› ka2p3l ka2p3r ka2ps ka4pv ka2pÅ™ kas3t kast3r 3kat ka4uÄ 3kav 3kaÄ 3kaÅ™ kaÅ¡3l ka4Å¡p 2k1c k2d k2e ke4bl ke3jo ke4pr ke4ps 3ket 2kf 2kk k2l 3kl. 4k3la. k3lej 4k3li. k4lib k3lic 4kliÄka 4klo. k3los 2k3ly k3lá. k3lé k3ló k3lý 2k2m k3mÄ› 2kn kna4s ko3by 3kof ko4jm ko2pÅ™ ko4sk ko2t3v kous3k 3kov ko3zá 4kroa k3rob k3rof kr2s kr4ú. 2ks 2k1t kt2r kuch4 ku4fÅ™ ku4hr 3kuj ku3se ku3si ku3su ku4th ku3v 2k2v k4vrň 3kyn ky2pr kyp3Å™ ky4zn 3kác ká4pl 3kár 3kář 2kÄ k2ň k2Å™2 k3Å™ej kÅ¡4ti 3ků. 2l. 1la. la4br lab4s la3ka la4nq la4ps 4la3si la4vÅ¡ la4y. la2zm 2l1b 2l1c 2l1d ld4ne le4ad le4au lech3t leh3n le2i 1lej le3jo 4lejÅ¡k 1lel 4lench lepa3d lepo4s le4pr le4ps le4sc le4sm le4sv let4li let3m le2tr le4tÄ le4uk le4vh le4vk le3xi lez3n 2lf 2lg 2lh 3lhan 1li li4az li4bl li4bv li4dm lind4 3lio li4tň li4vr 2liž 2lj 2lk l4kat l2kl lk4nu 2ll 2l1m 2ln l4nul lo3br lo4id lo4is 1los lo3sp lo3stÅ™ lo3sv lo2tr lo4tÅ™ lo4u. lo3z loz4d lo4Å¡k 2lp l2pÄ› 2l1s2 l4sla ls3n lst4n l4stí 2l1t lt4ra lt4ru lt4ry lu4id lu4j. lu4k. lu4lk lu4m. lu4mn lu3pr lu3va lu3vl lu3vy lu3ví 2lv 2lz 1lá. lá4jÅ¡ lá4vÅ¡ 2l1Ä 1lé. 1lík lí4pl lí4zn 1líř 2lň 2lÅ¡2 l3Å¡t l4Å¡tý 1lů 1lý lý2t 2l2ž 2m. 1ma maj4s ma4kl ma4kr 4mald mas3k mat3r ma4tra ma4vÅ¡ maz3l 2m1b 2m1c 2m1d2 m2dl 1me 3me. me4go me4is met3re me3x mezi3s 2mf mh4le 1mi mid3l mik3r mi4xt 2mk2 3m2kl mk4la mk4li m2l 4mla. 2mle ml3h ml4h. 2mli ml4sc ml4sk 4mlu. 2mn m3na mna4s m4noh m3nos m4noz 3množ m3ná m3né m4néz m3nÄ›j m3ný 1mo mod3r mo2hl mo2k mo2s mo4s. mot3Å™ 4mout moza4 mo3zÅ™ moú3 2mp m4plo mpo4s m2ps mp4se mp2t mr2s 2m1s2 m4stl 2m1t 1mu mu4fl mu3n mu4n. mu4nd mu4nn mu4ns mu4nÅ¡ 2muÅ¡ 2mv mys3lo my4Å¡k 2mz 3má. málo3 má2s 2mÄ m2Äe mí1c mí4rň 2m2Å¡ mÅ¡4Äi mÅ¡3Å¥ mÅ¡4Å¥an. 3mů. 3mý. m2ž 1n 2n. 3na. na3ch na4do na4em na3h na4h. na3jd na3ka nam4ne na3p2 na3s2 na4s. nat2 na3tl na3tÅ™ na3z naz4k na4zÅ¡ na4Ä. na3Å¡ naž4n 2nb 2n1c n4chc 2n1d nd4hi ndo4t nd2re nd4ri nd4ří ne1d ne4gl ne1h ne3h4n ne2j nej3t nej3u ne3kl ne4kro ne3kv ne4m. ne3p ne3s2 ne4s. nes4le ne4ss 4nesti ne3tl net4r ne3ud ne3v2 ne4v. ne3z nez4n ne3Å¡k ne3Å¡Å¥ 2nf n3fr 2ng ng1l ng4la ng4le ng4lí n4gro ng4vi nik4t ni4mr ni4mž 3nio 3nisk 2nitÅ™ n1j 2nk 2n1l 2nn no3b2 no4bs no3hn no4hs no4ir no4mž no4sky no3sm no3str not4r no3z no4zd no4Å¡k 2nož 2n1s2 n2sa ns3ak ns4ko n4soc ns3po nst4ra 2n1t nte4r3a nt4lem nt4r. nt3ru nt3rá 2nub nu4gg 3ny. 2nz 3nák ná3s2 ná4s. 2n1Ä 2nÄ 2nív 2níž 2nó 2nÅ¡2 n3Å¡t nÅ¡4Å¥o nů2 2nž 2o. o1a oang4 o1ba o1be obe3j obe3s obe3z ob1l ob1r ob4rň o1bu obys4 ob3z o3bé ob3Å™ez o1c o4chl o2chr oc4ke oc4ko o4ct. 
oct3n ocy3 oc4ún od3b odej4m ode3p ode3s od1l o4doc odos4 odo4tk od3ra od4ran od3rů o3drž od3v od1Å™ o1e2 oe3g oe3ti o2fl ofrek4 og2 o3gn o1h oh4ne o1i oi4ce o4int o1j o4jar oje4dl o4jmi o4jmov o4jmu o4jmů oj2o o4juz 2oka ok2te o1l ol4gl ol4to o1m om4kl om2n o2n o3na ona4s o3ne o3ni o3no ont4ra o3nu o3ny o3ná onář4ka o3nÄ› o3ní o3nů o3ný o1o oo4hÅ™ oote2 opoÄ3t opro4s o2ps o4ptu opá4t o4pÅ™. opÅ™ej4 opÅ™e4jm o1ra o4rae or4dm o1re o1ri o1ro or3st o1ru or4vá o1ry o1rá o3ré o1rů orůs3 o3rý o1sa o4sai ose4s osi4d o1sk o4s3ke o4sku osk3v o4ská o4ský o1sl os4la os4li os4lý os3mo os4mu o4st. o4stg o4stm os4tor os3trů o4sté o4stÅ¡ o4stý o1sy o1t ot4kl o4tlý oto3s ot3ro ot3ví o3tí o3tÅ™ ot3Å™i o2u ou3bÄ› ou3dÄ› ou4fl ou4il ou4is ou4k. ou3ka o4ukl ou3kr ou3ká ou3m oup3n oupo4 ou4s. ou3sa ou3se ou4sk ou3sm ou4tv ou3v ou4vl ou4vn ouz3d o4uÄk ou3ži ovi4dla o4vsk ovy2p o2vÅ¡t o1x o2z o3za oz1b oz4d. oz3dá oz3dÄ› oz3dí o3ze oze3d2 ozer4 oz1h o3zi oz3j oz3k oz4ko oz1l oz3m o4zn. o3zo oz3p oz4py oz4pÄ› oz4pí oz3ro oz3ru oz3rů oz3t o3zu o4zut oz3vr oz3vá o3zí o3zů ozů4s o1Ä oÄ2k oÄ4ka o2ň o3ňa o3ňo o1Å™ oÅ™i2s o3Å¡k o4Å¡ku o4Å¡ky o3Å¡l oÅ¡4lá oÅ¡4mo oÅ¡4ti oÅ¡4Å¥u o3žl ož4mo 1p 2p. pa4ed pa4es pa4kl pa3si pa4t. pat4ri 2p1c pe4al pede4 pe4ig pe4np peri3 pes3t3 pe4tra 3peÄ pi4kr pi4pl 2pk p2kl p2l 3pl. 4p3la. pl3h pl4h. 4p3li. 4plo. 2pn p2nu po1b2 po3c2 3pod podbÄ›4h pod4nes po3dru po3drá po3h poly3 po3m2 po4mp po4ol po3p po4p. po4pm po1s2 pos4p post4r po3t2 po4t. po4tn po3uk po3uÄ po3už 3po3v po3z2 po4zd poÄ2 po3Äk poÄ3te po3ří po4Å¡v 2pp 4pra. pra3st pr2c pro1 prob2 pro3p pro3t4 pro3z pr2s 4prán prů3 pse4s 2p1sk p4sut 2pt p4tej p4ter p4tev pt4ri p3tu p4tá. pu4dl pu4tr pyt3l pá1 pá2c pád3l pá4nv pá4sl 2pÄ pé4rh 2pÅ™. pÅ™e3h pÅ™e3j pÅ™e3t4 pÅ™e3z pÅ™e3Ä2 pÅ™i3 pÅ™ih4 2pÅ¡ pÅ¡4ti 2pÅ¥ qu2 2r. 1ra. ra4br ra4em ra4es ra4ff ra4hl ra4hm ra4jg ra4jÅ¡ 2rak ra4nh ra3si rast4r ra4vv ra4wl ra4y. ra4yo ra4Äm 4raži r1b r2bl r1c rca3 r3cha r3cho rc4ki r1d r4dla rdo2s re4ad re4au red4r re4et re3kl re3kvi re4mr re2sb res3l retis4 ret4r re4um r1ha r3hl. rh3n r1ho r3hu r1há ri4bb 1ric ric4ku ri4dg ri4dr ri4fl ri4gh ri4zm 2rk r2kl r1l 2r1m r4mio 2rn rna4vÅ¡ rn4dr ro4ad ro3by rod2l ro3d4r 3rofy ro3h ro4h. ro4jb ro4kÅ¡ rom3n romy4s ropát4 ro2sb ro4skv ro4sky ro3sv ro3ti ro3tl ro4tÄ ro3vd rovÄ›4t 3rový roz3d roz3n ro4zo roz3v ro3zá ro4Äp rpa3d 2rr rr4ha rr4ho 2r1s r2st r4stu rs3tvÄ› rs3tvý 2r1t r2th r4trá rt4sm rtu3 r2t3v rt4zu 1ru. ru3se ru3si rus3k ru3ži 3rvaní r1x 1ry. rych3 ryd2 rys3ky rys3t ry4zk ry4zn ry4í. ry4Å¡k 2rz rz3d rz3l rád4l rá4dž 1rák rá3ri 1rář r1Ä 4rÄitý. rÄ3t 3ré. 2ró 2rÅ¡ rÅ¡4ní rů4m. růs3ta rů4v. 3rý. rý4zn 2s. sa4pf sa4pr sas3k s2b2 s2c s3ca s3ce. sch2 sch4l sch4n 3schop s3ci sci4e s3cí s2d 1se se4au se3h se4ig se4il sej4m se4ku 3sel se3lh 3sem ser4va se3s2 ses4k se4ss se4stra se4stru se4stÅ™ set2 se3tk se3tÅ™ se4ur se3z se3Ät 2sf s3fo 3sfé s3fú 1si 3sic 3sif si4fl sig4no 3sik si3ste 3sit s2j s3ju s2k 4skac s4kak 4skam s4kok 2skon skos4 4skot sk4ra sk4ru sk4ry 4skve sk4vo s3kán s3ků 3sl. 4s3la. s4lav s3le. s4led s3lem s3len s3let s4lib s4liÄi 3sln 4s3lo. s2ly s3ly. s1lí s2ma s4mek s2mo 2sn s2na s3nat s2ne s3ne. sn4tl s2ná s3ná. s4níd 1so sob4l so3br so4sk so4tv sou3h sou3s souz4 so4Å¡k s2p s4pol spro4s 1sr 2ss ss4sr 2st. 4sta. s3taj s2tan st4at 4stec s4tep st4er s4tero s4tich 2stil s4tink 4stit. 4stiÄ st3lo 2stn 4sto. s4tona 4stou. 4str. 4stram s4trik 4strn 4strác 4stupni s2tv st4ve 3ství 4sty. s4tyl 3styÅ¡ s2tá 4stá. s3tář 4stÄ›. s4tÄ›d 3stÄ›h s2tÄ›r s2těž s1tí 2stí. 
s3tÅ™ej 1su su4ba su4bo suma4 su3ve s2v sy3c sych3r sy4nes sá2d 3sáh sá2kl 2s2Ä s3Äi 1sé 1sí 2sň 2sÅ¥ s3Å¥o 1sů s2ž 2t. 1ta. ta2bl tac4tvo t2a3d 1taj ta4jf ta4jg 4talt 4tand 3tanÄ› t1ao 2tark tast4 ta3str ta4Äk 2t1b 2t1c 1te 3te. te4ak te4fl te4in 4teném teob4 tep3l ters4 tes3ta te4tr te4uc te4ur te4ut 2tf 2tg 1ti ti4gr 2tih ti3kl tin4g ti4pl ti3sl tis4tr ti4tr 2titu tiz4r 4tizí tiú3 2tiž 2tk2 t4kal 4t2kan t4kat t2kl tk4la tk4li 4tknÄ› t2ká 2tl 3tl. 4tla. t1le tles3 3tlm t3lo. t4lou tlu3 tlu4s t1ly t1lé 2tm t2ma 2tn t3ní 1to to4as to3b tob4l to3dr to4hm to4ir 2toj tol4s to4ol 4top. 4topt 4topu 2torn 2toup 2tp t3rant t4rea t4ref tre4t 4tric. trip4 t4rit t4rog t3rol tro4sk t4rou 4trouh 4troň. 4trun t4rus 4t4ruž t3ráln 4tráš 2trÄ t3rům t3rův 2trý 2t1s ts4ko ts2t 2t1t tt4ch tt4ri 1tu. tu4ff 1tuj tu4lk 2tup tu4r. tu3ry tu4s. tu4Å¥. tu3ži t2v 2tve 2t3vi t4vinn t4viÅ¡ t4výc 1ty. ty4gÅ™ ty2la ty4Å™e ty4Å™h ty4Å™j ty4Å™o ty4Å™r ty4řú 3tá. tá4fl t2Ä t3Äi 2tÄí 1té té2bl 3tém 1tÄ› tÄ›3d4l 2tÄ›h 2tÄ›nn 2tÄ›p 1tíc 4tíc. 4tíce 1tím 2tín 2tír 2tÅ™ t4Å™eb tÅ™eh3n t2Å™el t2Å™ic t3Å™il tÅ™4ti t1Å™u t2řá 3třáb tří4s 2tÅ¡ t3Å¡t tÅ¡4ti 1tů 1tý. 1tým 1týř 3týš u1 2u. u2at u2b u3ba u3be u3bi u3bo ubs4t u3bu u3bá u3bí. u3bů uc4tí 2u2d u3de u3di u3do u3dru u3du u3dy u3dí ue4fa 2uf u2hl uh3lá uh3no u2in u2jm u2k u3ka. uk4aj uk4al uk4at u3ke uk3la uk3le u3ko u3ku u3ky uk4á. u3ků ul4fa ul1h ul4pí u2m u3ma u3me u3mi um4pl um4ru u3mu u3má 3umÅ™ u2n un4dl u3ne u3no u3nu u3nÄ› u3ní u3nů un4žr u2p u3pa u3pe upe2r3 u3pi u3pln u3pu u3py u3pá u3pÄ› u3pí u3pů u2r u3ra u3re u3ri 2u3ro u3ru u3ry. u3rá 1urÄ u3rů u2s us3ky us3ká us3ké us3ký us1l us2lo u3so u4ste u4sty u4sté u4stÄ› u3stÅ™ u4stÅ¡ u4stý u3su. u3sy u3sá u3sí u3sů u4tro u4trá u2v u3vi u3vu u2z u3ze u3zi uz1l u3zo u3zu u3zí u2Ä u3Äa u3Äe u3Äi u3Äo uÄ3t u3Äu u3Äá u3Äí u2Ä u2ň u2Å¡ u3Å¡e u3Å¡i uÅ¡4kl u3Å¡o uÅ¡3tí u3Å¡u u3šá u3ší u2ž u3že u3žo u3žu u3žá u3ží 1v 2v. va3dl va4jÅ¥ va4kl 2v1b 2v1c v2ch 2v2d v4dal v3di v4dÄ›k v4dÄ›Ä ve3dle ve3jd 3ven ve2p ve3ps vep3Å™ ves3l ve4sm ves4p ve3sta ve3t4Å™ ve2z3m vi4ch vide2 vi4dr vi4et vi4kr vi2tr 2vk v2kr v2l 2v3la. 4vle. 4vlem 2vlo 2vm 2vn v4nad vo3b vo4ic vo4ja vo4jb vo4jd vo4jj vo4jm vo4jÅ™ vo2s vo4tÅ™ vou3 vous2 v2p vr2c vr2dl 4vrny v1ro vr4st vrst3v vrs4tvÄ› 2vs2 v1sk v3stv 2v2t vy3c vy3d2 vy4dra vyp2 vy3s2 vy4sn vys4t vy3t vy3Ä vyÄ4k vyÅ¡2 vy4Å¡. vy4Å¡m vy4Å¡Å¡ vy4žl v2z2 vz4no vz4né vz4nÄ› vz4ní vá3ri 2v2Ä v3Äá v3Äí v4Äír vÄ›4cm vÄ›3t4a více3 ví4hat 3vín 2vň 2vří v3řín v2Å¡2 vÅ¡e3s v3Å¡tí. 3výs vý3t 3vý3z v2ž2 wa4fd 3war wa4re we2 2x. xand4 2xf xisk4 2xn 3xov x1t xt4ra xy4sm y1 y2a y2bl yb3ri y2ch y4chr y2d1l yd4lá y2dr yd4y. y2e y2gr y3hn yh4ne yj4ma yj4me y2kl yk3la y3klop yk4ly ymané4 ym4kl yna4s y3ni ype4r yp4si yp4tá y2pÅ™ yr2v y2s y3sa y3se y3si ys3lu y3sm y3so y3sp ys2t ys3te yst4r y3su y3sv y3sy y3sá y3sé y3sí yt4me yu3ž y3vs yvÄ›4t y3zb y3zd y3zk y3zn yz4nÄ› yz4ní y3zp yz4po yÄ2k y2ň yÅ™3b yÅ™k4n yÅ™4Äe y3ří y2Å¡ y3Å¡e y3Å¡i y3Å¡k yÅ¡1l y3Å¡o y3Å¡p y3Å¡u y3ší yž2 y3žd 1z 2z. zab2l za4bs za4dk za3dl za4dn za3h za3i za3j za4jk za3k za4kt zal4k zam4n za3p2 za3s2 zat2 za3tl zat4r za4ut za3z zaz4n za4zÅ¡ za4Ä. za3Å¡ zaÅ¡4k za4Å¡s 2zb zban4 z2by zbys4 2z1c 2z2d z3di zdnÄ›4ní z4doba z4dobný zd4re zd4ví z2e ze3h ze3p2 4zerot ze3s2 zes4p zet2 zev2 ze3vn ze3z ze4z. 2z2f z1há z4ine z2j z3jí 2z2k z3ka. z3ky z3ké z3ků z3ký 2zl 3zl. zlhos4 zlik3 z3ly. z2m2 2zme z3mn z3my z4mÄ›n 2z2n 3znak z4nal z3ne. z3nic z3no z3nu z3ny z3né z3nÄ› z4nÄ›l z3ní z4nít z4nív z3ný zo4tr zo4Å¡k 2z2p z3pt z4pát 3zrak 2z1s2 2zt ztros3 z4trá z3tÅ™ 3zu. 
zu3mo zu3mÄ› zu3mí zu3Å¡ z2v zva4d z3vaÅ™ z3vi zvik4 zv4nÄ› z3vod z3voj z4von zv4ro z4ván z4vÄ›s z3víj 3zy. 2zz zá1 záh2 zá4kl. 3záp zá3s2 zá3z záš2 2zÄ z3Äl 2zň z2Å™ zÅ™ej3 z3Å™ez z3Å™eÅ¡ 2zÅ¡2 z3Å¡k zÅ¡4ka z3Å¡t 2z2ú1 zú3Ä zú3ž zů3s á1b á2bl áb4ry á4bÅ™. á3cho ác3ti3 á1d á2dl ádo4s ádos4ti ád1Å™ á1ha á3he áh1l á3hl. áh3n á1ho á1hr á1há á1j á4jmu áj4mů á4kli ák4ni á1la á1le á1lo á1lu á1ly á3lé á1lí á3my á3mé á1mÄ› á3mí á3mý áne4v á1ra á1re ár2m á1ro á1ru á3rů á1s á2sc á2s3k ás4k. ás4kl ás4kn á2sla ás4ly á2sm ás4po á2st át3k át1r á1tu á1ty á1tí á3tý áv4si áv4sí áz3k áz3ni ázni4c áz4vi á2ň á1Å™ ář4ke ář4ků á2Å¡ á3Å¡e á3ší 2Ä. 1Äa Äa4br 2Äb 2Ä1c 1Äe 3Äe. Äe1c Äes3k 1Äi 2Äk Ä3ka. Ä3ko Ä3ku Ä3ky 2Ä1m 2Än Ä2ne 1Äo Ä2p 2Äs Ä1sk Äs4la Äs4sr 2Ä2t Ä4tené. Ä4tený Ät4la Ä4tový. 3Ätv 4ÄtÄ›n Ä3tí 1Äu 1Äá 1Äí Äís3l 1Äů 2Ä. 1Äa 1Äo Äs4te 2Ä1t 3Äuj é1 é2d é3di é3do é2f é3fo éf1r é2kl é2l é2m é3ma é3me é3mi é3mo é3mu é3mů 4ére. é2s é2t é3ta é3to é3tá é2Å¡ é2ž Ä›1c Ä›d3r Ä›3ha Ä›3he Ä›3hl. Ä›h3lo Ä›h3n Ä›1ho Ä›3hu Ä›3hů Ä›3ja Ä›1je Ä›1jo Ä›3jů Ä›4klé Ä›3k2t Ä›1l Ä›1ra Ä›ra3d Ä›1re Ä›1ro Ä›r3s Ä›rs4t Ä›1ru Ä›1ry Ä›1rů Ä›s3k Ä›s3n Ä›t1a3 Ä›t4ac Ä›t1l Ä›1tr Ä›t3ra Ä›4traj Ä›t3v Ä›1tí Ä›t3ří Ä›2v Ä›3va Ä›3ve Ä›3vl Ä›3vo Ä›3vu Ä›3vá Ä›v3Ä Ä›2z Ä›3ze Ä›3zi Ä›z3n Ä›3zo Ä›3zí Ä›1Å™ Ä›2Å¡ Ä›3Å¡e Ä›3Å¡i Ä›3Å¡o Ä›3Å¡u Ä›3šá Ä›3ší ěš3Å¥ ěš4Å¥s Ä›2Å¥ Ä›3Å¥o Ä›2ž Ä›3že Ä›3ži Ä›3žo Ä›3žu Ä›3ží í1b íb3Å™ í3cho ích4t íd1l í1h í2hl íh3n í1j íjed4 íj4mů í2kr í1l í1má í3mé í1mÄ› í1r í1sa í2s3k ís4kl ís4kn ís4l. ís3le ís4ln ísáh2 í1t ít3k í3t3Å™e íz3da íz3de íz3k í3zna í3z3ni í3znÄ›n í2ň í1Å™ í2Å¡ í3Å¡e í3Å¡i í3Å¡o í3ší 1ň 2ň. 2ňa ňa3d 2ňk 2ňm 3ňov ň1s 2ň1t ó1 ó2z ó3za ó3zi ó3zo ó3zy 2Å™. Å™a4pl Å™a4Äm 2Å™2b 2Å™c 2Å™d Å™e3ch Å™e4dob Å™e1h Å™e3jd Å™e3kl Å™e3kv Å™e4kří Å™eo4r Å™e3p2 Å™e4p. Å™e4pk Å™e4pÄ Å™er4v 2Å™es Å™e3ska Å™e3sko Å™e2sp Å™es3po Å™e4sr Å™e3sta Å™e3stu Å™e3stá Å™e3stÅ™ Å™e3tl Å™et4Å™ Å™e3zd Å™e3zk 4Å™ezl Å™e3Ät Å™i1 Å™ia3 Å™i3h Å™i4h. Å™i4hn Å™i4jÄ Å™i4l. Å™i4lb Å™il2n 4Å™ine Å™is2 3Å™i4t. Å™i4v. Å™i4vk Å™i4vn Å™i3z Å™iÄ4t Å™i3Å™ Å™i4Å¡. 2Å™k Å™2kl Å™k4la Å™k4li Å™k4ly Å™k4no 2Å™1l 2Å™1m 2Å™n 1Å™o 2Å™ou 2Å™2p 2Å™1s Å™s4to 2Å™1t Å™2v 2Å™z řá4pl řá2sl 2Å™1Ä 2říd ří4kÅ™ ří1s 2řš Å™3Å¡t řš4ti 1Å¡ 2Å¡. Å¡ab3 Å¡a4vl 2Å¡1c Å¡ej4d Å¡ep3t Å¡i4mr 2Å¡2k Å¡3ka Å¡3ke Å¡3k3li 4Å¡3kou 4Å¡kov 3Å¡kr Å¡k4ro Å¡3ku. Å¡3ky 2Å¡l Å¡2la Å¡2li Å¡3liv Å¡2lo Å¡lá2 Å¡2lé Å¡2lý 2Å¡1m Å¡mi4d 2Å¡n Å¡2p 2Å¡1s 2Å¡t Å¡4tip Å¡t4ka Å¡t4kl Å¡4tÄ›k Å¡2tÄ›s Å¡4tÄ›v Å¡4típ Å¡2v ší3d Å¡2ň Å¡3ší 2Å¡2Å¥ Å¡3Å¥o Å¡3Å¥u Å¡3ťá 1Å¥ 2Å¥. 3Å¥al 2Å¥k 2Å¥m 2Å¥t ťáÄ4k 1ú ú2c2 ú2d új4ma ú2k ú2l ú2n ú2p ú2t út4ko ú2v ú2z úz3k ú2Ä 3úÄe úře4z úš4ti ú2ž ů1b ů1c ů1hl ů3jd ů4jmový ů1le ů1my ů1mÄ› ů1ra ůr4va ůr4vy ů1s2 ů2st ůs3te ůs3tán ůt2 ů3tkl ů2v ů3va ů3vo ů3vÄ› ů2z ů3zo ů2ž ů3že ů3ži ů3žo ý1b ý3cho ý1d ýd4la ý1h ý1j ý1l ý1ml ý1mÄ› ý2n ý3no ýpo3Ä4 ý1r ý1s2 ý2sk ý1t ýt4ku ýt4ky ý1u ý4vli ý3zk ý3zn ý4zvu ýÄ4nÄ› ý1Å™ ýš3l 1ž 2ž. 
ža3d ža4tv 3žač 2ž1b 2ž1c 2ž1d že2b3 žeh3n že4ml že4zg ži4dl ži4jm 3žil ži2vl 2žk žk4ni 2žl ž4lic 3žlo 2ž1m 2žn žon2 2ž1s2 2ž1t ž2v žá4br žá4nr 2žď ží4zn 2žň 2žš žš4ti žš4tě",
+ ["compression"]="zlib",
+ ["data"]="<zlib-compressed binary payload, not reproducible as readable text>
bt0\29£CtG\11ë\13óûA:\7b\21Qâ„DyøWÓÇ;©\3\30ÿÍàÓòÁóÿÏ~¼?íý\25\15²\28„3”çšXŒå-\25­ŸlÔŸÌáOµDIl\0258=~gßá\18@\29¿­ÑÊÝ\2\0290\0# Mý;aÏ–{\5´¹\127›ÃüÆ*?~#qðÅT\18¨§O‘b¹Ï\127ÄN\5\23@àý\31\7Óÿ@8þà¯Ip߀;Ð\1ý°ò\15Fê\3­çññç¿\ +\0215\1j²¢%1É­AüHÜ\25›l\4áÖ’nȾ·FŠ*ö•\24¾WtùæÓ\31þ\8çÄ­ÎóS«õ\0196,ä\ +“¯Ô¼·>1Ã\9/Fâkþ\11I„ƒJ¤P»!ûU„‡g&wØøgcOð\22ûÔºü´´\0300\18=?\11>ÐáyUtÑ,§OJŒCpÀ|\16MÎ'\19ð‰Æ1\29t`c~ÂÔ>\4äó‘òÉ–ÿ<\22ËA¿~ü\31#\1³\4", + ["length"]=21736, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=3636, diff --git a/tex/context/patterns/lang-cy.lua b/tex/context/patterns/lang-cy.lua index 303a3766a..9a19fe385 100644 --- a/tex/context/patterns/lang-cy.lua +++ b/tex/context/patterns/lang-cy.lua @@ -39,7 +39,81 @@ return { }, ["patterns"]={ ["characters"]="abcdefghilmnoprstuwyáâêëïôö", - ["data"]=".ac4t .ad3ae .add5as .add3o .ad4eg .ad4eny .ad4fer .adl4 .ad3r .ae3a .af3a .af4an .aff3 .afl4u .af5lw .ag3w .am4le .am3s .an5ad .an4g3 .anghen5a .anghen4r .an2o .anrhyd4 .ansodd4e .an5te .an3w4 .an5we .ar4bo .ar4cha .ar5ddel .ared4 .ar4en .arff4 .ar4ge .ar2i .ar3we .ar4wed .as3g .as3t .aw4e .ban4as .ban4ed .bara5t .bel3y .be3t4a .bl2 .bl4e .br2 .br4e .call5 .ce4n .ch2 .ch4e .ch4l .ch4o .chollad4 .chr2 .chwyn5 .cl2 .cr2 .cy5we .dad3r .dd2 .ddefn5 .dd4i .ddi5an .ddi5dd .ddi3e .ddill5adas .ddill5ade .ddill5ado .ddill5adw .ddin4 .ddiw5eddas .ddiw5edde .ddiw5eddo .ddiw5eddw .ddwl3 .ddy5fala .ddy5fale .ddy5falo .ddy5falw .ddylad4 .deallad4 .defn3 .der4w .deth5 .di5an .di5dd .di3e .di3gy .dill5adas .dill5ade .dill5ado .dill5adw .din4 .diw5eddas .diw5edde .diw5eddo .diw5eddw .dr4e .dwl3 .dy5fala .dy5fale .dy5falo .dy5falw .dy5fo .dylad4 .dyrchafad4 .eb2 .eb3r .eb4rw .ec2 .ed2 .edl4 .edr4 .eg2 .egn3 .el4or .els4 .en3as .eny5na .er2 .erfy5na .ern4 .ewy5na .fadd3 .falch5 .fan3a .farn4ais. .fasg4 .fas5ge .ff2 .ff4a .ffer4a .ffe5ras .ffer4e .ff4o .ffor5t .ff4y .ffydd5 .ffynad4 .ffy5nas .fign5 .fis5g .fon4edi .fordd4 .for4o .for4w .for4y .fr4i .fryn4d .fydd5 .fyn5as .fyw3 .gal3e .gal5o .gan3l .gan5olas .gan5ole .gen5as .ger5b .geu5d .ghwy5na .gl2 .glaf5y .gl4e .gleid4 .gl4y .glyn3 .glywad4 .god3y .gof3a .goffad4wy .gollad4 .gr2 .grynho4em .grynho4wn .gwedd4er .gyd3 .gyf5al .gyf5arc .gyfer5byna .gyfer5byni .gyfer5bynn .gyffel5 .gym3o .gyn3a .gyn5e .gynef5 .gyth5ru .gy5we .hac4 .hadl4 .haf4a .haf3l4 .hagr3 .ham4le .han5as .han4g5 .hanghen5a .han5t .han5w4 .har5ddel .hared4 .har4en .har3n .harn4a .har3w .has3g .haw4 .heb2 .hec2 .hed2 .hedl4 .he4o .herfy5na .her4w .heur5 .hof4r3 .hol4y .holyn5 .hw2 .hwn4 .hwyl5u .hwyn5a .hwyr5 .hwyth4au .hyd4 .hydr4 .hy3ff .hyf4od .hy5fry .hy3g .hyl4 .hym3e .hym4u .hym4y .hymy5na .hymysg4 .hyn2 .hy3no .hy3rw .iach4 .iac5has .iac5he .iac5hw .ir3 .ladr3 .ledr4e .le3na .le3o .lest4 .lin3 .ll2 .llaw4e .lle5na .llo5nas .llon4e .llythr5 .lo3na .lon4e .ludd3 .lygr3 .lyn3a .lythr5 .man4ed .mant4a .mar4f .mign5 .mis5g .mol3 .mon4edi .mwyth5a .mwyth5w .myn5as .neilltu4ad .neis4i .nen3a .ner4w .ng2 .ngen5as .nghyt5u .nghy5wa .ngl4 .ng4w .ngy4 .ni5an .ni3e .ni5fei .nig2 .ni5ga .ni3ge .ni3gw .ni3gy .ni5re .ni3wa .niwl5 .no4e .no4w .nwl3 .nwyn3 .oddefad4 .od4l .of3a .of4o .of4u .og2 .og4l4 .ol2 .oll3 .ol5yga .ol5yge .olyn3 .or1 .orddad4 .pl4a .pl4e .rad3r .rag3l .ra3na .ran5d4 .rew3 .rhi5a .ria4 .rin4t .rug4l5 .ry3n4a .ryn4e .sas4 .ses4 .st2 .sych3 .sych5e .talad4 .tan4e .th2 .thag5 .th4i .tho5e .th4r4 .thrad4 .th4u .torad4 .tr2 .tr4a .trad4 .tr4o .tro4en .uch2 .wa5r4as .war4es .wedd4er .weithiad4 .welad4 .wen3a .west4 .wn4io .wobr3 .wybr4 .wy3by .wy4r .wyw3 .ydd4 .yd4l .yf4ar .ym4adw .ym3e .ym4yl .ymysg4 .yn4d .ys4b 
.ysg4 .ys4i .ys4n .ys4t a1a a3ar2 2ab ab3a ab4ad ab3ed ab3el ab5ine abl1 a2b1o ab4or abr3 a1bu a4bu. a4bum 2ac ac1a ac5ade acan3 ac4aol ac3ei ace3ne ac5enni. ach1 a4ch. ach5ac ach5aw a5chef ach3o ach3r ach5us a3chwa achwyn5 achy4 aci5mw acl3 ac3o ac3ta ac3te 4ad3ac ad3ad ad5afa ad3arf adar4g a4dd. add3ad ad3dal ad3dd add3eu add5ew add3f add3i add2o ad4du addun4 add3yc add3ys 2ad1e ad3eg ad3el ad4el. ad4ena ad4ene ad4eni ad4eno ad4enw ad3i 2adl ad3len ad5lys ad2na adnabydd4e adnabydded4 ad2no 2ado ad3oc ad3od ad3of ad3on 4adr. ad4red ad3ri adr4od adr3on ad4ru 4adunia ad5uniad ad5uro adwel4ed. ad3wi ad5wr. 3ad3wys ad5wyt ad1y ady4n ad4yrn 2aea ae4ada ae5an. aedd3 ae3i ael1 ael4edda aen3 ae3oc ae3og aer1 aerw4 aer5we aer5wy aes3 aest4 aeth5a aethr4 ae1w ae5wyd af3adw 4af5aid af4al af3an afan5e af4ann 4afar3 af5arn af4at 4af3au 2af3e 2aff aff3a aff3ed aff3ei aff3i affl3 aff3w aff3y aff4yr af3i afl3a afl5edi af4l3u 2afn1 af3odd 4afol af3ont 2afr af3ra af3res af5rif af4ru af5rwy af1u 2af1w af1y 2a2g ag1a ag3ad ag3al age3na age5ne ag3law agl3o ag3lu agl3w ag3n ag3od ag3of ag4ori ag1r ag3ri ag3ry ag1u ag2w ag3wa ag3wel ag3wn ag3wr ag5yma agy4w a1h2 ahan3 ahanad4 ahedr4 a2i 2aig 2ail1 ailen3 2ain 4ainc 2ait 2al al5abr al3ad 4al3ae alaf3 4alaid al3an al5arc al5aso al3ce alch3w al5cwl al4di al1e al5edau al3ei al3en al4ena al4es. al2fo al3fy al3i al4is all3a all3e all3i all3oc all3w all3y 3aln al3oc al3od al4ogia alo3na alo3n4e al3ono al3or alp4e al1u 4alwc alw3e 4alwr al5wst al3wy 4alwy. al1y 2am ambl3 am3d amdan5 amel5o am3er amgym5r amhen4 amhobl4 amhryd4 am5las am4led am4lf am4lg am5nif am4of am2or amor5w am4pa a4mwa am5wed am5wri am5wyd am3wyt amyn3 a2n 2an. an1a a4nab anadl3 a4nae a4naf an4afi a4nai an2as an3at4 a4nau a4naw 4anco an2da an5dda an4ddy an2de an2do an1e an2ed an3ed. an5eda an5edd. an5edo a4n3eg a4nel an3eli an5er. an5ewi 2anf an3fy 2ang1 ang3ad an4gd ang3e an4gf anghaff4 anghelf4 anghredad4 anghrist4 anghy4 anghyd4 anghydna4 anghyf4 anghyfar4 anghyff4 anghyfiawn4 anghym4 anghyn4e angl4 ang5or an3if an3igi 4annau an3oc an3od. an3odd an5og. an5og4ae 4anol an3ol. an3om an3ont an3or 1anr an5sic ans4ie ans4iw an5siwn an4ta ant5ac an5the ant3rw an1w an3wes 4anwl anwy4 an3wyd anwyn3 an1y any4l3 a1oe ap3a ap4cy ap5elw ap3l apl4a ap5ost ap5rwn ap5ryn ap5wll 2ar ar3ab aradr3 arae3 ar3af. ara5ff ar3an ar4an. aran3a aran3e arat4 ar3aw ar4ber arc3as arch5en ar5clo ar2da ar2de ard5es ar4dr ardyn3 ar1e ar2eb ar2ei ar5eid ar3eit ar3fa arfan5 arf5ed ar5fel ar4ff ar3fod ar1i ar4ian ar2m 4arn. arn5adwy. ar4nd 3arnh ar4no ar4nw ar4ny ar1o ar4od. ar4odi arogl3 3aror ar3os 5aros. ar4p3as arp3w ar3sy ar3te ar4th3 ar3ug ar3ut aru5wc 3arwai ar5wch arwd2 arw5der ar3wed 3ar3wi arw3n ar3wn. ar3wni ar3wy 4arwyn ar3y 2as. 2asas as5awr 1asb as5boe 2asd 2ased as4enn 2asf 2asg as5gal asgl3 asg4oda as3gwr asg3wy 2asia 4asie 2asl 2asn as4ny as4od. 2asoe 2asr 2ast as4tal as3tan astat5 as3te as4tl as4tr as5trus ast2w as5ty. as3tyl astyn3 2asu as3ur as5wir 2aswr 2asy as5ynn 2a2t at3ad at5alf ateb3 at3em ath3a athl3 ath3o ath4r3e athr3w athr5yc ath3w ath3y ato2i at3ol a3tô at3ran atr5oc at3rod atro5e atr5yc at3wy aty3na aty5ne a2u 2aul 2aw aw1a aw5art aw5chw aw5ddr aw5dry aw3ed aw3ei aw3el aw3es aw3f aw3ga aw1i awl5ed awn3a awr1 awr3d awy4r3 3áu. 1â bab4i bab5yd b3ac bach3 badd3 b3adw 1bae 2baet b1af b1ai b1an ban3a ban3e b4ann ban3o 5barch bar4f bar4lys. barn3 bar4wy b1as bas3g2 bast4 bat4a b4ath b1au bawd4a bawe5na b1d b1ec 2bed beir4a be4iw b1em ben4ae be3nas be5ned bengl4 bens4 bent4 b3ent. 
ben3w benwy5na b3ert b3esi bgal4 2b1i b3ia bi5aidd 3bib1 b3id3 b3ie 3b2ig1 b4inc bin2e b3io b3ir bisg4 b3it bl3af bl5air bla3na bla5nedi bla5nes 2blau bl5awd bl3ec bl4enni. blew3 4blwr b4lyc 4blyn bl5yn. bo4b4l b1oc 4b3odd bol3 b1om b2on bon4d b2r bra3na br3ed breg3y br3em br4enn br2i br4il br3ir brod4iae brog4 br4wd bryf3 bryn4d b1s2 bse3na bse5ne 2bu. 1bua budd4l bu4lo 3buo bw3a b1wc 3bwll b1wn b4wns bwr1 4bwyd b3wyd. 4b3wyf bwyllt4 3bwyn bwy4r3 2by b3ych. bydd5i b2yl 3bylla by3na by3ned by3nes byrf4 b4yrw 3byst. byw3 cabl4en c1ad cad3a cad3l cae4a caethiw4ed c1af c3ai cal3e cal3o cam4enn camn4 can3a ca4ne canghe5na can3l c4ann can5olas can5ole c3ant can4yd car4en car4ped. c1as casg4e 3cat ca4t3r c3au c3áu c1b cd2 c1e c3ed c5edig ceg3 c3el c2en ce3na c3ent cer5by cer4f cer3y ceu4l c3ff 3chae ch3af. ch4afb ch4afi chan3a changhe5na char4enn chasg4e chdr5y ch3eba ch3ebe ch3ebi ch3ebo ch3ebw ch3ech ch3ed. ch3edi 5chein chelad4 ch3ent chen3y ch3er. cher4f ch3esg 3chest 4chestio 4chestol 4chestwa 4chestwe ch3eta ch5ig. chleid4 chl5ent 4chmyni 4chmynnol chn5eg chob3 chobl4 ch3odd chon5ad4 ch3ont chon4y chra4 ch4ro 4chu. ch4ub 4chus 5chwant ch3wch chw4f ch4wi ch3wn. ch3wyf chyd3 chym4an ch4ynn chysg3 chys5o chyt3u chy5wa c1i cib3 cig1 c3in ci3od. cl2e cleid4 cl2i c1ll cllon3 cloe4 cl2w cl4wm cly4w clywad4 cn2 cno4en cn4yw cob1 co4bl4 c1oc c1od cod4l coffad4wy collad4 c1om c1on con4y corn4an cosb3 cr3ae cra4m 3crat credad4 cr4el cr3ie cring4 crof4 crog3 cron4a cro5nas cron4e cryg3 crygl4 cr4yl cr4ym crynho4em crynho4i cs3a c3s4aw cs3yn ct2a c4teg ct4id c1to ctor3 c3tr 1cu 2cus c1w c3wa cwast3 cw4fa cwm3 cwn4ed c3wy c4wyn3 cwy4r cyb3y 2c1yc cych3 cyd3 cydl4 cydr4 cy4f3a cyfer3 cyffel5 c1yl cyll5a cym3 cym4an cym4ero c1yn cyn3a c5ynau cyn3e cynef3 c2yny cy4se cysg3 cys5on cys3t cys3w cyth5rud cy1w cy3wa cy3wi cy3wy d1a2 dach3 d3ach. d5achwr d2ad dad3u dad3w d5adwy dae5ara dae5ared dae5ari dae5arw d2ael d4afe d4afo dag1 dag3w 4dail da5ion d4ait d4al. d4ald d4aln d4alr d2an3a d2an3e dan3f d2ano d2anu d2anw d2any dar3a dar4ana dar4d darf2 d5arne dar3w d5aryd 2das 2dau 2daw dawd3 d5awd. d1b ddad3r 4ddaf 3ddang dd4ani dd3ara dd3ari dd3arw 2ddas dd4aw ddd2 d4dda ddd4e dd4d4i4 dd3dr dd4du dd4dy dd5dy. dd3er. ddeth5 ddeuad4 dd4eug dd4ew dd2f dd4fg 2ddi. dd4ic dd4if ddif3a dd4ig. ddi3gy dd4il dd4im dd4ini 4ddit dd1l2 dd5len 2ddo. 4ddoc dd3odd 4ddom 4ddon3 dd2or3 ddr2 ddr4a ddr4e ddr4i ddr4o ddr4w dd4ry d2du 4dd3un dd5us. dd5waw 4ddwc dd2we 4ddwn 5ddwrn dd4wyn 3ddwyr dd2y 4ddyc dd4ydd dd5yf. ddym4d dd4yn ddy5nad 5ddyni 4ddynt 3ddyr 3deb. debr3 d1ec dech4a d1ed d5edd. deddf3 def3a d1eg d5egol. de1h deheu5 d2eil d4eim delff5 d3ella d3elle d3elli d3ello dellt5 d3ellw del3o d1em d3em. 2d1en1 d4eng d3ent de2o der3f derfyn5 2d1es d3esi 5destu d1et det5an deth4o d1eu1 deul4 deu4ny d4eut d1f2 d3f4ae df4ann df4ao d4fa5ol df4at df4aw dfed5r d3fei dfe5ne d5ferf d4fery dff4y d2fi df4id df4od d4f3ol df4ry d2fu dfwr2 d4fyd dfyn3a dfyn4ed. d1g2 dgam2 dgan5e dg4ei dgl2 dgrynho5 dgyf5ar d1h2 dha5ol dhegl5 2d1i di5ach di1b2 dibryd4 di1d did2e di5den d4ido di5dos di4et di3eu dif4an di5fat di3fe di3ffr di5fli di5flo di5fra di3fw di5gab dig2e di5gel di3gen dige5na dige5ne digl4 di5gof di3gry di3gw dig2y di3gym4 di3gys dil4a dil4e di5lec di5les dill4a di3lu dil2w di3lys d3in. di5niw di3nod d4inoe di3or d2ir d3ir. di3ra d4i3r2e di5rif 3d4iro di4rw di3rym dis3g di3so dis3t di3sw di3sy 4d3it. d2iw2 d4iw. di3w4a d4iwe di5wen d3iwyf d1ï d3ladd dladr3 dl3af d3lam d4lau dl3ed d3l4ei d4len. 
dle3na dle5ne d4lent dl3er d3lew d2lo dl3oc dl3od d3lon3 dlon5e d1lu dludd3 d2lw dl3yc d3lyd. d3lyn dl4yr d3lyw d1m2 dm4ar dm4er dm4od d3my d1n d3nap dn3as d3naw dneb4 d2n3ed dn3es d2ni d3ni. dno2i d3nos d2n1w d2ny d1o d3och d2od. d2odi d4odia dod3r 2doe do4en d2of dof5yddio d3ol. d4oll dol4wg d3om don2a don2e d3ont dor2a dor4da dor5we dos3 do2w 2dr. 3dra. dr3ad. dr3ada dradd3 dr3adw dr3a4f dr5aid dr5ain dram4g dr4an. dra3na dra3ne dr4ann dr3ant dr5au. dr3c dr3ed dr4edo dr3en d4reu drew3 dr3f drff4 dr4iau d4r3id d4rir d4roe dro3es 4drog drog4e dr3ol dr5ol. dr3on. dron3a dr3ont d4rwg dr3wn dr3wyf dr3yd dr3yn. d3ryw d1s2 d1ug1 d4un. dun3a d4unia d1ur d1us 1dut du5wch d1w dw2a dw3adw d3wae dw3af d3wait d3wal dw3an dw3as dwb3 dwbl4 d3wc dwd2 dw3edi d2wen dwer5y d4w3id d4w3ir d4wit dw5mig dw3o dwr3e d2wrn dwy3b d4wyc dwyn3 dwy4on. d2wyr 3dy. d1yc d5ych. d1yd d3yd. 4dydd dyd2w dydw5y dyf5an dyff4 dyf4n3 dyf4od dyf5odd dy5fodi dyf2r3 dyfrad4 d3yg. d3ygl dy3gy d2yl dyl5ad dy3lan dyll3 dy3lu d1ym dymag5 dym5od 3dyna dy3nas dy3nes dy3n4od dy3r2a dyr2e dy3ri dy5ryd 2dys. 4dysa dys4g dysg5a 4dyso 2ea e1ad e4adf e4adl eaf1 e3af. ea4fa e4afg e1ai e1an3 e4ang ear1 earf2 ear5fo earn4i e1as e1au e3aw eb3ad eb5ar. eb1e ebl2 eb1o eb3ont ebra5ne ebr3e eb4r3i ebr3o eb1w eb3wy eb5yll 2ec1a 2ec3e ech1 ech5od echr4 ech3ry ech5we ech5wyd echwy5na echwy5ne 2eco econ2 eco5no ec5ord ecr1 2ect ec4to 2ecw ec3y 2ed3a ed4al edd3ad edd3al edd3ar edd3e 2eddf eddf3a eddf5i eddf3o eddf3w eddf3y 4eddg edd3o edd3yc edd3yg edd5yla edd5yled edd5ylo edd5ylw edd5yn ed1e ed3eg ed2ei eden5a ed3fa ed3fe ed3fi edf4w 4edia edi4f ed3ig ed3i4n ed3ir ed3iw 3edï 2edl1 ed4lo 4edr. edr3e edr3o edr4yd 2edw ed2we edw3en edw5lw ed3wy4 ed3y edym4 2e1e ef5adwy. ef3an ef5an. ef3ar3 ef3au ef1e efer2 eff4e eff3r4 eff5re effro4er eff3y ef3id ef3ig ef2l3 ef4lo efn1 ef5nos ef1o ef4odo ef2r efr3e ef4ri ef4ry ef4us ef1w efyddad5 efy3na efy5ne e2g1 4eg5an. eg4ana egar3 egeir4 eg5ell 4egen. 2egf eg5ig. egl3a egl3e egl3o 2ego eg4on. 4egos eg5os. egr3a egr3e egr3i egr3o egr3w eg3ry egr3yc eg2u eg3yr e1ho e1hy e2i 2ei1a eiaf3 ei5afr ei3bre eich3 eidal5 eidd3 eidd5y ei5der eidl2 eid5la 2eidr eidr5o ei1e 2eig eigl5ad eig5lenn eigl3w ei4gr 3eilad 4eiladwy. eil5ec eil5eg eil3es ei4ll ein2a eind5i ein4drw 4einf eing4a ein5io 4einl 4einy 2ei1o ei3ont eir3y 2eit eith5e ei1w ei3y 2e2l el1a el5ain elan5e el4co el1e el3ed el4eno el4era el4ere el5far el5fed elgri5 3elh el5iff 4elig ell1 ell5ac ellt4ir ell5wy ell3y el2m3 el5myn el1o el2od el3odd 4elog el4oga el2ri el4wi el3wy el5ybia el5ybr el3yc 4elyd el5yd. el3ydd elyn3 el3yna el5yned elyng4 el3ynn el3yr el3ys el4ysg el4yst em5ain em4at 2em3e 2emo em4os 2emp empr3 em5ryn 2emt em5tas 2emy en5ada e4n3adu e4nae en3af e4nag en5ago en3ai en3an e4nar3 enar4g e4n3aw en5byl en3c en4ct en4cy 2end endig3 endr4 en3ec en3ed. en5edd en3el en3em en3en en3er en3est en3eu e4new enew5y en5fyd eng3h en4gi engl3 en5gl4og en5ise en3it en3o en4oli 4enti ent4ir en3tr ent4wr 4enty en5tya en5uch enw3ad en3wc en3wn en3wr en3wyd en3wyf en3yc en5ych. en5ychase en5ychia en4yg 2eo e5och. e1od e1oe e4olae e4olaid e4olau e1om e1on eor3 ep5ach ep3l er1a er5ain er2c erc3a er4ch 4erco 2er2d er3de erd3y er1e 2erf er5fan erf5au er3fed er3ff er4fl er4fu er3fyd er3gl er2gy er3i er4ic er4il erin3 er5ir. er5it. 
er2l er5lys er4md er4mw er4my er3na ern4i er5ni5as er5nyw er1o 4erob erog4 4erol er5oli er4ony er2se er5sei 2ert erw3a er4w3e er4wl er3wn er4wre er3wy er4wyc er4wydd er3yc er3ydd er3yg er3yl eryl3e er4yll er3yn eryn4a eryn4e es3a es3ba es3e es5gar es4ge es4gn es4g3w es4gyn es3n es4ne es4t3a es5tam est3er 2estf 2estl est5ol 4estu es5tyll. esty5na esty5ne 2esu esurad4 es4yd. es3yn3 e2t et3ac et3ad e3tae et5eg. eter4 et3er. eth1 eth3e eth3i eth4le eth3os eth4r3 eth3w et5iro et1o et5re. et5swy et1w 4etwr ety5wy e2u eu3a 4euau 2eu1b2 2eud2 eu3da eu3d4e eud4i 2eu1f eu1g eull4 eu5lys 2eun2 eu5nan eu5nos eu5nyddi eu5sil eus3t eu4th eu4tu eu3w 2ew ew1a ew3d ew1e ew3g ewgl4 ewg4w ew3ir ewis3 ewl1 ew3o ew5par e3wyd. e3wyf 2ey e1yc ey4en 1ë 3fa. fab3 fab4i fach3 fac4w fadd2 fad4ei fad4r3 fael3 f1af 3fag fag4d fagl3 f1ai falch4 f4al5on f4alu f3am f4an. fan3d fan5edd fan4es f3anf fan3o fant2 3faoe far3a far4ch3 4far2e f3arf far4fa far4l 3farn farn3a f3arp f3art f4arwe f3arwy f1as fas4iw f3at fat4o fawd4a 3fawr f1b2 f1d2 fdd2 f2dw fd5wr. f4eb. febr3 f1ec fed4n f2edr 3feia 3feie fe4io feiriad4u feith3 fe4iw f4el. f3ell fel5yno f1em fe3na feng3 fent4 fentr5 fenw3 fen3y 2fera ferch4er ferdd4 2f2ere 2f2eri fer4in 2f2ero f2erw ferw5yc f4er3y f1es feth3 f4eth. f4etha feu1 3fey f2f ff3ad ff3ant ff4at ff3au ff3ed. ff5edig ff5eio ff5el. ffen5ed ff3ent ff3er. 3ffert ff3esu ffet4 2ffi ffidl5 ff2l ff4la ffl4ac ff4lo ff5log ff5los ff3n ff3od ffod5e ff4odi 3ffon. ffo3n4a ffo3n4e ff3ont ff2or 5ffor. ff4os ff2ra ff2ri ff4rod ff2rw 4ffry ffr3yn ff2t 5ffurf3 ff5wyf ff5yl. f1g fg4wr f1h2 fha5ol f1i f4iadae 2fic fic4e f2id f3id. fig4en. fil3y fin3 f4in. f3ind fin4t fisg4 f2ï fl2 fl3ad flaf4 fl3ai flamad4 fla3na flan5ed f2las flaw4 fl3ec fl5eis fl3em fle3na fle5ne fl4eo fl3id fl4ig flin3e fl3ir fl4iw fl3om f3lon fl5rwy f4l3wr f1ly f5lychw f4l4yd fl4yf flyn3a flyn3e f2n fn3a fn3d f4n3ec f4n3ed f4n3em f4nen f3nif fn3ig f3nith fn5lu. f4n3oc f4n3om f4n3on fn3w fn2y f4n3yc fn3yn f1oc fodd3 fod4enn f4odf fodr4 fod3rw f4odu f3oedd f1og fol3 fol4enn f1om fon4d 5fonog f4ony f4or. for4c f4ord for3f f3os2 fos4i fos3o f3ot f4otr fr2a f2raf f2rai fra3na fra5ned fras4au f4r3au fr3d frdd2 fre2 f2rec f4red. f4reg freg3y f2rem f4ren f3reo f2rer fr3f f2rh f2rid fr3id. f2rir f4rit fr2o f3roa f5roadw f2roc frod4iae fro2e fro4en fro5esi f3ro2i f2rom f2ron f3roo fr4ot f3row fro4wc fro4wn f1ru fr2w f2rwc f2ry f3ryn f1ta f3ter fudd4l fud3w fu2l f1un3 f4urf f3wa f1wc fwd3 f1we 4fwl. f1wn2 f3wr. fwr5ne f4wy. f3wyd fwyllt4 fwyn3 f4wyn. f4wys f1yc fyd2 fyddad4 fydd4l fydd5y fyd4l3 f4ydr fyd3y 3fyf fyf4y f1yl f4yl. f2yn 4fyn. f3yng fyn3o fyn5od f2yr fy3r2a f3yrd fyr2e fyrf4y fyr4y fys4t fystyr4o fys4w gabl4en g5ach. gad1 gad3a 5gadar g4ad2u 5gaduri g4adwr g1ae gae3a g3af. gaf3a gaf4r3 g1ai 1gal2 gal5ara gal5are gal5ari gal5aro gal5arwy galed5 4gall gam4enn gamn4 gan3a gan4d ga4ne ganghe5na g3ant 4ganwr g3ao gar3eg gar4enn g3arf gar4ge 3gart 4garthia gar4we g1as 5gased gasg4e ga4t3r 2g1au 4gawe 2g1b gb4er g1c 2g1d2 gdd2 gddig5 gdo3ra gdo5r4e g2dw gd5wr. g1ec g1ed gedd3 g2ede g4edi. g4edid g4edir g4edit g2edo g4edu g4edyc geg3 g2egy g2ei. g3eid g4el. gell5a gel3o g1em gen4d g5enni. gen2r g3ent g4enu g3er. 3g4erd ger4f ger3y g1es geu4l g1f2 gfa3na gfa5ne gfe5ne gfyn3 g3ga gh2 ghae4 ghan3a ghanghe5na ghar4en ghasg4e ghen5i gher4f gh4le ghleid4 gh4ne ghob3 ghobl4 ghof5r gh4og ghon4y ghr2 ghra4 ghred4adu ghred4inia ghw4f ghyd3 ghym4an ghysg3 g1i gib3 g4ida gi5en. g2ig1 3gil 1gip g3iw g2l gl3ac gl3adw glaf2 glan5e gl3ant glas3 g5las. g3lat gl5au. gl2e g3le. 
gl3ech gl3edi g5leisiaso g3leo gl3es gl3eu gl3f gl3ia gl3id g3liw gl4odi gl4ody glo4e gl4of 5gl4oga glo2i g4lu. g4lwc g4l4wm g4l3wn g4lwyf gl3yc g3lyd. gl4ym gl4ys gl4yw g2n gn2i gn3io g4niw g3nï1 gno3e gn2of gn2u gn1w gn4yw gobl4 g1oc goddefad4 go5ddr g2od2y god5yn g2oe go5fau go3fer goff4au gof4un gog2 go3gan gog3e gog4l4 go5gyn g3ol. goleu5 3g4oll3 go4lw gol5yga gol5yge gol5ygwy g3om. go3me gon5ad g4one g3ont gon4yn g2or gor5chy gorddad4 gord5i g4orf gorn4an g4orol gor3t 1gos gosb3a g3ota g3ote g3oti g3oto g3otw g2r gra4m gran3a gr4el gr3f gr2i g4rid4 gr3ie gring4 g4r3ir g4rit. gr2o gr3od gr4oe gr5oed grof4 grog3 gron4a gro5nas gron4ed gron4es gr4ono grwn5a gr3wo gr4wt gr2wy g5rwydd g4ryc gryg3 grygl4 gr4ym gr4yn g1s2 gsym4 gub3 gudr4 gu5edd gu4to gw2 gwa5r4as gwar4es gw4as g3wc gweithiad4 gwelad4 gwel5e gwen3a gwerthad4 gwm3 gwn4a gw4n4e gwobr3 g3wr. g4wrd g5wth. gwy3by g3wyd. gwydr5 g3wyf gwy4r gwyw3 3gyb gyb3y g1yc gych3 g4ycho gydd4f5 g2ydi gydl4 gyd3r4 g4ydu g4ydy 3gyf gyf5an gy4fe gy4fl gy4fr g3yl3a 3gylc g3yle g4ylio g3ylo g3ylw g2ym gym4an gym3u gyng3 g2yno g2yr g4yro g2ys gy4se gysg3 gys5on gys3t gys5to 3gyw gy3wa gy3wed gy3wi gy3wy hab3yd hadd5as hadd3o had4eg had4eny h4adf had4fer hadl4a had3n had3r4 h5aeol ha4f3a h4afl haf5ol h4afs hag3w h1ai h4aif hal3e hall3o hal3o ham4enn hamn4 ham3s han3ad h4anau hanc4 han3d ha4ne han5edd han4er h4ange hanghen4r han3ig han3l han2o han4oda han5olai han5olas han5ole han5olwy hanrhyd4 hansodd4ei har5adwy. hara3t harato4en har4bwr har4cha har4fo h1as h3asf hast4a ha4tr hatr3e h1au hawe5nas hawe5ne hawl3 h2â h1b2 hbl4a h1d2 hddad3 h3di hd4ir hdo3na hdo3ne hd4ra hdr3e hdr5oc hdr5od hdro5ed hdr5wyd h4dwr h2eb h3eb. heb3ra hedd3 hedd4fo h2ede hed5fo hed5fw h4edid h4edir h4edit h2edo hedr5wy h4edu h4edy h2ef h2eg hegn3 h4egy he4ho h2ei2 h4e3ia h4eil heimlad4w h4el. 4helad 4helaf 4helai 4helan 4helas h3eld 2hele 4heli 2hel3o hels4 2helw 4hely hel3yd h1em hen5cy hen4id hens4 hen3wy henwy5na henwy5ne heny5na he3ol her3b h2ero h3esi h2et h3ete h3eto 5heuae heu4aetha heu3d heu2l he4wi hewy5nas h1f2 hfil4 hfonhedd5 hf4os hf4wy h1g2 hgan3 hgap2 hgi5ai h1i2 hiach4 hiac5has hiac5he hiac5hw hi4a4n hib3 hidl3 h2ig1 hig3y hin4t hir3 hi4wa h2ï1 hï4en hl2 hl4ad hl5adw hl4am hla3na h5las. hl3asi hl3aso hl4aw hl5ech. hl5edig hledr5 h3lef 4hleit hl4en hl4et hl3id hlon3a hlon5e h4lus h4lwm h5lyd. hl3ydd hlym4u h4lyn hl3yn. hlywad4 h1m2 h3myg hmygad4 h3myn. hmy3na hmy5ne h5myni hn2 h3nad h2neg h4new hn4ie h1nï1 hnod3 h2nol hn5ole hn4yw ho4ad. ho4bl hod4l ho4dy ho4en hoffad4wy h3og. h3o4ga hog5lu ho2h h2ol h3ol. hol5ud h1om h2or h3or. hor4c horn4an h4os. hosb3 hos3o h2ow hp2 h2r hra3dd hr3adw hr3af. hra3g hr4aid. hr3ant h5raul hr5edig hr3em. hr3f hr2i hr3ia hr3id. hrid4a hr3ie hring4 hr3ir hrisiad4 hr3it hr3iwy hr2o hro4ad4 hr5och. hr3odd hrog3 hr3om. hron4a hro5nas hron4e hrong5 hr3ont hr4ud hr3wn. hr5wyd. h5rwydd. hr3wyf h4ryc hryg3 hrygl4 hry3l hr4ym hrynho4e hrynho4i hrynho4wn h4rys h1s2 hsef4 h2t h2u hub5on hudd3 hudd5y hudr4 hud3w hud5yl h4uge hug4l hun3ad h4unn h3ur. h3us. h4use h4ust h4usw hw2a hw4as hwbl5e hwd3 hw2e hwedl5 h3wei h4wel. hwen3 hwen4y hwe5nychaso hwe5nyched hwerw5 hwe5ug hw2i hwiw5g hwm3 hwn4e h3wr. h2wy h4wy. h4wya hwybr4 hw4yc hwyll5t hw4ym h4wyo h5wyol hwy4r hyb4l hyb4wyl hyb3y hydd4id hyd4fo h2ydi hydl4 hyd4naw hy4f3a hyfad4 hyf4ae hyfar5f hyfer3 hyffel5 hyffred4in. hyf4iai hy4ga hygl4o hygl4w hygr4 hyll3a hym4adw hym4ant hym5el hym4en. 
hymerad4 hym3o hymp4 hym3u hym5yr hym5ys hyn3a hyn3e hynef3 hyn3yc hyr3a hyrdd5 hyrf3 hyr5n4o hyr2w hys4b hy4se hysg5od hys4ig hys4n hys5oni hyt4bw hyth5ru hyt2u hytun4deba hy3was hy3we hy5wed hy3wi hy3wyd 1ia iab4a iach2 iad3 i4ada 5iadaeth. iad4lo 5iaduro i3ael 3iaet 4iafo iag3w 2ial1 ial4ae 2i3am2 iam3h ia3na 4iand ian5da ia3n2e 4i3ang iang4e ianghen5 ian3o ian3w 2iar i3ard i3arf iar4l iarll5 iar4s i3asg iat5er i2au iawnad4 2ib ib3ed ib3el iben5y ib3es ibetr4 ib3i ib4il ibl3e ibl3o ibl3w ib5og. ib3on ibr3a ibr3w iby4nad4 2ic ic3en ichl4 ic5ied icon2 ic5oni ic5rwy ics4i ic5siw ic3t2 2ida idal4 2idd i4ddai idd4au i2dde idd3f4 idd3i i4ddir i4ddod idd3r 2ide idel4 ider4 2idf idf4w 2id3i idi4a id4lo idl3w 2idm 2i2d2o id3og i3dola i3dole i3dolo i5dolwy ido3na ido5ne i3dor 2idr idr4a idr4o id3rwy 2idu 2idw idw3a idwad4w id4wr 2idy id3yl id2ym 1ie 4iedd 4iedi. iedif5 ied4yl 2i3ef i3eg iegwydd4 2iei i3eid ieis4 4ien. ien4a ien4c 4iend i3ene 2ienn ienw4 i3eny i3esg 2if if4add if4ae if4al ifan3a ifan5e if4ao if4ar if5ard ifar3e if4at if5ath if4aw if5bin i4fec i4fed. i4fedi i5feio i5feiw i4fem ife4n i4fent i4fer. i3fery i4fesi i3fet iffl3 iff5or i3ffu2 iffy5na iffy5ne if3i i3flas if4on. i3fre i3fry i1fu i4fwc i4fwn i4fwyd i4fwyf i1fy i4fyc i4f4yl ify5re ig3ad ig3af ig4ain 2igan 4i3gar ig1e ig3ed ig3es ig5hal4 2ig3i ig5lan. ig5lann ig5law ig5let ig4l3o ig4ly ig5lyd igl3yn ign1 2igo ig3odd ig4ode ig3oe ig3om 2igr i3gre igref4 i3gro ig3rw igryb4 2igw ig5wai i4gwc i4g3wn ig4wrn 2ig1y igyff4 ig5yn. ig4yna ig4yr igys4 ig5yso igysyllt4 igyt4 igy4w 2i1h2 i2han ihat4 ihe4w 2i1i i3iw 2i2l il3a 5ilau. ilc2 ild5ir il3ed il5en. ilew4 il1f ilf4y il3i il4ip ill3 ill5iw illt4 il3oc il3od il5ofy il3on il2s3 il4sy il4ti iludd4 il3un il1w il5wai ilwen3 il4ws il3yd il3yg4 il3yn. ily3na ily5ne i4lysia il5ywa 2im2 im4bi im3i iml3 im4le 2in in1a in3ac in3ad in3af in3ai in3an in2be inc4e in4ci inc2o in4cy in4dai in1e 3in4eb in3f ing5en in4g3o ing3w ing5yl in5gyn in3ia in3id in5iew in3ig iniw4 in4iwe in1o in4ode in4odi in4ody in3oed in3on in3os int4a in4te in2t3r in4ty in3w2 in5wyc in1y 1io 3io. 2iod. i3odde iod5le iod5wy 2ioe 2i1of iog3 4iolc iom3 i2on ion3a ior4c ior4f i4orw 2ios 2iot 2ip ip5ell ip4og ir1 ir2a ir5agl ir3an ir4áf ir3b irch3 irdy5na irdy5ne 2ir3f 2iri i4ria ir3io i3ris ir4li ir4ll ir3na irnad4wy. ir3no irn4y 2ir3o ir3w irw2i ir4wo ir2y ir3yn i3ryw 2is isaf4 is3b is5eld is2er is5er. is4gam is4ge isg3o is3gr isg5wyd is3gy is4la is5myn is2o is5odd is3ol is3on ist2 is4ti is5tol is2w is3wn is5wyd. is4yc is4yr 1it. 3it2a 2ith1 ith3a ith4au ith3eg ith3i ith5or ith3w ith3y 2iw. iw3adw iw3af i4wair i3wal iw3an iw3as 3iwc iw4ch 2iw1d2 iwd4i iw5edd. iw3edi iw3eid iwg4w 2iw1i iw1l2 iwl4e iwl4i iwl4o iwl4w iwm4e iwmp4 3iwn iwn4i 4iwnl iw3o i3wre i3wrt iw5ter 1iwy iw4yd iw4yf iwyn3 4iwyr 1iy 2iyd 2i1ym iyn3 2i1ys ï3ae ï2i l1ac lach3 2lad. l4ad4d3 lad2m lad2o lad3r4w 4laen l3af. 5lafar l1ai l4ain l4air l4ait lam3 l4an. lan5ced lan5de landr3 l2ane lan4es l4ann lan3o 4lant3 lar3a lar4ia lar3n l1as l4as. lasg4 last2 las5ta 4lat. lathr3 lats5i 2l3au law5dde lawen3 law3l law3no lawr2 law5ro law3y 2l1b lb4an l2c lch1 lch5io lch5iw lch3r lch5wyd. l3co lc3yn. 2l1d2 ldd2 ld3i ld4ir ldro3 ldy5na ldy5ne 1le. le3a le4ad. le4ada lebr3 lech3 l3ed. leddf5 l4eddog led5fy led3l4 l4edr lef1 lef3e lef3y l2ega leg5ar. l2egw leg5yr le5iau le3id. lei3l4 le3ir. le3it. le4iw l3el 2l1em l3em. l2ema len2d len5di len5ig l3ent len3y 1leo le3oc le4on. l3er. l4erau ler5ig lesg5e l4esn let4em le4tr l4euad l4euh 4leuon l5euon. 
le3wch le3wn lew3yn lf2 lf5air l3fan lfe3ne lf4fa lff4y l1fi lf5icy l1fo lf5oda l1fr lf4wy lf3yd lfy5re l1g2 lg4an lgo4f3 2l1h2 l3ha l3he l3hi l3ho l3hw l1i2 liach3 4lian libr3 2lid li3de 1lif1 li4fr 4lio. li5oed li5pal 2lir l3ir. lis4g3 l3it. lith4r3 l4iw. l2l 2ll. ll4ada lladr3 ll5adwy. ll3ant ll5arn lledr4e ll4edy lle3o llest4 lleu4a ll1f llf4y llin3e ll3odd llosgad4 ll5tyr lludd3 llw2 ll3wa llw4e 5llyd. llygr3 ll4ynn ll4yr2 ll5yro lm2 l1ma l4mad l4maf lm3ai l2m3as l4mau lm3o lm3w lm4yn l1n 2lo. lob5yn 2loc loch3 2lod lodd3 lo3ed. l1og3 logl2 l1ol lol2w lolyg4 2l1om l3om. lon2a lon3d lon4es 4l3ont l3or. l4orau l4org l4ory 2lot lo5ynn lp3a l3pu l1r2 l3rh ls4ig l4syn l2t lt3ad lt5eg. lt3em l5tera l5tero l4tia lt4ig l4tio lt1o l3tra ltr4e l3tu l4tu. lt1w 2lu. l2ud ludd5y lud3w lu4edd l2un3 l4un. lur5ig lust5l lw1a lwadd4 lw4ae l1wc l3wch lw3ed lw3er lw3es lw4fa lwfr5e l4wgr lw1i lw4ig l1wn l3wn. lw3o l1wr1 4lwre l4wyc l4wydi lwyd4io l4wyn3 l4wyr 3l4wyt lyb3 2lyc l3ych. lyd2 l4yd. 2lydd lydn3 lydr3 lyf3a lyf5an5 lyf4n3 lyf4r3 5lyfr. l2yg 4lygedd 4lygia lym3 l4ynau lyng3a l4yn3y lyr3a 4lysau 4lysen. lys3ga lys3ge l4ysl 4lysn 4lysr 4lysyn lyw1 m1 mab3 mab4i m3ac mac4w m4adad m4adaf m4adai m4adan m4adas m2adi mad4r3 m4adwc m4adwn m4ady mael3 maf4l3 m3ag 2mai m3am man3a man3e m4anf man2o m3ar m4ar. mar4ch3 m4are m4ari mar4l marn3 m4aru mar4wy masg2 mas3ge m3at mat5eg mat4o m3aw mawd4a mbarato5 m3bi m3by mcan3 md2 m4dai mdan4a mda5nas mda5n4e mdd2 mddadl4 mddef3 mddi4d m5der. m4dera mdog4aetho mdo3na mdo5ne mdro3e mdwy4 mdyng5 mdy5re 4meda 4meddia 4meddwr 4medi 4medï medr3 meg3n4 megni3 meith3 me4iw mel5yno mens4 ment4e mentr5 5menty men5yd m2er m3er. m3erad m4eradwy. m4eraf m4erai m4eran m4eras merch4er merdd4 m4ere m5eriada m4eroc m4erom m4eron m4erw m4ery 4mesia 4mesol mest4 4meswr 4mesy meu1 mfalchi5a mfalchi5e mffl4 mfydd4 mg2 mgyff4 mgyffr5o mgym4 mgym5eria mgys2 mh2 mhar5ad mheir4a mhe3na mhe5ned mhe5nes mhen3t4 mhen5w mhet2 mhe3ta m2heu mhob4l mhr4a mhryf5 mhyd4 mhy3f 2mi m3ias m3id3 m3ie mi5gei min1 min4t m3io m3ir mis2 misg4 mis4i m3it m3iw m3iy ml2 m2las ml5blw m3led mlew3 m3lin m5liwiais m5liwiase m5liwiwy mlo3na mlon4ed mlyn3 m2n m3na mn4as m3ne m4ned mn5edi m5niau m3nï3 m2od m3odd mod4ig mod3r mof5yd m3og m4on. mon3a mon4d m4onï mor2 mor3c mordd4 morddiw5 mor4o m3os2 mos4i mo5siy m2p mpr3a mpr3o mpr3w mp5wai mr2 m2r3ai mra3na m2r4ed mreg3y m4ria m4rie m4rig mro4ad mrod4iae mrod4ir m2roe m2roi m2roo m2row m4roy m4ryn mryn4d mrys4o ms2 m3sa m2se mse3na mse5ne m2so mstr4 m2t mt2a mtas4 m3th m2u mu4an mudd4l mud3w mu2l3 mun3 m3us m3w2a mw3as m3wch m3wi mwr2 mwr3i m3wt mwy3b mwyllt4 mwyn3 m5wyse mwyth4adw mwyth4af mwyth4asan mwyth4aso mwyth4asw mwyth4ec mwyth4em mwyth4er mwyth4i mwyth4oc mwyth4w mwyth4y 2m2y m3yc mych3 m3yd mydd5i mydr3 myd3y myf4y m4yl. myl3a m4yln m3ym myn4ai. m3yr myr4as myr5asa myr4edi myrf4 m3ys m4ysg. mys4w myw3y 3na3b2 na4bl na4bo na4ch3 n2ad n3adl nad4n nadna4 n4ado nad3r nad3u nad3w n3adwr n1ae nae5ara nae5arw nael4 n2afa n5afau n2af3o n4afy n4aic n4aig n4ain n4air n3al nan3a nan3e nan3f nap4om n3ar narllenad4 n3asg n4asol n3as4t 1nat nau3 n1b2 nbyd5r n2c nc3an nc5des nc4ed nc2ei nc5en. n3ch nchwiliad4 n4cia n4cid n4cie n4cio n5ciod. n4cir n4cit n4ciw n4ciy n3cl ncr1 nct1 n5cyd. 
n5cyny n1d2 nd3as nd3aw ndd2 nd4da nden2 n4d3ia nd3ie n3di3f n3di4g n3dil nd3io nd4ir n3dis n3dit nd3iw nd3iy n3dod nd3oe ndo3ra ndo5r4e n2dwr ndy5na ndy5ne n4dys neallad4 n2eb1 neb3o n5ebry neddf5 n2ede n4edid n5ediga n4edir n4edit n2edo n4edu n3edy n1ef nefn3 n4efy n1eg neg5in ne3h n3eidd n2eis n1el 3nel. nel5yn 3nenty ner3a nerch5 n4erg n4erl 3nert 3nese 4nesia n4esio nes4m 3neso n2est 3nesw n2esy neth5o n2eu neu3d n4euf neul4 3new new5yll. newyn3 n1f2 nfadd4 nf4am nfan3 nfan5e nfan4t nfa5ol nf4at nfel2 nff2 nf4fa nff4o nffyn4 nffynad4 nf4id n4fil nfod4l n2fon nfon5a n5fonedi nf4ri nf4wy n2fy n5fyd. nfyd3a ng2ad ng5adwy. n4gai ngal4 n3gam n3gar n4gau ng4ddy ngel4 nghwyn5 n2gi n2gl2 n3glwm n4gly n5glym nglyn3 ngn2 ng3oe ngof3a ngol4ed ng3on ngop2 n1gr ngr4a n2gw ng4wi ngwy5nas ngy3f n4gyn 2n1h2 nha3o nhar4 nhaws4 nheb5r nhe3na nhe3ne nhep2 nh4es nho3ed nho5esi nho3n4a nhon4e nhudd4ed. nhu4e nhyc4 nhyd2 nhyl4 nhym4 n1i 4ni4ad n5iald ni1b nib4a nib4e nibryd4 ni1d nidd4 ni5dde nid4e n3ie ni4et ni3eu n4iew ni3fed ni3fen ni4feryc ni3ffr ni3fw n2ig n5igam nige5na 4nigiad n5igiad. n5igiada 5nigiadw 4nigion n5igion. 5nigiont 4n5igiwr nigl4 4nigy ni3gym4 nilead4 nill5adas n5illio ni3lu ni3lys nin2 ni3no nin4w ni3or ni3ra nir4e ni3ri ni4rw ni3rym nis3g ni3so nis3ty ni3sw ni3sy nith4e niw2 niw4a ni4wc niw5eddas niw5edde niw5eddo niw5eddw niwl3 niwl5e niwl5o niwl5w ni5ydd n2ïi nï4yc n1l2 nladr3 nlin3 nll2 nllon4 nl4lw n4llyn n2ly nly3na nly3ne n1m2 nmolad4 n1n2 nn4al nn4ar nned4 nneth4 n3nh nni2 nnif4 nni4l nnill4 nni4o nnis4 nni4w n5nos4b nn4wy nny3na nny5ne nn4yw no4ada n3ob n2od. n2odo nod3r n2oe noe4o no3er 3no4et n1of1 nof4el n2ofy n1og nol5eg nom3 n4omi n5ones n1or norch4 nor4f 2nos3 nö5es. np4et n1r2 nre4o n1s2 n2se n3sei ns3en ns3i ns4ic ns4ig n3s4il ns4iy ns5iyc n3siyn nsy3na nsy3ne nt3ad nt5af. nt5aid nt4ana nt3aw n2te n3tei nt3el nt3em nt3er. ntew3 nth2 n4tia nt5il. nt4in n3tis nt3oc nt3od nt5od. nt3oe n4t3or n1tr nt1w nt3yn nty3ra nty3r4e n1u nud2o nun4i nut1 nw3af n3wait nw3an n3war nwar4ed. nw3as nwbl4 nwb5le nwd3e n5wedd nw3edi n3wei nweithi5au nwelad4 nwen5d nw4ia nw3id nwir4 nw3ir. n3wis nw3o nwr5ei n4wy. nwybod4a n4wyc n3wyl n2wyn n4wyn. n3wyt nych3 nyf2 ny5fala ny5fale ny5falo nyff4 nyf4n nyf4o ny5fod nyfr3 n2yg ny3gy n1yl ny3lan ny3lu nym4a nym4y n5ynnau ny3n4od ny3ra nyrchafad4 ny3ri n1ys n4ys. nys4g n3yw 2o1a 2o2b ob3ae ob4an ob5ant ob3ed ob3el ob5en. oben5y ob5er. obl3a obl5ed ob3ler obl5es obl3o obl3w o3b4ly ob3o obr1 ob3yd oc1a oc5byn oc3e och3a och5an och5en ochl3a ochl5es ochl3o ochl3w och3n och4ni och3o ochr3 och3w och3y 2oci 2ocr 2oct 2od3a od4ao odar4 odd3a oddf5y odd5il oddiw3 odd3r odd5ri 4oddu odd3y odd5yd odd5yn odeb3 o5debau o5debu od5edi od5eid od3el od3er od3i odl3a odl3ec odl5esi odl3w od5off 2odog od4oga 2odr odr3a odr5ec odr5em odr3o odr5wyd. od4ry odr5yc 2odw od3wa od5wed od5wen od3yc od3yn od4ynn o1ec o4edd3 oed3i o3edig oedl4a oed5lan oed5ra oeg3 oel3c o1em oen3 o3ent oer3 oes3 oesg4o oet5an oetr3 2of. of3ad of3ai ofan3 ofan5e of3ant ofa5ol of5ebi of3ed of3el of3en of4enn of3er. 
o4ferl o4fery of4f3a off3ed off5id off3w ofiad4w ofl3 of3n of4na of4nd of4ne of4nf of1o of4odo ofr3a of3re of4rec of4red of4rem of4rer of5wyf of4yn ofy3na ofy3ne og1 og3ai og2an3 o4ganau o4ganu og3as og4edy og5elyn og3er og5erd og3es 2ogf og3i 2ogl ogl3w ogl3y 2ogn3 3og2o4f og5oru og3rwy o3gry og3yd ogyf4 og4yl og5yrn o1h2 oheb3 oher4 o1id oig1 o1ir o1it ol1 2olau ol4ce ol3d ol4da 4oleu ol3eua ol4eued ol5euo ol4euwr olew3 ol3i oll1 oll3e oll5ed ol4lt oll5wy olo2 o3los ol3s ol4sb 2olu 2olwr olw4y ol3wyd ol5wyno ol4yne ol4yni ol4yno ol4ynw 2oma om4at 2omb om2e om5eda om5edi om5eg. om3ei om3en om5isi 2oml om4og4 2omp om5pre on1 on5ach. on5adu on3af o4n3ai 4onair on3an o4n3au on5au. 2onb on5cyf 2ond on5did on2do 2one on5edd. on3el onest3 2onf ongl3 ong2o ong3w on4gyr 2oni 2onn 4onnu on5of. 2onog on2t 4onto on3w 2o1o 2op op3a op4ao op5aon opl3 opr5ai op5ren or1a 4orac or3ach or5aeth. or5aetha or3af or3ai or3an o4r3au or3aw4 or3b or2c or3chw or4dd or5ddyn ord3en or5din or4d5yn or1e or2eb or4edd ore5ddy 4oreg or4egw or4et or3fa orfa5na orfa5ne orff4e or3fo or3f4y 2or3g or3i or3l or4mu or4my orn3a or3nel or1o or3of or4oh oron5a or3one or5oni. or5onid or5onir or5onit or5pws 4orth. ort4i or4ty or5uwc or1w or5wah orw4e or4wel or5wgl or1y or3ydd 2os os3a os4ana osb3as osb5ed osb3o osb3w osb3y os5eai osg3a os3gl osgo5e os3gor osg3wy os5iae os5ibi os2o os3odd os3ol os3on os3te os3tr4 os4tu os3w os3y 2ot1 3ot. ot3e ot5esa oth3 ots4i ot5sia o2u o1wc owg3 owl5as owl3e o1wn owt5er o1wy o1yc oyw3 oy4we ôr3f p1 p2a pab5yd 2p3ad 2p3af 2p3ai 2p3an pa3od para3t par4c par3w past4 p3au pawe5na 2pec 4p5edig p2ei peir4a p5eli. pel3y 2pem pengl4 pens4 pen3t2 pen3w penwy5na 2per 2pes pet2 pe3ta p2h2 pheir4a phen3t4 phen5w phet2 phe3ta phob4l phr4a phryf5 p3ia pib1 p3ie p3io p3iw p2l pla3na p4lau pl5eda p4lyc 3plyg po4b4l pog4y pol3 p2r2 pr3as pryf3 pr5ynn p2s ps4iw pt2 p2ud p4usr pw2 pwd3 pwr1 p4wy. pydr3 p2yr r4abe r4abi rab5lyd rab3y rach5wy r4a4ci racs4 r4a4ct r2ada r4add radd5ol rad4ri radwr4i r2ae raed4 raeddad4 r4aen ra5fann ra5fán r4aff rag1 ra4ge rag3o ra3gra ra4ha ra5hau r1ai 4raidd ram3od ra5mor ra3m2w ran4d3 ran2e r4anf ran3o r4anod. ra5phe r3ar3 rar4c 2r1as ras4ie ras3t2 r3atao rat3e 2r1au raw3e 5rawes 3rawi rawn3 2r1b r2ba r3bar r4bec r4bem r4bent rb4er r4bes r2bl r4boc r4bom r4bont r4bwc r4bwd r4bwn rbyd3 rc2a rc5adw rc5af. r3car rc3e rc4er r2ch rch3ad rch3an rch3ar5 rch5eb r5chei rch3et rch3l r3chm rch3oc rch3oe rch3og r3chu r3chwa r3chwi rch5wyd r5chwyn rch3yc rchyf4 rchym4 r1cy 2rd2 r1da r3dai rdan3 rd5au. r2dd rdd3ad rdd5as rdd5ell rdd5in rdd5iwy rdd3o rdd4od. r5ddodi r3dd4u r4ddu. rddw4 rdd3yc r5ddychw rddyrch5 r5ddyw r1de rd3i rd4in rd4ir r1do r5dod. r1dr rdro3 rdro4ada r3dw r1dy rdy4n rd3yn. re3a r3ebai r3ebas r3ebe r3ebi rebl3 r3ebo rech3 rec3i 4redd r5edd. r4edio r4edol r4edwr red4yn. re4fa refn5y ref3y r4egl r5egl. r4egog re5iau r4eic re5id. reidd5 r4eig r4eil r4eine re5ir. re5it. re4iw r3ell r4emi ren4d r4eng3 r4eni ren3in r4ennyd re1o r1er r4er4id rer5in restr3 r4esw r4eua r4euo r2euy re4wi rew5id re5wn. rew5ynn 2r2f r1fa r4f3ad r4faeth. r4faf r4fai rf4ao r4fas rf4at r4fau r3fedw rfel3 rf3en rf4eny rf4ey r4ff. rff3i rff3l rff3o r3ffw rff3y rf3id r5fil. r3fl rf3lu rfodad4 rf5ol. 
rf3on rfor2 rf5ord r3fr r3fu rf1w rf5wis rfyn5yc rf4yr r3fys 2r1g2 rgal4 rgan3 r3ge rgel4y rge3na rge5ne rgo4f r1h2 rhag5e rhag3l rhag3o rha3n4a rhan4e r4haw rh4es rhew5y rhif3 rho4ec rhon5a rhost4ir rhugl5 rhyf2 rhy3n4a rhyn4e ri2 2ria r4iaethu riaf3 r4iag ri5agl r3iai r4i5aidd ri5all ri4an r5iant r3ias r4iaw ri5awd rib3e ribl3 rib3w rib3y ri5can r4ida ridd3 ridd5y r4idi rid4yllau 2rie ri3ei rif1 rig3 r4igo ri3i rin5dir rin3e ringl5 r4ini r4inl 2rio r3ioc ri5odad ri5odaf ri5odai ri5odan ri5odasai ri5odasan ri5odase ri5odasi ri5odasoc ri5odasom ri5odasw r3iodd ri3ode ri3odi ri5odoc ri5odod ri5odom ri5odon ri5odwc ri5odwn ri5odwy ri5ody r4ioe r3iom ri3ong r3iont r1ir ris4g risgl3 rist3 3r4ith 2riw ri4wa riw3l4 r5iwr. 2r3iy r1l2 rla3na rla3ne r3lew rl3ia rl3ie rl3io r3ll r4ll. rll4e rllen3 rl4l4w rl5og. r3lon rludd4 r3lw r2lym rlyn3 rl5yn. r1m2 r2ma rm4ac rm3i rm4il r2mo rm4od r3my 2r2n1 r4nai. r4nau rn4es. rn4esa r5nest rng4e rn3i rn5iae rn4ii rn5iol r3n2ï1 rn4os rn3y rn4yw 2roa 4road 4roau rob3l4 roch3 rochl4 ro3cr rodd3 r4odr rod5rw ro4ea roed3 ro4eo ro3er r2of rof3l4 rofun4ed. rof3w r3og. r4ogae ro4ge rol3 r1om3 r4onau rongl4 rong5lwy ron3i r4os. r4osf rosg4 ros3o 2rot rö5edi rp2 r1pa rpar3 r1pe rp5ech rp5em. r2pen rp5ent rp5er. rp5esi rp3i rp3o rp3wy rp3y r1r2 r3ra rr4og r1s2 rs4ai r4sau r2s3en rs3i rs4in rs5li. r2s3t2 r2sy r1t2a r4tau r4ted r3teis r4ten r4tes rth3a rth4eg r3thin rth3la rth3o rth5ol rth5ru r5thryc r4thw rth5wyon. rth5ydd rt4iy r1tr rtr4a rt5rwy rt2u rt3y rub4a r3uc rudd3 3rudd. run4i r1us rw2a rw3ad rw3af r3w4ag r3wait rwb5an rwbl3 r1wc r5wdenn rwedd3 r4weddog r4weddol r4w3eid r3wel r3wer r2wg rw5hel rw1i rw3in r3wl r4wnc rw4ni rw4n3o rwobr4 rw3od rw5old r1wr rwr5es rwr4iaetho rw2y r4wyb r5wydden. rwydd4iad4u r4wyde r4wydo rwydr3 r4wydy 3rwym rwyn3 r4wys 3ry. 3rybl ry3bo rych5wai r2yd r4yd. ry5dano rydd4on. rydl4 ry3f4a ryf2e ry3fer ryf4od ryl3a ryl2e ryl5it rym2r3 ryn3a rync4 4rynd ryn3e ryn3f ryng5a 4rynn rynod4 ryno5ded ryno5der ryn3yc rys3b rys5ba rysg5w rysg3y ry3wa ryw3i s1 sach3 saf3a saf3o san3a san3e san3o sarf5a sat4a sath4 sathraw4 s3au sá4it s2b2 sbad4w s4bai s3bet sb3iw sb5iyc s3bl sbr5io sd4or se2 sec4an sedd3 3sef se5ion sen5ol senw3 s4erc serch5 s4eri s2et sf4am sfedd4 sff4y sf4wy sf4yr s2g s5g4adr sg3adw s3gam sg3an sgar5a s3gaw s3geda s3gede s4gedig s5gedo s5gedw sgel4 sg5en. s3ge3na sge5ne s4gia s4gl. sgl3a sgl3o s3gn sg3ni sg3od sg4od. sgo4g3 sg4ol sg3om sg3on sg5oty sg5rwy sg5ryw s4gwc sg3wn s4gyc sgy4f3 sgy3na sgy5nes 2si s5ial. s5ialu si4am 5siand s4iar s3id3 sid4a s3ie s4iet s2ig s3ig. si4ga s3ige sigl3 5sigl. s3igr s5igyn sil4f sins4 s3io s3ir s3it si4wr s2iyn 2s2ï1 2sl2 sl4au slo3na slo3n4e s3ly slyw4 sm2 sm4ar sm4er smwyt5haso s4nau sn2e sneg2 s2n3i sn4ob s3oc sodd3 sod4l3 s3oe sof4l 2s3og3 s3om3 son3 s4on. s4onau son4deba son4der s3one s4ong sra3na sra5ne s2t st3ac s4tade s4taf st4am st2an st3as s4tau st5awc s4tec s4ted s4tei s4t3em s4ten s4tes st3f s5tiro stl3o st5lyt st2o s3tod. sto3ra sto3r4e st4ra s3tra. str3ec str3es str3oc str3ol s4tr3w str3yc st2u st3ur st5us. s5twyi styr3 s2u sur3 s3us sw2a s3wc swcr3 s3we s4wed sw5edd swen3 2swi swmp3 s3wyf swyn3 swy4r s2y s3yc s5ych. s3yd syf4l3 2syg syll3a syllt3 sym4l3 symudad4 2s3yn. syn4fe s3yr syr2a syr2e s3ys 3syt s4ywa 1tac tach3 3tad4l3 tad3r t1af ta4fa taf4l tag3 t1ai t3aid t5aliae tal2m3 t1an 4tanc tan3e tang5n tan3o tan3w t3ao 3tar4d tar4f t1as tat1 t1au tawl3 t1b t3ch t1ec t1ed tedd3 4teg. 4tegio t3eidi teimlad4w tel4y t3em. 
t4emi t1en ten4d te4ne terf4 terfyn5 t1es t4esa tes4io tet4a 3tew 4tew. 4tewc tew5id t1f tff2 tff4e tfod4 tfydd4 t1g2 tg4af tg4an tg4en tg4er tgl4a tgn2 t2gor t5gor. t5goria t5gorn tg4wc tg4wy tgy3w t2h thalad4 thal4m3 thang5n th4ar thar4f th4at that5y th1e th4ef th5ell therfyn5 thet4 thl3a thl5ent th5let th3n th5nod th1o th5old thollt4 thon4e thorad4 thr3ac th3red thr5ent thrid4 thro3f th5rwf thryd4 thry5da th3ug th3um th3un th3us th1w th3wa th4wl th3wyd th3wyf thwysg4 th3ych thydd5 th5yma thyrf4au thyr3w thy4w 2t1i t3ia tid3 t3ie t3in ting3 t4ino tion4 t4iono tï5ol. tl3a tl4ae tl1e tl4en. tl3on tl3wy t3lyd t1n2 t3och t4od. t3odd to4ec to3edi to4em to3esi tof3 t3og3 3tois t2ol tollt4 tol3y t1om t3om. t1on ton4e t3ont 3tor tor2a tor4c t3os to4wc to4wn tra3c tra3dd tr3adw tr3af. tra3g tra3na tra5ne tr3ant 3traw tr3ed 3tr4ef tref5a tref3l 4treg tr3em. tr3ent 3trew tr3id4 tr5ig. tro4ada tr3odd tro5fa tr3ola tr3olo tr3olw tron4o tr3ont 2trw tr4wm tr3wn tr5wyd. t5r4wydd tr3wyf try3da tryd4y try3f try3l tr5yn. 3tryw ts2 ts5ach t1se ts3i 3tud tudr4 1tum t1un3 1tur t4urm tw2 t3wai t1wc t1wn t1wr1 twr4n 3twya t3wyd 3twye t3wyf tw4ym 3twyo twysg4 3twyw t1yc t1yd tydd5y ty5gar ty3li tymp4 4t3yn. tyng5ad 1tyr tyr2a tyr4es ty3wr 2u1a ual3 u2and u4ane u3ar u3aw ub1 2uc uch1 uch3e uch5ed ud3ad u5dale udd1 udd3a udd4eg udd3el udd3f udd3i ud5eir ud3er ud3i ud1l udr3 ud5rwydd ud2w ud3wn ud3wr ud3yn u1e uedd3 u4estai. u4estau u4estwr u4esty uf5au. uff4y uf3y ug3ad ug3af ug3en ug3i ugl3a ugl3e ug3lw ugn3 ug1o ug1u ug1w ug3y u1h2 u1i ul3ad ul3af u5lan. u5lann ul3ant u5lath ul3d u2l1e ul1f ul5igr ull1 u1lo ul3oc ul3od ulon3 ulon5e ul1u ul1w ul3yc u3lyd un1 un4edy un5ell un5es. un3i unig3 un5od. un2ol un5ol. 2u1o uog3 u3os3 up2 ur1 urb4w ur5ddu ur3e ur5fau ur4fl ur2gy 4urn. urof4 ur2s3 ur4ty ur4ud u5rwydd ur3y ur4yw 1us. us4edda us5end usg1 4usi. us3o 3usr us3ter us3tod us3tr ut3a ut1e uth4r3 uth3u uth4un ut3o utr3 2u1w u2wc uwch3 u1y 2wa wac5ew wadd3 wad2n3 w5adwy. waen4i waer2 wag1 w1ai w3ai. w3aid w2air w3ais w4ait wallt5 w4an. wan3a wan3e wan3o war5ddr war3e war4edd war5ia warth4 wart5hai wart5has wart5hi wart5hw war3w 3w4as. w3ase was4g w3asi w3aso w4as4t w3asw wat5er w1au 2wb wbl5es w2c 2wca wc4ed wch1 4wchu 2wci wc5wll wc4yn 2wd wdd3eg w5ddew wd2e wd3ed wde3n4a wde5n4e wd3i wd4ih wd3ly w3dod. wdr1 wd4ra wdry4 wd2u w1eb3 2w1ec 2w3ed. w4eda 4weddf 4weddi 4wedi w3edig we4gi wegr4 wein3 well5ti wel3o welw5l 2w1em wen3au wen4d 2w3ent wenwyn5 wen3y 2w3er. wer4i wer5id w4ers wer4yd 2wes 4w3esi w4esir w4esit 5west. west4ai w1et w4eth 2weu weu2g weun3 2wf2 w1fa w1fe wff3a w1fi wf4id w1fo wfor2 w1fw wf4wy w3fy wg1 2wg. w5gig. 2wgl wg3n 2w1h2 w3he w3hw 2wi wi4an wib5an wibl5a wib5ol widl3 wi4fr 3wig1 wigl5e wil3 win5gada win5gade win5gadi win5gado w4ione wir3 wisg3 w1it 3wiw. wiw4e 2wl 3wlad. wlan3 wl4co wl3in w4lip wll5yn wl5ws. wl4yc 2wm wm3a wman3 wm4br wm2i wm5iai wm5ian wm4wl wn1 wn5adwy. wn2ae 2wnd wn3de wn3di wndr3 wn4ei wn4êl 2wn3g wngl4 wn3in wn3l wn2o w4n3oc wn3odd wn3og wn3ol w4n3om w4n3on 2wnw 2w1o w2od w3od. w3odd w2ol w3ol. w3olae w2or 2wp wp3e wpl1 wp5wrd wr5aet wrb5yn wrc2 wr3ca wr4ce wr4ch3 wr4ci wr5cwd 2wrd wr5dei wr3ed wreig3 wr5esi wr3f wr5fau wr4fi 4wri. wrid3 wr3id. wr3ie wr3l wr4ll wr3n2a wrn4es wr3no wr1o wr2t wrth3 wr1w wr4ws w5rwydd wry4w ws5bre ws3e ws3g ws4gl ws4ig ws4og ws4ta wst5an ws5ter. wstr3 ws4us ws3wa 2wt wt3a wtan3 wt3em wt5ery wth1 4wth. 
wth3w wt3od wt3wy wt3y 2w1w 2wya wy5alc 4wybr wybr5y wy3bu w1yc wych3 wyd3a 2wydd wydd4ly wydd4yd wydr3o 2wydy 2wye wy3fr wy3h 2wyi 2wyl wyl4deb wyll3a wyn5ad. 4wynd wyn3eg wyn3f wyn3g4 wy4ni wyn3o wyn3y 2wyo wyr3ad wy3ran 5wyrdd. wyrl3i 2wys 2wyt 2wyw wy3wr wy3wy 2wyy 2y1a y3ar3 y4ar. y4arn 2yb yb4ac yb5edd yber4 ybl1 yb3ly ybr1 ybr3i yb3w ych1 ychan5 ych4anwr ych5ei ych3r 4ychwe ych3wy ychwy5na ychwy5ne ycl3 2yd. 2yda yd3ad yd4al yd2an3 y3dana y3dane yd3ant y5danw y3dar yd3as yd3au ydd3 ydd5an yd4de yd4df4 yd4di4 ydd4in. ydd4of ydd5yn. yddy5ne ydd4ysg 2yde y3deb yd3ed yd4eddau yd3ei yd3er yd4eu yd5ffu ydfwr3 ydfyn3 yd3i yd1l yd4ma yd2ne ydne5b 2yd3o yd4od. ydol3 yd4os 4ydrau ydr3ec ydr3em ydr5esid yd3rew yd4ri 4ydria ydr3oc 4ydrol ydr5wyd. yd5rwydd 4ydry ydr3yc 2ydw yd3wa yd5wed ydweithi5ol ydwel5e yd3wr yd1y ydy4l y1e y3el y4era y4ern 2yf1 y4f3ag yf3ai yfan3t yf3are yf3arh yfar5wa yf3eda yf3ede yf3edi yf3edo yf3edw yf3ei yfel3 yf5erf yfer3n yf5esi yf5ewi yff1 yf4fa yf5fait yf5fei y4ff3i yff5in y4ffl yffr3a yffro5ed yffro5em yffro5en yffro5wc yffr3w yff3ry yf3i yfl4ed yflo3e yf3ne yf3no yf3ny yf3o yf5od. yfogl4 yf5rait yfra5ne yf5ryw yf3u yf5wng yf3yg yf5yn. yfy3na yfy5ne yfyng5 yf4yt yg1 yg3a yg5adu yg4ar ygeg4 yg4eid yg3i yg4il 3ygin ygl3a ygl3o ygl3w ygl3y ygn3 yg3o yg4oe yg4of ygr1 ygrad4 yg5wyd y4gyc 4ygyd y1h2 y2he yhe3i yhe3w y1i y3ie yl3ad. yl5adwy. yl3af ylan3 yl3ant y5law. 2ylc ylch3w yl4dera yl1e yl1f y3lin y4lit yll5ad. yll3e 4yllf yll5ida yll3o yll3w yll3y yl5nos yl3oc yl3od yl3on yl5ore y4lu. 4ylwe yl3wy yl1y ym5ait ym4al ym5an. yman5t ymar5w ymbl2 ym5edr ym4eri ym5es3u 3ymg ym3heu ym2le ym2li ymlo5ne ym4oli ym3on ymp3a ym4pi ymp5od ym3pr ymra5ne ymr5ig ymro5e ym4ru ym3se ym4um 5ymwyb ym3y ymyn5y ym5yra ym5yre ym5yri ym5yro ym5yrw yn4ada yn3ae yn3af yn3ai yn3an3 ynas3 2yn3au yn4aw yn5byn ync5ed yn3dir yn4eb yn3ec yn3ed. yn3edd yn2eg yn3ei yn3em yn3en yn3er y3nesa y4nesau 2ynf ynfyd3 2yng1 yn4ge yng5er yn3gl yniaw4 yni4d yn3i4f y3nig yn5igy yn3il3 yn3n yn1o yn5o5ad yn5odd yn4odi yn4ody yn3oe yn3os4 2ynr ynt1 ynt4a yn4te yn4ti yn4to yn4tu yn4ud yn1w yn3wa yn2w4e yn3wy yn4wyr yn1y yn2yc ynydd5 y1o ypl3a ypl3e ypl3o ypl3w yp3ly yr3ae yr3af yra3na yra3ne yr3ant y4r3au yr4ch yrch3e yrch3o yrch3w yrch3y yr4dd3 yr5ddyd yr1e yr2ei yr5el. yren5d yrf3e yr3ff4 yr4fu yrf5yd y4ria yr3id yr2l yr3ly yrn3 yr1o yr5ol. yr2s yr1w yr5way yr1y 2ysa ys3aw 2ysb ysb5ïw ys4bl ysb3yd 2yse ys5etl 2ysf 4ysgar. ys5garai ys5garasa ys5garia ys5garwy ysge4 ysgl4e ysg5lw ys4gn 3ysgr ys4gy 2ysi ys5ni. 2yso ys3od 4ysol ys5ol. ys3ta 4yste yst5eb ys5ted ys3ter ys4try yst4w ys3u 2ysw ys4we ys4wi 2ys3y ys4yg yt3ad yt1e yth3a yth3e ythi3e yth3l yth3o yth4re ythr5ec ythr5ed ythr5es yth5reu ythr3o yth5rwb ythr5yc yth5ur yth3w yth3yn yt5iro yt3o ytr2 yt3ras yt3s ytw5ad yt3wy yt5ysa 2yw yw4ae y3wait y1wc y3wedd y5weddia yw5eg. y4wel yw5en. yw3es yw1g2 y4wia yw3id y4wio y4wir. 
y1wn yw3ol y2wr1 ywr4a ywr5ain y4wyc y3wyf ywy3na ywy5ne y1y",
+ ["compression"]="zlib",
+ ["data"]="<zlib-compressed binary payload, not reproducible as readable text>
+\22CE;ú‰¾8\22ÇA‰ŽÌ|Q\3‹8³T¿@¢pœF‚ µˆ \18O[[|$Y|$ùû\"@èæ“t8/Ó\0301mm:UÇ“Y}j1SžÜ\7¤“\3U¹pn\26Åâ¦,\21VÂDZ@Äá\22,\21K¯Ê¬bñ€,ñª,ݼ(þó¿Èæ|žO3<>õ›\27͇e\0048\27\6Ã(o¸ÑO}]ôáú\6¨Ò~ü±Œ\15Ãòóý2Fó\17=ï>U€ÿÉէ\30Î#P¤¾ýøKÒ\ +µËèïa||©ñጾ1*ÀúÒEá2à“}9³ÿ4¢¯Ÿï™ÿç{¸ÿ©‡û\31\31î\127êáþ§\30ë\127Ö\26ÐÇú\31Ϋ>|;ãÃþ÷ÃñÔ‡³¨O£õ\5ÉÇ:_?þŠ\24\22µ\\ûÙÄ?~ê·->{AMìO£Í\6gÚ*¡¿B¡‰Îòÿiä`\13+\31ÊÃ觻Žüøh#ùA¦þõäÇ\\ðÇ'ª\31ÞÑ©¤¯É0<²z\5éÅÌ“8A‹sà‡8›¬j5[ÎGnØ~eK¢ •Å\30‰òL¸± ÷ZA”îÀ.\16è—£Š \26Á\7“n\1ÿùOÄUÜRFýy\30”âÞéÁ{\0131*rZýª&ÊÈÖtÙ\31$É\3w\9øìš\12\ +½7Eˆ\19BPãZE\18Fˆ\0063{\27\5Wg=²Õ\14f\15­&#Fƒ.ˆ@Þ\7ÃBþç@\25˜\15\4PÞ|mæ!Þ‰ïN‰E/Ôµ A”Ÿ/U÷¥R¶¯&}iH\2wCñCÙoä±\28„Ü0&‡I\14Ñ\4/çìTFC5Ì‹\7Ûù­\9“ß~w\4Òš \2L>ðÜùí”\27ÑtDâ4ŒbE¹\127ø\19S*Ÿ¿*aW‘ZÐF2ën3e\1M\21-öº\12ËP‘ãZh¤Ø0\17ôvD8’ìO5¨‡PÚ–G5á“!Òr\13šÑK/èÕ37t^Õ¡™ë£K\16Ò\127`­\30FŒ¬>¤6ŽZœrU¡éµˆµ)þfx_]\ +eQÕ`Š3O)\0123®‰\16Ný)þ<[*\9èã\5å䨔„OP¢\6M{¨ŒˆRæÛ\30¡¢Ãh2”ÓÄ÷ó²?ó‘-´$Ø2lYÿrå\11|\25‘Œ˜:BbŒxçQ3à 4ó\13ú\27ÄdÔ:¹\17yÙ\28y¡ªØ’lám\11éZ«£Ý\22C'ΗW\9\127ŒNg!R‘­\13I\1vŠÍ\5\21š\9DÚ³?$šÑ}\23m‰ôû¤ÅT\11:\3\25UG¸d÷—¸›ìHG\1^ƒ¡\0027ÓE\0138ÕÌSé\29¥/®b©_§Êfè“êvÞS ]ÊŒ\9†ÙPq-¾Žù°ù´™ñ9£…4ÏÈ\7dï,òì\7» Zmf¶ø£šÀÍ0\17o.6\19’ÃÜÏ\28ŒÈƒ0lѳ¶ò¦Y³5)g\20\26á(ò‹“Xz\5Î\0Â>I®çÌÁ*ê\25‹þb'*ð\14(Ä¢h>fÎ~¿˜\17/È\28,ß \22\\U¤g¤«Ô$Zr2_\25žŒ(úZÉâ:øü6ûÁ¯ª¸\14ÊÛ'½yµjü\6­\11™—^o\16\12Ða}…y½P\23ŸWŽ6òz\127Ó²^º\28\26\0215Ð\23¼Œ\0X•(V¦`Y\\^Ä\5œêg¡pe\12\5Î\0294\26Øýeß\\\19«Ø\31‚Æk\21 \29\21`r€7Þo\"ZùD¶^Ò\\õ’â\20° F\15-p\\ÄfÞK\8ÎZ*‚Ç)ÂõrÅ\17&<ƒSt5ýM\7àF\2ô|€Ö\5dÝÆ/ž¿8|qüâã¯Ëñ¯Ûôkøë÷7½\3Z^MÅ\25¹yZd²¾1~çoþ¿±Ò7òõ/¿ö»b7\9\20Z)ïN1.¶ÐâOלc!ÁÅÌŽ£U”s¥ê48ïZeBºQ=Åû·&s\31–9\7Ε\0211\"\19\20ÊgëÝ\0N†´Ÿ\9\6w>\25\17º‡¯\27•Ý†r\13!¿4ÊÖZ•«¬¾,òh¸\28ɾ\27Éõ»]™óãÜhk—wÔŽe\4ïe„d5{ÂH\19¡m^TB»µÌ§<\127Œ42£¿«j¤\20»P3ø\9–6\0J(š+FMdµ@fÛèW½r\20/ž\17|Ê>NnPÇÔ¡ÉP©s\24¦¢[°\14hKûV#[ÀŽÏ\30ÚÜgÕM\19MEàÐÅæDP/ÊÚvä4×”æ«~é\1\7†‡Û‰£¡‰HP\3Ø;íWv'ï:&ó«~\7Êû%´¸3šyü\ +$u$>²ŸÀª\18j\1N¦\25i\31\22àŠõ1£óü(ƒ\15‘v–á\15äòãï>X\28>>ÅÊŸÖ \1È÷CKqÊ9šÌ\20[öCvme(\17bc¹ñawæ°[î¬\11G\15\7Ž\7,Us\ +ò\12ÚByá:':øä^\19K4”قꙋ\16Fø¹’–Ü6;R,n&7£¬túsåƈ\15J\22;–›Ë\30Ll¼Ø¶9gu‰?ÁžO¿d˼4Ì'u»¸ÅÊí¥D,ÒÙ‚”‚´3jî3ò\20Ùª\\³O·3\31x x>äÎ…#W;‘9Ü—‘3®\\¦:bJ\21Vþ\26˜EU >û›Ç@†LQˆÒ¯¸ b¨¼¨|[\28>*ÙÂY)v@áGG fJQ›Œd®t8ó¦Ð_JNÐ\6\2RnXA\30 q\26ì×Ã]U–hµ¦SEÉ>^Ò‹uzȺã\22«\19ü¯¶\17\2°²w\11§sÃ\0292\24\26X\5\11.æ»\31ù\18Q&\23÷ˆŸÁ*ÂLsk\5šl¡âU™~¶H‡ }¶ž–½‘Ò\17ŠöAý\5\0185£]&Ã\25hÍö\25žB Aw„úq\26\127\13Ùa\25'7ï5\4{XvÞ¦\22#\6‚µ„díZÃãx>\14K/\31ÖãuX@ã°ôÅaé‹Ã\27‹ƒ/­âÄ£šƒ¯g\26fÎL\0146ªÇŸÿˆ\8?ŽæÕ<Ž—Ï/\14ÞQ¥\16+®½¾ˆóñò%œ6™3¢õ‡Ú,?ŽI±&¾›'ä¡yL³Œþ4Ô1ñ\14\31¤möâ^™Í÷bQ\14¨c8|\17\127Ìè²\16âüäðõøa\14åh–ÇÑ£ÃLæÅï=\14ß„cQ\22\11\"ov²\7;T#Õ_T‹M·b#Ô`¤0¾-G”]\29NHV•£ò6Š¡õDPËÇaÕ臕¢+\0249µ\26Ù‡?\27îO­\18&!õm³KEÅ—>7ˆ‰\12ºiU\30Æ\29~\23‡Þ\\̼´\6ÕBÜõ[“\"qœ™P\8º(ÐD\28qh\\‰tisuXîîð‹ÿƒõÝ!½H…[t»+Òb?Ñîƒ\7\127$àšµ<éSÒÜÛ\31a\19;w\4Ž\28\14VÐ\3q½\3é<%Ph\127®ýšÃ+Ssh%>6\30è\28Uâîø~Üûh7Ut»\25UZ7\15\127–åðgY\14„¨z¿‚?¼6\30l~8‰ç<\26–ŒC\11ÞKIˆõ8¼~\29É:ö\14v|Gš;qG2/vÙñŽŒ'¦ìQ—¢#}ŸcW\0035HîP˜£ÊÏ\29õC\14G£ªY\1270ß¹õ89E/Qû¢î\19ùö>ì®”O1ˆ\14;U8\26\6 W™‡\23†Ã\11ƒâ)¡þD“ðqzäœýv“_CËž\0304§%zŒh¸\19]0œ{\13øe®kŒ\14£TmŒ¯îäˆ\9\27C‡ÅAù]™\1\26á–ù[lÑN\0034\27\6Á~\18ƒrYÐáª.O‚hY»Æ¯\30Ä‹Oc!w\30\13\29PLÌ¥\9s™`_|~€Íz5\\\21\19\16|?®ÙGÁ—¹Ñ‹c¸Ë*\26.«h¸ÐÉ\ +Tææ\18/\127@G\14\26<\23×ä×\":%p\1p#\5ŽÌ.MüËJ»/\127S漢\27®ª¸áªO\1±+6\5ã9§Œ\"ñ—ö\27bÜ.®·¯§†ëe½\4—õ\18\\þ’ÞUUr)ø\5(xª›/“„+*^´\8À\21M~Aâ\28.x°+r`uEKX]‘ÉE4GÝ«¼ 
\12W‹tÙ¥½û¥wå—hÙÅ\17¶rCYÇ•­&ôòG6.\127QCƒ—úY×åÅN\27_ZØú¢®ß\14C‰­œ4.Ÿð2×Áe¬\26öèÑ0r\29|áè:è0Ž\18Z.Žø2ÂT\17õ‡ƒz\\|{â:i±ú\4ó‚„Û\"r._UÄB\"\23mÓh\"]\5\22ãzÞFô½\12ï^”£ø›z‚âN\31å+‹[\6TÐr#¨\18\21˜\14m¶U¬bÖCl–Æ&æãQüªº\12þžhñ»êâÍY1\23RÌ~”![åj±®“*|6Ú5\16#\6#qÏá×p|\13¿\14ű˃ýþá’\28$ÅY[±Æ·b…oøv§-ÄðEh·iÊëQªPUiÞ²¾•µ\5ì\11óU\4|%Ð[\9½{¾\8/Ov\4E\1F\127ß˵•(ŸðDH\14É\7·Â~PV˜¨<º_1j§PZßj”Qã¨X\27¢ŒwG\26—Š…¨HSಚ⧢|Ý]ûçâï—\25\5#jçM^™øDN™Ð2U&dE\11ê•OÜ`Ä\5K¿9ÑýaU¥\23H\28\0249hÒ\21®Xʼn”ú=…bÉŸÂ=\2\0166Keȇ¸\26æ–\19\18\26\24qõ\\Ñùèås\18•\29E 2'^\28s–‰L¯†âBß›\"¦­p»  Vâ#\ +\24\21Ëòä…‡ÏÅgú2\22\\\11m7\11hü5EëZé\23–·†ëü¢-*U\20¡)£¥ÈÁîÑJÅ\127\13Ó¯!ü\26\24Y½VA9aA‹„\ +«vPbbT\5Ôÿ¸¦¶X[AAi€ Ÿº\18S-¦§t[øÈì×\2\8\0033Š7\4€\27­¹e§MvGÚ»—šyo”ùÞsÌo\20ñP\6Eô\13\5¹u¶Fîw›\"êQ\"¹Š`U\6hXÃÜýù?\12‰(æ£ø(³øB© Š´ðJ³Ôï\11È\1ž\1´\24nÕg¯ˆ6G\12¨ˆN\22\30‘\21\127Ö½ÔHPÒbÝ@\21ª 6°)ªò‡Iÿ\0179/Ÿ\30m¸ÅÂ.B~;]ò[ýÈ]ª \"Q¥¾\19\5\5¾\11ã°Æ\27©¹¤’'(¹OØì6c†.+âÌ´É\16¬\12çYªæÀb•\5Q•bý#…£h²õSëbí#\5Y•’\27Êwº\24¢¡\ +^\20àKÆK†»-GÏ'Ý\ +Ÿ\14*\8“\23NT€Áædx*ùÃr{\ +\127z²ù–N^â¹\21\15µ7E™AÒËéþ÷†Û2\28š\0§‰\19WqÅzAŠE„\11l\159Ì\19lØ¢rkOø~pº¨ñ# v‡—e\26\13³baÚâ\7\24M­ˆ5þß\21߶#\5Ùø´±»Rô™´Z¦µ¶Ý\13‚ºús\ +\1†-€ž0ä\17Ü%hbé\3A X1m¬c°%\25:qŒ°Ç¤Î‹Tq›™Ó^!ÄÛ\26Î7\27„€\27ø¸b\22¬XW°\28\4nqc·å\7n¿/\6jˆÞ¯Çýâ\"ã~yË\127¿\16Å»_\26~÷\11š|#:+ ¿¾~ŽQ5Ôn+Ô@ñr™lQî÷ï‡,ïß\15YÞoÎ\6üFü\22\13¹ÍíÝþHåí/Â?à˜‰\1š\28@¤ØDzŎÙn‡á%¨1\11·ªVQ2ãd(Î\2\20@U\17¡Ïsg‡¼«µ\22‡ÎD%\25\7É7ß\24'ÝÉE‚ݸl\13†ÙŽ¸ð•\23¡¹Ð þ.6\1\8õ¤\26ÝNÅ\26'\31§þEêêÍ»\ +\5Ü#·&X´Zh\0169\15Ÿñ\24í æ)ehÙ´Ü\22R!h\24\28Fä\7[Ú\28Ô\27û—ÏÅãv(ˆå=\18ysWa\\9TÁtG®jºoóÃ*;ñîN\30OÚB\\öí\12\2\25,³ºž\27ïåq[ŠöFŠö´EsÛh}X%$ù!\ +ûESE¡¢TQ1›ð{îùL›\21IFÛŽ`T@äì—Vr™á¡ÀÄí|O~#p\27mÛ° ;\11²Úä¯aÿ5įAŒžƒR\14>1BqHÊ/ž„x«.§X¡\11\29\29\8£9â{N,\16²åZ¦ºKÅ~»v\23f­\"˜o`\29zUL÷®êã„x\8Ï\27Q%¡¥öFvî^,Ýy/œ›ÜË$ºpóqõ\17ï€9hõ¼\23jííÆíCéÛ¹·…so6\29÷bWTÌ\3ÕÀ\11\19zñ±È]\21:Ý>wîî\0053|Â݈O¸×‰Œ\4• v(7Ë\1W\29TüWOÉí\13ËíåûÞ¾\19u\27\ +“[ÄõÞ,\19o\ +È8bÛr³m¹ýz[Cfóƒ%ÒbÃÄÍùl7n×o+\26\7\22Û¬ù&ÚímÎímÎíƒ\14y$F¡?ž TŠK\11\9Ú\24×|4Ryí¦3»?‹z‹èqcÀ\ +ñ•_ÞÏ‹÷F騦\28¶C½ØÞ»zooy\6\127ïö¥@ü7k6Pðd«\11²#Þ*§\15.\31”ˆÊãC¢ßñ±[¯‡\16‚‘\ +˜Iµ=lÖ>¿¿w®¼äD¹‘ÌxXÃéPÑTQ¨(U¤òG\31epå4\25ΆÁî‰|䢹Íå\18˜(~\7qó\\v$\28²\8·5½ÝþðÒ]?XiD\0Tcq¢Ì(\127¶òög+ý‰gh\5è\"#\ +À\11FÌŒëÈ1ƒÐ¼C\20\12\\9\9uΟïü*\29\26‡·n·Ý \14\17š€¾8¾“ñƒ¿—\3²Þ¸»êcÉtÝ\19ºÃïH6'\25sPz[w\28ÐáÏd覸HèI{˜PÆ\6öô¶:ñ;ZãÉb9yÜ´‰Ïòn\ +óñÌû0nÕEé|¼tf÷„E”ïzŒu×\15¬ßõ\24뮲ʷ¿`y[Ê­¢TQ©ˆ”:/r\22\12SY\17Pæ€<àÄ«Z\13\ +^\22¡ÒÖ©óÈçö÷,åÔ{Š{éðÇØo>U)£‹ÈÀ€‡»ë·×åwà@Ö\"â„`\23Ìç¸ïƒc\27\02558ŽWÿç\31…9xr~\31~YÓð\0ÿÖÞüÜ0kGvs\15÷ƒ›0\3±\26œ\22¦ðk eê€Ègp½¹9ù%jî\29K¶Ù\5q2È?7h…¼\17\26&'V³£Vá@ª•/ŒЉ\28!ŸT`€×Ã\1>Ÿ\4µ×VŒ\0.oÍãÈ -D&ïÙ\0213=]ôfœžµ™iü2ÓÜ7qÓ¼šið Ë\ +°Ë\16Í´Œ,Xl\9\"W3ÙÕ/‚ÁL«\11I°\6„­-ˆ|#ñe¦Pd¿Íä¶\9dó´Ž!»M\3:\0X\0219®âviœ\25K\"L§Æ\8Üí1±Cš4HvÓ\17ÆtŠÄ\18Ç\ +w k<‚0†CA~A_3L婱\14Utº\"É\5ž±J¯Ê釾[Ý\0042BÇz\8m¾Cæ†6\13‚Ò·Ÿ²õƒ\15\0131-;éÀµ0Fawéh—Žû\25ž“yÂ\30Œ}ÎŒ¼á\0307€4àfË)Bµ‚H\24žÁ<ƒC“çðfÉ Á˜íZ€#§zfÇY†g±ÌWó¬\14f©ý\28¸Ì'F\31Ÿ\8=m•JO÷\31xoó¼]5ökø2×7_lÌ9¿^^\15ó\21l6_ÛhÜ×mÆy2v\30±Üì¦Ô&3O¶šyxò›­\25fop–\25\30o£ß\25\20ç±)Ã\18VÍ¡ý<°|`„'\0123“\1§šƒ‹PÞ̈\3|Ø\17Ã\25Ç9ƒäƒ-ó\5ù`T|kÆͶxL9Î%,\0292îUð4sEEA.c\30êÆF¶î\2š;µ]u\23ÔQçÐõÍ\13g™e\25°\19Ý`<ŸŽÉ\11ñ·ñ„€ñ:=Ø*l3~˜¢ñ“Åe\25'Í\28Â\26àÉ̳Žð|\5ã±X\1bŠ\14£3žc‰ÎÅ\17š\\œ ÚztD\9?r6¿°Û¸\20a’¹žˆ{ˆ\15g\0{³Ð¾g·a“\26D\19 \26˘\13\20X~s8ôè·Ê­è#%7É\"¬ààpD\29§‹ýP@Ö\12q’ü(ùÑo¬Óz\5%»…]sÈ\18MŠ85FÝ%ˆ¨T\26ñ;iÊ''©6a ëS\18ÊÂ$ÏôŽPõ.£ýÈ%û!ëäÜ´?‚Lwh(\11•nx[dy L/“á\3q³• öåu‰\30D\15Oèx“/\25—„EBËX…vó–s\23~êÖ\ +Š\19È\2\8m~ï½ÂÙí-,\11Óª&«5\25LËGVþ”µ.Ùþ’J×èË\"\ +ËÝ\127Í°Lf™,±µL+þ°\16/Ë@Ä/PüÛ\ +Ä\1ÒÂoåG\\,/&_\"x6K.ƒO|”i\23b`Q^°¸Ä\"÷]¤õ\"u\23‡¦\0¶\30–\19\26®»ÜfX'³r©Í¬l¾jsìeWçÓŒÜU%C\31¬³Ž\11\12+?Ô 
¯Á²\"ëŒ[ßLoLE~8Òš%#ïf•q×ñ`E1k\29Ø­ö\28€P[=£×dáªQ3cîL‡\8¤?²ÆÚ=\30š¤p U\7Zu U\7ZeúµYiwÎf=“\3»Øí†á&ἦ‡yù\25ë¾¼²ÙË7ó\ +Ö\11ÀüB\127óÊŽx\0rIö5Âp&+@]|]Ùš€Þa|\26NŒë\6\18b ­˜0+’¥¢@‚è‹@{˜ ú\23\20•l\22\\à÷†‹8\4 cÃTAÛ†M\17\30¨Nðlª\127a³Më\8C&/°ˆoS‡½ óï\14£è,±Ñ…Y³Ò„°cN‹w\0ús7!- ˜\16ÈaC²\21™©\17\3ð%1çY\0é.o±\0@«\28ÅCT4sÔÊ0\30Ú×\29\"JõQš\23)®º\8VdÔP¹÷@éVQI$/2@ë@–\20qä\"ƒ.24ªÔCT)E(…\1Dñ\19T\26ƒâæMË`Þª\27H}Ïè÷&&‘ L—\30æͼ•šÞ²Î;î>Îîïkg-gUñPNlA“q˜ÍM§°\11ìi6¥- \ +û†Ç4 …{2\27÷¸q¹\2¢\18„\27í\15P\29\9iõ4›M7’°&ù5™­—ÞqËD\0238^¹1&ëÞf«ª\9›Z\8PdÈé·Þ<9>Ž€>4oœ”\11$…™­És7™I°\"œ\0123lgÇÄ|e\1Î\19§§!qs\\Ðu\6·ëß`7äïf\"ñ\30ÿzÁêL”?ÇY'ã8\15ø<Á\19Iì&’¸©û\ +Y¦Š@ÕÔ‰ˆÅÄ\5‡ê\0163Ça¥gŒX*\6Kf‘o&1“DØ\17Obò\13\27\17\16UƒðÚ\27,šèpؘ-¥5*ݳ’\\@\25¹´ú0Ñ\30•™ÂˆoõÜ{ì\21œÛ’@<*b©E¨\23ê”iãˆ\127Å6j#ü+*GGUø¨|\12 \25î“Ù1Å>rk;ñ¿+ÀvrÝN®ÛÕŠìÁìD£ÙÙ\15’úž\29OÆ?wQÑ~'çí;Õ\12Œ´¹vŽÂùw\29eW\14Ü\7zÍ]½&@+h8Á\ +\24¿ªÜ´÷\3íj…v…ÎÞýa—'ìÊ}\0‰h\18Ñ´ð”Ê'ÂN•Þq§\15KlšpÊ®×$ΕHV6Í&邹#©$$ÚÕDñOºæÔƒ$颓.š§I\0178‘CgÁ\ +©\7³šÕ¤ù\\PÒ\29`\\2N“”GxËh\9ÖÈÂ1BrJ$I\23NŸ‰\27\ +ï\0j\11\19ŽÇÒ&vl”š:Úþ9EîàÖö”?‚5ÿ˜°…Íåaè\5È*²${—®÷¨„*¸v¸uX:\28ÿ¦›`ЪÀ³Îq‡´\19ý\0E:\20þ(v(~n&«GËÔ—,óçnž‹:<òæ:&·¾ÌA›\29ÍA\\ÙÃ\1271¡\4r°ùáõÐ:äò\7}\18,\11,\24m8°ÊAËL7x(\11\9–\14a\29w~ÉðRãÙwHudR”XzX–ñ8¡)Ì_pq\9‡C¿£ÚCÃf\14Rép°×IŽ?܉å\15âÊ\29÷hþ}Ò1\21rG™\\¨Byü\21b§ØÞ›\23…N¡÷\30™£E6ô—Ñ”þø¥¿L•BR<Í\12¤Š£Â\4I\6ndOÍó0)\4L\25è\9\ +ñRôº)ABèÝH\25Å*½\20•jž\2ýË5h2\29\29\"@^TäE€`ʦK(r¨B<\21’\14\ +í\26%S²N€ÇÀu\8\16RM‘FÀ¬î§\12Uò¸Î‚ã2ië†UJµ¬×{¶ôb\4<5fÆ5òJÑ•\23J\18®[\20xE\1\7A9»P(/¥é‚ÔP‡\14³(Uè\13;™«Ðc‘qx‘é\\—ô '\23åärãHUm=úí¦N¼\0284>\12\13d\17‘[¯\28¯Ò¯Öùæ‰õ«¥dam<\6\3Š€¹a[¹ÝJ\30¯êÞà佩DKgT¹m@ÇNˆï\19Îí\1ÏFõ\4\"Cï%ŒBS…ndF\9æà5fÞ¾5jaÔ\4ŠZR°­‰ÈÁ‚È­4–ðÐO\1ö‡áºvØhŒk§4Q’”¤NT¼\24›\19*¶*LFœ—m\8\ +‘K^\31¬ä\1_{‡_i2Õ$«œ“\23Ù‘&…ð\3or\9úG‡³è^Z4§‰”úàè\19åÑ?µ“d\13^6\16²dæ.\31“ªÚŒ•jSûÓ¦ÒxTUQÔ$cÕ›ç\28ÎÁ~\6\13…Ôr£^\"Ù·§A\7º~}h6¨c+Ì~K\16h\24@Ë\14ªÿ\23lãÖþÕFd\16$=¡+ƒSõf£î‡%½´Ø¦\127\127\26nßHù\8m\\@Ãï[Ö»¹e=\2\5\3°Ø]\11\14Ó\8n0\2Ôà’¸:E‚+k\12•ÔZÕ_F­ª·mÚ\5”\19³\13¨;g\29‘s7=\20xê\29æäa\0=šS\127³òñ9_н9û\31jçª\23'\31Ìq:bÿ\28\0259¼êTk×!+iÉ\4X%g:õÇÔÉ;\25ì߬@éSátÚ¢GÀ9(‡Ÿú7è¬NsU2ñZÁN”\ +\4é©è<õH=›\20lÝ‘O¦¯‰·/a‡á‰»Ù\\\\\15µêRc½òQw|á\12\23yù\ +½ƒƒ‘€¹xÇ\\¸õ\21­¹TÁ.G4\\‘Žl û´—\\æ¢_ªˆ—¬x)\127\\ºE:Ó›r{;2êM/}ë?‰Û‘ËîMÅ—q\6Õ}‚^L‰è¨Ž·òÚ4A9ºÕMݪ†·ìrc–M¨ÆU i\25)\8È]ý£–äQ?ߣy|~(m|~/ýC9¶Ï¯þ}ûü˜gùüòàsOpôƒÁ̵}¾‰Ê\25\18àW\127d€\ +\ +/‚Å}~õGAÓ„“8Þ÷©\19>ß¼³µ|a†ŸUÅàÙ\12¦Ç-Dz\0061­bZ?¿ƒà7O%\9\12#\12ãö\7¾é ÓêS_Kó*=Q„Æ\17ÞoZAIØÅ»3¤}`œº˜1\1HøŸŸ§q\9\14Ú\12(”óŽÃÇKÜñù™Í!;\21§å¥//ýX¥ïÈq\127ˆˆ¢C; yª|¾gÑ¿É\8VjÒ*\0I\18Yµv¨]Ã*5*\18’ð_òÙçw3\28\31«(º¥{C\17rpë¶iÝ6­ï|vÛ\11¥S”‰iJ¹¸o]1\19œ¾èó3~\9xÀJh|~z«¦\15þ\15\13\7úüà\0\0ü‹³ð¼á“†YÓ¸ÉçG™\3éÿ_!§ÒhŸþ>â  Š¹jF%X°i‘\18\"^Äûòó+\29~‡· ¼\28\24\5wið›\20\23|ÈJâ*}^;ý–äERÓÅ„ä}Så\0Â\8F.'\ +hÉwQn„LÃ\3©ý\15\14Ó\2þ", + ["length"]=6152, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=1144, diff --git a/tex/context/patterns/lang-de.lua b/tex/context/patterns/lang-de.lua index 1035c8728..2cccae77b 100644 --- a/tex/context/patterns/lang-de.lua +++ b/tex/context/patterns/lang-de.lua @@ -44,7 +44,165 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijklmnopqrstuvwxyzßàáâäçèéêëíñóôöü", - ["data"]=".ab1a .abi4 .ab3l .abo2 .ab3ol .ab1or .ack2 .ag4n .ag4r .ag2u .ai2s .akt2a .al3br .al2e .al5l4en .al4tei .alt3s .ampe4 .amt4s3 .an3d2 .anden6k .and4ri .ang2 .an3gli .angs4 .angst3 .an3s .an4si. .ans2p .ans2t .an4tag .an3th .apo1 .aps2 .ari1e .ark2a .ar4m3ac .ar2sc .ar4t3ei .as3t .as4ta .at4h .au3d .au2f3 .au4s3 .ausch3 .ax4 .äm3 .ät2s .be3erb .be3ra .be3r2e .berg3a .ber6gab .ber4g3r .boge2 .bo4s3k .bu4ser .by4t .ch2 .dab4 .da2r1 .da4rin .darm1 .da4te. .da4tes .de2al .de1i .de4in. 
.de1o2 .de3r4en .de1s .des2e .de3sk .des2t .dien4e .do2mo .do1pe .dorf1 .dü1b .dys1 .ebe2r1 .ehe1i .ei3e2 .ei4na .einen6g .ei2sp .ei4st .ei4tr .eke2 .el2bi .elb3s .em3m2 .en1 .en4d3er .en5der. .en2d3r .end3s .enn2 .enns3 .en2t3 .en4tei .en4tr .er8brecht .er2da .er4dan .er4dar .er4dei .er4der .er1e .ere3c .erf4 .er1i .er8stein .er8stritt. .er8stritten. .er4zen4 .es1p .es3ta .es5t4e .est2h .es3to .es5tr .et2s .eu1 .eu3g4 .eu3t .eve4r .ext4 .fe2i .fer4no .fi3est .fi4le. .fi4len .fi2s .flug1 .for2t .fs4 .fu2sc .ga4t .gd2 .ge5nar .ge3ne .ge3r2a .ge3r2e .ge3u .gs4 .guss1 .hau2t1 .he2 .he3fe .her3an .he3ri .he6r5inn .hi2s .ho4met .ia4 .im2a .ima4ge .im5m .in1 .in3e .ink4 .inn2e .inu1 .ire3 .is2a .jor3 .ka2b5l .ka2i .kamp2 .ka4t3io .ki4e .kle2i .kopf1 .ks2 .kus2 .le4ar .li2f .li4tu .li4ve. .lo4g3in .lo3ver .lus4tr .ma3d .ma2i .ma3la .ma2st .md2 .me2e .mel2a .men8schl .men8schw .men3t4 .mi4t1 .mm2 .näs1c .ne4s .ni4e .nob4 .no4th .nus2 .oa3 .ob1a .obe2 .oper4 .or2a .ort2 .orts3e .os5t6alg .oste2 .ost5end .os8ten8de .oste6re .ost3r .ozo4 .öd2 .pa4r1e .par3t4h .pf4 .ph4 .poka2 .pro1 .ps2 .ram3s .reb3s2 .re3cha .rein4t .reli1 .reli3e .res6tr .ri2as .richt6e .ro4a .ro3m2a .rö2s1 .rü1b .rü6cker6 .sali3e .sch4 .se3ck .sen3s .ser2u .se2t1 .sha2 .si4te .ski1e .spiege8lei .st4 .sto4re .sucher6 .tage4s .tal2e .tan4k3l .ta2to .te2e .te2f .te3no .te2s .te4st .th4 .ti2a .tid1 .ti4me. .ti4mes .ti2s .ti5ta .tite4 .to4nin .to4pl .to2w .tri3es .tro2s .ts2 .tu3ri .uf2e2 .ufer1 .ul4mei .um3 .umo2 .un3a2 .un3d .un3e .un3g .uni4t .un3s .uns4t .ur1 .ur2i .urin4s .ur3o2m .uro2p .ur3s2 .ut2a .ut3r .übe4 .ve5n2e .vo4r .wah4l .wa2s .wei4ta .wi4e .wor2 .wort5en6 .wor8tend .wor4tu .xe3 .ya4l .za2s .zi2e .zin4st .zwe2 2aa a1ab aa2be aa1c aa2gr 4a1a2n 4a2ar aa2r1a aar3f4 aart2 aas5t aat4s3 a3au a1ä a1b 2aba ab1auf ab1ä ab2äu 1abd ab1eb abe1e abei1 ab1eil 2abel abe2la a3ber ab1erk ab1err ab1erz ab3esse 2abet 2abew 1abf 3abfi 1abg 1abh 2abi ab1ins ab1ir ab1it 1abk ab1l 1a2bla ab5lag 1a2blä 2able ab4le. ab3li ab4lo 3a2blö a2blu 1abn a2bo. ab2of 1a2bon 2abor ab3r a3bra a4brä 2abrü 1abs 2abs. abs2a 2absar ab3s2i ab3sp abst4 2abst. ab3ste ab3sz 1abtei 2abu ab1ur 2abü 1abw 2aby aby4t 1abz 2aca 2ac1c a1cem 2ach. ach1a a1chal ach3au 2achb a1che a2ch1e2c ach1ei a4cherf a4cherk a4cherö a4ch3erw 4achf a1chi ach3l ach3m ach3n a1cho a3cho. ach1o2b ach1or ach3ö ach3r ach3su a4cht acht5erg ach2t1o ach8traum ach8träume. ach8träumen. ach6trit a1chu ach1u2f ach3ü 2achv 4ach1w a1ci ac1in a1ckar ack2en a2ckin ack2se ack3sl ack3sta4 a1cl acon4n 2acu a1ç a1d 2ada. a3d2ab ad2ag ada2m ad3ama a2d1an 3a4dap a3d2ar3 4adav 1a2dä ad1c 1add 2ade. ade2al adefi4 a2dein 2aden ade1r2a a2deri 4ade1s ade3s2p ades4s ade5str 2adf 2adh 4a3di adi3en 5adj 2ado ad2ob 2adp 2adq 2ad3rec ad4res ad3ru 2ads2 ad3st ad3sz ad2t1 ad4te ad4tr 2adu 2a1e ae2b ae2c ae2d a2ek a2ela a2ele ae2o3 ae2p 3a2er2o1 aes5t a2et a2ew ae2x af1a a2fak a2fan a3far af4at a2fau 2afe a2f1ec a2fent af1erl a2fex af2fl af4flu 2afi 2af3l afo1s a2fö af3ra af3rä af3re af3rö af3s2a af2sp af2t1a af2tei af4t3erl af2t3r af4t5re af2tur a2f3ur a1g 2aga ag1ab ag1a2d ag1ar ag1au ag2di ag2dr ag2du age1i age4na age4neb a2gent a4gentu ag2er age4ral 2ages age2sa age4sel age4si age2s3p ag3esse age4s3ti ag3gl 1aggr 3a2git 2a2gl ag4la a4glö ag2n ag4ne. ag4nu a2g3re a2g3ri ag4ro agsa2 ag4sam ag4set ags3p ag4spo ag3sta ag3ste ags4toc 2agt ag2th a2gund 2ah. 
2a1ha ah4at 2a1he a2h1erh ahe1s a1h2i ahin3 ahl3a2 ah4l1ei ah4l3erh ah2lö ahl3sz ah4n1a ahner4e ahnt2 1ahor ah1os a2h3ö ahr1a ah3r2e ahre4s3 ah3ri ahrta4 ahr6tri 2ahs aht3s a1hu ah1w a1hy aian3 aid2s ai1e2 aien3 aif2 ai3g4 a3ik. ai3ke ai3ku a2il ai2lo a1ind ain4e a1ing ain3sp 2ais ai2sa a3isch. ai3s2e aiso2 a3iv. aive3 a3ivl a3ivs a1j aje2 ajekt4o 2ak. 1a2k4ad 2akal 2a3kam 2akar ak4at 1a2kaz 2akb 2akc 2akd 4a1ke a2kef aken2n a2keu 2a1ki 2ak3l ak4li 4ako 2a1kr 4akra ak3rau 3akro 2aks ak3sh 2akta ak5tan 2aktb 2aktik ak2t3r ak5t4ri 2aktst 2a1ku a2kun 4a3kü 1akz a1la 2ala. al1ab ala5ch2 al1af ala2g al1age a3lal al1am al3ame alami5 al3amp al1ana a2l1ang al1ans al1anz a2lar a3lar. a3lare al2arm al3arr ala4s al1asi al1ass 2alat al1au al3aug a1lä al1äm alb3ein alb3eis al4berh al4b3erw al2b1l alb3li al2boh al2br alb3ru alb3s al2dä al2dr alds2t al3du 2ale 3a2l1e2b 3a2l1ef a4l1eh a2l1ei a4l3ein a2l1el alen1 al3ends a2leng ale2p al1epo a2l1erf a2l1erh al1erl 3alerm a2l1ert 3a2lerz a2l1esk ale4t al1eta al1eth a2l1eu a4leur 3a2lex alf4r 3algi al2gli 2ali ali4ene ali4nal al1ins a2linv alk1ar 1alkoh alk3s2 alks4t al2lab al2l3a4r al2lau al3lend all5erfa al3les 1allgä alli5er. alli7ers. al2lob 3almb 2alo a2l1o2b alo2ga al1ope al1orc a2l1ö al2ös 3alpe. 1alph al3skl al5s6terb al3sun al2tak al3tam alt3eig al4t3erf al2tre al2tri alt3ric al2tro alt2se alt4stü a1lu al2uf a2lum al1umb al1ur 4aly alzer4z al2zw 2am. 2am2a amab4 amad2 ama3g 2amä 2am4e 4ame. a2meb ame2n1 amer2a a2meri ame3ru a4mesh a3met a2mew 2amf a3mi. a3mie 2a3mir a3mis ami3ta ami3ti 2amk 2aml 2ammal am2mei am2min 2amml ammu2 a2mö amp2fa2 am3pr 2am2s am3sa am4schl am3str 1amt. am2t1a am2t1ä am4tel 2amtem am4t3ern am4tö am2t3r am4tre am2tu 2amu 2ana. 2anab ana3c anadi3 a3nak an1alg ana4lin 2anam 2anan 2ana1s4 an1äs 1anb 2anbu an3ch 2and. an3dac and4art andel4s ande2s an2dex an2d3rü and4sas and6spas and3ste and2su 2andu and1ur 2ane an3e2c a3nee an2ei. an3eif an1e4k 3a4n1erb an1eth 1anf 2anfi anft5s an3f2u 4ang. 3angeb an2g1ei an4g3erf an4g3erl an4gerw an4g3erz 2angf 2angh 2angie ang1l an2gla 2ango ang1r an2g3ra 4angs. ang4s3po 1anh 2a3ni an2i3d ani5ers. 3a4nim a4nins 2anj 2ank. an2k1an an2kei an3kl an4klö an2k3no ank1r an2k3ra an2k3rä ankt4 1anl 2anmu 2ann 3an3na ann2ab 3annä an3n2e an1od a3nol a2n1or a3nos a1nö 2anpr 1anr 1ansä 1ansc ans2en an2seu 2ansk an3skr ans1pa 1anspr an3s2z 2ant. an2t3a4r 1antá 1antei 3antenn an3t4he 1anthr 2anto 1antr ant3rin an2tro 1antw 2a1nu anu3s a1nü 1anw 2anwet 2anzb 1anzei 2anzg an2z1i4n 2anzs 1anzü 2anzw an2zwi 2ao ao1i a1op a1or a1os3 ao3t2 a3ot. a1ö a1p 2ap. 2apa 2ape a2pef a2pé a2pf ap2fa a3pfl a3phä a2pht 2ap3l ap2n a2pot 3appl ap3pu 2apr 2a3pu 2aq 2ar. a1ra a3ra. ar2ab ar3abt ara3d2 a2r3al a3ra3li a2r1ang a2r1ans a2r1anz a2r3app 2a2rar a2r1au a1rä 1arb 2arb. 
4arba ar2bau ar2bec 2arben 2arbi ar2bl 2arbr ar2bre 2arbs2 2arbt 2arbu ar2b3un 1ar1c ar2dro 2are a2rea ar1eff a4reg a2reh ar1ehr a2rein a4rek a3ren aren4se are3r2a ar2erf a2r1erh a2reri a2rerl are3u ar2ew 2arf arf1r ar2f3ra ar2gl ar2gn 2arh 2ari ar2ia ari3e4n ari3erd ari3erg ar1im arin3it ar1int a3riu ar2kal ark3amt ar2k1ar ark3aue ark3lag ar2kor ar4kri ark1s4 ark3sa ark3sh ark4tre ar2les arm2ä ar4merk ar3m2or ar2nan arn2e 2a1ro ar1ob a2r1o2d a2r1op a2ror 2arr ar2r3ad arre4n3 ar2rh arr3he 2arsa ar4schl arse3 ar3s2h 2arsi ar2st ar3sta ar3t2e ar2the ar3t2i artin2 2arto ar4t3ram art3re 2arts 2aru ar1uh ar1um a2rü 2arv arwa2 2ary ar2zä 2arze 1arzt ar2z1w as1ala as3au a2s1ä a2sca a3sche a4schec a3schi asch3la a2schm a3schu 4as2e a2seb a2s3e2m a3ses 4ash a3s2hi asin2g 2asis aska3s a3skop a2s1o2f as1or a2sö a2s1p as2ph as2pi as2po a3spu as3s2a as3s2e as4s3ei as3s2i as2s1p as2st ass3ti as3str as3stu 2as3ta a1s4tas as4tau as3te as2th as3ti as3to as4tof 2astr ast3rä as6t3re a2sü aswa2s 3a2syl a1ß aße2 aßen3 2a1t ata1 at1ab at2af at4ag a2t1akt ata3l a3tam at1apf at1au a2taus a2t1ä at2c at2e 4ate. a2teb at3eig a2teli 4aten a2tep ater3s2 ate2ru 4ates at2h at3ha 4athe1 3athl 4a3ti atil4s ati2st 3atm 4atmus ato4man 4ator a2t1ort at1ö 4atr atra4t at3rä at3re at3rom at2sa at4schn at2se at4set at2si at2so at2s1p at3ta at4tak att3ang at4tau at2tei at3t4hä at2t3rä att3s a3tub atu2n a3tü atz1er at4zerk at4zerw at2z1in at2zo atz3t2 at2z1w a2u 2au. 2au1a2 2aub au2bli au2blo 4auc auch3ta au2dr 2aue aue2b au5erein aue2s au2fa auf1an 2aufe. 2aufeh auf1er au4ferk auff4 3aufn 2aufs. 2auft. 2aug 4augeh 4au1i au2is 2auj aule2s au3lü 4aum au2mal au2m1o aum3p2 aum3s6 4aun au3n4a aun2e au2nio au1nu a4unz au1o 2aup2 aup4ter 2au3r2 au2s1ah ausan8ne. au2sau 2ausc au4schm au4scho 1ausd aus3erp au4s3erw 3ausf 1ausg 1ausl au2so au2spr 1ausr aus3s2 3aussag aus4se. 2auste aus5tri 1ausü 1ausz 2aut. au2t1äu 2aute au4ten4g au4t3erh 1auto 2auts4 2auu 2auw 2aux 2auz auz2w 2a1ü 2a1v a3v4a ava3t4 4avi a2vr 2a1w awi3e a1x ax4am ax2e 2a1ya a1yeu ays4 aysi1 ay3t 2a1z az2a az2o az2u ä1a ä1b ä2b3l äb2s ä1che äche1e ä1chi äch3l ä2chr äch2sp äch4st ä1chu ä1ck äck2e ä1d ä2da ä2d1ia ä2dr äd2s 2ä1e äf2fl äf3l äf3r äf2s äft4s3 ä1g äge1i äge3s ä2g3l äg2n ä2g3r äg4ra äg3str 1ä2gy äh1a 2ä3he ä3hi ähl1a ähl2e äh4l3e4be 2ähm äh3ne äh3ri 2ähs 2äh3t4 ä1hu äh1w ä1im ä1is. ä3isch. ä1isk ä1j ä1k ä2k3l ä2k3r ä1la älbe2 äl2bl 2äle äl2l1a äl2p3 äl4schl ä1lu ämi3en 2äml äm2s ämt2e 2än. än5de än2dr 2äne äne2n1 äne1s än2f5 2änge än2gl än2gr äng3se 2ä3ni änk2e än2k3l än2kr änk2s än3n4e2 2äns än2s1c änse3h ä1on ä1pa äp2pl äp2pr äp2s1c äp4st 1äq ä2r3a2 är4af är1ä är1c 4äre ä2r1ei äre2n ä2r1ene är2gr är1int är2k3l ärk2s är4ment ärm2s är1o2 ä1rö ärse2 är4si är2st ärt4e är2th ärt2s3 ä2rü 1ärz är2zw ä5s4e äse3g2 äser4ei äse4ren äser2i äse3t äskop2 äskopf3 ä3s2kr ä2s1p äs6s1c äss2e äs4s3erk äs2st ä4s3t2 äs4tr ä3su ä1ß äß1erk ä4t1a2 ä3te ät2e1i ätein2 äte2n ät2h ät1ob ä2t3r ät2sa ät2sä ät4schl ät4schr ät2s1i äts3l ät2s1p ät2s3t ät4tr ät2zw äu2br äu1c äude3 äu3el ä2uf äuf2e 1äug äug3l 4äul 2äum äu2ma äum4s5 ä2un äun2e äu1nu 2äur 2ä3us. 
äu4schm äu3se ä3usg ä3usk ä3usn äu2sp äus2s1c 1äuß äu2tr 4ä1v 1äx ä1z â1t á1n ba2bl 2babs bach7t4e backs4 b1a2dr 2b1af 3bah bah2nu bais2 ba2ka ba2k1er ba2k1i bak1l bak1r ba2kra 3bal bal2a bal4l3eh bal6lerg bal3th 2b1am ban2a 3b2and ban2dr ba3n2e b1ang ban2k1a ban4kl ban2kr 2banl 2b1ans ban3t b1anz bar3b bar3de ba2rei bar2en bar3n bar3zw 3bas ba3s2a ba2sc ba2st bau3g bau1s bau3s2k bau3sp ba1yo 3b2ä1c b2är b2äs 4b1b b3be bben3 bbens2 bbe4p bb3ler bb2lö bbru2c bb2s bbu1 2b1c 2b3d4 bde1s 3be. 3bea be3an be3ar be3as 3beb b2ebe 1be1c be2del bedi4 be1eh be2erk be1erl be1eta 3bef4 be3g2 2b1eier bei1f4 bei4ge. beik4 beil2 bei3la 2b1eime b2ein be1ind be1in2h bei3sc beis2e bei1st beit2s 3bek 3bel be3las be3lec be3lei be2l1en be2let be3li bel3la bel3sz bel3t4 1bem 1ben. ben3ar ben3dor be3nei 3ben3g be3n2i ben3n ben2se ben4spa ben4spr benst4 ben2su 2bentb b2enti ben5t4r b1ents 2bentw ben3un ben3z2 be1o be1ra ber3am be2ran ber4ei. be4r3eiw be4rerk bere4s ber6gan. ber4in. ber3iss ber3na b1ernt be2rob be3rop ber3st4a be3rum 3be1s bes2a be2s1er be3slo bes2po bess4e b3esst. bes3sz be6stein be4s3tol be3s4ze 3bet be2tap be3tha be1ur 3b2ew 2b1ex 1bez 2b5f4 bfal2 2b1g2 bge3 bges4 2b5h2 bhut2 1bi bi3ak bib2 bibe2 bien3s bie2s bik2a bi2ke. bi2kes 3bil bil2a bi2lau 4b1illu bi2lu 2b1inb bin2e 2b1inf bin3gl 2b1int bi2o1 bio3d bi3on biri1 bi3se b1iso bi2sol bi2sp bis2s1c bi2s5t b2it. b2it2a b2ite bi2tu b2i3tus biz2 4b1j bjek4to 2b1k4 bl2 2bl. bla3b4 b3lad b2lanc 3blat b2latt 2b3law b2le 3ble2a b3leb 2b3leg 2b3leid b3lein 3blem 3ble4n b3lese ble3sz b4let b3leu 2blich 3blick b2lie 2blig bling4 b4lis b2lit 3blitz b2lo b4loc b3los 2blun 3blut 3blü 2b1m 4b3n2 bni2 bnis1 bo4a bo5as b1ob3 bo2bl bo2br bo2c bo3ch2 bo3d2 boe1 bo2e3i 2b1of bo3fe bo1is bo2l1an 3bon. bond1 bon2de bo2ne 3bons b1op bo1r2a bo4rä bor2d1i bor2d3r bo2rei bo4rig bor2s b1ort bor2t3r bo2sc bo4s3p bote3n4e bo3th bot2st bo2xi bö2b3 2böf b1öl 2b1p2 bpa2g 2b1q b2r4 2br. b4ra. 2b3rad b4rah b4ra3k bra1st4 3brä brä4u 2bre. 3brea 6b5rechte 2b3ref 2breg b3reif 3brem 2b3rep b4rer 2b3riem bri2er 2brig b4rio b3roh 2b3rol b4ron b4ruc bru4s brust3 bru2th 3brü 4b1s b2s1ad b3sand bs3ar bsat2 b3sä b4sär bs2äu b5sc bs2ca b6schan b6schef bs4cu b3se. bse2b b3sel. bse2n1 b4s1erf bs3e4r3in b4s1ers b3s2es bsi4t bs2ku b4sl b2s1of bso2r b2sö bs2pl b3s2pu bss2 bs2t bst1a2b bst3ac bst1ak bs3tät bst3er b2stip b3sto b4stod b3stö b4strac b2s3trä bs3treu bs4tri b3stü b4stüb b2s1un 4b3t btal3 btast3r b5te b4th btil4 bt4r b4ts2 btü1 bu2chi bu2e3 bu2f bu3li bul2la 2b3umk bung4 b2urg bu3r4i bu2sa bu4s3cha bu4schl bu4schm bu4schw bus1er bu2sin bu2s1p bu2s1u bü1c bügel3e 2b1v 2b1w 3by1 by3p bys2 2b1z2 bzeit1 1ca 2c1ab ca2ch ca2e3 ca3g4 ca1h cal3t 3cam c4an ca2pe 3car car3n carri1 ca3s2a3 cas3t ca3t4h ca1y2 cä3 cäs2 2cc c1ce c1ch2 c2d2 c3do 2cec ceco4 ce2dr 2cef ce1i 2cek 1cen cen3g 1cer cere3 ce3sh 1cet 2ceta ce1u 1cé 2c1f c4h 4ch. 2chab ch3a2bi cha2ck 2chaf 2ch1ak ch2anb 3chanc ch1ang ch3anst 4chanz 1chao 4char. 1chara 3charta cha2sc 3chato 4chatu ch1ärm ch1äs 1châ 2chb 2chc 2chd ch3e4ben 1chef 3chef. che4fer 3chefi 3chefs 4chei ch1eim 4chelem che4ler 4chents 4chentw cher3a che3rei 6chergeb cher6zie ch1ess 2cheta 2ch1e4x 1ché 2chf 2chg 2chh 1ch1ia 2chic chi3na 4chind 3chines 2chinf 2chinh ch1ins ch1int 2ch1inv 1chiru 2chj 2chk 2chl2 ch2le ch3lein ch2lu 4ch2m 2chn4 chner8ei. 2chob cho2f ch1off ch1oh ch1orc 2chp ch2r4 4chre chre3s ch3rh 1chron 4chs ch4sper 2cht 2chuf 2chuh 2chum 2ch1unf 2chunt 4chü 2chv 4chw 1chy 2chz ci1c ci2s c1j c4k 4ck. ck1a 1cka. 
2ckac 1ckag 2ckal 2ck3an cka4r1 2ckau ck1ä 2ckb 2ckc 2ckd 1cke 4ckeff 2ckeh ck1ehe 4ck1ei 4ckense ck1ent 4ckentw cke2ra ck2ere 6ckergeb ck1erh 4ckerhö 4ckerke ck2ern 2ckero 2ck1err 4ckerze 2ck1ese 2ckex 2ckf 2ckg 2ckh 1cki 2ck1id ck1im ck1in 3ckis 2ckk 2ck3l 2ckm 2ck3n ck1o2 2ckp 2ck3r 4cks ck4stro 2ckt ckt2e 1cku 2ck1um3 2ckunt 2ck1up 2ckv 2ckw 1cky 2ckz c4l2 clet4 clo1 1clu c2m2 3co co2c co3ch co2d2 co3di coff4 coi2 co1it co2ke co2le col2o com4te. comtes4 con2ne co2pe co1ra cor3d co3re cos3t co4te cô4 2cp 2c1q 1c4r2 cre2 cre4mes cry2 2cs cs2a c2si c1s4tr 4c1t cte3e cti2 cti4o ctur6 3cu cu2p3 cussi4 1cy 2c1z 3da. da1a 2d1ab 3d2abä da2ben 3d2abl da2bre dab4rü 2d1ac d2ac. dach3a da2cho dach1s 4d3achse d1af d1ag dagi2 dah3l da1ho 3d4ai da1in da1is dal2a 2d1alar dal3b2 da3lö d1alt d1amma 2d1ammä damo3 d4amp dampf8erf 2d1amt d2an. 2d1ana dan4ce. 2d1an3d2 d1ang 2dange dan4kl dan5kla dan2k1o dan2kr 2d1ans 2dantw 2danw d2anz. 4danzi 2d1ap d2aph 4dapp da2r3a 2darb2 3d2arl dar2ma dar2m1i da2ro d3arr d2ar3s d1art da2ru d2arw da1s da3s2h das4t dat2a dat4e2 da3tei date4n 4d3atl 4datm dau3e 2d1au2f 2dauk 2d1aus3 4daush 2d1äh 2d1ämt 2d1änd 2d1äng 2d1äp 2därz dä2u dä3us 2d1b4 dbu2c 2dc d1ch dco4r 2d1d2 ddar2 d3dh d5do 1de de2ad de3as de3a2t de3b4 2d1e4ben 3de1c de4ca. de2cka de1e4 2d1eff deg2 de3gl dehe2 de3ho 2d1ehr d1ei d2eic 3d2e1im dein2d dein2s de2l1a4g de4l3aug del1än del1ec delei4g de3lein 2delek 2delem 2delfm delle2 del4leb del4lei de2l1ob de2lop de3lor de2lö del4san del5sc del2s5e del2so del2s1p del5ster del3t4 dem2ar 2d1emp d2en. dend2 de4n3end 4denerg den3g d2enh de2ni den4k3li 4den4sem den4sen den6s5tau den3th 2dentw de1nu de1on depi2 d4er. dera2b de1rad de2rap der2bl 2derdb de2re2b de4reck der3edi de4r3eis derer3 de3r4erb de3r4erf de4r3ero derer4t 4d3erhöh 3derie derin4f 4derklä der3m2 4derneu 4d3ersat der3tau der6t5en6d dert4ra de3ru de4ruh de4rum des1 de2sa de3sac desa4g de4sam des2äc de2seb de4seh de2sei des3elt de2sen1 de4set de2sin de2sor de2sp des3s2 dest5alt de2sto dest5rat de4stre des4tum de2su det2 deten4t 2d1etw de1un de1url de3us de2xis 2dexp 2d1f4 2d1g2 dga2 d2ge. dge4t1e d3gl 2d1h2 dha1s4 d2his 1di di4ab di2ad di4am 3dic di1ce di2e di3e2d die4neb di3eni di3ens. die2s3c diet3 die2th dige4s dik2a dil2s5 2d1imb din2a 2d1ind 2d1inf 2d1inh 2d1in1it 4d3inner 2d1ins 2d1int di2ob dion3s di1p di4re. di2ren di2ris 2d1irl di2sp di3s4per 2d1isr dist2 di2s5te di2ta di4teng di4t3erl di4t3erm di4t3ers di2th di4t3r dit3s di2tu di5v di3z2 2d1j 2d1k4 4d1l2 d3la d3le dle2ra dli2f dl3m dl3s 2d3m2 4d5n2 dni2 dnis1 d1ob d2oba 2dobe dob4l d2obr 2d1o2f dole4 doll2 do2mar do5n4a doni1e do2o 2dope 2d1opf d2opp d2o3r4a 2dorc 2dord dor2f1a dor2fä dor2fl dor2fr 2d1org do2rie d2orp 2dort dor2ta d2os. dos3s dost1 dot6h do3un d1ö dö2l1 3d2ör dö2s1c 2d3p2 2d1q d2r4 3d4ra. 2d3rad drag4 2drahm d3rai 3d4ram d3rand 2d3rast 2d3rauc 2dräd d4räh 2d3rät 2d3räu 4dre. d4rea. d4reas 3d4reck 2dreg 3d4reh 2d3reic d4reiv 4drem 4d3ren 2d3rep 4d3rer 4dres. d4resc 2d3rh d3ri d4ri. 3d4ria 2d5ric d4rid d4rie d5rieg d4rif d4rik d4ril d4rin. 
3d4risc 3d4rit 4dritu d3rob d3roc 2d3rod d4roi 2d3rot d3rou 2d3rov d3rö drö2s1 d5rub 3d4ruc 2d3ruh drunge3 2d5rut drü1b drü5cke 2d1s d4s1amt d2san ds3assi d2sau2 ds1än 4dsb d4schin d2s1e2b d2s1ef d3sei ds2eig d4seins d2s1eng d2s1ent d2s1erf d2serh d2s1erk ds1err d2s1erz dse4t d4s1eta d3s2ha d3sho d2s1im ds2inf d3s2kan d3skul 4dsl d2s1op dso2r ds1ori d2sö d2s1par ds1pas d2spä ds2po d2spro ds2pu dss4 dst4 ds3tab d4stag d4s3täti d2ste d4stea d3stei d3stell d4stem d3s4tern ds2ti ds4til ds4tip ds2tu ds1ums d2sun ds2zen 2d1t dta2d d5tea d2th d4thei dt3ho dto2 d3tö dt3r dtran2 dt5s2 d3tü 1du du1alv du1ar dub3l du2bli du2f 2d1ufe 2d1uh du1i 2d1umb 2dumd 2d1u2m1e 2dumf 2dumg 2d3umk 2duml d2ump 2dumr d1ums d2ums. 2d1umv 2d1un3d dund2a 2d1unf dung4 dun3ke dun2kl 2dunr dun2s 2dunt du1o dur2 2d1url 2dursa du4schn du4schr du4schw dus3t 2düb 3düf 3dün 2d1v2 2d1w dwa2 dwest3 dy2s 2d1z 2e1a e3a2b eab3l ea2c ea3der eadli4 ea2dr ea2g4 ea3ga ea4ge ea3gl eak1 eakt2 ea2la e3alei e4aler. e2alti2 eam3 eam1o ea2na e2ano e3ar. ea2ra e3a4rene e3arr e3arv e2as eas3s eat4e2 eater1 e3ath ea5tr eat3s2 e3at5t4 e3au2f e3aug eau1st e1ä2 e1b 2eba e3b2ak 2ebed ebe2i 2ebel eb2en ebens3e eben4sp ebert4 2ebet 2ebl eb3ler eb4leu e3blie eb3lo eb2lö 2eb2o ebot2 ebö2s 2ebr eb3rei eb4ru eb2s1 eb6sche ebse2 ebs3pa eb3sta eb4stät ebs3tem ebs3t2h eb3str e3bu ebu2t1 2e3ca e1ce ech1ä 2e3che ech1ei e6ch5erzi e1chi ech3l ech3m ech3n e2cho. ech1o2b e2ch3r ech3ta ech3t4ei e1chu ech1uh ech1w e1ci eci6a eck3se 2eckt 2e1cl 2eco eco3d e3cr ec1s4 2ect e1d e3d2a ed2dr ed2e ede2al e3dei ede3n2e edens1 eden4se eden4sp ede2r eder3t2 edi4al e3d2o ed2ö eds2ä ed2s1es ed2s1o ed2s1p ed2s3tr ed2su edu2s e3dy3 4ee ee3a2 eeb2l ee2ce ee1ch ee2cho ee2ck eede3 eed3s2 ee1e e1eff eef4l eef3s eeg2 e1ei ee1im eein4se eel2e ee2lek ee3len e1emp e1en eena2 ee4nag e2enä e2enc ee3ni e2eno een3s e1e2pi ee1ra e1erbt e1erd ee3r2e ee4r3en4g eere2s ee4ret e1erk ee1rö eer2ös eert2 e1ertr ee3r2u e1erz ee3s2 ees3k ee3ta ee4tat ee2th ee1u2 eewa4r e1e2x e1f 2ef. 2efa e2f1ad ef1ana ef1ar e2fat e2fäu 2efe e3fe. e2f1e2b ef1em e2fent ef2er 2eff. 1effi ef2fl 2efi e2f1i2d e2f1ins efi2s 1efku 2efl e3f4lu 2e3f2o e3fra ef3rea ef3rol ef3rom ef4rü efs2 ef3so ef3sp ef2tan 2efu e2fum 2efü e1g egas3 egd4 e3ge ege4n3a4 ege2ra ege4str ege1u e2glo e2gn eg3ni eg4sal eg4se4r1 eg4sto eg2th 2egu egung4 egus3 2e1ha eh1ach e3h2al eh2aus 2e1hä e1he eh2ec eh1eff eh2el ehen6t3 1e2hep e3her ehe1ra ehe3str e1hi eh1int eh1lam eh1lä ehle2 ehl3ein eh4lent eh5l2er eh2lin eh3lo ehl2se 2ehm eh3mu e1ho e3hol ehr1a2 ehr1ä ehr1e2c eh2rei ehr3erl ehr6erle ehre3s eh3ri eh1ro2 ehr1ob ehr1of ehs2 eh3sh eh1ste 2eht e1hu e2hunt e1hü eh3üb eh1w e1hy 2ei3a2 4eib ei2bar ei2bl eibu4t ei4b3ute ei2cho e2id ei2d1a ei3dan ei3de ei4d3err 2eidn ei3dra ei1e 4eien3 eienge4 1eifr ei3g2a 4eigeno eig2er 2eigew ei3gl 1ei2g3n 2eigru 2eigt 2eigu eik2ar ei3kau eik4la e4il 2eil. ei2lar ei2lau 2eilb eil3d ei4lein eilen1 eil3f4 eil3ins 2eiln 1eilzu ei2m1a4g eim3all ei2mor e1imp eim2pl ei2n1a ei4nas ei4nä ein3dr 2eindu ei4neng ei2neu 2einfo ein4fo. ein4fos ein3g2 ein4hab e1init ein3k ein6karn 3einkom ei2n1o2 3einsat ein6stal ein4sz e4inver ei3o2 ei1p eip2f 2eir ei3re e1irr e2is. ei2sa4 ei6schwu ei4s3erw eis2pe eis4th ei1sto ei2sum e2it ei2tab ei2tan ei2tar 2eitä ei3te ei2th ei2tro eitt4 eit3um 2eiu 2e1j e1k ek2a 1ekd e3ke. 
e3ken e3kes e3key e3k2l ek4n ek2o ek4r ek1s4t 2ekt ekt4ant ekt3erf ekt3erg ek4t3erz ekt2o ek5tri ek2u e3k2w e1la ela4ben el3abi el2abt ela2c el1af ela2h e2l1ak e2l3a2m el4ami el4amp el1ans el1anz 2elao e2l1ap e2l1a2r el3ari ela4s el1asi el1asp el2ast 2e1lä 3elbis el2da eld5erst el4d3erw eld3s2 2ele. elea2 ele2c 2eleh 2elei e6l5eier. e2l1ein e3leine e4leing 1elek e2l1el 1e2lem e3lem. el1emp 2e3len. e4lense e2l1ent e3lep el1erd el1erf e4ler4fa e2l1erg el1erk el1erl e4ler4la e4l3ernä e2l1err 2eles2 el1ess e4l1e4ta e3leu 2elev ele2x 1elf. el3fe elf4l 1elfm 1elft elgi5er. elgi5ers 2eli e2l1id e3lie eli2ne el1ita el3kl el3lan el3le el5le. ell3ebe el4l3ein ell3eis el3lin ell3sp elm2a 2eln el5na 2elo e2lof e2lol elon2 el1ope e2l1or elo2ri el2öf elö2s el2sum elte2k elt3eng 3eltern elto2 el2t3r elt3s2k elt3s2p 2e1lu e2l1um el1ur el3use e1lü e2lya 2elz elz2e el2zwa e1m 2ema e2m1ad ema2k e2m3anf e2m1ans 3emanz em2d3a2 e3m2en emen4t3h e6mentsp e2m1erw eme2s 1e2meti e2m1im emi5na em1int emi3ti 2emm emma3u em2mei e2mop 3empf em3pfl em2sa em2spr em2st em3t2 1emul 2emü e2n1a 4ena. 2enac e3nad e4naf 4enah e4nak ena3l2i 4enam en4ame e4nand en3ang en3are en2asc 4enat en3att e3naue e2n1är en1äu en4ce. en3d2ac en2dal en4d3ess end4ort end3rom end3si end3s2p end3sz end2um 2ene. ene4ben en1e2c e2neff e4nein e2n1el ene4le 2enem 2enen e4n1ent en4entr 4e3ner. e2n1erd e2nerf 1e2nerg e4nerh e4nerk e2n1erl e4n3ermo 4enern e2n1err e2n1ers e2n1ert e2n3eru e2n1erw e4nerz 2enes e4n3ess en3f enf2a enf2u 1engad 3engag enge3ra en3g2i en2gl en3glo 1engp eng3se e3ni. e3nic e2nid e3nie eni3er. eni5ers. e2n1i4m e2n1in e3nio 2enis e3nit 2eniv en3k2ü e2n1o2b enob4le e2nof en1oh e3nol eno2ma en1on e2n1op e2n1o2r enost3 e3not eno2w 2e1nö en1ö2d en3sac en2sau en5sch4e en2seb ens2el 1ensem ensen1 en3ska en3sp ens2po enst5alt en4s3tät 2ensto e4nt ent4ag 1entd en2teb en4terb 1entfa 3entga en2thi 3entla 1entn en4t3rol 3entspr 2entü 1entw 4entwet 1entz en1u 2enut e1nü enü1st 4enwü e1ny en4z3erf en4z3erg en4z3erk enz3ert e1ñ 2eo e1o2b1 e1of eo2fe e1oh e4ol e1on. e1ond e1onf e1onh e1onl e1onr e1ons e1ope e1opf eop4t e1or e3or. e3orb e3ors e3orw eo1s2 e3os. eo3ul e1ov e1ö2 e1p e3pa epa2g e3p2f4 1episo ep3le 1e2poc ep2pa ep4pl ep2pr ept2a ep2tal e3pu epu2s e1q er1a e3ra. e3rad. er3adm eraf4a era1fr era2g e1rai er3aic e2rak e1ral er3all eran3d e3rane er3anf e2ranh er3anm e1rap er3apf e2rar e3rari e1ras e2r3a4si era2ß e2rath e3rati e2ratm e1raub er3aue erau2f er3aug e1raw e1raz e1rä er1äh er1äm erb2e er3br erb4sp er1c er3chl er3da 1erdb er3de 2erdec erd3erw 4ere. er1eb e3rech er3echs er1e2ck ere4dit er1eff e2r1e2h 4e3rei. er1eig e2rein e4r3eis. ere2l er1ele 2e3rem 2eren 4e3ren. e3rena e4rense e4r3entf e4rentn e3renz eren8z7end 2erer 4erer. e2r3erf e2r1erh e4rerl 4erern e3rero er1err er1ers e2rert er1erw 2eres er1ess ere4t er3eti er1eul ere4vid erf2e erf4r 4erfür 3ergebn 4ergehä erg3el4s3 1ergol erg3s ergs4t 1erhab er3he er3hu 2erhü 2eri e2riat e3rib 4e3ric 4e3rie eri3e4n3 e3ri3k4 4e3rin. er1inb e2r1ini er1ink er1int e3rio er1ita 2erk. 1erklä 2erkre erk3t 3erlebn ermen4s erm3ers ern1os e1ro e3ro. 
er3oa er1o2b er1of er1oh e3ron er3ony e2r1o2p e4ro2r e3ros e3row er1ö erö2d 2erök er3p4 er3rä 2errü ers2a er3se ers2i er3sk er3smo er3sn er3sp er3sz ert2ak er6terei er4t3erf er4ters er2tho 4erti ert3ins erts2e 2eru eruf4s er1u4m er1und erung4 er1uns er3uz erü4b 3erweck 6erweis es3ab es2ach es3ak es3anz e3s2as e4s3ato 2esb es2c es3cap e3sce esch2 e3scha e2s3ein es2el ese4ler es3eva 2esf 4esh es2har es2hu es2id esi1er e2sil es3int es2ir es2kat e4ske es3kl es3ku e4sky es3l es4log 2esm es2ort e3sot es2ö 2esp e3s2pek e3spi e3s2por e3s4pra 2esr es2sau es3sc es3se 4essem ess4e3re ess3erg 2esso es2sof es2s1pa es2spu es3str es3stu estab4b est1ak e1star e4starb 1e2stas e1stat e1s2tec e3stel es4t3eng es4t3erh es4t3ess e1stil e2stip estmo6de est3ori e1str es4tri es3trop e1stu es4tü e2s1um es3ums es3w e3sy es3z e1ß eße3r2e e1t etab4 et1am 3etap et4at et1äh e3te e4tein et2en eten3d2 ete2o eter4hö eter4tr et2h et3hal et3hü e3ti eti2m eti2ta 2e3to eto2b e4t1of etons4 e3tö 2etr e4traum e6t3rec e2tres et4rig etsch3w ets2p et3su ett1a et2tab et2t3au et2tei ette4n1 et2th et2t3r et4tro ett3sz et4t1um e3tü etwa4r 2etz et2zä et4z3ent etze4s et2zw eu1a2 eu3erei eue6reif eu2esc eu2ga eu4gent eu3g2er eu4gla eugs4 euil4 eu1in 1euk eu2kä e1um e3um. e3umb e3uml e3um2s eum4sc eums1p eum3st 2eun eun2e eu4nei e3un2g eu2nio eun3ka eu1o2 eu1p eur2e 3eu3ro eu3sp eust4 eu1sta eu1sto eu1str 2eut eut2h eut6schn 2eux eu2zw e3ü 2e1v e2vela e2vent 4ever eve5r2i e3vo e1w 2ewa e3wä ewä2s 2ewe e2we. ewinde3 e3wir ewi2s e3wit ew2s 2ex. ex3at 1e2xem ex1er e1xi e2x1in 1exis ex3l 3exp 2ext. ex2tin ex2tu 2exu 2e3xy ey1 ey4n eys4 e1z e3z2a e2z1enn e3zi ezi2s ez2w é1b é1c é1g égi2 é1h é1l élu2 é1o é1p é1r é1s é1t2 é1u2 é1v é1z2 è1c è1m è1n è1r ê1p ê4t 1fa fab4 f1abe fa2ben fab5s 3fac fa4cheb facher5f fa2ch1i fa2cho f1ader fa2dr f4ah faib4 fa2ke f2al fa3l2a fal2kl fal6l5erk fal6scha fal6schm fal3te falt2s 2fanb 2fanf fan2gr 2f1ank 2fanl f1anp 2fanr fan3s 2fanw f1an3z 2f1ap f2ar far2br 2f3arc 3fari farr3s 3f4art 2f3arz fa3s4a fa3sh f3at fa2to3 2f1auf f3aug f1ausb 3f4av fa2xa 1fä fä1c fäh2r1u 2f1ärm fä2ßer f1äu 2f1b2 2f1c 2f3d4 fdie2 1fe featu4 fe2c f2ech 2f1eck fe2dr fe2ei fe1em fef4l feh4lei f4eie 2f1eing 4f1einh fe1ini 2f1einw f1eis fek2ta fe2l1a fel2dr 2fe2lek fe2l1er fe2les fe2l1o fel4soh fel3t f2em. fem4m 2femp fe2nä fen3g fe2no fen3sa f1ent f2er. fe1ra fer2an fe4rang fe4r3anz fe2rau ferde3 f2ere fer2er fer3erz f1erfa f2erl. 4ferneu f4erpa f2ers. f2ert f1erw fe2st fest1a fest3ei 2f1eta fe4tag 3fete fet2t3a feuer3e feu4ru 3few f1ex 2fexp 3fez 1fé 2f1f ff3ar ff1au ff2e ffe2e f2f3ef ff3ei ffe1in ffe2m f2f3emi ff4en f2fex fff4 ff3l ff4la ff4lä ff4lo f3flu f3flü f3f4rä ff3ro ff3rö ff2s ff3sho ffs3t ffs4tr 4f3g2 fge3s 2f1h2 1fi 3fi. fi3at fid2 fien3 fi1er2f fi2kin fi3kl fik1o2 fi2kob fi2kr fi2l1an fil4auf fil3d fi2les filg4 fi3li fi4lin fil2ip f2ina fi3ni fin2s fin3sp 2f1int fi2o fi3ol fi2r fi3ra 3fis fis2a fisch3o fis2p fi2s5t fit1o2 fi2tor fi3tu 3fiz 2f1j 4f1k4 f2l2 2fl. f3lad f3lap 1flä 3f4läc 2f5läd f3län 2f3läu 2f3leb f4lee 2f3lein f3ler f4lé f3li. 3f6lim fli4ne 2f5lon 1f4lop 1f4lot flo2w f3lö f4luc 1f4lug flu4ger f4lü 2f3m2 fma2d 2f3n2 fni2s 1fo fob2l 2f1of foli3 fo2na fon2e fo2nu 2f1op fo1ra 4f3org fo3rin 3form for4m3a4g forni7er. for4st for4tei for2th for2t3r for3tu 2f1o2x 1fö 2föf 2f1ök 2f1öl för2s 4f1p2 2f1q f2r2 f4rac frach6tr f5rad fra4m f3rand f5rap 1f4rän 2fre. 
f3rec f3red 2freg freik2 frein4 f3rep f4reu 2f3ric fri3d fri2e 2frig 1fris f4risc f3roc 1f4ron fro2na fro2s f3rot f3ru f3rü 4f1s fs1all fs4amm f2san fs3ar f2s1as f2sauf f2saus f2saut f3sc f4sce f4schan f4schef fs4co fs1e2b f4s1ehr f2s1em f2s1ent f2s1er fse4t f4s1eta f3si f2si2d f3s2kie f2s1o2 f3span f2s1pas fs1pen f2sph f3s2pl f3s2por fs1pr f2spre fs2pri f2spro fs2pru fs3s4 fs2t f2stas f4s3täti f4stech f3stei f3s4tel f3stern fs3th f2stip f3st4r f4s3tres f4s3tüte f2s1un f2sü f3sy 4f1t f4ta. f2tab ft1a2be ft1af f2t1al ft1an ft1ar f3tat ft1e2h ft1eig ft1eis f4t1ent f4t1e4ti f2th f4thei ft3ho ft1op f3tö f2t3ro f2t3rö f3t4ru ft2s1 ftsa4 ft4sam ft3s2c ft4sche ftse4 ft4seh fts3el ft3st ft4s3tan ft4s3tä fts2ti ft4stri f2tum ft1url f3tü ftwa4 ft3z2 1fu 3fug 3f2uh f1um 2f1unf fung4 2f1u2ni fun2kl fun2ko fun2k3r 2f1unm 2funt f2ur fu4re. fus2sa fus2s1p fus2st fu2ß1er 3fut 1fü 2füb fü2r 2f1v 2f1w 1fy 2f1z fz2a fzeiten6 fzei8tend fz2ö fzu3 fzu4ga 3ga. 2gabf ga2b5l gab4r 2gabz ga1c 2gadl 2ga2dr ga1fl ga3ge 5gai ga1k ga2ka gal2a g4amo 2g1amt 2ganb gan3d gan2g1a 4gangeb gan2gr 2ganh 2g3anku 2ganl g3anla 3g2ano 2ganw ga1ny 2garb 2garc 3gard 2g1arm ga3r2o g1arti ga3ru 2g1arz ga2sa gas3ei ga2si ga2sor ga3sp ga4spe ga4spr gas3s gas4ta gas5tan ga4ste gas4t3el gat2a 2gatm gat4r gau1c 2g1auf g2auk g1aus 2g1aut 2g1äp 2gärz gäs5 gä4u 2g1b2 gber2 gbi2 gby4t 2g1c 2gd g1da g2d1au g2d1er gd1in g1do g1dö g1d3r gd3s2 gdt4 gd1u 1ge ge3a2 geb2a gebe4am geb4r ge1c ged4 ge1e2 ge3ec ge2es gef4 ge3g2l ge1im ge2in. gein2s ge2int gein2v ge1ir ge2is 2g1eise2 gei3sh 2gek. g2el ge4lanz gelb1r gel4b3ra gel6ders ge3le ge4l3ers ge4less gell2a ge3lor gels2t gel3ste gel3sz gel3t2a ge3lum ge3lü gelz2 ge3mi gem2u 3gen ge3na ge4nam ge4nar gen4aug gen2d1r gen1eb ge3nec gen3eid gen3ern gen3g gen3n gen4sam gen3sz 2g1entf gen3th 4gentw geo2r ge1ou ge3p4 ge1ra ge2rab 4g3ereig ge4reng ge4ren4s ge4r3ent ger2er gerin4f ger4inn gerin4t germ4 ger3no ge1r2ö ger4sto ge3r2u g1erwa g2e1s2 ges3auf ge3sc ges3elt ge2s3er ge3si ges4pi ges3s2t gest2 ge3ste ge4s3ter ges3th ge3t2a 2getap ge5tr ge3t4u ge1ul ge1ur 2g1ex 2g1f4 4g1g gga4t g3ge gge2ne g2g3l gg4lo g2g3n gg4r 2g1h 4gh. gh2e 3g2het 3g2hie gh1l 3gh2r g2hu gh1w gi3alo gie3g gi2e1i gi2el gien2e1 gie1st gi2gu gi2me. gi4mes gi2met 2g1ind gi3ne gin2ga 2g1ins 2g3isel gi3t2a gi3tu gi4us 2g1j 4g3k2 4gl. g1lab g1lac 3glad g2lade 2g1lag 3glanz 3g2laub 2g1lauf 3glät 2gläuf g2l4e 2gle. 3glea 2g3leb g3lec g3leg 2gleh 4g3lein glei4t5r g3len 4g5ler 2gles g3lese g4lia 2glib 3g2lid 3g2lie 2glif g2lik 4glin g2lio 2glis 4g3lisc 3g2lit g2liz 3g2loa 3g2lob g3loch glo3g 3g4lok g2lom 3g2lop 3g2lot 2gls 2g1lu glu2t 3glü g2ly 2g1m2 g1n 2gn. g2n2a g4na. 2gnac g4nat 3g2nä gn2e g3neh gne2tr 2gneu 2gng g2nie g2nif g4nin 2gni2s1 3g2no gno1r 4g3not 2gnp 2gns 2gnt 2gnu 3g2num. g2nü g2ny 2gnz go4a goa3li 2g1of 2gog 2g1oh go1i gol2a 2gonis 2g1ope 2g1opf g2o1ra 2gord 2gorg go2s1 go3st go3th got6t5erg go1y 2g1p2 2g1q g2r4 gra2bi gra2bl 2gradl 2g3rah 2g3rak grammen6 gram8m7end 2g3räu 2g5re. g4reb 2g3rec 2g3rede g4re2e 2g3reic 2g3rein g3reit g4rem 2g3renn gren6z5ei g4rer g3ret g3rev 2g3ric gri2e g3riese 3grif 2grig 2g3ring 2groc 2groh gron4 g4ros gros6sel gro4u 2g3röh g4ruf 2g3rui 2g3rum 3g4rup 2grut 2g3rüc 3g4rün 4g2s1 gsa4g g3s2ah g4s3a2k g3sal g4salt gs3ama gs3an gs3ar gs3aug g3s2c g4sca g4s3ce gsch4 g4schef gs4chi g4sco g4s3cr gse2 gse3e gs2eh g3s2eil g3sel. 
gs3eli g3seln gsen1 gs3er gs5erk gse4t g4seta gsi2d g3sil g4sl gso2 gsp4 g3s2pek g3spi gs4pie g4spin gs3pl g3s2por gsrat4 gsrü2 gs5s4 gs3ta g3stan g3star g3s4tati g4s3tä g5stäm g3stel gst3ent gst3err g1steu gst2he g3stir g3sto gs3toc g4stol gs3top g4s3tor g3stö gs3tr gst4ra gs4trat gst4ri gs4t3ros g3stu g4stur gs3tü gs4tüc g4sw g3sy 2g1t g3te gti2m gt4r gt2s g3tü 1gu gu3am gu1an. gu1ant gu1as gu1c gu4d3r gu2e 2gued guet4 2g1u2f 2g1uh gu1ins gu1is 3gumm 2g1unf g2ung. gunge2 4gungew 2g1ungl g2un4s 2gunt2 2g1url gurt3s gu2s3a guschi5 gus4ser gus2sp gus2st gu4st gu2t gut1a gu4t3erh gut3h 2güb gür1 güs3 2g1v 2g1w 2g3z2 3haa hab2a hab2e h2abs ha2cho ha2del ha4din h1adle haf3f4l haft4s3p h1ah ha1kl 2h2al. halan4c ha2lau hal2ba hal4bei halb3r 2hale hal2la hal6lerf h1alp hal2st hal4t3r h1amt h2an. h2and hand3s h4ann 2hanr 2hant h1ap ha2pl ha2pr h4a3ra 2harb h2ard h1arm. har4me. har4mes har2th h1arti h2as 2ha3sa hasi1 hat5t2 hau3f4li 2h1aufm h1aukt hau2sa hau2sc hau4spa hau5stei hau6terk 2hauto hau2tr h1äff hä6s5chen häu2s1c hä3usp 2h3b2 hba2r3a 2h1c 2h3d4 hdan2 2hea he2ad he3be he4b1ei he2bl he3br he5ch2e he1cho h1echt he3cke hed2g he3di he2e3l hee4s he2fan he2fä he2f1ei hef3erm 2heff he4f3ing he2f3l he2fr he3fri he2fu he3gu h1eie h1eif h1eig he2im heim3p hei4mu heine2 h1eink 4heio he1ism he1ist heit4s3 h1eiw he2l3au hel1ec h3e2lek he3len hel3ers he3li hel4l3au hel4mei he3lo he4lof he2lö 3hemd he3mi 3hemm 4h1emp h2en. he4n3a4 he2nä hend2s he2n1e2b hen3end hen3erg he2net heng2 2heni he2no henst2 hen5tr h1ents 2h3entw hen3z 4he2o he3on he3op he3ph her3a2b he2ral 2herap he3ras herb4s he4reck 4hereig he4r3eis he2rel he4rerw h1er2fo h1erfü herg2 herin4f he6rin6nu herin4s herin8ter h1erke h3erlau 2herm he3ro he4r3o4b h1erö hert2 her3th her2zw he1sta he2s5tr he2tap heter2 he3th het2i he3t4s h2e2u heu3g he3x he1x4a he1y2 1hè 2h3f4 hfell1 hfel6ler hfi2s 2h3g2 hget4 2h1h2 2hi. 2hia hi2ac hi2ang hi1ce hich6ter 2hi3d h2ide h1i4di hi2e hi3ens hier1i hie4rin hiers2 hif3f4r hi2kr hi2l3a4 hil2fr hi2n h1indu hi3nel hin2en h1inf h1inh hi3n2i hin3n2 hi3no hin3s2 hin4t1a 2hio hi4on hi3or 2hip1 hi2ph hi2pi h2i2r hi3ra 2hi3re hi3ri hirn1 hir4ner hi3ro hir2s his2a hi2se hi2st hi1th hi3ti 2hiu h1j 2h1k4 2hl h4lac hla2n hl1anz h1las h1lat h1laut h3läd h1läs h1läu hlb4 hld4 h3leb hle3e h5len. hlen3g hl2enn h3ler hle2ra hl1erg h6l3ernä hle3run hl1erw h4lerz h3les h4lesi h3lex hlg4 h2lie h2lif hl1ind h2lip h2lis h3list h2lit hll2 hlm2 h2lo h3loc hl1of hl1op h4lor hlo2re h3losi hl2ö h3löc h2lös hl2san hl2ser hl3sku hl3slo hl3t2 h3luf h3luk h1lüf 2h1m h2mab h3mag h3man h3mar h4mäc h4mäh h4mäl h4mäu h3me. hme1e hme1in h3men hmen2s hme2ra h2mo h4mon h3mö hm3p4 hm2s hm3sa hms1p h2mu 2hn h2na hn1ad h3nam hn1an h2nä hn3d4 hn2e hn3eig hn3ein h2nel hne4n1 hne4pf hner3ei h3nerl h3nerz hn3ex h2nic h2nid h2nie hn1im hn1in h2nip hn3k4 h2nor hn3s2k hnts2 h1nu h2nuc h2nul hn1unf h3nunge ho2bl ho2ch3 ho2cka ho6ckerl hock3t 2hod hoe4 ho2ef ho4fa ho2f3r 2hoi hol1au 4holdy 3hole ho2l1ei hol3g4 4holo ho4lor 3hol3s h1o2ly 3holz hol6zene hom2e ho2mec ho2med h2on hono3 2hoo 2hop ho1ra hor3d h1org ho4sei ho3sl ho2sp ho4st 2hot. ho3th hotli4 2hot3s2 3hov 2ho2w1 h1o2x ho1y2 1h2ö hö2c hö3ck h4ör hö2s1 h3öst 2h3p2 h1q 2hr hr1ac hr3ad h1rai h1rane h3räu hr1c hr3d h2rec h3rech h3red h3ref h4rei. 
hrei4ba h3reic h4r3eig h3rel h3r2en h3rep hr2erg hr2erk h6rerleb hr2erm hr2erz h3re2s1 hre2t h2r1eta h3rev hrf2 hrg2 h2ri h3ric h4rick hri4e h3riesl h3rin h4rine h4rinh h4rist h2rob h3roh h3rol h4rome h4romi h4ron h2ror h3rou hrr4 hr2s1ac hr2s3an hr2sau hr3schl hr2s1en hr2ser hr4set hr4s1in hrs3k hr4s1of hr2su hr4sw hr2tab hr2tan hr2th hr2tor hrt3ri hr2tro hrt2sa hrt2se h3ruh hr1ums h3rü h4rüb h2ry hrz2 4hs h2s1ach h2san h2sau h4schan h2s1ec hse4ler h2s1erl h3s2ex h2s1ing h2s1of h2s1par h2sph hs2por h2sprä h2spro hss2 h1sta hst3alt hst2an h2s3tau h1stec h3stein h5stell h3s4terb hst2he h1s2ti h1sto h2stor h1s4tr hst3ran hst3ri h1stun h2s1u hs2ung 4h1t h2t1a h3t4akt. h3takts h3t2al h4t3alt h4t3a2m hta4n ht3ane h3tank ht2as h4t3ass h4tasy ht3a2t h2tär ht1e2c h2t1ef ht1eh hte2he h2teif h4teilz h2t1eim h2t1eis h4t3elit h2temp h4tentf h4t3ents ht3erfo ht3erfü h2t1erh ht5erken h4terkl h6terneu h4t3erre ht3ersc h6t5erspa ht3erst h6tersta ht6erste h2t1erz hte2s h4t1ese h4t1ess hte3sta h2t1eu h2t1ex h2th h4thei hthe3u h4tho h2t1in hto2 h2toly h2torg h3töp h4t3rak ht3rand h2t3rat ht6raume h4tref ht4ri h4t5rin h2t3rol h2t3ros ht3rö h4t1rös h2t3ru h2t3rü h4ts ht2so ht2sp ht3spri ht4stab hts2ti hts4tie ht4s3tur ht4s3tür htt4 htti2 h2t1urs h3tü ht3z2 hu2b1a hu2b3ei hu2b1en hu2b3l hu4b3r hu2bu hu1c hu2h1a hu2h1i huko3 huk3t4 hu2l3a hu2lä hu2l3ei hu4l3eng hu4lent hu2ler hu2let hu2l1in hu2lo hu3m2a h1ums hu2n h1una hung4s hu3ni1 h1up. h1ups 2hur hurg2 hu3sa hu2so hus4sa hus2sp hu2tab hu3t2h hu2ti hut2t hut4zen hut4z3er h2ü h4übs h3übu hühne4 hüs3 2h1v hvi2 hvil4 2hw h2wall hwe1c h1weib 3hyg 3hyp hy2pe. 2hy2t h1z hz2o hzug4 i1a 2ia. i4aa i2ab iab4l 2iac i2af iaf4l i4a3g2 i2ah i3ai i2aj i2ak i3ak. i3akt 2ial i5al. ia2l1a4 ia2lä ial3b ial3d i3alei i3alent i3alerf i3alerh ia4l3erm i3a2let i3a4lia ialk2 i3all ial3la ia2lor ial3t4 ia2lu ial3z2 i2am i4amo 2ian ia2nal i3and2 ian2e i3ann i2ano i3ant i3anz i2ap ia3p2f ia1q i3ar. ia2ra 2ias i2asc ia3sh i2asi i2a3sp ias3s iast4 i3at. i3a4ta i4ate i3at4h 1iatr i3ats i3au ia3un 2iav 2iä i1äm iär2 i1är. i1ärs i1ät. i1äta i1ät3s4 2i1b i2b1auf ib2bli ib1ei i2beig i2beis ibela2 ibe4n iben3a ibi2k i3bla i3ble ib2o i2bö i4brä ib3ren ib4ste i2bunk i2bunt ibu2s1 2ic ic1c ice1 ich1a ich1ä i1che ich1ei i1chi i2chin ich3l i3chlo ich3m i1cho i2ch3r ich4spe ich2t3r i1chu ich1w i1ci i3ck2e i1cl i1d id2ab4 i3dam id2an i2d1au 1i2dee i2dei idel2ä ide3so ide3sp 1i2dio idni3 i2dol 1idol. 2i2dr i3d2sc id2s1p idt4 i2dy ie3a4 ie2bä ie2bl ie2bre ieb4sto ieb4str ie1c ie2cho ie2ck ie2dr ie1e2 ie2f1ak ie2f1an ie2fau ief3f4 ie2f3l ie2fro ie4g5l ie3g4n ie2g3r ie3g4ra iegs3c i1ei i2e2l1a ie3las iel3au iel3d iel1ec ieler8geb i1ell ielo4b iel3sz iel3ta 2i1en i3en. i3ena iena2b ie4n3a4g i3e2nä i3end i2ene ien1eb ie3ner ien4erf ie4n3erg i3enf i3en3g ienge4f i3enh i3enj i3enk i3enm i3enn i3e2no i3enö i3enp i3enr ien2s ien3sc ien3s2e ien3si iens2k ienst5rä ien3sz ie1nu i3env i3enw i3enz ie1o2 ier3a2 ie2rap i2ere ie3r2er ie4rerf ie4r3erz ie3res i3ereu i4eri ierin3 ier3k2 i1ern i3ern. 
i2er5ni ie2rö ier4seh iers2t ier3sta ier3ste ier3te iesen3s4 ies2sp ies2s3t ie1sta ie3su ie2t1a ie4t3erh ie4t3ert ie2t3ho ie4t1o ie4t1ö4 ie2tri iet2se i1ett ieu2e ie1un i1ex 2if if1ar i2f3arm if4at if1au i2fec ife2i if2en ifens2 if1erg if1erh if2fl if3l i1f4la if4lä i1flü if3r if4ra i1frau i1fre if4rei if4rü if2s if3se if3sp if2ta ift3erk if2top if4t3ri ift3s2p ift3sz 2i1g iga3i i2g1ang ig1art iga1s4 i4gefar ige4na ig1erz i2g1im i2gl ig1lä ig4na i4gnä i3g4neu ig4no i3go ig4ra ig3rei ig4sal ig3sä ig4se ig3so ig3spr ig3stei ig4sto ig4stö ig3str ig4stre ig3stü igung4 2i1h i2h1am i2har i3he ihe1e ihe4n ih3m ih3n ih3r ihs2 i2h1um ih1w ii2 ii3a4 i1ie i3i4g i1im i1in i1i4s i2is. ii3t i1j 2i1k i2k1a4k ik1amt i2k1ano ik1anz i4kanze ik1art ik3att i2k1au i2kär 4ike i2k1ei ike2l1 i2k1e2r2e ik1erf iker6fah i2k1er2h i2ker2l i2k1eta i3ki. ik1in i2kind i2k3l i3kla i3k4lä i2kn ik3no ik2o3p4 iko3s i2köl i2k3ra ik3rä ik3re ik1s2 ik3so ik3sz ikt2e ikt3erk ikt3r ik2tre i2kun i3kus i1la i2l3ab il1a2d i2l1ak i2l3a2m il1ans il1asp il1au il4aufb il3aus i2laut i1lä1 6ilb il2c il2da il4d3en4t ild2er ild1o il2dor il2dr il1e2c ile2h il1ehe il1ein il1el i4lents i2l1erf i2l1erg i2l1err ilf2 il2f3l il2f3re ilf4s3 ilie4n ilig1a2 ili4gab i2l1ind i2l1ip i3lip. i3lips 2ill. il3l2a il3l2er il3l2i 2ills il2mak il4mang il2m3at il2mau il2min 2ilo i2l1or il3t2h i1lu2 i2lum ilung4 i3lus ilv4 il2z1ar ilz3erk 2im. i2manw i2m1arm im4at ima2tr imat5sc ima4tur i2meg i2mej i2mek i2mele i2melf i2m1erf i2m1erz i4mesh i2meti i2mew i2m1inf i2m1ins im2mei im4m3ent 1immo 2imo im1org 1impo imp4s im3pse 1impu im2st im3sta 2imt imt3s2 2imu in3a2c i4nack i2n1ad in2af in3am i3nap in2ara in2ars in4art ina4s i2n3au in1äs in2dal in2dan 1index in3do 2indr ind4ri in3drü 1indus 2ine i2n1e2be in1ehe in3ei i2n1eng in3erbe i4nerbi in2erh iner4lö i4ner4tr i4nesk in1eu ine3un ine2x in3f 1info. 1infos 2inga ing1af in2g1a4g in2gl ing4sam 1inhab 2inhar 2inhau 4inhe in2i3d i3nie 2inig ini3kr in2ir 2inis ini3se i3nitz 3inkarn inma4le 2inn. in4n3erm 2innl in2nor inn4sta 1innta 2ino in1od in3ols in1or ino1s4 ino3t i1nö in1ö2d 2inp 2inr ins2am insch2 in2seb 2insen ins3ert in3skan in3skr 1insta in4s3tät in3stel in3su 1insuf in4s3um in3s2z 1integ int2h in3t4r in5tri in1u i3n2um in3unz invil4 i1ny in3zw i1ñ 2i1o io1c io2d i2oda io3e4 iof4l i2o3h io2i3d io3k4 i3ol. i3om. i3oms ion2 i3on. ional3a io2nau ion3d i3ons3 ion4spi ion4st i2ony i2o1p io4pf i3ops i3opt i2or i3or. i3orc iore4n i3orp i3ors i3ort io3s2 i2ost i3ot. i3ots i2ou i2ov io2x i3oz. i1ö2k i3ön i1ös. 2ip. i1pa i1pe ipen3 i3per ip3fa iph2 2i1pi ipi3el ipi3en ipi2s i3p4l ip2pl ip3pu i1pr 2ips 2ipu 2i1q i1r2a i3rad 1i2rak irat2 i1rä ir2bl ir1c ir2e i3ree 2irek 2i3ré ir2gl irg4s ir2he ir2i 2irig 2irk ir2k3l irli4n ir2mak ir2mau ir4mä ir2m1ei ir2mum ir4m3unt ir2nar ir2no i1ro 1iron iro2s i1rö irpla4 irr2h ir4sch3w ir3se ir3sh ir2st irt2st iru2s1 i3sac i4s1amt is2ap is3are i2sau i2s1än 2isb i2sca isch3ar i3s2che i4schef i4sch3e4h i4sch3ei i4schin i5sching i2sch1l isch3le i2schm isch3ob isch3re isch3ru i4schwa i6schwir i4schwo isch3wu i2s3cr 2ise ise3e ise3ha ise3hi ise3inf i4seint ise2n1 is2end isen3s i2serh i2s1erm iser2u i2s1ess i4s3etat is2has isi2a i2s1id i2s1of iso6nend is1op 3i2sot is1pa i2spar is1pe is1pic is2pit is2por i2spro is3sa is4s1ac is4sau is4s3che is2st is3sta is3sto iss3tr is3stu is2sum is3t is4tab is4tam ist2an i1s4tat is4tel iste4n istes3 i1s4teu i1s4til is4toc is4tö is5tör ist4ra ist3re is4tü isum3p i2sü i1ß iß1ers it1ab. ital1a it1alt it1am it1an it2an. 
it3a4re it1art i3tat it1au i3tauc i4t1ax 4itä it2är i2t1äs ität2 i2t1ei i4teig it2eil i4tein 2itel ite2la ite4n iten3s2 i4tepo i2tex i5thr i2t1id 1itii iti4kan iti3k2e i2t1in1 it2inn i6tl itmen2 i5toc i2t1of i3tö it3raf i2t3ran it3ras it3rau it3räu it3re it3ric it3rom it4ron i3tru it3run it2sa its1a4g it2s1e4 its3er1 it2so it2s1pe it4staf it2sto it2teb it4tri itt2sp it1uh i2t1um i2tuns it1urg itut4 i3tü 2itz it2zä it4z3erg it2z1w 2i3u2 ium1 i1ü 2i1v i2v1ak iv1ang i2veb i2v1ei iv1elt ive4n i2v1ene i2v1ent i2v1ur 2i1w iwur2 2i1x i2xa ix2em i3xi 2i1z iz1ap iz1au izei3c ize2n i2z1ene iz4er i2z1ir izo2b i2zö i2z1w í1l ja1c jah4r3ei jahr4s ja3l2a ja3ne jani1 ja1st 2jat je2a jean2s je1c je2g jek4ter jektor4 jek2tr je3na je2p je4s3t je2t1a je2t3h je2t3r jet3s2 jet3t je2t1u2 je3w ji2a jit3 ji2v joa3 jo2b1 job3r jo2i joni1 jo1ra jord2 jo2sc jou4l j2u ju2bl jugen2 jugend3 ju2k jung3s4 ju3ni jur2o jus3 jut2e1 2j1v 1ka 3ka. k3a2a ka3ar kab2bl ka2ben 2kabh 2kabla 2kablä 2k1a2bo ka3b4r 2kabs 2k1abt ka1c k2ad 2k3ada 2k3a2dr ka1f4l ka1fr kaf3t2 k2ag ka1in ka3ka kaken4 2kala. ka2lan ka3lei ka3len. ka4lens kal3eri kal2ka kal2kr k1all kalo5 kal4tr k3ama kamp8ferf kan2al ka4n1a4s ka2nau kand4 2kanda kan2e 2k1ang kan3k4 2kanl 2k1anna k1ans k2ans. 6kantenn ka3nu 2kanw k2anz. ka2o 2k1apf 3kara 2karb k2ard k2arg ka3r2i kari3es k2ark 2k1arm k2arp3 kar2pf k2ars kar3t k2arta 2k1arti karu2 k2arw 3kas ka3se kasi1 kas3s ka2s3t ka3tan ka3t4h ka4t3r 2katt kau2f1o 4kaufr kauf4sp k1aus kau3t2 2kauto 1kä k1äh k1ä2mi k1än kär2 kä2s1c käse3 2k3b4 kbo4n kbu2s kby4 2k3c 2k3d2 kdamp2 2k1e1c k1eff kefi4 kege2 ke2gl ke2he. kehr2s kehr4s3o 2k1eic 2k1eig k1ein ke1in2d 2keinh kei1s 2k1eise keit2 ke2la kel1ac ke3lag kel1au ke2lä kel3b4 2ke2lek ke2len ke2l1er 2ke3let kell4e kel3s2k k4elt 2k1emp k2en. ken3au 4ken4gag 2kenlä ke2no kens2k ken5stei ken3sz k2ente k3enten ken3th k2entr 2k1ents k2entu 2kentw 2keo2 ke2pl k2er. ke1rad k2erc 4kerfah k4erfam k3ergeb ker6gebn k3er2hö ke6rin6nu kerin6st kerin4t ker4ken k2erko k2erl k3er4lau k3er4leb k6erlebe ker4neu k1e2ro k2ers. kerz2 ker4zeu 2k1er2zi k6es. ke2sel ke4t1a ke2t3h ket3s ke1up keu6schl 2k1e2x 2k3f4 2k1g2 2k1h4 kho3m ki3a4 ki1c 2k1i2de ki3dr ki2el kie2l3o ki1f4l ki1f4r ki3k4 2kil2a ki3li ki3lo k2imi k2in. k2ing 2kinh k2ini k2inn ki3n4o3 kin3s 2k1inse 2k1int ki3or kio4s 3kir kis2p kist2 kis4to 2kiz ki3zi 2k3j 2k1k4 kl2 4kl. 4kla. k4lar 4k1last k2le 4kle. kle3ari 4kleh k4leid 4k3leit k3lem. 2k3ler kle2ra 2k3leu kle3us 2klic 2klig k2lin k3lip k2lir k2lisc 2klist klit2s 4kliz 2k3loc klo2i3 k4lop klost4 klö2s k2löt k1lu kluf2 klung4 2k1lüc 2kly 2k1m k2n2 3knab k3ne k4nei 2k5ner kno4bl 2k5nor k3nu 3knü 1ko ko2al 2kobj 2k1o2fe koff4 koh3lu ko1i2 kol4a ko3le kol2k5 3kom ko4mu k2on ko3n2e kon3s4 ko3nu 2kop. ko1pe kop4fen 2kops 2kopz ko1r2a 2k1orc kor6derg ko3ri k2os ko2sp ko2st ko3ta kot3s2 kot4tak 2k1ou 3kow ko2we k1o2x 1kö kö2f k1öl 2k1p2 k1q k2r4 2k3rad k3rats 2kraum k4raz 2k3rät 2k3räum 2kre. 2k3rec 2kred. 
2k3rede 2k3ref 2kreg k3reic kre1i2e4 kreier4 k3reih 2k3rh 2krib 2k3ric k3ries 2krip 3kris 3k4ron 2kruf krü1b 2ks k4s1amt k2san ks3ar k2sau ks2än ksch4 ks1e2b k2s1em k2sent ks1erl k2s1ers k2s1erw ks3ha k2s1id k2s1in k2s1o2 k3sof ks1pa k3spe ks2por ks2pu ks3s2 kst4 k1sta k4s3tanz k3stat4 k1ste k1s2ti k1sto k2stor k1str k2strä k1stu k2stum k2s1u ks2zen 4k1t k2t1ad kt1akt k3tal kt1am kt1an k2t3a2r kta4re k2t1au ktä3s kte3e kt1ei k2temp k2tent k4t3erfo k2t1erh kte3ru k2tex k2th kt3ho k2t1id kt1im k2t1ing kt1ins kti4ter k2t1of k3top kt1ope k4torga kt3orie kt4ran kt3ras k4tref kt4ro ktro1s kt3run kt3s4 ktt2 k2tuns k3tü kt3z ku1c kuh1 2k1uhr kul2a ku3l2e ku3l2i 4kulp 2k3uml kum2s1 k2u3n2a kung4 kun4s4 kunst3 2kunt 2k1up. kur2bl ku2rei kuri2e kuri4er ku2ro kur2sp kur2st ku4schl ku2sp kus3t ku2su 1kü 2küb kü1c kür4s 2k1v 2k1w 2k3z2 kze3l 3la. la3ba 2labb 4l3aben 2labf 2labg 2labh 4l1a2bl lab2o l2abr lab4ra lab4ri 2l3abs l1abt 3labu 2labw la1ce la2ce. 1lad lad2i l1adl 2ladm 2l1a2dr 3ladu l1adv 2laf la2fa laf3s laf3t la2ga la2gio la2gn lago2 la2g1ob 2la1ho 1lai la2kes la2k1i l2akk la1k4l 2l1al 4lall 4lalp l2ami la3min 1lammf l2amp 2l1amt lamt4s la4mun l1anal la2nau 2lanb 3l2and lan2d3a2 lan6d5erw lan6d5erz lan2d3r 2lanf lan2gl lang3s4 2lanhä l2anhe 2lanl 4lanli 2l3ann l1anp 2lans 4lansä 2lantr lan2zw 3lao l1a2po2 lap4pl la2r1an la2r1ei la4rene 3l2ar3g lar3ini lar3s 2l1ar3t l3arti la2ru la2sau 4lasd la3se 2lash 2lasi la2so 2lasp 3lasser la2st last1o lat2a la3te la4tel 2l3ath la2t3ra lat2s 2lat2t1a lat4tan lat4t3in lat2t3r laub4se l2auf. lau2fo l2aufz 1laug 2lausl 2lausr 2l1auss 2lauto 1law lawa4 lay1 lä1c 1läd 2läf 2l1ähn 1länd lär2m1a lä2s1c 4lät 2läub 2läuc 2läue 1läuf 1là 2l1b l3bac l2b1ede l4beta l2b1id l2b1ins lb2lat l3blä lb3le l2bli l3blo l4bre. lb3rit lb2s lb3sa lb3se lb4sk lb3sp lbs6t lbst3e lb4sto lb2u l2b3uf lbzei2 2l1c l3che l3chi lch3l lch3r lch3ü lch1w l3cl 4l1d ld3a2b1 l3d2ac ld3a2ck l2d1a2d lda4g l2d1ak ld1al l3dam ld1amm l2d3a2n l2d1a2r ld3ari l3das l3dat ld1au ld1är l2dei l2dele l3der. ld1erp l2d1e2se l2dex l2d1id l2d1im ldo2r ld2os ld2ö2 ld3r l2dran l2dre l3d4ru ld4rü ld3sa ld3st ldt4 ld3th l2d1um 1le 3le. le2ad leben4s3 le2bl 2lec le2chi lecht4e 3led 4ledd le3de le2e le3ei lef2a le2g1as le2gau le2gä le2gl leg4r 3leh leh3r2e 4lehs 4leht 3lei. lei2br l2eic l2eid 4l1eig l2ein. l2eind lein4du l2eine lei6nerb 2leink l2eint leis6s5er l4eist lei4ßer l2eit lei2ta lei8t7er8sc leit3s2 lekt2a 2lektr 3l2ela 2le2lek lel3s 3lemes le2m1o2 4lemp lem3s l2en. le4nad le2nä 4lendet 2lendu le4n3end 4lenerg l2enf le3ni l2enk 2l1enni l2e2no len4sem len3sz l1ents 2l3entw lent4wä 5lentwet 4lentz len2zi le1os 2lep 3lepa 3lepf lepositi8 3lepr l2er. l2e1ra le2ra4g le2rau lerb4 4l3ereig le4r3eim le4rers l1erfo l2erfr l2erfü 3lergeh l3ergen 3l4ergew 2l1ergi lerin4s lerk2 l2erka l2erko l2erle 2l1er2ö 3l2erra l4ers. lers2k lers2t ler3t 6lerwerb l1erz l2erza les2am les2e 2l1esel le3ser le3sh lesi1 le3sk les2t leste3 le1sto 4lesw 2lesy le2tat 2le3th 2leto let4tu le2u 4leud 2leuro 3leut 3lev 2lexe le2xis 2lexz 2l1f l3fah lfang3 l2f1ec lfe1e l4feis l3f4lä lf3lo l3f4lu lf3ram lf2tr lf4u lfur1 l3fü 2l1g lga3t lgd4 lgen2a lge3ra lgeräu3 l2geti l3go lg3re l3gro 2l1h2 3lhi. 1li 3lia li3ac li3ak li3ar lia1s li3b4 libi3 li1c 3lichem 3licher li3chi 4lick li2cka li3d2a li2deo 2l1ido li4ds lid3sc l2ie 3lie. 
liebe4s li3ene lien3s lie2s3c lie2st 3lig lig4n li2gre li3ke li2kr lik2sp lik4ter li3l lil2a 2lim li3m2a 3limo li3n2a lin3al 2l1indu li2nef li2neh li2nep li2nes 2l1inf lings5 2l1inh 2l1in1it 2l1inj lin2k1a link2s li2nol l2ins. l2insa l2insc 2linsp 2linst 2l1int l1inv 2linz li2o li4om li3os. li2p3a 3lis. li3s2a li4schu 2l1isl 2l1i4so li2sp liss2 lit2a li2tal li3te lit2h lit1s2 lit3sz li3tu 3liu 2lixi li2za lizei3 4l1j 2l1k lk1alp l3k2an l3kar. lken3t lk2l lk3lo l3k4lu lk4ne lkor2b1 lk4ra l2k3ro l2k3ru lk2s1 lk3sä lks3t lk4stä l3k2ü 4l1l ll1abb ll1a2be l2labt ll1aff ll1akt l3l2al l2l1a2m ll3ama lla2n ll2anw ll1anz l3lap ll1arm ll1au ll3aug l2laus l2l1äm llb4 llch4 ll3d4 ll1ech lle3en l2l1ef ll1eim ll2em l3len. lle4n3a ll3endu llen3g l4lents l3ler. lle2ra l4lerfo l6lergen l4lergo ll3ernt ll3ertr l2lerz ll2es l2lex llg4 ll1imb ll1imp l2l1ind ll1ins llk4 ll3l2 ll5m lln2 ll1ob l2lobe l2l1of ll1opf l2l1o2r l3lor. l3lore l2l1ou l3low l2löf ll1ö4se ll3sh ll3s2k ll2spr ll5t4 llti2m llt5s2 llu2f ll1ur llus5t6 ll3z2 2l1m l2m3a2b l2marc lm1aus lm1c lme2e lm3eins l2m1e2p l2m1erz lm1ind lm1ins l2möl lm3p lmpf4 lms2t lm3ste lm3s2z lm3t 4ln lna4r ln3are lnd2 l3n4e l3ni l1nu l1nü 1lo 3l2ob. lo2ber 2lobj 2l1o2bl l2obr lob4ri l1o2fe lo1fl lof4r lo2gau lo3h2e 2l1ohr loi4r 3lok lo2k3r lol2a l1o2ly lo2min lo2n1o lo2o 2lopf 2lopt lo1ra lo4rä 2lorc l1ord lo3ren 2l1or3g2 lo3ro 3lorq 3los. lo4sa 3lose lo4ske lo2spe loss2e lo4ste los3t4r lo2ta lo3tha loti4o 2l1ov lo2ve 2lox 1lö lö2b3 2löd lö2f 2l3öfe 4lög l1öhr 2l1ö4l3 4löß 2l1p l3pa lpe2n3 lp2f l2p1ho lpi4p lp3t4 l3pu 2l1q 2l3r2 lrat4s lre1s lrut4 lrü1b 4l1s l3sac l2s1a2d l3s2al l4s1amb l2sann l3sare l2sau l4schin l4schmü l2s1e2b l2s1ec l2s1em ls1ere ls1erg l2serh ls1erl l2s1ers l2s1erw l3sex l4sha l2s1imp ls2log ls3ohne l4s3ort. l3s2pi ls2po l2spro l3s2pu ls3s2 lst2a lstab6 ls4taf l4s3täti l2ste l3stec l3stei l3stel l4stem ls6terne ls6terns ls2tie l2stit ls4tr ls2tu ls1um l2sun lsu3s ls2zen 4l1t l2tab ltag4 lt1ak lt1a2m l4t3ame lt3and lt1ang l3tarb lt1art l2t3ato l2t1au lt1eh l2t1eis l4te4lem lt3eli lt2en l5ten. lter3a lt2erg lt4erö l4t1e4sk lte2th l2t1eu l2th l4thei lt3ho l3thu ltimo4 l2tob l2t1of lt1op l2t1o2ri lto2w lt1öl l3tör lt1ös l4t3öt ltra3l l3trä lt3räu lt3re lt4rie lt3roc lt3ros l2t3rö l6ts lt3sc lt2so lt4stab lt4stoc ltt2 lt1uh l2t1um ltu4ran ltu2ri l3tü lu1an 4lu4b3 luba2 lubs2 lu2dr lu2es 1luf 2l1ufe 2luff luf2t1a luf2t1e luf2t5r lu2g1a lu2g1e2b lu4g3l lu2go lu2g3r lug3sa lug3sp lu2gu 2l1uh lu1id. lume2 2lumf 2luml l2ump l1ums l1umw 1lu2n 2l1una 2l1unf lung4sc 2l1uni 2lunt 2lunw 4luo 2lur l1urn l1urt 2luse lu2sp lus4s3a lus2s1c luss3er lus6serf lus6serk lus6sers lus2s1o lus2s1p lus2s3t lus4stä lu4st lus4t1a lust3re lu2s1u lu2t1a lu2tä lu4teg lu4t3erg lut1o2f lu2top lu4t3r 3lux 2lüb 5lüd lüh1l 2l1v 2l3w 2lx 1ly ly1ar ly3c 2lymp 3lyn ly3no ly1o ly3u 2l1z l2z3ac l3z2an lz2erk lz1ind lzo2f l2zö lz3t2 l2z1u4fe lz1w lz2wec 1ma m1ab m2abe 2mabk m2ab4r 2mabs 2mabt mach4tr ma2ci ma3da ma2d4r ma4d2s mae2 ma1f ma2ge. ma2geb ma2gef ma2geg ma2gek ma2gep ma4ges. ma2get ma2gev ma2gew 2m1agg magi5er. magi5ers ma3g4n 2m1ago mai4se 2m1akt mal1ak ma4lakt ma2lan ma4l3at ma2lau mal3d ma3ler mali1e mal3lo 2mallt malu4 ma2l3ut mam3m 2m1anal ma2nau 2manb man4ce. 
man3d2 man3ers ma2net m2anf 2m1angr m2anh 2manl m4ann 2mansa 2mansä 2mansc 2mantw 2manz ma2or m2app 2marb mar3g2 4ma3r2o maro3d 4marr mar6schm mar6schr ma3r2u m1arz 3mas ma3s2pa 4m1aspe massen3 mas4tel ma1s4tr 3maß ma2ta2b ma2tan mat4c ma2tel ma4t3erd ma5tri mat3se mat3sp 2m1au2f ma3un 2mausg m4ay ma1yo 3mä m1ähn mä1i2 4m1änd m1ärg mä3t4r mäu2s1c 2m1b2 mbe2e mb4l m3b4r mby4 2mc m3ch 2m1d md1a m2d1ä m2dei mds2e m2d1um 1me meb4 m2e1c medi3 medie4 medien3 2medy me1ef mee2n1 mega1 3meh 2m1eif 2m1eig m2eil mein4da me1i4so 3meist me3lam me2lau 3meld me2lek me2ler melet4 2melf. mell2 mel2se mel5t4 6mel6tern 2m1e2mi m2en. mena2b me3nal men3ar men3au men3ge men3gl me3nor m2ens men4sk men2so men3ta men6tanz 2mentn 4m3entwi me1o 2meou 2meö 3mer. me1ra me2r3ap me4rens mer2er 4m3ergän 3merin merin4d merin4t me2ro 3mers merz4en 3mes mes1a me2sal me4sä 4meser 2me3sh 4m1essa mes6serg mes2s1o mes2s1p mes2st meste2 me1sto 4mesu me3t2a me3th meu1 2m1ex 1mé 2m1f4 mfi4l 4m1g2 2m1h4 1mi mi2ad mi3ak mibi1 mi1c mi3da mie3dr mi2e1i mie3l mien3s mi2er mierer4 mie2ro mi4et mie4ti 3mig mi2kar mi2ki mi2ku 3mil mi3l2a milch1 mil4che mild4s 4milz 2m1imp minde4s min2en min2eu min2ga ming3s4 mi3ni 3min2o mi1nu 3mir. mi3ra 3miri 3mirs 3mirw mi2sa mi4scha mi4schn mi4schw mise1 mis2s1c mi2s5te 3mit mi2ta mi2th mi2t1r mit3s2 mit5sa mi5tsu mi2t1u 4mitz 2m1j 4m1k4 m3ka mk5re. 4m1l2 ml3c ml3l ml3s 2m1m m2mab m2m1ak m2m1al mm1ang m2m1ans mm1anz m2m1au mmd2 mm1ei mme4lin mme4na m4mentw mme2ra2 mme4rec mme2sa mm1inb mm1inf mm1inh mm1ins mm1int mmi3sc mmi1s4t mmm2 mm3p mm2s mm3si mm3sp mm3sta mm3str m2mum mm2un mmül2 mmüll1 2m3n2 m4nesi 1mo moa3 2mobj 3m2od mode3s mo2dr 4mog. mo2gal 3moh mo2i3 mo2k1l 2mol. 3mom mom2e 3m2on mo3ne mo4n1er mon2s3 mon3su 3mo2o 2m1ope 2mopt mo1ra mo2rar 2m1orc mor2d3a mor2dr mo2rer morgen5s6 mork4 3mos mos4ta moster4 3mot m1o2x mo1y 1mö mö2c 4mök m1öl 2m1p m2pf mp4f3erg mpf3erp mpf3err mp4f3erz mp2fl mpf3li mpf1or m3pi m4p3lem. m2p3len m2p3les m3pon mp3ta m3pu 2m1q 2m3r2 2m1s m2san ms3and m4sap ms1as m2sau m3sä m3sc msch2 m4sco m3se m4s1ef ms1erw m4sex ms1ini mso2r ms1ori m2spä m2sped ms2po m2spot m2spro ms2pu ms3s2 m4stag m3stel m3s2ti m3sto ms4tr ms5trä ms5tren m3s2tu ms4tü ms1um m2sü m3sy 4m1t mt1ab mt1ak m3tam mt1ar mt3are mt1elt m2t1erf m4t1erg m2t1erl m2t1ers m2t1ert m4t1eta m2t1eu m2th mt3ho m2t1im m2t1ins mti2s mtmen2 m3tö mt1ös m4ts1 mt2sa mt2se mt3s2ka mt2spr mtt2 mt1um mt1urt m3tü mt3z 1mu mu1a mu3cke 2m3uh mu3la 2muls 3mun mun2d1a 4m3unf 4m3ungeb mu3ni m4unk munt2 4munz mu3ra mu4r1u2f m4us mu4s1a 3musi mu2s1o mu2sp mus3t mu2su mut1au muts3 mut2st 1mü 2müb mül4len 3mün 3müt mütter3 2m1v mvoll1 2m1w2 mwa2 mwa4r mwel4 1my my4s 2m1z 1na 3na. 2n1ab na2bä 4nabg 4nabh na2bl n2abo na2br 4n3abs 4nabt 3n2ac na2ch1 na3chen nach3s nacht6ra 4nadd n2ade 4na2dr n1af na1f4r 3n2ag na2gem 3n2ah na2h1a n3ahn 3nai nai2e n1aig 2n1ak na2ka 3nako n2al. na2l1a2 na2lä 3n2ald n4ale na4lent na2let nal3la nalmo2 na2lop nal2ph n2als. nal3t4 na2lu 2naly n4am. 3name n4amen 4n3a2mer na3m4n 3namo 2n1amt namt4s n1an. 4n1a2na 4nanb n1and2 4n1ang 2nanh 2nani 4nank 2nanl 3nann na3no n1anp 2nanr 2n1ans 2nantr 2nanw nap2si n1ar 5nar. na2r1a 2narc n2ard 4narg 3nari n2ark n2arle 2narm n2arp 4n3art na3r2u 3nas n2as. na4schw 4nasp 4n1a2sy nasyl2 3nat n4ata na3t4h 4natm nats1 nat4sa nat4sc 4natt n1au 4nauf nauf4fr n3aug 5naui 3n2aul 4nausb 4nausg n2auso 4nauss 4nausw navi5er. 
navi5ers 1nä 3n2äc 3näe n1ähn 2n1ä2m 2n1än när4s5 3näs nä2sc n2äss 2näu 3nä1um 2n3b4 nbe2in nbe3n nbe3r2e nbes4 nbu2s nby4 2n1c n3ce2n3 nch3m n2ck 2n1d nd2ag n2d1ak n2danl nd1ann n2d1anz ndat2 nd1au nd1c nde4al. n2dei nde4län n4d3ents nde4rob nder5ste nde2se ndi2a3 n2dob ndo2be ndo1c nd1op nd1or n2dö n2d3rat n2d3re n2drob nd3rol nd3ros n2drö n2drui n4d3run nd2sor nd2spr nd4stab nds3tau nd3th ndt4r n2dü4 ndy3 1ne 3ne. ne2ap ne3as ne3at ne2bl 2n1ebn 2nec 3neca ne1ck 3ned ne2de 2nee3 ne2e2i4 ne3ein n1ef neg4 2ne2he. 2nehen2 3nehm 4n1ehr 2n1ei n2eid 4neif 3neigt 4n3eing 4n3eink ne2ke nek3t4 ne2l 3nela nel3b 2n1ele 4nelek 4nelem ne3len ne3li nel4la 3ne3l2o 3ne3lu n2em. 2n1emb n1e2mi 2n3emp 2n1ems 3nen n4en. nen3a2 n2enb n2enc 4n1endb 4n1endd 4n1endf n1endg 4n1endh 4n1endk 4n1endp 4n1endt 4n1endw ne2n1e2b nen3ei nenen1 ne4nene 4nengb nen4ge. nen4gen 4nengs 4nengt n2enh ne2ni n2enj nen3k ne2no n2ens nens4e nen3sk 5n2en3t2a n1entb 4n1entl 4nentn 5nentr n1ents 4n3entw 4nentz ne2n3u n2env n2enw ne2ob ne1os 2nepf 2n1epo ne2pos n2er. ne1ra ne2ra2b ne3r4al ne2r3am ne2ran ne2rap ne2rau 4nerbe. 4nerben n1erbi nere2 ne2reb n1erf 4n5erfo nerfor4 2nerfü 3nergr n1erh 2n3erhö 3neri n1erk n2erli 2n1erlö n1ermä ner4mit n2ern. 4n1ernt ne2ro ne1rös n2erp 3n2ers. 2n3ersa ner8schle n2ert. n1ertr ne2rup n2erv 2n1erz 3n2es n4es. ne3san nes4c ne3ska nes1o ne2s1p 4n3essi ne1sta nes3ti ne2tad ne2t1ak ne2t1an ne2tap n1etat ne2tau ne2th net3ha nett4sc n1e2tu net2zi ne2u neu1c neu3g 2n1eup neur2 n2ew 2n1ex 3nez 1né 2n1f nf1ak nfalt4 nf2ä nff4 n3fi nfi4le. nf4l nf5lin nf2o nfo1s nf4r nf3s nft2o nft4s3 n2f1u 4n1g ng2abs n2g1ac ng1ad n2g1ak n2g3a2m n2g1and ng2anf ng1anz n2g1äl ng3d4 n3gef n2g1ein ng2en ngen2a n3ger nge4ram n4g3erse nge4zän ng3g4 ng3hu n2g1i2d n3gläs n2glic n2glo n3g2loc n2glö ng3m n2gn ng3ne ng1or ng3rat ng3roc ngs3c ng4s3e4h ngs3pa ngs5tri ng3ts n2gum 2n1h4 n3han n3har n3hau n3hä n3he nhe2r n3hu 1ni 3nia nib4l nich1s nich8ters n1id 3n2id. ni2de ni3dr n4ie nie3b ni1el nie3l2a nie4n3 ni3ene ni1ero nifes3 nig2a 2n3i2gel nig3r ni2gre nig4sp 3nik ni2kal ni2kar ni3ker ni4k3ing ni3kl ni2kr 3n2il nim2o 4n1imp nin1 3n2in. n2in4a 4n3ind 2ninf 3n2ing4 4n1inh ni2nor 2n1ins n2ins. 4ninse 4n1int 2n1inv ni2ob ni3ok ni3ol n2ip ni3ra 3n2is ni4schw ni2s1e ni3se. ni2s1p ni3spi nis3s4 ni2s1u 2nit ni2ti ni3t4r nit4s ni3tsc nitts1 nitt4sa ni3tu ni3v 3nix n1j 2n1k n2k3ad n2k1ak n3k2al n4k3alg nk2am n2kans n2k3aus n2käh n2k1är nke2c n4k3erfa nk4erg nk1inh n2k1ins nk3len nk3les n2klie nk2lo nk2lu nk3lun nk4na n2kne n2k1ort nk2öf n2köl n2k3ro nk2s1al nks2ei nk3s2z nk2tak nk2tan nkt1it nk4top nk2tru 2n3l2 2n1m4 nmen2s 4n1n nna2be n2nada n4n1all n2n1an n2nau nnen3g n4nents nn2erh nn2erk nne2rö n4n3er4wa nner2z nne2s nnes1e nne4st nn2ex nn3f nng4 n3ni n2nof nn1o2r nn3sc nn3se nn3s2p nn2th n2n1uf n2n1unf nn1ur 1no 3no. 3nobl no2bla n2o3ble 2n1ob2s no1c 2no2d no3dr n1of 2n3o2fe n3ole no2leu n2on. 3n2opa 3nor. nor2a no2rad no1rak no3ral 2norc nor2d5r 3norh 3norm 3nors n1ort 3n2os. no3sh no2s3p n2oste nost1r 2nostv no3tab no2tä no4t3ei no2tel no3t3h no4tha no2t3in no2t1op no2tr 3nov 3now 2n1o2x 3noz 2nöd 2nö2f 4n1ö4l 2n3p4 npa2g npro1 npsy3 2n1q 4n3r2 nräu3s nre3sz nrö2s1 6n1s n2s1a2d n2s1all n2sang n2sant n2saus n3sav n2s1än n2s1äus ns2ca n6schef n4schro nsch7werd ns1eb ns1e2d nseh5ere nsen4sp ns1ent n2s1ep ns1erf ns1erg n2serh n2s1erk n2s1erö ns1ers n2s1erw n2s1erz nse2t n4s1eta n3sex nsfi4l n3sil n2simp n2s1ini nsi4te nsi2tr ns2kal n2s1op n4s3ort. 
nsp4 n4spat n4speri n4spers n4sph n3s2pi ns4pie n2spo ns3pon n4sprä n4s3prie n4spro nsrü2 ns3s2 nst1ak n3star n3stat n4stat. n4s3tate nst3eif n3stemm ns4tent ns6terbe n5s6terne n5s6terns nst4erö ns2ti nst5opfe ns4tor n4strac n4strie ns2tu nst2ü nstü1b n2sty ns2um n2s1un ns2ung ns4unr ns4uns n3sy n4s3zi 2n1t nt3abs n3t2a3c n3t2al nta3m nt1ang n4tanza nt2arb nt1ark nt2arm nt4at nt1äm n2t1äu nte3au nte2b nt1ebe nte1e nte3g6 nt1eh n2teig nt2en nt4ene nten6te. n3ter nt4ern nt4ers nt4ert n4t1ess nteu3 nte3v nt2her n2t3ho n3thr n3t4hu nti3c nti3k4l n2tinf n2t1inh ntini1 nt2ins n3ti1t nt4lem ntmen2 ntmo2 n3to nto3me nton2s1 n3tö nt3rec nt3reif n5trep nt4rig n5trop n2t3rü n4ts nt3sa nt4sau nts2o nts2p nt4s3par nts2t nt2sto n3tu 3n4tu. ntum4 ntu2ra ntu4re. ntu4res n3tü nt3z2 1nu. 1nu1a nu3ar nubi1 1nu1c 1nud 3nue nu2es nuf2 nu2fe 1nug 2n1uh 1nui nu3k4 n2um. 2n3umb 2numf 2numg 3numm 2numr 2n1ums 2n3umz nu2n 2nuna 1n2ung4 3nung. n3ungl 2n1uni 2nunt 1nuo 2nup 2nur 3nu2s nu3sc nu3se nu3sl 1nut nu2ta nu4t3r 1nuu 1nux 1nuz 3nü. 2nü4b nür1c 3nüs 1nüt 2n1v2 n3ver 4n1w 1ny. 1nyh 2nymu n1yo 1nyr 1nys 1nyw 2n1z n2zad n2z1a4g n2zan n2z1au n2z1än n2zär nzdi1s nz1ec n4zense n4zentw n4zentz nz3erwe nzi2ga nzig4s nz1ini n2zor nz2öl nz3s n2zurk n2z1wa n2z1wä n2zwö n2z1wu ño1 2o3a2 o4abi o4ac oa3che oa3chi o4ad oa3de oa4g o4ah o4a3i oa3ke oa4k1l o4a3la o4a3mi o2ar o2as 3oa3se o4at o5au o1b ob2al 2oban o3bar 2o3b2ä 2obb ob2e 2obe. 2obea ob3ein 2o3b4en oben3d4 oben3se ober3in4 obe4ris 2obew 2o3b2i obi4t ob3ite 1obj ob1l o2b3li 2o3blo 2o3bo o2b3re o3bri ob3s2h ob3sk obs2p ob2sta ob3sz 2o3bu obu2s 2o3bü 2oby4 2oc o3ca oc1c o1ce och1a ocha2b o1che oche4b o2ch1ec och1ei ocher4k och3l och3m och1o och3ö2 och3r och1s ocht2 och3te o1chu ochu2f och1w o1ci o1ck o2ckar o3cke ock2er o3cki o2cko ock3sz o1cl o1ç o1d o3d2a od2dr o3deb o3d2e1i odein3 ode2n1 odene4 ode3sp o3dex 2o3dia o3dir o3div o2don odo4s 2odr o2dre odt4 2o3du 2o1e2 o2ec oen1 o4e3s o2e3t o3et. o3ets o1ë 2ofa of1a2c of1am of1au o2f1ei of2en o3fer of2f1a of2f1in 1offiz of2f5l of2f3r offs2 of2fu 2ofi of3l of1la of4lä of4lö 2ofo 2o1f1r of3ra of3rä of4rü ofs1a of4sam of2spe of2spr of2s1u 2oft of2tei of3th 2o1g o2g1ab oga3d og1ala og1ang o2g1ei oge2l1i o3gh ogi2er og2lo o3g4n ogs2 og3sp og1ste o1ha o1hä o1he o2h1eis ohen3s o2h1ert o2h1erz o1hi ohl1a ohl3au oh3lec ohl1ei oh3lem oh3len oh3lep oh4lerg oh4l3erh oh4lerw oh3lo ohls2e oh2lu 3ohng oh2ni 1ohnm oh2n1o o1ho oho2la oh1o2p o2h3ö ohr1a oh4rin oh1ro oh1s oh3t o1hu oh1w 2o1hy 2oi o1i2d o3ie o1im oimmu4 o1in oi2r o2isc o3isch. o1ism oiss2 oi1th 2o1j 2o1k oka2la okale4 3o2kel oki2o ok1lä ok4n 4okr ok2s1p okt4 2ol o1la o2lab o2l1ak ol2ar olars2 ol1auf o1lä ol4dam ol4dr ole3e ol1eie ol1eis oler2 ole3s ol1ex o1lé ol2fa ol2fl olf1r ol2fra ol2gl ol2gr ol2i oli3k4 ol2kl olk3r ol2kre ol2lak ol2l3au oll1e2c ol2l1ei ol2lel oll5ends ol4lerk oll5erwe o3lo ol2of olo3p2 ol1ort ol2str o1lu 3oly 1olym ol2z1a ol4z3ern ol2zin ol2zw 2om o2mab oma4ner om2anw om1art o2m1au o2meb ome3c o2m1ei o3m2eis o2mel o3men. 
o2mep o2meru om1erz om2es omiet1 o2m1ind om1ing o2m1int om3ma om1org om3pf oms2 omtu3 o4munt omy1 2ona ona2b o2nae o3nal on1ap o2narb on2au on3aus 2onä onbe3 2onc onderer5 2one one2i one2n3 onens2 on1erb o2n1erd on1erg on1erö o3nett on3f2 on3g2l ong4r ong3s 4o3ni on2i3d o4nikr o4n1im on3ing on3k2 onli4 onlo2c on3n2an on3n2e ono1 o3nod o2noke on1orc ono3s ons1a onsa4g on4sam on2seb onse2l onsi2 ons3l ons1p onst2h on3t2a ont3ant on4t3end ont3erw ont2h on4t3ri ont3s o1nu 2onuk on3v 1ony on3z o1ñ oof2 oo2k3l o1op o1or oor3f oo4sk oo2tr 2o1ö2 o1pa opab4 o2p3ad op3akt o3pan opa5s o1pec o1pei o1pe4n 2opf. op2f3a op3fah o2pfe op4ferd opf5erde opf1l opf3la op1flü 4oph2 o3phe o1pi opi5a4 opi3er. opi5ers. opin2 op5lag o2p3le op3li 2o3po op4pl op2pr 2o1pr 1opsi op3sz 1op3t4 o1q 2or. or1a or3a2b o1rad 2orak 2oral o2r3alm or4alt 3oram or2and o2ranh or3arb o1ras or3att o3rä or1änd or1ät or2bar orb2l or1c 2orca or2ce 4orda or2d3am or2dau or4d3eng or2deu or2d1ir or2dit 1ordn or2do 2ordr 2ords ord3s2t or2dum 2ordw 4ore ore4as o2r1e2ck o2r1ef ore2h or1eig o2rein or1er o2rerf or1eth 2orf or2fle orf3s4 or3ga 2orget or3g2h 2orgia orgi1e or2gl or3gle or2gn 2orh 2o3ric 4orie. o4rient o3rier 4oril 4orin1 2orit ork2a or2k3ar ork2s 2orm or4mans or4ment or5ne. or3n2o1 2o1ro oro3n2a 2o1rö 2orq 2orr orr4a or3rh 2ors2 or3s4a orsch5li or3sh or3sz or2t1ak or4t1an or2t1au or2tär or2tef ort3eig or4t3ent or4t3ere ort3erf or2t3ev or2the ort3ins or4t3off or2tor or4tö or4trau or4t3räu ort3re ort3ric or2t1um o3ru or2uf o4r3un o2r3ü o2rya 2o3s2a os3ad os4an osa1s o3sche os4co 2o3se ose3e o2s1ei ose2n o4sents 2osh o3s2hi o3sho 2osi o3sk o4ska os3ke o4ski 2os2kl 2os2ko os2lo 2oso 2os1p os2pe os3pec o3s2po os2sa oss3and os4sä os2sei os4s3en4k os4s3enz os2s3o os4son os2s3p os2s3t ost1a2b os4t3am ost3ang os3tarr os4ta4s ost1au os4tei oster3e os6t5er6we os2t3h os3til os3to os4t1ob ost3ran ost3rä ost3re ost3rot ost3uf 2osu4 2o3sy o3s2ze o2ß1el o2ß1en2k o2ß1enz o2ß1ere o2ß1erf 2o1t ota2go o5tark o2t1au ot3aug o2teb o3t2e1i otei4n ote2l1a ote4lei ot4em3 otemp2 o2t1erw ote2s 4ot2h ot4he ot5hel o4t3hi ot3hos o2thr o2til o2t1i2m ot2in otli2 ot4ol ot1opf ot2or oto2ra oto1s o3tra o2t3re ot3rin ot2sa ot3sc ots1p ot4spa ots2pe ot2spr ot4terk ot2th ot2t3r ot4tri o3tü o2u oub4 ou2ce ou1f4l oug2 ou2ge ou3gl o3uh ou4le. o3um o3unds oun4ge. 2our ouri4 our4ne. ou3s2i outu4 2ouv 2o1ü o1v ove3s 2ovi oviso3 2ovo 2o1w o3wec owe2r1 o3wi o1x ox2a ox2e 1o2xid ox3l o2xu 1oxy o1yo oy1s4 2o1z o3z2a oz2e ozen4ta o3zi ozon1 órd2 ö1b öbe2la öbe4li öb2l ö2ble ö2b3r öb2s3 2ö1c öch1l ö2chr öch2s öchs4tu öcht4 ö1d ödi3 öd2st ö1e 1öf öf2fl öf3l ögen2s1 ög3l ög3r ö1he öh3l2e öh3ri ö1hu ö3ig. ö1ke ö2ko3 ök3r 3öl. öl1a2 öl1ei öl1em öl4en öl2f1ei öl1im öl1in öl2k3l öl3la öl2nar öl1o2 öls2 öl3sa öl3sz ö2l1u öl2ung ölz2w öm2s 2ön ön2e ö3ni önizi1 önn2e ön2s ön3sc ön3sp ö1nu öo1 ö1pe öpf3l öp4s3t ör3a2 ör1c ör2dr ö2r3ec ö2r1ei ö2r1e2l ör2erg ö2rerl ö3r2erz ör2f3l ör2gl ö2r1im ör2kl örner2 ör1o2 örs2e ör3s2k ört2e ör2tr öru4 ö2r1une ö2sa ö2scha ö4sch3ei ö2schl ö2sch3m ö2schw ö2s1ei ö2sp ös2s1c ös2st ö2st ös3te ös2th ös3tr ö3su ö1ß 2ö1t ö2t3a öte4n3 öt2h öts2 öt2sc öt2tr ö1v ö1w ö1z öze3 özes4 p2a 1pa. 1paa 1pac pa3da pa2dr pa1f4r pag4 pa3gh pa1ho 1pak pa1k4l pak2to 3pala pala3t 1palä pa3li 2palt pa2nar pa3nei pa2neu pan3k4 2panl 2pann 1pa2no pan3sl pant2 panz4 1pap papi2 papieren8 papie8r7end 3para pa2r3af par3akt 1parc pa5reg pa5rek 2par2er 2parg pargel6d 1park. 
par4kam par4kau par2kl par2kr 1paro 2parp 1partn 1party par3z2 pa3s2p pa4st 2paß 1pat pat4c pat4e2 pa5t4r 1pau p3auf pa3uni 1pä 3pä2c pä3cke 3päd 3pär 3päs pä4t1e2h pä4t3ent pä2t3h pä2to pät3s4 2p1b 2p3c 2p1d2 pda4 p2e 1pe. pe2a pea4r pech1 1ped pe2en pef4 pei1 2peic pe1im pekt4s 2peku 3pel pe2l1a4 pel3d pe2let pe2lex pe3li4n pe4l3ink pell2a pell4e 1pem pena4 pe3n2al pen3da pe4nen 1penn pe2n1o 3pensi 1pensu pen3z2 1pep pe1ra per2an pere2 1perl per4na 3pero pe2rob per2r1a 1pers 2perse 2persi 5perso perwa4 pe3sa pes3s2 pe2st 3pet 1pé 4pf. p2fab p2fad p2faf pf1ai p2f1ak pf1ans p2fa4r pf3are p2f1au 4p3fe. p2fei pf1eim pf1ein p3fen. p2fent p3fer. pf2erw p3f2es pff4 p2f1in3s p2f3lä pf3lei pf3lie pf3lo pf3lu p2for pf3r pf1ra 2pfs2 pf3sl pf3sz 2pf3t 2p1g pgra2 1ph 4ph. ph2a 2phä 2phb 4phd 2p1hei phen3d phen3s 2ph1ers 2phf 4phg phi2ka 4phk ph2l 2phm 2phn p3hop 2phö ph4r 2phs ph3t2 2phthe phu4s 2p1hü 2phz pi2a3 pias4 pi3as. pi3chl p4id2 piegelei8en pi2el piela2 3pier 3pik 1pil pi3le pil4zer pin2e pingen4 ping3s 3pinse pi2o pi3oi pi3onu 3pip pi2pe pi4pel pi3ri 3pirin 3pis 4piso pi3t2a pi1th pit2s 2pitz pi2z1in p1j 2p1k2 pku2 pkur1 1p2l4 4pl. 3p4la p5la. p5lad plan3g 3plä 2ple. ple1c p4leg ple5n4 2p3ler 2plig p4lik p4liz p4lo 2p3lu 2p1m2 2p1n 1p2o po3b4 po1c 3pod 2p3oh po2i po3id 3poin 3pok 3p4ol po2lau po3li po4lor 2pond po1o2b po2p3ak po2p3ar po1pe po2pl po3pt po1ral po1rau 2porn por3s por4tin por4tre por6tri pos2e po4sta pos4t3ag po4stä po2s3te post3ei po2sto pos6tr post3ra po3ta 3pote po2t1u po2w po3x pö2bl pö2c 2p1p p2p3a2b pp3anl ppe4ler ppe2n1 p2p1f4 p2p1h p3p2ho pp3l pp5lan pp1lä p2ple p2p3ra p2p3re p2pri pp3sa ppt2 p2r2 1prak 1prax p4rä 1präd 1präg 3präm 3präs 2pre. 2prec 1pred pre2e1 1prei 3preis 2p3rer 3p4res pri4e 2prig 1prinz 1p4ro 3prob 2proc 3prod 3prog 3proj 2pross pro1st 3prot 1prüf 2prün 2p1s 4ps. ps4an p3se p3s2h ps1id p2sö ps2po p2st p3sta p3stea p3stel p3s2ti pst3r ps2tu p3stü 3p2sy ps2ze 2p1t pt1a pt2ab pt3alb pt3at p3te p4t3ec p4t1ei pte4l p4tele p4t1ent pt3erei p4t1erw p4t1erz p2th pt1in1 pto3me p4tos pto2w p2t3r pt3s2 ptt2 pt1um pt1urs ptü4 3p2ty pt3z 1pu pu1a pub4 2puc pu2dr 2p1uh pul2sp 2pund pun2s 2punt 2pur pu2s3t 3put put2s 1püf 2pül pün2 2p1v 2p1w pwa4r 3py1 pys4 py3t 2p1z qu4 1queu 1ra. ra2ab 2r3aac r3aal ra3ar r1ab ra2bar rab2bl 2rabd r2aber 2rabf 2rabg 1r4abi ra2br 2rabs 2rabt 2r3abw 1raby ra1ce 2r1acet ra4cheb ra4chin racht3r rach6trä ra2chu r2ack r2ad r4ad. ra2dam 2radap 3radf 3radl r3a2d3r rad5t 1rae r2af raf3ar ra2fer ra3ge ra3gle ra2gn 3r2ahm 4raht r2ai 2raic rail4l 2r3air 1rake 3ra1k4l ra2kre ra2kro 2rakti 3rakü r2al r4al. ra2la2 ral3ab rala4g r3alar ral3b 3r4ald ra3le 2ralg r4ali rali5er. rali5ers ralk2 ral3la rall2e 2rallg 2r3alm. r3alp. 2ralpe r4als r3al3t r4alt2h ra2lu 3raly rama3s ra2mer 1r2ami r2amm ram4man ram6m5ers ram4m3u 2r1amt ramt4s r2an. 4ranc r4anda r4ande ran4dep ran4d3er rand3s 4r3anei r4aner 2ranf 1rangi rani1e ran2kr 2ranl 2r1anm 2r1anp 2ranr r2ans. r2ansp ran4spa 2rantr 2r3anw r2ap 2rapf r1ar r2ara 2rarb 3rarei rar3f4 ra2r1in r2ark r2arp 2r3arz r2as r4as. ras2a ra4schl 2r3asph 2raß 1rat r4at. ra2t1a r3atl rat4r rat2st 2r3atta 4rau. 3raub. 4raud rau3e2n 2rauf 2raug 3raum rau4m3ag rau4man rau2mi 3rausc 2rausg rau2sp 2raus5s raut5s 1raü r2ax 3r2äd 4räf 4räg 2räh 2räm 3rän. 3räni 3räns 2r1är r2är. rä3ra rä2s1c 3rätse rä2u räu2s räu5sche 4räut 2r1b r2b1ab r2b1a2de r2bak rbal3a rba3re rb1art rb1auf rbb2 rb1ech r4belä rb1ent rbe3r2e r3b2la rbla2d r8blasser r4b3last r3blä r2ble. 
rb3ler rb2lin rb2lö rb2o rb4ri rb2s rb3se rb4sei rb3ska rbs1o rb3sp rb4stä rb3str rb2u rby4t 2rc r1ce r1che. r1chen r1chi rch3l rch3m rch3r rch1s2 rch3sp rchst4r rch3ta rch6terw rch1w r1ci r2ck1 r1cl r1ç 2r1d r3d2ac r2daf r2d1ak r2d1al rd2am rdani1 rd1ant rd1anz r4dap r2dei rd2ei. r2d1elb r3den rden3d rden4gl rde3re rder4er rderin6s r4d3ernt rde3sp rdi3a2 rdia4l r2d1inn rd1it rdo2be r3don rd1os r2dö rd3rat rd4ri rdt4 rd3ta rd3th rdwa4 1re 3re. re3aler re2am re3as re3at. re3ats 2reä re2b1a re2b1l reb1r reb3ra re2bü r2ech rech3ar 4rechs 2reck. 2recki 3red. 4redd 2redit re1el re1er 3refe 4reff 3refl 3refo 3reg 5reg. rege4l3ä re2hac re4h3ent re2h1i rehl4 re2h1o r2ei. rei4bl r2eie 2reig 3reigew rei3l2a rei3l2i reim2p r1ein rei3nec 4reing r3eink 4reinr rein8s7tre re1in2v reister6 3rek 4re2ke re3la 2r1elb rel2e re3lei 2re2lek 2r1elf re3lo 2r1elt relu2 r4em. r2emi 4rempf 4remu r4en. r2ena rena2b re3nal re2nä 3rendi ren3dr re4n3end ren4gl 2rengp re2ni r1ense 2r1entl 2r1ents 2rentw 4r3entz r2enz re3or 3repe 3repo 4repp 3r4er. 2r1erb r2erbr 2r1erd r1erf r1erg r4ergen r1erk 4r3erken r2erki r1erl 4r3erlau 2rerlö 2r1erm rer2n 2r1ernä 4r3erns 4r3ernt r2ero re2rob r1erö 3r2ers. 2r1ersa r2erse 2rersp r1ert r2erte 2rertr 2r1erz rer5ze r2erzy 3r4es. re2sa 3rese 3reso 2ress ress2e res6s5erw 3rest re1sta re2s2tu 3resu re2thy re2u reu3g2 2reul re3uni 2r1eur 2reü 2r3evid r1ew rewa4r re2wi 2r3e2x1 3rez 4rezi 1ré 2r1f rf1ält rf2äu r2fent rf2es rfi4le. rf3lic rf3lin rf4lö r3flü rfolg4s r3for rf4ru rf4rü rf2sa rf2s1ä rf4s1id rf2s3pr rf2s3t rf2ta rf3t4r rf2u 4r1g rg2ab r2g1a2d r2g1ah r2g1ak rg2an rge4an rge2bl rge4l3er rgen4z3w rge4ral rge4tap r2geto rgi4sel r2glan r2gleu r2glig r2gno r2g1ob r2g3ral r2greg r2gres r2gret rg3rin rg3sp rgs4tr rg5s2tu r1h4 2rh. 2rha r2ha. 2rhä 3r4he. 3r4hen r3her r2hoe rho2i3 2rhol 2rhö 2rhs rhu2s 1ri ri3am ria1s ri3at rib2bl ri1ce ri1cha rid2 ri2d3an 2ridol r2ie rie2fr ri1el riene4 rien3s rie2nu ri1er. ri4ere ri3ers. ri3esti ri1eu ri2f1a ri2f1ei ri2fer ri2f1o ri2fr rif3s rif4ter 3rig 5rig. ri4gene 5rigj rig1l 4rigr rik1l ri4kla r2imb 2rimp rim2s rim4sc r2i3na 2r1ind rin4dex rin4diz ri3n4e rine1i 2r1inf rin2fo ring3l rin2gr 2r1inh 2rinit 2rink 3rinn 6r5innenm 4r3inner 4rinnta r1innu 2r1ins 3r4ins. rin4so rin2sp r4inspi 2rint rin4teg rin4t5r 2r1inv 4r1ir r2is ris2a ri4scho ri4schw 3risik ri3so ri4s1p 3riss ri2st ris6t5ers r2it r3i2tal ri3t2i rit4r rit2tr 5ritu rix1 1rí 2r1j 2r1k rk2am r2käh r3klau r2klis rk4lo rk2lu rk4n r2k5nu rk3räu r2k3rea r3kri rk3rin rk2s1e rk3shi rk2sp rk1st rkstati6 rk4stec rk2ta rk4t3eng rk4t3erf rkt3ers rk6tersc rk4t3erw rk2tin rk2t1o2 rk2t3r rk3tra rk4tri rk1uh rk2um rku2n rk1uni 4r1l r3l2a rl2e rle2a r3lec rle2i r3let r3l2i rli2s r3l2o rl2ö rlös3s rl2s1p rl3ste rl2s3to rl3t 4r1m r3m2ag rma2la r2m1ald rm1ans rm1anz rm1a2p r2maph rm2är rm3d2 r3me. r2m1ef r2meo rm2es r2mide r2m1im r2m1o2ri rmo1s rm3sa rm3sta rmt2a rm2u rm3ums 4rn rna2b rna4n rn2and rn3ani r2n1anz rn2a2r rn3are rn3ari r2nau rnd4 rn3dr r3ne rn3e4ben r4nef rn2ei rn3eif r4n3eis rne2n r4n1ene rn3ense r4nerf r4n1erg rn4erhi r4nerk r4n1ert r5nes rn2et r4nex rn3f rng2 r3ni r4n1in r3nod r2n1op r2n1or rn1ö r1nöt rn3s2ä rn3s2p rn3s2z rn3t2e r1nu rn1ur r1nü r1ny ro2bei 2robj 3robo 2robs ro1c 3rock. r2o3de ro3e4 2rof roh1l roh3na 3r2ohr 3roi ro1ir ro3le rol4lan rol3l4en rol3s 2roly 4rom. 
ro2mad ro2mer 4romm 4romt r2on ro4nerb 3ronn rons2 ron4tan 4ro1ny ro1pe ro3ph r1or r2ora ror3al ro2rat ro2rei ro2r1o ror3th ro3sh ro3s2i ro3smo ros2s1c ro3sta rost1r 4roß ro2ßu ro4tag rote3i ro2tho ro4tri rots2o rot2ta ro3t2u ro3unt 3rout 2rox rö2b3l rö2du 2röf 4rög 1r2öh r1ök 1r2öl 3römi 4röp r1ör r2ös. r2öse 2r1p2 r3p4a r3p4e rpe2re rpe4r3in rpf4 r2pli r3po rpro1 rps3t rp3t r3pu r1q 2r1r rr2ab rr2ar rr1äm rrb2 rr1c r3r2e rre4ale rrer4s rre2st r4rew rr2he rrik2 rr2n3a rr2o r2r3ob rro3m rr2st rr3stu rr2th r3ru r3r2ü rrü1b 4r1s rs3ab r2s1a2d r4samp r4s1amt rs2an r2s3ang rs3anp rs3ant rs3ar r3sch2e r6scherl rsch2l r3schu r3schw r2sein rse2n1 rs2end rse4ne rs1ere rs1erö rs1ers rs1erz rse2t rs1eta r3sho r3si rs2kal rs2kan rs2kie rs2kis rs2kl r4sko r4skr r4sku rs3l rs4no r3so r4sob r4s1op r4sord r4s3ort. rs2p4 r2s3ph rs3s2 r4stant rs2tec r6st5eing rs4temp rs4terb rs4t3er4w rs2th rs2ti r3stie r2stin rst3ing r2stip r3sto rs4tob r4stot r3stö r3s4tr rst3ran r6strang rs2tu r3s4tü r3swi r3sy 4r1t rtal2 r2t1alm rtals1 rt1am rt1ang rt1ann rt1ant rt1anz r2t1ar rt3a4re r2t3att rt1är rte1e2 rt4eif rtei3la rtei1s4 r2telf r2temo rte2n1 rten3s2 rt3erei r4terfa r4terfo r4t3erh r2t1erk r4t3er4la r4t3erle r4t3ernä rter4re rt1ers rte3s2k r2thi rt3hol rt2hum r2t1id r2t1ima r2tinf rto1p rt1or rto2ri r3tö r4t3rak rt3rec r4treis r5tri rt3ros rtrü2c r4ts rt4s1eh rt2so rt2spa rt2spr rtt4 r2t1urt r3tü rt3z 1ru ru1a ru3a2r3 rube2 ruch3st ru6ckerl ru2cku rude2a ru2dr 3ruf ru2fa ruf2s3 4rug 2r1uhr 3ruin ru1ins ru1is 2rum 4rumf ru2mi 4ruml r2ums. 4rumz 2r1una 2rund run2d1a r2unde rund3er run6derl run6ders run6derw 2r1unf 2rungl 2r1u2ni 4r3unio run2kr 2r1unl 2r1unm 4runn 4r3unt 2runw ru3pr 4r3ur ru2ra ru2r1e 5ruro ru2si rus2s1p rus4st ru2st ru3sta 3rut ru4tei rut3h ru2t1o2 ru2t3r 4ruz ru2zw 1rü 2rüb rü1ben rü1ch 4rümm 2r1v rve4n1e 2r1w rwun3s 4r1x 1ry ry2c 2r1z rz1a2c rz2an r2zar r2zas r5zene rz1eng r4z3ents r2z1erf r2z1erg r2z1erk r2z1erw rz1id r3z2of rz1op rz2ö rz3te rz2th rz2t3ro rzug2u r3zwä r3z2wec 1sa 3sa. 3saa 2s1ab sa2be 3sabet sa2bl sa3ble sa2br 4sabs sa2cho2 sach3t 2s1ada s1adm 2s1a2dr 3safa sa2fe 2s3aff 3safi sa1f4r 3saga sa4gent sag4n sa2gr 3s2ai sa3i2k1 sail2 2s1ak sa2ka 3saki 3sakr 4sakt 3s2al. sa4l3erb sa2l1id 3salo sal2se 2s1alt 3s2alz 3sam s3ameri 5samm 6s1amma 4s1amn s1am3p4 sam2to s1an s2an. 2s3a2na s3anb s2an2c s2and s4and. san4dri 3sang. 2s3anh 3sani 2s3anl 2s3ans san4sk 4s3antr 2s3anw 2s1ap sa2po 3sapr 2s1ar 3s4ar. 3s2ara 4s3arb 3s2ard 3sari s3arr 3s2ars 4sarti s1asp 4s3a2sy 3sat sat2a 4s3ath 4s3atl 4satm sa2tr sa3ts sat4z3en s1a4u 3sau. 3sauc 3saue 2s3aufb sau2gr 3saum 3saur sauri1 2s3ausb 3s2ause s3ausw 2s3av sa2vo 1sä s3ähn 3säl s1ält 2s1äm 2s1änd 3sänge 2s1är 3s2ät 3säul 2säuß 4s3b4 sba4n sbe3r2e 1sc 2sc. 2scam s2can s2cap 2scar 2s1ce 6sch. 2schak s4ch2al 4schanc 4schang 2schao s4chä 4schb 4schc 2schd 3sche. 2schef sch3ei. 4schemp sch2en 3sches 4schess 4schex 2schf 2schg 2schh schi4e s4chim 4schiru 3schis 2schk s4chl 4schl. 4schle. 6schlein sch6lit 2schmö 2schn. 2schox s4chö 2schp 2schq 4schre. 4schrin sch3rom 4schrou 6schs schs2e sch3s2k 4sch3t scht2a scht4r s4chu 4schunt sch2up 3schü 2schv 4schwet sch4wil 2schz 2scj 4s3cl 2sco 3s4cop 3sco4r s2cr 2scs 2scu 4s3d2 sda3me sde1s sdien4e sd4r 1se se3at. 2s1e2ben seb4r 2s1echo s1echt 2s1e2ck 3see se1ec se2e1i4 see3ig seein2 se1er. se1erö 2s1eff se2gal se2gl seg4r 3seh se2h1a4 se3he se4h1ei se4hel se4herk se2hin seh1l seh3re seh1s seh3t se2hüb 2s1ei. 2s1eie 2s1eig s1ein 5s4ein. 
2seinb sein4du sei3n2e sein4fo 4seing 2seinh 4seink 2seinl 2seinn 4seinr s4eins. 4seinsp 4seinst 2seinw 4s1eis 3s2eit 3s2ek s2el. se2l1a se3lad sela4g se3lam sel1ec 4selem se4l3erl sel3ers 2self. s3elix se2l3ö s2els sel3sz sel3tr s4e3ma 2s1emp 3s2en. se4nag se2nä 2s1endl sen3gl 3s2eni 3senk se2no se4nob 3s2ens s2ent. 4s1entf 2s3entg s2enti 2s1ents 2sentw 2sentz se2n3u seo2r 4s1e2pos 3seq s4er. 3sera ser3a2d se2r3al se5ref s3ereig 6sereign se4r3eim se4r3enk ser2er 2s1erfo s2erfr s3erfü 4serfül ser3g s2ergr s1erh 2serhö 3seri 4serken 2s3ernt se2rob 4s3eröf s2ers. 2sersa 4serseh s4ert. s2erta seru2 se4r1uf se3rum se3rund 3s4erv 5ses. se2sel se1sta se3su 3set 4se4tap se2tat 4s1e2th se1u2n 2s1ex se2xe 4sexp sex3t 1sé 4s3f4 sfal6l5er sflo4 4s3g2 2s1h sh2a 3s2ha. sha2k 4s3han 1shas s3hä s3h2e 3shi. 3shid shi4r sh3n s3hoc 4shof 3shop sho4re 3show s3hö sh4r 1si si2ach si3ach. si2ad si3am. 2siat sib4 5si1c 2s1i2deo s2ido 3s4ie siege4s sien3 si3ene si1err sie2s si1f4 3s4ig si2g1a2 sig4n si3gnu si2g3r sig4st si2k1ab si2k1ä sik3erl si2ki si4k1l si2kr sik3s sik3t4 si2ku 3silo 2s1imm si3n4a 2s1ind 2s1inf sing1a sin3gh sin3g4l sin2gr sing3sa 4s1inh sin1i sini1e 2s1inq 2s1ins 2s1int 4s1inv 3sio sion4 3s2is si2sa si4schu si2s1e si2s1o si2s1p sis3s 3s2it si2tau sit3r si2tra si3tu siv1a sive3 si2vr 1sí 2s1j 2s1k2 4sk. 3skala 4skam 4skanz s3kar 4skas ska4te. 4skateg ska4tes 4skb s4kep 3s2ki. s2kif s2kig 3s2kik 4skir ski1s 3skiz sk4l 4s3klas sk4n 4skom 4skor 4skow 4skö 4sks 4sk3t 3skulp 2s1l2 3slal 4slan sla2ve s2law s3lä sl3b s3le sler3s s3li 3s4lip sli4tu s3lo. slo3be s3loe 2s3m2 2s3n4 4sna snab4 sni3er. sni3ers 4s5not 4snö 1so 3so. so4a 2s1o2b so1c so3et 3soft 3sog s1o2he 4sohng 2s1ohr 3sol so3la so4l1ei sol4ler 4so2ly 3som 3s2on son3au sone2 son5ende son3sä son2s1o so3o 2sopf 3sor. so1ral s1orc 2s3ord so2rei 2s1orga 5s2orge 2s1o2rie so2r1o2 3sors so4ru 3sos s4os. 4s1ost so3unt 3sov 4s1o2ve 3sow 2s1ox 3soz 1sö sö2c sö2f 2s1ök 2s1ö2l s1ös 1sp2 2sp. 2spaa 4spak 2spala spani7er. 4spap 2spara 4sparo 3sparu 3spaß 4spau s2paz s2pä 3späh 2spär s3pe. 2spel 4spensi spe3p4 s1peri 2sperl 2spero s2perr 2spers 4spet 3s2pez 4s3pf 2spha s4phä s3phe 3s2pi4e 4spier4 spi2k 4spil 3spio 4spi4p 4spis 2spl 4spla 4splä 4sple 3s2pli s3p4lu s3pn 2spod 2spog s2poi 2spok 4spol 4s3pos s2pott 4spr. s2prac s2pran 4sprax 2spräm 4spräs 3s4prec 2spred 2spres 2sprob 3spross 3spru 4sprüf 2s3ps 2s4pt 2spun 2spup 3spur 4sput 4spy 2s1q 4s3r4 srat2s srat4sc sret3 srö2s1 srücker6 srü2d 6s1s ssa3bo ss1a2ck s5saf s3sag ss1aj s3sal s4s1ala s4s1alb s4s3amt s4s3ang s2sano s4sans ss2ant s4s3anz s3sa1s2 ss3att s3s2ä s4sce s4sco ss1ec s2s1ega sse3ha sse3inf sse3in4t sse6r5att ss1erö ss3erse s3s2es sse3ta ss3l ss1off ssoi4 s2s1op ss1ori ss2po s2spro ssquet4 ss3s4 sst2a s3stel ss2th ss2ti ss4tip ss2tur s3stü ss1ums s1t 6st. s2ta 4sta. 3staa 2stabb st2ac 3s4tad 3staff 2stag 3stah 2stak 2stale s3ta3li 2stalk st1alm st1alp st1ami 4stan. sta4na 3stand 2stani 4s3tann 2stans 2stanw s4tar. 4stari s4tars st1asi s3tat. s4tau. 2stauf 2staum 3staur 2staus 3staus. 2stax 3s2tä 4stäg 4stält s4tänd 5stätt s3täus 2stb 2st3c 2std st2e 4s5te. 2stea 4stechn s2ted 4stee 3s2teg ste2gr 3s4teh s2te2i 3steig 4steil 3steilh stei4na 1s2tel 2stel. stel4l3ä 2steln 2stels 2stem 4stem. ste4mar 4sten s5ten. s4t3ends s4t3engl st4ens s4t3entf s2tep 2ster 6s5ter. ste6rers 4sterm 3sternc 4stes s4t3ese stes6se. ste4st 2stet s4teti 3s4tett 3s2teu 1steue 4steuf st3ev 4stex 2stf 2stg 4sth s4thä s4thi s2t3ho s2thu 2stia 2stib s2tic sti2e 2stie. 
s2tieg s2tiel 2stien 3s2tif 2stig 2stik s2til 3s4tim s4tinf s3tinn st1ins 2stio 1s2ti2r 2stis st1i4so 1stitu 2stiv 2stj 2stk 4stl 4stm 2stn s2to 2sto. s3tob 2sto3d 4stod. 1stof s4toff s4t3om 4ston 4stoo 2stopo 2stor. 2store 2storg 2stori s3tort 2stose sto3s2t 1stoß 4stote 4stou 2stow 2stoz 1stö 2stöch 2stöt 2stp 2stq s2tr 2strad 2strag 1strah 4strahi 4strai 4strak 2stral 4strans 3s4tras 5straß 4straum 4s5träg 4sträne 4s5tref 4streib 5st4reif st3renn 2s4trig 1s4tri2k 2s5tris st3roll stro4ma 1stru 2strua 2strug 3struk 4st3run 2strup 2s4t3s4 2st3t4 st2u 1stub 4stuc 3s4tud 2stue 3stuf 3stuh 2stum2s stum4sc 2stumt stu2n 2stun. 3s4tund s2t3uni 4stunn 2s3tuns 2stunt stu3re st3url 2sturn 2st3urt 2s3tus 1stü 2stüch 2stür. 2stüre 2stürg 2stürs 3stüt 2stv 2stw 3s2tyl 4st3z 1su su1an 3su2b3 su4ba2 4subi 3su1c su2cha such4st 2s1u2f 2s1uh su1is su1it. sul2a sul2i sult2 su2mar su2mau 3s2ume su2mel su6m5ents s3umfe 3summ sum1o2 su2mor s3umsa s3umst su2n 3sun. sun6derh su4ne s1unf 2s1uni 4sunt 3s2up sup3p4 su2ra 2s1url s1urt s4u2s1 su3sa su3sh su3si sus3s 3suv 1sü 2sü4b 3süc sü2d1 süden2 3sün 4s3v 2s1w s3wa s3we sweh2 4swie 4swil 1s4y syl1 sy4n3 2s1z 4s3za 4s3zei s2zena 3s4zene 4szent s2zes 4s3zet s2zis sz2o 4s3zu s3zü 4s3zw 2ß1a2 2ß1b2 2ß1c 2ß1d 1ße 2ß1ec 2ß1e2g 2ß1ei ße2l1a ßen3g ße2ni ße2no ße2ro ß2ers. 2ßerse ßer3t 2ß1f 2ß3g2 ßge2bl 2ß1h 1ßi ßi2g1a2 ßig4s 2ß1in ß1j 2ß1k4 2ß1l ßler3 2ß1m 2ß1n2 ß1o2 ßos2 2ß1p2 2ß3r2 2ß1s2 ßst2 2ß1t 1ßu 2ß1um 2ß1ü 2ß1v 2ß1w 2ß1z 1ta 3ta. 4taa 5taan 4tab. ta2b1an 2t1abb 3tabel 2taben ta4bend 2tabf 2tabg 2tabh 2tabk 3table 2t3abn ta2br 4tabs 2t3abt ta2bü 2tabw 2tabz 2t1ac 3tacu t1ada tadi3 2t1a2dr ta3d2s 3taf. 3taf2e 4taff t1afg t1af4r 3t2ag ta2ga2 ta2g1ei 4t3a4gent 4ta3gl t3ago tag4st tah2 tah3le tahl3sk t2ai ta3i2k tai2l ta1ins tai4r ta1ir. t1a2ka ta2kro tak6ta 3taktb 3t2aktu 2takz 3t2al. ta2la ta3lag ta3lak t1alb. t1albk tal3d 3t4ale ta4lens tal2lö 3talo ta2l1op 2talt 2tam 3tame ta2mer ta2mi t1ampl t1amt 3tan. t1a2na 2tanb 4t2and ta3ne 4tanf 2tang 3tani t2ank t3ankl 4tanl 2t1anme 4t1anna t2ano t1ans 3t2ans. 4t3ansi 4t3ansp ta2nu 2tanwa 2tanwä t2anz. t1anza tan6zerh t1anzu ta3or ta2pe. ta2pes 2tapf ta2pl 2tarb ta4rens ta4r3ere 3t4a3ri 4tark 2t1arm 2tart t1arti tar2to ta2ru 2t1arz 3tas ta3sa 4t1asp ta2ta2b ta2tan ta2tau tat3ei ta2tem ta2t3er ta2th tat3he t3atl t4atm ta2tom 4tatue ta2t1um 2t1auf 4taufg tau3f4li 4taufn t1auk 3taum t1ausb 3tausc tau6schr tau6schw t2ause 4t3ausg t1ausk 4tausl 4t3auss 4t1ausw 3tav 3tax taxi1s 1tä 4täb tä1c 4täd 3täe 3täg 4tägy 2täh 2t1ält 4täm t1ämt t1ängs 3tänz t1äp t2är. tä2ru tä2s t2ät 4tätt 2täuß 2täx 1tà 4t3b2 tbe3r2e tblock5e tblocken8 4t1c t3cha t3che tch2i tch3l t2chu tch1w t4ck t3cl t3cr 4t3d4 tdun2 1te 3te. te2a2 2teak te3al te3an 3teba 3t4ebb 4t1e2ben t2ech te3cha 3techn 2teck teck2e te2cki te1em te2en3 te1erw te2es 2teff teg3re 2teh 3teha 3tehä 3tei. t3eifr teik4 3teil 4teilhe 2t1ein tein3e4c t3einge t3einla 4teinn t3eis. t3eisb tekt2 5tel. 3tela te2l3ab te2l1ac te2l1au telb4 3te3le tel1eb tele4be te4l1ec te4l1eh te4lein 2telem tel1en te4lerd te4leu 4t3elf. 3telg te2l1in te2lit 3telk tell2e 5teln te4lost te2l1ö 3telp 5tels tel3s2k 3telt4 tel3ta tel3th 3tem. te2m1ei te2min 2temme te2m1o2r 3temper 2tempf tem3s te4m1u 3ten t6en. ten3a tena2b te4na2d te4na4g te4nas te4nau te2nä t4enb ten3da 4t3endf t6endi 4t1endl t6endo 4t3endp ten3d4r te2n1e2b te2nef te3n4ei. ten3eid ten3ens 4tenerg te2net ten3g 4t1eng. 
ten4gla t4enh te2ni te4n3in t4enj t4enm ten3n tens2e 4tensem t4enta t3entb 4tentd t4ente 4tentn tent3ri 4t3entw 4t3entz ten6zerh ten3zw t3e2pi 3t4er. tera2b te1raf ter3am te3ran. 4terbs 4terbt 3terc 4t3erde. te2re2b te4r3eif te2rel ter3end te4reng te4rerk terer4z 4terfol t4erfr 4terfül 3ter3g2 6tergrei t6ergru t4eri te3ria 4terklä 2t1erlö ter4mer 3termi ter4n3ar 2ternc t3erneu t4ero t3erö ter4re. t4ers. t6erscha ter4ser terst4 t4erst. t4ersti t4erstu tert2 teru2 te4r1uf ter4wäh 6terwerb ter3za 2t3erzb 3tes tesa2c te2san tesä2c te2sel te2spr tes3s2 t2est tes3tan test3ei tes6ter6g tes6terk testes4 te2su 3tet2 t2et. te2tat 4teth 4tetl teu3ere teu3eri 3teuf 3teum te1un 3teur. teu2r3a te2vi te1xa 2t3e2xe 2t1e2xi 4texp 3text 2t1exz 4t1f4 tfi2l 4t1g2 tger2 t1h 4th. 2th4a 3t4ha. t2hag t3hai t2hak 3thal. 4t3hau 2t3hä th2e 1t2he. 3thea 2theb t2hec 2t3hei t4hein t2hek t2hem 1then t4hene t4heni 3theo 2therr t2hes 3these t2heu 1thi t2hik 2t3hil 2t3him 2thk 4th3l 4th3m 2th3n 1t2ho 2t3hoc t3hof 2t3hoh t4hol. t3hor 2t3hot thou2 4t3hö 2thp 1th2r2 4thrin. 4thrins 2ths 2thub 4thun 2thü 2thv t2hy 1ti ti2ad ti3a2m 3tib4 ti1ce tiden2 ti4dend ti2deo 3tief. tieg4 2tieh ti1el ti3e4n3 3ti2er tie4rec ti1et ti1eu 3tif. ti1fr 4tift 3tig ti4gerz 3tik ti2kam ti2kar ti2kin ti2krä ti2lar ti2lau ti2lei ti2lel 3tilg ti2lö til3s tilt4 ti2lu ti2ma2g t2imi tim2m1a 4t1imp 3t2in. ti3na t1inb 4t1ind ti3n2e t1inf tin2g1a ting3l ting3s t1in1it 2t1inj tin2k1l 3t2ins. 4t1inse 2t1int ti1nu 4t1inv 3tio ti3or 3tip ti4que. ti1rh 3tis ti4scha tisch3w ti2sei ti2sp ti1sta 3ti3t2e 2ti3tu tium2 3tiv ti2van tive3 ti2vel ti4v3erl ti2v1o ti2v3r ti2za 2t1j 4t3k4 4t3l tl4e 5tlem tle2r3a 6t5li tlung4 4t3m2 tmal2 tmen6t3 tmo4des 4t3n4 t5na tnes2 tnes4s 1to 3to. to4as to5at 4tobj tob2l to1c 3tocht to6ckent 3tod tode2 4to2d1er to4d1u toi4r 3tok to3la 3tole 4tolz tom1e2 to2men 2tomg 3ton to2nau to2neh 3too to2pak to2pat 3topo 2topt 3tor. to1ra to2rau to4rän 4torc t1ord 3tore to2rel t1org t3orga 3torin tor3int to2rö 3tors t1ort. to2ru t2orw to3sc 3tose to4sk tos2p 4toss 3tost4 to1sta 4toß 3to3te to2tho 3totr tots2 3t4ou touil4 to3un 3tow 2tö 3töch 4töf 4t1ök tö4l 3tön t1öst 4töß 3töt 4t3p2 tpf4 2t1q 1t2r4 2tr. 5tra. 3trac tra3cha t3rad. tra4dem tra4far 3trag 3trahi 4trahl 6trahm 5t4rai 3trak 3tral 2t3rams 3t4ran. 2trand 3trank t3rann 3trans t3rase t3rasi 4traß 5träc 3träg 3träne 4träs 4träß 4t5re. tre4ale 4treb tre2br 4trec t3rech t4reck 6t3red 5t4ree 3tref 4trefe 4trefo 4treg t4rei. 3t4reib 4treic 2treif t3reig 2t3reih t3rein t3reis 6treit t3reiz 2trek 6t3rel t4rem t4ren. 3trend 4trendi t3rent 2trepe 2trepo t4repr t4rer t4res. t4ret tre2t3r t4reu 3treuh t3rev 2trez 5t4ré 2t3rh 3tri 4tric 5trieb tri4er 5trigg t3rind 4tring tri3ni 4trinn t4rip 4tript tri2x trizi1 3tro. 3troe 3t4roi tro2ke 4trom. tro2mi 4troml 3tron 2t3roo t4rop 3tropf 3troy t3röc 2tröh 3trös 2t3röt 3trua 4truk trum2 trums1 2t3rund 3t4runk 5t4rup tru2th t4rüb trü1be trü1bu 2t3rüc trücker6 t4rüg try1 2ts 4ts. 
t4sa4b t3s2ac ts1ad t2s1ah ts1al t4s1amt4 t2san t4s3ar ts1as t2sau t2s1än t3s2cha t4schar t3sch2e t4schef ts4chem tsch4li t4schro ts4cor t2s1e2b t3seil t4seind ts1em tse2n1 t2s1eng t2s1ent t2s1er t4s3esse t2s1i2d ts1ini t2s1ir ts3kr t1slal ts1o tso2r t3sou t2sö t3spal ts1par ts4pare t2spä ts2ped t3spek t2sph t3s2pi ts3ple t2spo t3s2pon t3s2por t4sprei ts3s4 t1st4 t4stag ts3tak ts4tal ts3täti t2s3tep t3s4tero t2stip t4stit ts3toc ts3tor ts3trad t4stran ts3trau t2s3trä t4streu t2stri t4strop t2s3trü ts2tu t2s1u 1tsub t3sy 4t1t tt1ab tta2be tt2ac t2t1ad tta6gess tt1ak tt2al tt2ant tt1art tta1s tt1ebe tt1eif tt1eis t3tel tte2la tte4leb tte4len ttel1o ttes1 tte2sa tte2sä tt2häu t2t3ho t3ti t3to tto1s t3tö t3tro tt3ru tt3rü tt2sen tt2sor tts1p tt2spe tt2spr tt2sti ttt4 t3tu tt2un t3tü 1tu tu1alm tu3an 2tub 3tuc tu2chi 2tud 3tue 4tuf tuf2e tu3fen t3u2fer tuff3 2tuh tu2is 2tuk t3u2kr tul2a t2um. 3t2ume 2t3umf 2t3umg 2t3umk 2t3umr tum2si tum2so tums5tr 2t3umt 2t3umz 3tun. 2t1una 2t1und 3tune 2t3unf 3tung t3unga tung4s5 2tunif 2t1u2nio 2t3unt t1up. tu2r1a4g tu2rä tur1c tu2re. tu2rei tu2r1er tu2res tu2r1e4t turin1 3turn tu2ro tu4ru tu2sa tu4schl tu2so tu3ta 2tü 4tüb 3tüch tück2s 3tüf 3tüm 3tür. tür1c 3türe 3türg 3tür3s 3tüten 4tütz 4t3v 4t3w twa2 twi4e 1ty1 3typ ty2pa tys4 4t1z t2za4 tz1ag tz1al tz1ar tz1au tz1ä t3ze. t2z1e2c t2z1eie t2z1eis tze4n1 tz2ene tz3ents tz1erl tz2ers t3ze2s tzgel2 tz1ind tz1int t2zor tz2ö tz2th tz2tin tz1wä tz1wi tz1wu 2ua u1a2b u1a2c uad4r u1al. ua2lau u1alb u3alet u1alf u3a2lo u1alr u1als u1alt ua2lu u1am u1ans u3ar. uara2b u1ars ua3sa ua2th uat2i u3au u1ay u1äm u1äu 2u1b u8be8cken. u3b4i ubi3os. ub2l ub3lic u2b3lu u2bop ub1r ub3rä u2b3rit ub2san ub2s1o ub2spa u2büb 2uc uc1c u1ce uch1a u1cha. uch1ä u1che u2ch1e4c uch1ei u3ches u1chi uch1il uch1in uch3l uch3m uch3n u2ch3r uch2so uch4spr uchst4 uch4tor uch2t3r u1chu uch3ü uch1w u1ci u2ckem u4ckent u3ck2er u2cki u1cl 2u1d u3d2a uden3s2 uder2e udert4 udi3en uditi4 u2don ud3ra u3dru 2u1e ue2ck u2ed ue2en u2eg u2ela ue2le ueli4 ue2mi uen1 ue2nä ue2ner uenge4 ue2ni ue2no uen2zu u2ep ue2r3a ue2r1ä u2ere u3ereh ue3reig u3erer ue4rerg ue4rerk u3erex uer3g2 u4erinn u3erin4t uer2ne uer4ner uern3s4t ue2r3o u3err uer3sc uer3t2 u3erum u3erunf u3erunt ue2ta ue4tek u3fah uf1ak uf3ar u3fas uf1au u2f1äs u2f1ä2ß u2f1ei u2f1em u3fen. u2fent u2f1erh u4ferle uf2ern 2uff uff4l uf2fro uffs4 uf3l u2fob ufo2r uf1ori uf3r uf3sä uf4sin uf4so uf2spo ufs3tem uf2t1eb uft3s2 u2fum 2u1g u4gabte ug1af ug1ak u2g1ap uga4s ug1au ug3d2 u2g1ei u2g1erf u2g1erl ugge4st ug3hu u2g1l ug3lad ug3lo u3g2lö u4glu u2g3n ugo3 ug1or u2gö u4g3reis ug3ro ug3rüs ug3se ug4ser ug3si ug3spa ug4spr ug4spu ug5stä ug3str ug3stü u2gü u1h 2uh. uhe3s6 uh1la uh1lä uh2li uhme4 uhr1a uh2rer uh3ri uh4rin uhrt4 uh2ru uh4rü uh1w 2ui ui2ch u1ie ui1em u3ig u4ige u1in. u1is. u3isch. u3ischs uisi4n ui4s5t u1j uk2a u3käu u1ke u1ki u1k2l ukle1i uk4n uk2ö u1k4r uk2ta uk2t1in uk2t3r u1ku uku2s u1l ul1ab ul1am ula2s ul1äm ulb4 ul2dr uld2se u2l1el ule4n ul1erf ul1erh ul1erw ule2sa ule2t ul1eta u2lex ul3f4 ulg4 uli2k ul1ins ul3ka ul2kn ull2a ul2les ull3s ulm3ein ulo2i ul1or ul2p1h ul2sa ul4sam uls2th uls3z 2ulta ul3th ul4tri ult3s u2lü ul2vr ulz2w u2m3a2k um1all um1anz u2m1art u2m1aus u2maut 1um3d2 um2en ument4s umer2a u2m1erg u2m1erl u2m1erw 1umf 1umg um1ins um1ir 1umk 1um3l 4umm umm2a umpf4li um2p3le 1umr 3umsat um4ser um2sim um2s1pe um2su um3t2 um2un u2m1ur 1umz un1 4un. 2una. 
1unab un3ac un4al u3n2am u2n3an 2un2as un3at 1unda un4dab 1undd un4dei un4d3erf und5erha 1undf 2undg un2did 1undn un2dor un2d3r 4unds. und3sp und3st un2d1um 1undv 1undz u3ne une2b une2h un2ei. un3ein unen2t un4es4 1unget 1ungew ung5h 1unglü un2g1r ung3ra ung3ri ung4sa un2id un3ide 1u2nif unik4 un2im uni2r 2unis un3isl u3n2it 3u2niv 2unk un2k1a2 un2kei unks2 unk4tit unk2t3r 3unku unna2 un2n3ad un3n2e uno4r un2os 1unr uns2 2uns. un3se 1unsi un3sk un3sp uns4t1r 1unt un3ta unte4ri un3tr unt3s 2untu unvol2 unvoll3 1unw 2unz 2uo u1o2b u3of u3or. u1or3c u3ors uos2 u1os. uote2 u1pa u1pe2 uper1 up2fa u2pf2e u2pf1i u3pi up2pl up2pr u1pr up4t3a2 upt3erg upt1o up4tr u1q 2ur. u1ra u2rab u3raba ura2be u2r3a2m u2r1ana ur2anb u2r1ang ur2anh u2r1an5s u2rar ur3a4ren u2r3att u2r1au 2u1rä ur1än ur3b2a urch1 urd2 ur3di 2ure ur1eff u2rele ure4n u4r1ep ur1erh ur1erw 2urf urf3t ur2gri urgros4 urg3s4 uri2c ur1im ur1ini ur3ins ur1int urk2s 1urlau 4u1ro u3rol uro1s u1rö ur3p ur3sac ur2san ur2sau ur2ser ur4sin urst4r ur4sw ur3s2ze urt2 u3ru urü2 ur2za ur2zä ur2zi ur2zo ur2z1w 2us u4saf us4ann u6schent usch5wer u2s1ec u2s1ei u3seid u3sep use1ra u2serp u2s1ese usi3er. usi5ers. us3kl u4sko us3oc u3soh u2s1op us1ou us3part u2s1pas u2spat us1pe u3s2pek us1pic u5s4piz u2spo us2por u2spu us4sez us2sof ust3abe u1stal us3tau us2th ust2in us3tr u5stras us6tris u1stu u2stun u2stur us2ur u2sü 2u1ß 2u1t ut1alt ut3a2m u2t1ap u2t1ar u2tär u3te ut1eg ute4ge ut1ei. ut1eie ute2n1 u2tent uter4er u4t3ersa ut2es ut2et u4tev u4t1ex utfi4 ut2he u2thi u2t3ho u2thu uto1 uto4ber uto3c ut1opf u2tops ut4or utos4 u3tö ut3rea ut3rü ut3s2a ut2s1ä ut4schl ut4schm ut4schö ut2spa ut3te ut5t4l utts2 utu4re utu5ru u3tü utz3eng ut2z1in ut2zo ut2z1w 2u1u2 uufe2 u1ü2 2u1v4 u2ve. uve3rä u1w 2u1x ux2e ux2o ux3t u1ya 2u1z uz1we uz3z4 1üb 2übc 2übd übe2 übe3c über3 üb3l üb3r üb2s3t 2üc ü1che üch3l üch2s1c ücht4e ü3cken ück1er ück3eri ü4ckers ück4spe 2üd üd3a4 ü3den. üden4g ü3d2ens üd1o4 üd3r üd3s2 üdsa1 üd3t4 üdwes2 ü2f1a ü2f1ei üfer2 ü2f1erg üf2fl ü2f1i üf3l üf2to ü1g üge6leis ü2g3l ü2gn üg3s üg4st üh1a ü1he ü2h1ei ü2h1eng ü2h1erk ü2h1erz üh1i ühla2 ühl1ac ühl2e üh3mo üh3ne ühn2s üh3r2e ühr3ei. üh1ro ühr3ta üh1s ühs2p üh3t üh4th ü1hu üh1w ü1k ül1a ül2c ül4e ül2la ül2l1ei ül2lo ül2lö ü1lu ü2ment 2ün ü2n1a ün2da ün2dr ünd3s ünen3 ün2fa ün2f1ei ün2fli ün2fr ün2g3l ünn2s ün2s ün3sc ün3se ün3sp ün3str ünt2 ü1nu ün2za ü1pe ü1pi üp2pl ür1a ü2r1ei ür2fl ür2fr ür4g3en4g ü3r2o1 ürr2 ür2s ür3sc ür3se ür3sp ürt2h üs2a ü2schl üse3h üse3l üse1s üs2s1c üss2e üs2st ü2st üste3ne ü1ß 2üt ü2t1al ü2t3r üt2s1 üt2tr ü1v ü1z 2v1ab va1c val2s 2vang 2varb va1s v4at va2t3a4 va2tei va2t3h vatik2 va4t1in vati8ons. 
va2t3r vat3s4 va2t1u 2v1au 2v1b 2v1d 1ve2 ve3ar ve3b ve3c ve3d ve3g ve3h ve4i veit4 veits3 ve3la ve4l1au ve3le ve3li ve3lo ve3ma 2ve3mu ve3nal ven2c ve3ne venen4d ve3ni ve3nö ve3o ver1 ver3a ve3rad ve3rand ve3ras ver3b2 verd2 vere2 ve4rek verf4 verg4 ve3ri ve4rin ver3k ver3st vert2 ver5te ver3u ves1 2ve3sc 2ve3s2e ves3ti ve3ta vete1 ve3tr 2veü ve3v ve3x2 2v1f4 2v1g 2v1h vi3ar vi4a3t vi2c vid3s2t vie2h3a vi2el vi2er vie4rec vie2w1 vig2 2vii vi2l1a vi4leh vi2l1in 2v1i2m vima2 vi4na vin2s 2v1int vi3sa vise4 vi3s2o vi2sp vis2u 2v1k 2v1l2 2v1m 2v1n 2v1ob vo3ga vo2gu 3vol voll1a vollen4 vol6l5end vol2li 2v1op vo2r1 vor3a vor3d vor3e vor3g vo3ri vo5rig vormen4 3voy vö2c 2v1p v2r 2v3ra v3re v4ree 2v3ro 2vs vs2e v1sta v1steu v3s2z 2v3t vu2et 2vumf 2v1v 2v1w 2v1z w2a 1waa wab2bl wa3che wach6stu wach4t4r waffe2 waffel3 1wag wa5ge wa2g3n wa3go 1wah wahl5ent wah4ler wah2li wai2b 1wal 2walb wal4da wa2les 2walm wal2ta wal2to walt4st wa3na wandels6 wang4s wa2p 1war2e ware1i war3ste wart4e 1was wa3sa wa4scha wa3se wa3sh wass4e w2ä 1wäh 1wäl 2wäng 1wäs wäs2c wä3sche 2w1b2 wbu2 2w1c 2w1d we2a we2ba 4webeb we2bl web3s we3cke. we5cken. we3ckes we2e4 weed3 we2fl 1weg we2g1a we2g3l we2g3r weg3s4 1weh we2i wei4bl 2weie weik4 weis4s3p wei3str wei4tr wel6schl wel6schr wel2t1 wel4t3a4 wel6t5en6d wel4tr wen3a4 we3ni wen4k3ri we2r3a wer2bl 1werbu werd2 5werdens 1werdu werer2 wer2fl wer4gel we4r3io 1werk. wer2ka 1werke wer2kl wer2ku we2rö wer2s wer2ta wer6t5erm wer2to 1werts 1wese we2s1p we4st west1a west3ei wes2th west1o2 west3r wes4tu 1wet wet2s wett3s 2w1ey 2w1g 2w3h wi3cka 1wid wi2e wie3l wien2e wie2st wik2 1wil wim2ma wim4m3u win4d3e4c win2dr win2e 2wing win8n7ersc 1wi4r wi3s2e wi2sp 1wiss wi3th 1witzl 2w1k 2w1l 2w1m 2wn wn3s 1wo1c wo2cha woche4 1woh woh2le 1wolf wolf4s3 wol4ler wor3a wo2r3i wor2t3r wo4r3u wot2 1wöc wört2h 2w1p w2r w3ro 2w1s w3s2k ws2t 2w1t wti2 w2u 1wuc wul2 wul3se wun2s 4wur. wur2fa wur2s 1wurst wus2 wus3te 1wu4t1 1wüh wül2 wün3 2w1w x1a 1xa. 2xa2b 1x2ad 1xae xa1fl 1x2ag x3a2m x2anz 1x2as 2x1b 2xc x1ce x1ch x1cl 4x1d 1xe x1e4g 2xek xe2l xe3lei x1em 3x2em. x2en xen3s2 x2er. x2ere xers2 3xes 2x3eu 2x1f 2x1g 2x1h xib4 xi1c xich2 xide2 xi2d1em x1i2do xie3l xi3g xil1 xil2a xi2lo xi2lu xin3s2 x2is1 xis2c xi2se xi2so2 xis3s xis4tä xi2su x1i2tu x1j 2x1k2 2x2l2 x3lä x3le 2x1m 2x1n x1or 4x1p xpor6ter x1q 2x1r 2x3s2 4x1t x2t1a xt2as xt1ä x2tän xtblo4 x2t1e2d x2t1ei x4tent x2t1er2f x2t3ev xtfi4 x2t1il2l xtra3b4 x2t3ran xt3s2 xt1u x3tur 1xu xu1a x1u2n xu2s 2xv 2x1w 2xy 3xy. 3xys x1z 2y1ab 1yac y1al. y1a2m yan2g y1ank y1ät y1b y1c2 y2chi y3chis ych3n y1d4 y1e y2ef yen4n y2ere y2es. yes2p ye2th y1f2 y1g ygi2 ygie5 yg2l y1h yhr2 y1i4 y1j y1k2 yke3n yk3s2 y1l y2l3a2m yl4ante yl3c y4le. yli4n yloni1 yl3s2 y2l1u yma4t ym3p4 ympi1 y2n1o yno4d ynt2 y1nu y1of yom2 yon4i y1ont y1os y1ou y1p ypa2 yp3an ype2 y2pf y3ph y2p1in ypo3 y4p3s y1r y3r2e y3ri yri2a yri1e y3r4o yrr2 ys2an y3s2c yse1 y3s2h y4s3l ysme3 ys2po ys1pr ys3t4 y1s4ty y2s1u2 y3s2z y1t2 y2te. y2tes y3to1 yu2r yure3 y1v y1w y1y y1z2 2z3a2b zab3l za1c z1a2d za3de 2z1af za3gr 3zah 2z3a2k zale3 2z1all 2z1am z1an za2na 2z3anf 3zani 2z3anl 2zarb 2zarc z1arm z1arti zar2tr 2z1arz z1as za1st4 2z3at3 3zaub z1au2f z3aug 3zaun zä2 2z1äc 3z2äh 2z1äm z1ärg z1ärm 4z3b4 zbü1b zbübe3 2z3c 2z3d2 zdan2 zdä1 2z1e2ben 2zecho 2z1eck ze1e 2z1eff zeik4 zei3la zeile4 2z1ein zei3s4 zeist4 zei2t1a zeit5end zei4t3er zei2tr ze2l1a2 ze2len ze2l1er ze2l1in zell2a zel3sz zel3t2h zelu2 2z1emp 5zen. ze4n3ac zen3n ze2no zens2e zen4sem 3zent zent3s zen4zer z2er. 
ze2r3a ze2re2b 2z1ergä 4z3ergeb z3erhal 2zerhö zerin4t zerk2 z2erl. 2zerlö z2ern zer4neb zer4n3ei ze2ro 2z1erq zers2 2z1ersa 4z3erste zert1a4 zer4t3ag zert4an zer6tere zer4tin zer6trau 4zerwei 2z1erz 3z2erza ze2sä ze3sc zes1e zes1i ze3sku ze2sp zessen4 zes6s5end zes2sp zes2st ze2s3t ze3sta ze2tr 2zetts 2z1ex 2z1f4 2z1g2 zger2a 2z1h z2hen zhir3 zi3alo zi3ar zid3r zi1erh ziers1 zi1es. zil2e 2z1imp zin2e zin4er 2z1inf 2z1inh zin1it zin2sa zin4ser 4zinsuf 2z1inv zi2o3 zi3op zirk2 zirk6s zi3s2z zi1t2h 2z1j 2z3k4 2z1l2 2z1m2 zme2e 2z3n4 2z1ob 2z1of zo2gl 2z1oh 3zol zon4ter zo2o 2zope z1or zo2ri zor4ne 2z1osz 2z3ot 2zö2f z1öl 2zön 2z3p4 2z1q 2z3r2 4z1s2 z3sa z3sh z3sk z3sz 2z1t z2t1au z4tehe z3t2her zt3ho zt1ins z3tö zt3rec zt3s2 z3tü zu1 zu3a zub4 zu4ch zu3cke zud4 zudi4 zu2el zu3f4 zu2g1ar zu4gent zu3gl zug1un 2z1uhr zu3k 2z1um. zumen2 2zumf 2zumg 2zuml 2z1ums zun2e zung4 2zunt zup2fi zu3r2a z1urk 2z1url 2z1urs 2z1urt zu3s4 zu5t2 zuz2 2züb zür1c 2z1v zw2 z1wac 4zwah zwan2d1 z2wang z1war 2zwas 4zwäl 2zweg z2weig z1weis 2z1wel 2z1wen 2z1wer z2werg 2z1wes 2zwet 4zwir z2wit 2z1wo z1wör z1wur 2z1wü 4z1z z3z4a zzi1s4 z3z2o zz2ö", + ["compression"]="zlib", + ["data"]="xÚL½KŽì¸²¦;•\24Á\2ôˆ89\29¹‹’¸ôòMI\17kùdέB\2÷ì\2ªQÀE¶²\23\19»ÿ÷›çA5D\19ßo£\25i4þènU÷ö£»å\22³Y0÷Úÿ»-Õ^\4î3nc»ÙÄe¬/™¹>dÎgM\"KsÃg©\19æûÒ&‚/í™2ðl\8»>\18Y­g{4‚[Ó“òÖ§ícöO[\8½vnÆ%lG\27àŒH¤´µGþ\1<êG€Ó®g7:Ì9\9<ö\ +ó µ’+JVf—¶´kÓÝù©\15ƒöl\\У!Cé\8œ-©\\MY\15ä~Eɯã>\1\127©dß\127®\13æIsÜR“ÊÍ°t\1h‘[*cc{ù\24;û—vlÔb·}L5@\9«\13nÊ áü»UAúîÖbÖ¥\2¨‰6`YÃz¦\31/¨ìûTw\11 Ê˜mÞ~ØF¯ö*‹{Ev\7=(™\\9lÊ°ÏikqÝëu\7T\15ÛÊ@fß\127W*zÿû%Ý’\11”&ç•rC5Rn·\14°©KG~êãaçã48U·4;èR߈¸Üèд6+Ž\27InmßÐ\8i{ïSùÁOÝ7vè\29xÛ\28v£/äw\26ÄP\3\18²üq+é>‘k©{ÊTÚ¾Û^°\4t\4 í\12TRs\7\12­\28àCIo¯¿’ÏóÇÿõŸ6ÛÚ§rÖÏQQ_\13#²<ÞOZ3\29g=…ë\30®äçñ’.*|5ck@q?\19s,ý:å4¤:c–vSÌAL;\14¹]èuÃ\13HRÃrJlØ\11=90i†ËÃ{ì\24L#“mLï\27µ\31S³%ƒÂ„\24_ãTP3Û\19n¼\14úyÒÈ?ôÙ”š!\1JCSÊÊ|ÒGyÏ\27\14.È´·kR†¹S2y%ý¼v혀ï«Lz9o\13\14ÛL u¨-´FV\23È<ˆ÷s/úŸ»úö¾\24fÌõQ\0034g³Zeδñ¼¸©æýÁP™ó󅹤–\ +/¹\0300Ûó2ø¤\1—]“ž]öæ“A°\\‡‡ÏÚ1éWç§ÿ¥³…¶_iÅ5QÜUC\24´ý!t°ü÷ß—ÿ\26úoU~*ÎÊØÞ¾ÿ<*õÆ–Z5ÒæRo;s{Û[ðÕæòîj¼\0273ï7\26}\127¨û\5ÜQ{9k›\7­·k(}tËÈßé°Çù®IÂÏ\31\26™\127ô)¼>Jü0‰öç\14Öú‹Š<„N\24ö®4Æv\15ÆýcÂØÕÜ\2\5$j\28Zº•ÙW’f,VÍ”©\3æñUÒ’«\0”­¤ãƒ¦,¹îˆ•5\25?pß[\"í\7Fùþ«f˜•À-\2\31÷YHòíÇÑE:jP•æPf3Àè_(’\21èH\30šÇDA\01555Ág#úã‘Ó˜þX˜á\7=qœ{K#\28×}rúZ)Ü\17g¬Y§\22™%ðìjféé.–9`6[8\17>\25—”êÌÔáÌ}…©Qÿã\5\15ûÙ|?\29ädåS\0256†›àƒ¬öZcEX¤q„²;\6m{^žY×PÓ«—\16€r¸\22¥Œ+ë͵‚Ó¯­é\2ô6“Í\0173Ó'—›ëÚ\14[œHaL_ZD¨üU\26azàÎ:*+¹_^Ò/–ï¿o”üSxƒ\6ùÜAN_ÝÔ.\0\ +ü\5ZWø/è/S›\12Ä\15ÿ1\18{ÿyòýbzÿîHàé\4ž™”Ÿ”H¥|~©Êu×½u•\22ÊNS?ÉÔÄÑïXÞZ9×›@­Yݱ(*¤F¯Æ­€&G×iRÈ4Ñ5Ý¥„¾ÿ”qSª7\5ÖÔº\6\0®·úûÏëM9õ¸$exK\26?25–qÉ\11ÑÒ‚S-<Ð5ZºíSæ\0/ÛS@ýx$‡?m~‘òðÖÈÈüŽ\24\19^™Hy;\12œB>ñtš‹þê\27™\9íucØTZÅ[(›±?\4[¶eW\6\4øë\13àÚlüî\4ª÷Áñ÷è;Y©[U\11‘']{+‘¬æ\29Ñ\14þ\15¢}ùï\28ᨳÁ\3\31Í&|N\23A¸ÅàIt\22`y]Ôá*üF²_üþ–+DìOÙï¤\127§[«{ZùŸ”à}¢;+á•\5\11½‡ÏÍnÊ©V€Tß\29P™u-“yxÁù\5i\8ýˆ„øÒ8¹Oƒcg'\24É®67»ïj\12™‘ù^ß\2\22‡ %°\28—“=±hp—‘\31! 
[… remainder of the zlib-compressed binary "data" string omitted: the compressed pattern payload is not representable as readable text …]
ü‰ÁØAT\3qû‰!A!öÈloo\14£&|»†bŸ.=Ç„“OÅ)Žß,žBGáÄ\0172A#$\26þù'ã:Õ\20\3ÕrúE\2¿Z\18@ÇT5}ÿ\23­'¶b\26Ä1V\6`\20ý°1+?ê5\26wOl±‹qø¡$2’›˜Œb_Ÿ2\27¡ÐÊ\19û•S{†nn™!0“¯É\11ø!g\1¿UÌÕ”½ÿOÈÙæâ\17“\23ÏŒ\\ƒJ}I\0MJt\6Z\15Ãq°9Ù‡Êg^K±m·Å©o¨Ì¥h¸µŒŽÜì.ë£\"}ÆHæv”Ê]“c @_\0À$ݲ\17´ ·i7’bgxò&>¯-'›'-rº@ˆíN™‰ü“6œiI\21¾…­™\22žNB\0žë3‡ÍӦȰ©ñ¾½oÔ\28\1”ÐrS³,`232Ó\2}4½[î\\\22Øl”]\11ÅOÞÌŸâ¾ý\20BæÓÇ?ÒäD,×\22\30_\20‰#¨É/Xa\0173‹Eƒ†cÉì\13æ@\12\24<,\15›\7!fÌ×L\\ÄŸ\22ñ \19*\21¹åCe!ò1\31$¿S0\20\20Ú›¼îv•ÿº¿ù1¾\00376§}Aˆð\\b6 U¶/\8/R\22sv\3ý=ÐÊšSõ*îqjVqŠ˜›M\22¶•#\17ƒ)À\18à\"\4\11ÛÊõ<ÌìHŒ±\21ñS€\27’§Ø\21e·7e^Ù˜V\7ñÊgÉ\2…c†+\20\27òÓ†Äø԰¿é…Í‹\18<Ø´ù¡\5\3{3Ê7\11_\0Ä÷ Ó‹-9”–‹ê1x:Â/Âç»ÍÞ&é±\0135Y¬\23—\7!gºr£õ7ËÄO\27šø&tž ÏØ)\\äkRL9@}½MVh)ó>5\6³j´[K\22ÎwÄÏêiWÎ{j\9 Š~ڹ܀Ú1Ó-»Š½û)»V°ÿýÆý.§ìGädA\127\30~;Q\25 „€^©v¸ZlOÂ}<¹\1<íkíè++\24€zÓ)ûÆqö´ï\24ª´yÅÉê¦&koPò‡³ä¹VE=\30vr\21Î\31¸3w÷“«Í8ù€iÿÄR\127U.Ð/Ò5\18õ°ýþ«†öø‹W±§\22½\14S(\27à™H§Œv‡IÌg=ɯ =jò#/“\5W1©T0—\19RŸ“\11\\\23/Ð\22æœ|h4Y/æÔZ`s*Ü;ëìH÷{Á\26me|X¿óä\19¥‰Í°1€ÊøQ,-\24ö5ÀÓA]j\1fsñ1Ídnt*ƒjà•ŽÛ\24“ySeˆ‚Õ©ðØôd\22Õù2äÀî)Àd`\4چɚ9'ËPËc_S€l°9T±?m!f]Å;¢Éjó–@H’ÒX壽S8ƒ(ŠÕÐ\0<ö\11÷Pm\3\3\21nñbû⟓\25\3Ǧߋ['^Ãɶæ‹õ¦\27¸ž\23\1}¥\127òÁÚ\4£\12©\\~Ë\3¸\19Ëïá{—S`±(òëì\12?zö%\18h«§´8Ü_¶š<\12}ŸÓK/Ãäã¯)XQ,\28—OqÔ5\29žÇ¦\15Ðù)v\\°Ž¬¬\17È÷\29\25L¡÷xz\15m\12S¨a¸9øÄŠíƒÉâK\19Zz [|æNÂ(v6Œ WTå¢T\23’\25SE/³Ör\19¼›™PÊ~>iª\19i™©}\21\15(\14s:»V©ð\14KrXÑ°Ó\9'á\16‡a'Ö” \30–'wI¦ÓW=È‹ùp¢ã{:“«€ä\26sää~b\4\1\21Öqš6ùH\";%\19©¨¼9‡p‡¤œ,sº¿ ÝKÌBúì+0Òh2ñn&xØ-sÜÂrf%9w|¸`\0017c—3º\127Î\15~Ò+Ù§‹íbYk`@J‘|½ß¡®\0\12\13\6iœüM2\27ç\13{\"\127º•‹Oúß…2\1Ìy\14\5]MokMgœNO\28\15Bmœ\31È\11:c£\22o\27Lìz刻\14\1Ý6&OUÈâ5\26÷+€'‚Û\15…^˜\15Â\31œÈN\28\0202ÙâÜP =Y¤|žx•×\15âØ\19\23\31ed×Cü½G\14iû(qºPooàµ\16+=rùQ“éâˆÄ6\21ŠÝ\8ýN\17\30¡®éšµ:ÈDµ˜ÜDe\0260‘°9EŽS˜}W\\[Æ#•\0asC_&n®†+mSà‚ËdêÅzÏ©&.Í\6\23\\]\0316¡ª©íeDz™\\ðÃ4\19›\21X¼W!77Öe=\14Ø(×Éè¿xbh\11Ø\24u¸Ù…~h'\1Õûûo(\6 Û\23S%ìýIs~\"0YOÂ|õ\0232\0Ó\23GZSõÅ­æfú=b(÷ß5\15Ö‚\16£\26 ¼\"3=/­Ð\25*:w?ÞrÛuo™×í2¯ÞãxÇ>ÈÎV‡üa\0312»…¹Ñ\26§¿Ÿ\0243Öù‡Í“h\ +ûÎ\14Höã¹­¡ú#£ýÐfOX¤À\12Ô#†exA¥ï7hµŽe¿\1ë\0Þ£Wì¹v°Å)-S\23JÃvF^—-O—u¥Ü\28ÇfṌ\26’…訛ËÙºH²•d”(*OÞdôŽäJ£\11\29Ÿ´}Oÿ£×ò-Çñ1Š\"\18eGËHFE!%CSÞ\30àá \25Û–\27\2\ +WVY¦æOF\0@©²·•C™Hæ¬Yî¿ß2ÊldÖ(\27ÍÞvÂÔ\18’­Ðe\127A¥À$Í¡O$û\26O¶´dö)t¶¶Œù\0057CFG‚Ÿ¶­qÊC!6ÇYd=¶\14ÉcJ¶iæ4¢¼.Ç\0\8éÉ\28ÏTää- @\9é\"Ê\31È%(\6Ó+-l_ä8^\6 °žÁÍðý?l’âÆ>LŽ-¡\17W3BÙ[@™Û„á(9û6&6+IÌqÑÒQ¶Á¦xÝЕ\16ÖÉæO›³ÍÕæ\22¹ì¶0>\5\0306zM\0057Ît\13ê\0200\3à‘²o\19zDlju¶ŽHâ\127Úü²i÷\14ñ+G9TDf\11zf_\28 7¶\\—\16ùÄ#1¡\19\2_¢¢\11Ùòp¥Ó\1yñ\ +†\3ІJä}ËNœŠp\\-ºÇ[(§c@1\4L†\6\7åf0xq1h\8^Ehî\0 ¬Ð\29þÚ\12\127ýœöh<ò¸p\16àû/¬Ó…5M¬Rž„½ÜzÈBeŸZ‹\12ËÖß’-€¬þ\24¸%‘-T*7\6Ò€n¥<À«èŸ\23bòàÍ\ +ƒ\9¯AãË㸲\16i\14!Ò\\Yp4ÃaæÁc¸\26\16²\5$;1–CKKF’TA)¬µ´dä²dž¾h‰mDZ5m›‡¸+nød\28«8c×05FëÍÍ–œÁ\17EE¼\0213PMKGàéÞåÜ{\5,8¹Ì£ýÛ1\6½lô»\0£sÜù¥\30>,ÄÂñšlGD¥ð£‘\15gUÅà|\5<÷\0Œ‹x\23ÍÖ\18Q  r¨†QeX8 \0Pj®²e?\0—'cy#UÔ=ecTÔ¦\16ãÂ\0214*j\"g#°*³D¡06»®ÐF2Z\22<.\15*˜Z‰í¯\\þg-ñ‚³'l¥ê³7Ár;ó\4°­´íìKä\14Äh™¡ûÛ<';Qí\0254\21¶šû\24y¶n\5¹—\1ÂÃ/2Õþ\17\\Ânof±ÏÙš·-\21\12¬Ì^Òæ\24aõ¼Q\12\23±ÞÙèÉ\" \15\23æ/'Ç>aŽ\23\1.\3­5»›fã\11ëæÎók¤Í^”f?Ä®\4˜+Í|±L“3$©p\31\0260(“5hä—\6\28:3rèÀÈÞN\9QebðŽá›\21¹Ðà*~õö>—¼ˆOÊÖ‘­\18\3ñ—¼ôFHKϤ^Pbi€iÆ*/È!bal„FŒlÕ\23Ùd±³ŠÖ~)°È/ý\20y\25jÒò„\5PÏ\5aIìáµ0{\8¤qCmëØR4\0043/ùñ#€\8æ¼@\28.¾caàr[+\1~´G½ÒJ\11/ˆŽ¶\"\1nçË\0Á…\12µ\16\26%òbºZt1®\17€Ê‹ç†ò¤\17—Ï–xOc®%®b×y\5\1¯ÈÖYá\13Øl56[;Ž¥\4N\20\27\11¶°4ˆÃŒ6\127ÚœmB*\1\6'\18í·\6¶ˆÇîsh‚È~í\30?v¸\3ªd¡÷Aù®>@Ö”3µ*#¯ÞÝBç\14–\0073pm\30B\0258]D=(«—\8Åàß{[ú—¯–cz]Ø\9\2£ö~%úi\7{±n,graÀ\3”þæ»%yë<Ý7\19\9ñ\22}\14¥\14\6Û\27½«å\0m?”vc˜m(ô´“\31ÝcsŸÎfýƒ¡DZ\30}l‰†+Ý\11\21€'ûð7R¨½JlèK1õ¹Å•:~ŽÙ‰P,nt\1j—c GÔ\ 
+\0258ã‘z•k\12’\7¥YÁ\2/\6!°Æ)ƒFk\13(\1.á£ÍEô‹ñÙJ\22ä\3õ,‚p.öq`(éÍ\15/\17ê|¾5y³®¢¼iÀ,ŽÆ\18¿…>\9[Ý€l×ê\31þ™\"lî>0’ŸƒW]v怟ƒ—³W£m7Ò5±óÒ¬ (\15\12\2\29ft6_UÍ¡ZA>¨ÅÎ(5t—¢Ta\11XÈÖ¤Â\127kSÀ\29A\6àeox\8¼™KñŽx\5›j`á¾xö%\4µ\21\15³kþžYêÓïxdß\28ϾÿÁóz`¦Óêmí\8Öð}ì‡Úù¿\24¢²ýÒ’ƒH™bA\15g?EM.<Ëj«ÇÜé\29œÓ\2s9.¢ó€Ÿ,gBz(G½O³¼\14\1ÙPŸ,Óïç\20\9g0Á™•ÞiÚ\28ØxïÄ»Í\21\25 T?N\18æ ”Ø4YíëéÙ\23R2;Í\0161q~`ë\17à2àÔ+Gkž>XÊñ²{>} ¤TJ\4¸ÜZŒóˆ\21š[-,\6\\W).\16t·ß­%5VÍÁöÓΨúɧÕ\0äÓ;Ôjákry\0257õ‰Š\8Ü hÏË{rôoÍ’ãB{>_zzr<é.ô\7!y­\21=OØJ8½þ4íþ\25ìbýIÎr£wd2\6>Ý\5¸™êùô%\31 bß\25>ëË*žs¥Ná\22nþU'H±_ ÕJåy\"º…©æy¦ÌþÍ“×\127³/_+ÑgË\"Ákô\2O\20\7ÈB—¸àßÿ[hè'—V~vqxÈð¦€ém\1¥ò³c\23û'÷½Õ?5*\127òNãÏÔ±wò“í)9Œo~\2RÙ\9ž{iÐÅ?}{@!\0302 øg³á§·\22\2\16ÌÔ(à\21âÂ&üý\19\\óSÝÏßçÛÏ½ÓŸÕ ýÜÙí׿ʽ»Œ>íý¹—¾ÆYÔøÏýÒÚ÷S¸ì'ʽerc!@ßà8Ë@k–\ +Ìv½L.ÆüD\127êÏ‹·•UgÞ\12ž»·†×©Ä•ª[¦p6s?û\1cF¢ïÖÏè0\0164\11\15\24“š\6öŒ\14NeÉÞð\24¶Ë^T0-~Íh\14‘BƒÍ€kX5qÒI(îe\16˜\13Ö¹\5\27’Õª¶³ÈàœÌL¶\12p\20ß)æ\22éCýͱ1;£\31Ží¯9öe‰z*Y8cçê\11Ev-Nž]\11[/'ÈSVsÚ]/QƳoñÏñÂ\15–;ù\23ö‘fv µ^Ï¡+êý%ëŒÂ¡FËüßb\1273Û·\31Ç\25?­!õp’ón°8b‹\16`ü¦•\24ߒó+èM­\8\127P.ä\ +ð{r;ÃûZÏL,{ÖÈùÏÉRnsàÕ\25„J}.µjº>,+á­²_Œ!^É™y%Gæ¤Q3í*èM½™\23úxÄœggg4\20Ëä\22Ðœ\17\29Ý\9`ô\4À/&¼Ÿ*ž}A\30“¢g†¹/ÂaÒ\30[°Ã67Bní®\25›­à‚\29¹#\5<ñÜI\127\23âiæÌ/»²³_®™M‘ä“€\23\19!âÙ*“æ\22Ý|õüz-Ç›Ê3w}°rËG–þeã:Ú\28ï\20Ï\\åŸC\12Kÿj3\17¥úáˆ&Ü'‡ÅD3\28ÿ\4µÌ”ÝÄ1Í\\ë×\\‚æ•Ó¥²ÇK4õ¬–xñ§³Å‰f_¯™ÍžÎ¼^€\0197j渇?Çýû™—¹N;‚têx_8à\23‰ˆKœƒ‹3Ø\12@‰\13š¼f3oú§ÿ‚O›ý\2Ììç¼fVŸ\"ÍqAüIà³{9Ó\25\22÷˜-M4‡4Ñl•HXŠ—DØ6l—‹[¹>ˆ8h¶RÛ“È\25eaLH\20EÎæfóN3ŠMj%wšwrp¥ f\8ÜçÇ gsDs\8û\0hö%ã3¿D{\8Z.{ÿÂÄ…c¯9¸ ¥‘×°@âœÞŒÏlzw\14^göÕœùôU5\30C/cG*;ºÿfXÆ\13+¬Ï\28¢68*‹³ìÕa¯Ë!˜>§\9>3#³ù\15¹«q‘h™¯©b8_bÒæË8÷bW<\0¨ìZ\30\0120´^Í(½\18AS‹´uH\16ÈÌ\29\26\3Ô±ÖñLèl!•ùò6Ï|YžlÙ\9#àWìº;\8sòòVÊüzwY~v4Åt!d§)(VhF0nöCË2a-”Ó'\6«/b\23ó“Û\19\13hš×â…\14–îv{ãÚ‰ê…Ëö˜£ÍI>•oíÉ\"b\17íã…\127xr\3a>â\30o‹‰l¥|»\28õKþˆª£©\\3ºâZ©>µÙRùþß‚~Qk\31×’‡F•Ë>Ÿø\12D\27È‚ç:0O\\ÆÎfÞ\0136™£¦\15ÿ¼; x¼f œ2n¨h\7T™bÏ3å™\17žYÐ\6¡5j\9ó÷J”†³\11žVX\7»=\28v%ë\21\25ÿ¥\19\2Þ(%¢*K\16ç0\2·7\24¸\ +n¡ŠZ?\31¨Fÿúï¿ç˳8Æ`ÛH*Á\14áˆ>LÒ™’­.Þ¶D\11o‘ïÃ>‡}8§Äv\22'öü¢\21w‚\9yP\4+\127U1\11“×0QÍxÖ‡\18—f”çгáá\ +CÀ£\13þt3j¢.QŸò<¨¢Ÿ“ÑÿdÓŽÝÿ\15Jp \23Š\27ívpŽ½X!ÁÂÛ¡dÏÆ\8UÒ¼W0&©C¹\0D•`{ºÌ‚<Ön†d¯\27\7³*û50„/.‡Øö¤ç.\15ÛëX\2\20×ç:œ´\25ƒ¥£GP–±t¿«·ÅJ¶*ËþCS3öa\0276»Ñ¡0\12+Ezñ\12mÜyæ~À-À=@r\20-#\2ÿ\15ÉÜÔˆ7Ñî\11BsZ½¸\\&äUHÎ@\29¹Ü¨5A!¹\23Þ1ÂO½ŽÓN,\22Dn¤‰HRh¢°m‡© jŽÙÿ\26ƷムÜs´‡*¬\8\23é5*Ø£ê5ES¡¼Å‡©Œ,ã³Xž\7S˜d±\0¼\25ƒ•JË ¾©½B‹¸­wå‹äN7ÛC¶É‘Çs\9©ucñÓ¹ø4Ü¿ˆðÅñKv˜Ãæé\23&ûl‹e€\22?ƒŠ?¼Â‚r‰‡“€\0·ï/[Ýš¼ò¨0»S‡˜Yxm¨&+§Ç‚\0pz¨hYz‹*ð\\Ag“2h-Õ/\3Ó/™(´¿ï¯-ñ\2Uƒ\21*{í^\127â\1ƒ·$´÷Ö< Á+\18Mò{ìG>ó\31¶»aŠ[™ý/3*ŒþÐ9·p¿íÍ\"’׶ú\7rÐÇø»“\24\"%„žáyF:Ý<ï&»\21$\127½Åq?%‹ûm‚síhs\23 ÒâH“ Üð©=%kÍÌ.\8,Í\1èNÐ;7Ͼ\24\11‹OÉð¤*>¬\4Drð¹jl#ôĉ\14W•*ÿÏ\14Fz\"=™\16¦:¹ËD¡Óñ›\6aS]\22?ù»$Ð\17úK=æYNÒ\5ÖM—è\28Ø\26;V<ž‰Zدì\0^•Æ\4SBn#b\2\11BC\11âBüò\26üÂM½fáª^%”Ý \7»ˆ­¿Ûœm*ÛÌ«Úú×ÈXòM,è\2Ó¯ÐBÂë\11\18Ìè¸]¸æ±dßüY²\31c[Ø\29p~¹W\17rÛ“^\28¨P°N\6Ietè8'VøÅ\"t\0¿”kH\0273,‘‚\"Õ‘ºdž?\\|\25pÉ3¤¥€©myi\4x¯¡F5ìˆ`+•b˜Ì]Y0Yå;ƒ~Åd\0080\5x\0048\"\12~ÛÈ˼K<¿»¼žßõÏO|Ù7\4άvŠº/T“íLƒ.\0\11¯À#À+¾:Ræ§ÝžDvsí.:*õåòh\\\3[а@€\3±U\0188¢&íA¼h\ +n{°›àn€'Zü„\16NTð¬Âßø&4\7.Fç@ÔÔ¡”0k°çOÚ™ø\7á1£NÆÝ\9iJ\24‡\6>°\17\20Cý\ +ðNÓí{âA\3¢2üùÇXíì=6.jÕ:¸\14U\23HÉl\5\17\2˜Ugt;̶¦£ÿõ³ý&×fÜ\14lk±ü5,YEë~ñ\13¹\0ÿ,6ÑE\2¬E\4¤C–øà`\127R(w93/­ó²^ÔŠÅ;þb†WÀž<\3§¨:;¡Šz\14û43Lüó\14씜Î0ñÎ6ʱ›Ùí÷ù¤>Éš…àè¾\16£» 4mnZAЖs\\’Í\25Ånsæ\2T™Œ6åá\22§T¾\0178\25oÊwâQc7[¥Í\28òŒ\ +ƒ´\19H#6Q=3H¸â™>Ù*Q‘Xø\"c9S‰­/ãCÀ\2?­Ïåo\\vŒ$\9ÿGÿg#å\22\13P9F\9lö™'G‘ Sˆ=\2š¶&\23ÿ æÁ÷äo(ˆ\127>¸—<\13Ð\22ÿœí¡µƒæ\11(áŸ\127´\0084kn¦p4{æÁ¡\127þ‹ŸEÈ˼b\6G€É²_¡çu:(vE\28c†7E©ÿ¦Ü¢CÝŠ†½Œ\28V…×Ø9nÄ´w°Öª‰™\26\24?\27§†Ä÷dLµj\ 
+Ui<É!\23ç7+†!†æ·æ9äñæ¸5›¹\30\27\3°s˜3bŽ;´ù}‡6¿ïÐT>kõPé<\27_ÖÇÔ=ú÷ç:ä\27»$o.¥ýr\11׉è•mÒ\12\15\14ë¤e\21UoéHJiùÀ¨pqîÐU9Û¨²Õ¸}\5˜\2ø£›\27\30\2iÿþ©Ô…œ\21YAœ-Ç7Û¸òl}÷³m+ϵ\15Në{·y§”\22@š\27vÇgkêÇe½#”·¨D\4öÎ\14MúÔÁêûç`]Àî\0226\18減-œ7¿ÅïT\0B\11% ¿g¶z|m)Þ\\\27Üó\14£çE½ÀÖ3[ï%ø]³\5Ñ9Dâfÿ\15Y¸Ù·ujÆú„\ +\28ˆöFè˹Ùú1íA|\21û`O\2½\12fë°Ïí\27¸wŸ[9\14Žd\9Îo.…Ù\\\ +s³ír<ù+@õW»µ¿9\6š\17!s#ÌoÙ7C§o{jëñ%ŒBë¾\30\20të|ã7£YG£H›<'®pú‰YÒ¹žšQ”¯uÙ}A`SÕú¯/í—¸°0\28\3~:ýÕÁ(”cÊwô‘ÙŸí²Cõ«IOÀê@/G\26Ú…æi|ºvk*ÂäõÌ\5ãl+׳\5àpŸ4iï\13\ +A8\3\26\5\15åWx¹=ò•£Ü'\31È~ù¡Ã#Ùu\28Û—/\21gdã˜\21Vóù\5´š'Áß•7ê÷Ÿû¿\127ê;i~Ã5 \24\8E\25\5±\26 ¼ÑWÁËC}»\14ˆÐý;)|Œsè\9â\15“èâhæT÷ƒð0÷ovÆŸ“(¹??ì?0yé‹Áü\6H\16¶Qíü/ØŠç\23O¹ók®{%üЊ/\26ý\26Üí\16A›\17w\20ê+Éfáíy‹#qs#ÌÍ1o¼4ðÜ\15œMðv(õSûÄN˜’NÛÓ‡d!\4(F¸û?¼\8‹y\17\0220®åø…\0¨àþk’‹±!t5\31\8: °e9jè—£È\30…_…Û\27œ\2¼\3¯\1î\1V²^y ´·\5x\4P‡¨]Wr¼\13\17¾ÿP“\5”\28Ÿåƒq;;E˜%\30–Ùðšë\1/<ßKhOZ,5¸\28ýn'\15ÖU\9Ó9¯¯\21Ê‚¯\31œ|èø–aq\13Ü¢.ï[Ô…[Ôå\24Öê–£Íw\0¢‰VR£ŽÔìvÆk±ïæ|*$Ô,é¿\22”\1\0079ðË€Çg[\ +TIÙYוB•>Ù\15K¨h•\23³Øò\22>¦„)÷\31B¬«r±êÖa9ºû*–¶\14\11r\2|y…Ç\15è‹Îå\24òÜJ¬Ã\25æuP3Cz²\29¾ìqJÏyºÒ|»\11fÔÇ\0«?\27¬o\9IÄE(Ý•ï~QÃKH\9ìœKÜ]ê/ïˆ\7¼š¼\0-%…\26\5YþÕߤ\"Oêä\19(á‚Xéb¦™ÅÌ\15‹Ú5`3`Ù©\4,ë.{.ç\0:\2–3ÈÍò¾%Ó\17µŒÂñ\23ì‹ëïYÄ\14îx\8 þ@i„ÚŠ-n¹æ‰\\Æëq§†®\5)rƒ+ÅLj¯¯Žä19¦„\\O-°BhHFO>\5Îg{X\7£…‡áûGÎb“œ\11\ +Zq¹4\20\0þ-è3Û5Â\30²ø2j\9¹\5&*\26\0002\27 \27\\Ç\0³“x&Áïc\27PTŸ8HÑhvúíGºýb³\\!Dðœø\30VƘ™õ#÷5‹\23¤o“\22[>ÓZ\14ó^‚–e#s¹òðHb!Uv\15ç7lÎç.*γa\20`àæM¿‹­þ¢\3+\3Ù\7ðîÅw:š&c­¤ñ~~å÷ó€k@ª\16ÚCOÆM¼tŠ•‹.¾âYƾs‡jë]0l° FuA\127?•Àý°Àý°£¿'îS\23_’,ÓiÒlàJd™¼\21N#l\16KèDÅ7ãr™A\0241X+;\0ùìe:°=LV×=,Ì \9N&\3×ug¦0§&sÚéï×*gì‘\0ÏÂ|\15\11\26\12l\20T\31\20Å\ +­ûÈ \27\\É\316‹Í\16ðlÄÖ1AäÈÃ`†Õ\1ýw´\6\5÷ISèiß7ü\11Ó\27\18[GZ\21'ïbÓ\4#9ù8è†ÅV‹qw|œ/a\5>]ægc@Ì[ɧ4\127Ê_ô¼\8×\5\30¶ånEš\ +aEÌ:\19åÌ8\28L¢\28\23«òY¼û\7P¬÷Ûð1!\23Óþöªº…mÚ,ä‹&\23,\25@á\"Ëañ•ëbM>{\7ó¤¿x*’\7+lKHÜ\24Ü\2Ô\0údub%ƒ\25„\5\23J\0171º þGÿêdwµKï,–Ä[ÌÊ®DÆüùá[\0013“\19mU\11²*“¦¡Æ\12öÆý\2ý9,û¬\25™‘4\22\0ë;,ùú‹ÿWõðä\27ÿÙÚ2\\!ä›\127\17®Ú¬!‡\"Tg†+DH1¦G\5µ)ÅÂ\27J\12í¹„†Õ\5âs1ñ©pŒ \18#êsÉ…\23“€Å‘.Š[Ïú›\127\26S\21FK­\11\\\0ãV„©Hse,¨bÝ¡5i±ÊœÃ‚…%\0244\22Ö5\\{Ëz¸\24Ó\18…?ðÀ\18°ü\27¡¡^\17\9%X\4„\0²/\26\6ý?¬Á³£‚¬Í.`%–~X½\13šÊ\\ 2\23¨LýU\2³U,ÕTÍ\"\ +ZÕÚ\28ÂbÊqñ5ÖâÑ·ÜÌbU¥‹Cì\25\\H\14A(Ÿv›¥Ú4ÍR¹o‘\7#dØäõi¥CYu˜\ +ÄC÷\0041¸˜\22\\L\11.\16\13×%\8³Ç±ä::ÊAˆ\6ˆ\17\3c®æ÷]\14(&X¬P”Çôpæ¿h2k,]PP-\31§–Êc÷8Lž,æyXÌó°tS¼\2,°n)“\29\23\13û\5Ê€5Ï=!¨ÑO¸À\6ÙË<\20nüȱ-ìžúîçÑ\14(Ðsœ9§„°¼\14Þ\27õùÚÔ\6ëuN\12+ˆ\2Gi‚U\2÷æ ù#\9ÝÏþ¯\13$™5‚èöÁeù\23\17\28=é8X™f\18% ý\6€j\24’žÏ”q\30ùÏêOHÉ¥£¹ÿ(ãJ\25WÞ\13\18ú>äA“ƒjÒI¢\6N\ +DÙ\16\4J̾d\19\8Š¼gÊÖ®•¬ÿcoˆý\26BUï\1•§é\24ºXRè÷H¡ËC@û\3EˆÉ‡\26?²Õ_N\"´*YÁ\7z<^\20¥\1\29l£ÿs×Ø£\19>^{°#³[¸P‹Y,’õÜÂþŒ&7L_Ÿ\28¦c†@¸\26“ñ~¹“ãïþ?SZ¢?¡ìRpJ$«]M>·ðÀ™š¸ÀNÇ\21C\11‰Õø™¸{Oæ’ ZÛo2ÿ3š÷¯\20«Ý$™í\25—‡¶Ä…7žÕßTh½é\2%®D¸KâLW)u\15Ò¾ ÏKçÑ€NpõJ£[Í錖p•yd&îáŽ\1¾\28Óhjçog*ôËáR\24ñ~¥ù}òøõÙ\9ê)À•†tá£öÕ\0|ú#\8À÷Ÿª‰\29\19\0e}ƒþ3uLFØø~\9¨®\21\21q¨ŸNSñÁñO6Ø+\18÷5ÄØÚHâ¥+°Ã\0\24Âå5[°\18\5—u2¡‘„ê¤áËW•É\ +F\18¬\5\ +×|<{º\7_¥[µ¾Î;\0154Ú>Ò\25©¹dî\1¹*Kh›g¸é\20|6Ådƒç¼\25\16‚b4Á\2G3\127Ø\ +Ó\25\0211ds$WÓ\0—Èu\22n!Zû©ùx\24Bà\8ˆ\18­½É :0’¢ú›êá8C\27+Åœ½ß¦s\\V¥s¨\13Kæ=HgÈ4júQW_ƒ†HÈ\5¶×Óˆ¼\\\26\7æÙ\8\17ž‚\19\1%\30ŒØÈ\24Ž_š=¢Äîü?“\4æÉ4Ž\3ÿ5*\0072Çž8KÓ\8Ÿk\ +îu^í88PR¿0o10Dñ,\2³\26$(5EO×Æô\15#q†wŠ×ÉF«|JX2F(­A¿mó¯Pk\6\11-a“NÖ€›Ð€ËN5òD\16°Sñâ}w\\Ø\13L°aà\29\6-\2+\9\25bh¸\4¹¶'\15[‡Ü/\127E:ŸÞðü†—\15ƒëÛ{{Ãû\27®oØÞðÉ÷„\18Ýdƒ\27\0Ô £6’çVÜ«#\15W\15\0210Ep\13ÐÜ ›Kr\127¦¿\\˜;Ž}Ëä¤Bêat„¨ÊO\2MeÑŒöþŠ6»DÑ•ŸÉ,÷)8\29Ì/eôd¾\6Š\29Üéa×_Á\\\13N‡\4{\3_%ì(a8œyëÝÁÔh2îNê¡hQÙ|ºÃS€5\0»\19¢â¿ÞÐs\11‘ñ„Ek§\25=xBu\14éÓ\15u\9·xî\5\23\4<\30þ\20Xw¹ûÛŸÐÉw7\14†8|<’\0¹ø@\ +\0292‰ø8?üà—L\30ò- 
?D®ìtf‹pùl³æi¹qîaîï—ˤC•·¯\14|D…›óVæ\25\27¼è\4÷A=|Ùs§,0•4šlf(*§L¨~I\24\"Á'BÎkÒxaÀ\20Á+u·XÔÞ\14|\1Ÿl©\22àó€)؈kp¬$8)Ò\8«j²îaZJ³±™JãŸ\14ù¦\23Áa ËÓNSßzŒ\18Æ1µP/¨ZJ0¢§á¢B!Ù™ÂÈ\29¤Ë'dÒèË.0Ï&0‘\4cfº4‡ZßuÂr\20]¯\29ûj-í–ìWîp\21ÛÃ\8^­^Ã^aÿ$\21šH\26vs[\"R¥Wž‰¹p¹8Ì{Õ\21B<\5‡\6Q\5Ï\1îŽd\0195lâ\ +ؼÝ_Qq*÷Ö]\0ö\8’\13–¸YÈ\17\0002{d\3ð1¡®>€®.!Q wýkl÷W?A$ënâŽÝ²Çx×#À×vJÔ\\‡\15Bî;Ò€\22JÜb·ãÒáh‚I(¼æo×èpO*jâ6.¡E«\26üÇö¸\19œn¨]>kh,.’,.’\14\19\127Gí°\9ýPþÏ5G²\26'R±?\17Çj˜.Hߦé\ +SG\26&á’dáI\"\5CHB™\13+eº\19tgåÇ…\ +j\22è÷ép·NõdóŽÉ¬#´Œÿ\"‚™\5\\¢$ÄV\9g_Þ£Á‚UÁ…ºö¿t‰(TÑ&Óÿi²þ…}2\29“‚ã\3K(\24ÃN¾\20p܃”lbÓïvA»'êóµKÂÖSz_ªØ„Š»ªŽ¿Âç„è\30H“­Õ9\16Ä•\29d‚ôS¼ÏàÉ8©<,¼©\25;›Z gfõû £¾5:\127Ñ:f8â…\31fb‘\15\22\0155O\29vœÕaw8¬\18bx5RvCDÕÈ\1÷cº£\19†\0286Qš\16ˆ\"gôQHíÈ\27ÇæÝ\0207†Ü˜\4wô$âvÇ€pܹQ<3À²\31\4ð\29¤Ð«“‚“\4f\27î{Ò\29Ã\31dçy?aßõ\30€Â\26Œ:\9†ûÕ\0242KðDèÓ\23õd¨oÖ`)q2“I\18º|V\11@gj³Fæä[ÿ”ÌΑ|Zé+B»ˆÁH+Þ\ +\29\29‡ç‘B›Ø`q=®\9sæΣ1@×HbF¥Á\7lÊø͇‘¬u\12w´»_ÉÄþª\22VïC\15³’÷¢µ\8ö‘¡`2D\29¬\9tc¶þ=Ø\8àÄM ‡ª„Í%{)ÚºN\26̵€²‘¬ˆ0)k¢´}†­0Á\18\"‡EÊÍ\15ùw(P1Ö™Ÿ¸\6\"Áù³8ùÍîb—­€q¤4N¢Ì5®ÊÁØ\5A µrw,%Á\7)Œff¿±¥|°õZ¼,}E\0147‡ÞÜ\28s\\\3öf\24çT+Ó<û4árIÿ7°þ\127Îv÷\28êæCàëWõÿ*\26UnÉZ1k\21\14«Œ\127³ú9–ÌU§¯(#LSòZ\127óïÄv÷f00ô|©P}\6Í€\5££÷á$Þë\13\9®{0ßß¡\29\"±þ™Ø\2ÿóäa%UTÝáRA\29oŸp\30 ‹…í.…õLJ\27Ã'T%\5[B\ +¶\4Ç\25\13©a9 \5\127Bzó'\4Ü(r\15\23¦7“¹\22RõEx²Q\28%ôþ¸›«T\17n\1ÐÙÈ[\30Æ\3ð/CCÂ|\14-5íXWcE†Õðö‘‚å!…QÄm—<¾Œ#\1÷W”¶òœî\0¢m]'ù\22,ÕÀI®Ž«jèÉ´:š£[Éì¹8\30ó\19©Z§¯ O*¬øÏ\1279#þýÇмÙ\11’¯ÖPò—WÖ\8b…Å•`áÕÖùv\13\11\20B\11\19\ +Õv 󵽈ê‹;‡Ý-4!«”žJ\0ÏŽ—›»\25STËš¯aŒEsr‡näÔŽœõÁS‘7Ø4VŠ*'‡2ÎøHƒæ©ÔÌÌ–¬\3Bk#ÏB\2{§GšTžÝáúÛaL\25+‡Hf¼PA>‰±|Ù8l¢ŽÐ\18‘\12%€n\15ÍÄJÛ\7\23ù \16øDS(\9Dà–Qj\7pœ†>\2\\D\\\18Öã/nªÏ\12ù¦\29Ù}r\8\13u§˜1$Å=¡\0—;CS¹-\15\\Ú4n®w„1lÍ\2y\6Œ=· +e m\6\0313õ­—8¡—X)9$›uŠ ø>Ûu\30ð¥b/Í°¶ˆd¾Iåìê•Ö9J`Ï „°>\26Э§Š°tš”Z\14×_§ºÔyI\"ä\11\23l©ëK̼‘\16êÔ_M:E\25Qî7þNd¾«Â}7•9ô\5RÕ\\\23r¹\2²Å$¹ÆM`¾pªâ ºá½Ø¥½Å½¬ÅŸ‘µ\5%Ÿ)““4êâº\11Ó_©³§ú\18¤ûXê>–:Zò•¬Q2;G°;(\4ì°\127ã@…üù¡¡\127~\14šy\24OòÅͯr~Œ#=\24Ë\7\15‡\9Ó«éE?½ ª^‹†ƒ\7cù)ùE®—÷uÐï͘Ëfu\31øRøºAl¶›q”íŒÈzÚàäJhI¦ý@\17Ÿ\1U\28Š;ž\ +ß&^Í\4PV”¶Øóö\27\11\31£¯3)ÙE·îÍuÛqØ\3رöÛÓ÷(òö?ÿG\7ŠNaQøùp\20y)÷ë#û65€ƒÎü?\19¢ïÿ†3L„Þ\29Êû\13A:Ô\1‹¢´ÔqêÇ-â”YôùSßžµùè¼ç\\Î'u‰\0Þj\4 ›\20æè‘\127Ü ÈU©'_°ˆ·ØŒ*_‘4†\20~\26\17»r\0êúë\25eª-'\7\0201i¿Ýñ:•O4\24µÛ“\19Á\\!\29¦CLÿ\11ù´ß\12îrYnà)ÍÍAµ¦’és|ûÆ\127îʳoáDÖäA‡gFApFn/[A°\\.\0²\21\4Ë3jÊaš‚aÏ¡/˜Ðr¸\3‡Ùîâ¨ìÿÈàd‹\22e“3r[„èÓ²µ\5ËÃÍ\127¶ÌQFipæîʦ/\ +ÍbÈÐ\"\28žÉ1”n…\25ÍÂy÷çÿ–£a7»|>sq®ÿ:î\9á\0259ŸG4½\ +À{ €Öî\28z…IùMœE}á:óôPMg\29žùŒLü>S¦Eˆ2‚B$\23ö›Q\12œ÷ô\7·AùÀû¡üƒ\6p\24upâVZøÿ(±ðù,Ò[ø=`±«\30\8ÃUÙzYópáK/¨\24\14€Tb¾`Ž\30ïçl@^.:µñÐŽ\11\0050\2\23t[æ…0ø‡X&Ìî²#\23D\18nijeg\12•ã©n“\25X\13Ü\16\19GùÒøßÜNKtdQú\0252^|Õr“+”íh\"ŠÄW\20R\8\14WÍÌ«ßï347!\7}î•o0\15\\¶i>u•ðP94oÇ´ÛßÌ̘of\1°W'd@&\0k\30Ó>¸œÉ\8Ó3\0307›;±o\9\2¨*Ûà¹\26Z\7o\4<\29ŸÉ\ +\127K¾AIÁXJJîìvú¿ø?ó\27–ÚŒY\21ª\22^¼Ò(MyùŠ›c«KŠ*¤cöߘ\22;&=“]]x{ÉeÆsC‘‡‰ÏŸTô,ýÀ\127åÇHRÞ£Ü@\9„Íþ\"¼’\8Ñ„lûG*ê/\28­\27+§\1Ìšá\"EîBòó\29Iˆ|·^Ü,šðãE³ç»Éá|÷|f\29‘\19±\0\\Aب•n>b8ÊFe.¤¢”ù€ˆ\31€\4H\127çÙ6Ô\12*aeï˜ê ogüß\20É2˜yVγ§$–§\28tí:H½2[Û\5:\16\8‡¯›ÿ¬@D\12ܺ\24ñ9\20©æ9LÜ\0ùêyþ\28Ó™ê\25Ý{\4p$eò¬Y-7c0&Ϧ©2Ü»…–2ðóK#>¿øJŽAÊÙlc\25ÿ\20€aT\2³Näåh‹Uy± D^Ì¢›ƒY\2Ã9$\25…©åл'LkﮂY\9\0317ØxV»¢®s°æÃ_ dË4¶3s‰’\23KȇWÕ,Ãr$\16Õ¤ò¬ú¶…Q[šPH”G8Ñ‹\3T\24K6¯\19š\25é\14\24žrBÃ\21!B3ÏGr}C¡\12Œ8¯8üWûy>\25Ë'>eO¨•Î¾m\6 W:ûþ—ÒvÐ^ö^\3°@\6î3)þBRÛòÎ\9áÇ\12kf'\4|\14MŸù&&«o•Hí¯æfE.–ƒäfF\31cdœÏÉÚ1²hj*ÊgZ‘9E’Ù+°Z¤ÎLÞó’M­æ0“CAg†€£A\"Èp\7ÿg¡@’ܨ€\11ù\12aAŸ&[39;À›HŠT¡ßšP\14\0ttäÔïd\127h^¥\23ÿØÀþüŸœéˆl\25º/sñ˜s\0254Š\8\22\16É\3fÞùHEWD^Q¥Ÿ\17hRÝ«m\ +ü_Û+E]¸áD|åH\4ŒÁ–eq²+èŽÜ\29\9½÷\22,wÛÃ\\á\25&[wÊ\1Å\26\26\7Xñ“»n‚ºPèl7±© ¡žË}ï\30º\15îÆ»QÍâÁ!×ÐE”O^4UcÚ\27§÷!Y²e\ 
+ñ\25m)ž\0\20¤±%±›ä³¨èÃ\8Òø©i•­r\20w£zßd¨.¿®„ß#c{F@wh³\9/’\13Ñ&³àŽ\17\19½¨?\15Ã[\4Oñ\13m\16öåp–i±ÎJ@‰\9ñ–` û8zÑíÐXgdöäá,=\20\24z™Âàaûòâë-–˜«×µ¾Rñ\21\17ÒŒ\13®‘ /ãÿ\16\18ÖAŠEn\0229ÊF?0¹f©çL‡@\17pÐÔ\27YªÿÞIsw\13ìŠúϲ¬œ²\6\26\8¤jðØaçÃl3\25¼¥ØD3a•B‚K­‚m‹@7…ç‘\4]\16ÿ6Ç\12Ù~êêûÊ\28²\2\\WzqW‹¼à\31ŒDréÄøÃå&$Ü麽®\4^\30še\11P¿9Ø«õq‘OS\19à\26\27rzùmSËÐMÑ\1d·\0015\9úÞnì¾¾ü¡\27Ý‹zÑù\13Óþþï¿íý§ü›¦\127§²Ú\17QüÙ¸GÊïé×̲‘­\0243\15-è\18}\8hp\11+ Ù\2;\4\31ÆeÀ‹æ¹l&°§Sq8úxi\7æBû¼Ñ2u\27ÃÌÍ\16;\7WCré\2.öêQn\127>°´·'+È^³ ¢\"˜Â\18àyÖø¿îi¶1—Í6–-j“›§D³\5áÜbj\4\11\25š¢Œi5îÖs¨íÈ¡\0044ûî&ïÕ\19ƒ«³¥ænõf¹_÷„\\\9Ã:Ã]†>I6‰Á‹¦\27•ëñ¾¾Ï]\5£,\7÷ཤk¸TMo\30Àþði©\26wZÁ\15ÐÐ}~(þ1UÌ\22æ‡i\"õê€ðF~ŽÚgñpZ\ +Yýf\13~ï¡À÷ß“&ø7góþ»+à[sƒ[”üB\7¸JÑL@pä#o`\30\27zÿ!%7•µ\9íøøóÿ¡&óÏ?»“œ“uû\1±Ó,¨³\6q?m½–ú+\14Þ”2|Éc\21ÆpqÞŠ}š\4€Š0=\127tV*åY\14¬÷rá…SˆÚÎK~ Û\2\3.O¨uÇŸðº>(­?ÿܬÜ\8X&\7Rþ0]\127á¹»y\24õúó\15¸¸(™\8s’\1Æ)À\2àÚC H[‚§%@\4ß]·YËð¢nšØì’ª]n\16\0136êwÝ)¹ð\21ÜöO\1X÷hïÖÏ\13\7mÔÿi›øÀ\20Éýå\7 ƒ•¯I\20—I…Æ´?ÿ¬Ñ9«µ¦þùdže\4ÜÿfÃU\27´Ù|\25Æ7ù€ž\29\15rK\0¬›jEØx$\"Jõéê\12î…ÂV+Àƒ–+ñg—êÖ\22Ëÿ\ +¶ð‚ö\9ôC\20Г‡Á}³7\0319Òáþ:Bæ7\28–÷Ÿ§á»Íþú`17l\14Ä­ÜÉ\16¦•k\31õ¶¬.B²TÀ‰µ\0094?.\11îIÝO@ss‘ ‘Û÷<ª-!0\0èNÈãÃ*\"}5›Ô:Bk\0F‡\23J+¼»c6 S\12ü$ƒŽÙùÉn¿šcÅ`úø\4²<£M•bý*·†ÚŽudNýùß\31\7(8\17pǓݳ]ãewœø\15â‹Gx*1ôðÅ\12çŽëÊ?\\F—À‚¸X邾@1)¢\24X.\"å»ìA=äã\ +b…Khõ¥äà\ +\6¯¸‹\21![F`4Èv;)²[€Cg‰Øä£.^f\23ßJ¯¨wÓ,Ñ’¸\"+²[ojåMUß \5Vn\2åž\8<“\16!ä•ÛÀs\0zó¶‹^½]H¦¢°n~äÿrXç·\5‡ÏÄX1åþ£(d3×›¾åf\13Â+–>åéžl»\27wäëmûXÍ\30ºNG¶¯i€Çwµ]¼õ01¿§Q«yœþÃ\12¶\2W…`Ëo`ÏÀ½ëÃ&‚¡…õï°)\0282\25\31|M\7C´&­æyY¹³Sò<ÙEºhV‚W\"\15^\4¶—®pp4\1a‡rœ+ŠÕ—ƒ«u¡b\1ÀßÁ\3ÌÇÊ}ẻ«å÷n§°\ +÷óA% ײ‰\9ýþË®ºz>­1¬Ö´½‚˜ÉÑ1¢”ìqóø‰†ìÕ‚ Š\23e¥\24}6\29Û©w¡«w,4¾‘W\18¹*\21<\21\ +\4\19+ô*'¯\31\6™ôw$^\0054A\11XL±ìCá\5h/`ûv\2\\qsGê\6ÁŠo~î3\13‚­ÀšH\8ý9‡|ÈûOåÏ=ÊÒy \0H\14e‰{×_.zýåPm²\20T\29¦ñ÷•¬Ö`±l\16Ìm\12\ +Ró\4!\13„Vêe¢ùËB\20ׄÀßË»zì¯uáÂ\16ZN¨ØŠ\11z<¾¨\2Ã\8\6ôM:œ…š\25\14îútæE¡`ô|r²˜JéBï¦+\31\13s,\0L›¨Ùõ¥%Àê°âZµÁ\24D\13\\€\16ÇÍ<\21<‰tòõÂtvžâ4…^?²âå¢\17¾ sÄ,´pQ±<\17…h\15(ð!¨¥Tvä.°\28Có/ñ°z©<ð{¸’•¬yÈÙ¸Ùfš€äcOªÆzhÚèÔYý\23Mè'÷\\gø;ÆZ)­{]õ«\0190\14]½®ÓÍ\127< \29±\11bQªP,aD`]Ã÷ÉPõ& VÅtüfŽszp’\\ì2gàuÝÇ9\"\23®HÀ\20€Ul.Xr2\13E‹¨ÿÊ[ýªµ…kcÇßqae\0|úf”*:Ÿ,Ú\3eiÇ7€Yš‹}uóÉ\22É\0048\5ËÉ\15t%ì2—ÓiÏ\127t“•ÃiäìÄ›H\17òKe@_«¼3LŠå?§·¾[%\31¬î¾„\"W\20;ël,'#C<&0Ð'´9\1²\28ô[\21¸H‹5¹\22\20ÛNöÜ©\0I‚b½®Ä47ÅòÍ\5•®åô:ðšýlp\5þŒ_\1’J2?Fñ5@1\7F\3co\15Å~ݪ§\9¼\24G\0¼zO§zR\6kòë¾ã¯ÊÃBýª\14\29°¼}\\p¡7\12”è\12ws9Û„MA@«\5ÐŒ>°q\22\11e)\25zLÉ4êØT4úl|\30p¸RÔè\1:‹¢\7ÏaéáÃo-Ååšu£œ±C\0148²?òò’(\12>å\18R\\*?;(³o ¿UB|«Ø*b™C!ôå¯Ê\25\26lWØvéÔÑ–Ï\5ýq–½Âm\17ÅE{\25\25ž‘™\22@-\25O;òœ<{±<Í’ðÌ\26Ú\25âð7gþºÿ\ +À\26@á½¢Îg‚x?*#Wϸ…hô\127\21\12ð\127¶›q¯\31Ü4Ц+4o4èÆ@‡›\9uü;}ïx\19ŽoO¦IŒÄ8aµ\0\15{;o\"ðù¡7FÀ²\4\1É=-{u»éE\2\0179Ãöoℱ\16˜}|û”þSÿ\7\4ºØÜɃ nì\127SºÓ!-VFK\16—˜\11#Ç\11AØT(¡×Ãq\23‡æðð1Ø\19ÕÜàøÙÚšTܲ^\12:\17É\17‰¦û¥»„V\18šfQìvæs°‰I\31…¢Ù\18³O5§ëê´\19ßj3\25%ä®\2zà,iU\0063»Q×F%Ùôzö\8ÕT{V0†\3‡*y\11{’Ü\18¾3å—‹]\14áP]XÌglÃÕ˜3!ÃÝ-)s„Býø¶‘g²b3ròy/Gö\9Ñd’¥ú†Íed>\9<»Ä3¼ï)ë»]¢\4ìõ\20(Õƒ\\\"_‹°ön3ßY>·ÑqÛË_ÇQeÕ\14\24ÙöÇóvTàaÅñ¨†¢á§#=±AÀÉ\5žM`Ç×n/\0ÿ;ºyTHgÜ|OEýXìÒ’ûáh\27\31“{™jüS9Ÿ“cöß;\ +Ý\24‚mÒ‚Fø©ìÔÕ\23„ŒÔ\0äž@ö|ÝQ|¿QÞ²O…«Œ¯\0êÿ`£Ò±Í#x¹ä\25NFy5ØŠ+Ý®£è\2\\tå(Ф\7~Ñ,\1]\19ß}±Œ‰<]-Üiì‘šRk®f€7¼\5¸;NÍÐÒ\14\0j\\¼ÐÙ™¸;؆§C iÍÛí\21­»åŠR'Ö÷\21ª\0160v\3-tžz³«É®Üò\7\0059 k€\26@;ú!¨˜W«\\­\23¡\\?=x\5A§}¹1•nÌ£ÛÑ\127½Ò\14œM\6Ê=À¬ø¬\9q³:\13%˳\0193•µ%–\27§úŽ½yâ1°X\127.ÿÕˆÉdA™|ìÉ¥6î_ÊÄ£<ó\127:g¾`\"~Ü_\ +ÉøúÉ,\127%\20É\16•º£À20¹@úÁËÁ¶®ÚäXÒ˜\21Ï€Ók\ +Œ~²5*\0ž‹Ë¼Xkî\0qûYx5)\24+O£}\127Éwݱ~§+YàAU<6°ÕZ˜Ž\11B\1\5ãÁ¸Hû)\2ƒnF\0205'Ðù3~\7œ6Ú‰ŠMyyIt¢\11>”â\23®’fû®%â@»&„œ\0wš©£òwùœÃ\18\25øÇÉ\20–oÉ“zä­\12 
\5²¬ˆÚåƒû9p\\ds\28úÛ\12?ßÕ>˜Þ\19ƒ>ñ]Fg-œ•ß\13aªÓï©\17¸[\29FzpXýõ\11/s‘#q\8E¼…K.FÄ\11iò³ƒzšù8iñkÅÿ¿4á/\28•nÙ«\18ÂV\5Ûã,\2Ì\5)ŠË©bÁ©\2w\\Á‚\15ÿã\29¿Øô\12èõYy…`Ó.ø*è–\21'\23L¬ÈEfbúMYV Yî^ñ¶„ÂAù6‰¢?Í\26ÎÊ\29Ü‹ñŽ˜§sD\29(À3\4ƒ»ûe–Tn\7$¼¢@Øïð¢\19‚^„ÂÔŠsÛç)êë\9 )3¼ZŶÁJ\28é(:öÿLrö¹™çŸŠ‘(V|i\31Ïé\5ºNU¨R´Aës¬\9îÆ\18 *CSP\9MA\0\16†ýr\20ÝR\22\27Ò,\11úÝ”\22V80ÉeÌÄ{ÃY&cìèb2°\"ɲð8­¼l±¡ó§,Ür*W'\4ÖüCQ7ÄŸŽŒd2\31PI\3\26#ŠÅÏ6‡î‹CY÷Éö\8Š…Ò\ +ÆöJ \5ˆ€êïx@ ™Gæ\0119Ù\4’e`Š\5ßÕ&XßðÙ\16$‘`\12d(—\8æl×Z-Ì\20‚ïï`\13Â'ª±)¶9ê›üÔs¥‡’Ó›L4'\27\31×\0´\127Çpa|º‘\13ãD\1×\0|)Œ\6¤áÓm+ÖÃ[0þ\\@Y¼Ö€$7û¿šã»Ø’AM\11œÎ*Á† \21­¶eÞ£å²7¡7ïƦ—'²±ÐmºªÀtÉq“çÁÁü\1m‚«òP0\24]àš<\27xÛÉËb\23¤\3”=[–œ¢Áé3l…%[T‡dñ\1\\ë\22\24ÒôEô\8,a\ +àú‚ç›YÊ€\29[q`ù–šÃe\9\8,DÅ»(^&W\8Ñ©*ÑÚ\5\6N›Ð\20Tr\27\7\23ÙØÆr¬Çl9—\2×\3¹Õñd°Ee}ƒ\9Ó¬áõ\27ÿl\8\7yá‰^ØÃ?\\jiùñ%¨j²\7\12ÿÏ?Foÿü³:Æ´ñ?¾‡\0160bº2UÖÃÑ®úƒ\23'\3ïéúÃu\3Wò$P\11-—WV##+\14Zš\ +Üv\5ª¥\24\17áFB®¥žJ4æ1¼\4\29\12‹Úlˆ™¡RüHU„i=Éx#fâÚ¨`©\30\0ŽQ0¹\\\ +ÆïŠ\13\26•bCÂ\5ö*Jî.žÙùÖ<\\XðÕ·XoÁÀ\2£&çNXè*ՈѾš]ˆ¤i\13Ð\12XÄèRƒ,\23(ü\25Ž±ý\28á=\0×6Õ\20M˜k.a®Y>4¯—PQ\\ÞB\127%„þJÈú\21Ëú•\16õ+f³*¨&+!Åg\12@H\0Õ€»ºzÏv‹ÝNƒÉqH.ÃqtØÁ‚€xŠ¿?ä\1UÈzð—3÷ýTXx™ð‡ï}êü®:4M™Á/E¯\1!@ª™ÃÓIov™\27\21ýÅŒŒ»¢™ÑÖÞÕ‘Ù\25£I-7‡\25-\14\\ðÍ^Eµ%Fèá\16J²\4Ÿ®ƒÕ¿Sfàj2×Vób_UÇÛÜX\9¡<ƒ\20 \5Ø\"ƒ*{\27p.V\127ìX\31,Hà©ÔvðöÜ Z†ðéÀö\7!‰y²L¨Çúm–¹¼ïã\11]t9¾!ƒÐ,!P̃u\127ûy\28‹\127¬\2ÿè*ha£Y¡¼K^3s0¿'j\0Ó-m\127ãÄ\14kg\6\11§§eöJËœ¸Í{Y‹cÏ\18xæ)„\18\8I<¿L1•¬t „Vd\17h\127~öŽÄƒüéê˜J°g\29\3PrsoXÇW1[Vñk\1+ׂұô6\8žèÎΕ‘Röß_w/¡¾ÿb¾ö³\17\11?(\12ØÐÓ_º®Cèh˜‘³+6ž¦XæT7*\9à$èl÷\8Ú\21_.ò\31Ì]Çø/{6ç6þËûDy«\22S\ +ßôÊ5ùÓ\19¦\12ç\127ÿÔ\127ÿ<#ûÅÙMÕïúÞè\17¤dvy¾ïU¢ˆ5\22ÜSŠ$Í\25ŸtÆZ\28De°ÉáîÀê±]‚a{\13Á[ûq±Öc\7âú ÑÇóß<‹ú;܈\14¼®\27¯S¥\27ÿ7Þ-LØ¢ÁÌ{ \7'\22Ýnœ\0?‹o¤\31\31\5sÖ;ïýjß³'ßpユ]‡â‹çòS‘›eÊ\22›äæûè\11,QïìlX®æÖa\11åH\5SÖ ,†×7¼¿á“ôLÙaCpB\30¶¥Í\24ãÆ«—þ³™l|“úeëW\14ÀaCŽLÖ‘Ì\21A\21u(G\3[¹£­Ö\20 €“p j5jõhIû\26zÔ*Ï;•·\25õYEgZsÖóñ\3wùˆÃ¡P†æ_åÑåƒ3{2\5M\ +\ +\5jõx%\30ªŒª@Rê+1<Á¨Úi\127ß\9N(5¨ÐÞ5´§Õ#÷srÝž{sŽù\23…A…û+f«è¨G\17\19rP+e1öwڨ壢Ó]‹ö³òâñ›sLëßçYâs\22\4è\21\0077M…}¹ú±ƒÏA)\26gÛÉa\26Ùjä–\7Ï3-\17YXÜHdP}&Þì›Â3\7¨NYï\31\7Ÿ“%BŸnëÊg\8EP.8ó+{-ŒÖå—¿\1\21uÕ¬ôöùcU#AÑ…Z}ê\29lÇ¡Ðquj\30N•Žþ\6['HÓÄ`&u[¨TíPï7\26g“ïî‹C'g÷lé_vc`û…\14ï1r¼XàRB/0\14‘¢F#ÑmnïÓÁ\15êzdÍCMÊ:XÓ\25ú&5åâVÈW7K\0¸»\20—®cøýˆ\9\16ÚéN\1!‡ú–“Æì\4ÁQßO\5;ÞIê\23ãðŨï¿<”_<\15)Äû5~ü¶@—\25ù4Õ\14BM°Nˆ\23ÍU†×ˆÎŽæfO¾“]×p£‘¾ÿ߇æƒ`\6üå\20œúÆ\127R$ª\17ZßðÛy.v£š\27Éy£§2Q_¶âPºs³i\11D;£•sTÕ™\30ƒD\9þž¹f°á‡\127\12S´Nè¯?\"\2Ý\19·¿]‚5Ó\2£\0m Q·ðf—\\\9å\26‘HÎTó<6¼L*€Ö7…wG±}óá}uÓÙT\5\30ŽÃò\23\6\\ŸÓìÐ\13÷/FñË\1LÿÃWvÎL©û/†ëËŸOñP¯õ|äm¾ž±—QÑrM§¡Ã}ÇŒ‹\23‚0yÁ\\\30­\4\30Ë\0237–5æâ\"\0225mu$\7Bµ\21\6ŠIsi\20}q\5 GCœV ûÎ&ÈëhÍ´Õ¦ãj˜Ž«:ñ+wûGÊ@ÃpXËé\2Ç9€¶t\18ÑÕ#¤\\ÅØÜhOµ§9žCÈ|£ñ!Ó\24àJû•÷³\30loΨ³–ãÛÞ\\åU€Â/\4Šw-ÏaÛ:·ï\30¾9@Š@Fo\12?x½yŒaÙ\9ød‹\4å\25ª\13Ó\1ô\25û‘=8˜ç+O\9Ú\14G?x×Ð\20.\31\29Ë\13èˆÏ»5Ÿ<ûê\5cÞ\23•€}Œo\23„\0'ÅV§©›Asë\16°³Ù’ÕÕ'j>¤#uùU¸t¦pk\9w\26öc,ÕU«x#}>EL¥šÔ~ùÃRc\21r\12_#xŠÒš›h\21öÕ\15\0135tºÕ\17­6ä´Ú6Õñ7-ô>=\22úÂœ\9$7-\\ÇOÌàÖ·a:T}\0112\1Þ\22êâ\27\26Lª5LÕÕ0UGF”´\29ªáì\ +ÜTt¶ÕÐÙVß*Û*ç\27)y¼à«x€¨ñô€4ˆIÞú~{¨~z µ'ïÁZ؈lþŠ¾wÓÐÑS±\27¼\4ðÖ|@9Ûgåñ¡†Yö\26\ +J‚ºåŠ‰¿\26—á5ìѹÇ\26‹dgí\13\21ÁTÅ}¤ü&Ý7¬Ä\21VËÊ;}½\28çßÈêßœ\15„^\27¨\16\24\20á1\18>SoGïJÖöµ“OÃk=_Õ–Š¸$üeWcrÃ\\Q½\13‰ÈÌä¼e0\21ø\18å@Aàyº\4ÍÆ›w\21­ái/äG`8rjTkQLJæŠ:qÄN:ˆ>«Ê×]˜«{¬Æ©·´€+lŠ¢Ø«µò×P\13V¹\5/„퉧ŒÔW*à5BЈÒ4\\SwàP\28V©\16EW§€|ítõeÝëu²ª…j\13aDU»¢>jhe¯\19oz˜òY(\31\21aVŠs\14 1ŸÐ\13\7€\15Üà@¾Ö®æ—4òÄÞR1$‡\0117‡óÿ\29 \6h‘ðAµê–)'>”kéjÅìõm\8®†Î0ƒ\28@ゲ0'÷G£ 
¯NÍ\29±çR¶Z!X\30níc¤w÷\15Æ[èÊ-t…ÝRˆ\23óà\14ß:RX‹]Q¶\0213pö¨=wìäþ²\11õðshßÕÕ‡ûè\13è>±PîÓÅî5‚˜ƒ÷Iͺ£¡C\21M*\26æ!MÍûì²\17ÿ®w\31°÷ì*3»+fÜåº\22ؾj\24·®¨‹\31êl¼„\27=9تª{¬éV3\0W˜Š*×~\21\11“L}ì!ÖÃ<©çæ\9A\21\5eµwÎÃi´Ç˜\28Œ U㮲An\19\"­5…4i@Úó™2ƒ—ÔÂ\29¶ *%å˜,°{Vî++ª\22ˆ»Øå˜Ê\\E\29ª%øI\ +%ZEƒ+©º_%Xh¥†|8\0091\23¦\20à“\\AV\11@\3D\6ËEœ|t¨ÑG«ÈÉ”Æ$FšJyixð–V‹\27#õ&L¹Æ-$\0250VþY-Ž\25\31€Î£êëɽK¨4ÍH—¶özÈ>\19w™åö¾N¬ùá \6B\30#ïB®ôwcÆiÏ0\19i\13ÃaÕ\23Š\6{#ºÿT¥Zézø¶*R\24ð2ÝýŸžYE4ü\15c@ðêà\18iàÕ\4t\3#¿H\19`\5eÃåÉ\15Ýë\28\7\22\29@fЕŒL à¥\0230•³³\26+âÊ\28€íje7Ú‡j+rø°_yÉ­Hµ+æ¦&\30ÖØ]‘E¶æ«\3C\13·µ\ +^'oÆ0\\£ê/ûÿa5`7XÝ\22/Aó3\3¢˜\25rfÅî¢\0\7„9’årح٭Í.;{Mqú\18ј +ƒ¿_Ñee\28v\4Í\14ÖÙÐÃź<˜o–ðñ\28Àí*Æ\14‚Ý\21Ø#˱ƒ•$‡\21Œhín›Ñ[x5•ª»\5/Æ\25œz \31Ì\4fÀsb-cÓ¾ôV¸†î/îs~ûŸp\5Ñ¢J\11é­Q€¢\22nZE¦‚6T°\28…ýåÿšD\16¬Ì\20C6%èÚ\0220ÑW\"2!Yª\9N\8Õ\127#7\23\1+\18¢£ê·\26\15\23Šý\26íºz°aöa\8vPÙ\0>\19\12\15?¿Ë§‹x_ÎÚZ•)¾ÊK\18)A\7¸XU\18dvkÍàס]­†µ€j®à\26†\1jý»c@¦Z9dµ™¸\26ìÁÕ·¥Õ·¥µ\30¸\23Å\3á\25\\µZ?TÝ5u¦Ñ\0248ï„‘°å7_x4LpR(äì\1e°Ä\\.ŽºÚsóÿ»]¶T}‚%šðjDâÆÔ`5à_Õ׎–;Áwv\6î»(\0\3Ixk\0mܪÛë»™‚ÇW]\26Ës°Î7\2û/gèÑ>\19Ú\13JÛ¾\26¾\26iØ~¬aÐüj×\0003£ÞLJ\127â÷p7ëëS\30苆ŒPmg:…\21üÙ¼]À•í×Ó\27dsì¶|^£>\17Û\24Ä¿ÎÅ›\19ì9p¬m„^ø€\27I'zeW­ð¢¡\25\26ʦ\7cíú›\2¸Uãâ\"\22'Â\22”½jF\24Ϋ¡î¡¾ÿ@|¡Øí_\127»¸9«K*š\7ÊU\\Ôï\18;Zƒ]‡Ö\21Óø\28ìÎʼm6Ã\0195›ôi£{\16\3ÄþØÖ¢\11ú\7Úrúè\2@Ž-©Ž‡ÝÆÔDsW€S7o•\7n¡k¨c&wÒLž”\19WJmÒ’kX\15 dôôèkÀ\28ɵO.\30\3\3rïN0»y\16\18\7ßd#­IWÛ\27ÿjSö\8 Í\6Ÿ'›-;í°—\24myà‚)5vX_\0155îbšç\0‡lãä\31x„àÿàY‘Ï¿(D(4Ï\19\23ƒÁèMËÉnä\\\3”_\1Æ\0×\0žõ¨€Áç‘ÈÖÏ@Á>å\0262\27\0007ÕGnóiÛL’7\4q\3º\12ÿß4Ûßk¥\21€+9Ê\17J\0240\5|ƒ{¤™Ã—|N4tV|âf˜A÷\16\6I<ÀðÍÆÂÂ#`Ôp~ÂÝÉ+ˆ¥Ü½üy¥¤zàžªx<¨NgfqÉšínŸ¿±ôc\0oK¥{L¬ÇÞ¡«‹´°Ÿ‚Ù1yðTî\14•×zlpÝßÝY¸­_캛:l.\0Û*äOÃkš¬uØ‚ÉÌí¦&¬¹\26\00869ÙzÌ(ßñ´î«\11¥1\ +Ñ°k¸··EÚê!úñÐü¼‡è'æ€àø†×7ô–f5zhN•óô„\127yHx\"\17]Ym-R\20&\22Zk?`.òPû‰\21ŠîAEÀ—Tû×-Ö°M›\0nä¥Ëå²Ávx4p'ܶ'+û\1\24Ö¾sÇÔ­\31¨vxâ¹\17¨C_.ô(š\9å€7’(\23GAÊ\0\26n(»V<ŠÐ\2\0305k¼ŒÔ`\24©Tr[VûjÔÌ\15\29ÄCÌù¡¨\30ºÑ†\14É…{³KÓƒ4ê\15pOzÚº\9…#ÿ€‚\ +µØ\1Î6,P‘ÖÑ‘Dçî \31ž´ö©\22=Ç\27ýøœF»33õõQ_³r¿\0166V\6〛/‘70w¬¨rÞ\29üžq¨›ïõ\21Z#‰=tø†Zj…€Öm¾Ç\24PÝôç¿Ð·€S€/»çÝŸÿŽþ;FˆN›€Ó‡b|Ý$ˆ@\28Þ\20¡Bx\0\5ð¾Ý\31|\4À£…J¸àr…ðç¿ÁÈGà\ +)äMqë\15,†D!0ÿ_ÈG¹H-\11Ì\ +™mÆFžÅn\"åÈÿ›k|Êj`«Kò! 
ýßÚÂÓ¨®ûoü\12›ÀÃntŒ&;W /\7p—O9‰¿§_\31˜1d\17ð\26+|\6ýÂœ\16€¤H\24fÎö^ì^íÞìÞ\26¹\19ô²&\23U\\l æ \26ÔäÁÝ\\Ñ\0239¿úGóËP'¸\7[¼ü\8EÂp¦\18\\~Ù\5—0^¥\4—«].H\27\28KhçP'\3\24P\4ùâEH9¸¸C°/\127`·Ò˜\27IQ\28\7˜ÑŸÒx,j~,\18˜DTµ£Ï9y\14Å\30í04íNC-®#Œîwôç½Ü;§ž¼›}³{u&ý0»‘\3:Î@óN¿\2P\25ÂÎÚln\13–z¨Ô¦Ä)zÎ.\4Ü\22\22¹†»\16±Ü|6€‰ò\22\17AÍ\\\20ƒ‘Ç\22VwPõ­\13¼ùi©!8O·$\15¤\16ûÁ8%±wú)ÝgGÏ\30‰´˜gû£“d\ +å\24k!:COCÿ\5\\iKr\31¤gTÌS!i77\7•¼\ +ü½±oÙßiQöG@U\0260m·Áã9XNtM‰®1ÿóè.CTö`,f\"Xœ¶}Ø2É6\31Jáôkaã³\29)ÃwLÍ\15Zím³…\9O\3\26e™O|B\30\1Cô´°/âD¢6?w¡J8\18€Ÿ(ª{\\,Ãg-1\23B;\19ö؇\11úAìO´²{é(%ÿ«W\29Ò8rÃ<éûÏ“\30äÙ‹^FNÇÉï.¨ÎïÐê\15âUL¥!½(>¼ê¼\16êQ\16}\12¨\14‹¼­¹@\30Ô€ß4äÿ¢¹ÚšÛûU­æüuÿü÷\15º\24ô\29êƒ\19·Y\15lûºé\4­\2ë\13yÑXJqˆuÀ•²àáÒ\21OyÚÇÌå%G\4\13HÌ\30RFÅDafžn<¾Xᶆwœ0ûgÚæ`Ò†,~Ši(7\29\15_NÊs¥!×\27Í\15,xk\20¤yÓÆ»ÎO“A*Ž-‡wãé#´\16}½¡:nœO®Ò›\31/)¤™aˆü@¨•Ó1àí#4\23¹\3f\127­BS„–sÀN§û½…j¯Q›¿çÁ@ºÎ2†Ÿ&ÊÈƽ—Sz‹\27Eô~šRk<Ïìï\14\19ö‰·\29\3¸\27\23\15§\21-\2£iË2F(Ï'$Zyëh–\29‘;PìaÙuâTýo\8>8„(Øl¥è=áaÅðp\13X\3Ða~\8‚\30<9ßùø\17äâÅ…'Ï\30\30‰ìÍïØ5\18svükfˆ?#‚É{ŠV`É\16Á\14ýIÃ隃¯QøÕ‰\15W\6ö€Á¡f\12Å\26[èí\3†‡p\23碗Sõy\9ßêâ(zr\8ƒC¨w?G`¤iÎÑb;}\27\28j!\6ƒjóØ­Q“ñ¤\12tÒ³pXøc‰.Ü•#_fkBúÀbiNXÓj\0Ï…òõ\17&‘c]–è\20?W]\"`v\25 \26Í\6s¯\1\11“\8ÎÄí#˜¸ØwýŠe/¯W\20\15\14\6ßõ•{Y†þt'œÜ¦É«§Ü­î ½-\14ÁßÅ!J~NQô|Àe¸\15bÞ|`c”’í{çáyÛ\ +Þ\127¹&ëí!\28áAX¿™¿D·w2Î#C¦TÑjm~\9kï—0²>Ù{-³\7÷\9ß³q†b´“£adNÂ3CWaªH>«wiñR\6X©Ü¬ƒÚɼÊ|U\4Œól´\2ÿß×\127ÿÝ\29…\ +\31²{‘4gn\30\"?°5”\16âRG÷á\27iÀM\5.ÃŽ\6\127þ{btž))ãáíò;>…g9ú~ÿÍTãyN\25¾›Ã¾\25]ÞªÚ\5\4Lÿ5žíŠJ¥¶£\5ðU´ÛÁÛ6/tm\127\3­\26nàn{ø-´Q€v¡<\13yªæ÷ºÆsÝ®Ù\28š\18Œ´\4©qB¾œˆž9ܼï*ìnwQ\22\4\\ˆ\16Æd09\127vþRœ¬:Hg3žN\30·eº»`ø\17\0à\0027\14lŽ0\\\7h¡Ò¬ì4™ÉÆ«¡=7*Ä\28-ž\18a\26Î[Öœá븡h·•úÐó \"Ëä%\7ä\20ºÙp—ËFiŽ‰v{к—ò©™~ml“í7\13‡Æf1œ6™²kÓáì¥\24¯ŽJ1j£ç\2ID‹ÀäL¼i°ê¨¡Y\25´bl\25˜Èf—i59ëÎKvâ=‡Ë¦†4±2õWC¿Ë\18 \0240\30<42†š\17\17Š”\7`œ\2°ô'ŽžÉ(³þ³åO>@&ä?ä.\24/iû‰\5>ÙÔ1³\11á\28äM8\16,•ÓlØ\24\127µ™\30š\31-ÛdûV@„pZ¨u±N‡É“wb\3ž’í\8»LãÆ;«}q´{\"õ\8\4=›2U \\È­³zâï>º‹0\13ÃmZ{[•n¨ˆ\30ž|G\6kü,®„–\6Ø7?V¶©/{\2\31${°ðýp‰Ç\3uxø-\23ÿ.\27\12îTo4\24—nèÑ’«4èUúl>ùgó\28hoB1g›m\24A‰TW[`úm6Ž®z–¬)ÖÏ\3`û¤OÓÈn„Y5\16Q¦QÖgf\20¢¶üéý\5a\0059\16]\22Sh(•×\127˜S¹ÆhYâwÒ\"d\\(à¬3½åÉ4`Öôñ\11 þb‘P®Ž.t-«n¤\17X\1\25Û\15\\ ¶lÁ\16€ñ³œùÏk™käj±aÏyðýb³UgëT$#réPÎ<\7ÂÚ{v²Ñ\9èh«lnƒ\31\6‰a&gX÷›“\24ïáU„ì´Ùèó>ƒF¢\20”hö\20+\ +nèê¤2^ä2³³‡ÿà›L…Á¬ÙBvA¾Fÿ ÀMûd¦¹}šÉ3x\127梳¹\5¾àDÓ)4Š_\20›Mþ\16\3}òçŸjDþ\31WÂ5¨\6vU\"~ ùò7Û\23öþšz賡\ +d°Ù\0239oô½ lC^M‹Åð\2ÿ¡oM‡÷m)`ÖÜ*hÏ@›ÙqrÜÝ®7ÐrÄ&$Qp'q‘zv¤)ÖÂSÈ\16·«øè7@\20­¦ûb•.\13•7ÿ^¯6¿Û\25p\15ÛlK½½å àÆ>á‹«\12ïgði³/#Ù­ö\"Úíöò†\0177µ-Ĺ›e¸\1W'‡OÄðäÀ‰CÇ7¹¶¾rõ\23ŽÓ-¼)@¥G`‹²osŽw­³\11]ì&w9»ô¡…\4´/‡›Óc“¼Yh™¤à\6àRvA[\14\8ò‰0=àë.«GC\30λù\11\17´mƒ7&£ˆè¾åªÙ]„¨¥}W¦|™¢-pˆ5ä\25#=$Ë¡L«=+ÕNûo\\ô:ªÔìïÈ&ë\17<Ò\127dÉ•\26¹\"|Ë\20¾Ù\9“\27”ýeðõa°æbð¢\17šÙ4\31Ù\27Æû\31_D\21fðàKðƒ/¿å°…É­;'0\19Rƒcüîïî+±¦ù\15æànÁÂý†=Jýù²?^^OG-;\6<›»º\30\15Ê\13ûª\18Ãâ¬Õ^‘\7n6ÒÖBúå@(»¶yr\29W\29Ôœ=‡2¼´|8\20ü–†±\31ê\2ŽI&\7üìÖo«Y„sÍ\18\12ŒdƒêlæÈ;S“óX€Âi\24¾àW\11Xܨ±\26Ó1#\18™&ß\28U„ÄZ\29îrwæ+ipQ4ÛoWMÙÍçp†ÃÀ±«?ï°\"­×´»ÕΞÆ(\24jQZXÀRÙ¼æ7[ÀrXNoèvY=Uóó®\26`ÔÛo¯\13ì÷NEÍÕrwÑÜâ÷¼æ÷7:'DbÈÅŠãÝè+€¿Ëo=-^pÞþ\30…\0245ñ\3\3X\29öæõßø\31¾«Åø\30ÖîÎ\127±ãª*›”oÍüòÍoÈ[£3a¿¯°õ6k¾n6q…›\"\23×\\\0131ê\22¦ª\0ì(Í,•\13\18Z\30ë¥m&ÕOo˜€3Ã#´\127ç4õ\24€\17vˆ€l‹Gņ‰Ó†h³\"áfÑ•Æ:S\8ò]-lD5ÌÏ'\3úÌú„ñ¬c€b@aHª4#+mß“KüQ¿Èßý\12Þº¥¾ys\26xrj¼½L\4°6y\30åÕI?&}GÁ¡Êè\22áVà…\11dv°\14\23˜þÞ\29ËÌôÓL³Q(\4ù\22ßWÛ*\20à\26à\30€ä\11R \6\25ÀÛZD¶\0 ­ÝçQˆ´\0ÜÄ\20%c\15³¡Ü–ÿÂ\3ø½m5*ñÜÚ,¼’#-÷k}ýE»‹5ñ‡ñèÈèá\25#’‰na•\18Þúö\30\26imŸÓ/f„Òn¸ŽôŸ!î¡r\8\0311Ü­ìé|¤¹èl?¦áÞ}ï\14‹\ +îb×·|aš*^Ù†xd3\24\"=\23,\20Ö ä†\7Žð™'\23óO˜yvíEó^š\26/áYr+xäN¨š0PM‹mǢݼ#nH*4Ûªj˜ªR7nô\1’+\16½Ài|Cõ†\0ÀÞÔÚ[þ¥mÖ°A(Ø–Jà‚r»Ž 
­[àù[`ç6_Õ,üÒ,ö‚Ëi\28f«\0“]íú:AºÕ›w‹ãô#÷\18:Ç&Ëë‰\16‰³²Ìgä\1;\6ªÈ5\30¥òHÏ1[g¿\11)Šã\23Ð\28FõVjÝgÄ\17ú|Åáå¹Ï~sVø\12û;E‚¶tì_\17ÁåVŸ\23Û\"ì3\ +uûìI1[g,Ú?ImãG}6w£À ú±ÏÔë§\20Ń©*„Òö¨@RÆ\7¥ …¿ï—\1ч¾Øbo\15½\31}\31Ư\12;ù–co\31»nÕ\31B“Øó°\4Ã*XРí¤Þˆ\12ç7|’ç‚s¥l\127ð\2\5±\3ý¢¼ùã\0\31Œ~\20²¬~5U\0136:´\3;3OŒÚ²Ä<\7U[\12Ð\30\1Ôh.Þ©\22L*î®@ß¡÷`¼MXÛ¯\15p7\13u\26„z÷$Ú_ó)¡W¦£è€=&¡»‘\4|,Š)•ì¬?GT&\17}!ûYŸ™öçéì d\15#–¬fô€\17Œ_dC\11“As\28\15ÈdxØÝh–>-A¼áÞHÄÍBO1\25’\14\11r\30¸ÌW\22,\3\25h@Óõóf‡š«Lê¿ú\8\3Ðþ+FÐ;Öè)\18u-ÆOù\"^)‰Yø\15·Ÿ€»dªÑ]<õ‘øAÔ´wx^€î›;»(VÆA+R,9!½¬9¬Š“0a\27§‡¡¯ž\16Ò똾¦Õü…\3¦'w\20\27œBÙÙ\18¬\27=EÏa\20†1æ¼H ·ú‡-Nû(ƒÙ®B\26µ>ò¼\0150\15äagJ¬\18\0164\4\16ú9ظûS@\25¾ìÑWÃŒ£\16š‚õ\22ýgsÕ¼Óßu,:ÖÑҮ郞u\3v¡ATlGo³]6\28\28´\ +óùV\29|\5‚i­(_è(ïè®QB\21®J‘+O1¹è\7–=\19\21•¦\4ïáó\8ï5¼··÷³º\8\21[¬f!EîÖ\"Þ8Œq?\27Ï\"Õ‰¥‡êH¹¬ï2œ'\0264’†'nˆ\15ŽââïP0²M\20\27_ñ2Wj%C-\26¹2\18åZ2\0074:Ç\0\19سmhà‚ø\20\27d²l\"p¬¡ç¾cQªÃÚ¨\8¨Ñn£RÊBÍÂ)+pÖâ;\19\0ÛC‰£¾X=&¾§Ó£ù¹\23£3œR6œÝ\17§·ëÞØo“Ýl×ç—ª=ÀAß\17ÒV©ÖBB[±[õ4jiV÷\30\6:0\4Ì’\18P+ë\24£©f­‘‚#¤†tŒ`ØZë\8öPÍ=ó?3÷j¾9\3ȺÜNÄ\26û1·)Œ¯-‰÷Ø\2-_q·\15\\ÿ\19\19â›ÓP¤ïMðPŒ¶Ð°Ê„ÇL\19s‹ëå™:`†éqŽT[\28ê¶#ÒÍ¿ªZQ†Îf\0—(E6o·pm’­»\26^žº-ŽôZØ‚êiïÙÛŒ=…\22\30ö¼:÷ÙJ\7\6­5|\13\15›]3Ñ×CÓ‡R»ç[è\20í~\\f\23ãñÓn#p|à\"\22ØÛ\5\20œ§:2CƒÄ=G7ótoy‡s@7µ ‹>l*uÞ\2(ò\29ÃäõUHoÖ#Öã\22¤û†\ +¬.°· ¸\3.oè\\ALµøÎÏ\6\6Û¸±ï6‹\13ødNúr¤›š¥+BÓ?0‡\9©mú£÷‹·\"¦°B\30)\15QËý1\6M\23)Õ\3Ø?’£üHEö\29æɺ(ð®Â\20³\13XH1‰'çËîùCŽŠÇU—\0Ê\0À\18έ\27ýœ¬\8qÏ]¨•[Œ\31Ü&8\1ä\27¶i~è&\28[$\9ÿ[\12 ß’ÿü@£i‹%\4)!Š£î³¶,²¡\8Ìuz¸Úo‘[\5ìòÁÉŠ]ø\"\127Îõ¸³¯9ê9:Ôzÿ\12àúü¹ØZ½Xûù\9£Jø\29ëÖ_`!Ó'\17\127\29\127Ï ÜJbKK?¨ýVð@Y\11ÿüÜ\\ƒ/ýØÎí\27bäÈ\127D½ÿlNN‚\27Ú½\1°ê\0001\20ô#J8\27$ûlôH¾\18‘Å*\1(¡äð·£½NÅ‹\17‰Ý¤\3ö\127~lûé\7$\\ÿi„Y^\127æ=UÎ\30™Ùf¬~ÞFWý'\7ФU®™\18ö6÷·‡ëWžäB„¯¼\1c`ýÜ\0020G\17x‰¸w¯ëÏü†NüîË\20ß÷vmÚéÇg½Á\26 9G£³lïé\7›æx°øôƒýÊ??>e¹ªòP‡e§Ÿ\18C[¢Î\"âéßYT°V¨ O…âÚKÔ^¢ö\18µ\23ÛEúa}«Ø°Êôƒ\24S€ð¹ó߶˜~¬$ÕÞæ,¸\21î\31·Õ\6˜~\28Æhè\25ÜlËë'l.ý`séGësÿ€$y\28Uò\3M$\31<ï^q‹ƒëÇ\3ûæ\0158\21µ\\€úp{o\2\13å]£_ŸíýO\6›r|!€óøa¾EWe÷„#´õ¡Å¯Dû³Ü\19Î\23Î\25犣\26ƃj\27'-7Ü:\16¬Éô€—L¥=Ìp†;Ù͸\8ß\3\28.ßLj\14”‡»Hÿ5D®#9\15²¢\2ä\20–õ€×\8?wó\6é\13«ãN4\25Ìåa+>\15îBîx.´°\\\15N<9‚.Q\22Gƒ~?Ì #÷³\14£<\17asî+Àž\24Db(§Ñ’6îü¿@Û¶þ?p¾÷t#/¦\29\3&2í1¹3§\3F®\30 A)Œ‡>&¡ö|˜-Î<ÌPñx3T\16÷T\29\19Òêi\"š5ü@Ãí-|ðÄ=l\31ð1-ÚT\30\22”zL6\4ñ0R¥Ê+AU\4<ÿu\22<ÌS  ½ÇýŽ3»Ù\11ŽËÌ\26øŒÕÑGÞ_ûÇ Ôù\3ô™\6\8`íFð·\0134ó[\2²­d`À²\7LîÙîh÷J¡\12CF£*A\11%©ô×Ç#̦´¨ž˜-{\30\31Ï0Nñ<šßô‰ù\8ð\25þ\28@\26ŸÇ\11\7¬\1\4Ãóx•çóJZßÏ(ç5\19~Óß\27Ðøc)dA:ãyœDW(Éü±\127r;,G„$%@Û\19¶\16Æõ„A\6ðœDé‰Àt\30çú›?È\18<Ñéò8\16ärÛ!`Õž‚œµŠ$\25ãý<\0067\9¾Ñ.-­U©žÈjîÌ}†Kó`õµGÙåh\24\4ÂX©¦ ÖÖó$¬C\127¿pÎ\31OÔ†É\17urxŽ§ñd:t<é(xŽ\28ù¿\4?ãV:\2ˆØk\18>Çñ<ð_[´+\127a¶\1\12s\0õâhšA\9n\4é[CÉùþ\9Fø4+­\\$RWþø° Ále6ÿþqˆö:€e\5\28Ñ4j¿Ï\17FŠ\20\17l?O,Ÿ2OŸq\13+<ŸÏÛÁ©×ñi£\1ù?ƒ‘\16zv(ø\5I/Ô¬Cg\4¢Â1;Ñý—cïÇðá›\3t×¥=\15_µÛ\\±\21ç.\17\16å4×É°ŽÖlðôý¢Bš{1Xÿ@Ú-Bó>‚ùJۜܡAG?WÔL(?wã\11W[ÖS\27üsÒÑP\0173O$\ +Ÿ\19‡žÜ\20\30η'§Ì\14IŸ'lVG€}<'ß\28¾ø\7’𴙬ý\0196\0019ÿIÿcu¹ÊËj›¼¿>½+)¨V‚\26Ósj\27CÎ\22õÜùŸv¨§Ö\31\ +ÓvO؇žÙ\15äϬÙÊ|\17á¤\0317ðú?_>p4I€±<½1=±Ø;áñ‰ø´\29_Æ\7ÁNð´\1Ç\27Uª‡µ\31=½û͵üdÿ–_ÝÐ&u0¢sÏ®Œ][©\28¯;[ä9ó¥\21ýBn\22·#ìé¯YÈæË*ü_¶\0ùš3ÖH^³sØŠìk\17b+\0231Çײ\18k;¯”…@¾ \23^\11¯]Vóó\"oNÂWåO|y®8Äë+VaM/¬³É\29©Aäÿ\11•¬ú\7zõZó \6­\3™Š¢8~_œ\13¯2i¦¾P2OÀA\13€ªxY£í\11¾\29ý\23’ø²\17³×\1\29\6¯ºŒ\3IV¥®\\\ +¾*4+fˆÛKu\"jê\28›Âø=R-¸ª\127àÂâÕµ\25½z¡\28a7/̓×NYw›fùfksÛ\17Z}ƒxجtw;¢\1z¿ñŠ¥ÿÈÝoh)Ø|ñ¿\29g\21Fì<\27,dK\ +·8™\18ÁÛ°Y\13á\22j\0087È\16\\×Pœ\1\25¬íhs·\20\"zwƒŸIÍà\1—œm ˜~\"\2‰ÚÍö´\9Re\127þ\23íß™uoÛ[.ió#\12®èuƒåã°±^¶“õ\11\3Nn;:\0086^&¶³æºÜ?ÿk÷aÞ\1ä|ö›Õ¶áÿÒ÷Ž;w\6ך›\17‰-ôÌ\ +ÌèÈÞ,“C`u\28í\23ð\6 ØŒónà\26H‰9\6°·!eà˜Â;¾ƒ]š_v¶PI\6àtÙl\6…\ 
+—õ\3ž¿>`làub³dÇæ—á-ä;0K|Ç`)Y\28ÍJ\2±D¹ygÞ\2aÙÞâ\22¡\7\20q°+g!jÀ\0278é\22º¾¶÷³° –'…h× ’;€Íï®›ŸŠO\1Á-6‹Èºì¿\9­ñ\9V\3Fù ¤\ +n(±#“í,\18€õ‹mô\30;FÌô\14€ÅŠÏx¢bgómÉ@훿…\23ÜÍÄØ6¢Ù\9wrˆð&âW‚*¤Ä\22VJ<@õ\29\1®Bª¡9Os™ž¦B~ªëûƽxìEnmW?hÉ£!Ú#\22°Ý¦2|l\"ät.m¦ç¶‰+¨mò\29ø6é³wöi‡Ü&[YÛÌs¾\25÷‘Ë¥¥¯ô.\1Èe†rR¨ïHÂ\5öA\127j\127'z(xŸ]u¦,’Üß• ö\9ÕiDeã°Ûl\24y39·a¥s[F7\5\14m…dÏ\9í‹[Fë ÿoêi\17w\27šÙ™GyÏØæUmædܬÄxËÌ\1§Çäë6d!?›µ\19iaþ3ÛÃBcSVª¿ù‹¤Â†àò\6I²Aƒl<ál6\27»éPÝ,3ù±¡\4Dõ±&h‚/i·Ð<±ùâu\11µÉ›ÏÇÍ÷¤[W\127w&;¦\17·~\16n³u[>Þú™ó„\11e½!‹Éÿ+×ÎJkiáÍ–ê·~E’EíAѱÂîþ/Ägã“Ž4-¹™klCDZãÕ´î‘5\127¼\"\\⺿L\20ÂüQª\18…•ÈSj\0WÍÞÒ?µÇoÝ[7·°[ð\\)‘Æý©(‘t_êD\8M9¼\19ªÏ \8\29Å|‚æ;lAÈmSèé\28oóÀ9@\ +Pœ\4\9øª³5ʘ\28ƒ„ƒ\"2% ­\30\30(OÜ'Ìc›°\12nõu›•roh\19–gÿçŸÿ\31‹\26â©", + ["length"]=94615, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=14270, diff --git a/tex/context/patterns/lang-deo.lua b/tex/context/patterns/lang-deo.lua index dccc156f7..5c235e744 100644 --- a/tex/context/patterns/lang-deo.lua +++ b/tex/context/patterns/lang-deo.lua @@ -44,7 +44,174 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijklmnopqrstuvwxyzßàáâäçèéêëíñóôöü", - ["data"]=".ab1a .ab3l .abo2 .ab3ol .ab1or .ack2 .ag4n .ag4r .ag2u .ai2s .akt2a .al3br .al2e .al5l4en .al4tei .alt3s .ampe4 .amt4s1 .an3d2 .anden6k .and4ri .ang2 .an3gli .ang4s2 .angst3 .an3s .an4si. .ans2p .an4tag .an3th .aps2 .ari1e .ark2a .ar4m3ac .ar2sc .ar4t3ei .as6sest .as2t .ata1 .at4h .au3d .au2f3 .aufs2 .au2s1 .ausch3 .au6stes .ax2 .äm3 .ät2s .äu3 .be3erb .be3ra .be3r2e .berg3a .ber6gab .ber4g3r .boge2 .bo4s3k .bu4ser .bus5sen .bu7sser. .ch2 .dab4 .da2r1 .da4rin .darm1 .da4te. .da4tes .de2al .de1i .de4in. .de1o2 .de3r4en .derma3 .dermas6 .de3sk .dien2 .do2mo .do1pe .dorf1 .dü1b .ebe2r1 .ehe1i .ei3e2 .ei4na .einbus6 .einen6g .ei2sp .ei4s1t .ei2tr .eke2 .el2bi .em3m2 .en1 .en4d3er .en5der. .en2d3r .enn2 .en2t3 .epi1 .er8brecht .er2da .er4dan .er4dar .er4dei .er4der .er1e .ere3c .erf4 .er1i .ers2 .er8stein .er8sterb .er8stritt. .er8stritten. .er4zen4 .es1p .es2st .es2t .est4e .est2h .et2s .eu1 .eu3g4 .eu3t .eve4r .ext4 .fä4s .fe2i .fer4no .fe4sta .fi4le. .fi4len .fi2s .flö8s7se. .flö8s7sen. .flö8sses .fs4 .fu2sc .ga2t .gd2 .ge5nar .ge3ne .ge3r2a .ge3r2e .ge3s4 .get4 .ge3u .guss1 .haft3s .hal2s .hau2t1 .he2 .he3fe .her3an .he3ri .he6r5inn .ho4met .ia2 .im2a .ima4ge .im5m .in1 .in3e .ink4 .inn2e .inu1 .ire3 .is2a .jor3 .ka2b5l .ka2i .kamp2 .ka4t3io .ken6num .ker3s .ki4e .kle2i .kopf1 .ks2 .kus2 .le4ar .li2f .li4ve. .lo4g3in .lo3ver .lö4s3s .lu4str .ma3d .ma2i .ma3la .ma4str .md2 .me2e .mel2a .men8schl .men8schw .men3t4 .mi2t .mm2 .näs5c .ni2e .nob4 .no2c .no2s .no4th .nul2 .nus2 .ob1a .obe2 .ohr5s .om2a .oper4 .or2a .ort2 .orts3e .ort4st .os5t6alg .oste2 .ost5end .osten8de .oste6re .ost3r .ozo4 .pa4r1e .par3t4h .pf4 .ph4 .poka2 .po4str .ps2 .ra4s3s .reb3s2 .re3cha .rein4t .reli1 .reli3e .ri2as .richt6e .ro4a .ro3m4a .rö2s1c .ru5s6ses .rü1b .rücker6 .rü4ss .sali1 .sas2 .sa5sse .säs4 .sä5ss .sch4 .scho7s8se. .scho7s8ses. .sen3s .ser2u .se2t1 .sha2 .si2te .ski1e .spas4 .spä5s4 .spiege8lei .st4 .sto4re .sucher6 .tal2e .tan4k3l .ta2to .te2e .te2f .te3no .th4 .ti2a .tid1 .ti4me. 
.ti4mes .ti2s .tite4 .to4nin .to4pl .to2w .tras3 .tra4ss .tri3e4s .ts2 .tu3ri .uf2e2 .ufer1 .ul4mei .um3 .umo2 .un3a2 .un3d .un3e .un3g .uni2t .ur1 .ur2i .urin4s .ur3o2m .uro2p .ur3s2 .ut2a .ut3r .übe4 .ve5n2e .vo4r .wah4l .wa2s .weg5s .wei4ta .wi4e .wor2 .wort5en6 .xe3 .ya4l .zeit3s .zi2e .zin4st 2aa a1ab aa2be aa1c aa2gr 4a1a2n 4a2ar aa2r1a aar3f4 aart2 aas1t aat4s1 a3au a1ä a1b 2aba ab1auf ab1ä ab2äu 1abd ab1eb abe1e ab1eil 2abel abe2la 2a3ber ab1erk ab1err ab1erz ab3esse abes2t ab1eß 2abet 2abew 1abf 3abfi 1abg 1abh 2abi ab1ins ab1ir ab1it 1abk ab1l 1a2bla ab5lag 1a2blä 2able ab4le. ab3li ab4lo 3a2blö a2blu abma3s 1abn 2abo a2bo. ab2of 3a2bon ab3r a3bra a4brä 2abrü abs2a 1absc ab3s2i 1ab3sp abs4t4 1absta ab3sz 1abtei 2abu ab1ur 2abü 1abw 2aby 1abz 2aca 2ac1c a1cem 2ach. ach1a a1chal ach3au 2achb a1che a2ch1e2c ach1ei a4cherf a4cherk a4cherö a4ch3erw 4achf a1chi ach3l ach3m ach3n a1cho a3cho. ach1o2b ach1or ach3ö ach3r ach3su a4cht acht5erg ach2t1o ach8traum ach8träume. ach8träumen. ach6trit a1chu ach1u2f ach3ü 2achv 4ach1w a1ci ac1in 2ack. ack2en ackmu4 ackmus3 ack2se ack3sl ack3sta4 a1cl a3co acon4n 2acu a1ç a1d 2ada. a3d2ab ad2ag adai4 ada2m ad3ama a2d1an 3a4dap a3d2ar3 4adav 1a2dä ad1c 1add 2ade. ade2al adefi4 a2dein 2aden ade1r2a a2deri 4ades2 ade3sp ades6s 2adf 2adh 4a3di adi3en 5adj 2ado ad2ob 2adp 2adq 2ad3rec ad4res ad3ru 2ads2 ad3sz ad2t1 2adu 2a1e1 ae2b ae2d ae2i a2ek a2ela a2ele ae2o3 ae2p 3a2er2o a2et a2ew ae2x af1a a2fak a2fan a3far af4at a2fau 2afe a2f1ec a2fent af1erl a2fex af2fl af4flu 2afi 2af3l a2fö af3ra af3rä af3re af3rö af3s2a af2sp af2t1a af2tei af4t3erl af2t3r aft5re af2tur a2f3ur a1g 2aga ag1ab ag1a2d ag1ar ag1au ag2di ag2dr ag2du age1i age4na age4neb a2gent a4gentu ag2er age4ral 2ages age2sa age4sel age4si age2s3p ages5s ag3esse age6stem ag3gl 1aggr 3a2git 2a2gl ag4la a4glö ag2n ag4ne. ag4nu a2g3re a2g3ri ag4ro agsa2 ag4sam ag3sc ags3p ag6spo ag4sti ag2str 2agt ag2th a2gund 2ah. 2a1ha ah4at 2a1he a2h1erh a1h2i ahin3 ahl3a2 ah4l1ei ah4l3erh ah2lö ahl3sz ah4n1a ah2nä ahner4e ahnt2 1ahor ah1o2s a2h3ö ahr1a ah3r2e ahre4s3 ah3ri ahrta4 ahr4tri 2ah2s aht1s a1hu ah1w a1hy aian3 aid4s aids1t ai1e2 aif2 ai3g4 a3ik. ai3ke ai3ku ai2lo a1ind ain4e a1ing ain3sp 2ais ai2sa a3isch. ai3s2e aiso2 aiss2 ais3sen ais5st a3iv. aive3 a3ivl a3ivs a1j ajekt4o 2ak. 1a2k4ad 2akal 2a3kam 2akar ak4at 1a2kaz 2akb 2akc 2akd 4a1ke a2kef aken2n a2keu 2a1ki 2ak3l ak4li 4ako 2a1kr ak3rau 3akro3 2aks ak3sh 2akta 2aktb ak3te ak4tei 2aktik ak2t3r ak3t4ri 2aktst 2a1ku a2kun 4a3kü 1akz a1la 2ala. ala5ch2 al1af ala2g al1age a3lal al1am alami5 al3amp al1ana a2l1ang al1ans al1anz a2lar a3lar. a3lare al2arm al3arr ala2s al1asi al1ass 2alat al1au al3aug a1lä al1äm alb3ein alb3eis al4berh al4b3erw al2b1l alb3li al2boh al2br alb3ru alb3s al2dä al2dr al3du 2ale 3a2l1e2b 3a2l1ef a4l1eh a2l1ei a4l3ein a2l1el alen1 al3ends a2leng ale2p al1epo a2l1erf a2l1erh al1erl 3alerm a2l1ert 3a2lerz a2l1esk ale4t al1eta al1eth a2l1eu a4leur 3a2lex alf4r 3algi al2gli 1algo 2ali ali4ene al2imb ali4nal al1ins a2linv alk1ar 1alkoh alk3s2 al2lab al2l3a4r al2lau al4lec alle4gi al3lend all5erfa al3les 1allgä alli5er. alli7ers. al2lob 3almb 2alo a2l1o2b alo2ga al1ope al1orc a2l1ö al2ös 3alpe. 1alph al3skl al2sum al3sun al2tak alt3eig al3ter al4t3erf al2tö al2tre al2tri alt3ric al2tro alt2se alt4stü a1lu al2uf a2lum al1umb al1ur 4aly alzer4z al2zw 2am. 2am2a amab4 amad2 ama3g 2amä am4e 2ame. a2meb ame2n1 amer2a a2meri ame3ru a4mesh a3met a2mew a3mi. 
a3mie 2a3mir a3mis ami3t2a ami3ti 2aml am2ma2c 2ammal am2mä am2mei am2min 2amml am4mo2d ammu2 am2mus a2mö amp2fa2 am3pr 2ams am4schl 1amt. am2t1a am2t1ä am2tel am4t3ern am2tö am2t3r am2tu 2amu 2ana. 2anab ana3c anadi3 a3nak an1alg ana4lin 2anam 2anan 2anas an1äs 1anb 2anbu an3ch 2and. an3dac and4art ande4sc an2dex an2d3rü and4sas and6spas and6s5paß and2su 2andu and1ur 2ane an3ec a3nee an2ei. an3eif an1e4k 3a4n1erb an1eth 1anf 2anfi anft3s an3f2u 4ang. 1anga 3angeb an2g1ei an4g3erf an4g3erl an4gerw an4g3erz 2angf 2angh 2angie ang1l an2gla 2ango ang1r an2g3ra 4angs. ang3sc ang6s3po 1anh 2a3ni an2i3d ani5ers. 3a4nim a4nins 2anj 2ank. an2k1an an2kei an3kl an4klö an2k3no ank1r an2k3ra an2k3rä ankt4 1anl anma3s2 2anmu 2ann 3an3na 3annä an3n2e an1od a3nol a2n1or a3nos a1nö 1anr 1ansä 1ansc ans2en an2seu 2ansh 2ansk an3skr ans1pa 1anspr ans2te an3s2z 2ant. an2t3a4r 1antá 1antei 3antenn an3t4he 1anthr 2anto anton4 3antr ant3rin an2tro 1antw 2a1nu anu3s a1nü 1anw 2anwet 2anzb 1anzei 2anzes 2anzg an2z1i4n 2anzs 1anzü 2anzw an2zwi 2ao ao1i4 a1op a1or a1os ao3t2 a3ot. a1ö a1p 2ap. 2a3pa 2ape a2pef a2pé a2pf ap2fa a3pfl a3phä a2pht 2ap3l ap2n a2pot 3appl ap3pu 2apr 2a3pu 2aq 2ar. a1ra a3ra. ar2ab ar3abt ara3d2 a2r3al a3ra3li a2r1ang a2r1ans a2r1anz a2r3app 2a2rar a2r1au a1rä 1arb 2arb. 4arba ar2bau ar2bec 2arben 2arbi ar2bl 2arbr ar2bre 2arbs2 2arbt 2arbu ar2b3un 1ar1c ar2dro 2are a2rea ar1eff a4reg a2reh ar1ehr a2rein a4rek a3ren aren4se are3r2a ar2erf a2r1erh a2reri a2rerl are3u ar2ew 2arf arf1r ar2f3ra ar2gl ar2gn 2arh 2ari ar2ia ari3e4n ari3erd ari3erg ari5ers. ar1im arin3it ar1int a3riu ar2kal ark3amt ar2k1ar ark3aue ark3lag ar2kor ar4kri arks4 ark3sa ark3sh ar2les arm2ä ar4merk ar3m2or ar2nan arn2e 2a1ro ar1ob a2r1o2d a2r1op a2ror 2arr ar2r3ad arre4n3 ar2rh arr3he 2arsa ar4schl arse3 ar3s2h 2arsi ar3t2e ar2the art2i artin2 2arto ar4t3ram art3re 2arts 2aru ar1uh ar1um a2rü 2arv arwa2 2ary ar2zä 2arze 1arzt ar2z1w as1ala asas2 asa3sse as3au asau2s1 as1ä a2sca a3sche a4schec asch3la a2schm a3schu 4a3s2e a4seb as3e2m a4sex 4ash a4s3ha as4hi asin2g 2a5sis asi4st a3skop a4s3l a4sn a1so1 as1o2f a3sol as1or as1p a4s2ph as2pi a4spl as2po a1spu as3s2a ass2e as2s3ei as3sel as3ser asserma6 as3s2i as2s1p as4st ass1ti ass1to as5str as5stu 2asta a4s3tep as2ter 2astr as4trau a4s2t3rä a2s2t3re a4strol a2stum a3su a4sw aswa2s 3a2syl aße2 aßen3 2a1t at1ab at2a1f at4ag a2t1akt ata3l a3tam at1apf at1au a2taus a2t1ä at2c at2e 4ate. a2teb at3eig a4teli 4aten a2tep ate2ru 4ates atex3 at2h at3ha 4athe1 3athl 4a3ti atingma5 3atm 4atmus ato4man 4ator a2t1ort a2t1ö 4atr atra4t at3rä at3re at3rom at2sa at4schn at2se at4set at2si ats1p at3ta at4tak att3ang at4tau at2tei at3t4hä at2t3rä att3s4 at3tu atu2n atz1er at4zerk at4zerw at2z1i at2zo atz3t2 at2z1w a2u 2au. 2au1a2 2aub au2bli au2blo 4auc auch3ta au2dr 2aue aue2b au3en. au2ere au5erein au2fa auf1an 2aufe. 2aufeh auf1er au4ferk auff4 3aufn 2auft. 2aug 4augeh 2auh au3ha au2hu 4au1i au2is 2auj aule2s au3lü 4aum au2mal au2m1o aum3p2 aum3s2 4aun au3n4a aun2e au2nio au1nu a4unz 2aup2 aup4ter 2au3r2 au2s1ah ausan8ne. au2sau 2ausc au4schm au4scho 1ausd aus3erp au4s3erw 1ausf 1ausg 1ausl au2so au2spr 1ausr auss2 3aussag au3sse aus4se. 
au8ssende au2sta 2auste au4stec aus3tie aus3tri 1ausü 1ausz au3ß a4ut au2t1äu auten4g au4t3erh 3auto 2auts 2auu 2auw 2aux 2auz auz2w 2a1ü 2a1v a3v4a ava3t4 4a3vi a2vr av2s 2a1w awi3 awi1e a1x ax2am ax2e axi2s 2a1ya a1yeu aysi1 ay3t 2a1z az2a3 az2o az2u ä1a ä1b ä5be ä2b3l äb2s ä1che äche1e ächenma5 ächenmas8 ä1chi äch3l ä2chr äch2sp ä1chu äck2e ä1d ä2da ä2d1ia ä2dr äd2s 2ä1e äf2fl äf3l äf3r äf2s äft2 äft4s ä1g ä5ge äge1i ä2g3l äg2n ä2g3r äg4ra äg2s2 äg3sc äg3str 1ä2gy äh1a 2ä3he ä3hi ähl1a ähl2e äh4l3e4be 2ähm äh3ne äh3ri 2äh2s 2äh3t4 ä1hu äh1w ä1im ä1is. ä3isch. ä1isk ä1j ä1k ä2k3l ä2k3r ä1la älbe2 äl2bl ä5le äl2l1a äl2p3 äl4schl ä1lu ämi3en 2äml äm4ma2 äm2s ämt2e 2än. än5de än2dr 2äne äne2n1 än2f5 2änge än2gl än2gr äng3se 2ä3ni änk2e än2k3l än2kr än3n4e2 2äns än2s1c änse3h ä1on ä1pa äp2pl äp2pr äp2s1c 1äq ä2r3a2 är4af är1ä är1c 4äre ä2r1ei äre2n ä2r1ene är2gr är1int är2k3l är4ment ärme3s är1o2 ä1rö ärse2 är2seb är2si ärt4e är2th ärt4s3 ä2rü 1ärz är2zw ä3s äs4c äs4e äse3g2 äser4ei äse4ren äser2i äse3t ä5si ä4sko äskop2 äskopf3 äs2kr ä4sl ä4s1p äs2s äs4s1c äss2e äss3erk äs4s1t äst2 äs2te ä4str ä4sw ä1ß äß1erk ä2t1a2 ä3te ät2e1i ätein2 äte2n ä2t2h ä1ti ä1to ät1ob ät3r ät2sa ät2sä ät4schl ät4schr ät2s1i äts3l äts1p ät2s1t ät4s3te ät4sti ät2tr ä1tu ät2zw äu2br äu1c äude3 äu3el ä2uf äuf2e 1äug äu4g3l 2äul 2äum äu2ma äum2s1 ä2un äun2e äu1nu 2äur äu1s 2ä3us. äu4schä äu4schm äu3se ä3usg ä3usk ä3usn äu2sp äu3s2s äuss1c 1äuß äu2tr 4ä1v 1äx ä1z â1t á1n ba2bl 2b1abs bach7t4e backs4 b1a2dr 2b1af bah2nu bais2 ba2ka ba2k1er ba2k1i bak1l bak1r ba2kra 3bal bal2a bal2lä bal4le4b bal4leh bal6lerg bal4li4g bal3th 2b1am ban2a 3b2and ban2dr ba3n2e b1ang ban2k1a ban4kl ban2kr 2banl 2b1ans ban3t b1anz bar3b bar3de ba2rei bar2en bar3n bar3zw 3bas ba3s2a ba2sc ba2str bau3g bau3s2k bau3sp ba1yo 3b2äc bä1ch b2är b2ä4s3 4b1b b3be bbe4p bb3ler bb2lö bbru2c bb2s bbu1 2b1c 2b3d4 3be. 3bea be3an be3ar be3as 3beb b2ebe 1bec be1ch be2del bedi4 be1eh be2erk be1erl be1eta 3bef4 be3g2 2b1eier bei1f4 bei4ge. beik4 beil2 bei3la 2b1eime b2ein be1ind be1in2h bei3s2 beit2s 3bek 3bel be3las be3lec be3lei be2l1en be2let be3li bel3la bel3li bel3sz bel3t4 1bem bema5sse bemas8sen 1ben. ben3ar ben3dor be3nei 3ben3g be3n2i ben3n ben2se ben4spa ben4spr benst4 ben2su 2bentb b2enti bent4r b1ents 2bentw ben3un ben3z2 be1o be1ra ber3am be2ran beras4 ber4ei. be4r3eiw be4rerk bere4s ber6gan. ber4in. ber3iss berma7sse bermas8se. berma8ssen ber3na b1ernt be1rop berö4 ber3st4a be3rum 3bes bes2a besä5s be2s1er be5slo bes2po bess4e b3esst. bes3sz be6stein be4s3tol be4stor be3s2ze 3bet be2tap be3tha be1ur 3b2ew 2b1ex 1bez 2b5f4 bfal2 bflö4 bflös3 2b1g2 bgas1 bga4st bge3 bges2 2b5h2 bhut2 1bi bi3ak bib2 bibe2 bie4str bik2a bi2ke. bi2kes 3bil bil2a bi2lau 4b1illu bi2lu 2b1inb bin2e 2b1inf bin3gl 2b1int bi2o1 bio3d bi3on biri1 bi3se b1iso bi2sol bi2sp bis4s1c bis3si bi2stu bi2stü b2it. b2ita b2ite bi2tu bi3tum b2i3tus biz2 4b1j bjek4to 2b1k4 bl2 2bl. bla3b6 b3lad b2lanc 3blat b2latt 2b3law b2le 3ble2a b3leb 2b3leg 2b3leid b3lein 3blem 3ble4n b3lese ble3sz b4let b3leu 2blich 3blick b2lie 2blig bling4 b4lis b2lit 3blitz b2lo b4loc b3los2 blo3sse blös4s 2blun 3blut 3blü 2b1m bmas2 4b3n2 bni2 bnis1 bo4a bo5as b1ob3 bo2bl bo2br bo1ch2 bo3d2 boe1 bo2ei 2b1of bo3fe bo1is bo2l1an 3bon. bond1 bon2de bo2ne 3bons b1op bo1r2a bo4rä bor2d1i bor2d3r bo2rei bo4rig b1ort bor2t3r bo2sc bo4s3p bote3n4e bo3th bot2s3t bo2xi bö2b3 2böf b1öl 2b1p2 bpa2g 2b1q b2r4 2br. b4ra. 2b3rad b4rah b4ra3k bra4ss brast4 3brä brä4u 2bre. 
3brea 6b5rechte 2b3ref 2breg b3reif 3brem 2b3rep b4rer 2b3riem bri2er 2brig b4rio bro1 b3roh 2b3rol b4ron b4ruc bru4s brust3 bru2th 3brü brü4ss 4b1s b2s1ad b3sand bs3ar bsas2 bsa3sse bsat2z b3sä b4sär b5sc bs2ca b6schan b6schef bs4cu b3se. bs1e2b b3sel. bs1ele bse2n1 b3sen. bs1ent bs1er b4serf bs3e4r3in b4sers b3ses b3set bsi2t b4sl b2s1of bs1op bso2r b2sö bs2pl b3s2pu b4ss2 bs2t bst1a2b bst3ac bst1ak bs3tät bst3er b4stern b2stip b3sto b4stod b3stö b3stra b2s3trä bs3treu b3stü b4stüb b2s1un 4b1t b3ta btal3 bta4st3r b5te b2th bt4r bts2 btü1 bu2chi bu2e3 bu2f bu3li bul2la 2b3umk bunde4s b2urg bu3r4i burt4s bu2sa bu4s3cha bu4schl bu4schm bu4schw bus1er bu2sin bu2s1p bu4sses bu6s5term bu2s1tr bu2s1u bü1c bügel3e bü3s4 2b1v 2b1w bwel3 3by1 by3p bys2 2b1z2 bzeit1 bzu1 1ca 2c1ab ca2ch ca2e3 ca3g4 ca1h cal3t c4an ca2pe 3car car3n carri1 ca3s2a3 cas2t ca3t4h ca1y2 cä1 cäs2 2cc c1ce c1ch2 c2d2 c3do 2cec ceco4 ce2dr 2cef ce1i 2cek 1cen cen3g ce1nu 1cer cere3 ce1ro ce3sh 1cet 2ceta ce1u 1cé c1f c4h 4ch. 2chab ch3a2bi 2ch1ak ch2anb 3chanc ch1ang ch3anst 2chanz 1chao 2char. 1chara 3charta cha2sc 1chato 2chatu ch1ärm ch1äs 1châ 2chb 6chc 2chd ch3e4ben 1chef 3chef. che4fer 3chefs 4chei ch1eim 4chelem che4ler 4chents 4chentw cher3a che3rei 6chergeb cher6zie ch1ess 2cheta 2ch1e4x 1ché 2chf 2chg 2chh 1ch1ia chi3na 4chind 3chines 2chinf 2chinh ch1ins ch1int 2ch1inv 1chiru 2chj 2chk 2chl2 ch2le ch3lein ch2lu 4ch2m 2chn4 chner8ei. 2chob cho2f ch1off ch1oh ch1orc 2chp ch2r4 2chre chre3s ch3rh 1chron 4chs ch4stal 2cht 2chuf 2chuh 2ch1unf 2chunt 2chü 2chv 2chw 5chy 2chz ci1c ci2s c1j 4c4k ck1a ck3an cka4r1 ck1ä ck1ehe ck1ei ck1ent cke2ra ck2ere ck1erh ck2ern ck1err ck1ese ck1id ck1im ck1in ck3l ck3n ck1o2 ck3r ck4stro ckt2e ck1um3 ck1up c4l2 clet2 clo1 1clu c2m2 1co co1ch co2d2 co3di coff4 coi2 co1it co2ke co2le col2o com4te. comtes4 con2ne co2pe co1ra cor3d co3re cos4 co2te 2cp c1q 1c4r2 cre2 cre4mes cry2 2c1s2 c2si 4c1t cte3e cti2 cti4o ctur6 3cu cu2p3 cussi4 1cy c1z 3da. da1a 2d1ab d2abä da2ben 3d2abl da2bre dab4rü 2d1ac d2ac. dach3a da2cho 4d3achse d1af d1ag dagi2o dah3l da1ho 3d4ai2 da1in da1is dal2a 2d1alar dal3b2 da3lö d1alt d1amma 2d1ammä damo3 d2amp dampf8erf 2d1amt d2an. 2d1ana dan4ce. 2d1an3d2 d1ang 2dange dan4kl dan5kla dan2k1o dan2kr 2d1ans 2dantw 2danw d2anz. 4danzi 2d1ap d2aph 4dapp da2r3a 2darb2 3d2arl dar2ma dar2m1i da2ro d3arr d2ars d1art da2ru d2arw da3s2h dat4e2 da3tei date4n 4d3atl 4datm 3dau3e 2d1au2f 2d1aus 4daush 2d1äh 2d1ämt 2d1änd 2d1äng 2d1äp 2därz dä2u dä3us 2d1b4 dbu2c dbu3s2 2dc d1ch dco4r 2d1d2 ddar2m d3dh d5do 1de de2ad de3as de3a2t de3b4 2d1e4ben 3dec de1ch de1e2 2d1eff deg2 de3gl dehe2 de3ho 2d1ehr d1ei d2eic 3d2e1im dein2d dein2s de2l1a4g de4l3aug del1än del1ec delei4g de3lein 2d1elek 2delem 2delfm delle2 del4lei del2lö2 de2l1ob de2lop de3lor de2lö del4san del2s5e del2so del2s1p del3t dem2ar 2d1emp d2en. de4n3end 4denerg den3g d2enh de2ni den4k3li 4den4sem den4sen den6s5tau den3th 2dentw de1nu de1on depi4so d4er. dera2b de2rap der2bl 2derdb de2re2b de4reck der3edi de4r3eis derer3 de3r4erb de3r4erf de4r3ero derer4t 4d3erhöh 3derie derin4f 4derklä der3m2 4derneu de1ro de2rop derö4 4d3ersat der6t5en6d dert4ra de3ru de4ruh de4rum de2s1a de3sac de4sa4g de4sam des3an des1än de4seh des1en1 des1et des1in des1o de2sor de2s1p des5s2 dest5alt de4stam de6stant de4stei de4stit dest5rat de3stri de3stro de2s1u deten4t 2d1etw de1un de1url de3us de2xis 2dexp 2d1f4 2d1g2 dga4str d2ge. dge2ta dge4t1e d3gl 2d1h2 d2his 1di di4ab di2ad di4am 3dic di1ce di2e di3e2d di3end die4neb di3eni di3ens. 
die4s3c diet3 die2th dige4s dik2a dil2s3 2d1imb din2a 2d1ind 2d1inf 2d1inh 2d1in1it 4d3inner 2d1ins 2d1int di2ob dion5s di1p di4re. di2ren di2ris 2d1irl di2sp di3s4per 2d1isr dist2 di2ta di4teng di4t3erl di4t3erm di4t3ers di2t3r dit1s di2tu di5v diz2 2d1j 2d1k4 4d1l2 d3le dle2ra dli2f dl3m dl3s 2d3m2 4d5n2 dni2 dnis1 d1ob d2oba 2dobe dob4l d2obr do1chi 2d1o2f doll2 do2mar do5n4a doni1 do2o 2dope 2d1opf d2opp d2o3r4a 2dorc 2dord dor2f1a dor2fä dor2fl dor2fr 2d1org do2rie d2orp 2dort dor2ta d2os. dos3s dost1 do4sta dot6h do3un d1ö dö2l1 3d2ör dö2s1c 2d3p2 2d1q d2r4 3d4ra. 2d3rad 2drahm d3rai 3d4ram d3rand 2d3rast 2d3rauc 2dräd d4räh 2d3rät 2d3räu 4dre. d4rea. d4reas 3d4reck 2dreg 3d4reh 2d3reic d4reiv 4drem 4d3ren 2d3rep 4d3rer 4dres. d4resc 2d3rh d3ri d4ri. 3d4ria 2d5ric d4rid d4rie d5rieg d4rif d4rik d4ril d4rin. 3d4risc 2driß 3d4rit 4dritu d3rob d3roc 2d3rod d4roi drom2 2d3rot d3rou 2d3rov d3rö drö2s1 d5rub 3d4ruc 2d3ruh drunge3 2d5rut drü1b 2d1s ds3ab d4s1amt d2san ds3assi d2sau2 ds1än 4dsb d4schef d4schin dsch4r d2s1e2b d2s1ef d3sei ds2eig d4seins d2s1eng d2s1ent d2s1erf d2serh d2s1erk ds1err d2s1erz dse4t d2s1eta d3s2ha d3sho d2s1im ds2inf d3s2kan d3skul 4dsl d2s1op dso2r ds1ori d2sö d2s1par ds1pa4s d2spä ds2po d2spro ds2pu dss2 ds3si dst4 ds1tab d4s3täti d4stea ds2til ds2tip d2s1tis d2stod ds1ums d2sun ds2zen 2d1t dta2d d3tea d2th d4thei dt3ho dto2 dt3r dtran2 dt5s2 1du du1alv du1ar dub3l du2bli du2f 2d1ufe 2d1uh du1i 2d1umb 2dumd 2d1u2m1e 2dumf 2dumg 2d3umk 2duml d2ump 2dumr d1ums d2ums. 2d1umv 2d1un3d dund2a 2d1unf dun3ke dun2kl 2dunr 2dunt du1o dur2c 2d1url 2dursa du4schn du4schr du4schw 2düb 3düf 3dün 2d1v2 2d1w dwa2 dwest3 dy1 dy2s1 2d1z 2e1a e3a2b eab3l ea3der eadli4 ea2dr ea2g4 ea3ga ea4ge ea3gl eakt2 ea2la e3alei e4aler. e2alti2 e2ame eam3m eam1o eam3t ea2na e2ano e3ar. ea2ra e3a4rene e3arr e3arv e2as eas5s eat4e2 eater1 e3ath eat3s2 e3at3t4 e3au2f e3aug eaus3s eau3st e1ä4 e1b 2eba e3b2ak 2ebed ebe2i 2ebel eb2en ebens3e ebert4 2ebet 2ebl eb3ler eb4leu e3blie eb3lo eb2lö 2eb2o ebö2s 2ebr eb3rei eb4ru eb2s eb6sche ebse2 ebs1i ebs1o ebs1p ebs3pa eb4stät ebs3t2h eb4s3ti eb4s3tot eb3str ebs1u e3bu ebus3s ebu2t1 2eca e1ce ech1ä 2e3che ech1ei e6ch5erzi e1chi ech3l ech3m ech3n e2cho. ech1o2b e2ch3r ech3ta ech3t4ei e1chu ech1uh ech1w e1ci eci6a eck3se 2eckt 2e1cl 2eco eco3d 2ect e1d e3d2a ed2dr ed2e ede2al e3dei ede3n2e eden4se eden4s3p ede2r edert2 edi4al edma3s2 e3d2o ed2ö eds2ä ed2s1es ed2s1o ed2s1p ed2s3tr ed2su edu2s e3dy3 4ee ee3a2 eeb2l ee2ce ee1ch ee2cho eede3 eed3s2 ee1e e1eff eef4l eeg2 e1ei ee1im eein4se eei5se eel2e e1e2lek ee3len e1emp e1en eena2 ee4nag e2enä e2enc ee3ni e2eno een3s e1e2pi ee1ra e1erbt e1erd ee3r2e ee4r3en4g eere4s1 ee4ret e1erk ee1ro ee1rö eer2ös eert2 e1ertr ee3r2u e1erz ees2 ee3sh ees3k ee3ta ee4tat ee2th ee1u2 eewa4r e1e2x e1f 2ef. 2efa e2f1ad ef1ana ef1ar e2fat efäs4 e2fäu 2efe e3fe. e2f1e2b ef1em e2fent ef2er 2eff. 
1effi ef2fl 2efi e2f1i2d e2f1ins efi2s 1efku 2efl e3f4lu 2e3f2o e3fra ef3rea ef3rol ef3rom ef4rü efs2 ef3so ef3sp ef2tan 2efu e2fum 2efü e1g egd4 e3ge ege4n3a4 ege2ra ege4s3to ege4str ege1u eg1la e2glo e2gn eg3ni egro5sse eg4sal eg3se eg4sei egse4r1 eg4sto eg2th 2egu 2e1ha eh1ach e3h2al eh2aus 2e1hä ehäs3 e1he eh2ec eh1eff eh2el ehen2t3 1e2hep e3her ehe1ra e1hi eh1int ehis4 eh1lam eh1lä ehle2 ehl3ein eh4lent eh5l2er eh2lin eh3lo ehl2se 2ehm eh3mu e1ho e3hol eh2r1a2 ehr1ä ehr1ec eh2rei ehr3erl ehr6erle ehre3s eh3ri eh1ro2 ehr1ob ehr1of eh2s2 eh3se eh3sh eh3si eh3so eh3sp eh3sta e1hu e2hunt e1hü eh3üb eh1w e1hy 2ei3a2 4eib ei2bar ei2bl eibu2t ei4b3ute ei2cho e2id ei2d1a ei3dan ei3de ei4d3err 2eidn ei3dra ei1e 4eien eienge4 eie4s 1eifr ei3g2a 4eigeno eig2er 2eigew ei3gl 1ei2g3n 2eigru 2eigt 2eigu eik2ar ei3kau eik4la e4il 2eil. ei2lar ei2lau 2eilb eil3d ei4lein eilen1 eil3f4 eil3ins 2eiln 1eilzu ei2m1a4g eim3all ei2mor e1imp eim2pl ei2n1a ei4nas ei2nä ein3dr 2eindu ei4neng ei2neu 2einfo ein4fo. ein4fos ein3g2 ein4hab e1init ein3k ein6karn 3einkom ei2n1o2 einsas4 einsa7sse 3einsat ein6stal ein4sz e4inver ei3o2 ei1p eip2f 2eir ei3re e1irr e2is. ei2sa4 ei6schin ei6schwu ei4s3erw eis2pe ei3s2s ei2str ei2sum e2it ei2tab ei2tan ei2tar 2eitä ei3te ei2th ei2tro eitt4 eit3um 2eiu 2e1j e1k ek2a 1ekd e3ke. e3ken e3kes e3key e3k2l ek4n ek2o ek4r 2ekt ekt4ant ekt3erf ekt3erg ek4t3erz ekt2o ek2u e3k2w e1la ela4ben el3abi el2abt el1af ela2h e2l1ak e2l1a2m el1ans el1anz 2elao e2l1ap e2l1a2r el3ari el1asi el1asp el2ast 2e1lä 3elbis el2da eld5erst el4d3erw eld3s2 2ele. 2eleh 2elei e6l5eier. e2l1ein e3leine e4leing e2l1el 1e2lem e3lem. el1emp 2e3len. e4lense e2l1ent e3lep el1erd el1erf e4ler4fa e2l1erg el1erk el1erl e4ler4la e4l3ernä e2l1err 2eles2 el1ess e4l1e4ta e3leu 2elev ele2x 1elf. el3fe elf4l 1elfm 1elft elgi5er. elgi5ers 2eli e2l1id e3lie eli2ne el1ita el3kl elks2 el3lan ell2au el2leb ell3ebe el4l3ein ell3eis el3ler el2lic el3l2in ell3sp elm2a 2eln el5na 2elo e2lof e2lol elon2 el1ope e2l1or elo2ri el2öf el2sum elt2ak elte2k el4t3eng el4tent 3eltern el3tes elto2 el2t3r el3tri elt1s2 elt3se elt3sk 2e1lu e2l1um el1ur el3use e1lü e2lya 2elz elz2e el2zwa e1m 2ema e2m1ad ema2k e2m3anf e2mans 3emanz emas8sens em2d3a2 e3m2en emen4t3h e6mentsp e2m1erw 1e2meti e2m1im emi5na em1int emi3ti 2emm em2map emma3u em2mei e2mop 3empf em3pfl em2sa em2spr em3t2 1emul 2emü emü3s e2n1a 4ena. 2enac e3nad e4naf 4enah e4nak ena3l2i 4enam en4ame e4nand en3ang en3are en2asc 4enat en3att e3naue e2n1är en1äu enbu4s3 en2ce. en3d2ac en2dal endermas8 en4d3ess end4ort end3rom end3si end3s2p end3sz end2um 2ene. ene4ben en1ec e2neff e4nein e2n1el ene4le 2enem 2enen e4n1ent en4entr 4e3ner. e2n1erd e2nerf 1e2nerg e4nerh e4nerk e2n1erl e4n3ermo 4enern e2n1err e2n1ers e2n1ert e2n3eru e2n1erw e4nerz 2enes e4n3ess en3f enf2a enf2u 1engad 3engag enge3ra en3g2i en2gl en3glo 1engp eng1s eng3sc eng3se e3ni. e3nic e2nid e3nie eni3er. eni5ers. e2n1i4m e2n1in e3nio 2enis e3nit 2eniv en3k2ü e2n1o2b enob4le e2nof en1oh e3nol eno2ma en1on e2n1op e2n1o2r eno2s enost3 e3not eno2w 2e1nö en1ö2d en3sac ensas2 ensa5sse en2sau en5sch4e en2seb 1ensem ensen1 ens3eng en3ska en3s2po enst5alt en4s3tät en6s5test 2ensto en6s5trie e4nt ent4ag 1entd en2teb en4terb en3tes 1entfa 3entga en2thi 3entla 1entn en4t3rol 3entspr 2entü 1entw 4entwet 1entz en1u 2enut e1nü 4enwü e1ny en4z3erf en4z3erg en4z3erk enz3ert e1ñ 2eo e1o2b1 e1of eo2fe e1oh e4ol e1on. e1ond e1onf e1onh e1onl e1onr e1ons e1ope e1opf eop4t e1or e3or. e3orb e3ors e3orw eos2 e3os. 
eo3ul e1ov e1ö2 e1p e3pa epa2g e3p2f4 1episo ep3le 1e2poc ep2pa ep4pl ep2pr ept2a ep2tal e3pu epu2s e1q er1a e3ra. e3rad. er3adm eraf4a era1fr era2g e1rai er3aic e2rak e1ral er3all eran3d e3rane er3anf e2ranh er3anm e1rap er3apf e2rar e3rari e1ras e2r3a6si era4s3s era2ß e2rath e3rati e2ratm e1raub er3aue erau2f er3aug e1raw e1raz e1rä er1äh er1äm erä4s erb2e er3br erb4sp er1c er3chl er3da 1erdb er3de 2erdec erd3erw 4ere. er1eb e3rech er3echs er1eck ere4dit er1eff e2r1e2h 4e3rei. er1eig e2rein e4r3eis. ere2l er1ele 2e3rem 2eren e3ren. e3rena e4rense e4r3entf e4rentn e3renz eren8z7end 2erer. e2r3erf e2r1erh e4rerl 2erern e3rero er1err er1ers e2rert er1erw 2eres er1ess er1eß ere4t er3eti er1eul ere4vid erf2e erf4r 4erfür 3ergebn 4ergehä erg3els 1ergol e2rh 1erhab er3he er3hu 2erhü 2eri e2riat e3rib 4e3ric 4e3rie eri3e4n3 eri5ers. e3ri3k4 4e3rin. er1inb e2r1ini er1ink er1int e3rio er1ita 2erk. 1erklä 2erkli 2erkre erk3t 3erlebn ermen4s erm3ers ern1os e1ro. er3oa er1o2b e2r1of e1rog e1r1oh e1rok e1rol e1rom e3ron er3ony er1o2p e4ro2r e1ros e1rou e1row e1roz er1ö erö2d 2erök er3p4 er3rä erri3er 2errü ers2a er3se er5sen er3s2i er3sk ersma3s4 er5smo er3sn er3sp ers2te er3sz ert2ak er6terei er4t3erf er4ters er2tho 4erti ert3ins ert3s2e 2eru eruf4s er1u2m er1und er1uns er3uz erü4b 3erweck 6erweis e1s e4s3ab es1ad es2an es3ant e3s2as esa3sse esas6sen e4s3ato esäs4 es2äu 2esb e3sc es2ca es3cap es2ce esch2 e4sco e4scu e3se. es1ebe es3ehr e2sein ese4ler es3eva 2esf 4esh es3ha es4har es2hu esi1er e3sig e2s1il es1ini e4s3ins es3int es2kat e4s3ke e4sky e4s3l es4log 2esm e4sn es2ort es2ö 2esp e3s2pek e3s2por e3s4pra es2pu 2esr es2sau 4essem ess4e3re ess3erg es3si 2esso es2sof es2s1pa es4ste estab4b est1ak e3stan e4starb 1e2stas es2tau es2te e4st3eng e4st3erh e4st3ess e5stev e3sti e4stip estmo6de e2stod est3ori 2estro es3trop es2tu e3s2tü es2ty e2s1um es1ur e4sw e3sy eße3r2e e1t e3ta. etab4 et1am 1etap etari1 et4at et1äh e3te e4tein et2en eten3d2 ete2o eter4hö eter4tr et2h et3hal et3hü e3ti eti2m eti2ta 2e3to eto2b e4t1of 2etr e4traum et3rec e2tres etsch3w et1s2p et1su ett1a et2tab et4tanz et2t3au et2tä et2tei ette4n1 et4th et2t3r et4tro ett3sz et2t1um et2tur et2tü etwa4r 2etz et2zä et4z3ent etze4s et2zw eu1a2 eue6reif eu2e5sc eu2g1a eu4gent eu3g2er eu4gla eug1s2 euil4 eu1in 1euk eu2kä e1um e3um. e3umb e3uml e3um2s eums1p eum3st 2eun eun2e eu4nei e3un2g eu2nio eun3ka eu1o2 eu1p e2ur2e 3eu3ro eu1s4 eu3sp eu3ss eust4 2eut eut2h eut6schn 2eux eu2zw e3ü 2e1v e2vela e2vent 4ever eve5r2i e3vo ev2s e1w 2ewa e3wä4 ewä6s 2ewe e2we. e3wir ewi2s e3wit ew2s 2ex. ex3at 1e2xem ex1er e1xi e2x1in 1exis ex3l 3exp 2ext. ex2tin ex2tu 2exu 2e3xy ey4n eys2 e1z e3z2a e2z1enn e3zi ezi2s ez2w é1b é1c é1g égi2 é1h é1l élu2 é1o é1p é1r é1s é1t2 é1u2 é1v é1z2 è1c è1m è1n è1r ê1p 1fa fab4 fa2ben f3abf f2abr fab5s 3fac fa4cheb facher5f fa2ch1i fa2cho f1ader fa2dr f4ah faib4 fa2ke f2al fa3l2a fal2kl falla2 fal6lenk fal6l5erk fal2li4 fallö2 fal6scha fal6schm falt2s 2fanb 2fanf fan2gr 2f1ank 2fanl f1anp 2fanr fan3s 2fanw f1an3z 2f1ap f2ar far2br 2f3arc 3fari 3f4art 2f3arz fa3s4a fa3sh f3at fa2to3 2f1auf f3aug fau2s f1ausb 3f4av fa2xa 1fä fä1c fäh2r1u 2f1ärm fä4s3ser fä2ßer f1äu 2f1b2 2f1c 2f3d4 fdie2 1fe featu4 f2ech 2f1eck fe2dr fe2ei fe1em fef4l feh4lei f4eie 2f1eing 4f1einh fe1ini 2f1einw f1ei3s fek2ta fe2l1a fel2dr 2f1e2lek fe2l1er fe2les fe2l1o fel4soh fel3t f2em. fem4m 2femp fe2nä fen3g fe2no fen3sa fens2t f1ent f2er. fe1ra fer2an fe4rang fe4r3anz fe2rau ferde3 f2ere fer2er fer3erz f1erfa f2erl. 4ferneu fe1ro f4erpa f2ers. 
f2ert f1erw fes2t fe2st1a fe4st3ei fe2str 2f1eta fe4tag 3fete fet2t3a feuer3e feu4ru 3few f1ex 2fexp 3fez 1fé 2f1f ff2a2b ff3ar ff4arb ff1au ff2e ffe2e f2f3ef ff3ei ffe1in ffe2m f2f3emi f2fetz f2fex fff4 ff3l ff4la ff4lä ff4lo f3flu f3flü ffo2 f3f4rä ff3ro ff3rö ff2sa ff3sho ff2sp 4f3g2 fge3s 2f1h2 1fi 3fi. fi3at fi1er2f fi2kin fi3kl fik1o2 fi2kob fi2kr fi2l1an fil4auf fil3d fi2les filg4 fi3li fi4lin fil2ip f2ina fi3ni 2f1int fi2o fi3ol fi2r fi3ra 3fis fis4a fisch3o fi3so fis2p fit1o2 fi2tor fi3tu 3fiz 2f1j 4f1k4 f2l2 2fl. f3lad f3lap 1flä 3f4läc 2f5läd f3län 2f3läu 2f3leb f4lee 2f3lein f3ler f4lé f3li. 3f6lim fli4ne 2f5lon 1f4lop flo7s8ses. 1f4lot flo2w f3lö f4luc 1f4lug flu4ger flus3se f4lü 2f3m2 fma2d fmas2s fma3sse 2f3n2 fni2s 1fo fob2l 2f1of foli3 fo2na fon2e fo2nu 2f1op 4f3org fo3rin 3form for4m3a4g forni7er. for4sta for4sti for4tei for2th for2t3r for3tu 2f1o2x 1fö 2föf 2f1ök 2f1öl 4f1p2 2f1q f2r2 f4rac frach6tr f5rad fra4m f3rand f5rap 1f4rän 2fre. f3rec f3red 2freg freik2 frein4 f3rep f4reu 2f3ric fri3d fri2e 2frig 1fris f4risc fri6ster f3roc 1f4ron fro2na fro2sc f3rot f3ru f3rü 4f1s fs2amm f2san fs3ar f2s1as f2sauf f2saus f2saut fsä4 f3sc f4sce f4schan f4schef f2s1e2b f4s3ehr f2s1em f2s1ent f2s1er fse4t f2s1eta fsi2d f3s2kie f2s1o2 f3span f2s1pas f2sph f3s2pl f3s2por fs1pr f2spre fs2pri f2spro fs2pru fs3s4 f2stas f4s3täti f4stech f5stel f4stemp f2stip f2s1tis fst4r f4s3tres f4s3tüte f2sty f2s1un f2sü f3sy 4f1t f2ta. f2tab ft1a2be ft1af f2t1al ft1an ft1ar f3tat ft1e2h ft1eig ft1eis f2t1ent f2t1e4ti f2th f4thei ft3ho ft1op f2t3ro f2t3rö f3t4ru fts1 ft2sa4 ft4sag ft4sam fts2c ft4sche ft2se4 ft4seh fts3el ft2si ft4stä ft4ster ft4stes fts2ti fttra4 f2tum ft1url ftwa4 ft3z2 1fu 3fug 3f2uh f1um 2f1unf 2f1u2ni fun2kl fun2ko fun2k3r 2f1unm 2funt f2ur fu4re. fus2 fu3sse fus6sen fu4sser fuss1p fus4s1t fu2ß1er 3fut 1fü 2füb fü2r fü3s2 2f1v 2f1w 1fy 2f1z fz2a fzeiten6 fzei8tend fz2ö fzu3 fzu4ga 3ga. 2gabf ga2b5l gab4r 2gabz ga1ch 2gadl 2ga2dr ga1fl ga1k ga2ka gal2a g4amo 2g1amt 2ganb gan3d gan2g1a 4gangeb gan2gr 2ganh 2g3anku 2ganl g3anla 3g2ano 2ganw ga1ny 2garb 2garc 3gard 2g1arm ga3r2o g1arti ga3ru 2g1arz ga2s ga3sc ga4s3ei ga4sem ga3sp ga4spe ga4spr gas5s ga3s6ses gas3tan ga4st3el ga3str ga4stra ga4stre gas1tu gat2a 2gatm gat4r gau1c 2g1auf g2auk g1aus 2g1aut 2g1äp 2gärz gäs2 gä4u 2g3b2 gbau5s gber2 gbi2 2g1c 2gd g1da g2d1au g2d1er gd1in g1do g1dö g1d3r gd3s2 gdt4 gd1u 1ge ge3a2 geb2a gebe4am geb4r ge1c ged4 ge1e2 ge3ec ge2es gef4 ge3g2l ge1im ge2in. gein2s ge2int gein2v ge1ir ge2is4 2g1eise2 gei3sh gei4sta 2gek. g2el ge4lanz gelb1r gel4b3ra gel6ders ge3le ge4l3ers ge4less gell2a ge3lor gels2t gel3sz gel3t2a ge3lum ge3lü gelz2 ge3mi gem2u 3gen ge3na ge4nam ge4nar gen4aug gen2d1r gen1eb ge3nec gen3eid gen3ern gen3g genma7sse. gen3n gen3sz 2g1entf gen3th 4gentw geo2r ge1ou ge3p4 ge1ra ge2rab 4g3ereig ge4reng ge4ren4s ge4r3ent ger2er gerin4f ger4inn gerin4t germ4 germas6s ger3no ge1ro ge1r2ö ger4sto ge3r2u g1erwa ges2c ges3elt ge2s1er ge3s2i ges2p ges4pi gess2t gest2 get2a ge3tan 2getap ge3t4u ge1ul 2g1ex 2g1f4 4g1g gga2t g3ge gge2ne g2g3l gg4lo g2g3n gg4r 2g1h 4gh. gh2e 3g2het 3g2hie gh1l 3gh2r g2hu gh1w gi3alo gia2s gie3g gi2e1i gi2el gien2e1 gi2gu gi2me. gi4mes gi2met 2g1ind gi3ne gin2ga 2g1ins 2g3isel gi3t2a gi4us 2g1j 4g3k2 4gl. gl2a g1lab 3glad g2lade 2g1lag 3glanz gla4s3ti gla4stu 3g2laub 2g1lauf g1läß 3glät 2gläuf g2l4e 2gle. 
3glea 2g3leb g3lec g3leg 2gleh 4g3lein glei4t5r g3len 4g5ler 2gles g3lese g4lia 2glib 3g2lid 3g2lie 2glif g2lik 4glin g2lio 2glis 4g3lisc 3g2lit g2liz 3g2loa 3g2lob g3loch glo3g 3g4lok g2lom 3g2lop 3g2lot 2gls 2g1lu2 glu3te 3glü g2ly 2g1m2 g1n 2gn. g2n2a g4na. 2gnac g4nat 3g2nä gn2e g3neh gne2tr 2gneu 2gng g2nie g2nif g4nin 2gni2s1 g2no1 g3not 2gnp 2gns 2gnt 2gnu 3g2num. g2nü g2ny 2gnz go4a goa3li 2g1of 2gog 2g1oh go1i2 gol2a 2gonis 2g1ope 2g1opf g2o1ra 2gord 2gorg go2s go3th got6t5erg go1y 2g1p2 2g1q g2r4 gra2bi gra2bl 2gradl 2g3rah 2g3rak grammen6 gram8m7end 2g3räu 2g5re. g4reb 2g3rec 2g3rede g4re2e 2g3reic 2g3rein g3reit g4rem 2g3renn gren6z5ei g4rer g3ret g3rev 2g3ric gri2e g3riese 3grif 2grig 2g3ring 2groc 2groh gron4 g4ros2 gro5sse. gro7ssen. gro7sser. gro5sses g4roß gro4u 2g3röh g4ruf 2g3rui 2g3rum 3g4rup grus2s gru3sse 2grut 2g3rüc 3g4rün 4gs g2sa gs1ac gs1ad gs1af g4s1a4g g3sah g4s3a2k g3sal g4salt gs3ama g4s1amb gs3an gs3ar gs1as gs3aug gs1ä g4sca g4sce gsch4 g4schef gs2chi gs3cr g2s1e2 gse3e g3s2eil g3sel. gs3eli g3seln gsen1 g4s3er gse4t g4seu g2s1i gsi2d g3sig g3sil gs1o2 gs1p4 g3s2pek gs4pie gs3pl g5s2por gsrat4 gs3s2 g3star gs1tau g4s1tä g5stäm g5stel g4stemp gst3ent g4sterm gst3err g4s3test gst2he g3sti gs1tis g3sto g4ston g4s1tor gs1tot gs1tr gst4ra gst4ri gst3ros g3stun gs1tü gs2tüc gs1u g3sy 4g1t g3te gti2m gt4r gt2se 1gu gu3am gu1an. gu1ant gu1as gu4d3r gu2e 2gued guet2 2g1u2f 2g1uh gu1ins gu1i4s 3gumm 2g1unf g2ung. gunge2 4gungew 2g1ungl g2uns 2gunt2 3gur 4g1url gurt3s gu2s3a guschi5 gus2sp gus4st gu3sti gu2ß1 gu2t gut1a gu3te gu4t3erh gut3h 2güb gür1 gü3st 2g1v 2g1w 2g3z2 3haa hab2a hab2e ha2cho ha2del ha4din h1adle haf3f4l haft2s hafts3p h1ah ha1kl 2h2al. halan4c ha2lau hal2ba hal4bei halb3r 2hale hal2la hal4leh hal6lerf h1alp halt3r h1amt h2an. h2and h4ann 2hanr 2hant hao2s h1ap ha2pl ha2pr h4a3ra 2harb h2ard h1arm. har4me. har4mes har2th h1arti h2as 2ha3sa hasi1 ha2ß1 hatt2 hau3f4li 2h1aufm h1aukt hau2sa hau2sc hau4spa hau4ss haus5sen hau4s3ti hau4sto h2aut. 2hauto hau2tr h1äff hä4s hä5sc hä6s5chen häu2s1c hä3usp 2h3b2 hba2r3a 2h1c 2h3d4 hdan2 2hea he2ad hea5t he3be he4b1ei he2bl he3br he5ch2e he1cho h1echt hed2g he3di he2e3l hee2s he2fan he2fä he2f1ei hef3erm 2heff he4f3ing he2f3l he2fr he3fri he2fu he3gu h1eie h1eif h1eig he2im heim3p hei4mu heine2 h1eink 4heio he1ism he1i4st heit4s1 h1eiw he2l3au hel1ec h3e2lek he3len hel3ers he3li hel4l3au hel4mei he3lo he4lof he2lö 3hemd he3mi 3hemm 4h1emp h2en. he4n3a4 he2nä he2n1e2b hen3end hen3erg he2net heng2 2heni he2no hen3st2 h1ents 2h3entw hen3z 4he2o he3on he3op he3ph her3a2b he2ral 2herap he3ras herau2 he4reck 4hereig he4r3eis he2rel he4rerw h1er2fo h1erfü herg2 herin4f he6rin6nu herin4s herin8ter h1erke h3erlau 2herm herma3s he3ro he4r3o4b h1erö hert2 her3th her2zw he2tap heter2 he3th het2i he3t4s h2e2u heu3g he3x he1x2a he1y2 1hè 2h3f4 hfell1 hfel6ler hfi2s 2h3g2 hget4 2h1h2 2hi. 2hia hi2ac hi2ang hi1ce hich6ter 2hi3d h2ide h1i4di hi2e hi3ens hier1i hie4rin hif3f4r hi2kr hi2l3a4 hil2fr hi2n h1indu hi3nel hin2en h1inf h1inh hi3n2i hin3n2 hi3no hin2t1a 2hio hi4on hi3or 2hip1 hi2ph hi2pi h2i2r hi3ra 2hi3re hi3ri hirn1 hir4ner hi3ro hir2s his2a hi2se hi2spa hi3ti 2hiu h1j 2h1k4 4hl h4lac hla2n hl1anz h1las h1laß h1lat h1laut h3läd h1läs h1läß h1läu hlb4 hld4 h3leb hle3e h5len. 
hlen3g hl2enn h3ler hle2ra hl1erg h6l3ernä hle3run hl1erw h4lerz h3les h4lesi h3lex hlg4 h2lie h2lif hl1ind h2lip h2lis h3list h2lit hll2 hlm2 hlma3s h2lo h3loc hl1of hl1op h4lor hlo2re h3losi hl2ö h3löc h2lös3 hlö4ss hl2san hl2ser hl3sku hl3slo hl2sto hl3t2 h3luf h3luk h1lüf 2h1m h2mab h3mag h3man h3mar hma3sse h3mä h4mäc h4mäh h4mäl h4mäu h3me. hme1e hme1in h3meist h3men hmen2s hme2ra h2mo h4mon h3mö hm3p4 hm2s1p h2mu 2hn h2na hn1ad h3nam hn1an hn3d4 hn2e hn3eig hn3ein h2nel hne4n1 hne4pf hner3ei h3nerl h3nerz hn3ex h2nic h2nid h2nie hn1im hn1in h2nip hn3k4 h2nor hn3s2k hns2t hntra4 hnts2 h1nu h2nuc h2nul hn1unf h3nunge ho2bl ho2c hoch3 hock3t 2hod hoe4 ho2ef ho4fa ho2f3r 2hoi hol1au 4holdy 3hole ho2l1ei hol3g4 4holo ho4lor 3hols h1o2ly 3holz hol6zene hom2e ho2mec ho2med h2on hono3 2hoo 2hop ho1ra hor3d h1org ho4sei ho3sl ho2sp ho3ssi ho4sta ho2str 2hot. ho3th hotli4 2hot1s2 3hov 2ho2w1 h1o2x ho1y2 hô1 1h2ö hö2c h4ör hö4s hös1c hös3se h3öst 2h3p2 h1q 2hr hr1ac hr3ad h1rai h1rane h3rat h3räu hr1c hr3d h2rec h3rech h3red h3ref h4rei. hrei4ba h3reic h4r3eig h3rel h3r2en h3rep hr2erg hr2erk h6rerleb hr2erm hr2erz h3re2s1 hre2t h2r1eta h3rev hrf2 hrg2 h2ri h3ric h4rick hri4e h3riesl h3rin h4rine h4rinh h4rist h2rob h2rof h3roh h3rol h4rome h4romi h4ron h2ror h3rou hrr4 hr2s1ac hr2s3an hr2sau hr3sch hr2s1en hr2ser hr2set hr2s1in hrs3k hr2s1of hr4stec hr2su hr4sw hr2tab hr2tan hr2th hr2tor hrt3ri hr2tro hrt2sa hrt2se hrt4ste h3ruh hr1ums h3rü h4rüb h2ry hrz2 4h1s h2s1ach h2san h2sau hsä4s h3sc h4schan h2s1ec hse4ler h2s1erl h3s2ex h2s1ing h2s1of h2s1par h2sper h2sph hs2por h2sprä h2spro hss2 hst3alt hst2an h4starb h2stau h2stäl h4stea h5stel hst2he hs1tie h2stin h2s1tor h3stö h3str hst3ran hst3ri h2stu h3stun h3stü h2s1u hs2ung h3sy 4h1t h2t1a h3t4akt. h3takts h3t2al h4t3alt hta2m hta4n ht3ane h3tank h3t2as h4t3ass h4tasy ht3a2t h2tär h3te. ht1ec h2t1ef ht1eh h3teha h3tehä hte2he h2teif h4teilz h2t1eim h2t1eis h4t3elit htelma4 htelmas5 h2temp h3ten h4tentf h4t3ents ht3erfo ht3erfü h2t1erh ht5erken h4terkl h4t3erre ht3ersc h6t5erspa ht3erst h6tersta ht6erste h2t1erz h2t1ese h2t1ess h3tet h2t1eu h2t1ex h2th h4thei hthe3u h4tho h2t1in htni2 hto2 h2toly h2torg ht3rak ht3rand h2t3rat ht6raume ht4ri h2t5rin h2t3rol h2t3ros ht3rö h2t1rös h2t3ru h2t3rü h4ts ht3spri ht4stab hts2ti ht4s3tur ht4s3tür htt4 htti2 htu2e h2t1urs ht3z2 hu2a hu2b1a hu2bei hu2b1en hu2b3l hu4b3r hu2bu hu2h1a hu2h1i huk3t4 hu2l3a hu2lä hu2l3ei hu4l3eng hu4lent hu2ler hu2let hu2l1in hu2lo hu3ma h1ums hu2n h1una hu3ni1 h1up. h1ups 2hur hurg2 hu3sa hu2so hus4sa hus3se hus2sp hus4st hu2tab hu3t2h hu2ti hut2t hut4zen hut4z3er h2ü h4übs h3übu hühne4 hüs3s 2h1v hvi2 hvil4 2hw h2wall hwe1c h1weib 3hyg 3hyp hy2pe. 2hy2t h1z hz2o hzug4 i1a 2ia. i4aa i2ab iab4l 2iac i2af iaf4l i4a3g2 i2ah i3ai i2aj i2ak i3ak. i3akt 2ial i5al. ia2l1a4 ia2lä ial3b ial3d i3alei i3alent i3alerf i3alerh ia4l3erm i3a2let i3a4lia ialk2 i3all ial3la ia2lor ial3s ial3t4 ia2lu ial3z2 i2am4 i4amo 2ian ia2nal i3and2 ian2e i3ann i2ano i3ant i3anz i2ap ia3p2f ia1q i3ar. ia2ra 2ias i2asc ia3sh i2asi i2a1sp ias5s iast4 i3at. i3a4ta i4ate i3at4h 1iatr i3ats i3au ia3un iau2s1 2iav 2iä i1äm i1är. i1ärs i1ät. 
i1äta i1ät3s4 2i1b i2b1auf ib2bli ib1ei i2beig i2beis ibe4n iben3a ibi2k i3bla i3ble ib2o i2bö i4brä ib3ren ib4ste i2bunk i2bunt ibus1c ibus3s 2ic ic1c ich1a ich1ä i1che ich1ei ichermas8 ichgro3 i1chi i2chin ich3l i3chlo ich3m ichmas4 i1cho i2ch3r ich2t3r i1chu ich1w i1ci ick2e icks2 i1cl i1d id2ab4 i3dam id2an i2d1au 1i2dee i2dei idel2ä ide3so ide3sp 1i2dio idni3 i2dol 1idol. 2i2dr i3d2sc id2s1p idt4 i2dy ie3a4 ie2bä ie2bl ie2bre ieb4sto ieb4str ie1c ie2cho ie2dr ie1e2 ie2f1ak ie2f1an ie2fau ief3f4 ie2f3l ie2fro ie4g3l ie3g4n ie2g3r ie3g4ra ieg2s iegs1c ieg4se ieg4st i1ei i2e2l1a ie3las iel3au iel3d iel1ec ieler8geb i1ell ielo4b iel3sz iel3ta 2i1en i3en. i3ena iena2b ie4n3a4g i3e2nä ien3d i2ene ien1eb ie3ner ien4erf ie4n3erg i3enf i3en3g ienge4f i3enh i3enj i3enk i3enm ienma3s4 i3enn i3e2no i3enö i3enp i3enr ien3s2e iens2k ien6stof ien6stop iens4tr ienst5rä ien3sz ie1nu i3env i3enw i3enz ie1o2 ier3a2 ie2rad ie2rap i2ere ie3r2er ie4rerf ie4r3erz ie3res i3ereu i4eri ierin3 ier3k4 i1ern i3ern. i2er5ni ie2rö iers2e ier4s3eh ier3sta ier3te ie3s2 ie4sh ie4sk ies4s ie5sset iess1t ie4stas ie2t1a ie4t3erh ie4t3ert ie2t3ho ie4t1o ie2t1ö2 ie2tri iet2se i1ett ieu2e ie1un i1ex 2if if1ab if1ar i2f3arm if4at if1au i2fec ife2i if2en if1erg if1erh if2fah iffe4s if6feste if2fl if3l i1f4la if4lä i1flü if3r if4ra i1frau i1fre if4rei if4rü if2ta ift3erk if2top if2t3ri ift1s2p ift3sz 2i1g iga1i i2g1ang ig1art iga3s i4gefar ige4na igerma3 ig1erz i2g1im i2gl ig1lä ig4na i4gnä i3g4neu ig4no i3go ig4ra ig3rei igro3 ig4sal ig1so ig4sti ig4s1to ig2stö ig4stre 2i1h i2h1am i2har i3he ihe1e ihe4n ih3m ih3n ih3r ih2s ih3sp i2h1um ih1w ii2 ii3a4 i1ie i3i4g i1im i1in i1i4s i2is. ii3t i1j 2i1k i2k1a4k ik1amt i2k1ano ik1anz i4kanze ik1art ik3att i2k1au i2kär ikbu2 4ike i2k1ei ike2l1 i2k1e2r2e ik1erf iker6fah i2k1er2h i2ker2l i2k1eta i3ki. ik1in i2kind i2k3l i3kla i3k4lä i2kn ik3no ik2o3p4 ikot3t i2köl i2k3ra ik3rä ik3re ikro3 ik3so ik3s2z ikt2e ikt3erk ikt3r ik2tre i2kun i3kus i1la i2l3ab il1a2d i2l1ak i2l3a2m il1ans il1asp il1au il4aufb il3aus i2laut i1lä1 4ilb il2c il2da il4d3en4t ild2er ild1o il2dor il2dr il1ec ile2h il1ehe ileid4 il1ein il1el i4lents i2l1erf i2l1erg i2l1err ilf2 il2f3l il2f3re ilf4s1 ilie4n ilig1a2 ili4gab i2l1ind i2l1ip i3lip. i3lips 2ill. il3l2a il4la2d ill4an ille4ge il3l2er il3l2i 2ills il2mak il4mang il2m3at il2mau il2min 2ilo i2l1or il3t2h i1lu2 i2lum i3lus ilv4 il2z1ar ilz3erk 2im. i2manw i2m1arm im4at ima2tr imat5sc ima4tur i2meg i2mej i2mek i2m1ele i2melf i2m1erf i2m1erz i4mesh i2meti i2mew i2m1inf i2m1ins im2mei im4m3ent 1immo imni2 2imo im1org 1impo imp4s im3pse 1impu im2str 2imt imtu2 2imu in3ac i4nack i2n1ad in2af in1am i3nap in2ara in2ars in4art ina4s i2n3au2 inaus1 in1äs in2dal in2dan in3dau indes4t 1index in3do 2indr ind4ri in3drü 1indus 2ine i2n1e2be in1ehe in3ei i2n1eng inenma3 inenmas6 in3erbe i4nerbi in2erh iner4lö i4ner4tr i4nesk in1eu ine3un ine2x in3f 1info. 1infos 2inga ing1af in2g1a4g in2gl ingmas4 ing4sam ing3sc 1inhab 2inhar 2inhau 4inhe in2i3d 2inig ini3kr in2ir 2inis ini3se i3nitz 3inkarn ink4ste inma4le 2inn. in4n3erm 2innl in2nor inn4sta 1innta 2ino in1od in3ols in1or inos4 ino3t i1nö in1ö2d 2inp 2inr ins2am insch2 in2seb 2insen ins3ert in3skan in3skr 1insta in4s3tät in3su 1insuf in4s3um in3s2z i4nt 1integ int2h in3t4r in1u i3n2um in3unz invil4 i1ny in3zw i1ñ 2i1o ioa4 io1c io2d i2oda io3du io3e2 iof4l i2o3h io2i3d io3k4 i3ol. i3om. i3oms ion2 i3on. ional3a io2nau ion3d i3on4s3 i2ony i2o1p io4pf i3ops i3opt i2or i3or. i3orc iore4n i3orp i3ors i3ort io3s i2ost i3ot. 
i3ots i2ou i2ov io2x i3oz. i1ö2k i3ön i1ös. 2ip. i1pa i1pe ipen3 i3per ipf2 ip3fa iph2 2i1pi ipi3el ipi3en i3p4l ip2pl ip3pu i1pr 2ips 2ipu 2i1q i1r2a i3rad 1i2rak ira4s irat2 i1rä ir2bl ir1c ir2e i3ree 2irek i3ré irg2 ir2gl irg4s ir2he ir2i 2irig 2irk ir2k3l irli4n ir2mak ir2mau ir2mä ir2m1ei ir2mum ir4m3unt ir2nar ir2no i1ro 1iron i1rö irpla4 ir4rei irr2h ir4sch3w ir3se ir3sh irt4st iru2s1 i1s i3sac i4s1amt is2ap is3are i2sau i2s1än 2isb i2sca isch3ar i3s2che i4schef i4sch3e4h i4sch3ei i2sch1l isch3le i2schm isch3ob isch3re isch3ru i4schwa i6schwir i4schwo isch3wu i2s3cr 2ise ise3e ise3ha ise3hi ise3inf i4seint ise2n1 is2end isen3s i2serh i2s1erm iser2u i2s1ess i4s3etat isi2a i2s1id i2s1of iso6nend is1op 3i2sot 2isp is1pa i2spar is1pe is1pic is2pit is2por i2spro is3sa is4s1ac is4sau is3sc is4s3che is3senk issermas8 is3so is3spa is3spi is3spo is2s1t is3sta is4ste is3sto is3stu is2su i2stab i4stam ist2an i4stea iste4n is2ter ist4ra ist3re is1trü i2stur is1tüm i2sty isum3p i2sü i1ß iß1ers i1ta it1ab. it1abs ital1a it1alt it1am it1ang it3a4re it1art i3tat it1au i3tauc i2tauf i2t1ax 4i1tä it2är i2t1äs ität2 i1te i2tei i4t1eig i4tein 2itel ite2la ite4n it2ens2 i4tepo i2tex i3thr i1ti i2t1id 1itii iti4kan iti3k2e i2t1in1 it2inn i3tis i3tiv i4tl itmen2 i1to i3toc i2t1of i1tö i1tr i3tra. it3raf i2t3ran it3ras it3rau it3räu it3re it3ric it3rom it4ron i3tru it3run it2sa it4s1a4g it2s1e4 its3er1 it4set its1pe it4staf it4stec it4s3tem it4s3tes it2sti it4stie it2sto it2teb it4temp it2tri i1tu it1uh i2t1um i2tuns it1urg itut4 i1tü 2itz it2zä it4z3erg it2z1w 2i3u2 ium1 ius1t i1ü 2i1v i2v1ak iv1ang i2veb i2v1ei iv1elt ive4n i2v1ene i2v1ent i2v1ur 2i1w iwur2 2i1x i2xa ix2em i3xi ixt2 2i1z iz1ap iz1au izei3c ize2n i2z1ene iz4er i2z1ir izo2b i2zö i2z1w í1l jah4r3ei jahr4s ja3l2a ja3ne jani1 2jat je2a jean2s je2g jek4ter jektor4 jek2tr je3na je2p je2t1a je2t3h je2t3r jet3t je2t1u2 ji2a ji2v joa3 jo2b1 job3r jo2i joni1 jo1ra jord2 jo2sc jou4l j2u ju2bl jugen2 jugend3 ju2k jung5s ju3ni jur2o ju3t2e3 2j1v 1ka 3ka. k3a2a ka3ar kab2bl ka2ben 2kabh 2kabla 2kablä 2k3a2bo ka3b4r 2k1abs 2k1abt ka1c k2ad 2k3ada 2k3a2dr ka1f4l ka1fr kaf3t2 k2ag ka1in ka3ka kaken4 2kala. ka2lan ka3lei ka3len. ka4lens kal3eri kal2ka kal2kr k1all kalo5 kal4tr k3ama kamp8ferf kan2al ka4n1a4s ka2nau kand4 2kanda kan2e 2k1ang kan3k4 2kanl 2k1anna k1ans k2ans. 6kantenn ka3nu 2kanw k2anz. ka2o 2k1apf 3kara 2karb k2ard k2arg ka3r2i kari3es k2ark 2k1arm k2arp3 kar2pf k2ars kar3t k2arta 2k1arti karu2 k2arw kasi1 ka2sp kas3s ka3t2an ka3t4h ka2t3r kat3se 2katt kau2f1o 4kaufr kauf4sp k1aus kau3t2 2kauto 1kä k1äh k1ä2mi k1än kär2 kä4s5c käse3 kä3th 2k3b4 kbe1 kbo4n kby2 2k3c 2k3d2 kdamp2 2k1ec k1eff kefi4 kege2 ke2gl ke2he. kehr2s kehrs3o kehr4st 2k1eic 2k1eig k1ein ke1in2d 2keinh 2k1eise ke2la kel1ac ke3lag kel1au ke2lä kel3b4 2k1e2lek ke2len ke2l1er 2ke3let kell4e kel3s2k k4elt 2k1emp k2en. ken3au kenbu5s4 4ken4gag 2kenlä ke2no kens2k ken5s4te ken3sz k2ente k3enten ken3th k2entr 2k1ents k2entu 2kentw 2keo2 ke2pl k2er. k2erc 4kerfah k4erfam k3ergeb ker6gebn k3er2hö ke6rin6nu kerin6st kerin4t ker4ken k2erko k2erl k3er4lau k3er4leb k6erlebe ker4neu k1ero k2ers. kerz2 ker4zeu 2k1er2zi k6es. ke2sel ke4t1a ke2t3h ket3s ke1up keu6schl 2k1e2x 2k3f4 2k1g2 2k1h4 kho3m ki3a4 ki1ch 2k1i2de ki3dr ki2el kie2l3o ki1f4l ki1f4r ki3k4 2kil2a ki3li ki3lo k2imi k2in. k2ing 2kinh k2ini k2inn ki3n4o kin3s 2k1inse 2k1int ki3or kio4s 5kir kis2p kist2 2kiz ki3zi 2k3j 2k1k4 kl2 4kl. 4kla. k4lar 4k1last k2le 4kle. kle3ari 4kleh k4leid 4k3leit k3lem. 
2k3ler kle2ra 2k3leu kle3us 2klic 2klig k2lin k3lip k2lir k2lisc 2klist klit2s 4kliz 2k3loc klo2i3 k4lop klo3s2 klost4 k2löt k1lu kluf2 2k1lüc 2kly 2k1m kmas2 k2n2 3knab k3ne k4nei 2k5ner kni4e kno4bl 2k5nor k3nu 3knü 1ko ko2al 2kobj 2k1o2fe koff4 koh3lu ko1i2 kol2a ko3le kol2k5 3kom ko4mu k2on ko3n2e kons4 ko3nu 2kop. ko1pe kop4fen 2kops 2kopz ko1r2a 2k1orc kor6derg ko3ri k2os ko2sp ko3ta kot1s2 kot4tak 2k1ou 3kow ko2we k1o2x 1kö kö2f k1öl 2k1p2 k1q k2r4 2k3rad kra4s3 k3rats 2kraum k4raz 2k3rät 2k3räum 2kre. 2k3rec 2kred. 2k3rede 2k3ref 2kreg k3reic kre1i2e4 kreier4 k3reih 2k3rh 2krib 2k3ric k3ries 2krip 3kris 3k4ron kro4ss 2kruf krü1b 2k1s k4s1amt k2san k2sau ks2än ksch4 ks1e2b k2s1em k2sent ks1erl k2s1ers k2s1erw k2s1id k2s1in k2s1o2 k3sof ks1pa k3spe ks2por ks2pu kss2 kst4 k4s3tanz kstat4 k4stea k2s1tis k2s1tor k2strä k2stum k2s1u ks2zen 4k1t kt1abs k2t1ad kt1akt k3tal kt1am kt1an k2t3a2r kta4re k2t1au2 ktä5s kte3e kt1ei k2temp k2tent k4t3erfo k2t1erh kte3ru k2tex k2th kt3ho k2t1id kt1im k2t1ing kt1ins kti4ter k2t1of k3top kt1ope k4torga kt3orie kt4ran kt3ras kt4ro kt3run kt3s2 kts4t ktt2 k2tuns kt3z ku1c 2k1uhr kul2a ku3l2e ku3l2i 2kulp 2k3uml kum2s k2u3n2a kun4s kunst3 2kunt 2k1up. kur2bl ku2rei kuri2e ku2ro kur2sp kur4st ku4schl ku2sp kus3ses ku2su ku2ß 1kü 2küb kü1c kür2s 2k1v 2k1w 2k3z2 kze3l 5la. l1ab la3ba 2labb 4l3aben 2labf 2labg 2labh 4la2bl lab2o l2abr lab4ra lab4ri 2labs 3labu 2labw la1ce la2ce. 1lad lad2i l1adl 2ladm 2l1a2dr 3ladu l1adv 2laf la2fa laf3t la2ga la2gio la2gn lago2 la2g1ob lag5se 2la1ho 1lai lai4s1t la2kes la2k1i l2akk la1k4l 2l1al 4lall 4lalp l2ami la3min lam4ma 1lammf l2amp 2l1amt lamt4s la4mun l1anal la2nau 2lanb 3l2and lan2d3a2 lan6d5erw lan6d5erz lan2d3r 2lanf lan2gl lang3s2 2lanhä l2anhe 2lanl 4lanli 2l3ann l1anp 2lans 4lansä 2lantr lan2zw 3lao l1a2po lap4pl la2r1an la2r1ei la4rene 3l2ar3g lar3ini 2l1ar3t l3arti la2ru la2sau 4lasd la5se 2lash 2lasi la2so 2la2sp 3lasser la2sta last1o la2str las3tur la2stü la2ß3 lat2a la3te la4tel 2l3ath la2t3ra lat2s 2lat2t1a lat4tan lat4t3in lat2t3r laub4se lau2fo l2aufz 1laug 2lausl 2lausr 2l1auss 2lauto 1law lawa4 lä1c 2läf 2l1ähn 1länd lär2m1a lä4s5c lä4s3s 4lät 2läub 2läuc 2läue 1läuf 1là 2l1b l3bac l2b1ede l4beta l2b1id l2b1ins lb2lat l3blä lb3le l2bli l3blo l4bre. lb3rit lb2s lb3sa lb3se lb4sk lb3sp lb4st3e lb4sto lb2u l2b3uf lbus3s lbzei2 2l1c l3che lchermas8 l3chi lch3l lch3r lch3s lch3ü lch1w l3cl l3co 4l1d ld3a2b1 l3d2ac ld3ack l2d1a2d lda4g l2d1ak ld1al l3dam ld1amm l2d3a2n l2d1a2r ld3ari l3das l3dat ld1au ld1är l2dei l2dele l3der. ld1erp l2d1e2se l2dex ldi2c l2d1id l2d1im ldo2r ld2os ld2ö2 ld3r l2dran l2dre l3d4ru ld4rü ld3sa lds2t ldt4 ld3th l2d1um ldy3 ldys2 1le 3le. le2ad 3leba leben4s le2bl 2lec le2chi lecht4e 3led 4ledd le3de le2e le3ei lef2a le2g1as le2gau le2gä le2gl leg4r 3leh leh3r2e 4lehs 4leht 3lei. lei2br l2eic l2eid 4l1eig l2ein. leinbu4 leinbus5 l2eind lein4du l2eine lei6nerb 2leink l2eint lei6ss5er l4eist lei4ßer l2eit lei2ta lei8t7er8sc lekt2a 2lektr 3l2ela 2l1e2lek lel3s 3lemes le2m1o2 4lemp l2en. le4nad le2nä 4lendet 2lendu le4n3end 4lenerg l2enf le3ni l2enk 2l1enni l2e2no len4sem len3sz l1ents 2l3entw lent4wä 5lentwet 4lentz len2zi le1os 2lep 3lepa 3lepf 3lepr l2er. l2e1ra le2ra4g le2rau lerb4 4l3ereig le4r3eim le4rers l1erfo l2erfr l2erfü 3lergeh l3ergen 3l4ergew 2l1ergi lergro3 lerin4s lerk2 l2erka l2erko l2erle le1ro 2l1er2ö 3l2erra l4ers. 
lers2k ler3t 6lerwerb l1erz l2erza les2am les2e 2l1esel le3sh lesi1 le3sk les3s leste3 4lesw 2lesy le2tat 2le3th 2leto le2u 4leud 2leuro 3leut 3lev 2lexe le2xis 2lexz 2l1f l3fah lfä4s3 l2f1ec lfe1e l4feis l3f4lä lf3lo l3f4lu lf3ram lf2tr lf4u lfur1 l3fü 2l1g lga3t lgd4 lgen2a lge3ra lgeräu3 l2geti l3go lgoa3 lg3re l3gro lgro3s lg2s lg4s1t 2l1h2 3lhi. 1li 3lia li3ac li3ak li3ar li3b4 libi3 li1c 3lichem 3licher li3chi 4lick li3d2a li2deo 2l1ido li4d3s l2ie liebe4s li3ene lie4s3c lie5sse lie4sta 3lig lig4n li2gre lig1s2 li3ke li2kr lik2sp lik4ter li3l lil2a 2lim li3m2a l1imb 3limo li3n2a lin3al 2l1indu li4ned li2nef li2neh li2nep li2nes 2l1inf lings3 2l1inh 2l1in1it 2l1inj lin2k1a link2s li2nol l2ins. l2insa l2insc 2linsp 2linst 2l1int l1inv 2linz li2o li4om lion5s li3os. li2p3a 3lis. li3s2a li4schu 2l1isl 2l1i2so li2sp liss4 2liß li2tal li3te li1t2h lit1s2 lit3sz li2tur 3liu 2lixi li2za lizei3 4l3j 2l1k lk1alp l3k2an l3kar. lken3t lk2l lk3lo l3k4lu lk4ne lkor2b1 lk4ra l2k3ro l2k3ru lk2s1 lk3sä lk4stä l3k2ü 4l1l lla2be l2labt ll1aff ll1akt l3l2al l2l1a2m ll3ama lla2n ll2ang ll2anw ll1anz l3lap ll1arm ll1au ll4aufe ll3aug l2l3aus l2l1äm llb4 llch4 ll3d4 lle2bi l3lec ll1ech lle3en l2l1ef l2legt l2le2gu ll1eim ll2em l3len. lle4n3a ll3endl ll3endu llen3g l4lents l3ler. lle2ra l4lerfo l6lergen l4lergo ll3ernt ll3ertr l2lerz ll2es l2lex llg4 l4lieg ll1imp l2l1ind ll1ins llk4 ll5m lln2 ll1ob l2lobe l2l1of ll1opf l2l1o2r l3lor. l3lore l2l1ou l2löf ll1ö2se ll3sh ll3s2k ll2spr ll4s3tor ll3t4 llti2m llt5s2 llu2f ll1ur llust6 ll3z2 2l1m l2m3a2b l2marc lmas2 lma3sse lm1aus lm1c lme2e lm3eins l2m1e2p l2m1erz lm1ind lm1ins l2möl lm3p lmpf4 lm3s2z lm3t 4ln lna4r ln3are l3n4e l3ni l1nu l1nü 1lo 3l2ob. lo2ber 2lobj 2l1o2bl l2obr lob4ri l1o2fe lo1fl lof4r lo2gau lo3h2e 2l1ohr loi4r 3lok lo2k3r lol2a l1o2ly lo2min lo2n1o lo2o 2lopf 2lopt lo1ra lo4rä 2lorc l1ord lo3ren 2l1or3g2 3lorq 3los. lo4sa 3lose lo4ske lo2spe loss2e lo4steu lo2s3to lo2s3t4r lo2ßu lo2t1a lo3tha loti4o 2l1ov lo2ve 2lox 1lö lö2b3 2löd lö2f 2l3öfe 4lög l1öhr 2l1ö4l 4löß 2l1p l3pa lpe2n3 lp2f l2p1ho lpi4p lp3t4 l3pu 2l1q 2l3r2 lra4ss lrat4s lrom2 lrö4 lrös3 lrut4 lrü1b 4l1s l3sac l2s1a2d l3s2al l4s1amb l2sanf l2sann l3sare l2sau2 lsä4s l4schin l4schmü l3se. l2s1e2b l2s1ec l2s1em ls1ere ls1erg ls1erl l2s1ers l2s1erw l3ses l3sex l4sha l2s1imp ls2log ls3ohne l4s3ort. l3s2pi ls2po l2spro l3s2pu ls3s2 lst2a lstab6 ls2taf l4s3täti l2s1tis l2stit ls2tr ls1um l2sun lsu3s ls2zen 4l1t l2tab lt1abs ltag4 lt1ak lt1a2m l3tami lt3and lt1ang l3tarb lt1art l2t3ato l2t1au l3te. lt1eh l2t1eis lte4lem lt3eli l3t2en lter3a l3t2erg lt4erö l2t1esk lte3str l3tet. lte2th l2t1eu l2th l4thei lt3ho l3thu ltimo4 l2tob l2t1of lt1op l2t1o2ri lto2w lt1öl lt1ös lt3öt lt4rak ltra3l l3trä lt3räu lt3re lt4rie lt3roc lt3ros l2t3rö l4ts lt3sc lt1spa lt4stab lt4stoc ltt2 lt1uh l2t1um ltu4ran ltu2ri lu1an 4lu4b3 luba2 lubs2 lu2dr lu2es 1luf 2l1ufe 2luff luf2t1a luf2t1e luf2t5r lu2g1a lu2g1e2b lug3erp lu4g3l lu2go lu2g3r lug3sa lu2gu 2l1uh lu1id. lume2 2lumf 2luml l2ump l1ums l1umw 1lu2n 2l1una 2l1unf 2l1uni 2lunt 2lunw 4lu2o lu2pf 2lur l1urn l1urt 2luse lu2sp lus4s3a lus2s1c lus3sen lus2s1o lus2s1p lus4s1t lus2t lu2st1a lu4stä lu2sto lu3str lust3re lu2s1u 4lu2ß1 lu2t1a lu2teg lut3erg lut1o2f lu2top lu2t3r 3lux 2lüb 5lüd lüh1l lüs3 2l1v 2l3w 2lx 1ly ly1ar ly3c 2lymp 3lyn ly3no ly1o lys2 ly3te ly1u 2l1z l2z3ac l3z2an lz2erk lz1ind lzo2f l2zö lz3t2 l2z1u4fe lzug4s lz1w lz2wec 1ma maa2 m1ab m2abe 2mabk m2ab4r 2mabs 2mabt mach4tr ma2ci ma3da ma2d4r ma4d2s ma1f ma2ge. 
ma2geb ma2gef ma2geg ma2gek ma2gep ma4ges. ma2get ma2gev ma2gew 2m1agg magi5er. magi5ers ma3g4n 2m1ago mai4se 2m1akt mal1ak ma4lakt ma2lan ma4l3at ma2lau mal3d ma3ler mali1 mal3lo 2mallt malu2 ma2l3ut mam3m 2m1anal ma2nau 2manb man4ce. man3d2 man3ers ma2net m2anf 2m1angr m2anh 2manl m4ann m1ans m2ans. 2mansa 2mansä 2mansc 2mantw 2manz ma2or m2app 2m3arb mar3g2 4ma3r2o maro3d 4marr mar6schm mar6schr ma3r2u m3arz 3mas ma1s2pa 4m1aspe ma3sses mas6ses. mas6sest ma6sset ma3s2su mas2t 3maß ma2ta2b ma2tan mat4c ma2tel ma4t3erd mat3se mat1sp 2m1au2f ma3un 2mausg m4ay ma1yo 1mä m1ähn mä1i2 4m1änd 3männ m1ärg mä1t4r mäu2s1c 2m1b2 mbe2e mb4l m3b4r mby2 2mc m3ch 2m1d md1a m2d1ä m2dei mds2e m2d1um 1me meb4 m2e1c medi3 medie4 medien3 2medy me1ef mee2n1 mega3 3meh 2m1eif 2m1eig m2eil mein4da meis2 me1i2so m2eist me3lam me2lau 3meld me2lek me2ler melet2 2melf. mel2se mel3t4 6mel6tern 2m1e2mi m2en. mena2b me3nal men3ar men3au men3ge men3gl me3nor m2ens men4sk men2so men3ta 2mentn 4m3entwi me1o 2meou 2meö 3mer. me1ra me2r3ap me4rens mer2er 4m3ergän 3merin merin4d merin4t 3mers merz4en 3mes me2sal me4sä mes2e 4meser 2me3sh 4m1essa mes6serg mes2s1o mes2s1p meste2 me2str 4mesu 3me2ß1 me3t2a me3th meu1 2m1ex 1mé 2m1f4 mfi4l 4m1g2 2m1h4 1mi mi2ad mi3ak mibi1 mi1ch mi3da mie3dr mi2e1i mie3l mi2er mierer4 mi2et mie4ti 3mig mi2kar mi2ki mi2ku 3mil mi3l2a milch1 mil4che 4milz 2m1imp min2en min2eu min2ga mings2 mi3ni 3min2o mi1nu 3mir. mi3ra 3miri 3mirs 3mirw mi2sa mi4scha mi4schn mi4schw mise1 mis4s1c mis4ser mis3si mis4st mi2ß1 3mit1 mi2ta mi2th mi2tr mit3s2 mit5sa mi3tsu mi2tu 4mitz 2m1j 2m1k4 m3ka mk5re. 2m1l2 ml3c ml3l ml3s 2m1m m2mab m2m1ak m2m1al mm1ang m2mans mm1anz m2m1au mmä4 mmd2 mm1ei mme4lin mme4na m4mentw mme2ra2 mme4rec mme2s3a mm1inb mm1inf mm1inh mm1ins mm1int mmi3sc mm3p mms2 m2mum mm2un mmül2 2m3n2 m4nesi 1mo moa3 2mobj 3m2od mode3s mo2dr 4mog. mo2gal 3moh mo2i3 mo2k1l 2mol. 3mom mom2e 3m2on mo3ne mo4n1er mon3s 3mo2o 2m1ope 2mopt mo1ra mo2rar 2m1orc mor2d3a mor2dr mo2rer mork4 3mos moster4 mo2sto 3mot m1o2x mo1y 1mö mö2c 4mök m1öl 2m1p m2pf mp4f3erg mpf3erp mpf3err mp4f3erz mp2fl mpf3li mpf1or m3pi m4p3lem. m2p3len m2p3les m3pon mp3ta m3pu 2m1q 2m3r2 2m1s m2san ms3and ms1as m3sä msch2 m2s1ef ms1erf ms1erw ms1ini mso2r ms1ori m2spä m2sped ms2por m2spot m2spro ms2pu ms3s2 m4stag m2stal ms1um m2sü 4m1t mt1ab mt1ak m3tam mt1ar mt3are mt1elt m2t1erf m2t1erg m2t1erl m2t1ers m2t1ert m2t1eta m2t1eu m2th mt3ho m2t1im m2t1ins mti2s mtmen2 mt1ös mtra4s3 m4ts mt2sa mt2s1e mt3s2ka mts1p mt1spa mtt2 mt1um mt1urt mt3z 1mu mu1a 2m3uh mu3la 2muls 3mun mun2d1a 4m3unf 4m3ungeb mu3ni m4unk munt2 4munz mu3ra mu4r1u2f mu4s1a 3musi mu2s1o mu2sp mu3s4se. mu3s4ses mu2s1to mu2str mu2su muße3 mut1au muts3 mut2st 1mü 2müb mül4len 3mün mü3ssi 3müt 2m1v mvoll1 2m1w2 mwa2 mwa4r 1my 2m1z mzug4 1na 3na. 2n1ab na2bä 4nabg 4nabh na2bl na2br 4n3abs 4nabt 3n2a2c nach1 na3chen nach3s nacht6ra 4nadd n2ade 4na2dr n1af na1f4r 3n2ag na2gem 3n2ah na2h1a n3ahn 3nai nai2e n1aig 2n1ak na2ka 3nako n2al. na2l1a2 na2lä 3n2ald n4ale na4lent na2let nal3l2a nalmo2 na2lop nal2ph n2als. nal3t4 na2lu 2naly 3name n2amen 4n1a2mer na3m4n 3namo nam2sp 2n1amt namt4s n1an. 4n1a2na 4nanb n1and2 4n1ang 2nanh 2nani 4nank 2nanl 3nann na3no n1anp 2nanr 2n1ans 2nantr 2nanw nap2si n1ar 5nar. na2r1a 2narc n2ard 4narg 3nari n2ark n2arle 2narm n2arp 4n3art na3r2u 3nas n2as. na4schw 4nasp 4n1a2sy nasyl2 3naß 3nat n4ata na3t4h 4natm nats1 nat4sa nat4sc 4natt n1au 4nauf nauf4fr n3aug 5naui 3n2aul 4nausb 4nausg n2auso 4nauss 4nausw nau3te navi5er. 
navi5ers 1nä 2näb 3n2äc 3näe n1ähn 2n1ä2m 2n1än 2näp nä4sc n2ä4s3s 2näu 3nä1um 2n3b4 nbe2in nbe3n nbe3r2e nbu3s nby2 2n1c n3ce2n3 nch3m 2n1d nd2ag n2d1ak n2danl nd1ann n2d1anz ndat2 n2d1au nd1c nde4al. n2dei nde4län n4d3ents nde4rob nde2s ndes1e ndi2a3 n2dob ndo2be nd1op nd1or n2dö n2d3rat n2d3re n2drob nd3rol nd3ros n2drö n2drui n4d3run nd2sor nd2spr nd4stab nds3tau nd3th ndt4r n2dü4 ndy3 1ne 3ne. ne2ap ne3a4s ne3at ne2bl 2n1ebn 2nec 3neca 3ned ne2de 2nee3 ne2e2i4 ne3ein n1ef neg4 2ne2he. 2nehen2 3nehm 4n1ehr 2n1ei n2eid 4neif 3neigt 4n3eing 4n3eink ne2ke nek3t4 ne2l 3nela nel3b 2n1ele 4nelek 4nelem ne3len ne3li nel4la nel2lä 3ne3l2o 3ne3lu n2em. 2n1emb n1e2mi 2n3emp 2n1ems 3nen n2en. nen3a2 n2enb n2enc 4n1endb 4n1endd 4n1endf n1endg 4n1endh 4n1endk 4n1endp 4n1endt 4n1endw ne2n1e2b nen3ei nenen1 ne4nene 4nengb nen4ge. nen4gen 4nengs 4nengt n2enh ne2ni n2enj nen3k ne2no n2ens nens4e nen3sk 5n2en3t2a n1entb 4n1entl 4nentn 5nentr n1ents 4n3entw 4nentz ne2n3u n2env n2enw ne2ob ne1os 2nepf 2n1epo ne2pos n2er. ne1ra ne2ra2b ne3r4al ne2r3am ne2ran ne2rap ne2rau 4nerbe. 4nerben n1erbi nere2 ne2reb n1erf 4n5erfo nerfor4 2nerfü 3nergr n1erh 2n3erhö 3neri n1erk n2erli 2n1erlö nerma3 nermas4 n1ermä ner4mit n2ern. 4n1ernt ne1rös n2erp 3n2ers. 2n3ersa ner8schle n2ert. n1ertr ne2rup n2erv 2n1erz 3n2es n4es. nes2c ne2sei ne2sev ne3ska nes1o ne2sor ne2s1p 4n3essi ne2tad ne2t1ak ne2t1an ne2tap n1etat ne2tau ne2th net3ha nett4sc n1e2tu net2zi ne2u neu1c neu3g 2n1eup neur2 n2ew 2n1ex 3nez 1né 2n1f nf1ak nf2ä nfä4s nff4 n3fi nfi4le. nf4l nf5lin nflös4 nf2o nf4r nft2o nft2s3 nft4st n2f1u 4n1g n2g1ac ng1ad n2g1ak n2g1a2m n2g1and ng2anf ng1anz n2g1äl ng3d4 n3gef n2g1ein ng2en ngen2a ngens2 n3ger nge4ram n4g3erse ng6es nges2t nge4zän ng3g4 ng3hu n2g1i2d n3gläs n2glic n2glo n3g2loc n2glö ng3m n2gn ng3ne ng1or ng3rat ng3roc ngro3s ng2s ngs1c ng4s3e4h ngs3pa ng3ts n2gum 2n1h2 n3han n3har n3hau n3hä n3he nhe2r n3hu 1ni 3nia nib4l nibu2 nich8ters n1id 3n2id. ni2de ni3dr n4ie nie3b ni1el nie3l2a nie4n ni3ene ni1ero nig2a 2n3i2gel nig3r ni2gre 3nik ni2kal ni2kar ni3ker ni4k3ing ni3kl nikma3 ni2kr 3n2il nim2o 4n1imp nin1 3n2in. n2in4a 4n3ind 2ninf 3n2ing4 4n1inh ni2nor 2n1ins n2ins. 4ninse 4n1int 2n1inv ni2ob ni3ok ni3ol n2ip ni3ra 3n2is ni4schw ni2s1e ni3se. ni2s1p ni3spi nis5s2 ni2stu ni3stun ni2s1u 2nit ni1th ni2ti ni3t4r nit2s ni3tsc nit4tec nitt4sa ni3tu ni3v 3nix n1j 2n1k n2k3ad n2k1ak n3k2al n4k3alg nk2am n2kans n2k3au4s n2käh n2k1är n4k3erfa nk4erg nk1inh n2k1ins nk3len nk3les n2klie nk2lo nk2lu nk3lun nk4na n2kne n2k1ort nk2öf n2köl n2k3ro nk2sal nks2ei nk3s2z nk2tak nk2tan nkt1it nk4top nk2tru 2n3l2 2n1m4 nmen2s 4n1n nna2be n2nada n4n1all n2n1an n2nau n3nä nn3d nnen3g n4nents nn2erh nn2erk nne2rö4 n4n3er4wa nner2z nne2s1e nn2ex nn3f nng4 n3ni n2nof nn1o2r nn3se nn3s2p nn2th n2n1uf n2n1unf nn1ur 1no 3no. 3nobl no2bla n2o3ble 2n1obs no1c 2no2d no3dr n1of 2n3o2fe n3ole no2leu n2on. 3n2opa 3nor. nor2a no2rad no1rak no3ral 2norc nor2d5r 3norh 3norm 3nors n1ort 3n2os. no3sh no2s3p no4ss n2oste nost1r 2nostv no3tab no2tä no4t3ei no2tel no3t3h no4tha no2t3in no2t1op no2tr 3nov 3now 2n1o2x 3noz 2nöd 2nö2f 4n1ö4l nö4s3s 2n3p4 npa2g npsy3 2n1q 4n3r2 nra4s3s nräu3s nre3sz nrö2s1 6n1s n2s1a2d n2sall n2sang n2sant n2saus n3sav n2s1än nsä4s n2s1äus ns2ca n6schef n4schro nsch7werd ns1eb ns1e2d nseh5ere n3senk nsen4sp ns1ent n2s1ep ns1erf n4serfo ns1erg n2serh n3seri n2s1erk n2s1erö ns1ers n2s1erw n2s1erz nse2t n2s1eta n3sex nsfi4l n2simp n2s1ini nsi2te nsi2tr ns2kal n2s1op n4s3ort. 
nsp4 nspas2 n2spat n4speri n2sph ns2pi n2spo ns3pon n2sprä n4s3prie n4spro ns3s2 ns2t1ak n4stat. n4s3tate ns2tau n5s4te. n4st3eif n5s4tel ns4tem. ns4ten. n4stent ns2ter ns4ter. nst4erö ns4tes. n5steu nst5opfe ns2tor n4strac ns2tum nst2ü nstü1b n2sty ns2um n2s1un ns2ung ns2unr n4s3zi 2n1t nt3abs n3t2a3c n3t2al nta3m n4tanza nt2arb nt1ark nt2arm nt4at n2tauf nt1äm n2t1äu n3te. nte3au nte2b nt1ebe nte1e nte3g6 nt1eh n2teig n3t2en nt4ene nten6te. n3ter nt4ern nt4ers nt4ert n2t1ess n3tet nteu3 nte3v nt2her n2t3ho n3thr n3t4hu nti3k4l n2tinf n2t1inh ntini1 nt2ins n3tit nt4lem ntmen2 ntmo2 n3to nto3me nton2s1 ntras3s nt3rec nt3reif n3trep nt4rig n3trop n2t3rü n4t1s nt3sa nt4sau nts2o nts2p nt4s3par nts2t nt4sto 3n4tu. ntum2 ntu2ra ntu4re. ntu4res nt3z2 1nu. 1nu1a nu3ar nubi1 1nuc 1nud 3nue nu2es nuf2 nu2fe 1nug 2n1uh 1nui nu3k4 n2um. 2n3umb 2numf 2numg 3numm 2numr 2n1ums 2n3umz nu2n 2nuna 1n2ung 3nung. n3ungl 2n1uni 2nunt 1nuo 2nup 2nur 3nu2s nu3sc nu3se nu3sl nu4s1t 1nu2ß 1nut nu2t1a nu3te nu2t3r 1nuu 1nux 1nuz 3nü. 2nü4b nür1c 3nüs 1nüt 2n1v2 n3ver 4n1w nwei4st 2nx 1ny. 1nyh 2nymu n1yo 1nyr 1nys 1nyw 2n1z n2zad n2z1a4g n2zan n2z1au n2z1än n2zär nz1ec n4zense n4zentw n4zentz nz3erwe nzi2ga n2zinh nz1ini n2zor nz2öl nzug2s n2zurk n2z1wa n2z1wä n2zwö n2z1wu ño1 2o3a2 o4abi o4ac oa3che oa3chi o4ad oa3de oa4g o4ah o4a3i oa3ke oak1l o4a3la o4a3mi o2ar o2a3s 3oase oa4si o4at oa3te o5au o1b ob2al 2oban o3bar 2o3b2ä 2obb ob2e 2obe. 2obea ob3ein 2o3b4en oben3d4 oben3se ober3in4 obe4ris 2obew 2o3b2i obi2t ob3ite 1obj ob1l o2b3li 2o3blo 2o3bo o2b3re o3bri obs2 ob3sh ob3sk ob2sta ob3sz 2o3bu obus3s 2o3bü 2oby2 2oc oc1c o1ce och1a ocha2b o1che oche4b o2ch1ec och1ei ocher4k och3l och3m och1o och3ö2 och3r ocht2 och3te o1chu ochu2f och1w o1ci ock2er ock3sz o1cl o3co o1ç o1d o3d2a od2dr o3deb ode2c o3d2e1i odein3 ode2n1 odene2 o3dex 2o3dia o3dir o3div o2don odo4s 2odr o2dre odt4 2odu 2o1e oe2b o2ec oe2d oe2h oe2l oe2n1 o4es o2et o3et. o3ets oe2x o1ë 2ofa of1ac of1am of1au o2f1ei of2en o3fer of2f1a of2f1in of2fir 1offiz of2f5l of2fo of2f3r offs2 of2fu 2ofi of3l of1la of4lä of4lö 2ofo 2o1f1r of3ra of3rä of4rü ofs1a of4sam of2spe of2spr of2s1u 2oft of2tei of3th 2o1g o2g1ab oga3d og1ala og1ang o2g1ei oge2l1i ogenmas6 o3gh ogi2er og2lo o3g4n ogoi3 og2s2 og3sc og3si og3sp o1ha o1hä o1he o2h1eis o2h1ert o2h1erz o1hi ohl1a oh3lec ohl1ei oh3lem oh3len oh3lep oh4lerg oh4l3erh oh4lerw oh3lo ohls2e oh2lu 3ohng oh2ni 1ohnm oh2n1o o1ho oho2la oh1o2p o2h3ö ohr1a oh4rin oh1ro oh3t o1hu oh1w 2o1hy 2oi o1i2d o3ie o1im oimmu4 o1in oi2r o2isc o3isch. oi3se o1ism oiss2 oi4st 2o1j 2o1k oka2la okale4 3o2kel oki2o ok1lä ok4n 4okr ok2s1p okt4 2ol o1la o2lab o2l1ak ol2ar ol1auf o1lä ol4dam ol4dr ole3e ol1eie ol1eis oler2 ol1ex o1lé ol2fa ol2fl olf1r ol2fra ol2gl ol2gr ol2i oli3k4 oli3tu ol2kl olk3r ol2kre ol2la2d ol2lak oll3ans ol2las ol2lau ollä2 ol2läd ol4l1ec ol2l1ei ol2l1el oll5ends ol4lerk oll5erwe o3lo ol2of olo1p2 ol1ort ols2t ol2str o1lu 3oly 1olym ol2z1a ol4z3ern ol2zin ol2zw 2om o2mab oma4ner om2anw om1art o2m1au o2meb ome3c o2m1ei o3m2eis o2mel o3men. 
o2mep o2meru om1erz om2es omiet1 o2m1ind om1ing o2m1int om3ma om1org om3pf oms2 omtu3 o4munt omy1 2ona ona2b o2nae o3nal on1ap o2narb on2au on3aus 2onä onbe3 2onc onderer5 2one one2i one2n1 onen3g on1erb o2n1erd on1erg on1erö o3nett on3f2 on3g2l ong4r ong3s 4o3ni on2i3d o4nikr o4n1im on3ing on3k2 onli4n onlo2c on3n2an on3n2e ono1 o3nod o2noke on1orc ono3s ons1a onsa4g on2seb onse2l on4sh onsi2d ons3l ons1p onst2h on3t2a on4t3end ont3erw ont2h on2t3ri o1nu 2onuk on3v 1ony on3z o1ñ oo2k3l o1op o1or oor3f oo4sk oo2tr 2o1ö2 o1pa opab4 o2p3ad op3akt o3pan o1pec o1pei o1pe4n 2opf. op2f3a op3fah op4ferd opf5erde opf1l opf3la op1flü 4oph2 o3phe o1pi opi5a4 opi3er. opi5ers. opin2 op5lag o2p3le op3li 2o3po op4pl op2pr 2o1pr 1opsi op3sz 1op3t4 o1q 2or. or1a or3a2b 2orak 2oral o2r3alm or4alt 3or2am or2and o2ranh or3arb o1ras or3att o3rä or1änd or1ät or2bar orb2l or1c 2orca or2ce 2orda or2d1am or2dau or4d3eng or2deu or2d1ir or2dit 1ordn or2do 2ordr 2ords or2dum 2ordw 4ore ore4as o2r1eck o2r1ef ore2h or1eig o2rein or1er o2rerf or1eth 2orf or2fle or3g4a 2orget or3g2h 2orgia orgi1e or2gl or3gle or2gn 2orh 2o3ric 4orie. o4rient o3rier 4oril 4orin1 2orit ork2a or2k3ar ork3s 2orm or4mans or4ment or5ne. or3n2o 2o1ro oro3n2a 2o1rö 2orq 2orr orr4a or3rh 2ors2 or3s4a orsch5li or3sh or3si or3sz or2t1ak or2t1an or2t1au or2tär or2tef ort3eig or4t3ent or4t3ere ort3erf or2t3ev or2the ort3ins or4t3off or2tor or2tö or4trau or4t3räu ort3re ort3ric or2t1um o3ru or2uf o4r3un orus3 o2r3ü o2rya o1s 2o3s2a os3ad os4an o3sche 2o3se ose3e o2s1ei ose2n o4sents 2osh o3s2hi 2osi o3sk o4s3ka o4ski 2os2kl 2os2ko os2lo 2oso 2os1p os2pe os3pec o3s2po os2pr os2sa oss3and os4sä o6ssel o3ssem oss3en4k o3ssent oss3enz os2s3o os4son os2s3p os4s3t os2su os2t o2st1a2b o3stal. o4st1am ost3ang osta4s ost1au o4sterd oster3e ost5er6we ost3h o2stin ost1ob o4s3ton. ost3ran o2st3rä ost3re ost3rot ost3uf 2osu4 2o3sy o3s2ze o2ß1el o2ß1en2k o2ß1enz o2ß1ere o2ß1erf oß3t 2o1t ota2go o3tark o2t1au ot3aug o3t2ax o2teb o3t2e1i otei4n ote2l1a ote4lei ot4em3 otemp2 o2t1erw 4ot2h ot4he ot5hel o4t3hi ot3hos o2thr o2til o2t1i2m ot2in o4tl otli2 ot4ol ot1opf ot2or oto2ra o3tra o2t3re ot3rin ot2sa ots1p ot2spa ots2t ots3tri ot2tau ot4terk ot2th ot2t3r o2u oub4 ou2ce ou1f4l oug2 ou2ge ou3gl o3uh ou4le. o3um o3unds oung5 oun4ge. oungs2 o4up 2our ouri2e our4ne. ou3s2i ous2t outu4 2ouv 2o1ü o1v 2ovi oviso3 2ovo 2o1w o3wec owe2r1 o3wi o1x ox2a ox2e 1o2xid ox3l o2xu 1oxy o1yo 2o1z o3z2a oz2e ozen4ta o3zi ozon1 órd2 ö1b öbe2la öbe4li öb2l ö2ble ö2b3r 2öc ö1ch öch1l ö2chr öchs2t öch4str öcht4 ö1d ödi3 ö1e 1öf öf2fl öf3l ögen4s1 ög3l ög3r ög2s ö1he öh3l2e öh3ri öh2s ö1hu ö3ig. ö1ke ö2ko ök3r ök2s 3öl. öl1a2 öl1ei öl1em öl2f1ei öl1im öl1in öl2k3l öl3la öllma4 öllmas3 öl2nar öl1o2 öls2 öl3sa öl3sz ö2l1u öl2ung ölz2w öm2s 2ön ön2e ö3ni önizi1 önn2e ö1nu öo1 ö1pe öpf3l ör3a2 ör1c ör2dr ö2r3ec ö2r1ei ö2r1e2l ör2erg ö2rerl ö3r2erz ör2f3l ör2gl ö2r1im ör2kl örner2 ör1o2 örs2e ör3s2k ört2e ör2tr öru4 ö2r1une ö1s ö2sa ö2scha ö4sch3ei ö2schl ö2sch3m ö2schw ö2s1ei ös4en ös4es ö2sp ö3s2s ös4s1c ös3ses ös3set ös4st ös4t ö2sta ös4u ö1ß 2ö1t ö2t3a öte4n3 öt2h öt2sc öt2tr ö1v ö1w ö1z öze3 özes4 p2a 1pa. 1paa 1pac pa3da pa2dr pa1f4r pag4 pa3gh pa1ho 1pak pa1k4l pak2to 3pala pala3t 1palä pa3li 2palt pa2nar pan3d pan4ds pa3nei pa2neu pan3k4 2panl 2pann 1pa2no pan3sl pant2 panz4 1pap papi2 papieren8 papie8r7end 3para pa2r3af par3akt 1parc pa5reg pa5rek 2par2er 2parg pargel6d 1park. 
par4kam par4kau par2kl par2kr 1paro 2parp 1partn 1party par3z2 pa1s2p pa2ßu 1pat pat4c pat4e2 pat4r 1pau p3auf pa3uni 1pä 3päc 3päd 3pär 3pä4s3 pä4t1e2h pä4t3ent pät3h pä2to pät3s 2p1b pbe1 2p3c 2p1d2 pda2 p2e 1pe. pe2a pea4r 1ped pe2en pef4 pei1 2peic pe1im pekt4s 2peku 3pel pe2l1a4 pel3d pe2let pe2lex pe3li4n pe4l3ink pel3l4e pel3li 1pem pena4 pe3n2al pen3da pe4nen 1penn pe2n1o pens2 3pensi 1pensu pen3z2 1pep pe1ra per2an 1perl per4na 3pero per2ra perr3an per4rä2 per6rieg 1pers 2perse 2persi 3perso perwa4 pe3sa pes3s2 3pet 1pé 4pf. p2f1ab p2fad p2faf pf1ai p2f1ak pf1ans p2fa4r pf3are p2f1au 4p3fe. p2fei pf1eim pf1ein p3fen. p2fent p3fer. pf2erw p3f2es pff4 pffa3 p2f1ins p2f3lä pf3lei pf3lie pf3lo pf3lu p2for pf3r pf1ra pfs2 pf3sl pf3sz pf3t 2p1g pgra2 1ph 4ph. ph2a 2phä 2phb 4phd 2p1hei phen3d2 phen3s 2ph1ers 2phf 4phg phi2ka 4phk ph2l 2phm 2phn p3hop 2phö ph4r 2phs ph3t2 2phthe phu4s 2p1hü 2phz pi2a3 pi3as. pi3chl p4id piegelei8en pi2el piela2 1pier 3pik 1pil pi3le pil4zer pin2e pingen4 ping3s 3pinse pi2o pi3oi pi3onu 3pip pi2pe pi4pel pi3ri 3pirin 3pis 4piso pis2t pi3t2a pit2s 2pitz pi2z1in p1j 2p1k2 pku2 pkur1 1p2l4 4pl. 3p4la p5lad plan3g 3plä 2ple. ple1c p4leg ple5n4 2p3ler 2plig p4lik p4liz p4lo 2p3lu 2p1m2 pma1 2p1n 1p2o po3b4 po1c 3pod 2p3oh po2i po3id 3poin 3pok 3p4ol po2lau po3li po4lor 2pond 2ponn po1o2b po2p3ak po2p3ar po1pe po2pl po3pt po1ral po1rau 2porn por4tin por4tre por4tri pos2e pos4t po2sta post3ag po4stä po4st3ei post3ra po3ta 3pote po2t1u po2w po3x pö2bl pö2c 2p1p p2p3a2b pp3anl ppa2p ppe4ler ppe2n1 p2p1f4 p2p1h p3p2ho pp3l pp5lan pp1lä p2ple p2p3ra p2p3re p2pri pp3sa ppt2 p2r2 1prak pra4s3 1prax p4rä 1präd 1präg 3präm 3präs 2pre. 2prec 1pred pre2e1 1prei 3preis prei4ss 2p3rer 3p4res 1preß pri4e 2prig 1prinz 1p4ro1 3prob 2proc 3prod 3prog 3proj 2pros3s 3prot 1prüf 2prün 2p1s 4ps. ps4an p3se p3s2h ps1id p2sö ps2po ps2te p2st3r p2stu 3p2sy ps2ze 2p1t pt1a pt2ab pt3alb pt3at p3te p4t3ec p4t1ei pte4l p4tele p4t1ent pt3erei p4t1erw p4t1erz p2th pt1in1 pto3me p4tos pto2w ptpo4 p2t3r pt1s2 ptt2 pt1um p3tung pt1urs p2tü4 3p2ty pt3z 1pu pu1a pub4 2puc pu2dr 2p1uh pul2sp 2pund pun2s 2punt 2pur 3put put2s 1püf 2pül pün2 2p1v 2p1w pwa4r 3py1 py3t 2p1z qu4 1queu qui3s 1ra. ra2ab 2r3aac r3aal ra3ar r1ab ra2bar rab2bl 2rabd 2rabf 2rabg 1r4abi ra2br 2rabs 2rabt 2r3abw 1raby ra1ce 2r1acet ra4cheb ra4chin racht3r rach6trä ra2chu r2ack 1r2ad r4ad. ra2dam 2radap 3radf 3radl r3a2d3r rad3t 1rae r2af raf3ar ra2fer ra3ge ra3gle ra2gn 3r2ahm 4raht r2ai 2raic rail4l 2r3air 1rake 3ra1k4l ra2kre ra2kro 2rakti 3rakü r2al r4al. ra2la4 ral3ab r3alar ral3b 3r4ald ra3le 2ralg r4ali rali5er. rali5ers ralk2 ral3la rall2e 2rallg 2r3alm. r3alp. 2ralpe r4als r3al3t r4alt2h ra2lu 3raly r2ame ra2mer 1r2ami r2amm ram4man ram6m5ers ram4m3u 2r1amt ramt4s r2an. 4ranc r4anda r4ande ran4dep ran4d3er 4r3anei r4aner 2ranf 1rangi rani1e ran2kr 2ranl 2r1anm 2r1anp 2ranr r2ans. r2ansp ran4spa 2rantr 2r3anw r2ap 2rapf r1ar r2ara 2rarb 3rarei rar3f4 ra2r1in r2ark r2arp 2r3arz r2a3s2 r4as. ras4a ra4schl ra5se ra5si ra4sk 2r3asph ra4ssi 2raß 1rat r4at. ra2t1a ra3ta. ra3te r3atl rat4r rat2st 2r3atta 4rau. 3raub. 4raud rau3e2n 2rauf 2raug 3raum rau4m3ag rau4man rau2mi 3rausc 2rausg rau2sp 2rauss rau4sti raus3tr 4raut raut5s 1raü r2ax raxi4s1 räch4s 3r2äd 4räf 4räg 2räh 2räm 3rän. 3räni 3räns 2r1är r2är. rä3ra rä4sa rä4s5c rä5sse rä2st 3rätse rä2u räu2s räu5sche 4räut 2r1b r2b1ab r2b1a2de r2bak rbal3a rba3re rb1art rb1auf rbb2 rb1ech r4belä rb1ent rbe3r2e r3b2la rbla2d r8blasser r4b3last r3blä r2ble. 
rb3ler rb2lin rb2lö rbmas3 rb2o rb4ri rb2s rb3se rb4sei rb3ska rbs1o rb3sp rb4stä rb3str rb2u 2rc r1ce r1che. r1chen r1chi rch3l rch3m rch3r rchs2 rch3sp rchst4r rch3ta rch6terw rch1w r1ci r1cl r1ç 2r1d r3d2ac r2daf r2d1ak r2d1al rdani1 rd1ant rd1anz r4dap r2dei rd2ei. r2d1elb r3den rden3d2 rden4gl rde3re rder4er rderin6s r4d3ernt rde3sp rdi3a2 rdia4l r2d1inn rd1it rdo2be r3don rd1os rdo4st r2dö rd3rat rd4ri rdrü4 rdt4 rd3ta rd3th rdwa4 1re 3re. re3aler re2am re3as re3at. re3ats 2reä re2b1a re2b1l reb1r reb3ra re2bü r2ech rech3ar 4rechs 2reck. 2recki 3red. 4redd 2redit re1el re1er 3refe 4reff 3refl 3refo 3reg 5reg. rege4l3ä re2hac re4h3ent re2h1i rehl4 re2h1o r2ei. rei4bl r2eie 2reig 3reigew rei3l2a rei3l2i reim2p r1ein rei3nec 4reing r3eink 4reinr rein8s7tre re1in2v reister6 reis5tro 3rek 4re2ke re3la 2r1elb rel2e relea4 re3lei 2re2lek 2r1elf re3lo 2r1elt relu2 r4em. r2emi 4rempf 4remu r4en. r2ena rena2b re3nal re2nä 3rendi ren3dr re4n3end ren4gl 2rengp re2ni ren4nes r1ense 2r1entl 2r1ents 2rentw 4r3entz r2enz re3or 3repe 3repo 4repp 3r4er. 2r1erb r2erbr 2r1erd r2erer r1erf r1erg r4ergen r1erk 4r3erken r2erki r1erl 4r3erlau 2rerlö 2r1erm rer2n 2r1ernä 4r3erns 4r3ernt r2e1ro re2rob r1erö 3r2ers. 2r1ersa r2erse 2rersp r1ert r2erte 2rertr 2r1erz rer5ze r2erzy 3r4es. re2sa res3an 3rese 3reso 2ress ress2e res6s5erw 3rest res3tem re2stu 3resu 2re2ß1 re2thy re2u reu3g2 2reul re3uni 2r1eur 2reü 2r3evid r1ew rewa4r re2wi 2r3e2x1 3rez 4rezi 1ré 2r1f rfall4s rf1ält rfä4s3 rf2äu r2fent rf2es rfi4le. rf3lic rf3lin rf4lö r3flü r3for rf4ru rf4rü rf2sa rf2s1ä rf2s1id rf2s3pr rf2ta rf3t4r rf2u 4r1g r2g1a2d r2g1ah r2g1ak rg2an rga5ssen rgas2t rga4str rge4an rge2bl rge4l3er rgen4z3w rge4ral rge4tap r2geto rgi4sel r2glan rgleich8s7 r2gleu r2glig r2gno r2g1ob r2g3ral r2greg r2gres r2gret rg3rin rgro5sse rg1sp rgs2tr r1h4 2rh. 2rha r2ha. 2rhä 3r4he. 3r4hen r3her r2hoe rho2i3 2rhol 2rhö 2rhs 1ri ri3am ri3at rib2bl ri1ce ri1cha rid2 ri2dan 2ridol r2ie rieb4s3t rie2fr ri1el riene4 rie2nu ri1er. ri4ere ri3e4sti ri1eu ri2f1a ri2f1ei ri2fer ri2f1o ri2fr rif4ter 3rig 5rig. ri4gene 5rigj rig1l 4rigr rik1l ri4kla r2imb 2rimp rim2s r2i3na 2r1ind rin4dex rin4diz ri3n4e rine1i 2r1inf rin2fo ring3l rin2gr 2r1inh 2rinit 2rink 3rinn 6r5innenm 4r3inner 4rinnta r1innu 2r1ins 3r4ins. rin2so rin2sp r4inspi 2rint rin4teg rin4t5r 2r1inv 4r1ir r2is ris4a ri4scho ri4schw 3risik rismu2 ri3so ri4s1p 3riss ris3si rist5ers ristes4 ri6stess ri2ß1 r2it r3i2tal ri3t2i rit4r rit2tr 5ritu rix1 1rí 2r1j 2r1k rk2am r2käh r3klau r2klis rk4lo rk2lu rk4n r2k5nu rk3räu r2k3rea r3kri rk3rin rk2s1e rk3sen rk2sp rkstati6 rk4stec rk4s1ti rk2ta rk4t3eng rk4t3erf rkt3ers rk6tersc rk4t3erw rk2tin rk2t1o2 rk2t3r rk3tra rk1uh rk2um rku2n rk1uni rkus3s rku4s1t 4r1l r3l2a rl2e rle2a r3lec rle2i rle2st r3let r3l2i r3l2o rl2ö rlös5s rl2s1p rl2s1to rl3t rlu4str 4r1m r3m2ag rma2la r2m1ald rm1anz rm1a2p r2maph rma5ssen rmas8sens rm2är rm3d2 r3me. r2m1ef r2meo rm2es r2mide r2m1im r2m1o2ri rm3sa rms2t rm3sta rmt2a rm2u rm3ums 4rn rna2b rna4n rn2and rn3ani r2n1anz rn2a2r rn3are rn3ari r2nau r3näp rn3d4r r3ne rn3e4ben r4nef rn2ei rn3eif r4n3eis rne2n r4n1ene rn3ense r4nerf r4n1erg rn4erhi r4nerk r4n1ert r5nes rn2et r4nex rn3f rng2 r3ni r4n1in r3nod r2n1op r2n1or rn1ö r1nöt rn3s2ä rn3s2p rn3s2z rn3t2e r1nu rn1ur r1nü r1ny ro2bei 2robj 1robo 2robs ro1ch 3rock. r2o3de ro3e4 roh1l roh3na 3r2ohr 3roi ro3le rol4lan rol3l4en 2roly 4rom. 
ro2mad ro2mer 4romm 4romt r2on ro4nerb 3ronn rons2 ron4tan 4ro1ny ro1pe 2ro2pf ro3ph r1or r2ora ror3al ro2rat ro2rei ro2r1o ror3th ro3sh ro3s2i ro5smo ros6san ross1c ros4t ro3sta ro2st1r ro2ßu ro4tag rote3i ro2tho ro2tri rot1s2 ro3t2u ro3unt 3rout rö2b3l rö2du 2röf 4rög 1r2öh r1ök 1r2öl 3römi 4röp r1ör r2ös. r2öse 2r1p2 r3p4a r3p4e rpe2re rpe4r3in rpf4 r2pli r3po rpo4str rps1t rp3t r3pu r1q 2r1r rr2ab rr2ar rra4s3s rr1äm rrb2 rr1c r3r2e rre4ale rrer4s r4rew rr2he rrik2 rr2n3a rr2o r2r3ob rro3m2 rr2th r3ru r3r2ü rrü1b 4r1s rs3ab r2s1a2d r4samp r4s1amt rs2an r2s3ang rs3anp rs3ar r3sch2e r6scherl rsch2l rs1ebe r2sein rse2n1 rs2end rse4ne rs1ere rs1erö rs1ers rs1erz rse2t rs1eta r3sho rs2kal rs2kan rs2kie rs2kis rs2kl r4sko r4skr r4sku rs3l rs4no r2s1op r4s3ort. rs2p4 rspa3s2 r2s3ph r4s3s2 r5stad r4stant rs2tau r6st5eing r6sterbt r4st3er4w rs2th rst3ing r2stip r2s1tot rs2tr rst3ran r6strang rs2tu rsü3s r3swi 4r1t rt1abs r2t1alm rtals1 rt1am rt1ang rt1ann rt1ant rt1anz r2t1ar rt3a4re r2t3att rt1är r3te. rte1e2 rt4eif rtei3la r2telf rte2n1 r3ten. rt3erei r4terfa r4terfo r4t3erh r2t1erk r4t3er4la r4t3erle r4t3ernä rter4re rt1ers r3tes2 rte3sk r2thi rt3hol rt2hum r2t1id r2t1ima r2tinf rto1p rt1or rto2ri r2t3rak rtra4s3 rt3rec r4treis rt3ros r4ts rt4s1eh rt1spe rtt4 r2t1urt rt3z 1ru ru1a ru3a2r3 rube2 rude2a ru2dr 3ruf ru2fa ruf2s1 ruf4st 4rug 2r1uhr 3ruin ru1ins ru1is 2rum 4rumf ru2mi 4ruml r2ums. 4rumz 2r1una 2rund run2d1a r2unde rund3er run6derl run6ders run6derw 2r1unf 2rungl 2r1u2ni 4r3unio run2kr 2r1unl 2r1unm 4runn 4r3unt 2runw ru3pr 4r3ur ru2ra ru2r1e 5ruro ru2si rus3sen rus2s1p rus6s3t 3rut ru2tei rut3h ru2t1o2 ru2t3r 4ruz ru2zw 1rü 2rüb rü1ben rü1ch rücks2 4rümm rü3s2s rüs3si 2r1v rve4n1e rve5s rv2s 2r1w rwun3s 4r1x 1ry ry2c 2r1z rz1ac rz2an r2zar r2zas r5zene rz1eng r4z3ents r2z1erf r2z1erg r2z1erk r2z1erw rz1id r3z2of rz1op rz2ö rz3te rz2th rz2t3ro rzug2u r3zwä r3z2wec 1sa 3sa. 3saa 2s1ab sa2be 3sabet sa2bl sa3ble sa2br 4sabs sa2cho2 sach3t 2s1ada s1adm 2s1a2dr sa2fe 2s3aff 3safi sa1f4r 3sag sa4gent sag4n 4s1a2gr 3s2ai sa3i2k1 sail2 2s1ak sa2ka 3saki 3sakr 4s3akt 3sal. sa4l3erb sa2l1id s1all 3salo sal2se 2s1alt 3s2alz 3sam s3ameri 5samm 6s1amma 4s1amn s1am3p4 sam2to s1an s2an. 2s3a2na 2s3anb s2an2c s2and s4and. san4dri 3sang. sang4s 2s3anh 3s4ani 2s3anl 2sanp 2s3ans san4sk 4santr 2s3anw s3anz 2s1ap sa2po 3sapr 2s1ar 3s4ar. 3s2ara 4s3arb 3s2ard 3sari s3arr 3s2ars 4sarti s1a2sp sas6sest 4s3a2sy 3sat sat2a 4s3ath 4s3atl 4satm sa2tr sa3ts sat4z3en s1a4u 3sau. 3sauc 3saue 2s3aufb sau2gr 3saum 3saur sauri1 2s3ausb 3s2ause 2s3av sa2vo 3säc s3ähn 3säl s1ält 2s1äm 2s1änd 2s1är sä2s3 3s2ät 1säu 2säuß 4s3b4 sba4n sbe3r2e sbus3 1sc 2sc. 2scam s2cap 4scar 2s1ce 6sch. 3schaf 2schak s2ch2al 4schanc 4schang 5schanz 4schao s2chau 3s2chä 2schb 2schc 2schd 3sche. sch3ei. 4schemp sch2en 3sches 4schess 4schex 4schf 2schg 2schh schi4e s4chim 3sching 4schiru 3schis 2schk 4schl. 4schle. 6schlein sch6lit 2schmö 2schn. 2schox 3s2chö 4schöl 4schp 2schq 4schre. 4schrin sch3rom 4schrou 6schs2 sch3sk 6sch3t scht2a scht4r s4chu 4schunt sch2up 5schü 2schv 4schwet sch4wil 2schz 2scj 6s1cl 2sco 3s2cop 3sco4r s2cr 2scs 2scu 4s3d2 sda3me sdien4e sd4r 1se se3at. seau4 2s1e2ben seb4r 2s1echo s1echt 2s1eck 3see se1ec se2e1i4 see3ig seein2 se1er. se1erö 2s1eff 3seg se2gal se2gl seg4r 3s2eh se2h1a4 se3he se4h1ei se4hel se4herk se2hin seh1l seh3re seh3s seh3t se2hüb 2s1ei. 2s1eie 2s1eig s1ein 5s2ein. 2seinb sein4du sei3n2e sein4fo 2seing 2seinh 4seink 2seinl 2seinn 2seinr s4eins. 4seinsp 4seinst 2seinw 4s1eis 5s2eit 3s2ek s2el. 
se2l1a se3lad sela4g se3lam 3selb sel1ec 2selem se4l3erl sel3ers 2self. s3elix se2l3ö s2els sel3sz sel3tr s4e3ma 2s1emp s2en. se4nag se2nä 3sendet 4s1endl sen3gl 5s2eni 3senku se2no se4nob s2ens s2ent. 2s1entf 4s3entg s2enti 2s1ents 2sentw 2sentz se2n3u 3senva seo2r 4s1e2pos 3seq s4er. se2r3a2d se2r3al se5ref s3ereig se4r3eim se4r3enk ser2er s1erfo s2erfr s3erfü 4serfül ser3g s1ergä s2ergr s1erh 5serie s3erken s1erkl 3serl. s1ernä 2s3ernt se1rot s3eröf s2ers. 2sersa s4ert. seru2 se4r1uf se3rum se3rund 3s4erv se2sel 2sesh se3su 2se4tap se2tat s1e2th 3setz se1u2n 2s1ex se2xe 4sexp sex3t 6s3f4 sfal6l5er sflo4 4s3g2 sges2 sgro3 2s1h 4sh. sh2a 3s2ha. sha2k 4s3han 4shc s3h2e 3shi. 3shid shi4r 4shk sh3n 4shof 3shop sho4re 3show sh4r 4shs 4sht 4s3hü 1si si2ach si3ach. si2ad si3am. sia4s 2siat sib4 5si1c 2s1i2deo s2ido 3s4ie siege4s si3ene si1err si1f4 si2g1a 3sigh sig4n si3gnu si2g3r si2k1ab si2k1ä sik3erl si2ki si4k1l si2kr sik3s2 sik3t4 si2ku 3silo 2s1imm si3n4a 2s1ind 2s1inf sing1a sin3gh sin3g4l sin2gr sing3sa 2s1inh sin1i1 2s1inq 2s1ins 2s1int 2s1inv 3sio 3s2is si2sa si4schu si2s1e si2s1o si2s1p sis3s2 si2stu 3s2it si2tau sit3r si2tra si3tu 3siv siv1a sive3 si2vr 1sí 4s3j 2s1k2 4sk. 1skala 4skam 4skanz 4skas ska4te. 4skateg ska4tes 4skb skelma4 skelmas5 s2kep 3s2ki. s2kif s2kig 3s2kik 4skir 3skiz sk4l 4s3klas sk4n 4skom 4s3kor 4skow 4skö 4sks 4sk3t 3skulp skus3 2s1l2 4sl. 3slal 4slan sla2ve s2law sl3b s5le s3li 3s4lip 4sln s3lo. slo3be s3loe s3lu 4s3m2 2s3n4 4sna snab4 sni3er. sni3ers 4s5not 4snö 3so. so4a 2s1o2b so3et 3soft 3sog s1o2he 6sohng 2s1ohr 1sol so3la so4l1ei sol4ler 2so2ly 3som 3s2on son3au sone2 son5ende son3sä son2s1o so3o 2s1opf 3sor. so1ral s1orc 2s1ord so2rei 2s1orga 5s2orge 2s1o2rie so2r1o2 3sors so4ru 3so3s2 s4os. 4s1ost 1sou so3unt 3sov 4s1o2ve 3sow 2s1ox 5soz sö2f 2s1ök 2s1ö2l s1ö4s sp2 2sp. 2spaa 2spak 2spala spani7er. 2spano 4spap 2spara 1spare 2sparo 3sparu spa3sse spa3ssi 3s2paß 2spau s2paz s2pä 2spär s3pe. 4spel 4spensi spe3p4 s1peri 4sperl 2spero s2perr 2spers 2spet 1s2pez 2s3pf 2spha s4phä s3phe 1spi 3s2pi4e 4s3pier4 spi2k 2spil 3spio 4spi4p 4spis 2spl 4spla 4splä 3s2pli s3p4lu s3pn 2spod 2spog s2poi 2spok 4spol 1spon 1spor 4s3pos s2pott 4spr. s2prac s2pran 2sprax 2spräm 4spräs 3s4prec 2spred 2spres 2sprob 5spross 3spru 2sprüf 3sprün 2s3ps 2s4pt 2spun 2spup 3spur 4sput 1spü 4spy 2s1q 4s3r4 srat2s srat4sc sret3 srom2 srö2s srös1c srücker6 srü2d 2s1s 6ss. 4ssa ssa3bo ss1ack ss1aj s3sal s4s1ala s4s1alb s4s3amt s5sand s4s3ang s2sano s4sans ss2ant s4sanz s3sas ss3att 4s3s2ä 4ssb 6ssc 4ssd 4ss1ec 4ssee 4sseg s4s1ega 4sseh sse3ha 4ssei sse3inf sse3in4t 4ssek 6ssendet 4s3sendu ssenmas6 sse6r5att s2s1erö 4ss3erse s3sersu ss2es 4ssesc 3ssesh 4sset sse3ta 4ssez 4ssf 4ssg 4ssh 4ssic 4ssie s2sig s4sinf s4sinst s4sint 4ssio 4ssit 4ssk s3skala 4s4s3l 4ssm 4ssn 4sso ss1off ssoi4 s2s1op ss1ori 4ssp s3spe ss2po ssquet4 4ssr 4ss3s2 4sst sst2a s5stad ss2tar ss1te s4ste. s5stel s4sten s4stes s4stet s5steu ss2th ss2tip ss1tis ss2top ss2tur s3s2tü 4ssum ss1ums 4ssü 4ssv 4ssw 4s3sy 4ssz 1st 6st. 3staa 2stabb 2stabh s2tabi 2stabt 2stabz st2ac 3s2tad 4stada 4stadr 3staff 2stag 3stah 2stak 2stal. 2stale 3sta3li 2stalk st1alm st1alp 3stam st1ami 4stamt sta4na 3stand 4stanf 4stanl 4stann 2stanw 4stanza s2tar. s2tars 3start st1asi 3stat 3s4tau. 2stauf 2staum 5staur 2staus 3staus. 
2stax 3stä 4stäg 4stält 4stämt s2tär 5stätt 4stäus 4stb 4st3c 4std 3st2e s2tean 4stechn 4stee ste2gr ste2i 5s2teig 4s3teil stei4na s2tel s3telem stel4l3ä ste4mar 4stempf 4st3ends st3engl st4ens 4st3entf 4stentw 4stepi ste6rers s2tern 6sterras s2ters 4st3ese ste4stä 4stests s2teu 4steuf 4st3ev 4stex 4stf 2stg 4sth st3ho 3sti2e 4stief. stierma5 3stif 3stim 2stinb 2stinf 2st1ins s4tio sti2r st3i2so 2stj 2stk 4s4tl 4stm stma3s2 2stn 3stoc sto3d s2tode 2st3om 2stopo 2stord 2storg 3stos 4stou 2stöch 5s2tör 2stöt 4stp 2stq 3s2traf 2strag 3strah 4strai 3s2tral 4strans 3s2tras4 3straß 4straum s2träf 2sträg s2trän 4sträne 2stre. 4strech 4stred 4stref 4streg 3st4reif 4streis st3renn 2strep 2stret 2strev 2stri. 3s4tria 2strib 4strig stri2k 4strisi 2stroc 3s2trof 3s2trok st3roll stro4ma s2tros s2trö 3struk s2trum 4st3run 2strup 4st3s4 stsas2 2st3t4 st2u 3s2tub 4stuc 3stud 2stue 3stuf 3stuh 2stuk 2stumr stum2s 2stumz stu2n 2stun. 2stunf 2st3uni 2stuns 2stunt 3stuö stu3re st3url 2s3turn 2st3urt 4stüch s2tück 2stür. 2stüre 2stürg 2stürs 2stv 2stw 2sty. 2stys 4st3z 1su. su1an 3su2b3 su4ba2 4subi 5su1c su2cha such4st 2s1u2f 2s1uh su1is su1it. sul2a sul2i sult2 su2mar su2mau 3s2ume su2m1el su6m5ents s3umfe 3summ sum1o2 su2mor s3umsa s3umst su2n sun6derh su4ne s1unf 2s1uni 4sunt 3s2up sup3p4 su2ra 2s1url s1urt su2s1 su3sa su3sh su3si sus3s 2sü4b 3süc sü2d1 süden2 3sün süs4 sü3sse sü3ssi 4s3v 2s1w s3we sweh2 4swie 4swil s3wö s3wu 1s2y syl1 sy4n3 sy5s 2s1z 4s3za 4s3zei s2zena 5s4zene 4szent s2zes s2zeß s3zet s2zis sz2o 4s3zu 4s3zw 2ß3a2 ß1ä 2ß1b2 ßbus3 2ß1c 2ß1d4 ßdie3 1ße 2ß1ec 2ß1e2g 2ß1ei ße2l1a ße2le ßen3g ße2ni ße2no ß2ers. 2ßerse ßer3t ße2s ße2t ß1ex 2ß1f 2ß3g2 ßge2bl 2ß1h2 1ßi ßi2g1a 2ß1in ß1j 2ß1k4 2ß1l2 2ß1m 2ß1n2 ß1o2 ß1ö 2ß1p2 2ß1q ßquet2 4ß3r2 ßrus3 2ß3s2 2ß1t ß2th ßts2 1ßu2 ß1uf 2ß1um ß1uni ß1ü 2ß1v 2ß1w 2ß1z 2tab. ta2b1an 2t1abb 1tabel 2taben ta4bend 2tabf 2tabg 2tabh 2tabk 1table 2t3abn ta2br 4tabs 2t3abt ta2bü 2tabw 2tabz 2t1ac 3tacu t1ada tadi3 2t1a2dr ta3d2s 1taf2e 2taff t1afg t1af4r 3t2ag ta2ga2 ta2g1ei 4t3a4gent ta3gl t3ago tag2s tag4st tah2 tah3le tahl3sk t2ai ta3i2k tai2l ta1ins tai4r ta1ir. 1tak t3a2ka ta2kro tak2ta 3taktb 3t2aktu 2takz 3t2al. ta2la ta3lag ta3lak t1alb. t1albk 1talbu tal3d 1t4ale ta4lens tal4leg tal2lö ta2l1op tal2se 2talt 2tam ta2mer ta2mi t1ampl t1amt t1a2na 2tanb t2and ta3ne 4tanf 2tang t2ank t3ankl 2tanl 2t1anme 4t1anna t1ans t2ans. 4t3ansi 2t3ansp ta2nu 2tanwa 2tanwä t2anz. t1anza 4tanzei tan6zerh t1anzu ta3or ta2pe. ta2pes 2tapf ta2pl 2tarb ta4rens ta4r3ere 3t4a3ri 2tark 2t1arm 2tart tar2ta t1arti tar2to ta2ru 2t1arz ta3sa 1tasc t1asp 1tas2t 1tat. ta2ta2b ta2tan ta2tau tat3ei ta2tem ta2t3er ta2th tat3he t3atl t4atm ta2tom 1tats ta2t1um t1auf 4taufg tau3f4li 4taufn 2taufw 1taug t1auk 3taum 1taus t1ausb tau6schr tau6schw t2ause t3ausg t1ausk 2tausl 2t3auss 4t1ausw 1tax taxi3s tä1c 2täd 3täe 1täg 2tägy 2täh 2t1ält 2täm t1ämt t1ängs 1tänz t1äp t2är. tä2ru tä4s t2ät 2tätt 1täus 2täuß 2täx 1tà 4t3b2 tbe3r2e tblock5e tblocken8 tbus3 2t1c t3cha t3che tch2i tch3l t2chu tch1w t3cl t3cr 2t3d4 tdun2 1te2a4 te3al te3an 3t4ebb 4t1e2ben 1t2ech te1cha 3techn 2teck teck2e 1tee te1em te2en3 te1erw te2es 2teff teg2 teg3re 2teh t3eifr teik4 1teil 2t1ein teinbus6 tein3ec t3einge t3einla t3eis. t3eisb tei3st tek3t2 tela4 te2l3ab te2l1ac te2l1au telb4 te3le tel1eb tele4be te4l1ec 3telef 3teleg te4l1eh te4lein 2telem tel1en te4lerd te4leu 4t3elf. te2l1in te2lit tell2e te4lost te2l1ö tel3s2k tel3ta tel3th tel3t4r te3mä te2m1ei te2min 2temo te2m1o2r 3temper 1tempo te4m1u t6en. 
ten3a tena2b te4na2d te4na4g te4nas te4nau te2nä t4enb ten3da 4t3endf t6endi 2t1endl t6endo 4t3endp ten3d4r te2n1e2b te2nef te3n4ei. ten3eid ten3ens 4tenerg te2net ten3g t1eng. ten4gla t4enh te2ni te4n3in t4enj t2enl t4enm ten3n t2eno tens2e 4tensem t4enta t3entb 4tentd t4ente ten4t3ri 4t3entw t3entz ten6zerh ten3zw t3e2pi t4er. tera2b te2rad te1raf ter3am te3ran. 4terbs 4terbt t3erde. te2re2b te4r3eif te2rel ter3end te4reng te4rerk terer4z 4terfol t4erfr 4terfül ter3g2 6tergrei t6ergru 2t1ergu 2tergü t4eri te3ria 4terklä 2t1erlö 1term terma3s4 ter4mer ter4n3ar 4t3erneu t4ero t3erö 3terras ter4re. 1terro t4ers. t6erscha ter4ser terst4 t4erst. t4ersti t4erstu tert4a teru2 te4r1uf ter4wäh 6terwerb ter3za 4t3erzb te2s tes1ac te3ser te3si te3so te3sp te4spr tess2 3tesse. t2es2t tes3tät te4st3ei te6ster6g te6sterk testes4 1tests t2et. te2tat 4tetl teu3eri 3teuf te1un teu2r3a4 te2vi 1tex te1xa t3e2xe 2t1e2xi 4texp 3text 2t1exz 2t1f4 tfä4s3 tfi2l 2t1g2 tger2 tgro3 t1h 4th. 2th2a 3t4ha. t2hag t3hai t2hak 3thal. 4t3hau 2t3hä 4thc 1th2e t2he. 3thea 2theb t2hec 2t3hei t4hein t2hek t2hem t4hene t4heni 3theo 2therr t2hes 3these t2heu 1thi. t2hik 2t3hil 2t3him 2thk 4th3l 4th3m 2th3n t2ho 2t3hoc t3hof 2t3hoh t4hol. t3hor 2t3hot thou2 2t3hö 2thp 1th2r2 4thrin. 4thrins 2ths 2thub 4thun 2thü 2thv t2hy ti2ad ti3a2m tib4 ti1ce ti3chr tiden2 ti4dend ti2deo t2ie 1tief. ti1el ti3e4n1 ti2er tie4rec tiermas6 1tierr tie5sse 2tieß ti1et ti1eu 1tif. ti1fr ti4gerz tihi4 ti2kam ti2kar ti2kin ti2krä tiks2 ti2lar ti2lau ti2lei ti2lel 1tilg til4le4b til4leg ti2lö tilt4 ti2lu ti2ma2g t2imi tim2m1a 4t1imp t2in. ti3na t1inb 4t1ind ti3n2e t1inf tin2g1a ting3l ting3s2 t1in1it 2t1inj tin2k1l t2ins. 4t1inse 2t1int ti1nu 4t1inv 3tio ti3or 1tip 3tip. 3tipp ti4que. ti1rh t2is ti4scha tisch3w ti2sei ti2sp 3ti3te tium2 ti2van tive3 ti2vel ti4v3erl ti2v1o ti2v3r ti2za 2t1j 2t3k4 2t3l2 tl4e 3tlem tle2r3a 4t5li tli3ni 2t1m2 tmal2 tmen4t3 tmo4des t3mu 2t3n4 t5na tnes4 to4as to5a4t 1tob 2tobj tob2l to1ch 3tocht 2tock 1tod 3tod. tode2 to2d1er tode4s1 to2d1u toi4r to3la tom1e2 to2men 2tomg 1ton to2nau to2neh 3too to2pak to2pat 1topo 2topt to1ra to2rau to4rän 2torc t1ord 1tore. to2r1el t1org t3orga tor3int to2rö 1tort t1ort. to2ru t2orw to3s2 to4sk tost4 1toten to2tho 3t4ou touil4 to3un tö2c 1töch 2töf 2t1ök tö4l 3tön t1ö4st 1töt 4t3p2 tpf4 2t1q t2r4 2tr. 1trac tra3cha t3rad. tra4dem tra4far 1trag 2trahm 3t4rai 1tram 2t3rams 3t4ran. 2trand 1trank t3rann 1trans t3rase t3rasi tra4str 2traß 1traum traus2 1trä 3träg 2träh 3träne 2träs 2träß 2träus 2träuß 4t5re. tre4ale 2treb tre2br 2trec t3rech t4reck 2t3red 3t4ree 1tref 2trefe 3treff 2trefo 2treg t4rei. 1t4reib 2treif t3reig 2t3reih t3rein t3reis t3reiz 2trek 2t3rel t4rem t4ren. 1trend t3rent 1trep 2trepe 2trepo t4repr t4rer t4res. 1t4ret tre2t3r t4reu t3rev 2trez 3t4ré 2t3rh 1trib 3trieb. 
3triebs tri2er 1trin t3rind 2tring tri3ni 3trio t4rip t3riß 1triu tri2x trizi1 1troc t4roi tro2ke tro2mi 2t3roo t4rop 3tropf 2t3roß t3röc 2tröh trö4s3s 2t3röt 1trug 2truk trum2 trums1 2t3rund 1t4runk 3t4rup t3ruß tru2th t4rüb trü1be trü1bu 2t3rüc trücker6 t4rüg 3trümm try1 2ts t4s3a4b t3s2ac ts1ad t2s1ah ts1al t4s1amt4 t2san t4s3ar ts1as tsa3sse t2sau t1sä t2s1än t4schar t3sch2e t4schef tsch4li t4schro ts2cor t2s1e2b ts3eh t3seil t4seind ts1em tse2n1 t2s1eng t3sens t2s1ent t2s1er t4s3esse t3set t4seth t2s1i2d ts1ini t2s1ir t3skala ts3kr ts1o tso2r t1spal t1span ts1par ts4pare t1spas ts2ped t1spek ts2pi ts3ple ts2pon ts2por ts3s2 tst4 ts2tat ts3täti t4stea t4s1tep t4sterm t4s3terr ts1tie t2s3tis t2stit ts2to ts3toc ts3tor t4s3trad t2strä t2s1tri ts2tro t4strop t2s3trü ts1u 1tsub t1sü 4t1t tt1ab tta2be tt2ac t2t1ad tta6gess tt1ak tt2al tt2ant tt1art tta3s tt1ebe tt1eif tt1eis tte2la tte4leb tte4len ttel1o t3ter tt2erg tterma8s7s tte4s1ä tt2häu t2t3ho ttras3s t3tro tt3ru tt3rü tt2sen tts1p tt4s3tem tt4ster tt4sti ttt4 t2tuc tt2un ttü2c tu1alm tu3an 1tuc tu2chi 1tue tu2ere 2tuf tuf2e tu3fen t3u2fer 2tuh tu2is t3u2kr tul2a 1tum t2um. t2ume 2t3umf t3umg 2t3umk 2tump t3umr tum2si tum2so 2t3umt t3umz 1tun. 2t1una 2t1und 2t3unf t3unga tung6s 2tunif 2t1u2nio 1tunn 1tuns 2t3unt t1up. tu2r1a4g tu2rä tur1c tu2re. tu2rei tu2r1er tu2res tu2r1e4t turin1 1turn tu2ro tur3s tu4ru tu2sa tu4schl tu2so tu3ta 2tüb 1tüch tück2s 1tüf 1tür. tür1c 1türe 1türg 1türs 1tüten 2tütz 2t3v 4t3w twa2 twä4 twi4e 1ty 3ty. 3typ ty2pa 3tys 4t1z t2za4 tz1ag tz1al tz1ar tz1au tz1ä t3ze. t2z1ec t2z1eie t2z1eis tze4n1 tz2ene tz3ents tz1erl tz2ers t3zes tzes3t tzgel2 tz1int t2zor tz2ö tz2th tz2tin tz1wä tz1wi tz1wu 2ua u3a2b u1a2c uad4r u1ah u1al. ua2lau u1alb u3alet u1alf u3a2lo u1alr u1als u1al3t ua2lu u1am u1ans u3ar. uara2b u1ars ua3sa ua2th uat2i u3au uau2s u1ay u1äm u1än u1äu 2u1b u8becken. ub3ein u3b4i ubi3os. ub2l ub3lic u2b3lu u2bop ub1r ub3rä u2b3rit ub2san ub2s1o ub2spa ubus3 u2büb 2uc uc1c u1ce4 uces3 uch1a u1cha. uch1ä u1che u2ch1ec uch1ei ucherma8s u3ches u1chi uch1il uch1in uch3l uch3m uchma6ss uch3n u2ch3r uch2so uch4spr uchst4 uch4tor uch2t3r u1chu u2chum uch3ü uch1w u1ci uck2er u1cl 2u1d u3d2a uder2e udert4 udi3en uditi4 u2don ud3ra u3dru 2u1e u2ed ue2en u2eg u2ela ue2le ueli4 ue2mi uen1 ue2nä ue2ner uenge4 ue2ni ue2no uen2sa uen2zu u2ep ue2r3a ue2r1ä ue3reig u3eremp u3erent ue4rerg ue4rerk uer3g2 u4erinn u3erin4t uerma6s uer2ne uer4ner uer3o u3err uer3sc uer3t2 u3erum u3erunf u3erunt ue4s ue5se ue5sp ue2ta ue4tek uf1ab u3fah uf1ak uf3ar uf1au u2f1ä4s u2f1ä2ß u2f1ei u2f1em u3fen. u2fent u2f1erh u4ferle uf2ern u2f1eß 2uff uff4l uf2fro uf3l u2fob ufo2r uf1ori uf3r uf3sä uf2spo uf4s3tem uf4ster uf2t1eb uf3ten uft3s2 u2fum 2u1g u4gabte ug1af ug1ak ugang4 u2g1ap uga4s ug1au ug3d2 u2g1ei ugenma3 ugenmas6 u2g1erf u2g1erl ug3hu u2g1l ug3lad ug3lo u3g2lö u4glu u2g3n ugo3 ug1or u2gö u4g3reis ug3ro ugro3s ug3rüs ug3sc ug3se ug3si ugsma3 ugsmas4 ug1spa ug5stä u2gü u1h 2uh. uh1la uh1lä uh2li uhme4 uhr1a uh2rer uh3ri uh4rin uhrt4 uh2ru uh4rü uh1un uh1w 2ui ui2c u1ie ui1em u3ig u4ige u1im u1in. u1is. u3isch. 
u3ischs uisi4n ui2st u1j uk2a u3käu u1ke u1ki u1k2l ukle1i u1k4n u3ko uk2ö u1k4r uk2ta uk2t1in uk2t3r u1ku uku2s u1l ul1am ul1äm ulb4 ul2dr uld2se u2l1el ule4n ul1erf ul1erh ul1erw ule2sa ule2t ul1eta u2lex ulf4 ulg4 uli2k ul1ins ul3ka ul2kn ull2a ul3len ul2les ulli2n ul2lö2 ulm3ein ulo2i ul1or ul2p1h ul2sa ul4sam uls2t uls3z 2ulta ul3th ul2tri ult3s u2lü ul2vr ulz2w u2m3a2k um1all um2an um3anz u2m1art u2m1aus u2maut 1um3d2 um2en ument4s umer2a u2m1erg u2m1erl u2m1erw 1umf 1umg um1ins um1ir 1umk 1um3l 4umm umm2a umpf4li um2p3le 1umr 3umsat um2sau um2ser um2sim um2s1pe um4stem um2su um3t2 um2un u2m1ur 1umz un1 2un. 2una. 1unab un3ac un4al u3n2am u2n3an 2un2as un3at 1unda un4dab 1undd un4dei un4d3erf und5erha 1undf 2undg un2did 1undn un2dor un2d3r 4unds. und3sp un2d1um 1undv 1undz u3ne une2b une2h un2ei. un3ein unen2t un4es2 1unget 1ungew 1unglü un2gr ung3ra ung3ri ung4s1 un2id un3ide 1u2nif unik4 un2im uni2r 2unis un3isl u3n2it 3u2niv 2unk un2k1a2 un2kei unks2 unk4tit unk2t3r 3unku un2n3a2d un3n2e uno4r un2os 1unr uns2 2uns. un3se 1unsi un3sk un3sp unst1r 1unt un3ta unte4ri un3tr unt3s 2untu u1nü unvol2 unvoll3 1unw 2unz 2uo u1o2b u3of u1op u1or u3or. u3or3c u3ors u1os. uote2 u1pa u1pe2 uper1 up2fa upf2e upf1i u1pfl u3pi up2pl up2pr u1pr upt3a2 upt3erg upt1o u1q 2ur. u1ra u2rab u3raba ura2be u2r3a2m u2r1ana ur2anb u2r1ang ur2anh u2r1an5s u2rar ur3a4ren u2r3att u2r1au 2u1rä ur1än ur3b2a urch1 ur3d2i ur1eff u2rele ure4n u4r1ep ur1erh ur1erw 2urf urf3t ur2gri urgs2 uri2c ur1im ur1ini ur3ins ur1int 1urlau 4u1ro u3rol u1rö ur3p 2urr ur2san ur2sau ur2ser urst4r ur4sw urs2ze urt2 u3ru ur2za ur2zä ur2zi ur2zo ur2z1w 2us u4saf us4ann us5art u1sä u6schent usch5wer u2s1ec u2s1ei u3seid u3sep use1ra u2serp u2s1ese usi3er. usi5ers. us3kl u4sko usmas2 usma5sse u1so us3oc u3soh u3sol u2s1op us1ou u1sö u1sp us3part u2s1pas u2spat us1pe u3s2pek us1pic u3s2piz u2spo us2por u2spu u4s3sel us2sen us5sende us6seni ussenma7s us2ser us4serf uss5erfa us2sez u3ssig us2sof u2stab ust3abe u3stal us2tat us2ten us2ter us2th ust2in u3stis u2s1tor u2strä u4strit u3s4trop u2s1tur u2sty u1su us2ur 2uß u2ß1u 2u1t u3ta. ut1alt ut3a2m u2t1ap u2t1ar u2tär u3te. ut1eg ute4ge ut1ei. ut1eie ute2n1 u3ten. u2tent uter4er u4t3ersa u3t2es u3t2et u2tev u4t1ex utfi4 ut2he u2thi u2t3ho u2thu utmas2 utma5sse u3to. uto4ber uto3c u3tom ut1opf u2tops ut4or ut3rea ut3rü ut3s2a ut2s1ä ut4schl ut4schm ut4schö ut1sp ut2spa ut3te ut3t4l utt1s2 utu4re utu5ru utz3eng ut2z1in ut2zo ut2z1w 2u1u2 u1ü2 u1v4 u2ve. uve3rä u1w 2u1x ux2e ux2o ux3t u1ya 2u1z uz1we uz3z4 1üb 2übc 2übd übe2 über3 üb3l üb3r 2üc ü1che üch3l üch4s1c ücht4e ück1er ück3eri ück4spe üd3a4 ü3den. üden4g ü3d2ens üd1o4 üdö4 üd3r üd3s2 üdsa1 üd3t4 üdwe2 ü2f1a ü2f1ei üfer2 ü2f1erg üf2fl ü2f1i üf3l üf2to ü1g üge6leis ü2g3l ü2gn üg3s üh1a ü1he ü2h1ei ü2h1eng ü2h1erk ü2h1erz üh1i ühl1ac ühl2e üh3mo üh3ne üh3r2e ühr3ei. üh1ro ühr3ta ühs2 üh3stu üh3t üh4th ü1hu üh1w ü1k2 2ül ül1a ül2c ül4e ül2la2 ül2l1ei ül2lo ül2lö ü1lu ü2ment ü2n1a ün2da ün2dr ünen3 ün2fa ün2f1ei ün2fli ün2fr ün2g3l ünt2 ü1nu ün2za ü1pe ü1pi üp2pl ür1a ü2r1ei ür2fl ür2fr ür4g3en4g ü1r2o1 ür4ster ürt2h üs2a üs4c ü2schl ü5se üse3h üse3l ü1sp üs4s1c üss2e ü4s3sel üs4st üs2su üs4t ü2sta üste3ne ü2str ü1su ü1ß 2üt ü1ta ü2t1al ü1te ü1ti üt3r üt2s1 üt2tr ü1tu ü1v ü1z 2v1ab va1c val2s 2vang 2varb vas2 v4at va2t3a4 va2tei va2t3h vatik2 va4t1in vati8ons. 
va2t3r vat3s4 va2t1u vat3z 2v1au vä1 2v1b 2v1d 1ve2 ve3ar ve3b ve3c ve3d ve3g ve3h ve4i veit4 veits1 ve3la ve4l1au ve3le ve3li ve3lo ve3ma 2ve3mu ve3nal ven2c ve3ne venen4d ve3ni ve3nö ve3o ver1 ver3a ve3rad ve3rand ve3ras ver3b2 verd2 vere2 ve4rek verf4 verg4 vergas6 ve3ri ve4rin ver3k vermas8sen ver3sta vert2 ver5te ver3u ve3s 2vesc 2vese ve4sh ve4s1p ves4t ve3ta vete1 ve3tr 2veü ve3v ve3x2 2v1f4 2v1g 2v1h vi3ar vi4a3t vi2c vie2h3a vi2el vi2er vie4rec vie2w1 vig2 2vii vi2l1a vi4leh vi2l1in 2v1i2m vima2 vi4na vin2s 2v1int vi3sa vise4 vi3s2o vi2sp vis2u 2v1k 2v1l2 2v1m 2v1n 2v1ob vo3ga vo2gu 3vol vollen4 vol6l5end vol2li 2v1op vo2r1 vor3a vor3d vor3e vor3g vo3ri vo5rig vormen4 vorö4 3voy 2v1p v2r 2v3ra v3re v4ree 2v3ro 2v1s vs2e v3s2z 2v1t vu2et 2vumf 2v1v 2v1w 2v1z w2a 1waa wab2bl wa3che wach6stu wach4t4r waffe2 waffel3 1wag wa5ge wa2g3n wa3go 1wah wahl5ent wah4ler wah2li wai2b 1wal 2walb wal4da wa2les 2walm wal2ta wal2to walt4st wa3na wang4s wa2p 1war2e ware1i wart4e 1was wa3sa wa4scha wa3schi wa3sh wass4e w2ä 1wäh 1wäl wäm3 2wäng 1wäs3 wä5sc wä4ss 2w1b2 wbu2 2w1c 2w1d we2a we2ba 4webeb we2bl web3s we2e4 weed3 we2fl 1weg we2g1a we2g3l we2g3r weg1s2 1weh we2i wei4bl 2weie weik4 weis4s3p wei4tr weit1s wel6schl wel6schr wel2t1 wel4t3a4 wel6t5en6d wel4tr wen3a4 we3ni wen4k3ri we2r3a wer2bl 1werbu werd2 5werdens 1werdu werer2 wer2fl wer4gel we4r3io 1werk. wer2ka 1werke wer2kl wer2ku we2rö wer4sta wer2ta wer6t5erm wer2to 1werts 1wese wesen4s3 we2s1p wes2t we2st1a we4st3ei we4steu we4sti we2st1o2 we2stö we2st3r we4stu 1wet wet2s wett3s 2w1ey 2w1g 2w3h 1wid wi2e wie3l wien2e wie4st wik2 1wil wim2ma wim4m3u win4d3ec win2dr win2e 2wing win8n7ersc win4num 1wi4r wi3s2e wi2sp 1wiss wi3st wi3th 1witzl 2w1k 2w1l 2w1m 2wn wn3s 1wo1c wo2cha woche4 1woh woh2le 1wolf wolf2s1 wol4la wol4ler wor3a wo2r3i wor2t3r wo4r3u wot2 1wöc wört2h 2w1p w2r w3ro 2w1s w3s2k 2w1t wti2 w2u 1wuc wuch4sc wuls2 wun2s 4wur. wur2fa 1wurst wus4 1wu2t1 1wüh wüs4 2w1w x1a 1xa. 2xa2b 1x2ad 1xae xa1fl 1x2ag xa2m x2anz 1x2as xau3 xaus2 2x1b 2xc x1ce x1ch x1cl 4x1d 1xe x1e4g 2xek xe2l xe3lei x1em 3x2em. x2en xen3s2 x2er. x2ere xers2 3xes 2x3eu 2x1f 2x1g 2x1h xib4 xi1c xich2 xide2 xi2d1em x1i2do xie3l xi3g xil1 xil2a xi2lo xi2lu xin3s2 x2is xi2s1e xi2s1o2 xis5s xi2su x1i2tu x1j 2x1k2 2x2l2 x3lä x3le 2x1m 2x1n x1or 4x1p xpor6ter x1q 2x1r 2x3s2 4x1t x2t1a x3ta. x3t2as xt1ä x2tän x2t1e2d x2t1ei x2tent x2t1er2f x2t3ev xtfi4 x2t1il2l xtra3b4 x2t3ran xt1s2 xt1u x3t2ur 1xu xu1a x1u2n xu2s 2xv 2x1w 2xy 3xy. 3xys x1z 2y1ab 1yac y1al. y1a2m yan2g y1ank y1ät y1b y1c2 y2chi y3chis ych3n y1d4 y1e y2ef yen4n y2ere yes2 y2es. ye4st ye2th y1f2 y1g ygi2 ygie5 yg2l y1h yhr2 y1i4 y1j y1k2 yke3n yk3s2 y1l y2l3a2m yl4ante yl3c y4le. yli4n yloni1 y2l1u yma2t ym3p2 ympi1 y2n1o yno4d ynt2 y1of yom2 yon4i y1ont y1os y1ou y1p ypa2 yp3an ype2 y2pf y3ph y2p1in ypo3 y4p3s y1r y3r2e y3ri yri2a yri1e y3r4o yrr2 y1s ys2an ys2c yse1 y3s2h y4s3l ysme3 ys2pa yst4 y2s1u2 y3s2z y1t2 y2te. y2tes y3to yu2r yure3 y1v y1w y1y y1z2 za2 2z3ab zab3l za3cha za3chä z1ad 2z1af za3ge za3gr 3zah 2z3ak zale3 za3li 2z1all 2z1am z1an za3ne 2z3anf 3zani 2z3anl za3no za3ra 2zarb 2zarc za3re za3ri z1arm za3ro z1arti zar2tr 2z1arz z1as zast4 2z3at3 3zaub z1au2f z3aug 3zaun zä2 2z1äc 3z2äh 2z1äm z1ärg z1ärm 4z3b4 zbü1b zbübe3 2z3c 2z3d2 zdan2 zdä1 zeau3 zeaus4 2z1e2ben 2zecho 2z1eck ze1e 2z1eff zeik4 zei3la zeile4 2z1ein zeinbus6 zei3s2 zeist4 zei2t1a zeit5end zei4t3er zei2tr ze2l1a2 ze2len ze2l1er ze2l1in zell2a zel4leh zel4li4n zel3sz zel3t2h zelu2 2z1emp 5zen. 
ze4n3ac zen3n ze2no zens2e zen4sem 3zent zent3s zen4zer z2er. ze2r3a ze2re2b 2z1ergä 4z3ergeb z3erhal 2zerhö zerin4t zerk2 z2erl. 2zerlö z2ern zer4neb zer4n3ei 2z1erq zers2 2z1ersa 4z3erste zert1a4 zer4t3ag zert4an zer6tere zer4tin zer6trau 4zerwei 2z1erz 3z2erza ze2s zes1e zes1i ze3sku zessen4 zes6s5end zes2sp zes1tr ze2ß1 ze2tr 2zetts 2z1ex 2z1f4 zfä4s3 2z1g2 zger2a 2z1h z2hen zhir3 3zi. zi3alo zi3ar zid3r zi1erh zi1es. 3zig zil2e z2imm 2z1imp zin2e zin4er 2z1inf z1inh zin1it zin2sa zin4ser 4zinsuf z1inv zi2o3 zi3op zirk2 zirk6s zi3s2z zit2h 2z1j 2z3k4 2z1l2 2z1m2 zme2e 2z3n4 2z1ob 2z1of zo2gl 2z1oh 3zol zon4ter zo2o 2zope z1or zo2ri zor4ne 2z1osz 2z3ot 2zö2f z1öl 2zön 2z3p4 2z1q 2z3r2 4z1s2 z3sa z3sh z3sk z3str z3sz 2z1t z2t1au z4tehe zte3str z3t2her zt3ho zt1ins zt3rec zt3s2 zu3a zub4 zu4c zud4 zudi4 zu2el zu3f4 zu2g1ar zu4gent zu3gl zugs1t zug4ste zug1un 2z1uhr zuh2u zu1i zu3k 2z1um. zumen2 2zumf 2zumg 2zuml 2z1ums zun2e 2zunt zup2fi zu3r2a z1urk 2z1url 2z1urs 2z1urt zu3s2 zu3t2 zuz2 2züb zür1c 2z1v zw2 z1wac 4zwah zwan2d1 z2wang z1war 2zwas 4zwäl 2zweg z2weig z1wei3s 2z1wel 2z1wen 2z1wer z2werg 2z1wes 2zwet 4zwir z2wit 2z1wo z1wör z1wur 2z1wü 4z1z z3z4a zze3s z3z2o zz2ö", + ["compression"]="zlib", + ["data"]="xÚL½[Žä¸\18¦¹\21_A\2ºDœÚŽä¢$¦n~(1\"Ò7sº\27\5L×\0ó0À Ÿê-66ÿ÷Óë \31DãýN£\25i4ýèúª»ýèúfÅe.WMòµé\9Yë€ù¶¶èk{…\8¼\26ânÐ\2®ö¬\4÷f ç}\8ûûbK›ˆ½Oön¦µ¸ÚÓîé¼\26û“ÕÞžñ\7ð¬\31v^ÝäÀk\22x8EŠ\21µI‹k˜Ú­éîXêÓ ½\26Wî|?Ãya©1¯Žº]-ùäfÀ¬G\ +Σs͵+ŸÏûlß÷ó\ +ÔèKß\127n\13æE×|ÿ™åèC\19Ro˜º\2è£>¤©±;½OÃS;5êÃþ˜B\13hÏF½Òçö\12xçóí¤Sûü¯S>jü}V¼¡ë[Ì:U\0õà\14L[q^áÇ\11ªFC¨»\21PEÌ6î?ìbÐ\7Ũ6„´uÍ\11žï\0149U‘!†xG½\29€ê\17\0i¤ ï¿+µ!ôÁÕ\8³K\8±¡%!¶{\7ØÕ†w[4à\19–ú|8ü¬.;/54,N´Ö=Yl͆k'×½\29\26º\"ìoƒ;@\21\26\26{ìŽT3AÂ#\0189ýѧpŸÉ7Õ\3å§vèö\23L\0052þ†v3[B\ +Í\0290¶ör\4Æ]\25j ãþ1µ-Åëúñ\127ØÃnWûT}e9+šxÖÌ°à\25¦ÉÖ\6ƒZs,x®„Ls3µ\6Äú\8¬µðuÉküþ³U¬1Ô\0173µû\1lÏK\13\27c»2ʆ;\28Çõû÷\31ç¿N‡ücßÿë8™\15ãIÞÙëaê¨ÛÄŠœÂÛN\15M¡ÙƒAb\5M¯©+Hº)\\6\27á‚)Ÿ,Š¹\27½Ðgá\1ƒ\\_x3 shÆ\0H\13£ 'k}\14ïé-îx\28í\22TƒØ)rÜ(0n];\5àÛ&“9\16÷\6}i1÷Ú\14ú.jÜdž¤ûy$Ù—®îßVȹ=j€V}Tç-š‚{Þ°$ª¼DÆdYÝÃËñ`V/Œû’1×ÐÒ#k¬GÌöƒn]\15­XfÄz4\31L!õ¬\22­r[µh™ËZH\3&™Ê¾v€W\16=½\5Z°i²\19\18ö?„TÖÿÚ>mkèæ-2:\27kaÿþó|Ӏ푴ûÁòßúnóÄlÁ{^‰ìÚ\31FñGÏ8\28szS¬Ã=|<4—\4<¾Gºj›'},Ø2góízïÖ\9Ûå\12Îë-ìCqï\127\12¡ØÞS±°\30ç¡\\\31BF,©G—\26£Ó\7kê1c\28\11Ãü8J_\24]§®ô]\ +}c·\22âÜ\1£°;pea\3¨_ŠuGì¨5þŽûh‰|4›á÷o¡iõIÊoÆïx\25G\9Ü5æﶵ§\2ÎÎ\25Ÿ\29¥žÝ›Ö† z¹5xsœ;Õ–yüëüÃkê¿ö\19Gð®$´ÌÎx\6Oûs¦§FŽü\22oFç£s¶\15òµ%†)ü±‚ˆN\6ú¼Ž–ž<ó}v-¯²§^Úâ\22¶èKëTó÷òÌ‘9b6àƒ‹*^‘¡¼âPajIýxÁÓa6/va•³3w\5\31d{ÔšnWêÎÆÀ=#|Ö\4\16ÏE×\\ÙK65Ó \11\13©Œ¼*o|ÙøòÆN’÷¦+`°\25lN˜žÅÙé\18k\"k·\"ÿœ\26m+Àƒí\\N\ +Ì&-²§Ô÷ß=µþ\16j¢á\31\7Èñ³›Û\21@»>Ãôf\16[°â§\23ô§¦¶Mæ¬zó\11,ñ«#Õ3D#«§×Ñ“Š\\·ºën]¥ý¸\19ö\00825‡dÒ­•w½\11ÔB\4\29Û­bjfkN\11håt\29»X×™¸éš.+£ï?eôʵWd-Â<\2ðík‘\0077•4à\19T`\0314A°Ç•\4aÅ«\22ʨ»FăÒRÀËõ\20h\2óUQÙ[ðýþ“_6?)b¼52\"Ö\9c&(\0189î§ó‹\23.a•­îW*ý¶vSq©ÚJ·R˜7\28HÆhÇ¡\2ˆðû\6PÃ{!»“Üv’\28x\31$¨ÑQÔ*UT£\ +iûT2×’T\0H\\iµ+u \2W¼\17¥ ¯Vk„ «sØ\19\7$¦ÒRl•\19Vå\"ÿO¬¿°=e»Ó‘w†³º‡\13û¬\26Ýg†±\18¢Yq0j„ôöSKkE\8­DT1]˪\28_pyAÚ-‹h“OÍû<:ut†%ÛÍæn\127õ†\16ÛQ\ 
+?ê¾Àä\24ä$P\28gv¶\23\14Mß4a\17f9€\127hæíeÓTb¡ÿ\31ŽÝ®w\8\18\23š]Š\8ÙRÌßnå‡k[}\18ƒÚj:à¿v©EJ\8l¹-@x\1_æÚ}i倫kI½Ò&êuì­óðäÿ¿d\12r\13r\20­Ï²’©v\12]l1µæ»¡é6A=T¢\11šNÄÙ£ÄÖ&Þ*Î\7³o`Í\12\26½ª\27œ%Í-ä¬ÀHnõ\16\\ÿŠ‹¨eOÃS8KÙhyàëi$ÇûIÌ\17cVp3¨ýƒÝ~{놟x\31Ôõ`í\14\15Œ\127c4\")å/\12}RñÄl\25œ3sQ\9„÷åƒw\21„\5\2Ã\27ê\1C%ÔaÁ`]É\12ø\30\13æƒU¡Íƒ•¢•+ã\19ï¯[72?ë±[lªeÍ\8þ\25Ûî²\15ELÔ±¢jõ\24ö‹T!­v‘E=®¤\24WGf±ŒC™n#l\9&=,\16ŠÃ!,D%§ÏF5Í\14V›2\19ïD\9r3[ÇëÍ\9ëK\11P\0257€jRI“\18MF¨\19ÃhljŠL5ý.3ÙÄ\7ÆA&\28ƒ\1˜±žÜ¨\22àT`C\5¦\14L91\22b™Î’â\4q\2cñm\30@ñN\2/\\9\5x¶\13\9L7\9½«ÿ§\8ά'Ò·ŒQ;\25¥MBû°¿L9L…ÜO\0Ji“Æm:µéÉ~vÎ\24ä5•ÂßÏÇáË…âQ­/ì\"Ô”KÞ™ÒBGš5¢x´©u—í”!¼“\20«š™@³è_™+\27,{Ÿq’`ã8síú*˜É8·;#6×;#;ï¢ô\2P[UÕÍ \28ÐÏI\17Æ;³w´Ù4¾\28êÁÆNJH^ç³XæÄô™I6_ÕI½2\25Cæ_·.vT0\14ÚÔezC\20Ý£ÊÆ\17\3þ¦k\"\24&6K°©ô±Ö\22Ò\9ýh~h\31\14¶OØY¯u\23ÉÍ\3ÜÄÓx;jn’ü<Èö\23²hÝ\13\1\ +\6ðª\1{ý\26r*)„µ×±Å±Nn,Ga•\28L$7_Ôíî~[oç^ÆÒ\20]½ÆýC®\5\\«d‹»j¨VŠ\21l, ½6\0217}§Ò…²VÕÏ%6+<žÜ\"<Æ®x@Ñ­ëäÞ]ã\27G0Xþ\21ÒùÃ\25\29ôNÖÒC¦oÖ£žÜZ±›\6éî@°ÎZ\127ÿ>Iõ\8¬ÅõAM›s¡1õ™=§Î̼¨/6Á•C;†Bk„Ú{#\26\29\\²»Êt\4Q\17Yüaq\0308M¾¬0¶\16š\21¤êZg\15¢‹ª²{\0192²íVá³õÉI\14±ž“\0278\26ÎY$‹øn™ââe6lu\27\29³\9{ÉÊ^Qol^bñ™I[(ôÈ\6=\"—',l™ÚÛlÞó76ýf‹?l’@²CcºÅærÁ‚ €M=´Õ›(T\28[Wœ®B\13G\0060=´9f+ÖLã¹m¹vPfšltÙö\16\25gó`cÚ(«õ9„\16ÃõƒØÞú\1%û+8Gz~·ÛÙ\20\4%À‚Û0öî‡MõÂ.ú\16S4–\26´3’;ó\25?áÈÝñ6›Å~\18A<¸*±3Ÿvèû]„3öá\7ö¡#Ë¡\21÷\5Ô†…[\20ß—AcfBá§3\27ÞὋåíщK’U3Ìù‘÷P˜\7V•xÖ;Õ\12Øë\16]\\ˆ#u\ +í\2•*KêíÖ\ +V\29!%wÑV2à*\21\127¬³&Ñ>1­wÍÿF&\19b¯'#œ½ÊÔ-–Õ\22cºâÁ–³OÎwr«§Hu& ò0†ß…\16ðJö‚’£DVã^H]ÔG#l¥*G³Sp\29Å™w;+XQiLÔäç8\0<½CþîlÑ{½@“\3]áfq%\23S\25òäÔA1Ké‹éHCfɾ˜SÛI\0013X“i™\21Pù»æ\0À´‰\28læ{u\12ôú\1mº›%’ƒ-{WÊ\11\\¶ŸJ\1 q§™”]«Ú\25{\31ÝÁ²¢\27Øm÷³ztŽý°‹“\24Âj÷íå&^FƒŠs}ÿO\0035µ\1ìdÍÁU°÷ìÉqÑâK\\Žã)(Æ•\0Å\16\17aG5<_ðY¢=¨x 2ð†•L\30ô\4\"Hý\15MòÄJª{B\5ÂÝ!a7ˆö[mO¶§`‡gaê/›%e£½E9s.D1°¿š´O\2D'@(¤àªB-ÈkvÅL,(d¡QLC\25-ÛK*ÇåÊ®\7©\7J‘b\1«#¹üÀ´ Z\26+Wµ°cÉ\\ˆL7ˆiܨH\24gr{ixÁ\9XÖ³ªÈrÖ¬l`þå‚yÒ,u\16¹]ZDæ]vš\27Ã!§=ø3¯R»¸Øål\29\6\1žL®*\6ä€h¾š)“´‘q\26’š­vÂ\0264Þ%–µ\22\3{oªŽÞám\8ø\0\28L¤ä$\26yZ#Æ\3\22BnŠIÍì¡sÑeW’\3šž#Bw\0124fÒä§þb¨\0\23¼RºâîñfÍrÙ— sÓÕ”Ép±êØ„S•=®\16\0uò‚K\"žÒgçÔ¿Èöéã©ô\4\21¤§{î\9Ïsj\15SÍ|t,³1{yúìï,\23…ŠâEuÞY\30§\15•h\7\27\0127ˆ>\11e+¡l\25…µÑ\\êÉ+pF\"Ç—Bèy±eðˆgË\1Ó©&Bz¼\8glÍùœ\11«ˆ+&§Nçáš\28œÿ4'\8ö4æàrJQj¨.™Á'Ë\\v\24²óACÊ1ÀéZb¨Ù\11àµÖ\2Èæä®ð½ÄŽEƧ«sŠ\11Œ\5(O˜³T\0ØÃ\7yªé\21ˆ_CÓáIŒ–\19.×î*[Jm›»F¸–MBy¸ß\28áðI°(èó—‚¿ÿ\3ã)s‡Õ‚\21½|\16!2ªR?\\-S\29Úf!¨£·š‹9\"/pãeÜ#²Ó´R!~.Î\2™kâ\12Mã]ŒÒ‹&\21Ãfvï\2\29ÔnÒ\21ê”íuâøjH=“bf¯Öt­TßKÓZãN7iÒN[÷†çF\4\19j×Ñn\29,ÞÅ q\4\8ÉC•~ã)?NìiEé©ÒM2\15ZcÆùbÒív\5» 9å H\15ÖÕ\\Žf*ûº\26£gœ™x>õ¹Ø\13K/üSÐÅU \1ÄÊì\22׳bJ\\íÓ(Áð“$ONv\4\14âx³ºÊ\"ª™\8™*W^sY]š…Àc\1‡Ú˜ÕíYË…:f8»Ú\8+û€-7>ïÌB¸ø\9\13\0269gïmy¬LRæ1ü(`¶'•ÌíèJæqdCÏc‰x9âD±S\0Ç€ ²—]®½DsåºEpHþ)«8Ó8«ðGëƒÙ\\›&\23àÀ6‹º®\13„)\20ê5{KŽ¦}Ôu‘h&\28Ú¼C d§x´eQdí*7㔎ڜÝþ‡O¤äãCÀ\12E”Û‚J\12!Gò9\16Y;Äþf¥ñ\30mN6]Íó°ùHö¢s8Ui\0¬“\\p[Öšv©Ü ‹æv\26Ÿ\\ä\19âŠ{OP›Š¼b(0Egi2(s2•\27¨ï6_7_\16s\17Òe-š–rÌVÌ\20|±\25gãéì\22²Wæ/\0122yÖ¦´Œ°+!ìæƒîüè¸/Õbú`³ýP;>jr`¢}Â||r\15×Uâ\17¾j\22û\23Ýÿ\21K¤_\\\6ü‚\25ÿuF¡Ì_\15YTÜSô\14æ‘oß\127Š%’ÑËxëƒÌ\26\1šï?{e$\127ðü÷Ÿ2«\23ÜYÔÿØÎ?Jœh\31§«ï\"+pqÚê@\ +á\24\30×@Œ¡³YÅ\2‰>Pm…\19ÉǼ\2M1“½¨Ì¨µ†Ùºf\0195žHà³Vå49\5§œvpjSg¿“¤f%\12„j*\"ý’“›\19•Ý¸¥Û2¯î•yu­}.Ùªs\20K“R\6â\4\0N°\4Kåg\6L\21sƒ9I”=n6EÍ(ëräg÷\2ø‰­^J×-®3g^ß\127®\\r\11@ÿ©¡k°£Ôk­\31\13 Ð\17¤ ÈÍÇþªÈ†Ÿ\24åŽ\0126wÝ\6ªWÐNùH\0Œ~\4í0wçøfÏ©D™Ö\2¨\24¬˜sû’³Œéþª½ #\9\27¨ædqÚ—Ûk\28¡™©êÁèÀÍ|ÿù¨\31k\1É€˜\26•\127Ó\21©qíSÛ\0ö,ÀýÖ\ +xš&˜O\28e¼åtCR©k¡\22q–ÚAÙ\21-4§#\28\20PqC ×éÞN5ÄŠ¡óF¾\5×5\23×Ù¸(0€ÜO‡=\25hgy¶w›¤Qs§Ú0µ®ç\25Z¨kûÔÅ£¡>o.©\21©ƒŸ\8žú\5GÊ:K§¶çj³zد\20V:Öä\12 a\7(þä{z±˜S$åU²ñ¤\20Òúþóû?UI\0á@ÔÆ15O¼˜\16ª\13K÷²Ù+é\21mR×\11BXÀsÖ{³\7êúgfÚòŠPò==\28Wi\ 
+¾W‰ö*Þ—\11\4¸ÂÕ•íp\31gŽH\5Üì<\4úG;¦WNf–p¡ÏÀdPCnA\7\\N\23ss\14\27µÌ\27\4-©hš÷-²ÕŽEÄR†Wt“½p½\9¹]¯m‰rOc\11v\30ƒ¥\0gX0Ÿ¢x (ò¼Îî÷LË4‰…éå÷E#5þ\23Ýð?«ýÖwføzî†å¸Ïÿb\14Ê\2ïÒsó”\28:Êo®U律Âm=gû6¡\7\12£ÀR­6‹—PaÓwø ¦ƒÉ¹uï#Ó¶\127Yfàû\ +'fØÚ‚8\"ånrì5\25qÜdÇ@î>ùèÍ\11÷å¼\5Ø.kqRiÎPÈb§a»&\127oV¹\23ÓÓÛ\28h(\12).NDð,¦&€ªNB\19ñ=¬†Í‹Âs3ÙÔj)ð!Pý:¨¥öž[ï}è†#Ùd!·½6½¾\17ZïûÐ*…ö=ú®÷åSߧ,\18¹g\27ìû\\Qsñæ}3ˆÀêE9ÈP=\0022`˜É扷r­ƒ²­àæûà’C=h¢öaˆ-^Á^¬?\28i5¸èÕ0\18\1ÜQ#AA…B¬ì©‘P¹‚‹]k\9ßå˜[ ØHm|ÿdPÏŽs:*òy*`Á D%=\13\\Í\6Ñ\29ä4*:^Pt5¾x®”bP\\¢}\0\28ŒõA3\"lEØ\8\11’D;þ;uÝKÏìÍp¸‡v\14¥pOvÕÑ\20Èá\23@|[÷‚N‰0‘Cµ:\5/÷î~9áÕ*Šê\11i…óÓ¹eg×kqúMï¡\20ÿG\29´)ˆ3b­\14ŽmTË/\6L¤zÿÆL\27;æ\22â•m\1Z-Š¥9ÙOâ´1aˆûI(X\6‡QýÛ¬Ð9s'Ûklb#î«}\0170ƒ÷ >\"ÃÜÇz¡Ã\0ÌɨJǵ\4p‰£¥\25WÑ48\25vÍd\13{ô)\12Ž\17\7WÞv]Ä;T­x4\3E‹ÄècŠø€¦\21G<âp^Ð[T·eûì¹kö¼r\1ÚÛû:Ò¿2;›\16‡7ðéò\18Ô(EM1Uõç­ÿ\25–\22*¿¯X›+\29²*‡µkúw\13ØÚ©^jÙ~Wc¹>Äq] “µûÄÅ(‰õêˆ,\4B@˜\ +ˆƒ=5ÂDÙl¶»ýhÜ\26<\11Z¯WùÑ_k\20Îi\0\11™s\5#‡–Ü*¦¼%²\24>\2.Ǻž8\14ü;™\28 Œõ0§„4”:‹\28²«\8n¥¯Ô\25\27GFm¯µ|ë÷hƒ)‚\16c\127¼aD'hž\28ìk˜š\4GÅMn\127p Ú\31è5gÈŠ:â=\6âPÇÃ7·*ð`õ\29û@\\®F\8Úƒ\3\\ă\20\28Wªd8úþHp\24\0056”Y6\22…Ò\17>x ì*aì&È©“\13Rˆ¬³ƒ=OÎúd¯:ê/%ÿþ­1QE¿\127«¢Õ÷oOA‘lýƒûhÙÿ­®L­lIõm9EV‚ÄøË1Ûl4(E.Q\0ôÖXj\12£eô’w\23Îmßû7‹~\7g\18F\7ªö²ÇÑq¶\18ò c¸k9\"\0249ÅÚN7VMÖà&–H“ŽÙ±X\ +mb¥´)«ñ)ƒïRæi\2[ß5»Z\127S-K—j¢3gĵ3\31Ooþ§±»\15\13ûס¡ È5bФöôŽûF÷žõ]£ó.\2\ +\4\12TƒzÑ̙ؠƒÓ×Ú8ÖâZÉÏŒ\9ž{ñd½\23ìÈ{\2²h@ß`@Þ9ø¼#͈©æÞ;Dqî]…‡H’Û½ÕôºsAtkîšžwÓ2Ù\16î&\31IÅ\0ß;‹s+ñ¯úv\23ˆAÙ÷ûí^Ý\3†ðÔ½\22šº‹‚Q€H%}‡J\12¦ÄïšÃw8&Ù\22UQ{ÿÝÔÿÄX0´‡«»W*R6_\\™Üëð]S\0Q´? \31å>¨;×\18ÈÀŽ\5¸i>W°Cwqr)\23—DrAfÓÑ×J×Æ\24f\13:5a—5<\9ËHl\12ZS/ ž1D\30Öp*ðqC¦/=o\3§O˜M¦ÑUßÞ\6¶qL˘\12²2ÿ‡;\15D\20ƒît£Ô˜AþoÚ*ÑÁÈŽ\01529|ÀÔ¾& ü”¦ ífЮ5ø\28B&g³ƒÅ\1† ŽNqÅG\13§m²kÒ\16:«³@\ +ƒÈò;=\27XåÈ¥×C\1\20\6mÞjâ\9Å[\14r\8ˆ@î†.råÌ‚lAÉxÕ.ÉQ\8„«ÀH¦ëê²9|Š@Î_ê’½°*P\4\27™ˆ§ÅÅLUl‘ Ž}¾…\2\2*\"C\9\12aã‰\11Å2uk\8ÈKy\4\2Û!ìœq\13Þ­\9¤wê\26øRt\20Q”Û­@JÛ!qÄ ’Š\3±Á›Ùཞç§ÄyÄ–ª´H\21\14!AA\14àTªU¤9\4‡â\25\28(Ò]Œš|›0D»-9*\15yÝÊ{ÖÔÿc\25_1˜ÖÄh¯›Ÿ“Îß¿gÆZ\12\0\1qoGš\22\14ùÈ\\x\21÷\30\\W'¯“»Öç\18ÎDÔ;îw¿2b¨E\30\ +g\14\22ó£Ü<\23@¯À\12\16t‚¤DZ¾&\3‚ãC8\27Ïùš\17êÀÙNÄQ\13/ƒX\"¹2g\25à2‚çÛÉ$8¯7#%?\17%ßwÁýåáù‰ñUb&×\30r;¾`É·¢îÜËyY†2dy·\9Ê\8¬BEüâ\30t\8_,SÎÜd²D¦òÒq¨9\127\27&NR\0íU©§}\0121T¢\19‡zVúŠ!Œ-\24>zeÊ\14æÐ:\26\"DåÀ3-žh°”¢'ã\16Ë\3\1;c\1ç\15{‹ \7Š+“\9ñ?DÞ\4\8p2DMø†ò‘ž\29â^p{,xÇ„\15`.€-S£\28÷=¤âs\22pQ)ÖZ<ö7òf\4b\11#ª\0®-\0±Ä¦Ã|ˆ¢jžíã•Õ©þ‰Ü9(Œî‰Ú†…õ€–\9|Y¶\127,§#6¤Bü~ð\1Ë\16ß>d<¢~b,ÌËJd \28r\27V\19'ƒ_¯\14k³aP«2·ßvÅã\12bð\25Ä`ô¡vÑ'G¯äGß®öQ±‡¯-U\4DÛp¬”qhç è\11íáØÅ1È\15”ˆÀ/q\31#éÙ{j\17\16­s†À“©a\24ôƒÇ£2/\ +÷Ãèá¸Þ5ð\7'›\3¢\18ƒ\16äZ—¿\127'»8ÌR7<ÜsÿVV\"5µûûüaðùƒØ•nfóH],AÅá©Òp\16Q`&#±©j\14\7)³}ax\11\20\9W\25\16Êr£è-\16zgÏG׺r\15\22‡Ï´q€¤}ö2pô2øDvà\\Œg7¥Ç|äÂt\16Îïˆ}1Æ€‡ó¿¢óåÐE‰òf\23«…\3pS\30jØų­¡q\14Ƥí\5Ç:\\P=Ã%†e0Bº´(°³\1Uƒª•E\23\127\24(4#\0301\20©žáE\5æÑØáG \6+o\7†¼\25\15#:\19ì\28mB\12úH\6\7]—·‡íP]¥ò2\127”|>\12x‚<ä}(ø\29FP.\30\30\9Ô\11;PÞ“Í‹:¨=9!P?xƒ“?‚‡C.²S\5¦\23„Úæü©‘9Út‡}\24•h§D|pø\12\28\12\14¿4é\1271õ\21ô¼‰*ìnœý-XíJè ?\4„¡[\1ŽHdNØ›Iq­\6\1;qÅ\16\18º:\15h¿ÐòæäÇ×ðU÷\7Š²\9ÑËT«°_$\18·¡Ðý ))\16³ÅÎ\29{°g²ùA´Sá¼›\11…À\23àÅw@\\\13\7ô6\14î³\4\25Ó`z6 õC2‘ä*UKS\17XÍ¡§°¾î\22ìa¸¡/$Ú®†q?…ÏŽú\1A‘P\14º0\9÷=càÈ>“\13çóx\30$…ºU´\26\7X\6\7‘}L\0188¬%–*UNP\5‘\\YE›‡Í\7&ÒÒÁ§‹ÂÞ¸¹ÃÇ\3ù¼\2\15\2,\13C*׆üK³{+½¨Ã]Ù@¸\4\31\17É£qÁå!qx¿ÏoÂ\30‘8ÚPƒÅ€‚Ÿ\ +\7?\21\14µ\31\9‡×#aœZe¡ÈÀ\25 \17,(D$­¢àǼǼá\30߉¶XôD^ä®ìj+\7\3X\25\30\13\3<à-\12žw¢éeøe­ü)b\8¾¥\14…ž\127ÁæáX$\8¼~犖$C\17ö'˃Ì4.AèD\29\16Œ\"Ï\2\2\30\6ÍårO5D¨á$ñ¯æÖ\6•Å2‘©\1–YÓ›fÈ‚{G\0q\6™.\18Y«`\22-„±%¨ÐàÞ6\31\22P$A\3B|3@L\9Î\14î*Àmí8…T\2—¹!ì.»Ý;ÍiMNÚ p'®ÈMìT\1Í\15äòp9¬'ž†\\\6\3qyP\25Ì{ á\22|GZÙ'”X‹\19\0306é/ñ;<Š\ 
+¥cyG–J>Ù®§\28n/g¢²6®=³B\27‘Hù`‚W™e\"}ò¼ú}É\20\"å\8R\6ˆ`äR\"Œ>„\0$üÈ`´ò‹\0005–‰\12j@|’$l¢Š,š$”ÇÆaô‰úýÇÞøŒÌ4¬J\17µ\127\24j‹\13VN£h‹3fŽi¨°7#\19¦\25é@ž!\23p¬\5¨´Ñg;\1%Lò\18¯ˆù °\11\1Ï0fJ\17‡%+\17+õõ4€›@ž¼%\22žÃbœ7•kßb¡\127'N”Ä\20Y¨'°\ +Ò÷aòPOéðM}à}ïŠïËÁäšQªìt†ô~\29&\26ÅsÞ0W\29S¶™½¦fD%\29Ä|’!F$ðÔW!âþ\21Û3X\14\"\23FêôYdŸ²\0\3ίY\6Ú('²A\28T‹c\21u\ +pÖœ\15Èô³Æ0£\27ˆ˜okíLx#(`ü9¯µÑÄLêfcš±º´Á»Â\9I§0[š\12àjú^P.³)‚ï‚4Â'´Á2~ªI:JJQŒ\6n\25#8»\7gOá\25Ò\5ó°ù°y¹‰Œ©On鯿\9`Ï\13\5Ïqv\27\"\24B¨P¾±î™Ã‘Ó\2y\8\15\11´}“¯€¯1F-\"Xö-86(„\2\16nFž‰\28‡âI'#,ªÌA\00724W€-38Ž\20ÕLÂœŠ0\25\23Ä©,\0059?\29¸\18±Ö\28²'‡å\2—Mµ\12^Ô™,]<:\15-ê?B\\\127P͵´‡Ë}ühãÚЀÖGCrq.€çØ\26˜7•e§àõI¶¢¢8a\8q\19¹°Úã\0#D\16]ܸēßî\14iyx‡‹‘Ž¢žÜ\26ÞÇ\17\6mL`pmö‘\22ïíÈ.ex:\13HWN.b¥‰—}\23Ì÷¥Kû\9¹°¢)õpì\19±\18CK‡4¶:á»Ïõ¹ŸtÎŽ¢%u™Ó±\127Ä\7ÄdˆöM ô\8\17S#<Š‚1Ö||/¼@±|º5E\20:Ä“#n\11û¸åÆ\5Ñ/O•\7U¨!¦\13ö\2Ü'—{¨)3\11t\27ý\0L\1CñjŒˆ¢‘ÀOUJíçä¡\ +\11û-b\28˜»ÍÓæ/Lö¹¥Ý‰|`£,íݼWçøFÐo\6\11œˆqù© <œ€-B¹°:˜Lkç£Ì°6(|AÍ\20\27“Ÿ‚+\12º†ÃÈ¥€zs\16Xº¼ôFSÅQÂ\30¯(Éy±¶Ëënƒ‡s>Mb€zš°ö‘\\\16Y\14ëÀ»$Šõ\ +ûÄÇgµ\1%2˜³Mˆ¢õ\13Ù­\31.Ì\19ÜG êcÏ÷©ø³¨jŽB\9Ý~P\0076íÚ;ø\15ÇÝA.–ɺ\28ëáHlÇåѶo×Zo‚xLÅ\127)`}\5{5¢Z¡ìükUÐÃêÍwõ=[à\0139ê†B\17!Q Hç•ÍVõ\28©\27R\25²¶«}6›tÆT\30P¿,¬Ú5º\20\16Tcâve­Q\20\0185ÊI\12KX\23—Þ¬ÌÆUÝžég„_äjŸãD¸à|<<\14…rV´x·«~…zäxάÂñyÛmõ˜ƒªeRä±»Á\28æøt™¼8Ž!K$:By©\29Ö\11Ò^ Ô\0¦&c†\18HCC@òdä5Œ\\¬áÕ§Yø9¿«ró.ï\14€Å“*»`—\4–¢g\"TÈÓ(ä—«­U°>¡µx¬ÍîÁ\ +Ü\24ãÍ„ÎÖQ­Zhp§m\0273½‘¯fú?\2xªÖ&&“í®ÙÌl\28\1276Z(ï\8BÓap¥<¢%À\17p_¢Šm‘Þ\11[Ù_¯´ÃFˆŠRºM”q¶+8•Ø~•þPU6¿¿TÈÙÙ|$ü\9\11\27b¸a£™\27\2\0·`LÝ\6¿­–©\1mvZ'Ô=Ú\127¶]-Ý;FÚ~ªÅÞš+TЮػß÷XÎPž»VïÝ\17/û]—sÍÁÅ!‹\18v?Ó\8»…\29HÁ\21\2Š5UصÒÕ‚½èŒüãVT5²@ö¡å¬N°Pq‚ \13@ý(ð\9¨/wgZî_T$„†–\0ôv\31&­ª\3]´³B\29¿¤Ú‰PVû®f *ŒÐ¤1É^\22¾òѯ\12'ç—æ\2–W$ÖýÎaëAWxªÚ?½àù‚\23PñòËýY²yº&gÉÄMoTï}„µÚy\12ÎÙ’FŠE\1GÁy\23$\6;&=›\1\28Z~Ĥs¦Šlüü\"”W\4p\30?lºkŒ+vpÅÎ+P:ïõ\ +”ªÅv+pw¬ƒ\ +‚\15\0266eì\31\20¸Ô^D{a3Ñ'¸zÜA\0;·ñÁO²\21Â\5!^¥cŽÇ+Ur\0245EýŸ£_öúdíòv›Ùó»ö´;=],û\4(Tõî7La\127㬮¸…Ð*Ð8Sæ4‘ù@™³çânóI\24’®¾ê0SZxv_7\5ïG»Ir{pð©±!ÜOÿ˜.TÉOøXå\\\22qA…ž\11…iwP\23ç/Š#b\27×Ú9lw\2³'Ñ\2\27ÃŽXRå«-æ o½q1½+ö†=CÆò\28\\ÁŸfQö_dô,{z±LÿXXÁ@\31¡ü¿J¯†0F\21@ÑšÅ\3Ô2>\21²~˜ƒÍÑæl³„&›0«Æä>•\15Ç£¥\0pzs0dö6O›šÜ‡ùy\14ÙÃÑdçõA~ܕp`bA>Yk\17¢Ux :\26\30\\>À\25\31\26óGíh(\27\12~º\18\30è³Ý\4\30ÉÃÃÜ\127õï[H>\"ã\28\30s\16ày°&CBõ\21 ‚þN.µâlž\8^\20\9ÜW¡\27\ +/(Þĵº3b;M/üŸP‰`×æø\15;\30%(9:[’BXø\ +z\7q½´TRò÷\127\28\21âOq½\21$®»I’{ç\6\6Måh,•³1…}Ú|bBX$ß\127\27Ð>kzÕT¬]U\14±Rß²ïðŠG>\8˜\9\12LDîCƒ\31\0ˆïàºZfÑB\23¸l@£-c‰0$ñ\2¢2xÞÕC)´\3ômyãŽÞ^qZ Í„h\14Þ\17zËO)_W«x\7(Ôd\25Ú¸àßd:6™ø\2@7¥BƒùÔã\26‹ÇõŠö$Ÿý翸Ä#¹ñt*«àõ^Þ\18åk\9-É ®SAÇé…“WG2\14&fiÝY\0Ó8š§åŒŽü˜½òû\0o¦Ñ]Œš\29®‡¿ÿN·ÆÒT;î)˜?O“È\22ðAšXc\24-ï\3ŠPrÎ6=Ó§'÷oæ6›@\13¤QÍ£Å,‹ÀZ\12\8ó!OB¬\31\127\26cµ×8j¯ò“´'ÇŒ­\3·Ã¾%ÊÃi/\15\12„E*$iz¿‚Ï+ÒK_\16–Ò\25Bíì÷É3ä2\3\13äyXí=>åѽÇ}„\1\8\22€g“)äûﶧ¿?YYï@¶Y6ïÖ—WÁ⿼UP-¹ág0OŸ°¿ä\127\5QS»—\20ìYE\1m8ër\6w²¡\3,\15¬Lî ¬ÓG¢'â–¨¿9lÂ\2\"\24Œ´\0ü€öÍ\25\26æôR>‹d!¾\31L£\19ª‘S\24ë\21\8§\24wByç\28Έܪr3*8«¸Þ,{\16]Mw\21à\"þ¤Ge9«þ\\~ÙAüvÕlR9\27>T 6AxÖ>²gÄØÉ9‚õŽîòÚ\7”‘o·\20ÅõBhÑߧ|xÐa^ßÏ÷&jq2ûOv\29Ee\18[\7\27ÄÃdQ\3]ß2\18–FFÈ¡sg_h\21\17ê“…ÖðÚßÀ­x10¶\24'a\1½¼)Ï\15çâ®à>M9oÇ;'IåB›\31nóT+\31\12pÊ}xÈ.\15P\13µ€ë—»\22Þæ4oÃkC…Ëûû?¯ÃãŠùr±\25Ò\ +™<(ãºñ\15\18ºáBU]¸Ê^âc‰ÖÏ[Âe6æ2©\14ä˜@S¿Y‘X8ðð\21\7:\9V\3jæÛŽ+2ç/Ë&\8Ý3//cŠö\2SÔÄmQE\26.kÇ\12hàR\23]h·ø¤J&ô\5Ôì\11MR*ÍG*(\23`/@›\0½.\8*(Ê\6\20\21’Þ±æ\18'ÙA_¢v $t¿]V;éôªøå\19oUÍ1žÎ\18‚Šiz=9¼\11~\25\25¬m d4WGM˜\\\7\4î\5'*™­nò†Nv\31˜ÊÍ@žÌ¡æ¸¶ä\0169hË\11©\22Jr}šÌ©DÃe&æj\19Ê&o¾ò@\11\0ÄiÖÀø\25¥2ç…—¢¡Î#\20e\0›JJô\25Y&a™\14êD/ä\ +\4ág{˜ä\127úÎ\12:3{Hóõî«Kù}‘/Ͷö×PqÅ÷\17|ÚýACÛà“µðÆ;ÛÐ|¨„\15“cì­ðÓͧoòd¢=5|2Õ?}õÉéÛgôÕÍ'Dŧoà¾\20öÕ 
B1Ô_¬Û/£“ê‹]ò«ô\29²CŠ\4\29üPøºHT_LÝ/ë\18\11_¾\30øÒ‚øÅÉ\24bî¡Òà6O6‰úY¡ªI.åút\29Ð\12ðý\23oóÿâÑë_¼yÿkŠ5Ö\25c•±f»\15Œ\7FÂ81.‡”ð\15Œ§¬ÿ›¬þwµaì\24Šÿ\127+e%>adUŽE$uD×òmD™1þoç­\25ÅóŒV\21Ü\11\"«ý6\18\0299ê\2\27w1\20\\‰Äe%WaÔ‘«ƒ\17~žÂV®®\5VÍßѯNE$Øò\6Û@\4.’‰\1‘N\8ÏGþ±lXxÕˆBÙÞ&Õñkuî„”\25~+uÚ\31¶S1î¹°\127Ú¿y:\17Š’%ù4]ºÓ`!¾f´f6û=©üÙv\0063}tѲ\11%˜£Õb&GÔI\127Ö~G\6\31Dû\18­‹`ÐÈ{TL®$2)-‚ïŸ#X\17l¢Ê±ø´B\17ú\26óN-\6õ\8Â`ÊK\29\26º+Ë£†4¶î\\õ›ß&ÈÔR\28¹\8”ɹÝèË\19ùq\17àÈ\28E¶†31Ù\16‹7}ÃY²|\23pæèSS\1k£ôí\25—c9M4\12gq\30Äjσ\28\17ÃTÕ„EÆ°‰‰\23§·=ˆÆIähÑK\28‡í'ù£ÛŒ²w'L$ä’hDë æ$ºr÷É°\1现*™`.PG\11­\19×uJ>QFC\ +S[~ë\27/\0228÷\31}Q©žH\18&\"\22àÂE…VC>²\7ºÝÞ1cñ(\29à^ñßZšQû\28Æÿ‚™’ÜÖ+ÐùEã…\25ä~2\11þ\"\19M\23M;íC#“K&\ +Á\4PØ3BÖŒ¢*66¸®‚\7Ê![\9Ù\"}b´¢ä\0171ù\17ý\25²\0081bÒß#\ +ÎÇ\6õɘBŸã(”,»ŸÌÜOÚÔŽªÒ\25Ñâ?£U'·#W\"#\127Í î3\19Õ!.kŒ^\8PYb\17ÇX/Ô0rÐ;F¿\5Àïè\13\18¦ŸõÚ|¼f|\25„/“(®SKbñ\1ü\9Ä9­ud…F±d\ +)Ó\20zmDä]>\7\5ÕäÌQ”êD>^§(Ýh\28çÄd\15\31ãõª\18ÏdG^s’Ĉà'ëaaAñŠsÔŒ\25ý|\19\19\28ék\1÷'kñmEPNaHmÕ†Ù\16\4©µ\22Šƒ&øèš„\127aGªm|_£F\16•§Áy‰\27©\24#Õpýï/\25ìsá£Í`´œ>\23Íw\7€iV6ø\4\4w¸ˆ¿)vS\0117DžFžhž\0Sè\ +Ú\21´ûþzTŸ\28\8%¨é¢ƒÆcZF\7ò5ãÁŽŽÝ\8èð\4@‚q<¬)°\25\15pÖáß\17µx§\29\21ª?ìǵgÑ\16Rh<¬Ic|=»\20¤çÉ›k\6”x£É{4>ü½\20°2\30\15#¿\127kP’j.\20 䙊ZøÛø†t£œB/c‘gÄ‹±bV3,\28!Œ¦é0\7û¨¾‰{Kƒ½uȃ¼ƒ\7\16fwL¨—\28yJI\18±\14Õˆ\16ìXd\ +eçåu\"éáá€\28Sé¼ä7¥£E\8G„§G«ñTc”Á‰2ÓÛh\9ÀÑ(Gd©O{±\30\0/—’Ÿ+#L“\24¸{°É#Êñ%!8¾D\2ǶpHvo\5\24úÕÞhñ¾ñ%Þ7žH3 \25}‰Á¾Æ\4çƒ|Íq¸üÇì8µ\0–‹Â\\Ä\3T+¿\20‹ë(®L‹N\229‘ñ¿Â{£5*)?xŽµ8Ù\12ÊÃÉñ%Å7žòã±h͒ݼýè'ŸD9á\3Æ\\”ÎŒ¹¶V\25jp± ÁCˆ9È\0007sïã•üñ©\24¿°hÛ‚\0\30yëÉ¿M°üqq¤&\127:÷™\27Œ\22ݳ\19×E\19”éT~‹4ñVÊ^OÙ‘©’}X1!Uä5\18©ZH 6dò\3¨©í¶ã\6ƒÄS\9òqòáêdµ¶ÝM…YËíô\"''ëDt ð3AUâX©’\5\17ñû¤ ý\23vT|N¦!e\14.IØrê\26þ€Cƒ‰+— ê.|<¡;ñÆ{\3vz`p\"í¾8\30¡\0š…@#!þ'œ\13ç\0~©Àd\"äJź\23$õ‰J\9ÕÔï\"w\22ÜÔ¡V‡ª\8ý¨Aâ\4'?n²\23]T^/M~½4ù™ìTÞ«O¼ ›ú.S¡^\4€L±(JA†êÑjP\5üVÊ@ócâÑ\1\1tÅ !–ÙàÍü˜\6ñ~Š‘oÕ¤\26[pN#Á …>ð˜bB\14R¦\ +˜‚¨áɯš\0203àQÓ\31¨Q™P£²\18\26ISs.9•·Kv]Åõá\24É~¼KF©ñéü\"$þ„ˆ\5§\6SX”\30i¦)ˆª\18ñ9…µ¯Hˆjwz9¬ï\3‡q\19·èŽÖ\20§H\19¸zLrAbNEŸ\ +àz\5æÍ@Ëgâ\"\24‡H¼)lµ\22·˜y#ÿ\7¶n¿OÏe1e>½^'MÖª²¿Ü\14ÞZ›œ!bA1ódJ\29\19d@\26®Â¦\"F8A¬S\5ÐëÄYàJ>eǃR­£\3\31˜íÃŽÒë¼U™Â«Ë-x7ù\28\ +WK\3+n¬'\19î\19ÚIì÷ä¿ËMHâM\19W°š\17虚&hëÉ\18RÓd„TÑq³zvæ¨cªçp\25h»fqî\2TGÎ''ÄÀ&ÑÐä\17\8b`|¢µq\1V¼DU¸¦Œ‰nö©ü\17Ë.¯Ržþ('*†JÕ®ø±Ž›x:—2Íb[\22÷OÆMdP;‰Ô<=+ôê7\19T¯ÖOÇ¥\12~“ý˜ökWdˆmv&ZîK4ð\7Â3<·˜V?\28\1\24­¬è”Ÿ–QP n¦”'« šŠÊ“\0091\26twA5O¼ ¼Þ’\3wy¿AE\19ç´—ö$u2ߟol·V¡‚÷eóÃq”~2\11€\9×L<6R}ãT\"ð¸wò#¢\9í*2ÄOð/\0296³\"oû\3\11ˆvÿ¯-ýø'ðtd­\23¹Ë†šxÊ)O^ÿË•c\1Ì;y\ +G¥\12G'P8º)y‹†Ã¸;\14o8ÚI1`⧓W䓯n0™;§E)§æì(H,H½ØµÞ,\24¬\0148ý7+ÇÜz;w›Éyœ¶³ÝX-´¢Ý\29YÜÉä_\12N/Þd:­›D±ï »Ó;õÉÓù‰«©\8\25e}/\19¸;\22\23%!*1Y¸\17\7»C‹vüÉz\4's/“ïs0WêqsŶôº‡™ÀøT¨\17ÿ2½\21þe:S\7qšÖàúÄ-⺄ÆB{+êÅ\29öT8•éÅ©L¾F¹Š\27bη)ɵ´”ÆÄPÝ0cág&+„±Dõ^\ +8JyÇe€ÃOl\13¢3åî‘tyw\20\22¬oY<ˆ™ _ìFÌR¡šÉw\27“©8+E®Ø\0302ÚΦ\\ñ<ßà2Pƹ5­•½@²Øa\25—\23¦_3M<À˜²7\13€öïfÊÛæ5Ÿù\21ÄÄ37¶\13àg\9œV‡‚%2\127gRºDEaRdåw\18*VóF\0009Ö7 'KS¶†í)—ž3\27\1À‹ã·É8uúG¥®<\27p\4¬…ŒTaú&b‚µ˜8èŸà†š¹ëns\7íˆ\25dúŒzoîÚAhaÖÂX\9ã \12_81\3\30‡Ì¨)žÅ?\0089!øþCŽ\21E\7ä‚<3ÿˆí)€\31ýD ?\4¬çÎyZ\3ÏüÒ¬8\23ÍŠ#™®\15œœƒÌfBfkR˜­Uqnù¹„²Ø‘ÆnîÀ«3çÔ*ö±ÚLD„ðT\28ñ\27J+4<ÃhPÉä\127f\22Hs|ö2\23ÎcæÊV©8s;4\4Ï;}î.\13ÜÜeºBhg†!ØH–\23ja=A\6´?[YŸ!%¼~ìl7ô-šý¼\20@\21õlUÈþ™.­þþsTOXLdFíÝ\29ð~¾¡\"\6kö«ÖÙ:\18´¡Íð\25sÿÒù0Ã]Ìœ…Ïh]ôÄl]\8²½©®\1ÕŽshQŽHˆ6\25üTràGR„ù'†så¿+Ïa¨'b\12Ž-ú[\0eØsðÿêf¿\"1(\25Ž~άriEhdž€`§äÅ/\127\ +NÎM\\¹\28Z—¼™\ +6G›N!ödFªý\1h7âFíýŽ°‹v‘ó ¶ñÜ\12X+òóÿ9‰óI&¨`\16´æ…¹)Çósy\0114‡Â‡ÌVç8[®¶Dö/Oñ&ÿ\22QÙ¹(Xhæ°Ñ“0\30Ø7ÕÂÂɳÕ(̯G(s9ÉŸ-?Xk\ +†¢]Á0¹q{ ²ü\9]]µ»7öÃ\17 
Àç—öƹ1‹÷“\6×DA‘\31&ýÒ<4y‘=r)µÿ˜'wç0$›fË'Q3¿DV\30©tïK›\2‰Xó–Çù¤àT\30}DfH>‘ºp&üÍ9ò_å—ÏYà\31œÈb\9ôqòs†™i0û_ãîât”B¶w\\õæì\7PTÿr+¸¾œ‹BÆ™«kÂJ\16?|ÀN‰u¨)\31å¦òúbè¿Àd\1uUÕüý¿é61&ó(¾²2\0»Èb%à3§õ³\127n­Õ2Óÿ¨*šE-Ï\17aWL¦¬µ\18Ì‘ãTˆë™SϹŽ\3ó4¶,\7ÈŸÙJ\9\4ü/q\1ÿ }Ž¾= Êbsõ´ˆ«ç\127¬Á®~vr&º\31%ŽÅs´9;„VGþžc×áh ~U\6GËDˆÍáÚ=*2f:Džª©¦5E\21<è—\19˜d˜v¢&dZíGV‰\21í¿k+ù\25l‚ʘó\28Y¢?é0„™fUX¬¾ºjå—¹syMÀë¤Ó¦h6ÀeS$ØÜø>Àï–Î\2J\20\8ÖyíÕ3+\24Ë\\ϼB\5Ío–õ—\3†\28…ãÂü³/\11æ¢í`.‚ýóû?\18ü$Ly/\1ŸÔk®Ùÿ5ÃqF;4a¸H™Í\11aŽ¤€?Äñ°y\0183‚NÌ\4ÍhA˜×͆§3j!yZE\23À\29`>(å ~èTt0E\"Bï\30ø}¿ù\7Žg#O~$~\18ÈY·Ÿg‘‡ã\6d¿ÖÞ$8ÿ ½ˆ^ÌÅýö÷ÈXh}Õ›˜Ò¹ÙĆbî6•ÕëRC.z¦Ý¸—1˜\11X\11ÈÄ`;Üx\\‰\25Epã\5w<9\26\18p¯×ÛAÒÃÑhÙƉǼù±§BYù\ +ã°\127Þ!­ç†Ó\24ìòÞ½3ÁËÍ»ÿ©aàø¬‚Ý\"!\0qQ¨\17ã¸\15­ò¢”\12žNðEüx·9Ø$?Ž³f\11Cãó æÂ8ïŒÉn%ȳ¯Qç݇Å\2'˜\22l&.ÐÙe*azNÅAÂÝf+èœùÿú,®·ÁDú®ž\15\21}„–0\17ôóÁs\19T™Ê9TïÃ?Cl\5‡_7ÞÚ97ÿP\14ôó\17v”yC\12VÇ\1wŒãI´÷'/·çc«zc\27\3Ðn\6áعcŸ\3C6ç9[—×lÅ\26Êýt‰üÓWIχíLÍ¢F\3¿Ëu\02212›E•\21\1\3¼†Qe>pÔŸ•ë÷E1B¶ó÷ÿW\9ãz‚\127ÿ¦‡Z\20oÌLnÌB§üöý\27ÿ …\20QÆ1‹Ù­gÅL°`³ÿ\0124[Ê\23“ö\"p‹é)Šdìì\22ÕÉÛ¸\5^g_ZÍV\0:·–j\19ï\1;{2?¼ÇMv2¬<{öÖ̉ÛT€æÅ{²0eqo\5<\29•S\4eË´Arõ*™\127ÈoT;¼9òRf6G¬\2Ñ-;'þM>›1v¹ÌIö…PÀl`ô‚ª\13L&\28\\òl™s\5\31[( \26쎕\28N¤–Jž¥ûjs¡@È—Ä\127†Jh(¾à\22ÀU|©MâU±]\20Ê_=pg»>±skdà\\.ç\9›6ó¥h\23{G²’{ƒ\0°<Þl­\31s²Þ†Ùw|3ü·[ûK\1è\8ž+Pééç³sÁ„¥\9g¡~9ôŸ_—zÄ£Š/ñF;\21Ä-\127Ùi:óÕ —n\13~¶Q\"³/\22F\23\7×ús¹œ›Q®1£ÊTܽ â\20óKlpæ®.\27\20Œi5\27sa\127çÂÕÎð³Áqb©¥»¨¨8}Û@îh×6ŒŽš\29ÄfU”ÎVƤ\26f\26aFv®˜\31ìö¨\2è\22\22gsñ_X òBsûªõÅÓ>™­òãÇ>Á1E\";âY\"ž†²&ŽçóÅË\31Åa\3¸Ü»ÜïvÌ\14˜»\2è¯+¸½Hï±à.ÞŸ–\20 ÞºÜ\20RTðž©>ÚÀ²†ç›\19B(7ü=iö“±±D‡Ò-¶{¼ {䲜¸ÿŒ¾¼’¤¥´:$6XëKR¥}šfjÅ>×Í\20›QÛõŽ%¼r{U÷|¹O÷d(Ý\28r\1Ì%¦z¹Ûœe6Ù®ÃáŒñ….¥™×mò9„¬\1 “˧s\6&'®‚Å®wä'©pëá¿ÞŒ\16êò¾¤@÷€ib\21’Ð\14`ÿ\\€WO饓‹å™EÆê,·Ÿ³ÿ\19‘ÓË‚¤ûÌ3U\25®)g\26䛓³ÐÒ›3”^æ·\3\0064\21\23ýœý§›9sycWÆœKL$ÚæÌ_Šq‰ª5`zàr6Üî0‹sy‘N@H\5\20—;1›„Ê¢V„ó$²©â\12á\27tLÉñøa\19²æeãÛlöÜ\1274š9!ÁQö˜rl2—c\19Åp\31e+øÀEí.f~æ¯U{\17„{Xè‰)! 
&\127ÿ\13\29\2ä=ˆ\26.œÿAw~ \26ZÏB‘õ'ÏOæOnÜæê“7êÍükÂP-~Õüò¶\22„\16ÖÜãçBó3kßÐð±ûq‹m×Ý\"¿SŒ\29J¸äyÇ=ÊÍ¡‹ÂaWä3ßb£ýQ¶Ÿ\24\11Îå‡Í‹dŠûÆ)LôO[C\13KD“¥Í¸ˆ¹\25h`\12Óø‚Êß¿HÖî\23ý‹bGð%‚R/µ£­Îi휻Ð\28®ÓæUJÌv<]ã­¥ú\\'G!¿ˆrš•\\öAÁüvÕŽ¨bm¢¥æ£¥×äóP\20\30\27\9ˆVˆÖf\19­ÍF¹ÄÐÊ\0226ÄNRq\1\28­?\28á+÷\16\15x#ÿh³W;ߪÈOÕpœ˜Ô\26%b±üÒO%ˆÜ‰ôžßì`R<à4¸Š‹l\1ÖÛ\28+õ3\11J„zì­\7)úŒ%²¼¦\2”¼G}fô/ \4Ä\31ª\6=]Š\2{R\30D\21&\16%Ã>\21{k\6‹Ö¨MP\22b7Pw¡yúnÀ\28\21\5\18ï¸ÑlÓm@´\"\22µ4ñ>¿^pÊ6ñòhõ4ÑŠ€ñl\24£»x\24;6ÌÇûчB±h©‘§E•¢µÓD«¥‰¨¥‰þ¡•Lmª\17u4±Ò¼C‰+Ã1ˆ\17ÀÁ˜ûÞ\27Õ!F¡‡&¢¦’\ +£¢ð(àá(\17×\30\27\"\ +cVQ¦VVDA¹rè\22‹¶™È%¹ü\127Ý\"ZdÖh“>æÂÔ¾\17­üçxAåÀòEáL´ˆhôÅy´\2•å\5wC¦J€µ·«qžc\"\127«Ãídë˜üfË.ÍØ\24&1P2=VÖ*R€\6°L\"E\26˯Gbð1\20`Àdc\22\8é\15\4/”‚e\24VÎQb¹$\7p\21\31ÁßÑ?ËÃ$ǃ¡XŽ¥&|}0\21y-@¹;5ñ…xô{Z\\Ö¥\25ËSY'ÙG›âËŠŒâœmþ´¹ØÜ\8/ïvpÃ\14æ³Àæ‹ñ\11œˆ`+]Œ\30ˆcüÇòp@ëÁáýgzÕÙm…a#—\15›Ÿ6í\1270fþAV„W\29\ +xÐN\15{ã[öè“.·°\8Â\18\16À\3\1!7QîÚ`#ÇZ³[˜ûÈ\6\16žU&o{tæ´Š÷Häž,eæ$\30ÀË¥²\20B\11¢âÝŒL~\27\17\3W\\\23.D„ü\27\16½)>9Ë‘þËr9 ñ\4åeF‰‡ì7\22××Ô·*z\0177»FH„E_Ô‹`‹#Ú•£µ\0EËlk¸F\30•D‹ÕÊy6¢°+Ž0Jqô\1‹ÁŒßÈ&4\"ó+ð>\6ã#«\2Š^\ +•¥jc‘ª•%i#ìp\28½\12ª\17™d@°\23S¿(ý‰ˆ¨É¼ü,\22\23SÀRdÄ)OL\8F\8#VªÔÔU,œÉš”£\5‡ðd7j§0ÒH\11€\0008‚$Ž\7›{ÿ\13°âåšNŽ×N^\25,`¦\0003w:°R{ß’\ +\24i\0225AÊàt8×3±Üb\9Ö¦ýíͯhcÅ\30Åm\6€šñ°0ú×qön`D‹R°hìÊó¢\21å.\19\6‚\21ñ\17£±Z\21ÙÊP\"\28Ý\26(ªè;©h½(Š\6nùIál\22‹(\2ÁÅ7*vÒ¸ÅÇv±]øGµtâbõ\0ŽÄ¬Xà\17âÒgñkq\9\14 \31\22pVqÕŸ¸ÖwŒ¡#òÐXÙn\\\7c¢u`)¯h;5À4¶_\17ÁÄÎDáW7­]ô˜õ£D“Ö.°ôúKËI|)1‰ëX“£—%€Ö®#\23#qžr+‹†HšE´¹.g †`é5>~\20\0a±BY®~ž¢f¬îžuå—\18\2\1í…\14u£¬”‚$tV½Ñ…+¿·ìD ÞÞÙ\0Y\8±QôŽÄÕÄz´Ì\8\1279‚\26]éÚõƒ>¨ŸÆ_ky?_Ç\13|¼!ah5Hà´Í8më¸L\19¸¸@\19lá\16\7šlþ´¹8\25oq­cq¦\127 ‹c\11&1­\ +\4PJ☾@U­hüPÁ›ïÁµ\"EôÆ\13.Q5Äêƒ8\0202áx°L·æÁ•´\\™\12|\0\23Yœ›¸5¬òÞù½\ +*›îÔÓ‡©h2VÉ»‘I³³¹É‡e°[‰{Üý&'îqÁŽÚF\\™‘ßý'‹Xty\24ì\02020\20û m‰ºËòe_èö)¹£\29Ö^þÁ#×\22L\9H‡r½\21ÈÚS•ÃÜâËhï¦\6^ð|wp\"67\15=YÖÞYvtñ˜àÝËsE,ì”;\\¹‚M›ï(݉èÜ ÷\31\5¸&\19íŸ*÷\12[\2xÑê6ü¯eÈ‘}²¸q,º6¸_Ñœ¯\1©€¬åº»\0095÷<òƒd\23͹$û9\26äûî\127gñÖâzÞš¸[ã•@¡ÎÕP+.‰;”Â^ÔØé\30ç(Zv\31»ª\18»‰60Ü^\29\12nÃ\0090\14¢\29®øaLnúé¥dC)\30\24ÄAb\30À\3áX´j(„Ûæˆ:\13O\5Ôiì\5&J5uò_e\26øg{÷àÍšÛ\11îl=÷‹E³{aîHtS\21h°½.qùqÜÍ\23Gä|åõ„!@«E\4Å\29lZ\7ÔïaÄz€\14fȘڇ™]¡ù™(Œ€\2 ¼\26(}™[1Õ;(+â_f?°vœ@Dž\22•‰Z\5¡ÁF™Q“ú€+8¸OPÈã´É\30B\23[!\6&\21KF‹\13ꧣµc`^Ôƒ•t@¬7‡ùÉØ÷`w<>(ý\11ϧyÂßæç¾\127ïv ¬Öx”—Ê24C\30ü­<6\28KÆ\7XúÑŒ\ +{p\17©\24Z\21ˆ¤²\1\21zÐ3þ%¬¢‚-xgP\23´Ìóé\8\127ì_|E+´\22ÃÄ)TLF\1©»àļEZË~ä0=zÇÖ®ÀTM Am¢\127É›³†ä•“&'çÈ/&#t\11vE6Ñòóؘx¢ƒË8>\21œ.PÊÚL&\0082Gx\12c†5Õ\8å\2\14ªu¨º\28nG+ÆŒé±2SR¡\13“ɇԖ‡ÆÑÚ\0020ñ¼Ì=%óë±b°N#Ë¢Dš‹L\13ãiíGÑgʈ\3úÙK<ÙíËò\19(Óc\8c—Šl–a\19Úù\31[t\2„SÄÛ…ßY–GT}\20e˜KºO•`ut1½<Ž\18áÓµAðK•!\17\23ž˜sW@4ð\30cÍÕ4È¿ûR»\16#k÷¬´Úêè£p*\19\16ýµó„\14\22\2àEG<#“ƒ\19òÁ\0>ë<Þ÷’\21\23—\13?\1ä\24\9ÊÆÏê}ý›ì\00869[\16\21\30ݹœ£Çr~Ο\2©vë»\8 }‚fqX'.N3‰È÷•Ó‡Ó$ÚÉÚ\2b\1xúç·¸®’ñUr¸J‚+;Š\27Êù^,\127@ˆåè>–3z¹¼œyèO+,J\0229\127wk.3\29œÃ»‰×÷ß›B\24'\127rÇA\12~\9\28ýv\3\ +‘êˆ;ë\127\20 ¯«3ß~ù\0071ѯö1Ùü.+]¶\19,â·58²í™S¾ËGE0|_Ú},b\23/ÿ‹5ú¯é\20À_~)š)Ç[±Ø–‡9±¼þç/Œš•W@œ*–6£\16\0žS1èN%\3?]3Ç5&b.&B\21¯(Çeò\31ØøäÆGÜ\21™D3ðÈêa~\31%q\27LF0G×áfxBUæy*\31«]èõ‰œ~»}¾ü°ó, \27pµ\23ˈ\\¾=\3\28ô ï»È¦DÈn\20³ìz\9h⬂0\5O„Rå\0˜é«LWŸŽ\5BN—ß(oÿXN§§+ü[åPœ\7\0UQòõM\5N\8\31^†häò춂Íê\11i>ü ¹¯Ì™“å\17kˆ‚X”\18Äë¥î\0097¢w±\22Ë›ª›=ÃýkûX©kë\15s\26\31eêÔ\31ÔB~Œ·LæÖ‡‡\22?Ó]\31~Ž\5Ì`\17¸ÃÏœ¼…h°yP\29¿jZÜ|)¯Ë!ªÚ\19I9L5é\25bs\7Ôäü,9?[Ö‹\\ ­'ª ä`d݆ïÿG8ðgWîU±\0089\11˜/\16P\6?;Nî럚í?ùoçÏÐ!= 
ûtóß@•½àu¤\22\8‘þÓ¯6\20ãÁñÇO\31v\20@8\\ŸƒÔ?Af2>n?\15\17—?­0ëçÁ]…쪔\127OñÓ×à?4Ôx\11\25ý<²6ÒŸB?QR/“\23!\5\12\13ž‹Œ}zSe¹~É“¤ŸÜ\30ðß‚Ÿü˜zénÍ¢¹-\30YµX:v¥ã°WÀÚ\1j9g›üa\16€ö\30â÷\7\9ü(k1ö0¸ä©\13yA@hCWb\15dÌŠ\1Ž\0179\16Å›ðÑÒWf\11uXÄRRÒJ½\16½t\16\7ý\6;¾Ö˜)Èù>þkí¤+ÂIªÅJ)ëñ†\9ý½XÆyé¶Ç\31#ÜЯ­‰ÓŠñhÉÈ\4—¼\7\23½\15ã\4·i§†–½ h-~\26áÅü³ZÀ?\\Þ\21tqF¨Jòv‡[Ê­”ýáT\"ÚÔÆäŽL=Ái°I\9\13T‰üE&9WøÁÅ, ŽGCX­\28\0223Frh\18a‡æ^Š¸§<³û4©l‹{ªhm²7$i¼¥,åwˆ\ +b6.uW¢\14‡±Ë<Ó½\9‡f\15\18ª„\30´4“ž¹CLÄ;+Ô‡,Vß‚)Î×PÙ\11áטíùv\7jë\7øGJ\11\127¬Zú šõ‡0ÀÒó˵EëV†æöŸÇðáØ`±¦¯%ŒQiЃ-\19ZnáâVÝ\26¸ß78›Ã\16\"Êç9÷\2&ò`nù'ÙÌH«@p˜\26½x›YÐ\9«Ò8­žŠ+;ˆ\6†ÕÿØZ^Š\16ð\14»\1/j\18\9Q+\26o^ˆÍñïÒ‚ãH$´»X’rá~„l÷>¿‰\17j™æ¨UT\22{)\9\18r)çÇ\11ª\5…[œì|:\15\\ðã.ÞïµìëåçÃ\19;³3ä×jK8Üa\"´\23kXÀ¼Spâ^{F\0189°æ\25ÓÂñ0Cd\4º\4dË5˜YÝ\26ò»\127^ê±ùbþŒ\30¨ÉÓgÖ”™Fíó\9ã\18ýbtñ\5\13~à#?·Z\"2µ\0071Œ\0„•õî¿U/ÖQ€IÝ#3Ý\0121\25@¦\20öâ¿\19soÉÑ\26H8\24ô\2³\"ƒÅâ‹8¸óö¶D¬œ\5/þ§’²z\18ƒ¿à-ÍOÒ¨\22ËZkÔÐi±\24'¶(Ùn\23¤\19Á\0¢Þ\21ÀzXƒ5\29ãb|ýKi~qÆc˜¥¨\30†ÑѼ]ŠÈ¡]Ù\0099\2YV¯\27þ ½XÛ:¿G{Øžlž%˜bWÿf^E¡Na±äà²ÂéR°(ð…ßJ×\0n.\23ä\4•\ +Ÿ˚G\15ÑÊK\7å÷\11‡\6Ê\127˜^jmbͲ‹\16^Ø\127\23+\26ª—7.s–\0299¤e?Z^\18É®\4Ý*\1G9LÅ\3ÌŽ\14\ +wŸuN.þŸär n(Àc§Å–\3Þ\7ûò¦‚_\127{¾\11C.â’\23ÿht1—¼XÏã\13\7rÖ‹5\\--j\27ëåõc'C\30Š/¼¨ÂÉ[*9†—‹'€Kù\1õ‚ê„¥ˆÉ®>\19y*\11÷BÅßØå]®hâu)üŽjǪ½üìŸÜ§©#—\3=\11ìæŽ\19–\7×aG¶\31½wqغ\\¦èÌLÈ_ÝèÇæ‹X\13•“½â2§ê\5°¬óú`ê sl±Ê±¥\22±êˆHãËDC/÷\26ÞO‘ŸY²Ï}–ì\31:È\21!¿ÿ$ˆå•M\8,¯¿[+Ìž§ÿ\23Ž+cŠ\21ÖÚ\18Dz Å·øçÕ2!)TÒ\7\6»(Â\31Ë“g\"o`ß•[ÇUt¯–ùÚõý7\22&”W”\21`N6ç\27×\ +ª¨ì\"ÿVë×ZQbÐ\21\16\29MëCf¶ýS\1Èè+\29ʪ+^æêSG­•\31M®h”µZ{m_¨«É\14ù d$ÙHÞ\"¬±OÍx\24ì2'­\5ìüÜB.þ)£„ü9CE©.Z½ÃŠ\6¯Ó ŠT|Y¨Ø‚8ÏŠ’\0135l-æƒà¤\0137\31k·µ\"´ù«Ç6:èá$\27™ni«‹Á\29œeâöbÚ[M“ï­'\11¥¢RJÔ›ÁÓ\25è^k«Ä“g\3UÀ,\\ƒ37öë`嚌Ž‘ˆe0AWOœÍr\7Ô³LÖÍ3Á®u#Œ·zò0“´5\\«\1\26‰Hôœx™âF~ò9AU9,¦\29’Öl¶ÓÎÞe»NNÝygœbOþ5Åîë\0Ö½%`k@ö’Î’¯|Ñ\13\1\31AäUé±\ +=M6\0137í\28_xA\3Õùr>¨éÒ\17\ +Õƒ¾\31w>4/aë%|`2¨ëz\15°§á´žPU©¤Xiu·²\31Ç=¯j„’6u9¦\25ùƒÜÀ4YS‡\0–…§\9u\27“Í@é\127©¿H¶\25g\4c›-ê\14\3YVÿû\14lÚ…]§Ù÷î\2Ä\24“™miŽä˜)\9H×ÌÑô`àR(\23E\19\15¿Ó¼^Žü…ÿeªîqRO/¨\28ŸÂšÒÔ/G»Ð™\11$¥/Ó&Ί.}ªG’f,h¿/Õè,ÈZ›WW\16”ì\20\23mSB\9ß”¸AU&#X©¿Åq–n\4ŽÆ°Òxô\17N¾ˆ›BHTa&#\19fz\0ì½\12\17n¥x¦M²\22[ä5¾È˜Ï|ÄÆÆñ!^Â\8þËÞ’\16¨áÿàÿl»\\Â\1\ +'-uh\14Ea|ÀhÁŸÿ8Üt\19‚¤€ªÝÌŸz\16õ mé\27\26å÷\27Ú«?ÙWÿü>ÛCÍ{Í\31\16É?¿5U5‹n& þü>N\14Ôv&/;°iÅR“\0S¦[!ͧu<*võÌ‚_F©ÿ¦Ø¬‰Â¥_\1˜¶¶9ó)«h»œÝ™·Ü)nÛ´ý°\4‹)¨\18\4\5û®Fl×þcéíK€ÅI½\16l²|\ +\25Æi·Rnˆ™¯É¦\27¦]5é\20RSÜÕM\\Ê\13\1®\1¦ˆ\3#Û¯ì&_Ëà~S*}l+äjté0Å0•>ÝØvµÊR®ï®õ:\18\13‘\30l\27\14k¤e\1\21\19Å°Sü\"\25çÓÿUP:í×yÈ£ƒQ\23ã+¶h0ÙšøT\26çü~¡7q%bÉ«)®õ¦zbó±]\6\\Öz_}\13‚°âÙlÕ½….§à˜Àn\28v2&ßò\17I·Y0qÚe\13§Š,èL1ƒÉU›BÀ\8\29[+>zQt\14¼EÎ\4:X\7\11eR }SmLw\ +\9ÀÉÿCôoò• ’é„Õ\0145§#ÑÞ\20}\0038Yá©=\8\0cíîI ·\18\0\21ì¹ÆŸ¸\14äËù\4ªÒû2tÚ9 &s@\0044©Zgî\20\18Ó®EuBô\15[yÄ\28à˜™vù?Cg¨\29q-ZÂÈÔæ;HA×\14UG\26\27„úäù侨}2ô»1¹\24ÅBÇ\0fø0Û®ÿjc»Äb6\28\2~8ýÕÁ(\2d\0267”\28ª7šÅ`\8OvÙ±ÚÕDlã0¤ô\27µ\25Ïêû¦Mš/aÿ}âNs²É÷Éò€¸OêÔy§B.Ð ªèû#ßrÊ}Ò²Î_ôæÇÑ¡óc±ë$lf¾Öœ\13dz *È9ákÎe÷§\29îé˜ÉÍ´w\11}çSÛ\17”f†‘©ÅTjÁXD衹.è\12šÚÞw\29ü“S\11‹ž‚lþ\14fþ4_&õSûf\31üù\20¥÷ç‡Ýð\7~³\9éC\26\12‚ØC¹°ujç\127Á\7=½x\8ž^3ÔßkÁ\15Eù¢\29Ü\ +È\15Â÷:¸ã!œ6ãù\24VPêͲïÓ\22gáæZ™ÙcÚx¸†Ùº\0299¦Y,$|’é©\13ë i\22ÍÜï\ +ß\22–€*Š»ÿÃã0›Åa\6i›OBŸÔOó©û\26åbÛ\ +\13ÜG‚ŽgáÌóId”‚®Zã\6Ÿ\1öÀk€{€•\\W\30=í­\1\30\1ÔEªÒ•\28»\1Îý\15\31±p–ã“|ðÖÙw§\8óúý\27^óQà…_}\14V³¥(ç“Ÿ\4å\17‰I\14~µSè\11>íü¤î\27¾¹Ÿý\1înçýîvæîvFQ–Ûj«-€¨¡Õ\2©\11™åd¼fûnΧB¬\0k6#Å\28Œ\0206sy\ +À1\15ü2àQÛÆ/Ulr9«H\15áO™\ +øà?ΡxW^\12ÆË›iY\14+\18û\31B¬Tt¶¡‡^8\23\3V°÷vœ‘†\24Þ\2\3#ÜŠwßÿùC/ü² “ÍÆ\23:‘\5¥Btä\0156gHw×ã—=\3ýåW\27úÛ\12\23\2ÈwÒ+ ˆ³\0056a9-WºåE•^I\19S0ÇMªþòvyÄ«\25Ž*\20÷ÞŸÿj\29\18\9Ö2ÿ£PK%\127j(>Á\27gdqg3ëÌ渘U¯ž\23ñù 
\26a\\zî¸\19\4謘Ï`Fs\\Ë\29fý\29D6Ì\29’…óp\22\5…;\28\3\8cA‡ªŒ5{¹æíœ\7\17ûªä௠cop¥˜QµñuØ\9¨•¢\22h=âèdx14`ƒç©Ê˜Îö°d\6‹_+ÈÚìf[­\7\27\4Ÿ\7#à¿\4\127Ùt«Ñd\29̳oºæ\30œáÏ¢\2 ¿\1šÁu\00809‰§\24lF¶FƇ\23×\16Òùióp!\"ñ\28i\0ËfH,‰ûŸÙ‹ÕwU³Íñi‡™9AKî‘9_yè$±*»Çó\14«ƒz;\14N„§+®ü‘Õ1û®\8™\14ðóÙwEš\30C)|”\25ËÌ\24â(\8H-„ïv.\9»£òº‹½Ç«\8P§ÙWHóÐ\14îÂo¦áÿðW´ÄŒ±\6¾\2£Å\12£Å\30\30¹§}ã2Ÿl%f¹˜GïŽã\0ÇÅ\28znñMö\16„Á¼£}\21\31ÚØû™i2Â%eàâïÔ’‰3š=O\127¿n|eÂNêƒö\6›¶U\11Cû”A3¸’a¹2ÓlV‚ç*ö‰\17‚G\30†*LJè¿£Õñ¸O>Moúnâ\31¸ìØ2P\11Nׯ€n•\13|ÙK«Ü·*®’”ûÛÙ6¸qIì7T\15\127¨¯l+\\`ЪêVý…#‚v†Gn¾[%ªB&e›tfÊ™p8¡DUÎÖ¢4û,\8 Xo¿Žb\6ξ?pœ>6cy`žÙºÍæ>k~Á\0\2\20ž2\31gßÞÎÖ™Ô9\24Þ\1ü \31ól+€sÈ\17\25Ü\2”\0ê\3µ«C4é<Ó\\3ÒÏs×øÌŸ\31ÌpÌh\6›\17–\0255©4<Üÿu3dg?wIó+!D-\0’wœÓõÿW5¯ŸÓÿlM\9æ\18òMï„ë+Ö6D\17úV‚­DØ/–p\5áÑQ\"HÍ9tÞÎК³iM…cZ“\24\17›sÊ<°\4ÌŽt\17Y\3£\"¨\23\26CÄ€N)L\5™‹cF3î\1ýR³\21\11\29g,_ÌÁÐ1³(Áôæõx1R%¢ÝøgÀüO„\6lE<•`\17\8\2HÖ¨Kõÿ¸\6‡\ +²2À€…XZ½²iͦ\31gèÇ\25úQ\1279wỘ‹I—Ù:_gß^Í–ÂA\17\22›z±\28Ù\0284Û\28Fõ潉<ØÃ\14´\15\16Ÿ\20d\14\11|tB\16i³i´Ù4Ú\12ÆÏ|,\\åÍ&¿fs©kkQŽjDÌDÖ\12uåÿ*°úÎB\30Xìf3@\\vxÝá´Ã²Ã=\29\29\16dÑìÅgzˆq\14Àl­èÔ›ƒ+|\14ªg®Á[3C¶ÌfܞͯM\17Ø\17\20ô¾\26´Ì\12í2›hÁåëðB\28f}·¡'cîE2Ì\13µ\19Z\3\19Ó5ÐÐç\14\6ÒC\19\24\24o4óî|DKÂlu«¼ o„3AE\21YŸ«þhè(Š}§Å†o\ +A\9Ù”ÞÿùS\"ºF<û9\"æ†õ>\1“§\2Å\30¥`Þþ¨ª ó,T.ó@>~ÀH~¬y\11_e:\9k}$´\20êÿSÝð<Ù\17Þq˜_\4ªâÖ\24rжÒ[7öÂ@s0óú´À>{sÐdWK]g´Ðo\"t*.\16oÅ°ÿ/§ÞzDñi%\3POCÒóùM)Ï\3ÿY¯\11\2uËÉü}”q¥ð+O\0\11êHäAË„¾¤í]U\27\21\2k‰²!(´0\19\23›–Pä=Q¶ö—ÅêI:CL\6\17ªï\30Ñ\ +»œBcÌ\18êG\16îâä\18Ô\6áP\17Lò¡ä|åÝi„Á,V=‚‚‘\23_\19\2¦x”×Á°¬s>Ónm”ŽTUN,0×qæ;fµX¬\19Ø\25\22÷‡vh´É\19\8çâbL\\îèø»ÿO\20ºÐ£\16_KpL,VQ»\24QÇ\3ÛéÂEôrZ1g²°6?\22.Ï\23³K\16­-s1‹3†\11®\20«mb1g3.r…\11\23×xVq¦öÆÍ•¸\16á>‰£V¥`Š\6•—BËKg†Ò¡\30ÍêêôJïš»\25]ë*ù„¥–…ë±S€/ÇT*ÜøÛ˜\18ír¼dF¾]iD\27=Žmr‚ò\25àJuš0@ûJ\0:ÀJ‹—Ó#ȳýOÑ\12‡|‘óIiè+ìåa>\25™·ún\14¸8¡&\3wlôÚÎ\27Ahs>\27xYxÃZ>1j\1èÃåzùäÊj1n¿\8\17Yú/ß$.Öy¢ Íɳ§|0\15XXV«ë|ðP\19¨=e9#]·„.\19Å©œópô47Y€Ï&°\22K~k;$\4•o‚}l‹ËYÈ`O\30Çp{LiLö3\7–\"t\16š“‡a\0032ÁÏQ•H\25\20\7FR”£óUxÎÔ’B1gß¾/ç¸;ZÎ0ïQop×å\0121Ä—~Ôe0\22\28`ÙY´\15.\3Ât\11úTŠA%\8Ž\2\20‡0\20\3c5|±Òu”)\18ÆÈeÐö¨ÿêý#Ùà+_8\26—Aû™¢Í”Σ\27g\7šýg&+†š(˜™of\5RHÑãµ2çÃ6ŸáâïêŠÁú¨\22LW£MUS\0236t\23\3·¯|\"„\12fjÂ~¼Xçï‚Î_'ßw£+ü€\ +ÌÞp‡™­À”‘&\9¬\0\14,$d6@,-ÐE=Ÿv¿Üšåü¹Ãó\14/o\6×Ý{Ûá}‡ë\14ë\14Ÿ´+´\8/6V\2@\9<º2yKŽ:òxõh\1—\8.\1ª+tsIî×å/\23æ\14dÓ2ݶ rep„È·\15\2MÜPº·¢N.Q”ÜÇbfú%ø\21ÌõPŸ\17¹¹ØÞ\29¸<ìº\21L×àWX`R Uk\"bõÔõn`úo1ÚLê>k9ÙÞv‡/\1Ö\0lJ\8•¿ïÐs\12áò\5SæN3xð„õ\28—\15? 
-¸Ùs0¸\26àÔpS`Ïå*\6v~BG\7Þ]9¸âðñp±„N\17ƒrt\"î\29\16_\23\5âäq–ø]ŽF\24\17¼²¹™ÃÁ\31b›5WÊó\15;‹ï.žUÕÛêÀG|ys^•sä†e±m—ÅfŸ\3<è°r§HÐ\25Â’Ë\1Ûb„@=0žqò\"5®\24p‰à•o×XßÞ\26|7¾Øp0Àç\1s±\18Wa@Y`˜X\6\24W\23«d¦¦T{@\28MÕ}:ä›îÜØãÿ\7¿Ö€U+-\23\20>-æœ\16\16µ¼ô\23\21\ +ÉÌ\\FÂ`¹|@Z-\23Tç\30É’ˆPÃ.Õ\127…sõ\0HÇ\5{]t<;÷\0211\21ô\2œÃs\15ÐÍ\1…¾/W_µ‘†íÜÖœôÅ+\15½Üo\\\28æë\ +…¼\4\19\6@(9)2ž#|\28‹\13ý\20\13ãõ\23ƒtµ¥Lb7\31\1W\20¾Ê½5\23Š¡†Å–`˜\24WD\9\0‰À\14ñ\0|Lµkïúº„…Ò}$\\ã,¸ú½`\0096\0056ÅZ¦\22›ˆÐ¹\14ãÂJûêïøX„\3déѪ‰›í6\\\6\3å4\11ZÁùÛ4dœî£Š\24¹\20“Û”yüºýÛöÕ\23\24ÜPT}ÖpYXd±°Èr\28ù;h\27^PYåÿÆæ¬~d\9>\16¢Dç,ã\21î¥\31…_’”wƒ%\24@ôí;ÿïì\3qÅV\6:~<Þ­\\~±AM¥óš4\0085\"H$,³KŽ\5¹VÂÙ¬;\20e°&¸úÖ¦¸\\\"\ +¼‹Éñe´¶†n1³\0047\7¶eÊ\16IjÄ=HÉÎ6öénw\"5ßóýˆþ\23ªj”ÌFiÜE\16\24ö9!âÏËXxÿ^,–ì@ôƒ:\13¨-ÛÊx¨®X%µß\\\23‹•àaeZrÕ0\0168[ð”û \23¿5V\127Qg–\1’†of[‘\15^\14UZ½yšÔ›w¸ª\22$òJ¤lGÿAt,pB’Øæc\23ä¢È\20\22Ò;òÆ!{7I]=fÃ\29•¸Í1´îÎu‰â™\11\22\3©$€ƒ`\9-?Kp‹ÀYC\29ïØF!7Ïó\11&xï\1(«Â”³ÀŒ¿:\16#rK?\25Å›µöB‘µ\6O‰\23s•,B¨Ïª\0HòÄǬ¥zñMý\2\26©„½:j1[ÆâÓMí\9½%\6w\";¿!‡¶ãóDP¦~Þz\7'ü¦$µŠ\25¶ô>…—„߬\20Šcã@ëÆJr†Yu\1ÙîB\29µ\0186\17…\11ÈIzÇ؃³€þKÖ\29\8\23€¨¾\5E\27*\13&y-Ú~ÑÒ›ç\0\29#\0039‘dR΅º\4ëà\2;‡\28Vi²\2µ…[;ÅØ°À­\14\9Î\31ÙÉovg»ì\5Œ\31¥q<%.W\23˜\3Ô$˲\16\1ê/÷ÀÊ\18|ÎØh²qx%´=a¼ì\4Šìo\14½¹RæÀ\6\26;N\\\7êÛÌï䃆»#ýß \4~Ÿív\28üÁ?° ˜Úd\2ꤖUä¬Ü\"dWYÿf;àÌòí…jk\6;à\0‡”¼¿aUúµ\28hIð\3,ÌÊÉ€}\8P\13šÒôåôpJoÿÁ\14\16^bux«E¿BãÄÂöÀ4\23øדW¥ ”\15—Ï”áö\1gÀ\18z\20Pð‚œé\18†L)t\8_vYå\4\23Á\18J\"Èij²ì\8M\9\19\12K0\23,;sAÀ/t{ÁЈf6XŠo²\21èí´‹;­¥ a @¦YÞ¨;«•Xþá@X\ +ý]V¸u\0224K@}¢šÙ5‚œ7‡\2ÿ©¸¯à–]O3…¬<Ž“Ã}Äu\24ŠÜ½yAÌð\5‹\"¹\"ºX\30ÔáL¨Ë\30 ª\1æ÷€K$p'†ª\8\7³\12ÊÎ7à\0æó‡¹^ÐF˜ÖK|‡ƒØ†$¿ìÕÙªh8×\22D\14\14Ð\1è”P\28Qa¢u\9\29Ï\6ÎnéÅå \26T_Ú\24éî¿\2ª¾õ\4\2€\12¦‘PXV¹ÜíÝçÈz4]h}\18K5ßÚb\13\18ìbî…:ðh#Ð932¦ò\28ìö×_\14cÏ©<8-ÁH¡b}Fc|Ô…ôî ú%\"™7\0\127Û:G\22«SVŽÖ»ä\7U„[t\9í…‹ÕP C\8,¨Z×\27qœ¿‹¯\20o\4#Á¿ Â%²©W3z,qÏ(À}\16ÌGKM=·=5ѽo(ö÷Ú­–Õ3`ì1š»RJŽæe&ç®XY]{p\14NÌjõ%\24\6Hv\9\12*ÛëšøªZ¹\26ýÚfêÓàÏ[jX†\13è\"±<»(\28®Ê\26\15hKãµç€Á\0019 OM\0130\3Æ‚¤§þjz)ʸt»ñw$ï]K¨k¦Hû6CÞšqB.wE6E%×(\11ü\19NµQ\28$:ì\19‡ÅSφ\13\24N[¦òù2:EåS‰¿\16¸°ƒ*3µâ%\2w°«\17kæPjK…-­ò™\26M¬n\13L\13Š\0\127lß8\16/\127~¨üŸŸ£&!ö©|Åóã«Ÿ\31£S\15†õÁSßrÐfô´QEP‹n|A ½f\13‘\31{—\23_yQÂËû?HüfÔf³¢\16|Køš÷bôsh 6X¯\0224AÓ2 (Ø€*\7å\29O…o#¯apÛ39·Øôºµ\25^õÆÖŒxw[óκ\29ž§\0ì^ÝöôŒ¼ííÏÿI‡7Ö§î-\29O¢Rå~½%ßÉ\6pЙÿgBÔ\4ùo8ýHèPžk\8ÑÙ\15˜\21£\0‡Ç™t*ÎZ\\V%—Æ$}¨\7’ö¤ôi‰ßô©Ž\17ÞÀs\0t˜Â\28=ð»\25¹ú§¯nHÄ«jBs±È#C>ô9 Îå\0Œ\0268×3ÊT\5>u,PĨ*\28x˜JŸT\30\21ã£\19Á=!\28¦#Nÿ3ù´Ç+›P\8Ü;Õ‚A\28Ïæ\12jÌ®õX\30îÛ“/óD\13%T '\4\3“õ Ëå2!Y\15²<ƒ¦_êЇüå\20B7\8ÍÇ;°ŸìÎŽJþ¤N²@’Ü\26ÿéQ+?–‡÷„d餄\14ä„\14dÒÞ©kB\11rB`)\29þü?r4¸f–Og.Ùõ_ç|:\15¢å\9ç½W>”Ï\18(’E\0£W$ü¦±gÑb¸Î;>Ô”³NËtFB¾K\20iA¢„¸ü“öùÄΟ:š\12F‘P0™¸0Kñ‘£¶ŸÄ“rêa‹Ã-Ä}SíÿWe\8×O\23\8jÜÙ®\26\31F¿’õŦþB³/hK\14 i\3TU…^ÆÍ¾É Ù¥O/\23Æú‚10}†Ò\24ƒ\11\ +9SˆM\24ü&–Ér¸\28È\5u…\27ñìäéRüe«úK\0233­\26¸V&£Ò¥ò¿ºÒ\22ûH\7-1®\3Ô=W­:¹¢\5N\6KD‘øŠ\6\11ÃÐe˜ú«:ðê7ù\4ÁNˆ\8ÛtMcO\8m²²AÜÑ\20\26+‡:\31˜ÝÍœŒ†:G\0032iØ\8°«ô†\20>£v³Ý\24ûæ\0K\0•jSGWCk\13Ž€§ã\19YázH7\8/XCIÉEàAÿgÿg¢Ã<›°OÃ7…H¯ÔFs_¾ìzØ°•¢2éúJŽ†ŸÅ~¸½äª~¾×HýÈò\24Uø8ÏíÈ\127eÅ UêЊ \4Ât5ͬL1Ù‚[\26‘+O±ã'HÕ$R5Y¥\13`\26Žª}w\23\14—îˆI¤»Õ÷&ÑoÇ$¢?ÝMO§»g¾’y\ +!8€\11¶\8ó´ÒMVÒž\"ûtD\"\16@\0122äi²5:ƒB˜¨\2<ßÎò?”Âj˜xZN“g#v½\28tì:HÝ1Y/\6@”8z\19ˆ†›ÿ¬Rä\26è1 õCw\7äSE;û0£¥ªZKlšÂ–P@\ +ž>†å\\œ\0$ß\1œaÉÓ`Òì—›P‡&Ójɦ¶ÓäwÍtðü˜^š\24Ó‹.áؤ´Í¶±ñ\1\24r%0ÛDšO¶$–fKZ¤Ùì»)8%°TD’A\8m\ +•€Bغ˜ó3•ÆÆÔ»=«]\17í)˜ôaBP²y\28êÁ™¹«I³…òëÏÌ\24H¡`UžU-„[\"ÍU((Ê)œèÅ©+\12(™¡\9u‘t\ +\\MiA#\22!B«\19/Vr-\1¢\12L\15^Œø¯ú/gxo>ð)û‚¦ì´Äö\25\23\4ÉwÏ”v€†³w\15e\29õ\\¡R¼°¼äk>\1D(\19\2ÙšÃÜ\11¤P@šŽËÈ”öå\21©Ýj®jäZï¢\0F©\18æá8Ê\23«ßHØ;VAéL5\18èÂbf\12ìD©7\23o‘K1f\17\ 
+C\19Ä\30UAGyZlBV€Mxñz‚¨!Ư\1JV-á˜\22øóž@dž²î\4ƒ:¨ÝÉðÐTZ^ücOûó\127ÞR²êÈ\4u˜¸ÈL)÷\0263\4\13ˆä4Y±yBûDZ1\28\16}Ò\7WÛºHÂÆ\23¢Y\2rG»0vŠ.ÓLZQnNb„ò¬t„¡X/Z\ + Të\5ü\3†\16\18…’òcBý¦\ +fKF\7gZÇÓ\17ÐC\12âõ½þˆþHë\7š’9FøR 2\"]“\5àU…ÕíX9øVvÿ\21T@ÿy§Jð“pÕ’¼¯†íF\5 Ó$qÏ’xí˜4ê<~T-ÉÌ\28.ÝÎ\19ÈÍÙ˜j\0074LàafÅa˜ƒÝÑ°’\13\28/åOÆ\24œ[\31ùâ»Ý\23ˆ^>ûÿù\16Ÿ@\25°¾{¶\9\23üC‹hð\13A\17cˆ,ö$çÏv‹C¸^–ç©.ewËÑê¡\7àë\30ðB(\19&›¹\17ž 6•\12w‘\25„.öøhö\127í²”¤ÓõD\16\12¿–Zq‚+È\\0\28ë-%j\ +\0153‚\7dM+G4s¨Ïá³_ÜW#t†B'»\11[\4\18î)ß;wɽw¿Ýcf\15‡yÁ€.\"\127ð<ª/Â\26‡ÉoM€œ,jˆÏ\24KöXSŽ\6“´®Qf‹Ê}qNÂ\15Í dͦ¸ñ\127£\14¾ã\8¸ìÐãaËM@÷gµ•3Õ+\0121Ç\31÷¿\21Ÿ8a?<\12o\178µê\16\21I\15Å?Æ‚iÈôð椃ºGr#=\7mÃx885LØΕ\3yÞ}š{ß\28ÐÝwSÀ·&\ +\23-ʯ\9ØÈ[ڨʆ\29\3:rS)›PŒ·?ÿ\31º6ÿü>|Êù´¢@ Ö²\5uö ë§Ù\"\127:20‡ªÔ_7¹V­\12Ÿ§\6\21\31\13\3Ú<\15\127tj*íY\14¼ôú«ÚòH¤\31H¸@{åZüÀŸðöäænF9n|úfmJÀLµn{T“Û×wŽÀî7_®¾£½æË0\26\2ôˆ…‘L\2à\18Õ—Â.&\17{iœ¹Îà¦g6b\1Þ¿ü\0177Í\6rø22À‚5¼\0077·c\20Ð\22W›\17éÜ\9\25Èy²\9\29Ü\0172í°Ÿ÷?Oèxá†Ê ŠY©sñ(\7Ãy\24#-;¬\17\30 :G=ÙCß¡üYý|p„¶6\1\20,3vlO¸åËÀ\13Ѻ“Cm\14ôÐ6ôvËñmÕ²\0176ûŽã\127_o«… VóY®Áb¹\"%©ðë\16”L­:û×P$¥¿\29×Î+W\0198Úˆ\21\15\1«,àŸ+¨âzò¤Z\17+Â=jsYÑË;:J¸Üºkf]­™u…›í@TrLáKðÌÊݎĬú»Ž]\29\6Ë¿ãß¿ó¿ \1¸\24w34«.‚Ù\24:¦7iâ\7:ñ\12î|Ê2\28@\2…ÇM¿ÎNz\127Ç\127¼ëT\12H-=“\12²\19%ç\\ý¿.\1^þâFål h=Y\0ùÀóÒjY\"Ü¡3p1*¹‡ô_‘\30\26\21\2Ó×j^Ã\21Ò\0267Û…\25 \3Icí\127Æ­V̽9€ñ°O«qÕ®·¢¥U$Á\23^m…ëY\11me_Å\12àŠ2äu0g/üÞòkª®ÃEc> 1yE?Î:°ŽÖá\14Cª‚áXuú­C\24õ[m\19\12Ÿæ®Á·@ozP4Є=\ +'B»ª!\28(rqvóÛâóô3\7\25\9\22²û\26h5gK\15pÖE˜\12ÉyÉXE¢¯fÐZ\7ˆ\17B2¥e^ñ1i\28\19\00926i‰Ë\\^èϯŒö\1ò¸m°Ê\24ŒÎZœ÷\25µ,”àw>E1¡þüÏÛ\17ênílÆJ@˜$®ÆR!cDÜí\17fK\12KêbæwÇ5\21Ð_\6\23Ár¸X+ƒª©˜%¢\24c.2å»t #òq\15±Â$çÔ»¼1¾Ò{\5^¬nÙB\6ƒA²ÛH‘\\\13\28w\28wò³Ð.lò+Úç4a´&®ˆŠ\28Ö›*zÓ×o\17+W…r?\9<“\16ÑdÑ©–\0274¤3o‡èÔÛ…t*\11»ó'þß)ˆ•~›qh*f¢)ø·¢Ô\\ojÊͪŠWìÊÓ<ý\0147îÕ×›ji\6UQÃð\23¯¶\15ø¶\30…,h+ÐB\30Æ\1273™­\31V!“Û\1yÕ¯ã¿\4C+ëßqS8d4>\24©Ž†<\\¬f®Y¹ÛSò4ÚEÞh\29W‚W\"^\15¶d¯p07¢¦ŽÌ \20þ\19ËMÅjÞ\24L\22¸\1¼Ü¼­Ü+®‡»úîÞì\8\23:¬ÝtT\9ˆ¿¬ðƒBß«mÓ‰K~µ\ +ï\21¤MŽ\14\17¥ac›†\0154o¯\22\7U¼H0ŨÁ¸\27nrl㋳>6ŸXç\7Ö\13MäAE.¯`kblû¤\5\29抂«jMn]ºS-¡²kòu b'ÒØh¸¨\0258˜ä.”…ÂöÕ÷%÷\29f‚鹄fcå]+!Ü3\24P»”É,roÜ!‹ÆïpzË=’Ï\7äjâåJ\21,\13mÈ\9½\6!ÂWPæ³\ +­'ˆEÀ“píWF\4\13¼Ujh©'\21\23Щ´j\31Wà\26†žWoL¤A˜\14 Nê×N\7¤Ò“ú\3QÝuõm.vR\6\23H5\0öÑŽÕ»ÊÊáÖe&(\23.kˆªàQͬ(ã\0×Á9\0£ŸyN7`2YlåqY\9Ødz6òƒ=LÈÌÍ&®u·òy\22ŸƒI\"ŒbµmpªÄ^˜QúsP\2!jÊ-¢N1éËÿÏv]…ô—ËW\9\20\29µû¹\16ˆX‰:…\21ÀÚ4q½BT¯\16Í\ +€Aoí\ +ËÝ´ï\ +£\3!\16.«™ÍÔ\"\9WS„*K\3Í[®ºŠ­\22\27Ó\1Ø\25ÉÊQÈ*°ªÕ\21J\14\15¬Ö\14c\7õýÃ\24~öÐc˜_^¡™Ö01±ÆÓ½¢DЭ֞°VM%\18Q3«'Z\17\14Z}Û oƒ\6¯aÆX‰àTWÍA\3,0´j–ñ̾BˆukS\21­Ü@-\18Ùµ¶É2\31«(.y\22o\13V °òÒ½6Õ\26ñ\29\14\27÷ìŸ\31&é϶\8Mµ\ +·[-šÓ¯/Uÿ\21\27÷öö·0ØÃßMÈÕßmÔ\8\29°~¡½\28\11S:\4…íáN\ +b5fÎ/¸›ù\31¦\3t`~ží^ìjbd¿\14g\11ó\16TìV\23øùä\19Ÿ/EóȉôÈ—\14IÍä¯Ûð\25P\0119#ÚÓgÃ_V9¡âx±ÔÑý¥]9#€ªïœ]YÞ@`ù\18‡~ä‹Ý‰š[¨<`œù4ý\"ÿÅ9u¼ù©e¢qq'\0Wo¢\20N0ôçÓ­’e¤ta8r&\20£ª`ž\5µ\6\7>e$W9y\0191H¤¿#Û* c(ƒ·dËVd^„Ž\0026ÖÇU©k\2Ç¿ÙÅÏÔ\4v¸\12;#!ôä\20â&ûŸÂ\31\0029Ì\25\11@\24’CYâúõÝE¯ï\14Õ\6JAÅaê\9ßÌjye\11\30Á\29÷¢Š³k¼\17Ý;\14›\21†žW†dþ5ï_ÇZ\\óàÍŒE@ªÞé³å‹/`VÁ€R…Ë\11ý2Äbõ\17ôJ+ŒøÁsd¹ÐË•ÆÂW\11\0{&jò‡–9Àê°ìÏiÏ0ˆ¢¹õ Žkx>ð$ÒÉ×\0113×y²ÓdzûÄ\"—‹*ùŒ\28\19óÎ\2KÙ2J\20¢eŸaLè¨+Ÿ;qí™O¡l8[Å+îè°»óÀ\18f]=ž6ð˜Àí¬üÕÃÏî¤1®^glIÜ\127S\22h>\22¯J¬”ªSH½ÙÞ©jût×6¦GÃ0-E7¯¸vu\2\6ªiXt´ùG¬!õA,Z\21²åš\8,køŠ“bL\6_€xÆ\6$·÷‚˜¼ß\ +ùF_ð›V\"7\27¬\16Î\25Îœ‹]&\30<¶]œ8ráÍ\4Œ\1Ø\4Ì}KNæòŸÿrÌe¨•\29||\1­S\15´[ý`]æ»·áÂÒ\3øð\5,\31nô–(\24”´v\0'7/\7\26ÉO\27Y\19à0ÍŸ~ÞËaç:\127~vüG\9Z>~\14\28Áx\23R„¤UîÑ/+¯ß9ó¿?w¥½JÞ[#\127\14ų(©ÖÙš?NñZÁ\\úD7”€ïL2Š¡³\21heÔÐf+ 
Íhá\29í¹ó\29D\31²ÕÑ\18S]#«iɨ£íD™f¶Í\12;È{€Å@e˜ý#ûr!›á#s\25åÿ”'g\24ü\31'\0Z\19žüÑÙáýÈp{d\24=Ô£jn¨¥åYåò–C”Ì@‰Î¶”“‘$«\1´PŽlÀÙÂcùÜ¡?•äÃÄ>w¦žçÀüÇ+¥\12\30•³è,ºí\28–'Þü‚“]°­hç3FÝ\1'6ZÞs\22Jƒc:‡Ä™>\28¤ã8ÃM¾àY\14¹³l3\25#Zìp—(‚n°üX>Cº\29`Ø\7EʃÊ\11òt…¯Ø­\17Åž°¡ÿ\18ÿÉb\6¨^Ãç<ŸžÖ˜îfíxr\13aª\0131ú›3\127Ýß\3°8Ðȯ¨3§èÀ\27U\30¸ÕÆÍD£P,c\28…ÿ“Ý„{}㎂:]¡ž£B7Fl8ÞLïã?¨ÙÃM$‚=‰*10Bò>'{89xÛ\16\0214\2–u\8Hîq\22J›Mk\18ˆÄ\0286î„Ïäd³¶Ë¿Ë¿@À³m°<\8â5à—ÿ|ÔìŠ;\3²oy°\\tŽ92pŠÉ\0298\31\7S¥ª™õ€8ÉÅ¡)<4\14}5š9\28vÝ N¥ÎëÅ \17±8b¡)~:Ï¡\11$‡ò[´Èi\30¶Aé³Ð}›cnêËËuuZ§AO\1}`ë\0309„É\2z@->–{3ßñÍ%\15ßêY%ÜUõZAPŽ\28åäÍlWrsøÎö1ïü`‘ýþž¡.1›óÙF¿±ÅBÊûèÐ)B!®|ÓÉË]¶M=ù|P È…¼5ÉÐ…ž÷ÕÕY‰­š\8êžãß—¤e¯\31\27s\23w\29èÕõ$ÈΘk„Õ½î´7\127lƒã¶—[Éq‰\2\19¹ý\14ª?Ý%“œ^\24‡e8:li\14\3ƒ\17-ÌV7öCàP#ê&T&,„\6\28„c\16\25£Mm\1ð\\¬ü·‡ŒþȼÝd\12¿/ƒ}\127Éw=°¬Ç+‰a˜U<öÅUEx§32\12\25ËË„`€ÎØ«&\6\26…†ï€ãFÝP÷)/ϘNtÁ\7\7cæâj²ïš#\14ômDX\11p§~:g\127å\17\25\"è¡ž\127\28daþ—ÃZ \20ã7\23qZ´\2õSö@=üùmTùÏïÕ1&È\127û&EÀÈíÊìXÙ…åªßWô\16\0|\0è\15\23&¼\25@uô\21ºÊYÙ1òÊÊF÷T†U0ÓÀÌu\".ÿC@1gKzå\12\13Î{~\14‚\27¦»É\16ë¬ØÒ~’óFÐÈ—¦'\4=Š\31³8°Ž\12W.A\12\2l]”Å<ÜU!çƒJ*¾oÛE\0313\\öœMa,\23cP¨7co\5¬\6¬S\20céë\22v\20²\13Ç[\7°\28ZF²†…ë\28\22®åCs|\14…Èy\23VÌ!¬˜CF1[F1‡ˆ\"Ìs‰Üˆ\31\26,\6à#€bÀ\21b¹'»Ùn£’ä8\26\7³ØbþGlQYVSYãú\ +ÄòæX|\31¥ú\2U`qûá\14R\27uxš®ãÐÌŸÕ),\23ì4Ê_ª™y3¼\\«¿YS\20\29\9'\23¹sô$Ê\22rAq\20|2ñ˜#¡[Ù\28“&Îbî–Ì[–CŸ²Á\18 2°»vV\11–wóÔÙZ–\29ëÍßÒ‚\25É@µ°\30½‹V¨XΓj\2²Æ8õ–žÌûE\127\6\27»œvH\7WK\"dstÝw?okñùé?PU\25%pT&”Q4\29\\m¾ƒ)9ò\0290Ô\\»\27ÇmØi3˜]3#e5qÏýÔÐc¬O\127é‰\6A\2à&Ed\11×{¶ì¦DÌ÷fŒ\15\0ùÛØk‘ç˾pä?è´ŽÍw{6ç6šÊ\3FÞµŸ)…¯‡åš$i\11&\20§\127þ”\127þ<#ûÅÙM›\31\26d¹Ù6Çäò|W¬D\17kdµ-K$©Îø¤í«/Ÿ9©,õˆ{\0ëÆXŠ<`»Nå¼ëR\22ü\5= †W’ }£¿ýÍ\30cV!0¨\15nüßxá0í‰\0145oA\20‡É¹\27àë®ù\1\13ÈÝ:+ÃWš?F_U›Ç[Æ\\6ˆác\0qzt¾:UõŸ\7s­ o}A\7Ö‹7È|ÐW7d›ò\22»Øæ›îK+QälD˜Çæ*a\11=N\25{Ù`\15†×\29Þwø$=\19´ß¹‡]e32·ù¾zóN°Ñjõ\26\2„l¹\27\18ƒd²ŠeÔð\23Ñpr4ì…«Ùb­\5\ +ø\20:R¬ð­œ,û_Bï[aS(¼ó¨W\11:Þª³žOo¸ó[ìÞ™4hùÖ\6Í­X9]F\5…ª·¢#\20ù\0\8týGº†LW¢xÈÑ\23Çî~\16\28Ñ­P  K(z+'®ãäR\21ó\2É3½S\28$³\27€q\24*3MŽL\ +³2Z\ +‚Û\29\13û\27Qó›Š@µëÛGá\21å\23'–»Oœ…\"fÄø\21\7+NCºø\1…V¡Ã͇ѧ\0035ÌÅÈ&ï£g*$ê.»®ÈÂâÂò@\6!V$\26çÅPübR,˜C>mJÇ\18o$Åo$¸›ë¿Ò@ç*\22\9€ÂVKiùÝíBÏ^1ß¾}gÒ©\18\4EçjÁªh¬áÑã…âv5Êäã‘V9\24\21Pk‚4‹\12&òÕ™ÏW\6\23m/\ +ðt¥¬c#gódj_vcôÛ…Ai1º<àRBËp%‘¢Du[‰\28\15>ò ðM•ÞÚÙðh.Æ]/dæ\0000'\20Ÿ-…÷ˆþÍXpÕ솨ëpÿü‡6|j$?¡\ +ÊþpP\16ïT:-ÐòÅ ~1#º/u²\8Ú“»÷kxûeA2PŒÓåÍ,ƒwRÝx/´\ +Z\20q\25^ß>\12·ð''C]2\16\22\15…~Úõ'og—ªó0ø\15ß\13å_Fg–ˆ-\17Zvøm\16\21¹Ú½‘œ'ÿ³ìì\\Va\6Ì-üÅIï\14œâC\\ˆÙ`3ȑ௉K\5\27‘øm艮uþ½7à·³ \2WGþíÿ\\È\27FIÚræð§æOh3%Xó\26ŸvŒ‚˜ëÉ@\27\2\21oÎÀ9@ãÛê®d‡\22x8\14Ûd˜Ÿ}Ž“CY\16_\127±h¿\28\\Míƒjp¢Ôî‹!ürÓ)\30²³œOð\4\20”dÓey\7Í»\18O\7e8ÁìlÓ\25ÌëÁšæ± qc\3À¨]xïúÊ@6ää\ +\28\26£¦× ªà\ +@ˆ‡¸ü\30À=jãéÊF\ +«È-6rWÂÈÐÒ\27!7ØðT\27¥\31-<\12\28¦\0:\2HDG\15Pb\5»xƒA±[\29ϱfžÕ÷\0C€+PÞ\18¦ñŒ\25k}î–ð\ +/\7¬\12üˆÂ\22cš\6ì\3~H°o\ +°\4`\8‡Ð_„\23¥†5¢Ÿì¨ Cþ¬wà54°q\7»~á•A›ç0!7VBÛ¸új¢r\22FÄúø0Ó\9lô4|ò¥\ +á\23•ƒ…Žo\23‡H)…\23§)›Au\29‘å³%”•\20\11Ÿ?.'\15ˆŸ\27JØÚ£ºXM*¡|œjóDŠj”æ´É\25Ó§K)vkôôR/L5Ák\4{°kj•ùÅ/\16%4ØQèƒö£¥‡ïZWBÿ¦¾ž?y$ö?Tê\3ƒÐe7¬Wþ1¬\23\127\22æ‡\25aK\24Ø+a`䨢³î\0228O\4{O\7\20ž;Õ5òÜÔÞ\1u(e\127Ç°B\23ÎRuû»}V¤Yâ…¢ð@Q\24ª¶ìO\19Å/\0194ÁËJ8—+ˆÆ¡‚mä9\0,¾$zÐ<î²å\20ÖAÏsB‰ëî\18vôèœÊá9¸û\14Ö;QšU‚ï¦}“ø\27ƒ~\5Žr9M¿\16QÕ¿)\29\25˜«êâ4¹˜\17T~&¶6ø\2;$\28M'þ\11Û 5\26Ì\20Ë)\4)\18p|·«á¸a9I‘êð[ïd‰å\13ó£\28È\13<:¼o‘Ž\13üæs–}í L²Œ\16(þÄ\17S¬±\29ߌçd|a¤á£Î®‚‘<ž l\2¯tXÀSß1PðH*u\9g…«ï\12 \11F\27R\17âzåK [Jv]šcz’¡¤ë3 3a¼Çú²š÷2b×Þžì(zn´6Ì\18\26àËÈs\31öfJFë™\21÷œ\3h¸G´ß\1zWAàH‰¾Ò.æÌܳ8ö`¬@¾¿\3”\0005Àƒ\15z§\31i/ï3e7RWBõ™A\ + ‘\24KT9^©\ +7Ë(\18:‘¾Fó¹k-–ŸVÙšˆãÃÕ}\12=±\15\14ˆ?ÿ›qû‹:ÜE\24”û»\2ï°Êëÿi¶ëcÿ.Œ^Ž5\3áã\30=ü\12ÿ]Ý|\31,§\17°|h ïØÖä\ +á\29Ïx±{ 
fá}äx¸šô01!™6ù3Fœï>kû{Êö°×Þ}TßýE¨¢b«Þ…[cZ0Ñ\2XQËd\20†û:9XÎ*\29¦ƒ\11œNå\3¤\31FPM²idKŸ\22\2’ª9¥þӱɮO×\25T]ã¯ÿBõ\3áZB¾5 \21úX\18k`á\25³PTŠ\9\3_iA)\4Á\23»ìN‰[¥_Å:\5Huc4D§+©:_™-)SB ])lµLy9<¸S,–µ\6ˆ’–‹üúàPDñŠ•ú$Jó\28FŽK™©p0°\22K6\19#\\ºÄ…¢}×\19§\2¢Qyï—¾iì\\B¡nF¾’çß1ùp<$´w—Dâ~°¤‡Ãé}yž.î[…'\13w\24,+¾\0274èŒ\2ÿf­¯t7Ìce5õ†,\7.ݲŠºø—_°ù›Þ`»ZíÑTG'z\30ÂÇJ\18ho¾\2\0152dÌ»\21¶(R)Vž\13×û¾E\24v¤E0KãÂÍ/`zé`½[V¿Å†n¶}d$s\14_1 #\20\8\26‡T=Zº80Ì^,)}àMʦŎìî=\28ß*~ÕÆCò‘3i\29Ý|®­¼ò½êéY@ôŠÕ…É\19Alxåé!yñwÓd\6c«\28IÕ›Y=}~\6Ї0õ7†¯\6Ð\12ª°½ñ™\19S¥rÿd]Æåâ„W{nþ\127·;½\7\24\28c¹6¼êû¸æ6X\29\25\30®Jñ¨õõd)$®ãã›X…\0L\1\22—ej'té¹{ß\3\20gÌÕ…zƒ\5¥Õ&Q¹4!c‹:«Ã\24\29Øc\0‘¯•H\3)^½V*ìœ\6S\13H\21Clÿ\3o݃\27=víR{fe\5ãæI\16.}£·uÀ~\14Pu0ZVy6þ(¡®\15^\11,D\21‹X»]L‘¾\6EVÑq\14k›þaàËE™É«X‘‡f‡\31Ä)à\8¹\20á&”jpe\9j«—û˶Á­)qyó\3\7ê'ìßó\21WòøO7\12¥F|³¯í_}ØÇeIu·º¯nTäÆ™W\17w†!d€t\20à¹Úá\23»Ü-U(c\3—`TTÃ¥%Kvú¨·…,Å\24VN£ïɸ&«6GF8÷8˜P”ÓŸ©jBy½r'\127%­.\2\4ÃÀs6¹±˜®R;\127\127Ý<\20¼\3Úï‘å<¯\127{\9d_L\0098o>±\5\9Œ{ä\20^¸?ì/ÇHæk±j¶dÂá\18¦\20&Vø=)€®o\\òTø#\3ž\3\\\2øÛGkC´\127ô°ç!–\5Ú\17\13j€‡‰)„€Náõ,…á\6×\20X…§Ä‘)–{6‘\5¼»ü41­rÒÄs­}VWsÉ)¸ÝíãíƒÄ-êÒV{Õ\15š<§\24%S4<ÜRxsE|Y›Ç¥yÃhž\29Í\27Jó†‚:D\\îÿìLA—Öæû•Úbú\4ß\27ÞHiܯ6®*jó\13Ž’\24]\18!’—È’c\13ÿ|y\23äÀö\4ø1f\7\28vxÝ¡Ëvß‚HÖ—Ó½bå°Íj«)6Ô)ò\26ø¥\29±Ôy,í“厒uEÀ{Uš¥ãA\11Z ¡jvñ[\18.D}ƒã\4wÄ­\29YYþ\6îÈÆU[ƒÇCƒÔLà\30¤ôm¾ÐŸh””\3ÎLª”\29\5ò\1¨\4ê\8Š÷%>΃m‰G¦bF\24ÕÛýÈ}ai«‘O¿\25\17\15\13ëǵ‚u7LÈžìÞìRik÷+V\23)zàG\7\21:\0ζ\25Q@Çä0UlYgØ!ßîéè\3·óO\5?‡\27\29ù\28\7»ìèe”Û@t_oå5©ä\23òÝåõa²\22Ô¡ß|ѾÄ`ß\22Ââè'¡cÙüB¢ÐbW\11–„\14b\0286t+Ää\23*³þüDz÷ÿŽ‡“ê\19o¹ñ}Ù=\31\21v\30EÜ\30þügpÐ\0161Úù\3ŽJ\17×r†\3\0AE|KD\ +³ûóŸýªG~&€¨Mb‹]þs;C™\23WîJ}‚É‘@u˜ê@qIA\24š\13þ\0037—\\ÄÈ\5x\3\18˜í.n_ŠVþvÐ\26ñ\127+\8¬G£ O¡\30à?yoºwb%¡Fà.\127þS‹¿Ý\\Nsí\16\1Ãu\11QtAØî»V£\5&òþ†EIÖ\16\15ÝBS\14\ +äâ\ +°(\18f¡³½\23»W»7»w§Ft\8U¼‹‹ÒyY-\127EPu\16\31¯ˆauFvøö¢zújoÕOmBt`ª¬ñÔ&D\6\27§*ú‚Œ‘Ñ\31E]®v¹\\®°m¡QEÓ\3À\24\31y¡÷\27\\E®ê\13‰Ë¤ÿ¨³À r!âFz”\0\2&nð+/tÕ/t\2£(ÒzòÙ(Ï1Û£íé\0Â¥\18y´«!nUOfS3îBG¨Ò½6j{ßì›Ü³\19éûÉÕíÑX\7BF§\3Ü}Ó§zÁ†R\15Õ|(ÕV¨Àd¹YA\11uP\26HrÝŸÿ*¯\127\29ˆ]\13\17*ÀHɳHÆj.“\26Æ™Ð÷þIS5’\21õ\6ŒÑâñ\20iC¸[·Ü=î܆Ãü0“\12–ˆ\19åP£\127¢§¡;C*\4‡*\25Ÿñ)^eI»¹ ŽÖÆ<ÐÑ˯ÝÏá4\28Žu\21\0õmÀÄAx\ +k“?é‘\28=b>ríµê)\4š;kÓ1‡Æl\15cœ\25\22ÛÄ\30ÃÇXuÐ¥\4n|S;§z^D\16ÈëêÿZÞ\7+Æ®»Õ\0266V\13¨¨\5vñ\13îpx1\12oŽ\19y_ý¬ˆjéH |ˆ\18‹=¬Æj\17 0d¦ò©õ\23”½ØÏê\19x’£yžë<î=\31Œ5\19R¨T\11\11³ûŸ'ýÌC#š„JdD$Œ<“\7È–¤\28ì¿•ó\27¡Ká›ì ÕZ\28*v£\14ÆÁ\9¹¾\12Xãû\3%øø›\21eW\3,å^Ä/^Í°R\16=]­»½úá²\11äý\16¸;>\0300ß\4þ/¦“6öº¿bÖÏ)}Ý?þùƒ\2\26[~U}…óë0¯Vÿ[¿n:ª«Eª\0055«åŠ\20Ëê絬¢x·ž›vYÑ„ÝI\30älì.Ì¢A\27žUV°×\29ªEgê`ní>è\8·Ð\20\0284S@SÀì£Î@\31EoO~ÚGãx\18«ƒÎ\0059`A\ +Q0WjȆQ'ÀÁ¤‡\13²/\4-jÜ/ÿAF¸Ú\\Ó\16½\3~Ì÷\0Ÿ¤êÙÆ0ßÄ'&7¨³Xgè‰úÚa#öÓÍeGáɉ$\19¬Ÿo\8 sHšà¹\4¸î¡··P4å†On§B—\8\21NoØ\0243?SùknHÇs«\18ñK¢äŠ*–‹w+öW\30®N\1n\1Øb\7\12\13“Øê.ñù9E OI\8g¬ˆ‰\2\9?Î\0075ò\23^\21KR¸^¶ƒ­ñ\5<^\3–\0ôŠ\31Æ Û>\15:ÛTÝÅ…G\15\13eö¦=vÄ®ín\\Š?\3£\13»û\24µÀþ–!T¡þÀéâtÕÁ¬Ï\1ö‰jŸ“ëqsŠÑ5„Õ‘°¿ÞP(?ùÿ쬋Chøb‹¼@FG\9ª§‰-NA~ž#pðG¬b24±<#ÕFø¾\7£¦ÄÁØ\18¨~ž\19a\26½hs\22\21å%´ÑÖ¤ÔÔly[øôJ€úf£Óƒ§BŽ~ñ\3Þ%\2&g\6ƒ¨¶M|\13˜™\12ð[noÁ\0ç¶ò°g/\15zäÓB‚ÅüÊ=µÆC\127¼‹ë\15§¾\31ç\27]·ñäÜw«Ÿ¨»å)M˜LÕ±?UX\16(#̆6Ï\28LušÒ”“Ü\26ˆ­R\15F»w—BìÑ(\"U±®%¢K”&b1¢9B\12Ç\0292벎\5@ëÜ~P4ò>Ùd-\19\9¯\15-Þb>æÍc@\5J¬ê>¾\3¢›ì2-¹ %\25º_\0\12EgSJ\21™&¶`ß/øø\26|\7ñëúÏ?†!„\22\14qõ ¬ÕcÉuŽz“\19mh=]Üû:BS¢±Þ\27¯·Þ~\30#™¿‰ùöTäñ’þï¾™{®p}ƒ{\11\31\23¸5ô\11\1LNä¼È;ƒdµ\9ûºËv(œ\31\4Å\0313ªí«_îx¿¼us\3L\11\6¶<îIÇÅ^µ$¼}Éàê\\#ý\6üt \24åÙ£¬ñ\22Þ%@\9°9éþ¹ÉÅÌv\23\15‚·^.™«}«S¯Q}°^Ĺ³A¸e¯BuÓàÿÀ×\\Æù67\9!ãŠ&åƒo¥QÏ:|¾ïPUËÞ͉tuÍí‚çJTïS0þ¾¶&\18ÄXÍY¿qÑ¥yðµ¶R%F\22Ó\3\6³¯\ +rr\00104bNè\18aœ\14È\7}yè~«Óò?6Ç\28îŽh\30~QÄ\\}wvµæ»\26\12Wô\0ÆËihsõ˜\0\21\22y\15/¼ƒ5xâwØ¢ðŸ/ûãÙØ\9¯ž_pÉ׌\ 
+|\16\"Þ\1279O`éU\6¸Âß°Î~ºÙà Z¢çH(;­9–\29§Ü%ø\16ˆâ²«øf$,œUoá™’-îc?\20\5\\¡l¯»³\ +\27hv6ã×¥7MY #«\25\19Ï|‰\20\"ANÉØõ–S\12o\13˜]¿Áu²BOJ Ÿ‚\19‹‚°×c¯«æ\23\\}óî&Q\27È1[¢Ÿ\2,D¬nòѬ\24\14¥í\29Š\27-\30r·o¤ ôáa'\6ÏÛŸ1èbij\6âê>¨ÃÉ]\\µ\28ì\5÷\19^v•„bPÝžg[\20Ù@†\22_ú ìù+@4»æ\24;ï+4\0179·êg\22\23ÍätQÈRTìŸ\28j\17FŸýpR«h.IåZ® úÕ¶r¯t&ì×\21¾æj\29äÕÆÈp—ÈÅÖÏÕ\19žÁ™\15Þ3ªYIkµ\6àjúús‡\11p¢ÓmIL…™ 41óïò/ç:Z˜_Q7øÃk˜\13«»Y¯Ú»i,\9»4«Â°)\0;YŽœý'>a±òZ=y2RzŽ›úçGçMm~U®ë“ƒ£y^á\28h\3ÿ\7ßy@&4în•ðÂÇúf‰ôŽ·&¥ònØ`Á«~t9`\9®ÚTWõ#‹Zh\15î5X;Ã(n@™g¸‡\1¢vT\22ºš\26Š™H=\0024R\7T³faý½âh\19n~$lÃDF\27±oØoqw\8àë\3–ªí\27í&‚X΀êëÀjA/\1ܵC`ˆ\\¨À´o¨yÞ|'vÝá]Ðw_«­eq2s5Æ@{WCË\14U´ð½“'§ŠÿåË v\14£\15\1Ëe‡þ\30… I\14×õ¬TóXuT7«\12n¶\7Ó¬%¸Yya³V`må\7_ïÇ\31\12\26¶Ð;bÀǬ\27¸…Æ\26\7\ +ólØ•¡[Q\18¼D0ˆ{\19j­ŸH\22Å\\´?6\20¥+ZTa»€kè³0…5”\2Ë\1…BTt%é~|Yf”Ž¸pÇDF¤m\5ÀÚ(\0123+í \14>^OŸ¢›ˆ÷‹]5îŠ0›R]‘Gkè°qŒšzEƦÅûb³%³~‡åW„çË\14'R{Ö¢GEÿ‘ÿ\00000WßAèã^ÃWÖÃ5Q\20¢·\ +ˆÈ \19\26&ÊåÚB9\30\12>6[FÃ\29ìR\18µ\17\16IÝÐl£\17¼šYÊejÍ\28D\27¶›Æâ†e/\\ân°Ëv›™¾6U¦\16(Šf-ÿ-ì–)†UtƒJ%ˆân\\ý5\0270kJ8²Ë\30´Û·1Æ™}\28¯\3¡ìwܤÀbñNœÅÎ\2ª=cA\5wƒ]IþzkØiýͱaS@ΈÃ\30xŸP%#\15Yú{\"5]¦€Ì\127¦îÝ·*†ÞAî*ç\30;°J˜¼ƒO±\15ó0\"¼ã¬dÓ™§Ô\22öÀÚ„U$¥ò¨N1m'¿ï´ÉZ¥\0Õa|ÒÚÄÛt¡4&ÐÄ\27r›üzÜ&ì­è+wJ\4ÉQ\8\24‚0ñ­°±\11\31Æ›Ú4Ç\0090¡Î¸Mž\23“u÷¢†•Ü6Ò×l\127Ln¯S½MÕÅV'\2—U\8“¦C\1”‚\30\20‚\13„Öͼ]¿µÙb¤\13Ëcr-†\9?\11È©a#÷Œ>ÆC³^\19%¥ÒØïaYÌè1wRoW†Ó\14Ÿä¹à\\ù{a†Š8€¾QÞôv„GF?\ +™W¿\127ê\0116\11u\0±3¿L%\12z\9À¼\4Ù›\13P—Ñf3äÙO\26ïs3X*õhþžš…}\"£‚B\4E&ËÕ¾°ôBØÛr\20&ÓPí®RÐí°ª\19ÍB\2ÚŽ&Q%;+\15ž³=l\2¡MTÓc9\127\12¨\"úBö³Z½tçñì Å\30Fq±>Ø#vKÞÉÆ­½…ÑyÛ•ÿaw£>jÛ\2‰‡{#\17w\12m‰™±èX©T\0i\31eÁ®“ÁÓÀCn\25\20y|Ì\1¨ñ•\0112E´¢G\251^ÚÀƒ$13ÿµÕª\25£;a,ÑAH·‘øAÔ´w,z\0Ý\27\\Ê6,ȃ‚,±ö„:³ø\22:¶ó7¹7m\11BŒ\13“æÔ—¿p±´Å½Â¾¦Pöµ\5®\12\\ºÉÊJ\14>²\22ÐbýÃhª}\20ac\2\2l\26‹{à‘¦.ÀÔ““½\ +aã\6~‡TFëÓ…¿+NÆ\11B'·ÿ2`Ç0ú…é\29ýgWÕ´ÓßuÈêHTåË…¼‘ë}i½Ð[\"o\27ê´í²\5á \6º3ð‚Y+Ø\25JJ\26_ån\17\6/j%WžlÂÒ+\29“\19õ³\4w0p„÷\26ÞÛîý(.BßÊVQ±DîZ#Þ˜Ž1Å\28ø'úE)Qø\20ž3ûM¶,'´\11'tö6xÌØH'Šm0{«ÖJ†Ž8êpe P¦ö†½/\0181“²o\16Z¶É,û˜¨V\22xlhÿk°9Òxöðܯ\20Kõ\3ƒÍ±ôs,ýl\5¢-\31Ë\19\15\ +·[6b“hsçln_·v“]ŸRª€öMÕ\25\9u\21^>¼Õù\30Æ\12\20FQ0(öäk–…\13\0\22_xÏ\5¨\31Ê\16ƒ¥Z­‘‚ƒ£„<`Ø»k\0083ñM\14)NæÎÀ!ú*}bž•t³;¹,&£Üæšq°1ùmE»:žë”\22fà[ì\8€ªÉ7~…wÜœ†oøZ\5O£.È\18\8ݯ\30°\8Á\27*&xßj!Að¯\18)2]U¼Ã\21ô#2×\9g\127*ðþãc\1™q]>¸¼\\\31”›µ¸¾\1ø[¶\16ßÌ_¯”IœÂEM³\18\23×Ñ—1VÊÅ\30bþÙ\20=\12ë}³AxuH£\28¸È›\17N8Û˜ÙdA­q«V†Ðê¾nªq¹ÐÝÒ‚¼YW‹\18‚èk\27\1O©\22)6Àçë³\22šZP»Á䨡)¶ùY–“\24\5T%@uª\7‘p\6¶zdàa\8裸Á£\6Ô˜\16õŸ\9Ñ×ÄÇÓñ“\15Ôä\29\8¾¡\0226¸\26o\16úT=ÒUÕêÜZ\\ã4ßGò-ßÿ´\26W\8\1ç\0292•ªçRØçjÜ#Ù\5Ý®ÖïÔl|\29ðÁŠ2‰Nÿ„ù\7`\ +\31ËI\7\7\4ì\15î\3”ùA>† E#‰º\1ãXr”\17ÙÕvxˆÐ\20R)ŠÙzLå˜&•óe÷ü&G›,nî\1˜Kú\9ËW?_oVe2¼q\29â\8\8HÌ\19ý rïð;÷/@?Vó\7‰-ý9óNýç\7uÀïoæ„=^í·¬³\2\14‰øóŸß\6Ø°’‹u©Ÿs9\29ì«Žzº‚Öµh\0géÏŦ›ìÅôÓO\24ØÂïX×öÂ}¤š@üuø5ç+‰­ný ý]Á=A7\23mÛ[?6u¼CÌYùˆÃýÏæä$¸™ñ\7ˆ¨\31QßÉ`\9_ŽÐl•\12äÉ)ü˜nú¹¹¡7˜·\13+î\17¦Ò\31Ûùú\1Ý×ÿ;,§?Ô׌´?SÇ''÷½mò\6Œ*O\\§\24h\ +*óDA@\ +..@(Ð\14èpìÁÚw‰À½sõgÚ¡Sí]†Å%\21‹Å®\31,ÖãñXc‹ôÏOa®Ç€ê&{o\2\21Åg“_§íýw\2Ss|&\0fš‡\25\27í‹\15êÿŸÿ\30øû‰#Tø¡%£ýA»°ÜOœ/œ3Î\21G\31\27Žúð0jáªI\15ëh~ÀF‘æaÃ\29í&\\\20\31\0\28†åǀΚ‡{Tÿ5¶þÆâ<ˆç\ +3SZ\8*ÃÑÿµï?̇  \20È?\14wœÉÕq\\fÒlHýU9Swmo½0ö7°v\12\28\9þ²mnþq[Aú•”\12_òðÉ=Û\29ì^)ÁHè³%hŽ’xO¤ô\23…¨\12Èœ\7\4Ò\3\6͇ß{\9HÄk\2°~\31Ö5)¿Z×8æ»\7¤­\2\0308OœííÉÓÅótz{†Y’çÉ|©O쇰òç\8úú<].šH\6P%ÏÓUž+i}\27¤œ×DøM\127o´¼òÇBà‚ôÀó4ŠxQ’é­{rƒ-Gô)%pŸ@ØL\24—!\6\9Às\14¥/\4ZeÓ\19¥¦*†SA.\23+\2œ¡\ +$šÁ|ž‚½\4\31×¼@ªVŠÒ=‘n=˜u\13Wíþóß¹W\13`\11vPé\9ûÐäæ¥\1Ã>Od7žŸð2=a;~jox¢\6NŽÈžãsø\28>íQqç\22‡þk>=‡áÜó_{·p‡+\127á»\1ôS\0uÑp\5•Q‚\27Aªo¨šïž uOóÝÊE\12uu\\%Ï(üG`²Ò \127þ\16¡Î;\0,Qàˆª\17ùuŽ0R,\17ÁîòÄÚ-\19ï\25W½¢#hÄ\1Ž¾†Oû\8ÄÅ\25dƒÐ³CA\26HzáË:Œ\6 º.“\19Ýß\29{?…o\8ß\20 
ù[ÚÒÈÉ\6Ch\0jšç\8ˆ’ª¿Z(¡`íÓ½É&ó4 žê\14ÝY\4ýgh\1Ç=>uñÇß\12CHŽo”M!HiË59þ<\12/\\íIÏž9\"\"ꉀâsäÀ”»„Çs“sæ€|Ñ\19>¬\19À†^ž£¯Z¾ø\3Šð´\5´î\9S‚œ\127/ÿ²êa’-ܤ<áüy²ýP6Û‚4õžf™–[]•º1-Ø‘ž\7ÿÓ†ôÔ\2DËÝá‰m±g²ÀØ3i-\31\9ÓœJ7^\4ô\127º¼ápî>­\0266\0ËÔ»Ò\19SÎ#\30Ÿ“O›y\22¨4\18.‡§ÍyÞø²†@{ÑÓ;Ï“©h\30iýUuë¨\30ïèÞ¦<~.\1bóôiSLÇ'\23\8r@˜”*ÓÈ\6\31ç\19ª‡¯ý¨â\22õR‘Ï·o\13òá\27\13çß<]\29¾a\7TÀðö}:°º¾‘©ù†zúFtÃ\1E\1­Çá®æ›SýûKE}\01387œéíøÍAÿMˆ*¥Ðaù9Ôï0¨ðÍå°N+Œ&|s›ø\17¾ŽÿÙ!Ú†¿\7,|÷ßìdß½¦Ÿ>vÁ¡<\29lß0,~£1æ{üº)ï\8\23Ø÷\8\23˜*\12ëB’—ùõ=ê,ø\30§\3Žv\24%š’Ý&wÿ²ðïïЯb(\11•ÑøšË«€¿ø:˜ïw§ãëÛÆ\11¿m*雉ó­“ì›K$uÁúö-¢\27^^\5ýM\\¦)\8ª\127kD¿mƒçÛ´ê7âAú\22’\19Dp-óm†ÙsÀ\17À\25`Ÿ†˜?\8\1\127›À$TMSSáØút€• ~›¢û\6'ã\27\\…~ë/:8¿­^è›\11ñîûAõtˆ}¿Ôå¼ý~¿T\29Æ\23H£èµ¯·—ßLåjB¼NB¼ù¿ÜåÂQüÒTx\29¾º·—Ÿÿ_½u²½üÔ÷B&葉}uÃåí…\29\11þj”_\\]ê¯\16É—WÿË\26^‡‹‚5Ô¯«¦½œáC®Ú÷ÒÈ¿n™È‘\"ÿÒO£ñº\15|ÆÊ{^ZÅ/$ ¨æt<Áøþš„d¾l!áeKŸ¯)a:æeóÀ/¡1úò\12/Ük^\29Ž!Ï×’„2¾ -^‡¤Š£eâ•\22!§ò/´9\21\28\21¡Ñ~­Â†^\24à“;Ð,\17ì/ôÝê\31hÓkM½j±ödÊŠâÄ}qR¼ò¨yùBw?\1G};»‘Ji­ÀrÕ€\"¼ïe›t/ëOx•yè‰\19%ÿâÙôeaV§Ù”›Šw\\qન^'À«i‹yµLFá./úëðÒoëÞ65 Û\29ÙNPÛ[pÿ\25hbn0µˆî?]\8\18®‚›ßú\13\5\4\27ö\18·Ó¤r·P;°ùe\0010“u!\28>=¥\\.ä‚St³‚Hb\18.’­\27ô\8î—Cü\25õÑf‘.þ'ÿ×Q´l\1™OˆøÝ`¯ÚNô\3ÅÖžo´OÂ\17ðÝlY Õ\4{ª\29Ì\ +_\ +é,Ø´ù)\8WD»ÁüvÜXLÛ§58\3>Õ<\21þ…£c|;k\29È…ÄÙ\0066F\\6Øm— ê6kÖ3/„úg8\12þ\127Q}Œ„l¡\25X`\26\"›&Êö\16\16±šÒ\27ökœÖ{†`5*¼¸ uæ\24@góÚÀa\9ï°\7»X?;mh3\16^dÈbØB‰\28€ƒh³I\27ª2¯oèn}\127ƒgƒw’Ír'›ß¶·;ÁŒ9r'½Eyq4½\9ÄHéæ\13}\11v\11ž2%Ò\4x*J4á—º\19ÂT\14¯–ê7(IG1­ \26›é@yD¡¡Œyt<Û\25e=‡)À\18 ;\13J\30ð\21ç«\0202:\6ù\9E°ïƒ-\3š'ð\19&ÌMh\ +\23Èj\30k‘ÿš:\27,bÿ?\14[Rˆ", + ["length"]=96185, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=14429, diff --git a/tex/context/patterns/lang-es.lua b/tex/context/patterns/lang-es.lua index 255e2e7b4..722b08b29 100644 --- a/tex/context/patterns/lang-es.lua +++ b/tex/context/patterns/lang-es.lua @@ -38,7 +38,71 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijklmnopqrstuvwxyzáéíñóú", - ["data"]="1b 4b. 2bb 2bc 2bd 2bf 2bg 2b1h 2bj 2bk 2bm 2bn 2bp 2bq 2bs 2bt 2bv 2bw 2bx 2by 2bz 1c 4c. 2cb 2cc 2cd 2cf 2cg 2cj 2ck 2cm 2cn 2cp 2cq 2cs 2ct 2cv 2cw 2cx 2cy 2cz 1d 4d. 2db 2dc 2dd 2df 2dg 2d1h 2dj 2dk 2dl 2dm 2dn 2dp 2dq 2ds 2dt 2dv 2dw 2dx 2dy 2dz 1f 4f. 2fb 2fc 2fd 2ff 2fg 2f1h 2fj 2fk 2fm 2fn 2fp 2fq 2fs 2ft 2fv 2fw 2fx 2fy 2fz 1g 4g. 2gb 2gc 2gd 2gf 2gg 2g1h 2gj 2gk 2gm 2gn 2gp 2gq 2gs 2gt 2gv 2gw 2gx 2gy 2gz 4h. 2hb 2hc 2hd 2hf 2hg 2h1h 2hj 2hk 2hl 2hm 2hn 2hp 2hq 2hr 2hs 2ht 2hv 2hw 2hx 2hy 2hz 1j 4j. 2jb 2jc 2jd 2jf 2jg 2j1h 2jj 2jk 2jl 2jm 2jn 2jp 2jq 2jr 2js 2jt 2jv 2jw 2jx 2jy 2jz 1k 4k. 2kb 2kc 2kd 2kf 2kg 2k1h 2kj 2kk 2km 2kn 2kp 2kq 2ks 2kt 2kv 2kw 2kx 2ky 2kz 1l 4l. 2lb 2lc 2ld 2lf 2lg 2l1h 2lj 2lk 2lm 2ln 2lp 2lq 2lr 2ls 2lt 2lv 2lw 2lx 2ly 2lz 1m 4m. 2mb 2mc 2md 2mf 2mg 2m1h 2mj 2mk 2ml 2mm 2mn 2mp 2mq 2mr 2ms 2mt 2mv 2mw 2mx 2my 2mz 1n 4n. 2nb 2nc 2nd 2nf 2ng 2n1h 2nj 2nk 2nl 2nm 2nn 2np 2nq 2nr 2ns 2nt 2nv 2nw 2nx 2ny 2nz 1p 4p. 2pb 2pc 2pd 2pf 2pg 2p1h 2pj 2pk 2pm 2pn 2pp 2pq 2ps 2pt 2pv 2pw 2px 2py 2pz 1q 4q. 2qb 2qc 2qd 2qf 2qg 2q1h 2qj 2qk 2ql 2qm 2qn 2qp 2qq 2qr 2qs 2qt 2qv 2qw 2qx 2qy 2qz 1r 4r. 2rb 2rc 2rd 2rf 2rg 2r1h 2rj 2rk 2rl 2rm 2rn 2rp 2rq 2rs 2rt 2rv 2rw 2rx 2ry 2rz 1s 4s. 2sb 2sc 2sd 2sf 2sg 2s1h 2sj 2sk 2sl 2sm 2sn 2sp 2sq 2sr 2ss 2st 2sv 2sw 2sx 2sy 2sz 1t 4t. 2tb 2tc 2td 2tf 2tg 2t1h 2tj 2tk 2tm 2tn 2tp 2tq 2ts 2tt 2tv 2tw 2tx 2ty 2tz 1v 4v. 2vb 2vc 2vd 2vf 2vg 2v1h 2vj 2vk 2vm 2vn 2vp 2vq 2vs 2vt 2vv 2vw 2vx 2vy 2vz 1w 4w. 2wb 2wc 2wd 2wf 2wg 2w1h 2wj 2wk 2wl 2wm 2wn 2wp 2wq 2wr 2ws 2wt 2wv 2ww 2wx 2wy 2wz 1x 4x. 
2xb 2xc 2xd 2xf 2xg 2x1h 2xj 2xk 2xl 2xm 2xn 2xp 2xq 2xr 2xs 2xt 2xv 2xw 2xx 2xy 2xz 1y 4y. 2yb 2yc 2yd 2yf 2yg 2y1h 2yj 2yk 2yl 2ym 2yn 2yp 2yq 2yr 2ys 2yt 2yv 2yw 2yx 2yy 2yz 1z 4z. 2zb 2zc 2zd 2zf 2zg 2z1h 2zj 2zk 2zl 2zm 2zn 2zp 2zq 2zr 2zs 2zt 2zv 2zw 2zx 2zy 2zz 1ñ 4ñ. c4h 4ch. 2chb 2chc 2chd 2chf 2chg 2chh 2chj 2chk ch2l 2chm 2chn 2chp 2chq ch2r 2chs 2cht 2chv 2chw 2chx 2chy 2chz l4l 4ll. 2llb 2llc 2lld 2llf 2llg 2llh 2llj 2llk 2lll 2llm 2lln 2llp 2llq 2llr 2lls 2llt 2llv 2llw 2llx 2lly 2llz b2l 4bl. 2bl2b 2bl2c 2bl2d 2bl2f 2bl2g 2bl2h 2bl2j 2bl2k 2bl2l 2bl2m 2bl2n 2bl2p 2bl2q 2bl2r 2bl2s 2bl2t 2bl2v 2bl2w 2bl2x 2bl2y 2bl2z c2l 4cl. 2cl2b 2cl2c 2cl2d 2cl2f 2cl2g 2cl2h 2cl2j 2cl2k 2cl2l 2cl2m 2cl2n 2cl2p 2cl2q 2cl2r 2cl2s 2cl2t 2cl2v 2cl2w 2cl2x 2cl2y 2cl2z f2l 4fl. 2fl2b 2fl2c 2fl2d 2fl2f 2fl2g 2fl2h 2fl2j 2fl2k 2fl2l 2fl2m 2fl2n 2fl2p 2fl2q 2fl2r 2fl2s 2fl2t 2fl2v 2fl2w 2fl2x 2fl2y 2fl2z g2l 4gl. 2gl2b 2gl2c 2gl2d 2gl2f 2gl2g 2gl2h 2gl2j 2gl2k 2gl2l 2gl2m 2gl2n 2gl2p 2gl2q 2gl2r 2gl2s 2gl2t 2gl2v 2gl2w 2gl2x 2gl2y 2gl2z k2l 4kl. 2kl2b 2kl2c 2kl2d 2kl2f 2kl2g 2kl2h 2kl2j 2kl2k 2kl2l 2kl2m 2kl2n 2kl2p 2kl2q 2kl2r 2kl2s 2kl2t 2kl2v 2kl2w 2kl2x 2kl2y 2kl2z p2l 4pl. 2pl2b 2pl2c 2pl2d 2pl2f 2pl2g 2pl2h 2pl2j 2pl2k 2pl2l 2pl2m 2pl2n 2pl2p 2pl2q 2pl2r 2pl2s 2pl2t 2pl2v 2pl2w 2pl2x 2pl2y 2pl2z v2l 4vl. 2vl2b 2vl2c 2vl2d 2vl2f 2vl2g 2vl2h 2vl2j 2vl2k 2vl2l 2vl2m 2vl2n 2vl2p 2vl2q 2vl2r 2vl2s 2vl2t 2vl2v 2vl2w 2vl2x 2vl2y 2vl2z b2r 4br. 2br2b 2br2c 2br2d 2br2f 2br2g 2br2h 2br2j 2br2k 2br2l 2br2m 2br2n 2br2p 2br2q 2br2r 2br2s 2br2t 2br2v 2br2w 2br2x 2br2y 2br2z c2r 4cr. 2cr2b 2cr2c 2cr2d 2cr2f 2cr2g 2cr2h 2cr2j 2cr2k 2cr2l 2cr2m 2cr2n 2cr2p 2cr2q 2cr2r 2cr2s 2cr2t 2cr2v 2cr2w 2cr2x 2cr2y 2cr2z d2r 4dr. 2dr2b 2dr2c 2dr2d 2dr2f 2dr2g 2dr2h 2dr2j 2dr2k 2dr2l 2dr2m 2dr2n 2dr2p 2dr2q 2dr2r 2dr2s 2dr2t 2dr2v 2dr2w 2dr2x 2dr2y 2dr2z f2r 4fr. 2fr2b 2fr2c 2fr2d 2fr2f 2fr2g 2fr2h 2fr2j 2fr2k 2fr2l 2fr2m 2fr2n 2fr2p 2fr2q 2fr2r 2fr2s 2fr2t 2fr2v 2fr2w 2fr2x 2fr2y 2fr2z g2r 4gr. 2gr2b 2gr2c 2gr2d 2gr2f 2gr2g 2gr2h 2gr2j 2gr2k 2gr2l 2gr2m 2gr2n 2gr2p 2gr2q 2gr2r 2gr2s 2gr2t 2gr2v 2gr2w 2gr2x 2gr2y 2gr2z k2r 4kr. 2kr2b 2kr2c 2kr2d 2kr2f 2kr2g 2kr2h 2kr2j 2kr2k 2kr2l 2kr2m 2kr2n 2kr2p 2kr2q 2kr2r 2kr2s 2kr2t 2kr2v 2kr2w 2kr2x 2kr2y 2kr2z p2r 4pr. 2pr2b 2pr2c 2pr2d 2pr2f 2pr2g 2pr2h 2pr2j 2pr2k 2pr2l 2pr2m 2pr2n 2pr2p 2pr2q 2pr2r 2pr2s 2pr2t 2pr2v 2pr2w 2pr2x 2pr2y 2pr2z r2r 4rr. 2rr2b 2rr2c 2rr2d 2rr2f 2rr2g 2rr2h 2rr2j 2rr2k 2rr2l 2rr2m 2rr2n 2rr2p 2rr2q 2rr2r 2rr2s 2rr2t 2rr2v 2rr2w 2rr2x 2rr2y 2rr2z t2r 4tr. 2tr2b 2tr2c 2tr2d 2tr2f 2tr2g 2tr2h 2tr2j 2tr2k 2tr2l 2tr2m 2tr2n 2tr2p 2tr2q 2tr2r 2tr2s 2tr2t 2tr2v 2tr2w 2tr2x 2tr2y 2tr2z v2r 4vr. 2vr2b 2vr2c 2vr2d 2vr2f 2vr2g 2vr2h 2vr2j 2vr2k 2vr2l 2vr2m 2vr2n 2vr2p 2vr2q 2vr2r 2vr2s 2vr2t 2vr2v 2vr2w 2vr2x 2vr2y 2vr2z 2b3p2t 2c3p2t 2d3p2t 2l3p2t 2m3p2t 2n3p2t 2r3p2t 2s3p2t 2t3p2t 2x3p2t 2y3p2t 4pt. 2b3c2t 2c3c2t 2d3c2t 2l3c2t 2m3c2t 2n3c2t 2r3c2t 2s3c2t 2t3c2t 2x3c2t 2y3c2t 4ct. 2b3c2n 2c3c2n 2d3c2n 2l3c2n 2m3c2n 2n3c2n 2r3c2n 2s3c2n 2t3c2n 2x3c2n 2y3c2n 4cn. 2b3p2s 2c3p2s 2d3p2s 2l3p2s 2m3p2s 2n3p2s 2r3p2s 2s3p2s 2t3p2s 2x3p2s 2y3p2s 4ps. 2b3m2n 2c3m2n 2d3m2n 2l3m2n 2m3m2n 2n3m2n 2r3m2n 2s3m2n 2t3m2n 2x3m2n 2y3m2n 4mn. 2b3g2n 2c3g2n 2d3g2n 2l3g2n 2m3g2n 2n3g2n 2r3g2n 2s3g2n 2t3g2n 2x3g2n 2y3g2n 4gn. 2b3f2t 2c3f2t 2d3f2t 2l3f2t 2m3f2t 2n3f2t 2r3f2t 2s3f2t 2t3f2t 2x3f2t 2y3f2t 4ft. 2b3p2n 2c3p2n 2d3p2n 2l3p2n 2m3p2n 2n3p2n 2r3p2n 2s3p2n 2t3p2n 2x3p2n 2y3p2n 4pn. 2b3c2z 2c3c2z 2d3c2z 2l3c2z 2m3c2z 2n3c2z 2r3c2z 2s3c2z 2t3c2z 2x3c2z 2y3c2z 4cz. 
2b3t2z 2c3t2z 2d3t2z 2l3t2z 2m3t2z 2n3t2z 2r3t2z 2s3t2z 2t3t2z 2x3t2z 2y3t2z 4tz. 2b3t2s 2c3t2s 2d3t2s 2l3t2s 2m3t2s 2n3t2s 2r3t2s 2s3t2s 2t3t2s 2x3t2s 2y3t2s 4ts. san4c5t plan4c5t 2no. 2t2l 4caca4 4cago4 4caga4 4cagas. 4teta. 4tetas. 4puta4 4puto4 .hu4mea .hu4meo .he4mee 4meo. 4meable. 4meables. 4pedo4 4culo4 3mente. 4i3go. 4es. 4és 4e. 4e3mos. 4éis. 4en. 4ía. 4ías. 4ía3mos. 4íais. 4ían. 4í. 4í4s3te. 4í4s3tes. 4í3tes. 4í3mos. 4ís3teis. 4e3ré. 4e3rás. 4e3rés. 4e3rís. 4e3rá. 4e3re3mos. 4e3réis. 4e3rán. 4i3ga. 4i3gas. 4i3gás. 4i3gamos. 4i3gáis. 4a4i3gan. 4e3ría. 4e3rías. 4e3ríamos. 4e3ríais. 4e3rían. 4i3gá3mosme. 4i3gá3mosmele. 4i3gá3mosmelo. 4i3gá3mos3mela. 4i3gá3mosmeles. 4i3gá3mosmelos. 4i3gá3mos3melas. 4i3gá3moste. 4i3gá3mostele. 4i3gá3mostelo. 4i3gá3mos3tela. 4i3gá3mosteles. 4i3gá3mostelos. 4i3gá3mos3telas. 4i3gá3mosle. 4i3gá3mosla. 4i3gá3moslo. 4i3gá3mosele. 4i3gá3moselo. 4i3gá3mosela. 4i3gá3moseles. 4i3gá3moselos. 4i3gá3moselas. 4i3gá3monos. 4i3gá3monosle. 4i3gá3monoslo. 4i3gá3monosla. 4i3gá3monosles. 4i3gá3monoslos. 4i3gá3monoslas. 4i3gá3moos. 4i3gá3moosle. 4i3gá3mooslo. 4i3gá3moosla. 4i3gá3moosles. 4i3gá3mooslos. 4i3gá3mooslas. 4i3gá3mosles. 4i3gá3moslas. 4i3gá3moslos. 4ed. 4é. 4edme. 4édmele. 4édmelo. 4éd3mela. 4édmeles. 4édmelos. 4éd3melas. 4edte. 4édtele. 4édtelo. 4éd3tela. 4édteles. 4édtelos. 4éd3telas. 4edle. 4eedla. 4edlo. 4édsele. 4édselo. 4édsela. 4édseles. 4édselos. 4édselas. 4ednos. 4édnosle. 4édnoslo. 4édnosla. 4édnosles. 4édnoslos. 4édnoslas. 4eos. 4éosle. 4éoslo. 4éosla. 4éosles. 4éoslos. 4éoslas. 4edles. 4edlas. 4edlos. 4er. 4erme. 4érmele. 4érmelo. 4ér3mela. 4érmeles. 4érmelos. 4ér3melas. 4erte. 4értele. 4értelo. 4ér3tela. 4érteles. 4értelos. 4ér3telas. 4erle. 4erla. 4erlo. 4erse. 4érsele. 4érselo. 4érsela. 4érseles. 4érselos. 4érselas. 4ernos. 4érnosle. 4érnoslo. 4érnosla. 4érnosles. 4érnoslos. 4érnoslas. 4e3ros. 4é3rosle. 4é3roslo. 4é3rosla. 4é3rosles. 4é3roslos. 4é3roslas. 4erles. 4erlas. 4erlos. 4í3do. 4í3da. 4í3dos. 4í3das. 4o. 4as. 4a. 4ás. 4a3mos. 4áis. 4an. 4aste. 4astes. 4ó. 4ates. 4asteis. 4a3ron. 4a3ba. 4a3bas. 4á3bamos. 4a3bais. 4a3ban. 4a3ría. 4a3rías. 4a3ríamos. 4a3ríais 4a3rían. 4a3ré. 4a3rás. 4a3rés. 4a3rís. 4a3rá. 4a3remos. 4a3réis. 4a3rán. 4a3ra. 4a3ras. 4á3ramos. 4a3rais. 4a3ran. 4a3re. 4a3res. 4á3remos. 4a3reis. 4a3ren. 4a3se. 4a3ses. 4á3semos. 4a3seis. 4a3sen. 4ad. e5r4as. e5r4a3mos. e5r4áis. e5r4an. e5r4aste. e5r4astes. e5r4ates. e5r4asteis. e5r4a3ron. e5r4a3ba. e5r4a3bas. e5r4á3bamos. e5r4a3bais. e5r4a3ban. e5r4a3ría. e5r4a3rías. e5r4a3ríamos. e5r4a3ríais e5r4a3rían. e5r4a3ré. e5r4a3rás. e5r4a3rés. e5r4a3rís. e5r4a3rá. e5r4a3remos. e5r4a3réis. e5r4a3rán. e5r4a3ra. e5r4a3ras. e5r4á3ramos. e5r4a3rais. e5r4a3ran. e5r4a3re. e5r4a3res. e5r4á3remos. e5r4a3reis. e5r4a3ren. e5r4a3se. e5r4a3ses. e5r4á3semos. e5r4a3seis. e5r4a3sen. e5r4ad. 4adme. 4ádmele. 4ádmelo. 4ád3mela. 4ádmeles. 4ádmelos. 4ád3melas. 4adte. 4ádtele. 4ádtelo. 4ád3tela. 4ádteles. 4ádtelos. 4ád3telas. 4adle. 4eadla. 4adlo. 4ádsele. 4ádselo. 4ádsela. 4ádseles. 4ádselos. 4ádselas. 4adnos. 4ádnosle. 4ádnoslo. 4ádnosla. 4ádnosles. 4ádnoslos. 4ádnoslas. 4aos. 4áosle. 4áoslo. 4áosla. 4áosles. 4áoslos. 4áoslas. 4adles. 4adlas. 4adlos. 4ar. 4a4rme. 4á4rmele. 4á4rmelo. 4á4r3mela. 4á4r3meles. 4á4r3melos. 4á4r3melas. 4a4r3te. 4á4r3tele. 4á4r3telo. 4á4r3tela. 4á4r3teles. 4á4r3telos. 4á4r3telas. 4a4r3le. 4a4r3la. 4a4r3lo. 4a4r3se. 4á4r3sele. 4á4r3selo. 4á4r3sela. 4á4r3seles. 4á4r3selos. 4á4r3selas. 4a4r3nos. 4á4r3nosle. 4á4r3noslo. 4á4r3nosla. 4á4r3nosles. 4á4r3noslos. 4á4r3noslas. 4a3ros. 4árosle. 
4ároslo. 4árosla. 4árosles. 4ároslos. 4ároslas. 4a4r3les. 4a4r3las. 4a4r3los. 4a3do. 4a3da. 4a3dos. 4a3das. e5r4a3do. e5r4a3da. e5r4a3dos. e5r4a3das. 4ando 4ándole. 4ándolo. 4ándola. 4ándoles. 4ándolos. 4ándolas. 4ándonos. 4ándoos. 4ándome. 4ándomelo. 4ándomela. 4ándomele. 4ándomelos. 4ándomelas. 4ándomeles. 4ándote. 4ándoteme. 4ándotelo. 4ándotela. 4ándotele. 4ándotelos. 4ándotelas. 4ándoteles. 4ándotenos. 4ándose. 4ándoseme. 4ándoselo. 4ándosela. 4ándosele. 4ándoselos. 4ándoselas. 4ándoseles. 4ándosenos. 4a3dor. 4a3dora. 4a3dores. 4a3doras. e5r4a3dor. e5r4a3dora. e5r4a3dores. e5r4a3doras. acto1h acto1a2 acto1e2 acto1i2 acto1o2 acto1u2 acto1á2 acto1é2 acto1í2 acto1ó2 acto1ú2 afro1h afro1a2 afro1e2 afro1i2 afro1o2 afro1u2 afro1á2 afro1é2 afro1í2 afro1ó2 afro1ú2 .a2 .an2a2 .an2e2 .an2i2 .an2o2 .an2u2 .an2á2 .an2é2 .an2í2 .an2ó2 .an2ú2. ana3lí .aná3li .ana3li .an3aero .an3e2pigr .ane3xa .ane3xá .ane3xe .ane3xé .ane3xio .ane3xió .an3h .ani3mad .ani3mád .ani3dar .ani3ll .ani3m .aniña .ani3q .an3i2so .an3i2só .ani3vel .ano5che .ano5din .ano5mal .ano5nad .anó3nim .anó5mal .ano5nim .ano5ta .ano3tá .anua3l .anua4lm .anu3bl .anu3da .anu3l asu3b2 aero1h aero1a2 aero1e2 aero1i2 aero1o2 aero1u2 aero1á2 aero1é2 aero1í2 aero1ó2 aero1ú2 anfi1h anfi1a2 anfi1e2 anfi1i2 anfi1o2 anfi1u2 anfi1á2 anfi1é2 anfi1í2 anfi1ó2 anfi1ú2 anglo1h anglo1a2 anglo1e2 anglo1i2 anglo1o2 anglo1u2 anglo1á2 anglo1é2 anglo1í2 anglo1ó2 anglo1ú2 ante1h ante1a2 ante1e2 ante1i2 ante1o2 ante1u2 ante1á2 ante1é2 ante1í2 ante1ó2 ante1ú2 .ante2o3je acante2 4ísmo. 4ísmos. 4ísta. 4ístas. 4ístico. 4ísticos. 4ística. 4ísticas. t4eo3nes. mante4a e4a3miento .anti1h .anti1a2 .anti1e2 .anti1i2 .anti1o2 .anti1u2 .anti1á2 .anti1é2 .anti1í2 .anti1ó2 .anti1ú2 ti2o3qu ti2o3co archi1h archi1a2 archi1e2 archi1i2 archi1o2 archi1u2 archi1á2 archi1é2 archi1í2 archi1ó2 archi1ú2 auto1h auto1a2 auto1e2 auto1i2 auto1o2 auto1u2 auto1á2 auto1é2 auto1í2 auto1ó2 auto1ú2 biblio1h biblio1a2 biblio1e2 biblio1i2 biblio1o2 biblio1u2 biblio1á2 biblio1é2 biblio1í2 biblio1ó2 biblio1ú2 bio1h bio1a2 bio1e2 bio1i2 bio1o2 bio1u2 bio1á2 bio1é2 bio1í2 bio1ó2 bio1ú2 bi1u2ní cardio1h cardio1a2 cardio1e2 cardio1i2 cardio1o2 cardio1u2 cardio1á2 cardio1é2 cardio1í2 cardio1ó2 cardio1ú2 cefalo1h cefalo1a2 cefalo1e2 cefalo1i2 cefalo1o2 cefalo1u2 cefalo1á2 cefalo1é2 cefalo1í2 cefalo1ó2 cefalo1ú2 centi1h centi1a2 centi1e2 centi1i2 centi1o2 centi1u2 centi1á2 centi1é2 centi1í2 centi1ó2 centi1ú2 centi5área ciclo1h ciclo1a2 ciclo1e2 ciclo1i2 ciclo1o2 ciclo1u2 ciclo1á2 ciclo1é2 ciclo1í2 ciclo1ó2 ciclo1ú2 o4i3dea. o4i3deas. o4i3dal. o4i3dales. 4o2i3de. 4o2i3des. 4i2dal. 4i2dales. 4i3deo. 4i3deos. cito1h cito1a2 cito1e2 cito1i2 cito1o2 cito1u2 cito1á2 cito1é2 cito1í2 cito1ó2 cito1ú2 3c2neor cnico1h cnico1a2 cnico1e2 cnico1i2 cnico1o2 cnico1u2 cnico1á2 cnico1é2 cnico1í2 cnico1ó2 cnico1ú2 .co2a2 .co2e2 .co2i2 .co3o4 .co2u2 .co2á2 .co2é2 .co2í2 .co2ó2 .co2ú2 co4á3gul co4acci co4acti co4adju co4a3dun co4adyu co3agen co4a3gul co4a3lic co4aptac co4art co4árt co4e3fic co4erc co4erz co4e3tá co3exis co4imbr co4inci co4i3to co3n4imbri co4o3per co4o3pér co4opt co4ord con1imbr con1urb cripto1h cripto1a2 cripto1e2 cripto1i2 cripto1o2 cripto1u2 cripto1á2 cripto1é2 cripto1í2 cripto1ó2 cripto1ú2 crono1h crono1a2 crono1e2 crono1i2 crono1o2 crono1u2 crono1á2 crono1é2 crono1í2 crono1ó2 crono1ú2 contra1h contra1a2 contra1e2 contra1i2 contra1o2 contra1u2 contra1á2 contra1é2 contra1í2 contra1ó2 contra1ú2 deca1h deca1a2 deca1e2 deca1i2 deca1o2 deca1u2 deca1á2 deca1é2 deca1í2 deca1ó2 deca1ú2 4e3dro. 4e3dros. 4é3drico. 
4é3dricos. 4é3drica. 4é3dricas. .de2sa2 .de2se2 .de2si2 .de2so2 .de2su2 .de2sá2 .de2sé2 .de2sí2 .de2só2 .de2sú2 deca2i3mient decimo1 3sa. 3sas. de2s3órde de2s3orde de2s3abast de2s3aboll de2s3aboto de2s3abr desa3brid de2s3abroch de2s3aceit de2s3aceler desa3cert desa3ciert de2s3acobar de2s3acomod de2s3acomp de2s3acons de2s3acopl de2s3acorr de2s3acostum de2s3acot desa3craliz de2s3acredit de2s3activ de2s3acuart de2s3aderez de2s3adeud de2s3adorar de2s3adormec de2s3adorn de2s3advert de2s3aferr de2s3afic de2s3afil de2s3afin de2s3afor desa3gú desa3garr de2s3agraci de2s3agrad de2s3agravi de2s3agreg de2s3agrup de2s3agu desa3guisado de2s3aherr de2s3ahij de2s3ajust de2s3alagar de2s3alent de2s3alfom de2s3alfor de2s3aliñ desa3lin de2s3alien de2s3aline desa3liv de2s3alm de2s3almid de2s3aloj de2s3alquil de2s3alter de2s3alumbr desa3marr desa3mobl de2s3amold de2s3amort de2s3amuebl de2s3ampa de2s3and de2s3angel de3sangr de2s3anid de2s3anim de2s3aním de2s3anud desa3pañ desa3pacib de2s3apadr de2s3apare de2s3aparec de2s3aparic de2s3apeg de2s3apercib de2s3apes de2s3aplic de2s3apolill de2s3apoy de2s3aprend de2s3apret de2s3apriet de2s3aprob de2s3apropi de2s3aprovech de2s3arbol de2s3aren de2s3arm des4arme de2s3arraig de2s3arregl de2s3arrend de2s3arrim desa3rroll de2s3arrop de2s3arrug de2s3articul de2s3asent de2s3asist de2s3asn desa3soseg desa3sosieg de2s3atenc de2s3atend de2s3atiend de2s3atent desa3tin de2s3atorn de2s3atranc de2s3autor de2s3avis desa3yun desa3zón desa3zon de2s3embal de2s3embál de2s3embar de2s3embár de2s3embarg de2s3embols de2s3emborr de2s3embosc de2s3embot de2s3embrag de2s3embrág de2s3embrave de2s3embráve de2s3embroll de2s3embróll de2s3embruj de2s3embrúj de3semej de2s3empañ de2s3empáñ de2s3empac de2s3empaquet de2s3empaquét de2s3emparej de2s3emparéj de2s3emparent de2s3empat de2s3empé de2s3empedr de2s3empeg de2s3empeor de2s3emperez de2s3empern de2s3emple de2s3empolv de2s3empotr de2s3empoz de2s3enam de2s3encab de2s3encad de2s3encaj de2s3encáj de2s3encall de2s3encáll de2s3encam de3sencant de2s3encap de2s3encar de2s3encár de2s3ench de2s3encl de2s3enco de2s3encr de2s3encu de2s3end de3senfad de3senfád de2s3enfi de2s3enfo de2s3enfó de3senfren de2s3enfund de2s3enfur de3sengañ de3sengáñ de2s3enganch de2s3engar de2s3engas de2s3engom de2s3engoz de2s3engra de2s3enhebr de2s3enj de2s3enlad de2s3enlaz de2s3enlo de2s3enm de2s3enr de2s3ens de2s3enta de3sentend de3sentien de3sentién de2s3enter de2s3entier de2s3entiér de2s3ento de2s3entr de2s3entu de2s3envain de3senvolvim de3seo de2s3eq de3s4erci de3s4ert de3s4ért de2s3espa de3sesperac de2s3esperanz de3sesper de2s3estabil de2s3estim de3sider de3sidia de3sidio de3siert de3sign de3sigual de3silusi de2s3imagin de2s3iman de2s3impon de2s3impres de2s3incent de2s3inclin de2s3incorp de2s3incrust de3sinenc de3sinfec de3su3dar de3su3das de3su3dan de2s3inflam de2s3infl de2s3inform de2s3inhib de2s3insect de2s3instal ini3ci iní3ci de3s4integr de3s4inter de2s3intox de2s3inver de3sisten de3isti de2s3obedec de2s3oblig de2s3obstr de3socup de2s3odor de3solac de3solad de3soll de2s3orej de2s3orient de3sortij de2s3organi de3suell de3sonce de2s3ovi de2s3oxi de2s3oye de2s3oyé de3s4ubstan de3s4ustan de3s4oseg de2s3ub4ic de2s3unir de2s3unier de2s3unim .dieci1o2 dodeca1h dodeca1a2 dodeca1e2 dodeca1i2 dodeca1o2 dodeca1u2 dodeca1á2 dodeca1é2 dodeca1í2 dodeca1ó2 dodeca1ú2 ecano1h ecano1a2 ecano1e2 ecano1i2 ecano1o2 ecano1u2 ecano1á2 ecano1é2 ecano1í2 ecano1ó2 ecano1ú2 eco1h eco1a2 eco1e2 eco1i2 eco1o2 eco1u2 eco1á2 eco1é2 eco1í2 eco1ó2 eco1ú2 ectro1h ectro1a2 ectro1e2 ectro1i2 ectro1o2 ectro1u2 
ectro1á2 ectro1é2 ectro1í2 ectro1ó2 ectro1ú2 .en2a2 .en2e2 .en2i2 .en2o2 .en2u2 .en2á2 .en2é2 .en2í2 .en2ó2 .en2ú2 .ene3mist .ene3míst .eno3jar .enu3mera .enu3merá .enu3mere 4o3lógico. 4o3lógica. 4o3lógicos. 4o3lógicas. 4o3lógicamente. 4o3logía. 4o3logías. 4ó3logo. 4ó3loga. 4ó3logos. 4ó3logas. endo1h endo1a2 endo1e2 endo1i2 endo1o2 endo1u2 endo1á2 endo1é2 endo1í2 endo1ó2 endo1ú2 ento1h ento1a2 ento1e2 ento1i2 ento1o2 ento1u2 ento1á2 ento1é2 ento1í2 ento1ó2 ento1ú2 4emboca entre1h entre1a2 entre1e2 entre1i2 entre1o2 entre1u2 entre1á2 entre1é2 entre1í2 entre1ó2 entre1ú2 euco1h euco1a2 euco1e2 euco1i2 euco1o2 euco1u2 euco1á2 euco1é2 euco1í2 euco1ó2 euco1ú2 euro1h euro1a2 euro1e2 euro1i2 euro1o2 euro1u2 euro1á2 euro1é2 euro1í2 euro1ó2 euro1ú2 extra1h extra1a2 extra1e2 extra1i2 extra1o2 extra1u2 extra1á2 extra1é2 extra1í2 extra1ó2 extra1ú2 u4teri .cau5t .deu5t fono1h fono1a2 fono1e2 fono1i2 fono1o2 fono1u2 fono1á2 fono1é2 fono1í2 fono1ó2 fono1ú2 foto1h foto1a2 foto1e2 foto1i2 foto1o2 foto1u2 foto1á2 foto1é2 foto1í2 foto1ó2 foto1ú2 gastro1h gastro1a2 gastro1e2 gastro1i2 gastro1o2 gastro1u2 gastro1á2 gastro1é2 gastro1í2 gastro1ó2 gastro1ú2 geo1h geo1a2 geo1e2 geo1i2 geo1o2 geo1u2 geo1á2 geo1é2 geo1í2 geo1ó2 geo1ú2 gluco1h gluco1a2 gluco1e2 gluco1i2 gluco1o2 gluco1u2 gluco1á2 gluco1é2 gluco1í2 gluco1ó2 gluco1ú2 hecto1h hecto1a2 hecto1e2 hecto1i2 hecto1o2 hecto1u2 hecto1á2 hecto1é2 hecto1í2 hecto1ó2 hecto1ú2 helio1h helio1a2 helio1e2 helio1i2 helio1o2 helio1u2 helio1á2 helio1é2 helio1í2 helio1ó2 helio1ú2 hemato1h hemato1a2 hemato1e2 hemato1i2 hemato1o2 hemato1u2 hemato1á2 hemato1é2 hemato1í2 hemato1ó2 hemato1ú2 hemi1h hemi1a2 hemi1e2 hemi1i2 hemi1o2 hemi1u2 hemi1á2 hemi1é2 hemi1í2 hemi1ó2 hemi1ú2 hemo1h hemo1a2 hemo1e2 hemo1i2 hemo1o2 hemo1u2 hemo1á2 hemo1é2 hemo1í2 hemo1ó2 hemo1ú2 2al. 2ales. 
hexa1h hexa1a2 hexa1e2 hexa1i2 hexa1o2 hexa1u2 hexa1á2 hexa1é2 hexa1í2 hexa1ó2 hexa1ú2 hidro1h hidro1a2 hidro1e2 hidro1i2 hidro1o2 hidro1u2 hidro1á2 hidro1é2 hidro1í2 hidro1ó2 hidro1ú2 hipe2r3r hipe2r1a2 hipe2r1e2 hipe2r1i2 hipe2r1o2 hipe2r1u2 hipe2r1á2 hipe2r1é2 hipe2r1í2 hipe2r1ó2 hipe2r1ú2 pe3r4e3mia histo1h histo1a2 histo1e2 histo1i2 histo1o2 histo1u2 histo1á2 histo1é2 histo1í2 histo1ó2 histo1ú2 homo1h homo1a2 homo1e2 homo1i2 homo1o2 homo1u2 homo1á2 homo1é2 homo1í2 homo1ó2 homo1ú2 icono1h icono1a2 icono1e2 icono1i2 icono1o2 icono1u2 icono1á2 icono1é2 icono1í2 icono1ó2 icono1ú2 .i2n2a2 .i2n2e2 .i2n2i2 .i2n2o2 .i2n2u2 .i2n2á2 .i2n2é2 .i2n2í2 .i2n2ó2 .i2n2ú2 .in3abord .in3abarc .in3acent .in3aguant .in3adapt .ina3movib .in3analiz .ina3nic .in3anim .iná3nim .in3apel .in3aplic .in3aprens .in3apreci .in3arrug .in3asist .iné3dit .in3efic .in3efici .in3eludi .ine3narr ini3cia iní3cia ini3ciá ini3cie .rei3na re3ini3cia re3iní3cia re3ini3ciá re3ini3cie .ini3cuo .ini3cua .ino3cuo .ino3cua .ino3cula .ino3culá .ino3cule .inú3til .inu3tiliz infra1h infra1a2 infra1e2 infra1i2 infra1o2 infra1u2 infra1á2 infra1é2 infra1í2 infra1ó2 infra1ú2 .inte2r3r .inte2r1a2 .inte2r1e2 .inte2r1i2 .inte2r1o2 .inte2r1u2 .inte2r1á2 .inte2r1é2 .inte2r1í2 .inte2r1ó2 .inte2r1ú2 .in3ter2e3sa .in3ter2e3se .in3ter2e3so .in3ter2e3sá .in3ter2e3sé .in3ter2e3só .de3s4in3ter2e3sa .de3s4in3ter2e3se .de3s4in3ter2e3so .de3s4in3ter2e3sá .de3s4in3ter2e3sé .de3s4in3ter2e3só 3te3ri3n 4te4r5i4nsu .in3te3r4rog .in3te3r4rupc .in3te3r4rupt .in3te3r4rump intra1h intra1a2 intra1e2 intra1i2 intra1o2 intra1u2 intra1á2 intra1é2 intra1í2 intra1ó2 intra1ú2 iso1h iso1a2 iso1e2 iso1i2 iso1o2 iso1u2 iso1á2 iso1é2 iso1í2 iso1ó2 iso1ú2 kilo1h kilo1a2 kilo1e2 kilo1i2 kilo1o2 kilo1u2 kilo1á2 kilo1é2 kilo1í2 kilo1ó2 kilo1ú2 macro1h macro1a2 macro1e2 macro1i2 macro1o2 macro1u2 macro1á2 macro1é2 macro1í2 macro1ó2 macro1ú2 mal2 ma4l3h .ma4l3e4du mal3b mal3c mal3d mal3f mal3g mal3m mal3p mal3q mal3s mal3t mal3v bien2 bien3h bien3v bien3q bien3m bien3t b4ien3do. .su3b4ien b4ien3das. maxi1h maxi1a2 maxi1e2 maxi1i2 maxi1o2 maxi1u2 maxi1á2 maxi1é2 maxi1í2 maxi1ó2 maxi1ú2 megalo1h megalo1a2 megalo1e2 megalo1i2 megalo1o2 megalo1u2 megalo1á2 megalo1é2 megalo1í2 megalo1ó2 megalo1ú2 mega1h mega1a2 mega1e2 mega1i2 mega1o2 mega1u2 mega1á2 mega1é2 mega1í2 mega1ó2 mega1ú2 melano1h melano1a2 melano1e2 melano1i2 melano1o2 melano1u2 melano1á2 melano1é2 melano1í2 melano1ó2 melano1ú2 micro1h micro1a2 micro1e2 micro1i2 micro1o2 micro1u2 micro1á2 micro1é2 micro1í2 micro1ó2 micro1ú2 mili1h mili1a2 mili1e2 mili1i2 mili1o2 mili1u2 mili1á2 mili1é2 mili1í2 mili1ó2 mili1ú2 familia3ri ia5res. amili6a a3rio li5área mini1h mini1a2 mini1e2 mini1i2 mini1o2 mini1u2 mini1á2 mini1é2 mini1í2 mini1ó2 mini1ú2 2os. 2o3so. 2o3sos. 2o3sa. 2o3sas. 2o3samente. mini4a5tur multi1h multi1a2 multi1e2 multi1i2 multi1o2 multi1u2 multi1á2 multi1é2 multi1í2 multi1ó2 multi1ú2 miria1h miria1a2 miria1e2 miria1i2 miria1o2 miria1u2 miria1á2 miria1é2 miria1í2 miria1ó2 miria1ú2 mono1h mono1a2 mono1e2 mono1i2 mono1o2 mono1u2 mono1á2 mono1é2 mono1í2 mono1ó2 mono1ú2 2i3co. 2i3cos. 2i3ca. 2i3cas. 
namo1h namo1a2 namo1e2 namo1i2 namo1o2 namo1u2 namo1á2 namo1é2 namo1í2 namo1ó2 namo1ú2 necro1h necro1a2 necro1e2 necro1i2 necro1o2 necro1u2 necro1á2 necro1é2 necro1í2 necro1ó2 necro1ú2 neo1h neo1a2 neo1e2 neo1i2 neo1o2 neo1u2 neo1á2 neo1é2 neo1í2 neo1ó2 neo1ú2 neto1h neto1a2 neto1e2 neto1i2 neto1o2 neto1u2 neto1á2 neto1é2 neto1í2 neto1ó2 neto1ú2 norte1h norte1a2 norte1e2 norte1i2 norte1o2 norte1u2 norte1á2 norte1é2 norte1í2 norte1ó2 norte1ú2 octo1h octo1a2 octo1e2 octo1i2 octo1o2 octo1u2 octo1á2 octo1é2 octo1í2 octo1ó2 octo1ú2 octa1h octa1a2 octa1e2 octa1i2 octa1o2 octa1u2 octa1á2 octa1é2 octa1í2 octa1ó2 octa1ú2 oligo1h oligo1a2 oligo1e2 oligo1i2 oligo1o2 oligo1u2 oligo1á2 oligo1é2 oligo1í2 oligo1ó2 oligo1ú2 omni1h omni1a2 omni1e2 omni1i2 omni1o2 omni1u2 omni1á2 omni1é2 omni1í2 omni1ó2 omni1ú2 i2o. i2os. paleo1h paleo1a2 paleo1e2 paleo1i2 paleo1o2 paleo1u2 paleo1á2 paleo1é2 paleo1í2 paleo1ó2 paleo1ú2 para1h para1a2 para1e2 para1i2 para1o2 para1u2 para1á2 para1é2 para1í2 para1ó2 para1ú2 para2is. aí5so. aí5sos. penta1h penta1a2 penta1e2 penta1i2 penta1o2 penta1u2 penta1á2 penta1é2 penta1í2 penta1ó2 penta1ú2 piezo1h piezo1a2 piezo1e2 piezo1i2 piezo1o2 piezo1u2 piezo1á2 piezo1é2 piezo1í2 piezo1ó2 piezo1ú2 pluri1h pluri1a2 pluri1e2 pluri1i2 pluri1o2 pluri1u2 pluri1á2 pluri1é2 pluri1í2 pluri1ó2 pluri1ú2 poli1h poli1a2 poli1e2 poli1i2 poli1o2 poli1u2 poli1á2 poli1é2 poli1í2 poli1ó2 poli1ú2 poli4u3r poli4o5mie poli4arq poli4árq poli4éste poli4andr poli4antea expoli4 .pos2t2a2 .pos2t2e2 .pos2t2i2 .pos2t2o2 .pos2t2u2 .pos2t2á2 .pos2t2é2 .pos2t2í2 .pos2t2ó2 .pos2t2ú2 .pos3tin .pos3tín pos3ta. pos3tas. s3te. s3tes. s3tal. s3ta3les. s3ti3lla. s3ti3llas. s3ti3llón. s3ti3llones. .pos3tó3ni .pos3terg .pos3te3ri .pos3ti3go .pos3ti3la .pos3ti3ne .pos3ti3za .pos3ti3zo .pos3tu3ra s3tor. s3tora. s3toras. s3tores. .pos3tu3la .pos3tu3lá .pos3tu3le .pos3tu3lé .post3elec .post3impr .post3ind .post3ope .post3rev .pre2a2 .pre2e2 .pre2i2 .pre2o2 .pre2u2 .pre2h2 .pre2á2 .pre2é2 .pre2í2 .pre2ó2 .pre2ú2 pre3elij pre3elig pre3exis pre3emin preo3cup preo2cúp pre3olí pre3opin .pro2a2 .pro2e2 .pro2i2 .pro2o2 .pro2u2 .pro2h2 .pro2á2 .pro2é2 .pro2í2 .pro2ó2 .pro2ú2 proto1h proto1a2 proto1e2 proto1i2 proto1o2 proto1u2 proto1á2 proto1é2 proto1í2 proto1ó2 proto1ú2 radio1h radio1a2 radio1e2 radio1i2 radio1o2 radio1u2 radio1á2 radio1é2 radio1í2 radio1ó2 radio1ú2 ranco1h ranco1a2 ranco1e2 ranco1i2 ranco1o2 ranco1u2 ranco1á2 ranco1é2 ranco1í2 ranco1ó2 ranco1ú2 .re2a2 .re3e4 .re2i2 .re2o2 .re2u2 .re2á2 .re2é2 .re2í2 .re2ó2 .re2ú2 ea3cio. ea3cios. ea3cia. ea3cias. .re3abr .re3ábr .re3afirm .re3afírm .re3ajust .rea3júst .rea3liza .rea3lizá .rea3líza .re3alim .rea3lism .rea3list .re3anim .re3aním .re3aparec .re3ubica .re3ubíca .reu3mati .reu3máti .re3unir .re3unír .re3usar .re3usár .re3utiliz .re3utilíz rmano1h rmano1a2 rmano1e2 rmano1i2 rmano1o2 rmano1u2 rmano1á2 rmano1é2 rmano1í2 rmano1ó2 rmano1ú2 retro1h retro1a2 retro1e2 retro1i2 retro1o2 retro1u2 retro1á2 retro1é2 retro1í2 retro1ó2 retro1ú2 romo1h romo1a2 romo1e2 romo1i2 romo1o2 romo1u2 romo1á2 romo1é2 romo1í2 romo1ó2 romo1ú2 sobre1h sobre1a2 sobre1e2 sobre1i2 sobre1o2 sobre1u2 sobre1á2 sobre1é2 sobre1í2 sobre1ó2 sobre1ú2 semi1h semi1a2 semi1e2 semi1i2 semi1o2 semi1u2 semi1á2 semi1é2 semi1í2 semi1ó2 semi1ú2 i2a. i2as. 2ótic emi2o2 seudo1h seudo1a2 seudo1e2 seudo1i2 seudo1o2 seudo1u2 seudo1á2 seudo1é2 seudo1í2 seudo1ó2 seudo1ú2 o2os. .so3a4s socio1h socio1a2 socio1e2 socio1i2 socio1o2 socio1u2 socio1á2 socio1é2 socio1í2 socio1ó2 socio1ú2 a3rio. a3rios. 3logía 4ón. 4ones. 4i4er. 
4o2ico. 4o2icos. 4o2ica. 4o2icas. .su2b2a2 .su2b2e2 .su2b2i2 .su2b2o2 .su2b2u2 .su2b2á2 .su2b2é2 .su2b2í2 .su2b2ó2 .su2b2ú2 .sub2i3ll .sub2i3mien .sub3índ .sub3ími .su4b3ray .sub3aflue .sub3arr .sub3enten .sub3estim .sub3estím .sub3ofici .sub3urba .sub3alter .sub3insp .su3bién .su3bir .su3bam .su3bordin .su3bordín .sub3acuá .sub3espe .sub3esta .su3burbi .su4b5rein supe2r3r supe2r1a2 supe2r1e2 supe2r1i2 supe2r1o2 supe2r1u2 supe2r1á2 supe2r1é2 supe2r1í2 supe2r1ó2 supe2r1ú2 supe3r4a4r supe3r4á4r supe3r4á3vit. supe3r4á3vits. 4a3ción. 4a3ciones. 4e3rior. 4e3riores. 4e3riora. 4e3rioras. 4e3riormente. 4e3rioridad. 4e3rioridades. 4e3ra3ble. 4e3ra3bles. 4e3ra3blemente. pe5r4ante perpon5d6r supra1h supra1a2 supra1e2 supra1i2 supra1o2 supra1u2 supra1á2 supra1é2 supra1í2 supra1ó2 supra1ú2 sup6ra talmo1h talmo1a2 talmo1e2 talmo1i2 talmo1o2 talmo1u2 talmo1á2 talmo1é2 talmo1í2 talmo1ó2 talmo1ú2 tele1h tele1a2 tele1e2 tele1i2 tele1o2 tele1u2 tele1á2 tele1é2 tele1í2 tele1ó2 tele1ú2 4ósteo. 4ósteos. termo1h termo1a2 termo1e2 termo1i2 termo1o2 termo1u2 termo1á2 termo1é2 termo1í2 termo1ó2 termo1ú2 tetra1h tetra1a2 tetra1e2 tetra1i2 tetra1o2 tetra1u2 tetra1á2 tetra1é2 tetra1í2 tetra1ó2 tetra1ú2 topo1h topo1a2 topo1e2 topo1i2 topo1o2 topo1u2 topo1á2 topo1é2 topo1í2 topo1ó2 topo1ú2 tropo1h tropo1a2 tropo1e2 tropo1i2 tropo1o2 tropo1u2 tropo1á2 tropo1é2 tropo1í2 tropo1ó2 tropo1ú2 poi3de. poi3des. ultra1h ultra1a2 ultra1e2 ultra1i2 ultra1o2 ultra1u2 ultra1á2 ultra1é2 ultra1í2 ultra1ó2 ultra1ú2 xeno1h xeno1a2 xeno1e2 xeno1i2 xeno1o2 xeno1u2 xeno1á2 xeno1é2 xeno1í2 xeno1ó2 xeno1ú2 inter4és inter4esar inter4in inter4ino inter4ior mili4ar mili4ario para4íso para4ulata super4able super4ación super4ior tran4sacc trans4ar trans4eúnte trans4iber trans4ición trans4ido trans4igen trans4igir trans4istor trans4itab trans4it trans4itorio trans4ubsta ultra4ísmo wa3s4h .bi1anual .bi1aur .bien1and .bien1apa .bien1ave .bien1est .bien1int .bi1ox .bi1ó2x .bi1un .en1aceit .en1aciy .en1aguach .en1aguaz .en1anch .en1apa .en1arb .en1art .en2artr .en1ej .hepta1e .intra1o .intra1u .mal1acon .mal1acos .mala1e .mal1andant .mal1andanz .mal1est .mal1int .pa4n1a4meri .pa4n1europ .pa4n1afri .pa4n1ópti 3p2sic 3p2siq .re3a2eg .re3a2q .re3a2z .re3a2grup .re3i2m .re3inc .re3ing .re3ins .re3int .re3o2b .re1oc .re1oj .re3orga .re1unt .retro1a .su2d1a2fr .su2d1a2me .su2d1est su4d3oes .sur1a2me .sur1est .sur1oes .tele1imp .tele1obj .tra2s1a .tra2s1o .tra2s2oñ .tran2s1alp .tran2s1and .tran2s1atl .tran2s1oce .tran2s1ur .tri1ó2x", + ["compression"]="zlib", + ["data"]="xÚeœKv£<·†§â\17ÔZ\21»Î|d\16X1 \"$âx6iºáÆ¿èU×\19;û.R_#ìG\2Þý‚„¸:¿Ï‡Óù×áí|†¿\6þZøëே¿ß\23˜¼Ãß\21þFø›ào†¿\15ø[à/Ãß\ +\127Ÿðwƒ¿/ø»\31~7‡S\3š\13h6 Ù€f\3š\13h6 ×€\\\3r\13È5 ×€\\\3r\13È5 ×€\\\3r\13È5 ×\30N-ȵ ׂ\\\11r-ȵ ×¢Å\0224[Ðl\7ø\3Ý\22t[ÐmA·\5Ý\22t[ÐmA·\5Ý\22t[Ðí\14§\14t;Ðí@·\3Ý\14t;ÐíP·\3Ý\14t;Ðì@³\3Í\0144;Ðì@³\3Í\0144;Ðì@³\3ÍþpêA³\7Í\0304{ÐìA³\7Í\0305{ÐìA³\7Í\0304{ÐìA³\7Í\0304{ÐìA³\7Í\0304ûûát\1Á\11\8^@ð\2‚\23\16¼€à\5\5/ x\1Á\11lü\5D/ z\1Ñ\11ˆ^\18üð\5„/ |\1á\11\8_@ø\2fß\15§wÐ~\7íwÐ~\7íwÐ~\7íwÔ~\7íwÐ~\7íwÐ~\7íwÐ~\7íwÐ~\7íwÐ~\7íwÐ~\7íwÐ~\7íëát\5í+h_Aû\ +ÚWо‚ö\21µ¯ }\5í+è^A÷\ +ºWн‚æ\0214¯ y\5Í+h^Aó\ +šÃá4€æ\0š\3h\14 9€æ\0š\3j\14 9€æ\0š\3h\14 9€æ\0^\7Ð\29@w\0Ý\1t\7Ð\29@w\0Ýñp\26Aw\4Ý\17tGÐ\29Aw\4Ý\17uGÐ\29Aw„ý0‚ö\8Ú#h =‚ö\8Ú#h =‚ö\8Ú#h =\29N\19hO =ö\4Ú\19hO =¡ö\4Ú\19hO =ö\4Ú\19hO =ö\4Ú\19hO =ö\4Ú\19hO =\31N3hÏ =ƒö\12Ú3hÏ =£ö\12Ú3hÏ ;ƒî\12º3èΠ9ƒæ\12š3hΠ9ƒæ\12š\31‡Ó\7h~€æ\7h~€æ\7h~€æ\7j~€æ\7h~€ß\15Ðý\0Ý\15Ðý\0Ý\15ðû\1Ú\31 ý\1Ú\31 ý\1Ú\31 ý\1ÚépJ @;v\2í\4Ú\9´\19j'ÐN @;v\2í\4Ú\9´\19è&ÐM 
›@7n\2Ý\4ºËá´€î\2º\11è. »€î\2º\11ê. »€î\2º\11è. »€î\2º\11x^@{\1í\5´\23Ð^@{\1í\5´óá”A;ƒv\6í\12Ú\25´3hgÔΠA;ƒn\6Ý\12º\25t3hfÐÌ ™A3ƒf\6Í\12šëá´‚æ\ +š+h® ¹‚æ\ +š+j® ¹‚æ\ +š+h® ¹‚æ\ +š+h® ¹‚æ\ +š+h® ùy8}‚æ'h~‚æ'h~‚æ'h~¢æ'h~‚æ'ìƒOÐý\4ÝOÐý\4ÝOØ\7Ÿ ý\9ÚŸ ý\9ÚŸ ý\9ÚŸ };œn }\3í\27hß@û\6Ú7о¡ö\13´o }\3í\27hß@û\6Ú7оö\13´o }\3í\27hß@û\6Ú7Ðþ:œ¾@û\11´¿@û\11´¿@û\11´¿Pû\11´¿@û\11´¿@û\11´¿@û\11´¿@û\11´¿@û\11´¿@û\11´¿@û\11´a켃ö\29´ï }\7í;hßAûŽÚwоƒö\29´ï }\7í;hßAû\14Úwоƒö\29´ï }\7í;hßAûõ¿Ãéõ¿_‡æt³\29\14Ñ\13ŽÑ\13\14Ò\13ŽÒ\13\14Ó\13ŽÓÍå‚\19<óÁHÝ\\Þ\6$<\1â`ÝàhÝÀp\0133\18\18ž\9qÄnpÈnpÌnpÐnpÔn`Ø\30N8\14Ò@H#!\13…4\22Ò`H£á€£!\13‡4\30\14\3NpT¤a‘ÆE\26\24id¤¡‘ÆF\26\28it¤á‘ÆG\24 ÏàøtÆŒçáíLÓ†¦-M;šö4½Ðô¦Wš\0144\29i:Ñt¦é\7M\19M\23šfš®4ý¤é¦_4½\31\26´Ñ †l4d£!\27\13ÙhÈFC6\26²Ñ†l4d£!\27\13ÙhÈFC6\26²Ñ†l4d£!\27\13ÙhÐF‡6:´Ñ‘Žltd£#\27\29ÙèÈFG6:²Ñ‘Žltd£#\27\29ÙèÈFG6:²Ñ‘Žltd£#\27\29ÚèÑF6z²Ñ“žlôd£'\27=ÙèÉFO6z²Ñ“žlôd£'\27=ÙèÉFO6z²Ñ“žlôd£G\27W´qE\27W²q%\27W²q%\27W²q%\27W²q%\27W²q%\27W²q%\27W²q%\27W²q%\27W²q%\27W²q%\27W´1£\25mÌdc&\0273Ù˜ÉÆL6f²1“™lÌdc&\0273Ù˜ÉÆL6f²1“™lÌdc&\0273Ù˜ÉÆŒ6V´±¢•l¬dc%\27+ÙXÉÆJ6V²±’•l¬dc%\27+ÙXÉÆJ6V²±’•l¬dc%\27+ÙXÑÆ\25–>ñ\\{NtÀ&:`\19\29°‰\14ØD\7l¢\0036Ñ\1›è€MtÀ&:`\19\29°‰\14ØD\7l¢\0036Ñ\1›è€MtÀ&:`\19\29°‰\14ØD\7,ØhÐFC6\26²Ñ†l4d£!\27\13ÙhÈFC6\26²Ñ†l4d£!\27\13ÙhÈFC6\26²Ñ†l4h£E\27-ÚhÉFK6Z²Ñ’–l´d£%\27-ÙhÉFK6Z²Ñ’–l´d£%\27-ÙhÉFK6Z²Ñ’6Ѹ\0016:´Ñ‘Žltd£#\27\29ÙèÈFG6:²Ñ‘Žltd£#\27\29ÙèÈFG6:²Ñ‘Žltd£#\27]¢q\3lôh£'\27=ÙèÉFO6z²Ñ“žlôd£'\27=ÙèÉFO6z²Ñ“žlôd£'\27=ÙèÉFO6úDã\6ظ¢+Ù¸’+Ù¸’+Ù¸’+Ù¸’+Ù¸’+Ù¸’+Ù¸’+Ù¸’+Ù¸’+Ù¸’k¢q\3lÌhc&\0273Ù˜ÉÆL6f²1“™lÌdc&\0273Ù˜ÉÆL6f²1“™lÌdc&\0273Ù˜ÉÆL6f´KŸ\18]\28“D6\18ÙHd#‘D6\18ÙHd#‘D6\18ÙHd#‘D6\18ÙHd#‘D6\18ÙHd#¡Œ62ÚÈd#“L62ÙÈd#“L62ÙÈd#“L62ÙÈd#“L62ÙÈd#“L62ÙȉFQ°±¢•l¬dc%\27+ÙXÉÆJ6V²±’•l¬dc%\27+ÙXÉÆJ6V²±’•l¬dc%\27+ÙXÑÆÛù8Óà¡å0p\0249L\28\18‡…CæpãðEá4ãmÄùØ°bÊ\13+6¬Ø°bÊ\13+6¬Ø°bÊ\24N*N¬8±âÄŠ\19+N¬8±âÄŠ\19+N¬8±\"Ü©6Ó/Þê…·zá­^x«\23Þê…·zá­^x«\23Þê…·zá­†û²y!Å‘=Žìqd#{\28ÙãÈ\30Gö8²Ç‘=Žì\17Ãid=+ö¬Ø³bÏŠ=+ö¬Ø³bÏŠ=+ö¬ˆáÔ³bÇ-ÓqËtÜ2\29·LÇ-ÓqËtÜ2\29·LÇ-ÓqË`8uYöãÄûqâý8ñ~œx?N¼\31'Þ\19ïlj÷ãÄûqâý\8\30çIÚúÎm}綾s[ß¹­ïÜÖwnë;·õÛúÎm}綆;£æNŠ™\0213+fV̬˜Y1³bfÅÌŠ™\0213+b8eU\\XqaÅ…\21\23V\\XqaÅ…\21\23V\\XqaEè=\25zÏâ¦Só'\31æAàmŠ8@ÑÍ€kÜ\9C\0319HÉÁz§ì³“€Å¹dœ\11\1–ýu)§Ñ;‰\17¢‡è\15Xø…Sw\30¼\1­î[ÊQ\6\8ÇÑO\25ç‡cËÓ\18¯\0078ÆJ\127\28#W\4\12~B~:ž.\28t‘§\11\18y)šœ`ŸøJ¼@\5]\21çq†cz=$~[…ÂSᛣڣ…lýï‰7ÇIX8²\30VðJTE+9ª4‰3°¼®æ‘ÍdœT\7Œþgiø·\28÷eØñƒûw…åß5–ÿ¬ò£&ûŸ¥áßòÏ”ùŸ”ùß”ù?)ó¿)\127¦ø©÷3Ý?nü¿sÝ?\11/ÿ,ýOù‡)þSú‘kúÇÊôQZþ_ÿ(þH\24\127\22~¤û'Û?ÉþÍõoª\1273ý³ø\127ærOlé¨$¢~÷z´Òã˜\"“ö2™½ÔùËn\1V̲v6\\u²é䪓w:Ùthm\15ÑqQ$\22“]ü®ÎÕ¹K]QD'­Ò¶\22Œ\21ÝnÝÂû\21YLjLÈdL¤JT\1[½åy¸\18¹I\18M¤5’µF²ÖHµ5RmT[#ÕÖHÒ\26ÉZ#åªc­‘jk¤¼Ó±ÖHÜ\26‰\27#‘‚O‹HZ›$k“dm’j›¤Ú&ÉÚ$i›¤Ú&©¶Iªm’vm’vm’j›\28“Ô!ˆ\24a¬èv\11ì\22Þ¯h\27-QËrž9¶Q¢Ó²Î %q.\1ͦ\19†ßä\\1Ñ\"Ô:\24hÖ†\5f¬ã\5Á\15-{<;\9,\3ÀŠX\21\20xQ9û8;û¸ÝÙÇÉÙGIWyHüÖ…\30¶´Â7G_…\30Á扎$V—©fU—ISz\9ºhÕµm÷¼èâ%È¢‹-ºè¢\11/\ +\3šÿ“hïSäÝŽÈ{ž*']\8÷¿’ά$mÀ:Ô\12ŒØ\18J&®í¡3ÂŽmMn˜Ê˾°[]Z¨\22v\ +Šß»\21\30û•wümè\127$x„ýBU¿úÛm[úan·miçÌWÚ­ù#ë~\127z[sñ•êšË~Íe·æbk¶Ôâ\22\19]\3Ë™ëûd§.Æ(X[„Yòp!î\11¬~JréÿM4ì9îØí—Yö\11í\11¦I:\4N!\ +,–bÙ¥[vé–]ºeŸnÙ§[vé¦Z]\27P\ +q_p?\22û±ÒO\9\29×¥ÖNwßv¶û¶“Ýw=×}×SÝ·éx‡\24Õ:\25détçølçädçø\\LJ$. 
äj]ÍŠS\0271-\4±Š\20œ‘X¥Ù\21áTk+¾RUµÞÆüc™e¿Ð¾Póg_iÜqMw\9ò.AÞ'Èû\4ùg‚ÝÆ,¾Ò¸ãšmÙe[vÙ–}¶eŸmÙg[$\27¶NÒ¨­Ê'b®ÚµlÚá®qm·\21\\“ãï\11\7÷ÆÑK\12\18£Ä\"ñõ­ðPx*l\ +\127\1ºDÚ\24œ\20½Ä 1J,\18I›à¡ðTØ\20@û—ÿéM‚ç\0168D\14…\3JR|H|JÜ$þ}ƒý0¹ãðzb\5œÆ†€à$\30O‘À¿Í¡Oˆþxs\18_ß\2^+\30\2!*¼6Zÿ‚Óp\28]+\0CN8nŒ(~r\7\23èÁO™ºbƶáÈÇb¦Öa\8\ +Q¡(ÈQ™¹‰„žF›\17lb\14°}\31…c\19\15.5\23ê\18\20q‡\18x… \16\21Š\2íU¦‡ÑÓh3ÂV+<6\22\25\27‹ŒEÆÆ\"cc‘±±èØXtl,:6\22\29\27‹Œçp\30\2ª\0118«òFÁ(\26\21#̤ø¨ø¬¸U¤ŒœNrI\"É\")D_ÅUYeUS\5aé\9ÆÌÆ¥–Ä\5@_È\27\5£hTŒ0â£â³âV\17R7¾stÜ\ +8«òFÁ(\26\21#Ê(ø¨ø¬¸U¤ŒÜÓ9:­ð\ +A!*\20\5N¥=]èi´\25iš?p•çÝ¡\9\13o#ELIà\21‚BT(\ +”’éaô4ÚŒ e<ÁÙÇÃA/°\8¹Á€®1â\27Î6 \7“o´\20GyrÙú¨\17*šy\19²lA–\13Èâ?‹ý,Ïê=«õ¬Îå\16—}>¦C3Áð…I(b\22\2¯\16\20¢BQ TL\15£§Ñf„ãl\19éj\3‚ç\16(\28ñE\12”\ +WÒ¸†ñ!ñ)q“ˆ­\28ñ^¹/\3’kšÀ1slß\11Åc[&®øŠ£ë=—mM¸>i\8æì\24RfqŽþØñ\2>I¸s5žáAÑß‚\21a<'Š\19[\9G\24ÛaþDs¨*\30gŸ\4^\15¦™’ÄÔB˜~‹Èô»¤ó¡Iaæ–gÀVaòFÁ(\26\21#j\26ÁGÅgÅ­\"îÓ\4w\24”\16£Ó\ +¯\16\20¢BQàTD\15£§ÑfDM7åä0\15ƒ³*o\20Œ¢Q1¢l‚ŠÏŠ[EÈØú\6óQpRô\18ƒÄ(±HÄ\28\12\15…§Â¦\0ÚÐ\21ÚD…1ʳÔ6É…à®Úí\16ª\127µþmÁc\2£—\24$F‰E\"\29\24\4\15…§Â¦ Û\11#\12]a`!Œñ÷á¸@j˜@R\\îøÚRë\25£‘;»%+F¸V„Þ,˜\0\22w„NÝZUl.Â\15Ùpð²pãS\22\ +Œ´@<»d<ƶòl8-†ó`˜êzK.£\0214KrC¸kmòm5•ÃªXœYi}òwã¢Nð\14/U\30}S\11“âZ·¨óf\12‡\13¥ÁHWê¢ì™þõWÀÙª}r0ˆ\24·\21×Zí{â{«/¢UÂ\2\22¥öRM]»Ð{±v\30\\o\0279p!ìâXÑ\22€û)N2ØÆ\12ÐÏ\12'¯³u?\15&3Z—\25¢\26\25>ŠíŸ!{ËSFíi#ï\25¤xÖEÇ8´†¶ÿÇâë\18³\19štÉ©÷8\23\14‚©×D“y›8¡×Óú\2äžnø\12­s–Ù³k“aò;l*[O˜­ÑàL°\19ñÚÇç¡.\27‡`Çß\28¿”’·Í\1ΆaÇñ\\q\14•WoÇi‚£[ÑZ/áV/p\ +\28uKRr¡7öýPÙl¤D;\14öLJuÈ\0ž\13‹iÀ-QÑE–ÚÙ–`Ýq™Xl‰\11í.Æ`».û©©¨&rØ1ëŠÙúh®\7,œ\21L\1î%´ýÖ°ðJ_E\28Ü_›R”•ýxvƒá뻲\30BTvÕ½q\28–Ê©.\19—¦r6L®®š^ß»‚[ý~ξd\13@³¶}©¼ïæü}§\3ÁÞj¥‡3¿¾w\5×Tü(>ïK¯Ç®˜öjéõØ—¬±±Tñõ0ômªÜWŒ»j\27¦©`­2\15¶\15àÀY+çºnÔ5'7*5î\\±­ønøú®ìlwbý®@‚ø‚\6Ÿ\30ÔÚ¹bÚ­XùbTµ¢Q]®(µ’§sFøüKævÁÈTº×¦KÚ\14\\¦ºVI²DÏ=€p×\1 ~gµ¯ÛÒ»Å0Ž\21mGÃ)Kñâ϶žíÑ¡îóÁÙZƒ™7M[Õ\18f'NsÝ+™ODŒ¯‡m­Vh‘\29ãe¸\22,i®u¶ßW\23Ty…þ\21¤½u\15*â=BPÊ\12øa,´Ìbyžk‡\20\21¦{£õÙõÄ\8\5É\24ZŸ\4‚Sˆ\12š3ô“Äâøt\23†²H×\8£ëuP\0046šcŤç#¸™±£\22ØÎøÀ1ÍƉ¯% ÏÄc3Rç™\ +=—UZŒLª\27ôpD6ŠÉj/z¦\12Óâ\27ó\3{h8„)À5%„×ó¨;?@ƒ÷©rÒ5r¼)®º\31—Ì}\6@vQ<ûVOßpÁ¡ç¿x^2¯\19\27½âŠm”ªÁ5\ +­À \23×:(Æ$\23ä0\23N„V\11‡\23;/~à\6‹°ãe®^òś—Îú¢±\0196²€57\9W”\19(,ZÎ'½°(SHF¾â\8·\15\1n\21èF(ê\29S´{¦hwMÑÝ9E»wŠõî)Öû§Xï b½‡Šv\23\5‘n=9:­ð\ +A!*\20\5L%ô0z\26mF”†“H\ +I ò\".Ò*¬²*ª’*˜\19Kæ$¢9‰lN\"œ“Hç$â9©|Nš 'M‘“&ÉúªÆók\26ϯi<¿¦ñüšÆók\26/¯i¼¼¦ñòšÆËk\26O¯i0z¸3„#–\9\31S#Æã;¾?ñS9Ž0\"\25á‹\12A\1278ÅãðÚz¾¹Uv;¦;]›ñ£ _ÊBEìùC\29Eþ\ +\8\11ÑÈÕº:›ÞÏM-íq\12¸Ã1z‰Ab”X$Ò¾&x(<\0216\5lÎ)³v\22í,ÚY´³hgÑΪU;«vVmyºr«»ÆaEòœ$yÉ’¼¤I^ò$/‰’—LÉkªä5Wòš,yÍ–ø†/Ü׋tö\"½½Hw/Òß‹tø¢=¾h—/Úç‹vú¢½¾p§/Òç‹tù\"=¾H‡/Òß‹v÷¢½½hg/Ú׋tu\127ã‡C\28Vx… \16\21Š\2¥¸éS!¡§Ñf\4iÊ\9Î\11áð«qåOÆ''\24:~þÕÉã¯Nž~uòð«“g_<úêôÉW§\15¾:}îÕéc¯NŸzu1³v\22í,ÚY´³hgÑΪU;«vVméZp\\ð0$à¬Ê\27\5£hTŒ0“â£â³âV\0173zJç9—çDž³xNáYß‹¸\23e/²^4½\8\14Ü_9:­ð\ +A!*\20\5’\31´Û\ +=6#Hsñü²ž£Ó\ +¯\16\20¢BQÀ4B\15£§ÑfDiøÍ\20G§\21^!(D…¢Àiô•”ÐÓh3¢4£“Í!pVå‚Q4*FœñQñYq«È\25\3ç\11’-H® ™‚ä\9’%hŽ \25‚ê\7U\15ª-Û¢[¢Û¡[¡Û [`þͽy7çìû\13_²¼ñ\27–‹¿9Êssœçæ8ÏÍqž›ã<7ÇynNòÜœä¹9Éss’çÆÃÊ%´t rtZá\21‚BT(\ +”‚éaô4ÚŒ(ÍìßÒ1\9p\"\"o\20Œ¢Q1âlŒŠÏŠ[EÈ8ûcÂß³À]Ç\5.\30x\27\23és\4^!(D…¢ÀY\23ímLO£Í\8·1rwˆÒ\29¢t‡(Ý!JwˆÒ\29¢v‡¨Ý!jwˆÚ\29¢t\7¸P¡‘ž£Ó\ +¯\16\20¢BQÀ\20B\15£§Ñf„×Zá¯Û0z‰Ab”X$ÒÅ\27ÁCá©°)è„\15äS+äRÃD÷jDpã§Øº™\8\31ž®p\31E•\19=\29§Ú)ÈÚt\7\16ðË\29!|.9\8\12º\20>~\\\12ᆋ\30ð\17Ñs<”y\28ñ™;ÖùNVFà\21üPZ\"\127œðá.ßÄ9½‹sR\1—ž\12þð+ù\0Ë\30\18ܨÉ„²¼UÃ*Æ\0303\0”¨à\16¢ÖÄ}ÍP\9¯x\5Iâõ÷˜\3퉂\0{\14nOé\26…#v\31\2¯\16\20¢BQ îÃô0z\26mFÜÒ™\15s!ú‡ˆú\24\"êSˆ¨\15!\0\28k;Ñv¢íDÛ‰¶\19m§ÚNµj;Õæ\3!\14¡'ã\20Vx… \16\21Š\2¥`z\24=6#L3Ò˜FÁIÑK\12\18£Ä\"‘ôG\25Ó\24ž\ +›\2^m½ÁÑ\22hd›Ý@}“#$bð\ 
+A!*\20\5Ì&ô0z\26mFx\15íè*’‚“¢—\24$F‰E\"ëËå#ÃSaS\16í7ü­ {=ÿà0Í\0177\14ß•bZŠ˜—À+\4…¨P\20(9ÓÃèi´\25¡àï´\15):­ð\ +A!*\20\5JÃô0z\26mF˜f(\9»\4G§\21^!(D…¢@i˜\30FO£Í\8ÓD:¥SpRô\18ƒÄ(±H$ý(§t†§Â¦ Ú§\2wu\4ñÏ\8wª„.}0À\9]é±d;µI)ÃùÞߨpø5Çå-Óc\6&o\20Œ¢Q1¢»BÁGÅgÅ­â_Fú¢†áõœ\14\4p–âˆÿ£…~´(ÿ«\4ë\6\14ü3;\\ý8à/¹”jåk›Œ#ýXBÒà\15n„}ê•ð‚H\12\29ûhˆ·ñ‚“7¼×Ú».\11\13à0\31þ΋‚Ó¸\8T\15¥ê\2â-¨²ßU?ˆóÑ\15¾\17Ä—éŠS+\20g/”ü\ +”<7\30D/1HŒ\18‹Ä‹Dn:„‡ÂSaSÀ®–<Ø\9ï\ +=\3~³L\0\23j\8ø¬c&xk^\127‰Ž\17\127ÊE0S“§(.£¸Œâ2ŠË(.£¸Œê2ªË¨.£ºŒâ’ßptZá\21‚BT(\ +tÌ%}E\"ô4ÚŒ Mrü\19\11ŽN+¼BPˆ\ +E\1Ó\8=ŒžF›\17¥™\26N35’fj$ÍÔHš©‘4S#i¦FÓL¦™\26M35šfÒoé¥Û`3ž¨\20¸2r(\28¨\13´£h?Ñn\"½Äãg¹øKVŠ‹€Óˆ‡\1dÁ/\1271¾¾\5\\\23Ò(ôz*Ò‡¥@îøþú«8\4<\4…ðø!|=¹\22?\27\29uöR)óÌ)ˆ4}–IÄ\31X\"–shœÐëÉXŽ£ËAèõÍÈ\31>0€[¦Å)àWYDü„O\17,\30ÒÈ·­\28±AG¹ie\8\ +Q¡(PƒŽz¿*ô4ÚŒ°ßx~‡ÇÑi…W\8\ +Q¡(P\26¯/ï„žF›\17¦áÇêI\30«'y¬žä±z’ÇêI\30«'}¬žô±zÒÇêI\31«'}¬¾Ä3½Úæè´Â+\4…¨P\0200…ÐÃèi´\25a\26~\9µÈK¨E^B-ò\18j‘—P‹¼„Zô%Ô¢/¡\22}\9µèK¨E_B…7‡W…tGûÚrh\0140çÔ\ +}wÀÑi…W\8\ +¶lQàÜE>?\16z\26mFxÅK\23¤¿–xt§\5¶¹¡!‹#íІ‡,† \16\21Š\2ïÐF†,¡§Ñf„¿ÒÃ'\9¿8@vù*\3?ºÀÿ-ÂgãSà\127Ñ\3Ã>\127ýñ¦_{¼É' oò‹†¥¼ih\"ð\ +A!*\20\5\26¤˜\30FO£Íè/\17hÑo|™ðW\14ÄG¸\"i•Æ€t:ÃYþ‹ë\\7\20/˜\18\3}=(HßØ)ÒHƒ\28ù¥\5bIg'ëÓ‡ê„aZf~0H\31\0282%Žnä\24\19ýœX\16¯›X¥)8\20rÆÙ[nÇ‹B:Ù†?ÉÃúK‘w~\12Ô\27Š¾ó\19\ +FѨ\24Q—(öÎOñYq«ø—ñ˜ð?\"(â\127a¨|\\Cþõ³È¿¤o\2÷\28:ŸpïÁ\0116úé=îÎUª•ú±\16—BKÿò¤\22tuwäÿΧ¸¯\22‰™ÿó\13^Eû4ÇéOû\127´\9t+Æ‘÷$ߌ1\4…¨P\20d'Ê\29™ÐÓh3â\29ø\127p•\9WÁ4ær„l\12^!(D…¢€Ù„\30FO£Í\8\127â\11\23ž˜\5ƒ“¢—\24$F‰E\"é\19<\20ž\ +›\2~¼ôÚàF„¿ÎBÀ_5ûÄ[E‘\18&Ù*‚ \16\21Š\2gMºULO£Íˆ¶Šß½ptZá\21‚BT(\ +œFß½\8=6#L\19gÚ\24\12NŠ^b\24%\22‰¤OðPx*l\ +¨D<©zRù¤úI\19$Í,E²\28É’$Ë’4Í\28ù×¥\28¡…ÊÀ»Ž#äeð\ +A!*\20\5Ì+ô0z\26mF÷æéòˆ‚“¢—\24$F‰E\"ê3<\20ž\ +›Â_z9æ\19ýCM&WlŒ0$*D¥˜èÉ9ÜIk\12‘\30à¯ñ…Êà`ˆÅQ\11Æ\5\24'\20iÐ’\2\ +áÏDN‹k\26\"üUŒ€\127ýÅá„\11áìµ>°‚\20Ú¨„¿9U\12¶ð’£qvgCƒˆÞ¹@ß÷ò\30çÿGpøtÇåt9ü:Ã\21ЄŸy\19•„ÑO¿ñ'OB³SZ½Ç+k¢0\17ýŽ7\ +°ë\25Ê„\31}þæ_ó1…/†¾¸æbxgš´jv\28ÓYbæoWS¦oK\127ûwü\15«3>\127¢·ÍØù\20\ +¾à\27~ãoþŒ\22\"Zšj¦–¾l0¾3Ó\22!Ð\6Íî\4©O#}àG\5üºpÖ\25U¿¶\25n\14ðÿ\19Ãu\30…\15¾¿xó½€VÜ%Ò¯í\16Ã\27ß‹„©‘ØK\\$òíK|;cü\29\27\14ï\\›zºU=M‹Ñ%?]å´p\12uÉpô‚¸‰p1Ð\30£_°*ÙÌÄ›@óxÀ\31g¡x†”°\127ß\22ÌÀ\16\5Þâë\127„\19Î\29æÊØ”ó`\28\27oŒ½-ó“³Ûÿ\0034ȸ\8", + ["length"]=27710, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=3371, diff --git a/tex/context/patterns/lang-et.lua b/tex/context/patterns/lang-et.lua index 32d10e3fc..f9828ff2c 100644 --- a/tex/context/patterns/lang-et.lua +++ b/tex/context/patterns/lang-et.lua @@ -92,7 +92,53 @@ return { }, ["patterns"]={ ["characters"]="abdefghijklmnoprstuvzäõöüšž", - ["data"]=".aa4 .aasta5 .ahe4li .al4a .al4gas .antiik5 .ap3l .ap3r .art5r .au3a .eba3 .ee4 .eela4 .ek3l .ek5r .gu4a .hie5 .idi4 .inim5a .ise5e .is4o .ja4e .ja4t .ka4e .ka2o .ke5hi .kip5r .ku4pl .ku5sa .kusee4 .la4pl .le4e .le5hi .lemmik5 .le5se .lõpp3 .ma4a .maa5a .ma5j .mi5sa .mo4e .nek3 .ni4p .nisu5 .noa3 .oa3 .oo4 .pap4p .pea3a .raadio5 .re4a .re2o .rep3r .re5so .rü5hi .sak5r .sap3r .se4a .sk4 .skaa5 .so5li .sp4 .st4 .teoo4 .tet3r .tina5 .toa3 .ts4 .tsi4s .turba5 .tus2 .tu3sa .ul4ga .ulu4k .um2b .uu4 .vaa3l .vask3 .vere3 .õppe5 .ää4 .õõ4 .öö4 .ühi4s .üle3e .ür2 .üü4 aaa5l a1aas 4aab aa4bi. 5aadel. 
aadio5a a1ae a4aer aa4fe aa4gan aa2h aa4has aa1i aai4g aa4is a5ait aa2j aa2la aa4lae aala4r aa4las aa4leh aa4lek aa4lel aalg4 aali4ke aal3k a5allik aal3t4 aa2l3õ aa4lü aa4mad aa4man aa4mee a5ameer aa4mö 4aan aa4nam aa4nat a5andm aan4duu aa5ni aant4 aa4pek aa4pen aa4ra4b aa4raf aa4raj aara4l a5arter aa2r3õ aa4rö aa4rü aa4sab aa4sai aa4sar aa2se4 aa4so 3aas5ta a5a4sul aa4sut aa4sü 4aat aa4taa aa4tas aa4teh 5aatom aa4tõ aa4tüh a1au aaÅ¡i5 a1b 4a3ba a5be 3abie a4bij 5abs a1d a4dad a4deo a2der a3di adu4r adu3se a4dö a5eali ae2d 3aed. 2aee a3ees ae4f 1aeg. ae4ga4j ae4gi 3aegn 2a1e2h 4aei 4aek a3eks 2ael a5elani ae2le ae4lo ae4lu. 4ae2m 4aen ae4pi ae2s a3esi 4aett a1f a4fek a1g 3a4gent agia4 a3go 5agreg agud4 1a2hi. ah4kar 5ahnu 4ai. ai2a 5aian ai4hu ai4ke ai4kl a3ilm ai4lu ai4me. aimp4 4ain. a5inf a5inime 4ains a3inse ai4pr ai2sa 5aistin aist4r ai4va a1j a3ja. a4jale 5ajalo a4jalu 1a2jam 4ajo ajut4 a1k akaa4s a4kaat ak4as 3aken. a3ki ak4kis ak4kr a3klas ak4lau a5klii a3krii ak3ro 4aks ak4te4l a2ku. aku4map a5kvali 3alald ala4mas a4laüh ale4le al4et alet4t 5algat al4ged a3li ali4san a4liste 5allee. al4le4h al4luj al2lü a4lob als4 4alt 4alu. a4lud 1alun alü4h a4lüt a1m 4am. a5ma. a4maad 3ametn amit4 am4o am4pal 5amper. 4amü 2an. 3analü 5anatoom an4das an4deo an4dog 2ane a4neh a3ni a4niso 4anj an4kr 3anku an4nah 4ano anos4 3ansam 4anst 3anten 5antilo ao4d ao4he a3oht ao4j ao2le ao2lu ao2m a3oma ao4nu aoo4p ao2pe a5opera ao2r a5oras a1osa ao4su a1ots a1p a2par 5aparaa a5part a3parv a3pi a3pla a3po ap4pal ap4pin ap4pis ap3ri ap3ron a4pru a3pä 4ara arak4k a4rase 5areng 3aretu 3arhit 2ari ari4al ari4ap 4ark ar4kel 5armast 5armee. a4ro4l 1arst 5artikk 3arua 3arvam 5arvestus 1arvu ar4vää asa4las a4same a4sau ase4ma as5ema. 1a2sen a4seos a4sese a4sett 3a4setu as4fä 1a2si. asi4ala asi3an a4sind asip4 5asjan askõ4 as4kõl a5slaav a4sok as3ole aso4p as4pet as3pl as2po a3spor as4san ass5t 4asti 5astme ast4rak a4su4b a4sud asui4 a4suss a5suu a1t a3ta a4taž 5ateljee atii4v atmi4k 5atroo at4ros at4rus at4soo ats4p at4sü att4s 4atu au2a aua3l au4ba. 4aud au4de. a3ui 4au2j au4ke aulu3 aulus4 au4pü4 a5urg aus4o au2su a3usul 1auto autoo4 auto5s aut4r a1v av4a a4vans a3var a4veh a1õ2 a1ö2 a1ä2 a1ü2 až4ni 4a1Å¡ 5baa baa4g baa4k baa4s bab4 bae4 bag4 ba4he ba4ju. ba3k4r bak4s bak4v bap4l bap4r ba4rõ basa4s bas4ko bas4pe bat4r 4bb 2bd be3a4 beb2 bef4 beg4 be4lü4 be2ra2 berk4 bero4 be4rõ4 be4si be4st bes4tis bet4r 4bf 4bh bha4 bi4ala bia4v 2bi1e bi4en 4binõ bi4pla bis4a bi4si bis4k 2b1j 2b1k 2b1l bl4e blet4 b2lon 2bm 2bn 4bort 2bp 2b1r 3brig bros4 2bs b3so 2bt4 4buks bu4lõ 3bum buse5 bu4si4 bus4kr 2b3v 1da daa2 da5ab daala4 da4do dae4 daf4 d5ain da4leh da4lum da4lus 5dam 4dann da4num dao4 darat4 4darb dar4de 4da4re 4daru 2darv d3arv. 4dasen da4sum 2d5b2 2dd 1de dea2 2deaa de5av ded4 2dee. dee4le 4dehe dek3l dek4s 5del. de4lau 4de4lek 4deluv de4pr dep5ressi dere4 de4rel de4rep 5dero deru4 2df 2d5g 2d1h 1di dia4v did4 dig4 dii4g diina4 dii4sa dii4su 4dilm 4dini di2os di4plo dis4ai di4sar di4sas dis4pr dit4r 2d1j 2d3k4 dkop4 2d1l dle4v dlust4 2d1m dmee4 dmi4k 2d1n 1do do4h doi4r 2dos d3osa 5dosk 2d3p4 2d1r4 5draama. drao4 3dreen d4rel d2ren 3drena 2d1s4 dsar4 2d3t2 1du du4b due4 2d5uks d4u2s du4s1a du4see dusk4 2d1v 2dõ dõ4l 4dä 2dül dü4li 4dü4r 5düü 4dž 2d2Å¡ 3dÅ¡em eaa2 ea3aj e1aas ea2b e3abi e1ae ea2hi e1ai ea2j eak4s eak4v ea2la e3ala. e1a4lu ea2me e5amet e5ann ea4nu eao4 ea2pa e1ar ea4re ea2s e4ass e5aste e5asut e3au ea2v e3ava. 
e1b e1d ed4a e3eali e3ees ee4fe eei4 eek4r ee2la 3eelar e5elekt eel5uu ee4nai ee4nal ee4nam ee4no eent4 ee4pai e5e4pit 5eepos ee4ral eer5ap ee4ros ee4rot ee4sin e5esine ee4s5ist 5eeskir ee4sü ee4tõm ee4tä e1f e1g egaa4 egas4t e4goi 4eha e2he. e2hi e3hin 5ehist 1e4hit 5ehiti eh4ta4 4ehä ei4dul ei4e eie5ri ei2ga e3iga. ei4ha ei4he e3ilm ei2lu ei4mem 5eine. ei4roo e5i4sa. ei4sist ei4tau ei4tõ ei4va. e3j e3ka e4kahi e3ke ek4kap ek2l ek4lo e3k4o ek3re e3krii 4eks. 5eksam. 3eksem eksi3k 3ekska eksp3r 5ekstr ek4tül e3kü elaa4v e4lagr e2lam 3elamu el4an 5elanik eleis4 e4leks 5elektro 3eleme 5elevaato e3li e4lid elii4t el4kus e4l4ob elo4p els4t elt2r el4tü e2lu 1elu. e5lub elu5i4 1e4lun e3lus. e4luss el5uss. el4üh e1m 5emand. e5mat emne3 emos4 emp4r 5emuls endus3 5enelas e5nem 3energ e4nese e4nesl enk5l enk4r en4sal en4tos en4tü eo5a eo4ja e3o4le eo2lu eo2ma eoo4p eo5pl e3osa. e3osak e5osali e5osav eos4f e3ots. e1p e3pl e4pli ep4lu e1r e3raa 5erakon 5erald e4rao erei2 erek4 5ergut erih4 eri4uu er4nau e4ro4r erp4 er2pr ers4k er4taa er4tak er4tes e4rud es1an 1e2se. e5sek e5sen e5sil eska4j eska4s eske4 es5kel esk3l esk3r es2k5ö es4laa es3ole es4or es2po e3spor es4sist 4est es4tü e3su esup4 e1t e3t4a e2ten 5etendus. etii4s et2ra et4sin et4soo et4sü et4taj 1ette et4tru et4tä et4tü 4etu e4tõ4d e4tüm eu4de e3u2j e3u2l e5u2n eu4p eu4s e5usk e3uu e1v e3v4a e1õ2 e3ö4 e1ä2 e1ü4 e1Å¡ 5faa faa4n 4fa4d 2ff 1fi fii4t 4fj 4fk 2f1l f2lo 5floora 2f1n 1fo folk5 fo4ro 2f1r f2re 3frees 2fs 2f1t ftat4 3fu 3gaa gaa4r 4gaast 4gab 4gaed gag4 2gah ga2hi gai4g ga5is ga3k4 5gam ga3o ga3p 4gart 4garv g4as 4ga4suk 5gata gat4r gau4 ga4va. ga4van 2g3b 2gd4 gea4 ge4du. gek4 ge5kl 4gekse ge4lah gela4s ge4leh ge3lu ge4lä ge4nan geok4 ge4or 4geos 4gerg ge4rus 4gf 4gg 2g1h gi2aj gii4s gi3k 4ginf 4gint gio4r gi3pl gi4san gi4sil gi4sö git4r gi4vaj 2g1j 2g1k 2g1l g4lüt 2g1m 2g1n gne4ta gne4te4 4goh go4le go4ma go4po 2g1p4 2g1r g2ran 5granaa 3g2rav g2ru 3grup 4g1s4 2g1t2 g3tr 3gu gub4 gue4 gu5i gu4nel gu3o gup4 gu4sen gu4sä guu4j 2g1v 2g3õ4 4gä gü4l 3ha haa4ri haa4s hal4lai 5har hasa4 hat4r 1he hee4m hee4s hei4s hek4v 2hel. hels4 het4r 4h1h hhe4l 1hi hia2 hii4s 5hil 5him hio2 hi4san hi4sar hise4l hi4st4 hi4sü 5hits h1j hjaa4 4hk h4kal h4kas h4kis hk3l h4ko4 hk3r hks4 hkt4 h4kusk h4ku4su h4kõ h4kä h1l hle4n 4h1m h4mo4 h4mü h1n hnika5 1ho hom4o 4hp4 hrs4 4ht h2ta hta2j h5tak hta4l ht5ar hta4su h4teg hte4n hte4r h4tid h4to h4tr ht5s4 h4tö h4tä 1hu hu3a hue4 hui4d huk4l hup4 hu3sa 2h1v hvad4 hvast4 hve4l h4vini h4vo hvu4s 1hõ 3hö 1hä 3hü 4ia iaa2 iaa4l ia5alane i3aast i5a4bi ia4bo i1ae ia2g ia2h i3aine i1a2j ia4kr i3akt i1a2la i5alb i3alli i1a2lu ia2me i5amet. ia2na i5andj i5andm ia4ne i5anne ia2p4 i3apar ia5pr ia2re i3arm ia4ru i1arv ia2s ias4ke i5aste. i3asu ia4tel iat4r i1au iau4l i5a4vaj i5avald i1b i4bad i4bau i1d 5ideaa idii4 idlu4 ie2 i3eali i1eel iei4 i1eks i5elani i1elu i3eos ie4ri iers4 i3esi ie5so i5ess ie4st i3eten ietu4s i1f i1g i4ga4g i5gar 5igatsu i4gav ig4ram ih4le ih4te ih4tin ii4a ii4deo ii4du. iie3 ii4es ii4gaa ii4ga4s ii4ha iika4 ii4kis iik4ro iiks4t iikt4 ii4lab ii4lad ii4lae ii4last ii4leh ii4lel ii4les ii2lõ4 ii4lü ii4mai iimat4 ii4met ii4nai ii4nau i5inde i5inf ii3ni i5inimen iinit4 iink4 iino4 ii4nol ii4nos ii4nõ ii4pr ii4ran ii4rau iirs4 ii4rü ii4san iise4l ii4sev ii4sik. 
ii2ta ii4teh iito4 ii4tom ii4tos iit5re ii4tõ ii4tü ii4ves ii4vet ii2vo i1j i1k i4kalu i4ka4re i4kark i4kaut i4k5elu ike4si i3klaa i3klas ik4ler i5klub ikop4 ikos4 i4kov i3kre ik3ro ik4sar iks4k iks4po ik3t2r i3ku iku3a il4a ila4s i4lasi ilia4 i4lisi il4kan il4kõ ille5s illo4 5ilme. il4mot 3ilmut ilo5g2 ilp3l il4ti i5lun ilu3sa 5ilustu ilü4h i1m i4melu imi4g imisu4 i4mo4l 5imper i1n ind3al 5indeks inde4r ind4re 2ine inee4s i4neh i4neks ini4kü i2nim ini4mas i4nime 5inimene 5iniml i4nini inna5a inni4si 3insen 1insp 1inst 3inven io4de io4h i3oht i5ohv io4j i5oks. io2le i3olek io2lu i5oluk io4lü io2ma i5oman ioo4da ioo4ne iooni3 io2p i3oper i5ork i1osa ios4f ios4p ios4t iost4r io4su i3ots. i3otsi io4tü i1p i3pla ip4lu ip4ro ip4sas ipse4 ip4sep ip4sus i4ra4b i4rase ir4dis ir4nõ i2rui iruu4 isaa4v isa4ja i4sana isa4re i4sarh i4selo i3sem iset4s isi4g i2so 3isola i5soli is5oma iso4r i3sot i3s4pek is3pl i3s4por is3sp 5istand i5s4tiil 5istme ist4ru ist4se i5stsee 5istutu i3su i4sõm i2sü4l isü4r 2it itaa2 itee4l it4rak it4ras it2ru4 its3k it4so itte1 itt4s iu4b iu2da iu4gi iu2j i3uju iu4la iu4mar iu4me4 iu2mo iu4ni iu4pu iu5sa i5uss. iu2su iuti4 i3uu i1v i4vad i4vakt i4va4lus i4vant i4vau i4veh ive4l iviil5 i4vü i1õ2 i1ä4 i1ü2 i1ž j4a 3jaa ja1aj jaa4l jaa4r jaa4s jab4 jad4 ja5el jae4r jag4 ja3i2 ja5kra 5jama 5jame jao4h ja3p4 jas4ke jas4tii jat4r jat4su jau4b jau4l je3lu 3jo joo4k jue4 5juma ju4sei juse4l 5jõ 5jä 1ka kaa4da kaa3de ka5aj ka4al kaa4ra kaa4sis kaa4ta 4kader 4kaed 4kaeg kae3lu kag4 kahe5i 4kaia 2kain ka5ist kaitse5 ka4jav ka4ju. k5a4lus ka4pl ka2pr 4karm 4kaset k5asj kat4r 2k3b 2kd4 1k2e ke4el kee3lu 2kehi 4keks ke4laj ke4lak ke4lok 4kemb 2ken. 5kene keo2 kes2k kesk3a4 ket5ra ket4te4 2kf4 4kg 2k1h4 1ki kie4 kik4r 4kinf kio4 ki4pr kis4a ki4san ki4sik 4kiste kit4r kiu4d kiu3su 2k1j 4k1k k3kra kks4 kku4ro 2k1l4 k2la k3la. 3klaas k3lai 3k4lamb k3lan 3klapp 5klassi k3lat 5klaver kle4a k4lee k2lei 3kleit k3lem k4lib 5klibu klig4 k2lii 3kliim 5kliiste k3lik k3lin k4link k2lo k3loom 3klots k2lub k3lus. klu4sa klu4sõ k4lut 2k1m 2k1n knaa4 k4nip 1ko 2koks k4ol ko4len koo4sa 4ko4rie 4korpi k5osa. ko4va 2k3p4 2k2r k4raan 5kraana 3kraav 3krab 3k4rae k3rai 3kramp 4kran k3rau 5kredii 3k4reem 4kreg 5k4res k5ret 4krip 5kristal k4roh 3krohv k4rook k3rut 3kruu 3kruvi 4k3rü 4k1s k5sae k4sai ksek4 ksi2k ksika4 ksit4 ksk4l ks5kõ ksp5lo ks2pr kst3r ks4tü 2kt kt2ra 1ku kui4s 4kujuj 4kujum kuk3la 4kuks kul2d 2kulp kumi4su4 kummi5 3kus ku4siv kut4r kuu4ni kuuse5 4kuž 2k1v k4van 3kvart 5kviitu 1kõ kõ4de. 2kõi k5õis 4kõp 1kö 4köd 1kä käi5s 2käk 4käm 1kü 2kühe 4küle 4küll. 2kž4 4kÅ¡ 1la 5laag la4aj laa2r laar5a laa4sa 2laav 5laava 4ladj 5ladu lae4r 3laev 4lahv lai5ek5 lai4ga la4jap la4jav la3k4r lak4v la4la. 4lald la4les 4lals la4lus 3lama 3lamp 4lams la4nes 2lani lap4l 4l1arv 4larü la4sas 4lasb la4sei la4sem 4lasen l2asi 4la4sut 3lau 5laud lau4n lau5su 4lavh 2l1b l4bau lb4lo 4l1d l4dala l5dist l4don l4dü 1le le1a2 3lee leek5l lee4le leep5r le3hi 4lehit 5leht 3lej 4lekk 5leks. 4lektr lektri5 4lele le4lek 5lema. le4mor lenaa4 leo2 le3oh le5olu le4pl le2s 4lese. le3sõ 3let 4lette 4l1f lfoo4 lf4t l1g l4gae l4gai l4gall l4gann l4gase lgas4k l4ge4le l4gi4d l2go l4gu4j 2l1h 1li li4ala li5am lia4v 3lid li4dan li4ga. 4liia liie4 5liig lii4na 5lik. 
likai4 li3kla lik4ro 4lilm 4li4lus limi4s 4linf 4linim 4lins lin4te 4linv lio4k li3p4l li4sah li4sik 4lisol li4suj li4sõl li4sö lit4r li5tu 2liže 2l1j lja1a lja5os 2l1k lkk4 l4kli lk4lu l3kr 2l1l l5ler lli3tr l2lor l4lots llp4 lls4 llt4 l3lu l4lää 2l1m lmaa4v lmat4 l4mau l4meks lmp4 2l1n 1lo lod4r lof4 lo4gal 4loha 4lohv 5loi lok4r 2lo2l l1ole l3olij 3lolo lo4man 5loog loo4ram 4lord 4lorn 2losa l3osa. 2l1p l3plaa lp4le l4puk 2l1r lrih4 2l1s ls4ka lske4 ls4o lste4 2lt l4tau lti4l l4tok lt4sel lt4sis l4tuk l4tõm l4tää l4tüt 1lu 3lua lu3al lud4 lue4 3lug4 4luhk 4luim lu4juk 4luk. 3lukk 2lun lu4nin lu4nio luo3r lu4rau lu4see lus4tii lu4sum lu4sö 3luta lut4r 3luu 2l1v lvel4 1lõ 4lõd 4lõie 5lõik 4lõis 4lõit lõ4l lõ3pe lõp2p 1lö 4löe 1lä 4län län4gi lääne5 1lü lü3h lü4ma 1ma 3maa maa2b4 ma3abi maa3la maa3p ma4bi ma1e2 mag4l 5mahl 4mahv 4majam 4ma4ju. ma3kl mak4v 3mal ma4la. mal4ma ma4om ma3pl 4mart 4marv ma5se ma5sin ma4st 4ma4sul ma4sä 4maut 2m1b mbat4 mbe2r3 mblu4 mbo4j mb5usa 2md4 1me me1a4 me4an 3mee mee5la mee4s me3hi 4mekv mers4 mert4 me5s4o m4et 4met. 4metl 2metn met4se 4mett me5u4 2m1f 4mg4 2m1h 1mi mia4m mia4r 4mide mi4kaj mi4nah 4minf mi3ni mip4 mi2s mis1a2 mi3si mi4sig mi4sih 4mi4sik mi4sil mis3k4 mis4tii mist4r mis3ts mitte3 2m1j 2m3k4 2m1l 2m1m m4maat mma4s mm4e mme4r mmika4 mmi4kal m4mor 2m1n4 1mo 4moh mo4ha m3oks 4morg 4morn mo3sa 2mp mpa4l m4pap mp3l mp3r 2m1r mruu4 2m1s ms4o ms4p 2m1t2 m5ts 1mu mua4 mu5ah mud4 mue4 mui4m 4muls mup4 5mus mut4ra 2m1v mõ4d 4mõig 4mõp 3mö 4mök 3mä 4mäk mär4gel 3mü 4müh 4mž 1na n4aa na5ah naa4lu naa4r5a nad4 na5ei nag4l nah4ke nai4d nai4si na4kru 2nal4ü 4na4mer na4mii n5amiid nants5a na3o4 nap4l na4ret 4narv n4as 4nasb nass4 nat4ra nau4b 4nb4 4n1d n4daas n4dab n4dae nda4ko nda4l n4dau ndif4 ndip4 n4do4r n4duj ndu4sõ n4dü 1ne 5ne. nea2 nee3la nee4le nee3lu nei4v ne4lah 4nelas ne4lek neo4d ne4pl 2nerg n4es ne4tan ne4tõm 2n1f 2n1g n4gai n4gall n4gase n4gau ng4lü n4go4r ng3r ngud4 ngus4t n2gü 2n1h 1ni ni3ap nia4v nig4 nii4g nii4tu nikkel5 4nilm 5nima nima4s 5nimet 3nimi ni4mig 4niml 4nint ni4sid nis4ke 4nisl 2n1j 2nk n4ka4n nk3r nk4se nkst4 2n1l4 2n1m 2n1n nna3e nna5i nnak4r n4ne4f nnio4 nnk4 1no 4nob nod4 4noks noo4na n4or 2n3p4 2n1r 2ns n4salp ns4e nsk5r n3so nso4r nste4 ns4tee ns4tem nstis4 nst3r ns4tu nsu4s 2nt ntaa4r n4teh nte4l ntig4 n4tiso n4tots n4tr nt3ra nt3re nt4sab nt4sal nt4san nt4sar nt5sü ntt4 n4tüh n4tük 1nu nu4da nui4g nur2k nus4p 2n1v 1nõ nõ4l 4nõli 1nö 1nä 1nü 4nül 4nž 4nÅ¡ oa2 oa4a o1ae o5aku o3anal o5apa oau4b o1b oba4s 1obj 4o1d odaa4 odaal3 o4deh odu4sõ od4Å¡ oe5a4 oe4fe oe2l o3ele oe4mi oe4mu o4eo oe4rah oe4rõ oe4si oet4r o1f of5r o1g og4la og4li 3ohaka oh5to 5ohvits oi2ta oiu5a o1j oju2s okast4 okku5 ok3la ok5liin okoos4 ok3r ok4re ok4ri o5kris ok4ru o3la o4lae olag4 ola4las ola4su ol4do ole4an 3olek. o4lend olig4 o3lii olii4g o4li4lu o3lis 4olj ol4lae ollis4 ol4lü ol4mai 4o3lo ol4tam ol4tar 1olu. 5olukor o4luks olõ4 o4lä 3omadu om4baj om4ban o4mok ona4a o4naj ona4ko o4neh o4nele on4gos onit4r oniu4 onsoo4 ont4re o4nuu 2o2o oo1a oo4kal ook4r oo4kää oo4lau oo4lee oo4lig oo4list oo4log oo4lõ oo4lü oo4nah oo4nar oo4nau oon4dee oonk4 oono4 oo4noh oons4 oon5t4 o5opti oo4rat oo4ree oo3sa oo4sel oo4tõ o3pa op5last. op4lu op3ra op3ri o2r3ai o4ralg ora4mii o4rana 5orav 3order 1org org4r oria4 3orju 5orkeste 3orna or4nel or4tal ort3r oru4s or4võ o2sa o4sake 3osako 4o3sau 4ose o4seks 4osf o2si. 
4osin 3oskus 4osl 4oso o4sok os1ol os4pa 4oss os4saj os4sü os5taas os4tem 4osto ost4ru 5osuti 4osõ oto5a otok4 ot2ra o3tran 3otsa o5tsö ot4ta ot4teo otus4ta otü4l ou4b ou4n o1v ovaa4l ovio4 ovol4 o4vu4n o3õ4 oü2 1pa paa4ka pa3a2m paa4tel pa4e pa4lus pans4 pa4pr 5paras 3park 4parta pa4sp 4pb 4pd 1pe pe2a pea5j peas4 pe4au pea3v pe4f 4pektr pela4 4pe4lev pel4t pe4lü4 p4em pe4ol 5perd pe4tap peti4k 2pf4 pg4 2ph 1pi pii4v pik5r pin2d pin4gis pip3r pis5ta piu4g 2p1j 2p1k 2p2l4 4pla. p4laa p5laasi 3plaat p4lak p3lam p5lane. 3plasm 5plastik plek2 3plekk 4p3lem p3li 5pliia pliig4 3plok p3lom p3lu 3pluus 4plõ 2p1m 2p1n p4neu 1p4o 5po4e 3pon 3poo poo4la poola4v pordi5 4p1p ppa4k p4pas ppe3a ppee4 pp3l p2p3r pps4 p4pud p4põ p4pü p2r4 2pra 3praa 3prag 3prakt p5rau p4rep p4resi 4prib 2prii p5riit p4rog p4roo 4p4ros 3p4rot 4prum 5pruu 2p1s ps4a p4sor 3p2sü 2p1t p4tak 3pu pua4 pu4da puha4 pui4t puk4k pul4ti 4purt pus4p 2p1v 3põ põhi3 p4õi põi4s põli4 5pö 1pä pär4gu 4päs 3pü pü3he 4pž 4pÅ¡ 1ra raal3a 4raale 5raalne raa4lu raamatu5 raa4ta4 4raatt raa4tu 4raav 5raaÅ¡ raie3 2rain r3aine rai4si r5a4la. ral4t ra4lus 3rame ran4dem ra3om 4rapa ra4pl 4rapt 4rarb ra4ren 4rars 2rarv ra4sal 4rasen ra4sis rast4r ra4suk ra4sul ra4sut 5ratas ratiiv5a rat4sen raua3 rau2d raud5o 2rauk 4raum 4raur rau4te 5ravi 2r1b r4bae 4r1d r4dae r2dar rde3a rd4re r4duj r4dü 1re 5re. 5rea reb2 ree4le ree4ma. ree4man 3reer 3reet 5reg 2rehi rei4se rek3l 4reks re4lai re4liit 5relv re4mää 4repp re2pr rep3re re4sin re3te ret4k re4tr ret5s 2r1f rf4r 2r1g r4gae rg3ah rga4re r4geh rgi4me r5g2ra 2r1h 1ri 3ria ri3am ri5ast 5rid 2rig ri4ga. 3rige rig4r 2riib 5riie 4riinu ri4kis ri3k4r 4rilm 3rim ri5oks rio4r 3rip rip4li 4rip2p 4ri4sa. ri4sah ri4sar ri4ses 4risol ri4tol ri3t2r 5riu 5riv 4riö 2r1j 2r1k r4kad rk5ain rka4se r4kek rkt4 2r1l 2r1m r4mald r4me4le r4mig rmo4ra rmo4s rmp4 rms4 2r1n r4nad r4neh rnk4 1ro roa4 ro3e2 rok4k rol4la rol4le 4romb 2romi ro4nop roo4ga 4rosa ros4po ro5staati rost4r 4rož 2r1p rp5ret 2r1r r3ri 2rs rsaa4l rs4l rs4o r5s2po rs4tak 2rt r4tah r4tos rt2r rt4ra r4tri rt5roo rt4sai 1ru 2rua4 rud4 r4uj ru3kr 3ruma 4rumb 2rumm 4rump 5rus. ru4set ru4sin ru4sul rut4r ruum5a 4ruur 2r1v rva4la rva4lu r4vann rvas5k rvis4k r4voh r4vok 1rõ 2rõ2l r5õli rõn4gu 4rõr 3rö 4rök röö4pe rööp5l 1rä 4rä4r 1rü 4rük 4rüm 4rü4r 4rž 4rÅ¡ 1sa 3s2aa saa5j saa4lu sa4an 4s3aas 3saba sa4bi. 4sabo s4ad 5sada 4sade s5ader 2s1ae sae4l 5sa5ga s4age sa4gu 4sahv 3sai. 4saia s1ain s4ajan sa4ju. sak4ro 5saks 4saku. sa4kää 4s3a4la. 4salas s4alat 5salat. sa4lev 4salla 4sallu 3salt sa4lum 5salv 3s4am 4samp 4sank 4s5ann s4ant 4santi s4anu sa4num sa2pr sa4ris s4arv. sar4va sase4 3sasti sas4tii sa4suk sa4sum 3sat 4satl s5avald 2s3b2 2s3d2 1se 3sead se3ala s5eali se5aval se4du. 4seela see4le s5eeln see4ma 4seepi 3seer see4si see4ta4 4se4fe 5seg sega5a 2seh 3seha 3seho 5sehu 5sehä 3sei sei4d sei4si 3seka se3kl 4s5eksp se4laj 4selek 5seli 4sellu 3selt 2selu 4sens se5oh seo4k se5om seo4p seo4r 3sep sepat4 4serem 4sese. se4si. 4se4sin s5esit ses4k 3sest 4s5eten se3t2r 4sette 2s1f2 2s1g2 2s1h 1si 3sia sia4h sie4 5sign sig4ra s5i4ha. sii4g sii4sa sii4se 4siits sii4tu si4ket siko4d siks4p sikt4 sik4vi 4sinim s5inime 4sins sio4le si4pro 4sirr 5siru 4si4sa. si3se s5i4sik sis2p sisse3 s5istuj 5sisu 2s1j sja5ar 2s1k 4ska 5skaa5la ska4no 5skeem. s4kela 5skelet s5ken ski4g sk2l s5klas 5skler skuk4 3skulp sku4si 2s1l 4s1m sma4sa 2s1n sne4p 1so 3sobi 3soe s1oht 3soi s5oks. 2so2l s3olek s3olij 5solo so3lut so2ma 3son 3soo soo5o 4so4pe so4pr 5sor. 
s3orj 3sort 2s1os 4sotsu 2s1p s5per 2s1r 2s1s ssel4l ss2fä ssin4 s4sinf ssk4 ss4ke ss2p s5spor ss2t s5stat ss4ti sst5r s4suss 2s1t s2taa 3staad 5staaÅ¡ s4tain 4stam 5standard 3start s4tati 5statis s4teno 5stepp stet4 s4toh s4top s4tot 5streik 5strek 5struktu st4so stt4 stu2s stu4s5a 3stuud stä4 s4tär s4tüh 1su sua2 su4bar sud4r su4du su5e su4jul 4suks s5uks. 5sulg sul4ga sun4de4 su4nis supe4 supe5s su4pr 4surb 5sus. sus4aa su4ses s5uss. sus4ti su4sun sut4r sut4ü 4suug suu4ra suusa3 2s1v svus4t 1sõ 5sõda sõ4de 4s3õh 3sõid 4sõie 2s1õig 2s5õis 3sõit sõ2l 2s3õli 3sõlm 3sõn 2s1õp 5sõr sõsa4 2s3õu 1sö 4söd sö2k 2sön 1sä 4säk 4säm 1sü 4süa 5süd 5süg 2süh s3ühi 2sük s3üks sü4la sül5di sü3lem 4sülo sü4ra 3süs 3süü 4sž 1ta 3ta. 3taat. 2tabi 4t3aed t3aine 4tais 4ta4ko t2al 4ta4laj ta4lev tal4las ta3lu ta4lus 3tam 5tama 4tanda ta4ot ta4pla ta4pr 4ta4ret 4tarm 2tart ta4se4r ta4ses ta4set ta4sis ta4sü 2taut 3tav ta4vaj 2t1b4 2td2 1t2e te4aa tea4g tea4h te3an te5as 3tee tee5lu te4et te3hi tehi4s te3kl tek4r tek4v tel4ke te4lok te3lu. 4tepiv tep4l 2tepp 3ter 5ter. te5r4a terd4 4terg ters4 tes4a te4sin tes4k 2t1f 4t1g4 2t1h thõl4 3ti tia2 ti3e tii2k tii4ma tii4sa tii4ve tik4o ti3k4r tik4vi 4tilm 5tima 4tinf tin4ga ti4sai tisa4v ti4sik titee4 2t1j 2t1k2 t3kl t3kr 2t1l4 tlu4 2t1m tmika4 tmis3 2t1n 3t2o 4to4da toen4 tog6 4toks. toksi5k to4lek to4lu. to4lum tonis4 to4o too4no too4pe to4rau tos4p tost4 2t3p4 2t1r t4rad t2raf 3trafo t4rahh 5trall t2ran 3trans t4rate 5traum t2re 3treen t4rei 5treim 3trep t3rig 5triib 5triip 5trikoo. tri4pl t4ross 3trumm 5t4rup 3t2ruu 3t2rü 4trüü 2t1s t3s2a ts4aa tsa5is t4saju tsa4lan 3tsehh tse3la 5tsellu 5tsemen 4tsena 5tsensu tsig4 5tsiitsi t4sink 5tsiste ts4laa t2soo t4sor tsp4 t4suss 2t1t t4tad ttee2 ttes2 t5tsem 3tu 4tuim tu4raj tus1a tu4s5ee tu2si tus3s 4t5uss. tus3t2 tu5su tu4sä4 tusü4 4tutop 4tuud tuule5 2t1v 1tõ 5tõb 2tõi tõ4l 4tõ4n 4tõp 2t5õun 1tö 4töe 4tök töö5k töö3p 1tä täh4t3a tä5ke 4täm 1tü tü5he. tü3hi 2t1ž t2že 5tžek 3tžem uaa4 u3aast ua2b u5abi u1ae ua4he uai4k u3ain ua2ja u3akt ua2l u3all u3amet u1an ua4nu u5apar ua2re ua5ree u5art ua2s u3ast u1au uau4d ua2v u5avald u1b u4dalu u4deh udeo4 udo4r u5eal u3eel u3eks ue2ma ueo4 ue4r u1f u1g u4gi4d ug2ra u5graa u4gü uh4tem uh4ter uh4tin uh4tis uidu5 ui2ga ui4h u3iha u3ilm 5uim. uina2 uinas5 ui4si 3uisut uite4h ui4to ui4vala ui4vel ui4vo u1j u5ja u4juj u4jum 3ujumi u1k ukaa4s ukii4g ukki5 ukop4 u3kraa u5kro uku4sa uk4vi ul4a ul4deh uldi4 ul4kr ul4len ul4lui ultra5 u3lu uluo4 4u1m umaf4 u4mau 5umbroh umet4 umf4 umia4 umit4 um5p4r unaa4 unaal5 un4dak un4dan un4das un4do u4ni. u1o2 uo4h u2or u3org u1p u3pla up3le uppe4 up4pis uraa4l u4rag ura4ju u4rala u4rap ur5auk ura4va 3urb. urde3 urea4 u4reos ures4k u4rett urgas4 5urge. u4ri4met u4rini uris4k urka4v uro4r u4rulu u2sa2 u5sa. usad4 u4sae u4salu us5aste u3sea useg4 us5elek u4sett u3setu u5sev us4fä us5g u4si4d usi4g u4si4h us5ind usi4va us3kr u2so us5o2h us3ole uso4r us3p4 u3s4por usui4 3usuli u4suss u2sõ u2sü us3üh u1t ut4kl utos2 utot4 ut4ru ut4so utt4r utu4s utü4h uu5al 4uud uu4du. uu4kak uuk5ri uu4mal uu4mis uu4pü uu4ran uu4ras uu4rau uu4rav uu3sa uusa3k uus4ke u3uss. uu4sul 4uut uu4tas uu4tis uu4tüh u5uuri u1v u4vau u1õ2 uä2 u3är u1ü2 už4l vaa4re4 va4as 4vabi vae4r vag4 va5he 4vaia 4v3ain vak4v va4la. 
va4lai 4valdi val4ga4 va4lü vana5i vane2 4vanku van4ta van4te vap4r v4ar 4varm var4p 2vart 4varuh 4varve va4sas vas4k vast4r vat4r 4vatud vau4d vav4 4vb 2v1d 3ve vee3a vee3k vee5la vee3sp 4vehi vei4sa ve4ol veot4 ve4rah ves4p 2vf 2vg4 2v1h 3vi via2 vid4 4videa vip4 vi3pl vir4k vi4sak vis4ko4d 2v1j 2v1k 2v1l 2v1m 2v1n voo5lu 2v5p 2v1r 2v1s 2v1t2 v3tr vu4sa vu4su4 2v1v 3võ 4võig 4võ4li 4võm võr4k5 3vä vää4re4 vää4ru 3zo zook4 zoos4 õ1b õbe3 õ1d õea4 õe4lan õe3lu õet2 õeu4 õ1g õh4vu õi4a õi4b õie5k 3õiel 3õigu õi4ko õise4 õi4su4 õ1j õk4kõ õl2d õ4lim õ4lit õl4mad 3õmbl õnet4s õ2p õ3pa õpi5e õp2pa 4õp3r õranda5 õra4s õr4da õr4gaa õr4gar õr4go 4õs õs4a õ4tü4 õude3 õ1v4 õõ5p õõre4 õõ4rel õõ4ta öa2 ö1b ö1d 5ö2dee ödi4k 3öeld öep4 öet4r ö1g4 ö1h öi4a öi4g öi4s ö1j ö1k ö2kon 3ökono ök4v ö1l öli2s ölis5a ö1m öo2 ö2p3au öp4lu ö4raa ö4rau ör4da ör4dell ör4e ö4ri4l ö4ro4 ö4rä ö4rül ös4tii ö4to4 ö4tü4 ö1v ö3õ öö3a öö3e4 öö5i 3öölan öö3o ööt4r öü4 äa4 ä1b äbus5 äbut4 ä1d äe3a4 äe1o ä1g äga4ri ähe5a äidi4s äike3 äi4lo äi4lu äi2s äisa4 äi4sõ äi4tar ä3j ä2ke 3äke. ä5ker ä4kõ äli2s äl4ko 5ämbl ä4nah än4deo änet4 än4to4 äok4 ä4ra5a ära3o4 ä4rar äre4lis äret4 ärge4l ärii4 äri4kl äri4sel ä4riste är4kar ä4ro 5ärrit är4ta är2va ä4rõ äs4ko äs4nai äs4p äs4san äs4so ät4ri äu4d ä1v ävee4 ää4kin ääo4 ää4rase 5ääris. üaa4 ü1b üdame5 ü1g ü2h ühe5i4 1ühen 3ühik. ü3hit ü1j ükan4 ük4kas ük3l ül2d üld5a2 ü2le 3ülek ü5lel üle4mas 1üles ülesä4 ül4gav ül4gee ü4lih ülii4 3üliko ü4liõ ül5kl ül5la ül4mei ülo4r ülp4 ült4 üma4rak üma4ru ümi4ko ü5ne üni4s üot4 üp3l üp5p4 üp3r üpsu4 ürd4 ürik4 üs4o üs4pr üs4tal üs4tas üta4r ütee4l 5ütlev ü4t3r üt4ru ü1v ü4ü üü2l3a2 üü4le üü4lo üü4pe 1ža 4žb 1že 5žee 5žer 4žf 1ži 4žj 4žk 4žl 4žm 4žn 3žo 4žp 4žt 5žö 5žü 2žž 3Å¡a 4Å¡ak 4Å¡au 1Å¡e 3Å¡4i Å¡is4k 4Å¡k 5Å¡o 4Å¡t 3Å¡u4", + ["compression"]="zlib", + ["data"]="xÚ5œIŽã:@¯â\19\20`\13}\31%Ì´e\16I-ú6\31È+üZ%þ&Q÷ê÷BÕ\11Gˆó\20Œ‰¤\127\13Cwû5\12¹\12=ø•ºy\4ÏÝ\16ð9dðZÆq2yoç€\7ð(½¨¶äL\31C\11L`¶Æ4™5MæyVk{‰*ÆÇH긎KOܘSŸDÝvûõ\30º\20°Ü~Mñ=\13\13ñSê_ôi\26w+›j·Ï¢>\15¢\28­Ò¦±s²Ø|\21˜Ó²D·\9g£\127þÝwº¹\12vg\25†>Pÿ\6ŽQÛ²Y|¥ïÀ±Û…¹Rú9¾\11l4·\15»©{\26\28ý1\12q#Û‘¬ùHöúH1M\7m\27úùŽ>å!f$_s˜#\127ž:Áà\2ä­wþónT\1”\20\13–T,PÆÕ\\%zR²\9yìX¢RH©¹\17¶\14§º~¢ÚM ¥ù\0V\ +ÃàêœCv¤g:\18ÈÙq…~¾~¾:ƒ?ÿŠ~ÿü\22}¿¢Ÿï9µI|4Ÿïî6Ðóù6Ü!¢[7\12\31Dt\31ã¯[Ï´¤ù×íšÁ\28éÆܧÃ\28ŸIø\28VPóòûEùa¸€±{\0263\18Ñ\15c1Ë[0\15FÏÖ3°äÇ\21ÊÒëBÓ…f³<íÝGxÙÐ>7\6ú<\18Dù¬Ž\0146/\8|xu\19«Ð\15¯µR1c3\16\30%‘±{U¡´\7¤ƒí8/~Ï\17¿$ó/ìtŠ®|÷ãú\25p\\RÄÙE`”ß\15+ÏQ{†\13ÜDNíØn-æ©}\15ÔÒ½\7†ß\15 í\ +Uûû\30œ€7Qï*]Þ\25é$mg\ +Ú˜:h¢\29¦d_Úi4fr\7‚\\Í)6ÚÄ~£ûý4£‘‡hjÚ§H/Ibn&æ}€Y/ÃnöÓ5oÙ´3T2»Ály\30‚Ö`Û.ÙÜ¥b  \1اO{\5\11K”h¥\0246rvf\17Q¹8ÆyNNâLéî\21¸2\15s\19{™õg¿ÌÙù‹À\30‘åÁ|Ì•zÈf1·¾\20à\4-®Ã\18óˆ€@—TȺŒNÚ‚˜\2ìÃLãËž\14Ik¡±f`ÒÚaµJ’`\ +›[\ +~à0×kW€¶§YÝ\9«ìª•\\;dŒ³·¾ÍÂTSÏTý^\7I}µèÆ0ˆÏ±Šk.\6\ +äÜ+›]è­{\8^n¤íUü¦¾-¶\2°\ +\23Ó–Á´Õˆ\13\25Fô.Û\24)Íaà°Û÷-GÞ\\ý.Æ¿Ùƒär’\30Lc-è\20I\19»\\ºÝéÔ\30S%’`ET‚ä\27\3n.å~T3ÿ|1.;pHn7\25cv…´º\29Tªè…,h\6Ë\31cgբݢ”9Øj.Ì\1m•ÀA\29ݱA÷á¸\"Ë8MVU\7áÉ„\18y¦Œä4ÓY­èT\8Þ†üWº@vK\ +D*\28×9Ì}’Nî²`\7’Ó\0229S0º,Si\3[¤û¤:³Ê#²]·†±\13jÎãú0\4'è‡ü6.O\ +_Êåly\30†Ó¬\27ãÌíæªæX¾Œ`)Æ¡þ\12¹qÖÛ¼o‡)±Y2B\9‚gX{q\0282ŽÁIÎUa\4²ýŠbæwVèfåØÝ•U¢ .þüGñ’æ·¬\26u°£;e¯\25}l4K¥Î\0¨\6Ê\17™íd\9\0174\0200]qF*|r¨j!\3}\24ÜGv¢²Sä>U¾\\•ù5ø'*L\27m@Ô\30jG_\15\24rUw$¯TÚV\5\31’«\24UÔœD}\22É,ïôúì\28Ñ9\4{=•¢Ýéf¼ÿüÛ\8\127\7ü\ +ø\13üó\31;”îÜÿüsë? 
[... binary zlib-compressed pattern data continues; unreadable content omitted from this excerpt ...]
G?.\7\27S_N³Æe×3®cž—?û¼þ¬\7ÄÔŸÁ,ÏádóžÈ‘óŽ}dÁ¤çY8\0095_\13xfuê@>ShOg\0285Éõ>¯óñ3ÅñÂùÉOmìD\27ká)§jØé\127¹Àa\30PöéU•3^*Ÿãaÿ¬Rä_\11ù˜üT•:=o:u“ž\30õœ(K'–¨\15\15ÎÞ†4,O\20\4\0\"ôôîä\25¼LX£\7'=ðÆÝyÝ\27:•“c`&\15\27£›zs|Ýâï´®¥½¾ÐEþw»ý¯çÆBöûÏ¿\8\19d.ÌÏ\7@\14\1\12\29\4,;\4Ñ\23`\2OÀ«;÷]£Ð:Æ„\0nÅs ç•\1#A;)u\17ÊW\21oÀ\20¯ÚPl\27›e\8Ë…Šqñ‡Ô²|Ì\4×x°\4\9ï7¯\15Úæ>bRÆ%B¨\18Ü\30„\14\13‹>>\"¿§\15|_yáã›…Ì’c\4n\27;V\31×Dœ\6~þí÷@Gº‚ñÿ6ñ\1ùþü†\2~~;}¿}”Šùæ©9¦œú@‹n‚¨\6B\22@‰”|O\3ÐÏß26a6ú-˜nÚz)Ðk6´_\16{\20šþ={ÛN¤1N\28Sö{³\27Íî¿ÉüüŽÃõŸß\30>]ÈÐ5\13\"tÈøH‘è\29TñÖ\5‚b\2}\27{y\24\9—+õš ßw»âßd†¢5\\ÈéQÿ\26í4½“rŒß\2]Câ_ÒÖ—Söõ\1¿\14T\"Š©úŠ?Ú\2Ý7c˜˜¯g¼}GK\14÷Ë?±ÍâÉUú\26½¡\30¨ŠšHÊQG\\g\14ì埯–¹ýj<îǦF¾…BhÂEƒ_×¼~ÍRk–(Õ}ÅÝ\8¯¢úl׋¨ÑÕõš‘/÷{jˆ®\29q/.ÂV\27çN9>¢ÐáÕq±O–EŠ—ÀÞ“°X(ê\30KNQA¸§\9\30n‡¯#\8îë€)FZô9þ²Läƒ]ñ\30Ð;MM,×üÉ\21™RÖîëÔ¬\11n0k|l\127Ã×_ÿùIg˜¢oÕ˜ŸoWëû\2Û\27`U¾‘²>±ˆ¿€ñCZý~ys=´éb>æû{‚Á‹â\127\0À-#ýŽÝþ=?zwÏ·\0154[\31j@öß½o•\13Ä;Ñ»_9Âa?ð\17¯¬\3»Ñü÷'{â\29Ÿ¨dt>Œuz¾gÿ¹B\4³·Ì‚Y§óCjüö\2:еù^†x]x}TñÅ´¾ûÕVÖ ¹ï-òî1†½ß¯€UíÁÒ¾µê£Tñí¡‘p?\2y\13åÂÖ¤C\20O\27{>ô™Ðï\18Õ•«\15÷ˆB€j26s\27³åàÒ_¼]\24»\24Ó\5\22øç¿\15¿XAÀ\5\15c?\29ýz\11&Á,X\4,ÝŸÿ6¿vA±\28¦•\16CõÏ\127XEíŸ\127¬þŸa\ +X©ïŸdl7Þþü\19j!ñ\24›\127þ±¢\127Šiµû?ý»¿m", + ["length"]=23265, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=3691, diff --git a/tex/context/patterns/lang-eu.lua b/tex/context/patterns/lang-eu.lua index 45cf1f26a..71be1cc6b 100644 --- a/tex/context/patterns/lang-eu.lua +++ b/tex/context/patterns/lang-eu.lua @@ -57,7 +57,10 @@ return { }, ["patterns"]={ ["characters"]="abcdefgijklmnopqrstuvwxyzñ", - ["data"]="1ba 1be 1bo 1bi 1bu 1ca 1ce 1co 1ci 1cu 1da 1de 1do 1di 1du 1fa 1fe 1fo 1fi 1fu 1ga 1ge 1go 1gi 1gu 1ja 1je 1jo 1ji 1ju 1ka 1ke 1ko 1ki 1ku 1la 1le 1lo 1li 1lu 1ma 1me 1mo 1mi 1mu 1na 1ne 1no 1ni 1nu 1ña 1ñe 1ño 1ñi 1ñu 1pa 1pe 1po 1pi 1pu 1qa 1qe 1qo 1qi 1qu 1ra 1re 1ro 1ri 1ru 1sa 1se 1so 1si 1su 1ta 1te 1to 1ti 1tu 1va 1ve 1vo 1vi 1vu 1wa 1we 1wo 1wi 1wu 1xa 1xe 1xo 1xi 1xu 1ya 1ye 1yo 1yi 1yu 1za 1ze 1zo 1zi 1zu 1l2la 1l2le 1l2lo 1l2li 1l2lu 1r2ra 1r2re 1r2ro 1r2ri 1r2ru 1t2sa 1t2se 1t2so 1t2si 1t2su 1t2xa 1t2xe 1t2xo 1t2xi 1t2xu 1t2za 1t2ze 1t2zo 1t2zi 1t2zu 1b2la 1b2le 1b2lo 1b2li 1b2lu 1b2ra 1b2re 1b2ro 1b2ri 1b2ru 1d2ra 1d2re 1d2ro 1d2ri 1d2ru 1f2la 1f2le 1f2lo 1f2li 1f2lu 1f2ra 1f2re 1f2ro 1f2ri 1f2ru 1g2la 1g2le 1g2lo 1g2li 1g2lu 1g2ra 1g2re 1g2ro 1g2ri 1g2ru 1k2la 1k2le 1k2lo 1k2li 1k2lu 1k2ra 1k2re 1k2ro 1k2ri 1k2ru 1p2la 1p2le 1p2lo 1p2li 1p2lu 1p2ra 1p2re 1p2ro 1p2ri 1p2ru 1t2ra 1t2re 1t2ro 1t2ri 1t2ru su2b2r su2b2l", + ["compression"]="zlib", + ["data"]="xÚ-ÑÛ‘£0\20EÑT:\5+£\22X*#Œ… ye51Lb³Ù=\31ÈUëÔQ]]?â÷×#>ù>|/¾Ÿ¯G‡uX‡uX‡õXõXõXÂ\18–°„%,c\25ËXÆ26`\0036`\0036`\5+XÁ\ +V°\17\27±\17\27±\17{coì½±76a\0196a\0196a\127ÿ|ßÇó>>÷ñº\15‚ŠW¸¢\21¬ØŒÍØŒÍØŒ5¬a\13kXÃ\22lÁ\22lÁ\22lÅVlÅVlÅ6lÃ6lÃ6lÇvlÇvlÇ\14ìÀ\14ìÀ\14ìÄNìÄNìÄ.ìÂ.ì®{9Áõ\4\23\20\\QpIá^S\11Î\31|Að\13ÁW„û\29k¸_Âùôüx¾/6\19zð-|ß0J Z¸{À|³z\22\28\0204OÐ<\21B\30©ÄK’¤\26Wå1¹ñÊ‚ï’$L“Ú–RÛR«‹D’I\26,i°\20úc[o0IH\18R5X\21R“2ÊZñ»]\12#~µ;°ÙÜ«Žë0nÕ\17^Üþaù\16Üšj\30LôÒ³8\5›‡4ó±\17Í÷¨…ý7–\30Ùûi]6«D]ì\17r4vWæKï½Bu'äòŸúÍxË[èÖè\3¦9üù»„`Gï¾þg»2ϼ¼åôòv¬mZ·8Þ}%¹¯øhO·=C1_÷\24€ÍW®Õž:çVšK_UáËi1íŠnïÏnß{´‹ë…\16\127—º¹ÖóæèþíÐw¤%ý€¢ÓNêš\20ú8©Ãć]ùŸr0æ\17\30ßÜÀƒ\16C?\00345­f”‹8óT\7DÙ©L¿\15Ô›I•ICMªLªDy±1QŒT?DÝ8â\12·N\3?4Ù\15O\127TC‚É?v6%#?³=ÿ\2h‚\1Z", + ["length"]=1546, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=286, diff --git a/tex/context/patterns/lang-fr.lua b/tex/context/patterns/lang-fr.lua index a738aa2d7..bef99aac7 100644 --- a/tex/context/patterns/lang-fr.lua +++ b/tex/context/patterns/lang-fr.lua @@ -118,7 +118,21 @@ return { }, 
["patterns"]={ ["characters"]="'abcdefghijklmnopqrstuvwxyzàâçèéêîïôûœ’", - ["data"]="2'2 2’2 .a4 'a4 ’a4 .â4 'â4 ’â4 ab2h .ab3réa 'ab3réa ’ab3réa ad2h a1è2dre .ae3s4ch 'ae3s4ch ’ae3s4ch 1alcool a2l1algi .amino1a2c 'amino1a2c ’amino1a2c .ana3s4tr 'ana3s4tr ’ana3s4tr 1a2nesthési .anti1a2 'anti1a2 ’anti1a2 .anti1e2 'anti1e2 ’anti1e2 .anti1é2 .anti2enne 'anti2enne ’anti2enne 'anti1é2 ’anti1é2 .anti1s2 'anti1s2 ’anti1s2 .apo2s3ta 'apo2s3ta ’apo2s3ta apo2s3tr archi1é2pis .as2ta 'as2ta ’as2ta a2s3tro 1ba 1bâ .bai2se3main 1be 1bé 1bè 1bê 4be. 4bes. 2bent. 1bi 1bî .bi1a2c .bi1a2t .bi1au .bio1a2 .bi2s1a2 .bi1u2 1b2l 4ble. 4bles. 2blent. 1bo 1bô 1b2r 4bre. 4bres. 2brent. 1bu 1bû 1by 1ç 1ca 1câ ca3ou3t2 1ce 1cé 1cè 1cê 4ce. 4ces. 2cent. ja3cent. ac3cent. é3cent. munifi3cent. réti3cent. privatdo3cent. inno3cent. es3cent. acquies4cent. is3cent. immis4cent. .ch4 1c2h 4ch. 2chb 4che. 4ches. 2chent. .chè2vre3feuille 2chg ch2l 4chle. 4chles. chlo2r3a2c chlo2r3é2t 2chm 2chn 2chp ch2r 4chre. 4chres. 2chs 2cht 2chw 1ci 1cî .ci2s1alp 1c2k 4ck. 2ckb 4cke. 4ckes. 2ckent. 2ckf 2ckg 2ck3h 2ckp 2cks 2ckt 1c2l 4cle. 4cles. 2clent. 1co 1cô co1acc co1acq co1a2d co1ap co1ar co1assoc co1assur co1au co1ax 1cÅ“ co1é2 co1ef co1en co1ex .con4 .cons4 .contre1s2c .contre3maître co2nurb .co1o2 .co2o3lie 1c2r 4cre. 4cres. 2crent. 1cu 1cû 1cy .cul4 1d' 1d’ 1da 1dâ .dacryo1a2 d1d2h 1de 1dé 1dè 1dê 4de. 4des. 2dent. déca3dent. é3dent. cci3dent. inci3dent. confi3dent. tri3dent. dissi3dent. chien3dent. .ar3dent. impu3dent. pru3dent. .dé1a2 .dé1io .dé1o2 .dé2s .dé3s2a3cr .dés2a3m .dé3s2a3tell .dé3s2astr .dé3s2c .dé2s1é2 .dé3s2é3gr .dé3s2ensib .dé3s2ert .dé3s2exu .dé2s1i2 .dé3s2i3d .dé3s2i3gn .dé3s2i3li .dé3s2i3nen .dé3s2invo .dé3s2i3r .dé3s2ist .dé3s2o3dé .dé2s1Å“ .dé3s2o3l .dé3s2o3pil .dé3s2orm .dé3s2orp .dé3s2oufr .dé3s2p .dé3s2t .dé2s1u2n 3d2hal 3d2houd 1di 1dî di2s3cop .di1a2cé .di1a2cid .di1ald .di1a2mi .di1a2tom .di1e2n .di2s3h 2dlent. 1do 1dô 1d2r 4dre. 4dres. 2drent. d1s2 1du 1dû 1dy .dy2s3 .dy2s1a2 .dy2s1i2 .dy2s1o2 .dy2s1u2 .e4 'e4 ’e4 .ê4 'ê4 ’ê4 .é4 'é4 ’é4 .è4 'è4 ’è4 éd2hi 1é2drie 1é2drique 1é2lectr 1é2lément .en1a2 'en1a2 ’en1a2 1é2nerg e2n1i2vr .en1o2 'en1o2 ’en1o2 épi2s3cop épi3s4cope e2s3cop .eu2r1a2 'eu2r1a2 ’eu2r1a2 eu1s2tat extra1 extra2c extra2i 1fa 1fâ 1fe 1fé 1fè 1fê 4fe. 4fes. 2fent. 1fi 1fî 1f2l 4fle. 4fles. 2flent. 1fo 1fô 1f2r 4fre. 4fres. 2frent. f1s2 1fu 1fû 1fy 1ga 1gâ 1ge 1gé 1gè 1gê 4ge. 4ges. 2gent. ré3gent. entre3gent. indi3gent. dili3gent. intelli3gent. indul3gent. tan3gent. rin3gent. contin3gent. .ar3gent. 'ar3gent. ’ar3gent. ser3gent. ter3gent. résur3gent. 1g2ha 1g2he 1g2hi 1g2ho 1g2hy 1gi 1gî 1g2l 4gle. 4gles. 2glent. 1g2n 'a2g3nat ’a2g3nat .a2g3nat a2g3nos co2g3niti 'i2g3né ’i2g3né .i2g3né 'i2g3ni ’i2g3ni .i2g3ni .ma2g3nicide .ma2g3nificat .ma2g3num o2g3nomoni o2g3nosi .pro2g3nath pu2g3nable pu2g3nac .sta2g3n .syn2g3nath wa2g3n 4gne. 4gnes. 2gnent. 1go 1gô 1g2r 4gre. 4gres. 2grent. 1gu 1gû g1s2 4gue. 4gues. 2guent. .on3guent. 'on3guent. ’on3guent. 1gy 1ha 1hâ 1he 1hé 1hè 1hê hémi1é hémo1p2t 4he. 4hes. 
1hi 1hî 1ho 1hô 1hu 1hû 1hy hypera2 hypere2 hyperé2 hyperi2 hypero2 hypers2 hype4r1 hyperu2 hypo1a2 hypo1e2 hypo1é2 hypo1i2 hypo1o2 hypo1s2 hypo1u2 .i4 'i4 ’i4 .î4 'î4 ’î4 i1algi i1arthr i1è2dre il2l cil3l rcil4l ucil4l vacil4l gil3l hil3l lil3l l3lion mil3l mil4let émil4l semil4l rmil4l armil5l capil3l papil3la papil3le papil3li papil3lom pupil3l piril3l thril3l cyril3l ibril3l pusil3l .stil3l distil3l instil3l fritil3l boutil3l vanil3lin vanil3lis vil3l avil4l chevil4l uevil4l uvil4l xil3l 1informat .in1a2 'in1a2 ’in1a2 .in2a3nit 'in2a3nit ’in2a3nit .in2augur 'in2augur ’in2augur .in1e2 'in1e2 ’in1e2 .in1é2 'in1é2 ’in1é2 .in2effab 'in2effab ’in2effab .in2é3lucta 'in2é3lucta ’in2é3lucta .in2é3narra 'in2é3narra ’in2é3narra .in2ept 'in2ept ’in2ept .in2er 'in2er ’in2er .in2exora 'in2exora ’in2exora .in1i2 'in1i2 ’in1i2 .in2i3miti 'in2i3miti ’in2i3miti .in2i3q 'in2i3q ’in2i3q .in2i3t 'in2i3t ’in2i3t .in1o2 'in1o2 ’in1o2 .in2o3cul 'in2o3cul ’in2o3cul .in2ond 'in2ond ’in2ond .in1s2tab 'in1s2tab ’in1s2tab 'inte4r3 ’inte4r3 .intera2 'intera2 ’intera2 .intere2 'intere2 ’intere2 .interé2 'interé2 ’interé2 .interi2 'interi2 ’interi2 .intero2 'intero2 ’intero2 .inte4r3 .interu2 'interu2 ’interu2 .inters2 'inters2 ’inters2 .in1u2 'in1u2 ’in1u2 .in2uit 'in2uit ’in2uit .in2u3l 'in2u3l ’in2u3l io1a2ct i1oxy i1s2tat 1j 2jk 4je. 4jes. 2jent. 1ka 1kâ 1ke 1ké 1kè 1kê 4ke. 4kes. 2kent. 1k2h 4kh. .kh4 1ki 1kî 1ko 1kô 1k2r 1ku 1kû 1ky 1la 1lâ 1là la2w3re 1le 1lé 1lè 1lê 4le. 4les. 2lent. .ta3lent. iva3lent. équiva4lent. monova3lent. polyva3lent. re3lent. .do3lent. indo3lent. inso3lent. turbu3lent. succu3lent. fécu3lent. trucu3lent. opu3lent. corpu3lent. ru3lent. sporu4lent. 1li 1lî 1lo 1lô l1s2t 1lu 1lû 1ly 1ma 1mâ .ma2c3k .macro1s2c .ma2l1a2dres .ma2l1a2dro .ma2l1aisé .ma2l1ap .ma2l1a2v .ma2l1en .ma2l1int .ma2l1oc .ma2l1o2d .ma2r1x 1me 1mé 1mè 1mê .mé2g1oh .mé2sa .mé3san .mé2s1es .mé2s1i .mé2s1u2s .méta1s2ta 4me. 4mes. â2ment. da2ment. fa2ment. amalga2ment. cla2ment. ra2ment. tempéra3ment. ta2ment. testa3ment. qua2ment. è2ment. carê2ment. diaphrag2ment. ryth2ment. ai2ment. rai3ment. abî2ment. éci2ment. vidi2ment. subli2ment. éli2ment. reli2ment. mi2ment. ani2ment. veni2ment. ri2ment. détri3ment. nutri3ment. inti2ment. esti2ment. l2ment. flam2ment. gram2ment. .gem2ment. om2ment. .com3ment. ô2ment. slalo2ment. chro2ment. to2ment. ar2ment. .sar3ment. er2ment. antifer3ment. .ser3ment. fir2ment. or2ment. as2ment. au2ment. écu2ment. fu2ment. hu2ment. fichu3ment. llu2ment. plu2ment. bou2ment. bru2ment. su2ment. tu2ment. 1mi 1mî .milli1am 1m2némo 1m2nès 1m2nési 1mo 1mô 1mÅ“ .mono1a2 .mono1e2 .mono1é2 .mono1i2 .mono1ï2dé .mono1o2 .mono1u2 .mono1s2 mon2t3réal m1s2 1mu 1mû 1my moye2n1â2g 1na 1nâ 1ne 1né 1nè 1nê 4ne. 4nes. 2nent. réma3nent. imma3nent. perma3nent. .émi3nent. préémi3nent. proémi3nent. surémi3nent. immi3nent. conti3nent. perti3nent. absti3nent. 1ni 1nî 1no 1nô 1nÅ“ .no2n1obs 1nu 1nû n3s2at. n3s2ats. n1x 1ny .o4 'o4 ’o4 'ô4 ’ô4 .ô4 o2b3long 1octet o1d2l o1è2dre o1ioni ombud2s3 omni1s2 o1s2tas o1s2tat o1s2téro o1s2tim o1s2tom o1s2trad o1s2tratu o1s2triction .oua1ou 'oua1ou ’oua1ou .ovi1s2c 'ovi1s2c ’ovi1s2c oxy1a2 1pa 1pâ paléo1é2 .pa2n1a2f .pa2n1a2mé .pa2n1a2ra .pa2n1is .pa2n1o2ph .pa2n1opt .pa2r1a2che .pa2r1a2chè .para1s2 .pa2r3hé 1pe 1pé 1pè 1pê 4pe. 4pes. 2pent. re3pent. .ar3pent. 'ar3pent. ’ar3pent. ser3pent. .pen2ta per3h pé2nul .pe4r .per1a2 .per1e2 .per1é2 .per1i2 .per1o2 .per1u2 pé1r2é2q .péri1os .péri1s2 .péri2s3s .péri2s3ta .péri1u2 1p2h .ph4 4ph. .phalan3s2t 4phe. 4phes. 2phent. ph2l 4phle. 4phles. 
2phn photo1s2 ph2r 4phre. 4phres. 2phs 2pht 3ph2talé 3ph2tis 1pi 1pî 1p2l 4ple. 4ples. 2plent. .pluri1a 1p2né 1p2neu 1po 1pô po1astre poly1a2 poly1e2 poly1é2 poly1è2 poly1i2 poly1o2 poly1s2 poly1u2 .pon2tet .pos2t3h .pos2t1in .pos2t1o2 .pos2t3r .post1s2 1p2r 4pre. 4pres. 2prent. .pré1a2 .pré2a3la .pré2au .pré1é2 .pré1e2 .pré1i2 .pré1o2 .pré1u2 .pré1s2 .pro1é2 .pro1s2cé pro2s3tat .prou3d2h 1p2sych .psycho1a2n 1p2tèr 1p2tér 1pu .pud1d2l 1pû 1py 1q 4que. 4ques. 2quent. é3quent. élo3quent. grandilo3quent. 1ra 1râ radio1a2 1re 1ré 1rè 1rê .ré1a2 .ré2a3le .ré2a3lis .ré2a3lit .ré2aux .ré1é2 .ré1e2 .ré2el .ré2er .ré2èr .ré1i2 .ré2i3fi .ré1o2 .re1s2 .re2s3cap .re2s3cisi .re2s3ciso .re2s3cou .re2s3cri .re2s3pect .re2s3pir .re2s3plend .re2s3pons .re2s3quil .re2s3s .re2s3t .re3s4tab .re3s4tag .re3s4tand .re3s4tat .re3s4tén .re3s4tér .re3s4tim .re3s4tip .re3s4toc .re3s4top .re3s4tr .re4s5trein .re4s5trict .re4s5trin .re3s4tu .re3s4ty .réu2 .ré2uss .rétro1a2 4re. 4res. 2rent. .pa3rent. appa3rent. transpa3rent. é3rent. tor3rent. cur3rent. 1r2h 4rhe. 4rhes. 2r3heur 2r3hydr 1ri 1rî 1ro 1rô 1ru 1rû 1ry 1sa 1sâ .sch4 1s2caph 1s2clér 1s2cop 1s2ch e2s3ch i2s3ché i2s3chia i2s3chio 4sch. 4sche. 4sches. 2schs 1se 1sé 1sè 1sê sesqui1a2 4se. 4ses. 2sent. ab3sent. pré3sent. .res3sent. .seu2le .sh4 1s2h 4sh. 4she. 4shes. 2shent. 2shm 2s3hom 2shr 2shs 1si 1sî 1s2lav 1s2lov 1so 1sô 1sÅ“ 1s2patia 1s2perm 1s2por 1s2phèr 1s2phér 1s2piel 1s2piros 1s2tandard 1s2tein stéréo1s2 1s2tigm 1s2tock 1s2tomos 1s2troph 1s2tructu 1s2tyle 1su 1sû .su2b1a2 .su3b2alt .su2b1é2 .su3b2é3r .su2b1in .su2b3limin .su2b3lin .su2b3lu sub1s2 .su2b1ur supero2 supe4r1 supers2 .su2r1a2 su3r2ah .su3r2a3t .su2r1e2 .su3r2eau .su3r2ell .su3r2et .su2r1é2 .su2r3h .su2r1i2m .su2r1inf .su2r1int .su2r1of .su2r1ox 1sy 1ta 1tâ 1tà tachy1a2 tchin3t2 1te 1té 1tè 1tê télé1e2 télé1i2 télé1o2b télé1o2p télé1s2 4te. 4tes. 2tent. .la3tent. .pa3tent. compé3tent. éni3tent. mécon3tent. omnipo3tent. ventripo3tent. équipo3tent. impo3tent. mit3tent. .th4 1t2h 4th. 4the. 4thes. thermo1s2 2t3heur 2thl 2thm 2thn th2r 4thre. 4thres. 2ths 1ti 1tî 1to 1tô 1t2r tran2s1a2 tran3s2act tran3s2ats tran2s3h tran2s1o2 tran2s3p tran2s1u2 4tre. 4tres. 2trent. .tri1a2c .tri1a2n .tri1a2t .tri1o2n t1t2l 1tu 1tû tung2s3 1ty .u4 'u4 ’u4 .û4 'û4 ’û4 uni1o2v uni1a2x u2s3tr 1va 1vâ 1ve 1vé 1vè 1vê vélo1s2ki 4ve. 4ves. 2vent. conni3vent. .sou3vent. 1vi 1vî 1vo 1vô vol2t1amp 1v2r 4vre. 4vres. 2vrent. 1vu 1vû 1vy 1wa 1we 4we. 4wes. 2went. 1wi 1wo 1wu 1w2r 2xent. .y4 'y4 ’y4 y1asth y1s2tom y1algi 1za 1ze 1zé 1zè 4ze. 4zes. 2zent. privatdo3zent. 
1zi 1zo 1zu 1zy", + ["compression"]="zlib", + ["data"]="xÚUZ±–ã:Žý\21e•ù\28Éž\15’%Zb[\18U\20é*w4ÿ°Ñfs^2å\14z“Ê^è³?²_²÷\2 Ü\19˜¸$A\4A\16¤Ü¼5Uó\127ÿüï¦:´§ê\13?d\30ž\127!Ë\4y’öÜŒà9\31ãóÑ‚Ñ\0¹\13¶=8ÚúùÕôÑÕ\29·S7‚Õ\0Y\13ÖíÔ…0Um3\1\14\30ܳ_BÝ6\29øwÈ\22{æÐ.-Z§\8Ž‚ÈP0˜\22·¥ñùØ(oI\30%äU ¬\ +µÒ•J÷ªt¥òù0Ô¸eqʨÐXÿ(\23æ\"aoXoEþöªÝX¹†f;&ê° V\23l VmìF‘·ú\13­¶Fš\8!¿€VXCUŸ[üž\127U‡së›Í\29çÖ/(q,}0ùbò«:ÝÉv¨š³[Ò\1ÅžU¿ÑÔ«ž…&¥™$ˆÒÎ\16l Î\13Ú4\19$M\"oR“I䀞ßd‰¨ŒÂ\18•%\26K&ËßHîUýüwUw˜@‡\9tí1äc‚üαäÁä‹\9\6ßQR'‚:‘ó£=*h;\3χ9/þâ-\3ûL\5¯ÑßÚÔ\7Ëúe)Ðm»´÷ì\29,U9J±Ÿg_\ +\15Ýx¨`ñ°gŽg<\19É\0G\29áX\24±#nÑ\29/.ûir¬\25ªn¤úºqÒ\22¢@ÐÄ#\23Á V?‘\127f²0YÙ2²IÔ–Ñ:Û˜\8ó\7Æå©1¬i'‹6­\28ê\21ìWò^9Ô«4¿jë«\12\21ôÂd`r\28™®L(úš(‚CÖ\17ëŠw¶â]`\127ßU\7Sé:%ïBš^È*i”tÛBg kI–ô\0192þ÷¿\8¹‰@ÜEÒEÒOL&,'I7%):쨮`\24ýó7(Ø›%Ç3ËëÐ4á8yÚ“¨N5gŠ3ƒì2g\0ƒìîh',nÿ†\31¶\26R\24gÏÝÕ·]¼Ëvèkz»ºw¬y0ùb\2#í)½\23á½ÈF=¬º/æ© ë¼!¿ì\16ó¸\20œbA½ß¶côn1|hc\0171¯Ùà\26\11: _Ù¯¤>(\13šo6!Ç­Á\6Š‚\9çWirÓ´ç¶\20\11::-C2ì\12nÙüyÏÄ´ÃÏ\\šú½%¦õ‚Ãò“\127áŽ*–[xUì}úmï&\28¹\28Ö\17¬i/ž^põ¯Lœ_pÝa¾ì²÷ÂT¤æf©ŽXýv\18\18r•÷\\ýßX+xä.°‘xS\25‹ ß+šŒ6³7Â,È5œ'\5`ëõ¶³ú@Áð¥=M·\23ÓíÕt{5Ýž§JÝgòÁ~{Øo\127‡\16%j\1÷¢u‚P\0¼øÁá”wrÈ;\30ú¿xèÿÒCÿ\23\11\30,xhÁƒ\5_,øÒ\2ç\3*Àä¡™>r‹)xÏ\ +'×ñh&z>fŒ\23=.r.+a¿\2Ȳ¸8TÐ\2†z‹Â\24”1\20F€çc-:&d<\17V‡f¦w—›¨\29\24`Kƒ.×<6Så>Slk%0i¥˜Å\5\27ý‚^_\28ÁƒÉ\23\19ìë\11U\127\17Í_tm.lE¯/tŒ\23qŒ\23uŒ\23[¾K Ë7Y°|\23Y¾‹.ßE—ï\"ËwÉäÃò]p\30\14\24ÅÀQ\12ŽàÁä‹\9F1P \2†r´\29\0219q‚ƒ9•Þ\27ìýä÷RnëW®Ï“áÔ.†¢/ˆ^uÏÐÙ(zÛ\17Â7WPÚ\17†\6\15o™zÀŽ‘ÔIê%\13’rÎÌS—\3u9ˆ.\7Õå0\21\1\11:o†ã‚\21dß\6\15\5\8\13\27ý?€O¾zóDÐ Ø\11<\20 •~¯óV\5:‹(MëöÌÅwìLsy®¤“0\7ð+dعƠƒ\25«5\11BPT |ç–¤9À})Œ\31Zt\26\22™ô¢“^lÒÔ\16\13h \1\13b@ƒ\26Ð`G×Éòw5Ð’NC\22–¬,Y\23/`\25\21¾½ æýÊÔ\3\22\0114Òð¸F#\13o¤á0<äfF£\2B½Â\29ž$Ú‘`§ærŽ\\>®èÈñŽ™\0\22Õ\29ï«Ã\6SêŒò\0\17à\6£›ÒS¬5Ÿ%/§®PgÔ\4„Ú\27\13F7£ôo\30îÊ‹·òô^¿é½~«÷\2ñzû\0‰iŒ vyñ\19¬°ó\19N\8ršª¬äÖ*\29¤n”tÒ\20gVXªY23™\\ª¨4²oNiTÒ’þ\3=´«°¯JÛ\2\\\1¾\0\28\17k6f\31…bÀB»»R\127VºæM(lM(Â\7\5~1p‰^Á9d\5·v‘î–\29mÕMjÚ›Œ\24Á¬‚\\¨’OaªýrÁIÊíáÕ»ûâÝ\21 \0241\5¶$k\12I¥a©Ï\3ÂÁ·\29Y½`Šu*Ö\21±NÄŠ\21¼\25Õ\ +‰LÐÔ].íY\4*RŠY\15Ç9厷ª?3ʵgqicÜ\0255³3jVz\\u‚¤ÖÛª“s:3W¦å¢\22\127\6\19«È*\5sr^çæËÔ¼ÎÌ\31gõn;Ô†–Q–w«\127ß+ß­&YMÚktá‚ö\22JoA{Ãõ(OÒD‘6R,õK¯µ V\7D¿Ëè§v\ +EGc,0\21ÐÆÒxCänƒˆû|’¿¸R|Øvxñ…%ì¼[\1ù¥Î\2/\5Œ{‰ïÆlÒ¦©”®;B RPÌûJ”Ñ\23PãÊ\12\3þ\13;ô¸ÆÔíŒl³0 UðµYÁFF\22ÒiÍ|\6 /‘HD€+@Î\6A~/úŸFž\16$\19Ji.\0þ\28´IòæHO®osf_Øòó\29µwÞa±)†ª^°ý\23ºÇÅ\17<˜|1G”p_£ý¥Ü—æö¸”·Æ\2\17\2ïøÀˆ²”ƒÿ?²á\28.^\127äøriPnu/É;nÏÛŽkÜi0F¸¶%\16@‰\11•¸\4L,œ¡å%³üïjáó\16\13\\(¦²Ðå,w\\9\16i\7\9´‰žß\26s\1273\8G\18š3‚Ú\5\26\ +]B \28ê\30\17w(ñ7–CîUó9÷|Ê\8ó\"çA|Ïf4)…ÿ\8Šü¬4\24m_@ʆ|—\24§\31Bnë1F¥\28§¢C¸yñÏo\5°Î N]y­X±®+ÖumqÌ™\25­mÃ`÷²#zÝ‚\25Ð\9äc¾€Ð¬cŒ\18×V\30)\16iÿa,ÈÄV¾\28°ô(ײձ{\2ZÓJkZiM«XÓZÎÈu¿º¯ûÕ}ýãê¾îWwã\4¡o‡]\28qå“\12c<ÞŘÊ\27ŠPgTç\13à\6£Ø.h^GÄÇ\13ÂNúx„,[A2\27\",îö‚Œ·•\31\24V~pZ\17ˆœV†$ëØNí\"\15q(ÙêCûª\15í«¼¨¯ú¢¾ê‹:ê\22T„$ûv•—óU_ÎW{9_ùr¾Ž©:¢:q9\21aêÕS¹¿9\20ŠVÉ&Øâ\15ø0\12—ÖÐÈö\6qØ\27k`ËïŠ×ÖoÒ\12_¨>¡Î(Õ§àË€7\26ŒnFé~Vº\29GK\9PÂq4€\0  ѾTF\1IœÓ*³ÖIÛœ£\13>Ú\0191\1®bS[`¶J]`\"W€/ \20\11ee;h˜\3¥ðQ„‹›¤0\31åå|m¶{Ç9Ð5/,Kϯ¨ôAÊaä^|\3Ô\9ÿº\"¤z¯NïòÈñ®\28ï¹<­ïh\ +\5ã\16]zÿÊ×؈uÄÎ…oÐ\15[5\3Öȵ‹ÜL‘\1RÑ‹©Å툻·Àd0\127j\3™uQ\21kÜd4*åÜ\14E…,ðÇ‹×\18êR¾fð)“\17Ÿ\"Ïw¥\2CtTŠb©^]—\ +ô± Øi_pX6ƒ\8³'ƒ¥HÚòS*¯Ç††\29©\16;ãó±¼`,\16N¸ µ F¦†ö2ipÚþÝá—\29{ƒâ]|.à.êʦ¿¼ér¤(+y\18\27W\19/\22Þ\30\21µë\14q\26,ÛžƒÙXqˆ†º\\\16\28\24nDQ¼MTo\3Ïèr\20zïa£P?l\6>‚Ÿa#ʘ\9`¬\17ÆŠàºÞ\24ÿoòÁ\16\27\2ѪÐIL\28\0*!\25õ\13{¬äy›n^o\11\8ÕiãÇF¦Î\8‡´ñë_½9öô`òÅä\23\28û†u\22Íl¯ÜvÜ\31·=Š0\8\29o\5n.7´ûMG\13-lÒ³vlýªã\5+~µ\8$cdÂñxŽâ7[OíMH \9,†š6D\20(\\Ûä[\1\8s„\6QË:Š+\16 
ŠZ=6”Ð\24(¿¡U¶±\23H\27Úh†<Žéõ\24\14\12\"\16ÖwU:[»\24t\13xóƒm\17Ýye݈±r\7Äžgq\0[>ž›vJV$[\\Êh6VH3%€K˜ÿÀ;ʼTÈÖ\22v\24Ï–õ¹“”Ïœ’7\0069dÑElÚQúŠü\26gU®±\"G\15­ˆßé\20\21.\27%MÔJpË(h¹ì¨ð‡R\20\16»m0ZœÃubÔšp©Om7ÊÑ•`‚‹|šOŽU\15&_L~UÈMêù\12ù\29!Ô{Áµ@>U'ÚR\18SJju\19?<–›,^å\13Ñ2ð8Þ ‚+„²–a|¸\6ËÜø!ä••7…=çç\29Î>•Þ\18­<ÑÊ\19­<‰•'±r¤q\22“â™+[?\19“™É\2\6ž¯I£ŠdQEâ\14HžÊÁ\14Hà›]Dq?ú]Žˆq3\\^i³z,1†¦\20­¥(SyÚŸugî.Eû\15‡‚¥€¤\0á{•0\8l#š}‚©§¼\12Œ°kºÖŒH=K ž\25§ÿÍÀýo\13ÜAòB\0097¡móYeýŸJ}ƒµÜh-7Gð`òÅäWuãQ\12Ý]}uºq¼7\25î­ÜD°š7ó7ˆ\11\20Ö7ÏÆPÛ-\16|W·0!¸igøÉ\27u}“©ßtê7óÔ·LfxÝ\27\12ø\3CúpÕ郌\31Â÷¡l\31\16þ\1¹\31`ÿ€°æSû¿c®w™*Ò;ƒ¶±ºÛ]â®_\9êŸú\19süÉ9þÄ\28O?)ÿ§ÈÿùŸ\127.Ñlý“ÍÐÝOt÷óþÿ—±7t", + ["length"]=9581, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=1208, diff --git a/tex/context/patterns/lang-gb.lua b/tex/context/patterns/lang-gb.lua index 20ddeffe8..55f738149 100644 --- a/tex/context/patterns/lang-gb.lua +++ b/tex/context/patterns/lang-gb.lua @@ -2,7 +2,9 @@ return { ["comment"]="% generated by mtxrun --script pattern --convert", ["exceptions"]={ ["characters"]="aceghimnoprstuvwy", - ["data"]="uni-ver-sity uni-ver-sit-ies how-ever ma-nu-script ma-nu-scripts re-ci-pro-city through-out some-thing", + ["compression"]="zlib", + ["data"]="xÚUŒË\9À0\12ÅVñ\2o¨\18LìCâàOJ·on¥'!\29TS±Ù\17š\15Õ'P\14\18»Á'и0\11Ñ\\Wþ$È\25M±Ü\14Î\"Å­ºÀ*)l0Rtö\23•e%ã", + ["length"]=102, ["n"]=8, }, ["metadata"]={ @@ -74,7 +76,109 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijklmnopqrstuvwxyz", - ["data"]=".ab4i .ab3ol .ace4 .acet3 .ach4 .ac5tiva .ad4din .ad3e .ad3o .ae5d .aer3i .af3f .af3t .ag4a .ag5n .air3 .al5im .al1k .al3le .am5ar .ama5te .am2i .am3pe .am3ph .an1 .ana3b .ana3s .and2 .an5da .an4el .an4en .an4gl .an4on. .an3s .ant3a .an3ti3 .ant4ic .an4t5o .any5 .aph5or .ap4i .ar5ab .ar5ap .ar4ci .ar5d .ar4e .ari4 .ar4ise .ar4isi .ar5sen .art5icl .as1 .as4q .as5sib .at5ar .ateli4 .at5omise .at5omiz .at3r .at3t .au3b .au3g4u .aur4e5 .aus5 .authen5 .av4 .av5era .bap5tism .barri5c .bas4i .ba5sic .be3di .be3lo .be5r4a .be5sm .bi4er .blaz5o .bo3lo .bos5om .boun4d .bov4 .bra5ch .bre2 .burn5i .ca3de .ca4gin .cam5i .cam3o .can1 .can5ta .ca5pitu .car4i .cas5ual .ca4ti .cen5so .cen5tena .cent5ri .cer4i .ch4 .cit4a .clem5e .clima5to .co5it .co3pa .cop5ro .co3ru .co3si .co5ter .cotyle5 .cri5tici .custom5 .dav5 .dea5co .de5lec .del5eg .de3li .deli5r .de1m .de5nit .de3no .der2 .de3ra .de5res .de3ri .de5scrib .de5serv .de5signe .de5sir .de5sis .de5spoi .determ5i .de3ve .de4w .di4al. .dia3s .di4at .din4a .dio5c .do2 .do4e .domest5 .du4al. .du4c .dys3 .east5 .echin5 .eco3 .ec3t .ed5em .ed4it. 
.ed4iti .eg4 .ei3d .ei5r .el3ev3 .el2i .elu5s .em3b .em5in .emp4 .em5py .en1 .en5c .en4ded .en3s .ent2 .en5ta .eos5 .epi1 .epi3d .er2a .er5em5 .er4i4 .er4o2 .eros4 .erot3 .er4ri .es1 .escal5 .es3p .es3t .etern5 .eth3e .eu1 .eur4 .eval3 .evol5ut .ew4 .ex1 .ex3a .eye3 .fal4le .far4i .fec5unda .fen4d .feoff5 .fi2 .fi5lia .fil5tr .fin5ess .fin3g .fi5n4it .fis4c5 .fo3c .fran5ch .fu5ga .ga4m .gam5et .gen4et .ge5neti .gen5ia .ge3ro .glor5io .gnost4 .go3no .gos3 .hab2 .ha5bili .hama5 .han4de .hast5i .he4i .hem5a .hi2 .hi3b .ho2l .ho5rol .hov3 .hy3lo .ico3s .idi2 .ig3 .ig1n .il4i .im5b .in1 .in3d .in3e2 .in2i .in3o .in3t .invest5i .ir3r .is4c .is4li .is4o .iso5m .ka5ro .ki4e .kin3e .lab4o .la4me .lam5enta .lan5i .lash4e .le4m .len5ti .le2p .lep5r .les5son .le5van .librar5 .lig3a .li3o .li4ons .li4p .loc3a .lo4gia .lo2p .loph3 .lous5i .lov5er .lub3 .lyo3 .mac5u .mal5ad5 .ma5lin .mar5ti .math5 .me5lodio .ment4 .men5ta .me5rid .me5rin .met4er .mi4e .mi3gr .min5ue .mirk4 .mis1 .mi5to .mo3bi .mo5lec .mon3a .mor5ti .mu3ni .mu3si .musi5co .myth3 .na5k .nari4 .nast4 .nas5ti .nec3t .ni4c .ni5tro .no4c .nom3o .nos3t .no5tic .nucle5 .obed5 .ob3el .ob3l .od4 .oed5 .oe5so .of5t .oi4 .ol4d .ome2 .om5el .on4ce .on4e .op2i .opt5a .or1 .or4at4 .ora5tori .or5che .or3d .ore4 .or3eo .or4i .orner4 .or2o .os1 .osi4 .oth5 .out1 .ov4 .pal5i .para5dis .par5af .para5t .pa5ta .pa4tio .pec3t4 .pecu3 .ped3e .pend4 .pen5de .pep3t .peri5n .perse5c .pe5titi .ph2 .phe5nom .phon4i .pi2e .pi3la .plast4 .plic4 .plica4 .plos4 .po3la .po5lite .po2p .pop5l .po5sitio .pos5si .pro5bat .pur4r .put4te .ra5cem .ran5gi .re3ca .ref5ere .re5gar .re1i .re5lin .re1m .re5o .res5ci .re5sen .re5spo .re5stat .re5store .re5str .re3ta .re5u .re3w .rib5a .rin4 .rit2 .rol4la .ros3a .sa2 .sac5r .sal4i .sa5lin .salt5er .sanc5 .sap5a .sa3vo .sci3e .sea3s .sect4 .sec5to .se3gr .sen3t .se1q .ser4ie .ses1 .sev5era .sh2 .si5gno .sis3 .st4 .stat4o .stra5to .string5i .su5da .sulph5a .sul3t .tact4i .tac5tic .ta4m .tamar5 .tar5o .tect4 .tel5a .tell5e .te4m .te5ra5t .ter4p .th4 .tho4 .thol4 .ti2 .til4 .ti5ni .tit4is .tor1 .tran4c .tri5bal .tri3d .trin4a .tri5sti .tro4ph .troph5o .tro4v .tular5 .turb4 .turi4 .tu5te .tu3to .ul4l .ulti5mat .un5ce .un5ch .un3d2 .under5 .un3e .un3g .uni3c .uni3o .un3k4 .un5s .un3t4 .un5u .up1 .up3l .ura4 .ur5eth .ur4o .va5led .ve2 .vec5 .ve5lo .vent5il .ver4ie .ver3n .vic5to .vi2s .vis3i .vi5so .vo1c .vo5lut .wine5s .xy3l .za5r a4a 1ab 2ab. 2aba ab5are abay4 2abb ab5ber 2abe4 ab3erd ab3err a3bet ab1ic a3bie 2abin 4abio abi5on ab3ita ab4itu ab3la abli4 4abolic ab3om ab3ota 3about ab1r 2abs. ab1ul abu4lo ab3use ab3usi 2aby ac2a ac5abl ac3al 5acanth ac5ard a5cat ach5al a5chini ach5ism achro4 ach5ur 2aci a4cic aci4ers acif4 4acit ack5a ac3li 4aco. aco3d ac5onr 4acos 4acou ac1r ac3ry act5ate act5ile ac2to act5ory ac2t5r ac5uat a5dai ada3v 4adee ad5eni ad4ha ad3ica a5dif 4adil adi4op adi4p adis4i a3diti 3adju 5admit a2do 4adoe 4adoi ad3ol a3dos ad1ow ad1r adram4 4a2du ad3ula ad3um 4ady ae5a ae4cit aeco3 4aed aed5is ae5g ae3on ae5p aerody5 ae4s ae5si aes3t aet4a aeth4 aet4or. aev3a 4af. 4afe af5ta a4fu ag4ari 4ageri a5ghe a5gia agi4as 4agino 4agl agli4 4ag1n ag3oni agor4a ag5ot a2gr ag3ri agru5 2ah a1h2a ahar2 aha5ra a1he ah4n a5hoo 2ai2 4ai. ai3a a1ic aid4a aid5er aig2 ai5gu ail3er ail3o aim5er ain5ders ai5nea a3ing. 
ain3i ain5o aint5er air5a air5p air3s ais1i a5ism 2a1j a4ju 2ak akel4 ak5u al5abl alact4 a1lae al5ais ala3ma al5ance al3at a5lav alc3at al3ch ald5ri 2ale a3lec aleg4 ale5ma al5ende a1leo a2let al3ibr ali4ci al5ics al1id al3if 5alig al1in a5lini alin5o al5ipe al5ipot 4alis. 4aliu 4alk alk5ie al4lab al4lag alli5an allig4 al4lish a5loe al3ogr a3lom a3loo al1or al4orim alos4 a4lou al3ous a5low al5pen al3ph al5tati al3tie alu3b al5ued al3ues a5lumnia al1va al5ver alv5u 2a1ly4 a5lyn 2a2m a5mad ama4g aman5d a5marine a3mas. am1at a5m4atic am5atu am4bin 3ambu am5elo a3men amen4d am3era am5erl am1i ami2c am5ica amic5r 3amid a3mili am5ily amini4f am5iniz aminos4 a5mis. a4mium. a3mon amor5a amort3 am5ose am2p am5peri amphi5g amp3li ampo5l am3ul amyl5 a2n an2a a5nadi an3ae an3age ana5k an3ali an3arc a5nast an4con an3d4at and5au and5eer an5del an5dif and5ist an5dit an4doni an4ea an5eer an3ell anel5li an3eu an3gan angov4 an4gur 4anh an3ic ani3f an5ifo 4anig an5ion anis5te 4anity 4aniu an5no 4anny an1o an2oe an3oma anor3 an2os an5ot an2s an3sc an4sco ans3il an4sur an2t2a ant5abl an3tal an5tam an2te 1anth an4thi 3anthr 4antic an4tie an4ting ant4iv an4tone ant4r an4tus an5tym an3ul an3um. an5ums a3nur a5nut an2y an5ya a5nyi 2ao aol3i 5aow 2ap 4ap. 4apa a1pac ap3al ap5aro ape5li a5peu aph5em aph3i aph5ol aphyl3 ap1i ap5icu ap3in ap4ine a5pir a3plan ap5li apo5str apo3th a2pr ap5ron 4aps apt5at apu5lar a5pun a4q a5qui a2r 4arabi ara5bo aract4i ara2g ar3age ar4aged ar5agi ar3ago a3raj ar3all ara3m aran4g aran5te ar5apa ar1at a3rau ara3v ar3ba arb5et ar4bid ar4bl arb3li ar4bul ar5chet arch5o ar5dina ar4done ar3en aren5d ar5ett ar3ev5 ar5gh ar3gu ar3h ar1i ar5iff ar4ill a5ri5net ar5ini a5rishi arm3er ar5mit ar3nal ar3nis ar3od ar5oid aro4mas aro4n a5roti a5rouc ar3ox arp5ers ar4pu 2arr ar2rh ar2s ars5al ar3so art5at ar2th arth4e arth3r ar5tiz 2aru ar3um ar5un4 a3ryo a5ryt ar5z as1a as4af asan2 2asc as5con as5cot as2cr as2e as3ect 4ased asep4 ash5ay ash5il as5ily as3in a5sio a3sit as5iv ask5er aski4 as4la as4lo 2aso as5och a4soned as5or as3ph ass2 assa5gi ass5ibl as4sil assit5 2asta as4tat as4tia as3tis as4tit 4asto2 as3tra as4tri as1u as4un as5ur 2a2ta 4atabi a5talis atam4 ata3p atar3a ata3s ata3t4 at3eau at3ech at5eer a5tel. ate5le at5enat at3ent 4ater at3era at5ernis at5erniz 4atess at5et 4a2th ath3a a3then ath5erin ath5ero ath5ete ath3i ath3od a5thon ath5r 4a3tia ati2c at5icis ati5cit at5iciz a2tif a4t1i4l a4tim a2t3in 4atina at5ing 4at4is. at1it atit3u atitud5i 4atiu at4ivi a5tiviz a2to 5at5od 4atog 2atol 4aton a3too a4tops a5torian a4tory atos4 a5toz 2a2tr at3ra a4tre 5at5ress at1ri atric5u at3ron at5rou at4tag 2a2tu at1ul atu4m at3ura at3urg 4a2ty 2au2 4au. aub5i 4auc au5cer auc3o aud5er audic4 aul3i aul4t aul5ted ault5er ault5i au3ma aun2 aun5chie aun3d aun4dre au5reo aur4o au5ror 4aus. aus5er aus5p aus4ted aut3ar aut3er au3th 2av av4ab ava4g av3age ava5la av5alr av5ant av5ar avas3 av3end av3ern av3ig aviol4 av1is aw5er. aw5ers aw1i aw5nie aw5y a4x ax2id 4ay ay5la ay3m ayn4 ays2 ay5si ay5sta ayth4 2az2 az3ar aze4 az5ee azyg4 azz4l 2ba. ba5bir 3back baen4 bag4a 5bah ba4i bal3a balm5i ba5lon bal5u bam4a ban4a ba5nan b4ane 5bang b4aniti b4ans ba4p1 5barb bar4d bardi4 bar4n ba5rom bar3on 5bars 1bas bas4te ba4th4 3batic ba5tio bat5on battle5 2b1b2 b4bata b3bli b4bone b1c2 bcord4 2b1d bdeac5 bde4b bdi4v b2e 4be. 
3bea 4beas be3ca 3becu 2bed be3da bed5el bed2i be4do be5dra be4du 5bee 3bef be3go be5gr be3gu 1bel be3la 2bele be3lit bel4t be3m ben4d bend5a bend5er be1ne be5nig be5nu 4beo be3q 2bere berga5m berl4 5berr ber5s b5ertin be1s2 2bes. be3sl be3tr be3w 2b1f bfa4 4b1h b4ha 2bi. 1bia bi4b1 bicen5 3b2id bid5i b4ie bi4ers bif4 bi4fid. bi5ga bigu3 b1il b2ile 5biles 3b2ill 4bim bimet5 5bina 5bin4d bind3e bin5et bin5i4 1bi2o bio3l bio5m bi3ou bip4 bi5q bir4 bi3re4 bi5rus b2is 5bism bis4o bisul5 3bitua 4bity bi5ve b1j 4b5k4 2bl2 5blac blag4 b3lan 5blast bla5tu blem5at 3bler 5blesp 4blik blim3a bli3o bli2q b3lis 4bly 2b1m bment4 bmi4 4b1n bo2 4bo. 3boa bo5am 5bob bod5i bo5h 2boid 4boke bol4e 4boled bol3i bol4t 3bon bon4c bon4e bon4ie boni4f bon4sp 1boo b3orat bor3d bor5ee bor5et 3bori bor5ic bor5io bor4n bot3an 5boti boun5ti 3bour bous4 bow2 bow3s 4boxy 5boy br4 3brach 4bral bram4 b2ran bran4d 4bre. b4reas 4b2res brev5et b2rid 5brief bring5 bri4os b5rist b4roa bro4ma bros4 brum4 4bry. 4b1s2 b3sc bscon4 bsen4 bserv5an b5si bsin4 bso2 bsol3e bso3lu b4stac bstupe5 2b1t b5tlet 4bu. 5bub buf5fer b4uli b4ulos bun2 bun4a b5u5nat bunt4 bur3e bur4ri busi4e buss2 bus5si 3bust bu5tar b3ute b5utin 3butio but4iv b5ut5o b1v 4b3w 2by 4by. 3byi bys4 5byt 2ca. cab5in c4ace caco3 cad4r 5caf ca3go 5cai 5cak c1al c4ala ca5laman cal5ar 3calc ca5lef call5in cal4m ca3ly ca3ma cam4i ca5nar c2an4e c4ano ca3noe can5tar can5ted can4tic can4tr 5cao 1cap ca5pil capt4 cap3ti cap3u 1car ca3ra5c car5ame ca3ree ca3r4i3c car3if car5m car3ni car3ol car5on car5oo ca3rou car4v case5 cashi4 3cas3s cas5tig 3casu3 c1at c4at. c2atc c4atom ca3t2r c4ats cat4u 3cau caulk4i cav3il 3cay c1c4 ccent5r cces4sa c3ch cci3d4 ccip4 ccle3 4ce. 4ceab cean3 3ceas ce4ci 2ced 5ceda ce3dar 3cede 3cedi 4cef ce5g 3ceiv cel3ai cel5ib5 5cell cel5lin celo4 ce5lom 4cely 2cem ce4met 3cemi ce4mo 1cen2 5cenc cen5ci cen5ded cend5en cend5er cen3i 2cenn 3cent cent4a cen5ted cen5ter. cen5ters cen5tes 1cep cept3a cep5tic 3cera cer4bi 3cerd ce3rem 5cern 5cess cest5o ces5tr ce2t cew4 2ch 4ch. 4chab 3chae 3chai cham5per chan5gi cha3pa chec4 4ched 3chee 3chem che3ol ch1er ch4eri 5cherin ch4erl 4ches 3chete ch5eu che5va 3chew ch5ex 5chi. 3chia 3chico ch3ily ch4in. ch3inn 3chio 5chip chizz4 ch5k 5chlor 4chm 1cho cho3a 5choc 4choi ch5oid 3chor 4chored chor5ol 4choso 3chot 4choti ch5ous chow5 3chr chur4 3chut 5chyd 3chyl 3chym 1c2i2 4ci. 4ciac cia4m ci3ca 4cids 4cie. ci3er ci3est ci5et ci3f cifi4 4cig ci3ga cigar5 3cil cil5lin 2cim cim3a ci3me 5cimen 4cinab 4cind cine5a cine5mat ci5ness 4cint ci3ol ci5om ci4po cisi4 cit3r ck1 ckar5 cka5t c4ke ck5if ck4sc cl2 cla5rif 3clas c2le2 2cle. c5lec clemat4 clev3 cli1m c3ling cli2q clo4q c4lotr clue4 clyp5 5clys cn2 c3ni 1c2o2 4co. 3coa co5ba 3coc co3ci co5cu co3dic co3dif 4cody 3coe co5et co3gr 4c3oid co3inc 4col. col3a co3log 5colou co5ly co5mas co4me co3mo4 comp4 con1 con4ati con4ch cond5er con4ey con4ie con3s con3t conta5d 3coo coop4 co3or cop4e co3ph co5pl co3po cop4t 2cora cor5ded cord5er 4cored co3rel 3corn 4coro co5rol 5cort 3cos. cost3a cost5er co5ta 3co3tr 5coty cous5t cov1 co3va cow5a coz4 co5zi c1q cr2 5craf craft5i c4ran 5crani cra5niu cras3t cra4te c2re 4crean cre3at cre4p3 5creti cre4to cret5or cri3l cron4 crost4 4crou 5c4rus cry2 crym3 cryo3 4c5s4 csim5 2ct c2ta c3tac ctac5u c5ta5g ct1an ct5ant c5taria c3tato c1te c4tea c2t5ee c4tent cter4ia ct5es ct5et ct2ic c5ticia c4tics ctifi4e c3tim ct4in. 
ct4ina ct5ing c3tini c5tin5o c5tio c3t2is c3tit c4titu c4tity ct5ive ct4ivit ct5olo c1tom c3ton c5toris c5toriz c1tr c2tre ctro5t c1tu c2tum c1ty cub3at c4uf cu5ity cul5ab c2uli cull5er cull5in 1c2ult cu4mi 5cuna cun4e 5cuni 5cuol cu5pa cu3pi c3upl 1cur cur4er cur5ial 4cur4o 1cus cus5a c3utiv c3utr 5cuu cu5v 2cy. cy4bi 1cyc cyl3 cy4m cy5no cys4 cys5to cy4t cz2 4da. d4abr 1d2ac dach4 d5ache 3dact d1ag d4a4gi d4ale d4alg dal5ler dam5a 3dame d3ami da5mu 3dang d1an4t d3ap d3ard 5darm 3d4as2 dast5a d1at dativ4 dat4u daugh3 daun5te 3dav d3b d3c4 d1d4 d4dere d3di d3dler d3dli d3dyi 2de. deac3t de5aw de4bi deb5it 3dec de5cant de4cil de1cr 4dect ded3i defor5e de4fy. de3g de4gu de3io 5de3is de3lat deli4e del5ler del5li de5lo 1d4em 4demie 4dem4is demo4n de4mons de3mor de4mos 4demy de1n2a den4d 4dene d3enh deni4e dens5a dens5er den5tit de3od deo3l deon2 deont5 de1p depen4 deposi4 de2pu d3eq derac4 de3rai d4ere 4dered de5reg 3derer 1deri der3k 3derm der4mi der5min 5derne 3dero4 der5os der3s 5deru 4des. de3sa 5desc des4ca de5scal de3sec des4i de3sid des5igna des1p des5pon de3sq d3est. des3ti 1de1t de3tes de5th de2ti dev3il de3vis de3vit de4voi devol5u 3dex 2d5f dfol4 d2g dg4a dgel4 d4gen d3gr 4d1h dhot4 d4hu 4di. 1dia di2ad 3diar diat5om 4d1ib d1ic. dic5am di4ce di3ch d5icl dic5ol 1dict dic5tat dic4te 5dicul d5icur 1did di4ers 3di3ev d4ifo dig3al di3gam dil4 5dill dilo4 di3lu di5mer dimet4 di1mi 2d1in din4e din5gi di5nos 3di1o dio4c di4ola dip5t 3dire di3ri 4d5iro di4s1 d4isc disen3 3disia 3diss d4itas d4iter dithe4 d3ito ditor3 2dity 1diu 1di1v2 di4val di5vine dix4i d1j 2dl4 d1la 5dlef 5dlest 3dlew dlin4 d1lo d5lu 2d1m 4d1n2 1do 4do. d4ob do4c3u dog4a do4j d4ol. dol3en do5line dol5it do4lon d4ols 5dom. doman4 domin5 dom5ino dom5it do5mo don4at 4dony 3doo d2or 4dor. dor4m dort4 d4os do5sim dossi4 dot1a dot4tin 2dous d4own 3dox d1p dr2 d5rail d3ral 3dram dran4 d4ras drast4 3drel dres4 dress5o dri4e d4rif dri4g3 d4rom dropho4 drunk3 4d1s2 d5sl ds3m ds4mi d4sw dt4 dt5ho 1du 2du. du1at 3duc duch5 duci5an du4co du5eli du5ell du5en du5ett du5in dul3c d3ule dul4l dum4be dun4a d5un4c du2p du3pl 5duro d5use dust5er du3u d1v dver2 dvert3 dvoc5at 2d1w dwell3 2d2y dy4ad. dy5ar 5dy4e 5dyk dyl2 dyll3 5dymi 3dyn dys3p d3zo ea2 4eab e1act eac4te ea5cu e5add ead3er ead1i ead3li ea4g eak1 eal3a eal3er ea3log eam4bl eam3er ean5i eap2 eap5er e3app ear3a ear3er ear4li e5ar2r ear4te earth5i eas5er ea4soni e1as1s eassem4 eas4t east5i eat5eni eat3er eat5ie e3atif eatit4 eat4itu e3atri e4atu eau3 eav5i eavi4e eav5ou eaz5i e1b ebar4 eb2b ebe4 e4bel. e4bels e2ben eb5et eb2i e5bil e4bin e4bis e4bl e4bos ebot3o e2br eb1ra eb2t e4buc ebus5i ec2a ec3ade ecad5en ecal5e e5cam e4capo ec3at ec5ath e1ce ecent5o ech3i e4cib eci4f ecip5i e1cl ec3lip econ4sc econstit5 ec3ora ec5oro ec3rat ec5rean e4crem ec1ro ect5ati ec4ter ecti4c ec4tit ec4t5us ec1ul e5culi 2ed e5dans e2dat ede2 e4ded e5deh e4dele edes3t ede3te edeter5 e3dev e5dew ed4g edi4als ed5ical ed5ics ediges4 ed5igr ed3ime ed1it edi2v ediv5id ed3li edor4 e4dox ed1ro edu5cer e2dul ed3ulo e4d5ur ee4ce eed3er ee4do ee2f ee5g ee1i ee2l1i ee2m eem5er eem3i eep1 ee4pa eer4ine eesi4 ee3to e1f efact5o efal4 ef5eree ef5inite e4fite ef4l efor5est 2efu e4fug efut5a egel3 egi5a e4gib e3gla eg3le eg4mi eg5nab e5g4on e2gr e5gur e1h2 e5ho eh5s ehy2 ehyd5r eid4 5eido 4eif eig2 e5ignit e4in. e3inc e2ine e1ing ein5i e4ins. 
ei4p4 eir3o 4eis eis3i eit5er eith4 e2iv eiv3er e2iz e1j ejudic4 ek3en ek5is ek4l e4lac e5lad el5age elam4 el5anc elast3 e4lates el5ative elch5er eld3er 2ele elea5g 4e4led el5eni el3eno ele3o ele5ph el1er e1les e5less e4leste el3et3o elev3a ele3vi el5ex e4l3ica4 e1lie eli4ers e3lim el3ing eli3on e4li4s elit4t e3liv el4lab ell5iz e3loa e3loc elo5ca eloc3u elo4di e2log elom5ate el5op. el5ops elp5in el3so el5tie e1lu elu4m elus4 elv4 e5lyi 3elyt em3ago em3ana emar4 emarc5a em5atiz emat5ol em5bi e1me4 e4mee e4mel e3mem e4m3era em5ero emet4e em4icis e4mie e2mig emig5ra em3ina em5ing e3mio em3ism e4mita e4miu em4mae 4emnit emo3bi emod4u e2mog e4moi em3olo em5om 4emon e3moni emon5ol e2mor em5oris em3orr e4motic e5moz empa5r empara5 em5pes 4empli. em4pre em3um e5mut en3ac e4nal en3am3o en4ann e2n3ar en3as. ena5ture 3encep en4cile enct4 2end en4d5al en4dedl end5rit 4ene ene5d en3ee e5nelle e5nep e2ner e5nereo ener5v en5esi e3ness en1et en4ett e2n3eu e3new en3gi en3ic en5ier en3ig3r en5in enit5u en3k en1o en3oi eno2m en3oty enov3 en2s ens5al en3sp en4sum en4sus ent3ar en4ters en5tia en4tify en2to en4tri ent5rin ent5up en4tus 4enu en3ua en3uf en3ur en5ut 5enwa eo3b e4och e4oda eof2 eo2l eol5ar. eol5at eologi4 e5olu eo3m eon4a e3ont eop4t e1or1 eor4de eor3e eor5o eo1s2 eo4to e1pa ep4al ep5arc epa4t epend5en ep5ert e4pete epe5titio ep5ex eph1 eph4i e2pig e5pla ep3lic epol3a epol3i epolit5 ep3reh epres5e ep5rim e4p5rob5 ept3or e1p4u e3pur5 e4puta equin4 equi5no er1 era4cie era4do era4g era4l er3aph er3api er3apy 4erati. 4eratim er5atu er3bat er3be erb5os 2erc er3ch er3cl 2erd erd5ar erdi4e 2ere er3eal 4ered er3egr er5el. er5ell er5els e4reme er3en 5erend eren4e ere5ol e3req er3er ere4s er5ese er3esi er5este er5esti eres5tr eret4 er3et. er3ets er3ett ere4v er3ex ergi3v er3gl er3ia. er4ian eri4cid 5er5ick er2id er3ie er3iff er4imet er3in eri4na eri4on er3iou er4isc eri5sta 4eri2t e3riv er5iz 4erj erk4 er3me er4moi 5ernacl er5nalis ern3er ern3is ern3it 4ero. er3oid ero5is ero5st erpent5in erre5la er4rep er5sine er5ted er4ter ert5er. ert5ers er4thi ert5iz 2eru eru4b eru5d erund5 er4vil 5erwau eryth3 2erz 4es. es5am es5an e2sc es5can es5che esci5e escut5 e3sea e3sect e5see e5seg5 ese4l es5enc e3sh4a e1shi e5shu esi4an es5ic. e5sick es5iden esi5diu es5ies es3im es3in e5sion e4sit es4it. es4its e3skin e3s4mi es4od es3ola es3ol3u es3ona eso3p e1sor es3per3 es5pira es5pit es4pl esplen5 es5pot e5s2pr es4s3an essar5 ess5ee es4sil es2so esta4b est3an e5star es5tau e2sti est5ifi est5igati e3stoc es5too est4r estud4 e1su e2s3ul es4ur5 et2a et3al. et5allis et3al5o eta5me eta3p et3ari et5ary et4as et3ate et3ati et5ay et3eer etell5i etend5er et5eni eter2 et3er3a et5eria etex4 e2th1 ethyl3 2etia e3ticu eti4gi e5tim et3in eti4na e3tir et5itiv eti4u et5olo e5tomete e2ton et3ona etor3i etra5g 4e4tral etra5m et4ran et5ress et1ri et4ria etrib5a e4trim et1ro et2t et3ter etud4 et3ude e4tum et4we et5z eudio5 eue4 euk5 4eum e3urg eur5i eus4 eu5ten eu3ter eut3i ev4abi eval5e eva2p3 ev3ast ev3at ev5eli eve4n ev5erat ev5eren ever4er e4veri e4ves e1via e4viab e2vic evictu4 evid3 ev5ig ev4ile ev5ish evis5in evis5o e4viu evoc3 evol5e evol5ute evu4 e1wa e4wag e5way ew1er e3wh ew5ie ew1in ew5ish e3wit e1wr ex5ic ex4on. 1exp 4ey. ey4as eyl4 ey3s2 ez5er. ez5ers ez5ie 1f2a 2fa. fab4i fa3cet fact2 fa2c3u 2f3ag fall5in 5falo fa5lon fals5ifie 4fan3a fan5tasiz fant3i 5far far3i 5faw 4f5b 2f5d 2fe. 3feas fea3tu feb5r 3fec 2fed1 5fei fe1li fem3i femin5 fend5er f5eni 4fered fer3ee 3fero fe5roc fer5om 3ferr fer3v 2fes. 
fess3o fest3a fest5i fe4t fet4al fet4in fet4o 3feu fe5veri 2ff f1fe ffec4te f5fet f1fi f5fia f3fic f5fie ffil3 f2f3is ff4le ff3lin ffoc3 ffoni4 ffor3e f3fr ffranch5 4f5h fi5ance fib5u 4fic. 4fical 3fici 4fics fi5del fid3en fiel4 fier4c fight5 1fi2l 2fin fin2a fi3nal find3 fin2e f1ing 5finin fin4ni fir2m1 f3ita f5itee fl2 3fla fle2s f3lica flin4 3flo flo5ric 3flu flum4i 1fo 4fo. 3foc fo2e foeti4 fo1l4i fo4lie foment4 fo2n fon4de 3foo fo5ram for5ay for5b for4di fore3t 5form for4m3a fortu5na fo3v 1fr2 frag5a frant4 frar4 fratch4 fre4s frost5i fruc4 2f3s fs4p 2ft f1ted f4ter. ft5es fti4et ft4ine 3fu 4fu. fu4c fuel5li fug4a fu4min fun2g 4fured fur3n fu3sil fus5o fu5til 4ga. ga4cie gadi4 ga4dos 3gag 3gai 3gale ga5len gali4a gal5ler 3galo gam4bl gan5at 4ganed gang5er g5ant. gan4tr g5ants g5arc g4are gar3ee gariz4a ga5rot gar5p 5garr 1ga4s gas5i gas3o gasol5 gass5in gast3r g1at g4at. gat5iv g4ato. g4atos g4att gat5u gaud5 ga5za g1b g5d4 2ge. 5geal 3gean 2ge4d 3gedi 5gedn 4gef 1gel 4gele ge4li gel4in gel5li ge4lu 2gely gem3i 5gemo 3gen gen4du gen5it gen3o gen5ti ge4o geo3lo 4gere 3germ4 2ges. 5gess gest5at 3get get3a 2g1f 2g1g gg4a g2ge g5gedl g3ger g5gerer ggi4a5 g3gli gglu3 g5gly ggrav3 g4gro 2gh g5hai gh5eni g3ho g4hos gh2t 1g2i 4gi. gi4all gi4at 3gib gi5co gi4g gi5gan gin5gi 3gio gi4or gi4ot 5gip gi5pa g4i4s 5gis. gi2t1 5gitu giv5en. 2gl2 g3lar 5glass. glec4 3gler g4leto g4letr g4ley gli5on g5lis4 3glo 4g5lod glom3 4glop 3glu glu5te glu5ti 3glyp 2g1m4 2gn2 g1na g4nab g5nate 5gnath g5nati gna5tur gn5edl gn5ee gn3er g1ni g4nia g2n3in gn4in. g4ni2o g2no 5gnori gno4s 2go. 5goa 3goc 5god 3goe go4et go4ge 4gogram g5oid go3is go2me 5gonn go5nom 3goo goph4 4gor. 5gorg 4gors g4ory 3gos gos4t 2gou gour4i g1ous gov1 g3p 1gr2 grab4 3gram 4grame gra2p g4re gril4 grim3a g4ro gro4g g5ron grop4 3gru gru3en gru5i grum4b 2g1s gs4c gs4t g4sti gth5eni g5to g4u2a gu5ab 5guan 3guard g5uat 2gue 5gueu 5guit4 gui5ta gu2ma gu4mi 3gun g4uras g4ured gur4n gur4u 4gury gust5a 2g1w 2gy gy2b 5gym 3gyn gyn5o g5z2 ha2 4ha. h4ac hadi4e had4ine hae3o haged5 hagi3o hag5u ha5ic hais4 hak4ine hal5ant ha4m ham5an han4cro han2g h1ani4 h5aniz han4t hant3a ha4pe hap3l har1a har5b har4d har5die harge4 ha5rism har3o har4ted har4ti has4te hat5o haught5 havel4 hav5ersi hav5o h1b h1c h1d hdeac5 hdu4 he2 4he. h2ea 1head 3hear hearch4 heast5 heav5en hec3t4 he5del he3do heek4 h4ei he3is he5lat h5elin he3lio he5liu hel4li h3el3o hem1a he3men hemis4 he5mop hem4p hende5 he3or hep1 h1er. her4as her2b herb3a herb3i here3a here3o h5erett h5erh her5ial h5erine h1erl her5om h4eron h1ers h5erwa hes3tr het1 h4et3a het3i het4ted heu2 heum3 heumat5 he4v4 hev5i hex5o h1f h5h 2hi. hi4ar h1ic hi3c4an hi4cin h4icl h5ie. h1ier h4i4ers h1ies h3ifi4 h3ify hig4o hi5ka hi4l hi5ma4 hi5mer himos4 h1in hin4d h2in2e hi5nie h5iniz hi5nop h2ins hio5lo h4ior hi2p hip3l h4ir hir4r hirr5i hit4a hiv5a 4hl h3la h1le h3let h1l2i hli4a 2h1m h4manic h5mica 2h1n2 hnocen5 4ho. ho3an ho4co ho3don ho5du ho5ep hol3ar hold1 hol4is. ho5lys ho4mag hom5in h2o4n hon5em ho5neu hon3ey hong3i ho5nio hon1o 1hood hoo5r h4ope ho2p5r h4op4te hor5et h4orn horn5i ho5rog hort5h hosi4 ho4ton h2ou 3house3 4h1p 2hr hras5eo hre4 hre5ma hr5er hres4 hri4 hrill5in hrim4 h5rit h3rod hrom4i hry4 h3rym3 2h1s hsi4 h4sk ht5ag ht5ee ht3en. 
ht5ener ht3eni ht3ens ht5eo ht5es ht4foo h1th ht4ine hu4g hu4mat hu5mer hu4min hun4c hunk4 hun4t hur3i hu3sia huz4 h1w h4wart h2y hy2l hyl5en hy2m hyn4 hy3o hyol5i hy1pe hy3ph hyr4 hys3te hy4t 2i1a2 ia4bl iab5olis iab5oliz i2ach iac3o i2ac2r ia5cri ia5dem i5ae iaf4 i2ag4 ia3gn i5a4g5o ia3gr i3ah i5ai ialect4 i3alit ial5li 4ialn i2a3lo ia5ly i5amb ia3me ianch5 i3ant i5ape ia3ph i2ard 4iarit i3at ia5the i5atom iat4u iatur4a i3au iav4 ib3era ib1i ibio4 ibios4 ib5li 4ibo i4bon ibor4 i4bose i5bou ib1ri 4ibu ib3uta ic3ac ic5ado i4cal ic1an 2icar iccu4 4ice i5ceo 4ich ich4i ich5ing ich5ol 4icin i5cio 2ick ic4lo 2i2co ico3c ic5ola icon3o i5cop icotyle5 2i1cr i4cri i4cru i4cry ic4tedl ic4ter ict5ic 2icu icu4lu ic3um i5cun4 i5cut 2i1cy i2d id1a i5day ide4m id3enc id3era iderm5 i3dicu id3if i5dig i5dil i3dim id4ines idios4 idir4 id1is4 id4ist 2i4d1it idi4v id3li id3ol idol3a 4idomi id3ow 4idr id5ri id3ul ie2 4iec 2ieg2 ie3ga ie5i i5ell 4iem 2i1en ien2d i1er i3eres i2eri ieri4n 4iern ier2o i4ert i3esc ies3el i1es2t i3est. 2i1et i4et. iet3ie 4ieu i5euti iev3a iev3er iev3o 2i1f i2fe if4fa iff5ler if3ic. i4ficac if5ics ifi4d ifi4n 4i2fl i3fo ifoc5 if5tee i3fy 2ig i3gad ig3and 3igar i1ge i3ger ight5er. ight5ers 4igi ign5iz igno5m i3gon ig1or ig3ot i5gret i4g5ro igu5it ig1ur 2i1h ihy4 2ii i5in ija4 4iju 2ik2 ik5an ike4b i2l3a ila4g ila5tel i5later il4ax il5dr il4du i3len ilesi4 il3f il3ia. il3iar ili4arl i3lici i5lien ili4er ili4fe il4ific il1in il5ine. 4iliou il5ipp il5iq il4ite ilit5u il4mo i5lon il3ou ilth4 il2tr 4ilu il5ul i5lum il5ure il3v 4ilym ima4c im2ag im3age im1al im5am i5m2as i4mated i4matin imat5u im1i i3m2ie im4ine im5ino im5mes i2mo i5mog i3mon im5oo i3mos. impar5a imparad5 im5pie impot5 im5pr impu4 im1ul im5um in3ab 4inace in4ado in5agl in3air ina4l 4inalit in5am in3an in3ap in4ars i3nas. 4inata inator5 in3au in4aw 2inc inc4tua 2ind in5dar inde5p indes5 inde3t indeterm5 in5dro 4inea 4ined in5ee in5ega 4in5eo ine4s in3esi ine5te 4ineu inev5 infilt5 infol4 4infu 4inga in5gal 4inge ing5ha 4ingi 4ingle 4ingli 4ingo 4ingu ing3um 2ini in5ia. 4inic in4ici in3ion in4itud 4ink ink4ine 4inl 2inn 2ino 4ino. in3oi i5nole 4inos i3nos. in5ose in3osi 4inq ins2 in4sch5 inse2 insect5 insec5u in3si 5insk insolv5 in4tee int5ess in3til int5res intu5m 2inu in5ul in5um in3un in3ur invol5u 2io2 ioact4 i1od iod3i4 iod5o ioe4 io3gr 4i1ol io3ma i4omani io3mo i5ope io3ph i5opo iop4s i1or iora4m 4iore 4iorit 5ioriz 4iorl ior4n io3sc i3ose i3osi i4oso io5sta i3ot iot4a io5th iot5ic io5tr i4oty i4our. i4ours i5ox 2ip ip3al ipap4 ipar3o ipart5ite ip1at i3pend i1ph2e iphen3 i5pheri iphi4 i4phu ip3id i5pil ip3in ip4ine ipir4 ip5is ip1i4t ip4iti ip3lin ip3lo i3po i4pog i4poli i4pom ipon3 i4pow ip2pl ip3pli ip4re ip5tori ip1ul i5put ipy4 2iq i3qua 2ir ir1a ir4abi ira4c ir4ae. ir4ag ir4alin ir4alli i5raso irassi4 iray4 ird3i ire3a ir3ec ir5ee irel4 ire5li ires4 ir5ess ir1i ir2i4d ir4im ir4is. 5iriz irl5ing ir5och ir5ol ir3om ir4q ir2s ir5ta ir5tee irwo4me i4sa is5ad is3age is1al is3am is1an is3ar is5av 4isb i2s3c is5chi isci5c 4i1sec ise5cr is3ell 4is3en is2er is5ere i2s3et 4iseu is3har ish5ee 4ishio ish3op is5hor 2isia is5ic is3ie 4isim is3inc 4isis is4ke is1l islun4 2isma is1on is5oner iso5p is1p i3s2ph 5ispr 2is1s iss5ad is4sal is5san iss4iv iss4o 4ista is4tal ist5enc ist5ent is5terer 4isth is4t3ic 4istl i4s1to 4is4tom is1tr 3istry 4isty i5sul is3ur 2isy it1a it5ab ita4c 4itai it3am it4ana it4as it3at i3tect it3ee it3enc it3ent it3era 2ith itha5l ith5i i5thol ith3r ithy5 2itia iti4co it5icu it1ie it3ig 4itim it4in. 
it4ins 4itio. 4itione i5tiq 4i5tit it3iv it4li it5lo 4ito. it5ol 2iton it1ou 2itr it5ress i4tric 2itt it4tit itu4als it5uar 4itue it1ul it1ur it3us 2i1u2 i3um iur5e 2iva iv5anc iv1at i4ved iv5el. iv5eling iv5els i4ver. iv3eri i4vers. iver5sal ives4 iv3et i4vie iv3if i5vilit 5ivist. 5ivists iv1it i2vo ivoc3 i5vore 2i1w 2ix ix3o i5ye 1iz 4izah iz3i2 2izo iz5oi 2izz 1ja 2ja. 3jac ja2c5o jac3u jag5u jal4 ja5lo ja5pan jel5la jeo2 jeop3 4jes jeu4 jew3 2ji 3jig jil4 jill5 5jis. 3jo2 4jo. joc5o joc5u jol4e 4jr 4js ju1di jui4 ju5l ju3ni juscu4 jut3a ju1v k4abi k2a5bu kach4 k3a4g kais5 ka4l ka5lim kal4is k4an ka3o kap4 kar4i 1kas. kaur4 kav4 k1b4 k1c kcom4 k5d2 kdo4 kdol5 4ked ke5da k5ede 3kee ke4g ken4d keno4 kep5t ker5a k4ere k5erel ker4j ker5o kes4i ket5a key4wo k1f kfur4 k3ho 5kih ki2l kilo3 k1in k2in. 3kind kinema4 kin5et k3ing kin4i k2ins kir3m kir4r kis4 3kis. k1ish kit5c ki4w kk4 k5ker k2l2 k3la k5lea k3ler k3let k3li k3lo k1m kn2 k2no 1know ko5a kol4 ko5mi ko5pe k1p k5ro4 k3ru 4k1s k3sl ks2mi ks4t k1t kur5 k5v k1w 3kyl l2a 4la. 5laa lab5ar label4 5labr l4ac la2ca la5ceo la5cer la4ch la2co 5la5col lac5on la3cu la4de l5adm l4ae l4af la3ger la4gis lag3r 5lah4 la4ic. l4al 4lale 5lamandr la5melli lam4ie lam1o l5amu lan3at lan2d 3land. land3i 3lands lan4er lan3et lan5tine lan4tr la4p lapi4 lar5an lar5de 4lared l4as lat5al la4te 5latilis 5latiliz 5latini lat5us l4au 5laur lav5at l4aw 4laz l3b lbe4 l4bit l4by l1c2 l2cat lce4 lcen4 l4cere lch4e l3dar l3ded l3deh l5dera ld3est l5dew ldi2 l3die ld4ine l5di5nes ld3ish ld5li l3do 4le. 3leagu le5atio leav5er l3eb5ra le3ca le5cha lect5ica 2led le5dr leg1a l3egan 3legg le4gin leg3o le3gra lek4 4leled lel5o lelu5 lem5enc lem3is l5emiz 5lemm l3emn le2mo lem5on l5enda len5dar lend4e len4do le1ne le5nie len3o 4lentio len5u le3on leo4s le5q 2ler le5rec 5l4eria l4eric le5rig ler3om leros4 ler3ot 4les. le3sco 3les4s 1let le5tra le5tre 5le5tu5 leur5 2lev l3eva 5leve lev5ita le4wi l5exa 1ley lf5id l2fo lf3on l1g2 l4gal l4gem lgi4a l4gid l4goi l3h 4li. li4ani lias4 lib1r l1ic. 5lich li4cie 5licio l3ic3on lict4o li4cu l3ida l4idar 5lidif 3lieu l4ife l4ifo lift5er 1lig li5ger light5i 5lih 3lik 1l4il lil4i lim2b limet4e lim4p l4ina. l4inas lin4d l4ine 5lin3ea lin4er. lin4ers lin4ger ling3i 5lingt 3lingu 3linq lint5i 3liog li4ol lio3m liot4 li3ou 5liph lipt5 li1q 3lir l1is l4isk 5lisse l1it l2it. l3it5a 5liter 3lith 5litia 3litr lit4u l4iv l5ivat liv3er liv5id lkal5o lk5at lk3er. lk3ers ll2 l1la lla4ba llact4 l5las l4law l5leb l1lec l1leg l3lei l1lel lle5m l1len l3lep l3leu l3lev ll3f l1li lli5am lli4an llib4e llic4 l4licl lli5co l5lie lligat4 l2lin l5lin. l3lina l3line l5lio lli5v ll3m l1lo lloc3a lloc5u llo2q l4lov llow5er ll3p ll3s ll5t l1lu llun4 l5lya l3lyc l3lyg l3lyh l3lyi l5lym lm2 l1ma l1me l4mer lm3ing l5mip l2m3od l1n4 l3ne lneo4 2lo. 5load 5lob3a 1loc loc3al loc5ul lo4cus. 2locy l3odis 3lo3dr 1log lo5gan 4loi. lo5mi lom4m lon4al lon4e l5onel lo5ney long5in 3lonia loni4e l3onis l3oniz loom5er lop4e 5lo5pen l3opm 1lo1q l4ored lor5iat lor4ife lo5rof loros4 l4os. lo1so loss4 los5sie lot5at loth4ie lo5tu 5loup lp1at lp3er lph2 l5phe l3phin l2pho l3pie l3pit lr4 l3ri l3ro l5ru 4ls l5sam ls5an lsi4fia lsi4m ls4is l5sk ls4p l1s2t ltan3e l4tang lt5ant l5tar l1te l4tei ltern3 lth3i lti4ci ltim4a ltin4 lti3t l3t4iv lt4or l1tr ltramont5 l1tu l4tus 4lu. 
lu1a luch4 lu2c5o luc5ra lu4cu 4lue lu1en lu5er lu1i lu4it lum4bri lu4mo 5lump lu2m5u lunch5eo 5lune l3unta lu3ori 5lup 3lur3o lusk5 luss4 lut5an 4lut5ar 5lutioniz lu5toc lut5r lu1v lv5ate l5vet4 l4vi l4vor l3w lx4 2ly 4ly. ly1c ly4ca lyc4l lyc5os lym2 lymph5 lyp2 ly4pa lypt5o 3lyr lys5er 3lyw 3lyz lz4 4ma. m4aca mac3ad ma5chine 5machy ma4cis mact4 4mad. 4mada 4mads ma4ge 5magn 2mago4 2mah ma5ho 3ma4i 4mai. maid3 5mak mal3ap mal5ari 5male2 mal5ed mal3ef m3alg m3alis mal4is. mal3le mal4li 2mam mament4 m5ament. 1man 3m4an. man3a man5dar man3dr manic4 man4ica ma5nil m4ans mantel5 2map m3aph 1mar 5maran mar5ol ma5ron ma3roo mar5ri mar4shi mar3v ma3son massi4 mass5ing 3mas1t mas4ted mast4ic mas4tin m4at. m4aten ma3ter mater5n4 m4atit mat4iti m4atiza ma3tog mat5om ma3top m4ats 3m4att ma5ture mav4 2m1b mbat4t mb4d m5bec m5berer m4bery m4bes mb2i m2bic m5bil5 m4b3ing m4bis mb5ist mbival5 m5bler m3bli mbru4 mbu3l mbur4 m1c m5d m2e 2me. mea5g me5and me4ba me4bi 2med 4med. 3media med5icat 4medie m5ed5ies 3medit me4do m5edy me2g 5meg2a1 mega5t 4mele mel5ee mel5ler mel3on mel4t melt5er me2m 4m5eme 1men 3men. 2mena men4ag mend5er mend5o me1ne ment5or 5ments 5meog me4p m5eran 4mere mer4ia 2me2s mes5en me5si4a mes5q 3mesti4 1me2t meta3t met1e 4meted meth4i meti4c met5ici met3o met3ri m1f 4m3h 4mi. m1ic mi4cin mi3co 3micro m4ict mi3cul mi4cus m4idi mid4in mid5on mi5fi mig5a migh5ti mi2gr 4mij mi5ka m2il m3ila mil4ad 4m5ilie mil5ies 3mill mi5lo mil4t 3m2im mim5i 5min4d mind5er min4er. min4ers ming5li min5ie m4init min3ol 1m4int minth5o mi3o mi3p mirab4 mi5racu m2is. m4isc mi4se 4misem mis3ha 5missi m3ist. mis4tin m3ists mi2t m5itan 4mity 3miu 5mix 4m1l mlo5cuti mlun4 2m1m2 mman4d mmand5er m3medi mmel5li mmet4e mmig3 mmin3u mmis3 mmob3 m5moc mmor3 mmut3a 4m1n2 mnif4 m4nin mni5o mnis4 mno5l 1mo 4mo. 2moc mod1 mod5ifie mogast4 mo4go mog5ri m5oir mok4i mol3a 4molog. 4mologs 4mologu mo3ly mo1m mo4mis m4on mona4 4moned mon1g mo4no monolo4 monolog5i m4op mophil5i mop4t m3orab m3orat4 mor4ato m5ord mo5rel 3moria m5oriz mor5on 3morp 3morse mor5tal mo3sp 5most mo3sta 2m1ous m1p m3pa m4panc m4pant mpath3 mpel5li m5perer mper3i mpet5it mphal5o m4phe m4phl m2pi mp5id m5pig mp3ily mp1in m3pir mp3is m3po mpol5it mpo2t mpov5 mp3to mp5tr m3pu m5q m3r m4ry 4m1s msel5f m5si msol4 mtu4 muck4e muff4 mul1t2 m5unc mu5nio mun3is mus5co mu4se mus5ke mu3til m1v m3w 2my 5my3c my4d my3e 3myi 5myst4 3myt n1a 2na. na2c nach4 na5cious. na5ciousl nac4te nac5tiva na5culari na4d4a nadi4 nad4op n2ae. naffil4 nag4a n4agen 5na5geri na4gi n5ago 5n4a3gr 5nah 5nail na5iv nak2 4naled n5alg n4alia na3ly 1nam 3name nam4n na5nas nannot4 nan4ta nan5ted nan4to na5o 4n4ard nar5tisti n2as nas5i nas5p nas3s nas5te nat5al na5tat n4atee na3the nath4l nati4 n4ati. nat5ic n4ato. na3tom na4tos nat4r na5turi naugh5ti naus3 3naut naut3i na2v na5vel n3b4 nbarric5 nbeau4 nbe4n nbene4 nbet4 nbit4 n1c2a n4cal. ncarn5at ncel4i ncent5ri n4cept. n3cer ncer4e n4ces. n5cet n5cey n3cha nch4ie n3cho nch5olo n3chu n4cic ncid5en n4cif ncip5ie n1c2l n4cles ncoc4 nco5pat n1cr nc1t nc4tin nct4ivi nct2o n1cu ncu4lo n4cun n4curvi ncus4t 4nd n2da n3da4c n3dal n4dale n3dam nd3anc nde2 n3dea nde3ci n1ded nde4l ndeleg4 nd3enc ndepre4 n3derl nde4s ndes5cr n5dez nd4hi n1dic ndic5u ndid5a n3die nd5ily nd4ine nd3ise nd5is4i nd5ism. nd5ity nd3ler nd1li n5doc ndor4 n2dou nd5our ndrag5 ndram4 n5dron ndu4b nduct5iv n4dun nd2we n3dyi 2ne. 
ne3alo n3ear ne2b3u 5neck ne4cl ne2co n5ectom 2ned 3nedi ne4du4 neg3a ne3go 5negu neis4 2nele ne5lia neli4g n4ely ne2mo 4n1en n3end neo3l neon4 ne2p n1er 4nered 5nering ner5o ner4r5 ner2v nerv5in 2nes. n1esc ne3sia 1ness n1est nes3tr net3a net3ic ne4tog net1r neuma5to neut5r nev5er n4ew news3 n4eys. n3f nfo4 nform5er nfortu5 nfran3 4ng ng2a n4gae n5gee n3geri n5gero ngh4 n2gi n5gic ngio4g n5glem n3glie n5glio ng1n n1go n4gry n1gu n2gum n1h2 nhab3 nho4 nhy2 nhyd5 n1i 4ni. 3niac ni3ba n4icab ni4cen 4nicl nict5a ni4cul4 ni4dio n2ie ni4ers nif4f nift4 nif5ti ni2g night5i n3igm 3nign nik5e n2il nil4a n3im1 n4ime 5nimet n4ines nin4j 5ninn n4inu 5niol ni1ou 3nipu 5niq n4is. n4isk nis4l nis4o n5iss nis5ter. nis5ters nitch4 ni4te ni3tho n4itos ni5tra nit5res ni3tri nit4ur n2iv niv4a ni3vo nivoc4 niz5en n1j njam2 njur5i 4n2k nk5ar n5kero n3key nk5if nk5il 4n1l2 nland5 n3le nlet4 n3m nmater4 nmor5ti n1n4 nne4 nnel5li nnerv5a n3ni nni3ki nnov3 n5nyi 4no. n5obi no5bil nob4l no5blem nobser4 n5ocula no4di n4ody noe4c no4fa nois5i n5ol. no3la nol4i nom3al 1nomi no2mo 4none 3nonic 5nood nop5i nora4t nor5di nor4ia nor4is nor3ma n4oro nor4t n4os. nos4o no3sp not1a 3note n1ou n4oug 3noun 2nous nou5v nova4l nove2 nov3el novel5e n4ow now5er now3l n3p4 npil4 npla4 npoin4 npo5la npos4 npri4 n1q n4quef n1r nre4i nre3m nres5tr 4n1s ns2c n2sco ns3cot n4scu n5sec nsec4te n2ses n5seu n3sh2 n2si ns3ib n4sic n5sick n3sid n3sie ns5ifi ns3ing n3sio n3s2is nsi2t ns3iv nsolu4 n5son n4sore n4sory n3spir n3s2t nsta4 nstil4 n3su nsur4e n3swa ntab4u nt3age nt1al n4t3anc nt5and ntan5eo n4t3ant nt4ariu n5tasis nt3ast nt1at nt5ath nt3ati nt5ativ n5tau n1te n4tec n4tee. n4tees n3tel ntend5en n4teo n4ter. n3teri n5tern ntern5al nter5nat nth2 n1the nther5 nth5ine nt2i nt4ib n4tic. n5ticis n5ticiz n4tics ntic4u4 n3tid4 n1tie n4tify. n3tig nt5ilati n5till nt3ing nt5ing. nti3p n4tipar n4tis. nt3ism nt3ist n5titio nt3iz n2tj n1t2o n3tom ntoni4 n5top n1tr ntra3d nt3ral n4trant n3trat nt5ress nt3ril ntrol5ler n5trym n1tu n3tua ntub5 ntup5li n5tur n2ty n2u nu1a 5nuc 3nud nud5i nu3en nug4a nu3i nu4is 5nuk n4ulo n3ult nultim5 nu1me 5numenta 5numer 5numi 3nunc nu3tat n5utiv nu4to nu1tr n3v2 nve2 nvel3 nven4e nven5o nvers5an nvi4t nvoc5at n5w nwin4 nwom4 n2x4 2ny2 5nyc nym5it nyth4 n1z2 nzy4 2oa2 o5ace o3act oad5er oad5i o3ag oak5er o3ales oal4i oal5in o5alit oan4t oap5i oar5er oar4se oast5er oat5a oat5ee oat5er 4oba obe4l ob2i ob3ing 2obi3o ob3it o3bla ob1li 4obo ob3oc o5bol o5bot o3bra obrom4 ob5t ob3ul o3bus 2oc oc2a o4cab o3cad oc5ag o5calli o4c5ativ oc5ato 4o3ce2 o4cea ocen5o ocess4i och4e och5in o3chon ochro4n o5chu oci3ab oci4al o1cl o2cle o1cr ocre3 oct2 oc2te oc1to ocu4lu ocum4 oc5uo ocuss4 ocus5si ocut5r o1cy o5cyt ocyt5o od3al. ode4c o5deg ode4ga o5dend o3dent odes4 od3ica o4d1ie od3iga od4il od1is2 odis5ia od5it 5odiz od3li o2do od5olo od5ous o3dro od5ru o2du odu5cer o4duct. o4ducts od3ul o5dyt oe3a oe4bi oe5cu oe4d o5ee oe5ic o3elec oelli4 oelo4 oe3o4p oep5 o5eq o3er oes3t o1et o4et. oet3i oet4r 3oeu o3ev o3ex oflu4 4ofo o4ful ofun4 2o1g o2ga o3gam og5ar5 o3gas ogen1 o5gey o3gi o4gio og2na ogoni4 o4got o2gri o4gro og4sh o2gu o5gyr o1h2 o3ha ohab3 o3he oher4er o3ho4 ohy4 2oi oi4c o3ic. oi5ch o2i4d 4oide oig4 oi5ki5 oil3er oil5i oin3de o3ing oin4t5er oin4tr oi4o 4ois o3ism oi4t oit4al oith4 o1j ok4ine ok3l ok5u ola4c o4lack o5lali ol4an olan5d ol5ast olat5er ol5ch ole2c4 ol5eci ol5efi o3leo ole4on o3lep ol1er o3lest o3leu o1lia ol3ica o3lice ol5iciz ol5ick ol3ics ol5id. oli2e o3lier ol5ies. 
o5lif oli4f3e oli5go o5lina ol3ing oli5os ol5ip4 olis4 ol2it olle2 ollim3 ol4lope ol4lyi ol3mi o1lo 4oloc ol3oid o4lona olon5el ol1or o3los ol1ou 4ol1ub o3lumi o5lunte ol3us. oly3ph 4olyt 2om o1ma o4mab o2mac o2mal o4mane omast4 o3mat om4be ome4d ome4g omeg5a ome3li om3ena omen4t o3meri om1i o3mia omi2c omic5r om4ie. omil4 om4iny omiss4 om2it omme4 om2na omni3 o4moi omoli3 o2mo4n om5ony o4mos. omot5iv o2mou om5pil ompt5er ona4d on3ai o5nas. onast5i on5ativ 4onau on1c oncat3 on4cho 5ond5ar ond5ent on3der on3dr on5dy o2ne 4onea onec4r 4oned on1ee on5ell o3neo on3ess on1et ong3at on4gu 4onh 4o1nia on5iar 2oni4c onic5a onical4 on4id on3ies on3if o5nig o1nio onk4s 4onnes on5odi on5oi ono4mi 4o5nomic ono3s o5nota ons2 2ont ont5ane. on4ter onti5fi onton5 ont4r on4tre on5ur o5nus onvo5lu on2z 2oo oof3er oo1i ook3er ook3i oo4le ool5ie oo4m oon3i oo2p oop4ie o3opt oo4se oost5er oo2t oot3er ooz5er o1pa o4pab o5pali opa5ra opath5 o5pec opens4 op1er 3opera 4operag o1pha o4phe oph4ie o5phil op5hol o1phy ophy5la op1i op3ies op5ing o3p2it 4opl oplast4 o4poi opol3i opon4 op5ony op5ori opoun4 o2p5ov op2pl op5pli oprac4 op3ran opre4 opro4l op5rop op5so 1op1t op2ta op1u o5quial or1a or5ado ora4g o5rai or5al 4orals oram4 oran3e orator5 orb3in or4ch orch3i or4du 2ore or5ead ore5ar ore5ca ore3f ore3g or3ei oreo5l or3esc ore3sh or3ess orest5at or5este or5ett ore4v 5orex or4fr or5gn or1i 4ori. or3ia. 4orian ori4ci ori5cid orien4 or3if 5orig ori5ga ori4no 4orio. or5ion 4orios ork5a 2orm orm1i or3n4a 5ornis or3nit or3one o5roo or5ose or5oso or1ou orrel3 orres3 or4sc or4sey or4sti 2ort ort3an ort3at ort3er or5tes. or3thi or4thr or4tit ort3iz or4tor or5tra ort3re 4or1u or4un ory5p osa5i os3al osar5 o1sc os4ca os4ce o2sch o4sci osclero5s o3sec osec3u ose5g os5enc osens4 os5eo oser4 o2set os5eu o3sia osi4al osi4an os5ide o3sier os5if os1in o4sis o5ske o5son o3soph os3opo 4osp o3spec os1pi os4sa oss5ar os4sit 4osta ost5age os4tar os5tee os5ten osten5t ost5ica os3til o5stome ost3or 4osu os1ur 2ot ot3a4g o5talit ot3am ot4anic o3tap ot4atio o5ta5v o3tax o4ted oter4m ot5esta 4oth othalam5 oth5erin o5therm otherm5a o5thor o5tia o5till 5ot5iniz ot4iv o3tiva o5tivi o1t2o o5tone o4torn o4tou 4o1tr oturi4 oty3le o4u2 5ou3a oub2 ou5br ou5ca ou5co oud5i 4oue ou3et oug4 ou5ga ought5i ou5gi oul4t oult5i ou3m 2oun oun2d ound5a ound5el oun5gin oun3tr oup5li our3er ou5san 2ouse 5ousia ouss4 out5ish ouv5a ova3le o5var 4ovati ov5eling o4ver. over3b over3s ov4ete ovid5en o1vis ovis5o o2v5os ow3ag ow3an o5way owd4i owd3l ow1el owel5li ow5ha owhith4 ow1i ow5in owi5ne ows4 ow5sh ow5sl ow5y o4x ox3i oxic5ol ox5o 2oy oy5a oys4 2oz o1zo ozo5i o3zyg 4pa. pac4te pa5dou pad4r paes4 pa3gan 4pagat pag4ati pain2 4pairm pa5lan pal3in pa3lo p4als pan5ac pan1e pan3i pa4pa pa3pe pap3u pa3py 1par para5s par3l pa3roc pa3rol par5on 1p4as pass5ive pas1t pas4tin pa3ter pati4n p5ato pat4ric pa5tricia 5pau paul5e pau3p pa5vil 5paw pawk4 paw5ki 2p1b p1c4 p5d2 2pe. pearl5i pe4co pec4tu 2ped 5ped3a 3pede 3pedi ped3is 3peds pe2du p4ee pe2f 4pele pe5leo pel5v pen4at 5p4enc pend5er pen5dr pen4ic 3p4enn pens5ati pen5u pe5on 5perc percent5 4pere perem5i p4eri 5p4er3n p3eron per4os. per5tin pert5is per3v p4ery 2pes pes4s3 pes5til 3pet pet5all pet3en pe2ti pet3r pe4wa 4pex p1f p5g 2ph. 4phae pha5ged ph5al. ph2an phant5i phe4 ph5esi ph3et 3phib 4phic 1phil phi4n ph1is phi5th ph2l 1pho 4phobl 4phoned 3phor ph5oriz phos3p ph3ou 3phra 4phs 1phu phu5i 2phy. 3phyl 4pi. 
3piar 4pica p5ical pi3co pi4cr pict4 p2ie p4iest pi5eti p5ifie pig3n p2il 3pile pill5in 5pilo pi3lot pim2 pin4e pin5et 3pinge p4inn 5p4ins 3pi1o pip4a pi4pe 5piq pir5ac pir4t p4is. p4isc pis2s piss5a pis5til pis4tr p2itu 2p3k2 p2l2 1pla pla5no plant5er plas5tici pla5t4o 4ple. 4pled. 3pleg 3plen 2ples 4plism 4plist plu2m plum4be plumb5er p4ly 2p1m 2pn pnos4 1po 4po. po3ca 3pod 4pof 2p5oid pois5i po5lemic po4ly1 poly3s poman5 pom4e p4o2n pon4ac pon4ce pon4i4e 3ponif pon5ta 2pony po4pa po5ple 4porato por3ea 4pored pori4f por3p 3port por5tie 3p4os pos1s2 po1te poult5e pound5er pout5er p5oxi 5poy 4p1p2 ppar3 pparat5 p4pene ppet3 pph4 ppi4c p4pled p5pler p5plet ppress5o pprob5a 1pr2 prac1 pra5d prar4 4pre. preb3 pre1d pref5ere prel5ate 3prem pre5mat pren3 pres3a pre5scin p3rese 5pressi 5prici pri4es 4pri4m pring5er pring5i 4prio p5riol pri4os pris5in priv2 4priva 4pro. pro3bo p3roc3a pro4ch pro1l pron4a proph5e propyl5 pro3r2 pros4i pros5tr pro3th 4pry 2ps2 p3sac psal5t p3sh p1si p5sin. pso3m p1st psul3i 3psyc 2pt2 pt3ab p4tad p4tan p2tar pt5arc p1ted p5tena pt5enn 5ptery p5tet pt4ic p5tie p3til p2t3in pt4ine p3tise p5tisi p5tom p4tr p1tu pub1 pu5be puc4 puch4 pudi4c pu5er puff5er pu4lar pu5lar. pu5lis p4u4m pum4o p4un pun4a 3punc pun5gi pun3i pun2t pu3pi pur5b pur3c p4us push4ie pu3tat p5u5tis pu3tr 4p1w 2p4y py3e 3pyg 3pyl pyr3e py5t 4qf qu4 5quak 4quar qua5tio 2que. 3quera 4quere 4ques. 1quet 5quina 5quir 3quito 4quitu 4ra. ra3ba 5rabe 3ra3bin r2abo ra3bol rac4a r2acu rac5ula ra5culo r2ad ra4de rad4ine rag5ou ra3gr 3raill ra5ist 4ralia ra3ly r5amn ra3mu r4andi ran5dish ran4du ra5nee ran4gen ra3nia ra3noi ran2t ran5ted 5rantel rant5in rant5o rapol5 rap5to 4rarc rare2 rar3ef rar5ia. ras2 ras3c r2ase r4ask ra3so rass5a rass5in r4as5te ra5tap ra5tat rat5eu rath4e rat3if rat4in. ra5toc 5ra5tol 4r4atom ra4tos ra5tui rat5um rat3ur rav5ai rav5eli rav3it rawn4 ra3zie r1b r2ba r4bag rb3ali rb1an rbar3 r2be rbe5c r3bel rbel5o rb3ent r4bes rb2i rbic4 rbic5u r2bin r5bine rbit1 r2bos r4bum rbu5t4 r1c2 rcant5 rca4s r4cele rcen5er rcen5tena r2ces rcha3i rch3al rch5ard rch5ate r3cheo r4cher rch4ier r4chin rch3is r3chit rcil4 rci5nog rcis2 rciz4i r2cl r4cle r5clo rcolo4 rcrit5 rcriti4 rct4 rc5ti r5dam r4d1an4 rd4an. r2dar r5de4l r3dens r4des rd5ess rd5ian r4die r5dig rd2in rd3ing rdi3o rd1is2 rd5ler rd3li r4dol rd5ous r2e 4re. rea4 r4ea. react5iv re3af re3ag re5alt re5amb re3ani re5ant re5asc reas3o r5eau 3reav r5ebrate reb5uc re3cal rec4ce re3ce reced5en re3cha reci5si r4e1c2r rec4t3r re3cu 2r4ed re1de re3dis re4dol re1dr reed5i ree3m 3reer re2fe re3fin re5gali re5gra re3gre reg3ri re3gro reg3ul rei4 re3if re1in re3is reit3 reit4i re1la re1le 4reled re3lia rel3ic re5lig reli4q rel3li r5em. rem5ac reman4d rem5ato r3emp rem5ul rena4 ren5at r4endi rene2 ren4es r4eni renic5 ren4it ren4ter re5num re3oc 3reog re5ola re3oli 3reos re1pe re4per re5ph rep5id re3pin re3ple re4pre re1q rer4a rere4 re5rea re3r2u 2res. re3scr re3sel re3sem re3ser res5ist re5sit re3spe r3esq re5stal rest5er re5stu 3retar re3ten re4t4er3 re5term re1t2o re5ton re3tra re3tre re5tri re3tu re3un reur4 re1v rev3el revi4t r1f rf4l rfu4m r1g2 r4gag rgal4 r2ge r5gee r4gene r3geo r3ger rg5li rgu5f rh2 r5hel4 rhe5ol rhos4 3r2hy 4ri. ri3am ri5ap 2r2ib ri3bo rica5tu 2rice rich5om rick4en r4icl ri5cli ri3col ri5cor ri4cra 2ricu rid4al rid4e ri5el ri3er ri2es rift5er rif5tie 5rifuga ri5gam rig5ant ri5l4a r4ile rill5er. rill5ings 4rim. 
ri2ma rima4g rim5an4 rim3at r4imb rimen4 4rimm 4rims rin4e r4inet ring5ie rink5er r4ino rin4s rins5i rin4t5er ri3o rio4g 5rione ri4op ri5or ri5p2a ri5pie rip5lica ri5r ris4c ris4is r2isp ris4pa ris4pe ris5ter 4risti ri3ton r5it5r r2i4v riv4al ri5vall riv5eli riv3en riv3il 5ri5zo r1j r2k r5kas rk5ati r5kell rk5eni rk1er r3ket r3key r3kier r5kiest r5kin. r5kins rks4me r1la rlat3 r1le r3l4ic r3line r5lins r4lit r1lo r3mac rma5ce r5mad r2mal r4manc r4mano r4mari r4mary rm4as r4m3ati rma5toc r5ma5tol rme2a r2mic rm4ica r5m2id rm4ie r5mig rmil5 rmin4e rm3ing r4ming. r4mite. r3moc rmol4 r1mu rmu3li r2n2 rn3ab r3nac r5nad rn5ar rn3ate rn5atin rn5edl r3nel r3ness rn5est r3net r3ney r5nia rn5ib r3nic rn3in rn4ine r1nis rn3ist rni5v rn3iz rn5n r3noc r5nog rnt4 rnuc4 r5nut 4ro. ro4be rob3le ro5br 5rocc ro3cu r2od ro3do rody4n ro1fe ro3gn 4roi ro3ic roid3 ro3la r4oled rol5ite ro3ly romant4 ro5mel ro3mit romolec5 rom4p ro3mu ron4ac 4ronal ro5nate ron5ch ron4do rong5i r5onme ro1no ron4ton roo4 1room 5root r2op 4rop. ro3pel rop4ine r4opr r5opte ror5d 4rore r4osa rosi4a ro5sol 4ross ro5stat ros4ti ros5tit ro3tat ro1te ro4ter ro3tu 5roue roul3 round5er rou5sel 4rouss r4out r4ow row3er 4rox rpass5in rp3at rpe2 r3pent rp5er. r2ph rph5e r3phol rp3ing rp5is rpol3a r2p5ou rpre4 rpret5er r3pu r1q 4r1r4 rra4h rran5gi rrap4 rre2l r4reo4 rrhe3 r3ri rric4 rricu4 rri4fy. rrin5ge rri4os rrob3 rrog5 rro4t r5ru rry5 r3ryi r3rym 2r1s2 r4sag r2sal r5salis r5saliz r2san r4sar r2se r3sea r3sec rsel4 rsell5 rs3er. rs3ers r3set r3sha r3shi r4shie r5si2a rs3ib r5sie r4sil rs3ing r3sio r4sit rs3iv rs5li rstor4 rstrat4 r3su r4sus rswear4 rt2 rt3ab rta4g rt3age r3tar r4tare rt3c r1ted r4tedl r3tel4 r5tend rt3eni r5terer r5tet r5teu r4thene rth2i rth5ing. rth3ri r1t4ic r4ticl r5tiet r5tila r5till rtil5le rt5ily r2tin r3tina rt3ing r3titi rti5tu rt3iv r2tiz rt5let rt3li r1t4o rto5l rt5rid rt5si r1tu r4tus rtwis4 ru3a r4ube rub3r ru4ce r2ud rue4l r4uf ru3in ruis5i ru2l r4ume r4umi ru4more run4cl runcu4 runcul5 run2d4 run2e ru5net run4g run4t ru2p rup5lic ru3pu rur4i rus4p rust5at rust5ee rus5tic rus4t5u ru3tal ru3ti r1v2 r4vanc r2ve rvel4i r3ven rven4e rv5er. rv5ers. r3vest r3vet r3vey rvi4t r1w 2r2y ry5er 5rygm ry4go rym4b 3ryngo 4ryngol ryp5a ry2t ryth4i r2z 2sa. 2sab s3abl 5sack sac4q s3act sac4te sad5i sad5o 5sae sa4g 3sai sain4t 5sak sa2l sa5lac 3sale sa3lie s4al4t sa3lu sa4m sa5min sam5o samp4 san3a san4ded s4an4e san5gar san5if 2sant sant5ri s3ap sap3r sar5s 3sas. sas3s sassem4 s2a1t sa2te s5ativ s5atory sat1u 1sau sau5ci saur5 savi2 sa3vou 4s3b s4bei sbe4s sby3 sc2 s1ca sca5len sca2p scar4c scav3 s1ce s4ced 4scei 4s4ces sch2 scid5 s2co scof4 s4coi 3s4cope 5scopic 5scripti 2s1cu 4scura. 4scuras 2s1d2 2se. se2a s4eam seas4 sea3w sec4a sec5an se2co secon4 2sed se4da sed4it 3seed 3sei sei3g 5sela 4sele se3lec selen5 5self 2s4eme sem2i semi5d sem4o sen5g 3sens sen5sati sen5sori sent5ee 5sentm seo5log se2p sep3a sep4si 3sept sep3ti ser4an se5rene ser4to 4servo s2es 4ses. se5sh s5esta 1set 5seum 3sev sev3en sewo4 3sex sexo2 3sey 2s1f sfact5o sfi4 sfor5e sfran5 2s1g4 s2h 4shab sh4abi sh1er sh5et shil5li sh5iness sh3io 5ship s3hon 4shu4 shys4 si4all siast5 4s1ib s3icat 3sicc 2s5icl si4cu si5cul s4id 4sid. si4de side5l sid3en sid5eri 4sids 5sid5u4a si4ers sif4 sif5f si4g 1sili sim4ply 2sin s2ine sin5et 5sing5er sin3i 5sink si5nol si3nus 1sio4 4sio. si5o5s 3sip si4pr s1is2 4sish 4sism sist3a sist3o s1it si4te sit5om 4s1iv 5siva s1j s2k2 4sk. 
s5kar ske2 s3ket s5key s3kier s5kiest sk5ily sk5ines 4sks sky3l 2sl4 slang5i s1lat 3slau slav5eri s2le s5lea s3let s5ley s3lit slo3c slov5 s5luc 2s1m4 s3man smas4 s3men smi3g 3smith smo4d smu5tatio s1n2 s2na 2so. 2s3od sod3o sody4 3soe 4s3oid s2ol sol3a so5lan sol4er so3lic 3solve solv5er 1so2m soma5to 3some. so5mete so3mo s2ona son5at s4one son5or s2o2p 4sor3ie 5sorio sor4it s5oriz sor3o s3ory sos4 4sose so5th 3sou sov5e so3vi spast4 spens5a 4speo 3sperm s5pero spers5a sph2 s3pha 3spher spic5ul s2pid sp5id. s5pier spil4l s2pin sp3ing spi5ni spital5 s1pl sple2 s4ply s2po 5spom spon5gi 3spons 3spoon spru5d s4py s1r sre2 sreg5 srep5u sre4s 4ss s1sa s5sam2 s1sel s5seng s3sent ssent5er ss3er. s5seri ss3ers s5seu ssev3 s3sia s1sic s1sif ss1in ss4in. s4sine ss4is. s3s2it ss4ivi ss5li ss3m s4sn s1so ssol3u ssolu4b s4sore ssor5ial ss5po s1su ss3w st2 4st. stab2 sta3bi 4stak s4tale stant5iv s3tas. 5static st3c ste2 ste5ar ste5at s4teb s4tec 4s1ted s4tedl s4tedn 4stere ster4ia s4tern. s3tero st5est s1th s4tha s4thu s3ti3a 3stick s3ticu stil5ler s4tily st3ing 5s4tir s5tiz 4stl st3ler st3li s4toe 3ston stone3 ston4ie s5torat stor5ian s4tose s2tou s4tray stre4 strep3 3struc stru5d 2st3s s1tu s4tud stu4m stur4e 4stw s4ty 1styl 4su. su5an su4b1 subt2 suct4 sud4a su3et suf3f sug3 3sui sui5c su5ing 1s2ul s4u2m sum3i sun4a su5pe su3pin supra3 sur4as sur3c s4urg sur3pl su5su su5z 2s3v svers5a sves4 svest5i sw2 5swee swell5i 4swered 2swo s2y 4sy. sy4bi sy1c sy4ce sy4chr sy4d 1syl 3syn syn5e sy5pho syr5i 2ta. 2tab ta5blem 3tabli t2abo ta3bol ta4bou t4a3ce ta5chom ta3chy ta4cid t5ade tad4i 5t2adj ta5dor tad2r tae5n taf4 tage5o ta5gog 3tagr 3tah 1tai 3tail 2tair t4ais 1tak tal2c tal5ent ta5lep t4alia t4alin tal4l3a 5tallu t2alo4 ta3ly tam5ari 5ta3met tamorph5 tan5at tand5er t4ane 5tanel tan5ie t5aniz tant5an ta4pa 1tard tar5ia. tark5i tar3n 3tarr tas3i t3asm 5tass tas4t ta3sta tast5i4c t4ateu 3tatis t4ato. tat4ou tat4r tat3ut tau3to t5awa tawn4 t4ax 4t3b 2tc2 t1ca tcas4 tch5ett tch5u 4t1d4 4te. te5cha 5techn te3cr t4ed te5d2a 4tedd 4tedo 4teei te2g 5tegic t3ego teg1r teg3u tei4 te2l 4teled tel5iz 1tell 4te3lo 3tels tem3a 4teme te5mon ten4ag 4tenar 4tene t5enm 5tenna 4teno te5nog tent4a te2o teo5l 2tep te3pe tep5i tera4c t4erag t4erato 3ter3b 5terd 2tere4 ter3eb ter5ec 5terel te3reo 3teres4 1teri ter3ia ter5id ter5if t4erin ter5iorit ter3it ter5k 5ternit ter5no 3terr 2t2es 4tes. tesi4 t3esq t3ess. t5esses tes4t test3a 5teste test5er test5in test5or tes5tu teti4 tet1r tetr5o tew3ar 3tex 2t3f t3g 2th. tha4 th5al. thal3m 4the. 4thea th5eas 4thed 1thei 3theo theo3l t4her 5therap th5erc t5herd 4thered th3ern th3ery 4thi. t5hill 3think 5th4io th4is. th5lo 2thm2 th4mi th3oli 4t5hoo 4thopt 4thores 3thot 5thoug 1th2r 2ths 5thur 5thym 3thyr thys4 4ti. 1ti2a ti3ab 2t3ib 5ti5bu t1ic t3ic. tic5as t2ici tici5ar 3ti3cin t4icity ti3col tic1u 4ticule t3id. t4ida 3tidi ti3die t5ids 3ti2en 1tif2 ti3fe 4tiff 4tific. 3tigi tigi5o 4tigm 5tigu ti4ka ti4let 5tilin t4ill til4l5ag t4ilt 1tim tim1a 5timet4 t1in 5ti5nad 4tined tin3et ting5ing 3tinn 4tins t4int tin4te tin5ted tint5er tin3ue 1tio ti3oc tiol3a ti5omo 4tionem 1tip ti5plex ti3pli ti4q ti5qua t3iris 2t1is 3tisan tis4c tish5i 3tiss tis2t 5t4iste t4istr ti5t4an tith4e tit5il t3itis 3titl ti3tra 3tiu 2t1iv tiv5all t3ive tiv3is 2tl t1la tlant4 5tleb 5tledr 3tlef 3tlem 5tlen 5tletr 5tlew t1li tlin4 4t3m tmet2 tmo4t5 2t3n2 t4nere 2to. 
toas4 to1b 4tocc tode5c tod4i to5do 3toe 1tog 2t3oid 5tok 4toled tol4l tolu5 to5ly tom3ac toma4n tomat5ol tom4b to4mog tom5os ton4e ton5ea 3tonn ton3s top4e to5pia to4pos t1or to5rad 4tore tor5er tori4as tor5oi tor5p tor4q 3tos. to3s4p tos4t to5str to5talis to5taliz to3tem tot5u tou4f 5tour t3ous 4tov to3war t3p tr2 tra4co 4tradd 4traist tra5q trarch4 tra5ven tra5vers trav5est 3tray 4tre. 4tred tre4mo tren4 trend5i tre5pr tres4s 4trew t5ricl 3tricu t2rie tri5fli trifu5ga 2tril tri3li tri3me t2rit 4trix t4rod tro5f 5troop tro4pha tro3sp t2rot t5roto tro1v 3troy t4ruc tru3i 2t4ry trys4 4t1s t2sc ts4h ts2i t4sil tstay4 2t1t4 tta4 t3tab t5tan t5tas t3ted t4tere t5terer t5test t3ti tti3tu ttitud4 ttitu5di t3tler t3tli t5toi t5tor t3tos tt5s t4tupe t2ty 4tu. tu1a tu4al5li tuari4 tu4bin tu5bu tu5den tud5ie tu5en 4tuf tu1i tu4is 2tum. 3tumi 4tums 3tun tun4a tu4ne tun5it tup5let tup5lic tu5rac t4uran turb3a tur4d turf5i 5turit tur4n 5tur5o 1tut 4tut4iv t1w t3wa4 t2wi twi5li t3wit t3wo twon4 4ty. ty4a 5tych ty4let tyl5i ty5mi 1typ 3type 1tyr1 2tz2 t5zia t5zie 2ua2 ua3ci u2ag ua5h u1al ua5lu uan4o uant5is uant5it uar3a uar2d uar3i uari4n uar5ters uar4t5i ua5tern uba4 ub5bly u1b2i u4bicu ub3lin ub5lo ub3ra 4uc u1c2a uccen5 u4cend u4ch u5chr uc3l u4com uco5t uc2tr uc3ub uc5ul u5cum u5dac ud1al ud4e ud5ep u4der udev4 ud4g udi4cin ud3ied u5dinis udi3o u5ditio u2do u5doi ud5on u5dor ueb4 u4ed uen4o uen4ter uer3a ues4s uest5rat ues5tri ue4t uf2 3ufa u3fl u4fo uft4 uga4c ug5lif ug2ni u4go ug3ul ug3ura uhem3 2ui2 ui3al u2ic uicent5 uid5o uil4a uild5er ui3lib uil4t uinc5u uin4s uint4 uin4ta ui5pr uis3er uis4t uisti4 uit5er ui5val5 ui3vo u2iz 4ul. u1la u4lab 4ulac ul5ard u5lat ul4bo ul3ca ul4ch 5ulche 5ulchre 4ulea u5lee u1len4 4ulenci u5lent ulet4 ul4ev ul2fa ul2i ul4ia u3line ul3ing ul5ish u5liti u5lity 4ull ul4lat ul4l5ib ul4lis ul4lit ul3m u1lo u5lom ulph3i ulph3o ulp5ing ul4po 2uls ul3sif u1lu ul1v4 u1ma um3am umar4 u5mas um4bar. um2bi umen4t u1mi u4mic u2m5if umi4fy umi5lia umin4ar u4mined u4m3ing u4mora u4mos um2p um4pa ump3er ump5li umpt4 ump5te u1mu umu4lo un1 u4n3a4 un5ab unabu4 un4ae un4as. un2ce un4dal un3ded unde4t undeter5m undi4c un4die un3do un4dus u3n2er unho5li un2i u1nic un4ie un3in un4ine uni5p uni3so un3ist uni1v un3iz unk5eri un5ket un3kn 2unn un4nag un5o un5r un3s4 un5sh un2ti until4 unu4 un3us uo3de uodent4 u5oros u3os uo5tatio u1ou 2up u1pat u1pe u5pee uper3 u1ph u5pid up3ing u4po u5pol u2pr upre4 u5quet u4r ur1a 4ura. ura4ci 4urae ura2g 4uranti uras5 urb5ing ur2c urc3a ur5den. ur5deni ur5die ur4du ur3ea ur5ee ur1er ur3ers ur1e2t ur3ett ur2f ur3fa ur1i u5ri5cu ur4ie. ur5ifie uril4 ur4ili ur5ion uri4os. url5er ur5lie url5ing ur1m4 urn3al urn3er urn5s ur1o uro4d ur5o4m ur5ot uroti4 urpen5t urph4 ur2s urs5al urs5er ur3sh urs3or ur5ta ur1te ur5tes urth2 ur3the urti4 ur1u ur4va u3sad us3ag us3al us4ap us3at 2usc us4can ush5a us5ian usil5 u4s1in usk5er us1p us4pa uss4e 4ust us3tac us5tan ust4ic us5tici ust5ig ust3il us1to4 us1tr us4tre usur4e us5uri u3tane utch4e ut5eni u5teo u4tere ut2i u3tie ut3ing u5tini u3tio ut5ism ut3ist 5u5tiz ut3le utli4 ut2o u4to5s u4t1ra uts2 ut5sm ut4tone u3tu u4tul uu4 uv2 u4va uve2 uven3 uv5eri u5vin ux2o uy4a uy5er 4va. 2v3ab 5vac va1ca va5ceo vacu1 v4ad 3vag3a va4ge 4vaged vager4 vag5r v1al. 1vale vali2 va5lie val4ise 5valu 5val4v vam4i va5mo 5vann vanta4 4vantl var4is 4vase vas5el5 v5a4so vast3a v4at. 5vatee vat4ina 4vatu 2ve. 
vect4 ve3g 3vei 2vel vel3at 4vele v3eler ve5line v1ell v4ella vel5ler vel3li vel5opi ven4al ven4do ve1ne ve5nia vent5o ven4tr 4venu v5en5ue 5ve3o 5verb verde5v 4v4ere4 ver5ea ver3ei v5erie ver3m4 ver4ne 5verse 4ves. 4vi. 5vialit vi4atr vi1b4 vic2 vi4ca vi5cari vice3r 5vict2 5vicu 5vider vign3 vi4l vil3i 3vili4a v5ilise v5ilize vil5lin vim4 5vime 2v1in vin4ac 3vinci vin2e 5vinit v5iniz vint4 vin5ta 3vi1o viol3 vi5om 5vi3p vire4 vi5rid vir3u 5visecti 5visio v3ism 2v5ist vi2t vit2a vi3tal vi5tel v5itie vit1r vi3tu v3ity viv5al viv5or vi5zo 1vo 2vo. vo2l vo5litio vol4ubi volv4 4von vo5rac 3vorc 4vore 3voro vo3tar 2vow vr4 v5ra4 v5ri v5ro vrot4 4vs v3ure 2vv2 v5ver v5vi 4vy 4wab wag3o wais4 w3al. wall5er w3als wan5gli wank5er war5ded ward5er ward5r war4f war4te war5thi wass4 was4t wa1te wav4ine w1b4 w4bon w5c w5die w3dr we4b w4ed 3weed 5wei weight5i weir4 wel3i weliz4 wel4izi wel4li went4 wes4 west3 w5est. w5f wh2 w5hid wi2 wid4e wi5er will5in wim2p win2e wing5er win4tr 3w4ise with5eri w3la wl1er wl1i wl4ie w1m 1wo wol4 wol5ver 3wom won2t word5i wotch4 woun4 wp5in wra4 ws5ing w5ster wt4 w5te w3to wy2 wz4 x1a x4ach x4ade x2ag x3agg xa5met x3ami xan5d xano4 x2as xas5p x3c4 xcav3 xcor5 xe4 x1ec xec3r xe5cutio xecut5o xe2d x5edl x5edn x5eg x1em x3en xen4op x3er xer4g xer3o x1h xhort4a x1i x3ia. x4ias xi4c x5ige xim3a x4ime ximet4 x3io xi4p x4it. x4its x1o x4ode x5om xo4mat xo4n x4os xotrop4 x3p xpel4 xpo5n2 xpoun4 x1s2 x1t2 x4ted xtens5o xter3i xter4m3 xtern3 x4th xti4 xtra5d xtra3v xtre4 xu4o x1ur xur4b x5us x5w xx4 xys4 xy3t y1a2 y5ac 1y2ar 3yard yas4i 4y1b yb2i yca5m y5chede ych5is y3cho y4chose yc1l yclam4 y4coli y4coll ycom4 y2cos y1d4 yda4 yder4 ydro5s y4drou y3ee yel5o y3en y1er y3est. yes5te y5ett y5f y1g ygi2 ygi5a y3gl ygo4i y1h y1i y3in yle2 ylin5de yllab5i yl3os yl5ou y1me4 y3men y5met y5mia ym5in ymot4 ym4pha yn1 ynago4 ynand5 yn5ap4 yn5ast yn4ci ynd4 yn2e yn3er yng4 yn4gol yni4c yn4y y1o2 yo3d yo4gis youn4 young5 2yp yp5al yper3 y5pere y4peri y4pero y4pet y2ph yph4e yph3i y4p1i yp1n ypo1 y4pox y2pr yp5ri yp4si yp5syf ypt3a y5pu y3rag yr3at yr3ic y5rig yr3is yr3i4t yr5olo yr4r yr4s yr5u 4y2s ys5ag ys5at y3s2c y3sh ys1ic ys3in ysi4o yso5 ys4so ys1t ys4to y3u yv4 y3w yz5er yzy4 z1a1 2za. za4bi za2i z4as za4te zd4 zeb4 ze4d zen4a z5eng zer5a z3et4 z1i zib5 5zic4 z2ie zi5m zin4c3i z3ing zing5i z4is 3zlem z3ler z3li 4zo. 5zoa zo3an 3zoo2 zo3ol zo3on zo5op zo5oti zo5p zot2 z5s 5zum 4zy. 
zz2 z3zar z5zas z3zie zzo3 z5zot",
+ ["compression"]="zlib",
+ ["data"]="[binary zlib-compressed pattern data omitted]",
+ ["length"]=7616,
 ["minhyphenmax"]=1,
 ["minhyphenmin"]=1,
 ["n"]=1475,
diff --git a/tex/context/patterns/lang-hu.lua b/tex/context/patterns/lang-hu.lua
index 06d5005ec..b712feb4a 100644
--- a/tex/context/patterns/lang-hu.lua
+++ b/tex/context/patterns/lang-hu.lua
@@ -58,7 +58,739 @@ return {
 },
 ["patterns"]={
  ["characters"]="abcdefghijklmnopqrstuvwxyzáäéíóöúüőű",
  ["data"]=".a2 .adatát1a2 .ada2t1e2 .adás1s .adá2s3z .ad1yé. .ad1yi .ag1os .ag2ra .agyag1g .agy1á2ram .agy1árt .agy1e2 .agyo2 .agyon1 .agy1ó2 .agy1ű2 .akác1c .aká2cs .ak2h .ako2 .ak2t1ér.
.ak2t1orr .ak2t1Å‘2 .aku1p2 .ala2g1 .alak1a2 .ala2k1ö2 .ala2k1Å‘2 .ala2pa .ala2p1á .al1eg .al1is .al2járn .alje2 .al2j1el .aljel1ö2 .alo2m1 .al1os .al2t1a2k .al2t1erj .al2tén .al2térn .al2tért .al2tin .am1ak .ango2l1ó2 .an1th .anti1s .apa1p2 .apá2ly1á2z .ara2ny1e2 .ara2sze .ar1á2c .ar2cal .arc3c .ar2c1e2 .ar2cél .ar2c3há .ar2c3hoz .ar2cin .ar2cio .ar2col .ar2cö .ar2c3s .ar1kh .at2h .az1a2 .az1ám .aze2 .az1ó2 .á2 .ács1ék .ág1árn .ág1árt .ág1ó2 .ágy1ala .ágy1asz .ágy1árt .ágy1á2z .ál1a2 .ál1é .ál1i2 .áligaz1 .ál2l1alj .ál2l1alt .ál2lin .ál1o2k .ál1ú .ár1aj .ár1aktá .ár1a2l .ára2m1e .ár1a2p .ára2sz .ár1aszó .ár1ác .ár1ál .ár1á2z .ár1d2 .áre2 .ár1em .áré2 .ár1ir .ár2nyel .ár1ol .ár1om .ár1os .árrés1s .ár1s2 .ár1t2r .ász1ál .ász1árb .ász1á2ré .ász1á2ri .ász1á2ro .át1a2d .át1a2k .át1alt .át1a2n .át1ar .át1a2s .át1av .át1á2 .át1e2 .át1é2 .át1i2 .át1ol .át1o2r .át1o2s .átó2 .át1óh .át1óv .át1ö2 .át1u .át1ü2 .át1ű .b2 .ba2b1a2rá .ba2bál .ba2b1e2 .ba2bol .ba2j1á2rat .ba2j1e .bak1aszt .ba2kál .ba2k1á2ro .baké2 .ba2k1ö2 .ba2kur .ba2l1e2g .ba1ts .ba2u .bá2l1ó2 .bár1a .bá1th .be2at. .be1d2 .bei2 .be1kr .be1str .be1szk .beté2t1e2l .be1tr .bér2c3sí .bé2r1o .bi2ke .bi2os .bi2ot .bita2 .bi2tag .bi2t1á2 .bi2tel .bi2t1er .bi2t1orr .bi2tur .bo2g1ó2 .bol2ta .bo2ly1ó2 .bo2nav .bo2raj .bo2ran .bo2ras .bo2rat .bo2rác .bo2rál .bo2r1odv .bor2sét .bort2 .bo2tá .bra2i .bu2sz1e .c2 .cen2t1á .cen2t1ó2 .ce2t1e2l .ce2t1ű .cé2l1e2 .ci2n1á2 .cito1 .cs2 .csa2k1 .csa2p1á2g .csa2t1é2 .cse2l1Å‘2r .d2 .dac1c .da2c3s .da2i .dal1an .da2lás .da2l1e2 .da2l1ék .da2lén .da2l1í2 .da3lol .da2l1ó2 .dan1n .da2u .den2g1 .dé2la .dére2 .dé2res .dé2sa .di2afo .di2aka .di2al .di2csá .di2ómá .dó2mor .dú2ra .e2 .eb1eg .eb1ir .eb1í .eb1u2 .eg2é .egres1s .egy1a2 .egy1á2 .egy1e2lÅ‘r .egy1ev .egy1in .egy1ö2 .egy1ü2l .el1a2k .el1an .el1ap .ela2s .el1ass .el1aszn .el1aszo .el1aszv .el1á2 .el1e2c .el1eger .ele2gyá .el1e2h .el1ejt .el1e2l .ele2ma .ele2má .ele2meg .ele2mel .el1emele .el1emels .el1emelt .el1e2més .el1e2n .el1e2p .el1e2r .el1e2se .el1e2sés .el1esh .el1e2si .el1esn .el1e2sÅ‘ .el1ess .el1este .el1estél .el1estü .el1e2sü .el1esv .el1e2sz .el1e2t .el1e2vet .el1e2vez .el1evi .elé2d .el1éde .el1é2gek .el1éh .el1ékez .el1é2le .elé2n .el1éne .el1é2p .el1é2r .el1é2tet. .el1é2v .el1id .el1ig .el1i2h .el1ik .el1i2mit .el1in .el1ir .el1i2s .eli2t1o .el1itta. .el1itták .el1izz .el1í .ellege2 .elo2 .el1okk .el1oko .el1or .el1ó2 .elö2 .el1ök .el1öle .el1ölé .el1ölh .el1öli .el1ölj. .el1ölje .el1öljé .el1öljü .el1ölne .el1ölné. .el1ölném .el1ölni .el1ölÅ‘ .el1ölt. .el1ölte .el1ölté .el1öltn .el1öltü .el1ölün .el1ölv .el1öv .előé2 .el1s2 .el1t2 .el1u .elü2 .el1ül. .el1ülh .el1üli .el1ülj .el1üln .el1ülte .el1ülv .el2v1el .el2v1ég. .es1er .ese2tel .es2t1a .es2t1á .es2t1é2ke. .es2t1é2kek .et2h .etilén1g2 .evés1s .ex1el .ez1á .ez1e2l .é2 .édes3s .ég1a2 .ég1eg .ég1e2rei .ége2s .ég1esz .ég1ér. .ég1é2ré .ég1érn .égés3s .égé2sz .ég1észb .ég1észe .ég1észé .ég1észn .ég1észr .ég1ö2 .ég1u2 .éh1év .éj1a2 .éj1á2 .éj1el .éj1u .ék1a2 .ék1á .ékes1s .ék1ir .ék1o .él1a2 .él2c1i .éli2k .él1ike .él2v1á2 .ép1es .ép2pa .ép2p1el .ép2pé .ép2po .ér1a2n .ér2c1e2l .ér2c1é2j .ér2cék .ér2c3sí .ér1e2l .ér1e2s .éré2sz .ér1észé .ér1é2t .érü2 .ér1ül .ér2vad .ér2val .ér2v1ég. .ész1a2l .ész1á .észe2 .ész1el .ész1em .ész1es .ész1é2k .észigaz1 .ész1o .ész1Å‘2 .ész2t1örv .észü2 .ész1ülÅ‘ .év1á2g .év1essz .év1é2g .év1é2k .f2 .fa2it. .fa2leg .fa2n1év .fa2r1ont .fas2 .fa1st .fat2 .fa1tr .fe2leme .fe2l1essz .fe2lev .fé2k1e2l .fé2m1a2 .fé2m1á2 .fil2méré .fin2ge .fogó2s3zá .fol2t1a2 .fö2lÅ‘ .fö2lül. 
.fölül1e2 .g2 .ga2zan .gát1al .gá2te .gá2z1ó2 .gázs2 .gá2zsu .gáz1z .gene2a .ge2od .ge2os .gesz2t1í .gé2d1 .gé2na .gé2ná .gén3n .gé2pe2lem .gé2p1i2p .giga1s .gonor1 .gonorr2 .gó2ce .gó2la .gó2lá .gó2lel .gó2l1e2s .góli2g .gó2l1iga .gó2lis .gÅ‘2z1á2 .gÅ‘2zen .gÅ‘2z1Å‘ .gÅ‘2zs .gu2i .h2 .ha2b1e2 .ha2b1ol .ha2bor .ha2b1Å‘ .ha2b1u .ha2dal .ha2d1e2 .ha2dz .ha2ik .ha2j1e2 .ha2jom .ha2lác .halá2l1ó2 .ha2lárv .ha2leg .ha2l1el .ha2lep .ha2let .ha2l1e2v .ha2lis .han2gad .han2g1e .ha2sor .has3sz .ha2tag .ha2t1at .ha2t1e2 .ha2told .há2m1a .há2ny1in .háro2m1e .há2t1alj .há2tus .há2zol .há2zó .he2i .hé2t1ez .hé2t1o .hit1a .hi2tis .hodás1 .hol1l .hol2t1e .ho2ne .hÅ‘2sá .i2 .ike2r1i .ikerü2 .ike2r1ülé .ikon1s .ima1s .im1p2la .in1aktí .in2gin .inte2r1a .io2n1a2 .io2n1á .io2ne .ion3n .ipa2re .izo2m1ért .í2 .íjá2t .íj1áto .ín1e .írá2s1ág .írá2s3z .ív1a2 .ív1á2r .ív1ell .íz1a .íz1in .j2 .jaké2 .ja2kér .ja2kov .járműká2 .já2szá .já2szó .je2gy1a2 .je2gy1á2 .je2l1a .je2leg .je2lev .job2b1ol .jó2dal .jó2s1e2 .jó2t1á .k2 .kale2i .ka2nar .ka2n1e .kapolc1 .ka2rala .ka2ras .ka2r1á2s .kar2c3se .kare2 .ka2rem .ka2rék .ka2ring .ka2rí .ka2ró2ra .ka2r1ó2rák .ka2r1ü .ka1th .ka1tz .kár1a2d .kás2 .kása3l .ke2csó .ke2l1á2 .ke2l1e2g .ke2l1e2ve .kel1ö .ker2ta .ké2nét .ké2p1és .kéta2 .ké2tab .ké2tad .ké2t1ag .ké2taka .ké2tal .ké2tan .ké2tap .ké2tas .ké2tat .ké2tau .ké2t1á2 .ké2t1ele .ké2t1ez .ké2t1o2 .kéze2 .ké2z1el .ké2zem .ké2zs .kéz1z .kiá2 .ki1g2 .ki1krá .kió2 .ki2ság .kiű2 .kla2uz .kle2i .kló2re .ko2rác .ko2rí .ko2sis .kó2d1a2 .kó2r1e .kó2r1é2s .kö2b1öl .kö2d1Å‘ .kö2zis .kö2z1Å‘ .köz1z .kr2 .kun1a .kvar3k. .l2 .lak1a2d .la2kal .la2k1an .la2kép .la2kor .la2kÅ‘ .lan2t1e .lan2t1ó2 .la2pal .la2p1ác .la2p1á2r .la2p1e2 .la2p1or .la2p1osz .la2pó .lá2b1e2 .lá2bil .lá2bor .lán2c3s .lá2nyan .lá2ny1e2 .lá2p1e .lá2p1il .lá2z1ó2 .lá2z3s .le3dé .le2g1 .le3g2ali .le3g2elés .le3g2esle2g1 .le3g2esle3g2esle2g1 .lege2t .le4géne .leg3g .le3gy .lei2d .leí2ro .leí2ród .leí2ru .le1kl .le1k2r .lemez1z .le1p2ré .le1s2m .le1t2r .leü2 .lé2c3s .lé2g1a2 .lé2g1á .lége2 .lé2g1el .lé2gér .lé2go .lé2gy1á2 .lé2p1a2 .lépü2 .lép1ülé .lé2t1a2 .lé2t1á2 .lét1elek .lé2t1e2lé .lé2t1érd .lé2tör .lisz2ta .lisz2t1á2 .lisz2ter .lito1s .lob1e .lobi2 .lo2bin .lo2mag .lo2mal .lo2m1á .lom2b1e2 .lo2me .lo2m1é2t .lon2csi .lófo2g1a2d .lÅ‘2cs1ö2v .lÅ‘rés3s .ly1o .m2 .ma2e .ma2gal .ma2gar .ma2gál .ma2g1e2 .mag1g .magó2 .ma2gór .ma2in. 
.ma2r1ác .ma2r1ing .masz2k1or .masz2k1ö .ma1th .ma2uz .má2r1is .me2g1e2 .me2g1é2 .meg1if .mego2 .me2g1Å‘2 .me2i .mel2lá .me2ny1u2 .me2zá .médi2af .mé2ná .mé2no .mé2sza .mé2szet .mé2szi .mé2szo .mé2zis .mi3rá .mo2lyan .műi2 .mű1kr .mű1s .n2 .na2gya .na2gyá .na2gye .na2gyú .na2pap .na2p1ar .na2pál .na2p1e .na2pés .na2p1o2r .na2pö .ne2ma .ne2meg .ne2m1el .ne2min .ne2ol .ne2szá .net2t1a2 .ne2um .né2gyá .né2pa .né2v1ál .né2vel .no2r1 .nya2k1á2ro .nya2k1e2 .nya2k1ö2 .nyol2c1an .o2 .oda1 .ok1a2d .ok1a2l .okka2 .ola2j1e2 .ola2sz1ó2 .olda2l1út .or2r1aj .or2r1alt .or2ran .or2r1e2 .orré2 .or2r1és .or2v1a2 .or2vá .or2v1é .orvi2 .or2vis .ot1tr .ó2 .ólo2m1a .óme3g2 .ón1a2 .ón1á2 .ón1e2 .ón1év .óvá2s1árt .ö2 .ön1d2 .ön1e2 .öni2 .ön1í .ön1k2 .ön3n .ön1o .ön1Å‘ .ön1s .ön1ü .össz1a .össz1á .öte2 .öt1eg .öt1el .öt1t2 .öv1e2g .öv1e2l .öv1Å‘2 .Å‘2 .Å‘a3l .Å‘i3r .Å‘r1ab .Å‘r1an .Å‘r1e2g .Å‘r1é2g .Å‘r1é2s .Å‘r1ist .Å‘r1o .Å‘r2s1ág .Å‘r1u2 .Å‘r1üg .Å‘s1a2 .Å‘s1á2 .Å‘s1eg .Å‘s1e2l .Å‘s1e2r .Å‘s1e2s .Å‘s1é2g .Å‘s1ért .Å‘s1í2 .Å‘s1o .Å‘s1ó2 .Å‘s1p .Å‘s1tr .Å‘s1u2 .Å‘s1ú .Å‘sz1a2 .Å‘sz1á .Å‘sz1e2g .Å‘sz1el .Å‘sz1em .Å‘s3zene .Å‘s3zené .Å‘z1a2 .Å‘z1eg .Å‘z1e2l .Å‘ze2t .Å‘z1ete .Å‘z1ék .Å‘z1ére .Å‘z1Å‘ .Å‘z3s .Å‘z1u .p2 .pa2da .pa2d1á2 .pa2din .pa2d1ó2 .pa2i .pa2par .pa2pál .pa2pe .para1f2r .parke2 .par2ker .par2ta .par2tel .par2ter .pá2c3s .pán2ta .pár2t1ö .pen3ny .pe2r1a2 .pe2r1á .pe2r1enc .pe2rok .pe2rül .pe2s .pia2c3s .pina1 .pin2t1ó .ple2i .ponta2 .pon2te .po2rad .po2ral .po2ran .po2rác .po2rál .po2re .po2r1us .pó2kis .pó2k1ö .pó2rás .pó2t1e .pó2t1é .pre1k2 .prés3s .proto1p2 .q2 .r2 .ra2b1as .ra2b1á .ra2be .rabic1 .ra2b1il .ra2b1i2z .ra2bí .ra2bor .ra2bö .ra2b1Å‘2 .ra2bú .ra2gal .ra2j1ö2 .rajtó2 .raj2t1ór .rá2cs1á2z .rá1dr .rá1fr .rá1gr .rái2 .rán2c1e .rány2 .rá1spr .rá1s2t .rát2 .rá1tk .rá1tr .re1k2re .ren2d1a2 .ren2d1Å‘2 .re1p2ri .rep2ro .re1prod .rete2k1 .ré2m1emb .ré2mu .ré3p2a .ré2sa .rés3szer .ré2sza .ré2szá2 .ré2sz1e2le .ré2szell .ré2szer .ré2szí .ré2szo .ré2szÅ‘ .ré2t1a2 .ré2t1á2 .ré2v1á .ré2zá .ré2zel .ré2zet .ré2zis .ré2z1o .réz1z .ri2zso .rizs3z .rí2má .ro1c2kos .romé2 .ro2m1ét .ro2min .ro2mis .ros2t1e2 .rug1g .ru2m1a .ru2mil .rú2de .s2 .sa2h1a .saj2te .sa2s1or .sa2vad .sa2v1ara .sa2v1ál .sa2vári .sa2v1e2 .sa2v1é2 .sá2r1ó .sá2rú .sás1s .sá2s3z .sá2v1a .sá2vá .sá2vó .sc2 .se2bal .se2b1á .se2bel .se2besz .se2b1o .sé2fi .s3gr .si2em .si2ók .sín3n .sí2p1Å‘2 .sí2r1a2 .sí2rát .sk2 .so2kél .so2kil .so2kis .so2kol .so2m1ag .so2mat .so1ny2 .so2ral .so2rál .sör1e2g .sp2 .spor2t1á2 .st2 .star2t1a2 .sz2 .szaba2d1e .sza2k1at .sza2k1ás .sza2k1e .sza2kö .sza2k1ü .szá2l1ó2 .száz1z .szeg1ér. .sze2gí .sze2i .sze2m1á .sze2m1ér. .sze2m1é2ri .sze2r1ág .szer2v1a2d .szer2v1e2v .sze2szá .sze2szeg .sze2sz1ó2 .szé2fa .szén1n .szé2t1 .széte2 .szín3n .szk2 .szo2l1ó .szókész1 .szó2szak .szószö2 .szó2sz1öv .szt2 .t2 .ta2gal .ta2g1a2r .ta2g1á .ta2g1e .ta2gép .tag1g .ta2g1i2n .ta2gö .ta2nag .ta2n1as .ta2nál .tan1d2 .ta2n1e2 .ta2n1év .tané2ve .ta2nis .tan1n .ta2n1ó2 .tant2 .ta2n1u2s .ta3rá .ta2t1ál .ta2t1árai .ta2t1e2 .ta2t1í .ta2tor .ta2tur .tá2l1ó2 .tán2cse .tá2p1ol .tár2sor .tár2s1ö2 .tár2t1es .tár2t1ölt .tá2v1a .távi2 .tá2v1ir .tbc1 .te2aka .te2ar .te2j1á2 .te2j1eg .terape2 .ter2v1a2 .ter2v1á2 .tes2ték .tes2t1öl .tetraé2 .teza2 .té3k .té2nyem .té2nyí .té2ra .té2rá .té2ret .tér1int .tí2z1ó2 .tí2zs .tízü2 .to2káro .tol2le .topa3u2 .to2r1á2l .to2r1odv .tor2z3se .tó1st .tó2t1ér. 
.tön2k1a .tÅ‘a2 .tÅ‘e2 .tÅ‘2gya .tÅ‘2r1éss .tÅ‘2r1é2sz .tra2u .turnus1s .tus3s .tu2sz .tus3zá .tú2r1att .tű2z1ért .tű2z1Å‘2r .tű2zse .tyú2ka .u2 .ugrás1s .un1in .uni2o .utas1s .utás4 .uto2 .utó2d1ö .ú2 .új1as .új1e .úr1a2 .úr1á2 .úr1e .úszós2 .úszó1sp .út1a2 .út1á2 .út1e2 .út1ol .út1Å‘ .út1ü2 .ü2 .ügy1ér. .ük1a2 .üstö2l .üs2t1ölÅ‘ .ütÅ‘kész1 .üve2g1e2l .ű2 .űr1a2 .űr1á2 .űr1e2 .űr1é2s .űr1éte .űri2 .űr1ita .űr1öss .űr1s .űrt2 .v2 .va2d1al .va2dár .va2dét .va2d1o2r .va2dóv .va2d1ö .va2d3z .va2gy1i .va2j1e .va2k1ak .va2kal .va2k1an .va2kap .va2k1ár .va2k1e2 .va2k1ö .va2rak .va2r1á2s .va2r1e2 .va2r1ing .va2sab .va2sar .va2s1a2tom .va2s1á2g .va2sárk .va2sás .va2s1e .va2sék .va2s1i2r .va2sol .va2s1or .va2só .vas3s .vas1tr .va2s1ü .va2s3z .vács1 .vá2dá .vá2d1e .vá2ma .vár1a2dá .vá2r1i2s .vá2r1ol .verés1s .ver2s1ég .ver2s1o .ver2sz .vé2g1é2k .vé2g1o .vé2nyel .vé2r1a2 .vé2rá .vé2r1eb .vé2r1eg .vé2rik .vé2r1o .vé2ró .vér2t1ék .vé2ru .vé2sza .vé2szá .vé2szer .vé2szí .vé2szo .vé2sz1ó2 .vi1g2n .ví2zed .ví2zem .vona2t1út .von2z1e2 .w2 .x2 .y2 .z2 .za2be .za2b1i2n .zai2 .za2j1e2 .za2j1ö .za2jut .zá2r1a2dá .zá2r1e .zá2r1ó2ra .zárta2n .zár2t1any .zár2t1é .zár2t1ö2v .ze2i .zé2t1 .zs2 .zű2r1Å‘ 2a. a1a aa2be aa2cé aa2ch aa2da aadás1s aa2dássz aa2dó aa2du aa2fo aa2ga aa2gi aa2gó aa2gy aa2já aa2ka aa2ká aa2ko aa2ku a2al. aa2la aala2g1 aa2lá aal1eg aa2lé aalmana2 aalmanac3 aa2lo aal1os aa2lu aa2ma aa2na aa2ne aa2ni aa2no a2ans aa2ny aa2pa aa2pá aa2po aa2pu aa2ra aa2rá aa2ré aa2ri a2arl aa2ro aa2sz aa2to aa2ty aa2ur aa2ut aa2va a2avo aa2zo a1á aá2bé aá2cs aá2fá aá2ga aá2gá aá2gé aá2gi aá2go aá2gu aá2gy aá2hí aá2ju a2ál. aá2la aá2lo aá2po aá2ra aá2rá aá2r1e2 aá2ré aá2ri aá2ro aá2ru aá2rú aá2sa aá2sá aá2so aá2só aá2ta aá2t1á2 aá2t1e2 aá2té aá2t1i2 aá2tí aá2to aá2t1ö aá2tu aá2tú aá2tü aá2zá aá2zó 2abab ab1adó aba2dz 1a2bajg ab1akk 2abal 2aban aba2nal aba1pr 2abar aba2rat a2b1a2ráb a2b1au 2abáb abá2b1u2r 2abád 2abán a2b1áp abá2rak ab1á2ron ab1á2rú 2abáz abb2a 1abbah 2abea abeá2 a2b1e2b 2abec ab1edé 2abeé 2abef 2abeh 2abei 2abej ab1ejt ab1ela ab1e2lá 2abele abe2lem 2abels ab1elsz a2b1elt ab1elv 2abem ab1emb a2b1erd 2abetá 2abete 2abeté 2abeto 2abetö 2abev a2b1ex 2abék 2abél 2abén a2b1é2ne a2b1ép 2abér a2b1érz 2abétá a2b1éve ab1fl ab1fr 2abic ab1i2do 2abie ab1i2ke ab1iks a2b1i2nai abi2náb a2b1ing 2abir ab1irkái ab1ism 2abit a2b1i2ta 2abiz a2b1íj ab1írn ab1kr 1ablaká 1ablakh 1ablakk 1ablakos 1ablakr 2ablo a1b2lú 2abog 2aboh 2abok 2abolt ab1oltó 2abom abo2rak abo2r1as abo2rin 2abot a2b1öb abö2l ab1ölÅ‘ ab1ölt a2b1ös a2b1öt a2b1ö2z ab1Å‘rl ab1pr ab2rek 2abri a1b2rike ab2rin a1b2rit 2abró ab2rók ab1st a2b1urd ab1úr. ab1úrt abú2s1á ab1úsz ab1ü2l ab1üs ab1üv a2b1üz aca2la aca2lá aca2l1e2 aca2lét a2c1a2n aca2tá a2c1ág a2c1ál acci2ó ac2cö ac3cse ac3csí ac3csü acc3sz a2ce. ac1ebé a2c1eg ace2l a2c1ele a2c1elé a2c1elh ac1elle a2c1elm a2c1elo a2c1elv ac1emb 2acen ace2ta a2c1e2v a2c1ex a2célá acél1e2 a2célé a2c1ép a2c1é2re a2c1érte ac1fr a2chá ac3héj ac3hí a2c3hoz ac3hó a2c3hö a2c3hú ac1ide a2c1i2gá a2c1i2gé a2c1ike 2acin. a2c1ind ac1inf a2c1ist a2c1ír ac1ív ack1aro ac2kál ac2k1e2v acké2r ac2k1ére ack1éte ac2kil ac2k1osz ac2kös ac2kű ac2lu a2c1op ac1ös ac1Å‘r ac1pl ac1pr 2acsal acs1alj. acsa2p1á2g a2cs1atk acs1áll a2csáru acse2c acs1egy ac3seml 1a2cséb a2cs1ék 2acsi 2acsí 2acson acs1orm a2cs1öc acs1s ac3str a2csúl a2cs1úr acsü2l a2csüt ac3szá ac3sze ac3szö ac1tr a2c1ud 2acula a2c1ut ac1új ac1üg ac1ür ac1üz ac3za ac3zá ac3ze ac3zs ac3zu a2d1a2da a2d1a2dá 1a2dag. 
ada2gá 1a2dagb a2dagg 1a2dagh 1adagi 1a2dagj 1a2dagn 1a2dago 1a2dagr 1a2dagt 1adagu 1a2dagú a2d1aktá ada2lap ada2l1es 1adalé ada2lén ada2l1osz ada2l1ó2 ada2l1út a2d1ana a2d1ann 1adapt 2adar ada2ral ada2re ada2r1és ada2r1in ada2rut ada2tab ada2tal ada2t1a2n ada2t1ár. ada2t1á2rak ada2t1á2ram ada2t1á2rat ada2t1á2rá ada2t1árb ada2t1árr ada2t1á2ru 1a2datáu ada2t1e2g ada2tel ada2t1es adaté2r ada2t1érd ada2t1érÅ‘ 1a2datin ada2tint ada2tis ada2tív a2datm ada2t1old ada2t1öl ada2t1Å‘ a2datv ad1azo a2d1ág adára2 adá2rak adá2ris ad2del ade2g ad1ege ad1egé ad1egy ade2i a2d1ej adel1ej ad1elh ad1elm ad1eln a2d1e2lÅ‘ ad1elr ad1elt a2d1emb ad1e2mé a2d1eml a2d1emu ad1eng ad1e2pe ad1epr a2derd ad1erÅ‘ ade2ti a2deu a2d1e2v a2d1ex adé2kat adé2k1e2 adé2kés adé2kis adé2kü adé2kű ad1éle 2adémont a2d1ép a2d1érz adé2s adé2te a2d1é2ve ad1fl adfo2k1út ad1gr 1adhé 2adia adia2n ad1i2bo 2adid adi2kut 2adin ad1i2na. ad1i2nai ad1ind a2d1ing adi2o1g2ra 2adip 2adis 2adiv a2d1í2z ad2ji adka2na ad1kr 2adob 2adoc a2d1okke 2adol ad1ola 1a2domá 2adomb 2adomi 1a2dop a2d1orc a2d1org 2adorh 2adorian 2adorig ad1orv a2d1orz a2d1osz a2dógu 1a2dóku a2dómé a2dórá 1a2dósat 2adósi. ad1ó2vás 1a2dózó a2d1ö2k a2d1ö2l ad1örd a2d1ös adÅ‘r1a adÅ‘2rel ad1Å‘2s ad1pl ad1pr a1d2rac ad2ram ad2raz 2adrá ad2rám 2adro ad2rog a1d2rót ad2ruk ad1sp ad1st 2adug 2adum 2adup ad1u2rá ad1úsz adú2t a2d1úto a2d1üg ad1ü2lé a2d1üt ad1üz a2d1űr ad1űz ad1yéh ad1yér ad3zab ad3zav ad3zár ad3zel ad3zón a2d3zö ad3zsí a1e ae2bé ae2ce ae2cs ae2dé ae2dz ae2ge ae2gé ae2gy ae2he ae2ke ae2ké ae2la ae2l1á2 ae2le ael1ej ae3len ael1érh ae2l1í2 ae2lo ae2l1ö2 ae2lÅ‘ ae2lu ae2me ae2mé ae2mi ae2mu ae2ne ae2pe ae2pé ae2pi ae2po ae2red ae2ré aero1s ae2ró ae2rÅ‘ ae2se aes1er ae2sé ae2si ae2sÅ‘ ae2sü ae2sz ae2ta ae2tá ae2te ae2ti ae2tű ae2va ae2ve ae2vé ae2vi ae2vo ae2vÅ‘ ae2xe ae2zü a1é aé2derv aé2ge aé2gé aé2gÅ‘ aé2he aé2je aé2ke aé2kí aé2le aé2lé aé2li aé2lÅ‘ aé2lü aé2lű aé2ne aé2pí aé2pü aérde2m1 aé2ri aé2te aé2va aé2ve aé2vé aé2vi aé2vü 2afa. 2afaj. 2afaja 2afajá 2afajb 2afa2j1e2 2afajj 2afajn 2afajo 2afajt. 2afaju 2afajz 2afak 2afal 2afam 2afao 2afar 2afas afe2l1e2m 2afék 2aféli 2afélÅ‘ 2afélt 2afén 2afér 2afés 1affé afi2ap afi2asz afi2ke afi2t1a2 afi2t1e2 af2le a1f2lo a1f2ló a1f2lö a1f2lu 2afoc 2afog 2afok 2afol 2afon 2aford 2aforg 2aformác 2aformál 2aformátu 2aformáz 2aformu 2aforr 2afos 2afot af2rak 2a1f2re af2riz af2rí 2afro af2ron 2a1f2rö af3tá afus3s a2g1abl ag1abr ag1a2cé ag1aga a2g1a2ka a2g1akk a2g1akt 2agal a2g1a2lak a2g1a2lap a2g1a2lá a2g1alj a2g1alm aga2lom a2g1alt ag1ang ag1a2no ag1ant a2gany a2gap ag1apa ag1apá a2g1arc a2g1aré a2g1a2ro a2g1art aga2tom ag1a2tó a2g1a2ur ag1aut a2g1ava 2agaz ag1a2zon agá2c a2g1ács. a2g1ácsi ag1á2ga 1a2gák a3gála a2g1álm agá2lyan a2g1áp a2gár. a2g1árad a2g1á2rai a2g1á2rak a2g1áras a2g1árat a2gá2rá a2gárb a2g1árc a2gáre a2gáré agá2rév a2gárf a2gárh a2gá2ri a2gárj a2gárk a2gárm a2gárn ag1árná a2gáro a2gárr a2gárs a2gárt a2g1á2ru a2g1á2rú ag1ásv a2g1átá a2g1á2t1e2 ag1átfe a2g1áth a2g1átk a2g1átm agá2tol a2g1áts ag1átü a2g1átv ag1bl ag2del agdí2j1a2da a2ge. ag1e2cs a2g1e2d a2g1e2g age2l a2g1ela ag1elb ag1eld ag1ele ag1elé ag1elf ag1elh ag1eli ag1elm ag1eln a2g1elo a2g1elÅ‘ ag1elr ag1els ag1elt ag1elű ag1elv ag1elz a2g1e2m ag1eng a2g1eny a2g1e2p a2g1erd age2red a2g1erk a2g1erÅ‘ age2s a2g1e2v a2g1ex a2g1ez a2g1é2j a2g1ék. a2g1ékn a2g1é2l agé2né agé2p1i2p a2g1é2pül a2g1ér. a2g1é2re a2g1é2ré a2g1érh a2g1é2ri a2g1érk ag1érl ag1érm ag1érte ag1érté ag1érth a2g1értÅ‘ ag1ész a2g1éte a2g1éve ag1fl ag1fr 1agg. 
ag2g1a2ty ag2g1em ag2git 1aggl 1aggod 1aggok ag3gyar ag3gye ag3gyi ag3gyo ag3gyü agi3a ag1ide a2g1i2ga a2g1ige a2g1igé ag1ike a2gill ag1inf a2g1ing a2g1int a2g1i2oni agi2ó a2g1ip ag1iro a2g1ist agi2sz a2gita ag1izé ag1izm a2g1íj ag1ín a2g1ír ag1ív a2g1íz agká2rok ag1kl ag1kr ag2ne. a1g2non a2g1ob ag1oltó ago2ly1a2 2agom 2agona agon3n ago2n1osz a2g1op a2g1org ag1orj a2g1orn a2g1orr a2g1ors a2g1orv a2g1otth agó2rái a2g1ö2l ag1önk ag1önt a2g1örö ag1örv a2g1ös a2g1öt a2g1ö2v a2g1ö2z a2g1Å‘r a2g1Å‘2s ag1pl ag1pr 2agrammj a1g2rav 2agrá a1g2róf ag1sk ag1sp ag1sr ag1st agsz2 ag1szt ag1tr a2g1und a2guram agu2rat ag1u2rá ag1urn ag1u2tá a2g1új ag1úta a2g1úti ag1útt a2g1ü2l a2g1ür a2g1üs a2g1üt a2g1üv a2g1üz ag1űr ag1űz a2gy1a2c a2gyad agy1a2dó agy1a2gya a2gyaj 1a2gyak. a2gyakb agya2la agy1alap agy1alg a2gyalj agy1alko agy1alm 1a2gyam agy1ana a2gy1any a2gy1apa agy1apja agy1apjá a2gy1a2pó a2gy1apr agya2sz a2gy1aszó a2gyál agy1árv a2gy1e2c agy1e2g a2gy1el agy1em agy1est agy1esz a2gyev a2gy1ez agy1ék. agy1éke agy1ékk agy1ér. a2gy1é2re agy1érn agy1érr agy1érs a2gyima agy2nyá agyo2r a2gy1oro a2gyorr a2gy1ö2l a2gy1Å‘2r agyu2r agy1urá 1a2gyú. 1a2gyúa a2gyún agy1úr. agy1ú2sz a2gyút 2agyű aha2l1e aha2sábr ahelyü2kü ahert2 aher1tze ahé2j1út ah1ips ahitköz1 ah1o2vi ahú2sár ahús3s a1i ai2bo 2ai2de ai2dom 2ai2dÅ‘ a2iék ai2ga ai2gá ai2ge ai2gé ai2gy ai2ia ai2ib ai2ih ai2ij ai2in ai2ir ai2it ai2je aika2 ai2kab ai2k1ad ai2k1al ai2k1ar ai2k1as ai2k1á ai2ke2 aik1el aiki2 ai2kik ai2kis ai2k1ol ai2k1osz ai2kÅ‘ ai2kü a2ilb a2ile 2aill ail2le. ail2lo ai2ma. 2ai2má 2aimi ai2mit 2aimp ai2na. ai2na2l ain1ala 2ai2nas ainá2 ai2n1álo ai2nár 2aind ai2ne ai2nél 2ainf 2aing ai2n1in ai2nol 2ainp 2ains 2aint ai2nü ai2onb ai2onn ai2ono ai2onr ai2ont ai2pa a2ir. ai2ram 2ai2rat 2ai2rá ai2re. ai2ré ai2ri 2airo ai2rod a2i2se ai2si 2aisk 2aism 2aist 2aisz ai2sza ai2szo 2ai2ta ai2vad ai2var ai2vás 2ai2vó ai2zé ai2zom a1í aí2gé aí2ja aí2já aí2ju aí2ra aí2rá aí2ro aí2ró aí2ru aí2té aí2vá aí2ve aí2vé aí2vi aí2vó aí2vü aí2vű aí2ze aí2zé aí2zü aí2zű a2j1a2dó a2j1adu aj1aga aj1agr aja2kol a2j1akó aja2kú a2j1ana a2j1ant a2j1any aj1apr a2j1áa ajá2c aj1á2go 1a2jánd 1ajánl a2j1áp 2ajár a2j1árb a2j1árc a2j1áre a2j1áré a2j1árr aj1ásá aj1ásv a2j1á2t1e2 ajá2z aj1ázá aj1bl aj1br aj2d1alm aj2d1a2lom. aj2d1a2lomm aj2d1a2lomn aj2d1a2lomr aj2d1a2lomt a2j1e2c a2j1ef a2j1egé a2j1ela aje2leme a2j1elf aj1ellá a2j1elo aj1elÅ‘ aj1elvá aj1elvo a2j1e2m aj1ene aj1enz a2j1e2r aj1e2se a2j1ex a2j1ez a2j1ége a2j1égé aj1égÅ‘ a2j1é2k a2j1él a2j1ép ajé2r a2j1ére a2j1éte aj1fl aj1fr aj1g2r a2j1i2d a2j1ij a2j1ik a2j1im a2j1int a2j1io a2j1ip a2j1iz aj1ír aj1íz aj1kl 1ajkú. ajobb1o ajo2g1á2sza a2j1ola aj1old a2j1o2v a2j1ócskás ajó2sár aj1öb a2j1ök a2j1ör a2j1öz aj1Å‘r aj1Å‘2s aj1pl aj1pr aj1sh aj1sk aj1sp aj2tág aj2teg aj2t1é2t ajt1org aju2hi a2j1új aj1úsz a2j1úto a2j1útr aj1üg aj1ül aj1üs aj1üz aj1űz ajz1aka ajz1atl aj2zer aj2zí aj2zü 2akab aka2cse a2kadag a2k1a2data a2k1a2datb a2k1a2datn a2k1a2dato a2k1a2datr a2k1a2datt 1akadál 1a2kadém a2k1adm a2k1aga 2akal ak1a2lag a2k1a2lak aka2lapú a2k1aleg ak1alko 2akam 2akan 2akao 2akap aka2pád ak1app ak1a2rai ak1arasz a2k1a2ráb a2k1a2rák 2akarc a2karch 2akard ak1a2rén 2akari 2akarr 2akasí 2akast aka2szaj ak1a2szat aka2szel aka2szö a2k1asztr 2akata aka2tab 2akate aka2tel aka2ter akati2 aka2tik aka2tim aka2tin 2akau a2k1autó 2akav 2akaz 1akác. 
a2k1áf a2k1ág aká2l1a aká2lis a2k1álló ak1árad a2k1árb a2k1árj a2kárk aká2rokn a2k1ártó a2k1á2ru aká2sad aká2saj aká2sal aká2sar aká2sav aká2sást akás3s ak1á2szán aká2szu a2k1ászun akát1a a2k1átla ak1bl akció2s1ű2 a2ke. ake2cs a2k1e2dz ak1e2ge ak1e2gé a2k1ela a2k1elá ake2lem ak1elh a2k1elj a2k1elo ake2lÅ‘k a2k1elr a2k1elv a2k1emb ak1e2mel a2kerd ak1e2rei a2k1e2ró a2k1e2rÅ‘ a2kesp a2k1est ak1eszk ak1eszm a2k1e2te ak1e2ti a2k1e2vez ak1é2kem ak1é2kes a2k1é2ké a2k1ékh ak1ékrÅ‘ 2akém 2akény 2akép. 2aképb 2aképe 2aképé 2aképh 2aképk 2aképl 2aképn 2aképpel 2aképpé 2aképr 2aképt 2aképü 2aképz a2k1érc 2akérd a2kérdek 2akére 2akéré a2k1érm 2akérÅ‘ a2k1é2rÅ‘i a2k1é2rÅ‘j a2k1érr a2kérteke a2k1értel a2k1értet a2k1értÅ‘ 2akérv a2k1érz 2akés a2k1észa a2k1é2te 2akéts a2k1étt 2akéz ak1fl ak1fr akgerinc1 ak1gr 2akia 2akib 2akic 2akid a2k1i2dé ak1idi 2akie2 2akié 2akif ak1iga a2k1igé 2akigy 2akih 2akií 2akij 2akil a2k1ill ak1ily 2akim a2k1i2má a2k1imi 2akin ak1ind a2k1ing a2k1ins a2k1ion 2akir aki2rom 2akis a2k1isi a2k1isk ak1ism a2k1ist 2akit 2akiv 2akiz a2k1izm a2kíté a2k1í2z ak2k1a2d ak2kaj ak2k1a2la akk1alk ak2k1arc ak2kál ak2k1á2p ak2k1ed akk1ell ak2kelm akk1elt ak2kem ak2k1eró akke2s akk1ese ak2ket ak2kola 1akkord akk1ölt ak2k1ös ak2kÅ‘r 1akku. 1akkum ak2lau ak2lav ak2lor ak2lón ak2lór 1aknai 1aknáb 1aknáh 1aknái 1aknáka 1aknáko 1aknás 1aknát. 1aknáv 1aknáz 2akoa ak1obj 2akoc 2akof 2akokt akolás3s a2k1olda a2k1o2ly 2akom 2akonc 2akond 2akonf 2akong 2akonk 2akons 2akont 2akonv 2akony 2akonz 2akoo 2akop a2k1o2pe ak1o2rat 2akorb 2akord a2k1org 2akori a2k1orj 2akorl 2akorm 2akoro 2akorp 2akorr 2akors 2akort 2akoru 2akorú a2k1orv 2akos 2akóni 2akó1p2 a2k1ó2rá ak1ó2ród 1a2kózá ak1össz akö2z1é2l aközrea3 ak1Å‘r. ak1Å‘s. ak1pl ak1pr ak1ps akrádi2 ak2rát 2akre ak2rea 2akré a1k2réme ak2réta ak2rétá 2akri a1k2rit 2akrí ak2ríz 1akrob 2akrónik ak1sp ak1sz2t ak2t1au aktár2s1a2d akt1emb 1akti. ak2tim 1aktívb aktí2ve ak2t1íve. 1aktívk 1aktívo 1aktívs 1aktívt akto2r1ál akt1osz ak1trá 1aktua 1aktú. a2k1udv a2k1ujj 2akun 1akupu a2k1úg a2k1új a2k1úrr a2k1üg a2k1ü2le a2k1ü2lé a2k1üln a2k1ü2t a2k1ü2v ak1ya a2l1abl alac1c alace2 alaci2 ala2cit ala2cor a2lacsé ala2c3sö ala2csü a2ladag a2l1a2datá ala2gál ala2g1e ala2gép ala2gol ala2gya ala2j1a2d ala2jas ala2j1e2 a2lakad ala2k1an a2l1a2kas ala2kál ala2k1áp ala2kes 1a2lakí a2l1akna. a2laknák a2laknát ala2kol a2l1a2kód ala2k1öl a2l1aktu 1alaku a2lakul. a2lakult al1alg a2l1alj a2l1alk al1all al1alm al1a2lo al1alt ala2n1e 1a2lanny a2lany ala2nyal ala2ny1e2 ala2p1a2d ala2pak ala2pal a2lapan ala2p1á2r ala2p1e2 ala2pill ala2pin ala2pir 1a2lapítv ala2p1ol ala2por ala2p1osz alap1ó2 alap1p al1a2ra al1a2rá al1arg ala1s2p a2l1aszp alaszta2l alat1any alatát1a2 ala2t1e2v ala2t1inf ala2tív ala2t1ol alató2 ala2tór a2l1attak 1a2latti ala2t1ü2 al1ava ala2zúr 1a2láa 1a2láá alába2d alá2b1ada al1ábrá 1a2lábúj alá2dal alá2d1ap 1a2láf a2l1ág. a2lága al1á2gá al1ágb a2l1ágg al1ágh al1á2gi al1ágk al1ágn al1á2go a2l1ágr al1ágt al1á2gú al1ágya 1a2láí 1a2lámo a2l1árad a2lárak a2lárá alá2rár a2l1árk aláste2 alás2tel alás2t1ér. a2l1átd a2láte a2láté al1átép a2l1átf alá2ti a2l1átl a2l1átm a2látö a2látr a2látú al1bl al1br 1album al1d2r a2le. 2alea 2aleb al1ebé 2alec al1ece ale2g1e2lé al1egés alegész1 al1egys a2legyü 2aleh 2aleí2 2alej ale2k1a a2l1ela a2l1elág a2l1eld al1e2led ale2lem. ale2lemb ale2leme ale2lemk ale2lemm ale2lemt a2l1elg a2l1elh al1elm a2l1eln ale2lÅ‘k ale2lÅ‘t a2l1elr a2l1els al1elté a2l1elv 2alem a2l1emb a2l1e2mel al1e2mé a2l1eml 2alen a2l1ene 2alep ale2pe. 
a2lerd a2l1e2re a2l1erk a2l1ern 2ales al1esem a2lesi ale2sik ale2tet alet2t1est aleü2 2alev ale2vol ale2vÅ‘i 2alex a2l1exp 2aléc 2alég al1égÅ‘ alé2kal alé2k1an alé2kat alé2k1em alé2ker alé2kes alé2kor a2lél. al1é2len a2l1é2let a2l1élé a2l1éll al1é2lÅ‘ a2l1élü 2alén a2léneke a2l1érd al1érs a2l1érte a2l1érté a2l1értÅ‘ a2l1érz 2alét alé2tek a2l1é2tel al1étl a2l1évi 1alfás 1algásat 1algor al1gr 2alic al1ide a2lidé al1i2do al1ifj a2l1igé 2alik al1ill 2alim a2l1i2má a2linas ali2nin alió2ra al1i2pa a2l1irá a2l1iro a2l1i2si a2lism ali1s2po al1iste 2alite al1izma al1íj a2l1í2v alja2iké 1aljaka 1aljakb 1aljaké 1aljakk 1aljako 1aljas 1aljái alj1árna al2j1á2ro 1aljb 1aljc 1aljd 1aljer 1aljf 1aljg 1aljh 1aljia al2jí 1aljj 1aljk 1aljl 1aljm 1aljn 1aljr 1aljs 1aljt 1aljú 1al2jü 1aljv 1aljz al2k1a2pó alkele2 1alkím 1alkoh 1alkotá 1alkotm 1alkotó al1kre al1kro 1alku. 1alkud 1alkun al2l1aj al2l1akt al2l1akv alla2l al2l1ala al2lalk al2lau all1áll all1ázs al2led all1egy all1emb 1allerg all1ese all1est all1e2vÅ‘ all1é2jé al2l1id al2lim all1int al2lip al2l1isk al2lí all1óri al2lös al2l1Å‘2 al2lü 1allű 1almád 2alob al1obe alogos1s alo2g1ó2 a2l1okta al2old. aloma2 alo2mad alo2mak alo2m1al alo2m1an alo2map alo2mar alo2mas alo2mác alo2már alo2m1át alo2mer alo2min alo2mis alo2mit alom1p2 alo2m1ú alo2n1á alon1d2 alon3n 2alop al1opc a2l1ope al1o2ra al1orc al1orn al1o2ro a2l1orr alos3s a2l1ostá a2l1oszl 2alov aló2c3se 3alóguses alóigaz1 alói2ko al1ó2lo al1ónn alóó2 aló2ráj aló2rák aló2zan aló2zis a2l1öb a2l1ö2l a2l1ön a2l1ör a2l1ös a2l1ö2z alpe2l alp1ele al2piz al1sh al1sk al1sl al1sm al1sp alsz2 al1szt al2tada al2t1alap alt1anya alt1elv alt1emb al2t1e2p al2t1e2v al2t1é2k alté2n alt1éne alt1érne al2t1é2rÅ‘ al2t1érr alt1értÅ‘ al2t1i2m alti2n alt1ina alti2p alt1ipa al2t1ir al2t1old 1altonk al2t1osz al2tóc al2tön al1trak al1tran al1trav al2tur al2t1út al2tür 1altvé al1ty alu1p 1a2luss alu1str a2l1új al1úr. al1úrb al1úrh al1úri al1úrk al1úrn al1úrr a2l1úté a2l1úth a2l1útj a2l1útn a2l1útt al1üg al1ül al1ün al1ür al1üs al1üt al1üv a2l1üz al1űr al1űz 1alveo 1alvó a2ly1ap a2lyar a2lyál a2ly1e2 a2lyév a2ly1id a2lyim a2lyis a2lyö a2lyug a2ly1ü2 am1abb am1abl 2amad a2m1a2dat am1a2dás a2m1adm a2m1a2dó a2m1a2du 2amag ama2gát 2amaj am1ajt ama2kar a2m1akt a2m1akv a2m1a2lak am1a2lap a2m1a2l1e ama2nya amaó2 2amap 2amas ama2sz1a2k ama2szeg ama2sz1em ama2szél ama2szö ama2tad amata2n ama2tárá ama2tel ama2told ama2t1osz ama2t1ó2 a2m1a2u a2mazo a2m1álm a2m1á2rai a2m1á2rak amá2rá amát1a2dó. a2m1átk a2m1átl a2m1átt am2b1ag am2bal am2b1at am2b1ászá am2b1e2g am2b1e2le am2bep am2b1e2te am2b1ér. 1ambula am1dr a2me. am1e2ce 2ameg am1elá am1e2lem a2m1e2l1i am1elj am1elk a2m1eln a2m1elo a2m1e2lÅ‘ a2m1els a2m1elt a2m1elv a2m1e2me a2m1eng 2amenn amens1s amen2t1á2ro a2m1erd a2m1e2rÅ‘ a2mesk 2amest a2m1e2v a2m1ex am1ezr amé2hes amé2k am1éké amé2let a2m1ép a2m1érd a2m1értek a2mértel a2m1étk a2m1évé 1amforá am1fr am1gr amid1i2 ami2g a2m1iga a2m1igá a2m1igé a2mimá am1imi am1imp ami2na. 
ami2nan ami2nin a2m1inté amió2 a2m1irá a2m1iro amise2 ami2sel ami2sep ami2sz 1amitbo 1a2mitha 1a2mitm 1a2mitö 1amitro 1amitrú 1a2mits 1a2mittá 1a2mittö a2míg am1kl am1kr amme2g am2m1egy am2meta am2m1é2t 2amod a2m1oke a2m1okm a2m1o2koz am1ola a2m1old a2m1oltá a2m1op a2m1ors 1a2mort a2m1orv 2amos amos3s 2amot 2amoz am1ó2ri am1ö2r am1ös am1öt am1ö2z am1Å‘2r am1Å‘s am1pla am1p2r 1amput am1sk am1sp am1sr am1st am1t2r 2amun a2muni amu2riz amu1sl a2m1u2tas a2m1új am1üg am1ü2l am1üt am1üz 2anad a2n1a2dat a2n1adu a2n1aga a2n1agá an1a2gya a2n1ah 2anai 1a2nakr ana2lap 1analí an1alk an1alm 1a2naló an1a2mo a2n1ana 1a2naná an1ann an1a2ny 2anap ana2pa2 an1apa. a2n1apá a2n1a2pó an1appo an1a2rab an1a2rá a2narc ana2sz1e2 ana2szén ana2szin ana2sz1í2 ana2szó ana2szü ana2t1e ana2tö 2anav a2n1a2va a2n1az a2n1ág a2n1álmi a2n1á2lom a2n1á2p a2násat 2anát a2n1áta a2n1átk a2n1átr an1br anca3u an2c1ál ance2 an2ce. an2c1ér. an2c3hit an2cho anci2al an2c1ó2 an2csaj an2csar ancs1ell ancs1emb an2cs1en ancs1e2p ancs1et an2cséré an2cs1ill an2csiz an2cs1í2z an2cs1or an2csö ancsÅ‘2 ancs1t an2csut an2cs1ü an2d1alk anda1s and1atl and1e2le and1elk an2d1elÅ‘ and1els an2derd an2d1es 1andez an2dél an2dil an2d1ö an2d1Å‘2 an2dús an2dün an2dű an1dy an2dzá a2n1eb an1edd an1e2gé 1a2nekd ane2la ane2l1á ane2l1e2l ane2l1emb ane2lél ane2lÅ‘ 2anem an1emu an1e2re an1ern an1err 2aneu 2anev a2n1ex ane2z an1ez. a2n1eze an1ezé an1ezt an1ezz a2n1é2ké a2n1éle an1épí a2n1éri a2n1érv a2n1étk a2n1étt a2n1évc an1évem an1éves an1évet ané2véb ané2vén ané2vét ané2vév an1évha ané2vig an1évke an1évkö a2n1évü a2névz anfé2l1é2v an1fl an1f2r anga2d an2g1ada an2g1ado an2g1ala an2g1a2ra an2g1ass ang1azo an2g1ác an2g1áll angár1a2d ang1áta an2g1átj an2g1átt an2g1ed an2g1eg an2g1elf an2g1elh an2g1elj an2g1ell an2g1eln an2g1elÅ‘ an2g1elt an2g1elv an2gem ang1emb ang1eng an2g1e2r ang1ese ang1éle ang1élv an2g1é2ne an2g1ér. ang1érÅ‘ an2g1és an2gim an2giz an2gí an2g1osz an2g1óri an2g1öl an2g1ös an2g1Å‘2 ang1sz an2gü an2gű angye2 angy1el an2gyék an1ide ani2g a2n1iga a2n1igé a2n1i2ko an1ind a2n1inf an1i2on 2anip a2n1i2pa an1i2rá a2n1iro a2n1isi a2n1isk a2n1ism a2nita an1itt a2n1íg a2n1íj 2anív a2n1íz ank1abl an2kaj an2k1a2k ank1ale an2k1a2n ank1arc ank1ari an2k1atl an2k1au an2kaz an2k1ál an2k1e2g an2k1ek an2k1e2l an2k1e2m an2k1e2reit an2k1erj an2k1es ank1érde ank1érem an2kérte an2k1érté an2k1i2d an2k1i2p an1klu an2k1old ank1oszt an2k1ö2römb an2kös an2k1ö2v an2kÅ‘r ank1t2 an2k1ü an2ne. an3ny. an3nye an3nyo a2n1oj a2n1okta a2n1old 1a2nomá 2anor a2n1o2ro a2n1orr a2n1ors 2anos a2n1ott a2nódd a2nódo anó1g2 a2n1óni a2n1óno a2n1ó2rá an1óri a2n1öl a2n1ön a2n1ör a2n1ös a2n1ö2t an1pl an1pr an2s1e2l an1ska an2sö an1s2p ans3sze an1sta an2szal an2sz1á2bó an2sz1á2h an2szár ansz1es an2szél an2sz1én an2sz1é2p an2szil an2szin an2szó ansz1t2 ansz1ü2l an2t1abl ant1aga an2t1eg 1anten an2t1e2se ant1esz anti1llát an2t1ing an1t2re a2n1ud a2n1ug a2n1uj 2anuk a2n1u2r anu2szi a2n1u2t a2n1úg an1ü2g anü2l a2n1ülÅ‘ an1ü2z an1űr an1űz any1a2dó anyag1ár. anyag1árr 1anyagb 1a2nyagé anyag1g 1anyagh 1anyagk 1anyagm 1anyagr 1anyagt 1anyagú a2nyakad a2ny1a2kas a2ny1alk a2ny1all a2ny1ass any1aszt a2ny1a2tom a2nyaz 1anyádt 1anyáék any1álo a2ny1árá a2ny1árb a2ny1árf any1árk a2ny1árn a2ny1á2ro a2ny1árr any1á2sz any1d 2anye a2ny1e2c a2ny1ed a2nyeg any1egé any1egy a2ny1e2k a2nyela anye2leme any1elev a2ny1ell a2ny1elo a2ny1em a2ny1en any1e2re. any1e2rei any1e2ret any1e2rén any1e2rér any1e2rét any1e2rév any1e2rÅ‘m any1e2rÅ‘r any1e2rÅ‘t a2ny1ég a2nyéhe a2ny1é2j a2ny1ék any1élv a2ny1ér. 
a2ny1érb a2ny1érc a2ny1érd a2ny1ére a2ny1érg a2ny1érh a2ny1é2ri a2ny1érk a2ny1érm a2ny1érn a2nyérÅ‘ a2ny1érp a2ny1érr a2ny1érs a2ny1érte a2nyérté a2ny1értÅ‘ a2ny1érv a2nyéve a2nyévé anyha2j1ón anyha2j1ó2r 2anyi anyigaz1 any1ing a2ny1io 2anyí 2anyo any1old a2ny1o2r any1órá any1ök any1ö2r any1öz a2ny1Å‘2 any1s anyt2 any1tr a2nyur 2anyú 2anyü any1ül a1o ao2áz ao2be ao2cs ao2da ao2dú ao2ka ao2ká ao2la aolaja2d aola2j1ada ao2mo ao2pá ao2pe ao2ra ao2ro ao2so ao2ut ao2ve ao2vi ao2xi a1ó aóá2r a2óbar a2óbá a2óbi a2óbo aó2ce aó2dá a2ódi a2ófá a2ófe a2ófo a2ófÅ‘ a2ófü a2óhé a2óhi a2óhü a2óil a2óis a2ói2v a2óká a2óke aó1kré a2óku aó2la a2óle a2ólé a2óli aó2lo aó2lu a2ómag a2ómar a2ómas a2óme a2ómi a2ómo a2óné a2ónö a2ónÅ‘ aó2rá a2óré aó2ri a2ósí a2ósű a2óta a2óte a2óté aó2vo aó2vó a1ö aö2bö aö2cs aö2dé aö2ko aö2kö aö2le aö2lé aö2lÅ‘ aö2ná aö2rö aö2ve aö2vi aö2vö aö2zö a1Å‘ aÅ‘2re aÅ‘2ré aÅ‘2ri aÅ‘2rö aÅ‘2rü aÅ‘2se aÅ‘2sö aÅ‘2sz apa2cs1a2v apa2cs1i a2p1a2da a2p1aga a2p1ajá a2p1akc 2apal apa2lag apa2lak a2p1alb a2p1alj a2p1alt apa2mas a2p1ant 2apap apa2pán ap1a2rán 1apasá a2p1asp apa2tad apa2t1ala apata2n apa2t1as apa2tál apa2t1ö ap1aut 2apav a2pava ap1a2zo a2pájú ap1állap a2p1állá a2p1állo apán1n a2p1á2rad ap1á2rai a2p1á2rak ap1á2ram ap1á2ras ap1á2rat apá2ráé apá2ráh apá2rán apá2rár apá2ráv apárba2 a2p1áre a2p1á2ré a2p1árf ap1árka ap1árko a2p1árny ap1ártó a2p1áru a2p1á2rú apáská2 apá2túr 1a2páu ap1bl ap1dr a2pe. ap1e2dé a2p1e2g ap1e2l1a ap1elb ap1e2lé a2p1elf ap1elg a2p1elh ap1elj ap1elk a2pell ap1elm ap1eln ap1elo ap1e2lÅ‘ ap1elr a2p1elt a2p1elv ape2m ap1emb ap1eme ap1ene ap1eni ap1e2ny ap1e2rÅ‘ ape2s ap1ese ap1esé ap1esÅ‘ a2p1e2v a2p1ex a2p1é2he a2p1éje a2p1é2le a2p1éll ap1élm a2p1é2ne ap1épü a2p1ér. ap1észl a2p1éte a2p1éve ap1fl ap1f2r ap1gr a2p1ide a2pidé apigaz1 ap1i2ko ap1ikr a2p1i2nár ap1ind ap1inj a2p1ins ap1i2rat a2p1i2rá a2p1irk a2p1ism a2p1íg a2p1ín a2píté ap1ív a2p1íz ap1kl ap1kr ap1kv ap2laz ap2léd apmeg1 apmű1 apműt2 a2p1ob ap1o2laj ap1oltár ap1opc ap1ope ap1opt apo2rad ap1orcá ap1orsó apor2t1Å‘2 apo2t1ál a2p1ov 1a2póká ap1ólo ap1óri a2p1öb ap1öl ap1ön ap1örv a2p1ös ap1öv a2p1ö2z ap1Å‘2r 1appará ap2p1árn ap1ph app1ing ap1pla ap1pri ap1pró ap1p2s ap1py ap2res ap2réd a1p2rém ap2rés. a1p2rím 2apro ap2roc a1p2rod 1apród 1aprózó ap2s1i2kon ap2síz aps1ork apsz1ert ap1szf apsz2t aptára2d aptá2r1ada ap1t2r apuá2 a2p1udv apu1p2 apus3s a2p1u2tas a2p1után a2putc a2p1új a2p1üd a2p1üg a2p1ü2l a2p1ün a2p1üt a2p1üv a2p1üz ap1űr ap1wh ara2b1ár ara2b1í2 ar1abla 2arad ar1a2data ar1a2dato ar1a2datr 2arag ara2g1e 2araj ar1ajkáró 2arak a2r1a2kol ara2kóh ara2kói ara2kós ara2kót a2r1akt. ar1alk a2r1alm a2r1ana 1a2rann arany1a2gá ara2nyal 1aranyb 1aranyh ara2nyí 1aranyk 1aranyn 1aranyr 1aranys ara2pa. ar1arc 1arasz. arasz2t1e ar1aszú ara2tal ara2tel ara2tin ara2t1Å‘2 aratű2 ara2tűr ar1aul aravasz1 1a2raw ara2zon 2arác a2r1ác. a2r1á2csi a2rácsom a2r1ág. a2r1á2ga. a2r1á2gai a2r1á2gak a2rágan a2r1á2gat ará2gáb ará2gáh ará2gán ará2gár ará2gát ará2gáv a2r1ágb a2r1áge a2r1á2gé a2r1ágf a2r1ágg a2r1ágh a2r1á2gi a2r1ágk a2r1ágl ará2gon a2r1ágr a2r1ágs a2r1ágt a2r1á2guk a2r1á2gun a2r1á2gú a2r1á2gy ar1álc a2r1állá a2r1állo 2arám ará2m1e2 ará2nye a2r1á2rak a2r1á2rá a2r1árk a2r1árr ar1á2ru ar1árú ar1árv ará2sze a2r1á2szoki ará2tal ará2t1ö aráz4s ará2zsál arázsi2 arázs3z ar2c1a2d arc1agy arc1ala arca2n arc1any ar2car ar2cat ar2ceg ar2c1es ar2cev ar2c1é2h arc1él. 
arc1élb arc1éll arc1élt ar2c1é2n ar2cés 1archí arc1ing ar2c1int ar2ciz arcolás1s ar2có ar2cÅ‘ arcs1ala ar2csál arc3sere ar2csip ar2c3sor ar2cü ar2cű ard1acé ar2d1alj ar2d1áll arde2l ard1ele ard1elÅ‘ ard1elt ar2d1e2m ar2d1é2l ar2d1ina ar2d1ing ar2dor ar2dö ar2d1ur ar2dü a2r1e2dz ar1egés a2r1e2gy are2i areil2 a2r1e2le ar1elh ar1elm ar1eln ar1elr ar1elt ar1elü ar1elű ar1elv a2r1emb ar1emel ar1e2més a2remu a2r1e2r ar1e2se ar1e2sÅ‘ a2r1eszek a2r1eszé a2r1eszü ar1etű ar1e2ve ar1e2vé a2rew aréé2 ar1ég. ar1é2ge aré2k1a2l aré2kek ar1él. ar1élt 2arém aré2nek aré1p a2r1épü a2r1é2ri a2r1érÅ‘ ar1észj aré1sz2tá aré1tra ar1f2r ar1gha 2ari. 2aria ar1i2de a2ridé a2r1i2ga a2rimád ar1i2mi a2r1i2na. a2r1i2naké a2r1i2nas a2r1i2nat a2r1i2náb a2r1i2náh a2r1i2nán a2r1i2nár a2r1i2nát a2r1i2náv a2r1ingé ari2nit arink2 a2r1inté ari2nü ar1i2ono a2r1i2ón 2arip a2r1isp a2r1iste ari1szf 2ariz ari2zom ar1í2té ar2j1áz ar2j1er arkas3s arká2p arká2s ar2k1eg ark1elá ar2kéj ar2képü ar2k1érd ar2k1in. ar2k1i2ont ar2kiz ar2k1orm ar2k1ovi ar2kud ar2k1u2s ar2les ar2m1a2gy arma2te arm1áll ar2me. ar2meo arme2s arm1ing ar2m1is ar2m1osz ar2m1ö ar2mü arnis3s aroka2 aro2kan aro2k1á2 aro2k1e aro2kin a2r1olda 1a2romát aro2mis a2r1opt ar1org ar1o2ro ar1ors a2r1ovi aró1p a2r1ó2rak a2r1ó2ráj a2r1ó2rám aró2s3zár aró2vár ar1ózd a2r1ö2b a2r1ök a2r1ö2l ar1öng a2r1ör a2r1ös a2r1ö2z ar1Å‘r ar1Å‘s ar1pl ar1pr ar1ry ar1ska ar1srá ar1sta ar1sto ar1str 2art. art1abl ar2t1a2lap arta2n1á2s art1aszt ar2t1a2u ar2t1álla ar2t1e2g art1e2lÅ‘ art1emb art1e2rei ar2tég ar2t1é2l ar2t1érp ar2t1i2na. ar2t1i2nát ar2t1i2náv art1orz ar2t1ö2v ar2t1ut artvis1s ar1ty2 a2r1uml a2runi aru2tas a2r1új a2r1úsz ar1útj ar1útr a2r1üt ar1üz ar1ű2z ar2vala arvas1s arv1ágy ar2v1árh ar2v1á2ri 2asabl as1abr a2sadag asag2 as1aga as1agg as1a2gy 2asaj a2s1a2kar 2asal as1alab a2s1a2lap as1alf a2s1alján a2s1alji a2s1alk as1alm a2s1anó a2s1ant a2s1a2ny 2asap as1apr 2asar as1aran a2s1a2rá asa2t1ó2 as1a2ty 2asav asa2vo a2s1á2c asá2g1ikr as1áll 2asám a2s1árnak a2s1á2ro a2s1árr a2s1árt a2s1á2ru asás1s as1ásvá as1áth 2asáto 2asáv as1bl as1d2r a2s1e2d as1ege a2s1e2gy ase2k as1eke as1eké as1ell a2s1emb a2s1e2n a2serd as1e2ré as1erÅ‘ a2s1es ase2t as1eti a2s1ez a2s1éhe a2s1éke a2s1é2l 2asém a2s1ép 2asér a2s1ér. as1érb as1érc a2s1érd asé1s2 as1étv as1é2ve as1fr as1gl as1gr as1ide as1ido as1i2ga as1i2gá as1inj a2s1i2o 2asiv as1izé as1i2zo a2s1íj 2asík 2asín 2asír asírta2 asír2tal a2s1í2v as1ízü 2aská as1kl asko2s1a2rá asko2s1á as2koz as1k2r as2luk as2már 2asodr a2s1of as1okke as1okl as1okos 2asoks as1okta asom1or aso2né 2asor a2s1ord a2s1orm as1osz a2s1ox asó1p2 as1ó2rá a2s1ö2k a2s1ö2l as1örd as1örv a2s1ös as1ö2ve as1Å‘r as1p2l as2pot asp2r a1s2pu as1s2p as3szabá as3szin asszí2ve assz1íve. assz1ívek assz1ívne 1asszoc 1asszony as3szü as3szű 2asta a1stand 2astá a1s2tád 2asti astil2 as2tim 2astí 2asto as2top 2astr as1trag as1trav a1st2ru 2astú a2s1ud 2asug a2s1uj 2asuly asu2r as1ura as1urn a2s1u2sz as1u2tak a2s1u2tas a2s1u2tá as1u2tu a2s1ú2s a2s1ú2t asút1a2 a2s1üd a2s1ü2lÅ‘ as1üst a2s1üz as1űz 2aszab a2sz1a2d 2aszak a2sz1akc asza2k1e a2sz1akt asza2k1ü a2sz1alk 1aszaló asz1asp asz1ass asza2t1a2 asza2t1e a2szath a2szati 2aszav a2sz1ág. asz1á2ga asz1á2gá asz1ágb asz1ágg asz1á2gi asz1ágk asz1á2go asz1ágr asz1ágt a2szálc a2szálm 2aszám aszá2rada a2száram asz1árnyé a2szárp a2száta a2száth asz1átl a2sz1á2to asz1áts 2aszed 2aszeg aszegész1 asz1eh 2aszek 2aszel asz1e2lem asz1elj 2aszemc 2aszeme 2aszemé 2aszemp a2szemu 2aszemü 2aszemű 2aszen a2sz1eng asz1e2pi 2aszer. 
2aszere a2sz1erej 2aszeré 2aszerk 2aszern a2sz1ero a2szerÅ‘ 2aszerr 2aszers 2aszert 2aszerv 2aszerz asze2s a2sz1ese asz1est 2aszez 2aszék aszé2k1el asz1é2let asz1élés aszé2n1ég aszén1n a2sz1é2re asz1éré 2aszét asz2fér a2sz1ill 2aszin a2sz1inf asz1ing 2aszir a2sz1ism asz1ist 2asziv a2sz1iz 2aszí a2sz1ír asz1í2vi asz2karc asz2k1áp asz2kell asz2kes 2aszob 2aszoc 2aszof aszon1n aszo2n1o a2sz1orr asz1ors a2sz1osz 1aszóa asz1ó2dá 2aszót 2aszök asz1ölt a2sz1ön 2aször asz1öss a2szöt 2aszöv 2aszÅ‘ aszÅ‘lőé2 asz3su asz2tab asz2t1apo asz2tác asz2táll asz2t1emb asz2téne asz2t1és asz2t1ing asz2t1olt asz2t1oro asz2t1orr asz2tors asz2t1osz asz2töv asz2tÅ‘s asz2t1ül asztvíz1 2aszur 1aszús asz1útr 2aszü aszü2g asz1ügg a2sz1ü2z 2aszű asz2vit asz1z 2atab at1abla a2t1a2cé ata2dat at1a2dó 2atag a2t1agya a2taján 2atak a2t1akara ata2kár ata2kel ata2k1é2pes ata2k1ö2v a2t1aktu at1a2lag ata2lap. ata2lapb ata2lapj ata2lapo ata2lapp ata2lapr ata2lapú ata2lat a2t1aleg ata2lik a2t1alle a2t1almás 2atan a2t1anal ata2nó at1a2nyag at1a2nyás 2atap at1app ata2puk ata2pun 2atar a2t1a2rab ata2ran a2t1a2rén ata1st atau2r a2t1a2ut a2t1azo 2atáb at1ábr 2atág at1ág. a2t1á2ga at1ágr 2atáj 2atál a2t1állá a2t1álló a2t1álm 2atám atá2nal a2t1á2pol 2atár atá2rada atá2rado atá2ramh a2táramk atá2ramn atá2r1az atá2rét atá2ris at1árkár atársá2g atár2s1ágá at1ártás 2atás atá2s1á2g atá2sz atá2tal atátá2 atá2tár a2t1átb a2t1átf a2t1áth a2t1áti a2t1átj a2t1átk a2t1átl a2t1átr a2t1áts a2t1átt a2t1átu a2t1átv 2atáv at1bl at1br at1cl at1dr at1e2gé ate2jel ateké2r ate2kére ate2kó a2t1e2l1a a2t1elb at1elc a2t1eld at1e2led at1eleg at1e2lem at1e2l1en a2t1elf a2t1elh at1eljá at1elkö at1elkü a2t1elm at1eln a2t1e2lo at1e2lÅ‘n ate2lÅ‘t a2t1elr a2t1els at1elta at1eltá at1elté at1eltü a2t1elu a2t1elű a2t1elv a2t1elz a2t1emb at1e2mel a2t1emé a2t1eml a2t1emó a2t1enc a2t1ene at1epi at1epo a2t1erd at1e2rec ate2r1ék a2t1erÅ‘ a2t1e2se a2t1esé a2tesk ates2t1á ates2tÅ‘ at1eszm at1eszű ate2tol a2t1e2z 2atég até2get a2t1éhe até2k1a2l até2ke2 a2t1éke. aték1el 2atél a2t1é2le a2t1é2lé a2t1élm a2t1élv 2atém até2ne a2t1éps 2atér até2rá a2t1érdek a2t1érin a2t1érl a2t1érm a2t1értel at1érvek a2t1érz at1észa at1észl 2atét até2tét a2t1étv at1fj at1fl at1fr at1gl at1gr a1t2hus 2ati. a2t1iat a2t1i2de a2t1ido ati2g a2t1ige a2t1igé a2t1igy a2till at1ille at1i2má at2i3mádá at1i2mi a2t1imp 2atin a2t1ing. a2t1inga a2t1ingb a2t1inge atinó2 ati2n1ór at1inté at1into 2atip a2t1i2pa 2atir a2t1irá a2t1iro 2atis a2t1isk a2t1ism ati2sz a2t1iszo 2atit a2t1i2ta a2t1izé a2t1izg a2tizmo a2t1i2zo a2t1íg a2t1íj 2atíp 2atír a2t1í2t at1í2vek atív1e2l at1í2vet atí2vét a2t1íz 2atki 2at1kl 2atko 2atkö 2atku at1kv at1ojt atoka2 a2t1o2k1al ato2koss a2t1o2koz a2t1okta a2t1o2ku at1oldá a2t1oldó ato2m1á 1atombe ato2mer 1a2tomj a2toml 1a2tomok 1a2tomos 1a2toms 1atomú at1opc at1ope a2t1opt 2ator ato2rál a2t1ord a2t1org ato2rú a2t1orv a2t1orzá atos3s ato1sz2f a2t1oszl ató1p2 a2t1ó2rák ató2rán ató2riá ató1stá ató1sz ató2s3zár ató2s3zené ató1tr at1öko atö2l a2t1öle a2t1ölé a2t1ölÅ‘ at1öná atön2k1a at1ö2röm a2t1ös a2t1öt atö2vi. a2t1ö2z atÅ‘2ra at1Å‘rl a2t1Å‘2s atpen1 at1pl at1pr at1ps atrac1c a1t2rad 2atraj 2atrak at2ramb a1trap a1trau a1t2rav 2atré a1t2réf at2rén atré2szel a1t2ril at2roj a1t2róg 2atrón a1t2rü at2sán at1sh at1sk at1sl at1sp at1st at1s2v atszá2m1é atszáraz1 att1adó 1attakok 1attasé at2t1e2g at2tez att1ing attó2 at2t1ór at1t2re 2atud a2t1udv a2t1ug 2atul a2t1und a2tuni 2atur at1utá at1utó a2t1új 2atúl at1ú2sz a2t1üg 2atük at1ü2lé at1ült 2atün at1üst a2t1ü2v 2atüz at1üzem at1űri. 
at1űrl 2atűz a1u au2b1in au2bor a2udá au2de au2ga a2ug2h au2go 1a2ukc aul2l aul2t1a aul2ti a2umae a2umaf a2umak a2umam a2umar a2umav a2umáb a2umád a2umáé a2umáh a2umái a2umám a2umán a2umár a2umáv au1ph au2rad au2r1ikr au2rö au2sz1e a2ut. au2tad au2tal au2tam au2tas au2tat 2autá au2tál a2uti 1a2uton a2utó 1autób 1autóé 1autóh 1autói 1autóm 1autón 1autór 1autós 1autót 1autóv a2utr a2utt au2tu au2zí au2zs a2uzú au2z1ü a1ú aú2jí aú2jo aú2ré aú2r1i aú2sz aú2ti aú2to a1ü aü2dí aü2dü aü2ge aü2gy aü2le aü2lé aü2li aü2lö aü2lÅ‘ aü2lü aü2nÅ‘ aü2re aü2rí aü2rö aü2rü aü2sz aü2te aü2té aü2ti aü2ve aü2vö aü2zem. aü2zemb aü2zemen aü2zemet aü2zemé aü2zemh aü2zemm aü2zemn aü2zemr aü2zen aü2zé a1ű aű2ri aű2rö aű2ze aű2zé aű2zi aű2zö aű2zÅ‘ 2avad 2avak av1akti a2v1anh 1a2vant a2v1a2nya a2vanz ava2rac ava2r1ag ava2r1e2 ava2rék ava2sze ava2szü 1a2vatá 1a2vató 2avád avá2nan 2avár avá2r1al avá2ria. avá2riai a2v1á2riá a2v1áta a2v1átt avá2zal av1bl av1e2le av1elv 2aves av1est 2avet 2avez avi2cse av1ing av1kr a2v1old av1oltó avo1s a2v1ox a2v1öm a2v1ös av1Å‘s av1Å‘z av1pr av1sp av1st a2v1ut av1ü2l av1ür av1ü2z a2wag aw2hi awhisky2 awhiskyk2 a2x1ab a2x1ad a2x1ak a2x1al a2x1an a2x1av ax1bl ax1eg ax1el ax1inf ax1ing ax1int axió2r axi2se ax1ír ax1ös ax1öz ax1pr a2x1ut ax1új ax1üz ax1űr a1ye. a1yed a1yei a1yek ay1e2l a1yen a1yes ayet2 ay1fl a1yi. ay1il ay1ing a1yit ayma2 ay1s2t aza2ch aza2cik azai2ko azal2t1a aza1p2 aza1s2 az1áll az1ált azá2nö azá2r1ó2ra azá2s1e azási2k azá2siko azás3s az2du a2z1e2g az1e2le az1elj az1elm az1elÅ‘ a2z1em a2z1ex a2z1ég azé2k1e2 azé2kol a2z1érté a2z1ing a2z1i2o az1irá az1irt azma1g a2z1ob 2azol azo2nal azo2n1á azont2 a2z1op a2z1or a2z1osz azót2 azó1tr a1z2rí az4sé a2z3sike az3sor az3sp a2z3sü az3sz az1ut a2z1új azú2r1é az1üz 2á. á1a áa2da áa2dá áadás1s áa2do áa2dó áa2du áa2já áa2ka áa2la áa2lu áa2ra áa2sz áa2ut áa2va á1á áá2cs áá2ga áá2gy áá2ju áá2mu áá2ra áá2ru áá2sá áá2sí áá2so áá2só áá2su áá2zo áá2zu á2b1adu áb1akc á2b1a2la á2b1alk á2b1ambu á2b1a2ny ába1p áb1art ába3se á2b1á2g áb1áll áb1álm á2b1áp á2b1árá á2b1árn á2b1á2ru á2b1átj á2b1átl á2b1átm á2b1átv á2b1áz áb2b1a2d ábba2l áb2b1ala áb2b1and áb2b1ár áb2ben áb2b1e2r áb2b1é2l áb2bid áb2bim áb2b1i2s áb2b1ita ább1o2so ább1oss áb2bör áb2b1ül áb2bű áb1dr á2b1ed á2b1e2g ábe2l1a ábe2l1á ábe2l1eg ábe2l1el ábe2l1e2r ábe2lég ábe2l1in á2belnö á2b1e2m á2b1e2n áb1e2ro áb1erÅ‘ 1á2béc á2b1é2g áb1é2ke á2b1éks á2b1é2les á2b1élt á2b1ép á2b1ér. á2b1é2ri á2b1érs á2b1érte á2b1érté áb1fr á2b1i2d ábi2g áb1iga áb1igé á2b1ikr áb1illa á2b1im ábi2nai á2binas á2b1ind á2b1ing á2b1int á2b1is áb1izm áb1izz á2b1ív áb1kl áb1kr ábla1kr ábla1p á2b1olda á2b1op ábo2raj ábo2ran ábo2rál ábo2ris á2b1osz á2b1o2v áb1öb áb1ö2d áb1öl áb1ön áb1ö2r áb1ös áb1öv áb1Å‘r áb1pr 1ábrái áb2rek áb1sp áb1st áb1sz2 áb1tr á2b1ujjal ábu1sz2 áb1úr. áb1üg áb1ü2l áb1üs áb1üt áb1üv á2c1a2g ác1ajt áca1k2l ác1akn á2c1a2la ác1alm á2c1a2ny á2c1ág á2c1ál ác3cse ác3cso ác1e2l1 ácele2 ác1en ác1er á2c1ép á2c1ére á2c3ha á2c3há ác3he á2c3hé ác3hon á2c3hu áci2as á2c1il ác1inc ác1ing ácin2til áci2ósű á2c1i2s ác1ív á2c1or á2c1osz ác1ór ác1ö2l ács1abl á2cs1a2g á2cs1ajt á2csaka á2cs1akn ács1alap ács1alj ács1alom ács1app ács1atk á2csatom á2cs1á2c á2cs1ál ác3sárg á2cs1árk á2cs1árn á2csáru á2cs1á2ta ácsboz1 á2csef á2cs1e2g á2cs1e2l á2csene á2csent á2cser ácse2t á2cs1ev ács1é2g ácsé2k ács1éki ács1ékk ács1ékn á2csél á2csép ács1é2te ácsi2g á2cs1iga á2cs1ille á2csimi ács1int ácsi2p á2cs1ipa ács1isk ácsí2 ács1ír á2csokta 1á2csolat 1á2csomk ács1orr á2csosz á2cs1ov ác3só. 
ács1ó2r ác3sót ác3sóv ács1ö2k á2cs1ö2l á2cs1ös á2csöt ács1Å‘2s ács1s á2csuj á2csut á2cs1úr ács1ü2l á2cs1ü2t á1csy ács3zen á2c1ut ác1úr ác1ül ác1ür ád1abl á2d1a2do á2d1akc ád1a2lap á2d1alb á2d1alj á2d1alk ád1a2nya ád1a2nyá áda1p á2d1a2pá ád1arc á2d1aty á2d1ág á2d1ál ád1á2sz ád1átl ád1dr ád3dz ád1e2c á2d1e2g á2d1ej á2d1e2l ádele2 ádel1ej ádel1eme á2d1e2m á2d1ep áde2r1á ád1erÅ‘ ád1e2ti ádéd2 ádé1dr ádé1g2 á2d1é2ge ádé1kré ád1éne á2d1érte á2d1érz ádé1st ádi2cse á2d1i2d á2d1i2ga ád1i2ko ád1ill ád1imi ád1ind á2d1int ádi2ódar ádióé2r ádi2óéra ádi2ói2v ádi2óko ádi2ó1k2ra á2d1i2p ád1ist ád1ív ád1kl ád1kr ádo2ga ádo2gá ádo2ge ádo2rak á2d1osz ádós2 ádö2b á2d1ös ád1öv ád1ö2z ád1Å‘2r ád1pr ád1ps á1d2rót ád1st ád1tr ád1udv á2d1ú2s á2d1üg ád1ü2lé á2d1üz ád3za á2d3zá á2d3ze ád3zú á1e áe2cs áe2dz áe2ge áe2gé áe2gy áe2le áe2lÅ‘ áe2me áe2re áe2rÅ‘ áe2se áe2sé áe2si áe2sÅ‘ áe2sü áe2te áe2ve áe2vé áe2vÅ‘ á1é áé2de áé2ge áé2gé áé2gÅ‘ áé2he áé2ke áé2le áé2ne áé2pí áé2pü áé2re áé2ré áé2ri áé2rÅ‘ áé2rü áé2te á2f1a2gya á2f1alg á2fáb á2f1ág 1á2fák á2f1áp 1á2fás á2fát áf1dr áf1elm á2f1e2m á2f1e2t áf1é2ne áfi2ad áfi2am á2f1i2d á2f1im á2f1ing. á2f1ordá á2f1osz áf2rec á1f2rö áfus3s á2f1ün á2g1abl ág1abr ága2c ág1acé 1ágacska á2gad ág1a2dá 1ágadd ág1a2dó á2g1a2j á2gaka ág1a2kar ág1a2kas 1á2gakb 1á2gakh 1á2gaki 1á2gakk 1á2gakn 1á2gakr á2gakt á2g1a2la á2g1alj á2g1all ág1alt 1á2gam ága2n ág1ana á2g1ang 1á2gank ág1ant á2g1any á2g1a2p á2g1a2r ág1asp á2g1asz ága2tol á2g1au 1á2gaza ága2zon á2g1á2g á2g1áld ág1álm ág1á2mu ágá2nyal á2g1áp á2g1árad á2g1árb á2g1á2ré á2g1árh á2g1á2ria ágá2rokb á2g1árv á2g1ásó ágás3s ágá2sz á2g1át1a2 á2g1átá á2g1áté á2g1átf á2g1áth á2g1áti á2g1átl á2g1átm á2g1átn ágá2tokk á2g1átr á2g1áts á2g1átt ág1átü á2g1átv ág1bl ág1br ág1d2r á2g1e2c á2g1e2d ág1ef á2g1e2g á2g1e2l á2g1e2m á2ge2n ág1enc ág1ene á2g1e2p áge2ra áge2rá áge2r1el á2g1e2s á2g1e2t á2g1e2v á2g1ex á2g1é2g á2g1é2he á2g1éj á2g1é2k á2g1é2l á2g1é2ne á2g1épí ág1é2pü á2g1ér. á2g1érá á2g1é2ré á2g1éri ág1érm á2g1é2rÅ‘ ág1érte ág1érté á2g1érv á2g1és á2g1éte á2g1éve á2g1évé ág1fl ág1fr ág1g2l ág1gr ág3gyar ág3gye ág3gyú ághá2nyi ági2al ági2asz ági2d á2g1ide á2g1if ági2g á2g1iga á2g1igén á2gill ág1ima á2g1i2mi á2g1i2na. ág1inf á2g1ing ág1ins á2g1int á2g1iri ág1iro á2g1ist á2g1isz á2g1i2ta á2g1iz á2g1íg á2g1ín á2g1ír á2gíté á2g1íz ágká2rok ág1kl ág1kr ág1kv á2g1oá á2g1okl á2g1o2li ág1oltó á2g1op á2g1or á2g1osko ágos3s á2g1oszl ágó1dr á2g1ó2rá á2g1ö2k á2g1ö2l ág1önt ágö2r ág1örö ág1örv á2g1ös á2g1öt á2g1öv á2g1ö2z ág1Å‘2r ág1Å‘2s ág1pl ágport2 ágpor1tr ág1pr ág1ps ág1sh ág1sk ág1sl ág1sp ág1st ágsz2 ág1tr á2g1ud á2g1uj águ2n á2g1una á2g1und á2g1u2ra á2g1u2rá á2g1u2t á2g1új á2g1ús ág1útt ág1üd ág1ü2g ág1ü2l ág1ün ág1üv ág1üz ág1ű2r ág1űz ágya1g ágy1alj ágy1alk ágy1alm 1á2gyasn á2gyassa á2gy1á2l á2gy1á2ram ágyás1s á2gy1e2 á2gyél ágy1ér. 
1ágygyű á2gyid á2gyil á2gyivad á2gyob á2gyosz ágy1otth á2gy1ó2s ágy1ö2l ágy1ös á2gyur á2gyúh á2gyút á2gy1ü2 áh1ajk áh1ass 1á2hít á2h1ors á1i ái2dom ái2dÅ‘ ái2ga ái2gé ái2gy ái2ha ái2je ái2má ái2ram ái2rá ái2sz ái2ta ái2vá ái2vo ái2zé á1í áí2gé áí2rá áí2ve áí2vo á2j1adó á2j1akc á2j1akv á2j1a2la áj1ambu á2j1ana áj1ant á2j1any á2j1ar á2j1atl á2j1ax áj1azo á2j1ág á2j1ál á2j1áp ájás3s á2j1át1a á2j1átá á2j1átt áj1bl áj1br áj2ch á2j1e2c áj1egé áj1elm áj1eln áj1elÅ‘ áj1elv á2j1em á2j1e2n á2j1es áj1e2t á2j1e2v áj1ex á2j1é2g á2j1él á2j1ép ájé2r á2j1ére á2j1éte áj1fl áj1fr á2j1i2d á2j1il á2j1im á2j1iz áj1íj áj1ír áj1ív áj1íz ájk1ell áj2k1Å‘2 áj2kü áj2lad ájl1akt áj2l1an áj2l1as áj2l1at áj2lik áj2lob áj2nár áj2nin áj2nü á2j1ob á2j1op á2j1or á2j1öb áj1ök áj1öl á2j1ör áj1Å‘r áj1Å‘s áj1pl áj1pr áj1sn áj1sp áj1st2 áj1t2r á2j1ud áj1úsz áj1üg áj1ül áj1ür áj1ü2t áj1üv áj1üz áj1űr áj2zab áj2zaj áj2z3sa ák1abr á2k1a2d á2k1aj ák1aka á2k1a2lapí ák1ale ák1alj ák1ant ák1ara á2k1a2rá ák1arm ák1arz á2k1ass á2k1atl á2k1a2u á2k1á2c á2k1á2l ák1áta ák1átk ák1bl ák1ebé á2k1e2g ák1e2le á2k1elk á2kellene á2k1elm á2k1e2lÅ‘ ák1emb ák1e2rÅ‘ á2k1e2vé á2k1e2vo á2k1é2l á2k1é2ne á2k1érm á2k1érte á2k1értÅ‘ á2k1érv á2k1érz ák1éss á2k1é2te á2k1étk á2k1étt á2k1é2ve ák1fl ák1fr á2k1i2d á2k1if áki2g ák1iga á2k1igé ák1ill á2k1i2m ák1ing á2k1int ák1iro á2k1i2s áki2t ák1ita á2k1i2v ák1ír ák1k2l ák1k2r ák2lar á2k1oks á2k1o2la á2k1old á2k1o2li á2k1oll á2k1o2pe á2k1orv ákos3s á2k1ott ák1óni ákö2l á2k1ölÅ‘ ák1ö2r ák1Å‘2r ák1pr ák2rák á1k2reá á1k2ris ák1sp ák1sr ák1st ák1sz2 ák1t2r á2k1uj á2kuni áku2r ák1ura ák1utó ák1új ákú2t á2k1úto ák1üd á2k1üg ákü2l ák1ün ák1ür ák1ü2t ák1űr á1k2vat 2ála. ál1abl ál1acé á2l1adag á2l1a2dó ál1a2já ál1ajt ála2kar á2l1akna. 2álal ál1alak á2l1alát ál1alg á2l1alk á2l1alm á2l1ane á2l1ang á2l1ant á2l1a2nya ál1a2nyái ál1a2nyán á2l1a2nyát ál1a2nyáv á2l1a2pos ál1apó ál1a2ra ála2szek ála2szel ála2sz1ék ála2szö ála2szű ála2tat ála2tet ála2t1ér. ála2tikr ála2tint ál1atlé ála2told ála2t1ó2 á2l1atti á2l1a2ty ál1aut á2l1ábr ál1ág. ál1á2gi á2l1á2gú ál1áll á2l1á2rak ál1árf ál1árk á2l1árn á2l1á2ro álá2s3z á2l1átc á2l1átk á2l1átm á2l1átú á2l1átvá ál1bl ál1br álcai2 1áldás 1áldoz ál1d2r ál1dy á2l1e2c á2l1e2d á2l1ef ál1elk ál1elm ál1elo ál1e2lÅ‘ ál1elr ál1emu á2l1e2r ál1esem á2l1e2sÅ‘ á2l1esz á2l1e2t ál1ez á2l1ég á2l1é2he álé2kal álé2k1an álé2k1e2l álé2kü ál1é2le ál1é2lÅ‘ á2l1é2ne á2l1ér. á2l1érb á2l1érd á2l1érf á2l1érg á2l1érh á2l1é2ri á2l1érm á2l1érr á2l1érs á2l1értá á2l1érte á2l1érté á2l1érz á2l1év. á2l1é2vé ál1fl ál1fr ál1gr 1álhaja áli2as ál1i2bo áli2d ál1ide ál1idé áli2g á2l1iga á2l1ige á2l1igé ál1ill ál1imp á2l1ind ál1inf á2l1i2onb á2l1i2ont á2l1i2p ál1irá á2l1iro áli2s1e áli2s1ék áli2sis ális3s ál1iste á2l1iz ál1ín ál1í2r ál1ít ál1í2v álká2rok ál1k2l ál1k2r ál2l1a2dó 1állag ál2laj ál2l1a2lak 1államá álla2m1e 1államot 1államti ál2l1a2pá ál2l1a2r 1állata álla2tas álla2t1e2 álla2t1or álla2t1ö2 1állatu ál2l1á2g ál2l1á2l ál2l1árr állás1s ál2l1áth ál2l1átm ál2led ál2l1e2h ál2l1ej áll1éké ál2l1iz 1állomá ál2lü 1álmaim 1álmok. 1álmom 1álmot. 1álmuk 1álmunkb ál1obj á2l1o2ka. 
á2l1o2kai 1álokaih 1álokain 1álokair á2l1o2ká 1álokán 1álokát 1álokáu álo2kok 1álokonk álo2kos á2l1okta 1á2l1o2ku á2l1ol áloma2 álo2mad álo2m1al álo2m1an álo2mar álo2mas álo2m1á álo2m1e álo2m1it álo2mot á2loms álo2m1ú á2l1ont ál1opc á2l1ope á2l1or á2l1osz á2l1ox álóa2 álóá2 álóó2 áló2ráj áló2s1ű2 álót2 ál1öb á2l1öd á2l1ö2l á2l1ös ál1öz á2l1Å‘r ál1p2l ál1p2r ál1p2s ál1sk ál1sl ál1st ál1trad ál1t2rak ál1t2ran ál1t2re ál1tré á2l1ug álu2n á2l1una á2l1u2r á2l1u2t á2l1uz á2l1új á2l1úr álú2t ál1útj ál1útk ál1útn á2l1úto á2l1útr á2l1útt ál1üg ál1ün ál1ür ál1üt ál1üv ál1üz ál1űr ál1űz ály1ada álya1g2 ály1ant ály1anya álya1p ály1ass á2lyál á2ly1e2 á2lyéj á2lyél á2ly1é2ne á2lyére á2lyéve á2lyide á2lyí ály1k2 ály1odv á2lyol á2ly1ó2 á2ly1ö á2lyÅ‘ ály1s á2lyug á2ly1ü2 á2ly1ű2 á2m1abl á2m1abr ám1a2cé ám1adm ám1agg á2m1ajt á2m1akt ám1a2lap ám1all ám1alt ámaó2 á2m1app á2m1arc áma2sz1ál áma2szel áma2szív áma2sz1odv áma2sz1ü2 ám1atl á2m1att á2m1aty ám1aud ám1a2zo ámá2c ám1áf á2m1ág ám1álla ám1állo ámán1n ámán1tr á2m1á2rak ám1á2ram á2m1áras á2m1á2rá á2m1árb á2m1á2ri á2m1árn á2m1á2ro á2m1árr á2m1á2ru ám1ásv á2m1átb ám1á2t1e á2m1áti á2m1átm ám1á2zó ám1bl ám1dr á2m1e2b á2m1ej á2m1ela ám1elá ám1e2lem ám1elé á2m1elh ám1elí á2m1elj ám1elk á2m1elm ám1elo á2m1e2lÅ‘ ám1els ám1elt ám1elü ám1elv á2m1e2m á2m1erd ám1e2rek á2m1erk á2m1e2ro áme2rÅ‘k áme2rÅ‘t áme2rÅ‘v ám1e2se ám1ess ám1esz áme2t á2m1eti á2m1etű á2m1ev á2m1ex á2m1ez ám1éke á2m1é2le á2m1ép á2m1érc á2m1érd á2m1értel á2m1étk ám1fr ám1gr ámi2ab ámi2al ámi2am á2mide á2m1idé ám1i2dÅ‘ á2m1iga á2m1igá á2m1igé á2m1ill á2mimá á2mimm á2m1imp á2m1ind á2m1inf á2m1ing á2m1inte á2m1inté á2m1inv á2m1i2pa á2m1irá á2m1iro á2m1irt á2m1isk á2m1ism ám1i2szá á2m1i2z ám1íg á2m1íj á2m1ír á2m1í2v ám1íz ám1kl ám1kr ám1kv ámla3t2 á2m1ob á2m1of á2m1oká á2m1okl á2m1okm á2m1okta ámo2lyas á2m1op ámo2r1á2l ámo2ris ámo2r1odv ámo2sas ámos3s ám1osto á2m1osz á2m1ov á2m1ox ám1ó2r ám1ö2k ám1öl ám1ön ám1ör ám1ös ám1öt ám1öv ám1ö2z ám1Å‘2r ám1Å‘2s ám1Å‘z ámpa1p2 ám1p2l ám1p2r ám1p2s ám1sk ám1sm ám1sn ám1sp ám1st ám1sz2 ám1t2r á2m1ud á2m1ug á2m1uj á2m1und á2muni á2m1u2r á2m1utá á2m1új ám1üd ám1üg ám1ü2l ám1ür ám1üt ám1üv ám1üz á3műt ám1ű2z ámva2s1u2 á2n1abl á2n1a2cé án1ach án1ada ánae2 án1afr án1agya án1ajt á2n1aká á2n1akc á2n1akr á2n1a2la án1alk á2n1all á2n1anó án1anya á2n1a2o ána2p án1apa ánap1i án1ara á2n1arc án1assz ána2t1ér. ána2tol ána2tor ána2t1ű á2n1a2u án1ava án1ábr á2n1ág á2n1áll án1ár. án1árad án1á2ri án1árm án1árn án1áro án1áru án1ásv áná2t1a áná2t1á á2n1á2z án1bl án1br án2cac án2c1ad án2caj án2cal án2c1a2n án2car án2c1as án2cat án2c1au án2c1ál án2c1á2ro án2cás án2c1ed án2c1e2g ánce2l ánc1ele án2c1elt án2c1er án2c1e2s án2c1et án2cez ánc1éhe ánc1é2ne án2c1ér. án2c3h án2c1ill án2cim ánci2p ánc1ipa án2c1ir án2c1i2s án2c1ó2 án2c1ö2 án2cÅ‘ áncs1an ánc3sás án2c3seb áncs1es ánc3spo ánc3sza án2cü án2cz áne2d án1edé á2n1ef á2n1e2g á2n1e2l án1emb án1e2mi án1eml án1e2mu á2n1en á2n1e2p án1es á2n1e2t án1e2u á2n1ex án1ez á2n1é2d á2n1é2g á2n1ék. á2n1ékn án1éks á2n1é2l á2n1é2ne án1épí án1épü á2n1ér. á2n1érc á2nérd á2n1éré á2n1érl án1éte á2névad á2n1éve á2n1é2vé án1f2r án2gab án2g1a2r án2g1ára ángás1s án2g1át án2ged án2g1el ánge2s án2g1ér. án2g1é2s áng3g án2g1it án2g1iv án2gí án2g1osz án2gÅ‘r án2g1us án2gü 1ángy. 
án2gyas á2n1i2d á2n1if á2n1i2ga án1igé áni2k1a áni2k1á áni2ke án1ill á2n1im án1ind án1inn á2n1i2p á2nirr án1irt án1isk án1ism á2n1i2sz áni2tá áni2t1e2 áni2t1í áni2tol áni2t1or án1izo án1íg án1ív án1íz án2kaj ánk1aro án2kern án1k2li án1k2lo án1klu án1k2rá án3nye án3nyí án3nyo án3nyu á2n1o2b á2n1okir á2n1okta á2n1o2ku án1old án1oli á3nom áno2n1i2m á2n1o2r án1oszl án1ott á2n1ox án1óri án1ök á2n1öl án1öm á2n1ön á2n1ör á2n1ös á2n1öt án1övö á2n1Å‘r án1Å‘2s á2n1Å‘2z án1pl án1pr án2ses án2s1ér án2sis án2siz án1s2pe án1s2pi ánst2 án1str áns1üld án1szl ánt1acé ánt1a2ny án2taz án2t1á2g ántá2p án2t1árb án2t1ári án2t1ed ánt1eké ánt1elh án2tez ánt1éke án2térte án2tid án2t1ipa ánt1ist án2t1iz án2t1ív án2tök án2t1ös án1t2rak án1tran án2t1ü2l á2n1ud á2n1ug á2n1uj á2n1u2r á2n1u2t án1úr ánú2t án1útj á2n1úto án1útt án1ü2g án1ü2l án1ü2t án1ü2v án1ü2z án1űz ány1adás ány1a2dó ány1agg ány1akc ány1alap ány1alk ány1all ány1anya ány1anyá ány1apá ány1ara ány1ará ány1art ány1assz ány1aszt á2ny1á2l ány1á2ras á2ny1á2rá á2ny1árb ány1árc á2ny1árf ány1árh ány1árk á2ny1árn á2ny1á2ro á2ny1árr á2ny1árs á2ny1árt ány1áru ány1árú á2ny1átl á2ny1á2z ány1ed á2ny1e2g ányegyez1 ány1el ánye2le á2ny1e2m ány1en á2ny1ep ány1esÅ‘ ány1et ány1e2v á2ny1éd á2ny1ég á2nyéhe á2ny1é2j á2ny1é2k á2ny1él á2ny1é2ne ány1érc. ány1érre á2ny1érte á2ny1érté ányé2r1ü ány1érvé á2ny1és á2nyéte á2nyétk á2ny1étt á2nyéve ányfé2l1é2v á2ny1id á2nyigé á2nyike á2ny1ikr á2nyirat á2nyiro á2nyisk á2ny1isz á2nyita ány1í2ró á2ny1oml á2ny1ont á2ny1o2r á2nyosz ány1ök ány1ö2r á2ny1ös ány1öz á2ny1Å‘2 ány1tr á2nyug á2ny1ur ány1ut á2nyúto á2ny1ü2 á2ny1ű2 án2zál á1o áo2cs áo2ká áo2ko áo2mo áo2pe áo2so áo2szan áo2sz1e áo2szis áo2sziv áo2szú á1ó áó2ha áó2va á1ö áö2le áö2mö áö2re áö2rö á1Å‘ áő2sz á2p1a2dot ápa1tr ápa3u2 á2p1ág áp1áll áp1áta áp1dr áp1eg áp1e2l áp1e2m ápe2n á2p1e2s áp1e2t á2p1ég áp1é2te ápi2ac ápi2av á2p1im áp1inj áp1int ápi2t á2p1ita á2p1ín á2poll ápo2r1e áp1Å‘r áp2ro áp1t2r á2p1ug á2p1u2t á2p1úr áp1üg áp1üz á2r1abl á2r1abr á2r1abs ár1adag á2radata á2radatá á2radatb 1áradaté á2radatn á2radato 1á2radatr á2radatt ár1a2gá á2r1ajk á2r1a2kad á2r1a2kas á2r1akc á2r1akna. á2raknát á2r1akt. ár1akti ár1aktí ár1alá á2r1alj. á2r1alja. á2raljak á2r1aljá ár1alk ár1all á2r1alm ár1alt á2r1alv 1á2rama. 1áramai 1á2ramá ára2mál ára2m1el ára2m1érté 1á2rami ára2m1in 1á2ramkö 1á2ramok 1á2ramol 1á2ramot 1á2rams 1á2ramu 1á2ramú ár1ana ár1ang ár1a2no á2r1ant ár1a2pá ár1a2pó ár1aps á2r1a2rá á2r1arc ár1a2ri á2r1asp ára2taj ára2tal ára2tav ára2tál ára2t1inf ár1atlé ára2t1ü2 ár1aty á2r1ábr árá2c árá2g ár1ága ár1ágr ár1ágy ár1á2lo á2r1á2p ár1ár. á2r1á2rak á2r1á2rá á2r1árb á2r1árf á2r1á2ri á2r1árk á2r1á2ro á2r1árr á2r1árt ár1á2ru ár1árú ár1árv á2r1ásás árá2szó á2r1á2ta árát1a2d á2r1átá ár1átb á2r1átc á2r1átd ár1á2t1e2 á2r1áté á2r1átf á2r1áth á2r1á2ti á2r1átj á2r1átk á2r1átm á2r1á2tö á2r1átr á2r1áts á2r1átt á2r1átú á2r1átv ár1bl 1árboc ár1br ár2d1ál árd1ell árd1eme ár2d1é2n ár2d1Å‘r ár1drá ár2dud áre2á ár1e2d á2r1e2g ár1ej á2r1e2l árelÅ‘i3r áre2ma áre2mél áre2n ár1ene ár1eng á2r1e2r ár1e2sé ár1e2sÅ‘ ár1evé á2r1ex ár1ébr ár1éde á2r1é2g ár1éj. ár1é2je ár1éjs á2r1é2ke á2r1é2ké á2r1éks á2r1é2l á2r1é2ne á2r1ép á2r1éré á2r1é2ri ár1éss ár1ész á2rétk á2r1étr á2r1étt á2r1étv á2r1év. 
áré2vek á2r1évk á2r1évr ár1fl ár1f2r árgás1s ár1gl ár1g2r ár2gyárv ár2gyir ár2gyol ár2gyó á2r1i2de á2r1i2dé á3r2i3dÅ‘tl ár1ifj ári2g á2r1iga á2r1igá á2r1ige á2r1ill á2r1i2má ár1imb á2r1i2mi á2rinas á2r1inc á2r1ind á2r1inf á2r1ing ár1inn á2r1int á2r1inv á2rio á2r1i2paro ári2s1e ár1isko ár1iste ár1i2sza á2r1i2ta ár1i2zo á2r1ír ár1ív á2r1í2z árka1k2 1árkádo ár1k2l 1árkok ár1kré ár1k2v árnás3s árnya2n ár2ny1any árnye2l ár2ny1ele ár2nyelÅ‘ ár2nyem ár2nyes ár2nyok ár2ny1ol ár2nyos ár2nyö ár2nyü ár1odv á2ro2ká áro2k1e á2rokha á2rokm áro2kol á2r1o2koz á2rokre 1á2roks á2rokta á2r1olda ároma2 áro2maj áro2mak áro2m1al áro2m1as áro2már áro2m1ok áro2m1os ár1opt á2r1o2r áro2sas áro2sál áros3s á2r1ostr á2r1osty ár1otth á2r1o2v áróé2r ár1ó2nét ár1ó2név áró1p2 á2r1ó2rak áró2rák á2r1óri áró1ská áró1s2p ár1ö2b ár1öc ár1ök á2r1ö2l á2r1ön ár1ör á2r1ös ár1öv á2r1ö2z ár1Å‘2r ár1Å‘2s 1árpa. ár1pl ár1p2r ár1p2s ár2s1ala árs1asz ár2s1ál ár2sed ár2s1e2l ár2sem ár2s1en ár2ses ár2s1é2g ár2sip ár2sirá árs1okt ár2s1ol ár2sóv ár2s1ön árs3s ár1s2tab ár2sut ársza2ké ár2s3zene ár2tado ár2t1aj 1ártalo árta2n1á2s árt1ará árt1a2ris árta3u2 árt1áll ár2t1árn ár2t1á2ru ár2t1e2g árt1elh árt1eli árt1ell árt1eln ár2t1elÅ‘ árt1emb ár2t1erk árte2s árt1ese árt1esth árt1eti árt1éle ár2t1érd ár2t1érte ár2tid ár2tif ár2t1ins ár2t1int árt1izg ár2tít ár2t1okta ár2top árt1otth ár2t1ön ár2t1ös ár2t1ura árt1u2sz ár2t1ut 1á2ru. 1á2rua áruá2 1á2ru1b2 1á2ruc á2rud 1áruda 1árudá 1á2rue 1á2rué 1á2ruf 1árugy 1á2rui á2ruj 1árujá 1á2rum 1á2run. 1á2runa 1á2runá á2r1und á2runi 1á2ruo 1á2ru1p2 á2rur 1árura. ár1urá 1á2rus árus3s á2rut 1árut. áru2tal áru2tas 1árute áru1tr áru2tun 1á2ruü 1á2ruv ár1u2z 1á2rúé 1á2rúk ár1úré ár1úrf ár1úsz á2r1úta á2r1útb á2r1úté á2r1úth á2r1ú2ti á2r1útj ár1útl á2r1útn á2r1ú2to á2r1útr ár1úts á2r1útt ár1ü2g ár1ü2l ár1ün ár1ür ár1ü2s ár1üt ár1ü2v ár1üz ár1űr ár1ű2z 1árvác ása2b á2s1abl ás1abo ás1a2dat á2s1a2dá ás1a2do á2s1a2dó á2s1a2já ás1ajtó á2s1a2kar á2s1akc á2s1akv ás1a2la ás1alg ás1a2li ás1alj ás1alk ás1all ás1alm á2s1alt á2s1amb ása2n á2s1ana á2s1anó á2s1ant á2s1any ás1a2pá ás1app ás1apu ás1a2ra ás1a2rán á2s1arc á2s1aré á2s1a2ri ás1art ás1arz ás1asp á2sass 1á2satá á2s1atl á2sato ás1aty á2s1a2u á2s1azo á2sábé á2s1ábr ásá2gai á2s1ágb á2s1á2gig á2s1ágk ás1ágn á2s1á2gú á2s1ál á2s1á2p ás1áras ásá2rét ásáró2 ásá2rón 1á2sásb 1á2sási 1ásásk ás1ásv ás1á2ta á2s1átá ás1áté ás1áth ás1áti ás1átj ás1átk á2s1átl á2s1átr ás1áts ás1átt ás1átv á2s1á2z ás1bl ás1br áscsa2p1á2 ás1d2r ás1e2b á2s1e2d á2s1ef ás1e2gy á2s1e2l á2s1e2m á2s1e2n ás1epi ás1erk ás1erÅ‘ á2s1e2s á2s1e2t ás1e2v ás1ex ás1ez á2s1é2g á2s1éhe á2s1éj á2s1é2k á2s1é2l á2s1é2ne á2s1ép á2s1ér. 
á2s1érd á2s1é2re á2s1é2ré á2s1é2ri á2s1érl á2s1érs á2s1érte á2s1érté á2s1értÅ‘ á2s1érv á2s1é2s á2s1é2te ás1étk ás1étt á2s1é2ve ás1fl ás1fr ás1gl ás1gr á2s1i2d á2s1if ási2g ás1iga ás1ige ási2k1e á2s1ill á2s1imá ás1imp ás1ind ás1inf á2s1ing á2s1ini á2s1int ás1inv á2s1i2p á2s1i2rat á2s1i2rá ás1iro ás1irt á2s1isk á2s1ism ás1ist ás1i2sz ás1ita á2s1iz ás1íg á2s1íj ás1íns ásí2r ás1írá ás1író á2s1í2v á2s1í2z ás1kl ás1k2r ás1kv 1ásnu á2s1ob ásoda2 áso2d1al áso2d1an áso2d1as áso2dau áso2d1ál ás1okl ás1okm ás1okta ás1oldó ás1oli á2s1ont á2s1op ás1o2rá á2s1org á2sori á2s1ork ás1orr á2s1ors á2s1orv ás1osz á2s1o2v 1á2sób 1á2sóé 1á2sóg 1á2sói 1á2sój 1á2sók 1á2són ásó1p2 á2sór ás1ó2rá ás1óri 1ásóró á2sós ás1ó2sá 1á2sót á2sóv á2s1ö2k ás1ö2l ás1ön á2s1ö2r á2s1ös ás1ö2v á2s1ö2z ás1Å‘2r ás1Å‘2s á1spic ás1p2l á1s2pór ásp2r ás1ps ás1s2k ás1s2p ás1sr ás1s2t ássz2 ás3szab ás3szag ás3szaka ás3szaké ás3száj ás3szám ás3száz ás3sze ás3szél ás3szf ás3szi ás3szí ás3szk ás3szoc ás3szok ás3szol ás3szor ás3szó ás3szö ás3szt ás3szú ás3szű ás2teg ást1elem ás2tir ás2t1ös ás1t2re ás1tri ás1tróf á2s1ud á2s1uj á2s1u2r á2s1u2s ásu2t ás1uta á2s1utá ás1utó á2s1u2z ás1úr. ás1úrn ás1ú2sz ás1úti ás1úto ás1üd á2s1üg ás1üld ás1üle á2s1ün á2s1ür ás1ü2ve á2s1üz ás1űr ás1ű2z á2s3zac á2sz1a2d ász1aga ász1agá á2sz1agy á2szaj á2sz1akc ász1a2kol á2sz1akt ász1ale á2sz1alk ásza2n á2szana á2sz1ant á2sz1any á2sz1ap ásza2s ász1asz ásza2t1e á2sz1au ás3zavar ász1ágg ász1ágy ász1á2lo ás3záp ászá2r1as ász1á2rih á2sz1á2rú ászás1s á2száta á2sz1áz ász1e2b ászebé2 ászeb1éde ász1e2gé ász1e2gy ász1eln ász1elv ász1emb ás3zene ás3zené ász1eng ász1e2p á2sz1erd á2sz1e2ré á2sz1e2s ász1e2t ász1e2v ász1ex á2sz1éj á2sz1ékb á2sz1é2l á2sz1é2ne á2sz1ép ász1éré á2sz1éte á2sz1étt á2sz1éve á2szid á2szif á2sz1ill á2szind ász1ing ászi2p á2sz1ipa á2sziro á2sz1isk á2sz1ism ász1ist ász1ita á2sziz á2szír ász1k2 1ászkái á2szokl á2sz1okm ász1ors á2sz1osz ászó1s2p á2sz1ö2b ászö2l ász1ölt á2sz1ö2r á2sz1ös ász1övé ász1Å‘r ász1Å‘2s ász3sa ás3zsu ászt2 ász1tr á2szuni á2szur á2szut ász1ú2s á2sz1ü2g á2sz1ün á2sz1ü2z ász1z át1abr át1a2já át1ajk át1ajt áta1kré á2t1akt. á2t1akto át1alh á2t1alja á2t1alm át1als át1a2lu át1alvá á2t1a2ra á2t1ará á2t1arc át1arz áta2sá át1aut át1azo átá2c át1ág. át1ál átá2p át1ápo á2t1á2rad á2t1á2rai át1áram á2t1á2rá á2t1árb á2t1árn á2t1á2ro á2t1árt á2t1á2ru 1á2t1ásás átá2s3z á2t1átf á2t1átu át1átv át1bl át1br 1átbu át1dr át1e2c át1ej át1ell át1eln át1elv á2t1emb át1eml át1eng áte2rá áteri2 áte2rik át1erÅ‘ át1ex át1é2d átée2 áté2g át1ége át1égé áté2kaj áté2k1a2l áté2kas áté2ke áték1em áték1es áté2kol áté2k1ü át1é2l áté2p á2t1érb á2t1é2ri á2t1érr á2t1érz áté2t1á2 1átfés át1fr át1gr át2hi. át2hia 1áthid áti2ag áti2al áti2d áti2g á2t1igé át1ill á2t1ing át1i2pa á2t1irá át1isk át1ist á2t1i2ta á2t1izé á2t1izm á2t1i2zo át1íg á2t1í2r át1í2v 1átkel át1kl át1k2r átle2g1 átlege2 átműt2r át1oj áto2kol át1okol. át1o2koz át1okta 2átolj 2átolt át1oml át1ont át1op áto2ran áto2ras áto2rác áto2rál áto2re áto2ris áto2r1ol át1orzá á2t1o2x átói2ko átó1p2 át1óri át1öb átö2l át1ölé át1öml át1ön át1öt á2t1ö2v át1ö2z át1Å‘2r át1pl át1pr át1ps át2ril át1sk át1sl át1sm át1sp át1sr át1st 1átszű 1átteki át3t2é át1t2r á2t1udv á2t1ug á2t1uh á2t1uj átu2min átu2n á2t1u2r átu2sze á2t1u2t át1új á2t1úr. 
á2t1úrb á2t1úrh át1úri á2t1úrn á2t1ú2ro á2t1úrr á2t1ú2s á2t1üg át1ül á2t1üt á2t1ü2v át1űr 1átvár 1átvev á1u áu2ga áu2go áu2ná áu2no áu2nó áu2nu áu2sz áu2ta áu2tá á1ú áú2sz á1ü áü2ge áü2gy áü2le áü2lé áü2lö áü2lÅ‘ áü2lü áü2rí áü2té áü2ti áü2tö áü2tÅ‘ áü2tü áü2vö á1ű áv1adm á2v1ajk áv1akt áv1alk áv1alt áv1asp áva1st áva1sz2 áva1t2 á2v1au áv1azo áv1áls áv1áte áv1átf á2v1áth á2v1átj á2v1átk á2v1átu ávegész1 á2v1e2l ável1é áv1é2de ávé1dr á2v1é2ri á2v1érte á2v1érté áv1fr á2v1iga á2v1igé áv1ind á2v1inf á2v1ing á2v1int á2v1i2rá á2v1iro á2v1i2si áv1isk áv1ism áv1izm áv1izo áv1ír á2v1ob á2v1olv á2v1op á2v1osz áv1órá áv1ör áv1ös áv1öv áv1Å‘r áv1pr áv1sk áv1sp áv1st áv1tr á2v1ug á2v1ur á2v1ú2sz ávú2t á2v1úti á2v1úto áv1üg áv1ü2z á2z1abl áz1abs áza2dal áza2d1e2 áza2dott áza2dü áz1ajt áz1akc á2z1a2kó á2z1akt. á2z1akta áz1aktá á2z1aktu áz1a2lap á2z1a2le á2z1alk áza2n á2z1ana á2z1ano á2z1any á2z1a2p áz1a2rá áz1arc áz1arm á2z1assz á2z1aszt áza2t1a2l áza2tan ázate2 áza2t1el áza2t1ér. áza2t1érv áza2tés áza2tik áza2tí áz1aty á2z1a2u á2z1á2g á2z1á2l á2z1á2rai á2z1á2rak á2z1á2ram á2z1á2ras á2z1á2rat ázá2rár ázá2rát á2z1árb á2z1árc á2z1árd á2z1á2ré áz1árf á2z1á2ri á2z1árjáb á2z1árjáv á2z1árka á2z1á2rokk á2z1á2rokr á2z1árp á2z1árrá áz1á2runkn áz1á2runkr ázá3rus. ázás3s á2z1á2ta á2z1á2t1á áz1átb á2z1á2t1e2 á2z1áté áz1áth á2z1átr áz1áts á2z1átü á2z1áz áz1bl áz1d2r áz1ef áz1e2g áz1e2m áz1ep áz1e2r áz1e2s áz1e2t áz1ex áz1e2z á2z1ég á2z1é2l á2z1é2p á2z1ér. á2z1érÅ‘ á2z1érte á2z1érté á2z1értÅ‘ á2z1érz á2z1é2te á2z1éve áz1évi áz1fl áz1fr áz1g2r á2z1i2d á2z1igé á2z1i2kon á2z1ill á2z1imá á2z1imi áz1imp á2z1inf á2z1ing á2z1inj á2z1int á2z1i2par áz1irá áz1iro ázi2s1e ázi2sir ázi2s1í2 ázis3s ázi2s1ü ázi2szó ázi2z áz1izm áz1izo áz1íj áz1í2v áz1k2l áz1k2r á2z1olda á2zoltal á2z1oltó áz1oml á2z1ont á2z1o2r á2z1osz ázói2ko áz1ól. áz1ó2rá á2z1óri ázó1s2p ázó1sz áz1öb áz1öd áz1ö2l áz1öm áz1ön áz1ös áz1ö2t áz1ö2v áz1öz áz1Å‘2r áz1pl áz1p2r á2zsab á2zs1a2d á2zs1a2g ázs1ajt á2zs1akn ázs1ala ázs1alk ázs1all á2zs1amu ázs1any á2zsar ázs1ará á2zsat á2zs1au á2zs1áll á2z3sár. á2zse áz3seb ázs1e2c ázs1ef ázs1eg ázs1e2l ázs1e2m ázs1es á2zséj á2zs1é2k ázs1éne á2zs1ére á2zs1éri ázsé2t ázs1éte á2zsia á2zsiá á2zside ázsi2g á2zs1iga ázs1igá á2zsige á2zsimm á2zs1ing á2zs1int á2zsinv á2zsió á2zsip ázs1isk á2zs1ita á2zsiz á2zs1í2 áz3sor áz3sóh ázs1ó2r á2zsö á2zs1Å‘2 ázs1s á2zs1uj á2zs1ut á2zsúto á2zsü ázs1ü2v á2z3sű áz3szá ázs3zon áz1t2r á2z1ud á2z1ug á2z1uj á2z1u2r á2z1ut á2z1új á2z1úr á2z1ü2g ázü2l áz1ür áz1ü2z ázy1i áz3zse 2b. 1ba baa2d ba2b1a2dat ba2b1ajk baba1k2 ba2b1ara ba2b1arc ba2b1aszt ba2b1ábr babá2c ba2b1ácsi ba2b1ág bab1áll ba2b1á2ro ba2bátv ba2b1érc babé2t bab1éte ba2bév ba2bik ba2b1i2na. ba2bola bab1old ba2b1ó2r ba2b1ult ba2bü ba2c3hu ba2csor 2b1a2dag ba2das 2b1a2dato ba2d1ár ba2deg ba2d1e2s ba2dog 2badomá ba2dód ba2dói ba2dój ba2dók ba2dót ba2duj ba2dús baegész1 bae2r baé2r ba1fl ba1f2r ba2gai ba1g2n ba1g2r 2b1agy. bai2z ba2jag ba2j1á2ru ba2j1á2to 2bajkú ba2j1ó2r ba2jü ba2jű ba2k1a2pó 2bakas ba2kaszt 2ba2kác bak1á2cs bak1áll 2bakc ba2keg ba2k1ér. ba2k1éri bak1k ba1k2li ba1klu ba2k1o2v ba1k2ri bak1t2 2b1aktu baku2r bak1ura bak1urá ba2ky 2b1alany 2b1a2lál ba2l1e2s ba2l1í bal2lak bal2lan bal2lál bal2l1ás bal2láz balle2 bal2leg bal2l1el bal2lem bal2les bal2l1é2l bal2lin bal2lór bal2té bal2tiz ba2lud 2b1amp 2banal 2b1a2nat banás3s ban2c1e banc3s 2b1angy ban2kab ban2k1a2d ban2k1a2l ban2kar banke2 ban2ker ban2kép ban2kérd ban2kir ban2kol banku2 ban2kut ba2nyó bao2k bao2l baó2r ba1p2l ba1pro 2b1arc. 
2b1arcé 2b1arcn 2b1arco 2b1arcr bar2csad bar2csal bar2csan bar2csö 2b1arcú 2b1argu baro2ma 2b1arté baság1g ba2seb ba1slá ba1s2m ba1sni 2b1aspe ba1s2pó bas3sze ba1s2ta ba1s2tá ba2t1esti ba1t2rá ba1tre ba1tré ba2ud 2b1a2ul bau2r 2b1a2vat ba1yi 1bá bá2bal bá2b1ass bá2bál bá2b1es bá2bik bá2bö bá2b1ü bá2csor bá2csü 2b1á2ga b1á2gú bá2gyal bá2gyar bá2gyott bá2gyö bá2gyú bá2j1e2 báj2n1á bá2j1ó2 bá2j1ö2 báj2t1a2k bá2jü bákos1 bá2laj bá2l1ap bá2l1e2 bálé2 bá2l1éj bá2lid bá2l1ing bá2l1i2o bál2is 2b1állí 2b1állo 2b1állv 2bálm bá2lö bá2lü bá2lyad bá2ly1al bá2ly1a2n bá2ly1á2z báni2as bán2ré bánya2ié bá2po 2b1á2rad 2b1á2rai bá2r1aj 2b1á2ram bá2rap bá2ras 2bá2ra2t 2b1á2ráb bá2rág bá2rár bá2r1ás 2b1á2rát bár2das bár2d1á bár2de bá2r1e bá2rén 2b1á2riá bár1i2o bá2r1i2p bá2rí 2b1árny bá2r1ó2né. 2báru. 2báruf 2b1á2rug 2b1á2ruh 2b1áruj 2b1á2ruk 2b1árur bár1u2ra. 2bárus 2b1árut 2báruv 2bárú bá2rúr bá2rü bá2s1á2ré bá2se bá2sis bá2sz1ak bá2szan bá2szas bá2sz1ál bá2sz1á2ru bá2sze bá2szil bá2szip bá2szí bá2szö bá2szü bá2t1a2k bá2t1al bá2t1á2 2b1á2t1e2 bá2tö bá2tü bb1a2da bb1add bb1a2kas b2b1alk b2b1als b2b1alu b2b1alv b2b1a2ny b2b1ap b2b1a2ra bba2t b2b1au bbá2gyas b2b1áll bb1álm b2b1áp bb1árn bb1á2ru b2b1á2s b2b1át bb1dr bbe2g b2b1e2kén b2b1elv b2b1emb bb1eng bb1erj bb1ern bb1erÅ‘ bb1e2rű bb1esté b2b1etn b2b1ex b2b1ég bb1él. bb1éle bb1élh bb1élj bb1éln bb1élt bb1é2lű bb1élv b2b1ép bb1érl b2b1érm b2b1érte b2b1érté bb1érv b2b1évi bb1fr bb1i2de b2b1iga bbigaz1 bb1illa b2b1ind b2b1int b2b1inv bbi2tat b2b1itt b2b1í2r b2b1ív bb1kl bb1kr b2b1okt bb1oltá b2b1olv b2b1op bb1ott b2b1ób bb1ó2r b2b1ö2m bbö2r bb1örö b2b1ös bb1Å‘ri bb1Å‘rz b2b1Å‘2s bb1pl bb1pr bb1sk bb1sp bb1st2 bb1t2r b2b1ud b2b1u2g b2b1uj bbu2r bb1ura b2b1u2t bbúgás1 b2b1új b2b1ú2s b2b1üg b2b1ür b2b1üz bb1ű2z bb2ví bc3sz bda2cs1a2pá bda1d2 bda1p2 bda1s2 bdasz2 bda1t2 bdé2n bd2rá bd2ro bd2ró 1be be2ac bea2d bea2j bea2k bea2l bea2n bea2r bea2s bea2t1e be2ati bea2tin be2atk be2atl bea2v beá2j beá2s beá2z be1bl be1b2r be2csar be2csár be2csért be2cs1é2te be2dén be2dz bee2l bee2s beé2r be1fl be1fr begés3s begész1 be1g2r be2gyel b1egyl bei2g bei2s beí2r be2j1elt 2bejt 2b1e2ke. beke2csa 2b1e2kétÅ‘ be1k2ré be1kri be1kró be1k2v be2lál beleí2 bel1els be2lemz bele1p2r belet2 bele1tr be2l1é2k be2l1ér. be2l1é2ren be2l1érr be2lí be2lof be2löl 2b1e2lÅ‘a be2lÅ‘r bel1p2 2bemel 2b1eml b1e2mus be2n1ál be2n1ék ben2n1a2 ben2ná ben2n1e2r ben2nég be2ny1e2g beo2k beo2l beo2m beó2v beö2r be2pe. be2ped 2b1e2pé be1p2l be1p2r be2r1ad ber1alá ber1all bera2n ber1any be2r1a2p be2r1a2r ber1ass be2r1a2t be2r1av be2raz be3rág ber1áll b1erdei 2berdÅ‘ ber1egé ber1egy be2r1e2ke. bere2k1eg be2r1ekék be2r1ell be2r1elm be2relÅ‘dö be2r1elÅ‘n be2r1e2mé be2r1eml be2r1e2pé be2r1e2r ber1e2szü be2r1e2tet be2rég be2r1ékk ber1éss ber1ingü be2rism be2r1ist ber1iszo ber1old be2ror ber1osz be2ról be2r1ó2r be2rÅ‘ 2b1erÅ‘d ber1Å‘s. ber1Å‘2se ber1Å‘2si ber1Å‘2sü ber3s2 berta2n1á2s be2r1ub be2r1un be2rur be2r1ut be2r1ü2g berü2l be2r1üld be2r1ülé be2r1ült be2sem 2b1esél 2b1e2sés be1ska be1ská be1s2l be1s2m 2b1e2sÅ‘ be1s2p2 bessz1a bessz1á be1s2ta 2beste 2besté be1s2til be1sto 2b1e2szet 2b1e2szét b1eszm besz2t1a2 besz2t1á beteg1ér. 
beté2t1elb beté2telk be1t2hi be1t2ra be1trá be1t2ré be1t2ro beu2g beu2t beü2t be2vezÅ‘ 2b1e2vol 2b1e2vÅ‘ 2b1ezr 1bé 2b1ébr bé2csú bé2d1as bé2d1á 2bédé 2bédh 2bédj 2bédl bé2d1o bé2dö 2bédr 2bédü 2bédv bé2gÅ‘ bék1alk 2b1éksz bé2l1akt bé2l1a2n bé2l1a2p bé2l1a2r bé2lál bé2l1áto bé2l1á2z bé2l1ed bé2l1e2g bé2lek bé2l1e2r 2b1é2let bé2l1é2j bé2liz bé2lí 2bélj bé2l1o bé2ló bé2lö 2b1é2lÅ‘ bélt2 bél1tr bé2lul bé2lú bé2ly1ec bé2l3yen bé2lyin bé2lyö bé2pí bé2pü 2b1é2rai bé2r1aj bé2ral bé2r1a2n bé2rap bé2rar bé2rá 2b1éráb 2b1éráh 2b1éráv 2b1érde bé2reb bé2r1eleme bé2r1e2lemé bé2r1e2lemh bé2r1e2lemk bé2r1e2lemn bé2r1e2lemr bé2r1ell bé2relm bé2r1elÅ‘i bé2r1eng bér1essz bé2r1esz bé2r1id bé2rir bé2rí b1érni bé2rö bér1s bért2 2bértel 2b1értés bér1tr bé2rut bé2rú bé2sz1a2 bé2szá bé2sz1o béta1s 2bétel bé1t2h 2b1étk 2b1étt 2b1év. bé2vek 2b1é2ven 2b1é2ves bé2vet bé2vi. 2b1évn bé2zsú bfej1els bfe2len bfé2n bf2la bf2rá bf2re bf2ri bf2ro bg2ra bg2rá bgyökö2 bgyö2k1öl 1bi bia2d bi2ae bi2ag bia2la bia2v bi1br bi1by bic3sz bida2 bi2d1ad bi2d1al bi2deg bi2del 2b1i2dÅ‘ bi2ed bie2l bi1fr bi2gaz 2b1i2gáj 2bigén 2b1i2hat bik1ala bi2k1ál bi2k1em bi2kik bi1klu bi2k1Å‘2 bi1k2ro bik1s bil1ima bil1int bilis3s 2billé 2b1illÅ‘ 2billu bi2lü bi2ma. 2b1i2mi 2b1imp bi2nab 2b1i2nad bi2naj 2b1i2nat. bi2n1árb 2b1indá bi2n1éte 2b1inká bin3n bi2nü bi1ny bi2oa bi2oá bi2ob bi2oc bi2od bi2oe bi2oé bi2of bi2oge bi2ok bi2ol bi2om 2b1i2on bi2or bió2r bi2par bi1p2l bi1pr 2b1i2ram 2b1i2rat 2b1i2rán 2birkái 2birodá 2b1irr 2b1irtá 2b1irtó bis2hi 2b1i2si 2bism bi2sö bi1s2p bis3s bi1s2to bit1ará bi2t1e2g bit1elh bit1elr bit1elt bite2r1a bi2t1ing bi2t1int bi2t1i2o bi2t1on bit1t2 bit1ura bi2t1ut biú2s bi2var 2b1i2vás 1bí bí2ja bíróé2r bí2ve bí2vé bí2vű 2b1í2zü bkés3s bk2li bk2lu bk2ra bk2rá bk2re bk2ré bk2ri bk2rí bk2ro bk2ró 1b2labl blai2k blak1a2d bla2kal bla2k1an bla2k1átm bla2k1átt bla2kem bla2kik bla2k1ol bla2kü blap1e bla1p2l bla1s2t blás1s bl2be ble2r1i bles2 ble2t1ak ble2t1a2n ble2t1á2 ble2t1e2l ble2ter ble2tesz ble2tél ble2t1érd ble2t1étk bletigaz1 ble2t1o ble2tó ble2t1ö2 ble2tu ble2tüz bleü2 bleves1s bl2he bli2af bli2as bli2of b1ljana bl2ne 1b2lok blok2k1ö2 blo2n1á blon3n b2l1Å‘zé bl2re bl2rÅ‘ bl2tÅ‘ blu2esz 1bo bo2aá bo2ab bo2ad bo2ae bo2af bo2ah bo2aj bo2am bo2ar bo2at bo2av bo2c1ak bo2ce bo2cé bo2c3h bo2csé bo1dy bo2ei bo1fl bo2g1a2k bo2g1a2t bo2g1á2c bogás1s bo2g1e bo2g1osz bogó2szá bo2gyo boka1p 2b1o2kí b1o2koz bok2szak bok2szal bok2sz1ál bok2szel bokszé2 bok2szél bok2sz1in bok2szó bok2sz1Å‘ 2b1okta 2bolaj. bolás1s 2b1oldó 2b1o2lim bolo1g2 bol2t1e bol2t1ö2 bol2t1ü2 2bolvas bona1 bon1ava bon2can bon2c1e bon2ch 2bond bo2n1e2 bo2n1ér. bo2n1or bon2t1i bo2nü bo2og bo2ok bo2om bo2ot bo2pe bo2r1a2d bo2r1aka bor1akk bo2r1akv bo2r1a2l bora2n bor1any bo2rar bor1asz bor1atr bo2r1au bo2r1av bo2raz bor1ács bo2r1áll bo2r1áz bor1d2 bo2re bor1e2c bor1el bor1e2r bor1es bor1f2 borfi2úké bo2r1i2ko bo2r1il bo2r1ing bo2r1int bo2r1isk bo2r1iss bo2r1ist bo2r1itt bo2r1iz bor1k2 2b1ornam bo2r1ond bo2r1ó2r bo2rö bo2rÅ‘ 2b1orr. 
2b1orrú bor2s1eprű bor2sors bor1str bor2sül bor2süt bor1tre bor1tré bo2rü borvíz1 bor2z1á2rak bor2zsa bor2z3se 2b1oszt bo2t1a2g bo2t1al bo2tar bo2tas bo2t1a2u bo2t1ác bo2tár bo2t1e2 bo2t1il bot1inf bot1int bo2t1i2p bo2tí bo2t1ó2 bo2tö bo2tur botú2 bo2túr bo2tü bo2tű bo2ui bo2ul bo2ur bo1ya bo1yá bo1yé bo1yi bo1yo bo1yu bo2zar bo2zál bo2z1e2 bo2zid bo2z1i2p bo2z1ir bo2zí bo2z1old bo2z3s bo2zü bo2zű 1bó bóa2d bóá2g bóá2r bó2bé bó2cal bó2ch bó2cü bó1fl bó1kré bóli2a b1ólm bó1p2l bó1p2r bó2r1ad bó2r1an bó2rat 2bórá bó2rás bór1ásv bó2reg bó2rel bó2r1in bó2riz bó2r1ol bóró2 bó2rós bó2rö bórt2 bó2rü bó1s2p bó1sz 1bö bö2ch bö2lá 2bölb böl2csü 2b1ö2lér 2bölhö 2bölig 2bölk b1ölkú 2bölr 2b1öltö 2böltÅ‘ bö2lú bö2lyökö b1öml bö2ná 2b1önt bö2ra bö2r1e bö2ro 2b1össz 2b1ötl 2b1öts bö2ve 1bÅ‘ bÅ‘2r1a2 bÅ‘2r1á2 bÅ‘2r1e2g bÅ‘2r1e2l bÅ‘2r1em bÅ‘2r1en bÅ‘2r1e2r bÅ‘2r1é2g bÅ‘2rék bÅ‘2r1és bÅ‘2ril bÅ‘2r1ing bÅ‘2rip bÅ‘2r1i2s bÅ‘2riz bÅ‘r1izg 2bÅ‘rl bÅ‘2r1o2 bÅ‘r1öl bÅ‘2rÅ‘ bÅ‘rren2 bÅ‘r1s2 bÅ‘2r1u bÅ‘2rú bÅ‘2r1ü2g bÅ‘2r1ü2l 2bÅ‘si bp2la bp2lá bp2le bp2re bp2ré bp2rí bp2ro b1proj bp2ró bra2k1á2 bra2kös bra1p2 1braty brá2nag brá2nas brá2n1át bránt2 brá2sz1ál brá2sze b2ric bri2da bri2dá bri2der 1b2rig bri2no bri2ód bri2óf bri2óm bri2tel b2rosú bró2m1a bró2me 1b2rum bsé2g1el b1s2ká bs2lá bs2pe bs2pi bs2po bs2ta bs2tá bs2ti bs2tí bs2tú bszá2r1a2da bsz2f b1sz2k bsz2tá btermo1 btermosz2 bt2rá b1t2re bt2ré b1t2ri b1t2ro b1t2ró 1bu bu2ch 2b1udv bué2r bu2g1i2 bu2il 2b1ujj. 2bujja. 2bujjad buj2j1a2da 2bujjai 2bujjak 2bujjam 2bujjas 2bujjat 2b1ujjá 2bujjb 2bujjc 2bujjd 2bujje 2bujjé 2bujjf 2bujjg 2bujjh 2bujji 2bujjk 2bujjl 2bujjm 2bujjn 2b1ujjo 2bujjp 2bujjr 2bujjs 2bujjt 2bujju 2bujjú 2bujjv buk2j1e bu2maj bu2mel bu2m1i2k bu2m1ina bu2mis bu2mol 2b1ural b1urb 2burn 2b1u2rú bu2se bu2sin bu2sol bu1s2p bus3sze bu2szab busz1aj bu2szal bu2szas bu2sz1ál bu2sz1á2rak bu2sz1árn busz1en bu2sz1él bu2sz1é2p bu2szid bu2sz1il bu2szim bu2szin bu2szip bu2sziz buszí2 busz1íj busz3s bu2szü 1bú bú2jí 2búr. 2b1ú2ri 2búrt 2búsz bú2szá b1ú2ti b1útm 1bü bü2dü bü2ge bü2gy bü2ku 2b1üld bü2l1é2n bü2ne 2bürü 2b1ü2te 2b1ü2té b1ü2tÅ‘ 2b1ü2ve bü2ze 1bű bű2na bű2ná bű2nel bű2nem bű2nes bű2n1e2t bűn1n bű2no bű2nó bű2n1Å‘2 bű2nu bű2nű bű2ri bű2v1e2 bű2z1a2 bű2z1á bű2zo bű2z1Å‘ bű2z3s bvá2nyad bvá2nyí by2te 2c. 1ca 2c1abl ca1b2r ca2cél ca2ch ca2dás 2c1a2dó ca2es caéne2 ca1fro 2c1agg ca2gya cagy1a2d ca2gyu 2c1ajk 2c1a2kad ca2kác ca2k1áz 2c1akc cak2kol cak2k1ö ca1kri cala2g1 ca2lan ca2l1a2s 2c1albu ca2l1este ca2l1é2l calé2t cal1éte ca2lim ca2l1ip cal1osz cal1p cal1s ca2lü ca1ly ca2nal ca2nar 2c1a2ni can2ne caó2r ca2pó ca1p2ró ca1p2s ca2ran ca2rán 2c1arc ca2ris ca1s2p 2c1assz cast2 ca1sto ca1str 2c1aszt ca2tau ca2tem 2c1atl c1a2uk cau2n ca2vat 2c1a2zo 1cá 2c1ábr cá2ga cá2gú cá2gy cá2la c1állá 2c1álm cá2ne cá2nét cá2nir cá3p2a3 2c1á2po 2c1á2rad 2c1á2rak cá2r1as cá2ria cá2riá cá2r1i2n 2cárk 2c1árp 2cáru. 2c1á2ruh 2cárus cá2sás 2c1á2só 2c1ásv 2c1á2szai 2c1á2szo 2c1á2t1a2 c1áth 2c1á2t1i2 2c1átm 2c1átr 2c1átt 2c1á2tü 2c1átv cb2lo cb2ra c2c1a2j c2c1ak cc1alb cc1a2ny c2c1a2r c2c1au c2c1ág c2c1ál cc1bl cc1ef c2c1elm c2c1ember c2c1ép c2c1ér. c2c1érte cc3he cc3hí c2c3ho cc3hÅ‘ cc3hú c2c1i2m cci2na. 
cc1ing cci2óv cc1i2pa cc1iro c2c1i2z cc1ír cc1kl cc1k2r cc1oá c2c1ov cc1ön cc1ös cc1öz cc1pl cc1pr c2cs c3csap c3csar ccs1as c3csat ccs1ál c3csáp ccs1ás ccs1átl ccs1eg ccs1elem ccs1ell ccs1elv ccs1embe ccs1eml c3csep ccs1él ccs1érté ccs1iv c3csop ccs1ork cc3soro ccs1ö2l ccs1önt ccs1s ccs1ut c3csú c3csű cc3sza cc3szá ccs3zen cc3szó cc1új c2c1üg cc1ür cc3zá cc3ze cc3zs cda2l1é2 cda2l1i cde2m1e2ké cde2m1el cd2ra cd2rá 1ce cea2l ce2at ce2au ceá2r ce2béd ce1bl ce2ch cec3he 2c1e2dz ce2gas ce2g1é2k cegés3s ce2gina ce2gor ce1g2rá ce2gu ce2gú c1egy. c1egyb ce2gyen c1e2gyi c1egym c1egyr 2c1egys 2c1e2ke. ce1kl 2c1elad 2c1e2lág cel1ér 2c1elf 2c1elha 2c1eljá 2c1e2los c1e2lÅ‘a 2celÅ‘döt. 2c1e2lÅ‘f 2c1e2lőí c1e2lÅ‘Å‘ 2c1elsÅ‘ 2c1eltá c1elto 2c1elvá 2c1e2melÅ‘ 2c1eml cenc1c cen2c1eg cen2c3s 2c1eng cen2s1ég cen2tau ceo2l ceo2r 2c1e2pe. 2c1e2pi ce1p2r cep2sz1a2 cep2sz1é2p cep2szi cep2t1a2 cep2t1ér. cep2tim cep2t1ol 2c1e2rej ce2róz 2c1e2rÅ‘ cer2t1a2 certá2 cer2tár cer2teg cer2t1e2l cer2t1emb cer2t1est cer2tél cer2t1én cer2t1ol cer2t1ö cer2tu certü2 cer2t1üz ce2sem 2c1e2sés 2c1e2sÅ‘ ces2t1é2j ces2t1ék ce1stra ce2t1e2g ce2t1essz ce2t1esz ce2t1é2t 2ceth cet1ill ce2t1i2n ce2tiz 2cetn 2ceton ce1t2ra ce2t1us ce2tűd cetű2z ceü2t ce2vés ce2vÅ‘ 2c1e2zer 2cezrede 1cé 2c1ébr cé2dél cé2g1a2 cé2g1á2 cé2g1eg cé2g1e2l cé2ge2r cég1ere cég1eré cé2g1esem cé2geté cé2getÅ‘ cé2g1ék cé2gép 2c1é2gés cég1g cé2g1i2d cég1iga cé2gigé cé2gim cé2gir cé2g1iz cégo2 cé2g1ok cé2gol cé2gó cé2gö cé2g1u2 cé2gú céha2 cé2hal cé2han cé2har cé2hed cé2h1e2g cé2hir cé2hu cé2hú 2c1éks cé2la cél1a2n cé3lap cél1a2r cé2láb cé2lál cé2l1á2r cé2l1á2t cé2le. cé2led cé2leg cé2lei cé2lek cé2l1e2l cé2lem cél1emb cé2len cé2l1er cé2l1e2s 2c1é2let 2c1é2lez cé2léb cé2l1é2k cé2lénk cé2lép cé2lére cé2lés cé2lim cé2lin cé2lir cé2l1is cé2liz cé2lí cé2ló cé2l1ö2 cé2l1Å‘2 célt2 cél1tr célu2 cé2l1ut cé2lü 2c1é2lű cé2pí cé2pü cé2rag 2c1érd 2c1é2rés cé2rin 2c1érint cé2ris cér1s 2c1érté 2c1értÅ‘. cé2rú 2c1érz cés3s 2c1észh 2c1étk 2c1étt 2c1év. 2c1évad 2c1é2ve. 2cévei 2c1é2vek 2c1é2ven 2c1é2ves 2c1é2vet 2c1é2vét cé2vi. 2c1évn 2c1é2vü cf2ló cf2ra cf2ri cf2ro cg2ra cg2ri c2h 1cha c3had 2c3haj 2cham 2chang 2c3harc 2charm 2chatá 2c3hav 1chá 2c3hám c3hány c3ház ch1bl 1che 2c3heg 2chev 2c3hez 1ché 1chi 2c3hib 2c3hió 2chitel 2chitet c3hiú 1chí 1cho cho1d2 2c3hor 2c3hoss 1chó 1chö 1chÅ‘ ch1pr ch1sc ch1sp 1chu chu2r 2c3huz 1chú 1chü 1chű 1ci ci2aa ci2aá cia1b2 ci2aba ci2abo ci2abr ci2ac ci2a1d2 ci2ae ci2aé cia1f cia2fag ci2afr ci2a1g2 ci2ah ci2aik ci2aí ci2aj ci2akar ci2akas ci2akén. ci2akó ci2ala ci2alá ci2ale ci2alé ci2alo ci2am ci2any ci2ao ci2aó ci2aö ci2aÅ‘ ci2a1p2 ci2ar ci2asza ci2aszá ci2aszé ci2aszo ci2aszó cia1t2 ci2ata ci2atá ci2até ci2ato ci2atű ci2au2 ci2aú ci2aü ci2aű ci2av ci2az ciá2lan ciá2nár ci2áó 2c1i2deg ci2deo 2c1i2dé 2c1i2dom 2c1i2dÅ‘ cie2r 2c1ifj 2c1i2ga. 2c1i2gaz ci2ge. ci1g2r ci2ker cikka2 cik2kaj cik2kan cik2k1o ci1k2la 2c1ikrá ci2l1á2t 2cillu ci2mit 2cimpu ci2n1al ci2n1árt cin1d2 ci2n1e2re 2cinf 2c1inge 2c1ingr ci2nim cin2kac cin2k1a2l cin2kért cin2kol cin2kors cin2kö ci2nö cin2tar cin2t1es cinus1s ci2nü 2c1inv ci2od ci2of ci2og cio1gra ci2oi ci2ol ci3olo 2c1i2onn ci2op cio2v ci2óa ci2óá ci2óc ci2ódar ci2óe ci2óg ci2óí ci2ókal ci2ókam ci2óká ci2óke ci2ó1kl ci2ókom ci2ókos ci2óku ci2ól ci2ómag ci2ómá ci2óné ci2óo ci2óó ci2óö ci2óő ci2ó1p ci2ósá ci2óse ció2s1ér ci2ó1s2ká ci2ószo ci2ótá ci2óú ci2óü ci2ózón ciÅ‘2r ci2rat 2c1i2rán 2c1i2rod 2c1irt ci2si. 2c1isko 2c1ism 2c1isp ci1stad ci2sz1i ci2t1aj citá2r cit1ár. 
cit1ára cit1áré cit1áro cit1érr ci2tik ci2t1ol ci1ty ciumi2 ciu2min ciu2m1io ciumköz1 ciu2t 2c1i2vad 2c1i2vás 1cí cí2ja cí2jé cí2m1a2 cí2m1á cí2m1e2l cí2m1é2l cí2mí cí2mo cí2mö cí2mÅ‘ cí2mu cí2rá cí2ró cí2vá cí2ve cí2vé cí2zs cí2zü ckaka2 c2k1alj c2k1arc cka2rom c2k1ág ck1áll c2k1árn c2k1e2g cke1p 1cker. 1ckerk 1ckern cké2p1e2l c2k1é2ré c2k1érl c2k1érté ck1fl ck1fr ck1ill c2k1íz ck1kl ck1k2r ck2lu c2k1o2la ck1o2pe c2k1orro c2korru c2korrú 1c2kosak c2k1ou c2k1öb ck1öss ck2reá c1k2ri c1k2rí ck1sp c2k1üg ck1ült c2k1üt c2k1ü2v ck1űr ck2va cli2s clu2b1a cmelo1 cnya2k cnyol2c1an 1co co2at co2áz co2be co2eu co2kar co2ká co2ke co2kél co2kép 2c1o2kí 2c1o2laj co2l1áro 2c1olda 2c1oldá 2c1oldó co2le co2l1ibr co2li2m col1ima co2l1i2na. co2l1ind co2l1ing co2l1inu co2l1o2r 2colvad 2colvas comba2 com2bal com2b1e2 com2biz com2bol com2bór com2bö 2c1oml co2ol 2c1o2pe cop2f1Å‘ co1py 2c1orc 2c1o2ri 2c1orm c1o2rom 2c1orro 2c1orrú 2corv 2c1oskol co2s1o2ku cos3s cos3zs 2c1oszt co2ul co2un co2uv co2vi 1có c1ó2ni 2c1ó2rá c1ó2ri có2vó 1cö c1öltö c1ösv c1ösz c1ötb c1ö2t1e c1ö2té c1ötf c1öth c1ö2ti c1ötk c1ötm c1ötn c1ötr c1öts c1ött c1ö2tü c1öv. cö2zö 1cÅ‘ cÅ‘2rü cp2la cp2ra cp2re cp2ré cp2ri cp2ro cp2ró c2s 1csa 2csabl 2cs1a2dat 2cs1a2dá 2cs1a2dó 2cs1akc csa2lakj csa2lapú 2csaláí 2csalát 2csalb 2cs1alg 2cs1alk cs1alle 2cs1alm csa2lomb cs1amb 2csant csa2pál 2csapka 2csapká csa2por 2csapóká 2csapókr 2csapósab 2cs1arc 2csarg 2csark 2cs1arz 2cs1ass csa2t1ó2r csavar1a2 cs1azo 1csá csá2be 2csáf 2cság cs1á2gak cs1á2gu 2cs1árad 2cs1á2rak 2csáram 2csárat csá2rát 2cs1árny cs1á2ruk 2cs1ásá 2c3sási 2cs1ásó 2csátásás 2csátf 2csátm 2csátr 2csáv. 2c3sávo cs1bl cs1br cs1d2r 1cse 2csebé cs1e2ce cse2csa 2cs1e2dé 2cs1edz cs1eff cs1e2ges 2csegí 2cs1e2ke. 2cself 2csellá 2cselőí 2cselta 2cseltá 2cselv. 2c3sely 2cs1ember 2cs1e2mel 2cseml 2csemu csen2d1Å‘2 cse2nis cse2n3yen 2csenz cse1p2ré cse2rál cse2r1e2ped 2cseró 2cserÅ‘ 2csesÅ‘ 2cseszm cse2tüz 2csevÅ‘ 2cs1ex 1csé 2cség c3ség. c3ségb c3ségg c3ségh c3ségi c3ségn c3ségr 2cséhes 2cséhs 2cs1é2ka 2cséks 2cs1éle csé2m1a 2csénekb 2cséneke 2cséneké 2csénekk 2csénekr 2cséneks 2csénekt 2csénekü 2csépí 2csépü 2csérde csé2résk 2c3sérv. 2csérvb 2csérvh 2csérvr 2csérvv 2csérz csé2sza csé2tab 2csév. 2csévek 2csévn 2csévr 2csévv cs1fr cs1gl cs1gr 1csi csiá2 2csidÅ‘ 2csigé 2csimí 2csimog 2csimpo 2cs1i2na. 2csinas 2cs1ind 2cs1inp 2cs1inv 2csiono 2csipí 2csiví 2csivó 1csí 2csíki 2c3sírb 2c3sírj cs1í2ró 2cs1í2v 2csíze 2csízl cs1ízn 2csízt cs1ízü cska1s cskasz2 cs1kl cs1kv c3slág 1cso 2cs1o2á 2csobj cso2k1á 2csokk. 2csokoz 2csolaj 2csolda 2csoldá 2csoldó. 2csoldók 2csoldóm 2csoldón 2csoldór 2csoldót 2csoldóv 2cs1oli 2csolló 2csolvas cso2mor cson2t1a2 2csopc 2cs1orc 2csori 2csork 2c3sorv 2csoszl 2csoszt 1csó cs1ódá csó2kes csó2k1ö2 2cs1óné 1csö 2csökrü cs1öml csön3n 2csör. c3söre 2csössz 2csöv. 2csözön 1csÅ‘ 3csÅ‘. 3csÅ‘b csőé2h 2csÅ‘rz csÅ‘2sz1á2ra csÅ‘2szék csÅ‘2szÅ‘ 3csÅ‘v c3s2pek cs1p2l csp2r cs1ps cs1sl cs1s2p cs1s2t cs3sz2 cssza2kü c3s2tab cs2top cst2r c3stru 1csu 2csudv 2csug 2c3suho cs1ujj 2cs1una 2csuni 2cs1ura 2cs1u2rá cs1u2tas 2cs1utá cs1utó 1csú csú1p2 2csútn 1csü 2csüd 2csügé 2csügy 2cs1ünn cs1ü2te 2c3süv 2cs1üz 1csű c4s3zac cs3zaj csza2ké cs3zam c3szál c3szám cs3záp cs3zát c3száz c3sz2c c3szer c3szé c3sz2f c3szi c3szí c3sz2l c3szob cs3zokn c3szol cs3zon c3szor cs3zón c3szÅ‘ cs3zs csz2t cs3zug cs3zú c3szü c3szű c3sz2v ct2ra ct2re ct2ré ct2ri ct2ro ct2rü 1cu cucci2 cuc2cin cuko2r1a cula2te cu2lü 2c1und 2c1u2no 2c1u2ra 2c1u2tá 1cú cú2jí c1úr. 
c1úrr c1ú2ti cú2to 1cü cü2ge cü2gy 2c1ü2lé cü2lÅ‘ c1ünn cü2re cü2rí cü2rü cü2te cü2té cü2tÅ‘ cü2ve cü2ze 1cű cű2zÅ‘ cva2nem cva2név cve2név cven3n c2z 1cza cza2ib cza2ié cza2ih cza2ik cza2in cza2ir cza2it cza2iv 1czá 1cze 2c3zen 1czé c3zéh 1czi 1czí 1czo 1czó 1czö 1czÅ‘ 1czu 1czú 1czü 1czű 1czy 2d. 1da daa2d daát1 2d1abla da2c1ir da2cz da2dag dad1ala 2dadato 2d1a2dó da1drá da1dro dae2r daé2d daé2r da1f2l da1f2r da2g1el dag3g 2d1ahh da2ire 2d1a2ján 2d1ajt 2d1a2kad daká2r 2d1akkora d1akku da1kl da1k2ré da1k2ri 2d1aktu dal1a2ga dal1ajt da2lakj da2l1akta da2l1aktá da2laku da2lakú 2d1alakz da2l1a2l da2lapc da2lapk da2lapn da2lapr da2l1aps da2lapt 2d1a2lapú da2lar da2l1asz da2latt da2l1á2g da2l1ál dal1á2rak dal1árá da2l1árb da2l1árn da2l1árr dal1á2sz da2l1áti dale2l dal1elá dal1ele dal1ell dal1e2sé dalé2ke2 dal1él. da2l1é2ne da2l1ér. dal1f2 da2lid da2l1i2ko da2l1ikr da2l1i2m da2l1i2nát da2lind da2l1inf da2l1ing da2l1inj da2l1int da2l1i2nu da2l1itt dali2z dal1izo 2d1alji dal3l 2d1allo dalo2m1e dalomo2 da2l1or da2lÅ‘r dal1p2 dal1ud da2l1ur da2l1u2s da2l1u2t dalú2t da2l1úto dal1útr da2lü 2d1amc 2d1amf 2d1a2nal 2d1ang d1a2nyag 2d1a2nyó dao2k daó2r daó2v 2d1a2pa. 2d1a2pai da2paké da2páb da2pád da2páé da2páh da2pái da2pák da2pám da2pát da2páv 2d1apj da1p2l da2pó. 2d1app da1pro da1p2s 2dapt dara2be da2r1a2dó dar1ala da2rant da2r1azo da2r1áta da2r1átf dar2c1e2 dar2c3h dar2cso 2d1arcú dar2d1a2l dar2d1á dar2del dar2d1es dar2d1ó2 da2r1el da2r1il darus3s dar1uta da2r1ü da2sál da1s2l da1spe 2d1assz da1s2ta da1szl daszt2 dasz2tá 2daszú da2t1akt da2t1akv da2t1a2la da2t1alk dat1apu dat1ará dat1ass da2t1att da2taz da2t1áll da2t1árad datá2ramm datá2ramr da2t1áta da2t1átá da2telem dat1e2l1é dat1elí da2t1elk dat1ell da2t1elÅ‘ dat1elt da2t1é2g da2térte da2t1érté da2t1érth da2t1érv da2tid dati2k da2t1iko da2tim da2t1inf dat1inte dat1ist da2tiz da2t1ír dat1íve dat1k2 2d1atlas da2t1osz da2tóc 2datóm da2t1ó2r dató2s dat1ósá dat1t2 da2t1u2t da2tút da2tűr da2tya da2tyá 2d1a2uk da2up 2d1a2ur dau2s dau2ta da3u2tóp daü2t daü2z 2d1a2vat 2d1avv da1ye da1yé 2d1az. da2zál 2d1a2zé da2zok da2zon 1dá 2d1á2bé 2d1ábr dá2c3ho dá2fá dá2ga dá2gá dá2gú 2d1á2gy dá2lál dá2lár dá2l1e 2d1állí dá2lü dá2ly1a2n dályú2 dá2ly1ús dámu2 dá2m1ut dá2m1ú dá2ny1a2d dá2ny1al dá2ny1a2n dá2nyaz dá2nyó dá2po 2d1á2rad dá2r1ag 2d1á2rai dá2r1a2j dá2ral 2d1á2ram dá2r1a2n dá2r1a2p dá2rar dá2ras dár1ass dá2rat dár1atk dá2rá dár1ál dá2r1e2 dá2réb 2d1á2ria. dá2rij dá2ril dá2r1i2p dár1isk dár1ism dá2rí dá2rod dá2r1ond dá2r1ot dá2rö dá2rÅ‘ dár1s2 dárt2 dár1tr 2dáru. dá2ruk dáru2s1ág. dáru2s1á2ga dá2rú2 2d1árú. 
dá2rü dá2rű dá2s1a2d dá2sal dá2sar dá2s1á2g dá2s1árad dá2s1árh dá2s1á2ru d1á2sás dását1a2 dá2s1e2 dásfé2l1é2v dá2sim dá2sis dá2sol dá2sor dá2só2 dás1ór dá2sö dá2sÅ‘ dást2 dás1tr dá2sut dá2s1ü2 dá2szag dá2szaka dá2szal dá2szar dá2szav dá2sz1ál dá2szárk dá2sz1á2ro dá2sze dász1el dász1em dász1er dá2széb dá2szip dá2szir dá2szis dá2sz1í2 2dászkái dá2sz1ö dá2szÅ‘ dász3s dá2sz1us dá2sz1ú dá2sz1ü2 dá2sz1ű 2d1á2ta dát1al 2d1á2tá 2d1átd dá2t1e2 2d1á2té 2d1átf 2d1á2tí 2d1átj 2d1átk 2d1átm 2d1átr 3dátu 2d1á2tú 2d1átv dba2l1 db2lo db2lú db2ro db2ró dc2lu dcsa2p1á2g dd1elh d2d1i2d ddí2s d2d1o2d dd2rá dd2ró d2dz d3dzs 1de de2aa de2aá de2ac dea2d de2ae de2aé de2agy de2ah de2aí de2ala de2alá de2alo de2am dea2n de2ao de2ap dea1sz2 de2aszf de2at 2d1e2bé ded1ell 2d1e2dén de2d1ó2v de1dra de2d1ú2 2d1e2dz de2ep dee2s deé2r 2d1eff de1fr de2g1a2l de2g1a2n de2g1ál de2g1e2l degen3n de2ger de2g1ék deg1éri de2gés deg1éss degész1 deg3g de2giz 2dego de2gor degö2 de2g1öl de2gör de2g1ös 2de2gÅ‘ 2d1e2gye degyez1 2degz 2d1ehh deho2g de2if dei2g deí2r de2k1a2k de2kaz de2k1e2g de2kellen de2kep dek1e2rÅ‘ de2k1esz dek1ékb dek1é2ke de2k1él de2k1éri de2kérte de2k1érté de2k1érv de2k1érz de1klu dek1old dek1s deksz2 2d1elad de2lef 2d1e2legy dele2m1a dele2má dele2meg de2lemek dele2mel delem1ele dele2mu 2d1e2lemű 2d1e2lemz dele2t1a2 2d1eleteté 2d1e2l1é2k 2d1e2lél delés3s 2d1elha 2d1elho 2delkezdé 2delkezdÅ‘ del2lal del2l1an del2l1e2g delle2l del2l1ele del2lelk 2dellená 2d1ellene del2l1ent del2ler del2l1é2j del2l1é2k del2l1in del2l1is del2los del2lór del2lÅ‘r 2d1elma 2d1elnö de2los 2delö de2löl de2lÅ‘ad 2d1e2lÅ‘ka 2delÅ‘rej 2d1elsa 2d1eltett. 2d1eltér 2d1elv. 2d1elvá 2d1elves 2d1elvo 2d1elvű. 2d1elvűe 2d1elvűk 2d1elvűn 2d1elvűr 2d1elvűs 2d1elvűt 2d1elvűv de2mad de2m1a2l de2maz de2m1ál de2mez de2m1é2rem de2m1érm de2mim dem1ing 2demoi dem1p de2mus demü2l de2nal 2d1e2ner denkié2ne de2nol de2n1ó2 dens1s de2od de2of de2o1g2 de2oj de2olo de2om de2ot de2pe. 2d1e2pé de1p2re de1pro de1p2s de2rad der1aka de2rala de2r1a2n de2r1ar de2r1á2g de2r1á2r de2rás der1ázt 2derdÅ‘ dere2c 2deredm 2d1e2reje 2derején 2derejér 2d1e2rejét de2r1e2kei der1e2le der1ell der1e2lÅ‘ der1elt de2rer de2r1e2sÅ‘ de2r1él de2rid de2r1il de2r1i2m de2r1in de2r1i2p de2r1i2s der2nék de2r1osz de2r1ó2r de2rÅ‘ d1erÅ‘. der1Å‘s. d1erÅ‘t d1erÅ‘v der1sp deru2 de2rut de2r1ü2g de2r1üld der1üle der1ültet dervis1s 2de2s1a2 2desg de2sip 2deskü 2destes de1sto de2sur desú2 de2s1úr 2d1eszm de1sz2ta de2szű. de2t1ék de2tika de2tiká 2d1e2vÅ‘ 2d1evv de2xa dexi2 de2xin de2xiz de2xí de2x1o de2xö 2deza de2zak de2zér de2zil de2zin de2z1or dezÅ‘e2 dezÅ‘kész1 2d1ezr 1dé 2d1ébr dé2dap dé2d1Å‘ dé2du dé1fl dé2g1a2 dé2g1á2 dé2g1e2b dé2g1eg dé2gép dé2g1érk dé2gés dég1ész dég3g dég1iga dé2gigé dé2gin dé2gí dé2g1ok dé2got dé2gó dég1s dé2g1u2 dé2gú dé2gű 2d1é2hes 2d1éhs 2d1éj. 2d1éjb dé2k1ab dé2kac dé2k1a2d dé2k1a2l dé2k1a2n dé2k1ap dé2k1as dé2k1au dé2kaz dé2k1ág dé2k1árt dé2kás dékát1a2 dé2k1e2g déke2l dé2k1ele dék1ell dék1elÅ‘ dé2k1elt dé2k1er dé2k1esz dé2k1eti dé2kez dé2k1é2j dé2k1é2k dé2k1é2l dé2k1ér. 
dé2k1éte dék1isz dé2kivá dé2kí dékkulcs1 dé2k1old dé2kop dé2k1or dé2k1osz dékó2 dé2kór dé2k1ö2v dé2köz dé2kÅ‘ dék2rém dé2k1ut dé2lad dé2lam dé2l1á2 dé2leg dé2le2l dél1elÅ‘ dé2les dél1est 2d1é2let dé2lim dé2lio délkö2z1ön dé2lo dé2l1ö2 2d1é2lÅ‘ dé2l1u2 dé2lük dé2lyö dé2m1e2l dé2m1e2m déna2 dén1ac dénár1a2 2d1é2neke dé2ny1el dé2nyid dé2nyo dé2nyö dép2i 2d1é2pí 2d1é2pü dé2rag dé2ral dé2r1an dé2rar dé2ras dé2rá dér1d2 dé2reg dér1eml dér1esé dé2r1est dé2rez dé2rés dé2rif dé2r1ik dé2rí dé2rot déró2 dé2rór dé2rö 2d1érté 2d1érth dé2r1út dé1ry 2d1érzé désa2 dé2s1aj dé2sal dé2sap dé2sar dé2s1az désá2 dé2s1ár dé2seg dé2s1e2l dé2s1eti dé2s1ég dé2sí dé2sú dé2s1ü2t dész1ak dé2s3zá dé2sz1ék dé2szév dé2sz1o dé2szú dé2t1as dé2t1e2g dé2t1is 2d1étke dé2tÅ‘r 2d1év. 2d1évb 2d1é2ve. 2dévei 2d1é2vek 2d1é2vem 2d1é2ven 2d1é2ves. 2dévesb 2d1é2vesek 2d1é2vesen 2d1é2vesh 2dévesi 2dévesk 2d1é2vesn 2dévesr 2d1é2vess 2d1é2vet 2d1évez 2d1é2véb 2d1é2véi 2dévén 2dévér 2d1é2vét 2d1é2vév 2d1évf 2d1évh 2d1é2vi 2d1évk 2d1évn 2d1évr 2d1évs 2d1évt 2d1é2vu 2d1é2vü 2d1évv 2d1évz dfé2nyem df2lo df2ló df2rá df2re df2ri df2ro df2rö dgá2zár dgázát1 dgá2zi dgá2zó d2ge. dg2le dg2li dg2ló dg2ra dg2rá dg2ró d2gyu d2hali dhan2g1e dhé2t1 d2hi. d2hié d2hih d2hii d2hij d2hik d2hir dhú2s1á2 1di di2aa di2aá dia1b di2abá di2abi di2abr di2ac dia1d2 dia3da di2adi di2ado di2ae di2aé di2afa di2agy di2ah di2ai2k di2aí di2aj di2akép di2akol di2ala di2alá di2ali di2am di2ana di2aná di2ani di2anó di2ao di2aó di2aö di2aÅ‘ di2a1p2 di2ara di2asza di2aszk di2aszó di2a1t2 di2au2 di2aú di2aü di2aű di2av di2az diá2k1e diá2kol dián3n di1c2k di2cs1aj di2cs1e2r 2d1i2deg 2didej di2deo 2d1i2dén di2d1i2o 2d1i2dÅ‘ di2eu di1fl di2ga. 2d1i2gáná di2ge. di2g1e2l 2d1i2gén di2gét 2dijes di2kép di1k2l 2d1i2konh di1k2ro dik1u2ta di1k2v di2lan dile2m dilig2 di2lö di2l1Å‘ di2lü di1ly di2ma. 2d1i2má di2mit 2d1imp 2d1i2naka 2dind 2d1inf dinga2 din2gal 2d1ingá 2d1inger 2d1i2nic di2n1ing 2d1inj di2nód di2n1óm di2n1ó2n di2n1ó2r 2d1inp 2d1inté 2d1inv di2oa di2oi di2ok di2ol di2ome di2ov di2óa di2óá di2óe di2óg di2óí di2ókam di2óká di2ó1kl di2ókok di2óku di2ómé di2óo di2óó di2óö di2óő di2ó1p2 di2órá di2óri dió2si2 diós1ik di3ósor. di2óspe di2ószű di2óu di2óú di2óü 2dipar. 2d1i2pará 2diparb 2d1i2pari 2d1i2paro 2d1i2rat 2d1i2rá 2d1i2rod 2d1irt di2saj 2d1i2si. 2d1isko 2d1ism 2d1iste di2tal dit1a2la dit1ará di2t1e2g dit1t2 di2tü diumé2 diu2m1én diu2mil diú2t di2vad 2d1i2var diva2t1a 2d1i2zé 1dí dí2gé dí2j1áto dí2je dí2jí dí2jö dí2jü 2d1í2rá díri2 dí2rik 2d1í2ró dí2sz1a dí2szer dí2szö dítés3s 2d1ív. 
2d1í2ve 2dívn 2d1í2vü 2d1í2vű dí2zi dí2zs dí2zü dí2zű dj2eg dje2gya dj1is djo2n1 dka2n1á2 dkia2 dkie2 dk2la dk2li dk2lo dk2lu dk2rá dk2ré dk2ri dk2ro dk2ró dk2va dk2vi dlás3s dlá2s3z dleí2 dló1g2 dlót2 dlÅ‘1kr dme2g1ér dna2pe dné2v1á dnö2k1ö2l 1do do2áz do2b1ag do2b1a2l doba2n do2b1any do2b1ár do2bát dobe2 do2b1el do2b1ill do2bí 2dobje do2boá do2b1old do2b1or do2bö do2bü do2bű 2d1o2dú do2gar dogasz1 do2gár dogás1s do2g1ol do2gor dogos3s do1g2rá do2gü do2kal do2kas do2káj do2k1ál do2k1e do2k1é2l do2kép dokkö2 dok2k1öb dok2kÅ‘ do2k1ott 2d1o2koz do2kö dokú2t dok1úto do2kü 2d1o2laj dola2tar dola2t1e dola2t1ör 2d1olda 2d1oldá 2d1oldo 2d1oldó 2d1oltár 2d1oltás 2dolvas dom1a2cé do2m1árb do2m1áré do2m1árh do2m1árj do2m1árk do2m1árl do2m1árn do2m1á2ron do2m1árr do2m1ártó dom2ba2l dom2bel dom2bol dom2bón do2mel 2domí 2doml do2m1ond do2mÅ‘ 2domú do2mü do2n1ad dona2l don1ala do2n1a2r do2n1as do2n1ál do2n1áta do2n1átj do2n1áts do2n1átv don1d2 do2n1e donos1s do2n1osz do2nö don1s dont2 don1tr do2nü do2nyal do2nyar do2nye do2nyó 2d1o2pe do1p2l dor1akn do2r1a2l1 do2r1a2p do2r1as do2rat dor1áll do2r1á2lo dord2 dor1dr do2r1e2 do2rid do2r1il do2r1is do2r1ita dor1k2 do2r1okl dor1oszt do2rö do2rÅ‘ 2d1orrú dors2 dor1sp dor1tró dorú2t do2rü 2dorvos do2ug do2ut do2vi do1ye 1dó dóa2d dóá2g dóá2r dó1bl 2dóez dó1fl dó1f2r 2dógá dó1g2r dói2g dóí2v dó1k2l dó1kré dó1k2v dó2mab dó2mak dóm1org dó2mÅ‘ dóó2r dó1p2l dó1p2r dó2rád d1ó2rák dó2riá dó2sam dó2sas dó2sel 3dósi. dó2sip dó2sis dó2sír dó1s2ká dó1spe dó1spi dó1s2rá dós3s dó1stáb dó1str dó2s1ű2 dósz2 dó1szf dó1szp dó1t2r 2d1ó2vó 1dö d1öbl dö2ga dö2gá dö2g1el dö2gev dög3g dögna2 dö2go dö2gó dög1ö2lÅ‘ dö2g1Å‘ dö2gu dö2ka dö2ká dö2k1el dö2k1e2r dö2kék dö2k1ér. dö2kí 2döntöz dö2ra dö2rá dö2ro 2d1ö2röks 2d1ösz d1ötl döt2tért dö2ve dö2vi 1dÅ‘ dÅ‘a2n dőá2g dÅ‘1bl dÅ‘1cl dÅ‘1dra dÅ‘e2l dÅ‘e2r dőé2l dőé2te dÅ‘1fl dÅ‘1f2r dÅ‘gé2p1e2ké dÅ‘1gr dÅ‘irá2 dÅ‘i2ta dÅ‘1kl dÅ‘1kv dÅ‘2ny1a dÅ‘2nyele dÅ‘1pl dÅ‘1pr 2dÅ‘ráb 2d1Å‘rál 2d1Å‘2reb dÅ‘r1egy dÅ‘r1ele dÅ‘r1elÅ‘ 2d1Å‘2rem 2d1Å‘2ré. 2d1Å‘2réh 2dÅ‘rék 2d1Å‘rél 2dÅ‘rén 2d1Å‘rh 2d1Å‘rif 2d1Å‘2ril 2d1Å‘2r1in 2d1Å‘2rip dÅ‘2r1is 2dÅ‘rok 2d1Å‘ror 2dÅ‘ros dÅ‘2röd dÅ‘2r1öz 2dÅ‘rÅ‘r 2dÅ‘run 2dÅ‘rur 2d1Å‘2rut 2d1Å‘2rü2 dÅ‘r1üg dÅ‘r1ül 2d1Å‘rz dÅ‘2s1érv dÅ‘1ská dÅ‘1s2m dÅ‘1sni dÅ‘so2d dÅ‘s1odo dÅ‘1spe dÅ‘1s2pi dÅ‘1spó dÅ‘1s2ta dÅ‘1sté dÅ‘1str dÅ‘1sv dÅ‘1sz2 dÅ‘t1áll dÅ‘1t2r dp2la dp2le dp2lé dp2ra dp2re dp2ré dp2ri dp2rí dp2ro dp2ró dp2sz dra1ps dravasz1 drág1g drá2sz1ál drá2sze dráta2 drá2tal drá2t1e2 drá2t1ér dren2d1Å‘2 1d2ressz. 1d2resszb 1d2ressze 1d2resszh 1d2resszk 1d2resszr 1d2resszü dré2szá dro2g1a dro2gá dro2gen drogé2n1i2 drog3g dro2g1ó2 dro2n1a2 dro2nyi drosz2 dro1szf dro1t2r dró2baj dró2t1a2 dró2t1á2 dró2tis dró2t1ü2 d2rui dru2se dru2si dság1g dsé2g1el dsé2gül ds2ká ds2li ds2pe ds2pi ds2po ds2rá ds2ta ds2tá ds2tí dst2r dsza2ké dszá2las dszáraz1 dsz2e dsze2ra dsze2r1á dsze2r1elv dsze2r1o dszert2 d1sz2l d1szn d1sz2p d1sz2t2 d1sz2v dta2g1a2 dtalpa2d dtal2p1ada dtal2p1al dta2n1á2s d2t1ékn d1t2rá d1t2ré d1t2ri d1t2ro d1t2róf d2t1ül 1du du2cem du2c3h du2cö du2cü due2l du2gal du2g1ár 2d1ugr 2duit 2d1ujj dula1k2 dula2t1í du2l1e du2lép duli2 du2l1im du2l1in du2lis du2lí du2lö du2lű 2d1u2ni 2d1u2no 2d1unt du2óa du2óá du2ód du2óf du2ól du2óp du2rai du2rak du2ral 2d1u2rat du2ráb du2ráh du2rát du2ruk du2sal du2san du2sar du2s1as du2sál du2seg du2s1érté du2sin du2s1iv du2sol du2sÅ‘ du2s1ű 2d1u2szod 2d1u2szoka 2d1u2szokb 2d1u2szoké 2d1u2szokh 2d1u2szokk 2d1u2szokn 2d1u2szoko 2d1u2szokr 2d1u2szokt 2d1u2szom dus3zs du2t1i 2d1u2to du2tó. 
du2tór du2tu 1dú dú2ce dú2ch dú2cse dú2c3so dúc3sz dú1dr dú2j1é2 dú2jí dú2r1a2c dú2ral dú2r1e2 dú2rén 2d1ú2ron dú2rö dú2szá dú2szó dú2t1a2 dú2té 2d1útj d1útl 2d1útn dú2ton 2d1útr 1dü dü2dü dü2gy dügy1érn dü2ha dü2há dü2hel dü2ho dü2hÅ‘ dü2két dü2lá 2d1üld dü2lep dülÅ‘s2 2d1ünne dü2rí dü2te dü2té dü2tÅ‘ dü2ve dü2ze dü2zé 1dű dű1pr d1űrl dű1sz dű1tr dű2zé dű2zÅ‘ dva2raj dva2r1e dva2r1ó2 dvá2nyan dvá2nyí dv1áta dv1áte dv1ece dv1e2leg dv1elk dven2t1í dve2ral dve2rár dve2rip dver1s d2v1e2sés dve1s2p d2v1e2teté dv1élm d2v1ép d2v1érd d2v1é2ri d2v1érté d2v1érz dv1fr dvi2csa dvi2csá d2v1iga dvigaz1 dvitéz1 dv1or dvö2l dv1ölÅ‘ dv1Å‘s dv1Å‘z dv1pr dv1un dv1ú2t d2v1üg d2v1ü2z d2v1űz dwa1yi dy1as d1yéb d1yén dy1étÅ‘ d1yév dy2jé dy2ke dyk2k dyk2n dyk2t dy2vé d2z 1dza dzaé2 2d3zaj dzak2 dza1kr 1dzá dzá2r1ó2ra dzás1s d3zász 1dze 1dzé dzé2sa 1dzi 2d3zil 1dzí 1dzo 1dzó 1dzö 1dzÅ‘ dzÅ‘a2 2dzÅ‘bÅ‘ 2dzÅ‘d 2dzÅ‘j 2dzÅ‘né 2dzÅ‘r 1dzsa 1dzsá 1dzse 2dzs1e2g 2dzs1esz 1dzsé 1dzsi 2dzsir 2dzs1is d3zsiv 1dzsí 2dzso2 dzs1ok 1dzsó 1dzsö 1dzsÅ‘ dzs1s 1dzsu 1dzsú 2dzsúj 1dzsü 1dzsű 1dzu 1dzú 1dzü 1dzű 2e. e1a ea2bál e2abo e2abÅ‘ ea2da ea2dá eadás1s ea2dó ea1d2r ea2du eaé2d e2afá e2afe e2afi e2afo e2afö e2afÅ‘ e2afü e2afű ea2gi e2agó e2aid e2ail e2aim e2aip e2ais ea2ja e2ak. ea2kas e2akat e2aká e2akb e2ake e2akép e2akh e2aki e2akí e2a1kl e2ako e2akó e2akö e2a1k2r e2akú e2akü e2alán eal1eg ea2lu e2am. e2ama e2amá e2amel e2amer e2amé e2amin ea2mo e2amu e2amú e2amű e2ane e2ané e2anö e2ans ea2nya e2api e2apo e2apó ea1p2ro e2ar. ea2ran ea2ras ea2rat ea2rá e2arb e2are e2arh e2arj e2arn e2aró e2arr e2aru e2arü e2aso e2a1st2 e2asü e2asű e2aszem e2aszé e2ata e2atá e2atc e2ate ea2t1eg e2até2 ea2tél ea2t1éne e2atf e2atg e2ath ea2tid eat1ing ea2tip ea2tir e2atm eatmo1s eatmosz2 e2atn e2ato ea2t1or e2ató e2atö ea1t2rak eat2rón e2ats e2atu e2a2tü e2atű e2atz e2au. ea2ut e2a2ux e2avi ea1vy ea2zo e1á eá2bé eá2cs eá2ga eá2gá eá2gi eá2go eá2gu eá2gy eá2hí eá2k1e eá2k1osk eála2 eá2lad eá2l1ak eá2lál eá2l1á2r eá2l1e2 eálé2 eá2lél eálfe2 eá2lid eá2l1in eá2lir eá2lism eá2lop eá2l1ór eá2lö eá2mu eá2nac eá2nal eá2n1at eáná2 eá2nár eá2n1át eá2n1e2 eá2ny1a2l eá2ny1a2n eá2nyap eá2nyar eá2ny1as eá2nyav eá2ny1e2 eá2nyén eá2ny1ér. eá2nyif eá2ny1ing eá2nyis eá2ny1o2ku eá2nyö eá2po eá2rad eá2ram eá2ras eá2raz eá2ru eá2rú eá2sa eá2sá eá2sí eá2só eá2su eá2sz eáta2 eá2t1e2 eá2té eá2tí eá2tu eá2tü e2ba2d eb1adó eb1adta eb1aga e2b1ajk e2b1a2la e2b1alk eb1ant eb1a2ny eb1atl e2b1au eb1a2zo eb1ágg e2b1áp eb1ára ebeá2 e2b1e2he eb1ejt ebe2l1á e2b1e2lef ebe2l1eme e2b1elhel e2b1enc eb1este eb1esté ebe2szek e2b1ex e2b1ég eb1ép e2b1érd e2b1érté e2b1érz eb1gr e2b1id e2b1i2na e2b1inf e2b1ing e2b1irá e2b1i2s e2b1izz eb1kl eb1kr eb2lat e1b2lú eb1okt eb1o2la eb1orv eb1öb eb1ös eb2rus eb1st2 eb1tr e2b1üg ebü2l eb1ülé e2b1ür eb1üz ec1ajt ec1alk e2c1a2n e2c1az ec1ág e2c1ál ec1ár ec1bl ec2c1a2 ec2c1ér ec2c3h ec2c1i eccs1át ec3cso ec2cú e2c1eg e2c1elo e2c1elv 2ecento e2cetb ece2t1o 1e2cets 1e2cett e2cetü ece2tüz e2c1evé ec3har ec3hen ec3hi. ech1in 1e2chó ech1u ec2le ec2lu e2csad e2cs1a2la e2cs1a2n ecsa2p1á2g e2cs1a2pák ecs1ará ecsá2r ecs1árá ecs1árb ecs1áro e2cs1á2t e2cs1é2l e2cs1ér. ecs1érté e2cs1ol e2cs1öl ecs1s ecsúszós1 e2c3sükb e2c3süt ec3sze e2c1ud ec3zá ec3ze e2d1ab e2d1a2dá ed1a2ny e2d1a2z e2d1á2c e2d1ág e2d1áp ed1egy edeleme2 e2d1elo e2d1eml ede2rak ede2r1ál ede2rel ede2r1ék ede2r1o ede2r1ü2l ede2s1o ede2tá ede2tel ede2t1ér. 
e2d1é2j edé2ká edé2kis edé2k1o edé2lyá edé2lyo 1e2dénn 1e2dény e2d1ép e2d1érd edé2sa edé2so edés3s edé2s3z ed1é2vén ed1é2vér ed1gr 2edic e2dide e2d1i2ga edigaz1 ed1ill e2d1int e2d1ira ed1iro e2d1ír ed1ívn e2d1íz ed1old ed1orv e2d1os e2d1ös e2d1Å‘rs edÅ‘2s1ü ed1pl ed1pr ed2ram e1d2rog e1d2ró e2d1üg e2d1üt e2d1űz ed2v1a2 edvá2 ed2v1ár ed2vát edv1ér. ed2v1öz edy1i e2d3zá 1e2dzÅ‘i 1edzÅ‘j 1edzÅ‘r 1e2dzÅ‘v 1edzv e1e ee2bé ee2cs ee2dz ee2ge ee2gé eegész1 ee2gy ee2he ee2ke e2el. ee2la ee2le e2eléb ee2léd e2elg ee2lo ee2lÅ‘ ee2me ee3men ee2mé ee2mu e2en. e2enb e2enj e2ent e2enw ee2ny ee2pe ee2pé ee2po ee2re ee2ro ee2ró ee2rÅ‘ ee2sé ee2sü ee2sz ee2te e2e1t2h ee2to ee2ve ee2vé ee2vi ee2vo ee2vÅ‘ ee2zü e1é eé2de eé2ge eé2gé eé2gÅ‘ eé2gü eé2he eé2je eé2jé eé2ke eé2kí eé2le eé2lé eé2li eé2lÅ‘ eélÅ‘kész1 eé2lü eé2lű eé2me eé2ne eé2pí eépítés1s eé2pü eé2ré eé2ri eé2rÅ‘ eé2rü eé2sz eé2te eé2ve eé2vé eé2vi eé2vü efa2x1i efek2tá efek2t1í2 efenyőé2 ef2f1in ef2f1o e1f2la efle2x1i2k ef2lu efo2n1alk efo2nik eforma2 efor2m1al ef1pl e1f2rá ef2rö e2g1a2bá ega2be eg1abl eg1abr e2g1a2cé ega2cs e2g1a2d e2g1a2g ega2i e2g1a2j e2g1a2k e3gaké ega2lac ega2lak ega2lan eg1a2lap e2g1a2lá e2g1alh e2g1alj e2g1alm e2g1als e2g1alt e2g1a2m eg1ang eg1ann eg1ant eg1a2ny e2g1a2pa eg1apá ega2po eg1a2pó e2g1apr eg1arc ega2ri eg1a2ro eg1art e2g1ass e2g1a2sza e2g1a2szo e2gaszú eg1atk ega1t2r e3g2aty e2g1a2u eg1a2va e3gazol ega2zon e3gába e3gábó eg1á2cs e2g1áf e2g1á2g eg1áh e3g2ái e3gája e3gájá egá2ju egá2m e3g2án e2g1áp e2g1á2rá e2g1árb e3g2árg e2g1árn egá2ro eg1árt egá2ru egá2rú egá2sa e3gát. e3g2átu e3gáva egá2zá egá2zi egá2zu eg1bl eg1br eg1d2r e2g1e2bé ege2cs1ö2 eg1edz eg1e2ge eg1ehe e2g1e2kés e2gela e3geled ege2leg ege2lej e2gelekt eg1e2lemb e2g1e2leme e2g1e2lemn e2g1e2lemr e2g1e2lemt ege2l1e2s eg1elég eg1e2lér e2g1elf e3g2elg e2g1elha e3g2elit e2g1eljá e2g1elm e3g2elne e2g1elo e3g2előá ege2lÅ‘bbe e3g2elÅ‘d e3g2elÅ‘f e3g2elÅ‘i e3g2elÅ‘m e3gelÅ‘nye e2g1eltá e3g2eltet e3g2eltü e2g1elvá e2g1elz e2gemel e3gend e3genye eg1epe eg1epr e3gerá e2gerd 1egere. e2gered 1e2gerek e2geré ege2rény eg1erk e3gerl e2g1eró e2g1erÅ‘ eg2esek e2g1esete e2g1eszk e2getal eg1e2ve e2g1ex e2g1é2g e3gémb e2g1é2neke e2g1é2nekk egé2ny1e2l e2g1épí eg1éps e2gér. e2gérb egé2r1es egé2re2t egér1ete egé2rez e2géré egé2rés e2gérg e2géri egé2ri. e2gérk e2gérn eg1é2rÅ‘. e2gérr e2gérs eg1érté eg1érth e2gértÅ‘ e2gérv e2gész eg1észl 1egészs eg1éte eg1fl eg2gim eg3gyal eg3gyan eg3gyás eg3gyú e3gi. eg1ibr eg1idd eg1i2de e3gie egi2g e3gig. e2g1iga e2g1igé eg1ij e2g1i2ko eg1ikr e2gill eg1imb e2gimm eg1inf e2g1ing eg1ino eg1ins e3g2io eg1ira eg1i2ro eg1iss eg1ist egi2sz e2g1i2ta e3gitá e2g1iva e2g1ivá e3give eg1izé eg1izg eg1izm eg1izz e2g1íg e2g1ín e2g1í2r e2g1ív e2g1íz eg1kl eg1kr eg1kv e2g1ob e2goc e2g1o2d e2goi e2g1o2k eg1o2la e2g1old eg1olv e3goly 2egom ego2mi e2gont e2g1op eg1org e3g2orom ego2ros e2g1orr e2g1orv e2g1o2s e2g1ot e3gó. e3g2ób egó2do e3g2ój eg1ó2ra eg1órá eg1óv e2g1öb eg1ök eg1önt eg1örö e3görög egö2röm eg1öt eg1öv e2g1ö2z egÅ‘kész1 e3gÅ‘re eg1Å‘ri eg1Å‘rö egÅ‘2szi e3g2Å‘z eg1pl eg1pr eg1ps e1g2rat e1g2róf eg1sk eg1sl eg1sm eg1sp eg1s2t egsz2 eg1szt eg1tr e3g2ub eg1ud e3g2um eg1u2n e2g1u2ra. 
e2g1u2rai e2g1u2rak e2g1u2ras e2g1u2rat e2g1u2rá e3guru e2g1u2s e2g1u2t eg1u2z eg1új e3g2ún eg1úr eg1ús eg1út e2g1üd egü2gye e2g1ü2le eg1ü2li e2g1üs e2g1üt e2g1üv e2g1üz e2g1űz egvárosi2 e2gy1a2d egy1a2g e2gy1aj egy1akt egy1a2la e2gy1ará e2gy1as egy1a2t e2gy1au egy1az e2gy1ál egy1á2rá egy1árf e2gyát egy1eleg egy1elf egy1ell egy1elo egy1e2lÅ‘j egy1e2lÅ‘v egy1elz e2gyeni 1e2gyenl 1e2gyens egy1eszt 1egyezm egy1éks 1egyéni egy1ér. e2gyip e2gyis e2gy1iz egy1ok e2gy1ol egyo2r e2gy1os egy1ot e2gy1ó2r egy1ö2l e2gy1ös e2gy1öz egy1Å‘2r egyu2 egy1ur egy1ut 1együt 1egzis eha2de ehá2zal eh1ellá ehe2lyes ehe2rál ehe2rát ehere2 ehe2r1el ehe2r1em ehe2ren ehe2res ehe2rin ehe2rol e2h1é2je ehé2ná ehé2név ehé2zá ehé2zo e2h1ors eh1sz e1i ei2áb ei2áh ei2áj ei2án ei2ár ei2át ei2áv e2ibn ei2deá ei2dei ei2deo ei2dén ei2dom ei2dÅ‘ e2idp e2ier ei2gá ei2gé e2i1g2n ei2gy ei2ha eil2le. ei2ma. ei2man ei2má ei2mit e2imk e2ims e2imz ei2na2 ein1ad ei2n1á2 ei2neg e2inér e2inét ei2n1ita ei2nol ei2nÅ‘ ein1t2r ei2nü ei2on ei2pa ei2ram ei2rat ei2ri ei2rod e2iró ei2ta e2itb e2itj e2itn e2itr ei2va ei2vá ei2vo ei2vó ei2zé e2izmi ei2zo e1í eí2gé eí2já eí2jú eí2ra eí2rá eí3rásbe eí3rásil eí3rásoc eí3rásonk eí2ró. eí2róa eí2róá eí2rób eí2ródn eí2róf eí2róg eí2róh eí2rói. eí2róik eí2róin eí2róit eíróí2 eí2rója eí2rójá eí2róje eí2róju eí2rók eí2róm eí2rón. eí2róna eí2róná eí2róny eí2rór eí2rót eí2róv eí2té eí2vá eí2ve eí2vé eí2vi eí2vo eí2vó eí2ze eí2zü eí2zű ej1ab e2j1a2d ej1a2g e2jak ej1akk ej1a2l ej1a2n ej1ar ej1au ej1a2z ej1áb e2j1á2g ej1ál e2j1ártó ej1á2t1e2 ej1átv ej1bl ej1br ejcsa2p1 ej1dr eje2c e2j1ecse e2j1ef ej1e2ged e2j1e2gé e2jekc e2j1ela e2j1elc e2jelede e2j1e2lemb e2j1e2leme ej1e2lemé e2j1e2lemn e2j1elf e2j1elha e2j1elhel e2j1elo ej1elszá ej1eltér e2j1elu e2j1eng e2j1enz e2j1essz eje2sz e2j1ex e2j1é2j e2j1é2k e2j1él e2j1ép e2j1éte ej1fr ej1g2r e2j1i2d e2j1im ej1ina e2j1int e2j1ip e2j1iz ej1íg ej1ív ej1kl ej1kv ej2mok ej1ol ej1op ej1óc ej1ón ejó2sá ej1ót ej1óv e2j1öb e2j1öl ej1ö2ve. e2j1Å‘z ej1pl ej1pr ej1sp ej1st2 ej2tad ej2tau ej2tál ej2tát ej2t1elk ej2t1este ej2tev ejté2r ejt1éré ej2tin ej2tiz ej2tos ej2t1ó2r ej2töd ej2t1ö2l ej2tön ej2tös ej1tra ej1tró ej2tür ej1új ej1úr ej1ú2t e2j1üg e2j1ür e2j1ü2t e2j1üv e2j1üz e2k1abl ek1a2cé ek1a2dá e2k1a2dó eka2gy ek1a2ja e2k1ajtó. e2k1ajtói e2k1ajtók e2k1ajtón e2k1ajtór e2k1ajtót ek1a2kar e2k1alj e2k1a2lo ek1alt ek1ang e2k1ani ek1a2nyá ek1arc. ek1arca ek1arcr ek1arcú eka2sip e2k1au ek1azo e2k1ág ek1állo ek1álm ek1árad eká2rai ek1á2rak ek1áras e2k1á2rá e2k1árd e2k1árf e2k1árh e2k1árk e2k1árm e2k1árn e2k1á2ron e2k1árr e2k1á2ru e2k1á2rú e2k1árv ek1ásó ek1áta ek1átd e2k1áth ek1átj e2k1átm ek1á2to e2k1átt ek1bl ek1br 1ekcém ekci2óf ek1cl ek1dr e2k1ebé e2k1e2dz e2k1egg e2k1e2gye e2k1e2kébÅ‘ e2k1e2kés e2kela e2k1e2leg eke2leme ek1elha e2k1elk e2kellá e2k1elm e2k1elo e2k1elÅ‘n e2k1elr ek1elta ekel2t1é2r e2k1e2ma e2k1e2mel ek1e2més ek1e2pi e2k1estr eke2szele eke2szo e2k1e2vé e2k1evi 1e2kééh e2k1ég 1e2kéik ek1ékek e2k1ékt ek1é2le ek1éln ek1élÅ‘ e2k1épí e2k1érin ek1értékb eké2rül e2k1ész. e2k1észh e2k1észn eké2tel e2k1étk e2k1étl e2k1étt e2k1é2ve. 
e2k1é2vek ek1é2vet e2k1évi ek1fr ek1gn ek1gr ekiá2 ek1i2ga ekigaz1 ek1i2ge e2k1igé ekii2 ek1ill e2k1ima eki2már e2k1iná ek1ind e2k1isk ek1ism e2k1isp ek1i2zo e2k1íj ek1írá ek1íz ekka2ró2 ek2k1ele ek2k1elf ekk1osz ek1kri ek2kű eklés3s ek2lim ek3nÅ‘ e2k1oá ek1obj e2k1odv e2k1o2la ek1olló e2k1olv e2k1o2pe ekor2da ekor2d1á2 ek1o2rom ek1orra e2k1orv ek1otth e2k1ó2h ek1ó2ra ek1ó2rá ekö2k e2k1ökö e2k1ö2lé ek1ö2lÅ‘ e2k1öm e2k1önk e2k1önt ekö2ri e2k1örv ek1pl ek1pr ek1ps e1k2ram e1krétá ek2ris e1k2rí ek2róm ek1sl ek1sm ek1sp ek1st eksz1al ekszes1 ek2szip ek2tau ek2taz ekt1elk ek2t1esz ek2t1érd ek2til ek2tim ek2t1i2o ek2t1ok ektus1s e2k1ud e2k1u2ra ek1uro e2k1utá e2k1utó e2k1uz e2k1új e2k1úr. ekú2t ek1úto ek1útv e2k1ünn ekü2t ek1ütÅ‘ ek1üzl e1k2vó el1a2ba el1abl el1a2bort e2l1a2c e2l1a2d el1agg el1a2gy el1a2j e2l1akad e2l1a2kas el1akc e2l1a2l el1a2m el1ana elan2di el1a2ne el1a2ni el1ann ela2ny el1a2pa ela2r el1ara el1ará el1aré el1asza el1aszi el1aszó el1aszu e2l1a2u el1a2va el1a2ve el1azo elá2bé el1ág. e2l1á2ga el1ágg el1á2gi el1ágn el1á2go el1á2j el1áld el1áll el1á2lo elá2m el1ámí el2án elá2ná elá2ne elá2nó el1á2p el1ár. el1á2rá el1árb el1árc el1á2re el1á2ré el1árf el1árh el1árk el1árn el1á2ro el1árö el1árr el1árt el1á2ru el1á2rú elá2s eláta2 el1át1e2 el1átf el1átk el1átl el1átm el1átr el1ázi el1ázo el1ázt el1bl el1br 2eld. el1d2r ele1bre 2eled. 1e2ledel el1eff ele2g1e2lé el1e2gyene e2legyü el1egz eleí3ran ele1k2l ele2k1os ele1krá 1elektr e2l1elb e2l1e2lemb e2l1elm e2lelÅ‘z el1elr ele2mad e2l1ember 1eleme. 1e2lemei 1elemek ele2mell 1e2lemem. 1e2lememm 1e2lemes. 1e2lemesn ele2mesz ele2mélt ele2mérd 1e2lemük 1e2lemünk 1e2lemzé 1e2lemzÅ‘ 2elend e2lener ele2nél eleó2 ele2pal ele2pap ele2pele ele2pell ele2p1Å‘2r e2lerd el1e2red el1e2rei el1erj e2l1ern ele2róz ele2sege ele2sésb ele2sése ele2sésé ele2sésh ele2sési ele2sésk ele2sésn ele2sésr ele2séss ele2sést ele2sésü ele2sik. e2l1esnén ele2sÅ‘b ele2sÅ‘s e2l1essél ele1sta ele2szek ele2t1e2két ele2teti ele2tetn ele2tetÅ‘ ele2t1ék ele2tél e2l1ették eleü2 1e2l1e2vÅ‘k el1e2xi e2lébb elé2du el1é2ges el1é2get el1égj el1égtek el1égv el1é2het eléka2 elé2kak elé2k1an elé2k1á elé2k1e2le elé2kev elé2keze elé2kezi elé2kezt el1ékí elé2kö elé2ku el1él. el1éld elé2led el1é2let el1é2lé el1élh el1é2li el1élj el1éln el1éls e2l1élt el1é2lü e2l1élv e2l1é2neke e2l1é2nekh e2l1é2red e2l1é2rem elé2rend e2l1é2rez elé2rét. elé2ri. e2l1érp e2l1érrÅ‘ e2lértel e2l1érté el1érth e2l1értÅ‘ e2l1é2rü e2l1érz 2elésétÅ‘ elé2so 2elész. 2elészel 2elészem 2elészne 2elészné 2elészÅ‘ 2elészü 2elészv elé2tel e2l1é2tes elé2tete e2l1étk e2l1étt e2l1év. el1fl el1f2r el1gl 1elhap el2ib el1ideg el1idé el1i2do el1iga e2l1igé el1ill e2l1i2ma e2limá e2l1imp e2l1into el1i2on eliÅ‘2 e2l1i2p e2l1ira e2l1i2ro e2l1i2si e2lism el1iss el1iste eli2tin eli2tol elitu2 el1i2vá e2l2ix el1i2zo e2l1íg el1í2rá el1í2v elka2r1á 1elkezdé 1elkezdÅ‘ el3ki el1k2l el1k2r el1kv el2l1a2dá el2l1a2dó ell1alk el2l1amb el2lamp ella1t el2lau el2l1ábr el2l1áll 1ellátáso 1ellátásü 1ellátm el2lef elle2g1ó2 ell1elké el2lelm 1ellená 1ellenes 1ellenf 1ellenÅ‘ 1ellens 1ellenz el2lid ell1inf ell1ing. ell1int el2l1or ell1osz ell1órá el2lön el2lös el2l1űr 1elmééh 1elmél 1elméte 1elnép 1elnök el1obj el1off el1oj e2l1o2ká el1okm e2l1o2l el1oml el1omo el1ont el1opc e2l1ope el1oro el1orr el1os e2losz e2l1ox eló2ig eló2in e2l1ó2ri el1öb el1ö2m e2l1ön e2l1ör e2l1ös e2l1ö2z 2előáp 2előár 2elÅ‘bé 2elÅ‘bi 2elÅ‘bo 1elÅ‘dei elÅ‘2d1í2 1elÅ‘dje. 2elÅ‘do 1elÅ‘döt. 
előe2r 1e2lőfú 2előfü 2előfű 1e2lőhív 1előhű 2előib 2előik 2előim 2előiv 2előja 2előké. 2előkért 1előkése 1e2lőkést 2előkl 2előkő 2előkt 2előku 2előkü 2előmö 2előmű 2előna 1előnn 2előnö 1előny. 1előnyb 2előnyer 1előnyh 2előnyi 1előnyk 1előnyn 1előnyö 1előnyr 1előnyt 1előnyü 2előö el1őr. 1e2lőreg 1előreh 1előrej el1őriz el1őrl 2előro e2l1őrü 2elősá 2előso 2elősö elő1s2p 2elősze 2előszé 2előszi 2előszó 2előtü 2előü 2elővis 2előviz 1e2lőz. e2lőze 2e3lőzene 1előzm el1p2l el1pró el1sk el1sl el1sm el1sz2t el2t1aj eltára2d eltár1ada 2elte. el2t1e2rei 2eltes 2eltet. 2eltete 2elteté 2elteth 2elteti 2eltetj 2eltetn 2eltets 1eltettk 2eltetü 2eltetv 2eltéb 2eltél 2eltét el1t2ra el1trá el1tré 2eltük e2l1ud el1u2g elu2n el1und el1u2r e2l1u2t e2l1uz el1új el1ús el1ú2t el1üc e2l1üd e2l1üg elügy1ér. elügy1érn elü2kén e2l1ültet e2l1ür e2l1üs e2l1üt e2l1üv e2l1üz el1űrt e2l1űz elv1ada elv1a2dó el2v1at el2v1árai el2v1áras el2v1árár elv1ász el2vát el2v1enc el2v1égü elv1éle el2v1ér. el2v1éri el2vik elv1olt el2v1ó2 el2vöd el2vő el2vú e2ly1a2 e2ly1á2 e2ly1e2kén. e2ly1el e2lyer ely1eszt ely1é2jé ely1é2ké e2ly1él e2lyés e2ly1i2ko e2ly1ira ely2kéj e2ly1o e2ly1ó e2lyöm e2lyön e2lyös e2lyő ely1ul elyü2l e2ly1ülé e2m1ab em1a2dat em1a2dás e2m1a2do e2m1a2dó e2m1adt e2m1a2gi em1a2ja e2m1ajá em1ajk e2m1ajt em1a2ka em1aká e2m1aku em1a2lap e2m1all em1almá em1alv e2m1ana 1emaná e2m1a2ny ema2p em1apá em1apr em1a2rán em1asszo e2m1atl e2m1a2u e2m1a2v e2m1a2zo e2m1áb emá2l em1ála em1áld em1áli e2m1áp emá2r e2m1ára e2m1árn e2m1áro e2m1áru em1árv e2m1ásá emát1a2 e2m1áte e2m1átl em1átm e2m1átt e2m1átu 1embarg 1embered 1emberf ember1ő2 1embers emb2len 1emblé em1bre 1embri em1bro em1dr e2m1ef eme3ger eme3gi em1egyet em1egyé e2m1egz e2m1e2kés e2m1ela em1elb 1emelet 1emelke e2m1ellá e2m1elm 2e2m1elo 1emelőb 1emelőe em1előny 1emelős 1emelőv 1emelty e2m1ember e2m1e2mel e2m1emu e2mener emenes1s e2m1e2p e2m1e2rény e2m1e2rő eme2sa em1e2seté e2m1e2sés em1e2szem e2m1e2szet e2m1eszk e2m1eszm e2m1eu e2m1e2v eme2z1a eme2z1á2 eme2z1o eme2z3s e2m1ég e2m1é2he. e2m1é2hen e2m1é2hes e2méhs e2m1é2j emé2k e2m1éke em1ékr em1é2let e2méneke e2m1ép e2mértel e2m1értéke e2m1értéké e2m1értékn em1észl emé2t1a2 emé2tár e2m1é2tek emé2tel e2m1étel. emét1elh emét1els em1fl emfoga2d emfo2gada em1f2r em1gr e2midé em1i2dő emi2g e2m1iga e2m1ige e2m1igé em1iks emi2m em1ima e2mimá e2m1inf e2m1ins e2m1inte e2m1i2p e2m1ira e2mirá e2m1iro e2m1irt e2m1isk e2m1ism e2m1iste emi2sza emi2szá emi2szo em1izé em1izg e2m1i2zo e2míg e2m1íj e2mír e2m1í2v em1kl em1kr 1emlékm 1emlékv emo2k e2m1okl em1oko e2m1okt em1ola e2m1old em1oll e2m1olt e2m1o2p e2m1ork e2m1ors e2m1orv emo1t2 emó2ra em1ó2rá e2m1öb emö2k e2m1ökö emö2l e2m1öld em1ölé e2m1ön e2m1ö2r e2m1ös em1öv e2m1ö2z em1őr. em1őrk em1őrn em1ő2rö em1őrr em1őrt 1empát em1pc em1p2re em1pré em1pro em1pró em1p2s em1sk em1sm em1sp em1s2t em1t2r 1e2mu. e2mud e2mug e2muj 1e2muk 1emulz 1e2mum em1uno e2mur e2musz em1utal e2m1u2tá e2mutc e2m1utó e2m1új em1úr e2m1út e2m1üd e2m1üg e2m1ü2lő e2m1ünn e2m1ür e2m1üt e2m1üv e2m1üz e2m1űz em1zr e2n1ab en1a2do en1agi e2n1a2j e2n1a2k en1a2la en1alk en1all en1alm e2n1a2n en1a2pá ena2p1e en1a2rá en1arc en1asz en1atk en1aut en1ábr en1ág en1áld en1álm e2n1ára en1árn en1á2ro e2n1áru en1átk en1átm en1átv e2n1áz en1bl en2c1a2 en2c1ár en2c3h en2c1ip en2cí en2c1ol en2c1os en2c1ö2 enc3ség en2c3sor enc3s2p enc3sz en2cu en2d1adá en2d1alk en2dano en2d1áll en2d1árn en2d1átl end1é2jé en2d1ér.
en2d1érr en2d1értÅ‘ en2d1érz en2d1or en2d1ón en2d1ó2r en2dös en1d2rá en2dú en2dza eneá2 e2n1egér e2n1egg en1e2lek en1eljá en1elk e2n1ell en1elm en1elny en1elü en1elvá e2n1eml eneó2 ene1p2 2enerá 1e2nerg e2n1ern e2nerv 2enes ene1sztá ene2tal ene2tos e2n1ex ené2k1a ené2kel enéki2 ené2kis en1épí en1épü e2n1ér. e2nérd e2n1éré e2n1éri e2n1érte e2n1érv ené2sza ené2szer ené2sz1in e2n1étk e2n1éves ené2vig en1f2l enfluo2 enga2 en2gan 1engedé enger1Å‘2 engés3s eng1g e2nidé enidi2o e2n1i2ga e2n1ige e2n1igé en1ill e2nimá e2n1ina en1ind e2n1inf e2n1inte e2n1inv e2n1ira en1i2rá en1isk e2n1ism eni2sza eni2szer eni2szo eni2sz1ó2 e2n1ív en1k2j en2n1eme en2nes enn1ége enné2k en2n1éke en2nér en2nir en2n1ol en2nú en2n1ül enny1a2d enny1as enny1á en3nyer enny1í2 en3nyu e2n1ob e2n1of e2n1oi en1old e2n1olv eno2ni en1opt eno2r1á2 e2n1ost en1oszt e2n1ox enóta2 enó2tal enó2t1e2 e2n1öb e2n1öl en1ön e2n1ös e2n1ö2t e3növ enÅ‘2rü en1pr en1ry en2s1ab en2s1a2l en2s1a2n en2sas en2s1el en2s1érté ens3szá en2s3zon en2t1a2c en2tada ent1agy enta1k2 en2t1a2la ent1alj en2t1alk ent1a2lo ent1and en2t1a2ny ent1ass en2t1á2rak en2t1á2rat en2t1árá en2t1á2rú en2t1elmé ente2r1a en2t1este en2t1esté en2t1esti ente1t2r en2t1é2g en2tép en2térm en2t1é2ve. en2tigé en2t1i2o enti2p ent1ipa enti2sz en2t1okt en2tön en1trad ent2ran en2tun en2t1u2r en2t1ü2z en1uta en1úr en1út e2n1űz e2nyab e2ny1a2d e2ny1ae enya2g eny1aga eny1agá e2ny1aj eny1alk e2ny1a2n eny1aré e2ny1as e2ny1at e2ny1au eny1d2 eny1e2dzé e2nyelm eny1elÅ‘n eny1elvo eny1elvű e2ny1e2rÅ‘ eny1eve e2nyéva 1enyhi. eny2h1Å‘s e2ny1id e2nying e2ny1ira e2nyiz eny1í2ró e2nyok e2ny1o2l e2ny1or e2ny1os e2ny1ó2 enyö2k e2ny1ökö e2nyöt eny1s enyt2 eny1tr eny1út enyva2 eny2van 1enyve. eny2v1e2l enyves1s en2zal 1enzim e1o eo2áz e2obo e2ode eo2dú eog2rafi e2oka eo2kád e2okár e2oké e2o1k2l e2okon eo2kos eo2kö eo2laj e2oló eo2ly e2ome e2omé e2omi eo2n1al eo2nan eon1any eo2natom eo2n1au eo2n1ál eo2nár eon1d2 eo2ner eon1f2 eo2niz eo2nö eon1t2r eo2n1ú eo2nü e2opa eo2pe e2o1p2l eo1pro eo2r1a2 eo2r1á2 eo2res eo2r1és eorgi2a e2orgi2áb eori2tá eor1k2 eo2rö eor1s2 eo2so e2ost e2osza eo1sz2f e2otí e2oto eo1t2r eo2vi e1ó eóa2d e2óbö eó2ce eó2dá e2ófa e2óhe e2óje e2ókap e2ókép e2ókor e2ómi e2ómű e2óné eó2no eó1pr eó2rá e2óre e2ósu e2ószo e2óta e2ótá e2óte e2óté e2óti e2ótí eó1tré eó2vak e2óve e2óvé e2óví eó2vo eó2vó e1ö eö2bö eö2dé eö2kö eö2le eö2lé eö2li eö2lö eö2lÅ‘ eö2lü eö2mö eö2nö eö2rö eö2rü e2ös. eö2ve eö2vi eö2vü eö2zö e1Å‘ eÅ‘2re eÅ‘2rö eÅ‘2rü eÅ‘2sz e2p1ab e2p1a2da e2p1a2dá e2p1a2dó ep1a2gy e2p1aka e2p1akk e2p1akn ep1a2lak ep1a2lap e2p1alj e2p1alk epa2lom e2p1a2ny epa2rán ep1atró ep1aut ep1a2zo e2p1áb e2p1á2g ep1állá e2p1á2ra epá2s e2p1ása ep1áta ep1átc e2p1á2t1é ep1átf e2p1átm ep1á2t1o ep1átt ep1átü e2p1átv ep1bl ep1br ep1dr e2p1e2dz ep1e2lemr e2p1e2let e2p1ellá e2p1elo e2p1ene epe2ral epe2r1e2c epe2rev epe2rin epe2rül e2p1esti epe2szá e2peszmé e2p1ex 1e2péd 1epééb 1epééi 1epéén e2p1ég 1e2péit e2péj ep1ékh ep1éle ep1élm 1e2pénk e2p1ér. ep1fl ep1ido e2p1i2ko ep1ind e2p1inga e2p1i2rá e2p1irt e2p1ism e2piz e2p1íg e2p1íj e2p1ín e2p1ív ep1kl ep1kr ep2lag e1p2laká e1p2lan e1p2lán e2p1ob e2poch ep1okt e2p1olv ep1ope e2p1orn ep1orsó 1eposz. 1eposza 1eposzá 1eposzt. 
ep1osztá e2p1ö2l ep1önt e2p1ös e2p1öv e2p1Å‘ri ep2pan ep2pát ep2p1e2le ep2p1elh epp1eró eppó2 ep2pór ep2pö ep1pro ep1pró ep3ret e1p2réd e1p2rin e1proj e1p2rot ep1s2k ep1sp ep1st ep2tad ep2t1aj ep2t1í2v ep2t1op e2p1ug e2p1u2ta e2p1utó ep1új ep1út e2p1üg e2p1üt e2p1üz e2p1űz er1abla er1a2bor e2r1abr er1abs era2dat e2r1a2dá er1adm er1a2do e2r1a2dó era1dr er1a2ge er1agr e2r1a2ja e2r1ajk er1ajtó e2r1a2kad e2r1a2kas e2r1akc e2r1akti er1a2la e2r1alg e2r1alj er1alk e2r1a2lo er1als e2r1alt er1alv er1amb er1amp er1angy er1ann er1a2nya er1a2pa. er1app er1aps e2r1a2ro e2r1asp era2sz1a2l era2szav era2szárn era2szel era2sz1é2p e2r1atk e2r1atl era1t2r er1att er1aut e2r1a2zo e2r1ábr erá2fé er1áge er1á2gú e2r1á2gy er1állá er1állé er1állo er1álló er1állv er1ápo e2r1ár. er1á2rak er1á2rá e2r1árb er1áre er1áré er1árf er1árk e2r1á2ro e2r1árr er1árs er1á2ru er1árú er1árv er1ásá e2r1á2sz er1á2t1e2 e2r1áth er1á2ti e2r1átí e2r1átj e2r1átk er1átl e2r1átm e2r1átn e2r1átr e2r1átt e2r1átv erb1i2na er2c1a2l er2car er2c1ár er2c1át erc1ell er2c3ho er2cid er2c1ina er2c1iná er2c1i2pá er2cis erc1k2 erco2 er2cö er2csad er2cs1an ercs1ál er2cú er2cz 1erdej 1erdÅ‘ ereá2 1eredetű e2redén 1e2redm eree2 er1eff ere2gál ere2gel e2r1e2ger erei2g 1e2rején 1e2rejér ere2k1el erek1e2szű erek1é2j ere2kot erek1t ere2k1ú2s e2r1e2leg er1elér e2r1ellen er1elli e2r1elö e2r1elr erem1egy ere2m1emb erem1érté ere2m1ut e2r1enz ereo1gra ere2pan ere2pas erepá2 ere2p1ál ere2p1e2sé ere2pin ere2pos er1erk er1ern er1eró e2r1esti e2r1estj e2r1estr e2resze. ere2szí e2reszü ere2tál ere2tát ere2t1eg ere2t1erj ere2t1é2rÅ‘. ere2t1é2ve. eretö2 er1e2vés e2r1ex 1e2rezet. 1e2rezete 1erezÅ‘kh erébe2 eré2bes er1ég. er1é2ge er1égé e2r1éj. e2r1éjb er1éjf e2r1éjn e2r1éjs eré2k1a2 eré2ká eré2kol eré2k1ö e2réneke e2r1é2neké eré2p1a eré2pá e2r1é2ren e2r1é2ré e2r1é2ri eré2sa eré2s1elv eré2sza eré2szá eré2szo eré2szö er1é2ter e2r1étk e2r1év. er1é2ve. e2r1é2vek e2réven e2r1é2ves e2r1é2vet e2r1é2vén er1é2vét er1évf er1évh e2r1é2vi e2r1évn e2r1évr er1évt er1évv erfé2l1é2v er1fl er1f2r er1gl e2r1i2deg e2ridé e2r1ido er1idu eri2ga e2r1i2gá e2r1i2kon e2r1i2mi eri2no erint2 e2rinteg erin1tr e2r1i2on eri2os e2r1i2par e2r1ira er1iró e2r1isk er1ism eri2szo e2r1ita e2r1i2zé e2r1izg er1íg er1íj. er1íjá e2r1ír e2r1í2z er1k2r er1k2v erme2k1a2 erme2ká erme2kesz erme2ko erme2köl erme2sz1á er2mind erm1isi ern1ala ern1ékn ern3n er2n1ó2d 1ernyÅ‘ er1okl e2r1okm er1olda e2r1o2li ero2nal er1ope e2r1opt er1orc er1ord er1orm er1orn er1o2ro er1ors e2r1orv erosz2f e2r1o2ve e2r1ovi e2r1óc er1ó2dá er1ólo er1ó2rá er1óri e2r1ö2c erö2k er1ökl er1öko e2r1ökö e2r1ö2l e2r1ör e2r1ös e2r1öt e2r1ö2z e2rÅ‘dd erőé2n 1erÅ‘lt 1e2rÅ‘mé 1erÅ‘mh 1e2rÅ‘nö 1e2rőö e2r1Å‘ri er1Å‘rö er1p2l er1p2s er2rév er1ry er2s1a2d ers1alk er2s1an er2sat er2s1á2gi ers1ára ers1eml er2sérte er2s1érté er2s1értÅ‘ er2sid er2s1im er2s1i2n er1ská er2s1od er2s1ol er2s1ón er1spor er1srá er1stra er2sut ersz2to er2t1ai ertá2p ert1ápo ertára2d ertár1ada er2t1elké ert1estj ert1esze er2t1é2j er2t1ékn er2t1évén er2t1é2vév er2tid er2t1i2m er2t1íz er2tos ert1órá er2t1ös er2t1öz er1t2ran er1trén ert1s er2t1út erubi2 1e2rup e2r1u2ra er1urá er1uru er1u2sz er1u2tá e2r1új e2r1úr e2r1ú2s er1útj er1útl er1ú2to er1útr e2r1üd e2r1ügg er1ügyb er1ügyn er1ügyr e2r1üled e2r1ür e2r1üs e2r1üt e2r1ü2v e2r1üz e2r1ű2z er2v1ala er2v1alt erva2n erv1any er2v1á2ru er2vása er2v1átk erv1e2lÅ‘ké er2vereze er2v1e2sze er2vék er2vérte er2v1érz er2vos er2vös er2vú er1ya 2esa. 
e2s1a2b e2s1a2d 2esait e2s1alja es1alm esa2n es1any esa2p es1apá es1arc es1ass es1asz es1a2ty e2s1au 2esábó e2s1á2g 2esán es1ára es1á2ru 2esát es1bl es1br es2ch 1esdÅ‘ es1dró 1esedez ese1fr ese2gye e2s1elm e2seng es1epri e2s1erd eseren2 1e2settü 2esé. esé2g1el 1eséll 1esély e2s1ép es1érc esés3s e2s1i2d esike2t1 e2s1ina es1i2pa e2s1isk 1esítÅ‘st es1í2zű e1s2kat e1s2kál es2kic 1eskü es2lat eslege2l esle2t1o es2lin e1s2lu e1s2mac esná2 2esok. 2esoka 2esokbó 2esokk 2esokr 2eson e2s1op 2esos. 2esosa es1osz es1ott e2s1ó2r esÅ‘ká2 e2sÅ‘z es2pan es2pec es1p2l e1s2pó e1s2rá es1stá es3szab essz1aga essz1a2r 1esszenc es3szerű essz1élet essz1élt essz1int essz1ok es3szö e1s2tab es2taf es2t1a2l es2tana es2t1a2ny estapolc1 es2t1a2ra est1ará e1s2tat es2taz es2t1á2p est1áram es2t1ári es2t1árn es2t1át 1esteko es2tenz este2r1a es2t1este es2t1esti 1estéih 1estéj 1estém 1esténk es2t1ér. es2t1éri es2t1érr es2térte es2t1é2rü es2t1ill es2t1ina es2t1int es2tip es2t1isz es2tiz es2t1ol e1s2top esto2r es2t1os es2t1ó2r estö2 es2t1ös es2t1Å‘r. es2t1Å‘rk es2t1Å‘rö es2t1Å‘rr es2t1Å‘rt es2tun es2tú es2tür es2t1ü2z e2s1ud esu2r es1ura es1urá e2s1utá es1úr. es1úrb es1úré es1úrh es1úri es1úrk es1úrn es1úrp es1úrr es1úrt e2s1ú2t e2s1üz e2sza2c esz1acé e2szad e2sz1a2e esz1ajt esza2k1é esz1akn esz1alj esz1any e2sz1ara e2sz1au e2száru e2sz1ás eszá2t esz1áts e2szece eszeg1ér e2sz1egy esz1ekés e2sz1ela e2sz1e2mel 1eszenc e2sz1erd e2szev e2sz1ex eszé2do esz1é2pí e2sz1é2ri esz1g2 esz1i2pa esz1isk esz1ist e2sz1ita e2sz1iz eszke2l 1eszme. 1eszméb 1eszméi 1eszméj 1eszmék 1eszmén 1eszmét 1eszmév e2sz1old eszö2l esz1ölÅ‘ e2sz1ön es3zsá esz3se esz2tab esz2tad esz2t1a2gá esz2taj esz2t1ala esz2t1alj esz2t1ap eszt1árf esz2t1árn esz2t1e2v esz2t1ér. esztés3s 1esztéti esz2tid esz2t1ol esz2t1ó2r esztö2 esz2t1öl esz2t1Å‘r. esz2tut esz2t1út esz2tüz e2sz1ü2g e2sz1ü2z 1e2szűs 1e2szűt esz1z et1abr eta2c et1acé e2t1a2d e2t1a2gá e2t1a2gy e2t1a2j et1a2kas e2t1akc et1akó e2t1aku eta2lag eta2lak et1a2lás e2t1alb et1ald et1alf e2t1alg et1alj e2t1alk 1e2talo eta2n1é eta2nyag e2tapr et1a2ra e2t1ará e2t1arc e2t1arz et1asp e2t1ass et1a2szá e2t1atk e2t1a2to e2t1aty e2t1a2u e2t1a2z et1ábr e2t1á2cs e2t1áf et1ág. e2t1á2ga et1ágb et1ágg et1ágn et1ágr e2t1áh et1áju et1áll e2t1álm e2t1á2rad et1áre et1árny etá2ron et1á2rú e2t1á2ta etát1a2d e2t1áth et1átl et1áts et1átu e2t1átv et1bl et1br et2ch et1dr etea2 eteg1á ete2g1e2l ete2gó e2t1e2ke. e2t1e2kei e2t1e2kek ete2k1ék e2t1e2kénk e2t1e2kés e2t1elc et1eldo ete2lege ete2legy et1e2lej e2telemz e2t1e2lesé e2t1eléré etelés1s e2t1elha e2t1elhá et1elhel e2t1eliga e2t1eljá e2t1ellá e2tellen e2t1elme e2t1elmén. e2t1elméne e2t1elmét e2t1elnev e2telnö e2t1elny e2t1e2lo ete2lÅ‘ad et1elszá e2t1eltá e2t1elter et1eltérí e2t1elvá e2t1elvez e2t1elvo e2t1elz ete2mal e2t1ember ete2mel e2t1enz ete1p2 e2t1erd ete2rén etero1 e2t1erÅ‘ ete2s1a e2t1este. e2t1eszté ete2teté ete2t1ér. 1e2tetésen 1e2tetésn ete2tos ete2t1ö ete1tra e2t1ezr etéé2 et1ég. et1égb et1é2gé et1égg e2t1é2gi et1égn eté2k1a2 eté2k1á2 eté2k1e2l et1ékí eté2kos e2t1élm e2t1érc e2t1é2ré. e2t1é2rén e2t1é2rér e2t1é2rét eté2rie e2t1érke e2t1érm e2t1é2rÅ‘s e2t1értel e2t1érz eté2sa eté2s1ég eté2so etés3s eté2sza et1észl et1észr eté2t1a2 eté2ter. eté2tere eté1tra e2t1é2ven eté2ves. 
e2t1é2vet e2t1é2véh eté2véig e2t1é2véne eté2vétÅ‘ et1évh et1évt et1fr et1gl et1gr et2hei eti2d et1ide et1ido eti2g e2t1ige e2t1igé e2t1igy e2till et1i2ma e2t1imá e2t1i2mi 1e2timo e2t1imp etina1 e2t1ind e2t1inf e2t1ins e2t1inte e2t1inv e2tinz e2t1i2pa e2t1i2ra e2t1iri e2t1iro e2t1iró e2t1ism e2t1iste e2t1i2sza e2t1i2szon e2t1i2ta et1izé e2t1izg e2t1izo e2t1izz e2t1íg e2t1íj e2t1ín e2t1í2r e2t1í2v e2t1íz et1kl et1k2r et1kv 1etnol et1oda e2t1okm e2t1okt e2t1oml eto2n1a2 eto2nál eto2n1is eton1n et1opc et1ope e2t1opt 2etor et1ord e2t1org e2t1orm et1orom e2t1ors e2t1orv et1ost etosz2f et1oszl et1oszt e2t1ou e2t1ó2c et1ó2ra et1ó2rá e2t1ó2v et1öko etö2l e2t1öl. et1ölé e2t1ölÅ‘ e2t1ön e2t1ös e2t1öt et1ö2vü e2t1ö2z etÅ‘a2 etÅ‘e2l etőé2b etÅ‘fé2l1é2v e2t1Å‘r. e2t1Å‘rb et1Å‘rc et1Å‘2réh etÅ‘2ri. et1Å‘rk et1Å‘rl e2t1Å‘rn etÅ‘2rök et1Å‘rp e2t1Å‘rr e2t1Å‘rs et1Å‘2rü etÅ‘1s2p et1pl et1pr et1ps e1trap e1trau e1t2rág e1tréf e1t2ril et1sk et1sn et1sp et1st et3tad ettai2 etta2n1ó2 et2telem et2t1ing et2tír et1t2rá et1tri etty1á2 e2t1ug et1una et1und et1u2ra etu2sz et1utá et1utó et1utu e2t1új e2t1ú2ri e2t1út e2t1üd e2t1üg e2t1üld e2t1üt e2t1ü2v et1ü2zem et3ya e1u eu2bo eu2ga eu1k2h eu2mal eu2m1e eu2mim eu2m1i2p eu2mis eu2m1iz eu2mí eu2mór eu2mÅ‘ eum1p2 eu2mü eu2na eu2ná eu2ni eu2no eu2nó e2ur. eu2rá eur1áz eu2ri. eu2rig e2urt eu2szí e2utai eu2tal e2utan eu2taz e2utái e2utájá e2utáka e2utákk e2utákná e2utákr e2utánk e2utár eu2tó e2uts eu2zs e1ú eú2jí eú2sz eú2ti eú2to e1ü eü2dü eü2ge eü2gy eü2le eü2lé eü2li eü2lö eü2lÅ‘ eü2lü eü2re eü2rí eü2rü eü2sz eü2te eü2tÅ‘ eü2tü eü2ve eü2vö eü2ze e1ű eű2ri eű2ze eű2zé eű2zi eű2zö eű2zÅ‘ evá2r1al eve2szö evé2lá evé2l1e2l evé2nyel evé2r1emb evé2rö evé2so evé2sza evé2szö evízi2óét ex1ab ex1al ex1ap ex1áb ex1á2r e2x1át ex1bl ex1br ex1dr e2xeg e2x1elm e2x1elvá e2x1er e2x1ék e2x1él e2x1ép ex1idÅ‘ e2x1igé ex1inf e2x1ing e2x1int ex1izz e2x1íj e2x1ír e2x1ob ex1op ex1ön ex1ör ex1ös 1expan ex1sk ex1sp ex1st ex1új e2x1üg e2x1üv e2x1üz eza2c ez1acé e2z1a2d e2z1af ez1ajt e2z1a2l e2z1a2n ez1arz e2z1as ez1aut ez1áll ezá2ma ez1árb ez1árr ez1á2rú e2z1át ez1bl ez2dál ez1e2gy ez1e2kék e2z1ela e2z1e2leme e2z1elér e2z1elm e2z1el1ö e2z1e2mel ezen2t1e2 eze2r1a eze2rá eze2red eze2r1el eze2r1em eze2r1es eze2r1o ez1erÅ‘ ez1este e2z1e2sz eze2t1a2 eze2t1á eze2t1eg eze2t1e2l eze2t1esz eze2t1ér. eze2t1é2rÅ‘. eze2t1é2ve. eze2t1o eze2tu e2z1ég e2z1é2j ezé2ká ezé2k1o e2z1él ezér1emb ezé2rett ezé2ru ezé2sa ezéskész1 e2z1é2te ez1fr ez1gr ez1i2do ezi2g e2z1igé e2z1i2ko e2z1ill ez1imp ez1iná ez1ind ez1inf ez1int ezi2o ez1ion e2z1i2p ez1i2r ezisé2g ezi2s1égé e2z1ism ezi2ta e2z1í2v ez1k2r ezkupac1 ez1kv e2z1ob ez1old ezo2nár ezon3n ez1opt e2z1ox e2z1ó2l e2z1ó2r ez1ó2t ez1ö2b ez1ös ez1ö2v ezÅ‘e2l e2z1Å‘2ri ez1Å‘rl e2z1Å‘rs ez1Å‘2rü ez1Å‘rz ezÅ‘s1orra ez1pl ez1p2r 1ezrede 1ezreds 1ezrel 1ezrem 1ezresn 1ezrű ez3saj ez3sap ez3sát ez3sáv e2z3sé ezsió2 ez3sl ez3sor ez3s2p ez3s2ta ez3str e2z3sü ez3sz ez1t2r ez1u2r ez1ut ez1új ez1ú2t e2z1üg 1e2züs e2z1üt e2z1ü2z 2é. 
é1a éa2da éa2dá éa2do éa2dó éa2ga éa2gi éa2já éa2ka éa2la éa2l1e éa2na éa2ny éa2ré éa2ri éa2ro éa2uk é1á éá2fá éá2gy éá2ju éá2ra éá2ro éá2ru éá2rú é2b1ag é2b1a2j é2b1a2k é2b1a2l éba2n é2b1any é2b1a2v éb1ál ébá2r éb2b1á éb1eszű é2b1é2k é2b1él é2b1ép ébié2 éb1isk éb1i2va éb1íz éb1kr éb1pl éb1pr 1ébres é2b1ug éb1üg éc1a2d éc1aj éc1a2k éc1a2l éc1a2n éc1ál éc1ár é2c1e2lem é2c1elv é2c1ember é2c1e2mel éc1gr éc3ha éc3há éc3hí éc3ho é2c1i2d é2c1il éc1ima éc1ob éc1os éc1Å‘r éc1pr éc3sab écs1ol éc3sz é2c1u2t é2c1ül éc3zs é2d1ab é2d1a2c é2d1a2d é2d1a2g é2d1a2j éd1akc éd1akt éd1a2ku éd1alk é2d1a2n éd1apa éd1a2pá é2d1arc éd1asp éd1ass éd1a2ti éd1att é2d1ág éd1áp éd1dr é2d1e2g é2d1ej é2d1e2ke. éde2kétÅ‘ é2d1e2l1a é2d1elk é2d1ell é2d1elo éd1ember é2d1eml é2d1enz é2d1ep éd1erd é2derei é2derem é2derg é2derl é2derné é2d1erÅ‘ é2derrá é2derrÅ‘ é2ders é2dertÅ‘ 1édesa é2desem 1édesg 1é2desí é2d1ég é2d1é2j é2d1ékb é2d1é2ké é2d1ékk édé2l é2d1éle é2d1élm é2d1ép é2d1ér. é2d1é2ri é2d1érs é2d1érte é2d1értÅ‘ é2d1érv é2d1érz éd1fr édi2ad édi2aka édi2al édi2ar édi2a1s édiasz2 é2d1i2d édi2g é2d1iga é2d1ige é2d1igé éd1i2ko éd1ill é2d1i2m éd1i2na éd1ind é2d1ini éd1ins é2d1int é2d1i2p é2d1iro é2d1i2z é2d1ín é2d1ír éd1ív éd1kl éd1okta é2d1op é2d1or é2d1osz éd1ott éd1ó2r éd1öl éd1ön é2d1ö2r éd1öt éd1öv éd1öz é2d1Å‘rm é2d1Å‘rn édÅ‘s2 édÅ‘1sp éd1pl éd1pr é1d2ram éd2raz é1d2rám éd1sk éd1sp éd1sr éd1st éd1t2r é2d1ud éd1uj éd1ura é2d1u2t é2d1új éd1úr éd1ú2t é2d1üd é2d1üg é2d1üt é2d1üz éd3za éd3zá éd3ze éd3zó éd3zü é1e ée2bé ée2la ée2le ée2lÅ‘ ée2me ée2pi ée2rÅ‘ ée2se ée2sé ée2si ée2sÅ‘ ée2sz1a2 ée2sz1á ée2szel ée2szép ée2szir ée2szis éeszt2 éesz1tr ée2uf ée2vé ée2vÅ‘ é1é éé2ge éé2le éé2pí éé2ra éé2te éf1ai éf1ajtó éf1ing é2f1is éf1kl é2f1os ég1abl ég1abr ég1a2d ég1aka ég1akk ég1akn ég1alj ég1am ég1a2ny ég1a2p é2g1a2r ég1aut ég1a2v éga2z é2g1á2g é2g1ál ég1áp ég1árt égát1a2d ég1átá ég1á2t1e2 ég1átf ég1átj ég1átm ég1áts ég1átt ég1átv ég1bl ég1br ég1d2r ég1ece é2gedén é2g1edz ég1e2ge é2g1ej é2g1e2kés é2g1ela é2g1elb ége2leg ég2elek ége2lemb é2ge2leme ége2lemn ége2lemt é2g1elér é2g1elf é2g1elha ég1elhá ége2lin é2g1elis é2g1eljá é2g1elk é2g1ellá é2g1ellen é2g1elm é2gelnö ég1elny é2g1elö é2g1e2lÅ‘a ége2lÅ‘m é2g1e2lÅ‘n ég1e2lÅ‘t é2g1elp é2g1elr é2g1elsa é2g1elszí ég1elta é2g1eltá é2g1elvá é2g1elz é2g1enc é2g1e2ne é2g1eng ége1p ég1epi ége2rál é2g1erede ége2r1el égeri2 ége2rim ége2rin é2g1eró ég1erÅ‘ ége2rül 1é2gesd é2g1essz ége2sze é2g1eszk é2g1eszt 1é2getj 1é2getÅ‘h 1é2getÅ‘n 1é2getÅ‘t ég1eva ég1e2ve ég1e2vé ég1evo é2g1ex é2g1é2g é2g1é2l é2g1é2neke é2g1é2neké é2génekk é2g1é2nekn ég1épí é2g1épü ég1é2ret é2g1é2rez égé2ré. é2g1érh é2g1é2ri ég1érl ég1érm ég1é2rÅ‘. ég1é2rÅ‘k égért2 ég1érte ég1érté 1é2gésé 1é2gésn é2gést é2g1éva ég1fl ég1fr ég1g2l ég1g2r ég3gyo 1éghes 1éghü égi2as ég1i2den égi2g é2g1iga. égig1ap égig1as ég1igaz égig1á é2gill é2g1imá ég1ina ég1inf é2g1ing é2g1inj ég1ins é2g1int é2g1ira ég1iro é2g1i2ta é2g1iva é2g1i2zésn é2g1izg ég1íg ég1íj ég1ín ég1ír ég1ív ég1íz 1égj. égkia2 ég1kl ég1kr ég1kv 1égne. 1égnék 1égnén 1égnét 1égni é2g1ob ég1oki ég1ola ég1old ég1oli ég1oll ég1olt é2g1op é2g1o2r é2g1os ég1ott é2g1o2v é2g1ox é2g1óc ég1ó2r ég1öb é2g1ö2d é2g1ö2l ég1ön égö2r ég1örö é2g1ös é2g1öt é2göv é2g1ö2z 1égÅ‘. 1égÅ‘k é2gÅ‘r ég1Å‘ri ég1Å‘rö ég1Å‘si 1égÅ‘t 1égÅ‘v ég1pl ég1pr ég1ps ég1sk ég1sp ég1s2t égsz2 1égsz. 
égszáraz1 ég1tr é2g1ud ég1un é2g1u2t ég1úg ég1új ég1úr ég1ús ég1ú2t é2g1üd é2g1ü2g é2g1ür é2g1üs é2g1üt é2g1üv é2g1üz é2g1űz égvezé2 é2gy1a2 é2gyál é2gyedé é2gy1eg é2gyelek é2gyeleme égye2m é2gy1emb é2gy1eme égye2seké é2gy1esz é2gy1e2v é2gyeze é2gy1ék é2gyél égy1é2ne égyi2 é2gyin é2gyir é2gy1is é2gy1iv é2gyí égyo2 égy1ok égy1os égy1ot é2gy1ó2 égy1ö2l é2gy1u2 é2gy1ú é2gy1ü2l é2gyür éha2l éh1ala éh1any é2h1arc éh1art é2h1au é2h1a2v éh1edé 2éheg éh1e2gé éh1e2le éh1elé éh1elf éh1elh éh1ell éh1e2lÅ‘ éh1elt éh1elv é2h1enz é2h1e2r é2h1esem é2h1e2to éh1evé é2h1ex é2heze 1é2hezé 1éhezÅ‘ 1é2hezt é2hezü é2hezv éh1ég éh1é2k é2h1é2l é2h1ép éh1érb éh1fl é2h1ic é2h1if é2h1i2n é2h1ip éhi2r éh1ira éh1irt é2hit éh1ita é2h1iz éh1ín éh1kr é2h1od éh1old éh1öb éh1ö2d éh1ös éh1pl éh1pr 1éhsé éh1sk éh1sp éh1ud éh1új é2h1üg é2h1ür é2h1üt é2h1űr é1i éi2dÅ‘ éi2ga éi2gé éi2má éi2pa éi2rá éi2ro éi2ta éi2vás éi2vó é1í éí2rá éí2ro éí2ró éí2vá éí2ze é2j1ab é2j1a2da é2j1a2l é2j1any é2j1a2r é2j1á2l é2j1áp é2j1árn éj1eb é2j1e2lem é2j1elh éj1elm é2j1elv éj1emb é2j1esz é2j1e2v é2j1é2g éjé2j é2j1é2k é2j1él é2j1ép é2j1é2te 1é2ji. é2j1il é2j1im éj1i2n é2j1ip é2j1iz é2j1o2l éj1ó2r éj1öd éj1ön éj1ör éj1pl éj1pr éj1sp éj1sz éj1ud éj1u2r éju2t éj1uta é2j1úr é2jül é2j1űz ék1abl ék1a2cé éka2dat ék1a2dá ék1a2dó ék1ajt éka2kad é2k1aká é2k1akk é2k1akn éka2lag ék1a2lak é2k1alg é2k1alj ék1alko é2k1all é2k1alt ék1alu ék1amb ék1ang ék1app é2k1a2rá ék1arc ék1a2ré ék1arz é2k1asp é2k1ass ék1aszt ék1a2tom ék1a2ve ék1azo é2k1ál é2k1á2p ék1árad é2k1á2rak é2k1á2rá ék1árb é2k1á2ré ék1árh é2k1árk é2k1árn é2k1á2ro ék1árr é2k1á2ru ék1á2rú ék1ásó ék1ásv é2k1áta é2k1átá é2k1átc é2k1átd é2k1áte ék1áté é2k1átf é2k1áth é2k1áti é2k1átí é2k1átk é2k1átl é2k1átm é2k1átö é2k1átr é2k1áts ék1átt é2k1átu é2k1átv é2k1áz ék1bl ék1br ék1dr ék1egé é2k1e2kés é2k1elb é2k1e2leg ék1e2leme ék1e2lemé éke2lemm ék1e2lér é2k1elf é2k1elk é2kellá é2k1ellen é2k1elm é2k1elo ék1elp é2k1elta ék1eltá é2k1eltér ék1elül ék1elvi é2k1e2mel ék1eng éke1p2 é2k1erd é2k1e2rec é2kered é2k1e2rei é2k1e2rez é2k1erg é2k1e2ró é2k1e2rÅ‘ é2k1e2sete ék1eszk éke1szl é2k1e2tet éke2tik éke2vés é2k1e2vo é2k1ex é2k1ég ék1éjs é2k1épí é2k1épü é2k1érc é2k1é2rem é2k1é2ren é2k1é2ré é2k1é2ri é2k1érk é2k1érl é2k1érm é2k1é2rÅ‘. é2k1é2rÅ‘i é2k1é2rÅ‘k é2k1érp é2k1érr é2k1érte é2k1érté é2k1értÅ‘ é2k1é2rü é2k1érv é2k1érz 2ékés ékés3s ék1észl é2k1étk ék1étv é2k1é2ve. é2k1é2vek ék1é2vet é2k1é2véb é2k1é2vén é2k1é2vét é2k1é2vév é2k1évi ék1fj ék1fl ék1fr ék1g2r ékia2 éki2d é2k1ide ék1idé ék1ido é2k1idÅ‘ éki2g ék1iga ék1ige é2k1igé ék1i2ko ék1ikr é2k1ill é2k1ima é2k1imá ék1imi ék1ind é2k1ing é2k1inh é2k1inn é2k1int é2k1inv é2k1i2o é2k1i2p é2kirán é2k1iro é2k1isk ék1ism é2k1ist éki2sza é2k1i2ta é2k1i2z ék1íj é2k1ír é2k1í2v é2k1í2z ék2kál ék1k2l ék1k2r ék1kv é1k2lu é2k1ob ék1of é2k1oká é2k1o2laj é2k1olda ék1oltás é2k1oml ék1opc ék1o2pe é2k1org é2k1orm é2k1orr é2k1orz é2k1osto é2k1ott é2k1ou é2k1ox é2k1óc ék1ó2l é2k1ón ék1ó2ra ék1ó2rá é2k1ó2v é2k1ó2z é2k1ö2b ékö2l é2k1öm ék1önt ék1ö2rö é2k1ös é2k1ötl ék1ö2vö ék1ö2zö ék1Å‘2r ék1Å‘s ék1pl ék1pr ék1ps é1k2reá ék2rim ék1sh ék1sk ék1sl ék1sp ék1s2r ék1st ék1sz2 ék1t2r é2k1ud é2k1ug é2k1uj é2k1und é2kuni é2k1u2tac é2k1u2tak ék1u2tal é2k1u2tas é2k1utá é2k1új ék1úr. ék1úrn ékú2t é2k1úth é2k1úti é2k1útj é2k1útn é2k1úto é2k1útr é2k1útt é2kútv é2k1üd é2k1üg ék1ü2le é2k1ünn é2k1ür é2k1ü2s é2k1ü2t é2k1ü2v é2k1üz ék1űr. 
ék1űrb ék1ű2rö é2k1űz él1abl él1abr él1a2cé él1a2da él1a2do él1a2dó él1a2g é2l1a2j él1a2ka él1akc él1akn él1ako é2l1a2l él1amc él1a2me él1and él1apó él1arm él1asp él1ass él1asz él1a2to él1a2u é2l1a2va él1azo él1ábr é2l1á2g élá2l é2l1áll é2l1álmá é2lálmot é2l1álo é2l1á2p él1ár. él1á2rá él1árb él1árf él1á2ri él1árk é2l1árn él1á2ro él1árr él1á2ru él1á2rú él1árv él1át1a2 él1átc élát1e2 é2l1átf él1áth él1á2ti él1átj é2l1átm élá2tok él1átr él1áts él1átt él1átü él1átv él1bl él1br élc3sz él1d2r éle2b é2l1ebé é2l1e2dz él1eff él1e2gé élegész1 él1e2gy éle2k1a2 éle2k1á éle2k1e2l éle2kem éle2ker éle2k1es é2l1e2kés éle2kis éle2kol éle2k1on éle2kot éle2kó éle2k1ö2 éle2ku éle2k1ú él1e2l1a é2l1e2leg él1elf é2l1elo é2l1elÅ‘e éle2lÅ‘j é2l1e2lÅ‘k éle2lÅ‘t é2lemb é2l1e2mi é2l1emp éle2n é2l1ene é2l1eng é2lenj é2l1enn é2l1eny é2l1enz él1e2ró é2l1e2sés éle2szü éle2szű éle2t1a2 éle2t1á2 éle2t1eg éle2t1e2l éle2t1e2r éle2tesz éle2t1ék éle2tél éle2t1ér. éle2t1érd éle2t1é2rÅ‘. éle2térü éle2tés éle2téte éle2tété éle2t1étn éle2t1é2ve. éle2t1é2ved éle2t1é2vén éle2tik éle2tisz éle2t1o éle2tó életö2 éle2t1öl éle2t1ör éle2t1u2 éle2tüz éle2t1ű2z é2l1e2vÅ‘ é2l1ex élé2d é2l1éde é2l1é2g él1é2hes él1ékek él1ékí é2l1é2l él1épü é2l1ér. é2l1érb é2l1érd él1é2rem é2l1é2ret é2l1é2ré é2l1érm é2l1érn é2l1érte é2l1érté é2l1é2rü é2l1érz élé2sa élés3s é2l1é2te é2l1étk é2l1év. é2l1é2v1á é2l1é2ves é2l1é2vet é2l1évez él1é2vén é2l1é2vér é2l1é2vi él1évü él1f2l él1f2r él1g2r é2l1i2d é2l1iga éli2gá é2l1i2ge é2l1igé é2l1i2ko él1ill éli2m él1ima él1imi él1imp é2l1ind é2l1inf é2l1ing él1int él1inv él1inz él1i2on é2l1i2p é2l1irá él1iro é2lism éli1s2p é2l1i2szá él1i2va é2l1i2vá él1izg é2l1izm é2l1i2zo él1íj él1í2r él1í2v él1íz él1k2l él1k2r él3lyu él2mat 1élmén é2l1ob é2l1okm é2l1oks é2l1ol é2l1o2r élos3s é2l1osz é2l1óc él1ó2n é2l1ó2r él1öb él1öc1 élö2k él1ö2l él1ön él1ör él1ös é2l1ö2z élÅ‘e2 él1Å‘rü 1é2lÅ‘sk él1p2l él1p2r élrádi2 élre1pr él1sk él1sp él1st él2sz1árnn él1szt éltal2p1al él1trá él1tré él1tri él1tró é2l1ud é2l1ug é2l1uj él1ult él1u2r é2l1u2tas él1utó é2l1új él1úr é2l1üg él1üll él1ült é2l1ür é2l1üs é2l1üt é2l1üv é2l1üz él1űz 1élvezÅ‘ é2ly1ab é2ly1a2d é2lyaj é2ly1a2l ély1a2n é2ly1ap ély1a2r é2ly1as ély1au é2ly1av ély1az é2ly1á2l élye2c é2lyef ély1egy é2lyekés é2ly1el é2lyeml é2lyene é2ly1ent é2lyer é2lyeti é2ly1é2j é2ly1ék é2lyél é2lyés ély1f2 é2ly1ira ély1k2 é2lyol é2ly1ó é2lyös é2ly1öz é2lyÅ‘ ély1s élyú2 é2lyültet é2lyüt ém1abr éma1d2 ém1a2da ém1a2dó émae2 émai2k ém1ajt ém1akk ém1all ém1alm ém1app ém1arc ém1arm émas2 éma1sp émat2 éma1tr ém1aut ém1a2zo émá2c émá2l ém1álm ém1álo é2m1áp ém1árn é2m1áru émát1a é2m1átt ém1b2l ém1b2r ém1dr éme2c éme2g é2m1e2ke. 
é2m1e2kés ém1ela éme2led é2m1elh é2m1ell ém1e2lÅ‘ ém1els é2m1elv ém1e2re ém1ern ém1e2rÅ‘ éme2s1á é2m1esemé é2m1esz éme2ta éme2tel éme2t1ék éme2to é2m1e2v é2m1ex ém1éks é2m1é2l é2m1ép é2m1érc ém1érd é2m1éri é2m1érm é2m1érté é2m1és é2m1é2te ém1fl ém1fr ém1gr émi2al é2m1i2d émi2g é2m1igé ém1iks ém1ill ém1ind ém1inf é2m1inv é2m1i2o é2m1i2p ém1irá ém1irh é2m1iro ém1isk ém1ism émi2sz ém1i2ta ém1izo é2m1ír ém1í2v ém1íz ém1kl ém1kr émo2nac émo2ne émon1n émont2 ém1o2p ém1ost ém1ox ém1ó2l ém1ó2r ém1öb ém1ö2l ém1ön ém1ös ém1öt ém1öv ém1Å‘2r ém1p2l ém1p2r ém1sk ém1sl ém1sp ém1s2r ém1s2t ém1sz2 ém1t2r ém1uj ému2n ém1una é2m1u2r é2m1úr é2m1üg é2m1ür é2m1üt é2m1üv é2m1üz ém1wh én1abb é2n1abl é2na2dal én1adá én1a2do én1agá én1agr én1akc é2n1akn én1akt én1alk én1ana én1ano én1ant éna1pré én1arc énas2 éna1sp é2n1ass én1atm é2n1a2tom éna1t2r é2n1au é2n1ág éná2l én1ála én1álc én1áld é2n1áll é2n1á2p é2n1á2rak énás1s én1ásv én1áta én1átb én1áte én1átk én1átö é2n1átr én1átt én1átü é2n1átv énba2 én1bl én1br én1d2r é2n1ef én1e2gy 1é2ne2k1a2 1é2nekd é2neke. 1é2nekei 1é2nekek 1é2nekem éne2ker 1é2nekes éne2k1é2j 1é2nekf 1é2nekg éne2kiz 1é2nekí 1énekj 1énekka 1é2nekl éne2kó éne2k1ö 1é2neks én1ela én1elg én1elh én1elj én1ell én1eln én1e2lÅ‘ én1elp én1els én1elt én1elv én1eml éne2n é2n1ene éne2r1a éne2r1á2 éne2reme én1eró é2n1e2sz éne2t1a2 éne2t1á2 éne2t1e2l éne2test éne2to é2n1e2v é2n1ex én1éjb én1ékb én1é2ké én1é2ki én1é2kű éné2l é2n1éle é2n1élÅ‘ éné2m1a én1éneke é2n1ér. é2n1érc é2n1éré éné2ter 2énéz én1f2r énfüstö2 énfüs2t1öl én1g2r én1ido éni2g é2n1igé én1iko én1ill én1imi én1ina én1ind én1inf én1inh én1int é2n1i2p én1ism é2n1ita é2n1iva é2n1i2z én1íj én1ín é2n1ív én1k2l én1k2rá én1k2ré én1kri énmű1 én3nyo é2n1ol én1oml én1ond é2n1or é2n1osz én1ot é2n1ox én1óc é2n1ó2r én1ök én1öl én1ön én1ör én1öt 2énöv é2n1ö2z én1Å‘2s énpe2 én1pl én1pr én2sas én1s2p én1s2t2 én2sú én1tra én1trá én1t2ri én1tró é2n1u2t én1út é2n1ü2g é2n1üle é2n1ür é2n1üs é2n1ü2v é2n1ü2z é2ny1a2 é2ny1á2 énye2c é2ny1ece é2nyecse é2ny1e2dz é2nyef é2ny1egy é2nyekés é2nyela é2nyelá é2ny1ell é2ny1elo é2ny1elvá ény1elvű ény1elvv é2nyema ény1enc é2ny1ep é2nyerd ény1ered é2ny1e2rÅ‘ é2ny1e2sett é2ny1e2sés é2ny1e2sÅ‘ é2ny1essz é2ny1esté é2ny1e2sz é2nyeta é2nyeti é2nyetű é2ny1ev é2ny1ég é2ny1é2j ényé2k ény1éke ény1ékh ény1ékn ény1ékt é2ny1é2l é2ny1ér. é2ny1érd é2nyéré é2ny1é2ri é2ny1érn é2nyérte é2ny1érté é2ny1é2rü é2ny1érv é2ny1érz é2ny1és é2ny1é2te é2ny1étt é2nyéva é2ny1if ényi2g é2ny1igé é2ny1i2ko é2ny1ing é2ny1ira é2nyiro é2nyisk é2nyita é2nyiz ényí2r ény1írá ény1író é2ny1ok é2ny1o2l é2ny1o2r é2ny1os é2ny1ó2 ényö2 é2nyön ény1ör é2ny1ös ény1öz é2ny1Å‘2 ény1s ényt2 ény1tr é2nyu ény1u2r ény1us é2nyúj ény1út é2ny1ü2lÅ‘ é2ny1üs é2ny1üv é2nyüz énza2 én2z1ad én2zag én2zak én2z1al én2zar én2zau én2z1ál én2z1ás én2z1át én2z1e2r én2z1im én2z1in én2zis én2zí énzo2 én2z1ol én2zor én2zos én2zör én2z1Å‘ én2zsa én2zse én2zur én2zú énz1z é1o éo2la éo2pe éo2sz é1ó éó2ra éó2ri é1ö é1Å‘ ép1ado é2p1a2j ép1alk épa2ny ép1apo épa1pr ép1arc ép1aré ép1ass ép1atl épau2s ép1aut ép1a2va é2p1á2g é2p1álla é2p1állo ép1áp ép1ár. 
ép1árb ép1árf ép1ári ép1á2ro ép1á2ru ép1á2rú ép1átb ép1átj ép1átl ép1átm ép1áts ép1átü ép1átv ép1bl ép1br ép1dr é2peb ép1e2gé ép1e2gy é2p1e2kés 2épel é2p1ela é2p1elá é2p1elb é2p1elc ép1e2lemb é2p1e2leme ép1e2lemr é2p1elér é2p1elf é2p1e2lin ép1eljá é2p1elk ép1ellá é2p1ellen é2p1elm é2p1elo é2p1e2lÅ‘ke é2p1elr é2p1eltér ép1elu épe2n é2p1ene é2p1e2p é2p1er épe2rÅ‘ é2p1esem é2p1e2ser é2p1e2sett é2p1e2sés é2p1e2sÅ‘ é2p1este é2p1esté é2p1esti é2p1estj épe2sz é2p1esze é2p1eszk é2p1eta ép1e2tete épe2teté é2p1etű ép1eva é2p1ex é2p1ég é2p1é2k é2p1é2l é2p1é2neke é2p1ér. é2p1éri épé2sa é2p1éte ép1fl ép1gr é2p1i2d é2p1i2ko é2p1imp ép1ind ép1ing é2p1ins é2p1int ép1i2pai é2p1i2ra é2p1i2rá é2p1i2ro é2p1irt ép1isk é2p1ism é2p1ist é2p1i2ta é2p1iz ép1ín é2p1í2r 1építm é2p1ív épká2r ép1kl ép1kr ép1kv éple2t1ö ép1ob é2p1ok ép1old ép1olv ép1on ép1op ép1orm ép1ors ép1osz é2p1óc é2p1ó2h ép1ó2r ép1öl ép1öm ép1ön épö2r ép1ös ép1ö2z ép2p1ek ép2pí ép1p2l ép2p1od ép1p2r é1prog ép2rózá ép1sh ép1sk ép1s2n ép1sp ép1s2t épsz2 ép1t2r ép1udv ép1ug é2p1uj épu2n ép1uno é2p1u2r épu2s é2p1u2t ép1új ép1ús ép1út é2p1üd é2p1üg é2p1üld 1épüle é2p1ür ép1üs é2p1üv é2p1üz ér1abl ér1abs é2r1a2d ér1a2gá ér1agg ér1agy ér1a2ja ér1ajt é2r1a2ka é2r1akc é2r1a2la ér1alá ér1alg ér1alk ér1alm ér1alv ér1amb ér1amp ér1ane ér1ani ér1a2no ér1ant ér1a2pá ér1a2pó ér1app ér1apr ér1a2ra ér1a2rá ér1arc ér1asp ér1ass ér1a2ti ér1atk ér1atl ér1a2to éra1t2r ér1att ér1aty é2r1a2u ér1a2va é2r1a2x ér1a2zo é2r1ábr ér1ág. é2r1á2ga ér1ágb ér1ágg ér1ágy é2r1á2l ér1á2p ér1árá ér1árk ér1á2ro ér1árp ér1árr ér1á2ru ér1á2t1a2 ér1á2t1á2 ér1átc ér1á2t1e2 ér1áté ér1átf ér1áth ér1átj ér1átl ér1átm ér1á2tö ér1átr ér1átú ér1átü ér1átv é2r1áz 1érbán ér1bl ér1br ér2caj ér2c1a2l ér2c1a2n ér2c1as érc3c 1ércd ér2cedé ér2c1emb ér2c1esz ér2c3h ér2cil ér2c1im ér2ciz ér2c1o ér2có ér2c1ö ér2cÅ‘ ér2c3seb ér2c3sis ér2cú ér2cz ér2d1am ér2d1á2 1érdekbÅ‘ 1érdeke. 1érdekei 1érdekel 1érdekl 1érdekt 1érdekü 1érdekű 1érdemé ér2d1erÅ‘ érd1este érdés3s ér2d1i2ná ér2d1iz ér1dra érdü2l ér2d1ülé ér2d1ülÅ‘ ér2d3z ére2b ér1eba é2r1ebe é2r1ebé é2r1ebr ér1eff ére2g1a2 ére2g1á ére2ged ére2gel ére2g1em ére2gen ére2g1e2r ére2gev ére2g1él éreg1g ére2gin ére2go ére2g1ö2 é2r1e2gy éregyez1 é2r1ej é2r1e2kén. é2r1elá é2r1e2leg é2relemz ér1e2lég é2r1elis é2r1elk é2r1elo é2r1e2lÅ‘a ére2lÅ‘ir é2r1e2lÅ‘l é2r1e2lÅ‘me é2r1e2lÅ‘n ér1elr ér1eltá ére2m1a ére2má é2rember ére2m1eg é2r1emel ére2mes é2r1e2més ér1e2mis é2reml ére2m1o ére2m1ó é2re2mu ére2n é2r1ene é2r1e2r ére2sÅ‘ é2r1esÅ‘. é2r1esÅ‘b é2r1esÅ‘j é2r1esÅ‘n éres1Å‘2r é2r1esÅ‘t é2r1esté ére2t1a ére2tá ére2t1eg ére2t1el ére2t1ér. ére2t1é2ve. ér1e2ve é2r1e2vé é2r1é2d é2r1é2g é2r1éj. é2r1é2jét é2r1é2k é2r1é2l é2r1é2neke é2r1ép é2r1é2ri éré2s1el éré2sza éré2szá éré2ter é2r1étk ér1é2ve. é2r1évn ér1évv érfiá2 ér1fl érfo2g1ón. érfo2g1ós érfoto1 ér1f2r ér1g2r éri2al éri2csi é2r1ido érié2n ér1ifj éri2g é2r1iga é2r1ige é2r1i2ko é2r1ill é2r1i2ma é2r1i2má é2r1i2mi é2r1i2na é2r1ind é2r1inf é2r1ing é2r1inj é2r1ins é2rint ér1inv é2rinz é2r1i2o é2r1i2pa ér1ira é2r1isk ér1ism é2r1ist é2r1i2sz é2r1i2ta é2r1iva é2r1ivá é2r1i2z ér1íg ér1íj é2r1ín é2r1ír é2r1í2v é2r1í2z ér2jan ér1k2l érkö2z1e2pé ér1k2r ér1k2v érle2t1öl 1érméé 1érméj 1érmék 1érmi. 
é2r1o2l ér1ont éro2p ér1ope é2r1o2r ér1ott ér1ovi é2r1o2x éró2l ér1ólo ér1óri é2r1ö2b é2r1ö2c ér1öko é2r1ö2l é2r1öm é2r1ön é2r1ör é2r1ös ér1öt ér1öv é2r1ö2z érÅ‘1f2 ér1pl ér1p2r 1érsek ér1s2k ér1sl ér1s2p ér1s2r ér1s2t érsz2 ér1szk ér2tat 1értekez ér2t1ela 1értelm 1értesí 1értékil 1értékm értés3s ért1ö2ve 1értÅ‘c 1értÅ‘g 1értőü ér1t2ran ér1trá ér1t2ren ér1t2ré 1ér1t2ro ér1tró ér1uga é2r1uj éru2m1e éru2n ér1una é2r1u2r éru2s1ér ér1u2sz ér1uta ér1utá ér1u2to é2r1u2z é2r1új ér1ú2r ér1ú2s érú2t é2r1ü2g érü2ké. é2r1ür é2r1üs é2r1üt é2r1ü2v é2r1üz é2r1ű2z érv1a2dó érv1ala ér2v1a2n ér2v1á2gy 1érvel ér2v1elem ér2v1égb ér2v1égh ér2vék ér2vél ér2vu 2érzá 1érzék. 1érzékb 1érzéké 1érzékh 1érzékk 1érzékn 1érzékr 1érzékt 1érzékü 1érzékű 1érzésil és1abl é2s1a2d és1agy és1ajt és1a2ka és1akl és1akn és1ako és1akt és1a2la és1alg és1alk és1all és1alt és1alv é2s1am és1ana és1ant és1a2ny és1apa és1apr és1ara és1a2rá és1arc és1aré és1arm és1arz és1asp és1ass é2s1a2u ésá2c é2s1ál és1árt ésá2s és1áta és1áth és1átl és1bl és1d2r ése2b é2s1ebé é2s1ef és1e2ge és1e2gy é2s1e2ke. é2s1e2kei é2s1e2kés é2s1eks é2s1ekv és1ela és1elá és1elb és1elk é2s1elm és1elo és1elr é2s1elvé és1elz és1ember é2s1e2mel é2s1eml ése2n é2s1ene é2s1eng é2s1eny é2s1e2p és1erÅ‘ é2s1esz é2s1eto é2s1ev é2s1ex és1égb é2s1é2géb é2s1é2géé és1é2géh és1é2gén é2s1é2gév é2s1égh é2s1é2gi é2s1égn é2s1égv é2s1é2hen é2s1é2j é2s1é2k é2s1é2l é2s1ép é2s1ér. és1érb és1érc é2s1érd é2s1é2rem é2s1é2ren é2s1é2ré é2s1érh é2s1érl é2s1érn é2s1é2rÅ‘. é2s1é2rÅ‘k é2s1é2rÅ‘v és1érr é2s1érte é2s1érté é2s1értÅ‘ é2s1é2rü é2s1érv és1ész é2s1é2te és1é2ve. és1é2vet és1é2véb és1é2vét és1é2vév és1fl és1fr és1gr é2s1ic é2s1id é3sidet ésié2 ési2g és1iga és1ige é2s1ikt é2s1ill é2s1imi és1imp és1ind és1inf é2s1ing é2s1int és1inv é2s1i2p é2s1ira é2s1i2rá és1iro é2s1i2s é2s1i2ta é2s1i2vá é2s1í2r é2s1ív és2katu és1kl és1k2r és1kv é1slág é2s1ob és1oko és1okt é2s1o2l é2s1om é2s1op 2ésor és1org és1orn és1orv é2s1os é2s1ot és1o2v és1óc és1ó2l és1ón és1ó2r é2s1ö2l é2s1ön é2s1ös é2s1öt é2s1ö2v é2s1ö2z és1p2l és1p2r és2pri és1ps és1s2k és1s2p és1s2t éssz2 és3szab és3szag és3szak és3szap és3szá és4szel. és3szere és3széle és3szf és3szi és3szí és3szo és3szó és3szö és3szÅ‘ és3szt és3szú és3szül és3szű ést2r és1tra és1trá és1tré é1stru és1ud és1u2r é2s1u2t é2s1ú2t é2s1üg é2s1ünn é2s1ür és1ü2té é2s1üz ész1abl é2sz1a2d ész1agy é2szaj északá2 észa2k1áz é2szakc észa2k1ü ész1ale ész1alg ész1all észa2n ész1ant ész1any észa2p ész1apo é2sz1a2r é2sz1as ész1a2to é2sz1au é2szaz 2észá é2száb é2sz1á2g észá2l ész1állá és3záp ész1áram é2szárá ész1árán ész1árf ész1árk é2sz1árn ész1á2ru é2szás é2sz1á2t ész1á2z é2sz1e2ge. é2sz1e2gei é2sz1e2gy é2sz1ej észe2k1a2 észe2ká ész1ekés észekö2 észe2köv é2sz1ela é2sz1e2leg ész1e2lemb ész1e2leme ész1e2lemé ész1e2lemm ész1e2lemn é2sz1elk ész1elÅ‘d é2sz1elÅ‘r é2szelÅ‘z ész1ember é2sz1emi é2s3zené é2sz1eng é2sz1ep ész1e2rez ész1e2rén ész1erkö é2szerÅ‘ ész1e2sete ész1eszt észeta2 észe2t1ak észe2t1e2l észe2t1ér. 
észe2t1o észe2t1ör ész1e2ve é2sz1ex é2szég é2sz1ékt é2sz1é2l é2sz1é2p ész1éré é2széró é2szész é2sz1é2te é2sz1étk é2sz1i2d é2szif ész1i2ko é2sz1ikt é2sz1ill é2sz1im é2szina é2sz1ind é2sz1inf ész1ing é2sz1inté ész1io é2sz1i2p é2szira é2sziro é2szirt é2sz1i2s é2sz1ita é2sz1iz ész1í2j é2szír ész1í2vá ész1í2vé ész1í2vó ész1k2 ész1okt észo2l ész1old ész1olt é2szop ész1ors é2sz1os é2sz1ot é2s3zón 2észö és3zöld ész1ölt é2sz1ön é2sz1ös é2szöt ész1Å‘rs ész1p2 ész3su ész3sű észtés1s ész2tors észtorz1 ész1tra ész1tri és3zul ész1ut ész1úrn é2sz1út é2sz1ü2g é2szünn é2szüt é2sz1ü2z é2szűz ész1z ét1abla ét1a2do ét1a2dó ét1adt ét1agg éta1gr étai2k é2t1ajk é2t1ajt ét1akc éta1kr ét1akt é2t1alj é2t1alk ét1als é2t1alt é2t1ani é2t1ann é2t1ant ét1anya ét1anyá é2t1aps é2t1arc é2t1arg ét1aszt état2 ét1atom étaü2 ét1azo ét1ábr é2t1á2g é2t1áll ét1á2rad ét1á2ram ét1á2rá é2t1á2ri é2t1á2rú ét1árv ét1áti é2t1átr é2t1átv ét1bl ét1dr étea2 é2tedén éte2g1a2 éte2gele éte2g1elv éte2ger éte2go é2t1e2ke. é2t1e2kei éte2la éte2l1á éte2l1e2l éte2lemü éte2l1er éte2l1ék éte2l1ér. éte2l1é2ren éte2léré é2telfog é2tellen éte2lo é2t1e2lÅ‘l éte2l1Å‘2r é2t1e2mel étera2 éte2rad éte2r1ag éte2r1á2 éte2reg éte2rel étermo1 é2t1erÅ‘ éte2sik é2t1estün é2t1esv é2t1eszm ét1e2vi été2g ét1ég. ét1ége ét1égé ét1égn ét1égt é2t1é2j é2t1é2l é2t1é2ren é2t1é2ré é2t1é2ri é2t1é2rÅ‘t é2t1érte é2t1érté é2t1é2rü é2t1érv é2t1érz ét1észl é2t1étb é2t1é2té é2t1étn é2t1étr é2t1é2ves ét1fl ét1fr ét1gr é2t1i2d éti2g é2t1ige é2t1igé é2t1i2ko é2t1i2m é2t1ind é2t1inf é2t1int éti2p é2t1ipa é2t1irá ét1iro étis3s éti2sz é2t1i2vá ét1izé é2t1í2r é2t1í2v ét1kl ét1kr 1étkű ét1kv étmeg1g ét1o2ká ét1o2ki ét1okm ét1o2l é2tola é2tolv ét1oml é2t1op ét1ord ét1org éto2ris ét1orm ét1ors é2t1óc ét1ó2ha ét1ón. ét1órá étö2k étö2l é2t1ömö é2t1ön étö2rül é2t1ös ét1ö2t ét1ö2v é2t1ö2z ét1Å‘r. ét1Å‘rb ét1Å‘ri ét1Å‘rm ét1Å‘rö ét1Å‘rü é2t1Å‘2s é2t1Å‘2z étpen1 ét1pf ét1pl ét1pr é1t2rafá ét2réf étro2m1é2 ét1sl ét1sm ét1sp ét1st ét1t2r ét1udv é2t1ug é2t1uj étu2n ét1una ét1und é2t1u2r é2t1u2t ét1ú2s é2t1üg é2t1ür ét1üs é2t1üt é2t1ü2v é2t1ü2z étű2z é2t3ye é1u éu2ni éu2ta éu2tá éu2zs é1ú é1ü éü2gy éü2le éü2lé éü2lö éü2lÅ‘ éü2rü éü2té éü2tö éü2tü éü2ze é1ű é2vad 1évad. év1adó 1évadr év1agy év1ajá é2v1a2la é2valá é2v1alk é2v1am é2v1a2n év1arc év1au év1ava é2v1a2z é2v1állo é2v1áta év1áth év1átk é2v1átr évá2z év1ázá 1évbeo é2v1e2g 1é2vei 1é2vekt év1ela 1évelf 1é2v1eli é2v1ell év1eln é2v1elo é2velÅ‘ é2v1els é2v1elt é2v1elv é2v1eng 1é2venk 1é2vent év1e2ri é2v1e2rÅ‘ 1é2vesb 1évesé év1e2sés 1évesf 1é2vesi 1é2vesk 1é2vesr év1esté év1eve év1é2ge év1é2gé év1égr év1égt év1ér. év1é2ré évé2ri. év1érr é2v1érte é2v1érté évé1ska évé1s2p évé1stá év1gr évi2g év1iga é2v1igé 1é2vih 1é2vik 1é2vim é2vin év1ind 1évine 1éviné év1ing év1int év1ism 1é2vitÅ‘ 1é2viv é2v1ír 1évkös év1kr 1évny év1ok év1os év1ó2r év1ö2r év1ös év1öt 1é2vÅ‘d év1pr év1sk év1st év1sz évsza2k1as évtá2r1a2d 1évtÅ‘. év1ur év1új év1ú2t é2v1üg é2vük. é2vükb é2vüke é2vükh é2vüki é2vükn é2vükt é2vülj é2v1üt é2v1üz 1évzá éz1a2d éza2gal éza2ge éz1a2j éz1akr é2z1a2l éza2n éz1any éz1a2r éz1as é2z1a2t éz1az é2z1á2g é2z1á2l éz1ásv éz1áz éz1bl éz1d2r éze2c éz1ece é2z1e2dé éz1e2gé éz1e2gy é2z1ela é2z1eld é2z1e2le ézel1en é2z1elf é2z1elh é2z1ell éz1elÅ‘b éz1előé éz1elÅ‘g éz1elÅ‘i éz1elÅ‘j é2z1e2lÅ‘ké éz1elÅ‘n éz1elÅ‘p éz1elÅ‘s éz1elÅ‘t éz1elÅ‘v é2z1els é2z1elv é2z1emb éz2en. éze2n1á éz1enz é2z1e2p éze2r1a éze2r1á éze2r1in éze2rip éze2ro éz1erÅ‘ éz1e2rű éz1esz éze2ta éze2t1á éze2t1eg éze2t1e2kéh éze2t1el é2z1e2v é2z1ex é2z1é2d é2z1ég é2z1ék é2z1é2l é2z1ér. 
é2z1érc é2z1é2ri é2z1érm é2z1érp é2z1érté é2z1érz éz1fr ézgé2 éz1g2r ézi2d éz1ido é2z1igé é2z1ill éz1imp éz1ind éz1ing é2z1int ézi2o éz1ion é2z1i2pa é2z1ipá é2z1i2r é3zire é3zirÅ‘ ézis3s ézi2s1ú ézi2sza ézi2s3zá éz1i2ta é2z1i2vá éz1izm éz1izo é2z1izz éz1í2j éz1ír éz1í2v éz1íz ézkar2c3h éz1k2l éz1k2r éz1ola éz1old éz1op é2z1or éz1osz é2z1ox éz1öb éz1ö2k éz1ön éz1ör éz1ös éz1öt éz1ö2v ézÅ‘a2 éz1Å‘r. éz1pl éz1p2r éz3saj éz3sar éz3seb éz3ser éz3sik éz3sín éz3sod ézsó2 é2z3sók ézs1ór éz3sö éz3spi ézs1s éz3sug ézs1ú2r é2z3sü éz3sz éz1t2r éz1ug éz1uj é2z1u2r éz1ut é2z1ü2g é2z1ür é2z1üs é2z1üt é2z1ü2z éz1űr éz3zsa 2f. 1fa faát1 fa1b2ro fa2chi fac3hi. fa2c3ho fa1d2r faegyez1 fae3lek fae2r faé2d fa1f2r fa1g2r fagyé2 fa2gyék fa2gyol fa2gyÅ‘ fa2gyúr fai2d fai2s fa2j1a2l fa2j1az fa2j1ág fa2j1á2ru fa2j1egys fa2jelem fa2j1elk fa2jelm fa2j1eln fa2j1elv fa2j1es fa2j1ö fa2jÅ‘ fajt2 faj1tr faju2r faj1ura fa2jü fa1k2l fa1k2r fa1k2v fa2l1a2dá fa2l1a2dó fa2lajá fa2l1a2kó fa2l1a2l fala2n fal1any fa2lav fa2l1azú fa2l1ábr fa2l1ál fa2l1á2ro fa2l1áth fa2l1átv fal1e2gy fa2l1elem fa2l1elÅ‘ fal1ese fa2l1éri fal1f2 fa2lid fa2l1inj fa2l1iz fa2l1í2 fa2l1ol fa2l1oml fa2lor fa2l1os fal1órá falö2 fa2lök fa2l1öv fa2l1Å‘r fal1p2 fal2s1ín. falta2n fal2tem fal2tip fal2tüz fa2lü fa2n1e2vet fan1évb fan1évr 2f1a2nya. faó2r fa1p2l fa1p2r fa2r1a2dá fa2ragy fa2r1a2kó fa2r1a2l fa2r1at fa2r1a2v fa2r1ácc fa2r1ál fa2r1á2z fa2r1em fa2r1e2pe fa2r1id fa2r1iná fa2r1ing fa2r1izm fa2r1izo far2k1al far2kol far2mál far2min far2m1unká fa2r1okke fa2r1osz fa2r1ut fa1s2k fa1s2p fa1sta fa1sté fa2sz1ív. fa1tri fau2r fau2s fau2tá faü2t faü2z fa2xe faxi2 fa2x1ik fa2xin fa2xí fa2xö fa2xú fa2xü fa2xű fa1ya fa1ye 1fá f1ábr fá2c3h fá2gép fáj2lak fáj2lá fáj2l1e2 fáj2l1í2 fá2ní fánka2 fán2kar fán2ká fán2tad fán2t1a2g fán2tan fán2t1á2 fán2t1e2 fán2tis fán2tor fán2tö fá2po fá2ram f1á2rá fás3s fá2t1a2 fá2t1á fá2te fá2t1érd fá2t1i2s fátmű1 fá2t1ol fá2t1ü2 fázi2s1ág fázi2s3z fba2l1 fc2lu fd2ró 1fe fe2aa fe2aá fe2ab fe2ac fe2ad fe2ae fe2aé fe2af fe2ag fe2ah fe2ai fe2aj fe2am fe2an fe2a1p2 fe2ar fe2as fe2at fe2aú fe2av 2f1e2dz fe2ed fe2el fe1f2r fe2gy1i fe2gyÅ‘ fei2n1i fe2ja fe2j1á2 fe2j1egy fe2j1elm fe2j1e2r fe2j1esz fe2j1e2v fe2j1o fe2j1ös fej1tr fe2j1u fe2jú fek2t1emb fe2l1a2 fe2l1á2 fele2c fe2l1e2dz fe2l1e2g fe2l1e2h fe2l1elev fe2l1e2mel fe3lemen fe3lemet fe2l1e2més fe2l1eml fe2l1eng fe2l1enn fe2l1e2re fe2l1esd fe2lesed fel1eseg fe2l1e2sett fe2l1esés fe2l1esik fe2l1esk fe2l1esni fe2l1e2sÅ‘ fe2l1essen fe2l1este fe2l1estü fe2l1e2sz fe2l1e2tes fe2l1e2tet fe2l1ettek fe2l1ettél fel1e2ve fel1e2vé fel1evi felé2d fe2l1éde fe2l1é2g fe2l1é2k fe2l1é2l fe2l1é2p fe2l1ér. fe2l1é2rek fe2l1éren fe2l1érh fe2l1éri fe2l1érj fe2l1érk fe2l1érn fe2l1érs fe2l1ért fe2l1érv fe2l1étet fe2l1i2 fe3lib fe2l1í2 fel3l fe2l1o2 fe2l1ó2 fe2l1ö2 fe2l1Å‘g fe2l1Å‘r fel1p2 felt2 fe2l1u2 fe2lú 2f1e2ner fen2n1a2 fen2ná fenne2 fen2nev fen1sc fer1abl fe2rak fe2r1áll fe2r1á2r fere2g fer1egy fe2r1old fe2r1olj fe2r1oln fe2r1olt fe2sem fe2s1er 2f1e2sé feu2m1é2 fe2vÅ‘ fe2z1ér. fe2zó fe2zs 1fé féka2 fé2k1ad fé2k1ag fé2k1al fé2kas fé2k1au fé2k1á2 fé2k1e2g fék1ell fé2k1er fé2k1ék fé2k1é2l fé2k1ér. fé2k1o2l fé2k1or fé2kön fé2k1u2 fé2k1ú fé2l1a2 fé2l1á2j fé2l1á2r fé2leg fé2l1ekk fél1elé fé2l1ell fé2l1elÅ‘ féle2m fél1emb fé2l1eme fé2l1e2r fé2l1ese fé2l1esz 2f1é2let fé2l1ez fé2l1éva féligaz1 fé2lin fé2lir fé2l1is fé2liv fé2lí fé2l1ok fé2los fé2ló fé2l1ö fé2lÅ‘r félpen1 fél1t2r fé2l1ú2 fé2mab fé2mad fé2m1a2g fé2m1a2j fé2m1a2n fé2m1ap fé2mau fé2maz fé2m1ác fé2m1ál fé2m1ec fé2medé fé2me2két. 
fé2m1e2l fé2m1emb fé2mer fé2mez fé2m1é2k fémé2r fé2m1éré fé2m1i2n fé2m1ir fé2m1is fé2mit fé2m1iz fé2mí fé2m1o fé2mó fé2mö fé2m1u fémü2 fé2m1ül fé2ny1e2g fé2ny1e2l fé2ny1e2r fé2ny1is fé2ny1í fé2nyo fé2nyö fé2nyú fé2pí fé2pü 2féraá 2féraf 2férai 2féral 2férar 2férád 2féráé 2férám fér2cem fére2g1e2 fé2s1orr fész1al fészá2 fész1ár fé2szeg fé2sz1el féta3u2 2f1évk fé2zs fézs1e2 fézs1o ffe3li ffi2ac ffi2ag ffi2aka ffi2am ffi2at ffli2 ff1sh 1f2fy fg2ló 1fi fi2aa fi2aá fi2ae fi2aé fi2af fi2ah fi2aí fia2la fi2ao fi2aó fi2aö fia1p fi2apa fia2ra fia2rá fi2aré fi2aszá fi2aszo fi2au fi2aü fi2avá fi2ave fiá2l fi2c3h fici2t1á fi2dan fi2d1á fi1drá fie2l fie2n fie2r fie2s fies1e fie2t fi1fl fi1fr 2f1i2gaz fi1k2l fi1krá fi1kré fi1kri fi1k2v fi2lac fil1ada fil1akn fil1akt fi2lal fi2lam fi2l1ál fi2lár fil1elt fi2l1en fi2les fil1esz fil1ért fi2l1i2d fi2l1i2m fi2l1in fil2mad fil2mak fil2man fil2m1as fil2mat fil2m1á2 fil2mos fil2mu fi2lö fi2lü 2f1i2má fimeg1 2f1i2nas 2find fin1osz fi2n1ó fint2 fin1tr fi2n1u2 fi2nú 2finx fio2v fi3ók fió2k1a2l fió2kar fió2k1e2 fió2kép fió2kol fió2kö fió2r fiÅ‘2r fi1p2l fi1p2r fi2rod fir2s1 fis1arc fi1s2p fist2 fi1str fi2t1ad fit1a2la fit1alj fi2tág fit1árak fitá2t fi2t1áta fi2t1éh fi2t1érd fi2tik fi2t1in fi2tir fi2t1i2z fito1p2 fi2t1osz fi2tön fi2t1Å‘r fi2t1ur fiu2min fiu2t fi2úa fi2úá fi2úe fi2úf fi2úg fi2úí fi2úl fi2úo fi2úö fi2úő fi2úp fi2úsz fi2úti fi2úu fi2úú fi2úü fi2x1ár fi2x1el fi2xö 1fí fí2rá fírcsa2 fí2r1in f1í2ró fír1tr fí2rü fjú1sz fki2s1 fk2li fk2lu fk2ró f2lak f2lan f2lep f2lip fli2s1é flo2n1á flo2né f2lot f2luk 1fo fo2am fo2g1á2c fo2gál fo2g1árk fo2g1áro fo2g1e fog3g fo2gí fo2g1or fo2g1os fo2gö fo2gur fo2gü fohá2szat fo2kal fok1áll fo2k1árr fo2k1e2 fok1ing fo2kí fok1k2 fo2kola fo2k1or folta2n fol2t1e fol2t1ö fo2n1ada fo2nado fo2nak fon1alap fona2l1e2 fon1alja fon1aljo fona2n fon1any fo2n1asz fo2n1au fo2n1á2r fonás1s fo2n1át fond2 fon1dr fo2neg fo2n1el1 fo2ner fo2n1es fo2nék fon1f2 fon1i2ko fo2n1i2m fo2nin fo2n1is fo2niz fo2ní fon1k fo2n1or fo2n1osz fo2nö fo2nÅ‘2 fons2 fon1st fon2t1i2n fontó2 fon2tón fo2nü fonya1 fo2r1ad forakész1 fo2r1ala fo2r1a2n for1ato fo2reg fo2r1el fo2r1er forgás1s fo2r1il fo2r1i2o for2m1e2l for2m1em for2merÅ‘ for2mes for2m1érté for2mil for2m1in fo2r1ol fo2rö 2f1orrú fors2 for1sp for2t1ál for2t1e2l for2t1érd for2tü fo2rü foto1s fo2vi 1fó fó2rá fósza2 1fö fö2la fö2lá föl2dök fö2l1e2 fö3le. 2f1ö2lésbel fö2liv fö2lí föl1k2 fö2lo fö2ló fö2lön fölt2 föl1tr fö2lu fö2lú fön2n1 fönne2 fö2sá 1fÅ‘ fÅ‘a2n főá2g fÅ‘1bl fÅ‘1br fÅ‘1dra fÅ‘e2l fÅ‘e2r fÅ‘e2s főé2h főé2te fÅ‘1fl fÅ‘1f2r fÅ‘1gl fÅ‘1gn fÅ‘1gr fÅ‘i2d fÅ‘igaz1 fÅ‘i2ta főí2t fÅ‘1kl fÅ‘1kv fÅ‘2n1ag fÅ‘2n1apj fÅ‘1pl fÅ‘1pr fÅ‘2r1aj fÅ‘2rem fÅ‘re2s fÅ‘2r1ék fÅ‘ri2 fÅ‘2r1is fÅ‘2rül fÅ‘r1ülé fÅ‘1sl 2fÅ‘sö fÅ‘1sta fÅ‘1str fÅ‘sz2 fÅ‘1szl fÅ‘1szp fÅ‘1szt fÅ‘1t2r főü2l fÅ‘zőé2 fp2la fp2ro fra1s frá2ma frá2má frá2m1e2 frá2nye f2rás f2resk fré2ná fré2nén 1f2ric f3rich f2rig f3rip 1f2ris fron2t1a2 fron2t1e2 f2röc f2rus fs2ho fs2po fs2tí fst2r ft1aj fta1kr ft1alj ft1bl f2t1ef fte2m f2t1eme f2t1ér. fti2g f2t1ige f2t1ing ft1ös ft1út f2t1üg ft1üt f2t1ü2z 1fu fuga1 fura2t1e fu2s1as fu2seg fu2sis fu2sz1á2 fu2sz1ol fu2szü futókész1 futó1s2p 1fú fúj2tal 2f1ú2ré 2f1úrr fú2zs 1fü 2f1ü2gy fü2la fü2lá füle2c fü2l1eml fü2l1e2p fü2l1e2v fü2l1é2j fü2l1é2k fü2lí fül3l fü2lo 2fülÅ‘ fü2lÅ‘t fül3t2 fü2ma fü2má fü2m1e fü2mo fü2mÅ‘ fü2mú 2f1ünn für2t1Å‘ füstü2 füs2t1ülé 2f1ü2té 2f1ü2tÅ‘ 2f1üzl 1fű fű1dr fű1fr fű1pr fű1sr fűsze2r1el fű1t2r fű2zá fű2zis fű2zo fű2z1ölt fvárosé2 1fy 2g. 
1ga 2g1abc 2gabr 2g1abs ga1by 2g1a2dag g1adap 2g1a2data 2g1a2datb gadás1s 2g1adm ga2dog 2gadomá gadói2ko 2gadósat 2g1a2du. 2g1a2duk gae2r 2g1aff ga1fl ga2ga. 2g1agg ga1g2rá g1agy. ga2gyu g1a2gyú 2g1aján 2g1ajk 2g1ajt 2g1a2kad ga2kác 2g1akc g2a3ké 2gakév ga1kl 2gaknai g1aktu g1akv gala2g1ú ga2lat 2galáf 2galáí ga2láv 2g1alb 2g1alc gale2g1e2 2g1alk galo2m1e 3galop g1a2los 2g1a2lud ga2lul ga2lusz 2galvó ga2m1a2gá 2g1amc 2g1amp ga2nab 2g1a2nal 2gancs gan2csal gancs1ág gan2cse gan1d2 3g2ané gan2ga g1a2nim ga2nyag ga2ny1at ganye2l ga2nyér g1a2nyó ga2nyö gaó2r ga2pa. ga2paá ga2pac ga2pas ga2pán ga2pát ga1p2l ga2pok ga2pó. ga2pób ga2pók ga2pón ga2póv 2g1app ga1p2ré ga1pro gara2t1í 2g1a2rák 2g1a2rány ga2rén gar1k2 ga2rÅ‘ ga2seg ga2s1is ga2s1ín ga1s2pi ga1spr gas3sze gas3szí gast2 ga1s2ta ga1sto ga1str gasz1agg ga2sz1an ga2szág ga2szás ga2sz1es gasz2tár ga2t1a2d gat1ará ga2tav ga2t1ál ga2t1á2ram ga2t1eg ga2tep gaté2s ga1thi ga2tim gat1old ga2tomi g1a2uk ga2ul ga2us ga2van ga2var ga2vat 2g1a2vu gaza2n gaz1any gaza2te ga2zál 3g2azd 3g2aze ga2zer 2gazg 2gazítá gazmus1s 2gazod 2gazon ga2zü 1gá g2ába 2g1á2bé g2ábó 2g1ábr 3g2áci gács1a2va gá2cse gá2cs1il gá2csü gáda2 gá2d1al gá2dá gá2d1él gá2d1ia gá2dis gá2dÅ‘ gá2d1ü gá2fo 2g1ág. gá2gak gá2gat gá2gaz 2g1ágg 2g1á2gi 2g1á2gú 2g1á2gy gágy1asz 2g1á2hí g2ája g2ájá gá2j1e2 gá2jü gála1p 2g1álc 2g1áldo gá2l1es g2áli 3gális 2g1áll. 2g1állí 2g1állu 2g1álmo 3g2ály g2ám. gá2mal gá2m1e2 2g1á2mí gá2mü gána2 gá2nac gá2nal gá2nap gá2n1at gá2nás gán2cse gán2csi gán2cs1ö2 gán1d2 gá2n1e 2gángy. gá2nig gá2n1io gá2nis 2g1á2niz gá2nol gá2n1ó2 gán1sz2 gánt2 gán1tr gá2n1ú gá2nü gá2ny1a2n gá2nyar gá2ny1as gá2ny1e2 gá2nyérd gá2nyérz gá2nyij gányká2 gá2nyö gá2po gá2rad gá2r1ag gá2r1aj 2g1á2ram gá2r1a2n gá2rar gá2ras gá2ra2t gár1ato gár1att gá2r1av 2g1á2raz gá2rág gá2r1ál gá2r1á2z gá2re2 gár1em gár1es gár1et gár1éte gá2rif gá2r1i2p gá2r1isk gá2r1ism gá2r1iz gá2rí 2g1árnyé gá2r1ol gá2rop gár1ost gá2r1osz gáró2 gá2r1ór gá2rö gá2rÅ‘ gár1s2 gár1tr 2gáru. 2g1árud gá2rug 2g1á2ruh 2g1á2rulá 2g1á2ruló 2g1árur 2gárus gár1uta gár1utu gá2rúr gá2rút gá2rü gá2rű g2ás. gá2s1a2d gá2sal 2gásatá gá2s1á2g gá2s1árad gá2s1árn 2g1á2sás gá2s1á2to gá2s1e2 gá2s1im gá2sis gá2sí gá2sor gá2só gás1ó2r gá2sö gá2sÅ‘ gás3szé gás3szü gást2 gás1tr gá2sut gá2s1ü2 gá2sű gá2szal gá2szas gá2szatl gá2s3zav gá2sz1ál gá2sz1e2 gá2szis gá2szit gá2szí 2g1ászká gászkész1 gá2sz1okta gá2sz1ól gá2s3zón gá2szöv gá2szü g2át. 2gátad gá2taj 2gá2tal gát1a2la gát1alj gátá2 gá2tál gá2t1ár gá2tát 2g1átbo gá2t1eg gá2t1e2l gá2t1e2m gá2ten gá2tél gáté2r gá2t1ér. gá2t1ére 2gátfe 2g1átg 2g1átha gá2til gá2tis gá2tiz 2g1á2tí g1átmé gá2t1olda gátó2 gá2tór 2g1á2t1ö gá2tÅ‘ 2g1áttű 2gá2tü2 g1átvi g2áva g1á2vó gá2z1a2d gáz1akn gáz1akt gá2z1a2l gá2zar gá2z1a2t gá2z1av gá2z1ár. gá2z1árá gáz1áre gá2z1árh gá2z1árk gá2z1árn gá2z1á2ro gá2z1árr gá2z1árt gá2záru 2g1á2zásos gá2z1e2 gázi2g gá2z1iga gá2zim gá2z1i2p gá2z1i2s gá2z1iz gá2zí gá2zö gá2zÅ‘ gáz3sp gáz3sta gáz3ste gáz3sug gá2zsú gáz3sü gáz3sz gá2zü gbeá2 gbé2lá gbé2rem gb2la gb2le gb2lo gb2lú gb2ra gb2ri gb2ro gb2ru gcsapá2 gcsa2p1ág gda2u gd1ell gde1p2 g2d1é2k g2d1ér. gd1ina. gd1ináb gd1ináh gd1inár gd1inát gd1ináv g1d2rá gd2re gd2ro gd1t2r g2d3zö 1ge gea2c gea2g gea2l geá2r g2ebe g1e2cet 3g2eci g1eddz 2gedénn 2gedény ge2dze ge2dzé ge2dzi ge2dzÅ‘ 2g1eff 2gege 2g1e2gé 3gegom ge2gye ge2gyé ge2gyo 2g1e2gyü ge2hes ge2het 2g1e2hü 2g1ejt 2g1e2ke. 2g1e2kei 2g1e2kek 2g1e2kéé ge2kéj ge2kék 2g1e2kés. ge1k2li 2g1eks 3g2el. g1elad ge2lag 3g2elap ge2las ge2lál ge2l1ár 2g1elcs 3geld. 
2g1eldo 2g1eldö ge2lef ge2legi g1elegy 3g2elek. 2ge2lemé ge2lemi 2ge2lemk 2g1e2lemm 2g1e2lemz gele2n 3gelend ge2l1eng ge2l1eny gel1este ge2lev ge2lég 3g2elésb 3g2elések 3g2elésen 3g2elésén 3g2elésér 3g2elésh 3g2elési. 3g2elésk 3g2elésrÅ‘ 3g2eléss 3g2eléstÅ‘ 3g2elésü 2g1elész 3gelész. 3gelészÅ‘ 3gelészü 3gelészv 2g1elhel 3g2elhes 3g2elhet 3g2eli. 3g2elik gel1int 3g2elj. 3g2elje 3g2eljé 3g2eljü 2g1elkü 2gellenÅ‘ 2g1elmé 2g1elnev 3g2elnén 3g2elni 2g1e2los ge2lö 3g2elÅ‘. ge2lÅ‘bbr 3g2elÅ‘be 3gelÅ‘bé 3gelÅ‘bi 3gelÅ‘bo 3g2elÅ‘bÅ‘ 3g2elÅ‘c 3g2elÅ‘e 3g2előé 3g2elÅ‘g 3g2elÅ‘h 3gelÅ‘ja 3g2elÅ‘jé 3g2elÅ‘jo 3g2elÅ‘jü 3g2elÅ‘jű 3g2elÅ‘k. 3g2elÅ‘ka 3g2elÅ‘kb ge2lÅ‘kel 3g2elÅ‘ker 3g2elÅ‘ket 3g2elÅ‘kez 3gelÅ‘ké. 3g2elÅ‘kén 2g1elÅ‘kés 3g2elÅ‘kh 3g2elÅ‘ki 3g2elÅ‘kk 3gelÅ‘kl 3g2elÅ‘kn 3g2elÅ‘kö 3gelÅ‘kÅ‘ 3g2elÅ‘kr 3gelÅ‘kt 3gelÅ‘ku 3gelÅ‘kü 2g1e2lÅ‘l 2g1elÅ‘m 3g2elÅ‘n. 3gelÅ‘na 3g2elÅ‘ne 3g2elÅ‘né 3g2elÅ‘nk 3gelÅ‘nö 3gelÅ‘nyi 3g2elÅ‘o 3gelőö 3g2elÅ‘Å‘ 3g2elÅ‘p g2elÅ‘re. 3g2elÅ‘rés 3gelÅ‘ro 3g2elÅ‘rÅ‘ 3g2elÅ‘s. 3gelÅ‘sá 3gelÅ‘so 3gelÅ‘sö 3g2elÅ‘sza 3gelÅ‘sze 3gelÅ‘szé 3gelÅ‘szi 3gelÅ‘szó ge2lÅ‘szö 3g2elÅ‘t. 3g2elÅ‘tá 2g1elÅ‘té 3g2elÅ‘tÅ‘ 3gelÅ‘tü 3gelőü 3g2elÅ‘vá 3g2elÅ‘ve 3g2elÅ‘vé 3gelÅ‘vis 3gelÅ‘viz 2g1e2lÅ‘z g1elsÅ‘ 3g2elsz. 2g1elszá 2g1elszo 3g2elt. 3gelte. 3g2eltek 3g2eltem 3geltes 2g1eltett. 2g1eltettn 3geltéb 3g2elték 3geltél 2g1eltér 3geltét 2g1elto 2g1eltű ge2lül 3g2elün 2g1elv. 2g1elvb 2g1elven 2g1elvét 2g1elvh 2g1elvn 2g1elvo 2g1elvr 2g1elvű 2g1elvv ge2ly1e2g ge2lyid ge2man 2g1ember 2g1embl g1e2mel 2gemelé 2gemelk ge3mell 2gemelÅ‘ 2g1e2més 2g1eml ge2moc 2g1emp gena2 ge2n1ad ge2nar ge2n1as ge2nau ge2n1ál ge2nár ge2nát g2end ge2n1eg 3generá ge2nere 2generg ge2n1esz ge2n1e2vet ge2n1e2vez gené2k ge2n1éké ge2n1ékn ge2n1ékt geni2d1 ge2n1in ge2n1is gen1k2 g1enni g1ennü gen3nya ge2nop ge2n1or ge2n1os gen2sért gens3s gent2 gen1tr ge2n1ur g2enye 2g1enyh g1enyv 2g1enz ge2ob ge2oc ge2of ge2ok ge2om ge2orgia ge2ot geó2r ge2ped ge2per ge2pés 2gepi ge2pos ge2rab ge2r1ad ge2r1aj ge2ral ge2r1a2n ge2r1a2p ge2r1as ge2rau ge2ráb ge2r1á2g ger1áll gerá2r ge2r1ára ge2rát g1erde 3g2ereb 2geredm ge2reg g1erege ger1egy 2g1e2rej ge2r1e2lÅ‘k ge2r1eml ge2r1eny ge2r1er ge2resz ge2r1e2v ge2réj ge2r1ék ge2rél ger1éle ger1é2lé ge2r1ép ge2r1étt ge2r1i2na ge2r1ing ge2r1i2p ger2is ge2r1iz ge2rog ge2ror ger1osz ge2rot ge2ró ge2rök ge2r1ön ge2rÅ‘ g1erÅ‘. g1erÅ‘k g1erÅ‘v ger1Å‘z geru2 ge2rur ge2r1ü2g ger1üld ge2r1ü2lé g2es. ge2s1á ge1sc gesd2 g2eseb 3g2esei ge2s1e2l ge2s1emb g2esen. ge2send 2g1e2s1er 2g1e2setb 2g1e2setet 2g1e2seté ge2setl 2g1e2setr 2g1e2sett 2g1esél 2g1e2sés g1e2sik 2geskü gesleg1 g1esni 2g1e2sÅ‘ g2esr gess2 3g2essé. 3g2esség g2est. 2g1este 2g1esté 2g1esti 2g1estj g1esvé 2g1e2szek ge2szes ge2szet g1eszl 2g1eszm ge2t1ak ge2tal ge2t1a2ny get1ap ge2t1as get1e2gy 2g1e2tetÅ‘n ge2t1é2k geté2l get1éle ge2t1ér. ge2t1é2rü ge2t1é2ve. getó2 ge2t1ór getÅ‘kész1 get1Å‘rt ge1t2ró get2t1ebbé ge2tut ge2t1ü2z ge2ur geu2s ge2vic 2gevo ge2vol 2g1e2vÅ‘ ge1yé ge2zo gezÅ‘a2 2g1ezr 1gé gé2ber 2g1ébr géc3c gé2d1a2 gé2d1á2 gé2d1e2l gé2d1e2r gé2d1esz gé2dik gé2din gé2dir gé2dí gé2d1o gé2dö gé2d1Å‘2 gé2d1u2 gé2d1ú2 gédü2 gé2d1ülé gé2dű gé2d3z gé2gi gé2gÅ‘ gé2gü 2g1é2he. 2g1é2hek 2g1é2hen 2g1é2hes 2g1é2het 2g1éhh 2g1éhs 2g1éj. 
gé2jért 2g1éjh 2g1éjj 2g1éjs gé2kei g1é2kel g1é2kes gékes1s g1é2kez 2g1éks gé2lál gé2lel 2géles 2g1é2let 2g1é2lez gé2lén 2g1élm gélvvá2 gé2lya gé2lyá gé2lyeg gé2lyo gé2lyú gé2m1a2 gé2m1á g2émb gé2m1eg gé2mel gé2mer gé2mér gé2m1o géna2 gé2nab gé2n1ad gé2nag gé2n1al gé2n1an gé2n1ar gé2n1at géná2 gé2n1ár gé2nát 3g2énd gé2neg 2génekes 2génekl gé2n1e2l gé2n1e2r gé2n1e2t gé2n1é2g gé2n1in gé2ní g2énn gé2nó gé2n1ö gént2 gé2nú génü2 g2ény gé2ny1e2g gé2nyid gé2nyis gé2nyír gé2nyo gé2nyö gé2nyú 3gép. gé2p1a2 gé2p1á2 gé2p1e2g 3gépel gé2p1ell gé2p1esé gé2p1esz gé2pik gé2pí 2g1épít gé2p1o2 gé2pó gé2pö gé2pÅ‘ 3g2épp géptá2v gé2p1u gé2pú gé2pülé géra2 gé2rag gé2rak gé2ral gé2r1an gé2rap gé2rar gé2ras gé2rá 2g1érc. 2g1érd gé2r1el gér1eml 2géret 2géré. gé2rie 2g1é2rin gé2ris gérká2 2gérl 2gérm gé2ro gé2rö 2gérÅ‘. 2gérÅ‘k 2gérte 2gérté 2gérth g1érti g1értj g1értl g1értÅ‘. g1értÅ‘e g1értÅ‘k g1értÅ‘v g1érts g1értv gé2r1u2 gé2r1ú 2g1érv. 2gérvé 2g1érz 3gérzá gé2sar gé2seg gé2s1e2l gé2s1é2g gé2sim gé2s1o gé2só gés3sza gé2sza gés3zav gé2szá 3gészítőü gé2sz1o gé2szó 2g1é2tel gé2ter gé2tet 2g1étk 2g1é2to 2g1étr 2g1étt 2g1év. 2g1évb 2g1é2ve. 2g1é2ved 2gévei 2g1é2vek 2g1é2ven 2g1é2vet gé2véb 2g1é2vén 2g1é2vét 2g1é2vév 2g1évf 2g1é2vi 2g1évj 2g1évk 2g1évn 2g1évr 2g1évs 2g1évt 2g1évv gé2zan gé2zÅ‘r géz3s2 gfala2d gfa2l1ada gfa2le gfi2úké gf2la gf2lo gf2ló gf2lu gfö2l1üle g1f2ra gf2rá g1f2re gf2rí g1f2ro g1f2rö gf2ru g2g1a2p gg1arc gga2s g2g1aszá g2g1aszó gge2lest ggés3s gg1i2ta g2g1iz gg1orv ggó2n g2g1óni ggÅ‘s2 g1g2raf g1g2ran g2gre gg2ro gg1sp g2g1ü2g g2gy g3gyak g3gyalá g3gyap g3gyara ggy1aro g3gyat g3gyár g3gyáv ggy1á2z ggy1e2lÅ‘ g3gyeng g3gyep g3gyer g3gyérü g3gyil ggy1ol g3gyom g3gyor g3gyó g3gyö g3gyÅ‘ g3gyu g4gyút ggy1ült g3gyür g3gyű gha2d1e2 ghajói2ko g2ham. g2hamb g2hame g2hamh g2hamm g2hamn g2hamr g2hamt gha2sábr g2hia 1ghy 1gi gi2aa gi2aá gia1b2 gi2aba gi2abá gi2abi gi2abo gi2ac gi2a1d2 gi2ae gi2aé gia1f gi2afr gi2a1g2 gi2ah gi2aim gi2aí gi2aj gi2akar gi2akas gi2ala gi2alá gi2alé gi2am gi2anó gi2any gi2ao gi2aó gi2aö gi2aÅ‘ gi2a1p2 gi2ar gia1sz2 gi2asza gi2aszá gi2aszem gi2aszé gi2aszf gi2aszi gi2aszo gi2aszó gi2at gia1t2r gi2au2 gi2aú gi2aü gi2aű gi2av gi2az 2gibr 2g1i2dea 2g1i2deá 2g1i2deg gi2dei 2gidej 2g1i2deo 2g1i2dé gi2dió 2g1i2do 2g1i2dÅ‘ gi1fl gig1ad gig1ara gig1ass gig1au 2gigaz gig1ál 2g1i2ge. gig1e2c gig1eg gig1eh gig1eng gig1enn gige2r gig1ere gig1e2s gig1et gig1e2v gi2gé gig1éh gig1é2l gig1ém gig1é2ne gig1é2r 2g1igéz gig3g gi2g1i2 gi2gí gi2g1o2 gi2g1ó2 gi2g1ö gi2g1u gi2gú gi2g1ü gi2gű g1i2gy 2g1i2ha 2g1ihl 2gijes gi2ker gi2kes 3gile g1ill gi2ma. gi2man gi2máb 2gimád gi2már 2g1i2máz gi2me. 2g1imp gina1p2 gi2nas gi2n1á2z 2g1ind 3giné 2ginf 2g1inkv g1inná g1inni g1inno g1innu 2g1inp 2gins 2g1inta 3gintc 2g1inte 2g1inté g1inth g1inti g1intj g1intÅ‘. g1ints g1intu g1intv 2g1inv 2g1inz gi2one 2g1ioné gi2ono gi2ot gi2óa gi2óá gi2óc gi2óe gi2óf gi2óg gi2óká gi2ókom gi2ól gi2ó1p gi2órá gi2ó1sp gi2óta gi2ótá gi2óú gi2óü gi2óz 2g1i2pa 3g2ips gip2sza gip2szá gip2szo gi2ram 2g1i2rat 2g1i2rá gi2rig 2giro gi2rod 2g1i2rón 2g1irt g1isc 2g1i2si 2g1isk 2g1isl 2g1ism 2g1isp 2g1istál 2g1iste 2g1i2szá 2g1i2szo gi2szu gi2tas g2itá 2gitác 3g2iti 3gitte g1ittu 2g1i2vad 2g1i2vás g1i2vo 2g1i2vó 2g1i2zé. 
gi2zév 2g1i2zo 1gí gí2gé gí2ja gí2já gí2m1a2 gí2má gí2né gí2ny gí2ra gí2rá gí2re gí2ri gí2ro gí2ró g1í2tész gí2vá gí2ve gí2vé gí2vo gí2vó gí2vü gí2ze gí2zü gí2zű gka2ró2r gké2p1e2l gkia3dóná gkiá2 gkis1s gk2la gk2li gk2lí gk2lo gk2ló gk2lu gkö2zén gkö2z1ér gkÅ‘vá2 gk2ra gk2rá gk2re gk2ré gk2ri gk2rí gk2ro gk2ró gk2va gk2vó gla1p2l gla1s2t gla3t glá2sza gle2g1a2 glege2l gle2g1elé gleí2 gle2t1a2 gle2tá gle2t1el gle2t1é2rÅ‘t gle2t1étn gle2tos gleü2 glé2nyel g2lor 1g2los glóó2 glós2 glÅ‘re2 glÅ‘2reg glÅ‘2rel glÅ‘2ro gme2g1a2 gme2g1e gme2g1é gmens1s gmen2sz gmus3s gna2d gn1ada g2n1a2j g2n1alk gna2pe g2n1a2r 1g2náb 1g2náh 1g2nái g2nán. g2náná g2nára gná2s3z g2nát. g2nátó 1g2náv gn1br gne2i g2n1e2l gne2m1e2l gne2sir gnes3s gne2szá gneto1 gné2l g2n1éle gnév1a gné2v1á gni2g g2n1iga g2n1ing g2n1i2p g2n1i2r g2n1is gnit1a2 gni2tel g2n1okt g2nol gn1olt 1g2nora 1g2nore 1g2noré gn1ök gn1ös gn1pr gn1s2k gn1st gn1t2r g2núj gn1üg gn1üz 1go go2áz go2be 2g1obj 2g1o2dú go2et g1o2ka. gok1a2d g1o2kai 2g1o2k1a2l 2g1o2ká goki2 gok1ir goklá2 2g1okm g1o2kok 2g1o2kos g1o2kot 2g1o2koz 2g1okta 2g1o2laj go2l1a2l 2g1olda 2g1oldá 2g1oldó gol2f1a gol2f1e gol2fin go2lim go2l1ó2rá 2goltalo 2g1oltár 2g1oltás 2g1oltó. 2g1oltv 2golvad 2golvas go2m1as 3g2omb gom2b1árn gom2b1árt gom2b1e2 3g2ome 2g1oml go2n1a2to go2n1áll gonc3c gon2d1á2 gon2d1ér. gondi2 gon2dik gon2doks gon2d1or gon2dó go2n1e2 gon2g1a gon2gál g2ono go2nol 2g1onto go2nü go2nye go2od go2pá 2g1o2pe gor1ass gorasz2 gora1t2 gor1áll 2g1orc go2r1el 2gorie 2g1orm go2roz go2rö 2gorz go2se go2sö 2gosto 2g1ostr g1osty go2sü go2s3za go2s3zá go2szó go2ua go2ub go2ud go2ur go2ut go2vác 2g1o2ve go2xi go1ya go1yá 1gó góa2d góá2g góá2r gó1bl gó2c3h gó2c1ol gócsapá2 gócsa2p1ág gó2cü 2g1ó2dán gó2div gó1dru gó1f2r góí2v gó1kl gó1kré gó2lar góle2l gól1elÅ‘ gó2l1é2h gó2lí gó2lü 2g1ó2nu góó2r gó1p2l gó1p2r g1ó2rad g1ó2ras gó2ráka gó2rár 3g2óro 3g2ósa gó2s1aj gó1s2ká gó1s2p gó1s2rá gós3s gó1s2ta gó1stá gó1str gó1sz2 gó2s3zám gó2s3záras gó2s3zárá gós3zárú gó2tau gó2t1is gó1tré gó1tri 3g2óval 2góvod 2g1ó2vó gó2vu 3g2ózá 3g2ózi 3g2ózo 3g2ózu 1gö 2g1öbl 2g1ö2ko gö2ku 2g1ö2lá 2g1ölb g1ö2le 2g1ölr 3g2ömb göm2b1a gömbe2 göm2bel göm2b1er 2g1öml 2g1ö2na g1ö2ná gö2ne gö2nö 2gönt gö2ra 3g2örb 3g2örc gör2csa gör2csá gör2csel gör2csÅ‘ 3gördí 3gördü 2g1ö2re 3g2örg 3g2örn gö2rök g1ö2rü 2görv gösé2 gö2s1én 2g1össz 2g1ösz 2g1ötl g1öv. g1övb g1ö2ve g1ö2vé g1övh g1övn g1ö2vö g1övr g1övt g1ö2vü g1ö2vű g1övv 1gÅ‘ gÅ‘a2n gőá2g gÅ‘1br gÅ‘e2l gÅ‘e2r gőé2b gÅ‘1fr gÅ‘1gl gÅ‘2g1Å‘2 gÅ‘i2ta gÅ‘1kl gÅ‘1kv gÅ‘nyá2 gÅ‘1pl gÅ‘1pr 2g1Å‘r. 
g1Å‘rb g1Å‘rh g2Å‘rit gÅ‘2riz 2g1Å‘rj g1Å‘rk 2g1Å‘rl g1Å‘rn gÅ‘2rök 2g1Å‘rr g1Å‘rs g1Å‘rt 2g1Å‘2rü 2g1Å‘rz 3gÅ‘rzÅ‘sö gÅ‘2s1ep gÅ‘2sib gÅ‘1spi gÅ‘1sta gÅ‘2s1ü2v gÅ‘1tr gÅ‘u2t gőü2l gÅ‘2zát gÅ‘2zeg gÅ‘2z1e2k gÅ‘2z1e2l gÅ‘2z1o gÅ‘zÅ‘2s gÅ‘2zsö gÅ‘2z3su gÅ‘2z3sű gÅ‘2zú gpe2c3h gp1e2lu gpia2c1i2o gp2la gp2lá gp2le gp2lu gpon2ga gpo2re gp2ra gp2rá gp2re gp2ré gp2ri gp2rí gp2ro gp2ró gp2rű gp2sz 1graff grafo1 gra2mad gra2maj gra2mal gra2m1a2r gra2m1as gram1a2z gra2m1á2 gra2m1e2 gra2m1érté gra2mik gra2m1in gra2m1is 1grammj gramu2 gra2mut 1g2ra1p 1g2raví grá2cs1i gráfa2 1g2rá2f1an 1g2ráfb grá2f1e2 1g2ráffe 1gráfid 1g2ráfl 1g2ráft grá2lát grá2l1e2 g2ráná grán1d g2ráni grá2rá grá2r1i2p grá2s3za gre2e gren2d1Å‘2 g2ril gril2l1a gri2s1á gris3s grisü2 gri2süt g2ríz gróa2 gróá2 gró2f1a gró2fú gró1p 1g2rup gság1g gsé2gel gs2ho gs2ka gs2ká gs2ko gsk2r gs2la gs2lá gs2li gs2má gs2mi gs2mu gs2ná gso2k1o gsors3s gs2pa gs2pá gs2pe gs2pé gs2pi gs2po gs2pó gsp2r gs2rá g1s2ta gs2tá gs2te gs2té g1s2ti g1s2tí gsto2 gst2r g1stra g1stru gs2tu g1s2tú gsugá2 gs2vé gsza2ké gsza2k1ü gszála2d gszá2l1ada gszá2li gszá2r1a2da g1sz2c gszé2t g1sz2f g1sz2k g1sz2l gsz2m g1sz2p gszt2 g1sz2tá gta2g1ar gtag1g gta2n1ó2 gtára2d gtá2r1ada gtermo1 gtermosz2 gté2rá gti2m gt2ra gt2rá gt2re gt2ré gt2ri gt2ro gt2ró gt2rö gt2ru gt2rü gtű2z1Å‘2r 1gu gu2at gu2ay 2g1u2bo gu2el gu2er2 g1u2ga 3g2ugg g1u2go 2g1ugr gu2id gu2in gu2ir 2g1ujj gula2te gula2t1í gu2ná 2g1u2ni gu2nó gu2nu 3g2urí gus1abl gu2sad gu2s1a2n gu2sas gu2sat gu2s1av gu2sál gu2s1e2 gu2s1érté gu2sil gu2sis gu2s1í gu2sol gu2sor gu2s1ó2 gu2sö gu2sÅ‘ gus3s2 gussz2 gust2 gu2sü gu2szá gus3zs gu2tac gu2tak gu2tal gu2tam gu2tan gu2tas gu2tat gu2taz 2g1utc 2g1u2tó gutó2d1o2ku 2g1u2tu gu1ya 1gú gú2ja gú2jí gú2ju gú2ny1e gú2ny1í2 2g1úr. gú2ré gú2ri 2g1úrn gú2ro 2g1ú2sz 2gú2t1a2 2g1ú2t1á2 2g1útb 2g1útc 2g1útd 2g1ú2t1e2 2g1ú2té 2g1útf 2g1útg 2g1úth 2g1ú2ti. 2g1ú2tia 2g1ú2tig 2g1ú2tih 2gútij 2gútiko 2g1ú2tina 2gútiná 2gútir gú2tis 2g1ú2tit 2g1ú2tiv 2gútiz 2g1útj 2g1útk 2g1útl 2g1útm 2g1útn 2g1ú2to 2g1útp 2g1útr 2g1úts 2gútt 2g1útv 2g1útz gú2zi 1gü gü2cs gü2dé gü2dí gü2dü gü2ge gügy1ér. 2g1ü2gyi 2g1ügyl 2g1ügyv 2g1üld gü2len gü2lep gü2lik gü2löm 2g1ünn 3g2ürc 2g1ü2re 3g2üri 2g1ü2rí 2g1ü2rü gü2te gü2té gü2ti gü2tö gü2tÅ‘ gü2tü gü2ve gü2vö gü2ze gü2zé 1gű 2g1űr. 2g1űrb gűre2 g1űrh gű2ri 2g1űrj 2g1űrl 2g1űrm 2g1űrn 2g1ű2rö 2g1űrt gű2ru gű2rü gű2ze gű2zé gű2zi gű2zö gű2zÅ‘ gva2s1u2 gvá2gy1a2da gvá2nyan gvás1s gverés3s gvezé2rel gvé2nyel gvi2na gvó1s2 g2y 1gya gy1abl 2gy1a2cé 2gyadag gya2dal 2gyadatb gy1a2dót 2gyag. 2gyagb gya2ge gya2gép 2gyagg 2gyagh gya2gis 2gyagk 2gyagn gya2g1ol 2gyagt 2gyagya gy1a2gyu 2gy1aja 2gy1akc 2gyaknák 2gyakt. 2gyaktu gya2laj gyal1akt gya2lapo gya2lapp gy1alat 2gyaláí gya2lel gya2lik gyan1ab gya2n1e 2gy1a2nya gy1a2nyá gya2pak gy1a2páh 2gyapái 2gyapák 2gyapám 2gy1a2pán gy1a2pás 2gyapátó 2gyapjá gya2pón gya2pór 2gyapp gy1aps gy1aran 2gyaraw gyard2 2gyardom gya2rel gy1argó 3gyari gya2r1ón gya2r1ó2r gya2róv 2gyarú 2gyasak 2gyasat 2gyasra 2gyaty 2gyazá 1gyá 2gyábr 2gy1ág gy1álc gy1áll gy1álm 3gyám 2gy1áp 2gyáre 2gyárf 2gy1á2rok 2gyárus 2gy1árú gyá2szó 2gyáta 2gyátk gy1átl 2gyátv gy1bl gy1br gy1dr 1gye gye2d1Å‘s 2gy1e2dz gy1e2ge gy1e2gé gy1e2gye 2gyeke. 
2gy1e2kés 2gyeleg gye2legy 2gyelemű 2gyelnö 2gyeltér 2gy1elvá gy1elvű 2gyembl 2gy1e2mel gy1eml 2gyenget 2gy1e2ny 2gyenz gye2pal 2gy1erd gy1e2red 2gyerej gy1e2res gye2rén 2gy1ern 2gyero 2gy1e2rÅ‘ 2gy1ers gye2seg 2gyesél 2gyesg gy1e2sÅ‘ 2gy1este 2gyestés 2gy1esti 2gyestű gye2szü 2gyetem 2gyetet 2gyezm 2gyezrede 2gyezrel 2gyezs 1gyé 2gyébr 2gy1ég 2gyéhes 2gyéhs 2gyéji gy1é2ke. gy1é2kes 2gy1é2le 2gy1élt gy1élv 3gyém 2gyéneke 2gyéni 2gy1ép 2gyér. 2gy1érd 2gy1érem 2gyérez gyé2rét gy1érg 2gy1érh 2gy1é2ri 3gyérí gy1érke 2gy1érm 2gyérn 2gyérr 2gy1érte 2gy1érté gy1értÅ‘ 2gy1érv 2gy1érz 2gy1été 2gyétk 2gyév. 2gyévad 2gyévb 2gyéve. 2gyévei 2gyévek 2gyéven 2gyéves 2gyévet 2gyévh 2gyévi 2gyévn 2gyévr 2gyévt 2gyévü 2gyévv gy1fl gy1f2r gy1gl gy1gr 1gyi 2gy1ide 2gyidé 2gyidÅ‘ 2gyiga 2gyigá 2gy1ige 2gy1i2gé 2gyigm gy1iha 2gyiker 2gy1ill 2gyimá 2gyind 2gyinf 2gy1ing 2gy1ipa 2gy1i2rat 2gyirá 2gy1iro 2gyirt 2gyish gy1isk 2gyism 2gy1isn 2gy1ita 2gyivó 2gyizg 2gy1i2zo 1gyí 2gy1íg 2gy1í2r 2gy1ív gy1íz gy1kl gy1k2r gy1kv gymás1s 1gyo 2gyokos 2gy1old 2gyolvas gyo2m1as gyo2mi 2gyope 2gy1orc gy1orm 2gyorsó 2gy1orv gy1ou 1gyó 2gyódár 2gyódásak gy1ó2rá 2gyóri 1gyö 2gy1öb gy1ödé 2gy1ökl 2gyötöd. 2gyötödd 2gyötöde 2gyötödé 2gyötödne 2gyötödöt 2gyötödr 2gyötödü 2gyött 2gyötv 2gyöv 2gyözön 1gyÅ‘ gy1Å‘re gy1Å‘rü 2gy1Å‘2s 3gyÅ‘z gypárba2 gypen1 gy1pl gy1pr gy1ps gyrövid1 gy1sc gy1sk gy1sl gy1sm gy1sn gy1sp gy1sr gy1s2t gy2sur gy1t2r 1gyu 2gy1ud 2gy1ug 2gy1uj 2gyund 2gyuni 2gy1u2ra 2gyurá 2gyuru 1gyú 2gyúd 2gyúé 2gyúi 2gyújí 2gyújr 2gyújs 2gyúm 2gyúrb 2gyúré 2gyúri 2gyúrk 2gyúrr gy1úti 2gyúü 1gyü 2gy1üd 2gyüg 3gyüle 3gyüm 2gyünn 2gyüre 2gyürü 2gyüs gy1üst 2gyüt 2gyüv gy1üve 2gy1üz 1gyű 3gyűl 2gyűré. 2gy1ű2z gy1zr gza2táp gza2t1e gza2tö gza2t1ű2 gzá2r1ó2r gzá2se gzás3s gze2t1a2 gze2t1á2 gze2t1el gze2tin gze2t1o gze2t1Å‘2 gzÅ‘a2 2h. 1ha haa2d 3hab. ha2bak ha2b1a2l ha2b1a2n ha2b1ág ha2b1árb ha2bed ha2b1ér. ha2bid hab1ill ha2b1im ha2b1i2na. ha2b1int 3habo hab1old hab1o2ra hab1orr ha2b1ost hab1sz habu2r ha2bü ha2d1ag had1alk ha2d1ap hada2s ha2d1asz ha2d1á2c ha2d1ál hadás1s ha2d1ásv ha2d1á2szo ha2deg ha2d1el ha2dem ha2dél hadia2 hadié2 hadi2n ha2d1ina ha2dor ha2dos ha2d1u2r ha2d1úr. ha2d1ú2ré ha2d1úrh ha2d1ú2ri ha2d1úrk ha2d1úrn ha2d1úrr ha2d1úrt ha2dús ha2dü ha2dza ha2d3zá hae2r ha1fl ha1f2r 2hagore ha2if ha2j1á2s ha2j1á2to ha2jáz ha2j1in haj1k2 haj1oml ha2jö ha2jÅ‘ haj1s haj1t2r ha2jü ha2k1ál ha1k2li ha1k2r h1akt ha1k2v ha2l1ach ha2l1a2g ha2l1aj ha2l1a2l hala2n hal1any ha2l1a2r hala2sz ha2l1asza ha2l1aszá ha2l1aszó ha2l1atk ha2l1a2tom ha2l1att 3halá hal1á2cs ha2l1á2g halá2l1e2 hal1á2rak hal1á2rá ha2l1árb ha2l1á2ro ha2l1árr hal1árve hal1áte ha2leb ha2lec hal1e2gy hale2l hal1ele hal1elÅ‘ ha2lem ha2l1en hal1epe ha2l1ese ha2l1e2sÅ‘ hal1ete hal1e2to hal1evo hal1e2vÅ‘ ha2lez ha2l1é2l ha2l1ér. 
ha2l1éte ha2l1étk ha2l1étt ha2lid ha2l1ik ha2l1i2m ha2l1ing ha2l1inv ha2l1i2ono ha2l1i2ont ha2lip hali2s hal1isz ha2l1iv ha2l1i2z ha2lí hal2k1a2pu halke2l1 hal2k1ele hal2l1aszt hallás1s hal2l1á2t hal2léj hal3ly 3halm halma2z1ó2 ha2l1ol ha2l1ó2ri ha2l1ö ha2l1Å‘2 hal1p2 ha2l1ug ha2l1u2s ha2l1u2t ha2lü ha2lű ha2l3ya ha2m1ál ha2m1árb hamb2 hame2l ham1ele ham1esz ha2mez ham1ism hamkész1 ha2m1osz 3hamu ha2mü hanás1s han2ch 2hanész 3hang han2gal hange2 han2gen han2g1es han2gél hang3g han2g1ó2 han2gö han2gut han2t1ó2 ha2nyél haó2r ha1p2r hara2g1ó2 harang1g ha2r1ál har2c1al har2can harc3c har2c1e2 har2c3h ha2rel ha2ret 3harmó ha2r1ol ha2r1osz ha2r1ór ha2rű 3hasa haság1g ha2s1iz ha2s1ol has1ors ha1s2pe ha1s2po ha3s2út ha2sür ha2s1ű2 ha1szp ha2t1ab hat1agy ha2t1aj 3hatá határa2d határ1ada ha2t1e2v ha2t1én ha2t1é2v ha2t1í2v hat1olda ha2t1osz ha2t1órá ha2t1ö2v ha1t2rá hatt2 hat1tr ha2tül ha2ue hau2n hau2s ha2ut haü2z 1há 3hábo há2gy há2jús háma2 há2m1al há2m1á há2m1e2 hámi2 há2m1is há2m1ol há2mö hán2cs1e hán2csi há2ny1a2l hánya2n há2ny1ar hányás1s há2ny1e2 há2nyö hára2 há2r1ad há2r1al há2r1au hári2 há2r1iv 3hárí hár1k2 hármas1s háro2m1a háro2mo hár2s1al hár2se 3hárt há2rü há1ry há2sí hász1a2tom há2sz1e há2sziv 2h1ászka. há2szÅ‘ hát1a2dó há2t1a2la há2t1a2n há2tar há2táp há2t1e2 háté2 há2tél há2t1ér. há2t1ére há2t1érn há2t1i2s há2tí há2t1ol há2t1o2r hátó2 há2t1ós há2tö há2tÅ‘ hát1u2sz há2t1ü há2tű hátvé2d1el há2z1a2dó há2zaj há2z1ala há2z1asz há2z1av há2z1á2p há2z1árn há2z1á2ru há2z1e2 há2z1isk há2z1ism há2z1ist há2z1í ház1okt ház1old há2zos ház1otth há2zö há2zÅ‘ házs2 há2z3sa ház3se há2z3sé há2z3si ház3sp ház3st ház3sü ház3sz há2z1ü há1zy hb2le 1he he2ad he2av hec2c1emb hecc3s 2hecu he2dén he2e2s he2f1i he2f1u2 he2g1a2 hegész1 he2gy1a he2gyá he2gyeg hegyes1s he2gy1o he2gyó he2gyÅ‘ he2gyú 2heidp 2h1e2kék 3hekt he2lég helés1s 2helf hel2fr 2hellá 2hellen he2lyeg he2lyeml he2lyesz he2ly1é2j 2hema 2h1embl 2h1eml henés1s 3heng he2ny1e2ge. he2nyo he2ol he2rab he2r1a2d he2r1aj he2r1a2r he2rau her1áll her1át1a2 her1áté her1eng here1p her1ese herevíz1 her1int he2rö her1s2 he2rut he2rű he2s1a he2s1á h1e2sés hes2t1o 3hety 2heusz 2hevé he2vés 3heví he2z1á 2hezh 2hezi 2hezn 2hezÅ‘ 2hezz 1hé hé2hé 3héi. 3héit héje2 hé2j1eg hé2j1el hé2jö 3hékn hé2nal hé2nan hé2nar hé2nát héne2 hé2n1el hé2nem hé2n1et hén1év. hé2nid hé2nil hé2n1is hén3n hé2nö hént2 hé2nu hé2nü hé1ph hé2pü héra2 hé2rar hé2r1as hé2rat hé2rin 2h1érz hé2szá hé2szeg hé2szesz hé2sz1ö héta2 hé2t1ab hé2t1aj hé2tal hé2tar hé2tál hé2t1e2l hé2t1e2m hé2t1es hé2t1ezres hé2t1é2v hé2tí hé2tol hé2tor hé2t1os hétó2 hé2t1ór hé2t1ö hé2tu hé2t1ü2 hé2v1á hé2ve. hé2vég hé2v1érz héze2 hé2z1ek hé2zio hé2zip hézo2 hé2z3s hf2ló 1hi 2hia. hi2aa hi2aá hi2abelie hi2ac hi2ad hi2ae hi2aé hi2ag hi2ah hi2aj hi2al hi2am hi2ant hi2ap hi2ar hi2at 3hida hidro1s 2hiév higa2nye 3higi 2hila 2him. 2hime 2himé 2h1i2nán 3hinás 2hing h1ing. 2hink h1insp hi2om hi2pa 2hips h1ips. h1ipse h1ipsé h1ipsh h1ipsr h1ipss hipszes1 2hise hi2se. 2hisn 2hiso hi2ta hit1ak hita2l hit1a2n hi2t1á2 hi2t1e2gy 3hitel hite2l1e2l hi2t1elesé hi2t1elÅ‘ hi2t1elve. hi2t1eng hi2t1er hi2t1esz 3hitet hi2tél hi2t1ér. hi2t1érte hi2t1érv hi2t1é2te hi2t1é2ve. 
hi2t1im hi2t1int hitkész1 hi2t1o hitó2 hi2t1ón hi2t1ór hitö2 hi2t1ör hi2t1Å‘r hi2t1u2 hi2tú hi2tül hi2zo 1hí hí2da hí2dá hí2de hí2dí hí2dö hí2dÅ‘ hí2dú hí2dü hí2dz hí2g1e hí2jé hí2m1a2 hí2má hí2m1el hí2m1emb hí2mer hí2mo hí2mö hí2mu hí2r1a2 hí2r1á2 hí2r1ing hí2rí hí2r1o hí2r1ó2 hí2rÅ‘ hír1s hí2r1u hí2rú hkas3s hká2r hk2ri hle2g1e2lé hle2t1el hle2tö 1ho ho2dú 2ho2eá ho2ef ho2ii ho2it 2hokl hola2 ho2l1ad ho2l1al ho2lam ho2l1at ho2l1au ho2l1ál ho2l1á2r hol2dá hol2dem hol2d1ó2r ho2l1e2 ho2lig ho2l1in ho2lip ho2l1i2v hol1k2 hol2mes hol2nik ho2lor ho2l1osz ho2lot ho2l1ó2r ho2lö holta2n holt1any holte2 hol2t1el ho2lü ho2ly1al 3homb 3homo homo1s ho2n1a2g ho2n1a2l ho2n1a2n ho2n1a2p ho2n1au ho2n1a2v ho2n1ál ho2n1át1a2 ho2n1á2to hone2 ho2neg ho2n1el ho2n1ik ho2n1i2m ho2ní ho2n1orj ho2n1o2ro ho2n1orr ho2n1ó2 ho2nö ho2nÅ‘ ho2nü hor2d1e2 ho2re 2ho1ry hossz1e2 hosszé2 hossz1ék hossz1ü2 2hosz ho1th ho2us ho2we ho2zál ho2z1e hozi2 ho2zü 1hó hóa2k hóá2g hó1bl hó2cal hó2cat hóc3c hó2cel hó2c1é2g hó2c3h hó2cim hó2có hó2c1ö hó2csü hó2cz hó2d1a2ra. hó2d1a2rá hó2d1á2 hó2dem hó2d1é hó2d1ó2 hó2dö hó2dü hó2d3z hó2l1ej hó2l1e2ped hó2l1e2vet hó2nal hón1apa 2hórá hó2rár hó1sh hóví2 hóza2t1e 1hö hökö1 höl2gya höl2gy1á höl2gyel 1hÅ‘ hÅ‘a2n hÅ‘1br hÅ‘e2l hÅ‘e2m hÅ‘e2r hőé2n hÅ‘1fl hÅ‘1gl hÅ‘1gr hÅ‘1kv hÅ‘1pr hÅ‘sa2 hÅ‘2s1al hÅ‘2sas hÅ‘2s1av hÅ‘2s1el hÅ‘2sep hÅ‘2ses hÅ‘2sim hÅ‘2sis hÅ‘1s2pi hÅ‘s3s hÅ‘1s2tab hÅ‘s1tr hÅ‘2sut hÅ‘2s1ú hÅ‘2s1ült hÅ‘2s1ü2v hÅ‘sű2 hÅ‘2s1űr hÅ‘1tr hőü2l hp2la hp2ra hp2ré hp2ri hp2ro hp2ró hru1s2 hs2ch hsé2gel hs2ka hs2pi hs2po hs1s2t hsza2ké hszá2j1a2da hsz2l ht1cl ht1kl h2t1ol ht2rá h1tref h2t1u2t 1hu hu1hy 2hurá hur2t1e hur2tit 2hus. 2husi huszon1 1hú hú2gy1a2 hú2gye hú2gyi hú2gyú hú2r1a2 hú2r1á hú2re húrt2 húr1tr hú2s1ak hú2sal hú2san hú2sap hú2sar hú2s1á2g hú2s1ál hú2s1e2 hú2sim hú2sí hú2sor hú2sö hú2sÅ‘ hús3szak hús3szá hús3szé hú2sü hú2s3zab hú2sz1ál hú2sz1e2 hú2sz1í2 hú2szol hú2szos hú2sz1ó2 hú2sz1ü2 1hü hü2gy 2h1ünn hü2re hü2rü hü2tÅ‘ h1ü2vö 1hű hű2ré hű2ri hű2rö hy1ér hy1év hy1ig 2i. i1a iaa2d iaát1 iaáta2 i2abaj i2aber i2abes i2abí i2abon i2abor i2abö i2abÅ‘ i2abu i2abú i2abü i2abű ia2c1al iac3c i2acet iac1élé ia2c1ér. ia2c3h ia2cid iac1i2ko ia2c1im ia2c1int i2ací ia2c1or ia2có ia2cö ia2cÅ‘ ia2c3sé iac3sp iac3st iac3sz ia2cü ia2cz iadás1s i2ade i2adí ia2dot ia2dóe iadó1st i2adö i2adÅ‘ i2adú iae2l iae2r iae2t iaé2r i2afá i2afe i2afi i2a1fl i2afó i2afö i2afÅ‘ i2afri ia1fro i2afu i2afú i2afü i2afű i2agá i2age i2agé i2agi i2agö i2agÅ‘ i2agu ia2gyu i2aig i2aip i2ai2z ia2ján i2aje i2ajo ia2kad i2aká i2ake i2akí i2a1k2l i2akód i2akór i2akö i2akÅ‘ ia1k2re ia1k2ré ia1k2ri ia1krí i2aku i2akú i2akü i2a1kv ia3lan ia2lat i2aleg i2alib i2alí ia2lom i2alö ia2lud ia2lus i2aly i2amá i2ame i2amó ia2m1ur i2amú i2amű ia2nek i2ané i2anö i2anÅ‘ iao2k iaó2r ia2pát i2ape i2apé ia1p2l i2apo ia1p2s iarádi2 i2are ia2rén i2aro i2aró i2arö i2aru i2arú i2ase i2a1sh i2asi ia1s2ká i2a1s2l i2a1s2m i2asó i2asö i2a1s2p iast2 ia1s2ta ia1s2tá ia1sti ia1sto ia1str i2asú i2asü i2asű ia2sz1an i3aszerű ia2szes ia2szép iasz2k1e2r ia2szop i2aszú i2ate i2ató i2atö i2atÅ‘ ia1tré ia2ty i2aud i2au2r iau2s iaü2z ia2vat i2avé i2avi i2aví i2avo i2azá i2aze ia2zo i2azs i2azú i1á iá2cs iá2ga iá2gá iá2ge iá2gi iá2go iá2gy iá2hí iáka2 iá2kab iá2kak iá2k1al iá2k1an iá2k1ap iá2k1ar iá2k1as iá2k1á2 iá2keb iá2k1el iá2kem iá2k1en iá2k1e2s iá2k1ér. 
iá2k1érd iá2kés iáki2 iá2kin iá2kir iá2kit iá2kí iá2kop iá2k1or iá2k1osz iá2k1ó2 iá2k1ö iá2kÅ‘ iá2kur iá2k1ut iá2k1ú2 iá2k1ü iá2kű iála2 iál1ana iá2lál iá2l1ár iá2l1e2 iá2lim iá2l1in iá2lop iá2nar iá2n1as iá2nem iá2nir iá2nis iá2nö iánt2 ián1tr iá2nü iá2ny1ad iá2ny1a2l iá2nyan iá2nyar iá2ny1e2 iá2nyérz iá2nyö iá2ói iá2po iá2rad iá2rak iá2ram iár2das iár2d1e iár2d3z iá2re iá2rim iár1s2 iá2ru iá3run iá2rú iá2sal iá2sar iá2s1as iá2s1á2g iá2s1ám iá2sás iá2s1e2 iá2sikr iá2sí iá2sor iá2só iá2sö iá2sÅ‘ iás3szo iást2 iás1tr iá2s1ü2 iá2sű iásví2 iá2szás iá2s3ze iás3zs iá2ta iá2t1e2l iáti2 iá2t1ir iba1d2 ibas2 ibat2 ibau2 ibaü2 ibá2l1a ibe2lér ibe2r1in ibe1s ibi2o ib2lo ib2ró ib2ru ica1f2 ica1g2 ica1kl ica1k2r ica2los ica1pr ica1t2 icca2l ic2can ic2c1á2 ic2c3h ic2cin ic2cir ic2cí ic2c1o ic2c1ö iccse2l iccs1ol iccs1Å‘2 ic2cú ic2cz ic3ha. ic3hek i2chiná ic3hoz i2c3hű ici2t1a2 ici2tár ici2tel ici2ter i1c2lu i2c1ol i2cs1a2d ics1a2la icsa2p1á2g i2cs1au ics1ág ics1áll i2cs1eb i2cs1e2g icse2t i2cs1ete i2cs1ev ics1é2g i2cs1é2ré. ics1ipa ics1s ics1út i2cs1ül i2cs1ü2t ic3sze ic1üz i2d1a2j id1ana id1a2ny i2d1au id1áll id1áru i2d1ásv id1br 1iddo id3dz ide2av ide2g1á ide2g1él ideg1ér. 1i2dej ide1k2v 2idel id1elj id1elm id1elo id1elt i2d1emb ide1p2 ide2red i2dero ide3sa ideu2 i2d1é2g idé2kel i2dény idér2c3s 2idész 1i2déz id2ge idi2as 1i2dil id1ionj id1ionn i2d1i2ono i2d1i2ont idi2os idi2ód i2d1ita idíja2d idí2j1ada id1kr id1old i2d1olv ido2mac ido2m1an 1i2domá 1i2domo 1idomú id1ös 1i2dÅ‘. 1i2dÅ‘b 1i2dÅ‘d 1i2dőé 1i2dÅ‘h 1i2dÅ‘i 1i2dÅ‘k 1i2dÅ‘m i2dÅ‘ne i2dÅ‘né i2dÅ‘nk 1i2dÅ‘p 1i2dÅ‘r 1i2dÅ‘s idÅ‘2sod idÅ‘1s2p 1i2dÅ‘t idÅ‘2tál 1i2dÅ‘v i2dÅ‘z id1pr id2rót i1d2ru id1st id1t2r id1u2t id1üg i2d1üz i2d3zá i2d3ze i2d3zó i2dzs1a i2dzs1á i2dzsen i2dzsél i2dzs1í2 i1e ie2be ie2cs ie2dz ie2f1a2 ie2fá ie2f1i ie2f1ü2 ie2gé ie2gy ie2he i2eld ie2lo ie2lÅ‘a ie2ma ien2sá ien2s1o iens3s ie2ny ie2pe ie2r1a2d ie2rag ie2r1aj ie2r1a2k ie2ral ie2ram ie2r1an ie2ras ie2r1á ier1d2 ie2reg ier1egy ie2r1el ie2r1ember ie2r1est ie2r1iga ie2r1in ie2r1iste ie2rí ie2r1ol ie2ror ie2rö ie2rÅ‘ iersz2 iert2 ier1tr ie2r1u2 ie2r1ú ie2r1ü2g ie2sel ie2sem ie2sett ie2sés ie2sik ie2sÅ‘ ie2sz ieté1sz2 ietz1 ie2ur ie2ve ie2vé ie2vi ie2vÅ‘ i1é i2édo i2éfe ié2ge ié2gé ié2gÅ‘ ié2gü i2éha ié2hes i2éhi i2éil ié2le ié2lé ié2li ié2lÅ‘ i2éme i2émé i2émo ié2neke ié2nekh ié2nekk ié2nekü ié2pí ié2pü ié2rek i2éren ié2rez ié2ré ié2ri ié2rÅ‘. ié2rÅ‘k ié2rÅ‘t ié2rü i2észi iéta1s ié2tel i2étö ié2ve. ié2vek ifa1st ifa1t2 i2fe. ifenyőé2h ifio2 if1ír 1ifjí 1ifju 1ifjú. 1ifjúb 1ifjúé 1ifjúi 1ifjúké 1ifjún 1ifjúr 1ifjús 1ifjút 1ifjúv i1f2la if2le if2lo if2lö if2lu ifogo2 ifon1n i1f2ri i1f2ro i1f2rö if2ru if2t1a if2t1á2 if2t1e2l if2tin if2tö if2tú ig1a2git ig1a2ka ig1als ig1alv ig1and iga2nyal iganye2 iga2ny1es iga2ras 1i2garz iga1sl iga2szag igasz1al igau2r 1i2gaz. 1igazí ig1á2cs ig1álm ig1ásh i3gász ig1á2t1e2 ig1br ig1d2r ig1edz i2geg igek2 ige1kl ig1e2le ig1ell ig1elm 1i2genl ige2rá ig1erÅ‘ ig1e2se ig1esi ige2tál ige2teg ige2tél ige2t1o ige2tÅ‘r ig1evi i2g1ex 1i2géd ig1é2g ig1é2li ig1é2lÅ‘ ig1é2lü ig1élv i2gém igé2na igé2ná 1i2gény i3gépe ig1épí ig1ér. ig1érj ig1érl ig1érn ig1é2rü ig1érv ig1ész 1i2géü ig1fl ig1g2r ig1ív ig1íz ig1kl ig1kr ig2lac ig2nad ig2n1e2g igne2r ig2n1osz ig2nö ig2nü igo2rál ig1o2s igóé2 ig1öb ig1ö2k ig1öl ig1ö2z ig1pr i1g2raf ig1sk ig1sl ig1sm ig1sp ig1st ig1sz ig1tr ig1ug igu2n igu2t ig1ús ig1üc ig1üd ig1ü2g igü2l ig1üt ig1üv ig1űz i2gya. i2gyam i2gyák i2gyál i2gy1eg i2gy1e2kéh i2gy1ékt igy1ér. 
i2gy1ért iha2re i1i ii2de ii2dé ii2dÅ‘ ii2ga ii2gá ii2gé ii2gy ii2ha ii2je ii2má ii2mi ii2pa ii2ram ii2rat ii2rá ii2ro ii2sz ii2ta ii2vá ii2vo ii2vó ii2zé ii2zo i1í ií2gé ií2ra ií2rá ií2ro ií2ró ií2ru ií2té ií2ve ií2ze 1i2jed ije2gy1á2 1i2jes ikabe2j1 ikabejá2 i2k1abl ik1ajt ika2lak ik1ang ikaó2 ika1p2l ika1p2r ika2ró2r ikas2 ika1sp ika1t2r i2k1árk i2k1áru ikás1s iká2tol ik1dr ik1ebé i2k1eg ik1elo ik1eng ike2r1a2 ike2r1á2 ike2r1e2dz ike2r1e2l ike2r1ev ike2ris ike2r1o ik1e2rÅ‘ iker1s ike2ru ike2t1ült ik1evo iké2kekk i2k1épí i2k1épü i2k1érz ik1fl ik1fr i2k1id ik1i2ko ik1ikr ik1ind ik1ins i2k1int i2k1i2o ik1isk ikka2l ik2k1ala ikk1any ik2k1a2r ikk1ára ik2káz ik2kev ikk1érde ik2kin ik2k1i2p ik2k1ol ik2k1ó ik2k1ös ik2köt ik2k1ö2z ik2k1u2 ik2küz ikla1tr ik2ler ik2lor i1klub ik2lum i1knéd ikoma2 iko2m1ar 1ikonbá 1ikonén 1i2konl 1i2konta 1ikontö 1i2kontü 1ikonzs ikó1p ik1ó2rá ikó2s3zá ikÅ‘2res ik1pl ik1pr 1ikrei 1ikrek ik2rém i1k2róm i1k2ru ik1st ikszind2 iksz2t iktus1s iktu2sz 2iku. 2ikub ik1udv 2ikuf 2ikuh 2ikui 2ikuí 2ikuj 2ikuk 2ikur iku2sav 2ikut 2ikuv ik1ü2v il1abr il1a2ce il1a2cé il1a2dó il1a2la il1ald i2l1alk il1amb ila2n i2l1ana i2l1any ila2pin ila2pol i2l1a2r ilá2g1e2 ilág3g ilá2gö il1á2gyo ilányfé2 i2l1á2p i2l1árn il1á2ro il1áru il1á2rú ilá2sz i2l1ászo il1átf il1átm il1átr il1bl il2c1a2 ilc3c il2ch il2c3sap il2c3sik ilc3sz il2cz il1e2lem i2l1emb i2l1eml il1exp ilé2n1á i2l1é2nekh i2l1é2nekn ilé1sp il1f2l il1fr il1g2r i2l1icc il1ide i2l1igáh i2l1igáj i2l1igás i2l1igát ili1gra i2l1i2kon. i2l1i2konb i2likoné i2l1i2konh i2l1i2konj i2l1i2konn i2l1i2kono i2l1i2konr i2l1i2kont i2likonz il1ill ili2ma. il1i2mi il1imp ilin1n ili2p1á il1iró il1k2l il1k2r illa2g1ó2 1illatb 1illatr 1illatt il2l1este il2l1esté 1illésé illé2t ill1éte ill1étt il2l1id illig2 illi1gr il2l1ö 1illu 1illú ilm1ada il2m1agy il2m1aj ilm1aka il2m1a2l ilm1ank ilm1any il2m1ap il2m1arc ilm1atl il2m1a2z il2m1ál ilme2g il2m1egé il2m1egy il2m1e2l il2m1ep il2m1e2r il2m1esz il2m1é2j il2m1ék ilmé2l il2m1éle il2m1ér. il2m1érd ilm1é2rést il2mérte il2m1érté il2m1és il2mid il2m1i2k il2mim il2mir il2m1is il2miz il2m1í2 il2m1ok il2mol il2m1or ilm1osz il2m1ó2r il2mö il2mÅ‘ ilmu2 il2m1ut ilo1g2 i2l1or ilót2 iló1tr il1öb il1ös il1p2l il1p2r il1sh il1sp il1s2t 2ilte ilumi2 ilumin1 ilus3s i2l1üg il1ür il1üv i2l1üz ilva1k2 ima1gl im1akk imaó2 ima1p 2imar. i2marit 1i2mád i2mákt im1dr imeg1g im1elem ime2m i2m1eme ime2ra ime2rin i2m1érd im1inh im1inté imi2t1a2 imi2t1á2r imi2tin 1immu i2m1old i2m1om im1ó2rá imót2 im1ös 1impé 1impu 1imrei i2m1ür iműt2r i2n1abl i2n1a2cé i2n1adá in1ade i2nado in1a2já in1ajt inaka2r 1i2nakat. i2n1akc i2nakk i2nakn i2nakt i2n1akv in1ald i2n1alk in1all 1i2nam. i2n1a2mit. in1ana ina1pla ina2rán 1i2nas. ina2sis 1i2nasn ina2tell i2n1ág iná2lad in1á2rak in1árh i2náru i2n1ásv in1bl in1br in2c1a2g in2cal inca2n inc1elt in2c1él in2chi in2c3ho in2c1is in2c1os in2có in2c1ö in2cÅ‘ incs1an in2cs1e2r in2cs1é2j in2cs1ér. inc3sérv in2csor inc3sor. in2csú inc3süt inc3sza in2cú in2d1ab 1indai in2d1az in2deb in2d1ed in2d1e2g ind1ekö in2d1e2m in2d1ett 1index in2d1e2z in2d1ég in2d1én in2dés ind1ink 1indiv 1indít indö2 in2d1ör in2dös in1dra 1indul 2ine. i2n1e2dé i2n1ef ineg1g in1e2gy ine2ku i2n1e2l in1emu i2n1eny 2iner i2n1erd i2n1erj ine2t1a ine2t1ér ine2tül i2n1ex 2inéb in1é2ge iné1kré iné2l i2n1éle i2n1élt i2n1élv in1épí i2n1ér. 
i2nérd i2n1éré in1érte iné2tá iné2te 2inév 1infek 1infl in2gadó in2g1ala ing1áll ing1árá ing1áré ing1á2ro ing1árt ing1áru ingás3s 1ingec in2g1eg 1ingei ing1eljá ing1elk in2g1ell ing1els in2g1enc 1ingerb 1ingerc 1ingeré inge2r1és 1ingerg 1ingerh 1ingeri 1ingerk 1ingerm 1ingern 1ingerp 1ingerr 1ingers 1ingert 1ingerü in2g1e2v in2g1é2j in2g1ék in2g1él ingés3s ing3g ing1i2na in2gí in1glo in2gor in2g1öl in2g1ös ing2rád. ing2rádb ing2rádd ing2rádn ing2rádo ing2rádt 1inguj in2g1u2t in2gú ing1ült 2inie 2inié i2nigar i2n1ige in1ikra ini1kro i2n1ill i2n1i2ma. in1i2mi in1ind 2ining i2n1inh i2n1i2o 2inir ini2s1ég i2n1isk i2n1ism i2n1ital 2iniu i2n1íz 1injekc ink1acé in2k1a2d in2k1ato in2k1árn in2k1esz ink1érté in2kio ink1old ink1orsó in2kös 1inkub 1inna. in2nor i2n1ob i2n1okl i2n1old i2n1olt i2n1olv in1org i2n1ox in1óda in1ó2dá inó2rá i2n1öl in1ön in1Å‘z in1pl in1pr in1s2k in1s2m 1inspi in1spr 1insta in1s2to in2t1aktu int1ann int1árai int1árak int1árat in2t1á2rá in2t1árf in2t1ári int1áron in2t1árr int1ársz in2t1áru int1á2ta 1intege 1integr in2t1e2gy int1elÅ‘t int1elté in2t1enn in2t1e2ny 1intenz in2t1e2rez 1interfé int1essz inte2t1Å‘s in2t1é2j in2t1ész 1intézé 1intézk 1intézm 1intézÅ‘ 2intézÅ‘c in1t2hos in1thu in2tid in2tigé int1illa in2t1ing in2t1ip in2t1ivá in2t1i2z int1oml in2t1osz in2t1ös intÅ‘kész1 int1Å‘r. int1ura intus1s in2t1ut in2tús in2t1út i2n1ug i2n1uj in1új in1ú2s i2n1ú2t i2n1ü2g in1ült in1ünn in1ür in1üs i2n1ü2t i2n1ü2v i2n1ü2z in1űr 1inven in2xa 1inzu i1o ioá2r io1b2r io2cs io1d2r io2dú i2ogá iog2raf io1g2ráf. io1g2ráff i2ogy io2ik io2ká io2kí io2ko io2ku i3old io2l1i2v iol1k2 iol1okk i2oló i3olv io2mar io2m1árt io2mil io2mö io2nad io2n1a2g io2nak io2n1a2n io2n1a2r io2n1as io2n1a2t io2n1av io2n1át io2neg io2n1el io2nen ionim1 io2nin ion1k2 io2nop io2n1oszt io2nö ions2 ion1st ion1t2r io2pe io1p2r io2r1a io2r1i2ko io2so io1sz2f i2ote io2xidj io2xidt i1ó ióa2d ió2ap ióá2g ióá2r ióát1a2 i2óbá i2óbes i2óbé i2óbí i2ó1bl i2óbo i2óbö i2óbÅ‘ i2ó1b2r i2óbu i2óbú i2óbü i2óbű i2óce i2ócé i2óci i3ócsk i2ócu i2óde i2ódé i2ódi i2ódí i2ódó i2ódö ió1drá i2ódú i2ódü i2ódz i2óég i2óék i2óél i2óép i2óés i2óév. i2ófá i2ófe i2ófi i2ó1fl i2ófó i2ófö i2ófÅ‘ i2ó1f2r i2ófu i2ófü i2ófű ió1g2r i2óha i2óhá i2óhe i2óhé i2óhi i2óhí i2óhó i2óhö i2óhÅ‘ i2óhu i2óhü i2óhű iói2g i2óip i2óis i2óiz ióí2v i2óje i2ójo i2ójó ió2kad ió2kaj iók1arc ió2k1aszt ió2kál ió2k1e2g ió2k1i2d i2ókí iókköz1 iók1old i2ókort i2ókö i2ókÅ‘ ió2küz i2óla i2ólá i2óle i2ólé i2óli i2ólo i2óme i2ómó i2ómu i2ómú i2ómű i2óne i2ónó i2ónö ióo2k ióó2r ió1p2s ió2rab i2órag i2órak i2óran i2órap ió2ras i2órád i2óre i2óré i2ória i2óro i2órö i2óru ió2s1aj ió2sel ió2si. ió1slá ió2só iós3s ió1s2tá ió1str i2ósú ió1sz2 i2óte i2óté i2óti i2ótí i2ótö i2ótÅ‘ i2ó1t2r i2ótu i2ótú i2ótü i2ótű i2óug i2óun i2óur i2óut i2óvár i2óvás i2óve i2óvé i2óvi i2óví i2óvö i2ózár i2óze i2ózs i1ö iö2kö iö2le iö2lé iö2li iö2lö iö2lÅ‘ iö2mö iö2re iö2rö iö2rü iö2tö iö2ve iö2zö i1Å‘ iÅ‘1dr i2Å‘ha i2Å‘ké i2Å‘ku i2Å‘ra iÅ‘2ri2 i2Å‘1s2p i2Å‘1st i2Å‘te i2Å‘té i2Å‘va i2Å‘vá ipa2cse 1i2par. 1ipara. 
ipa2ral ipa2rál 1i2parán 1i2parát 1iparb ipa2r1en ipa2r1es 1i2paré 1i2parh 1iparil 1i2parin ipa2ris 1i2parm 1i2parn i2parok i2paron 1iparr 1i2pars i2parta 1i2partá 1i2parte 1i2partó 1i2paru ipau2 ipánk2 ipán1n ip1átm i2p1ef ip1e2gy ip1e2lu i2p1esé i2p1ev ip1fl ip2fu ip1kl ip1kr ipo1kl ipor2tel ipor2t1Å‘ ip1ö2l ip1ös ip2p1a2d ip2paj ip2par ip2pár ipp1ing ip2pö i1prof i1prog i1proj ip2rop i1p2rot ipsz1a2l ipsz1ál ip2sz1emb ip2szip ip2sz1í2 ipsz1or i2p1ug iralo2 1i2rama 1i2ramá iramis1s ira2tal irata2n ira2t1any ira2t1as ira2t1at ira2tál 1i2ratb 1i2ratc 1i2ratd ira2tel 1i2raté 1i2ratf 1i2rati 2i3ratill 1i2ratm ira2t1ol 1iratoz. ira2t1ö2 1i2ratp 1i2ratr 1i2ratü irádi2ók irádi2ót irá2f1a2 irá2g1al irá2g1ál irá2g1á2rak irá2g1á2ro irá2g1áto irá2gáz irá2g1e irág1g irá2g1ol irá2g1ó2r irá2gö irá2l1a irá2l1e2 1i2ránn 1i2rány irá2nyal irá2nye2 ir2ch i2rei 1irga irin2c iri2zo irka1s irkasz2 irke1 2iro. 2irob 1i2roda. 1i2rodá iro1g2r iro2ka iro2ká iro2k1e iro2kér iro2l1a iro2m1a iros3s iró2ke i2róno 1irri irsa2 ir2s1al ir2s1á2 ir2sil irs3s ir2s1ü ir2sz irté2 ir2t1él ir2tiz ir2t1o2r ir2t1ö2 ir2tür iru2sze i2s1abl is1a2da is1a2dá is1a2do i2s1a2g is1aja is1ajk isa2k1e isa2kol is1a2la is1alf i2s1alj is1alk is1alm is1amb isa2n i2s1ana i2s1ano i2s1ant i2s1any isa2p is1apá is1apo is1a2rá is1ass is1aut i2s1a2z i2s1ábr is1ággy iságy1út is1áll is1á2po i2s1á2rak i2s1á2rá i2s1árb i2s1árf i2s1árh i2s1árké i2s1árn isá2ron i2s1árr i2s1árt i2s1á2ru isárus1 is1á2t1a2 is1átk i2s1átl is1átv is1bl is1br is1dr is1edz is1eff i2s1ege is1e2gér i2s1e2ke. is1elf is1elm i2s1elo i2s1eml i2s1enc i2s1ene is1eng i2s1e2pi is1epri is1erÅ‘ is1e2set is1esé isé2ge2l isé2gés isé2gid iségkész1 is1élv i2s1ér. i2s1é2ri i2s1érté is1é2ve. is1é2vet is1é2vén is1é2vét is1évh isé2vi. isföl2 is1fr is2hin is2his is1ido isi2g is1iga is1ind is1inf is1int is1inv isió2 i2sipa isi2par is1iro i2s1isten is1ita is1i2zo i1s2katu is1kl is1k2r is1kv is2lag i1s2lat 1isme. 1ismek 1ismere 1ismérv 2ismo 2ismű i2s1ob i2s1o2l i2s1orc i2s1ord iso2rosz i2s1orra i2s1orró is1orv i2s1osko is1osz i2s1ott is1ó2rá i2s1öb i2s1ö2c is1öl is1ön isö2r is1örd is1öre is1ös is1ö2v is1Å‘2r i1s2pek isp2r i1s2pur is1s2p is1sta is1stá issz1e2rei issz1e2rek issz1erem issz1e2res is3szig is3szil is3szí is3szó is3szö is3szÅ‘ is3szú is3szű ista1s istasz2 1istáp 1istenh iste2n1o i1s2til is1trez is1tré is1tri is1tro 1istv i2s1ud is1ujj isu2t is1uta is1utá i2s1új i2s1üg is1üst i2s1ü2tÅ‘t i2s1üz isva2d1áss i1svin isvíz1 isza2k1o isza2p1á2 isza2p1e is3zárá isz1árk is3zárl is3zárú isz1e2gy i2sz1elv is3zene isz1esem i2széj isz2fér i2szimi isz1ing isz1isk isz1ist isz1kl isz2k1ö2v isz2k1ú isz1öl isz1ös isz3s isz2tati iszt1áras isz2t1árb isz2tárt isz2t1ékn isz2t1ér. isz2t1ill iszt1i2nai isz2t1öl isz2t1Å‘r. isz2tüz i2sz1ü2g i2szüt is3zűrödn it1a2cé ita1d2 it1a2dó ita1g2r it1agya i2t1akk i2t1akn 1i2tal. ita2lad ita2l1á2rú i2talb ita2l1el italé2 i2talh itali2n ita2lina i2talj i2talk it1allo 1i2talm 1i2taln i2talo ita2l1ó2 1i2talr i2talt2 i2talu it1anó it1ant i2t1ara i2t1au it1ács. it1ág. it1á2ga itána2 itá2n1at itá2rak i2t1á2ram it1á2ras it1á2rat itá2rár itá2rát i2t1á2ria itá2rig itá2ris itáró2 itá2r1ór itá2ruk itá2rú itáskész1 itá2s3z itá2tal it1átf it1bl it1br it1dr ite2l1a ite2l1á ite2leg i2telemz ite2leng ite2lesé ite2lex ite2lél ite2l1ér. 
ite2linj it1eljá i2t1ellen it2elmély ite2lo ite2lÅ‘z i2t1e2mel i2t1eml i2t1e2p 2iter i2t1e2rez i2t1erÅ‘ i2t1e2sete it1ezr ité2g i2t1ége it1é2le it1élm i2t1é2ret i2t1érz ité2tek i2t1étt it1fl it1fr it1gl it1gn it1gr i1t2hot i2t1id iti2g itigaz1 i2t1igé it1i2ko it1ikr itikus1s it1ill it1imp i2t1ind i2t1inf it1i2pa i2t1irá i2t1iro it1iró it1isk i2t1ism it1isza it1i2szo i2t1íg i2t1íz it1kl itkos1s it1kr ito2b it1obe ito2k1aj ito2kak ito2k1ol ito2n1á2 itop2la ito2r1as ito2rál ito2ril 2itosz ito1sz2f i2t1ov itóa2 itó1f2 1i2tókán i2t1ó2né i2t1ónn i2t1ónt itó1p2 it1órá itós2 itó1sp itó1st it1önt i2t1ös it1Å‘rl it1pl it1pr it1sp 1ittad 1ittam itta2n1á2s itta2n1é itta2n1ó2 2itte it2t1eg it2t1i2na it2t1ing it1tra it1tró 1ittuk 1ittun itty1i i2t1ug i2t1und itu2n1i itu2ral it1u2rán it1új i2t1üg it1üld it1üst i2t1üt i1t2zé i1t2zi i1u iu2ga iu2go iu2mab iu2mac iu2m1ad iu2maf iu2m1ag iu2mal iu2m1am iu2m1a2n iu2m1a2r iu2m1as iu2m1a2t iu2m1av iu2maz iu2m1á2l iu2meg iu2m1el iu2m1en iu2mer iu2m1es iu2mez iu2mél iu2m1éré iu2m1i2d ium1ill iu2m1im ium1inj iu2m1i2p iu2m1is iu2m1iz iu2mí iu2mol iu2m1or iu2m1ó2 iu2mö iu2mÅ‘ ium1p2 iu2mü iu2na iu2no iu2ra iu2rá iu2ru ius3sze iu2ta iu2tó iu2tu iu2zs i1ú i2úbá i2úbe i2úbé i2úbi i2úbo i2úbö i2ú1br i2úci i2úcí i2údá i2úde i2údo i2údz i2úél iú1fr i2úha i2úhá i2úhe i2úhi i2úhö i2úhÅ‘ i2úhu i2úhú i2úif i2úje iú2jí i2újó i2úke i2úkí i2ú1kl i2úkó i2úkö i2úku i2úme i2úmó i2úmu i2úmű i2úne i2úné i2únö i2úre iú2ri iú2ro i2úru i2úse i2úso i2ú1sp i2úta i2útá i2úte iú2té i2útí i2útö i2útú i2úve i2úvé i2úvi i2úvo i2úze i1ü iü2cs iü2ge iü2gy iü2le iü2lé iü2li iü2lö iü2lÅ‘ iü2lü iü2re iü2rí iü2rü iü2te iü2té iü2ti iü2tö iü2tÅ‘ iü2tü iü2ve iü2vö iü2ze i1ű iű2ze iű2zé iű2zÅ‘ iva2csal iva2cs1e 1i2vadé iva2r1ai iva2raj iva2re iva2rin iva2rol iva2ró ivar1s iva2t1a2n iva2t1e2 iva2tin iva2tol iva2t1ó2 iva2t1ö 1i2vás. 1i2vásb 1i2vásé 1i2vásn 1i2vásr 1i2váss i2ve. 1ivot ivókész1 ivós2 ivőé2 iv2ré i2x1ab i2x1ad i2x1an i2x1ar ix1as i2x1ág ix1bl i2x1ef i2x1eg i2x1ex i2x1ép ix1fr i2x1im i2x1in i2x1io i2x1ir i2x1is ixi2t i2x1ob i2x1op ix1öd ix1ös ix1Å‘r ix1Å‘s ix1pl ix1pr i2x1új i2x1ül iz1akn izala2g1 iz1alk izas2 iza1sp iz1árny iz1bl ize2d1á2 ize2d1ék iz1egy i2z1e2lem izele2tel i2z1e2lér i2z1e2lőí iz1ember izene2g izen3n ize2s1á2 i2z1esemé i2z1ev i2zéd i2z1ég 1i2zéj 1i2zék i2zél i2z1é2p 1i2zésí iz1fl 1izgal 1izgatot i2z1iga i2z1igé iz1inf iz1int iz1iro i2z1isk i2z1ism izi1s2p 1izmok 1izmuk izmus1s 1izmú iz1okt 1i2zolá izo2m1a izo2mál 1i2zomb 1izomé 1izomf 1i2zomm 1i2zomn 1i2zomr 1izoms 1i2zomt 1izomz izo1p2 i2zos izo1szta 1izotó izó2d1a2 iz1órá iz1pl iz1pr i2zs1ad izsa2ik i2zsakn i2zs1all izs1ara izsa3u2tók iz4s1ág i2zs1ál i2zsec i2zs1embe i2zsev izsé2t izs1éte i2zsil i2zs1imp izs1ist i2zsita i2zsiz iz3sor i2zs1Å‘ izs1s iz3str i2zs1ül iz3sze iztos1s iz1udv izura1 iz1ú2t i2z1ü2g i2z1üt i2z1ü2z 1izzi 1izzí 1izzot 2í. í1a í1á íba2l1 í2bis íbo2r1as ícius1s íd1a2c íd1a2d íd1a2l ídala2g1 íd1a2n íd1a2v íd1a2z íd1ág íd1ál íd1á2r íd1át íd1bl íd1e2g íd1e2l íd1e2m í2d1é2g í2d1él í2d1ép í2d1ér. 
í2d1érz ídi2g í2d1iga í2d1igé í2d1in í2d1i2r íd1ív í2d1ol í2d1om í2d1os íd1ös íd1Å‘2r íd1pr íd1st í2d1ud í2d1ug íd1új íd1üg íd1ün íd1üz íd3zá íd3zs í1e í1é íé2le í2g1a2g íg1e2p íge2s í2g2ér í2g1op íg1tr ígyá2 í1i íi2ro í1í í2j1a2dá í2j1a2dó í2j1a2j í2j1akc í2j1a2l íj1any í2j1a2r í2j1a2u í2j1á2c í2j1á2g í2j1árá í2j1áre í2j1árt í2j1áru 1í2jás íjá2sze íj1ászka í2j1áta í2j1á2te í2j1áth í2j1átl íjá2tos í2j1átt í2j1átu í2j1átv í2j1á2z íj1e2g íj1e2l íjel2i íj1e2m íj1en íj1e2r íj1e2s íj1ép íjé2r í2j1ére íj1fr í2j1i2d í2j1im í2j1int í2j1ir íjirá2 íj1íg íj1ín íj1kr í2j1o2d í2j1ok í2j1ol í2j1os í2j1ot íj1ön íj1ös íj1ö2v íj1öz íj1pl íj1pr íj1sk íj1sp íj1st2 í2j1ug í2j1u2t í2j1út íj1üg íj1ü2t í2k1abl í2k1a2c í2k1a2g í2k1a2l í2k1an ík1ar íka2s ík1asz ík1a2v í2k1áb í2k1ág ík1ál í2k1á2r í2k1eg í2k1e2l ík1em í2k1es ík1ev í2k1é2k í2k1ér. í2k1érb í2k1é2rÅ‘ ík1fr í2k1i2d í2k1ing í2k1i2r ík1ín ík1ír ík1k2r í1k2lu ík1oll í2k1op í2k1orn í2k1orr í2k1ors íkö2l í2k1ö2v ík1pl ík1pr ík1sp ík1sz2 í2k1ug í2kuni í2k1u2r í2k1u2t í2k1ú2t ík1ü2v íl1aj íl1a2k íl1a2l í2l1á2g ílási2 ílá2s1ik ílás3s ílá2s3z í2l1át íl1bl íl1br íl1e2g í2l1érz íli2as í2l1i2m í2l1i2r í2l1is íl1í2r íl1í2v íl1kr íl1ös íl1ö2z íl1st íltá2 íl2t1árk íl2t1áro íl2t1árt íl2t1e íl2t1é2 í2l1u2r ílu2sab ílu2se ílus3s ílu2s3z íma2n íma1p í2m1á2l ímá2ris ímás1s ím1bl ím1b2r ím1dr í2m1e2g ím1elí í2m1elm íme2r1a2 íme2r1á íme2reg ím1e2rÅ‘ í2m1esem í2m1e2v í2m1é2het í2m1é2k í2m1ép í2m1ér. í2m1é2ré í2m1é2rÅ‘. í2m1é2rÅ‘i í2m1érr í2m1érte í2m1érté í2m1i2d ími2g í2m1iga í2m1igé í2m1ill í2m1ind í2m1inf í2m1ira í2m1is í2m1i2v ím1ír ím1kr ím1o2k1 ím1ol ím1on ím1o2p ím1os ím1ök ím1ö2l ím1ös ím1öt ím1Å‘2r ím1p2r ím1sp ím1st ím1u2r ím1u2t í2m1üg í2m1ünn í2m1üt í2n1ab ín1a2cé ína2d ín1ada ín1adá ín1ado í2n1a2g í2n1a2j í2n1aka í2n1akk ín1a2la ín1alj ín1alk ín1ana ín1a2ny í2n1a2p í2n1a2rá í2n1arc ín1ass ín1atl ín1att í2n1au í2n1az ín1ábr í2n1á2c í2n1ág í2nálhaj í2n1álm í2n1á2p ín1á2t1a2 ín1átc ín1áte ín1átf ín1áth ín1átl ín1átm ín1átt ín1átv í2n1á2z ín1bl ín1br ín1d2r í2n1ef í2n1e2g í2n1e2ke. í2n1elc í2n1e2le í2n1elh í2n1elj í2n1elk í2n1ell í2n1elm í2n1elny í2n1e2lo í2n1elö í2n1e2lÅ‘h í2n1elr í2n1eltá í2n1elto í2n1elvá í2n1e2mel í2n1eml í2n1e2mu íne2n í2n1ene í2nesd í2n1e2sz í2n1e2vé ín1ég íné2l í2n1éle ín1élé ín1élÅ‘ ín1élt í2n1é2p í2n1ér. í2nérd í2n1éri í2n1érl í2n1érm í2n1érü íné2sza íné2szer íné2szint íné2szo í2n1éte í2n1é2ven ín1f2r ín1g2r íni2g í2n1iga í2n1igé í2n1i2ko í2n1ill ín1ind í2n1inf í2n1ing í2n1int 1í2nio ín1i2rá í2n1iro í2n1ism í2n1i2z ín1íz ín1k2l ín1k2r ín1k2v ín3nyú í2n1ob í2n1ol í2n1op í2n1or í2n1osz ín1ó2l ín1ön ín1ör ín1ös ín1öt ín1ö2v ín1ö2z ín1pl ín1pr ín1ps ín1s2k ínso2k1 ín1s2p ín1s2t2 ín1sz2 ín1t2r ín1ug ín1új í2n1üd í2n1ü2g í2n1ült í2n1ü2t í2n1ü2v íny1e2c íny1e2g íny1el íny1e2r íny1ing í1o í1ó íó2vo í1ö í1Å‘ í2p1a2g í2p1a2n í2p1álc í2p1él ípés3s í2p1i2z íp3ro í2p1uj ípu2san ípus3s ípus3z ír1a2dó ír1akc ír1akn ír1akt ír1a2la ír1alj ír1alk íra1pl íra1pr ír1arc í2r1a2u í2r1ábr í2r1á2g í2r1á2p ír1ár. 
ír1árak í2r1á2ro ír1á2ru í2r1ásás írá2se íráskész1 írás3s í2rász ír1á2t1a ír1á2t1e2 í2r1áth ír1áts ír1átv ír1áza ír1bl ír1br ír1d2r ír1e2dé í2r1e2g í2r1e2kés í2r1e2l írel1a í2r1ember ír1eml ír1emu íre2n ír1ene í2r1e2r í2r1esem í2r1e2sÅ‘ í2r1eszk í2r1ev í2r1é2g í2r1éj í2r1é2k í2r1é2l í2r1ép í2r1é2ri í2r1és í2r1é2te ír1évh ír1f2r ír1gl ír1g2r íri2g í2r1iga ír1iko í2r1ill í2r1im ír1ind í2r1inf í2r1inj ír1ins í2r1int í2r1i2p ír1ira í2ris íri2sz1á í2r1i2z ír1ín ír1ír ír1ív ír1í2z ír1k2l ír1k2r ír1kv 1írnok í2r1o2b 1írog í2r1okl í2r1okm íro2l í2r1ola í2r1old í2r1olv í2r1o2r í2r1osz í2r1o2v í2r1o2x íróá2 í2ródj í2ródo í2ródó í2ródt í2róí író1p2 író1s2p író1sz írót2 író1tr ír1ön ír1ör í2r1ös í2r1ö2z í2r1Å‘2r ír1Å‘2s ír1p2l ír1p2r ír1sh ír1s2k ír1s2p ír1s2r ír1s2t írsz2 írszt2 ír2t1ag írt1alap írt1é2te írtha2 ír1tran ír1tro ír2t1u2r íru2n í2r1und í2r1uni í2r1u2r íru2s1e2 írus3s í2r1u2t í2r1új í2r1úr í2r1ú2s í2r1útj í2r1útn í2r1ú2to í2r1útr í2r1útt í2r1ü2g írü2l ír1ür ír1ü2v í2r1üz ír1ű2z ís2po ís2tí ísz1aj ísza2k íszak1o ísz1a2l ísz1as ísz1at ísz1au í2sz1á2 í2sz1eb í2szedé í2sz1e2g í2sz1e2lem í2sz1ell í2szeln í2sz1e2lÅ‘ í2sz1elv í2sz1emb í2sz1e2mel í2sz1eml í2sz1erk í2szég í2sz1é2l í2sz1é2p ísz1érem í2sz1ért í2sz1étk ísz1g2 íszi2 í2sz1id í2sz1in í2szír í2sz1ív ísz1k2 í2sz1o2 í2sz1ó2 ísz1öl ísz1ön í2sz1Å‘2 ísz1p2 ísz3s ísz1tr í2sz1u í2sz1ú íszü2l í2sz1ülé í2szünn í2szüt ísz1z íta3u2 ítá2s1á2g íté2k1a2l íté2k1e2l 1í2tél íté2sa íté2s1ég. íté2s1é2gé íté2s1égr íté2sza íté2szá íté2szo ítész3s ítóa2 ító1f ítógé2p1é2s ító1p2 ító1sp ító1sta ítót2 ító1tr ít2ré ítus3s í1u í1ú í1ü íü2dü í1ű í2v1a2d ív1a2já ív1ajk ív1ajt í2v1a2la í2v1alj í2v1a2na ív1ang í2v1anó í2v1a2ny ív1arc ív1a2ri í2v1aszt ív1ábr í2v1á2g í2v1ál ív1á2rad í2v1á2ram í2v1árk í2v1árn í2v1árt í2váru ívás3s ívá2s3z ív1áta ív1bl ív1dr íve2c í2v1e2dz í2v1e2g í2v1ej í2velek ív1e2lemb ív1e2leme ív1e2lemé ív1e2lemr í2velg ív1e2lÅ‘l í2v1ember íve2n í2v1end í2v1ene íve2r ív1ere ív1eré ív1erÅ‘ í2v1esemé í2v1esz í2v1e2tet í2v1é2g í2v1é2k í2v1é2le í2v1élm í2v1é2lÅ‘ ív1élt í2v1ép í2v1ér. í2v1érd í2v1é2ri í2v1érr ív1érté í2v1érv í2v1érz í2v1é2te ív1fr í2v1i2d ívi2g í2v1iga ívi2k ív1ike í2v1ill í2v1im í2v1in í2v1i2p ív1iro ívi2s1el ívi2ses ívis1s ív1iva í2v1i2z ív1kl ív1kr í2v1ol í2v1op í2v1or í2v1osz í2v1ox ívó1s2p í2v1öb í2v1ö2r í2v1ös ív1öv í2v1ö2z ív1pl ív1pr ívren2de ív1sk ív1sp ív1st ívsz2 ív1szk ív1tr í2v1ug ív1ult í2v1ur í2v1u2t ív1új ív1út í2v1üg í2v1ür í2v1üt í2v1ü2v í2v1üz íz1ab íz1a2d íz1a2g íz1aj íz1a2k íz1a2l íza2n íz1ar íz1a2u íz1á2g íz1ál íz1á2p íz1á2r ízár1ó2 ízás1s íz1á2t ízát1a2d ízát1á ízát1e2 íz1bl íz1d2r í2z1ef í2z1e2g í2z1ej í3zelá íze2l1el íze2lö íze2lÅ‘ í2z1emel í2zemé íze2n í2z1ene í2z1e2r 1í2zesí í2z1e2sz í2z1e2ti í2z1eu í2z1e2v í2z1ex í2z1e2z í2z1ég íz1ékk í2z1é2l í2z1é2p í2z1ér. 
í2z1érb í2z1érd í2z1érel í2z1é2ren í2z1érh í2z1é2r1i í2z1érk í2z1érm í2z1érn í2z1érr í2z1érs í2z1érte í2z1érté í2z1értÅ‘ í2z1érv í2z1érz ízé2sa íz1ész íz1évi íz1fr íz1gl íz1icc ízi2g í2z1igé íz1iko í2z1ill í2zimá í2z1imi í2z1imp í2z1ind í2z1inf í2z1int ízióé2r ízi2óéra ízi2óto í2z1i2pa í2z1ira íz1irá ízi2so ízi2sza ízi2szo ízi2szó í2z1i2ta í2z1i2vá íz1í2v íz1íz íz1kl íz1k2r ízo2k íz1on íz1or íz1os íz1ó2rá íz1öb íz1öd íz1ök íz1ö2l íz1öm íz1ön íz1ör íz1ös íz1ö2v íz1öz ízpen1 íz1pf íz1pl íz1p2r í2zsa2 ízs1al ízs1as ízs1au íz4s1ág ízsá2r ízs1áro íz3sáv íz3seb í2zs1e2l ízse2s ízs1ese í2zs1in í2zs1it íz3sí í2zso ízs1ok íz3sor í2zs1ó2 í2zsö ízs1s íz3str í2z3su í2zsú íz3sz íz1t2r í2z1ug í2z1uj ízu2me ízu2m1i íz1und í2z1u2r í2z1ut íz1úr íz1ú2t í2z1ü2g í2züle í2z1ür í2z1üs í2z1üt í2z1üv í2z1ü2z í2zűe í2zűn 1í2zűr í2zűv 2j. 1ja j1a2cé 2j1a2dag 2j1a2dal ja2datá 2j1a2dato 2j1adm 2j1a2dom 2j1a2dot ja2dóh ja2dós ja2dót ja2dóv ja2dóz ja2dus ja1f2r ja2ga. 2j1agg 2j1a2gi 2jakad ja2kadé ja2k1ál ja2k1á2p ja2k1árn 2j1akci ja2k1ec ja2k1el ja2k1e2m ja2kes jak1ére ja2k1éri 2jakf ja2k1im. ja2k1iz ja2k1í2r jak1k2 2j1akku jako2v jak1ovi ja2kók ja2k1ölts ja2k1ös 2j1akro jakt2 2j1akt. 2j1akti 2j1aktu 2j1alg 2j1alk 2j1all 2j1alm jam2be 2jambu ja2mes 2jana. 2jana2i ja2nal ja2nat 2j1a2no jan2sze jan2szü ja2nya ja2ran 2j1a2rá 2j1arc ja2rom 2j1a2szás 2j1atk 2j1atró ja2ur ja2uto 2javat ja2xi ja2zon 1já já2ar 2j1á2bé 2j1ábr 2j1ág. 2j1á2ga. 2j1ág1a2d 2j1á2gai 2j1á2gak 2j1á2gas 2j1á2gat 2j1á2gaz 2j1á2gá 2j1ágb 2j1ágf 2j1ágg 2j1á2gi 2j1ágk 2j1ágr 2j1á2gy jáí2r 2j1áll já2n1e já2nék 2jánl já2po 2j1á2rada 2j1á2rai 2j1á2rak 2j1á2ram já2rar já2r1av 2j1á2rán járás3s járá2s3z 2j1á2rát 2j1á2ráv járe2 já2r1em já2res já2rér 2j1á2rig já2r1is 2j1árki 2j1árko 2j1árny 2j1á2ron já2r1ot já3ró já2rÅ‘ 2j1ártal 2j1ártás 2j1á2ruk 2járus j1árut 2j1árvál 2j1árz já2s1ad já2sal já2san já2s1ág já2s1á2rai já2s1árak já2sás já2s1e2 já2sit já2s1í já2s1ol 2j1á2só já2sö jást2 já2s1ü2 já2sz1al jásze2 já2szis já2sz1okta jász1ói jász1ón jász1ó2r ját1a2dó. ját1a2dók. ját1a2dón. ját1a2dót 2j1á2tal já2t1á2 2j1átáz játék1e2 2j1átfe 2j1áthá 2j1á2ti 2j1átír 2j1átlé 2j1átm ját1osz 2j1á2t1ö 2játszű 2j1á2tü 2j1átvé 2j1átvi já3zs jba2l1 jb2lo jb2ri jb2ró jbű2n1ü2 jc3ho jcsapá2 jcs1s jdo2nal j1d2rá j1d2ro j1d2ru jdúköz1 1je jea2l jea2n je2bé 2j1e2dén 2j1e2dz jee2n je1f2r 2jeged je2gés jegész1 je2gyeg je2gyel jegy1els jegy1elv 2j1e2gyesí je2gy1é2k je2gy1in je2gy1o je2gyÅ‘ je2gyu je2gy1ú je2gy1ű2rű j1ekcé je1kri jek2t1a2 jek2t1á2r jek2t1e2l jek2ter je2laj je2l1an je2l1ap je2lar je2lav je2laz jelá2 je2l1áb je2lág je2lál je2l1ár je2l1át1 je2láz jel1e2gy je2l1e2l1e2v je2l1elk je2l1ell je2l1e2lőá je2l1e2lÅ‘b je2l1els je2l1emel 2jelemé je2l1eml 2j1e2lemz je2l1eng je2l1e2r jel1esés jel1esik je2l1esni je2l1e2sÅ‘ jel1este je2l1esz je2l1etet jel1evo je2lég je2l1éj je2lél je2l1ép je2lérte je2l1ige je2lim je2l1i2na je2l1inf je2l1ing je2l1int je2l1í2 2j1ellen je2l1o2 je2l1öv je2lÅ‘a 2j1e2lÅ‘fe je2lőí 2j1e2lÅ‘tu jel1p2 2jelszá 2jelszo 2j1eltett. 2jeltér je2lu je2lú 2j1emba 2j1ember 2j1e2mel je2mu je2n1á jen1d2 je2n1e2ke. je2n1el je2ner jene2s je2n1esz je2n1in je2n1o je2n1Å‘2re je2n1Å‘s je2n1ü je1p2r 2j1erd je2red je2ró 2j1e2rÅ‘ je2s1a je2s1emb 2j1e2setb 2j1e2sett je2s1ér je2sÅ‘ jest2 je1sta je1str je2su je2s1ü2v jes3zac je2t1am je2tál jet1ell je2t1em je2t1ék je2t1o je1tra je2tun je2tut jeu2r 2j1e2vÅ‘ jezÅ‘kész1 1jé 2j1ébr jé2g1a2 jé2g1á2 jégár1a2d jé2gec jé2ged jé2g1e2k jé2g1el jé2g1em jé2g1erk jé2g1es jé2get jé2gép jé2g1ér. 
jé2gés jég3g jégi2 jé2g1id jé2gigé jé2gim jé2g1is jé2g1o jé2gó jé2gö 2jé2gÅ‘ jé2g1u2 jé2gú 2j1é2hen 2j1é2hes 2j1éhs jé2k1os jé2kü 2j1é2le jé2lÅ‘ 2j1élt jé2nad jé2n1ess jé2n1é2g jé2nil jé2no jé2nú jé2pí jé2pü 2j1ér. 2j1érb 2j1érd jé2reg 2j1é2ren 2j1é2ré 2j1érh 2j1é2ri 2j1érk 2j1érl 2j1érm 2j1érn 2j1érr 2j1érte 2j1érté 2j1értÅ‘ 2j1é2rü 2j1érv 2j1érz jé2tel 2j1étk 2j1étt 2j1év. 2j1évb jé2ve. 2j1é2vek 2j1é2vet jé2vén jé2vér jé2vét j1évh j1é2vi j1évk 2j1évn 2j1évr j1évt 2j1é2vü j1évv jfeles1s jf2le jf2lo jf2ló jfölös1s jf2ra jf2re jf2ri jf2rí jf2ro jf2rö jfu2na j1g2le j1g2ló jg2ru jhá2r1e 1ji ji2do j1i2dÅ‘ 2j1i2ga 2j1i2gé ji2je 2j1ikr 2j1ill ji2ma ji2má ji2mi 2j1i2na. ji2náb 2j1ind 2j1inf 2j1ing 2j1inj 2j1ins 2j1inte 2j1inté ji2on ji2pa 2j1i2rat 2j1i2rá 2j1i2ro 2j1irr 2j1irt 2j1isk 2j1ism 2j1ist 2j1i2ta 2j1i2vá 2j1i2vó ji2zé ji2zo 1jí jí2gé jí2rá jí2ró jítókész1 jí2ve jí2vé jí2vó jí2ze jí2zé jí2zü jí2zű j2j1alj j2j1a2z jjá1s jje2lesé jje2lesi jje2lest jje2l1ül jj1e2r j2j1im j2j1iz jj1ív jj1íz j2j1ol j2j1os jj1pr jj1üg jka1pr jk1ard jkau2 j2k1e2g j2k1e2lÅ‘ j2k1e2s jké2p1e2ké jkia2 jkiá2 j2k1id jkie2 jk2la jk2lá j1k2li j1k2lo jkme2 j2k1old jko2r1á2s jko2r1in jk1órá jk1pl jk1pr j1k2ré j1k2ri j1k2rí jk1st j2k1u2s jk1üg jk2va j1k2vó j2l1abl jl1a2da j2l1a2lap j2l1all j2l1ar jla2t1e2 j2l1azo jlás3s jl1á2t1a jl1átn jl1bl jl1e2gy jleí2 jl1elé jl1ell jl1eln jle2t1é2te jlé2cel jlé2c3s j2l1év. jli2k jl1iko j2l1ind j2l1inf j2l1int jl1obj j2l1ol jlo2n1á jlo2ni j2l1ös jl1pr jl1sp jl1st jl1t2r jme2g jm1oká j2m1old jna2l1e jna2lég jna2lis jn1á2rá jn1á2ru jnév1a jn1ing jn1st jn1ü2g 1jo jo2g1a2d jo2g1aka jo2gal jo2g1an jo2g1a2z jo2g1á2c jo2g1á2l jo2g1e2 jog3g jo2g1í jo2g1ol jo2g1or jo2g1osz jogó2 jo2g1óv jo2gö jo2gÅ‘ jo2gur jo2gü 2j1o2koz 2j1oks 2j1okta jo2laj jo2lim joma2 jo2m1ag jo2m1an jo2mar jo2m1as jo2m1á jo2m1e2 jo2m1é2t jo2mév jom1f2 jo2mij jo2m1ik jo2m1is jo2m1iv jo2m1ol jo2m1or jo2mö jo2mÅ‘ jom1p2 jo2mü jo2mű jonc1c jon2ch jon2cil jon2c3s 2j1ond jo2ób jo2pá jo2pe jo2r1ing 2j1orr. jo2rü 2j1osto 2j1osz jo2ui jo2uk jo2ul jo2ur jo2ut jo2va 1jó jóa2k jóá2g jóá2r jó1bl jó1b2r jó2ce jó2dad jó2dak jód1all jó2d1a2n jóda2t jó2d1ato jóde2 jó2del jó2dig jó2diz jó2d1ol jódó2 jó2dór jó2dü jó2d3z jó1fl jó1kl jó1kro jó2l1e2s 2j1ónn jó1p2r jó2rák 2j1ó2ri jó2sal jó2sas jó2sál jós1árk jó2seg jó2sem jó2sén jó2sip jó2sis jó2s1o2do jó2sö jós3s jós1üld jó2s1ün jó2s3zár jó1t2r 1jö jö2dé jö2ko jö2lé 2j1önt jö2re jö2rö j1ösv j1ötv 2j1öv. 2jöve. j1ö2vez 2j1ö2vén jö2zö 1jÅ‘ j1Å‘rl jÅ‘2rö 2j1Å‘rt jÅ‘2rü jÅ‘2ze jpárba2 jp2la jp2le jp2re jp2ré jp2ri jp2ro jp2ró jra3d2 jra1f jrai2 jra1p2 jra1s2 jrau2 jré2sz1e2l jré2szí jsa2v1e jsa2v1é jsa2vo jsa2v1ón. jsa2v1ó2r jság1g js2ho js2ká js2ki js2ni js2pe js2pi js2po js2pó jsp2r j1s2ta js2ti j1s2tí j1s2to j1st2ra j1st2ru j1s2tú jszabás1s jszá2l1a2da jszáraz1 jszín3n jsz2k jszt2 j2t1a2da jt1a2do jt1a2dó jt1akn jt1a2lap jt1alk j2t1all j2t1alv jt1a2nyag jta1p2 j2t1a2pá jt1a2rom j2t1arz jta1sp j2t1atk j2t1a2ut j2t1ác jt1ága j2t1áll jtá2rai j2t1á2rak j2t1árka jtá2s3z jt1á2t1a j2t1e2gy j2t1e2ke. 
j2t1e2leg j2t1e2leme j2telemz j2t1elf j2t1elh jt1eljá j2t1ell j2telmél j2t1e2lo j2t1e2lÅ‘ j2t1elp j2t1elr j2t1els j2t1elv j2t1eml j2tener j2t1enz jt1e2red jt1e2rez j2t1erÅ‘ j2t1e2tetÅ‘e jt1e2vés jt1evo j2t1e2vÅ‘ j2t1é2le jt1élm j2t1érl j2t1érm jtés3s jt1fl jt1fr j2t1i2d jti2m j2timá j2t1imi j2t1imp jt1inf j2t1ing jt1int j2t1i2pa j2t1ir jti2s jt1isi j2t1izg jt1i2zo j2t1í2v j2t1í2z jt1kl jt1kr j2t1olda j2t1orjáb j2t1ors jt1osz j2t1o2x jtóa2 jtóé2ra jtó1p2 jtó1s2po jtó1sta jtó1str jtó1sz jtó1tr jt1ödé jt1öng jt1öss jtÅ‘a2 jtÅ‘e2 jtőé2 jtÅ‘1s2p jt1pl jt1pr j1t2ran jtrádi2 j1t2rág jt2ri j1t2roj jt2róf jt1sp jt1st jt1t2r j2t1udv j2t1und jt1utá j2t1új j2t1üg j2t1ü2l j2t1üt 1ju ju2ga jugo1 ju2had ju2h1a2k ju2hal ju2h1a2n juha2ra ju2hat ju2hál ju2h1e2 ju2h1éj ju2h1is ju2hor ju2hÅ‘ ju2hü 2jula 2juls 2jural ju2ru ju2sz1a2v ju2szim ju2sz1í2 ju2tak 2j1u2tál 1jú jú1fr jú2jí jújjáé3 jú1pl jú1p2r jú1s2p jú1s2t jú2szó jú1szv 2j1útb jú2ton 2j1útv 1jü jü2ge jü2gy jü2le jü2lé j1ü2lÅ‘ j1ült jü2re jü2rí jü2rü jü2ta j1ü2tÅ‘ jü2ve jü2ze 1jű jű2ri jű2zÅ‘ j2z1abl j2z1a2d jza2j j2z1ajá jz1ajt jza2kad jz1akc j2z1a2l j2z1a2ny j2z1ar j2z1as jza2tan j2z1ál j2z1á2rad j2z1á2rán j2z1e2l j2z1em jze2r jz1eré j2z1es j2z1él j2z1érté jz1fr jzi2g j2z1igé j2zill j2z1int j2z1i2r j2z1isk j2z1ism jz1k2l jz1k2r j2z1ob jz1okta j2z1olda j2zolvas j2z1osz j2z1ó2d j2z1ó2r jzó1s2 j2z1ös j2z1öt jz1pl jz1p2r jz3sab j2zs1a2l j2zs1ál jzscsa2p1 j2zs1eg j2zsen j2zs1er j2zs1in jz3sín jz3sor j2zs1ö2 j2zs1Å‘ jzs1s j2zs1u2t jz3sza jz3szá jz3sze j2z1ut jz1ü2g 2k. 1ka kaa2d kaát1 kaáta2 kab2a 2kabbah 2k1abhá kabi2ná 2k1ablak. 2k1ablaka 2kablaká 2k1ablakb 2kablakh 2kablakk 2k1ablakok 2k1ablakon 2kablakos 2k1ablakot 2kablakr ka1b2le ka1bre 2k1abs ka2cél. ka2c3h kacs1á2g ka2cs1ús ka2cü ka2cz k1adap 2kadál kadás1s 2k1a2dot ka2dói ka2dój 2k1a2dóso ka2dóu ka2dóz ka1d2rá kaegész1 kae2l kae2r kaé2r ka1f2r ka2gan 2k1agg ka1g2r 2k1agy. ka2gyu kai2z 2kajánl 2k1ajk 2k1ajtóh ka2kadé ka2kadó kakas3s ka2kác 2k1akc ka1k2l ka2kó. ka1k2ré ka1kri 2k1akt. 2k1akti ka1k2va ka2lapa 2kalapítv 2k1alb kal1egy 2k1alf 2k1alj. 2k1alji 2k1alka 2kalko 2k1alle 2k1alti kama2te 2k1ambu kana2g ka2nalí 2k1a2nam k1a2nat ka2n1e2g ka2n1el 2kang ka2nim ka2nio ka2n1iv ka2nol ka2nód 2kante ka2nű 2k1a2nya. 2k1a2nyag 2k1a2nyai ka2nyáb ka2nyó ka2óa ka2óá ka2óc ka2óe ka2óg ka2óha ka2óí ka2óko ka2ókr ka2óp ka2óü ka2óz 2kapád ka2páto 2kapp ka1p2re ka1p2ré 2k1aprí ka1prof ka2r1a2d ka2rakk ka2r1a2kó kar1a2lap 2k1arank ka2r1a2r kara2s 2ka2rasz 2karat. 2karatn 2karato ka2rau ka2r1ácsh ka2r1ácsn ka2r1ácst ka2r1ál 2k1a2rány kar2c1el kar2dac kar2dál kar2dz ka2rel ka2rev kar1éke karé2kor ka2rél ka2r1éne karfe2l1 ka2rid ka2r1i2ko ka2r1ikr ka2rind ka2r1ing. kar1ingb kar1inge kar1ingg kar1ingr ka2r1isk ka2r1ism kari2z ka2r1izo 2k1a2roma. 2karomá ka2ror kars2 2k1arti ka2sag kaság1g ka2sál ka2sid kasi2p kas1ipa ka2siz ka2s1ol ka2s1ó2r ka1s2po kas3sze kas3szé 2k1asszi 2kasszony kas3szÅ‘ kast2 ka1str ka2sür kasz1ajt 2kaszat ka2szág kat1ará ka2tél kat1iko kat1int ka2t1i2o 2k1atká ka2tolda katrádi2 ka1t2ri ka2tyá ka2ud 2k1a2ul kau2n kau2r kau2s kau2ta 2k1a2uto 2k1autó. 2kautób ka3utóc 2kautóé 2kautóh 2kautói 2k1autój 2k1autók 2kautóm 2kautón 2kautós 2kautót 2kautóv kaü2t kaü2z 2k1a2vat 2k1a2zon 1ká 2kábé 2k1ábr ká2cak ká2ce 2ká2c3h ká2có ká2c3sar ká2csat ká2cse ká2cs1ék ká2csip ká2csis ká2c3sor ká2csö ká2csÅ‘ ká2cü ká2dab ká2dar ká2d1ác ká2dát ká2dil ká2dí ká2d1ö ká2d1ü2 ká2d3z ká2fá 2k1ág. ká2ga 2k1ága. 
2kágacska ká3gacská 2k1ágai 2k1ágak 2k1ágat 2k1ágaz 2k1á2gá 2k1ágb 2k1á2ge k1á2gé 2k1ágg 2k1ágh 2k1á2gi 2k1ágk 2k1ágn 2k1á2go 2k1ágr 2k1ágs 2k1ágt 2k1á2gu 2k1á2gú 2k1á2gy 2k1á2hí ká2jö ká2jü ká2lág 2kálái 2kálák 2kálán 2káláv 2káláz 2k1álc k1áldo ká2l1e2 ká2lén 2kállamti 2k1állap 2k1állás kál2l1e2 kállé2 kál2lék 2k1állí 2k1állom ká2lü ká2lyod 2k1á2mí 2k1á2mu ká2n1ag ká2n1a2n ká2n1as ká2nem ká2n1is ká2n1ó2 ká2ny1ag ká2ny1a2l ká2ny1a2n ká2nyap ká2nyar ká2ny1á2to ká2ny1e2 ká2nyis ká2nyiv ká2nyö 2k1á2polá ká2poló 2ká2rad ká2r1a2g 2k1á2rakr ká2rakt kár1akta kár1aktá ká2r1a2l 2k1á2ram ká2r1a2p ká2rar ká2ras kár1ass 2k1á2rat ká2raz kár1azo kár1áll ká2r1á2z kár1d2 ká2r1e2 kár1éte 2k1á2riá k1árká kárkié2h ká2r1old ká2r1osz ká2rö ká2rÅ‘ kárpszi2 kárpszic3 kár1s2 kárt2 kár1tr ká2ruh ká2rü kás1ajt kás1ass ká2s1á2go ká2s1á2rai ká2s1árak ká2s1á2rá ká2s1árb ká2s1á2ro ká2s1árr ká2s1árv ká2s1árz 2k1á2sásá 2kásást ká2s1e2 ká2s1i2k ká2sim ká2sis ká2sír káská2r ká2s1ol ká2s1ott ká2só ká2sö ká2sÅ‘ kás1p kást2 kás1tr ká2sut ká2s1ü2 2k1á2száb ká2szár kás3zár. 2k1á2szát ká2sze ká2szé ká2szis ká2sziv 2k1á2szom ká2s3zü ká2tab 2k1átad ká2taj ká2tal kát1a2la ká2t1a2n ká2tar ká2tau ká2t1á2 2k1átbe ká2t1e2 ká2tég k1átfo ká2tip ká2t1ir ká2tis ká2tí k1átló 2k1átme ká2t1osz kátó2 ká2tór ká2t1ö ká2tü k1átvá kba2l1 kbé2rel kb2la kb2le kb2lo kb2rá kb2ri kb2ro kb2ró kci2ófa kci2ósi kci2óspe kc2lu kcsa2p1á2g kdi2al kd2ra kd2rá kd2re kd2ro kd2ró kd2ru 1ke kea2j kea2k kea2l kea2n kea2r kea2s keá2l keá2r keá2t1 2k1eb. ke2bei ke2bek ke2béd kecs1alj kecs1ón kecs1ó2r ke1d2ra keegyez1 kee2n ke2ep kee2s 2k1eff ke1fl ke1f2r 2k1e2gér kegész1 2kegyb ke2gyék ke2gy1in 2k1egyl 2kegyn ke2gyö 2kegyr 2kegys ke2gyu ke2gy1ú 2kegyü 2k1e2ke. 2k1e2kei 2k1e2kek 2k1e2kéé 2k1e2kék ke1k2l ke1k2ré kek2sza 2k1ekv 2k1elad ke2l1an ke2l1a2t 2k1elav ke2l1á2g 2k1e2lef ke2lekc 2k1e2lekt ke2l1e2l k1e2lemb 2keleme 2kelemé k1e2lemh 2kelemm 2k1e2lemn k1e2lemr 2k1e2lemz ke2l1e2pe. ke2l1e2pei kele2p1ü2 ke2l1e2sé ke2l1esze kele2t1el kele2t1o 2keley ke2l1é2k 2k1e2lél kel1érh kelés3s kelé2s3z 2k1elhel 2k1elho kel1id kel2ig 2k1e2lim ke2lio 2k1elism 2k1e2l1í2 2k1eljá k1ellá 2kellát 2kellená 2kellenÅ‘ 2k1ellent 2k1elnev 2k1elny 2k1e2los 2ke2lö2l 2k1e2lÅ‘a kelőé2 2kelÅ‘fú ke2lÅ‘hí 2k1e2lőí 2k1e2lÅ‘képz 2k1e2lÅ‘kés 2k1e2lÅ‘l ke2lÅ‘ny 2k1e2lÅ‘o ke2lÅ‘té 2k1e2lÅ‘tt k1e2lÅ‘z k1elra 2k1elsa 2k1elsÅ‘ 2k1elszá kel2t1é2rÅ‘. 2k1elto 2keltű 2kelül ke2lűr 2k1elv. 2k1elvá 2k1elvei 2k1elvek 2k1elven 2kelvi k1elvk 2k1elvo 2k1elvt 2k1elvű ke2lyemb ke2ly1ékn kelyköz1 2k1elz 2k1ember 2k1embl 2k1embr 2k1eml 2k1emul ke2nal 2k1enci kende2rül ken2d1ék 2k1endo 2k1e2ner 2k1enge ke2n1ip ke2nis ke2n1o kens3s kensz2 2k1enyv keo2l keo2r keó2d keó2h keó2l keó2r ke2pik ke2ral ke2rap ke2r1a2r ke2r1as ke2rau ke2r1ál ke2r1ár ke2rás ker2csá ker2csell kercsí2 ker2csír 2kerde 2kerdÅ‘ 2k1e2rege 2k1e2rej kere2k1eg kerekes3s ker1ell ker1elv ke2r1ember ke2r1er kere2ta kere2t1e2l kere2t1ö ke2rég ke2r1é2l ke2r1ép ker1éte ke2r1id ker1iga ke2r1ill ke2rim ke2r1ind ke2r1int ke2r1inv ke2rio ker1ist ke2r1iz ker1k2 2kernel. 
ke2r1ol ke2ror ke2ród ke2r1ó2r ke2rÅ‘de ke2rÅ‘e ke2rÅ‘ké ke2rÅ‘o ke2rÅ‘sí ke2rÅ‘te kers2 ker1sp ker1st kersz2 ker2taj ker2táp ker2tás ker2t1ebbé ker2teg ker2t1este ker2t1esz ker2télv ker2t1é2vét ker2tön ker2töv ker2t1Å‘r ker2t1Å‘s ker1tró ke2rút kerü2g 2k1esemé ke2seté 2k1e2sett 2k1esél 2k1e2sés ke2sis 2kesítÅ‘st 2keskü 2k1e2sÅ‘ ke1s2po kes3széke kes3szó ke1sta 2k1este 2k1esté 2k1esti 2k1estj 2k1estk 2k1estn ke1stra 2k1estre 2k1estt 2k1estün ke2szaj ke2szép 2keszmén ke2tál ke1tc ke2t1e2kéh 2k1e2teté ke2tid ke1t2ra ke1t2ré ke1t2ri kettes1s ke2tűd keu2r keu2s kevés3s ke2vid 2k1e2vÅ‘ keze2tés kezÅ‘a2 kezÅ‘e2 kezÅ‘kész1 2k1ezr 1ké 2k1é2ber 2k1ébr ké2ge ké2gé ké2gÅ‘ 2k1é2hem 2k1é2hen 2k1é2hes 2kéhezé 2kéhezt 2k1éhs ké2j1ak ké2j1elé ké2jö ké2jut 2k1é2ke. 2k1é2kei 2kékekk 2kékem ké2k1e2r kékes3s kéke2s3z ké2kén ké2k1o2 2k1é2kük 2k1é2kű ké2lel 2kéles 2k1é2letek 2k1é2letet 2k1élm ké2lÅ‘d 2k1é2lÅ‘l 2k1élr 2k1é2lű ké2lya ké2ly1esz ké2lyid ké2ly1ü2l kéma2 ké2mad ké2m1al ké2m1an ké2map ké2m1as ké2m1á kémi2as ké2min ké2mis ké2mí ké2mo ké2mö ké2mÅ‘ ké2m1u2 kéna2 ké2n1al ké2nan ké2n1ar ké2n1at kéndio2 2k1é2neke 2k1é2nekh 2k1é2neki 2k1é2nekk 2kénekl ké2n1e2l ké2n1em ké2n1er kén1éte ké2nig ké2nil ké2n1ing ké2no ké2ny1e2g ké2ny1elv ké2nyú ké2p1a2 ké2p1á képá2r ké2peg ké2p1e2lÅ‘ ké2p1elté ké2p1elv képe2r ké2p1ill ké2pim ké2p1i2p ké2pí 2k1épít ké2p1o ké2pó ké2p1ö ké2pÅ‘ kép1s ké2pu ké2pú ké2p1ü2lé ké2p1ü2lÅ‘ kéra2 ké2rab ké2ral ké2ram ké2r1an ké2rar ké2r1as ké2rat ké2r1á2 2k1érdeke kér1dr 2k1é2retts ké2ro ké2rö 2kértelm 2kértékb 2k1értéke 2k1értékh 2k1értékn 2k1értékr 2k1értés kértÅ‘2iké ké2r1u2 ké2r1ú kérü2 kér1ülé 2k1érvek 2k1érzé késa2 ké2san ké2s1á2 ké2s1é2g ké2s1o ké2só kés3sza kés3szer kés3szél ké2su ké2süt ké2sza2 kész1al ké2sz1á2 ké2szeg késze2l ké2szeml ké2szesz ké2széd ké2sz1ékné ké2szin ké2szir ké2szo ké2sz1ó2 ké2szö ké2szÅ‘r készt2 ké2szu ké2sz1ú két1akar ké2takn ké2t1akó két1ala két1asp ké2tál ké2t1ed ké2t1eg ké2tel. ké2teln ké2t1e2m ké2t1ep 2ké2te2r két1ere két1erk két1erű ké2tesé ké2t1esté ké2t1esz ké2t1e2v ké2t1é2 ké2t1i2 ké2tí ké2tol ké2t1or kétó2 ké2t1ór ké2t1ö ké2tud ké2t1ü2 2kétv 2k1év. 2k1évad 2k1évb 2kévei 2k1é2vem 2k1é2ven 2kévet 2k1évf 2k1évh 2k1é2vi. ké2vig 2k1évin 2k1évk 2k1évn 2k1évr 2k1évs 2k1évt 2k1é2vü 2k1évv ké2z1a ké2z1á2 ké2zeg kéz1elt kéze2m kéz1eme ké2z1e2r ké2z1ism ké2zí ké2z1o ké2zö kézs2 ké2z3sé kéz3st ké2zsu ké2z1u2 kfe2l1em kfil2mér kfi2sar kf2jo kf2la kf2le kf2li kf2lo kf2ló kfolyói2k kfo2n1ó2 kf2ra kf2rá kf2re kf2ri kf2rí kf2ro kf2rö kf2ru kg2la kg2nó kg2ra k1g2ráf. kg2ri kg2rí kg2ru khá2t1al khelyköz1 1k2hia khiá2b k2hil kh2me 1ki kia2d ki2aé ki2af kia2g kia2j kia2kar kia2kas kia2lap kia2lá kia2n kia2p kia2ra kia2sz ki2aú kia2v kiá2z ki1b2l ki1b2r 2k1i2deá 2k1i2deg 2kidej 2k1i2dét 2kidéz 2kidi ki2dio 2kidÅ‘b 2kidÅ‘k 2kidÅ‘m 2k1i2dÅ‘n 2kidÅ‘t ki1dró kie2l kie2m kie2r ki2eset. kie2t ki2ew kié2d kié2k 2k1ifj ki1f2r 2kiga ki2ga. 2kigá ki2gát 2kige ki2gén ki2géz ki1g2r ki1k2l ki2konn ki2kono ki1k2ré ki1k2ri ki1k2v 2k1ille 2kily ki2lye 2k1i2ma. 2k1i2mai ki2mak ki2máb 2kimád ki2mái ki2máj ki2mák 2k1i2mám ki2mán 2kimár ki2mát ki2máv ki2mit 2k1imp 2k1i2na. 2k1i2nai 2k1i2náb 2k1i2nár kin2csa kin2csá kin2csel kin2cso 2kind 2k1inf kin2gas 2k1inj 2k1integ kio2l kio2m 2k1i2onn 2k1i2ono 2kiont. ki3óh 2k1i2ón. 2kipar ki1pf ki1p2la ki1ple ki1p2r 2k1ips 2k1i2rat 2kiránn 2kirány 2kirg 2k1irh 2k1irk 2k1i2rod 2kirom 2k1irr 2k1irt ki2sad ki2s1ajtó ki2s1akn ki2sal ki2sap ki2sas ki2sál ki2s1e2c ki2s1elÅ‘ ki2s1emb ki2s1es ki2s1é2k ki2si. 
ki2s1in kis1ipa ki2s1i2ra ki2s1isk ki1skál 2kism ki1smár ki1s2min kis2nyá kis1oko ki2s1oros ki2sör ki2sÅ‘ kisp2 kis1pl ki1s2por ki1s2pó kis3sz kist2 ki1s2tar ki1steri ki1stí ki1stop ki1str ki1stu ki1t2r kit2t1er kit2t1ö kiu2s kiu2t kiú2s kiú2t kivíz1 2k1i2vó 1kí 2k1í2gé k1így. kí2ja kí2nal kí2n1árb kí2n1e kí2nél kí2n1ó2 kí2rá 2k1í2re 2k1í2ró 2kítm kítÅ‘a2 kítÅ‘e2 2k1ív. 2k1ívb 2k1í2ve 2k1í2vé 2k1í2vű kí2zé kí2zü kí2zű kk1abl k2k1a2da kk1ajt k2k1a2kad k2k1akk k2kalá k2k1alj k2k1alm kka2ró2ra k2k1a2u k2k1azo k2k1áll kk1árf kká2rok k2k1á2ru kká2s k2k1ásá k2k1áth k2k1átl kk1átu k2k1átv kk1á2zó kk1br kke2c kk1ecse kk1e2dz kk1e2gé kke2l1e2g k2k1e2lem k2k1ellÅ‘ kke2lőá kk1emu kke2ra kk1erd kke2ró k2k1e2rÅ‘ kk1e2ti kk1e2vé k2k1ég k2k1é2l k2k1épí k2k1érté k2k1érz kk1fr kk1gr kkiá2 kk1i2do kkie2 kk1i2ga k2k1igé k2kill kk1ind k2k1ing k2k1ink k2k1int kk1i2par k2k1i2ro kk1isko kk1ism k2k1í2r k2k1íz k1k2lí kk2ló. k1k2lu kko2laj kk1olló k2k1o2pe kkor2dá k2k1org kko2r1os kko2r1út k2k1orz k2k1oszl kkó1p2 kk1ó2rá kkö2l kk1ölÅ‘ k2k1önt kk1ötl kk1Å‘2ri kk1Å‘2rö kk1Å‘rs kk1Å‘rü kk1pr kk2ris kk2rit kk2rí kk1sm kk1sp kk1st kk1t2r kk1udv kk1urt k2k1utá k2k1új k2k1üg kk1ü2lÅ‘ k2k1ü2t k2k1ü2v kk1üzl kk1űr kk2vó kla1g kla3ko kla1kri kla2p1áta kla1pla kla1s2k kla2uzá kláma2 klá2mal klá2m1an klá2mas klá2m1á2 klámár1a2d klá2m1e2g klá2m1eh klá2mel klá2mer klá2m1ér. klá2m1érté klá2m1i2k klá2min klá2mis klá2mol kleí2 kle2t1a2n kle2tál kle2t1elv kle2t1e2r kle2t1e2v kle2t1ér. kle2t1étbe kle2t1é2té kle2t1é2ve. kle2tin k2lini klió2 kli2pel k2lisé kli2só 1k2lím k2loá k2lopf k2loty kló2rad kló2raj kló2ran kló2rat kló2ris kló2rol k2lub klu2b1a klu2bá klu2b1e klu2bir klu2bol klu2se klus3s klu2s3z kma1b kmai2ko kmas2 kmá2nyan knag2 knak2 kna1kr kni2a2 knika2 kni2k1al kni2k1as kni1sz2 knÅ‘c1c kn2Å‘3r 1ko ko2áz ko2be 2k1obs ko2cho 2k1o2dav 2k1o2dú kogás1s ko1g2ra ko2kád 2koke2 ko2k1er 2k1o2k1ir 2k1o2kí 2k1okl 2k1okm 2k1o2kol ko2k1o2v 2k1o2koz kok2sze 2k1okta kol1ajt kol1áro 2k1oldal 2k1oldá 2k1oldó ko2lid ko2l1igá ko2limp ko2l1i2na. ko2lita kol2l1ad kol2lál kol2lel kol2les kol2l1ét ko2lö 2k1oltár 2kolvas 2k1ombu ko2mil kom2p1ér. kom2p1ol ko2n1a2d ko2naj ko2n1a2l ko2n1a2n ko2n1ar ko2n1as ko2n1au ko2n1ál 2konbá 2konbet konc1c koncé2 kon2ch kon2csá kon2c3sor. kon2c3soro ko2n1e 2konén ko2n1ér. 2kongu 2konhab ko2nir 2konog ko2n1ol 2konola ko2nor 2konosn 2konosr 2konost 2konpa 2konpr 2konrét 2kontö ko2nü ko2nyag ko2nyal ko2nye ko2nyó konz2 2konzs ko2ón kopá2sé 2kopc ko1pro 2kopti ko2r1a2d ko2rag ko2r1aj ko2r1a2n kor1ass ko2r1aszt ko2r1átl 2k1orch kor2csal kor2dad kor2d1ag kor2d1a2s kor2dác kord1d kor2d1e2l kor2dem kor2des k1ordít kor2d1ö kor2d3z ko2r1e2c ko2reg ko2r1el ko2r1er ko2r1es kor1f2 korgás1s 2korie kor1ill ko2r1ing ko2r1inte kor1isk ko2r1i2zom ko2rosz ko2r1o2v ko2r1ói ko2rón ko2rö ko2rÅ‘ 2k1orr. 2korrk 2k1orrt kor1s2 kor1t2r ko2r1úto ko2rü ko2s1as ko2sál ko2se kos3sze kos3szo kos3szü 2k1ostr ko2sü ko2szal ko2szer ko2szir ko2szí k2oszo ko2szó 2kosztá kotókész1 kotó1st ko2ut 2kovaria ko2vié ko2xi koza2tal kozókész1 kozóköz1 2kozóö 1kó kóa2d kó1bl kóc3c kó2cem kó2ch kó2d1ab kó2dak kó2d1a2l kó2d1a2n kó2daz kód1d kó2d1e2l kó2dem kó2d1e2r kó2d1es kó2d1és kó2din kó2dö kó2dü kó2dzá kóé2r kó1f2r kói2g kóí2v kó1kré kó1kv 2k1ó2né. kó2nét 2k1ó2ni. 
2k1ó2no kó2pan kó2p1as kó2p1e2l kó2pir kópó2 kó2p1ór kó2pü 2k1ó2raa kó2r1a2c 2k1ó2rai kó2r1a2n kó2ras kó2ráb kó2rád kó2rág kó2rái k1ó2ráj kó2rám kó2rán kó2rár kó2rát kó2ráv kó2rel kóre2s kó2réj kó2r1éve kó2r1ir kó2r1ok kóro2ka kóro2ko kór1osto 2kóród kó2r1ódá kó2rÅ‘ 2k1ó2ságú kó1slá kó2s1os kó1s2pe kó1spi kó1s2po kó1str kó2s1ü2 kó2s3zára 2k1ó2vó kó2z1a2m 1kö kö2ba köb1öll köb1öln köb1ölr köb1ölt kö2da kö2dá kö2del kö2der kö2dev kö2do kö2dó kö2du kö2dú kö2dz 2k1ö2ko 2k1ökrö köl2csal köl2csá köl2csel köl2cs1Å‘2 2k1ölel köles3s 2k1ö2léssz kö2lí kö2lÅ‘e költa2 köl2tal köl2t1e2v 2k1öng k1önté k1öntÅ‘ k1öntv köny2v1a köny2vá kö2ra kö2rá k1ördö kö2red kö2r1e2g kö2rék kö2r1étt kör1f2 kö2rik kö2r1iz kö2ro kö2ró kö2r1ö2l kö2r1Å‘2 kör1p2 kör1s2 kört2 kör1tr kö2ru kö2rú körü2l1et körü2lé kö2r1ülés körü2lÅ‘ kö2r1ülÅ‘. 2körv. 2körvb 2körvh 2körvn 2k1örvö 2körvr 2körvt 2körvü 2körvű 2körvv k1öszt 2kötlé k1ötvö 2k1öv. 2k1övb 2k1övh 2k1ö2vig 2k1övr 2k1övv kö2za kö2zá kö2zen kö2z1é2k kö2z1í kö2zo kö2zó közÅ‘e2 kö2z3s kö2zu kö2zú kö2z1ü2lé kö2z1ü2lÅ‘ 1kÅ‘ kÅ‘a2n kÅ‘1bl kÅ‘2ch kÅ‘1dra kÅ‘e2k kÅ‘e2l kÅ‘e2r kőé2l kőé2ne kÅ‘1f2r kÅ‘1gn kÅ‘1gr kÅ‘i2t kÅ‘1kr kÅ‘1pl kÅ‘1pr kÅ‘r1an k1Å‘2rei kÅ‘re2s kÅ‘r1esz k1Å‘2ri. kÅ‘2ril kÅ‘2rip kÅ‘2riv k1Å‘rl kÅ‘r1s2 kÅ‘2rül 2k1Å‘rz kÅ‘1s2k kÅ‘2sö kÅ‘1s2t kÅ‘1tr kÅ‘u2t kpára2 kpá2rad kpá2r1at kpá2r1ál kpá2r1út kp2la kp2le kp2lé kp2lu kpó2t kp2ra kp2re kp2ré kp2ri kp2rí kp2ro kp2ró kp2sz k2rach kra2chi 1k2rajc k2rakk kra1p krá2sz1e kren2d1Å‘2 k2reo k2repá 1k2rémh 1k2réml 1k2rémr kré3p2 1k2rist 1k2riti kro1g2 1krokett. 1krokod kro2n1a2 kro2ná kron1d2 kro2n1e kro1p2 kro1str kro1sz2f kró2mar kró2m1e kró2mis k2rómo 1k2rónik k2rup k2s1a2rá ks1elo ksé2gele ksé2gis kségü2l ksé2g1ülé k1s2ká ks2ko k1s2la ks2lá ks2lu ks2mi k1s2ni ks2pa ks2pe ks2pi ks2po ks2pó ksp2r ks2rá k1s2ta k1s2tá ks2ti k1s2tí ks2to k1st2r k1s2tu k1s2tú k2sz1a2d kszae2 k2szagy k2sz1aj ksza2ke ksza2ké ksza2k1ü k2sz1am ksz1any ksz1aré k2sz1ág kszáraz1 k2sz1e2gy k2sz1ela k2szemu k2sz1eng k2szev ksz1élet kszé2t1 k2sz1id kszilá2 kszi2p ksz1ipa ksz1isk k2szír k2sz1old k2szoltó k2sz1osz kszö2g1ell kszö2g1elv kszt2 k2szuni k2szut k2sz1üg k2szüt k2sz1ü2z ksz1z kt1a2da kt1ajá kt1a2lap k2t1alb k2t1all kta2n1á2s kta2n1ó2 kta1p2 k2t1arm kt1a2ud kt1azo kt1ábr ktá2lal kt1álló k2t1álm ktára2d ktá2r1ada ktár1adó ktá2raj ktá2r1az k2t1átf k2t1átv k2t1e2g k2t1elg k2t1elh kt1eljá k2t1ell k2t1els k2t1elv k2t1enz kte2rad kte2ral kte2rár kte2rát kte2reg kte2r1e2l kte2r1in kte2rosz k2t1esem kté2lÅ‘ k2t1érl k2térmi k2t1érte k2t1éss k2t1é2vet kt1fr kt1gr k2t1id kti2g k2t1igé kt1ill kti2m kt1ima k2t1ind k2t1inf k2t1i2r k2t1isk k2t1ism k2t1i2vás k2t1íg k2t1í2r ktív1e2l kto1g2ra kto2n1á kt1ope kto2ras kto2r1e kto2rol k2t1ös k2t1öt kt1pr k1t2ran k1trau k1tréf k1t2rén k1t2rió ktro2ná k1t2rón kt1sp kt1st kt1sz kt1t2r ktu2m1e ktu2min ktu2sab ktu2s1an ktu2szár ktu2sze ktu2szé ktü2l k2t1ülé k2t1üt k2t1ü2v 1ku kuá2r 2k1ucc kue2l kue2r kue2s ku1f2r 2k1u2go 2k1ugr ku2gya 2k1ujj. 2kujja. 2kujjad kuj2j1a2da 2kujjai 2kujjak 2k1ujjal 2kujjam 2kujjas 2kujjat 2k1ujjá 2kujjb 2kujjg 2kujjh 2kujji 2kujjk 2kujjn 2k1ujjo 2kujjp 2kujjr 2kujjs 2kujjt 2kujju 2kujjú kuk2k1ó2 kula1k kul2csal kul2cse kulus1s ku2m1a2d ku2mal ku2mü ku2nad ku2nal ku2n1ás ku2ne ku2n1in kun3n ku2nor ku2n1ó2 ku2nü 2k1up. 
2kupr 2kupu ku2rad 2k1u2rai ku2rak 2kural ku2ram ku2ráu 2k1urn ku2rol kuru2c3s ku2sal ku2sas ku2sál ku2s1e2 ku2sis ku2s1i2z ku2s1ó2 ku2sö kus3sze ku2s1ú ku2sü ku2szal ku2szar kusz1es ku2sz1é2l ku2sz1é2p ku2szét ku2sz1il ku2szis ku2szü 2kutaló 2k1u2tasí 2k1u2taz ku2tál 2k1u2tán 2k1utc 2k1u2tol 2k1u2tód ku2tóp ku2zs 1kú kú2jí 2k1újs kú2p1a2 kú2p1á kú2p1e kú2t1a2 kú2t1á2 kú2t1e kútfé2 kútfél1 kú2tos kú2t1Å‘ 2kú2tu kú2t1ü2 1kü kü2ge kü2gy 2k1ü2lep kü2lo kül2t1á 2küreg 2k1ü2rí kü2rü 2k1üst kü2szá kü2szí 2k1ütk küvés3s 2k1ü2vö 2k1ü2ze 1kű 2k1űrh 2k1űrm kű2ro kű2r1Å‘s kű2zé kű2zÅ‘ 1k2van k2varc 1k2vark kvá2nyol kvárosi2 kvé2sel kvé2so kvés3s 1k2vin 1k2vitt k2vóc k2vóta k2vótá kvÅ‘csa2 1ky kyd2n ky2fajtán ky2fajtár ky2fajtát ky1i2s ky1i2v kyk2k kyk2ne ky2se ky2s1ü2 ky1üz kza2tal kza2t1e2 kza2t1ó2 2l. 1la laa2d laát1 laáta2 2labár 3labd 2labr 2l1abs la2c1ag la2c1aj la2c1al la2cem la2c1ete la2c3ha la2ché lac1ita lac2k1orr lac1orr la2c1ö2 la2cÅ‘ la2csalj lac3ság la2c3sor lac3sül lac3süt lac3sz la2cú la2cü 2l1a2data 2l1a2datb 2l1a2datk 2l1a2datn 2l1a2dato 2l1a2datr 2l1a2datt 2l1a2datu ladás1s 2l1adl 2l1a2dog 2ladomá 3l2a3don ladói2ko 2l1a2duk la1dy2 lae2d lae2l lae2r laé2d laé2r 2laff la1fl la2gac lag1a2dó la2g1a2n lag1ass la2g1a2t la2gau la2gav la2gág lag1áll lag1ár. lag1árá lag1árb lag1áre lag1á2ré lag1árf lag1árh lag1árk lag1árm lag1árn lag1á2ro lag1árr lag1árs lag1árt la2g1ere la2gés la2gid la2g1i2k la2gim la2gis la2gí la2g1ont la2g1osz la2góc la2g1ó2rá la2góvá la2gön la2g1ud la2g1ur la2g1ut la2g1ü la2gű la2gyú la2ire la2jag la2j1a2l la2jap la2j1ar laj1ass laj1a2sz la2j1az la2j1ál 2lajánl la2j1ár. la2j1árh la2j1árn la2j1árs la2j1árt la2jás la2j1áth la2j1áts la2j1elt la2jen la2j1es laji2 la2jin la2j1is la2jí la2j1ol la2j1oml la2j1or 2lajoz la2j1ór la2j1ös la2jÅ‘ l2ajs laj1sz la2j1u2t la2j1ü2 la2kad lak1alj la2k1alm la2k1a2lo laka2t1a2n la2k1ác la2k1álk lak1áll la2k1árn lak1ásás 2lakci la2k1eg la2k1e2l la2k1ér. la2kérte la2k1iko la2k1ír lak2k1as lak2kec lak2kel lak2kol 2l1akkor lak2köl la1klu 2laknáb 2laknáh 2laknás la2kob la2k1op la2k1orm la2k1osz la2k1ov 2lakóz la2k1ös la1k2rém lak2rip la1k2ru lakt2 2l1akt. 2l1aktiv la2kuj 2lakulá 2lakulg 2lakulh 2lakulj 2lakuln 2lakulo 2lakuló 2lakuls 2lakulu 2lakulv 2lakús la2k1ú2to 2lakúv 2lakza la2la2g1 la2laku 2laláí lalás1s 2l1alb 2l1alc 2l1alf 2laljn 2l1alka 2l1alkot 2l1alku 2l1alle lal2tár lal2te 2l1alti. 2laltonk lal2t1ó2 lal2t1ü2 2laltvé 2l1alvi lam1ada la2m1aka la2m1any la2m1a2p la2m1ará la2m1a2z lame2g la2m1egy la2m1ele lam1emb la2m1esz la2mék lam1éle la2méne la2m1érv la2minv la2m1ism la2m1ist la2m1í2 2l1ammó la2m1ol lam1org la2m1osz la2m1ó2r la2mö la2m1ur la2m1ü la2n1a2d lana2g la2n1aj la2nal lan1ala la2nam la2nar la2n1áll la2n1ár. la2n1á2rai la2n1á2rak la2n1á2rá la2n1árr la2n1árv lanás1s lan2csel lan2csem lan2csi lan2csol lan2d1ál lan2d1el lan2d1ér. lan2diz la2ner la2nes la2n1ép lan2g1as lan2gen lang3g 2l1angin la2nil la2nip la2nis 2l1ankét lan1kr la2n1osz lans2 lan2tag 2lantác la2nü la2nyag lany1a2la la2nyau 2lanyái la2ny1ál 2lanyán la2nyás 2lanyáv la2ny1e2lem lany1e2rÅ‘ la2ny1í2r la2ny1ol la2nyó la2nyü laó2r 2l1a2pa. lap1akk lap1akt lap1a2la lap1alk la2p1atm la2p1att la2p1a2u la2paz la2páb la2p1á2g la2pák la2pán lap1á2rá lap1árh la2p1árr la2pe2l lap1ele la2pem la2p1en la2p1es la2pik lap1illa lap1ille la2p1im la2p1i2na. la2p1ing la2p1int la2p1i2p la2p1i2s la2p1ita la2p1iz lap2lat la2p1o2ku lap1org lap1orn lap1orz lapos1s la2pó. la2pób la2pón la2pór la2pö la2pÅ‘ 2lappar 2laprí lap1s2 2lapún 2lapúv la2pű la2ra. 
la2ran 2l1arc larc3c 2l1a2rén lar2m1e lar2min 2l1art 2l1arz la1s2pa la1s2pi la1stra la2sz1abl lasz1alt. la2szas 2l1a2szat la2szás la2sz1én la2szód lasz3s lasz2t1alj l1asztam la2sz1út la2t1a2d la2t1aj lat1alak la2t1alk la2t1alt lat1ará la2t1a2ro lat1arti lat1aszt lat1att la2tau la2taz la2t1áj lat1áll la2t1árad la2t1á2rai la2t1á2rak la2t1á2ram la2t1á2rat la2t1á2ráb la2t1á2ráh la2t1á2rán la2t1á2ré la2t1árh la2t1árn la2t1á2rok la2t1árr lat1ártá la2t1ártó la2t1á2rú la2t1árv lat1ász la2t1áta latdi2al la2t1e2g la2telem la2t1ell la2tep la2t1erk late2s la2t1ess la2tesze la2t1eti la2t1eto la2t1e2vÅ‘ la2t1éke lat1é2ké lat1ékk la2t1é2ré la2t1é2rÅ‘ la2térte la2t1érv lat1éss la2t1i2ko lat1ikra lat1inte la2t1inv la2t1ist la2t1iz la2t1ír lat1ívé lat1k2 la2toj la2t1okm lat1olda la2toll lat1oml la2t1osz la2t1otth la2t1órá la2t1óri la2t1övi la2t1ö2vö la2t1Å‘r 2l1attako latta2n1ó2 2lattv la2tut la2tül lat1üze latű2z la2t1űzÅ‘ la2tyá la2ub la2uk la2us la2uto laü2z la2vat 2l1a2vu la1yé la1yig 2layrÅ‘ lazac1c laza2c3s lazma1 2lazmá 2l1a2zon 1lá lá2bar lábas1s lá2bál lá2b1e2l lá2b1ina lá2b1i2z láb1org láb1orr lá2bö lá2bÅ‘ 2lábrá lá2b1u2s lá2bü lácsé2 lá2csét lá2cs1il lá2csip lá2csí láda1s lá2d1az lá2d1e2r lá2det lá2dim lá2d1or lá2d1ott lá2d1ó lá2dül lá2dz lá2gab lá2gac lág1a2da lá2g1a2g lá2g1al lá2gan lá2gas lág1ass lága2t lá2g1atl lá2g1ato lá2g1att lá2gál lág1áll lá2gám lá2g1á2rak lá2g1á2ro lá2g1á2to lá2gép lá2gid lá2gigé lá2gim lá2g1ott lá2g1ó2 lá2gÅ‘ lág1s lá2gü lá2gű 2lágyá 2lágyo lá2has 2láhe láí2r lá2lad lá2l1aka lá2l1al lála2n lá2l1any lá2l1ar lá2lau lá2l1ág lá2l1á2l lálás1s lá2l1átj 2láldás 2láldoz lá2lel lá2l1est lá2l1e2v lá2lél lá2lim lá2l1is lá2lí 2l1állam 2l1állat 2l1állás 2l1állk 2l1állom 2l1állv 2lálmaim 2lálmok. 2lálmom 2l1álmos 2lálmuk 2lálmunkb 2lálokaih 2lálokain 2lálokair 2lálokán 2lálokát 2lálokáu lá2l1o2ko 2lálokonk 2láloku lál1órá lálö2 lá2l1öv 2l1álut lá2l1út lá2lü lá2lű lá2m1a2d lá2m1aj lám1ass lá2m1au lá2m1ál lá2m1á2z lám1b2 lám1ell lám1erÅ‘ lá2mes lá2mék lá2m1érv lá2m1int lám1ist lá2mí lá2m1or lá2mó lá2mö lá2mÅ‘ lá2m1út. lá2mü lá2mű lána2 lá2n1ag lá2nal lá2n1an lá2nar lá2n1as láná2 lá2nár lánc3c lán2cel lán2c1ég lán2csá lá2nem lá2n1er lá2nép lán2g1at lán2gál lán2g1e lá2nil lá2n1is lán2k1e2l lán2k1ó2ra lán2k1ö2v lá3nok lá2nol lán1sp lán1sz lánt2 lán1tr lá2nü lá2nyaj lá2nyala lá2nyam lánya2n lá2nyar lá2ny1as lá2nyav lá2nyel lá2ny1ér. lá2nyó lá2nyö lá2pét lápi2 2l1á2pol lá2rad 2l1á2rai lá2raka l1á2rakb l1á2rakk l1á2rakr lá2r1a2l 2l1á2ram 2l1á2rat lá2r1av l1á2raz 2láren lá2res l1árkot 2l1árnya lár1s2 lárt2 lár1tr 2láru. lá2rug 2l1á2ruh 2l1á2ruk 2l1á2rul 2lárus 2l1árut 2láruv lá2rúa lá2rü lá2s1a2d lá2s1aj lá2sal lá2s1a2r lá2saz lá2s1á2g lá2s1á2rai lá2s1árak lá2s1á2rá lá2s1árb lá2s1á2ré lá2s1árh lá2s1árr lá2s1árt 2l1á2sás lá2s1á2to lá2se lásegyez1 lá2sír lá2sott lás1otth lásó2 2l1á2só. 
lás1ór lá2sÅ‘ lás3szé lá2sut lá2sű lá2szas lás3zav 2lászed lá2szeg lá2sz1e2m lá2szen lá2szip lászkész1 lászö2 lá2szöl lá2szöv lász3s lá2szü lá2taj lá2t1e2re láté2tel 2l1áthe lá2t1i2o 2l1á2tí 2látkö lá2t1osz lá1t2rá lá1tri 2l1átte 2l1áttö 2l1áttű l1átús 2l1átvé l1átvi 2lávi lá2viz 2l1á2vó 2lávu lá2z1adot lá2z1al lá2z1árh láz2árus 3lázb lá2z1el lá2z1i2s lázo2 lá2z1olt lá2zsal lá2zsan lá2zsó láz3sz l2b1is lb2li lb2lo lb2lú lb2ra lb2re lb2ri lb2ro lb2ró lb2ru lc1ajt lc1alk l2c1a2to l2c1ág l2c1ál lc3csi lc3cso lc3csö l2c1e2le lc1elr l2c1emb lc1emel lc1esz l2c1e2v lce2z l2c1eze l2c1ép lc1évi lc1fr lc3ha lc3há lc3ho lc3hu lc3hú lc3hü l2c1i2d lci2tér lc1ív lc1k2r l1c2lu l2c1ost l2c1osz lc1ó2r lc1ök lc1ös lc1pr l2cs1a2d l2csakt lcs1alap l2cs1a2n lcsa2p1á2g lcs1apr lcsa2r lcs1ara lcs1aro l2cs1a2s l2csaz lcs1ág l2cs1ál l2cs1á2t1a2 l2csátá l2cs1áth l2cs1átj l2csátv l2csec l2cs1e2g l2c3sej lcs1elf l2cs1elt l2cs1elv lcs1emb lcs1eml l2cserd lcse2r1e2l lcs1erÅ‘ lcs1ese lcse2t l2cs1ete l2csél l2cs1é2rés l2csérle l2csérte l2cs1érté lcs1érve l2cs1érvé lcsé2sz1ék l2cs1é2te l2cs1étk l2cs1é2vét l2csidé l2csimi l2cs1inf l2cs1ing l2cs1int l2cs1i2pa l2cs1irt l2cs1isk l2cs1ism l2csita l2cs1í2z lcs1k2 l2cs1okm lcs1ors l2cs1ö2l lcs1Å‘2sé lcs1s lc3str lc3sug lcs1ült lc3szo lc3szó lc1tr l2c1uj lc1ül lc1üt lc3zá lc3zo ld1abl ld1abr l2d1a2cé ld1a2dato l2d1a2dá ldai2 lda2lag lda2laj lda2l1e2g lda2lel lda2les lda2l1é2l lda2liz lda2l1í2 lda2los ldalt2 l2d1ant lda2nya lda2nyá ld1apó l2d1aran l2d1arc l2d1ark lda2t1as lda2t1eg lda2t1in lda2tós ld1ág ld1áll ld1álm ld1árn ldás3s ld1br ld1egy l2d1e2kék l2d1e2kén l2d1e2kétÅ‘ l2d1e2l1a l2d1e2leme l2d1e2lemi l2d1e2lemn l2d1elér l2d1elk l2d1ellá l2d1elo l2d1e2lÅ‘h l2d1e2lÅ‘vez l2d1els l2d1eltá l2d1elvé l2d1ember l2d1e2mel l2d1eml lde1p2 ld1epe l2d1erÅ‘ ld1e2vé l2d1ex l2d1é2g l2d1é2jér ld1élm l2d1ép l2d1érc l2d1éré l2d1é2ri l2d1érte l2d1érz l2d1ész ldé2ves ld1fl ld1fr ld1gl ld1gr l2d1i2ga l2d1igé l2d1ill l2d1imi ld1ind l2d1int ldi2p ld1ipa l2d1isza l2d1ín l2d1ír l2d1íz ld1kr ld1kv ldo2g1as l2d1ola l2d1old l2d1olt ld1oml ld1orc ld1org l2d1osz l2d1ó2rá l2d1óri l2d1öb ld1ökle l2d1örök l2d1öv ld1Å‘ri ld1Å‘sk ld1pl ld1pr ld2rót ld1sp ld1st ld1udv ldu2r ld1ura ldus3s l2d1u2t l2d1új l2d1úr l2d1ú2t l2d1üg l2d1üle l2d1ür l2d1üz l2d1űr ld3zá ld3zó ld3zu 1le lea2bá lea2d lea2g lea2k lea2l lea2n lea2p le2aré lea2sz lea2v leá2j leá2s leá2z 2l1e2béd le2bin le1bla le1bra leb2rek leb2s 2lecc 2l1e2cet 2l1ecset l1eddi le1d2res le1dro lee2s leé2r lefona2 lefo2nal le1fr 2l1eft le2gala lega2r le2g1a2s le2g1áll le3g2áto le3g2áz le3geb le2g1e2g le3g2elem. leg1ell le3g2elÅ‘a le3g2elÅ‘j le3g2elÅ‘ké le3g2elÅ‘t le3gelÅ‘ze le3g2elve. le3g2elvé le2gene le3g2erj le3ges. le3geseb le3gesek le3gesen. le3g2esn le3gesr le3gest le2g1e2sz legé2d legé2l le3gén legg2 le2gid le2g1is legmeg1 le3g2on le2góv le3göng le2g1ös legpec1 legvíz1 legyá2 2legyb 2legyed l1e2gyel legy1e2lÅ‘r l1e2gyenge l1e2gyék le3gyi 2legyí 2l1egyl le3gyo 2legys l1egysé le3gyú le3gyű 2l1egyv le2ic lei2g lei2rá lei2s lej2ta lej2t1á le2kad le2k1a2pu. le2k1ál leká2p le2k1ár. le2k1ára le2k1árb 2l1e2ke. le2k1eg lek1els lek1emb lek1eme lek1erj lek1e2rÅ‘ le2k1értÅ‘ 2l1e2kés. 
le2k1id le2kij le2k1ik lek1ist le2kiz lekkész1 le1kli lek1olt le2k1orz le2k1ó2r le2k1Å‘2 2lektro 2lektró le2k1út le2küd 2l1elad l1eldo 2l1e2lekt lele2ma 2lelemei 2le2lemek 2l1e2lemes 2lelemz lele2t1a2 lele2tel lele2to 2lelég 2l1e2l1ér lelés1s lelé2s3z 2l1elgo 2l1elha 2l1elhá 2l1elhel l1elho le2lim lel1ing le2l1iz 2l1eljá 2l1ellá 2l1ellé 2l1ellÅ‘ 2l1elméj 2l1elmés 2l1elnev 2l1elnö 2l1elny 2l1e2los 2l1e2l1ö2l 2l1e2lőél 2l1e2lÅ‘fo le2lőí le2lÅ‘szű l1elso l1elsö 2l1elszá 2l1elta l1eltáv 2l1eltér 2l1elto l1eltö 2l1elv. 2l1elvá 2l1elvét 2l1elvh l1elvn 2l1elvs 2l1elz 2l1e2mel. 2l1e2meld l1emeled l1emelek 2le2melé l1emelés 2l1e2melg le2melh l1emelhe 2l1e2meli 2l1e2melj l1emellek 2l1e2meln le2melÅ‘ 2l1emelÅ‘. 2lemelÅ‘s le2mels le2melt l1emelt. l1emelté l1emeltü 2l1e2melü le2melv l1emelve lem1erk le2m1essz lem1eszt le2m1e2ti le2m1ék le2m1éle lem1élte le2m1élv lem1érde 2l1e2méss le2m1ill le2mind le2m1ing le2m1isz le2mita le2m1itt l1emlege le2mok le2mol le2m1org le2m1osz le2m1ó2r le2möl le2m1Å‘2 lem1p2 lem1u2r le2n1a2d le2nal le2n1ál le2nát lenc1c len2cel len2ceme l2end le2n1e2g le2n1e2l le2ner lene2tel lené2k le2n1éke le2n1ékk le2n1ékt lené2l len1élé lenés3s le2n1ész le2n1év. len2g1e2l le2nid le2n1ip le2n1o2k le2nol le2n1or le2n1óv len1Å‘ré len1s2p len1sta len1tra len1t2ren lenu2 le2n1ur le2n1ut le2n1üg le2n1üt 2l1enyv le2oa le2oc leo2k le2oli leo2m le2oz leó2c leö2r le2p1aj lepa2p lep1apa lep1álla le2p1eg lepe2le le2pék le2pél lep1illé lep1iz le1p2lo le2p1osz le2p1ó2d lep2p1elv le1p2rése le1p2ri le1pro le1pró le2p1ü2lÅ‘h 2lerdÅ‘ 2leredm le2rejé le2r1ék ler1éte le2r1il le2rir 2leróz 2l1e2rÅ‘ le2s1al le2s1ál l1esdé 2lesege le2s1e2kéh 2l1esemé l1e2setb 2l1e2setei lese2tel 2l1e2seté l1e2seth l1e2seti l1e2setk l1e2setn lese2t1o2 l1e2setr le2sésű le1s2ka le1ski 2leskü le1sla le1s2li le1sma le1s2mi le1s2p2 les3sza les3szá le1s2tar le1stemp 2l1esten l1estébe 2lesti. le1s2til 2l1estj les2t1or 2lestr l1estt 2l1eszkö le1szto le2t1ab le2tac let1a2la let1all le2t1am le2t1ana let1apr le2t1e2kéb le2t1e2la le2t1elb let1eleg le2t1elf let1elk let1elÅ‘a le2t1e2lÅ‘ké let1elr let1emel le2t1eml le2tene le2t1ent let1ered le2t1eré le2t1erk le2testé let1eszk 2l1e2teté 2l1e2teth 2l1e2tetj 2l1e2tetv le2t1éd le2t1é2j leté2l let1éle let1élé let1élv le2t1é2nekn le2t1érb le2t1é2ren le2t1é2ri le2t1érr le2t1értÅ‘ let1é2rül leté2sz let1éter let1é2té. leté2vede let1éves le2t1é2véb le2t1évéi le2t1é2vér le2t1é2vét le2t1é2vév le2tid let1ikr let1ill le2tim le2t1ing le2t1ist leti2sz let1iszo le2tivá le2tod le2t1o2k le2t1on let1ora le2t1ox le2t1ö2v letÅ‘e3d le2t1Å‘ri le1traf let2teg letü2l le2t1ülé let1üzé 2lety leu2g le2uk leu2r leu2t 2levene 2levení 2levenü l1e2vez. l1e2vezg l1e2vezh l1e2vezn l1e2vezt le2vezü l1e2vezv levél1l le2volú le2xá le2x1el le2x1e2p lexkész1 le2xö le1yé le2zer lezÅ‘a2 lezÅ‘e2r lezőé2n lezÅ‘1s2p 2l1ezr 1lé 2l1ébr lé2ca lé2cá léc3c lé2c1ék lé2c3h léci2 lé2cim lé2co lécs2 léc3sk lé2d1as lé2d1el lé2d1emb lé2dél lé2d1és lé2dil lé2dos lé2dot lé2dö lé2d1Å‘2 lée2r léé2r lé1f2r lé2gal lé2g1e2c 2léged lé2g1eg lég1ell lég1els lé2g1em lé2g1e2r lég1ér. 2l1é2gés l1éghet lé2gil 2légí lé2g1ö 2lé2gÅ‘ lé1g2ráf l1égtem l1égtet l1égtél lé2gú 2légü l1é2gün lé2gyel lé2gyes lé2gyi lé2gyo lé2gyö 3légz léh1asz 2l1é2he. lé2hed lé2h1em 2léhes 2léhet 2léhezÅ‘ lé2h1é lé2hol 2léhs 2l1éj. 
2l1é2ji 2l1éjj 2l1éjs lé2kab lé2k1a2d lé2k1a2g lé2k1aj lék1aka lé2k1a2la léka2p lé2k1apo lé2k1as léka2t lék1ato lé2k1au lé2k1av lé2kaz lé2k1e2g lé2k1ep lé2kesí lé2k1e2sz lék1evé lé2k1é2k léké2l lé2k1éle lé2k1élv lé2k1é2te lé2kid lé2kít lékköz1 lék1ors lé2k1osz lé2k1öl lé2kör lé2köz lé2kÅ‘ lé2k1u2r lékü2l lé2k1ülé lé2k1ült lékve2g l1élc 2l1é2le. 2l1é2lei 2l1é2les lé2léb lé2lén lé2lér 2l1élm lélÅ‘kész1 2l1élr lé2lük 2l1é2lű 2l1é2mel lé2nag léná2 lé2neg lé2nekb 2lénekl lé2nel 2lénkí lé2no lé2nyö lé2p1a2l lé2p1el lépés3s 2l1é2pí lé2pó 2lépül lé2pülé lé3rad lé2ral lé2rap lé2ras lé2rat lé2rav lé2r1á 2l1érc. 2l1ércb 2l1érde lé2reg lé2r1e2l lé2r1esÅ‘ 2l1é2rez. 2l1é2rezv lé2r1é2j 2l1é2rés 2l1é2rik lé2ris lé2rit lé2rí 2l1érni 2l1érnü lé2r1osz lé2rö 2l1é2rÅ‘. lé2rÅ‘d 2l1é2rÅ‘e lé2rÅ‘i 2l1é2rÅ‘j 2l1é2rÅ‘k lé2rÅ‘n 2l1é2rÅ‘t 2l1é2rÅ‘v 2lértelm 2l1érték 2l1értj 2l1értÅ‘. lé2rut 2lérvel 2l1érvén 2l1érze 2l1érzé lésa2 lé2s1aj lé2sak lé2sal lé2sar lé2s1az lé2s1á2 lé2seg lé2s1e2l lé2s1eti lé2s1é2g lé2sés lé2s1ikr lé2só lé2s1Å‘ lés3sza lés3szer lésü2l lé2s1ülé lé2s1ülÅ‘ lé2s1üt lész1ék 2lészést 2l1észl lés3zse lé2tag lé2taz lé2t1eg lé2tel. lé2telek léte2leko lé2t1e2lem lé2t1e2let lé2telh lét1elha lé2telm lét1elo lé2t1elÅ‘ lé2t1elv lé2tene lé2tesé 2l1é2teth lé2teti 2l1é2tetn lé2tev lé2t1ér. lé2t1é2te lé2t1is lé2tít 2létl lé2t1o2k 3l2étó lé2tóh lé3tól lé2t1ó2r létö2 lét1örö lét2rág lé2tun 2l1évad 2l1évb 2l1é2ve. 2l1é2ved 2lévei 2l1é2vek 2lévelf 2léveli 2l1é2vem 2l1é2ven 2l1é2vet. 2l1é2véb 2l1é2véh 2l1é2véi 2lévén lév1ért 2l1é2vét 2l1é2vév 2l1évf 2l1évh 2l1é2vi. 2l1é2vig lé2vir lé2vis 2l1évk 2l1évl 2l1évm 2l1évn 2l1é2vó 2l1évr 2l1évs 2l1évt 2lévü lé2vük lé2vün 2l1é2vű 2l1évv 2l1évz lfa2l1e2 lfat2 lfa1tr lf1aut lfá2t1i2 lf1cl lf1ese lfe2t lf1eti lfé2l1é2v l2f1élm lfé2m1e2ké lfi2d1é lfin3n lfi2nos lf1iro lf1isk lf1kl lf2lo lf2ló lf2lö lf2lu l2f1okta l2f1ó2r lf1pr lf2ri l1f2rí l1f2ro lf2rö lf2rÅ‘ lf1sp lf1sz2 lf1ülÅ‘ lga1p2 lgatói2ko lgau2 lgár1as lgés3s lgi2as lg2la lg2lo lg2ló lgör2cso l1g2ra lg2ru l2gy1ad l2gy1ag l2gyaj lgy1ala lgy1alj lgy1any l2gy1a2s l2gy1au l2gyáb l2gyál l2gyát l2gy1e2g l2gyelá l2gy1e2le l2gy1elz l2gyembe lgy1eré l2gy1esem l2gy1e2v l2gyél lgy1ér. l2gy1és l2gyid l2gyikr l2gyip l2gyis l2gy1ok l2gy1ol l2gyop l2gy1os l2gy1ó2r l2gy1ö2r l2gy1ös l2gy1u2t l2gy1út lhai2 lhón1al 1li li2aa li2aá li2abi li2abo lia2cé li2aci li2acs li2acu li2a1d2 li2ae li2aé li2ag li2ah li2aid li2aí li2aj li2akép li2ala li2ale li2ali li2amé li2amo li2ao li2aó li2aÅ‘ li2ap li2ara li2aré li2asá li2aso lia1sz li2asza li2aszá li2aszé li2aszf li2aszi li2a1t2 li2aü li2av li2az li3be lic3sz li2cü li1cy li2deá 2li2deg li2dén li2dét 2l1i2di 2l1i2dÅ‘ li2ec lie2d 2lien lié2d lié2k lié2n lié2vé lifé2l1é2v li2fí li1fl li1f2r l2ig. 2l1i2gaz ligán1n li2ge. 2l1i2geb 2l1i2gek 2l1i2gep li2géb li2géh li2géi li2géj li2gék li2gén li2gér li2gés li2gét li2gév li2géz li2hat 2l1ihl 2l1i2ker. 2l1i2kerb lik2k1ell lik2k1elv lik2k1e2r lik2kérd li1k2l 2l1i2konc 2l1i2konf 2l1i2kong 2l1i2konig 2l1i2konk 2likonl 2l1i2konm 2l1i2konp 2l1i2kons li1kré 2lill 2lima. 2limád li2mák li2máv l1imbo li2me. lime2rá lim1p2 2limpo limű1 li2nakr li2nal lin1any lin1ará 2linas. lin2cs1ö2 2l1indu li2ne. li2neg linék2 2l1inger lin1grá ling2rádi 2l1ingré lin1ing lin1inj lin1kl lin3ny li2nor 2l1integ 2l1intéz li2n1u2s li2n1ut 2l1i2nú li2oc lio2l li2oni li2óke lió2raj li2őá li2Å‘d li2Å‘e li2Å‘f li2Å‘g li2Å‘m li2Å‘p li2pa. 
li2pai li2p1aj li2p1á2r li2pát li2peg lip1ele li2p1elv li2p1esz li2pö li2p1Å‘2 li1pro 2l1i2ram 2l1i2rat 2l1i2rán 2l1irh li2rig li2rod 2l1irr 2l1irt li2sál li2s1el lise2s li2sid lis1isk 2l1iskol l1ism lis1p2 2l1ispá 2l1isten 2l1istr li2s1ü2t lis1ü2v 2l1iszl lisz2t1á2z li1sztir li2t1a2g 2lital. li2tala lit1alk li2t1áta li2t1e2g li2t1e2l li2t1emb li2tez li2t1érd li2térte li2til lit1int li2t1i2o li2t1is li2t1okt li2t1old li2tön litÅ‘2 li2t1Å‘r l1ittak l1ittas l1ittat li2t1ura liú2t 2l1i2vad li2vot l1i2vó 3lix. l1izél 2l1i2zén 2lizésí l1izgu 2l1izmo 2lizzí 2l1izzó 1lí lí2gé lí2ja lí2ju líma1 2l1íns l1í2ny línyenc1 l1írd l1írh l1írj l1írl lí2rod lí2rog lí2rok lí2rom 2l1í2ró l1írs l1í2ru lítés3s lítÅ‘a2 lítÅ‘2p3r 2lív. líva1 lívak2 2l1íz. 2l1í2ze l2ízi lízis3s 2l1ízl 2l1í2zü l1í2zű l2j1a2da l2j1e2lÅ‘ l2j1er ljes1s ljharc1 l2j1ip l2j1ir l2j1iz lj1ír l2j1or l2j1os lj1pr lj1sp lj1üz lka1sl lka2tel lka2t1é lka2tin lka2t1ó2 lké2p1ell lké2szel lkia2k lkiá2 l1k2lin lk2lí l1k2lu lkö2z1ön. l1k2rá lk2reá l1k2rémb l1k2ri l1k2rí lk2rom l1k2ró lk1sh lkukész1 lk2va lk2vá lk2vó lla2dój lla1f2 lla2g1ad lla2gal lla2g1a2s lla2gál lla2gen lla2gép llag3g lla2gol ll1a2ja ll1akad l2l1akc ll1a2kóz l2l1akti l2l1a2lap l2l1alm lla2mad lla2mal lla2mem lla2mél lla2mor l2l1anal lla2nyer llany1ó2 lla2pal ll1aszf llata2l lla2tala llat1any lla2t1ál lla2t1ár. llatár1a2d lla2t1árb lla2tés lla2tint lla2t1olt l2latomo lla2tors lla2t1ű l2l1aty l2l1aut l2l1á2ga l2l1ágb l2l1ágg ll1ágy. ll1ágyá l2l1állo l2l1álm llá2mag llá2m1al llá2m1e2 llá2m1érté llá2mik llá2mis llá2m1ut llán2k1e2 l2l1á2p llára2 llár1ad llá2rak llá2r1á2 llá2s1ikr llá2sü llá2sza l2l1átd l2l1átf l2l1á2ti l2l1átk ll1bl ll1br ll1cl ll1d2r l2l1e2dz ll1eff lle2ger lleg1g lle2gyé lleí2 l2l1e2kés lle2l1a lle2lin l2l1ell l2l1e2lÅ‘a l2l1e2lÅ‘d lle2m1a2 lle2m1á lle2m1e2g lle2mel ll1emelé llem1ell lle2mer lle2m1él lle2m1ér. lle2m1érté lle2m1o2 lle2mu lle2na llen3n lle2n1Å‘2r llens2 lle2r1in l2l1e2ró l2l1e2sÅ‘t ll1estr lle2tos l2l1e2vet l2l1ex ll1é2het l2l1é2jek llé2k1aps lléká2 llé2kár llé2k1ol llé2kos llé2kó llé2k1út l2l1é2let. l2l1é2letb l2l1é2lete l2l1é2letén l2l1é2letét l2l1é2letk l2l1élt l2l1élv llé3nya llé1sp l2l1é2ter llét2o l2l1év. ll1é2vén ll1f2l ll1fr ll1g2r ll1ide l2l1i2ga l2l1igé ll1ill l2l1i2ma l2l1imp ll1ina. l2l1ind ll1inga l2l1ingf l2l1ingm l2l1ings lli2nin l2l1inv lli2p l2l1ipa2 l2l1irá l2l1iro l2l1isi ll1isko l2lism l2l1iste lli2ta l2l1ita. l2lital l2l1izé ll1izma ll1izmá l2l1i2zo l2l1íg l2l1íj l2l1í2v ll1k2l ll1k2r ll1kv l2l1oá ll1obj l2l1off l2l1o2l ll1ope llos3s llóá2 lló1gr llóó2 lló2rák lló2rát lló2riá l2lóvo l2l1öb l2l1öl ll1önt l2l1ör ll1öss ll1ösz l2l1ö2z l2l1Å‘r. ll1Å‘2rei l2l1Å‘2rö l2l1Å‘rt l2l1Å‘2si l2l1Å‘2ze. ll1p2l ll1p2r ll2si. 
ll1sk ll1sp lls3s llsz2 ll1szt ll1tré ll1tri ll1tró ll1u2bo l2l1ug ll1ult llus1s l2l1u2t l2l1ús l2l1ú2to l2l1üd l2l1üg l2l1ür l2l1üt l2l1üz l2l1űz llvé2d1e2l l2ly lly1ö l3lyw lma2kad lma1kré l2m1ant lma1trá l2m1att lmá2l lm1álm lm1álo lmá2nyan lmá2ny1út lmá2ris lm1átk lm1átt lm1bl lm1b2r lm1cl lm1dr lme3gon lme1kl lm1elb lm1e2li lm1elm lm1e2lÅ‘ l2m1ember l2m1enc l2m1eng lmeó2 lm1epi lm1e2seté l2m1essz l2m1etn l2m1etű l2m1ex l2m1ég l2m1élv l2m1é2neke l2m1ép l2m1értÅ‘ l2m1éva lm1gl lm1gr lm1i2dÅ‘ lmi2g l2m1iga lmigaz1 l2m1ind l2m1inf l2m1ing l2m1ins l2m1inte l2m1inté l2m1inv l2m1i2p lm1irá lm1iro lm1isk lm1izz lm1íg lm1ír lm1íz lm1kl lm1kr lm1old lm1olv l2m1o2p lm1ors l2m1ov l2m1ódá lm1ó2rá lm1ö2k lm1ön lm1ö2r lm1ös lm1öt lm1ö2z lm1Å‘2r lm1p2l lm1p2r lm1sk lm1sl lm1sn lm1sp lm1st lm1sz2 lm1t2r l2m1u2g l2m1u2r lm1utó l2m1új l2m1üg l2m1ünn l2m1üz lnak2 lna2p1e lná2ris lné2v1á lni2ker lni2s1 1lo lo2áz lo2b1a2r 2lobá lo2b1á2c 2lo2be lo2b1iv lo2b1ó2 2lobu lo1by lo2ch lo1cy lo2éc lo2éo lo2ép lo2éz lo2g1a2d lo2gal lo2g1ál logás1s lo2ger lo2gí lo2g1or lo2gö lo1g2ráf. lo1g2ráff lo2g1ú lo2gü lo2ir lo2kab lo2k1a2d lo2k1aj 2l1o2kak lo2kárb lo2k1á2ro lo2k1árr lo2k1áru lo2k1átj lo2káu lo2k1e2 lo2kék lo2kid lok1is lo2k1i2z lo2kí lokka2l lok2k1ala lok2k1el lok2k1ó2 lok2kös lok2k1ut lo2k1ol lo2konk lo2kor 2l1o2koz lo2kü lo2laj 2l1old 2l1o2li 2l1olló 2l1oltár lom1adá lom1aga lo2m1ajt lom1aka lom1a2lap lom1a2lo lom1ará lom1ass lo2m1att lo2maz lom1ács lo2mág lo2m1ál lo2m1áré lom1ári lom1árk lo2m1árn lo2m1á2ro lomba2l lom2bala lom2bav lo2med lo2meg lo2men lom1erk lom1erÅ‘ lo2m1es lo2méd lo2mék lo2mél lom1é2ne lo2m1éri lo2m1i2d lo2m1i2k lo2m1im lo2mink lom1int lomi2s lom1isz lo2miz lo2m1í2 l1omló lo2m1okoz lo2mol lo2m1o2r lo2m1osz lom1ott lo2m1ó2 lo2mö lo2mÅ‘ lom1p2l lo2mü lo2mű lo2nab lo2n1a2d lo2n1a2g lo2naj lo2nak lo2n1a2l lo2n1ar lo2n1as lo2n1au lo2n1av lo2n1á2z lon2ch lon2cil lon2d1ó2 lo2n1el lo2n1ir lon1k2 lo2nol lo2n1ó2 lo2nö lon1s2 lon1tr lo2nü lo2oi lo2oj lo2or lo2ós lo2pap lop1ál lo2pe lop1e2l lo2piz lo2pí lo2p1o2r lo2p1osz lo2pö lop2p1in lop1t2 2l1opti lo2pü lor2din lore2t 2l1org 2l1orm 2lorn 2l1ors 2l1orv 2l1orz l2os. l2osa l2osb lo2se lo2sü lo1t2ha 2lottl lo2xá lo2xi 1ló lóa2d lóá2r ló1bl ló1br lóc3c ló2cem ló2ch lócsa2p1á2g 2l1ó2dát ló1dro lófé2l1é2v lófi2úké ló1fl lóg1g ló2gí ló1grá lóí2v ló1k2l ló1kré ló2nar 2l1ó2ni. 2lónn 2lóno 2lónr 2lónt lópárba2 ló1p2l ló1p2r ló2rac lór1ada lór1a2la lóra2n lór1any ló2rar 2lórái 2lóráj ló2rámr 2lórár 2lórát ló2rem ló2r1e2s ló2r1ér ló2r1ing lór1ism ló2rí lór1old ló2rü ló2s1aj lósa2n lós1any ló1ski ló1sla ló1spe ló1s2po ló1s2rá lós3s ló1s2ta ló1str ló1sz ló1t2rá ló1tre ló1tré ló1tri ló1tro 2lóvod l1ó2vó ló2zad ló2z1a2k ló2zal lóza2n lóz1any lóza2t1e ló2zál ló2z1es ló2zim ló2z1ir lóz1isk lóz1ism ló2zolv ló2zÅ‘ ló2zü ló2z1ű 1lö lö2bö lö2ca löc3c lö2ch lö2cÅ‘ 2l1ö2dé lö2ka lö2ká lö2ko l1öldö löl2t1a löl2tá 2l1öltés l1ölthe löl2to l1öltöt l1öltöz l1öltÅ‘ 2l1öml lö2möl lö2na lö2ná lön2b1ékét lö2ne lö2né lö2n1o lö2nó lö2nu lö2nú lö2pa lö2pá lö2pe lö2pöl löpü2 lö2p1ülé lö2p1ülÅ‘ lö2re lö2rö lös3szá 2lössze lö2sü lö2sze lö2szis 2l1ötl löt2ter 2l1ötv 2l1öv. 2l1övb l1ö2ve. l1ö2vez 2l1övh 2l1övn l1ö2vön 2l1övr l1ö2vük 2l1övv 1lÅ‘ lÅ‘a2c lÅ‘a2n lőá2g lÅ‘1bl lÅ‘1br lÅ‘2dad lÅ‘2dá 2lÅ‘de. 2lÅ‘dei lÅ‘2del lÅ‘2d1ék lÅ‘2din lÅ‘2d1iv 2lÅ‘dje. 2lÅ‘djét lÅ‘dköz1 2lÅ‘döke 2lÅ‘dökn 2lÅ‘dökr 2lÅ‘döt. lÅ‘2d3ze lÅ‘e2l lÅ‘e2s 2lÅ‘esé lÅ‘1fl lÅ‘1f2r lÅ‘gépi2 lÅ‘gé2p1ip l1Å‘gyel 2lÅ‘hű lÅ‘i2ta 2lőít 2lÅ‘kése lÅ‘1kl lÅ‘ko2r1út lÅ‘1kv 2lÅ‘lege 2lÅ‘nn 2lÅ‘ny. lÅ‘2nyal lÅ‘2nyár 2lÅ‘nyb 2lÅ‘nye. 
2lÅ‘nyei lÅ‘2ny1elvi 2lÅ‘nyéb 2lÅ‘nyén 2lÅ‘nyér 2lÅ‘nyét 2lÅ‘nyév 2lÅ‘nyh 2lÅ‘nyk 2lÅ‘nyn 2lÅ‘nyö 2lÅ‘nyr 2lÅ‘nyt 2lÅ‘nyü lÅ‘Å‘r2s1é2g 2lÅ‘pán lÅ‘1pl lÅ‘1pr lÅ‘1ps 2lÅ‘reh 2lÅ‘rej lÅ‘re3m 2lÅ‘ret 2lÅ‘réb lÅ‘2r1is lÅ‘2rül 2l1Å‘rz lÅ‘2sál lÅ‘2sin lÅ‘1s2ka lÅ‘1ská lÅ‘1sla lÅ‘só2 lÅ‘2sór lÅ‘2s1ót lÅ‘2sÅ‘ lÅ‘1s2pi lÅ‘1spr lÅ‘1srá lÅ‘s3s lÅ‘s2tar lÅ‘1str lÅ‘1sy lÅ‘sz2 lÅ‘s3zára lÅ‘s3zárr lÅ‘1szf l1Å‘2szít lÅ‘1szt 2lÅ‘tét. 2lÅ‘tolá lÅ‘1tre lÅ‘u2t lőü2l 2lÅ‘vig 2l1Å‘2zét 2lÅ‘zié 2lÅ‘zl 2lÅ‘zm 2l1Å‘2zük lp1a2lag l2p1a2lap l2p1alát l2p1a2láv l2p1alk lpanto1 l2p1any l2p1áll l2p1átm l2p1áts lpcsa2p1 l2p1eg lp1e2lu l2p1e2m lpen1n l2p1e2rÅ‘ l2p1ég lpé2l l2p1éle lpé2r l2p1éri l2p1i2d l2p1i2na lp1izm lp1i2zo l2p1ív l1p2lá l2p1old lpo2n lpon1á lp1p2r l1prd l1p2ri l1p2ro l1p2rób lpu2s lp1uszo lrá2k1e lrei2 lre1p2ré lre1sz lreü2 lsa2v1a2m lság3g ls1eprik ls2ho ls2ka ls2ká ls2ki ls2la ls2lá ls2li ls2ma ls2mi lsors1s lsóé2r lsÅ‘1sz2 l1s2pa l1s2pe l1s2pé l1s2pi l1s2po l1s2pó l1sp2r l1s2rá l1s2ró l1s2ta ls2tá lste2i l1s2ti l1s2tí l1s2to l1st2r l1s2tu l1s2tú lsza2ké lsza2k1ü lszá2rú l2sz1e2gű l1sz2f l1sz2l l1sz2p lszt2 lsz2tá l1sztr l1sz2v lta2gyá lt1ajá lta2lapb lta2l1á2s lta2l1á2z lta2len lta2l1ev lta2l1é2 lta2liz ltal1l lta2lö l2t1amp l2t1apr lt1ará l2t1arc lta2riá lt1assz lt1aszt l2t1a2u lt1azo lt1ág. lt1á2ga lt1ágb lt1ágg lt1ágn lt1ágr l2t1áll ltána2 ltá2nan ltá2rada ltár2s1ág lt1árur ltá2s1á2g ltá2tal l2t1átr lt1bl lt1br l2t1ell l2t1elö l2t1emu lte2rad l2t1e2reik l2t1erÅ‘ l2teru lte2t1a2 lte2ték l2t1ékné lté2l lt1éle lt1élm lt1érc l2t1érz lté2sa lté2s1é2g ltés3s lté2t1é2 lté2ves. lt1fl lt1gl lt1gr lt1ide ltig2 l2t1i2gé lti1kl l2t1ill lt1imp l2t1ind l2t1ing l2t1i2o l2t1isi lt1ism l2t1ist l2t1i2ta l2t1ín l2t1í2r lt1ít l2t1í2v ltív1e2l l2t1í2z lt1kr l2t1oml l2t1ord l2t1org l2t1orj l2t1orr lt1ors ltos3s l2t1o2x ltó1p ltó1s2p ltó1sz ltót2 ltö2l l2t1ölé lt1önt lt1öss ltÅ‘a2 ltÅ‘e2l lt1Å‘2rö lt1pl lt1pr ltra1s lt2rág lt2rén lt2rik lt2ril lt2róf l1t2rón lt2róp ltsé2g1el lt1sl lt1sp lt1st lt1t2r l2t1udv l2t1una ltu2n1i lt1ura ltu2sze l2t1u2t ltú2ri l2t1üg lt1ü2lé lt1üst l2t1ü2v lt1ü2zem 1lu lua2g luá2r lu2b1a2d lu2bal luba2n lu2b1as lu2bár lu2b1e2g lube2r lu2bes lu2bél lu2b1in lubó2 lu2bór lu2bö lub1t2 lu2bü 2ludj 2ludv lu1dy lue2l lu2esé lugas1s lu2gat lu1g2l 2l1u2gor 2l1ugr lui2r 2l1ujj lu1kl lu2k1os lu2k1o2v lu2l1inf lu2lí 2lulr lu2mad lu2maz lu2mál lumen1n lu2mer lu2mes lu2m1i2k lu2m1ip 2lumí lum2pel 2lund l1unh 2l1u2ni l1unj l1unl l1unn l1u2no l1u2nó l1unv lu2rak lu2ram lu2rat 2l1u2ru 2l1u2rú lu2sad lu2s1aka lu2sakr lu2sal lu2s1a2n lu2s1ág lu2sál lu2s1e2r lu2s1érté lu2sim lu2sis lu2sír luskész1 lu2s1ó2 lusö2 lu2sör lu2sÅ‘ lus3sze lus3szi lust2 2lutánz 2l1utc lu1t2h lu2tód lu1tra lu2zs 1lú lú2d1a lú2dá lú2de lú2dét lú2d3z lú2ga lú2g1á lú2ge lúg3g lú2gi lú2gol 2l1újd 2l1ú2jí lú2ju lú2ri lú2ru lú2rü 2l1ú2sz lú2té. 2l1útv lú2z1a2n 1lü lü2cs lü2dí lü2dü lü2ge lü2gy lü2lá l1üldö lü2lel lü2l1e2m l1ülep lü2lé lül1ér 2l1ülés 2l1ülhet. 2l1ülhetne 2l1ülhets lü2lí 2l1üljek 2l1ülnek 2l1ülnék 2l1ülni 2l1ü2lö 2l1ü2lÅ‘ 2l1ülsz. 2l1ült. l1ülte. 2l1ültek 2l1ültem 2l1ültes 2l1ültet. 2l1ültete 2l1ülteti 2l1ültetj 2lültetl 2l1ültetn 2lültets 2l1ültett 2l1ültetü 2l1ültetv 2lülté l1ültéi l1ülték l1ültél l1ültén l1ültér l1ültét l1ültn 2l1ültü lü2lú lü2lü 2l1ülün 2l1ülve. 
2l1ünn lü2re lü2rí lü2rü lü2sz lü2te lü2té lü2ti lü2tö lü2tÅ‘ lü2tü lü2ve lü2vö lü2ze lü2zé 1lű lű2rá 2l1űrb 2l1ű2ri l1űrl lű2ze lű2zé lű2zi lű2zö lű2zÅ‘ lű2zü lva2dat l2v1adm lvaj1ak lva2j1e lv1akad l2v1akc l2v1a2la l2v1alg l2v1alk l2v1ant l2v1a2ny l2v1a2rás l2v1a2z lvá2gy1ón l2v1állat l2v1állt l2v1áp lvá2rain l2v1á2rak. lvá2ras lvá2rár l2v1árny lvá2rol l2v1á2rul lvás3s lvá2szi lv1áta lv1áth lv1átk lv1br l2v1e2dz lv1egys lv1egyv lv1e2kéb l2ve2leme l2v1elk l2v1ell l2v1eró l2v1ex l2v1é2gés lvé2gül l2v1élm l2v1é2neke l2v1ép lvé2ri. l2vértel l2v1érté l2v1é2rü l2vérzési lvé2sza lv1fr l2v1i2de l2v1i2do l2v1iga lv1iko l2v1i2m lv1inté lv1i2pa l2v1iro l2v1irt l2v1isko l2v1ism l2v1izm l2v1izo l2v1í2ve lvíze2 lvíz1es lv1kl lv1kr l2v1ok l2v1old l2v1olv l2v1onto l2v1op l2v1or l2v1os lv1ödé lvö2l l2v1ölé l2v1ölt l2v1ön l2v1ös l2v1öv l2v1ö2z lv1Å‘2r lv1Å‘s lv1pr lv1ps lv1sp lv1st l2v1ut lv1új l2v1üg l2v1üt l2y 1lya lya2dat 2ly1adm 2ly1a2dó 2lyagy. 2lyagyr 2lyajt 2ly1a2kas ly1akc 2ly1akná 2lyakós 2lyakt ly1a2lat 2ly1alb 2ly1alk 2ly1alm ly1a2lom ly1alt lya2maj lya2mará lya2m1el lya2mem lya2m1érté 2lyang 2lyani lya2nyag ly1a2nyá ly1a2pán lya1p2r 2ly1arc lya2sal ly1aspe ly1assz 2ly1atl lya1t2r 2lyaty 2lyazo 1lyá 2lyábé 2lyábr 2ly1ág ly1áld ly1áll 2ly1áp 2ly1á2rad 2ly1á2rak 2ly1á2ram 2ly1á2rat ly1árk 2ly1árn 2ly1árr 2ly1á2ru lyás3s lyá2sz 2lyáta 2lyátf 2lyáth 2lyátlá 2lyátlé 2lyátm 2lyáts 2lyátt 2lyátv ly1bl ly1br ly1dr 1lye lye2ga ly1e2gye lyegyez1 ly1ej 2lyeke. 2lyekek 2ly1e2kéb 2ly1e2kérÅ‘ ly1e2le ly1elf ly1elh ly1ell ly1elm ly1e2lÅ‘ ly1elr ly1els ly1elt ly1elü ly1elv ly1elz lye2m1a 2lyember 2ly1e2mel lye2min 2lyemlí 2ly1eng ly1erk ly1e2rÅ‘ 2ly1esd 2ly1esemé 2ly1e2sete 2lyeseté 2ly1e2sett 2lyesél 2ly1e2sés 2ly1este 2lyeszk 2lyeszm 2lyeszű 2lyetet 2lyevÅ‘ 2ly1ex 1lyé 2ly1ég 2lyéhe. 2lyéhen 2lyéhet lyé2l ly1éle ly1élm 2lyéneke 2ly1ép 2ly1ér. 2ly1érc 2ly1érd 2ly1é2ré 2ly1érm 2lyérÅ‘. 2lyérÅ‘i 2lyérÅ‘k 2ly1érr 2ly1érte 2ly1érté 2ly1é2rü 2ly1érv 2ly1érz ly1ész 2ly1étt 2lyév. 2lyéve. 2lyévei 2lyévek 2lyéven 2lyévet 2lyévév 2lyévi 2lyévr 2lyévv ly1fl lyf1ölt ly1gl ly1gr 1lyi 2lyibo 2lyideá 2lyideg 2lyiden 2lyidi 2ly1ido 2lyidÅ‘ lyié2h lyi2g 2lyiga 2lyigá 2ly1ige 2ly1igé 2ly1iha 2ly1ill ly1ima 2lyimá 2lyimp 2lyind 2lyinf 2ly1ing 2ly1ini 2ly1int 2lyinv 2ly1i2p ly1i2rat 2lyirá 2lyiri 2ly1iro 2lyirr 2lyirt 2ly1isk 2lyism 2lyisp 2lyist 2ly1ita 2lyivad 2ly1i2z 1lyí 2ly1íg 2ly1ín 2ly1í2r ly1ív ly2kiz ly1kl ly1kró 1lyn. 1lyo 2ly1ob 2lyodú 2ly1of 2lyokal 2ly1okl 2lyokm 2lyokoz 2lyokta lyo2l ly1ola ly1old ly1oll ly1olt ly1olv lyo2m1as 2ly1op 2ly1o2r 2ly1osz 2ly1ott 1lyó ly1ódá lyó2s1á 1lyö 2ly1öb 2ly1ö2l 2ly1ö2r ly1öss 2ly1öv 1lyÅ‘ ly1Å‘2r ly1pl ly1pr lyrádi2 lyre1p 1lys. ly1sk ly1sp lys2t lyszá2m1é ly2tac ly1t2r 1lyu 2ly1ud 2lyugr 2ly1uh 2ly1uj lyu2kás 2lyuni 2ly1u2r 2ly1u2t 1lyú 2lyújs 2ly1úr. 2lyúth 2lyútr 2lyútt 2lyútv 1lyü 2ly1üd 2ly1üg 2lyünn 2ly1ür 2ly1ü2v 2ly1üz 1lyű 2ly1űr. 2ly1űrh 2ly1űrl 1lywo lyze2t1el lzás1s lze2ta lze2t1e2l lze2t1ér. lzé2sa lzÅ‘a2 lzÅ‘e2 lzÅ‘2s1orr lzus3s lzu2s3z 2m. 1ma maa2d ma1bra 2m1abs ma2cél. ma2célb ma2célt 2madag 2m1adap ma2datb ma2dato ma2datt madás1s ma2d1é2v madókész1 ma2dóz 3m2a3dz mae2r maé2r ma1f2ra ma1fri ma2gac ma2g1a2dato ma2g1a2dá ma2g1a2dó ma2g1a2du maga2l mag1ala ma2ga2n mag1any maga2r mag1ara magas1s ma2g1asza mag1azo ma2gág mag1áll mag1á2ré mag1árn mag1árr mag1árt mag1áta ma2g1áto ma2geb ma2g1el 2m1aggr ma2gid ma2g1i2k ma2g1í magkia3dó ma2g1or ma2g1osz mag1óra ma2g1óv ma2g1u2r ma2g1ü2 2m1agy. 2m1agyb 2m1a2gyu m1ahh ma2il. 
ma2ilt mai2z 2m1ajta maj2ti ma2kará ma2kác 2m1akc mak2k1e2 2makkr 2m1akku ma1klu 2m1a2kol. ma1k2rém ma1k2ri 2makro m1akti ma2kus malac3há mala2c3s 2malag mala2g1ú ma2laku 2m1alakz 2m1a2lan 2m1a2lapí malasz2t1a 2m1alb 2m1alc 2m1ald 2malg m1algo 2m1alj 2m1alk malo2m1e mal1os m1alr 2m1alt. mal1th 2m1a2lu 2malv 2m1amp 2m1a2nal ma2nat 2maná ma2n1eg 2m1angi 2m1anto 2m1antr 2m1a2nya. ma2nyag 2m1a2nyá 2m1a2nyó mao2k ma2pa. ma2pas 2m1a2pát ma1p2l ma1p2re ma1p2ré ma1p2ri ma1p2ro 2m1aps 2m1a2rány 2m1arb 2m1arc. 2m1arcc 2m1arch mar1cko 2m1arcu 2m1arcú 2m1a2rén ma2r1i2si mar2k1al mar2k1in mar2k1ón mar2k1ó2r 2marm maro2k1 ma2romá maros1s marók2 maró1kh maró1kk maró1kn maró1kr ma1ry 2m1arz ma1s2ká ma1s2pe ma1spr ma1s2rá mast2 ma1s2ta ma1ste ma1str masz1egy masz1e2me ma2sz1ét ma2szév ma2szis maszí2v masz1íve masz1ös mat1ada mat1alap mat1anya ma2taz ma2tág matá2raka ma2t1á2ras mat1á2rár ma2t1árn ma2t1á2rú ma2t1áz ma2t1e2g mat1eli ma2t1ell mat1elÅ‘ mat1elt ma2t1eme mate2s ma2tél ma2t1érte matfé2 matfél1 matfélé2 ma2tid ma2t1ind ma2t1inf ma2t1ing ma2t1int ma2t1örö ma1t2ran mat2rág mat2tin ma2t1ut 2m1a2tya ma2tyá 2m1a2uk mau2ra ma2uri ma2us mau2ta 2m1a2uto 2m1a2vat ma2z1a2l ma2zál ma2z1átl ma2zel mazókész1 ma2z3sű 1má 2m1á2bé 2m1ábr má2cs1as má2cs1e2 má2csin má2csir má2csis má2csó má2csü má2fá 2m1ág. 2m1á2ga 2m1á2gá 2m1ágb 2m1á2gé 2m1ágg má2gi. 2m1á2gig 2m1ágk m1ágna 2mágo 2mágó 2m1ágr má2guk má2gun 2m1á2gú 2m1á2gy mágya2d mágy1ada má2hí má2jan má2j1árt má2j1e2 májo2 má2j1ol májren2 má2j1ul má2jü máka2 má2k1al má2kar má2k1e2 má2k1ér. má2kil má2k1ó má2kÅ‘ má2k1ü má2l1a2l 2m1álar má2lál 2m1álc má2l1e2 mál1ért 2m1állam m1állap m1állat 2m1állí m1állom 2m1álma 2m1álmo má2los má2m1as 2m1á2mí mána2 má2n1as má2nav má2nár mánc1c mán2ce mán2c1ég mán2cip mán2csé má2n1e má2n1in má2nis má2n1it má2n1ö2 má2nÅ‘ máns3s mán2tac mán2tag mán2t1al mán2t1as mántá2 mán2t1ék mán2t1öl má2nú má2nü má2ny1a2d má2ny1a2g má2nyaj má2ny1aka má2ny1a2l mánya2n má2nyany má2nyap má2nyar má2nyau má2nyav má2nyaz má2ny1e2 má2ny1ér. má2nyérd má2nyérz má2ny1í2 má2ny1ó2 má2nyö má2po 2m1á2rad má2r1a2g 2máram má2r1a2n má2ras már1ass 2m1á2rat má2r1au má2r1ál má2re2 már1em má2rés má2r1id má2r1ik má2r1i2p má2r1i2si már1isk már1istá má2rí 2márkok 2márol má2rö má2rÅ‘ má2rug m1á2ruh má2ruk m1árur má2rü má2s1a2d má2sal má2sap má2s1a2r má2s1av má2saz má2s1á2rá má2s1árn má2sás 2m1ásásá 2m1ásásn 2m1ásásr másbe2j1 másbejá2 má2s1e2 má2sír má2sor 2m1á2só má2sö má2sÅ‘ mást2 más1tr má2s1ü2 más3zav 2m1á2ta mát1a2k 2m1á2tá 2m1átc 2m1átd máte2 má2ten 2máté 2m1átf 2m1átg 2m1áth 2m1á2tir 2m1á2tí m1átj 2m1átló 2m1átn má2t1ol 2m1á2t1ö 2m1átp 2m1átre 2m1áts 2m1átte 2m1á2tú 2m1á2tü 2m1átv má2zal má2ze mázi2a mázi2é má2z1i2s má2zÅ‘ má2zsal mázs1e máz3sz mb1akc m2b1akk mba1k2r mb1akv m2b1a2lag m2b1alj m2b1alk m2b1a2na m2b1a2nya mba1p mb1a2var mb1a2zo mb1á2gy m2b1áll m2b1á2ron m2b1árró mbá2száv m2b1ászná m2b1á2szoka m2b1á2szokk m2b1á2szoko m2b1á2szokr m2b1áta m2b1áth mbeá2 mb1e2ce mbe1k2 mbe2led mb1e2leg m2b1ell mb1elr mb1elsz mb1epe mbe2r1a2g mbe2ral mbe2ran mbe2ras mbe2rá mbe2reg mbere2sz mber1eszű mbe2rev mbe2r1él mbe2r1é2s mbe2rimá mbe2ring mbe2risz mbe2rol mbe2ros mbert2 mbe2r1ú m2b1é2g mb1é2ke m2b1ékí m2b1ép mbé2ress m2b1érté m2b1érz mb1gr m2b1ide mbi2k1al mbi2ke m2b1illa m2b1i2na. m2b1i2nai m2binam mb1ind m2b1inf m2b1ing. 
mb1inv m2b1ipa m2b1izm mb1i2zom m2b1izz m2b1ív mb1íz mb1kl m1b2lú mb1ly m2b1ob mb1olda mbo2lyak mb1ond m2b1op m2b1ormá m2b1ormú m2b1o2roz m2b1oszl mb1öle m2b1öv mb1Å‘2si m2b1Å‘2sz mb1pl mb1pr mbrádi2 mb2rev mb2rok mb2ron mb1sp mb1st2 mb1sz mb1tr mbu2sze mbu2szí m2b1u2t mb1új m2b1üg m2b1ül m2b1üs m2b1üt m2b1üz mbvezé2 mc2lu mcsa2p1á2g mda1b2 mda1g2 mda1p2 mdi2ale mdi3ap md2ra md2rá md2ro md2ró 1me mea2l mea2n me2av me2béd mede2r1e2 2medény me1d2rá 2m1e2dz mee2s me2et 2m1eff me2gaba me2g1ala me2g1alu me2g1alv me2g1an me2g1a2r mega1sz2 me2gav me2g1á me3gám megá2s megá2t megá2z me2g1eg me2g1el mege2le mege2lÅ‘ me2ger me2g1esem meg1eszt me2geta me2g1e2te me2gez me2g1é2l megés3s megész1 meg1f megg2 meg1gr me2gi meg1igá meg1inn meg1ir meg1is meg1itt me2g1í me2g1o me2g1ó2 me2g1ö2 me2gÅ‘r me2g1u2 me2gú me2g1ü2g me2g1ü2l 2megyez 2m1egyh 2m1egyl 2m1egys 2megyüt me2hetÅ‘ mei2rá me2k1ad me2k1ag mek1alk me2k1am mek1arc me2k1a2s me2k1att me2k1á2l me2k1á2p me2k1ár. me2k1ára me2kát mek1egy mek1ell me2kep me2k1ers meke2sz me2k1esze me2keszm mek1eszte me2kev me2k1ék me2kél me2k1ér. mek1érde me2k1érk me2k1érte me2k1érté me2k1éss me2k1éte me2kido me2kij mek1imá me2k1ing me2k1int me2k1i2p me2k1ist me2k1ita me2kír me2kít mek1k2 mek3lu me2kob me2k1ok me2k1old me2k1olt me2k1onk me2kop meko2r mek1ora mek1oro mek1ort me2k1os mek1ott me2k1ó2v mek1öltö me2kön mek1öröm me2k1öt me2k1Å‘ mek1s meksz2 mek1t2 me2kuj me2kun me2kur me2k1ú2 me2küd me2k1üg me2k1üld me2k1ü2lé me2k1ü2lÅ‘ me2küz m1elad mel1ak me2lág me2lál 2melb 2m1e2lef me2lekt mel1e2l 2m1e2leme 2m1e2lemz 2m1e2les 2melet mele2t1ér. 2m1e2lég 2m1e2l1é2l 2m1e2l1ér melés3s 2m1elf 2m1elgo m1elha 2m1elhá 2m1elhel me2lit. 2me2l1í2 2m1eljá 2melk m1elké m1elkí m1elkö mel2lál m1ellene mel2ler mel2l1ér. mel2ling mel2l1iz mel2lo mel2lö mel2l1u2 2m1elmé 2m1elnö me2los m1elÅ‘de 2m1e2lőí m1e2lÅ‘le m1e2lÅ‘tt 2melÅ‘z me2lÅ‘zÅ‘ 2m1elr 2m1elszá 2m1elta 2m1eltá m1elter 2m1eltett. 2m1eltettn 2m1eltér 2m1elti 2melty 2m1elv. 2m1elvá 2m1elves 2m1elvn 2m1elvo me2ly1ék 2m1elz 2m1eml 2m1e2mul me2n1a2d me2nal me2nar me2nau me2nát me2n1e2g me2n1el me2ner mene2t1á2 mene2tö menés3s 2m1enges me2nil me2n1ip me2ní me2nö men2s1é2g men2t1ell men2tip men2t1is me2n1u me2nú me2nya menye2ma me2om me2óe me1p2h me2pik me2pos me1p2r mera1p2 mer1ass 2merdÅ‘ 2m1e2rec 2meredm mer1egy 2m1e2rej me2r1e2l me2r1eml mere2t1a mere2t1e2l mere2t1ér. 2merezÅ‘kh me2r1ép me2r1il me2rim mer1inf mer1ing me2r1ip 2mernyÅ‘ me2r1ol me2ror me2r1os me2rov 2merÅ‘l 2merÅ‘sí mers2 mer1st mer1tró me2r1ü2g me2sas me2s1emb 2m1esemén mese1s 2meseté 2m1e2sett 2mesély 2m1e2sésé 2m1e2sésh 2mesésk 2mesésr 2meséss 2mesést 2m1e2sÅ‘ me1s2po 2m1estb 2m1este. 2m1estek 2m1esté 2m1estf me1s2tó 2m1estr 2m1estün me2szan 2meszem me1sz2tá met1anya meta1s metasz2 me2t1e2gy met1ell 2m1e2teté met1ing me2tór me2tú me2tűd 2m1e2ug me2uk meus3s meu2t me2vÅ‘ me2zac me2z1aj meza2k me2zau me2zál me2zedé me2zeg me2z1elj me2z1ell me2zelÅ‘h me2zer mez1ere me2z1ék me2z1ér. me2z1érd mez1éret me2z1é2ri me2z1id me2zim me2zin me2zio me2zír me2z1ol me2z1or me2z1ö mezÅ‘e2 me2z1Å‘2rö me2z1Å‘ss 2mezrem m1ezrese m1ezresr m1ezrest me2z3sa me2zsá me2zsö me2zu me2zú 1mé mé2ber 2m1ébr mé2cs1a2 mé2cso mé2g1a mé2ge még1eg mé2gé mé2gi még1is mé2g1o2 mé2gÅ‘ mé2gú mé2h1a2 mé2h1á mé2heg mé2hel mé2hir mé2h1is mé2h1or mé2hö méhren2 2méhsé 2m1éj. 2m1éjb mé2jes 2m1éjs méka2 mé2k1ad mé2k1aj mé2k1ak mé2k1al mé2k1an mé2kar mé2kaz mé2k1á2 mé2k1e2g mék1elh mék1ell mék1e2lÅ‘ mé2k1ese mé2kev mé2kez mé2k1é2k mé2k1é2l mé2kid mé2kirá mé2kí mé2k1o mé2k1ö mé2kÅ‘ mé2k1u2 mé2k1ú mé2le. 
2m1é2lel mé2les mé2lez 2m1é2lén 2m1élm mé2lya mé2lyá mé2lyeg mé2ly1ú mé2n1a2r mé2n1a2t 2ménekb 2ménekh 2ménekl mé2n1el mé2n1é2k mé2n1és mé2nid mé2nin mé2n1is mé2niv mént2 mé2ny1e2g mé2ny1e2l mé2nyer mé2nyék mé2nyim mé2nyír mé2ny1o mé2nyö mé2nyú ményü2l mé2ny1ülé mé2pí mé2pü 2m1érc. 2m1é2retts 2m1é2rén mé2rér mérés1s 2m1é2rév 2m1é2rie 2m1é2rin 2m1érké 2m1érlel mé2r1ón 2mérteke 2m1értes 2m1értér 2m1értés 2m1é2rül mér2v1a2 2m1érz mé2s1a2 mé2s1á2 mé2s1e2l mé2s1er mé2sez mé2sin mé2sö més3sza mésza2 mész1al mé2sz1ál mé2sz1á2ra mé2szed mé2sz1el 2mészl mé2sz1Å‘ mész3s 2mészté 2mészth 2mészti 2mésztj 2mésztl 2mésztü 2mésztv mé2sz1ü2 mé2t1ad mé2t1ak mé2t1a2n mét1árt mét1elho mét1elta mé2t1e2v mé2tég 2m1étje 2m1étjér 2m1étjév 2m1étjü 2m1étke. mé2t1o mé2tö métÅ‘2 mé2tÅ‘r mé2t1u mé2t1ű 2m1év. 2m1évb 2m1é2ve. 2mévei 2m1é2vek mé2ven mé2ves 2m1é2vet 2m1é2véb mé2vén 2m1évf 2m1évh 2m1é2vi 2m1évk 2m1évn 2m1évr 2m1évs 2m1évt 2m1évv mé2z1a mé2z1á2 mé2zeg mé2zil mé2zim mé2zin méz1ism mé2zit mé2zí mé2z1o mé2z3s mé2zu mé2zű mfa2l1e2 mfa2lom mfa1s2 mfé2m1a2 mfit2 mfi1tr mf2la m1f2lo mf2ló mf2lu mfog1adat m1f2rak m1f2ran mf2rá m1f2re m1f2ri m1f2rí m1f2ro m1f2rö mgé2p1e2l mgépi2 mgé2p1ip mg2li mg2ló mgör2cso mg2ra mg2rá mha2sábr mhossz1út 1mi mi2aa mi2aá mi2abo mi2ac mi2ae mi2aé mia1f2 mi2afr mi2ag mi2ah mi2aí mi2ale mi2amé mi2ao mi2aó mi2aö mi2aÅ‘ mi2a1p mi2aré mias2 mi2aszá mi2aszé mi2aszi mi2aszó mi2atá mi2ati mi2ato mi2aü mi2av 2m1i2áz mi1bl micsa2p1 2m1i2deá 2m1i2deg 2m1i2dei 2midej 2miden mi2dent 2m1i2deo mi2dén mi2dio mi2dió mi2dol 2midÅ‘ m1idÅ‘z mie2l mi2éf mi1fl mi1fr 2m1i2gaz 2m1i2gén mi1gri 2m1ihl mii2d mi1k2li mi1klu mi2kono mi2kont 2miks mi2lal 2mille 2millu 2millú mi2ma. mi2máh mi2mit mi2mór mi2naj 2m1i2nam mina2n min1any 2m1i2nas mi2n1ára min2c1e2 min2ch min2d1a2 2m1indá min2dek min2d1er min2din 2m1indí 2mindu mi2neg mine2s 2minfl 2m1infú min2g1á 2m1ingé min1g2h min1inj min2k1a2l min2k1an min2k1as min2kec min2kó min2kö 2m1insp 2m1i2nuk mi2nü 2m1inz mio2n1a mio2n1á mio2r mió2r mi1p2l mi1p2r mi2ram 2m1i2rat 2mi2rán 2mirg 2mirh miri2gyel 2m1irk 2m1i2rod 2m1i2rom mi2rón 2m1irr mi2sal mis1elv mis1epe mis1ing 2misit mi2s1í2r mi1s2pi mis3szab mis3szer mi2s1ü2t mi2s1ü2v mi2s1ű mi2szár mis3zár. mi1sz2f mi2tac mita2n mit1any 2m1i2tat 2mitác mit1ár. mit1árá mit1árh mit1ári mit1árk mit1árn mit1árr mit1árt mit1árú 2mitbo mit1ing miti2s 2mitlá mi2tök mi1tri 2mitro 2mitrú mi2tür miumé2 miu2min miu2s 2m1i2vad 2m1i2vó mi2x1i mi2xö mi2xÅ‘ mi2zé. mi2zét 1mí m1í2gé mí2ja mí2já m1íns 2m1í2rá 2m1í2ró m1ív. mí2vá m1ívb mí2vé m1ívh m1ívr m1ívv mí2zü mí2zű mjé2ná mjobb1o mjo2g1á2s m2j1ol mj1ósá mj1Å‘s mkaró2 mka2r1ór mke1p mkia2 mkiá2 mkie2 mk2la mk2li mk2lí mk2lo mk2lu mkó2rost mk2ra mk2rá mk2re mk2ré mk2ri mk2rí mk2ro mk2ró mk2va mk2vó mla1f mlapá2r mla2p1áro mla2p1e2 mla2pin mla1s2t mlás3s mlá2s3z mle1g2 mleí2 mle1kn mle1pla mlé2k1a2 mlé2k1á mlé2k1el mlé2k1est. mlé2k1ér. mlé2k1ol mlé2kos mlé2kó mlé2k1ú2 mlé2szé mlo2k1ál mlo2ké mlÅ‘2s1a2 mlÅ‘2s1ű2 mmag1g mma1gl mmai2 mmas2 m2m1atk m2me. mme2g1é mmifé2l1é2v 1mo mo2be 2m1obj moci1 2m1off mo1g2ráf. 
mo1g2ráff mo1g2ráfk mo1g2ráfn mo2gy1a2 mohu2 mo2is mo2kab mo2k1ad mo2k1a2k mo2k1a2l mo2k1a2n mo2kar mo2kád mo2k1ál mo2k1á2s mo2k1e2 mo2k1il mok1k2 mo2k1ol mo2k1or mo2k1ó2 mo2kö mok1t 2m1oktat moku2 mo2kur mokú2 mo2kús mo2k1úto mo2kü 2m1o2laj 2m1olda m1oldó 2m1o2lim molli2 mol2l1in 2m1olló molói2ko 2m1oltár 2m1oltás 2molvad 2molvas 2m1oml mon1acé mo2n1a2d mo2n1a2l mo2n1an mo2n1a2p mona2r mo2n1as mo2n1áll mo2neg mo2n1er mo2n1is mon2or mo2nö mons2 mon1sp mon1tré mo2nü mo2nű monyá2 mo2nyáz mo2or 2m1opc 2mope mo2per mo1p2l 2m1opt mo2r1ad mora2n mor1any morá2la 2m1ordí mo2r1el mo2ren mo2r1e2r mo2r1est mo2rid 2morie mori2s mo2r1isk mo2r1iszo mor1izg 2morjaia 2morjaié 2m1orjait mo2r1ol mo2r1ont mo2r1ón mo2r1ó2r mo2rós mo2rö 2m1orr. mor1s2 mort2 mor3tá mor1tr mo2rü mo2rű mo2sál mo2s1e 2mosto 2m1ostr 2m1osty mo2sü mo2szal mo2szis 2m1oszlo mo1t2h 2m1ottha mot2to mo2un mo2us2 mo2vi mo2xi mo3zso 1mó móá2g mó1bl mó2ch mócsa2p1 mócsapá2 mó2d1a2l mó2dau mó2dák mó2dár mó2d1e2 mó2dip mó2d1o2r módó2 mó2d1ór módu2 módus1 mó1fl mói2ko mó1k2l mókus1s mó2lar mó1p2r mó2rak mó2rar mó2rág mó2rái mó2ráj mó2rám móri2as móró2 mó2r1ón mó2rö mó1s2k mó1the 1mö mö2bö mö2ko möl2cs1a möl2csá möl2csel möl2cs1es möl2cs1ér. möl2cs1il möl2cs1o möl2cs1Å‘ mö2le mö2nu mö2ro mö2r1Å‘ m1ötl mö2ve mö2vö mö2vü mö2vű 1mÅ‘ mÅ‘a2n mőá2g mÅ‘e2l mÅ‘e2r mőé2l mőé2te mÅ‘1kl mÅ‘1ps mÅ‘2r1á2r 2m1Å‘rh 2m1Å‘2ri mÅ‘2si mÅ‘2sü mÅ‘s3zár mőü2l mÅ‘2zi mpa1dr m2p1akc m2p1aktá m2p1áll m2p1árko m2p1átj m2p1átk mp2ci mp1elt mp1fr mp1ind mpi2re mp2lak mplo2mal m1p2lu mpon1d2 m2p1ord mporta2 mpor2t1al mpor2t1á2r mpor2t1e2 m2p1osztás m2p1ös m1p2ref m1p2rep m1p2rés m1prib m1p2ric mp2rio m1p2rod m1prof m1prog m1proj m1p2ro1p m1p2rot m1p2rób m1p2ru m1p2szi m2p1u2ta m2p1utó m2p1üz mra1p mren2d1Å‘2 mré2m1 msa2vo ms2ka ms2ká ms2ki ms2ko ms2lá ms2mi ms2ni ms1ond ms2pa ms2pe ms2pi ms2po ms2pó ms2rá ms2ta ms2tá ms2te ms2ti ms2tí ms2to mst2r ms2tú msza2ké msza2k1ü mszáraz1 msz2c mszé2dem m1sz2f mszín3n msz2l msz2m m1sz2p msz2tá m1sz2v mta2n1ó2 mtára2d mtá2r1ada mtés3s mtÅ‘kész1 mtran2s mtransz1 mt2rá mt2re mt2ré mt2ri m1t2ró mt2rö mt2rü 1mu 2m1udv 2m1ugr m1ujj 2mulet 2mulz mu2m1ad mu2m1el mu2mél mu2m1és mu2min mu2m1ir mu2mis mu2m1iv mumkész1 mu2m1ó2 mu2mö mu2mÅ‘ mumus1s mun1g mu2nok 2mur. mu2ral mu2ram mu2rat mu2rál mur1izm mu2r1u2 mu2sal mu2san mu2sar mu2sas mu2sat mu2s1á2g mu2sál mu2s1e mu2s1érté mu2sir mu2sor mu2s1ó2 mu2sÅ‘ muss2 mus3sze mus2tárá mus2t1erj mu2szal mus3zav mu2szál mu2szás mu2t1a2g mu2tal mut1a2la 2m1utalá 2mutalv muta2n mu2t1any mu2tasí m1u2taz mu2t1á2ra mu2t1árb mu2t1á2ru 2m1u2tás 2mutca mu2t1el mu2til mu2t1in 2m1u2tol 2m1u2tód 2m1u2tóp mu2t1ö mu2tü 1mú mú2jí múl2t1e2 múl2tol 2m1úr. mú2ri 2m1úrn 2m1ú2sz 2m1útb m1úth 2m1ú2ti 2m1útj 2m1útk 2m1útm 2m1útn 2m1ú2to 2m1útr 2m1útt 2m1útv 1mü mü2dí mü2dü mü2gy mü2ná mü2re mü2rí mü2rü mü2te mü2té mü2tÅ‘ mü1tz mü2ve mü2vö mü2ze 1mű mű1bl mű1br mű1fl mű1fr mű1gr mű1kl mű1pl mű1pn mű1pr 2m1űrl mű1sp műsú2 mű1sz műtÅ‘kész1 műves3s mű2zé mű2zi mű2zö mű2zÅ‘ mű2zü m2v1a2dot mvágya2d mvá2gy1ada mverés3s mw2hi mza2t1e mzás3s mze2r1o mze2t1a2 mze2t1á2 mze2t1e2g mze2t1el mze2ter mze2tesz mze2t1é2k mze2t1érd mze2to mze2t1ö2 mze2t1Å‘2 mzé2s1a mzé2so mzókész1 mzÅ‘e2r mz2rí 2n. 1na naa2d n1abbó 2n1abr 2n1abs na1cl 2n1a2dag 2n1a2dás 2n1add na2dek 2n1adm 2n1a2dó na1d2re 2n1adt na2du. na2dus na2ei naé2r 2n1aff na2ga. na2gár na2git na2gón na1grá nagy1agg na2gy1a2l na2gyapj na2gy1as na2gyav na2gy1é2k nagyú2 nagy1úr nagy1út na2ire na2ji 2n1ajk 2n1a2kad nakaró2 nak1á2sz na2k1át n1akko na1kli na1klu nako2l nak1ola 2n1a2kó. 
na1k2ré n1akti 2n1a2kus na2k1útn na2l1a2dó 2n1a2la2g1 na2l1aj na2l1a2l na2lana 2n1a2lapa 2n1a2lapd na2lapr na2lapt na2lar na2lav na2l1ábr na2lág na2l1á2l na2l1á2ro nal1á2t1ö na2l1áts na2l1elá na2l1ell nal1eng nal1ent nal1ég. na2l1ék na2l1éri na2lid na2l1ing na2l1i2o na2l1í2r 2nalízisb 2nalízise 2nalízisé 2nalízish 2nalízisi 2nalízisk 2nalízisn 2nalízisr 2nalízist 2nalízisü 2nalj. n1alja. 3naljac n1aljad n1aljai 2naljak n1aljam n1aljat n1alji 2naljon 2nalju 2naljz 2n1alkat na2l1ob na2l1ol na2lop nal1osz na2l1ó2r na2l1Å‘ nalt2 nal1tr na2lulj na2l1ut na2lü na2mer 2namitbo 2n1a2mite 2n1a2mitg 2namitha 2n1a2mitk 2n1amitl 2namitm 2namitö 2n1a2mitp 2namitro 2namitrú 2namits 2namittá 2namittö 2n1amö 2n1amp 2n1a2nal 2n1ang 2n1anh nano1 nanog2 na2nód 2n1a2nyag nao2l naó2r 2napa. nap1adó na2p1a2g na2p1ala na2p1alk nap1a2pa nap1apá nap1ará na2p1as na2pád na2p1á2g na2pák nap1áll na2pám na2p1árb na2p1átm nape2l nap1ell na2pem nap1est na2p1ill na2p1ing na2p1int nap1isk na2pí nap2lat na2p1ola nap1orm napos1s na2p1ostá na2p1ott na2pó. na2p1ó2r napp2 2nappo nap1pr n1aprí 2napróz na2p1u2t na2p1úr 2n1a2rai 2narann 2n1arany. 2n1a2ranya. 2n1a2ranyá 2naranyb 2naranyh 2naranyk 2naranyn 2naranyr 2naranys 2n1aranyt 2n1arc. 2n1arcá narchi2ab 2n1arco 2n1arcu 2n1arcú n1arró 2n1arz na2sév nas1isk 2nask na1s2ka na1s2rá nast2 na1s2ta na1s2tá na1str na2sz1an na2sz1árad naszkész1 nasz1üg na2t1ab na2t1aj na2t1alk na2t1alt nat1ará nat1áll na2t1á2ré na2t1árn na2t1eg nate3le na2t1elé nat1elle na2tél nat1érke na2t1érv na2t1i2m na2t1ing na2t1old nat1ors na2t1osz na2t1u2t na2tül natű2z 2n1a2ty na2uc na2ul nau2ra na2urá nau2s na2uto naü2z na2vart na2vat 2n1avv na1wh 2n1azb na2zé 2n1a2zo 1ná 2n1á2bé 2nábr ná2caj ná2c3h ná2cí ná2csal ná2csap ná2cs1as ná2cse nác3sik ná2csis 2nácsolat nác3sor ná2csö ná2csü nác3sz ná2d1ala ná2dap ná2d1a2r ná2d1asz ná2d1a2v ná2dá nád1d ná2d1e2 ná2d1ö ná2dud ná2d1ü2 ná2d3z ná2ga ná2gá ná2gi ná2gu ná2gú ná2gy 2n1á2hí ná2k1ér. ná2kol ná2kü ná2lab ná2l1a2l ná2lana n1álar nála2te ná2l1az ná2l1át nále2 ná2l1eg ná2l1el ná2lem ná2les n2álé ná2l1ép ná2l1in ná2lir nál2is ná2lí 2n1áll. 2n1álla. 2n1állap 2n1állat 2n1állí 2n1állom nállóköz1 ná2lü ná2mí ná2mu ná2nad ná2n1al ná2nar ná2n1á2r nán2c1e ná2n1e2 náné2 ná2nét ná2nin ná2ní nán2se ná2nü ná2rad 2n1á2raka 2n1á2rakb 2n1á2rakh 2n1á2rakk 2n1á2rakn 2n1á2rako 2n1á2rakr 2n1á2rakt 2n1á2ram ná2r1a2n ná2rap ná2ras nár1ass 2n1á2rat. ná2r1att ná2r1av ná2r1ác ná2r1ál ná2r1e2 ná2r1éve 2n1á2riai 2n1á2riá ná2r1i2p ná2rí náró2 ná2r1ór ná2rÅ‘ nár1s2 nárt2 nár1tr 2n1árud ná2rug 2nárun. nár1ur 2nárus náru2t nár1utá ná2rút ná2rü ná2s1as nás1áré ná2s1á2ru 2n1á2sás ná2s1e2 ná2s1i2k nást2 nás1tr ná2szan ná2szas ná2szág ná2szál ná2sze ná2szén ná2szil ná2szin ná2szis 2n1ászka. 2n1ászoks ná2sz1ö ná2sz1ú ná2sz1ü ná2tala ná2t1a2n ná2tál nát1ásv ná2t1e2 2n1áthi ná2t1i2o 2n1á2t1ir 2n1á2tí 2n1átlé ná2t1ö 2n1átru 2n1átug 2n1átut 2n1á2tú ná2tü 2n1átvi nba2ká nba2k1e2 n2b1é2kéb n2b1é2kén n2b1é2kér n2b1é2kév nb2la nb2lo nb2lú nbo2n1a2 nb2ra n1b2ri nb2ro nb2ró nburg2hi nc1acé n2c1ajá nc1ajt n2c1akn n2c1akt nc1a2la nc1alj n2c1alk nc1alt nc1alv nc1ana nc1ant nc1a2nya nc1ari nc1att nca2u n2c1ava n2c1ág nc1árb nc1árk n2c1árn nc1árt nc1á2sa nc1ásá nc1bl nc1br nc3csi nc3cso nc3csö nc3csu nc1dr ncea2 nc1egg n2c1eld nc1e2lek nc1e2lem nc1elm n2c1elv nc1e2red nc1eró n2c1eszt nc1etn n2c1ex ncé2g1ér ncé2hes n2c1ép n2c1évi nc1fl nc1fr nc1gr n2c3ha n2c3há n2che. nc3hel nc3het n2c3hé nc3hiá nc3hí nc3hol nc3hon n2c3hoz n2c3hó nc3hö n2c3hu n2c3hú nci2alis nci2aso n2c1ige n2c1i2gé n2c1i2ko nc1i2má n2c1i2na. 
n2c1ind nc1inf n2c1ing n2c1int n2c1irá nc1iro n2c1ist n2c1i2ta n2c1i2z nc1íj n2c1ír n2c1ív n2c1íz nc1kl nc1kre n2c1ob n2c1okta n2c1o2li nc1orv n2c1ott n2c1ölt. nc1öss ncö2t nc1Å‘r nc1pl nc1pr n2cs1ab n2csac n2csad n2cs1ag n2cs1ajt n2csakt ncs1alap n2cs1alj n2csam n2csan ncsa2p1á2g ncsa2r ncs1ara ncs1arg ncs1aro n2cs1au n2csaz ncs1ágr n2cs1ál n2c3sár. n2cs1á2rá ncs1árb n2cs1árn ncs1árr n2csáru n2csáta n2csátá n2csátv ncs1elf ncs1é2rés n2csérte n2cs1érté n2c3sérü n2c3séta ncs1i2kon ncs1int n2cs1io n2csipa n2csirá ncs1irt n2cs1ism n2csita ncs1izz nc3síki n2cs1í2rá ncs1ízt n2csob n2cs1oks n2csokta n2csosz n2cs1ö2lé n2cs1ös n2cs1öz n2cs1Å‘r. ncs1Å‘rö ncs1s n2csur ncsu2t ncs1uta n2csút n2csüg n2csüt ncs3zár nc3sze nc3szó nc3szö nc1tr n2c1ud n2c1ug nc1uj n2c1ur n2c1új nc1út n2c1üg n2c1üt nc1üv n2c1üz n2c1űr nc1ya n2c3zá nc3ze n2c3zó nc3zö nc3zs nc3zü nczy1i nd1abl nda1br nd1a2dat nda2dás nd1add n2d1ajá n2d1akc n2d1akk nd1akt n2d1alj n2d1alr nd1ann nd1apó nd1a2rán nd1arr ndat1any nda2tap nda2t1eg nda2tin nda2tir nd1azo nd1azt nd1azz nd1á2rak ndás1s nd1bl nd1cl nd1dr nd1ebb n2d1e2kéb n2d1e2kéin nde2köz n2d1elf n2d1ellen n2d1elm nd1elö nde2mer nde2mu nde2m1ü nde2ná n2dennek nde1p2 nde2r1a nde2rál nde2ráz nde2rel nde2ro n2d1erÅ‘ n2d1e2sett n2d1e2sés n2d1e2sÅ‘ nde2sza n2d1e2szü nd1eza ndé2go ndé2kel nd1ékez ndé2kö n2d1éle nd1élm n2d1ép ndé2raj n2d1éssz n2d1észh n2d1észl n2d1észr n2d1é2te n2d1étt nd1fr nd1gr n2d1iga n2d1ige n2d1ill n2d1i2na. n2d1ing n2d1ins n2d1i2onj ndi2óé2 ndi2óf ndi2óm n2d1isza ndí2j1a2da nd1kl n2d1oá ndo2k1út ndo2rál n2d1orni ndo2r1ú n2d1ou n2d1ov ndóé2r nd1ó2ni n2d1ó2rá ndö2b nd1öss n2d1ö2z n2d1Å‘r. n2d1Å‘2r1a2 n2d1Å‘rb n2d1Å‘rc n2d1Å‘rd nd1Å‘2reg nd1Å‘2rei nd1Å‘2rek nd1Å‘2rel nd1Å‘2rék nd1Å‘2rén nd1Å‘2rér nd1Å‘2rét nd1Å‘2rév n2d1Å‘rf n2d1Å‘rg nd1Å‘2rig nd1Å‘ris nd1Å‘2rit n2d1Å‘rj n2d1Å‘rk n2d1Å‘rl n2d1Å‘rm n2d1Å‘rn nd1Å‘rok nd1Å‘ros n2d1Å‘2rö nd1Å‘rÅ‘s n2d1Å‘rp n2d1Å‘rr n2d1Å‘rs n2d1Å‘rt n2d1Å‘rú n2d1Å‘rv nd1pr nd2raz n1d2ruk nd1sl nd1sp nd1st ndszá2m1út ndsze2r1e2l ndtár2s1a2d ndu2t n2d1uta nd1új ndú2rá nd1úsz n2d1üg nd1ünn n2d1üz n2d1űr. n2d1űrr n2d1űrt n2d1űz ndy2b ndy2h ndy2n ndy2r ndy2t ndy2v nd3zav nd3zár n2d3ze n2d3zó n2d3zu 1ne nea2j nea2k nea2la ne2alo nea2n nea2r ne2bé ne1bl ne1dra ne1d2rá ned2v1el ne1dy 2n1e2dzé 2neff 2n1e2ger 2n1egér. ne2g1ö n1egy. n1egyb ne2gyek ne2gyen ne2gyes ne2gyet ne2gyez 2n1e2gyé n1egyf n1egyh ne2gyig n1egyk n1egym n1egyn n1egyr 2n1egys n1egyt ne2gyün nehéz1 2n1ehh nei2g neí2r ne2k1aj ne2k1a2n ne2kát ne2k1e2g nek1erÅ‘ nek1é2jé ne2kék nek1ékn nekie2 2nekj nek1k2 2nekka 2nekki ne1klu ne2k1ok nekö2r ne1kreá nek1t2 ne2k1üg nek1üldö ne2lag ne2laj ne2l1an ne2lál nelá2r ne2lef ne2leg n1elegy. nel1e2le nele2ma nelem1el nel1e2més nel1eng ne2ler ne2l1ép nelfé2 nel1g2 2n1elha ne2l1id ne2lim ne2l1in n1elló 2n1elnev ne2l1ot ne2l1ó2 ne2l1ö2 2n1e2lÅ‘a 2n1e2lőá ne2lÅ‘d ne2lÅ‘f ne2lÅ‘hí 2n1e2lőí 2n1e2lÅ‘ké ne2lÅ‘l 2n1elÅ‘ny 2nelÅ‘rej 2n1e2lÅ‘té 2n1eltér ne2l1ül 2n1elz ne2mak 2nemba 2n1ember neme2g nem1egy 2nemele 2n1emelk ne2m1esz ne2m1ér. ne2m1id nem1isz 2nems 2nemul 2n1eng 2n1enn nen2sa nense2s nen2s1ese nens3s nen2s3z 2n1enyv 2n1enz ne2ob ne2od ne2of ne2og ne2oh ne2oko ne2ola ne2oli ne2oro ne2pad ne2pelm ne2p1est ne2pid ne2p1ó2r ne2p1ut ne2r1a2d ne2r1a2k ne2r1a2n ne2r1a2r ne2r1as ne2raz ne2ráb ner2co ne2r1e2g n1e2rej ne2r1e2ke. 
ne2r1e2l ner1emel ne2r1er ne2rez ne2rég ne2r1él ne2r1ép ne2r1étt ne2r1id ne2r1iga ne2r1il ne2r1i2m ne2r1inf ne2r1ing ne2r1int ne2ris ner1k2 ne2r1ol ne2ror ne2r1os ne2ró ne2r1ön 2n1e2rÅ‘ 3n2e3rÅ‘kü ner1s ner1tr ne2r1u2 ne2r1üg ne2s1aljá ne2sas ne2s1ál ne2sár 2n1e2setb 2n1e2setr 2n1e2sés 2n1e2sÅ‘ nes3sza nes3szá 2n1esté 2n1esti ne2s1ü2v nesz1ál ne2s3zár ne2sz1él ne2sz1ű2 ne2t1ab net1a2la ne2t1a2ny ne2tál ne2t1át1 ne2t1e2g net1elá net1elm ne2t1elÅ‘a ne2t1eml net1este ne2t1esz ne2t1etet ne2t1eti ne2t1é2k ne2t1é2l ne2t1ér. ne2t1érd ne2t1éré ne2t1é2rÅ‘. ne2t1é2rÅ‘k ne2t1érr ne2térte ne2t1értÅ‘ ne2t1é2rü ne2t1ész ne2t1é2ve. ne2tid ne2t1i2ko ne2t1int ne2tip netké2sz1ü ne2t1o2k ne2tol net1old ne2t1Å‘2 net2tév ne2tun ne2t1ut netü2l net1ülé ne2t1ü2z 2n1e2vÅ‘ 2n1evv nexpor2t1Å‘2 ne1yé 2n1e2zer 2n1ezred 2nezüs 1né 2n1ébr 2nédl né1f2r 2n1ég. 2n1é2gek 2n1é2geté 2n1é2getÅ‘. 2négetÅ‘h 2n1é2getÅ‘k 2négetÅ‘n 2négetÅ‘t 2n1é2gé 2n1égj 2n1égn 2n1é2gÅ‘ 2n1égs 2n1égt 2n1é2gü né2gyer né2gyén né2gy1o né2gyök 2n1é2hes 2n1éhs né2kaj né2kak né2k1a2n né2kar né2k1á néke2l nék1ele né2ker né2kév né2kid nék1isz 2n1é2kí né2kó nékü2 né2kül n1é2les 2n1é2let 2n1élm né2lÅ‘i né2lÅ‘n 2néneke 2n1é2neké népa2 né2pad né2p1ak né2p1al né2p1an né2pap né2p1as né2pau népá2 né2p1ál né2p1ár né2pát né2p1áz né2peg né2p1e2l népe2r nép1etet né2p1etn né2pev né2p1és nép1f2 2né2pí né2p1o né2p1ö né2pÅ‘ nép1s né2p1us né2pú 2népül néra2 né2raj né2r1an n1érd 2nérde 2n1é2rem né2ré. né2rés né2ri. né2rin né2rip né2rö 2n1é2rÅ‘. 2n1érté né2rü 2n1érz né2s1e2l né2s1ég nés3szer nész1ak nész1al né2sz1á né2szeg né2sz1e2l né2sz1emb né2sz1e2sz né2sz1ék né2szik né2sz1í nészkész1 né2szó né2sz1ö2 né2szu né2sz1ú né2szül né2t1eg 2n1é2tel né2t1es 2n1é2tet néti2 né2tir né2tö né1t2r néva2 név1ada név1adá né2vaj né2var né2vav né2v1ág 2n1évbú 2n1é2ve. 2névei 2n1é2vek néve2l né2v1ele név1elÅ‘ 2né2vem 2névenk 2névent né2v1e2r né2ves név1esz 2né2vet 2névéb né2v1é2l né2v1éri né2vérÅ‘ né2v1érz né2vis 2névny né2v1o né2vö né2vÅ‘ né2v1u2 né2vú né2vün né2za néziu2m1i2 nfe2lemé n1f2la n1f2lo nfluor1e nfol2ta n1f2rak n1f2rá n1f2rek n1f2ri n1f2rí n1f2rö n1f2ru nfüs2t1ölé n2g1abl n2g1a2dat n2g1a2dá ng1a2dó n2gadó. n2gadóa n2gadób n2gadói n2gadój n2gadók n2gadór n2gadós n2gadót n2gadóv ng1akn n2g1akr n2g1akt nga2lag nga2lak nga2lap ng1alát ng1alel n2g1alg n2g1alj n2g1a2ny nga1p2 n2g1a2rán n2g1arc n2g1art n2g1arz n2g1asp ng1assz n2g1aszt nga1tr n2g1a2u n2g1ava nga2zon ngá2c n2g1á2g ng1állá ng1állo ng1álló n2g1áp ng1á2rak ng1áras ng1árat ngá2rát ngá2ráv ngá2ré n2g1árnya n2g1árta ngá2ruk n2g1á2rú n2g1á2szaib n2g1á2szair n2g1á2szait n2g1á2száv n2g1á2szé n2g1á2szi. n2g1á2szig n2g1á2szoké n2g1á2szos n2g1átá ng1átc n2g1á2t1e2 n2g1á2ti n2g1átk n2g1átl n2g1átm n2g1áts n2g1átv n2g1á2zój n2g1á2zós n2g1áztato ng1bl ng1br ng1d2r ng1edz ngegész1 n2g1ela nge2lis nge2r1a nge2rál nger1e2sz nger2im nge2ro n2g1esem n2g1eszk nge2tika n2g1ex n2g1é2g ngé2les n2g1épí n2g1érc ng1érl ng1é2rÅ‘. 
ng1érté ngé2sa n2g1éte ng1fr ng1g2r ng3gyi ng3gyo n2g1i2d ngi2g n2g1iga n2g1ige n2g1igé ng1i2ko n2g1ikr n2gill ngi2m ng1imi ng1inf n2g1ing ng1ins ng1iro n2g1izg ng1íg ng1ír ng1ív ng1íz ng1kl ng1kr ng1kv n1glec ngmeg1 n1g2nó n2g1of n2g1op ngora1 n2g1ord n2g1org n2g1ork ng1osto ng1oszt n2g1otth ngó2riá n2g1öb ng1önt ngö2r ng1örö ng1örv n2g1öv n2g1ö2z ng1Å‘rö ngÅ‘2z1Å‘s ng1pr ng1ps n1g2ram ng2rádih ng2rádj n1g2ráf ng2run ng1sh ng1sk ng1sp ng1tr n2g1ud n2g1ug n2g1uj n2g1und ng1u2ra n2g1uta n2g1új ng1útt n2g1üd n2g1ü2g ng1ür ng1üt n2g1üz ng1űr ngy1a2gya ngya2l1ó2 ngy1ass n2gy1á2l n2gy1em n2gy1es n2gyez n2gy1é2d ngy1éks ngy1ékt ngy1ér. n2gyid n2gyim n2gy1ut n2gy1ü2lÅ‘ nha2b1i nhal1k2 nha2sábr nhá2z1alt nhá2zip nhá2zol nhá2zó nhá2z3s nhe2d3z nhe2i nhú2sá nhús3s 1ni ni2aa ni2aá ni2abo ni2ac ni2ad ni2ae ni2aé ni2afo nia1g2 ni2agy ni2ah ni2aí ni2aj ni2ala ni2alá ni2am ni2ao ni2aó ni2aö ni2aÅ‘ ni2a1p ni2ar ni2asza ni2aszá nia1t2 ni2ato ni2atr nia3u ni2aü ni2av ni2az niát1a2 2n1i2bo ni1br ni2c1e2l ni2cha ni2c3he ni2c3hé ni2c3ho ni2de. 2n1i2deg 2n1i2dÅ‘ ni2dü ni2et 2n1ifj 2n1i2gal 2nigaz. 2n1i2gá ni2ge. ni2géj 2n1i2gén ni2géz 2nigm 2n1ihl ni2keb ni2k1el ni2k1em ni2k1érté nikk2 ni1klu ni2konr 2n1ikrit. ni2kud n1ille 2nillu 2n1i2ly 2nimád n1i2máé 2n1imp 2n1i2na. ni2nas ni2n1áll 2nind 2n1info 2n1infú nin2gas nin2gá 2n1inge. 2ningei nin2g1e2l nin2g1ó2 nin1g2rá nin2gu 2n1ingük ni2n1i2p ni2nol 2n1inté 2n1i2onb ni2onc ni2onh ni2onj ni2onk 2n1i2onn 2n1i2ono 2n1i2onr 2n1i2ont ni2óa ni2ód ni2óe ni2óp ni2óta ni2ótá ni2óü nióvá2 nip2p1i ni1pr ni2rat 2nirá nirész1 2n1irg 2n1irh 2n1irk 2n1i2rod ni2rón ni2si. ni2s1in nisü2v nisz1ak ni2szeg ni2szeml ni2szese ni2sz1é2l ni2szip ni2szis nisz1okt nisz1ol 2n1iszon ni2szö ni2sz1Å‘ ni2szu ni2t1a2d ni2t1ag ni2t1aj ni2tal nit1a2la ni2t1as 2n1i2tat nit1ell ni2t1ép ni2t1ér ni2tim ni2t1in ni2tir ni2tob nit1old nit1olt ni2t1osz ni2tür niu2m1i2o 2n1i2vad 2n1i2var 2n1i2vó ni2xa ni2xÅ‘ ni2zén 2n1izg 2n1izmá n1izom ni2zsol 1ní ní2gé ní2ja ní2ju níli2a ní2ra2 2n1í2rá ní2r1é2 ní2r1ot 2n1í2ró ní2r1ú 2nítél nítÅ‘a2 ní2ve 2n1í2vi ní2ze ní2zű nk1a2dós nkai2k nk1ajt n2k1akk n2k1alv n2k1anó nka2nyá nkaó2 nka1p2l nk1app nka2ris nka1s2k nka1sp nka2tom nka1t2r nk1azo n2k1ág nk1árad nká2rál nká2rol nká2ruk nká2sad nká2sal nká2sav nkás3s nká2s3z nkáta2 n2k1átj n2k1átm n2k1áts n2k1átu nk1br nkci2ósű nk1dr nk1e2cse nk1e2dz nk1ela n2kellá n2k1eltá nke2r1a nk1ered n2k1e2rÅ‘ n2k1e2ti n2k1e2vé n2k1é2l nké2p1el nké2p1és n2k1épí n2k1érc nk1é2szé nk1g2r nkia2 nkiá2 nk1i2ga n2k1igé n2k1imi nk1ind n2k1ing n2k1int n2kinz nk1i2on nki2s1i2 nk1ism nkká2 nk1k2r nk2lar n1k2ló n2k1ob nk1oke nkos3s n2k1oszl n2k1ox n2k1óné n2k1óni nkó1p2 n2k1ó2ri n2k1ö2lé n2k1ö2lÅ‘ nk1öss nk1ötl nk1Å‘rs nk1pl nk1pr nk2rac n1k2ris n1k2rí nk2roma nkron1n nk1sp nk1st nk1sz2 n2k1ud n2k1u2ra n2k1u2s nk1utal n2k1utá n2k1uz n2k1új n2k1ús n2k1üg nlac1 nla2pa nlap1e nla2p1osz nla2tal nla2t1a2n nla2t1e2 nla2t1é2te nlás3s nle2g1á nleí2 nle2t1o nletü2 nle2tüz nlé2tés nlé2t1é2ve. nme2g1a2 nme2g1é nműé2n nműt2 nna2ié nnak2 nna1kr nn1alv nna1p2ré nna1s2 nn1áll n2n1eml nne2p1a2 nne2se nn1ess nn1esz n2n1e2tet n2n1ett nn1evez nné2get nn1éri n2n1id nn1irt nn1ors nnőé2h nnőé2n nn1sy n2n1ug nn1ú2s n2n1ü2c nnü2l nn1ülÅ‘ nn1ült nn1ülv n2ny n3nyak n3nyalá nny1a2n n3nyar nnyá2r nny1áz n3nydr nny1ell n3nyelÅ‘ nny1elt nny1elvá nny1elvez nny1e2sett nny1e2sés nny1e2sÅ‘ nny1ég nny1é2ké nny1é2ki nnyié2h nnyié2ne nnyi2g nny1igé n3nyjé nny1old nny1on nny1öz n3nyst 1no no1d2rá 2n1o2dú 2nog. 
2nogh 2nogj 2nogn 2nogo 2nogs 2nogt 2nogv no2ir 2nokal nok1ala no2k1a2r no2kau no2k1ál no2k1é2l no2kép no2k1ing nok1ist nok1k2 2n1okke 2n1o2koz no2kö no2kÅ‘ no1k2ro nok1s noksz2 no2kur no2kúr no2kü 2n1o2la nol1f2 2n1o2lim 2n1olló 2n1o2ly no2m1a2c nom1p nona2 no2n1al nonc3c non2ch nonc3sz no2n1e non1k2 no2nö no2nÅ‘ non1s2 no1ny no2ok 2n1o2pe no1p2la no2r1al no2r1a2t no2raz no2r1el no2r1iv no2rí 2n1ormo 2n1orré nor1s2 no2rü 2n1orvo no2sál no2se nossz2 nos3sze nos3szf nos3szi no1s2tab nosza2u no1sz2f 2noszl no1t2r 2n1otth no1ty no2ui 2n1o2vi. no2xi 1nó nóa2k nóá2r nó2ce nó2ch nó2da. nó2d1a2n nó2dák nó2d1e2s nó2d1is nó1fl nó1fr nó1k2l nó2mac nó2m1em nó2mik nó2m1u2t nó2mü nó2nib nó2non nó1p2r n1ó2rac nó2r1ad n1ó2raf 2n1ó2rai nó2r1a2l n1ó2rar n1ó2ras n1ó2rat nórádi2 nó2rás nó2riá nó2rü nós1akk nó2seg nó1sl nó1s2p nó1s2rá nós3s nó1s2ta nó1sz2 nós3zene nós3zené nót1ala nó2til nó1trá nó2vó 1nö nö2ka nö2ká nö2k1el nöke2t nök1eti nö2k1é2j nö2k1ék nö2k1é2l nö2k1ér. nö2k1éri nö2k1érté nö2ko nö2kó nö2ku nö2kú n1ö2le n1ö2lé nö2lÅ‘ n1öml 2n1ö2nö 2n1önz nö2rö 2n1össz 2n1ö2sz nö2te nö2té nö2ti n1ötl nöt1t2 nö2tü 2n1öv. n1övb n1ö2ve. nö2vön 2n1övr 2n1ö2zön 1nÅ‘ nÅ‘a2l nÅ‘a2n nőá2g nÅ‘1br nÅ‘2ca nÅ‘2ch nÅ‘2csár nÅ‘2csÅ‘s nÅ‘2csü nÅ‘e2r nőé2l nÅ‘fé2l1é2 nÅ‘1kl nÅ‘1pl nÅ‘1pr 2n1Å‘r. 2n1Å‘2r1an 2n1Å‘ras 2n1Å‘rb 2n1Å‘rc 2n1Å‘2rei 2n1Å‘2réh 2n1Å‘2rén nÅ‘2rét. nÅ‘2rétÅ‘ 2n1Å‘2rév 2n1Å‘rg 2n1Å‘rh 2n1Å‘2ri 2n1Å‘rk 2n1Å‘rl 2n1Å‘rn 2n1Å‘2rö 2n1Å‘rr n1Å‘rs 2n1Å‘rt 2n1Å‘rü nÅ‘2rül nÅ‘2rün 2n1Å‘rv 2n1Å‘rz nÅ‘2s1a2l nÅ‘2s1e2l nÅ‘2ses nÅ‘2s1í2r nÅ‘2sok nÅ‘1spe nÅ‘s3s nÅ‘1sz2t nÅ‘1t2r nÅ‘t2tin nÅ‘u2t nőü2l npa2dal npe2s npesz1 np2la np2lá np2le np2lé np2lo np2lü npon2t1a2 npo2r1a np2ra np2re np2ré np2ri np2ro np2ró np2sz npu2t1a npu2t1á2 npu2t1e2 npu2t1i nrefle2x1í2 nren2da n2s1a2d n2s1akc ns1alk ns1a2rá ns1ass n2s1au nsa2vár nsa2v1e2 nsa2vil nsa2vol n2s1a2z nság1g ns1áll n2s1á2rak ns1áta ns1átv ns2ch nsc3he. nsc3hei nsc3hé ns1dr ns1e2lé ns1elm ns1eln ns1elo ns1els ns1elv n2s1ene n2s1esz nsé2gel nsé2g1éj nségü2két n2s1ék n2s1é2l n2s1ép ns1é2ve. ns1fr n2s1i2d ns1imp ns1inf n2s1ing n2s1iró ns1isk nsi2z ns1izo n2s1í2r n2s1í2v n1s2kál ns1kl n2s1ob n2s1ol n2s1op ns1osz n2s1ott n2s1ó2r n2s1ös ns2pec ns1p2l ns2por n1s2rá ns1st ns1sy ns3szer ns3szi ns3szo ns3szö n1s2tab n1s2tác nste2i n1s2tim ns2top nsu2r ns1ura n2s1u2t ns1úr n2sz1a2d nsza2k1ü nsz1alk n2sz1a2n ns3zará n2sz1á2ba. ns3zárá nsz1á2ru n2sz1it n2sziz n2sz1omm nsz1p2 n2szut n2sz1ü2z nsz1z nt1abla n2t1abr nta2cél ntad2 nt1a2dó nt1a2ga. n2t1agg nta2gyu nta2gyú ntai2k n2t1ajk n2t1ajt n2t1akc n2t1aktá nt1alát nt1alel n2t1alf nt1anta nt1a2ra. nta2ran n2t1a2rá n2t1arc n2t1ark nta1s2p nt1assz n2t1aty nt1a2ura ntaü2 n2t1azo nt1ábr ntá2cs nt1ácsi nt1ácso ntá2r1a2d n2táram ntá2ráv nt1árny ntá2ruk n2t1ásó n2t1ásvá n2t1áti n2t1átl n2t1átr n2t1áts n2t1átv ntá2zsi nt1bl nt1br nt1dr nt1ebe n2tedén nt1edi nte3gá n2t1ela n2t1elb ntele2mé nt1elf n2t1eljá n2t1elk n2t1ellen n2telmél n2telnö n2t1e2lo nte2lőá n2t1elr n2t1elto n2t1elvá n2t1elz n2t1ember n2t1e2mel n2t1eml n2t1emu n2t1endr n2t1ent nte2rál nte2rele nte2r1in nter2v1e2l n2t1erz n2t1esth n2t1eszk n2t1eva nt1e2vet nt1e2vez nt1ég. nt1é2gé n2t1é2kek nté2kes nté2ké n2t1éks n2t1é2le n2t1é2lés n2t1élm n2t1élt nt1élű n2t1é2neke n2t1épü n2t1érin n2t1érmé n2t1érté n2t1érz ntés3s nté2ter n2t1é2ven n2t1é2vet n2t1é2véb n2t1é2vén n2t1é2vér n2t1é2vét nt1évh nt1évk nt1évt nt1fl nt1fr nt1gr nt2hon ntia2n ntia2t nt1ido n2t1ige nti1k2l ntil2lát. n2t1ille n2t1imp n2t1info n2t1ing. n2t1inga nti1nk. 
n2t1inté ntió1 nti2par n2t1irá n2t1iro n2t1isk n2t1ism n2t1iste nti2vás nt1izo n2t1íg n2t1íj nt1írá n2t1ívb n2t1í2z nt1kl nt1kr n2t1of ntoka2 n2t1o2k1al n2t1okl n2t1olda n2t1oldó n2t1o2ly nto2m1e2 nt1opc nto2ras nto2rék nto2rin nt1ormá nt1orro n2t1oszl n2t1oszt n2t1otth ntó1p n2t1ó2rá n2t1ó2ri ntót2 ntó1tr nt1ökl nt1ö2kö nt1ö2lÅ‘ nt1önt n2t1örd ntÅ‘a2 ntőé2n nt1Å‘rb nt1Å‘rl nt1Å‘rn n2t1Å‘z nt1pl nt1pr nt2rans ntransz1 ntranszk2 n1t2réf n1t2róf nt1ry nt1sh nt1sk nt1sp nt1st ntsz2 ntszá2m1é nt1szv nt1t2r n2t1udv n2t1ug n2t1uj ntu2mor ntu2n n2t1una nt1und ntun1i nt1u2rá ntu2sza nt1utá n2t1úg n2t1új ntú2ral ntú2ran nt1ú2sz n2t1üg n2t1ü2lÅ‘ nt1ült n2t1üt n2t1ü2v n2ty1a2l n2ty1a2n n2tyál n2ty1e2l n2ty1él n2ty1ik n2ty1int n2ty1iv n2tyí n2ty1Å‘2r n2tyut 1nu n1ucc nu2ga nu2go 2n1ujj nu1k2la nu1klu nu2mü 2n1und 2n1u2ni 2n1u2no 2n1unt nu2ram nu2rá nu2sal nu2sas nu2s1av nu2s1e nu2s1érté nu2sik nu2sol nu1s2po nuss2 nus3szi nu2szab nu2s3zav nu2szir nu2szí nu2sz1ol nu2tal nu2tat nu2taz nu2tál nu2te 1nú n1újd nú2jí 2n1újs núkész1 nú1pr 2n1úr. 2n1úrb 2n1úrh 2n1úrn 2n1úrr 2n1úrt 2n1ú2szá 2nútb 2núth 2nútj 2n1útk 2n1útn 2nútr 2n1úts 2nútt 2n1útv 1nü nü1bl 2n1ü2dí 2n1üdv nü1fr 2n1ügg nü1gr 2n1ügy. 2n1ügyb 2n1ügyc 2n1ü2gye. 2n1ü2gyei 2n1ü2gyek 2n1ü2gyes 2n1ü2gyet 2n1ü2gyé nügy1és 2n1ügyh 2n1ü2gyi 2n1ügyk 2n1ügyl 2n1ügyn 2n1ügyr 2n1üld nü1pr nü2rí nüst2 nü1str 2n1ü2tem nü2tés nü2ti nü2tÅ‘. nü2tÅ‘k nü2tÅ‘s nü2tü nü2vö nü2zé 2n1üzl 1nű nű2zé nű2zÅ‘ nva2su nvágya2d nvá2gy1ada nvá2gy1ón nvá2r1al n2v1át nven2ta nvé2d1a nvé2d1Å‘2r nv1ind nvona2l1út n2v1os nv1sk nx1ar n2y 1nya 2ny1abl 2ny1abr nya2cél 2ny1adag 2nyadás 2nyadó nya2dóz 2nyaff nya2gar 2nyagáh 2nyagár 2nyagáv 2nyagc nya2gen 2nyagi 2nyagj 2nyagm 2nyagos 2nyagta 2nyaguk 2nyagú 2nyaján 2nyajk 2nyajt 3nyak. nya2k1ala nya2kara 2nyakc nya2kel nya2k1é2k nya2kiz 2nyakt. 2nyaktá 2nyaktb 2nyakti 2nyaktj 2nyakto 2nyaktu 2ny1a2lag 2ny1a2lak. 2nyalakj 2nyalakk 2ny1a2lakr 2nyalany nya2lapb nya2laph nya2lapo nya2lapp nya2laps 2ny1alász 2ny1alb ny1alép 2ny1alm ny1alte 2nyaltú 2nyamal 2nyanal 2nyang 2ny1ant 2nyaot ny1a2pad nya2pát 2nyapp nya2rén 2ny1arma 2ny1arz nya1sp 2nyassz 2nyaszt 2nyatká nya1trá 2nyaty 2nyauk 2nyavat 1nyá 2nyábr 2nyádná 2nyádt 2nyáék 2ny1ág 2nyáld 2nyállí nyá2lom 2nyámék 2nyámná 2nyáp 2ny1á2rad 2ny1á2rai 2ny1á2rak 2ny1á2ram 2nyáras 2ny1á2rat nyá2ráb nyá2rán nyá2rát nyá2ráv 2nyárc 2nyáre 2nyárh 2ny1árj 2nyárk 2nyárp 3nyárt. 2nyá2ru 2nyárú 2nyárv 2nyásó nyá2szak nyá2szár 2nyáta 2nyátá 2nyáté 2nyátf 2nyáth 2nyátk 2nyátm 2nyátn 2nyátö 2nyátr 2nyáts 2nyátt 2nyátü 2nyátv ny1bl ny1br ny1cv 1nydre. 1nye 2nyedén 2nyedzé 2nyeff 2nyegyl 2nyegys 2nyeke. 2nyekei 2nyekéé 2ny1elb 2nyelc 2nyelef 2nyelemz 2ny1elf ny1elha 2nyeljá ny1elk 2ny1ellá ny1elma 2nyelnö 2nyelőí 2ny1e2lÅ‘Å‘ 2nyelÅ‘z 2ny1elr 2nyelsÅ‘ 2ny1eltá 2nyeltér 2nyelto 2ny1elül nyel2vesz 2nyelvev 2nyember 2nyembl 2nyembr 2nyemel 2ny1emit 2ny1eml 2nyenc 2nyene 2ny1eng nye1p 2nyerdÅ‘ 2nyerej nye2rekl 2ny1erk 2nyerny 2nyerÅ‘m 2ny1esemé ny1eser 2nyeseté 2nyesél 2nyestj 2nyeszk 2nyetik 2nyeve nye2vez 2nyevé 2nyevo 2nyevÅ‘ 2ny1ex 2nyezr 2nyezüs 1nyé 2ny1ébr 2nyéhen 2nyéhes 2nyéhs ny1é2jek 2nyélet 2nyélm 2nyéneke 2ny1é2neké 2ny1é2nekn 2ny1ép 2nyérc. 2nyérem. nyé2r1e2s 2nyérin 2nyérÅ‘. 2nyérÅ‘i 2nyérÅ‘t 2nyérték nyérü2 2nyérv. 2nyérzé 2nyétel 2nyétke 2nyétl 2nyév. 2nyévb 2nyéve. 
2nyévei 2nyévek 2nyévem 2nyéven 2nyéves 2nyévet 2nyévez 2nyévéb 2nyévér 2nyévét 2nyévév 2nyévf 2nyévi 2nyévk 2nyévm 2nyévn 2nyévr 2nyévs 2nyévt 2nyévü 2nyévű 2nyévv ny1fl ny1f2r ny1gl ny1gr 1nyi 2nyibo 2nyideg 2nyidej 2nyidÅ‘ nyié2b 2nyifjú. 2nyifjúb 2nyifjúé 2nyifjúi 2nyifjúké 2nyifjún 2nyifjúr 2nyifjús 2nyifjút 2nyifjúv 2nyiga 2nyiha 2nyihl 2ny1ill 2ny1ima 2nyimá 2ny1imb 2nyimp 2nyinas 2nyinc 2nyind 2nyinf ny1ing. 2nyinj 2nyins 2ny1int 2nyinv 2ny1i2p 2nyirá 2nyiri 2nyirod 2nyirt 2nyisko 2nyism 2nyisp 2nyist 2nyivad 2nyivás 2nyivó 2ny1izn 2ny1izt 1nyí 2ny1íg 2ny1íj 3nyíl 2ny1ín 2ny1ív 2ny1íz 1nyjéb 3nyk. nykar1óra 1nyket. 1nykk ny1kl 1nykn ny1k2r ny1k2v 1nym. 1nymet. 1nymt 1nyn. 1nyo 2ny1ob 2nyodú 2ny1of 2ny1okke 2ny1okl 2nyokos 2nyokoz 2nyokta 2ny1o2laj nyolc1c 2ny1olda 2nyoldá 2nyoldó ny1olló 2ny1oltár 2nyoltás 2nyolvas 3nyom. 3nyomat. 3nyomatk 3nyomatom 3nyomo 3nyomt 2ny1op ny1orc ny1orm ny1ors ny1orv 2ny1osko 2nyosto 2nyoszl 2nyoszt 2ny1ott 2ny1ov 2ny1ox 1nyó ny1ó2ni nyó2rác nyó2rán 2nyóri nyó2s1ü 1nyö 2ny1öb 2ny1öc 2ny1ö2l ny1önt 2ny1öv 1nyÅ‘ 2nyÅ‘rs ny1pl ny1pr ny1ps 3nys. ny1sc 3nysek ny1sh ny1sk ny1sl ny1sp nys2t 1nyst. ny1sta ny1stá 1nyu 2nyud 2nyuj 2nyuká 2ny1ukrá 3nyul 2nyuni 2nyuno ny1urá 2nyut ny1u2ta ny1u2tá 1nyú 2nyújd 2nyújé 2nyújí 2nyújs 3nyúl nyú2lÅ‘ 2ny1úr. 2nyúrb 2ny1úrh 2nyúri 2nyúrk 2ny1úrn 2ny1ú2ro 2nyúrr 2ny1ú2sz 2nyútb 2ny1úté 2nyúth 2ny1úti 2nyútj 2nyútk ny1útl 2nyútm 2nyútn 2nyútp 2nyútr 2nyútt 2nyútv 1nyü 2ny1üd 2ny1ü2g 2ny1üld ny1üle 2nyünn 2ny1ür 2ny1üt 2nyüze 1nyű 2ny1űr. 2nyűrb 2ny1űré 2ny1űrh 2ny1ű2ri 2ny1űrj 2ny1űrl 2ny1űrn 2ny1ű2rö 2nyűrr 2ny1űrt 2ny1ű2zé 2ny1ű2zÅ‘. 2nyűzÅ‘b 2nyűzÅ‘en 2nyűzőé 2nyűzÅ‘h 2nyűzÅ‘k 2nyűzÅ‘n 2nyűzÅ‘r 2nyűzÅ‘t 2nyűzÅ‘v ny2vék ny2v1isk ny2vó ny2vös ny2vÅ‘ ny2vú nyzé2ke nza2c n2z1acé nz1adá nz1ado nz1adó nz1a2ga nz1agg nz1ajta nz1akc nz1akk nzak2o n2z1akt nz1ald n2z1alk nz1ang n2z1a2ny nz1app nz1ara nz1ará nz1arc nz1ari nz1aut nz1á2gy nz1áll n2z1á2rad nz1árny nzá2r1ó2ra n2z1á2ru nzá2s1e2 nz1ásó nzás3s nz1át1a2 nz1d2r n2z1e2g n2z1elb n2ze2leme n2z1e2lér n2z1elf n2z1elha n2z1elis n2z1elk n2z1ellá nz1ellen n2z1elm n2z1elny n2z1elo n2z1e2lőá n2z1e2lÅ‘l n2z1e2lÅ‘t n2zelÅ‘z n2z1els n2z1elta n2z1eltü n2z1elver n2z1elvé n2z1elvo nz1ember n2z1e2mel nz1e2més n2z1emi nz1eml n2zener nz1erÅ‘ nzer2t1a2 nzer2v1a2d nzervé2t nzer2v1éte nzer2vi nze2su nz1eszk n2z1ez n2z1ég nzé2k1el n2z1é2l n2z1ér. n2z1érd n2z1é2rem n2z1érk n2z1érm n2z1érté n2z1érv n2z1érz nz1étv nz1gr nzi2a n2z1i2ga n2z1igé n2z1ill nzi2m1a2 nzi2má nzi2mi nzi2n1á2 nzi2n1o nzi2n1ó2 n2z1i2p nz1irá nz1ism n2z1ist nzi2tár nzite2 nzi2t1el nzi2ten nzi2t1í2ve. n2z1íb nz1íg nz1ín nz1kl nz1kr n2z1okl nzo2lin nzo2ló nzo2n1a nzo2né nzo2rin n2z1osz nzókész1 n2z1ön nzö2r nz1örö n2z1ös n2z1ö2v n2z1öz nz1pl nz1pr nz3saj n2z3sár n2z3sát n2zsáv nz3seg n2z3ser nz3sik n2z3sis n2z3sod nz3sor n2z3só nz3s2p nz3s2t nz3sz nztá2r1a2d nz1t2r n2z1uj nzu2l1a nzu2mé nz1u2ra nzu2san nzus3s n2z1u2t nz1új nz1ú2t n2z1üd n2z1ü2g nz1üs nz1üv n2z1ü2z nz3zs 2o. o1a oa2cé oa1fr o2aki o2a1k2v o2ami oa2nali o2asi o2aszó o1á oá2ga oá2r1a2n oá2ril oá2rí oá2r1ol oá2zs oba1b2 oba1d2 o2b1alj obal2t1a2 oba1p ob1a2ra obau2 obás3s ob1átm ob2b1eg ob2bö o2b1eg ob1ele o2b1e2m o2b1erÅ‘ o2b1ez o2b1é2g o2b1érz obi2ki obi2k1ó2 obi2lin obi2lip obina2 ob1ina. obi2n1al o2b1ing o2b1i2s ob1ív 1objek ob1kl 1o2boa o2b1oll obo2r1a obo2rin obo2r1os obo2t1á2 obo2tin obókész1 o2b1ó2né ob1ó2rá ob1öt ob1pr 1obst o2b1ut o2b1ú2s ob1üg ob1ür ob1üt ob1űr oca2ké o2c1ág o2c1ál oc1er oc1é2k o2c3hi. 
oc3hok oc3hot oci3a ocie2 oci1k2r oció2 oci1p oci1sz2 o1c2kef oc2k1é2l ocké2n ock1éne o1c2kér o1c2két o1c2ki. oc2kia o1c2kig o1c2kin o1c2kit o1c2kiv oc2kop o1c2kosn o1c2koso o1c2kosr o1c2koss oc1pr o2c3sap o2cs1ál ocsá2s o2cs1ász o2cs1á2z o2cs1e2 oc3sér ocsié2ra o2cs1ing ocs1izm o2csí2 oc3sín o2csop ocs1s ocs1t o2csuj o2cs1ü2 oc3sza ocs3zá oc3sze oc3zs o2daad oda1b2 o2d1adj oda1dr o2d1akk o2d1alj oda1p2 odas2 o2d1ass od1aszt odat2 oda1tr od1autó odáb2 od1állá o2dáru odáskész1 odás3s odá2s3z 1o2dáz od1ed ode2l odel2l1a ode2min od1e2v o2d1é2g od1é2ne o2d1ép o2d1érd o2d1é2te o2d1é2ve od1é2vé 2odéz od1ide odi2g o2d1iga o2d1ikre odi2l1e odi2lid odi2lik odi2l1is o2d1int o2d1iro od1isp od1í2z od1kl od1obo o2d1okt o2d1op odo2rak odo2rosz od1ö2l od1ö2r od1Å‘2r od1pr o1d2ram o1d2ráma od1st odu2l1a2l o2d1u2r 1odún od1üg od1ün od1üz 1odváb od3ze od3zo o1e oe2ir oe2le oe2mu oe2ne oe1t2he oe2ur oe2uv o1é o2éfa o2éfá o2éfo o2éke o2éki oéna2 oé2n1al oé2n1an oé2n1ar oéná2 oé2n1ár oé2nis o2ész o2évi ofi2lad ofi2lak ofi2l1á ofi2lel ofi2lér ofili2 ofi2l1iga ofi2l1igá ofi2lis ofi2l1osz ofi2tal ofi2t1e2 of2la of2ló ofona2 ofo2n1al ofo2nan ofo2n1á ofo2n1ér. ofon3n ofo2n1ó2 ofor2m1á2 ofÅ‘2r1e ofÅ‘rü2 of2rí og1abr o2g1a2g oga2kar o2g1aká o2g1a2la o2g1a2lá o2g1alj og1all og1alt og1alu o2g1a2ny o2g1ap o2g1a2ran og1arc o2g1a2sz oga2t1a2g oga2t1e2 oga2t1i2n og1a2ty 2ogaz o2g1á2g og1áll og1álm o2g1áp o2g1á2rak o2g1áre o2g1árja o2g1árját o2g1á2rok ogá2ros o2g1á2ru ogáskész1 o2gásó o2g1áta o2g1áte o2g1átj o2g1átk o2g1átl o2g1átn o2g1á2to o2g1átr o2g1áts o2g1átt og1átü o2g1átv og1bl ogdí2j1a2d og1dr o2g1e2d o2g1e2g ogegész1 o2g1e2l o2g1em o2g1e2p oge2r og1ere og1ern og1erÅ‘ oge2s o2g1ese o2g1e2v o2g1ez o2g1é2g o2g1é2l ogén1n o2g1é2p o2g1ér. og1érte og1érté o2g1értÅ‘ o2g1érv o2g1és og1fl og1fr og1g2l o2g1ic o2g1i2d o2g1if ogi2g o2g1iga o2g1igé o2gill og1inf o2g1ing og1ins o2g1int o2g1ip og1i2ro og1ita o2g1íj og1ín og1ír og1ív og1kl og1kr o1g2lic o1g2nai o2g1odv og1org og1orr og1orz o2g1oszl o2g1oszt o2g1o2v og1ö2b og1ö2l og1ö2r og1ös og1Å‘2r og1pl og1pr 2ografi 2ograp 2ográf. 2ográff o1g2ráffa o1g2ráfh o1g2ráfj o1g2ráfr o1g2rál og1sk og1sp og1s2t og1tr og1u2ra og1u2ru o2g1u2s o2g1u2t o2g1új og1ü2g og1ül og1ür og1üt og1üz og1űz ogy1a2c ogy1a2p ogy1is. ogy1os ogyóé2 oha2mal oha2mel oha2mes oha2mis ohas2 oha1sp o2h1ág o2h1ál ohá2nyad ohá2nyan ohá2r1e ohá2szi ohá2sz1odv o2h1á2z oh1e2c oh1ing oh2ni o2h1orr ohó2csi oh1órá oh1ö2v oh1urá o1i oi2ae oi2af oi2an oi2av oi2ába oida1 oi2d1ad oi2dan oi2dál oi2d1e2 oi2dol oi2d3z oilet2 oina2 oi2n1ad oi2ne oi2re oisel2 oi2zo o1í ojá2r1as ojás3s ojás3z ojek2t1á2 ojek2t1í2 ojek2t1o2 oj1in oj2t1ára oj2t1orják ojtóá2 ok1abl ok1a2cé o2k1a2dat o2k1a2dá o2k1a2dó o2k1a2kar ok1akv o2k1alj o2k1alk ok1alm ok1alt ok1ang ok1ani o2k1ant oka1p2l ok1app ok1a2ra ok1arc oka2ris o2k1asp o2k1ass ok1aszf ok1aszt o2k1att o2k1aty oka2u ok1aut o2k1ava o2k1ág ok1ájt ok1árad o2k1á2rak oká2rul o2k1árv oká2sal ok1ásó okás3s ok1ász o2k1áte ok1bl ok1br ok1dr o2ke. ok1e2b o2k1e2c oke2d ok1edé o2k1e2g o2k1e2l o2k1e2m ok1eré ok1erk ok1ernel. ok1e2rÅ‘ o2ke2s ok1esé o2k1e2v ok1e2z o2k1ég ok1é2ke ok1é2ké o2k1él. ok1é2les ok1é2let ok1é2lé ok1éln ok1élt o2k1é2ne oké2p oké3pá ok1épí o2k1épü o2k1ér. 
o2k1érb o2k1érc o2k1érd o2k1érg o2k1érh o2k1é2ri o2k1érm o2k1érr ok1értá o2k1érte o2k1érté ok1értö o2k1érz oké1s2 okész2 o2k1étk o2k1étt o2k1é2ve ok1fl ok1fr ok1gr o2k1i2de o2k1i2do ok1i2ga okigaz1 ok1igá o2k1igé ok1ind o2k1int o2k1i2rá o2k1i2ro o2k1isk ok1ism o2k1isp ok1iste o2k1i2ta o2k1izm ok1íj ok1ír ok1ív ok1íz ok2kab ok2k1a2d ok2k1aj ok2k1ale okk1alk ok2k1as ok2kaz okk1elh okk1elö okk1elÅ‘ okk1elr 1okkeré ok2k1es ok2képü ok2kid ok1kló ok2kob okk1öss okk1ö2vű ok1kri ok2kud ok2k1ur o1k2lí ok2lor o2k1ob okola2 oko2lár o2k1oltás okon1n oko2n1oks oko2ra oko2r1á oko2ril oko2ris o2k1ormú ok1o2ro o2k1osz ok1o2varia o2k1o2vi o2k1öb o2k1ö2d ok1ö2k o2k1öl o2k1ön okö2r o2k1ös o2k1ö2v ok1ö2z ok1Å‘2r ok1pl ok1pr o1k2ris o1k2róm ok2sel ok1sp oksz1alm ok2szan ok2sz1es ok2sz1is 1oktán o2k1ud o2k1ug o2k1uj ok1u2ra o2k1u2t ok1úr. ok1úrb ok1úrh ok1úrr ok1úsz o2k1útb o2k1úth o2k1ú2ti o2k1útj o2k1útk o2k1útn o2k1útr o2k1útt ok1útu o2kútv ok1üg ok1ü2l ok1ün ok1ür ok1ü2t ok1ü2v ok1üz ok1űz o3la. ol1abl ola1d2 ola1f2 1o2laj. 1o2lajb 1o2lajf 1o2lajg 1o2lajh 1o2laji 1o2lajj 1o2lajk 1o2lajm 1o2lajn 1o2lajp 1o2lajr 1o2lajs o2lajt o2laju 1o2lajú o2lajv ola1k2r o2l1alg ol1alk ol1amn ol1anya ola1p2 ola1s2p ola1s2t ola2t1ala olat1any ola2tál ola2táp ola2ték ola2t1inf ola2t1í2v ola2t1ol ola2t1orn ola2t1öl ola2tüz olau2r ol1ábrá o2l1á2g olá2ha ol1áll o2l1árb o2l1árh o2l1á2ria olá2riá o2l1árk o2l1árn olá2rok olá2ron o2l1árr o2l1árt o2l1á2ru olá2sza o2l1á2ti o2l1átv ol2caj ol2cal olca2n olc1any ol2c1e2k ol2cel ol2ces ol2c1év ol2c3h olc1iko ol2cí ol2có ol2cö ol2c3sor ol2csű olc3sz ol2cü ol2cz ol2dab 1oldal. 1oldalá 1oldalb olda2le 1oldalh 1oldalk 1oldall 1oldaln 1oldalo 1oldalr 1oldalt 1oldalu 1oldalú ol2d1any ol2dap olda2tel ol2d1au ol2dál ol2deg ol2d1elé ol2d1ell ol2d1elv old1emb ol2d1e2r ol2d1e2s ol2dev ol2dez ol2dés ol2d1éve ol2dip ol2d1is ol2dor 1oldós ol2d1ö2 ol2dud ol2d1u2g ol2d1ü ol2dű ol1e2g o2l1e2l o2l1é2d o2l1é2g o2l1él ol1ér. ol1érd ol1é2re ol1éré ol1érh ol1é2ri ol1érj ol1érl ol1érn ol1érÅ‘ ol1érs o2l1érté ol1é2rü ol1érv o2l1érz olfa2k ol2fau ol2f1ár ol2fes ol2fet olf1ing ol2fir ol2fis olfu2 ol2f1ut ol2fúj ol1gl ol1g2r 2olib o2l1i2du 1o2liga oli2gáz o2l1igé ol1ill ol1i2ma. o2l1i2má 1olimp oli2nu o2l1i2pa olipe2 oli2p1et o2l1isk oli2szál ol1ív. oll1a2gy ol2l1aj olla2l ol2l1ala ol2l1alj ollan2d1ó2 oll1any ol2l1atk oll1att ol2l1á2g ol2l1e2c oll1ege oll1egé ol2l1e2gy ol2l1e2h olle2l oll1ele ol2l1emb oll1ese ol2l1é2k ol2l1éri ol2l1inf oll1inge oll1ingé oll1ingi oll1ingj oll1ingn oll1ingr oll1inj ol2lins ol2l1int oll1isk ol2lob ol2lor ol2l1osz ol2l1ö ol2l1Å‘2 oll1s ol2lub ol2lul ol2l1u2s ol2lü ol3lyu oln1ike o2l1o2l o2l1op olo2r1e ol1ott o2l1ox olóe3dénn oló1f2 o2l1ó2né ol1ónn ol1ó2no ol1ónr ol1ónt oló2rái oló2ráj oló2rák oló2rán oló2rár oló2rát ol1ó2ri olót2 oló1tr ol1ö2l ol1ör ol1ös ol1p2l ol1p2r ol1sk ol2t1aj 1oltalm 1oltalo ol2t1ari ol2t1ág ol2t1áta ol2t1eg ol2t1em olte2r ol2t1ere olte2s ol2t1e2v ol2t1ép ol2t1ér. ol2t1érr ol2t1és ol2tid ol2tim ol2tis ol2tiz ol2t1old 1oltóé 1oltóh 1oltóké 1oltólo 1oltóm 1oltón oltö2r ol2t1Å‘r ol2tür o2l1ug o2l1ur o2l1u2tá ol1ús ol1üg ol1ül ol1üv ol1üz 1olvad 1olvas o2lyabr oly1ada oly1aggy olyame2 olya2mes oly1aszt o2lyál o2ly1e2 1o2lyéb 1o2lyéh 1o2lyéi o2lyél 1o2lyén. 1o2lyiér o2lyim 2olyó o2ly1ö o2lyÅ‘ o2lyug o2ly1ü2 o2ly1ű2 o2m1abl om1a2dat o2m1adm o2m1a2dó o2m1adt oma1f omaga2 oma2ga. 
oma2g1ad oma2gal oma2g1á2 oma2ge omagi2 oma2g1in o2m1a2gya om1agyú o2m1ajá o2m1ajk oma1kré o2m1akt om1a2lag oma2lapo oma2lapr o2m1alm om1alt om1alv o2m1amb om1ang o2m1ann om1apad o2m1app oma1pr o2m1a2ra o2m1arc o2m1arg oma2t1árak o2m1atká o2m1aty o2m1au om1a2zo omá2cs omá2g om1ági om1ágo omá2nyan ományká2 o2m1áp o2m1á2rai om1á2rak om1á2ram om1áras o2m1á2rá o2m1árd o2m1áre omá2ré. omá2rét o2m1árg omá2ria omá2riá o2m1árm o2m1á2ru omá2sí omás3s omá2sz omás3zó o2m1á2t1e om1á2t1é o2m1átk o2m1átm om1átol o2m1átr om2b1a2lo om2bág om2b1eg omb1elh om2b1elt omb1éne om2b1éri omb1ón. ombó2r omb1óra om1bro om2buj omdio2 om1dr o2m1e2b o2m1e2ce o2m1e2dé om1ef ome2g ome3gá om1ege om1egé o2m1egy ome2l o2m1ela om1elá om1elb o2m1ele o2m1elé om1elk om1ellá o2m1elm o2m1eln o2m1elo o2m1elÅ‘ o2m1els om1elte o2m1elu o2m1elv o2m1e2m om1ene om1eng om1eny om1enz ome2o o2mep om1erd o2m1e2red o2m1e2rei o2m1erny om1e2ro ome2rÅ‘ o2m1erÅ‘. o2m1erÅ‘b o2m1erÅ‘h o2m1erÅ‘n o2m1erÅ‘r om1erÅ‘sí omer1Å‘ss o2m1erÅ‘t o2m1erÅ‘v om1e2rű ome2s om1ese om1esé om1ess om1est o2m1e2ti o2m1etn o2m1etű o2m1e2v o2m1ex ome2z o2m1eze o2m1ezred o2m1ébe om1é2de o2m1ég o2m1é2he o2méhs om1éje o2m1é2ke o2m1é2le om1élé om1é2lÅ‘ o2m1ép o2m1ér. o2m1érb o2m1érc o2m1é2re o2m1é2ré omé2ri. o2m1érm o2m1é2rÅ‘ o2m1érr o2m1érte o2m1érté o2m1értÅ‘ o2m1érv o2m1é2s omé2tel o2m1étt o2m1éve om1évé om1f2l om1gl om1gr om1ide omid1i2 om1i2dÅ‘ omi2g o2m1iga o2m1igé om1iks o2m1ill om1imá omime2 omi2mel omi2m1é om1i2na. omi2náb omi2náv o2m1ind om1inf o2m1ing om1inv o2m1i2ono o2m1i2p o2m1irá o2m1i2ri o2m1iro o2m1irt o2m1isk o2m1ism o2m1iste omi2sz1á om1i2tal omi2tan omi2t1ás omi2t1e omi2tis om1izé om1izo om1izz om1íj o2m1ír om1í2v om1í2z omjó2 om2jÅ‘ omká2rok. om1kl om1kr omlá2b1út omo2dor omo2kas 2omoly o2m1ont om1ope omo2ras omo2re omo2riz o2m1orjá om1osto omosz2f o2m1oszt o2m1ox om1ök om1ö2l om1ön om1ö2r om1ös om1ö2t om1öv om1ö2z om1Å‘2r om1Å‘2s om2pel om1pla om2p1ors om2pÅ‘ om2p1u2tá om1sk om1sl om1sp om1st om2te. omtes2s om1t2r o2m1ud o2m1ug o2m1uj omu2n o2m1uno o2m1ur o2m1u2t o2m1u2z o2m1új om1üd om1üg om1ü2l om1ür om1üs om1üt om1üv om1üz om1űr om1űz om1ya o2n1abl ona2cél ona2dat ona1dr on1ads onae2 on1agg on1agi on1agó on1agyh on1ajn o2n1ajt 2onak on1akc o2n1akk on1akn o2n1akt ona2len ona2l1e2s ona2lint on1alj. on1alju ona2lok ona2los 1onani o2n1a2no o2n1ant on1app ona1pr ona1ps ona2rác onarádi2 ona1s2p ona2tál onatü2 ona2tüz on1ábr 2onác o2n1ág o2n1állo o2n1állv o2n1á2p on1árad o2n1á2rak on1á2rat on1árb o2n1árk o2n1árn o2n1á2ro o2n1árt o2n1á2ru o2n1á2rú on1árv o2n1á2sza o2n1á2szokr o2n1átál oná2tás o2n1átc o2n1áte oná2t1ér o2n1átf o2n1áthe o2n1átm o2n1átre on1bl on1br on2cal on2c1ál on2c1e2g once2s on2c1ez on2c1ék on2c1ér. on2c1éré on2che on2cid on2c1ikr onc1ill on2cös onc3ság on2cseg oncs1emb oncs1ég on2cs1ér. onc3sikk on2cs1im on2cú on2cü on2de. ond1iko on2d1o2kos ond1oksá on2d1osz 2ondoz on2d1öl on1e2b o2n1e2d on1e2ge on1egg o2n1e2gy on1e2h one2l o2n1ela o2n1ele on1el1é o2n1elh on1elj o2n1elm o2n1eln o2n1elo on1elÅ‘ o2n1elr on1els on1elt o2n1elv o2n1e2m one2n on1ene on1erj on1erk on1ers one2s o2n1ese ones2s on1est o2n1e2v o2n1ex 2onéb oné2d on1éde o2n1é2g o2n1é2he on1é2ke on1éks o2n1é2l on1é2ne o2n1é2p o2n1érbe o2n1érc o2nérd o2n1éri o2n1érl o2n1érm o2n1érp o2n1érs o2n1érte o2n1érv o2n1év. 
o2névad o2n1évb o2n1é2ve on1é2vi o2n1évk o2n1évn on1évr on2g1áll ongás1s on2g1e2c on2ged on2g1eg on2g1e2l on2gik on1g2ló on2gü on2gyad on2gyÅ‘ onháro2 onhárom1 o2n1i2d oni2g o2n1iga o2n1ige o2n1igé o2n1ij on1i2ke o2n1ill o2n1inb on1ind o2n1inf o2n1ing o2n1inj o2n1inn o2n1inr o2n1ins o2n1int o2n1i2p on1i2rá o2n1iro o2n1irt o2n1isk o2n1ism on1iste onita2 o2n1i2zé on1izo o2n1izz on1íg o2n1ín o2n1ív on1íz onk1áll onká2rok. onké2t1 on2ne. on3nyár on3nye on3nyo on3nyú on3nyü o2n1ob ono1g2ráf. ono1g2ráfn onoka2 on1o2kal on1o2kos o2n1okta o2n1old on1oll on1opt on1oszl ono1sztr o2n1o2v o2n1ox o2n1óri onó2sak onó2si onó2szen on1öb on1öl on1ön on1ör on1ös on1ö2t on1ö2v on1ö2z on1Å‘2s on1pl on1pr on1ps 2ons. onsa2v1a2m on1s2k on1sl on1s2m on1spe on1spo on1spr on1sr on1sto onsz2 on1szf on1szt ont1agy on2t1aj on2t1alk on2t1ara on2t1atr on2taz on2t1áll ont1árv on2teb on2t1ed on2t1e2g ont1elá ont1eld ont1elh ont1ell ont1elm on2teln on2t1elÅ‘ ont1elt ont1elv ont1emb onte2s on2t1ese ont1éké on2tél on2tigé on2t1i2ko ont1ikr on2t1i2m on2t1inf on2t1int onti2sz ont1iszo on2t1iz on2t1í2v on2t1oml on2t1ors ont1óni 1ontóst on2t1ös on2t1öz on2tül on2tür on2t1üz on2tye o2n1ud o2n1ug o2n1uj onu2n o2n1una o2n1u2r o2n1u2t o2n1új o2n1ú2s onú2t on1útb on1úté on1úth on1úti on1útj on1úto on1útr on1útt on1ü2c o2n1ü2g on1ü2l o2n1ün o2n1ür o2n1ü2t on1ü2v o2n1ü2z on1űr on1űz o2ny1acé ony1a2dó ony1akc ony1alj o2ny1alk ony1alt ony1anya onya1p2 o2ny1apó o2ny1ál ony1árk o2ny1árn ony1ázó ony1e2c ony1e2g o2ny1e2l o2ny1e2m o2nyen o2ny1e2r ony1e2s o2ny1ég o2ny1é2j o2ny1é2k o2ny1él o2ny1é2ne o2ny1ér. o2nyéré onygóc1 o2nyigé ony1iko ony1ing o2nyita o2nyitók o2ny1í2r ony1old ony1oml o2ny1o2r o2nyosz ony1ó2r o2ny1ö o2ny1Å‘2 o2nyug o2ny1ur o2ny1ü2 onz1abl on2zag onz1ajtó on2zar on2zág on2z1á2l on2z1ed on2z1e2l on2z1e2m on2z1es on2z1ék on2z1ére on2z1im onz1ing onz3sel on2zü o1o oo2ib oo2in oo2pe oo2re oo2xi o1ó o1ö o1Å‘ o2p1a2b o2p1a2d o2pal op1a2la o2p1any op1a2po op1a2r opa2u o2p1a2z o2pál o2p1ám o2p1á2rat opáskész1 o2p1áth o2p1átl o2p1átm op1bl op1e2dé op1e2g op1ejt op1elÅ‘ op1em opera1s operasz2 op1erd op1erk op1erÅ‘ op1es op1e2t o2p1é2l o2p1é2n op1gr op1i2ko op1ikr o2p1im op1ind o2p1ing o2p1i2p o2piram o2p1i2rá op1isi op1ist o2p1i2ta opi2z op1izo op1izz op1ív op1kl op1kr o2p1ob 2opol o2p1orj o2p1orr opor2t1a2 opor2t1á2 opor2t1e2 opor2t1érd opor2tö o2p1orzó oposz2f o2p1ov op1ös op2pé op2p1is op1py 2opro op2roc op2rod op1sl op1sp op1sr op1s2t o2p1ud o2p1u2r o2p1u2t op1új o2p1ús op1üd op1üg op1üt op1üz o2r1abl o2r1abr ora2dat o2r1a2dá or1adm o2r1a2dó or1aff or1agg or1a2gó ora2kad ora2kas o2r1akc ora1kl ora2kol o2r1akt or1alg o2r1alj or1alk o2r1alm o2r1alt o2r1alv or1amp o2r1ana o2r1ane o2r1ank o2r1a2no o2r1ant or1a2nya oraó2 or1app o2r1a2rá o2r1arc or1a2ri or1aszk o2r1atk o2r1atl or1att or1aty o2r1a2zo o2r1ábr orá2cs or1ács. or1ácsa or1ácso o2r1á2g orá2le or1álló o2r1állv orá2lö o2r1á2p o2r1ár. 
o2r1á2rak o2r1árá o2r1árb o2r1árk o2r1á2ro o2r1árp o2r1árr o2r1árt or1á2ru or1árú o2r1árv o2r1ásv orá2sze o2r1áta o2r1á2t1e2 o2r1átf o2r1áth o2r1átj o2r1átk o2r1átm o2r1átr o2r1áts o2r1átt o2r1átv or1bl or1br or2c1a2l or2car 1orcád or2chá or2c3hé or2c3ho orc3ság or2c3seb or2c3sé or2cú or2cz or2d1au ord1eme ord1e2sÅ‘ or2dex or2d1ing 2orea o3reá o2r1e2b or1ecs o2r1e2d o2r1ef or1e2ge or1e2gé o2r1e2gy 2ore2k or1eke or1eké ore2l or1ela or1ele or1elh or1elj or1elm or1eln or1elo or1elÅ‘ or1elr or1els or1elt or1elv o2r1e2m ore2n or1ene o2r1eng or1eny o2r1ep or1eró ore2s or1ese or1esé or1esÅ‘ o2r1ess o2r1esze o2r1e2v or1ez o2r1é2d o2r1é2g o2r1é2j oré2kás oré2k1e2 or1ékné o2r1é2l o2r1é2m o2r1é2ne o2r1ép o2r1éré o2r1é2te o2r1étk o2r1év. o2r1é2ve o2r1é2vé o2r1é2vi o2r1évn o2r1évr orfé2l1é2v orfi2úér 2orgiai or1gl o2r1ibo o2r1i2de o2r1i2dé oridi2 ori2d1io 1o2rie o2r1ifj o2r1i2ga o2r1i2gá o2r1i2gy or1iha o2r1i2ma. o2r1i2má o2r1i2na. o2rinas o2r1i2nán o2r1i2nát or1i2náv o2r1ind o2r1inf o2r1ing. o2r1ingc o2r1ingé o2r1ingi o2r1ingn o2r1ingr o2r1ings or1inh o2r1inj o2r1ins orin2t1a2 orin2tel orin2t1Å‘ ori2og o2r1i2p o2r1i2si o2r1ism or1isp o2r1iste o2r1i2szá ori2tan o2r1i2zé o2r1íg o2r1í2j o2r1í2n o2r1ír o2r1í2v o2r1í2z 1orjaia 1orjaié or3já or1k2l orkö2z1e2p or1k2v or2m1app orma1t2re or2m1att or2m1azo or2m1eb orme2g or2m1egy orm1eli orm1elv orm1erÅ‘k orm1esz ormé2t or2m1éte or2m1osz or2móv or2m1ö or2m1Å‘ or2mü or2ne. ornis1s o2r1o2á orogkész1 oro2k1á2 oro2kor o2r1olda o2r1o2li o2roltól o2r1ontó. o2r1ontób or1ontóé or1ontóig o2r1ontój o2r1ontón o2r1ontór o2r1ontót o2r1ontóv oro2nya oro2nyo or1ope or1opt o2r1o2r o2r1osko o2r1osto 1o2roszi 1o2roszr o2rosztá o2r1o2x or1ó2ia or1ó2ni or1ó2rá or1óri or1ó2sá oróso2r oró2soro or1ö2b or1ö2c or1ö2l or1ön or1ör or1ös or1öt or1öv or1ö2z or1Å‘2r or1Å‘2s or1ph or1pl or1p2n or1p2r orr1abl or2r1a2r or2r1áta 1orrb 1orri. or2rin or2riz 1orrk 1orruk 1orrú. 1orrúc or2s1a2d or2s1ajtók ors1alk ors1ass or2s1ál or2sed or2s1e2s or2s1é2ne or2sérte ors1é2tát or2s1í2r or2sön or2sÅ‘ or1srá or1s2tab ors1ült ors1ü2tÅ‘ 1ország orszi2l1 ors3zóná or1sz2t or2t1agg or2t1alm or2tana orta2n1á2c or2t1aré or2tág ort1árad ort1á2ram ort1á2rán ort1á2rár ort1árbe ort1árná ort1á2ruk or2t1á2rú or2t1e2g ort1ejt or2t1ela ort1e2lem or2t1elé or2t1ell or2t1elm or2t1eln or2t1elo or2t1elÅ‘ or2t1elt ort1elv ort1emb or2tene orte2r1a or2t1ess ort1eti ort1éle ort1élé ort1élt ort1élv or2t1érm or2tid ort1i2ku or2t1int or2t1ist or2t1ita or2tít or2t1okm or2t1o2kol or2t1okta or2t1old or2t1orm or2t1ott or2t1ó2r or2t1ön or2t1ös or2t1ö2v ort1ura ort1úr. or2t1ü2zér o2r1ud o2r1u2g o2r1u2r o2r1u2t o2r1útb o2r1úth o2r1ú2ti o2r1útj o2r1útn o2r1ú2ton o2r1útr or1úts o2r1útt or1üd or1ü2g or1ü2l or1ün or1ür or1ü2s or1üt or1ü2v or1üz or1űr 1orv. or2v1a2n 2orvá orv1isi 1orvos or2vÅ‘ 1orvv or2z1a2var. 
or2z1e2c or2zes or2zsan orz1z o2s1abl o2s1a2da o2s1a2dó o2s1akc o2s1a2l osa2n o2s1ang o2s1ant o2s1any o2s1a2p os1arc os1assz os1a2ty o2s1au o2s1ábr o2s1á2g os1áld o2s1áll os1á2lom osára2 osá2rak osári2 osá2rik osá2rok o2s1áru osás1s os1á2t1a os1áté os1átj os1átk o2s1átr os1e2d o2s1e2g o2s1e2l os1emb os1e2n o2s1er o2s1es os1e2t os1ex os1ez o2s1é2g o2s1é2l o2s1ép o2s1érd o2s1érte o2s1érté osgé2 o2s1ide o2s1if osi2g os1iga os1ikr os1inf o2s1int o2s1i2p o2s1i2rá os1iro os1irt o2s1ism os1isp os1iste o2s1i2sz o2s1íj o2s1í2r o2s1í2z os1kl os1kró 2osok o2sonh o2sonn o2sono 1o2sont o2sonu 1osonv o2s1o2r os1osz osó1p os1ök os1ö2l os1ön os1ö2v os1Å‘2r os1p2l os1p2r os1ps os1sta ossu2 os2s1ur ossz1áll ossz1á2ro ossz1es ossz1íj osszó2 os3szÅ‘ ost1e2te os2tip os2tir os2tiz 1ostob ost1old os2t1ös 2ostu os2t1ü2 1ostya o2s1ud o2s1uj o2s1u2r o2s1u2t os1úr. o2s1ú2s o2s1üg o2s1ün o2s1ü2v o2s1üz os3zabá o2szaj osz1alá osz1alk osz1alt osza2n osz1any o2szar osz1aré osza2t1e2 o2sz1ál o2sz1ed o2sz1e2g o2sz1e2l o2s3zen osz1ep osz1es o2szég o2széj o2sz1é2l o2sz1é2p o2sz1és o2szige osz1ing osz1ism osz1ist oszí2 osz1ív o1szkl o2sz1o2ro osz1ors o2sz1orz os3zón osz1ó2r o2szö os3zöl osz2t1alm oszt1any osz2t1apo osz2tár. osz2t1árn osz2ted osz2tell osz2t1em oszté2r osz2t1éré osz2t1ív osz2t1osz o2szur osz1úr o2szü osz1üg os3zül ot1abl ot1a2da otag2 ota1gr ota1k ot1ana o2t1ant ot1a2ny ota1p2 ot1arc ot1arz ot1ass o2t1a2z ot1ábr o2t1á2g ot1á2rak o2t1á2rá o2t1árb o2t1á2ré ot1árn ot1bl o2teb o2t1e2g otel1a ote2l1á ote2lel ote2lÅ‘ ote2m1á ote2m1o ot1e2v o2t1é2l o2t1ép o2t1ér. o2t1érté o2t1érz ot1fr ot1gr oth2r o2t1id o2t1im otina2 ot1i2na. oti2nar oti2nár o2t1ind otinká2 2otipa 2otipi 2otipn 2otipt o2t1irá o2t1is o2t1izé ot1íj ot1ín ot1kl ot1k2r o2t1ob oto1gram ot1ola otona2 oto2n1á oto2n1in. oto2nis oton3n oto2rak oto2rál oto2rár oto2re oto2rim oto2rin oto2rol otosz2f otó1f2 2otóp otó2pan otó2pas otó2pin otó2p1osz otó2sétá otós3s otó1sta otó1sz otó2s3zár otót2 otó1tr ot1ö2l ot1ös ot1pl ot1pr ot1ps o1t2rag ot1sp ot1sr ot1st otta1g2 ot2t1é2g 1ottl ot1ura ot1u2sz o2t1új ot1úri ot1ü2l ot1üt ot1ü2z ot1űr otva1k2 otya1 o1u ou2ag ou2il ou2le o1ú o1ü o1ű ova2ga ova2g1i2n ova2r1an ova2rát ova2r1el ova2rék ova2rés ova2ris ovas3s ovau2 ováb2b1o ová2s1á2rá ová2sik ovio2 ow1el 1oxidb 1oxidr o2xigé 1oxilc oy2ce oza2g ozag1a oz1agi o2z1a2l oza2mal oza2m1e2 oza2m1érté oza2min oza2n o2z1any oza2r oz1ara oza2tat oza2tál oza2t1e2 oza2tés oza2t1é2te ozat1ill oza2t1í2 oza2t1ol o2z1au o2z1á2g ozá2ke oz1áll o2z1á2ro o2z1áru ozá2s1e ozás3s ozá2s3z o2z1átl oz1e2g oz1e2m oz1en ozé2k1e2 o2z1é2l o2z1é2p o2z1é2ré oz1i2do o2z1i2par oz1iro ozi2s1aj ozi1sl ozi1st ozi1sz2 ozi2t1a2 oz1í2n o2zor oz1ors o2z1osz ozóegyez1 2ozófia 2ozófiáb 2ozófiáh 2ozófiája 2ozófián. 2ozófiána 2ozófiár 2ozófiát. 2ozófiátó 2ozófiáva ozó2tan oz1p2r o2zs1a2l ozsa2n ozs1any o2zs1Å‘ ozs1s o2zsü oz1ün oz1ü2z oz1űr ozzá1s2 2ó. ó1a óa2cé óa2dag óa2dá óa2do óa2dó óa2ga óa2gi óa2gy óa2já óajtói2 óa2kad óa2kar óa2ká óa2ku óa2la óala2g1 óa2lá óa2l1e óalmana2 óalmanac3 óa2lo óal1os óa2lu óa2ma óa2na óa2nó óa2ny óa2pá óa2po óa2pó óa2pu óa2ra óa2rá óa2ré óa2ri óa2ro óa2to2 óatom1 óa2ty óa2uk óa2ur óa2ut óa2va óa2xi óa2zo ó1á óá2bé óá2cs óá2ga óá2gi óá2go óá2gyú óá2hí óá2la óá2lo óá2po óá2rá óá2ru óá2rú óá2sa óá2só óá2sz óá2ta óát1a2dó. 
óá2tá óá2t1e2 óá2té óá2t1i2 óá2tí óá2to óá2t1ö óá2tu óá2tú óá2tü 2óbab 2óbaj 2óbak 2óbal 2óband 2óbank óba1p2 2óbará 2óbark 2óbarna óbas2 óba1t2r óbau2 2óbec 2óbef 2óbeg 2óbeh 2óbej 2óbek 2óbele 2óbelé 2óbels 2óbem 2óbeo 2óber 2óbet 2óbev 2óbez ó2bég 2óbil 2óbio 2óbir 2óbit 2óbiz ó1b2le ó1b2lo 2óbój 2óbólé ób2rá ó1b2ro ób2rók ó1b2ru óbuda2ik óbuszt2 óbu2sz1ú ó2c1aj ó2c1akr óc1a2la óc1alk ó2c1ar ó2c1ass óc1att ó2c1ál 1ó2ceá ó2c1e2g óce2l óc1ele óc1elm óc1elÅ‘ óc1emb ó2c1e2v ó2c1ép ó2c1ét óc3há óc3he óc3ho óci2m óc1imi óc1ing ó2c1i2pa ó2c1is óc2lu ó2c1or ó2c1osz óc1ó2r óc1pr 2ócsa óc3ság ó2cs1áru 2ócse 2ócsé 2ócsi ó2csit 2ócsí 2ócso 2ócsö 2ócsÅ‘ ócs1p óc3s2pá óc3sz óc1tr ó2c1ud óc1ün óc1üz óc3za óc3ze óc3zu ód1ada ód1a2dá óda2j ód1ajá ód1akt 2ódal. 2ódala ód1alg ó2d1am ód1ana 2ódarabk 2ódará ód1aut ód1azo ó2d1áf ó2d1ág ó2d1ál ó2d1áp ó2d1árn ód1á2ru ódás3s ód1bl ód1br ó2d1e2d ó2d1e2g ód1e2lem ó2d1elh ód1elj ó2d1ell ó2d1elm ó2d1elo ód1elÅ‘ ód1elr ó2d1elv ód1emb ód1eme ód1ep óde2ra ód1erÅ‘ óde2sés ó2d1e2v ó2d1ez ó2d1é2g ó2d1ép ó2d1érte ó2d1érz ód1fr ód1gl ó2d1i2d ó2d1igé ó2d1int ódi2p ó2d1ipa ó2d1iro ódi2sz ód1izo ó2d1ír ód1kl ód1kr ód1kv 2ódob ódo2ga 2ódok 2ódol 2ódom ó2d1op ó2d1osz ó2d1o2x ódókész1 ód1órá ó2d1ö2l ód1ös ó2d1öt ó2d1öv ód1Å‘r ód1pr 2ó1d2ram 2ódrá ód2rám ó1d2rog ó1d2rót ód2ruk ód1sp ód1st 2ódug ódu2r ó2d1ura ódus3s ódu2s3z ó2d1u2t ó2d1új ód1úr. ód1üg ó2d1ür ód1üv ód1üz ód3zár ó2d3ze ód3zso ó1e óe2bé óe2ch óe2cs óe2dé óe2dz óe2ge óe2gé óegész1 óe2gy óe2ke óe2ké óe2l1a2 óe2l1á2 óe2le óel1en óe2lé óel1ér óe2l2i óe2l1í2 óe2lo óe2lÅ‘ óe2lü óembe2r1ék óe2me óe2mé óe2mu óe2ne óe2pé óe2pi óe2po óe2re óe2ré óe2ró óe2rÅ‘ óe2se óe2sé óe2sÅ‘ óe2sz óe2te óe2ti óe2tű óe2ve óe2vé óe2vÅ‘ óe2zü ó1é óé2be óé2ge óé2gé óé2gÅ‘ 2óé2he 2óéhs óé2ke óé2ké óé2kí óé2le óé2lé óé2lÅ‘ 2óé2ne óé2pí óé2pü 2óér. 2óérd 2óé2re 2óé2ré óé2ri óé2rÅ‘ 2óérte 2óérté 2óérz óé2te 2óétk óé2ve óé2vé óé2vi óé2vü 2ófa. 2ófaa 2ófaá 2ófab 2ófac 2ófad 2ófae 2ófaf 2ófag 2ófah 2ófak 2ófal 2ófao 2ófap 2ófar 2ófas ófa1st 2ófa1t2 2ófaü 2ófav 2ófaz ófe2l1em ófe2len ó2f1ev 2ófék 2ófélÅ‘ 2ófélt 2ófén ó2f1ér. ófi2ab ófi2ad ófi2ag ó1f2la ó1f2lo óf2ló óf2lö óf2lu 2ófoc 2ófog 2ófok 2ófol 2óford 2óforg 2óformác 2óformáj 2óformál 2óforr 2ófos 2ófot ó2f1ov ó1f2rak ó1f2ri ó1f2rí ó1f2ro ó1f2rö ó2f1ud ófu2r óf1ura óf1úr. óf1úrn óga1p óg1dr óge2o ógé2p1i2p óg3gy ógi2al óg1ír óg2la óg2le ógo2ras ó1g2raf óg2rán ógu2sab ógu2s3z ó2gy1el ó2gy1es ó3gyi. ógy1int ógyö2k1érb ógyta2n1á2 ógyte2a ógy1ús óha2de 2óhal 2óhan 2óhas 2óhat óháza2d óhá2z1ada 2óhon 2óhor 2óhos óhús1s ó1i 2ói2de 2óidé ói2dén ói2di 2ói2dom 2ói2dÅ‘ 2ói2ga 2ói2ge 2óigé ói2géb ói2géi ói2géj ói2gék ói2gén ói2gér ói2gét ói2gév ói2konb 2ói2konj 2ói2konn 2ói2kono ói2konr ói2kont óile2 2óill ói2ma. 2ói2má ói2mi 2óimp 2ói2na. ói2nai ói2naka ói2nas ói2nat ói2nán ói2nár ói2nát 2óind 2óinf 2óing 2ói2ni 2óinj 2óinp 2óint 2óinv ói2pa 2ói2rat 2ói2rá 2ói2ro ói2sz 2ói2ta ói2tók ói2vad ói2zé ói2zo ó1í óí2rá óí2ri óí2ro óí2ró óí2té óí2vé óí2vü óí2vű óí2ze óí2zé óí2zi óí2zü óí2zű ója1g2 2ójakén. ójak2r 2ójam ója1p2 2ójav 2ójárá 2ójárm 2ójáró. 2ójáru 2ójárv 2ójáté 2ójáts óje2gy1á2 2ójut 2ókab ók1a2da ók1ajt ó2k1akó ó2k1alj ók1alko 2ókaló 2ókamp 2ókamr 2ókapa 2ókapc 2ókaps 2ókar. 2ókara 2ókarr 2ókart 2ókata 2ókate óka1t2r ók1aut 2ókav 2ókaz ó2k1áll ók1árad ó2k1á2ri ó2k1árn ó2k1áru 2óke2d ó2k1edz ók1egé 2ókem ó2k1emb 2óker óker1este óke2r1ékné óke2rig ó2k1e2rÅ‘ óke2r1üg óke2sz ók1esze óke2t ók1ete 2ókev 2ókez ó2k1é2l 2ókém 2ókény 2ókép. 
2óképb 2óképe óké2p1e2l 2óképé 2óképl 2óképn 2óképpel 2óképr 2óképt 2óképz ó2k1ér. 2ókérd 2ókére 2ókéré 2ókérÅ‘ 2ókés ó2k1étt ók1étv ók1gr ók2hiá 2ókia 2ókiá 2ókic 2ókie2 2ókié 2ókif óki2g ók1iga ó2k1igé 2ókií 2ókij ók1ill 2ókim ók1imi 2ókinc 2ókine ó2k1int 2ókiny 2ókio ó2k1isk ó2k1ist 2ókisz 2ókit 2ókiv ók1i2va ók1k2r ók2lim ó1k2lí 2ó1k2ló 2ó1k2lu 2ókock 2ókoco 2ókoe 2ókoh 2ókoll 2ókomé 2ókomf 2ókomp 2ókonc 2ókonf 2ókonj 2ókons 2ókont 2ókony 2ókonz 2ókoo 2ókop 2ókorá óko2r1á2s 2ókorb 2ókore ó2k1org 2ókori 2ókorl 2ókorm 2ókorn 2ókoro 2ókorr 2ókors 2ókoru ó2k1oszl 2ókód ók1ó2l 2ókór ó2k1óv 2óköl ók1ö2lé ók1ö2lÅ‘ ók1örd ók1ötl ók1pl ók1pr ó1k2rá ók2reá 2ó1k2rém ók2rét 2ó1k2ri ó1k2rí ók2ron ók2ros 2ó1k2rón ók1sk ók1st ó2k1ug ó2kum 2ókup 2ókur óku2sz1ál óku2sze 2ókut 2ókúr ó2k1üg 2ókül 2óküs ók1üzl 2ó1k2vó ól1ajt óla2man óla2pa ó2l1a2pál óla2pol óla2p1osz ól1a2rá ól1azo ólá2b1út 1ó2lál ólás3s ólá2sü ól1átl óleí2 ól1e2se ól1e2sé ól1esh ól1esn ól1ess ól1est óle2ta ólete2 óle2t1ev ó2l1érz ólé2tel ólé2tés óli2ako óli2am óli2s ól1iszo ól1ín óloma2 ólo2m1al ólo2m1á ólo2m1e ólo2mis 1ó2lomr ó2l1osz ólóá2 óló1sl óló1sp ólót2 ó2l1öl ó2l1ör ó2l1ös ó2l1ö2z ól1p2r ólu2m1e ólus3s ólu2szá ól1üt ól1üv ólya2ga ólyag1g óly2á óm1abl ó2m1a2cé 2ómagn óm1akn óm1all 2óman 2ómara 2ómarc ómaren2 2ómarh 2ómark ómas2 óma1sp 2ómass 2ómate óm1a2to ó2m1au 2ómax 2ómág óm1áll 2ómárk 2ómárt 2ómáz óm1b2r óm1e2gy ó2men. 2óméd 2ómél óm1éle ó2m1ép 2ómére 2óméré 2ómérg 2ómérk 2ómérn 2ómérÅ‘ 2ómérs 2ómérté ómiac1 ómi2as ó2m1i2b ómi2g ó2m1igé 2ómi2k óm1iko 2ómin ó2m1i2ono ó2m1i2ont ó2m1i2p 2ómis ó2m1isk ó2m1ist ómi2sz ó2m1i2z 2ómod 2ómog 2ómoh ó2m1ola ó2m1old 2ómond 2ómoni 2ómono 2ómont 2ómor 2ómos ó2m1osz 2ómot ó2m1ox 2ómoz óm1öt óm1Å‘r óm1pr óm1üt óm1üz óműt2 2ónad 2ónag óna2kás óna2k1e2 óna2kol ón1alj. ón1aljak ón1aljá ón1aljo ón1alju 2ónap óna2pa. óna2pá ó2n1apába óna2pe ón1arc ó2n1asp ón1aszt óna1t2 2ónav óná2l ó2n1áll ó2n1álo ónás1s ón1br óne2d ón1ez 1ó2néé 2óném ón1épí 1ó2néra ón1érc ó2nérd 1ó2néró óné2v1á 2ónéz óni2g ó2n1igé ó2n1ist ón1odv 1ó2nokul 2ónor ón1ox ón1ön ó2n1ör ón1öt ónőé2h ón1pr ó2n1u2t ón1ür 2ónya 2ónye 2ónyil 2ónyitá 2ónyitó. 
2ónyí 2ónyo 2ónyu 2ónyú ó1o óo2dú óo2ka óo2ká óo2k1i2 óo2ku óo2la óo2li óo2pe óo2ra óo2ri óo2ro óo2vi óo2xi ó1ó óó2la óó2lá óó2li óó2lo óó2ra óó2ri óó2sá óó2vá óó2vo óó2vó ó1ö óö2bö óö2ko óö2kö óö2lé óö2lÅ‘ óö2rö óö2ve óö2zö ó1Å‘ óő2re óő2ré óő2ri óő2rö óő2rü óő2sé óő2sö ó2p1a2da ó2p1alk óp1anal ó2p1a2no óp1ant ó2p1any óp1a2rán óp1áll ópcsa2p1 ó2p1ef ó2p1e2g óp1elo ó2p1id óp1ind ó2p1i2o óp1i2rá ó1p2lak ó1p2las ó1p2lu ó2p1ob ó2p1o2la ópo2rad óp1óra ó1p2ri óp2rod óp2rop ó1prób óp1t2r ópus3s ó2p1új óp1üv ó2rabé ór1ace 2órada óra2dat ó2raib ó2raié óra2iér ó2raih ó2raii ó2raij ó2raik óra2iké óra2iko ó2raim ó2rain ó2raip ó2rais ó2rait ó2raiv 1ó2raje 2órajo 2órajz óra1kv ó2rala ór1alk ó2ramo ó2r1ani ór1ant óra1p2l ór1arc 1órare óra1s2p ó2r1a2tom 1ó2raü 1óráén ór1ágg ór1á2gy 1órákh 1ó2ráki ó2r1ál 1ó2rámé 1ó2rámo 1órámr ó2r1á2p órá2se 2óráta ór1átv ór1br ór1ege ó2r1e2gy ó2r1e2le ór1ell ór1elm ó2r1elo ó2r1e2lÅ‘ ór1elv ór1emb ór1eml ó2r1e2r ó2r1e2set ó2r1e2tá ó2r1e2té ó2rezü ór1é2je ó2r1é2l ór1é2ri óré2vek ór1fl órhá2zot óri2aka óri2al ó2riási óriás3s óri2ch ó2r1id ó2r1int ór1i2onn ór1i2ono 2órip ó2r1i2pa ó2r1i2si ó2r1isme 2órit óri2z ór1izo ór1í2j ó2r1ír ór1í2z ór1kl órmé2s ór1o2ká ó3roké ór1o2ki óro2kok óro2koz ór1o2ku ór1okú ó2r1osz ó2r1o2x ó2r1óda óró2dáj órói2ko óró1p ór1ósá 2óróz ó2r1ö2l ó2r1öml ó2r1ös ó2r1öt ór1Å‘r ór1p2r ór1trá óru2mag óru2me ó2r1u2r óru2sab óru2san óru2se órus3s óru2sü óru2s3z ó2r1uta ó2r1ú2s ó2r1út ór1ün ór1ü2v ór1üz 2ósabl ó2s1a2d ó2s1a2g ó2s1alj ós1alt ós1amn ó2s1apr 2ósar ó2s1arcot ó2s1asp ós1ass ó2s1atl ó2s1au ósa2vo ó2s1ábr ó2s1áf óság1g ó2s1áll ós1á2lo ó2s1á2ro ós1áti 2ósáto ós1bl ós1br óscsap1á2 2óseb ós1e2gy 2ósej ós1elj ós1elm ós1e2lÅ‘ ós1els ós1elv óse2m ós1emb ós1emi ó2s1en óse2p ós1epo ós1erÅ‘ ó2s1es ó2s1e2t ó2s1ez ó2s1é2l 2ósém ós1é2ne ó2s1ép ó2s1ér. ó2s1érte 2ósét ósé2táka ó2s1é2te ós1étk ós1étt ós1fr ó2s1i2d ósi2g ós1ige ósi2kerb 2ósikl ós1ind ós1inf ósi2p ós1ipa ós1iro ó2s1isk ós1ist 2ósík ó2s1ính ós1í2rá ós1í2ró ó1s2kat ós2kic óski2s ós1kl ós1kv ós2lag 2óslá ó1s2ni 2ósokas 2ósokko 2ósoks ós1oli 2ósor ó2s1org ó2s1orj ó2s1orm ó2s1ott ó2s1ov ó2s1ó2r ó2s1ö2l ó2s1ös ó2s1öt ós1Å‘r ó1s2pec 2ó1s2pek 2ó1s2pir ós1pl ó1s2pu ós1s2k ós1s2p ós3sze ós3szö 2ó1s2tab ó1s2tad ó1s2taf 2ó1stand 2ó1s2tat 2óstá ós2tább ó1s2tád ó1s2tát ó1s2tég 2óstí ós2topo 2óst2r ós1tré ós1tri ó1stru 2óstú 2ósug ó2s1u2r ó2s1u2t ós1úr. ós1ú2ri ós1úrn 2ósül 2ósür ó2s1üs ó2s1üz 2ósűrí 2ósza ó2s3zac ósza2k1ü ó2sz1a2lap 2ószá ószá2gol 2ószed 2ószeg ó2sz1e2gű 2ószek 2ószel 2ószem 2ószen 2ószer. 2ószere ósze2r1e2pé 2ószerk 2ószerv 2ószerz 2ószez 2ószé ószé2n1é2 2ószi ó2szima 2ószí 2ószk ósz2l 2ószob 2ószoc 2ószof 2ószol 2ószon 2ószó 2ószö ó2sz1ös 2ószÅ‘ 2ószp ós3zs 2ószt ó1sz2tá ószt2rá 2ószú 2ószü ó2sz1üg 2ószűk 2ószűré 2ószűrÅ‘ ó2s3zűrt ósz2v ót1adó 2ótag. 2ótaga óta2gal 2ótagb 2ótag1g 2ótagi 2ótagj 2ótagk 2ótagn 2ótago 2ótags 2ótagu ót1ajk 2ótalap 2ótalá ót1alk ó2t1alm 2ótan ót1anti 2ótap 2ótar ótas2 ó2t1ass 2ótat ót1a2ur 2ótax 2ótág ót1á2ga ót1ágg 2ótáp ótára2d ótá2r1ada 2ótáro 2ótárs 2ótávc 2ótávi2 ótá2v1ir 2ótáví 2ótávk ó2t1e2g ó2t1elh ó2t1e2lÅ‘ ó2t1emb ó2terd ót1égé ó2t1é2ké óté2t1ö2 ót1fr ó2t1i2d óti2g ó2t1igé ót1ill ó2t1im ó2t1ing ót1i2pa óti2sz ó2t1í2r ó2t1í2v ó2t1íz ót1kr ótlé2ke 2ótoj 2ótol ótol2l1a2d ót1oml 2óton 2ótor ó2t1ors 2ótov 2ótó. 
2ótón ót1öko ótÅ‘e2 ót2rad ót2raf ót2rak ót2ran ót2rén ót2rik ót2ril ót2rió ót2rom ót1sl ót1sp ótsze2r1ep ótu2sze ót1ülé ót1ü2lÅ‘ ót1üst ó2t1ü2v ótű2z3s ó1u óu2bo óu2ga óugrás1s óuj2j1a2da óu2ni óu2no óu2ra óu2sz óu2ta óu2tá óu2to óu2tó óu2tu ó1ú óú2jí óú2ré óú2sz óú2ti óú2to ó1ü óü2gy óü2le óü2lé óü2lÅ‘ óü2nÅ‘ óü2re óü2rí óü2rü óü2sz óü2te óü2té óü2tÅ‘ óü2ve óü2ze ó1ű óű2zÅ‘ 2óvad 2óvag 2óvaj 2óvak. 2óvaks 2óvaku 2óvaló 2óvar 2óvas ó2vat 2óvág 2óvák 2óvál 2óván óvá2r1al óvárosi2h ó2vása 1ó2vási ó2váso 1ó2vásr 1ó2váss 1ó2vást 2óváz óve2r1a óveri2 óve2rip óv1in 1ó2vod ó2vom 2óvon óza1d2 óz1a2dá óz1a2dó 2ózaj óz1akc óza1kr óz1akt óz1a2la óz1arc óza1s óza2t1a2l óza2tan óza2tál óza2tés óza2told óza2t1ü2 2ózav ó2z1á2g óz1áll ó2z1á2ru ó2z1á2rú óz1bl 1ózdi ó2z1e2g ó2z1el óz1em óz1erÅ‘ ó2z1ex óz1ez ó2z1é2l ó2z1é2te óz1fl óz1fr ózhajói2 óz1imp óz1ing ó2z1i2p ózi2s1e2 ózi2sir ózis3s ózi2s3z ó2z1old ózo2n1a2 ózo2n1á ózo2ni ózós2 ózó1sp óz1Å‘r óz1pr ózsa1k2 ó2z3ser óz3sor óz3sz óz1t2r 2ózuh ó2z1u2r ó2z1u2t óz1ú2s ó2z1út óz1ü2g óz1ül óz1ü2z 2ö. ö1a ö1á öb1a2n öb1ál öb1á2r öb1át öb2b1a2 öb2b1á2 öb2b1eg öbbe2l öb2b1ele öbbe2m öbb1eme öb2b1e2r öb2b1esz öbb1eve öb2bél öb2bid öb2b1is öb2bí öb2b1ol öb2b1os öb2bot öb2bó öb2bö öbbü2 öb2b1ül ö2b1ef ö2b1eg ö2b1e2l ö2b1e2m ö2b1e2n ö2b1er ö2b1él ö2b1ér. ö2b1érté ö2b1érz öb1fr ö2b1i2d ö2b1ing ö2b1int 1öbli 1öblö öb1or öb1ón ö2böll 1öböltÅ‘ öc1aj öccs1a2 öccs1i ö2c1ép ö2c1éve öc3he öc3hö öció2 öc1Å‘r ö2cs1a öcs1éj öcs1ék ö2cs1é2te ö2csiz öcs1izz öcs1ó ö2cs1ö2l öcs1ű2r öc3sz öd1a2l öd1a2n öd1ar öd1á2l öd1ár ö2d1ef öd1ell ö2d1em öd1e2vé ödé2m1o ödé2sa ödés3s ödé2s3z ö2d1é2ves öd1é2vén öd1é2vér öd1gr öd1íz öd1os öd1óc öd1ó2r öd1sp öd1u2s öd1új öd1ú2s ö2d1üv ö2d1űz öd3zá ödzáró2 öd3zu ö1e ö1é öfés3s ög1ab ög1a2c ög1a2d ög1ag ög1a2k ög1a2l ög1a2n ög1ap ög1ar ög1as ög1a2t ög1áb ög1ág ög1á2l ög1á2r ög1át ög1dr ö2g1e2g ö2g1e2kéi ög1elb ö2ge2leme ö2g1elf ö2g1elha ö2g1elm ö2g1elo ö2gemel ö2g1er ö2g1esz ög1e2vé ö2g1é2g ög1ékt ö2g1é2p ö2g1ér. ö2g1érs ög1érté ögés3s ög1fr ö2g1id ö2g1i2m ög1inf ö2g1ist ö2g1iva ö2g1i2z ög1ín ög1ír ög1ív ög1kr ög1o2l ög1op ög1o2r ög1os ög1ó2r ög1öli ög1ölö ö2g1öv ög1pr ögre1p2 ög1sk ög1sp ög1tr ög1ud ög1u2n ög1u2t ö2g1üg ö2g1üs ö2g1üt ö2g1üv ö2g1üz ö2g1űz ög3ya ö1i ö1í öj2tél öj2t1o ök1ab ök1a2g ök1a2k ök1a2l ök1a2n ök1a2p ök1ar ök1as ök1a2t ök1au ök1a2v ök1ág ök1ál ök1á2p ök1á2r ök1át ök1áz ök1dr ö2k1e2d ö2k1e2g ö2k1eke öke2l ök1ela ök1ele ök1elh ök1elm ök1eln ök1elÅ‘ ök1elv ö2k1e2m öke2né öken1s ök1erd ö2k1e2resz ö2k1e2rÅ‘ ö2k1es öke2vés ö2k1ez ö2k1ég ö2k1ékn ök1éks ök1é2les ö2k1ép öké2r1e2l öké2r1em ö2k1é2rez ö2k1é2rés ökés3s ö2k1é2te ö2k1é2ve. ö2k1é2vek ök1é2vet ök1fr ök1gl öki2d ök1ide ök1ido öki2g ök1iga ö2k1igé ö2k1ikt ö2k1i2na ök1ind ö2k1ing ö2k1int ö2k1i2o ö2k1i2p ö2k1iro ö2k1is ö2k1iz ök1íj ö2k1ír ök1ív ök1íz ök1kl ök1k2r 1öklű ök1ok ök1old 1ökoló 1ökon ök1o2p ök1o2ro ök1orr ök1ors ök1o2v ök1ó2r ök1óv 1ökör. 
ökö2rö ökőár1a2d ökőé2 ök1pr 1ökrös 1ökrü 1ökrű ök1sp ök1sr ökszi2l1 ök1t2r ök1u2n ök1u2r ök1us ök1u2t ök1új ök1úr ök1út ö2k1üg ökü2l ö2k1ülé ö2k1ült ö2k1ü2t ö2k1ü2v ö2k1üz öl1a2d öl1ag öl1a2j öl1a2k öl1al öl1ap öl1a2r öl1au öl1a2v öl1á2g öl1ál öl1á2m öl1á2p öl1á2r öl1á2s öl1át öl1á2z öl1br ölcsa2l ölcs1á2p ölcs1á2r ölcs1ell öl2csev öl2csid öl2csiz öl2cs1ok ölcs1ol öl2csos öl2csüg öl2csül öl2dab öl2d1a2d öl2d1a2k öl2d1a2la öl2d1alj öl2d1alk öl2d1a2n öl2dap öl2d1as öl2d1á2 öl2deg öl2dep öl2dev öl2d1éd öl2dél öl2d1ing öl2dip öl2d1o2r öl2dos öl2d1ó2 öldÅ‘2 öl2dÅ‘r öl2dÅ‘s öl1dró öl2dur öl2d3z 1ö2l1e2b öle2gel öleg1g ö2lel ö2l1e2r ö3l2e3sü öle2t1á2 öle2t1el öle2to öle2t1u ö2l1e2v ölé2d ö2l1éde 1ö2léé ö2l1é2g ö2l1é2kes ö2l1ékí ö2l1él. ö2l1é2le ö2l1é2lé ö2l1élh ö2l1é2li ö2l1élj ö2l1éln ö2l1éls ö2l1élt ö2l1élv ö2l1éme ö2l1é2neke ö2l1é2p ö2l1ér. ö2l1é2red ö2l1é2rek ö2l1é2rezn ö2l1éré ö2l1érh ö2l1é2ri ö2l1érj ö2l1érk ö2l1érl ö2l1érn ö2l1érs ö2l1érte ö2l1érté ö2l1értü ö2l1é2rü ö2l1érv ö2l1érz ölés3s ö2l1é2tet öl1fr ölgyá2 öl2gyer ö2l1i2d öli2g ö2l1iga ö2l1igé ö2l1igy ö2l1ij ö2l1il ö2l1im ö2l1i2n ö2l1i2p ö2l1i2r ö2l1i2s ö2l1i2ta ö2l1itt ö2l1iz öl1í2r öl1í2v öl1o2c öl1o2k öl1ol öl1or öl1o2s öl1ó2v ölö2ki ö2l1ökl ö2l1öl öl1önt ö2l1ör ö2l1ö2v öl1Å‘rl öl1Å‘rö ölpárba2 öl1p2r öl1sk öl1sr öl1st öl2t1ad öl2taj ölt1ala ölt1alj ölta2n öl2tid öl2til öl2tí öl1tro öl2tur öl1u2g öl1uj öl1u2s öl1u2t öl1új öl1ús ö2l1üd ö2l1üg ölü2le öl1ülté ö2l1ültö ö2l1ülve ö2l1üs ö2l1üt ö2l1üv ö2l1üz ö2l1űz ö2ly1a2 ö2ly1á öly1e2g ö2lyel öly2föl öm1a2d öm1a2g öm1al öm1a2n öm1a2p öm1ar öm1au öm1áb öm1ág öm1áh öm1ál öm1áp öm1á2r öm1á2t öm1áz öm2b1a2c öm2b1ak ömba2l öm2b1a2n öm2baz öm2b1á öm2bec ömb1ele öm2b1e2m öm2b1es öm2bék öm2b1i2d öm2bin öm2bí öm2b1os öm2b1ó2 öm2bú öm2bür ö2m1e2b öme2g1a2 ömeg1e2r öme2gesz öme2g1ék öme2gép ömeg3g öm1egy ö2m1e2l ö2m1ember ö2m1emel ö2m1e2r öme2s ö2m1ese ö2m1este ö2m1ég ö2méhs ö2m1é2l öm1é2nekb ö2m1é2neke ö2m1é2neké öm1é2nekh ö2m1é2nekk ö2m1é2nekr ömé2ny1ü ö2m1ép ö2m1ér. ö2m1érte ö2m1érté ömés3s öm1gr ömi2g ö2m1igé ö2m1in ö2m1i2p ö2m1i2ta ö2m1itt ö2m1izm ö2m1i2zo öm1í2z ömkész1 öm1kl öm1kr ömlés3s 1ömlöt öm1o2k öm1o2l öm1or öm1os öm1ó2d öm1ó2r ö2m1önt öm1p2r öm1sp öm1st öm1tr öm1u2g öm1uj öm1u2t ö2m1üg ö2m1ünn ö2m1üv ö2m1üz ö2m1űz ön1ab ön1a2d ön1a2g ön1a2j ön1a2k ön1a2l ön1am ön1a2n ön1a2p ön1ar ön1as ön1at ön1au ön1a2v ön1az ön1áb ön1ág ön1ál ön1ám ön1á2p ön1á2r ön1á2t önát1a2dó. önát1é ön1áz önbé2két. ön1bl ön2c1ál ön2ch ön2cÅ‘ önc3ség önc3sz ön2cz ön2dab ön2dap önde2m ön2d1ér. ön2d1érn ön2d1érr ön2d1értÅ‘ ön2d1or ön2d1Å‘ ön2dz ö2n1eb ö2n1e2d ö2n1ef ö2n1e2g ö2n1e2l ö2n1e2m öne2n ön1ene ö2n1e2r ö2n1es ön1e2v ön1ex ön1é2g ö2n1éj ö2n1é2k ö2n1é2l ö2n1é2p ö2nérd ön1éri ön1érl ön1érte ön1érv önés3s ön1ész ö2n1éte ö2n1étt ö2n1év. 
ö2n1éves ön1évet ö2n1é2vér ö2n1é2vét ö2n1évv önfe2lem ön1f2r ön1g2l ön1g2r öngy1as ön2gyék öngy1ó2r 2öngyö ön2gyÅ‘ ö2n1i2d ön1if öni2g ö2n1iga ön1ige ö2n1igé ön1ill ö2n1im ö2n1in ö2n1i2p ö2n1i2r ö2n1is ön1ita ö2n1i2z ön1íj ö2n1ír ö2n1íz ön2k1ag ön2k1any önk1áru ön2kát önk1olt ön2k1ú önkü2 önmeg1g önna2k ön2n1á önny1a2d önny1á önny1e2dz ön3nyú ön1o2d ön1o2k ön1op ön1or ön1os ön1ox ön1ó2c ön1ó2r ön1óv ön1öb ö2n1ör ö2n1ö2v ön1Å‘r önÅ‘2re ön1Å‘z ön1pl ön1pr ön1ps önségü2ké ön1s2p ön1s2t2 önsz2 ön2t1ell öntgen1n öntös3s 1öntöz ön1t2ra ön1t2rá ön1t2ré ön1ud ön1un ön1u2r ön1u2s ön1u2t ön1új ön1ú2s ön1út ön1üd ö2n1ü2g ö2n1ür ö2n1ü2t ö2n1üz ö2ny1a2 ö2ny1á öny1d ö2ny1el ö2ny1id ö2ny1in ö2nyí ö2ny1o ö2nyüz öny2vaj öny2v1a2l öny2van öny2v1á2r öny2v1e2g öny2v1er öny2v1esz öny2vev öny2v1ég öny2vél öny2v1ér. öny2v1ill öny2v1í öny2v1o ö1o ö1ó ö1ö ö1Å‘ öp1aj öp1a2l öp1ál öp1e2l öpe2nyá öp1e2r ö2p1ép öp1ö2lÅ‘ ör1ab ör1a2c ör1a2d ör1a2g ör1aj ör1a2k ör1a2l ör1a2n ör1a2r ör1as ör1a2t ör1a2u ör1a2x ör1a2z ör1áb ör1á2c ör1á2g ör1á2l ör1á2r ör1á2s ör1á2t ör1br örcs1ál örcs1ell örcskész1 ör2csos 2ördí ör1d2r 2ördü ö2r1e2c ör1e2dz ö2r1ef öre2ga öre2g1ék ör1e2gy öre2j1á öre2k1e2sz öreké2 ö2r1e2l ö2r1em ör1enc ö2r1e2p ö2r1e2r ör1e2se ö2r1e2tetÅ‘ ö2r1e2v ö2r1ex ö2r1ez ö2r1é2de ö2r1é2g ö2r1éj. ör1éks ö2r1é2l ö2r1éneke ö2r1ép ö2r1éré ö2r1é2ri öré2sel öré2t1e2g ö2r1étv ör1é2ve. ö2r1évk ör2fá ör2f1év ör2f1i2p ör2fis ör2f1os ör2fÅ‘r ör1g2r ö2r1i2d öri2g ö2r1iga ö2r1igá öri2k ör1iko ö2r1ill ö2r1im ö2r1ind ö2r1ing ö2r1inj ö2r1ink ö2r1int ö2r1inv ö2r1i2p ö2r1ira ö2r1i2s ö2r1i2ta ör1itt ö2r1ivá ör1i2zo ö2r1ír ö2r1í2v ö2r1í2z ör1k2l ör2k1öltÅ‘ ör1k2r örle2ta ör2l1in örny1a2l örny1a2n örny1as örnye2l örny1ele ör2ny1er ör2nyéj ör2nyés örny1í2r ör2nyó ör1ob ör1o2k ör1o2l ör1op ör1o2r ör1os ör1ó2r 2örög örö3g2e 1örömü ö2r1önt ö2r1ör ö2rössze ö2r1ösz örpe1t2 ör1s2p ör1s2v örta2r örtá2v1 ör2t1éks örté2l ör2t1élé ör2t1élÅ‘ ört1érne örté2sz ör2tív ör2t1ok ör2top ör1ud ör1uj ör1u2n ör1u2r ör1u2s ör1u2t ör1új ör1úr ör1ú2t ö2r1üd ö2r1ü2g örü2l1ék ö2r1ür ö2r1üs ö2r1üt ö2r1ü2v ö2r1üz 1örv. örva2s 1örvb 1örvek 1örvem 1örvet 1örvéb 1örvéh 1örvév 1örvh 1örvn 1örvr 1örvt 1örvü 1örvű 1örvv örz4s ör2zs1á2 ör2zs1e2l ör2zsid ör2zsin ör2zsir ör2zs1í2 ör2zs1o ör2zsó ör2zsu ör2zsú ös1a2g ös1al ös1ár ö2s1el öses3s ö2s1ez ösié2 ö2s1i2p ös1k2r ös1o2l ös1o2r összá2r 1összeg össz1emb 1összes ös3szí öst1arc ö2s1ü2v ö2sz1a2 ösz1e2r öszi2s ösz1isz ö2sz1o2 ö2sz1Å‘ ösz2t1ell öt1ab öt1aj öt1a2k öt1a2l öt1am öt1as öt1a2t öt1áb öt1ág öt1ál öt1ár öt1á2s öt1eké öt1e2m öt1ep öt1es öte2t1a2 öte2tel öte2u öt1e2v öt1e2z ötélé2 öté2lék öté2l1o öt1érté öt1érz ötés3s öt1é2ves öt1fl öt1fr öt1gr öt1i2r öt1í2v öt1kr 1ötlet ötle2t1á 1ötlé öt1ok öt1ol öt1or öt1os öt1ó2r ö2tödb 1ö2tödd 1ö2tödne 1ötödöt 1ö2tödr 1ötödü ö2tös ötÅ‘a2 ötÅ‘e2 ötőé2 öt1pr öt1sc öt1st öt2t1a2c öt2tar öt2t1as öt2t1ál öttá2r ött1erk ött1érte öt2t1ut öt1uj öt1un öt1u2t öt1ü2l ötve2n1ez ö1u ö1ú ö1ü ö1ű öv1ab öv1ak öv1á2r öv1e2dz öve2g1a2 öveg1és öveg1g öve2go öv1e2r öve2t1a2 öve2teg öve2t1é2l öve2to öve2t1ú övetü2l öve2t1ülé ö2vé. öv1ég öv1é2j övé2nyel övé2nyer övé2nyö övé2sza övé2szá övé2szer övé2szo övé2szö övé2szül övi2dá ö2vih övis3s ö2viv öv1or öv1ó2d öv1ölt övÅ‘rés3s öv1ut öz1ab öz1a2c öz1a2d öz1a2j öz1a2k öz1a2l öz1a2m öz1a2n öz1a2p öz1a2r öz1at öz1au öz1az öz1á2g öz1ál öz1á2m öz1á2p öz1á2r öz1á2t özát1é öz1d2r ö2z1e2b ö2z1e2d öze2gel ö2z1egés özegész1 ö2z1e2gy ö2z1ela öze2leme ö2z1e2m öz1eng öz1ent öz1epr ö2z1er ö2z1es öze2t1é2k özetÅ‘2 öze2t1Å‘r ö2z1e2v ö2z1ég ö2z1é2je özé2k1e2l öz1élel öz1é2let. 
öz1é2lé öz1élm öz1élt özé2m öz1éme özé2p1a özé2p1el özé2p1em özé2pí öz1épít özé2p1o ö2z1ér. ö2z1érb ö2z1érd ö2z1érh ö2z1é2ri özér2t1e2h ö2z1értele ö2z1érté ö2z1értÅ‘ ö2z1érv ö2z1érz öz1étk öz1fr özi2g ö2z1iga özigaz1 ö2z1igá ö2z1igé ö2z1igy ö2z1i2ko ö2z1ikt ö2z1ill ö2z1i2m ö2z1inf ö2z1ing ö2z1inp ö2z1int ö2z1inú ö2z1inv ö2z1ira öz1irá ö2z1i2ri öz1iro ö2z1iró öz1isko ö2z1ism ö2z1isp ö2z1i2sz ö2z1iz öz1ír öz1íz özmeg1g öz1ob öz1o2k öz1ol öz1op öz1os öz1ov öz1ó2h öz1ón 1özönt ö2z1ö2r öz1össz ö2z1öv özÅ‘a2 özÅ‘e2r öz1Å‘r. öz1Å‘rk öz1Å‘2rö öz1pl öz1p2r öz3sa öz3sá öz3se öz3sé öz3s2k öz3so öz3s2p öz3sú öz3sz öz1t2r öz1ug öz1u2n öz1ur öz1ut öz1ú2r öz1út ö2z1üd ö2z1ü2g öz1ünn ö2z1üt ö2z1üv ö2z1ü2z öz3zs 2Å‘. Å‘1a Å‘a2da Å‘a2dá Å‘adás1s Å‘adá2sz Å‘a2do Å‘a2dó Å‘a2du Å‘a2ga Å‘a2gá Å‘a2gi Å‘a2gy Å‘agyag1 Å‘a2ja Å‘a2já Å‘a2ka Å‘a2ká Å‘a2kó Å‘a2la Å‘ala2g1 Å‘a2l1e Å‘a2lo Å‘a2mÅ‘ Å‘a2na Å‘a2no Å‘a2nó Å‘a2nyá Å‘a2pa Å‘2apar Å‘a2pá Å‘a2po Å‘a2pó Å‘a2pu Å‘a2ra Å‘a2rá Å‘a2ri Å‘a2ro Å‘a2sz Å‘a2to Å‘a2ty Å‘a2ul Å‘a2ur Å‘a2ut Å‘autói2k Å‘a2va Å‘a2xi Å‘a2zo Å‘1á őá2cs őá2gai őá2gak őá2gas őá2gat őá2gá őá2gé őá2gi őá2go őá2gú őá2gy őá2hí őá2la őá2lo őá2mí őá2po őá2ra őá2rá őá2re2 őár1em őá2ri őá2ro őá2ru őá2rú őá2sa őá2sá őá2so őá2só őá2su őá2sz őá2ta őá2t1á2 őá2t1e2 őá2té őá2tí őá2tü őá2vó Å‘ba1p Å‘b2le Å‘b2lo Å‘b2ri Å‘b2ro Å‘b2ró Å‘b2ru Å‘c1ap Å‘c3cso Å‘c1gr Å‘c3há Å‘c3hé Å‘c3hö Å‘c2lu Å‘2cs1ala Å‘csa2p1á2g Å‘2cs1é2j Å‘2cs1é2rü Å‘cs1Å‘st Å‘cs1s Å‘c3sz Å‘d1a2da Å‘d1a2dá Å‘2d1a2lap Å‘2d1au Å‘d1ál Å‘d1á2z Å‘dea2 Å‘2d1e2g Å‘d1eld Å‘d1elj Å‘d1elk Å‘d1e2lÅ‘ Å‘d1els Å‘2d1ép Å‘2d1érte Å‘2d1érz Å‘dé2sa Å‘dé2so Å‘dés3s Å‘dé2s3z Å‘2d1id Å‘di2g Å‘2d1iga Å‘d1ind Å‘d1int Å‘2d1isk Å‘2d1op Å‘d1ost Å‘2d1ö2l Å‘2d1öv Å‘d1Å‘r. Å‘d1Å‘2ré Å‘d1Å‘rn Å‘d1Å‘rr Å‘d1Å‘rt Å‘2d1Å‘st Å‘d1pr Å‘d2ram Å‘d2rap Å‘1d2rá Å‘1d2res Å‘1d2rog Å‘1d2ró Å‘1d2ru Å‘d1st Å‘d1t2r Å‘2d1üg Å‘2d1üz Å‘2d3zá Å‘1e Å‘e2ce Å‘e2cs Å‘e2dé Å‘e2dz Å‘e2ge Å‘e2gé Å‘e2gy Å‘e2ke. Å‘e2kék Å‘e2la Å‘e2l1á2 Å‘e2lek Å‘e2lemé Å‘e2lemg Å‘e2lemh Å‘e2lemm Å‘e2lemn Å‘e2lemr Å‘e2lemü Å‘e2li Å‘e2lo Å‘e2lö Å‘e2lÅ‘d Å‘e2lü Å‘e2ma Å‘e2me Å‘e2mé Å‘e2mu Å‘e2ne Å‘e2pi Å‘e2po Å‘e2re Å‘e2ré Å‘e2rÅ‘d Å‘e2rőé Å‘e2rÅ‘h Å‘e2rÅ‘i. Å‘e2rÅ‘k Å‘e2rÅ‘m Å‘e2rÅ‘rÅ‘ Å‘e2rü Å‘e2sé Å‘e2si Å‘e2sÅ‘ Å‘e2ta Å‘e2te Å‘e2ti Å‘e2un Å‘e2vé Å‘e2vi Å‘e2vo Å‘e2vÅ‘ Å‘e2ze Å‘1é őé2de őé2et őé2ge őé2gé őé2gi őé2gÅ‘ őé2hem őé2hes őé2ji őé2ke őé2ké őé2kí őé2lé őé2lÅ‘ őé2lű őé2nekb őé2neke őéne2kest őé2neké őé2nekk őé2nekr őé2pí őé2pü őé2rem őé2ren őé2rez őé2ré őé2ri őé2tek őé2va őé2ve. 
őé2vek őé2ves őé2vet őé2véb őé2vén őé2vér őé2vét őé2vi Å‘fa2l1a2d Å‘f2la Å‘f2le Å‘f2lo Å‘f2ló Å‘f2lö Å‘f2lu Å‘fo2kál Å‘fo2kér Å‘fo2kin Å‘f2ra Å‘f2rá Å‘f2ri Å‘1f2ro Å‘f2rö Å‘gé2p1e2l Å‘gépü2l Å‘gép1ülé Å‘gés3s Å‘g2le Å‘g2ló Å‘g2nó Å‘2g1ö2l Å‘g2ra Å‘g2rá Å‘g2ri Å‘g2ró Å‘guba2 Å‘gy1a2la Å‘gy1art Å‘2gyeg Å‘2gyel Å‘gy1e2lÅ‘ Å‘gy1elv Å‘gy1elz Å‘2gyin Å‘2gy1ör Å‘hangá2 Å‘han2g1ára Å‘1i Å‘i2dea Å‘i2deá Å‘i2deg Å‘i2deo Å‘i2dén Å‘i2do Å‘i2dÅ‘ Å‘i2ga Å‘i2ge Å‘i2gé Å‘i2gy Å‘i2ko Å‘i2ma Å‘i2má Å‘i2mi Å‘im1p2l Å‘i2nas Å‘i2on Å‘i2pa Å‘i2ra Å‘i2rá Å‘i2ri Å‘i2ro Å‘2iru Å‘i2si Å‘i2sz Å‘i2tala Å‘i2talá Å‘i2talé Å‘i2tali Å‘i2tall Å‘i2va Å‘i2vá Å‘i2vó Å‘i2zé Å‘i2zo Å‘1í őí2gé őí2ja őí2ra őí2rá őí2ro őí2ró őí2ru őí2vá őí2ve őí2vé őí2vi őí2vükb őí2vüke őí2vün őí2vű őí2ze őí2zü őí2zű Å‘je2gy1á2 Å‘job2b1ol Å‘job2b1ó Å‘jogá2szi Å‘ke1k2 Å‘ke1p2 Å‘kes2 Å‘ke1sp Å‘ke1st Å‘ké2p1el Å‘ké2s1el Å‘kiá2 Å‘kie2 Å‘k2la Å‘k2le Å‘k2li Å‘k2lí Å‘k2ló Å‘k2lu Å‘kóro2 Å‘1k2ra Å‘1k2rá Å‘1k2reá Å‘k2red Å‘1k2ré Å‘1k2ri Å‘1k2rí Å‘1k2ro Å‘1k2ró Å‘k2va Å‘leí2 Å‘lés3s Å‘lőé2r Å‘lÅ‘t2 Å‘lÅ‘1tr Å‘ma2gár Å‘mag1g Å‘ma2g1ó2 Å‘műé2h Å‘műé2n Å‘műt2 Å‘2n1e2ke Å‘n1ems Å‘néve2 Å‘név1es Å‘2ny1a2d Å‘nya2g Å‘2ny1aga Å‘ny1a2la Å‘ny1ára Å‘ny1á2ro Å‘2nyát Å‘2nyef Å‘nye2lem Å‘2ny1elh Å‘2ny1ell Å‘2ny1elo Å‘2ny1em Å‘2ny1élv Å‘2ny1Å‘ Å‘2nyüz Å‘1o Å‘o2áz Å‘o2be Å‘o2dú Å‘o2ká Å‘o2k1i2 Å‘o2kí Å‘o2ko Å‘o2la Å‘ola2j1á2r Å‘ola2je Å‘o2pe Å‘o2rá Å‘o2ri Å‘o2ro Å‘o2so Å‘o2ve Å‘o2xi Å‘1ó őó2ce őó2ha őó2no őó2nu őó2ra őó2rá őó2ri őó2va őó2vó Å‘1ö őö2bö őö2dé őö2ko őö2kö őö2lé őö2lÅ‘ őö2na őö2re őö2rö őö2ve őö2vé őö2vi őö2vö őö2zö Å‘1Å‘ Å‘Å‘2re Å‘Å‘2ré Å‘Å‘2ri Å‘Å‘2rö Å‘Å‘r2s1égb Å‘Å‘r2s1égn Å‘Å‘2se Å‘Å‘2si Å‘párba2jo Å‘p2la Å‘p2le Å‘p2lé Å‘p2ne Å‘ponc1 Å‘po2ral Å‘p2re Å‘p2ré Å‘prés1s Å‘p2ri Å‘p2ro Å‘p2ró Å‘p2sz Å‘r1a2dó Å‘r1a2gá Å‘r1agg Å‘r1ajk Å‘raj2t1ól Å‘r1akc Å‘2r1a2l Å‘ral1e Å‘ra2n Å‘r1any Å‘r1ap Å‘2r1a2r Å‘2r1a2s Å‘2r1at Å‘2r1au Å‘2r1a2z 1Å‘2r1áb Å‘rádi2ók Å‘r1ágy Å‘rá2k1e Å‘2r1á2l Å‘2r1á2p Å‘r1á2ru 1Å‘rbí 1Å‘r1bl 1Å‘r1br 2Å‘rea Å‘reá2li Å‘ree2 Å‘re2get Å‘r1e2gye Å‘rei2g Å‘2r1ekc Å‘2r1ekh Å‘re2lem Å‘r1elh Å‘2r1ell Å‘r1e2lÅ‘a Å‘r1els Å‘r1elt Å‘2r1elv Å‘2r1emb Å‘r1eml Å‘ren2d1Å‘2 Å‘r1enz Å‘reo2 Å‘re1prog Å‘2r1e2sÅ‘ Å‘reu2 Å‘reü2 Å‘2r1ex 1Å‘rezr 1Å‘2r1ég. 1Å‘régn 1Å‘r1égt 1Å‘2r1ékh Å‘r1ékné Å‘r1éks Å‘2r1é2l Å‘r1épü Å‘2r1é2ri Å‘ré2sa Å‘ré2sza 1Å‘2réü Å‘r1é2ve. Å‘r1é2vek Å‘r1fl 1Å‘rfÅ‘ Å‘r1fr Å‘r1g2r Å‘2r1i2d Å‘2r1if Å‘2r1i2ga Å‘2r1i2gá Å‘r1iha Å‘r1ill Å‘2rim Å‘r1imi Å‘r1i2na Å‘r1ind Å‘2r1inf Å‘2r1int Å‘r1i2pa Å‘ri2s1á Å‘r1ivá Å‘r1i2zé. Å‘2rizg Å‘r1izm Å‘r1i2zo Å‘r1ír 1Å‘rjö Å‘r1k2l Å‘r1k2r 1Å‘rlö Å‘2r1okt Å‘r1old 1Å‘2r1o2li Å‘r1oll Å‘2r1or Å‘2r1osz Å‘2r1ó2r Å‘3rög 1Å‘2rökü Å‘2röl 1Å‘römü Å‘r1öng Å‘2r1ör Å‘2r1össze 1Å‘2r1Å‘r Å‘2r1Å‘2s 1Å‘rÅ‘sr Å‘2r1Å‘2z Å‘r1pl Å‘r1p2r 1Å‘r1p2s 1Å‘rs. 
Å‘r2s1ál 1Å‘rsí 1Å‘rsö Å‘r1s2pe Å‘r1spi Å‘r1srá Å‘rs3s Å‘r1sz2 1Å‘rszen Å‘r2s3zöm 1Å‘r1t2r 1Å‘2r1un 1Å‘2r1u2r Å‘r1utá Å‘2r1új Å‘r1úr Å‘2r1út Å‘2r1üd Å‘r1üld 1Å‘2rüle Å‘2r1üs Å‘2r1üt Å‘2r1üz 2Å‘rzÅ‘sö Å‘2s1ad Å‘2s1a2g Å‘s1ajtób Å‘2s1ajtók Å‘sa2n Å‘s1ana Å‘sa2p Å‘s1arc Å‘s1ass Å‘s1au Å‘2s1áb Å‘2s1á2g Å‘sá2l Å‘s1áll Å‘s1ára Å‘s1árv Å‘s1dr Å‘s1e2d Å‘2s1e2ge Å‘2s1e2gy Å‘s1elm Å‘s1e2lÅ‘ Å‘s1elv Å‘2s1e2m Å‘se2n Å‘s1ene Å‘se2p Å‘s1epi Å‘s1epo Å‘s1e2rej Å‘s1erÅ‘ Å‘s1ess Å‘s1esz Å‘s1etn Å‘2s1e2v Å‘2s1ez Å‘sé2g1e2l Å‘2s1éger Å‘sé2gés Å‘2s1ék Å‘2s1é2l Å‘s1ép Å‘s1fl Å‘s1fr Å‘s1gn Å‘s1gr Å‘2s1i2d Å‘2s1if Å‘si2g Å‘s1iga Å‘s1i2ma Å‘s1i2má Å‘s1imi Å‘s1inf Å‘s1ing Å‘s1int Å‘2s1i2pa Å‘s1i2ra Å‘s1ist Å‘s1i2sz Å‘2s1i2z Å‘sí2ka Å‘s1í2ró Å‘s1í2z Å‘s2kál Å‘s1kl Å‘s1k2r Å‘s1kv Å‘s2lat Å‘s2nit Å‘s1ob Å‘so2k Å‘s1oko Å‘2s1o2l Å‘2s1op Å‘2s1org Å‘2s1os Å‘s1óc Å‘s1óri Å‘2s1ö2l Å‘s1önz Å‘sö2r Å‘s1örd Å‘s1öre Å‘s1örö Å‘s1örv Å‘2s1ö2z Å‘s1Å‘r Å‘s1Å‘2s Å‘s2pec Å‘s2pek Å‘s1p2l Å‘s2pór Å‘sp2r Å‘s2rác Å‘s1sk Å‘s1s2p Å‘s1s2t Å‘ssz2 Å‘s3sza Å‘s3szá Å‘s3szeg Å‘s3szek Å‘s3szell Å‘s3szem Å‘s3szen Å‘s3szer Å‘s3szes Å‘s3szék Å‘s3szén Å‘s3szf Å‘s3szi Å‘s3szí Å‘s3szl Å‘s3szo Å‘s3szó Å‘s3szö Å‘s3szÅ‘ Å‘s3szu Å‘s3szü Å‘s2tad Å‘s2tat Å‘1s2tát Å‘1s2teri Å‘s2tég Å‘s2til Å‘st2r Å‘s1tre Å‘s1un Å‘su2t Å‘s1uta Å‘s1úr. Å‘s1ú2s Å‘sza2k1e Å‘sza2k1ü Å‘sz1e2lÅ‘ 2Å‘szerk Å‘2szerÅ‘ Å‘sz1est Å‘szi2l1i2 Å‘sz1ill Å‘sz1ist Å‘szö2l Å‘sz1ölé Å‘sz1ölÅ‘ Å‘sz1ölt Å‘s3zse Å‘sz3sir Å‘szt2 Å‘2s3zűrű Å‘termo1 Å‘termosz2 Å‘tes2t1Å‘ Å‘tol2l1a2d Å‘1t2ra Å‘1t2ré Å‘1t2ri Å‘t2ro Å‘1t2ró Å‘ttes3s Å‘t2tés Å‘tt1int Å‘t2t1o2 Å‘t2t1u2 Å‘tűfé2 Å‘tűfél1 Å‘1u Å‘u2go Å‘u2ni Å‘u2ra Å‘u2rá Å‘u2ru Å‘u2ta Å‘u2tó Å‘u2tu Å‘1ú őú2jo őú2ré őú2ri őú2ro őú2sz őú2té őú2ti őú2to Å‘1ü őü2dü őü2ge őü2gy őü2le őü2re őü2rí őü2sz őü2te őü2té őü2ti őü2tö őü2tÅ‘ őü2ve őü2vö őü2ze őü2zé Å‘1ű őű2ré őű2ri őű2rö őű2zé őű2zÅ‘ Å‘vasfé2 Å‘vasfél1 Å‘vár1al Å‘ve2r1a 2Å‘vet Å‘vé2res Å‘vé2ret Å‘z1abs Å‘2z1a2d Å‘z1a2g Å‘2z1ak Å‘2z1a2l Å‘2z1a2t Å‘2z1au Å‘2z1ál Å‘z1á2t1e2 Å‘z1bl Å‘zeg1g Å‘ze2g1i Å‘z1e2gy 1Å‘2zekn Å‘z1elo Å‘z1els Å‘z1elv Å‘2zem Å‘z1emb Å‘z1e2mel Å‘2z1e2r Å‘ze2t1a2 Å‘ze2t1eg Å‘ze2t1el Å‘ze2ter Å‘ze2t1o Å‘2z1ér. Å‘zé2rem Å‘2z1é2ri Å‘2z1érl Å‘z1érté 1Å‘2zié Å‘2zi2g Å‘z1igé Å‘2z1in Å‘2z1iz Å‘z1k2r Å‘z1o2k Å‘z1ol Å‘z1os Å‘z1össz Å‘zÅ‘a2 Å‘zÅ‘e2l Å‘zÅ‘e2r Å‘z1p2r Å‘z3saj Å‘z3sap Å‘z3sát Å‘z3sik Å‘z3sis Å‘z3s2t Å‘2z3sü Å‘z3sz Å‘z1t2r Å‘z1út Å‘z1üg Å‘2z1üs Å‘2z1ü2z Å‘z1ű2z 2p. 1pa 2p1abl pa2cem pa2ch pa1cl pa2csú p1a2dag pad1ala pa2d1as pa2d1á2l pade2 pa2d1el pa2d1em pa2d1id pa2d1iga p1a2dott pa2du. pa2dut pa2dül pae2r paé2r pa1fl pa1f2r pa2ga. pai2dé 2p1ajtó pa2kad paka2r1ó pa2k1e2m pa2k1ér. 2pakku pa1klu pa2k1ó2 pa1k2ré 2p1akta. pak2tal pak2t1e2l pak2t1es pak2t1e2v 2p1akti pak2t1or. pak2t1orr pak2tos 2palag pala2g1ú pa2lapí paláza2d palá2z1adá pa2l1é2l 2p1alf 2p1alg 2p1alle 2p1almá pal1ud pam1ass pa2m1ur pamu2ta pana2d pa2n1ag pa2nal pan1ala pa2n1á2r panás1s pan1d2 pang1g pa2n1il pan1kro p1anny pa2nol pans2 pansz2 pan1szt pa2nü 2p1a2nya p1a2nyu pa2p1ad pa2p1a2pa papa2r pap1ara pa2p1aszt pap1áll pa2p1il pa2p1i2na. pa2p1i2p pa2p1o2ku pa2pö pap1p2 pa2p1ur pa2p1u2t pa2pú 2para. para2je par1a2la 2parann 2p1a2rany 2pa2r1a2r pa2r1ágá pa2r1ágn pa2r1ágo pa2r1ágv 2paráh 2p1a2ráj par1áll 2parár 2paráv 2parb 2p1arc. 2p1arca 2p1arcc 2p1arch 2parci 2p1arco 2p1arct 2p1arcu 2p1arcú pa2r1el pa2r1é2l 2parig 2paril pa2r1ill par1isk par2k1a2l par2k1á par2kel par2k1ing par2kov par2kó par2kön par2k1ö2v par2k1Å‘2 par2k1ü 2paros. 
2parosi 2paroso pa2rö pa2rÅ‘ 2parr pars2 par2tem par2t1ol par2t1Å‘2 pa2r1ü2 pa2rű past2 pasz1alj pasz1alt pa2szas pat1ada patakö2 pa2t1alk pat1anya pa2t1ara pat1álc pa2t1árk pa2t1e2g pa2t1ell pate2s pa2tél pa2t1ér. pa2t1érd pa2t1ére pa2t1érte pa2tid pa2t1int pa2tir 2p1atka. 2p1atkánk pa2t1old pa2t1olt 2patombe pa2t1osz pa2t1otth pa2t1ut pa2tús patü2 pa2t1üz 2p1a2tya 2p1a2tyá 2paur pau2ra p1a2vat pa1wh 2p1a2xi 1pá 2p1á2bé 2p1ábr 2páca 2pácá pá2c1e pácé2 pá2c3h pá2c3só pá2csö pá2csü 2p1ág. 2p1á2ga 2p1ágg 2p1ágy. 2p1á2gya. 2p1á2gyac 2p1á2gyad pá2gyaib pá2gyaid pá2gyaih pá2gyaik pá2gyaim 2p1á2gyain pá2gyair p1á2gyakb pá2gyaké pá2gyaki p1á2gyako 2p1á2gyakr pá2gyakt 2p1á2gyal 2p1á2gyam 2p1á2gyan pá2gyast 2p1ágyaz 2p1á2gyáb 2p1ágyál 2p1á2gyán pá2gyátó 2p1á2gyáv 2p1ágyb 2p1ágyc 2p1ágye 2p1á2gyé 2p1ágyf 2p1ágyga 2p1ágygo 2p1ágyh 2p1á2gyi 2p1ágyj 2p1ágyk 2p1ágyl 2p1ágym 2p1ágyná 2p1á2gyos 2p1á2gyö 2p1ágyp 2p1ágyr 2p1ágys 2p1ágyt 2p1á2gyu pá2gyú 2p1ágyü 2p1ágyv 2p1ágyz 2p1ájt pála2 pá2lab pá2lac 2p1álar 2p1áldo pá2le p1álla. 2pállap 2p1állí 2p1állom 2p1állv 2pálmo pá2lü p2ály pálya1s 2pámi 2p1á2mí pá2mu pána2 pá2naf pá2n1am pá2n1an pá2nar pá2n1as pá2nár pá2n1e2 pá2nék pá2nil pá2nir pá2nis pá2ní pán1kré 2pánkt pá2n1ó2 pá2nö pá2nÅ‘ pán1sz pánte2 pán2tek pán2t1el pá2nü pá2ny1ad pá2ny1a2l pá2ny1a2n pá2nyar pá2nyat pá2nyau pá2nyaz pá2ny1e2 pá2nyérd pá2nyim pá2nyö 2p1á2po pár1adó 2páras 2páráé pár2d1a2 pár2del pá2r1e2 pá2r1i2p pá2rÅ‘ pár1s2 pár2t1ag 2pártal párt1any pár2tari pár2tál párte2 pár2t1el pár2tem pár2tes pár2tet pár2tél pár2t1ér. pár2t1é2te pár2tiz pár2tott pártö2 pár2tök pár2t1Å‘ pár2tus pártü2 pár2t1üz pá2ruh pá2ruk páru2t pár1utu pá2rún pá2rü 2p1á2sás pá2s1e pá2sir pá2sÅ‘ pás3s pá2s1ü2 2p1ásván pá2t1a2 pá2tá pá2tel pá2t1e2m pá2tis 2p1átka. 2p1átkát 2p1átkáv p1átlag 2p1átló pá2t1or 2p1átrak 2p1átté pá2t1uk pát1úrt pá2tü 2p1átvét pba2l1 pbé2r1e2l pb2lo pb2ra pb2ri pb2ro pci2ófo pcsa2p1á2g pcsÅ‘2s1orr pda2l1ad pdal1an pda2leg pda2lel pda2l1es pda2lén pda2l1í2 pd2ra pd2rá pd2ro pd2ró 1pe pea2p pea2r peca1 pec3c pe2cz pe2ed 2p1eff pe1fl 2p1e2gese pegész1 pe1g2r 2p1egz 2p1e2ke. 2p1e2kei pe1k2ré 2pektr pe2lál pe2lár pe2lekt 2pelemb 2pe2lemek pe2lemet 2p1e2lemé 2p1e2lemg 2p1e2lemh 2p1e2lemi 2p1e2lemk 2p1e2lemm 2p1e2lemn 2pelemr 2p1e2lemz pel1eró pele2t pel1ete 2p1elgo 2p1elha 2p1elhel 2peljá 2p1elleb 2p1elnev 2p1elny pe2los 2p1e2lö 2p1e2lÅ‘a 2p1e2lőá 2p1e2lÅ‘e pe2lÅ‘g 2p1e2lÅ‘h pe2lőí 2p1e2lÅ‘já 2p1e2lÅ‘jeg 2p1e2lÅ‘l 2p1e2lÅ‘z 2p1elren 2p1elsö 2p1eltá 2p1elto 2pelu 2p1elv. 2p1elvá pel2v1el 2p1elven 2p1elvh 2p1elvi 2p1elvo 2p1elvű 3pely 2p1elz 2p1ember 2p1e2mel 2p1e2més 2p1eml 2p1ems 2p1e2mu 2p1e2ner 2penged pen3ny. pen3nyb penny1ér pen3nyh 2p1ennyi. pen3nyj pen3nyk pen3nym pen3nyn pen3nyr pen3nyt pen3nyv pe2nya penyá2 pe2ny1e2l pe2ny1e2r peo2l peo2p peó2r pe2pe. 
2p1e2pé pe1p2r pe2r1akt per1all pera1p2 perc1c per2c1el per2c1in 2perdÅ‘ perec1c peregyez1 p1e2rej pe2r1elk pe2r1e2lÅ‘f pe2r1e2lÅ‘ké pere2mért per1evé 2perfa pe2rid pe2r1il pe2r1i2na pe2r1i2ná pe2r1ind pe2r1ing pe2rí per1k2 p2erl per1okt pe2r1os pe2r1o2x pe2r1ó2r pe2rÅ‘d pe2rÅ‘f pe2rÅ‘i pe2rÅ‘m pe2rÅ‘n pe2rÅ‘r 2p1e2rÅ‘s pe2rÅ‘t pe2rÅ‘v per1st persz2 pe2rú pe2r1ü2g perü2l per1ülÅ‘ per1ült p2erz pe2s1ebbel pe2s1ebbÅ‘ pe2sésig 2p1e2sésű pe2sÅ‘i pe1s2p 2p1esszév pes2t1a2 pes2t1er pe2szak pe2sz1ál pesz1ell pe2sz1elv pesze2m pe2szeme pe2szu pe2sz1ü2l pe2tal pe2t1a2n pet2á petet2 2petete 2peteté 2p1e2tetÅ‘ 2p1e2tika 2petim 2p1e2to pet2t1i pe2tűd pe2ug pe2vÅ‘ 2p1ezr 1pé pé2csu pédü2 pé2d1ülé 2pééb 2pééi 2péén 2p1é2ge 2p1é2gé pégés3s 2p1é2hen 2p1é2hes 2p1é2het 2p1éhs 2p1éj. péje2 pé2j1eg 2p1é2ji 2p1éjj pé2k1as pé2k1au pé2k1á2 pé2k1er pé2k1ék pé2k1é2l péké2t pé2k1éte pé2k1i2n pé2kis 2p1él. 2p1élb pé2le. pé2lei pé2lek péle2l pél1ele pél1elmei pé2len pé2let 2p1é2lé 2pélm 2p1éln 2p1é2lÅ‘ 2p1élt 2p1é2lű 2p1élv 2péneka 2p1é2nekb 2pénekd 2p1é2neké 2pénekf 2pénekg 2p1é2neki 2pénekí 2pénekj 2p1é2nekk 2p1é2nekn 2p1é2nekr 2p1é2nekt 2p1é2nekü pé2ny1el pén2z1a pén2z1á2 pén2zs pé2p1i2p 2p1é2pí 2pépü 2p1érc 2p1érd 2p1é2ré 2p1érh pé2rig 2p1é2rin 2p1érm 2p1érn 2p1é2rÅ‘. 2p1érr 2p1érte 2p1érté 2p1értÅ‘ 2p1érv 2p1érz pé2s1aj pé2sal pé2s1e2l pé2sés péskész1 pé2so pés3sza pé2s1ü2t pé2sza pés3zaj pész1ak pés3zav pé2sz1á2 pé2sz1emb 2p1é2széné pé2szin pé2sz1o pé2sz1ö pész3s pé2szu pé2szú pé2sz1ű péta1 pé2tel 2p1étk 2pétl p1étla 2p1étr 2p1étv 2p1év. 2p1évb 2p1é2ve. 2pévei 2p1é2vek pé2ven pé2ves 2p1é2vet 2p1évf 2p1évh p1évk 2p1évn 2p1évr 2p1évs 2p1évt 2p1évv pé2zak pé2z1ár pé2zel pé2z1e2m pézi2 pé2zid pé2zin pé2ziz pé2zol pé2zsa pé2zu p2f1ép pfi2úé pfi2úkér pf2lo pf2ló pf2lu pf2rá p1f2re p1f2ri p1f2rí p1f2ro pf2ru pf1st pg2ra pg2ru pg2rü pha2de p2ha2i 1phanész 1p2hed phelyü2kü 1p2hia2 p2hic 1phila 1p2hoi phó2i 1pi pi2aa pi2aá pi2aba pia2cel pia2cél pia2cik pia2c3se pi2ad pi2ae pi2aé pi2ag pi2ah pi2aj pi2akén. pi2al pi2am pi2ao pi2aö pi2ap pi2aré pi2asza pi2at pi2aú pi2aü pi2ave pi2az 2picc pi2ce. 2picl pida2 pi2dan pi2dea pi2deá pi2dei pi2den pi2deo 2p1i2dom 2p1i2dÅ‘ 2p1i2du pi2eg pi2er pi2ég pi1fr 2p1i2ga 2p1i2ge 2p1i2gé 2pigo 2p1ihl pi2k1ö pil2i pilis3s 2pilles 2p1illet 2pillés 2pillu 2p1i2ly 2p1i2má 2p1i2mi 2p1imm pinak2 pina1p 2pind 2p1inf pin2gas pin2g1á2r pin2gelj pin2gép pin2gos 2p1ingó 2pinj 2p1inp pin2tác pin2t1or pin2tÅ‘r pintu2 pin2tur pi2nü 2p1inv 2p1inz pion1n pi2óa pi2óá pi2óe pi2óf pi2óg pi2óki pi2ól pi2óma pi2ómé pi2óo pi2óp 2pipai 2pipar. 2p1i2pará 2piparb 2piparh 2piparn 2piparr 2p1irh 2p1i2rod pisau2 2pisko pi2s1op 2p1iste 2p1isza piszkos1s pi2t1aj pi2tala pi2tall pi2t1a2n pi2t1á2p pi1tä pi1the pitos1s pi2t1ü2 pi2vás 2p1i2zé 1pí pí2gé pí2já pí2né pí2r1a2 pí2rá pír1ál pír1á2r 2p1írás pí2r1e2 pí2r1in pír1i2s pí2rí pí2rol 2p1í2ró pí2r1ö2 pír1s pírt2 pír1tr pí2rü pí2rű 2pítél 2pítm pítÅ‘a2 pítÅ‘e2l pí2ve pí2ze pí2zé pí2zü pí2zű pke1p2 pke1s2 pkés1s pkia2 pkiá2 pkie2 pk2la pk2li pk2lí pk2lu pk2rá pk2re pk2ré pk2ri pk2ro pk2ró pk2va pk2vó p2lacc pla1k2l pla2pal plap1áll pla2p1osz p2latf 1plazmá pleí2 ple2t1a2n ple2t1e2l pléé2 plé2has pli2s3zá plo2m1e plo2mén plom1ol plót2 pló1tr plÅ‘2sa plÅ‘1stá plÅ‘2szár plu2m1e p2lur pmás1s pmeg1g 1po po2be pocsi2 po2cs1iz po2da. 
po2dz pogás1s po2in poki2 2p1o2k1ir po2kí 2p1okle po1k2ló 2p1okm poko2la pokol1l 2p1okta 2p1oktá polás1s pol2can polc3c pol2c3s 2p1olda polóá2 2poltár 2p1oltás 2p1olti 2p1oltó 2polvas po2n1a pon2c3so po2nel pon2g1e poni2 po2niz pon3n po2n1osz pon2t1a2d pon2tag pon2t1a2l pont1any pon2tál pon2tár ponte2 pon2t1el pon2tem pon2ten pon2tes pon2ték pon2t1ér. pon2t1éte pon2tisz pon2tí pon2t1Å‘2 2ponz po2ol po2p1a2 po2p1á2 2popc 2po2pe pop1el popi2 po2pik po2p1in po2p1ir po2pis po2p1o2l pop1p2 pop1s popsz2 2pop1t2 po2pú po2pü po2r1a2c po2r1ag po2rakk por1ala por1all po2ram pora2n por1any po2r1a2r po2rat por1ács por1áll por2can por2c3h por2c3sí pore2 po2rec po2res por1f2 por1g2 po2rid 2porie po2rih po2r1il po2r1i2m po2r1in por1k2 po2rö po2rÅ‘ por2tamb por2t1au por2t1á2rá por2tárb por2tárn por2tej por2t1em por2tet por2tél por2t1ér. por2t1érk por2t1i2k por2tiz por2t1ív por2tur por2t1usz por2tut por2túr por2tű po2rü 2p1orvo pos3szo pos3szö 2postol 2p1osty po2sü po2sze po2szí 2p1oszl posz2tü po2tab po2tad po2t1a2l po2tan po2t1a2u po2t1e2l po2t1inf po2t1ip po2t1í2 po2tol po2tö po2tus po2tü po2vi 1pó pó2ce pó1fr pói2g pó2k1ass pó2kau pó2k1e2l pó2kem pó2ket pó2k1id pó2kim póki2s pók1isz pó2kiv pó2k1ös pó2k1ú pó2l1an pó2l1á pó2lom 2p1ó2né. pó1p2r p1ó2rad pór1á2sz póre2 pó2reg pó2rem p1ó2riá pó2rö pó2s1or pós3s pó2s1ü2v póta2 pó2t1ad pó2tal pó2t1an póté2r pó2til pótü2 pó2tül 2póün 2p1ó2vó pó2ze pó2z3sá 1pö pö2ka pö2ká pö2kel pö2ker pö2kid p1ö2ko pö2ku pö2kú pö2lye 2pönt pö2ra p1ördö pö2res 3pörg pö2ro pö2r1Å‘ pö2rú 2p1öss p1ötl pötty1in 2p1ötv pö2ve 1pÅ‘ pÅ‘a2n pőá2g pÅ‘1bl pÅ‘1dro pÅ‘e2r pőé2h pőé2l pÅ‘1kl pÅ‘1pl pÅ‘1pr 2p1Å‘r. pÅ‘2ri. p1Å‘rj p1Å‘rl 2p1Å‘rn p1Å‘rs 2p1Å‘rt p1Å‘2rü 2p1Å‘rz pÅ‘1sz pp1ada ppai2 ppa2j pp1ajá pp1akk ppa2nad ppa2n1ell ppan1k2 pp1any pp1arc ppa1s pp1áll p2p1árj p2p1á2rok pp1átm p2p1á2to pp1átv p2p1e2gy p2p1e2kéh p2p1elo p2p1elr ppe2r1a ppe2ró p2p1ég p2p1éks pp2hó ppin2g1e2 pp1íg pp1kl pp2las pp2lat pp1o2ly p2p1o2rom ppor2t1á2r ppor2t1e2 ppor2t1Å‘2 p2p1ó2l pp1órá pp1ön pp1öv pp1ö2z p1p2ref p1p2rem p1p2rez p1p2rém pp2rin pp2rio p1p2roc p1p2rod p1prof p1prog p1proj p1p2ros p1p2rot pp2rób pp1sp p2p1úg p2p1úr. ppví2 ppvíz1 1p2rax p2remi 1p2repa pressz2b 2pret pret2á 1p2rédi p2rép pré2sa pré2s3zá p2riccs p2rius p2rizm prí2m1e2l prí2mem 1probl 1p2roce p2rof 1profe p2rog p2roj 1proje pro2mo p2ropa 1p2rote 1proté 1p2roto 1p2rovo 1próbá pró2d1e pró2dz 1p2róza pru2s1ág pru2se 1p2rüs p2s1a2d psé2gel ps1i2ha ps1í2ze p1s2ká p1s2mi p2s1ón p2s1ö2z p1s2pe ps2pi p1s2po ps1pr ps2rá ps3szó p1s2tá ps2ti p1s2tí ps2to p1st2r ps2tu p1s2tú psza2k1ü psz1any p2sz1as psz1atl p2sz1ág ps3záp p2szég psz2fé 1p2szic p2sz1ill pszis3s psz1k2 psz1old psz1orn p2sz1ön p2sz1ös psz3s pta2d pt1ada pta2n1á2s pta2ne ptá2raj ptáró2 ptá2rór p2t1eg p2t1e2ke. p2t1e2lÅ‘ p2t1els pte2rál pté2ká pté2k1el p2t1érd pt1i2ma. p2t1i2n p2t1i2o p2t1i2r p2t1í2r pt1kl p2t1öt p1t2rag p1t2ran p1t2rá pt2ré ptu2sze ptu2szi p2t1úrr p2t1üt 1pu pua2d pu2csor pue2l 2p1ugr 2puká pul2tas 2p1u2ni 2puno 2p1u2nó puo2r pu1pr pu2rak pu2ral pu2sad pu2sal pusa2n pus1any pu2sap pu2sál pu2s1átl pu2s1e2 pu2s1érté pu2sik pu2sis pu2sí pu2sol pu2sö pu2sÅ‘ puss2 pussz2 pust2 pu2sü pu2szag pu2szal pu2s3zár. 
pusz1é2p pu2szö pu2tab pu2t1a2d pu2tak puta2n 2p1u2taz put1ing pu2tol pu2tód 2p1u2tóp pu1trá 1pú 2p1úrf pú2sz 2p1útb pú2té 2p1ú2ti 2p1útj 2p1útk 2p1útn 2p1ú2to 2p1útp 2p1útr 2p1útt 2p1útv 1pü pü2ge pü2gy pü2kü 2püle püle2t1o 2p1ünn pü2re 2p1ü2rí 2p1üst 2p1ü2te pü2té 2p1ütk 2p1ü2tö pü2ve pü2ze 1pű pű2zÅ‘ pvá2r1al pw2hi pwhisky2 py2ba py2bó py2do py2ho py2ja py2já py2ka py2ké py2ki py2ko py2ma py2na py2ná py2ra py2ró py1t2h py2tó py2va py2vá pze2t1a2 pze2t1á2 pze2t1e2r pzÅ‘a2 pzÅ‘e2 pzőé2 2q. 1qa 1qá 1qe 1qé 1qi 1qí 1qo 1qó 1qö 1qÅ‘ 1qu qu2ae qu2at qu2er qu2ez que2zi qu2éb qu2ie qu2ij qu2il qu2in qu2is qu2od 1qú 1qü 1qű 2r. 1ra raa2d raáta2d ra2bad rab1asz rab1á2ra rab1árb rab1árr ra2b1át ra2b1ele ra2b1ell ra2bid rabi2g ra2b1iga ra2b1igá ra2b1im ra2b1i2na. ra2b1i2ná ra2bind ra2b1int 2rabla ra1bri ra2buj ra2c1aj rac3csa ra2cet ra2cha rac3hig ra2c3hok ra2c3hos ra2c3hot 2r1a2cid rac2kar rac2kit rac2k1ö2 rac2kü ra2dag ra2dalé rada2rad 2radaté radás1s ra2de. r2a3dí 2radíc 2radm 3radósi. 2r1a2dóz ra1drá ra1dru 2r1a2du. ra2dus rae2r raé2derb raé2dere raé2derh raé2dern raé2derr raé2dert raé2r rafikus1s ra1fl 2r1a2ga. ra2gak rag1alo ra2g1ác ra2gáé 2r1a2gán rag2de ragdi2 rag2din ra2gel ra2g1i2na. 2r1agy. ra2gyat 2r1agyn ra2gyon 2r1agyr rai2o 2raizá 2raizi 2raizn 2raizó 2raizu ra2j1a2d ra2j1au ra2j1á2ru ra2j1es ra2jin 2rajkáró 2r1ajkú rajob2 raj2t1e2l raj2t1en raj2zat raj2z1e raj2z1ón raj2z3s 2r1a2kara rak2kép rak2kis ra1k2li ra1klo ra1kló ra1klu 2raknáb 2raknái r1a2kóz rak2re ra1k2ré ra1k2ri rak2rom raktus1s 2r1a2kus r2alab 2r1a2la2g1 2ra2laku ra2lapa r1a2lapú ra2lázá ra2lázó 2r1alb 2r1aljai. 2r1alji 2ralk ralla2k ral2l1akó ral2l1at rallá2 ral2lág ral2lál ral2lev ralo2m1e ra2mab ram1ada 2ramai ram1akk rama2l ram1ala ra2m1any ram1áll ra2m1árá ra2m1árb ra2m1áre ra2m1árh ra2m1árn ra2m1árr ram1b ra2m1éhe ram1ide ram1i2ko ram1ill ra2m1im ram1ina ram1inf ram1ing ram1inj ra2m1i2p 2rammb 2rammet 2rammi 2rammj 2rammn 2rammr 2rammt ra2m1osz ra2m1ó2r ra2mö ra2mÅ‘ ra2mü 2r1a2nal 2r1a2nat ranás1s ran2csal rancs1ág ran2cse ran2csik ran2csÅ‘ rancs3z 2rand. ran2dat ran2d1au 2randj ran2dz ra2nek ran2g1a2 ran2g1á ran2ge rang1e2l ran2g1ó2 ran2gye ra2nil ra2nim 2ranka ran2szál ran2sze ransz1omb 2rany. ra2nyag rany1a2la ra2ny1a2n ra2nyap ra2ny1ar r1a2nyáit r1a2nyáka r1a2nyákh r1a2nyákk ra2ny1ál ra2nyás r1anyás. 2ranyb ra2ny1e2s 2ranyé ra2nyél 2ranyh ra2nyid ra2nyin ra2nyir rany1í2r 2ranyk 2ranyn ra2ny1ol 2ra2nyó rany1ó2r ra2nyö 2ranyr 2ranys 2ranyt ra2ny1ü 2ranza ra2óa ra2óá ra2óf ra2ólá ra2óm ra2óp ra2ósz 2rapa. ra2pák 2rapp rap2pin ra1p2re ra1pré ra1p2ri ra1p2ro 2rarann 2r1arany. 2r1a2ranya. 2r1a2ranyá 2raranyb 2raranyn 2raranyr 2r1aranyt ra2rat 2r1arco ra2rén 2r1art 2r1arz ra1s2ká ra1s2l ra1s2pe ra1s2po rassz1e 2rasszoc rast2 ra1s2ta ra1stá ra1str ra2sz1abl ra2sz1aj ra2szas ra2szat ra2szág ra2sz1árr rasz1emb ra2sz1étt ra2szób ra2szói ra2szón ra2szös rasz2t1a2ny rasz2t1el rasz2t1é2te rasz2tö rasz2tü ra2t1a2d rat1ajt rat1a2la rat1alt rat1ará 2r1a2tád rat1áll ra2t1árad ra2t1á2rai ra2t1á2rak ra2t1árb ra2t1árv ratát1a2 ratá2vi ra2t1e2g ra2t1elk ra2t1ell 2ratég rat1ége ra2t1érd rat1érke ra2térte ra2t1érv rat1ing ra2t1ír ra2t1íve 2r1atká 2ratlé 2ratoz. 
ra1t2ran 2r1attr ra2tür 2raty ra2tya ra2tyá ra2tyu ra2udi 2r1a2uk ra2ulá 2raum rau2n rau2ta rau2tá ra2uto 2r1autó rautóé2 raü2t raü2z 2ravató 2ravú ra2xü ra1ye ra1yé raz1ajt raza2n ra2z1any ra2zel ra2z1olt ra2zono 1rá rá1bre rá2cal rá2c1e rá2c3h rá2c1i2n ráci2óf rá2cö rá2csala rác3ság rá2csét rá2csir rá2cs1is rá2cú rá2cü rá2d1any 2rádáh 2ráddá 2rádju 2ráfha 2ráfhe 2ráfhi 2ráfid 2ráfre rág1ad 2rágam 2r1á2gaz rág1á2zá rá2gia rá2gil rá2gim rá2gis 2rágí rá2g1osz rágus4 rá2gü 2rágya 2rágyáb 2rágyáé 2rágyáh 2rágyái 2rágyáj 2rágyák 2rágyán 2rágyár 2rágyás 2rágyát 2rágyáv rá2gyu ráí2r ráj2k1a2 ráj2kel ráj2k1ó2 rá2jü rá2kát rá2k1ered rá2k1e2s rá1kla 2rákog rá2k1osz rá2k1ón rá2k1ó2r rá1krá rá1krét rá2l1ak rá2l1a2l rá2lana 2r1álar rála2t1e rá2lál rál1átl r1álcá 2r1áldá rá2lid rá2lim rá2lí 2r1állam 2r1állap 2r1állat 2r1állí 2r1állom 2r1álma rá2lü rá2lyal rá2ly1a2n rá2lyap rá2ly1as rá2lyav rá2lyús rá2lyút rá2mö rá2nal rá2n1a2n rá2n1a2t rá2n1ár rá2nás ránc1c rá2nék rán2gál rá2niz rá2ní rán1kl rá2nol rá2not rá2n1ó2 rán2sav ráns3s rán2t1ak rá2nü rá2ny1a2d rá2nyag rá2nyako rány1alt rá2ny1a2n rá2ny1ap rá2nyar rá2nyaz rá2ny1ár. rá2nyérm rá2nyérz rá2ny1í2r rá2nyol rá2nyó rá1p2l 2rápo rá2pol rá1p2r 2r1á2rad rár1a2dá 2r1á2rai 2r1á2ram 2r1á2ras 2r1á2rat rá2raz rá2ráb rá2ráh rá2r1ál rá2rár rá2rát rá2ráv rá2r1em rá2rér 2r1árh rá2riai 2r1á2riá rá2rin 2r1árj 2r1árl 2r1árn rá2r1ol r1ártó 2ráru rá2rul rá2run 2rárú rá2rú. rá2rút rá2rúv rá2s1a2d rá2s1akn rá2sal rása2r rás1ará rá2s1as 2rásatá rás1a2to rá2saz rás1á2ga rá2s1árad rá2s1árak rá2s1á2rá rá2s1á2ré rá2sás rá2ser rá2sis rá2s1ol 2r1á2só. 2rásób 2rásóé 2rásóg 2r1á2sóh 2rásói 2rásój 2rásók 2rásón rás1ó2ra 2rásóró 2r1ásós 2rásót 2r1ásóv rá2sÅ‘ rást2 rás1tr rá2sut rá2szan rá2szas rá2sz1á2ru rá2sziv rás3zón rá2tad rát1a2da rát1ala r1á2tál rá2tég rá2tél 2rátép rá2t1i2o rá2tí 2r1átlé rá1tri rá1tro rátus1s rátú2 rá2túr 2r1á2tül rá2zal 2rázisb 2rázisé 2rázish 2rázisk 2rázisn 2rázisr 2ráziss 2rázist rázi2sz rá2zsal ráz4sá rázs1e2 rázsé2 rá2zsén rázsi2a rá2zsis rá2zsó rázs1ü2 rba2j1e rba2jokt rba2n1á rbátyá2t rbá2ty1áto r2b1ell r1b2la r1b2lo rb2lú rboc1c r2b1olda rbo2n1a2 r1b2ra r1b2rá rb2ri rb2ro r1b2ró rb2ru rbu2t1e rc1adá rc1ajt rca2lak r2c1alk r2c1ana rc1anya rca1p rc1ara rc1ará rc1ass rc1a2to r2c1ág r2c1á2l rc1ár. r2c1ári r2c1árn r2c1á2ro r2c1árr r2c1árt r2c1á2ru rc3csi rc3csí rc3cso rc3csó rc3csö rce2g1a2 rceg1g rc1egy rc1e2leg rc1elk rc1e2lÅ‘d r2c1els r2c1ember r2c1epe rc1e2ve r2c1ex r2c1ez rc1é2két r2c1éle r2c1é2lé r2c1é2li r2c1élm r2c1élü r2c1ép r2c1ér. r2c1érb r2c1é2ri r2c1érr rc1ész rc1fr rc1gr r2c3has rc3hel r2chen r2chig r2chom rc3hu r2c3hú r2c3hű rci2d r2c1ide r2c1i2ko r2c1ikr rc1ill r2cimá r2c1imi rc1i2na. rci2náb rci2náé rc1i2nár r2c1i2nát r2c1ind rc1i2oni r2c1i2pa r2c1isi r2c1isk r2c1ist rc1izm rc1i2zo r2c1íj r2c1ív r1c2ké. r1c2kéh r1c2kéi r1c2két r1c2ki. r1c2kia r1c2kig r1c2kok rc1k2ré rc1kv r1c2lu r2c1op r2c1o2r r2c1osz rc1ón rc1ó2r rc1óv r2c1öb r2c1ön r2c1ör r2c1ös rc1Å‘r rc1pl rc1pr rcs1abl rcs1alak rcsa2p1á2g r2c3sara r2csarl r2c3sáp r2c3sár. rcs1áru r2c3sej r2cseleme r2cs1elr r2cs1érb r2csikr rcs1írás rc3s2ká r2c3sora r2c3soro rcsóna2k1á2 rcs1Å‘s. rcs1p rc3s2pi rcs1s rc3sto rc3sz rc1tr r2c1ud r2c1ur r2c1ut r2c1új rc1üg r2c1ür r2c1üs r2c1üt rc1üv rc1üz rc1űz r2c3zá rc3zó rc3zs rc3zu rc3zú rda2cél rd1a2dat r2d1akc rda2l1í2 rd1alka rd1alko r2d1a2nya rda1p rd1a2rán r2d1arc rd1aszt r2d1azo rd1ács. 
rd1á2rak rd1árat rd1á2ron r2d1árv rdás1s rd1bl rd3dz r2d1e2g rde2k1a2 rde2ker rde2kék rde2kol rde2k1ö2 r2d1ela rde2sÅ‘t rd1exp rdezÅ‘2p3 r2d1é2g rd1é2lé rd1élr rd1élt rd1é2lü r2d1ép r2d1ér. r2d1érd r2d1é2ri rdé2sa rdé2sza r2d1é2vé rd1fr r2d1i2d r2d1i2ga r2d1igé r2d1ima r2d1i2na. r2d1i2nai r2d1i2nát. rdio2x rd1izo r2d1íg r2d1íz rd1kl r2d1okta r2d1old rd1orr r2d1osz r2d1ou rdóa2 rd1ó2rá rdö2g1öl r2d1öl r2d1ös rd1öv rdÅ‘2s1orr rd1pr r1d2ram rd2rap r1d2raz rd2rog r1d2rót rd1sk rd1sp rd1st rdsza2k1 rd1t2r rd1udv r2d1urá rd1üg r2d1üt r2d3za r2d3zá rd3zo rd3zó r2d3zö rd3zsí r2d3zú 1re rea2d rea2j rea2ka rea2la rea2r 2reálá 2reáló 2r1eb. 2r1ebbÅ‘ reb1egy 2r1e2béd 2r1ebh 2r1ebk 2r1ebm 2r1ebn 2r1ebs 2r1ebv re2caj re2cal re2cá 2recets re2ch rec3sar 2r1e2cset rec3sor re2cz re2dan red1elv 2redend re2d1e2r 2redeti 2redetű 2redény re2d1é2ves re2dir re2dor 2r1edzÅ‘b 2redzÅ‘j 2r1e2dzÅ‘k 2redzÅ‘r 2r1e2dzÅ‘t 2redzÅ‘v re2et re2ew 2reff re2gap re2gas reg1áll re2gár reg1e2le reg1elh reg1ell re2g1elr reg1elv reg1eng reg1e2tetÅ‘ reg1e2vé regész1 re2gid reg1inj re2giz re2góc reg2óv re2gú regü2l re2g1ülé 2r1egy. 2r1egyb re2gye. re2gyed re2gyen 2r1e2gyes re2gyet 2r1e2gyez 2r1e2gyé 2r1egyh 2r1egyk 2r1egyl 2r1egyn 2r1egyr 2r1egys 2r1egyt re2gyün r1egyv rei2rá re2ja 2rejéé 2rejűe 2rejűn 2rejűr 2rejűt re2k1ag re2k1ará re2k1asz re2k1á2l re2k1á2p re2k1ár. re2kás rek1elh rek1elt rek1elü rek1erd re2k1e2rÅ‘ reke2sz1á 2rekééh re2kéj re2kék 2r1ekék. rek1ékh re2kél 2r1e2kénk re2k1ér. re2k1érd re2k1érte 2r1e2kés. re2k1éss re2k1id re2kij re2kil re2k1ing re2k1int re2k1ip re2kír rek1k2 re1klu re2k1ok re2k1old rek1opt re2k1os rek1ott re2kór re2kök re2kötl re2kÅ‘ rek1s reksz2 re2k1u2s re2küd re2k1üg rekü2l re2k1ülé re2k1ülÅ‘ re2küt 2r1elad 2r1e2lág 2r1elbí 2r1elbo 2r1elc 2r1eldo 2r1e2lef 2relektr rele2ma 2re2leme. 2r1e2lemed rele2meg 2relemei 2re2lemek 2relemem. 2r1e2lememe 2relememm 2r1e2lemen rele2m1er 2relemes. 2r1e2lemese 2relemesn 2r1e2lemet re2lemezé rele2m1ér. 2r1e2lemtel 2relemük 2relemünk 2relemzé 2relemzÅ‘ 2r1e2l1eng 2relér 2r1elf 2r1elha 2r1elhá 2r1elhel 2r1elhú 2r1eljá 2r1elké rella2 rel2l1an 2r1ellá 2r1ellÅ‘ 2r1elma 2r1elmééé 2relmééh 2r1elméi. 2relmél 2relméte 2r1elnev 2r1elnö 2r1elny 2r1e2los 2r1e2lÅ‘ad 2relÅ‘dei 2r1e2lÅ‘dök 2relÅ‘nn 2r1elÅ‘nye 2relÅ‘zm 2r1elso 2r1elszá 2r1elta rel2tár 2r1eltáv 2r1eltett. 2reltettk 2r1eltéré 2r1eltév 2relú 2r1e2l1ül 2relű 2r1elv. 2r1elvá 2r1elvb 2relved 2r1elvei 2r1elvek 2r1elvem 2r1elven 2r1elves 2r1elvé. 2r1elvéne 2r1elvér 2r1elvéü 2r1elvév 2r1elvh 2r1elvi 2r1elvk 2r1elvn 2r1elvo 2r1elvr 2r1elvs 2r1elvt 2r1elvü 2r1elvű 2r1elvv 2r1elz re2m1a2d re2m1asz re2maz re2m1ál re2mát 2rembar 2r1ember. 2r1emberb 2r1embere. 2rembered 2r1emberei 2r1emberek 2r1emberes 2r1emberé 2r1emberh 2r1emberi 2r1emberk 2r1embern 2r1emberr 2rembers 2r1embert 2r1emberü rem1ble r1embó 2rembr re2mel rem1e2lem 2r1emelé 2r1emelg r1emelk rem1elÅ‘d rem1els re2m1eng re2m1esz re2m1ér. re2m1érm re2m1é2te re2migr re2m1ór re2mö re2m1Å‘2r rem1p2 2remuk 2r1e2mul rem1ur 2r1e2mus 2r1encik ren2d1e2r ren2d1ég ren2déj ren2dék ren2d1o ren2d1ö ren2d1Å‘2s 2r1e2ner renés3s 2r1enged 2rení ren2s1ég rens3s re2of re2óa re2óc re2óe re2óé re2óf re2óka re2ól re2óo re2óp re2ósá re2ósz re2p1aj rep1ana rep1asz re2pau re2paz rep1áll rep1álm re2pás re2pát re2p1e2g re2p1elf re2p1elk re2p1ell re2pelm re2p1eng re2p1ep 2r1e2per. re2p1erk 2repéd rep1é2j 2repénk 2r1e2pés re2p1i2d rep1ille rep1int re2pok 2reposz. 2reposzt. 
rep1osztó repö2 re2p1örö rep1pl rep1pr re2pú 2r1erd re2rei re2rej re2res re2rez 2r1erg re2róz 2r1e2rÅ‘ 2r1ers re2sas re1s2c 2r1esemé res1epr rese2t1e2l 2r1esél 2reskü r1essi re2szaj 2r1e2szet resz1ív 2reszkö 2reszm reszta2 resz2t1an resz2tál resz2t1ár resz2t1í2v resz2t1o2r resz2t1os 2reszű. 2r1e2szűe re2t1ab re2t1akt re2tal ret1a2la re2tant re2t1a2ny re2tág re2t1ára re2t1áro ret1átf re2t1elb re2t1ell re2telm ret1emel re2t1erk rete2sz1á 2r1e2tetés. 2r1e2tetésé 2retetésn re2t1é2k re2t1él re2t1érb re2t1érd re2tid 2r1e2tika re2tiká ret1ill r1e2tilt re2t1ing re2t1int re2tis r1etnik re2t1ok re2t1old re2t1os re2tór re2t1örö re2t1ö2v ret2t1est ret2téne ret2t1in re2t1ut re2t1ült re2tűd re2ud re2ute re2vezÅ‘ 2r1e2vÅ‘ 2r1exp 2rezetn 2rezetü 2rezor 2rezÅ‘kh 2r1ezr 2rezüs 1ré ré2bá ré2bis ré2biv ré2bí 2rébres ré2bü 2r1é2des 2réfá 2r1égb ré2gét 2r1égn ré2g1ó2 ré2gÅ‘ 2régt 2r1éh. 2r1é2hen 2r1é2hes 2r1é2het 2r1éhs 2r1éht ré2jen 2r1é2jet 2r1é2jün ré2k1ab ré2kac ré2k1a2g ré2k1a2n ré2k1au rék1á2sá ré2k1e2b ré2k1eg ré2kel rék1ele rék1elÅ‘ rék1elr rék1els ré2k1esz ré2k1éte ré2kik ré2kin ré2kit rék1ola rék1old ré2kör ré2k1ö2v ré2köz ré2kÅ‘ réku2 ré2k1ut ré2k1ú ré2let. ré2letb 2r1é2lete ré2leté ré2leth ré2letn ré2letr ré2lett ré2letü 2r1élf 2r1élm 2rély ré2mad ré2m1a2l ré2m1an ré2mar ré2mau ré2m1ál ré2már ré2mel rém1e2le ré2mer ré2m1é2ré ré2mil ré2m1ist ré2mos ré2mó ré2mö 2rémtu réna1p2 2r1é2nekh 2r1é2neki 2rénekl 2r1é2nekü ré2nel 2réner ré2n1é2j ré2nö ré2ny1e2l ré2p1ed ré2peg ré2pes répi2 ré2p1ip 2r1é2pí ré2pol ré2pö ré2pü 2r1épül 2r1ér. 2r1érb 2r1érc 2r1érd 2r1é2ret 2r1é2rez ré2ré. ré2réb ré2rén ré2rér ré2rés. ré2rése ré2résé ré2résh ré2rési ré2résk ré2résn ré2résr ré2réss ré2rést ré2résü ré2rét 2r1érf 2r1érh ré2rie ré2rig 2r1érk 2r1érl 2r1érm 2r1érn 2r1é2rÅ‘. ré2rÅ‘k 2r1érr 2r1értá 2r1érte 2r1érté 2r1értí 2r1értÅ‘ 2r1é2rü 2r1érv 2r1érz résa2 ré2sal ré2sar ré2s1ár ré2sát ré2seg rés1ell ré2s1er ré2sés ré2s1Å‘2 rés3sza ré2sú ré2s1ü2t ré2s1ü2v rész1a2l ré2szan rés3zav rész1ál ré2sz1eml ré2sz1e2sz ré2sz1é2k ré2szinte rész1í2v ré2szok ré2szol ré2szó rész1ó2r ré2szÅ‘r rész3s rész1t2r ré2szu ré2szú részü2l ré2sz1ülÅ‘ 2rétáz ré2t1egy 2ré2tel rét1elm 2réter ré2t1e2sz 2r1é2tetÅ‘ ré2t1é2k ré2tiz ré2tol ré2t1os ré1tré ré2tud révá2 ré2vát 2réve. 2révei rév1eké ré2v1é2l ré2v1éri 2révé2t ré2v1éte 2r1é2vév 2révf 2révh 2révt ré2vú 2révv ré2z1a2 rézá2 ré2z1ár ré2z1e2g réze2l réz1elÅ‘ ré2zer ré2zes réze2t réz1ete ré2zid ré2zin ré2zio ré2zip réz1isz ré2zos rézó2 ré2z1ór ré2zö ré2zsa ré2z3sá ré2z3se ré2z3si ré2zsí ré2zso ré2zsu rf1ál r2f1ép rfé2sza rfé2szá rf1fl rfia2n rfi1b2 rfid2 rfii2 rfik2 rfi1kr rf1ind rf1isk rfi1sz2 rfit2 rfi1tr rf1kl rf1kr rf2la rf2lo rf2ló rf2lu rf1okt rf1Å‘rü rf1pr r1f2rá rf2rí r1f2rö rf1sp rf1st r2f1út rga1k2 rgá2csal rgá2csan rgá2csav rgá2z1é rge2o rge2r1a rgés3s rg2ha. rgi2aia rgi2ako rgi2al rgi2aso rg2il rg2la rg2le rg2lo rg2ló r1g2nó rg2öl rgö3le rgÅ‘csa2 rgÅ‘2z1ölt rgÅ‘2z1Å‘s r1g2rá r1g2ru r2gyado r2gy1aj r2gyalak r2gy1alap r2gy1algá r2gy1alk r2gy1any r2gyap r2gyarc r2gy1asz r2gyau r2gy1az r2gyál r2gy1á2z r2gy1eg r2gy1e2l r2gy1enc r2gy1e2s r2gy1és r2gyévé r2gyim r2gy1int r2gyip rgy1ira r2gy1ok r2gy1öz r2gyÅ‘r rhajóé2r rhatá2s1út rháza2d rhá2z1ada rhá2zal rhá2zip rhá2z3s rhitköz1 1rhoeá 1ri ri2aa ri2aá ria1b ri2aba ri2abá ri2abo ri2ac ri2ae ri2aé ria1f ri2afag ri2afr ri2a1g2 ri2ah ri2ai2k ri2aí ri2aj ri2akén. 
ri2akó ri2ala ri2alá ri2alu ri2am ri2ana ri2ao ri2aó ri2aö ri2aÅ‘ ri2a1p2 ri2ará ri2aré ri2asza ri2aszá ri2aszi ri2aszo ri2até ri2ato ri2a1t2r ri2au ri2aú ri2aü ri2av ri2az 2riásá 2riász r1i2ázós 2ribé ri2bol 2r1ibr 2ribü ri2c1e2l rics1il ri2d1a2l ri2dál rid1d ri2de. 2r1i2deá ri2dei 2ridej ri2del ri2deo rid1ere ri2d1esz 2ridil 2r1i2dom 2r1i2dÅ‘ ri2du ri2ed 3rie2l 3rier ri1fl ri1f2rá 2r1i2ge. 2r1i2gé 2rigi ri1gla 2r1igr ri2har 2r1ihl rihó2 rihón1 2rijes ri1klo ri1k2ló ri1k2ri ri2lal ril2l1e2h ril2lét ril2l1in ril2l1Å‘ 2rillu 2rillú 2riló 2rimáda 2rimádá 2r1imp ri2n1a2d 2r1i2nai ri2naj ri2n1a2l ri2n1a2n ri2n1a2r 2rinas. 2r1i2nasá 2rinasn 2r1i2naso ri2nass 2r1i2nast rina1sz2 ri2nat. rin2c1a rin2cá rinc3c rin2c1eg rin2cel rin2cer rin2co rin2csér rin2csi 2r1inde 2r1indu ri2neg 2rinfl rin2gál rin2gel 2r1ingét rin2gül 2r1inha 2r1i2nic ri2nil ri2n1ip 2r1injek rin1kré rin3n 2r1insp 2r1inst rin2t1ad rintá2r rin2tára rin2táro rin2társ 2rinten 2rinterf rinté2r rin2t1ér. rin2t1éré rin2t1éri 2rintÅ‘. 2rintÅ‘k ri2nül ri2o1k2 rio2lok ri2om 2rionn 2riono 2riox ri2óa ri2óá ri2óc ri2ódar ri2óe ri2óg ri2óke ri2ól ri2ómá ri2ómé ri2óo ri2ó1p ri2óü 2r1i2pai 2ripar. 2r1ipara 2r1i2pará 2riparb 2riparé 2riparh 2riparil 2riparin 2riparn 2riparr 2ripars 2riparte 2ripartó 2riparu 2ripl ripor2ta 2r1i2ram 2r1i2rat 2r1i2rá 2r1irh 2r1i2ri 2r1i2ro 2r1i2rón 2r1irr 2r1irt ri2s1ar ri2s1as ri2s1ál ri2sáp ri2s1e2l ri2s1emb ri2s1e2r ri2sid ri2sii ri2sil ri2s1is 2rismérv ri2s1or. ri2s1ort 2r1ispá 2ristál ri2sut ris1üté ri2s1ü2tÅ‘. ri2s1ü2v 2r1i2szák ri2t1a2d ri2tal 2rital. 2r1italb rit1alk 2r1itall 2ritaln 2r1italo 2ritalr 2r1italt rit1a2ny ri2tág rit1ell ri2t1i2o ri2t1í riu2méne riumhid2 riumhidr1 riu2m1i2o 2r1i2vad ri2var 2r1i2vás 2rivo 2r1i2vó rixe2 ri2x1el ri2xí ri2xö 2riza r1i2zét 2r1izmi 2r1izmo 2rizmuk 2rizmú ri2zsar ri2zseb ri2zsel 2r1izza 2r1izzó 1rí rí2gé 2ríj. rí2jai rí2jak rí2jam rí2jas 2ríjá rí2ján rí2ját rí2jáv 2r1íjh 2r1íjj 2r1íjl 2r1íjr 2r1íjv rí2m1a2l 2rímá rím1emb rí2mo rí2mö rí2né 2r1íns rí2ra r1í2rá r1í2ró 2rítél rítés3s rítÅ‘a2 rítÅ‘e2 rítÅ‘kész1 2r1ív. ríva2 2r1ívb 2r1í2ve 2r1í2vé 2r1ívh rí2vi r1í2vü rí2vű 2r1ívv rí2za rí2z1el rízis3s rí2zo r2j1ado rj1any r2j1ág r2j1áp rjet2 r2j1id r2j1ös r2j1u2r rka1b rk1ang rka1pr rkaros3 rka2ró2r rka2s1ü2v rka1t2r r2k1ács. rká2csá rká2ne r2k1á2ri rkár1om r2k1á2ru r2k1e2dz rk1e2lem rk2elm r2k1eltá rke1p r2k1erd rk1eredÅ‘ r2k1e2rei r2k1e2rez r2k1e2rÅ‘ rke2szö r2k1ex rké2p1e2l rké2p1éss r2k1ér. r2k1érté r2k1étt r1k2hé rk2hón rkia2 rkie2 rki2g rk1iga rkigaz1 rkilenc1 r2k1imi rk1inga rki2ont. rki2sem rki2z1a rk1izo rk1kl r1klie r1k2lí r1k2lo r1k2ló rk1o2laj rko2nya rko2vi. rko2vit r2k1óné rk1ó2rá rk1ó2ri r2k1öltési r2k1ötl rkö2zÅ‘ rk1Å‘r. rk1Å‘2ré rk1Å‘rk rk1Å‘rn rk1Å‘2rö rk1Å‘rr rk1Å‘rs rk1Å‘rt rk2rém r1k2ri r1k2rí r1k2rom rk1sh rktus1s rk1udv r2k1ug rku2sze r2k1ú2sz r2k1üd r2k1üg rk2vó rlag1g rla2g1ol rla2p1a rla2pál rlap1e rla2pol rla2p1osz rla1pro rla1s2t rla2t1a2n rla2t1ó2 rlá2pe rlás3s rlá2s1út rlá2t1e2 rlá2t1é rlá2tor rle2g1a2 rle2g1á rle2ger rleg1g rlegigaz1 rleí2 rle2t1á2 rle2t1eg rle2tell rle2t1elÅ‘ rle2t1ék rle2tél rle2t1ér. rle2t1érv rleü2 rlésát1 rlésáta2 rlés3s rl1gr rló1g2 rlóó2 rlót2 rlÅ‘e2 rlÅ‘1sta r2m1akad rmaké2sz1ü r2m1a2kó rma1kré r2m1alt r2m1a2nya rmaó2 rma2t1á2ru rma2t1ur rma2zon rmá2lér rmá2nyan rm1á2ram r2m1á2ro r2máru rm1cl rmege2 rmeg1er rme2g1es rme2g1é rme3g2ö rme2k1an rme2kar rmeká2 rme2k1eg rme2k1e2l rme2kérd rme2kor rme2kot rme2k1ó2 rmekö2r rme2körö rm1elk r2m1elm rme2ran rme2ras rme2rin r2m1e2rÅ‘i rme2rÅ‘k. 
rme2rÅ‘ke rme2rÅ‘kn r2m1e2rÅ‘m rmert2 rmer1tr rm1esem rme2t1él rme2tin rmé2k1e2l rmé2ker r2m1é2le r2m1ép r2m1ér. r2m1érte r2m1étk r2m1étr r2m1étt rmi2g r2m1iga r2m1igé rm1illa rminc3s rm1inte r2m1irá rmi2si r2m1isko r2m1ism rmite2 rmi2t1el r2m1íg rmjob2 rm1kl rmo2n1a rmo2nár rmo2n1e rmon1n rm1ope r2m1ors rmo1sz2fér rmÅ‘2szá rm1p2l rm1p2r rm1sk rm1st rm1sz2 rm1tr rm1üg rm1üz rmű1s rm1ya rna1b rna1f2 rna2gyú rnai2k rna1k2r rna2pes rna1pro rnas2 rna1sp rna1sz2 rna1t2 rná2cs r2n1el. r2n1elb r2n1elf r2n1elh r2n1elj r2n1ell r2n1eln r2n1elr r2n1elt rne2m1is r2n1est rne2t1a2 rne2t1e2l1 rnet1o rnés2 rné1st r2n1i2d rni2g r2n1iga rno2kis rnó1d2 rnö2ket rnus3sze rny1a2dó r2nyakad r2nyalako r2ny1alk r2nyarc rny1álm r2ny1ell r2nyelm r2ny1eln r2ny1elo rny1elvo rny1emb r2nyerd r2ny1e2rez rny1e2sÅ‘ rny1ékné rny1ész r2ny1id r2nyiz rnyolc1 rny1old r2ny1or rny1ök r2nyöt rny1s r2ny1ur 1ro ro2ad roa2n 2r1obj 2robl r1obs roc2ke ro1cker roc2kén ro1c2kok ro1c2kon roc2kos. 2r1o2dú rofi2tár ro1fl ro1fr ro2g1ad ro2g1ak ro2gal ro2g1am ro2gar ro2g1á2r ro2g1áz rog1enc ro2ger ro1g2ló ro2g1o2l ro2gor ro2g1osz ro2gö 3rográ ro1g2ráf. ro1g2ráff rogu2 ro2gur ro2gü 3rogy ro2he ro2hö ro2is 3rojt ro2ka. ro2kab ro2kac ro2k1a2d ro2kait ro2k1aj ro2kak ro2k1al rok1any ro2k1a2s ro2káb r1o2kád ro2k1ál ro2kán ro2k1á2s roke2 ro2ked ro2k1en 2rokett. r2oké ro2kék ro2k1é2l ro2kép ro2kid ro2k1ing ro2k1í 2rokod rok1old ro2kÅ‘ 2r1okság rok1t2 2r1oktat 2roktán ro2k1u2s rokú2 ro2kú. ro2kús ro2kü 2r1o2laj rola2n rolás1s 2r1oldá 2r1oldó ro2l1i2d roligar2 roligarc3 rol2l1a2d rol2lag rol2l1akn rol2lan rol2lat rol2leg rol2lel rol2lis 2r1olló 2r1oltás 2r1oltó. 2r1oltób 2roltóé 2roltóh 2r1oltói 2roltóké 2roltóm 2roltón 2r1oltót 2r1oltóv 2r1oltv ro2lü 2rolvad 2rolvas 2r1o2lyai 2r1o2lyáh 2r1o2lyán 2r1o2lyár 2rolyéb 2rolyéh 2rolyéi 2rolyén. 2rolyiér 2r1o2lyuk rom1ajt rom1akk rom1akó ro2m1a2ro ro2m1ál 2romám ro2m1á2ri ro2m1árk ro2m1árn ro2m1á2ro rombé2 rom2bén ro2meg ro2m1el ro2m1e2r ro2m1es ro2méj ro2m1é2ne rom2f1os ro2mim rom1iná rom1ist ro2mita ro2miz ro2mí romköz1 2r1omni ro2m1o2r ro2m1ó2r ro2mö ro2mÅ‘ rom1pr ro2mü ron1alj ron1alt ron1alu ron1any rona1t2 rona2ut ro2n1áta ro2n1átr ron2csel ron2csem ro2nen ron1esz ro2n1ér. ron2gal ro2n1i2ko ro2n1i2m ro2ní ro2nop ro2nóc ro2n1ó2r ro2nö ro2nÅ‘ ron1s2 ron2tab ron2tem ron2tev ron2t1én ron2tin ron2tí 2rontóé 2r1ontóh 2rontóig 2rontós ro2nü ro2nyac rony1a2l ro2nyid ro2nyik ro2nyir ro2nyó ron2zab ron2z1al ron2zer ron2zin ron2z1ol ron2z1or ron2z1ó2 ronz1z ro2om ro2os 2r1opc 2rope ro2per ro1p2l ropo1s rop2s ro2rak ro2r1a2l ro2rat ror1áll rore2 ro2reg ro2r1el ro2r1es ro2r1in ro2rosz rort2 ror1tr ro2rü ro2sar ros1ass ro2s1atl ro2s1av ro2s1árk ro2s1áro ro2sem ro2s1ist ro2s1ol ro2s1ó2 ro2sö ro2sÅ‘ ros3sze ros3szék ros2tet 2rostob ros2tol 2rostya ro2s1út ro2szal ro2szan ro2szás 2r1oszlo ro2szó r1osztá 2roté ro1t2he roto2na ro1t2ró 2rotth ro2un ro2us rova2re rovás1s 2r1o2vi. 2r1o2vib 2r1o2vik ro2vis 2ro2xidb 2ro2xidr 2roxilc ro1xy ro1ya roza2tal ro2zsan 1ró róa2d rób1ajt 2róbá róc3c ró2c3h ró2cin ró2c1ö ró2c3sá ró2cz ró2dai ró2dep ródi2ad ró2dÅ‘ ró2dü 2r1óév. 
2róéve 2r1óévé ró2f1ag 2rófea 2rófeá ró2f1iv 2rófs ró2fur ró1g2r rói2g róke2rest róke2r1in ró1k2l ró1kré 2r1ólm r1ó2lom ró2m1a2l róma2r róm1ara ró2mál róme2l ró2mél róm1isz ró2mí 2rómk ró2mö ró2mü róne2m 2r1ó2néb 2rónéé 2r1ó2néh 2r1ó2nén 2rónér 2rónét 2rónév 2rónj 2rónn 2rónr rónus3s 2rónú ró2nü róo2k róó2r ró1p2l ró1pro 2r1ó2raa 2r1ó2rac 2r1ó2rad 2r1ó2rae 2r1ó2raf 2r1ó2rag 2róraje 2r1ó2ram 2r1ó2ran 2r1ó2rap 2r1ó2rar 2r1ó2ras 2r1ó2rat 2róraü 2r1ó2rav 2r1ó2ráb 2r1ó2rád 2r1ó2ráé. 2ró2ráén 2r1ó2ráév 2r1ó2ráh 2r1ó2rái ró2rája 2r1ó2ráka 2r1ó2rákb 2r1ó2ráké 2ró2rákh 2róráki 2r1ó2rákk 2r1ó2rákn 2r1ó2ráko 2r1ó2rákr 2r1ó2rákt 2r1ó2rán. 2r1ó2rána 2r1ó2ráná 2r1ó2ránk 2r1ó2rány 2r1ó2rár 2r1ó2rás 2r1ó2rát 2r1ó2ráv 2r1ó2riá rós1orom ró1spi 2rótb 2róth ró2tip rót1ist 2rótj 2rótk 2rótm 2rótn ró2t1ö2v ró1tri ró2tül 2r1ó2vó róza1t2 ró2z1e ró2z1in ró2zú ró2zü 1rö 2r1öbl 2r1ö2böl. 2r1öbölb 2r1öbölh 2r1öböli 2r1öböll 2r1ö2böln 2r1öbölr 2röböltÅ‘ rö2ga rö2go rö2gó rö2k1é2l rö2k1érv 2rökí 2röklé rö2ko 2r1ö2kör 2rökösé 2rökösö 2r1ökrö rö2lÅ‘s rö2lyü rö2ma rö2má 2röme. 2römed rö2meg 2römei 2römén 2römét 2römév rö2mí 2römmű rö2mo rö2mó 2römöd 2römök 2römöt rö2mu 2römü 2römű rön2d1e rön2k1a2 rön2k1e2 rön2kép rön2kol rön2kos rön2k1öl rön2k1ü rö2p1i2 rö2re rö2rö rö2sa rö2sá rö2so rös3sza 2r1öv. 2r1övb 2r1ö2ve r1ö2vé 2r1övh 2r1övn 2r1ö2vö 2r1övr r1ö2vü 2r1övv 1rÅ‘ rÅ‘a2n rőát1 rőáta2 rÅ‘1bl rÅ‘1br rÅ‘2dá rÅ‘2d1e2l rÅ‘2dos rÅ‘2d1Å‘2r rÅ‘e2l rÅ‘e2r rÅ‘e2s rőé2h rőé2l rőé2r rőé2te rÅ‘2f1as rÅ‘1fl rÅ‘2f1Å‘ rÅ‘gé2p1és rÅ‘i2ta rÅ‘1kl rÅ‘1kv 2rÅ‘lt rÅ‘1pl rÅ‘1pr 2r1Å‘r. rÅ‘r1ak rÅ‘r1eg r1Å‘2rei r1Å‘rh rÅ‘r1in rÅ‘2riz rÅ‘rköz1 2r1Å‘rl 2r1Å‘rm 2r1Å‘rn rÅ‘2rö 2r1Å‘rp 2r1Å‘rr 2r1Å‘rs 2r1Å‘rt 2r1Å‘2rü 2r1Å‘rz rÅ‘sá2 rÅ‘2sár rÅ‘2sír rÅ‘1ská rÅ‘2sÅ‘ rÅ‘1spe rÅ‘1s2pi rÅ‘1sta rÅ‘1str rÅ‘sü2l rÅ‘2s1ülÅ‘ rÅ‘2s1ült rÅ‘2s1ü2t rÅ‘2s1ű2 rÅ‘sza2kál rÅ‘u2t rőü2l rÅ‘va2s1 rÅ‘2zi. rÅ‘2zön rpei2 rpe2szel rp2he r2p1ig rpis1á2 rpi2t1a rpi2t1e2 r2p1ivá r1p2la rp2le rp2lé rp2lu rp2lü rprecíz1 r1p2ré r1p2ri r1p2rí r1p2ro r1p2ró rpu2szé r2p1üz r2r1a2lap rra2sza rra2szi rrata2n rrat1any rra2t1á2r rra2t1e r2r1ábr rrádiók2 rrádió1kb r2r1á2ga r2r1á2gy r2r1árb rrá2saj rrá2sar rrá2ság rrá2se rrás3s rrá2sü rrá2s3z rrá2tal r2r1áts rre2l1i rr1emb rren2d1Å‘2 r2r1ék rré2r r2r1éri rré2sem rr1fl rr1fr r2r1i2de rr1in. rr1ing r2r1ir rri2ta rr1i2zé rr1izg rr1izm rr1k2r rr1ope rro2rál rror1osz rró1dr rró1p r2r1öb r2r1ös rr1pl rr1pr rr1sp rr1t2r r2r1u2r r2r1ür r2r1üt rry2n r2s1a2dó r2s1a2g r2s1aia rsa2il2 rs1akku rs1alan rsa2lap r2s1ale rs1alm r2s1a2lomb r2s1ana rs1a2ny r2s1a2pá r2s1a2po rs1a2ra. r2s1arc rs1arom rs1arz rs1att rsa2v1ar rsa2v1é2 rsa2v1i r2s1ax r2s1a2z r2s1ábr rság1g rs1áld r2s1á2ré r2s1árn r2s1árr r2s1á2ru rs1áté rs1áti rs1bl rs1br rs2ch2 r1sche rsc3hé rs1d2r rs1edz r2s1ef rs1e2gét r2s1e2gy rse2il rse2k1a2 rse2keg r2s1e2leme rs1elf r2s1elk r2s1e2lÅ‘a r2s1e2lÅ‘l rs1e2mi r2s1e2mu rse2ny1a2 rse2nyeg rse2ny1e2l rse2nyer rse2nyí rse2nyÅ‘ r2s1erd r2s1erk rs1erÅ‘ r2s1ess r2s1esz rsé2g1el r2s1él r2s1ép r2s1ér. r2s1érd r2s1érl r2s1érték r2s1és rs1é2te rs1étk rs1étt rs1fr rs1gl rs1gr rsi2d rs1ide rs1ido rsi2g rs1iga r2s1ikr r2s1ill r2simm rs1ind r2s1ing r2s1int r2s1i2o rs1i2pa rs1iro r2s1i2si r2s1isk r2s1ism r2s1ist rs1írn r2s1í2ró r2s1írt r1s2kál r1skj rs1kl rs1k2r r1s2lu rs2mink rs2ni rs1ond rsonya2 rso2nyal rso2nyan rso2nyat rso2nyé r2s1op r2s1ord r2s1org rs1osz rsócsa2 rsócsap1 r2s1ódá r2s1óné rsó2s3zárr r2s1öb rs1önt rs1örv r2s1ös r2s1ö2z rs1Å‘r rs1pl rsp2r r1s2rác rs1s2t rs3szag rs3szak rs3szá rs3szem. rs3szemet rs3szer. rs3szerű. 
rs3szerűb rs3szerűe rs3szerűn rs3szerűs rs3szerűt rs3szerűv rs3szi rs3szí rs3szó rs3szö r1stand r1s2tat r1s2tác r1s2tát rs2top rst2r r1strat rs1tre rs1tré r1stró r1stru r1s2tú rsu2r rs1ura rsu2t r2s1uta rs1utá rs1úr. rs1ú2ri r2s1ú2s rs1úto r2s1üd rs1üld r2s1üz rsza2k1a2l rsza2k1e rsza2kö rsza2k1ü r2szaló r2s3zam rszág1g rszáraz1 rs3zárl r2s3zené r1sz2f rsz2lo rsz2m rszom2j1a2d rs3zse rs3zsi rsz2ta rszt1al rszt1árv rszt2rá r2s3zűr. r1sz2v r2t1abla r2t1abr r2t1a2dat rt1a2dó r2t1a2ga. rt1agit r2t1ajá rt1ajtó r2t1a2kara r2t1akc rta1kre rta1k2ré rt1alapj rt1a2láb r2t1alár r2t1alb r2t1alel r2t1alg r2t1alj r2t1alk r2t1alt r2t1alv rta2m1ér rt1anim rta2n1ó2r r2t1anto r2t1antr rt1a2nyag rt1app rt1arán r2t1arc rta2rén rt1a2ty rt1a2uto r2t1azo rt1ábr r2t1áf rt1ág. rt1á2ga rt1ágb rt1ágg rt1ágj rt1ágn rt1ágr rt1ágs rtá2lad rtá2lala rtá2rada rtá2raj rtá2ramr rt1ária rt1áriá r2t1árin rt1árszi rt1árszí r2t1árta rtá2ruké rtá2rukk rtá2rukn rtá2rukr rtá2rukt r2t1árur r2t1árut rtá2s1ág r2t1ásás rtá2s3z r2t1á2t1a r2t1átl r2t1áts r2t1átv rtá2v1é rtávi2 rt1bl rt1br rt1cl rt1cr rt1dr rtea2 rt1e2gé rt1e2gye rtei2 r2t1eks r2t1elad r2t1elb r2t1elf rt1eljá rt1elker rt1elkö rt1elkü rtel2la rtel2l1á2 rt1elma rt1e2lÅ‘a rte2lÅ‘l r2t1elr r2t1ember r2t1eml r2t1enc r2t1ent rte1p rte2rac rte2rál rte2r1in rtermo1 rtermosz2 rte2r1os r2t1e2ró r2t1erÅ‘ rte1sp rt1esték rt1estné r2t1estün rte2sze. rte2szei rte2szek rte2szem rte2szet r2t1eszk r2t1eszm r2t1e2szü r2t1é2d rt1é2gi rté2kaj rté2k1a2l rtékát1a2 rték1í rté2kos rté2ku r2t1é2let rté2lén rt1é2li r2t1élm r2t1é2neke r2t1é2nekk r2t1é2nekr r2t1épü r2t1érdem r2t1érem r2t1é2ré r2t1érin r2t1é2rit r2t1értek r2t1érté r2t1érv r2t1érz rté2s1ég rtéskész1 rtés3sza rté2sül rté2szá rté2szo rté2szö rtés3zsí r2t1étk r2t1étl r2t1étt r2t1étv r2t1é2ve. r2t1é2vet rté2véné r2t1é2vér rt1évk rt1fl rt1fr rt1gr r1t2ha. r1t2hág r2t1i2de rt1idé rt1idi rt1ifj r2t1i2gé r2t1ill r2timá r2t1i2mi r2t1imp r2t1i2náb r2t1i2náé r2t1i2náh r2t1i2nán r2t1ind r2t1inf r2t1ing rti2nik r2t1i2pa r2t1irá r2t1iro r2t1isi r2t1isk r2t1ism r2t1isza rt1i2tal r2t1izé rt1izm r2t1izz r2t1íg r2t1íj r2t1ín rtí2ra rtí2rá rt1í2ve rt1í2vű rtí2z rt1ízl rt1kl rt1kr rt1kv rtmű1 rtműt2 r2t1of rto2kad rto2k1a2l rto2k1ar r2t1o2li r2t1oml rto2n1a2 rtoná2 rto2nár rt1opá rt1ope rt1opt r2t1org r2t1o2rom r2t1orr r2t1ors r2t1orv r2t1osz rtóe3rejü rtó1p rtó2rák rtó2s1ín. rtó1sp rtó2s3zár r2t1ökl rt1ököl r2t1ö2lÅ‘se rtön3n rt1ö2rök rt1öröm r2t1össz rt1ö2vez rtőé2h rtÅ‘kész1 r2t1Å‘rköd rt1Å‘rl r2tÅ‘rül rt1pl rt1pr rt1ps rt2rad r1t2raf r1t2ranz r1t2rág r1t2riá r1t2rik r1t2ril r1t2rió r1t2rón r1t2rü rt1sk rt1sl rt1sp rt1st rtsz2 rt1t2r r2t1udv rtu2k rt1uká r2tunik rtu2rai rtu2ral rt1u2rá rtu2sze rt1u2tat rt1utá r2t1új r2t1ú2s r2t1üg r2t1üld r2t1ü2lé rt1ülÅ‘ r2t1üt r2t1ü2v rt1ű2zÅ‘ r2tying rty2j rty2s r1t2zé r1t2zi rt2zÅ‘ 1ru ru2ac ru2b1i2k ru2c3h ru2cz 2r1udv rue2l rue2r ru1fr 2rugar 2r1u2gat rug3gy 2r1ugr ruhás1s 2r1ujj ru1k2l 2ruktu 2ruktú rum1agr ru2maj ru2mal ru2maz ru2m1eng ru2mer ru2mes ru2mél ru2m1i2k rum1ill ru2m1i2p ru2mis ru2m1iv ru2miz ru2mí ru2mol ru2m1o2r ru2mö ru2mÅ‘ ru2mü 2r1unc run2da ru2nit r1u2nok ru2pad ru2pe ru2p1il ru2pü ru2rad ru2rai r1u2ral ru2ras ru2rat r1u2raz ru2rán ru2rát 2r1urb ru2ruk rus1abl ru2sad ru2sal rusa2n rus1any rusa2r rus1aro ru2sas ru2sál ru2s1iz ru2sír ru2s1ol ru2s1ó2 ru2sÅ‘ russ2 russz2 rust2 rus1tra ru2s1ű2 ru2sz1é2p ru2szip r1u2tac r1u2tak r1u2tat r1u2taz 2r1utc r1u2tó. 
r1u2tób r1u2tód r1u2tói r1u2tój r1u2tók r1u2tón ru2tóp r1u2tór r1u2tós r1u2tót r1u2tóv 1rú rúda2 rú2dad rú2d1al rú2d1ar rú2d1á rú2del rú2din rú2du rú2dü rú1dy rú2d3z rú1gr 2r1újd rú2jí 2r1újk 2r1újs 2r1úr. 2r1úrb 2r1úrh 2r1ú2ri 2r1úrk 2r1úrn rú2ro 2r1úrr 2r1úrt rú2szó rú2t1a2l 2r1ú2t1e2 2r1ú2té. rú2tér rú2tit 2r1útk 2r1útle 2r1útm rúto2 2rúts 2r1ú2tü 2r1útv rú2zsad rú2zse 1rü rü2dí rü2dü rü2gyel 2rügyn rü2ha rü2la rü2lá rü2led rü2les rü2lép rü2lér rü2lí rü2l1o rü2ló rülö2 rü2l1ön rü2l1öz rü2l1Å‘g rü2lu rü2lú rü2l1ül 2r1ünn 2r1ü2nÅ‘ rü2rí rü2rü rü2te rü2té rü2tö rü2tÅ‘ rü2vö rü2ze rü2zé 1rű rű1gr rű1kl rű1pr 2r1űr. 2r1űrh 2r1űrm rű2rö 2r1űrt rű2s1orr rűs1ors rű1sp rű1str r2v1agá r2v1akc rva2lap r2v1alj r2v1alk r2v1a2rán rva2sáb rva2sék rvasu2 r2v1a2szó rva1t2r rvavá2 r2v1ága rvá2gyi r2vállam r2v1állo rv1állv rvá2nyad rvá2nyan rvá2nyú r2v1áp rvá2r1al r2v1á2ram rváro2s1os rv1á2rú r2v1á2sás r2v1áta rvá2tors r2v1e2b r2v1e2dz r2v1e2gye rv1ela rv1e2led rve2lemb r2ve2leme rv1e2les r2v1elk r2v1ell r2v1elo r2v1e2lÅ‘a rv1e2lőá r2v1e2lÅ‘kép r2v1elÅ‘r r2velÅ‘z r2v1eng r2v1ep rve2r1a rve2r1á2 r2v1e2retn rverés3s rve2rip rve2r1o r2v1e2rÅ‘i r2v1e2sete r2v1e2sés r2v1ex rv1éke rv1ékk rv1ékn rv1ékü rv1é2lel r2v1é2lé rvé2ny1el r2v1ép r2v1é2rem r2v1é2ré. r2v1érés r2v1érp r2v1érté r2v1érv r2v1é2vet r2v1évi rv1fr r2v1ido r2v1iga rvigaz1 r2v1igé r2v1ik r2v1inf rv1inte rv1inté r2v1i2p r2virán r2v1isme rvis3sze rv1ital rvi2z1a2 rvi2z1á2 rvi2z1elv rviz1o rvi2zó rvíze2 rvíz1es rv1kl rv1kr r2v1ob r2v1olv r2v1op r2v1or rv1osz r2v1ov r2v1ó2h r2v1órá rv1öss rv1Å‘s rv1pr rv1sk rv1sp rv1st rv1tr r2v1ub r2v1u2t r2v1üg rvü2l r2v1ülé r2v1ülÅ‘ r2v1ült r2v1üt r2v1ü2v r2v1ü2z ry2be ry2bó ry2bÅ‘ ry1é2n ry2ho ry2na ry2ne ry2ra ry2re rys2n ry2tó ry2tÅ‘ ry2va ry2ve r2z1a2dot r2z1a2la r2z1alj r2z1alk rz1almás rza2sz r2z1aszó rza2tal rza2t1e2 rza2tol r2z1áll rzá2r1ó2r rzá2se rzás1s rz1e2be rze1p2 rz1est rze2tal rze2t1eg rze2t1e2l rzetes3s rze2t1o rzetü2két rzéna2 rzé2n1al rzé2nat rzé2ná r2z1érm rzé2so rzi2ab rzis3s r2z1ín rzo2r1ál rzó2s3zár rzÅ‘a2 rzÅ‘e2 rz1p2r rzs1ala rzs1any rzs1apa r2zsaz r2z3sár. rz3seben r2zs1e2g r2zs1e2r r2zség r2zs1ék r2zs1ér. r2zséva r2zs1iga rzs1inj rzs1int r2zsip r2zsita rzs1or r2zs1ö2r r2zs1Å‘2 rzs1s rzs1u2t rzs1úr rzs1ült rzu2sak rzu2san rzus3s rzu2s3z r2z1ut 2s. 1sa 2sabo 2sabr 2s1abs 2s1a2cé sa2dalé 2s1adap 2s1a2data 2s1a2dato 2s1adató sadás1s 2s1adm sa2dog sa2dóc sa2dód sa2dóé sa2dóh sa2dói sa2dój sa2dóm sa2dón sa2dór sa2dóu sa2dóz sae2r sa1f2r 2saga sa2gan sa2g1ál 2sagg sa1gne 2s1a2gó sa2gÅ‘ sa1grá s1agy. s1agyb s1a2gyo sa2gyú 3sah. saha2r sa2hov sa2ig 2saja sa2jak sa2jág 2saján sa2j1ö saj2tal saj2t1a2n saj2tar sajté2r saj2t1éré saj2tor saj2t1ö saj2t1ü2 sa2k1ab 2s1a2kad sa2k1an sa2kas 2s1akci sak2k1a2r sak2k1as sakke2 sak2k1eg sak2ker sak2kes sak2kis sak2kol sak2k1orr sak2k1ó2 2sakku sa1klu 2s1akna. 2saknáb 2saknái 2s1aknák sak1ola sa2k1orm sa2k1ös sa1k2ru 2s1akt. 2s1akti 2s1aktu sa2k1ug 2s1a2kus sa1k2va 2salab sala2g1 sala2ka sa2laku 2s1a2lapa sa2lapí sa2lapl sa2lapoz s1a2lapr sa2lapt salá2da 2saláí salán1n salás1s 2s1aláté 2s1alb s1a2lel s1alépí salé2t sa2l1éte 2salf 2s1algáh 2s1algái 2s1algán 2salgásat 2s1aljai 2s1aljáh 2s1aljár 2s1alját. 2s1alka 2s1alkot s1alku 2salm 2s1a2lomn 2s1a2lomr sal2tag 2s1alte 2s1alth 2s1altit sa2l1ú sa2m1any sa2mec sa2m1il sa2m1í sa2mol sa2m1osz 2s1a2mÅ‘ sa2nal sa2nat s2and 2sane sa2nek 2s1angy sa2nód sa3nö s1ante sa2nya. 
s1a2nyag sa3nyar 2sanyá saó2r sa2p1a2dó sa2p1a2g sapa2te sa2pác sap1á2cs sap1áll sa2p1átm sa2páto sa2p1el sa2p1ék 3sapka 3sapkáb 3sapkán 3sapkás 3sapkát 3sapkáv sa2p1osz s1a2pókr s1a2pósab 2sapp s1aprí sa1pro 2sapu sa2ratá sa2rató sa2rány sar2c3ho sar2cin sa2rén sa2rit sar2j1e sar2k1e2 3sarki sar2kin sar2kir sar2k1ö2 3sarkú 3sarló 2sarm 2sarom 2sarz sase2 sa2s1eb sa1s2m sa1s2po sas3sza sas3szá sas3sze 2s1asszo 3sast sa2s1u2t sa2sú s1aszf 2saszt sata2kés sa2t1alj sa2t1alt sa2t1a2nyá sat1apu sa2t1á2rak. sa2t1árka sa2t1érk sa2t1érr sa2t1i2k sa2t1ing s1atlé satmo1 satmosz2 2s1a2tombó 2s1a2tomo sa2t1orm sa2t1ó2d sa2t1ó2né sa1t2rá 2saty sa2tyá sa2ul sa2urá 2saut sa2uto s1autó. sautóé2r sautói2ko sa2uv 3sav. 3sava. sa2v1a2da sa2v1a2l 3saván sav1ára sav1ária sa2v1árt sa2v1el sa2v1ér sav1ill sa2v1i2n sa2vÅ‘ sa2vü sa2xi sa2zon 1sá sá2b1ak sá2b1e2l sá2b1e2r sá2big sá2b1il sá2bö sá2buj sá2b1ut sá2bü sá2csá sá2gab sága2d ság1ada 2ságadd sá2gal sá2gan sá2gas 2s1ágas. ság1ass sá2gat 2s1á2gaz sá2g1ál sá2g1á2rak sá2g1árn sá2g1á2ru sá2g1e2 sá2gép sá2g1i2ko sá2gí sá2g1osz sá2g1ó2 sá2g1ö sá2gÅ‘ ság1s sá2gü sá2gű 4s1á2gy ságy1a2d sáka2 sá2kal sá2k1an sá2kar sá2kát sá2k1e2 sákköz1 sá2k1ö sá2kü s1álc sá2l1in 2s1álm s1á2lomb 3sámá sán2c1é sán2c3so sán1k2r sánt2 sán1th sá2nü sá2p1ost sá2rad sár1adá sár1a2dó sá2r1ag sá2raj 2s1á2rak. sá2raka 2s1á2rakb 2s1á2rakh 2s1á2rakk 2s1á2rakn 2s1á2rako 2s1á2rakr sá2rakt sá2r1a2l 2s1á2ram sá2r1a2n 2sá2ras sár1ass sár1asz 2s1á2rat sá2rág sá2r1ál sár1d2 sá2r1e2 2sárét sá2r1év sá2ria. 2s1á2riá sár1iko sá2r1i2p sá2r1i2s sá2rí sár1k2 sá2r1osz sár1ott sár1óni sá2rö sá2rÅ‘ 2sárro sár1s2 sárt2 sár1tr sá2rug 2s1á2ruh 2s1á2rul 2sárus sárú2 2s1á2rú. sá2rü sá2s1ad sá2sar sá2sás sá2s1e2 sá2sis sáskész1 sá2só 2s1ásó. sás1ó2r 2s1ásóv sá2sö sá2sÅ‘ sás3sz sá2s3za sászá2r1a2d 2s1á2szo 2sáta s1átad sá2t1alj s1á2tál s1á2t1á2z 2s1átb 2s1átc 2s1átd 2s1á2t1e2 2sáté sá2tél 2s1átf 2sáth 2sáti sá2t1ir 2s1á2tí 2sátj 2sátk 2s1átm s1á2tokt 3sátor sátókész1 2s1á2t1ö 3sátra 3sátrá s1átre 2sáts 2sátt s1áttö 2sátu 2s1á2tü 2sátv s1átve s1átvé s1átvi 3sáv. sá2v1a2d sáva2l sá2v1a2r sá2v1ál sá2v1á2r 3sávb sá2v1e2 sá2v1érz sá2vis sá2viz 3sávj sá2v1or sá2vö 3sávr 3sávú 3sávv sba2l1 sbeá2 sbér2cs sb2lo sb2lú sb2ra sb2ri sb2ro sb2ró scar2 1scheie sc3hek sc3hen sc3het sc3hé. 1schébe. sc3héd 1schéére 1schére sc3hés sc3hi. sc3hig 1schil sc2ri sda2dal sdai2 sdeá2 sde1b2 sde2del sde1kr sdes2 sde1sp sde1st sdesz2t sdi2ale sd2ra sd2rá sd2ro sd2ru 1se sea2d sea2l sea2n seá2l seá2r se2bag se2b1ak seb1alt sebá2 se2b1ág se2bár se2b1e2g seb1ell seb1elz se2b1esh seb1esze s1e2béd 3sebés se2b1ó2 se2bö se2b1Å‘2 seb1p seb1s se2b1u se2bú se2bül 3sebzé secs1a2p se2csev se2cs1ék 2sedez sedél1 2sedény se1dra se1d2rá se2dzé se2dzÅ‘ see2l see2n 2seff se1f2ra 2s1e2ger 3segéd 2segér segés3s s1egész1 2segét 3segg seg2ga seg2go 3segí se2gyed segy1e2l s1e2gyé 2s1egyh 2s1együ sehü2 se2h1üv seí2r sej2t1a2 sej2t1e2ny sej2ter sej2tér sej2t1o sej2tö seka2r se2k1ál 2s1e2kés. sek2k1á sek2kos sek2k1ö se1klu se1k2ra se1k2ré se2kur se2kúr sel1ak se2lál 2selb s1eldö 2s1e2lef s1e2legy se2l1e2h se2l1eké se2lemek 2s1e2lemé se2lemk se2lemm 2s1e2lemz se2l1ere s1e2lesé sele2sz se2l1eszü sele2t1a2 sele2t1e2l se2l1e2vé se2l1e2vÅ‘ selés3s 2self 2s1elha 2s1elhel s1elho sel1id se2lis 2s1eljá s1ellá 2selmél selnö2k1öl 2s1elny s1e2los 2s1e2lö se2lÅ‘ad s1e2lőí 2s1e2lÅ‘z 2selr s1elta s1eltá 2s1eltö 2s1elv. 2s1elvek s1elvű 2s1elvv 2selz 2sember s1ember. 
s1emberb s1embern 2s1embr 2semén 2semlé se2nat se2n1ál sen2d1a sen2d1á s1e2ner senés3s 2s1enged sen1ist se2n1or s1enta seny1ag seny1ak se2ny1á2 seny1ell seny1ere se2ny1él se2ny1ér. se2nyérd se2nyérte 2senyhi. se2nyigé se2nyös se2nyu se2nyú se2ny1ü2l 2senyve. s1enz seó2r se2pid s1epiz sep2pa sep2p1á2 sep2per 2sepri se1p2ro se1p2ró se2r1agy se2ral se2ram se2r1a2n se2r1a2r se2r1as se2rau se2r1á2g ser1áll se2rár s1erde 2serdÅ‘ sereg1g 3seregs 2serej se2r1e2lem. se2r1e2lÅ‘ se2r1elt se2r1elv sere1p sere2pedÅ‘ ser1e2sze. ser1eszk sere1t se2r1észb se2r1iga se2r1il seri2n se2r1ina ser1inf ser1ing ser1int se2ris se2riz ser1k se2r1ol se2ró s1eróz 2se2rÅ‘ s1erÅ‘. s1erÅ‘b s1erÅ‘d s1erőé s1erÅ‘f s1erÅ‘k s1erÅ‘m s1erÅ‘n ser1Å‘s. s1erÅ‘t s1erÅ‘v ser1s sert2 ser1tr se2r1ü2g serví2 2s1esemé 2s1e2seté 2s1esél 2s1e2sés 2seskü 2s1e2sÅ‘ se1s2p sessz1a2 sesszé2l sessz1o 2s1este se1sti s1eszen 2seszk s1eszkö 2s1eszm se2szű se2t1a2la se2tana se2t1a2ny se2tát se2t1e2g set1eleg se2t1elk se2t1elm 2setenk 2setes. 2setese 2s1e2teté 2s1e2tetÅ‘ seté2k se2t1ékb se2t1éké se2t1é2l se2t1énk se2t1ér. se2t1é2ri se2t1érté se2tika se2tiká se2tike se2t1ing se2tol set1old se1tran se1tri se1t2ro 2settk seü2t 2s1e2vés 2s1e2vÅ‘ se1ye se1yé se1yi s1ezr 1sé 2s1ébr sé2f1a sé2f1á sé2g1a2 sé2g1á2 3sége. sé2g1eg 3ségei 3ségek ség1ele sége2lem sé2g1ell 3ségem sé2g1ent sé2ger ség1erk sé2g1esz sé2geté sé2g1e2ti sé2gev ség1é2je sé2g1é2k sé2gép sé2g1ér. sé2g1é2ré sé2g1érn sé2g1érr sé2g1értÅ‘ sé2g1é2rü sé2g1érv ség1ész ség3g ség1iga sé2g1igé ség1ist sé2gí 3ségk ségo2 sé2g1ok sé2gol sé2g1ó2 sé2gör sé2gÅ‘2 ség1Å‘r ség1Å‘s ség1s sé2gu sé2gú 3ségü sé2g1ü2lÅ‘ ség1ült 2s1éh. 2s1é2he. 2s1é2hek 2s1é2hes 2s1é2het 2s1é2hé 2s1éhh 2s1éhr 2s1éhs sé2jem s1é2ji s1éjs sé2kek s1ékho 2s1ékm 2s1éks sé2k1ú sé2lel s1é2let 2séll 2s1élm sélÅ‘kész1 sé2lű 2sély sé2lya sé2lyeg 3sémá sé2mu s1é2nekb 2s1é2neke s1é2neké 2s1é2nekh 2s1é2neki s1é2nekk 2sénekl 2s1é2nekn s1é2nekr s1é2nekt s1é2nekü 2s1é2pí 2s1é2pü sé2ral sé2ran sé2rát 2sérb 2sérc s1érde sé2réé sé2rie sé2rig s1érlel 2sérm sé3ró 2sérr s1értéke 3sérvb s1érvei 3sérvem 3sérves 3sérvh 3sérvr 3sérvv 4sé1ry 2s1érz 2sésdi 2séseki sé2s1el 2séssz 2séstú sész1ak sé2sz1á sé2sz1emb sé2szir sé2sz1o sész3s sét1abl sé2t1a2d 2sétk sé2t1o sétÅ‘2 sé2t1Å‘r 2sétr 2sétt 2sétv 2s1év. 2s1évad 2s1évb 2séve. 2s1é2ved 2sévei 2s1é2vek 2s1é2ven 2sévet 2sévéb 2s1é2véi 2sévén 2sévét 2sévév 2s1évf 2sévh 2sévi. 2s1évk 2s1évn 2s1évr 2s1évt 2s1é2vü 2s1évv sfaá3gac sfa2gy1al sfenyőé2 sfé2má sfé2m1e2kéh sfé2m1é sfé2mi sfiúé2 sfi2úéra sfi2úét sf2le sf2lo sf2ló sf2lu sfo2k1út sf2ra sf2rá sf2re sf2ri sf2rí sf2ro sf2rö sga1b sga1d2 sga1p sga1tr sge2o sg2le sg2ló sg2nó s1g2ra sg2rá sg2ri sg2ró shelyü2kü 1s2hi. 1s2hih 1s2hij 1s2hik shitköz1 s2hop s2horr 3s2how shú2sár shús1s 1si si2ac sia2d si2aé si3ak sia2l sia2ny siá2ro si2b1á2 sibilis1 sidás1s si2dea si2deá si2deg si2dei si2den si2deo s2idet 2sidé s1i2dén si2dom 2s1i2dÅ‘ si2du si2eg sie2l si2en si2eu 2s1ifj 2siga s1i2gazo 2sigá 2sige s1i2ge. s1i2gek s1i2ges 2s1i2gé sig2n1e sig2n1ó2 2siha 3sihed 2s1ihl si2k1a2n sike2r1esz sike2s si2k1in si2k1ir si2k1old s1i2konh s1i2konj 2s1i2konk s1i2konn 2s1i2kono s1i2konr 2s1i2kont s1i2konu sik1orr si1kré sikus1s 2simád si2m1el s1i2mit 3simí 3simog 2simp s1impor si2m1u2t 2s1i2nas 2sind s1indu si2ner 2sinf 2singa s1ingá s1inget s1ingé s1ingók si2nic 2sinj 2s1inku 2s1ins s1inte 2sinv s1invá 2s1inz si2onn s1i2ono si2óa si2óá si2óc si2óda si2óe si2óg si2ói2k si2óo si2óp si2óú 2sipar. 
2sipara 2sipará 2siparb 2s1iparc 2siparh 2siparm 2siparn 2s1i2paro 2siparr 2sipartá 2sipartó 2siparu 3sipí 3sipka 3sipká 3sipol 2s1i2ram s1irg 2s1irh 2siro s1i2rod 2s1i2rón 2sirt s1irtó si2sad si2sél si2s1is si2s1í2 sis3s sist2 si2s1ü2 si2s3zab s1i2tal sita2li si2te. si2t1i si2t1ö sit2tin 3sity siú2t 2s1i2vad 2s1i2var 2sivás 3siví 2s1i2vó 2sizé si2zé. si2zéb s1izmo 1sí sí2gé sí2ja sí2ju sí2kas síke2 sí2k1er sí2kí sí2kü sína2 sí2nac sí2nal sí2nan sí2n1á2 sí2nel sí2nö 2síns sí2n1ú sí2r1ad sí2r1ál sírá2s3z sír1á2to sí2red sí2r1e2m sí2ren sí2res sí2r1e2t sí2r1én sí2rir sí2rí s1í2róa sí2róf sí2ról sí2róp sí2rö sír2t1e2v sí2r1ü sí1sp sí1st síté2kol 2sítél sítés3s sítÅ‘a2 sítÅ‘e2 sí1tr 2s1ív. sí2vá 2s1ívb s1í2vek s1í2ven s1í2vet sí2vé sí2vű sí2za sí2ze. 2s1ízl s1ízr 2sízü sí2zül sje2gy1á2 skao2 skapocs1 ska2r1i2m skaros3 ska2ró2r ska1sm skat2 ska1tr skau2 1skálák 1skálár ske1p2 ske1s2p ské2p1el skiá2 1s2kicc skie2 sk2jö sk2la sk2li sk2lí sk2lo sk2ló sk2lu skolás1s s2korpi sko2s1a2rai skó1p s1k2rá s1k2ré s1k2rit sk2rí sk2rón sk2ru sk2va sk2vi sk2vó sky1ér sky1i sla1d2 sla2g1e2 sla1s2p sla2t1a2l sla2t1a2n sla2tel sla2tev sla2tés sla2t1é2t s2lág 1slágerei slá2nyal slás3s slega2 sleg1g sleí2 sle2tal sle2t1el sle2t1em sle2tél sleves1s sléi2 sléke2 slé2kev slé2k1ol slés3s slic2c1elv sli2d sli2ká 1s2liss slÅ‘s2 s2lus smaó2 smarc2 smas2 sma1sp 3s2már. 3s2máru sme2g1a2 sme2g1é smen2tel 1smirg sna2pal sna2p1e sna2p1or snap2sz1e2 sne1yi snit2t1elv snőé2h 1so s1obj so1c2k sodaéne2 soda1g2 so2d1e so2dén so2dév so2did so2dis so2dí so2dob so2d1org so2d1os so2dö so2dÅ‘ sodrás1s so2dú sod1út so2dü so2d3z so2kab so2kai so2k1aj so2k1a2la so2k1ap so2kar soka2t so2k1ato so2k1ál so2k1árn so2k1e2 sok1ill so2kí 3sokk. sokka2l sok2k1ala sok2k1a2p 2sokke sok2k1el sok2kir sok2k1ö2v sok2k1ü 2sokl 2sokm so2kok sok1old 2so2kos 2s1o2koz so2k1ó2 2sokta s1oktat so2k1ú2 so2kü 2s1o2laj sola2j1á2r sola2je sola2tel solás1s 2s1olda 2s1oldá 2soldó s1oldó. s1oldók s1oldóm s1oldón s1oldór s1oldót s1oldóv so2lim 2s1olló solóó2 s1oltás 2solvas so2lyan so2lyó so2m1ad somag1g so2m1a2l soma2t som1ato so2m1e2 so2mél so2m1éte so2mil so2m1ita so2mí so2m1odo som1ort so2m1osz som1p so2mú so2mü son2ch 2sond so2n1e son2kál son2k1e2 2sonó son2tab son2t1a2l son2t1a2n son2tar son2t1á2r son2t1e2 sonté2r son2t1éré son2tik son2tip son2tö son2tÅ‘ 2sonv so2nye so2nyis so2ór so2ós s1opc s1o2pe sor1ada sor1ala sor1áll so2r1átl 3so2r1e2 sor1f2 s1orgi so2rid 2sorie so2rif so2rim so2r1ing so2ris 3sorké sormű1 sorműt2 sor1oszt 3soroza so2r1ó2 so2rö so2rÅ‘ 2s1orr. s1orrát 2sorrb 2s1orrn 2s1orro 2s1orrú 3sors. sor2sal sor2sar sor2sas sor2s1e2 sor2sir 2sorsó sor2sü sor1sz sor2szón sor1tre sorú2t so2r1úto so2rü so2rű 2s1osto 2s1ostr s1osty 2sosz s1oszl s1oszt so2ul so2ur so2ve so2vis so2xi 1só sóá2g sóá2r sócsapá2 só2dái só2dák só2dáv sógé2p1e2ké só1g2r sói2g sóí2v só2k1a2d só2kil só2kim só2kó só1kré sómű1 só2né. só2ni sóó2r só2rai só2ráb só2rái só2ráj só2rám só2rán só2rár só2rát só2riá só2s1ü2 só1sz sótá2ny1ér só1t2r 2s1ótv 2s1ó2vó 1sö sö2bű sö2ga sö2gá sö2g1e2l sö2g1em sö2g1ék sög3g sö2go sö2g1ö2lé sö2g1ölt sö2gű sö2ka sö2k1el sö2kid 2s1ö2ko s1ö2kör sö2kú sö2lÅ‘j s1ö2lÅ‘z s1öly sö2na sö2ná sön2d1a2 sö2no sö2n1öl sö2n1Å‘ s1öntv sö2nú 3sör. sö2ra sö2rá 3sörb 2sörd sö2r1ed sö2reg sö2ren sör1f2 sö2r1iz 3sörk sö2ro sö2ró sö2r1ö2l sö2r1Å‘ sör1s sö2ru 2sörv s1össz s1ösv s1ösz 2s1ötl 2s1ötv 2s1öv. 
2söve s1övv s1ö2zön 1sÅ‘ sÅ‘a2c sÅ‘a2l sÅ‘a2n 2sőá2g sÅ‘1bl 2sÅ‘bok sÅ‘2dad sÅ‘2dalk sÅ‘2dá sÅ‘2del sÅ‘2din sÅ‘2d1Å‘2r sÅ‘1dro sÅ‘e2l sÅ‘e2r sÅ‘e2s sÅ‘1gr sÅ‘1kl sÅ‘1pl sÅ‘1pn sÅ‘1pr sÅ‘2r1aj sÅ‘2r1á2csot sÅ‘2riz sÅ‘2rol sÅ‘r1ö2l s1Å‘rti s1Å‘rz sÅ‘2s1av sÅ‘2sim sÅ‘s1orra. sÅ‘2s1orrá sÅ‘2s1orró sÅ‘1s2p sÅ‘1sta sÅ‘1str sÅ‘sü2 sÅ‘2s1ül sÅ‘2s1üt sÅ‘2szap sÅ‘2sz1áll sÅ‘szé2k sÅ‘sz1éké sÅ‘2sz1í spa2d1a2 1s2pann s2patu s2páj spá2n1a 1s2párg 3s2peci 1spektr 1s2peku 1s2pék s2p1ér 1spirálú spis3s sp2le2 s1p2lé sp2lu sp2ne 1s2pong sporta2 spor2t1al 1s2portág spor2tár 3s2portb spor2t1e2 1s2porté spor2t1érd 1s2porth spor2t1i2n 1s2portj 1s2portn 1s2portoka 1s2portol 1s2porton 1s2portos 3s2portot spor2t1ö spor2tÅ‘ 1s2portra 1s2portró 1s2porttó 1s2portu spor2t1ü2 1spórák. 1spórol sp2ra s1prak 3spray s1p2re s1p2ré 1spric s1p2ro s1p2ró sp2sz 1s2raf srau2 1s2ráco sren2d1Å‘2 3s2róf srú2de ssab2b1i ssa2vo sság3g ssé2g1e2l ssé2g1é2j ssé2gid s2s1ékt s2s1i2z ss2kál ss2ko ss1k2r ss2lá sso2m1o ss2pó s1s2rá ss3sz ss2tad ss2tar ss2tat s1s2tí ss2to s1st2r ss2tú ss1ura ssy1ér ssy1ét s2sz sszae2 ssza1p2r sszat2 ssza1tr ssz1á2g ssz1á2ram ssz1á2ruk ssz1á2sz s3száza s3szedé sszeg1ér. sszegü2l ssze2g1ülÅ‘ ssz1egye s3szekv ssz1e2lÅ‘a s3szeml s3szemm ssze1p2 ssze2reib ssze2reid ssze2rein ssze2reké ssze2rekt ssze2rem. ssze2remm ssze2rese sszer2t1á2r ssze1t2r s3széki ssz1é2lÅ‘ sszér1in s3széto s3szige ssz1ing sszis1s sszí2vel ssz1k2 sszkész1 s3szob ssz1osz ssz1ósá s3szöc s3ször ssz1ös s3szöv ssz1p2 ss3zsi sszt2 ssz1tro s3szur sszus1s ssz1úr. 1stabil 1stadio st1a2dó 1stafí s2t1alj s2t1alk s2tand 1standa sta2n1ó2r stan2s1é st1a2nyag s2tarc 1s2tart. star2tas 1startos 1s2tartot 1startt sta1sl sta1s2t sta1t2r sta3u stau2t s2t1a2x st1azo 3stáb. 1stábj 1stábk 1stábn 1stábot st1ág. st1áld stán2c1ol stán2s1á2 stá2riá s2t1árka s2t1árká st1á2t1a2 st1áts 1s2tátu st1átv st1br st1dr ste2a 1s2teak s2t1ebé s2tedén s2t1e2gy s2t1e2ke. s2t1elf s2t1elh s2t1ellen s2t1elm s2t1e2lo s2t1elö s2t1e2lÅ‘ s2t1elr s2t1elt s2t1elv s2t1ember s2t1e2mel s2t1eml ste2n1a ste2n1á ste2neg ste2n1és sten3n ste2nos ste2n1Å‘2 1stepp ste2rad ste2raj ste2ral ste2rav ste2rác ste2rál ste2r1e2le 1sterili ste2r1int ste2ris ste2r1o s2t1erÅ‘ ste2sz s2t1eszm ste2u 1stégek 1stéget 1stégg 1stégr sté2k1a2 sté2ká stékát1 stékáta2 sté2k1el st1é2kesí st1ékí sté2k1o sté2ku s2t1é2le st1é2lé s2t1élm s2t1élt st1élv s2t1érc s2t1érem s2t1é2ré s2t1é2rÅ‘t s2t1érté s2t1érz stés3s s2t1észl s2t1étk s2t1é2ve. st1fl st1fr st1gr s2t1i2d sti2g s2t1ige s2t1igé s2t1i2ma s2t1i2má 1s2timm s2t1imp sti2na. s2t1ind s2tinf s2t1ing s2t1inv s2t1i2o st1i2pa st1irá st1iró stis3s sti2sz s2t1isza s2t1i2vá st1i2zo s2t1íj 3s2tíl s2tír st1írá st1í2v st1í2z stká2ro st1kr st1kv stola2t sto2lato sto2p1a 1s2topb 1s2toph 1s2topr 1s2topt sto2rál sto2rás sto2re sto2ris st1o2x 3s2tóla 3s2tólá st1ó2rá 1s2tós stö2k1ölt s2t1önt s2t1örö s2t1össz stÅ‘a2 stÅ‘e2 stÅ‘kész1 s2t1Å‘ra s2t1Å‘rb s2t1Å‘rc s2t1Å‘reg s2t1Å‘rei s2t1Å‘ré. s2t1Å‘rén s2t1Å‘rér s2t1Å‘rév s2t1Å‘rf s2t1Å‘rg s2t1Å‘rh s2t1Å‘2ri st1Å‘rl s2t1Å‘rm s2t1Å‘rn st1Å‘rp s2t1Å‘rs s2t1Å‘2rü s2t1Å‘rv st1pf st1pl st1pr 1strando 1st2rap 1straté s1trág 1st2réb s1t2rén st2riá st2róf 1strófá 1st2ruc 1struktú st1sc st1st st1sz st1t2r s2t1udv 1stukk st1uni st1u2ral 3s2túd st1úr. st1út s2t1üg s2t1ü2lÅ‘ st1üst s2t1üt s2t1ü2v s2t1űr. s2t1ű2ri s2t1űrn s2t1űrt 1su suá2r subi1 s1udm 2s1udv 3sugár. 
3sugárr 2s1ugr 3suhi 2sujj suj2j1a2da su2k1ará su2ke su2k1ö su2k1ü 3summ su2nal 2s1und 2s1u2ni su2no supe2 2sura s1u2rad su2rai su2rak su2ral su2rat su2rát 2surn 2s1u2ru su2szo 2s1u2tac 2s1u2tad 2sutak s1u2tal 2s1u2tam 2s1u2tan sutas1s 2s1u2tat 2su2tazi s1u2tazó 2s1utc 2sutó su2tód 2sutu 1sú sú2csal sú2csat sú2csem sú2cser sú2csip súcská2 sú2cs1ö sú2cs1ü2 súcs3z súdi2 súi2m s1újh 2s1ú2jí 2s1újs 3súly 2súr. 2súrb 2súré 2súrh 2súri 2súrk 3súrl 2súrn 2súrp 2súrr 2súrt sú2sén sú2só 2súsz súszó2s3z 2s1útb sút1en 2súté s1úth 2súti sú2tig 2s1útj 2s1útk s1útn 2súto s1ú2ton sú2t1Å‘ 2s1útr 2s1útt sútü2 sút1üz 2s1útv 1sü sü2d1e sü2dí sü2dü 3sü3gé s1üggy 2s1ü2gy 3sü2k1a sü2kü 2süldö sü2led 3sültr sülye2 sü2lyes sü2ná süne2 sü2nev sü2n1é 2s1ü2nÅ‘ sü2rí 2s1ü2rü 2süst 2s1ü2sz 2sütk 3sütÅ‘b sü2ze sü2zé 1sű sű1pr sű2r1a 3sűrí s1ű2ru sű2zÅ‘ sva2s1u2 svá2gy1ón. svá2nyan svá2r1al svárosi2 sven1 svezetőé2 své2nyel své2tes svíze2 svíz1es s2vun sw2hi swhisky2 sy2bÅ‘ sy1ig s2z 1sza szaa2 3szabáz 3szabd sz1a2dás 2sz1a2dó szaé2 3szaft 3szag. 3szagb 3szagú 2szagy. 2szagya 2szaj. 2szajá 2szajj 2szajú 2szakci 3szakm sza2k1ó2r sza2köz 2szalás 2sz1alb 2sz1alf 2szalg 2szalj sz1alj. sz1aljak sz1aljo 2szall 2szaln 2sz1alp 2sz1als 2szalt. 2sz1alte 2szalto 2szaltu 2szane sz1antr 2szanya 2szanyó 2szap. 2szapá 2szapb sza2pel 2szapé 2szaph 2szapi 2szapj 2szapn 2szapr 2szapt 2szarán 3szarb 2szarc 2szaré 3szart 3szarv 2szasp sz1assz sz1aszt sza2tala 3szaty 2szaut szau2ta 3szax 2szazo 1szá 2szábr 2szágó 2szágy szá2gyá szá2gyo szá2j1e szá2ke 2száldo szá2lin 3szálú szá2man szá2mar 3számá szá2mál 3számc szá2m1e2 3számé szá2m1érté 3számí 3számk 3száml 3számm 3számn szá2mor 3számt 3számú 3szánd 3szánt 2száp 2száradd 2szárás szá2r1e 2száriá 3szárí 3szárm szá2r1ó2ra szárt2 2száru. 2száruh 2szárus 2szásó. 2szátd 2sz1á2t1é 2száti 2szátl 2szátm 2szá2t1ö 2száts 2sz1átu 2sz1á2tü 2szátv sz1bl sz1br 1szcéna sz1cl sz1d2r 1sze 2szeb. 2szebé 2szech 2szecse 2szedz 2szeger 3szegfű 2szegz 2szeh 3szekcsÅ‘. sze2keg 2szekéit 2szekés sze2kés. 3szekrén 3szekto 3szele. 2szelef 2szelemb 2szelemek 2szelemé 2sz1e2lemk 2szelemm 2sz1e2lemr 2szelemü 2szelemz 2szelere 3szelet 3szelén 2sz1elf 2sz1elgo 2sz1elha 2szelhá 2sz1eljá 2szelkü 2sz1ellá 3szellÅ‘ 2sz1elm 2szelnev 2szelnö 2szelo 2szelö 2sz1e2lőá sze2lÅ‘dö 2sz1e2lÅ‘f sze2lÅ‘ré 2szelsÅ‘ 2sz1elszá 2sz1elta 2szeltá 2szeltű 2szelu 2sz1elül 2szelv. 2sz1elvá 2szelvek 2szelves 2szelvez sz1elvi 2szelvn 2szelvt 2sz1elz 2szember 3szemek 3személ 3szem1p2 3szemű. 2szenc 2szener 3szenny 3szentm 3szenz szeo2 2szepi sze2r1á2l 2szeredm szer1e2ge sze2rej 3szerel szer1eszt 3szerke 3szers 3szerta szer2tálo 3szerv szervíz1 3szerz 2sz1esd 2sz1esemé 2szesetb 2szesete 2szeseté 2szeseti 2szesetr 2sz1e2sés 2szeskü 2szesÅ‘ sze1sp 2szeste 2szesté 2szesti 2szestj 2szestr sze2t1e2lÅ‘ sze2t1é2k 2szetika sze2ton sze2tÅ‘r 2szeve 2szevÅ‘ 2szexp 3szezo 2szezr 1szé 2széber 2szébr 2szég. 2szége 2szégÅ‘ 2szégr 2széhe. 2széhen 2széhet 2széhs 2széji 3széke. 3széked 3székei 3székek 3székem 3székes 2székez 3székéb 3székév szé2k1ol szé2kos 3széks 3székü 3szélek 2széneke 2szénekn szé1pró 3széps 2sz1ér. 2sz1érb 2sz1érc 2sz1érd sz1é2rel 2szérem szé2rem. szé2remm 2sz1é2ren 2széré szé2ré. szé2rén szé2rér szé2rét szé2rév sz1érf sz1érg sz1érh 2sz1érin sz1érj 2sz1érk 2sz1érl 2sz1érm sz1érn 2szérÅ‘. 2szérÅ‘i 2szérÅ‘k 2szérÅ‘t sz1érp 2sz1érr sz1érs sz1értá 2sz1érte 2sz1érté sz1értí 2sz1értÅ‘ 3széru 2sz1é2rü 2sz1érv 2sz1érz szé2tel széte2s 2szév. 2szévad 2szévb 2széve. 
2szévei 2szévek 2sz1é2vet 2szévén 2szévét 2szévév 2szévf 2szévi 2szévk 2szévn 2szévr 2szévs 2szévt 2szévü 1szférai 1szféráé 1szféráso sz1fl sz1fr sz1gl 1szi szi2ab 2szide 2sz1idé 2szidÅ‘ 2szifj 2sz1i2ga 2sz1igé 3szign szi2k1a2s szi2k1e2r szi2k1ó2 sz1imak 2sz1i2má 3szimb sz1impr 2szimpu 2szinas 2szinde 2szindí 2szindu sz1info 2szing sz1inko 2szinteg 2szio 2szirat 2szirá 2sz1i2ri 2szirod szi2sí 2sz1i2sza szi2szá 3szitu 2szivad 2szivás 2szivó sz1izg 2sz1izz 1szí 2sz1íg 3színe 2színna szí2vár 2szívi 3szívű 2sz1íz szka1pr 1szkarab szk1arch 1szkenn 1szlávh 1szlávok 1szleng. 1szlengn 1szlengr szle2t1e2l szle2t1o 1szloge 1sz2mok 1sznob 1szo 2szobj 2szoda. 2szodai 2szodak 2szodú 3szof 3szoká 2szokke 2szokoz 2szoksö 2szolaj szo2l1ál 2szolda sz1oldá 2szoldó 2szo2lim 2szolló 2sz1oltár 2szoltás 2szolvad 2szolvas 2szombor. 3szoms szo2nas szo2nár 3szond 2szora szo2r1ál 2szorm 2szorn 2szors 2szorv 2szostá 2szosto 2sz1otth 3szov 2sz1ox 1szó 2szólm 3szóló 2szónád 2szónái 2szóni 2szónod 2sz1ó2rán 2szórát 2szóri szó2sík 3szósz. szósza2k szó2száll szó2szón szó2szülÅ‘ szó1tr 1szö 2szöbli 3szöge 2szöldes 2sz1öle 2szöre 2szöv. 2szövei 2sz1öz 1szÅ‘ szÅ‘a2 szÅ‘e2r szőé2 3szÅ‘lÅ‘ szÅ‘2ra sz1Å‘si 2szÅ‘z sz1Å‘ze 1sz2pí sz1pl 1szponz szrádi2 sz3saj sz3sap sz3sas sz3sav s3zsák sz3sán sz3sár sz3sás sz3sát sz3sáv sz3seg s3zsem s3zsen sz3sep sz3ser sz3sé sz3sh sz3sik s3zsin sz3sis sz3siv sz3sín sz3s2k sz3sl sz3sod sz3sok s3zsol sz3sor sz3só sz3sör sz3söv sz3s2p sz3s2r sz3s2t s3zsúll sz3sü sz3sz sz2t1álla sztá2r1a2d szt1á2ram sz2tá2ras sztá2rat sz2táre sz2tárf sz2tárh 1sz2tárj sz2tárny sztáró2 sz2táru szt1á2ruk sz2tárv szte2r1el 1sztereo szté2g szt1ége sz2térte sz2t1érv sz2t1été szt1örök sz2t1Å‘rn 1sztrádá 1sztrájk sz2tür 1sztye 1szu szu2b szub1o szuc1 2szud sz1udv 2szugo 2sz1ugr 2szuh 2sz1uj 3szuká sz1u2ra 2sz1u2rá 2szuta sz1utó 2szutu 2s3zuz 1szú 2szúg 2szúj sz1úron 2sz1úrr sz1úrtó 2szús 2szúté 2szúth 2szúti 2szútj 2szútn 2szúto 2szútr 2szútt 2szútv 2szúz 1szü 2sz1üd 2szügg 3szüks 2szüld 2sz1ü2led 2szülÅ‘se 3szüne 3szürke 3szürkés 2sz1üs 2sz1üv 1szű 3szűk 2szűrödn 1szvit. 1szvitet 1szvitj 1szvitn 1szvitt sz3zs 2t. 1ta taa2d taa2l taát1 taáta2 taboz1 ta1bra 2t1abro ta1bró 2t1abs ta1cl ta2dalé 2t1adap ta2das t1a2data 2t1a2datb 2t1a2dato ta2datu 2t1a2dá 2tadio t1adj t1adl 2t1adm ta2dod 2t1a2dog 2t1a2dot 2tadó t1a2dóa ta2dóá ta2dób ta2dód ta2dóf ta2dóg t1a2dóh t1a2dóig t1a2dóik t1a2dóin t1a2dóit ta2dóí t1a2dój t1a2dóka t1a2dóké t1a2dóko t1a2dókra ta2dól t1a2dóna t1a2dóná ta2dóp t1a2dór t1a2dótó ta2dóü t1a2dóv ta1drá ta1d2re ta1dro t1ads ta2dun t1adv tae2l tae2r ta2ero taé2r 2tafí ta1f2r taf2ta ta2g1aj ta2gav taga2z tag1azo ta2gág ta2g1ál ta2gec ta2g1el ta2g1e2r ta2g1é2g 2taggo 2t1aggr ta2gid ta2giz ta2g1osz ta2g1ott ta2góc 2t1a2gón ta2g1ó2r ta2góv ta1g2raf ta1g2ram tagrádi2 ta2g1u2s ta2gut ta2g1ü2 2t1agy. ta2gya. ta2gyáb ta2gyáh 2t1agyb 2t1agyn 2t1agyr 2t1agyv ta2ire tai2rón tai2z ta2jé ta2j1u2s ta2jús 2t1a2kad ta2k1ál taká2r tak1áro ta2kás ta2kátk 2t1akce 2t1akci take2l tak1ele ta2k1ér. tak1é2sze 2t1akkora 2takkord 2t1akku ta1k2la ta1klu t1akna. ta2kó. ta2k1öb ta2k1öröm ta2k1ös ta1krá tak2reá ta1krí 2t1akro ta1k2rón 2t1akta. tak2tem 2takti. 
2t1aktiv 2t1aktí 2t1aktj taktus1s ta2kus ta1k2va ta2l1a2da ta2l1a2dá ta2l1adh ta2l1adj ta2l1adn ta2l1a2do ta2l1a2dó ta2l1adt ta2l1a2du ta2l1adv 2tala2g1 t1a2lagu ta2lagú 2t1a2lakb tal1akc 2talakí 2t1a2lakj 2ta2laku 2t1alakz ta2l1a2l 2t1alany 2t1a2lapa 2t1a2lapí ta2lapk t1a2lapl 2t1a2lapsz ta2l1a2r ta2l1as tala2te 2talatti ta2l1au 2taláá ta2l1á2g 2taláí ta2l1állo tal1á2rak talás1s ta2l1áta tal1átr tal1ell ta2l1elÅ‘ ta2l1eng tal1esi talé2g talé2k1e2 ta2l1ér. 2t1alge ta2l1i2ko tal1ikr tal1imp tal1ina. ta2lip ta2l1isk ta2l1í2r 2taljas 2t1alji 2taljz 2t1alkal 2talkím 2talkoh 2talkot 2taller tal3ly ta2l1ol talo2m1e ta2l1osz ta2l1Å‘r tal2p1á2ro tal2pel tal2p1il tal2pus tal1t2re ta2lud 2t1a2lulj ta2l1u2r ta2l1u2t ta2lúr ta2l1út. ta2lü ta2lű 2talveo ta2mal tam1alm ta2maz ta2m1i2d 2t1a2mÅ‘ t1ampa 2t1ampu 2t1amur ta2mü ta2n1aj ta2nal ta2nan 2t1a2nat tan1áll tan1álo tanás1s tan2del 2tandr ta2n1el ta2ner 2ta2n1es ta2n1ez ta2n1éj ta2n1ér. ta2n1érk tan1évb tan1é2vé tan1é2vi tan1évm ta2nid 2ta2nim tan1ist tanké2r tan2kére tan2kés tankó2 tan2k1óra 2tanny ta2n1osz ta2nód tan2te. tan2t1el tan2tors tan1trá ta2nü ta2nű 2tanyag 2tanyád 2t1a2nyó tao2l taó2r 2t1a2pa. 2t1a2pai ta2paké ta2pasá 2t1a2páb 2t1a2pád 2t1a2páé 2t1a2páh 2t1a2pái 2t1a2páj 2t1a2pák 2t1a2pám ta2pára ta2páró 2tapáu 2t1a2páv ta1p2la ta1plé t1a2pó. 2tapp ta1p2ré 2t1aprí ta1p2ro tap2sor taps3s tap2s1ü2 2tapun ta2rabe ta2rai 2t1a2ras 2t1a2rat 2t1a2ráb tarádi2 2t1a2ráh 2ta2rán 2t1a2rát 2t1a2ráv 2t1arbi 2t1arc. 2t1arch 2t1arco t1arcu 2t1arcú ta2r1i2k ta2romá tar1s2 tar2tab tar2t1e2l tar2t1em tar2t1en tar2t1ér. tar2tit tar2told 2tartos 2tartr 2tartt 2tasc ta2sem 2tasf ta1s2ká ta1slu ta1s2m 2tasnÅ‘ ta2s1ol 2t1aspi ta1spr 2tasru 2tassz tas3szá tas3szt tast2 ta1s2ta 2taste ta1str 2tasv ta1sy 2t1aszk ta1szl ta2t1alj ta2t1alm ta2t1aszt tatá2rai. 2t1a2tei tate2s 2tatika 2tatiká ta2t1i2na. ta2t1i2ná ta2t1ing t1atlas t1a2tomo tat1orj ta2t1Å‘r ta1t2ri 2t1atti tat1u2ra ta2tya ta2tyá 2t1a2uk tau2ta tau2tá 2tauto taü2z 2tavan 2tavatá 2tavató ta1wh ta2zon 1tá 2táb. tá2b1á tábe2sz1é2l 2tábé tá2big tá2bin 2tábj 2tábk 2tábn 2tábok 2tábon 2tábot tá2bö 2tábr t1ábrá tá2bü 2táciu tá2fa tá2fá 2tág. tá2ga. tága2d tág1ada 2t1á2gai tá2gaz 2t1á2gá 2tágb 2t1ágc 2t1á2ge 2t1á2gé 2t1ágf 2tágg 2t1ágh 2t1á2gi 2tágj 2t1ágk 2t1ágm 2tágn 2t1á2go 2tágr 2t1ágt 2t1á2guk 2t1á2gun 2t1á2gú 2t1ágv 2t1á2gy tá2hí tá2jal tá2jaz tá2j1e2g tá2j1e2l tá2jí tá2j1ok tá2j1ö2 tá2jÅ‘ t1á2jul tá2lab tála2d tál1ada tá2laj tál1a2lap tá2lap 2t1álar tá2l1ál tá2l1áth 2t1áldá 2t1áldo tále2 tá2l1eg tá2l1el tá2lél 2t1áll. 2t1állam 2t1állan 2t1állat 2t1állás 2t1állh 2t1állí 2t1állj 2t1álln 2t1állo 2t1álls 2t1állt 2t1állu t1állú 2t1állv 2t1álma 2t1álmi 2t1álmok 2t1á2lomr tá2lomt tá2lö 2t1álru tá2lü tá2lyab tá2ly1a2c tá2lyad tá2ly1a2g tá2ly1a2l tá2ly1a2n tá2lyap tá2ly1at tá2lyau tá2lyátl tá2ly1á2z tá2lyid tá2lyir tá2lyis táma2sze tá2mí tá2mos 2t1á2mu tán1alm tá2nár tánc3c tán2c1e tán2céh tán2cél tán2cén tánckész1 táncs2 tán2csá tán2csor tán1d2 tá2n1e2 tá2ní tá2n1ó2 tá2nö 2tánpó tán2s1e tá2nü tá2nű tá2nyal tá2ny1as tá2nye 2tánz tá2p1a2 tápá2 tá2pál tá2p1ár tá2pát tá2p1e2 tá2p1il tá2p1in táp1oll tá2p1osz tá2pÅ‘ tá2pü 2t1á2radá tár1a2dot tá2radó tá2r1a2g tár1ajt tá2r1a2l 2tárama. 
2táramá 2tárami 2t1á2raml 2táramok 2táramol 2táramot 2t1á2ramt 2táramu 2táramú tára2n tá2r1any tá2rap tá2r1asz tá2r1att tá2r1au tá2r1av tá2rág tá2r1ál tá2r1á2sz tá2r1átl 2tárboc tá2r1e2 tá2réd tá2rés tár2gyö tá2r1i2k tá2r1i2p tár1isk tá2r1ism tá2rí tár1k2 2t1árkád 2t1árkána 2tárkár 2t1árnyé tár1okm tá2r1osz tá2róc tár1órá tá2rö tá2rÅ‘ tár2s1alt 2tártás tárt1öltÅ‘ tár1tro 2táru. 2tárua 2tárub 2táruc 2t1á2rug 2t1á2ruh 2tárui 2t1áruj 2táruna 2tárus 2tárut. tár1uta 2táruü 2táruv 2tárúé tá2rúj 2tárúk tá2rús tá2rü tá2rű tá2s1a2d tá2s1aj tá2sal tá2s1a2r tá2saz tás1á2ga tá2s1á2rai tá2s1á2rá tá2s1á2ré tá2s1árh tá2s1árn tá2s1á2ro tá2s1árr tá2s1árt tá2sás 2t1ásáso tá2s1á2to tá2s1e2 tá2sis tá2sodv tá2s1ol tá2sor tá2só 2tásók tás1ó2r tá2sö tá2sÅ‘ tás3s tást2 tás1tr tá2sut tá2s1ü2 tá2sű t1ásván tá2sz1ak tá2szal tás3zav tá2s3zá tá2sze tás3zen 2tászi 2tászo tá2szos tá2s3zó 2tászt 2t1átad 2t1á2tál 2t1átc 2t1átd 2t1á2t1e2 2t1á2t1é 2t1átfo 2t1átg 2t1áthe 2t1áthi tá2t1ir 2t1á2tí 2t1átje 2t1átkö 2t1átlag 2t1átm 2t1á2t1ol 2t1á2t1ö 2t1á2tÅ‘ 2t1átp 2t1átre 2t1átru 2t1átsz 2t1átte 2t1átté 2t1áttö 2t1áttű 2t1átut 2t1á2tü 2t1átvi 2t1átvo tá2v1a2d tá2vak táva2l tá2v1a2n tá2vas tá2vaz tá2v1ál tá2v1e2 tá2véd tá2v1érz tá2v1és tá2vin tá2vis tá2ví tá2v1or 2t1á2vó tá2vö tá2vÅ‘ tá2vü tá2zsal tá2zsál tá2zsó tázs1p tbal2le tbeá2 tb2la tb2le tb2li tb2lo tb2lú tb2ra tb2re t1b2ri tb2ro tb2ró tb2ru tca1k tca1s tca1t2 tc2lu tc2re tcsap1á2g tdíja2d tdí2j1ada td2ra td2rá td2re td2ro td2ró td2ru 1te te2aa te2aá te2ab te2ac te2a3d te2ae te2aé te2ag te2ah teai2 te2aiv te2aí te2aj te2aku te2alap te2aláz te2ali te2ana te2ao te2aö te2aÅ‘ te2apa te2apá teas2 te2a1sp te2asza te2aszá te2aszo tea1tró te2aú te2aü te2av te2az te2ber te2béd 2techó te2csá te2dit te2dí 2t1e2dz 2t1eff te1f2r te2gan te2g1a2r tega2z teg1azo te2gá teg1ál teg1ár te2g1eg teg1e2lem te2g1ell te2g1elr te2gene 2tegere. teg1ered te2g1él te2g1é2p tegész1 teg3g te2gid te2gis te2giz te2g1on te2g1ö te2gú te2g1ü2g tegü2l te2g1ülé te2g1ülÅ‘ 2t1egy. 2t1e2gyes t1e2gyez t1egyén 2t1egyh 2t1egyl 2t1egys 2tegyüt tei2g tein1s te2j1a teje2g te2j1ell te2j1elv te2j1er te2jin te2jí te2jo te2j1ó te2j1ös te2jÅ‘ te2j1u2 te2jú te2k1ag te2k1ál te2k1el tek1éret te2k1i2p te2kí te1klu te2k1ok te2k1ös te2k1und te2k1út te2lab te2lag te2laj te2l1an te2lap te2lar te2las te2lav te2l1á2g te2lál telá2r te2l1át 2telbü teleí3rá 2telej tel1ejt 2telektr tel1e2len te2l1elk te2l1ell te2lemba te2leme. 2telemei 2te2lemek te2lemes te2lemén 2t1e2lemű tel1esés te2l1e2sÅ‘ te2l1este tel1esti tele2t1ér. t1e2lég tel1érét te2l1éte t1elfoga telié2h te2l1imi te2lind te2l1inf te2l1ing 2t1elix te2lír tel2l1eg 2tellenf 2tellenÅ‘ 2tellenz 2t1ellniv 2telméle te2l1ó2 te2l1öl 2telÅ‘adá 2t1e2lőí 2telÅ‘nn 2t1e2lÅ‘ny te2lÅ‘tt 2t1e2lÅ‘tu te2lÅ‘vét tel1Å‘zi. tel1p2 2t1elsa 2t1elsÅ‘ 2t1eltelé 2t1eltett. 2t1eltéré te2lú telü2ké. 2t1elv. 2t1elvb 2t1elvei 2t1elvek 2t1elvet 2t1elvév 2t1elvh 2t1elvi. 2t1elvil 2t1elvk 2t1elvn 2t1elvr 2t1elvt 2t1elvü 2t1elvű 2t1elvv te2map te2m1as 2t1embl 2t1embr te2m1e2g tem1e2leg 2t1emelé 2t1emelk 2te2melÅ‘ te2melv te2m1él te2m1ér. te2m1é2rÅ‘. tem1érté 2t1e2més te2m1éte te2m1étk te2mid te2migr tem1ill te2mim tem1ing te2m1int te2móc te2m1ó2r te2m1Å‘2 2tempá 2templi 2t1e2mul te2mus te2mut temü2l te2m1ülé te2nad te2n1a2g te2nal te2n1a2r te2n1as te2nat te2nau te2n1á2t ten1d2h tene2g ten1egy te2n1el te2ner 2tenerg te2n1esz te2n1ékt te2n1év. 
te2n1évi 2t1enged te2n1i2p te2n1ol te2n1ó2 te2nö ten2tin ten2t1í2v ten1tri te2n1u2 te2nú te2n1üg te2nünn 2t1enyh t1enyv te2oc te2of teo2s 2t1epiz 2t1e2pos 2tepp tep2p1é2k ter1abl ter1a2cé te2r1ada ter1aka te2r1a2n te2r1a2r te2rau ter1á2cs te2ráf te2r1áll ter1álm te2r1á2ri ter1áta ter2ch ter1d2 2terdÅ‘ 2teredm te2r1e2dz ter1egé ter1egy 2t1e2rej te2r1e2ke. 2t1e2rekl te2r1elm tereme2 te2r1ent 2tereo tere2pa tere2p1e2l tere2p1ü2lé te2r1er te2r1e2sÅ‘ te2r1este te2r1esté te2reta te2r1eti te2retn te2rég te2r1é2j ter1éka te2ré2l ter1éle ter1élv ter1g2 ter1iko 2terili ter1illa teri2na te2r1i2p te2rism ter1ist ter1izm ter1k2 t1erkölc termés1s te2r1old te2ror te2r1ox te2r1ó2r te2rök te2r1ön te2rÅ‘ 2t1erÅ‘. 2t1erÅ‘b 2t1erÅ‘f 2t1erÅ‘s t1erÅ‘t t1erÅ‘v ter1s2 tersz2 tert2 ter1tr te2rur te2r1ut te2r1út te2r1üg te2r1üld ter2vaj ter2van te2sar te2sár te2sel tes1ell 2t1esemé 2t1e2setb 2t1e2sete. 2t1e2setei 2t1e2seten 2t1e2setet 2t1e2seté 2t1e2seth 2t1e2seti 2t1e2setn 2t1e2setr 2t1e2sett te2sél 2teséll 2tesély 2t1e2sés te2s1int tesí2r te2sírn te2s1író te2síz 2teskü t1esni te2sot 2t1e2sÅ‘ tesp2 2t1esszen tes2tak tes2t1áll testá2r tes2tára tes2t1elk tes2t1ell tes2t1er 2testéj 2testék tes2t1ékn tes2t1éks 2testém tes2tism tes2t1o testÅ‘2 tes2t1Å‘re tes2t1Å‘ré tes2tur te2s1ú te2s1ü2v 2teszet 2t1eszmé tesz2t1a2 teszte2r tesz2t1ere tesz2t1é2te tesz2tor te2t1a2k te2t1a2l te2tana te2t1a2p te2tág te2t1ál tetá2r te2t1ára te2tát te2t1e2g te2t1ell tet1e2lÅ‘ tet1elr te2t1elv te2tene tete2r te2t1eré te2t1esz te2t1éj tet1ékk te2t1é2l te2t1éri te2t1érv te2tid 2t1e2tika 2t1e2tiká te2tim te2t1int tetkész1 te2t1olt te2t1ot te2tór te2tur te2t1üz 2t1e2tűd te2t1ű2z tevés3s te2vol te2w1a 2t1exp 2t1e2zer tezÅ‘a2 t1ezred 1té 2t1ébr té2cÅ‘ té2des 2tég. 2tégb 2téget 2tégé té2gép té2gés 2tégg 2tégj 2t1égk tégla1 2tégn 2t1é2gö 2t1é2gÅ‘ 2tégr 2tégt tégü2 té2g1ül 2t1éh. 2t1é2hek 2t1é2hen 2t1é2hes 2t1é2het 2t1é2hé 2t1éhs 2t1é2jen té2jes 2t1éjr 2t1éjs 2t1éjt té2kab té2kad ték1ada té2k1aka ték1alk té2kam té2k1a2n té2k1ar téka2t té2k1att té2kaz té2k1ág té2k1e2c té2kedé té2k1e2g té2k1e2kéh té2k1e2lőá té2k1elr té2k1er té2k1esz té2k1eti té2k1é2k té2k1é2l té2kép té2k1ér. té2k1és tékfélé2 tékfé2l1év té2kid té2kik té2kim té2kirá té2k1i2s té2kí2 té2k1o2r té2k1osz tékó2 té2kór té2k1ö2 té2kÅ‘ té2k1u2t tékü2l téla2 té2lak té2l1an té2lap té2las t2élá té2láb té2lál té2lár té2l1á2t té2lei té2le2l tél1ele tél1ell tél1elÅ‘ tél1elv té2l1e2r té2les tél1est té2lez tél1ékb tél1éké té2lí té2ló té2l1ö2 télÅ‘kész1 télu2 té2lut 2té2lű té2lyeg télyigaz1 téma1p té2mé té2mil téna2 té2n1an 2t1é2nekb 2t1éneke. 2ténekek 2t1é2neké 2ténekl té2n1in té2n1ö2 té2ny1e2g té2ny1e2l té2nyék tényigaz1 té2nyim té2nyo té2nyö tépés3s 2t1é2pí té1p2la 2t1é2pül té2rab té2raj tér1akt té2r1a2n té2rar té2ras té2rav té2r1á2r 2térdekb 2térdekl 2térdekn 2térdekü 2térdekű tér2d1e2mel tér2d1í2j té2reg té2r1e2l té2rem tér1eml té2r1eng té2r1esÅ‘ té2r1essz té2r1esz 2t1é2rett térés1s té2r1ész tér1éte téri2d té2r1ide té2ril té2rip té2ris 2térkez 2t1érmü té2r1os té2rö 2t1értes 2t1érték 2tértÅ‘. 2t1értÅ‘k tér1t2r 2t1értv té2r1u2 2t1érv. 2t1érvei 2térvek 2t1érvény tésa2 té2sab té2sag té2s1aj té2sak té2s1al té2san té2sap té2s1as té2s1az té2s1á2 tésát1a2 té2s1eg té2s1e2l té2s1e2r té2sés tés1iko té2s1or té2só té2s1Å‘2 té2su tésü2l té2s1ülé tés1ülÅ‘ té2s1üt tés3zav tész1ál tés3zár té2szeg té2sz1emb tés3zene té2szesz té2sz1ék tés3zsi té2t1a2l tétcsa2 té2t1e2g tét1e2lemt tét1elkés té2t1elve 2t1éteri té2t1ers té2t1e2v té2t1é2te té2tok té2tol té2t1os tétó2 té2tón té2t1ór 2t1év. 
2t1évad 2t1évb 2t1évc 2t1é2vedb 2tévei 2t1é2vek 2t1é2vem 2téves. tévé1sz2 2t1évf 2tévh 2t1é2vi 2tévk 2t1évn 2t1évr 2t1évs 2tévt té2vú 2t1évü té2vükö té2vün 2t1é2vű 2t1évv té2zs tfa2l1aj tfa2lan tfa2le tfa2lom tf2jo tf2la tf2le tf2li tf2lo tf2ló tf2ra tf2rá tf2re tf2ri tf2rí tf2ro tf2rö tf2ru tgá2zak tgá2zár tgá2zé tge2n1el tge2né tgen1s tg2lo tg2ló tg2ne tg2ra tg2rá tg2ri tg2ró tg2ru tg2rü tha2de 1thagore t2har. 1t2hau 1t2he. the2i2d1 1theidp 1theusz t2holi thon1n tho1ny 1thosz th1sc 1thy 1ti ti2aa ti2ae ti2aé ti2af ti2ah ti2aj tiakész1 ti2ala ti2am ti2a1p ti2ata ti2atl ti2aü ti2av ti2chi ti2dea 2t1i2deá 2t1i2deg ti2dei 2tidej 2t1i2deo ti2dén 2tidéz ti2dio 2t1i2dom 2t1i2dÅ‘ tie2le tie2n tié2b ti1fl 2t1i2ga 2t1i2gá 2t1i2ge. 2t1i2gék ti1g2lo ti1g2ra 2t1ihl ti3ki ti1kle ti1klu ti2konb ti2kono ti2konr tiko1sz2 ti1k2ri ti2lac ti2lad ti2lan ti2l1ág tile2g til1egy ti2lex ti2lim til1ing ti2l1i2p ti2lir til1isk 2t1illato 2tillú ti2l1ö2 2t1i2ly ti2mak ti2máb 2timádó ti2máj ti2már ti2máz ti2mes 2timmu 2timrei 2tinakat. 2t1inakk tin1akt ti2n1a2n tin1ará tina1t2 ti2n1au ti2n1áll tin1árt 2t1indá 2t1inde 2t1indí 2t1indu ti2ne. ti2neg ti2n1eké ti2nem 2t1infar 2tinfl 2t1infr tin2gal tin2gas tin2g1ár tin2g1e2l 2t1ingeni tin2g1eti tin2gos tin2g1ó2 2t1i2nic tini2g tin1iga ti2n1i2m tini1sz2 2t1inku 2t1inkv tin3n tin1órá ti2nö 2t1insp 2t1integ 2t1intéz ti2n1ut ti2nű ti2ol ti2onb ti2onh ti2oni ti2onj 2t1i2onn 2t1i2ono 2t1i2onr 2t1i2ont tio2x ti2pad ti1p2la tip2pin tip2po 2t1i2rat 2t1i2rán 2t1irg ti2rig 2t1irk 2t1i2rod 2tirol ti2rom ti2rón 2t1irr tir2s1 2t1irt tis2as ti2s1a2u ti2s1el 2t1i2si. ti2sim ti2sin ti2s1i2r 2t1i2sis 2t1isko ti2sor 2tistv tisz2tina ti2t1i2o ti1t2ri ti1t2ro tiu2mé tiu2m1i2 2tivad 2t1i2var 2t1i2ván 2t1i2vó ti2xa tize2n1 ti2zé. 2tizéj 2tizék ti2zér ti2zét ti2zév tizo2m 1tí tí2gé tí2ja tí2já tí2jú tí2la tí2l1ó2 2tínio 2t1í2no 2t1ínr 2t1íns 2t1í2ny tí2ra2n tí2rar tí2r1ál 2t1í2rás tí2r1e 2t1í2ró tí2rÅ‘ tí2rü títés3s títÅ‘a2 títÅ‘e2 tí2v1ec 2t1í2vei tí2vel tí2ver 2t1í2véi tívi2 tí2vik tí2vir tí2viv tí2v1ó2 tí2vö tí2vÅ‘ tív1s tí2za tí2zá tí2zel tíz1ele tí2z1emb tízi2 tí2zik tí2z1is tí2zí 2tízl tí2z1o tí2zö tí2zsá tí2zu tí2zül tíz1ülé 2t1í2zű tje2gy1á2 tjó2t1 tka1pr tka2ró2r tkende2 tké2p1e2kéh tkia2l tkiá2 tkie2 tkié2 tk2la tk2li tk2lí tk2ló tk2lu tkö2z1él tkőé2h tk2ra tk2rá t1k2reác t1k2ré t1k2ri tk2rí t1k2ro t1k2ró tk2ru tk2va tk2vi tk2vó tla2c3 tla2g1a2d tla2g1ar tla2gas tla2gál tla2g1e tlag3g tla2g1ó tla2n1e2 tlanká2 tla2nó tla2pár tleí2 tlen3n tle2t1a2n tle2tas tle2tele tle2t1elv tle2t1ér. tle2tos tle2tÅ‘ tleü2 tlé2kal tlé2kem tlé3pe tlés3s tlé1trá tló1dr tma2gál tma1k tmá2nyir tme2g1e2 tme2g1é tme3gif tmu2sí tmus3s tmu2sz tna2gya tná2d1 tne2k1el tne2küld tne2m1e2r tne2r1a tne2r1á tne2sz1a tne2szi tne2sz1ó2 tnév1a tnómen1 tnómenk2 tnö2k1öl tnőé2n 1to 2t1obj 2t1obl 2t1obs to1cki to2daa to2das to2dí 2t1o2dú 2t1odv 2t1off togás1s to1g2ráf. 
to1g2ráff to2il to2k1aka to2k1a2n to2k1ap tok1ari to2k1ál tok1á2rok to2k1átl to2k1átm to2k1átr to2k1áts to2k1átt to2k1átv to2ker to2k1é2l to2kép to2kid to2k1im to2k1in to2k1ip to2k1iv to2kí t1o2kozá to2k1ö to2kÅ‘ to1k2ro tok1s toksz2 to2k1ur to2kúr to2kü to2kű to2l1a2d 2t1o2laj to2l1akt tola2n to2l1any to2l1a2r tol1atom tol1ábr 2toldalo 2toldalt 2t1o2lim tol1k2 tol2l1árb tol2l1á2ré tol2l1árh tol2l1árr tol2leg tol2l1in tol2l1í2 to2l1o2r 2t1oltás to2l1ut 2tolvas to2m1ag to2m1ak to2m1a2l to2m1a2n to2m1ap to2m1ál to2m1á2r tom1b 2tombe to2m1el to2men tom1erÅ‘ to2m1es to2mí to2m1osz to2m1ó2 to2mö tom1p2 tom1s 2tomtö 2tomú to2mü to2mű to2nalm tona1t2 tonau2 ton1áll ton1álm to2n1á2z to2n1el to2n1en to2ner ton1gr to2nil ton1k2 to2nol to2n1or to2n1osz to2nóc to2nór to2nö to2nÅ‘ ton1s2 tont2 ton1tr to2n1út to2nü to2nű to2nyal to2nye to2p1at 2topc 2to2pe to2pik to2piz 2topp. 2toppo 2toppr top1t2 to2pü to2r1a2d to2r1ag to2r1aj to2r1aka to2r1akn to2r1ako to2r1a2l tora2n to2r1any to2r1a2p to2r1ar tor1ass to2rat to2r1au to2r1av tor1ács to2r1áll to2r1álm tor1áté to2r1átl to2r1á2z 2t1orcá tord2 tor1dr tore2 to2rec to2rek to2r1el to2ren to2r1er tor1f2 tor1g2 2t1organ 2torie to2r1ill to2r1int to2r1isk tor1k2 2tornó to2rop to2rosz tor1oszt to2r1ov toró2 to2rón to2r1ór to2rö to2rÅ‘ 2t1orr. 2t1orráv 2t1orri tors2 tor1sc tor1sk 2t1orsó tor1sp tor1st tort2 tor1tr to2rü 3torvá 2torvos 2torvv to1ry to2s1as to2s1e to2sik to2s1in to2sis tos3sze t1osto 2t1osty to2sü to2szal to2szan to2sz1e tosz1k 2t1oszlop. tosz1tr 3t2ot. 2t1otthon to2ut tova1 2t1o2ve 2t1o2vi 2t1o2vu 2t1oxid 2t1o2xig to1yo toza2t1al 1tó tóá2gy tóá2r tóát1 tóáta2 tó1bl tó1bra tó1bró tó1cl tócsa2p1á2g tó2daj tó2d1a2n tó2d1esé tó1dro tó2dúr tó1fl tó1gl tó1g2r tói2g tóí2v tóká2rok tó1k2l tó1kre tó1kré tó1kro tónus3s tóo2k tóó2r tóp1ass tó2p1e2l tóp1int tó1pro tó2ras tó2ráb tó2ráh 2t1ó2rái tó2rár 2t1ó2rás tó2rát t1ó2ráv tó2sep tós1éták tós1é2tát tó2sír tó1s2kan tó1skál tó2s1ol tó1s2porto tó1s2portt tó1s2rác tó2s1ü2l tó2s3zene tó1sz2f tó1szk tó2taj tó2tág tó2tom tó1t2rá tó1t2re tó1tro 2tóvod 2t1ó2vó 1tö töb2bev töb2b1o 2t1öbl 2tödéé tö2dém tö2dí 2tödné tö2do 2tödöt 2tödü tö2ka tö2ká tö2k1e2v tö2k1ér. tö2k1érd tö2kid tö2kí 2tö2ko 2t1ö2kör tökös3s tö2k1Å‘ 2tökrös tö2ku 2t1ölb 2t1ölci töl2gya töl2t1á2 t1öltön 2t1öltöz töm2b1a tömbe2 töm2bel töm2b1o t1ömle 2t1ömlé t1ömli tö2möl tö2na tö2ná tön1d2 tö2nen tö2n1í tön2kár tönkész1 tö2no tö2n1Å‘ tön1s tö2nu tö2nú 2t1ö2reg törés3s 2töröks tör2t1a2 tör2teg tör2t1e2lem tör2ter tör2térn tör2t1és tör2t1o2 tör2t1öl tör2tös tör2zsa tör2zs1ö2l 2t1ösv 2t1ösz 2t1ötl töt2t1á 2t1ötv 2t1öv. 2t1övb 2t1övh 2t1ö2vie 2tövig 2t1övj 2t1övk 2t1övn 2t1övr 2t1övv 2t1ö2zön tözÅ‘e2 1tÅ‘ tÅ‘a2c tÅ‘a2l tőá2g tÅ‘1bl tÅ‘1br tÅ‘e2ké tÅ‘e2r tőé2l tÅ‘1fl tÅ‘1gr tőí2t tőí2v tÅ‘1kl tÅ‘1kv tÅ‘1pl tÅ‘1pr tÅ‘1ps tÅ‘2rag t1Å‘ral 2t1Å‘2r1an tÅ‘r1egy 2t1Å‘rez 2t1Å‘2rék 2tÅ‘réü tÅ‘2rin tÅ‘2r1is 2t1Å‘2rí 2t1Å‘rjá 2t1Å‘rjel 2tÅ‘rjö 2tÅ‘rka 2tÅ‘rl 2tÅ‘r1öl 2tÅ‘rp 2t1Å‘rti 2tÅ‘rtr 2t1Å‘rz tÅ‘2s1a2l 2t1Å‘sny tÅ‘1spe tÅ‘1s2pi tÅ‘1spr tÅ‘1srá tÅ‘1sta tÅ‘1s2tá tÅ‘1sté tÅ‘1str 2tÅ‘sű tÅ‘2sű. 
tÅ‘sz2 tÅ‘1szt tÅ‘1tr tőü2l tÅ‘zá2 tÅ‘zár1 tpen3n tp2fe tp2la tp2lá tp2lé tp2lu tp2ra tp2re tp2ré tp2ri tp2ro tp2ró tp2ru tp2sz tpu2tin t2rafó 1t2ragé 1tragi tra1k2l tra1kro tran2d1a2 tran2dá trans1s tran2sz1ai tran2sz1o2m tra1s2p trat2 t2rati tra1tr 1trágya 1trágyáb 1trágyáé 1trágyáh 1trágyái 1trágyáj 1trágyák 1trágyán 1trágyár 1trágyás 1trágyát 1trágyáv t2rájk trán2sz trá2nyal trá2t1ér tre2csa tre2cso t2rego tren2da tren2d1Å‘2 1trení tréé2 1t2réni tré1p2 tré2sz1á2 tri1g2 trii2 t2rikó t2rill 1t2rió. t2riój t2riók trol2l1in tro2mad tro2maj trona2 tro2n1ag tro2nal tro2nan tro2nes tron3n tron1tr tr2os tro1sz2f tro1sz2ta t2r1o2x tró2de tró2nas tró2n1e 1trónj 1t2róno t2rónt tró1sz tró2zsi t3röm 1t2rös 1t2rub tru2mad 1t2rup tsa2vo ts2ch 1tsche tsé2g1éj ts2ká ts2ko ts2la ts2lá ts2le ts2li ts2má ts2mi ts2ni tsói2 ts2pa ts2pe ts2pi ts2po ts2pó tsp2r ts2rá t1s2ta t1s2tá ts2te ts2té ts2ti ts2tí ts2to tst2r ts2tu ts2tú 1t2sub tsza2ké tsza2k1ü tsza2t1e2 tsza2tö tszé2t t1sz2f t1sz2k t1sz2l tszö2g1e2le t1sz2p t1sz2t2 ttad2 tta2dó. ttag1g tta2g1o2ku tta2n1ér t2t1ará t2t1aszt tta1t2r ttaü2 t2t1a2z ttán2s1á2g ttá2v1i2 tte2leme t2t1e2lÅ‘a tte2r1in t2t1erÅ‘ tte2s1a2 tt1ég. t2t1é2le tté2rak ttér1em tté2r1es tté2r1é2ne tté2rid ttér1in tt1éss tt1é2vér t2t1i2o t2t1iro t2t1isk t2tizz ttír1a tt1írá tt1kr ttornác1 t2t1ors ttó1dr ttó1p2 t2t1ó2rá ttó1s2r ttö2l tt1ölÅ‘ tt1pr t1t2raf t1t2rag t1t2ran ttransz1 ttranszk2 t1t2rav t1t2róf tt2rón ttsé2gel tt2si t2t1ug t2t1üd t2t1üt t2t1ü2v ttwe2 t2ty tty1or tty1ö2l ttyü2 tty1ül ttyülés1 1tu tuá2r tubus1s tu1ck tuda2te tuda2t1ö 2tudí tu2ga tu2go 2t1ugr tu2hu tu2in 2t1ujj 2tuká tulo2 2t1ulti 2t1ultr tu2lü tu2mab tu2m1a2d tu2maj tu2mal tu2man tu2mar tu2maz tu2mál tu2meg tu2m1el tu2mer tu2mes tu2m1i2k tu2mim tu2m1inf tu2m1int tu2m1ir tu2mis tu2miz tu2mí tu2mol tu2mö tu2mÅ‘ tum1p2 tu2mü tu2nal 2t1unc 2tunió tu2nit 2t1u2no 2t1u2nó 2t1unt tu1p2r tu2rac 2turai 2t1u2rak tu2ralo tu2ram 2t1u2ras tu2ráh 2turán tu2rár tur2g1a tu2run tus1abl tu2sar tu2ság tu2se tu2s1érté tu2sis tu2s1í tu2sor tu2s1ó2 tu2s1ö tu2sÅ‘ tuss2 tus3sze tus3szi tus3szo tu2sü tu2szab tu2sz1a2d tu2szag tu2szal tu2szap tu2szar tus3zav tu2szál tusz1em tu2sz1é2l tu2sz1é2p tuszi2 tu2sz1il tu2szin tu2sz1ip tu2szir tu2szis tu2sz1it tu2szí tusz1k2 tu2szol tu2s3zó tu2szö tu2szÅ‘ tusz3sa tus3zse tuszt2 tusz1tr tu2szü 2t1u2tad 2t1u2tak 2t1u2tal 2t1u2tam 2t1u2tas 2t1u2taz 2tutá tu2tán 2t1utc 2t1u2to tu2tód tu2tóp tu2tót tu2tun tu2um 2t1uzso 1tú tú2ja tú2jí tú2jo tú2ju tú2l1a2 tú2l1á2 tú2l1e2 tú2l1é2 tú2list tú2lí tú2lok tú2lop tú2l1ó2 tú2lö tú2l1Å‘ túl1s túlt2 tú2lú tú2lü tú2lű túra1sz tú2rál tú2r1e2 tú2r1ér tú2r1és tú2r1ol tú2rot tú2rö tú2rü tú2sze tú2szö tú2szü tú2tá 2t1útb tú2t1e2 2t1ú2té 2t1úth 2t1ú2ti 2t1útj 2t1útl 2t1útn 2t1ú2to 2t1útr 2t1útt 2t1útv 1tü 2t1ü2dí 2t1ü2dü 2t1üdv tü2ge tü2gy tü2l1a tü2lá 2t1ü2led tü2l1e2m tü2len tü2lep tü2l1e2s tü2l1e2v tü2l1ér tü2lir tü2lí tü2l1o2 tü2l1ökl tü2löm tü2l1ön tü2l1öz tü2lÅ‘k tü2lÅ‘r tü2lu tü2lú tü2ma tü2mék tü2m1Å‘ tüne2tel 2t1ünn 2türeg 2türes 2t1ü2rí 2t1ü2rü 2tüst 2t1ü2te 2t1ü2té tü2ti 2t1ütk tü2tö 2t1ü2tÅ‘ tü2vö 2t1ü2zen 2t1üzl 1tű tűcsa2p1 tű2d1al tű2dá tűe2két tűé2h tűfélé2 tű1gr tű1kv tűleü2 tű1pl tű1pr tűrés1s tű1sp tű2z1a2 tűzá2r tű2z1ára tű2zát tű2zene tű2zis tű2zí tű2zo tű2z1ös tűz3seb tű2zú tva2n1e tva2név tva2raj tvá2nyad tvá2nyí tvá2z1al tvá2zip tve2n1e2v tven3n tven3t tvenü2 tve2nül tve2n3y tve2raj tve2ral tve2r1á2 tve2reg tve2r1é2sz tve2rint tve2rip tve2r1o tvers2 tver1st tve3se tvé2nyel tvér1int tvé2r1o tviselőé2 tvis3szá t2víve t2y 1tya tyai2ko tya1kl tya1p2 
tya1sz2 tyat2 tya1tr tyau2 1tyá 2ty1ág ty1áld ty1áll tyá2ran tyár1s tyás3s 1tye ty1e2gy ty1ela ty1ell ty1emb ty1e2rÅ‘ 2ty1ex 1tyé 2ty1éte 1tyi 1tyí ty1í2r ty1ív tynk2 1tyo tyo2lan tyo2r1os 1tyó 1työ 1tyÅ‘ ty1Å‘s ty2pe ty1pr ty2sa ty1sp 1tyu 2ty1ug ty1u2tá 1tyú tyú2kü 1tyü 1tyű 1t2zekn 1tzekrÅ‘ 1t2zen. t2zenj 1t2zer 1t2zes tz1Å‘2r tz3sc 2u. u1a ua2ch ua2da ua2dá u2ador. u2adorb u2adoré u2adorn u2adoro u2adorr u2adort ua2dó ua2es ua2gá ua2ja ua2já ua2la ua2lá ua2ny ua2sz ua2ty ua2ut ua1yá ua1yé ua1yi ua1yo ua2zo u1á uá2ga uá2gy uá2po uá2r1á uá2r1e uá2réj uá2r1is uá2sz uá2t1a2 uá2t1e ub1a2la ub1alk u2b1ang ub1arc ub1a2ri ub1ár. ub1á2ro ub1dr u2b1e2b u2b1ed ube2lá ube2lel ub1elö ub1e2se ub1est u2b1éj ub1éle u2b1é2n u2b1ép u2b1érd u2b1é2re u2b1érm u2b1érte u2b1éve ubi2g u2b1iga u2b1ige ubi2ke u2b1ina. ub1ism ub1kr ub1olda u2bork u2b1orv u2b1osz ub1óri ub1öl ub1ös ub1öv ub1pl ub1pr ub1sl ub1sp u2b1ud ubu2sz u2b1új ub1üd ub1üg ub1ü2l ub1üz uca2t1á2 uc2c1e uc2c3h uc2c1ina uc2cip uc2cö ucc3sz uc2cú uc2cü uc2cz uc1emb uc3há u2c3hé u2c3ho uciá2r uci1p u1c2kig u1c2kon u1c2kot uc2ky uc1ö2l uc1pr ucsa2p1á2 uc3ság u2cs1ál u2csáru u2cs1e2l u2cs1id ucs1s u2c3sü uc3sz u2c1ug uc1üg uc3zá uc3ze uda2tal uda2tál uda2t1á2ram uda2t1eg uda2ter udáskész1 udás3s udi2o u2d1isk udo1kr udó2se udó2sor u1d2rá u1d2ro u1e ue2bé ue2dz ue2gé uegész1 ue2gy uel1ér ue2lÅ‘ ue2me ue2rÅ‘ uervíz1 u2es. u2esb ue2s1e u2esh u2esi u2esk u2esn ue2sÅ‘ u2esr u2est u2esu ue2s3zen ue2te ue2ve u1é ué2ge ué2gé ué2ké ué2le ué2pí ué2pü ué2te ufé2nye u1f2le uf2ri uf2ru uf2tü ug1agy ug1alj uga1p ug1apa 1u2garo uga2t1el uga2tés uga2tol uga2t1ó2r ug1ág ugára2 ugár1ad ugá2rá ugá2ros ugá2ru ugár1ú ugás3s ug1á2sz ug1el ug1e2v ug1é2l ugg2l ugi2e ug1ing ug1int ug1iro ugi2t ug1ita ug1ivá ug1ír ug1kl ugo2r1á2 ugó1sv ugó2s3zá ug1ös ug1pr ug1sk 2ugui u2g1ut u2g1új ug1üg ug1üz ug1űr ugya2n uh1a2dó uh1alk uha2r1as uha1t2r uh1att u2h1á2g uh1áll u2h1ár. u2h1árb u2h1árf u2h1árh u2h1árn u2h1árr u2h1árv uhá2szak uh1em uh1ex uh1ind uh1ing uh1orz uh1Å‘r uh1pr uh1tr u2hu. u2huj uh1ujj uh1üz u1i ui2de ui2dÅ‘ ui2eu ui2ga ui2gé u2i1gn ui2pa ui2rá ui2zé u1í uí2ja uí2ju uí2ve uí2vű u2j1an 1ujja. 1ujjad 1ujjai 1ujjak 1ujjam 1ujjas 1ujjat uj2j1á2ru 1ujjb 1ujjc 1ujjd 1uj2j1e2 1ujjé 1ujjf 1ujjg 1ujjh 1ujji uj2jí 1ujjk 1ujjl 1ujjm 1ujjn 1ujjp 1ujjr 1ujjs 1ujjt 1ujju 1ujjú uj2jü 1ujjv u2j1op uk1abl uka2c3se u2k1a2lap uka1pl uka2rán u2k1arc uka2szás uka1t2r uk1áll uká2sar u2k1ásó ukás3s u2k1átm uk1bl uke2l uk1e2m uk1ex u2k1ég u2k1érz u2k1i2p uk2k1alt uk2k1eg uk2k1em uk2k1ola uk2köl uklás1s ukló2s3zá u2k1old ukora2 uko2ras uko2r1á2 uko2ril uko2rin uko2r1o ukós2 uk1öb uk1pr u1k2rón uk1ü2t uk1űr ula2cs1ö2 ula1g2 ula2jas ul1aleg u2l1alj ula1s2p ulata2l ula2t1ala ula2t1a2n ula2tál ula2tem ula2tik ula2tol u2l1ábr ulá2k1e ulá2k1ü u2l1á2ria ulá2s1i2k uláskész1 ulás3s ulá2sza ulá2sze ul1bl ulcs1e2l ulcs1es ul2cs1ér. 
ul2csérv ul2cs1é2vé ul2csiga ul2csip ul2cs1í2 ul2csor ul2cs1ö2 ul2cs1Å‘ ul2csut ul2csü ulcs3z ule2i uleí2 ule2l ul1elÅ‘ ulet2ta u2l1ex ulé2kal ulé2k1e2 ulé2k1ol ulé2kut ul1épü u2l1érté uli2nar uli2nin ul1í2r ul1k2r ul2lef ul2l1e2l ul2l1em ul2l1en ul2l1ér ulot2 uló1f2 ulói2kon ulókész1 ulóó2 ulót2 uló2zan uló2z1á2ra uló2z3s ul1öt ul1pr ul1st ul2t1aj ulta2r ult1asz ul2taz ul2tül ul1úr ul1üt ul1űr u2lyi u2m1abl um1abr um1a2cé um1ach um1a2dat u2m1adm um1a2do um1a2dó 2umaé um1ajá u2m1a2ka umakész1 u2m1akk u2m1akt u2m1a2la um1alg um1all um1alt u2m1ana u2m1ank u2m1a2no u2m1a2ny 2umao 2uma1p2 u2m1a2rá um1arc um1arg u2m1a2ri um1a2ro um1asp u2m1atl u2m1au um1a2zo u2m1ág u2m1áll um1álm u2m1á2rak um1á2ram u2m1áras u2m1á2rá u2m1árf u2m1árk u2m1árn u2m1á2ro u2m1árr u2m1árt u2m1á2ru u2m1árv umát1a u2m1á2t1e u2m1átm u2m1átu um1bl um1b2r um1dr u2m1e2d u2m1ef ume2g um1egy um1ela um1elb u2m1e2le um1elé um1elh um1elí um1elj um1elm u2m1eln um1elo um1e2lÅ‘ u2m1elt um1elv u2m1e2m ume2n1á ume2n1ó2 um1e2re um1erk um1e2rÅ‘ um1e2se um1ess um1e2sz u2m1e2t u2m1e2v u2m1ex um1ezr u2m1ég u2méhs um1é2le um1élv u2m1ép u2m1ér. u2m1érc u2m1érm u2m1érte u2m1érté u2m1érv u2m1é2te um1fl um1f2r um1gl um1gr umia2 um1i2dÅ‘ umig2 umi1gr um1imp umi2na. u2m1ind u2m1ing u2m1inv um1i2onb um1i2oné um1i2onh u2m1i2onj um1i2onk u2m1i2onn u2m1i2ono um1i2onr um1i2ont u2m1irt um1isk umi1sl um1ism umi1sp umi2szü umit2 umi1tr um1izé um1ív um1íz umkész1e um1kl um1kr um1kv umna2 u2m1o2koz um1ola um1old um1oll um1olt um1olv u2m1o2p umo2ran um1o2rat umo2rál umo2rin um1osto u2m1osz u2m1ox um1ó2rá um1ö2l um1öm um1ön um1ö2r um1ös um1öt um1öv um1ö2z um1Å‘2r um1Å‘s umpe2l ump1ele um2p1ing um1p2r um1sk um1sp um1st um1sz um1t2r u2m1ud u2m1ug u2muni umu2r um1ura u2m1u2t um1üg um1ü2l um1ür um1üs um1üt um1üv um1üz umva2s una1b un1adu un1akt u2n1arc u2n1á2g un2cs1e un2csiv un2dz un1e2r un1e2t un1ég un2g1a2g un2g1eg un2g1er ung3g un2g1ol u2n1i2d 1u2nif 1u2niku u2n1il u2n1ingo 1u2nió 1u2niv unka1p2 unka1s un2k1eg u2nod u2n1orr un1ors u2not un1pr un1s2t2 unta2i u2nun un1útj un1ü2l u1o uo2la uo2li uo2r1a2 uore2 uo2r1et uo2r1i2o uo2xi u1ó u2óbu u2óne uó1p2r u2óré uó2ri u2óso u2ósz u2óve u1ö uö2ko uö2kö uö2rö uö2zö u1Å‘ uÅ‘2re uÅ‘2ré uÅ‘2ri uÅ‘2rö uÅ‘2rü upa1b2 up1ada upa1pr upas2 upa1sp upa1t2r upda2 upe2r1a upe2rel upe2r1in uper1s up1e2s upé1p2 up2hi upli2n up1üz 1u2raim 1u2rak. 1u2rakh 1u2rakk 1u2rakn 1u2rakr u2ralh 1uralk 1u2ralm 1u2ram. ura2m1is 1u2rasa ura1s2p ur1áll urát1a ur2dar ur2d1e u2r1ef ur2fí ur2f1ú 1urná ur2ne urn2é uro1b2 uroka2 uro2kan uro2k1á uro2ke uro2ne uro1p uro1t2 ur1öl urpi2 ur2t1ag ur2t1e2t ur2t1e2v urti2t urt1ita ur2t1okta uru2c1e 2urul uru1p2 uru2szál u2rú. 
us1abla us1a2da us1a2dá u2s1a2dó u2s1a2g u2s1a2j usa2kar u2s1akc u2s1a2la us1alg us1alj us1alk u2s1alt us1alv u2s1ana us1ane us1ant us1apá u2s1a2ra u2s1a2rá u2s1arc us1arz u2s1ass u2s1att us1a2ty u2s1au u2s1a2z u2s1ábr u2s1á2gá us1ágb u2s1ágg us1ágh u2s1ágr us1áld us1áll u2s1á2p u2s1árad u2s1á2rai u2s1á2rak u2s1á2rá u2s1árb u2s1árh u2s1á2ri u2s1árk u2s1árn u2s1á2ro u2s1árr u2s1árt u2s1á2ru us1áta us1áth us1áti us1átk us1átt us1átu us1átv us1bl us1br us1dr us1e2c us1e2d u2s1ef us1e2gy u2s1e2l u2s1e2m u2s1e2n us1erd u2s1e2s use1t u2s1e2v u2s1ex us1ez u2s1ég u2s1éhe u2s1é2k u2s1é2l u2s1é2ne u2s1ép u2s1érd u2s1érte u2s1érv u2s1és u2s1é2te us1étk us1étt u2s1éve us1fr us1gr u2s1i2d usi2g us1iga u2s1i2ko u2s1ill u2s1i2ma u2s1i2má us1imi u2simm us1imp u2s1inc us1ind us1inf u2s1ing u2s1ink u2s1int us1inv u2s1i2p u2s1i2rat u2s1i2rá us1iro us1irt u2s1isk u2s1ism us1ita us1izé us1íg u2s1íj usí2r us1írá us1író u2s1í2v u1s2kál us1kl uskói2k us1k2r us1kv u2sodú u2s1of us1okl us1okm us1okta us1ola u2s1old us1oli u2s1oml us1ond u2s1op u2s1org u2sori u2s1orr u2s1ors us1osz u2s1ott us1óc us1ó2s u2s1ö2l u2s1ön us1örd us1ö2rö u2s1ös u2s1öt us1ö2v u2s1ö2z us1Å‘2r u1s2pec us1pl us1pn us1pr us1ps 2uss. us2se. us1ská us1spe us1spi us1sta us1sy us3szab us3szag us3szak us3szál us3szám us3szen us3szé us3szig us3szí us3szó us3szö us3szÅ‘ usszü2 ussz1ül us3szű ust1á2rár us2teg us2t1il us1trad us1tre us1tré us1tro u1stru us2t1ül u2s1uj usu2s us1usz u2s1u2t u2s1új us1úté us1üd u2s1üg usü2l us1ülé u2s1ün u2s1ür us1üz usz1abl u2szaj usz1a2la usz1alk usz1alv u2sz1a2n usz1app usza2r usz1ará usz1aré usz1aro usz1asz u2sz1au u2sz1á2g usz1állá usz1á2p u2sz1á2rad u2s3zárako u2sz1á2ram usz1árje usz1ásv u2száth usz1átö u2sz1e2c u2szef usz1e2ge usz1e2gy usze2k usz1eke u2sz1e2l usz1emb usz1eml us3zene usz1eng u2sz1erd usz1e2ré usze2s u2sz1e2v u2sz1ex u2széne usz1éte usz1éto usz1imp usz1ind usz1inj usz1isk usz1ism u2sz1isz uszí2j1a2d u2sz1ír 1u2szodá u2szola u2sz1old u2szony u2szop u2szosz u2sz1ö2b usz1öl usz1ön u2sz1ös usz1p2 uszte2r1a usz1t2ran u2sz1u2s u2szut usz1útr uszü2 usz1üg u2sz1ül u2sz1üz usz1z ut1abl uta2csel ut1a2dó 2utakép 1u2taló 1u2talv utame2 uta2mel uta2mer uta1p ut1ará 1utasc uta2se 1utasf 1u2tasl 1utasv utaü2 2utav 1utazi 2utáb 2utáé 2utáib 2utáin 2utáju 2utákb 2utákt 2utám 2utáná után1n 1utánz 2utáró utá2rú ut1bl 1utca 1utcá u2t1e2g ute2r1a ute2rá ute2reg ute2rim ute2ru utén3n ut1fr uti2k1á2r uti2ke ut1ill uti2m uti2n1e2 uti2nér utini2 uti2nig uti2n1ik uti2n1ó utin1s u2t1i2pa u2t1isk ut1kl ut1okke u2t1old uto2l1é u2tols 2utomata uto2rim 2utos 2utot utó2dal u2tódá utó2del utó1p2 utó2s1aj utós3s utó1s2to utó1sz utó1tr utótűz1 ut1pr ut2rak ut2ran ut2rák ut1sp ut1st ut1t2r ut1üg ut1ü2z utya1s2 u1u uu2m1ag uu2mal uu2m1as uu2mál uu2m1e2 uu2m1é2r uu2mim uu2min uu2mö uum1p2 uu2mü uu2ta uu2tá uu2zs u1ú u1ü uü2gy uü2rí uü2té uü2ve uü2ze u1ű uva2r1a uva2r1á2 uva2r1e uva2rin uva2szál uvata2 uva2t1ag uvi1g2 uv2re uxi2t1a uxi2t1á uxi2t1e uzala2 uza2lac uza2lad uza2lan uza2lág uza2l1át uza2lel uza2l1ék uza1p2 uza1s2 2uzál. 2uzálb 2uzáll 2uzálr u2z1id uz1i2gye uz1ír uzki2 uzói2 uz3sap uz3sz uz1t2r uz1ü2g uzü2l 2ú. 
ú1a úa2da úa2dá úa2dó úa2gy úa2já úa2kar úa2kas úa2la úa2lá úa2ny úa2sz úa2ud úa2va ú1á úá2ga úá2gá úá2gi úá2go úá2gy úá2hí úá2lo úá2po úá2ra úá2ri úá2ru 2úbab 2úband 2úbar úb2lo 2úbód úb2ri úb2ro ú2c1aj ú2c1a2l ú2c1a2n ú2c1a2v úc1e2t úc3he úc3ho ú2c1i2d úci2ókép úc1pr 2úcsa ú2csab ú2csad ú2cs1ag ú2cs1aj ú2csakt úcs1ala ú2cs1a2n úcsa2p1á2 ú2cs1a2s ú2cs1au ú2csaz úcs1á2g ú2cs1ál ú2cs1árá ú2cs1árf ú2cs1ári ú2cs1árv ú2csátv 2úcse ú2cs1eb ú2cs1e2g úc3sej ú2cs1e2l úcs1emb ú2cs1en úcs1erÅ‘ ú2cs1e2s ú2cs1él ú2cs1ér. ú2csérd ú2cs1érte ú2cs1érté ú2cs1é2v ú2cs1id ú2csigaz ú2cs1il ú2csim ú2cs1inf ú2cs1int úcs1i2pa ú2csirá ú2cs1is ú2cs1iz 2úcso ú2csokta ú2csosz 2úcsó ú2cs1ó2r úcs1öl úcs1ös úcs1p úcs1s úcs1t úcsús1s úcs1ü2t úcs3za ú2d1a2c úda2d úd1ada ú2d1a2k ú2d1a2n úd1ág úd1ál úd1á2r ú2d1e2g ú2d1ej úde2l úd1ele úd1elh úd1elÅ‘ ú2d1e2m úde2ra úde2r1ec úd1erÅ‘ úd1e2v ú2d1ék ú2d1ér. ú2d1érc údé2t úd1éte ú2d1i2d údi2g ú2d1igé úd1ing údi2ódar údi2óz ú2d1os úd1pr úd2rá úd2ro úd1üv údy1éh údy1ét údy1i údy2s úd3zá ú1e úe2bé úe2gé úegész1 úe2gy úe2la úe2le úe2lo úe2lö úe2lÅ‘ úe2me úe2pi úe2re úe2ré úe2rÅ‘ úe2rű úe2sz úe2ta úe2te úe2ve úe2vÅ‘ ú1é úé2he úé2le úé2lÅ‘ 2úé2ne úé2pí 2úér. úé2r1á úé2re úé2ri 2úérz úé2te úé2ve úfélé2 úfé2l1év úf2lö úf2rá úf2ri úf2rí úf2ro úg1a2d úgás1s úg1el úg1i2v úg1old úgós2 úg2rá úgy1ag úgy1el úgy1ér. úgyi2 úgy1is úgy1iv ú2gy1u2 úgy1út ú1i 2úi2de úi2dÅ‘ úi2ga úi2gé 2úi2ke 2úikre 2úill 2úi2má úi2mi 2úing 2úint úi2pa úi2rat úi2rá 2úisk 2úism 2úist úi2ta 2úi2vad úi2vás ú1í úí2rá úí2ve úí2vi úí2vü 2újáté új1es új1ez új1éve új1évé új1k2r 1ú2jon új1or új1pl újra1 1újsá új1ud ú2jul 2úkab ú2k1a2g ú2k1a2j úk1a2lak 2úkalan ú2k1alk ú2k1any 2úkap ú2k1a2pó 2úkar 2úkate ú2k1atk ú2k1ál ú2káru úke2l úk1ele úk1ell ú2k1em úke2s úke2t úk1ete úk1e2vé 2úkérd 2úkéré 2úkés ú2k1éss ú2k1észr ú2k1é2te 2úkéz úkia2 ú2k1i2d 2úkin ú2k1is ú2k1i2t ú1k2li úk2lu 2úkol ú2k1olt 2úkom 2úkonf 2úkong 2úkorá 2úkoro 2úkos úk1ó2l úkö2l úk1ölÅ‘ úk1pr ú1k2re úk1t2r ú2k1ud ú2k1úr úkü2l úk1ülÅ‘ úk1ült ú2k1ür ú2k1ü2t ú2k1üz úl1a2d úl1a2ja úl1a2l úl1a2m úla2n ú2l1any úl1a2ro ú2l1á2g ú2l1ál úl1árn ú2l1á2sz úl1átv úl1br úl1d2r úl1e2d úle2l úl1ele úl1ell ú2l1emb úl1en ú2l1e2re úl1e2s ú2l1e2vÅ‘ úl1ex ú3lé. 
úl1é2d úlé2g ú3l2é3va úl1fr úl1gl ú2l1i2d ú2l1igé ú2l1ij ú2l1il ú2limá ú2l1ind ú2l1inf ú2l1ing ú2l1inj ú2l1int ú2l1inv ú2l1i2p ú2l1irá ú2l1isk ú2l1i2sz ú2l1i2ta ú2l1itt ú2livás ú2livo ú2lizgato ú2l1izz úl1íg úl1í2v úl1k2l úl1kv úlo2k úl1oko ú2l1ol úl1ope ú2l1or ú2l1os ú2l1ox úl1öb úl1öl úl1ö2m úl1ö2n úl1ör ú2l1ös úlövés1s úl1Å‘rü úl1p2l úl1p2r úl1p2s úl1sk úl1sm úl1sp úl1s2t úlsz2 úlsztá2 últ1agg últ1agy úl2t1aj úl2t1al úl2t1árn úl2tél úl2térte úl2t1és últ1éves últi2 úl2tid úl2t1im úl2t1in úl2t1ip úl2tis úl2tí últ1old úl2tös úl2t1Å‘2r úl1trá ú2l1ud ú2l1u2g ú2l1ur ú2l1u2t ú2l1úr úl1ús úl1üg úl1ül úl1ün úl1ür úl1üt úl1üv úl1üz úl1űz ú2ly1a2d ú2ly1a2l ú2ly1an ú2ly1a2r ú2ly1au ú2lyál ú2ly1átl ú2ly1e2 ú2lyél ú2lyés ú2lyol ú2ly1ö ú2lyÅ‘ úly1s ú2lyug 2úmac 2úmad 2úmag 2úmaj 2úmar 2úmatr úmi2al 2úmoz 2únac 2únap únau2 ú2ny1a2n ú2ny1i2r úny1tr ú1o úo2ko úo2ve ú1ó úó2ra úó2rá úó2sá úó2vo ú1ö úö2le úö2lé úö2lÅ‘ úö2rö úö2ve ú1Å‘ úő2rö úp1eg úpe2l úp1es ú2p1in úp2la úp2lé ú2p1or úp1p2l úp2rí úp2ro úraát1 2úrab 2úraj úr1akk úr1alu ú2r1ant úr1a2nya úra1p2 úra1szp ú2rattas úrau2r ú2r1ábr úr1áll ú2r1á2ri úrás1s ú2r1átm úr1br úr1d2r úr1e2c úr1e2l úr1ez ú2rék úr1ékk ú2r1é2l úr1é2ne 2úrés úrfé2l1é2v úri2al ú2r1i2d ú2rie úr1ifj úri2g ú2r1iga úri3gényé úr1i2m ú2r1inf úr1ing ú2r1int ú2ris úr1ist úr1k2r 1úrnÅ‘r úr1ott úró1p2 úró1sp úr1ö2c úr1ö2l úr1ön úr1öt úr1Å‘s úr1pr úr1s2k úr1sn úr1s2r úr1s2t úr1szn úr1u2t úr1ü2l úr1ü2v 2úsabl ús1abr ú2s1a2d ú2s1a2j ús1a2la úsa2n ús1any ús1apr úsa2r ús1ará ú2s1arc 2úsarj úsá2gol ús1áld ú2s1á2p ú2s1á2rai ú2s1á2rak ú2s1árb ú2s1áre ú2s1á2ro ú2s1á2ru ú2s1á2rú ú2s1árv 2úsát ús1átl ú2s1á2z ús1dr ús1e2c ús1e2l ús1e2v ús1ex ú2s1é2g ú2s1é2l ús1é2ne ú2s1é2ré ús1érm ú2s1érté ú2s1é2tá ú2s1é2te ús1étr ús1fr úsi2g ús1iga ú2s1il ús1imp ú2s1in ú2s1i2p ú2s1i2r ú2s1is ús1í2z ús1kl ús1kv ú2s1ola ú2s1old ús1org ú2s1orr ús1osz ús1ó2h ús1ös ús1Å‘r ús2pe ús1pr ús1s2p ús3szag ússza2k1 ús3szav ús3sze ús3szi ús3szí ús3szó ús3szö ús3szú ús3szü ús2tat ús1t2r ú2s1u2ga ú2s1u2t ús1üg ús1ün ús1ü2t ús1üz úsvé2t1e ú2sz1a2d ú2sz1akc ú2sz1á2g úszás1s 2úsze úsz1ej úsz1e2s úsz1e2v 2úszé ú2sz1év ú2szigá 2úszí úsz1k2 úsz1old úsz1osz ú2szóe 2úszö úsz1ös úsz1p ús3zse ú2s3zú 2úszü úsz1ü2g úsz2ve út1a2d út1a2i ú2t1a2j út1a2v ú2t1ál út1á2ro út1á2s ú2t1á2t1 útá2v1i2 út1bl út1ef ú2t1e2g út1elá út1ell út1elz ú2téh út1é2l ú2tén ú2t1ép út1érd ú2t1é2ri út1érz útfélé2 út1gr ú2tiakh ú2tiakn út1id útie2 ú2tié úti2g út1igé 1ú2tij 1ú2t1i2ko út1ill 1ú2tiná út1ind út1inf út1ing út1int út1i2pa 1ú2tir út1irá út1ism út1ist 1ú2t1i2z út1íg út1íj út1ív út1okm ú2t1ola út1old út1oml úto2n1 út1ont út1op 2útor úto2ran úto2r1as úto2rál úto2re út1ost út1osz út1ös útÅ‘2r út1pl út1pr ú1t2rag 2ú1t2ri útsá2gi út1st útu2m1é útu2r út1urá ú2t1út ú2t1üg 1útvo ú1u úu2no úu2ra ú1ú ú1ü úü2gy úü2lé úü2re úü2te úü2ve úü2vö úü2ze ú1ű úű2zÅ‘ 2úvál úv2ár úvá2rad úvá2ral úvá2ris úvá2rosz úvá2r1ó2 úváru2 úzae2 úzaé2 úzak2 úza1p2 úz1arc úza1t2r ú2z1ál ú2záru úzás1s úz1i2d úzido2 úzót2 úzó1tr úz1p2r ú2zs1a2l ú2zsál 2úzse úzs1e2c 2úzsi úz3sz úz1t2r 2ü. 
ü1a üa2já üa2la ü1á üá2ga üá2go üá2gu üá2ra üá2sz üb2lo ücsö2k1 ü2des 1ü2dít üd1íz ü2d1ör üdös3s üdőé2 1ü2dül üd2v1a2 üd2v1el üd2v1e2s üd2vél üd2vid üd2v1í üd2vo 1üdvö üd2völ üd2vÅ‘ üd2vu üd2vú ü1e üe2bé üe2ge üe2gé üe2le üe2l1é2 üe2me ü1é üé2ke üé2pí üf2f1ö2 üfö3le üf2ro üge1k2 üge3l üg2ra ü2gy1a2 ü2gy1á ü2gyef ügy1e2lemb ügy1e2lemm ügy1elf ü2gy1ell ügy1elm ügy1elo ügy1éjé ügy1ékb ügy1éré ügy1érr ü2gyés ü2gyil 1ü2gyin ügy1int ügy1ira ü3gyí 1ügynö ügy1os ü2gy1Å‘2 üh1af üh1at üh1ás ü2h1e2le üh1elf üh1ellen ü2h1e2lÅ‘ üh1elv ü2h1éh ü2h1ér. ü2h1in ü2h1it üh1or üh1Å‘s ü1i üi2gé üi2ko ü1í üí2rá ük1a2n üka2p ük2kal ük2ká ükke2 ükkel1 ük2ker ükko2 ük2kop ük1u2n ül1ab ül1a2d ül1ag ül1aj ül1a2k ü2l1a2l ül1a2r ül1at ül1au ül1á2c ül1á2g ül1ál ül1á2p ül1á2r ül1á2s ül1br ül1d2r ü2l1ecse ül1e2dz ü2l1e2g 2ülek ül1ell ü2l1e2lÅ‘te ül1eng ül1enn ü2l1e2r ül1e2sÅ‘ ületa2 üle2tal üle2t1an üle2t1as üle2tav üle2t1á2 üle2t1eg üle2t1e2l üle2t1ék üle2t1ér. üle2t1érn üle2t1é2rÅ‘. üle2térü üle2t1é2ve. ületo2 üle2t1or üle2tos üle2t1ó2 üle2t1ö2 ület1t2 ül1ette. ül1etted üle2t1u üle2tüz ü2l1ex ü2l1é2g ülé2k1a2 ülé2k1á2 ülé2k1e2l ülé2kev ülé2kir ülé2k1o ülé2ku ü2l1él ül1é2pü ü2l1ér. ü2l1é2ré ül1éri ül1érj ül1érn ül1érs ü2l1é2rü ül1érv ü2l1érz ülé2sa ülé2so ülés3s ülé2s3zá ül1fr ü2l1i2d üli2g ü2l1iga ül1ill ü2l1im ü2l1int ül1ira ü2l1itt ü2l1iz ül1íg ül1í2r ül1í2v ül2l1a2n ül2l1in ül2l1ö2vü ül2l1u2 ül3lyu ülnö2k1öl ül1o2d ül1o2l ül1om ül1op ül1or ül1ó2v ü2l1öb ü2l1ö2l ü2l1ömö ü2l1ör ü2l1ö2v ülÅ‘e2 ülőé2l ülÅ‘1sl ülÅ‘1s2p ülÅ‘1sta ülÅ‘t2 ül1p2l ül1p2r ül1sl ül1sp ül1st ül2t1ad ültá2r 1ültetl 1ültets ül2t1e2v ül1tra ül1ud ül1u2g ül1u2t ül1ús ü2l1üg ülü2l ül1ülé ül1ülh ül1üli ül1ülj ül1üln ül1ült ül1ülv ü2l1ür ü2l1üt ü2l1üv ü2lyel üly1ess üly1esz üm1a2l üm1a2n üm1a2r üm1á2r üme3gi üm1éks ü2m1ép ü2m1érd üm1fl üm1fr ümi2g ü2m1iga ü2m1il ü2m1im ü2m1i2p ü2m1is ü2m1iz üm1kl üm1kr üm1o2l üm1or üm1os ü2m1öss üm1Å‘r üm1p2r üm1tr üm1új ü2m1ü2l ü2m1üv ü2m1üz ün1ál ün1á2r ün1e2l üne2t1elet üne2tés ün1evé ü2n1é2p ün1id ü2n1ing ü2n1irt 1ünnep ü1o üo2k1i2 ü1ó üó2rá ü1ö ü1Å‘ üő2re üp2ri üp2ro ürdés1s 1ü2reg üre2g1a üre2gá üreg1g 1ü2res üre2tö ü2röm ür2t1a ür2t1á ür2t1e2 ür2tis ür2t1o2 ürtok1 ürtü2 ür2tül 1ü2rül üs2s1a2 üs2s1á üs2s1eg üs2s1o2 üss1s üssü2 üs2s1ül üs2t1a2 üs2tá2 üst1ág üst1ál üst1ár üs2t1e2 üst1ég üs2t1ék üs2tél üs2t1ér. üs2t1ére üs2t1érm üs2t1il üs2tim üs2tir üs2t1is üs2tí üs2t1o2 üs2t1ó2 üs2t1ör üs2t1ö2v üs2t1Å‘r üs2t1Å‘s üs2t1u2 üs2tú üstű2 üs2t1űz üsz1ál üszi2 ü2sz1iv üsz1í2v ü2sz1önt ü2szür üt1ab üt1aj 1ü2teg 1ü2tem. üte2m1a2 ü2temb 1ü2teme üte2m1el ü2teméb üte2mért üte2min ütési2 üté2sik ütés3s üté2s3z 1ütkö ütÅ‘1s2p üt2t1á üt2t1é2 ütty1e2g üt2zi ü1u üu2ta ü1ú ü1ü ü1ű üve2g1a2 üveg3g üvezé2r 1üze2m1a2 üze2m1á üze2meg üze2m1érté üze2m1étk 1ü2zemi üze2m1o ü2zemt üze2m1u2 ü2zemű üze2t1a üze2t1o üzé2ra üzé2r1e2l 1üzlet. 1üzletn 2ű. ű1a űa2da űa2dó űa2gy űa2ka űa2la űala2g1 űa2lo űa2na űa2ny űa3nyagoc űa2or űa2pa űa2pá űa2ra űa2rá űa2to űa2ut űa2va űa2xi űa2zo ű1á űá2bé űá2ga űá2gá űá2gy űá2hí űá2je űá2lo űá2po űá2ra űá2ri űá2ro űá2ru űá2sí űá2té űá2tí űba2l1 űbé2rel űb2lo űb2ró űcsapá2 űcsa2p1ág űcs1as ű2cs1ék űcs1i2pa űd1ál ű2d1e2l ű1d2rá űd2ro ű1d2ró űd1sk ű1e űe2cs űe2ge űe2gé űe2gy űe2kére űe2la űe2le űe2lé űe2l1í2 űe2lÅ‘ űe2lü űe2me űe2pi űe2po űe2re űe2rÅ‘ űe2se űe2sé űe2sÅ‘ űe2te űe2ti űe2vé űe2vÅ‘ ű1é űé2le űé2lé űé2nekb űé2neke űéne2kest űé2neké űé2nekr űé2pí űé2te űfa2j1e2 űf2lo űfo2g1a2l űf2ra űf2ri űf2ro ű2g1ö2lé ű2g1ö2lÅ‘. 
űg1Å‘s űgÅ‘2z űg2ra űg2rá űholda2d űhol2d1ada ű1i űi2do űi2ga űi2gé űi2má űi2mi űi2pa űi2rá űi2ro űi2sz űi2ta űi2zé űi2zo ű1í űí2rá űí2ri űí2ró űí2té űí2ve űí2zü űí2zű űkia2 űk2la űk2li űk2lí űk2lo űk2rep ű1k2ré ű1k2ri ű1k2ro ű1k2ró űk2va űme2g1 űn1al űn1ar űn1ál űn1á2r űn1e2le űn1elh űn1e2li űn1elk űn1e2lÅ‘ űn1elr űn1elt űn1eml űn1e2se űn1est űne1t2 ű2n1ér. ű2n1érte űni2g ű2n1iga ű2n1in ű2n1i2p ű2n1i2r ű2n1is űnme2 űn3ny űn1o2k űn1ol űn1os űn1ó2r űn1pr űn1s2k űn1s2t űn1u2n űn1u2t ű2n1üs ű2n1üz űn1űz ű1o űo2dú űo2ká űo2ko űo2la űo2rá űo2ro ű1ó űó2ce űó2ra űó2rá űó2ri ű1ö űö2lÅ‘ űö2rö űö2ve űö2zö ű1Å‘ űő2rö űp2la űp2lü űp2ne űp2ré űp2ri űp2rí űp2ro űp2ró űr1ad űr1a2l űr1a2m űr1a2n űr1au űr1a2v űr1áb űr1ál űr1ás űr1áz űr1egy űr1e2l űre2n ű2r1e2r űr1ex ű2r1é2j űr1é2l űr1ép űrés3szer űré2sza űré2szá űré2szí űré2szo űr1fl űr1id űri2g űr1iga ű2r1i2m űr1int űr1i2p űr1ist ű2r1ír űr1o2d ű2r1ol űr1o2p űr1or űros2t1a2 űr1ot űr1ón ű2r1ö2l űr1ör űrÅ‘1f2 űr1pl űr1pr űr1p2s űr1s2p űr1s2t űrszt2 űr1tra űr1uj űr1un űr1u2t űr1út űr1üg űr1üz ű2s1ajtó űs1ál űsé2g1el ű2s1ín. ű2s1í2r ű1s2ka ű1s2ká űso2rad űso2raj űso2ral űso2ran űso2rál űso2rosz űsor1s űsort2 űs2pe űs2pi űs2po űsp2r űs2rá űs3sz ű1s2ta ű1s2tí ű1s2to űst2r űs1tro ű2s1uj ű2s1ü2t űsze2r1á űsze2r1e2le űszere2p űsze2r1epé űsze2r1ült űsz2k űsz2t űtés3s űtÅ‘a2 űtÅ‘e2 ű1t2ra ű1t2rágá ű1t2re űt2ri ű1t2ro ű1u űu2ga űu2ra űu2sz űu2ta űu2tá űu2tu ű1ú űú2ri űú2sz űú2ti űú2to ű1ü űü2gy űü2lé űü2rí űü2te űü2té űü2tö űü2ve űü2vö űü2ze ű1ű űű2zÅ‘ űvé2sz1a űvé2sz1á űvé2sz1e2l űvé2szer űvé2szint űvé2szo űvé2sz1ó2 űvé2szö űvé2szú űví2z1 űvíze2 ű2z1a2b űz1a2d űz1a2g űz1ak űz1a2l űzal1e űz1a2p űz1a2r űz1a2s űz1a2t űz1a2u űz1á2g ű2z1ál ű2z1árb ű2z1árj űz1átl űz1átu ű2z1ed ű2z1ef ű2z1e2l ű2z1em ű2z1e2r űze2s ű2z1ese ű2z1esé ű2z1est ű2z1esz űze2teg űze2tel ű2z1e2v ű2z1ég ű2z1é2l ű2z1érm ű2z1érté ű2z1érz ű2z1ész űz1fr űz1gl űz1g2r űzi2g ű2z1iga űzigaz1 ű2z1igé ű2z1i2m ű2z1i2r űz1isk űz1ism űz1ist űz1i2sz ű2z1iz űz1ín űz1ír űz1í2v űz1kl űz1kr űz1o2k űz1o2l ű3zom űz1on űz1op űz1or űz1os ű2z1óc ű2z1ó2r ű2z1ö2kö űzöl2d1el ű2z1ö2v ű2z1öz űzÅ‘a2 űzÅ‘e2 ű2z1Å‘r. űz1Å‘2rei ű2z1Å‘2ri űz1Å‘rk űz1Å‘rm ű2z1Å‘rn ű2z1Å‘2rö ű2z1Å‘rr ű2z1Å‘rs ű2z1Å‘rt űz1Å‘rz űz1p2r űz3sa ű2zsám ű2z3sár ű2z3sáv ű2z3ser ű2z3sé ű2z3só ű2z3sö űz3s2p ű2z3suga űz3sü ű2z3sű űz3sz űz1t2r ű2z1ug űz1úr űz1út ű2z1ü2g ű2z1ül űz1ünn ű2z1üt űz3zs 2v. 1va vaa2d vaát1 vaáta2 2v1abl va1bra v1abs vacs1ala va2cs1an va2csap va2csál va2cs1ü2 vacs3z 2v1a2dag va2d1aj va2dalé 2v1adap va2d1asz v1a2data v1a2datb v1a2datk v1a2dato v1a2datr va2daz va2d1ál va2deg va2d1e2l va2den va2dep va2d1e2r va2d1e2t va2dél vad1éte va2dib va2d1id va2d1ir va2d1i2t va2d1ír va2d1ol 2vadom va2dóh va2dói v1a2dój va2dór v1a2dóu va2d1ör va2d1Å‘2 va2d1ú va2dű va2dza va2dzá va2dze vaé2r va2g1as va2gav va2gás va2g1e2 va2g1é2n vag1g va2gid va1gle va2g1o2ku va2goli vag1ost va2g1osz va2g1ura va2gú 2v1a2gyú va2j1ar va2j1ác va2j1á2ro va2j1e2g va2jí va2j1ol va2j1ó2s va2jö va2jü 2va2kad vak1a2dá va2k1aj vak1akn vak1apá vak1árn va2k1ás va2k1át va2keg va2kem va2k1érté va2kis va2k1ír va2k1ó2s va1krí vak1t2 2vakti 2v1aktu 2vakup va2k1út va2lac va2laku va2lap. 
va2lapí va2lapú vallás1s 2v1amp 2v1a2nal va2n1e2g vane2m van1eme va2n1es vané2v van1éve van1évi 2vang van3n va2nol va2nó 2v1a2nyá 2v1a2nyó va1p2l va2pos va1p2r 2v1a2pu va2r1ab vara2c var1acé va2r1a2d var1ajt var1akt va2r1al va2rany var1a2nya va2r1a2p va2r1a2r va2r1assz va2r1at va2r1av va2rág va2r1ál 2v1a2rány va2r1ászn var1áta va2r1átf va2r1átm va2r1á2to va2r1á2zá varca2 var2cag var2cal var2can varc1c var2c3ho va2r1es va2r1e2t va2rev va2rég var1ék. var1éks va2r1é2l va2rép va2r1éré va2rid va2r1i2ko va2r1ikr va2r1ill va2rim var1inf var1ink va2r1inv va2r1i2p var1isk var1ism va2rí var2k1an var1k2b var1k2j var1k2ká var1kl va2r1okm var1old va2rop va2ror va2rön va2rÅ‘ vars2 varsz2 2vartett. var2t1i2n var2tor vartó2 var2t1ór va2r1ut va2r1út va2r1ü2 va2rű vas1abl va2sag va2s1aj vas1arc vas1asz va2s1ábr va2s1ál va2s1árl va2sek va2s1e2l va2sem va2s1e2r va2set va2s1ék. vas1ékk va2s1ékn vas1ékt vasé2r va2s1ére va2s1érté va2s1érv vasfélé2v va2sid va2s1i2na. va2s1ing va2s1i2s va2s1iz va2s1ön va2sÅ‘ vas3sze vas3szi vas3szÅ‘ vast2 vas1tró va2sus va2s1ü2t va2sű vasz1e2l va2szis va2t1a2d va2t1aj va2tau va2t1ál va2t1á2ru va2t1e2v va2t1ék va2tél va2t1érte va2t1é2te va2t1id va2tim vat1inf vat1ing va2t1is va2t1ír vatmo1 vatmosz2 va2t1okm va2t1old va2t1ö2v vatÅ‘2 va2t1Å‘r vatt2 2vattako va2t1ur 2v1a2uk vau2n vaza2t1e 2v1a2zon 1vá váb2baj váb2b1as váb2b1e2 váb2b1é váb2bil váb2bol váb2bos váb2bÅ‘ váb2bu 2vábr vá2c3h vá2cs1a2p vá2cse vá2cs1ék vá2csip vá2cs1is vá2csÅ‘ vá2csú vá2csü vács3z vá2d1al vá2d1a2n váde2 vá2des vádé2 vá2dén vá2dik vá2d1ir vá2d1or vá2dö vá2dü 2v1á2gaz 2v1ágr vá2gú vá2gyal vá2gyan vá2gyö vá2gyÅ‘ vá2k1e vá2k1ü vá2laj vá2l1a2l vála2n vá2l1any vá2l1ap vá2lar vála2szin vá2l1á2r vá2l1át vá2l1e2 vá2lél vá2lid vá2lin vá2lir vá2lism 2v1állás vál2l1e2 2v1állom 2v1álló vál2l1ö2 vá2lú vá2lü váma2 vá2m1ad vá2m1aj vá2m1ak vá2m1al vá2m1as vá2mau vá2m1á2 vám1b2 vá2m1e2 vá2m1ér. vá2m1érté vá2mil vá2m1in vá2m1ir vá2mis vá2mí vá2m1or vá2mö vá2mÅ‘ vámu2 vá2m1ut vá2mü vá2naj vána2n ván1any vá2n1e vá2nis váns3s vá2n1ú vá2nü ványa2d vá2nyaj vá2ny1a2l ványa2n vá2nyany vá2ny1ap vá2nyar vá2ny1as vá2nyau vá2nyaz vá2ny1e2 vá2ny1ér. vá2nyérd vá2nyérr vá2nyérv vá2nyérz vá2ny1ing vá2nyis vány1í2r vá2ny1ó2 vá2nyö vá2po vár1a2dato vá2radá vá2r1a2dó vá2r1a2du vára2l vá2r1ala vár1ale vá2raml vára2n vá2r1any vá2r1ar vá2r1assz vá2r1a2sz vá2rág vá2r1ál várd2 vá2r1e2 vár1isk vár1ism vár1ist vá2rí vár1old vá2r1oml vá2r1ont váro2sz vár1oszt vá2r1ó2né vá2rö vá2rÅ‘ várs2 vár1sp vár1sr vár2t1es vár2t1ér. 2váru. vá2ruh vár1u2r vár1usz vár1ut várú2 vá2rús vá2rü vá2s1a2d vá2sam vá2saz vásár1a2d vá2s1e vási2k vás1iko vá2sis vá2sír váskész1 vásó2 vást2 vás1tr vá2s1ü vá2sű vá2szan vá2s3zav vá2sz1e vá2szin váta2 vá2t1al 2v1átd 2v1á2t1e2r 2v1átfe vá2t1ir 2v1á2tí v1átlé 2v1átm 2v1á2t1ö 2v1átp 2v1á2tü 2v1átv vá2z1alj vá2z1alt vá2z1a2tom vá2z1a2v vá2z1e2 vázi1s2 vá2ziz vá2zí vá2zos vá2zö vá2zÅ‘ vá2z3sa vá2z3sá váz3se vá2z3sé váz3sö vá2z3su váz3sz vá2zü vb2lo vb2ra vcsőé2ne vd2rá 1ve vea2g vea2l vea2n vea2r veá2l 2vece ve2cet ve1cl ve2cseg 2vedény ve2gab ve2gac ve2gar ve2g1as vega2z ve2g1á2 ve2g1eg ve2g1e2kéh ve2g1ell vege2lÅ‘ké ve2g1elr ve2g1e2r ve2g1esem ve2g1esz ve2g1e2tet ve2g1ék ve2g1él ve2g1ép veg1éré veg1érte veg1érv vegés3s vegé2sz vegg2 veg1gr ve2gid ve2gik ve2gim ve2gí ve2gol ve2gor ve2g1ó2 ve2g1ö2 ve2g1u ve2gú ve2gyelem ve2gyemb ve2gyez 2v1egyl 2vegyv ve2k1ak 2v1e2ke. 
2v1e2kei 2vekéb ve2kébÅ‘ 2v1e2kék 2v1e2kés ve1klu 2vela 2v1e2lef 2v1e2lemz vele2t1e2l 2v1e2lég velés3s 2v1elf v1elgy 2v1elha 2v1elhel ve2lis 2v1e2l1í2 v1eljá 2v1elm ve2los 2v1e2lÅ‘irá 2v1e2lőí 2velÅ‘kés 2v1elÅ‘leg 2v1elr 2v1elsa 2v1elszá 2v1elszo 2v1eltá 2v1elter 2v1eltér 2v1elto 2v1elv. 2v1elvá 2v1elvek ve2lyö ve2lyu 2v1elz 2v1embó 2v1e2mel 2v1eml 2v1e2mu ve2n1ad ve2nau ve2n1á ve2n1e2g ve2n1e2l ve2n1esz ve2n1é2vi ven1f2 ven1g2 ven3k2 ve2n1o ve2n1ó2 ve2nö ventü2 ven2t1ül ven1ülé ve2nyö 2v1enz veó2r ve2ör ve2pe ve1p2r ve2rab ve2r1a2c ve2r1a2d ve2r1a2g ve2rajá vera2l vera2n ve2r1a2r ve2r1au ve2raz ve2r1ág ve2r1ál 2verdÅ‘ ver1egy ve2r1e2h ve2r1e2ke. ve2rekl ve2r1e2l ve2r1eng ve2r1er ve2r1e2sÅ‘ ve2r1esz ve2r1é2l ver1g2 ve2r1iga ve2r1ill ve2rim ve2r1inc ve2rind ve2r1inf ve2r1ing ver1inte ver1ipa ve2ris ve2r1ol ve2ror ve2ró ver1ó2r ve2rö ver1p2 ver2s1al ver2sár versé2g ver2s1égé ver2s1égg ver2s1égn ver2s1égt ver2sir ver2s1í2 ver2só vers3s ver2s1üt vers3zár vers3zen ver1tra ve2rur ve2r1ut ve2rút ve2r1ü2g ve2san ve2sas ve2s1ál ve2s1e2kétÅ‘ veset2 2v1e2sett ve2sip ve2sis ve2s1íz ve1ská 2v1e2sÅ‘ ves3sza ves3szer vest2 ve2s1u2 2v1e2sze. 2v1e2szekn 2v1eszm 2v1e2szű veta2l ve2tas ve2tál ve2t1á2r vet1egy ve2t1e2lÅ‘l ve2tég ve2t1é2k ve2tid 2v1e2tika v1e2tim ve2t1ing ve2t1ol ve2t1öl ve2töv vetőé2 ve2tur ve2t1ut ve2t1ű2z ve2vet 2v1e2vo vezÅ‘e2r vezÅ‘kész1 vezÅ‘2sorr vezÅ‘s1s 2v1ezr 1vé véá2g 2v1ébr vé2dak véda2l vé2d1as vé2dá vé2d1emb vé2dos vé2dóv vé2dö vé2dú vé2dz vé1f2r véga2 vé2gab vé2gak vé2gal vé2g1á2 vé2g1eg vé2g1e2le vé2g1elg vé2g1elh vé2g1eli vé2g1ell vé2g1els vé2g1elv vég2em vége2n vé2g1eny vé2g1ep vé2g1er vé2g1esem vé2g1e2sete vé2geté vé2getÅ‘ vé2g1ér. vé2g1é2ren vé2g1é2ré vé2g1érr vég1érv vég3g 2véghü vé2g1id végigaz1 vé2gim vé2giz vé2g1í végkötés1 vé2gol vé2g1ó2 vé2gö vé2gú 2v1éh. 2v1é2he. 2v1é2hen 2v1é2hes 2v1éhs vé2kei vé2kít vé1k2l vé1k2ré vé1k2ri 2v1éks vé2kük vé2l1a2 vé2láb vé2lál vé2l1ár vé2leg vél1ell vél1els vél1elt vél1emb vé2l1eme vé2l1e2r vé2les vél1ess vél1est vé2lev vé2lez vé2l1é2k vé2lin vé2lir vé2lí vé2los vé2lÅ‘i vé2lÅ‘s véltá2v vé2l1u vé2lú vé2lű véna1p 2vénekl vé2n1emb vé2ny1eg vénye2l vény1ele vény1els vé2nyid vé2nyis vé2ny1í vé2nyú vényü2l vé2ny1ülé vé2ny1ült 2v1é2pí vé1p2l vé1p2r 2v1é2pü vé2ral vé2r1a2n vé2rap vé2rar vé2raz vé2r1ár vé2rát vér1d2 vé2r1e2l vér1eml véres3s vér1eti vé2rés vér1ész vér1éte vér1ikr vé2rir vé2r1o2k vé2rot vé2róv vér1s vértes1s vér2t1ón. vér2töv véru2 vé2rug vé2rut vé2r1ú vérü2kü 2v1érzet. vé2sak vé2sal vé2seg vése2l vés1ele vés1elt vé2s1er vé2só vé1s2pe vés3szer vés2táb vé2s1ü2v vés3zav vész1á2r vé2szeg vész1ell vé2sz1ékn vé2sz1Å‘2 vész3s vé2szu vészú2 vész1úr vészü2l vész1ülé vét1est 2v1év. vé3va 2v1évb 2vévei 2v1é2vek 2v1é2ven 2v1é2véé 2v1é2vér vé2vi. 2v1évr 2v1évt 2v1é2vü vfe2l1em vf2ra vf2re vf2ri vf2ro vge2o vg2ra vg2rá vhang2a3 vhez1 1vi vi2aa vi2ab vi2ac vi2aé vi2ag via2p via2szal via2sz1ál via2sz1árn via2sz1em viaszé2 via2szél via2szö viára2 viár1ad vic2c1a 2vické vi2csag vi2csal vi2cs1an vi2cs1as vi2csál vi2cs1e2l vi2cs1ér. vi2csérté vicsi2 vi2csim vi2csip vi2cs1ö2 vi2csú vics3z vi2deg vi2d1esz 2vidéz 2v1i2dÅ‘ vi2dz vi2ew 2v1i2gaz 2v1i2gén vika2 vi2k1ag vi2k1el vi2kon vi2l1ék villa1s villasz2 2villu vi2má 2v1imm vina1t2 2vind vi2n1emb vi2n1ó vin2tess vin2tesz 2vinté vin2tos 2v1i2on 2vipa vi1p2s 2v1i2rat. 2v1i2ratot 2viratoz. 
2v1i2rod visa2 vi2sal vi2s1au vi2s1á2g vi2ser vi2s1ék vi2sim vi2s1is vi2siz vi2sö vi2szár vi2szok vit2a vi3tad vitakész1 2vital vitelÅ‘2 2vitn vi2t1ú vi2zeg vi2z1ell viz1é2l vi2zin vi2zok vi2zol vi2zom viz1os vi2zsá viz3sz vi2zud vi2zú 1ví vídi2 ví2ge ví2ny vínyenc1 2v1í2ra 2v1í2rá 2v1í2ró 2vítélet. v2í3téletb 2vítélete 2v1ívb vívókész1 ví2z1a2 ví2zá ví2zel víz1e2le víz1elf víz1elh víz1ell víz1eln víz1elÅ‘ víz1elp víz1els víz1elt víz1elv víze2m víz1emb víz1eme víz1emé ví2zék ví2zés ví2z1ing vízi1sk ví2z1ist ví2z1isz ví2zí ví2z1o ví2z1ó2 ví2zö ví2z1Å‘2 ví2z3s2 ví2zú vízü2 2v1í2zű víz3z vjára2t1út vje2gya vje2tel vje2tu vkia2l vkiá2 vk2li vk2ló vk2lu vk2ri vk2rí vk2ró vla2te vleí2 vleü2 vme2g1 vna2p1in 1vo vo2il vo2ji vo2kí 2v1okm voks3s 2v1okta vola1t 2v1oldá 2v1oldó. vol2t1a2d vome2 vomec3 vona2l1e2 vona2t1e vonás1s voná2s3z vo2od vo2pe vo2r1a2 vo2r1in 2v1orm vo2s1as vo2s1av vo2s1e vos3s2 vo2s1ú vo2sü vo2uc vo2vá vo2xi 1vó vóa2k vóá2g vóá2r vócsa2p1á2 vó1d2re vó1f2r vó1k2l 2v1ónn vóó2r vó1p2l vó1p2r vó2ran vó2rák vó2rán vó2rát vó2s1akk vó2s1a2l vó2seg vó2s1iv vó2sol vó2s1orr vó2só vó2sö vó2sÅ‘ vó1spi vós3s vós1tr vó1sz2 vósza2k1 vó2s3zár vó2s3zen vóta1t2 vó1t2r vóváros1u 1vö vö2bö vö2dém 2v1ö2ko völ2gya 2v1ö2rök 2v1ötl vö2ve vö2vé 2v1özön vözÅ‘e3 vö2zs 1vÅ‘ vÅ‘a2n vőá2g vÅ‘e2l vÅ‘e2r vőé2j vőé2n vÅ‘1fl vÅ‘1f2r vÅ‘i3dejű vÅ‘1kl vÅ‘o2l vÅ‘1pl vÅ‘1pr v1Å‘rm v1Å‘2rü vÅ‘1s2p vÅ‘1sta vÅ‘1str vÅ‘1t2r vőü2l vÅ‘2zi vp2la vp2ra vp2re vp2ré vp2ri vp2ro vp2ró vp2sz vs2ká vs2ki vs2ko vs2lá vs2pe vs2ta vs2ti vs2tí vs2to vst2r vs2tú vsz2p v1sz2t vta2n1á2s vta2n1á2to vta2n1ó2 vtá2raj vtá2r1ass vtá2ris vtáró2 vtá2r1ór vtelés1s vt2ra vt2ré vt2ri vt2ro 1vu vu2bo vu2mal vu2man vu2m1e2 vu2mis vu2mö 2v1u2ni vu2ra vu2ru 2v1u2tak 2v1u2tas 2v1u2tat vu2tá v1utc v1u2tó 1vú vú1fr v1újd v1ú2jí v1újs vú2r1ad vú2re vúré2 vú2rén vú2rö vú2szó v1útn 1vü vü2gy vü2kü vü2lá 2v1üld 2v1ünn vü1pr vü2rí vü1st vü1sz2 vü2te vü2té vü2zé 1vű 2v1űrl vű2zÅ‘ vvágy1ób vverés3s vzá2r1ó2ra 2w. 1wa wa2ii wa2le war2d1i wa2re wa1s2h wa1ye wa1yé 1wá 1we we2b1a2 we2b1á2 we2b1ele we2b1é we2bir we2b1o we2b1ú we2bü we2ed we2ek we2ig we2is we2le we2ör 1wé whi2t 1wi wi2ch wi2cz 1wí 1wo wo2od 1wó 1wö 1wÅ‘ wri2t wsto2 1wu 1wú 1wü 1wű wyo2m1 2x. 1xa x1a2da xa2dá xaé2d xa1fl xa1k2l 2x1akt 2x1a2la 2x1alg 2x1alj 2x1alk xa2na x1a2ny x1a2rá xa2ri xa2vi 1xá 2x1ábr xá2gy 2x1áll xá2rak 2x1á2ram xá2ras 2x1á2ro 2x1árr 2x1á2ru 2x1átj 2x1átr 2x1átv xba2l1 xb2la xb2lo xb2ra xd2ró 1xe x1e2gy 2x1e2ke. xe2l1a xe2l1á 2x1e2lekt xe2lel xe2l1emb 2x1e2lemr xe2l1esz xe2l1in xe2l1o 2x1emel 2x1eml 2x1e2rÅ‘ 2x1e2sé 2x1e2zü 1xé 2x1éhs xé2pí 2x1ér. 
2x1érte 2x1érté 2x1értÅ‘ 2x1érz 2xévei xf2re xha2u 1xi xia2n xi2av xi2d1a2l 2xidá xi2dás 2xi2de 2xidé 2xidgá xi2d1i2 2xidjá 2xidjé 2xidl 2xidm xi2dol xi2dö 2xi2dÅ‘ xid1Å‘z xi1d2ro 2xidró 2xids 2xidu 2xidv 2xi2dz xi2el 2x1i2ga xigén1n 2xilc 2x1ill xina2 xi2n1an 2x1ind xine2 xi2n1et xi2n1i2 xi2nö xi2on xi2óc xi2óe xi2óg xi2ómé xi2óo xi2óö xi2óp xió2rá 2x1i2rá 2x1i2ro xi2sad xi2sal xise2 xi2s1el xi2s1es xi1s2ká xis1p xis3s xi2s1ü xi2t1e2g xi2t1e2r xi2t1é xi2t1i2 xi2t1ü2 1xí xí2ja xí2já xí2jo xí2ju x1í2rá xí2ró 2x1ívn 2x1í2vü 2x1ívv 1xo 2x1old xo2n1ai xo2n1al xo2n1e xo2pe xo1p2l 2x1osz 1xó 1xö xö2dé xö2rö xö2zö 1xÅ‘ xÅ‘2sé xp2la xpor2t1a2 xpor2t1á2r xpor2t1e2 xpor2t1érd xpor2t1ü2 xp2ri x1p2ro x1p2ró xs2ká xs2pe xs2ta xst2r x2t1e2d xti2la xti2lá xti2leg xti2lin xti2lis xti2l1o xt1ört 1xu xu2sad xu2sal xu2s1a2n xu2s1e xu2s1érté xusi2 xu2sil xu2sim xu2sin xu2sit xu2s1ol xu2s1ó2 xu2s1ö2 xus3s xust2 xus1tr xu2sus xusú2 xu2s1út xu2sü xu2s1ű xu2s3z xu2ta 1xú 1xü xü2lé xü2ve xü2ze 1xű 2y. yaa2d y1abla y1a2bon y1abra ya1bri y1abs ya2cél. ya2dako ya2dalé y1adap y1a2data y1a2datb ya2dati y1a2dato ya2datu yadé2ki ya2d1és ya2dév y1admi ya2dóan ya2dóbó ya2dóé y1a2dói y1a2dój ya2dóké ya2dókt y1a2dóm ya2dóná y1a2dór y1a2dós y1a2dóu yae2r ya2ero yae2t yaé2r y1aff ya1f2r ya2g1a2d ya2g1ag ya2g1am ya2gan y1aganc ya2g1a2s ya2g1atl ya2g1ato ya2gau yaga2z yag1azo ya2g1áll yag1árá yag1árb yag1á2ré yag1árh yag1ári yag1árj yag1árk yag1á2ro yag1árra. yag1árs yag1árt ya2gás yag1át1a2 ya2g1átf ya2g1átl ya2g1átr yag1d2 ya2gec ya2g1el ya2g1es ya2g1é2g ya2gék y1aggr yag3gyú ya2gid ya2gim ya2g1io yag1isz ya2giz ya2gí ya2g1osz ya2g1ó2r ya2gÅ‘ yag1s ya2gur ya2g1ut y1agy. y1a2gyat y1agyb y1agyf y1agyk y1a2gyon y1agyr y1a2jak y1a2ján y1ajk y1ajt y1akadá ya2k1áll ya2k1átk ya2k1átm yak1elm yak1elt yak1ékn ya2k1ér. ya2k1éri yak1i2zo ya1klu y1akna. y1aknák ya1kné y1a2kol. ya2k1ola ya2k1old ya2k1ón y1a2kós ya2k1örv ya1k2rém ya1kri ya1krí y1akt. y1akták y1aktb y1aktiv y1aktj y1aktot y1aktu yala2g1ú ya2l1agy yal1ajk ya2lakb y1a2lakj y1a2lakk y1a2lakok y1a2lakom y1a2lakot ya2lakt ya2laku y1alakz ya2l1a2l y1alany ya2lapa y1a2lapí ya2lapjá y1a2lapú yala2te yal1ága ya2l1ál yalás1s y1aláté ya2l1é2ne ya2lif yal1i2ko ya2l1inv y1aljai y1alji ya2lor yalókész1 ya2l1órá ya2l1Å‘2 y2alta yal1t2r y1altú ya2lü ya2lű y1amalg ya2m1any yam1a2rár yama2tal ya2m1árb ya2m1árn yamászás1 ya2m1á2to yam1b2 yam1emb yam1esés yami2k ya2m1iko ya2mind ya2m1i2o ya2m1is ya2m1osz ya2m1ó2r ya2mö ya2mÅ‘ ya2mü y1a2nal ya2nan ya2nar yan1arr ya2nat yan1att ya2n3e. ya2n1e2g ya2nek ya2ner ya2n1et ya2nez y1a2ném y1ang ya2nid ya2n1in ya2n1is ya2nit y1a2niz y1anny yano2d yan1oda yan1onn ya2nÅ‘. ya2nyas y1anyj y1a2nyó y1a2nyuk yaó2r y1a2pa. y1a2pai y1a2pas y1a2páb y1a2pád y1a2páé y1a2pái y1a2pák y1a2pám y1a2pára y1a2páró y1a2pát. y1a2páto y1a2pátó y1a2páv y1a2pi. y1apjáh y1apjái y1apju ya1p2l ya2post yapo2tá y1a2pó. y1a2póé y1a2póh y1a2pói y1a2pój ya2póké ya2póna y1a2pós ya2pót. ya2pótó y1a2póv y1app ya1pre ya1p2ri y1aprí ya1pro ya2r1a2dá ya2r1a2dó ya2r1a2du ya2rasz ya2ránn y1a2rány ya2r1átv y1arc. y1arca y1arcá y1arcb y1arcc y1arch y1arck y1arcn y1arco y1arcr y1arcu y1arcú 1yard. 1yardn 3yardom 1yardos yar1drá yar1ell ya2roma ya2ror yar1ó2rá yar1óvá ya2róz yar1s2 yas1alj ya2sap ya2s1as yaság1g ya1sl ya1sne ya1s2pi ya1s2po ya1s2rá yast2 ya1s2ta ya1sto ya1str ya1svi yasz2tár y1a2tád yatékát1a2 y1atka. 
y1atká ya2tomh y1a2tomm y1a2tomo yat2rág ya1tré ya1tróf y1attr y1a2ty y1a2uk y1a2ulá yau2tá y1a2uto y1autó yautói2ko yaü2t yaü2z y1a2vat y1a2zon y1a2zúr y1á2bé y1ábr yá2ga yá2gá yá2gé yá2gi yá2go yá2gu yá2gú yá2gy yá2jal yá2j1e yá2j1ö yá2jÅ‘ yáka2 yá2k1an yá2k1á yá2k1e yá2kü yálas3s yá2lál y1áldo yá2l1e y1állam y1állat y1állás y1állí y1állo y1álló y1állv yá2lü yáma2 yá2m1al yá2m1an yá2m1ap yá2m1as yámi2 yá2m1in yá2mü yá2n1e yá2nék yán3n yá2nö yánt2 yán1tr yá2nü yá1ny2 y1á2po yá2r1a2l y1á2raml y1á2ramo yára2n yár1any yá2rap yá2rar yá2r1ass yá2r1av yá2raz y1árazá yár1azo yá2r1ál y1árbev yár1d2 yá2r1e2 y1árem yá2réj y1á2riá yá2rim yá2r1is y1árnya yáró2 yá2rór yá2rö yá2rÅ‘ y1árpá yár2sé y2árt. y1ártám yár1tr y1á2ruh y1árur yá2rú2 y1árú. yá2rü y1árva y1árver yá2sal yá2sas yá2s1á2g yá2s1á2rá yá2s1árt y1á2sás yá2se yáská2 y1á2só yá2sö yá2szab yá2szag yá2szal yá2szas yá2sz1ál yá2sze yász1el yász1em yás3zen yá2szét yá2szév yá2szim yá2szin yá2szis yá2szit yász1ó2d yász1ó2r yá2sz1ö2 yá2szÅ‘ yá2sz1ü2 y1á2ta yát1a2l y1á2tá y1átb y1á2t1e2 y1á2té y1átf y1áth y1á2t1i2 y1átk y1átlag y1átlá y1átlé y1átm y1átn y1á2t1ö y1átp y1átr y1áts y1átt y1á2tü y1átv ybeá2 yb2la yb2le yb2lé yb2li yb2lo yb2lú yb2ra yb2ri yb2ro yb2ró ycsa2p1á2g ycsőé2 yc2vi yd2be y2desd yd2ni y1d2ra y1d2rá yd2ro yd2ró y1d2ru yea2v yeá2r ye1bl y1ecset ye2d1á ye2deg ye2d1esz ye2dol ye2d1ó2 ye2d1u2 ye2d1ú ye2d1ü2lÅ‘ yed2vér yee2s y1eff yega2 ye2g1ala ye2gan ye2g1az ye2g1á2 ye2g1el yeg1e2red ye2g1él yeg1érbe y1egérr yegész1 yeg1észn yeg3g2 ye2gid ye2gí ye2gú ye2g1üg ye2gyed y1e2gyen y1e2gyes ye2gyet ye2gyez y1e2gyé y1egyh y1egyl y1egys y1együ y1egyv yei2g y1ejt y1e2ke. y1e2kei y1e2kek y1e2kéé ye2kéj y1e2kés. y1e2kése ye1k2ré y1elad y1e2lág ye2lál y1elc y1e2lef ye2lege ye2l1e2h ye2lemek y1e2lemez y1e2lemű y1e2l1e2re ye2légt ye2l1é2ké ye2l1ér. yelés3s yelé2s3z y1elhal y1elhel y1elhú y1e2l1in ye2liv y1e2l1í2 y1eljá y1elka y1elnev y1elnö y1elny ye2los y1e2lö y1e2lÅ‘ad y1e2lőí ye2lÅ‘tt y1e2lÅ‘z y1elsa y1elsÅ‘ y1eltér y1elto y1eltö yel2vad yel2v1áll yel2vás yel2v1eg yel2v1e2r yel2vessz yelve2sz yelv1esze y1elvevé yel2véd yel2v1ég yel2vél yel2v1érz yel2v1í yel2vol ye2mak ye2m1a2l ye2map yema2r ye2m1at ye2mál y1ember y1e2melé y1emelk ye2m1er y1e2més yemi2 yem1ing ye2m1is ye2mit yem1ita ye2miz y1emlék y1emlí y1emlÅ‘ yem1ost ye2m1ö yem1p2 yemu2s yem1usz ye2n1á2 yen2csa yen2c3ser ye2n1elj ye2n1eln ye2n1elv y1e2ner yenes3s ye2n1é2l yenfé2l1é2v ye2nid ye2n1i2p yen1k2 ye2n1o ye2n1üg yenyá2 y1enz ye2pag yep1alo ye2p1ará ye2p1áll ye2pát ye2p1e2l ye2p1é2k y1e2pik y1epiz ye2pos yep2p1e2r ye1pro yep1t2 y1erde yere2ga yere2gete yereg1g y1e2rej yere2k1a yere2ká yere2k1e2sz yere2ko yere2k1ö2 y1e2rény y1erké y1erkö y1erny y1e2ro ye2rőér ye2rÅ‘ig ye2rÅ‘né ye2rÅ‘rÅ‘ ye2rÅ‘sí ye2rÅ‘ss ye2rÅ‘vá yer2s1a y1erszé ye2sá yes1egé yes1egy ye2s1ej yes1e2kétÅ‘ y1esemén ye2s1er y1e2setb y1e2seté y1e2seti y1e2setk y1e2setr y1e2setű y1esél ye2sip ye2s1í2r y1esőá ye2sÅ‘be y1e2sÅ‘h ye2sÅ‘je ye2sÅ‘k. ye2sÅ‘kh ye2sÅ‘kr y1e2sÅ‘n y1e2sÅ‘r y1e2sÅ‘s ye2sÅ‘vé ye1s2pe yes2t1a2 y1estés y1estév y1estjü yes2tÅ‘r y1estű y1eszk y1eszm y1esztét y1e2szű y1e2tal yeteg1ér. y1e2tet ye2t1ért y1e2tik y1etn y1e2tűd ye2vet ye2vezÅ‘ y1e2vé y1e2vol y1e2vÅ‘ y1e2zer yeze2t1ék yezÅ‘a2 yé2b1á yé2b1ér. yé2b1i2 yé2bü yé2des y1égb y1é2ge y1é2gé y1é2gi y1é2gÅ‘ y1égt y1é2he. y1é2hen y1é2hes y1é2het y1éhs y1éj. y1éjb yé2jei yé2jen yé2jért y1é2ji y1éjj y1éjs y1éjt yé2kab yé2kad yé2k1aka yé2k1a2l y1é2kat yé2k1el yé2ker yé2k1é2k yé2k1é2l yé2k1ér. yé2kik yé2k1ó2 yé2k1ö yé2kÅ‘ yé2kúr yé2l1á yé2le. 
yé2les y1é2let y1é2lén yé2lét yé2lim yél3l yé2lÅ‘s y1élsp y1é2lű yéná2 yé2n1ár yén1e2ke. yé2nel yé2n1ikr yé2no yé2pí yé2pü yéra2 yé2r1aj yé2r1ak yé2ral yé2r1an yé2rar yé2r1á2 y1érch yér1d2 y1érdek yé2reg yé2r1e2l yé2rem y1érem. yér1eme yé2r1enc yér1esÅ‘ y1é2ret yé2rev y1é2rez y1é2rés yé2ril y1é2rin y1érmü yé2r1osz yéró2 yé2r1ór y1é2rÅ‘. y1é2rÅ‘i y1é2rÅ‘k y1é2rÅ‘t y1érpa y1érték yéru2 yé2rut y1érv. y1érvén y1érvv y1érzet y1érzé y1érzÅ‘ yé2sz1á yé2sz1emb yé2sz1o yé2sz1ú yé2t1a y1é2tel y1é2ter yé2tén y1étke y1étl y1év. y1évad y1évb y1é2ve. y1é2vek y1é2vem y1é2ven y1é2ves y1é2vet y1évez y1é2véb y1é2vén y1é2vér y1é2vét y1é2vév y1évf y1évh y1é2vi y1évk y1évm y1évn y1évr y1évs y1évt y1é2vü y1é2vű y1évv yfas2 yfé2lá yf1gl yf2la yf2le yf2li yf2lo yf2ló y2föl. yf1Å‘2sö y1f2ra yf2ri yf2rí y1f2ro yf2rö yg2le yg2ló ygó1g2 ygót2 yg2ra yg2rá yg2ri yg2ró yg2ru yg2rü y2h1a2dó yha1p2 yha1t2r yhá2zal y2h1elv y1i2bo y1i2deá y1i2deg y1i2den y1i2deo y1i2dé y1i2dom y1i2dÅ‘ y1i2ga y1i2gá y1i2géz y1igm y1i2har yi2hat y1ihl y1i2ker yi2k1érn yi2kont yi1k2ri y1i2ma. y1i2má y1i2mit y1imp y1i2nas y1inc y1ind y1inf y1inga yi2nic y1inj y1ins y1inv yi2ono yi2par y1i2ram y1i2rat. y1i2rata yira2t1é y1i2ratn y1i2rato yi2ratt y1i2rá y1i2ri y1i2rod y1irr y1irt y1ish y1isko y1ism y1isp y1ist y1i2sza yi2szel yi2szon y1i2tal y1i2tat y1i2var y1i2vás y1i2vó y1i2zé y1izg y1izm yi2zom yí2gé yí2ja yí2já yí2ju yí2ka yí2ká yí2ke yí2kí yí2kol yí2k1ö yí2la yí2l1e2 yí2lí yí2lö yí2ral yí2ráb yír1á2sz yí2rog yí2rÅ‘ yítÅ‘a2 yí2vá yí2ve yí2vé yí2vó yí2vü yí2vű yí2zü yí2zű yje2gy1á2 y2jév yjob2b1o ykai2 yka1k2 yka2ró2ra yk2bó y2k1e2lem yk1é2jév yké2p1el yk2ho ykiá2 ykie2 yk1izm yk2ka yk2la yk2le yk2li yk2lí yk2ló yk2lu yk2ná ykóro2 yk2ra y1k2rá y1k2ri yk2rí yk2ro yk2ró yk2vó ylal2ta yla2t1a2n yla2t1é2t ylei2 yleí2 yle2tal yle2tá ylóá2 ylót2 yma1d2 y2mat. ymá2s1ik yme2g1é ym2ma yné2vér yn2ka ynk2r ynőé2n ynÅ‘2ié yo2be y1obj y1o2dú yo2gal yo2gál yo2g1ár yo2ge yog3g yogi2 yo2git yo2g1iv yo2gí yo2g1osz yo2gö yo2gü yo2gű yoka2 y1o2k1al y1o2ká y1o2k1ir y1okm y1o2kol y1o2kos y1o2koz y1okta yo2l1a2l yola2n yol1any yo2l1ári yol2cem yolci2 yol2cik y1oldá y1oldó y1o2lim y1oltás yol2ter yo2lú yo2lü yolví2 yo2m1a2l yoma2n yo2m1any yoma2s yo2m1aszó yo2maz yo2m1ál yo2m1árk yo2m1á2to yo2m1á2z yo2m1e yo2m1ik yo2mil yo2m1is yo2mí yo2m1ol yo2mö yo2mÅ‘ yom1p2 yo2n1a2d yo2nag yo2naj yon1a2ka yo2n1a2l yo2n1am yo2n1a2n yo2nap yo2n1a2r yo2n1a2s yo2n1a2t yo2n1au yo2n1ál yo2n1át1a2 yo2n1áté yo2n1átj yon1átk yo2n1átr yo2n1áts yo2n1átv yo2n1á2z yon1d2 yo2n1e2 yo2néd yo2néh yo2nék yo2nén yon1f2 yo2nim yo2n1is yon1itt yo2niz yo2ní yon1k2 yon3n yo2n1ol yo2nop yo2n1osz yo2nö yo2n1Å‘2 yon1s2 yont2 yon1tr yo2n1ü y1o2pe y1opt yo2rad yo2r1a2k yo2r1a2p y1ord y1orgi y1orrú yor2sol y1orsó yors3s yo2se yos3s y1osto y1oszl yos3zs y1oszt yo2ut yo2ve yo2xi yóá2g yóá2r yócsa2p1á2 yó2dák y1ó2dár y1ó2dásak yó1f2l yó1g2r yó2gyár yó2gyi yó2gyú yóí2v yóo2k yóó2r yó1p2r y1ó2rac y1ó2rad yó2rag yó2rai y1ó2ras y1ó2ráb y1ó2rád y1ó2ráé y1ó2ráh y1ó2rái y1ó2ráj y1ó2rák y1ó2rám y1ó2rár y1ó2rás y1ó2rát y1ó2ráv y1ó2ri yó2s1aj yóscsa2 yó2sír yó2s1ol yó2s1orr yós1ors yós3s yósü2l yó2s1ű2 yö2bö yö2bű yö2cs yö2dém yö2ka yö2ká yö2kel yö2k1ék yö2kid yö2kí yö2ko yök1os yö2kó yö2kör yö2k1öss yö2ku yö2lÅ‘k y1öltés y1öml y1ö2ná yön2gya yön2gyá yön2gyó yön2gy1öl y1önk y1önté y1önz yö2ra y1örd yö2rit yö2ro yö2ru yö2so y1össz y1ösv y1öszt y1ötl y1ö2töd. 
y1ö2töde y1ö2tödé y1ötp y1ött y1ötv yö2ve yö2vé yö2vi y1ö2vö y1ö2zön yÅ‘a2n yÅ‘1bl yÅ‘e2l yÅ‘e2r yÅ‘1fr yÅ‘1pr yÅ‘2ra y1Å‘2rei y1Å‘rg y1Å‘rl y1Å‘rp y1Å‘rszi y1Å‘2rül y1Å‘rz yÅ‘2seg yÅ‘1str yÅ‘sz2 yÅ‘2s3zár. yÅ‘s3zs yÅ‘1szt yÅ‘zÅ‘kész1 ypen1n ype2te yp2la yp2le yp2lé yp2lu yponté2 yp2ra yp2re yp2ré yp2ri yp2rí yp2ro yp2ró y2prÅ‘ yp2ru yp2sz yrádi2ón yrágás1 yrea2 yren2d1Å‘2 yreu2 ysa2van yság1g ys2ch ysé2g1e2l ysé2gés ys2ho ys2ka ys2ká ys2ki ys2la ys2lá ys2le ys2lu ys2mi ys2na ys2ni ys2pe ys2pi ys2po ys2pó ysp2r ys2ra ys2rá ys2rÅ‘ ys2sa ys2sá ys2se y1s2tí y1s2to y1s2tó y1st2r y1s2tu y1s2tú ys2tü ysza2k1ü y2szaló y2száld yszáraz1 ysze2ra yszer2v1ó2 y1sz2f y1sz2k y1sz2l y1sz2m yszö2g1el y1sz2p y1sz2t y1sz2v yta2c yt1acé yta2n1é yta2n1ó2 ytá2l1é2 y2t1állá y2t1álló yt2raf yt2ran yt2rap y1t2rá y1t2re y1t2ré y1t2ri y1t2ro yt2rón yt2rö y1udv yu2g1á yu2ge y1ugr yu2gy yu2hu y1ujj yuj2j1a2da yu2kab yu2kar yu2k1aszá yu2k1ác yu2kál yuká2s yuk1ásá yu2k1e yu2k1ö yu2kÅ‘ yuk1t2 yu2kű y1und y1u2ni y1u2no yu2rad yu2rai yu2rak yu2ral yu2ram yu2ras yu2rat y1u2rán y1u2ráv y1urn y1u2ru yu2sál yus3s yu2szá y1u2szo yu2tak yu2tal yu2tam yu2tat yu2taz yu2tál y1utc yu2tód y1u2tu y1u2tú yú1bl yú1br yú1gr y1újd y1ú2jé y1ú2jí y1újr y1újs yú2kel yú2k1es yú2ket yú2kev yú2kó yú2köl yú2l1ag yú2laj yú2lar yú2l1á2r yú2lát yú2l1eg yú2l1el yú2l1e2t yú2lin yú2l1is yúl2tag yú2lü yú2lű yú1pl yú1p2r y1úrb y1ú2ré. y1úrél y1ú2rév y1úrho y1ú2ri y1úrk y1úrnÅ‘ y1úrr y1úrtó yú1sta yú2sü y1úszt y1útb yú2té. y1úth yúti2 yú2ti. yú2tig y1útj y1útk y1útm y1útn y1ú2ton y1útp y1útr y1úts y1útt y1útv yü2dü y1ü2gy yü2két yü2lá yü2led y1ülteté y1ünn yü2rí y1ü2rü y1ü2te y1ü2té y1ütk y1ü2tÅ‘ y1ü2veg yü2völ y1ü2ze y1ü2zé y1üzl yű1bl yűé2n yű2g1Å‘ yű1kl yű1pl y1űrb y1ű2ré. yű2réb yű2rén yű2rön y1űrr yű2sá yű2s1orra. yűs1s y1ű2zÅ‘b y1ű2zÅ‘en y1ű2zőé y1ű2zÅ‘h y1ű2zÅ‘k y1ű2zÅ‘n y1ű2zÅ‘r y1ű2zÅ‘t y1ű2zÅ‘v y2v1abr y2v1a2dó yva2d3z yvai2 y2v1akc y2v1alb y2v1alk y2v1ank y2v1ant yv1a2nya y2v1apo y2v1arz yva2s1u2 y2v1atk y2v1au y2v1azo yv1ábr yv1állo yv1állv yv1á2rai yv1á2rán y2v1áre y2v1árj y2v1árl yvárosi2b yv1árre y2v1á2ru y2v1árv y2v1áta y2v1áti y2v1átt yv1br yv1dr yv1e2gye yv1egys yv1e2kéb yv1ela y2v1e2lá y2v1ell y2v1elo y2v1elti y2v1ember yven3n y2v1e2p yve2r1a yve2r1á2 yve2reg yve2r1ész yve2rip yve2r1o yvert2 y2v1esszé y2v1eva y2v1e2vé yv1e2vÅ‘. y2v1ex yvezé2rel yvé2du yv1ékí yv1é2let y2v1élm y2v1élt y2v1ép y2v1érd y2v1é2ri y2v1érrÅ‘ y2v1érté y2v1é2rü y2v1érze y2v1é2ve. yv1fr yv1gr yvigaz1 y2v1igé y2v1i2k y2v1ille y2v1im yv1ind y2vint y2v1i2p y2v1ism y2v1ist y2v1íg y2v1ín yví2zis yví2zü yv1kl yv1kr y2v1old y2v1olv y2v1or y2v1os yv1ó2r yv1öss y2v1ö2z yv1Å‘r yv1pr yv1sk yv1sl yv1st yv1sz2 yv1tr y2v1ub y2v1ud y2v1ur y2v1u2t yv1új yv1út y2v1üg y2v1ü2z yza2t1a2l yza2t1e yza2t1érd yzá2r1ó2ra yzás3s yze2t1a2 yze2t1á yze2t1ér. yze2t1é2rÅ‘. yze2tés yze2téte yze2t1o yze2t1öl yzé2k1e2l yzÅ‘a2 yz2rí 2z. 1za 3zaba. zab2b1i2ko za2b1i2ga za1bre za1bri z1abro 2zabs za2ce za2c1ég za2cho zac1ikr za2c1im za2cipa za2c1iv za2cí zac1k2 zac3st za2dad 2zadag zad1alj za2dan za2data 2z1a2dato z1a2datr zadás1s zadi2a za2d1ír za2dóme za2dóz za1d2rá za2d1ut za2dzá za1fl za1f2r za2g1a2d za2gak za2ga2n zag1any za2gara za2g1a2t za2gau za2gág za2g1ál zag1árn zag1árt za2g1áz za2gel za2gés z1aggy 2zagi za2gin z1a2git za2g1osz zagren2 za2g1ü z1agy. z1a2gyak z1a2gyar z1agyb za2ijá zai2z 3zaj. 3zaja. za2j1a2d za2jan za2j1átv 3zajáv za2jes 3zajf za2jin 3zajj zajká2rok 3zajol za2j1or 2zajta 2zajtó. z1ajtók 2zajtót zaj1tr 3zajú za2j1ü2 2zakad za2k1a2dat. 
zakai2ko za2k1aka zak1alk za2k1alv za2kana za2k1ant za2kara z1akarat z1a2karás z1a2karó z1akarv za2k1atl za2k1á2p za2k1áta za2k1átl 2zakc z1akció za2k1e2g za2k1ele za2kelm za2k1eln za2k1elÅ‘ za2ket za2k1é2l za2k1int za2k1i2p zak1iro za2k1ír zak1k2 za1klu 2z1aknák zak1oszt za2k1o2v za2kÅ‘s za1krí 2zaktá 2zakti z1aktív 2zaktú zaku1p2 zaku2r zak1ura za2k1urá zakü2 za1kvó zal1acé zala2gá zalag1g 2zalakí za2lakj za2laku zala2n zal1any z1a2lapb za2lapj zal1átk zale2l zal1ele zal1ell zal1elÅ‘ za2l1e2m z1algá z1alge za2lid z1alkal z1alkat z1alkot 2zalmás za2lól zalta2n zal2tel zal2tem z1alti za2m1a2d za2m1aj zam1ala za2m1a2p zama2t1Å‘2 za2maz 2z1ambu za2mem zam1ing za2m1is za2mí za2m1osz za2mü z1a2nal z1a2nek 2zang z1angy 2z1ankét zan2tes z1antil za2nya. z1a2nyag z1a2nyó zao2k zao2s zaó2h zaó2v z1a2pa. za2pad za2p1a2g z1a2pai za2p1ág z1a2pán za2p1ára z1a2pát z1a2páv zape2l zap1ele za2pem za2pes za2pí za1p2lán z1a2pó. z1a2póé z1a2pók 2zap1p2 zappa2ne zap3rof zap1s2 za2r1a2dá za2ran zar1a2nya zar1apa za2rar z1a2rái za2r1ál z1a2rány z1a2rár 2zarc z1arc. z1arca z1arcá z1arch z1arci z1arck z1arco z1arcú zare2t za2r1ete za2rev za2réna za2réná 2zarm za2r1ön za2r1ü zar2vág 2zarz za2sem za1s2ká za1s2ko za1s2li za1s2pó za1spr za2t1ab za2t1a2d za2t1aj zat1alap zat1alj zat1ana za2t1a2ny zat1ará zat1att za2tau za2taz za2t1áll zat1álo zatá2p zat1ápo za2t1árad za2t1áré za2t1á2ru za2t1elé zat1elk za2t1e2ré za2t1erk za2t1é2g za2tél zat1i2ko za2tim za2t1ind za2t1ing za2tins za2t1int za2t1ist za2tiz zat1k2 2z1atka. z1atlas zatmo1s zatmosz2 za2t1okos zat1olda 2zatomj zat1oml za2t1osz za2tök za2t1ön zat2rok za1trom zatt2 zat1tr za2tura za2tül za2tür 2zaty za2tya za2tyá za2ula za2ulá za2urá zau2tak z1a2uto z1autó zaü2t zaü2z za3vará 3zavarb 2zavat z1az. z1azh z1a2zo 1zá 2z1ábr zá2de zág1a2d zá2gak. zá2gako zá2g1al z1á2gas z1á2gat zá2gaz zág1azo zá2g1ál zá2g1á2rak zá2g1áru zá2g1e zá2gép zá2golta z1á2gói zá2g1út. zá2gü z1ágy. z1á2gya zágy1a2da z1ágyb z1ágyh z1ágyn z1ágyr z1á2gyu záí2r zá2j1ék zá2jí zá2j1ol zá2j1ös zá2jÅ‘ zá2jü zá2jű zá2kin zá2k1ö2v zála2n zá2l1any zál1apr z1álar zál1assz z1álca z1álcá z1áldá zá2l1e2g zá2l1e2l zá2l1em zá2lib zá2lid zál1ing zá2lir z1állam z1állap z1álma z1álmá z1álmo zá2lob zálo2d zá2l1oda zá2los zá2lü zá2ly1a2l zá2mac zá2m1a2d zá2m1aj zá2m1a2l zám1any zá2m1a2p záma2r zám1ará zá2maz zá2m1átl zá2m1á2to zá2mél zá2m1é2rÅ‘ zá2m1ik zá2mip zá2mir zám1orm zám1ors zá2mó zá2mö zá2mÅ‘ zá2m1ut zá2mü zá2mű zá2naj zá2n1as zá2n1at zá2n1át zá2nem zá2nis zá2n1it zán3n zá2n1osz zán1sp zá2nü zány2 zá2p1ad zá2p1ag zá2p1orzó zá1p2r z1á2radd zá2r1a2dot zá2r1a2l z1á2ramk z1á2raml zára2n zá2r1any zá2r1ap zá2rar zá2r1ati zá2r1a2to zá2r1att zá2r1av zá2r1ál 3zárás. 3zárása 3zárásb 3zárási zárás1s zá2r1á2sz z1árbev 3zárdá zár2dem zár2d3z 2zá2r1em zá2réb 2zárf 2z1á2riá zá2rid zá2r1i2k zá2rim zá2r1is zá2r1iz zár1k2 3zárkák 2zárkö 3zárlat 2zárnö 2zárny zár2nyál zá2r1osz 3záró. 3záróa zá2r1óbo 3záród zár1óév zá2r1ó2rá zá2rÅ‘ 2zárré zárs2 zár1sk zárt1anyá zár2t1es zárté2r zár1tro zá2rug z1á2ruh z1áruj 2zárunkn 2zárunkr zá2rü z1árvah zá2s1a2d zá2saj zá2sal zá2s1a2r zá2s1at zá2saz zá2s1á2g zá2s1árad zá2s1á2rá zá2s1á2ré zá2s1árh zá2s1árn zá2s1árt z1á2sás zá2sep zás1ikon zá2sis zá2s1í zá2s1ol z1á2só. zás1ó2r z1ásóv zá2sut zá2sü zá2s3zav 3zászl zász3se z1átad zát1a2dó. 
zát1a2dók zátá2r 2zátb 2z1átc z1átd zát1e2m zá2tél 2z1átf 2záth z1átha z1áthe zá1t2hi z1á2t1i2 2z1átj 2z1átm zá1tra zá1tro 2záts 2z1átv zá2z1ala záza2t zá2z1ato zá2z1e2 zá2zév zá2z1ol zá2zos zázó2 zá2zór zá2zö zá2zsaj zá2zsas záz4sá zá2zsál zá2zsol zba2ki zbeá2 zb2le zb2lo zb2lú zb2ri zb2ró zb2rú zc2lu zcsa2p1á2g zda1p2 zdas2 z2d1ass zd1áll zdés1s zdÅ‘1s2p zd2ri zd1u2r 1ze zea2d zea2g zea2k zea2l zea2s zeán2sze zeá2r zeá2z 2z1eb. 2zebe ze2be. ze2bei ze2bek z1e2béd z1ebf z1ebh z1ebk 2z1ebn ze1bru 2zece z1e2cet z1e2ch z1e2cset ze2d1ál. ze2d1álj ze2d1áln ze2d1ált ze2d1álv zede2r1e ze2dil ze2d1ó2 2z1e2dz zee2s z1eff ze1fr ze2g1a2l ze2gap ze2gar ze2g1ál ze2g1ár ze2g1eg zeg1ell ze2gep zeget2t1eb zeg1érr zegé2sz zeg1észb zeg1észé ze2gid ze2gil ze2gol ze2gú ze2gyed ze2gy1e2l ze2gyene ze2gyez z1egyl z1egys ze2het ze2ia zei2g zei2s zeí2r zekci2ósz zek1e2gy ze2k1ell z1e2kéit 2zekék ze2kil ze1k2lap ze2k1ott zek1övek ze2kÅ‘ ze1k2reá zek1t2 ze2k1ü2lÅ‘ z1ekviv ze2lag ze2lar ze2lál z1elbo 2zeledel z1e2lefá ze2leji 2zelekt ze2leme. ze2lemek zelem1el zele2m1ér. ze2lemű zele2pá zele2p1el zele2po zele2p1ü2l z1e2l1e2re ze2l1esz zele2tá zele2ter 2z1e2lég 2z1eléré zel1érh zelés1s z1elhá 2z1elhel 2z1elher ze2litet z1elkü 2z1elláto 2zellen z1ellene 2z1elnev z1elnö ze2lof z1e2los z1e2lö2l z1elön 2z1e2lÅ‘ha zelÅ‘2saj z1e2lÅ‘zÅ‘ z1elsÅ‘ z1eltá 2z1eltér 2z1elto 2z1eltű z1e2lu 2z1e2lú z1elv. z1elvek z1elves 2z1elvev z1elvez z1elvn z1elvt 2z1elvű z1elzá 2zema ze2mak ze2m1ág ze2m1ár ze2mát 2zember z1ember. z1emberb z1emberé z1emberh z1emberk z1embern z1emberr z1embert z1embl 2zeme. ze2m1egé 2zemei 2zemek ze2m1ell ze2m1eng 2zeméb 2zeméh ze2m1ék ze2m1éle ze2m1é2lé ze2m1é2lÅ‘ ze2m1élt ze2m1élv ze2m1é2rét 2zemés zemé2te ze2m1éten ze2m1étet ze2m1étjé 2zemév ze2migr ze2m1ing ze2m1int ze2mis ze2m1iz ze2m1í 2zemk 2zeml 2zemmó ze2mol ze2m1osz ze2m1ó2 ze2m1Å‘ 2zems z1emul zem1ur ze2mú 2zemü 2zemv zemvíz1 3zeneir 3zeneka zenesz2 3zenéé 3zenéi 3zenéj ze2nép zenké2 zenkét1 ze2n1o zen2tag zen2tal zen2t1an zen2t1as zente2g zen2t1egy zen2telm zen2t1ere zen2tev zen2t1é2j zen2t1imr zen2tip zen2tis zen2tí zen2tol zen2t1ó2 zen2t1ö zentÅ‘2s zen2t1Å‘se 2zenz zeo2k zeo2m zeö2r 2zepééb 2zepééi 2zepéén 2zepéit 2z1epéj 2z1e2pém 2z1e2pés z1epiz ze1pla ze1ple 2zepr ze1p2ri ze1pró ze2r1a2d ze2rajá ze2r1ajt ze2rakr ze2r1al ze2ram ze2r1a2n ze2r1a2r ze2r1as ze2r1a2t ze2r1au ze2raz ze2r1ága ze2r1á2r ze2rás ze2r1á2t zer2bin zer1d2 z1erdé ze2r1eb ze2r1e2ce ze2redé ze2rege zer1e2ge. ze2r1e2gé ze2r1egg ze2r1egy z1e2reje zer1ejt z1e2rejü z1e2rejű ze2r1e2ke. z1e2rekl ze2relÅ‘dj ze2r1ember ze2r1emi ze2r1eng zere2p1a ze2r1e2r ze2r1e2sés ze2r1essz ze2r1este zer1e2sze. zer1eszk ze2reszt ze2r1eu ze2r1e2v ze2rég ze2r1é2j ze2r1ék ze2r1é2l ze2r1ép zer1g2 ze2r1i2d ze2r1iga ze2r1iko ze2r1ill ze2r1i2m ze2r1ina ze2rind ze2r1inf ze2r1ing ze2r1inj ze2r1ink ze2r1intel ze2r1inté zer1intÅ‘ ze2rio ze2rism ze2riz ze2r1í2j zer1k2 zer2nal ze2r1okt ze2r1old ze2rolv ze2ror ze2r1ost ze2r1osz ze2roti ze2r1ó2r ze2rök 2ze2rÅ‘ z1erÅ‘. z1erÅ‘f z1erÅ‘i z1erÅ‘k z1erÅ‘l z1erÅ‘n z1erÅ‘s z1erÅ‘t z1erÅ‘v zer1Å‘z zer1p2 zer1s zers2k zers2p zers2t zert1á2lom zer2tit zer2tór zer1tra zer1tren ze2r1un ze2rur ze2rus ze2r1u2t ze2r1út ze2r1ü2g 2zerű zer2v1a2dó zer2van zer2vel zer2vél 2z1e2sedék ze2s1e2kéh z1e2setb z1e2sete. z1esetes z1e2setet z1e2seté z1e2seti z1e2setr 2z1e2sés. 2z1e2sésb 2z1esésd 2z1e2sése. 
2z1e2sései 2z1e2sések 2z1e2sésem 2z1e2sésen 2z1e2séses 2z1e2sésé 2z1e2sésh 2z1e2sési 2z1e2sésk 2z1e2sésn 2z1e2sésr 2z1e2séss 2z1e2sést ze2s1it ze1s2mi 2z1e2sÅ‘ zesp2 ze1s2pe ze1spr 2zeste z1estek z1estem 2z1esté z1esti. ze1s2til 2z1estj ze1sto z1estre z1estt ze2s1ü2v zes3zac ze2szág zesz1e2get ze2szelle ze2szeng ze2széh ze2sz1é2le ze2sz1im ze2szip 2zeszk z1eszkö ze2szol zeszt2 zesz1tr ze2t1ab ze2tac ze2t1ala ze2t1a2ny ze2táj ze2t1á2l ze2t1á2p ze2t1át zet1egé zet1elá ze2t1ell ze2telm ze2t1e2lőá ze2t1e2lÅ‘d ze2t1elr zet1emel ze2t1eml ze2tene zet1erk zet1e2ró zete2s1ég zetest2 zetes1tr z1e2teté ze2t1eti ze2tetn ze2t1éd ze2t1é2j ze2t1é2l ze2térte ze2t1érté zet1értÅ‘ ze2t1é2rü zeté2sz zet1észe zet1éter ze2tid z1e2tika ze2t1i2ko zet1ill ze2t1ing ze2tít zetkész1e ze2tom zet1ont ze2top ze2t1ó2r ze2t1ö2v zet1Å‘ri zet1Å‘rö zet1Å‘rt ze2tut ze2tür ze2t1ü2z ze2ty zeu2g z1e2ur zeü2t z1e2vÅ‘ ze2x1id ze2xim ze2x1i2p z1expe zex2t1ö ze2xú z1e2zer z1ezr 1zé z1é2ber 2z1ébr zé2dak zé2d1a2l zé2d1a2r zé2d1as zé2dat zé2d1á2 zé2d1ekét zé2d1e2lem zé2delm zéde2r1 zéd1esem zé2d1esz zé2d1eti zé2d1és zé2dik zé2dó zé2d1Å‘ zédren2 zédu2 zé2dur zé2dú zé2d3z zé2fá z1ég. z1égb z1é2ge zé2gé z1é2gi z1égn z1é2gÅ‘ z1égr z1égt z1é2he. z1é2hen 2z1é2hes z1é2het 2z1éhs 2zéid z1éj. zé2jen z1é2ji zéjje2l1e2 z1éjs 2z1éjül 2z1é2jün zé2k1a2d zé2k1a2l zé2k1a2n zé2k1ap zé2k1as zé2kás zé2k1eg zé2k1e2két zé2k1elr zé2ker z1é2kez zé2k1ék zé2k1é2l zé2k1és zék1old zék1osz zé2k1ó2 zé2kör zé2kúr zékü2l zé2k1ülé zé2k1ülÅ‘ zéla2 zé2lab zé2lad zé2lak zé2l1an zé2las zé2l1á2 2zé2le2l zél1ele zél1ell zél1elÅ‘ zél1elt zé2l1e2r 2zélet. 2z1é2letb 2z1é2lete. 2z1é2leten 2z1é2leté zé2letf 2z1é2leth 2z1é2leti 2z1é2letn 2z1é2letp 2z1é2letr 2z1é2lets 2z1é2lettÅ‘ 2z1é2letü 2z1é2letű 2zélé zé2l1ékh zé2lim zé2l1ist zé2lí 2zélm zé2l1o zé2l1ö2 2zélÅ‘s 2zélt zé2l1u2 zé2lú zé2lya zé2lyá zé2ly1esh zé2lyo 2zéme zé2n1árn zéndio2 zé2neg 2z1é2nekb 2z1é2neke z1é2neké zé2nekk 2zénekl z1é2nekn zé2nekr zé2nekü zé2n1e2l zé2n1e2r zéni2 zé2nik zé2nil zé2nim zé2n1io zé2n1is zé2ní zé2nom zé2nö zé2nÅ‘ zént2 zé2n1ü2lÅ‘ zé2p1a2g zé2p1a2l zé2p1any zé2p1a2p zé2p1a2r zé2pas zé2pat zé2pau zé2pav zé2p1á2r zé2pát zé2peg zép1ell zé2pez zép1f2 zé2pin zé2p1i2p zé2pir zé2p1i2s 2zépít zépo2 zé2pol zé2pos zé2pó zé2pö zép1s zé2pud zé2p1us zé2pú 2zépül zé2rab zé2rag zé2r1a2l z1é2ram zé2r1a2n zé2rap zé2rar zé2r1as zé2rat zé2rav zé2r1á2 zér1d2 zé2r1e2lemé zé2r1e2lemk zé2r1e2lemn zé2remb zér1eml zé2r1esz 2zérett zé2rev zé2rés. zé2résb zé2rése zé2rési zé2résk zé2résn zé2r1id zé2rim zé2ris zé2r1o2k zé2r1os 3zérós zé2rö 2z1é2rÅ‘. z1é2rÅ‘i z1é2rÅ‘k zé2rÅ‘s z1é2rÅ‘t zér1s zér2taj 2zértekez 2zértelm 2z1értetl 2z1értetÅ‘ 2z1érték 2z1érthet z1érto zé2rú zé2sab zé2san zé2sar zé2s1az zé2s1á2 zé2seg zé2s1e2l zé2s1e2r zé2s1eti zé2s1ég zé2sés zé2s1i2k zé2sok zé2só zés3s zé2s1ü2t zé2s3za z1észb z1é2szé z1észl zé2szó zés3zs zé2t1a2b zé2t1ad zé2tag zé2t1aj zé2t1a2k zé2t1a2l zé2t1ap zé2t1a2s zé2t1au zé2t1á2 zéte2n zéte2se zéte2sü zé2tég zé2t1é2n zé2t1i2 2zétk zé2tol zé2t1o2m zé2tor zé2t1o2s zé2tun zé2tú zé2t1ü2 zé2t1ű 2zétv 2z1év. 2z1évad z1évb 2z1é2ve. 2z1é2vek 2z1é2ven 2z1é2ves z1é2vén z1é2vét z1é2vév z1évf 2zévi z1é2vi. z1évk 2z1évn z1évr z1évs z1évt z1é2vü 2z1évv zfa2l1a2da zfa2l1aj zfa2l1e2l zfa2les zfe2l1em zfe2len zfe2reg zf2la zf2le zf2li zf2lo zf2ló zfora2 zfo2rat zfo2riz zföldié2 zf2ra zf2rá zf2re zf2ri zf2rí zf2ro zf2rö zgás3s zgá2s3z zgá2zó zgé2p1e2két zgé2sa zgé2sá zgé2sem zgés3s zg2la zg2ló zgó1g2 zgÅ‘2nyá zgÅ‘2z1á2 zgÅ‘2zér zgÅ‘2z1Å‘ z1g2ra z1g2ráf. 
z1g2ráff z1g2ráfh z1g2ráfj z1g2ráfk z1g2ráfn z1g2ráfr zg2ró z1g2ru zhán2 zhá2t1a2d zházköz1 1zi zi2aa zi2aá zi2abo zi2ac zi2ad zi2ae zi2aé zi2ag zi2ah zi2aí zi2aj zi2akép zi2akó zi2al zi2am zi2ao zi2aö zi2a1p2 zi2ar zi2asza zi2aszo zi2aszó zi2at zi2au2 zi2aú zi2aü zi2av 2z1ibr 2zicc zi1ch z1i2deá z1i2deg z1i2deo zi2dén 2z1i2dÅ‘ zie2l zie2m zi2éa zi2éá zi2éke zi2ép zi2éta zi2étá zi2éü z1ifj z2ig. zi2ga. 2z1igazg 2z1i2gazo z1i2gás zi2géj 2z1i2gén zi1g2r 2zigye zi2k1a2n zik1ékek zi2kik zi2kim zi2k1ing zi1klu zi2k1u2r zikus1s zi2ler zi2l1é2j zi2l1ék zi2l1ing zi2l1ü z1i2ma. zi2mak zi2máb zi2máj zi2mák zim1áll zi2már zi2mát zi2máz zim1b zi2m1e2g zi2m1e2l zi2m1é2ré zi2m1érté zim1ind zi2m1ip z1i2mit zi2mö zi2n1a2d zi2n1a2l zi2n1a2n zi2n1a2r 2z1i2nas zi2n1au zi2nái zi2n1áll z1indu zin1emb zi2nég zin2gas zin2gál zi2n1i2m zi2n1in zi2ní zin3n zi2nol zin1s2 zin2t1a2d zin2t1a2l zin2t1a2s zin2tát zin2t1á2z zinte2l zin2t1ele zin2telÅ‘ zin2telt z1interj z1interp z1interv zin2tom zin2t1ón zi2n1u2t zi2nú zi2ol 2z1i2ono 2z1i2ons zi2ot zi2óa zi2óá zi2óc zi2óe zi2óg zi2óí zi2ókam zi2ókap zi2óká zi2óke zi2ó1kl zi2ól zi2ómá zi2ómé zi2óné zi2óny zi2óo zi2ó1p2 zi2órá zi2ósá zi2ósé zi2ó1s2ká zi2ó1sp zi2ószű zi2óta zi2ótá zi2óú zi2óü zi2óű zi2pai zi2pari zi2páj zi1p2l zi1p2r z1i2rat 2zirá z1i2rán z2i3re 2ziro z1i2rod zi2rón z2i3rÅ‘ 2zirt zir2t1a2 z1irtá zir2to z1irtó zi2sad zi2sal zi2sam zi2sar zi2s1a2s zi2sál zi2s1áro zi2s1e2g zi2s1e2l zi2s1er zise2s zi2s1ese zi2s1esz zi2s1ék zi2sérte zis1évi zi2s1il zi2sin zi2s1is zi2sit zi2s1o2v zi2s1ó2 zi2sö zi2sÅ‘ ziss2 zis3sza zis3szá zis3szo zist2 zis1tra zis1tre zi2sut zi2s1ü2v zi2s3zaj zis3zav zis3zón zi2t1áll zit1áru zi2t1e2g zi2t1elr zit1eng zi2tét zi2til zi2tin zi2tip zi2tir zi2t1or zitó2 zi2tór zi2t1út ziú2r z1i2vad 2z1i2vó 2z1i2zé 2zizm z1izmo z1i2zom 1zí zí2gé zí2jan zí2je zí2jö zí2jü zí2nac zí2nad zí2naku zí2nal zí2nan zí2nas zí2nat zí2n1áll zí2n1ár zí2nát zí2n1e2r zí2nél zí2ní z1ínna zín3nye zíno2 zí2n1od zí2n1ok zí2nos zí2n1ó2 zí2nö zí2nu zí2nú z1í2ny zínyenc1 zí2r1a 2z1í2rá zí2rog 2z1í2ró zítÅ‘a2 zítÅ‘e2 zítÅ‘kész1 zí2vaj zív1akt zí2val zí2var zí2ver zí2v1Å‘ zí2vul zí2vú zí2ze zí2zü zí2zű zje2gy1á2 z2k1abl z2k1alak z2k1alj z2k1alt zka1p2l z2k1a2rád z2k1a2ráj z2k1a2rén zkaró2 zka2rór zka1s2k z2k1állv zk1bl zk1dr zk1egys z2kenn zk1eszk z2k1etű z2k1ér. z2k1érté z1k2hü zkia2 zkiá2 zkie2 zki2g z2k1igé zki2sé zk2la z1k2lá zk2ler z1k2lí zk2ló z1k2lu z2k1olda zkon2t1ár z2k1ontó zko2r1os z2k1oszl z3koszt z2kókor zk1ókort zkó2pá zkó2z3s zkö2zele zköz1ell zkö2z1é2l zkőé2h zk1pr z1k2rak zk2rá z1k2reá z1k2rém z1k2réta z1k2rétá zk2rí z1k2ro z1k2rón zkultú3 zkultúr2 z2k1u2rán z2k1u2rát. z2k1utá z1k2val zk2vó zky2b zky2h zky2j zky2k zky2n zky2r zky2t zky2v zla2tal zlata2n zlat1any zlá2m1e zlá2mis zlás3s z2láv. z2láve z2lávé z2lávk z2lávn z2lávo z2lávv zleg1g zlei2 zleí2 z2leng. zle2tak zle2tal zle2t1a2n zle2tál zle2t1á2r zle2t1eg zle2t1ell zle2tesz zle2tél zle2t1ér. zlet1érv zle2tés zle2t1é2té zle2t1é2ve. zletigaz1 zle2t1u zle2tüz zlé2sí zlé2so zlés3s zlina2 zli2nan zli2nin z2loge zlo2p1as zlo2pó zlóó2 zló2s1orr zma1pr zmas2 zma1sp zmeá2 zme2ge zme3gom zmi2n1a2 zmi2n1e zmuskész1 zmu2sö zmu2s3z zmu2tok zna2pes znie2 zni2ó z2nob znó1str znót2 znó1tr 1zo z1obj z1o2dú zo2es z1o2ka. z1o2k1a2d z1o2kai zo2k1ál zoki2 zo2k1ip z1o2k1ir z1okke z1okle 2z1o2koz 2zokta zo2lab 2z1o2laj zolajá2 zola2j1ár zola2je zo2l1a2l zo2l1as zol1ábr zo2l1áro z1oldal 2z1oldh 2z1oldó zo2l1e zol1f2 zol1inf zo2l1is zo2l1í z1olló zo2lö z1oltás z1oltó. 
zol1t2re zo2lü 2zolvad zo2m1a2g zo2mak zo2m1a2l zo2m1a2n zo2map zo2m1a2s zom1áll zo2m1á2r zom1b2 3zombor. 2zome zo2meg zo2m1el zo2men 2zomé zo2mél zomé2t zo2m1éte 2zomf 2zomg zomi2 zo2min zo2miz zom2jó 2zoml zo2m1ors zo2m1os 2zo2mö 2zom1p2 2zom1s 2zomz zo2n1aj zo2n1ak zo2n1a2la zo2n1alá zo2n1alk zona2n zo2n1any zo2n1a2r zon1asz zo2n1au zo2náld zo2n1áll zo2n1átl zone2 zo2neg zo2nel zo2n1er zo2nes zo2név zo2nin zo2n1i2o zo2n1or zono2sé zo2n1ó2 zo2nö zon1s2 zon2t1es zon2tin zon1tre zo2nü zo2ny1ad zo2ny1a2l zo2ny1a2n zo2nyar zo2nyav zo2nye zo2nyid zo2nyij zo2nyó zo2ol zo2om z1opál z1o2pe zo1pho zo2r1a2d zo2raf zo2r1ag zo2r1aj zo2r1a2l zo2ran zo2r1as z1o2rat zo2r1au zo2r1e2 zor1f2 zo2rid zo2r1il zor1ing z1ormá zo2r1ol zo2rop zo2rosz zo2r1ó2 zo2rö z1orr. z1orrú z1orsó zor1t2re zo2rü 2z1osko z1ostá z1osto zo1sz2f z1oszl zosz2tat 2zote 2zotó zotó2pa zo2vi zo2xi 1zó zóa2d zóá2g zóá2r zó1bl zó1b2r zó1cl zó2d1e2l zó1fl zó1f2r zó1gl zógyö2k1ér zói2g zóí2v zója1 zó1k2l zó1kré zókupac1 2zól. z1ólm zó2lomb 3zónád 3zónái z1ó2ni 2z1ónj 2z1ónn z1ó2nod 2z1ónt zóó2r zó1p2l zó1p2r z1ó2rad z1ó2rar zó2rasz zó2rái z1ó2ráj z1ó2rák z1ó2rár zórás1s z1ó2rát z1ó2ráv z1ó2riá zós1ík. zó2s1ín. zó1s2ká zó1sl zó1s2ta zó1sté zósz1akad zósz1állí zó2szene zó2sz1é2le zó2sz1íj zósz1ü2lÅ‘. zóta2g1a2 zóté2g zót1ége zó1tré z1ó2vó 1zö zö2bá zö2bo zö2bó z1öcc zö2dé zö2dú zö2ga zö2gá zög3g zö2gil zö2gí zö2go zögö2 zö2g1öl zö2gu zö2gy 2z1ökl 2z1ö2ko 2zökrü 3zöldes 3zölds 2zöleb zö2les 2z1ölniv z1ölyv 2z1öml z1ö2na z1ö2ná 2zönb 2zöne 2zöné 2zöni 2zönl 2zönne 2zönö 2zönr 2zönt 3zörej zör2f1e zör2f1o zör2nya zör2nyá zör2nyel zör2nyesz zör2nyék zör2nyí zö2rök zör2p1a2 zör2pá 2z1örv 2zössz z1ösv z1ösztö zö2te 2z1ötl 2z1ötv z1öv. z1ö2vei z1ö2vek zöve2t1e2l z1ö2vez zö2zö 1zÅ‘ zÅ‘a2c zÅ‘a2l zőá2g zÅ‘1bl zÅ‘1br zőé2l zÅ‘1fl zÅ‘1f2r zÅ‘gépi2 zÅ‘gé2p1ip zÅ‘1gr zÅ‘1kl zÅ‘mű1 zÅ‘nyá2 zÅ‘2ny1er zÅ‘2ny1é2k zÅ‘2nyért zÅ‘1pl zÅ‘1pr zÅ‘2ran zÅ‘2r1e2c zÅ‘2r1ege zÅ‘2rel zÅ‘2r1er 2zÅ‘rk 2zÅ‘rl z1Å‘rlÅ‘ 2zÅ‘rm zÅ‘2rol 2zÅ‘rü zÅ‘2r1ü2g 2zÅ‘rz z1Å‘rzé z1Å‘rzÅ‘ zÅ‘2sis zÅ‘1ská zÅ‘1spe zÅ‘1s2pi zÅ‘1sta zÅ‘1sti zÅ‘1str zÅ‘sz2 zÅ‘1szf zÅ‘1szt zÅ‘1tr zőü2l zpen1n zp2fe zp2la zp2lé zp2lo zpor2t1e2 zpor2t1Å‘2 zpor2t1ú z1p2ra z1p2ri z1p2ro z1p2ru zrae3le zrádiói2t zre2d1á2 zre2del zre2d1é2k zre2d3z zren2d1Å‘2 3z2rí. z2s 1zsa zs1abla 2zs1a2dat 2zs1a2dó zsag2 2zsah. 2zsajt 2zs1akc 2zs1alj 2zs1alm 2zs1alt zsa2nyá zsa2pa. 2z3sapk zsa2rán z3sarj 2z3sark 2z3sarl 2zs1arz 2zs1aszt zs1atl zsa1tr 2zs1a2ty 2zsautó. 2zsautón zs1azo 1zsá 2zs1ábr 2z3sáf 2z3ság zs1ággy zsá2kó 2zs1ála 2zsálc 2zsálm 2zs1á2p 2zs1á2rak 2zsárat 2z3sári 2zsáru 2zsáte 2zs1átk 2zsáto 2zsátö 2zs1áts 2zs1á2zós zs1bl zs1br z3s2ch zscsapá2 zsde1s zs1dr 1zse zse2ba zse2biro zse2b1o2 2zsebz zs1e2ce zs1e2dé z3sej 2zsell zs1elm 2zselö zs1e2lÅ‘f zs1e2lÅ‘r 2zsember 2zs1e2mel zs1enc zs1ene zse2raj zse2r1á2 zse2rel zsere2s zse2r1esz zse2r1ék zse2rin zse2r1o 2zsese zse2szü zs1evé 2zs1ex 2zsez 1zsé z3ség zsé2ge2l 2zséhs 2zsél zs1éle 2zsép 2zs1érd 2zs1érte 2zs1érté 3zséry 2zsérz 2zséte 2zs1étk 2zs1étr 2zs1étt 2zs1é2ve. 
zs1fl zs1fr zs1gr 1zsi 2zsidÅ‘ 2zsigá 2zsigé zsi2kor 2zs1ill 2zs1i2má zsi2min zsi2m1u 2zsinas zs1i2pa z3sipk 2zsirat zs1irato 2zs1iro 2zs1irt 2zs1ism 2zsist zsi2tat 2zsivó zs1izé 2zs1izm 2zs1i2zo zs1izz 1zsí 2z3síb zs1íj 2z3sík 2z3síp zsí2r1a2 zsí2rá2 3zsírbó 3zsíré 3zsíros 3zsírr 3zsírt2 2zs1í2v 2zs1í2z zs1kl zs1k2r 1zso 2z3sof 2zs1okm 2zs1okta 2zsolaj zs1oli 3zsom 2zs1op 2zsor z3soro 2zsosty 2zs1osz 1zsó zs1óné zs1órá 1zsö 2zsökör 2zsölÅ‘z 2zs1ös zs1ö2ve 1zsÅ‘ zs2pir zs1p2l zsp2r zs1s2k zs1s2p zs1st zs3sz zssze2r1e2l z3s2tad z3s2tat z3s2top zst2r zs1tro z3s2tú 1zsu 2zs1ud 2zsugár. 2zsugárr 2zsugr 3zsul 2zsum zsu1s zsusz2 1zsú 2zs1új 2zsúl zsú2r1a 1zsü 2zsüg 2zsült 2zsünn zs1üs 2zsüt 2zs1üz 1zsű zs1wh zs3zac z3szag zsza2ké zsza2k1ü z3szav zs3záp zs3zás z3szem zs3zené z3szer z3szé zsz2f z3szi zszind2 z3szí z3sz2k zsz2l z3szo z3szó z3szö zsz2p zs3zs zsz2t2 z3szü z3szű zt1abla z2t1a2bor z2t1a2cé z2ta2dal zt1a2dat zt1a2dó z2t1agre z2t1a2ká z2t1akc z2t1akk zt1akn ztaköz1 z2t1akv zt1a2lapú ztale2l zta2lele z2t1anal ztan2e zt1anes zt1anké zta2n1ó2 zt1a2nyag zt1anyak zt1anyas zt1anyád zt1anyám zt1anyáu z2t1anyu zt1a2pán zt1a2pás zt1a2pát z2t1apó zt1app z2t1aps zt1a2puk z2t1a2ra. zt1arán z2t1arc ztaso2 zta1sp zt1assz z2taszt zt1aszta z2tatig z2t1atk z2t1aty z2t1a2ut zt1ábr ztá2cs zt1ácso zt1ág. z2t1á2ga z2táli z2t1álm ztá2raj ztá2r1ass zt1árem ztá2ris z2t1árnö zt1árnya z2tároc z2tárokk ztá2r1olt ztá2r1óra ztár1sm ztár1s2p ztárt2 zt1árví ztá2sz zt1ászo zt1ászt z2t1átha z2t1áts z2t1átv ztávi2 ztá2v1ir zt1bl zt1dr z2t1e2b z2t1ef z2t1e2gé z2t1egy z2t1e2ke. z2t1e2kés z2t1ela z2t1elá z2t1elb z2t1e2legy z2t1e2leme z2t1e2lemm z2telemz z2t1elf z2t1eljá z2t1ellát z2t1elm z2telnö z2t1e2lo zt1e2lÅ‘adá z2t1e2lÅ‘c z2t1elr z2t1eltá z2t1elto z2t1ember z2t1e2mel z2t1eml z2tent zt1ente zte2raj zte2rak zte2rál zte2rár z2terde ztere2ot zte2repo zte2rill zte2ris zte2riz zte2rosz z2t1e2rot z2t1e2ró z2t1erÅ‘ zte2r1ü2lé zte2sz z2t1esze z2t1eszé z2t1eszk z2t1ezr zt1ég. zt1égb zt1é2gé zt1égr z2t1é2j zté2k1e2l z2t1ékné z2t1é2le z2télé zt1é2lés z2t1élm z2t1é2lÅ‘ z2t1élt zt1élű z2t1épü zté2rá z2t1érd z2t1érl z2t1érté z2t1érz zté2sz z2t1észh z2t1észn zté2tét z2t1étk zt1é2ves. zt1évt zt1fl zt1fr zt1gr z1t2hen ztia2g zt1i2den z2t1idé z2tidi z2t1ifj z2t1i2gé z2t1i2kon z2t1ille z2timá z2t1imp z2t1ind z2tinfe z2t1info z2t1ing. z2t1ingá z2t1ingb z2t1inge z2t1ingé z2t1ingg z2tingu z2t1inte z2t1inté z2t1invá zti2p z2t1ipa z2t1irá z2t1isk z2t1ism z2t1istá z2t1iste z2tital z2t1i2tat z2t1íj z2t1í2r zt1í2veke zt1kl zt1kr zt1kv z2toi z2t1o2koz z2t1oltó zto2lyag zt1ope zto2ras zto2r1e z2t1orgi ztorkész1 zt1ornam z2torrú z2torvo z2t1oszl zt1osztá ztóá2 ztóigaz1 z2t1ó2ni ztó1p2 ztó1sp ztó1tr zt1öko ztö2l z2t1öle ztön3n ztö2röks z2t1öss zt1ö2vez zt1övig z2t1öz ztÅ‘a2 ztÅ‘e2l ztőé2b z2t1Å‘rh zt1Å‘rl z2t1Å‘rs zt1Å‘2sei z2t1Å‘2sö zt1Å‘sű zt1pl zt1pr zt1ps z1tranz zt2rádá z1t2rág z1tréf z1t2rén z1trikó z1tril z1t2rió z1t2rü zt1sl zt1sn zt1sp zt1st zt1sz zt1t2r z2t1udv z2t1ug z2t1ura z2t1uru ztus3s zt1u2tat z2t1utu z2t1új z2t1úr. z2t1úrb z2t1ú2ré z2t1úrh z2t1ú2ri z2t1úrk z2t1úrn z2t1ú2ro z2t1úrr z2t1úrt z2t1üg ztü2lé z2t1ülés ztül1l z2t1üt z2t1ü2v zt1ü2zem z2t1űr. z2t1ű2ri z2tye 1zu zu2b1a 3zubb zu2b1i zu2bu zu2c3s 3zug. 
zu2gag zu2gap zu2gáru zu2g1ás 3zugb zu2g1e2 zu2gé zu2gin zu2git zu2giv zu2gí 3zugo zu2g1or zu2gö zu2gü zu2gű 3zuha zule2 zu2lel zu2l1enc zu2l1es zuli2ná zu2lú zulya2 zu2ly1ag zu2mab zu2m1a2d zu2maj zu2mal zu2mar zu2m1as zume2 zumegyez1 zu2mel zu2m1en zu2mél zu2m1é2n zu2m1id zu2m1in zu2m1i2p zu2mí zumkész1 zu2mol zu2mÅ‘ zu2mur zu2mü 2zund z1undo zu2ne zu2n1é 2zuni zu2n1ö2 zu2ral zu2ram zurat2 2z1urn zu2rú zu2sad zus1aka zu2sal zusa2n zus1any zu2sas zu2s1ág zu2s1e2 zu2s1érté zu2sis zu2s1í zuskész1 zu2s1ol zu2s1ó2 zu2sÅ‘ zuss2 zussz2 zus3sze zus3szi zust2 zus1tr zu2s1ü2 zu2sz1i2k zu2szon zu2s3zó 2z1u2ta z1u2tá 2z1utc zu2tol zu2tó. zu2tób zu2tói zu2tój zu2tón z1u2tu 1zú zú1dr zú1fl 3zúgá z1újd z1ú2jé z1ú2jí z1újs zú2r1e2 zúré2t z1ú2szás zú2té. z1ú2tér z1úth z1úti zú2ti. zú2tiak. 3z2útiaka zú2tiakr zú2tib zú2tig zú2tih zú2tin zú2tiv z1ú2tí z1útj z1útn z1ú2ton z1útró z1útt zú2tü z1útv 3zúzá 3zúzd 3zúzó 1zü zü2dü zügy1és z1ügyk z1üldö z1ü2lep z1ü2lÅ‘seb z1ü2lÅ‘set zü2ni 2zünn zü2rí z1ü2rü z1ü2te z1ü2té z1ütk z1ü2tÅ‘ zü2ve zü2vö zü2zen z1üzl 1zű zű2csa zű2csá zű2csip zű2cs1o zű2ra zű2rá zűrés3s zű2r1i2z z1űrla zű2ró zű2ru zű2z1a2 zű2z1á zű2ze zűzé2r zű2z1ére zű2zo z1ű2zÅ‘ zű2zs zű2zú zva2su zvárosé2 zváro2s1om zve2gya zvé2nyel z2vit. zvi2tác z2vitj zy1ak z1yar zy2be zy2je zy2ne zy2né zy2re zy2rÅ‘ zy2sa zy2so zy2tÅ‘ zy2ve z2z1áll zzá2r1ó2r zzát2 zzá1tr zzi2n1é2 zz2le zz1ly z2zs z3zsák z3zsám z3zsi z3zsí z3zso", + ["compression"]="zlib", + ["data"]="xÚT½Orëªó>¼•³‚TÙùnH¶\21Y–„ô\"ìϱwq—p‡\25dp+U\25¤2K}ýxþ´rÞÝ\26„\16‚¦i\26xjö¿žšSS¾ÿ-;ã}ÙµDßÿ®»U`¿>?€v÷ï×'¾’n7#B·Ï\13Ƚ2:‚]½%7Sà\\„˜nwŸMÒÎ\17ÞÅØýù\15`øþ÷¸;\ +ìxÀ°?ãŸ÷\0135\127߯ùÉpÎÙèÏ?\12¾î\22бÙw;ÒA/V\25ÃîûcƒŠ^áÒ˜ÖŒ\2íÚŽ¤Ç“Çý¥f?\1]˜ûÊص£®ÇHoÞO|–ŠcÜ£,\7£6_„¾_S€üƒŠP\15δkpWêæýèBI»‚WO¥ç§X\26½ÝRKfd)ã³äfŸ\\¸\21®\22\0(ļ?6Èo>>ûÒ1÷Çï×QàùÌW'š\31BÌ\17è,:;î÷‡c®|Ê€ì\21~Ÿ‡\ +úQŸŒ/ÿh}…\23©™Áßq­ßn\0êv*W£b¤¸µ*Ô/\18h}\24mñâÍ¿ÿ\29ùLÐïWÑ^×}W\31MT˲~¬\31X\12û¤\27æýà\20>A󮹘\14\5ÅB¸\31\9šú¡Û`-f)\127\25\25ý~\23¬o\26`\12°w´\0193˜[‘];‘~¿úºÏ¤õ‹¶¾u\14:™®¤õ\22¶O°Vß\\ö¼{}ă‰òaƒû¬ròEÿ\23žQsO\1\6\3\21\0258É \7c5¸‰ú3K‚ñjЛêe*Ýç\0Jß¾Þò~\14\16©~8èêë/_ÿùï×Ó¡âC³?ÔÌd|.`¾<™­Cg1.\18LÅ\23-è€\15'Î\0167\14Q\"5˜¯pؤ\7Ð5“VQ±ï€ve%£æ©z|`¥ e+>´û¦<²\14\28Úžÿ»!“¬Åô1€Vá@I<’É°*.Ðî¾ßˆ÷µ&TÐ\4Õ\2\4¯Ò\0234‹zUšNTŸ\6\8ÕŠœ6\27P†\2ñ½æ½Ûa-´z?9ã=Xû„o]iF3!M¦«i\17e\27\16\24\5vó‰÷æýúýÊX¹(Q6´CmKµB\30®µEáÓ\28kرMÌû†ÞÉŒ’!B-¨²ŒŸ£¢~Ÿô²Ç¾ÌU\4\28Ñ8Ž+> (\4=¾\27°kèq­É¡K¨\5€osjØ\3ÕÎrîÄŒª€ÇËÖ«±¶\26\1>“€‚a¯)Xo\12|\30QýÄy''í\20¥V˜S}/ôT'¼ACJÙ€ëÜ®\2+\2ú}ó2‹\14¾\30IŽ+\ +¨‚ï÷‰¨>ešñ*ߟ욑^{`ÇV\9$L%¨F•\\\17Öí!\22Ú®>\16\"¥…ØÝ‹²([vÞãŸ\127²ñM\0202”‘>\"ö\23>K;Jz´*0\5„ò\2WkÐG\ +0\7¸\17è±(Þ£h‡êÚŽµÅÝñŽ\12:‹^Š¯GŘ\26SE¬€oN Üµ\21´\27Z7\20\9MU¶\26&ÓÅ4›®m€-îz\14VoFܼþùÇœˆZÚ\0ì…\13¿¶4Ö[p\30\6‘Å[ûƒ\28vãcku9©\8_O­Á¾k\7óéà»PíZß—ÌIÛ}K€\28 ´aÂÊ\\¯Çõˆ¿J?ørê•Ñ^©÷Ù|–\4Å\24¥4O¾ÿõÝ\15g\18uu\28k%`­˜U7æa0U\18³3ùÎ`ÕIèh~õ\15\127ò\ +XÛ‰Î\1ú\0—§\13mñ/?7\\üi*L[x¢Z¼áiÃ[ª®\0\21•-fÙî/?é—ôÃÜžôýµqÝ<>HÿüCñU\25«H\17¹²\0¾\\\0_ãS€s€>À%@\ +\16ÙúÒ³ö·ZM\5߯\29’ZÙw´Uh²7iWh¼¦ls«\4ëÐ>ý}ï@U±-=¤ä®C\6oÖcÚßzÎCI<Ôž¥@Ôš¼B\16×çKá«\20mš´ŠÉž°Ý;JK•¬\2\13\20„¬úˆ›ˆê“*dìvÏú8üÀö\7þÜO¹\0210\27~8cWÒs\13ƒþòzqŽ/ÖŽ^5€\0…>ó:8|à{£aJ­«\12ê‚•B5{µªû:V\13žï[\27Ï €~h\21r‹g,»–i,\28Þú©\21é=*`ºÙzÝ«†\6cÀZ$\23céì?Ú\7nŠ˜;—zÞŠ0ÿUX™\"CáÔÚÀùò3nÍ)@p\\À•î×õá‚Y9¢ £\16jÏ\4kÄ}U©¬\24\2€;›rÔ\7„:ù‘oºpÖp÷\23[jýtV\18€ªÔ~˜÷\26<=æ¥Þ÷R•\3´éJGÔÉJ“¾}EUçI\5he̪ä\20]@©{©åX{\30\1?‰<܇¨ \1'V\0!~æ—~DOÅzýÒW5\2ÉÌ]•\127u¸Ž2{™5\22Å]\31{¾\25$Â\11\5‹4\24´Ä®Ù?Ð]wRú\9ö\16\6 
[... remainder of the zlib-compressed binary "data" payload omitted; not representable as text ...]
L÷3”V*å9ö›ô\20PaôŽ§%h»\127bŠ´¸t\20Ñ'rm\22嬹Ø?•\20[,No!ÁmÑ6]Åæãk·?Øþ°?ýþµ;\29~\14ت^\9¶¥/•î_ÏC%õwÕ\17\21·p t:°Ç;\29l\7VÁ\31œ\12])ÙlY5Ò‰w¬ë§\3NvÞ#\9€Q`™DkeTÀ\23b\14|\26ê¬ÊÔA¶\2›(\22I\12 ¿%¬9Qt®•‹5æ4Æ\28ànàWÔÿ\2Êl\\\ +ryÁ.™JnÌû…~K\16¥þ—úE#³ÄJKÂä&ŸùÚà\7!eKóFóüM\19†¤J8$1n$„Õ\6\"jQŒ¦\0þ5>*ÐIqx!+JBÇ))0wŽ\14ÃzpNü%Óp\20\31æE\2›#þU„Ýâ·c:\7°8[Kf:šU\4í6\2°çP)|ú£ xFÔ\21\17&.ŠÍ¨SSq1ì\0113\\i\29\ +H1æ31”ºö¬v¥Â™²q€T«/—÷\15HŽ2Pý+«þÑÊ;É0ú\17é \18JÏ\31’¢’Á\0009\24w\127ÿy}\0134\5ØoB*a\24Ì­J2ÉËžQo°]\3àÿ%\30Ö+°ˆ^õŠ\ +†Y´ÎÛÆHh\25G¿\4Ý Á\13¹åZ\26ÉJR\14\"u$©‘¨Ò½£d¨ò<\29T¥ol^šš\14<ꎄ\9¿á=o\3ÿäÛnàŸ|c–öoÿþ\21ù\20ù\18á\15z«ãt}̓ﯳÁ\29&·'×aØFÔN\31´Çêx\5\23žl,€˜\0040•6bå«ñg¼\19þV\15¼Ÿ÷u\0041Ðcó\30nw\5x|žysð ì2ì\2\13Ù`qb˜LâuëB\2­ä)t’•¡ò\20æÞ'Ûz\11tq\5}Av_0àƶ^±ð“\21í›\9hÔý§\15ü§–ý\11ZʯÓ+ŽÍ<\29y\ +ÙîT“ïö‡\3¯xe%/¼¾òzäU‘ðÏ+9ñÚózæuàõÂë•×™W6Ç\14>´pͼ\22%õErGëâNÙz×ñ]U¤:uêÝ;ž¾/ŠN¦\27°\29\26÷\23æ÷¢¢ïÔE\27LW\3¥ óÃ͹\11,~ø\0199RgŒû\27\9s6â_^Y\14\19ÿˆ4±\0\19\4ñS\23\7æ\25õ\1Î\6Sw\15Ö•Ÿô\7Kls€Ò\0021\21Fç×ù(9ƒ9¢¥Î(³¦ÚÙ\2°×mïæ&-G{&\11a¨yhÏ°`ÉÌ]¼\14&éÐú\28µ}méò\19Æ+KéÆ\6a,Æìa\17Í€Õ§¢;^®cØT®8†Í(\ +’f/¦†Ö@U®:5åv‹™â\11hÎ\18hhèÒÐó‘ç\19¹¡ÒÐ=PûNt\\õ\31\14®\28ìFºéó¸D]ù\8ði€©“Ab9°mLÅÁ7ÑZûØ\"æ\14‘f9}'`Ûœçη¬^ó.£.j[Щãù\28'Í€¦)\16ÛAR\21MÑP\18…`ƒK€9\0¿\21;\1A°ÄÀLeþÁ›uˆ'¬wêb\"\3„!¯ƒ*®vÏ52Ïš\7Ápt\12Ð\7˜\4Ð3ˆq3P—ª“¯\25›g€\7+E,ˆzq0óÔ ²\18‡ÿ\6ò#àO>?6â\127ý:ù(R\0038\14}B=‰es±à\18餓%õ…líSC(\18\30Bxu4t#>;²\11\20©Jh2ÚPÊ|W\9Þ\22ùÀŽr$\7§B;1‡Ù4Ò\26²\25wQ\14l:\24N`q\4ü$\31Ø&À*YS§B™¹.-‡8Q\11èzx1Õ\127¼z,\20º44\7Z\29ýfº9 Ê<\13MFYÜ\6$Dǘ¿#|ßbº8¯®\5×®¥Ë.\3\7b½;óW/ß|ºRdmˆ`—Øu(Ápb.:l¿Ðý‡é§ù7½é{\31/úV\6y\"Bÿ„?¸) ßÊÍúF‹©ÓÇ6y‚Õ1pÖäþ„Mmµ\27\0168™\14¦“i\18ý÷Ÿc\0008‡\17b•Än+\1mÛá\11è¨\24ˆf9\2n„²jQ¬ÍÍ€Æ\30@åð§¶$ÖÛ»jÙƯ۲S® ~\28{ÌJ\6Š\21 ½Èx\16½ŠÔÿpšÐÞ°y³J\1§Ú[õ¬±»SM` ¤2HR\25(—\12’K\6Ê%\3%\18°j¹\13Ô·ãn\17ïCä\19A»•7ëñ’‰“MN‘\6D\2Jâ¦\20¾Iî\14Ä\0087PWï(†\14¯ƒ\5¸:Ý„,4pWY¥»£ÞÁš1Àx\21×ÄkƵ–\27o\ +\"Ÿ&^ÙýÂûˆ[Î\15+QB ï¢ƒBïxÝ„r\30j§v<ˆŽ³iQð$r%QK\28 ('FƒÆž\1\0293?a¼\31ÔGjˆ\28|¤§“\26ø\"u\19¤£©ùP‰ŠQÌ`=\3Z\28rS^>E¾õõW~.V\11u?×tö'(®êE“–çÒà\30£ê0S²\27x\12\12)\7×a~«7\11‹é\15´('¸Î\0305I#„¸*\30ª0\17ç\20DKÄbö4\31\28¸@^\11\127Õ¯á´\16Ûº‘·TÛV%2ü>é\20ÑzË\28Âz\0151\11\7Ç¡ì\\cet\0Jð\3m\19X…\22d'U¯r0z?\7Hü\0298W—\12þŒzë·`kMMc¡ì\1†‹­p\\\28¸´è¨7ЛæŠÃMïü÷ë ¢šýïW'r\18é\29øWt\18YÌý\20Åð\8ºŠèý<ŠôæØ_¦lioê€\0:¦Š\9b\0210Ð7þõ\12êozÍ\28ÁÿJí{ÚÙ¥\17‚X\17x_Ó«\18>Ö\"NðOPÿLmN\3¯7\\ù„fŽõ:ãÚ­¸Bk0É9}\5‹4Ñ\4`Ô±µàz\27k¾ê\23/ìÑ\22¹%}%b\19\19\7Ò§Ñ\"T\11réÿôŠøW<È,‹ÜÝ‘ýIr\19\23\31²œaø¿¡\29-µÖ\"ùÑ’ß²·Û\26\"TãEž\31pÏ\23À—\2ï \20e,¾Ïú’eÂ\4ÈTožÕ—àÄÀã\"ŠÏÀ1v\24ê—ùàé5Y˜ \9,\ +›u\15]\20ÓÂ\31Xt:Ýiñérˆ¢³åÌ—ÍG¥yäÇÌ]¯À\0143ÙEr2ïõ\16EÂe¦êi™eÿÀpöO\21 jð^ñ\7eƒE2ïF…-Nsq&]\0008ñ¦ÆÊJ:3slTü¶B‘\31\28X¢ñ1ü£ùq`F\19›øR[±ÎR\23KeŸ$6-P\0161»I_I§£¢ÈQê[á`ãŸé\30O¤‘„®\2\\å\16Ê\14òò\1o†ˆ¯2IÔ!,t4\0?¾\9vQ5\2¿7Á@\0±‹Æ\ +¢xct<€üh54ìdn넯¶V4Å\15·Ö\15,\3‚\9yç“\30\8Ã>§ý‘\ +>p$E|Ö9óoJ\25ˆôž_(||ZÔý´¨û9±Nã~1ûÔbÿ''[ˆÆîà“MžÁ\16¶>1Ù\"õÎ:Ýù§f\17E\8…þ\12Âe»J±6G’H°7û„S'‘$’ùÈ;t\2¤˜Uc\3žž=)Ú®×m?‹Éaµ\18Iû¸/|CíÃ\ +£zh¼ƒ \25õ\1Ö\11CVEX™\28jØE\0u£vòÎ\0287¹Žä@ê\"cp\8EÚ\26àmæH-CÈüß\127°ÔÀ[eJ£Xeo{Ýn“éjª<\23}07°Wº±L~Vþþ•zç\0047ë§ZG°›U\20u‚€U\1h~ÔrDEÊ Ôj`/﬘\0pAZƒ†Úóþ©×3â\13=oV…à-Ëlu›\16’Gå\127\5¹Õ\8yåˆL•]ÞÃëÖ‰öø§ÂÒ”©äÉc5\13\3OH¦Ž†…³?ê:RMJiªu²¥\22)zd™\13|\21Eþ\22÷;nákôTëê­G_rK\7uÎØ•\8aªRô\23•\12ºÛ$R\17¡Mßä[à„³¬£û\1¦,R\27!ªÿ×…¾UÐƾhë@Ä9Á—çó\20ÄdTsÂO<ˆ°Ñ~K-†{)²„ ë\24Ýh…×àè4þ2?~pÁ„å[Séoùc`0Ûï7ë\6ˆÞÌÍ›¤‹ˆ\31»J—B“ÚzŸ(ÝüÒiœ·é¨GVn}Xy¢\26“¨VåÚ\5ü·Ö×ÿr\4ªä$²Šd’¼ÙE4XªçÿÝsò\0Ê\22õ_œ,B\"{äÓ\29Ã\127F\19ØÕYξÿýk×\31~Á~ôåµ\18\28VÍ›ü«þö—\7oêl¢ÿÕc$^ã¾\28\26z©1\26#À:^ëýþ¸ôHí¸ ¸\16þÁAqO˜«ð 
§o¿\3Œ5\22º0ÜžNxyí°z˜B(J߃׳'Çíã7C\0317ÜqÑ.Ø(A0ŠøMpOØcqäŸW2Æ×_ýþð\7v\3õ\21#6oÕäkÿ†»¹No\17§0ñ\17AÓ\1’Ó\23Ò˜˜{\24SLk=\2ÿß\127îL{z\17yÅsží’oœ\22Öœ?@+ó\14\5\9ó8ݘÇé6‰@5S£Át½Þ_}Ø\14¢^õ\17×\21\1óAï«€ÙŸ_ñïæ}•L‘P½¥F^¬\14\20'‘ÿ©Ÿ\15!\18ìZ\1øüÀ_We‡^\20+G5ü\17†\12ìFḟòDÕA¯õ©z»\30~‹ðŸWú*’IðgD‘*g-¸]Fq?~›¾˜\6ßO}¨p×U\15'æ\29\0070U¾©þ\5\22C­!cCUòë\15ÚÌQ£a¤Æmmg• ÿ\2ÁØCŠuïž‹N\3銯â\14,SþhL¢\24«¶\1вˆ0òæšÊmA³€Jß;r\20ÔeFÆL‹%G#ú\ +`”®Â+‡»\2 ø\24pý†X9/\29\ +\3–Él­å<ËßX¨jíy43kóžMl¯ÊucFîÊáýDÜVîµÚ\31h[\4AàÀ\"Üð6ÄÚ\31Tží(\".”µ5\22\27}-¦ÂØ®7õ\ +‡­NJIf¥ñ]{\29DÙÿûÏ\11{’úqØ\19LF­[äÔ®è\15\24¯5ÿõšupR¹\13ê1)$R¡šf\5Ç:²âþ\8}\18\1^C\6\22\7„\6¥rÄ\127\7EuP\20¦±?-Ê@í\28{ž¾4š\22ÓÍá½³Þ\15\1Ô\13iÇ{Ó™8ç\16ëŒêS¿§ÎRHœ3,\17\9èõ\19´D|bz5­S\31D˜Xoê£uîø\0071‡ì\8Óï\0j\5@7ƒ:@±\24§\7Ÿ½ê£®.Ÿ«{'\ +=\127ÅR>f\23%Îj\23LWÑËcf½Ær£ÿËÌcF\12¸\22\1Œ^GÏu¿Ð“ÍìÃÁ¨B’\2†Å\12\127\23gJä,~7—8˜\26ä>€â{´?Åør®ô3gù¬\16<¤ÆË\\úìÝŽOè\23?†\11Ó\127pË¡C?\9Vå\22[\17zÙ\20ö\1.Î?\27¬í\13gÇN¦ÙtÏRÔ,Ì ÄC÷–Ž²“Ø—0Œý‚ÑÞ¡ú\20®á‹fS¿‚'\0132æpòð¿7ÊcC‘ë!^ëZ•P4\14ó/I˪ÄqV¿C2\31KþW{ž6â„TtIM\14'{ˆ–Ä:‘n®ÈéæB½E\27K·sC“ÆAãx$ÒðWã¨\12ƒ›ßþ\21Ùør^]I`Ž‡Æ‘U#uÈ\3\17ÇÔ\26p(n®YV”\1ãïgmÂT®²[„\"ë¿òD\0ƒAå›Ý\1e7K´Î\18™¦ö2XEY,/‹Ãº>À~jH\127¥t³\31Ó€Q¸²Ó‡5Äï'T?VNSwU-g}nñØT\\feˆ‘\0³:Ƽº5\21ù\9åÛ>âC>RDßù÷s‚À\7q&©Š„\127—ÆR\3Ë\14C$\30º«\18í·¨Ë\27Õñ\6¥…)›»%Ù\12\27Ý\13x®nƒúÉ„Ý“{~Âñ\9ç\0061Ñnìô„~\25ÝÊèk!&-Ù1Tï¶Á-n£Å:‘ÛÐÖz0m\0235Úœì_S•ß¦æ\3sESÕL€Îàæ7e#2\\\20ˆDëýKÇ\ +÷\"_\2Ðrô/¬™/°zÆuá\21I½`¹\7×W\6Ôo¾2-h’W‰êG\28¾Ò\31¹ÂÛ\31é\30º§\4Sg®\2ø!G\28øoŠ­æ\6ç\0)@\9P\127(Ì\28‘á#Œ;p]\16ˆÉc3ü âîk@w¨\19k\\{^k6:êJû=\14ôÔæµ\14*U®è íºã\17u›\27\18ç\0UÞ€±J'‚š\14:ˆ¨&ã\8À\26»g\11’AùŸz»\\\17©\127t\"z´\127,ŽõÀ\31èè2¤ˆ\127†® ‚‹ï±ÃÑ`\0080\26ð8•žàbú\12Â(Ðí¨¢\7kÌüÐ*caG%ú¤ŽžìÉ„0„[febç#«\0296ÞnzÅÃÝtTV ɉ¢F¢æ$Ò³\20'ØI0ÙnÄ\3±+^`\8ŽsÊMòjÝìè \18oëfe’F=\6ø\0276²a*weøÝß\1ýfCµ˜\26œ\27Ä$ê‰SÃ熆ß\13>SÀX\03187T~pýß&nTGþ\1~7Ę‚5fÀ»ž9w“ÒAm0PÅœ\"7ÃÈO‡q9ï/æ_:SÖ:\2gd$à\14U×SxÀ\19€\13\12cÏ­ËdLÁ+\1n-\19ßþ\27ØKì\28\\ãÁù·£ÍíKçV®ó³ÈæÈÒÜ^_§\2\17º˜õüÑϬ­¿DS×Ê\"éû\9Û¯HÏGrd\ +º\127£¥±Ú;°‹ÁÌ­k¨Õ\"œ\5Ó˜\31þ{<˜ÀÏ—–¡\"ùY°}sqC\3ó;¾°Õ—;ó&Ø=™ñöû\27ÚÚ\15ØØ«åøT´j¾—&\14F.‘¢B(Ql%:!˜P+$ë‘Ò•òû\9Õå\21œË¤øèÊÍr\5(ê®\8ŠYîˆ!H±Í~»îa+¸¶Îû\29÷\23Ó.\2Z\"÷³i„8Ñ»;ÿ;¥4ùKÁKd¦C4°‡†}\2\15¶}é’ÁÄÑ\21Ö›µ\7ºÒÜK\0½ØŸ+D\2Ý»g’-'ŸåØs]^u‡‰søŽ«¨¤òx›u{Ó­‡-É\12¤\5µX£È\12a¹\22ÞÜQp • Æk0zG€hÔ5kö\6Á%µð5<Ÿ/àÜ\16ž˜a¬\"Îà\16ˆÍÝL\29ŒR\7\17Žýp)t`¬eUäÅ_Æ.\24îz`,R\17Lœ+Á䨓’\0±n”d°¤ƒ¢š\31g¦ýИ2óc\22v€Ë+¯\28¥\23~†d“%õ\0033°à-š§Ábþ(¢;ý˜n¥È³jpH\7¦›üG¨+ {òí~\14°\26ø9þ«DAÏ&–(}[]öø9Ê\4Ïô>8rÁ‡¥£\26lêP•á‡ß€õ^„v\20ød¤©#•;å)üS\11_C‰\18Z\30f*úùQ¦Æz>¡h9ú>mY&\24 7vyd\11Ê\19lšzYe+\"ÊeÝË54ï•í\28C:PÏp÷½\16$E-\25Áë†è…éÞ͇ý\30ÿ@¥Ù”Õ\18+¶|`cuä‘I j\ +EÇ[ö\29Žn1#ó–û˜r-\20ì÷ì}ê3\30/4ÊE4׶6kïž'fô?Ž‰Æ#\31{³ ¦Bš¡Àèh›„êÄ£]Q¡\ +T\5//ú\7åVü$–K+buÁF”\30熠'îê €›Åÿ\3ÖË59Oj6\4jë\30B·\4Å8t\2ï{\15\9u\\Ãtù\29g׃}ÜQ/ûî\19ã…¤Å\16J8_­\8ò/élê\24œ\28#æâûOßÃäDèÖ^õEt”.é½\29\3OL½Ò;€ïuÜ$\9'Ûï´\26âwÔ™Úï†Æ†æ†rCÒ×¼ŸÏ¦\14¹üfŠ\23vîâ8Æåbš\25\0033>&\5éÕ€!•ìrp¤Ž/×^óx\21âı\23Õ'24E†*Ú\28ùÝùž ‡x¯#|ü\9#“<\0ÿc@m\19]\11GÈ\23Á5þåu§Õwt\ +²8Ö#\3z\27\30“r\9ÎFÂ8ûÓ\9`\17°—ãÍ£\25}0¦\0s€\20 0Èùl•Ù»$\0¬\26¼ÏN®ëY&òTi\20\9Gí›[í\3*‘æû>²â\0268k!\4ª\2Ý~DTÖ@©ôßi9Éì¡Îí¥Px\26;\11\14Ç\0Ù\0[t{›7÷aÞ܇yó\31lÔR-Z£ð×Öè™Iq\\î«t0ö\24ä\ +¿¶¶µºm­V‚Ò\15Žã,NÓ_·:\27«„á\26\21:¨÷\21šË»\31¹9æ—i4±äœ&ÿÔt\24M•·\20ÿ6qmæÝ«\12ï^ex§Ê‡ùK¯n‚éè'õ¡\26ßñ_\19ë:MØUNiè\\ù“ÿ_Ra'.‰0®\31½*xñ;?ÅÅ@k0\ +pè°Q˜À™m2•Áôb:™j°\22ê\26\26\27º\27eÓ»¿Î\29\24ŽÀöwß\25\15O\\éÁ\31çÑÒ=Ý2©ì²›`nõ;?ër\30®féc¥²×Qžbˆ†>ÝçÿãEИë8ñÞ‡ÉEø‡Ë¹¸ò\0217ú\18ÝO‰^S³\23žÚ$ê\8wÿ]ÍY<Â\5:òÃïR†éÀ•†æ†T}yÌ­Y­«ÛÇÌ'λջNÁrÂw÷Åw'\127÷ãwgöînõ\30™V\1m®Ã\27¬{n\21¯?[Š {Ù•\19Õ*@K]Î'NT¹ž¨l=Ñü\14¤¶%ÙòÑÑQ\15û+Æ’\1ɉêÏ\19W¶x·ˆ ‘ž¨—åRxmM=M¨{¬Û‹É\31\25¨¾‹Ú4¬}éÿã\9\26Î!\ 
+ÄÔžGÖÖÐ\15ˆörÝP«y\29\30ñ}0i9œL\25ŽŒõÌP\15óG>Aá“kÊ\127ú\7\26?\8\21Ï\0«\8¾\15\11Îx\20÷…„]:éÁZ-ì\31öé‹ j\9A•Pç\7Þ)*U„uŒú\0\22üê\29&@ Žõ!òIâ\4kñý?˜ä\20½ñ{rV¾õ\12ä…óÂ\ +ÀÅÆAýþ|Àê4È‹H'r\22¹ŠÌ\"I¤0­Lsʨ³²]\127~ÔK-cnÜèµq£Î×v/{Þ½ˆùBn¥Š‰µ!î\20Á•‹ÕÜêÑk«\7\30?ñæ”\20£WŒ3¯è\14´\1\4äÂ+LúE3é¤×N~-ûvnBÁ•ƒ èƒDy1\15…[\9\ +wÐöVÐÄa\30þKrç\21òÓ ƒ±:{ï\14\0014Ø\0õˆwd3\28»K°\23\3eì8¨ç\27öÇ\8à§Ò\2lè)Ñ€ ~ƒæ,Š\5#تò\14Â\27ŸÅ©¶`À2´\0239‹Ì¾…ì1ô”W\6N‚ýÒF/È[~(%ò‚¨é~2¸\6cî\2©<Ðó‡E€Œ)¥¿º]t+s\6‚OÑ›¢}™ËRïQêø´‡>ñ¬’>㋇KÇÚ\5Ý,\9Ôp\3{<\28·ÒÃCËo‘™\4\ +„úØ\21[µtï\31xÅ\9@àhÈÀ!-ý0Gu›U‰gõ\31\8Ÿa\0ÄóŸà`é$\30Œå*™ÙÉ\14RÚT²ˆÜ\20iå\19Y7åÀdÊ«o;S”9èYD\9Q.¨Dâ@\5Yä&r׳&üšeV‚ˬʶ̬ôP\17Ô\1äã ¢6ƒÓùH:‘“H/2F¤q¹\ +L\"»U4E\4vÛ\0ÅÉ—\8Ñ\127­ô[T%‰ÓmªH8¬(âU¢\3)\31\3`žaLÂèI–Z:¼\29\1\3„s\28—Økÿ\3?8ðâ8ÝzÍ*à*¹æQD­&+弊”hMžä\15ûPØ\14‘™¼±¦\22T‹ý …2\8W+†\2UôP\ +«a)úï<¼N€ö\17\0K0Ü\3l”:Iïæ,˜\18cªúW\13\17‡ñ\28D(±ýiÔ_Úæ‚ÎŽÇ\25Ã_\14üÞ²CÚ‰\12\"‹\3? :þŲ?…{%LÚùV‰Þ\23ß~˜~“nŠµÅ-Úì\8ÓCê\6ÇpZápøs¬ÿ\6©tíT\9g¥#\5\25®;âŠ\30)ÕŒ’j¸]¨§s!\0264ÊÍ\16g„#\21ôHŠ_+§ì nÞI\6Ý0Ý´è†éÞù\12?i²iæD\19GÐ?¸ÊJ¤\2Ÿ5[AÏÏ‘…+\7³©Ã¾ô~’òF\0Ù˜|h•@œl\26we\22„~´¦€ÉÞdßäµjáT–]¯¥µZ\8ì/+…èÄE\22Qé2\5&ƒú}×È,AgŠ¬^»™E~…®¸þîë¢ÛLYS›Žznƒ¿ˆBû^\3×ÎlfŠ«î¢gSØ`T\2\30)EÅY\ +ÙYv/b\20Ç(ØdJtÇ‹!ÄÌݞɨºÌwÙ\0åáÊkç\30bîàö«Ÿ5,qÆþ\127æ\9Žâß©_˜iFyx¯Ï£z•Û”VÓ$Šä\7ýÄú\16ç#ЭCS/[¼™Ûõ‘ñ%\29L;S”/æò£\8Ò‚Ýz½âT—ž›˜êU6\0073\14ÜC¼ï^d«©Ô_·ì9\28-û\23ö@Ë\11…‡¥Ê\12_àRƒˆýgµ\24—Q&.¼\31\20M;+\5ñéˈ\1\24ÛÑ\18Ȥ™+OzeœetdÔ3Ò\18ŒM\17eñ\14wÙ\23d`j/€áŽ¨ºH€ÚŽ–\9Gø˜v¢˜?×G!,.:®A\19sl€›\22=\\< \19å@\31¿îŒ®ž\0204#)no­ýî\2¹y¹î_l{\19°\24v{ÅR‘B¡ÏÍp´PÓÞ7ˆ\12ËL]ã2‡ŠQˆJÔe>\14æ@õ\1º@\127Æ(˜\18\0ðóY\127yß³\24fmFì5\2/¬FÈÅì—£ß\\¸Ë¦’:†Õëê\2e³[䄵Ò\3eÞ…®VÉg¶¹w\17Ù–N£–Ž\21\0238„\23Ü…ÕIZ‰%ñ&óeY¬¬IõBKn‡\2™/ÌãMY4]¯ôFöí…Wæ\26*àzE™c\8yÕ'Üùª·Ú°\23œ&„+6zõ\28E>d¢õaËÂ\15-åTA\2«µ–`{7©Œ.?šÍ\16aX\13!‚u­\31°{žÉ:\14w&uĤ¶Ò“’æ\9v¸§\22ïC\27º\17\127¢úãc’ÒãÇK0„ºYA\14È\31Ú'Í÷ÍLÝ\6‹\31\26BHQ˜ô$\127\12ù(Ìô\15déý¡Õql\16a@æâ\24w\12É´P{‡D5š\127°+$ƒ¦\\\31:éT@Y‘¡ð‡~Ýõ\9##º‰Ôˆ¿\24P\0147Qö‡5¥â2â©SÌè]æ¡\31w\9]5²Ä‚\15\24Ñ3†´>\0~dó7Vùi׳æáÈ¿I”Ó`›ïÃÒõszù%Fç{\21Ù'\21ÆŸhÜ\7‚\23ZÌA%\18C¼ðy·Ì\7%Æá\9´\19‘Æîs.¼M\7½\"½˜b\1â3acI\0=\15¤·\1Q¿Y!Ö\23\12¾ýÞÔ9¥ÞtÖ‹8Vìt¨5s\0…I\0299°M\9\17 x“\18é3Ûœþ3¨T„Ÿ÷ß\".¦{gÊiDåŸEæ`\127ê>‰”`\127\27pÆU\3îõ'¡úk/fx½6aöÚ|ÙkóeÏ…/\12<ñ\22›,+©Ù䊊7TöÚHÙk#e\15Ã\18¶emŸìµ}’Ï ÃÕÝ$Ђ`Ëda¢If\22•qQðèÛI·³b¡hÅOâg‘\"®K\28\28¨AAds‚‡3\\£\16\12ú,œòØ{\11¦\"ððl°\ +¿\24[ö{o¶D„+Ð\0]/\ +\11»†\28\9\26PîÌÄì\27ŒÈ\0}a\24à0ÅÚ8×N]\"%ê\26c\29\14pϼ¯ép_'®¨š+5…«â,Úè´.\20ŠWÊÉ«º‘5X¨-+åäUr27ˆòæƒ\4¹À„¬V‹Ä}jõ\ +á+\29xö„èÅt\18¥¸h”\1®!\ +Ö&ëuæ•]úíR\27ímÒ^:RìZ¬Á³†y:\25àí\7\9eˆ\27;¢›0Ý,S!Üg‘bö”ÂöÍ»NnOŸˆÀƒ‚†ì\16æ!/â.ÉÜ\15=ˆbºÉ:\4“d0éÓ®’â\24ß$”Œk”É\22hÒAF‘Iä*2‹8f\17á€v“&\16Çx¢\16`œqÄ<ìf.þ\11$è\29•¤ÿ~I\25T\9EÒJo$X%\14\0­\4·ïr\27ƒ6š“\14fϺբÚמ¢\5\14€ß\29ü ÏÛ$–MN¥¯¦Ç\22§kѽ¸øUN¦½é9b\12¿\27:4Ô7tf\22Êp1\29—\0224\31ÌS­\1Jü€â}s@¥¡»ãlÎÁÅt4L¯¦s<é7–Õ4™f%è7HéZéÆÿ÷_}R¥\16þKE\21îθsÍ«·\23ÓÉôj:‹zº\ +Nу7\17d\4½U'‚lsÏ¿o?E‘íûn\25+à®TwÝwm‡œ‰¨\9»wÉë—÷nãn–\9¡ˆ»Ë(ïΩV\29w±Â†yìƒÝ’\18~e­\127\28x\\À\3/š|C\2\30:AºÕÿ\22yaDÔ\4\16Ø\2’Ý‹œÅfµ®·£È,înQê½Stnx&\0ós9ˆŽ¯Œ0ÎÚ\24^QùmzcRTé<\14:§L\12\12É\2+SÕ¦ÿGìöG\16J\5\20õ¯–äî oÑöÖ‡Noþe€žñ­ð£8+4†Ho…ÊÐ`\12pu\2Ú2o”#´PW^á%RåN:¤ e\26ë\9X\15¦‘1Ö‡ÿ?DKǽnXǨñO#\30”\17\\H\30ÄßEÂÉ;\17\31Ôgßõ8-*\30‡|\24MUB9±Ø\15åAB·/X}Æ\29\23i÷\\„F\13Ñ\22gÑÉ\20¶Þ(\2øÅäc48~Ð÷¥øµ³\30ͺeó8o\127h\31á‡\2©´{Ð/\13£°³{ðdñzÿ—b\"÷À1\27;n8zt”E•(\15Û%e\21õ\22%\1\26\15?:í2z„gj°pl'Ò™dQ\11Æ•ÝÂ\3»F>\5h“Î\7'.4<š½øƒVàJóª\127#ã\28,ÔÓ\20\4!³’Æ*W}Õzð\19°ëá¹4÷xt´å\21#3fÒT\26QgÅXœb¢E9\2\0203Óæë\17¶¤\21ôJ*GD-\15=h\3šƒ5˜…©Ò£Ó¦Np`Ç/ÊOè6Ýn5§] ½\7\7¿p×\23÷~E•yïu/;Á‡­\2+½\12ü¹ïÜð\21È•â]\ +\19 \24!ÞYÅÞ¯z\26\22mFƒcªÒiPzÈVÊ 
óCp\20ÊÓÿ\19,b\20ÖG¤s¼šZ\"ÚQüuœ12“\"¡Ù4>R¶Q&­=Â\127µ™÷\0›A‰r*j\16²\1zÐ%”Á‹i\23!\\'ˆgº9@\14P\12ΦñLÄL¦\17O¿Yæ<\15®Ý?¤l|P\15òÀ!S;ïÝ \127>\28•ûá¨/\28x\22M%ê/xd\17)Û\29CÐg톳âÊm \2øUmRÚR?´*ýà2\7ézpjZA}\12)Þ£F0$±ó™/á\26éC‹£¸U»\25Ô©\12Z³|\12›\19×J僢\5dNׇ¿{ÿÚ¿,\23NW\31T\0=èm\1\4!WÃ;ms|,ã¢*Œc<èW‚ºþ‡•ý\4,ÊeU\17Qùý ¶\27Ü”•=\25\31UrcæÔ/~\28µÈ ”5*µÕˆÊM\3c\127*çŸ\28½ÿýôÏÂÒŽúÑÏ* \29\127?áñ\9»'t\5%ž\127ò?Ëó&ýà\127\27Gð]Ôdƒ\29ÿN¶F\15(®:S='u´EBI×\0013½@7ðà1Ã5|eѯ,yŽUƺ\15G†e–afydÆˬo™ãpæ°šù$×F\31û|ã]á”Ñ‚ÇMÝÅM\21à\6\17ïqS¼¹¹í5JÞ\\â·¤ßÃ!õß/•ã—\11îK-ï뢪UA2U[ýR_ò•^Lã¹ä\7ÓhʼòØK¥ÿÍ—ªØþýÖ[¿{vZ0š#Uâß³šý·Gâo—6œ±ðᬟõm¢ºþ}wgÆU½\7w\28Ö++O\21i±ÖÏ?\6\127{•n5\1278h\6Ã>\1¦pd|ú¾Æ¬ÈGJ ñ\0c\21g“3ƒ~‹µg»R\ +„a\20\0³q»^\22¥ÖÉ\6øçß¿vµc9\31 zøs>¼ü®xÿR›\7\8\23ü\12f\1ˆ1\6µðê¨+\6F\15 \1Œ\23vP\12\24®¦{ìÝ\22ÄÆÔ3ìÏ\16\19í\20U„ þ6ÆÃ\1\27`Ôéi%ðy\1vý\15´¥; +G®Të~Å=¬\22u˧´¢÷\26\8yòÑ]æä{‹\5C\15à®\23«›tË\0158B&¨\15\15µÜ@ ©\7UÉpg:Á’Dü\2gú¨¹¸\17çãÁ=?¹CcŽ\13Í\13=\19*ÊЗÞáòØô~.×y²Ø™GŠåÃg\127>ôX±®q†\19®ôê\27*üö\11Ö É\26ðâ\11ö¢`é”ÁŸ$µ'\0027óZô‰\23åct)k7ùY\14vÎœhœåfçLÃÖ׳Á¾7¸\4\3Oªrr«c~\18_ÿuÚÑ\0020\16?ÚððŒñ$ü\27›Çš71:Ï\127Í\ +á2\7C¤àupb[˜Â…ˆ£ªî‡\27Ÿ`&ÇL÷Ψ°¼§NÑ»W²¹›ç|àz#ï§Î”ÅŠ½íJ²S\9tkçû\28€\2)ÊŸÕÿƒ×\7Sn»aj..\26K\24©*ÊßBCcC¬_\19\0272Nšv\0TŽ\2s\31à\30AXãnÐÏ£=â,_fhˆß6´gĨ}|\13§ýÂz\3\28±}ß<—~,’¦h½“Ê¿\8b\11^\5\127¦\7ÿ/r:]\0150\22úØë}K|ý‡\27\26Ïå?Ë«‘þ÷ê¨7\127ßmŸ\3è“Tŧڭƒüy°þ]]ã\9X3®/Hèª\31|õG\\¹‡\2ÑøG®´A«Ô;¯ø<öóÕì_o¼åÛæøÞy_›Ì\30ŒwÅâ_˜é:\0177¬³3v©Šâ,$\2u\\3\0123\21 \"™yÖ1é­–(,œy„g8âœå\127éŒ3çâÙ:Á†þú\28\30•Îð\6·;\24\28ƒ\4“\ +ñ™3¹‹;kòM–˜®j®ÉÿG{\26•ð\7û\21¬@Õˆ\25eíeŒ3Ï°ÛDù\\¦Çá3²ëLQ!ÿd\28z͈°Åæ\3\24R\17cCýÄà‰_VèGV÷\23¼¯°µ‡Z^ˆjyÆéÔ\21.Ó\0264Ö_\"ŸpC–¿)|\30\11~2Ú®¸c)eϧ\ +³Y°=»Ò\27¾æƾе‘?K‡•ž±Ô^/\24Hu\2Û™§ªa`ø÷Ÿ+F*\30x†\31C€\23Å\17hˆ1DŒ!\27,ŽŠªáã̺@\3ƒx¼\24cÍ\15vÒæ%±\\]ÛIcç8@ìÌ¢ä=÷\2\27L\1nŒ1DŒÚMàÓ0“\4á@Téõàäécù\16ˆ\5€#à•^‚ˆÄÇ‹’CSú÷Ÿ]R\9e%š·\24\26|ä“\1\20Ègï\4‰…g»š\14\13ª ŠŠA\18ÝÙ§,ñIÊ\11qÈÒùy¶Òùy¶RƒN‹Çûœ}zÒ9ÜO\25ì“’ýØÇs\31\17ùS4²|ãˆi§U\14ú/ƒîÚ6©4·\31߶a0\22ÏŸ¸IžŠc’ÎqÈÑÚ s½Ù£ß9N3\18ÀÔ2е¡\18ðûxp–Á¤\ +²-ÙŒRÎâøóüG¶ì7üÉÌ(œ;\6\3B–Ðà Õ´˜ªøõÜ\13Ç\21µÔ\7ùü9×ôÙçÛœy`ÌŽ¹›N¸ÝùE¥´®9óœ\23ÆÖ[A!à\17 ,xr\3O\16®‘¯\7¾ˆ*ºóN\17g¥__;3\1mQì~\0113ã\11\19Äîÿsg÷Ãgoÿ7#)\2ŠÏ&m\2E…Eˆ\127ˆú|Mêv«nñ“ëíýß¿,ÎÄ)Å™{ßy›Yêé&&ª;¶™\31DPâ±OüÜeÎê—@\127{îx\8í¹»óÅX4æW¸àL\15L,¿íÌëÀëÌ+Ô9•°WF½{߃Ö>è\29§ò€ Ò½_Ø\"ÞéJ-À$€sKÎ<&âÌí¬“iÜ'QØ!œ¡ÅSBs¨V¡\18®R’çædæ\8\1w`5½\16óÆ—[ªÎn\29o=\27cÌ\21b%\19\0147ƒÉ”Ëñ\4.?ÌœßÓ>½\27•f‡k ãåq>õ¦¬«\18»\3¾\"\3\11¶P\6úˆl¡¥V‡ÏpøtÂp­”`‘K8H¾™@0oïì\15ï8 %aƒ®´±Ñ®ì\1D&ó—é·èFr;(XËñÏ—T\"\11\14Uï~^Œ\8…\8:Ú§þ„\13\16_˜q\12“êÎÎÜ„œºé0†Ÿ\15ûQ:'.\20áàØì\12µÖ•PàøÎ\27\"w«,ÇŽ{ÜÜ €nÀ{]BøÇJqÓ\15äŠoJü\22¸ä\26dì\\JÊ_L;Rô>\26‹é\"Ê¡lŒ÷ïjxÕ\0(\6sPqýè©Qäé&#\0244£vÙ4bˆ\27ù4\7\2â)\18Ð\1»÷‘—Jè;‘‘ïèÜC\"Ÿ\8%ïgi@Åw© Q|*”Q[à\17˃š·ç;äïñE!õ\24>î’¯¿\5è\2\12\1\30\6KX¬\2.Œa_\0ø¹\02606ÆA™‡R†ûè9ÏËdÓ#‹\24z\22îzZ¦5 ƒFV“ãüP\\Ì1ž›‘\9ùštE÷N‰ÉZ«Ü0Œ³ÔÁ\18p\25O\27,»Œ¾ÁJUÉ\30°L\29¡n¼~ÿ¯ÁwsθÁ\24·t”å7º\31–Û\31§#æ\6ÉÂß%\3\127‡hû)(&Ö\15î§all­»ùwKË“bð PüGxù\20ÐJ·‚\4*U&îmŒÙC?|ý¨éÃæ-À‡\28éÅqB\11ÊÒòM\27€Š¡\2¯\\|xIóÇS0\1x/\28òðÈùÌ|¬\16_ªÔ\12ñZLøágþóy\0ípÊ\30@s0a-—Ã\29µ)Ñå»D–ï\18Y&éEh‰\5\14ý\\éˆ\19»»Þ…¹KNù.¹äJðàÕ¿§N\0K\30Y¬\6]š‡©}¤Þ”fiÚG”Ïa.XðT\11îûá\19kVI\26\127\5Àuòûô1¼“‰A+<\1\127—<òÝ‚ÈwÊ\4ß)ñ{—ïB¾wJ÷Þ%Ý{Ÿ\22®“g0–î!\25z§Lè]\9\21\23eæÙÏ=d'ï\20‰l%Ws§Ðã½\28^øÅ0èmªù\0265]\24-9\11•ZAµÛ/‡;–Ú<\ +ª_V2)‡Òe~â·þ¾®ÅÇá€\6€\9çK2\"Fi2µ—)|Lj\25_Gn¬¿&®m¿¨w\27¶3\23\9\0l” ƒéÃ4<º_\ +Ú³6¶_ª_PNF_”%”ùS”SÑ\23µdä@J˜UZË÷·èóÆåïWè¢6ŒŸm\7õ\12À1\\¹r•ãÇ¿:$\29\15É`AŽ\19­»„Ãî\22\4»[Ôë~°pWí@u¯Dy«7ÑdcÍ\15OáEV\16\8ïÕÍMªûªÄ\19¤tÂØ-R«Š\7Z’:Í“Ì$µ°¨~`\16‘[ý\3HM\13È\"»/‘o9ýû‹bïµÆÒ\9Ã!\22Âul©\6H9Ô\20Ó™ë. 
:*‚¤\11}Pã‚Äæ{ÓºÄB\20H\14ÌHȉƒ'É'\9Òƒ¸fFÿ\2\30\2I\17Ùä÷›\4\6Ÿ.auÞð«Ø/c!ÁUkxÂm\02993Û\23e\27ËŒtÂx•x‚ƒ/Âñ’=41Ô¦\3˜¯\9_Œ_tù\16atWEw­µ\7-\14µÏôÈÇ—È·<þ\11rS¼·F†7’$\3#¼)ÂÛ‚LàØ\23\26 nü>ø=² ø\24\13v\30Œ¨eyã!\26è„Ð/r#\3[\0ßؤñÃ~>&Qþ4³ì\15¼Ó+Ê:8pÕ\6:)š¼(Зȷ¢^«—g¼-˜ô‚\13¶B\13“ŒG¥©Ž‚5ÓÕ=\9M¬o›8ÉndSš7EV˜ƒ¢´\21¥­¨E\0205„caõ,l;\11cYXjäŽ'=ðÂ\7dY\29k’mfe\1n*e»Éj–CM\3f¤#ÂÄWPDl‹Î¢—PHF‘W\18ì«\18®û\29iÌuT\23Í¢\28Â\4†\0\0S€Ù\0k2¢ÆÑ`µ!‹>,\"\26°\31\29ŠÃ`À‹ ØYHXrŒÉá’\127Zg`ù{\23\29#Uc¤\ +Ë\\Çúq\12ô)€Ú\1]ÂóRÂÇ×î÷Û~ÿ\5íTB8æUéR'\29\1U³¥8Ž%H½-œìÎÉ\27\\fC”Ùà‚\25œ/¼¯Bª”‚Ñ•ü„JòË)ÉÇ\24—\29\29;C×\17\31SIûÁFúy7%Å\25\7ÀŸ\15µˆ‡ZÄä\24'×=tZ$œP\\”Vn9\12ž\21Õ¤–5)»df&1Õ+yž\22%‡\29Ý\15vˆ*us\20‚W.\1U\"|¨#Åã\28\4); +}\30ÃüaªŒÍª^¬[˜µY¥8G)êÝäw7àÂ…|’|jüöyk\20Ë&‹¢z\0237¯¢â¦Ypq=©—¨’™ßÂïÂ/\127vÖ\ +¸\127Ÿ\8ø0m\5˜†Ï¸#—ø.AýrÀä+\4é\5b\17$7‘ºt%Á8ò¢GÊ`~Ј‹×éå…mâ…'œ ê™/ZiT’d„W‚℧Ñ~?\17ÃÌlVŠ52)\14Ÿéý‹¤6\26]Ý›p:y!cæqûŧ¯Q¾|û\26Ýç%éU-\2Œ\12¤Õ|xÁ=À¤KÔéø¢G^\18iÝ\31¤¸IFB×…Žñ\17í\28\12ã\14é‘.\\“ÐÌ1\26`Uˆ·ˆJ‚¢O´…\16\"è,?_\14\0126”\1â{á«cé…¢S‰»Òºt¼h`¸H5*­\23\24•¶\11\31Áù•B§ZÔ\29ß!].µÌ/Ï—\26Ñ…Ç6 7\14ç\2o\8ÜÜ4FRA\19þÕP<åX\1ãnºV¤\23\25E\ +ÿ\7\6VÒC\21ˆ\19ÂLj\6óò¥ÁYT%h§zÄ¢–Êź9©\4\ +y¹ð‰x\28!\1â‚%fÎ-\23Ì…´LLN\26‡Öt`¼\18º\8T\24\2ö\2=;\0239 wÕ\21hËŠ¿€9Ç3ì\26\13:ý…çlð8v(¤ñØŸ^Dy_\18ú©°ø\17\24Q²#\23<\23j¹Þª'Ó³éÅ\20wC\8î¦É4›bíq¡6zQÎÒ\23j¢—‡‡édŠ¼aË;^\8ô¬&]Š\\x6 ›Uq¡4Ù…ñ\8ÈGõ†×@Ø‚°üÖ3 é­)2õ}S©\9>kypáE#Ö\"—\13\7I¢rßðf\4)\27–\\0òa`o Ï£~±ˆ p}© úm¥\8¯³B\18\14i’¸OäQ'<öQclPM\13\0071*±;öø\14|>—\20\3Ô\127'\ +Ë'paðÕá\27Àéfs\27öYöHoëAÆYFþ`â@,\29Úɺ³“_ã\20¥JwA\20.ÇáÛÉ`0…\0=\16\6ÇFR\31Iú¬“Nï’Äòë/på®’Â/'™Fú-•¨/Sh\ +LÝïDz‘Â$ðŒ/I\21t’Ø\20\8j¡üy;è\23e;Àvá\31V¦oU\21¬¬\24HR¥\3›çŸ7\12-•`\11FŽ‰\8\28­\0018Y\0030|ÝO4Þ¹ï­ÆD‚þ,9ýd½à¤I„;­?oÜmTâßö#])»op\15\3p\25#ݬÉ'v‰\23›2㘸J„y\19õŸ'ý™jK\13r€bŸÊ\15\20é%œÖa0·ÞNØsyîSŒdE›ézb'»jò<^ávm†W¿Õœ®i¬öx@\30\29ü\ +a_E†¯'Ó³)Ç\19€dšw›ÁÔñ|ͦŴ\14\6\7êÌLW–:5gâ‹dSsfºŽ7¦f\28\14\3}£P¤&3YMf¢šÌ„·‚O\"œþ¯±v¹RØ\22DžqÞ’øV+VÕz“•´›EV’¡vM=BÚ\1àñQ[à0\19hÒóá2{ ˜J…uúL|1\9¤Áh…ÇŠª¡Épo¸JÒ\3<¤Œ5^*\"\28`¤\26~Z³#KK;)ö¡·FÃ4¢Ö™\25\1Æ0P\12ú\8Á=[¥¿\127éFÄå—T\14ÞLÑi\4ô+ö§o!”/ Ï<@7Õ„vÓ,,ÕÇ\ +ðpyqãDÚšv¦ƒÿ1Å?¦UTÓwK\14W\6¸©\24q\15‚„…­\2Äì{«-ò\5dÀ\22A꯯\27bªŸbýh×rÓ.…\ +}X|‡\17[=¨ ycÕáÒB’J\21ÑÎFþ\22s 5¯VÓL/lG÷OÔ»Š/\27ªZEaâ\23¤\8ògøÒ\21Ý\11äûEDUÂQ˜·Re”×oæü¿\127ÙŽ^±¨\5éIpýO´#=9\19÷ÊîÂ|+#ÅÙ\20÷„~%J\16Ö,ÕúǺ‹_\12gR¥™\18wëI›uÝ¥IT'™¨L²~[†éåÞ3š\7\0298\11êÂL’ÂA\17_\20\12\15™\0195ú‘Œ2|€lú\13¶G\7ŽÁïž\25¨‘/Y#_’\14¾d\29|¤\11I±gÌAR°w@Bk\9·o\1ʦÖ@>ÕN{\16h9\18gp®…™ù\ +tu¨\13,Ó8Ü`ä†=›Ï”%Ö™v}\21ÍrG[ÈR‹šÈ~˜r€šÎl\6gæÙ\28\127wAèºKà\\•Y‘Y\11æÌãò”µØ0`b³\30ÐÀ\16cØ\5Z\5°g§…âѲ C¸Gö\11IÑñ\8| .2—ÙÙ\9V—6ú6Â(šÙE3ÔÖÀ\18,âl\14q¥™ùÁ*’Y~È-ÎTº\2kô¡œ¹#Ìäèœ`湩,X¦Yo“À\6¢õ ÌŸ˜2\13-0kdÆü\16\24\29'Z\7éç¬\24>KX\28\21\23—CpÁŠ5w'öÚ\\Gg\17,«êˆ³œiZz˜\6Ž¢yìY\31\\êcÚÈãpæȘG¾óÅÒ\24:ÓÚæéÂ#qÙà\0\19V\27Û\2”¤\28Wª`aÍa)SG!<á\"\18¾üÝÔ$\17\20#–\"L\28Ú¢\1ª—\3R®«Ž\1‹Ï\0125\26•”E\13°P²ñ˜2ÞP­_äò°\\V\26®üÞùMüâ2Q%-¿™_”\18\20I(Háwe>pΖº\3^‡\0079¾4¦hãE%\9Pï8éÇFªãm\20\24±=ä\29\6Zbî9ê.\3l\31¨qPîËuµ!áæ\1_N\7‚°¶l0öëvÂëH«áõÊ\17_b\3\15\3LMq\31\0åçk\11_Š\ +£\23ý’\127Ãë\9WÓÞ”™Å\16Òá˜\5Iá1K‡\19B|Ÿq:&€\5u§\3Ž\\‹NÏòñ\31Ü\29“vtøP~oA‰ˆ\21ìŽ\7úàØÓa±FÂôp\26\5Óï\5IáEJ\26o¸R\30¨ÝѼ£ÚVº\4ÕA²A«}úL¡Zûåý\7¶?pøã\15œ~`‰¸X\14¸úTÿL=ĉ÷&\18i¼%.0«ëƒeQ­'µ¤¬<“ÿÔu!kÈ L@t2EÎŽ~Û0\16WÞ*üE€ÎÒù’‡\3b¼ëº\4ÎWÇ\"’5;7ÞÑfDh6½Ø\5\29F\9Ìž\4­\28r80áà\24dÓðø0^äPº\8±\9pîïz®{:¾\1´\0199\18Î\11\4°˜\12æˆ\5‚ªFL\2¯ºØ¢k\2a\1ataz´\5šY&½d´ÂC‰tpÿX\1K\12‚òX]ÁÜ‹8-ɾ\"EØý\3Œò=:9\24;]ó¿\15g†½üØc†ë áŠ®àžw#ù“Üá\24ñQ8@|\28ÔW¹èë4AtšØ;Íë\29Ï~:w(\28à\31SÇ\28.\16û'\25\14 ä\24ðÙ€\27ãúVÌX»Ñ\22i]Á\24ý•ú\19ä$\18*?ÃÄ\21EÏçÛ+á\ +­’„}בú¶I°„ë¹Hè\27\\ßH¼\18ÓÃÈ\3(ˆƒ54¡«‚`|Ük\ 
+ªÀî\"¬–>\24Ÿ½…ú&Á„I«ï\23\6\21c«RüfŸ–z•_oöUÿ`§é\31hñ=f$þ\21\5Ðÿ÷¯Ö?ƒÖT”TâàÏ\23g\16\25(†¢º|\31i¢zf\0¬¾†S>i?\6Tž\0200¿0\0×'\3^'&)v\\\25š)—\6^\26;EÒ;ŠÚÊ\16Û Y\5êÊ;Rê\31“×ù‰\22\16:‘y ¹p»ÆA¸V\2”óâm×4Ä=7¢\9Cà`¾ê`¾êÀIÊzÓÀ©áF€}çðÂinx\17otà\9|G\0~Ç\0~õ({ô\18ÐQ\20ãÞ`Îø “ö\1lèÆY=}0;¹Ý!šÙÀ\3yì¾`‰®\1›‘Nvú²\23ìC\0¶“þ\8{0‘1 \14—S²q#\5›\27F,¸I™ËJ³Rx¡œ±@)Œ¡nLí (¨´Á€Ù¸p\0\25p?43DZIðj\14ÈçÑÞ?'yGO\30jg9ÑÛÒÕž54ªúF[\28 :q\15êKƒ¤Y«ñ˜\23{c]5½Ð›QÕ˜¡.C´e\24ò2\6¿\25&$Æü€M÷\17ÚîÏl\24âuU£\20§&)_N¡|Ù \4X\21Hëï!¶oƒz´,\24‹ec\25\29O˜\7ÉÅòÏ,ªk»²IÝN\23µš›\22ÜÃÍG\1\4ìãFoF\\\16É®\4X\0t\127\26â¤Í‹b¬û\22\26\19ýáuÜ\127Œ²=øOM7ÛbxQél\26fý_¯üù/“£Ÿî¦É4›ö¦ƒéÃt2xJüãÛ©8®\2ÑMnš\8‡8ê\24üPKâ%@䀺à\1õ3ÔA=ôÆŽt4ó@€¡0¶\\žvtÞÑeGÃŽÆ\29)¡‹ÓDþÁÀ§\30X\15ضÕjN¬ì¤–n'µÓÄÖ†\13\2ÒY7\4”x\24´¶’À牣 VW\3wkˆ\7ÛM*ú¾ÛßHkdŒª\21ßì>gÓ^\20÷\12àÑÃÞ;\24:CÛ`À\30êfŽCÿp”âm2\ +hñ\13Ä\21É÷\5DµK#m8\29RK\5\0284\3\13صà¡\8f\6ºÄùûñ,’•ÙÑSÂèþ5ª{“ò2þfGúÀJm°*dt$Oz|fŒC\2ôHTÂc‘»1ê)OÒSNÂ6\8­§0q§6@÷)Ý”aˆ×\23ô È\8\15¤Øâ¦\29å\29\21ù÷Ýiã麣=Ì8ìvSر,e·ØîÈmèPð~§è$?\30\2%±_Œ\\\5…OŸØ[\29\11\25pØÂ\6ÌpZMWú‡ýÀ+=½\"\4vªRŠOòN2ò[\127øÌÎ\21\18ôI\18ô #\23i#®äÀ\19Ü°¸%ñl\24\29\9\22\8£\24¹!\0Os\14ó\16`2˜Ã¢\24¬¢Ü¶JÒ=YÒFD0Ô•+M‰&¥”/\2Óëˆ+òI’íÕ\25ܼ‘o\15€\20ý\25¼îQ;\4è?‰$\23ÈŒ¯´šâÅ\24ø=þN—Ö\20ç§äXR}{%GJ-}XÎDZÜIµ@?þù8s\4ªTG`\31â\17‚¾“ra\9óhëOQ‚}¨\9Àb±û—é·ýýKúÒˆ8ö—:²Œ­~ت\30[5\19\9\127’¶ Y¼\0XH¡\3¡D¾>(½v3Jh`|ƒè\1ë|‰!\13Qh6Ä«Dèƒô£\\HþìÃÜPF\25ÆI\"\ +\4¬­\7Ÿ”\16\17ô¡\21}\5èE \19\9ÛÕ㡥؃٦ºX\30¯\"7\17\6™0\1cN…ŒÿƒZ%:\1Žl\15^á\0G\28\8ƒè#®rÐ\19×ü\15.¼j­Ž<â›NýÉ\20\13¡\18>˜U\1/DWÊ›z4‹á*4\7(\2n\29“XÈ /¦\23yhÂ]\13¢‚«)\14MžA»ðÒ;ò\17\22Õ”\17™¼\\\5eŽ`~“»’,å'•rLÇ\1./Và\5<ˆéMØZM3\11W×ÖHyð:Q\27zš\20’R»¤(Bj)I\19w9\12\3%Ú4Lž«\26€+ý\9'pø±tµ§Y—ÂD‘\3‚Q\0300·ÏTRLRku>)\127 £ƒwrî®\ +\4\25ÌY¼Pž\4}iÖáÚìÃ5Pÿ‡§1³z\5$\\\25äáÔ@*{öG€y&Y\28\6›\19\22\0208\20…Åð\3µC¯Ú!…X,\11ó1Ú·\26©!W\27³tA\25\\\3Ü\3h’˜Í<˜Ù²‡°™\2”=Ò…ž¦º\127ó¿µÂ\6,ñëât•\21Tõ¡|\\ø¥XL\4·-±lš9aÍ:þ›y{\9opÜè\5×—À©jžD²\8ÞŸ«T:r+\24ùåbk\22\11iÆóm¢Ó‹)2€[Ñóf‹é\20\0ËÄ™úSi1gv–ÝgQ\25~ü4ñO·ðO\12\127³ØL³ØL3ÖN2ñ— è,rd3åã\19‰z»ÐÇ\31aZ\24xV÷Uf|é*é:6Ok\2fC¨+Þ-ñ›ç:áÞL;Q, I?L?EÙVŸuûˆæZŽ³$\29+ÁP®wB\31¿T—hH¥Á¶›\9.Œ\31·]7QµÍ¢®P¦‘ÁT²\11\11\4oc¤™\\ÑjP\14—\18E\11Î\11\7g6?ª\27•\25Âqñ«Õz,±ðW¶`©ÅIzãd\20xhô\23DEÄ#`‰ë\20ˆøȪÛݾè¦\21\24×0茸ÀÁn¿A\11\15LJ\13Ê÷\18¸\6\30H¶\4žlõÁÅÓ,íO¢…”m}“ Ë\26P¿S\27ÚÜL \23\5Î\5ë‘\ +ÄW´‡X=®2•°„\6€\0245\8É7Fxp¾S¸ë_Çà“1&î\29u…ö¦Üá\16©f\27ú[?Áç.q\20Xt;%´‚ÁÌiœÊäЫŠ\14JI¹¬¨‹·§_TM‚q\9\29’Ü0U·³í°f-x/û(û;Ð5J÷Ä)¯pr\4Éüe×þEo:ÈyŒ?Qž\0006“\28Š£_\24ž›Úâù³ðõiRºþùç\5§Š\0007SŠØÕ’\0310\17\21]¿*Bš\14Åù…8Ѽ£ðÇU¶QÙƒ¤“-Ñw\5f‚‰cl±:Ü\8ºäˆã‹`Ž\9¤øýQ\2®›‹¥^\ +§ÇÂé±pz¤\22˜“)ºE¡:û_¼SÓô›-Р\0098•\17ý\22åŠI¡Ùì\0\19¤HJ\28%Q‰\0064o¥rl ¥€ë¥Ebý…rý›QozÄ\9;/õìLk© á\22¼hŠmø¯Å¢›8è\0027Z[Þœ\11»Þ.ÔÁh¥4›\2•Æ\21GQ‘ÂíUáìXxò_¿ì\0`¼“¡­\0080s\23Δ\5rÇ\7Y’¯]Ì\\,\18«+)‹\7í.Y$\22Cž÷ÅôªP\15µ¿4¹ì8“\22ËË\20Ÿà\20à€àŒ·ø½fæáÝE\0Á[ª&H%þ9Ó2\30ÑáêTÙÐÉH\1@ó¥\5V¥ÅŠH€¨X\0016T{EÀÝ\ +@Â\1RálYv&j}EÑ~¨“€Ç>È×µ\7¥ïã(\25$êô\9»¡Èÿƒ\3ŒteÀbv<ë(¾FŽ‘̽–ÃfÑ«x•>ôeý͘¹J9]DPjz\127\ +j€\3¿í¨ö´#\31´®­µP¦³\28‹\15\14KœQ@]åI„›ûR–,‚fR^!Ÿ‹Ã\6Õ\4t‚§²à¸tR—\\$º€lpÖ+šõŠžÀ!;\12ŵÝQÐ\22\20Búu|â«T‰¯QÕo\15¶Û¢»L\0116J\23\25¯6ÞdìlìM1º.~¼N`\14‹b°*ä&#öDbä°Mª˜\26ÇÓ(žFÁ8N,’nA6\22J\21!‰½º\4-z™1~ÈÃÃÀiJŽìdt°Ñ\127ä0¹ˆ¥µH¬¨Æö°å7ÈÀ\2\27X`\0191[Ø­\6d‚°¯[(쬗©\18Ÿ¢ª_±ØÈâû2{ÿëÜØø&šDFÛ’W÷ÅѶÒ\23¹¾ˆ•÷uq\28bÏ\127é‚Í—¸ì_,­\26ôª âr\127™Ëý%.÷—¸Û_æf\127™›ý%nö×ÝiMW\0180ªýz\22bå~eE•mÍá‚f;;î¬8Å-þ2·ø«·­\24_â\18\127\13.Áa'ý™×møø\22-\21tVY–ìÝÐ\\«ì\22g·(\2³e¿Ìý2?öË|×/qN¿Ì9ý\18çôkUÔ¨V]ÂþÖÅöo]búÖÝõoqK¿Í-ý\22·ôÛÜÒosK¿uËBÊD’ß—JV*’ôºTòëRI¯K%¿.•¬r$ùu©¤×¥’\21$½.•\14(J½÷”üÞSò\3Oi\5[\20Ý—à@öâzâ±Î\ 
+&'Öw\0\24ÍWÝ\19\0ÁÒ\24tTHN#ë‰âäëÉ:\\…Àå\1pˆâ\16ÒlD9h0«Ÿ\2œ\3ˆ•\0064\4˜\2€Mºr\31·bî\\1\15h±²jl_y=ªR]Ï:þ>ð\16á7G1A¤ö‡ß\28¡ªò\\¿u”ö›ŒS›ÈoyÁ©Ôou£ß\28^@\20\23d\20@ìVãüxnû[»ÈJ\24¸®~EàƒÚåHhÂDó›\19\13~Æaü·®øn\\û¦íD™Vš±`ÝxÛ`Óm\3é}\5@&¶Æš¨Œp¶I$T¯¨bß±5PÑÆ…ÒvˆÛß‚µ\8ß\25BW\18±aÁÚ¤Òçqš\21vÔ\30†å¼‘¡Œ†v¡\9Ç{\7š[Ó,çÎκ|ºAÞ–?Ë¡iƒ6n’Â¥Ñ&Æ\1)~³iy³‰u°™;à\13’•ã¦-ùjëÖ™\4+Üúås)\21 ŸKY.Ù\8<æÙÌ\18\0045×wƒv¶ê\127ìÙ\"+½Úº7\29L'9Ï6\22\0257ÄÆYƒzxa¨;¨\19\0299#lV\31Æ\ +Ô\ +ecKØÔ\18pS¶–æ6Ÿx\8_Í\18y„=f:PÌ=Õþ™“\12²ùÿ|\3ý¸Iß\18\0$´`F…ÊbE(‰8Ã\31Eœ«}’\127-*·Y\27NØs$­\20yz¦À ¼a4Ù¸\"„R`[ªÕj\2ÜŠ\22Œ\27ï\21¤m™NlÂ\22„Øw‰›–&²€xȆg\1ë——\26·Z^¿Ž\127ÞŸ~ýy¯uT?µ¨þ¼Ÿ±)«ôx®}®\26½ªüóþ’XÕ\127ÞQ¸/¦\23Ó\14ÔI6ƒéjº‘¢\16E;Óº®\17(¤gÙ£kˆ†¹Gš|oݨ\11ð\0080=íˆÿKá\25\13Þ 6;¡Á4Rð¾Êbt¼£\1279*Ó\24a@É\27\2xˆÌEÞ–‹©sX‡\27Òo›¿\7S¦Œ×PH\17k.PþH™öDÔ\\ybêoÜhU\ +-*°Ç¥Yy\24i\15©ëJø|u%Õ\15ï†!Æ÷_º^OÆ÷\14?\2¶\6ù%l÷–lj[L\1\22\1>\\\18è\22hŠèùH\\ ²£å×J8ÿA\22ZÞûhø')4\15\127eG˜¬Ãò¾£Î¡\11ëDvûOʞ²îÑlü‹šSëæÔ6Ý1ÙÜ‹\0146N¦³\18Ç–ÕRÑ•\"|›”ì«-\18{\0@o:8=‰Qñ„Fq½ßLõ¯ì\24ƈ¡Žª¢©³8\22…û\28L¶Ov-\0åñ¡ª{(\11s\22yˆ”£\"[\\c`ð)G_ŽþÛÉû–‹ní\9¨Â³Û#A8!MÔ\14Tɉ¥F\25\ +Q&²ˆç\0007º¨÷K‰Ð$Ô8jÕP¥½ÍŠ³‰¨ÞŽ9:;Ñyw\5ï\1\1®Ž×\21”¹3´Í¤D¼\15¦2gÚãÆËBóØ9‘ìeÔ64\4˜\0020u<¼U\2T)Y•‘U\25Yµ)È@oª…Œg¨dŽRX¢ð9”3uߌ‘/Ñ0ÿ]TA\23½\16/¤£\0\24ûÙvÏsØ=o\17 0\6Vo‡í\15ˆzˆøDðØQÑ\7m’ÆÞÎ%\7ÀHõ¨š\18™mþ´‡Oö¿NcdWÔ\23:1Ió\15\28\127`1lÂêí¨\127ªŒ:\\Œ;\27ÐÏâ,.Îa¯©‡÷?¨Õ—\27âjaŽ\29‘&²^eÐSž\11ô2)ˆ\ +\3[g\13 =\15ªoŒ\26qÌë¥}‚ÞÝú¸»V\3î)\7­t@åBÙq\14\20Ét{Ž\27@?\24\26\16þ2¤Ýÿ´#\13–û%!bµüÞS`\31S`¿OÚÙ\11\12—\0Wƒ)|Ï\17ÝÊLª»ôªê\30OB\31\4T”ã`뇨ÊùÏg6U]¸…€\22ROª=…\0Ag…Ÿå¼ØÖý£÷pE0\12èÆ\23`kß6;¼\"­R\6^\26\19eÅ\1Œ\14¡2\26bF\28xÌ ;¶Î¡»ÖÔ\14ƒíš3¥MØÀÓâ@\0093úšgQNe\3Ÿ~’‡Åt£=Ïzi\17óíàÑ\19×`ºû©\13;6Lù{8¨Ža…ËK€&À5À=@\23 \15P\2¬{ìJØY~™ºâ¯to®¦Q’:µ\21ì^ÂN+\27XÝ\3´\1r€.@\31 J–\\iÛÕ½nØA›m#9\24kv¸‡YwK2³\5ûn·C\29Ô]Ù`‹A©mæKøàÌF´ªmqGV)﨤Þ:—4•\127\26q\\ðe\20{‹V\18\29_×PÂ*íhwêwôçÛ?\5\1279P3íP«\16á1B•fÿõÊ\5Ï@1NЛhŠÚóX1ìcÅ †¹P§6àAcˆAcˆ9và*\8³ÂFyýé\9iŠhæ>bv›\127\23ÍÇÎt2E#¬»d¨Óeˆñlê²\25]”£«\127œ\29㇣Òâià…&ÑÙÔ\13]óö\0163Ö \9|Ð\4>àJ™£ö0{Ð\27¸#\22-êpšâ%Ø\12Ê™~ðf€’æQÅßQÖ”V\15»\31Äô@ïÄK€›A\23\22“€Jý \5?–ö5Û«Lè#\7ÍI\24å\30ÞœñðU]óáÍÔC7rh£Zy<«º\30½¯Æ…*‰\1ä\17\20þ¦}_1y71‘Ã\"ZìƒCèä!tò\16:TØÓI¥=y°äT»ü²¢}\13‡Ó¾é˜bÀ\3`\7š|Ï#\28™ðÉr*¶œµ\1™ô\\%AœËÉ ÜOTBãÐåpúÍ1¢âèI0\7X\0136\5àP9i¨œ[n.\28-¦§}/\13ÔîVyG] Çn¥qI÷ÇÂŽãÊ$1\22€«szSj5¶L)þ¥±e‚š\6[tþ\17÷PSŒ+SŒ+\19O\28ä2‡E1p\27ðrE\11¼IGåÿ„\23Wåû`:™º¾ÞíAãÎäqg‚¸\11<\15£K\4\7s°\24\17\\„2`ÂÇ(°±;\5¸\4X#\14W‘†(ÚìN¿Ù\8?´J\6+øÒ\6\24\3hï\14T\12¸Dt(' Q‰h\19(áTœ÷O—Ågt«Ï¨O\13€\21yI6é%_€‡\11èa‡Yõ­QqŠßVjgŽf“†ÄJ”\8ô'\20à\4á€ÓChoEåΕ\0114«\6ƒÑÁ´\8œ\22^\19\15î\12Ü*w\31³:ê´D‡Ž•ã\20C1@äýë\24™ÿ*»c\25v×2î–{\18Ñi\28Ÿ¨dˆEúí\"þŽŠ\21ÇgÒûº\127Þ©¾\30¤°æ ™Û’jk\7ì\28Ë-²+0›\22SŽhläG£æ,Ðxx‘ÌŽ:\8\20˜éì³\27~œ5JXžÆ¨?ïî^.ÑvGÜÂQÒ&þ\19ƒÆ¼Å\2‡hÚ\127N=I\1‹ò\11]\2 )²’.\1œ@uYÉ'¯\ +“Ãó\24\0Ç úßg\23 2\8u¦´zˆ>»ýe8jݵ»MaØö b –}W01c:Ø'=\\E¡c\12³üÑî*\8\31Ù\27\20E­Á¢Ä`Q¼\25-îK,*•ÓËÐZV\23\2œŽ\19`¶ðjÝ\3Dm6-áEcäêÅ\0­¸“Z©\24J6ƒF»56W«—\6°ÐL´êÆ\14¬b°îk„ÕŒ¦õ`•Çá\ +ñ\31cµáU\"¯”H²-kèX\22¬;\23e\13VåªI~\21ç`Õb­ÆA\21RFNJ£l6]£¢`—<ÿÀæ\7²¼'‡º…Õ\127ÿë\28¡×\5«7,Õf¸\4hä0)ÔäȧˆÙ|Š5NšŒ÷4—Æõ\20K„u_\25¬‡C½Jng·d¤ë¾ÉYcå°î{œõ\16›œõg5±\6/cý‹\127±J] m\15S”—«ëZ\\Ž5E2µºXÓQ¥œ²SêEÆjFíê±eõ\ +bõ¾d•d±\17™k\18·¨zYO\17F­ «ò³Ó2ú\23£s=†}\20ìø›Í(fhdèó\28à\24­÷S\127ûó¹îNúÝC¿ã*y\26(‡ Å¬¼ÎÊ\8Y&«fÝjÚäX\28û¢†´D\21-a\127t/âô¸š/Œ¿\127G\3øŽ\4~ïNÇH\"§¸M¬—Í\3Àæ\29‡'¸Í¼Ë˜Ö6mC6q7/ö·è½›;ï\22\11ý-¸Å\16\3õh¿\5+oÓ«Ë´)ñ\127@\14@D\\\17lê°[p.6s.öÉrSG}®³£‚6’|\20â†jÛ§K̆š\0247w£Íq‹U9¥~9îoÍ,nâ¶3\18€J2ZÂf5ø\29\22ö«i–o<å°Ñ4µÏ¶DêjÛÎ\23ßöN§¹ø'ÈÔ\12»õýÇövÛýGb>lÒ7\25hÚѼ£Òü@ueá(±èÖ[të-&ÂM·Ø\5ÖDpUÛ¸©Ò 
l«Ò\20UeîÄ\22{‰-ºù–z-ÿ«MŸ\2¸„¼¿Øb\127±y;±%ª­œô\11`n*ˆKÄ\15\1@!9J\18¾æÑt;\5Øm4‘m\20t\13$nÔ\22ÜóÍ\12MÃÊæ}È6º×Œê-£Jdt«ÿ0geóP²Q B4›\30Ý>{ÓÁt2ãÓÁ\19¥íÑ\12\7š\31W\17EópÏ™¹Æ੹élÊ~ì\19sØD¿ƒ×GnÿY\5´nÅ1º’9ëð\13\22sc'Õ\0Å¡Õ‘TⳎÎt‚.\127\26\29*R‡õ\24ÁÓtX?Ï‹¿d¯r/1&,n_äyl’3=¨U.Ñ‚–=DQÉ}¹Hu|\19Gñ@u\29\25=ã;úÍwt›ïÝ÷º\7S¯ÿïßÆtÐ(\2\29\5¢ë¯cûôëО~µfvûý«[€î\23t¬óÕpÛÔvLmë½-Æ\29\20úýóq7MÃ~5ÝH\0238s§_M·Ó\19c¹ùÇxš³=á–uË—vjÊ\0008 \27ÔÑʨ\14':N²i^Lí\23G´\0\13þ˜u\2« `’â?W™ñ\ +¨Àf\15ïL\30Nöô‹¼Àˆ\0137lÇU\127„€‡ü\127ve\0148+Hm®Õoq\4åÉ4™.¤òÝe‘N\4Éî¡g½¦±?ã×|ÿ°š\6–W%G¦`P=\13usÇâ\27ðÏ'ä¢uuÝR\0233\8öÅ:˜w­N,°)jR¯Z\0\22¥‡B°ÕǺZØ\2Nx„¦š~3\1\27®Íb©Ž£õ:\27ëäü¬\6Åe5éíÉ.®oÜhÄL\27–iGyGóŽÊŽ¶@Ê@ælz5½…\15Gys„·HÛí\21ie\5Ó¢–yKYÿF\20rQ|÷IöÑá&œ\1\5L;Ê;êé\31Ï\8·z\0~\13'\8\3·\20tÀn@ˆÛݽüÀ•\30êŠC±4Ž¶™E!Àa¿éf?ÉE5å\20À©†\12š}ƒ}£»âÙ\21T-, ×E\"\23§.Û/# 8&t[Šc8­ÓŠ>)¸1&\ +Gµ~ÁEt°\3†ó@,¾0 \0ÛxÆ¥g\\\8R\9_ï\6£ÿÇûÿ­_riõpK»?Ø\"\17‘ƒÒ0o\17Ý–æp\26sIéxÀã6†\29\29Y‡¾\6óô\23Î\127\27†¿\13QƒÅy—2u‰”à:oË·¿1&ÙòÚ\24Ü£-\31KŠ\0ï®äÒEO*½~†\13}ƒGª)¼ÛeËÚá-Dz‡\\™EH\11µ’Ðjù|LýÖQ°Õ#2íù¿\127uxð«¥~»öeÆ\29S\0241\5¶—Qƒ\13V¿èî £H\29Ÿ.\20(Þ\14u,¨Ý¨Á*\29ßZ¢M\12\6~Q^¨–GÓЙ\19Á±mnÍ\5žy{²wå[¿#/Š8¡N»›\3¬Š\14Ì£‘H§46›‚i0\24EkK']Âë—\1\30\4¬EXãʘ\11kª2Š¦É:\14\21àRD°aþ2_¥©1tu“ÒòºÍ`úµ]W\17dºc‘êJΙ€]£Ñ;Qv˜\2”ƒ\24T7×Z¿îÌÕ¾\12]\14ÐíÎÿý\15µ´\27Îác6Àæ£\22»X\6¶êTœ¼\29$«á\22qLJÁ¬ú\1Jñ×yHákf\17[‘«]#.0\6趎ÊÕ›#Ç\9Bë\11G¶xØbŠh >\13 nÁe\6ÇG¨®Ä\13n\1ŠãyggÓ\29%ÚŒQøŸ*;¼<¢<ÿ÷¿kc+èU¶]Y˜äÇ‘eE¾™À¨ˆkž8±S9—ªmQÒ¿è¡?ŸL›I‘ölÆ8/n\27©\15n\27\28†Ë¨£oAú\27ØÂ\6,le«â\28ܶ\0075í\28÷& ýbîo¸æÆŸ'ö¯IÝ\12]žÉ™Ô'ð\2\12\9øJt˜›òƒŠ\28EU2\7݆¬?\0189-öùÍƉ÷?_`Sp+¥õK\9mS¢©\22ÎoÒÖ˜\5!^Âi\25D€Å\25áó%m³ízÁ\14­‡º·3Æí7Š?\26°»Õ–†}?\22\28´Å:‘Ôf\20{ .@\31`ÚÝf\7+¢l¦ú\0116Îm\\þÆÿžo$)Ò‘ü+n;…â\7ÉÑ\"çŒâÔ£»Þ`‘‡Å±©qQ(hØ‘&ì·»bûv\2ž\16ÈxkÍj£E§l\12'%\13mk6\"ëŽ0©<\6¥mÀˆé\7YZ?È\"ÑÆ'Ó³©â„t“~>í)¢²¦{Ø$[dÓδ7uÐi2-M€=®\18¹â\13Ù­¦\27ÓR\26G[ü›¢)\27Ú¬DÏô‡Ë\12Š\9¾\4RYñ\6¾Á\20 üju€)¯IF\7]öu’u‰\24Ñ9aQ»ÉµŽÂœcù˜KËÇ\\ðECÀ…þº£+:Å•ýœÏ¹à‹Šà£.­Þti¯¼\9Õ\30nŒ\11\4ÑÜ8Kßµ¢¬\29¦\6Æ\19X¿Z=.õ‹â¨·“)š(\31”€1³ÔÁ\28i¡£’_ŒZÔSIÿØ“´Ô±ËÚ§âÊ–ã2ÉM¤\21™ET…R€Üâ¹/|\31NÀ¤\21\127Š}\ +E’)\23«´Lrž³H/â0Å‘¬\17\22oY´zË¢¥’‚CËæ«·,Z?bÑúõŠ–ç\23\30YZ½]Ab¿«üà\30\15‰­¹ÑÒÞúWË=\\+Z÷âm]}¡\2Û¸fk”\2\20\1\ +ê·\18%n%\23L‚\24xzE²Ñ².…[\9ú¶”Ûm%†ÛfJB´«½À„!ÃzÈDÑý[¿«Ùf\30íP„ \26\18FÍœ8dê\13‰jÂò9ãÄ\5_.ü2®P‘Ôö“ûF¾q^ÝæQò\19³!&©ì›\20-%[[\ +¶¶~û¶ÕÓ·2¾Ã\17eAaÖ™!™¥ãz¢\23\22=VWç\26O'Í,4\31q\24L„V\ +Z\127Ñùâ\19͵Ö[‹´¶]Hf\16•\1T1j}Õ5X7ÀˆÅwëe\21\8¼j®ÂQB‹ëìx¼^\0QsíRÇ“¶Ã!B\13\3æbË%ÉÅôjZà<ž[\26Q›Ýèå\0@R\\ŸL.\ +ŒzÏ[JÃÖ/¸M-µÌ·PÉŽPV\17Þ\14¼n/Ê\21\22\0¦˜!t¹\24Õñ\0\26‰Q\14’¾j©Z±µ|Ñ¡­)\31Ú¦\11ä\19\1v\0014\15áa\19 ¸a {B\5Œœ\13iÿ.:ÚuœL±ª\25o`½É\31XF#§@\0164ÙJ\22ùÖø\14•—Ü\9y\22)L\30×â£\22Š|\18¤\29uE¶õ#€­´^Ê8‹¢TF\ +ÜÚŒ{8@PÙ\\£Á%g\24S+w,}G\9¸Š®¢ÊWï|õJ~Ïäó¶*-¿eYljqx9 Ä Y\\\4Ó-è³ò4°èÿ|œA\30Jå£ÁwŠuœ°ô\24cÚ\27g±éT¢Ç%ñËä.Lœ\ +qaöj¿:´\28Ï>N\\\1IñekÅ—­´Y¶ÖZY)^\13\1¹pÍ\0ÊP\23u<_ž\26h…;ƶ)#m¸\17¯\22½’}\1ºÙãH‡\11[\1•þ~L\6ߢÏô‰‹i ÙdbôÚ~pvÅó–çÔ\ +?áe&:p°þÀSg:‹rYúçCšŒdÕÜL{{áÔ\15e„\15S‡Å5ŒÑ~>\25\11™<\31³®{Ë\15®ð´ñ†ŒŠ\22¸\22?‚|R°\11\20ã…Uû\29[Ý‚o¥Ã¯µ\14?^ãXIŠV8Ÿ\\ÁT‚­\3Yñx\4Ÿ±`‘z¿C‹–¨«ô-6\12\\aÂ\\lþ&ÅÔ\6uqgô\30*\8À\23–LÇCC÷ƒ…ùàÚã¡©s:=c#3\14W|“ðCd&Á|4Qî@›haŒ\11ó\9ÇǦø\1À\24f2[\12Q­³t¢µó\17¼\ +½\4\14’ð\29øEG™¡Á©¥Ø\12¾H$5ò´z\7¼…š\27\25\4©„æ•\23,µH\23;×…ï¼Î:4l±\19<Æ\20\4Ã\4%\17@\0166\5@\127¢ÜRK\30\2ü\21é\14kC½J+-*4s\6+d\0043\0'²Â7õè>¼b\8+*MÆÈá\0\0ì[Za¡[xÿ”Æâ¨ØÙÉ¿xA€Zp'›ñ“\18\12¹Ðn’\13'\12÷¡p¤-û¤\25Ú©\5¸]\21G\4\29ƨo\0¥f„vÝÕôŽ¨¤>D\22|R‹€ƒ\1¸T§‡éd:›®¢½\"á^™ê±™`ovC½H\27êE\20yݹÿ÷¿FÎ+wmEœ\9º®öçm“µŒÈfê\2ô( 
Š½¶Ô,ÒR³HK­\18ø3n¿T\19\15\20iÛ'SV¶\22\3RlÇ3LZã¨~½ÝzÒßÕyvq\9€ß³ÿÊâÓéüDA±Aè@¾Ýžq/®å-veT\23ÕÛmÖ‹ê²âøΧ¡eœD±Ç㱺èhk¨Õl}%½å):M\21‚·í$ÂÊãÕôVGá­Ï½ùéסNAù„úÎ'2ÃIêX’Oçcí6•œO÷_¼pŠS”j&\27B6Ý©F\17H¾+øËõ\28v÷\0yw\27ÿ‚C8×ÝönYÂrª¿­ëãÚ\11@kk§—¹šp®Þ=\17<ß+y±„g¦–\15\29­’²­ËmB*á\9Ì\30Y\12·'Æ\\Á9@\19à\22`²ï9\7@C‚òe›·Ñ9\27û“JiìÕ\4&¦sªCg”7Acʃ6Áô\0080G¸1Âýù4@µ2îoÒK])ˆÊç…ÛJ\"H±\16¤Î\22ïáå3À÷Q¨n)A¯š)\31”¥€SF6sЗ\25k€?ÿ<\19îÞO)@\14P\2l\2’\17Ê–\17’M-\4¹¸@o7Ó{øˆx#Ú!\28F[L¦³i\9\15ËþÛ¯@¯\6w—Dë‚h]˜<ûçs!\127þI\1r€!À\26À9ë^˜î2*\22l7\20Ý›Ãt§¾8'‡` ÈØÀy\15ÂGUmE¹'û{\0154öŽÙ\9~\29/ÎS\31žp”¥,õ\22\0]NH-khz[¸©I\29#Ñë\30î•ËÏ\29:Ž×Ó#À\20ŽTã\18†&\"Š¸_1ó\18€Á%‘Aê”ÊF\31ê7Xég‰j\29Ý\0160é/\19®žf\30\27cÕ\29ȱpnÛƒô»ÝÃa#ŽÙ\1çy÷\\ì´Ùi\27\5ÈŽQ<¼1OËËÑVÊ䤫؎*!1Ò½ÎTM9á$Ø!p\26`4«·Mn~“úôôཀྵ@/ÏúÃ,«â\31\22geq¼j\0093GtYÅ>»›ä¿Å¼ò_b^?\14.D«”ÿ±ÿ\9\28_ÔDmâ—Æ hÖ\0005¹/U\ +©i!&\28|\4ülVv˜\4ê`@¹s}\1Š\23C\12T[µ”ÎgÆ×Éna7+NÂÍ!ÞXèzI²s²ÿ\28\2„;´ñ\6RÅñ2\ +3ûÅ`f\19Ç=äLIÕ\24\28(´JG\12¯²Â\0\3_‰ÃWÍ{©Qs\8œsÌf\\ë5æ}4âypò÷\11+Y\18¯ŒY+ñJýÓY{®J[Ól:(Ü\20\1§\8Y\"†oNwu2lEѤ°­Ç¦.[r¶ÆR^hÒ”zW¢\11ó\13k¶;\30¢™ÒÞ\12’Z$”3„Çÿþ…OrE\26rEš5æãš.Šwealø9‹nãA\30icšc}Áw\26\14Ù;ë·£G0Êäf=¶ž­› [¨U\30ïM¿£aG¨¢š¿F,'\"í´!àŠ\8p‡Ê€rZ„\\RVºØo,Nˆ¸Ã®•I.?@OÏ\28/~Ñ‚ù\7à¼KG–¾äi\17ÓŸï\28\16ƒ:ψ;ÆÙ5³]º¦4ù/ì\4uúg°.áˆÉNnÓnåX_O¦êA„¨aÙñô²b\0089Áª?É©?p\7ð&q'¹<Âb6à\22ì­§¦FÚ${M6“eQéh£J¶çV\8\17,üéà\127\15ñÏÁ¿„Úº\0…>/i/Á¡\14Ç?ðþ\3Ó\15T%Uàx5\18\11õ\1ø\11ë'£Uº™†_œ­g\0090“H\25V@ŒF„XªP\22Òùy¸€\30Þ'V›æö+Ä%;—ÂÃr\7fl&BhÔ\7J}Ø\28U\19\15sOÞp”a·Ñ.\31‘\2\21ùC?{\28T1ÅÞ\"Å>zý1ÈÿäœLdؽiN&UZ¦h\28“+jŠÆ!\22Å›ø\16Jej3Kx‚¶\ +kK­Ã‰\3(å\20'Ê\20Ïîz£Ú™ÎŽ¢p ýÁ÷¿ðð\23ž~0£‡h÷1egqrã\"\29:‹§ø6YÛŠ\2¯Ñù&m«ßf\23Çì\\KazÞU¦\24*'b\0`8ÜN\1ÔÖ4!¨$çÅž¿U^ÐÂóKã¡ËÚJv\4Õ~p\31-\28±š3r÷ž©­=‚Örq®¡•/Àd0îþ>â\23n1àÅ+µ[\9§e÷ÍRƒä‘ë½@\6‰ Ú)ãT¯d0«Ájú¡¯æ\18@¥V¸&\17ˆ˜póÖèÁ¦P\29Õ-\ +UŠ\24ä\0\30ñ\ +Å£\29p.\127ámx5\18çò'\4N¢†¬2FJGý1\ +ˆ¯Î\4RQ•%2#æMõ-žËÛê‰c=y¤^Ït÷\6€—\2ú\29\13vó0¿^MclÃë|™ôf—ä6¿º¡®avû_=“¬1•­Z¢ðÊ\0¢ª­Ì4Ú˜›\24ô0±En®\5ªš°ß\24œ$h@äbÛÜøÝŠ¶Ù‘>»Jp\5^6nø\27»âUWÇ*H\29äe' #\15 ®ÇvÄ\23m‰W\27ðMüÊ\30­í:v¯x‘;\3\6_˜W\29ðÅ(Ì\27\15™7\30²nØ\8tò/ŠU”:\17)_6Î<\28\26³]8[ðìÅ\22³ã§\30W\3‡.KX¬²8’g¡È5g6dh)Áa|Ö5øl^=¥J?\7ëY¨c«Ïq¥;“ýG‹‰\5‹½Y< \7åù^–~(R\0202Óõv3=v\6؈ä}Á\29éº\"Íx\0241ëNXæ”1ÂØØtSqR¶1c\20f9¹åg\19D³iQ0\14A\0301hƒ‡yI•g*ç\16\0_\23\23Ïj*A 1¦g\22rÖ]ŸÌ×\22œwÞŽ©.”ô‘M\29©2ëê~ÚW±Ó&:oH¶$žQkÒ‹”õ6C¦r\23”(u»(\11\127>Uã¸4‘\2 _\2M³)\0260¯»åLí²$…„ù9 »|á!óâ[ý²»g\ +0ƒLJ)\5˜ùÿ/5i\0080ç¼ïöhs,\1Ôö¾7%ñ_þ\31CAîN\16\8êNÏ(îN\2w•r·\11–g\7uÊ¢PÌ\ +§…ŒóÚ¥Èe€ö\2¼3(À\29¤¬æ°bC÷¸pm{6lûQ\19\17ahspÜ$þÁl’2q…H˜â¯)þŠ\5¿níe\30µpÕ\5ÏÌç0k \20±QØù·‚<\9Ýïù1\0056à‚\13\13H\2\30CEññ™ìJjÓëhËõr—¸‹TÛE«îFÖa7bâ­¤¼\"ñ\31\18B\16h\3\12\1ŠrúÁ§ÞÐúë(Ý-Ç3v\0 ¬\"˜\27Qì\17AY.|ì\"ë­‹ì§.r\127:œñ•ðZ§Ër/!‹úóát;â›ùÅe‚<Ô%*L)‹p\\\0130\3P\127y\30þû\31î´åáøßÿžÁ\16¬ÑGˆ®Træ¨?B8v<¾Üy.1Öñ|5À\26x¼Iê5\7­âGœòc]1f\12;П.ÖâÈÅ…‘&9¨U&ém\18೧Ç5l’ÌcwúD»\31;M'c'-cºÎªÃ•\ +4ã\1| ¶\14,¸‘:)Pð¼kø°\13×\127´R¼8À¼\8°-\0024Agû \27|¤†@þ\7ï\2#¿\20î†\13\5Sqý\16!{*cE{%T\14\7õ5èhiü;\12\6“élºˆj\0294\14gæ³Ò\6ÉáÕD\16î³uE‘„ÓŸî(ŽL…á¨Ø\26ÆÁµý\24/\20À†òQãp\7;\ +\23\\™Ãa¼ÉÛ¨$@û5Ì£]çÁ4Ì…”rÀ•ÈvÒæ§.x1\2×Ð\24wGÉÓŠª$êÞ€„µ8ð€«\18\22\6ÝÇ\7e>°\30\25\31È;„\\iÀ<:†¨k\5Šx:¨¬'\23±Ä|d\1Ų¾\12]ÔŒ&–ߤ‡\0309C±aL¸ y\11€5†&/ü\127º\28Â\11·d@X\21VŸ5Ò\11Ù‡ôÂÌO\18­\27)1ôBÐ8±lv>ã\"å¿p³µ’[ˆšSjhϹ<î™\26\20;a\5=J=:äR*¤˜Çh•äÊø‡\ +ME«Ùq”²©¬K«ø™v\14•2\23”=ª„³ §êQ¡Y¥³Û®ö\6#\7¶Ñò©¤£©X%#—Òð®Ð›\26‚xy£yi£®\31æã¨àº\ +SÓV«-ýG\27\0001'j^ ­~Ö\19žö†Z¸„A\127„R´:Âœ$„3n\127\5%Ö1/0®cŠÛöÁV¥«¶•¼®®ÜÚ‚ŒÄ¸s+7°É>ØlèЄ\23Gv1ÿ‡h6\16Ÿ\21\23q\29\11\25@ºvKÊ%€õ[f¾øLšÈ5ýà\13´Ì‹¸\\¹\127\28$Ê¡W Ÿ\24Å .J»ô\20h¤Ûã¤ß>Ä\127þ\16‡]ˆÌ·ö\30p\17\3ïCŠG´T;h3ñ­±°K»\"Ÿ\28÷$¹,‚K€[\0œà\11µaÕ\7\24\2L\1J€U 
a¢'HåÈáͱ€g×\24&\7‡ÞIøÆÛÜ\1X\30“¯p\3òTW1|\\T\9уtiùFnnm“]g.sui¯\17x¤°\24\5Êd¯âœÇä“£]Õ*ygªG#Ekø<ž‘ÌÏ3\22ÃÝŽ†\29M;*ô~9‰0]Ÿ¼ô-:™®¢£½}ˆ.6~‰JÞ·¢\9²D\24?;\15£BúGg.Š\16Õ¶K1£-yº§è bCŽò§v\22 <&Gò¹y­q”ø\22ŠZýS\"î“ZK\4°ü&`~†Wh\21\15ä¼O*ŠIÒšŸÓÅ%:‘‘þéÇ<ä…²ÙŸl2\20FüÔ\0\13«\20NI)ŸF‡ùˆ Ü®\16\"ó\12ü0E‹Eµ0-Ž‚ÅÍw\8\14MÙ1»3cÆÉʼÛó€Éö’¤©\14kгéÝT|í\ +t¨\4›®Õ\2½Ÿf(|š¡ðiFûçÎ`ÿ4\31ýsUCÛT\9›ëdk\6QsŽ\4Õ\20¶Ñ^Y°>ë†\5O‡+]ìþ\21á~Êj‹=Ù!£”À\ +\25@8¸×ZÁh^M\23´\9DœE:‘\9D›sR\\9©¾¯r8Ü\20Õ&A?S¯À8=-â¢\22ßO‚ƈ\14S§\20L07Eo\13Âmbõ\21\13\4L\11\25E\\\0H\21E»;o\14Z®\1Ö_Ò¾y\19í‚Þñ;K¸Z«D¶¢ˆ\28Ì\31+†È8ˆÆ`T,§X\26¯ê\4Š\0vM¥ñ›ZBX\30\3°bx(Ý0OofÂY7D\6ÏhBÏ\0136MDól@ur™º\"ê\23Ã9\14±‘¤äœ‰¡Z´\9£\26\9\24ú“\28É^•ú\8Ò£c`£*æ¢\22©@ó?ßo¦GV‘ž\3©(˜,ÅʧêoFôà2jÉ\\¸S\20\29U¨¸G.ÊL`Ʊֆ\12µ\11ÉŸ·«¬É\8 ü\25k8†<:agH\5\14™\ +\28ðe#eÏ/‹†8P–¼^o%åô\6 Ë\21DMØ à4œñTŸc™U<ˆÖÅÜ!×d.”í\1{õ¥Î\8\11\11dá$¼pº\\B˜~ñí¯\5UU\13¯\\*/-o\25Wr\ +Zëei­-ÝVÉ4;\8™J ½élZìL˜JÏ\"7‘»ˆcË\"ƒƒŒ2>D&\17Ç[D\22\17Œ¡KÖ:¿\"°€A¼š&h@ùZ{F\13°».bú.T^•\23ªŒ$¡%\26\14}c¹²H5i^ \6\7¦q²\31þ‘Ì\25¤úÁ2#\7gHÓÂN©÷Œ±{[¤Z;S%\6M=ÉŸ\127xÏn™\24;Ûä2-T˜•\23$.:B\\Ì&ð‹^ IÖ|ƒ…À‰â\26f1#ö_²fnÅ7 E^fj|ÈËþfÕ\14\31‚dçÑ.ÙÑÿ\21[b©\27\11Ý“Á=ÔYŒK@¦©ø\0263‘*¸¼ØbÜ]>.òûÁ_n3¶iXJõ‡\14\ +fš-Ñö\21\18m\0\13i\9—b\1’/>\\i¹J¶†\0·q¨\7d¶\23î?–\8ýe\14™\5ß\18óüŽ«P\14ò \5Ö\30µ3òg\8ÿMÕ ôÂ¥›5ƒÐ†r&ßz¥Ê@™ú.Xª\127[RÕ‡\29Þ\18HuHÖ½úÿþÕÉt¥µÙ@•È(\"ab(\21Á°`å\"Ô0\0Žþ\ +…ø4A2i%?\28-\12Ï9x\"»¢·#w™\4#)Ò‚@i\16ÁÆ\13»è\23\17¬tH‘›õ¿ÿá\14RõX\23\23¯\23l\14^W\28‰ÿó\23œ~`ëõ\0\29@$u\26{ÍXx×/zÛ+Y`¯±—\127åc:9øO›5 \ + ­\31»º\14­Ë™ŽJ5:)Õè¬T£NÛgüû¹Ò\11MµwvÔeÑÕå3Ø”¢­iGZ'A\26›:ÑuTuq\"½×\18îN/\7pÂ+õÛç°\17­A #\4 Ö\1ÈŒ\19Ûê\1/|è_¼,›ï.@\17Øè£6ûNJ4¬Ž\11ÂÙFç\29å\29\13;\26w4í¨ìh©Ñ†þ\13ðð;»Œ7”Ì…÷ãéù2\14ôúÁ£\20yZrµ:\\^‘»†\5É¥£êîD¹™N\0268j\\×+<_Y˜·ÓK%7ë\7ƒ\0059X´šç°(òºˆ¬$*±›N²\5¦§\0005­Fç\0\1wQ¶»\6¸\7È\1ú\0Ã\30n\12«)À\28@\9¤ \31Óö¦¤§‹ìq”Gs/\"×ÚÇé:\14\14>ªŠoèI´ðÁ¬ìÖ@Ÿƒ\\\23ǾL¦Ž¦6\13Ð:0€¼ªÁ$%­U›nÉK¥ùa3\"i]èz\26Ã\14¦\28¦;kC±\13\11\\è¾£aGóŽ˜2¼5\0166e÷_l\7\17>€Æá\27&$±¿´É–*¹V%G°¢}\0006°vŒ4\127L\6Ÿ\ +ˆ~w<µÊcäpQû¢r&\0ˆWt'‰SÀHy\ +\1ŒêЭã#×N¯²¼\4è2\3ºIÆ‹-²â{\0155qx»…nÍÍô\24žßTž’(–\29”CÈñ\29YÉ:|$h^L;ÑQ=\22Â\0²ÀÓŽXðÔ+þ+±,{\0\22K\30ÏúÉø0œa·H¼ÿÂ\16PžÒÅ›/5Ùôü\29Ø)²A7å+05Û&vÛº&cD\11ïä\17ÜLï¦áa0\29#À‡Álº˜:i_³“Æ7He%§ã3û†@fÐ\14¢ü\"Q¯Ó\8Ú½ˆ\\é­Õ\16J:¤c1XD;TVwäÊ‘\2Að¾¢aôÜ$ÖTöT?+P·ï\2LJ‡…žºhk(ì\16i†b \1¦ª§ä˜,T—=X®pàÛë²\24\4¤B\28\8w³hƒ­•A±ŸwÕlßkÀîÕ\11{\ +žÈì\31éj\22ÿð)\11T½G(\29ávÒ?$3;ÜÀyv Hv']D$“GµN°\19ŸLϦƒ©r¦î…k3`nAÖ\12tâàö\0?\27Æ?_ƒé*ß\\\30LÊ\9ßÉêøpÁ»\11ín\4î­ÃÁÝupS¤`)B€›L\11n\0«\5\22P |,E€W\17\4gaÐ\17§x\26GèþÈýB1ryÁ®‘¯\20éQ»\28‚‡ ¨;\31aøT«~\30F\21\31”³Â\13‡P¢¬‚¡\28e,‘\18•IÝ8´\1ºS &»R÷Rê©ë¬Än\14«ÕÀ]\13»\6n–¤¦î#€ªåá!û‘ŽjnÐZG\23\30’tûòJ£ZM‰Ç·óŽòŽ”\24+¬ëb\5\22¨„ë\26\0016ö\3¼×ÕùÝÑN/ŠºkOxL¶³¦¸Nšâ:kŠcÈEÙš–[üe¹ï(勉o\15\19|W”\22*ªº4;\25¨ðç½CÍîP<î\9§É6›m´-ú[\23]÷—.ºîo]tÝ.º¿ìß~ìï?pú.\15=]º\7ôd2³uÎÖÛÕ…î9\2Ï!³WF`†}\28÷È>žäÈ팵Ïéé¢-ÒäÁgöPÃ]\4½l'\13¬uµé¢ÂàÎò-Ç>ì\6\3-\3æÍRªÄ1Ú\1u;Zåø<‡*¥¸&ôÈ\2`‰§Ö8çh’\11-®zy‰óFuËní^*þùGºþ… \ +š1ˆ{\4€ƒ\30!ä›Ï¤~¹ñkÔù­SD¾ªº×´ù¯+ùeÕA\3oÝ‚A£¶ag\16U½\29Õ0ŸÏvlÂWÒRk\27¶\24;Ç\20íq›=6n³º6Ÿ\9ï°¶«Aϸ\4ÝQ™\29¾Ø‚œ9HY„Tl×Q±]'Åv 5Ù/¼¤\0‚m\28\14`ØØ0˜bî'­ÿ¯£-˜5 £\8Ê\25\0074«ê^øŒ\6Í`…½è%PP&ñE3ÿKs´\9KjŽ×\15xÂÅuÐ넨qÒS¿¨3¾\25‹ï\"«/‘o\6Æk\24ÝKâV…±¼ã\23Tnß\29¨™\15¾ Ý/ºÑ\19Ùe/|‹žd\6y0í\30\11pd\4\14\ +5èu/—\0016?¬\17Ø!\24ì&\25QÚ¤c„™\21ÛF{—§´à\27è¼Tâ¦ÜNÊ\26\29ب\13ו\0\12i¸pÓˆBÓ2r¼…\"‡®\4XåÂ\ +™©å‹1L(¶9¤PèÄ•À‹\30¸§[ÄQ\26ýþ-RÌÍâlË©\11çÉ\30ãzU'¸þØ®²Ý•Pìñý„,?–Tâ\4\3.Ÿ\11ôI®\16P1¸\5prùƉÐ\20Vs\14Ð+žb/|Ú\4é̪\0m>A§Ù>>]>\20HT¦Ð\\ž!@\7²¨\29ðQ\ +˜·Q\4½ë…Ó|mxKˆö݉Ðç¦Æ\13ÿ\23ño@à\31b÷x,\24\22:O”\29Džë¦\24G5\23ðõn¦­¨Ú\3\16º\29él\23J\19\16b\27.«wG3ÒS‡¹\22\127A9^ô®»(\127ZÛ92\4\15`a\9¼˜fú,\\“\01647\0030]\1 
¦º»¨õ_¼‹ºHt‹\20\\–‹\30Ñ«f\20ÂE\12vî\22³šŠáð\3\11™x\25áp\ +\4\20;L?Ðá:Ž\20DŽ”j\11…Æðþßÿî?\16^É}6-{\0086i ªzS\16\14{´C6ñN.óópºØÑ.¾:¨‡mEf\17®V¬\24¶†9£g6‡3Ê¿\18ä®Á\"!“bíÓ¼¼(ÈKÃå(î–×¼\29šË%!ÌE\11ø\ +0ÕQclý\17$\15ºæ:rs×\\õ²{õôÿ™º’ÜÆun½•ZA\1v­ÈŽ\21Y–H\25\18­WÒ.þ%Üa\6\25\\\4È È¬p÷õø5G®‰ÎÇF$E±çi^yæÖ¼\22dÞr‹Ó´œ4\27\30ØÌ\2\26\25š_í‘Ö\"Œ6‚æ¬8\28å\16€³ÒŸHGÛYyA[è\14oOÈ3Ïp\20%\8åžMøB\0027 ãâ\ +)B'¥ÔÌ?MY\24‚>@Ž÷çl¯É´DÁùƒZqiªØ\28Óš¶=2^wQt,èš65¸KEºà\6iÈŸ — 2¢JûdÌ{󂘭´4\12У\11%[\3Õó’`å~°½ûfî1ºMá\9í‚\28ok\30k§\23u\30Cm¾ô\30åý\27ˆ«–\12\16 <ŸB\8Jzìj'n:þ4™Ý\5E‰o`Õ&9ÐWü\0\13¯ÿïØê5ºë¯€êÕÃcÚƒØl…Îj¬}ã@LAM¯!»éµ\0\"mDùGC¥\18±©çAsËì„ô;úîfgÏ÷:ýÔ\30ÓiÓÇ&§9ôX€7½\6²Æª\0”ü‡Z{/ù!(\20žFSô~ÆùÒ[\127¾¹\23§Êb˨bQ\11ñ\0¥¿\9WäÔ=Ì_ÄY¯H8ËQè¦ØZ`\18ñ×\0030oÚqÕ[4ÁN\31.pž:‚Q€V\5\24®'S\29GKyð ÝÁƒt\5\15P!¬¦9\28:¿\127‹øÃ\19¼\25¸Îa§ï¶£Yˆ\ +‚\9´E“¦à!4\5»àŸ:\127\30‹·'~U¡¨\25X`Þ¸Á«­b\20‰dÉ\17H„6©\25ÞYsk!4:È/-?Mã]®X…®Œºd;ý=\30…\19ôš\5â/®´ÙAÏ_š¨|R^Q!ðdÕ&p¦+ìÚìAÝŽn\14\28¢U$hPÓ‹®p\"\20ChvhÐâ\20px'°—¦ˆ'NEüvüÅXî7I×U\13¹!8ê‘-¢Ø\11R\25D\26hŽÁ#Ñ\16`\9c¿Å~TÈË\28uý„`®N\26Ü”^ì¡V‡CÊM!\28_àQô5\3ÔøÂgT¶£RÐ\ +£\9ŽŠfç¨@~ŸŽƒ^‹ÂÜE\30\ +7kEãyÎ2ï\0008\23\30\26ʺW¢\3JR.O\27\21:{6³\18ÊAª”k¼è©™BdJ˜7xFO¿âXŒ:ãhœÅ<êHúª,Ýs(E»g×¹\28Ý]ttv£?gôgjÚÉT>Â\12h蔵Œ\19MÓ#Oøšü8êÕ‡“x\20'õÝ\6PoÉš(Fþ­\17õåRŒ\28M¡ˆ\ +\14|\4Ä–A>Uýwž×5w­„ª\11«Ñæ®\11\3EàDp×\9EƒóÏÞt`ÌnP…‰k\0kõaÔ‹n\11w\25¾«1¬ˆ“‘°\1n\12¹\28ºs\21T‰ÆòЗ‰ãIë‘®´¹ðK¦æ¦\21Æä\14`ý΃ô9“ò®¡™hDkxNSóAíÌ\7\8µS]8Öõ‡šö¼«ï\29B}ñ ¥Åž!+jP⹉ôÐnlt\13Ð\5è\3d¿\7µöâo€þc\12¬ÐnÜsE9÷œ—¨ý\24N­3gžOUšìLr\30Ù©¤\20S”\11\2«´\2(\13Ô ñ\26\9K*\28¡ô\0036ŽÜ\13ŽƒF1KM#cLzEÍ\13‡’Ÿ|cã\26\19Ó#×–\5w‰Í[\16hˆ)\16ï\18Í\14¸O?†]Wò9°Ú\28îîÏŒÖ\12l}6S+ŸÞT\11Õ°F«å©\12Ñ’r\15ÙØ\4-'n.F)jT\20\5R¾òQ—\12&\1D¦†Kƪ_«Ÿ,UÎ\1¯;ºíhùau4—\0ÜD\20.%Fé²\0153%\6iÅÎMŸÃ1wo\7ÆÝ\30aÚ\17÷ñÙ\16¾\7çÉþGµ9Í\20Á\5rÿ\ +^šK\20†[ÛfWý½ç\\q÷wÀô·£üíÐ7p0$?ƒ¨\127\127ç¿ï\9ÆL\13\13\12誈\29O}\16sÑJ¶Œ®B\15d\21dÆ\28'ÿõñ·³çÅ\17-\6ÿò;Ú¨6\24PÙh¨ù\28ßü}t›ì\17¿ý{Sÿ/ku?4™€\9¡Ñ\4…j\13]ÐzD¹–'øf\31^šíg€6À5@\14ÀÚªôù\22Š½@Šƒ…ZF\03077Çߪß\0075àß0jQÁï}\\c°÷­j´\27\127®ô\127\15Òÿm\0154(!Î5h¥P\4î•#Õ\127c‘úr\18áùþ›n\4ßx®Ü\11ýº2\0Çfpò¶¹Ò‘¾¼iªoÍŠ|ÑÅÚ›y\15\5°w\0Ò…õ›\21[\16uö\25íæÉÿ\27\21](&—\13\127ÞÄòð¦ýñŸ7Ê\127!B{R\2-•ªàN¼åý\ +}p¿òæÝ­À\28Ai\127kR˜´áð¤O\\ñ¸\7½6*NË\17¸ï~KX+KGy¤ÏTo\21¤.P\12âËiŒ\9}ÿp^ßY!«ë«\2—ríLGÓšé/¼Œ+™·ëö\25\7)eÿÉ(W\127û\21\31ˆ|¨­´˜n*0- \11Œú¶«.lÞnQ\13·Î\0307S\21«?éoö¾ô\19j\3áÒé­7§„Âx\31ûÖs!¯û\24hvPñke\15y\4XœíffϬq‚ÍìÿÒûŒ\0I5‹?Pêàé©Ø~\24Š\3.ϘÅÞ?\14j|ð®TÄ0i\30nKóü\28¥ÒÜ\27•$\4p˜k¼×š\23Åø~\22C#Q`eµð\19Q®\23ÿ‹¡ù¹£nG*ÅÀ\1[ \7˜\20i`;\31žÆ\0\6i³W¬ï>’Ò‰\3´Ç»\21fò\20¼™c\0003Sk*.\15©\127—ÚOîý\19²~¡¯Ì¥¹}0RêwK‰1Ç»_»ë\26°\6~;ö^7¿È¼hMé¢wÓÙ´˜êGN\7í(qØû3ÀÙà¢\31<ù‹|ƒ#4koÇàíç\19îÉbF·¯÷”œ“5øM‹Ó¹8æÃy›r\23€·³o6ý6„nöa·}3\8\\ž~MøuO¿Û\19ö\17œŸ~å\9yë\28Šß‡Ðä\30ð\22`ÏZwý¸u\26\28¶Ðv!áÖ\4ЙOj\29¾ö‡\14÷Þt0\"Âf\16-jvýÏ{ýÏâÀ|*~'Œ©bÖ²Â:à壞åuï®\12\30hïbó_]l–Z”\29ó/ù&šgêgÍ®;hžd’Û¬\31PÔ#Äe\27JÖ‡P²nÐðó\11OšÆˆ&Öô\29—ˆÌiµèTÈ^)¼ö—£¼\18•W´ì\18Q%»þ4W¤ò,ÝÓ/ÛÏo\6g‹•´{¨+\7·Û‚±\14\21Q\6\7`ùõk8RS»£|0Ÿ_\21D$íõ\17‡ü\9oE—\7„RâÄx\15·¢ÅŒ2TÛ>„òÆ@\23\22`ÙG9š\11 \15…\\\8žaiGyGeO\12\3ã\14¯OØ)!\15—\11;À3ô/¸¸ ¯¦{\26ÝžG§¯[ºÉÔƒÂâ>¶¸\27-.ê²\23”ãÕÊ\15V+?X­<‹§‘bÁ0m°¿\27ãõ\18%¬mòUW˜\0…O^ž¾Â\0n%fõ€ÇË€'¯©_yKýªŽ\7Ùb%ÿc8¾zöx¥I‘F<ϯ\29×n\0\16\"‡;cåözè°}\127ÕMñë\1S\3UÉ\15R%OòIò`ÚTîÈLØv^yD=òÃAŠäI™À¤7¹\127å\13Ó+\21ZVâîÜRýE%Ųž\21âð¤•O\0æ´4ª2P!=ž#Ÿ(`[›,U|\15R»0P\21ý\0«z˜{H[RŒtp1‰Uìþ àÿd42,\0è}òΓ\14¦Eáâ>Àº—qºNhŒ†Ma©îÑ‘¼¶¨\12¢à¢/J\17‹Ë6ØÖh\8ðMeèTt\14œ w\17\23\16‡x¤*Øx·ÓÁú5DŸOä@]àÑæ`\5WÞ©_ÿ|ÐØÌ\1‡\20µ¦O|âã*9Ëï\\k¸óí|u¾È÷e\22y@ã\14h£÷\21³åóÊ'>\11!ï$7>{^ðB @Ù\14zÛEIN%$zâ¿W‚\22T‰Þžôöäø³K?³ØäA„k;™Fð\22ñ·WSå{(ú\20ôåJ\22>‘È/ü×N²Ò\29…¤‡îð²Â![\2ƒL\9\12´ M94Áz€©Š\14v–\14ÂÌõ%œ\20Hýÿ 
õÿ$ö䊸û»›wÇWÖ\"oµe!\0–\11\127*ý–EmÁnÉ\18´M\0044ç\0}\0Ö_˵(éÕ´3½™ö¦Ùt2M‹ébÊ\ +ºŠ{¹»\14Ê®o¦Ÿ;BŽÖǽ£%\16¾\8\8\11?|b\127Œ$Æü²£×\29µ;êž\16\31ÙUú|5íè¾£™éOœ“qôë\28( \25dÔ`°\17ƒÁ&\ +ê[‰Ý¡.ºk¿íR£Û½ŽgäT]‹Ðôß¿\7DÊä\23ítPÞe\0138 “šK>áƆ\ +(½Rå\11{TfúZÃw¹þ\2‡¶X,Õ$Z囹‰uãÊ­ú€ì4‰ÞH{æÿ‹ÙK\31ì`Ë\7\2þeY‚\15\14®¥\23'\28\29O¨i#\0018³}üùÀÙ|g]ËÕƒ\ +ÕI/\"È«H+’DØô(À\7Â\4u°Ý‰ÓŒõvçÀÛÑZÔÐYº®‹3i\2uFJ©ÁÍ£‡Nªf\7š)H\1üA“øØim¡¾2u,\23m-\0122²0ˆ‹ÊF\19\6ÙJ¨tnT93Ç2±X‘\31Òу\24­`¸@\127j¾k?ÓñôV`RjÔ.Ž¨ÔžÃ \25l’£Â'á ¶cü\"{ÚÐ<°ºŠÆK\11 u»pU\0232U--\ +$öîÒlŽ«Nµ\11:uöýH~ÑêÆ\30«>c\11þ¾\13\17A§R2ì0„¶$\0\25V#ª?çF[…ÃízšðµPwp\23™Dj󸙉ª\14Hð\6×y7.ânbåéë¬:€è®—¬œ\0xlúP2„)·A°nõ€:²õÈ\12Âp ê÷ê¤\8Ì s\4ƒµ?~‚Á„Š\0þAŒ\9Óì`Å\0g¡Î\30ïŒ0&;ë¿è\15sý“ý#Na¡‰†O%†67\12Ô\13r\3€jÐA:A.\2\24µ­$d\22â€\0€9[Ê8î\0\16¹¦\27#\1DÖÐò({„Õ\28À‹ü¥äÀJ\11\4Žâµ\29¤\9\2\"ìÎ?9ÿÔØŸ‹F\0þ\30È/9FmÃ\19@^Uí\20lVI¸º\25N…ç9\0208ÅàAAT´\127Ëf\14;Âr´††½Ýû,È9\127à\127.\14$Û\7Ä\8G¬Ô(¼89Òm\18r‡«èƒQÉI|2jÏ\1P… ëÏ\0\\d[Ø)\16ê‚F\18Z\3Þ\14†Y„@ÁŽ+Ë\6}€9ÂQ]¼%Êt§\31”l’ª\29„¨Û`\29›ÎÂG\27\18Eø6Øâ£Ê%Àk$îß\13N}|Þ™¿\7Çù\0037w\3˜Ë\24ƒ†L††È\7²d!RÓ\30Z9y‡ û\12Ãθà 2„’%%+ˆ¼ŒC\24”Þá…±\18\25dtóÿ\1§ÁÜœÁ†\1\31‹O1hŠ·Þ†\29±Ù4\127WÜV>È(\12÷‡Ü˜‰ú\29\21£N\5ûó±¸\\Ÿn ŸL›vnäžä\6?\15éfÿ£@]Gñ?[×°¼tš¢Ð\18~ó\30º5|å®ï¸ó›i¹«ºfŽ5hWƒ¾Œ‡0ÃlI\0035KŠvv¢í\31`ë„©?8$?Ðb¥ÄžG\127Ì×l*Á™Š¾=ì}û·|O¦~ãÛ_ù/\6¸E\23GþmLE8ªø1ü\26Öÿû1$kJJ'íÎ\6jª)ô(jaI²ÊI\2ºÉs\3é(oš_1Rg\6æ‰\127ò@LŠ\0\14ȉÖ\1+y¡ã\2Üüjq\26n¢’ø\0280Râ˜\2Îd'\23[igŠO0$oÚâ}6¡êºë½`@ÙÙÅ\0pøKʉ3‰K>I¼sç\14#Õ_ứx\ +\8±ÒIäuOlÂ)Ž\30\18¹RÁ\31†žžê\19K#r’]L_M\29\15Ëe‰Ã6\1\\nŽ\0004&tg\14ü/I\29?é\\1aiÎjFWMZ`&1È'×hO/n-@”æ¨4%“’ \"Š\31FÅýCÚùá\19õÙ‹f;•Ã§þòg±/3Ô”“ÔY’:K:¨ 3½f¦23ó™ï²Ë$ªQ\29h©¹5å»\15ž²Sä÷&úí\127õ³AÍšÖ\127ê“ú\"‡\28Í\16Ll\11§ßÌc\17ÒùPw2µ\ +e角3\0052ÃH\6y¸¡l\0\30°\1D\15(Àb\0E\31ÇsqÇÃy…çË\21ø…øÏÛ‹ˆ’GË\1a.­\22|£Ö½£8Ó+°1\19ø €£tË!|´ó\19\25„uÐ\29¾:‘/Eú\6éø\6¯£Ç¸\30u\5-\9r¬´h¢hr\20+\23\16œ¦\29=\2ï‰oØ\7˹Ñâ3£®„Çž;YZ8:n\ +Æ'ÀŒ\29¾PfíNFMx±ÃŽÔu6Û\11ËÆÑ«†Q§L¤“‹?:u~ìà¯\26.\ +Õ\29ˆÌ«\17H§×\8­\\ü±\21 SŽ”q/òÀ<¨v\9B£À\20/ͳ_*,^Â\17àˆ‘íE\1¼Î\25C<^€ƒ*#aì\23è÷À\28H•ŸÎª¨t<«ž\0ØâRs\17Q\14Ø\22f„\29CÒ~ä\16§’¼]L{ÓA™S;¿ò|c8¬\\\2¸d`c᦬ä¹\7\25\0199ëG³À\"xsJX!C®\6=\20\30£ÿO\26]\13ãq2˜Ue£kÐÿ^’ý£\4ûG\15\27£„úGÉôCÚƒM5GkÎb­ñ(’>¼\7ƒÉt6}˜.¢:\2’‘(\18\28ÐŒYb@ŠÒ8)v¨LQ¾Q\12¬aõ\23€¥Ï”,\31%~/{N•Œ¬ä‘%T?¦¼Z\29vÙÎîÑV°,\26eC\6.Õí]#À=*ÐŒ££ÎáÆ;uVñ=iê£Í'\6#q*mt\26\4Nd\18œ¤@M\21\29³È\"‚ùqÄai}žød¥Ïl4Ü€5/²ê×\127ȲþÖ`ù»Ã¹\16nB?Äüò¡ÓE™\8\"¡KLT²\0204ÈR\16|Ÿ’¯XÀÀŒ\15V}õ-Š\0ýùøûd¾ººãŸ/^»Á°\15“ç¾­¾(>¤\15\31ÜÚÐ\15<¸X\13ƒ?ˆ™O“3£uŸê•³È(2‰°\28°\ +qæ¹Ò‡š')¿\8Æ\26@¥\26^@Œ=\31“Ô+À‡§`\31´[Ti·ÇàöÃëÜ´‡í (Ì•\5†ŒÙˆ×\3²Î“[eÂ5?“z—\7'{z|“š\25ácv¹æ(\23™jIUð:37¢G2$\1¸BuÌö!k\8ƒMón$!\14_ÑÔ˜úOqýKª\26^Fž\4Êz\15Ê·T^ØKèíúÞ\\à-\ +,-ÁŠ\"í\26x«Qål]²\7¯ž?dºÀÀA\\\0\1‰àC\7||§\0147\7^ÉÂ~)\25\1?Õv?Õf?©\1X»\ +\ +\17c…t\18a\5Ð\0\0167'bÙ£Qž“™p>‡bŽºŠ¨v\0~­pUe´\25Ež`Ý­YHˆ ‚¬¬³²ÎX£¼Iõ |\27\7ª˜ù0Úý!ú°ó‹ô®¤îþŠ{c§rºSÃ*بà-Õ“²F4È\26\17‰êvj®¦·\31$¿’ÝJtâµ.6<\\üîF‡\6\25\29\26(ÛÿêdV£°$\3\0G\ +€\30T©¢W>Ôò Q¥\8©½ZC#€\11:OJ„\3s%\18l@@1]é¿\29\21KfÓvè88¿æÇF®Nzé\11q¾¦\31WÆÁååÐ.ƒIh8ßGµ¨E\23Ã8-‰ÿµu¼ô­@Q¶´Ç\0óŽ´ˆb+wßè¡PT¬ ôã…ðÑZFï§\\ƃÜd+¹[°Wæ€S€Z\21w-*èƒu3WW\0159!ösoxwO'\23Ð4QŠˆâ…¥\0197¡w±VË‚iGÀ5³Õˆ\"ånK$:1»KW\8\22\ +j¤^=ب<\14ÈøÅ^JÒ›+\9™U\26æÒ\28õj1áuÕ¬Y‘ö•\6ÙW\26l_i\8»IÃn7‰2—Ò“i«H”ͤA*’A䎘àCÁ•°Ê´Q §ºë\15,T½^½mz\8\30µ‰ž\5djh‡› ¶fôj–\8ƒNÃbý\24åD¡\0068yÚ\8ùŸ»(ºs(7¥¹¢\23ÆÓ©'4`à\31‡†S^W?èFÓÀÝQûS”‡ÒÅW>Å7>¤Ùtú!“\22è7ÅÚÒ\ +•¢e‰+Yì›Ö+ÃË)HÛ•¿ôT\21êR\26œZaù1q\22Λ¡h”Ô\31\11kTƒm\25\13Ö¿„†Iù\"öÁ:ó+Š\4‹\1¤¢ƒ:E¹‚±ÐPÈ\12\21³ã•P`Q¬¢‚‚>'S³ÍÆ=q\9FOÁ…â÷…\19háy\30M\28\13Egý¥ƒ\0=¯ìyÒ_:NGËñ\20óÛÊÑ\16â\6\22Š\23æ\15{Áª\18V5¤Yô¡\11ˇ”=>â4‰\0l`\15ju|$Ú›¯Nô“ú»®*!8ªëÆçÀ'Vñ\15\ 
+L€pžxd\22Ž×IJ­DR”Æô0Ũ\15kJ,쬣=¸ûIt°¿þ—Ç7Û]\26dwirˆ/Ya„)‰ÌŠŠãõÇ\28\28\15a”©z‘Ï›¶™”\6Æ4Ûh\18í@y\\óÀÐËÍ\11Œ%ÕŸQŽW¼CCIpN,8ì$Qþ­¶Û\11÷_ÒÖ\4ÚØÉ}Þ\23Íhƒ¶ŠÖ\30\28¯e<®\5àêDdlâÏ×MLç2Ä\0162ò¨—€Ø%ÉüŠfeByIé\18]è±±N\15Ò)ú}Ä©h%\23&úMÃ̃l-\13²µ4°ãaÖ­+fí‘¿­Öõ[\18ù\ +¢,“ûhu\30B„ç;t\23Tt\13ÞvÂÜ<ñì—ßíu\19_fEù‰Bî£BqÐ\127Ë°ý\16–Q\0218o‘\9U\23€6»ÏžZ1ï=м£òŒXš'잪n‰†§gÞ=ÿJª<¡ÿ\ +ñâ¸R+HÐí¨ßÑ°£¼£iGÅ(GºþojƒßT\25ëÚÿŽ8!¥À»\24\24ÅÒ?žüë''Á†ó-ÙŠïcÑÿä:N5\0^®Ñ­—\22Å^\28¼É‰Kë\3µÖÂ@–NSþûWªZàƒï®îáÜ4Ëz\22igç§(7Vÿê\4h¡V´:\7Ô%4$ê\23ØÑéé\127ÃÒ{\9f£EÌF\\jSÓT\5ПAÚ‹f'D\0228‚‰Z7\9k]ÀV\23¹É3ý¬Š8` »\"N§.¢a\8ü\25¾³\1ç\27\6OÈŽ~d__Bs)ü¸\0X¤F\19_#–ÆÅ:\1\23_×.ZÀ-f’Y\14ÒȳXñβ˦׿\19ZÑ\22k=[¼ê[À˜ÿ!ð[Å\8ùÑEÒ.œÔ-\127]¶.ºl](/†ÃèEbZ\14Ò ¬7Ø´\24\14æÅN/‘Ah!ÏFÕq˜\20\24\5Z†Ó.\15ý\19ºp]BÍßbÖŠÅÚþ–à­X¼Ž[´í#•?–kh$\127Þ7ÜQ]RÊÃD»hͶHº`±f²Åw±\11lH)\25Š\27,â\14\\87פ?yȸxQ¶Ä¢LÈmåÓ-äs6]LÙÚ|-»Ðœ\\%w:îÄXU-\\UÕø¸~[|Óºø¦u1{ÁZ»\29\24iVu”ã°²£ÒþWE'ð–‰N¤7G¬\27ÏšÛÊ®C\15˜/bŒ\30×\"\4…\17ŽC$Ns>+;•hr„Q\0ß¾‚‹ïfÊ\13Ô*ó9\0028=_OO®)äCnöJ:\6ÉÜÊz\8¥„@:\23ZOܧ3kì.jd.! &\25[ÞU\0273†\23æÇU)Ò.+Im\27µÊô\127þ9+\127šÅK\\}Ð’ËÅt°ÿÝÔê“×Ð\6¿Ãô„EïN\17ÊÛUW¹\17©Nã«\7€•\3€ÊÁÿ•,t¢WÓ!Š\\†(sI¦³i1]ÿ™Õ‚‘c\5OGýhÔOÃÕÈ*=`p[¹põÁ„·6='\15P•ÞcË\14¹/\12\9 Uê.VšÏ%\25D’ã8ò0‰Ì\"E\4ƒÅ*®v\11\18­dÐcþd(Q®äË[\27›J©AZ>¬â1Yuñ\26ætè?_L©\22E©Ìšè×f¶z\12{\22yŠýš~ZǬTA¢@jÖ\0M¦\20k[›Òèí%2þÍÆ¥Ô¥qk…,ûÏ\0009\0ª@\27çÕ\27çUc/Ã9ò*\13·;\9ý¯ÚT\27¸\29Ê¢‡ýœÀ$…OBÝŽúˆ6\5(ÍŽöD¼¸\\¥\23Þ`S\9ݹêÂDímq>K³ƒ.@\31 \7ˆ—tj¼R\21°èdÊÆ€ÿúê¡tå\9ÀŠ\19€Z¹ŒßG\18K \1´¦Y”Ñ\14ÝÅñØ\14!Y„fÚ\29\21¹=‰(:\0155¦®‰Ž7««N\21VÚNFüä<©§fÅ\25\3É«âj$«Ô%ÈEÁªH2\13­»dÆÚMNlrôI\5Ö\15ê&º’¹X1Ï‘(ãÙ\0262ºÅã\18˜=Ð\8ÕEÀ$ê1ˆG\26«ŽDW©µ[9\31®¸‹…8Ášaà“ªÿÖÃÈ®?^¨\14¢:ù•c\127Rg\25ûAî$2n¢XŬ£Z75\127®š^AìYD\22F\4;‡\18Ts\31=ÆnnàPÀ7}°èb‡ZyƒZ\17\2>õqŸgÓã\16@_‹\19\18yà®iÕ°¤\9xåñÈÊùw¥XÑ\17 9Ü\17\19\7Q+y¥VÎÊëŒS‹•'š‰üÿ«´\9­œhê\27\15fôàÏÀ&Ÿ®«\8&Å\7NzYš‡ÛÈÃ_‹\3\17|\12«šÖwYä/uýº±»šN¦Å”ß\20}öÛ÷FŸïì\31ÿí>/é\27™yEUp$Ãêüg€k\0HH®ÿWWAë&ÕF?†Í|Qð8‰è,lkv©íÍ'}VõBM/$ÇY\7`›Ô«n\15é\29Lµ½Õ¾•hP6¨¤\20Mâ-H\\&m?~%Øaݪ\27ç\13I–U\19Ä#¦\19ií IÆU“„#\"—Öúšwø±ÃGE-\24`\18Œ¯\14'¥Pwåt×}\1ƒ'9™Nô>ÖÍ\12Ù”\15Ö&)̳¦§åÕ\20VUSØRMaBU (ã\0035­\0025gyàð\7ìWíDww‘7ΊôÂ;Rè»Z3ñUãd:oJýcŠÔ?\22ÇÑqh\17écýIz&­KZ3‡ÓõŠxþn7 ¸\24¼Ä\8†ToØ’)vR\5÷0ËG;Žø\13\18›DCèq7\11ï\30U.ûœÈ¨\31™x\24Õ\20êøF\29h“äQî\3•\14“ÀWf$»Áˆ‡Û0ºÉÓ—ÂÔe¢E4•\21\6þ\2ÜYea`Mñôу\ +\14¶¸„­)0”m&\ +f“ ÖÅ7\6€\6|õe8â€'Æ\0fõ`:\11]˜‰à\9ɘ\4ÂÞ‘Ug\25×md$n;Ñ2šN~m=ñ\23pÙm\31LKFøó£šÄ]\17ï\24­“\22åøkäÁ!™\26S¶KÝ£‰*Ï»_„<âJ8©z¨þ„ôÅ\20c:¼ô£Ý\15S\12),éÕÂL0¦€\\&24\26tÙ\0Ûù@\24g+FC˜ÀÐÈ×i†\25\30êyu\14좇þ\26 \15\3pt˜ü\13è\11\7]á\17ÜY\13¸\18¦S­\23çŒt²mCÓ\8\"áZ¬]\13ê‚\23%£¾\"„õ\18A73\18o\"\24»NäÑ'Â\ +ùž’löÝ\5 k‘dv\15D\3I\9Ã/i7¤§÷ãø\"ívñÒ_¦îŒ•\18OÓñ\14T]¤0f':Ç\ +\20‡ð«hÖŽ…x\27œ\22\23¡5„\22ãE‡C\0ùQa[:ùJÇà5@\27À9I©GâÖï„Òûº\3 tþ\28ˆú¢\25\21×ÍÞâ\31ø×\15\13ûLJ†}\14\15\15rf0Š{\16M½Õàí A^ÌOôÀ>¨\1Ùþžo¶_P¡xHÊöÖÞ„êf-ÉÈ\26~…P\29à„PpˆÕE$tQo‚×Hºýé,ÚS€=è¼û8÷¶åËm·¿Ôµ\14©UÂ\27K¤Ré(‚ã…¤\27L¾ˆš#Í{Ò_V\4µ+gx\0µR˜2È\9%æ'}Ÿ[\11êãoãÑîq {jrø<\6\1}y\127’¿\6\2€Ód&B¸F!ì†ðRýAÿ•=Tƒ\12\30‘…Ø\9\0134òJŸ\23ÇQʃ•³¤°ffp\15°‡áƒwSf’à8\5\24•æ¨?«e1+0©š²?0G\11ɧE”=\22†²\14,\23ôÏšr#*ØÝ\13pG¥t\26S7¯ìFU=Š\1.QˆT=\25k7¦‚u¯3(®÷,\27\17@\24fÂ\11ÇÞ\1?\7—ûËTUN\19»—'l\13O·Ý¯?=ƒ•\27íf…ìË\26è\30`\ +ð\8°\4Ø\")7°©%Ì=ö\14÷¸`±\14ø\17\16#\14lù‡á˜(ÙhVËþ‚#¢ðQq'ÕÑDföx­D¤‡\1Ç\20šÑ:*vãtpN¡(]äÕõ\1Ž÷\29avÄ{ØF\26€½Bi¼«lýØ\11ŒÎÌß3¹OÀ\18W\ 
+j.”\8óàÇl,×ó.óÅÏ/×IÕ\30ˬ˜É•Ltß^lr•ÊÒ¬:0»+Æ^nqУ›\29mä2èÀ\0¸à¡—pß•n¸ŠŸ\25öY\127x›y8\31¬ú±\31\127èòCÿ<\14·ú.¬>é•jVféqƒÑE\17V±ÊSýÙsÃYݹv\"ëƒ9þžÊÈLS¦YòY6ÌSû¬­ê‡Wv\27õvŽ\4f•´Ù\5­ðŒ_\27’z\30Œ9¹Ôþ\20žâs3P_ÕÊ'{ÞB[z&\23Z`ÛN\6A1\24gyL\13Ò‡È]Aª˜R\127\5\30•Í”—H¨í±D¤g\"\27­&•±1²™ñŽ˜É=“G&O¾.–ʱ˜¿p)rª¨øÎîžfwKǺÃì~h^uû\0=íGIÞ)À æQ#e¾¡À\17”qÞµÌÀ™9\19\21¥ÝYº{¾‰sáµFÒù\3y œèäÞ¼Xo½Uvz—Óy?)Aw­Ga\19ëa‡O;|:²ê\0¢¿\8È\ +\19]G£×ð|s\21\12rM!›³\29\20\4cûϪ6`}–ûêý—O8×zÖ:gôˆ\24ÈωZ®ˆ¹Nð6\23\7ÏÅÁxÊË^º~¤Éfð\30\127&ð\17;ø<7\13¹\30“\22\28\15A#¾Œ™ú±qÄ8RÖë«PÛý´ƒB\27Z\13Ž½\6ÍÑMutS\29ÝR{\7C^‚\12BöÉÁad#ž_³ë[¾kãÕ¸\18±§N‡ªzy\22¹ž\16êrôì—\19²;²gŽ’.\0209±\3–/\20S\1\24g{\29\23\22¥è±_Ôü:zÖ&u!…\24óa΋ÐÜíH m\ +1…°¸\25^»(´\23ƒê\28¶ Ñ¥N\ +@Ђ÷ƒÿ(µÚ­ z<óôÒÎÓK9O//è\0206É®á\13\22œplYËF«¾}-°/ÎGîöfÖeÆvpÕ\17§=SÊð’\7Žáö²\18¨ðíe+P\7¼³Ëâ³\27Ÿ=/\14ì^\28Øó8e÷˜ao.#ýR\16ž¨!áîÙ\6T¶y\8¬KÉÓ7©ªïê{øŒÌàÆ\5<ò*œ?Ï4|b«âé͈§zà'×ÊžÞXxz\31¸\8ƒÍ\17Ü“\27\2\127žo3\21øý‚F4D´Ž”³Ï\11Åeñ¬éÔó\"\127‰è\16µµ\16t0á}ï&U6aK\9Æ:\8ª.­Ð,‚Œ¿¨Á\0009M‡w;ÄX\22¸ØüïK‰óô\"ç“{ð\4?vz”ª\0090,c>YzŸÔ±-œøæwÈêÝáîýÏý•^µ\12ø¬+6Õ\9_\17•Z‰†5;~ŸÔ©‹ M|Ö›´ùÿâ½y÷n\31l$etØ\"\127¾®Œ\"i{Î,ü\24¢;F(\28ª‚4\29“>‘AnØZ‘0²¯5΄£Zn\3\0196ÙŒöKí`\127ž\31§ò]\16\9œÌ?‹“ÓñÑ\7BMD¶c¶|P8žFlíE°ä\9ÖØu£Ñz=?4,}béúÙ8ËÔ,?U\20òòâSj¹\19Boo\2\30y!ʺEE\5t'\25WÏc¡Ç‘¾ ¹›°;a±ðéu$`-¸Ú´ý%]\25¨‡O©º&FÁBÀMµ™Ô&]6˜2\9™Ì$g=sŽB…\26ÜVzvºŸÖ:6Ðú>»ª•\5\11‰\14áü÷¿h×Cm\28²ep033£«\"\11&µÑw&œQ›ÍÙrÍF\\·\16­êƒ\21‡gNœž?:ì\127øðÃuß³\30Q\\Ÿ\127Y­•lâx¼ˆÅÀºð³ÃÛa«ª˜<@ä•_\23£Â »dwK~á3'áç/+)QÎø*ăúSóý…™WkfM8ìÇÌ–\28¦3Y'w³2”—㪣îß4¹f2d2ÿåþ™\13×#¸ÝoÅ^©ˆã¡¦–&Úäyjyð™×\5Ÿ\29Ç<|f‹ßq€#ƒÞ8¨$«aÈëw$»âòí\18ý\0110—êž\29kÅ`Öd‚µ\6}\5W‰@^»Ùò£ß\31ÁŒ®p£kÖØ\11T]F¿ëVä„ÀŠžHÑeÖ³ñ*¼šg[FŽªE‰\26f‚áTˆ‰*¥õa’¹XMŽÍä•\23N®Ÿ<~†\15‹8\29ÕiVj®SW±Ëd¶›J1ü\29qÑ÷.n!8#yzáŒQ\127ª€>\29\5‰Ñ™}dÒdrÍdÈ$f’“ì™\19å\0253‘ÕÆ\14\24ëdóœ\9r»w;×;g{·w}n¾xk„\12p]/UðpÔ.‚aû“LJ\8ôá\"Ø»\8‚tÆÞX8€Æ!6ŠK«`ZÅ`tÌF?æ·ßù\9½æœÏÞ9×s(…x>®v¨t-úœ\ +:n’&‚n¬‡ü‘ƒ\27‚A·uTÑ\\Ýk²å\22Õ“Ú\ +\25õ\19Ž§\14‡˜\4\12úŸÃPÁ\1[\14Þ×P°?»¡?óºÞ“ËxO®Í=ušâÏ“ÊP\9ð\25.Zš¦î\"6È¡¢º5\17†’ÌOµ]̲ˆª\29Ôg™\12(‘#”\15©£\"aÊ[—p\"¸+‚\00816Z`\16gÕIHšYª\19E\24Ú¬Ë\13E\24ÊÌán\2\0052ëŽB²˜=k…üIùÕ:“.“`ÇUvóUÛD\29Ðì•r\27±á5­~hoÚØecƒ&\7Ï—’L\25ŒEö};Ø=\28ŒõÚWO\31Ìa¨É“bMÄ9ÍF²\19,p?}ý\"\8Ë\2e.W[¨a•p$›e\30YÃ;¢†)Ü5ªÚØã£cÅ£Ÿ¿drÎVU¶bu‡ÉŸ¯³Í°Ðdʶ\27¥›1â=Mk#v1Q]Ì/*Äb¡ŠG#\26sq²úL0é,¢\25–qžì•)¢†ÒÎ:»\8NØž+ËrA†£XÀ’o¨ Ç—‘ñM`,j2òŠY~Ší/©åê›lˆ:\4ƒ˜\13˜I0a\4öÊUƒ\4Žùú2\0196\7TðUÅ—L†_\5ô#ÿ÷;ùüï7EÂ\8©n\3Ó\20\22ÐÊ´#ðJ!=„A¶\3ݾ\14[Yo4Im\28`’=N¼\0ƒ\30ƒ\31£\2Y\"a•ågCØaw¢ÃTÃ0\31\"©Kc¶a4-?Fr­„Œ¡äȈÁ>CcÌæ‹pÎ\1Äü®H“3¤ö€\20ÐèÀ\20ö™*ž©*mB#•¦¡\29\0Ç°4U\30\ +8µ2i” ’ô¡j5dLú΋Óä¢hñùF—3¼žðMЇL¨õT3ž•R€JÙ‚,˜Wœä\16ìL÷Ì¥†Z6Ob/‡S#\24\9\14+Øí¬ÇÙÍ6Ž2ŽòºÚv¥íIéø¥ˆðBf\27ð’äÌd‰[™ ‰&›þå4ˆMÊŸ/½ó½”–Ä”\27ï#ë`G\19æõ»²ú]I\ +\5*wGÁ\28ƒûÿ~×lú\18¦žk\7Êwí\28ù€ž¦4Ð#+$Å•¾\13!ã\26K\2\11eã\12‘°\ +QŽ&Ù\6›\6ù™wB”ån¯.õüÒo{\1i²ÁMÄe”b*Äyé+ÑþûM)Š\4*\11\20• 0äV)\6†\9¯Óýd\"qÌ\0227ºdÝäÍ[\9\7lø\18\2A…Ùƒ\15\16—½òi6hlÐf\3–‘w¬\11£°ïìbv\24,\21C…iSʺAÝdf,\12¼ë‘&\13³BWj\11‹)“`‚Ì\30z–á•-×€£ÐB½|œõA7\127Ð$˜í\ +;K@LÀ…Ñ\17zΙì´A—™\18\18©©\11½€-³•;¦\0fÑèÌ\30ôÊè[ñV\9ÕzÝr‘¸9§RW–žîNMo—’¨`èœkrH{ï›þ÷{ÒK'7\1\19vt\0*\7“\2×\6)qô#\"EÅJ\4V¶@1\21á \\ü¸\18qf@Ø\25gãN¬Ø~\5·„¡b(첂Ì?õœjþU\30¿üb\12Û¥L\18›.Aœ\ +P2cb™~\31t¤ïw\5ÒðcT€B.?Úà¤Á\16…,GÚÏTt¾\21½‘mƒT¬êY\14Ù\1†Ñ#Eš($h›Q\8÷ü\29wºÅ%\16r9É|Ò\27 °\1œ\29²J_ 
\30]\16™Ÿ³sFÊßÙÁñ|Ýð’\0253‚—Ì\24™¬óȵh“k&]&ƒ‰C÷]M`8ÿLä\7pÐ\8=Ô\15'•wd°Þ\29\29\7õÍ:ó죜óH\24cÍÄ(¼;–lvßì—©›(”›Šéjã‰5ÀΟü²ÚC\9|\2\15)\19á\29œD¶%Üj0¦!\0ߥûp’\27W×èÚ\26²¨ÛXäkQ‡°è\3u\12‚¸°š£á஺n¥pW¨6ƒ]ÀÓC¢gîoŸ®ÇÚ}\16f›¨æâéæBû\17${Åf\3‰÷#þû½ê>…ò‡2–æªÀz`m\20U\13Zš—}ÞŠãï4}âBP‘‰K\5±hG´£)¬…­\31ñö½ì8VÚy%š0\24gc4.D®wZßl—I8\31ìjW±Ëd‘]u1¾\27\11{¯úlÀ7W|1žÙ¡‘h\20²»o#Ùå\22Ó\"“\28\28ÛQ^6š#ú•_Źaf1‡Ê\02173…‹…\12@­¨\21³&\127¹Z½\29B÷§l’\3SCH¢0š¹5öF%¨‡4p©w¦&Ä\24Œü’\29«\"\4\25«ëݹ:òßï×}T°£<ÊÅÑn”8Qk¢n}÷­Mp…‹—.§œ|œìš1^®¶»«m\1å¬\28PHKkÕdCÕ6Z·™ô‡áÇ\25_+Óp°ù`1{V²¨e£Rt¸˜q8\14\4Ú-ÃÁV³ê0úÊä–É]áh„“È¢l¤ÒK™°úî¯.\0041§Õ¢š\3Á7¡K!÷ÚÍ\\Ü¡ŒÚ&üŽ×}þU¬i\14™F„k‰º¸JpŠR\22›dç.=ò²7<ÌéÁ§^VjUK]\3I9\9Qñdƒu\0228N3un¶]Ê\0266\23,\26\0K^ý»b\16>Ù\"}›\\Æ2“s&m&c&AÁìòì\23ã\14{>£\ +óy\16LÙ8d\18I°d³–>\28\1ƒæ,\7ÍÅh\15=°\14¤Q\2dÏS!g@·\15c£\24âö\25\25„l°8„»CÀæ–L\30Ĕǀ½\20¤\22™ˆˆq\9bå}Á3±\\\9\24‚Ѹ*„Ô~»ò>\19<ó3RcQÉ–—›€áL<\\a\7^6ó.\2…9\12ù¡Œ«7ÅîŠ9\2\17wÍ™ \19\\5\127ãû \7‘Ïcg'8)O‹»à‰DKã\15Ä«µÚ\14\24¨\\´¼*“8Ñ¢• *]ø[©­D™ëˆe¬†\19¼É.•YmŽ\7®\8üf°\17å»l¡[}…>Ÿ…Ëdç\15¾\1Ç\2\9Jµ®œ^ŒúN\16$Ngå†\8ªŸ”n¸\5CÚÊ™4¸¡\24pÂJ˜Ì\17¡B|0ç©\3\19D\ +ª“Óß+?~PzòÊ\12\6N\29Þf7|ؤ\2?³ž‚¬'F<`Ån-\3Ž1%8•oJ'Ý]J“k\20¶Ñ\22\12.@:Žæ”´—)“`2«ð\4ìD\ +Y:1xúåã›â\3u•[6ƒ8\31\2§ž\16›Å÷ƒõ™aP”©jEx+QÞCñÆf\5Ø\25\25äÛéM8÷§c£³Qõ8TŽáW­(|µ/™Ø%ûf¹™²Ipº©u\8’Õ\17k\24ØÔ¢“žáATGØÚ~X³ßI6skÔ\7³h\5ž\1¡y[œWc«4@ïË ªƒgHèÀ`T|GÅ\8\"—À4îKŽf¦\27¤ÂS\17Ksî\24ùÝ\20\8\26Äè/àd²mØ„áE‹³×Õ#¸•ç骵œÝ]\20³ÚúYuZ\18”D\21\19k¹1ë2\9bU+\3·à³j÷œ›î¹Šö \28›%0›†l\4Wsn5g­ÂgæVÂêZ’;Ëq1cfå-Vø ¥&7AÓ?&~èØt,dÕFæ\9³’\ ++tøïø?ð?eH…v7}sõ\6GÅ\27Æf\21n‚ÇÃŒK¯Ö\ +½Û\6ƒ\26s\16À› üÒf{ú°ª†þC>²\1®¤\21UˆíÐ\15sŒ­«š[NØW´aÈ.3á QlÏv‘‘ã\12¹Í¤ËdB°y|/†A’Ø\ +¢µb\18Vʪ®\11:¨õRtm\0…ÝèýȼŠÇ…\8Ž\30‹v¥K‘E®\31„­ê\28ý\13â\19\"ìì«zcj×\27¢„\25BKã4\5~É\4SGì¹Ò#\16âç«R±=\24¾$M\0310\24‚Vj\26C«ºI\15<ä,eÈ™ReI?ŠM§ábúÇ}ÔÀÒXñk»f¶GnVÃæª9+„Šá`Ì\14þû\31)ä\7(UX+å°ßôßÿ:\22&æ5aöÛ¸1d6ŠÄl‚\27^lò•™Ý¬/Æìv­˜q\29{\25à\"s¥{\127ÖÄ»¢..š8YzfÝÀÁFÂÒ¨ŠUžû‹t&*‘\20×Jóüµ\26°U\4`y\26^ÛBö£©°\12Œ×@\25¨„V‘\7o’̃“M-R5à\13\\NM¦\\lª°1“r|’YPM\12Tkbrɤ–‹+?A=g¥Î±Ês~’Eîö_¾\17­Î„E4@+\4n;ÞàªmšLP\\y{ZÛe£L\6\7S…lÑ6éœßõ¥H)Å|®Q¤S8ª~\ +0u%Û%\27¼gR3ˆf`=OdR\24è’*í\1\2Ug9m«Â)'*ûÝ\ +ûy\0*‹\21Áº/®z£€¯¬R½aþ˜ÿe<üÐ(Ú„lôíõb5=´xF\25ìùU¼æ\7‘Áæ!â¶8é–¨¨>¢“í©¼ËAÀ/õ\20œ÷\12ñ6@&wše\23*à‰Ñ?ç@•ºlúúFnH\17¥\28Ê#NÏ\25™‡ìÀ+\30z³»=7V3/^\3÷>\27¡[¯¢Š\29¯\8\0058ŠÑÓŒ*æbå+•e«\22JS\\Öf\28Eg°±i±¤\\i®[\21žäRÂ*‡|w w´­QGF` äŒNNˆÎ©«JŽ\20øŠšÊ=ˆ ’G1àP,jc/\16BƒþieÏüÅ\29+\12\25¾8!ýJCZ\12‘¾.\26°~qU\\„;\12Ø'€Ù涚Ü\15!ºø/*ÎOXs\12ô¥^˜Ø\26õ&÷¼Ú9®3)8JÖnòaØ}\28¬9Xw°ù`ŒW] 7KXq€M\27\14°É¦LB&³œóí3´’ð¡Ò|‹ŒC\31]oÎ)‡hjN†¿\30²ó“g _¹+Oì\21‰žðƒcI8âÄô«Ö¶\25\26Ç‚ã0†óM'íŸ{äõÐ4\29Ä»9ñœ\9^>O#—\23añQý°á`³])ÑÐ}\19q¤\9™×ª\28X®Ç¬‘ŸÖ~þ<•“¼Ñ–\4r\26&­ØZP€‹ÙØå\28ër†u\20Û\17q\9ë´V-VdÿÙõ|h›Õ¸çw²\26‚6ƒÑ(-9\4X¥Y©ÉD¡òL§ƒYìça'\24÷\127\13Pƒ‰L\28؉`ÈzÄ\27ÓR~å°UJ?˜¨$“Ív¦\\Ï3Ozt̆M¯\0276JjgKuµÇCt6Nò¥5xb8lT‚kYðZ\16L&c0îv‘³*h!å\11š\3låÏ\9\28\127€TšúÃ$6öõe\19î\24›©\26y•šçXܳ%\27£ý«9dÇÊaðlÆ…àáÅœídb‹ã¶ÔƘ?ƒ\9˜fŠ¼r“É\17vȬR:À­Ïìt™•a³?pÎÙ5çR‰îIQf—˜\18™Â&ë!1\4VÄ\\ògi\4 µ¦…_Ö¤ûËR\13µÍrËÅãÐí9\0230á@é\11µ¯zÖ\\˜¡=‚\\D¤ûU%$²^ð«W~2TæèñÌr»V®Æ\24®\31,7\12è\27¾~¸b½6nAV\23¯5\30NR1{¯t\17ÒJ¡IüWüoøŸ:©Ô\2㟶’‰\\?Ê¡.Ê×DªÔð0 mŠ²äÿ™ÿoüÇG%H‰Öp¡´)u{¹ˆ†\16™b»N\28‘iJ\0288/lÄ)—Xj'\27ÝñGä\8ºÁe•H§æ­Õ+߸—@ì„Ü\22\17™Eôö¬æPL]C#µ…ì;š7ˆ”Ó\8m{ãeÝ&_}%Ç\15:Ä\18n£Íä\6].”\11®Í\5j­xÔœƒÛ†=lSTÿÈÐ+²\20\30E\20[~\26fiµ°bäp\5U\2©\15]Óˆ\21—t\16µ)“Ø\2\7½\6VPé¾6C©\13K\14d›ÜÖY…m2M³[´Ç\"Œì \30x„Ñ\26Q¸8Ìm(ˆFÃPÆ—ƒ(O\13Èž\13G¼oÖ—8\11¨Ë’Èá\11\24ê_Sxż)œÀZ-Ç#Ó‰‹\16¼øæ_9ßG¸Ž\5bô\26™ÿ1_ZU€ãu\17³?~zdºE6,M±+s\\\27‹ÒÀ¤¡;\5^àF{BOG##´s©¤Ñ*Hz\\˜¨\16XJ\00334åß\23\20•4\8¯+\2Ç+ß©;\25ÞN¬rߨD&Ê'‹ôâzfl»°¡+þ|¿êáü—E%ç+Zªïõ¸‹L’R¥ÞË]N 
Ê\12n\5õH\12\12ón‘L²£nÈ„c$²)“9“˜ÉúKRY½\13Ø\127’äwõlÍ\16\31î…ÌŽª\22n¿±fÕf#Ž\5Äv»ÿ¶ÉhÔHéÛ+YßEnk¿±:•­\30\12™z«™´œ\22$\19TÌ+[\13\29è_¯X\29)‰\16³\"¦Ò\9¡é\14ÀA-…¦ñÊB-s+‡QøGü$\28½òF¿\4¼ÜnÕ…ç«$¥×¡”.¢\19FûcEÿ±¸6øo¹`¡Í°±õê\0085“®cªÜl£Çî¢b\2r{MÞÛ\27!¿Œÿ·Ô\12Cé}\0190l‡%\"Ù\2\18ž|IïDµ\28Ù\1úþ\29bÃîc\12ˆH¡ö‘¸\ +ázf\26TŸODÏ1\22Ë\27þ¹áÆ\27{R=@/{+9:¸©¢\19Ñ\21Ý~Ôܦ\7œp¨H4Õ¸qÔŠ|»\13\3}Ýl®ñ‘.þY)\9=\8¡¸Úä0‰$3•6™É»\7\0110iV‘Ñ\22T\6C¦è«Í\1²>ÜxQÂ*EÛ\0­*ßxU:ž¥\3fÍ\23T†L9³¸E6Âp\31õ\1\\¢O3[$\23Þs/ÎÆÔUh¡˜—\22AO\0‹©LŸ»zü{ìè\26K¬<ªª.ÿ¾sW9ašT¾ÒjŸÓ;ð\13\16y\27\0Ê\18Ì:;A )$«…t\5Ñh‚¬›×KuEUJF-mFy‡œ2\1²J\20Ž[O¾.iõéªÕ\7T…\12NéÞöT(8`µR =ý\7ýWüG3BYô•\"è«$Ð\19 ™\0242 áÿˆÿNF(â\20\21Æ?­¹&Oaᕲ«η­)\15S$O”í]c©[¹æ\31\ +\15âh€xZ\22{b1ïï˜6\"áÇ•Ä\31\"—>Q\15)\28Œ\127|\20ÅW\ +\3ŸÖÔ®à\22püC›:a °J/Ú\18”Îô”Ѹ®\11\8n¡2앺ûÛLæL\"\28`]s…\18ý_4Ä]\1+\27Ê\7dBW\28.½\16$…E¤°-4¶\2A9ƒ\18±BÚw~x\27IÊ}è1r÷\29S\23Š ¯\30į”DVw÷ì´…òd\29\127²äX>\25ì\8ž®X”V^-¬\12Ä\25™\19îíM!ü÷\27‚\31\22L^S•‚\22‰[ê‰Ö*xÑ}Ïû^ètÿyùuú§üõOY4\13þS¿øO‰\27®ø\20ð”Êë\7`ã\3\20þÁVÆ?鹂\16A!D\3F‚Ù†L¾ˆivÄÇѦ\15bJ\4Õ…ÿ-þ›šÿ3þ\21\4–[ÓËR0\31\24<ý“b•\6–\31üßaõþR¸lÃÓã\13\127÷ô—¾þŸ\0?ÿÌPDpúgÍ\3\127OXÿþõÏ6¦’ô«øL)ðYþúÔåWŸ’¡û,©¨ó³D•Oÿlƒ?)Ï\5@¥\4vµà*há}PXióéû«>ysgúO\19‚Ó'rüS2;Ÿ:¦ÇÇ4êø´†3>ó–\18™ÌÙd\20I­²\13\22‘x5Ú\"®¿>u:ïóŒ¨~rOê“{RŸ:Awú¬Rü*¿^‹ÈŸ˜ò”\2Å\18ù>\25&i8ðYY÷©mú`£Ôì¤\30“ÈxV\\ÜÿÔâþ§oÈ$ÁF\26\9öËR‚ð‰«6ŸY\13ô§7V?½wd’†¨°¸ó]|jþ÷É9Úç\7\4ØO©×ýlи\127¦‰ÖŠÿ\11{¼Ô\0293\127\27d3\18–Š—iúE¨m‹óÜxæ6\11Qö\ +¢§›ÔQ2 »ÂIqJV©¥OÉÑè&{¸å@6!ßvYø¿Ê\7\28\22J&\30–û„x9î\0I¶Ý\27Q.\26\\Lù‰ùI9Ð0Í[’aeÃ*\ +\27=§è|r~ò\9uão‚JP\0110\16%\25\5òòç6%ðY*Fé‡Ð!tø\127jæòÙÌz=Ö\20Œ3>[ÝO²>!048´L9­{\"Š:“ ¢¨DÇž·j P|§ÂÀ+Ê\9\8\0188\ +–Tv=YøÌS\5˜¬ƒPJ©i²¦ÀX\30!\25ð\0095[ecì„\21`Â?‡Tpš\ +tŠ\2j\9RF#\16bð3†Û§Od8\22¥aÉŽús\26ƒÎ5™qˆg^ý˜B¢#ó'ŒÙ@œØ›\19Ò»?’ìµ?Ùk\127²‹þdX)€Øð\0008\4¦±\28\0175‘8\11S•üäI÷Ôˆ~¦Ô[”—‹òRWæ\13\"•ÀÕí\19÷§Ñ¤‘Ã4\25%Èu\19åzt(èç?}\31\23ü\"û\23lZ|òZ*Ú-4ÃÊ\\>\20\6J\8žSÓüÉ[›\0é£S\30<ð—¬}tçSÇr>u\24'eCêù¶—_\27¥î7]f\8Áœsª\3xL­Þ\6©û†OszÀ¥Ð\29|`‹`\20\"äM\2ô[–›ß²Ü<]Ä&\27ØKš\27må…šdh@=¾\27åàW†ÕË<ü\7\19l}Šéu\18qß²ˆ»¬°Zc\22³]o\19(¯³QÈdÎ\4ñAÅJN«0ò!\5 q÷ääý\29\9ñ.\7Ry/R\27ù’ºd²%xËîf“4Â6\25å\18/¤´A‚S¹ËT½\25Lt\3‡ÙÙ„C=›}dÒdrͤ=œÙ(”/™Î™D¾\18m¹\12Xû\28‹ø~°î`\14/rTù\19+ÛWþRî82à4SGbÔôöJñýM'\1\8J±S£8bžNcAjĶ|\26@ÁÞœö©å€{½mq~,Ìì\26E™'\5J?Ÿ\5ï‚Ö¶*Üõ¦Rp-e~Åœ/‘+\31¯\12¡¥@×&Y\127挤þÛƒá+ZŠ\18\8#‘+¸›Ï\2¼\28¬¡%d§Ò\4±\7¼\3Ò½DÌ>a>¸€·cg¿P}hrqh·AnX†ir\15+½£”ôd|¡Ž\26à\5Qï‰|\15ÈYЬ«`Ô§³ŠR;ƒ2®£šîd¤$âi\4\6\0rͤÍdüaýÁ¢ýed\2$Üý‚Ô‰Ð`Øä`*íuRHäêZ¬#\15Œa\26å ZiÄ^*\28,Ä&\19­W @Ý° ;\ +\216ïôFÑyZA\"\30®¯e#d\27Ôåõ1…rSõ,:®\26mEÙ¡ÑKÁE5\24]tª¥Ö\23ZÚdÚcÜ»•¼Ò\30Ÿè+ëñ‚¾¤\"2ZºÊç[ã“%Î(às²iÁf„Ò¡\0\12-‰³ÚÑ\30\"ntÚèãz\12|dPŒ&³ÐU¬?ª\24f…\0Ö4\ +n>F\2IMµÇC\9§8®\31ô\24õ\24™³ÃkÅ‚+¹\19ª\21\4\27ËYµ+h–×\19\14šÀT\13\4ÏÛ\11g\25«A\31\26z\26˜~ÃȆx8—R8È\19ô0l<+Â\0020l* <¤\"² FúdÈ\\«\14&lŒê\25&lä˜\\2qõL¬É¤Í¤Ï$”\7ó;\19/\7\27\15öc½Š4t…’þ‘I#‚*É¡ÖÆ\0196HÅiä„\\!Ürø·#Ž·LrdÕYNGg™Ø#«Æ$a|ÉäˆÞÑ›&Æ!TŠŠ†\6\19›\24\24³p\5Ÿ§:èí \11Y©2‡e65c:Ž³ùÀ\13_\20Þ^\4¥@\31\25Þ΂7Á‡ \21\12‚Q\16\4‹ý§±ÐV†Ë‹`øõ\ +H\19\31G|x8]T­\3ײ\17±/…Æ\30â‹®þDÜL\4“\25•Q÷7o8|2\17°ü™ŒSëtBù†ò4ü³\17:ñˆºp\20*x\12üøÈVå4GÙ²\11>Í+ü`‰M\7$9Y:ËÔȵ¹\31Ç},*Ô±U<ãØ«,$Òg‚puº\28!G\0130x>„fŒ~ÁFXTΗŽ!.GÉ[Tœ\23\22\22‚[Už+!¨ª¯ê™y®‚ˆÍK¨\6¥:žM“ûMJy6ëä\17~\9\27Á(Xlù\16n„+Z*`\26\11Ñ  EKu\21ÉÃS\7hËtüà_\19ùÀ\26\19®PÀÁ\25\26P€a£žu½\25GÂùÜaä'\18M8¢\18û6\25³ÁÍdu°|Oï\8ñ<€ÉÉ”\9c’\26v?7v¡ 8\25#rØ•jÓëàg~û\16éoÀLb³`~ÂÓ°\21JþIß\21Ü\7gôÌhKé»{¤ƒ’:8z\16\30ØŽ\5L³ÕV;CI¨T\14\26í\30òê´Ê'\5ò¾2Ç\0284‹Ì“p•h£´¼\"\0iy\"Òöä%iøæä0KÏo–ž\7¶%a¨V!ëSÂÞÆÙ\31z\23þ÷¿üé\20¬ß¤¬9f#EfV3[i‹Ëo’–×ó\29±çE{DOÊAÙhÒŒE„5\"±jf©ïV^»\8‚zÀ²—ÌV&\19‚ø:‚ýª³Qw\24…=Óo\17Õ¡\30Ë\7ÒÇèçÔ T}ɗØ\25Ò;£ûsÅO‚ü¾ŠD\15ýet™m8…«ØÓW=¥\27hÝ°D÷hqa\28K=2åúN«- 
ßBVÞžz\21èiÇë‘\19à\ +q<-j\14\6××\1âMBª\9¡U…á¬È‰Ê\4§sÕP©Té(\1Ò`øëZ ˜«òJqZ²…¢øM'\11䋵\24š\4XÀ\6FiBC^M©¡«I÷à9Ó9w’I¥¤Ÿ$鉨M\13qj\20\24Jc5Ñ%[IÎSRè‘o\12\23Ô\30(-\16Ž\1ãÕë*›4f4alhâR\5>f3ö{º\30\21\"½£Uv‡V5\"Ș\13:7xÔÈ‚5µÉ \ +œ\24Ç-d³*\15Øl²**˜i0d^¬²U¼\23+µƒl`MX7ç\19¿æ/9ûÒÎØ…•µ\ +â\\±\31&QäÁšLÚL\14×jfU\26\ +êoUÖö\5\11\8\11Ðè¿ÿ©å\7û°Éµ2i_2ÉVmÈŽ‡L\14“ÙnT‹%Œ™¾­ð$¨š© Ød\21ò\15ºq8sŽùÞ\ +zAä`(Ÿ\6\0á\ +M*\28\22ùÞ|œk³žw¹Ž*{Q‘Ma³ø¯rG\9|Z¬NS¶#¾ö\16¸³VíY£üVI½P길Sœ\9ßOÚØ’Cú/žêÆ)Øšƒ(\8ª\0275\16úâ\28Š¨Á0»Œ,\24¾e¹ð-‹…›È͇ž¯rx=ó…Wt‰ÄAè”\0Õ«®W}G:k˳ñ\"Ärkf\26\17ÂÒŽ5fýâ½\0052P]Ï´;¨Ó¥m\14Û[‘Ù]„cQt\15\5Öåtíª\23£?¼ó‡£kÕ×u\26!Cˆ\28Ãâ¯îU¯¦È8œvód/(4É\27Û5\ +†sºŽmJ'¸\22ÙÙ\18\"Ñd#+ž–ÜÆ9e\23\1hHÉD\31\24Ê.\27\0126\0086ðÌá‹Ë$\22àæó¥²_t–‡D·Lz9\14åM\"ÞUvƒm\13\26Í.:Éy´·5\27ì&¬w\20ñv^†FÅ*ô.©Ak\16ξå¯ÓÞL¾'!³æ`íÁ”1KÄÚrã7A\16|³\0008-Ö\23#sLq…\20¸‰«FØ]2¤†ÎŒ‹«¢ãaû ‹'½2\127&Jü˜ß\22[Õ¿(\7Ž\13‡0\18ÂÞòÕ\15[\22Â6é3\0252™3Q¼×œÖ+ŽYf6\28,\28,\30LŸ¾¾\11>l®ô]õêU/^\21ЪPV½{=Bzš°eL\22)à÷\18+`ï:ð‘\8tÃmïœ/½s¾ôΉ\18µëâ\31³˜âýÏ\29»\28ï'n\7£o¤ØøFñM—MÓlÔSrQ3°Z\1Ô\127n8\127\7DoN©òMRå€F\0157¢‡\20õâ#¯K~ð²\"\0Wî?p\24\0255ªøð8§$\21àRqä+ylá`´\3\22#H7ôBÖ\18Š'\16콆øv\"µ\\} º\2´ÍGGC6sÚ³\12Gñ·FÇnà¢çâžÄ³‰\\\31Ç\5w\0275Æ ¿pìp£ÌÃÆ\19‡TrP\"¸¡‘Í•ÿr»ÂbDó\3\5Ãᄲ7Æ—L\16€\4_¿²Ñ\9=\7n\3X\8‚¨(Cˆ\27—í0hv\17ÍüÁÿ–\0098ó]l>!À»Qÿ\31£;Cþ˜8êU숉r·:¾š\7܈ªÚÍÎ\28CÇ.\25ì\0132Õ\\S¤¤ÄfI‰M\"\18€V¦­MQ‹ùø-D§\13TŸòÍB\14àŠ\5ˆ\29v¶W+MÁ\11´&ßÁ—)Ól¬…l}¾£ûûoÊÁnÒȬÇ/ãÍø\20²£‘ÂæÍBÊ\27\5’Õ\3 GN™{\29Ï\20%ÛÚ\18C†¶ä@¼å\0026kM[p]w¾ ùaÏ*Ï-º\"vÂmñ0(Ñœ\0ßÛ*i[&\26\21Dã¿â\127Ã\127¤†äž7Ê=§\127nÇ·©½\31\17‚Ö9Z\23\25•rIEoT\29½I6:\1¿¾+;.\31uRM4˜¡áM´Â÷I|zó\5?D„ÝQz˜ˆö¢/Ù)¦ê„\2Þs-\14#‡^K\21‰\20Ø\1\24Ž\22uàG\14¸|t\27,à\ +RàB½mä\26Îi<£bI=s2ÃÙF\0'#—G\2Ir;â\20`úG”“\9j1\\`Ú=jÁcÌûà#Oõ\17ž\4äóÈål\28ŸâêñIº eРR@\30œ£­95\7î´E2Žyóu¤Â\4Zâ\29ub¯Iñ†\"1voŒizä0W\2åÂ\27Ãå¸\8W¦³*B¾¼âÇvJ\11®yÝÊÌ\25ó¼yìõÞ¼3ŠçÙÏ”\6\5-w™8\25{Ë:Œ?;¢¦vW\9\26¹iì©Q¸JÙ\30ò7|dÂrÏsÔ,™‚NXÔL\4“Ñ‘zp¶\17¦m™í;“>\27\12r9åçÉœI4Y„þšáÀ0Wq\26$¤ˆ×j•ÓFá`óÁÖ̘\0\3\0116Å´„\\ß ~\24[ã@çïrÖøS˜N\3o>¢ñ.Çß4EË1r\1N•†Ãèva\29”¦ƒvÈÇûc#w\"FmD0¶O–š‰Õfâ» \3zÓy‚6\19t\ +c¸ðŸ\19˜1\4\22¨@©{1‘¹y‰^‡\12¶¿\15\25lXåÖf¯X8ØÌÑ8Ž\30tÄZ~±àøofM6y0xè®M8r\26sóª¹\14%l\0182~ËäBŸ«ü7Þëk‹ÙDÛÔb—ƒiXKúq°æ`׃µ\7ë\15\22\14öó’x°ÕLß4kVr›‘V2*\29Y±«Ì‡\"à\8yEò* Ø¾œàf˜Íç\23ˆìÚbËÇ\25@Ø;ßÝ;'ä,Š\23iq\22\24\22èâ[ˆ¡û½ÅÁ+\25ߌ÷}ÞgÛ-„Γ{ç%„?wmˆÝÕuý¹\15êµù\26>‘X\25íP±{\22Úë \12|M¯«;hŠÃ\11í\5CÅÿ~«Çû­I\30/yB\8¸ÆHÖ\19;¯ßÌ\30\\ÀÂ4\0199g2˜ +…ÛÀgÖ[^'2J8\27WÌ\ +&uiçƒUÃA\0255уµ\7ûq\24\14\22\15–’™zôƒP‹_PëŽà•+\2T@þ&ìÎÆV8dL!®ÖÂM\3ô-ÀÀ`¤_‰\6Ñ>\22\1Z€Õò]k\22ŠZ³0Ô*9£&362ÜàE»+rÍ$%ºN¼6ÅY\30ÂáŠíœ­&±Ì¤É„\31¡˜\\øO\17\1\18Ê\2X;\29C‡^ºÍªçþ\21Ãþ#q4:doïB{Ú`?\19\0301ƒ1r\26Acu6z\20Ò&;FÊT¸Ê†aÌÚÐ\3]sT8d[µkô\"³OxÚ}£ÜFÅK\11Sç‹càõØÎàn6\4ÖE²ÉdÄhTÚãÎ\6µØ_~öò×ë^žSó”þ¹Ó˜¦Q)†g-jï8\26T\9š_;\26—1yÄA¡äè­â?…)@>àõÛ\\4OY\14„\"\16=¯Äôn8k\11ÀkJí\29ã£\11ÂÅÕ\29éŸ\2¶4\29\4©‚Sc\5\23íùVŽ\29g7Ц&Ò\20|‰.ÍØu*¨2Ùñ\1\18°¢“E¯ÜùŒC¬»\14þìùà\15H\26È\16\ +D¡æš\21ŸC™ÝE\25,\4ˆûì>ßC\15”ëßó9\28[í$U'/)çv\28ŸÙðéuCÃ4~à-Eý`¥%Yb¶á@žô´½È݆xŠ\4Yœá®¹òãšT\4Sþ^_øÏ\\Öp”D©{ÕÂ\18\28`¼\7“4lÂã;\31RŒð€\12¹¶/\4s.eÑåXè\3°\23½ûä\15bJU¤;o\25Ùy\2è{µ9’»]°à\9,ô¢%øS\23ÕŠ\22¹ðV~j§iÖÎÓAtÐñž\23lœ•-+°Î\4í>äC—ƒ|¢¦èøÎYöÓ•\22\\ÝL¤bztNCé‡\19*å0šî\17DÇW\3™ØÐ鎧6M.„Q\8eB)€žÙ\00397\6\24\29#nÝ\ +\21ldUës= Úö\29'uº2[ \127zÝ*Àz‡•idI\127fuï\17R)Ñ­ÝGm€J™Þµ–¢Û»OÖ\16+V\9xÂY˜A\14\28\8'S(T¢ElX`Ò\0L•\0287äš0w°žˆ\127¼3u\30\31‚•N&UòIµj¢–ÕÝÇ^ö|Ñì®Ã(C6QÕ‘¸Õž\15ªì夜šriŸøÕ\9ø±\19?\22gU\24R>²\31§SÌøÉ“Êß4!\17@^Ãøΰg¦o>O²ó\"˜\4ù\30\0242u A-œV8÷|×Ë~œ-ÙóQ«ô¾Àš‹\3&»\14˜ì>`²ëH\9¡\17´‚ÑnP_p‹¸^\0019±]—¸ì¼ EíN Ô\7ßÔ;.÷Áä‰\0°³Çú\18X\18f&\15€‹ŒÂ®\17NÌÚÓ<±\31âÅ×{¾•c÷­\28{\9½âådb\3Å(J¯\2MT§#\15kíºÉƒ°Ë!Åë`Ÿ\8b\17Ý\6Â`\26³›P^\14ʶà¸\1D#™Ž†‘·³Ê¤8ÜUÁf<+ºû~\16¸ÎM{Ôà 
rDh\"—ج¶A4Ñ\24!6;ƒh\11Ö\27\30~\1v%<àÒ\16#†n»n\13\25i„\17\"»§8öJ4(À—\19ÕÔ\8E9Š1ó1²›ÛqNfDDq­\8üE厛O^\15/d©‹\28#ÄM–›Êó¢V…'j€Ê\28-•ì>Y³ûdÍ®ã4»ŽÓìåëʼ„bÁ2MA\11 «èÎOßYqØôDåÔ»f_;oþ\0x8ÃÓ6íK&£È‰­\18æL¢Ýìö½g·¬jf\\EÊ\15(\15¤•\16Ó\12’\17ç%\21,N¦Ùý#:\26l\26¡‚åÅŽjTÉÂÕ2Ù\31ÆÁ\24²\31¾œâ‡»Î\1})fX\24’ÉØe«û,†^†Nž~þMl1ôÒY!\\-³ûj’ý¸š\4&8\8ÇwcH\6X·§É[id3ƒõP\16_OR\31¬Ë¬\23iÎÆ‹\2d=ð•%{>€d2‰ô~SŸßÔ+›º‘au£3½ã©IÙèã;\127t§Ë9vÞHò&<ÊŠ:D\27ÑQŸ\19 wŽ<Ò\20l\21ü½Š\13whcwP¬,“³\13ð­&\\‘ÖC£,ë›Éèp¡]Ìdv\0007ã]x\4±D[úB\17\21?ÞƒÖ\28A9¡ƒ\26þ|Fk?\14bq%’^\14V\30ì|0†\23<³²ßÂ…œÇ¸èÐ…;¤–¦7Á\5Œòà4å\"\25ŒÂ»\26'\29í¢U“S\3+\5»\15{íù°—\9\19\30÷é-ÆÃ…Ø]F8”G\19ìE‹lŠ\14ÄÍ;\7„,ÔçÝò·ßÊüq·ó˜Í.z\31†/Gºé<èî³e|\5û9]‚B÷3¿  /Þr²ð\"\20šRÃÄ®ófΠ¥vV/jÙÂrUÈË\0=åfÎb·”a-?h0\31\21uvùÆ!´l\0192‹¶ÚmÀMïLƒËìqFí¯‡¯\31W\31?tø¡Q‘ç16\26V,”¼mÅÎœ…\16\0137\25»ìëö\"×\\™Ù¹ÃqSjϮdzë±4ê\"o°±OÀ•#¼PW\31\16µÎùò\23g^P\26\19\18\ +1éáMþìS\12\29;ëìà]î•#ñ£4²_;Åâ£Ñ\7ðDš|\\½œ„Ò8*¤ÙÖú¸]ó‹?ÿî\\:‰k,oe!É\5oÏ\29×®–{Ç\30—\\Ü‚‰ZÁ=—‚}V+·ÿßì\12Ýç\\\5vˆPìgÜW™€çÜvžpÛy¨mס¶ÇÙv\29d#$³7l@î\127jÛ/\20¨L€òOÅñé½\23\15+/Ò]·_¬Ìo¿0Ì\11ïü9íé\21‹//zÁ\127ËÿŽÿÉ_ŠÛÀƒŒ»N¼\0178ùªÎ/\9ª3¬Šsõ\"h\4h;¡8í\11sÄêüÎÿ\15þ·òœJfu:C-ØŽKdà\28·È\16?\0048S·ëL]÷r°ëÁ†ƒý¸KÙU]xi\40uËÇ—Tÿx²nçɺ\20‡w|\20Wè¶F“®[jö*´*‹í¾Ÿf¯¤ù\30ϨmU]E41gÚðxÝž/ŒÙ}¼î|0Ti\29±#(˜Q€,ö\1;¢F(:iW™ íçIº]«çÉôCIÕ¤\18Ístøgîq,VµoØ6gdZiRÛy¦c\24XyoÐnW¼*\6VŒ\20TÉ)5ÚÓˆ.¬Â`\12\2Ó0BoÏ“vÒ…\21Æä<Š£“Œ)àvÅ\26cú\2ôÄ\21wñvŸÁÙØ3*fšÌ^*E£«Þ\25T\"×FVmÔs¯ò¥sy°è±Ù\7’ïgÛó\9=\26N\12¨«$B)6f\19Š¿ì?\7ùö¬\25Žö1{å-2{¾\1‡\ +‡uOP²Å;?@\\½xï_9ÂÉ>\19}o\19U°;êC§Ú^ê£À\23vÕ@«\14ÙK;œïÛ}¾\15¾ÇwÅuœ…”¡\3L\20¿ÿþ‡æ‘ú’ÙúøˆŸ2§ûö|=Î~ÜŠ³ûV\28\"Ò\13¾–üÅ\15zY_\4ªÏ\29\4â\11‘Õ\6»P\31\1µäD\5×i\22Ta|¬s{;³ëß:\19¦O±\31œa/gמÎK&çL¸‚Aö‘I›ÉÉá?Št\12We§×¹,\0264\2GÈ5¹ç%4°Ð8\9øá˜~å¸\127uUf\20q98«ƒ\\ă­‡}Àª’ÂD½ìu\25§mc5üÐÃw¼:ºì„pô°VziN\"\"×ÍìçÝž¿é±å¿\18£õd7ú[9Œ®¤…F„éªg1ë\0238ë±µÂ\28z(\"O\2bÓSMô/\28d¯š Äâ0N'î\5ŸyPW¤1²í\0308'­°ÐV\24ã‰\22'TÑ{ôÂNˆƒ6&øÐ!r\2GƒŠm\25\22ë°ô+#Vi\24­6¡r\\Ѧ\15bÍdœ…\24\25\1ÇüN¥Ë ƒëx)j—Mþûߌڊ£“{Åõ¿ô÷CŒ\12\22\19>þ|°æ`ƒY£ª2!MXå&*\0146c\30LÌÍÓ„a\ +€oÄ|·’\26\24`P®ê\30§=_ß$“k”I\27lÐé¹Ï>†L²ƒ9\27D“E>v=Rß“™ýpÌi\19x\ +Å\25óöŠ'ŸvœôT\29\9ê\27yîéå>TÙ\ +‡¯á\3}éKvT\31þê:“mT¨®v\127Ù€cr2v\0Ç]S2lõáh\31/W[«±\17mLX«\2Ͼæ@²›bV½åmTóžYÌ1çÝF¤8ÓÈ·q€Î–\28Ȫ4ûò\23¹X’µ‡Q—ÙÄ\0ëBÏ3˜w^‰`å(_~e7ÎÛf\27®æåë¯ö|ý•É5“6\19®Ì›2ýÉÔ²…FoÃÞ01\23Šo~\4g…ÈÿÁÅ\12‚î&\"1ª9„f\31™ÏÙ\19tŒÎ\9ME*ëËÞ¥\21¥\9Û¬Âwcclq0ÎÆh\\\25Wœ\127ßu\23\0009›f­`\18°8s\21·ÐâÓb£\"\0309æ÷Å[ÂÊui\17ªÙ\12‹‹Ì’+\0206¥÷ãJ.œ{bê ^´z?kk˱V8ýSâÔþqCã†6ßk\3ίÁ\11„¨\31\26Øo?epí#ÿ¼jèJxy#g;úß ñ™$Õ m\15<öñÒψTgÿ\21ȳo\7ÂhÖ†ýûƃŸ\28Õž!!„‘Ù\0\14)\28TÆö@ï缇6C3w\23 â‡4¶lÆíô V{öÑ\ +Ç\18%°ûóö_G·JF¯ßŽoß°ÿ†ÿyêú\13—o˜¾!\\ñÛ;¾¾±x£•Îö^ý7Ãþ\18N숑=\0027¹=óél‚9\1Ðò!*Ê\24\21ddóO’•KZ\19ã}w–Sß/®%S[Œ~ŸŽýìh¦î\20)¯sÂçoØýÓP|FEl Ø–_óœšßÒ\0ëÏïaL·!|§>^\25mÁÔG€È•ItÇ\1\\Z&–ÀÅú\16!X·}\23ƒ)\17æ}û®XÆâ\28.«£Æ†ØÚ°ÛuCNô5Êæ5ÚÁ«¦RÈ[„ô\20\ +¾‰_\17ƵòÚŠlu(vxuš®ý\20\15÷í…ý²¹ø—^é…ªŒ\127q#®9\18\20åÕŸ}Ý…ózŒˆÖ\22îwÄ÷å\8–¨ŽËÁ9±í•½{J¹Ù8,aã°\0043Fø¤pÈ!oá\17ýƒ'Œ\25\3óSÿ¹\27þ{\19¯\16KDi&\17Ã;±åõ¾t9^ÓÅk\24Þ6ô¼¡nCý††\13Å‹èCÑÎÏŒ’d\19Š¤¢\7¯Fø±ýµ©†–V£l´\26Å mNÙŸC\4u’q8û[ê(µ+\13Ž\23G[a\30¿ak6LÊ\175aÐ\28iríNñ'S´£)~RÚ\29J€Öi§è(ÒVóR«yBÑ%Ç\0\14\19Ñ7§ØG¬h\14é‚'ÚÒì4ú'iØLBY#ß1\30‹¾×ûLï¶OÃÓÅkTûÜ\127OŽ‚iÕœ\0083lhlèº9mQ­\1â³sçb¬±i×@´A¹}*CÈ)`ï†5Çëæö2\22…pY6—xë\26ÏÆO±u\5£hìóÍ\127ò\22}(LJØ‘,Mó»¡aCÓ†¢hAQR6b’²\17“\20\19“°Ë\21%S–&‹™IüN?yó€àæ\24o[$ÑCÝj\\¯æ\19—‚ƒá9\0%GHéi¦*\1\26÷¼Z3¿ÀZR`-)°–\20XKŠYKÊë,>ž*Ü‚Hj\1B\\&ÇžuDHLtU—…y•\2£I1£IФ˜Ð¤œ¬Ü-\0091\31€2v2íB\12‹ØÂ9„TÄ\0¥ÿ\20EùDºMR‚\26¥Ô!âÞ[ÏBÅå\ +¨•÷pd\24\9{\ +âϯ×\31\27zÝÐÛ†Î\27\02664mHÑzJµƒw¥¼iã\14Ñ´Ö*Ô\28Ræ>KÍ\23Ì}êªtÊêçŒxázäÚÙÿ\29qâúf§\15Ä™ëÀR\21à\28¹^¸:Råy\21¬·T¹p\21Õˆå\0282žÏ\\W‡UýøBP%úgžú\23%uÇfapÅ”àŠ)Á\17S 
ˆ¡B˜\28¦ô”Æžíæ~ÿçý`á,¨mggyµÈá›?댽~ž׬AZùÔA\29¹8ù='´´L?“\8\18k¼a\31´¦ùÄ°³×\9˜Ò·1{Ï([K\18=\\Zˆ‹\0030ÅìÑDæ\30½á~XÙ{’-\21rtŒ©p³1\ +ð£r!íÐؔ޻1Ù<‡<‡”û%vqXBæ…\0~̪T\6\14ßlp\7ö仿X±PnZx\15ú\28â£h˜ù!ÀØÀÔÀâü\19ÉŽ]ȇ\9ÍÓÞ”\14\26‹ÔwÈÃFR\9 ÆºŸ¬R'ÉÖ£\13\16]\2ø%”h”ŒêÚï\30ú\4=\20Õ'и!G™1“»\15å¶'Z(¦!\24ge,i”Ijî–s\3×\6n\14£±©£|D\30°„¨tR\29æÑ\25\"ò \0äÌL ”i°•äbûx±pá~œÂóÃrpM\21¸\6hÏEuxè|\15 Ä¥\5¸øGËÔK\3wË9žt·õ\6@ú\6ñHSÃ5Ž4À\0\8ŠJøh•ðá,¨òËÒ\1Ѩ†GÉÒ\5\26î~ÄB©[ÐOê—h,8öVëòÓÒáìŠ+Ú¤Òû1ž\12¯D T/>\13TħD‚\22Ô¼¸¥õJn;“[ÁäÌMnòR”\26ŸÚ-¶¤»ÌFQ“R«IÐB\22Ù®Jq›º\6J<ìJžb¹«—Ë­·_ïH\\¼1ÈËmö-ç\22ú°‚Tl¢W‚Bš(ø2¯}\8É—\ +(­¬ÒõÉ«ßH§k¸½\26Ú3®;óœÔ†Qœ›ÎsŸCcµ™Û2Ð2DõêÜrA\ +)é/Êþ\"öï$\22?4óL|\23èû0\21[zÑåÏÞ8×\20lU€Âj´Ø”Š8•\\\0DXµ+L\20ÍZU`­š,;\11&P\31¡·úQÛ¦—Ç:‡\1\24CN!SÈl\25™\17p1ò¤ðc›éÊé=B)a:‹§Mž*ž¦;Iš˜òé‘ùØÀ`0§\22ÿ#ÂDê§H#ë\127¶w\\6{ÇòÑAÊ}i\22ÌJg5\23\13ʃ>Ë k (\"ôÀñøÇ\13“„v\24CúËDÞŽô\2^\5k\4`p,cÈ\22‘Õ4\13ßt[E=èóhÉ*\17à\28²¾s°\13\15îMزÁó\6Õ5s2i/i*{=ɾPØ#ªÅxÐ\17åz=Ê7ôoöC7MܲX¯¥44*\26=f\0o/\12û7}‡\12\21ëÊ\23ÀêU¯ª\2êî)s\0035»J†˜RÆÑ\31\24ÜÛïÌñUÌñÅíʳèÖ—av‡´Øm¢q\26fÏTqbjõ4ø(“Žà\0153_\4PžÕŒ@‹F h¨;è¨&\29\26£HV)p‹5Á\1r®7Eru\ +QL7ÅXÙ4…š½¤\0ù°!\127\30£õ\29\28döP›1¬²ïóÔÀ⿾¶\6<`vV¯9ÞCA3Y\25îš0Öë\27×3×ëÄuáš¹Öÿ<Ú¸4r¶£V\17\21ç.€ÚÕ1f\19úAÒ˜Av!õ\7\1CÈ)ä\28Rï‚÷¸@£VL£V4=Ò\16\19:µ!ähé¡‘u•š“ד€Ý©1|é1\8ÿ¾…§`ŽÞ\22¹5ß´ùr–\127»ÑTS}´ØÎk<ô¥¸™ }\4˜‘äɈAÈ*ÜþIöäÀ¬eöqÖšY2Ð_\26áeB\6\127Q¹\28(N\23Ô\27/\7F\8\23«LŠ3®“x:iÄtñ`1€<ÖÔÚ!1p«Í»Ø^ŸdVë8Å\1ª‰\0268iŒ2RòØ\0038.º\4ì\8ë1* É#\14¡\4³\28i¯M\4\3È\ +ÞBê³g¬¬è‘Wb·ÖÀlö\12GðÁãc„V†6â9\3©þΣÙ[Š¹çêgŒ,]˜{î\4P‰n\\t\1&{\\Û½^Ùæ!A3·àô,õ¢ùò<Ëêj\5¼éÒ9bÔýt;á§2<ÇQ‡\25¥«ˆL§³\21à•«ž½ø×\\z'DË\11ó¥vï\15BD\"8û\0Ðr«iìä­q.\18ç‹žUY<‡\28BÒ’Û §š(#yNñý“Û˜ÆdWD~—Zl+\18fŸ2oã†à¢\19’™UÝ;/¦.|»Å·ÉÏkågv5„vn?\27̼ov§3·Á‚iåŠÙä\ +Ý\ +\17M\30‰A,×9Ø\23â¾óÏžîíï‚ü6ŽE#ˆ»\31½÷ñHï\28»»€Í<®ª½›¯Ö\26…¸®Ì»ë\27©\14ŧ\25.!ߟBž›ÿhÿ)î]SYFšCÇi¶ù›2/.ù>\0141£Zƒ+“h€ñ_[vNÙ2_-ü‹––c‹+Æ‚\26£yóJðå)¶V;\22¯ŸÍbc•p·\5y^ýBÈ™JtÔ:D—™»Ì.¸tèºî¯\20©\27ÍÌÏ^ãVÊ\30Þ\27|Ä1ƒGl7<\24½HÄí\11·Þ‘Åá5DøŸ¸=5®7\28ûˆòƒsYóaGÐ!b\24|fá1¬×C\29Ej]mdUñÁŽÔCú4\28o‘Ðc\3,@>D–¤\1Åc:‡œÂ}>†CæÝHß5^\26\7{ÌÝ\23€\16¶}V\26\21_Ù¨øÊFÅW6\2¾ÂDÕ\26½€¯l\4|F>2óÐY\ +Îð=8V1\1ÛTVh\12\0075áô$ΕĹ÷cÀ0ZE,\5\29\9PS\7‰›s\11Ξ\20¯Êf=\0úXg\5Zˆç1\27\22#Á\26ýì|:A4\127¿,ç¸ÕØñϧ–ÒÂt°åo¤4Í$üxS¹eí\19®T$q´»hÒ0ñ\\€V\11\14Vÿù\28¼a\0015 ŽK\12*ÿõï\127ŽG\24\14\4ØÄú\28¥,(=g«\13U‡©'û?Ç»\23ÅÅ\12H\\Ó!$ªÆ\127>§g‹Î‚Ù]•½Åh±ù~Z.\22™DÈô„Äþ•3”€Ù€\19Ð\6Î\18¡nÜ\16\5ÍÞì—\27:—ÐeÃéêß&\20§DeHT)ˆ³‹â\28,æ\28ŒßF7û ¥\3 ž\13­`ß\0︅^Ns$Y6\4R¸&l/\22£„\27\9ó\13\"\22\\ßíK“ ±ÇYGJÕU‚´óxÅ÷„'+E\127ÿwùûïNÒ6F\ +ËátCFX\0180Æt\30¼ú%W\7¢¹–”¾hC*Ú0\29ŽÍE?îïÿ–ÁBß+ÁrM\5\23\7c±Lj5_ñX(ßÁÈ\19Þn\18pîøÀ”NÌP›\5®¥\8þ\0ªÖÈvOÂ9ë\12WâkH\127}­ú`ÖÄrÝ¿vºjH`ºD‰ZÒ6\19Üà<\8\11ÜêH®Þ3°\14°\13q#jm[\14Ý“Ú\13ˆ\18ÕP×·/]ÓÂ\17RÞÙ…\127 ¨áð7}â\19t4šÅ¤ZZ´ù‘l¦z_’IA\26RÛ‘\14'\13|Òáí\7â\28¾Ã‹åx\14y\9™õLÔŸd~\4Í.\15×{Ïçêýr¶;ÿVrt\4K±$c“yC’Î…Gªòt¬\12E\27š\8ʹìD•K\30\20+æ?¿^CžþÁ]\0040\ +\6-ƒúëu–8ä%<ö×\0064ÕÅÏ+ŒŠoéÃeµ¤\18'ónØa\14I;”vqFø\ +;ɉ\26™¨O0X\0221¾^ùéØí\8sÔ¤D‹ƒ©Û?\31,XË\20¨c\29ÅÙ=\23\5µöµ$mýSêøAL\20ÓÎÛË©c`Ÿ¬Ù÷º!²ØŠÓ{ÜPòKX©@8\5‹F^©Qo\8'Íê\20>´”iÓ|N¨*‡\11™ÒÅç$Ö|jä7Zò$Ö-y\23~%_À\26?džjñ”¡V³Â,Ž2óIUÒ®\14™+®§\0í—xmæI–»\3» ÅÌB$w\20žžEC\5Ðhï)õW\18Ó3Š­Î\24\2 †¥I§MÚÚ\ +(Õ ÔÇ\0162õ¬ˆTß\18¹ªõO'@“[Ý;ƒ?\\”?žÉÈsÜ\14!¯ÊxÖ\8÷ôË•\31‹†\22F‘³\31\11J\18\2Kȼw\6j \25€R:\3'\20ÓÌËæWBˆ[Ý\18‚£43#ÖqV\0291Isøò§g*Íì\28©£ê»ý’?\12ˆ‹\20æ¥åNU¬}µôÖ@\24K#uŸü\3ÓøÖ 
ê+’Up’Ç¡p½Ö\27–\12“Ï[¦\29yÿD‰Oq\16`¤:sh\22™-5€êµ$\31N~2Õk}\25Å@„gU¬'¸y\27ZŒÈÔ•‚´Öž/­4\14«z$}×o\127ÃoêûŸßJ­–Åw\7|¿pý:YŒÙr¢áüóåêô\21Åý‹\12ÔúfÚýŸZ$«=?a\20³@GK¦6^Z|n„ûóë\0262á¬ôä£A¾ç—\21?®éÏ“ˆ‹‹\12aJ=H^\31\0082¸8\31ËlŸ‡Å'>~O\2çxÔIQ³»´b=\18\21\20\16$Kõwìµ>Rrtw%·ÞNNš»Y·DuUH¼TÈ°R`Ë\7ï´Ûýæ8lA½úYM1K'bŒØØ%ȇiß\17–Éz6\27\17!c6ž7þ!V#\15C\3\17üÎâv Ë†Öx‰\8:òÆ6\20(m(;\24+q\25ËÓ¾o!Ö!>]\4šÅÌ6“Äð“\15I[\16Ùkt9¸8j~ÒYg:m2ý 2S\30Ë\17£T™‰y%¤—¬tjü%ùþE‹Š\2Z\19õr牸jo܇‹¦—œ4W›7ë¸<&3KvÓÐÎkã,läfÇÖÖç—\6ø›¦+Ш%\7ý€Æ^\25–K\3*lBy\31ÑÝTJsÐ4È…2jãíœh7j\9Ò¨¾>uÓèZOßX\28ÌŒ\0012\27\14Œ\11Ÿ-_ãöäBëóQ¸pòɈ\21]V‚\15!£ÐvcD#ã—÷\13^ºox\17ÔQÛ\18OÄ\0271Hi4¶2ƒ\18·B“§ŽO\14K”í\9ݽÄ\19ñ=œK\0134\27øp\22á\25c´s\4:F@ùÐ\17À*c°\28,Y%2 \ +l\13V‡:øÐæ¸|:\4\22@ƒ\31B\22Ë9Å'ëLRCÑ\0tœEÊqˆ‡a{Æ !ž\28 0h¿Å[B\25­ú’CÝ<‡¾yn†âì⯶\22GÞˆ\"í6L\17c¬\13°˜ßžTB¢\24½ûWl‡Ríæjõ>²-%$3g¼c‰ìg\\\20 ½Ó[V†ÅÁãKµ¬þ¶¡É~ÞªÅ-\15‘°[—â{o¤á•ÂüJ^ BŸÑ˜/¹gm8‡™'ÅÒ\31ýöþè\26nÅ›lEš@P\\쳉M\21ö\18Ÿ#ÃL¥\17Zi­òµ‹û×9@4%\21´g¦Ós\3Ýæ÷ÞÐÉ\17ÖpÈ-LÞÂÈ€Pý”}¼ûêZ×·<îÓ\16ò\0182o>Ž­g“‡OcX Ìü8‡Ü»\4iÇt Ÿ\24ÈÌ\1gõ2u\8á¬ò\14\2hd©0ÏÒê&ƒgÍeó\12Ï8rçïÀÎGuXÚž‰œ¦\3)EL›³Ïë’H¾!ÛÌdl\18ù ËÔã†\"H\14\5ì\20}˧Ö>³Ï´›\"µã\30Uœ\28ë&Q²?£1%\13¡[t$µ³­\14ވζ˜ ©ã+ø×ÙË›žð*\29’\29ö:Ìf'o\17q¼×£„+Ÿx%©ê\5¥ó 3¿°ÔÓ¨™°þ„ן÷×p@á2/=\26q\2£=zßûèQf°^éŠÝe&ꈻ\13\6‚Úg;Þ,ý©!Õ™Ãü^\3¯[qY³Ë§†z–Ë\6ž\3XCËno›[ßœ†\6¦ÍonNK\3Q8¿T¦ZKˆƒ›\"‘±íâ§~µ°{FB°\127»\2üý7\18'ÆnÞ\127×غ~ÁZ§®¢CZŸŸ}Ó#ðyyÒLbUý]ë0àdqEX¡ÄÖ\0\28êÙ·ZPYÝr¬&v]Íë*q³ß\7\15Ì~@ÃÂ\21\11Pˆ/D-\0235ˆúîutŒ£z¨U[gš\7\3’|ÑOÃA5g­µÏÁï;'ør aÁó¥û³Åh±„¯\"»ðªK'ò\29î~©(\11ÌuWkßÙ\5­ud»ß_\29ôCQµ:¾š\17`5{W\21«ßËŒ@\22\12Šì\24#šß¯¶ó ¯>î>ù¬ÅÉæ(@-¡Ìì0U G2ÀšQk2‰ìj\13©5±3$Wí{É5!Ì\18±¢\14\21\14Ñ\27­^[ƒÍè{‹w\13Z£µ©5­ÖgZÑgª×‚x¢Fê\19ÐfZ}âpµö\18 Xƒ}µ\9¼Â¶±•†V–ô1ŽÀò­L'”ÕZßXPø\17ò9d\31ò\28ròÓ«–åµÐþ›±]\21¯p‚ý6Ï*ö\21J³¯Pš}…bÃ\ +…É\27ê6²MÀ±?Ù\22ðÔª™\19À\13ºbÃ\2ˆÞ\1¤ê\20\23áSÙˇ\127\19ŽK ç§o!§·ö\26'.Ÿ-¦pž\13²•“1Càô}ù\22F©ß\5^Å*q˦…ô}lª J©ûÀŒ È®Í×xd§\13k\5×\127ÚþHêžÿƒ3qx\7€\25mX0(Í‚A\9\11\6¥Y0(¶`Pš\5ƒ‚\1\3\11\22äÕ~ñU²Z°cFYÛ/™ò¶dÑX€µq\1í ¨‰sˆÅ\1–Б‹–^´\6ýï2¶0\15Ë\21QXc\1Dì*Å\127ÿ-¦3³ÇûÒ\25ÎDµ÷rv•~\9¥M&\3j”æóGs\30¨ºS«î­3ó£Øì½µ±¿õR«¹I«î׋ïë\31¾³©½c?÷.\27|õzæ:ùª|¼³Ñy÷ñý;šôw´CîÎ×;Ùº/±“ýÍð\6VÅ4W•½­WÄà·ìFMZôaZJÒ&\30ò‚ì}÷˜ÿ\0318uh®", + ["length"]=528089, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=62851, diff --git a/tex/context/patterns/lang-is.lua b/tex/context/patterns/lang-is.lua index c522d9434..f4e9f38cc 100644 --- a/tex/context/patterns/lang-is.lua +++ b/tex/context/patterns/lang-is.lua @@ -91,7 +91,63 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijklmnopqrstuvwxyzáæéíðóöúýþ", - ["data"]=".að3 .aða4 .aðk2 .aðl4a .af3n .af3re .afr3í .af1s4 .agn5ú .al3e .al4l .am4s .an3k .ann4e .ar2 .ar3a4b5 .arth5 .asp3 .aum5a .ám4 .án4a .ba4 .da4 .dam5a .dav3 .dr4 .ds4 .du4 .dust5 .ed5ik .eftir5 .eigin5g .eink4 .ekt4 .er4m .eyf2 .fa4 .fi4 .fjár3 .fl4 .fla4 .fr4 .frá1 .ga2 .gar4 .gas5l .gd4 .gem5 .gja2 .gl2 .gr4 .gu4 .her5sk .il4 .ingj5 .is4m .ís1l .ja2 .jar4 .ka4 .kal5e .kam5e .kapí3 .kart5 .kast3 .kn4 .kr4 .kur4 .la4 .lag5e .leik5sv .líb5 .lu4 .ma2 .maj5 .mak5r .mi4 .mj4 .mu4 .mynst4 .na2 .ní5f .ný5f .ol4íu. .óa4 .óð5al .óf4 .óg2 .ók4 .óm4i .ós2 .pa4 .pl4 .ra2 .ram4 .rí4 .róð4 .rúm3 .saman5 .sk4 .ski4 .skj4 .skr4 .sl2 .sm4 .smá3l .sn4 .snar5a .sp4 .spr4 .st4 .sta2 .ta4 .ti2 .til3 .tí5f .tr4 .tu2 .tví1 .ung3l .unn5usta .úr1 .út1 .út5s .vegg5s .vist3 .yfir3 .ævi3s .öfl3 .ögr5 .ör3s .öt3u 2a3a4 a4ab a4ad 2a1á2 2a1b ab4a 3abís a5by abæk4l 4ac 2a1d4 a5dó a4du 1að 4aðað að3al að3ar 2aðfa 4aðfö að3g að3i 2aðl að4li 4að2m 2aðsh 3aðu. 
2aðv 2a1e a5ei a4es 2af af3ald af3arn a3fá af4fr af3g a1fj a3fló a1flu af5ork a1fr af3ré afr4u a5fræ 4afsd af1sk 4afsso af2u a1ful a1fun 3afurð 2ag a3gau ag5ál ag5is a5gj agn4ara ag2ne ag5ot a1gr ag4ra ags4ið ag4sp ag4stj ag4ul ag3æð 4a1h 4ai a3ið a4in aí4h 2aj a3jö 2ak akaup4s a5ká a1ke a5kinnin a5kistu a1kj ak5rei aks4l 4akss ak3u a3kv a5ky a5ký a3kö 2al a1lag 4aland a5landið a5landinu alas4 alá4 a4lem al3er al1f al1g 5alge 3alh alis4 al4ín al3kj alk5s al3k2u al4lí all2s allst4 al4lý 4almað al5mann al5m2i a3log a5lok al5op a5lóð al4sæt al5up al5ú al3v4 a3ly a1læ a1lö 2am am3ang a3mann am3ari am3ars am3a4s am3dr a3mí am3k am2m amm4is a1mó am3se ams2i am1t 1an 3an. 3ana. an3ada 4anag 2anal 4anau 2aná 4and. and1e 4an4dí andr4 4andö 4ane an5ei 2ang an4ga ang1d an4gj ang5spi an2g3ö 4anib 4anið an5ín 2ank ank5l ank3v 3anm ann5d 4annf 4annh ann1k 4annl 4anns annsk4 4an4o anst4 an3sti an3tí 3anu 4anú 4aný 4anæ 4anö 2a3o 2a3ó 2ap ap3al a3per a3pil a3po a5pre ap3te a5p4un a5pör 1ar 3ar. ar3afu ara5kr ar3ang 4arani ar4arp ar2as ar5ast. ar3au 2ará4 4aráð 5ar3ák 3ar3ár 3ar1ás ar1át 3ard2 4ardí 4arð. arðar5á 4arði 4arðs 3arðu. 2are 3ar1ef 4aref. 5areig 5arein a3rek ar3el ar3en ar5er a3ret 4a1ré 2arf 3arfe 3arfé arfs3k ar1fu ar3gj ar4græ args4 arg3sl 4ar5ið 4arif ar3in ar4ind ar5ist. 4a5rit 4a1rí ar3j4 4ark. ark4is ar5kj ar3kr4 4arms ar3mú 4arna. 4aroð 4arol 4arot 2aró 2arp 4arra arr2i arr4u ar4sá 3arse ar1sk ar4spi arst4 5arstí ar3su ar5til artr2 ar5trað ar3u ar4uð 4ar4ug 5arum 2ar1ú a5rús ar3v4 ar3yf 4arý 2aræ a3ræð ar5æv 4arö as2 4a1sa as3an 2a3sá 4ase 4asé 2asi a5sinna 2asj 4a1sk 2asl a3slag 2a1sm 4asn 4aso 4asó 2asp aspít4 4ass ass4v 1ast 4astað ast4and 4astau 4aste 4asté 4astig 4astin ast4ing 4astirð 4a1stí 4astj ast3l 4asto 2astó 4astrá ast5ráð 4astrí 4a3stræ 4aströ 4astul 4a3st4un 4astú 4a3sty 4astæ 4astö 2a1su 2a1sv 4as4y 2asý 4asæ 2asö 2at at3að a3tak a3tal at4anv ata4r a1te at3ey 3ath a1til a3tj 3atk atns3 a4tó atr4a at1re 3at1ri at2sp a5t2un a1tú at4ve 3atvi a1tö 2a4u2 4auð. auð5li 5auðn aug4as au4m aum5ba aum5un aun5dr aup5e aup3l a5upp au4s ausa5m au4t 2a1ú 2a1v av4ar. a3vir 2ay a5yf 4az 2a1þ a2þó 2a3æ 4a5ö á1a áak4 áa5lo á1b á4bu á1d4 4áð áð2s áð3sn áð3sta áð2u áð3us á1e áf2 á5fá á5fí á3fl áflæð4 áf5un ág2i ágl4 ágr2 ág1s ág3v á1h á1i ák2a ákam4 ákap4 ákó4 ála3m ál3arf ál2as á5lau á3let álf3d4 álf5sk ál4m álm5an álm5u álp3 ál4se ál5sva ál5ug á5lú ál3æð 2ám ámál5a ámsl4 á2n án4aro án2as án4o á3ný 5ánæ á3o 4áp ára5g ár3an árar4 á5ré ár3f4 ár5g ár5k4 ár1m árns4 árn5sl ár2st ár5t ár1u ár3v á5ræ ás5en á1sk á2ska á4ske á3s4l ás2m ást2 ást5i ást5ráð ást5rík ást5v 4ásu4 ás5va át3að á1te átr4as áttar3 5áttin. átt3un á1tæ á1u áuk4 á3ve á3ví á1væ á1þ á5æð b2a 4ba. baðk4 ba4h bak5sl ba2n 4bana 4baní bank5an 4bans b5ant bar2b barð4as bar2n bá4s 2b4b bb3að bb3an bb5arn bb3i bb3uð 1be bess4 b2i2 bið5i bið5lis bið5r4 bif1 bilj5 bill5 4binn bisk3 b4it bit2a 4bía 2b3ís 2b3íu 3bj bjar2 b4l2 bl4íunn 4bn 5bo 3bó4 bók3s b4r2 br5að bra4s 3bro br4u 3brú 4bum4 bur2 3burð burst5að bu4s 1bú búf4 bú3sta 1by 1bý bæk2i bæn4a 1bær b5ættism 1bö 4böki c4a ci4 ck4 4da. daf4 dak2 3dal. 
5dals da1m4 d5ang d4aní d4arad d4ar4að dar4ár d3arf d5arp dask4 da3sl 4dav d1b 2dd 4dea d5ef 3deg 1deil d1en d5eó 5dep d1f4 d1g2 d4gi d1h d1i di5fu d3ill dim4 di3ma di3nav d2ine dir3s dí3d d5íð d5ínu 3dís dív3 3dj4 djarf5a d1k2 d1l2 d2la dl3að d3lag d3lau d4lið dl3u d1m d5olf d5óð d3óf dó5lí 1dó2m dóm5an dó5ne 3dór dó2s dós5e 4dq d5raðf dr4aðs5 d5raðst drafn4 dr4ara 3dreg d5reip d5rey d5riða d3rík d5roð dru4 d4ræt 2ds d5sal d4sjö dsk2 ds4ko ds5kun ds1l d5snö ds1s4 d1st4 d4sti ds4uð d4söf d1t2 d1u duf4 5duft duk4 d4umb du5ræ d4usta d3ún dún4a dút4 d1v4 4dw 1dy 1dý d3þ 1dæ 4dæf 4dær 3dö ð1a ðaf4 ða3g ðak4 ð4albo ð3ald ðal4is ð4a1m4 ð3a4n ðanmeg4 ðar1a ð3ar3f ðar3l ðar4lis ðar3t4 ð3arú ðat4a ð3au ð3á2 ð1b ð1d2 ð1e ð3f4 ð1g ð2ga ð2gi ðg4l ð4gn ðgu4 ð3g4ul ð1h ð1i ði3g ðik4 ð3in ðis5l ðist4 ð4ista ðis4ve ðj3as ð3jó ð4ju ð3ke ð1kr ðk2un ð3kunn ð1kv ð1lá ðl2in ðl4ist ð3lí ðl4ur ð1læ ð1m2 ðnum4 ð3o ð3ó ð1p2 ðr5át ð1re ð1ré ð3rik ð3rík ð5ró ðr1u ð3rú ð1ræ ðræðis5 ðs5afl ðs5á ð4seg ðs4inn ð1sk ðskr4 ð4skú ðs5kv ð4skö ðs4lu ð2so ðs3or ðss4 ð1st2 ð2stí ð3stö ðs4u ð4svi ð2sæ ð1t4 ð1u ð3uð ð3ug ð3ull ð5uls ð5unn ð3unu ð3ur ður5á ður3f ður5g4 ður5st ð1v ðv5að ðv3ann ðv3arn ð1y ð5ý ð1þ eat4 ebr5e ed4e e1dik ed3it eð3a2 eðal1 eð3il eð4is e2f e3fal ef3i efj5an ef4n 4eft. 2efts5 eg4ge eg3i egr5u eg3u4l egur4s e4i eið5ar eið4st eif4as ei4ka eik3li eik3u eil3ag eil5ö ei2m eim5u eing4 1eink 4eino eist5að e4k ek2is ekj5a ekn3 ek3u ek5ú e2l el5ás el4d el3eg elf3in el4gr el1i 4elí el3ín el4ke el2l ell5an ell3e elleg4 el3ó el3r el1sí 5elsk el4te elu5s e2m em4ba 1embe em5ens em3i4 emj3 em4l eml2i empl5 em5u e2n 4enc en4g eng3a enik4 2enn enn4t en4t ep4h ep3i epl5i e4pr ep4t e4r er3al er5au er5á 4erð 5ere er5et erf5ar er2fr erf3u erg5l er4gr er3i er2k er4la erl3i er4lí er4mi erm5in er5ól er5skj er1un er2v er3ö eröf4 e2s es3a es4ban es4bu 4esj e5ske 4eso esp3a es2s est3að est5ö e3sæ etik4 et5ils etna4 et5o etr5an etr3u etul4 et2us ext4u 5eyrar é3b é1d éð2s é3fe é1g4 é5ky é1l éla4g él4ara é2li é4lj él3r ér1 ér4á ér4r érs4 ér4un ér3v é3sm étt4ug ét4un étur4 f3að fa2g fa5k 1fall fal4u fa4ná fa5p4 f2ar f3arð f4are f4arg farka5l f3arl fast3e fá3f f5áns fá3rá fá3ta fá3v f1b4 fd2an f1dr f5dú fða4l 1fe f3ef f3ein 3fel f3end ferl5i fer3t f1ey 1fé f4fa f2fo f1fr ff4st ffær4a f1g2 f2ga f2gi fg4n f1h f3ið fik4 f5il. f5ili f3ill f5ils f5ilv fim3a f4ine f3inu 3firði f3irðu firg2 f3irn 1firs 3f4isk fi5so f3ist. f3istu fis5v fí4 f5íkv f3ís fj4a fj5aðu fj5ann fj4e f1k2 fl3and fl4at 3flata f5lát f4len fl5g 1flo 5flut flv4 4flý 1flö f1m 4fn fn1g f5n4í fn3k fn5ok 1fo 4fop forf4 fork4 1fó fr2 4fra. fr3að fra4m fram3l fr5and 4frar 2fri. fr5in frí5m4 f3róf fru4 fr3uð 1fræ 2f3ræk 4fræn fs3á f4sel f4skaf fsl4 fs3li fs5na f2so fs5or fsr4 fss4 f4sti f5s4tæ fs1v f4sæ ft3að fta4sk ft5á f3te ft2s fts5l ft3u ft4ul f1u 4fu. f3uð 3fugl fund4as 2f3ur 5furðul fur5e furs4 fur3t 4f2us f5ust. f3ustu f1v4 1fy f3yrt f1þ 1fæ 1fö för4l g1a 4ga. 4g3að gaf4ar gagl4 3gagn gak4 ga4lag gal4is 5gals 5galt gam3al gam4i g5and 1gang gap4a g4ard. g2arð 3garð. 3garða 3garði 3garð4s 5garður g3ar3f gar4kl gar3l gar3t 4gas g4as. g4así g3ast. ga5stað 1gata gat4r gá2 3gáf gá5le g1b gd4ans g3de g4du gd4v gð4aro 1ge 3gei 2g1el gen4 g5end 3geng g3ent 5gerð ger5í germ4 gerv5a get3r g1f2 ggis5 ggjaf3 ggj5ar gg5rá gg3ræ ggs4v gg3ug gg2v g4gö g1h g1i 4gi. 
4gik gi5kv 3gild g5ill 5gilt gim4a 4g3i4n gis4a g4isp g5istæ gír5a gís4 gjaf5ar gjaf5o gjak4 gjal4 gj4asta 1gjö g1k4 gkal4 g1lá g4leð g3leg gl5é gl3f2 g3lit g5lín gl3ót glsk4 3glugg glu3s gl4y g3lær g1m4 gnap4 gnart4 g4ná gn3g g3nó gn4se gn5ug g4ný. 3gol g3or 3góð gór5 gó4ur g1p g3rak4 gr4an gra2s gras5sv g2rá 1grei g3reis g5reyð gr2i 2gri. grið4 g3rík 3gró 3grund 3grunn g3ræði g2ræn 4gs gs1á g4sei gsk4 gs4len gsl2u gs4lö gs3m gs5or gs3s4 gs4si gs1v gt3að g3te gt4s 2g1u g4uði guðl4 5g2uðs guð4só g3uðu 5gufu gu4lagi g5ulin g3ull g3uls gu5mið g3un gurf4 gur3g g4usta g3ú gúst5a g1v g2va g3val 5gyð g1þ 1gæ gæð4as gæl4 gær3 1gö4 h2a4 4ha. hað4 haf4sp hand5r hat3r há3g hál4 há3re há4ski há5sta hás4u hát4 há5ti hátr4 há1v he4 hei2 hellis5 hen2r herk4 her3l4 hersk4 4hersla hey5st 4hg h4i hig4 him5b hin4 hit4as hit4n 2hí2 h5ía hí5b hj4 h2l2 4hl. 4hls hlu4 h4n2 hnja4 hnjá3l holl3u horm5 hó2 hóm5e hót3 hr4 4hs hu4 hug1r hug3u hul5i hundr4 hú2 húm3 4húr hús5k hv4 hvar4 hvik3 5hy hæf5a hætt4us höst3 2ia i5as 4i5á 4i1b ibb3 ibl3í 4ic 2id4 i3da i1de i3dr 1ið. 4iðf 2ið3g ið5jarð 4iðr 4iðs ið5sal 2iðu 4ie i3ef 4if if5arn ifat4 i2fen if4g if3is if4t i4fy 4i2g iga3m igð5u ig1en ig5rí 2i1h 2ii i5ið 4ij 4ik ik5á ik3i ik4is ikt5o i4kvö 2il ila2g il4ara i1lá 4ile ilf4a il1fi il1g4 i5liðu il5in il3ip il5ís ilí4um il4kv il3lag ill5an il3lá i1lo il4sa il1sk il3sn ilst4 ilæk4 i5lön 4im im4b imb3u im3i im4ið im4la im4m i5mó im4sv 1i2n 4inaf in3an 4inar inat5r 2i3nau 2ina2v inav3í 4iná 4ind. in4ga ingj6arni 4ingr ingv4 2inns 2innu in1o 4inó in1s4k in2sp in3sta in5ul 4iný 2i3o 4ió 2ip i5pil 3ir. 4ira 2i1rá 2irði 4irðn 4ire 4iré irf2i 4irfs 4irig 2irit 4i1rí 2irk irk3u ir3l ir1m4 4irnd 4irni. 4irnin 4irnis 4iró irs4á irst4 ir3tal irt5i 4irtl irt4ö ir3ug 4irú 4irö i2s 1is. 2isa 4isag i5saga i3sam is2as 3isd 2ise 4ishv 3isins 4isí4 4isj i3sjó 2isk is4ka isk5el iski3m isk5inn. 3isko isk2u isk1v 5iskö 2islé is3læ 5isma is5me 4isni isp3að isp3u iss2i is5sú ist3al 2iste 4isté 4istig i5stik ist4is ist5ín 4isto 4istó 4isty 4istæ 2istö is5tök is1v 4isvi is4við 5isvin i3svæ 2isö 2it it3að it5ann i5tei 4ith it4ha it5ill itis4m it2ka itn5es it3rí it4stö it3un it4urn it4ve 4iu 4iú 4i1v i2v5ís 4ixs 2i3þ 4iæ 4iö í1a 4í5ak íal3 í5ald í3ali í2as í5at í5au í5á íb4a í5bar í4be 5íbú í5dag íð2s3 íðsk4 í3el í3en íet3n í5ett í2f íf3eð í4f3i íf5rí ífs3k íf4sp í2g íg3e í3gil í1h í1i ík3a4 4ík4anar 4ík4ani ík5is íkk2i 5íkm ík2n íkn5e ík4s ík3us 2íl ílak4 íl2as íl5ár íl5f4 í3lí 5ílmá íl3sk 4ím ím3að íma3l4 ím3an ím4g ím5o ím5t ím3un í2n í5ná ín5e ín3ge ín5t ínu3g4 ín3ú í3óp íóp5íu 4íp ír5ak íra4s í4r5i ír4sv ír3t í1ræ ír5ö í2se í4s5j ísl2 3íslen ísl5i ís2m í2st ís5ter ít5að ít5als ít3i ítj5 ít3re ítr5ó 4íub í5uð 4íul íu1ma í3un íu5p í3ur í4ura 4íusa íu3t 4íuv ívo4 í5þ j2a j3að j5aði 3jaf4n ja5kl jal2 jal5ið ja5ló j4am j3an j4aram j3ari jark4i jarn4ið j4aræ jas4 jask4u j3ast jál5as jál4f jálf3a 3járn jár4u jáv3 jávar5 j1e jend4a j4ep j4er j4et j1i jó5b jó5fr jó2s jó5ug jó3ve j1u juf4 jug2 jug5as ju4gó j4u1l2 jur4e 5j2urt jurt4i j2us4 j3ust. 3jöfn 3jök jöl3k jöl1m kað4s kaf4an ka4fr kag4as ka4j kaj5a kak4l kal2 3kald 4kalf k4all 3kam4b kam2s5 k3ana k4anaf k3ann. k4anó k4ant 5kanta 4kapa kark4 1k2ar2l k4arm k3arn kar2r k4art kar4v kask4 3kass 5kastal k5astu. 1kau 5kaup kák5 kám3 k1b k1d4 2k1end 3kenn 3kerf 5kerl ker4m k1f4 kfalls5 k3g kgl4 k1h k3ið kið5l kig4 k5ill k5ilsi kim2 k5ing k4ip k1ir k3irð 5kirk k5irn k4irt k1is k5ise kisk4 k3ist. 
k4it ki3te 3kíl kís3 kít3u kja4l kjar4a kjar5f kju3g 1kjö 3kjör kka5l kk5e kkj1ó k2kl kk1lá kk4li kk3rí kk4se kkul4 kkv5ið kl2 kl3að kl3ann klá4 5klef kleif5a k4lem k1lin 3klj 3kluk 3klú 1klæ k1m2 k4mið. kn3ar k2ney kni5svi 3knú 1ko 2kob koff5 kol5d kol4l kol5sv 3kon 4kons 3kos kot4as kó3d kó3m4 kór5a k1ótt kra5l kr2as k2rá k5ré kr2i krif3a 1krö 3kröf 4ks ks4lí ks4lö ks4má ks4n ksp4 k4ste k4stó k4str ksyf4 kt3að kt5er k4tí k3tora k5ty 2k1u k3uð k4uði kuf4 kuk4 kulegr4 kum4 kumb5 k5un. k5una. k3unar k5uni 5kunnátta k3unu k3ups k2urð kur5k k2us ku5sl k3ust. k5ustum ku3sty kú2 3kúlu k3ún kú3re kút4us 3kven 4kvé 4kvu kv4un. 1k4væ 3kvö 4kvör k5þ 3kök 5köld 5könn 5kös 2la. 4lac la4dan 2lað lað4al 4laf laf4as laf4r l3afs 3lagð 4lage 2lak la5kó la1l2 l3ald lam4b 2lana l2and 3land. 3lands 1lang lank5as 4lann l5anna. l4anó lan4t la3pl4 2lar l4aram l4are larg4 l4aris l3arn la3ró lart4 l4ary 2las las3i las3le la5sli last5að lat3ín lat4u 1lau 2laum laut5as lá2g lám5a lán5e 4lár lá4t 5látum lá3v 2l1b 2ld ld3ar ld3d ldem4 ld3ey ldig4 ldr4as ld5rau l4dri ld5ro ldr3ó 1le 4le. 2lef l1efn 4legn leif5as leik3v 4lein 4lek 4lel 4lenn 4lep 2ler le5rí les5e 2let l3ex 2ley. 4leyj 2leym lf4as lf5át lf4dr lf3f lf2i lf5inn. l3fj lf2l lfla4 l4fó lf4sp lftr4 lft4un 4lg lg2a lgar4s lg5ast l1gá l4ges lgil4 lg4isi lg3í lgl4 lgni5s l1gr lg4ú l1h 4li. lik4a li5kv lil2 li5la li3li l2ind 4lingsá l3inn l5inn. 2l3inu 4l3ir l1is l5is. l4isá l3isi lisk4 l5isr l4isti l5istu. lit4ar 3litl l3ía. lí4b 1líf líf5a lík3k lím5ug 4líp 1lít l1íu l4íuf l4íuh l4íutr lj3ar lj5arð 1ljós ljós5k ljós3l lj3ug ljur4 lj4uru 2lk lkap4 lk4as l1ke l3kr l3kúr l1kv l3ky 2ll ll3et l4ley ll3f ll1g2 ll3iða lli3g lliss4 l1lit llít4 ll3k4 ll3m ll1ót lls5tæ ll5te ll5ug ll3v 2lm lm5ari lm3ars lmb4 l3me l1mó lnar4 ln4ið l5no lo4 5loð 5loki 4lon 4l1or 5los lóð3r ló5gr ló4ma lp5t 2l1r l2ri l3rit 2ls lsí4 l2sj4 l5skin ls4kon ls4nesi l3stað lst4in ls1ve 4lt lt3að l5til lt4s lt4ú l1tæ 2lu 4lu. l3uð lugl4 luk4i l4u1l4 l1um lu1ma l1un 3lund l3unu lu5pe l1ur 2l1v l2va lv3að l3val l4víu ly4 3lyf 1lyn l1yr 1lý 2lýf lýt4a 4lýti 2l1þ 1læg lækj3 lækjar5 5lækk 1læti 1lög l5öl 4löt m2a 2ma. m4ab m4ad m3að. m3aða m3aði m3aðr m5aðs 1maðu maf4 m4ag 3mag4n m4ak mal4as mal3dr m3alf m3all m4alp mal4t ma1m4 4man. 4mana. m4aná m3anb 2m3and2 m3ank m3anl mann4as 3manni 3manns mann5t 2m3ans man5sa m3ant 4manu ma2r m3ara m3arb m3arð 1m4ark marks5 m3arn mar3o mar4s mars5m m4arú m1as mask4 ma4sp m3ast. 1má 4má. 3má2l mál3f4 2mám má5mu má5p 4már 4mb mb5aði m1ban mb3i mb4ir mb3un md2as md5asta md4v 1me 4me. með3 m5ef megin3 5m2eis meltr4 m1end 3menn m5erh m3er3í mest5a m1f4 m4fí mför4u m1g2 mgl4 m1h mið3i miðr4 mi3ge 3mik milj3 mil4l millj3 m3ing 5minj m3inn. min4s m3inu m3ir m1is mis3k mis3lu mism4 2mí mjó3sl m1k m2ka mk4arg mk4as mk2i mk4l m1l m2la m3lag m3lau m2lá ml3ár m2li m5luk 4mm m2m3a mm5b m4mó mm4sv mm3u m4nesku 2mog 4mok 3mol mong5 mód3 móð4s mó5g4 m5óní mó3rau mó4s 3mót mp3á m5pe mp3i m3rau m5rá m3re m3rý m1ræ 4ms ms5ál m2se ms5ei m5skau ms5kj ms5lá msn4 mssetr4 m1sv m4sví mt3að m4ti mt2is m3tug mt5un mt4us mt2v m1tö m1u 4mu. mug4u 4mul 4mum m2un 1mund m3ung 4mur m4uru mu5sta m3úð múg4u m1ún mús4a m1v4 1my 3mý mý5m m1þ 1mæ 1mö mör4 n1a 4na. 
na2da na4dí naðar3 naf4 nafl4 nafl5an 3nafn nak4a na4kr nal2 na3la n3ald na3li na1ma n4anaf n5ang n5ann n4arak n3ar3f n4arfi n4arfö narg4 n4aris nar5m nar5rin n4aru n4arú nat4 n5aug n3auk naum5a 1ná ná1g ná1k 3ná4m nán5ast n1b nbæj4 nd3er nd4isv n4dj nd3ót nd3rek ndr1u nd1ræ nd4sen nd3ug nd5ul ndur5g nd3ú 1ne neð2s 3n2ef 4nefl n4em nem5a 2n1en 4n4er nestr4 netl4 n5eyi n3eyj né5sk n1f2 nfr4 nga1m ng5are ng2as n2ge ng5ek n3ger n3get ng1ey ngi5k ngil4i ngi5lið ng5lag ngl2i ngl5ið ng5ólfs ng4ra ngr5an n3grí ngr3u ng3ræ ng4sj ng4sp ng4ste ngurs3 ngu3t n1h 4n1i n4iði ni5fr ni3gr ni3lu ni5m n4iru n3isa n4isá nisk4 ni3skó nis2m nis5s ni3ste ni5stæ n3ía n3ísk n1íu ní4um njál4 nk2 nk3að nk5and nk3ans n5ká n1ke nk3i n5kó n3kunn n5ky n5kö n1l2 nli4 n4list. n1m4 n2n nn1á nn1e nn3g2 nnk4i nn4sj nn5ske nn4sto nn5stun nn5tó nn3ug nn2us nn3úð n1or 1not n5ólf 5nóttin n3p4 n1r n3ram n5rau n2r4i n3rit n3sak n5sát n4seg ns5es ns5is ns1í n3skil nskír4 ns5kja n5skö nsn4a ns5r nst4e nt3að n5tak. n5taki n3tal n1te ntgen5 n3til n2tí nt2s nt5sk nt5sm nt3ug nt4v n1tý n1tö n1u nuf4 n4ugr nuk2 nu1l nu1ma 5numd nungs3 nur5f nur4l n3ust. nu5stað nu4sv nu5ta 3núm3 nú4s n1v n5yf n3yrk 1ný n3ýg ný3l ný5sk n1þ 1næ næl4a 3nöf n3öld n5ön 5obs oða3l oðr4 oðs5l oð4ug o2f of3ang off4u of3o of5r of4sj of5su oga3l4 ogast4 ogs4u ok4as olak4 old3u ol2g oll5eg ol5lit oll4st ol3m ol3ó oltal4 o2m om3a om4m3 om2u om3un on3sv on4t on5tó on4us opa5p op2h3 op5u4 ora4 or4di or1e or2fe or2gr 3orí orlag4 or2ma or4mi 4orn or4ne or4sta ortr4 ort3ug or1u or3ug or3v4 or4við or3y o4s osk3i os3m os2s ost5i ost5un 4o4t ot3að ota5l4 otam4 ot3ro ots4á ot5un o4u o3ve ox4 ó1a óaflat4 ó5an óar4s ób3ak ód4a óði4 óðl4 óðm4 óðv4 ó5e óf3ar óf4as óflu5s ófr4 óf5us ó2fy ó4fö ó2g ó1h ó1i ók5lo ó3kr ók2u óla3m ól2as ól5ik ól4is ól4kv ól5om 3ólsk ó4m3að óm3ar óm4bæ óm2g óm4is óml4 óm3p óm3stu óm3u ó2mö ó4n ón3í ón5kv óp4e óp2h5 ór4as órá4 ór2d ór4dö ór5es órf4 órg4 ó4ri ór4is órík4 órj4 órk4 órm4 órn4o ór3ón órr4 órs4a ór1u ósa5f ósak4 ó3sem ós3end ó5skaf ósk4as ósk5in ós2l óslav3 ó2só ósp4 ós4se ó4ta ót3að ótap4 ót5ef ótil4t ó5tí ót4ó ót2v ó1u óum5b óv4a ó5vat óy4 ó5þ p1a pag4 pa4le pa3m pa4n pan3gó pa2r p4arat pari3f par5í p2art par5te p4aru p2ák p4ál pá5m p4ár pá2s p4át p1b p5d p1el 1peni 3pers p1f2 p1g2 p1h p1i pil5ar 5pilt p4ink pist5i pit4 3píp pí2t pl2 pl3að pla4s p4læ 3plö p1m4 1pok 3pós p2p pp5aks pp5e pp3í ppk4 pp1l pp3ó pp1r ppsk4 4pra pr2i p4ris prí4 3prj 1pró pró5fas pró5m p1sa pss4 p1st pt2ú p1u puk4i pu1l pur4a 3pú púf4 p1v qu4 2ra 4ra. 5raðsh raf4f r3ald ral4i r4all r3als 3ranal r4ani. 3ranns r4anó 4r3ar r4arað r4arú r4ary r4aræ ras3i ra3tu r4aum 1ráð ráf4i rá5kvæ rárs4 r4ása r1b4 rba4 r1d r2dí r4ð rð4ar4á rð5is rð1l2 rð4mu rð5rá rð5su rð3sv rðv4 r4ef. r4efs 5reftsk 1regl r3eig 3reik r5enu r5eu r4eyn r4é r1fal2 rfa5li rf4ar r3fá r5feð rf1is r1fj rfjár4 rf4lö rfóg5 r1fr rfr4u rf4st r3fund rf4urð rgj4að rgj4ar rg2l4 r5gly r1gr r2gra r4grey r5h 4ri. 4r3ið. rig2 riga5s r3ild 4rin rin5e ringj5ar r3inu rip4s 4r3ir ri3sk ris5l2 3risn rist5að ris4un 1ri4t rit3li 4rí. 
4ríf ríkis3 4ríl ríst4 4ríu 4ríþ rj3ar rjá4l rjó3sa rjósk5a rka1m rkaup4s r3ká r1ke rk1ef r4kell r4kelss rkju3s rk4se rk1sm rk4sp rk4ú rk5vei r3kö r1l2 rl3að rl5ann r2li4 rl4ið rlis5s r3lit rl3m rlof4 rmak4 r1man rmáls5 rmb4 rm2i r1mið rmil4 r5mj rmk4 rml4 r3móð rmr4 rn3ar rnar5l rn1g rn5ór rn5sí rn5sm rn4so r4nu r5numi ro4 rog4as ron4 r1or ró5gr ró4m ró4sa rós3ó 5róti r5pal r3pó r1pr r4pri rp2s rp3sk r5py rr5ar r1rá r1re r1ré r1rí rr2k r3ró rr1u r5rú r1ræ r1sa r4saf rs4ár rsegl4 r3ser rs4inn r3ske r5skil rs4lan rs2má r1st r2sti rst4v r3stö rs2u r5sund r4sú r1sv rt3að rta5g r3tak rta4sk r5tá rt5er rt3ey rt2hu r4tík r3tó r3trö rt4se rt5sl rtt4 rt4uru rt4ú r4tv rt4ve r1tæ rt1öf r1uð r4uðun ruk4i ru5li ru2m r1ur r4uss r3ust. rut4v r4úð 3rúm rúst5 r1v rv3að r3ve r2vi rvi3g rv3ing rv4un r3væ r3yr 1rým r1þ 1rækt ræmd5a 1ræn ræt3i röf4ug rök1r röl4 2sa. 4sab 2s3að s4aðf 1sag 4sagr 4sagt sal5at 4s3ald 1sa2m sam5b 4sam1e sam3m sam1s sam5y 2san s4ands 2s3ar s4are sarg4 s4aro s4arp 2s1as 2sat sat4a 2sau s3auk 4sav sá4 s3áb s5áf sálar5 s1ár s3ás s1b s3d4 1se 2s1ef 4seld. 5semi 5sens 2ser 2ses 3set sex3 2s1ey s4eyð 1sé s5f4 sfl4 sfr4 s1g4 sgl2 s1h shá4s s1i s3ið. 5s4iða s4iði sif2 si5fi 1sig sind4ar sind4as 5sinnu 2s5ins s3ir s3ist s4isv si3ta sí3br 1síð síðk4 sígl4 s3ík sí4m sínk5a s3ír s1ís s5íu. sí3v sí4ve 1sj s3jaf sjar4 s4já sjó5l sjó3m4 sjó3s 4sju 3sjú s2k 2sk. 4ska. 4skað skaf4a 4skal 2skan 1ska4p 4skas 1ská 1skei 3skemm 4sken 3skey s4k2i 2ski. 4skið sk3inu 3skip 2skir 4skis 3skír 5skjá 4sk4n 3skoð 4skon 4skos 1s4kot 1skó 5skóg 1skrá 4skró 3skrú 4sks sk3uri sk3ust 2skv 1sky 3ský 1skæ sla2 sla3f s4lam slands5 3s4lé sl2is s1lí s4líð s3lof s3lok 1sló slu3s 1slys s3læk s1m2 4sma smá1s smá5v 4sme s4mek s2mi 1smí smj4 4smö s2ne 3sneið 5sner s3nes 3sneyd sn4ið sn2o 1snú 4snúm s4ný snæð5 1so 4sod 3son 2sor s5orði 1só4 s4ól sól3e sól3s 2sóm s5ómag 4sós 1sp2 spít3 spj4 4spl 4spra 2spró s5pund sr2an s5rau s1rá s1re s1ré s1rí s5ro s5ræ s3rö s3s4á ss3er ss5í s1s4k4 ssl2 ssm4 ss5or s1st s4stir s1sv s2t 2st. s4ta 2sta. 4stal 4stap 5starfi 5starfs 4staæ 1ste 3stef 3stein 5stekk 4stet 1sté st4he 4sti. 1stig st4isl 3stíg 2stím 4stíu 1stj4 4stjón 5stjór 5stjör 4stl st3le 1sto 3stof 1stó 4stón 3stór st2r str5al 5stranda 5strang 5straum 5strá. 1stre 3strí 4strú s3try 5strön st5t 2s4tu 3stund 1st2ú 4stv 3stý 1stæ 2stæk 1s4tö 3stöð 4stöl 5stöng s1u su3f4 5sum3a s2ung s5upp s5ura sust4i sú4 súln4 s5ún s5úrs sút5 sv4 4svag 4sval s3vat s5veð s4vef s2vei s3veik 3svein 5svep 4svex s4viði 5sviði. s4vik svik3u s5vin s1vo s2væ 1svæð 1sy 2s3yfi 3syn 4s3yr 3s4ý s1þ 1sæ 4sæð sæf4 sæ5fa 3sæj 3sæl 4sæs 1sö 3sög sögu5s 2s3öld 3söm 2s3ör t2að ta2fr 1taka 5takend t4al. tala4m 4tam ta1ma t5amt ta4ná 3tang 4tar t4ar4að tar5is5 tark4 tarp4 tar5sá taræv4 t1as t4as. ta5sl tat4 ta4ví t4á4 5tákn 4tánd t1b t1d 4tegí 5tegu tei4 tein5g t1eis 1tek 3tekj tekkj5 t1end ten5ó 4tepl t3ett 2tey té4l t3f2 tfirr4 t1g t1h th5ers t1i ti4an ti5kis tild4 4tin tirk4 t4iræ tist4 t5ist. ti3sta tistil4 t2isv 1tit 1tí 4tí. 4tía 3tíð tí4l 3tím 4tín 4tít tív3 t4jö t1k tk4a t4k2i tlaf4 t3lag t4lag. tl2an tl5ann t1lá tl2i tlur4 t1læ 2t3m4 tn2s tnsk4 tnskr4 to4 1tog t3on 3torg 5toru 1tóm tóm3a tóm5as 5tónl tór1 tór5au tóri3s t1ót t3p tra2b 5traðari tr3alí tr2an tr3ann tr5ar 3traus t4rey 1tré tr4is t5risi t5ríka 3trj t5róf tr3ótt tr3ug tr3una tr5uni 1t4rú 1try t5ryð t3ræn 3tröð 4ts t1sa ts4inn t5sí tsj4 t1sk4 ts1s t1st4 ts4u t2sy tt3að ttak4i ttar5f tt5á tte5rí tt5ern t4tí tt5j tt1l tt1or tt3ræ tt3ug tt4ugl ttv4 t1tæ t1u 4tu. 
t4uði tugl4 tuk4 tu3l4ið tum2 tu5min t2ung 3t4ungl tup4 t3ur3e tur3k t5urs t4usa t4usu tutr4 túd3 t3úð tú4l 1tún tún4a t1úr tú3sk t1v 5tveggj t5ver 5týs t1þ 3tæk 1tök 1töl t5öls 2u1a u3af u5au 2u1á4 uáætl4 4u1b4 4uc 4ud4 u1da u5dá u3de u3dó u3dr 2uða uð4are uð3k4 2uðl uðm4 2uð4n 2uðr uð5ris uð4se uð3sv uð5sæ 4uðul 2u5e 4ué 4uf uf5á ufd4 u5fit u3fj u1fr u3ful u5fú 2ug 4ugal 4ugat 4uge 4ugil 4u1gj 4ugla ug1lj 4uglu ug4n 4ugó u5gran 4ugre 4ugrj 4ugró ug3ræ ug3ta 4ugö 4u1h 4ui u5ið 4uí4 2uj 2uk ukak4 uk4as u1ke u5kinnin ukku3s u5kó u3kv 2ul 3ul. 4u1lag ul3ar 3ulb 4ule u5lind ul4is u1lí ul3k2 ul4la 4ulln ullt4 4u3lo u3ló ul5sv ult4i ul3us ul5v4 u3læ 4u1lö 3um. 2uma umak4 u1man um4aran 2umá umál4ar um4ba um5bæ um4bö 2umd 2ume um3ein 4umi umj4 um4ki 4uml umm4a 4u5mo 2umó 4ump 2umr ums4 um3sl 3umst. um1t4 4umu um5unn 4umú 2umy 2umý 2umæ 2umö un3ar1 unar5a un2as 4unat 2u1nau 2uná 2und 4une 4unk 2unná 4unns 2unnu unn5ug 4uno 4unó uns4an 4unt 1unu 4unur 4unæ 4unö 2u3o uol4 4uó 2up upp1 upp5a uppal5 upp4i 3ur. 2ura ura4f ur5a4m ur2an ur5anna 2urá urða5rá 2ure u1rek ur3ey 4u1ré urf4a ur3fl urg4e ur3gj ur5in 4urit 4urí u3rík ur3j4 urk4a ur1m ur3ní 4uro 4uró ur1sk4 ur3sna ur4sta ur4svö ur5til urt4ir ur1u ur4unn 4urus ur3v4 ur4vis 4uryk 4urý 2uræ 4urö 2usa u3sal 4us4á 2use 4usí 2usj 4u1sk 4usl 2u3s4m 2usn 4uso 4usó 4usp u5starf 4ustá 4uste 2usté 2usti 2ustí 4usto 4ustó 4ustr 3ustu. 2ustú 4usty 4ustý 4ustæ 4ustö u5sund 4u3sv 4usy 2usý 2usö 2ut ut3að ut2as u3te u5til u3tó ut4stó ut2ú u1tæ 2u3u 4u5ú 4u3v 2uy u3yf 2u1þ 4uæ 2u5ö ú1a úal4 ú3arf úb3an úbli3 úð3ar úð5g ú3e úf5ar úfl2 úf5li úf5ly úfs4á ú5gala úgó3 ú4gæ 4ú1i úk1l úkr1u úk4s úl5er 5úl4f úlf5al úl4í3 úlíp4 ú3lo 4ú2m úm4r úm4si ú4n únd4ug ún4gö úpl5i úr5ef úrít4 úr5sl úr1t úr3un úr5v ú3ræ 2ús ú4sa ús3ei ús3í ús4sti ús4sv ús1v út3e út2he úti5f út2is útj4 út1l2 út1r 4ú1u ú1v ú4va ú5þ v2a2 vaðr5 va5fo va5h 5vallag va4n var4ma varps3 var5ú var4v v3ast va4t 3vax veð5l ve3fe 1veg v2ei 3veið vein4as 5veis 4velg 2vep v4er 1verð ver3gj 3verk ver2s vé2 v5és v3ét5 2v3h v2i 2vi. 
v4ið 5viðar við3l vil4i vi3lið 3viln vi4lo vin3gj 4vip 3virk visk5un 3viss vis4v 5vita vitk5 vit2n 4viv 4víb 2víð 5vík 3vísi vísl3a ví1v 2vn vo3k vol4 vork4 vor4r 4vr1 4vs 2vu v1uð v1un 5væg wa4 win4s4 x5ar x2as x3e x3f x5i xis4 xí3 xík4 x5íu xt5að x1u4 y5ba yð2s yf5a yf1i yfj5að yft4is y1i yj3ar yj5ó yk5e yk3i yk3su yk3u yk3v ylf5i ylgn4 yll5a yl4v ym3a ymp5í y4n ynj5ar ynj3ó yn4k yn4t yn5u yp2us yr5e yr1i yr3il yrir3 yrj3 yrkv3a yr2l yr2s yr1u y2s ysj3 ys2s yst3ug yt4h ytil4 yt4k y3v ý5a ýaf5 ý5á ýð2s ýfl4 ý3flu ýg4r ýg4uð ý1i ýja5f4 ým4a ým4k ýpru4 ýp2s ýrf4 ýri3m ýri5p4 ýr3l ý4si ýsig4 ýs4l ýs4m ý3stá ý5u ý5ú ý3v ý5y zó4 þ2a þam4 þarf5a þar4m 4þb þ2i þist3i þjó4 þol5a þor3f4 þor3g þorm4 þ1ól 3þór þór5i þ2r2 þra4 þrás4 þris4 þríl4 þrí3t þru4 þum5a þumb3 þur2 þús3 þ2v þver5sk æ1b æðn5 æðnis5 æð2s æð4ug 4æf æf1a æ4fi æf3ust æg5ist æ1i æj3 æj4al æjark4 æk1a æk1li æk3u æ2l æl3an æl3e æl3us æm3a æm4al æ4mundu ænak4 æn4k ær1a ær3e ærgöng5 ærif4 ær3ist æri3sv ær3l ær4n ær2s ær5un ær1us æ5rú æs4is æt5ise ætl5i æ5u æva5g æ5val æv3ar æv4ara æ3ve æv3in öð2 öðl3 öðm5 öð3un öðv5an öðv3ar ö1fa öfl3ó öfn5 öfr3u öf3u ö4g ög3gj öggv3 ög1re ög2us ök5e ök5rá ök3st ök3ul ökv3a ö4l öl1f4 ölk2 ölu5mi öl5un öl2v ölv5an öm4b öm3u önd4l öng5sv öng4us öng4v öngv3a önk3 önn4l ön3ug ön5un ör1e ör4ge ör4lan örm5un ör3ó ör3ug ör1un ör3und örus4 ör2v 4örva örv5al örv5an örv5ar örv4i ör1y ör5æ ö4s ösk3ul ös3u öt4s ö3tug öt2v ötv3a ö5u þ6ó", + ["compression"]="zlib", + ["data"]="xÚ5Knã<Ðh·¢\0214 ×ÝóYRd=ìKJþÛw7=\9Ѓ\0\13\4È @zâÝsJý\15RER|³X¬*\22\31§çŸºø\1<5¦*ÐÜœÀ}½\6L(ÕÏwq™Í:¬íó\11<×~œ›\25¸4\25¸Ö“pmü¬/Õ§æ¥5°½ŠòÍ6÷¥¥‘篥\17®¶øb/Î\7ˆ¯çÓ¬çdŠÍž÷\0y£šîÜŽ´ÔõÛ˜ŒŽÃ¸¶ƒu\"W7mÂÔ,ÀGO?z+îGÁåù+Qq?\27™#=\5xþ*‹\31é\18˜2œrËІ³án¡á\18_gAä°K¯]j3\25­p\\‡\11\25ÇlÓÏ÷\\RA\20ºD•Ó)ÀÜv¢å@·ç{-NŽl:åÍØjÆ(²\11£Ÿói°ÄÜS›ï\4žïNíl/\22\27YN6¾œ¦6\28ír\17D†Çš•ÕŒëó½íEߢëÜ<ß÷\31tøÃfž\31Ï?íi6ÐGt¨„S„—f\20e’nf¾9êdéäj¦ç{@ê\8üµ0š|ZN+=ËÖ‘§1à% ƒËNh¶t^ž¿jZÎ\14?¯§$!䛑[äÜ\2ØÜfëÛhhœic;†´™mÛM¾?ßYÐ}\29¬q_×\22Ú‘ê¾R)Ü\14ØB·÷n\24\21ù~”Bžo÷±&õùÙÛÀósÖžŸéHÜ꽨NwqjN/‚3ñòù«\18‘ð\2Yק\23¨ 8µ/\15\18žo\19›¥9ýg\14ÈêÔžŸ\31”<ïEÉÎã\11›\17ìÎd\1\2%ò>ÿô§øØ??#u\0088ƧÈ×Ìcd¨–H˯´üüÚ\26»Û\\GkÝHc]&Ú\23ìíÓ|\14”ÖâT÷Ï_Dš>™Dýe\1271u¶ƒe?ï$·×4\25‰,éù\27”\26>´ì7šï³õ•l\6ÃùJ¤Ú-°Ï\1W:Õï‰\1V'Z¨‡\19_‡öùk\22NÓ@£\3ü ÀÕÚùáºQz Ñ¡‰äÜŒÎÑÐä[ÀÍ2m\12õó-¦±|\0050Ø:r6#\3„._i×A]˜ÅêÄP¦Ó~klvrðåä\28M㺚Ÿ@ÞìýD\0176”s7åX¾)SfbùOõt7ç#ªø6\30U;\\6+YçÓÊœ´¢èÊ\17Z):Ÿ`j§ùùKú™»¥2ȹì\5CÁ\22\28:fl~%>Ff¶éj>»4Ã\3\"ì\20ÃaЧy®L›Ý&‘öm\23– ©¹e\15®+gf¾\14vç:™t½\25vÓZ.?ß6S÷›\16VO3÷Æ2\15\7ÆZ\11\29(½^êÓêb\30µ\19Kãr &Ð9™Ã..œ\16§¥² ¬„å\"…-ufò—l×–rc;H,ë\15ÁéGÁÁr:»\5Vfµ\2Î\17v\3®,\29á³™ÎP9áælK«G‡_è)ˆêW7@\21Ý]›\1RZ‡ò\28á‹á6ßh}­ ¢(1¾\4tN8ð˜yŠÒùuâT\0Öw;Ç@à,gs®}ÀWSÊ)ÂÑÍ5›\"ï³oìŠ`Ä\12)o¶Wù¬i÷3“-ü\14ø\22Ðy®¯\2&ª:±&·`\14õMr©oc„©¶½qXóq“ŒoÍ.\17ß`WLf¢Ä\12%7 \8rM\17[%ÒÄ(‰\17 òTè/Œ7oG\1'9I¦ˆÙhI}þš¬\18\28UÃõ,\4ÚŒž+3»\ +–ùc=0¶ÔÆR\17\28\15”ÍûK¥.êéœÃÔõ?l…cý@ò§ÔMv§›\3®v1ÆŸºÍ\13/;¢–ÞZú¨«—A¥>Kp©Œa×.tj\6ÙÕ)\13î©4ÔÙUJí\24œ#½\25m2Á7ζ3:\21Í©M㿦ÞÍr‰\25™\28Ü\20”œZ÷eª§ »´˜\4Ñ»¢I\"\6]6®Ñ\"|Í™uQ™x\18än)¹\5RðÕÄNüåPÜ\27I¾j’Dš¤ Vtô%›»Ý$…´!w\25I±íS\29\21íGÃÍ\0303º{J°Zìì–#::ê\6Oõ#æ\31ú³goNopT×îí\30Ÿ8€²Ë[fz›¡ )3ÇÊfw_Ž…Èt²Í°Ò“‘KäŸ\12J¬yŽ]\\æÅü«à\26%\ +™zF$ÚœD\25mÎÍ\29\18ÎÎ=\7¸½aôòÕˆï¢å¦Eãp 5rŽë¿h:Ž†˜3\19.~®ç\8»¿øð\17‘äi€œyû‘b‘ÚÀÛ¿„ÏÀû|¤»Û¢\7_GüqḎ̌…˼\7t\22só°½Øæ0Û\8›\7®»Õ1Âzó|\0022_\14–c\6\7z/ÝÝ[Ý= ‹íÕxì\127\6Cœ\"Ûšk\14\20‡rB\22b¶2ö\22ˆ5Ù*ç·Ý*¹Ciw©þ\30ßïc¤Øfw…÷عÀ\22á¢5@™}hä\15;ò­RüË)µí°A:¸ßQLì‰\19äf^\11äSk‘f\11!éKÈ îMð¥ú>*çxŽ\6\5þ¿Èô·8UÏ¿A\21uLeKïÒO€\19ì\20È\1fÊ\11 
yÙ\13\"V5±n\2ŽCQ\13‘\29x;\29éû\17ßýΡñü…’ðüÕ*\4\5z\7!üøav\7ØXï0Ÿ¿\0066(p6iH\22\27J«\0258\16¨ìU`Ž©²­I©Xt\11ôü\16Í\28†\"¹”X–K³óÉnÕ3\28ľ>GÞ^í\2ÜD\17T£õÀæѦü”\29ÂŒRp\ +Ì.·:fÙFb\7W([Ä\22?›iÉ1€ÊÊ”·œÆõè‡g\20Ýð\20‚c{\12\17»:«7Bœ\29ÖîÙ\17±d5­ì×ÄÞX:r´±D©´Ý´æˆ¬mvRS•·È\18°Ü£ìý¨ÈærÛY{\25#¯òd›<9ÊZ\25Œ\28•Õæ­\ +ØŽ\7JÿV>‚ïÓ\17ºÛõ¼7QoÌбɨ\127³F·‰ãÞ6\24 CÞ`\29?\"¡Ž\21/·èTtsAÕ÷èÊ=褼\31Ÿ¡W\6\16³ýÂÚ7/ðü\23õê\6„ðù‚\24Éè_NL:\31×È‚&F\ +2ÅéHÌÅ\11Á´T½\8¡=zgt-^œ…¢ziøò\18c\16‘N™$B%\0ÈìË—®xéàô\5%é…ÓI\ +„0y\4Xº—±/\0013z#pné\3L›pžêâ¥áÀ{\25·\24Ìó\29VþR«Í\4Ú‹úåR¼\\Tó_\0264¸—Ð\"W‡±\22íË•ïR;`Bazi̇Xa§“baý\2ѽxÐ\17’o¾ìl•—|õKh\9@–/JÈCÊ\23rñ×[éWìå\18µŠäïBÕŠ}\9Ò˜@Ò[b\"‘c·‘S†ø§CøœÆâ?¾ÿ‡\6úŸòØ™%:£õ\0&š=Ÿf\4\15`&¡¤7çV\9éÜÄ2•”ÎTµÈ’\26åŸslc²rŠŸOÊyçS\8\21çÓ½8Ûª3gÕ¹;‘\7ù¦>wCQž;ø6òêj\",®=w”.íJ‰¾}n†‘Ð+\127à±E|9×,\16a»5\"n\11W[¨F„Ûó¨ZŠÈUÓC¤Uû§ÐÊìž]3À½&ŒÔrfÍz˜À¹dÌç’¥;W3Ñ9(ê\\{:\11i\18µÒ¤\25!âÌ>>·×Ù‘†¢pF\30%òühÕ=J\2lI`p(“é\21m\127$#•=ø`[3\17ÿ—\26\20N(\28\19™Û\127\9ð„s:õèþ~@\26Bo`®Î*_·@\15!]¢·ulp¢ÊTçÄö>7IÅ¥:ÓV›99ÏMVÕ;gÇ™›é\ +l'¶ôY»\12yÖøªQë\\*QQ\0‘œ¬î \"hþ|Ùœ%&`gqZ ½”\11œ›}y!\24,‹ˆôÈœ|9ø/©\16då]Bû\31fèá,}›é¯!N4@\03109U\28n\127<ÜþH‘@Ôp \12\7\0300³£Â,p6qVâ4Y*5¹YM^—nˆ¢)êQBï#Êi,jæ(F|;Š¹ñˆ#bDt\23jÁ #/‚s\4;“£O¥]ª†S@˜îŸAnü§\25l^s˜Æ\9uq²¾\ +Ì3Æ@Æ\24ˆ’5a-k¢è\6cñ\\&ª(òüs©åÇ\127ÐÓ?üz‰^Éüÿ”¨-\26*eË$ÉmL¼\11gÏî?s\21\0138?›y¤M£{:ò¼‰\22GµîÇÔÅ´FKåÍtÕ\20‹*6\9=ÙþÔiœ\2Å©\2ëŒ\2qr™øuä´ò\16˜\25_!aŸ\20!ÀÑ5Niç!7ãÑm\15·?a\1óÛ\20•@Ÿ÷\127ÑÏÈ;ÛB•íd®¯\14Bžni>¾l1À:¤L\11ìQü>ÆÇc¸1ÉGW%lÑ\16p¶wí>ç@Ñ+N¼#£Míéèù~\17Ñ .pLniWïÁ Áõ)*¸‡=‰\15+…Ü\9Bï\0294Vt\28\0]Ñ\27@yfF;˜ÚVt’^%:ÍeÄà]ÐEÑU}ÑÕ=»¹ë9ݺþ\"{ézè½ézô²\ +ÈlC÷\3µ\14æ\25\18Â\17Á\29Úì†\29©è\26Ò=óP…ÅltpïÑÚ\13òE7Nõ<\6¢ì\8/\28DJœÝ\8cëF%.TQf ÔÌLóãz%åß\17Õ5\12‡3ˆ\ +'ú\8\\k€µMÚPºŠÎP\31Šr77g@\0139tˆy\16,)\3\29›Ù.M'ÉòU‹\3éÐ}7GÙ9F>ku\7ºÑÍõ!Œ²™rm7CV\20ÛÌ´·N _\16-èö‚PÐ-ÈV$/5‡`·\\j¿RýÂÎ\1Þ\1º\24ªRJ·þWtkC?סfL«›¸êXhþ\16å»\0007„›îæÌS\26ØÜ’‰|Acé’ÖŠŽ“\127\23ª(v\30îhï\9\8»Ç‘9«^Ø;gih-\21Ó’¬:U“q\14ª.Í‘\0166/üF–Öi¤þh,£ŠwlOÓª»U¸hmp±ŽÃ¨Ë\14'+q\5B‹ìPS»V!“ \11›o‘%2\31\2#8è¡v_u›sÑ!z²}ºm=E„r[ŠuÚÔ¼;\20DÓ+\20î'Z\"Ã~ 3\23Ïß5üõw\0093ÿ}¨)¿ë¾3Å\29ö[“\"a¶çïùÄôƒâ@|þ® Òçïf¾˜V[Q*\5\27\21\20)r\8ÂrÉß!Wÿ®‘‚ž¿·M\3\0øø´yµÐÇÐúS5\0Ú©(Ùk3AX&°Ñ¼ÖkN*úŠ~÷a±)z4\1-•&ô°ødA&‹OªOE¯þ#¢N-†Ñ‹îE_¾Pç¹b®ú\18űo9/ùöç\0045–ÌE_#,\1XW¦Æú;tþžÕ‡Æ@õFA$‚RCOßôT\\õWÒ ¢¾wƒ÷=\7+'[¯DÕ{jõ\30ZýÐØæ+5*Ýô®cÏ2þ\0088šì\12ÄÂ\ +éë¸@\11}£ å~E¨êÇ°c\17Ó‚EŽd\27ÄV:4&Ä[²³\21û±…˜ø \13)™ÑC\24ê»ír \16\14¹º¿ØÙ‹ìd\15Ì>#6•Ôzx’\19\0%lÔ>ŸœJd/\14«¾™\17$û\25½«ìшÛ~ÞI•æ›™Â\0168<ß½Í\4A0ö$:!¾Š¦Hœ…aÖdü—è€\31¦ø\16\18.òvç€k\4€b@žø\13¬\29#{„n¨Œ­&=?ˆÍj£õ0w4DHêa^UŽAåaX5M\1\19\2Ú\16ü~Xkçweç\15«¶¥aÕ¤äÇoÉæÊr(‹²¢*‚ÀÔ\ +\21²‡’!×)Æ\3ï¥+éÄV\3f﬇ÊI/\7¯ÓȆôD—Ñë¬FËw5È>\6u¼Æ\12ŠÛõ ´\13Ücñ“‚êp\24¦Ç¨\16ÖÑ\12T”˘ lÝ\14œ5•/\14y®öˆ8¡¹^\0ò\15‚92eóÃ3†ƒ[\0122ˆaÓÒ2À\16\6µArìª\17¬vµ{sa´Ñb=È*8ׇ\29\29\29\13ˆ\29ê\22ÜçÑ^*b\11-Ð.cÔÎÙ‹P\ +‡\5:чê8 :2‰_Š“Ì¢Su7õ~’Äb‚‚y\12AKo‡Yˆ€”ÁbÖ~ùlŠ×\ +A¤y…™¼z-\12ì½'}…%´\9äžxå\0\30„”5ÌQþ\26öµQìþ\21«J€¶ÈÓnñÍÛõW-^ÅkG\8É\0240Ïî®×n­¨ºóÐ\0Ös ¸õ\18#·½v\15õ†æ•¶aS¯#¢Î+bõ\11p5¸9$ÑZT¯Ïwênµ:\0174ÏÅÁÍ\28D¯œÔ\0²êùðÚ¬d\\/\12\27\24î\3¯×Y‹Åë\21©Ò\31•@g\11ÐV\23¯ÞϼRÜÒûP&¡ùw%‹×=.\14_Ÿ_\22Ó}¡\0019i_(gÅ+üüõ®ÕóõŽ®P´¯\15>½iOyÕÜÄb\18øÔ \26OÅØ2¤f\12±w„ç//5@éž„ÿÈsnŠ±>“³„\13ŽÞ”–£ÜÙKîžïqÛ¯ær\9æmr\ +˜#U;‡™8¸FË{6öÅ؇EpìU»Æªï\12#HŽŠ \6·bl8–š\17¹o\28´GƒJ H£Ô(w|§f\24`5ŽŒ#®ÇÆ\11\127S1N\14ˆáó!î¾ÆIÑ\23Eêîý\1ÚÛ8+PŽ‡Ô:\6çjFøþ8s´\1‘‘„PÀØÎÑ}t-v\12Š×x3¬<4ÂËš}±\26x.Ÿ<ýÆC\0152f\31JÄ\29¾ç¨”3žôìW-\11”\127›¢…ç',b¤&\20!À\11«H7\26w\\ è\19HŽÖûh‚0«rT\7\26WäqÕÎj8\17>mì¥JóÛ\14ZOœ\23Àû±¨q!=z!=Æ-³.Aÿ‡Å\24M\29,>@C\20ãì\19Ò•µ¼FÁ\15ƒ\25Er\\½§\25×0rŽpß9¾\127Û¨Y½\19qžZo\127\17:%\23&šå’½V‡lÚˆì4ûÛðïbL}\21é½D™ÆÁ¬^jŽå±Ú‰¦“:ëèQ?&\15\0082*Ý\1Ç\31\7Z\15\20uØgD 
—\".%)è¥Õ˜´Éó}‹p\3Yð…CÄ\"_\1Iá`(GD\8´fÎN\0Ë\11E;iu>±\24Ù\27\9(öl\22G‘_ïÆÇÕƳ\0184èböK̉\20Õç\9¡\11‹Fi›\24SýòÓÕ('\2\16FÆñ\28N\27cž¡ìÑhâb\29í\18²v£ê ¤\2¶¨ž\ +|Ï\12F§¡Ø…q\0139\30×c\\C2šÍýâ5d\22…\31\31¯GÎ@#ò\22u81¹\5YŽ\14òén[Í]Rm­\14ø~ä¾³›³kÜBk\24Û­sú·WR8\13LWV\25·ðL\27·Ê)Úֶ˖tñÉw´\29—\30Äö\20èî˜ä.±lôg¬î±;›ñ§\4¬1“o‚Ïâù®,\11Ã>M„Os]\24ÖfùÎ\28¡º¾Ç\29\19i[À]¨êú®·”Œ^åø½yé\ +\26ÑàOÚ\25ºÐ¢]å:°‡\ +õ±¼B4ÙwD=QÛmV[õ\0ÔEíRïf#¢ÁÏÀú\3€<\20ÉiÅH’V„\24g÷_\5–˜tíb(“ž&é\127CñE_%7\14êo‹\17oÒÞ'fTÔÄw¯\23«ç»ÕÎaÆ}?núÞ5\3¥À}ŒDC\6\21ÍKÌ\4\28l²9«]Ž›ª÷åä\26qÛY´\9¼/ð\\á\22\31Téߣ\23í\26õ\28}A–\11\20¹Ö½Ö¾@bÌ,B¢³\0l½Ð¡M£éX¼¸¤aþ’—kïI†\8ª­æ0u’1V¼òöñ½Éí\5¤/ŸÊì\28ë’ç(\28—uïqç÷\14iwŽý°š\5Öþø¾Å2m—6ÂÚ^ß7Í9öj\127qTáÃ@ÄùÜËEr9\6½··\8\7åì)\8pÏ~ßUU@vý~m‚Øþ\22—êT\\bbC×FóA\12G踜ZÔ\28¤ðJ\16'Þ%Ü¢\ +dòÅ\"«¡t„ÓÈW]?Dkœ#~df.ºv]¼\9ÚÍÈ°/qíš\0037} ¾FóÒ\23u5ê5\24è^\7P¹\20Ó)4ÅIMu\5é78Â7€È\5 \13t:éï89¡õ$\23€3Ï}Á>‚\23Õ^¿ðu©\16\31'\29½ŠØk}D`ÖÆè®\8Mo\2ž¬áfÅa›˜Ph«Ù\12i±P²+\0091Ô”Í0JÝ\20Wr´–³•d§I÷&ôür‚\3µº\1\22Óó×Ô\ +‘ø&$µIO‚j*Cãœ4y\2SOî.QA8\23OÞ×M\26Ìb\4C1©±Oð)LKS\\·‚ØtS0`!êÆ4.•aôX\8ˆÆË1Y&\12£“Ç0\31\29'¨_³\9ì³it0ÓaNš¼Ÿå|ãØ©'ù\12\0\0229¹öbºhI›t<>\5j™Ø\11\12€Q«ZÖBZ\13ãÝÄi\9¸”Îw5™\16BÛ4éY:MqD\16±\11“6ÍiºÇ\6™XÛé¸9\0201MSø4²“\16E\1J¢ôT\15GªÔ˜7Í\23Á>\9áCåä‘;y+35K\8¿Óªã+<µ{\16æ\20å\4¬§52_Y•+ësí{&ý:·g¡C½ê\1A¾+G, \27d⮡WLì‹s@dšI••n1\\NŒ)Å\20„çÛ\20\ +ê\20\14\13“\26é”F·j9¥˜3MÈÔNÆ\28öçéP+A2o\16\19 Ã2SŲ\8ƒ‚óÆpòCr9\14j\4æN:õúfª·+\\kj\17\4 8–.ÌOÓ¡xNîâÉ\27G&¾Óï|òæ\ +ðÂøÛÝ\2D㣘'•1\14&oÇ©!ңқ㋫õiO(3“\6«i×+a:¶ü¤õ\ +‘{ÚÉiR\11‚¶¾fË{·IXæÅ•(\21\27r¬J\7\3¨³ÈÄW*ü\0Éäæ¸1ƒÕð\21\30Ã\25ì–žÃg4\16R謱l\14?Œl]ôJ5ÃY\15.D\26JÓ|ªõx§¦TÌÇQ0‡-œ\0046¹aöê\28¬hÖ{΂ZVüò°œ½ÏœuÂÙrÌ~ÄŽ\3qFCWF\4s$”ÞÂSˆ\21\1lvŽÝ…Ô\2ô‘\4ÈS\30\18DžpßÁ'µ\8›?¬ìÕ¬ÿ3 —‚Ð`\7ý\16dsÍgyÓ|\14\23˜Ùkø®ž9ÛŒ\\ýâ\21S9ÛH‡¤î®žËŽcø@?cƒg±\30±$Žñi\18¸jrNàÍ¢t \0119lît\5¨ôtšë‡‹ß=.\17¦ûÇB÷q\25K\4½˜#“¶Õ`H\14i~®û‹)3À\23\18œ­Nx\8ws¯±bî\15×¼™)\0278ögm™ÔëqH½åÀ¦›¡3É\7\28³F;\26\24Ôåæ!Rä?fM~…\5Íz—Ï(C3Ê/U†Ep\30%6TM\0194W{\17}6ß:è\29‰^\0274\21ý®æ¸Dh@T*wçC–ÔÆ\127yíC°z?¤ø°\17ñØš5•¤B‹\31C§«’©R3\11…lk¸\15ê@\6D‹’üê\0229\28¹\2_xìô\7z=\16|j¾\4©h%ÕŽ;#QÐA!,#0º!™¨rV†\0\"uI§|\14G·yŠÅÓ¡\30™Á:iB™½©'ì> \6VžŽ¸è†éÆì=\13Ápð˜gÝ\6´-!Ì¥ÆÍy\14·PrL\1\23‹hçôüU}šg„K¡]›ƒüÉÃF‘ž\15Ÿôyy¡l^7‡çù¼jË™\15ùmnWH\0309±õ*Ñ;\30-jö<™Ëk2Á\13øῸ\29\0025È¢ó\13\1›ÝFBekµzu…d;‡–:Wùâb¢ŽB\5ú¢\4Z;\23º>,æs8«‚Ju®™a\29GÅ\28Ž½³\6I@\16`芳\20$5Äi1ïA­»\2鬀ç¬É\7BTž½\ +­g-¨sœ\0063B³s€ÐL¯Y\22­óýh0ŒŽ3\28\\\26y <Í\30Bxйò‘$¡oJ=¿¥´o}G|j\0}R“&JŽs9ÔÛt©\15¤<Û\26šâ#9}?@\30\20\8yÄós+\22vgµ@È‹¯i\22_Ó„òóã@§\3\7JÅ\"·„q/qY¶è1C™¡¨—“×{„'Rgé\16¤YÍ\27’>àìçù\22ßi8|hXC6¦ðè‚*”Ê\22ÜSt®\"6\5¤¼oÜ4M\24\24\15”#U*0OÄÚ\28ÝFzµb»YE?R$§—€î±EOr>§\9Ar‰ÓƒH}\0216V•r»Ø-'K\25ƒ’;,'YÝr\\½”J Š!\28k@¸¢ÞžúíT‹ÞŸ€vÙ\3Ý\"\31JíB\31^\14%h)½\12_ô!dÀ˜4“‘pV,ZÎmÜJ,^Ñ”ZE\22NƒE_\13–‚3aé†q­‹v©´é/Ý,÷]\14éyñ\12 Wz¥«]¼%\\º°s/e¬›þ¶K\\†Ñ=™À\"-/ðYÅA;¤Õ“”Qe¶^Fæj„\3\9\17ü€\17©•§Ûe\\/\17f=\9;}Ám\23™í\"³]F\0B¶\16h‰\25¢\11êNÈBKÉW-$Ë\0207ÜK°³Eu\127Q¡YJþt™[Â\"¹„¯\28\9’Ì\28O\"\22¯é—V\25·Y˜yI¢X4u/\\g\9ûâ‚ÒN\2L`‚{.WXôrE*^®Ô\127]\7Háùq®…¡m-r\28f\1­X\127D¢µç5XwÊEN¸Ü¼B]ÜÛ‹\30\25Ë‘#®º\22\0057\0›w\9\13¾Y¨3Çû«E%žpg·³Z\17‘é\"Œaù oɹ;VÕ¾3\0ûp°¨¥aO/›Þ/K½Á}—Mgé%dDÒÉ\31®å‹\23¬\11\7X·¡[[S¬³R=_õ\21Y· Tàb\29Žrã(¤\12\28g\13涺}TÖ×f—\18v—\8AL\0oh×}a•`n\18Ò®9\6Èq²\30ª÷ºÿóœXw\15\7¢0=ö³W@XüŠœ¶ú@f­\31ˆ\11¥\23BLò·›ÿ\27™ÜçÜ\7¹üõ#TäõpTòi¡Ð¹Y„OTîëK.®ú\24Ï\"æíªóiDô\17»V}qíã}àµïá·D®\0æñÚK\6\0043©CØ¥AÞý\\\7qZ^Ãâ}EË$eFc½\"–w¢VQž˜\30ZW…økx1^çM/‡+;âêiy夬Õn\28\18»²Ðw \"Ûõ 6ÂÐÙõvBž¹ÞªW²ßÚ:ÒIÐœG D|MU\31U©¯n÷k‚?˜\7‰Óœ\11bþ\21¡‹àjÎÆ\3çšdZ@Wû*s¼¦#èc92Å…\12±Gqeu®ÙívÕ¿àª×à5ž¥\8U@¯vû ½ëvj\29éæÓ ÒP³¯[\\¡]#ïÕIÔ†{ýÙ\20Ï\15=¹?Nú{\25‹ç?\31!\21>?^j-õ\31Úˆ\21\17Æ&Ð| 
å@÷(Õ\1tÐ\19Åk—>¼CÁú\31\127èµ\4ªú\7°ñh\"2Øú«`\4LñÖêC}Žˆ/¨>ŽwL\31Ç­ÆÇìo2€Â5ý#îlM¼.\5ë«[ªšÒqŸñ±\28}Yš\23o\18>–hk9J.1€¥¾\5Ô‹J,¬–蘾î\31òFQ8L\127Ü\26G\8\13´ ã=ÏGA\7²œ7Ÿ\18ÜN«Î=`\20–›\18ÌF8H0·S¸ZÝ*-ìF`š·Un•¯‚oòäM]#ÂÉ0ÛÁ0\5Gní\25ÜÍEyëTžn\29zùM\17à¦ì\127ƒänPÜmœu‘òŠ›r.×·1öÔmd+Ô7­\25€Š¯\28e·Ã\8}›½¶º5š”뛦ٛÇZyS°¾iɸU\20»µ§)‹èüMJºÝX´Û\13nmüà =¿…ÿÊ\13qá¦-øÖ(§Ýâ7.ê[ºP­\22>AÛŸò\17`ؾ½i· Dï¶\ +‰éÆrÜB;¿y&ÜvÍòôˆ/ñ¤é\6sÿ¿0¯Êû+]-'1¯EòW\25Š\20²fÒõ¯Hq\127b\ +ÒWŠ×ðºZ?\"†\12\14“e“Üeñ”†™I‡(™Â\16™Ž›ª\20¦ÈtBf7\9ñ¢<žÐ\1{[b\25'\13¹\4ttÔ͘±%}w“¿Ø’J:U)DóÉb:ïÉÊ’ïͲH9ÃtTÏ\20/'Ž9¢²ô\20L*5¾ú\22f†®S?'\\ê\6\7êCpŦ‚í‹*\7´¿ÝcµÙßô¢W*Oýɧª)¼'Sü¢\5§¬×À©WïKþ¦EŠ\31€iÌ%u †\127 h%½†SüœE\ +ßaŠ«Œ\16\9{yÒ-ï\12719‡\ +&…8<3•Z\6=[˜—fðRj‘\15õcc\1â:#Ð5à\4Ç%É‹0E|þ ¿\20¿\25\19=VMM£?IaIR\20\20\28Ì\21\0190fËü3\20\19вY¦\17\6€°¤\18Ò@™?\2B0024܈ÍÆ<¡ì\1Ùý)ì|‰é@\8I¡ûž\2Ú\8LJâéßOd¤Z©2)U¦ÉWú©™:‰P”ùîÍ’H–EŽ¼Dø&”è¦öŽj™â÷1RPı]SXÙ™>\5Ñ4‡\21.Í!\17§phLJ\8iFè(Ò\"cMÈR\20д‘Y7ÍyÉ_´ =\ +k\18`\5\22Vz1»§‰Ïð?â£\11\31wKI=‹þë\ +\20LMjud\21Ñ$âæ•á\5µ!¹ÑÀ•²×¸âLH\0304ˆ\\›\14#`òh\11è\20Êô}Ò—äÏ\20¿¹Ae>”¹¹!oœ8é\6SL·Xàö\6Ѥ pàI¾=Jñô(…gNòõCŠë„äY“ÚØÊ¡ª%¹\13ò\0§K8âP\9‚rŒ:wFâ½\17\17×®\13Y™´ÙIÌ\26†¬€y®40ëÂs7¯ò,Ÿm)Ç>hr4èv=N¢´ù\0168ÅCõtøWs\22X]Ül¥x£ž¶êÕºéÇI^û_Çãù´\5¥lÞ=¥\13ÒL[(û),œ”°!OR\24;ÓV*Ã2öàdº\\Òÿ`§iM¿#8&Í™|• \15¡:í1 &4\17\31j¹JúX’—ôÃÞ™\20¶Ru§–»Fg’µ$%¯²ühûµvÏôü¶\17ÅkýÖ™µç›–±ˆ®ÆôbˆW&ȉ੔6>½1ÊÞnåÓ\11¡h4Çïù\20¥NO¤CD­ȧYç˜&\7Ãç3ãÊ'Oq¾/H³À:’Ê\28\31\30Ö½ZŸ7YÖžŒ$s*àäp§Îñë\30UÖvH~ÛÑpKp/r˜\20¨þ^de&Ýð_\ +\13C½\9³æ[}kS|¡U\14ñìëô’%¤JÕÿÜÍgÎ,džQ¨o›Ô\7àŒÊèë¹ûYGæ\7}\9‡ßÒŸŠÈºÀdM.YI4ë\18˜ý…­Œ\24Ã-•àH{ÁIÛøñŸS‘\15Í;H\13\25õ{¤6\14‰<ê0‘þa\14’\28\14v\21ê\"=ÊòÔìݹå!å<ú\30…M_¿$»ã«Ù€0\14°›ˆ\17CºÚð\23¡ºv¤%gä=;Kþt–uÜ#\27¤T¢\31e}JŠ\28?û•\27xl¡‡\26\28Gä\21t`\24}¾ì…Þk_EfWy’L¦Ó\1íOøXDlö3K]\2›[$e#zT³»\17¥€Ëâ‡nˆs­Ù’rcÔ8FÇ\19\22b·ø’â‹ë¤\ +]øxJK2²OÔ\18?Z’ãV={¡®‡\"Ú|\0256\12ÕìÁH:Ê„¯¶èËX¶µ\29v—ã‚Ùæîf~DcßQÉ[ô[\9\16/åL‹aˆ¹-|ì/‘ÌZ\22³WWE\\»;ŠšC! 
rBÖI'‡ƒ;á‡Kíu\3E–Šn äùËbn\24­ßw“Ø\30ÍÒ‘¥Z$\30í¿Ù_Kã\19ì)ûÛSu^»ð¹\3K8kÐòÚ=ÎE>.ŒòZ])\28?Û#¤ç:É“®‡v˧+\31®gŠ1}UæÄÐõ\\Ò…Ü>$\13\31¬\17ôU_ 7\14úŽ„õ±\4oPh-ó\25ògQjPtó6\11ôúÌ!…æö&§ÎÉ'U9l)9N“ìi’ã4Éqšdovs¼[εÌXGx3f-„Y’.|\21í.П+k!Ïá3ŸËØ<þšŠaH¾rQá´Y5…PP®æ\20á­Ð¦¡ÅóÀ9RµÉjìÒâÕ\7D\24\"C7É…¶NÚÒ‰’Ãèµ3\5Ú-ß2ü(g‹°;‹xü*¹oJ5äˆyÙ.*hmàtàO)|cž7¯ÜK\13WTqí£¨àcZ)‘·Jü\9\14Ë÷ 0¯ûr­r˜‘Óâpº³2¹Rhヒ7’ú±L÷NÖsï~\22‡Kªk\27øG$°£îñˆ–ªÈŸËûÕŠbÍïñã\24eöȪ\31½\12ë!Wñp•\4¿eª\127Ý\26oÒý[°ã7\15ˆç\27\ +VáÃËKÀ9¾»/bª¼L\20hÒ ê0t™º\28±Tlþ\14^±|aZ\"²°¢@5sNÁù\7_f_´AÃ\11aÍuHÙËF8^ÄpLx'\1\9›ÿøù‡-~\25ª\21³Q€·€\26+ÅÏ7&xó¸Ýâ™Ô¦£\7p3—W«¤‡£\0202ÓÄ$€ì\13Gë†NÕlÝ S(h/Ø\31ÀŸ¸ÜJï×Jö\ +ê¦K᮹´¦[¼[Ãerëà\14H`\27¤·q\ +l>\25%ÓrëGM\23[i]¯ÅöÚª~o\28±Û¨/ß6úËw Ù_ÒÑLË^w€úl¿\17QØŽßÄÚÆpP7M©Ûû\31¸û†Ø^Æ,m¡Žlš²ý•3'\19rŽÈ\18\31bàï›é÷š6tLÛʩؼpÙâìÚôö¡ïZô7Ý|hW\31\30`è\14[8ªmsdÝch*ÿÕæ)»­HÙ[üøšÐ¯ˆñåv¥®\26–¬ç\21»\26¸Ûgøî\22Æ©@úµhh\\g£©\12¨Ï²ØŸ†ÜÂû`«o\5\\¡z)Ž\31ÝÒÛ`S;wƒ\27\27³§!â×r\16©BU±ÜdË[\24Ÿ¶6é÷qüt3–.\17ùè­ <Õ¶0@\ +דÕéèUR“722\29²+YéÑýæàÒÁB6»Š–°\29ºÀ\22zΦG\2ÉN\13¬Ü +«\9w«Ø¢Û!çoì\19¸‰?ŽÓÒ‘xB¹m‡\27Ϧ¨O}áÅF„þ2ù\2ŽŠ-z!²Ë¾<öKl\ +¹ÝæU Î,Ûá涅ÿ¦ŸÛ¶×‡&¸í\28ß›\15§h$ø\\­;\21íî6$\11ŽÍ\13\8ÅÀûœU\29\127…Œb×~»=¿ÎPÖq\23AÄ×Å›Üp;~¥ƒ5üJFÔÄ6\31\4lþ\28¨S\127ç4dý¿c¡ÿJµ²çðÌ/ƒ)û\28|†ãìå©Økä½]ê êÆÞŸ¿Ð\7\24S³k.iöÿøcOí%çÉÞž™Çݧ»?’\"DZÞ•m÷0¢tb\29]Lœ,GØ×Q».\16þ2—ö¨x‰vdgû™\26üsgªÕ®\127Ø®{ÞNÿô§\2Ú\13„æfû\11=‚+\18\";©þ6—ÞBûà!\14Ü„V¢‡<ƒan\\L::”ó\17Þ\9û‹\12»FÃ\29ø\20‘\20¥RäQDÙû5ž¼»/,©ï\0210R*^\24íÚÖªýÂßTì:\16\3\21åwm\15ûÿþ<ç>é¡h<æ\14¡²¢÷õîÓ°=~{s×\9Ï\20\20§}¶è¬CÖ\30æé=DÉÝ_Ï4e¶7óLµó¬•\4ú»R«Bå\30>¥ûì‰J~6.)\16ñ\30ïDlË\3h_~Ð\1N=l\20{Ø(ö%~ZÑ\15Nº\22\11u“=~ŒA»¬S¡\5üÓ,g\1Ý\\âÅ{£ÅaW\30%‡NHûÂâ,ñÆ”ýpJ?L¾\25´Öl^Ý\23èÌy_Ê\24˲\23ñ\11mQi,îòˆâß\1ß\2~\22»f‘²Ø\31ÝÝã·ÀØk¬>ô\28\15vOÂJ\9†tWvŒÆ{§=^3í*[ñK»ÒÏz\0218‡«Ê©\17\14\6\8¡¨ûñ- \19P3ã×Ø->äÙo\5\2M)°7·\27Z±ˆU`Ó;Ù\8:ü5Ð4ýåèÞƒÓ\26‰\31\29Ü•|5ÝÂâHœ¹-½ç–g<\\99¯ï0­ÈßzÛÑ–;Ã\16ø\30\15Ø›=^GíòºýxŒºÇ\15?îiŠb¥íÖ:cé\0269?L–©ò!¯fŠË$‘¾°ûñ3ámŸÌºûéX$uÅåÒîå’óŸ\30SÔú\29#z‹°³•e896hˆî¤¸\"òu‚n¸ø\25Þ=Ë\1jßül\27*${\14ÒÉÌñ!–\27Ùb\25\21Ë«=¤oÑxDÞãËõÈ\22E7·Vø\"F†¯H{\28\25¾\15ôv O\0279ˆFöDšÔ—cDñxi‡\11\29G;It»þìû1Ka1ڷËz\15±y?œßöZ2j£e\29ýö\7¹\31}0ß¿\18‘yâ}Ê—\23h_Þ-Âãã\7÷¾âGÓ@óX\23ž\11q\25\5öwë¾|\13ô\21¯³Ah?F|¹$zˆbŸ_¾¼·fXžµ4>Ìmhͬ“?Ýñ5ŵÐW¼\6úšµŠÑ_ß\127\0{Õ\11cþø7øù~‹\14ÎþÐÞ—ïe¾\22\127Ì\3˜­Ñ;¯¯õ\28¿ßÁ¹5ÄÈâçV8¾â.è+…w¤Q\127cï+•›0žÈv·öøÑ\5\127ðÓûd;Æ7\6òúè+\20»\3›Ý×gè\ +1![õ\26hôŠë+<}@^’}meÌÒV¦\24¿CŽ‚¿µ÷å=Ó½B¿¿ë·×‚Úþ*|-Zô\14YõÝ+'ßÔÂ@A·\\‹\\Û{¼à¸Ç\27\26rm\5JÊÏB\21¥AþdJÉiM\3Z?î¡­«¦„í箤Œ\"3£)ª´ÜuE!{ŠL±Í)’&#H‰÷ço:Ù>\127\19¬Ÿ¿Q‘ª{ýJÕl‚;ÊÍ=D’VU\7Ú\16Õô!\28Oîzüj`$ÊHƆUDù±\5öñÍt›ñ\21¢n˜\4²1†Fu0\7ÀÔ\ +}oz\31Ù%è\7/4\0272s{Wð@(AÈÉ&f’«;Í]‘~î²Î{üb\7\16Âiî0óæ\14S¾ïÅ=̦w}@©ìm(þçÔ\20ÿ£o\\Sü”Ò\127ºñ~²Ð?ëž”±øé\15Nÿ”6\127Æ…èÏx öó¸zøYîMñð·?\31á\1ôÐÙøÑCùã×KÀñàñaRÜ2<.ê%\15_™<¼&\7 ¡=Ô\19\31ú?té\6\14(«\15_ø\2™ž‡¢øcñyZñ€H\30kÜ”€4°?Öf\18l€–Šnº<üYŽG²e¯Z€þ˜û#]\4ÓÝú|3ôpÉ\31nЇìç\28¡W\31[ó\ +P«!H+þŠÎ·¿œù}ê[ƒò€ïã§{¾5_>¿k\127%ýù=¸m¿ãW\3À²ƒïË)\30ú}{|\11'àÍ_C\1E\5q\29ý|ª*òçvÀþhÛwlÿ﬛>È_[û–Ó\\°lz²Gw¾LŠN>Šÿ\23?3ú×\31 ý\27¿?ú÷øÅ?±Zîóï‹Ÿ©ø¯WÅ—£È5~\29ôïõø%Oñ\16(*)µUÕþ\"+#\20Ê}þVþ\6ê_].€þl€x<Ðó}þ‡}7ø7†ü7<×D/µ(ŠË‚¬‹\1ü½\31ÿPáùæOн=ÿ¬íâGÕÞŽ)\127;ÜT\26\127??Yü[Ó†ÃÎ\8‘·ñ\11po.À\27««\15œ,÷-\30í§(ä›$‘þ\5o^Å¿Íq8¼i\19ê\23ñ&\25\2\ +\26Ý\20;ÆEÿ›Tø|‹_Ü{KQ*\13Újìm\26G’Ψ5Þ#nEI¦þ–b<)~\\–Z¢½¸Þy¾åðIxÛâé—Ê„\19þæ‚¿ÝãgXßd¡Æâ\4{»\31?\18õ\22?OúvßÚC\1¬\2ú\22\23´´âXøŒŸoû\23ˆ*>Ëž\ +ü‡\7:\18|öÎý§¿Ù#\ +èËÓÏAÖ\6\26tZø\28â§ò>\7·\31Ê‘Î&ŸSˆ]àÚƒ|¶ù\25û:\12Ïúþ‚&»7«ä‰c\26>g\9ásþ×9ßî\0£yŽ@\11ûkí÷ÀM4\ +>¢G\3ëd¿ÖõÈ\27?w÷¹\30Uë\16¤/èp 9ZHË¿Ç°ÓQ\"~?,bzp|&ŸF‚*\127Pö3yº}úK3ó?¼þÃ)p3F\13\15a«“˧bÀgþ7\0199F³EZxÌ\18‰AoÇ\8\\ä¿ÿçùñÿ\1B\29ŠI", + ["length"]=26022, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=4188, 
diff --git a/tex/context/patterns/lang-it.lua b/tex/context/patterns/lang-it.lua index 07beccaad..3a5369e4b 100644 --- a/tex/context/patterns/lang-it.lua +++ b/tex/context/patterns/lang-it.lua @@ -56,7 +56,13 @@ return { }, ["patterns"]={ ["characters"]="'abcdefghijklmnopqrstuvwxyz’", - ["data"]=".a3p2n .anti1 .anti3m2n .bio1 .ca4p3s .circu2m1 .contro1 .di2s3cine .e2x1eu .fran2k3 .free3 .li3p2sa .narco1 .opto1 .orto3p2 .para1 .poli3p2 .pre1 .p2s .re1i2scr .sha2re3 .tran2s3c .tran2s3d .tran2s3l .tran2s3n .tran2s3p .tran2s3r .tran2s3t .su2b3lu .su2b3r .wa2g3n .wel2t1 2'2 2’2 a1ia a1ie a1io a1iu a1uo a1ya 2at. e1iu e2w o1ia o1ie o1io o1iu 1b 2bb 2bc 2bd 2bf 2bm 2bn 2bp 2bs 2bt 2bv b2l b2r 2b. 2b' 2b’ 1c 2cb 2cc 2cd 2cf 2ck 2cm 2cn 2cq 2cs 2ct 2cz 2chh c2h 2ch. 2ch'. 2ch’. 2ch''. 2ch’’. 2chb ch2r 2chn c2l c2r 2c. 2c' 2c’ .c2 1d 2db 2dd 2dg 2dl 2dm 2dn 2dp d2r 2ds 2dt 2dv 2dw 2d. 2d' 2d’ .d2 1f 2fb 2fg 2ff 2fn f2l f2r 2fs 2ft 2f. 2f' 2f’ 1g 2gb 2gd 2gf 2gg g2h g2l 2gm g2n 2gp g2r 2gs 2gt 2gv 2gw 2gz 2gh2t 2g. 2g' 2g’ 1h 2hb 2hd 2hh hi3p2n h2l 2hm 2hn 2hr 2hv 2h. 2h' 2h’ 1j 2j. 2j' 2j’ 1k 2kg 2kf k2h 2kk k2l 2km k2r 2ks 2kt 2k. 2k' 2k’ 1l 2lb 2lc 2ld 2l3f2 2lg l2h l2j 2lk 2ll 2lm 2ln 2lp 2lq 2lr 2ls 2lt 2lv 2lw 2lz 2l. 2l'. 2l’. 2l'' 2l’’ 1m 2mb 2mc 2mf 2ml 2mm 2mn 2mp 2mq 2mr 2ms 2mt 2mv 2mw 2m. 2m' 2m’ 1n 2nb 2nc 2nd 2nf 2ng 2nk 2nl 2nm 2nn 2np 2nq 2nr 2ns n2s3fer 2nt 2nv 2nz n2g3n 2nheit 2n. 2n' 2n’ 1p 2pd p2h p2l 2pn 3p2ne 2pp p2r 2ps 3p2sic 2pt 2pz 2p. 2p' 2p’ 1q 2qq 2q. 2q' 2q’ 1r 2rb 2rc 2rd 2rf r2h 2rg 2rk 2rl 2rm 2rn 2rp 2rq 2rr 2rs 2rt r2t2s3 2rv 2rx 2rw 2rz 2r. 2r' 2r’ 1s2 2shm 2sh. 2sh' 2sh’ 2s3s s4s3m 2s3p2n 2stb 2stc 2std 2stf 2stg 2stm 2stn 2stp 2sts 2stt 2stv 2sz 4s. 4s'. 4s’. 4s'' 4s’’ 1t 2tb 2tc 2td 2tf 2tg t2h t2l 2tm 2tn 2tp t2r t2s 3t2sch 2tt t2t3s 2tv 2tw t2z 2tzk tz2s 2t. 2t'. 2t’. 2t'' 2t’’ 1v 2vc v2l v2r 2vv 2v. 2v'. 2v’. 2v'' 2v’’ 1w w2h wa2r 2w1y 2w. 2w' 2w’ 1x 2xb 2xc 2xf 2xh 2xm 2xp 2xt 2xw 2x. 2x' 2x’ y1ou y1i 1z 2zb 2zd 2zl 2zn 2zp 2zt 2zs 2zv 2zz 2z. 2z'. 2z’. 2z'' 2z’’ .z2", + ["compression"]="zlib", + ["data"]="xÚ=”A–«8\12E·âYfœƒó7D\12\24\ +c\\Æ!\20£ÞFo¯WÒ÷9¿2,\28½+ÉvUÓÝ“¦éb™Û÷r_µñ˜7¾]÷'ÝwÖ9»§]µ³Å’õS?Ûýîæ8˜f°g;Ÿ¿;\15ã&\21uSD\0230}è':pê qÖ´Tîé¢×ê±€ÑAO\7}2½D=]ôtÑ\31Ø\11\3Ò\3é+¤\7Bë#\17À¨8š‘Š£Ä#â\17ñˆhD4ÖÙIô\8±\2ð\0<\0OuOuÏYøÉj\7¤\7é+’“af;䤦¹>ýIĉy8\4;A› èü&tSÕ}\25ûÅÆ\23\27_uƒ;Xèo\25Í¢ã^\22V(ËÊ\ +a¡Ÿ…ê\11¢\5ÑRE$\4ª\7.3ÐA¸<´àM\0\17,5\2Ø ,š\0094\19x#+\14 \3È\0002Ð\\`ÄÀˆ\1|Ð¥†÷…†Ûí\29×r@VÊ­”[9¿\21ðª=À+à\21ð\ +x\5¼\2^\1¯€W@+œµBHŽ@\"HÏ\17PdðH§\17`\4\24•\0030\2Œ\0ãnô—6\14Š\1GÀñbO\127d6NìMªDªÄZ\5uêMâ\28’Ž1E£›\25ˆ\18;pÒ®}¦„:1|‚ ¤J ö·ŒÍo6¿ë&ÂLó\25U¦ù<š¬ÛÊ\12\25 S*3@¦­L\11\25@–†\19É…ÜÂ\24„ôŸOŒÃÉ\20ÎÔÈÔȵÆÎ\29îz=»žÌ®7³×Gƒv7ûŸý®ßêC³{yÈ9¹^n”órJ*5%ÉírEŽâûeþì\13v“«WM|{ǵ\0092…\22Y`qÁ\22†-:OÑ\5‡]8MÆ2wœã,(Rl¡S«JåÅ\23#–k1å²Úe¨¢'VÞO¬è‰•ßºH\14g\14j\28º¥Cß$\29\18\28oÁ!Áñ+x™\23Mñ\15—ìWûƒ#åEÆ«þÌ)ŸÌq2ÇÉ\12'\13žô~r&'3žÜÀ‰àDpJðÓnOÜlZš¾P^L\1271ñÅ´\23ª\11ÕÅ\24\23]ÊA}©»ëÝݥÝ5—ý\31ª,»", + ["length"]=1806, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=377, diff --git a/tex/context/patterns/lang-la.lua b/tex/context/patterns/lang-la.lua index 657000f29..c5f7fed3f 100644 --- a/tex/context/patterns/lang-la.lua +++ b/tex/context/patterns/lang-la.lua @@ -118,7 +118,16 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijklmnopqrstuvxzæœ", - ["data"]=".a2b3l .anti1 .anti3m2n .circu2m1 .co2n1iun .di2s3cine .e2x1 .o2b3 .para1i .para1u .su2b3lu .su2b3r 2s3que. 2s3dem. 3p2sic 3p2neu æ1 Å“1 a1ia a1ie a1io a1iu ae1a ae1o ae1u e1iu io1i o1ia o1ie o1io o1iu uo3u 1b 2bb 2bd b2l 2bm 2bn b2r 2bt 2bs 2b. 
1c 2cc c2h2 c2l 2cm 2cn 2cq c2r 2cs 2ct 2cz 2c. 1d 2dd 2dg 2dm d2r 2ds 2dv 2d. 1f 2ff f2l 2fn f2r 2ft 2f. 1g 2gg 2gd 2gf g2l 2gm g2n g2r 2gs 2gv 2g. 1h 2hp 2ht 2h. 1j 1k 2kk k2h2 1l 2lb 2lc 2ld 2lf l3f2t 2lg 2lk 2ll 2lm 2ln 2lp 2lq 2lr 2ls 2lt 2lv 2l. 1m 2mm 2mb 2mp 2ml 2mn 2mq 2mr 2mv 2m. 1n 2nb 2nc 2nd 2nf 2ng 2nl 2nm 2nn 2np 2nq 2nr 2ns n2s3m n2s3f 2nt 2nv 2nx 2n. 1p p2h p2l 2pn 2pp p2r 2ps 2pt 2pz 2php 2pht 2p. 1qu2 1r 2rb 2rc 2rd 2rf 2rg r2h 2rl 2rm 2rn 2rp 2rq 2rr 2rs 2rt 2rv 2rz 2r. 1s2 2s3ph 2s3s 2stb 2stc 2std 2stf 2stg 2st3l 2stm 2stn 2stp 2stq 2sts 2stt 2stv 2s. 2st. 1t 2tb 2tc 2td 2tf 2tg t2h t2l t2r 2tm 2tn 2tp 2tq 2tt 2tv 2t. 1v v2l v2r 2vv 1x 2xt 2xx 2x. 1z 2z. a1ua a1ue a1ui a1uo a1uu e1ua e1ue e1ui e1uo e1uu i1ua i1ue i1ui i1uo i1uu o1ua o1ue o1ui o1uo o1uu u1ua u1ue u1ui u1uo u1uu a2l1ua a2l1ue a2l1ui a2l1uo a2l1uu e2l1ua e2l1ue e2l1ui e2l1uo e2l1uu i2l1ua i2l1ue i2l1ui i2l1uo i2l1uu o2l1ua o2l1ue o2l1ui o2l1uo o2l1uu u2l1ua u2l1ue u2l1ui u2l1uo u2l1uu a2m1ua a2m1ue a2m1ui a2m1uo a2m1uu e2m1ua e2m1ue e2m1ui e2m1uo e2m1uu i2m1ua i2m1ue i2m1ui i2m1uo i2m1uu o2m1ua o2m1ue o2m1ui o2m1uo o2m1uu u2m1ua u2m1ue u2m1ui u2m1uo u2m1uu a2n1ua a2n1ue a2n1ui a2n1uo a2n1uu e2n1ua e2n1ue e2n1ui e2n1uo e2n1uu i2n1ua i2n1ue i2n1ui i2n1uo i2n1uu o2n1ua o2n1ue o2n1ui o2n1uo o2n1uu u2n1ua u2n1ue u2n1ui u2n1uo u2n1uu a2r1ua a2r1ue a2r1ui a2r1uo a2r1uu e2r1ua e2r1ue e2r1ui e2r1uo e2r1uu i2r1ua i2r1ue i2r1ui i2r1uo i2r1uu o2r1ua o2r1ue o2r1ui o2r1uo o2r1uu u2r1ua u2r1ue u2r1ui u2r1uo u2r1uu", + ["compression"]="zlib", + ["data"]="xÚ5”á±Ú0\16„[Q\5ž±¨ÈزQÐ̘7¯ôF2é+»lòãNàÝ[}\2Ä´ä륥iñWµ\\,{šÖ\26ëȆgkÏ>×g[ÍÏËZ½¤©ä/H\29Ãi:—Xæúo\29iz\14fþ\127\17\9CQ&®[±)]Îü¬+\23/#ýþ5§??焈…­°u¶‘–2/lm¤Âgµc¯Ns§¹ÓÜ)Œ~\25i¾¦|eméš\27VC9^\3ãúB=QSš×”×5­ù–Ñà[á[\29õÀ{xWøVø×o\20ü[Ê\27ë@YÚhÙ`ÙÞ(È{Êûžv&íŽ\21òŽé\29\18&\14\22¦=\29´\28†ÕQ°\29H9rÀzKùv¢0yÃÛ\31i¾§|¿§;1g\0126œ¬\1½!¬í©]ö\12oC|ƒ³Ñs4œ£!§=PØ¢a‹F\31¶iÈ…ÅX\0083Ø\12c†\17ƒÝ`7Øð-Íxä°8ösìç8£c'‡Ý1îÔ1î\24sŒù39¾aûtz±£#Ê¿Pˆ;Ó™o(ŒŸ\24=ù\30c'àNXO|Ð'O\127òø'\6\30\3‡†#\0\17€\8@\4‚ãH¤\28H\ +€\4Ò\2ƒ\1 \31‰ŒÀæÔ@Ö3ó×wÞØ!?_W¶•mcÛÙ\0146\\\6tcs¶“íÁö™{±!øÉŸó\11ÉxÀ0f1ŠI\8z\1ð…£¾xD¦1ŒYŒb\00838þNoØÞ´½ßiÆgõ\5õ‹+TÐ\127O¸\ +ƒ7cðfŒÊÆë1x\31 \20\ +…B¡P(T\ +•B¥P)T\ +B§Ð)t\ + 0(\12\ +ƒÂ °äöÙžKÑRµt-À¥ÈRd)²\20Yª,U–*K•¥ÊÒeé²tYº,]–!ËeÈ2d\25²,øËúàšpM¸&\\\19®\9ׄkÂ5ášpM¸&\\\19®\9ׄkÂ5ášpM¸&\\\19®\9ׄkÂ5áºp]¸.\\\23®\11×…ëÂuáºp]¸.\\\23®\11×…ëÂuáºp]¸.\\\23®\11×…ëÂuáºpC¸!Ü\16n\0087„\27Â\13á†pC¸!Ü\16n\0087„\27Â\13á†pC¸!Ü\16n\0087„\27Â\13á†più\11‚2\24H", + ["length"]=1756, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=335, diff --git a/tex/context/patterns/lang-lt.lua b/tex/context/patterns/lang-lt.lua index 3b2b7c0c1..9959f4d86 100644 --- a/tex/context/patterns/lang-lt.lua +++ b/tex/context/patterns/lang-lt.lua @@ -59,7 +59,23 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijklmnoprstuvwyzÄ…Äėęįšūųž", - ["data"]=".ap1 .api1 .ap3r .arbi1 .arti1 .as3p .at1 .ata1 .at3r .aÅ¡3v .dina1 .ek3r .i2Å¡3 .iÅ¡i2 .kirti1 .nu1 .nusi1 .pie2Ä .sam1 .sida1 .sk4 .st4 .su5kr .te3s2 .uk3 .u2Å¡3 .už1 .įs4 .Å¡ven1 a1a2 a1b a1c a2ch a1d a4dra a1e ae2l a1f 2a1g a4gr ag3ra a5grio a5gro a1h a5inf ai4sk ai4tr a1j a1k ak2l ak2vi a2kÄ—t a1l a3li a5lo1 a3ly a3lÄ— a3lÅ« a1m a1n an3k2l an4sk an4tr a2o a1p ap1a4k ap3ei ap3i2m ap3l a3pr ap3s2 a1r ar1eit a4rg ari4s a1s asi1 asi5s a4sk as3kl a3sl as2mi as2mu a5s4n a4stu a1t at3aug ate5ist at3i2m ato1 atp4 a4tru at1Ä—2 atÅ«2ž a4u au4kl au4sk au4sl au4t3r a1v a1w a1y a1z a1Ä… 2a1Ä a1Ä™ a1Ä—2 a1į a1Å¡ a3Å¡n aÅ¡4tr aÅ¡2v a1ų a1Å«2 a1ž až2l ba3c balta1 ba4sl 2b1b 2b1c 4b3d bep4 be3s2 besi1 be3t2 2b1f 2b1g 2b1h 
b1j 2b1k 2b1l b2la b2liz 3b2lo b2lu 2b1m 2b1n 2b1p 2b2r b3ri 2b1s 2b3t bu4k 2b1v 2b1w 2b1z 2b1Ä 2b1Å¡ 2b1ž car4 2c1b 2c1c 2c1d 2c1f 2c1g 3chi 2c1k 2c1l 2c1m 2c1n 2c1p 2c1r 2c1s 2c1t cu4 cuk5 2c1v 2c1w 2c1z 2c1Ä 2c1Å¡ 2c1ž da3b4 2d1b 2d1c 2d1d de4k dema1 de4pr de4sp 2d1f 2d1g 2d1h di4p di4s dis5k 2d1j 2d3k 4d3l 2d1m 2d1n do3ri 2d1p 2d1r dro1 dro2b d2rÄ— 4drų 2d1s 2d1t du2a du4k du5ka du4sl 2d1v d3va dvi3a 2d1w d4z 2d1Ä 2d1Å¡ d4ž džio1 e1a2 e2a3l eapi1 eat1 e1b2 ebe1 e3bl eb3r e1c e2ch e3d2 ed3ri e4dro ed3rÄ— e1e e1f e1g eg3ra e1h e1ie ei4k3l 1ein ei4sk ei4sl 2eis3t eist2ra ei2Å¡1 e1j e1k e3kr e1l e1m ema5s4 2e1n en3k2l enk4la eno1 ens4 4enta enu1 e2o e3or e3o2Å¡ e3p ep4li ep4r epra1 epri1 e1r ere3a4 eri4s ero1 erÄ—2 e1s esi5s es4k eska1 e5sko e3s2v e1t e1u4 euž3 e1v e3vi e1w e1y e1z e1Ä… e1Ä e1Ä™ e1Ä— e1į4 eįp3 eįs3k eįt3r e1Å¡ e3Å¡n eÅ¡2v eÅ¡Ä—3 e1ų e1Å« e1ž 2f1b 2f1c 2f1d 2f1f 2f1g 2f1h fi4s5 2f1k 2f1l 2f1m 2f1n 2f1p 2f1r fri1 2f1s 2f1t 2f1v 2f1w 2f1z 2f1Ä 2f1Å¡ 2f1ž gaÅ¡3 2g1b 2g1c 2g1d ge4o1 2g1f 2g1g 2g1h 2g1k 2g1l g2le g2lo 2g1m 2g1n 3g2nų 2g1p2 3g2r grai2 g3ran 5g3re g4rei g3ri 4g4rio g3ro. g4rÄ… 5grį 4grų 2g1s 4g1t 3gu 2g1v 2g1w gyva1 2g1z 2g1Ä 2g1Å¡ 2g1ž 2h1b 2h1c 2h1d 2h1f 2h1g 2h1h hi4b 2h1k 2h2l 2h1m h2me 2h1n 2h1p 2h1r 2h1s 2h1t 2h1v 2h1w 2h1z 2h1Ä 2h1Å¡ 2h1ž i2a ia5g4 i3aiÅ¡ ia3k i3antÄ™ i3antÄ— iap4 i3ar i3b2 i1c ice1 i1d id2r id3rÄ— i2dÄ—m i2e ie4d3r ie3g i3ei ie3kl i3ent ie4p5r ie4sk i1f 2i1g i3g2l ig3ru 2i1h i1i2 i1j i1k i3k2n ik3r i1l ilo1 i1m 1imd 4i1n in4kl 5inv i2o io4g3r iok2 io4pl i3or i1p i2p1j ip3ru i1r ira3s2 i3ri 4i1s i5sa i5si is4k i3sl is4li i5s4n i4ste isto1 2i3t2 i2u i1v i1w i1y i1z i2Ä… i1Ä i1Ä™ i1Ä—2 i1į i1Å¡ i3Å¡n iÅ¡5t i4Å¡v iÅ¡Ä—2 1iÅ¡Å¡ i2ų i2Å« i3ž 3ja. jauna1 2j1b 2j1c 2j1d 2j1f 2j1g 2j1h 2j1j 2j1k 2j1l 2j1m 2j1n jot3 jo4tv 2j1p 2j1r 2j1s 2j1t 1ju 2j1v 2j1w 2j1z 2j1Ä 2j1Å¡ 1jÅ« 2j1ž 3ka. 5ka3d kak3 ka4kl ka4pr 3kar 3kas 3kati 5kav kavar1 ka3z2 2k1b 2k1c 2k1d 3ke ke4b3 keren1 2k1f 2k1g 2k1h k4i 5ki. 5kia 3kib 3kil 5kit 2k1k 2k1l 3k2la k4lan 4kle. k2lel 4klo. 4klu. k2ly 4klÄ… 4klų 2k1m 2k1n 3ko 2k1p 2k2r k4rau kris2 kri5st k3ro k4rov 4k3ru 5krun k4ry k5rÄ… 2k1s k4sk ks3l k4s3p 4k1t k3ta 3ku 4kub ku4k ku4pr 2k1v k2va k3vo k2vÄ— 2k1w 3kyt 2k1z 5kÄ… 2k1Ä 2k1Å¡ kÅ¡2ly kÅ¡Ä—3 2k1ž 3la. lap4s3to 2l1b lb3r 2l1c 2l1d 3le. le3c le4gr 4leÄ 2l1f 2l1g lg3s2t 2l1h 3li. li4a 5li4o 3lių 2l1j 2l1k lko1 2l1l 2l1m 2l1n 3lo. log4 4lop 4l1p lpna1 2l1r 4l1s l4sk l4s3p 4l3t 3lu. 2lup 4l1v 2l1w 3lyÄ 2l1z 3lÄ… 2l1Ä 3lÄ™s 3lÄ—m 3lÄ—s 2lÄ—Ä 3lį4 4l1Å¡ 3lų 2l1ž m2a 2m3aid 3mas mas3ki 2m1b mb3r 2m1c 2m1d me3c meis1 2m1f 2m1g 2m1h mi4gl mi4gr min4s mi4tr 2m1k 2m1l 2m1m 2m1n m2o 2mod 2m1p m4pl m3pr 2m1r 4m1s 2m1t m3ta 3muo 2m1v 2m1w 2m1z 2m1Ä 2m1Å¡ 2m1ž na3s2 na3t2 2n1b 2n1c ncen1 2n1d n3drÄ— ne1 neg4 nei2m ne4o3 neor2 nerÄ—3 nesi1 ne3sl ne3s2t ne3t2 neįs3t 2n1f 4n1g ng3l ng4r 2n1h ni4s 2n1j 2n1k nk3la nk3r n3kry 2n1l 2n1m 2n1n 5no 2n1p 2n1r 4n1s ns4ku ns3l n4s3p nst3r 4n1t n3ta nt4pl n3tru nt2ruo nua4 nu3b2 nu3g nu1i nuk2 nu4o3 nuos2 nu5s4 2n1v 2n1w 2n1z 4n1Ä 2n1Å¡ 2n1ž o1a o1b o1c o1d o3dr o1e o2et o1f o1g o1h o1i o1j o1k o3kr o1l olen1 o1m om4pr o1n ono1 o1o2 o1p o1r o2rie ori4s or4tr o2rÄ™ o1s o3sl os3le os4lo o3s2v o3t2 oto1 o1v o1w o1y o1z o1Ä… o1Ä o1Ä™ o1Ä— o1į o1Å¡ oÅ¡2v o1ų o1Å« o1ž pa3 pad2 pai2l pai2m pai2r 3pan pap4 p2ar parsi1 pa4r1Ä—2 pas2 pa5sr p2at2 2p1b 4p1c 4p3d2 p2e per1 pe2re pe2r3im per3s pe2rÄ— 2p1f 2p1g 2p1h p2i p3ieÅ¡ 2p3k2 3p2l p3le p3li 4plio p4liu p3lo p4loj p4lu p4ly 2p1m 4p1n 3po po4g poli1 2p1p 2p2r p3rai prau2si p3raÅ¡ p3ri pris2 p5ro. 
p3rom p3ry 4prÄ… p3rÄ— p3rÅ« 4p1s psi1 p5s4k p4s3ty p3s2v 4p3t2 3p4u pusiau1 pu4sk pu4sl pu4tr 2p1v 2p1w p2y 2p1z 2p1Ä p2Ä— 4p3Å¡2 4p3ž r2a 3ra. ra3b 3rac ra1im rai4tį ra3kr 3ral 3ram ras4l ra3sm rau4ka. 3rav 2r1b rbo1 r3b4r 2r1c 4r1d 2reb 4rein 4reit 3rel re4p5 5res res3l 3ret 2r1f 2r1g 2r1h r2i 3ri. ri3d 3rij ri3kr 2ril ri2ma. 2r3imt 3rio ri3p risi1 ri3st rivin1 3riÄ… 3rių 2riÅ« 4r1k rk3ly r3k4r 4r1l 2r1m 4r1n r2o 3rod2 ro4gr 3roj 3rok 3ron 3rop 2r3org 5ros 2r1p r3p4r 2r1r 4r1s rs4ko. rs4p 4r1t rti5k4 rti3s2 r3t2r r3t2v r2u 2ruk 3rul 4run 3ruos 2rup 3rus ru4sk 4ruto 4r1v 2r1w 2r1z 3rÄ… 4rÄ…s 4r1Ä 4r1Å¡ r3Å¡2l r3Å¡2m 5rų 2r1ž sala1 2s1amž san5t sarka1 2s3b 2s1c 2s3d s2e se4kr senat4 2s1f 2s1g 2s1h si3auk si3a2v si3a2Å¡ si3d si3k4 si3p4 si3s2 si5Å¡2v 3s2k2 4sk. s5ka. 4ske 5s4ken 5ski 5s4kle 5sk4r 5skub sk3va sk3vi 5sky 4skÄ… 5skÄ™ 2s2l 3sle s3li s3lo 4s3lu 4s3lÅ« 2s1m 4sme. 4smÄ™ 4s3n so4dr 3s2p s3pe 4spn 4spu 4s1r s3ri 2s1s 2s1t s2tal s2ten 4stin s2tod s2toj 3s2tov st2rai st2v 4stÄ™ 4s3tÄ— 4stų s2tÅ« su1 su3bl su3d2 su3g2 suk2l su3s2 susi1 suž4 2s1v s2ve 3s2vy 2s1w 2s1z sÄ…3 4s3Ä 5sÄ— 2s1Å¡ 2s1ž t2a 2tab ta3kr ta5s 2tat taura1 2taÄ 2t1b 2t1c 2t1d 2teb 3tem te4o te3t2 2t1f 4t3g 2t1h t2i ti4gr ti4k3l 3tin 2tip 4t3j 4t3k t4kl 4t3l 4t3m 2t1n t2o 3toje 2tolį to3s2 2tow 2t3p t4pj tp3lÅ« tp4r 2t1r 4trio t2riÅ¡ 4tro 4trÄ… 4trų 4t3s4 tsi1 tskri1 4t1t tua4 3tur 2t1v t2vo 4tvÄ—j 2t1w 3tyd 2t1z 2t1Ä 3tÄ™ t2Ä—m t3Ä—mu t3Ä—mÄ™ t3Ä—mÄ— 2t3Å¡2 2t3ž u1a2 u3ai u1b u2bj u1c u1d u1e2 u1f u1g ug4r ug5rio u1h u1i2m u5in ui2r u1j u1k u3kl uk2le u3kr u3kv u5kų u1l u1m u1n u2o u3or uo4sl u3p up4l up3ro u1r urk2 ur3kl u5ro1 ur3s2 u1s u2s1al u3sl us3la us3le usva1 us3ve u3t2 uto1 2utr u1u u1v u1w u1y u1z u1Ä… u1Ä u1Ä™ u1Ä—2 u1į u1Å¡ u3Å¡2l u3Å¡2n uÅ¡2v u1ų u1Å«2 u1ž uži2m u3žl už3v už1Ä—2 3va. 
vap4 va3t 2v1b 2v1c 2v1d v2e 2vep 3ves 2v1f 2v1g 2v1h viesia1 vi4s5k vi4t3r 2v1j 2v1k 2v1l 2v1m 2v1n 2v1p 2v1r 2v4s 2v1t 2v1v 2v1w 2vydau 2v1z 3vÄ… 2v1Ä 3vÄ— 4vÄ—p 2v1Å¡ 2v1ž 2w1b 2w1c 2w1d 2w1f 2w1g 2w1h 2w1k 2w1l 2w1m 2w1n 2w1p 2w1r 2w1s 2w1t 2w1v 2w1w 2w1z 2w1Ä 2w1Å¡ 2w1ž y1a y1b y1c y1d y1e y1f y1g ygia1 y1h y1i y1j y1k y4k3l y1l y1m y1n y1o y1p y3r y1s y4sk y1t y1u y1v y1w y1y y1z y1Ä… y1Ä y1Ä™ y1Ä— y1į y1Å¡ y1ų y1Å« y1ž 2z1b 2z1c 2z1d 2z1f 2z1g 2z1h 2z1k 2z1l 2z1m 2z1n 2z1p 2z1r 2z1s 2z1t 2z1v 2z1w 2z1z 2z1Ä 2z1Å¡ 2z1ž Ä…1a Ä…1b Ä…1c Ä…1d Ä…1e Ä…1f Ä…1g Ä…1h Ä…1i Ä…1j Ä…1k Ä…1l Ä…1m Ä…1n Ä…1o Ä…1p Ä…1r Ä…1s Ä…1t Ä…1u Ä…1v Ä…1w Ä…1y Ä…1z Ä…1Ä… Ä…1Ä Ä…1Ä™ Ä…1Ä— Ä…1į Ä…1Å¡ Ä…1ų Ä…1Å« Ä…1ž 2Ä1b 2Ä1c 2Ä1d Äeko1 2Ä1f 2Ä1g 2Ä1h Äin1 2Ä1k 2Ä1l 2Ä1m 2Ä1n 2Ä1p 2Ä1r 2Ä1s 2Ä1t 2Ä1v 2Ä1w 2Ä1z 2Ä1Ä 2Ä1Å¡ 2Ä1ž Ä™1a Ä™1b Ä™1c Ä™1d Ä™1e Ä™1f Ä™1g Ä™1h Ä™1i Ä™1j Ä™1k Ä™1l Ä™1m Ä™1n Ä™1o Ä™1p Ä™1r Ä™1s Ä™1t Ä™1u Ä™1v Ä™1w Ä™1y Ä™1z Ä™1Ä… Ä™1Ä Ä™1Ä™ Ä™1Ä— Ä™1į Ä™1Å¡ Ä™1ų Ä™1Å« Ä™1ž Ä—1a Ä—1b Ä—1c Ä—1d Ä—1e Ä—1f Ä—1g Ä—1h Ä—1i Ä—1j Ä—1k Ä—4k3l 2Ä—3l Ä—1m Ä—3me Ä—1n Ä—1o Ä—1p Ä—1r 2Ä—1s Ä—s3l 2Ä—3t Ä—4tr Ä—1u Ä—1v Ä—1w Ä—1y Ä—1z Ä—1Ä… Ä—1Ä Ä—1Ä™ Ä—1Ä— Ä—1į Ä—1Å¡ Ä—1ų Ä—1Å« Ä—1ž į1a į1b į1c į1d2 į2e į1f į4g į1h į1i į1j į2k4 į2l į2m į1n į1o į4p į2r į1s įsi1 įs3l įs2m įs2r įst2 į2t2 į1u į2v į1w į1y į1z į1Ä… į1Ä Ä¯1Ä™ į1Ä—2 į1į į1Å¡ į1ų į1Å« į1ž 2Å¡1b2 2Å¡1c 2Å¡3d2 Å¡1ei Å¡e2v 2Å¡1f 2Å¡1g4 2Å¡1h 2Å¡5ist Å¡iuk1 2Å¡1k2 2Å¡1l Å¡2lij Å¡2lu 3Å¡ly 2Å¡1m 2Å¡1n Å¡2ne Å¡no1 Å¡o2r 2Å¡1p4 2Å¡1r 2Å¡3s4 Å¡si1 Å¡sikap1 4Å¡3t2 Å¡u4Å¡ 3Å¡1v Å¡2vi Å¡4vyd 2Å¡1w 2Å¡1z 4Å¡3Ä Å¡Ä—2j 2Å¡1Å¡ 2Å¡1ž ų1a ų1b ų1c ų1d ų1e ų1f ų1g ų1h ų1i ų1j ų1k ų1l ų1m ų1n ų1o ų1p ų1r ų1s ų1t ų1u ų1v ų1w ų1y ų1z ų1Ä… ų1Ä Å³1Ä™ ų1Ä— ų1į ų1Å¡ ų1ų ų1Å« ų1ž Å«1a Å«1b Å«1c Å«1d Å«1e Å«1f Å«1g Å«1h Å«1i Å«1j Å«1k Å«4k3l Å«1l Å«1m Å«1n Å«1o Å«1p Å«1r Å«1s Å«4s3k Å«s3l Å«s3t Å«1t Å«1u Å«1v Å«1w Å«1y Å«1z Å«1Ä… Å«1Ä Å«1Ä™ Å«1Ä— Å«1į Å«1Å¡ Å«1ų Å«1Å« Å«1ž žant4 žants5 2ž3b2 2ž1c 2ž3d2 žen1 2ž3f4 2ž3g 2ž1h ži3mu žio3 ž1j 2ž3k2 2ž1l ž2lu 4ž1m 2ž1n 2ž3p 2ž1r 2ž1s žsi1 ž4sk ž4s5l žs3t 4ž3t2 ž2u žu3s2 3ž2v ž4vi ž3vo 2ž1w 2ž1z 2ž1Ä Å¾Ä¯s3 2ž1Å¡ 2ž1ž", + ["compression"]="zlib", + ["data"]="xÚ-™;’ë<¯E§Â\17œ*Kö€ä¶lËz–^.9ÿÃ\19»\28žØA\7_ÞQ÷¼îZè\27h\11¤(\18Ø\4\8ÈþS\12»ô§\24ªÀ|\4Çc4Æ9nS>€³â\\\4Æ Ÿ\127ùšþœªÎ¾²¶¯Êèäöó¯ÊÒŸºú [\2&å¡*³ï¿éÏT´´¦êäËS½\7fa9ÔÌ3—ùÄ\4KÍdËïœËÏ\23#¿?'Fýü[Ën—Š]‘\1G®Td\31W„S*ö§±@*SQf\13Â9eÅîBÿeLÅ%÷áá2V}ÜÀ\29ï\29ªîœŠj?ÕâÌÀÝ‹Ví\28u¶V¬P\127?gzéÈ\27Ú‡¦ß)nÂ÷Süyó¼åêRÑåñn\23“v1iærC‚ðbOç—•Xe­7ç\29F¥I»Æ]Y±â~Dý\17íèå’Hà€\24sOyí«\0190em\21¸ Þ´ï\0281#ï˜e΋…yæòPMÑŒegm˜‡=#ç‘‘óîûÉêóÏ;ûù¢“®eïüK¬\5†¬\11\20»•ëÎ\5\3»\7×÷ÿäšý\5^‚Sí¾?Ÿ\127¨øó\15~þ\5\23?ÿ2_ÿùOx;Îõ~¾àìXä\31@£·\29\11WÌŽl3ð‘öÇü”Ž%\ +\31ÃIŽ¥t Ï™\3ÎÂE¸¦#{ˆP\11Lš5…P=RέW^|Ô\ +0\0Ù˜ŽùXÙœ€|NGì·¹\ +wá!`%ˆYâWú(Æ}Ê>Tó\0035“p\22.)ÿ¸VJµÐ\8­Ð\9ƒ0\ +“0§eÏU\31l­Â]x\8.ú\17‹~¸è©È¬zrÕ“«žXõT¢ï©l!\15\17‡\2§Ággá\"\\Ó©Ú\15Â\4L‡ÚNØ:åuÚŸðDš­Ð¥S\31tœv1\5³x\12\29Ó)\27q{âMäÙ$Ìé´d\5 \18DsˆSL¸¦S¾Ò^«¼°}O§ýCA«NZuÚkÔÏWÅ\18¥Ñ]f\4U*ãp*=‚ÊÝ‘Þc‰”\31yÂ^Ñ÷Á@¿ÌO<<©o‰V}ÈhXr\24”X_b|\25'@\9\3客»Ú׬@œuʸ¸ˆºe5±÷àœ9Þ“ÍÕo\\ŒÉk—e}X‚iB7 «ü\13ú²«÷x[ÙiGÇÃ}ÙͶ9\9K\142ïGI¹\13\9wæ@\1é\29F6\14Ôbè.Ç2/öÜܪRîËÑ°*¡»ŒC œ ºœj_;Lµ³O„VÉV”;\\©äèÌ\17éÊ9ÆJx/‰Ø\18*ØRþK\3\22¬ïO^úþ\28r‘ÃÅÛ\28<‡º\6q\25Ñ\11~?šý\7ÞÂWÊκãYw<\27\4gýî¬ßaýŒ\29\7ÅZh„Vè„A\24ÓYã‘&a\22Vá.<\4\29æ\28apvÅ‹¹(e\23×½¸î…u/å\30²\16ÏÂE¸\ +µÐ¤KÖ”Bo³\21º”_²NW¾ì†ÌƘ.cA&ÓeºtàÆ+û‘Cû¢í‘I#Èý\31ûá’ŒÂiG¦ùfBÄ€ü²ØZ…{ºlk\17Š=\4M¹„)— ïª\17W¸JÞUõ¯ª\127Eýkµ§ØpÅÍÚtÍÚR©\19\6a\20&a\22Vá.<\4\23¼Æ‚W\23¬\8ÖŠL¸OD%‰›\6;ŽÜÍxÄïýIç\16\3F€\0¬Ð¯ú \8+t¬8\6€ˆ´*;}?[neª\8Aœ¦*ó\11/Á\25\18y\4±›}8\28|hÄUÚXí\28Ɔ¤\ 
+B\23;®<©\\ìÆ¥NuÖ¥ÊB£b\3+³o\5\1»ª=¥}\5\1Ug¦\"—¯hЧªß_\28Ü×™òàâ½/\15<\30œup¥\ +ª±0™T±­\21ä\17X…€Ú†WevE\"J«È«xñŒ“é3«L>UæT¬\12×\21ñUAw•á\19•”WÆW\21\9±2!Vî@\21±\4é\7\24ÙSÕ(;dÇÝç\25n\4¼\29ù•ò[ñ'ÝŠÅ‚+»é'7ý䦟Üäð¦ŸÜtó›‡ùM?¹\25c7=ü\6C·~Îý¼Ú\30„Q˜„9ín‹B<¼\11\15A¹©ïî†*ŠèR£\11§;ɸ.(ÓêBêAò\13Ï\2&a®\28¶òh¥œá–?HÔµÚ×j_£}^—©.ÉìàheGïY¸\8×Tï¤rÁª`tu\20\26›³#j¡I\30½Eòäí\18Ú”\127\18\29e£L€‚KôlŠì\11h”ÖrS\27ý\28\8RãÐõ~,–T\21n\1\30(™ð¼Þþ~å]=‡ZuéìÙR}0üy}¢O×\19Y\5‰Ã}_ï|wVó…ÖrLµÙ\17€,^œŒ¼Xçk¯D\24Ñygô\22Ö=Xçwn7¢v#jŽ_ì¨\127Ï_ûØ‘†\29iˆSò\22†40ܘ\29‘>\4h–’¦¤¶jJ‹á}S:c#×\13\\7\23b`V¾2\22º›j_¤\3ØÛ–¬F¿j ¼©uüFçj$°‘@inzŽ’}Ócw\3—Í\16ÞÚì\\\14r\26Éi~yiH°¹›B!\22ÃW\7bx³ýêE¥æF!ÑF|M\"狈˂¿\15>]RbòæWKøh9ز–3\13Ã[|±µN¦i!¦\13bZ‰i!¦•“–\\¿³}\22.Â5µÕþÒ\4Ž ¥¼ò\28ïÖB#´BÇ‚ÐÞö'[Cj=oZëyš\24ß\26c-žÐ†'´‹ƒµ¸5ÒZ#­\13nãln5¡‹C\9´¶í\12™\14•»\8‘\14Å»üä©Ûq\18w%¼w¥5}GæËÁ~äe«\5\27VÉ]é1&²Í]”Ì]¤øÙéÎißawwÁs;Ò™}×ÔYw Ý„:uTK…8²xãÓÛ\8­@’ìz…A\24pJÔ?ø}g@t±ñÝd5Á3”Œn–+DBª£Ü‚šn¡æé\22s\13ˆRË®\2j›aÜÒËÌ\18…W'4vÐȼÒØ\54ö»‚ëÈõÁuJ=¬!”©ÏÊ\25áÌuáºrU\\7®šQµ£šÔ7òÝcaß\26±=föVvý®Ï€‹Þl¤œì£LëG}„\30Îü\30\6ziï!€\1äÞ\14ʧÞ\13è瘉\22\6ôä\30\27zë²^;ú]ÌÁ&÷¦^«ú(¾z«®Þª«×ȡȹ¨\127\7*–&°\13ä(\0308\16\0073øq2\15Ũ3\12|VF>\26ø^\4\14Óès]m€¬ýàÇÖ`E=͇’Ã{(³±\12Ì«Ö®|Š–'Ö`Ü\12ÆÍ\0‘CV%¾kK÷`à\\F\7•’Áog6œÊɪw±#Äþ&.Âæ,­:pª\12<%“\3eá s\13˜Eò.X…c:›ªh±Ú`\6\31âÔ¦Àà<¢£oEŽýÁ3zˆB\5„9\22À‚`ã`šÃsKCl\15ÆÏ*Î'ð°LUAí>ÄGð\16\0314 \7Á ó\13ìÝ…Ö\15\0272?\0062|\22·¯4r ¡äŸD­qTú@¢x\0019SØ\\úq9\0304‚ý8‹½“2)ƒwyÀz#\0274\30qœ‘ï¿Ñ6{5Z\7Œ%[7ú5#r¾Ž$ÀÑj+\29ÆrB4\14é}ë,\\„+êUôsê\21I\29é¦T;;©vÄX>vÞYÙ=ž\14€ì!’\31Çj­ˆ\20\30B3èQ\12¾U­Ncí/%cNº´£qÕV©ci\18ÌØãjcïI‹|\19j¡\19\6WîÇ\11Fô“o²r>üš\30Óa\25\27È~s\27ì@¹:Ô{ož¢#›9\6®¬G™3.N¿P\30˜Ä‘bb\18\17\"³ÅNóˆ\\ÊlÁù] 'éEVû“OØj\16Ï\27Ýêæ÷Ö¢gXodNEc\ +œvE\27­Žjo*Æ::ñ\4ž|(ÒD M|½`WÌ{\31…‹pM\19\0218Z{ʸ±ðä~\1Ø\ +\14\24Ì·aœ\19ÈÄ\31ÖüI|糇ˆeÒÝKŽë‰”¨ÜØåÖ€\20(Sí—ºèãzó%?oÀ\23º`'G\26Ê\26Ë“ñKà\16º¢%âäÎNméb-oÐߥ©ç›\\u\6^\25J:‡Nð56qŠŸ\25&ÓäÄî‘¢ˆ\3°tÐŒC#÷§À›³Ì”añq^y[\29ô»’ß+4àŸAh3\17¸Óâ¯\5 >\6^D?Ô‘¥*~›œøN\14Ê™7[K×X7ÛwᑦïÿåÎÏŽ\31&¼)RÌä\30Ï\22\27sqLsÄðÌij{FZüžG6)ÍæðÙíž\13Ö™`Íç²M3\25\27ˆ\19/\19ï¾ä›“5Ó\18ù\127™üøG^]œý^â\11r™UoáB\19\24[(\4\22øZ,\4\0229[,\4–HØ‹•À¢ó-¿gNÜP8â}Ù…\29þ¤ºèšxuÐ\0“|\21c\"büOZ-\12ÖÂ\26pÕ=WÝs…Û•Óˆ8à@\\I\28t…‹pMkEE‰%«?çÔÞ¬éxt\19j¡\17Z¡\19\6Á!û˜,–[\5üqÝN|l!sº®Vük¸‰ŸB{ ^5ÖÖø‰ä®šwÕ¼\27Ew\21»«ØÝOß»«ß]ýîêwW¿»úÝÕïž/wW¿»úÝÕï:æ=ªÆ{,sw™ªqc¥…6ÖÙ(\0247–ÚXi»hûÆr\27\21ã†Ñ\27«n\17 ÛΫåê¸z®!mp³±òfRÙX}c¯7\20ØX\127c¯7TØÜëM56÷z³èÛÜêM¥6·u³ÜÛ‚„‡$<$á!\9\15IxHÂC\18\30’ð„‡$<$á!\9\15IxHÂC\18\30’ð„‡$<‚„Gðp™ïÿÁ\2p\20>„“P\ +gá\"\\…J¸\9µÐ\8­Ð\9½0\8£0\9³°\8«p\0236á!@†ø7ð\21ø\12ü\20ÑPü/ð\29\8)ß\127e\5ü\8D׿e|¨Ò8\7^\2Qø¯¥Šr\29Ø\4¶]à\0168\6Nsà\26x\15|\4Ê\0257Ió\6k/Y{ÉÚKÖ^jò’µ—¬½dí¥\18/Y{ÉÚKÖ^²ö’µ—¬½dí%k/Y{ÉÚKÖ^²ö’µ—¬½dí%k¯`í\21¬½‚µW°ö\ +Ö^ÁÚ+X{\5k¯Ðö©¶Oµ}ªíSmŸjûTÛ§Ú>Õö©¶Oµ}ªí3ÜS\28¤\7­Ÿy\27ouB/\12‚\28>Uþ9ýÿ\11³osäѽ\8«p\0236á!hÉ3,y†%Ï°ä\25–<Ã’gXò\12Kžaɧ–|jɧ–|îH÷ߟœcˆ˜òÉW\4ÒU¨„›O©›ÀFhíì„Þу}£M”ÿ4ëù±ìÈ)†NñêÃqšóÉ!Œt\0236á!hÎg˜ó\25æ|Æ\9ìíSԞϰç3ìù\12\127þùçß\29Þ>D«\23ä²\2KVñÁ9o\127oW1þÞûùW-õ.:ëß\25šd® ª÷¶$R†ŸY~×›n‹ö±Þ”±Éñ’žÐë‹×OÎq`-Çwãê×Ý\27-„°0פ²H\31˜*Yž¿ÚÌÚ-6…^âŒÉ˜âå\15†m;Æ8êÞ\26ª\24¢Ó³´ÈpÇï\7Y—_»® 5ø#\6ú\22¯ªÑ·îãÆ‹Ý-VÙ¦Qx0Òu/ûÉ\3lš¥¯­§…|È‚|ý­d7>´Q´\4{Ùš'O\23_º1óñê3Ëp‹·Yõ\0307\27OÝ]«@otíà°wZ\17GLÍŒ‰Ž§>âUböÛb40ac¯™W\26™þ_²QqÕ칊‡\11×£ÕöHºÇÊ‹c\22›xýQËeÒÌÈâÄ«†ŠÍ±±øG­¢ß;ÖF?ZcØQ°\8cð÷X\9×ÿ4\6³ÐÆ®©a¾kÉþYX”1b\25ÓœµÃ\4QÞôMÝä.\31™Á)ñP•kÜF^nÂŒÇ>’‰\30+í:œèÏøIb­…\25je—4Â\24¿‰‰\25ùYŽy´\0¸Fýg\19\12\11®ú\9øý\0üÅ=\30\30E>7\9Þjí¯Àj\6ñ·¾‚[œ5W2hš:­bí¯\7–5ŒÑµ\25´Ü²–\13~œ\1âŠ1¥mñå™A”¹ùÙk cŽØ\2‚¢!¤j\28ÈÄ\12Ô\4O\31¬Þž\13¶h\16bj\8*\18[¢¬²w\5CŽü„o\0g,\3\28àHHØjÏh´²&â×¢ýIô¤É\18ZJš‹\19ߥëtxK-\9\13\28›nÂÌò\0062Ùµ\22oÌSØþ…‰\17èÿ£- Å|ÓOîx\6\2\17½¾Ðã$£M|úG{ä¶h\2°(úÅ\17Þà¡Káþ\127èù6Œ 
Ͷè*18oÂYño¡]‚´-F5·`\7\13t#ÃùųÆda\11æJ¡Û»LZŒnÆ\9œz²»®U˯i§h=5Zn‡..w\127\8¨`°{-‘'ûUPhÑrYØÓ­ S|rb_a\0197ýz³›\23zÄMsÁùi#>Ñ\26Ñ›\ +‰dE߀¬5ÌqÖü­\12{r€ª\9|ô‚àÂ\8¶} ‚/âg\30ñ†úÁB–9ŒÕ\5†0å6±ó0’‡\30pøô\11Å\16¿\1€4›\13/ý$†×‚Í»nÜc5å‚¿CM\8ÿÚrßô®7…x)\30`#\6Æ€¶2tº[§\15N»‚Lí\11½ËMW\30`Ÿ\9\4Ì<¨7¦‘ð`’@€B\29±Z…\21\14‚qùŽøÍ-ž… ظnÇKÈä\8=°ó¾\11\29k(\30šôdǤyMº¥<7Þ‰§žY=“\3Cð‰?pñæ`J\24PŽ\3ê`d c\24Ž¸ª6cø‘^az\31/Yi_~ÄBíÕ÷øD®\4txÄeÖV®êCÖç\9\22Óö\19†_ÃØ\17±Ì1\14©’¿y‚n ¦z,SLǤmwØ;òb\19\16\ +¦$.§ý~ßå\1cÙt\16!,äQ«’áëì‘@\19\"™zn\12fü^É\18Ç\"\25õ‹!öçQ1Á1\4ø&-¦\6ÆK\127\127ƒ?iõ\19YvÑ\6^šàˆ€ŠK¡¸îÚ\26\0eX„¬÷}ƒÚ\"\ +ÖVe­,h#O}a\6ÑÈáÖ;ÈO;‹àpHÐ5Ö\21®éÖËËj¹¥x\4Íã¼ó|r¹ú\29lÍ\26ë¡ÙÙÜŠyã•ï‚\5ñÏ\27þ\15‹Ð\3\5W~°ôX\20\3o\16\22l\7yµ&¦\26ã•\31Ø ]G[^±oÃü…ÇÐ\26Škj\25\30컓?\18+»Q\28Q(\ +L®lÖwRÌ\8Ïö3\28™h—\7`¸\5»›\26cœØ·\16dú¿\24\20\7U™ ·UxML\9'âe\ +þ\21\ +\27ËCêQ³¨ebô¨\24o²õÖ\ +Öê9bd}9\5ʇ\"j\17~ZR†’#qù.c\21o—Z\5áïìù§BlÙÍ\27¾_a$q ÆèÑ\3\15> ]böÄwlÆ­ñ­MÑ®†\31Ðv0ò;ÛŒ=\5HˆÛÏz…,?xÄ„É\2\23ŠŽ(2L—fM\15ŠƒŠ[\28\2îÀ«\ + ´\31?αù&1\21qEÁ\25A\23¡£$+`ÂDAä/hÈïL \25\28ìœO|\31ªò\20'cÂ÷]$ZؘSÖg”gVè÷#ž@æ}ó$Ç\31,ô:þŽçØ*¹µ\24ÎS£1ïøœ0å™hå\9\12ö¦­LôÀ%ECÆ\2Ü!­Äûî6«Àˆ\27wÝ´n\9oˆ”ßMGÙŠ8„‹K\13Z@²Ž\7a›ÖmÄh•/Äö¸ÁÀâ\0115-Z›B±‹–\31ÀŽ¨Cák\\bˆ¥´µ“\25\ +£×\23|}cŠF‘á,\ +SB+,ç\31v\9Ë«Ž{&8~O§™@\16 žƒèxì±|ŠL \16vQ¯ñb\ +àŀͭDüX\17\127#G÷'†È$Páa\4ù¡\14wÀ3\30Í$ŒL-A‹~pòÐ7Á©Ox\24ˆNe\20p?b.\15\6Ic'Lö™«\21’\27ç‹8UÆc\18ó!óþ\16J{ƒ—“ù)\ +\"ÞT+Õ^ò Š\12[sÃŽñf0RDFPÞÞý«˜h­£\13x÷ÐÝOª\15É¡\0Z2\1\6èGl\11Iœ\1ØJÈ\24\127Ç\127\ +CÂv\3ÈžpÙX§\29_ún\19!Úç\23\20„n#ÆÃù\27æ$r\15òd\4\"\4ZŠ°z\17-ïXæx!Ñv¸6ùÖS7\24åqx%!°? Á'”»\6\29\22qfç\24£ãüä?Í\"Æn\24ô²w}<~'¿\1ï-ß\23\1¾\3½ŠUa\16jpi<ƒø©i']!v÷Ç—#_È\5;=­iíäZ\19GÞ@KQüÓH\ +¡Uô/;Áx\16\11Œæ†\13\27\23™/…9?ô\22Y ÏIŒxÞ»‘P¥8OP’Í*ë8i5uü\28?\127sˆ.‹I\14\9»O>E³§]Û\9\15±\1c}¾Ù¼ê* Ñ_æ Ÿ@8MË\15­¸Ó\16O[sÖm€€Š¬ßlU4\20\27›C@õN¼¢D ;S¤ƒÌs-#Òµ²…ñ¬c\11rÒLßØ÷²²f²Øå2ÝŠ«0SO²-KFô\29ÓªýòGo5Á\22›YÄ›*ÍüeþÐX\30è\ +›íP¸$‡c7\0084…å_ÂÞ}‘r\\Â\6ŶPØ\15í\2hé•F{â. 
Gb§M`\15X\29ù@8ݤyf+\12N›\ +ð±¦Ã\3ï°®\0g\0Ç\8­Æg\30ήˆný\0139¦±#ü`P•\4s\6Š…Ã\1€™õaãisn\2_¢²Q$Ä‹ŠÇ\17\26ºkÊ¿c\26Œ\9ä¶Á…-`ŒíÌ\2ø»È哧Ɖ)%+82a›…CF1UÞÄÆ~O¹R%\22´„0rFUÊLý²Áe‹\"¿–UÖÐuúh5Ä°ô96J[³S3™¼M¬ð\8ÆXµ@®OÇ…'%ľ~2ǛƷÃ\\õ£–Àõ³@_™m•ŠO\16¯‰Ë']d\22œŸ…\11\30¸†¯¿¤Ó\19¤\\\18\1øõ]Y¹Y\22ú\17\11çz¾ëVí¨äIFU’\20´Ÿ2Y\3™ŸM»ïŒ€áz¸\0M‰ -+®ê4ŽËìdy-Ži%\28`\28öÑaæ °ob¦\7\2ø§¾\16\15\256+þ\30Û.+Þ|–¾`Җܯ\17\11ÀØêÑ®g\ +\15Ùn\2+[ø×k4D*n\ +'’Ø”Q±Í\0267½þ.l®Éª®\17‰8ä9à€>º°FË\20¯»\\¿\27t…ðþYv±o\0ù³Ÿ1\12X?k7¼o$,’vSüîšãUÞäz®N\19©\28øu\15øô1^àú`db?ÿ(­8° ·€/×\12Èx\14ØžÏI¢•“¸0\27Eulð.Vo?ÊÕö›è8­žkN/?µ×±â®‰üÒõv0d\30£^ïð÷\21y&Å„\27á× èu\21U¿ ¯\9\20›Å2Œ’d<\31šÊë\22!ѹ¦H\\Ä(«^3f›\"Ó\21Âèd'pƒLóõÂ×Cì\25ð¼\9l^ËXÉž˜ô@•×8Ý4òA\1$\13h_cÊN=b&P\27¥\27¸æäÂ.ÕDfô9*\0011ÉÌ\12¨E6ù¤Ø,×cŸÜkBÁ0ˆ\ +\24c3\\¯/“ú\16Àšˆ~³ÀÊh§qB9óÔ·ð×\7´4grWI{úš¨ˆåï\26…ëñ®\127\\ëze£Ä0µ\30¯)¾¿‘—»†-’ñÏ›üf’5\27ˆ¡ÞWç\27\27…\16ƒöЬùN˜ýIL9+»ƒèîxÚS»W6'væŒ}\9\28s-AíïšdÕ\28/~Md,ØUn6Á?ß\4,6\17]›hèXPU,Žð½S,®~\0\11hÃV¼sŠ%zM\22\0dÃFð\ +-\6ï\26‚5”´\24göØ©™\22d˜½=c+Öb‹¯…‰—ª1\1#\0159²%³–ÁõGÇÌ\19?Ÿ2\"²™µ8ûLèuí¢H€\\Ÿ\11 y­»k<®\11\6ÏÿÐ-WÒ\16Œhüf‰É\27å\7&¶LÌÜ c˜a?WàD¼ˆ\22þ@F\18;xmÌHyñ|×\"“ÀåFÂH‹âúÙĦ™d;O]i…Ç\29!0F¤D\13Kj”Lá®\13\21[tÔ#DÙ¯z ^KéšÞ\8·V)\0162Ñɦ€b&\24Á:ÆŠ_´ú´WãcZÂ{ü«fžYðºÂ\16kõú\127-Ãá‹’Ìžµ8VÁ±ëu.ìvm\2iÖV}ifè’Bõ˜û%&'Ëu)z—%o¯é¾~\19†JÆMÚI™³ØmÒu$ÖÁ ‹_O){!\7\18ן\16¡%qà5\"Upd\16h<\13\30\26IT®¯EbT[÷z\14R‰ú×5Ç‘I8õ¥G\12Ü5‰›^-”’Z»½7ã¦\15ùa/»\19»,\12..)ñXA¬cC/4‘õ$â„\30âŸ\14-ç=\22{\8”„&zÍÿõö“Æ2iŸ^ƒ|\1„0Y¯\27\4ý»\5fè¯\21\29ŒïŒàéº[Òk\9Ônp&¶=§¨Il\127/ksÝíÔ²‰\13uÍjÀ‚CQB\"·pZ‹2\5–Žgø’‰T$.Ó|Mq\16\13§ˆž\9 ¹aâA!1R™Ù\15jcC,q ì\19¹\22vÞCù|ö˵R\15Mß*WsÄR›Ú\1\5áGd׎0Þ×H+Ëw²¯&\25×£`™@\23]¬†k\26\21Îq¿Ë.ñn›có\13h?B\6\8h]óÕhK´õmêâG×\11‡‚kµuÿü\21x}U¾è\20œ\8çyÈû;\30kN¹:…5wùˆ\ +«|=ÐÄ’*.2’eµÆ\6\3¨\29ùη?â\17È\15m,Åk\1]«åzá\0;›’g\19ÉW,E\6ÛŽ‚\29ãëm'™èIÃw}'6N+;\20xxdK ÙS¶o,[;.\23øE7’îg#UuÄ-Â]iŠþ¢Zêd$F,Ð*”‚­ûà‰Wm†Aî’[„\31Š\13Ž#\ +´;\01105\26re°G-ß$6yczpµûÈ \8×^Ÿ®µw‚º\9—z½Kx\0¡ˆ\26cx\13÷\6p\11^ôó7Ñûû†¡—ÏÕ¿Ba×\00827šì™tÉ ª16y\31–—W\13·ÒÈÑœ\4T¦q\7u^\0114òñC†ðú\\-;\19òˆëQb\20zìý5X\17J4\26«îå\2bï­b\24OÆl†\21Îzýë\3¢NŸ\27Q†`—œB\16â×Àw\26– d:-ôDú\19âÅ\29k¨×d¶a”\19×c@Þ\5¥°\2\9\19\31$|D\7B-YNâ(>QÆ'»ëBd´Ä·>üf‚D+\ +\0Ðò\5@”x‹\11û†¼îz\7A¹@Â\3\11¥\4_à\28Ô*J]Ÿõñ`%ŽòæÄú\27èl\18ôê\28h_\31\15Âkõ¾ÂÛÄEjM§rKñ\25Ö‰µ\0\27\14Xs]/ÛOía½ïÚ‰²F‡0U„‡•\0å\2üѦ‰Ð¢&•±*±\25£\16\31\16ظž\21ü®9›äµæ\8å¯\15\7´Hb67ÏâÌjõ£i,…®ÛM±pD]²[§?\17Ÿ\127ȃ¬Ø˜ŒÙî\ +¤•Z)‘Šˆ€S¾YLÏD\25ALA+(\31h Ÿ\28\6)ÐŒ=ÛÇà„8D ÷ºuÈGÂU¶Ø’gü>¼•¶J@î]\3YÅÄ$­´Èy$í\19yĸe'gª4ÿ†òôCë£\26KÔ©`èµm‰-«Ø\";fÄ6YY_ÐÞ©;lÎJu„!Úë×ÿå\13Àw×è\31zD”!Ãk\24\2u†Ï•?yèi²VÛ)v;ÛÝ?4\"\17ÍmÈF\7\15Y\18öFµ8“ŠÚ\0025T²\20aÀAºõŽIÆ'¯ÿ\31Â&Á¿½\"fœ†*\4\4“4\ +²ÛYft€™yÊ8_ß\30µ7\7…\8\2¬æ%V;\8%Ã!&&\11Ç\6ETcdS‹iø#Ži'–ÍÂQ‰ì9Q],ˆ°\9›Bæ\9rö£±6¦AÓ¢·­†òF™A™\23\0275á«ZP'*^®õ¦,àeø&B\4›?Iýp\26ïŠ\\µ^£à\"k…n–B¬ò1BÁa\6'\17@YàH´·F±Õ¥–Ø\1ÖµÔ¶÷X>Á,)÷•„®Eæ\11N+dÔ\16¬Úô³ïñëpà`ƒ${€½\15KùŒà_a–Zl~6ã\19\6±Õ+˜âÙ\21³VD0€œ°y›µÿ\27¹S\5a×F3Þ\18¨\25ea\27‚ÕQÛ{\3÷Ã,\18_?-#ÛÈQd.«`X˧-(\18kS\11oÅ\0FÐßË\19öakfX\ +\5íÈOGÇ›6nLñÇM<ð®`\"\12Í—\\›\2ÛZWÛÌ“g#€,—\16I×°Ð},¸Ù\8X›R\9«M\17ÙI\26»ì·›Fá.L?/ÞY\0311Q‡6êeª—Îb.!VOÚo¤ËFH\18µ>ɯÝ\29y\9;o„èYש•hÑâé5ò—+˜XÙ\19xwR0\30yÀ\20\11Q4×¹‹H\24ð\28òŠ\25üÑ^\8Iµ5IY‹þEš\12¼n%R¼2\17 o0ƒ\1=0\5\17Ã(SS±€‡x”]OR)Ö©ä“dí\20¬Üd¹BY2›š;€!A$(z\11R3æàVÞ*÷¶–ï‚_x2\21dÔ@òCºU ›7\\Ìêº\19ã=qD‚\17O_×\0209Ì~øÀ“\15òÙ+1ý\12ö x+“ò‰\31ë\21\4y@Ø×õ#-[‹ÎìÄvlh\31ÀGÅ{\19‚-âß%›?ˆa¼&¨\0Ç\6^/R]O¹™p=\26ü‡â¯\25¢r$x“œ4\30[&:B¦å4‹…“W\2.+>ÝÄ×Õ‚C\15‡åoºr$\24Äð ñGƒ˜æMá¶|³Ìø{˜‹¬P^Aô,\22å]¼kÐ\26¡JýHP«£¼OÒ~\18÷SiûîŠTƒ»‹ÅFp\29(Ãñº#…÷X\5w;ÊQ¨²Óë;2œZW\12Ž\28èç\15!€ÕbìAH\0JgÇ0_ÖaK‘öx° 
ÊK\2J1l\\ÜÀƒ¡0'&YÀ[ëÄÐì)OÖÃÀ¿Çk^±²lð»,ת¼Yr\16+¢¸-ß¿¦«\19&QT0“×ÏÚüƒ&JAT…ѽ¾t\18ÍÌì†Ctý´3v\31vÑ\127\20ó\0292V\20WEÀ(_\ +lˆ{\\áIlËFæŒX2ü:ÎYæ&Ènv±\"@\25©ëE\31â'I:iá·Š°¯©\ +!‘ÅL¸Ík5<\24\18UƒV¥t^J¢¤º²Ê\0149µF\0nÕ‚{@³.\24_)!3u0ð£•\8oe2x‰S,Ö$èTËwf\2\30\5\20£žµ\17ïuBî\2+roS3àÕG3Ù=n4\30ô&ø¼!â„Ö\8³q\ +f\6…)ißÄÂ\6.ZdãEÛÝHݬ¶#£Àó†|,K—]3«2y^&a=qÒµýÖ\18&î&Ô4‘\24ºÉ\27NÄyŒ’,Y¯\0Es%Îaƒ—²Gÿ\14¿÷]Hr%n˜{­ðÕ5 YӒŘt6§\127zqm’æÎðüY‚@™:-$M÷!\26èf2B~g\"*éhá\7\3\1†Gx2r P\7>²›í/fNo€\\\2µo<¢\16u\13ìJ\"}>ÿ|\25†ˆ‚P˜®@¤×\27U2€—\21\24\9Àƒ#‚#¨à\\ÄW™Ô\12{*gq-¥à;Y}×~z\8¦„ß¿6;\22G\0ãšµ‰xTBæÑù™Ó<øÀª¿ÃØ\0197ÜÄöŽØÊEV\127S¬\22ÿ‘ˆß\28àß —\"Y\7û³8\"š©J}'LXQfˆ\11eÏbkï¢ÈÅ”\28lÜÁŒ3VUÊ“\21\29„Bw-3³Õƒ ܈W”\20‰ªºÐ\7…R#y=Á\22Àìì\0300¶bã‹Ï‚Š<\4á&ü¢hp(Ø¥Ñ`\9åöá,µ4•d\11ÄÊ„Ï\ +´¿«¾ìºw]1,B'ò_3Û½#8È\ +F‚Ò\13Á\25ñíå\15k9™S+9öÚ²:Tÿè,K\17Ë!|¦8otºFk²,l\5NYáËèTÙ{l¢\26l®¤Ê5e§ìZ&W4‰>»¶Yu8(B·\25I\9%|cMv \2ÅþᥠÑÞØ&¨mkÍz\9\26WvÜ„÷$á+o”áž\14Ñæ“r¢§\0™\22^äßØÔ•æ|\20Ø‘žÍ–2vÆ*è\2â=ÁÙ§0Ö^v©˜½¹ðvR\0ôrJ‘@Ð8Þe³g1„âª\18ŸV‚®J°²˜ÖÆyD™Nn¡\18çØ\19êÄ–{hþU‰¬_Ud”ß\21B\0s\23çÈd)\2Ì‚Ó\24ÑeðÒ·YTB+ÌϨpmPø\26\31W„Ú‹x\8¢’Øo\3±5¢¡UÚ³I¿s} “ÿÌñ3H©ðFNw¼+\16ŠEۊψ×\23V\2Õü)®á°o´[Øœ_•¶ŸCÓYF$JVØÅ\11P6’,\7±ï¤¸}V\\D\4÷0õÖÆ(LàâPB-h\23”ôBÃ0+üiD\0×Jm­D‰\25ꔤÃa¶((1\28¹·‰\12\127¤kÄkBGbn³’á‘ë(VÈN¶ÞX÷Z\6J¿·Z{ræœHêCA\\E¸ÒË\27_Oô %Œ9\19/ó½Vþ7Rø»(>\12U\27>^ÉßJÛMN1Ö SÍc¬~„\27…ê\23œfýÌòa*ý\8úPã7\ +³\"Xè•eÝ•T¿^ý\0136N6ÇŒsæîYQøÁ}•‹½Ö\0”þHn\29Žò»ê@Æ0i­\28Ö¢Äe 5Øñ¤ó\6\29º\8•„1èEõVö“;\0142¼ƒÄO¶u1ì;\17:´ð!ð=ˆôb/ŸÄ^bxÃ?É´(%£kÄ\14À8¾}²\"P劽\12Ø_ Ï¡aQÞö£³ûš|eáÓÈ´]/ߟ ‘UüY™Û]p8Äò\18®DŽ\24Öjíq„¤>{\6|QJ»ýš*¬”àÝ¡©? ÓU\24rÆ\ +Ø\15ÁgÙ\17ÙI¸F\1?ÖÄ¢8gQ6¢RH\12¹”4}\1øÂ\ +EñHgºpÀ’?4ƒ#¨§ËŠ»GdôÄW\ +¶â\2§-–ˆß\26j±ƒ›šdõ6EGää%ün¢N Õs횈¯ÉôÙ„žÂ\16Jã(‰É®8¶-È¥Þ‘2\16E_\6üú\13\2¨Y6i”æÅ©0R)\24‰U\14&tU3\0264‚øxçaH\24\20b\\>b\19\25\19RØx?\22\22¸_\0éQLµ~µö ì'ËHϹ!ÚG\2\19S\0097\20\31=DÄu¦€»W›‰“hýÔ›+q¸9\23Ý Àh*\2mqâÈŠNñ†Uùòp\9\8Ã>¸Ö‚[‚@E³\11ÄdØ` H‡ì\6ÄúV\22žp“Æ øÍ\24\22”AráJÍñ4H !Å\3Å'T\31»Ì¿:4„9SŸ%ð\ +=Žhžk­¬æ‹[¤Nz¸˜Cí3’æaU0t…Y4TFá´9 \127\".\16%«\26,\24ëDN‚…\26cÖ\"\17Ón\6#¬¶rìGT('¹\7ã†Æ{ËIµïZ²«Å¹]I«$ÅT+™\30\0¹ÌAË£\5+SḄ\31\30ÚbÁºu¨4ÍÕéRÛ\23\7\22Ê\9é\"\22rOµ¶CF+™)ð\6¯\15\18?¤R)x–¬lVŒ¸‰h•FJûêzþYËÆ\9‚YðK~%ð,)Mop9õYì1‚\9éXN±ŸJ\3î£Í§Æó²;I\9\3?€ì»æty¹\\=\"B»äè\24\6\9Ð\26©Ü\0156g–Syè‘Â’õÞà‚\14‡‚èˆBïÊš&]:­úŒ\15>\4©cÉ\5&‘cÔš®\26dn\24\30ë¼hÜz\13Û.f:,UõK(…eåÿn“i-V§|¸æNV¥ZA€+÷(\8X¨³\18¯ß“G;K\"¢Ó¾B¼j˜½Q5x\20Ê5!…‚\25ŠšIm®vu¯\0311Ç\ +ý/³®©š\5öÎB\5ŽÄO5 èF€rà×6ÒfŠ-ʺ\7\28¡Ê{×DÙ‚~Ä.\30à¸W‘Â[KN\7í[<î,Òx3ÿïßD\21Ty\21HC`ð©u\127\22\"4âºk×\28ÚµIÛàB\20×On\2\1\29l®ìe\7üSI1æÖš«ÉÉ\0´„EóÙÒÒrÏëZ-I“Æ\20xä\12\22œßnû¶c\19–ÁXRö©ÊΦ#éS\29‡\"Ë]³\17\6D\30o³\17œ¾Ì<ÈÏ‹\12?ñÄaCjré«\0243kÆ\30šlÖp½\3\18\4{Nk\26¡('a1ixO:ž\13Z†!·\18:=%\8œ´—O`x܈›…Ü3;n\0213:\16ßcõ\27l­*\19¤\4É´}ÙÐF\12NÊl\27A*™ìEÑ \26K¯c\5wZŽ§^­3[(œ8‹ç­Aõ\19ôŒyû$ÐÌËL-\9O'c¤7>µÛTu~ D\4\3¢ìáï.¯\2o\127Ó@4b\4ã'“â\ +L_›13èµ®-Ò(Ø\2F\4&ÉŠ\\G1^\27LâTòžV˜MÂ{ƒ…r1•5ªU\17>½ñê‡`n\15™Ý\2Í‚fbÙ½\21ÎVÙ¼\1ì\28ÚÙ\24Å$Ï\"æüKNí†dr%6Ã02M2$Éá¥öKKð¶\19Ä\15\20ñ̦X‚R\20íZ¡Ç\\j3\12–«Ö–&^+Š~9¤e\25å,BÓÅFt\20'8´7/Í\19é\11±Š\13Éš\29\17¢\2·‘\4„bz´BU÷¢‚¬ÆpbÄ\21WŠÀ®µšn[HL=c\"nŠ±\7(î[¯Ý7ãD:ħYXa•«\30L3„£ïÅñD[¼VZÓúÈ’%™×0¼CÙ±A#\15¶;\15î5áy7½ô›¥˜\7É®™ j\16l»ÉY?¤®UÔÖ`èf»º»õºVcâz\29T2¥¨ÌFícþÞè\17ß´bå‹Gg9T\11xýk#ÏvëI\13\9\9\28žgsu(r\17\6È1Æ\21Á=µ†%\19W%\2°\24ŸÑh®µZ\21\12?¡ï>\28’k÷5ò„k\9þôJ5ˆh\18\13sô°*púìžDèx\20oŠìʹ™ÚæOxKl‡öD;R¦,x±lô£\17Û®¤ú\31ñ&*Èc\5Xû¹h\19ÆèÆ\31,QŒ29 ™\12t\4\ +\24¯E¾¦œZ°vg'l‡‹#1þ¥<\0²: \0054G\"´\28I,Ô¦]”}\1DB?>;\23¢\9²¶òÙ=Ùդb#­\0D\17s\7=62J\7KiÛÍ®kMæ¢<Öƒ†¿…Ç^9½[:ûÔÝ*GXhD•ëi-*\6Œ?Zg\19dG\\ð«êˆHý[¾¨Zvª\22Þ1Ÿ\22Xû\26îÁ™\5z\4Ju±­š¤{:©§‚6n'l·`Ï+hoì5<ÿ©€Z™[rÿ\".\30ÖHƒE&6ò&Þ÷ô\3½Ç‚ŸÑ=LÔ\29ôdرº\8vG…@X\0222éË‹\31•‚OkME ×ÄQ¥ 
/Üž\\‹rŠÊ_Wrè*Ê\16žžÙõ““äÔ³d=$µ!\3üDnäÆÊÞð>Éá˜*P6À¦¥ƒHÀ\6ç\15QÏ´¯”\28MI\29íºIþ®&\5…l,¦äó\7ûq*ü†KÎF\7‚÷ÊùyEÙ568\26oçJ\\ñ$íú¦xÚ\9rT_Qº.x\5Sy\13†’–T)DõÌâ…‡Ö°­BÞ= ‰˜3>A\30ò\15U¯â;\"S­ÐÔìÛjÎý(¢*2¯coBÒ/ÚKÔöPöÿ!Ùú(ù¢\127Ÿ -,\0…µ~hÑ\"CV’Ëy¢\22hXQs€veÕ\27Gº8¡Ê\31ÈEÁT+æ–\30\5Å÷\ +>ÕNE‚\24jÖ\3òktEˆ¯,\21ì\29MÖèÊÒ±°#?­Ò†ó‘\31•.3Q/#xÜK>T‰Ùé•/ëI,ôxüýUG£U‘Ä\6éj;Z™Á‚·\0258M~^骚·Û\\\27‰\29h©zˆ¹¹Qøe½ÒÎ÷0\28“VM‡šUKmtƒ%$ïÏá%þw\ +aV\16\27äÆ;² ž\5¼#?\14Êb\171\19\7ß\23–Ÿ¨’\9µA_¦\20­¼ˆ¢²¤òˆ¨gÑ­ö\27\25TÊçV\9aUí¨\28oeuØ\ +V‘˜$X‹ p\ +¡'Z·š\30mDÙ\15rï6ít2\ +H½à>SQ…ORqN°Œ\ +¿6WHíT•½C’A\27ž,vQE\18\7EÕ’©­`Ú\15&ƒV:t\17ô(q¡kG«È­ÄãìNaÁ•FºÞf-\1±JiB…ª¡¹)/™[\127Ìù«Xt×\3j\7áo‡\"\26½á%@¿\15l¥¾~𮇰?œö®\18»ê…\8Í\7¨\11£^iQÉÑŽî›À¤Ÿ\17¹âzÔÂ^\16*3Ñ\0:pÆbR›\5+›µe•T_D›,dÖ®å\22?²¬g\3Úîæ¿UYÙ+«/‡ú‹ÇEÜø ë€ˆ¿•\13”ÜuC\18ë\29¶k,”Ÿ(V!õEk¶ïh±:Z_S˜³\9²Ü\28˜¨ÖñT’W„²ÞïdÜf–l\"\5+󻣼ý°Úy(\25Eù“­8¨š5 \4Ë¡ÑA\2º»\4T.1dÔ\2…¤\24\30\\Ouó*ÆY´^v¹‚v¶Æáp¥èAªt“O£\28eU´\7ß•\5LFíxj¢ÃÍ]Æda\18…\24Òì¶ÉÕk“EO\11\25^Q\16Y3_KJ,’O±\8Ü,\\&)6ÁY#\2iœ^1ø™vj5ÙaoŠ\2,Ozcq7\ +¸6팇4<ò\2‡‚øU¦©!\0054\1ƒ\14vÎ(Â|¥þÓ\")™bëEæ\"n±\3‡š•iŸšÄ~\13Ú¾×\14t\13\22²ñy!ŸK\14 \25Qÿ.\11SªÁy!‹Hùm~ÕŸäOè{¶\\£\16ým§\22ò\29u‹¨îZù••å³šd?wÇ‚ïÄ‚‹Þ¿³h\14áàÁ¶0Ä’_@¡\5MØ+PÛ»â\1\5x\30.‹ÑÏŠ’â¥ÿQ꧕\5'î_ ÇÑr­2´Âr']¿Ñ-…7Q#Žø •¡\21ž\6\26ë”bÒ%L*æ\127JI#“WÁ´3Œò~\31l—@)\15Üp&Ú_E‰$Hˆ´8o‹@b£´x†%\27ɨ­è5\28ÈfÕPë\\[Œ\9Ù\\S\30\31¿\23ªÔõ…\26FâñS„z’½Wò\25\15³$·Ó’T¬\29‹\23m-²ó•E\23‡ÚOô[\16Õ¶\23\20J/\7×\0297èy\27Kˆ\15‡d$º\31PO\19䨵yÆ‹¿¥ÁHR—*ÿºü«(•®bÐ\18¾‘} ¬û€÷Ä'†ý{J°T©˜l'Ÿ]uæûN\13\20ÄÁ¬H\31Âý§õÐó7\20\23ç\2ô”ÉMÊÚ&-“H'÷Ö®É\22–Ûâ ‘dìøp#£6+•:Ðè#ÉT4\2Rd\22\14êÄTrP;›‰`±£Û\ +ÎTͺ7‹n&®ü\13Útšr‰ž4ª•MK\0279\31;\28RH$\20QÊà\8Víü‡….¿HµyDŠðÍtßjÙ€œÄˆ\6ˆ.\0037á\\üAЋ\"+Z‹\ +Û›\11ë\27·0`EÞñá‚zQ˜›\17\0þ nzUî6`ø¡µØØÈŽ$)kX¬,\"t\0m\28Ô\9\11µ¦Ú\24+F˜>\4ÌÜájÓHRÁõW«}æ ý€\0039˜ûÍË[\12d£-G» mÓwQÔ\21”MEʱUc†Ö\ +¼\ +ÑK¤&¥¥«Ap³õnÔ‘TÙµßzà{(j\26Ö]Z\\SEþ}à‰k?EáÎ\22\21`\24RŽÎè*›˜\12\13E¢ä\15º:2\0›†ãðÊ\31Š\8ýP„qâw&õLÉ\14TAÅÔú×Èè\0048\30_¥u\9*Í\13\26g&mcQõaüm—\18\127œÎ\0055r(\15ºÓD6°±©MîBbDw’\8%\25Ñ€qá\27\26íwé*>\127”ʶÔ\22ïú”Då$¼ËèÝÈ%¼’ì#´Ä¬Õ\28x]›¦¥á‘r\8•“Z¸ze\11ö\7vIRÀe-â\7’Ÿ\19I…\17\20<õtPyÊ]EXI\25•x¥Ó²\13\8*Bä“\26²k^o\5ÖS5|\8¯¡K?íçXf\19YñÆ\"^0êD\19‚Ñj:\8Г\26XÉ\21p57‘ݳØ(\19îj‡æ\22 eYVô\14\8z—âíÉ\2iqá\7J¤Öni×;‰ 67¶KKö]áÓà&\127\20œÅãñ²rø4Þ&1Â\16¥½²R½ºGõ¢÷5G Zª’±ÃIÁýÙ“±V\26’\\µ\21{™ñ\23\12¤²$\18N;<\\cM(1ö*ÑÒV˜\9U\16ÿ¥†.QUi1cýþµˆ\21\24¬\18ŒLîê­,a+HØ#Nˆ’ºÒŒ\1\17‘«›É¾$}p,Jz¥¨X©ý1;¹bKgT5wf'à, \17¶²•rby\12ɬ·š4ïT6l¤Yh\31àÈ¥\17ƒ.G5”šúNŸs“„›‚Ù\24¢ë­£bIQZ+Á~¯õЊ=ë‰\0203`’ê‚ÊÙ£e4nuH§ÄWÐø~'\26AO\8÷»\21Í7á8–ûêMŒ\\J\11¤\26\13`ÑÇ\28XÕµ'’5©\30¯\6”¬@‡\21\2ÃÖÙh\21…¤\22Ul²î½U‡\16mP)2zÈŸÅ\9ÛéE½×-Ù\4HÑšw\26\14\9Ì\30f\13\27\30êoOD@mœ«ŠI\0Oˆ‘Gà\28\21!YÔÖ@R•æ:=\5\0h¥\15dï\30ܼ¹\0ø†xdRU\0‚݆ú(·\19èÜZäCCz\"¸XÉs\9çI{Ð×î[ÀéNˆÃcù5\22œ)\13“]ü\18\15%qB\5Þ’`‹\8î®7É0ò«°‹KJoΰ‹]¹Y‘\13Æ¡–‹öuoz»;6d2u4‘ún$U1DZR\11²¾rËò\13w Åª\29\18‡ (äXŒ\24ˆ|gÊúç2\8òMƒ\18À\19Œ=Q\21\29ŸÈó9ãP³\25b“OT\6MV›ÎP\26ê’0:\6ÊB¬\9¬L)ßÏRtJs©Ó²§\9\11¬Ž·Ê\31èn3þâ ã\1â‘äÊ\19zÚ‘Ñú\18)ŸZ¢\9Í°õ$5Ÿ~Ý•=·¡S\27\\\29S\25$1ó\13©Ì”“\26ÌRJ[C4¡\4à¦Y;\26qÕDÓ±¹@8¶é(\\\2§r\3ªP\127;PdEµ\25íf3Î:[“'r}Á \5cÑÒÙA~ƒ˜ë¤FÖí\0040\29Õ„\16O¥7ýQ\26%àVo=ˆMá\1}\17t|dçð¡î]ÀY*f¨\24®+$L8\27FÀŠºŽZU‚ÍC’ªQ\2ÅÃMT†\ +.N¸ÒPæ7\20\29\26¼SˆW\127Ùr*¹\20\11H\11\23ÕªÓ\19ºîÍ=*B¡¨BÿEÁ^+\9zTr›\16šé©\18‰.ª«v‚y\ +ÓÜûñ\20$\29],»£v|\15Ñ游ÄÄ­¯Dt$«™t£¦%}…¨$\18\25ÿJš#{ý[)Ž’\7|¸3šÎQ8\ +!TiÔZƒ3”“ëcóBi+EZ8)HÄdeöM{6€¤\20dì_ùIJ\\J=ÈC6æР¦–N1\16Œ«B¼•ì‡Ò€\13õ3C©ìà‚AžÁuèaQc\18GZÛNõó •%–'¦5¬íî2É\29ñmU\27°Q£ÒÒ\19\"„oô“‰—Rå\23\7wµ’\31™\4 lG'\23í¹\5”ãUÑ\11\5©/t“{ҌĂº=Õâ+N\31\26\16¬m¥î\2»\3ˆ\14?â§iÍ¢\12œF™]@–”‡ši…çJú…÷\18pJd×µ| iàY\30Ôä—þ8\14kñ\ 
+œ‘3\20™*o\19Vo-\14î\9\22Õ·\7Ðþ)\20Uï4ŸÓ¡W'Û!™Õƒ\1mZgSŸÈQäÅ\14BѲ•E‰xgÞ‚¼¹Ã.š\11\26M@v”+¢sÔW'ö\21½\27\27\21bSu§¶ü«+µßè\16+ßsC%O»Jšé©ïÐhE\28mDäíG\4&\26ÖÌn¬§»Â¤Ð\11ÞSânùðj(•É3£Ûà\19O\0¨£y+P]ó~¢ÀG\9û—ÒÏĪ\6ù\24šî^(!’\ +ÝßÜÀo`Ä\0157‰\21ÿ­r±\29J¬\23%)ÓéŽI¿\1qá\31V÷ì*5å\26‰ÆñºC†îf§ ÚA\22ÞG4\11òÈÈæ\13X=YqVôj\20\27F\17þƒ°g\\òhéj£—\27y9QÑ#hRåÙÊ\19,ZÁ½üC;³PÎ’cnÐÊ+îz(\6kô©¡äÿô¯ŽVj\15·Ÿµ\11ê­\"ÓŠVs\30ˆB\26=½²BÙÚ3Ô~£{\29Ò¡\5ryí­Ç\16¡Ù\5–d äJð cÉáPàG\4)“\12·o1DE\25ÑìCä¶â/݉ìl)Ý|Ê7ÔåN\ +âWZO\0\19\18™\21\24™z§Û„^àVp ^K­\6•SYNÃtS–=UŒ0Ÿ¨G\21άÂü0®«^`‘ÍÊüÆÝ^Y%vÜ\20›êÀ\26Áò^P¥‚á;ÝŠ½›±ú¯A¤\18u–Âñ\6âw\25óäÖ\16\7\30Pñå\27Jÿî%w¤·K©ÓÀj:\31º\2Êh6\31pFE¶³õa»åMEŽ‚\30Áâ’žÔd\15NkS„ÉGºËÑeK§\0034´\18Êú‰\14Ý%Õ°4ö\22”­\17+â««dAí\ +‹ÑÀrÕ•™¥Êá p2M\28B|¥¶BTéÔäéÕPÏ\12M\18\25YkmT\15,\18:\11²¹îu+ËŽ%¹Ñ\30Ž\4{^ÐTiÀ\30»™Ü§8¦/jÇ&ªžŠŒ(¬OÃñÜÒúªØ[­L’\24û7\13\11‰ÿȽ\21QÎÚã‘K-¢bj\21@¯>f“î[1`­ªý\4TTf.¥d«\8£rë‡~,ˆh¶¬Hý7ìñÖÓÏæƒ\12˜™c2ljö)¾?Äæ/X™N.ðä.M©+ñ{ôñ]¿Uˉã TûQ¹7&1߇\2ÞjfÚp$Àn\4¤P3\21z¤\29e5}³šñî^5”ëÆÒÕÓ©LA\127¬;\"6MYG\23-*_¥Hhº\25à»ü9Uâ;ñ~ÆKÎ-°»‹g\11Õ)\7D#YyŸ^ÐD%c‹hh‡/ý\0Q\3›\5VèïS»N–¦ÓôOZ ÍÔr¦‘wq¼öÐ\23©¾sëó¬W\"ˆ™\\Tfl¡Ÿ¡ ¢\31\17^Ì#Ҡ̧†æX\0087Åf¾Šwè/3³,µ\26V—ó.tg\21µ2ióÂ0™Î­\\šJs\15äVw‚\4\23ù?\\Âsÿ\"€j\28—)G5·4>`g6d'neÅ\1Ï\30\"(\16\13Þ(„ëÉPiÑŽ;ÕÏ0ÜX­’\19\21†úje\22m¶lz¦«¹p\18~ô`\1$$þúæ‹IÒÙ©¦Ì\26Sy$ß\21ì|Hi³7n‰à®’Æ[JPOì\1”`˜Ý†b\22*p\27ñ¸NU×Z¦tf¿)°§ð.³\15\0í¥†¢kÐ\16¾ª©­©¥¿Vq\14Ë+äi®k’>Q¥O=ùTù²}C>›¼´©î\127\16‹¹e´-‰Éƒ‰ƒd\26\31{O\24%u¿–Æd\"æ$\27[ìlÞ‰E'ÇÑVáÁòôN8÷ênQ)Ó°Èy‡]mÙcn/òTܲ\1îaíÃ\25Ü‘a:SN“$Ò£\21]Ȫ̱8›\11(M\26¬4¹ã\\\0055§™\20!vnÐ¥\28\5©å¬\0291“[\31)ëÛÖ¥!?‡ôIC(\25äB,ý\12{5Ó\18\6\9ç,qÁ`\1ÿ\7ÍMdIP\31«gÉ\2 $\23ÿ\1¿ªÛÔ»y-±N\27FFšžnäòŠåþë:YÇWî€Ã’*y§ß\ +ý²ÆħH訵gçÃ\15Á\22t>v‰\0015\24&þQT x\7üÔ´\5h”\4¨qØôÒ›™6Èç,úâ¡Ê>Øý¹·\\KYÁDß_Ôg'ýÍ´‹Ño¹æ*¹þÀÝU¡þ\8Ià9\14Á3„|K]Š\24N#¼©”Lk\29¤‚K¤öœÝ‚e\29úƒÈ‹\28qǘ\30ô¤Øt\"DÓR³)5Û¤¥ßYÑÁ,Ï Ð7€,z`kÅÝ\5ÜøM×¾ë]\6Ì„+O+\6\17ƒÊ\28Ò~\13í¶P·„û{*5Ð4,àX\27ùܾ+\15’é4W!ÿ@Ç_Äkß_*¶]Œ(¹À\7ùò£¥\16ÖÝ÷õ4'\6cT\28w”Vû½3o˜}šlI\0230÷¦i\1}\127©íÆ\23„ÀÐÙ+ºP|·\28A•/\26`·!ù o\"2\25å¨å¢h³âFc-!\8ÌfiG[Ñ\2É—/ÅÃâcò«K3°;UîóC?ù›¬èH—¦l½ø‡zÌѹg\16ÎZ‘·l@DI\28³¸´v*ÍÕj=¹¦ö\3Ia;;›ú\12x ï‰~R`ÿìVN\7µ‚±oP¥ƒWÔÏ\29ZNj¾‰\26&`\5QÏŒúË}Í)‰4\25(ë{#»\5A\127gZê\5a_+w”ØoàóÁéê:æDv€‰¦²ZÊ\ +8ùâ\1‘\0ç¾6g±3T'JáCÌ‚ ‹N;ï\"9\22J\0165†ŠÝ“ÜÍÍôšh2\4ô\7®ªA»…J¢\4 \28\8¬~Å•eM=æ6¶¥\8É©7ëéžÉ{ y\26ÐX„uºNSÅ.#@\8É!’n~³\16¡õt*%7ú$wµ\3\20v·˜~j’i\31?ÚÁZpô“W>ÔÝAÂæ¿>Ø\3€8Še°èK¤z&Ã1*œ=}þ\13êóÓt*”4ýVÞ$\21!ÕËIX§\0—OŠà\24\7Y\11¥IZ‰R”\\øR\12BíôæPo¥\24æÖÂcŠƒTŸ¿ÕÁ‡°¹ˆëVÓU•ÚAÓjëQNÊÚÐá7x1êK\26V\16©ž¡Gæ©°\31,ƒÄüñ’­\18”Ñd¤Õd\15…\1ý Éõ¤Æ÷é†e•ç¶*nŽ6²”˜¹\30\17ýú]Ó?Ê1MŽJè4h³ÈsK•µÃŸU©´¯Ìì±\27o=¢í ñ¯¶²\3\18Ë\0181\14è$FÇBäÄŽ…<§¾û€\15+ä÷ÝŠç\24éZ‚é›ëŠÉj+¹3µ(Ùˆ­é´˜ñ\13E(x°9)ª”òœs‚\\z4²'Ð\11NæR)¹\29uñÆ{,\4)\20vôÄâÕTúj¹æHw\28ÿ̸þ7]ßÔPñ\127u/–Çš-áo–\"ñ°®ÝPKoñùË\29›8s‘.TJewx¸J§‘8lDØ\14(¥1Ö›ù§Œ}hp$‘Ù16PA\16Ý\7å\0299ågÅ\17®ÀEZïP²DWk\12x֛ܩ„²~ ¨!…¸AÛPØç\6ä«[6ÒmÄŽ&k©\15¼}G\0271\21?\28\4\3\13ùhZà'wÉ©©´si:Q™ø§çÜÉù9EÁønè\8µËé\ +wPµ<à†ÖÖ\9xᓬ:y\23ìŠeWÝD5–*\23\3\11ŒéØ\"™yÒÚƒ}ô‹F’⬳ì,Þwt_¡/\21L'ê¦ï.†i \1\21e%WšÊÚ¹Ûn\4˜Æ\3Æ¿7¥©\26\8ÌW»Êgp0ZÍ\17É꟤þBª\27ÑÑ€Ÿ¿}H\4vúæ§!\22=J­\27yÊk'mØI\\lÖi\"}Ý÷áS*è\3•¥ý¸‰¡˜T1:#A÷¤¹þ‚­åÕzÊ\17ö¡Ô]”'ûÕº±-2gú¼ùX’Þm‰°5œ¸×ˆU=„ROwépÊYrÏIFï\1›úaº«/\19g\ +=`)Ã}c.\23ºnª;wŸœ~¦}RGÄM\28xøœ'\14NUNb·3èË\5ÂO,ònɇ'\28¨Ê±§e-0c…l—cIÌ?Š²Äf™hFS*„9ÔF^ÿF{R¿ÓÞý%.ï\29˜ð*î+Y±¶˜‘\3™ÒLã\20š;\29$6h†ÄÛ¬èhÈùnZfèã\8ë£ÐÈ\14ÑkL0´”øÛRå\17é)ƤJÅ*gzJÏÒÔoŒÓ¢°E*€\25oMcêá¢Ã‘\27TÁ¹\28g©ï\17Và;oZ•SiÜ.»Ò H¦\22_mã+Y Ç÷å>\25œ§õFrŽc\ +P\ 
+f*;†\29~ØÅûåàIWX·B{‰øeV@½IªÅd¤\18\29]å\0298!¼s+\0ë·\16ç¦ÄoôKéG—ᆒE)E$§ÒMn–¢°q–<Ÿ£¥hþ×À·4tL&DD3t§\21D·sk\26Ž»w7\11Eª+·ï€°îPÄ‹¥Ìè\19{úÅõ±\24n>{Ìç\15qÎâJ½V-À6·àY“¡§Û滕%Â6ÛBQö]mQóf4ÇrSšš£Ü´m(d<:ŽP\12=Å\16Oú\17\0112s\26‘²\20>ks@\1ÁÙD£\11q·\29Ëõ®zMÎWòÑ\31Šâ;ÜÛ\3±þdMÓŒ%¾µ>‰L‰ÛÑ”£\29鉢È4Ňܩ.è}\18Ö“^Ênîf€ÅÑr;ËfQ'5\5ãd%¿(ã_f‡±\15@pc9‰–êésP×ùr%½öüÆò5»Õz\0078¼­J\0145|É,åƒyy\16æ÷ô{÷AG¿ÑÑPìºÐ\19–n†òùåÈ(‚ÍÜš7`fécÇA2›OôÑ\0\17£%„iH¦\18”›âHñe­š®«\6ŵª‡\\䦚Ë\28 zt‚žD÷(NsäÐ$ôñh\13g¤\0©ôQÖbBûÃ\1V\18»Ho[[ó¢–‚;•\"»N©ìµÆ*\ +£u:\27‡,ìœÓ3rp…\0271‹Üì|,'Ey\ +#\23¶íÂM{AÕ¶+•\11+I\"™»\17g¾*áA\9B2a\31ömq\23—ƒ\7\24\24Ó8wK›\21K(…Œ\15jQ£uŽýœ˜¬I\29ͨe´âôMeò3ù5ŸþH—ôTˆuºyO­\15Š\19\8ÊD\11¥\7\18 ‚\8t¥ùwE¯â\30\"‰S›(F\5\0119ûM+\0043!5MWF³}Ô#Ð\23œC„ÑW¯è©&Z\23p~c#ì±Y¬K’SÒ8«5\29õ.1p\22\21U¥\0ÉÀÉ\ +Œ®:´Ç–§\14\\áS'–þð(µŽSÄ\0Ô´òYš*Ë\14[ü(=ˆ!\22FÄà.]Æ\23}§ÚT'ï\17‚6\8êüÔa\26k÷ë›iBéfz=§éöÔ,>.M.b+\13¼b”9ÓLJ«ÝYNªïhÎ|çpI*\27KQ\4\21Ð;ÈæIõ\14¢€¤„!¦ºƒ†\17·•­ú’s\25Ü%”CÛ!zÿ?äú\27ù®Qçóº}T¢‰‰h“¬ºí³(å@ðÓ—\3ÔSC¨eŽ:RJÍ2»©ò²%6ê!ÐK/8if\14\5ÈÄ¿!ˆÿ\14\4‘\3hœA™9AÒGŒíœÛF\8\0049%—oµ›C¨}ó‘•\27g\0O>Є“3B\23òÃÂ@2°/\"L\15IKJ#ì¯ïÜÙmë\14d\25…\2µqà0W\ +Â&NÕ-š¾žCDu7•7œHj¨å™i<‡g\23Ò¤ŠÆ'\15\1{[â°Ñç\23…€ÎÊJ…î¬ÂÕ¤'uŒ¤ëÄãOV¶q0jé7DÚH\21õÎ\15w\ +à“‚B+¸NŠ/UÀ[(Öº'\15,[\127§ÇøYBMšzã*GNÒ=wj\5˜âÕ•\16v^G!¯¬ã®\\&G<#™–Ú\4í‰üîà5NwQ\25’ÚÍØ\30š\11pWBú³ÒšÖ«êN=©,ÑÄAV\4Û±já](ëh·Ò€+ÉÎ\18É–æ{Ⱦè .·qØ–œZܳÛ}»\5\16©}N\30\17#TZwX%æF3\"ÿÞ‰\24+\14È–ÎTÝB\16ë.ô^Ej­„\12mÕQÔEa pý\127¦b†Â\0\26¡L…{¡²€\26TØ,Žê…ó’ºu¦óA/\22Ú\17—ìûM£òF_\13q‡\15Š_¡YkN8Έ\22Æ\0052õ\29,HøF¯å‰c\9\\Í\2õ3\25\8t²â#‡ð¡ös“G)E6wi\11v«§6åa%æ@\29Ó\14ó+ ³¶Ntr\18«\27â\0096¦\2Ãiq\13­•Ø\2\ +`O\31ê™ÝúC\25’IQÎÀÉ\22M\17_bQV=\17Z¯\3ã\1'\3µ4>ßÖE¢\7^ô´HÊ3(ŠZ.õ$‚’Þé:ªÆ½ûXz±ŒÄÊ2\17½XÚ\29µ\3\24ŽD­HŒÞIk\ +8—T’œTy×âB\18]êîÒòÑ¥™E›¬\15bÈ\14\0317àSóhŸÃI\\rÍ\19\26\25÷\14\23%pëÝv\20×\ +ir¨ÿ§\ +ÂOwä\27‘‡½M;™PÔ4¨B\20 èPÕ\\Ú¥¯~\6„µ5Ð¥š‹¶[®2µÚõ€Næ\4‰âaÉ:D¤¼\17\3©–i¡·ìç/‹’¨Ù,Q\28<\"tûPðü:\15ˆ%˜m\\é\22…nÞÏ\18{¾R\11ž…ôT\5­Ðú\12¸]bäŠnÕƒ\24¨Š˜cð‚˜PVrDÙJ\17ĉJ¹AE½)!\7½6rÔ\00656ô vãàÃ…kΛA]”w£\22õárBWÌAÄÖT3f‘ü¨†“«¼äÕÑÅRn×7îŒ?Sáne•ÍÐ_´ÂÏ4\127›ð1oPB\30\21³¼>›F\20\9*#NEƒ‹\17ÃGQå(@\26yÃ;aRc¹Ë&^©ƒêš\20Æ\31p\6ƒØ\"Ú9‹\ +€7i’§\25…*Ì\28À»Û¹ˆL\2oN¨H¥¿i\28 ©¢»¾H6\21Éï¨ðGWñà,^Åá;öŒ>sŠãŠWŠ’8Ú³ë\4]uÖkO\9ü²•®[™¨ûA?dŸß¢‚ñ\4O¶¹ÕÏF‹„žÞ\23ÎR!3[ègmǯ\"ÜA.ò¡ùFÀNˆÐ-œ,ÿ®°\13¯ÜÆvâÀ•Š¨\17U\25\12x ¡·×¡Í\13CéŽL\0157ÖÏNsdºP€mž§–… Ö‡£€W:Ð\28Îos¨ã(…OÕ¤‹&³.\25ûØ\\ê\3$C2bÐËYà?`\0\30\2¹È挣OÁŽ³%˜ã\8IÊ”Õÿ⸥Ý\21ò\29\ +\29øµžV\0040\1-9àj¶¿9 ”³ÝÁF\12\28Ÿ'\25îÎ{«•—˜ü\9*e(M\8Ÿ_•øD$I§“ÓOra”\26Õ»‹)uÅ\1EkÏYªð\12ôTB\0200\23m<½Û\14NŽu?7ŸOìÃIþâVf·=µ\18\13üãg\24\17Œ‘ ¤r\24DÈy¨Vr>\24¯šì}9Óä\23Qr2ü’ˆ\"Wt-¦\23\19ŒUƒ \16“Ì«\29böK\2\22èé)†i¦\25—8ÿší[º&½õ\28°ÎÁ{_ª?±\11\9ŠHyíÆG¯»E\14™€5[§ â­ x\30è¶FË>Gë\18\6G6+\20gf|R)B\29`n\0068â:q¨\7¡@¢xõî¶~ëæ\127¢Áv÷ÄÚ]µr‡DÂI…tPGšÝ\3®v××;\5nÍ\6/°¢›£\11mÖÓ’û &íÂ\27¼ç›[\26]¿ø#¶¼#[ýõsb¯¦U\22)ÓC®\30]xÉéÁzŸkZF÷XÊH4\127BŽŒÔž_÷ºµ$Ì¡¸×\28á×ÏF~ýNV)å\29QAæ\0\5˜­Ú1FÑ«6\27Ô¦»\8ǽð?µBN\02978ù!é=6¹áQ·¡½Þ \1\18•–i\5‹Äxè™Ü-ŸãNóÌ1xa}\27™RÉ$ýÀ\19‰Ö\24ù8\5s@~—Ô52V\ +\5½dÖ;קoìDž/f52è¢Ä\27\23ÍÆhcÝÖ‘’Ρ4]?yè•šŒ4óǤ?&„\1(‚™QŽ)Ûè+²ðJ½yÝúä·ZÑ_CGœ™l,\28·\16]Ý„·ÛŠÔ:ûüÌŠŠ×\11˜³ú½¬%\27LÊwl‹kRE©þq;—Øþ¹à3zä\4­Ðëú¥…´ÒAÌ[hÍ…àÆØ5®±æàãÉõe‰žµ‰ðl\29\29\19$7×Þ”æIî1Þ\8B½©ºÌüÞ\\×T[E2 ÙLðÈE©9Éä´œ'M~â4#mI«—¹\26ÈMÿ¥ùp—ôÝXk\5ÍtÐ<Íi\11ñõZÞ¤èNK\14\127Òßpä¤T´Ô\26-€ýê•S»™\0042¶“Ó£Í\17±\6¯ýx açm¯9ã¤?Í@X87Ûæȇ²`YúÃ\1ç°!æOÎÍN<|¦á031èT÷°T#+\2\5ÁÊQßžå¸Të\24NÕÔ—óyPB\5[¸Z!\\^o3”M™Üe*ª€¡$\0258R§¦ÛÚ×Oræ\13‡jwÎ\17¤FH§,òLr®ÞX¢53\19ÿ\12{‘-¾Ü(º^GØ\1m ­_Ââ\21\23ºÑf/ÁÝê©Âz\21™U*ž|{Ée\21‡Ä\23!ûÏÃå\27¥æ¡.Ç\8&Ó{i²«š>}ª\2m\5Š!Ê\27-ùXN­Ø)\12M¢ADÞ^;…%6̲[{É7d–®\0ä;¥µ\12R\"ÔÐì±™Hgu”Ge2 Ãá+\ 
+s»×ï÷\5w…\0247¬Rß•µ6d€Eâ¸ô4K¶´ú<«:Óí<\13>\31…«\5¹9‘šËn%\18ƒYf\9\1Z–°r£¯òí@Õ±ùÍ\14=m7š\29\18‰'WO?\29΃[½\25µ¹ìçt¢Äe‚6\9\23óæÆnœž”|\22u˜ƒ`\0303§á¦®²Ã¡“TBz¾¢|¸¶Ö(@Þe›3Eí™\14dwêFîÅ\29ÌTI¥ì„RÊÅy;ßvöì\5.-ç\22)\4\21”ð‚!éß8àÀ5=ÂZ\7Ïu…ê–u¢a¬î\20R\"€Lw(ë-ã/‹Gû÷LÎ9\3I\6W5ëSá\2\15Š'³B^•XÅTb›\18º§Ú'â¦á¥©>\0–ó!Ì~à\3\5Ô\"fºž×\7Ü8¹¿5¥?¸j®<Þ\24ˆÝ\8Í=T\7\8àÁ©y·ª_i3‚hêôF§\27½êß\17®Xˆ‘í;a+Ë›NtºHªT¾\30^,7Ã|\26u\28P—ÚeÓõ\12_jár÷Ö\29¼&\19rº†œ~vk2`Ãà…(GF4×Ô´j™¼<Ð2ûH•z3\25km\25ºÞle9Š\14H«ƒ½ýŒÉÈ4\18ym3õš\8ÿ\"\"9Æ>¶\3Uà\25zzPu}¤ó¬·[KOʬd\127r\26¯¦\2’;Ž‚\127á·ÏÊ6ÎþŽ%\5ìO.ÄÏôèô{æ\0029 ’‡\0257|³&š\1fkc²¼Sè\21·rp¥u³Ùg–\11IĦ™ïRZq\19(ÐæiñyHÀ\26Ž\15êŠS×òMzB}\25¡gãƒÐ“±@òÙŸ³;¥Å.Xpzñ\4no\17+8ÌሠE·Ü1,l‘•tÂê–oø>L-K9èFNŸÓ\14¾\30Ô’\28}HhQTvàÞè/™‹gq˜¶â::ù@\22\24 Ð\18éÍcåãá\18GöåâJز±Ô¢™Ž\16\25n¸+H…Sz\7·ÎÌ›×£¢~‚\127Ùb$¦ñ>\0267kZ|zöpëéYý\7%õlêfö\25!6µƒÛ\"\ +@\6dÄv¬^\17¤\29†Iš³ÙG¹xƒ€\19\21–\2mc÷…g3¯]¼d9Ï¥¦{\4€æ¤˜:û«Ú\6\\Ovd\3-\16\6¤m´‘U—ì¥\21聆ÝaÆê\31û\12Ç´I.¦\16/ÖßÂ\ +z-O\14\17ʲT+y\1­¨k\3OÀèq\7íÑdgk.ÿí\24Ìeð›’\23ëŠðâDL+Ì°\21ßR£Ä\21Ý0Ë\1ÅË\30ö—~×Ó$Gn\1À`\15³\9„*©¬+Ž\14Š}\16\11GQN¶p4Îö¦™º{¾\12³Hœz\3}™üÐoeö ½\19\29\0303ÇQ\24F¬F\6\27˜pvlônë\"‹DÏQ‡kq\19Åt&›júñl\30\14\18™¶Ìù ˜&Q²EØ©¡öéX‚P±–tñˆ†hôð\23´¶z‰eîY»ó„Æ>áçwÇö²\7\28Ï\8Ò\29]¶§ y#Έ`g£Õ»âª\17moŒpEÜ\21¼µi‚å¨pHkU¢ç¾¬\24ðóŠívoúÃ\17ŽªuGö÷â@wò(( Á\21w\4ùi|™\1l÷àà›’ÀS•¿÷CÕÈ+Ǿ3>‘xÙÐi>Ê9Ÿ+‚M™\0149…øP¼\19Î#ÞT¶\0092‘nÔBETTçŲ#µ±˜Ë\17\30«Dƒ!\24ðŒ+‚Ú\9dúÇ¿ØÁÔh¬¶è%~Šü\13§\24\127ä\127D‹«íl'†Ñ¥ü?ZlŸ·`)cûîÎt‚\11ð¢\13¼˜l³}Ú\9u¨\0088n\12ïä“í(ŒK4]OîÛ*^a\0231‡Ì¾0\28XÆ®,\19ÂÆ¡ÐyèÏ\27\14µ\29\20˜^ÿDÁ»&ó2.\6r\\KÝþ\11Ÿ7%Ó³:'…3\15»–[rk\"£fˆ#Y-\\¹‹\21V#ÔgÆŠÕÂZ£ëÓ¿ym\20Öëp<\31…\\\7Ë\0302`v}ëf¿O5©ü† \20™O#¢ìV\29Ùí\28´G'è\31–ã\31\3•¹ðg\18ß¿¢ÜÚ´?/\29f©}1´\4ÎפÝÉt˜A“\28æ³2êÈu”Dð4³b\13æà\"2Ñ\17½­˜ãs|\ +‹€£ŽR\17*„\15—9pÓSô%ÿ\21t;‹4{ÆgS\2æ1š×Q]¿\0292PÓ±9\25ô²El\7Ÿ_9Œ†%ÄÌ‚4ì¨\5‚p\7s+9ñŠ\7èJ‘G{¬×9ŸTŽœdúnuevF’îÆñ\14´.DXt\24÷á\"iø÷ü\14H¶þßOäÔ\26Å1\14ŠœèÀxòg\24pÒìÉØML›ß¥\6?•\13+ˆ\127ß̤™ñè\11!H÷÷ÁÙ·Þ¬$­bÜ]Q\1%éƒ\ +k«˜ÁÛaÛ\11×\8_%ó\22Ü%ÉÏ|'©•û\18o‘‡ŸÈ¿ÔNe«Ôµy\9ë€÷ º9ˆ983b)êNGalhÍHç¡0\14 @°Â\6)kKØÁš‰’Œ3Ë]¶ó’R¨”4¾½C\9…)-{“ÈÛ§\6ò-mJŸ3_¬EÙ÷(\5µ8þ-æƒ\13žÐ³­¡F9çû\16\9‘&'”g[–ÎÍsšÍMA\19Å­Ù\29ü1™«\11½Íô]Oø\23~pÔ«öº$èÔ઴\3ä9”€L(Íî\23\14yN49-JÎÍ!së¸Q/ï«Ùä|þA\16Ç=¯Gã4\3õè¨6B\23@\8ÿnú‚pX\127µ;]­lò ‰ú’-0Ó°Ÿÿ¸\9Å<=ìftZ\28}õHUCt\17!ùÚÌ\0280¾\1ˆ32˜n´'Ô_\22§\9\5n(ÿ\2ÌDV®\"¢º£\13jÁÒL\ +„LÜ88ÉG\9-]ó>@±æ›ý\25¤\ +ø°#Ьá×}ËD‰GãV×y¥¾\0154§ ¡º…\25´\6æt»š[\17\9üq\7ouÕ9\24´*•À4w\0Ÿ‘“IþLtQNaø¨g{ؤ^IÇ+\14SÂôIVM\18¨QJ<ÑvÅDDDH»QÀî/½\27¿ˆ\4-,¯Côºˆ\12hk;qêÃlÉ“©„ƒ°ëàøáDŸÂz³]R™G]b¹2£\29A›ŸW·àLäTzM—h(\16tØ\21ç¸u© \6ƒùS{<:šÐpÜQónâ)ÛV®ˆ¿sÿJ!z˜Õ*Žp´'Å\28¾äáüÎäÎo\9®Yú\4\17ä¯ß?\13z€M]ìâ\6a*w·‹?goÞ±¨ê\29\19\ +…ŒÌ„’\0ã&\0125\8\24bùÛ¯\23ô–ÿ‹4Dï”Ý$ï¬÷x‰Lè\27\12\"\17á.6ÇÒ|g4\6NÇ0\ +\21O4\25Ù•¹ó\3\23Ÿ¶’Ÿö\22Å)¤µ*cñ^\28Ô—\21\25…Gôù‹›A&œF€³È\13Ì\8¾ã…B\9|¼˜\0144ÅÙÌ\20“\15Nô\14EØœ©­Š‹J¨ô†7œh2Oº?JrG\31ô1ú(#N{¯i<“á£\27*£…zkc¡õ•Ä¡‡–ŒFC©‚GÜ ‘\"L÷¥¡‹Çéä¸\22->…$ž}æ:º\15»H†\13ß\20F-úHöe{–©ÓîJ¬ð¹`i\18£\29†©`ÁÄÙâ+î`6ù0í6œ;7o\15òQÙwÙg/Êf.áLUÖì“T˜—Ñ\127\127\23]SM‘’÷Îõ„7'—Z)Ùj‡\16«»\19LTBD\\\28íI!1$_œÀAp’!:M\6d\30ãõ%m\ +\4±;O‡|áƒ\21è¦Õ\3¹ýÚ'\28燈ҡt”ã\24ÍØ­_E¯\2\21Œ\21SX_OŽ\9I\13ÎgáAJ^\21ä\127¼RµŠ\127牭á3c\25͇9ÿ !\9à9¦~žà\19P8v>Õj\30KÆÇ^7~}‡–Âùö%¡Ày3ÞñÙíjš³ìV\19>´&™­(3¥¥`í–_av‹²Ê„Ç,í¤<Élxôü·\29þÙs\14B0¥hr\0ª£…ÖlMé%¼€Ö¯¶ÏJiÔD›Ç¢MÊ\4«\1h8…jô™öƒË°ºB\13›ö,Ð:\26Jµè*óùc\0¼\25\17èÆ#É}ĸ(AIõŠ«R¨Dé:'±èŽÄ\18\21‡ö¤k!‘ŒdÖ±\127Yj«-‹\27AÏ$ÚA*W·ƒ«o=«\30\ 
+9ÝÜ–º\16|MÿB\4;ú¢Ð\30/(Bd\"W÷ç¬\19/鶦)þúC6«\16_ÿ,±»À½øòžT‚‰„Ý\"\0153œdÂÛàˆ?½\9öóŸ#8è¡\127SøÞ`Š\6ØÀ„+é²9KkÂFêäd¬÷­ˆE\20d~96\13äs’cIcñ+åxjáå¢\13ÛÛ¤¬¾ÀXi±j³Ê—WcUÝ\2\23ÅÂ2ÇW{Ñ’)®·\2\3v’Ÿ$°èÇ僧\27^«\31£›\13®»¢É\1»RÔ¹\13ꆔ\29\26þu\17bø'\16W¸k\5ÚÝtÍÕ¤¬\20©IÑAk\2{Äæ˜åâü›¸soƒ–튔æ'¶Èp\"C©ƒ\24\22G¿¬ºÐ“-\0146wðF»¾h‚¼90üÞcK½EŠáuŒætõÈ!ö‰PÍݳkÚ=4Nê*\31*yé×Ç+åÁ!yØ¡Æ]»I\17$wÜ\19\"\15÷³z\2Çâ\11tôî¨QŠËÂé¤u÷Ýz^\7óžN-i²JÀñ\2ÞHBù!ƒ×Ûÿ))\7|Ù0[ùôŠÖ\28„\0”NwTüÙYm\5ùÙmAäè×7\19s•aÎ\7›pT_°(#aÄl\23æ$sQ\2ÿã>Z/Ü\"\\RRé,\18åI…Á\2z\"¾è0ªMÐ\22[½\22…×f\28¼‘·ˆ1½»¿5颣\3d÷\19±Qj¬rR\127WÐHaÀ‹…\0232\11ët ,n6/D\14€ÉtöZ‹Ô\18½t†‰F¡˜^éû—7tËÂa35á–HH‡»›™\ +Ó¿%»¿\27«9ÿ¥Âeh–Ædø‡OgÚ;\11Él›_W£rY\9¯\2hwš¤ÏR[4ÈÀŬGч3A5¤8}]í( ›|Ø(ê$\3ÖºRœlΫñ:<ì¾\12ÌëW?™?Î\16PdÚÆ\4So¬FØ\12õ»¥c \17•ù5¸RÒà…þ2£/ê C­„(ûi\23Ý\23ÒÏ©bÄ79)f“£œ}z7‘:ò™Sµd\1|\22|T;Z’¹\02129³ñÃ7RØ×!]=-+Pr\7\17|Õ½’ cÏCBßnæmä|\12w̹ìVŸÏ…Ä\\³\19\21tXklÇ×sÁö…òêýlO‚\20\31:\3ã`a™Hœè\127[~é1\27\8pêüÊZùÀî\17\0085\21+‘AüI&NÜ\4©uë¾~iw9‚pÈ{qZHÝÄ\6G»HǨçñoõÒwxC¼>\27\"Õ\11›T\24éÒ‡eu-ÖŸ—_,ôšðí£(\9XÐ\1âLÙüÊÂËo\18‹\11æ«\3=†7ù’æê•\7¤¿\20ñ¦2½\21F 2õ)Kbð#6s%ÚsRà\13ß»4/ÉÀj¿\24tf5Ù1ðé\8)¼ê’³\16\30Iv\6ã\127S_oE·Íé¾ø‰\ +{çƒÇÂ,JÃM=Ä\23€\13:0&ÝÛ’b\\ð\9†#–?Ùšq^êàòø¡bÅù\\ÏS‰ÒðHqÎ\6÷\25_K\0141a—KÒÚÅ\6Eç\26¡})ñ3)è¥ S{[·P:‹¦ÃÞÚ\7\27dˆ\18L|—½+]e÷ù\27ëá,°¥¥ì]‹±fò>›Ñr‚°&\28R&½p“.7€Ë\5°\8lìzñ\13œ’(ž>8Ã]â Ø½Ñé+\27és\8ƒ4¢?È?ÙòF\15ßbæ\11-=Œ¥Qø° \14+\"^K@ò«¶Õ½_òfB§àžršù0—@£µòó\29\23è4…›–˜\1°ö˜J\6ä“\28\21“}\18¤X™\30½–BZôW\25\12“†\"×ð§f»±Ñ¨6œáøíÞaU6#\127\11-&[.Òón=¥è#ò«C\17[)Ÿ³n\22\\8\11¨\21Ó—‹¡/†Z…–\29Š\7Üxⱉ+ÊY–¹E-þe}ÍN†\1ÍÒ­$\0058Ol2bôðÖ›;Á³Å\7%‘\13Α[´M.`-9A\ +X> ¨vt Kñpª³{„Ѽž,k¤c\13û4(‡×Ïar4BÐ’_v±Ë`\17~¢\20®s·™š“b\16Èt^M:…utQ¾Èϸr\4Oošç/?n õd9ÆŠ¬ÀJ?·\17›Ä8˯OÿŠ\0126\31ö\ +\31Ý\20\29¬ëºé×\27±}¸lW¾uwÒ\2Ph]S\7³f‘Ù¡bÕ´ø‹eÁšdg?c\9˜e{'y¾sFÍàÆû·±õ\7H¶\30/xÓšý€×²¬WjÈÇK¨dÊþ•\27\18dI›\ +›_Ucð¢Ú\6/=…gm\1j\3–¨Ïÿ\22Ä+\13È SY\24¶$ze=ì9v\8ž’zÓË\31œD H\15€·€í[’u;r´b\26iE|öæÃP2N%Ë@£™Á=Á\ +ˆ:§—\18ãIìZ\21­•jr\19þ}\5¡\ +=j“WvGd\12]o“V3\12 P\0113_e35b?+Ic\29…\"¡y%Ÿ9Rh¶·¤±\127òÆÛbáÂ\7u˜¤\14çUéck˸–¤!Gµ60ú.RŒ™ÕS¦Â\16¨'CÊ…¼«ÔÉEùŒR&°)4\3iq:«½B\26ËjGf{8u«¤y<ßi\31‡¨LF\6AAËÜDÍwBÏѹ˜Z\1^ÒžSê£x\13º'3o¹\20\20“stÒpÿG “®X^ºÎ¤íP\6ŠžÕc\15\11á8¨Tœ\22M觵AÅ]ZAÈž\24,×é\16{ç’¿\28JM¯%\27#’ðÃ…å\11Ådë\\íÒ8\27í\20·¹0¤nr0D0¬¾´–J‰\23U\\¹ÔZçý)–_ÿ\9®’‹˜]²ñ\2Ù>U\29¶gúGê9|²\25\17=UQ,#ˆD\15ôÍre’üÖgO´#A/†yr÷·Í®Â‘ÚúÞa¶ºÃ”uæìÑä*ÿÃs·™¥2/Gÿ,kÄ×’º^Ç¥ä\12-|ä ‰4VÖ›x\23”˜¢G\2Ê¡¤zõÕsö€ZKóE»âÎŒõ†(˜Ôó„[!çÊ1×c1%\21Éæ—*¸zÕë?\0095Wçj\18\22“Ú\\4œ\0091(%é\ +V·\2(ŠáÅZŒKaµ'û(ä(¬ôí2+H\15\28ßÔå¬_‰@hƒ\16ûâ\0—\12ÅŠNEŽí#ŽÌðš™*Ñì?*Ã9±ñ_Äü\"b[&…\3Ò¶¢âl\11©4\".3å·*»ã%:·\30ž¨\31ÎEºá\16N\9\14Óg1³ó?]¥Ë»\21\"¥š®+)—Z\13WÓù¤4È\16N\30t-D…q\15x€\6ʸµä–½¢÷³\8±§\9\8ûùc¨¬\ +òÖuù”ÕÎÞ\31ÉMó\21\3\8\ +»ø©õ¹\"\127|˜[©ê\30\0\"Û\9+u\20\3U¹DM©¯¢\19¥~¡£]š_ £ÿ̹èÈŸ¥\12Ïf6Ó ¸.Æ«5·\6Hs…Aì5õ¨•za\20óߟ\5r‡\0gŸþ%kŽ\"\20g)à\24™Lm ¤¶ðìµÃœè³øÛ:\ +X©a§*–ÓÉ­a\6 °[ÇiË}\21vV±ÁIh­Íom`\28ëjàÝ\28ÐX>îDÆ\8Óø@\31l1œŽÁpHýõñ_4“ ½MµÂ°¥\20zž.¡1/×ò:ËK\0170d·ûð^ð^™X‘rÅï¬!\29“µY2aQä莽)Û¦\15t-×£\12¯tRq.¥\1Ù#\23DØn¨bÉM\22ðR:´)CHpzˆ\9ËX9ƒÃb*isþeg{C\27\9¤2\6ÜÚáU‚µÃ­•Lp©òåÐyí\28\30\5¹P¡bG\23\23¾´¶\31&ê¹úáS£7¿ýKòâ\18å\9þû\6fkHâ¼ôŸ®\7lý2¿ìä‰óvÖ‰!hÇR¥¯äFSºO•hCuoªU¨ØŇ\27WÈB\0284‹ê(\8¥ÐçÄ\0ìc)ù-µ‘ÿöåùŠ–\\/çXÐU“0_\31%r\13ó/™e£3n÷­è€5Û“4Œ»ã£ý¥\127\"\6Ã\ +Ð\31jp\ +O)ð2š;s±[\25¶þƒ1§su­Niƒ\13\16ª—}-Z\21Žá\4\8)w]2Ê}Qc¾Ôû «\30¥´÷Æ\7%OålÊÒ“C°L:Mww!ï–lÞÃd‰\23Ñ\"+µO±Uè1Sh5\14\28.§}”úQ\0239•fNk1›x½†žÀùu\25!ÈØØ ìI˃¡XKe,j´‡æù_9\13Ž\27tU¼‡¬¦–må¨/‰´c\0183¢\22=Më°6ˆ£W\0211\5îÒÿõÝú\14Ú¬« ½1xqG™~|ÑÓ~sU\6öÖ¯\9\1Àh¾(‹Rs1|ùIÑ€&#œ’¢\ +¯Ú“\9é\1>-4r;\23\17Ëðº–{?)ûÜ’ˆíÇb¨\20J &‘ìã»ùDŒðY„®t\27¾9ßP»a¬¶\9~RÖWÅÖ\31ëñã{\13Šá¤ÅÒ£\18Zʬ\27ÂO°\29\19®dq“ 
¸BŸ\27è„`i[ã¨ÛíoW䥫݃…QLx@I/Ë\12J³\\ORѽ.µƒb¯NE†\8´œy){¸\0ÝÝî냙Äó$ºþ7\11¡L\17q=éC4\0229ƒ0½²6ÛªÊ\6¸üéÊ)*K²í\27=\31­Èv¼3˜ë„¢*UFé_}…Œ%ü¼…c\28žhiå×w§p\19¥i\22\13¹G‘€´š…ÏVêÑ\24BÅ|\29½m2ꔊ*‘\29¦¾\15i‹\27ú\26\21bPŸˆ\20‚›wäOý{Ô©ê)•Ã§ò,\30àæ‚éR\27ìF&ùU>hwïæ>¥SOÑö\23PèêA½•tnÒrRa²\22¡Ø£”îäÙ ¨Ä}''.RFH°êšÃÉEÙFô;îe§Ö3¶¯€ˆ\3ß/WÃàá\20L\28Öº\30\0046“\11[w\23&{\29Ú+R\20gОW…‰ûb´.\18§k‚º\17Ìÿí\14v6?ñYri–\19„Ó¿ê\19Â\27Ú‰\21y”9ÄõÕºäEbV/÷Š]8œé@¸”K)¬\ +Ÿã}]}øbì9/ÄMQòa±š{Ö­%\25á\02878kÝIÀ\22ç˜Ö\14\25FMÖ¦vs¦\14¡Ú”éÕ?h(\29C¸\14™ú\9ÒȪ\23ØíÙzw%ΘŠ\1I߀\8&sšÏéÅø\ +$ů4ÿ\18t4aŒ`d±-\28^\17Ë«úî%×Ùh²\8ÕG³:\20\16»…m\8;梠ÿ°¦Z>-\8¾ .p”.«Q6e.äÖP\18\17Ûâ|é‹û%kC7—Fm&>\5—\29\9×Vb³Ýò¿\"w\23\8Ñù<ý«`)Í¿(á\8Iˆ“\30ÌKL¨)ÁI¤Ìž‹¿\\ék§4\\X/tÉN_\22\17÷)£\18ºà•ýÔ¨âD©_ë¯;û\5×f—¥Usð¡\27©DFg&Í“ªrx˜ÃpÑ…\7dÉuá‡CdU\0Áê\5áuk‰mK¼Ñ³UwsßÖø!ïÈÎî4”1CÐ’]QNYÆà\30À\"ôÖÒ'i0æ\ +׳\12ÿ„T\6ß=ž¥wG£¾\0Y.…|\1%\\F£#õœ£ØͦïÿäÔ‘ƒu^C¡Õ³*\11ªmð\5cmkЊŠg´>©Ÿ\0196®?‹«\26\7kÜM d´ÕVº\4\22ÅKÉÓm\13ÁñXJJæ—Èw+z\2ÖR;ÎN£n¯.3ÖûÎɪ\22l=·y½[<1ýß®o4–N¢J¨j‡ÝSi³WmuÁz\9\26nß^dšSQ\27Vfo\ +k\21CƒÖ”Ì\31\3Ãՠ€äuÒyš5P­>¡\30‡#6o_\31hæW´èyˆkÓ\3eKH~®uNúl©¹\8?Hj†Sj†!òûI^®\127Ÿ*\29Éõƒ=¹&W Âømüÿ~!í û¼\26\29D)†dzNnÚ`uÈÌÍzJ?[È\20_}‰°Ü²hl\12ÈFG¦#AØ^\23Ó»¾^îU\3~¾z«½0ÙV~4\14îøçn§~±vÎßÆ\8\00067†\26½B\7ßw-EVÑ^ŸOŠb“uá\9-H‚œæ±\20”:£ä5Ç…®qz£[…ÞLaë\\êHÆL\2(®‰ ÁJõµŒuÞ\\ÔÊÍ.WåJ¥Vã%ÜôÀ\28¤ÈÁ\\ïÕü×òpxFQÐ^†_¯&{øê^ ­¥„‰[\30ÆœŠ;\26\11I¤[§Þƒ\30†+\9Úk€‚|}¦l\6Ú\5ª›gÐËb·aÎWïJ,ûU:\22íë˜J§\25ï9†¹÷~Q,YdÁ×.|(ƒë‰Û\0267ÁJ¥—z‹#\23hþk\15U¬%-Õq¶œÄ[»q—ÓÑ­KÎÒ3ןߜI\27Ì`É:FÐ1fwcËðñeY\0246\19^sûùÿ­mÍU©¹ðcÒbÔ—ù_Íuáý_Ïî\14{[¹S˜îVpɈq{Í}SÚc\14N¹—ôðlÖcüWâù_\30¡\11ñ\21\9~¬io3cµ¸»î®…Ñûú^j]¦Õ¡UyÇ\17³Ý~ý-3´\13¯¬ÊLOMû\7ÝZG縷²ÔP\21ü\14š“ï¸ó\2¦òè-Ê#Y(\13lù\0020iÎ2öñT¦s*={ù7®ãsËý|ƒ¯WzÜXT׊Ûϥ܈\9ü»œq\25Pþ‘ïç‰_\2µrI|\27\25lðñ\25¹¦\17€\"¾üÏs^©òG4¸îÊŸRÊ•X~l ü³S½å]\25¢ð€‘ßXFVRÎñüˆü²\27ÖãfêºoàÐNC·|·Œ)˜\29)?˜•÷\17J¡/µG×JÖV%Kþý\6!g0\19\20JΠpr\1úÚÊS…–3\16^.(\16l\0BÍå\19„›Ë8<'윑ÐsFRÿ\\Pÿ\\Pÿ\\Pÿ\\Pÿ\\Xÿ\\Pÿ\\Pÿ\\Xÿ\\Pÿ\\Pÿ\\Xÿ\\´þ¹ þ¹°þ¹°þ¹Ð\14Ȫ¡ª\26Šª¡¦\26•TCE5\20T£zj(§†jjTLMÕRC)5ª¤F…Ô¨Ž\30\18á(_Žêå(^ŽÚåTº\28•ËQ¸œê–£l9ª–SÑrU³\28%Ë©b9\21,GþIàŸ\4þIàŸ\4þIäŸ\4þIàŸDþIàŸ\4þI䟤ü“À?‰ü“È?‰ücÁ?\22ücÁ?\22ücÉ?\22ücÁ?–ücÁ?\22ücÉ?VùÇ‚\127,ùÇ’\127,ã\7:a¨\19†:a¨\19FuÂP'\12u¨N\24ê„¡N\24Õ\9SuÂP'Œê„Q0Íý>\11^´àE\11^´àEK^´àE\11^´äE\11^´àEK^´Ê‹\22¼hÉ‹–¼hyŸ?á>\127Â}þ„ûü\9÷ù\19ïó'ÜçO¸ÏŸxŸ?á>\127Â}þÄûüIïó'ÜçO¼ÏŸxŸ¿îç°~~hýüÐúù¡õó£ÖÏ\15­Ÿ\31Z??jýüÐúù¡õó£ÖϦ~~hýü¨õó£ÖÏ6­ZµjѪ5k-YµbÕ‚µÖ«Z®jµZ‹Õ¦VÕRµVªµP¥Žì‘\29*²CDvhÈN\9Ù¡ ;\4d§~ì\29ê±S\20nŠ’¢½vüV(Óž¯«N\\0¦.\7}ƒ‡\6í\27>šƒ©ÁKƒMû†gs`Û\23¾š\3×`ß`8E\14Bƒcƒ×ö\13æ`kpjðÞ¾á»9\0167ýü[Ý$\24n’ƒ¾ÁCƒÇö\13\31ÍÁÔà¥Á¦}ó9°í\11_Ík°o0Ü$\7¡Á±Ákû†Gs°585xoßðÝ\28toãÙ‹\0\14\7\1õ\4\3Á¨>ˆ&‚…Àh§'‘ÕS_DŽÀ\19\0283\7\ +\4‘`ÕN\15¢ \17ìÚé›(OïÍŸ1?Ae‚\2{Eƒ¢±vüP8)Z\20™Úñ©ÐÖ“_\ +\"¯¨ÌV`P\20\21­µãCá¦()ÚkÇo…Ý›Ÿ\15=(Ó&:¦MØ+\26\20µã‡ÂIÑ¢ÈÔŽO…¶žüRè\20yEÇ´\9ƒ¢¨h­\29\31\ +7EIÑ^;~+,Ó~»\\9o2qÁ}…C…cÓù£â©Â¥BÓt~Vl›Ó_\21»\ +}…â\5Á¡ÂXáÚt~T¼U˜*Ü›Îß\21\31îøù|»Hô+Ìî î+\28*\28›Î\31\21O\21.\21š¦ó³bÛœþªØUè+Ìî \14\21Æ\ +צó£â­ÂTáÞtþ®¸{û}(\25(<Ü¡¸¯p¨pl:\127TÞpìÑ¢ßz\22WæÃ\16ªù Ã×—¿û\127öë«»¾üµÝã)¼º—ÙžÌë6\31gÖpàñu{q?ï¯þZú¿–û\28«›·—1Ÿ\24ÏöÅ\16lñìOëÑç\22·—ô󾞶WO`^Âù ÝòüûŽSo†À\1œ\14u=P<\25¢7sÝ^ÖóÉ\30ǹéÑžú×—0Ç“{h´§õõå¶âkI\16`2?ݯ÷_/Ÿüö#l\31ÿE¼`»‡íŸŸðaûï?ˆ¿aûýõñ\3AmŒÿöÛ\1À\1À\1À\1À\1À\1À\1À\1À\1À!\1\7\0\7Š\3p\4p\4p\4p\4p\4p\4p\4p\4p\4pLÀ\17À‘â\0œ\0œ\0œ\0œ\0œ\0œ\0œ\0œ\0œ\0œ\18p\2p¢8\0o\0Þ\0¼\1x\3ð\6à\13À\27€7\0o Ü@¸0ƒ0ƒ0ƒ0ƒ0ƒ0ƒ0ƒ0ƒ0£KsêÒ\12àLq\0\26\0\13€\6@\3 \1Ð\0e€2¨kPנ+ꮨ»¢îŠº+:³‚°‚°¢3+8+8+8\0278\0278\0278\0278\0278\0278\0278\0278\0278\0278\0278\0278\22\28\11Ž\5Ç‚cÁ±àXp,8\22\28›’c\1´\20\7 \3Ð\1è\0t\0:\0\29€\14@\7 \3Ð% 
\3ÐQì÷\0ôÛ°\13@\31/Øîa\27€Þ?†m\0úø\27¶=¶?°\13œ\0038\7p\14à\28À9€s\0ç\0Î\1œ\0038\7p\14à\28ÀiÀiÀiÀiÀiÀiÀiÀiÀiÀih€>è±q\0¶\0¶\0¶\0¶\0¶\0¶@µ€´¨Û¢n‹º\29êv¨Û¡n‡º\29êvèL\7B‡ÎtàtàtàtàœÀ9s\2ç\4Î\9œ\0198'pNàœÀ9s\2ç\4Î\25œ38gpÎàœÁ9ƒs\6ç\12Î\25„3\8g\16. \\@¸€p\1á\2Â\5„\11\8\23\16. \\@¸€ð\5„/ |AÝ/¾üëý×'\127Øïüá×ûõóãÇ\27lD\30õz\31²7doÌÞ˜½){SönÙ»eoÎÞœ½å“ï×ëýwÚù\17b÷#îB\7×\\mÍÕ¶ìmٳٳÙÛ#wÜ=\2wŸº×??qèÏOŸÁ×»Ëu]ªûxO^Œà\29²wÈ^“½&{G´ô8~\14\3y\28ÃÑ\127ÿÁ‘.—îréSöNÙ;gKö.Ùûú9\12ÃïÂ\8ÃîGÜ…æ¿Çcßã±ïÑì£Ùûyâ¯ÀÏáì‡Ý\18wûË\127üŒ\24ÂÕë\15½ÝG¥'¥Ò«ÒVêÇë«*\127}{¼s=¼Ý\7¥EùQ•Ÿ”^Þî›ÒVi1^£øFñö·ûUi£´“úÑpí$ïqõƒ^•Þ”vR\11~Ð'®\27Õ^«ø­:ÞÕZðOò|õ߆{\4æÇ ô¤ôª´UZñ¨ÿE\31”n”>r}Uí•–üIéEõßëYéMi1\30£òaTûyþ\21=+m¸vª¾Sí9Õ\127'ó\21æÛ ´‘Zä³Ì?Òä?Ž²¿^ëã’×Êþ—ùJºSÇ;u¼—ù\14ÚJ\29Ëû'™&YwJ÷‰W.hÃ.g#rVž#è{yÕ1b\12e\0293qÝZãnó«œÉ\11˜Õ\0142È\28ʹpÐ\26sƪÌX‘ǪÖR•YªÖME6\21‡ž†¤ã´£k¹Šìt™t¾¥ã´ƒËZ:tšŠÜêÖÓý–9½\30Wš/ÒY+ÇjG·Þ§<\127„÷Gäkã:g\"ê’‡¢\29×¹\23\11›YC匕3UÎV9V;Ô·¥š³Ü9VN/kÕçkE\30+gªÆE\15ñÒ±ÚÑ=¤\27!sLÕ\31SqLÕ\31z„ŽÓN]K÷ÇU­;]«Ì‚¥º\26–êjàN[9tšª­V÷'­Ìéõ¹¨çaúyO:V;™¼†OËb\30’3Vev]¦\\{¾›\13JÏJ¯JoJ[©©\15E7JŸ¸¾*þUÕ\31ÕñQ\29_T\127\23Õ¿EõÏ(žQ¼]ågWå]¥e}—fížïÆNj‘2«ö|7\22Ç;Ù^™a{¾;OJ[¥Ô’ߧ™¿‹OOY\27¥Ëøŧ ¤Ùx£¦‡Ô-ÞÖñ¢rŒAž?\19×ijù\23ÂI(Ë\21/IóȦ_\15•²\\ñz4\127ŠÚ„bõŒ`\26Aqâ˜\24C:c6ÿz8\11e…r\\a&Ù˜¹ýñ.¾Mr½)m¥FóYOJS\2²6ê¸Sí¹ê8º™túÖµ‘\26\0190ëFòÒ„gZ\30ïU}šÀîþ+üEI~\20åzUzSÚ*í¤Æ‚•õUñFUŸ¦WÖF•7êø®úëTyÊoÒé\2cZŒ?}»Êº‘í¥ü2-÷ê8å;êrþ\29~\2*ß>¹ž”ž•6J¯JoJ[©uû4^ÒWu|TüQ\29_Tÿ\22Õ¾Qõª¿«ñ쪼«µ•ZðÊùÍšòOú¨Êw’_Îçžoø“ÒFéUi+µl?]oá/·\ +-ª\31Ô–x˜vâã†\19\15ÑN<@;ñ`ìÄã­Ë­£P³P«PŒiø±œcñÐæÄë‹\19fN<–9ñHæÄë\ +”XÉ„SjäÕ«8-\27->·9¬ýÈ-}dÊñÈâ‰Å¦Ä¡?9nYܧ8¶\22ã8ïslK\\ê\26VÞ•\24kLøâ\16Túr\\úRœp^¹SÚgß›\27áÐCvø+?ù×\11á—Úðƒ\15\127P¨<£½+Ѹ÷Œ\23g–ðèNVyN{MÍ£«¬òlí!Û‡Ç1}~ s-©rLåXíP¯¸ÓKg¬ÊÄ\15BÜəᎩœ­rT\127LÊÈQ~haNÉøQ~z‘\14f\12sºŠ\19_”c´C™\15WžßáÊkâ®».|(È¿\24OÒ¡Þ\23'õ¬‹•ûðÇÁwó9ÎëÊH7ÒgöøÜ6ÏmûÔÆ©Ö6]\13Ú¦S\\Û˜\11ÚŽo÷Ïìá¹=>·§ç¶yn»§öÿé ÎÃ\19»}nŸk;LmÓµ®íþi¿Ó\21_Û\30ò9ýy\25\"\19–:ŸÀ\16;üíu¼:½œJ¸åÐ\15ÅŸ ð…Óù)½B„ŸÞÑA¿ì’nâGßøÓ: á2\12\127Ìó\18W÷ûâƒ=üñhhßxáCßÉÐß—8ÉC\15qe†r/ñ\1ôþ\17ƒ.\6D-!s÷\28>þ¦\0167\28~”p)ážÃ0š\20~CèrkéN”Ã\18.%Ü_Jj¸ð\27¸Ïç\0242Šûvx¿x‰KIø–èÃËkø\11q?ÕSèçC<Ñ\127áŸ5b\12tÿ\22Oð[*\18Ô†¾„\25€Õ~$AË\23~•ñyü\31¬U‚Ã", + ["length"]=12893, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=988, diff --git a/tex/context/patterns/lang-nb.lua b/tex/context/patterns/lang-nb.lua index 77a13e69c..18f5fd51d 100644 --- a/tex/context/patterns/lang-nb.lua +++ b/tex/context/patterns/lang-nb.lua @@ -2,7 +2,9 @@ return { ["comment"]="% generated by mtxrun --script pattern --convert", ["exceptions"]={ ["characters"]="abdenrt", - ["data"]="at-ten-de be-tre", + ["compression"]="zlib", + ["data"]="xÚK,Ñ-IÍÓMIUHJÕ-)J\5\0/¨\5Ÿ", + ["length"]=16, ["n"]=2, }, ["metadata"]={ @@ -49,7 +51,370 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijklmnopqrstuvwxyzåæèéêòóôø", - ["data"]=".a4 .aa4 .ab4ba .ab7be .ab6s5t .ac6tin .adel4s .ad8la. .ad6le. .ad2r .ad1s2 .af7f .af5t4 .af5f8u .ag6na. .ag6ne. .ag4r .ai2 .a6k .ak6ka. .ak6ke. .ak3kl .ak3kr .ak6ne. .ak6r .ak8sa. .ak4se. .ak6s3t .ak6ta .ak4te. .akte4r .akter5e .ak5tr .akva7 .a6l .albu5en .alfa3 .al4ge. .al5gi .al6ka. .al4ke. .al4kom .al4la. .al4le. .al5leg .al5lo .all5s6 .al4ma. .al8me. .al5m6in .al4na .alpe5 .al6s7k .als5o .alt4a .al4ta. .al5tar .al4te. .alt3o .al6ve. .am7b .a6me .am6ma. .am4me. .am4s4 .am6ta .a6n .ana3b .an4da. .an4de. .an5d4ra .an4d5ø .and6ø6v .an3er .anes5 .an4ga. .an3ge .an4ge. .ange4s .an4ja. .an4ka. .an4ke. .an4la. .an3n .an4na. .an7nal .ann4e .an4ne. .an5s8 .an4sa. .an4se. .ant2 .an4ta. .an4te. .an5tem .an5tep .an4ti. .an6tin .an4tis .an4tiv .ap8lan .ap1p6 .ap8t .arbi5 .ar6de. .are4o7 .ar4ge. 
.ar5ges .ari6a .ari8e .arie5ne .ar5ka .ar8ka. .arlan9 .ar4me. .ar7mea .ar4na. .ar3ne .ar4ne. .ar5nes .ar6ra. .ar6r7u .ar6ta. .ar4te. .ar3t4h .ar9ti .ar6va. .ar4ve. .ar4ved .ar4ver .arvi6 .a6s .as8ka .as4ke. .as5kese .as4le. .as6pa. .as5s .ast5ru .at4h .at4ki .at4le. .at4na. .at2o4 .at3s .atte4 .att6r .a5ty .au8de .au6e .au8ga .au4ge .au4ka. .au8le. .au4r .au6sa. .au6se. .aus9k .au4sp .avi2 .av7i6d .av7inde .av4la. .av4le. .av7s6 .b6 .ba6by .ba6la. .ba4le. .ba4ne. .ban4k3l .ba4re. .ba4ri. .ba8te. .be4de. .bede4n .be6d5ene .be4d5et .be8di. .be3e4d .be6ke. .be4la. .be9nar .be4ne. .be6ra. .be6re. .be4res .ber6ett .be8ri. .be7ska .be6sten .be4ta. .be6te. .be5t4v .bi6de. .bi6en .bi4le. .bilet5 .bi6t3r .bl4 .bla4d .ble4k3 .ble5k4e .blek4k3f .blekk3s4t6 .blekk9s8v4 .bling5 .bo8da .bo8de. .bogs8 .bok5 .bo4k4en. .bo8la .bon2 .bo4na. .bor6t5 .bort6e .bor8ti8 .bort9r .brudd7s6 .bruddsÃ¥7 .bu6da. .bu3d6r .bu4et .bul4k5 .bus6sy .by6de. .by8ta. .bæ2 .bø6k5a .bø8nen .bø8ner .c4 .ca4en. .car4s5 .ca6se .ce6b .cos1 .co4st .d6 .da4ge. .da4le. .dags6e .da2m .da4ne. .da4ta .dau3s .deka9r .deko7d .de4le. .de4l5ei .del5s6 .de4mi. .demo7e .den5sl .de4re. .de4r5et .de4ri. .de6r3i4 .de7r4i5v .de4r5om .de8ru .de6s5m .de4so .de4sto .de4u .devi5s .di2a .di5e4l .di6er .di4et. .di9f6t .di4ne. .dings7a .di4sk .di8s3t .do4en .do4er .do8et .dome5 .do4ra .do6re. .drau4m .ds4 .du4a .du4en. .du4er .duft3 .du4ge. .duk3n .du4se. .du4st .dy4re. .dø6la. .dø6r3 .dø7r4a .dø7r4e .dø4ve. .dÃ¥5ren .dÃ¥7ret .e6 .eb4ba .ecu6 .ed4da .ed4le. .efo7 .ef4ta .efta5s .ef6ter .eg8de. .ege2 .ege5l .eg6ga. .eg4ge. .eg4gel .eg8la. .eg4le. .eg6na. .eg4ne. .ego7t8 .egs4 .eg8se. .eg5si .ei2 .eid5a .ei5den .ei4ga. .ei7ger .eik5a .ei4ke. .ei3l .ei3m .ei8ma. .ei8me. .ein7as .ei3ni .ein5og5 .ein5s6ta .ein8s7to .ei9rar .ei5res .eir9u .ei5te .eit9r .ekk4o .ek4le. .ek3li .ek6ne. .ek4r .ek8sa. .ek3se .ek8se. .ek4sp .eks6pi .eks3t4 .ek5s4ta .ek8ta .ek4te. .ek7to .el8da. .el4de. .el4g5r .eli5ne .el3k .el5l .el6s .els6a .els4kl .els6t .el6ta. .el6te. .elve7l8 .el6veg .em3b2 .emfa9 .em4ma. .em8me. .em6na. .em5p .en5a .en4da. .en4de. .end5r .en4ga. .en6gav .en3ge .en4ge. .en6g5r .engs6 .en6ka .en4ke. .en5og5 .en6sa .en4se. .en5so .en3sp .en5t .en6te. .ent4r .en6t5ra .ent8re .en3ø4 .ep6t .er8a .er4e .er4g5r .er4ke .er4la. .er4le. .er6ma. .er4me. .er4mek .er4na. .er6ta. .er4te. .es9ast .es1k .es3ka .es8ka. .es4ke. .es5l .es8la. .es8le. .es3p .es6pa. .es6r .es8sa. .es4se. .es4so .es5ta .es3te .es6te. .es7tet .es5ti .es6t6r .es7tu .et8la. .et8le. .et4na .et4ne. .et4s .et8sa. .et5s4e .et6se. .ets4i .et1te4 .et6ter5 .et6ti .euro5 .ev8ja .ev4je. .ev4ne. .ex4 .f4 .fa2e .fag3s4 .fa4ne. .fan3t4o .fe4e .fei8e .feil5i .fe2l .fela9 .fel5l .fel4ta .fe4ma .fe8me .fem5o6g5 .fem5t .fer8ro .fer6s .fe8sl .fe8st5 .fes3t6e .fett5a .fi4b5 .fi6a .fi4le. .fire5o6g5 .fis6k .fjel4 .fjor4d3 .fla4t5o .fo4bi. .fol2 .fo2r .for3d6ri .for7d6ra .for5en. .form5s .for7s6o .fre6e .fri5e6re .fug2 .fu6ge. .ful2 .fu8se. .fy8se .fø8rel .g6 .gaf7 .ga4le. .ga8li. .ga4me .ga4ne. .gang5s4 .gas4 .gas5ta .ga2t .ga4ve. .ga9vee .ge2a .gel4e .ge3ne .ge6ni5 .gen5s .ge4o .ge4st .ge5s6tap .ge9sv .gha5 .gift5s .gi4n .gi4s .gis7p .gi8v3a .gje8n7 .gl6 .glo6i .glo9v .gly5s .glø9se .gnÃ¥7la .go5des .gos7 .gra4v3end .grei4e .grunn5s .grus5s .gu4de. .gud3v4 .gu4lat .gus1 .gu4tu. .gø2 .gÃ¥s4e5 .gÃ¥4v .h4 .hai5s6 .ha6ka. .ha8ma. .hand5s6l .handsla9 .ha6v5ak .ha6vi .he4er. .hei5e .hel3u .he2n .hen3i2 .he6r5i6 .he6r5o6 .he9r6oe .he7r6oi .he7r6ol .he9r8os .he8ru .hes4s5 .het6s5 .het7s6e .he4t5 .he5t6e .hil4l .hi4n .hi6re. 
.ho2d .ho2e .ho4re. .ho5ren .ho7ret .ho8ta. .ho8va. .hu4di .hull7s .hu4s3en .hus6val .hvit3 .hvi3t4e .hvo8r5 .hy6ra. .hy4re. .høi5 .hø4re .hø4va .hø4ve. .høy6st5æ .hÃ¥8en. .hÃ¥8er .hÃ¥nd5s6l .i2 .i3a4 .i3bl .i4da .ifø5re .ig8la. .ig4le. .i4her .ik2 .ik6k .ikke5 .ikkes8 .i3k6l .ik5t .ik8te. .i5kv .i3la .i4la. .i8lande .ilbo8da .il6d3r .il6de. .i3leg .i6les .il4ja .il6je. .il6ke. .il4le. .il6sk .il4te. .i6me .imø5te. .in5ad .in5de .in8ga. .in4ge. .in3gr .in6gri .in5k6 .in6ka. .in4n3 .in4ne. .in5n6e .in6n7eks .in6n7ets .in6n7ett .in4n3i .in5s .in7tes .io4na .io4nom .i4rer .ir7k8 .ir8ra. .ir6re. .i6sa .i3sc .i6se .is7k6 .isla8ga .is6lam .is4le. .is4let .is4me. .is8na. .is8ne. .is5pa .i5s8pe .is3s4 .is6sa. .is4se. .is5sk .ist6 .is8te. .i5sted .i5stem .is7ti .i6s7tj .i6s7tr .is8ut. .i6s5ø .i5ti .i3va .i4van .i4var. .i3ve .i4vel .i6ven .i4ver. .i4vere .iv8ra. .iv6re. .i3ø .j4 .ja4de. .ja6e .ja4ne .ja8se. .ja4va. .je2a .je2l .je2m .je5re .jern5s4 .je4ta .ji4 .jo8en. .jor8d7r .jor8d7s8 .jor8d7u .jo4se. .ju6a .ju6la .jule3 .ju4li3 .jø4dep .k6 .kaf2 .ka6la. .ka6li. .kal7t8 .kalve5l8 .ka5me .ka3na .ka4ne. .ka4p3r .ka4ra. .ka5r6a5v .ka6re. .ka5rin .kar8t7r .ka4te. .ka5t6h .kau9k .ka6va. .ka4ve. .ke6e .kel3 .kier3 .ki4na .ki7ni .kino3 .kir2 .ki4se. .ki8va. .ki6ve. .kje3de5 .kje4k .kjek7l .kjønn4 .kle4s .kli5ne .klo9va .kly7sa .klø7s8 .kna7se .kne4p3r .kne7r8 .ko6da. .ko7gr .ko4la .ko3le .ko8le. .kol5j .ko3pe .ko6pe. .kor6s5ed .ko4se. .ko6ta. .ko4te. .ko6ve. .kring5s .kru4s3t .ks6 .ku5f4l .ku4le .ku8ra. .ku4re. .ku8ta. .ku8te. .kva4r1a .kve9ka .kve5ke .ky8la. .ky4le. .ky6te. .kø3s .kÃ¥6pa .l6 .la4da. .la4de. .la4ga .la4ge. .la5ger .la4ma. .lapp8e7 .lap5s4e5 .la6ta. .la4te. .la3tr .la4va .la3ve .la4ve. .la6v5æ .le8ar .le4da. .le4de. .le4er .le6et .le2f .lef3l .lega5ta .le4ge. .le4gel .le8gi. .lei8e .lei7er. .lei4ve .le4ke. .le4k5r .lemu9 .le4ne. .le6o7 .le7s6a .le4se. .le4sek .le4ses .le4s5p .le2t .le7ta .let6tan .le2u3 .le3va .le4va. .le4ve. .le4ves .li8a .li4de. .li4e4 .li5ene .li2g .liga3 .li2k .lik5k6 .lik3o .lik3s4 .li4ma .link6 .li6ra .li4re. .li4sa .li4se. .li4ta. .li4te. .li5ti .li4ve. .li4v5en .liv8s7u6 .liv4s5v .lo5e6 .lo6ge. .lo8gi. .lo6g5r .lo4i .lo6na .lo8o .lo6ri .lo8sa. .lo4se. .lo6te .lo4va .lo3ve .lo4ve. .lu4d .lu2e .lu4na .lu6pa .lu4pe. .lu6ra. .lu4re. .lu6se. .lu4ta. .lu9tas .ly4de. .lyd3s .ly8e .ly8ge .ly4se. .ly4sk .ly5s6e .lys5s6 .ly8str .ly4te. .ly4ve. .lø8de. .lø6en. .lø8ne. .lø6pa. .lø4pe. .lø8s7a .lø8s7i .lø8s7m .lø8s7p .løv5i .løye5ne .lÃ¥8ma .lÃ¥5re .lÃ¥6re. .lÃ¥4te. .m8 .ma4ge. .mag9r .mah6 .ma3ka .ma4ke. .mak6t5at .makt5s .ma4le. .ma4li. .ma4na. .mande8l .ma4ne. .ma4ni. .ma4ra. .ma4re. .ma4ri. .mar8sj7ø .ma4sa. .ma4si .ma6st .ma8ta .ma6t8h .mat5t8o .ma4v .me4d3 .me6d5ei .me6d4i .me6d7in .me6d5r .mei5er. .mei5et .mek5l .me6la. .mel3l6o .me4l5ø .me4ne. .me3ri .me8s7al .me4sk .me2t6a3 .me3ta4ll .mes5ti .me4tri .mi6kj .mi4le. .mi4me. .mi4ni. .mi4nik .min4k3 .min5k4e .mi6s3t4 .mne6 .mo5de .mo6er .mo4na. .mo8ne. .mon4st .mons6t5r .mor5d6e .mo4re. .mor3t6a5 .mo4se. .mo4ta .mo4tek .mo4ter .mo4tr .mo5v .mu8ga .mu8ge. .mu6le. .mul5es .mur7 .mu5r8e .my8ka .my4ke. .mø4re. .mø5res .mø2t .mÃ¥6la. .mÃ¥l3o .n8 .nabo3 .na6ge. .nak2 .na8ke. .nan5s .na4r .na4sa. .nat2 .na3tr .nat6tr .ne4de. .ne4den .ne4d5i .ne4d3r .ned3s4 .ne4f5r .ne4g7 .nei5e .ne6k6r .ne4o7 .ne4o8n .ne8pa. .ne6pe. .ne2s .ne4t .ni8ar .ni6er .nig4l .ni4na. .ni5o6g5 .ni4pa. .ni6pe. .ni8sa .ni6se. .nita9 .ni4to .ni4va .no6de. .no6e .no8ka .no4me. .no8mi. .no4r .no8se .no8si. .no4va. .nu4e .ny5a .ny3o .ny8sa. .ny6se. .ny4te. 
.nær9ast. .næ4re. .nød4d .nø4re. .nÃ¥4de. .nÃ¥6le. .o6 .obo5e .od4da. .od4de. .odel8s7 .od8la. .od8le. .offsi6 .of4te. .of5teb .og4 .ok1k .ok8ka. .ok8ke .ok4se. .old3s .ol4ga. .oli5 .ol8la .ol8le. .oli5v6 .ol6m .om1 .om4ar. .om6bo. .om4e6n .om4gÃ¥. .om4me .om5s2 .on4de. .on5des .ond3s .on6kl .on6na .on8ne. .op5ar .op4pa .op4pe .opp7el .op4pi .opp3l .oppla8te. .opp5s6 .op7r .opt6 .or4da .or4de. .or4del .or4dr .ord5s4 .or5ga .or6ka. .or4ke. .or6me. .or4re. .ort6 .or6va .osa5 .os3el .os4k .os4s .os4te. .ot5r .ot7t8a .ot4ta. .ot4te. .ot4tes .ot3to .ot4to. .ove6 .over3 .overen6d .over3i4 .over5k8 .ov8re .ov5u .ov6ul .p6 .pa8la. .pa4le. .pal6i .pa8n7a .pa5n6e .pa6n7eu .pa5n6i .pa6re. .pe3do .pe4ke. .pek5t .pel4sj .pe4ne. .pe2p .pep3r .pe8ri7 .per6m7 .pe8sa. .pese5t .pe4st .ph2 .pi6le. .pi6p .pi9ra .pi2s .plen5g6 .plus6s5 .plus7s6e .plø7se .po4et .po3la .po8la. .po4le. .po4log .po2p1 .po2p2e .pop3p .po3p2u .po8ra .po6re. .po4res .po6st .po8ta. .po6tek .pro7k8 .ps2 .pøn3 .pøn9s6 .pÃ¥5d6 .r6 .ra6da. .ra6et .ra4ga. .ra4ge. .ra4ja. .ra4ka .raks7 .ra2m .ra4na .ra4ne. .ran7s6a .ra6pa. .ra6pe. .rap4s .rap5s4e .rap5s4o4 .ra4re. .ra6sa. .ra7ti .ra6va. .ra4ve. .re4al .re6da. .re4de. .ree6l .rege4l .re4gi. .rei2 .rei7de .rei5er. .re4ins .re6ka. .re4ke. .re8klar .re8kle .re4kli .re6kn .re8le. .re4ma. .re4na. .re4ne. .ren6sk .ren4t .rep5s .res9s .re5s6tan .rett7s8kri .re4ve. .re4ves .rev9n .ri2d .ri8e .rie5ne .ri6ka. .ri6ma .ri4me. .ri4pa. .ri4pe. .ri4p5o .ri4sa. .ri4se. .ris5ko .ri4s3ø .ri4ta. .ri6te. .ri6va. .ri4ve. .ro4an .ro4de. .ro8di .ro4e .ro8ke. .ro4ma. .ro4mi .ro6pa .ro9par .ro3pe .ro4pe. .ro4sa .ro9sar .ro4se. .ro4sed .ro4ta. .ro4te. .ro4tek4 .ro8va .ro3ve .ro8ve. .ru8ga. .ru4ge. .rug9le .ru8ka .ru4na. .ru4ne. .runes6 .run7g .ru4se. .ru4sk .ru8va. .ru4ve. .ry2e .rye7ne .ry6ke. .rød2 .rød7s4 .rø8le. .rø4re. .rø1v .rø8va. .rø4ve. .røy7e .rÃ¥8da. .rÃ¥4de. .rÃ¥4en .rÃ¥8er. .rÃ¥g8 .rÃ¥6ka. .rÃ¥4ke. .rÃ¥3k4l .rÃ¥6na. .rÃ¥8sa. .s6 .sa4ge. .sag6n .sa4la. .sa4le. .sa4me. .sa4mer .san6d7 .san7d8e .san7d8i .san8s7ku .sau6e .sch6 .se2b .se2e .se4i .se4k5l .sek4s5o6g5 .sel4v5 .sel4v5i6 .se6na. .se4ne. .sen7s .se4ra. .ser7vel .se4te. .sha9ke. .si2d .si6er .si6ga. .si4ge. .sik5k6e .si8la. .si4le. .si4ne. .sinn6s5 .si4ra. .si5str .si4va .si4ve. .sje4i .sju5o6g5 .sk6 .ska7ka .ska5ke .skat4 .skei5d .ski6n .skinns6 .sko7de. .skog5u .skritt9s8 .skudd5s6 .skuddsÃ¥7 .sku6m5 .sky6f7la .skÃ¥5ra .sle6s5v .sli6b7r .slim5 .slotts5 .slu9ka .slu5ke. .slø5se .sma4s .smas5k .smi6g7r .smo9g .smÃ¥3 .smÃ¥7k6 .smÃ¥9l .smÃ¥5t4 .smÃ¥t5t .sn4 .sne3 .sne4k .snitt5s6 .snø3k .soa8 .so8de .somma4 .son6a .sop4pa .sor4t5 .so4ta. .s8p6 .spa5ra .spe6e .spi9la .spri5s6 .st6 .sta9ka .sta5li .stat4s .sted4s .ste4i .stein7a8 .stein7s8 .stem4m .stev9na .sti7me .sto5ne .sto6ra .strek5s6 .stu8a .stu9va .stÃ¥l5l6 .sund5s6 .su4ri .su5te .sv8 .sva5la .sva5ra. .sva5re .svar5s .sver8n .syd5 .sy3d6a .sy5d6e .sy6na. .sy4ne. .sy5ter .sy3ti .sy6v5 .sy6v5o6g5 .sz6 .sæ2 .sær1 .sær3e4g .sø8ka .sø4ke. .søl6v5 .sør3a .søye5 .søy6en .sÃ¥5pen .sÃ¥3re .t8 .ta8ke. .tak9r .tak5sk .tak9s8p .ta4la. .ta4le. .ta4les .tal4li .tam5m .tan5de .ta4p5r .ta4ra. .ta4re. .ta2s .te4e .te6i .tei9er .te8ke. .te4ma. .te6o .te8ne. .te4se. .te7si .te4sta .te4str .th2 .ti8a .tid5r .tid6s3 .ti4de. .ti4e .ti4g3r .ti4ki .ti4l5 .tili6 .ti4med .ti4na. .ti4ne. .ti4p5l .ti4s .to5arm .to6en. .to4er. .to6et. .tokk8 .to6le. .to6na. .to5o6g5 .to4ra. .to4re. .to6reb .tor6g5a .to3ro .torsk6e5 .tor4s5v .to5rye .to4str .tota9la. .to5t8 .to8te. .to6va .to4ve. 
.tr6 .tra7c .tre3b .tre7p .tre5o6g5 .tre6skja .tre3s8ko .tre7sl .tre3sp .tre5s6t .ts2 .tsj2 .tu4en. .tu6na .tu4ne. .tu6ra. .tu4re. .tu8ve. .tve6n .tver6r5a6 .tvÃ¥7g .ty6da. .ty4de. .ty8et .ty8re. .tyr8s9 .ty5ter .ty5ti .tæ4re. .tøv9d .tø6ve. .tø9ver .tø5vet .tÃ¥5ker .tÃ¥4le. .tÃ¥p9n .tÃ¥5ren .tÃ¥7ret .u5a6 .u1b2 .u6berg .u3d2 .u7de .u3e6 .u4er. .u6era .u4ere .uf4f .uf8sa .u4ga .u5gi .ug6la. .ug8n .u1i .ui6m .u1k6 .u4ka. .u6ke .u7kn .u3le .u4le. .ule8k .ulen4d .u6lende .u8lene .u6let .u4lin .ul6ka .ul4ke. .ul4la. .ul8le. .ul8ma. .ul4me. .ul4ne. .ul5t .ul6te .u1lu .ul4v5i .u1ly .u3lÃ¥ .u5ma .umu7 .u2n .unde6ri .und5r .une8 .u5nek .u9net .un4ge. .un2n .unn3s .u3no .un4se. .un4t .u5nu .u1o .u1p .u4pi .up2l .up4p3 .u2ra .u5raf .ur6an .u6rane .ur4d .ure4n .u4ret. .u6rets .u2r2i .ur4ke .ur6na .ur4ne. .u1ro .ur1s .ur8ta .ur4te. .urte5m8 .ur6tet .u5rut .u3ry .u5rÃ¥ .u1s2 .u4sa. .u4sas .u5se .u3ska .u5sn .u7sp .us3se .us6t4 .u7stek .u5stel .usy5r .u2t .uta4g .ut5age .u3tak .u4ten. .ut5esk .ut6e7sko .u4ti. .ut3kan6t5 .ut4ne. .u5tol .ut3o6v .ut6rer .ut6rov .u3trø .ut6se. .ut4si. .ut3t4 .u3tu .v6 .va4da. .va4de. .vai4 .va4ke. .va6les .valg5s .va4ne. .van4n5 .vann5s4 .va6reta .vari4e .var5sk .ve8en. .ve4ga. .ve6ge. .ve4g3i .veg3s4 .vei3g4 .vei3s .vei4ta .ve4l3 .ve5l4ar .ve6l5art .vel5d .ve4l3e .vel5l .ve5l4os .ve4l5ov .ve4ly .ve8læ .ve4ne. .ve4net .ve4ra. .ve4ras .ve6re. .vermo9 .vers4t .ver5ste .ve4sl .ve4st .vete5 .ve8te. .ve4ve. .vi4da. .vide4 .vi6de. .vi4d5Ã¥4 .vi2e .vi4ka. .vil5l6a5t .vi6ma. .vi8me. .ving7s .vin5n .vi4sa. .vi4se. .vi6seri .vi4ta. .vi4te. .vi5tr .vok4s3 .vok5s6e .vo6r .vo6ta. .vy4 .vye5ne .væ4ra .væ4re. .vÃ¥4d .vÃ¥4r3 .vÃ¥rs6 .wa4r .we4g .wi6e .xe2 .y6 .ye4 .yn8da. .yn4de. .yn6ge. .yn8ka. .yn6ke. .yn4kv .yns2 .yp8pa. .yp4pe. .yr8ja. .yr8je. .yr8ka .yr4ke. .yrkes5 .yr6t .ys4 .ysterie8 .yt9ren .yt5t .yt6te. .yv6 .z4 .zj8 .æ8 .ære4s5a .ære4st .æt4te. .ø6 .øg8la .øg4le. .øi5 .øi6e .øko5 .øk6ta .øk4te. .øl3ed .øl3s6 .øl3v6 .øm4me. .øres8t .ør4je .ør5k .ør8na. .ør4ne. .ør6ski .ør9sm .ør3st .ør8ta .ørt9an .ør8te. .øs2 .øs4t .øv4d .øve4r5 .øve4r6s .øv4re. .øy6de. .øy2e .øyele8ge. .øy5ene .øy4n .øy6na .øy6ra .øy4re .øy8rer .øy4str .Ã¥2 .Ã¥6e .Ã¥6f .Ã¥6g .Ã¥6k4 .Ã¥8l .Ã¥l6t .Ã¥l8ut. .Ã¥8m .Ã¥n8da. .Ã¥n4de. .Ã¥nd6s5 .Ã¥p6na .Ã¥rs3k .Ã¥6se .Ã¥s3k .Ã¥s7l .Ã¥s3m .Ã¥3ste .Ã¥s5v .Ã¥4t .Ã¥t4te. .Ã¥tte5o6g5 .Ã¥6v a1ad 4aaf a3aft aa4g a1aks aak5v aa3la aa2m a1a2n a6an. aans9t a7antr a1ap aa4ri6 aarie9ne aar5n aa2s 3aase aa5t6h a1av a6bab ab7av ab9b8l ab4but abe4lei abel5t4 abe9na. abes9ka 4abev a5bh abie6 abi9er abi9la a4bist ab1l ab9lara ab4lok abl7u a5bo9a abo3b abo3e a3boer abo3kl a3bord 5aborte ab1r ab8re ab2sl abu5e a4buel a4buf a6busk a4bute a4by. a4byb a4byk aby3r 8ac aca5 a6ca. ache3a ack3 a1co 6ada a5dal ad5ant a4dart ad9da a2deb ade5i6s a2dek a4del a4d5elv a5dend a3dene a2deo a2dep a5der. aderle7 a2des a5de4ser a3desl a3det. a3dets 3adfer 1adg ad8ge adi4ene ad1j 1adju 2adl 1adm ad5n ado7a a4dob ad7opp 4ador a7dora ad5raf ad7ran a2dre ad7rel ad5rep ad3ret ad3rid ad1ro a7drø ad4s5amt ad4si adsle6ge. ad5s4let ads5te ad5s6tek ad3str ad5un 1adv adva5re 2adve a4dy. a4d5øy 2a1e2 4aea a4ed 8a7ede aed7r aek5t a4el. ae5la ae3li ael5o aen5t a3ep aes8ke. aes4t 6afa 6afc a4fe. afei5 af4fan af4fei affe3s af7fi. affi3d af5f4u af4i afia1 afi5an a2fib a6f5inn afi7re a4fiti a5fjo af9la. af9lar af3le af5li a4frik 6afo afo7ra. afo7r8e afore5ne afra5s 1afri afs1l aft5ei af4t5e4l af6t5o aft1s4 af5yr afø4r afø5ri 4aga a2gaa a5gae a4ganf agang7s8 a4gart a5gas. aga6ve. 
ag5de a7gelen a2gem 3a4gent age4r3a age6rek age5risk a7gers a5geru age5s2 a4geta a4gé ag5gar ag4gas ag5ge agg1s2 agg7u ag6gut a4gi. ag5id agi6s ag1la ag4lem ag6lesa ag5lÃ¥ agmo8e agn5om 4ago ago5d8 ag1or a5gos a3got ag5ov ag7ras ag1re ag1ri 4a3gru ag1rÃ¥ ag4samb ags4ang ag3s4ei ag4sel ag3sem ag5s6ing ag6sju ag4ska ags4kul ag2sl agsmÃ¥6la ags3tr ags3t2v ag2sy a6gu. agu3ay agÃ¥8va agÃ¥8ve. a1h a5hi ah4n5 ah4v ai1a4 ai3e2 ai3er. ai4is ai5ke. ai5ko ai9kv ai5ne a3ing a1inn ai4n5o4 ain8sm ai1ro ais4e ai4s3k aisk2h ai8sm ais6om ais5s ai5s4v ai5ve ai5ø a1j aja9d a7je a8je. ajes7 a4jé a4ji a6jl 6ak. a1ka 4aka. 1a2kad 6akaf ak3aks 6akan aka4o5 4akar ak6arb aka4t5r ak4au 2akd 2ake a9kec a2keh a3kei5 a6kek akel8e akelei9er ake5l4i ake4rek a4kerø ake5sm akes6p a8kete akhe6n 4a1ki a6kid a6kik ak7ind akis1 2a1kj akk6and ak5ke. ak3ken akk5erst ak4kes akki4 ak5kim ak4k5is ak6kj ak1ko akk3ol ak6k5ri ak1ku ak4kul ak4k5v 2ak1l ak3lev ak6lik ak6lus 6akn ak5ne a5kno 2a1ko ak5om. a5kon a7kos ak1o2v ak4pe6 akra8sa ak5reg akregi4 ak3res ak3ro. ak3roe ak1ru ak4s5and ak4sek akse3l4o ak4s5elv ak4ses ak7s6id 3aksje ak2s1k ak7sku ak6sl 4akso 4aksr aks5ti ak5stol aks4tr akst7ren 4aksu ak4tab ak4tai ak4tak akta6le. akt5all ak6tam ak6tans ak4tap ak4tas ak4tav ak4teg ak4tek ak5teme 4akth 7aktig ak5tit aktle6ge. ak2tr ak6tre akt3rÃ¥ akt5s4la 5aktue ak8t7æ akut2 2akv ak3val ak5øl a5kÃ¥ 8ala. al7adr ala4g ala9ga a5lagm alag8ra a5lah a1lai al3all al3a4me al3ana a3land a5lande. a5lane alan5gr al3anl al3anv ala4o ala5pr a5lar. a3lara a5laren al3ark a7larl 3a4larm al5arr a5lars al3art a7larv al3a4si 2alat ala7tr 4alau al5auk al1av ala4va al4ba. albo4g albu7er. 5album al3de al7der 4aldo ald3re 2ale a2lef a2leg a9leg. aleie6n a9l8eik a5leke a4leks a4leli a2lem al5e4mu al8en. a4l3enh a4l5en5tr ale4pos a7lept a4lered alere6de. a4le5ro a4leru ale1s2 ale7se a4lesk ale6s5kr a4lesl a6lesu a4leta a4le3te a2leu ale5v a4leva a4levi 3alfab 2alg. al3geb al2gu al4gÃ¥r al3ill a2lim a9lin. ali5na. a4l3ind a4l3ins al3int al9ja. al3je. alj5end al3jer al1jo al4jor al2j1u al1jø alj5ø6v al9kera alk7s6 alla4ga alla6ge. al4lap all5art al5lea alle6ge. al4lek al5len. all5erst alle3s4 al4lest al5let 3allia al4lid 5alli5e alli9ne al6list. al4lo3m all4sen all4sti al6lul al6løs. al4løse al8løst al6lÃ¥ 8almal al6mek al4met 4aln a5lo. al1o4b a5loi al1om a1lon al3ope al7ord alori5 alo6rit al3ove alow7 alp2 al3ps 2als al7sed al9skap al7skare als1l als5lø al2sn al6spu als4te als6ter 6alsu alt6ak alta8le. al3ted al7tema 5alterna alte4t al4t5eta al4t5ete al4t3op a4l5u4k alul8la al5und alu8re. al5va. alv5aks alvak8se. al4ved al5v6er al8v9er8m alve5s al9ves. alvi8se al1vo alv3s 6a1ly a2l5y4te alø5se al3øv a1lÃ¥ al7Ã¥6t a4mah ama5is a2mak a5maki am3aks a2mal am5ald a3man a3mar. a5mara a7mas. amaso7 3a4matø am4bar 3ambas am4bat am5be a2mei am3eie a3meis a2mek am3eks amen8de. amen4s3 amen6t7arv ame4ram a4meret 3a4meri 4amerin ame5u amhu7 2ami ami7na ami7ne amisk3 amis4ku am4lese am6lest am4mad 6amn. am4ned a2mo amo5e am1op am1or amo6ve am4pap am4pare amp5ei am8peria am6perie ampes6 amp7inn am2p3l am5p4let am4p4re am4pun am2pÃ¥ amru4 am5rÃ¥ am1s am7s6ku ams4l amst6 am4s3tr ams9ut ams2v am4s5ve am2sø am3ti amti5da 5amtm am7tv am6ul am5yr am5øy 6ana. anak8te. ana3la anal4f 3analy 8anan a3nane a5nang anant8 4anar a6narb an5art ana3to 6an7au anaus7 an9av. 3anbef 4and. an4da5m6 anda5ta an4dek an5den anderle7 an9det 6ando an4dos 4ands and4sel and8send and7slet and5s6tre a2ned an5egg a4n5elv ane8l7u a2nem 2anen 2aner a8n7erme ane3ru a5nes. 
a4nest ane5sv an5e6tika a5netike an5e6tikk an5e4tis a2nev 3anfal anfø5re. ang4e an4ged an4g5enh ang5erme an8geste an4gi. 5angiv ang5of an5gos an4g9ra an4gre. an6gres an4gret an4g3ry ang4sm angs6tro angs8t9rÃ¥ angst5y ang5sva ang6søy ang4t5re ang5t6ve an4gun an4gÃ¥. ania7 a6nib a4nisj 4aniv an4kana 4anki 4ankj an4kob an4kop an4kos an2k1r ankra8na. ank3re ank3ut 8an2kv an4kø an6k5Ã¥6 2anla anland6 anlø9pa ann5ans an6neme an5nen an5ner an5nid an4ninn an4nom 5annon an4nov ann4sei ann4sid ann6s7kÃ¥ ann6sl ann3st ann5sta ann4s3u an4ny an2nø a3no. a9noa. ano6deb 2anog a7norm a6not a2no4v ano5va an3ove anri7ke ans5aft ansa7ka 4ansan 3ansat an4sek an4sent anseri8e9ne an4ses 3ansik ans5ind ans5ins an4ski an3skj ans4k3l ans6kun an5skø an6slø an4sn ans5or an4s3pi ansport7s6 an1st6 6anstar ans4te4 an8s7tu 6ans6ti 2ansu an9s6und 5ansunda 1an1sv 4ansve 6ansy an4sÃ¥ anta8la an5t6and an4tans an3ted ant5emi 5antenn an4tesl anti7kl an4tim an5toi ant5ord ant5rab ant7rom ant5ryg ant5s6 an4tul antus4t 2anu anus7a an5ut 3anven 6anvin 6any a4nya a2n1æ2 anær8 2anø a2nøk an7Ã¥ a1o ao9a aog9 a2oi ao6k6 aon8de. ao2p ao4r a5pea a7pé a1pi a2pia ap1id a6pik a6pins a2pio api6r5 api7r6e api7se ap1j a1pla a5p6las ap3li ap9lo ap2ly 4apol a4pon a4poo apo3p apo5s4ti a2pot 3ap3par 4appar. 6appare app7esk ap5plas ap4ple ap5pli ap6pri ap3ra ap5ren ap5ret ap3rin ap2s1 apsa4 ap3sel apse4s ap8s9l a3psy ap4s5ø 8apu a6p5ut a1py ap7ø a3p8øls a5pÃ¥. a1ra ara9bi ar5aks arak5t ar3alt a4rami 2aran a4ranor a4rans a4ranv 2arar a4r5ass ara5te. ar5aug ar7auk arau9ken ar7avh 1arb 2arba arba8ne. 6arbeh 4arben 6arbet 2arbi 4arbj 6arbl 2arbo arbo8da 2arbr 2arbu 2arby 2arbø 4arbÃ¥ ar7deb ar6deli ardfø5re ard3re ard5sta a1re 1a2rea 4arear a4reb areba4r a2red a2ref a2rei are3in a2rek a6r5ekt a4rela ar7elek a4r6eli ar7emn a5remo. 3arena a6r5eng a4reni aren5tes a2reo a2rep a6rerel a4r5erfa a6rerk a4rero a6rerø a5res. ares8ka ar4et a7reta. a4rev ar7e6va ar3evn arev6ne. 8a1ré 6arf ar7g6h ar5g6i 6argj arg4l ar7go arg5stj 2a1ri aria7ne a3rib ariba9 ar5idr a4rim ar3inn ar3ins arins9k ar3int ari3se arise4a a4risto a2riv ar4kau ar3ke ar4ke5s ar5kha 3arkit 3arkiv ar4kle ar4k5løf ar6kod ark6ste ark4str ark7veg ar4køy arla4ga ar4map 5armbÃ¥ ar4medi arme7t ar6metr ar4minf armle6ne. armÃ¥6la 2ar2n ar4nad ar4nal arneva7la ar3ni ar3no ar3nu ar3nÃ¥ a1ro. aro8de. a5rok a1ron ar1op aro6pa a4r1o4r a1ros ar7ost a1rot ar3ove ar6ped ar9po arp5ret 1arra ar5ree ar7resk arri8e7 arrÃ¥6da 2ars arsa6ka ars5ans ar3sel ar6s5ers ar4sin ars5kam ar5skar ars5kes ars7kre ar4spr ars6tr ars4vei ars7æ arta4la arta6le. ar3te ar7tele 8artets art4ha. art4has ar4tika 6ar4tim 4artn 4arto ar4top 6artr art9ra ar6trin art6s5t arts5ø 6artu 8arty ar4tyv ar1ul ar1un ar5u6r a1rus ar5utb ar5utn arve3s ar6vete arvi8sa arvi8se. a1ry ar7æ6 arø8ve. ar1øy a1rÃ¥ arÃ¥8de. ar7Ã¥p a6r5Ã¥t 6as. 4a1sa asab4 asak4 a4salo as6an as7aug asbe2 a1sc a2se. asea4 a2sed a2seg a2sek as4el ase5le a2sem a6senet a4senk a7seol a2se5s a6sest a4sete a2se3u as2h as5hu a4sib a2sip a2sir a7sis. asis5t a2siv as4ja a6sjett as5jor as3kar ask6et as5keti ask2i as5kis a5skj as5ko. as5koe a4s5kopi as3kor as3kot a1skr as7kra as3kul a5s6kula as6kule ask9u8t as4kv ask5øy as1l a5s4lag asla8ga as4lev as4lit a1so a4so. a2s5om as5ov as4pan as3pe as7pis as5pl as9sa. as4sab as4sal ass5ald ass5alt as4sam as5se. as4sed as4sek asseri7e6 as4serv as4ses as6sifr as5sing assi3s as4sit as2sj as6s1k assku6le. as2s3n as5so. asso9a as7sos. as6s1p as2s3t as4stan as4str ass5tru as2s1v as4sys as4søk as2s5Ã¥ as1ta as6tab a6stande a4statu as5te. 
as3ted as4teg as4tek as4t5enh a1stj a9stof a5stok as5t6os ast5ov ast5ren as4tro as9tua astu8ve. ast5ø4v a1su asu3n asu9sa as1va a6sym asyn7d as7øye as5Ã¥ 4at. 4ata1 a5taene at6af ata8ka. ata8la. a4tang ata9rar a6t7arv atas4 a5tas. atat8 a2tau at5avh atch5 a2tea a2tec at7edd a2tee a4tei at5eid at3eig ate3in a4teka ate5k8e ateke7ta a6t5ekte ate7le at3emb 4aten a5tene. a4teni a4tenu a2teo 4ater a5ter. a6terat a8terek a4teril a2tes a4t3ett at6e5u a4té 3atfer at4ha. at8has 4ati atik6ka atili5 ati5nea ati4rep ati8sta at3jo 5atlant atl9øy 3atmos at4nel ato5a ato5gr at1oi a2tom a6t5oppr a1tor a5tose a3to1v a1tra at5reg at3ren at3rer a4trif at3rin at5rot a9tru at5røs at5røy 2ats at3ser at7sje at7s6kat at7skj ats6kul at2s3ø 4att. at6tat 4at5te. at5tens attfø7re at4tid atti4s att5ise at3tit att1o att7o6p atto6v at1tr at4traf at4t3re at6trin att3s6k att5s8l att3sp att3sv at6tys at2t3ø2 a1tu atu5e4 a2tut at5v atvi5er. a3t3w a1ty atyr8ke. a5t6yv a1tø atør3s atø9se a4t5øy a3tÃ¥. a7tÃ¥a a1tÃ¥e at5Ã¥r at5Ã¥4se at7Ã¥t8 4au. 8a8ua au9ar. 4au5b auba6ne. au3c au5da. au9det au4di. 4a6ue au7en au7er aue5re au3est au6e5ta au5e6te. au4gal au4gas au4gel augele8ge. 5aug2n 3augu au5i au5keli au5ket auk5la 1auk7s6 au5kve au6las au4lat aul5l aul8la au5lu au9men au6mo aum5s6k aum7s4t a5und au5rae aure5s au5ret au5ri aur5s6 au1ru auru4e au1sa aus9kj au6skr au1so aus5s au6s8tas aus8tett austi6s aus6t7ise aus6ton au5str au6stra au6stri au1su au4sun au2s5ø4 aut6a au3ta. au7tar au5te 1auto auto5v au3t4re au4ty 2aux a1va av5ab a5vae ava7g 6aval av3alv a4v3and av3ang a4v5anl 3a4van5s a9var. av3art avar6ta. 4avas avat6a 5avbi 1avd 3avdel 2a1ve a2ve3d2 a5vede. a7veil ave3in a2vek a4vela a4vele a5veleg avens4 a5ver. ave7ras aver5d a6verei a2ves a2vev 1avfa 1avg avi4ar 4a5vig a1vik avi9ke a1vin a4v5int 2avir 5avisa avi5sa. 3a6vise av5ising avis3t a5vit avlu9t avlø5se 6avn av7na av4nes a1vo a4vok avo3r avo4v av1r av4res av5ri av1sa av5seg avs2i avsko7g 3avsni av1s2p avspe9g4 avs4te av5su av1s2v 1avt avta9ka 2avu 1avvi av3øl 4a1vÃ¥ a6v7Ã¥6l a1wa awat4 a5we awe9ne a1wi ay2a ay7ane. ay5ar ay9s8t a5y4t a5zu azz3o a1ø a7Ã¥6 1b4a ba3a baby5 ba4bys ba5cl ba2d ba4da. ba7dan b5add ba8de. ba4deb ba5den. ba7dens ba3di bad1s 4baf ba2k ba5kan7 ba3kar bakars7 ba3ken ba4k5end baken6de. ba3ker bake3s ba9ket. bak9ett ba3ki ba8ki. bak6kers bak6ko bak5kr 4bakr bak4re ba5kri bak3ro bak3sm bak5sp bak5s6ti bak5str bak7t6 bak1v ba3la bal7ak ba4les ba4li. bal3j bal4lag bal4lan bal4led bal4leg bal4lei bal4lev bal4lig bal6lov ball5s6 bal6læ ba1lo bal5t ba1lu ban4a ba4na. ba5nan ba5nar ban9da ba4nel ba4nes ban6kap ban4kor ban2k3u bantu5 ba3re baret5t ba3ri bari6e7n bark5s bar5skr bar5tr ba5ru ba5sen ba4seru ba4set ba3si ba2s1k bas6sak bas4san bassi4 bassis5 bas4so bas4st bass6tr ba2st ba7s8u ba1ta 2ba1tr bat6ti bau9la 6bav ba5z 2b1b b3ba b6bak b4b5arb b6base b3be. b4beb b4beda b2bef b4beg b6beh b2bei bb3eie bb5eig b2bek b6belo bbel6t3 bbelte4 b4bem b9bene. b2beo b3ber b4b5erf b4bes6 b7beska b6beta b6beten b4beti b6beto b6betr b2bev b8b1h b3bi bbi9e8 b4b5inn b2b1l bb7len bb5op bb5rek bbu9ra bb5ut. bb5ute b2by5 bby3e bby1k b4bø b6bÃ¥ 2bc 2b1d b2dek b7den b4dep 1be be2au be4bo. bebo5er. bebo9k be4da. be9dar be3der bedi9e8 be4dre bed5red bed2s1 bedy9ra be1dÃ¥3 2bee be5ed be6ef b4ef befa5re be3g bega7 be4ga. be4gi. be4gn be5go be4g5re. be4g5rene. be4gÃ¥. be6ha. 
beha7g behers7 4behu behÃ¥4r be6i be5ke be2k3i bek6kel bekke5r bek4kes bek6l 4beks be4l3ab bela9ga bel5eie bel5eig bel5ein be4lek beli9na beli9v belle5sa bel5let bel5læ be2l1o bel5s4p bel7s4t bel4tag bel4te5s bel4t3ø bel3u bel5v belæ5re belÃ¥7ne be4na be5nat be3ne 4bened be4nest ben5g be5ni ben5s4i ben5skj ben5sp ben5te 2bep b4er 4berai be7ras ber5d be4rep 8berest bere5te ber4ga ber4g5ende ber5ges berg3j berg3l berg3o ber4g3Ã¥ be5rib beri5ke be7ris ber6kl ber5na ber5ne be1ro be4ro. bero9a berri6 ber5te ber5ti be1ru beru9sa berø5v b4e1s2 5bes. be4se. be5s4i 4besik 6besj bes5ke bes6k5n be3sn bes7ne 4be3so beso9v be5sp bes3s bes6s7a6 bes6sel bes7si bes7tens bes6tes be5stes. be6sÃ¥ 3b4et 5bet. be4tab be3te 4betei be7tes beto5ne be6trar be6tre. be4t3ri 4betu be4ty. beva5re be6ve. bevi5se. be9vo bev9r 6beÃ¥ beÃ¥n9 7bé 2b5f 6bg 2bh bhu1 1b4i bi5ak bi6bla bi5ce bi1d bi5dee bi8dé bid6r bidu8 bi5el bi6ele bi5erv 4bifa bi5g bi3ka bik1k bik6ki bi5k4l 4bi7kr bi6la. bi4l5ap bi4las bi4lau bi3let bilet5r bi4lin bil3j bil5la billa8ga bil5leg bil5m6 bil1o 6bi5m bi4nans bingo5 bin4gol bi7no 4b5int bio7 bi3o8m3s4 4bip bi4ri5 bi7ris. bir4ken bi1ro bis2a bi3se b5ish bis6hi bis7ke bi5s4la 8bisn bi4sp bis4p5i bis5se bi1st bi5s6ta bis4til bis5tru bi3s4v bi6ta. bi4te. bi5tes bi8ti. bi4tre bi6tri bjar3 bjek4t5o bjø6r 2b1k4 b5kh bl2 1b2lad blad3a bla6f3 bla8ga blak5r bla5me blan9da b4lank blan5ke b4lant b1lar b8larar b3lat bla4u b4lef blei8e5 blei5er b4lek ble4k3a ble5kes ble4mo b3ler ble7r6a b6lesa blesse9ne b4lest 2blet bli9ke bli5ma bli5me blings6i blis3s b5lj blja4 blo6dr 1blok blom5m b6lu blues3 blu9sa bly7gl blæ5re. blÃ¥5n blÃ¥3r blÃ¥7sa blÃ¥5se. 6b5m 6bn b3ne. b3ner b7nes. b7ni b2o boa5s4 bobba6ne. bob5by. bob9bye bo2bl bob7la bobs4 bo3de bo6din bo6d7r bo2dø 4boef 1boen bo2er bo4et. 2bo3f6 bo4gel bogn7 bog1s 2boh 2boj 3bok. bo2ka bo6kel bo4k3et bok1i bok1k bo6kop bo6k5ri 5bok1s boksi8da boks4p bo2ku bok3ve b4ol bol5eie bo7li boli7n bo7lo. bo5loi bo4l5o4r bol5s4p bolst6 1bomb bom4bel bom5m bo6mo bom1s4 bo5nap bon5ato 4bonn bon5ne bons4 bo5nus boo6 boom1 bo1p4 bo1ra bo4ra. bo6re. bo7rel bo3ren bo3ret bo4rete bo6ri. bo3ro bor6t7ef bor6tei bor4tes bor6t7et bor4ti bort5s6 b4o1s bose8te. 3boska bo2sl bos5se bo4sto 1bot bo4ta. bo4tak bo4tal bo6te. 2bo7to 4bot6r 4bot4v bou3c 4bo1v bow4e bo6y 4bp b7pl br8 1b4ra bra5ka brak5s bra5se bred5sp brei5e brei3s bret7te b6rev brevi9er. bre4vin bri5a b4rio b2ro bro9ar bro1s4 3b4ru bru5ke bruk4s3 bru4na bru7na. bru5nes bru5pl bru7ren bru5sa brus4l 1bry bryn4s 3b4rø brø5de brÃ¥e6 brÃ¥9ne brÃ¥5te. 6b1s bs4e b5s6e6a5m4 b4s5el b5s6i bsk4 b4sli b2sm b2s1of bs2t6 b6st7f b7s6v bsÃ¥5 6b5t b7t6s 1b2u bu4ar 4bub bu3da bud5d6h bu3de 4bu3em bu4en. bu9ene bu4er. bue5s bu1i bu6is 6buki buk3l bu5la bu4le. bul3le bun7de bund4s3 bun8ge. bunk3r bunnla8ga bunn3s 2bu3o bu1p bu6ra. bu4re. 4bu3ro bu3ru bus6e bu4se. bus4h5e busk7ø bus6sek bus4sel bus6sent bus6set busse6te. bus6sj bus4sn bus4sp bus4st buss3v bu1st 4bustr 4busy bu1ta bu4tal bu7tem b5utg 4butst bu2tu bu1tø 4buv 6buø 2b1v 4b5w 1by bya2 by5al by9are by7dep by4en. by7ene by5ens by2er by3ers by4ge. byg3l 3byi by1lo 4bylø by5n by1re by6re. by1s 5bys. 4byse 6bysi 6bysp byst4 by4ste 4bysv by5tar by4te. by1tr 2by5v bæ5rar bæ6rar. bæ4re. bæ5ren bæ5rer. bæ5rere bæ5res. bæ5ret. bæ3ri bæ4r5is bæ3ru 1b4ø bø4e bø7ens bøf3 bø4kes bø6la. bø6le. bø2n bøne5 bøn6nes bøn7o bø6re. bør4s5k bør4sp børs3t børs8ta. 
børs3v bø5ta bø1v bøy7ar bøye5 bøy4ene bøy4es bøy4et 1b4Ã¥ bÃ¥4de bÃ¥9des bÃ¥6e bÃ¥7ene bÃ¥4la bÃ¥1re bÃ¥6reg bÃ¥4rep bÃ¥4s3te bÃ¥6s7ti bÃ¥6t5j bÃ¥6to bÃ¥t5r 1c4a cab4 ca4e ca5le ca5me ca5mo ca4pe. cap1r ca6pris ca3ra car5n ca5ro car4te. ca2sa3 ca6set cash5 ca5s4til cas5to ca1t cath5 ca3th6r ca4to. 6cb 4cc c1ci c2d cde6 cebo9 ce3d ce1i 1cel ce5le cel4les cel4lev 3cen. ce4ned ce4nee ce4nem ce4ne5s4 ce5nes. ce4net ce4nev 5cens cen4ti9m cen6to ce5o4 1cer cerba6ne. ce3re ce1ro5 cer3t 6ceru cest3o 2ch. cha5le 3cham4 cha7ne cha5t 2chb 4chei chel5s che7te chi4li 4ch3h 6ch5m 4chn ch5ne chom4 cho5s6 2chp 8chs 6cht ch1v ci1c 3cid ci2e cie8n cie2s5 cil5l ci3ne ci5ta 2c6k ck5ar. ck5art ck1en ck3er. ck4ere ck5e4t3 ck5et. cke8y5 ck1i ck1k ck1o2 ck7r ck5s2 cku6 ckup3 ck9ut c2l clai4r5 cly4 c1m cmini4 coa6 coat5 co6bid 1co4c 2cod 6cof 6cog co4la. co4lab co4lak co4la5r co2m co5ma com5t co6n5os con5s con7t co2o co7pa 2cor co3r6a co1re co4so 4cost co4ver c1pr cr8 cras4h crack4 cre2e creen7 cros2 5cru 4c4s 4c1t c6ta cty5 1cu 4cu. cu5la cu5le cu5lu cup1l cu6po cup1r cup7s cu4pÃ¥ 2cur cures4 cu6t cy2a cy6p7 cys3 c6z cæ5 1da 4daa 4daber 8dabo d5abs 6d5adel d4adg dadø4 dadør5 4daef 2daf 5dagb da4gev 4dagj dag7l da6go. da4g1r da6gun 4dagÃ¥ 4dahe 5dahl da1i 2daka 4dako 4dakr dak4se. 4daktig da5kv 7d6al5j da1la da8la. da4las 6dalau dalbu8er 4d5ald da4les dal6so da1lu da4ma. da4man da3mas da4me. da3men 4d7ameri dame3s dame5t6 da3mo dam7pe 5dan. 6danal d8ane 9dani 2d1anl 3dann dan5neb 4danno 7dano d4ans. dan3sa dan4sel dan9s8kan 4dansv 9dant. 9danti 8dany 2da3o 2dap da3pe d3app 3dar. 5dara 2darb dard3s4 4darea da4res d5arn da2ro dar5os. dar5s6 dar4ta. dar6va. dar8ve. 4dasei 2dash 6dasi 4dasjef da4sk da9sko 6dast6r 6dasu data5 4da4tal data6le. datali8na da4tek 7d6ato da3toa 4da5tr dats6j d6aud daud7s8 dau5go 2daut 2dav d5avb d5avis d1avl d3av5s 4daø 2d1b2 dba8le. dba4ne. dbe5s6t dbe8ta. dbe6te. dbling7 dbo6en. d6byf 4d1c 6d1d d2dad ddag4 d4dak d4d5arm dd3eie d2dek dde4lap dde4l5o ddel5sv d4demi dder5s d4desu ddie8 d7dom d8d7orm d4d1re dd5run ddsa4 dd4sala dd4skap dds5tab dd3s4te dds5tr d2dyb ddø2 ddør3 d4d5øy dd1Ã¥r 1de 2dea deak3 de2al de7b 6debar 4debas 3debat 2debe 4debl 2debo 5debon. 2debr 3debut 2deby 4debø de1c 5ded. 2deda 6d7edd 7dede. 4dedek 4dedel 4dedia 2ded4r 2dedy 2dee 5dee. de7er 2def d5eff d5eft 2deg 6d6e5ge 5degg de3gl 2deh 5dehavs 2dei dei4d dei4ene 3deig. dei8ge. 5deigen dei4g5r 5deik d2e1in 3deir de5is. de3ist 7de7it 2dej 8deka 2deke 2deki 2dekj 5dekk dekk3a dek4kan dek4kel dekk8s dekk7s6t de6k6kv dek2l 6dekly 5dekni de5ko 4dekod 4dekon 4dekos 2de5k6r dek6st deks7ti deks6tr 5dekt 2deku 4dekv 4dekø 4dekÃ¥ d4el. 6delad 6delage de6lans 4delau 4deled de4lef 4delei del5ei4d del5eie 4de4lek de5lele 4d5elem de4lest 6d3e4lev 4delid 6delinj del5int 4delis 4deliv del4lap del6lei del4lek del6lerf del6lig d4els del4s5at del3se delses7 del4si del4spo d4elt del6tala del3te del7tr 6deluk de8l7ur del5ve 4dely 4delæ 2delø delø6pa. 6deløy 2delÃ¥ 2de5ma d3emb 2deme demie4 4demj dem8na. de4mo. de4mog 5demok 4demol demo5no 5demos dem5pe 3demr 5dems 4demu 2demø 2demÃ¥ d2en. 4dena de7nak 6d5en6den. 8d7en6dene 6d5en6der. de5neb 4dened 6denel 4denem 4denet 4denev 6d5engen. 4deni 4deno de4nom d2ens den4sin den4s3t8 den6t5ant 4dentif 4dentit dent3o den4tr dent5s8 den6t5u6 den6tÃ¥ 4denu 2denæ 2denø de2ob de4og 2deol 4deopp 4de3or de4ove 4depak 4depap 2depe 4depi 4depl 6depu d2er der8am de6rant de4rare de4rark 5de7rast dera5te der3av der5d 4derea 4dered de4r5edd de4ref 4dereg 4derep 6deresi der5est 8derest. 
6dereste 4derett de4rif de4ril 5derin de4rinæ der5k der5ne de5rob 4derog 4deros de5rose dero8se. 6de7rot dero4v der5s6n der4sp der3t der5un de5rup 6derus der3v 4derør 2derÃ¥ de1s 3d6es. 2desa 4desc 2dese de5seg des5ers 4desho de8s9hop desi4s5t 2desj 2desk 9desk. 5deske 4des4le 2deso 2desp 7despl 6de5s6pr d4es2t 5dest. 5deste. des6tem de5sti 4destj 4desto 4destrÃ¥ 4destu 6desty de4s3tÃ¥ 6desup des6v 2desy desÃ¥7 d2et. 2deta deta8ka. de3te 4detea 6deteg 6detekk 4detel 4deten 4detep detes7 2deti 2detj 4deto 4det6r 2detu 4dety 4detø 6detÃ¥ 2deu d1eur 5deus 2dev deva8ne. devi9er. 3devik d7ev8n de5vu 2dey 4deø 2deÃ¥ 2d1f dfø6rarar 2d1g4 d4gel d5gi dgi6n dgjø6re7n6h d3go 2d1h dhav4s dheim2s5 dhu9ga dhø4r 1di di6ak di5alg dia5li di1ar dia3re dia1s dia7sp dias6t diats4 2diav 2dib did5ri di7ell di5en. 4di5end di1er dies4 3diff diffe5 3diful 4difø di8g9and di4gat di3ge4s5 dig7g 2digj dig8la. dig6le. 4digren dig5ret 4digru di2gu 4dih 4dii 2dij di5ka di8ka. di5kem 2dikj 6dikn 2diko 2di1kr dik7v 2dili dil4les di5ma 6dimed 6d5i6mel 4dimes 4dimet 2dimo 4d5imp 4dimÃ¥ di5na 2d1ind di7nen 4d5ingk din3gr ding8s5en ding6s5er din8g9Ã¥ dini4 di4ni. 4d1inn 2dino 2dinv 4dinø 3diog 7diol dio3na dio4no di3ori dio5tr 2dip 3dipl dippe7 4dired 4direg 4d5irs 5dis. di6sc di6sed dis4i disie5ne d6isk dis6kam di4skj dis6k7l di4sk6o dis5ko. dis7koe dis7ku dis5s dis1t dis7tik di5sv dis4vi 2disy di4tal di7te 4ditek dit4tet ditt3r di9us. di4va. 4dival di7van 4divek div6i 2divu 2diøk d1ja d1je. d1jen djer5ve d7jes. dje9ve d6jingan d6jingar d1jo dju8la. 5djup 6djupar d4jø. 2d5k2 dka8ra. dki6 d5kj dko2r 8d7out dku4le. 4d1l4 d3la dla4ga dland7as dla6te. dla4v d4ledi d2lef d4lega d7legar dle3ge dlei7er. d5leka d2lel dlem4st d4lenet d4leru dleva8ne. d5li dli4f dli5ke d5lo dly8se. dlø8pa. d5lÃ¥ dlÃ¥9re 2d1m 4d1t dme6la. dmo4e dmors6 dmo8s dmø4re. dmÃ¥6la. 8d1n dn6a dno8de. 1do 2doav do2b3 4doba do3be 8do9b8lan 2dobÃ¥ do4da dodø4 dodør5 4doeu 2dof d3off d5ofr dog6med dog4me5s 2dogr 6do5i do3ki 4dokine dok6kan 4dokn do5le 4doled doli8ne. 4dolinj 6dolis 4dolj dol5l4 4domes do5mis 4doml do4mo dom7p 2domr dom8sa dom8s5e6 dom6s5i dom4s3k doms3l domsla6ga dom4so do4mu do4na. don5g 4donke 6d7on6kl don5st don5t4 2dop do4pa. do4pe. 3dopi d6o1ra do5ran 2d1ord dor4da do1re dor6gest d5orie dorm8 do3ro 5dorp 3dorr dors6 dors7ke do1ru do7rye 5dos. do4se. do4set do1sk 6do7s6l 2doso 4dosp 2dost do3str 4dotr 2dov do5va do5ven do4ver dov9n 4doø 2d7p8 5dq dr4 d1ra. d5raa d9rad. d5rade 1d6rag dra5gen dra6kes 3drakt 1dram 5d6rang 3drap. 5drapa 3drape d3rapp d1rar d6rarar d3rarb dra6ret d5rarm d5rarr d9ra8sa 9dratt drau7ma 2dre. 5dreass 2dred d5rede. d5reder dre7d6r d5reds 9dreg. 9drege. d3regj 3dreie drei5en drei5er. drei7ern d5rej d6reka d7rekk 3drekt 4drel d7relig 4drem d1ren 4drer d3ret. d3retn d6retr d3rets drett4 dretts5 5drevet 3drevn 4dria 3d2rif 2d1rig d2rik d3rik. dri8ka. d3rike 3drikk drik7s6 d9rikt 5d4ril 2d1rin dring6 drit7tr drit5u dritun8ge. 1d2riv 3dronn dro6pa. drop5s 2d1ror 3d4ros dro8ta. 2d1rov dro5va dro3ve 2dru 3druk d1rul d1rum d3rup 6d5rut 2dryk drykk4 d2rys d4røm drø6re. d7røt drø3ve 5d8røye d1rÃ¥d drÃ¥4de. 3drÃ¥p 8ds d5sa. d3sabl ds1ad ds3a6del d1sag dsa9ga dsak6se. d1sal dsa8la. d4s3alf d6sall d9same ds1a6n ds5ane ds1a2r d2sas d4satf dsbø6n d5se. dse4d ds1ef d2s1ei ds7eks ds3ela dsel4s5a dse2m ds3emi d6s7enda dsen8de. d4s3eng d5sens ds3e2p d7ser. d6serf ds5erk d4s5e4sk ds5e4st ds3eta dse4te. 
d5setj ds1ev d2s1i d3s2id d7sida ds5ident d4s5ide4o3 ds3idr d7sil ds5ind d3sir ds3i4s dsi4ve d4s3jen d2sjo ds7jor ds6ju ds5kab d4s3kan ds3kar d4skat d1skj ds5kjen d6s5kjøt ds1ko d5skot ds3kro d4s1kv ds1l ds5la. ds5lan ds6lem dsli6k ds6lo. d9slott. ds4luk ds3ma d6smo dsmÃ¥6la ds3ne ds5no ds1o2 d7somst d4s5os d7sot ds1p d3spek d3spel ds2pi d9s8py 6d7run ds5s4 dst4 ds3tak d4s3tal d3s8tar d5s4tat ds6tau dss8t d8s9te. ds9teik dste6ma. d6s5temp ds5teo d3s4tig d3s4tik d5stilli ds1tj d3stor d3stri dstu8na d5stund ds3tv dsty8e ds3tyv ds1u2 dsure6 d5s6us d6s7usk ds1v dsva8ne. d3s4vet dsvi6ka. d5syk d3syn dsy6na ds1yt d2s1ø d6sør ds1Ã¥6 6dt. d3t4a dta6la. dta4le. dta4s d3tem dte4ma. dt4eng dterle7 dte4se. d5t4et d4tg d3ti dt6j d7to d5t4rag d6troc d5t4rø d6t1s2 d3t2va dtø8 dt6øk dtørs3 dtÃ¥4 1du dub5b dub5l du1c du9ene du9ens due5s 4dufo 3dug 4dugu duit6 du6ka. dul5l 4duly dums2 du4na. dun7de 4d5uni dun5s6 dun7sta 7duo d3upk dupp6e5 3dur du6ra. du6re. du6rei dur8ta du1s du5s4a du9sem du5s2i du2sk du2sl du2sp duss4 dust4 du2sv 2d1ut d6utt du6va. du4ve. 2d1v 5d8va6le dva4ne. dve6s dve6va dve8ve. d7vo dvo8r 2d1w 1dy dy9a 2dyba 2dyf dy5ke. 4dykl 4dykø 2dyl dy4na. dy4ne. 4dynk dynk6s5 dy3pes dy2p3r dy4ra. dy5reba dyrle8ge. dyr5s4 dy2r5u dyr5ø 7dys. dy8sa 4dyse dy6se. dys7s dyst7r dy2t d5z 8dz. 1dæ dærs4 1dø 4dø. død2s1 dø3gr 2døk d5økn dø4ma dø4pe. dø4pen d2ør dør3i 2døs d6øs. dø2st3 døt3 dø9va dø3vel dø1vi 6døvl døv8le. 2døy. døya8 døy8g 4d9øys 1dÃ¥d dÃ¥d6s5 dÃ¥8na. 1dÃ¥p dÃ¥8ra. dÃ¥5ri d3Ã¥s. dÃ¥8sa. d8Ã¥se dÃ¥7sem d7Ã¥t. 2ea e1a2b e1ad ea2d1i eadli5 e1af ea2gu e1a2k e4akr 6eakt eak6se. e1al. e5alle eal8le. ea4lov e3alte ea2lø ea4m1 e3aman e5amn e1an e2an. ea4ne. e6a5net e4ani ean7n ea4nor e6ans. ean3sl e1ap ea4pe e1ar ea2re ea5rer ea5ret ea5r4i ear6ka. ear8ma. ear4ta. ea2s1i e3asp e1ass ea7ta e5a4tel eate6ren ea4t3et eat8h eat6le e8ato ea5tri e1att e1au eau6ga eau6ge. eau8ra. eau6re eau5s eau8sa eau8se. e1av eav8la eav8le. 2e1b2 eba3d eba4ne. eb3be eb4be. ebe4d3e6 ebei7er. eben6s5k ebers6 ebesku5 ebet5s eb3h e5b6lo eblæ5 ebob3 ebo4da ebo5ers. ebo6kr ebo6la eb5s ebu4e5re ebu5ern ebu6et eby4ta ebø6n 2ec e4ca ec4c e1ce e5cha e3com e2dad ed5ad. e5d4ag edag4s5 e2dar ed3ark ed3arv ed7dela ed4dyr e3de. ed5e6ge ed5eg4n ed3eie e4deks ede8le. e4denf e8d5eng eden5t e6depr ede4ra e4derø e4desk ede4sl ede4sm e4desn ede4s5p ede4ta e6d5ett e8dé e5d6ia edi6a5ne edi4ene edi6gj ed4is edi3si edi4s5k 3e4dit e7div edle6ge. ed8ob edok8se. e4dol ed1op ed1ov e1dr ed2ra ed3reg e5drev. ed3rom e6d5ryg ed7ski ed3skr ed3s4la ed2sm ed5ta ed3te edt6r edu8a ed7va edvi6s e3dyk ed5ynk edyr6ke. edæ4 e3d6ø1r e3e2 eea7 ee3b e2ed ee3di eeg4ga eeg6n ee5gÃ¥ eei4d eei7ni eek1e eek4te ee3la e5e4li eem6na ee4n e5enb een3in e5en6k een5t e3epl ee8ra eer3en ee3ri e4er3l eer4me. e6e5sh ees8ka ees6ke. e3e6t eev4ne. e1f 4efa efal6s7 e4fana ef3ank e4fark efar6s5 ef7ea ef5ef ef3fe 3effek ef3fo 4efi e4f3id e5fig e5f4ilm efi7ren efisken8 4e3fj 2efl efle6ge. eflÃ¥3 e2fn efo8bi. efo4no 8efr ef2sj ef2sk ef2sp ef6str e8ft ef2ta efta5r eft5ei ef5ter eftle8ge. eft1s4 6efu efyrs5 e3fæ 4efø efø5le eføy9 2ega e3gaf e6gala e7gam ega4ve. egde8l eg7des e5gedo egei9e ege4let e2gem e3geme egen5s 3e4gensk e7geom e2ges2 ege5sp eg9gen egg3l eg8g9ut egi3an egie2 e4giko egis4p e3g6lad e3g4led eg7lesa e5g4lit eg7ly e5g6lø 6egm e9g8nag eg4nem eg6no e7gnÃ¥ 4ego e4go. 7egoi eg2r egrans5 e7g8rø egs4am egs4ki egs4ta e1gø egøy6e5ne 4egÃ¥ egÃ¥4va egÃ¥4ve. e1h ehea4 ehei9e eh5er. e4h5ere eh7ern eher4s ehog5 ehov2 ehy6re. ehø8va e6hÃ¥. 2e8ia e9iakt ei9a4n ei7ar. ei4c e8id. 
ei3d8ar e3idea e3ideo ei9der ei5det 5eidf 5eidg e6i2do e1idr ei3d4u e8i7e eie5d ei8eg eie2n ei9en. eier3a ei4ert ei6es ei1fl 8eig. ei3ga eig6e ei6gev ei5gi ei6gra ei6gu 4eii ei5kaa ei3ke. ei6kee ei6keh ei4kel ei6keri ei4ket ei1kr eiks3a eik6se eik4so 4eil ei9led eil5egg e3illu ei3lo. ei9loa eil5op ei5los eil5s6 4eim ei7ma. e5imag ei4m5a6l ei4med ei6mei ei7men ei4me5s ei4met eim9ett ei6mo 8e1imp ei4n3al ei4nans ein5ant ei7nare ein1d 4e3indu ei5ned ei7nel ein9f einga6 ein3gr e4ini 6ei6nit ein3k4 e5inkar 4e1inn ei4nom ei4nov einsi9d ein4s3l eins6o e4inspi ein7sto ei2n1u4 ei3num 6einv ei2n3ø ei5or e4ip ei9pa ei3pe eip5s 2eir eir9ak ei7ren ei3ri eir5s 2eis ei5sar e4ise ei3se. eise5i ei3sen eiseri9e8 ei3s4ha ei4sil e3isk. e3iske ei6s3kj eis3ko ei3s4pe ei4spi ei4tek ei4tera ei4tere ei2to eit7ta ei6t7ut ei3tve ei6t7ø ei4vak eiva9r ei9ven ei3vi eiv5s4 e1j ejo8en. eju6la 4e1ka ekali7 ekalie6 ekam6s5 eka6ra. e5kavr ek3eie e4kero e4kes ek5e4ta e6key e5k6hov e1ki e4ki. 4e1kj 2ekk ekke7le ekk9ist ek1kj ek4kjø ekk3l ek5kok ekko5v ek6ku ek4ky e1kl e8klane ek6leg e6klen ek5let e3klu e5klæ ek2lø 4ekn ek9na. e3knek ekne7s ekni7p ek3no e3k4nok e1knu e5knø e1k2o e4ko. eko7le ekords8 eko6te. e4kov 2e1kr e3k2ra e3k4red e7kref e3kren e4k5ret. e5krets e5k4rev e3k2ri e3kro ek4ry e3k4rÃ¥ ek5rÃ¥d ek3s4ak ek4sal 3eksam ek3sel 3eksem ek4sig eks1k ek4ska ek8sn 1eksp eks5pe ek4sta ek7s6tel ek3sti ek4stil ekst5o ekst3Ã¥ ek4sæ ek6t7arm ekt3av ek3tef 5ek3tep ek5tes ek6test ek4tid ekti5m ek5tiv 4ekto ektori6a ek4t3ra e6k6t7ro e1ku eku4le. e1k2v ek4val ek6var. 6ekve e4k5ve4d e4k5vik ek4vin ek6vis 3ekviv 6e5kw e5ky eky6te. e3kæ e1kø e1kÃ¥ ekÃ¥6pa 2e1la ela4ga el3agg e2l1ak e5lake elak8se. el7akt el1al e8lame el3anl e6l5arg e4larr el7arti e5las. e5lase ela4te. el5ato el5avl el3avs elbo8da el4ch el5do eld4rer eld5sle 4eled e4ledi e4ledr e3lee eleg5d ele7ger eleg8na e7leir e7leis e5leke e2lel e2lem e5lem. e5lemat 5elemen e5lemet e3lemm e3lemp 4elen ele9na e4l5enh elens3 e4lentu e5lepa 2e3ler ele8ra. e7leret e6l7erg ele3sk e6lesku e4leta e4lete 5e8lev. ele4vak 6e5levn el5fi el9ga. el5gele el5gi elg3s8 2eli e9lie eli5e6rer e4lif e4li5g4r e4l3ind elin5es e4linsp eli6o5s eli8ta. eli4tet 6e5litt eli6v7en el1j elk2 el6k5al el4ke5s el4kete el3kn el5la. el5l6ar el4led el4leg ell5eie el5ler el3le4s el6lete elli7ga. el4lisj ell7sa ell5sk ell5s4l ell5sp ell5sv el5l4ur el4læ el5m4o el5mu el5n 2e1lo elo6ka. e2lom e3lomm el3omn el3oms el1op e2l1or e3lort el3ost el4ot elo7vers. e4l5ovn el4pet el5ph elro4s el4sei els5eie el3sen el3se6s3 el3si el4sje el4skal 7elskand els5kare els1l el6sno el4spe el3spr els4ten el4sti el4stj el2sø els5øk el5s6Ã¥pe elta8le. el5te. el6teg el4tero el4t5ett el4t3op el4tro elt5rop el2tu e3lua e1lu2e e4lu4he e3luk e7lum el3ung elun6ge. e7lup elu8pe. e1lur e7lus el3utr elv3an el4ve3d el4vei el6vere elv1r 5elvs. elvæ6re. 4e1ly ely8et. el5ynd 2elæ elæ5res el9ærv e6l5æt 2elø e4l3ø4r el3øy. el5øya el5øyd e6l3øyn 2e1lÃ¥ el1Ã¥r e5lÃ¥t 4em. e2mad emafo5 ema4ge. e4magr e2mah ema4ke. e4mako em5akti e4maku ema6le. 5e4malj e4mam e4mana ema4ni. e6manu e4mare em7ar8m e4masi e6masku e4mata ema3uk ema5ve 5emball 3embed embe6r5 3embet 6emd 8eme emei9e eme4li e4mend eme5tri eme6trisk em5e4v e4mib emi5ert e4mig 4emin emi5ni emini6st e4minor e6mir emi9sa em7je e5mju em5le emle6s em5me. em5men em8mi. 8emnd em4ne. em4ned em4nem 6emni emo5nol em1op e2m3o4v em4ped em6peri em2p3l emp9lane. em6pli empo3 em4pol em4pos em4ses emse8te. 
em4s5ju em4s1l em2sn em4sor em1st8 ems4te ems3u emta8la em4til e3mug e2muk e5my emy4k 6emø emø8r e5møy emÃ¥4la emÃ¥9lan emÃ¥l4s em1Ã¥8r 2ena en1ak en3ald ena5li en3all en5and e5nane e6n7ang e4n3ant e5nar. en8are e4narr en3ass enat6s5p ena2v en3avi 4enb enbe9na en4d5and en4dek en3del endelø7se en5demi en3der en4desl end5l 1endr en4d3ro end3st en7dø endø4r endør5e en5egg eneg8ga en3eie en5eks e3nekt 4enem 2enen en5eng 3energ e4nesk 4enev ene7ven eng6a en7gar en4gem en4geri enge6r5u en4gle eng4les eng4r eng3s4e eng5so eng5sp engs4ti en4gut en4g5Ã¥ 4enhj e4ni. e4nierk e2nif e5nir en4kera en4keri 2enna en9nal 2enne en4nem ennes6t enne4s5v enn2i enn2o enn4sj enn5sta enn7ø 2eno en3ok en3og e8n7old en3om. en1op e6n5ord 2enp en4pÃ¥s en4rem en5sab ens5af en6s7a6ker en4sek en4seli 5ensemb en4ses4 ense3u en3si ensi8de. ensi5ert en4sim ensle7g ens6lev en7s6opt en1st en6s7tal ens4ter ens4ti ens7und ens4ve ensvi6 en2sø ens3øk ens7Ã¥ en5ta. en4tap en5te. en4tec en4teg en4tel en3ti en5tie en5tig en5tik en9tima en7timen en4to4r ent5ori ent5rab en4tre ent5ren en4t5rol entropi5 en4t3rÃ¥ en6tul entun6ge. 3entus 2enu e2n7ul e4n5ur 2en3v 4enw e4ny. e2nyb e6nyr e2nys e4nyta 4e7næ enæ8m enø4k e2n3øv en5øyd 4enÃ¥ en1Ã¥s en6Ã¥s. e3o6 eo3a e6obe eo2bl eo5d eod8de. e1og1 eo7gr e2o1i e2ok e3oks e2ol e4o7lo eom1s4 eon8de. eo5ne e7onk eon4kl e3ont eo2p eo3pa eo5pl eopo3 eopp5r eo3pr eor4da eo9re e2ori eo4rid eori5e6 eo4rik eo4ris eo1ro eo1s eos6l e2o1u 2e1p epa3t e6peno epe6p eper5r e3pes epe4st e4peta e6peu 3epid epi6ka. epi7kr 8e7pin 3episo ep2l e8plen ep5lene ep5ler eple9s6 3eplet epo6et 3epoke epp2s ep6sem ep4ses4 ep6s5lu eps1t ep2su e3p2sy ep6tin ept6r 2e1ra e4r3abb era8da. e4rael e2raf er3aft e4ra4g e2r1ak e3rake era5kl eral3u e4rama era6na. e5rande. e5ra7ne er7anli era4no er5ape. er5aper er5a4pin er3apo er5appe e5rar. e4rarg e5rasane e5rasar e4rasje e4rask e6raso e4r3ass er6ast e4ratek era7tor e4ratr er5att e2rau er5auk erau9ken erau8s er3av. er3avh er5avl e3ravn er3avr er3avs 4erb erbi9ti erbo8da erbo8de. er8byl erd2e er4ded erden8s er4dis er1dr erd4ra erd8re erd4skj erds3t erd4sto erdsto8ga er3d4v e1re er3eff er3eft er5e4gen er3eid er3eie ere6ka ere6ke. e4r3eks e6r5ekte e4r3e4le ere4lit e6relli e4r3els e9rem. e5remm er3emn er5enden e4r3eng e4r5enh e3re4o7 er8er. e4r3erf e5reri er3ess ere4t e5ret. e4r3eta eret6h ere5to e7rets 6erett eretts5 e4r3e4va e4r5e4ve e4r3evn erev6ne. e8ré 5erfari 6erfi 4erfr 2erg erg5elv ergi3f er6gli er4go. erg5ret erg5sko erg5sp 2erh erhø6re. e1ri e4riad e4riak erib3b eri3bl e4rice e2rid e5rid. e5ridn eri4kat e6rikn 4eril e7ri6ma. er3ind e3ring 6eringar ering6si er3inn er3ins er3int e5riori e5ris. e4risa eri3se e4riso e6risp e5rist. e7rista e5riste eri5stil e4ri5s6to eri5sø e4rite e4rito er7ivo er7jes. 4erka 8erke erkeri6e er4kesa er4kese er4kete er6k5ett 3erklæ 6erkn 4er5k4o 4erkr erland7as erle6ge. erle7s8t er3me ermo8de. erm1s ermÃ¥6la. 2ern er4nad er4ned er5nede er6nee er4nero er4nest er2no er3nob 2ero e3roa er3obl 5ero4b7r e1roe er3off e1rog er5oks e1rol er3oly e1rom er5omk ero5mo e1ron er5ond erono5 er1op e5rop. e7ro6pa e5ropen er1o2r e5ror. e3rore e7ro4sa 3erosj er3osl ero4ta er1ov 2erp 2err er3ra er4rae er4rel errie9n 6ersa ersa8ka ers6al er3sep 6ersj er1sk er3ska ers4ka. er8skaran er9s6karar ers5kor er1sl ers4la ers6led ers4mi er5sne 6ersp ers5tens ers5ter er7stev er1su 4ersv er4sva ers4ve er1sÃ¥ er4s5Ã¥r 2erta er4tare er4te3s4 er5tia er3tib erti4e er5til erti4mo er4top ert5rol 4erts ert3sa erty6e ertÃ¥7ren 2eru e1rua e5rud3 e1rui er1u2k e3rum e4r3ung er3uni e3rup. 
er1u8r erure8 e1rus er5ut. e5ruta e3rute er5utn er9uto e4ruts er7u6v ervele8 6ervi ervs3 ervæ6re. e1ry4 e2r3ya erye7ne er1yr erys3 e1ræ e9r4ære 2e1rø er5økn e4røko er1ø4l e4r5øn e5r6ønn erø8ra e6r5ør6s er3øya e4r5øy7e erøy4n e4r5øys. er7øysk 2e1rÃ¥ e9rÃ¥. erÃ¥4da erÃ¥8de. erÃ¥6e erÃ¥4k er5Ã¥ke er3Ã¥4l e2r1Ã¥n er3Ã¥4p er3Ã¥4se erÃ¥6t er7Ã¥ta9 e6rÃ¥v 2es. e1sa es5aa e2sad esag6 es4al esa6la. esa9met e4s3ant es5arab es5arv esbi9e es8ce 2ese es5ege es3ei4d es5eie. es3eig esei8ge. e3sek e4seku esel5s4 e3s2en e5ser. e5sere e5seri e4s3erk ese5s2 e3sest e4sesv ese5tas ese4te. e5setj e9sets e4seu e4sha es6har esh9ar. es4h5er e4shi e1sho e7s8hop esi6ar esi5ert e4sil e5sili es3ill e4s3ins esi6v esi3st es3jor e5skab es6ked es4kee es4ker 8eskil e4sking eskinns5 e1skj e8s9kjer. es4kjæ e6skjøn es5k4n e5sko. es7koa e3skot e1skr e1sku esku5et es3kvi e5sky e6skyr e1skø e7skÃ¥ eskÃ¥7r e1sla es4lek es4let es7let. es7lets es4lit e4s5lok es4løk es4løv es4make e7s4mett es9na. es4ner es2no es4nu es2ny esnæ5re 4e1so eso4b e2s3od e2s1of es3oks e2som e3somm e4s3ori e1s2p espa9ra es3pas es5pe. e4s4ped es5perm es3pl es4p5le es3pol e3spor es3pun es4sed es4see es4seg esse7i es6sendi es4s5enk es4ses es4sete ess5e4va es4s3ja es4sje es4skr ess4let ess3om es4sos es4spa ess5tilt es4stol ess5tor ess9tua ess5tue esstu8en. 4essu es2s1v es6s5Ã¥ e7stad es5tae es7takt es9tala e8stane e5stans es5tant es3tap e5start e3stat es5tato e4s3tau e7stav. est5avs e6s5te. es4teg est5eie est5eig es4tek es4tel e5stell es5tels e4stema e6steme e4stemo es5temp 6esten e7steng es5tenk e8s7ter. e5s6terk esterne8 este3s4 es4test es4tien e3stif es4tig e3stik es4tikk 5e6s5tima es4tis e4stiv e1stj es3t6on e4stog e3stok e3s4tol es6tone est5ord est5ori es5trak est5rene est5rer est9ré es5trib e5stryk e3strÃ¥ e1stu e5stud es5tus 8estÃ¥ estÃ¥5ren estÃ¥6s est7Ã¥se e1su es5ut esva5re e5s4vek es5vig e5svikt es3vis 2e3sy esy4na e4s5yt es5øvi es7øye e1ta 4eta. e4tae e2taf etafo7 e3tak e6tako e3tal eta3la eta4la. et5ald 8e7tank eta5le 4e5tall etal6list etall5s6 etal4s7 et3amb e6tand eta4nen et4ap e9tapa e5tape 3etappe et8ar. et8are et5art 5e6tasjes 1e2tat 4e5tati e5tato e3tatt et5aun e8tax et1c 2ete e4teie e4teka ete4ma. e5teran ete5ru etes5i e4tesl et8et e5tet. e7tets 4eté 4eth et6her 2eti e3tit 2e1tj e7tjer et3jø 6etl et5m 2etn et6nev etning4 etnings7 2e1to eto4er et5oppd et3ord e4tot 2e1tr e5t4rad e7t6rak 8etre e5tree e5tref e4t3ris e9tru. et5rum ets1 et9sar. et4sku etsku8le. ets7l etsla8ga etsnæ4 et2sø et4tak et4tal etta6le. ett5alt et4tank et8tap et5te. et6tei et6telt et4tenk et4tenn ettera4 etter5at et5t6erf 5etterk 5etterr ett5esk 2etti et6tia ettian8 et4til et2tj etto5a et4tr ett3re ett4ski ett4sti et3tug et4tur ettvi5se et2ty 2e1tu 5e6tui etu6na et3uni 2e3t2v e4t3va e5t6vang 2e1ty 4etz 4etæ etæ3ra e1tø e1tÃ¥ etÃ¥5re e1u eu4a e8uf eug8la eu4h eu4ka. eu2ke eu5kem eu7kr eu2l eum2 e3u2n eun4ge. eu5nu e4up5a e6upk e2ur eu6rat eure4 2euri e3urn eur8na eur6ne. euro1 3europ e3urt e2us eu4si eu1ta e3uts eut7t e4u3z e1v evad6r ev5akti eva4la evann4s e4v5anta e4varar e4varb e4v5ark e6vatf 6eved eve5d6a eve5del 6eveg e5vegg evei6s5 6evenn even5s 5eventy e4veny e9ver. e5ves. e5veta e5vev. evi4e evi5ere evi5ert evin4s evi4se. evis3t e2vj ev2na evn5a4k ev7nu ev7oms e4vre e7vri. ev9rÃ¥ ev1s evs8v evta8la evti4 e2v3un e5vyrk e2vø ew3a ey1 eybal4 e1y2d ey5k e1yng e1yr eyr6ka eyr4ke. e1y2t e7yti ey3tr e3zi e1zu e1æ2 eær3 e1ø4 eør6na. 
eøy4 eøy9an eøy7en e1Ã¥6 eÃ¥k4 eÃ¥n8da eÃ¥t8t é1a é1b é1d édø4r5 é5e8 é1f é1g é1h é1i é1k é1l é1m é4n5e é5n6et é5o é1p é1r é2rj é1s ése2 é1t é1v éva8la évo8re. é5Ã¥ è1r è2red è2ref è2rek4 è4rener è2rep ère3s2 è4rest è2rev è6ves ê8lan ê6ra ê1re êr7o êr9sl 1fa 4fabo fac8 fa1ci fa3de fa6de. 6fadm fa4f fa2g1a fa5ger fage4t fag5eti fa2gi fa2go fa2gr 6f5agro fag3sk fa2g1u fai5 2fakr fakse9t fakta3 fa1ku 5fal fa1la fal6kes fal4k3l fal6lers fal5m fa4lo fal4sk 4fa5mo fa4na. 7fan5d 5fane fanfa5 fang4s fangst7ev 4fank 2f3anl fan5s6 fan7tesk fan5ti fan3to fan4try 6fanu 2fap 3far 9f8ar. fa3ra fa4ra. fa4re. fa4res far4gel far4ges fa4rit far5n far6skj fart2 far4vel 4fary fa7sa fa4sel fa4seru 2fasl fast3r fa3tal fa4te. fa6t7ers fa1to fat2r fa3t6re fav5ne fa1vø 2f1b fba4ne. 2f1d 1fe 3fe. 2fe1a4 5fea. fe5a6l 2fec fe4da 3fede fe2dr fe3dra fe3d4ri fe7ene 2fef 2fe3g 2fehj fei9en fei9er feig4de fei5ge feil5es fei4li 9fein. 7feine fei7ter 8fej 2fek4a 4fekl 2feko 4fekr fek6tes fekt5ev fe5lag fe5len fe5li feli7e6 6felik felles5 fel7læ 2felo fel5ok fel9ta. fel4tek fel4ti fel4tra fem5ak fem9b fem9ne fe4mo fem5s fem4tid fe2m1ø 5fen. 4fena fend9r 4feni 5fens fentleg5 2feo 2fe3p2 5fer. fe6ral 4feram fe5ras fer6at fer4dam fer5de 4ferea 4fereg fe4rek fer6en fe7rer. fer5ers feri6e feri8e5ne 7fern fer2r fers9kar fers7ke 2fe1ru 3fes. 2fesi 2fe1sk fe2st fes9t6i fes8t3r fes9tum fe6st9ø 4fesy fe6ta. 4fetap fe4te. 4fetea fe5ti 4fetil 2fet6r fett5j fett7s6 4fe4ty 2feu 2fev fe4ven 2feø fe6øy. 2feÃ¥ 1fér 2ff f7fa. f4fab f2f3a2g f4f3ak ffa7l f6fann f4farb ffar6d ffa7re f9fas f6fat f8f9au f2f3av f2feb f2fe3d f2fee f6feh ffel7t8 f2fem f6ferdi ffe4rer f2fes f4feti f4feto ff5eve f2fib f4fice f2fid f9fien f4finn ff5int f2fip ffi5s4 f2fj ff3la ffla8ga ff9leg ff5lek ff3li f2fo ff4ol f2f3re ff1ro ff5sl ff5t4 ffu6r f6f7ut f3fæ f4fø 2f3g2 2f1h 1fi fia7l fia4n 4fiap fi4as1 fibi6en fi9cen fici7 fi7di fie2 fi1er fi7e6re fi4f fi5fi fi5fo fi7fl f6ig figh9t fi7g6r 4fih fi9ka fikali7 fikalie6 fi7ken fik6ka fik6k5n fi5ko fik7sa 4fiku fi4la. fi3li fil6lel fil6leri fil4les fil7leti fil8m7at fi1lo filt6re fil7tres fil4tri 4filæ fin5art fin5d fi4ne. fi5ner fing3r 6finj finn5ei 6finnsa fi4n3o fi9nor 2fins fin6sleg fin6te5s fi2nu 7fio5 fi2r fi3ra fi6ra. fi9rene fi5res. fi3ri fi5s4a 4fisc fi6se. fiser6s fi8si. fi2sk fi6ska fisk5a6d fis7kal fisk5and fis6kart fis6k5end fis4k3l 6fiskol 6fisku fis5ti 6fistu fiti6me. 4fitr fitt5s fi7ty fi4ve. 1fj f1jeg fjel6lend f5jen fjer5ne fje4sk fje2t3 f5ju fjære5 fjæ4res8 2f5k6 f2l2 f5lag. f6lagg fla7ke f4las 2fleg fle6i f4lek f6lel fle6r5u6 fle4sl f4lett f3lev fli7ne flis7t flit9t8 1flo flo9ga flok9s flo2m3 flo6r5o flos6kl f6lu flue3s flus8 fl5ut 3fly fly3s4 f3løn flø5s 1flÃ¥ flÃ¥9sa flÃ¥7se 2f1m fn6 1fo 2fo. fo9ar 6fobs 2foe 4fof foi7la fo8ke. fo6la. fol4dr fo4le. fo5led fol6k5v fol9ler fol5lese folke5s6 fol5li fo4lu 3fon fo4na fond2 fo5ne fo3n4id fo4nin fon1s2 f1op for1a for9drev for7dro for5d6u fo6re. for9ei for7e6n for5ent for5ess for4et for9e8te. for9e8ten for9g6 forh6 fo4r9in fork8 forlo7v forlø9pa forlø9se formo9r for3n foro6 for5p4 for3se forsi9da forsi7de. for3s4m fors8t for9ste fors4v forta8le. for5t6e fortei9e for7t6i forti5da for3t4v for3u4l for7v6 forva9re fo8r5æ4 for5ø4 for9Ã¥ fo9se fosfo5re 4fosj fo4ta fo6te. foto5 fo4tok fo4tom fo6top fo4tor fot3s fots6v fot3t 4f1ov 3fô 2f3p2 fpo6e fr2 1fra fra5l fra4m5e fra6mi framma8ne. franko5 fran7se fra7r8 fra1s fra5se. fra7sep frasi8e fra7sk fras6p fra7v fra7v6r f2re. fred8sel freds5t fre4e3 8freg f4rek fre4m5 fremma8ne. frems4 fre7ne fre6sk fres7ko. 
fres5koe fre8s9v fri5a6re 6f5ridi fri4erf f1rin fris6ka fri5s4p fri5s6til frite8re. friti8me. 6froc fro7f fronta9le fro8st frus1 fruta6le. f1ryt frø5b f4røk frø7ko frÃ¥9r frÃ¥ve4 2f1s fs2h fsho6 fsi2 f5sjuk f2sl fsle6ge. f4sm f2sn fs7ne8 fs5ov fs2t f2s3tab f6s5tan f4s9v 2ft f1ta f3ta. fta7f ft1ak fta8la. ft3alg f2t1am f4tana f7tane ft3anl f5tar. ft8as4 f6t1av f2tea f3ted f4tee f2teg f2tei f2tek f6tem fte4na ften3d f4t3enh ften5s6v f2teo fte4r5a4 f4terin fter5s f4test f4teta f4t5ett f8t7id ft3ind f2t5i4s ft5l ft7n f2t1o2 f5tog 3fton ft3r fts5alt fts3ei fts5ers fts5erv ft2s1i fts1k ft2skaf ftsla6ga ft5s4lan ftsle6ge. ft3sto ft6s5top ft5s6tri ft5stø fts1u ft2s1ø4 ft5t4 ftta8ka ft1u 1fu fu8ga. fug9le. fug6l7eg fu6le full3 ful9lar fulle6 ful9le. ful4len fullen8de. ful4ler ful7les fullfø7re funn2 funns3 funnsla8ga fu6ra. fu6re. fu9ret fu7ro furu1 fu7sel fu6sk fus6o7 fu4s5t fut4h futu1 4f1v fva8la 1fy fy5la fyr2 fy8ra. fy4r5a4b fy1re fy4re. fyr7k fyr8ke. fyr5t fy3s fysa7 fy8sa. 2fæ1 1fø fø4da. fø4dek fø5den fø4des fø5des. fødsel8s7 fø1f 2føk fø4le. f8ør før6a fø5rar fø4re. fø4res fø7resv fø8rete før6tin 4fÃ¥ fÃ¥7ren fÃ¥7ret fÃ¥5ri fÃ¥r7u fÃ¥7va 1ga 4gaba ga1bo 6gabø ga3da ga4ded 4gadg 2gadm 4g5adr ga4e5k gaffel5 6gaflo g3aft 6gah 6gak g1aks gak8ta. gak8te ga3kv ga1la ga4la. galei5 ga7len gal4les 4galli 5ga7lo ga4ma. 4gamb ga5mer gamme6l5 6gand. 6ganden 9gane gan5g6en gan4g5j 4gan5k 2g1anl 4gann 4gansa 4gansv 4g5antre ga6pa. ga4pe. ga4p5l gap3s2 ga6p7u 9gar. ga6raf ga6r5ak 2garb 5garbr 5garby gar4dek 7gare. 4garea gar5es gares6ke. gar2i 4garki gar3ne gar6ta. g5arte gar8te. g3arti gart5s6la gar4un ga4ryl ga4sc ga5s2i gas8ka. gas8ke. ga6ski 4gasp gas6sel gas5sen gas7ser gass5ett gass3l 5gast gas5te ga5sto ga6s6t6r g2at ga4te. ga5tens gate5s ga3ti ga1to ga3tr gat6tap gau5la. 9gav. 2gavd 6ga4vei ga4veri 4gavg g5avk 2g1avs 2gavt 4gavv 2gaw 4gaø 2g1b gba4ne. g6byk 2g1c 8g1d gd5alt g6d5au gde3a g2deb g3deba g4dedi gde5lo g2dem g4dend g2de5o g4dera g4de5re g6dero g2des g2det g3det. gdevi8sa gd5op gd1or gdy4d g6d1øy 1ge 2ge1a geak8ta geak6te. gea7ren ge4ark 2geb 6geba 4ge5be 3gebri 4gebrÃ¥ 3gebyr ged4 4gedan 6gedel gedia5 9gedom. 7gedoma 9gedome 7gedomme 6gedoms 4gedos 2gedr 2gedy 2gedø 2gee geen8de. 2gef g5eft 3gefæ 2ge1g2 g5egg g3e2gn 2geh gehø8ve. 2gei g1eie g4e1in gei4r3 gei9re gei8s7p gei6st5 3geit geit3a geit3o gei4t3r 2gej 2gek2 ge9kl ge3kn g1eks 6gekt 5gel. ge4lar 4gelau ge6le. ge7lea 4geled 4gelei gelei5er 4gelek 4g3elem ge5len. ge5lens 5gelet gel5ett 3geli 4gelid 6geligg 4gelit 6geliv 2gelo ge5lov 7gels4 gel3se gel5si gelsk7l gel5t4 4gelu ge5lun gel7ve 4gely 2gelæ 2gelø 2gelÃ¥ 5gem. 2gema 9gema. 2gemi 3geml 4g5emn gem6na 2gemo 9gemr 3gems 3gem4s5t ge3mu 2gemy 2gemø 2gemÃ¥ g2en 3gen. ge2n1a ge7n6am gends6t ge3nea 4genem gen5erf ge5nes gene4t gen5eti 4genev gene5ve. gen8ga. gen5gr ge4ni6n 6geniv gen5k genle6ge. gen3n gens5l gen6sun gen5tr 4genum 4geny 4genæ 4genø 2geo 5geog 3geol geome5 geomet4 ge5on 6geop ge9ope 2gep g2er 3ger. ge5ra. ge6ral ge4r5ant ge9ras ger5di 4gered 4geref 4gereg 4gerek ge6rene 4gerep 6gerese 4gerett ger4i5d 4gerik geri8ka 6gerike ge4rim ge4ris ge4rit ge4riv gerle9g g9er8ma. ger3n gerne6 4geroa 4gerom. 4geromm 6gerop. 4gerope ger5p ger4s5af 5gerud ge5rup ger5v gerø6re. ge4r3ø4v 4gerÃ¥ g6es 5ges. 2gesa 2gese 5gesen. 5gesens 4gesi 4ge5sja 4gesje 2ge1sk ge1sl geslu7 ges4lø 4geso 4gespe ge4spr ge5spra 2ge1st 3gest. gesta7b6l 5geste. ges6tekk ges8ti. gest5rid gestri8de. 6gestÃ¥ 2ge9s8u ges5vik. 2gesy 6gesæ 2gesø 2ge1sÃ¥ gesÃ¥5re 5get. ge5tak geta6le. 6getat. 6getate 4ge3te 5gete. 
2geti 2getj 2geto 2getr 5get5s 2getu 2gety gety8e 2getø 2geu g2ev 2ge7v8a 2geve 4gevir gevi6sa 4g5evn gev6ne. 4ge5vo 5gevÃ¥ 2gey 4geæ 2geø 2geÃ¥ 1gé 2g1f gfe2l 4g1g g6g5al gg5ask g2g1av g4gedi gg1ei g4gela g4gele g2gem g4genu g6gerei gge4rin g4gerø gges6b gges5l g4gesm g6geso g4gesp g6ges6tio g6ge5sv g2gev ggie6 g4gif g4g5i4m gg4je ggje5s g2gl g7glu g4gn g2g1o2 g3go. g5gos gg3rad ggra6da gg5rat gg8re. gg3red gg7rek gg5s4par ggs7s gg3sto gg4sy gg3sø g7g8ud g4g5u4r 2g1h ghe8n ght5ene gh4to ghæ8 1gi 3gi. 4giak gi1ar 4giarb 6giav 2gib gi8c 2gid gi3de gi1e 4giek 3gien gi9end 5gier 4gifa 2gifo 4gifr 4gifu gi6ga. 2gigr 2gih 2gii 5gika 3gike 4giki 3gikk gi5k4r 4giku gi5le 4gimess gi4metr 4gimi 4gimø 2g1ind 6gingeni gi4nin 2g1inn gin5nu 4gino 4g1ins 4g1int 2ginv 4gi5om 2gi1op gio4r 2gip gip5si 6gira gi3re gi4re. giro3 gi6rob 5gis. 2gisa gi3se 4gisel 4gisen 5gisk 6giska gis8lu gis4lø 4giso 4gisp gi3s4pa gi5s4pr giste6ru gis5ti gist5ra gi5stré gi6st5rer 4gistu 6gisty gi5ta 4gi5te gite4s gi2t9r git5te 2giu gi7va 4gival 4givo 4givu 1gj 8gjag 2g1jak g5jarn 2gjaz gje5f 4g3jeg gjels4 gje2n gjen7n8 gjen1opp3s4 gjen5s8 6gjent 3gjer gje7sk gje4s3p gje8v9ak 2gjob 2g1ju 7g6jut gjø9de 2g3k2 gkly9 gl2a 6gla. gla3de 4glag gla4ga gla8ge. g3land g1lar gl5art glar7v 1g2las 7glase gla6s7k g6lass 6glast g5last. 3glat g5lau gl6e gle6dero 4g3lef gle3ge gleg8ga 6glei glei7er. g2lek g7lek. g5leke g2lel 3glemt 4glen g9len. g9lene. g9lenes g9lens gle9pl 4gler gle4ra glere8de. g9les. gle6se. g4le5sk g6le7sm 4glet g2lev 1g2lid gli4del gli7e6n gli8er glig1 3g2lim gli9me 4glio glitt4 6g7liv 4glj gl7ja gl5l g4lo. g2loa 5glob 5g6lome 1g4lor gloria7 glori6an glorie7ne g4los glo5ste 6glov gls4 8glu 5glug g5luk 6glun gl5ung glun8ge. gl5ut g5lyd 3glyf gly4se. g2løg glø8pa 2gløs gl7øy. 5gløym 2g1m g4mete g4metr g5mé gmini6 gmo4e g3mu gmu8le. gmÃ¥6la g1n gn1ak gna5lem gna4lo g2nav gn1d gndø6 gndør5 gn4e g2nee g4nelo g6nero g6nerø gne1s4 gne8se. g4nesk g4nest g5net g6netr g2nev g4nib g4nid 5g6nik gning4 gnings5 gni4s g6ni5sk gni6st gnist9r gn5k4 g2nom gn5o6p gn7o6v gn5r gn1s gn4skr gn6sm gn4som gns4pr gn4sti gn2s1v gn5t4 gnu5re gnæ6re g2nÃ¥ 1go 7goan go1ar 7goar. 2gob gobe5 go4da go5dal god5ar 2godd go4de. gods9t 2gof go9ga go5ge. go9g8r 6goi 2gok gok4se. go3le gol6fa go5lo gol4va gol6var go4lÃ¥ 2gom g7o6ma 6gometris 7gomm go4n5and go9ne. go7ni go5nok gon9s 2g1op 3g6or4a5 go5ra. go7rae go7ras 2gord gor6da go1re gore8a7 2gorg g4ori gori5e6 g5orv gor8va go5rø gos3p 8g7ost 6goto got6r 4gov go8ve. go8vi. 2g3p6 1gr4 8gr. 4grab 6grac gra5ce 9grads graf5f graf5t6 gra4m5 grand3s4 gran5to grant4r gra9se. gra9set gra6sk gras3t gra5t gra8te. grati4s gra4u gra4v3ak gra7vis gravta8 2grea 4g5reds gre4en 2gref g7refe g4rega greie5n grei6ene 6g3reis 4grek 2g5rel g5rent 4grepi grep4s3 g7rese gres6sak gres6sent 6g5rest 4grett 4g5ri. grib3 4grid 4grif 4g1rig 7g6rip gri4s5o gri4s5ø gri5sete 4grit gro9ar g4ro7i grom5m g4ron gro5sk gro3ve gro6v5in 4g5rui 4g3rul grun7g 5g4rup gru7sa grus5t 2g1rut 2gryd 4g5ryg 6g5rytm grø5de. 6g5rør 4g5røv grø5ve 4grøy gr7øy. 6grÃ¥d grÃ¥6da grÃ¥k4 grÃ¥7n grÃ¥5te. 8gs1 gsa2 gs5akers g7sal. g7sale g5salg. g5salge gsam3 gs3amb gs7amn g7s8ang g3s6ank g5sard g2sas g7sast gsbø6n gs2ce gs5ef g2seg g2sei g2sek g5sekr g7sekv gse9la. gsel4s5a gsel4st g2sem gs6ende. gsen6ke. g5sens g3sent g2ser g5ser. g3seri gse4st gse4t g3sete gs3eti g9sets gs4ett gse2v gsfø2 g2si g5sider gs5is gsi2v gs4jar g3sjef g5sji g2sjo g5sjÃ¥ gs4ka. 
g5skad gs4kal g5skall g6skam g4skan g4skapa g5skape g4skapi gs7kav g5skilt g7s4kj g7s6kjæ g3skot g5skren g7skug g6skv gsla6ga gsle6ge. gs6leri g4sluk gs4lun gsl9ut. g5slyn gs5med g9s6mert g7snel g5sog gs9ope g7s8opp g7s8oppe g5spal g3spel gs2pi gs5pik g3spil gs5pile g6s5pors g3spur gs3s4 gsse4e gs5tak g9s8tand g5stang gs4tasj g7s4tat gstatsrÃ¥7 gst6e gs5te. g5ste4e gs4tell gs5tema gste6ma. g7stemt gs4tere g5s4terf g5s4terk g5s4tern g5s4ters g5s4terv gst7evnen g5s4ti. g3s4tie g3stif g3s4tig gs5tiÃ¥ g5sto. gs4tol g5stol. g7stola g5stole gs5toll gs4trap g5strau gst5rea g5strede gst5rei g5streke gs6tret gstri8de. g5s4trof g3strø g5s6trÃ¥ g5stue gst5ut gstyr8ka gs4tør g5s4tÃ¥ gsu2 gsu5g gs4vane gs5vik gs7væ gsy2d g7sym gsy6na gsy6t g2sø gsø6ki gs6øt gsÃ¥4 2g1t g3ta gta8ka. gta8ke. gta6la. gta4le. g9t6e gte6ke. gte6ma. g3ti gti8de. g4t5if gt6re. gtrÃ¥d5s6 gt7sv gt5t gtu8en. gtvek8 gty8e gt9yt 1gu 4g5u4bÃ¥ gu2di guds3t gu4el 6gug g5ug6l gui4d guid5ar gu4le. 4g5ulk gul8ke. gul4la gul5o gul7ø 7gum gun4g gur2g gu4ri gur8na gur6ne. gu5rua gu3rue gu4st gu1ta 2gutb 4gutd g5ute. 6gutg g7utl 4gutn 2guts gut4tak gut4tes 4guttr 2g1v gva4ke. gva8la gvedli6 gvi8ta 6g1w 1gy g9y8a gy4da. 7gym gy3ne gyr6 gy8sa. gy6se. gy4te. gy4ve. g5æt 4g5øk gør3s 2g3øv gøy9a gøye6r gøy5n 1gÃ¥ gÃ¥6as gÃ¥6en. gÃ¥4er. 4g5Ã¥4k 4gÃ¥m 4g3Ã¥n gÃ¥n8da 2g1Ã¥4p 2g3Ã¥re gÃ¥2s gÃ¥3st gÃ¥9ven gÃ¥4vet h2a ha4a haba9 4hae ha2el 4hafi ha2g ha3ga ha4ga. ha3gen hai1 ha7ism ha5ka ha4ke. ha5ken ha7kera ha4ke5s ha1la ha6la. hald4s7 ha4le. ha4lev hal6lei hal6lø hal4s3k hal4so hal4s3t ha5lu hal4v5 hal5v6ar hal5v6o ha5ma ha8me. ham6nest ha5mo ham4st hams4t7r ha5na han6d5r hand7skr hand5ø ha4ne. hani1 hanis4 han2n3 han4ne han6nel han5nen han5n4es han5n4o5 han3se han4sk ha4pe ha4re. ha5rei ha4rel ha3rem ha4res har5k ha5rov har7se har5tre har4tri ha4sj hasj5e ha2sl has6p7l has5v ha3tar ha4te. hat6le h4au hau5ke. hau6st hau4t5r hav4a ha4va. ha5van5 ha4ve. ha4veg ha4vei have3s ha8v7ert ha1vi ha4vo hav4sl ha6vu6 hav1ø 4havÃ¥ 2h1b hba4ne. 4hc 2he. he2a he7ans heat4r heb5n he4de. heder6s5 he2d9r hef9ta hef7tes he3ge he4ge. he2g3r he2i heia3 hei5en 3heim he3ins hei4sk heit4s he2k hek4sek hek6serin hel1a he4la. he4le. he4l5ei4 hel6lesu hel3o hel2s2 hel3sa hel4se7 helses4 hel3sk hel5sp hel5ste hel3sv hel4tes hel9ve he4mak hem5ne hem3s8k henfø5re hen5o6 hen1s2 hen5se hera9da he5ret he4ri. her5j herle9ge herli9ga 4her5n he1ro he4ro. hero9a her7p her6rei her6resi her6ret her6ska. her8s7kare her6sv her5un4 he2r3ø he4se. he2sp hes7pa hes5pel hes5pen hes9per heste5ri hes4tes he6stø het2 he4te. he5t6i het4s3a4 het4s5p het3t he6va. he6v7arm he4ve. hev9na. 2hf 2hh h5hu hi5ao hi4bak hie4n hi2et hi4f1 hiff2 hif9r hi6ge. hik7e himme6l5o hi6n7an hin4nes hins4 hins9ke hi4pl hip3p hi1ro hi2s1 hi9se. hi3sen hi5s4i 4hisk hit7o hi4t5r hit5ti hi8va. hi4ve. hiv1s hjar4 hje4 hjem7e hjorte5 hju6l7 hju7l8e 2hl h5lan 2h1m hma8n 2hn h2na hne6n hn1s h4o ho5ar. hob6 ho6da ho4de. ho5den hode3r ho5der. ho5dy hof4f3 hof4f3a4 hof4f3e4 hof5f6er hof4fi hogs6 hog6str hog7stra hoi5 ho2la ho5lag ho5lan ho4le. ho4lin hol5l ho4lom hols4 ho4lu hol7ut ho4me. ho4mo. ho4mod ho4mos ho5n6o ho4o ho8pa. ho4pe. ho8pi. ho5ra ho6ra. hor2e1 ho1ro 2hort ho4s hos5a ho5sen ho5ser ho5si5 hou2 ho1v hove5re ho4vé 2how h1p 2hr h1ra h1re hri5ne hri2s3 6h1s h5s4e 4ht h5ter htere4 h1tr ht5t hu4da hu4d3 hud5s6 hudsÃ¥9re hu4er hu6et. hu4ga hu4ge. hu8ja. hu6ka. huk8ra hu4la hu4le. hu4leg hu5les hu4lev hul5v hu5ma hun6des hun2n3 hu1ru4 hus1a hu8sa. hu5s6a6r hu6s7arr husa7r8e hu4se. 
hu4s3ed hu2s1i hu2s5j hu2s1k hus7m hus5s4 hus1t hu8str hust9ran hus4tre hu6sty hu6s5u6 hu2sø hu4va hu4ve. hu4ves hv4 hva5le hvas5 hve2r hver3a hvi5l6i h4v4ir hvits4 hvo2 hvor5 hvor5i6 h6y hya3 hy2bl hyd4 hy2e hye5ne. hye9nes hyg5gel hy6la. hyldes7 hy4le. hyr4des hys3 hy8sa hy4se. hy2s1j hys5t hæ5g hærfø9 hær3s6 hø4e høf5 hø2g høg3ri høg7rø høk6 hø6le. hø4na hø4ne. hø5rar hø5ren hø5rer hø4res hør6sp hø2s høst7a høst5ø hø1va hø1ve hø1vi høy5a6 høy5k høy7n høy7rar høys4 høyse6te. h6Ã¥ hÃ¥7a hÃ¥8le hÃ¥6na. hÃ¥n8dr hÃ¥nd5skr hÃ¥5nel hÃ¥6pa. hÃ¥4pe. hÃ¥4p5l hÃ¥1re hÃ¥1ri hÃ¥r3s hÃ¥r7u hÃ¥8va. hÃ¥4ve. ia9al i1abl ia1bo i2a3de i1adj ia5d8r ia1g2 ia2ge ia1in ia1kr i1aks iak8se. ia5ku i1al. ial1a ial5ein iale4t ial5eti ia4l5ett i2alf ial3g ia4lin i4alk ial3op ia6lov i5als. ials4t i3alt. ial1u ia2lø i1an. i2ana ia4nal ian5ald ia5nar i7andr i5a4ne. ia8nes ia7net i5ank i1anm ia2no1 i1ans ian3sa ian3sl i1ant ia7nø ia2pa i3app i1ar. iar4do iard5s4 iar4du i2are i5area ia5r6e8l i5arn. i9arns i7arska i6asi i1asm ia4sp ias3s6 i3assi i1ast i7ast. ia1t ia5te iat6r iaty6ra i1att i7auk i1av i1b2 iba4ne. ib5bo ib3bu i3bere ibe4ro ibi5er. ib7lar i2b3le ib4leg ib4le5s i5bo ibo4e ibo7n ib4r ib3st ib8t ibya7 iby9ar. 8icanarar ice5ne i1ci ick1 ick7ete i1co i5cy 2ida i5dal i3das id1av id3del iddel5u id6dep 2ide. 3ideal i2deb i6def id5e4ge i7deki i9del. id3elv 2iden i5dend iden5sv 5identif 5identit ideo3v i6derap i9de5re. i7deren i9deres iderla8ga iderle7g i7dern i5ders i6desm id6gem id4ges idi4en 1i2dio i8dj id7jer id9na 1i4dol id9ran id5reg 4i3drev 2idri id2s1 id5sam idse4 id6sel id5sim ids5l idsla6ga ids3t id4s5tu id2t1 i2d1un i2dy 3idyl i3dyr i3dø i6d7øy id7Ã¥ 2ie ie1a2 i2ed i1eff ie4ge iego4 ie1i iei6d i1eie i3eig i5eini iek8l i1eks ie5l6a i2ele i5elem ie4lev i7e6lim i1els i9elt ielø8pa i8e9ma iem7b8 ie3me i1en ie7na i2end ien6dela ie8né i2e5ni ien7n ie5no i5ens ien4sk ien4s5v ien4t3r ie5nu i6eny ie4ran i4erd ie6re. ie5reg ie4r5eng i6eret i2eri ie4ril ie4ris ie4riv i2erl i6ero ie7ro. ier4ra ier4sp iers3v ier5t ie3run ier5v i2e1s i6es. ies4c ie3se ies5s ies4ti i8es6v i1et i2eta i5e4tabl i3etat ie9te ie5té iet4re ieu2 ie7ved 2if ife4s1 if2fa ifi6e ifjø8 if3le if4les iform4 if1re i8ft if4tal if4tere ifte5s ift2s i3fø 2ig i2gae ig3ann i7gark ig4art iga3ru iga7te ig3att i2gav ig3d i6geb i6gedi i6gedomm igedoms5 i2gem i3gen ige6no i5gers i4gerø i4gesp ig3e4ta ig5ett ig4ged igges4 ig4gra igg7s2 i6gh i2g1ia i5gib i4gim igion4 igje9va ig1l igl6d igli6se. ig5loa ig5neu ig5no igof6 igo5fr ig1om igo4no ig2ra ig2re i2g1rø ig3rÃ¥d igrÃ¥5t igs4al ig5sel igs2j ig5s4ka ig3s4kr igs4mu ig3s4pa igst4 igs4ta ig4ste ig5stek ig7stel ig5stem igs4tra ig5s4va ig1un ig9ut i3h ihen3 iho7le. i4huk ihu9la ihu5le. i1i i5in i7is. i4is8e i6ita i1j 2ik i1k2a i2kab i5kabel i2kaf i2kak ikan9d i2kao i4kapo i5kar. i5kara ika5re i4karei i6kasa ika3sp i2ke. i2ked ik8ei i9keleg i5keli ike5lu i5ken. i4kena i5kene i9kens i3ker. i4kera i5kere. ike5ri ike4r5o i2ke1s2 ike3si i6kesk i5keso ike5su i5ket. i5kets i1ki iki9ne ik5inn iki5st i1kj ik4kaf ik6kana ik4kap ikk5arv ik4kas ik4kat ikk5att ik6k7ent ik4keru ik8kesk ik4kest ik3kjø ik4kjøp ik4kl ikk5lag ik2ko ikk5ord ik4kr ikk3re ikk5sv 4ik2ku2 ikk5und ik2kv ik6ky ik1l i5klæ i1ko i2koa i9koar i2kob i4kod iko5de i2ko3f i4kog i4koh i2kok iko5na. i2koo i5k6ord i4kore i2kov ik1r ik4rak ik5rem ik5robe ik3ros ik2ry ik2sa ik4sek iksmÃ¥8la ik7sot ik5spel iks5ti iks5to ik8stu ik4tav ik4teg ik2t1r iktsla8ga i1ku iku6le. 
i6k7u6t ik1v ik4vin i1ky i3kø i3kÃ¥ ikÃ¥8pa i1la il5adr il7af i2l3ak il3al i4lana il6and i4lark il3art il5ass ilas5t6 ila5t il5av il4dak il4deko il4dete il7dj ild3re ildsfa9re ild3s4t ilea9r ile8are i3le7e i2lef ile4ge. i4l3egn i2l1ei i6leig i2lek i2lel i4leno i4lero ile1s i4lese i4le5sk i4lest ile6tri ilet5te ile4tu i1lé ilfø5re il5ge il1gl i4lid ili5ers ili9ga i5lik i2l1im i4l3ind i7line i4lins i4l3i4r ili5s6tik il3ja. il1je il5jese il1jo il1ju ilke3 il5ker il4kes ilk4o6s il5ku il4lab il6lam illan8da il6lap il9lau il4lee illei9e il6l7ender il6lesk il4lev illi9ga il4lo5m ill3s2 ills5kÃ¥ illæ4 illæ9re. il4m5est ilm5e4v il4mi ilm5s i1loe i9lolo i4lom il5omv i5lon il3opp i4l1o4r i5loso ilot3u i4love ilo1w ilret4 ilset8je il3sl ilsla7 ilsle7ge il6s7li ils4mu ils2p ils2t il5str il7su il3s2v ilsva9ra ilsva5re ilta9la ilt5rett il6t7Ã¥ ilu4h i5luk il7ul8 il3un i5lur i9lus il1ut il5v6 ilve8d ilve4r i1ly il1Ã¥8 ilÃ¥r6 8im. 4ima i2mad i4maen i2mag i6m5akk i4mako imal8n i2mam i9man i2map i5mar. ima5s i4matr i8mb2 i4mee i2me1g i2mek i6melu im5enh im8et i9met. i4meta i4meti i6mey i8mé imi9la imini6 i7mj im4lev im8l9u im5m i6mog imo9l im5ord imor8da im7o6v imp2 5imperi imp4l 1impo imp9s impse8 1impu im4ref im4rek im4res im9se ims4k5l ims3kr im5s4me im1s4t im6sti imta8la 4imu i3mø2 2in. 6ina i4nag in5agg i5nakk ina4let i4na2m in3ame in2an i9nane in5ann i2nap in5app i5nar. i5nars i4nask i4nasp i4n5ass ina4t5ak ina7t8r i6nau in7auk in5avl in5avr ince2 in7der 6indig in4dog ind9ra in3dru indr5ø ind5sk ind3sp ind5s4t 5industr ind7Ã¥ 4ine ine8are i2ned inedy6 in5egg in5eid in3eie i6nele ine4li in7eng ine8pa ine8pe. i7nere. i5neren i9neres ine4rik i7nern i5nert i5nes. i4nesk ines8ka ines8ke. ines4st ine5s4ti i4nesø i2nev 8iné 3infek in4fos in4fot infu9 4infy in4g5ald in4gav in5gebj in6gem 5ingeni in5geri in4geru in3gev ing5jen in4goa in4gos in2gr ing5rep ing7ris ing2s ings5om ingst8 ings5v 6ingu in4g5un in2g5ø i7nia ini9ar. i2nib i5nie i4nif i2nig i4nil ini7m i4ninn i2nip i4nisa ini3se i4nises 5initia i4nito 5injek 2ink in5kel in4kero in4k3la in4kok in5l inla8ga 2inn. in4nal in4nem in4nerø in4nesi innes6t 4innet in6n7ette innfa9s 1inng 5innhal 3innho 2inni4 in4ni. in4n5om in5n6ova inn1s4 6inns. 3innsa inn5se inn9sen inn9ste inn7sve 1innt in4n5u4 4innæ in2nø4 2ino ino5a i5noar i4nod inok6 in7oks in7org in1s 4ins. 8insa in9sa. in7sal in5sar in5se. in4sek in3sen inseri8e9ne in3si insis5 6insj in4sja in3sje 4insk in9s8kas in5skat inske4t ins4ki in3skj ins5kjøt ins6kor ins4k3v ins6kø insle7g ins8lega in3slo in3s2lø in5s4ma in3sop 1insp in4spa in5s4pl ins4po 5instal in8s5te. ins5ten. ins9tens 3in3s4ti in4stin 4in3s6to in5stra inst5rel 5instru in6stÃ¥ in7sul in2sv 6in3sy 8int. 2in3ta inta8la 4inte. 5in7teg in8t7ege in5ter int5ess in5te4t5 4in5ti inti5me int2r intran4 int4rer in5tres 6inu inu6i i8n9ul8 in7ut. in9u8te 1inv 6invev iny4i iny4t 6inø in7øk i7nøt in3øv 4io io1a4 i2ob io8d8 io4dere i1off io5gn io1i i1oks iol4 io3le iol7jen io3mu io4nans io4narr io6n5erst ion3g4 io4nin io4nu io1pl i1opp io5ra. ior8da io1re io4rie io4r5in iorla8ga iosk3v io1st iota6le. io3t4e io4tra i5pap ipa4ti. i4pee ipela9 i7pe6p i1pi ipi1e ipi9ne i4p5ings i1pl i2ple ip4peli ippel7s6 ipp6lan ipplæ8re. ip2pr ipp4s3t ippsy8na ipp4sø ip7pun i1pr i6pra ip5ru ipru8te. ips1a ipse4l ipsfø5 ip2si ip2s1k ipsle6ge. ips1t ip6tar i5py7 i3pø 2ir i1ra i9r6and ir5ark i9rast ira6tr ir5de ird5s i1re ir3eie ir6ek i3repo i3rer ire6r5a ire7st i3ret i8reto ir7g i1ri iri4a iri8ka. ir3ind ir3inn ir5inst iri9t ir5ka. ir4kat ir5ker. 
irke3s6 ir4kest ir4kle ir5kv irk5ø ir4mag ir4mal ir4mast ir9mé i5roa i1roe i4roi i1rol ir5opp iro1s ir3re ir7sko irs2p ir3s4t irt6 ir3ta irti6g5r i1ru ir5u4k iru8m iru7sa. i1rø i1rÃ¥ ir7Ã¥6l i1sa i4saks i4sanb i4s3ang is3ann i4s3ans is3ant i7sas is5aud 3isbry is6cen isch5 is3co i5scr i2sed i4seel i4seen ise5ern i2s2e3g4 i2sei is3eie is5eig ise5is i2sek i3sekt i4sekte i6s7eld is4e5li i2sem ise5ne i4seno isensi6 i5ser. i5sers i2ses is5ess i4sete iset8je i2seu 8isé isha7ne 3ishav is4h5in ishø9ve isi6ene is5ild i4s3ind isis5t is7jun is9ka. is3kal is3kar isk9art. isk5arte i8ske is5ke. is8kee is4kel isk5els iske5s4 is3kje i3skjo iskla8ga is6k5le isk3lo isko5g is7kogr is3kop is3kot is1kr is6k5rin is1ku is4kul i5skula is1kv is4kvi is1l i6slu isl7ut isl9øy. i5smak is4med ismo5e i6smus is5ne is5nu 1isol 6isold 5isomo is5omr iso3pa iso5pe iso7pr 3isot is1ov is1p is4pan is5pane. is6pis is7pru is4ses is5si issis5 is6s7kj is4sko is7skr is5sky is4skÃ¥ is5sok is7sto is6sve is5svo is9s4ær is4søk i5stab ista9le is5tane is5tap i4s5tar ist5avb ist7avi i2ste is5te. is4tek ist5ekt istel5l iste6ma. ist6en is5ten. is5tene is5ter. is5tern is6terø is6té is4tik is5tilb is5tis is3tok is3tol i2str i8s8tre. i4st5ren i5strer i6strer. i4stres i6stret is3try is3tus 6isu is5uf is5ul is1un is1v is4vak is7w isy8na isy5r i7s8ys is5yt i2s1øk i4søy is7Ã¥k is3Ã¥r i1ta ita4l3a ita6l5ers ita4lo ita4ly i6tang itan7s6v itau4 i4tav i4tec i2teg ite8ke. i7teky i6t7elg i4teni i2teo ite3re i4tero iter6s5 i4te5ru i2te1s ite6se. itet6s5 i1té it2i iti6er. i4ties i2tif i4tikam i4ti3kv iti5me i2tio i2tip iti3r i4tisa i4tisen i4tisi i4tisj i4tisko i4ti3sp i4tist iti5str i2tiu itiv4 itivs5 it7jaran it7jer it7ji itma6le. itnes4s i1to ito9a i4tom it5ord ito4s itostra6 ito8v i1tr i4tra. i6trae it3rel it3ren it5rim itri8ma it3rin i5tris it7ro6s it1sa it4s3ei it3ser it6ses it5sku itsla8ga it1sn it6sti it3sun it4tag it6tak it4tal itta9la it6tann it6tap it4t5at it5ted itte4l5o it4tenk itte5s4 itt5ett it4tid it4til it4tj itt1o it6tof it7ton it4tr itt5skr itt4sø it4ty it7tys i1tu itu5e itær1 i1tø i5tÃ¥9 i1u iu6a iu2b i5u6l i5um. ium4f5ar iu2m1i ium5m iu4mo i6umse i4um5s4t i4umsu iu2n iu8p i2ur iu5se 4i1va i4vad i6vaj i4v3aks i5val iva6la. i4vana i4v5ank i4varb ivar5s6 i4v5ass ivat3t i1ve i2veg ive6ge iv5eg6n i4vei iv7eig i2vek ive4ra ive5ras iver5d i6veret i4vesk i4vesti i6vesv i6vetø i2vev i1vé i1vi iv5ins iv5is. iv5isb ivle6ge. iv3n i1vo iv7om iv3r iv4sal iv2si iv2sk iv2sl iv2sn iv4so ivs5v i2v3un i5v6y ivyr8ke. i9væ iv7Ã¥pn i1wa iwi5 i5wie6 i5y iyr8ke. i5za. i7zas i4ze i1zo i1ø i1ø4k i1ør. iør5i iør5s i4øs iø3se i1øy i1Ã¥4 iÃ¥rs7l iÃ¥t4te. ja1ak ja9ar ja4da 1jae ja7en 5jag. ja4ga. ja7gar ja4ge. jag6r ja4ha ja4hv ja4j jak4kel jak7kele jak4ko ja8k7ok jakk5s4 ja1k8r ja8kre jaktel jakts8 ja7ku ja3lo ja1lu jam7b 6jambi jamhø8ve. 5jamm jams2 jan4gr j5anl ja6ra. ja6r5ap ja9rek jarn3s4 jar5ta jar5te. jar5tet. jar7ti 6jarø ja6rÃ¥ ja7se ja8ski ja5sti ja5ta jau4ere ja9vi 1jaz j1b 2jd j2e 2je1a2 je5ak 2jeb jeb9b je4deb 6je4dek 4jedr je4ep1 6jeev 2jef je4f3et je2fj jef5lag je4f3re jef4sa jef6s5i jef3t4r je5ge jeg5ge 6jegl jeg6les 4jego 4jegr 4jegy 2jeh je1i je5i8s 4jej 2jeka jek6keru 4jekl je4kli 2jeko jekt3a jek4tan jek6tes jekt5ev jek4t5in jek4t3r jekts4t 4jekø 2jel je6legr jelei7er je7lele je4les jel7ge jel6lag jell5ang jel5len jel4lo jelt3 jel4tr jel9ut jel7va. 2jem je4mia je4mit jemp6 jem5pl jem5se jem4sti 2je7na jen1d jendis5 4jendr je3ne. 
je7ne8s 4jenet jen5ged jen7g8la jeng5s4 2jeni jen3k 4jenl 4jeno 4jenu je2n1y4 4jenæ 2je5o4 2jep je3raf je4r5ant jer4dep jerd6s je3r6e 4jered 6jereg 6jerei 6jerenn je5rik je6rim je6rinæ jer4kes jer6k5l jer4kv jerle7 jer6mu jer6n5as jern5sl je6ron jer4sp jert5s 4je1ru 6jerÃ¥ 5jesf 5jesh jes4kil 2je3sp jes4tea je6stim je3str 5jesut je5sve je4t3ag jeta6ka je9tar. je7tara je9tare je4t3ru jets2 jet4t5an jet6term jett3o jett5s6 je4t3v jety8e 2jeu6 4jev6a je7vark je6vas 4jeve jevi4s jev7na. jev7ne 6jeø j1f 4jg j1h 4ji. jib5b ji7e6 6jik 2j3inf ji7ro ji6sj 4jiø 2j1k j4kap 6j1l 6j1m 6j1n 6jn. j5ni j2o 1job 5joc jo4da. jo6dat jo6dis jo4fl jojo5 jok9ker jokk9o8 jol5t6 j3omr jon2 jo5ne jon7n jons1 jons5a jonse4 jons3p jons5t jon6sti jon4str jonsu4 4jop jo5ra jor6dek jor6d7erv jor6d7is. jor4d3o jord3r jord5s jords4l jor6du jo1re jo4rie jort6a jor5tet jo3r4u 2jos jo3se jo4ses jo5stei 1jou jour5 j1p 4j1r 2j3s2 j1t ju8are 1jub jubi3 ju9bo juda8s7 ju3de ju2do ju7doe jue7ni ju4e5re jue9s jug9le 5jugo 4ju2k ju3ka ju3ke. ju3ken juk9sar ju2l ju3la. ju5lar ju5las ju8l9ei ju4li. ju6lid ju4lik jul7inn jull6 1jung jun7ge j6u6ni 8jup jup7s ju6p5Ã¥6 ju4ra ju7ras 3juris ju9ro ju3ru 5jury 1jus ju5so5 ju1ta jut5o ju6va. ju6vak ju1Ã¥ j5v jy1 jy8de. jy2p3 jæ5le. jæ3ra. jæ9ran jæ9res. jæ7te. j2ø 1jø. 1jøa. jø5b4 1jøe 2jøef 4jøeg 6jøei 2jøek 4jøend 4jøeng 4jøeta jø1f jø3gr jø9kj 4jøl jø4les jøl6ver jø5me jøn5ne jø5pa jør4kel jør4sp jørs5v jør4tel jø3ru jør7ut jø1rø jø4ses jøst2 jø5ta jø6t7av jø3te. jøte4t jøt5eta jø1tr jøtte4 4jø1v jø4vel 2jÃ¥ jÃ¥5ar. jÃ¥9ge. jÃ¥6la. jÃ¥4le. jÃ¥7les jÃ¥5let 1ka. 2kaa 5kaa. 5ka4an4 k3aas ka3at 6kabes ka7bo 4ka1br 4kaby ka3da 6kadale ka5del ka6del. ka4deri 4kadg kadi7e 2kadr 5kadu 4kady 3kaen ka3fe kaf5fere 2kafo ka9fr 2kaft kaf5ta 6kafø 2kag k5a4gi kag2r 2kah ka5isk ka5ism 6kajo ka4ka. ka4ke. ka4ked ka4kel ka4kes ka4ki. ka4kis 6k5aksj 4kakt ka3ku ka3lam ka3lan ka6lant ka3las kald5s 5kaldt kal2e 3kalen ka5lev 5kalis k6al8k5 kal7k6e 5kalky kal7la kal6lero kall4s ka1lo kal3p kal7s6v kal4ves 1kam ka6ma. ka8me. 4kameri 4kammel kam4p3i kam4pr 7kamu 1kana ka8na. 4kanda kan6da. 5k6ande. 1kane ka5ner kan4i ka5nin 2k1anl 4k1anm k3anno 5kano ka4no. ka8nom 4kansa 6kansi 3kansl 1kant kan6t7end kan6t9r 2kap. 5kapad ka4pak 3kapas ka5pe 5kapell 4kapen 3kapit 1kapp kap3re kap4s3t 1kar. 1kara 4karab 4karam karan5 ka3rav 1karb 4karbei 4karea ka6rek ka5rel 1karf 5karg ka4ri. ka6rid 6k3arki 6k5arkt 1karm kar3n ka3rol kar3om kar5pes karp3s 3karri 7karsj kars4t 1kart kar5ti 5karu 4karÃ¥ 1kas 7kas. ka5se4i ka3sek kas5ev kas2ju 6kasjø ka4sk kas3ka kasko5 6kasp ka4spi kass6 kas6sad kas4sak 4k3assi kas4so ka4st 8kas5to ka6su8s7 kas2t3r 1ka7t6a ka8tak8 ka3ted 3kateg ka3tet ka1to ka4t5op 4ka1t4r kat7s kat5ta kat6tak kat4tel kat6term katte5s kat4til kat4tri ka4t5y k7au4r 2kav ka7ves k9avis kav8l9u 9kay 6k1b4 2kc k5ce kcen4 2k1d k5d6v 1ke 2ke1a8 ke9al 2keb 4kebr 4kebuk 4kec ked4 ke5da 4kedag 4kedal 5kedan. 9kedant ke5de 6kedei 4kedem 4kedes 7kedes. 2kedi 2ke5dr ked8sk kedu6 4kedy 2kedø 2keeg 2keek 2keel ke4ep ke7ern 2keev 4kef k3eff k5eft 2ke1g k3egg keg8ga 5kegladh ke5h 3kehet 4kehj 2kei k3ei2d k1eig ke5il ke1im k6e1in keis2 2kej 2kek kek2l ke3kn k1eks k5ek4t k6el. 2kela ke3l4ak 5kelal ke6lans ke6lat kel5eier kel5eig kel7eik kelei8ke. 6keleis kel3e4l 4k5elem 4keleng 4ke5lep 4keles 7k6elet 4kelev keli8e8 keli9en 3kelig 4kelign 4kelinj kel5int 4kelis ke4lise 5kelit 4keliv kel5l 4kelof 6kelov. 
4kelove kels4 kels7kar kel5sl kel5sp kel5st kel3sv kelsva9 kel2t kelt3e 6kelua 4kelue kel9uka kel5uke ke4l3ur 6keluv kel5v 2kely 2kelæ 2kelø kelø8pa. 6ke5l4Ã¥ kelÃ¥7re 2kem ke4mit 5kena. 6kenam ken6ap ke4nas 5kenatts ken5de 6k5en6den 6k5en4der 4kend5r ke3ne 4keneb 4kenek 4kenel 4kenem 6kenett 4kenev 3kenf ke4ni 5k6enh 4ke5niv ken3k ken7n kenne4l3 ke4nom ken1s2 4k5ense kens6t ken7te ken5t6r 4k3entu 6ke3ny 6kenø 2keo ke7of ke3on 4kep 5kepi. 9kepia 7kepie 6kepl kera7l 4keram ke4ran ker5ant 6kerau 4kered 4kereg kerei4d 4kereir 6kereis 4kerek 4ke5rem kere6n 6kerene ke6r5eng 4kerenn 4kerep ke4r5ert ke3res 4keress 4kerest 4keresu 4keret 5keret. 5kereta 7kerets 3keri ke4rif 6kerik. 4kerike 6kerikt ke4rim ke4rine 6keringi 6keringj ke4rinæ ke4ris 8kerist. 8kerista 6keriste 3kerm 7kerop 5keror 4ke3ros kero6se. ke3rot k4ert ker5to ker3t4v ker3un 4kerus keru6se. kerø6re. 8ker8øya 6ker6øye 2kerÃ¥ 5kes. 4kesam 9kesam. 7kesdale 5kese. k5esel ke5sil 4kesit 2kesj k9es8ka. 4k5eske kes8ke. 4keski 4keskj 6keskÃ¥ kesle6ge. kes5ped 5kespot 5ke4spu 5kest. 4ke3sta 5keste. 4ke5sted 4ke5stei kes6tem 4ke5s4ten ke3sto 4kestri 4ke5stru 4kestu 4kestø ke4s5un kes1v 2kesy kes1Ã¥ 2keta keta6ka keta6le. 5ketals k6etar ke3te 5kete. 4ketek 7keth 2keti 2ketj 2keto 2ket2r kets2 ket6t7enh ket8tes 2ketu 2kety kety6e 4ketæ ketøy6s5 2ketÃ¥ 2keu2 ke5ur 2kev kev2a keva8ne. 5kevas 3kevel kevi6sa. k1evj kev6ja kev4je. k3evn kev6ne. ke3vr kevæ8ra ke2yt 6keæ 2keø 2keÃ¥ 1ké4 két5 2k1f k5fi 4k5g2 k3ge k5gh kgu4 4k1h kh5ak k4har khe4o khjul8s kh6m ki3a2r k3ideo 4kidol ki3dr k4ie2 kie5re ki6el ki3er. 2kif ki5g4 ki6ka. kikk8s9 ki1k8l ki3k2r kik4s ki8la. 3kilde 6kildr ki4le. ki5li 4kilj kil9ja kil6le5st ki7loa ki4log ki3los ki4lov 2kilt ki7lÃ¥ ki8ma. ki4me. 5kimo5 ki4mol 2k1imp kin9and ki4nark ki5nas. 6kind ki5ne ki6ne. 2kinf 1king king5and kin3ge kin5ing 2kinn kinnsi8da kinnsi8de. kinn4sp ki2no kin5ov 2kins kinsi6de. 2kint kinti8me. ki6nu 2kinv kio4 4kiol 7kios 2kip ki5pe ki4p5l kip5pe kip2s3 1kir ki5re ki5ri 4kirs ki3se ki5sko ki3smu ki1s2p ki5stav ki5str ki6ten ki7ti 5kito ki4ton 4kiu k6iv ki4vek ki4v5enh ki6vi. kiv3s kj2 3k8jan kjap5pe 6k7jarn kje7fj kje7f6r 4kjegg 3kjeld kjel7leg 5kjemas 3kjemp 6kjenk kje9ny kje1s kje6t5r kjet5t 6kjor k8juk kju5le. kju5len kju9r 3kjærl k5jødi 1kjøp 3kjøri kjø4tr 8kk k1ka k2kab kk3aft k2k1ak k2k3al k2kam k4kanb kk5and k8kand. k6kanden k4k5ang k4kann k4kao k4karb k6karg k4k7art kkar6ta. k5kast k6kasu k6katt k2k1au kk1av k6kedo k4kedu k2kee k2keh kk1ei k5kel. kke6lei kk5e4lev kkel3t4 kkelu4 kke4luk kkel5un kke6nan k5ker. k4kerei k4kerel k6kerent k4kerig k4kerit kke5r6und k4kesa k5kesau k4kese k4kesi kke3s4l k4kesp k6kestad k4kete k5ketet k4ketø k4k9g2 k1ki kk3ind kk3inf kk9inn kk3ins kk3int k2kir k1kja kkjek8 kk5jent k4k5jern kkje7t kk5ju kk5k kkla4g kk3lan kk8lar kk7lau kk1lo kk1ly kk3læ k4kn kk5ne kk5ny kk6s5v k1kod k1koe k4k1of kko6lig kk5oml kk7oms kk9omt k6koo kk1op kko7s k4kosi k6kosk k4kost kk3ove kk5p k4kra kk5ram kk3rap kk3rea kk5rei kk3ren kk3res kkri6k kk3ro kkro8e kkro4s kk3ru kk4rus kk7rø kk7rÃ¥ k8k7s kks6al kk4sar kk5sed kk3s4ei kk4sin kk1sk kks4kj kks4l kk3sla kk7sle kk5sli kk3spe kk1s2t kks5tak kks7tep kkst4r kk8s7va kk5s4Ã¥7 kk7t8 kk5uk k4kuni k2k1u6t kk1v k2kvo k1ky6 kkyl4 kk5yr kk7yt k2k1ø k8køl kkø6r kkøy6 kk1Ã¥2 kl4 6kl. 4kla. 4k5lagt 2klak kla9ke. kla4mel 3k2las k7lati kla4tr kleby7t 3k4ledd kle4deb kle4des 2kleg klei9e k7leik k4leiv k2lem 5k6lem. 7k4leno k6lerik kles7j kle5sno kle4sp kle5sti kles7s8 2klet 8klia 4klie. 
4klig 1k2lim2 3k4lini k5linj 1klip 4klit klon7a8l 1klos 2klov k3lukt 4klun klun5t k5lyd 6k7lyf 3k4lyng klyse7te k2læ klæ5res klø7na k6lør 6kløs klø9va klø5ve 6k1lÃ¥n klÃ¥5re 2k3m kmann6 kmÃ¥l2 2k1n2 3k2nap kna5t4re kne4b3l k4nedi 5k2nee kne8se. k4nest kn5f knas8t7a8 3k4nip knippe6 kni2t3 7k6niv kno7g kno6kl 5knop kno9ta kno5te 3k2nu 4k3num 3k2ny kny4s k2nø knø9le kn3øy 9koa. 4ko1ak 9koane ko7ar ko2b3 ko3bo 4ko3br ko5da 4kodd ko4de. 3kodek 5koden 2kodø ko3e 3koef 4koek 4koel 4koes 4koev ko4g5eng kog9ger kog3l ko4gre kogst6 ko4gu ko6gÃ¥ 2koi koi4e5ne ko7i6k ko1in 2koj ko8ja. ko4ka. ko4kab ko4ke. ko6kel ko1ki kokk6o k3okku 4kok4l 4koko 2koku kolba5 kol8d7a 2kole ko4leg ko4lei ko6lel ko4let ko4leu 3kolle ko5lo. kol5p 4koly 2kolÃ¥ 1kom ko4ma. 4komaf ko4me. 6komg ko4mi. 4komil 4komisb ko4mise 2komr 4komse kom3sl kom5so kom4st 4kom5t 4komu 4komÃ¥ k6on ko4na. kon3d kon6dr kone5l6Ã¥ 4koniv kon3s2 kon6s7v kon6ton ko4nu 1ko1o 4koom ko5pa ko5pers 4kopet ko4pi. ko4pip ko4p9l 4kopo kop5per 2kopr ko1ra ko4ra. kor6da. kor4d3o kord5s4ø kor4du ko1r6e 6koref 4ko7rei9 ko5rem ko7ret ko3ri ko6rik 4korm ko5rol 6korpa 6korpe 1kor3r kor4sal kor6s5inn kor4s5l kor6str kor4sv kor5tes kor6top kor4t5r kort5s4 ko5ru ko6rume 1kos 2kosa ko6sa. ko4sed ko4sek 5kosele ko5sen ko4ses ko9si kosi9d ko2sk ko1sl 4kosmu 2koso 2ko1s2p ko6sta kost7as kos6t5ers kost5j ko8s8t7ø8 4kosø kotes7 ko5t4i 4kotil 4kotr ko6tre kot2s kots5tr kot4ti kott4s kotts5t 4koty 2kou4 kout3 kou7ta ko1va kover7e 4kovr 1kov4s3 4koø 4koÃ¥ 6k1p kr6 k3rad 1k4raf 5krag 3krakk krani6e7n 6krank 3krans 4krap kra9se 7k8ra4sj kras5s kra4to kra4u 1krav 4kred. 3kredi 4k1reg kregi3 4krei krei9e krei5s 6krek kre4kli 2krel k5rela kre8m7o 4k3renn 4krent 6kreo k5repr 5kreps 5kresj 4k3ress kreta9 6k7retn k4rets k3rett 2krib 4krif 7k6rig krig6s5 kri9ga 4krik kri5ke kri4kj krik6ka 1krim kri4me 3k6rins krin6s7t kri2p 1k2ris 7krisene k2rit 3krite 6kriv k2ro kro5b4 kro5de k3rofo k4rofor krok7k k6roni k5ronl kro4nom k4rop krop9n kropp4 k4rost kro9t8e kroten9 kro3v kru3i4 krum3 kru5sa krutt5i kru3tr 2k5ryg kry7pa kry5pe kry4pi 3krys krø5ke 6k5rør krø3v 4krÃ¥d krÃ¥8da krÃ¥e6 5krÃ¥k krÃ¥5le krÃ¥9m krÃ¥7ne k1sa k3sa. k2s3ad ks3alt ksa5me k3sane k4sans k4s3ant k3sar. k7sara k6s3ark k7sas k1sc k2sed k2see kse6ge k2s1ei k5sekt k5sela kse5lev ksen5to k4serf ks3erk k5serol k4sesa k4ses6k k4se3sm k5seso kses4p k5sess kses6sel kses8serie kses4sp kses4st kse6te. ksfø4 k5sia k2sif ksi5ko ks3ind k3sing ksi7ni ks3inn k4s3ins k7sio ksi3st k4siti 6ksj ks9kab k6s3kal k6skio ks1kj ks5kl ksko7na ksko5ne ks1kr ksk8u ks9ku. ks7kua ks8kut ks1k6v ks1l ksla8ga. ks5lo k7s6lu k8s7lug k8s5løs kslø8va kslø8ve. k5s4mak ks4mel k8smid ksmÃ¥ k5s4no k7s8nø k1so k6s7oa k6s5o6d k4sog k4somb k2s1or k2sot ks1ov k3spal ks3pek ks5pio ks3pir kspor6t5r 6k6s5pors ks1pr k8sr k3spy k8s7s ks3sm k5stad k4stak ks3tal ks5tant k7s8vak ks6tav kste4da ks4teg k3stei k4stek kst3ev kstev6ne. ks4til k4stin ks1tj ks1tr kst6ran ks4t5rek k6stren k6strer ks4tri ks4tro ks9tum k5stus ks3tyv k2stÃ¥ k1su k6sun k8suni ks1v k9s8vake ksva8ne. ks5veg k7s6vert k5s4vin k8svis ks5w ksy8na ksy8s7m ks5ær ksø4ke. k6s3øl ks1Ã¥ k3sÃ¥. k4sÃ¥p ksÃ¥r4 ksÃ¥t4 6k1t k5ta. kta7f6 k4tag kta6ka k4t3aks k4takt kta6la. k4t3ana kta9na. kt3anl k4t3ann k4tanv kt3app k9tar. k4t3arr k4t3art k6tass k5tast k6t7atl kt5avh kt5avk k6t3avl k3ted k4teda k6tee k4tefø kt5e4ge k2te5i k6teka kte5le k6t5elsk kt5e4lit ktem8 kt7emn k4t3enh k9ter. kterie5n kterie7ns kter5s6 k7tes. k6tesa k6tese kt5eske ktes6ke. kt5esti k4t5e4ta k4tete kt5evn ktev6ne. 
k5tib k3til k4t3ins k5tir ktis5t kt7i6te kti6v7end k2tja kt7ju kto9a kt5off k6togram k4toli k2tom kt5omf k2t1op k9tor. k7tora kt5ord. k5tore ktori9e8 ktor7s kt1ov kt3ral k2t1re kt2ro ktro5s k2t1ru kt4s5amb kts5ar kt5sek kt7sem kt4s5er kt6sin kts5kra kt5sku kts4kv ktsle6ge. kt6sok kt4spa kt5spre kt4sta kts6ton kts5top kt4stÃ¥ kt3t8 ktu9er. k4tuk kt7und k2t1ut kt8yo kt7yrk ktyr8ke. ktør7 ktør8e k6t5øs kt1øv k2t3Ã¥r ku2a ku9an ku4be. ku3bj 2kud ku4dal ku4er. kue4r5a ku4et 6kuf ku4f3l 6kug k5u4gr ku7is kuit6 ku7k8l 6kuldi kuld3r ku4led ku4leg ku6lei ku4lem ku5len ku4let ku4lev ku5li ku6li. 3kull 1kult ku4man kumen6tal ku6mi kum2s 3kumu 1kun kund5s6l 2k1ung kun4ge. k6unn ku4o 1kup ku4pe. kup4pl ku7ra ku4rek ku8r7o ku7r6op kuro8pa 1kurs kur6sk kur4sp kur4s3t kurs3v kur3t ku5ru6 kuru9ken kuru7ker 1kurv kur4v3i 1kus 5kus. ku7sa. kus5a6k kus5ar 5kuse ku4s5el ku4ska ku7s6pe kus3t 6k1ut. ku1ta 4ku5te. ku5to ku6t7r 2kuts 1kuu8 kuøy6 kv2 kva8ke k4vali k6vard kvari6e5n 5k6vart kva9se kve3d2 kve7e kve8g7r k4veik k6veil 7k8vein kvei9se 1k4vel kve5la 4k3verd 5kvern kverns8 2kves kves5t 7k6via 4kvid 3k4vie kvi4ene kvi5er kvi7la 3kvinn kvi5se 7kvist 2kviv 1kvo k1vog 6kvok k4vot k2vu 4k3væ k1vÃ¥ k7we k5wu ky5a ky6el 4kyf kyf5le 2kyg ky4leg 2k3yns kyn6skjer ky2p1 ky4p5r 1kyr kyrie5ne kyr8ke. ky8sa 4kyse ky6se. kyse8te. ky3skr kys6sk 1ky4st kyst3a kys4tel 2kyt ky6ta. kyte3s k1æt3 1kø. kø2ar 1køe kø4en. kø2er 6k1øk kø4le k1øn køn6skjer k3ørr køs4 kø5se køy7ar. k7øyd køy4e5ne 1k6øyr 4kÃ¥ke kÃ¥3le kÃ¥n8da kÃ¥5ne kÃ¥4pe. kÃ¥7pen kÃ¥9ras kÃ¥1re kÃ¥1ri kÃ¥2t5 k5Ã¥t6t 1la. 2laa la1b 4laba la6bl 1labo 2l3abon 4laby 4lad. l6ade la4dem 4ladg ladi9e8 2ladm 2ladr lad3s4 1lae 2laei 4laent 4laep 2laf la3fl 4lagg lagi4 6lagne la2go8 lag5om la2g1r lag5san lag4sj 2laha 2la1in la5ka la4ke. 4lakj l2akk la2kr 8l7aksel l7aksj l5aksl 4lakt lakterie6 la5kø 2lal l1alk la5mab 4lamb la4mes la4met la3mo l8a8mu 4l3anal lan2c lan6das lan6deb land3r l6a3ne 3lane. 4lanet lan6gel lan4gem lan6germ lan4gr lang3s4 lang4s5e lan6g5ø6 4lanl 4lann l3anno la4nor 6l5ansi lan8s7p lan5ti 4lanv l3anvi 2lao la6pal la7pi la8pl lap8p7r8 lapp7s8 lap5sa lap5sen lap5s4i 1l8ar. 6l7arab 2larb l8are. 4l7area la9red la5rem l6aren. 4larena lare6t la6r7eta 5larf la7ria lariar lari6e7n 4l3arki l2arm lar3n 6l7arr 4lart lar6ta. lar7vet la6sc la5se lasi9e la2sk 7laskj 4lasm la5smi 4la4sp las6sak las6sam las6sat las6sel l6ast 4lastr las3v la1ta lat5ang 6latau la4teg lat6ek la4teno late5s4 la5ti la6ti. 4latm la1to lat7ra. lat9ran lat5rar lat4t5is lat4tra latt6u lau7a 2laud laue6 4laun 4laur lau7se lau6st 2laut 2lav la4v5al la4vel l5avh lav5inn 6lavs 7lavÃ¥ 1law la6y5 6l1b2 lba3de lba4k lba5ke. lba4ne. l5be lbe9na lbe8re. l4boa l6buh lby4e l5bÃ¥ 2l1c l4ce. 6ld l7da. ld5aks ld5akt l9dand lda6t5o lda6t5y l2deb l5den l2de5o l2dep l3der l7der. lder7a l4derek l6derik l6derklæ lderle9g lder5s6te lder6s5ti l4desh l6destr l9dé ldis7k ldo9a l8d5oks l2dol l4dore ld5ovn l3drak ld3ran ld5ras l6dre. ld7regn ld5rer. l8dres. ld1ri l7dry ld1rø ldse8 ld4sek ld4sel ld4ses lds1k ld4ska lds4kj ldsla8ga lds6leg lds4let ld4s9m ld5s4om lds3tr ld6tus ldø2 ldø5d ldø5l ld7øy 1le 2lea le7ag le3ak le1al lea5la 3leas le5at 2leb le4bem le6bosta 5lebæk lec4 2leda 5ledar led4dø 4ledeb le4dem 6ledep le5der le3des 2ledo 3ledop 5ledov le4dro le5dry led1s2 6ledu 4ledy 2ledø 4le3dÃ¥ 2lee 6leei 6lefa lefa6ne. 
4l1eff 2lefi 4lefl 4lefor 4lefot 2lefr l1eft 6lefy 4lefø 6legap 5legas 3lege le4ged le9gen le4gera le9geran le9geras le4gero le4geta le4gev 7leggi le3gi 6legif 4legj 2le1g2l le4gol 4legre leg3s4 2legu 2legÃ¥ 2leh 6lehal 6lehamr 6lehan 6lehau 6lehave le3ido lei8e l6eie. lei5ende lei4e5ne lei9er l6ei8et leig6d lei4ger lei7ger. lei4get leig4h5 leig6na leig8ne 2lein le3int lei6r7u leis7t l6eit 2lej 2leka le4ka. 6lekan lek6e le3ki 2lekj lekk7s 2lek2l 2lekn lek4na 2leko 2lekr 4l3ekso 4leksp lekt5ord lekt4s5t 2leku 2lekv 4lekø 4lekÃ¥ 2le3la 8l9eld 2lele l3elem 4l5elg 5lelig 2lelo 3lels 4l3elsk le1lu 4leluk l5elv 2lely 4lelæ 2lelø lelø6pa 4lema le3me le8me. 4lemet 8lemetrisk 8l9emn lem8na 2lemo lem9ped lem5pes 3lemr lem4si lem4s5ø 2lemt lem5ti 2lemu 6lemy 4lemø 2lemÃ¥ le4na. le4nal 4lenav lend8a len9dan 6lenden l6endre 4lendri 4lened 4lenem 4lenev l4engd len4g5r leng9u le5ni 4leniv lensa4 len4s5ak 4lensem len5ses len4sta len4tam len4tr len8t9ra 2lenø 4lenÃ¥ 2leo 5leol 2lep le5pa le4pel le8p9enden le2p3j le8pla. le4ple 4lepo 4leraba 4leram le2r3amb ler5d l8ere le9re. 6lerea 4leref 4lereg 4le9r8ei 4lerek le5res 4lereso 4leresu 4leret 4lerev 9leri. 7lerie le6riei le7rina le4riv 4l5ernæ le5ro. 4lerom 6lerope le3ros 4le3rot 7lers. lers4p ler7te le1ru 4lerus leru8se. 2lery 5leryr 2lerø 2lerÃ¥ 5l6es. le4sab 4lesam 7lesar 4lesau 2lesc le5sed le4seg 4lesek le5sen le3ser 4leset 4lesho le6si. 4lesig le7sing les5inn 4lesit 4lesju le7skap le4s3kj 6leskol le6sk9u8t 4lesla 2lesp 6lespal les3s6 lesse6ne. 4lesta 9lesta. 7lestal le5sted le5steg le5stel le4stim le3sto 4lestrø 4lestyr 4lestø 4lestÃ¥ 4lesug les1v le5s6vi 4le5sy 4lesø 5lesøy 3let. le4tab 6letak leta8ka leta8le. l5e4tas 5lete. 6letea 4leteg 4letek le5ten 6letenk 6le7tep le5ter. 4letes 2le3ti l5e4tik 2letj 2leto 2let2r le7tre le5t4ri 3lets let6sj lets8k let6tea let6tr lett3s4 2letu le4tv 2lety lety8e 4letø 8letÃ¥ 4leul 2leun l1eur 2leut le4ute 2lev. le6vad le6valu le4ved 4leveg 4levei 4levek 6levern 4le4vev levi6d lev5n 2levo lev1r 4levs 4levæ lex1 4leø 2leÃ¥ 5lée 1lér 7lét. 7lè 5lê 4l1f l4fa. lf5ant l5far l5fe lf3f l5fig l5f6in lf5ja l7fje l6f3nul l4fom l4fut 2lg l6gaf lga8le. l4g5alt l4g3ana lga5t lga4ve. l2ged l4gela l4gele l2gem lge3na lge4r5an lge4rap lge5ri lger5un l4gery l4gesl l6ge7sp lg5g lgie8n lg1lo lg5ly lg5n lg3ob lgo2d lg1ok lg1om lg5ov lg3ref lgr9øy lg3s2e lg2sk lg5s4kre lgsle9g lg5s4tr lgu4l lg3un lg3ur l1gæ lg5ø l6gÃ¥. lg5Ã¥r. lgÃ¥8va lgÃ¥8ve. 8l1h lha8ka. lhav6s5 l5hj 1li li1a li4ala li2am li5ans li4as liba5 libe5ro libi5 libie6 liby5e li4dak li4ded li4do. 2l1idr lid3t li4ene li3er. li5ers. lie5s 3lif 4lifat lif5f 3lig li4ga. li4g3an li4gar lig9art li8gas. li4g5enh li4get 5lign li4g3re lig3se lig3s4i lig3sl lig3s4p lig5s4ti lig5str li2gu 4lih li5kan li9kar li7kas li5kee li3ken li5kesÃ¥ li9ki likk3o li1k2l 9likn liks4t li5ku 6lila 8l5ild li3le lil5lebe lil5let li4mar li4mas li6mate l6ime li4me. 4limh limp3r lim7r lim9u li4na. 4lina6l lin6c linch5 5l4indr 4lindu lind3s4 l5indus li4ne. li3ne6a li6nem 2l1inf lin4g3j ling5l ling7sen. 5lingv 4l5inju lin5kes lin5kle lin4kv 2l1inn lin5net 3linni 6linns 6l5inntr li4nor l4inse 5linsk 4l3inst 4lint 2linv li6n5ø6 lio4no lion5sp lion5sv li5o6s 2lip lipes4 lipp4s3 li9rar li4rek 4lisak li5set lis4kl 2liso 4lisp lis6sp lis7tan lis6terk lis5ti 4lis6til list3o li6s8tr list7rø lis4t3u4 li4st5y listyr8ke. li4tee li4tek li4ti3a li4tid li4tig li4til li4tim liti5st li4tiv lit5j 6litn li5to li2t1r lit5rer lit6tele lit6terk lit6term litt5s6 li5ty 7liu li6va. 
li4v3ak liv5eg livi5e liv2s3 2lj l1jan l1jar l4jarb ljar5s6 ljas4 l4jed l6jee l2jei l2jek lje3l lj9elv l1jen l3jer. l4je1s l5jes. ljes4t l5jet. l4jete l5jete. l6je5t6r l2jev l5jé l1ji 3l2jod ljo8e l4jom lj5ori 3ljos lj5un l7jur lj7ut ljø3 l5jøs. 6l1k l3ka l5ka6l lkali9e8 lk4an l6k5b lk4e l3ke. l2ked l5kedal l3kede lkeei4 l6keh l5keleg l8k7els l3ken l7ker. l4kera l4keri l4k5ers l4ke3ru l5kestad l5ket. l5kets lk2l l7k8lem lk4li l5k4lu l6k5nin l6ko. lko9ma lko9sa l2kot lkras5 lk5ri6k lk4ser l4k3s2h lk1s4t lku4le. lku8t lk9ute l6kveg lkÃ¥1 6l1l l5la6a ll6a5f l2lak ll1al l4lami l5lane ll3anl l4l5ano llans4 ll5ansk ll7apa llap3s ll4as ll5asp ll5aur llau6re ll7avl ll5avr ll3d2 lle5a l5leak lle8da. lle4d3r l3lee l2lef lle5gev lle5gr l5leha l5leho l5lehÃ¥ l4l5eig l4leki l6l5ekst l4leli l2lem llen6da. ll5enden l4lendr l6lendt l4lenk l5l8ens l4lentu l4lenu ll5eple l6l5e6rik l4lerob ll6es lle4so lles5pr lle5stø l5let. l4leta l5lets llet6tele l8letæ l2leu ll5even lle7vev l5levÃ¥ lle6y l2lé. ll7g2 lli1e lli5e4n l9lig l2lim l4l3ind l4lins ll3int l9lint. l3lip l4lir lli6sen l4liv ll1j ll7k2 ll5m llmue9ne llmu7ens llmu9er l5lo. llob5 l2lod l4l3off llo5id ll5oks llo2m1 llom5s6 ll3op ll5opp l2l1or l4loso l6lote ll5p llra7n ll4sak llsa6me. ll4sem lls5end llsk4 lls6kar lls5kv ll5skÃ¥ lls5lag ll3smy lls4no lls4te lls6tig lls7øk ll3t4 llto4e l8lua l4lu4e l4luf ll7ug llu4k l8l7uka l4l3uke l6luks l5lum l4l1un llun6ge. llu4pi l2l1ur l3lus l2l1ut ll1v4 llva8n lly4se. l4løk ll5øks llø6pa. l4løve ll7øy. ll5øya ll3øye lløy6er ll5øy6n ll1Ã¥8r ll3Ã¥2s 2l1m l5ma. lm3anm l4map l4marki lm3av lm7b l4m3els l4melu lm5e4po l4m5erf lme7ri lme7sti lme5t4r l5mil lmi8le. lm3ind l5ming lmini6 lmin7ne lm3ins lm5int l7mis lm5l lm5m lm1op lm1s lms6j lm5s2p lm3t lmu4le. lm3ut lmyr8ke. l4mø. l6m5øs lm5øy6 lmøya9 lmÃ¥6la. lm5Ã¥4r l4m5Ã¥s 2l1n l2nab l3ne lni4u l7ny lo1al 7loane lo1a4r 5loar. 2lob lob5by. lob9byan lob9byar. lob5bye lo2bl lo2d3a lo3de lo4d3ri lod3s4m lod7ste lod7s6v lo6d5u 6loeng 6lofj 3loft 1log lo5ge lo6g5e8v lo4gir log2o lo3gop log8res lo6g5ro log5sa 4loh lo5id. lo3ide lo1in 3loja lo1ki lok4ko lok6kul 2lo1k4l 4lokon 2lok7s4 lok8se. lo1k4v lole6ge. 2lolj lolo9 l5omdr lo4m5in lom4mel lomst9r lo4nal 6lond lo5ne lon4g3r lo4nit lon7skj 2lop lo3pa lo4pea lop5pa 5l4opt lo1ra 2lord lor6da. lo1re 4l1org lor4g5l lorie6 6lorm lo1ro lo7rød lo7sa lo6sek lo4ses lo5sev los4kle lo1sl lo7sp 2lost lo9tes lo8ti. lo3to 4lottet lo1un 1lov lov7ald lo3van lo9vand lo7vane lo3var lo3ve7d6 l6over. love5re l5overn 6l5overs 6l5overt 7lovo lov3sa 2l1p l7pa lpa5re lpas5 l4pedo l4pee l2pei l2pel l5peleg l3peli l4pelin l3pesm l4peti l2peu lp2h lp2i lpin3e lpi5ne. lp2l lpo6et l4put l5q 2l5r4 lra8da lradi4u lra6ne. lre4de lre4i lre8ka lre8ke. lre8va lri8ma lri4ve. lro8de. lro6pa. lro8sa. lro4se. lrot5s lrø6re. lrø5v lrÃ¥8da. lrÃ¥4de. 8l1s l2sad lsag6 ls5ald ls4am l7sana lsan6ke. l4s5ano l4s3ans ls5ark l4sat. l4scu l3se. l2sed l2see ls3eid ls5eig l4s3ele l4s3elv l2sem ls5emb l4seno l7ser. lse2s lses5ku lses3l lses5pa lses5s lse8s5tr lse4te. l2seu l6sev l4sf l2sh l5sig l5sik l6s5ill l4sim l5simu l4s3inn ls5jakk l5sjef ls3jeg ls3jen 8lsk. ls5kabi l4s5kan ls5kari lsk5art l6s5ke lskif5 l2s3kild lskinn6 lskinns5 ls1kjed ls3kjo l6skl lsk3læ ls3koa ls5kor ls3kov l4skra ls8ks lsk5un ls7kva ls3kvi l4skÃ¥ ls3kÃ¥p lskÃ¥6pa l2sl lsla4ga l7slagn lsle6ge. ls5lo l5s4luk l6s5løs l2sm l5smør lsmÃ¥6la l2snu lsok3 ls5ond lson6de. l2s1or ls7o6se l5sot ls1ov l2spa l9speg l5spei l9spel l5spi l4spr l5spred ls3pri l8s7s l4s3tak l4s3tal l6stank 8l7s8tat l6ste. 
ls4ted l4steg l3ste4i ls4tel ls5terr ls6ti. ls4tie ls6ton ls5tren l9stri l2su l5sub l3suk ls1un l2s1v ls7vea ls5vee lsve8en. l6s5vek lsve7re l4svik ls6vind l4sør l2s1øy l2s1Ã¥ 2l1t l4taa l4taf lt3akt l3tal lta4la. l4t3alb lta6lev lt3anl lta6no lt3art lta4st lta8tes lta4t3o ltat3r lt3avk l5tavæ lt9b l3tede l4teka lte6ma. lt4en. l4t5enh lt3epi l4teras l4t5erst ltesa8me. ltid6s l4tiet l2tif lti6g5 l2tik l4tina lt3ind lt9inn lt3ins l4tivi l2t1ja ltle8ge. lt9n lto9ar l3tog lto4s lt3ost lt8ep lt2r l3tra lt3reg lt3rei l6treko l6t5rel l6t7rem l4trens lt3res l5t4rest l5trev l4t5rit lt3rol lt3rom ltro8pa l5tros l4t5rød l5trÃ¥ lt1s2 ltsa8me. lt5s4i lt7s6t lt2s3v lts4vi lt7t8 lt3und ltu4ra ltur5e6 ltu5r6en ltu5r6er ltur5s6 ltur5Ã¥6 l2tut l5tv. ltva8la l3tvs l4t5w l3ty lty8d lt7øl lt3øv lua8r 2lub lub6band lu6bri lu5c lu7ere 1luf 3lugar lu7go lui6 luid6er 1luj 4luk. lu4ka. 4luket luk6keri lu3kr 5l6ukt 4luly l4um lu6mel lu2mo lum6sk lu4mø lun5d4r lu4ne. lun4gel l7u2ni lun5ne 3lunsj 4lunt lu6o lup4s lu2p3u lur2e 6lurei lu5ren lu5ri 6l5urn lur8na lur8ne. lur8ta 1lus. lusa5 lu7sak lu6sel lush7e lu2s5k lus4o5 lus4sid lus4s3t lus1t lus2t3r luta3 lu3ta. lu7tet 6lutf 4lutg luth 3lu5t6he lutla9 lut6m lu1to lu4t5r 2luts lut4tal lut4tap lut4t5at 6lutv 1luv lu4va lu4ve. lu9ven lu9ver 8lv l1va l4va. lva6k l4vakti lva6la l4va4m l4vana lv8and lv7a8p l7var. lv7asa l4v5ass lve3d4a lve5i6s l4vela l4vele lv4en lve9ne l9ver. lve9rau l4veru l4vesk lve5sl l7v6et. lve5str l9vé l1vi lvi8ene lvi9er lvi6ka. lv9im lvin5g6r lv3iro lv1j lvly8se. l6v3n lv1of lvoks6 lv9op lvo8re. lvor4s lv5o4v l6v5p lvra8r lv9ri lv1s2 lv5se lv7s6k lvs6l lv9ta lv1Ã¥ l3vÃ¥g lvÃ¥4p4 lvÃ¥6r l5w ly1a 2lyb ly4d7r ly5e lyes3 ly4gel ly5g6l ly8is 2lykj lyk6keri ly5ku ly7kv 6lykø ly5l ly5me 2lymp ly2n3a ly4ne. ly4n5il lyn3s2 2lyo ly5ok ly3pe ly1r8 ly8ra. ly6re. 6l7yrk ly4sa. ly4s5a4k lyse6te. lysk4 lys3kj lys9kl ly2s1l 4lysp lyst9ra 6lysy ly8ta. 4lytek ly4teri lyte5s l5y4tin ly1tr ly5vak ly3ve l3z l6z5b 1læ1 6læd læ6ra. læ5rar lær4arm læ4re. lære3i læ3rer læ4re3s læ5res. lær6sv læ6ta 1lø lø3de. lø4del lø5dem lø4er løk5kj 4l1økn 2løko lø4k5r l5ø6l lø9me lø2na løns5t løp6s5 5l4ørd lør5k6 lør5n 4l5ørr 5løs. lø2sa lø5san lø5ser løs3k løs3s 2løt lø9ta. lø3te. lø4teg løva9r 6l7øv8d lø4ve. lø3ver lø5ves 6løy. 4løya 2løyd 4løye løy8ed løy5el 2løyf løy4g løyg5e 4løym 6l5øys. løy5ter lÃ¥6gal lÃ¥6gre lÃ¥gs4 lÃ¥4gÃ¥ 2lÃ¥i lÃ¥1k4 lÃ¥6ke. lÃ¥9me lÃ¥2m5o lÃ¥2n 6l5Ã¥nd lÃ¥n2s1 2lÃ¥p l1Ã¥p9n 2lÃ¥r lÃ¥8ra. lÃ¥9rar lÃ¥5ras lÃ¥3rin lÃ¥1ru lÃ¥6sa. lÃ¥1sk lÃ¥6sko lÃ¥4s5l lÃ¥4ste lÃ¥s7ten lÃ¥s5ter lÃ¥5su lÃ¥3te. lÃ¥4teg lÃ¥4tek lÃ¥8ti. 4lÃ¥tt lÃ¥4ve. 1ma 2maa 3maa. ma3ar 2mab ma5br ma3che mada5me ma4del ma3dra m6ad9s 4maef 4maek 7maene 2ma1f 3mafia 3mafr 6mafrÃ¥ ma8ga. ma4ged ma4gel ma4ges 3magn ma2gr 2mahe ma4is ma4ja 2mak. ma5kab 4makad ma4kes mak7ke 4makl mak6le ma4kot mak2r ma3kre mak5ron mak4tal 4m3akti mak4to makts4t ma1ku 2ma1kv ma1la ma7leb ma6leng ma4let mali9e8n mal5l ma4lov mals4 mal4t5ek mal3u mal3Ã¥ 4mamer ma3mo m4an. ma3nak m3anal 6manam 4manav man8ce man4dom man4don ma3ne m4anf man5g4a m6ange 4mangr mania8 2manl m3anle 4manm 5m4ann man4nem manns5l ma4no 2mao 4mapa 2mapr 4marb mar5d6 7mare. 6mareg ma3rei ma7rel 5ma9ren ma5res 3mar4i mari8e9ne ma3rin m4arka 4markit mar4kv mar5m marmo9ra. mar7n ma1ro ma4ro6p mar3s4h mar7sl mar5te ma4ry. ma6rÃ¥ 4masel ma5set mash3 ma4sia ma4sik ma4sis 6masju mas4ki 4masko 4masp mas4see mas6set massø9 7mast. ma5stem 4masto 4ma5str 2masy m6at ma1ta ma5tad ma3te ma4te. ma6ted ma4tel 7maten 7mater. 6matera 7matet 6m7atfe 4matil 7matil. 
ma1to ma1tr 4matra ma4t5ras ma4t3re ma6t7rom mat3s2 mats8l mat5ta m8at7t8r 2mau mau4k mau7l ma1un 5maur mau7su 2mav 5m6a6ve ma5ven m7avh m5avk ma4ze ma3zo 2m1b m4bao mbar3d m2bea m4beb mbe9da m4bedø m2bef m2bek m4beli m2bem m4bena m4beo m4bereg m4bero mbet2 mbi6ar m2bib m3b4l mboe2 mbo4end mbo5er. mbo5ere mbo5id mbo5n m4bop mbo5re mbo9t mbu7ar. mbu4e mbue7re mbus5 m1c m6co 2m1d2 m3de md7om 1me 2mea2 me5al mea5m 2meb 2mec meck4 4medat 2medb 5mede. me4ded med5ei me7den me7det 4medf me3di 4medik 4medir 2medm me6dok 4medom 2medr me6dret med1s4 4medu me6dun me5d4us med5v 7medve 2medy 4medÃ¥ me7e 2meef 6meek 2meel 4mees 4meev 2mef m3eff 6me2ga me7gal 2megr 2megu 4meh mei2e meie5n m1eig me3ild me3ind mei5ni me7isk 4meiso 2mej 4meki 4mekj 2mekk me2k1l me3kn 4meko 2me5k8r 4meks mek5t 6mekte m2el 4mela me5l4aks 5melane 5melar me3le me4le. 4meled 4melei 4melek 6m5e6lem me4l5eng 4melik 4melis 4melit 4meliv mel5le mello6m3 4melok 4melov mel4si mel2s3j melsk4 mel5s4t mel7tr me1lu mel5v melvi6 2mely 2melæ 4melø 4me5l4Ã¥5 2mem me6mo. me6n5e6d me6n5ers 4m5enga men5k menle6ge. m6enn men4ny me4nom me4nor men4si men3sm men5spl men5te men6tek men4tom men5tr me4nyt 2me3o me6os 2mep 5mer. me6rab 4merad me4ra5l me4r5ant mer5di 4mered 4mereg 4merei 4merek 4merel me6rense me3res 4meress mere6t m4eri 4me5rib meri5ke 5merin merle7g mer9n mer5os 5mers mer5sk me3run mer5und 4merus 2mery me2r3ø 2merÃ¥ me4rÃ¥k 5mes. 2me5sa 4me5s4h 2mesj 2me1sk mes5ke 2me1sl mes6le 4me3s4o 2mesp 5mess messe7s8 7mest. 4mesta 5meste. mes4ti 6me7sto mest3r me5stro 6mestrø 4mestrÃ¥ 4mestu 4mesu 2mesy 2mesø 4metab metall7s8 4me9tap me4tar me3te 4meteg 4metei 4metek 4metel 4metep 4metil 2metj 4meton me4t3ra me4tre met5ren met7rer met5res met5rik me4tru 4metræ 2metu 4metv 2mety 2metø 4metÃ¥ 2meu4 7m4eu3s 2mev meva8n 2meø 2meÃ¥ 2mé 2m1f mfa9ra mfar8ta mfav5 m2fek m6f3ess m4fi. m4fibr m2fit m5fru5s6 m5funn8s5 mfu6se. 2m5g6 m4ga. m4gi. mgÃ¥8va mgÃ¥8ve. 2m1h mhea4 m4hu. 1mi mi1a 5mi6al mia2n 9miane 7miar. 2miav mi6c miche6l 8m9idé 2midi mid5j mid3s4 mid4t 4mi3ele mi3er. mie9s8 2mi5f 5migraf 2mih 2mii 4mij mik6h 4mikj 2miko mik5ro. mik5ron mik5sa 6miku mi4la. mile6t mil7eti mi4lev mi7li 4milin mil6s5v 4milæ 2mim mi8ma. 5mimo m7imp mi4na. mi5nar mi4ne. mi4n5ers 4m3innh 4m3inns 6minnt min2s min3sp mins4t mi3nu m4inv mi4n5y mi7ov 2mip 4misa mi4san mi3se 4misek 4misel 9misé misha9ge 4mi7si mi2sk mis4ko mi3s4la mis9le 7mism mis4s5k mis4s7p mista9k 5mis7te mis4tra 4mistu 2misy 4misÃ¥ 6mita mi3te 4mitj 4mito 2mit6r mitt3s mi5ur 2miv mi5vÃ¥ m6ja. m6jan m5jar. m7jarn 2mje m7je. m1ji m8jingan m8jingar 8mjo mju7ke 1mjø mjøs5t 2m3k2 m4ko. mkro5 mku6le. mkÃ¥8pa 4m3l m7la mla6ga m2lef mlei5er. m2lek m2lel m2lem m4lera mle6se. m4lesk m6le5s8v m4leta mle4ve. ml6i ml5ja mly6se. mlø6pe. mløy3 2m7m4 m6mai mma5kl mmal5 mma8le. m4mam mmando5 mman6dol m5mar mmar7in mmar7o mmatik7ka m4medø m2meg m2mei m2me3k2 m4melu m6meni m4menu mmer5t mme4run m2me1s mmest6 m5met. m4meta m4meti m4metra mme5t6re mme3u m3mé m4mia m7mian m2mib m4mid mmie6n m2mik mmi5sk mmi5so mmi3st mm5n m3mu mmø2 mmÃ¥8la. 2m1n m2nee mn7eid m2ne9l mne4ra m5nese m4nesk m4nesta mnes9tie m9net. m4nete m2nev m6nip mn7sk 1mo 3moa mo2ar 4moba 5mode mo3dem mo5di mo6di. 2mody 3moe mo4en. m1off mofo6bi. mo6gi 5mogl mo5go m5ogs 4moh 2moka mo8ka. mo8ke mo1ki mo6la. mo3le mo4le. mo7le6s 4molt mo3ly m5om. 4momf 2momr mom4s5ø mo5ne mo4nisa mo2no mon1s mon4ste mon5t6 2mop 5mo1ra mo4ra. mo4rar mo7rar. mor5d6e 4mordl mor6d5r mo3re mores7 m2ori mo4ri. 
mo6rid 4m3o4rie mor4kl morla8ga mo5rok mo4rom mor4si mor4skj mor7sky mor4sp mo1rø mo9sen mo2s7k mos4o 6mostab 4motap mo4te. mo6te6g6e mo4tei mote7kl mo5ter. mo4tes mo5to 4motr mot7re mot1s2 6motsagd motsva5 mot7t mour5 3mo3va mo5w 8m1p m4pana m4p5anta m4pee m4pelot m6pena m6pep mpera8te. mpe5res m6peress m4p5erfa mperi6e7n mpes2 mpes6te mpe4sti m5pett m2pe5u mp3id m4pinje m8pla. m3p4lan mp5le. m6p5lin m6p5n m6p5ob mpoe4 m4poeta m4pog m6pok m2pop mp3opp mp1p8 mp3rad mp5ret mp3rop mpro8pa mp1s mp3sek mps4p mp5s6t mpun6ge. m9pur mp5ut. mp5ys mpø5 m9pÃ¥. m3q 2m1r6 mro8sa. mro6se. mru7te. mrø9de m9rÃ¥r 6ms m5sa. ms1ak ms5ant m1sc mse5lu m9s6ei m4sem m4s3eng ms5e4pl m4serv mse5s m5s6etn mse6t7jare mse8t9jas msi6e ms4ing ms3inn m4s5ja ms5kab mska9k ms6kin msk5ing ms3k4n ms3kor ms5kren msk8u ms3lan ms6lega ms9lua ms3lue ms4ly m2sm m1s2n ms9ne ms5no ms3næ m1so m4soms ms3ori ms1ov ms3s2 m4s3tal ms8tav m8s7te. ms7tea ms2ti ms5tilb ms3tim m1sto m4s5top m5s6topp mst5ord m8strand m5strane mst5red ms5tref mst5ren ms5trik ms4tru ms3unn msu9ta msu7ten ms1v msva9ra msva5re m3s4vet ms3ynd msø4ke. m4s5ør ms1øy ms1Ã¥ 2m1t m3ta mt4b mtbe6 m2tee m2teg mteks7 m6te7kv m4tel m6testu mti9a m9tida mti5e m6tien. m6tif m2tik m6tini m4tiø mtiør6 m5to mt4r mt6ve mt4vin mtÃ¥5 1mu mu6a m1u2b mu2e mu3el mue7r mues1 2mug mug5l mu4he mu2k 8m9uka 4m1uke mu5la mu4leg mule6s mull2 mull6s7 mul6tiv 4muly 7mum mu3mi mum2s mun2c munes6 4mung mun6ge. 6m5univ mun3k munn5s6 mun4t3r mu6ra. mu4re. 2murn murs4 6murt m2us mu4se. mu4ses4 mu4sé mu2s3k muske6l5a must4 mus5tan 2mut 3mu1ta mu8ta. mutsa8la mut7t6r 2m1v mvak5t mva6la mvari6 mveg5s4 mvi7e6 mvi6se. 2mw 1my my5a myg4ga my3ke myk4kes myk3l my8kr my2ra my9ran my9rar my1re my4re. 4myrk m5yrke my4r5u mys3 my6sa my4se. my4so my4te. mytt6s5 5mæ mæ6la. 1mø møb3l mø7des mø6e mø9ens møkk6a m3økn 2møko mø6na mø4ne. møne9s mø8nest 6m5ønsk mø2o 3mørke mør4k5r mør5sm mør3ø mø5se 7møt mø9tas mø4tere møt9t 2møy møy9ar m5øys. 1mÃ¥ 2mÃ¥. mÃ¥5a 6mÃ¥enh mÃ¥7g2 mÃ¥1k mÃ¥4ka. mÃ¥4ke. mÃ¥4kes m6Ã¥l mÃ¥5lar mÃ¥4le. mÃ¥6led mÃ¥le3i mÃ¥lø6pe. mÃ¥4ne5s6 mÃ¥9nese mÃ¥8pa. mÃ¥4pe. mÃ¥7pl 2mÃ¥r mÃ¥ra6r mÃ¥1re mÃ¥1ro mÃ¥1ru mÃ¥1s4 mÃ¥6tak 6mÃ¥tf mÃ¥5tr mÃ¥3tø 1na na6ans 2nabo 7na7bortr na6bot 4naby na4ded na8dem nadi4 n3adop nad2s1 4nadv 8naf n1aff nafo7r na3fr naf7t na8ga. na3ge 4nagen nag3s4 nagsÃ¥5 na7gø 6nah 4nair 2nak n6akk nak7ka nak6ko 5nakl na1kr n1aks nak8sa nak8se. 8n1akt nak6ta. na7kv na9lag na6lare 6nalarv na7leg na4l3ei na4lek nal5epo na4les na4l5ett na4lev nal5g nal6ge. na4lil nal6lag nal4løp nal3op nal5s2 nal3t na3lur 2naly na2l5ø 4nalÃ¥ na3lÃ¥5r 5nam. na7mere na5mes na5mo9 5nams 2namø 9nan. 4nana n3anal n6ane nanfø8 4n5angr nan5k nan6ke. 4n3anl 6n5anle nan4ne na4n5o n4n5ans nan4sin nan4skj nan4s5t 4nany na7o na5pe nape4s nap3s4 na7p8 n4ar. na4rap 2narb nar5dr 4narea nar7ei 4na5rek nari4e5n 7nariksk 4narkiv 8n7arm 6n5armé 6n5arme nar8ma. nar5s nar8ste 2nart nar6ta. nar5ti nar7v nar5ø nasa3r na4sas nas7h 8nasik na4ski na2s5t4 nasta5 8nasu na1ta nate8k na7tem 4natl 4natom natori5e6 nat8ra nat3sp nat6tak nat6tea naty5 2nauk naus6p na6va. 4navd 6navf 2navg 2n3avh na1vi 4n5avk 4navl nav4les 3navn 6navo 4navr n1avs 4navt 4n1b2 n5ba2 nba3d nba9ser nben4s nbo5et n6buf n6butr nbyr5 nbø9le nbø6n n1c n5ca ncel5 n3che nch3ei n6cot n2cy1 8nd n7daa. n6dabi n4dad nda5f n7dag4 ndags5 n4daks n3dal n4dalf n4d5all ndals3 n4dark n6d3art n8dask nda5tal nd5d4 n3de. nde5a n2deb n2ded n5defi nd1ei nd4ein nd6ek n4dekl n4deko n5d4e5l nde4le. n4delik ndel4sk ndel4st nde6lt n2dem n5dem. nde5mo n5den. 
n6dener n5dens n2de5o2 n2dep n4derab n4deras n4derei n4derim nd6erk nderla6ga n4desi n4dest n4desu n1dé ndi4en n4d3int ndi6sk nditori5 nditorie6 ndit4t5a nd3jer nd1ju nd5k nd4lem nd4lese nd4lest nd4lev n8d7m ndo9a n6dob n6d5okk nd4om4s3 n2doo nd1op n2dor nd3org nd5orm n6dote nd5rae n4d3ram nd3ran nd6rek ndre4ra nd5resen nd5rett nd1ri n1dro nd4sag nds3ak nd4s5amb nds5elv nds5ende nd4ser nd4sje nd4skj nds7kul ndsla6ga nds6lett nd7spu nd3s4te nds9teg nds5trek nd5stry ndta6ka. n6duf ndu3is n4dun nd3ung ndun6ge. n2dup ndu6si ndus7k nd7v n2dyg ndy1k n2dys ndy5sp ndø5l n2d1øy 1ne 2nea 5neae nea4g ne7a6le ne5alp ne5als nea9m6 ne9a8r9an nea9ren ne1as 6ne7av 2neb 3nebb 4nebe ne4ble neck5 2neda 3nedal ned3d 6n5eddik 4nedeb 4nedeg 4nedel 5nedg ne4di. nedi4s 2nedo ned5over7 ned1r ne6dre 5neds ned3st 6nedu 4nedø ne5dÃ¥r 4neeg 4neei 2neek 5ne4e4r5 2nef n1eff n5eft 2neg 5negati ne3gla neg5lesp neg8r 6negru 6neh 2nei n2eid nei8dan n5eig n2e1in n3eini 5neir 5n6eisk 2nej 2nek ne4ka. 6nek2l ne3kn nek2r nek5rin n7ekser nek5t 4nela nel3de 4neled 6nelegg 7neleggj 4nelei nelei5er 4n3elem 6nelê 4nelid 4neliga 4nelis 4nelit 4neliv nel4lov nel7s 2ne1lu 4nely 2nelæ 2nelø nelø8pa 4nelÃ¥s 2nema 4n3emb 4nemel 4nemer 6nemes 2nemi nemie8 6nemj nem6k 5nemn nem5ne. nem7nel 2nemo 2nemu 2nemy 2nemø 2nemÃ¥ 3nen 4nena nen5at. 6n5en6den 6nendr nene4 4nened 4nenem ne3nes 4nenesl 4nenet nen5eta 4nenev nen4ga. 4n3enhe 6nenhet 4neni nen5se nent5ei 6n3en5tr 4nenu 4neny 4nenæ 2neo 5neo. ne5o4r 7neos 2nep ne5pe 3n4er. 4nerad 6nerap 3nerb 4nerea 4nered nere6de. 4neref 4nereg 4nerek 4nerent 4nerep ne5res 4neress 4neresu 4neret 4nerev 4nerfar 4ner2g ner3ga ner3ge 4ne3rib 4nerik 4nerit ne4ri9v 5nerl nerla8ga n3erob 6neroc ne7rof ne5rok 4nerom ne5rot ne1r4ov n6ers. ner7se ner5s4i ners4p ners8ten 4ne3rul ner5un 2nery 3neryr 4nerør 2nerÃ¥ 2nesa ne4sa. 5nesar 4nesc 4nese 6nesek nes6er 5nesets nes5eva ne3si nes5ind 4nesit 2ne5sj ne5skam 4ne5skar nesk4e 4neski 5neskins 4neskj 4ne5sko 4neskr 6nesku 2nesl ne3sli ne7slø nes6mi 6nesn ne7snø 2neso 3nesot 2nesp nes6sam nes4stu 5nest. 4nestas nes9te. 4nesto nes5tor 4ne3str 4nestu 4nestø 2nesu 2ne1sv nes8va 6nesy 4nesæ 5nesøy 4ne5sÃ¥ nesÃ¥5re 3net. 2neta 4netaks n7e8tas neta6le. ne3te 5nete. 4ne3ti 6netid 2netj 2ne5to 6netom 2net4r 3nets net5s4p nett3a4 net6tel 2netu ne4tv 4nety 4netÃ¥ netÃ¥5ker 2neu neu1r ne5us ne6va. ne4ve. 6ne8vent 5nevik nevi8sa 3nevn nev1r ne4y5t 2neø 4neÃ¥ 1né 4néb 4néd 2néf 6néh 2nél 4ném 6néo 2nép 5nér 4név 4néå 4n1f nfall4 nfalls5 nfan5t n6foa n4fob n4fole n4fom n4fora nfor9en. nfos4 nfø5des nføy8ed 8ng n8g8ad n4gaf n6gag ng1ak ng5and. n8gan8da ng3ank n4g5arm n4gart ng5art. n5garta ngar5u n2gat ng5ato nga4ve. ng7avi ngd4 ng4ded ng4del ng4dep ng3dr n4geda n4gela n3gen n4gena n7gene n4g3eng n6geno n4genu n3ger n4geret nge5run n2ges nge3sl n4geta n2ge7v ngfø4 ng5g2 ng5id ngjen5g ng1l n5glem ng4lu ng7n ngo4d ng5odd n7goe n3g4ok n8g7o8m ng9ome ng5ond ng7o6pe n2gor ng5ord ng3ork ngos6p n2got ng3ra. n5grad. ng5rand ng7rea ng3reg ng3ren n4grend n4grer ng5rest ng3ret ng3rev ng3rid ng3rin ng7ro. ng9roa ng7roe n5g6ros5 ng3rÃ¥d ng8sa. ngs5elv ng4sem ng4ses ng2sj ng2sk ng6skj ngs7leg ng5sløy ngsmÃ¥6la. ngs3ne ngs7tep ngst5rid ngstyr8ke. ngs9tør ng3und ngu5ru. ngvi4s ng5ye n1gø n2g1øy ngÃ¥8va ngÃ¥8ve. 4n1h2 nhat5 nhet4s 1ni ni1ak ni1a2n nian5dep ni2bl 4niby 4nibÃ¥ 4nicr n8ida ni4del ni7dele n5idé ni6do nid7r nids4 nid5st 4nieg ni1el 7ni3e2n ni3er. 
nie5ri ni7ers ni5ert 4n3i4fr ni5gla ni9glo 2nih 8nij ni3ke 2nikj 4ni5ko 2nikr 4nikv ni5l4i 5nilu 4nimar 4nimas 4nimo 2nimp ni6n7al 2n1ind 4ninf ning3o nin5gr ning6s5 ningse4 nings5te 2n1inj n1inn 4ninnb 4ninnh 4ninns 4ninnt 2n1ins 2n1int 2ninv 5nio. nion2 nions3 4niop 7nios 4ni5ov ni4pet ni9pet. ni9pets ni2pl nip3li nip5si 4nir nir7kel 4nisem ni5set nis5im 4ni3skj nis4k3o ni9sol niso5ne niss4 4nistas 6nistat nis5tik 4nistil ni3str 4nisty 4nistø 6nisu 6nisy n4it ni3ted 6nitj ni4tog ni4tos nitt4r nitt4s3 nitt6sk 4nitu ni3ty nit5z nitær6e 2niub ni4umf 2niut 4niva 2nive ni8v7eg niv5ei8 4n5ivo niv3s2 6niÃ¥ 4n1j n6jah njav9 n2jed n5jede. n4jee n6jei n2je1s n3jes. n2jet n3jet. njet6r n5jett 2nk n1ka n2kak nk3aks nkal5l n2kau n4kedo n4keer n4keh nk5eie nkel5s nkelt3 n4k5enh n8keno n4k5erfa nker4st n4ke1s4 n6ketø n1ki n4kid n4kie nk3ier nk5ind nk7inf nk3inn nk7int n5kj nk1k4 n1kla n5k6lang n7k4lis nk3lok n1klu nk3ly n3klæ nk1lÃ¥ n1knu n1kny n3ko n4kof nkofi8 nk5oks nko5le nkol4la n6kot n6kov n1kr n3krav n7k8ry nk1s2 nk4tak nk6tal nk4tin nkt3sk n1ku nku4le. nkuri9 nkurie8 n4ku6t nk9ute n1kv nk3ve n7kvit n6kvo n9ky n1kø n1kÃ¥ nkÃ¥6pa 4n1l nland9a nland6se n5le nlei7er. nle8ma. nli4en. n3lj nlu4e nly6de. nly6di nly4se. nlø6pa. n7lÃ¥ 4n3m nma4le. nmik3 nmi8l nmini6 nmo6se. nmusik7k nmÃ¥6la. 4nn nn4ab n2nad nn5ad. nn9aft nn5air n6n1ak n4nala nn5alg n4name nn4an nn5ana n9nane nna8ni n4n5ank n5nanl n6n7anta n9nar. n4nari n6nasi n4nask n2nat n2nav nn3avl nnbo9e nnbu9e nn5d2 nndø4 nndør3 n5nea. n5neap n5nebar nnebe8r n5nebu n4nedi n2nee n6nef nn1ei n3neke n4nelo n3n4en nne6nat nne4n3o n9ner. n4nerel n4nero n7nes. n6nesj n5nes4la n4ne3st n5n6et. n4nesø n4nete n2nev n2ney nn7g8 nnhø8re. n4nid nn3ide nni3e n2nim nn7inf n5nis nni4sj nni4s3t6 nn7k2 nnle6ge. nnlø6pe. n4no. nn5of nno2m1 nn6oms nnomsy8na nn1op n2n1o4r nn7ord n4n3o4ve nn9r nnsa9ka nn4s5amb nns5and nns3ar nn5seg nnsei8g nn4s3em nn6s5enk nn4s3es nnse6te. nnsi7da nn4s3in nn4s5i6s nn6s5jak nns5kan nn5s4kli nns5op nns3or nnst6 nn6s5tab nns5tal nns5telt nns4ten nns5tep nns7tin nns5top nn6s5tre nns5tro nns3tv nn4s5ul nns3va nn6s5vo nnsy8na nn2sø nns3øk nn3sÃ¥5r nn7t4 nnte6se. nnto9ga nnu1i nn6ung nn1ut nnvi4s nn1yn nny4t nn7øk nn1øv nn1Ã¥ 1no noa4g no1ak 5noane no7ar. 2nob nobe4l no4bl nob5le no3b4r no5co nodi4e5n 3noe 4noef 4noei 4noek 4noent 2nof n1off 3no3ft n6ogra no5id no3in 2nok no1ki nok8r nok6se. n3oksi no9le 2nolj no6mid no6mik no4mil 4nomj nom9m nom7s2 5nomy non1s4 non5t 2noo 2nop n1opp nopp7s8 no1r4a no5ran no5rar no5ras nor4da nor6d5end 4nordn nor4d5r nord3s4 nor4dø no9ree no3rek no3ren no7ret nor5g 3norge norges5 n3o4rie 7norit 4nork nor6kla nor6kle n4orm normlø7se nor2s nors6ka. no3ræ no4sel no4ses nose8te. 4nosp no3stj nostra4 2nosy no4ta. no4te. note5i 5notek no4tel no4tes not3s 4noty 4nou no1v no3ve 4no5vi 4n5ovn 7nó 6n3p2 n5pe npo8ta n4pÃ¥k n3q 2n1r2 n9ra nra8na nra8sa n3re n4reb nrei7e n6renem n6reo n6ri. nro6de. nro6t nry6 nrÃ¥8da 6ns n1sa. ns1a4d n5sag n5sakk n5sa4kr ns3aks nsak6se. 6n5akti ns5akv n3sala ns3ald n6s5alp n5sane n4sanf n5s6ann n5sanse ns5ansi ns5ant n5saren. ns6arm n6sart n4sati n1sch n3sco nse9a8l n2sed ns5edd ns8ede n2see nse5ei nse6er n6sef n2seg ns5ege nsei6d ns5eie. ns9eig n4seks n4s3elv n2sem n5sen. n5sene. n4sener n9senes n4senet n4seni n7sens n3ser. n4sera nseri7e6 n4sesa n4sesi n4se3sk nse3sl n4ses4p n4sest n6sesu n4se3s4v n4sete n9sets n2seu ns5fr n2sh ns3ha ns7he ns5hi nsi5de. nsi4ent nsi6er. 
nsin5d n3s4ing ns3inn n4sinte nsi8ra nsis7k ns3jak n4sjav n5sjef n6s5jord n4skan n6s5kauk ns7kele ns4kete n9skim ns5kir n5skis n6skja n6skjele ns5kjen n3skjæ n8s7kÃ¥p nskog6s ns4kolli ns5kor ns3kro nsku9et n4skug ns5kuli n5skum ns1kv n5s6ky. n5s4kye n5skyss n5slang n7slarar ns5las nsle6ge. n3s4lekt ns6leri ns5lid n5slyn n1slÃ¥ ns3mi n4smal n4s5mo n4smur n4smus nsmÃ¥6la. n5s4nar ns5nes nso4d ns1of n3s2ok n1sol n4sond nson6de. nso5ris ns1ov n3sovn nspa9ra ns5pels n5spet ns1pr n6spros n7spur ns7s6 nsse4e nssÃ¥5 ns4ta. n4stak n4stag ns5take n3stam n4stank nst5art nstar8ta. ns4tau n3s6ted n3s4tei ns4tel nste6ma. ns5tenk n6step n5s4tik ns5tild ns7tilf n4s5tils ns4tin ns4t3i4s ns1tj n5sto. n3stru ns7trøy n3stue n6stuk ns5tur nst3ut n1su ns1uk n4sung n5sup ns5vang nsva9ra n2s3ve ns9veg ns5verd ns5vern ns1vi n5s4vill ns4vind ns9vis n4svu n3s4væ nsy4d ns1yt n5søn n4s1øv ns5øya n5sÃ¥pe nsÃ¥7pen ns7Ã¥s. 6n1t n5tab n4tagr nta4lan ntall4 nta5na. n4t5and nt3anl nt4anv nt3app n7tart n6t5arvi nt6as nta5sia nt4at nt3avi nt5avr nt7avs n2tea n5teat n2tee n4tef n4tege n4tegi n7tegn n2tei nt5eini nte5ins n5teis n4teka n7tekn n4tekse n9teleg n5telig n5tell n4tels n9telt nte4ma. nt3emb nte4mis n5ten. n4te7na n5tene. n4t3enh n5tens n2teo n5teori n5tepp nt4er n5ter. n7terek n6terest nte6risk nteriø7ra. n5ters n7tes. n4tesel n4teta n4tete nt4e3u nt5e4va nt9g nti1a n4tiat n4tid n5tiem n4tig n4tikap n5tiki n4tikl nti5kli n5tile nti3lo n9time. n7times n4t3ind nti6net n6tini n2tio n2tip n4tisa n4ti5sti nt5l nt5n nto3a n2tob nt5omf nt5omn nt3oms n2t1op n5tor. n5to5re n6torg n2tou nt1ov nt6ran n4trep n5trer nt5resi ntres9kjare n4t3rin ntrol6li n6trom n6trul ntrøy4 nt5skj nt5s6la nts2t nt3t4 n6t7ub ntu9e ntu4l n2t1u2n n4t5uro n2t1ut ntva8la nty6e ntyr3s nt7Ã¥r. 1nu 4nu. nu3an nu4av nuft4 nufts5 2nug nug6la 4nu4h nuit8e nuk5 nul9t8 4nulu 6nuly nu9me 2n1un nun4ge. nuo2 6nup nu3pl nu4re nu5sa. nu5sen nu7ser nus4k5l nus7l nuss4 nu6st nus3ta 2nut nu7ta nu3te. nute6r nut5eri n7utl nu6ume 6n1v nva6la nvand5 nverle9 nves1 n5vi nvi4et nvi4ka. nvi5ke n3vu nvæ8ra n3w 1ny ny5ar. ny9ast 2nyd ny4de. 4nyef 4nyg ny5ge 5n6yhe ny3ke ny3k4le 4nyko ny1lo 2nyn nyn8da. nyn4de. 4nyo 4nyp ny8pa ny6ra. ny4re. ny7rev ny4ru ny1s ny5se ny9s6k nyst4 nystu4 ny3te ny3tr 2nyv ny8va. ny5vak ny8ve. 4nyø n1z 1næ1 næ8ra. nær9and næ8rast. 1nø nø2da 4nødd nødde5 nød3sk nød5sto nø9dun 2nøe 4nøf nø1fl nø1fr nø7gr 4nøko 4nøks nø4le. nø9mo nø5mu nø3p4 nø7ra nø6red nø1ry nø3se nø1sk nøs4l nø7te nø7tr n6øtt nøtt6r nøv4d nø3ver nøy1 2n1øy. nøy8a9n n7øyh 4nøys n3øys. nÃ¥6as 1nÃ¥d nÃ¥5ded nÃ¥8en nÃ¥6et nÃ¥8j n1Ã¥2k4 1nÃ¥l nÃ¥2la nÃ¥5lev nÃ¥8ma nÃ¥n6de. n1Ã¥4p nÃ¥p8na n1Ã¥6r nÃ¥2s nÃ¥5sa nÃ¥se4 2oa oa5c o1af oa4k oak6kana oak4ku o1all o3alt o1a2m o3a6n o9and o1a6p oar6d7e oar4d5in oa4r5e4g oa4r5enh oa6ré oa4r5i oar5m oa9té o1au o1a4v 2o3ba obakk8s obb4l ob4bo ob4b5r ob4b3u ob4e o4bea ober5et ober4s obers5ta obers5te o3bes 2o3bi obi5e6 1ob1j o1b2l ob5la. ob3le. ob3len o2b5li 3oblig ob4lo o1b4o obo9a 2obr ob5rar ob1re ob1ri 3obser ob5st 2o3bu 2oby obyl5 o5bø o3bÃ¥ 4oc ock5ers o5cy 2o1d o6dab oda4f od7att odd5ei od4del od4dest od9do od4e o2deb o6d5ei o4deko o5deku o2dep oderle7 o2des o5det. o5dets odie4n o4d3igl o4dj odko5 od3oml od3ov o2d5re o4d3rø od3sk ods4ka od3s4po od5s4tol ods8t odu7s o4dy. o4dyb od5øk o5døs od5øy o2d1Ã¥6 6o1e oe6f o4ein oek6s oe2l oe4mu oe2n oen5a o7ene oeng5d oen2g7r oeng3s oeng3u o5ens4 o7ens. oen5t o3e4re o6erer oer8ma o3ersk o2es oe6sip oe6sit oes8ka oes8ke. o4etan oet2h oe5ti oev6ne. 2ofa o2fa. 
ofag6a o4fara ofa4se o2fav o2feb o4fei o4fera o4fe3st o1fé off9and of4fek of5fes of6fia off5id of6fr of2fu of6fy 2ofi o4fip o1fl of5le 2ofo o6fra. of9ras of1re 6o5fri of8sa of8se. 6oft of4tel oft2s3 2ofy 2og o2ga. og7ank o4gare o2g7av 4oge o2ge. o4ged og1ei oge7na. o3get og4gera og4g5j ogg3s4 og6g7u ogi3a4 ogie2 o4giev o2gif o6gig o4giko o2gil o2gim o2gin o3ging o2gi1o o6gista o6gi7str o2giv og5jer og7la ogly7 og4ned 5o4g5ni og6nel o5g6nos o2go. og6ra og4re. o4g3reg o4g3rei og4rer og7rett o4g3ri og3ryd o4g5rø og5rÃ¥ og3s4 ogs4a ogs6an og3s2e og1sk ogs4kj og5sl ogs4le og1s4p ogs5pa og5ste og3sti og4s5tj ogs4to ogs4tr ogs5t6ra og5stre og5sy 5og5s6y4v3 ogt6r 5o4g5Ã¥t og1un og5ø og7Ã¥s 2o1h6 ohe5te ohm9a o4ho. ohø4 oia4 o1i2d9i o1idr oid5t oie4n oi5er oi6es oi5k oi4la o1im o3in. o1ing oi2n1o4 o4insp oi6r o1is o2is. o6isa o2ise o4isi 4o1j o8je. o4jo. o1ka ok7aft ok1ak oka4n5i6 ok2ar ok5ark o3kas ok1ef ok5els5 ok5elv o5k6en o4kesk o6kev o1kj ok4kand 4okke ok6kera ok4ke5s ok3kje ok3ko. ok5kol okk8s7m ok4kun 4okl ok8la. okla5m ok8le. o1klu ok7lut oklÃ¥5 2okn okna8sa o3ko o8ko. ok8ol o4k3oms ok3omt oko5pe ok5opp ok3orm oko4s oko9se o6kov o1kr ok5reo o6kret o4k5ru oks4al oks4e ok4sek okse5kr ok4sem ok7sen ok6serin ok4ses okse9te ok5s4i oksi7da okst2 oks6ti ok6s5vi oks6tr ok5ta 3oktan ok8t8a8v7 ok3ti ok5to okto4r5i okt6r o1ku ok5u4k oku8le. oku6t ok7ute o5kva o6kvak o3kvi ok5øy o3kÃ¥ 8ol. 4o1la ola6ded ola8ga ol5anl ol3ant o9lar. o6larbe ola5t olber4 olbo7e olbo7ge. ol4bol olbu7 2old ol7dan ol5de. ol4dem ol4dest ol5det ol3do oldo7ve7 ol5drev ol3dri old3s4kr 2ole o5leaks o6leb o2led o2lef ol5eig ole5in o2lek o7leki o2lel olele6ge. o2lem o5lem. o5lems o7lene. o9lenes o4lenet o4leni o4leno o6lered o4leru o2les ole5sta ole5str o9let. o2lev o1lé ol4f5i ol4fj ol3g4e ol1g4l 2oli o4liba oli9e6n oli7ert oli5ne 3o6lj olke3s4 ol4kese ol4kesk ol4kest ol2k3l ol9ko ol6ku ol5kv ol5la. oll3ak ol4lam ol7le. ol4led ol4lel ol7len ol4les ol6linj ol4log ol4lom ol5los ol4lov oll5over olls4t oll5sv ol2lu ol4løp ol6løs ol6løy ol2lÃ¥ olme5s ol4mest ol4mÃ¥ ol9n 2o1lo o4lo. o5loan o3loe o4lof olo3i ol5oks ol3oms ol3op o6l7os. o6l7o6se o6l5osf olo5ve o8l7ovn ol4ped ol7so ols8t ol5sva ols5vik ol3ted ol5tep ol2tr ol6t7ra olt5re. olt5rer olt5ret o1lu olu5l o4l3ung olun8ge. o4l5ur ol5va ol1vo o1ly oly7d ol5ør o1lÃ¥ 2om. 2oma om3ald o2ma2m om5ang oma4nif omann4 oman5t om3ars omar7ø8 om4as om1av omb6 om4bet om4bis om3bl 5ombod 1ombu 2ome o2m1ek o5meka o7menes omer6s o6mese o5met ome4tak ome7tar om1eu 3omfan 3omgre 8omh o2mia o2mib omi3e4 o4miek o2mig o7mikk o7mine om7inn om3in5s o4mio o2mir o4misj o4mist o2miu om1j 2omm om6mat om4med om4metr 3ommø 2omo o4mofo om1op omo1v omp2 ompa3t 1omr 2omre 2omro 2omru 6oms. oms3al om4sek 3omset omsko9d omsku9la om4som oms3un oms4ø omt2 3omtal om4tes om3ti om3tv o2m1u o3mus omvæ8re. 6omy omyr8ke. o2møk omø4r om5øy6 o5mÃ¥ 2on on3abo o4nac o6nak o4n3ang o4nap on3app o6narb o4n3arg o4n3arv o7n8a7sj onat5r o2n1a4v on5b on4dar onder6e ond3re on4d5ri onds4i o4ne. on5eie o4nele o7nell o2nem o3ner o4neru onesi6 o4nesk o4ne3s8t o4nete o2nev on7f on7ga. ong3d on5ge. on6ged onges4 on5gi on5go. ong2r on8gro ong9ros ongs4j on2gu on4gy on4g5ø o9ni. o2n3i2d oni1e o2nil on5ild o2nim oni6mi on3inf o6n7inn o2ni1o o4ni5p o6niso on3j on1k on5k6a 3onkel on5k6i on4kle on5ku onle6g on5nad on3ni on5ny o4no. o5nor. 
o4no4v on3ove on4sh ons1i onsi3s onsi4v ons1k4 ons7ke ons1l onsla6ga onstitue9ra onstitue9re onst5rum ons5und ons1v on4sve ons1Ã¥ ont6a on3te on4ted on5ten on5ti on4tok on5tor ont2r on7ul8 on5ur onu4sk o4n5ø4 on6øy. on7Ã¥6 2oo ood1s o1off oo2k1 oo5k8a ook5es ool7a oo4m5i oo6mo o1o2p oopa8 oop9an oo4pe oop5en oop5et o6opi o1o4r oor6da oo5s oo6sp o1o4v 2op. 2opa o4pab o2pak opan9d o4pa3re o7paren o4pasj o4pau o3pea ope1i opel6lø 2open o4pena o9pend o4penet o7pent o2per 6o3per. 3opera ope6rar o7pero o5pert 2opet o4peta 4oph o1pi o4piek oping9s o4pir 2opl o1pla o4p5land op3li op9lu op4na. op4ne 2o3po 2opp. 4oppa op4p5art op4pas 4oppe 1oppg op4pi. op6pia op4pis opp3li 3opply op6poe op6pr opp5rop opp5und opp1Ã¥2 2o1pr op6re op7ru op7rÃ¥ op6sa. o8p8si op2t1r o4q 4or. o6r7add or3adr o2rag or1ak 2oral or5ald o4r5alg oral4st or6alt. o3r4am o4rana o3rane o4rang o4rans or6ap or3att o4rau ora3uk or1a4v 4orb orbit5 or7by 4orc orda9ta or3dea 6ordel ord7e6pl or7dik ordi4s or4d5ise ord3it 1ordn ordre8gi. or7d6rø ordsa6me. ords6e ord3st ordy9ra 8ore orear8a orear8e o5rebra o8redi or1ei or1el o4rela o6reld ore6na. ore4ned ore4net or3enk oren3s ore6o5g6 o6rerf o4rero ore1s2 o5resc ores6te o4reta or9et8n o6retv o2rev 2orf orfa6re. orfi7ne orfø9re. 1or3g4a 4orga. or3ge or5ger orge4s orgi6e5ne org4sk orha9g orha9le o1ri o3ria ori5b4 ori4e5ne 5orient o7riet o2rig ori7ka o6rim or3ind or3ink or3inn or3ins oriro8 6oris oris4a 6orit o4riti oriti6me. or5ka 3orkes or5ko orko6se. ork7s2 6orl orla7te or5le orm6al5t orm5ang or6map or5mel or4m3un ormæ9 or4nar orned5 orne6dr or5net or4nol orn3t 4oro o3roa o3roe or3off o5rog oro5i o1ron or1op o4r1or oror9da o5rosa o5rot or3ove or5ovn orpe6s 4orr 2ors ors5alt or5sen or4serk orse9ta orse7te. or3si ors5kar orsk5ei4 ors8k9l ors4ku or1s4l orsmÃ¥8la or1sn or3s2o orso9na orso9ne orso9v ors4pa ors4ten or1su orsva9re or3sy or9sø 2ort or4t3ak orta8la. or4t5and orta9pe or4t5av orte5i6g or4tek or3tem or6t5erf or8t7erm or5tii or7til or5tis 3ortod or6tok orto9ne orto4r ort5ori or4tou ort5res ort5rÃ¥d ortÃ¥4 or4t3Ã¥r or1u oru8d or9ude oru4h o5rum oru4t5 oruta4 or4utf orva9ra or5veg orv3s 4ory o7rya or1yn 2orø or9ø8k orøk8t or5ør or3ø4v orøv8d or1øy or5Ã¥s 2os o1sa o4sa5b o6sad o2saf o4saku 6o5sau osbi7 osefi5 o2seg o5selei o2sem os2en o6sena ose5sl ose5sm oses4sk oses6sp oses4st o4seu osfa9ta os2hi os7his osi6e7ne osi5ere osi5ert o2s1ing osk3ei oskei6e o4skil osk9lar osk5len o1sko osko5p o3skri os8k3v os2l os3le oslo1 os2lo5d4 os5lu os5lø os9ma os5me os3mo o2sn o3so os7ove os6pa. os6pe. os4pil os4por os4sek os3sem os5sens osseri7e6 osse5v ossis5te os6ski oss3kj os4s5ko oss9l os6spa oss7tro os7sty os8sv ost5adr os5tal osta5le o8ste. os4teg ost5egg os4tek ost6el os5teo o4sti. o8stia o4stie o4stin os5tis ost1o os6tons os6trad ost5ran ost3re o3stro ost5rup ost7rÃ¥ o5stø os1v os5øy os7Ã¥r 2ot o1ta o5tad ota4l5a ot5ald otal7ev ota4lov o9tane o9tar. ot3arg o8t9arm o5tas ote7d ot6ei ote5int otek5i ote6k7la otekla8ga ote4kle ot5elem ote5lev otel6lan otel6lek otel4li otel8lø o4tena o4t5ende oten8de. oten4s5p o2teo o4teram o4teret o6terev o4tero oter5s ote5r8u o4terÃ¥ otes6ter ote5sté otet5a otet5o otet3s6 otfø4 oti7ert oti2k otika3 ot7i6ko o1tj o6tja ot5jer ot6nero otno7te. ot6nÃ¥ o1to o2to. o4to3a o5toa. o2tob oto6en. 
o6tof ot3off o6toga o6togram o4toi o2tol oto5ne oto4ral otor5d o2tot o2tou o1tr ot7red ot5rer ot5rev o6t7ri o6t5rom o6t5rø ots5el ot6sh otshusvæ8 ot4s3ki ots6op otsopp6 ots5pr ots5tab ot7ste ots6Ã¥ ot6tank ot5tas ot5tegn ot6tenk ot5tese ot5tin ot5toa ott4s3k otts5po otts4ti ot7tug ot4typ o1tu o6t5ut o1ty oty8e ot1yt o1tø ot7Ã¥ oub8 oul5l oun6ge. ou4r ou7ri ou5ro ou9sa out8a ouve4 o5vae o1vak ovanfø8 ovan9o ov3anv o7var. o6varb ova9re ov5art ov7arv oved3 o1vei o1vel ove4la 4oven o4vend o5vende ove6nya ove2r over3a ove7ra. ove9ras o6verdr overe6 ove8r5es 3o6verf 3overg o5verks 5overr 5oversik overs4p over9v o1vet 6ovh 2o1vi oviso3 2ovj 6ovna ov4nes 2ovni 4ovo o5vo. o7vom ov1or o5vot ov1o2v ov5sal ov4sek ov4sen ov9ske ov4sle ovs1p ovs5te ov4sti ov7sun ovsy5k ovta6le. ovveg7i ow1 owa8 o7was owat2 ow5h ox3 oy9ar oys5l o1y2t o1za o3zy o1ø o1Ã¥ ô6ra ô2re ôr5ei ô1ri ô4t ó9sa ó7t ó8v ò9re 1pa 4paa 2pac pa8cen p5ad. pa3deb p4adg pads4 2paj 6pak. 6paka 4pake pak4kas pakke4s pakk7esl 4pakti pa3kv 3pa1la pa3le pa6le4o7 pa7lim pal5in pal1j pal5lø 5palm 4palø 2pam pa5me p3anal pa4nap pan9de pa5ne pang5s6 pan3ka n7k8rea 9panne. pan5se pan5sl pant8r 4panv pa4ny 2papa pa8pa. pa3pe 4papo pap4pr 4papr pap3ri pa4ra. 5parad 2parb 4parek 4parena par5ess par5g 2pari pa4ri. pa3ris 4parki par6k7l par4kv par3m8 pa1ro 4parr par8ra. par9s4 par6tid par4tig par4tin par5u par7v parvi6 6parø 4parÃ¥ p6as pasa5 pa9se pase6r5 4pasp 3pass pas5sab pa4s3t pas4tar pas5ti pas9v pa1t pa3te pa4tist pa6tre p8atta pat6tak 2patu p8at6v pau7k 2pav pa4ve. 3pa1vi 2p1b6 pba4ne. pbo6da pce6 2p1d4 pde4le. pdø2 pdø9d 1pe 2pea peak3 pe4a3re 4peb pec3 pe7d6a 5pedas 4pedd pe3de pedfø9 pedi9e8 pe6doa pe6don 4pedr pe4dro 4pedu 4pedy6 2pedø pe7dÃ¥ pee6 pe3er 2pef p5eft 2peg p5egg peg8ge. pego4 2peh pe4il pei4leg 2pe1in pe6ism 2pej 2pek pe8ka. pek4l pek4tro pekt7ros 8peku p6el 4pela pe5l4aks 5pelal pe5lar pel9d 4peled pe4lef 4pelei pe6lep 4peler pe6lev 6peliv 6pelj pel4lo 4pelov pel5s6e pel5s4i pel7st 4pelu pe5lun 2pely 2pelø 6pelÃ¥ 2pem p1emb pe4nan pe4n3ar pen3de 6p5enden. 4pener pe3net 5peng8 pen4gel pen7gl peni4n 4peniv penly4 pen7s8a pensa7k pensa8la pen5sk pen4s5l pen3s6m pen5s6o pens4t pen9sta pen7ste pen7tag pen5tr 6peny 2penÃ¥ 2peo pe7o6s 2pep pep5ar. pep7p pera3a 4pe4rab 4perad pe4rai pe4ral 4perap pera5t 4perate 5p6erc 4pered 4perei 4perek 4peresp 4perest 4peresu 4peret pe4rev 3peri perie8ns 4pe5rik peri3s4 4perit per7k8 per6les per5mu per5n 4pero pe3ros pero6se. per6regj per6rei 5perro pers6m per4tro per4t5rÃ¥ pervi6k 2pery 6perø 4pe7rÃ¥ 2pe1s 3pes. pe3se pe6se. pe7si pes8ka. p3eske pes4ke. 6pesl pes4n 5pess 3pest. 5peste. pes5til pe7t8 4petab peta6ka 4petal 4petas 4petau pe3te 5pete. 4peted 6petei 6petek 4petel 4petem pe3ti 4petid 4petil pe4tim 2petj 2peto 2pet4r pe4t5ru pet1s6 4pett 2petu 4petø 4petÃ¥ pe2u 2pev peva8ne. pe5vi p5evn pev6ne. 4peø 4peÃ¥ 3pér 2p1f 3p6fenn p3fo p5fr pfri4 pfø5re 2p1g2 pgjer4 2p1h phav2 pha9va phavs5 3p4her phe9te phe7va phe5ve phi5li 7pi3ane 7piar pi4as 7piase 4piav 4pib pi6ca. 2pid pid3s2 pie2n 6piend pi3er. pi9ers 2pif pi9fr pig5ge pig6g9u p7i6gj 2pih 6pii pi4keh pi6kel pike5r6o pi4ké 8pikk pik3ko 4pikl piku5 pi5la pi6la. pi3lep pil4lag pillba6 pil6led pil4leg pillega6 pille6ra pil4le5s6 pi1lo 2pim pi4na. pi4ne. pi9ned pi4nel pine4v 3ping pin6go ping3r ping5sk 6pinns pin4sl pi2nø pio6n5an pion5s 7pi2p pi4pi pip9la p8pip pi4rar pi4res pi4rut pisi9e pi4ski 2piso piss4l pis4sp pis4st pis1t pis4t5r pis9t8ra. 1pit pi9ta pita7la. pi5té 2piti 4pitj 4pitr pit5t 2piu 2pi5v 4piø 4p5k4 pka5v pka8va pl6 8pl. 
2plad pla8de. p4lak p4lan. plan7de 4plane. 4p5lan5g plan5s plap3 4p1lar p2las pla4st p2lat platina5 2ple. 2pled p8legi 4pleis p2lek p6le5n4u ple5n4a ple6r5u ple8se. 4plev p2li 4plit p3liv pli7va plo4gj p4lo8i p1lok 4p5lov plu4e p4luk plun3 plun6d7ri p1ly ply5d8 plæ5re plø6pa p1lÃ¥ 2p3m 2p1n4 p7ner pne6se. 1po po9a pod8 po6da. po4de. po6em 2pof po6f7r pog6 4poh po5id 2poke po6lan po3le po4lek 6polj pol5li po6lom pol6s4ka po2lu 2pom pom6p9u pom5s 4pon. pon5d pon4gr pongs6 2pon1s pon5sa pon4sv po2p1a po6pe. p5opn pop1s 2por. po1ra 2p5ord po1r4e po4ref po6reg po8ré 2p1org 2pori 2pork por6s7v por4tor por4t5ro por4trÃ¥ 2porv po1ræ po1rø po4se. po4ses 4posj po2st po4sta pos5tas po5stat pos3te 4pos4v 5pot. 7pota potak9 po3te po4te. potek5l po5t6ha 3poti 4potr 4pou 2po4v pove6 pover6n pow4 2pp p2pad p2pak pp3akt p2p3a4l p9pane pp5angr pp5ank p4p3anl p2pap p9par. pp5arr p7past p4p7at p4pee p2pe5i4 ppel5s6 p4pena p8p9ender p4pendr ppe9nes ppe5p8 p4penø ppe5ra p7pere. p4perkl pp7e6sen pp9esn ppes8ti p4peta pp5e4tas p4pete pp5ett p2peu pph8 p1pi pp1id p2p1il pp5im pp3inf p4p5inn pp7ir ppir8re. p4pist pp1j pp3k pp1l pp5last pp3led pp5lei pp9lev pp5n pp1of p2pol4 pp3old pp5om p2p1op pp3ork pp1ov pp5p pp1r pp5rei pp5rin pp4ris pp7riss pp9riv pp9ro. ppropri6 pp7rot pp1s ppse6te. pp3ska ppska9k pps2p pp9spe pp3spl pps2t pp7sto pp7t2 p2pu pp1uk pp5ut pp3ø4 ppøs8 p6pÃ¥ pr6 4pr. 4prad 3praks pra7li 2pran pran3s 5prat. 5prate pra5te. 4pray 5pref prei7er. 4preii pr5elem 1prem pren4s 1pres 6pres. pres6sak p6restas pre7t6en7 4prett p3rif pri5ke pri4l3e 4pring 5prino 3pr6in6s5 3pr6in7s6e 3pr6in7s6i 1pris pri6s5k pris3t 2prit pri9ve 1p2ro 8pro. 6p7roc 3prof 4prog. 4proge 4progl 4p3roi p5rop. 3pros 7prose 6pru prun7ge pru5ta pru5te 6prør prørs5t prø5s4 5prøv prø5vels 2prøy 4prÃ¥ prÃ¥8da prÃ¥k3i prÃ¥4ko prÃ¥k5k6 2ps p1sa. ps5a6n p1sc p3se. psei8ge p2sek p2s1el ps5e4ly p3sen. p5sens p7ser. p5sete p5sets p2sh p6sib ps5ins p7sis p3s4jo p4s3kil ps7kjen p2s1ko p7s6ko. p3skod p5s4koe p2sle p9s8lo. pslø8va pslø6ve. p3s4lÃ¥ p2sm psmÃ¥8la. p2sn ps4no ps1o p3sod pspi9la ps5ple p3s4pre p4s5pro ps5pu ps3s2 pst2 ps7tal p4s5tem ps3tv p5s6tÃ¥ psu4r p3s4us ps1v p2sva p2sve ps4ving p4svæ ps5w psy3ke 3psyko 4psys p2s1ø p3s2øk psø4ke. psøy8 psøy9ene ps1Ã¥ 2p1t pta8la. pta4le. pt7ark p3te pte6k pte4ma. pte7re p5ti pto7g p4tou ptus5t p4tut 1pu pu4br 5puc 6pu6dy p5ugl pu2k pu7la pu8le. pull6 5pum pu4ma. punk4t5 punk5t6e 2punn 2pur pu4re. pu8rea 3puri 3purk pur5u pur3v pu7sa pus4h pusl7u pu1ta pu4ta. pu5tas pu5ter pu5tev 4putg 2puts put6tr put4tu 2putv 6putø 2p1v pver7 pvi4se. py4dr py8o 3pyra py1re py1ro 6pys py6sa. py4se. pys6t 4p5z 1pæ pæ4re. pø1kj pør4ret pø8sa. pø9ta pø9te p1ø2v8 5pÃ¥b pÃ¥4by. pÃ¥1k2 1pÃ¥l pÃ¥4la. pÃ¥4le. p5Ã¥n pÃ¥3pe pÃ¥1pl p1Ã¥pn pÃ¥1r 1pÃ¥s pÃ¥7sko pÃ¥5sm pÃ¥3t2 6pÃ¥tÃ¥7 1pÃ¥v qa5 qu2 qu9ar. 1que que7r 4raam 4raar 4rabis ra1bo4 4rabr 2raby ra3cet ra3ch 5raci ra3d2a 4radf 3radio3 4radir 4radj 2radm 2ra1dr r3adre rad2s3 radvi4 ra5ede rael4 4raerk raf4fer ra4fiu ra2fj 2rafo ra5fo. ra4fos 2rafr ra5fre 6raft ra4fu ra6fy ra7gea ra5gee 2ragl 2rah6 7raid ra5int ra3isk 6r5akad ra9kar ra7kel ra5k6ha r2akk rak4kel r6akr ra5kro 2raks rak6sa rak3s4e rak6se. rakst6 4rakti rakt3r r4a5ku 4rakv ral5ans 6ralbum 5ralds 4raled ra5leo ral3g 4r5alge 4r5algo rali5e6n ra4lin ral1j ral5le ral7m ralo6i ral3op ra5ly 4ralø 4ra5l8Ã¥ ra4mag ra4mas ram3b 6r9ambi rambu9ens 6ramed 4ra4mer ram8et. 4ramil ram1o ramperi8 ram6p3u ramse8te. ramta8la. ramta6le. 4ramu 6ramy r4an. ra4naa 6r3anal ra5nar ran9cs r4an9de. 
r5andel rand3r rand5s6a r4ane 4ranfa ran5ge 6rangi rang5st rania8 ra6nin ran3kv 2r5anl r6anli 2r1anm r4ann ran6n5ett ran4n5in rann3s4 rans7kar 4ransv ran4tik ra5nu ran6ut 4ranø 2rao 4rapa ra4pel 4rapin ra4pir ra4pis ra6pit ra1pl 4rapo ra4pos 4rappa 4r5appl 3rappo 2ra1pr 4raps 4rapu 1r4ar. 2rarb 7rarbeh 4rarea 4rareg rar5e6l 4ra3rep rar7eta r1arg 6rargu 8rarinna 6rarinne rar8ka. 4r1arm rar8ma. 6rarn 4rarr rar3re rar5s 2r1ar5t rar6ta. ra3rø 4rasa 2rasc ra3sea 4rasel ras3h raska8ra ras3ke ra4ski ra7s6ko ra6sl rass4l ras7st 4rasti ra5s4til 4rastj rast5re 6rasty ras7v ra1ta ratak9 r6ate ra4te. 6ra5teg 8r7a6teli 4ratfe ra1to ra1t4r ra4t5ro ra4trø ra5t6røy rat5tel ratte4s 4ratub rat5ut 6raty 7raud raud3s 6raug rau6ga rau6ge. 4rau4k raus6s 2rav 6ravd rave5s4 4ravg ra1vi r3a4vis 4ravl rav8l9ut ravlø8pa ravlø8s rav6r 4ravt ra5vy ra3vør 4raw raz5z6 2raø raøy4 2r1b8 rba3d rbe2d rbede4 rbed5en rbed5et rbed9ra rbie8 rbi9er r2big r2bik rbist6 rbis5tr rbo8di. rbo4ni rbon7s rbo5re rbra5s rbrei6 rbu5en r5bø r1c 4rd r4dab r5dag rdag4s5 r5dah r4d5ak rdal4 r4dala rda4le. rdals5 rda4mes rda8n9o r4dant rd5anta r4d3arm r4d3art rd5atl r4datm r4d3au rd3d2 r6dedi rde4en r3defi rd5e4ge rd1ei r9deka r4dekl r4deks r6delo r6demi r7dena r6d7eng r6d7enh rden4s3 rdense4 rdenta8le. r4deo r6depo r4derik r4d5erst rde6sm rd3e4ta r6d7ett r8dé rdfes5 rdi3an r4diana r4dians r6diau r4did rdi3e2 r2dif rdi6gres rdig3s4 r4dika r4dikl r4diku r2dil r6dimi r2dio rdi3ov r4dish r2dit r2diu rd5je r1dju r8d7m rd3n rdnæ4 r9dob r4d5o4d r4dol rdon8na. rd1op r6dor rd3ost rd1o4v rdover5 r9drad r3drak rd5ran rd7rara rd3ras r3drei rd3ret r5drev. rd1ri rd3rot rd3sei rd8ske rds4kv rds5tan rd3sto rdsto5g rds7tre rds7tu rdsva9r rd5t rdta8ka rd5tr rd7tø rd3und rd5ve rdvi8ka. r4dyg rdy5pe rdy3re r6d7yt r6d5æ r7d6ær r7d6æ5r6e rd1øs6 rdø4ve. rd3Ã¥r rd7Ã¥6s 1re. 2rean re7a6r7an rear5e 4reav 2reb 3redak 6redam re3def re5den re3des re3di 3redig r1edl 2redo 2redr re4d5ri red5sku red4sl reds5la red7s6led 3reduk 6redy 2redø 2ree re4el. re3er 8refa 4reff 4refi 2refj 6refos 6refr 4reft 2refø 4regar rega7ta re3ge 4regeb re5gel 4regen 4reger 4regg 1regi re4gia re4gil reg1l 2regr 4regub 4regud 2regÃ¥ 2reh 2reid 2reie 2r1eig rei8ga. rei8ge. 4reik rei9l rei5na. rei7nas re5inde rei5ne. rei7nes rein6skj re4inva rei9ra rei5sa rei3si rei7ska reis6led re7ism rei7v4a rei5ve 2rej 6rek. re5ka. re5kav re7ken 4rekj rekk6an rek4k5v 5reklam rek4led re5k6lir re7ko 4rekob 4rekom 4rekon 6rekos 4rekra 3rekru r3eksa 6reksp rek4ter 4reku r8el. re9la 4relag 9relandsk. 9relandske 2r2ele rele8ge. r4e5lei 6relek re7len 7relene 6relg r3elit 4re3lj r2ell rel4lag rel6land rel5led rel4lev 5r4elm rel5ses rel4sk 4re1lu 4relv 2re5ly 2relæ 2relø 4re7l6Ã¥ relÃ¥7r 2rem remann6 r1emb remi6ene remi6l re7mis rem9ji remmed5 6remn rem8na rem5p4 rem1s remti9da 4remu 8remy 4remÃ¥ 2re3na ren4del 4rendr 8rened re5neg re7nei 4renek r3e4nel 4renest 6renet 6rengd reng5l 4rengn reng5st re5ni 2renk ren6kl r3enl ren8ne8sl rennes9la re5no ren5sa r3enss 6r7entit 4r3en5tr 6rentu 4renu 5renz 6renæ 4renø re5og 2reop re3o2r 5reou 8repen 6r5e6pi 1repu 6repus 1rer 6rerad 6re9rang 4rer6at re5re. 4rered 4reref 4rereg 4rerei re4rek 4rerent 4rerep 4rerese 4reresu 4reret 6rerfa 4rerig 4rerik 4r3ernæ 4rerol 4rerom re3ros rero6se. re5rot 3rers rers4p 4rerute re6r7øy 2rerÃ¥ 1res. 2resa re2s3c re3se 4resel re4sem 4reset resi7ere 4resin 2resj 2resk res5ke re6s7kje. 
res6kl res7kod re6sky 6re1sl re4slu 1resm re5smo re3sov re9spe 4respi 4respr 2ress res4sal res4sek res4sit res4sj res6sk res6sort res4sp res4st res4sy res9tane 5restau res6t5erv res4tes res3té 4re3sti res4til re3str 4restre 7restv 4re5sty 4restÃ¥ 1resu 6resuk 4resun re3s4ve re3svi 2resy 2resø 4retaki 4retal re4tap 4retea 8retek. 6reteke 4retekn 6retel 6re5tem re5ten 4re5ti 4retj 4retoa 2re5t2r rets4i ret4st ret7ted ret5ter rett8o rett6set 4re5tu 2rety 2retø 5retøya. 7retøyets 4re7tÃ¥ 2reu reu6r 4revak re5van reva5re. 6reveg 4revei 4revel re6v7enh re5ver reve5s r3e4vig re4v5inn re7vom 1revy re4v5Ã¥p 3rew 2reÃ¥ 2r1f rfa5re rfat5 rfe8en. rfe8er rfe4et. rfei5li r4fik rflÃ¥3 rfe5m6ø rft2 rfu8se rfyr4 r9fæ rfø8rarar 2r1g r5ga. rga8le. rga8li rg5and r7gann r4g5anv rga3ri rg3art rga4ve. r6gedi r4gef r2gem rgent4 r3geo r4gerei r4geret r4ge3ru r4gesj r4gesl r4gesta rgi1a rgie4n r2gik r2gil r2gim r2gio r2gir rg3i4ri rgi7sl r2giø rg2le rg5le. rglem5 rg3len rg3ler r2gn r4gog r3gom r2got rg5rab rg3rea rg3rel rg5reps r6g5ri r3gru rg5sc rgs6kor rg5s4le rg1sn rg5s6till rg5sto rg9stu rg2sy rgu7d rg6ut rgÃ¥9as rgÃ¥6va rgÃ¥6ve. 2r1h rhav2 rhavs3 rhjul8s rho3d r7hu rhø5re rhÃ¥nd6s9 rhÃ¥9ne ri1an 4rianf 4rianl 6riansv ri1ar 4riarb 4riarr 6riau 2riav ri4ava 3ri6avo 2rib ri2b3l ri8ca. ri4co. ri5da 4ridale ri5der ri5di 2r1idr rid3t 4rieie 4rieig 4rieks ri1el 6rield 4rielem 6rielim riel4la rie4n ri3end 4rienh 4rient rien5t4r ri1er rie5ra rie5ri 4rietat ri5even 2rifa riferi9e8 rif4fi rifiserba8 6rifj 1rifl rif5la 2rifo 2rifu 4rifø4 ri4ga. rig4gr ri3gi 4rigj 4rigre rig2s rig6s7t 2rih 2rii 2rij 5rij. ri4kali ri5kan 5rikd ri3kes ri7ki rik7ken rikk5j 4rikl ri8k9la 2ri7ko 6rikon 2ri1k2r rik4sk rik4s5u rikt6 rikts3 riku6m ri3k4v 4rikÃ¥ 2rila 6r5ilde 6riled ri5lei ril6lest 2ri5lo ril4sn 2rilø 4rima ri9mab ri9mar 6rimes ri5met 2rimi ri4mi. 7rimis rim7l 4rimm 4rimo ri4mor 4r1imp 4rimu rim9ut 4rimy rina5l ri5n6am 4rinas 4r5inc 4rind r3indu ri4nee ri4nes 2r1inf rin7gom ring3r ring4sa4 rings5ak ring8spa 2r3inj 2rink 4rinn rin9nes 4rinor 2rins rins6k rin6sm 2rint rin4t5j rin4tr 2rinv ri6nø 2ri1of 6ri3om 2ri1op 2rior rio5s4 riot3r 2rip 4ripe ripo4s3 4ri1r 4risau 4rised rise5i 6risek 4risel ri4seli 4rish 5risiko 2risj ri6sju 4riska ri4ski 6risku 4risky 6ristad 4ri5sted ris5tik 4ristil ri4sto ris5tof ri5stun 6ristø ri6stÃ¥ 4risv 4risy 4risø ri3te 6riteo ri7ti riti9da 4ritil 4ritj ri5t6o ri5tr rit1s6 ri6tun 4rity 2riun riu4r 2riut 4rivar ri6ved rive9ge ri5vei 4riverk 2rivi ri4vi4s riv5ise 6rivo 4riøk riø4r3a 8riøya 4riÃ¥ r1j rj7ambi r4jere r2jes r4jeti r4jetr r4jetu rju6la 8r1k rk3akt r6kanf r4kao r4kapr r6katr r3ke. r2keh rk5eik rkei8ka r6kek r4kelo r4ke3lu r3ken. r4kenav r3kene r4keni r5kens rke5ri r4keris r4kero r5kers r4ke3ru r4kerø rke4se rkeslø7se r9ket. rk4han r3ki rki3d rki3e rki4vi rkjek8 rk5jor rk1k2 r5k8led rk2li rk5lun rk9lut rklæ5re rk2lø rklÃ¥9ra r3knek r5knep rk7nes r3kno r2kob rko6b5r r6kofo r2koh r4kola r4koli r4kope r4kora r4koru r4kosel rko6sele r4kosj r6k7ras rk5rei r5k6rem r8ks rk4sar r6k6seg rk2s1i rk4ska rk1st rk6stal rk4sten rk4s5ti rk4stj rk4sto rk6s5vi rk5ti rkti4s rk5to rku4le. r6k7u6t r4k5ve6d rk5vei r5kvel r4kver rk3ves rk5vik r4k5øl r2k3øy rk9øys rkÃ¥4k rk5Ã¥ke rkÃ¥6pa rk5Ã¥4s 2r1l rla4te. rle4a r2lef r3lep r4l5e4ri r6le7sl rle4st r4lesu r4leu r3l4i rli9ke rling3s4 rli8ta rli4te. r3lj rl6o rlog2 rlo5ve rl4sk rlu4e r3ly rlys7k r6l5z rlø8pa. r5løy9 2r1m rma6ge. 
rma5k4l r4m5alte rm3anl rm4ans r6manv r4mare r4marr rm5av rm3b r2me7g r2mek rme6lap rme5ne r4menet r6merev r4mese rme9tar r4mey r4m5ide rmi6ene rmini6 rmin5ski r6m5inst rm5i4v rm1je rmlø8pa rmo4e r2mof rm1op rmo7st rm7p rm5s6ko rm1sl rms6n rm1st rms5til rm1su rmta8la rm3te rmu7an rmue4 rmue5ne rmu8la. rmu6le. rm5øy. r9mÃ¥. rmÃ¥l4 r6m5Ã¥p r9mÃ¥r r6m7Ã¥ta 4rn r3na r4n1ak r4n3ald r6n5appar r4narb r4n3art rnat7r r4natv r6n9a6vis rn3avl rn3d rndø4 rndør5 r3ne. rne5a2 r5neboe r2nec r4nef r2nel rnele6ge. r3nell r3nels r4n3eng r4nenh rne7p r4nerei r6neris rne3ro rne1s2 r5nes. rne6se. r8nesi r4ne3sk r4nesm rne3so rn5e4tab r4nete r2nev rne5vr r2nés rn5g6 r4ninn rn5k4 rn3n r5no. r4noa rno5b r4nod r4noi r6nok r2nom rn5omn rn3oms r4n1op rn3ork r2nos r4n1o4v rn7se rn4s3in rn5ske rn3skr rn5sla rnsle7ge rn7s6mi rn6s3ovn rn5spon rn3s4pr rn1st rn4sti rn3te rn5ti rn7tr rntre4 rnt4v rnu4 rn5ug r6n3ut r7nøt r4n5øv rnÃ¥8le. rn3Ã¥s ro1ak roa4s ro5asi 9roban 9robar 1robe ro4bed ro4bef 5roben ro4bes 5robølgj 1roc r3odds 7rodo ro5end ro7ens ro7fa ro4fel ro4fem roff5ri ro7ga. roga9ta ro7ge. rog5ret ro6gry rog1s4 ro2gu 1roi ro4kel ro5ki rok6kat rok6keri rok5kl rok4kom ro7k6l rok5n rok7s rok8se. rok5v roli7ga 4rolj rol4lab rol4lap rol4leg rolle8ge. rolle8se. rol4lis rol6ly rolo9v 2roly ro6mak ro4mal 3ro5m8an ro4mate 2rom7b 4ro3me rome5d 4romk 4r3omn ro4mor 2romr rom5sla r7omsy romsø3 rom3t ro5ne. ro7nim 6ronism 6ronista 8roniste 6ronisti 4ronn ron4na rono5s ron1s4 ronta6le. ro4pad 5ropet ro4pia ro8pla. 2ropp ro9py ror3a ror6da. ro3re ro1ru ro4sat ro5sel ro3sen 4rosl ros3la ro4sm ro6sov ro1s1p ros4st ros4sy ros7tas ro3sti ro3str rost7rø rot5ekte ro5tes rote7ster roti7ka rot5ord ro5tu 6roty roun2 round3 ro5ut ro5va. ro9vare ro7vas ro5vek ro7ven rove5re rov5sm rovve6 rò6te. 8r1p r5pa r6parb r5pefo r4peno r6penø r5pesk r5pet rp6j rp2l rp3lad 4rp3n rprø5ve r3pu rpun7g r6p5ut. r6p5øy r2pÃ¥k 8r1r r8raa r2rag rra3r r4raro rr3d rr6e r4reb r7rebart. r2ref rre7int r4rekl r5relat r4reo r4repl r4re3ru rre5sk r4resm r4reso r4respe r3ress rre4st rres5ta rre7sti rre5str rret6s5 rre5u r3ri rri6ka. r6rip rri5v rr3m4 rrmÃ¥8la rro6e r5rom rro8sa rro8se. rro8ta rr5s2 r2r3un rr5v rrÃ¥5de 4rs 6rs. r1sa rs3ab r2s7ad r3sak rsa5ka r6sakt rsa6la. r8s9ald rs3all r5sam r4sarr rs3arv r1sc 5s6ch6l r8sedi rse6g7 r4seku rs4ela rs7eld r4seli r4s3elv rs5e4rik r3ses r4sesi r6sesu rse4te. rs6ett rsett8o r7sim6 rsimp7 rs1in r5s4ing r4sins rsis5t r7sja r4sjh rsj3or r3skad r6s7kaf rs4kam r3skap r4skar. r4skas r4ski r5skil rsk7inns r1skj rs5kjens r6skl rskla8g rsk5lar rs4k5le rs4k3læ r5sko. r5skoe rsk3op r4skor r3skot r1skr r4s3kra r5skriv r4s3kro r1sku r5sku. r5skue rsku7et rsk5und rsk5var rs5kys rsk5ø rs4lef r4slei rs4lek rs4les r5s6lit rs3lok r4slun rs4make rs4mo. rsmÃ¥l4 rs6nev r1so rso9a rs5oml rs8o6n5 rso7n6al rso5n8e rso5n6i rsons4 r2s1or rs5ord rso7ris rs1ov r1sp rs4pan rs6pat r5spel r4sper r7spes r5spi rs4por r5spred r4spÃ¥ rs3s6 r1s2t r4s5tabb rs5tank rs3tap r6st7b rs4ted rs4tem rs5tend rsten6s rs5ter. rstev9na r3stil r4s5tilf r4s5tilh r4s5tils r4s5tilv r5stis r4stit r6st5k rst4r rs9tre. rs7tree r4strin r4s5tro r5strø rst7ut rstu9va rstyg7 r3st6ø r7stÃ¥ rs5ukl rsu9r rsu7sa rs5usi r1sv rs8vak rsva9ra rsvar4s5 rs1ve r3s4vek rs5vit rsy4na rsy3t r1sz rsøks3 rs9øye 6r1t rt5ad r4t5af rta4ka. r4tana rt3anl rta9pa r4tarr rt3art rt3avs r4tec r4teda r3tede r2tee r4tego r4tegr r4teie r4te3in r4teka rte6ke. r4teki r4teku rt5e4lit rte6ma. 
r3temp rten4s5k rtentle8 r2teo r7t6er r5terd r5teres r4teris r5terk r4terÃ¥ r4tese r6tesk r6testi r4teta r4tete rteus8 r4tev r4th rtian8d r4tians r4tiar rti8ar. rti5en rti6gra r4ti3kv r4tila r4tili r4tilo r4ti7na r2tio r2tip rti7sa r6tiska r4tiski rt6i9so r4tisp rtis3s r4ti5str r3titu r2tiv rtma6le. r5to. rt3off r9tofo r9tok. rt3omk r4t3opp rt6opp. rt3ord r6t7o6s rt1o4v rt2r rt8ra r9tre. rt6red rt3reg rt3rei r4t5reko rt5rel rt5rep r7trib r4t3ris r4t5ros rt3rut r5t6rÃ¥l rts3ar rt4seg rts5eng rt2si rt4s5ja rt5ske rt3skj rt5s4no rt3s4pe rt4sti rt7s6trek rt4s5tøy rts5unde rt3t4 rtu6en. r7tug rt3und r2t1ut rtu8ve. rty8da. rty8de. rtyr5s rty6ra rt5yt rtæ9ra rt5øl r6t5Ã¥s3 ru3and 6ruav ru6avh rub6a ru8bl ru5bo ru4di. rud4r ruds4l ru4ele rue4r rue3s4 ruga8l ruga5t rui3d6 4ruk ruk4su rukt3s ru9la 4ruli 6r7ulk rul8ke. r2ull r7ulv r4um rum3al rum4p9l 5r2unde run5del 6r3under 7r4under. r5unders rund3r rund3s4 run6ge. 4ru2ni run5kr r7uly ru4nøy ru2r ru5ra ru8ran ru8rar ru9rer rur8ta r4us rus2h3 6r7u6sik rus5j rus7lu ru1s4o rus5sel rus4st ru4s4t3r r4uta ru3tal ruta8la. r7utan. r4utbe 4rutd rut8e rute3i ru4tel ru9tene. ru9ter 2rutg r1utk rut9o 6rutr rut4re ru6trø rutto5 2rutv rut5ø ru5va ru4ved ru4veg ru4vei ru4vel ru4vere ru4ves ruvi8 ru6v7is 2r1v rv4a rva7ka rva6la r8v7arb rve3de rve4den r4ve5dr r4vega r4vegi r4vegr r4veim rveis7e6 rve5kl r4vela rvel9le r4v5eng r5ver. rve5re r2ves r2vev rvil9l rville9d rv2j r4vos r4vov r3vr rv2s1 rv5ung rvÃ¥7r 4r1w rx1 ry1a4 ry2dr ry7fe ry5fl ry5ke. 2ry1kl ry7le ryl4l5i8s 4r5yndl ry4nes ry5nes. ry4pa. 2ryr ry8re r1yrk ryr4ke. ry9ro ry5rø rys6sal ry5ta ry4tek 1rytm r3ytr r4z ræ7le 8r3æ4re 8r3æ4ren rær5in rær8t ræ8v 2rø. rø8ar 6røb rø4be. rø4dek rød1s 4røep røf5l rø4ke. rø4kero røk3l 4røkn røk5s4 røk7t røk1v 2røl rø6m røm1a røn5nes rønn5s4a rønn5sk røn5sko 5r6øntg rø4pe. røp9l 1rør rør5d6 r6ørende. rø7ret 5rørl rø2r3o rør4sp r8øs. rø3se rø5sla røs5v rø8ta røt9as rø1va rø5vede rø9vers rø1vi r4ø5væ 2r1øy. 4røya røy9ar røy6ed røy6ene røy5es røy4et 5r6øyk 3røyr røy5re røy8senes 6røy4s3k røy4st 2rÃ¥. rÃ¥7a 4rÃ¥ag 4rÃ¥b 3rÃ¥det rÃ¥dy9ra rÃ¥9ene 2rÃ¥f 4rÃ¥5g4 2rÃ¥kj rÃ¥k3re rÃ¥2k3u rÃ¥k1v 4rÃ¥l rÃ¥le7s8 rÃ¥4let rÃ¥5let. rÃ¥5lu r5Ã¥nd rÃ¥n6da 2rÃ¥p 2r1Ã¥r rÃ¥rs5k rÃ¥5ru rÃ¥1s 4rÃ¥sb 2rÃ¥se rÃ¥5si 2rÃ¥sj rÃ¥s4t 6rÃ¥ta rÃ¥ta8ka. rÃ¥5tr rÃ¥5tu 2saa 5saa. sa4ba. s6abe s8abl sa5bok s3abon sa5by sa3ce sa4dag 4sadam sa4dere 4sadm sa4do 2sadv sae5d 3saen 7saer 5safa 1safe 4s3affæ 5saga sa4ga. sa4gas sa4gat 6sagent 6s5agg 6s5a6gi sag8na sa6go. sa4gog 2s1a2gr sag3s4 sa3ik sa5ir sa1is 5sak. sa2ka 3saka. 4sakad sa5kai 3sakb 3sakd 3s4a3ke 5sakf 1sakh sakh5e 1saki sa4kj sak5kr s4akky 5sakl 5sakm 4sako 3sakp sa1k2r 5s4aks. sak4s3i 4s3aksj sak4sp 5saksr 5saksu 2s1akt sak6ta. 5s4akto s4aku 4sa7kø 1s4al. sa1la 4s5alarm 7sala. 5sa5lat 4salb 1sa3le sa9let 1salg s4alge 4s5algo salg6s5 s6ali sal4mes sa5lo 5sal2s3 sals4a 4salter sa1lu 3salv sal5ved 2saly 1s2am 5sam. sa2ma sa5ma. sam4an7 sa5mas sam5ei sa4mel sa4met 5saml sam4lev 5samm sammen5 sa4my 4s1a2na sa9na. sa7nar sa3nat san6da. san7d8al 5s6ande. sand5r sand5s6lo sand5st san4d5ø 1s2a5ne 4s3a6nek 5sang. 3sange 4s3angr 8s7angst s7anken 2s1anl s5anm san5ne 6sanno sa2no 4s5anor san5os san1s 5s4ans. 4sansa 5s4ansen san7s6k 4s5ans8l 6sansv s8ant. san9te 6santr 4santy 4s1anv 2s1ap sa2po 1s2ar. 1sara 6sarab 2sarb s4ard 9s8are. 4sareal 4sareg sa5rev 3sari sa4ri. sar6ka. 4s3arki 2s1arm sar5me sar8me. s1arr 2s1art sar4ta. sa4ru 4sarv s4ary 1sas 8sasju 2s1a4sp 4s1ass s6ast 4sasty 2sasu 4sasy 1sat sa1ta sa5ten sa3ti 2s1atl 4s3atmos sa1to 4sa5t6r s5atsk 5satt. 
4s3atta 6s5atten satte4s 5sau. sau4d sau5di. 5saue 4saug sau6ga s3auge sau6ge. 2s1auk 5saum 3saus 4saut 2s1av s2ava sa4ve. 5s6a5v6in sav5n 3savu 8s9b4 sba4ne. sbi6e sbo4da sbu6et s6bug sbul3 sby8ta 4sca 5s4can 1sce 2scel s4cene s3cer 6s6ch. 8schl. sch8m 6s7cl 4sco. 6scoc 4scos s4cus 8s9d6 sdag4s5 sdam9p sde6le. sdu8en. sdu8er sdø8v 1se 2se1a 3sea. sea4g se3an5d se7ansa sea9re 5se4au3sk 2seb 4sec 4sedag sed5d se3de 5sede. 5sedl 4sedo 2sedr 2sedu 6sedvanle 2sedø 5see. see3d 2seef 2seeg se6e3i se3e4l se5en seer1 5sees 2seev 2sef s1eff 4s1eft 6sega sega6l se2ge se6ges se3gev seg8ga. 9segl. 7seglet 3segm 4s2e7g8r 2seh 2seid sei8dan sei9den sei8e9nes s5ei4et 2seig sei6ga. sei5ge sei5k 3se8il 6seim 2sein se3ind s3eini se6ink se3ins se3int sei5r 3seis 2sej 5sej. 2se5ka 3seke 4seki 2sekj 5sekk sek4kes 2sek4l 4se3kn 2seko se2k1r 4sekra 4sekri 4sekro 3s4eksj 4seksp sek4st sek2t3an sek6te. sekte9ra 3sek5to 4seky 4sekÃ¥ s2el 2sela se6la. 3selak 5selane 5selar sel5art s3e4las se6l5at sel1d se4le. 4se5led 6selega 4selei 4s3elem 4seleng 4seles 4s3e4lev 5s6elg 4selik 4selil 4selis 4s3e4lit sel6løp 2selo 7selol se3lom 3sels sel4sin 8s5elske sel9s8lag sel4spo sel5t6 2selu se6l7u6r sel4v5ak sel4van selv3e4 sel4ve. selvei4 sel4ver sel8vin 2sely 2selæ 2selø selø8pa. 6selÃ¥ 2se3ma 3semb 7semd se5me se4mi. semini6 2s1emn sem4na 9semnd 2semo sem5pe 2semÃ¥ s2en. se5nak 6senau sen9d8a sen6dela 6senden 4sendr s2ene 4sened se3neg 4senem 8senesa 6senese sene8se. s5engas 2s1enh se6nin s3enkj 5sen3n s2ens 4s5ensem sen6s5end sen4sj sen3so 7s6ent sen5ter 8s7en5trep 4senum 4seny 2senæ 6senø 2seo2 7seod se3or 2sep se3pe seperso5 3sept s2er. se3ra 4seram 5seran 4serap 5seras 6serau ser5d s2e5r4e 5sere. 4serea 4sered 4seref 4sereg 4serei 4serek 4serel 4serenn 4serent 4serep 4sereso 4seress 4serest 4seresu 4seret 4serev s1erf s2eri seri6e5ne 4serik 4serkje 5serkr 5sern ser7nev 2se1ro se4rop se4ror se4ros 9s6ert ser4tak ser6tat ser4tr se1ru 4serul se4r3un ser4ved ser4vel 2sery 2se5r6ø 2serÃ¥ 5ses. ses5ald 5sesals ses5alt 4sesc 2sese se4sene ses5in se3sj 4sesje 4sesjuk ses5kal ses5kar ses5kv ses5lit se3sna 5seso ses3pr ses4s5in se1st 5sest. 5seste. 4sesto ses5un ses1v 2sesy 4sesø ses3Ã¥ 3s2et. 2se2t6a 3seta. se5tae seta8ka se5tar 6setea 4seteg 4setei 4setek se7tel se4tera se5tero 4setes 2se3ti se8ti. s3e4tik s3e4tis se8t7ja 4setje 5setje. 7setl 5setn 2seto 2set6r s4ets s5etters 2setu 2setv 2sety 6setø 3seum 4seun seure9ne seu2t 2sev seva6ne. s8e5var sev4d sevi4sa s1evn sev4ne. se3vr 3sevÃ¥g 2seyn 2seø 2seÃ¥ 1sé 2séa 6séb 4sée 4séf 4séj 4sék 2sél 4séo 6sép 9sér 4sés 2sév 4s3f sfa4ne. 6sfe sfe6et. sfes5 sfisken8 sflÃ¥3 s5fo sfo8ra sforlø9 sfra5s sfri5e6re sfyrs5 5s4fæ sfø9ren sfø5rer sfø5ri 6s9g6 sga4l sga8va sga4ve. sgen9s sge4st sgÃ¥4va sgÃ¥4ve. s1h 4sh. sha2k s7hat s3hau 6she shea4 s5hei5 7sh6e4rif 8s5hett s4hi. s6hip sh5isk 4shj 6s7hopp 3short 5show 6shs 2shu 4s5hy s5h6ø shø8l shø6va shø6ve. s7hÃ¥ 1si sial5v6 4sia5m si7ans 4siap 4siav si2bl 3si2da 3si4de. 3si3den si4de5o s4ider si5der. si4dete 2si2do 4s1idr sid8ra 4sidy 4sieg si1el sie4n si5er. si6eren sie4s si3est 6sife si3fl 2sifo si4f3r 2sifu 4sifø 3sig siger4s 4sigi 4s3iglo si5gr 4sih 3sik si3ka si5ke. sik2h7 sik4ka sikk8artet sik4k5el sik4ko siko3 si4kom si4kop si4kos si4kot sikt4s3 s2il 5sil. 3sild sil4del sil4der sil4des sil5j sil2k sil4les 2silø si6mel 4simet 2simp si3mu 4sinde 4s3indu si3nek 2s1inf sing4s5a4 7singu si4ni 4sinj 2sinn 3s4inn. 7s4inna s6inne. 5sinnet s3innh s5innk s3innl 4sinnt si5nob sinsk5e 2s1int 4sinv si6nø 4si5ov si4pa. si8pe. si6re. si7ren si4ri. 
sir8kl s2is si5sel si4serf sis5e4v si6sin 6sisju si4sk sis5ke si4sn si4s5te si4sti sis3to 4sisy 3s2it si5ta sit8ji si6t7ra si4t5re si4tri sit3s4 sitsva9 sit6term sit4tes si4umf 4siut 5siva si9van si6vek si8vi. si9vÃ¥t 1sj 2sj. s2ja. 8s5ja9g 7s8jakk. 4sjam s4jan 4sjanl s7jarn 2sj3av 6s7jaz 2sjb 6sjd 5s4je. sje4f3i sje4fla sjef5t sje3g sjek4t5o 3sjel sje4lev 3s2jen. 5s2jene 4s3jent 5s4jer. s2je5s4 5s4jet. sjet4ti 2sjf 2sjg s6jim 2sjk 2sjl 2sjm 2sjn 2sjob 5sjok 4sjom 9sjon sjons7 2sjor 2sjou 2sjp 2sjr 2sjs 2s6jt s6j3t6sj sju1a 6sjub 6s7jug 4s5juks sju8la 4sjun 4sjur 2s7jus 5s2jø sjø3k6 sjø1p sjø9rø sjø1s2 sjø3t8 8sk. 1ska 2ska. 4skab skabe3 s2kad 8skae 4s6kag 2skak 5s4kal8a ska5lar 2skam s4kaml 8s7kamp 4s5kana 4skande 4skane 4skano 6skant 5s6kap. 9s8kapa. 4skapas 5skapen. 6skapit 4skapp 2sk2ar sk5arab sk7arbe 5skard 4s5kark 5s6karp 6skars 4skart 3s8karv 2skas 4skate 8skay 4skb 6skd 2ske s4keda s6kedo s2keh s6kei skei5er. s8k5e4lev s6kelik skel3t s4kelu s3ken. s4kena s6kenav s4keno s5kens 5skept s5ker. s4kero s5kers s2ke1s ske3si skes4m ske5sn s4ketet s3ke4t3j s6kev s2key 2skf 2skh 1ski 5s6ki. 7skia 4skid 5skie 5skif 5s4kik s6kill 5s4kilt ski6net ski4nin 5sk8inns5 4skino6 5skiol 5skip 2skir s4kire 6s5kirk sk5irs s6kis. 7skiss 4s5kist 5s2kiv s6kje. 5skjema 6skjemas 5s6kjer4m3 3skjer5m4e 7skjers 3skjor 4s5kjærl 5skjøn 6skjøp skjø5res 2sk1k4 2s1k2l 6skla sk5lak s6k5lan s5klas sk9leg s6klei sk3lek 7s8klero sk5ling sk5lit s4k5luf s2klyd 2skm 2skn s1knu 4skoa 7skodd 4skof 3s2kog 2s3koi 1skol 7s4kola 3skole 4s3koll 4skom s6koma s5k6ome 6s1kon 4skoo sk5ord s6korpi s7kors s8korst 4skort 2skos s2kot 3skott sk8ra s4k9ra. 5s4kral s4krat s2k7re. 4s3kref 6skreg 5s8krek 4s3kret 5skrevn 3skrib 3skrif 4skrig 3skrik 3skrip 4skrite 3skriv 5s4krog s3kron 4s3krop sk6rud skru3s6 6skryss 1s2krÃ¥ 2sk1s2 2sk3t4 5skud 1s4kue sku4end sku4er skue5re 1skuf 5skulan 5skular 3skule 6s5kulis 4skull s4kulp 4skult skum3s 2skun s6k5underv 4skup 8s5kurs 8skurv 2skv sk5va. 3skvad sk3vas s6kven sk3ver sk5vit 3skvu 1sky s6kya s6kye 4skyr sky3re 4skys 6s6kyss 5s6kysk 3skyt 6skæ 6skø. 6s5køa 4skøe 1skÃ¥ skÃ¥5re 6skÃ¥t sl4 sla8da s3lade sla5ge sla8ge. sla5ke s5laks 3slakt 3slal 4s3land 4s5langs slap5pe s5laran s5lare 4s5last s1lat sla4te. 4s1lau 8s5laus sla1v 3slave sla4vin 2sle. s8leda 4slega s4legi s4legn s6legr 6slei slei5er. s4leiv 8s5leke s5lekk s2lel s4leme 4sle7ne s6lener s6len6t3 2sler 8s5les s6lesj s4leso s6lest s4leta s5lev s4leva 4sley s5lia 2slid sli4en. 6slig sli5ke s4likk 2slin 8s3linj slin6t5 3slip 5s4lit. 3s4lite 4s5liv slob5b slott4 s6lott. 7slottet 1slu 8slu7a 2slu2e slu5es 6sluf 4slug sluk3s6 slum4p5 slum5p6e s3lund s5luns s6lup slu7sa s4lut s1ly sly8et. 1s4lyn 4s5lys sly8t 6s7lær slæ6rari 2sløn 8s5løp slø4pa. s6lør 2sløs slø8s5a 3s4løsi slø5va slø5ve slø5vi 5sløyd sløye7 9s4lÃ¥. s4lÃ¥e s1lÃ¥n slÃ¥5ner 1slÃ¥t s1m sma6d 3smak. 5s4maken sma9let 8sman s4med. s4mede 6s5medi s4medk smeg5 4smei smek7l s4mekt 8s5meld 3s4mell 5s4melt 2s5men 3s4mert 6smes s6mesi s6mia s4mie smi4ene s4mig s6mil. smi7la s6mile smini6 s2mit smo7a smo8de. s2mok 6smot 3smug 6s5mugg smul2 s6mula s4muld s6mule 5smurt 1s2my4k 6smøn 1s2mør smør3s 4smøt 5smÃ¥. smÃ¥5r s8mÃ¥tt s1n 8sna. s4nab 7snak 4s3nas 6s3nat 4snav 4sne. 4sneb 4sned 5s8nedd s6nee s4neg 3snei snei5er sne4kri s7nekt s4nel 2snem 4sner 6snes sne9sa snes9v 4snet s6nif sni6gl s2nik snik5ko 3s2nil 6s3nin 3s2nip snir4 5s2nit 2s3niv s8no. s6noe s5nord 6s7not 5s6nud s3num s4nur 7snut 8snyh 2snæ snæ4re. 5s6nø. snø5d4r 5snøe 1s2nør snø1s snø5vi so3al so8ar 4soav 2s1ob so2bl 1sod so4da. 
5soe sofa1 so4fag so4fas 6s5off 6s5ofre soft1 so2ga so8gi so7gl sog6nem 6soh 5s4oi4 so5id 5sokn 4soks sok4se. 7sol. so4la. sol5av 1sold so4le. solei5er so4lene sol5f6 1so7li 8s7olj so2lo so8lo. sol3s2 sol5t4 so4lum so4lø 3som. so6mend s3omf 4s3omfa 4s1omg 5somh 2s1omk 4soml 1som5m sommar5a 3s6omme somme6r5e6 2somr 7somren 5somrer 6s3omrÃ¥ s1oms som5sl s4omst som5sti 4somsy 1s2omt 5somt. 6somta 3son so4na. son7da 7s6one so8nea song3s4 so4n3o sons4k son5st so2nu so4ny. 2s1o2p so7pak so9par so7pet 3sopp. 5soppa 3soppe sop4pi 8sopple so3pr 1sor. 7sora so3ran 5sorb 4s1ord sor4da 1so1re so4rek 7sorga. sor4gl so2ri 4s3orie 7soris so5riu 4sorke sor5n 3sor5s 7s8ort 1sos 4s5osc so8se. 6sosf 4sosj so4sl so5te so8te. so4tra sots4 sot5te sot4ti 4sou 2sov so3va so4ve. so5ven sover5e sove3s4 5sovet 3sovj sp2 4sp. spa5g6h 7spak. 7spaka 5spake 8spakk s2pal s3pall 5spalt 3s4pann 3spari 4spas 5s6paser 4spe. spe4a 8speda spe9dé s4pee 1s2pei s2pel spe4leg spe4les 5spelet 8s7peng 3s4penn s4pent s5pep spe4r5and 6speri 4sperl s4perr s3pers 3spesi s4pest s9pet. 1spi 6s5pilo 4sping s2pio 2spip spi7res spi7ri spi7ro 5s6piss 2s3piz 2s1pl 8s9plant sp9lar 4sple s4plin 3split sp6o 2spoe spo6et 2spol 5s6pole 6s7poli s4pora s4pore 5s4pors sport6s5 3sporv 2s5pos 4spot s3pote spo5v 9sprag 5spran 6s3preg 5sprei 8s7preik s4prek spre5ke s3prem 5s4pren 6s5pres 5s4pret s6print s5prio 8sprinsi 8s3pris 6s5prob s3prod 4sprof 5s6prog 6s7progn 6s7progr 5s6pross 1spru s4pry 3sprøy 5sprÃ¥ 2s3ps 8spub 2spul 3s2pyd 5s2pø 6s3pøl sp5øy 9spÃ¥. 5spÃ¥d 8s9r sre8ka sre8ke. sri8e9 sri8ka sri8ma sri4ve. s5ro sro4e sro6pa sro6sa sro6se. sro2t sro9te srot5o srø1v srÃ¥4da. srÃ¥de9r srÃ¥6ders srÃ¥8ma 4s1s s7sabel s7sabl s6sae s4saj ssa8ke. ssa6la. ss5all ssa4me. ss5amp s5sane s7s8ar. ssari8e9n s7s6arm s4sarr ssar8ve. ss6as ssau8sa ssau6se. s6seet s2seg sse5ge ss5egg ss4el sse5li ssel5s ss9elv. ssel5v6Ã¥ s2sem s5sen. sse8na. ss5enden s5sene s4seni ssen6ke. s4senu s6senÃ¥ s5ser. s6serab s6serat sse7sk s4seski s4sest s7set. s9sets s2seu s4sey s2sh ssi4a ssi7e6rer s4siff s6silo s8s9ime s6s3ind ss5init ss3inj ss3inn s4s3ins s5sis4 ssis5m ssi6v7end s5s6jarg ss4kape ss3kar sska8ra ss3kn ss3kof s7skog s4skor ss6ky. ss4kye ss4kÃ¥ ss5kÃ¥l s7skÃ¥p s2sl s8slip ss1lo ss5ly s2sm ss6nar ss2no ss4nø s4s5nød ss5nøk s2sof s4sok6 sso5lo s2som ss4oma sso3ra s4sorga ss3ove ss6pil ss4por ss9ri ss2t s5stad ss4ted ss3tek sstel6li ss5tep ss7tilp s7stip sst6r ss5tren ss4trÃ¥ ss3tus ss3t4v s2s3ul s3sur ssva7ra ss1ve ss8vek ssy6na ssy5r ss7øye s6sÃ¥ 6st. 2s1ta. stabs3 3s6t2ad stad3a sta6d3o s5tafa 3stafe sta5fl st3aft sta7ge 4stah sta6ka. 5stakk 4stakl 6stal. 6sta4la sta5lak st5ald 4stale sta4le. 5s6tali 5stalt s3tame 3stamm sta5na. 3s4tand 6stande. st8ane s4tanf s4tang stang5s6 st3anl sta8n9o s6t5antr st3anv 4stao 2stap s7tapa s4tapp s8tapÃ¥ s3tari 1stas 3s8tasj 4s5tast 1stat 4stato sta5top sta3tu 3stave 2s4t5b st7c 2std 2s3te. 2stea 2steb 3sted. s4teda 5stedt s2tee ste4en 2s4tef 3s6teg. stega6l s4tege 3stegh 4s5tegn s4tegr 4stegsp 5stegspl ste4her s2t4ei stei5er 4steikn ste6in6s5 3s4tek. 4s4teka s6teke ste4ket s4teki 4s5tekn 4steko v4s5tekst s4tekt s8te5kv 4stela ste5le. s5teleg ste5lev s5telig 5stemd 3s4temm 5stemn 4stemø sten5a ste4nar 5s4tenen s4tener 4ste5nes 3s6teng s4teni 6stenk 4steno stens5l 4steo s5teol 2step step7per st8er. 8s5tera s4teram s6terest s4terev s7t6erf s4terid ste7ris s6teri6v 4s3term 6sterr ste5run ster8ø 2stes s4tese ste7sle 4stet s4teta ste7t6r s6tetø 4steve 4stevi 6stevl 5stevn 6steÃ¥ 2s4t5f 2stg stga6ve. stgÃ¥8va stgÃ¥8ve. 
2s6t5h st5he 2s3tia 4stib s5tibe s9tibl 3stic s6tick 6s7tid s6tidel sti5en s2tif 3stift s4tigen 7s6tigi st9i8gj 3stign s5tij 6s5tika 4s5tike s4tikl 3s6til. 5s4tila sti4lag 4stild 3stile stile9ge s6tilk 6stiln s4tilo 6s5tils 5s4tilt. 6s5tilta 5stilte 4stime sti7mer 6stimé 3s6timu s6t3ind s6tini 4s6tinn s6tinst s6tint 1s2tip 4s3tipp 6stips 5stir 2stis 6s7tisk 4stitt 4stiÃ¥ s4tja 5stjel 8s7tjene s6t5jent 3stjer 4stju 2stk 2stl 2stm 2st5n2 s2to. 1stof 2s3tog 4stoks 6stokt 3stoli 4stolj s5toll 4stom s9tomm st3oms 6s5tomt 4stoni st3ope st5oppl 6s8t5ord sto4ret s6torm stor3o 4stors st5o4s s1tot sto4t5r 5s6tore. st5ou 5stova 2stp 8str. st3ra. 4s5trad s8traff 6strafi 6strail st3ral st4ran stra8na. 5strand 4strane 6strans 6strap 4strar st7ras. st7rast. 5s4traum s4tre3a s4tred 7s6treik st5reis 7strek. st5rekl strek4sa8 6streni st5rer. 6strers 6strh 9stria 9s8trid 5strie st5rige st3rin 3s4t6rip 5stri3s 6stroe s5t4rog 5s4trok st3rol 6s4t5rom st3ron st7rop. st7ro6pa s6trope s6t7rug 9s6truk st5rum. 2stry 6stryg s7trykt st5ryt 4s3træ 1s2trø 6s5trøb s5trøkk st6røm 4strør 4strøs st5røse s5trøst 4strøy 6strÃ¥d 2st5s6 stsa6me. stsy8na 4st7t6 stta6le. st3t4r s4tua 1s2tub 3s4tud s2tue stu4el stu4er stue5re 3s4tum 6s3tung 6stunn 3s8tunt 6s5tur 2s4tut 2stv s3t4ved 1sty 2stya 2styd 3s4tyk sty6l 2s5tyn 2s7typ 3s4tyr styrs3 6stysk 4styv sty5ve 1stø 2støk 5støl 6støm 5støp 6størk 5støt stø7va. stø5var stø3ve stø5vi st8øy 1stÃ¥ 5s4tÃ¥e stÃ¥5k 4su. 1su4a su7a8l 1su2b sub7l sub3o 5subs s3ubÃ¥ 1sue su8er su2f 5s2ug su6ga. su4ge. sug3g su2h 3sui6 su3is5 suit5a su2k 4s1uke 3suk5k suk3r 1suk5s su4le. 3s4ulf su4lik sul4t5r s1ulu 2suly sulæ5 3s2um 4s3umid su2mo sums6 5sund. 8s5under 5sundet 5s4undi sun6d7r 9sunds sun4ge. 6s7u2ni su4o 3s2up su4pe. super3 supi9 su8p4r su8pre sur5d su4re. 4s3u4rei 1surf su4rie su4riu surs5k sur4sp sur4sti 1sus su3san su4se. su6s5es 4susi su4sik sus3p sus3s su2sy 2s1u4t1 sut6ra. sut8reg 7suv 8s5u6vi sv2 6sv. sva5a svai5 5svak. 3svake 6svaks sva4la sva4les 6svalet 5s6vali 7s6vam s4var. s4varet s3varm 3s4vart 6s1vas 6s1veg sve6g7i s5veie 3s4veis s5veka 6s5veks 5s4vens sverd5s4 sve4re sve8res 3s4verg 5s4verj s5verk sver4ki s4verm 6s7verv 7s4vev s4vie svi6e3l 8svift 4svig svi5ke. 6svikg 4sviks. 3svikt 4s5vikti 4svil s6vindel svi4nes sving5s4 8s7vink ll6svint 4svir 6s7virk2 2svis svi4sa. svi4se svi5sen svi5ser s8viv svi9ve 4svol 5s2vor svor7t s3vr 3svul 5s6vulm s3vy4 svye7ne 1s2vø 4svæs 2s7vÃ¥ 1sy sy4c sy4de. sy5den 5sye sy2er sy8ka sy4ker sy5kere 2syko sy6kog 7s4ym sy7me s2yn 4s5yndl synk4 sy6n5o6 syn6sk syn3te sy4ra sy5rar s6yre sy4re. 2s1yrk syr4ka syr4ke. 3sy1s sy4se sy2sl sy6s7t sys4tr 2sy2t sy5ten sy5t6h 9sytt sy2vÃ¥ s4z sz3c4z 6szt. 1sæ sæ2l 8s3æ4re 8s3æ4ren sæ4r1i særle9 sær5s8 7s6æ5te4 4sætt 1sø 4sø. sø2d s1øde 4søf søg4 4søj 9søk. 3s6øke 5søkj 5søkk søk7kj 3s6økna 2søko søk4sk søk6s3e6 5s6øksm søk4ta 3s4øl sø4la. sø4le. sø5let 3s2øm sø6mÃ¥ 3sønn s1øns s4øp sø4ras sø4re. sø4r5end søren6de. sø8ret sør9ett 7sørl sør3s sør5ø 3s2øs 4søs. sø8sa 7søt sø7tast sø7tel sø5tes 2søv s1øve 3søvn 2s5øy. 4s3øya søy8de. 6s5øyd 8søye 3søyl 6s5øy4n søy2r 2s7øys 5sÃ¥g s3Ã¥2ke 5s4Ã¥l sÃ¥4le. 6s1Ã¥n sÃ¥n4da sÃ¥n6de. sÃ¥2p 9sÃ¥pa sÃ¥5pet 6s1Ã¥pn sÃ¥p9u sÃ¥2r sÃ¥6ra sÃ¥r9ast. sÃ¥4re. sÃ¥r3sk sÃ¥r7ø sÃ¥2s s7Ã¥sa s4Ã¥sb s5Ã¥se sÃ¥s5k sÃ¥s5t 9sÃ¥v sÃ¥va7 ta1a 4tabo tab2r tab2s ta4bu. 4tabø ta1c 4tad ta2da ta8d3ei ta6d3e4t tad5spa tad7s6v ta6du t8ae 6taei 4taek 4tael 1taen 4taend 4taenh ta6es 2ta5fe taf7f6 5tafis 4tafl 1tage 4tagj 8tah ta7is 1tak. ta4ka. 
4takau ta7ken ta5ker tak7kel tak5k4l ta8k9la 2takr tak4sal tak6se. tak4si 4t3aksj 5takst tak4tal 4taktig tak4to t5aktø ta1ku tak3v 1tal. ta9lam ta3lan tal5ang ta7las 3tale ta4lei tal5eie ta4lek ta5ler. tale7s6 1talg tal6ge. 7talj talj5es tal4led tal4leg 8t7allerg tall7es tal8lign tall6s7a tal4ly tal3op 6talter t5altern ta6l9u ta6lÃ¥ 2tamb tamba6ne. t3ambi t4ame 4tamet tam4i 2ta3m4o tam5v t6an. ta2na tan5ab 4t3anal ta3nar ta5nas ta5nat t5andak 5t6ande. t7andel. t5andele tand5r tandø4 tandør5e 1tane ta4nel ta5nem 4tanett t3anfa tanfø6 6tangf 6t3angr 1tank 2tanl t3anle 2t1anm 3tannl t5anno tanns4 tan6sk tans5ka t5an5sl 4t5anstr 4tansv 8tantil tan4tra 6t7antre t3anve 2ta3o4 5tap. ta4pa. 1tape ta4pe. 3tapi 4tapl 4tappa 5tappi 1t4ar. t6ara ta4r3ak 4taram ta6rare 2tarb 3tarbo 5tarbø 4t9arena 6t7ark. tar8ka. 4t5arke 4t3arki 4tarra tar9si tar7sp tars6v 4tart tart7est t5artik tar7tit tar4t3r ta6r5Ã¥8k 4tasa ta3se 6t9asf t3a4sia tasi3e ta4sif ta4sil ta4si5s4 tas4p tas5s4 tas8tas 6ta7sto ta7str 4tasu ta1ta ta7tes tate8se. tat5e4v tati1 tat3op tat2s1 tat3te t3attr 1tau 9tau. 7taue 4t5aug 2t1a4uk 4taun tau4ne. tau4sk 2taut 2tav 6tavin 3tavl 4tavli ta9xy 6t1b2 tba3d tba2n t6be. tbe6te. tbe6t7r tbe6t8ra tbo6da tbu8da tby3k tby4s tby7te 2tc t1ce tcen4 tch5e t5co 2t3d tdy5pe 1te 4tead 4te3ag 2teak 2te9a8l tea6m 2tea2n 2teap 2tear te5arb tea4s 3teatr 4teatt 2teau 2teav 4teb t5ebb tebu8da 4tecel te1co 2ted 6t5eddi 3tede. 4tedeb te7def 4tedek 4tedel 4tedem te5den 4tedi 5tedil 4tedo tedo6en. tedo4er tedo8et. te4dor ted4sk ted4sl 6tedu 4tedy tedyr9k 4tedø te3e4 2tef 8tefa 4tefi 4tefl 4tefo tefo8r t3efte 3tefø 2tega 5tegat 4t3egd te7gea te9gee 4tegg teg8ga. 2tegh te4gim 2tegj 2tegl te3gla te9g8li 8tegs. 6tegspl 6tegsv 2tegu tegvi8 2tegÃ¥ 5tegÃ¥s 4teh 5tei. 2tei4d t3eie t3eiga tei6ga. t7ei6ge. 6teigr 3teikn tei5le te1im 6teinf te4inne te5inte te5i4s 6teiso 2tej te7ken te5ker 4tekil 2tekj 5tek1k 2tekl te5k4la tek6lede. tek4li 4teknu 2teko 2tekr te7kra te6k5ru 9teks. 4t3eksa t3ekse 4teksp t4ek8st tek8t9r tekt4s tek6ty 2teku te2k3v 4teky 2tekø 4tekÃ¥ 2te3la tel6ak 5te4l5ar 3telav 8te4le. 4teled 4telei 4telek 4teleme te4lene 6teler te4les te5lesk 4te4lev te6leva 4telid 4telik 4telil 4telin te4lir 4telis 4telit 4teliv tel5lef tel4lei tel4lev tel8lign tel4lo 2telo tel3se telses5 t3elsk tel7s6v 5teltet 2te1lu 7telut 2tely 6telæ 2telø telø8pa. 2telÃ¥ 2tem 3temae 6teman te4mans t5emba 6temenn 5temik tem9ma 6temn tem5ne. tem5ord tem5pe tempe8l7 3tempo tem3s 4temø 3ten. te5nab te4n5al ten8am te4nan te4nat ten5at. 6t7enc 6t5en6den 4tendr 4tened te5neg 6tenem 6tenerg 5tenes 4tenet 2teng 7tengon t5e4nig 7tenik 5tenis 4teniv ten9no te4nom te5nor 6tenorm 4tenot te6nov 6tenó ten3sa ten5sko ten6slu ten3so tens5v ten4t5in tentle8ge. 4tenum 4teny 4tenæ 2tenø ten5øks 2teom 5teon 4teop te6ora te6ori 4teord 2teov 2tep te3pa 4tepak tepa9ra 4tepas 4te5pe 7tepee 9tepé tep6pere t5e6ple 4tepr 4terac te7rakk 6teramm ter7apa 4terapp te4rare 6terasa 4terase 7terast terba6ne. t6er5d 4tered 4teref 4tereg 4tereis te4rel 8terem. 6teremm te4r5eng teren6ga 6terenn 4terep te4rerk ter5esti 4terett 6terevo 3ter5g 3terh te4rig 4terik 4terisi te4riv ter5j 4terk. 4terke 4terkh 7terkr 6terks4 4terkt terla6ga terle7ge terle6ve. ter8ma. ter3no tern5s te5ro. tero6e te3rof 4teroll 4terom 5teromr 4te3ros 4te3rot tero6v ter5r 5ters. 5tersjøo ters4kl ter7s6ko ters4l ters4h ter3s4m ters6ne ter5sv tersø6ke. 3ter3t ter9to ter9ul 8terum ter7v tervi6se 2tery ter5ør 4terøt te4r3øv 4terÃ¥d te4r3Ã¥s t4es 5tes. 
2tesa 5tesar te7sen 2te1sh 4tesid 4tesik 6te3sin 4tesit 4tesj 6tesjef 4te1sk t9es6ka. 6teski te5sko 2te1s2m 7tesmÃ¥e 2te1sn 2te3so 6tesp 5tesr 3tess 5test. 4testaf tes5tas 4testat 5tes7te. 4te5stei te5s6tiv 5testid tes4tik 4te5s4til 4te3sto 4te3str 4testy 4testø 2tesu 4te1sv tes8væ 2tesy 2te3sø te7sÃ¥ te8sÃ¥r tesÃ¥9re 3tet. teta6ka teta6le. te4tap te3te 5tete. tete4e 4teteg 4tetei 4tetek 4tetem 2teti 3tetik 3tetis 2tetj 2teto 2t8et2r 3tet2s tet5ti tett3s 2tetu 5tetu. 2tetv 2tety te2t3Ã¥ 2teu teu8k t1eur 3teus 2tev t6eva te4valu 3tevl tevo6r te3vr 2tey6 2teø 2teÃ¥ 4téa 2téb 2téf 2téh 2téi 4téle 2tém 2tése 4tést 6t1f tfe6e tfe4l tflÃ¥3 t7fo 2t3g2 t4ga. tge4a tgei5 t4gi. tgi5ros tgrei9e t4gÃ¥. 2t1h t4hap t4hea3 thei5m t6hesi tho7li tho3re 3thr thu5le thu6s thy5r 1ti 2tiad ti3ag 2ti1ak 4ti3all ti5als 2tia2m 2ti1ap ti3asp 4tiau 2tiav 2tib 2tick 3tid ti2da 4tidan tid7d 6t3idee 4ti5dem 4t7idé 4tidis 4tido 4tidr 2tidu tidvi4 6tidy 4tidø ti1e2 2tief 2tiei 2tiem 4tienh tie5ra 4tierf 4tierk ti4ert ties3 ti3esk ti3et 4tieta 4tiett 2tifa tifa8ne. 2tife 4tifl 2tifo 2ti5f6r t8ifrÃ¥ 4tift tif5te 2tifu 4tifø ti9ge. 4tigj 2tign 3tigno ti6gun 2tih 2tii 2tij ti3ka. 6tikarr ti5ke 4tiket 4ti5ki 6tikj tik4kj 2ti3ko 2ti1k2r ti3ku 6tikular 6tikult 6tiky 4tikø t4il tila6ga til1d4 3tilde 2tile ti3lei 3tilfe 3tilg til3id 4tilke 3tilkn 4tille. 4tillh 4tillin tillø7 4tilm 7tilnæ 3tilst til3t4 4tilte 2tilu 2tilø ti7mab 5timat ti4me. 5timedie ti4mera 5timeri 2timl 4timo7 4tim6o8r 2t1imp tim7s 2timu 2timy 2timø ti6nab tin5art 4tinaz 2tind t3indu ti4nem 4t1inf tin5ge4s 6ti4nit 6t5inj 2tink 7tinks 4t1inn 6t5inne ti5no tinsk5 4tinsp 4t3inst 4t1int 2tinv 4tinva 4tiny 9tiol 6ti7om tion9s6 4tiop ti1or ti1ov ti3pa ti6pla 4tipr 3tips 2tir tira4t ti4ref ti1ro 6tisak 4tisam ti5s6an ti3se 4tis4ei 4tisek 4tisel 4tisem 4tisep tise8ra. 4tisers 6tisig 4tisio 4tisje 4tisju tis4ka 4tiskan 4tiskil tis5kok 6tisku tis5l 4tisn 4tiso 6tisprÃ¥ 4tistat tis4ti. tis4til ti5stre 4tisty 4tistø ti8støy 2tisy 2tit ti7ta ti8tar ti3te ti1tj 3titl tit4r tit3s 3titt 4tiub 2tiut ti4v5and tiva9re ti4v5art tiv6is 2tivo tiv5si tiv5sk 4tiy ti5ær. ti7ære ti9ært. ti1ø8 2tiÃ¥ 6t3jag 2t1jak t5jarn 2t1je. 2t1jeg 2tjel 1tjen tjen6st 6tjep 2tjer t5jev 2t1ji 6t5jing 2tjo t5jord 2tjub tju4e 2tjun 6tjur 3t8juv tjæ4res 2tjø 4tjÃ¥ 4t3k2 t6kag tka8ra. tkly9 tku6le. tkÃ¥8pa 6t3l6 tla4ga tla8te. tle6da. t4ledr t6lef tlei5er. tle8se. tle5ti tligh6 tlight5 tli6nes tli4te. tlu4e t6l7ut tlø4pa. tl7øy. tlÃ¥3ne tlÃ¥7r 2t1m tma3d tma8ge. t6maku t2mam t2meg4 tme3in t2mek tmini6 tmin5s6 t2mos tmÃ¥4la. 8t1n t3na t6nee t4neli t7nelig tne4rek tne4r5ø tne8se. t4nesk t4nest t5ni t8no. tn5sk tnæ6re 7toan 7toar. toa6t 6toau 2toav tobe6r t3o2bl 6tobs 4toc to5da to9de. to4dera 3todo 1toe 2toek 6toel toe4t 2tof 6toff. t3ofre to5fri to2g3at to7gen 4togg to6gl 6tograv to4gre tog3st to2gu 2toh toil3 2to1in 4toj t6ok 1to3ke to3ki 2tokj 2tokk tokk5end to3kl 5toksi 5tokt 2toku to4l5arm 5tolat to5lel tolele7 5tolera toli8ne. 2tolj 1to4l3k 7tolki tol4lab tol4lag tol6leg tol6lett 3tolo tol5tr 4tolÃ¥ to9ma. 2tomb to5mene. 2tomf 4tomg to3mi to5mise to7mist 4tomk tomlø8pa t8omm 2tomr 4tom1s 5toms. 1ton to5na to4ned to4nel tong5s4 to8nib 2tono ton3sa ton5s4l ton5sp 2tonu to4ny. 2to3o 4to1pl 4toppd 6toppg 6t5opple 4t5opplæ top4po 4toppt 4topr to1ra to4rak tor5alt tor5ang to4r5ant to4r3as 4tord tor8da. 
to1re 6toreg to4rek tor7eks to7rem 6torent 5torer tore4t tor5ete 4torga tor5int tor7k8u tor7me torm5s tor5na tors4i tor5t8 torvei5 to4rø 4torÃ¥ to1s 4tosa 4to3se to2sh 6tosj tos5ke to8sku 4tos2p 2tosy 4tota to5te to8ti. to5to. to4tor 6toty 2tov to1va to7veg to5verd tove7re t5overs 5tow 4toø 6t3p2 t7pa tpi7ne tpi7pi tpo4et t4pÃ¥k tr4 8tr. 6t9rabat 3trafi tra7in tra8ka. trak7to tra7mu t5rane 3t6rans tran7ske 5trap tra3pe tra5po tra5r4 5trase tra5se. tra5ses 5trasé t2rat t5rate. 3trav tre4al. tre1c 4t3reds t3refu 6t1reg t5regi 4trei tre5int 6trek. 4treke 4trekl 4t3rekor 4trela 3t6re7na tre5ne t6reng 3t6reni 4t3renn tren6sk t7repe t5repres t2rer t4rero t4re3ru 2tres 6tresi tre5sko tres4l 3tresn t5resu t1ret t4reta t6reto t5retted 5t6rettel 4trev tre9va t6rib 4t1ric 4t4rid tri7ei t5rig. t4rikk t8rina t4ri5ne 4t5ring 3trinn 7t6rio 6t5risk t4rist tri5to t4riu t2riv tri7vi 5trix 3t4ro. 5troa tro9ar 3troe t4rof tro5g 3t2roi tro3in t4rok t2rol 4t3rom. t6rond 5t4rong tro5per t2ros 5t6rosk tros3l tro1v t8ru. t8rua 7trua. 5trued 5truet 3trug tru1i 5trukn t3rull 2t1run9 t3rund 4trupe tru5sa. 7t6rusk 2trut t5ruta t3rute t3ruti tru5v try3dr 3t2ryg tryg5ge 7trykk. 5trykke try7pe. t6ræ trø8a t5rød. t5rødt 4trøk 4trøm 2trør 5trøt 5trøya 5trøye5 trøy9et 4trøyr 6trøys 5trøyt trÃ¥8da. 4trÃ¥det trÃ¥4dr t6rÃ¥kl t3rÃ¥st 4ts 6ts. t5sa. t5sab tsa2g tsak6se. ts1an ts4ang tsau6r t1sc t6scha t6sch5k t5se. tse6d tse4er ts5ege tsei4d ts3eig ts2em t3sen. ts4ende t5sendi tsen8ka tsen6ke. t5sens t4sentu t5ser. t3seri t4s3erk tser3o t5s6ers. ts4es t4sesk ts5e6st ts3eta tse4te. ts1e4v ts3f ts4fæ t7s8hop ts7ill ts3inn t4s3ins t5sir ts5jeg tsje5t6sj ts5jor 5t6sjov. t7sjuk t4sjur t5skad t4skan ts6kis t4s5kjed ts6kjø t5skol t6skoll t7skot ts5kren tsk5ru ts5krø ts1k6v ts9kvi ts6ky. t6s7kyn ts1l t7s6lo. t3s4lot ts2lu ts5lø t5smit tsmÃ¥6la. ts1o t4s5o4d t5sok t5somst t5spek t3spel ts2pi t2s3pike t3s4por ts5s4 t1st4 t2s3tabb t5stad ts3tak t4s3tal t5stand t5stang t5star t5stat ts6tau t3sted t4steg t4stek ts6tig ts4tik t4stilk t4stin t4stit t2s1tj t8stolk ts5ton ts5tors ts5tree t4s5tren t4s5tro ts3try ts6trÃ¥l t5stu ts3tv t4sua tsu7ge tsu4l ts3uli ts3u4r ts5usi ts1v t3svik ts1w t5syk ts5yt tsø8ka ts5økni ts5øv tsøy8er ts1Ã¥ tsÃ¥t8 8tt t1ta t3ta. t2tab ttaba6ne. tt9abo tt5adr t2taf tta9fr tt3aft tta4la. t6t3ald tta7lel ttall4 t4t3ana t5t6ane t4t3ang t4tans t4tanv tt5appe t7t8ar. ttare6 t4tarr tt5ar5t tt6arta t4tase t4tasj t4tav tt3avi tt5avk tt5avr t3te. t2tea t3ted t4teda t5tede. t8tedr t2tee t4tef tt3eff t2teg t2tei tt7eid tte3in t2tek tte7kv tt7elev tteli6te. t4tem tte4ma. tt5emn ttem8na ttem6ne. t3ten t5ten. t4tena t5tene. t4teni tten5s t9tens. t4t5ense t2teo t5ter. t4terei tter5un tte4r5ø t4terÃ¥ t4tese t4tesi ttes6ke. t4tesl t5tet. t4teta t4tete t1té t8th tt5he tti5a t4tidel tti3e t2tif t5tig tti4g5j tt3ind t5tine t7tiné t5ting t4tini t4t3ins t4tisj t6tisti t7tite ttle6de. ttlin5 t1to. t5tofr tt5om. t7tomr t2ton tton6na tt3opp t5tora tt3ord tto5re tt5o4ri tt3orm tto5u ttpar4 tt6pÃ¥s tt9raka tt5rake tt3ram tt3rap tt7rat t4t3rep tt3res tt3rev tt5rom t5trop t5try t2t1rø t6ts tt7saf tts3ar tts5kan tts5kj ttsla6ga tt3sok tt5still tts5top tts5tra tts5tøy tt3ugl t1tum tt1un ttun6ge. tt1ut tt8ute ttva8la tt5ve tt6vun t5tw t5ty. tty8e tt1y6t ttæ6 tt3ø4l t1tør tt3øv tt7øy6r tt1Ã¥4 5tual 4tu4av tuba3 tu4be. tu6bÃ¥ tu6dem 5tuel tu4er. tug8la tu1in tu2k t5uka t1uke 7tukt 3tula 1tule tul5les tum5m tum4s 3tun. 
4tund 3tune tun5gesv 6tungd tung9s 6t5uni tu4nio 5t4un5n 9tuns 3t2ur tur7ant tu4ras 4tu4rei tu4rek turi4 tu4rin tur1o tur3p tur5s4 tur3uk tu5rus tur1ø 1tus. tu1sa 3tusb 3tusf tu4sin tu4sm tus5o tus7s6t tust6r tus5u 2tutg t5utj t5utk t1utr tut8t7rÃ¥ tut6tenk tut5tor tutt5ov tu8va. 2t1v tv4a tva8k tva6ne. 3tvang t4v5anl 8t9var tve5del 3tveit 4t7ver tvert5 t2vet 3tvet. 7tvets t5vik tvi8ka. t8viklas 5t4vil. tvi5la 5t4vile t4vi5l4i t6ving tvi4sa. tvi4se. 6t7vo 3tvung 6tw twa2 1tyd tyg9gel tyg5ges 6tyh tykk5s6 ty4med 1tyn ty4ne. 6t5yo 1typ 5t6ype ty4pere ty4pet ty4rak 5tyrann 6tyreg ty4rep tyre5st ty4ri. tyri9e ty4ri5s ty2s 3tysk tys3t 2ty2t ty8ta t3ytel ty5ten ty4ve. 2tz t5za 5t6zel 6t6z5l 1tæ tæ5l tær6ene tæ4renh tær4er tær4et tær5s4 tøf3l 2t1øk tø9ket tøk8ta 4tøl t7ø4let 5t4øm5 tøn5nes 4tøp tøp5se t2ør tø4r3as tør3in tørk4 7tørk. 9tørka 5tørke tør5ne tørs7ta. tørs7ter tørs5tin tør3t tøs4 tø8sa 4tøt tøt5a tø5ta. tø3te tøtt6 2tøv tøv6d tø7vele t4øy tøy5a t9øyem tøy5r tøy5te tÃ¥4en. tÃ¥2k tÃ¥5ket. tÃ¥5ki tÃ¥k6r tÃ¥2l3a tÃ¥9la. tÃ¥4leg tÃ¥7lel tÃ¥l5s4 tÃ¥n8da tÃ¥n6de. tÃ¥3ne 4tÃ¥p tÃ¥6pe. t1Ã¥pn tÃ¥6re. tÃ¥4rep t3Ã¥ret 4t5Ã¥ri 3t8Ã¥rn 2t1Ã¥rs1 tÃ¥3s tÃ¥2t t3Ã¥tak tÃ¥7ten ua7g u1ak uak3s u1al. ua7la ual5l ual3o ual3u u3a4ne. u5anl u3ans u1ar. uar5d u4arr u5ars. uash3 ua3t u5au8 u1av 1uavh u4ba. ub7alt ub7ant ub5ark ub4bo ub2br ub4bu ubel5s ub5h ubi6s ubis7t ub1j ub7lande. uble3s4 u1b4r ub5rin 2ubs ubu8e u4bÃ¥ uch5en uch5er u1ci uck1 ucu5 2ud u3da. u7das. ud4dag ud2dr udd4s5e ud2dÃ¥ udea9 ude8are u2deb udeie5 udei5er u2dek ude3lu u2dep u6dero u2des u3d2et u6d5ett u5devo udia2 udi3an udi4en udio5 ud7ir ud7ji ud5leg udle6ge. u7dob u6doe u2dot udover7 ud1r udse4 ud3t ud5v 3u4dy udy9ra ud5Ã¥4s ue5a8 u5ei u1el uem8na u1e2n uend5r u7ene u8e7net uensar4 ue7o u1er u5e6rast ue4re. ue5ren ue4rer ue4res ue5ri uerle7 u2erm ue4se ue2si ue4skj ues6n u1et uet8r ueul8 u4fe. u6feb ufe6e uf1f4 uf4f5erm ufi9l ufi5ne u1fl u6foa ufor3s u1fr uft3a uf4tan uft5s4la uft3sp ufø6 u5gag u5gar u6g5av ugby7 u2gem u5gen u3ger u4gerø u2ge5s4 u9get. u8geta u2gev ugg3s4 u5gis u4gl ug1la u5g4lad ug9lar. ug4le. ug9ler. ugle7s ug5ly ug7na ug7ne. ugo1 u4go. ug4re ugrei9er u4grø ug5s4i ugs4k ug7so ug3s4pr ug5s4va7 uguay7ans ug5øyn u1h 3uhel ui8a u4i6c ui2d uid5el uid5en. uid5ens uid5er uid5in ui3e6 u4il uil5l u1im ui3ne u1ing uinnsmi8 uinnsmig9 u1ism uista7 uit3en uit5er u1itt u1j u4jas u8je. u7jÃ¥ 4uk. u1k2a u7kar ukare6 ukar5t4 u5kat u2ke. u4ked uke5l4 4ukem uker5an uke4rek uker5i ukho7 u7ki u1kj 4ukk ukk6a uk7kestal uk8k9l ukk7n ukks6 u5klem uk8lu ukl7ut u5k4no u1ko u4k5og u2kra uk3rus 2uks uk2sa uk4sek uk4sel ukse7te uk2si uk2s1k uk7ski uk2so uks5t uk4s3un uk2sø ukt5e4sk ukteslø9se uk4t3id uk6tj uk4t5ori uk2t7r ukts6l ukt3sp ukt9s6v ukt4sti uktu8e9ra uktue5re u1ku uku6e uku5le uk5vik ukys8 uk3ø4 uk5Ã¥4 6ul. u1la u4la. ula7d u6l1af u8l7aks ul5art ul4det u4lef u2lek u2lel ulele8ge. ule5ma u3len. u6leni u3lens u5ler. u5lere. ule3ri u2l3erl u4lero u2les ule3st u5lest. u5leste. u2leu u1lé ul5f6l ul4fo ul4fr u3l4i ulie8 uli9en uli5ke. 3ulikh uli9na. uli5ne u7linj ul1j ul5ka ul5k6e ul4k3v ul4lag ul4lam ull7arm ul4leg ull5egg ulleg8ga ul6leng ul9lere ul4le3s4 ul8li. ul8lig ulling7s ull5i4v ul4l3o6s ul8lot ullo3v ull5sle ull3s6m ulls4t ull3sv ul2lu4 ul6ly ul2lø ulm4u u1lo ulo6i ul5op ulo7va ul4sk uls4po ul8sn ulss8 ul2st ul8s3t6r 4ult ul4teg ul4tek ul4tes ul4til ul4tim ul4to ul7tor ul4t3re ult3ri u7lua u3lue 1uluk u5lup u5lus ul9ut. u7luta u9lute ul7va. ul5van ul4var ul4ve. 
ul4veg ul4vei ul4ves ulv3t ul4v3u 1uly ulyk5kes ulæ6ra ulæ5re ulø3se uløy6e ul5Ã¥6l 2uma uma8ge. u6mare um5au umau7ken um3av 2umb um5be 4umc 4umd 2ume u4mend u4m3enh umen4t5i ume5ra 2umfo 2umh 2umi u6midd umie6n umi9na u4mint um9ja 2umk 4uml 4umm um3me um4mil um4mis um3n umna8 4umo umo6g um5ok 2ump um7pan um4ple um4p7ut 4umr ums1 um7se. um4sku umsku8le. um2sn um4sor ums3t ums4tø 6um7t umta8l um5ut 4umve umør3s um7øy umÃ¥7 2un. una6da un5alg u4n3ap u9nar u7nas u3nat 2und. 6unda 6unde. un4dek un4del 6unden 5undersk 5underv 4undet un6d7im 4unds und5s4i u2ned u4nee u9nei. u2nel unele6ge. u4n5eng u4neno u2ne1s2 u3nes. u2nev unev5n 1ungd ungele8ge. un4gem un2g1j un4g5l un4go un2g1r un9g8ru ung3s4i un6i u4ni. u2nid 1u2nif u4nim u3nin 3union uni4st 4unk un4k5l un7ko un4kr un7nak unn5erf un4n5ers un4n5erv un5nes un4niv un2no unn5ste unnta8la un4n3y u4no. u2n3os uno7t 4uns un6sj5i un3skr un5s6kv un5sn un5s4p un3stek unst5e6l un7s6tenk unst3o un1s2v un4tal un4tam un4tas un3t2r unt5rep un4tri untun8ge. unu6 un5ut u3ny u3nø un5øy6d u7nøyg u3nÃ¥ uo9a u1ob u1om uom7s u1op uor8da uo1re uo5ro u1ov up3av u3pea4 u5per uper3a upe6ren u4peta 6upf up6h u3pi u8pi. up3ins up3li up6nev upo9pe up1p2 up4ped up4pla up6pre upp5s 4upr u3pra 6ups up6sen up2s3k ups4ke ups5pr 6upt 6upu u1pÃ¥ upÃ¥vi6 u1ra u2rad u2raf ur3aft u2r1ak ura6ly u4ram u5rane u3rans uran6t5re u5rar. u6r7arl ur3arv u9ras. ur5asp ur3att ur7au urau9ken ur1av ur4bod ur4c urd4 ur3di ur4d5o u1re ure8al ureer6e ur5egg uregi6 uregist5 3urein ure4l ur3ele u4reli u7r8ell u7r8elt u4r3els u4r5eng u7reom ure7o6s u4resu u4reta u4rete ur4e5v 2urf ur4feb ur6fed ur3ge u1ri u4ri. uri6a7ne uri9en. uri4e5ne uri9er. u3rik u3rim uri5ne ur3inn ur3ins ur3int uri6sp ur4ke5s ur7k6j ur6k7ja ur8kla ur4kle ur6k5n urk5s urla8ga urle6ge. urle8se. urma7g ur3nea urn1s2 urnæ6re u4ro. u6roc 5uroe urom5s u2rop uro7pi ur3opp ur1or uro3s4 uro4sta uro8ta ur4pel ur7pi ur6p9ut ur7re ursa8ka ur4sal ur4s3el ur4sin ur4skr ur4spe ur5ste urs5til ur4stj ur2sv urs3va ur2s1ø ur3teo ur5ti ur4to ur6t5ri urts4t urue7re u6rug8 ur7ugl uruk6 uru4ke uru7kn ur6ul urumo7 urun4g u7runk uru7v ur4vel urv3s u6rys urø2 ur1ød ur1øy 4usa u9sane us3ang u9sar. u8s7arb u7saren u6s7ark us7au us1c 2use u5seal use4al. us7edvanle u2s1ei u2sek u4sele u2sem u6s5endi us7erk u4s5erm u6sesi uset8je us3eva 4usg u2sh u2s3h4a u2s3h4o us4id us3ind us5inn us7jag u4sji us5kaf u4s3kan uska8ra us7kat us5ken u6s5kis usk4l us6k5le us5klo us1ko u1skr us7kru usk7u6t u8s7kÃ¥ us4ky us1l usle6ge. us5lin u4slu us7mø u4sn us1ok uso5l uso6l5d us1or u3sori usove7r6e us1p u1s2pin us5sar us6sat us4sjÃ¥ us4skj uss5kor us4skur uss1l us7smi uss7mø us4s5ti us4str us2sv u6stande u4stat us3tav u2ste us3ted u5stei us7ten ust9ette us5ti u5s6tig us1tj ust5of uston8 ust7onn us5tor ust5o6v us8tra. us8tran us8trar u6stre ust5ren us4tri us3tro ust5rød u6st5rÃ¥ ust7ut u4stÃ¥ us3ung u2sur us1v u5svale u7s6valt usva8n usvi8ka. 6usæ usæ9le usø6m us1øy u1tae u2tag u7tagg u9tal. ut3alb ut5ald u3tall u5tande uta7no ut6anv ut3arm ut3art uta1s4 u5tas. u6t5a6sa u6t5a6se u2ta1u4 1utb ut4ba. utbe8d 4utbi ut6by. utch5 1utd 4utdat utda7ta. u2te. u2tea u4tee u4tei u2tek ut3eks u5tekst ute6ma. u5temm u9temt ut2en u6tena u3tenk u4tenu u2teo 2uter u6terø u2te1s6 u4teta u3tett ut6eu uteva9 1utf utfa7s utfø5re 1utg 8utgÃ¥e 6u3ti u4t5if utikk6u utiku9 u4tili u6tiven ut1j u5tjen ut6ju 3utlei utlø5se utlÃ¥7na utmÃ¥5le u8tni 1utny 2uto ut7oms uto5s uto4v 3utpr ut4pÃ¥ ut1r u5t4radi ut3re u5treng u3triv u3trol u3tros 5utru u5t6rul ut4rø ut9rød u7t8rÃ¥k 1ut1s4 4uts. 
4utsei utse8t9ja 2utsj ut3sk 5utst 7uttak. 5uttake ut4terk utt5eva ut4tid ut4til ut5torea ut1tr ut4tra 3uttry utt1s2 ut8tÃ¥r u1tu utu5e u6t5un utun8ge. u2t5ut 1utv u3t4vil utvi5se 2u3ty 3u4t6yd u8ty. 8utz ut1ø u5tør ut4ø8s utøs7t 1utø4v ut5øya ut7Ã¥k u3tÃ¥l ut1Ã¥n ut7Ã¥r u1u u7uma u5ume u7ut u1v uve5di uve3in uve6ris uve3s u7ves. uvet5ø u6vev u5vi uvi6k uv5ra uv9sa u5vu uvø9re u1w 4ux1 ux4a4 uy6a u7yn u1æ uærle9ge u1ø6 uøv5 u9Ã¥ra u7Ã¥re va3am va8an 4vab 6vadj vad1r vaf3 vaf6r vai5r vai5s va6ka. va7kan va5ker 6vakh va8ki. 4vako vak3r 7v6aksi 4v3aksj vak5s6t 1vakt vakt5r va1ku vak3v va3lan va4led val9eks 4valel val4g3r vali9e8 val3la val6mes va6l1o va1lu va4lun 4valv va4løy val7Ã¥ vam8pu 4v3anal vand5s v4ane vane5s va4net 2vang van4gr vang4s7 va8ni. 1v4ann van4nan van4niv vann6sti va4nom v4ans van5sem 5vansk van5s6ki van3sm van3ti van9tr va4ny 2vap va5po va4ra. va6rak va4ral va4ram va4rap va4re. va4reg va6rem va4res var8ka var5ma var4mee var4sa var4sk var4s5ti var4sv varta9la var6tem var4t5i4s 4vasa va4sa. va9set 6vasid va6sie va4sif va4sik va4sil va6sis va2sk vass5a vas4sau vas4sel vas6s7ø 4vasta vat5a va3ta. va6t7e8l va2terf vatle6ge. 5vatn va1to va6tr vat7ra vat1s 4vau 2v1av va4z 2v1b2 vba4ne. vber5g vbo6da 6v1d v7dev vding5s4 v7doi v7don vd6r vdu6en. vdu6er v7dø 1ve. 2ve1a2 ve6ag ve6ar. 2veb 2vec ve2d1 ve9dob vedom5 veds2 ved5s4k 3ve8d5t vedø4 vedÃ¥6 2vee ve4er 2vef ve6g5av ve5ge 3vegg veggs4 2vegj 4vegl ve3gle ve5g6lød veg5n 2vego 6vegre veg5rett 4vegru ve4gut ve4g5Ã¥ 2veh 1vei1e vei8eb vei3en 4veill 6veit 2vej 4veka 5ve4ka. ve4ke. ve4ked 4vekee 2veki 4vekj 2vekl vek8la vek8le ve4kli 2veko 2vekr 3veks vek4st veks4t5r 1vekt vek4t5an vek5to vek4t5r 2vekÃ¥ 5vel. ve4l5as 5velat. 5velatp 6veld. vel4del 8velds 4veled 6velei 7velet 4velis 4velit 6veliv 2velo 5veloc vels2 vel3se velses5 vel3sm vel5sp vel5st 4velug vel5un vel5v 2vely 2velæ 2velø velø8pa 2vem ve5ma 1ven ve8na. 4venem ve5net ve6nete 4venev ve4nin ven6n5i vennle7 ve2no ven6sti ven6str vent6a ven5t6r vent4s 4venty 2venu ve4nus ve5ny. 4venya ve4nym 2ve3o4 2vep 1v8er. ver5ak ver3al 8verau ver6des ver6din ver6dir verd7v ve3re. 4vered 4vereg 5vereig 4verek ve4rel ve6r7eng 4verenn 4verep ve5rer ve5res. 4verett 4veretu verfø5re ve9ri. veri3a ve4rial ve7rie 4verik ver3il 5verkn ver5m6 ver6nal vern6s5 ve9ro. ve3rom ver8ska. ver5ski ver7spo verta8la. verta6le. ve1ru 5verum. 6very verø6 ver3øs4 4verÃ¥ v4es 3ves. 2ves4a ve5san 4veset 4vesh 4vesi 4vesj 4veski 2vesl ve9sli ve1s2m 6ve3so 4vesp ve6s3per ve5sted ves6t5end vest5o4v 4vestr 4vestu 4vestø ve3sv vesva7 5vesynds 2vesø ve1sÃ¥ vesÃ¥5re v4et 1vet. 2veta 8vetaka 4veteg 4vetek ve5te6s7 6vetev 2ve7ti 4vetj 2veto ve4to. veto7e 9vetoke 9vetoks. ve4tor 2vetr 1vets vett5sk 6vetv 4vety vety8e 2veu ve8um 2veva ve6vak ve5van ve9var 2vevi vevi8sa vev4sm vev4sp 2veø 2veÃ¥ 1vê 2v1f vfall4 vfø7re. 2v1g2 v4ga. vga4le. vga6li vga8ve. v9ge v4gi. vgjer6s7 vgo4 v4gÃ¥. 2v1h vhø4re. 1via vi1an vi6bl vice5s 1vid vi5de vide4o7 vi4d5o4 vid3r vid3s4 vi1el vi5end vig4m vi4gu vi5gø vi4kat 5vikb vi5ked 7vikg vik5ke vik5n vi2k3o6 3viks. vik5sa vik5s6l 4vikt 5vikti vi4k5ø4 vi2l1a vi5la. vi4les4 vi4l3in 1vilj vil4l5an vil4led ville6de. vil4lei vill9ep 6v5illu vil5m vil1o vil4sti vilt7o vil6tret vil4tri vi2lø 4vim vim7p vim7u vin6d5au 4vindl 5vindu vine5st v6ing 4vingn vin5gom. ving3r vin8g9u vin3na vin4ned 6vinnh 4v5innl 4v5inns 4vinnt vi6n5o6ver v2in2s vin7sja vin4tes vin5tre vi8pa. vi6pe. 
vip5s 1vir 4viro 7viru vis3ab vis3ak vis5and vi4see vi3sen vi6seng vi2s1k visk6re vis5m vi1so vis3om vi4sp vis9pa viss4p vis5ti vi4tak vi5tese 5vi6t5j vi4t3o vi4tr vit7ra vit5re vit5skr vit6te6s7 vi4t5un vitun6ge. vi4t5øy4 vi4va. vi6ves vivi3 v1ja v1je vje4t5a vje5t6e vje4t5r vjet3t vje4tu v1ji 4v5jo vju3t 6v3k6 vk4le vk4ler vku4le. 6v1l2 v6lab vla6ga v9led v9leg vlei9e vl6el vle4se. v5lesn vle5str v6lesv vli7v vls9 vlu9sa 2v1m vma8le. vmo8de. vmord4 vmÃ¥8la 2v1n v3nad vna8ke. vn5al v9nar vn5dy vne7b v2ned vne7de v4nele v4neli v4nelo v2nem vne5r6e v4nero vne1s v4nesta v6neste v2nev vnor9s8kas vn4s5pa vn1s4t vn5t4 vnæ4re. vn5ør vo1al vo2ar 6vob 2vof vof8fan vo4gu voi1 1vok 4voko voks3k vok6s5 1vol voli7 vo4litu vol4tes 2vom vo4na vo9nal vo9nar von5de vons6 von5sh 2v1o6p 2vord vor8da. vo1re vo9ren 2v1org 4voro vors7k 1vos 4vosj vos4se vo4teg vo4tel 6votr vo1v vo4ve. vo6vi. 2v1p4 2vr8 3v6rak v7rar v1re vregi3 6vrenge 4vrengi v6rengj v6rengt vrett4s v6ri. 3vrid 7vrie v6rigas vri6ma. vri8me. v3ring vrin5s vri6ve. v1ro v4rob v4rof v4rok v4rop vro8te. v1ru v7ry vrø3ve v1rÃ¥ vrÃ¥8da. 8vs v6sa. v7sali v6se. v4seg vs3egg v3s4el v4s5e4li vse4n v5sen. v4s5eng v5s4ens v2ser v6si. vs1in v1sk vs8ke v6s5kum vs1kv v1sl vs3lan vs6let v6slik vsmÃ¥8la v1s2n v6s7na vs3ne vs3ny vs3næ v4s3nø vs5od vsom5 vs5oms4 v7son vs1or vs2pe vsre6de. vs7s6t vs4s5Ã¥ v1s2t2 vste6ma. vs3til vs1v v6s7va vsø6ke. vsøl5 v2s5øy vsøy4er v8sÃ¥. vsÃ¥7re 2v1t vta1 v4ta. vta8ka. vta6led vta9len vta4lev vta6s v5tepp v5t4i vti8na vti8ne. v5t4r vt7s v3t2v vty8e 1vu vu2d1 vud5d vuden5t vuds4 8vué vu8k 2vul 2vung vun4ge. 6vu6o 2v1ut 2v1v vva8la vve8g9ing. vve6g7inga vven7n6i vvi5ke vvi6k7l vvi8s9an vvi4se. 2vy1 vya4 vy7ens 1væ væ1ra væ9ret væ8ta. v3ø4d 8v7øks vø4l vø7li vøm4met 3vøre vø6re. v3ørke vør8na. vø3se. v5øv 4v1øy vøy6e vøy7ene. vøy9enes 1vÃ¥a 1vÃ¥e 4vÃ¥end v4Ã¥k vÃ¥8ka. vÃ¥4ke. vÃ¥4kene vÃ¥9kene. vÃ¥4le. 1vÃ¥n vÃ¥5ne 4vÃ¥nu vÃ¥3ren vÃ¥8sa. vÃ¥5tes 6vÃ¥tf vÃ¥t7Ã¥ w2a wa6l 5wald wal4k walk5o4 wal4l 2wap war6d7er wash3 1wat wa3z w1b wboy5 we2 weate7 we7b we3g we5re wes2 wes3s wester6 western7 wet5 wett4 w4i2 wi9ar. wich3 3wicz wi3d 5wi6en wi9er. wi5f wi3ni wi5ra wi3ren wi5ta wk3r w1l w1m wn5s wob5 wou6 wout7 w3s4 ws6k wurs6 wy2 w1yo wyor2 wyork3 w1z xan5t x7b x3f xga6 x7h x1ic 5x6id xi5e4 x5k x1l x1p6 x3r x1s2 x7t x6u xy2 ya7b 1yac ya5f ya4h y1ak yak6te. ya2l y7am y1an y6an. y2ana ya4ne. yan7k yan7sl y1ark y7arr yas4 ya5si ya5t y1av y1b yba4ne. yb4bed ybde3 ybo2 ybu6er ybue7re ybu7ern ybyg5 y1c y8ce. y8cé y2co3 y6dab yd5av 4ydd yd6dela yd4dep y6d7enh y4deo y2dep y4dero y2d1is yd6je yd1ji ydo4 yd3op yd5ov y1dr y4d4r5au yd1re yd5rem y8d7ri y4dro y4d4r5oks yd3si yds4v y2du yd1un ydø4 y4døm ydør5 yd1øs 4y1e yed8l ye6dre y2ek y5eks ye4len ye6let yel4s5j yel4sk yels3m yel6s5t y2em ye2n y7e6ne y8ener y2enh ye5ni y2enk y2enl yenle6ge. yens4 y4ense y3e6re ye4rel y3ern yer8sk y4erst yes2 ye5sm ye2t ye9te ye5ty y2ev y1f yfan9 yfjell4 y4fle yf5le. y8ga. yga8ve. yg4dal yg4dek yg4del yg4dep yg2do yg2d1r yg2d1y yg2dø yge5i ygen5s y6geret y2ges y4gev yg6gam ygg7art ygges4 yg5gj ygg5l yg6gr ygg3s2 yg6gu yg6g5Ã¥ ygi2 y5glo yg7m yg6r y1h yhes5 y1i y9in. yis7t y1j 6yk y1ka ykap3 y6kara y2ke y7kel. y5kelen y3keli y7kels y3ken y3ker y4kerel y4kero y5ket. y9kets y1ki y1kj y4kjar y5kjare ykjeva8ne. yk4kana yk4k5enh yk5kesj ykke6s5ta yk1kj ykk5ni ykk8s5k ykk8s5o ykk8s5s yk8k5vi6 yk4ky yk1la yk6lest y1klo y3klub yk3lus y5klæ y1ko y2koa y6koli y4komo y5kosa y2kot y5koti y1kr yk1s yku6le. yk3var y1la yla8ga yl4dan yl4derk ylde4s yld1r yld5s6k yle6ge. 
y7les y4lev yligh9 yl5k8 ylke2 ylke4s3 ylkesla8ga yl4lev yll5is. yl3s6 ylst7re yl5tet y8luse 3ylven y1ly ylæ7re y6mei y4mete ym4fe5d6 ym5jara y4mo. ymo5e ym3p2 ym1s ym8sa. ym4se ym8sl ym6ta. ym6te. ymÃ¥6la y9n8and yndes8 yn5det y2nel yn7eld yner3g yne5s4 ynesi8 y6nev yn5gel yn4gem yn2gr yn4g5ø ynko3 yn6k5v yn5na yn5ne ynn3s4 ynn4ø y3no y4no. yns1 yns6are yn4sem yn9set yn2si yn2sk yns9ket. yns7ki yn5skj yns7kjele yn8s9kjer. yn4st yn2su yn4s5ver yn4tap yn8teg ynt5ei yn4tek yn6tete yn8t7r yo4gat yo6gi. y1om yon4 yon5nen yo6nu y1o2p y1o2r yor6da yo5re you4 yout5 yp8e y2pe. y4pee y4pena y4penet ype4rel y6peta y1pi y4pi. y7ping yp5inn ypin8na y1p2l y5po yp7p6a yp3pe y1pr yp5s8 yp4tok y3pu y1ra yra8ka y4r3a6l y7rarb y4r5au yr5av yr7da yr5dø y7real y8r7edd yre3in y5reis yrek4 y7rek7k y3rels y5rems y1ren yre9ne y4reo y6rerel y4rero yre5sc y5rest. yre5ste y5reste. yres6ten y1ret y4reta y4rete y4reto y7retø yr7ga y1ri yrin5g6 yri6ene. y4rif yri8ka. yr3inn yris6p yr4kee yrke4s yr5kj yr3m y7roe y3romm yr1op y5rosk yr5r8 yr4san yr7set yr5sk yr6skj yr6skr yr2sp yrs7tan yrs5tar yrs7tene yrs5tens yr3sti yr4sv yr3te yr6tek yr2t3r yr3tra yrty8 y1ru yrul8la. yr5ut5r yr3v y1ry yr3øk yrøy4 y1rÃ¥1 yrÃ¥d2 yrÃ¥8da yrÃ¥de7r yrÃ¥6ders y4rÃ¥di y2rÃ¥m y6rÃ¥st y4rÃ¥t y4rÃ¥v 2ys y1sa ys6a7kers y8s7al ys4e yse4bu y4sed y4see yse5ei y2seg y2sek y8s7ekte y2sem y4sera yse7rid yseri7e6 y4sesj y4sesk y4se5s6l y4sest y5seste y6sesu y9set. y4seu ys6i ysi8e y5sjk y7skag ys6kara ys7kare ysk9au ys2k3l ysk5øs ys4mi ys4nø y1so y4s5or ys2pe ys5pis y5s4po ys6sak ys6sam ys4s3ei ys4s3ek ys4s5il ys2sj ys8s7ko ys4sku ys2so ys2sp yss5pe ys4sta yss5tab ys4s5ti ys4su ys2s5v ys2t ys3ta y8star. ys4tat ys3tel ys3ten y4s3ter ysterie7n ys7tesl y2s3ti yst3op yst3r ys8tra ys5t6ry y7s8tu ys6ty y1su ys1ve y8s7øm ys5Ã¥r y1ta y4t3ana y2tee y4tef y2teg yt5e4ge yte3in y4teka y4teki y4tena y4teno y2teo y4teret yte6ro yters6 yter5ske yte5r6ø y2tes2 y2tet y3t2hi 6y5til y6tj yt9ja yt5jen yt9jer yt4mei yt4mes y3to yt8ra yt4re. yt5rer yt1ri yt4rik y5trÃ¥ yt4s3en yt3sk yt5sv 4ytt yt5t6a ytt4e ytte8r9end yttes6 yt4test yt8tien. yttsa6me. 2y3tu y1ty yt4ys y7tÃ¥8 y1u y6ua y8ue yu8g yu4l y1v y4vak yva8la y2ved yve4ria yve4ris y4vese y6vesy y6vev yvi8sa yvi6se. yv1Ã¥r yvÃ¥5te y1w y5æ y5ø y1Ã¥2 za5b 3zae 4zaes za4g5 za5k6h zani4 zania5 5za5v z1b4 zbu4 z1c z1d 1ze zea1 zebu9 zeli5 zen3s 4z3ense 4z3ent z1f 8z1g 5zha z3hi z5hu 5zie 1zifi 5zing z5int 1zis 6zisty 4zi5s4v 2zisy z1k 4z1l z1m 5zo. 5zoe zo1f2 zo4no zo5re 6zos z1p z5r z1s 4z1t zu3e z1un z1v z1za. z2z3el æ5by æ2bø æ5de æ7di æ1f æg6 æ2k æ3ke æ5ki æ8kj æk1ja æk7je. æk9jer. ække7 ækkel6 æk6l æ1la æ4le. æ2le3d æ4lena æ4lev æl5j ælsle9 æ4r1ak æ3rane æ4rans æ4rarb æ6rarbe ær4arte æ4r1at ærbu5e ær1d4 æ1re æ2rea æ4red æ2ref æ2reg æ8r7ei ærei6e æ2rek æ6r7eld ære5ne. ære7nes æ2rep æ5rern æresa4m ære6skr ære4sl ære4sp æ4reta æ4retek æ9rets æ2rev 4ærf ærgen5 ærhø8 æ1ri æri6e7ne æ4r3il ær3ins æri6s ær5is. 4ærk2 ærle6ge. 4ærm ærmÃ¥8la ær3n ær5ne4 ær4nÃ¥ æ2r7o6 ærom5 æroms4 ær5r ær6sel ær4s5il ær1sk ær7s6no ær2sp æ2r1u ærut5 ær3v æ2r1ø4 ærøy7e ær3Ã¥4 æ8se. æ2sj æ2s1k æ5ta 4æte æ4te. æt6he æt4r æt4ta æ5va ævar5 æve4s æ5vi 6ø1a2 øa7re øau4 ø1b øbe6lev øbe4li ø2bl ø3b6li øbl9u øb4r ød3ag ø4dak ød3d ød9de. ø2de. ø4dede ø6def ø4dela ø4dem ø4deo ø4deri ø4dero ø5dj ød1r ød4red ød4rek ød4rep ød2sc ød6s5ek øds7ke øds4ko ød6sku6 øds4mu ød8t5om ødt6r ø2du ød3und ø4d5ur ø6d5ø 4ø1e2 ø4ed ø5ene ø5ep ø3e6re øfe8 øf3fe øf6j øf8la. øf8le. 
øffe8l7a ø5fn øfte5s øg1 øg2a øg5ak øg5al ø7gar ø5gas ø3ger øg5gl øg8gÃ¥ ø7g4j øglo8ve øgn6s7p ø2g2r øg5re. øg5res. øgs2 øg4ste øg4str øgta5 4øg4u ø1h ø4i øi4e øi5er. øi5e4ne. ø5ing ø5isk ø4it ø1j 6øk. 6ø1ka ø5kav øka8ve. ø6keh ø4kelo ø6kerada ø6kerel ø4ke5ru øke1s ø6kese ø8kesl ø4kest ø4kete ø1ki 4økj ø5kjare ø5kjas ø1kje 2økk øk5kel øk5ket øk4k5l økk5r øk3lag øk3lan 2økna 1øko øk5opp ø1kr ø8krar øk8sa. øk6se. øk4ses øk4si øk2so øks1t6 øks5ti øku4r øk5ur. øk5ure øk7ve ø7ky ø1la ø2lah ø6lam øl3d øled ø4ledi ø6lelu ø4lero ø6leta ø4le5te ø2lev øl4gera ølge5s6v øl1j øl2ke ølke7s øl4lese øl9leti øl6lev ø1lo øl7op ølrÃ¥4 øl6sel øl4sere øl4se3s øls6t4 øl5s4v ø5luk øl7ut ølva6 øl4var øl4v3ei øl6vek øl8vel ølv7erk øl4v3in ølv5o ølv7r ø1ly ø4me. ø9met 1ømfi ø5mi ømini6 øm4med øm4mero ømmet6 øm4metr øm3op øm4pel øm7s6p øm1st øm5svi ø2m1u ømÃ¥8la øn2ad ø6n5al ø5nas ø2nem ø3nes. ø9nese ønhø8re. øn8k7r 4ønn øn4nal ønni4 øn4n5it ønns5al ønn4s3e ønn4s5i4d ønns5kje ønns5kre ønns5ku ønns3l ønns3t øn2nø ø6nom øn2s øn9sa. øn5sak øn5sam øn5se. øn3ser øn3skj øn8skjer. ønst3r ønst9ra ønt1 øn2ta øn2to øn7u8 ø1o ø4peda ø6pe5i ø4penu ø5pero ø4peru øpes4 ø4pete ø4peti ø1pi øp3li øp5p øp6pl ø1pr øp6s1 øpsa4 øpsla8ga øps8leg øp5ste øp7s8tik øp4s5ø ø1ra ø2ra. ø2r1af ø2r1ak ø2r1a2m ø4rans ø4r3ant øra8sa øra6si ø4r7au ørd4e ø1re ø2rea ør7edd ø6redu ø2ref ø2reg ø6rein ø2rek ørek6l ør3eks ør5ekt ørele8se ø9relet ø4r5eng ø2re5o4 ø2rep ø6reret ø4reru øre1s2 ø5res. ø4rese ø6resi ø7reskrivi ø8reso ø4rest øre6s7v ør6et ø4reta ør1eu ø2rev ør5fe ørge5s ø1ri øri8m ør4jet ør4keri ør4kes ør8k9lag ør6k5n ør4kve ør4kÃ¥ ørla8ga ørm9ut ørn4e ør4nere ør4nes4 ør4n3u ør4n5ø ør1o ø5r6ok ø1ros ø4r3ost øro4v ør4rek ør4rep ørri4 ør4r5is ør4rÃ¥t ør4sak ørsa8ka ørsa6me. ør4sc ør6ses ør2si ør5ski ør4skr ørs6lev ør4som ør4spe ør2st ørs9tar ørste5i ørs5tens ørs5tig ør3sto ørs9u ør2sv ørs1Ã¥ ør9tar ør4tek ør6ti9a ør4tide ør8tien. ør6tif ør3to ørum5 ø6rut ør5v ør1ø2s ørø1v ør1Ã¥p ø1sa øsa6me. ø8sarar ø1sc ø2se. ø2sem øse4n5o øseri7 øserie6 øs5jo øs7kar øs1l øs4lag ø4slu øs2me ø1s2p øss8n øs8tas øst9ast øs6teg øs5terse øst9ett østo2 øst5of øst5ov øst1r øst3re østø4 ø2s1ø2 4øt øt3ak ø9tar øta4s øt5asa ø2te. ø2tea ø6tef ø6tege ø2tei øte5k ø2t5eks ø2tel ø5tela. ø3ten ø4teni ø3ter. øte3ru ø2te1s øte7sn ø4teta øt7ri øt1sa øt3s4p øt7s6Ã¥ øtsÃ¥9re øt4tero øt6test øtt5av øtt5eks øtt5eta øtt5ete øtt5ra øtt7ri øtt1s2 øttsÃ¥7re øt4tur øt4ty ø1tu ø5ty ø1u2 øug8la øv5aa øv5ans øve5in ø4vek øve5no øveren8 øve1s øves4t øve3te ø5vik øv6l7ut øv7ne øv6nø øv9o8 øv1r øv5reb øv1s øv9sa ø5vÃ¥ øy3al øya2n 7øyan. 9øy8ans øy4dek øy4dere øyd8i 5øyeb øy7e4de øy2ee øy4ele øye4n øy4ep øy4erf øy1gl øy3ke. øyks4 øyk5t 2øyl øy4led øy4leg øy5na øy5ni øy3o 6øyp øy3pe. øy4pel øy4ra. øy5rer. øyri6v øy3rø øy1s øy5s4ar øy5si øys2l øy4sm øy4spi 5øysund øy4tei øy4tel øy4tere øyte5s6 øyti7da øy1tr øyt5s øy2t3y6 øy5tø øy4vei 5øyvÃ¥ ø1ø2 ø5Ã¥6 Ã¥1a Ã¥ak6ta Ã¥a4n5 Ã¥an8ka. Ã¥an6ke. Ã¥2ar Ã¥5aran Ã¥au4re Ã¥1b4 Ã¥ba8ne. Ã¥ber2 Ã¥bo9ta 6Ã¥1d Ã¥7dan Ã¥9dar Ã¥4ded Ã¥4dek Ã¥4dem Ã¥4deo Ã¥4dep Ã¥4dere Ã¥4deri Ã¥6dero Ã¥2des Ã¥7di Ã¥d4ra Ã¥d8re Ã¥ds1 Ã¥d2sk Ã¥d3s4la Ã¥d7slo Ã¥d3slÃ¥ Ã¥d1s2n Ã¥d3s4pe Ã¥d7spu Ã¥d5st Ã¥d6s7te 4Ã¥1e Ã¥2ele Ã¥em8na Ã¥e2n Ã¥7ens Ã¥3er Ã¥3e6re. Ã¥7e8ri Ã¥1f Ã¥fe6 Ã¥fø5re 2Ã¥g Ã¥4ga. Ã¥g7ald Ã¥4ge. Ã¥g7ei Ã¥2ges Ã¥gna5 Ã¥g5ne Ã¥g4r Ã¥g5rin Ã¥gs6k Ã¥g5sl Ã¥g7sp Ã¥gs4t Ã¥gu4l Ã¥g5ø Ã¥4gÃ¥. 
Ã¥1h Ã¥1i Ã¥1j Ã¥1ka Ã¥3kan Ã¥3kar Ã¥4karb Ã¥k7av Ã¥4keh Ã¥3ken Ã¥4kena Ã¥4k3enh Ã¥4kere Ã¥ke5s Ã¥4kesl Ã¥5kevi Ã¥1ki Ã¥6kid Ã¥1kj Ã¥k1k Ã¥k5ka Ã¥kk6l Ã¥k7kr Ã¥k7lau Ã¥klist7 Ã¥k3læ Ã¥1ko Ã¥k3opp Ã¥ko4s Ã¥k6ra Ã¥k5røk Ã¥k3rÃ¥ Ã¥k1s2 Ã¥ks7l Ã¥k3t4 Ã¥ku8 Ã¥5k8ul Ã¥kva8la. Ã¥køy8rar. Ã¥2k3Ã¥ Ã¥1la Ã¥5l6amp Ã¥6larb Ã¥9las Ã¥l5au8 Ã¥l9av Ã¥l3d Ã¥2lef Ã¥l5eini Ã¥2lek Ã¥2lem Ã¥4l5enh Ã¥4leni Ã¥le6ris Ã¥2le1s2 Ã¥le9s6und Ã¥4lete Ã¥2lev Ã¥l5g Ã¥lgÃ¥6 Ã¥6lio Ã¥l5j Ã¥lla6ga Ã¥l3or Ã¥l4san Ã¥l4sek Ã¥l4ser Ã¥ls7i Ã¥l2sp Ã¥l4sti Ã¥lsy8na Ã¥l5ti Ã¥lti5d Ã¥l1u Ã¥lul8la Ã¥1ly Ã¥2l1øy Ã¥løy4e Ã¥l5Ã¥6k Ã¥lÃ¥8te. 6Ã¥m Ã¥6me. Ã¥m4li. Ã¥m1om Ã¥m5øy 8Ã¥na Ã¥3nar Ã¥9nas Ã¥nd3r Ã¥nd4sa Ã¥nd4se Ã¥nd4so Ã¥4ne. Ã¥ned4 Ã¥neds5 Ã¥ne5i6 Ã¥2nel Ã¥nely6 Ã¥5nes. Ã¥3net Ã¥4netr Ã¥3ni Ã¥n5o Ã¥9ny Ã¥3o Ã¥p2e Ã¥pe4n3a Ã¥pe4n5i Ã¥pen3s Ã¥5per Ã¥6pero Ã¥1pi Ã¥p6j Ã¥1pla Ã¥p4ne. Ã¥p7p Ã¥1pr Ã¥pra5 Ã¥p2s1 Ã¥p6ta Ã¥4pÃ¥ Ã¥r3ak Ã¥9ran Ã¥7rara Ã¥rd5str Ã¥2re. Ã¥5rei Ã¥6rel Ã¥re7s Ã¥4rest Ã¥rfi4 Ã¥7riv Ã¥r3k2 Ã¥r4le. 2Ã¥rn Ã¥rn5s Ã¥r3on Ã¥r3op Ã¥rpo8 Ã¥r2s Ã¥rs5af Ã¥rs3el Ã¥r3sem Ã¥rs3ko Ã¥r8sku Ã¥rsla8ga Ã¥r3sol Ã¥rs3ta Ã¥r7ste Ã¥rs3ti Ã¥r7sto Ã¥r5stu Ã¥rs1u Ã¥rs1v Ã¥r5sy Ã¥r5u4k Ã¥r3un Ã¥r5y4 Ã¥1rø Ã¥5r8Ã¥ Ã¥s2 Ã¥1sa Ã¥s7au Ã¥4se. Ã¥4see Ã¥s5eid Ã¥4sek Ã¥4sem Ã¥seri7e6 Ã¥se3s2 Ã¥4sesm Ã¥4seso Ã¥se5ta Ã¥s3h Ã¥8si. Ã¥s5ka. Ã¥s9kene Ã¥s8ki Ã¥1skj Ã¥2s3kjør Ã¥s7kop Ã¥1skr Ã¥5sku Ã¥1sl Ã¥s5ne Ã¥s3ni Ã¥5so Ã¥1sp Ã¥s7pl Ã¥s5sa Ã¥s5se Ã¥s3si Ã¥ss3k Ã¥8s7s6kÃ¥ 4Ã¥1st Ã¥7stei Ã¥su4 Ã¥s3un Ã¥s7ut Ã¥s7væ Ã¥s5øy Ã¥sÃ¥7 Ã¥1ta Ã¥4ta. Ã¥taks5 Ã¥ta2l Ã¥ta9la Ã¥tale3 Ã¥ta5le. Ã¥ta5len Ã¥4tang Ã¥4tark Ã¥4t5arm Ã¥2te. Ã¥2tea Ã¥4tee Ã¥2t1ei Ã¥te6ke. Ã¥4teo Ã¥9ter Ã¥2tes 5Ã¥tferd Ã¥5ti Ã¥ti7da 2Ã¥tn Ã¥1to Ã¥t7ov 6Ã¥tp Ã¥t1re Ã¥tri6pa. Ã¥tri8pe. Ã¥t1ru Ã¥t5sk Ã¥t1t Ã¥t4ted Ã¥t4ti Ã¥tt3o Ã¥t4tri Ã¥tts4 Ã¥t6t5æ Ã¥t1u Ã¥t4un Ã¥tva5ra Ã¥tva7re Ã¥t5ve Ã¥5ty Ã¥t1øv Ã¥3u Ã¥un8ge. Ã¥1v Ã¥v4a Ã¥2ve7d Ã¥4veg Ã¥ve5l Ã¥4veni Ã¥9ver. Ã¥2vev Ã¥v7s6 Ã¥7vy Ã¥væ4re. 
å1ø8 åøy4",
+ ["compression"]="zlib",
+ ["data"]="<zlib-compressed byte string omitted>",
+.\\ò°«†;\"\\aoaöb!Otðn\6á\9\25Ì€\1¼Põû„ݘ_`9ÛíiÛ¾ôˆÝ·ËÚ!좠Á­º'ìh¬+ôA\28&å{=ꬿóHSþfÆ>\4ìÑ\6FU\127ï\1»/lTW\17`rDŽ÷\2`\30¸*»e•\12 Ô\5Ùïw\4f\4\ +\ +È;ê_6¾Zó\26ymŸ֩\19vß;)¤ø2\\\127\6¨C¬*vÜào„zÝ\25¬=G`´=ÐpØTdÀ˜‚+ø·º]\19\3m3áõDH&¼\26ºð5W\30ùk†À{]f5îufν¬†Á€0dëÂËím\3·k¡“[Ã@²¯ÒÈéÙG«P+ áza½6[\25ï\2kóç?ÞºOÜë‡âb\7ï‚ \19¡2\4\25\4癩ÒÔK«àK+â°_höûÌðóoÄ¢¢àÅzŽ-\14Š¬:T\17¤Š&åìL³/»‚M\22\7æVÁº|\2wRÁwRÎ{>'Û®›Ë~áy!³\17Þ×E§PϘ`hŒï©ìŸßˆ•\8 ¬\29¸ µ@áø\7²³ýü^œÐ\12¨·•>Ë!7ô‘{@à\31RXQâè™øóûZ‰8\19èõÓ;Ê°.®“³Ì·ÞÐã\7H\21ì\6Ì\21\21‡ð\0175ß>]›Í)\26\14Û'ut¼õHÃ!/›>†ˆ%À¬(\ +¾k÷Ö)îþ’ÇlËx»@ÿE\4\0?bsCm<Ü{)Þãø¸DDãé&úOÿD<\0094Ÿ¸Ÿ\"\31b½ÿQ\16\7±xC‘€\\ι\2\5ô¡âŸ( ˆ·/bÀûä‚$öHµÅ›iz^óD\7K‘4ñ†êG\0219\23> \24É\0é@B7Gß\\,x0b\127ýIëÐÑBA‚\4Ûî‚Ú¹`î\0298R ¯\3Rõ\22\3Û_·ÿðgÒ,Æ)\4N‹½µ\4ËwÓA–'²¶&|d ­Îm[èÜ3÷“´\23ýÎ|ã‚\6\6W\27ž— ü!ƒtõ7\25‹ì\26¹KRrF³^%©Ã\"i⧵øÆÎ7Còw´ªÊC¶ÑrX˜èåŠ(úVˆfÄNMvšâPtÒ\25Š65ìD>Ç\7o“øòlñ<µgð…µÛðd\11Ïò\9ènËB”E\27\15¡\25$ð°¨ ÊÁ45È\12\"/>º\7؈ë•øà\1R´\30\29\\\21Ù=P\127\18\31(Ï\22U\29ñî\"lŸÍK\5‰xšyäV¯\8\8†ù/?R©o¸\1ûesæÁ™‡Zjp\22\16P¶®Fçâ-ŽÙ¥³áêlaWX*\28§\ +E†RµìÒråÄð¡Ôï”\14Ð4<Á.Œ\15Þï\18æ—@òö®èä)øç\15SQÿÀÒ\31^÷Ñ\25ß;ýFl+L4ÐvSáVáAO\15|D-\16ì–;n—f§ã˜ÝЬ”È\17\20§ÿäÞ ¢¯ì\17Ê'ÙFÆ]¾®+º'bU ¤ÖzÙ?²\25\23D\0269ãú\15xÖªƒÔÚL\17Zªd[x\11‰Ïú4å·\22\18C´»ðÅmlËwõµ\30¨\3ftË»[­IÈE\12\24øòË[׳\\¡žÉn«uƒà\23¦{XÇ«‡—UòØ›É÷SÝ\6âRa•ºT?ì+Þž‘{ìœ×UÎÁ‡\23ñ^¢\26ø_øÏ;ÙXÕ\19)Ì$þ\0099Š(4ÑÉQ¥F\12µÄq@\2APkc­ ðí\0264Äãõvɨß$€sH\28Oš_=©\0142»\11KŒ÷»°\1O›1(á¼~Í !Ô\0235ѯ&å6àŸn|ø\0062Z‹‘Á³‚…j=¨P\11\3\8“\20mq&\9Ÿ2\12ü;i\24|?é\4ÅYCÀE+3ŠfÌ©\25\15h\25ykþ\24wŽÜE*M˽\17^ˆ\ +® c5‡\0251‘$\7“$qlž\"‘åë¬\"C\127.™2ž\24•ã\23Ñ\21í\12£Lÿ.({‰\22æW\24e¢Ð\23\6\12#Öþº¨C³–‘\18}ãÌ4Ó\26{¢5òDßHòŸÆñpS¾AG­±úA·£ëƒÒ.{Fº“—\25Oík†ß5œÊW\22+{±5ð\16\17õ\12\2°\0(rõ\0062Ö\27Èè\27H¹\0229\17\28ÜýÖÌ\19}\24«2žè\11Äø6#é~ \21QÊC‘Á#‹x«û\13*ƒ\24Ô.LãÅaßxz\15J ùÁ¯E\4i‘£'»»]×Àúw¢õïhjEJ€>\25µm\127y‚½íPF?×”‹ÝëXµíDkÛ‰\24¤,ÕW–\ +7g\1ËÆ^Ú\31}IC{â×èül\0125ÂÖV·\3ظqï‰\4\5ÌWw—¨\24ÜÚ]P•c¢a]ÒùVKî\12/7\0179M´\ +[%€\27Ô\3VZ—ÝV™\19}µ‰ë7\5ö\127Vð¬`® n\21:ï\\›YMCÆ·\"X\21éD+Ò\1 ¡\3xfñå\6<\6ð4kl\13 Z\23œ\1\29ÕæézЦÌÌ}ó\30Õ\5¼o¥öé\23h%§O×!Õ”1øÈà”[¦XoÍ\16ú6¸/³¥ÄcUÀ\3¸lu€›¼» žv¨Û„7éœW­5<\12\0029SE4CÇÑ\18œ°«kÃÊ/»\30Ãâ¥WÝ6?{\3\127`\25™t\13ˆÕ\ +u!…ä²…Z\13\14.\20ŒÜ«‰ÀÏÄ)èÓbºÅ\127,®zN¸ié\19µmàò©ÄŒò_Å4ƒWé}2}úIŽû\"#^fè\\ýòÆ_ßà0p{,íH>ì$ÏkfÊà\27?,ûŸ˜OŠ„±‘ø4·?>ßTìÓÜz\3zð\12F³\14¢øìMÈ>KeT<;Ò‹€?ï õ\23ÑP¢l\30Y8Ò®Ðÿj8Ñß+ŠÜ)\127wcQ(Nø\1\13§\\_Êà\4\11Ftª\5T©‡øGêAËãKc÷uÛÔ\12m´Jwb£<…åžÕøéÆñG•˜x-ÝÆ\9%Ër½¼¦_ªÀ\5Ösœê\\œ†\7ò\13±³\28DœzHɉ\7\8rYH\19d8IiûtB„2NÍè¢,\19\17'ë‡RNÈ\23ÄÉ6—E\18L-짩38]Ÿ\11\11\127â°?}“\228y'°å\18<àƒ\9^Ñ„†\30¡ˆÉ”Õ\20¼KM(\1\0028r }\28ˆ‹§º\31¡ŸGŽæ\28\0\5\23ø`\2ËØVØ™SqÂB]´6\30RÆ{\5_\21”\ +¶\26±T€\15q\8UŒ\0256¥x¶‹µ$Á•¾IN‰f+õ1¦µÔø•\29¡Bºè¼ÒókÃ\0307­]¹0Q×<̦\8'ÏŪŠGžnb[\26ÏC$'ܾkIdS|Ù*ˆ\20bœ¼Õõ†5íf²D~XdÓ{«CõN\5æùMe·«I×B¾L6d¡\2[ÞÑ\25PùÈ€Cè\4ÿÙ\5°õL<%Óßî\31\\^Sx÷;Éw.ŠåñlÒ¼ÖÿƒŠ_ŽÁsë\16™hŽÙtмƒû$U¡î\11ªGÿ}ûúˆß·ãª%òm\14÷E;c‹z\29}÷iÃêsbcõ9ø6{è\127¹}v*Œ8Þ±¶\26Ï3Ëg¶\1‘8[¦œ05§µÅS€q-jlâ¬Zª0Û¢7Ø`“Ì~ó\24µÔ¢çzŸªÏÙ¬\11%…%\12÷\14ܳGl\28Û%»f¢ˆÙtUœ\7OäÙW~ö)\15^™Ä™g\17JΚ”#ú¡#¶\0289\18Ï\13‚|ÝíÑP4ÖK\28mþ/ì\28rýÅ\21…\0•£ŽWáÉ=\16¯\20@{\13U\6c÷<#Žfø6¶Ò\21±äõ\"gX«¶»\"`#\14ø@ÃóÖ\20W‡ÚT=\19‚¢‹+\24áRbÐ@^kÖ‰\8þGŒ\ +û1¹X¦Âu~ñ±v\28íx?NÖ´Pi(B‡K\2äñptõÑ!kì„îSP´U®\0096‹ºÑÊHâòòAÁp\2ø~%þÑ\18\18mÒY˜LKÑO\15\20\20¦¹Á½,¸hሡ^\23Ç°\23$_ôç½+´\12^lá9#d‡Î_lwñŠ]_|¬ù>\26{¾9pyÚǾ\26ѵD\24Ô\0¼fÂn\23p¾™ÄJ×E8É\17<ˆ\0\0±h5Ü[8ƒ0Víc$T‹Í®º9Ú`^º’é}7è\30\26LX\"m\5¹¡„Ñ›µÀjp‡¶‡—qÉÕÏJ±yÌhÑ\24•\ +Û šï§}q\30ý]ø?rŠ£E˜ü¥\11Fõ.\3\28ÇZi\11 \26*Tg@–0D÷·\11²¨X¬ô:\1~\7\24MŽÿkÈ$²T\13®æŒˆ\22M²ø¥Cx6žˆ¬¸hu\29‘틈ðþb]pBë…\15˜\0020h¶ž3½ò\22êñÏ¥‘²c\19ˆ\29k(Z1OŒ=\23ãò…jùH½`öœ\0X3\14¦‰huË4W\2Ëí\ +\30Š\127\24k±Ø®³`ã®îP\127*ŸÐãÓÑ\26v‡ØÃw›ß\1Å\17ü\ 
+\15\3àãfØL±fÌËK'ùf¶Åª€¬m5\0ê\25¹Ñ\7¸Ýè-¿Õúx]\3GX°ˆ\7z\28\9p²Á+>Ö;üš’›\19\2\24Jå»\29µ\"»3BŒÅÑ\14\127}÷ÕÙ\3\0251\ +E%\27\31¨è5n\3âV%Þô'ac/ÆýDÍ{s\0#J€b¬ÄT<½¨Í«%h¨™Ùh\14€ãŠ\ +MæìÙnj\23­•&&?H8GŽ\24é>¶ªijï¸wã”ÔÜ\25ÄÔý\"ý£^˜41qƵ±T¹“ÝÙ®É\5&‡Å²\20±aÝ÷ØA\"\23æbb€\20i¦*l\ +\9A{¹µ©02Ü\4(\22¨+õÁ\9‘\30¥~65ªùƧp\13¸ƒP²Ú\20´\2\16ºj\1>±%\8'\6è„„…þÙ\22ƒJέkdÎpòëI\3u%f<ù†ØÕh\26K38ÖpÆF>ß:ágðÒ5î*³pS¥á«™OÅÑaCÝŸÄ\11]ö\22Ó\26\20`\14›O\11*Î[œ!èÃ\12m9Xû-N}våJå\12«\28¶¹d}ö±ªœ‰ém˜EyÂ\28HÂ\26t\18R$-ý\0µ“ZßSUé­˜l›«\3Î5p©€Ü\16\28UV¦›ÒN¯\29dzxô»zA„¦þ!þü?¸]+7?\28‡Ö„\\tlW„Ù#òFñ”õ\17\9”˜ožú\2°e\16j‹mæ\14%³i¶äÊë1Y)\23¶oNuß\18T¯æ—ÖêÛrbϵÇÉçšrºqzë̺—ËÕ@Ë\22^*­£@O3®ôx\7Þa\11nŠ÷\"Ì)*‡¾T®wg{Š\17y_PeéyÐæP\6¤Tã ±JC\17l³\28QÑ|\23Q|~Fá\11×\ +å×\23Å°Ž\ +üs(.\20¿(9V­c¹ `\"Z\13ŒbÀ?\4»A\"©ÆŸþòtì§# ¥\23\31Úæc¹šÅŒ‰\3î\9ªº˜hu1 Rj\0050\6/õÂ\8ù¾±¼ý[…*\18½žÐ’¥nO\5ÁÛˆeF„@bµíh0\25˜Ü)ï³wµíh¸¹ÖÜ\0309R{ø\15›Õµr׬zFõçÀ„Ö\25uý\23%\13_îÙ/Z?u\30ó\2\25³\1{Wªè˜ó Š–\18‹UJÌ\23Çä_&ß„\22\27™ÐG#\127ínP\0UQ·„º)\21o\9(¡ñÝhá,B\30\"^k]g¶J}†léä+\0316>t« F\22•ÊŠç¨ÂŠ¼(\23žåÆ2§'[\16bßË≂tšê4³A”Y\4ˆ;\30Kd|܉„\0216Ê4e¨)7¿\"‰\17û“vŸÕó\4¯m˜‹!‚sð%\\dï\3Û)µûÄÊ}ˆÊ­\27>Ðüu³¢3\"¡\31Ópñg\6ýZÉÃm´‰tU‘k.{§šqtËG,ÈÚk\28\\[Ú_›ìA´\20H‡l­¯õ-BM•L\\ ½×ÈuC}(¸\23ý\17\"\16*sq3;ª\26ĤP“\6\24º¼U0:Ûfw\15 \19%nWµhã\21‘\"Lt¡õ\15c抽ÃZˆqg\31„\4§êeü¦Ît4³ŸŒ§†ãÎu›Ðí£JL– j«&#}î½ÍT!O5yJvWçWjù\29\18ת\27¹wL Ô0Í7¨Ój6Ó€/\15\29Õˆ&Öˆ³ÛïǶ\24G&@éh©ñNµ\21‹Ñ\11/e\27ˆ~3F\2×٢ȖÉd\21‹†_zš×o¾£\18€Œ÷êeNwfL¡D\25ºa+gÏOˆö~\27`þl>^nèl§šûÙÅw{NÕOüª˜(n¯ž¨¯Ü0å\7+p?ùÂ\16néŽîó¸·l`û'%È-ÜT³mìO* ˜­æÓ>aÄ:î³Pä¾\\7XKû\2Ÿ\11™Õ¸£QB”\24\\ó·9ɸ'8N`—½E‡\24ª\\K{\3XéØ¿RwíÚ„\24š½\12v¡m\19+bÃ\17æ\22²uefêrgž\2:oBþ‘²Çö¼ð†\26TEN#\26\13Uçö¤€E\7K\8LN7L\27\1S½‡Ÿ^Æ\6ä%Ìå­¾ù[½¥æ½:ó\1^WŒ‹ãK³îÕÖkÑ—*ˆC¼P\127©sô\11:Ãr«¸˜\23ÔwøC¯“¹¶ì’¯h\13€‚\127²áîzqÃ\29_–ÜU’5¾\6óååÛÉ¥P\22øãuE1N|!š‹»\3¸\9ÃåzêEª‹¯¤ªÊ\23|µ<ó\127þá²é?ê\16·÷/ Â\23w\0÷}'®\7õ;?˜\11?ÿBP⎡©4ÁΗ€ÈöûÊ\20‹‹Äþ$ìô¹T\1ÃÓ÷Tÿú¶Òšd£0þåäÁ¹ta\0074kë\20\12…u(ºì\19œ2}¤\7ä«\21(­ËàE›\1›ýÏ¿\8Æàö.ùhÞºY\4_'f¬U³à»Ü®ˆ@èß—+†TKSåg£(•\27×\3¾ê\16™àû'\29\18˜ÂúÆ4’\15þ÷ÏïóÜ:&õù}f\ +Úâúá(ÁÞ7ýrB\24\22ÿêSËÉê‹¢5â’¦8.lösº}¤[÷ëC\7ìOýkg9³\14k‘ð¹w¾hëH¶Ù®ÿçÀÿ>~$\15§BúUQuîÔ1HPÇM¡\"å”1¤üþ£}#¡„â¿«\127\"b§Œn×°ÓÍňà>ëì®’ï:\6]Š ú\11Ò½½'»:È\2´\23\8¨´\22\13\ +é>f„ó€m1€¨úóGÑš;\2\27\2*÷nÑÃSºŸ¾UäýLö\29´bº7Ì\20\3Õÿ|Ç\22n“îlŸ*MëKÑ[ê¬×9Ý©‹àÊKè\7×\9@;úÂrV‘;Q_\ +x-ÝGêîšò\ +…_™Ô鿪\2õÔýz)ÞI݉0àGzÜZÆc¸iš¥‡5!¤‡/‚\12\ +¡×G§’-̇\6]¢Ua½¤‰ÄØY¤.ÙÜÝ`_ñ'­>\3ù\30\19j´ÑÏ;=\23 úå1'Uö¡íj18HÊ.e5¾4Á«3a|îf\15š’\19Fæ¶D’r¡†BÁ¤Ð*À¥Yèw¦d´OÚ÷\"g¦çGŸNÊ\127ìÕn‹ ¦Q‡^¹Œ\8“¿„ê\12ü‘\6.\\Ñ\21¬3•À\25“¸x›RbkÒ\9×\19SËŒdªQ3²Q$È\24£\17ÁX\0023m9Ó5Ø%Z+Pô± ©Õ€™!œ~£\6ÛY³yD`6½\13pŸÓ§â±\21£\127Ï^žöSg\28|:Aú\19}øÉ8µŸ\30¦O\14©\0˜¤éôÉ:øü¬kê³åUvúì>©Ôg¢ }„\19…Ouÿ<\127îþϼùt~T\28|˜ª\5Ýô©\17Q\\×Éæ`Ó'ó´O:¾*Á'Œ\12\\\24\11hZV©\28%ô×V\19’³ÖÜ?#:”ž\3ç’Äý:íáÒ\\Søéÿî\28\20\22¥'¼d…\14ÐΩÁ²IzV;xÀNsëiiv\0292ŸƒZðœ\16Œ\16\24Ïä1t\19–ïx\18ùt\16³YA‹Ý`7’7çh<§äȼà\4\14\\œ\17®œT \3ýäÝnz.¯Z =Õ)c4m¨\26Hg¥NP§7%K´µ¯õ͵a¾æ®pªÁÎÓò—\14&´ÉÚºÉÕË\3ð›P7µ´d\7#ÆAç‘ŽbÎ=m¼\17MÕ‹=Ký\"Ô-\31Jpù\4\6SªÍe\11¡};ªöDƒì¨[­1‚˜£áKM\20è_íÑ(€mÊmþù͈@L%[àQ;,ï–N_ýGú\26/¿Â•©ùE_|qÇ&\6é4\31×\9øP©Óƒ\21U‘‡\5ôÒ„5\0ýWÃx+ODöŠI‡\9Tn«\15'óוµ\0140+\0092_è÷‰EÚCRó\31D3qÛšÀÒ—oÃ7Õá\"0Í\3\0233i†'œf¤ñ'U~æ-nš­±85ؽQ\12‘P\ +\26\17P1Ô¤èfÔp&\30 (b?3åN\12ƒv;V_›f\17XÉ\ +ûn\14å\5`š›ù{4 \30s‡\13²Ä.5\4GÂôjš\9ó\19dýEã\13¾Ñõ\22E«Š/;\21_¬«$éÀ¦Ÿ9Š‰k¾t©y_ÈšËãàrS M‚G©rá\8\19˜É\28•©\9{\25`5®þ\18W\127Š\5\23R!°s\19o~(\\Óp1 \31\27\22•gìœÀB\21è9§kà6ržppq¬ëæÂ\17jƒÕ͵…àvÆ홚3w\4Sõº\6ZŽMš1çËv¹]Žz;.\2Sú–*)0ož\7lW˜¶§Z{í5ß:ªVÜ:jÜ\14Vø\12±™¬\12Yѽƒ \11ùCö\23\19†‰·Üzè¨T¥ù\18–;ÔrÌz(»ë\2ÅÍãé|\31‰Ã<^ \30\0\9*\6`¬†\5[RÞ÷Aãµå€tlZÌåK‹-—\26€Å\9\0054lÍË#\13\7»®B!²\9\6U,l—êô3²QÉ–@\ 
+ÕaoÀr©ÝO’Á\30U$ÓSÖ/Àën\29f“Õ\12$ìRšq–¬p€¬Â_\21\20GekâJ\2ÞW²]ƭ©‚äb¡}“Í\2:£â\"M—ª¹“,¶Z\ +\19la§ZÚO†Nà›†=™SËé)jMÍrö\19t\28\ +à \21\23.yé™Éú^U,ÉæÑø\17ó1Làå½$\13‰|æM^Z®,¥g~,غHK%Ø°ÓâèË1\24–:\"îB[@©±úh\23Ö›¾íec\3\4ç\6ꯢÂi©Šæùƒn\13ü…à3ë÷ý^‰ŒLÁ\24¾ü\21tM\8ç\20jýÛàP´\28&\14hê•ä\1JL¡fI£ƒ\24k¡IjžºÓ\11ßo¢ÔSC2n\\ªÊí„&üâ$Ý!ïEᇛ¸ºÝC¡³L\127QsÆ€…I?YVD¡0\6UeFÖŠ“‰\3Wá¯7Ìo\8YâVï\29øh±\8¿|ñRw|”ï’Ÿ\0119\29\16È\11ñ\26\\\12BTÉò\12g\20\16ÈQ¼à»Št\14>G\4ÛÍNh×cËGÛ^kÀ’TTHcôUðB+\5,í¤p‚’\ +÷ž€;u\20`\11\9Í}Ñ\30\23îé!ò>˜ð\14'áÄÊé\2l²4„º\26\2ÖžÕ™Á3\15¥\21„Y:B\16¾\30E»æjøÔh\9 \4ó’Â\23YBYUEBá\12;Z˜(EaO\ +™æÙ\0*5\12°ú\19z\\˜\29Š™\0281û\127ù®`sØN©ßÔœÞAé݇MH<*Щ\"\22\27Où\20LåÙrˆ\2NôÌzƹ5\27½\0–\9y´›ìî\31Ü|kø\2×FdËFÀŒs\23\20m&\15Ãý\ +&õ\11rG†d\8¥…f\8ÂûìJ5ýæþ4b\15Ü\30ÓãjG\3+>\5ó;Lb\6Í?†K’_²Ê/ê‚ž®vK4\14ž-*)6¼µÄ\\†NI=\15\28“\31ã=\29¶:\2\27aï'sþ–Ÿo¨*\12ñr\27èÚˆ\21u\21\23}ºL±Sd?–\18Àâ÷\8ä6PÄãäõ&ç—µ)Zˆ(ù-\2¹ÂDMˆì«}\\Hñ\5¼‰€uo_qM…ØXUÔ!YÔ!Åá\19ÇÔ42¥äî…„À(¾'¯¤\9œì&Ç;SÓËS£—,(Y€¥¥i\24ýÃzÈË®©¹kœèÜ؈TS(†’äƒvŽ6ß—,F¡\15Èi«\15‘¦P\31Ö\25ªo'÷ùÔ¹gQ>§8\20ušÉ—ÈMB–ªúÈÂfBðQ\23ö#/n\9æÙT²å1ÕAìM«uh\1@rÍ\ +’[W\8Á•Ö¯(\\ðXi¤CÇ\15„*ž\14b±¬ý:Õ˜ì\"«3G­\9[™¾±G\11d>u¨4\1zb­+,9¶v\127E¯QZ\7°\\fÙj]Åé²^\ +õµ™ÃÔþ—jaú3\15·‡²ÉÍãÔ9ßT§Ì‘æœ2˜5ÛP\01142¢f„!¢¨\17¿-PcMÆ‚\11Jx\24”Æ\0267ì{VP\28uu\1>¨¢©]¹4ûìrZÎïàÎ|Ÿ6Žò€\0rÊ·ùqC\21‡R>4\25zùYñù1Œ=Ñ\7ž½ÉÏ…¡åK|bËfbÓAìDN\30/¨„#vï3q~¼í™ñ¯¯\9Ü \7*\22´µÒy¼eV‰!;s\30ïÌ$í!‰zÕ… ÉHâ«\0311Ò®\4½gB\30}À¯Ü\27®Cå`\22Åâ¬ïTùÓy01óˆt$…\21š_•éðѽp\29·Kt\18ïnÐò\26\"¥ÎŸ7«[ן\9\3_‚?ÿÚ:¸z¡Aí°úÓøÞO\16«aÏÈÏÑ\29÷Ô¹s²§±IFÿZ\5àT؉\26W&gžº{\11h\0\3‘^tCÆ\"‡ª\4Õ‘1öJ5ÁðÙ\23X\6s\5±\2\ +žrº0¬“=p'z\11õ á\3úÃ}T7ƒò3Ê\26ðÐWs•£Ô\31ä—”’)±ðÒ›(Ô<\7ÍQH¸ü&ðr\31<\19Ñ4ÊŒ\9¦&røùÏ\21/Wô)ó0Ò g=ä®\14Ÿ6\11’\8WÓ™‰\30Ižï,ÌlÓ{(Š¢\4*=¡¸O3„•ÚfŒÕèCF¹.ÑJ\5ž\23Þ}²ER^­5+eP\14Tg.¶Í¬ï>\17gdJèHNÂ7CëËQ6¬íl\11A@¤Ž\25©r™¯Ô»˜ƒžO\\\"ÈW\21”9ÀãQÎôhѾ}{ñÂÃiW‡Z†$ääkt7½¼˜›Åø9»«6Ÿ‡2\22Ä=éÞwׄ\\ÙG\28@Î\\!³Sgk?W^¼Ò×(m¶t—òe\0270¸šÐ\ +@ó\16\26¨¾ÂlØÒñ“K{Wzó†”7[/%ç´Û›=u°©òÛUùùÝÖºò¦U­\"Ö~áÓu\127\17—‚©îá\25£\25€íæDØÛH¹\ +\18¨:H\16ÈÛxä^,‡—ûöū✫Ž\14ÔRÔæÅÃéS\9ZO 0rÕ®¯$>Æåj\28\15\26ëÄLiË\13z±ÇzV:\23.H°Ì\8\9Ý\21°i¹O:ß—ñsêˆ@u4_x¶š,Ɣʙ¡ÃÔäxæó\1øÄã·ÙÐ\27·ý¬AdŒt«(¸œ¿”Y\25¾X«Å:¿FþXus… €RùkeFÏ€ÀˆFª„–\6\ +œay\0;Ÿ†O<ç“/Aæ5ef¼4_›ƒâˆB¹T–tr@By@*(ÌJ6\28%€Vy8_\21tµ«\6œh!\24Ëà…U,š\13`™h*»&íÊ‚,•ùTšÚG¥·Rù#°Å¿ŽÊ”©Þߨƒàz\20›«HV:IA…\21FçQ\23dT\\:\15&Ô\28\14í\8Ñ€KªX,zàŒž.Ö!›ŠM\0\16‰wÇöR©­\7GQ0´pÁ,\0)¦[õ\21DGñ l\16Ͷ2U\12y÷J®Üݧbãˆ4ºvЦŒõV)„j(5™B-•—…€\ +HˆÃ7\19ÀzÒ\19Â)Ü{l•&Ps\6Ž\20[U©Ÿ4U˜\22\2P\22[=\11]¶+\"}ä Šm#§ò\13}g–ždûPƒ;{ûÙ\28rùß\28³m¬t½úƬ%¥0ógƒ\"¤]þC\127m¼±v&ËÅs\3lE_mUÙ€>D›²ãFN­?×®EŸ{¨ÐdíæG¼ö'b£®„‚óe¯¡ôØæ½9ûCñÝá6nî\25\1'Ûš¢)´ù\17]ÚÞ|§m:S×i¾5\2Ävœ„¤“Šýv¿ú¢`ó‹_Á˜*zW\0¥¢U^Õæ\12ÓZ¼\3µ€LoKy¤-õHÌ(/x9[Ý”ä¡'\1ïv¦É\9\22\18xß\7eÞìÍÜcéÛf—ËP¸3è\27dÐ*O\6p!fH\19–¯8™÷[½qØx\30G…9Mmåk/œ¼õ·Å<\29A=[âVD·öŽ\14ÕºUÉq•äk\6Er'÷\22BKÛ{êt\22C\"x|\7›k\"È:Øè\11\1—\4TÖÉ\16\29ÍhEÜŸÔc{q<Ùv×y÷üÖ|ß^y\27B\\\ +pK\6ê°ßUù7>é­3\"íl0»[¿câ3í؉Nû\6Á¸\31°aºã\6åd\22ññVíß«b\31\13¢[©Ú>JýaÆÁ{7=ê)á\24Ì=8ÆGC.¬\21¹\11!\\h´éð9èð\1\8uÿLðC”'ûÎ1rL<ÆÜØcé¦d\11I0š\16s«ŸÁpÇxÉ\28\3\27\127øü€\24aÍ\"\5‡fv¼¹\2\20î\24r+$\8ipB\9^ŸŽ/N\0310Â\15Õ¹³;¾\9fý\29ð\"\8‰\\£Àœë\14æïp0k\14\19F\ +ÙüÿL£µ¥ÑÒŠ›\1jêq-œ½å\3\27\28åD\29*R>¬iR]V åŽòâÞçx¿šN‡öæAŸÿÖ\30ò7\7Úáoºáï\27W\6\127wš£ÿG]lUbé……tz™\2ÿ@Üý£‰qb.pZÿù\31.·~þG„·ÜÌ%÷Ïÿ@\28ÿüO»}üü?&ÁÏÿ\27ø{9>~þ/\3zâÕÑz»}èàûëC'ê_jÍêg=:ür]¾¶·ÇS\30ÞN*’úmÅ€\ +®“ªÙ+ò\15ªµà\12Å\14\28¬eaµ\24¶ršfµ\9\20y\22’¨ùm\26ôgÀ\"õ*:XkDàôm\15(²ÓŸ@&>[¯7¢±•®U1Õz3‡Ë‡ur\4UÈÃm\30°±´ß|á\9îU~øT\2Hí\24AÄ‚\5*G\15çö¢”•:ZSšª*jPŸµLå´«ãÊ•Ãeæz³‚WU“­LIÑÝ\0¯`® ’\19Ó·\24>\29¥&›œ;\7\11bÎ\4f6\23 ¦L\4šp\17@\13\27Q(4[@H𪱠\1Â|$@Ž¯Âè’vÜÁ¹`\ 
+@ce-ï€ß\31kï¡ÂÚÛŠî&þ}îø.úJÃäúŠÚ\16ÂS]±ñ&¯µ¼\19ì\1-Wr?ñ\5æ¢m°f5¬\\¬®\23¤i|ÕT¥f\11›\17Ø“j\31\28ä.G\18Š‰¡µy^Ow\13ÜýfŽâzGÂãcý%, /¢¬Ö·:¥õ\ +cÜëCówÔg°~s#·5˦]™¹ã/Bt¬¾iŒÆ\7|\21iüuDŸŠþr\ +\5b}G°\127¤[\5Ñ13ù=<\0V3#÷Em¬m†ŒéÍQ\21T†¨\"XÇO­\29«•Á¶çjzIÿ|\13 ˜XAã\23¹ŠÈ[ÇÉú(Ï«UǨ\\t…Ë÷3+ÀšŒõ·]ìB\25\9b”¥|\\VÄVVh0”ãRï?Ö*ð.ö3bã¢n\"ÊHÃ[nÌì›(qA!?¾\\}‡æŠ5Â\0¾IŒ<µï\30øÐîZaKúeà%õŠêkŠCìlµÒ˜ÕÚbÈ\4>õ*Êh=!:«\ +Ä[4h\\9tm)žI \20Ï­~¾°¢¬e£%ñÉÀT[ˆ‚ÃÓ`j=@¨Ý”»¼Z\127+\23z!–Û0Wxq· Vï<Àž”XúPƒûäxBŽ\2Wnòœ\13óyDdùéh\"0P;ìVÅÚ\28ÒqËÄ\127! qµl¢à€/ß\0260#z¨À\7ÖïbßTÁR\3kÔn«>tgh4ó/{ÇG\5S\5s\5e}Ãí\13÷\ +7ç;‚b\17ÄÄ\25/ѳÝö\18\8à\2‘È\19‘³Ð\" ç¶O\16Y?€{Õ³¬aŽñß,\12¢æñùýþ312ÙñríÌüžœÙ6AY\21 \30–9Š[PËlü3ššgÂ\23êZÈ\25\25Âu|\23ƒ>ýÕÒDò5#{ª¼V…©L@ÙòEŠ/5‡í/{6§¶¨>¹ \26»Ú^\5nïjÜjÇo\21\13ðF`­*KÖª²\4ð \24„È\13æ\26X“1‡­ÔÄƯ\31\21PZ‹Î_µióºE5ÿjÕ&|Až@¾\19Zc­išo\30µ­vÈæÎ:ï|`2c–ÂmAF@Ëâ`±TÙ)Å®iЂ´¢jDxQ\20ÑÚ\127b•em>5\2¼¸Y?ó¤\21Â\16n´ÖÓSèòùͽ½ø\0ªd¡Ô\2OmÅöÄZ#žìÚÖªàu3mqZÁ\15\19\28\21ý\19iæöuœ\31•›'¿MPÈ%ÙÆ#ÌuböN'€gh7kúÍ7…ÈEáǺô\26¡…i±€\12—›Oº(VŸí\ +õ+0\14¨9ã§;·\24\"šð«'\4ÖF¡'ïjg\30D)È‘gÃqåŽ2F—Äh\\\22Þ‡(Ož€¬–\13Y±,\17[M1Cj\7ùc¿…üˆ}\16Ù\9é†-Q\127½(–Ô‚\9µ1^4&'›jR]Ù ý\ +\127µípýÝþÁIlåÊ\127]^ÝC˜·Úç\4ò0ŸoF‹\13xä\20ÕyƒwÁXÑâ\9ò\19YÚ5‘\24ºHÝ•,µ.ßH*§õ÷sPªô\4Ù$-\21¿ÈÕ7°h2;bµ\13„5Y%m¿òþvE®cr\4\29xäë-«º&\0049”˜°Ð¯,É\20:pgb´\18¯ôVîS)2>\11„‘-\"ð©Üü­\28䳞𙿵vi¥\22h¹Wt\16M%œWëÖÁÓº~(:[¹IÐø§Köü;ñîƒT“]õqÊ}Ì5\0235WÈ×\127êV“òál\127þSÁ¿äîͦ>ªUs\ +pf\"Á±¢®Åçù\21¶\0193+\21[_\17Ô’Ö™]›Î —˜\"o¯ôêæZ{\0272·h!õÖ\127Ý„\2’×\13–\17äºO[êt@8rPíé‘¿AÎZŸ\"Œ\31vÕ¼Õ\22Ðåin¬`\0141*f­\ +x\13Y=«\31\\‘du\28:\21‚DQ†•Ç\18Š2Ü\12Æ‘xcÇöP©9‡F#•+4X®\1¾+\6\23\11°[/5˜Þƒ¦¸‘õ˜½„\5A¶ëÀdUK”O\1h\23ç¤Po/U!dFJõ»\17ì†TFójýRþ\\m®ëirëOì¬+ZbÔ\9ܽs?ÉՌʙ¨Û”/ïâ}ìX95®\13}vrÊÅ\13×Â\5\9`¹Ì¯^×ØmP4VŒ†b¡+\26$ÖôÉÎ<[\29–]r\18BNîô쬬¬q][Npª“\0ðšÁ\19\2‰1ÈiEÕ:Ÿ“ëH”·J\3å_Xek5x \8š_¯e\29ým]\28F2[î\18ØÎTŽîâí —«n!ì»uýù·¨\15û\21r\"³þQ-¸By6ru*‹å¥ó\ +·\21§á¤o& L~®\16£cMö\"%˜ßZGZþ©á¹ò9O+Väå ó\23O\0176±ÅaÜ\30\19âÜóZuB†­\23AíL\24u{ÉÚ}\4ГÍ7¶Ê\14¨t?ñr÷ýoàÜñ¿\127'Š¤ãÕ½ØUÂÓ°\19™ºEO!Œ³\16Áee(zFà\23é\19‡Ÿœž\127U0V\0'‚\23‚+#\16Ðø…ê€Ëá\4\26.ï(/\3\"¯iàOÜ’ØE‘5\127Pªßñç¨Þƒw„çÕÏBU˜Ç§>m\7ÎÍdˆé(û±–|^Õ4¿y_KÇ\3’\21S\28Ø\4!\4\25,c¸Or¹á]±ÄÁöÒø1øZï\9ÖÁ¯¼ñ1º_f¯¼Æ^û\2¥Vü£\5$Ã~&´¿Í.\24\5\12gïz\25… \02067Hóç·~\16¾m=-ƒ=«sYno\8ç\17x(l±Í\2Õ\127¹k5©o—Gá#\20\5–HGÚ½4Ï*w4¾¡Cy:\23i5–¢²yÚd¸Pƒe\8¸©Ÿœ^´\6¹½œ5§'|\23!\19¥\14:£\26ÐÐÜU›ë-ÜI\24ÐoUë‡ÐkK4\15N¸ ‚.?jt䞨Í`]øÓ¯\13\3\17Þšdôï®üÏW\18¤Ýñ‹>‰\30Øx»)¨òÝó͆\24EC\\\127\21G¹r\20Á,ëà¨\1:U\28Þ¹ÅOF\2YNrOÚÁQ˜Á¶™qMè\14‹Ü\23œ³_#hT#„K>av<Ûê¸b ôysJ^µ8\8}\7@í]uÑÄr8\2Š\29¨ôë\19\4ûdÌ”g8þjâ\ +£G\30gf¡:\3ú¾çX¬õ²Ö\24É_’§\3ÒsŒüªo\13^O=$ÝøX£ì\31è#B\19SæŠ\0;p÷ñ‹OVóœm\7=sS눆gì\25{\22BTj·Ž=ˆ\7>÷\15\29ÒE\31Å›ýúÄÊæ–µÄ\11ö‚öù\305ÍÈ—8\23Åô#V\9óÛ¢\5¥Õi§u¢ú‰ÒPP©õ*\30ÎÒ|áê(ŸoöÎdf\22ƒÖ#”\1]T\28\21Vƒ€ùÂ<^&ŠUk,ÕÏB°u+jˆ\4\2‘\25%6\20‘¡\28§4â›K`†ö¦JŸjù€ÙsÎäÙ\22ͳM˜ã¶\30)\31ü€4‡D½i®Œ¹r7cÛ¸›\"õ~w°&‚rW\0294 ;‹z\1274^Q;£±#=kP×\4ïm2æ5Œá\14â\29$:0ßK„ƒT\\dh™´há Œ‚ËrÝéñã­<ýý™uôž(\7Ÿ»Ã\8ý€¦TØßòüÓýÓSœ›dÝ\4\26¿»VŠmoh\22í>ÆÖ\0é×\ +7CÄ\127ò\29Å„r¯à‡ó}zÚ\9\2\9\2àqç{º ƒ_°\19@¤6€ÁÊw†\0170N$Ù]’\31éæÓ/ÕVÕhy›»\7H\11«\28\"Þñ}ñEHœ@í&D\19½\"7S\2‰ø\0\127\21à…;hb±\9<ª\21@y\27Ïí |[\28k{‡jm?´ñiò<\16€Ìê\18ªØØd‡â­ÐN¶éŽ,t5;SŒv\24\20§[’AP\20Œ©ß>üV[tb…_äŠú%C\30…\3·jS”ljòÕÕ·òtZ>f\16ÜC§ã@\0219\6×üÀ^\23tÔæÇçè>€¿Jš\9IÕ\ +]±‰ÖÉûpœQé h\4­ÜéV\1•Fµ·ò}ÖXBå\6\15‡q\6˘éÈXèÀ…Â<ñ<0snf(}~\6£,ÐQGN\\ë0‚Ù\17™„ˆ}Aÿ\18>¾ÃÝÏ“\21ƒQ™)\0307'Ï\6p<\5\16r%6¼\29ÁÚ­\26]nv\9?Ú\26û ^£ûl&ÿγ\12p\16}®]á’0kÇH‹ÆíŽ\15l[-ž\7s[óFi.´ŠR%'J÷\ +B\5Ñ…˜X\25gU\22=†Y“ \17Ö»ù[YÝV;ž\9r©£ÍiÜÂãÏ+ >Šjû¿\127iÙ÷>\ +\24Ý©\25»GÜž\17BïúUá€/Ž\14}R°-h\22,jß™—By¬\0232¹JÍÑ,¨åGý0ÒlMÀààâ°!%åaC%™çÊêOÈh\1Û+Éc¥¦U\5Dµðþ\03086dÃ\3ºž¥\18˜aê©ŸÿP\25¬vã»~3š! o­Ê\7†! 
'D [[ÀÉ\17·\9¹i\21\22 Ñ0ZÂÿ:\27\27Ç·\13(Ba­^²-“ddÔiô€U\25¾ÑgÍè\11${±p“ýz\4ð$IÄÐ%\31Ÿ±úÀ³#OôUV¤Ïc\15òoFhçQ¸y¼x±G¬Ex¹cü4fÒ\19­°fµ½m\30•·M\19ÊÛܵ¢š1FúƒÔ³\4(Ç.=)\19¤?6霉–Ü~\30ÁôP³ëÄ\"^9MÈ5ÊsÖ¶x’‘øŠœŸF(>rÌuaúú;Wc'¹\26;ɾ\12ǶÓ8W_Ü*\\+(ã\27î\21ºËrETHý\27Ìn`~7#§¥\2£UX~ùÏ…w®FK´V²C‘)#*ï«óØóÐÒS\13~ÞÉ|OyŒ\2tŠþEvµ>& G¿\25·V\0228ªõ+ªR3Ê€q\25»\2KT\5˜ýF5Š\7s”?=üa~Ñ°S©ù.»‹§\1]\9É\5³³f$SÄ:Uà©_hLi‘_3ô\26\21œ¶\ +¿]b­I_\16–wðZÁÛ÷\2TAZ›.€8AØ´Ã\ +016¯ÐÒpkÈ2÷)`¬‡€ü6MBBuÂ\0<ðvÖߎ÷Ý¥ž4e÷h#9—­hr4<<Û‹‡ Ôe®“ÃT¡{\6\19ÖøtŽ¹\0X3\28kð\28«\127\1˜\127h`´\11?ŽHßvÓͨ…§mò!÷’1T@glÃæe¼™ó\5oû’\12zFÚ9í$Þ\\QÈ‹Îà%d?Ô\127ðdAR[E\22tåî#Áˆh¦r8ŒümÕ×M8êj8*Æ8<¿ú£R§ÜÖS\15Ž$F$‡7¾öðÕ‰JA¤Q}Ë9ƒPôXjŠÿMÉ.ÿ$ÊâÓ\6„\0056QlŸã\5Ž²a\6´p\23\4-\17ÛOöÐO¡Ö†À.ô`†ÏM\20êç~)dóÊПH\8™f+´TIªXçýøù¦ŸÊÒvKDF=}ÎÇZI<øˆ¼]~Vêÿm$ó\28”Z<¡ˆÎO\6QDĦ\2E`$\1270ÖxÛ\20\0016l}‚LuÀR\1TÉ\19=yrQÔ¡Üh™µ©dkSÁMv3õ˜Z×G;}@»ŸgöÂgW›³h &; \1§¯§Û”Ø\5ŸÉ™BÐb.äN$Ø\1\6‹\3Ç\21òÿiÚ­Á\6™\0029Ÿ?9sgûZ\23É#\25|ý†2HþA\3?¯˜ÿQµX²Ï}P·>á~dÑIWÈ\28Á\30*ÆÐÄèI³ÈÒ!¸\28t¾¾÷…ƒêWâ¼9|)¿¯*W\"ø\27ýôWÿãiKV\7²€&ôé\26°º¦Ê†Ñ·ì@d$\1ÙßX\31Ȏ䩽\29\16'SåÉ3\22¨òtÆàož.H”(Æ/˜ŒSç­vÒÁq´Ž\0210‹\8…+úÄ\27¨‰ÆsY•›\2y\13¬\20€çy\6“µ\18(\"\7Ü©\13\19Øu2\0137…aqÚ`7ù;&²4UÂê0ê\29®f\ +Np<‘!¡á]ìÍ€Ÿ8\17 \127%þ²\3¬\\’ab°\2óÕ¸§V„Á+»Êú\0´Ý¶þSÌ\5™°Ó³Ò¥\13:6Tí8;cº>úÊ\29_ÊþD\27\"\26\0063Gà0¦\24Ê&¿+¬\17YB}ejNÜm«sš\20ªo\5€0¦\4¯J\0~·ÂÜ\3\28i'D\31¸&˜N^¥<Ú\2xx\ +:ÄQHjÒk2MÃ]yqp\16P\1Œö)å\8^úò]“s\ +ª²HÖgú\14|ÙÇs\29yQÛäBªY—\\ͺ\ +\20Fp¢W\21m5\29¨VE°c\18Ý“\11#frý‰‹6EäíŒf-´Çô‡>Ÿ*\31\18\17&e¹õÉ€Ž±ˆž²¥\20\"°cOP\29\19÷ˆü÷\00498ÆNB–Tù\24¯O£ º\25Oº\\\17'>£7³WÝŒ\9Æ<×\3½\2ÙÔ93ó\\\23½‰ê\15t…\16\8ërNl\9\00075­î\6\12´\25¸…ÉKÂÏ}—ú™eÐσ9Z³EFTpï³ç…}H\8óæ¢zÞBª®hu ”\1™OP—údZVÐ¥@\8Sû¿+HŽÕ¹›ê·Í“Îj£òÛB\13õ@;\20\22í±§Û‡qx\26¨¼0jm®ÀÎË&ø\12¯<ür\15?\11 ðD:Ûûr¹XqÉ\127˜\17Õ\24\15 óMK\31\16×Ø(\20¥\0319œàÔ†?W\ +!Ñès\0[ZÍB\6G\31øßÕkšàƒc(ÜN\6O³P…¦ñ©óëm Mr\15ìfI†\29V·\\7&ìõ¾0ì¾-ÌVI#x\13?¿ñþü†Ë¯Úrºp(\19=\12?¿Ñb‘YêænÛJPæ\29€h±ŒÐ뺺7¬¨Æ\31 õãm\27j¨Î‰Šv½õÜ­äjîG@‰«}\31\3ØÌyv\0245\6WÄñžè™8þ\"\27\31\21`°äøG+\24\9 ›\1Å%›ç)×/¢Ñ¶äÃ(ÄŠì°Oš\0èC â\"+óà‚–\27§Aù'\19:>’X\18Î-D.'[ç\13µ=8 ÄŸÿ&wT/d›kÃ3Ó8Øð\29Úaxé¯\14Œ©s&ð\5äºn‰3K„üˆV¤­ThÈ¢³™\17Ñ2mÄx‡1¢‘ñÛ²Nf²áåµ³}Ì\6\12\15 ýD\\ôád‘\"\13ê\0ˆ°&—ƒ°Ž çTô‹·lµ1ÙŠb”’]\8]Ü´esÁ;\127;.Ûúˆfü< (˜šuˆOe”õzŠÅ\6ŠlÓ69Ý@q©Ã\14Ô5§»f¨A\22!î*\16e·\ +>Eš(,V_±Oxí\9Ñ4\29šôx\20\17‹é‘È\18‚?[\17*\0â\29µ£K\5êT4‹‚¾’u\19åôôÓó\\UàetY²ÊSÿÌ/¼\24ªÉ\9åC'„ËrªR\ +©c9£6‘ÃBUŸè€\14‚\27\13‰\12`\26æÞÞŽÚÏ\\Å¥ÙâÉ„©wP\21‡=çŒ\16o¶r·û\27®\21²³¤å\15kÖÿjúÅÖÕ¬\2Š.hQ8\11â\17ž£“[ŒFhŠwár‹Õ‰ÁœÂÀù-5ž\22~ƒˆKÁŒ5ϸdúšˆ™\8u\2\13XÂ+\ +­\0016´¼òÖÓ€¦¥>§\8“¼Â\13ÂÆ\127Æ?!PX‰å0þ\5@—O®æ€\4ÞÂ\11\25í<\15\6zõXð2Aಂ³T«•¾¹®T\6\ +,[/\16É|¥“Nlèè.pý¼­'„ê(\28ÌÉ“\127O\11\22´ö{xméÄkÏœ*‡,U\14YB8­85ÓßïÚ\9å=»¨\18ދϬŽÔ™[Æ«ób&\18/±!\2Òf‰h\"À\9ÃJON{<Û}ЉÜ_g\30òRýëá}‰G»^\29\7Û\16j\21é±c4EÄËUª} yœ\127þ¯\5QE@ªò¨Pͽ\0310êÿ\8ýƒ¼/\23šUì—PpYçn]{M¹õ¬\14Y›Åtü\ +zz\11\25A\4•šoàÕ\12Z÷%šm\7å!c¤l¸RÒ§Ñ&\4ùʢ鸒¨Þdw]\12LÂ[œí¥õ€\26\7\14)‚\0170Es%D`À²¢3£1Õ¤Œ›Ö5²¨ø¡PX®:\5øΦç\20•9\4Pë ¹˜ßºK²&ÈHÝXΚ‚åV\1³4Û‚RÎ\29»‡N‚\16°ÙYX³¹:\20¦<{\ +ú5riXÛç2øú\20éÚ\\n‰÷bÊ¿Üü¾%_ÊÕÛ)*ëè\23ŒGghÍì0\24´È’Š\26øõ\5zQmà \21m„\3\17}ûSZÞ\19æ2À%Éõ —ßV‡r1\9Ç=…/bª­ \\­\1)ŸJž•Þ·ÛåÍ\17\29Ê\0204ãä®*\5\21\28\0316ûí]¶øk\13\22Äü†6(ßÚxË7¦Û2rÒ\16\8Hõ|R9Lr;”m\25»Ü\127UèÚ€Z;kóÈ\0053“ä}ò•]5ä\3%Wf\127Eƒ\3ÐÏ$³­\31Œ†\13\23HäÂf\11 x¶¤–\ +x×\9¾ÝÉ›$\"³\28\7:\20„\28o\127ò×Ý¡{Ífw6ûÀ\2R©¾f\22ô‘R•ƒ¼.ÝÔÁD3oɧê›+(dÔs}¦Ñäá\9q˜;Eøyví\3•.o*LtÞè!Wu\ +/HT\16ªÊs¹ð̈À!öœ2°¹z\25+D2­$”‚kÊñþTࢂ\30%ª‰P¼r]?¬z‡ò…;¸®/«\20\5\29g\26̵Öxu+Ðh‚‡Û\29¢pä-è PNg\22¯U,Ü?¬”ž›,ÌN Q ¤I¨ÕÏ\25\4ÇñnÍÙ¨/ÕÏDÿ@GPn*\31¡šïùüóçëÏŸòçÏÿÇÓ›$¹Í3]Û[©\0218‚b'-‡²(\21ÅNA\0´éýøŽx¿é?©ymì?×ü\12„\20z\16m\"‘ÍN›¢ï¯(Çsu5•›Ì\23.q󄆋 
w\26\24ÿ×—ÝÙÔDvã\16\0192u‚\7ëIm0A¼5±BmLžy\9\14„`®ZÂX?˜dß°ÈNÍYm\15p3\19M(¼§òúAt½{ü\15\31ƒá(=\17u½\9\24r(i3ï\ +Ú>\ +Å ØåÃêŒÔ\7±«27C4ƒI¶ÔcG\3Ù%bF»1ÆCŸDcÍ\17‹Þ(?â‚b+<„Áë\8Ø\12†\28h,2›ÚÙàvö«pD\25¯A¢I}å)ü¶Äd¥J/¼ˆèŽþ\19'\8¦Ö\127²!<À̉Ø?2èsË<\23ñÙK\31Û,Ž\0061ú´\2Ä\\³¹ˆl$Gå§p¶Õ\0265'‑t<™å\6H_\12gï.q0»S„ÝÆÅ\12ð'\2§ìõ\6)à\14‡€¶û¹\0\0198Êè±nb¦ˆ!*O\127Ä\6‹ÇÙïTZ\13öåC§DW£‹P]q~ã75ÊU¶¬:e»D“*\4Æ\0288»+J£9±±‚?‚é¸&Zby‹\25í÷{V4Gß{ŽG\27÷Ø¢µ¸\27ø«à8X?¬ŸjÊàŘ˜@os:!§âN!/ËÀ¼w\127Im{Ÿ[¬Lå@ó\ +[~<1å\21X³ÙÇ|㈶\5\15±\0¼ht$\21\3\24[ô\0051ýÉ\22HDë­tVž|b²n™ÍZT\20æý5›âQœM®lñ°É\23Ãì‡%R\0\25OÕ\13?Oüþï’}¦€4\17]WZV\24êáÕ:qåOM·kÞ¤+ü­é|UÂT_ÕîT™‡.ÝØ=ÒÍì©2YH·h¬ÊyKÈã¹®¿§\0171\9ù‹Ý©è´¿\16­Ïü\17&î2k«û÷\0310}ÞÃñ-(®¡Ì—{\1ú§\28£6˜\127vÁy\6ý\13W't·ÿ­Ã}n\ +jI8FÝù\127ͪšëŠ[\6éã\20t}Ö\29O\17¡«®€¦Óv\24ÎN$t2êŠÀÝq]ì;ä–?{\18Ã¥­{!,¥øÌ\20\16\11·Ròm×\127è;ºoÂÄ'w£²{÷¡;éôew‡±\3‹¼\29\25\31¹!\15Í\19\3}€¢x8ÇäîãaÐh\27S(ltºµ¢¼›´ð’…¢;isS$‡šª£BÍ\30\8Âø\0096\127I}BÕªþ#õ\16,\29äËðàÀ«]Z]u\8*éN‹ÇÏ\15œº·‡6`RF\1279Û”x\28\15'žìÎ.zua/êç½›õ8\6š0B.u\15Œá™ý/ç\11[\6ÉŸ£«9&‰‘³pÞ¸º¦Dáí¨=¬ÀÔ\8\5¢\25\11ké\19lî-æŠÉÒÕ5à׺Òjd÷\3:o#¾éAa–<\ +Yòˆ0è\20š\5\19\31hû\ +tŽ\16S”–!á%À®B™œ‡ÔœøʉážXÔ\12¾\14z]Ê49  ¸Ïg†«ž=Â<‰´ÙKñ3šÆ\2\12üS\6ÑÙ&§4÷¦þ0³æ¹×¥“$‡\26 Ñö\28¸,.´E¦H߸ttÚb²\\@%ĹcHšÎ[Y@ªhË@7áõýÏY*ìd¹ÝyóÈ n, `ÌK¦´Â1ªÙøR”\13\3Î(2&pËqôéf\9OJðœÝv×ËäÐÈŸå<óDgúÓü\16˜|Ás\22†>µ58gp\30ŠDÐ'tæµÓhÇÜ8¹]qvÇ\"؃JYÌݨÿbð Ƙ\27f16’˜‹Èb=¨™¥¡Â¿qk°p¼­%zÄò<Êž\30}ö¨êäÂ…?«·„«’<:Bßz‚')dn3¦`½7\3uîõBò=©\15.WíUo¥eáŠ:Ùp]11\31®\9«„¡¹Rÿ5MZ|׃ô\ +?=U~\ +k.‚6ä“\28M½ê'gSÐ\22-ä\3\18Ý\15•ÿó“\13âççyVXû“Ž‚\15ª‘ûÓÿ\3\25\19Ãqѱ\31Þ¢7‚óEÓèÖûÞ\24nÉœ|†êÕ\27¢Úªœºû‚ý´g {mÅrµD´ñ‡¾õ¤V(ª\2…žW]âEKY®ªº§þž“D®ÓƒÀ*ÙÍÝ…\24\2ñœ&ˆ!à&µ»¿íZh®\24\1\3%¤}½2\19ÔßíRf\3\30¯B{]¢0™¾j+œ#8\21‹E\25BaY\4\0G\13ø2NC¶\19ÃÍ;_ ¨‡²\4\20²éc/ú£®m\1\26'õÀƒ]ÿÔ·³[û‰3ð]Ã\25[F‚—›[1œ³n\12\12´ƒ´Ž\22\15À~¨]+\22¾Tìy`ùƒ°“ŽüÖ9\30J›)Vãà\0r`È :ïFÞÁ\31ú䛟Ì×ÞzƃùXCïeŒ˜€¢GÊ@\16Ö9xÊÆ6û¸Ø·Ò\19#¼=ÁB\0\6C\6\28rUÏ9f\3ó/\23ÃÆØ'HDüñSAàÉ\2,Šúk¯Ñ~<ì~ÿÕbÑ\20V]\19ojt‰‡Õ…¼@Ÿ¡¦^?Y‡h`\\9?”¸fg观¾6Ó‚Ê4ÿ¿9*\30n/´P\19¡á¥Æ\11ë808ø}Ð4ˆ\24˜d=f0eð/ÑàŠ\26['¢±+ã?­Ì”\18ý7´™6MÐ¥µ¤°ˆ62–\19Z\14<Õ\21õZý\25±q\25ÉßзKì\14›—ÿ¸ï¦\29»7ö»\15§\29ÅçÙï\12ç}ð !¼\16²ðB°ð‚]s,Ð\13°Ï©‘³»\127¾Òî™þòîÝ›ëM!f\26@+ÄLÝ34BÍóyñÚšiöŒ\12\19žo0Ñ'±‘ª\ +¹ƒ€\4½µð…Þ·IÌvà\30n¨>Q\31“W”{ mœzü9ùÈ£w÷κ\9ù÷C–\2@Àz˜\31;&ä¾8ÌizˆI*rÿþËm¯?ܪh__Îúcé…Pß5Ö÷\21‚¸\0¼b\23ý±Ô³\0\ +\0296Ê8t\7uãÞ\27²\17“Å\127¬z<ÿ\25Táå!TìÚ.¹(Å\9o¡…ð@Õ(ÀgìãûoåX ñïOµ]ˆ_øÔ=W—“OÖZùÙ9}ª\5ŸBÑÔÚO!\8\26ßφ×Ø;\7Õ'O®¡údz4Ÿƒ–ñg\6\13•ö\4süäQ \12ŸHõÔ\2¿(îúdýÖŸ\7¥²<çNÉ\0é€\12iZûéQÒ,\28¸)6Ê6`º3\12à‰ö½ìjb\12§ë¤Ú\6L–\11XW`iü\9oÍUx€\30\29ÌÃÿ#\7ÃdB.&¾¹ùÃp;{'\30n¬¼\1ÌÐ\\üÁlú6ì@^L\21/\14Tç\14¥EçÂÀ¸\15å-\8¾zUq‡ó9˜½>À^O«(òa›Ø\ +{\24Í\24\30Ü-…Ç1­†OR\20ÅZ\30LÙTÀé³\5T\14\28G]Å\"\8?!µ[\8/\15`-©\26¾\30ƒW\6!\3ç@'¼¶îå:Lî+mŠr,7˜áö†ä›ê'.SdÈF\27õQF\0260HÃê\24Ì\20Ï›;ížý½¼\7˜ÑKˆ¸\2¹\29Û\"†–Š\25ÕkÍ­–¿D£\5€“€Ãp~¹“m7H3,n¾É\14Ûyœè-z¯\6\1~t\14¿\1þ\4—\9þ&‡\27*±£ã@å\6Ä\ +pQá\5d_E\24s\14Nrí胛ÌMuˆç'ñÙþ£’[y\5Svˆ&U\13‘G[ 6\23\24åh)8%K3›\23ìãê'–Óp1^\8\0199m\18ö÷Ã\24\28.\16\5\12BôÃéÙqù«ŸX:jÃùÙ#gÒ\19‚]õ46ò4½BûþÆFú,QJ÷\15¾«þ?9V+N\2”šùC†`èÏþ^G@ùÀEE\20÷€'}ùÌÈ´ºá\9zX\27zâ<ÁM\\äæ6Zõ…ýÑuÄÊ÷ù'w¸§\ +nž¾ô<™fO\22áÓ^7WÓ¡–Ë\6õÔ¸ÈU·<±bçèÍ.ÇÒ\19Týi?Ó¼yr©~–±\1+|¦‚óâ™®þøıZ\11!\8DMU{&÷V¢\4¥\8þ\"–Šœrl\12‹—Áe{‡ÃÖéøxÖ0Œ`´ÊO!SPÆNsYNÏš…\0©$\29=ÔŒÝé¸ö q\3\"=vf1!˜Á\27!agA\2\5leé\\f^£µå\0008ÝÆÎ=®r_ÜX…K¼Þ-ÀL[\13dŒHˆŽ\26GÐ]¶ñ\28°ñ즶6áKòí–ëÜÆ\\ìöræ-8ïÆ=X•l»››\3µLø>¶ß‘^\30¹L°’tÙ„ÎÑŒ\\òƒ…\11ðhšÉÉûòy¬ól\"‚K™¢¦2:/\23§,g\16,gà4ˆ\27Ø«\18³œ\1ïÃàå\8\22ä¤[ŽS›©´\8\20Šä½@¨f@Í\18WJïǪ£ŠåÓÅïÎ\0022:2IÇO\6vpW 6¤ÿ\30‹•3\14î2\29¯Œ%mW¢iÊ>¡¨Šj¼ñì•\11iÏp\28Õ.aY\27ç^'\3\15\ 
+¤låEû>`ØèŽzà;TrÈõ\27ÍW¤1ÒÓ8¸ëžÞg\5f&»ap£Ñê<—\31<ž£Æ[WÖ\127ƒƒt‚ïéû¿Í­Ð„^œŸë¯­N©e\24D×ß\9RÅÈ\14\1_”?\0255¼ô·/ëã\5¶R¤’=Ì%¬IÚžäeP”\22¦.Cq=%n$ãtxΰúG\21VŒ\11çѸv4w½Ýì¹³åŒ~\9(Çu`dÔymÈ2\24ð\\MÞ…\4'g˜i\8ÖrÕº\6\27¶\13Ú\ +\23ǹ50>XVò¥Æ¶#\6ËÔÖuó\21j\4\19R{VO#¤½áêb8\4VãeËtû\17•í\6Ü\14Ç6ßðÊ‘K:‹K]­bóu®´)qxÁÀ}m4êš\1çÀˆ(º}c\6¯\28¨%Vfö1ק>0\19Ù’\11\4a\24›-écÆ-Á2Eµ‡fŠ¶©Ñzn5‚Ú±\4à…€ÙìÆÛ\2\28\5ÙX\21¾äàÞñS$fÄ\7À@¿Î:`ÃÌ,éí{8†çŽy@©©}N\2U3œf\14•yÕ\8Ë5\7—ÂV(—³·ï9ù‰Y@%Íi:QBš\\Uâ:…3}N\0273æ4\31•s3Lx\25\22|\27–\18ùÃNn¹&\\Þ\29Ï‚œ\17\5ãê‡Ëjé ÆBAÕ~°˜2èGL]30Š\16¬Ì^îÕ.ˆÅyÁ\ +\13̹|7\4Ô2 \2\6§f).}\5y>h}ò„l…0§°˜ò½øqȯ\15KI'þ_rE Æ\24ˆ\15ê¯\0077¤\5¬ENí§l\27¸¦uÃ’}/\"7\14\12y\24³\18“*j\31¯òͲ²s,\28¢šÜËjDtá\20*!´ªYB©õå‰Y°\28<—@sű\6VR³\13+૆!®æ\31û0¦\0146‡ƒ¼-yz‡\21öµ°ÂzS…µóK(—Æ\21ZBŠàÀÃ\26\5Ó²õÞ\21„Ü¡Ê\3èù\26¾HƒÍI\"INìšë™MsÅ‚LX\31\13Ù„õ“\9»B·²|•<²®Wî\15ëh\5³mX™’«•Àê?2\127jÍäæx{^§÷¸ág.+ѽ!\17*·…ê#¢£f@RX­+Y)ÐcLB63å£CW6\0ÌÊ“ûªµâW¢Õ7H\0»l±Î \27ëüéîAò&ÀÒ­Êfh1ë\1ð»ýz\18.HÔá÷XL{\7,n“\2mööù}Îì¬u°Ájû¨\1\13ðêbìPã…®«®Øp‚ù\8]ó÷ßÆ,Óü£\30í)ò\8O¼2Ó\2*ŸY\18:­ÄÐåF¾E´\2Ï[\19ÞÊü6òB¡…I©'¿%\14\2Œ;ð\24\16Ò˜M'\11q\17aVö\0›\13Þ„\\\0250\127Œ\\†Õ&–±\14ÌE&Ø\14]@¬ËaÌEž¯É\127é§ýGöî_„ýøZÛ€‚­Ú3„lŸ-‡ûËÐ\3\17x£õ·ùéÕ•c T[n‹©O†‘ä-dXžÏà¿ó\3\27\31hâáÅæ\9¨>9êàÿ§2\13\21߇½c¿°@µ¿S&‹;h:\12\28rMÈ,\21\16ŸY’ÁºU\12ØWBæ­¨ÃÀ&\11!x\6Xo\22H\127hž(Y\14\16ç8Utéæày«<Æ»ØÕÜ\15\16,༳$XàòùÃ\25Þ€ïçÚù׳a„\9T\0312ÑT.7!`>*pq œf4f|\11°˜©\0HáÆÂÏ}%\17žËæõ\16ï¢'(ÔüX/'z{\5íFU\23㶺\127¸C7\0—\1\6L 7aíH¦àG\14°‹\\vRk*¡g¸a\18\7†(#ýRs¿Sì9kE\8\24„7ú\26W#¹åêL`fJ\3\19@•xRbâ°vRÏ»Z·*\20$îÞé XG°•Xn~YÓùƉs\0220)\0018d8L9\25 2}WÆØÌ”``Ÿ“/!ÃW\14fµ¡\17êÇ\27Æ<«¶Î´\20ªe—áí#K;\14>“P#­€hMÑöóöW(áÙ5\24\25#fû‘ý[®ùSÈsÜ\6X\0õ9&oD®³¤\29˜­ˆošF\23­ÆÀÑeÎkÚ¾r}äV\"I¢ä+£`qÏ9û\23\127\13V³\12`«ñ\0306ÞÌÓ\3\14²ˆh#Õ¦Ù§\12Faq\31ìXÛÁƬÈ#zöb«É¼ù2PG‹aeÈmÕ†\29f÷©-%\24zØý;¥Ÿx¢ï\29ÔÄÝÙêÓW¨¸qï\24Ž³ß^£¶4yß:D\2\2¥ š1unHºº |ÉC¿%ãÒ1¿¨Äü¢â$ì¬:h\23.G1ù\2}\22´\4Lô\11´­:Ðzkœ©ç¡Æœòñð„<òqsð¥GÃB«£9¥Úx¼r\20u\30›ð#Õqäõ|ì„A-.¬!ï„Ë\19oä*…¶¼¯9û^Ù·½#éùï¯v7Òeºóæ?%8[|Sæâ™~,,ÆÎl0¹YnM݉¥ŸÀÒS‹P«<'WºB$N×’«uºj€Êt5\3\17=˜¹ÐÓ‰Ëä\9Â|ʬөò›KzÀ—N0Þ$h¿©\28àÚJCäÀÅî•.^\9{]!j†Ü\18ÊLt9`Ë zD”ðd\27zºêqUVG\30ªìhÁpNùÙÌ¿áXÆŠTK½6øÐ<.€Ú0\21\8ž«[\"ûQsl9(Ëì \18\28\16\9î2ðZ? 
Ã\28î›Ã\23îÃÚpÃ\17¬\"Ux²ÜÑiV-Úƒ§œã´\27\11úó\17þ”?\5šðÇTDÞ2¾ÿ;1êÿ¤‡Ã?éá\0,\6à6A|Bš8œ™ÜÚQt!€\27õ¿\8VæNõ;È÷×I\11¼ÐÍÞl­_w‚\30Núõ„‰õËk©\17„@(ðÌ`$áØŽÏwìÂI!¸:ÜÌm‚:yz¶,R@Y ÊôùÊä´ï¯ü‚\1ì\13ë,÷râl\8ˆ!›\25yc·’¥Tùå\18}¥›€9kµeÞ|Ë\0247·\28xöBVÐ\5^Ô6XÚ8äç4@í×\19§5Ÿ\28r¦Ð9©s¶¾ÁúOï¬uf~F\31\28MÚ{7pç<Ìz\7µQZÀØ\22\25o^µ_IÏÁ´\5’\31Ó;\20†Q“9Á¬ÑRÝû\23f\11D~M”5áæmé¯Áª#Yþ.\22è²h¯?õïÉLÇ\127_\14µR,'~95Æ’I³á¢ZB`»\24ã£èÜ…\127Ñ\"dØr•Trèˆß\127ýâlÉàP[2XQÞí\1ѵîxö®ýˆ]!ô%v×U\127¯ª\0157È­®:•‰PÑJô“ÿ7ýƒ\0297vç[©“ÅÄ”¾Šúsƒê\15dovDúˆçN;zìàP—;ÚÕé\30‘\6æ?è.à“\12Œ\18t˜^ž{{×4\20„DÀ™ ÉÈ}~œ!ÌÄ\14•\8\ +Ö„\127k¥Aº KÄð6\29aºà{F\27ü‹ˆéVÀ·>ꈩ¿ˆÜmçXS?í§¹Y,WMàæO(Ê#¨@\23¾ìW‡Õòæî\25©v,w\0264QÌ\5\19+±ƒ»C\0K€dká€*M\0292qhp\28\7’½#žÀÁI¢žtqdœ,lˆµÒ'¾'G² \13¸døP·´Ý4q<)djsŠóÄ\5•\0íe]N{\0PF\5]\ +\17˜h}¥\27éšé’\12¸uÇn¾~àd\22}u\ +š…c\5ýª‚ŠE×ÌÕ@Êr®˜Bs­ÑoP¬\17-œ\27»¥î®îÐÅý\\Bç0Ñ+d\16©~Á@Q\29ß\"ºú\16\14º\0319¦woYh\23€¢Î\12y»*lE\"v~à¦<^¸\9ÓöAw7á®­‰¯]\30ºÅ,¾[XE”Z¾LN‰€¹_ð×ó\13a¸g0eP»§±•\"÷í6ÉàÙŠµ{;0O\17\5VPôdYTL\127ÌÁ-8\20¸Lu\1Tà‹\30Œ\24=\5ÒÇ%\"÷«¥…¤¯>KW\9æA•ü\9‰E‘,\3\2\12ž1\\¿\"¼Òr¸ýF\20ÉV¸ôÚå·.Œ±¸ž>¢mìÉ=©”\6\21 ñšw\27\ +ë7ä… ^Wt>Äk:\27\28åˆ[\5Ü\22™†øó#\22?ÕöŸý¢~ù‰\30‚Xÿ\\\21C%Ù²X\1)(ö(TŒ}\9'±<´¹¿p±’§™\29¦\22\1^vÕŽ¾fâè\127Å\6Õwîcè\4É.“ W¢º¿’47\21ý^,½¾pS¸T‚\27ÖÄ¢Åx¡þ’«o1;fߘÁ”ÁLí\22\15Œð\11Õ¸9JÃ-ÇJ™øÃU\25h\6*!H؉‹X\"\0273Ð\25\29\17\16&ëAб]rUlÒH\1ÓÀ»Ö¨ÍE\12Ẳ;•!½£ÉÑßᦌ¶¦œ\0093%g¨×?h±‰Ø†»“&ÊBï\15\30®Ë\6\28„ý'žÁ•\12Õ–rtÈ}Pp–\29Öú\28|Z¨ÈAû\\™ÉàPÅÙHû\2]©ò-wZi\11 \17ke1Ã*86x0Ÿ|ª\15ÓÞ‡©)ä\0191à•r\11Ï­qr‚ÊŸ7b±Œ)Ž\4±;\24VÝ“‰é¸›ËdÊ÷͈.“\11äõ\31î[ì‰\25¸W‘$ŽU?žÙo\4â…¼\8º¸\26H\30Ö\7\23û\19G/9r÷<‹\20ËWºUSã\3¦¯\16\29¦O¦ŽÍú-+\12éþ–ÁÁ˜ÁL§d–\28L‡z\2úJj*\127p*3§G„äwÏ*8\23\13r\25y.óÚOÒa˾AÌ€ü˜+cH¦Œ\23¼Í–\9¾p\2܃\0193ub[\21@¸.›¼\8&c_¼<@oWZTs€\8{ÇB>˜¯@>8Z>8þO>Ø\1î/ο~6憑³\15›\26\23\"¯U-DÀ¡°Z`ñˆý¿Ÿ/s\14%él¼@ˆ¬1‹\ +£œ¯?O­\11~Ñz˜\127«ü\2¢0/ï\26&8žAjЄ~9sªñ*²dÀ\20P\28´oÍð姷\25iï\27ˆ\21ûÉä梄\13ñ\\ÒÏ\25pcÖüã4î\11ác-îºðeÕ2dÿàY²äñY<0Ë\5,@IVïT(ð TÈƤYiY³¬»C¿ÿ?r”Ìã>ëZ²zÉ)å`{QŠ„ªÂX{RXUá#OC‹\31GÄqó@-\30(4Z È½šÁ\11¿›“­oÖ¼šÖÍ“—ž'™w\19âË—ç%\12Q¸ÈßÛëod€ôá°Üh!¾¾ÿ4Í‹;xÄþÚä¥ør÷nÝO\22ïÆKhã÷#zdk»\\\1¶‹é(c\"Ž\15ïðà: X“þ\31:ÙXðÙïNopÏà‘Á\16\\\26o\ +xgoiXÓs°\21“á_PËá˜eÉ\25s+ ÌD\24œÌ~F¸N3§ÛW¦ÝV?\12¼[ÛÀ[ÌrÎÖ\0119äÐÝE<\0298þÈ ÏàÓß4z6l#\22|þ©k¼#oYë¹ÿøÍŠG24à\8–žUè?gVm«\3Ñ÷\27Q\12Ÿ{Â\28Ñöå,Ë;{\24lh.Ø3åVnN¤­Ôàùýå\ +BåíyËf–\0290eðéf ï‚·ñA°ÕÁ…q™7U„$®å\18û’òp¤<ôN¼\15\13j@\"2Ú\20a¢:ê4¿¢{±äòõkÞÞ\1:Ï´µ\7š\26¼û\4\31²Á¸H\11]ŠÝ+|’-ä\13\21Ê£ººD,Ï^ožº¥¡£ó™çNÁ6xÁæ]çÙ\2›Hì½\14]ÞifÐ`Åís€ëa]’øå&øx\8¹iÑC\14—€ŠñëøÛ\31\29ÝƼly­Ì\0215èÚtÎOõ\27`N‘-XS\25b‚ñmÅŽ”G\6\25Ù\8)\127\7\29\27Î{Þ\2‘›é …Õ\1\\\23½`ŠÃ:‰Ñ\21cEæWÈpò„³!;eÖù^£½Ô‰úÊ“8æ¥\22ó©\23ó©Ç\17 Ú¢18šoà­'\26™A\ +<\ +\9;¹§\"ä\2\12ØÑ\1\17•“$I®,yh£w¢ÈÜ\16®šO\25öä[B,úäb’óqyä\24U\3÷\14µR~\12–³6|+\20H¥:\26ÜÜ_Øð‰H|Ÿ\"\18߸w»Ÿv\7Çún÷ý\127³]#\23\2l\6…Æ\21©n\\–E\22åŽíÝhõC(¼U’Å\7|Šr‡šÁ\ +Kã\3\11®=¡ð˜éæ UU}ÒåÕgß©,䬵VšOØ\12ã§\25z\5JÙ§¾ê3\0251ûLŠû„«¥@\0062\14Ð[†ŒÄ\15…ïzƒ•rÇ¡FÏ\1)NsŽ|9%*”¢-¢ED¨O<|ŸüÆ]z:Z–Z)PE#·\5W/‡›qXD¨9{Ú\1\30L\18ù\12\28£ŽˆQ« h:Ãm\31ª\15ÞÊ\15G0%‡¢?\17æ\29¹n¼\11³å³˜mœáÛî\25h܇\ +¦~yBIÛÁ¥\0Ñ\9bNîÛ\7&ÐpÎœ\25xÝZ\4²£\13©åw&Çy¸ož\12×åÔ¾4YL;ZL;\14—|Ê\14ÆÉAªJ\\mkB˜\19;\1fÏâàö?iNÞE†\17ƹhé\19Ösn$6\24ˆ#áˆ\"¸\19\25Ü «zלÂrQùO4tϥƊ‹ïð>.†©¸U´\9Óf'³\5¨\16°AÂî½Áƒt¥wÄa\26s\24Ü*ØíË_7MŸ\25€]\8|\127µö³õ\13\19H\5y8ŠUP¬\28—ûjòÌ1b8´˜IÓ¢žÁ¼\6s\26âÓݬÏþ­Ë\1›»kfo\27æÕUÍ\13÷(­\5DÇåm=Yg—>\31v]GcœoX¬ÍGù\22\4#|\ +1K˜G\30:˜H–0'%J\15èÃ\ +&½\6\12Ê\29¿Œ|Ü2†œvÉQœhˆ“3Œµ«ð\26)Íõञb°LEÛc\6\28„\16ºT5-8ß°.—Ð\16\5º5 Ï\28wgt z\14Â’œóÅN\6ËÄ\9Ž\9ýºÊ\7b£\28\27§Ì\16òJF¯>zš\27¯E\27{U`å8ÛI‹¶“f0g@í¡?›Ë€\127›»\"\24u\17ðJµF“h¡uÊ\27Ý0„€3ô|\11õˆÌK´A5¼¹šœ$7òõ^K>ê(\8¥!\25z\3²íUÇ\31\25x@ÏÁŠ•õù>± \11\13ØëŽØ^ò’à–9\20:GÐMMI¼“G„à\29âi¼k%wÝn\6n\29ªV 
–\3¼oì$Œ¢ÉAñ¥ûí …ßÿA,\27ZA2^\0049 ‡B7\0302Â/\24˧·×â\9n\\g9xù¼Ó\8\16÷4ÉúÉ%ø\9*Ï´{ö/Ç@:}¢ýF‰Ù\13ê'œâŠ8ü̈8rårªÞÿ™™OŸxçgÒ\12zú5ˆ/yúÖút·—£N\30K‚G3ªÝãt\\äf“IQw[l6é\3¦æ#N¨Ê\4X\30@×\9«>ŒP‡ÕÂÆ÷Ê\127r}ñmÃEÐG¶.—ŸM\6±\0066Ü“âÛLV´%¬ˆn*uú?ñ³8µ~9‰œ–,²·eìXhNϦyÍYô1b\12Fsì43ÝOsÿ¨\20g\21ïц®b–ÌŠ6RÕ\16È¡:ó|‚ú‚X(!Öž¢E\"d^5º]Ì[\23—lÍÙ§¡øÖ\22E:\14”jÉ´ÊEy,’\20­\0077.\8Øq'Yµ.p阵k\24ÛÕç'ÂCq½öœ,¥Å‡\20se{Yu\7Z± \26׋é\28+Š5:(6€Š¸2ÐkÏúZ!Œ­<œ`lRÀý\14\25Ý¢E*䮽3®§GÉ\26Ã\6\16+p…˜´\"|¥ôÍí¨\30Îð(ù\26Û\3R:YWíàü-Œ\18¯:Š\26-lµ\1½¥¸ìͫϦUw6\28?º\0175iû^Ç0\24¸…\12Ôj\2\9×ur«êÉ\31!·o\29ʷ‹õ>aÇ*ÈV²\5¯\19\8÷Û”P†|\13v9ÿA­qu\21D\12D—@W(\15êÍÙtˆ•7‡\21Ñ¿\\>²K\17‘%5z\30\28…=Dõ×\23yu\24Â\2dvîúkÍlZ+“\ +Öül°ægô¸ž\23cL+GÆšoñÈ\24US†/ǹ—,k¤\22±ç®…iõëë\5B%\0õ¡ÎòDUþ£C7\"h´ætn¶N\11euoêÊÌ7lÖ[eÈÝvµøJôŸÒ7\15\19UÖÍ*è£eÌ“–Óö.¢…A³™ø±Z\23X\13¤¶Íu*¿7ØœÞx\7ײµã9\25BöZ1ùæXw؆Ýj|ñ\12Ø\ +¯èfvA\30ÁÂmô¥\1275}j…\1_­Ð%MNíIy¶\21OyOîN´aÅ•3Â2MŠ÷I³ÚJ\1¬w&·¬&öqä®(ŽØ%â«á¿ Kö¾5Ábµ&iåýE©p\127Åò¥½´ef¼\6¸\20\0/}ÊkEïb¬lL(n•6\27˜ßâeë®\29sÔ\\{r[6«-kO\22\28[.Â\ +\22>\19kóê•0½@\"ÛÐÙÊ+°yöä \29N ~9K d49\4Ÿw*Ã!¾œ6¿ˆÉõíÛë_ÃæWÅ­÷;¬®\0147U&p\7Á,<\1ê\13‘n˜ÿh´\9º\31Mfù#Ш«\25ÿœ\31É\3ühEÀj\3\6'ÈEÿÈ‹H•á“/ô@Ÿ\31ÀÔÑ|\20V\0288¸âÉS\11rËj€Å¤S$ŠêCn\9aXpŸø¦>,.€'œbó(`vÝUû]\21R\13r®\14ˆÆ‚àM–ƒq\20X\13yR,¶îÀ’³¢†–/‡-‘ý\31\27ÄÚ\11°½kŸõØ\17ëO³­\\˜\7°\29ŒÏŽNÅöŠ=W\127ÉÀ¹e\ +ÊFxZð÷\7Œ}+¸ð¶ÒeëÅäd˜\29‰¸\19dºÒ\9ƒmòø¨«`~$(w?Ò™\17«d|\"FÉ\30Î\6›6‰‚?|¥M6\23\6(v¾F÷iÜNm—ëV$÷“€§l¢¤T¸±\9ª¿FcâªTli¹ØëG÷-yb¦\26† :\"ùe\11Û\27°X2\22\24âè3\24œ”Ž8ÊÛæ;¨èx \29döËÑ\17¤Ø£}ùì‡\0133Z\21þ‡Y+o?Þý\15^Ì\12خ͈i¶Ë˜Áѽa_»ŒãÒ¿s\29l\8[æ½0dЄ\15ÝŒ\22¿õµG\11^mnÄ_\8`(r÷qÁ¼\0124„OÇÕq\19ºÓ\3÷Í\4€\22e¹•wa\4£T\14F¯b\19~~v\25Ô Œ95&Qåúy\ +I©\30ŸßgPÐJ\17'\30¹JKD©X$¡œ\25þ?%]à\127ŠÿD¡\28\1uL &{ÁNJ”0\18Zúú,_ÉJAµbÈ¥æ<Ì\0265\2<•ê£Kθ\\(ü,Š^»˜õÏÅ6œ?¹Ô„\22µ41K;Å·´“J\31üI ÂQ\23‹Ú\ +“\8X\25,yÖ\7'«OŒYA\18ÆX \\XÿÒ¢–\6\31¶³nöƒè’J‹AEX\21Klm\14K%°FÅà‡!{I\28Š\17\18g¸Œûà\"À=1çu¶pþ\6¡htiM.è49ÿ”ëBì?Bõ³\0²¬nÒÊ\24!°mw6ª\24^œ£¥ÍPEd>µ$‘çdb—Y\12*æ—é\"èòL¤mNÅ,\16\21ñº3lnÄ¡Ëí\13\31\25n\25Ф†\23èhA\25ò@3\24\29ç¹c\ +¢ØLo¸d\16©œ«T<Ã\21ïq7Š\3Ð)\24-hÕûK-q\21ß6§h#”Z˜ªaV¢5É¡;i\18S&™n\13ã+\17\24‰æ'¥T8A#Ê,£\25\21ùÿ‹B\0147å\8S–ç4\22 ßÁö8£Y¬¼L\ +\16†ðý\23åX°jø!¿Dp+ž°m\26弟\9âÅÜIØ¡Ú\28{'ör—'\11lEKa1%¢MÁF\24m\24>Ø^4D™ùD-„¿¤\127ûéèèC\27°»|äÁc\27-@©Ì[ß8\22â\15ü\0145U5º~v\14\14}\6OƒÝ­a~*é>fÀåÁKçsÿg˜-\14…˜¶¯\15º³åïòPñ>\29y¾;èa—B[ôHÇø¾:y†ðŽº;Ê/…<$fæ\2\0278‹~¸ãCj¿Í)\12Vuà››\7FFš±ä,¹‹ŒvÇ,e\20ý†õ\0171€\19\28jÕº®\127uâ-§5Ò\1å?¹¢÷eìmQ+fY \3'\11yŸÃ?¹”˜}¹_AM5\29À…Îñ“æ\127*\0š'±\22щÑì#–Ððü ¤zÔOºÎä#\5.\30Ëa¡ ¼\30p„_b¦ÿØËàAîàˆƒ…š¸>{ÞMº“Ò\0206\22ŒÈ{\26øìn}½ˆ'/79;ÖF³H¹e\31˜©PT\16S\4Jؾ\9\29ª\5\23_\16BM©—ŸCÀ0æû;ç\0026ª\24ú\17׻Ζ¼çBÐã\5\17s³ŠímYL±½Ÿó`œ'tƒ\22u²6\ +ùÐÒ\4\0%ñ³\15ù6v–\15–Ju\"ê©{{Ùrðso…uþA´9à\13Í!Óú)x·´\13³òWwR'\30\26Åãqy0uŒã2œ‡ö\"á·¦R\29è4\"áÂ\127+\8Žõ\1õçà\13±98HŽ*¿¢\3#`3\11ʱu¦O\31¾¶Á‘®1=l'Ö^n¿Çæ7%|ì¿Ç‰9ƒœŽœ\18Z\13¬èÑ– byD·Ó|éÊb9ÈøG]õ‡žhþ`¹M †Œ\9Î÷ýŸþÊÝl=)fvôO‡\24…5Œ†žhß_÷ÒW\8\16vù.£c¿F\27¢°t\16«Ó&ÔóÂçk¢³\21ªÊòB8µ\15\14¯i˜¿!h°Â}ž!LTiŒ?´H€4\30Ø;Ú7SA¤™\127üûçÖ~a¹ó]Hé†åFŸCnƒùA\20éÙdh…¢~?z\127‘\3\22\28“ùM+ïs˜ŽRF›Ãœ³wËÀ\5üµv²\8ç÷‡e™|’ñg\0Œ¬+EN%•ÿ½\24G1cø\3˜q\20!`¹—ÿ.æüûÇ!\0303i\21I)úïoã+Uf\16Ç»å4yú”ß\127­Å\29Kƒ\27ÏEgA“²1\27U¸°à¶D'6Šª\0220eR×>>\18ï‹©\27•J\127µ’\18¼Êrµ\3âê®›>R™\21b'/c,\26:ÇF\14¸9´!\11]R,·’Ô…ÏR®†E!éLÊý£HX>L¶6•®-$/À\2€\31U \18—®§ëæÿªõje\19ŠWÆëÐðw@6#]‹'¹§Ì8œ®“5Ù¤âZ‘—ûþÇ\9ѱtM:iT©ö°\4ïä’R\21?\7ý\31\0119©Vj}DÉ]2µ7D-Óͦ®Ò\13ûtévþ€}”të»\11.6t>Ò\9FGù\6ÝY\1.þ\4¿£|¥n$x^\31\28MBÃñ\4ê:iè\20†.uõÓ\13ö”t\27´\19ÉÅz†\0Âò\2+å¶\3íhŸjõ­f.¥ÛÔûìU‹×+E­nŒ.Bé\6E¬\21,Ȥ› 
\\†ã¦ë¼ðùÛ¡¿¶k¦\16Í&µ§¯;\13\20\6™\18J²SF\27õÿ¤&ôfÃN-;‡:´…™J˜ù\17lWÒ1ü}cf\31\12VÒ.¤\27—ìÛ2pEšªÉ<2j-‚S\9-ýr±îÉ\127!'IØâB©\20\22ÏäN“ÚWÝ)·¹Óá~ÀO÷â®6Ü«{í¢îÃeÂeãHÅ}\"ñªï¸¯›'¹°¹t\23ÆN–H'ß1\28Êœ¿sû\19€g<Õ\15Æ^®’7\15\20w¥Çõ Á\15í\8Šà»Ê\7ŸU=àxq\12+:éüÐvÎ\15\16[BÉk5VʦKqBKTz`·K\1\21VÐÓ\3\29AÊô0_\"ÞÞ^¤\1\4Ðù‘\30\16ëpb-(hu\30æ`æGpsÜ\26(ÛêÍ\7\ +³”)hˆhÃkËQÈ€¤Gê\14+`W67úûS³ã“á\31Ϊ¬\26šŸú{R\11‡[í`\1í}\25†\0127\3­¸4 F”xÜNƒw\16˜N\19êÂù«Ã6¡åv\30Îÿþ<.D\4\18¡~C@w \ +Šy‰ò2˜XêÕSçF:?=ÛýX–tb¤b<©™-:W„Ñqq\3 \25-\9ÕUÞ\19WŽ„ºaÅôB\ +ÈÈ\8Ž=Dv Ñ.ûÕ_ãçÚR «@ÓP‰‰\27\27Õ‚<\9ª<ô\15Q\18‚[\ +\24\3óeœ\\ê™Õ>úÙLa\24¤U9+-¨×\7ÍaÉ%¸Û\9UÓi<\5B0…“\1\5Û¢ƒã\6»\5q¨øµo%….¥$Çh\6a\12ù\8Cc )7,\29Ø,ª‘[\15\31ß\8·KÖÖ½¹\24ØЕ24T˜§þ\24/ð‘¦1ÛÎ\21ÔZ÷&1¾%•\19\18/iL,¼Ñü+i4j–Æ#œù2Ä4\20\6²Þ$\14\20ÏsßÓ…a·jI3\21º“¤óÔ\"ò™²MŸ„zúHB]„Ó\9FÜd£‰\23(¿)ê_=«¬rò\12Déè}*%‹Ï\24ä¤%_©#\24Í_”ê\23¬ž8¶(Püøã\13É£ØDsuqLhá£îê¾Úݨˆõ4\13ý\25÷Â,²\20'd’ñÓ¡‹?3o>­5”¦‰é;!;‚KÇM\21lË)?’\25hæLê\14ö¯,Ó#¯U/\9š¿\\\127\26Û-JØgÝúœÎ{\ +|Â?2pz-3ö\12\0210T¹–\18ëÍ$à`˜¦Õ•ë\\eô¦©\12«Çj¤½Äž¦T¹ÎÃ\30f×4Wî\30:dmèŠZwDýowšç‰7A(£ª@ï\4fÄt¢³\21ÂÂÖÓ䱎þFÛ]5\8\6Ô»!RPËU\3OéoŽ\30ÕvJž\6©ÿ@\"{d\12ÓË.å\\\18êDl¿—‰û¨\ +Ú=25\26ô\19\6–\\ìî¹b{¼\6C\6”²—nè.ܧHî†c¬GGYK«¡—Ä”­Ÿ\2\15\15o\13½QË›Éú~\15OÍlLa\22:D`2w=f—w\18^‰Á\26qš\127âÜ\8\4m±\"G\18L]Af\30æ\15œUJ\3#TB¡cš\7*\25nJ>\15=gçÌt$3OBi¾ÔæSp¶°:%ý;£\17àK\27Å\12N7c¢™ªry;¥g\17PŃ—Ëû·U+XþK‡ÜL‚m\\Í«–²SÓ.\8™¥\22\25³dMª'\11á7r;»´c±1^ƒ)\7.\31osÖãÇÿ4Rgi}\11ék>V\22ÒO\24Œd\0238ñŠŒtPOHf\16¤¶Ëy£dÕbFe\18q\8(\0265Ûj\21|¯xwRc\14°0ù@Îÿ6:T\1ht–Óƒ½C>ŽR5‡é\19—Ëã¼%2•nЂ¬–Ü\16m÷\5àî ™J‘VJË°j\0\22ìVðAîƒÑE·\28QòPr‹>Ú´¨ùÛ@þ„\127\127h´ïwø\7|(\31#u`™-‹­l;¶<¨~u‹Ê•\28këzùÛ„'óxAu9E¢ÑÛp±[½\28çía\9𣻕!“ \8âb² xN]>”\1s\6ÔÀc•@¶:O(;Ç\18ÓòîâÔP\19Çt¹\0288ìq0Û\31º\15j\ +éÏÃÁÂö×\11ˆ0(v±ªŠ\21Æ6ýU¹~“\23àI>­(\12L0ˆ¥\23\11;•/Lî$?ðY{C\7hŒ\19C\28`N¾Ôɯæ“ÄjàùÅ\16¾L§\20˜\6\"=W^ëå¥:^ÅK³è¥Ü7\3:ûÕ \4\"½tÕVÿ‚ᕯÍeS\8ªõ\4ÐŒ/€i\3@­dJ\16qØÖÑ\13–äìƒú\5æ„tB™\13®\26¸™Þ.¯‹[çs¡‚L™òKx*ý\16®(ÔqÓ\26ÄïÙ÷6\29k“K`ò\\¬Å&\9õ\2ßØ\16Çû€\28–Èš.Þ\0207.‹iÓ]ð\6\16\2ºÝ*\18ß\6ü·šN¦Š­ÇÀ«@¿±ïnùÄä=¼É  \25\\ÿ@M7ø|U\8·|(mtm»Qf\0Æ…7Í¡\22s¼ª\17émH#Ák$\20?5Õ@6ê€ÑÞ\0202‡ˆ¢ÿÙFMXJäˆò…Âf.í™?R¶Š88õæÈ|eÖ\14Á=#èžýtƒ,Ж\2÷•ð‰S~VÝ\27ªû°¾E2^\\’6Q$\28•*\18ò\4Ï\12\28>(Ã$.+ÃL\24®‰ŽÍ_†©\18bY˜¶\22Ò;\18\0123øÒTxÈ•£.Œmj¢{\8\9¿„âL\18’ùßlFß?{+rWÊ\7·qÂf“’­tâÊi\23Öf‚b\21»œ\ +<çâ\14\14\25i\7œyé­LR·‘†×Yü`à\23\1BO^\18~ŒMeÆr,\22èR@ò!z“ “š\20jœ$æ\12œ£±á\2\27{\12Š©¥š\17ñÞªÄleÀÇÇ9¡6±×@`\0222q¡ å0ŽM2L\23B\7\14H¤šhX\4\15­Í\12­ZžI§*ŒÅò``‚n4+0‡‡×+ù\28u=W˜TM캚–3Yk»\21„È\8¶b^<*‚ˆz<ö¨|ã3\3\3xX„!\11°N\25\4\29D‘}\1ž)våhžÄ\20/y¶ñ\12\7\16-`\2hÐ5Npè|àžc¾\16Åð¤2£þQ\3¤ÙmÝ\"µ!_fñ¾\15ž»Ø\23“\31£3 VóX\18\\D\127\ +\15¦ú.žbåú\20çˆÈ–f7\29œ #û•\16Ï\27\29=ñêF[ý‘¼b)Ò\ +šÔÄ2\30ÐlcsÜèVÍFá\31ªäƒýZBÍ<}\8~\127A /øÃý?Öæ©RïÓ\27*\11Î\15²þ]r \13S»Z_Wk_;½ÂYÌ<Ó\13\0^k\4\0264‚â•Ûî¾Éì=5è‹Py•Ðù–„Cª¦+jÚ±3¡`Õ°Ûæ¥\ +þ¥qø­‹âï\ +ìø€¶ÖšøÈƵ\"\25s2!öûk¯µLÔN’ñÞñ±w¥0Om+\0087ìÝõC;Íí©\0ˆÝ{w/q\26þb\ +\0277ȵhàÞµ\28I{Ö\17£|ã'\5aÁK%¡öµC\27\\»7\29,Òºþ[\31ŒBÙq?Š\29\19>8.\24bØnõ/{Öøb½e7é¢G–Mè€ÜêQ’\1ºÑ\25ˆpº\0FMhÚTP3BÛ”üYÓn\15û¢<úrù,dF¡SeG7Š²W \17òôþÌÊrÎ~t•S=¨\22\3-ßÉ\5± ÃâÈ¥{ú\22„\28\6\23!¬—T”§Û\24ïéú£%bo3’H׳ÙÀY–KÜœó î\23\29ü⓪ÍÞð\4jß”ÁœÁË ÿ‘ÁÃ)û\28çŽA]\9\0ºŸ@5÷½aÈ^·hË\7¬ÿð\25›•ðð\7át‡G«7PŸ:ŸŸ‚µG\6T/)l`Àš0ôŽÓfh0f0å8\26sru¨]\7¨””¡GX\24\2ʘ¨#\18\31ÊŒM* ¶ý™TˆÊR~|#,êÖÈ\16\20qu2úÐÖ\"\5\ +·8©?¹\15©1\127ø{=}ì×ü\20·_-ʼg\13\"Í^è;Ú›Vá~{뇓w\29쪒\27+â–¬UÃp#\6æ\1¿\28ï}Ñ©ì¾ÑÙ‹»9ìŠóS\1§[!÷Â;ÓÞß„û\3\2\25 ‰Œ\31å®#³Žø¡\0\3þ6dV¯šCR\127ï\20ì·”Ýêå•çAe‡ÚªøÇó\3Rœ:\ +•\25½SaðèF\26ÝaH²êC{¤\"\8²d7Yt\14¨–GŠ\0065¢¦{ÿÉ—\13\5I‡sO»\7Þ\22 ó\9yhÌy 
d®”Wgeõ\22Ñ[-œÁÍqú\6¥COÎŽþ\11þÓÆ\17J‡¿qÔIÔjwãë´\5ì([¥Y£uŸÒ¤Ñ^+\16â\15\\ÇÙïN\2·US&·`ªar\15ý\7\25¾hùt#:[x=ãe®X—\5‘:áüší¾±Ê‰Ý*'ˆÓJW%B±)med'¢õJìo½\18öÎ\0d\31\12rYéa_Z\12rQ‡]ä•wë–Øߺ%ðS\6k·\0‹Ø³I\0004-æ\8v*͈ÅB\9„21l\15Qžf©\7àÂ3¡æß²:Ô;”á\6ÄnöŽ\25÷&{½Ì\5ݨųbIÁ•\29¹ê£sà1“\2•L\2/5ÐZ²±‹ÂN\5©Æ=‹v¹\13/¶†n\28ú¶[K“K«Ýܳ:ƒÝz\12O€\5Ø^\ +´:\3\14Ð\22|ÆÁË’!=l\27Ð\6á]TžÙ\27\18Õ\2o\4N+\16J‚\2†Òß±\13ì¨hêv\23B> ñ\24\3FC¥\27[ÏÛì3£ß¬\18‚Ö\31n;{}¿Cžû\27\22'm`\3ï7\19ô~ä\127–AÛm·âÑÈ«9%Lg\7-u-¥Ù0h\ +\7\22ÌÉ¥ñ\4õr¨h2æ*ˆ\15yz\"é¿÷>j\17éÇ\29½Ž\2½„‰ÆX~¿Ù-¯O\ +z¬±\29\23\23ÈuI¡V\12â\127pÝíN‰œ\17 eàéÉ\11‡<¼€ê+Â\1e—\ +\29Y dÇ–þv\24b\ +3\"Á”Ô1%P6Gi–¡ß-<¿cq]\7\0mD–]iÛèOŠÞ.¼Â«è\14kÛ\127\\€ã?\24~äh¯ÿè=\"2Ý`È\27]¦?&ÒÏfœS2fÅ9y\18ït+¨\12íؽ©˜ÉŠ(zO\23/ν~¯¼¦w4¿*\0141úÝÖÓ÷ïÿÇÑ¢íùnÆÙ]³Í¦*\21øÐæ`Qøý‘­\26ìhê\31\0Ö¼¾ƒªí\22ß\31OÍ2\16‡–Õþ\22Œß\11aXŸoõÅ>¨%\3löB\18¯\26âá'h‹Â5pˆ¢Ë½õÕªB\6(}\21~P'¹\28h¶ß½\15y ‡G5“î‘\8z0~C\5‰\23Éë‡u¬Þ´\21¢buÇ’Yo@ݧ±\\\27MM«]%\30\0024“º\26%¬o\21¬”XûD\27NSA¢ÚËa@\27Pe\8n¬\15˜„&bPÛÛúUôí\8g›­'‡\12\14¹ôlâµþÑð©æ#Œ\1*÷·aÒ]\9€$Mt8—rš`\28c ÇܾìRÆÒÜxXÂðã¥o!j¹f.Û›Á[Ž\\Z¶Ô\15xYñºcu\3¹81â–\2–ƒonaiÖÙ\22·ap!\11\13²âUxööÓ°œ\2ùÚðÌùyÍ£\26È\ +š{/w—Ù˜öá•\7›U9l+cÃÑ=„²»f0\2,ûk^=J@~âêFÀi\7¿†+Ñظüàî+Âêü«'\4\27Ä\16./Úƒ\9\19'˃\25]CmÎdõ\19¢»\14.W\3:$#`ƒÉØ\0ˆP»ís{rcÇ›\3pøÇùê\16T„òoÏ_»»\15Ôö½ [Š§Êzö•ÑB\11TüóoÀXÆìM$e稟jÐS×5\13\5–¦÷±\2ÍÀUŽ1«©\22Î7i}6\8]î™—w¿xÞÙÖî>YEÅ>5,š©7ïãÎ{÷‚·ö\25Š]Ní‡ÓÀQ6…‹\28h‘,\\õåÜùín\127\27M\20„\22¿ëŽ\15ò£4*IóF5.ÙXÑn=N»\31âôÿ¦=ké[5ÏOfüg#~ù\6C\6«S̤¨!Ã\17¨\9²ólfÑjd”…,ì~>Û—u³]k%Xt\9`¸—\2Š®*Žjå?k‚ûbÍ4ûZд\21ûÖ;‚Ú®ÚóÖûùÎ’]½¬C¡\9Šmq9ª_»s9\00285\11”#ë7´$\23JE†ü|­²f\2YCëÅGíš;a]¼­­Khìщ§~[\0276aXÅ÷·\\çîG$å‚\12GŠE»Ò\9ë\22Ú‘ÚY\8UÊcJõq\ +\25€Ñ®Ñ_¹\19âÍYktð\30ü\2ÏÙÎÚñòU¬…ô·»B\30JJe†Ð\2\18\1Ô X\26ðù†‘d\17ìJ~•XîÈÞµ;h\7!\15Æ`\27LÙÚ±¢E奅îäE–€X7\9m\8{…e°Ýrs»eäpµL·lÙn\7ÃP\0275y¶¬O^!œËÿĽÎ;-ñ=®\13h\6ß-À¥£î\8¥ï\21¥ÍVaÇ\5ýk\ +\22Ú§éoi,‡Òº\26±,\21vB\21¶Já\28\11ð‰ï¨\20Ú5½øB\12\19ÏDŒ;\17\19uø¾Y[vØ€Ö·=¼×ÅŽJB¡ˆ))÷óÀ3#®0å\29sŸ\28W¡^5\1‚¯Sú?ûΆQÄÝÄé\29+xr·÷\25’9·wHÊô\6\6\6´üÛ¤¸c¾ƒ¦1\1Ð\1NÝouNü™j>Ó–Ñð\30ÜÊv´\0099ú/O š)*?vºëU¾ºÆ,þº\27÷»\1µ³,@k©#\\‡Ñ§\23 R7Ç\1¾A\3o7(zØc«de<‘\5\12¦Ø5ÀÉ—Êt«o¸`ñ@ôšïéûÿôÿ<ªEiÂa¬þ©Èß\19fóvä\13äªÈ,l°\11#z\\4ç~ð\23­ïËáKËóøn|\0Ð`„v\7-º0†o¦ðÓ~¨5G§!8ZæD\18&ýx\13\21¸ø(þþïìŽÏ馆¶Öζ[tA.J~\4æ\ +\29¹åŽŽküæÚ%\15lÍ„ú&´›q„>²ž¬Ý4ð=s‘\0ZËäì–äÍ]\13úÛÙeÊ唬 ëíh^u‰\127óåÕi÷ý—GÕwÐd\20\12ÊßnK¿.…[Ò÷_„}]LÈÅX{v£?ñŽ\23BáǯS÷ñ\11;Mõ/¨ðrtñ—;‚§áÓHýê^ú«\11R«ÙõË|ÀÅ/¡d¿ºòÏǯâúñëº\30õǯþ¤_\23u·ûÅñðK7~9\28¹¿ú@\0286\6~±ÛoÍ\27.¤äÅŒó¯jP²á\2¡â×ðSõ”\2ªc(ojàÐè{~ågÕ_C}'|\25ø«ÑÔÿÍѵ\14•_\16ü~\21“~óÇ/öª_ëUµ¬©Á‰ª\21ÔóWhô±iÓ.þëPÍűêϺ쎥Bþ|üî˜Ä¿õA¿ËûÇo¡Êò|~ü.†Ÿ\31õïF{æoá®ÕÇïzTà¤ßK)Tÿo\8Å¿[åmÒÇo•\127t*¤8ºŸú§ÖkZ~~\28¼¾\31Ýha¯£;M\31GÛÍ\4/\31‡u/\31'\4ÞŽL\31:º¥\29íjÃR¢m$ý¶)Hߣbuë’\27‰Ûå\\?Ž7eé¸VW­öãzëK¹«šs5ÁHÀO¥‚-z£ëñ¨•U­<ÿ$ãù§\22îqú¹*_s\19ZrÜ ÷TÇMÅÝ´‹i\30·\ +nèƒ9\"\28\19ó\0«ò8¬2‡´<Å\16È ´é¸\17\0297ͱã†J_\21¹ÒÞÛFzt%$\18Ѭ[\13ùð8߰ΩHÊr\18,\ +)7_¬\13f§\6g\18\"w˜`¥d\8$\31V/MŒUÍ\31…\ +íogõŸ¶`j8éºwÔPv\14ë\7%‚#àè…»\11—4\28\1¡œ\1Xo!\27\30\29\ +ZÔ(ÜÓ\30ˆ™/…Ó\3=ŠJø?Ú¥¶åM!Ô?\6«²üÝQÂ{n{Gu\23.vÜmÍì°.Ûã}¹;\30ÕM˜\12€Ï~˜Þd Qxœn«ÝbËà0бv<úzÀe1\29ú\12vÎã„8ÍQÁ\14}<„ªª…GË\27 w)$nžxêÉ)(ö­c<É.ÇÞñÐÂ?êÇDí-¥4J©[æñ\9íê(TµN\1µÞ’\2Gñüh\14µ¾Ðµý\24»\0233sD•öÞþ\3%ñJ«£Á£YŽ ¤\14\13öå0¦\26ö)¦\12WgˆÊ‡PL ð\1çIôS¨•¢Ÿ0ü\29ã³ß³\ +³cÔÝ”Õ)X{ØÍO¨,ãØ3Kˆr\17ãèù0Žg´÷g¸¾¡ê\26Ï°Ü6.è \15k\ 
+í»Œ\127Áó¹Z<%­½±„#Rm\1ð…¯÷fؘŸî(^\12:\\tGkóÍÇË\12*‡<à¥JÀ¹ÉëÔñj_B^(kQøFZ\6óUE]IŽòE\19™,V\2£b·\18ìçàòtÅë“gã€;6Œ—ë¿÷?Í?¶ÍóÖöœ{›ŸOlíÀüš“ŒœÎBDØh8·ØF@K”âÂxë»ÔHl×zÛ±¢\21Ë\11þtz˜×³ø`ÿö÷\0140“¤‹ŠÎ\21»\12r‰quÝPBÕf–\25\12j\7—´G\3lŒ-\"xÇk´òÈLhò\6][m7G¥m£—û†,±RÌ\20¼rHmhb:6\24$ÚúL”H—σ‡;Ši=7!5Ëm<\11\1\26€í\20¨\0i?\18²+nÙ\0156ë\0PsU\7¡ìð“ß\1‡\23%x\"n'Ì\17\16æ‘ÓmƒOdœà€÷ÇÔ\9²‹’p\28ê’y@µ&«‰8\7WÍ\2ßßÛÉàì¡E¿\8Sû\127vœÕG\ +ãà\22œ\25)k\26qð\27ì\31§ƒae{ÕúÔ©B>x¼4¢ºyÊé‘sÃøÐÍ®ƒ¼°lˆù0\27\0239ú‘4#\23éø¸\0Ýÿ\6D\9NãpΓ¬\13\4`zæ‘Í%\31è\27¢\12ؽTåÅÖ“‰L4N5jƒb*…'ˆb@\11Ý\17ÞG%¬[,Ì0^˜óᤣ\5_\13š¤/™Én\11Ä\7¤¸\3%\27\27éXZØSgŽ[8à€—jÌ`&\19ºRÞp4¬\7Š>ñ%\1f¯Õ¡£«¥ìà9\0180öî\24&8Æ…Á6ßÜVÀœžM\20»ÅÈÏvtdä€e§.\7²¶‚U\"\28Ù©¼(‚îۅ϶®‚Ã&E‰0þ\25<ÃÌ?ÅÇņ9Ôb­Ï\2Ó*\\;Æ‹\25:#–\\Ù9>½\28³bŠÃº\31\14ë~8¬ôá@‚ç¡°¬ïáÈöjl öȺ\26\12VgXíñBÂ\12W\0\23”ÃúœG—So\13£‚2á“\18ë\"þ9\8+2óä`Ðan˜Ã†;í¡\31bå#\20\0.ÄÖa›\21G4Yí°ÝH\1ï V½¤A¶mçÃúj(\9Æ\26%\12Ü\1\"íD\25‡@dòÇØŸ·\11×ØÃê!È\23=Wã\25\29²ÔñÏäâéÀ\ +±úï \21«ªZš\ +Qh“\24Û¤\"ÓY½ˆö’£ÐÈÃÓñqdòÀqÂbâ‘\31ÍÞEÌS”Jà%È@Ùò‹Éaš³Ú°\23\30º|+VÁ¿ô™ T>®\21wúøÓÕ×òO§\27µœ ¿\14sBÇæS`\25*»]ýQ+pÿøS\\\21rÕ)÷Gw§?Åí£øÓ\127üé»BÎ5]äNƒ\ +@u¼Š,ý\27F¥¾\127œÿ\20\15\21õÙ}ü)5’\127êÏ$ïЫ˜á>ðW'ß\31ë\2Sˆ.ò\127²ÂÎ?\24ZѦô\7=\127ŠQ!º…þÑÅ·þƒr«?Ú¿ÿ¬Å]_´\"»öÇÇqógÕ'\21/•¸\9Ò¢BíH¥\18sƒúSðE\127´·þ9ýg÷û¿úzÈ=a…Fž[/·ÕVùýŸ\26ÿýŸN\26EŽrÐW¨\4#Qg\3ßÿÐË\5ZDý\4/FHôgìÛ\12&r\11ߥ´‰´¦k`n­×ÕßÞå\29¼\11L5ÅN\1[k6½Ö¹b³­ã‡ÍÅPgº%{¶+\17[¥«IÿÎ\17\9¸Â¹%ˆ²^¨BxNð~™Ì|˾{\6\15¾gC3\25jHu¨æàÑ5\24=$ÜF\19øÓBÞqŠ×‡¥Št±#\29ùƒµ“\12_¹Þø®?ºpˆTï’´â”ölÜÄ€Ÿ¨@¥én–ZÕ¾û€\7c…˜ÇÜ1\6Æ»)d<}˜-˘<\1nÔ›ØI>·V_S¹ã™ \9íÊHÁ(žAptí\15i‚çÉ–·{z5Ð~´ÕkÞéÏÉ\31xÚ´¼å\3\21¤¢=‡qc—×42‡#n¨\30\15y\"„§Ý‚\18¹W©Éy(£ãÑÉ\13¨6»îÁzÇ…Õ\8\0bC˜6Éï/øb¾¿:.\21\2Z³\ +ÒTùºÚô‡ÿ@_þþB[è÷Wymì»bQK€Z¾n¨ó†dÈä“ï†k5¢Xö3Ð%ŠuòÕ`.Ç^:\0238g°fÀà}e:É÷W}{RRá:ò,4\28ßð\5<\9c\20€ýÛá:S{CÝÿr„îx9`¦É7l%Sé\13¶\ +\26èÐ\18Ý'T]§ÍídÿÓ]\4%à\ +vÝ5h\26€Š3µâûëÞŸqË»=\13-¾\27\0213ôÇßï\24\ +á{ëû‚\31Íù‚\2çDÄ£vß=x_úþj‘ÌVj^\30T\19‚ØÄ=ˆ{œ1ŸCšŠº\30ÓzÞ©ú±4¡¥a§Ç)'ß\\ù#³]èOàK\30(âzC'‹Ú·+ütCñÉÇ2\6CEª¬]×д3µ‰Í\23ÀŒVÒH®'3iüáù4ú;ÇŽ¹3fJ‹Éô.yä-ÌÞ­»uï\127¬\21¢ÐŽ†‰Ç\"äˆ@\11Î\\™s|ˆ\25ºýÐ$*[‚¤6“#òŸàÈgÿq²H9P?Þ\18F‹…\17Ébý\18®çÙ;úÍã”­H\0226\"I\30d>(ps[6Ëè{´-Kúë\16l\14\25Òu\22b†r^ÄÆ\16¤M0y±Œš`?Þ6­‡¯\29\15êñª8M\29½Õ :ûý5y=Mž¼»\15Žê§”ý^)Péºì¯Ý=§¼x'Ä÷‰˜\16ßo\28TÐeÓÉ«d\26\17Á'Ý”»\27\1\\·vz/ÿÂ\0036µ+Ý0mÞŠ\20é\13NÙBïàOé‚„\":…bLi¦Ñù\19]?í]ãÔ»û\17\1Ô>W¶{ñN°{9e8rŠaqH½æˆÍ¢³¬¾ýûëÂk„\6l¾ST=ãfýËù©âö†î¨Y‰ÿExþÏ¥¿l¶äŽ`Ë%P°ðl›…Ûyó›Ù©¿þ\29\ +_Ë©£ØfÉëÕvÿ”jñn–¥ Õ°%÷éòi\19 \4-çQ\31PÙ”¨œjqö\5\20ÊÞÚëiYBýŽ\0Ó|ÿ©Q¾˜#ÿÿžÎ-ÉQ\28ˆ¢[©\21L„Á\24{9î1åvC\1!d\"XöPÿµ±É{®z~¸z?\18)•z¥Ô¼«)ýozÛÐN\21I©Ñ¬E\5…åé0C|on¾s·Át„_F\23RZ\25t¯ùºyáDOŸjv\0ÞhWs>‘n6U²h<÷o±Ä\19,}\29ÜÏW­šbŸUPÝ\26¬þôúUk¤Xi¾B\5ׂGx¶Œ9k§?³^Ö\9Ÿ„eSþkˆ\18€\23×dºêŒ\0\ +zHpÕ\12æ¥Ú®Ú_\20MNT IÐB—ÿ>+Ž\21\27†&‹Pèö™EÒt—ø,¼ÐÛÏ\\ð\11û\3Ž‰äômÉéÛK$ì1ÊX„ú¶\8%WÚ¶E§ï4 ö}'®Tð\16äH†\3·Ø>8`>ÁÂê\21>¢j{\ +Ãê$S\13aº&.ær¾Ž6©£Çƒ\3Rú\24÷CøzÑÈõ\24ôRÃ8cí³Êpù›&¿9†Z\25<;F¾ôô˜†øõ­]rÁù\0151µ¦bw±q!Ï•>I[g–ð\15\0062¼¥¿¿2}Ýà\29i>Û\1276Ë‘6\19†ömôM´¼˜\26.#\5Zêÿ[\\«\5Æ”ª\28‘ª\28‘ÒˉI0´A\11\0292¸—Ô\11|\24.æ<áõ/U0ûO\13$õ!C|\0255ÒVYi8,¦Šnù\17ž\12¶[¾;`v'ù»\16Tµ@A&\30Ø!ðÛQI\17“zX¸¦Àr¥J”_·»\29^È]©N9í÷IŠNð-\0016Úƒ‰Ü‘lpS8EÈ §êP\24\6iùÿ×?:‚\7Ä\19thÌ>XʼnPzõlÁ^}ÅA¼WšV\5(6\9<©Õog·\7n·…µù¢7IŸvض«Úˆ\31Í\12ÔÛ*®V\3Ÿ¤¨¥Á^:\\*ÃÒ\0ºH´]J•NŽ³G+­ˆÓb´¤½\0190A’ƒÚk1’¦{tp¤ck\28{û2¬®Þæ9Ïqv«=ιæ’+\29xû\11ëg\24?N\8MZó‚ ¹=ð×5-bJˈ’6áéTñ\27t[⧜\"…¢\13~ð¬Y{¹Ï¬ÁËÀQžŸ¢\3l?¥»ë\26g8ë&ºâþ\ +2–_Þl\12Ã\26Ár“šðŽz”þA”Ûƒ\0204ã4Œ†/ÃbX\13$^çÅ\26Ì¢\8:\16¯ôä\24\19M•÷q%èCòFyhË) 
E·V\24úMb²\28ôêC\0248#å\16ëà\16ë[¨3«\1\23-¾\6¿)'ù6´¨b¥dÄå˜ÎOi%‹\21f›ÿàz¥¬Zj*Ÿâ§¥\30Ÿoôb{Tæ\9=Ÿ½N±È:ت_\\ئ\14Û|\23ñŸÚy\11м$,R+\23¨MY5íŠxZ©\8GuÌòÔÚ£üô·‹Ï@GQ~ëC¡þè£Q²p9\26àgŒ,@•±\23\23ÒñßxŠ\13r˜ç\14¢\8\6\7ˆÍý\127¬”%æo;¹Œü¬ñõÀ¢<õdk|;²\30‘£\"/\13¿\1“ä³2N¯-÷2´Ú0VÌ\5\0273¼2.âòÒnH\18\29ŠÜåø¡#¼°Œ[OÒza*@rn\20ëúÆq÷e‚0‰· !BJ\20[\18`fWºérÿRv1·ƒ 7)ú”¶Ä;iM7èÔ¯4Ò\22%¿A\ +~±\6šòÔUj¡**Âù\2#\1\\Úi¸m\23ú¸\2åÁ~dЩ©LOwÌËôZp\20)'\31µU!\22Ñob[\11${æ|­'W–²ŠÏvËõp\27ž:Ûõ\4›Pó§2±K\0055\14\ +á°p‡\127\18K?\22—ÂaÉ‹6ŸÂ\13£|¥\13*P{®\1\28÷»þ\20rä\13ö (s±2ë`½@×éŒCE8€ùÈ\13-È\25«j˜\29o~\26\0194Í<Â$}­\30é\17¡\11J\5d;Q<= í\31“\25«o™î¢-¾,JþÔÛ8Ú¬\"0rI´Ð\23¶ë:é«Oµý³ž“Ð<1êgæW¹€#?\8qÒKŸä¨WšmOÿîk\17ˆN¼jI\25õ¯Q' bW]ß\01928dP®zÓ4œ”•º\15§ÿé3ò×lA”\23)4\4Ž5¥\22\15›Ÿ\5ôxÍ\4JüeÊÞgrÚo ØI£V7‹¶eJÒ+%´€\11ݸ¿\31Íi§’ìå°_„ü-¤\ +\23FºJ 7v5Óo\23èQšâU°`Z wÕ^õó†øÏ?Tî dƒ:¥\\\7WÊÐÐïd*ôàò\12½\8šF{/ž[\17”Ò JÃ/ž5E\26”Áv \\Õ\9™ä™7bjJ\20€x\12PÍn8*R¢tµ,É&!\17Lä~:¤JXu\3…\27…X$†Ö\26\21Ôfµ\24옽2ÒŽ4×È<«\24²­¶îô\\\17¦¿oˆ)‰ÿ(‰M\22A¼b›‰0Þí—\7{eÐ6pt]5ú_3\0074\13Â7Ò¿!cÄh\14M9¶„ÛQÙßô\6\16É7cO–Ó\24\14•­êFœ®–Z\31Ý’2!2Ø.ùÓ—î/:n«±Ñ½,‘ÉE†ÇGÍðx1R5,*ì\23Æï[\23M0cÓË/B5ïqÂUã$¦Zu\22+Íe]%\30½‹b®D‡ó\27“v±4¤È&#²\16æNç‹.„\25\16\5¥y¢&¸îçßµ˜ÐŠŸ\127Z\0085ž\21h\18\21Âq&7³ÛL¦zA3»Í\4B[XT¦9ˆ§‘%ÚIì½@\27Û½›5±5\18))´Ì\21\15\0094\"O+CfÎF\12%’IåW’\9˜B¨Åù ùŸË\0200»\0243’\12¦ú3H)É|ÿ\2Å\23µ,>Y$AƒA¤Q€šDÖ\26\1\12l1Òâ\ +ðÚé9©'\14Ô!Ñ\3%â\15ƒ\18\23»/ì²)\6¬}6ðü\"\22ŸŠ¬ÓÏÿÈÒ\12âÕ4§QDÍpÙ\22ò”ª‘¦N‘(ª\21e=Í-¤BS\30\9&M’ ˜/»Ù©Y\24Äš\13izZZ%É`Î\21ˆë|\21§æM\0307œ\14—M°sˆ\3ê)Þb\"\7ôqý§h°\0½Bf³á\25p-\2‚Y\18RH§×\6„íT/V°³v\5œ0•Ê\23D(9×êý§\1Ý?]Õ\1U_`A\22“£¥4À҉͓\0079]\\ñm·<\20SŒ®' É0;ù_ÌÄŠ:@è–†ÉoYˆ“)Cµ¸äu·lÔýRm‰P\27U§É\127±É\12.@\27/…Å\"\30¹­WX\"±ˆIÙœM\127”m\127ª.ק’ZŘb@üàÅéÉ¢„š#zÿˆ\12)d?Ù’•\28â-C[w*t…£Ï%Ò¥¼5+Õ[¯T®\12Oæ«„\19\22¨Ê5Ô\23³Ü\ +‰\\›‘*XÇNœ£\0Ý\13èž±šÁë¯äQœ\25]Q²—XF\9\14e\11~m¤\19\"\3Òük4E]ã\27sv:3\19Æ*¹yµ­fÊX5”\\°™æ]gÓR\28•ì}¥LufH5‚Ù%È;Õ“éÄ2Å#ÙÈðØJ\12Ò-×£¦[Š\13c\1ûÛ\16’±V\7”LTÈx\26º+Û\22±zÌ›\21¦%Y\24€|‹Ù¦{­gp\25ÔOi™“ˆgëbŸ5ÓÝ}Uàv½(‹û€Lgþ\2É7²®(žJ—\27e,W´\25\13[åM”\15YFÍ—UÌ\2GgѶé6åä¾Ag¶n¶©\16\18•_\5\28x«y$i2®”\13Š´©•\16\17`\27aÁ¶;,\23]xCðÁŒ63Ñi…MÒ..t­-“-\23\6bó@¸wÛÆ,³mN….¿!$uõ\6¿ÓÈTYÚM}Ô\25¤æ¶\ +n~SÕû¿vò\16…\13bð‹\288Î\28#ý/¡Þ)1˜÷\"ït\18r”,vO={ÅÐÙ\11c\6˜\0014«Xa\22C\11Ü>ðí\24p\20²î%&9–í²å‹ä‚¨Dµ\7ʳÓoj\23go\24M\18’é”{0«.\31PÖã\11ÄÐÝ;ÃÒÂò\4œè¥{á\25v\9›2JVë\25öq\23Ý%o\11¬÷\29Z'v2?ðÏc\0\127¦@\25A\13·Ìä\0#ï´Åtû\22]\13à×4L>\ +)Þ*Ø\13aDnd˵.Ö†©j7]–\0073âîéNææ(H\31\ +|–`gWbi$Ìþ9]\127\0075&΋@\"Ô®y\14êè,$\12+\8…\8žq?]Ó'SÑ^dµ=Åf48NŽ6…ãíÙž\2¹jŒ„òCùIŒN\8ŽÚ99©8yï%G\12ý,¢uîz;#PXÕ¸û\23k•ªjøC:äQC›Ë\\v¸\16ÇLW°4Gû„G‡›\22éxbÖt\8‘\17˜°£L\26‡W–X=Ç-\30wÂáÞNN`Š˜`nçΦ\29\18É2Ú\14ø+µ†]W\\aU\14X\21å1ãJ—8L%D²vò¸áÌ\24V»üEôƒ\1yh.‚\0140|,0•FDNýE ÓÅT³\28\5œ`q=\29nܺÌË4õj/¦»ãôZ\15)ÞOgçt~NÆ•’ ;°Tð‰¹ÃÏ\0119=Zøÿ,ÀHN¤ã«£ôÁ`øe@G9Ù¨¨:gAI'\21ë\0120\30e-¦+k1;?ªø¾@šùl\2\28Û9‚\ +föŒ,Tv\21«\31WMºsb±¢sçz|àë\3—\15Ü@²`¬D†îe´\"½\3\0\1TŽ{)„E/\8ÆfGx)ýOݯ†y\5Òc΃ñ‰)ç,„>J\18\30‚§\27]N´º,PÓ³rj´[g¨–›‹\3³Ñùᛀðé¬\1­\0252Ið³\7Ç8ͨªB.çXcàêäpÕЊ\11Á·»ZšäêÅ\16Ý»kÄ™ÉþÒô&“\9éÚ‰baö\18éC*\22ÿ ptÏ«aÉ\9\11½ãbF–÷ù\11dv9™B*A$Ê뫦§^ßÈ’ÿß­{w,ÙuïP?1ƒÿ\27X\22ÁÕÝî\29¼º¡ d\0Ѓ7\13ýÎJ¦yÃy´oÚM&Äæý›\9V¥ý;2Oü®Hô;\14·Ÿÿ«~ÉðÚ¯þ÷Ûýqk\30¿nõ£»=¼<¢ñà\31íGÇšá\3Á\127·¥Þm;dÖcó2H·ö!©\1ÿ\25QÆ« \27^IÔµ\16GNuÿ\5\8-š8\8\17%T?`Æ“­\25ËQB\28\3‘\23rÓ®\21Ù0ÏT`úXs$ Þ‰\4X5{0äeV'\0305\4²y]âÑK,TÅ4ú\13wa\13plŠÑ]ü«\3=BÛ\127…\2b\1Ê\16(ã¼ã¢ÄB´C¤Ú‚¥A¹Ï„÷,(Üó×Í›‰;>©q*©íw»‚_²‡F\19P¼0~ƒ½¢Z„\0289ŠÆÊl²½à\5q;ž\5l€“|,¬p\18f\0138¶«c 
Ï+';ÅdÃŽ{3¶Ýðù\9gùI\5ì\6ÑqŒ`/)2ã`þÆÇX\14–••Ïães¢<\12\28¥é\\‰vÛdh\23¸þ²ç\25\9*A]áÙû{°™¶â¤‘¸ÚõËŽ®ûúZ{‹¥‹æQK˜|ÜGþ^jÂJ!¢ûm¬Äê<Ô\"T^„§yIJµòˆSÛc*.æØØi6Єƒy(3qn:uù¨Šˆî±QÕ‹ûÊò\1€¾'P¶š,\20PŒ\12ÿ‹ÿ)½,Ê}Ük²á\24ž<ð¹Óm£¨](@äCÝr˜éJ9u}+ÒW=\16«Õ]SPÖ\19[¨\15äeqÁ‚ìÏÈ+â\21'\26#±}Á\0O±š…OR“ª!e¶µ\30É{5‚’'©½…\4U?4oF\28S3æþ·pfvã:Ãy-€0$²×*\5 Î‚õ@~³’{d\13_‚ÏSh\1.vfC_þ’O?\16Tí\ +úµ¯l²ªZë'€oÅÿS»À\22šÃžÛdðéH¢¹ÄݽF-¨\25:\127ìšI\31lÉ=$\\à'¾ÑIŠGÄä\31ŠŽyb)˜›Ï?‘O\19¸|–\2Tšë\31ùjÐõEwf\13ìá},Uᦼk\2yìÞ\4\0z+FÐëZýtüéˆ\6¢\17”aTÝÎ\26„ÌÕÄu¿Ä\6\9)\17$ö\11\13«O\15\24ÜÇ\17k5µø®`?±¹€\5'%Ûx²l\30\16\1Ø¢‡»ÙéªÕ„òð6‹€ˆ±LX³ÇÅæ£R¸võá‹Õ´ÇÅ®°LZébáQ&õsUgq90½§õ¸.s¶Íã­î\4qVßi\30ß·æ©Éñ)Òú\20[öìꇌ»ÈiûìT/OVymî25Méw¬d&6hžž%ž¬XàÍLýôNÁSÂã\0¢\13wÉ`x´›ò¬P¶0mÊI£J.¬9>­æñ,+ür;\9eÑ\22‹(ÎSÓ9\6í,'\21ã©Ùò©iâ)«\12X«gÓßž­²|\23·\\?5\16ŸÌ\0ú\11\0093É®É\15ÓÁ\3ëB²´³L\22•„µè\31\8ª‹?Íô‰õ\22U\127†ÒÑŸAmÿôNÜ3ìê\15O¯éŸ5\"AX6\9*üé\16¶¼\12(•@s’êqQmÁE\13\31Ô§q‹á\27\11 $?ÿ¹=k‘Ov\28ØpøùW9zuÙ–C\1^š\4q‘ˆ_=_O\\€K\16ckp\24ˆ­\7üu{–åX\1¦\8Ü\14[.cSzrŠ;)®`Ü°ïyɸœ\24_„û­\12\8‰&fušXw‹LM£˜ˆh‚÷\0võt\25‰š@Ÿ^Ämž¬a?Ñ‹yš\20+ÄŽ]SŒ\28¨\18ꣂÉx–¥Ö§LuªÙ\27\25OÍ\5š\5Ÿ^q}zuõ©\9<ø\127µIM6¬Ýs\\[ì\"‘25 Û§ó)H^5ÁUÏ|wwÉ’±Åή­Æ‚\28³}D\3åÆb…Lð!÷ƒ{§[f7HÞ+ÇßñØ\15§¤\22zf÷£ìrd¦j¹KjÅÜ\9\127ÑNù:ñ}«x[CñvRÜÕEžâ\"z…Øã†KÄ%‹oÁœ1sí\127EÝÑâyºª`TžˆaTŠPKBz\22 \2Ÿ\"=ÏÓ{\21O\4$!¨Îû\11“†C0\22þ\26Y\0\15ñ\ +ϳ\17;üDnÙ\28€f<ÙƒÀÜ”áJ%½Ð–y^(ˆÉüù²êýg™Õ‰É®Ÿ|h…+±L(ËNc\\{M0¨á³¦ƒª‰\127þÏíçÿtš4›^dN\4­fóL†\24Ͼéºt\19;­ìX\17,ÜZAõï¾{¨ÉXÈyʧ\17)éÙ,[ñQ“)ºª¤¿‹\29Ó¯\16±é¡\9³\23m$¨æcu×\0025‰øGå7Ü\5Æ\6RG¬\11\4‘ˆ\26Ó2gGŸÃ.ôÝ<ÛrßIVÝMù+\5-˜Q<\6ÿGb¤\5¬I]Yeã\127\17!SÖYPW´fQáúJö@f\0238”ÞkÇD®V®°Õ=ü{\2X¥÷*e\1K\1õV ÅX‘\8•\21ñ\19â\1©—Ͷjs…n×Í;“.€æ.•X\13\9ʽ\17U\17XØÿÅѶš< þà¤\\\ +ÍAîÔ\25©2vözO\17½WâØi>S\1®\2è°â\\Ô\16’\31h.òö¸õ°ý2óÕ|µÔšú±r«N×÷Þì\0070Âz\20ò$\25\9ŠBÈ\\5Á\11ì´tðŸ\12ÃÇ*\9¦@GÝÕ\11{¶Qo}Ûk®\1à‚øyï)¿˜\ +a\6I`¾í›à.&£\23%ìCÓG•2ÀÝ\25\16PsW$·A\1qÛm\25Ô\5\2\19D߆ñW\1#%\11£$Ï>¨žkÉ=6G›Âu\15¢\2ü\127‘ä\12\6z_PÃ_\6_2\19KjÔCÿ\1»APxo\9$ÿh.]ÈpRm\21gç\28\25H¦fC\16±—‚\ +`k.A—Ž6\8Ë‹\\,øÑ\0194z*$\15t,é»ÔýIü•½lƒf+°$¹Žç­(df*£Y³cÕ¬P\1%ï€\4\22O\5]/R[ßT©h\3Ê|C\1¸dÄ\17\1±\24ètjrC\17\11¢G³j\16\23ç¹€¥€µ€­€ÃQZ\22Ã??¸ì\21EP\15é?pì\11j—\9\8²–e]ÛéI€Ó®šE\29(.·èŸ{\25A÷üé\9{v:¬û\20ÈÌÚ\127´H\11Ì\31¸;\4\26zŽx\30ÅJ«Wh_PŽ‹\12¸ý$ÈS\27,\15b®6]XÄ›>\20E\14åïØ\0âq]/ЇP\22ŒŠ^l\9S;7ðÀÅ9>K°lŒ¬ÙX'ÁµY²t©à™\18‹EùeûIuÀÔWlIi˜‡¿!BáoÍpýÏÿ@‡\6h8t~\20¥îGk\8\11h\26‘\9£\5€î\20ø\0066ô±\"j2ëÙ»î_AU\9ÙyE&\17e:Š·cABž±¡p25ñ÷Ñ;\1ؘ–ûȸŽ\13\3?†jÀlL\24b\29Ž\29«k(:Klcá\6ÏT÷\12ÔX$#¹ß\0†µÓìPõä\25ŠM\20…3³£ô’ݖ̱LƒMƒ©êãÊ\\ÕĵÐ\7Á\0½”ïTÀ\12:¹ï%ÎQ\\ÉÖzjÄ\23×w\1_\0058e\ +ÃèY)(ÃGæ\26\ + ¢òýÀ„}!Rµ—lïÌ¢\18µåw°\8$$G°I¤ƒE$|(Èq?ý\127ºØ'5\ +‡é:&Sì\"õõ\\‘kåÁÛØ}ŠŒ£„dH… k%_ò·²\2tWËÔ貈,SYùáÚȃÊÊL_\25L\25îIÍ(ö³oe\22Ùæj\19¦A>oЩ\4ª\0098„\\¥b‰Êˆdk±1\ +Œþ\27è—_Ø y¹Yܻ뼨yaƒ\26¯ËNP¸¼|ã\2ITÅvþW)2+Ì-NŽP³|\4¸ìõÛÁ¾ÉȆóÆ\20œ·‡ÍÞ.ÁEÙ(ö=kè*û[¶ÿaóÛåÚGò¸³>Ø{˳ÏÞ²\17†Ã˜‘kzËæÂv‘ \6“DîÝEÍo#~»é\21~C«S•ïA\5ë&C\5\19Ý„š\2\20¨au»owt®\8ÀP’W_\2ö%Ö@áwór¢Ä½\22&}(2M¶›\23 ÛºØÍ\20ìÁ؃\24},L8{(1Ëì¶s*BhÃqØ\6\15Q±Å¦tEwH\ +²µG\22ZZyÅ\1[;:\23qê\11˜\11X\ +8œß\8§\3\19Njñ4Èž¥\5v#Ëcquô\\JÆ´¨xÙ~\2 ÛÕ²*’ë¼6\3M#s¿FF—hx\\\ +8öÕxê\22—¯\ +P\29÷Nc¢9 1G×Ð9\14ÈôÑxÆ=**žÝ<åï\8\ +Ê^B&†™\\M10>ÍÁØ\25\29\0ÖB\0Þôðê\30j`k‰t8\28Œ(“ò\30\16¹ˆÎE+i®? 
7Tj©ŠÃÔÞîÔí®\17êî\26\26}x\127:‘8Õ!'è\28JCgw°–isBªEi„ò(©ÓS×ñL\18\20µŽf^WѺo‘~róØƺb*5®0\7´¤f¯Ì\1é%D`ØWü‡b§¦Kä­K™É\15µÿòÅTðòVŽÁ\20ÏŽíó\27ÞÕî\21~:·¶:W{\15¾J—îbxõfBWǦa\0148?\\<ªìì§,áJèº\28­öաΫšìYzWk9~…WØûh\13n¸,g§ÍS€W\0·\22y]\18\29í\21xvàVx¦L»'Õ´;·ëΪE¦cì\\¡b_#UÔäp\7{–Xƒìh\13²ãG\30ÀýVM¥¶\13‚Ð\20N’'9q\13…·7\18Íl†Ý\\·T\19>¢ˆÛ6ºaŽI\\Yn`Ž\127\28ãÔsFæ}äZj_,—¹\27\3L\1ž.Â\26F+‰\5‡í]\18»[\13[\24#8»\127¹­Ž\22F\11/Pᔋk\8c1ƒ¯\11\4^xCH»ÙWYƒ“fðŒ\2?jõvɱ~\22\22ËB±ÅâA\127ÑHä7¯€–%ùJ\30„\23ƒ¶b’ÈœÎî0Pб‹û0$‰vw$ZaG_€qQ^aoò0…F¥Ñjbiàê¾_‘šu,G*/·Ÿ)싳¡‡aµUÅ\17ªý'—ü\3·\15üÔÁC¹B:\ +趬°VðœYòt]QZŽ¡{6ü[DG#áØrð<¤Ÿ?OŒ&Ø\24r\27\23\"\16°úüi#)Ï?\20øtñ‹ûw]&\12\26®HÁ|NÖ2=&ËÂWcz\ +YÄ·€\ +Á#+ß|…ce„Ût\29)x\6\23Uu\28ÊUxL\30\8e­Mþ$—Bç¡Šïfwnšaßqš\25Ÿ®\0040\19ýüs“°zK‰g®\18ú ª\11Þ\0ß›Mˆâ§úóÇ`\14Eï\14ÕŒJ\11c—TùF.Ó3‘\29·‡1Y\12X\1á{!H¯ªÒ¡1L•×\\Z)\13j\16ò(ãbîL\26\31\24S,¤\2Î\25ï”*Ý+75ÆÚaŽ\28yvd7Kú¨æ|K¶6\0140;´¾AÉà˜\14Ý\5ÆÇYã{ypÔps\27«Ç§¾\21Ôw‡7›ÃéÊŠ¦£\27˜Å–86Sô5ô‰\17±\3\18°õ—Ü\12Fo\12r\9¯!ˆ©Øåf×­=™K\13bŽÔÕs\"OÌW@^~zl\23g-¼B\9¹4–ãB\31)‚K\19>*Z\24\19\31«\23Aœ\0ce\20n,S¶\12W\4z˜ý°o@I«¶aû(i].Aé\30[i^i¤d´Ê\4ƒ?L`*3æš\25®ÚÝQÁ\11¦¯rébÊÌÝiD\31¾R¢N\01609\5\23$e1¹Öyr­P‹Orî\6chè\19\20žK\11²/Î:(P:íwL´ò)Í\3¸»30­Î\ +@\22kD\22\11ù´…\14\127ëº?¾'&sõöнc±Ð¦Ë\8µs\28A¶\12œZ脦•>©Ç\9,a\ +\28ij\7P§‰ÚMíÕÄ4M\14HM:¸L\7Ó´ÐfH(\27äÔ}¿5›ŠQkëvÃï7È€ìÐlbc\8\11éS_\2•”}óÌ}O—…©3N\23bV£•+Rz—›\7 Ê\4ctñ\21^—®XL:Ž\\\ +/㔸cMÉ\8\15\19î4¢¦ùìz!‹­–q!<)òå$êŸi™›ö\2\9hòYÂé©\25¾àâètÄц:Z}²¶mô'“!&ÈÇé;ßRÎ^\19r*Ï8]Ôxí\27QUÆ•Áz•q‘ž\5¢g´ÖzB7›ü¶N ÍTóù©ÝQcóüûgâB~š©¿¥Dƹ­,Â<êúô]°}ÌP\25ªÑÚ’ÇBä14!sÕóI¾lj U†õ\127œa£­\24ÉÔÚ·³yi\6\\ÌPV‚$\\P£%P\25ýǪ¨î`XìÀ£õ>¿Y@°Êë¿ZÆD‡¡ùý…<$c°pFàa“X\26\22ÐrþÖ½o)L^\22 (\8.è1\30C±P!O\7Yaáà8\11PÆRP\5Ú¿ÜÑîd7Û=ø|ð¥k\\†+\22Ç\5é¦\5ŵxòVÛ§%¿ÔƒñÕEÓbI¥ù.UGo\20ËÉ\4ÐE3o±ž\28}3þ€o\3úy©MX^¼-/Úÿ4C\11A:j\25MÎXF°ë\5²ò2}·\21àrkZ\2é¯Å\26&\12ÈÃ6FíÛ\2ì´xrÊ\ +\5âòTLÝÅÄe2ÆË\21z\9K[úßöö™º²<ù3E”ïE\2ù;B±\21:ò®G(§ÓòD\12N ‚8¸X!\30qU\0007\12\0193jêL\22³;\5[|rkðÖeFÇ\1BBúËÌZ$p´¶5Ãíó-$êåäyà\22ð΂\11»œÂ^Ñ\7\\ä\13^\6W7áEíÀ£¸e,/Ê…ïZ\9V×h5•{ü<\30Ž~5,=‡¬˜„Ô5ëoYc65:4®\20î›übÓkšo«Ç„\8Á\\×\17‚/ß½Ú\0\29X‹ÕYöCáÙ²Î\14©>!Û'\5]ÀËdv0†þ\0086â¶ðü]º\16ÏÏu\127:wv¥\5Tb‰»\18ï—·\0^#¹‹ù\1ÿÝ\24›ãrå\4]0Mʚ˞ÔÙ}\7žK†ž˜î*c–ú\2Ã'\31ëÎj¦EB*/\0026\17T\"ð½ûqC‰‘\ +ÝPR&oL-·\18Eª¤8“Á–+\7Bß\5 Q\15cJmžÆ `˾±1ìk\2\15ب\3\127m¸k.\7´èå@Kµ|h\12”û‡\13Tµ_µ¦m%ä„°4düµ\5…^‘Ä“;ënÖ@™Áü¨Ã1o1\26k°Šn킺R¢/Q~ôK\8`\6d´RîÓÈ#‚Òw7v¦•\29WxICrRḄÇÇ›@ÅË’Ÿ\23Ö\15cËh¬‘·†–À˳aÞ\23\31ÊQI¾XœæÅ«™@6ðóTa\"Ñ\8\13èÊ!¤? 
Ð¦\5\17¹q^¯\0092\29\20!mëÆJåp9\23´f¿ÑgêŠx>ÀèÊ\26æ°ÕJHE+wDµ\18¶\14ÊCú\0064qMš':N­ô]¾l\26×\25”W]’0K0®>ÎA±Ö…çZ€n´`]<𕀕бQ-àüàD\22˜\8\12J`\9yI¾×u6x•\14ã8[u\30¸­š{ó\21X¨¼u/\18EíÏ\2·b¬û¶UÉq(p\29­ý{\\ß(\25\20ðÕp}kK¡îoš©óØt³Ð\7Ž¿ð£\2×wXA\29œ\21p´\15~uàõi\0GÇ\8“¨Ð<Ö©îž ô\25¥M\2˜?t\20É\16R\0Ó\13q\4B¹e4™·,áÑ…ïà\22RPŽ¿É‹%/Wßf30Ê\19÷\27‹\21\0˜Z¹ó#¤ UÒ£\20`í©\3ú˜Iä\7+öˆ6\0Ç n¿ŽfÛG)å+|Ù>+×áÏ\5Aµ.R€‡å³\28?š¼FÄfJ¾L\\\0233TlóêcÈ\22¨¦\26dä\23#pv'x²59‘2¡O\9Ä\26V9\30gGd\16¾Ý-\8!ŒØB$!:$„CÔM{6è\22Ï`t¯\5\4ä»ìþz§n͸“â2î›\19ðÄŠ\\À\24L»$Áüª\25n°pìj\8V¼eˆïlKñÇ\20Çlmœc6j¬¤ÏÄ€û„TÜìÜ.FNô¬ÖÒâñ­–šË\30jÀqŸß\30NVÇ\25±Ñb\3Ü\22ho.^l%(ývO¾¸Üå\23ÇG\5nù£\5\\_q\18^òêOßn\3ÁŸÁ\11fö\3\11¿d\0ÀæZ\30\14®Mö@ó73>[/„23)³´\22\21E\22]¤ù\16ær<Ý…@Ÿ*±=ìRÉ­6y=sÀñ­\25òç\15\\\29Á\25kEیԶGMwW{{³¡XúOýÇý\31FjµÊw è¢ãÕ\21gÄ?\4F5\9._Fñ0\21\26Ý.É_ùÝ0Wÿ’cÎ~\0\ +\6\21†Ä˜áw%\11M4_–LmUëÿþ\1©\22`Êÿý³–\6Øz\25O”`J\5öu\16™É{üfa=ãå\12½Ò›C¹`9˜òµ—\27\ +ÓÈ\17<;5d®ÍúÕÆ`Wsäº~ ·æ\13öfÊ6}D\7÷¦,\16ܸ\7 ·•öP¨\31mÑñÁ˜ÖØWÆ\3o\5\28•¥7“­cÇÄþ\26\21ë¼µ\5L \14¤òei랺Šn\13&\ +ÇõeÕÐ$\18l-cz\"þ4¬hÛ]V§ ˆ÷—sÏQ\3pÍíCŠß>¶T\29\0’€afÔ÷PÉ-\18\30Îö3ߪMçœ\26•¢Ï¼w(L³YtIõƒ`‡·>‚ûà¼A\ +Ù\26h\29–\26P¦ˆUª.Ë`*\2jå¹X#L@ä³Çc¢Äu[¼9\11ćë͵@U‹rÓÙÊ\27¤þ°Ø6¯Ü­XM\22ÛNNãCqæE„Q\11Ì0°î1#MÕ+¿õo\13b`Že}lµé\11J3FÙˆ\25Àí:RÈÿ{¥P²eôW¯Hõr±¥±º-\7ƽ…]i\0073¯·\2²Â¶_ƒÀµí.~îBñĸ]ÞÔ»ysvl\31Žñ\17\29÷M€KL\18oÛ\9QV}j)˜q‡ê¿_Yý{ùÍw{ñÍ{Ã~#„Wõq·ºòq7;ª½`A{Ïß¾\0Ïf\3®vø®Ç½aùî‰\27ÜÞ@EWô$\\\12\23ü\27\13¥·\0új\15r\31>õç‡î¹ÇÅ*t™\18˜œ0×í\0194€}rüÓõ~vº¬¸¢\24²—ÿ\12\5v7o\13\27ÑŽ~„Ú4t8íFÔÔ{ÿØg\23¶îÅ¿—;à…°ô\8Ÿ\29aÞ\16÷Ë*L°š±ÙWôÐò\25ûùŽ:e€\15[@Aí¬3MÀ\30Ý\21èuG““ÿðFç\28œ¨<\ +H»;¸äÎ\"\12)Jý¯ZÎþÝÓ¸ŽJiÚy`PÉHÒ\26\\i§\\íuˆ9fý+U°OüÝXÆ^£0]c»Á®´ïšW{P˜\14]ý\14ì\19h†\29-ýQ¡\27«Ðrm\23•C6B@ÅÇ£+n|„’\30¹—¡a×;L2Ò–ž úëôeYÈå“Óhž8y±\16Žx;Zë*ùW'ZgÓˆÌ=\"G;„Ðãô¢#ÁÍ¢\3‚²ÐLhè¥w$\24µ\ +A\6ó@øRß\29È/)¯\4W‰Én‡ÎðZ\127Á£\14ÎîóaB\9\"\24Še¥5ÿ‚¤û/­Ü7óü]›lñÖ<\127÷l´ç\17üôí‰{\30‹7\27Ú[ˆ÷ˆ\4Ôø>¿Nľ*†\21ÂÞÛ\27mÇŸX’Ö\27«\28é¿0/À\127\0116ï\2Y\31ù™KúÞ\8Û;Ãa/`v\28•OXPÜß\27ºÛÇÓßÿl<À…)çÜ®ötxJ$û\8D«ÈhQ¸Ñl”|öÏŒçÿÕ^')Ç–¶\8^åþ¡\23!>6ïêßÍþ ¿\8)Ó\127È\17B±Iø\7ùöñcº`´òW@¬'aÜ°ˆè:\6Q\14ݯcè~\0218oºùT:¨ëíëÄÁ{žÚökjOº%MXOŸÚú{\"ê[wá©\16˜\29óÆÕ\30?Õ­î/\19¼O\7\13òܼÝ+«ÛÓîJ\24R\31ʧ#\4– ¹0œ\8¼\8ꕦè'\2\6ò\24teAî`&÷ó°\\\8C—\5¾ÓjŸ.\8@n¥ÊáÞâB–šlNa\ +\22Ge($x:sÓÕ·\26èËÔ\8åР\26*\14\6G—,\12\4‡Ã¹v.¼™Â¶3Ð;;a\26Á=£SÉÄÞ\28€ÏŠçò5]Ú\11\20 \9äE_µóù7nºRÃŽ\15g6cõ\28¬#¼$D-ggƒæ\20\127Y¢A%üë3\2hõŒ\22?C6¢HPa\27Ž\28Ü\8­Å)X\17'˜\0165ÿÛȉ´¶G=µÖæ@ÒÃID+I_¿\\íæEêË‹ÿ¯Ë«Yéý×\11B² \23Hƒn\14XjûTÓ5]ku)L†Êný¦;l\25žÀŽo®ðŒL­IHjÿÊ\"A³\25êö\0005=P¯\13÷pt3)š592µ«\9\21ô0,‡\26Vw\14¬†.{u~d\19¯c6Ô莭3CÁâšÚ\12\21B“AË];\24|\13ú*?\29ÿtÏpœLm\24ù\ +øüÀ- ƒTƒXòåFÕ8r4ÿ6j»ù\25\15\22¯vg\24¸#ȃö\2û<8! ˆò6z»F\3„2ÛžÎÅ\9šÕM\8=Þ人\19\16¼H9þ¬.n«)fo¼\4vúxï<%v/×}u¬Ûnƒ\21¤²K•‘\3œ\12i”ÑcE˜>§\22\28™>b“Öêý\23\21}W\ +GiðômeÐ\2åˆkÑöé£Ix°ûôÝ¡²VÀ*(¦ò[—»©þÞUÄ·v4RAbžN\26œò7\4ÙIµo MN7Öe\0uúõÆ“étSGUË\7¾5\1¼¤¬d\127\ +£ö\0\13^\1›ÖÔXßþ$W}3Y]ˆªŠâÞôÖT8Ý>ö¦ùƒ¢¸Îÿj+\4ä³|wjôõö÷¿ŠFð^\0TyºÜ*4Ž¨èe\"=x¯*¾\28¬Å\27ê\9AÀ\24Ä›\5&•—ûãÖ¬èÔ#Ôõ¼ÜÌæ?ÝØŸUùõíÿÚ†¦\27ʺ°úB\3\0S\0z\8å\27xØõn~b›n\0319[ýAÌËá`v$»>)1—ì¦hþU›n5\23Êé†Þ.Üê\22€B¬è÷Ä!Œ‘ù©kt}›:T\21N݉™Óµ`\21:‡hY‡T®RjT:aµ*¢«¿\23³\0)ðï\127øêwIŠ›ƒnLéîVò~\1z\26\31rœÔø^äè¡\20ÒÃ;ÙÄ[vûòçºK^×\7\18ÒaÝ\13V9¾ÞãÛ\31EöÅÍÓM÷;Å#³5u=mêÛ°,=Yu1Š\15Ù\6:\31H\02949\0037¬W]N°˜òíÛ\17Ê›Nn]£\1&„\9u:Öœ¨ŽÄxÈ\28>–+^Ï…ø“?1K@ö\15àñ\5[ÃhøàŠb$\26\4ˆÓ®Ã\4«\19.®\"7r9\27\0209ìv£©w¾EE”Ás\0138\7Ø\3\28|]¤\27uHº²LuÇ\19ádË/,ñîóüã÷ÉÕñè²C\7d}sôà1J-ËG@Ä—÷*Ò±\17\8^fÏ$ÎéÎoñú_£}\0306×fsÞîh·stwð²Tãá#d\5ôé˜\28ÿ „gh<ê\19€S¡Å\31Ïb—\17,º\0173Éò±»wqOõŸÒ\6\17õÏ\29§›#‰bƌыèòB¬HÛÓÙR’ÊÍl¸S…º\1;¯©DyvNÙåA\26ÐXMžˆ0Oúp2Ï\28s®Næ\\µ«ó—à'FƒõÔ…0ƒ‚x¾5X‘é *Ošù¼D3Á>ºçõåùö4—½ªñdLž%‡? 
2>ƒE:ÒlîÀ§§ÀÓÕy~âÕä©óË•g\24¶håÞ.Ômf7æ;³­L˜FÔ\4$\21êÇpøMf€5зB‰\7*‡ž6ª5\15WÆ\28ù;’$gŸÛÒI°\15éàì\29\4`9\"¼>ærkŸ\1Õ…#’ªÏ1hs\12Ú¸,ž\19ÞG^žLèzUǾ®ŸÚwçWñpÈ\20-F£’2yy®¬ Á†\20x^m‹r²¼ûté¼ì¬•“çø.Rv}€\1p]/Fwãäéüöm_LÅàdÌÉjp|]yØÿÅò‡’óÑ\22Óm‹é¶E\23oÑÅpá9n\30f›qðú+Ry£Ä¡3Drv2—ëd.×É\\®|ç™\13/Erý˜!\27Rµü¿¸~õft`«½\6·ísøl»›â]nã8¶ü/e«É—©ûtÎîe)üT‡é³-•ûæãá Cj4\\\24LÎôˆ]åˆ\22\31±®à)¬\1«gJ‰v‰\9®Y>=ªÙ¹1ÅŽ“Géð²8¼tþωÿ1Ã=ÁÿþWWl¹«æÃßÿnL‹¿ÿCøÿ‚\27è>Röì\17½y᧪\7y©zuU_ôücöV}ÍáÖW\15uOÓ?8NûbÞÁ\12{p®²7ò­¤õÀÅwøŒ°•in@\19¿\21®SI®m±Lgã\18:V§6@ç0å7t…“‡Ìzüa\12VN.€a1\127\3z„Â{./ƒÖ\17‰*ÕcÀ ‚\0,l\19VÐq\9(\22mvÃr¾\17¹Œv)\17ŽÔ\9»ÉT\127EO\20Þ|V\6®U“uÐ3‡0\7\11b\29¶B\5÷r\"íîì¹™œ\6ÆÂJ;'8N~Ùó\0074b°Aš\15„\1~:\9\17¿‡UOÁƒ§Û©º?4hji:)-6b\0g6ll·d‡áÂ]\"—\29\19˜ìó]?}¿¹M'k϶Yû›Á†xJ0Ò­Ø\8ƒ\0026\5\23s¨£\6\"¡ULFÐRÏ\0291a\14»\0'Ð'<\1\ +F\7/Ù\13W_T“°ô\28ÑæŠ\2sª,ŸãèE”÷\127@™\2N\31ï\11\24J\\ý‡NNg\12N‰ì*cMé:ºTäç\28ع\21PDíËœùJD&0\18Ò™ SWyIÂ\3”S‚.$#CÖ‰6¡,\\.:!¾\127þ¸Ÿž.\15-x\2õ³åÄ©ÓÓ½\28HMzR¿ç\11jHz6vÁ\9’Q\15øÑÚšVÎõo\\”ÎUS‰\"^GÞH\26öŧпëOY+©˜»šIPƒ0‚sObõæ\0038TÎ\23ÓÏó/úz\30|¤¦ùáLé€nÜÌc‚s™\29\ +©Fc;ÏàåðmCcAMª[\12Í…j\"@5Y\23©U“›V\0¾{žàѦn&ÜLÉÖ\30\4fd¥\2\18_¡Sé\12\31ž~Ø¡˜By#ã´FçZºfJ¦RØp¸@F™¿>3~‘œc®\0034›§‚\21‹ñE\14:\16\1‰\14É›\7¾F}˜½fö$Ë­Ø)\31}boÇ|èª\19J´º\0^8[*Ú€·\0C€)\0KÒª­ÂK3aŸQ/nžˆÐ=’uì\19Á\3è„~&ovþãz\127D#§\20š&øî­:1v\22µŸB!’ :Œ\0ÉËû€ÙýtM!\6ý\12ò°q6\0156Î\7Rô\19òñÎâ\31„\17&H\1\8¼NÕèÕƒ¬5äØi,iq=Vßüç\2ã\1´UŒ©\ +)N¸îiwH%‰u{$Î\20„ËØp¯™bÓhÆh´ía\24@)\25Í\0205Yìi÷§~SŸÂèÅ\20F/&6–‰\7*N/\24O‰Á$!ÕC²ZP;ï4.Øô\1@r<£Øp2ÿ‘@e†åÉì\27\20¨ÃGu\29­”|\26Ã@Ÿà\5ßu÷æ3‚8LP¿é'h]SË.1Õ­6ëé\0123ï4™¤\179\1»ýä×g\5VÈv£ïŸ\25b²*_4XRW(2ÍÓä£\6\11\13/\3\6\ +ª\27»¸R\30“C\15ByÅ™ ˜PP绋I_ÿ\31Sï’ä:ÎdëN%Ff\18_Òp¤-†‚A\2`\1$ó(ç³\127³êžSìÇÄîú\22´ËnCp\1Äûáp8\28îp4ÀŠÍâé‡\29\6\"Â\0210ÌNñU\19B?\3Ààm}FŠe\1P9šõf\22\27gÍÉ9\5r¨OÜU‘÷\17¨žÜˆ/X£G\14«:á°}â…¬\3îNÆ\17ɹ[ôoñéãNO‚7!•é&^†eWØ”±w†¿\28y«5ß\28ùÏú6mø\31W²\22‚e}\31LÑ!´C\11E\14Âö\19uF\3†']ˆv܅͸U‚«7]¤š)ÊB5K5\6±,•ßp]¬\20Y½\11nÕ7*‰…\30÷\4HIk‘Ìg2ïè±°[÷4\127\6w‰þ˜\17b‘ÓeI÷Ž2“Ëj¸7WX\7\14\\*gpIçp\2\4ã\24\27°ãÛº’îä-æ\24Ô«‰ÙŠ\16ë²äÛ@\31øaú²”[ÏΉßãX\24?àÌt,½·xÌFЄÎ\15õð¢¾\9£\18:äËëÍ\6[y£Ó\24C/Å:—\23¿÷X–\13\3n\26•½ìx\4hÓÀ‘fAå¬>\15<Ñ¥½<ÑU½w\26Ú-»;\30…‘Š\25}ýN’Õóï´{Z³WŸm»A}}P&Â)\2Õ²£Ò×ú`í›Þ3kä\29~x%Y5©#¼X|\31C\18ñ…åÔú¡'G,@xµÙŒ·\ +Uõ:$—Р·ai1ë€ë\27\5\5ª÷Pv¦ Æ\8¯µ\17\7\27ÛXÓ‡­op5\0178‘\27 “!Èi‰¼ÙìÂTiúP' \0029ÐV“\14\0$áü±±§å\16&{Hͤ\11ˆVÈ%°ôß„\"ŠªTŠ\28Þ\24.4ôaø³\29·ÛÈ¥\15–8X‚{A^º\2«þ\19>ü¦²-\127Ü+ðϣΟ\11·RKœZµxˆš1\9„È3:ÂQÁ?°bÅ#¾…5¯©\127g(Òýz\127±ÅÔ?ù¯÷'RaÿSî\3Ú ùÎ àÔk\9ú-\0±à÷õOùó\7ÖVâtŸ\14Lqj\30«—\16b]V[—\17€ZP‡ÁÛ¶~õ弚š3\27\16kÚ\16\5Õ§/Ö¢\30íå:PÞ­FÕbX¡/Ö³ü+V;\0¶b«0e´¦ž\6·+˱û/դ˚\"ùƵ¼ÀÃ\11*×k†%sÃa0ášÑ“«\30[ GÞªhU žþsòÓPÁŠ\8å/\21V„™\19Šá–?/ÅùÓ\29€ß•¢Ð\31¿\27Ñ\9\26Ô‹Â\19m'Oõt±\5\15m\8œ—5\1`%Èí+>)•äyë\6'2\20¸ö#ïäå—\26Õ¸\2\22¿],~» ÿ•,ß´Hi¼¿\27\30Ž\19ü\17FjUã­r=Úe\"g«\26!_«Êr\\);\27`¡O¬ñšæ\29´î\24=4‡g÷1^`\27Ðm0\31\9@¿ˆjq0Ûàôšè,îFkÂ~Uè«nÐĆ˜\\M8\4`\21TÛ‰îkÓ9\ +]îøam:RôgLX.UÀ•ÈÅÀ*\8ðúü®þaÿ@²•i½ÝÌëÜ®4móñ®µ\12ëòÖG¾l–VZ\16*…\1µ5£Gg«—“íVmc,ˆ¦ú:jÙ°&Ë—\9œgmØ‹­\18Ø3û\11hd«ÄÏæ\7¸K•Cå\27óV‡|öÛMG\30ÏÈíJ”„\18\\U“=ÞŠª•Ê»Ìv‘mætc¡\7ôR?+à £ÁdÇî«e\20LˆYÈ\4Ëç±Ôh\16§¼\13$;\6ÿpÛxŸÁgn\16\0J´U\17C\"¥Ú\1u{”Ÿ¥¶q½¼lÿÛ\25\22€Y¶\1,ª¡å@6Dª\9Ü.d»»\27¬DP w°\28·\29kÏñÏŸ\\¿”¾B´\14ª¼›l0־[Ôú£Öéoþk^m¯\11™\15Èc«(DLE¸_`©3›÷{\127÷N»÷w&úÞý’3psu‚¦\23-n~ä><Õ\127;äé®áE@rÙ\17ŒÙÁ€»¯`ñ0â»\31w“\25\22Áº¥÷SÐe‡oÜBþゥœý̵ä\30«S\22\\/òRô\1å¹WN\5Bá\6ìç8ù«…¢vÌú(Gèâ½'“•É°Ÿ×FëtÏg.ðw\15ýn\13÷\0¸qÝžñd®E\13\\H\22Ò =0fö\2\23\24\25O*U%yöò5Œd/싯MAa\27\3BrïØ4‘k½_˾Ý\26:ÀRFû\0¹£\26mp€÷íÉ÷/ÚÑmý\0239oØt\17èé\29‹÷ì~Ò¦Aâ\20¼Weç\21®\21\"HA–\0075gP±ÀŠë&]}:\6 
Ñ\6/Š¯­¹_\7r«üŸ!_¬\13Ü^\8íª\19|9.ž\20\"Ê.PL‡éuùŠcZ1ø‚œiKܱ›¼Þz÷px;?°R­o>\23_+µ&\31Úˆ’+`ä•žÍh8zŸÿ±2©¯‡¹^¬“cº˜­~Té‚£ª\4YŽ+ûõý0˜eG3A­\30\28ÎåU‰ï\30ýà\ +KÂA‡ŽÚ=É •TeŽ\14Þ\20\ +t\9\"½?8W\31™\5¢ˆLâÃëù°hØ1ðÜg9YÖgø\20\7h{iŽŸß\26Õ\3ÉÊÖ\26±\8_'èΗ–Ú«}ÀÄöMÂk„Qûª“û¥ú\3.\19G­\0232\11¯?KèeRç¥=]ƒýBè@AN\19¸\8{\5U÷uŽœœ^uÉ\8 ›lyùÙ¨¢$bs9ój¸ú}×{],<öêëå´%Ô9];\16²Ž3üëý,I\127`!¼|¡*p…0yyûÓÊK\21Š¯ØUÅò\"óÍèàe®ï‹­‚¦XQÝÒ½@ÿÔcsoðlFUãØÞü£\17ø§»sÇö\31¸p?ÿÑ$üùU…ûe8¨èç?¹µ\\ßÏ\127Z\15¡\2Æf\2ÚæO\13‡ÙVß’×\24=Lseå•N7þüÛ˜Dùù·E\17'/Ê\17+ÂïLþ;Kþ!Ë©N ùëŒ|\2¼y'ºreñóï™Uóó¯\21ö\ +®Ü<¡`ì_\14\\¨|Ÿû\ +¹…ì,Òk¢Ê\21Àü,å[bÕ\"¾®€º»‚ââñÕ\8å­ó”Ú˜²\17VÇBk\15ß㸸Ċ\7x†W3ëÞæ1ý‹‘1[„L_ês#^—Ñi_È°û맽­‹xé4]##H\0ë Öþe«`‹¦ü\19\28\5„ÜÑ‚à\20¬\21Q%~~3D¿O³\3ëù—\127Žz\14ˆæ\9ú\12÷ó\27ä#\23;õ¤Ô\12Ó2ãI#>\23Vç¯%—í¯‚s?¿my‚rÀ0*§Ôh藰׃øÛÏŠ\0137—\2\9'+\127\26ƒ1f²«ýüûÝÏ¿ëE†Š¯ò˜?¿·ÍÁôöI'²s\16éÖ\4´Ã†›è0\2îúÛÝ3\1¿„íÃíá—Œá执Ї–NèokùÀæè§ÝùcÀþèH\14§Oòüœœõ§\6\26W½\26ê»»p³$™ÁRA!¦húpÃ^¢²pÁ­0ŒÜo×s&!G\23Š›oNÏÅV°e\0\7’\25š$üI¤»|g·ƒ»[yx{\ +ôîÔ†Æ;Šý\9`¥òª=rS€\3öˆp\27\0´I’ø$s.\5C½Š C4\9\18šHæ@¹\16š3\127´Åã\"'\16ÐâJ}DH„\22\5“ú\31…PB\21±V?¡À×(7²Š—_Ô\27\3žá\13£“ŒNþIX÷l=\26ñé>ˆO\ +ˆÓíB\27De“õR?…ŽdÑYq•ˆæ‰Rë.B\\ñ\19ñVw·\01430\17ivî\30=£kÜ\11 \15L©ªglº/ \12M¾\22\8ÒPæ–\127Œ\28\18[Ó\\…gQê•7I\7Ze<ùS•\28Ò5{vä¬àå¨b°\8\8kJûåoeq’Í£œ_$°]\5îf=|s(ÏWC\12ž¦\26Ì\21\20š€ä\\,'’lv»Ëê0ÎüÁBÑÎEÇž+Gý\27y[¹\27^§ñÞ¯ÞÒ6¢1ðìÙ(¯ÛõÂRŠ>)ÛæÞªæ÷4ò!µ\\\31\8vôh…£áTƒ™€wÑRü¯_®\26°ûî§)‚-a»\15¿‚Ú\16Ãé—Fõ—[÷P\17°zÃ\3³-'¨™0Þ\0208\"n®¿šÃ\ +¹ã(ÕøKû\6\22Æ\16\31\22¸«#Göï0Z\26CžŽõ3\14ÜX\25lŽ¯~\17:ù?j(\0ÙY¨{Æþ‘jXriì0¤í\2rµÈÝ;Ùêp²\23\23vh\22ÃÕ~¼\28\15„Ì£+…h8tÆA%m@%-¦Ÿ‹Ýƒ\16Õɺh\21åü¼‘ˆS\31Œª‘ÏUTÍŠqÒAF.¯¼‚õΪ-ðŽ\0ŒÍˆÉZÒ#ô©4Üê+ƒoþ{)\"\0‹©iÍ‹ñ<ŸèXôÑòÁ1;l¼à£Q3ZÏ‚ë…s­5È\26E©ˆÚwx…|\12é(D;Âè‹\6¢\"}\5˜*˜•W‡Î°àHU•§>ÔA@¬ÁVЖ\13‡\0–Ô‡ÆÁup¼\13Œ°ç\0:A\127\3!IIã=f\\†*!K\27¬þU®Î\9´\17E/Á²´ä…\12^¨º_;Â=\11Bb\"aæ³\127¼ÿ B\5»Ü\30ŸØÑñÍ\30\11=J'äo㋶¡wÁ ;ÐÕŒ<\25'aY\23C:•×£s²É-\7g'}yV7‰ÂQW\18ÐÁ\26¼îÆÞz¹å\1\3\"»Å\6óÇ\0I@ÔÔ=žkÿ[®4XךÁ\\Á⌬\0296¼õ¦†ª7\21Ð+£ÖúÐ\3Baw\2­ã‚:°ÑØ\ +\9Ó/_íë’çCvr\12^“é^?ÁÍ óÝíÈtÑ97HÑ\5ˉRw\20@(=jžU\"\23/\0mHò\22OÙ“35çÔ>\26À[\5bjëòì-î$·¡”‘÷GZ“V+\26,¼ÉGo\21£åŸzßÅÑÇ…3zðA3\17ü–Õ¬\127~×?^„ÆÚh\14Å­QÂÄ]`6\18påH\30\16¿ô¢Ø\\Å­\14ÃV‡a«Ã°ÕE…Ð¥íº{Ío^­0\29ƒ-¨Ü\12Á™cµÎ'8p®©þR!»/Ñ\\KUú?ΪúŒc6×y{×\25\ +<Œ{«\14jǽñÈ°F|­ªÿut\28K§n¡g¡¨Ï\27G:\1³G\4\15að3&¿CÿÙ¸ÛÛOQÁ¸P·çOhî\19\29\"=\127öˆ\29c¥¬ŠjEã?\21nq7¹¤|\11ð…?\2|*Xhïk¼ii·_;$µ\22\19r{]˜Ð\"\23¦›Ž\8W\1!¢A€{F\1µeêµML\"°u.»„ëôpC¦\7\25<ºo\\dÁ\4°t\20&ß:„*©\23¦ñÊ\19×0uŸ\20õD\11Ž|Ìlj\17œH?÷_ü5J„?åX¸ºÂhˆ\30\127Aa¡É¬Ú0-^]Ó2pYD(\29?\13‹³å:I®õþáõ8NB\13Uᨪ\2e; `4T¹·`Ûöö1³1j_q•M|¿!4™ír\7¬=ãbî'`fš°&ª~-bfNÿ¢>è\16\13(u\11X#\0126\2KT\12Ê¡çj®`¡ó1Ž\0260Šz5<\13 ¼\9Á΀|Yrº–½c*(5ãê:ø\27\0124Ãê`E\19<€èÙèpH0ÊA[®ÀË\30ô\20…‰98ÕÕ5yÍ ET‘\16”’+⌾AV8°›L\29ò ¡Gù\24®\26Ó}›0©jCÏá{ä?ˆhU¸|OBöñÏŸ¬Iô­–|ó\28\\ó\16“\0018~\ +\18šY”J‹\20R@IÜÝ*„ì™Ì+Ðh²òþ9,·ž=ÄR:ÁOf¨\4ïHì.v!ÕxD@\12/\22¼ô§5Õ^\14û7\127öQ3,½ª¼ C\28–ú`9X\ +âýçÕ¨’ZëÊ Ü\20“ƒÞ\2X:\\\11\31\4ŽOøâ#94ö¼O\11\29¢™“\1“\19˜\0Û¤c!´dxÓœ\1ZRîd·öˆE!T&æµð1ÑØ\0196@ë\29ÂØ\28/ªäT\24L¿`¼ZAMhZZ\17²Êè¼w\9V7i\29®idã\22¨uNO>1*ç•sâÚ Ð\22ÖÓzÁc‚ví|Ú’Ï1êÝš¢\20ÂØvŠ\16»âq\22Ö·ÈX¸®\28¬×\14•\26\2/âcr^\31x#\16šÿbYg¡˜·,Dx«Ø\15y·‘¬ )pe\9^3,Å>(‡Î\17\11J7C©ô5ÊÃ\2JÁÔ1×Ò3ö\22¯\11È+À¼*È}-\14͇£:£\30ýr\1\6:ûÍÈßËvý†³€Ùp¹íää\0223`Ë-J 0³1†‚òp|Ý;êÜF\3N\26h´\25ýõ²\19èÑñk^ò¹\"Gèî¢X×ÜÞ«9g¢™‰T:Öoix'\22P¯\21ФE\5\27/<ßÓËÃŒyß´\7î×\21x)µ\15\1”sf\22½MO+\5’ÜÊqs–VšCl\30ýRßU*\11áx\14±ÿŒþÂ!&øîýóíÕ\15N/¦w•=ÚaBÙ¹\127\0170ë6p\19\0310\28\2Êa…Ñç¢BxÞ¯$/Jú£Ÿª¢3úÐwï¡^½‡“b6ä¹µjÅv\0315yÎȯ\7›’\14¢âÁJ=KŸ\29\25r\30\31ÔJئ«Rj?€/³!O®O“±wÏ\13xðÕ7ÿ½5n\24\31\12¸LQ°F@<0læ\13mÖ¸\0216Ψ'ö+\12\6‡\19\22ƒÃ\14O`o(\27Þ\ 
+.\\ò°«†;\"\\aoaöb!Otðn\6á\9\25Ì€\1¼Põû„ݘ_`9ÛíiÛ¾ôˆÝ·ËÚ!좠Á­º'ìh¬+ôA\28&å{=ꬿóHSþfÆ>\4ìÑ\6FU\127ï\1»/lTW\17`rDŽ÷\2`\30¸*»e•\12 Ô\5Ùïw\4f\4\ +\ +È;ê_6¾Zó\26ymŸ֩\19vß;)¤ø2\\\127\6¨C¬*vÜào„zÝ\25¬=G`´=ÐpØTdÀ˜‚+ø·º]\19\3m3áõDH&¼\26ºð5W\30ùk†À{]f5îufν¬†Á€0dëÂËím\3·k¡“[Ã@²¯ÒÈéÙG«P+ áza½6[\25ï\2kóç?ÞºOÜë‡âb\7ï‚ \19¡2\4\25\4癩ÒÔK«àK+â°_höûÌðóoÄ¢¢àÅzŽ-\14Š¬:T\17¤Š&åìL³/»‚M\22\7æVÁº|\2wRÁwRÎ{>'Û®›Ë~áy!³\17Þ×E§PϘ`hŒï©ìŸßˆ•\8 ¬\29¸ µ@áø\7²³ýü^œÐ\12¨·•>Ë!7ô‘{@à\31RXQâè™øóûZ‰8\19èõÓ;Ê°.®“³Ì·ÞÐã\7H\21ì\6Ì\21\21‡ð\0175ß>]›Í)\26\14Û'ut¼õHÃ!/›>†ˆ%À¬(\ +¾k÷Ö)îþ’ÇlËx»@ÿE\4\0?bsCm<Ü{)Þãø¸DDãé&úOÿD<\0094Ÿ¸Ÿ\"\31b½ÿQ\16\7±xC‘€\\ι\2\5ô¡âŸ( ˆ·/bÀûä‚$öHµÅ›iz^óD\7K‘4ñ†êG\0219\23> \24É\0é@B7Gß\\,x0b\127ýIëÐÑBA‚\4Ûî‚Ú¹`î\0298R ¯\3Rõ\22\3Û_·ÿðgÒ,Æ)\4N‹½µ\4ËwÓA–'²¶&|d ­Îm[èÜ3÷“´\23ýÎ|ã‚\6\6W\27ž— ü!ƒtõ7\25‹ì\26¹KRrF³^%©Ã\"i⧵øÆÎ7Còw´ªÊC¶ÑrX˜èåŠ(úVˆfÄNMvšâPtÒ\25Š65ìD>Ç\7o“øòlñ<µgð…µÛðd\11Ïò\9ènËB”E\27\15¡\25$ð°¨ ÊÁ45È\12\"/>º\7؈ë•øà\1R´\30\29\\\21Ù=P\127\18\31(Ï\22U\29ñî\"lŸÍK\5‰xšyäV¯\8\8†ù/?R©o¸\1ûesæÁ™‡Zjp\22\16P¶®Fçâ-ŽÙ¥³áêlaWX*\28§\ +E†RµìÒråÄð¡Ôï”\14Ð4<Á.Œ\15Þï\18æ—@òö®èä)øç\15SQÿÀÒ\31^÷Ñ\25ß;ýFl+L4ÐvSáVáAO\15|D-\16ì–;n—f§ã˜ÝЬ”È\17\20§ÿäÞ ¢¯ì\17Ê'ÙFÆ]¾®+º'bU ¤ÖzÙ?²\25\23D\0269ãú\15xÖªƒÔÚL\17Zªd[x\11‰Ïú4å·\22\18C´»ðÅmlËwõµ\30¨\3ftË»[­IÈE\12\24øòË[׳\\¡žÉn«uƒà\23¦{XÇ«‡—UòØ›É÷SÝ\6âRa•ºT?ì+Þž‘{ìœ×UÎÁ‡\23ñ^¢\26ø_øÏ;ÙXÕ\19)Ì$þ\0099Š(4ÑÉQ¥F\12µÄq@\2APkc­ ðí\0264Äãõvɨß$€sH\28Oš_=©\0142»\11KŒ÷»°\1O›1(á¼~Í !Ô\0235ѯ&å6àŸn|ø\0062Z‹‘Á³‚…j=¨P\11\3\8“\20mq&\9Ÿ2\12ü;i\24|?é\4ÅYCÀE+3ŠfÌ©\25\15h\25ykþ\24wŽÜE*M˽\17^ˆ\ +® c5‡\0251‘$\7“$qlž\"‘åë¬\"C\127.™2ž\24•ã\23Ñ\21í\12£Lÿ.({‰\22æW\24e¢Ð\23\6\12#Öþº¨C³–‘\18}ãÌ4Ó\26{¢5òDßHòŸÆñpS¾AG­±úA·£ëƒÒ.{Fº“—\25Oík†ß5œÊW\22+{±5ð\16\17õ\12\2°\0(rõ\0062Ö\27Èè\27H¹\0229\17\28ÜýÖÌ\19}\24«2žè\11Äø6#é~ \21QÊC‘Á#‹x«û\13*ƒ\24Ô.LãÅaßxz\15J ùÁ¯E\4i‘£'»»]×Àúw¢õïhjEJ€>\25µm\127y‚½íPF?×”‹ÝëXµíDkÛ‰\24¤,ÕW–\ +7g\1ËÆ^Ú\31}IC{â×èül\0125ÂÖV·\3ظqï‰\4\5ÌWw—¨\24ÜÚ]P•c¢a]ÒùVKî\12/7\0179M´\ +[%€\27Ô\3VZ—ÝV™\19}µ‰ë7\5ö\127Vð¬`® n\21:ï\\›YMCÆ·\"X\21éD+Ò\1 ¡\3xfñå\6<\6ð4kl\13 Z\23œ\1\29ÕæézЦÌÌ}ó\30Õ\5¼o¥öé\23h%§O×!Õ”1øÈà”[¦XoÍ\16ú6¸/³¥ÄcUÀ\3¸lu€›¼» žv¨Û„7éœW­5<\12\0029SE4CÇÑ\18œ°«kÃÊ/»\30Ãâ¥WÝ6?{\3\127`\25™t\13ˆÕ\ +u!…ä²…Z\13\14.\20ŒÜ«‰ÀÏÄ)èÓbºÅ\127,®zN¸ié\19µmàò©ÄŒò_Å4ƒWé}2}úIŽû\"#^fè\\ýòÆ_ßà0p{,íH>ì$ÏkfÊà\27?,ûŸ˜OŠ„±‘ø4·?>ßTìÓÜz\3zð\12F³\14¢øìMÈ>KeT<;Ò‹€?ï õ\23ÑP¢l\30Y8Ò®Ðÿj8Ñß+ŠÜ)\127wcQ(Nø\1\13§\\_Êà\4\11Ftª\5T©‡øGêAËãKc÷uÛÔ\12m´Jwb£<…åžÕøéÆñG•˜x-ÝÆ\9%Ër½¼¦_ªÀ\5Ösœê\\œ†\7ò\13±³\28DœzHɉ\7\8rYH\19d8IiûtB„2NÍè¢,\19\17'ë‡RNÈ\23ÄÉ6—E\18L-짩38]Ÿ\11\11\127â°?}“\228y'°å\18<àƒ\9^Ñ„†\30¡ˆÉ”Õ\20¼KM(\1\0028r }\28ˆ‹§º\31¡ŸGŽæ\28\0\5\23ø`\2ËØVØ™SqÂB]´6\30RÆ{\5_\21”\ +¶\26±T€\15q\8UŒ\0256¥x¶‹µ$Á•¾IN‰f+õ1¦µÔø•\29¡Bºè¼ÒókÃ\0307­]¹0Q×<̦\8'ÏŪŠGžnb[\26ÏC$'ܾkIdS|Ù*ˆ\20bœ¼Õõ†5íf²D~XdÓ{«CõN\5æùMe·«I×B¾L6d¡\2[ÞÑ\25PùÈ€Cè\4ÿÙ\5°õL<%Óßî\31\\^Sx÷;Éw.ŠåñlÒ¼ÖÿƒŠ_ŽÁsë\16™hŽÙtмƒû$U¡î\11ªGÿ}ûúˆß·ãª%òm\14÷E;c‹z\29}÷iÃêsbcõ9ø6{è\127¹}v*Œ8Þ±¶\26Ï3Ëg¶\1‘8[¦œ05§µÅS€q-jlâ¬Zª0Û¢7Ø`“Ì~ó\24µÔ¢çzŸªÏÙ¬\11%…%\12÷\14ܳGl\28Û%»f¢ˆÙtUœ\7OäÙW~ö)\15^™Ä™g\17JΚ”#ú¡#¶\0289\18Ï\13‚|ÝíÑP4ÖK\28mþ/ì\28rýÅ\21…\0•£ŽWáÉ=\16¯\20@{\13U\6c÷<#Žfø6¶Ò\21±äõ\"gX«¶»\"`#\14ø@ÃóÖ\20W‡ÚT=\19‚¢‹+\24áRbÐ@^kÖ‰\8þGŒ\ +û1¹X¦Âu~ñ±v\28íx?NÖ´Pi(B‡K\2äñptõÑ!kì„îSP´U®\0096‹ºÑÊHâòòAÁp\2ø~%þÑ\18\18mÒY˜LKÑO\15\20\20¦¹Á½,¸hሡ^\23Ç°\23$_ôç½+´\12^lá9#d‡Î_lwñŠ]_|¬ù>\26{¾9pyÚǾ\26ѵD\24Ô\0¼fÂn\23p¾™ÄJ×E8É\17<ˆ\0\0±h5Ü[8ƒ0Víc$T‹Í®º9Ú`^º’é}7è\30\26LX\"m\5¹¡„Ñ›µÀjp‡¶‡—qÉÕÏJ±yÌhÑ\24•\ +Û šï§}q\30ý]ø?rŠ£E˜ü¥\11Fõ.\3\28ÇZi\11 \26*Tg@–0D÷·\11²¨X¬ô:\1~\7\24MŽÿkÈ$²T\13®æŒˆ\22M²ø¥Cx6žˆ¬¸hu\29‘틈ðþb]pBë…\15˜\0020h¶ž3½ò\22êñÏ¥‘²c\19ˆ\29k(Z1OŒ=\23ãò…jùH½`öœ\0X3\14¦‰huË4W\2Ëí\ +\30Š\127\24k±Ø®³`ã®îP\127*ŸÐãÓÑ\26v‡ØÃw›ß\1Å\17ü\ 
+\15\3àãfØL±fÌËK'ùf¶Åª€¬m5\0ê\25¹Ñ\7¸Ýè-¿Õúx]\3GX°ˆ\7z\28\9p²Á+>Ö;üš’›\19\2\24Jå»\29µ\"»3BŒÅÑ\14\127}÷ÕÙ\3\0251\ +E%\27\31¨è5n\3âV%Þô'ac/ÆýDÍ{s\0#J€b¬ÄT<½¨Í«%h¨™Ùh\14€ãŠ\ +MæìÙnj\23­•&&?H8GŽ\24é>¶ªijï¸wã”ÔÜ\25ÄÔý\"ý£^˜41qƵ±T¹“ÝÙ®É\5&‡Å²\20±aÝ÷ØA\"\23æbb€\20i¦*l\ +\9A{¹µ©02Ü\4(\22¨+õÁ\9‘\30¥~65ªùƧp\13¸ƒP²Ú\20´\2\16ºj\1>±%\8'\6è„„…þÙ\22ƒJέkdÎpòëI\3u%f<ù†ØÕh\26K38ÖpÆF>ß:ágðÒ5î*³pS¥á«™OÅÑaCÝŸÄ\11]ö\22Ó\26\20`\14›O\11*Î[œ!èÃ\12m9Xû-N}våJå\12«\28¶¹d}ö±ªœ‰ém˜EyÂ\28HÂ\26t\18R$-ý\0µ“ZßSUé­˜l›«\3Î5p©€Ü\16\28UV¦›ÒN¯\29dzxô»zA„¦þ!þü?¸]+7?\28‡Ö„\\tlW„Ù#òFñ”õ\17\9”˜ožú\2°e\16j‹mæ\14%³i¶äÊë1Y)\23¶oNuß\18T¯æ—ÖêÛrbϵÇÉçšrºqzë̺—ËÕ@Ë\22^*­£@O3®ôx\7Þa\11nŠ÷\"Ì)*‡¾T®wg{Š\17y_PeéyÐæP\6¤Tã ±JC\17l³\28QÑ|\23Q|~Fá\11×\ +å×\23Å°Ž\ +üs(.\20¿(9V­c¹ `\"Z\13ŒbÀ?\4»A\"©ÆŸþòtì§# ¥\23\31Úæc¹šÅŒ‰\3î\9ªº˜hu1 Rj\0050\6/õÂ\8ù¾±¼ý[…*\18½žÐ’¥nO\5ÁÛˆeF„@bµíh0\25˜Ü)ï³wµíh¸¹ÖÜ\0309R{ø\15›Õµr׬zFõçÀ„Ö\25uý\23%\13_îÙ/Z?u\30ó\2\25³\1{Wªè˜ó Š–\18‹UJÌ\23Çä_&ß„\22\27™ÐG#\127ínP\0UQ·„º)\21o\9(¡ñÝhá,B\30\"^k]g¶J}†léä+\0316>t« F\22•ÊŠç¨ÂŠ¼(\23žåÆ2§'[\16bßË≂tšê4³A”Y\4ˆ;\30Kd|܉„\0216Ê4e¨)7¿\"‰\17û“vŸÕó\4¯m˜‹!‚sð%\\dï\3Û)µûÄÊ}ˆÊ­\27>Ðüu³¢3\"¡\31Ópñg\6ýZÉÃm´‰tU‘k.{§šqtËG,ÈÚk\28\\[Ú_›ìA´\20H‡l­¯õ-BM•L\\ ½×ÈuC}(¸\23ý\17\"\16*sq3;ª\26ĤP“\6\24º¼U0:Ûfw\15 \19%nWµhã\21‘\"Lt¡õ\15c抽ÃZˆqg\31„\4§êeü¦Ît4³ŸŒ§†ãÎu›Ðí£JL– j«&#}î½ÍT!O5yJvWçWjù\29\18ת\27¹wL Ô0Í7¨Ój6Ó€/\15\29Õˆ&Öˆ³ÛïǶ\24G&@éh©ñNµ\21‹Ñ\11/e\27ˆ~3F\2×٢ȖÉd\21‹†_zš×o¾£\18€Œ÷êeNwfL¡D\25ºa+gÏOˆö~\27`þl>^nèl§šûÙÅw{NÕOüª˜(n¯ž¨¯Ü0å\7+p?ùÂ\16néŽîó¸·l`û'%È-ÜT³mìO* ˜­æÓ>aÄ:î³Pä¾\\7XKû\2Ÿ\11™Õ¸£QB”\24\\ó·9ɸ'8N`—½E‡\24ª\\K{\3XéØ¿RwíÚ„\24š½\12v¡m\19+bÃ\17æ\22²uefêrgž\2:oBþ‘²Çö¼ð†\26TEN#\26\13Uçö¤€E\7K\8LN7L\27\1S½‡Ÿ^Æ\6ä%Ìå­¾ù[½¥æ½:ó\1^WŒ‹ãK³îÕÖkÑ—*ˆC¼P\127©sô\11:Ãr«¸˜\23ÔwøC¯“¹¶ì’¯h\13€‚\127²áîzqÃ\29_–ÜU’5¾\6óååÛÉ¥P\22øãuE1N|!š‹»\3¸\9ÃåzêEª‹¯¤ªÊ\23|µ<ó\127þá²é?ê\16·÷/ Â\23w\0÷}'®\7õ;?˜\11?ÿBP⎡©4ÁΗ€ÈöûÊ\20‹‹Äþ$ìô¹T\1ÃÓ÷Tÿú¶Òšd£0þåäÁ¹ta\0074kë\20\12…u(ºì\19œ2}¤\7ä«\21(­ËàE›\1›ýÏ¿\8Æàö.ùhÞºY\4_'f¬U³à»Ü®ˆ@èß—+†TKSåg£(•\27×\3¾ê\16™àû'\29\18˜ÂúÆ4’\15þ÷ÏïóÜ:&õù}f\ +Úâúá(ÁÞ7ýrB\24\22ÿêSËÉê‹¢5â’¦8.lösº}¤[÷ëC\7ìOýkg9³\14k‘ð¹w¾hëH¶Ù®ÿçÀÿ>~$\15§BúUQuîÔ1HPÇM¡\"å”1¤üþ£}#¡„â¿«\127\"b§Œn×°ÓÍňà>ëì®’ï:\6]Š ú\11Ò½½'»:È\2´\23\8¨´\22\13\ +é>f„ó€m1€¨úóGÑš;\2\27\2*÷nÑÃSºŸ¾UäýLö\29´bº7Ì\20\3Õÿ|Ç\22n“îlŸ*MëKÑ[ê¬×9Ý©‹àÊKè\7×\9@;úÂrV‘;Q_\ +x-ÝGêîšò\ +…_™Ô鿪\2õÔýz)ÞI݉0àGzÜZÆc¸iš¥‡5!¤‡/‚\12\ +¡×G§’-̇\6]¢Ua½¤‰ÄØY¤.ÙÜÝ`_ñ'­>\3ù\30\19j´ÑÏ;=\23 úå1'Uö¡íj18HÊ.e5¾4Á«3a|îf\15š’\19Fæ¶D’r¡†BÁ¤Ð*À¥Yèw¦d´OÚ÷\"g¦çGŸNÊ\127ìÕn‹ ¦Q‡^¹Œ\8“¿„ê\12ü‘\6.\\Ñ\21¬3•À\25“¸x›RbkÒ\9×\19SËŒdªQ3²Q$È\24£\17ÁX\0023m9Ó5Ø%Z+Pô± ©Õ€™!œ~£\6ÛY³yD`6½\13pŸÓ§â±\21£\127Ï^žöSg\28|:Aú\19}øÉ8µŸ\30¦O\14©\0˜¤éôÉ:øü¬kê³åUvúì>©Ôg¢ }„\19…Ouÿ<\127îþϼùt~T\28|˜ª\5Ýô©\17Q\\×Éæ`Ó'ó´O:¾*Á'Œ\12\\\24\11hZV©\28%ô×V\19’³ÖÜ?#:”ž\3ç’Äý:íáÒ\\Søéÿî\28\20\22¥'¼d…\14ÐΩÁ²IzV;xÀNsëiiv\0292ŸƒZðœ\16Œ\16\24Ïä1t\19–ïx\18ùt\16³YA‹Ý`7’7çh<§äȼà\4\14\\œ\17®œT \3ýäÝnz.¯Z =Õ)c4m¨\26Hg¥NP§7%K´µ¯õ͵a¾æ®pªÁÎÓò—\14&´ÉÚºÉÕË\3ð›P7µ´d\7#ÆAç‘ŽbÎ=m¼\17MÕ‹=Ký\"Ô-\31Jpù\4\6SªÍe\11¡};ªöDƒì¨[­1‚˜£áKM\20è_íÑ(€mÊmþù͈@L%[àQ;,ï–N_ýGú\26/¿Â•©ùE_|qÇ&\6é4\31×\9øP©Óƒ\21U‘‡\5ôÒ„5\0ýWÃx+ODöŠI‡\9Tn«\15'óוµ\0140+\0092_è÷‰EÚCRó\31D3qÛšÀÒ—oÃ7Õá\"0Í\3\0233i†'œf¤ñ'U~æ-nš­±85ؽQ\12‘P\ +\26\17P1Ô¤èfÔp&\30 (b?3åN\12ƒv;V_›f\17XÉ\ +ûn\14å\5`š›ù{4 \30s‡\13²Ä.5\4GÂôjš\9ó\19dýEã\13¾Ñõ\22E«Š/;\21_¬«$éÀ¦Ÿ9Š‰k¾t©y_ÈšËãàrS M‚G©rá\8\19˜É\28•©\9{\25`5®þ\18W\127Š\5\23R!°s\19o~(\\Óp1 \31\27\22•gìœÀB\21è9§kà6ržppq¬ëæÂ\17jƒÕ͵…àvÆ홚3w\4Sõº\6ZŽMš1çËv¹]Žz;.\2Sú–*)0ož\7lW˜¶§Z{í5ß:ªVÜ:jÜ\14Vø\12±™¬\12Yѽƒ \11ùCö\23\19†‰·Üzè¨T¥ù\18–;ÔrÌz(»ë\2ÅÍãé|\31‰Ã<^ \30\0\9*\6`¬†\5[RÞ÷Aãµå€tlZÌåK‹-—\26€Å\9\0054lÍË#\13\7»®B!²\9\6U,l—êô3²QÉ–@\ 
+ÕaoÀr©ÝO’Á\30U$ÓSÖ/Àën\29f“Õ\12$ìRšq–¬p€¬Â_\21\20GekâJ\2ÞW²]ƭ©‚äb¡}“Í\2:£â\"M—ª¹“,¶Z\ +\19la§ZÚO†Nà›†=™SËé)jMÍrö\19t\28\ +à \21\23.yé™Éú^U,ÉæÑø\17ó1Làå½$\13‰|æM^Z®,¥g~,غHK%Ø°ÓâèË1\24–:\"îB[@©±úh\23Ö›¾íec\3\4ç\6ꯢÂi©Šæùƒn\13ü…à3ë÷ý^‰ŒLÁ\24¾ü\21tM\8ç\20jýÛàP´\28&\14hê•ä\1JL¡fI£ƒ\24k¡IjžºÓ\11ßo¢ÔSC2n\\ªÊí„&üâ$Ý!ïEᇛ¸ºÝC¡³L\127QsÆ€…I?YVD¡0\6UeFÖŠ“‰\3Wá¯7Ìo\8YâVï\29øh±\8¿|ñRw|”ï’Ÿ\0119\29\16È\11ñ\26\\\12BTÉò\12g\20\16ÈQ¼à»Št\14>G\4ÛÍNh×cËGÛ^kÀ’TTHcôUðB+\5,í¤p‚’\ +÷ž€;u\20`\11\9Í}Ñ\30\23îé!ò>˜ð\14'áÄÊé\2l²4„º\26\2ÖžÕ™Á3\15¥\21„Y:B\16¾\30E»æjøÔh\9 \4ó’Â\23YBYUEBá\12;Z˜(EaO\ +™æÙ\0*5\12°ú\19z\\˜\29Š™\0281û\127ù®`sØN©ßÔœÞAé݇MH<*Щ\"\22\27Où\20LåÙrˆ\2NôÌzƹ5\27½\0–\9y´›ìî\31Ü|kø\2×FdËFÀŒs\23\20m&\15Ãý\ +&õ\11rG†d\8¥…f\8ÂûìJ5ýæþ4b\15Ü\30ÓãjG\3+>\5ó;Lb\6Í?†K’_²Ê/ê‚ž®vK4\14ž-*)6¼µÄ\\†NI=\15\28“\31ã=\29¶:\2\27aï'sþ–Ÿo¨*\12ñr\27èÚˆ\21u\21\23}ºL±Sd?–\18Àâ÷\8ä6PÄãäõ&ç—µ)Zˆ(ù-\2¹ÂDMˆì«}\\Hñ\5¼‰€uo_qM…ØXUÔ!YÔ!Åá\19ÇÔ42¥äî…„À(¾'¯¤\9œì&Ç;SÓËS£—,(Y€¥¥i\24ýÃzÈË®©¹kœèÜ؈TS(†’äƒvŽ6ß—,F¡\15Èi«\15‘¦P\31Ö\25ªo'÷ùÔ¹gQ>§8\20ušÉ—ÈMB–ªúÈÂfBðQ\23ö#/n\9æÙT²å1ÕAìM«uh\1@rÍ\ +’[W\8Á•Ö¯(\\ðXi¤CÇ\15„*ž\14b±¬ý:Õ˜ì\"«3G­\9[™¾±G\11d>u¨4\1zb­+,9¶v\127E¯QZ\7°\\fÙj]Åé²^\ +õµ™ÃÔþ—jaú3\15·‡²ÉÍãÔ9ßT§Ì‘æœ2˜5ÛP\01142¢f„!¢¨\17¿-PcMÆ‚\11Jx\24”Æ\0267ì{VP\28uu\1>¨¢©]¹4ûìrZÎïàÎ|Ÿ6Žò€\0rÊ·ùqC\21‡R>4\25zùYñù1Œ=Ñ\7ž½ÉÏ…¡åK|bËfbÓAìDN\30/¨„#vï3q~¼í™ñ¯¯\9Ü \7*\22´µÒy¼eV‰!;s\30ïÌ$í!‰zÕ… ÉHâ«\0311Ò®\4½gB\30}À¯Ü\27®Cå`\22Åâ¬ïTùÓy01óˆt$…\21š_•éðѽp\29·Kt\18ïnÐò\26\"¥ÎŸ7«[ן\9\3_‚?ÿÚ:¸z¡Aí°úÓøÞO\16«aÏÈÏÑ\29÷Ô¹s²§±IFÿZ\5àT؉\26W&gžº{\11h\0\3‘^tCÆ\"‡ª\4Õ‘1öJ5ÁðÙ\23X\6s\5±\2\ +žrº0¬“=p'z\11õ á\3úÃ}T7ƒò3Ê\26ðÐWs•£Ô\31ä—”’)±ðÒ›(Ô<\7ÍQH¸ü&ðr\31<\19Ñ4ÊŒ\9¦&røùÏ\21/Wô)ó0Ò g=ä®\14Ÿ6\11’\8WÓ™‰\30Ižï,ÌlÓ{(Š¢\4*=¡¸O3„•ÚfŒÕèCF¹.ÑJ\5ž\23Þ}²ER^­5+eP\14Tg.¶Í¬ï>\17gdJèHNÂ7CëËQ6¬íl\11A@¤Ž\25©r™¯Ô»˜ƒžO\\\"ÈW\21”9ÀãQÎôhѾ}{ñÂÃiW‡Z†$ääkt7½¼˜›Åø9»«6Ÿ‡2\22Ä=éÞwׄ\\ÙG\28@Î\\!³Sgk?W^¼Ò×(m¶t—òe\0270¸šÐ\ +@ó\16\26¨¾ÂlØÒñ“K{Wzó†”7[/%ç´Û›=u°©òÛUùùÝÖºò¦U­\"Ö~áÓu\127\17—‚©îá\25£\25€íæDØÛH¹\ +\18¨:H\16ÈÛxä^,‡—ûöū✫Ž\14ÔRÔæÅÃéS\9ZO 0rÕ®¯$>Æåj\28\15\26ëÄLiË\13z±ÇzV:\23.H°Ì\8\9Ý\21°i¹O:ß—ñsêˆ@u4_x¶š,Ɣʙ¡ÃÔäxæó\1øÄã·ÙÐ\27·ý¬AdŒt«(¸œ¿”Y\25¾X«Å:¿FþXus… €RùkeFÏ€ÀˆFª„–\6\ +œay\0;Ÿ†O<ç“/Aæ5ef¼4_›ƒâˆB¹T–tr@By@*(ÌJ6\28%€Vy8_\21tµ«\6œh!\24Ëà…U,š\13`™h*»&íÊ‚,•ùTšÚG¥·Rù#°Å¿ŽÊ”©Þߨƒàz\20›«HV:IA…\21FçQ\23dT\\:\15&Ô\28\14í\8Ñ€KªX,zàŒž.Ö!›ŠM\0\16‰wÇöR©­\7GQ0´pÁ,\0)¦[õ\21DGñ l\16Ͷ2U\12y÷J®Üݧbãˆ4ºvЦŒõV)„j(5™B-•—…€\ +HˆÃ7\19ÀzÒ\19Â)Ü{l•&Ps\6Ž\20[U©Ÿ4U˜\22\2P\22[=\11]¶+\"}ä Šm#§ò\13}g–ždûPƒ;{ûÙ\28rùß\28³m¬t½úƬ%¥0ógƒ\"¤]þC\127m¼±v&ËÅs\3lE_mUÙ€>D›²ãFN­?×®EŸ{¨ÐdíæG¼ö'b£®„‚óe¯¡ôØæ½9ûCñÝá6nî\25\1'Ûš¢)´ù\17]ÚÞ|§m:S×i¾5\2Ävœ„¤“Šýv¿ú¢`ó‹_Á˜*zW\0¥¢U^Õæ\12ÓZ¼\3µ€LoKy¤-õHÌ(/x9[Ý”ä¡'\1ïv¦É\9\22\18xß\7eÞìÍÜcéÛf—ËP¸3è\27dÐ*O\6p!fH\19–¯8™÷[½qØx\30G…9Mmåk/œ¼õ·Å<\29A=[âVD·öŽ\14ÕºUÉq•äk\6Er'÷\22BKÛ{êt\22C\"x|\7›k\"È:Øè\11\1—\4TÖÉ\16\29ÍhEÜŸÔc{q<Ùv×y÷üÖ|ß^y\27B\\\ +pK\6ê°ßUù7>é­3\"íl0»[¿câ3í؉Nû\6Á¸\31°aºã\6åd\22ññVíß«b\31\13¢[©Ú>JýaÆÁ{7=ê)á\24Ì=8ÆGC.¬\21¹\11!\\h´éð9èð\1\8uÿLðC”'ûÎ1rL<ÆÜØcé¦d\11I0š\16s«ŸÁpÇxÉ\28\3\27\127øü€\24aÍ\"\5‡fv¼¹\2\20î\24r+$\8ipB\9^ŸŽ/N\0310Â\15Õ¹³;¾\9fý\29ð\"\8‰\\£Àœë\14æïp0k\14\19F\ +ÙüÿL£µ¥ÑÒŠ›\1jêq-œ½å\3\27\28åD\29*R>¬iR]V åŽòâÞçx¿šN‡öæAŸÿÖ\30ò7\7Úáoºáï\27W\6\127wš£ÿG]lUbé……tz™\2ÿ@Üý£‰qb.pZÿù\31.·~þG„·ÜÌ%÷Ïÿ@\28ÿüO»}üü?&ÁÏÿ\27ø{9>~þ/\3zâÕÑz»}èàûëC'ê_jÍêg=:ür]¾¶·ÇS\30ÞN*’úmÅ€\ +®“ªÙ+ò\15ªµà\12Å\14\28¬eaµ\24¶ršfµ\9\20y\22’¨ùm\26ôgÀ\"õ*:XkDàôm\15(²ÓŸ@&>[¯7¢±•®U1Õz3‡Ë‡ur\4UÈÃm\30°±´ß|á\9îU~øT\2Hí\24AÄ‚\5*G\15çö¢”•:ZSšª*jPŸµLå´«ãÊ•Ãeæz³‚WU“­LIÑÝ\0¯`® ’\19Ó·\24>\29¥&›œ;\7\11bÎ\4f6\23 ¦L\4šp\17@\13\27Q(4[@H𪱠\1Â|$@Ž¯Âè’vÜÁ¹`\ 
+@ce-ï€ß\31kï¡ÂÚÛŠî&þ}îø.úJÃäúŠÚ\16ÂS]±ñ&¯µ¼\19ì\1-Wr?ñ\5æ¢m°f5¬\\¬®\23¤i|ÕT¥f\11›\17Ø“j\31\28ä.G\18Š‰¡µy^Ow\13ÜýfŽâzGÂãcý%, /¢¬Ö·:¥õ\ +cÜëCówÔg°~s#·5˦]™¹ã/Bt¬¾iŒÆ\7|\21iüuDŸŠþr\ +\5b}G°\127¤[\5Ñ13ù=<\0V3#÷Em¬m†ŒéÍQ\21T†¨\"XÇO­\29«•Á¶çjzIÿ|\13 ˜XAã\23¹ŠÈ[ÇÉú(Ï«UǨ\\t…Ë÷3+ÀšŒõ·]ìB\25\9b”¥|\\VÄVVh0”ãRï?Ö*ð.ö3bã¢n\"ÊHÃ[nÌì›(qA!?¾\\}‡æŠ5Â\0¾IŒ<µï\30øÐîZaKúeà%õŠêkŠCìlµÒ˜ÕÚbÈ\4>õ*Êh=!:«\ +Ä[4h\\9tm)žI \20Ï­~¾°¢¬e£%ñÉÀT[ˆ‚ÃÓ`j=@¨Ý”»¼Z\127+\23z!–Û0Wxq· Vï<Àž”XúPƒûäxBŽ\2Wnòœ\13óyDdùéh\"0P;ìVÅÚ\28ÒqËÄ\127! qµl¢à€/ß\0260#z¨À\7ÖïbßTÁR\3kÔn«>tgh4ó/{ÇG\5S\5s\5e}Ãí\13÷\ +7ç;‚b\17ÄÄ\25/ѳÝö\18\8à\2‘È\19‘³Ð\" ç¶O\16Y?€{Õ³¬aŽñß,\12¢æñùýþ312ÙñríÌüžœÙ6AY\21 \30–9Š[PËlü3ššgÂ\23êZÈ\25\25Âu|\23ƒ>ýÕÒDò5#{ª¼V…©L@ÙòEŠ/5‡í/{6§¶¨>¹ \26»Ú^\5nïjÜjÇo\21\13ðF`­*KÖª²\4ð \24„È\13æ\26X“1‡­ÔÄƯ\31\21PZ‹Î_µióºE5ÿjÕ&|Až@¾\19Zc­išo\30µ­vÈæÎ:ï|`2c–ÂmAF@Ëâ`±TÙ)Å®iЂ´¢jDxQ\20ÑÚ\127b•em>5\2¼¸Y?ó¤\21Â\16n´ÖÓSèòùͽ½ø\0ªd¡Ô\2OmÅöÄZ#žìÚÖªàu3mqZÁ\15\19\28\21ý\19iæöuœ\31•›'¿MPÈ%ÙÆ#ÌuböN'€gh7kúÍ7…ÈEáǺô\26¡…i±€\12—›Oº(VŸí\ +õ+0\14¨9ã§;·\24\"šð«'\4ÖF¡'ïjg\30D)È‘gÃqåŽ2F—Äh\\\22Þ‡(Ož€¬–\13Y±,\17[M1Cj\7ùc¿…üˆ}\16Ù\9é†-Q\127½(–Ô‚\9µ1^4&'›jR]Ù ý\ +\127µípýÝþÁIlåÊ\127]^ÝC˜·Úç\4ò0ŸoF‹\13xä\20ÕyƒwÁXÑâ\9ò\19YÚ5‘\24ºHÝ•,µ.ßH*§õ÷sPªô\4Ù$-\21¿ÈÕ7°h2;bµ\13„5Y%m¿òþvE®cr\4\29xäë-«º&\0049”˜°Ð¯,É\20:pgb´\18¯ôVîS)2>\11„‘-\"ð©Üü­\28䳞𙿵vi¥\22h¹Wt\16M%œWëÖÁÓº~(:[¹IÐø§Köü;ñîƒT“]õqÊ}Ì5\0235WÈ×\127êV“òál\127þSÁ¿äîͦ>ªUs\ +pf\"Á±¢®Åçù\21¶\0193+\21[_\17Ô’Ö™]›Î —˜\"o¯ôêæZ{\0272·h!õÖ\127Ý„\2’×\13–\17äºO[êt@8rPíé‘¿AÎZŸ\"Œ\31vÕ¼Õ\22Ðåin¬`\0141*f­\ +x\13Y=«\31\\‘du\28:\21‚DQ†•Ç\18Š2Ü\12Æ‘xcÇöP©9‡F#•+4X®\1¾+\6\23\11°[/5˜Þƒ¦¸‘õ˜½„\5A¶ëÀdUK”O\1h\23ç¤Po/U!dFJõ»\17ì†TFójýRþ\\m®ëirëOì¬+ZbÔ\9ܽs?ÉՌʙ¨Û”/ïâ}ìX95®\13}vrÊÅ\13×Â\5\9`¹Ì¯^×ØmP4VŒ†b¡+\26$ÖôÉÎ<[\29–]r\18BNîô쬬¬q][Npª“\0ðšÁ\19\2‰1ÈiEÕ:Ÿ“ëH”·J\3å_Xek5x \8š_¯e\29ým]\28F2[î\18ØÎTŽîâí —«n!ì»uýù·¨\15û\21r\"³þQ-¸By6ru*‹å¥ó\ +·\21§á¤o& L~®\16£cMö\"%˜ßZGZþ©á¹ò9O+Väå ó\23O\0176±ÅaÜ\30\19âÜóZuB†­\23AíL\24u{ÉÚ}\4ГÍ7¶Ê\14¨t?ñr÷ýoàÜñ¿\127'Š¤ãÕ½ØUÂÓ°\19™ºEO!Œ³\16Áee(zFà\23é\19‡Ÿœž\127U0V\0'‚\23‚+#\16Ðø…ê€Ëá\4\26.ï(/\3\"¯iàOÜ’ØE‘5\127Pªßñç¨Þƒw„çÕÏBU˜Ç§>m\7ÎÍdˆé(û±–|^Õ4¿y_KÇ\3’\21S\28Ø\4!\4\25,c¸Or¹á]±ÄÁöÒø1øZï\9ÖÁ¯¼ñ1º_f¯¼Æ^û\2¥Vü£\5$Ã~&´¿Í.\24\5\12gïz\25… \02067Hóç·~\16¾m=-ƒ=«sYno\8ç\17x(l±Í\2Õ\127¹k5©o—Gá#\20\5–HGÚ½4Ï*w4¾¡Cy:\23i5–¢²yÚd¸Pƒe\8¸©Ÿœ^´\6¹½œ5§'|\23!\19¥\14:£\26ÐÐÜU›ë-ÜI\24ÐoUë‡ÐkK4\15N¸ ‚.?jt䞨Í`]øÓ¯\13\3\17Þšdôï®üÏW\18¤Ýñ‹>‰\30Øx»)¨òÝó͆\24EC\\\127\21G¹r\20Á,ëà¨\1:U\28Þ¹ÅOF\2YNrOÚÁQ˜Á¶™qMè\14‹Ü\23œ³_#hT#„K>av<Ûê¸b ôysJ^µ8\8}\7@í]uÑÄr8\2Š\29¨ôë\19\4ûdÌ”g8þjâ\ +£G\30gf¡:\3ú¾çX¬õ²Ö\24É_’§\3ÒsŒüªo\13^O=$ÝøX£ì\31è#B\19SæŠ\0;p÷ñ‹OVóœm\7=sS눆gì\25{\22BTj·Ž=ˆ\7>÷\15\29ÒE\31Å›ýúÄÊæ–µÄ\11ö‚öù\305ÍÈ—8\23Åô#V\9óÛ¢\5¥Õi§u¢ú‰ÒPP©õ*\30ÎÒ|áê(ŸoöÎdf\22ƒÖ#”\1]T\28\21Vƒ€ùÂ<^&ŠUk,ÕÏB°u+jˆ\4\2‘\25%6\20‘¡\28§4â›K`†ö¦JŸjù€ÙsÎäÙ\22ͳM˜ã¶\30)\31ü€4‡D½i®Œ¹r7cÛ¸›\"õ~w°&‚rW\0294 ;‹z\1274^Q;£±#=kP×\4ïm2æ5Œá\14â\29$:0ßK„ƒT\\dh™´há Œ‚ËrÝéñã­<ýý™uôž(\7Ÿ»Ã\8ý€¦TØßòüÓýÓSœ›dÝ\4\26¿»VŠmoh\22í>ÆÖ\0é×\ +7CÄ\127ò\29Å„r¯à‡ó}zÚ\9\2\9\2àqç{º ƒ_°\19@¤6€ÁÊw†\0170N$Ù]’\31éæÓ/ÕVÕhy›»\7H\11«\28\"Þñ}ñEHœ@í&D\19½\"7S\2‰ø\0\127\21à…;hb±\9<ª\21@y\27Ïí |[\28k{‡jm?´ñiò<\16€Ìê\18ªØØd‡â­ÐN¶éŽ,t5;SŒv\24\20§[’AP\20Œ©ß>üV[tb…_äŠú%C\30…\3·jS”ljòÕÕ·òtZ>f\16ÜC§ã@\0219\6×üÀ^\23tÔæÇçè>€¿Jš\9IÕ\ +]±‰ÖÉûpœQé h\4­ÜéV\1•Fµ·ò}ÖXBå\6\15‡q\6˘éÈXèÀ…Â<ñ<0snf(}~\6£,ÐQGN\\ë0‚Ù\17™„ˆ}Aÿ\18>¾ÃÝÏ“\21ƒQ™)\0307'Ï\6p<\5\16r%6¼\29ÁÚ­\26]nv\9?Ú\26û ^£ûl&ÿγ\12p\16}®]á’0kÇH‹ÆíŽ\15l[-ž\7s[óFi.´ŠR%'J÷\ +B\5Ñ…˜X\25gU\22=†Y“ \17Ö»ù[YÝV;ž\9r©£ÍiÜÂãÏ+ >Šjû¿\127iÙ÷>\ +\24Ý©\25»GÜž\17BïúUá€/Ž\14}R°-h\22,jß™—By¬\0232¹JÍÑ,¨åGý0ÒlMÀààâ°!%åaC%™çÊêOÈh\1Û+Éc¥¦U\5Dµðþ\03086dÃ\3ºž¥\18˜aê©ŸÿP\25¬vã»~3š! o­Ê\7†! 
'D [[ÀÉ\17·\9¹i\21\22 Ñ0ZÂÿ:\27\27Ç·\13(Ba­^²-“ddÔiô€U\25¾ÑgÍè\11${±p“ýz\4ð$IÄÐ%\31Ÿ±úÀ³#OôUV¤Ïc\15òoFhçQ¸y¼x±G¬Ex¹cü4fÒ\19­°fµ½m\30•·M\19ÊÛܵ¢š1FúƒÔ³\4(Ç.=)\19¤?6霉–Ü~\30ÁôP³ëÄ\"^9MÈ5ÊsÖ¶x’‘øŠœŸF(>rÌuaúú;Wc'¹\26;ɾ\12ǶÓ8W_Ü*\\+(ã\27î\21ºËrETHý\27Ìn`~7#§¥\2£UX~ùÏ…w®FK´V²C‘)#*ï«óØóÐÒS\13~ÞÉ|OyŒ\2tŠþEvµ>& G¿\25·V\0228ªõ+ªR3Ê€q\25»\2KT\5˜ýF5Š\7s”?=üa~Ñ°S©ù.»‹§\1]\9É\5³³f$SÄ:Uà©_hLi‘_3ô\26\21œ¶\ +¿]b­I_\16–wðZÁÛ÷\2TAZ›.€8AØ´Ã\ +016¯ÐÒpkÈ2÷)`¬‡€ü6MBBuÂ\0<ðvÖߎ÷Ý¥ž4e÷h#9—­hr4<<Û‹‡ Ôe®“ÃT¡{\6\19ÖøtŽ¹\0X3\28kð\28«\127\1˜\127h`´\11?ŽHßvÓͨ…§mò!÷’1T@glÃæe¼™ó\5oû’\12zFÚ9í$Þ\\QÈ‹Îà%d?Ô\127ðdAR[E\22tåî#Áˆh¦r8ŒümÕ×M8êj8*Æ8<¿ú£R§ÜÖS\15Ž$F$‡7¾öðÕ‰JA¤Q}Ë9ƒPôXjŠÿMÉ.ÿ$ÊâÓ\6„\0056QlŸã\5Ž²a\6´p\23\4-\17ÛOöÐO¡Ö†À.ô`†ÏM\20êç~)dóÊПH\8™f+´TIªXçýøù¦ŸÊÒvKDF=}ÎÇZI<øˆ¼]~Vêÿm$ó\28”Z<¡ˆÎO\6QDĦ\2E`$\1270ÖxÛ\20\0016l}‚LuÀR\1TÉ\19=yrQÔ¡Üh™µ©dkSÁMv3õ˜Z×G;}@»ŸgöÂgW›³h &; \1§¯§Û”Ø\5ŸÉ™BÐb.äN$Ø\1\6‹\3Ç\21òÿiÚ­Á\6™\0029Ÿ?9sgûZ\23É#\25|ý†2HþA\3?¯˜ÿQµX²Ï}P·>á~dÑIWÈ\28Á\30*ÆÐÄèI³ÈÒ!¸\28t¾¾÷…ƒêWâ¼9|)¿¯*W\"ø\27ýôWÿãiKV\7²€&ôé\26°º¦Ê†Ñ·ì@d$\1ÙßX\31Ȏ䩽\29\16'SåÉ3\22¨òtÆàož.H”(Æ/˜ŒSç­vÒÁq´Ž\0210‹\8…+úÄ\27¨‰ÆsY•›\2y\13¬\20€çy\6“µ\18(\"\7Ü©\13\19Øu2\0137…aqÚ`7ù;&²4UÂê0ê\29®f\ +Np<‘!¡á]ìÍ€Ÿ8\17 \127%þ²\3¬\\’ab°\2óÕ¸§V„Á+»Êú\0´Ý¶þSÌ\5™°Ó³Ò¥\13:6Tí8;cº>úÊ\29_ÊþD\27\"\26\0063Gà0¦\24Ê&¿+¬\17YB}ejNÜm«sš\20ªo\5€0¦\4¯J\0~·ÂÜ\3\28i'D\31¸&˜N^¥<Ú\2xx\ +:ÄQHjÒk2MÃ]yqp\16P\1Œö)å\8^úò]“s\ +ª²HÖgú\14|ÙÇs\29yQÛäBªY—\\ͺ\ +\20Fp¢W\21m5\29¨VE°c\18Ý“\11#frý‰‹6EäíŒf-´Çô‡>Ÿ*\31\18\17&e¹õÉ€Ž±ˆž²¥\20\"°cOP\29\19÷ˆü÷\00498ÆNB–Tù\24¯O£ º\25Oº\\\17'>£7³WÝŒ\9Æ<×\3½\2ÙÔ93ó\\\23½‰ê\15t…\16\8ërNl\9\00075­î\6\12´\25¸…ÉKÂÏ}—ú™eÐσ9Z³EFTpï³ç…}H\8óæ¢zÞBª®hu ”\1™OP—údZVÐ¥@\8Sû¿+HŽÕ¹›ê·Í“Îj£òÛB\13õ@;\20\22í±§Û‡qx\26¨¼0jm®ÀÎË&ø\12¯<ür\15?\11 ðD:Ûûr¹XqÉ\127˜\17Õ\24\15 óMK\31\16×Ø(\20¥\0319œàÔ†?W\ +!Ñès\0[ZÍB\6G\31øßÕkšàƒc(ÜN\6O³P…¦ñ©óëm Mr\15ìfI†\29V·\\7&ìõ¾0ì¾-ÌVI#x\13?¿ñþü†Ë¯Úrºp(\19=\12?¿Ñb‘YêænÛJPæ\29€h±ŒÐ뺺7¬¨Æ\31 õãm\27j¨Î‰Šv½õÜ­äjîG@‰«}\31\3ØÌyv\0245\6WÄñžè™8þ\"\27\31\21`°äøG+\24\9 ›\1Å%›ç)×/¢Ñ¶äÃ(ÄŠì°Oš\0èC â\"+óà‚–\27§Aù'\19:>’X\18Î-D.'[ç\13µ=8 ÄŸÿ&wT/d›kÃ3Ó8Øð\29Úaxé¯\14Œ©s&ð\5äºn‰3K„üˆV¤­ThÈ¢³™\17Ñ2mÄx‡1¢‘ñÛ²Nf²áåµ³}Ì\6\12\15 ýD\\ôád‘\"\13ê\0ˆ°&—ƒ°Ž çTô‹·lµ1ÙŠb”’]\8]Ü´esÁ;\127;.Ûúˆfü< (˜šuˆOe”õzŠÅ\6ŠlÓ69Ý@q©Ã\14Ô5§»f¨A\22!î*\16e·\ +>Eš(,V_±Oxí\9Ñ4\29šôx\20\17‹é‘È\18‚?[\17*\0â\29µ£K\5êT4‹‚¾’u\19åôôÓó\\UàetY²ÊSÿÌ/¼\24ªÉ\9åC'„ËrªR\ +©c9£6‘ÃBUŸè€\14‚\27\13‰\12`\26æÞÞŽÚÏ\\Å¥ÙâÉ„©wP\21‡=çŒ\16o¶r·û\27®\21²³¤å\15kÖÿjúÅÖÕ¬\2Š.hQ8\11â\17ž£“[ŒFhŠwár‹Õ‰ÁœÂÀù-5ž\22~ƒˆKÁŒ5ϸdúšˆ™\8u\2\13XÂ+\ +­\0016´¼òÖÓ€¦¥>§\8“¼Â\13ÂÆ\127Æ?!PX‰å0þ\5@—O®æ€\4ÞÂ\11\25í<\15\6zõXð2Aಂ³T«•¾¹®T\6\ +,[/\16É|¥“Nlèè.pý¼­'„ê(\28ÌÉ“\127O\11\22´ö{xméÄkÏœ*‡,U\14YB8­85ÓßïÚ\9å=»¨\18ދϬŽÔ™[Æ«ób&\18/±!\2Òf‰h\"À\9ÃJON{<Û}ЉÜ_g\30òRýëá}‰G»^\29\7Û\16j\21é±c4EÄËUª} yœ\127þ¯\5QE@ªò¨Pͽ\0310êÿ\8ýƒ¼/\23šUì—PpYçn]{M¹õ¬\14Y›Åtü\ +zz\11\25A\4•šoàÕ\12Z÷%šm\7å!c¤l¸RÒ§Ñ&\4ùʢ鸒¨Þdw]\12LÂ[œí¥õ€\26\7\14)‚\0170Es%D`À²¢3£1Õ¤Œ›Ö5²¨ø¡PX®:\5øΦç\20•9\4Pë ¹˜ßºK²&ÈHÝXΚ‚åV\1³4Û‚RÎ\29»‡N‚\16°ÙYX³¹:\20¦<{\ +ú5riXÛç2øú\20éÚ\\n‰÷bÊ¿Üü¾%_ÊÕÛ)*ëè\23ŒGghÍì0\24´È’Š\26øõ\5zQmà \21m„\3\17}ûSZÞ\19æ2À%Éõ —ßV‡r1\9Ç=…/bª­ \\­\1)ŸJž•Þ·ÛåÍ\17\29Ê\0204ãä®*\5\21\28\0316ûí]¶øk\13\22Äü†6(ßÚxË7¦Û2rÒ\16\8Hõ|R9Lr;”m\25»Ü\127UèÚ€Z;kóÈ\0053“ä}ò•]5ä\3%Wf\127Eƒ\3ÐÏ$³­\31Œ†\13\23HäÂf\11 x¶¤–\ +x×\9¾ÝÉ›$\"³\28\7:\20„\28o\127ò×Ý¡{Ífw6ûÀ\2R©¾f\22ô‘R•ƒ¼.ÝÔÁD3oɧê›+(dÔs}¦Ñäá\9q˜;Eøyví\3•.o*LtÞè!Wu\ +/HT\16ªÊs¹ð̈À!öœ2°¹z\25+D2­$”‚kÊñþTࢂ\30%ª‰P¼r]?¬z‡ò…;¸®/«\20\5\29g\26̵Öxu+Ðh‚‡Û\29¢pä-è PNg\22¯U,Ü?¬”ž›,ÌN Q ¤I¨ÕÏ\25\4ÇñnÍÙ¨/ÕÏDÿ@GPn*\31¡šïùüóçëÏŸòçÏÿÇÓ›$¹Í3]Û[©\0218‚b'-‡²(\21ÅNA\0´éýøŽx¿é?©ymì?×ü\12„\20z\16m\"‘ÍN›¢ï¯(Çsu5•›Ì\23.q󄆋 
w\26\24ÿ×—ÝÙÔDvã\16\0192u‚\7ëIm0A¼5±BmLžy\9\14„`®ZÂX?˜dß°ÈNÍYm\15p3\19M(¼§òúAt½{ü\15\31ƒá(=\17u½\9\24r(i3ï\ +Ú>\ +Å ØåÃêŒÔ\7±«27C4ƒI¶ÔcG\3Ù%bF»1ÆCŸDcÍ\17‹Þ(?â‚b+<„Áë\8Ø\12†\28h,2›ÚÙàvö«pD\25¯A¢I}å)ü¶Äd¥J/¼ˆèŽþ\19'\8¦Ö\127²!<À̉Ø?2èsË<\23ñÙK\31Û,Ž\0061ú´\2Ä\\³¹ˆl$Gå§p¶Õ\0265'‑t<™å\6H_\12gï.q0»S„ÝÆÅ\12ð'\2§ìõ\6)à\14‡€¶û¹\0\0198Êè±nb¦ˆ!*O\127Ä\6‹ÇÙïTZ\13öåC§DW£‹P]q~ã75ÊU¶¬:e»D“*\4Æ\0288»+J£9±±‚?‚é¸&Zby‹\25í÷{V4Gß{ŽG\27÷Ø¢µ¸\27ø«à8X?¬ŸjÊàŘ˜@os:!§âN!/ËÀ¼w\127Im{Ÿ[¬Lå@ó\ +[~<1å\21X³ÙÇ|㈶\5\15±\0¼ht$\21\3\24[ô\0051ýÉ\22HDë­tVž|b²n™ÍZT\20æý5›âQœM®lñ°É\23Ãì‡%R\0\25OÕ\13?Oüþï’}¦€4\17]WZV\24êáÕ:qåOM·kÞ¤+ü­é|UÂT_ÕîT™‡.ÝØ=ÒÍì©2YH·h¬ÊyKÈã¹®¿§\0171\9ù‹Ý©è´¿\16­Ïü\17&î2k«û÷\0310}ÞÃñ-(®¡Ì—{\1ú§\28£6˜\127vÁy\6ý\13W't·ÿ­Ã}n\ +jI8FÝù\127ͪšëŠ[\6éã\20t}Ö\29O\17¡«®€¦Óv\24ÎN$t2êŠÀÝq]ì;ä–?{\18Ã¥­{!,¥øÌ\20\16\11·Ròm×\127è;ºoÂÄ'w£²{÷¡;éôew‡±\3‹¼\29\25\31¹!\15Í\19\3}€¢x8ÇäîãaÐh\27S(ltºµ¢¼›´ð’…¢;isS$‡šª£BÍ\30\8Âø\0096\127I}BÕªþ#õ\16,\29äËðàÀ«]Z]u\8*éN‹ÇÏ\15œº·‡6`RF\1279Û”x\28\15'žìÎ.zua/êç½›õ8\6š0B.u\15Œá™ý/ç\11[\6ÉŸ£«9&‰‘³pÞ¸º¦Dáí¨=¬ÀÔ\8\5¢\25\11ké\19lî-æŠÉÒÕ5à׺Òjd÷\3:o#¾éAa–<\ +Yòˆ0è\20š\5\19\31hû\ +tŽ\16S”–!á%À®B™œ‡ÔœøʉážXÔ\12¾\14z]Ê49  ¸Ïg†«ž=Â<‰´ÙKñ3šÆ\2\12üS\6ÑÙ&§4÷¦þ0³æ¹×¥“$‡\26 Ñö\28¸,.´E¦H߸ttÚb²\\@%ĹcHšÎ[Y@ªhË@7áõýÏY*ìd¹ÝyóÈ n, `ÌK¦´Â1ªÙøR”\13\3Î(2&pËqôéf\9OJðœÝv×ËäÐÈŸå<óDgúÓü\16˜|Ás\22†>µ58gp\30ŠDÐ'tæµÓhÇÜ8¹]qvÇ\"؃JYÌݨÿbð Ƙ\27f16’˜‹Èb=¨™¥¡Â¿qk°p¼­%zÄò<Êž\30}ö¨êäÂ…?«·„«’<:Bßz‚')dn3¦`½7\3uîõBò=©\15.WíUo¥eáŠ:Ùp]11\31®\9«„¡¹Rÿ5MZ|׃ô\ +?=U~\ +k.‚6ä“\28M½ê'gSÐ\22-ä\3\18Ý\15•ÿó“\13âççyVXû“Ž‚\15ª‘ûÓÿ\3\25\19Ãqѱ\31Þ¢7‚óEÓèÖûÞ\24nÉœ|†êÕ\27¢Úªœºû‚ý´g {mÅrµD´ñ‡¾õ¤V(ª\2…žW]âEKY®ªº§þž“D®ÓƒÀ*ÙÍÝ…\24\2ñœ&ˆ!à&µ»¿íZh®\24\1\3%¤}½2\19ÔßíRf\3\30¯B{]¢0™¾j+œ#8\21‹E\25BaY\4\0G\13ø2NC¶\19ÃÍ;_ ¨‡²\4\20²éc/ú£®m\1\26'õÀƒ]ÿÔ·³[û‰3ð]Ã\25[F‚—›[1œ³n\12\12´ƒ´Ž\22\15À~¨]+\22¾Tìy`ùƒ°“ŽüÖ9\30J›)Vãà\0r`È :ïFÞÁ\31ú䛟Ì×ÞzƃùXCïeŒ˜€¢GÊ@\16Ö9xÊÆ6û¸Ø·Ò\19#¼=ÁB\0\6C\6\28rUÏ9f\3ó/\23ÃÆØ'HDüñSAàÉ\2,Šúk¯Ñ~<ì~ÿÕbÑ\20V]\19ojt‰‡Õ…¼@Ÿ¡¦^?Y‡h`\\9?”¸fg观¾6Ó‚Ê4ÿ¿9*\30n/´P\19¡á¥Æ\11ë808ø}Ð4ˆ\24˜d=f0eð/ÑàŠ\26['¢±+ã?­Ì”\18ý7´™6MÐ¥µ¤°ˆ62–\19Z\14<Õ\21õZý\25±q\25ÉßзKì\14›—ÿ¸ï¦\29»7ö»\15§\29ÅçÙï\12ç}ð !¼\16²ðB°ð‚]s,Ð\13°Ï©‘³»\127¾Òî™þòîÝ›ëM!f\26@+ÄLÝ34BÍóyñÚšiöŒ\12\19žo0Ñ'±‘ª\ +¹ƒ€\4½µð…Þ·IÌvà\30n¨>Q\31“W”{ mœzü9ùÈ£w÷κ\9ù÷C–\2@Àz˜\31;&ä¾8ÌizˆI*rÿþËm¯?ܪh__Îúcé…Pß5Ö÷\21‚¸\0¼b\23ý±Ô³\0\ +\0296Ê8t\7uãÞ\27²\17“Å\127¬z<ÿ\25Táå!TìÚ.¹(Å\9o¡…ð@Õ(ÀgìãûoåX ñïOµ]ˆ_øÔ=W—“OÖZùÙ9}ª\5ŸBÑÔÚO!\8\26ßφ×Ø;\7Õ'O®¡údz4Ÿƒ–ñg\6\13•ö\4süäQ \12ŸHõÔ\2¿(îúdýÖŸ\7¥²<çNÉ\0é€\12iZûéQÒ,\28¸)6Ê6`º3\12à‰ö½ìjb\12§ë¤Ú\6L–\11XW`iü\9oÍUx€\30\29ÌÃÿ#\7ÃdB.&¾¹ùÃp;{'\30n¬¼\1ÌÐ\\üÁlú6ì@^L\21/\14Tç\14¥EçÂÀ¸\15å-\8¾zUq‡ó9˜½>À^O«(òa›Ø\ +{\24Í\24\30Ü-…Ç1­†OR\20ÅZ\30LÙTÀé³\5T\14\28G]Å\"\8?!µ[\8/\15`-©\26¾\30ƒW\6!\3ç@'¼¶îå:Lî+mŠr,7˜áö†ä›ê'.SdÈF\27õQF\0260HÃê\24Ì\20Ï›;ížý½¼\7˜ÑKˆ¸\2¹\29Û\"†–Š\25ÕkÍ­–¿D£\5€“€Ãp~¹“m7H3,n¾É\14Ûyœè-z¯\6\1~t\14¿\1þ\4—\9þ&‡\27*±£ã@å\6Ä\ +pQá\5d_E\24s\14Nrí胛ÌMuˆç'ñÙþ£’[y\5Svˆ&U\13‘G[ 6\23\24åh)8%K3›\23ìãê'–Óp1^\8\0199m\18ö÷Ã\24\28.\16\5\12BôÃéÙqù«ŸX:jÃùÙ#gÒ\19‚]õ46ò4½BûþÆFú,QJ÷\15¾«þ?9V+N\2”šùC†`èÏþ^G@ùÀEE\20÷€'}ùÌÈ´ºá\9zX\27zâ<ÁM\\äæ6Zõ…ýÑuÄÊ÷ù'w¸§\ +nž¾ô<™fO\22áÓ^7WÓ¡–Ë\6õÔ¸ÈU·<±bçèÍ.ÇÒ\19Týi?Ó¼yr©~–±\1+|¦‚óâ™®þøıZ\11!\8DMU{&÷V¢\4¥\8þ\"–Šœrl\12‹—Áe{‡ÃÖéøxÖ0Œ`´ÊO!SPÆNsYNÏš…\0©$\29=ÔŒÝé¸ö q\3\"=vf1!˜Á\27!agA\2\5leé\\f^£µå\0008ÝÆÎ=®r_ÜX…K¼Þ-ÀL[\13dŒHˆŽ\26GÐ]¶ñ\28°ñ즶6áKòí–ëÜÆ\\ìöræ-8ïÆ=X•l»››\3µLø>¶ß‘^\30¹L°’tÙ„ÎÑŒ\\òƒ…\11ðhšÉÉûòy¬ól\"‚K™¢¦2:/\23§,g\16,gà4ˆ\27Ø«\18³œ\1ïÃàå\8\22ä¤[ŽS›©´\8\20Šä½@¨f@Í\18WJïǪ£ŠåÓÅïÎ\0022:2IÇO\6vpW 6¤ÿ\30‹•3\14î2\29¯Œ%mW¢iÊ>¡¨Šj¼ñì•\11iÏp\28Õ.aY\27ç^'\3\15\ 
+¤låEû>`ØèŽzà;TrÈõ\27ÍW¤1ÒÓ8¸ëžÞg\5f&»ap£Ñê<—\31<ž£Æ[WÖ\127ƒƒt‚ïéû¿Í­Ð„^œŸë¯­N©e\24D×ß\9RÅÈ\14\1_”?\0255¼ô·/ëã\5¶R¤’=Ì%¬IÚžäeP”\22¦.Cq=%n$ãtxΰúG\21VŒ\11çѸv4w½Ýì¹³åŒ~\9(Çu`dÔymÈ2\24ð\\MÞ…\4'g˜i\8ÖrÕº\6\27¶\13Ú\ +\23ǹ50>XVò¥Æ¶#\6ËÔÖuó\21j\4\19R{VO#¤½áêb8\4VãeËtû\17•í\6Ü\14Ç6ßðÊ‘K:‹K]­bóu®´)qxÁÀ}m4êš\1çÀˆ(º}c\6¯\28¨%Vfö1ק>0\19Ù’\11\4a\24›-écÆ-Á2Eµ‡fŠ¶©Ñzn5‚Ú±\4à…€ÙìÆÛ\2\28\5ÙX\21¾äàÞñS$fÄ\7À@¿Î:`ÃÌ,éí{8†çŽy@©©}N\2U3œf\14•yÕ\8Ë5\7—ÂV(—³·ï9ù‰Y@%Íi:QBš\\Uâ:…3}N\0273æ4\31•s3Lx\25\22|\27–\18ùÃNn¹&\\Þ\29Ï‚œ\17\5ãê‡Ëjé ÆBAÕ~°˜2èGL]30Š\16¬Ì^îÕ.ˆÅyÁ\ +\13̹|7\4Ô2 \2\6§f).}\5y>h}ò„l…0§°˜ò½øqȯ\15KI'þ_rE Æ\24ˆ\15ê¯\0077¤\5¬ENí§l\27¸¦uÃ’}/\"7\14\12y\24³\18“*j\31¯òͲ²s,\28¢šÜËjDtá\20*!´ªYB©õå‰Y°\28<—@sű\6VR³\13+૆!®æ\31û0¦\0146‡ƒ¼-yz‡\21öµ°ÂzS…µóK(—Æ\21ZBŠàÀÃ\26\5Ó²õÞ\21„Ü¡Ê\3èù\26¾HƒÍI\"INìšë™MsÅ‚LX\31\13Ù„õ“\9»B·²|•<²®Wî\15ëh\5³mX™’«•Àê?2\127jÍäæx{^§÷¸ág.+ѽ!\17*·…ê#¢£f@RX­+Y)ÐcLB63å£CW6\0ÌÊ“ûªµâW¢Õ7H\0»l±Î \27ëüéîAò&ÀÒ­Êfh1ë\1ð»ýz\18.HÔá÷XL{\7,n“\2mööù}Îì¬u°Ájû¨\1\13ðêbìPã…®«®Øp‚ù\8]ó÷ßÆ,Óü£\30í)ò\8O¼2Ó\2*ŸY\18:­ÄÐåF¾E´\2Ï[\19ÞÊü6òB¡…I©'¿%\14\2Œ;ð\24\16Ò˜M'\11q\17aVö\0›\13Þ„\\\0250\127Œ\\†Õ&–±\14ÌE&Ø\14]@¬ËaÌEž¯É\127é§ýGöî_„ýøZÛ€‚­Ú3„lŸ-‡ûËÐ\3\17x£õ·ùéÕ•c T[n‹©O†‘ä-dXžÏà¿ó\3\27\31hâáÅæ\9¨>9êàÿ§2\13\21߇½c¿°@µ¿S&‹;h:\12\28rMÈ,\21\16ŸY’ÁºU\12ØWBæ­¨ÃÀ&\11!x\6Xo\22H\127hž(Y\14\16ç8Utéæày«<Æ»ØÕÜ\15\16,༳$XàòùÃ\25Þ€ïçÚù׳a„\9T\0312ÑT.7!`>*pq œf4f|\11°˜©\0HáÆÂÏ}%\17žËæõ\16ï¢'(ÔüX/'z{\5íFU\23㶺\127¸C7\0—\1\6L 7aíH¦àG\14°‹\\vRk*¡g¸a\18\7†(#ýRs¿Sì9kE\8\24„7ú\26W#¹åêL`fJ\3\19@•xRbâ°vRÏ»Z·*\20$îÞé XG°•Xn~YÓùƉs\0220)\0018d8L9\25 2}WÆØÌ”``Ÿ“/!ÃW\14fµ¡\17êÇ\27Æ<«¶Î´\20ªe—áí#K;\14>“P#­€hMÑöóöW(áÙ5\24\25#fû‘ý[®ùSÈsÜ\6X\0õ9&oD®³¤\29˜­ˆošF\23­ÆÀÑeÎkÚ¾r}äV\"I¢ä+£`qÏ9û\23\127\13V³\12`«ñ\0306ÞÌÓ\3\14²ˆh#Õ¦Ù§\12Faq\31ìXÛÁƬÈ#zöb«É¼ù2PG‹aeÈmÕ†\29f÷©-%\24zØý;¥Ÿx¢ï\29ÔÄÝÙêÓW¨¸qï\24Ž³ß^£¶4yß:D\2\2¥ š1unHºº |ÉC¿%ãÒ1¿¨Äü¢â$ì¬:h\23.G1ù\2}\22´\4Lô\11´­:Ðzkœ©ç¡Æœòñð„<òqsð¥GÃB«£9¥Úx¼r\20u\30›ð#Õqäõ|ì„A-.¬!ï„Ë\19oä*…¶¼¯9û^Ù·½#éùï¯v7Òeºóæ?%8[|Sæâ™~,,ÆÎl0¹YnM݉¥ŸÀÒS‹P«<'WºB$N×’«uºj€Êt5\3\17=˜¹ÐÓ‰Ëä\9Â|ʬөò›KzÀ—N0Þ$h¿©\28àÚJCäÀÅî•.^\9{]!j†Ü\18ÊLt9`Ë zD”ðd\27zºêqUVG\30ªìhÁpNùÙÌ¿áXÆŠTK½6øÐ<.€Ú0\21\8ž«[\"ûQsl9(Ëì \18\28\16\9î2ðZ? 
Ã\28î›Ã\23îÃÚpÃ\17¬\"Ux²ÜÑiV-Úƒ§œã´\27\11úó\17þ”?\5šðÇTDÞ2¾ÿ;1êÿ¤‡Ã?éá\0,\6à6A|Bš8œ™ÜÚQt!€\27õ¿\8VæNõ;È÷×I\11¼ÐÍÞl­_w‚\30Núõ„‰õËk©\17„@(ðÌ`$áØŽÏwìÂI!¸:ÜÌm‚:yz¶,R@Y ÊôùÊä´ï¯ü‚\1ì\13ë,÷râl\8ˆ!›\25yc·’¥Tùå\18}¥›€9kµeÞ|Ë\0247·\28xöBVÐ\5^Ô6XÚ8äç4@í×\19§5Ÿ\28r¦Ð9©s¶¾ÁúOï¬uf~F\31\28MÚ{7pç<Ìz\7µQZÀØ\22\25o^µ_IÏÁ´\5’\31Ó;\20†Q“9Á¬ÑRÝû\23f\11D~M”5áæmé¯Áª#Yþ.\22è²h¯?õïÉLÇ\127_\14µR,'~95Æ’I³á¢ZB`»\24ã£èÜ…\127Ñ\"dØr•Trèˆß\127ýâlÉàP[2XQÞí\1ѵîxö®ýˆ]!ô%v×U\127¯ª\0157È­®:•‰PÑJô“ÿ7ýƒ\0297vç[©“ÅÄ”¾Šúsƒê\15dovDúˆçN;zìàP—;ÚÕé\30‘\6æ?è.à“\12Œ\18t˜^ž{{×4\20„DÀ™ ÉÈ}~œ!ÌÄ\14•\8\ +Ö„\127k¥Aº KÄð6\29aºà{F\27ü‹ˆéVÀ·>ꈩ¿ˆÜmçXS?í§¹Y,WMàæO(Ê#¨@\23¾ìW‡Õòæî\25©v,w\0264QÌ\5\19+±ƒ»C\0K€dká€*M\0292qhp\28\7’½#žÀÁI¢žtqdœ,lˆµÒ'¾'G² \13¸døP·´Ý4q<)djsŠóÄ\5•\0íe]N{\0PF\5]\ +\17˜h}¥\27éšé’\12¸uÇn¾~àd\22}u\ +š…c\5ýª‚ŠE×ÌÕ@Êr®˜Bs­ÑoP¬\17-œ\27»¥î®îÐÅý\\Bç0Ñ+d\16©~Á@Q\29ß\"ºú\16\14º\0319¦woYh\23€¢Î\12y»*lE\"v~à¦<^¸\9ÓöAw7á®­‰¯]\30ºÅ,¾[XE”Z¾LN‰€¹_ð×ó\13a¸g0eP»§±•\"÷í6ÉàÙŠµ{;0O\17\5VPôdYTL\127ÌÁ-8\20¸Lu\1Tà‹\30Œ\24=\5ÒÇ%\"÷«¥…¤¯>KW\9æA•ü\9‰E‘,\3\2\12ž1\\¿\"¼Òr¸ýF\20ÉV¸ôÚå·.Œ±¸ž>¢mìÉ=©”\6\21 ñšw\27\ +ë7ä… ^Wt>Äk:\27\28åˆ[\5Ü\22™†øó#\22?ÕöŸý¢~ù‰\30‚Xÿ\\\21C%Ù²X\1)(ö(TŒ}\9'±<´¹¿p±’§™\29¦\22\1^vÕŽ¾fâè\127Å\6Õwîcè\4É.“ W¢º¿’47\21ý^,½¾pS¸T‚\27ÖÄ¢Åx¡þ’«o1;fߘÁ”ÁLí\22\15Œð\11Õ¸9JÃ-ÇJ™øÃU\25h\6*!H؉‹X\"\0273Ð\25\29\17\16&ëAб]rUlÒH\1ÓÀ»Ö¨ÍE\12Ẳ;•!½£ÉÑßᦌ¶¦œ\0093%g¨×?h±‰Ø†»“&ÊBï\15\30®Ë\6\28„ý'žÁ•\12Õ–rtÈ}Pp–\29Öú\28|Z¨ÈAû\\™ÉàPÅÙHû\2]©ò-wZi\11 \17ke1Ã*86x0Ÿ|ª\15ÓÞ‡©)ä\0191à•r\11Ï­qr‚ÊŸ7b±Œ)Ž\4±;\24VÝ“‰é¸›ËdÊ÷͈.“\11äõ\31î[ì‰\25¸W‘$ŽU?žÙo\4â…¼\8º¸\26H\30Ö\7\23û\19G/9r÷<‹\20ËWºUSã\3¦¯\16\29¦O¦ŽÍú-+\12éþ–ÁÁ˜ÁL§d–\28L‡z\2úJj*\127p*3§G„äwÏ*8\23\13r\25y.óÚOÒa˾AÌ€ü˜+cH¦Œ\23¼Í–\9¾p\2܃\0193ub[\21@¸.›¼\8&c_¼<@oWZTs€\8{ÇB>˜¯@>8Z>8þO>Ø\1î/ο~6憑³\15›\26\23\"¯U-DÀ¡°Z`ñˆý¿Ÿ/s\14%él¼@ˆ¬1‹\ +£œ¯?O­\11~Ñz˜\127«ü\2¢0/ï\26&8žAjЄ~9sªñ*²dÀ\20P\28´oÍð姷\25iï\27ˆ\21ûÉä梄\13ñ\\ÒÏ\25pcÖüã4î\11ác-îºðeÕ2dÿàY²äñY<0Ë\5,@IVïT(ð TÈƤYiY³¬»C¿ÿ?r”Ìã>ëZ²zÉ)å`{QŠ„ªÂX{RXUá#OC‹\31GÄqó@-\30(4Z È½šÁ\11¿›“­oÖ¼šÖÍ“—ž'™w\19âË—ç%\12Q¸ÈßÛëod€ôá°Üh!¾¾ÿ4Í‹;xÄþÚä¥ør÷nÝO\22ïÆKhã÷#zdk»\\\1¶‹é(c\"Ž\15ïðà: X“þ\31:ÙXðÙïNopÏà‘Á\16\\\26o\ +xgoiXÓs°\21“á_PËá˜eÉ\25s+ ÌD\24œÌ~F¸N3§ÛW¦ÝV?\12¼[ÛÀ[ÌrÎÖ\0119äÐÝE<\0298þÈ ÏàÓß4z6l#\22|þ©k¼#oYë¹ÿøÍŠG24à\8–žUè?gVm«\3Ñ÷\27Q\12Ÿ{Â\28Ñöå,Ë;{\24lh.Ø3åVnN¤­Ôàùýå\ +BåíyËf–\0290eðéf ï‚·ñA°ÕÁ…q™7U„$®å\18û’òp¤<ôN¼\15\13j@\"2Ú\20a¢:ê4¿¢{±äòõkÞÞ\1:Ï´µ\7š\26¼û\4\31²Á¸H\11]ŠÝ+|’-ä\13\21Ê£ººD,Ï^ožº¥¡£ó™çNÁ6xÁæ]çÙ\2›Hì½\14]ÞifÐ`Åís€ëa]’øå&øx\8¹iÑC\14—€ŠñëøÛ\31\29ÝƼly­Ì\0215èÚtÎOõ\27`N‘-XS\25b‚ñmÅŽ”G\6\25Ù\8)\127\7\29\27Î{Þ\2‘›é …Õ\1\\\23½`ŠÃ:‰Ñ\21cEæWÈpò„³!;eÖù^£½Ô‰úÊ“8æ¥\22ó©\23ó©Ç\17 Ú¢18šoà­'\26™A\ +<\ +\9;¹§\"ä\2\12ØÑ\1\17•“$I®,yh£w¢ÈÜ\16®šO\25öä[B,úäb’óqyä\24U\3÷\14µR~\12–³6|+\20H¥:\26ÜÜ_Øð‰H|Ÿ\"\18߸w»Ÿv\7Çún÷ý\127³]#\23\2l\6…Æ\21©n\\–E\22åŽíÝhõC(¼U’Å\7|Šr‡šÁ\ +Kã\3\11®=¡ð˜éæ UU}ÒåÕgß©,䬵VšOØ\12ã§\25z\5JÙ§¾ê3\0251ûLŠû„«¥@\0062\14Ð[†ŒÄ\15…ïzƒ•rÇ¡FÏ\1)NsŽ|9%*”¢-¢ED¨O<|ŸüÆ]z:Z–Z)PE#·\5W/‡›qXD¨9{Ú\1\30L\18ù\12\28£ŽˆQ« h:Ãm\31ª\15ÞÊ\15G0%‡¢?\17æ\29¹n¼\11³å³˜mœáÛî\25h܇\ +¦~yBIÛÁ¥\0Ñ\9bNîÛ\7&ÐpÎœ\25xÝZ\4²£\13©åw&Çy¸ož\12×åÔ¾4YL;ZL;\14—|Ê\14ÆÉAªJ\\mkB˜\19;\1fÏâàö?iNÞE†\17ƹhé\19Ösn$6\24ˆ#áˆ\"¸\19\25Ü «zלÂrQùO4tϥƊ‹ïð>.†©¸U´\9Óf'³\5¨\16°AÂî½Áƒt¥wÄa\26s\24Ü*ØíË_7MŸ\25€]\8|\127µö³õ\13\19H\5y8ŠUP¬\28—ûjòÌ1b8´˜IÓ¢žÁ¼\6s\26âÓݬÏþ­Ë\1›»kfo\27æÕUÍ\13÷(­\5DÇåm=Yg—>\31v]GcœoX¬ÍGù\22\4#|\ +1K˜G\30:˜H–0'%J\15èÃ\ +&½\6\12Ê\29¿Œ|Ü2†œvÉQœhˆ“3Œµ«ð\26)Íõञb°LEÛc\6\28„\16ºT5-8ß°.—Ð\16\5º5 Ï\28wgt z\14Â’œóÅN\6ËÄ\9Ž\9ýºÊ\7b£\28\27§Ì\16òJF¯>zš\27¯E\27{U`å8ÛI‹¶“f0g@í¡?›Ë€\127›»\"\24u\17ðJµF“h¡uÊ\27Ý0„€3ô|\11õˆÌK´A5¼¹šœ$7òõ^K>ê(\8¥!\25z\3²íUÇ\31\25x@ÏÁŠ•õù>± \11\13ØëŽØ^ò’à–9\20:GÐMMI¼“G„à\29âi¼k%wÝn\6n\29ªV 
–\3¼oì$Œ¢ÉAñ¥ûí …ßÿA,\27ZA2^\0049 ‡B7\0302Â/\24˧·×â\9n\\g9xù¼Ó\8\16÷4ÉúÉ%ø\9*Ï´{ö/Ç@:}¢ýF‰Ù\13ê'œâŠ8ü̈8rårªÞÿ™™OŸxçgÒ\12zú5ˆ/yúÖút·—£N\30K‚G3ªÝãt\\äf“IQw[l6é\3¦æ#N¨Ê\4X\30@×\9«>ŒP‡ÕÂÆ÷Ê\127r}ñmÃEÐG¶.—ŸM\6±\0066Ü“âÛLV´%¬ˆn*uú?ñ³8µ~9‰œ–,²·eìXhNϦyÍYô1b\12Fsì43ÝOsÿ¨\20g\21ïц®b–ÌŠ6RÕ\16È¡:ó|‚ú‚X(!Öž¢E\"d^5º]Ì[\23—lÍÙ§¡øÖ\22E:\14”jÉ´ÊEy,’\20­\0077.\8Øq'Yµ.p阵k\24ÛÕç'ÂCq½öœ,¥Å‡\20se{Yu\7Z± \26׋é\28+Š5:(6€Š¸2ÐkÏúZ!Œ­<œ`lRÀý\14\25Ý¢E*䮽3®§GÉ\26Ã\6\16+p…˜´\"|¥ôÍí¨\30Îð(ù\26Û\3R:YWíàü-Œ\18¯:Š\26-lµ\1½¥¸ìͫϦUw6\28?º\0175iû^Ç0\24¸…\12Ôj\2\9×ur«êÉ\31!·o\29ʷ‹õ>aÇ*ÈV²\5¯\19\8÷Û”P†|\13v9ÿA­qu\21D\12D—@W(\15êÍÙtˆ•7‡\21Ñ¿\\>²K\17‘%5z\30\28…=Dõ×\23yu\24Â\2dvîúkÍlZ+“\ +Öül°ægô¸ž\23cL+GÆšoñÈ\24US†/ǹ—,k¤\22±ç®…iõëë\5B%\0õ¡ÎòDUþ£C7\"h´ætn¶N\11euoêÊÌ7lÖ[eÈÝvµøJôŸÒ7\15\19UÖÍ*è£eÌ“–Óö.¢…A³™ø±Z\23X\13¤¶Íu*¿7ØœÞx\7ײµã9\25BöZ1ùæXw؆Ýj|ñ\12Ø\ +¯èfvA\30ÁÂmô¥\1275}j…\1_­Ð%MNíIy¶\21OyOîN´aÅ•3Â2MŠ÷I³ÚJ\1¬w&·¬&öqä®(ŽØ%â«á¿ Kö¾5Ábµ&iåýE©p\127Åò¥½´ef¼\6¸\20\0/}ÊkEïb¬lL(n•6\27˜ßâeë®\29sÔ\\{r[6«-kO\22\28[.Â\ +\22>\19kóê•0½@\"ÛÐÙÊ+°yöä \29N ~9K d49\4Ÿw*Ã!¾œ6¿ˆÉõíÛë_ÃæWÅ­÷;¬®\0147U&p\7Á,<\1ê\13‘n˜ÿh´\9º\31Mfù#Ш«\25ÿœ\31É\3ühEÀj\3\6'ÈEÿÈ‹H•á“/ô@Ÿ\31ÀÔÑ|\20V\0288¸âÉS\11rËj€Å¤S$ŠêCn\9aXpŸø¦>,.€'œbó(`vÝUû]\21R\13r®\14ˆÆ‚àM–ƒq\20X\13yR,¶îÀ’³¢†–/‡-‘ý\31\27ÄÚ\11°½kŸõØ\17ëO³­\\˜\7°\29ŒÏŽNÅöŠ=W\127ÉÀ¹e\ +ÊFxZð÷\7Œ}+¸ð¶ÒeëÅäd˜\29‰¸\19dºÒ\9ƒmòø¨«`~$(w?Ò™\17«d|\"FÉ\30Î\6›6‰‚?|¥M6\23\6(v¾F÷iÜNm—ëV$÷“€§l¢¤T¸±\9ª¿FcâªTli¹ØëG÷-yb¦\26† :\"ùe\11Û\27°X2\22\24âè3\24œ”Ž8ÊÛæ;¨èx \29döËÑ\17¤Ø£}ùì‡\0133Z\21þ‡Y+o?Þý\15^Ì\12خ͈i¶Ë˜Áѽa_»ŒãÒ¿s\29l\8[æ½0dЄ\15ÝŒ\22¿õµG\11^mnÄ_\8`(r÷qÁ¼\0124„OÇÕq\19ºÓ\3÷Í\4€\22e¹•wa\4£T\14F¯b\19~~v\25Ô Œ95&Qåúy\ +I©\30ŸßgPÐJ\17'\30¹JKD©X$¡œ\25þ?%]à\127ŠÿD¡\28\1uL &{ÁNJ”0\18Zúú,_ÉJAµbÈ¥æ<Ì\0265\2<•ê£Kθ\\(ü,Š^»˜õÏÅ6œ?¹Ô„\22µ41K;Å·´“J\31üI ÂQ\23‹Ú\ +“\8X\25,yÖ\7'«OŒYA\18ÆX \\XÿÒ¢–\6\31¶³nöƒè’J‹AEX\21Klm\14K%°FÅà‡!{I\28Š\17\18g¸Œûà\"À=1çu¶pþ\6¡htiM.è49ÿ”ëBì?Bõ³\0²¬nÒÊ\24!°mw6ª\24^œ£¥ÍPEd>µ$‘çdb—Y\12*æ—é\"èòL¤mNÅ,\16\21ñº3lnÄ¡Ëí\13\31\25n\25Ф†\23èhA\25ò@3\24\29ç¹c\ +¢ØLo¸d\16©œ«T<Ã\21ïq7Š\3Ð)\24-hÕûK-q\21ß6§h#”Z˜ªaV¢5É¡;i\18S&™n\13ã+\17\24‰æ'¥T8A#Ê,£\25\21ùÿ‹B\0147å\8S–ç4\22 ßÁö8£Y¬¼L\ +\16†ðý\23åX°jø!¿Dp+ž°m\26弟\9âÅÜIØ¡Ú\28{'ör—'\11lEKa1%¢MÁF\24m\24>Ø^4D™ùD-„¿¤\127ûéèèC\27°»|äÁc\27-@©Ì[ß8\22â\15ü\0145U5º~v\14\14}\6OƒÝ­a~*é>fÀåÁKçsÿg˜-\14…˜¶¯\15º³åïòPñ>\29y¾;èa—B[ôHÇø¾:y†ðŽº;Ê/…<$fæ\2\0278‹~¸ãCj¿Í)\12Vuà››\7FFš±ä,¹‹ŒvÇ,e\20ý†õ\0171€\19\28jÕº®\127uâ-§5Ò\1å?¹¢÷eìmQ+fY \3'\11yŸÃ?¹”˜}¹_AM5\29À…Îñ“æ\127*\0š'±\22щÑì#–Ððü ¤zÔOºÎä#\5.\30Ëa¡ ¼\30p„_b¦ÿØËàAîàˆƒ…š¸>{ÞMº“Ò\0206\22ŒÈ{\26øìn}½ˆ'/79;ÖF³H¹e\31˜©PT\16S\4Jؾ\9\29ª\5\23_\16BM©—ŸCÀ0æû;ç\0026ª\24ú\17׻Ζ¼çBÐã\5\17s³ŠímYL±½Ÿó`œ'tƒ\22u²6\ +ùÐÒ\4\0%ñ³\15ù6v–\15–Ju\"ê©{{Ùrðso…uþA´9à\13Í!Óú)x·´\13³òWwR'\30\26Åãqy0uŒã2œ‡ö\"á·¦R\29è4\"áÂ\127+\8Žõ\1õçà\13±98HŽ*¿¢\3#`3\11ʱu¦O\31¾¶Á‘®1=l'Ö^n¿Çæ7%|ì¿Ç‰9ƒœŽœ\18Z\13¬èÑ– byD·Ó|éÊb9ÈøG]õ‡žhþ`¹M †Œ\9Î÷ýŸþÊÝl=)fvôO‡\24…5Œ†žhß_÷ÒW\8\16vù.£c¿F\27¢°t\16«Ó&ÔóÂçk¢³\21ªÊòB8µ\15\14¯i˜¿!h°Â}ž!LTiŒ?´H€4\30Ø;Ú7SA¤™\127üûçÖ~a¹ó]Hé†åFŸCnƒùA\20éÙdh…¢~?z\127‘\3\22\28“ùM+ïs˜ŽRF›Ãœ³wËÀ\5üµv²\8ç÷‡e™|’ñg\0Œ¬+EN%•ÿ½\24G1cø\3˜q\20!`¹—ÿ.æüûÇ!\0303i\21I)úïoã+Uf\16Ç»å4yú”ß\127­Å\29Kƒ\27ÏEgA“²1\27U¸°à¶D'6Šª\0220eR×>>\18ï‹©\27•J\127µ’\18¼Êrµ\3âê®›>R™\21b'/c,\26:ÇF\14¸9´!\11]R,·’Ô…ÏR®†E!éLÊý£HX>L¶6•®-$/À\2€\31U \18—®§ëæÿªõje\19ŠWÆëÐðw@6#]‹'¹§Ì8œ®“5Ù¤âZ‘—ûþÇ\9ѱtM:iT©ö°\4ïä’R\21?\7ý\31\0119©Vj}DÉ]2µ7D-Óͦ®Ò\13ûtévþ€}”të»\11.6t>Ò\9FGù\6ÝY\1.þ\4¿£|¥n$x^\31\28MBÃñ\4ê:iè\20†.uõÓ\13ö”t\27´\19ÉÅz†\0Âò\2+å¶\3íhŸjõ­f.¥ÛÔûìU‹×+E­nŒ.Bé\6E¬\21,Ȥ› 
\\†ã¦ë¼ðùÛ¡¿¶k¦\16Í&µ§¯;\13\20\6™\18J²SF\27õÿ¤&ôfÃN-;‡:´…™J˜ù\17lWÒ1ü}cf\31\12VÒ.¤\27—ìÛ2pEšªÉ<2j-‚S\9-ýr±îÉ\127!'IØâB©\20\22ÏäN“ÚWÝ)·¹Óá~ÀO÷â®6Ü«{í¢îÃeÂeãHÅ}\"ñªï¸¯›'¹°¹t\23ÆN–H'ß1\28Êœ¿sû\19€g<Õ\15Æ^®’7\15\20w¥Çõ Á\15í\8Šà»Ê\7ŸU=àxq\12+:éüÐvÎ\15\16[BÉk5VʦKqBKTz`·K\1\21VÐÓ\3\29AÊô0_\"ÞÞ^¤\1\4Ðù‘\30\16ëpb-(hu\30æ`æGpsÜ\26(ÛêÍ\7\ +³”)hˆhÃkËQÈ€¤Gê\14+`W67úûS³ã“á\31Ϊ¬\26šŸú{R\11‡[í`\1í}\25†\0127\3­¸4 F”xÜNƒw\16˜N\19êÂù«Ã6¡åv\30Îÿþ<.D\4\18¡~C@w \ +Šy‰ò2˜XêÕSçF:?=ÛýX–tb¤b<©™-:W„Ñqq\3 \25-\9ÕUÞ\19WŽ„ºaÅôB\ +ÈÈ\8Ž=Dv Ñ.ûÕ_ãçÚR «@ÓP‰‰\27\27Õ‚<\9ª<ô\15Q\18‚[\ +\24\3óeœ\\ê™Õ>úÙLa\24¤U9+-¨×\7ÍaÉ%¸Û\9UÓi<\5B0…“\1\5Û¢ƒã\6»\5q¨øµo%….¥$Çh\6a\12ù\8Cc )7,\29Ø,ª‘[\15\31ß\8·KÖÖ½¹\24ØЕ24T˜§þ\24/ð‘¦1ÛÎ\21ÔZ÷&1¾%•\19\18/iL,¼Ñü+i4j–Æ#œù2Ä4\20\6²Þ$\14\20ÏsßÓ…a·jI3\21º“¤óÔ\"ò™²MŸ„zúHB]„Ó\9FÜd£‰\23(¿)ê_=«¬rò\12Déè}*%‹Ï\24ä¤%_©#\24Í_”ê\23¬ž8¶(Püøã\13É£ØDsuqLhá£îê¾Úݨˆõ4\13ý\25÷Â,²\20'd’ñÓ¡‹?3o>­5”¦‰é;!;‚KÇM\21lË)?’\25hæLê\14ö¯,Ó#¯U/\9š¿\\\127\26Û-JØgÝúœÎ{\ +|Â?2pz-3ö\12\0210T¹–\18ëÍ$à`˜¦Õ•ë\\eô¦©\12«Çj¤½Äž¦T¹ÎÃ\30f×4Wî\30:dmèŠZwDýowšç‰7A(£ª@ï\4fÄt¢³\21ÂÂÖÓ䱎þFÛ]5\8\6Ô»!RPËU\3OéoŽ\30ÕvJž\6©ÿ@\"{d\12ÓË.å\\\18êDl¿—‰û¨\ +Ú=25\26ô\19\6–\\ìî¹b{¼\6C\6”²—nè.ܧHî†c¬GGYK«¡—Ä”­Ÿ\2\15\15o\13½QË›Éú~\15OÍlLa\22:D`2w=f—w\18^‰Á\26qš\127âÜ\8\4m±\"G\18L]Af\30æ\15œUJ\3#TB¡cš\7*\25nJ>\15=gçÌt$3OBi¾ÔæSp¶°:%ý;£\17àK\27Å\12N7c¢™ªry;¥g\17PŃ—Ëû·U+XþK‡ÜL‚m\\Í«–²SÓ.\8™¥\22\25³dMª'\11á7r;»´c±1^ƒ)\7.\31osÖãÇÿ4Rgi}\11ék>V\22ÒO\24Œd\0238ñŠŒtPOHf\16¤¶Ëy£dÕbFe\18q\8(\0265Ûj\21|¯xwRc\14°0ù@Îÿ6:T\1ht–Óƒ½C>ŽR5‡é\19—Ëã¼%2•nЂ¬–Ü\16m÷\5àî ™J‘VJË°j\0\22ìVðAîƒÑE·\28QòPr‹>Ú´¨ùÛ@þ„\127\127h´ïwø\7|(\31#u`™-‹­l;¶<¨~u‹Ê•\28këzùÛ„'óxAu9E¢ÑÛp±[½\28çía\9𣻕!“ \8âb² xN]>”\1s\6ÔÀc•@¶:O(;Ç\18ÓòîâÔP\19Çt¹\0288ìq0Û\31º\15j\ +éÏÃÁÂö×\11ˆ0(v±ªŠ\21Æ6ýU¹~“\23àI>­(\12L0ˆ¥\23\11;•/Lî$?ðY{C\7hŒ\19C\28`N¾Ôɯæ“ÄjàùÅ\16¾L§\20˜\6\"=W^ëå¥:^ÅK³è¥Ü7\3:ûÕ \4\"½tÕVÿ‚ᕯÍeS\8ªõ\4ÐŒ/€i\3@­dJ\16qØÖÑ\13–äìƒú\5æ„tB™\13®\26¸™Þ.¯‹[çs¡‚L™òKx*ý\16®(ÔqÓ\26ÄïÙ÷6\29k“K`ò\\¬Å&\9õ\2ßØ\16Çû€\28–Èš.Þ\0207.‹iÓ]ð\6\16\2ºÝ*\18ß\6ü·šN¦Š­ÇÀ«@¿±ïnùÄä=¼É  \25\\ÿ@M7ø|U\8·|(mtm»Qf\0Æ…7Í¡\22s¼ª\17émH#Ák$\20?5Õ@6ê€ÑÞ\0202‡ˆ¢ÿÙFMXJäˆò…Âf.í™?R¶Š88õæÈ|eÖ\14Á=#èžýtƒ,Ж\2÷•ð‰S~VÝ\27ªû°¾E2^\\’6Q$\28•*\18ò\4Ï\12\28>(Ã$.+ÃL\24®‰ŽÍ_†©\18bY˜¶\22Ò;\18\0123øÒTxÈ•£.Œmj¢{\8\9¿„âL\18’ùßlFß?{+rWÊ\7·qÂf“’­tâÊi\23Öf‚b\21»œ\ +<çâ\14\14\25i\7œyé­LR·‘†×Yü`à\23\1BO^\18~ŒMeÆr,\22èR@ò!z“ “š\20jœ$æ\12œ£±á\2\27{\12Š©¥š\17ñÞªÄleÀÇÇ9¡6±×@`\0222q¡ å0ŽM2L\23B\7\14H¤šhX\4\15­Í\12­ZžI§*ŒÅò``‚n4+0‡‡×+ù\28u=W˜TM캚–3Yk»\21„È\8¶b^<*‚ˆz<ö¨|ã3\3\3xX„!\11°N\25\4\29D‘}\1ž)våhžÄ\20/y¶ñ\12\7\16-`\2hÐ5Npè|àžc¾\16Åð¤2£þQ\3¤ÙmÝ\"µ!_fñ¾\15ž»Ø\23“\31£3 VóX\18\\D\127\ +\15¦ú.žbåú\20çˆÈ–f7\29œ #û•\16Ï\27\29=ñêF[ý‘¼b)Ò\ +šÔÄ2\30ÐlcsÜèVÍFá\31ªäƒýZBÍ<}\8~\127A /øÃý?Öæ©RïÓ\27*\11Î\15²þ]r \13S»Z_Wk_;½ÂYÌ<Ó\13\0^k\4\0264‚â•Ûî¾Éì=5è‹Py•Ðù–„Cª¦+jÚ±3¡`Õ°Ûæ¥\ +þ¥qø­‹âï\ +ìø€¶ÖšøÈƵ\"\25s2!öûk¯µLÔN’ñÞñ±w¥0Om+\0087ìÝõC;Íí©\0ˆÝ{w/q\26þb\ +\0277ȵhàÞµ\28I{Ö\17£|ã'\5aÁK%¡öµC\27\\»7\29,Òºþ[\31ŒBÙq?Š\29\19>8.\24bØnõ/{Öøb½e7é¢G–Mè€ÜêQ’\1ºÑ\25ˆpº\0FMhÚTP3BÛ”üYÓn\15û¢<úrù,dF¡SeG7Š²W \17òôþÌÊrÎ~t•S=¨\22\3-ßÉ\5± ÃâÈ¥{ú\22„\28\6\23!¬—T”§Û\24ïéú£%bo3’H׳ÙÀY–KÜœó î\23\29ü⓪ÍÞð\4jß”ÁœÁË ÿ‘ÁÃ)û\28çŽA]\9\0ºŸ@5÷½aÈ^·hË\7¬ÿð\25›•ðð\7át‡G«7PŸ:ŸŸ‚µG\6T/)l`Àš0ôŽÓfh0f0å8\26sru¨]\7¨””¡GX\24\2ʘ¨#\18\31ÊŒM* ¶ý™TˆÊR~|#,êÖÈ\16\20qu2úÐÖ\"\5\ +·8©?¹\15©1\127ø{=}ì×ü\20·_-ʼg\13\"Í^è;Ú›Vá~{뇓w\29쪒\27+â–¬UÃp#\6æ\1¿\28ï}Ñ©ì¾ÑÙ‹»9ìŠóS\1§[!÷Â;ÓÞß„û\3\2\25 ‰Œ\31å®#³Žø¡\0\3þ6dV¯šCR\127ï\20ì·”Ýêå•çAe‡ÚªøÇó\3Rœ:\ +•\25½SaðèF\26ÝaH²êC{¤\"\8²d7Yt\14¨–GŠ\0065¢¦{ÿÉ—\13\5I‡sO»\7Þ\22 ó\9yhÌy 
[several kilobytes of zlib-compressed pattern data (unreadable binary payload of this hunk) elided]
–\23Ôò‚Z^ê¢Ó\14#Ò$ÕªŠ?\21°Ô]¾J\15XÐ6\22´\5mcAÛX\6šºÐ1Å2ê÷\4\22´—\5íeñ4\\\30ÅmSå\24n”KÚš÷\3K”26Y|¼0CåÕxŒŠ'¢\29.h‡\11[\21Úa|-+¾“\21_ÈŠocÅW±ô=,}\9KßÀÒׯôÝ+}ñJߺÒW®ô}«àÂå\8‹ÀúÚ•¾s¥/\\éÛVúª•¾gµ<Î\1277Þ’‚Ê×\19~\17e‘G±øþ•¾|¥o^Å×®â;WÁËÉÒ\14\30qŽgÑç²å!ïN,VP[§NŽJ»Ñ\7µàu·ÓÄuÓëa/ù”fõÐ6+OmúFׂ>\127gŸ¿³ÏßÙçïìówöù;û<*æŽ>\127GŸ¿£ÏßÑ(ïh”w4Ê;š×\29ÍëŽæuGóº£yÝÑ”îhJw4¥;ªüŽ*¿£ÊïL\25\24+1Vb¬ñ·+Vò¬äYɳ’‡\31ЙìUç±WÅ^u\14{Õ\25ìUæ¯W™½\14Έ»Š+ÅUâZq¸^\28\26»KÊSY9äÄyq“¸YÜ\"n¥“ëPð\29\22˜è[õ²¿×mwÂÌc\16ó4ÂÇŸGZ/™\4“†¶Ê,÷*_o_/Fól4ÏfÄ`r>‹>bãD-‰¤™×U&¢Wýbóz‘Ÿh_y¸–\31iY/­\20JëÔÉéñ»\\«~ól½è\1¯|^\ +ÑËÞI\ +˜­oEƒZÑ VC#\3U~/\11?—µ¦ßËZÓ\15f­ñ\23³V™F\26‡)iW˜¤\23,”ÆÉã8u\28'Žã´qœ4Ö)c0^5KrƒÒYWsÕ\25WoÕÙVbÕ\9V^ÕÉÕU׌ôF¯óªk!¿^\16ü„9Á/¾¬¡[øÝ¥.náÇ=¨ª\11•\20«Ü`âSx|\6×'pJ‹áñ!\\\31Áã\0038¦ËÎtÜ‹î¼¢;¯èÎkÝÇ×SVôlþ<ÛŠž½ö4\0237-\15¹²ë†\\ÒWôù5}=pMß\15\20õLê•Ô;©-ª,Š<\ +\19Å5Š2Š*Š6Š.Š>Š!\ +\27…‹ÂG1E±D±ªÐï\ +®éË‚kü’à\26¿%¸¦¯\9®ñëmk|Wy¯%¯#\31²V\\\18WG#£Ä5ÎÞ¬¸F®¸F®¸F®¸F®¸F®á’÷ç¿_‡?¿¿\14ÿþƒ?ü~ÚzøûúÊ\15¡\8\3áá+\20j^\31Âcâ×ߧ¸—¸·¸.£Íi\13핶¤mi{ÚÖÒzÚ‰v=„»ûן_t\8\5÷\18÷\22·Ñe´9­¡½Ò–´\21mCÛÒv´=í@ki\29­§h\23Z€<\5äù\21\6`tOq/qoq\27]F›Ó\26Ú+mI[Ñ6´-mGÛÓ\14´–ÖÑzÚ‰v¡\5O¸Ò\"ÀK\8^Bð\18‚\23\9^Œýbì\23c¿\24ïÅÔ_L÷Åt_’b×Ó‚à-¹}KêoIöÍdßÌØ›‰¿™ø›‰¿™ø›™y33ï/IÊÒzÚ‰v¡EÐM\2mR¬›\20ë&a7ÉÔÆ°\27Ãn\12»1ìÆ°\27KrcðÁ7\6ß\24|cð%¹\17a#ÂF„\13\8Ùߟá³\02592ÆÏ\24>côŒYÏ\16¯$ÕJ’­$ÝJ\18®$åŠIW@\15ÖÐ^iKÚ–¶§\29h-­§• !\19ù*¡V\9µJ¨UB­\18je¨•¡V†Z\25je¨•¡VfheÀ•\1W\6\\\25peÀ\21\1Íߟ\25­ç†4ExÄ7Ú&éßê7ñ™¸\\œ\17w\21WŠkÅõâ\6qVœ\0237‰cìMco\26{ÓØ›ÆÞ4ö&±7‰½IìMbo\18{“Ø›ÄÞ$ö&±7‰½Il¶J#‘%®De\12F`ÂL–‰2I&Èä˜\24“Bó3R}FªÏHõ\25©>#ÕgX}†ÕgX}†ÕgX}†ÕgX}†ÕgX}†ÕgX}†ÕgX}W$sÅg®8ùŠsKViI’’ %9Jb”¤(¿xJ\14c`®0-L\15ca<Ì\4\19\"I\31¨xéŽ ý@ºô\2v\2ö\1v\1ö\0v€\ +×\13v‚\ +9cG`?`7¨pÅ`W`O¨p¹`oh\24³ă1\27Æl\24³aÌ\0061\27Äl\16³AÌ\0061\27\\û\27Äl\16³AÌ\0061\27Äl\16³AÌ\0061\27Äl\16³e鵌Ü2f˘-c¶ŒÙ~ñ”\28ÆÀ\\aJ˜\30f€±0ž\6É\"J‹\0\29“Ž\1:\6è\24 c€\14\1:\4è\16 C€\14\1:dªCivÈY‡x\29âuˆ×!S\29‚v\8×!S\29böŒÙ3fϘ=cöŒÙ3f˜=böˆÙ#f˜=böˆÙ#fÒì\17³GÌ\0301{Äì\17³GÌ\0301\7Æ\28\24s`Ì1\7Æ\28\24s@Ì\0011\7Ä\28\16s@Ì\0011\7Ä\28\16s@Ì\1™\29\16s@Ì\0011\7Ä\28\16s`Ì)`Y\6¶\12i\25Ò2¤eH‹\22!-BZ„´\8i\17È\"†ýb*žf…` p¿vLß1cŽQ\28£8FqŒâ\16Å!ŠC\20‡(\14Q\0282æ1‡x\14\25s\8꾘´…ñ0\19LȘ“kJp\12(—\22'—\22'—\22'—\22ÇK‹ã¥ÅñÒâxiq¼´\4[Ñ6´-mGÛÓ\14´–ÖÑzÚ‰6xv\13Ïž\17ý\23w\24˜+L\9\19>æ‡>會\25`PŽ^râ…ݓӓГǓǓǓǓǓēÄ\11\9/~~EuLLtb±LLy\"áDÂ\9„\19\"MÀœ\16m\2愈\19BM_LÅÂx˜ôÄ”\29Nqk\11ƒ,¬‚…¡\22†Z\24ja¨\5¡\22„Z\16jA¨\5¡\22\20ù‚x\0112¸ èòÅT-Œƒñ0\19L\8/\5%åÄbbm²¨XR,\22–\ +\11e5Øg†Cx¤\26êq9„Ǧðp2\29ðŇã\0007\20~\18¿6ðžûmØž\15ááèïÏ‹\17?zø<<Ý\31Î\127~…'¾\3æ]C¾Ïáy)d\22Óe!·çì”\29ø\11,gL™Ÿ¯§ëá\\žÊù:U‡ssj\14ç6<\13Ÿ»Sw\8\15Xýá<œ†\3–)\14üq¿ðô;\29ÂóÎr8¯§õð\127\127~ý\7FÝߟâžâ^âÞâ6ºŒ6§5´WÚ’¶¢mh[ÚŽ¶§\29h-­£õ´\19ív¡½Ó®Á\18\9ÿ\24ƒN·\8ˆ\127Œ!î-n£ËhsZC{¥-i+Ú†¶¥íh{ÚÖÒ:ZO;ÑÞh\23Ú;-\1Éò\20À§\0>u§\0>\5ð)€O\2>\9ø$à“€O\2>\9ø$à“€O\2>\9ø$à“€O\2>\9ø$à“€O\2>\9ø$ !^\2ø\18À—\0¾ô˜\0¾\4ðEÀ\23\1_\4|\17ðEÀ\23\1_\4|\17ðEÀ\23\1_\4|\17ðEÀ\23\1_\4|\17ðEÀ\23\1_\4dô·\0¾\5ð-€o\1|ë)\2ø&à›€o\2¾\9ø&à›€o\2¾\9ø&à›€o\2¾\9ø&à›€o\2¾\9ø&à›€o\0022ì&€›\0n\2¸\9à&€›žIÀ€\27\0017\2n\4Ü\8¸\17p#àFÀ€\27\0017\2n\4Ü\8¸\17p#àFÀ€\27\0\17/#^FºŒp\25Ù2¢e$Ëx\"¸2`e Ê\0•)\3R\6¢\12@\25x2àd É\28\13Ã(\3P\6ž\0128\25h2\28Eò9arÂä„É\9“\19&'L\14˜œg\3&\7L\14˜¼¢‘\15WŒ˜Wˆ‘\0030\7`\14À\28€9\0s\0æ\0Ì–\3-\7ZÎ\1-g2+Ž š! 
a\12C@C@C@C@\3@\3@Ã\0Ð\0Ð\0Ð\0Æ\0Æ\0Æ\0Æ\0Æ\0Æ8\26\0045@2\12\12$\3$\3$ƒ£HôJ˜+a®„¹\18æJ˜+a®€¹\2æ\ +˜+?\7˜+`®€¹\2æ\ +˜+`®€¹\2æ\ +˜+0®À¸\2ã\ +Œ+0®À@J%1Jb”Ä(‰Q\18£$F\9Œ\18\24%0J`”ü00J`”À(Q\2£\4F\9Œ\18\24%0J`”À(Q\2£\4\6\18©ˆ!Õ_\17£\"FEŒŠ\24\0210*`TÀ¨€Q\1£b\ +À¨€Q\1£\2F\5Œ\ +\24\0210*`TÀ¨€ÁæU\1£\2\6>ß\16£!FCŒ†\24\0131\26b4Àh€Ñ\0£\1F\3Œ\6\24\13“\1F\3Œ\6\24\0130\26`4Àh€Ñ\0£\1F\3Œ\6\24\0130ðÑ–\24-1Zb´Äh‰Ñ\18£\5F\11Œ\22\24-0Z`´ÀhÑ2-`´ÀhÑ\2£\5FëiÀÑÒ\0¦\5L\11˜\0220ølG˜Ž0\29a:Ât„é\8Ó\1¦\3L\7˜\0140\29`:Àt€é\0Ó1AÀt€é\0Ó\1¦\3L\7Œ\14\24\0290:`tÀÀ\7zbôÄè‰Ñ\19£'FOŒ\30\24=0z`ôÀèÑ\3£\7F\15Œ\30\24=S\5F\15Œ\30\24½§\1GO\3˜\0300=`zÀà\19\3a\6Â\12„\25\0083\16f Ì\0˜\0010\3`\6À\12€\25\0003\0f\0Ì\0˜\0010\3“\6Ì\0˜\0010\0030\6`\12À\24€1\0\3§YbXbXbXbXbXbX`X`X`X`X`X`X`X`X`X`X`X¦ïh\24\0140\0220\0220\0220\0220\22Gqž#Œ#Œ#Œ#Œ#Œ#Œ\3Œ\3Œ\3Œ\3Œ\3Œ\3Œ\3Œ\3Œ\3Œ\3Œ\3Œ\3Œc\16OØ€q€q€q€!ª[É€3=™¼l“ɓɓɓɃÉç4 ð ó ó óÜ\0072ßÒ0†\7 \7 \7 \7 \7 g؉Ɖåù\19\24=h=\21h™\13|·M\29AVFY™*ï\\Á\"!ȉٙ˜‰Ù™˜‰Ù™˜\9©LÈ΄ŒLÈÈ„ŒLÈÈ„ŒL\0081!\7\19r0!Ö„\28H \2OÈÇÄ €&þ\4è\9G±ëF˜\27an„¹\17æF˜\27an€¹\1æ\6˜\27`n€¹\1æ\6˜\27`n€¹\1æ\6˜\27`n€¹\1ã\6Œ\27c\2ã\6Œ\0270°µ\16c!ÆBŒ…\24\0111\22b,ÀX€±\0c\1Æ\2Œ\5\24\0110\22`,ÀX€±\0c\1Æ\2Œ\5\24\0110\22`,\12\12Œ\5\24\16wb܉q'Æ\24wb܉q\7Æ\29\24w`Üq\7Æ\29\24w`Üq\7Æ\29\24w`Üq\7Æ\29\24w`Üq\7ÆÑÁ?bH\7X‰±\18c%ÆJ\12¶³\21\24«¡qbùy0­`ZÁ´‚‰-r\5Ó\ +&6Í\21L+?\8&6Ô\21L+˜V0­!½Ë×árœÃßãp¹\28\15—\"üÕáo<\30²âXL\21Ü•ß›\8¢ýªŠÁ\0282¬¡Çv†òë©\29-Ôz\26ðæf\31µ\15Âõõp,¿ªKØÛHà2¯‡lÄy—CæŽ!57\30ñ¿à–Ëãዾ§¾\8\2Oí'{\25‚ô5\18fÊÔþè\15ù¹\ +\127ë!¿tGóµ\20]wÈ+ü\11`\27\24n;˜%˜\25ÿ×'ŸñŠè8\28°\25N…kh;ZG»ÀòÕ§(úƒ\9\15ü0[0ëÁ\20ót\14×\"ü÷\22sÀïJ„ÏÌÂë§Cñu(ŽîÇzɃŸÃßãPð\27̇\"\20q\17Š¸\8EŒ_û¼â?ÅT\23×S…ˆõp¸Ž|if‚D®c\23\ +®\27]Pþxýº\28×q:”xÑ£, ø#©ÓåP†\19C]Á_Qc%ÞD5_u7\0069ž§“;TŒc\15\21¾ŠýµÀßO]ñ8Ô_‡úøç¿0¿ƒù÷Ÿ`.á¯\8\127uø\27Ãß\28þ™Ãyð™›ó\22’êÐ`†é';4—óÊâSÑ\31Ú‹ëŽþ«Å\18©vsU\28ÚÚ\29óª\26¯×C;\14¥“æÓ\21¡B\15Ý|¼N¡Q\\¯\5të¿®…;t@Ò_P2!žˆNýpèkΘ]BQ÷u\17O\18U\31zü£˜“+.á\3ãz9Ö§öгI\\|Ú<\12\23þw¯qñ\7þ»…Ûaü:Œ!ÏxËz\12§ô5¿îŽW­Ç+Ê:”?Þ¶Æ{ÖxÃÚ^Bϸð+PAúSh*ª\3~:bæ¤ÔA~Ò¿&và\11œC(y\21¿Uଳ›P\"ãqž*Ù”&gÇ®Æ?k\16\17²P\4…×Î4y”*\"£@Ñ(\15.´ÚÐÍŠÓ0…63@·'þlƒCWÄ¿\31<¶\7\31Îâ‹u]qð-›ß8„¬ø>t\13?.5^\24ð\15t~;A„NUô\7\31Ê\18ó˜?\31\7¼\13·†¦:”gÌo\0041±ã\29N}\12A/\23\127\0081C]†v>Í.\27ù®\26^\29Ÿ\30å(¹˜¿\14s(ú9\20ý\28Ê{\14Å<‡bžC1ßÆcÇ7Ë–p\ +éᢄ\23\23ñ-wñýáñuÀ\127dÄ\127cÄ\127bÄ\127aÄ\127`\\ñ¿\20ìÿ\3A\2ýø", + ["length"]=30338, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=4053, diff --git a/tex/context/patterns/lang-pt.lua b/tex/context/patterns/lang-pt.lua index 5982d7cb3..82d9d1cf9 100644 --- a/tex/context/patterns/lang-pt.lua +++ b/tex/context/patterns/lang-pt.lua @@ -2,7 +2,9 @@ return { ["comment"]="% generated by mtxrun --script pattern --convert", ["exceptions"]={ ["characters"]="adefhorstw", - ["data"]="hard-ware soft-ware", + ["compression"]="zlib", + ["data"]="xÚËH,JÑ-O,JU(ÎO+\1³\0Gž\0074", + ["length"]=19, ["n"]=2, }, ["metadata"]={ @@ -100,7 +102,12 @@ return { }, ["patterns"]={ ["characters"]="-abcdefghijklmnopqrstuvwxzáâãçéêíóôõú", - ["data"]="1b2l 1b2r 1ba 1be 1bi 1bo 1bu 1bá 1bâ 1bã 1bé 1bí 1bó 1bú 1bê 1bõ 1c2h 1c2l 1c2r 1ca 1ce 1ci 1co 1cu 1cá 1câ 1cã 1cé 1cí 1có 1cú 1cê 1cõ 1ça 1çe 1çi 1ço 1çu 1çá 1çâ 1çã 1çé 1çí 1çó 1çú 1çê 1çõ 1d2l 1d2r 1da 1de 1di 1do 1du 1dá 1dâ 1dã 1dé 1dí 1dó 1dú 1dê 1dõ 1f2l 1f2r 1fa 1fe 1fi 1fo 1fu 1fá 1fâ 1fã 1fé 1fí 1fó 1fú 1fê 1fõ 1g2l 1g2r 1ga 1ge 1gi 1go 1gu 1gu4a 1gu4e 1gu4i 1gu4o 1gá 1gâ 1gã 1gé 1gí 1gó 1gú 1gê 1gõ 1ja 1je 1ji 1jo 1ju 1já 1jâ 1jã 1jé 1jí 1jó 1jú 1jê 1jõ 1k2l 1k2r 1ka 1ke 1ki 1ko 1ku 1ká 1kâ 1kã 1ké 1kí 1kó 1kú 1kê 1kõ 1l2h 1la 1le 1li 1lo 1lu 1lá 1lâ 1lã 1lé 1lí 1ló 1lú 1lê 1lõ 1ma 
1me 1mi 1mo 1mu 1má 1mâ 1mã 1mé 1mí 1mó 1mú 1mê 1mõ 1n2h 1na 1ne 1ni 1no 1nu 1ná 1nâ 1nã 1né 1ní 1nó 1nú 1nê 1nõ 1p2l 1p2r 1pa 1pe 1pi 1po 1pu 1pá 1pâ 1pã 1pé 1pí 1pó 1pú 1pê 1põ 1qu4a 1qu4e 1qu4i 1qu4o 1ra 1re 1ri 1ro 1ru 1rá 1râ 1rã 1ré 1rí 1ró 1rú 1rê 1rõ 1sa 1se 1si 1so 1su 1sá 1sâ 1sã 1sé 1sí 1só 1sú 1sê 1sõ 1t2l 1t2r 1ta 1te 1ti 1to 1tu 1tá 1tâ 1tã 1té 1tí 1tó 1tú 1tê 1tõ 1v2l 1v2r 1va 1ve 1vi 1vo 1vu 1vá 1vâ 1vã 1vé 1ví 1vó 1vú 1vê 1võ 1w2l 1w2r 1xa 1xe 1xi 1xo 1xu 1xá 1xâ 1xã 1xé 1xí 1xó 1xú 1xê 1xõ 1za 1ze 1zi 1zo 1zu 1zá 1zâ 1zã 1zé 1zí 1zó 1zú 1zê 1zõ a3a a3e a3o c3c e3a e3e e3o i3a i3e i3i i3o i3â i3ê i3ô o3a o3e o3o r3r s3s u3a u3e u3o u3u 1-", + ["compression"]="zlib", + ["data"]="xÚ\29ÒArÜ \16…á«è\2YØ“\11A\16\1b\24@X¥ÛÄ^¤ì*¯œl¼õÅò~-úÕД¾Fb\30ž\30ÓôðôX\21WÕ¬ZT7Õ¦úþM¼\16¯Ä\27ñA|\18_Ä;ñoz0¿ˆDÈ3òŒ<#ÏÈ3ò\12žÁ3x\6Ïà\25<ƒgð\12Þ÷Ÿ+1\19\11q#6\2Fùræë™og~œùyæ×™ïgŠ³œÌr2+׊µR­P+ÓBZD\11hñ,œE³`\22ËB9(\7åD9QN”\19åD9(\7å \28”ƒrP\14ÊA9(\15塼(/Ê‹ò¢üFý¼ž9Ÿ¹œÉ\30#<#<#<#<#<#<#<#<#‚ˆ èñ ‡ƒà\0\16\0\2@\0\8\0\1 \0\4€\0\0169cäŒQT\20\21EEQQT„ŠP\17*BE¨\8\21¡\"T„JüG’”$%IIR’”„’P\18JBI(\9%¡$”„²\ +X\5¬\2V\1«€\21`\5X\1V€\21`\5X\1V€\21 sŒ,%KÉR²”,%£d”Œ’Q2JFÉ(\25%£\20¾Ká»\20QET\17UD\21Q\5ª@\21¨\2U \ +T*P\5ê~^ôý¼èûyÑ÷󢫺U½ªNe-¸\2Wà\ +\\+p\5®À\21¸\0027\1M@\19Ð\0044\1\13 \0014€\6Ð\0\26@\3h\0\13 ó’—좺¨.ª‹ê¢:T‡êP\29ªCu¨\14Õ¡:Ô€\26PCÔ\0165D\13QCÔ€\26P\3j@\13¨\0015 \6Ô€z†z†ÚEí¢vQ»¨]Ô\14µCíP;Ô\14µCíP;Ô\14u\0088\4\28\2\14\1‡€\3à\0008\0\14€\3à\0008\0\14€CÀõrUͪÛd.fšµžµžµ^ô{Ñï岨XËT¼\19\127§›¶oÚ¾i«^êÔ.mÚÔÛÔÛÔÛ.:Ëÿ·\"8", + ["length"]=1444, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=307, diff --git a/tex/context/patterns/lang-ro.lua b/tex/context/patterns/lang-ro.lua index 0b1fc8d38..60a602035 100644 --- a/tex/context/patterns/lang-ro.lua +++ b/tex/context/patterns/lang-ro.lua @@ -122,7 +122,16 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijklmnoprstuvxzîășț", - ["data"]=".a3ic .a4n3is .a2z .cre1 .de2aj .de2z1 .g4 .i2a .i2e .i3È› .i4u3 .i3v .î4m .n2 .ni2 .p4 .pre3È™ .s4 .È™4 .u4i .u5ni .z2 a1 2acă achi5 a3e afo3 a3i2a a3i2e a3il ai3s2 a3iu alie6 2alt a2m a2n 2an. a5n2e ani2e ani3È™4 an4s 2anu an2z ao2g ati4a 2atr a5t4u 2aÈ›a 2ață 2au a3ua a3ud a3ug a3ul a3un a3ur a3us a3ute a3u2È› a3uz 2ă1 ă3i ăi2e ă2m2 ănu3 ărgi5 ă3È™ ă4È™3t ă2ti. ăti4e ă3u ă3v ă2zi 1b 2b. ba2È› bănu5 2bc 2bd bi2a. bi2at bi2e 3bii b2l 3b4lim b4lu bo1 bo3ric 2bs 2bt 2bÈ› bÈ›i4ne. bu3 1c 4c. ca3ut că2c cătu5 2cc ce2a ce2È› 2chi. 2ci. ci3ale ci2o cis2 ci3sp ciza2 c4l 2cm 2c5n copia2tă co2È› 2cs 2ct 2cÈ› cu3im 3cul cu2È› 2cv 1d 4d. da4m da2È› 2dc de4sc dez3in di2an dia2tă 2dj 2dm 2d1n do4il 3du e1ac e1aj e1al e1aÈ™ e1at ea2È› e1av ebu5i 2ec eci2a ecla2re edi4ulu e3e ee2a 1efa e1h e3i2a e3i2e e3ii e3il e3im e3in e3i2o e3i3s2 e3it e3i4u e1î 2el e2m emon5 2en e5ne e1o1 e3on e1r 2era 2eră 2erc 2e2s es3co es5ti 2eÈ™ e3È™i etan4È› 2eÈ› e3u eu5È™ 1evit e2x 2ez eză5 ezi3a e2z1o 1f4 2f. 3fa 3făș 2fi. fi3e 3fo 2ft f5tu 1g2 2g. gă3È› 2ghi. 2gi. g4l 2g3m 2g3n go5n 3gu3 2g3v 2h. 2hi. hi2a hi3c hi4u 2h1n 2i1 4i. 3i2ac ia3g4 i2ai i2aÈ™ ia2È› i3că i2ed i3ia i3ie i3ii i3il i3in i3ir i3it iitu2ră i2î 4ila i3le i3lo imateri6 i2n i4n1ed in2gă inÈ›i4i 3inv i3od i3oni io2È› ipă5 i2s is3f 4isp iÈ™3t i5ti iÈ›i2a i3È›i2o i3ua i3ul i3um i3und i3unu i3us i3ut iz3v î2 î3d î3e î3lo îna3 în5È™ î3ri î3rî îr5È™ îș3t î3t î4ti î3È› î4È›i î5È›ii î3z 1j 2j. 2jd 2ji. ji2È› 2jl j4u ju3t 1k 1l 4l. laraÈ›i2 lă2ti lătu5 2lb 2lc 2ld le2a 2lf 2lg 4li. li3a li3e li3o 2lm 2l5n 2lp 2ls 2l3È™ 2lt 2lÈ› 3lu 2lv 1m 2m. 3ma 3mă 2mb mblîn3 3me me2z 2mf 3mi 4mi. 
mi2È› 3mî 2m1n 3mo mon4 2mp 2m3s2 2mt 2mÈ› 3mu mu2È› 2mv 4n. 3na 4n1ad na3in 3nă 2nc n2cis n2ciz 2nd 3ne ne1ab ne1an ne1ap 4nef 4n1eg ne3s2 4nevi 4n1ex 2ng ng3ăt 3ni 4ni. ni3ez 3nî n3j n1n 3no no4È™ n1r 2n3s2 ns3f n4sî ns3po n3È™2 n4È™i 2nt n5ti n5t4u 2nÈ› 5nu nu3a nu3ă nu5m nu3s2 2nz o1ag o2al o2bi. 2oca ocu5i 2od odi2a o3e o3i2 oiecti2 oi3s2p omedi2e. om4n 2on o1o opi3e opla2 oplagi2 o1ra o1ră or2c o1re o1ri o2ric o1rî o1ro or2te. o1ru os5ti o3È™i otaÈ›i4 o5ti ot3od o3u 1p2 2p. 3pa păr3È› 2p3c pecÈ›i2 pe2È› 2pi. pi2e pi3e. pi3ez pi3o pi2È› pi2z p4l po4È™ po2È› 2p3s 2p3È™ 2p3t 2p3È› p4È›i. pu3b4 puri2e pu4È™ 4r. 2rb 2rc 2rd r2e re2bi recizi2 re3s2cr re4È™i 2rf 2rg 2r1h 4ri. ri3a ri4ali ri3eÈ› ri3ez ri5mi 2ri3un ri3v 2rk 2rl 2rm 2r1n rna2È› rografi6 2rp 2r1r 2rs2 r3sp r3st 2r3È™ 2rt rtua2le 2rÈ› ru3il ru3sp 2rv 2rz 1s 4s. 5sa 5să să4m să4È™ 2sc 4sc. 3s2co 3se se2a se4e. ses2 se3sp se4È™ 4s2f 5sfî 3si si3p 3sî 3s4l 4sm s1n 3so so3ric 2sp 2st sto3 5su su2È› 2È™ 4È™. 3È™a È™a2È› 3șă2 3È™e 1È™i 4È™i. 5È™ii 5È™il 3È™in 3șî 4È™5n È™nu5 3È™o È™2p È™2ti 4È™ti. 4È™3tr 3È™u 1t2 4t. ta3ut 2t3c 2t3d te2a te5ni teri6ală te3s2p 2t3f 4ti. ti3a ti3e 3tii. 3til 3tin ti2È› 2tî. t4l 2t3m 3tol 3tor to2to 3tru. 3trul 3truo 4t3s2 2t3t tu3a tu3im 4t3un tu4È™ 4t3z 1È› 2È›. 3È›a 3ță È›e2È› 2È›i. 3È›ia È›i3a. 3È›ie 3È›ii 3È›il È›i2È› 3È›iu È›u3 È›u5i 2u1 6u. u2a. u2ad u3au uă3 uăs2 u2bia u2b3l u2b1o ub3s2 u3e 4ugu u3i2a u3i2e u3in u3ir u3is u3it u3i2È› u3iz u2l u3la u3lă u3le u3lii u3lî u3lo umi5r ur2z u2s us2pr u4st u3È™ u4È™t u2to 3utor u3ui u3um 1v 2v. ve5ni ve2È› ve2z 2vi. vi2È› 2v1n vorbito2 3vr 1x 2x. 3xa 3xă 3xe xe2z 3xi 3xo 3xu 1z 2z. za2È› 2zb 2z2g 2zi. zi2an zi2ar 3zii 3zil z4m 2z1n 3z2ol 3zon zu2È› 2z2v z3vă", + ["compression"]="zlib", + ["data"]="xÚ-—]rû.\15…·¢\21xÆàüöãØ$¥\127Ûx0d:¾í*:ÝC7Ñ^dYïsÈ{Á1áC\8éH\"ÝèãdÝ8l>\30|ÝiÝ”CoÝ\28ÜøÞ>'¿îƒuÑ‚\0øç78T¯þúߟaµns´\8ì¬ÞsðÏ/ë\14úÏ/ \14\17¸làélìÍÓߧÓ[¼Ø胷äéè\24a\16.6F\1278u«K\12ÿض\20\27ÝJÛø±u6^6­Þâ\11½Ž\27·áÐ,»6.5&w·±Äad°d¶”¡Ò}~\13QĬõU‡×Yp\23,‚M\5‡ Hµê°\1ŸÓÜßgo\127Ÿ>\2RáïÓ­\14Ü0Ïßg¾s=f±ÅßçðüòE\11Jìø –û*xhüŒÖ_Í];»Ž:à*1\23\6&ÚlWŒÓ5,Â`þ\26£]ÝBgXâj`µkêi>GmÂ\ +×B“°çw\28¶€\0004ë'\27¦Î&ÝÇp„›„E‡Mtñ¾€]\14\7u 0E?.K\0naàØÁs¤?,¬Zi—ͦ´ÇÑ\21Ì:¥—\16ô˜ÐcâÇT=ªú\9ãNõ5û°~¶aîl\30aÒÜîîæÉæ0\28ÂÓÇÍfî-|\9vó;óæžÁ4À\21?W\11ý8\9Þ\5‹\0˃ÅB“J÷aáZ/Ñ\\`å$¾…i\25]\14\22fH\5\3t\0122A\31nÌöoŒ´u—`\20,‚U°µ™$\20[ù\20Á u~\1278ˆ¥06¬iþå—\0139=Ž\ +>ñ³Ï\12çQ «…Œë‚;,\28~Bìq)RW7H\28^à·L\20t#ø\19ê…É>šu\30­–<°Ðãå \7Ä|¤|xÄü#[O\18ÿÀf\31Øõ\3¥üG°\15íò\31XïƒS>`\13BÎÎÎ×Sô$/œü…àÎX{‡\ +aØ)‹Ÿ\24ü$¶xeAÒÓ‰\3')õ|qþt\15ã\1ð÷ù?Zõ—e", + ["length"]=3439, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=647, diff --git a/tex/context/patterns/lang-ru.lua b/tex/context/patterns/lang-ru.lua index af6eeb62f..54fe7cf6a 100644 --- a/tex/context/patterns/lang-ru.lua +++ b/tex/context/patterns/lang-ru.lua @@ -2,7 +2,14 @@ return { ["comment"]="% generated by mtxrun --script pattern --convert", ["exceptions"]={ ["characters"]="абвгдежзийклмнопрÑтуфхцчшщыьÑÑŽÑ", - ["data"]="аÑ-беÑÑ‚ бездн биз-неÑ-мен буй-нак-Ñке вбли-зи взба-ла-муть-ÑÑ Ð²Ð·Ð´Ñ€ÐµÐ¼-нешь во-до-Ñли-вом волж-Ñке воп-лем вопль воÑÑ‚-ра во-ткать во-ткем во-ткешь во-тку во-ткут впол-обо-ро-та впол-уха вÑе-во-лож-Ñке вцÑÐ¿Ñ Ð³Ð°-рем-но-го го-ло-дра-нец грÑÑ Ð´Ð²Ñƒ-зу-бец днепр добре-ем до-бре-ем-ÑÑ Ð´Ð¾Ð±Ñ€Ðµ-ет добре-е-те до-бре-е-теÑÑŒ до-бре-ет-ÑÑ Ð´Ð¾Ð±Ñ€Ðµ-ешь до-бре-ешь-ÑÑ Ð´Ð¾Ð±Ñ€ÐµÑŽ до-бре-ÑŽÑÑŒ добре-ÑŽÑ‚ до-бре-ÑŽÑ‚-ÑÑ Ð´Ð¾-бре-Ñти до-бро-дÑÑ‚ до-броÑÑŒ до-броÑÑŒ-те до-бро-ÑÑÑ‚ до-бро-шу домну доп-пель драх-му дрейф-лю дрейфь-те еди-но-жды зав-Ñек-то-ром за-мру за-члиÑÑŒ 
из-древ-ле изо-тру ин-ког-ни-то иÑкр ка-за-шек казнь кольд-кре-мом корн-па-пир кÑендз лик-бе-зом ло-шадь-ми людь-ми лю-Ñ-Ñом ма-зу-те ме-ти-лам ме-ти-ла-ми мно-га-жды морщь-те на-бе-крень навз-ничь на-вÑкид-ку на-вÑтре-чу нагл на-изуÑÑ‚ÑŒ на-иÑ-ко-Ñок наи-ме-нее на-иÑ-коÑÑŒ на-обо-рот на-от-рез на-Ñупь-ÑÑ Ð½Ð°-угад на-уголь-ник не-оÑÑ‚-ра неÑ-лаÑÑŒ неÑ-лиÑÑŒ нет-то не-уду обидь-ÑÑ Ð¾Ð±Ð¾-шлоÑÑŒ об-ра-Ñти од-на-жды оÑлаб-ла ото-мÑÑ‚ÑÑ‚ ото-мщу ото-тру отру отруÑÑŒ паб-ли-Ñи-ти па-на-ме па-на-мец па-ра-так-ÑÐ¸Ñ Ð¿Ðµ-ре-вру пе-ре-ме-жать пе-ре-ме-жать-ÑÑ Ð¿Ðµ-ре-шла пиÑ-Ñ‡Ð°Ñ Ð¿Ð¾-вÑе-дне-вен по-гре-мок по-до-тру по-иÑ-ти-не по-лу-то-ра-Ñта по-лу-Ñвью по-млад-ше помни по-мниÑÑŒ помни-те по-мни-теÑÑŒ по-мно-гу по-мру пол-вто-ро-го пол-шка-фа по-на-доб-люÑÑŒ по-трафь-те преж-де прид-ти при-шла при-шлоÑÑŒ про-тру про-хлад-ца пÑко-ва пыл-че раз-орем-ÑÑ Ñ€Ð°Ð·-оре-теÑÑŒ раз-орет-ÑÑ Ñ€Ð°Ð·-орешь-ÑÑ Ñ€Ð°Ð·Ð¾-тру ра-зу-мом резв-люÑÑŒ Ñ€ÑÑ„ÑÑ€ Ñан-узел Ñдрейф-лю Ñе-го-Ð´Ð½Ñ Ñме-жат Ñо-блю-Ñти Ñо-лжешь ÑоÑÑ‚-рим ÑоÑÑ‚-ришь ÑоÑÑ‚-рю ÑоÑÑ‚-Ñ€ÑÑ‚ Ñо-ткать Ñо-ткем Ñо-ткешь Ñотку Ñо-ткут ÑроÑ-лаÑÑŒ ÑроÑ-лиÑÑŒ Ñтрем-глав так-же тве-ре-зо-го те-ле-ате-лье тер-но-Ñли-вом троп-лю тьфу узу-фрукт умнем умнет умнете умну умру уÑлышь-те ушла фо-то-пле-нок ца-ре-дво-рец че-реÑ-чур чер-но-Ñли-вом чреÑл чуж-доÑÑ‚ÑŒ шеÑÑ‚ÑŒ-де-ÑÑÑ‚ ÑŽÑом Ñдо-зу-бе Ñрем-но-го", + ["compression"]="zlib", + ["data"]="xÚuVkzÚ0\16¼Š/ ;bH\2ý’†«8Æ\14*Øæ\ +Ò:3+ÉÂiÿ€w´Ú÷C¡‹­\11_aŒmÜ7ü\8×0„™Ÿ>\\]˜yä„\3‚ñ\16þ\16ìÂÍÅ6ÜÂØ„\30¼÷à\29nzRWÐ\3„Ÿ)\30â>~€7žílˆ;ÈšLò)~\16]\28t.\20(9\0Â$\28ôw¥g\9\15Ê\29óé#Ü“\0šï ¹3qq+\0295Wt¾–¨U9éx¨¿\25‹\30âa€ÃÏ\23\15vvÚ­'à{\21\13\11Gg~ÜqR™\28ßðùˆm\19.\8Çê:8/a!jwœâÒYTÞ€Ç]üÍk\3d\28\16Zþ|ÙÙ@&\8Ýñ\19ÆQª3ï\24Å\ +Ha¯¹èYMÓ¥qsQ\24®~lp†ø§ÀÓO¾SÉxጟOLñ³È_‘ý–¥ÒWP$šeV …îütwÙØ.zëè\"ÙÏ÷\28,?˜QS˜ó'jîhªÔ”¥øªÂnR-ÿ‰/È¡\28ÌtQ7B‚O\9ÿÆù{ƒ\22èBÏ\ +\25ÙCûTZÌÞÕ:fGÑüŽGöƒ9£f4ù½z@JÒ.ø0;\20Ý\18.Tç%¹ámÔ3*åFÙ\18z¢b\1 gÊæ-x\23\6G^¦p2ƒp\0z¦ÿ\29\127¼$Éô\25Ž]\27Ú\7/X™”®KwÅ\17Ò\7D\1’,\4ç™rñ7# þI–¡¾-b“ÕŸ×\0±ó\ +H\"¦ÜB]\9ëD[ã¯\18ù™g²K>™§\28\\½5\31\9qjà\27Ò:O6\26õš£òf—Zµ3Ò#êæ\30¡Šfp†,i+íOà\26›'8ï³g°,KƒW³ìA ÍoóØÆæ\127\ +\12øZD¤Å\\†µ\29F5:öÊeEz³ž¬æ.VR3\21¶Uý’â\30´ñlMl\16Þ`iÍ“ÎÓÄÓ šÞr˜ÆB³Šêm¶Òc’V¿ÍtÛžfåHO3úKÁë «\17Ÿ‘}~n]”×¾IíNwøÝ—.¾æªdÞ´]\29-Ô7ö¢¸Ç¸³=þüJ•\22{—Òax…|Ààƒ­´\23f\18fï‰LZ\3Sù¬PU´\17vÛº‡s\14‚ß­HŒ'\0215Zf±Žç3Øv\12g\6‹7¹5Ø‹v§©ÇÒµïV\11%sü¯WGãd\29ךlIëŽCÃ>Õzù\29Å’´Ëg{Rå7+€íÃ÷/\31.\16u", + ["length"]=3191, ["n"]=184, }, ["metadata"]={ @@ -91,7 +98,104 @@ return { }, ["patterns"]={ ["characters"]="-абвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘", - ["data"]=".аб1Ñ€ .аг1ро .ади2 .аи2 .ак1Ñ€ .аль3Ñ .ар2Ñ‚1о2 .аÑ1то .аÑÑ‚1Ñ€ .ау2 .би2о .во2б3л .во3ж2д .го2Ñ„ .дек2 .де1кв .ди2ак .ди1о .до3п .до3Ñ‚2 .епи3 .зав2Ñ€ .за3м2н .за3п .иг1Ñ€ .изг2 .из3н .ии2 .ик1Ñ€ .ио2 .ио4на .иÑ3 .ле2о .ле2п3Ñ€ .леÑ1к .ль2 .люÑÑ‚1 .ме2ж1у2 .ми1ом .мо2к1 .му2шт1 .на1в .на3Ñ‚ .на3ш2 .не3вн .не1др .не1з2 .не1Ñл .не1Ñ2ц .не3Ñ‚ .ноÑ1к .нук1л .обо3ж2 .ово1 .ог3н .оз4 .оÑ2ка .оÑ2п .оÑ3пи .от1в .от1ро .от1ру .от1уж .по3в2 .по3ж2 .поз2н .проÑ2 .ра2Ñ3Ñ‚ .ре2бр .ре2з3в .ри2Ñк .ри2ч .ро2з3в .ро2Ñ3л .ро2Ñ… .Ñепт2 .Ñк2 .ÑÑ‚2 .Ñу2ж .те2о3 .тиа3 .ти2г .тиг1Ñ€ .ти2о .уб2 .уд2 .уе2 .уз2на .ук2 .ум2ч .уо3 .уп2 .ур2в .уÑ2 .ут2Ñ€ .ую2 .хо2Ñ€3в .че2Ñ1к .ÑŽÑ1 4а3а аа2п аа2Ñ€ аа2ц а1б абе3ÑÑ‚ а3бла аб2лю аб1ри а3бу ав1в а1ве ав3зо а1ви ави2а а1во аво1Ñ Ð°2вот ав1ра ав2Ñе а2вт а1ву а2вх а3в2че 2ага ага1Ñ2 а2гд а2гити а2гле аг2ли а2Ð³Ð»Ð¾Ñ Ð°Ð³2лот 2аго а3гу а1д 2адв а2две ад2жи ади2од а2дл а2д1обл ад1ро а2д1ру аду3ч ад2ц а2дын а1е ае2го ае2ди ае2л а2еп ае2ре ае2Ñ Ð°Ð·Ð°4ш3 азв2 аз3вез аз1вл азг2 аз1др аз1об аз2о1бр а2зовь 
а2золь а1зори аз2о1Ñ Ð°Ð·1Ñ€ а1и аи2г1 аи3гл а2их а1к ак1в 1акк ак2л ак3лем ако1б2 2Ð°ÐºÐ¾Ð½Ñ Ð°ÐºÐ¾3Ñ‚ 2акри ак1Ñ Ð°1ла а3лаг а1ле 2алек а3ли ало1з а1лу алу2ш алуш1Ñ‚ а1лы а2льщ а1лю 2ама амб4 2амет а2минт ам2нет 2амо амо1з2 амои2 а2мч ана2дц а2н1а2ме а2наф ан2дра а2н1о2б ан1о2Ñ…Ñ€ ан1Ñ€ ан2Ñп анÑ1у ан2Ñур а2н1уз а1нь 2а1о ао2д ао2к ао2Ñ€ ао2Ñ Ð°Ð¾ÑÑ‚1 а3пла ап2лом 2апо апо4Ð²Ñ Ð°Ð¿Ð¾3ч2Ñ‚ ап2ра ап1рел а1ра ара2ÑÑ‚ ар2бок ар2вал 1аргу а1ре аре1дв аре1ол ар2жа а1ри а1ро ар2тор ар2Ñ‚1Ñ€ а1ру ар1Ñ… а1ры а1рю а1Ñ€Ñ 2аÑ1к аÑ3ми аÑ3но 1аÑÑиг аÑÑ‚1ву аÑ3тем аÑ2тин аÑ2Ñ‚Ð¸Ñ Ð°Ñ1тоо аÑ1тух а1Ñтье аÑ2шед аÑ2ÑˆÐµÑ Ð°1Ñьи а1та 1атак ат3ва ат1ви ат1ву 2атез а1ти а1то ат1обе а2томн ато2ш ат1рах ат1ри а1ту ат2Ñ… а1Ñ‚Ñ‹ а1тье а3тью а3Ñ‚ÑŒÑ Ð°1Ñ‚ÑŽ а1Ñ‚Ñ Ð°1у а2уб ау2д ау3до а2уле аут1Ñ€ ау2Ñ… ау2ч ау3чь ауÑ1 а2Ñ„1л ах2а ахми2 ах3Ñ Ð°1ч 2ача а2чл ач1Ñ‚ а2шл аÑ2ли а2ÑÑ€ аю1та а1Ñ Ð°Ñ2б аÑ2в аÑ2з 1ба ба2бв ба2г1Ñ€ ба2др ба1з ба3зу балю1 ба2о баÑ3м ба1ÑÑ‚ ба1Ñ‚Ñ€ 2б1б б1в бвы2 бг2 2б1д 1бе 3бев бе2гл бе2гн бе2д1Ñ€ 3бее 3бе2з без1а2 без5д4 бе3зи без3н без1о2 без1Ñ€ бе2Ñ1к беÑ3п бе2Ñ1Ñ‚ беÑ3те беÑ3ти 3бец 2бещ 2бж б1з2 1б2и 3биа би2б 2биж 3бик били3Ñ‚2 3био би2об би2од би2он би2ор би2тв би1Ñ… 2б3к б1л 1благ 1б2лаз б3лази б2лан 1б2лее б3лен б2леÑ1к 1б2Ð»ÐµÑ Ð±2луд 1б2луж 2блы 2б2ль 2б3лю. б2люд б2люе б2люл 2б3люÑÑŒ 2Ð±Ð»Ñ 2б3н 1бо бо1бра бо3вш бо2гд бо1дра бо1з2 бо1л2ж бо1Ð»ÑŒÑ Ð±Ð¾3м2л бо2мч бо3мш бону1 бо1ру бо2Ñа бо1Ñк бо3Ñко бо3Ñти 3бот бо2тв бот2Ñ€ боÑ2Ñ€ 2бр. б3раб б2рав бра1зо 1б2рал 2б1рам б2ран 1Ð±Ñ€Ð°Ñ Ð±2рать б1рах 1б2рач 2б3Ñ€Ð°Ñ 1б2ред б1рей б1рек б2рем б2рех б2рид б2рито б2риты 1б2роди б1рол б1ром. 1б2роÑи бро2Ñ1к 2Ð±Ñ€Ñ Ð±1ру 3Ð±Ñ€ÑƒÐºÑ 2брь 1б2рю 2б3рю. б1Ñ€Ñ 2б1Ñ2 б3Ñк бÑ4л б1Ñ‚ 1б2у бу2г1Ñ€ бук1л бу1Ñ 2бф 2б1Ñ… 2бц 2б1ч 2бш 2бщ 1бы бы2г1 бы2Ñ Ð±Ñ‹Ñ1к быÑÑ‚1 1бь 2бь. 2Ð±ÑŒÑ 2бьт бÑ1Ñ€ 3б2ÑŽ бю1та 1Ð±Ñ 1ва ва2бр 3ваг ва2д1Ñ€ вадь2 ва3ж2д ва1з ванÑ2 ва1ÑÑ‚ ва2ÑÑ‚Ñ€ ва1Ñ‚Ñ€ вах1 3вац 3Ð²Ð°Ñ 2в1б в1ви в1вр 2вг2 в1д в2дох 1вев 3вег вед1Ñ€ ве3ду 1вее 1вез 3везе 3везл вез2у 1вей. ве2п1 2верд 1Ð²ÐµÑ Ð²Ðµ2Ñ1к ве2ÑÑ‚1в вет3Ñ€ 1вец 1вею 1Ð²ÐµÑ 1в2з2 взг2 взд2 взо1б взъ2 взъе3д ви2аз ви2ак ви2ар ви2а1Ñ2 виа1Ñ‚ ви3аф ви2гв ви2гл 1виз 1винт 1винч ви1о ви1Ñ2ни виу3 ви2Ñ„ 2в1к вк2л 3в2ÐºÑƒÑ Ð²1л в2ла 2в3лаб в2лев в2лек в2лет в2леч 2вли в2Ð»Ð¸Ñ 2влю в2люб 2Ð²Ð»Ñ 2вм 1вме 2в1н 4в3на в2Ð½ÐµÑ Ð²Ð½Ð¾1 в3ну. 3в2нук 3в2нуч в3ны во1б2 во2б3ла вов2 во3вк 1вод во1дв во1др во2ер во2жж вои2Ñ1 1вок во3м2 воп2 во1ру 2ворц 2ворь воÑ1к во1Ñм во1Ñн воÑ3пе во2ÑÑ‚Ñ€ вот2Ñ€ 1вох во1хл во3Ñ…2Ñ‚ 1вою 2вп2 2вр. 2вра. в2рав 2в1рам в1Ñ€Ð°Ñ 2в1рах 2врац 2вре. 2в1рен 1врид 1в2риз в1рии в1рик в1рил в1Ñ€Ð¸Ñ Ð²1рит 2в1ро вро3Ñ‚2 2в1ры 1врю в1Ñ€Ñ 2в1Ñ2 3вÑе3 в3ÑÐºÐ°Ñ 4в3Ñки 4в3Ñку 3в2Ñп 3в2ÑÑŽ в1Ñ‚2 вто1б2 вто3ш 1вуа ву3г 1ву1з 2вуи 2ву1к ву3п ву1Ñ2 ву2Ñ…1а вух3в ву1чл вф2 1вхо 2вц 2в1ч 2вш 3в2шив 2вщ въ2 1вы вы3г2 вы3зн вып2 вы3Ñ‚2 вых2 вы3ш2л 2вь. 1вье 1вьин 2Ð²ÑŒÑ 2вьт 1вью 1Ð²ÑŒÑ 1в2Ñ1 1в2ÑŽ 1Ð²Ñ 1г г2а га1з га1ÑÑ‚2 га2у 2г3б гба2 г1ви 2гг г3дан 2г3ди 3ге. ге2б1 гено1 ге2об ге2од ге1ор 2г3ж 2г1з г2и ги2бл ги3бр ги2гр ги1Ñл гиÑÑ‚2 2г1к 2гла. г2лав г1лай г1лами 2глаÑÑŒ 2Ð³Ð»Ð°Ñ Ð³1ле г2лет 2гли. г2лин 3г2лиф 2гло. г3лобл 2глов 2глог 2глое 2глой 2глою 2глую 2г1лы г2лÑж 2глÑк 2г3м г2нав г2нан г3не. г2нев г3нен г3неп г3Ð½ÐµÑ Ð³2нир гнит2Ñ€ г2ное г2нои г2Ð½Ð¾Ñ Ð³3Ð½Ñ Ð³Ð¾1б2 го2вл го3ж2д го1з го2зл гоз2н гоиг2 3гой г2ол гоми2 го2Ñ1а го2Ñд го1Ñкл го1Ñн го1Ñпа 2готд гоу3Ñ‚ го1чл 3гою 2гп 2гр. 
г1рае г1рай г1рар г1рег г1рек г1рец гри4в3н г1рик г1рил г1рин г1Ñ€Ð¸Ñ Ð³1рич г1ров г2роз г1рок г1рон г1роп г1рот г1роф гру2п г1рыв 2грю г1Ñ€Ñе г1Ñ€Ñл г1Ñ€ÑÑ‚ 2г3Ñ2 г4Ñа г4Ñб 2г3Ñ‚ гу1в гу1Ñ Ð³Ñƒ2Ñ1к 2гф 2г1ч 2г3ш 2г3Ñ 1да да2б1 да2ген да2гр да1з да2о даÑÑ‚1Ñ€ дат1Ñ€ 2д1б дв2 д1ве 1дви 2д1вид 2двиз 2двинт 2двинч 2д1Ð²Ð¸Ñ 2д1вит д3вк д1вл 2двод д1воз 1дворь 2Ð´Ð²Ñ 2дг2 2д1д2 1де де1б2л де1б2Ñ€ 3девр 3дез де2з1а2 де2зи дез1о2 де2зу деио2 де1кл 3деме де2од део3п де3пл дераÑ2 де2Ñ3в деÑ2к де2ÑÑ€ де1хл 2дж. д2жам д2ж3м 2Ð´Ð¶Ñ 2д1з2 1ди ди2ад диа2з ди2али ди2ало ди2ар ди2Ð°Ñ Ð´Ð¸2об дио3де ди2ор дио1Ñ Ð´Ð¸1оти дип2 ди2пи ди3пт ди2Ñ1Ñ‚Ñ€ диу3 ди3Ñ„Ñ€ ди3фто ди1Ñ… 2д1к д1л д2лев 2д3м2 2д1н д3на днеа2 3дневн 4д3но1 дно3д2 дноÑ2 4д3ны 3д2нÑш 1до 2д1о2бед до2бл 2д1обла до1б2ра дов2л до3в2м до1д2 до3дн до3ж2д до1з доз2н дои2Ñ€ 2докт 2долим до2м1Ñ€ доп2 до3пл 2допле до2пре до2руб до1Ñ Ð´1о2Ñен д1о2Ñин 2д1оÑно доÑ2п 2дотд 2дотл дот2ри 2д1Ð¾Ñ‚Ñ€Ñ 2дотъ до3Ñ‚ÑŒ 3дохл до2ш3в до3ш2к до2шлы до2щу 2дп 2др. д1раб 1дравш 2дразв 1д2разн д1ране д1рар д1ра2Ñ3 д1рах д1рач д2раю д1ре д2реб 2д3реж 2дрез д2рел д2рем 1дрема 1дремл дрем3н 1дремы 2д3рен дре2Ñк д2реÑÑ Ð´1ри д2рий 2дрин д2рип д2рих дро2г3н д1род д1рое 1д2рож 2д3роз д1рой д1рол д1рон д1Ñ€Ð¾Ñ Ð´1рот д1рою д1руб 1друг 1друж д1рум д1рую д1ры 2дрыв 1д2рыг д1Ñ€Ñ Ð´2Ñ€Ñб 1д2Ñ€Ñг д2Ñ€ÑÑ… 2д1Ñ2 дÑк2 дÑ3кн 2д1Ñ‚ 1ду дуб3Ñ€ ду3г 2д1уд ду2да ду2о дуп1л дуÑ1к д1уÑл ду1ÑÑ‚ ду2Ñта 2дут1Ñ€ ду1Ñ… ду2чи дуÑ1Ñ‚ 2дф д1Ñ… 2д3це 2дцу 2дцы 2д1ч 2д3ш2 2дщ 2дъ дъе2м 1ды 2дыг ды2г1Ñ€ 2дыд 2дыме 2ды2Ñ1 2дыт 2дыщ 2дь. 1дье 2дьк 2дьт 1дью 1Ð´ÑŒÑ Ð´ÑŒ3ÑÑ€ 1д2ÑŽ 1Ð´Ñ Ðµ1а еа2д еади3 еа3до еа2з еан2д1Ñ€ еат1Ñ€ 2еб еба2Ñ Ðµ1бра еб1рен еб1ри е1бро еб1ров еб1ры е2б3рю е1ве 2евер е1ви е3в2ме ев2ним ев2нÑÑ‚ е1во 2евол евра1Ñ 2е1вре ев1рее ев1рей ев1Ñ€ÐµÑ ÐµÐ²1ри е2вт е1ву е1вх ев2хо е1вь ега1Ñ2 ег2д е2глан е2гле е2гли е2гло ег2на ег2но 2ег2Ñ€ ед1во ед2ж е1дже е1д2лин едноу3 ед1опр е2дотв е2дох е2д1ощ е1дру е2дру. е2ду2б ед1убо е2дуве е2дуг е2Ð´ÑƒÑ ÐµÐ´1уÑÑ‚ 2е3душ е2дын е1е е2евид ее2в1Ñ€ ее2ги ее1Ñ2 ее2ÑÑ‚ ееÑÑ‚1Ñ€ ее2Ñ… е2жг е4ждев еж3ди 2еже е2ж1Ñ€ еза2вр езау3 е1з2ва езд1Ñ€ е3зе еззу3 е3зит ез1об ез1о2г е1зом ез1оп ез1о2Ñ€ ез1от ез1ош ез2Ñ€Ñ ÐµÐ·1у2д ез1у2к ез1уп ез1ÑƒÑ ÐµÐ·Ñƒ2Ñо езу2ÑÑ‹ ез1у2Ñ… ез1уча е3Ð·Ñ Ðµ1и еи2г1 еи2д еи2м еи2о еиÑ1л еиÑ1Ñ‚Ñ€ е1ка ека2б ек2з е1ки 2е1ко 2е1кр ек2ро ек1Ñк ек1Ñте е1ку е1ла е1ле еле3Ñк еле1Ñц е1лу е1лы е1лю е3Ð»Ñ ÐµÐ¼Ð¸3д2 еми3к емо1Ñ 2емуж е2мч 2ÐµÐ¼Ñ‹Ñ Ðµ3на ен2д1Ñ€ 2е1нр енÑ2 ен3ш2 е1Ð½Ñ 2ео е1о2б еоб2ро е2о3гл ео2гро е1од ео3да ео2де еоде3з ео2до е1о2ж е2ои ео3кл е1ол. е1ола ео3ли е1олк е1олы е1оль е2ом е1он. е2она е2они ео3но е1Ð¾Ð½Ñ ÐµÐ¾Ð¿2 е1опе ео2пр ео4пу е2о3ро еоÑ2 е1о2Ñви ео1Ñк е1оÑм е1оÑн еоÑÑ‚1Ñ€ ео3ÑÑ… е1отл еот2ру е1о2ч е1о2щ епат2 епа1Ñ‚Ñ€ 2епе епиÑ2к е2пл е3пла еп1леш е3п2лод еп1лу е3плы еп1лющ е4пн 2епо е4п3Ñ Ðµ4пт е1ра ер1акт е2рв ер1ве е1ре е3ре. ере3до ере1др ере1к2 ере3м2н ере3п ере1Ñ…4 е1ри ерио3з е1ро еро2б ер1обл 2ерови 2ерокр 2ерол еро3Ñ„2 ер3Ñк е1ру е2Ñ€1у2п е1ры е1рю е1Ñ€Ñ Ðµ3Ñ2а еÑ2ба е1Ñг е1Ñк е2Ñ1ка. еÑ1кал е2Ñке е2Ñков е4Ñ1ку. 
2еÑл еÑ1Ð»Ð°Ñ ÐµÑ2лин еÑ2лов еÑ2лом е1Ñлу е1Ñлы е1Ñ4м е3Ñо 2еÑп еÑ2пек еÑ3пол е2Ñпу е1ÑÑ‚ еÑ2тан е2Ñтл е3Ñту еÑ2чет е1та ет1ве ет1ви е1тво 2етеч е1ти е1то ето1Ñ ÐµÑ‚1Ñ€ ет2Ñ€Ñ Ðµ1ту е1Ñ‚Ñ‹ е1тье е3тью е3Ñ‚ÑŒÑ Ðµ1Ñ‚ÑŽ е1Ñ‚Ñ Ðµ1у2 2еуб еуб3Ñ€ еуз2 еук2ло ефи3б2 еф2л еф1ре еха2Ñ‚ ех1ато ех3вал ех3лоп ех1об ех1опо ех1ре ех1ру ех1у2ч 2ецв е1чл е2шл еÑ2 ею2г е1Ñ ÐµÑ2з 1ж жа2бл жа2бр жа1з жат1в 2ж1б2 2ж1в жг2 2жга ж2ги 3ж2гл ж2гу 2ж1д ж2дак ж2дач 3ж2дел 4ждеме ж2деп ж2ди 4ж2дл ждо3 жду1 4ждь 3ж2Ð´Ñ 3жев же3д2 же1к2в же1кл же1о2 же3п2 же1Ñ2 же3Ñк 2жжа ж2же 2жжев 2ж1з2 жи1о 2жирр 2ж1к 2ж1л ж2м ж3ма 2ж3мо 2ж1н жно1 2ж1об 2ж1о2Ñ‚1 жоу3 жоу1Ñ 2жп2 жпо1 ж2ру 2ж1Ñ 2жф 2жц 2ж1ч 2жъ 2жь. 2Ð¶ÑŒÑ 2жьт 1за1 заа2 заб2 за2в1ри за2вру з1аву заг4 з1адр зае2д зае2Ñ… за3ж2д за3з2 з1акт за3мне 3з2ан за3на занÑ2 зап2 зар2в за3Ñ€2д зар2ж заÑ2 заÑÑ‚2 зат2 за3тк зау2 зах2 зач2Ñ‚ за3ш2 заÑ2 з1б2 2з3ва. з2вав з3валь з2ван 2Ð·Ð²Ð°Ñ Ð·1ве з2вез з1ви з3в2к з1вла з1во 2звол 1з2вон з1вр 1зву 2з1вую з1вь 2зг з3га з2гли зг2на з2гну з1д2в з2деш здож3 1зе зе2б1 зе2ев зе2од 2зж2 з3з2 1зи 3зи. 3зий. з1инт зи2оз зи2оно зи1оп 3Ð·Ð¸Ñ Ð·Ð¸3Ñ‚2Ñ€ зиу3м 3зич 2з1к зко1 зко3п2 з1л з2лащ з2лоб з2лоп з2лор з2лющ 2зм2 з3мн з1н 2зна. з2нав з2нае з2най з2нак з2нан з2нат з2наю 2Ð·Ð½Ð°Ñ 2зне 2з3ни 2зно 2зну 2з3ны з2обе зо2би 1зов зо3в2м зо2гл зо1др 1зое зо1з2 1зои 1зой. 1зок. з1Ð¾ÐºÑ 1зол2 зо1лг зо1лж зо3м2 1зом. 2зомн 1зон 2зонр 1зоо зо2о3п зо2Ð¾Ñ Ð·Ð¾2па з2опл з2опр з1орг 1з2о3ре зоÑ2 з1оÑн зо1Ñп зо2тв з2оте з1отк з2ото зот2ре зот2ри 1зох зош2 зо2ши 1Ð·Ð¾Ñ 1зою з1ра з2рак зра2Ñ Ð·2рач з2рен з1Ñ€ÐµÑ Ð·2риш з1ро зро2Ñ3 з1ру з2рю з1Ñ€Ñ 2з1Ñ 2зт з1ти 1зу 3зу. 2з1у2бе зу2б3Ñ€ зу1в 2зуве 2зу2г 3зуе 2з1уз3 2зу1к 3зуме з1у2мо 2зуп зу2пр з1урб з1у2те зу2Ñ‡Ð°Ñ 2зц з1ч 2зш зъе2м 1зы 2зы2г1 зы2з 2зыме 2зымч 2зы2Ñ1 2зыщ 1зье 1зьи 1зью 3Ð·ÑŒÑ 1з2ÑŽ 1Ð·Ñ Ð¸1а и2аб и2ав иаг2 и2агр и2аде и2ади иа2зов иа2му и3ана иа2нал ианд2 иао2 и2ап иа1Ñ2к иа1Ñта иа1Ñто иат1ро и3ату и2аф и2а1Ñ… иа2це 2и1б и2б1Ñ€ 2иваж 2и1ве и2в3з и1ви 2и1во и1в2Ñ€ и3в2Ñ Ð¸1ву ив2хо 2ивы иг2д и3ге 2игл и2гле и2гли и2гн игни3 иг1рен иг1ро иг1ру иг1ры и2г1Ñ€Ñ Ð¸1дв и2дей и1д2ж иди1ом иди1от ид1Ñ€ и1дь и1е и2евод ие2г ие2д ие3де ие2зу и3ени ие1о2 иепи1 ие2Ñ€ и3ж2д из1в2 из2гне 1из1д из2нал и1зо изо2о из1Ñ€ и1и ийÑ2 и1к и3к2а ика1Ñ2 ик2ва и2кви и2ÐºÐ»Ñ Ð¸3ко ик1ро ик1Ñк ик2Ñ1Ñ‚ и3ку и1л и2л1а2ц ило1Ñк илп2 и2л1у2п и2ль ильт2 2има и2мено и2мену 2имень и3ми имои2 им3пл и2м1Ñ€ и2мч им2ча инд2 1инж ино2к3л ино3п2л ино1Ñ Ð¸Ð½Ñ2 1инÑп 1инÑти 1инÑу 1инф 1инъ и1об ио2бо ио2вр и2ог и1од ио2де и1оз ио3зо и1Ð¾ÐºÑ Ð¸1оле и1он и3онов и1опт и1ор и3ора ио1ру ио2Ñа ио3Ñкл ио1Ñ2п и1ота ио2Ñ‚1в и1отк и1Ð¾Ñ‚Ñ Ð¸Ð¾ÑƒÐ³2 ио2хо и1ош 2ип ипат2 ипа1Ñ‚Ñ€ ип2Ð»Ñ Ð¸Ð¿3н ипо3к2 и1Ñ€ ира2ÑÑ‚ и2Ñ€1ау и2рв и2рж ири2Ñк ириу3 иро1з2 1ирр иÑан2д1 и2Ñб и2Ñд иÑ1к иÑ3ка. иÑ3кам иÑ3ках иÑ3ке иÑ3ки иÑ3ков иÑ3ку. и2Ñлам иÑ1лы иÑ3ме иÑ3му иÑ3но иÑо2Ñк и2Ñ3пр и4ÑÑ Ð¸1ÑÑ‚ и2ÑÑ‚1в и2Ñтл иÑ1Ñ‚Ñз и1Ñьи и1Ñ‚ ита2в ит3ва и2Ñ‚1ве ит1ви ит1ву и2тм и2Ñ‚1Ñ€ ит2Ñ€ÐµÑ Ð¸Ñ‚3ром и2Ñ‚1уч и3тью и3Ñ‚ÑŒÑ Ð¸1у2 иу3п иф1л иф2лю и2Ñ„Ñ€ иха3д и2Ñ…1Ð°Ñ Ð¸Ñ…2ло2 ихлор1 и3Ñ…2о ихо3к их1ре их1ри и1ху и1ч иш2ли и2шлы и2шт ию4л ию2н ию2Ñ‚ ию3та и1Ñ Ð¸Ñ2д 2й1 йд2 й2д3в йно1 й2о1Ñ Ð¹Ð¾2Ñ‚Ñ€ йп2л й2Ñб й3Ñка йÑ2ке йÑ4мо й2Ñ3му й2Ñн й2Ñ3Ñ„ й2Ñш й2тм й2хм йх2Ñ3 йÑ1 ка2бл ка2бри 1кав к2ад ка3дне ка2д1Ñ€ 1кае каз3н ка1зо 1кай 1кал. 1кало 1ÐºÐ°Ð»Ñ 1кам 1кан ка2п1л ка2пре кар3Ñ‚Ñ€ 3к2Ð°Ñ ÐºÐ°1ÑÑ‚ 1кат ка1Ñ‚2Ñ€ 1ках ка2ш1Ñ‚ 1каю 2к1б к2вак к2Ð²Ð°Ñ Ðº2ваш к1ви к2воз к1ву 2кг 2к1д кда2 1ке 2кеа ке2гл кед1Ñ€ ке2Ñ1к ке2ÑÑ‚1 2к1з 1кив ки1о киоÑ1 ки2пл ки1Ñ2ни 1кит 2к1к2 кк3Ñ 2к3ла. 
2к3лаÑÑŒ 2к3ле. 2клем к3лем. к3лен к1лео 2к3ли. 2к3лив к2лик к2лин 2к3Ð»Ð¸Ñ Ðº3Ð»Ð¸Ñ 2к3ло. к2лоз к3лом 2к3Ð»Ð¾Ñ ÐºÐ»Ð¾3Ñ‚ 1клук к3лы 2кль 1клю 2к3лю. 2клÑ. 2клÑм 2клÑÑ… 2км 2к1н 3к2ниж к2ноп 3к2нÑж к2о ко1б2ри 1ков 3кова 1код ко1др 1коз 1ÐºÐ¾Ð»ÑŒÑ 2комин 3ÐºÐ¾Ð½Ñ ÐºÐ¾Ð¿2Ñ€ ко2Ñ€3в ко1ру 1ÐºÐ¾Ñ ÐºÐ¾1Ñк коÑ3м ко1Ñп 1котн ко2Ñ„Ñ€ кохо2Ñ€3 1кош 2кп 2кр. к1рел кре1о кре2Ñл к1реч 1криб к1рид к2риз кри2о3 к2рит к1рих к1роа к1роб к2рое к1рок к1роо к1рор к1Ñ€Ð¾Ñ Ðº1роф к1рох к1Ñ€Ð¾Ñ ÐºÑ€Ñƒ1Ñ Ðº1Ñ€Ñд 2ÐºÑ ÐºÑанд2 к2Ñ3в кÑ3г к2Ñ3д к2Ñиб к1Ñки кÑ1кл к1Ñко кÑ3м к3Ñо к1Ñтам к1Ñтан кÑ3те к1Ñто кÑ1Ñ‚Ñ€ к1Ñту к3Ñу 2к1Ñ‚ кта2к 3к2то. кто1Ñ ÐºÑ‚2Ñ€ к2у ку1ве 3куе 1куй 1ÐºÑƒÐ»Ñ 3кум куп1л ку2п1Ñ€ 1кур ку3ро куÑ1к ку1ÑÑ‚ 1кут ку3Ñ‚ÑŒ 1куче 1куют 3кующ 2кф 2к1Ñ…2 2кц 2к1ч 2кш 1кь к2ÑŽ 1ла. 2лабе ла2бл 2лаго ла2гр ла2д1аг 1лае ла3ж2д ла1зо л2ак лак2Ñ€ 1лам. 1лами. лан2д1Ñ€ ла1Ñта лаÑÑ‚1в ла1Ñте ла1Ñто ла2ÑÑ‚1Ñ€ ла1Ñту ла1ÑÑ‚Ñ Ð»Ð°1Ñ‚2Ñ€ лау1 ла2ÑƒÑ Ð»Ð°2Ñ„Ñ€ 1ла1Ñ… 1Ð»Ð°Ñ 2лб л1бр л1ве л1ви л1во л1ву 1л2гал л2гл лго1 2л3д2 1ле. ле1вл лев1ра ле2г1л ле1дж ле3до ле1з2о3 ле1зр лек1л 2лемн 1лен ле1онт ле1о2Ñ Ð»Ðµ2Ñб ле2Ñк ле4Ñка ле1Ñ2л ле1Ñпе ле1тв ле1Ñ‚2Ñ€ 1лех ле1Ñ…Ñ€ л1зо 1ли лиа2м 3ливо 3ливы лиг2л ли2гро лие3Ñ€ ли2кв 2лимп лио1Ñ Ð»Ð¸2пл лиÑ3м 2л1иÑп ли2тв лиу3м ли2Ñ…3в ли1хл ли1Ñ…Ñ€ 2л1к лк2в л2к1л 2л1л л2ль ллю1 2лм 2л1н лни2е 1ло ло2бл ло1б2Ñ€ 2Ð»Ð¾Ð²Ð¸Ñ Ð»Ð¾2вл 3ловод ло2г3д лого1Ñ Ð»Ð¾1др 2лоен ло1зв ло2к1а2у ло2кл лок3ла 3Ð»Ð¾Ð¿Ð°Ñ Ð»Ð¾2рв 2л1орг ло1ру лоÑ1к ло1Ñ2п 2лотд лот2Ñ€ ло2шл 2лп 2л1Ñ2 лÑ3б л1Ñ‚ 1лу. лу1бр лу1в лу3г лу1д4Ñ€ 1луе лу1зн лу1кр 1лун луо2д лу3п2ло лу1Ñ Ð»Ñƒ3Ñ‚ÑŒ 1лую 2л3Ñ„2 2л1Ñ…2 л2Ñ…3в 2лц л1ч 1лы. 1лые 1лыж 1лый 1лым 1лых. 4ль. 2льд 3лье 3льи 2льк 2льм 2льн 3льо 2льÑк 1льÑти 1льÑÑ‚Ñ 2льт 2льц 2льч 1льща 1льще 1льщу 3лью 3Ð»ÑŒÑ Ð»2ÑŽ 1лю. 1люж 1люÑÑŒ лю1та 1Ð»Ñ 3лÑ. лÑ1ви 3лÑво 3лÑвы 2лÑд 3лÑм лÑ1ре лÑ1ру 3лÑÑ… 1м ма2вз 3маг ма2гн ма2др ма2дь ма1зо ма2к1Ñ€ 2м1алл ман2д1Ñ€ маÑ3л ма1Ñ4Ñ‚ ма2тоб ма2Ñ‚1Ñ€ ма2у маф2 3мач ма2чт 4м1б м3би мб2л м3Ð±Ð»Ñ 2м3в2 2мг2 3м2гл 2м1д меан2 ме2ег ме2ел ме2ж1ат ме1зо ме2Ñ1к ме2ÑÑ‚1Ñ€ меч1Ñ‚ 2мж 2м1з2 ми2гре ми1зв 2мизд ми1зн ми2кр мик1ри ми2оз ми1опи ми2ор ми1Ñ2л 2м1к2 3мкн 2м1л м2лее м2лел 2мм 2м1н 4м3на мне1д 3м2неш 4мное м2нож 4мной 4мном м2нор 4мною м2нут 4м3ны мо1б2 мо3вл 3мод мо1др мо2жж мо1зв мо1зр моиÑ1Ñ‚ мо2к3в мо3м2 3мон 3моп мо1ру моÑ1ка мо1Ñм мо1Ñн мо1Ñ2п 3моти мо2Ñ‚1Ñ€ 3моф 2мп мп2л м1раб 2мри 2м1ро м1ры 2м1Ñ Ð¼Ñ2к мÑ2н м2Ñ1ор 3м2Ñти 2м1Ñ‚ му1Ñ2к му1Ñ4л му1ÑÑ‚ мут1Ñ€ му3Ñ‚ÑŒ 2мф мфи3 2м1Ñ… 2мц м2чав м2чал м2чит м2чиш 2мш2 2мщ 3м2ще мым1 мы2мр мы2Ñ 2мь. 
2Ð¼ÑŒÑ Ð¼ÑŒÑŽ1 2Ð¼Ñ Ð¼Ñ1Ñ€ м2ÑŽ мÑ1Ñ€ мÑ1ÑÑ‚ 1на наби1о наб2Ñ€ на1в2Ñ€ наг2н на3жд на1з2 на2ил на2ин на2и1Ñ2 4накк на3м2н нап2л на1рва на1Ñ€2ви на1Ñ2 на1тв на1Ñ‚2Ñ€ н1а2Ñ„Ñ€ на1Ñ…2 2нач на3ш2л 2нащ наÑ1Ñ€ 3Ð½Ð°Ñ 2н1б2 2н1в 2нг н2г1в нги2о нг4л нго1Ñ Ð½Ð³2Ñ€ 2н1д н2дак н2д1в нде3з нде2Ñ Ð½Ð´2ж н3д2з н2дл нд1раг нд1раж нд2ре нд2риа н2Ð´Ñ€Ñ Ð½Ð´2Ñп н2дц 1не не1б2 не1в2д 2невн не3Ð²Ð½Ñ Ð½ÐµÐ³2 3нед не1д2л нед2о не2дра не1дро не3ду не3е нее2д не3ж2д не1зв не1з2л не1зн не1зо не1зр неи2 не1к2в не1кл не3м2н 3не1о2 не2ода не2ол не3п2 не1Ñ€2ж не2Ñ€1от неÑ2к не3Ñ2н не1Ñ2п неÑÑ‚2 не1Ñ2Ñ… не1Ñ2ч не1Ñ‚2в не3Ñ‚2л не1Ñ‚2Ñ€ 3неу не2Ñ„Ñ€ не1Ñ…Ñ€ не3шк неÑ2 2н1з2 нзо1Ñ 1ни ни3б2 ни2ен 3ний ни2кл нила2 ни2л1ал ни2л1ам 2нинÑп 2н1инÑÑ‚ ни1Ñл ниÑ3п ниÑÑ‚2Ñ€ ниу3 ни1Ñ… 3ниц 3нищ 2н1к нк2в нк2л нкоб2 нко3п2 н2к1ро нк1Ñ Ð½1л 2н1н нно3п2 1но ноб2 но1бр но2вл но1дв но1др но2ер но1зв но2зд но3з2о но1зр но3кн 3номе ном3ш но2рв но1ру но1Ñкл но2Ñли но1Ñ2п но2Ñч 2нотд но3Ñ„2 ноÑ2 н3п2 2н1ре 2н1ри н1ро 2н1Ñ Ð½2Ñ3в н2Ñг нÑ2ке н2Ñкон н2Ñл н3Ñла н2Ñ3м н2Ñн н2Ñ1ок н3Ñ2пе нÑÑ‚2Ñ€ нÑу2Ñ€ н2Ñ3Ñ„ н2ÑÑŠ3 2н1Ñ‚ н2Ñ‚1в нти1о2к н2тм нт2ра н2Ñ‚Ñ€1а2г нтр1аж н2трар нтраÑ2 нт2ре н2трив н2трок нт2ру нтр1уд нт2ры н2Ñ‚1Ñ€Ñ 1ну нут1Ñ€ ну1Ñ… 3ную 2нф2 н1Ñ… нхо1 2нц 2н1ч н2чл 2нш нш2Ñ‚ 2нщ 1ны 3ны. 2нь. 1нье 1ньи 2ньк 1ньо 2Ð½ÑŒÑ 2ньт 2ньч 1нью 1Ð½ÑŒÑ Ð½2Ñ 1н2ÑŽ 2н3ÑŽ2Ñ€ 1Ð½Ñ Ð½Ñ1ви 2о1а2 о3ав оап1 2оба 2обио об2лев об2лем о1блю 1обм обо1л2г обо3м2 обо2Ñ 2обот об1Ñ€ о2бра. о1брав о1бран 1объ 2обь о1в о3вла о3в2ло ов3но о3в2нуш о2в1ри ов2Ñе ов3Ñко ов2Ñ‚ о2вх ог2 2о3ге ог3ла. ог3ли. ог3ло. о3Ð³Ñ€Ñ 2одан од1вое о3де. 1о2деÑл 2оди3а 2о3дим од2лит о2д1о2пе одо3пр о2д1о2пы о2доÑи о2д1отч о1драг од1раж од1раз од1рак о1драл од3реб о1дроб од1ров о2д1у2ч о2дыма о2дыму о2дын о1дь о2дьб о1е ое1б о2е1вл ое2д о3ежек ое2жи ое1о ое1Ñ2 ое2ÑÑ‚ о2ето ое2ц о3жди о3ж2ду оза2б3в 2озав о1з2ва оз2вен оз2ви о1з2Ð²Ñ Ð¾Ð·2гло оз2дор о1здр озе1о оз3но о1зо о2з1об 2озон о2зоп озо1ру оз1уг о2зым о3Ð·Ñ‹Ñ Ð¾3и ои2г1 оиг2н оие3 ои2з ои2м ои3мо ои2о 2ой ойÑ2 о1к 2о3кан ок2в 2ок2л о3клю око1б 2о3кол око3п2л ок1Ñк 1окт 2окти 2окум о3ла ол2ган о1ле 1олимп о3ло о1лу олу3д2 о1лы о1лю о3Ð»Ñ Ð¾3ма ом2бл 2оме о3м2нем о3м2нет о3множ ом1ри ом2ч ом2ше о2мь о3Ð¼ÑŒÑ Ð¾3на онд2 оне3Ñ„2 оно1б о1нр онÑ2 он2тру о1о2 о2ол оо3Ð¿Ñ Ð¾Ð¾Ñ3м ооÑÑ‚1Ñ€ о2оти о2оф о3пак о3пар о2пле. о2п1лей о2пли оп2лит оп2ло оп3лю. о2Ð¿Ð»Ñ Ð¾3плÑÑ Ð¾Ð¿Ð¾4Ð²Ñ Ð¾Ð¿Ð¾Ð·2н опо2ш3л оп2ри о3п2те оп2то о1ра ора2Ñ3 ор2б3л о1Ñ€2в о1ре 2о3рег оре2Ñк о1ри ор1иÑп о1ро оро2Ñ3л ор2Ñ‚Ñ€ о1руе о1рук ор1ÑƒÐºÑ Ð¾1Ñ€ÑƒÑ 2орц о1ры о1рю о1Ñ€Ñ Ð¾3Ñад оÑа3ж2 оÑ2б о2Ñ3ба о2Ñ1ка. оÑ3кар оÑк1во о2Ñке оÑ1ки о2Ñки. о2Ñков оÑ1кой оÑ1ком о1Ñ2коп оÑ1кою о2Ñ1ку. оÑ1кую о1Ñ2л оÑ3лей оÑ3лог оÑ3лых оÑ3ми оÑ3Ð¼Ð¾Ñ Ð¾1Ñ2ним оÑ2нÑл оÑ2Ð¿Ð°Ñ Ð¾1Ñ2пу оÑ2Ð¿Ñ Ð¾Ñ2Ñв оÑ2Ñ3м о1ÑÑ‚ оÑ2та о3Ñтра о2Ñуч 2оÑÑ… оÑ2цен о1Ñ2ч о1Ñ2шив о1Ñ‚ отв2 от3ва от1ве от1ви от1вл 1отг 1отд 2о3тек о3тер 2о3тех о3ти о3ткал о2тм от1раб от1рад от1раз отра2Ñ Ð¾Ñ‚1реж от1рек от1реч от1реш от1ри от1род от1рое от1рок от1Ñ€Ð¾Ñ Ð¾Ñ‚1роч от1руг от3Ñм оту2а от1у2ч 1отх о3тью о3Ñ‚ÑŒÑ Ð¾1у2 оуп2 оуÑ2к оу3та оу3то 2офаш о3фе 2офит 2офон о2фори 2офот о2фри 2охи ох1лы о2Ñ…Ð»Ñ Ð¾Ñ…2ме 2охор о1Ñ…Ñ€ о1ху о2цо оча1Ñ Ð¾Ñ‡2л оч1ле о3чли о1чт о2ч1то ош3ва ош2ла ошпа2к3 ош2Ñ‚ оÑ1ти 2ою о1Ñ Ð¾Ñ2в оÑ2д оÑ2з оÑ2ри 1п пави3 пав3л па2вь па2др па2ен па1зо паÑ1л паÑ1та па1Ñте паÑ1то паÑ1ту па2Ñ1Ñ‚Ñ‹ па1тро па2ун па3Ñ„ па1ху па2шт 2п1в2 2п1д пе1 пе2дв пе2д1ин пе2з пе3за пе3зо пе2к1ла пе2ль пе4пл пери1о пе2Ñ1к пе2Ñн пе2ÑÑ‚1Ñ€ пе2Ñц пе2Ñч пе2Ñ‚Ñ€ пе2шт пиаÑÑ‚1 пи2ж3м пи2к1Ñ€ 3пинк 3пиÑÑ 4п3к 2пл. 4пла. пла1Ñ Ð¿1лем. 
п1Ð»ÐµÐ¼Ñ 2плен п2ленк п1ле2о плеÑ1к п1лею 2плив 3п2лик 2пло. 2плов 2плог 2плый 2плым п1лын п1лых 2плю. п1лют п2лÑÑ Ð¿2лÑш 2п1н п3на п3но1 п3ны по1б2 по3вл по3в2Ñ Ð¿Ð¾Ð´1во по2д1о2к подо3м2 пое2л пое2Ñ… по1зве по1здо по1з2л по1зн пои2щ 3пой 3полк по3мно по3мну 3по3п2 п1орг пор2ж по1ру по1Ñ4 3поÑл по3ÑÑ Ð¿Ð¾Ñ‚2в пот2Ñ€ по1Ñ…2 по2шло по2шлы по2ÑˆÐ»Ñ Ð¿Ð¾Ñ3м 2пп2 ппо1д 2пр. 3прев пре1з прей2 пре1л пре1ог 3прет при1 при3в приг2 при3д2 при3к при3л приль2 прип2 п2риц про1бл прод2л про3ж2 про1з2 п1розо 3прои про3п профо2 2Ð¿Ñ€Ñ Ð¿2ру 2п1Ñ2 3п2Ñал п3Ñин 3п2Ñих п3Ñо 2п1Ñ‚ п2Ñ‚3в 3п2тих п3ту 3пуб пуг3н пуÑ1ку пу1ÑÑ‚ пу3Ñ‚ÑŒ 2пф2 пх2 2пц 4п3ч 2пш 2пщ 2пь. 2пьт пÑ1ра п2ÑŽ1 1ра. раа2 ра2бл 1рабо ра2б1Ñ€ 1Ñ€Ð°Ð²Ð½Ñ Ñ€Ð°2гв ра2гл рад2ж радо1б2 ра2дц ра2жур ра2зий ра2зуб рак2в 1ракиз ра2к3л 1ралг 1рамк 1рамн ра2нох ран2Ñц ра2п1л раÑ3к2 1раÑл раÑ3п раÑ1Ñ‚ 1раÑта раÑ3Ñ‚2л ра2так рат1в ра1Ñ‚2Ñ€ 2рахи 1ращи 1раю 1Ñ€Ð°Ñ 2раÑÑ‚ 2Ñ€1б рб2ла Ñ€2бле рб2ло рб2лю рбо3Ñ 1Ñ€2вав Ñ€3вак Ñ€3вар Ñ€3вата Ñ€3веж Ñ€2вео 1рвет Ñ€1ви Ñ€3вин Ñ€2вит Ñ€1во рво1з2д Ñ€1вь 2рг Ñ€2гв Ñ€2г1л Ñ€2гн рг2Ñ€ 2Ñ€1д рда1Ñ Ñ€2д1в рд2ж рди2а Ñ€2дл рдоÑ2 Ñ€2дц 1ре. ре1вр рег2Ð»Ñ Ñ€ÐµÐ³2н ре2д1о2п ре2Ð´Ð¾Ñ Ñ€ÐµÐµ2в рее2д рее2л ре3ж2д 1резк ре1з2л ре1зна 1ре1зо ре1зр рез2у 1рейш ре1к2л 1рекш ре3мно 3ремо ремо2г3 1ренк 1рень ре1он ре1оп ре1о2Ñ€ ре1о2Ñ„ ре1ох ре1о2ц 1репь ре3Ñ€2 реÑ1ки ре1Ñл ре1Ñ2п реÑ2Ñ3м ре3Ñта ре3Ñто ре1Ñч ре1тв ре1Ñ‚2Ñ€ реуч3Ñ‚ ре1чт ре3шл Ñ€3жа. Ñ€3жам Ñ€3жан Ñ€3ж2д 2рз Ñ€1з2в Ñ€1зо ри3а риб2 ри3бр ри3в2н 2риги ри2гло ри3г2н 2ридж ри1д2Ñ€ рие2л рие3Ñ€ риз2в рик2Ñ€ ри3м2н ри3м2ч Ñ€2ин 1Ñ€Ð¸Ð½Ñ Ñ€Ð¸Ð¾2з рио2Ñ Ñ€Ð¸1от ри3Ñ€2 ри1Ñ2 ри3Ñб 2риÑп ри3Ñтв ри3Ñ‚2Ñ€ 1риу ри2фл ри3Ñ„Ñ€ ри1хл 1риц 1риш риÑти2 2Ñ€1к Ñ€2кв Ñ€2к1л рк1Ñ 2Ñ€1л2 Ñ€2ль рлю1 Ñ€3Ð»Ñ 2рм Ñ€2мч 2Ñ€1н рнаÑ4 рне3о рне1Ñ2 рно3Ñл 1ро. ро2блю ро1б2Ñ€ ро2вл 1рогол 1рогру ро1дв ро3д2з ро1дл род2ле ро2д1от ро1др 1родь рое2л рое2м рое2Ñ… 1розар ро1з2в ро1зр 3Ñ€Ð¾Ð·Ñ‹Ñ Ñ€Ð¾Ð¸2Ñ3 1рокон 1рокр 1Ñ€Ð¾Ð»Ð¸Ñ 1ролиц 1ромор 1ронаж 1ронап 1Ñ€Ð¾Ð½Ð¾Ñ Ñ€Ð¾Ð¾Ð¿1Ñ€ ро2плю ро3Ð¿Ñ 2Ñ€1орг ро1Ñ€2ж ро1ру ро1Ñк ро2Ñки ро2Ñку 1роÑл ро1Ñм ро1Ñ2п роÑ2Ñ„ 1роÑш 1роÑÑŽ 1рот2в 1ротк рот2ри 1роу роуг2 ро2Ñ„1ак ро2Ñ„Ñ€ ро1хл рош2л ро3шн 1роÑз 2рп рп2ло Ñ€2плю 2Ñ€1Ñ€ 4Ñ€1Ñ Ñ€Ñ2к Ñ€2Ñн Ñ€Ñ2п Ñ€Ñтв2 Ñ€3Ñтвл 2Ñ€1Ñ‚ Ñ€2такк Ñ€2Ñ‚1акт Ñ€2Ñ‚1в рт3ва рт2вл Ñ€2тм Ñ€2Ñ‚1об рт1орг рт1ра рт2ран рт1ре рт1ри ртуÑ1 Ñ€2Ñ‚1у2чи Ñ€3тью рт1Ñч 1ру. 1руба руг3н ру2дар 1ружей 2Ñ€ÑƒÐºÑ 1рул руÑ1к руÑ3л ру1Ñта руÑÑ‚1Ñ€ ру3Ñ‚ÑŒ 1руха 1рухо 1ручн 2рф рф2л 2рх Ñ€2хв Ñ€2Ñ…1ин рх1л Ñ€1Ñ…2ло Ñ€2Ñ…1оп рх1Ñ€ 2рц Ñ€2цв 2Ñ€1ч Ñ€2чл Ñ€2чм 2рш Ñ€3ш2м рш2Ñ‚ 2рщ 2ръ 1ры. 1рыб ры2дв 2рыз ры2кл 1рым ры2Ñ1к ры2Ñ…1 2рь. 1рье 1рьи 2рьк 2Ñ€ÑŒÑ 2рьт 1рью 1Ñ€ÑŒÑ Ñ€Ñ1л Ñ€2ÑŽ 1рю. 1Ñ€ÑŽÑ Ñ€Ñ1ви 1Ñ€ÑÑŽ 1Ñа Ñа2бл Ñа2дь Ñа2кв Ñа2кл 2Ñ1альп Ñ1апп 2Ñ1арк 2Ñ1атл Ñа1Ñ‚Ñ€ Ñа2ун Ñа2Ñ„1Ñ€ Ñа1Ñ…2 1Ñб2 2Ñбе3з2 Ñбезо3 Ñбе3Ñ2 2Ñ3бу Ñ2бы 2Ñбю 1Ñ2в 2Ñ3вен Ñг2 Ñ2ги Ñ2гн Ñ2го 1Ñд2 Ñ2да Ñ2де Ñ3ди Ñ2до 1Ñ2е Ñег2н Ñе1з2 Ñе1кв Ñек1л Ñе2к1Ñ€ ÑекÑ4 Ñеми1 Ñере2б Ñе2Ñк Ñе2ÑÑ‚ Ñе3Ñта Ñе3Ñте ÑеÑÑ‚1Ñ€ 1Ñ2ж Ñ1з 1Ñ2и 3Ñиз Ñи1ом Ñи1оп Ñи2пл Ñи1Ñ… 4Ñк. 2Ñкам Ñ2канд 1Ñ2каф 2Ñках Ñк2ва Ñ2кви 3Ñкино Ñк2л Ñ2ÐºÐ»Ñ Ñк3лÑв 2Ñкн Ñ1кон 2Ñкона Ñ2ÐºÐ¾Ð¿Ñ 2Ñкош Ñк2Ñ€ Ñ1кра 2Ñкриб Ñк1Ñ 2Ñкуе 2Ñ3ла. 1Ñлав 1Ñлад Ñ1лам 2Ñ3Ð»Ð°Ñ Ñ3лев Ñ3лее Ñ1лей Ñлео2 Ñ1лет Ñ3лею 2Ñ3ли. 2Ñлиц 2Ñ3ло. 
Ñ2лож Ñ3лому 2Ñ3Ð»Ð¾Ñ 2Ñ3лую 2Ñ3лые 2Ñ3лый 2Ñ3лым 2Ñль Ñ1Ð»ÑŽÑ 2Ñ3Ð»Ñ Ñ2м 1ÑÐ¼ÐµÑ Ñ4Ð¼ÐµÑ Ñ3мур Ñ1н 1Ñ2наб Ñ2Ð½Ð°Ñ 2ÑÐ½Ð°Ñ 1Ñ2неж 2Ñ3ник 2Ñно Ñно1з2 2Ñную 2Ñ3ны 1Ñо Ñо1б2Ñ€ Ñ2ов Ñов2Ñ€ Ñо1д Ñо1з2 Ñо1л2г Ñо3м2 Ñо2рие Ñо1ру Ñо1Ñк Ñо1Ñ2п Ñо2ÑÑŒ Ñот2Ñ€ Ñо1чл Ñош2л Ñп2 Ñ2пав Ñ2пее Ñ2пел Ñ2пен Ñ2пех 1Ñ2пец Ñ2пеш Ñ2пею Ñ2пим 2ÑпиÑÑ Ñ3пн Ñпо1з2 2Ñпол Ñ2Ð¿Ð¾Ñ 2Ñпь 1ÑÑ€ 2ÑÑ€. Ñ2раб Ñра2Ñ Ñ1рат Ñре2б1 Ñре3до 2Ñ1Ñ ÑÑанд2 Ñ2Ñб ÑÑ3во 4Ñ5Ñи Ñ3Ñ2к ÑÑ2л Ñ2Ñн Ñ3Ñ2не Ñ2Ñори ÑÑ2п ÑÑÑ‚2 ÑÑ2ч 2ÑÑ‚. 1Ñта. 2Ñтб 4Ñтв. ÑÑ‚1вер 2Ñтвл ÑÑ‚2вол ÑÑ‚2Ð²Ñ Ñ2те 1Ñ4те. 1Ñтей 1Ñтел 1Ñтен. Ñ3тет. Ñ3тете Ñте3Ñ… Ñ3теш 1Ñти Ñ2тие Ñ2тии 2Ñтимп 2Ñтинд 2Ñтинф 2Ñтинъ Ñ2тич Ñ2тишк Ñ2тию 2Ñтк ÑÑ‚2ла Ñ3Ñ‚2ле 2Ñтли ÑÑ‚2лил ÑÑ‚2лит 2ÑÑ‚Ð»Ñ 2Ñтм 2Ñтн 1Ñто. Ñ2то1б 1Ñтов 1Ñтог Ñто2г3н 1Ñтод 1Ñтое 3Ñ2тои 1Ñток 1Ñтом 1Ñтон 2Ñторг 2Ñторж 2ÑÑ‚Ð¾Ñ€Ñ 1ÑÑ‚Ð¾Ñ 1Ñтот Ñ2тоц 1Ñтою 2Ñтп 2ÑÑ‚Ñ€. ÑтраÑ2 4ÑÑ‚Ñ€Ð°Ñ 2Ñтред ÑÑ‚1рей 2Ñтрив ÑÑ‚1риз 2Ñтрил 2Ñтрищ ÑÑ‚1роа Ñ4Ñ‚1ров ÑÑ‚1род ÑÑ‚1рох ÑÑ‚2руб ÑÑ‚1руш 2ÑÑ‚Ñ Ñ1тут 1Ñтую 2ÑÑ‚Ñ„ 2Ñтц 1ÑÑ‚Ñ‹ Ñ2тыв Ñ4Ñ‚ÑŒ 2ÑÑ‚ÑŒ. 2ÑÑ‚ÑŒÑ 3Ñтью 1ÑÑ‚ÑŒÑ 1ÑÑ‚Ñм 1ÑÑ‚ÑÑ… 1Ñу Ñу2б Ñуб1а2 Ñуб1о Ñу1в Ñу3гл Ñу2ев Ñу2з Ñу1кр Ñума1 Ñупе2 ÑуÑ3л ÑуÑ3п Ñу1ÑÑ‚ Ñут1Ñ€ Ñу2Ñ„3 Ñу1Ñ… 1Ñ2фе Ñ1Ñ…2 1Ñ2хе 2Ñца Ñ2цена 2Ñ3ци 2Ñцо Ñч2 1Ñча Ñ2Ñ‡Ð°Ñ Ñче2Ñ1к Ñ3чив 2Ñчик Ñ2чит Ñ1чл 2Ñчо Ñш2 Ñ3шн 1ÑÑŠ2 Ñъе3д Ñъе3л 1ÑÑ‹ ÑÑ‹2г1 ÑÑ‹2з ÑÑ‹2п1л ÑÑ‹2Ñ ÑÑ‹Ñ1ка 2ÑÑŒ. 1Ñье 2Ñьк 2Ñьт 1Ñью 1ÑÑŒÑ ÑÑ1Ñ€ Ñ2ÑÑ 1Ñ2ÑŽ ÑÑŽ1Ñ 1ÑÑ 2ÑÑз ÑÑ3Ñ‚ÑŒ та2бл таб2Ñ€ та1ври 1таг та2гн та1з2 так3ле Ñ‚2ан та2пл 1Ñ‚Ð°Ñ Ñ‚Ð°1ÑÑ‚ та1Ñ‚Ñ€ 1тащ 2Ñ‚1б2 2тв. 2Ñ‚2ва Ñ‚1вей Ñ‚1вел Ñ‚1вет 2тви Ñ‚1вое Ñ‚1во1з 2Ñ‚1вой Ñ‚1Ð²Ð¾Ñ 2твою 2Ñ‚1вр 2тву 2твы 2Ñ‚Ð²Ñ 2тг 2Ñ‚1д 1Ñ‚2е те2гн те1д те1зо 3тека тек1л 3текш теле1о тем2б1 те2о3д те1ох те4п1л те2рак тере2о 3терз тер3к 3Ñ‚ÐµÑ€Ñ Ñ‚Ðµ2Ñка те2Ñ1ки те2Ñ1ко те2Ñку теÑÑ‚2 те2хо 2тж 2Ñ‚1з тиа2м ти2бл ти3д2 ти1зна тии2 тииÑ1 тик2 тила2м Ñ‚1имп 2Ñ‚1инв Ñ‚1инд 2тинж 2тинф ти1Ñ2л ти3Ñтв ти3Ñ„2Ñ€ ти1Ñ…Ñ€ 2Ñ‚1к2 3Ñ‚2кав 3Ñ‚2кан 3Ñ‚2кет 3ткн 2Ñ‚1л тло2б Ñ‚2ль тм2 тми2Ñ Ñ‚Ð¼Ð¸ÑÑ‚1 Ñ‚3мщ 2Ñ‚1н то2Ð±ÐµÑ Ñ‚Ð¾1б2л 2тобъ то2вл то1д то3д2Ñ€ то1з2 ток2Ñ€ 2Ñ‚1омм 2Ñ‚Ð¾Ð¼Ñ 2тонг 1торг 1торж 1Ñ‚Ð¾Ñ€Ñ Ñ‚Ð¾1ру 1торш то1Ñ2н то1Ñ2п то1Ñ2ц 2тотд то3тк 1тощ 2тп2 тпа1Ñ‚ Ñ‚1рага 2Ñ‚1раж 2трб 2трв 2трг 2трд трдо2 Ñ‚1реа 1требо 1требу Ñ‚1ребь Ñ‚1реве Ñ‚1ревш Ñ‚1рег Ñ‚1ред Ñ‚1рее Ñ‚1реза Ñ‚1резн треп1л 3тре2Ñ Ñ‚Ñ€ÐµÑ1к Ñ‚1реÑÑ‚ Ñ‚1рету 3Ñ‚2ре2Ñ… Ñ‚1рец Ñ‚2решь Ñ‚1рею 1триб Ñ‚1рив три2г1л Ñ‚1рил Ñ‚1рим 4Ñ‚Ñ€Ð¸Ð½Ñ Ñ‚Ñ€Ð¸1о Ñ‚1рит три3Ñ„ Ñ‚1рищ 2трм 2трн Ñ‚1рогл Ñ‚1роид 2трой тро3пл Ñ‚1рор Ñ‚1роÑо тро3Ñ‚ 4Ñ‚3роц 2трою 2трп 2трр 1труб Ñ‚2руд 2трук Ñ‚2рум Ñ‚2рут 2трф 2трщ 2тръ Ñ‚1ры Ñ‚1Ñ€Ñ. Ñ‚1Ñ€Ñв 2Ñ‚1Ñ€Ñд Ñ‚1Ñ€Ñе Ñ‚1Ñ€Ñж Ñ‚1Ñ€Ñй Ñ‚3Ñ€Ñк Ñ‚1Ñ€ÑÑ‚ Ñ‚1Ñ€Ñщ Ñ‚1Ñ€ÑÑ 4Ñ‚1Ñ2 Ñ‚2Ñб Ñ‚2Ñ3д Ñ‚Ñеп2 Ñ‚2Ñ3м Ñ‚2Ñ3п 2Ñ‚1Ñ‚ Ñ‚2тм ту2гр ту2жин 2Ñ‚1у2пр ту1Ñл ту1ÑÑ‚ ту2фл 1туша 1тушо 1тушь 1тущ 2Ñ‚Ñ„ 2Ñ‚1Ñ… 4тц 2Ñ‚1ч 2тш2 2тщ 2Ñ‚ÑŠ Ñ‚Ñ‹2г1 Ñ‚Ñ‹2Ñ1к 2Ñ‚ÑŒ 4Ñ‚ÑŒ. 3тье 3тьи Ñ‚ÑŒ2м 4тьт тью1 2Ñ‚Ñ Ñ‚2ÑŽ Ñ‚ÑŽ1Ñ‚ 1Ñ‚Ñг 1Ñ‚Ñж 1Ñ‚Ñп 2Ñ‚Ñ2ч у1а у2але у2Ð°Ñ Ñƒ3бел убо1д убоÑ2 уб1Ñ€ 1убра уб3рю 1у2быт у1ве. у1ви ув2л у1во у1ву у2гв у2гл у2гн уг2на уг2не уг1ре уг1Ñ€Ñ ÑƒÐ´Ð°1Ñ ÑƒÐ´2в уд1рам уд1ро у3ду у1е уе2д уе2л уе1Ñ ÑƒÐµ2Ñ1к уеÑ2л уе2Ñ… у2жж у1з2в у1зо узо3п у1и у1ка ук1в у1ки у1ко уко1б у1ку1 у1ла у1ле у1лу у1лых у1лю у2мч у3на ун2д1Ñ€ у1нь у1о уо2б уо2в у2оза уо2к уо2п уо2Ñ ÑƒÐ¾ÑÑ‚1 уо2Ñ‚1 уо2Ñ„ у2пл уп1лю у3про у1ра у1ре уре2Ñ‚3Ñ€ у1ри урке3 у1ро у2род уро2дл урт2Ñ€ у3ру у1ры у1рю у1Ñ€Ñ Ñƒ2Ñад у1Ñг уÑ1ка уÑ1ки уÑк3л уÑ1ком у1Ñкр уÑ1ку. 
уÑ2л уÑла4ж3 уÑ3ли у1Ñм у2Ñн уÑ2п уÑ3Ñ Ñƒ1Ñте у1ÑÑ‚Ñ Ñƒ1ÑÑ„ 2уÑц у2Ñч у2ÑÑŒ у3ÑÑŒÑ Ñƒ1та у3тер у1ти ут2Ð»Ñ Ñƒ1то уто3п2Ñ ÑƒÑ‚1ри у1ту у1Ñ‚Ñ‹ у1тье у3тью 1утю у1Ñ‚Ñ Ñƒ1у ууг2 уу2Ñ Ñƒ3фи уф1л уф2Ð»Ñ Ñƒ2Ñ„Ñ€ ух1ад уха2Ñ‚ у2хв у3Ñ…4во ух1л ух3Ð»Ñ ÑƒÑ…1Ñ€ у2чеб 1учр у1чь у3ше у3ши у2шл уш1ла у2шп 2ÑƒÑ Ñƒ1Ñ ÑƒÑ2з 1Ñ„ фа2б1 фа2гн фа1зо фан2д фанд1Ñ€ фа1Ñ‚Ñ€ фа2Ñ… 3фаш фаÑ1 2Ñ„1б 2Ñ„1в 2фг 2Ñ„1д фев1Ñ€ фед1Ñ€ фе1о3 фе2Ñ1к Ñ„4и фиа2к1 фи2гл фи2ж фи2зо фи2нин фи1о 3фит 2Ñ„1к Ñ„2ла Ñ„2ли Ñ„2ло 2фм 2Ñ„1н 2фобъ 3фон фо2рв 2Ñ„1орг фор3Ñ‚Ñ€ фо1ру фоÑ1к 3фот фото3п Ñ„1раб фра1з фра1Ñ Ñ„1рат Ñ„2рен фре2Ñ Ñ„1ри Ñ„2риж Ñ„2риз Ñ„1ро Ñ„2рон Ñ„1ру 2Ñ„3Ñ 2Ñ„1Ñ‚ Ñ„2тм Ñ„2тор 2Ñ„1у2п фу3тл 2фуф 2Ñ„Ñ„ 2Ñ„1ч 2фш2 2Ñ„ÑŒ. Ñ„2ÑŽ1 1ха ха2бл ха2д 2Ñ…1ак хан2д хао3 Ñ…1арш 2Ñ…1б 1Ñ…2в 2Ñ…3ве 2Ñ…3ви Ñ…3вы 2хг Ñ…3д2 1хе хео3 Ñ…1з2 1хи хиат1 хие2 2Ñ…1изы хи1Ñ2 Ñ…1к2 Ñ…1лав Ñ…1Ð»Ð°Ñ Ñ…1лат Ñ…1лац 1хлеб Ñ…2Ð»ÐµÑ Ñ…1лет Ñ…3ло. Ñ…2лоп 1Ñ…2лор Ñ…1лу 1Ñ…2му 2Ñ…1н 3Ñ…2ны 1хо 2Ñ…1о2к хоп2 хо2пе хо2пор хо1ру Ñ…1оÑм 2Ñ…1оÑн хоф2 хох1л хоÑ2 хп2 Ñ…1раз 1хран Ñ…1ра1Ñ2 Ñ…1рей хри2пл Ñ…2Ñ€Ð¸Ñ Ñ…1ров 1хром хро2мч Ñ…1ры Ñ…1Ñ€Ñ 2Ñ…1Ñ2 2Ñ…1Ñ‚ 1ху. Ñ…1у2г 2хуе 2хуй 1хун Ñ…1у2Ñ€ ху3ра 1Ñ…ÑƒÑ 1хуш 2хую Ñ…1Ñ…2 2Ñ…1ч2 2хш хью1 1ц ца1 3ца. 3цам ца2пл 3цах 2ц1б ц2ве 2цвы 2цг 2ц1д це1з це1к це1от цеп1л цеÑ2л це1Ñ‚ 2цетат 2ц1з ци1 ци2к1 цик3л ци2ол цип2 ци2Ñк циу3 циф1Ñ€ 2ц1к2 2ц1л 2цм 2ц1н ц1о2б 2ц1о2д 2ц1от 2цп2 2ц1Ñ€ 2ц1Ñ 2ц1Ñ‚ 3цу 2цц 2ц3ш2 3цы цы2п цып3л цю1 1ча ча2др ча2дц ча2ево ча2евы ча2ер чаÑÑ‚1в ча1Ñте ча1Ñту ча1ÑÑ‚Ñ 3чато 3чаты 2ч1б ч1в 2ч1д 1че че1вл че2гл че1о чер2Ñ Ñ‡ÐµÑ€ÑÑ‚1 че1Ñл ч2ж чжо2 1чи 3чик 3чиц 2ч1к 1ч2ла ч2ле ч3лег ч3леж 2чли ч2ли. 1ч2ло 1чм 2чма 2чме ч2мо 2ч1н 3чо 2ч1Ñ 2ч1та ч2те 2чтм 1чу 3чук ч2Ñ… 2ч1ч 2чь. 1чье 1чьи 2Ñ‡ÑŒÑ 2чьт 1чью 1Ñ‡ÑŒÑ 1ш ша2бл ша2гн ша2г1Ñ€ ша2др шан2кр шар3Ñ‚2 ша1ÑÑ‚ ша1тро 2ш1б ш2в ш3вен ше2гл ше1к ше1о2 ше3пл ше1Ñ2 ши2бл ши2пл шиф1Ñ€ 2ш1к2 3ш2кол 2ш1лей 2шлен ш2ли. 2шлив 2шлил ш2лин ш2Ð»Ð¸Ñ Ñˆ2лите ш2лиф ш2ло. 2шлов ш2лог ш1лы ш2лю 2шлÑе 2шлÑк ш2лÑп 2шлÑÑ‚ 2шлÑч 2шлÑÑŽ 2шм 3ш2мы 4ш3мы. 2ш1н 4шни ш2нур ш2п2 ш3пр 2ш1Ñ€ 2ш1Ñ Ñˆ1ти 2ÑˆÑ‚Ñ ÑˆÑƒ2ев шуÑÑ‚1 2шф ш1Ñ… 2шц 2ш1ч 2шь 4шь. 3шье 3шьи 3шью 3ÑˆÑŒÑ Ñˆ2ÑŽ1 1щ 2щ3в2 ще1б2л ще2гл щед1Ñ€ щеи2 щеиÑ1 ще1Ñ Ñ‰Ðµ1Ñ… щеш2 ще3шк щи2п1л 2щм 2щ1н 2щ1Ñ€ 2щь. 
ÑŠ1 ъе2г ъе2д ъе3до ъе2л ÑŠ2е2Ñ€ ъе2Ñ ÑŠÐµ2хи ÑŠÑŽ2 ÑŠÑ2 ÑŠÑ3н Ñ‹1 Ñ‹2бл Ñ‹3га Ñ‹3ги ыг2л Ñ‹2гн Ñ‹2дл ыд2ре Ñ‹2д1ро Ñ‹2Ð´Ñ€Ñ Ñ‹Ðµ2 Ñ‹3ж2д ыз2ва ыз2д Ñ‹2зл Ñ‹2зн ыз2на ыи2 ыиг1 Ñ‹2к1в ык2л Ñ‹2к3ло ыко1з ык1Ñ Ñ‹2ль Ñ‹2мч ыноÑ3л Ñ‹3по ыра2Ñ3 Ñ‹Ñ€2в ыре2Ñ… Ñ‹3Ñа Ñ‹3Ñе Ñ‹Ñ1ки Ñ‹Ñ1ку Ñ‹2Ñн Ñ‹3Ñо Ñ‹Ñ2п Ñ‹2ÑÑ… Ñ‹Ñ2ч Ñ‹2Ñш Ñ‹Ñ‚1ви Ñ‹Ñ‚2Ñ€ Ñ‹3тью Ñ‹3Ñ‚ÑŒÑ Ñ‹Ñƒ2 Ñ‹2ш1л Ñ‹3шь ÑŒ1 ьб2 ÑŒ2Ð²Ñ ÑŒ2дц ÑŒ2е ье1зо ье1к ье2Ñ1к ÑŒ2зн ÑŒ2и1 ÑŒ2кл ьми3д ьми3к ьмо1 ьне2о ÑŒ2о ÑŒ2п1л ÑŒ3п2то ÑŒÑ2к ÑŒ2Ñн ÑŒ2Ñти ÑŒ2ÑÑ‚Ñ ÑŒ2Ñ‚1амп ьти3м ÑŒ2тм ÑŒ2тот ÑŒ2траб ьт2ре ьт2ру ьт2ры ьхо2 ьхоз1 ÑŒ2ща ÑŒ2ще ÑŒ2щу ÑŒ2ÑŽ ÑŒ2Ñ ÑŒÑ1в ÑŒ3ÑÐ³Ñ 1Ñ Ñ1в Ñв1Ñ€ 2Ñг Ñд1Ñ€ Ñк1л ÑкÑ1 Ñк2ÑÑ‚ Ñле1о Ñ2м Ñ3ма Ñ2н Ñ3нь Ñо2з Ñ2п Ñпи3к Ñ1ре Ñ1ри Ñри4Ñ‚Ñ€ Ñро1Ñ2 Ñ1ру Ñ1ры ÑÑ1 ÑÑк2 ÑÑ3м Ñ2Ñо ÑÑ3те ÑÑ2Ñ‚1Ñ€ Ñ2те Ñтил1а ÑÑ‚1ра Ñ2Ñ„ ÑÑ…2 Ñхо3 Ñ2ц ÑÑ2 1ÑŽ ÑŽ1а ÑŽ1б ÑŽ2бв ÑŽ2бл ÑŽ2б1ре ÑŽ1в ÑŽ1дь ÑŽ1е юз2г юзи2к ÑŽ1зо ÑŽ1и ÑŽ2идал ÑŽ1к ÑŽ2к1в ÑŽ1ла ÑŽ1ле ÑŽ2ли ÑŽ1лю 2юм ÑŽ2мч ÑŽ2нь ÑŽ1о1 ÑŽ1ра ÑŽ1ре юре4м ÑŽ1ри юри2Ñк ÑŽ1ро ÑŽ1ру ÑŽ1ры ÑŽ2Ñ1к ÑŽ1Ñта ÑŽ1Ñте ÑŽ1Ñто ÑŽ1ÑÑ‚Ñ ÑŽ1ти ÑŽ1то ÑŽ1ту ÑŽ1Ñ‚Ñ‹ ÑŽ1Ñ… юха1Ñ ÑŽ1ч ÑŽ2щь ÑŽ1Ñ Ñ2бр Ñб1ра Ñб3ре Ñб1ри Ñб3рю 3ÑÐ²Ð¸ÐºÑ Ñ1во Ñ1ву Ñ1в2Ñ… Ñ2г1л Ñ2гн Ñд1в Ñд1Ñ€ Ñ1е Ñз2гн Ñ1зо Ñ1и Ñ1к Ñ2к1в Ñ2к1л Ñк1Ñ Ñ1л Ñ2ль Ñм2б3л Ñ2мь Ñ3на ÑнÑ2 Ñ1ра Ñ1ри Ñ1ро Ñ1рь ÑÑ1к ÑÑ1л ÑÑ2Ñ‚ ÑÑÑ‚3в Ñ1Ñто ÑÑÑ‚1Ñ€ Ñ1та ÑÑ‚3в Ñ3ти Ñти1з Ñ1то Ñ1ту Ñ1Ñ‚Ñ‹ Ñ3тью Ñ3Ñ‚ÑŒÑ Ñ1Ñ‚Ñ Ñ1у ÑÑ…1л Ñ1ху Ñце1 Ñ2шл 2ÑÑŽ. 2Ñ1Ñ .бо2дра .вÑÑ‚2Ñ€ .доб2рел .до1б2ри .об2люю .об2рее .об2рей .об2рею .об2рив .об2рил .об2рит .па2н1Ð¸Ñ .пом2ну .реа2н .ро2Ñ3пи .Ñо2пла а2ньш атро2Ñк безу2Ñ Ð±Ð¸Ð½Ð¾2Ñк виз2гн выб2ре гÑÑ‚4Ñ€ ди1Ñ2лов доÑ2Ð½Ñ Ð´Ñ€Ð¾2ж3ж 2дружей е2мьд е2о3плато е2о3пози ере3Ñ2Ñо 4ж3дик 4ж3дич заи2л зао2з 2з1а2хав заю2л з2Ñ€ÑÑ‚ зу2мь 6зь. и2л1а2мин илло3к2 й2кь ла2б1Ñ€ лу3Ñ4н ме2Ð´Ð¸Ð½Ñ Ð¼Ðµ2д1о2Ñм мети2л1ам миÑ4Ñ3н нар2ват не2о3ре ни1Ñ2кол ни4ÑÑŒ. но4л1а2мин н2траÑÑ Ð¾2д1о2бол о4ж3дев о1и2Ñ1Ñ‚Ñ€ ойÑ4ков о2м3че. они3л2ам он2трат о2Ð¿Ð»ÑŽÑ Ð¾Ñо4м3н оти4дн пере1Ñ2н по2доде по2д1у2ро пое2ж по2Ñтин прем2но приче2Ñ1к пти4дн редо4пл реж4ди рни3л2а3м роб2лею 2Ñбрук1 Ñо2Ñтрит Ñо3Ñ‚2кал 2Ñтче. 2Ñтьт ÑÑ‹2мит 2ÑÑŒÑÑ. 6Ñ‚Ñ€. тро2ÐµÑ‚ÐµÑ 6хуÑ. Ñ‹2рьм Ñ‹Ñ2Ð²Ñ ÑŒÐ±Ð°Ñ‚2 а1вё а2двё а1Ñ‘ аз3вёз а1лё 2алёк 2амёт ам2нёт а1рё аÑ3тём а1тьё 1бё бё2д1Ñ€ б3лён б2лёÑ1к б2люё б1рёк б2рём б2рёх 1веё 3везё вёд1Ñ€ 1вёз 2вёрд 1Ð²Ñ‘Ñ Ð²2лёк в2лёт 1вмё в2Ð½Ñ‘Ñ 2в1рён 3вÑÑ‘3 1вьё г1лё г2лёт г2нёв г3нён г2ноё д1вё 1дё .доб2рёл 2доплё до2прё д1рё д2рёб 2д3рёж д2рём 1дрёма 1дрёмы 2д3рён дъё2м 1дьё еб1рён е1вё 2евёр 2е1врё е2глё е1Ñ‘ 2ежё е3зё е1лё 2епё ер1вё е1рё ерё3до ерё1к2 еÑ2чёт ет1вё е1тьё 2ёб Ñ‘1бра ёб1ры Ñ‘1ве Ñ‘1во 2Ñ‘1вре Ñ‘1ву Ñ‘1дру 2Ñ‘3душ 2ёже Ñ‘3зе ёз1о2г Ñ‘1зом Ñ‘1ка Ñ‘1ки 2Ñ‘1ко 2Ñ‘1кр ёк2ро Ñ‘1ку Ñ‘1ла Ñ‘1ле Ñ‘1лу Ñ‘1лы 2ёмуж Ñ‘2мч Ñ‘3на ён2д1Ñ€ ёнÑ2 ёпат2 2ёпе Ñ‘2пл Ñ‘3пла ёп1лу Ñ‘3плы Ñ‘4пн 2ёпо Ñ‘4пт Ñ‘1ра Ñ‘1ре Ñ‘3ре. Ñ‘1ри Ñ‘1ро Ñ‘Ñ€3Ñк Ñ‘1ру Ñ‘1ры Ñ‘3Ñ2а Ñ‘1Ñк Ñ‘2Ñ1ка. Ñ‘2Ñке Ñ‘4Ñ1ку. 
2Ñ‘Ñл Ñ‘3Ñо Ñ‘1ÑÑ‚ Ñ‘Ñ2тан Ñ‘3Ñту Ñ‘1та 2ётеч Ñ‘1ти Ñ‘1то ёто1Ñ Ñ‘Ñ‚1Ñ€ Ñ‘1ту Ñ‘1Ñ‚Ñ‹ Ñ‘1Ñ‚ÑŽ Ñ‘1Ñ‚Ñ Ñ‘Ñ…Ð°2Ñ‚ Ñ‘Ñ…1ато Ñ‘Ñ…3вал Ñ‘Ñ…3лоп Ñ‘Ñ…1опо Ñ‘Ñ…1ру 3жёв жё1Ñ2 ж2жё за3мнё з1вё з2вёз 1зё з2наё 2знё 1з2о3рё з2отё зот2рё 3зуё зъё2м 2зымё 2и1вё иг1рён и1Ñ‘ их1рё 1каё 1кё к3лён к2роё 3куё ла1ÑÑ‚Ñ‘ лё3до лё1з2о3 лёк1л 1лён лё2Ñк лё4Ñка 1лёх 2лоён 1луё 3льё 1льщё 3м2нёш 3м2щё нд2рё не3Ñ‘ 1нё нё1б2 3номё 1ньё од3рёб о1Ñ‘ оё2жи о1лё 2омё о3м2нём о3м2нёт о2п1лёй о1рё о2Ñкё от1вё 2о3тёк о3Ñ‚Ñ‘Ñ€ от1рёк от1рёш о3Ñ„Ñ‘ пё1 пё2ÑÑ‚1Ñ€ пё2Ñ‚Ñ€ 2плён п2лёнк плёÑ1к п1лёю поё2ж 3прёт причё2Ñ1к Ñ€2блё 1рвёт .рё2бр 1рёзк рё1зна 1рё1зо рёз2у 1рёкш 3рёмо 1рёнк рё3Ñта рё3Ñто род2лё роё2м 1рьё Ñ2дё Ñе3ÑÑ‚Ñ‘ 1Ñ2Ñ‘ ÑёкÑ4 ÑÑ‘2ÑÑ‚ ÑÑ‘ÑÑ‚1Ñ€ 2Ñкуё Ñ1лёт Ñ2Ñ‚Ñ‘ 1Ñтёл 1Ñтён. Ñ3Ñ‚Ñ‘Ñ‚. Ñ3тёте ÑÑ‚Ñ‘3Ñ… Ñ3тёш Ñ3Ñ‚2лё Ñчё2Ñ1к 1Ñьё Ñ‚1вёл Ñ‚1воё 1Ñ‚2Ñ‘ Ñ‚Ñ‘2гн Ñ‚Ñ‘1зо 3тёка тёк1л 3тёкш Ñ‚Ñ‘4п1л Ñ‚Ñ‘Ñ€3к Ñ‚Ñ‘2Ñка Ñ‚Ñ‘2Ñ1ки Ñ‚Ñ‘2Ñ1ко Ñ‚Ñ‘2Ñку Ñ‚Ñ‘2хо 3Ñ‚2кёт Ñ‚1ревё 3Ñ‚2рё2Ñ… Ñ‚2рёшь тро2ÐµÑ‚Ñ‘Ñ 3тьё уг2нё уг1рё .уё2 у1Ñ‘ у1лё у1рё у1ÑÑ‚Ñ‘ у3Ñ‚Ñ‘Ñ€ у1тьё у2чёб у3шё 2Ñ…3вё 1хлёб Ñ…2Ð»Ñ‘Ñ Ñ†2вё 1чё чёр2Ñ Ñ‡Ñ‘Ñ€ÑÑ‚1 .чё2Ñ1к ч2Ñ‚Ñ‘ 1чьё 2шлён 3шьё ÑŠ2Ñ‘2Ñ€ ыд2рё ырё2Ñ… Ñ‹3ÑÑ‘ ÑŒ2Ñ‘ ьё1зо ÑŒ2щё ÑŽ1Ñ‘ Ñб3рё .не8 8не. 8бъ. 8въ. 8гъ. 8дъ. 8жъ. 8зъ. 8къ. 8лъ. 8мъ. 8нъ. 8пъ. 8ръ. 8ÑÑŠ. 8Ñ‚ÑŠ. 8Ñ„ÑŠ. 8Ñ…ÑŠ. 8цъ. 8чъ. 8шъ. 8щъ. 8-7 --8 .а-8 .б-8 .в-8 .г-8 .д-8 .е-8 .Ñ‘-8 .ж-8 .з-8 .и-8 .й-8 .к-8 .л-8 .м-8 .н-8 .о-8 .п-8 .Ñ€-8 .Ñ-8 .Ñ‚-8 .у-8 .Ñ„-8 .Ñ…-8 .ц-8 .ч-8 .ш-8 .щ-8 .ÑŠ-8 .Ñ‹-8 .ÑŒ-8 .Ñ-8 .ÑŽ-8 .Ñ-8 -а8а8 8а8а- -а8б8 8а8б- -а8в8 8а8в- -а8г8 8а8г- -а8д8 8а8д- -а8е8 8а8е- -а8Ñ‘8 8а8Ñ‘- -а8ж8 8а8ж- -а8з8 8а8з- -а8и8 8а8и- -а8й8 8а8й- -а8к8 8а8к- -а8л8 8а8л- -а8м8 8а8м- -а8н8 8а8н- -а8о8 8а8о- -а8п8 8а8п- -а8Ñ€8 8а8Ñ€- -а8Ñ8 8а8Ñ- -а8Ñ‚8 8а8Ñ‚- -а8у8 8а8у- -а8Ñ„8 8а8Ñ„- -а8Ñ…8 8а8Ñ…- -а8ц8 8а8ц- -а8ч8 8а8ч- -а8ш8 8а8ш- -а8щ8 8а8щ- -а8ÑŠ8 8а8ÑŠ- -а8Ñ‹8 8а8Ñ‹- -а8ÑŒ8 8а8ÑŒ- -а8Ñ8 8а8Ñ- -а8ÑŽ8 8а8ÑŽ- -а8Ñ8 8а8Ñ- -б8а8 8б8а- -б8б8 8б8б- -б8в8 8б8в- -б8г8 8б8г- -б8д8 8б8д- -б8е8 8б8е- -б8Ñ‘8 8б8Ñ‘- -б8ж8 8б8ж- -б8з8 8б8з- -б8и8 8б8и- -б8й8 8б8й- -б8к8 8б8к- -б8л8 8б8л- -б8м8 8б8м- -б8н8 8б8н- -б8о8 8б8о- -б8п8 8б8п- -б8Ñ€8 8б8Ñ€- -б8Ñ8 8б8Ñ- -б8Ñ‚8 8б8Ñ‚- -б8у8 8б8у- -б8Ñ„8 8б8Ñ„- -б8Ñ…8 8б8Ñ…- -б8ц8 8б8ц- -б8ч8 8б8ч- -б8ш8 8б8ш- -б8щ8 8б8щ- -б8ÑŠ8 8б8ÑŠ- -б8Ñ‹8 8б8Ñ‹- -б8ÑŒ8 8б8ÑŒ- -б8Ñ8 8б8Ñ- -б8ÑŽ8 8б8ÑŽ- -б8Ñ8 8б8Ñ- -в8а8 8в8а- -в8б8 8в8б- -в8в8 8в8в- -в8г8 8в8г- -в8д8 8в8д- -в8е8 8в8е- -в8Ñ‘8 8в8Ñ‘- -в8ж8 8в8ж- -в8з8 8в8з- -в8и8 8в8и- -в8й8 8в8й- -в8к8 8в8к- -в8л8 8в8л- -в8м8 8в8м- -в8н8 8в8н- -в8о8 8в8о- -в8п8 8в8п- -в8Ñ€8 8в8Ñ€- -в8Ñ8 8в8Ñ- -в8Ñ‚8 8в8Ñ‚- -в8у8 8в8у- -в8Ñ„8 8в8Ñ„- -в8Ñ…8 8в8Ñ…- -в8ц8 8в8ц- -в8ч8 8в8ч- -в8ш8 8в8ш- -в8щ8 8в8щ- -в8ÑŠ8 8в8ÑŠ- -в8Ñ‹8 8в8Ñ‹- -в8ÑŒ8 8в8ÑŒ- -в8Ñ8 8в8Ñ- -в8ÑŽ8 8в8ÑŽ- -в8Ñ8 8в8Ñ- -г8а8 8г8а- -г8б8 8г8б- -г8в8 8г8в- -г8г8 8г8г- -г8д8 8г8д- -г8е8 8г8е- -г8Ñ‘8 8г8Ñ‘- -г8ж8 8г8ж- -г8з8 8г8з- -г8и8 8г8и- -г8й8 8г8й- -г8к8 8г8к- -г8л8 8г8л- -г8м8 8г8м- -г8н8 8г8н- -г8о8 8г8о- -г8п8 8г8п- -г8Ñ€8 8г8Ñ€- -г8Ñ8 8г8Ñ- -г8Ñ‚8 8г8Ñ‚- -г8у8 8г8у- -г8Ñ„8 8г8Ñ„- -г8Ñ…8 8г8Ñ…- -г8ц8 8г8ц- -г8ч8 8г8ч- -г8ш8 8г8ш- -г8щ8 8г8щ- -г8ÑŠ8 8г8ÑŠ- -г8Ñ‹8 8г8Ñ‹- -г8ÑŒ8 8г8ÑŒ- -г8Ñ8 8г8Ñ- -г8ÑŽ8 8г8ÑŽ- -г8Ñ8 8г8Ñ- -д8а8 8д8а- -д8б8 8д8б- -д8в8 8д8в- -д8г8 8д8г- -д8д8 8д8д- -д8е8 8д8е- -д8Ñ‘8 8д8Ñ‘- -д8ж8 8д8ж- -д8з8 8д8з- -д8и8 8д8и- -д8й8 8д8й- -д8к8 8д8к- -д8л8 8д8л- -д8м8 8д8м- -д8н8 8д8н- -д8о8 8д8о- -д8п8 8д8п- -д8Ñ€8 8д8Ñ€- -д8Ñ8 8д8Ñ- -д8Ñ‚8 8д8Ñ‚- -д8у8 8д8у- -д8Ñ„8 8д8Ñ„- -д8Ñ…8 8д8Ñ…- -д8ц8 8д8ц- -д8ч8 8д8ч- -д8ш8 8д8ш- -д8щ8 8д8щ- -д8ÑŠ8 8д8ÑŠ- -д8Ñ‹8 8д8Ñ‹- -д8ÑŒ8 8д8ÑŒ- -д8Ñ8 8д8Ñ- -д8ÑŽ8 8д8ÑŽ- -д8Ñ8 
[… remainder of the removed plain-text ["data"] value: several thousand Cyrillic letter-pair hyphenation patterns of the form "-x8y8 8x8y-"; this copy of the patch carries them with mis-decoded UTF-8 bytes, so the rest of the value is not reproduced here …]",
+  ["compression"]="zlib",
+  ["data"]=[zlib-deflated replacement for the plain-text value above; the raw byte stream is binary and is omitted from this excerpt],
+  ["length"]=63270,
  ["minhyphenmax"]=1,
  ["minhyphenmin"]=1,
  ["n"]=7021,
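[Editorial note, not part of the commit: each pattern-file hunk above and below makes the same change, replacing the plain-text ["data"] string of a generated hyphenation table with a zlib-deflated copy plus the original string's ["length"]. A minimal sketch of how a loader could undo that change, assuming the lzlib binding that LuaTeX exposes as a global zlib table with zlib.decompress; the helper name is made up here and the actual ConTeXt loader may handle this differently:

    -- hypothetical helper, not taken from the ConTeXt sources
    local function expandpatterndata(t)
        if t.compression == "zlib" then
            -- t.data holds the deflated byte stream, t.length the size of the
            -- original plain-text string, which gives a cheap sanity check
            local plain = zlib.decompress(t.data)
            if t.length and #plain ~= t.length then
                return nil, "length mismatch after decompression"
            end
            return plain
        end
        return t.data -- files without a compression field still carry the plain string
    end

The ["n"] and ["length"] fields stay readable in the patch, so the size of each pattern set can still be checked without inflating the data.]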
diff --git a/tex/context/patterns/lang-sk.lua b/tex/context/patterns/lang-sk.lua
index 4bbdd915f..2aac254d7 100644
--- a/tex/context/patterns/lang-sk.lua
+++ b/tex/context/patterns/lang-sk.lua
@@ -2,7 +2,9 @@ return {
  ["comment"]="% generated by mtxrun --script pattern --convert",
  ["exceptions"]={
   ["characters"]="abdeikmnoprstyóť",
-  ["data"]="dosť me-tó-da me-tó-dy ne-do-stat-ka-mi sep-tem-bra",
+  ["compression"]="zlib",
+  ["data"]=[zlib-deflated form of the removed exception list; byte stream omitted],
+  ["length"]=54,
   ["n"]=5,
  },
  ["metadata"]={
@@ -97,7 +99,39 @@ return {
  },
  ["patterns"]={
   ["characters"]="abcdefghijklmnoprstuvwxyzáäéíóôúýčďĺľňŕšťž",
-  ["data"]="a1 á1 ä1 e1 é1 i1 í1 o1 ó1 ô1 u1 ú1 y1 ý1 i2a i2á i2e i2u o2u c2h d2z d2ž [… 2467 plain-text Slovak patterns in all; the middle of the value is mis-decoded in this copy and omitted …] akci3a2 akvári3u2m gymnázi3um le2u3kémia t2ri3u2mf kli3e2nt",
+  ["compression"]="zlib",
+  ["data"]=[zlib-deflated replacement for the plain-text patterns; byte stream omitted],
+  ["length"]=19364,
   ["minhyphenmax"]=1,
   ["minhyphenmin"]=1,
   ["n"]=2467,
diff --git a/tex/context/patterns/lang-sl.lua b/tex/context/patterns/lang-sl.lua
index 27b2e2694..4ea743537 100644
--- a/tex/context/patterns/lang-sl.lua
+++ b/tex/context/patterns/lang-sl.lua
@@ -106,7 +106,18 @@ return {
  },
  ["patterns"]={
   ["characters"]="abcdefghijklmnopqrstuvwxyzčšž",
-  ["data"]=".av5r .di6spo .ek3s .ek5v .is1 .iz1 .obi4d .ob5it .od1 [… the full plain-text Slovenian pattern set; mis-decoded in this copy and omitted …] .č8 .š8 .ž8 8ž. 8š. 8č.",
+  ["compression"]="zlib",
+  ["data"]=[zlib-deflated replacement for the plain-text patterns; the byte stream runs past the end of this excerpt and is omitted],
˜©ô§òß,Õg”žbçLʤ»šf\14fÆôsœ-ÐÍ̈ýùþqPÝ,f`ŠvÞ/†²xÈfÞ\17\5l€Ål4\9\19TˆL@EòƼwYŸÀ¿Îœ­\5\0063×ô@aî7³\12\0281ØŸåbÚE¶]¤àRm» æ‚s.D] ÷[èÔ\24z\8žfyV\127¤ý2KËn\22ßïß‹i”YìBxÀÿ–éB{’ä©JžfA\16ì\2Œ¹È|ý$Q\26ÔY-:«…³Z°\12éø£Ô&ÐWõ\8\22v·ÔRg©¥\14ŒSPê\4ŽrÕ®Víj­»ZÙÕêê£cå©Á™«—4ð0ª+ÆŠ[EVX\9\29ÔG\12Ï‚Õ¬5ɯ±\127ÿ`뵞R\29ß\4»@Š­(¶ò¤\8j2ÿ[WRc¿&àSµ– ž\12C­\19 ${°\0319#19mõHèM\0244ÞOì\17^\25\1Êã¡2e\21q¯‡&îjŸ×\0029‹ëÅR›ûÅ\14ƒL\17dŠ j \ +5P…\26¨BPm¸Ž­\9£Þ? Z\7‹_\7ÔÒ(ØH²±\9ØcA4e耡ƒê\31˼Œ\14¹j\14:1N5Q\"èž\6N\"|wÿÓ.q‘PlÕŽ3‰Äñh±_ÄH\25÷É<ø£š£!vⳆb\0013d§ÿ~²ra®¹\16T]”kJ̵n´ùZ«ªÌÙöÜ£\28ª$^ÊY©*+%R\2dÎì\0ûL\18c|\23d“Ù…Í*8\14bîB‡^]ìk£¥\ +ð@¿CÞ} Ý1ö+¤ƒxy Õ\3\30øßA<ØÜ¡\127+\0Îútá·ò\11ü\16¸õXE3R\5”?’$$}N5|pP‡\\íÀd×vF\21\28îÈ2Ч\29ÿuN\18‹&RQœ²á©+¨ÇéYÓèYÓèɻإÚñš2šSOååCV\17/ÀŒ\16,G)y²©“M^\25ãôc\127\14¦9½Úýû—p ÉØ\19?§5\11V\1\"jr¬ÿŠ:q¨ëÏvC\25»ñl\19²r·ã­ŠÂŽXv*\0243“e3O‡“$s¶\4\27¾+\\l\"³†¹Œ´6\13QT|\13æ+¶æ?ôl¾Yç›Æ7\26}cêïæË4¥\29Lé‡à.¦\12Ý1ƒW®R\25ô‡\3\28U€\5›]Ë\13×.2\\©±«4T\19\ +ý‡)W6_0\3Q$xÃë3fMx!žnCü¢ÇÃSA±þ\1Iw\18°Ü•œƒ\13HÞ¼2<ëèO¸®è¿\28!‚)F;$ñ”d0>5›4¨6\7ÊG‡$qÙ\ +/&v_\26½\13ŠÞ\6\0Ó•9‹2'Ì)z\28•Ot\21UñRô\127]íüŠxA\21+­˜PônÄžÅ\30Uoýß\5\22\6N¬`OÂA¹ž\1­¬þð\18Κ\"#V¿ãEq«x¯X¯þ¯Œ\0r¶ï_2àåßß?æòþ\17üþ1\127t\15ÿ¨>ýCŒø\127é˜ím", + ["length"]=6052, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=1068, diff --git a/tex/context/patterns/lang-sr.lua b/tex/context/patterns/lang-sr.lua index f42923066..b45a9f322 100644 --- a/tex/context/patterns/lang-sr.lua +++ b/tex/context/patterns/lang-sr.lua @@ -2,7 +2,12 @@ return { ["comment"]="% generated by mtxrun --script pattern --convert", ["exceptions"]={ ["characters"]="абвгдезиклмнопрÑтуцшјњћ", - ["data"]="на-дно на-тка на-тка-ти на-тка-ше о-дно о-тка о-тка-ти о-тка-ше по-дно по-дне по-тку по-тки по-тке по-тка у-дно и-где и-гдје Ñву-где Ñве-где Ñву-гдје Ñве-гдје по-не-где по-не-гдје и-зби и-збе и-зба и-зби-ци и-зби-це и-зби-ца и-звит и-зим изо-ба-ру изо-ба-ри изо-ба-ре изо-ба-ра и-Ñкок и-Ñко-ку и-Ñко-ка и-Ñкон и-Ñко-ну и-Ñко-ни и-Ñко-на и-Ñкру и-Ñкре и-Ñкри и-Ñкра и-Ñкрав и-Ñпод и-Ñпо-да и-Ñтру и-Ñтри и-Ñтро и-Ñтре и-Ñтра на-ју на-ји на-јо на-је на-ја на-јам на-јеÑÑ‚ о-браз о-брет о-дви-ка о-дран о-дра-ти пре-двој ра-зму ра-зми ра-змо ра-зме ра-зма ра-зну ра-зни ра-зно ра-зне ра-зна ра-Ñклоп ра-Ñпе-лу ра-Ñпе-ло ра-Ñпе-ла ра-Ñпе-ћа ра-Ñпе-ћу ра-Ñпе-ће ра-Ñту ра-Ñти ра-Ñте ра-Ñта ра-Ñтом ра-Ñтер ра-шћу ра-шћи ра-шћо ра-шће ра-шћа у-зно у-зна у-зни у-зник у-зрок у-Ñку у-Ñки у-Ñко у-Ñке у-Ñка уÑ-ÐºÐ¾Ñ Ñƒ-Ñпио у-Ñпео у-Ñпор у-шћу у-шће у-шћа ин-те-Ñ€ÐµÑ Ñ‚Ñ€Ð°Ð½-Ñу тран-Ñа тран-Ñом те-ле-ви-зор те-ле-ви-зо-ру те-ле-ви-зо-ром те-ле-ви-зо-ра те-ле-ви-зо-ри при-ти-Ñак при-ти-Ñ-ком при-ти-Ñ-ку при-ти-Ñ-ка по-ја-ви-ће на-пу-шта про-гра-ми-ра-ње Ñни-ма-ња Ñни-ма-ње", + ["compression"]="zlib", + ["data"]="xÚu•íqÂ0\12†Waì\8ñ]\17rí\18†â#\7ØYAÞ¨ú²,‡ò§}$¿’\21Y6!Npƒ\12å\0Äu†\7DÏôo\29\29gH\7(\22W,ªŒ1eŒØ\\Œ±úIXCg\ +7v\26Ü¥\6˲Nð‹œ\26Õ\5¹žàJ\26Y!\11’·tÍ´ºÊ¶Ô•{Àè\16\13îu‡\11U¨Ô}±¯NõËIØL£ÙÄWX물Â\11\9\1w½PçŽÔ•ÁÁY\7GÚ;83~Û\3\ +<:OÒcg\14Êì—ò¨Ìò-Ý´@­PY¾Qyu\28=ÃU-ì.Ü:ÓÁª°Î=1qKÆ\\\28'Çmp\23Š\20²Ù]lÄù\16•b'ê|[§|<â\23.÷nœÔ\127£S›ÚÔãd,wÖ\27°Qˆ¨K]\14¼‚'õÂúŒQ×¹8NŽ£qv±ÙÅf\23›]ln±|vOløföFÅ=[Bs´LÝ\17\7Gýysìr \"5\7žLèÜ*fvšž\17¹àY8U=ªuv\27![.ââ89–\7Cû\19¬#Áú×\8o\ +3Í—0kh$Z¹\1FɈrÖ\19D©'õn˜µ\24'Ç…¾)Ø\23\5«;´ª14Ó\20%¾á”r–!£¦\4oÑÖÎâæÍrnøç*\15\11oøî•\7æÿ…O™ä…ù° C¿ò\5àâè\5\26\\Ü#ºm{/ÿ\0ì•Q\31ÿ…Æ™vâ>ñME1·kf\17\21Œ¯´Ìý‹„<\3ßòÌgªñ%Ž¸w¤? 
soÞ", + ["length"]=1800, ["n"]=130, }, ["metadata"]={ @@ -94,7 +99,45 @@ return { }, ["patterns"]={ ["characters"]="абвгдежзиклмнопрÑтуфхцчшђјљњћџ", - ["data"]=".Ñ’2 .Ñ›2 .ч2 .ш2 .ж2 .а2б3алиј .а2б3анац .а2б3ерац .а2б3ерир .а2б3евак .а2б3ирит .а2б3ју .а2б3ла .а2б3лег .а2б3леп .а2б3лок .а2б3лу .а2б3ориг .а2б3реак .а2б3рог .а2б3ÑƒÐ·ÑƒÑ .а2д3ерац .а2д3ј .а2д3лат .а2д3рен .а2д3рог .а2д3верб .а2н3а4е2ро .а2н3афиј .а2н3афрод .а2н3аго .а2н3акуз .а2н3алд .а2н3алфа .а2н3алг .а2н3амерт .а2н3андр .а2н3ант .а2н3апто .а2н3арх .а2н3егер .а2н3екл .а2н3екум .а2н3елек .а2н3енер .а2н3епи .а2н3ерг .а2н3ерит .а2н3еÑте .а2н3идр .а2н3ирид .а2н3изог .а2н3изом .а2н3изур .а2н3јон .а2н3офт .а2н3Ð¾ÐºÑ .а2н3Ð¾Ð¿Ð¸Ñ .а2н3орг .а2н3орх .а2н3овар .а2набап .а2набат .а2набаз .а2набио .а2набол .а2надем .а2надипл .а2надоз .а2нафаз .а2нафила .а2нафон .а2нафор .а2наген .а2нагно .а2награ .а2нахор .а2нахро .а2накал .а2накам .а2накат .а2накеф .а2накла .а2накли .а2накој .а2накол .а2накрон .а2накру .а2налеп .а2Ð½Ð°Ð»Ð¸Ñ .а2налит .а2нализ .а2намне .а2нанео .а2напла .а2напле .а2напне .а2напно .а2напро .а2напти .а2нарт .а2наÑар .а2наÑеи .а2наÑпаз .а2наÑта .а2наÑтиг .а2наÑтом .а2натим .а2натоц .а2натом .а2натре .а2натри .а2натро .а2неор .а3г2Ð½Ð¾Ñ .а3г2ноз .а4е2ро .б2 .бе2ш3Ñ› .бе2ш3ч .бе2Ñ3ц .бе2Ñ3к .бе2Ñ3п .бе2Ñ3Ñ‚ .бе2з3алкохол .бе2з3атомÑк .бе2з3б .бе2з3д .бе2з3г .бе2з3и .бе2з3л .бе2з3Ñ™ .бе2з3м .бе2з3н .бе2з3Ñš .бе2з3о .бе2з3Ñ€ .бе2з3у .бе2з3в .бе3Ñ4крупул .бе3Ñ4покоја .бе3Ñ4покојн .бе3Ñ4пора .бе3Ñ4порн .бе3Ñ4тид .бе3Ñ4тија .бе3Ñ4тиљ .бе3Ñ4тилу .бе3Ñ4трана .бе3Ñ4Ñ‚Ñ€Ð°Ñ .бе3Ñ4твар .бе3з4беда .бе3з4бедн .бе3з4бели .бе3з4бједа .бе3з4бједн .бе3з4бол .бе3з4иÑтан .бе3з4иÑтен .бе3з4јач .бе3з4јак .бе3з4лоба .бе3з4лобн .бе3з4начај .бе3з4рача .бе3з4рачн .бе3з4уб .бе3з4уп .бе3з4вуча .бе3з4вучн .бе4о .беÑ4Ñ‚Ñелер .би4о .ц2 .д2 .ÑŸ2 .ди2Ñ3акор .ди2Ñ3јунк .ди2Ñ3конт .ди2Ñ3корд .ди2Ñ3кре .ди2Ñ3кри .ди2Ñ3кур .ди2Ñ3квал .ди2Ñ3ло .ди2Ñ3ориј .ди2Ñ3парит .ди2Ñ3пон .ди2Ñ3поз .ди2Ñ3проп .ди2Ñ3тон .ди2Ñ3трак .Ñ„2 .г2 .ге4о .Ñ…2 .и2ш3Ñ› .и2ш3ч .и2ж3Ñ’ .и2н3абруп .и2н3адек .и2н3афек .и2н3акце .и2н3акур .и2н3амор .и2н3аниц .и2н3аплик .и2н3апÑÑ‚ .и2н3арт .и2н3аугур .и2н3аура .и2н3ед .и2н3ефек .и2н3ег .и2н3ÐµÐºÑ .и2н3екв .и2н3елиг .и2н3епц .и2н3евид .и2н3јек .и2н3јур .и2н3јуÑÑ‚ .и2н3обл .и2н3офиц .и2н3окуп .и2н3опер .и2н3опор .и2н3опÑе .и2н3умбр .и2н3унда .и2н3унк .и2н3утил .и2ноген .и2нокор .и2Ñ3ц .и2Ñ3к .и2Ñ3п .и2Ñ3Ñ‚ .и2з3аба .и2з3ака .и2з3анал .и2з3б .и2з3бија .и2з3бива .и2з3д .и2з3дај .и2з3г .и2з3и .и2з3л .и2з3Ñ™ .и2з3м .и2з3н .и2з3Ñš .и2з3о .и2з3Ñ€ .и2з3у .и2з3в .и2з3ведб .и2з3веде .и2з3веди .и2з3ведн .и3Ñ4как .и3Ñ4кариот .и3Ñ4кат .и3Ñ4кин .и3Ñ4кита .и3Ñ4коч .и3Ñ4конÑк .и3Ñ4крам .и3Ñ4крат .и3Ñ4крен .и3Ñ4крењ .и3Ñ4крич .и3Ñ4криш .и3Ñ4криц .и3Ñ4крит .и3Ñ4крој .и3Ñ4крÑа .и3Ñ4крÑн .и3Ñ4купља .и3Ñ4ÐºÐ²Ð°Ñ .и3Ñ4кврч .и3Ñ4лаб .и3Ñ4лам .и3Ñ4леђ .и3Ñ4лед .и3Ñ4лијеђ .и3Ñ4лијед .и3Ñ4лик .и3Ñ4лин .и3Ñ4љеђ .и3Ñ4љед .и3Ñ4лов .и3Ñ4луш .и3Ñ4луж .и3Ñ4ме .и3Ñ4мије .и3Ñ4мје .и3Ñ4паљив .и3Ñ4пав .и3Ñ4пира .и3Ñ4плић .и3Ñ4плит .и3Ñ4покој .и3Ñ4полин .и3Ñ4пон .и3Ñ4порав .и3Ñ4права .и3Ñ4прави .и3Ñ4правк .и3Ñ4прављ .и3Ñ4правн .и3Ñ4пупч .и3Ñ4пур .и3Ñ4ред .и3Ñ4рк .и3Ñ4таћ .и3Ñ4такн .и3Ñ4там .и3Ñ4тар .и3Ñ4Ñ‚Ð°Ñ .и3Ñ4тави .и3Ñ4тављ .и3Ñ4тиц .и3Ñ4тифан .и3Ñ4тин .и3Ñ4тир .и3Ñ4точа .и3Ñ4точн .и3Ñ4точњ .и3Ñ4ток .и3Ñ4тори .и3Ñ4трад .и3Ñ4тран .и3Ñ4трав .и3Ñ4трић .и3Ñ4триж .и3Ñ4триц .и3Ñ4труг .и3Ñ4туп .и3Ñ4уш .и3Ñ4ук .и3Ñ4ÑƒÑ .и3Ñ4ут .и3з4бав .и3з4бичкава .и3з4блеушан .и3з4бојак .и3з4бојк .и3з4драв .и3з4гомет .и3з4гред .и3з4грн .и3з4грт .и3з4иђ .и3з4ид .и3з4ими .и3з4јежљ .и3з4лож .и3з4лог 
.и3з4лопаћ .и3з4лоз .и3з4ним .и3з4ној .и3з4олац .и3з4олат .и3з4олир .и3з4олов .и3з4рачи .и3з4раел .и3з4раиљ .и3з4ун .и3з4упч .и3з4вала .и3з4вале .и3з4вали .и3з4ваљи .и3з4валу .и3з4вижд .и3з4вииÑкр .и3з4вија .и3з4вијен .и3з4вин .и3з4вињ .и3з4вир .и3з4витоп .и3з4вјед .и3з4војац .и3з4војц .и3з4вор .инте2Ñ€3а .инте2Ñ€3е .инте2Ñ€3и .инте2Ñ€3је .инте2Ñ€3о .инте2Ñ€3у .инте3Ñ€4ежџ .инте3Ñ€4егн .инте3Ñ€4еÑа .инте3Ñ€4еÑе .инте3Ñ€4еÑи .инте3Ñ€4еÑн .инте3Ñ€4еÑо .инте3Ñ€4еÑу .инте3Ñ€4ије .инте3Ñ€4огат .иÑÑ‚2к .из3бе2з3обр .из3бе2з3ум .из3г2н .из3ва2н3евр .из4оанем .из4оаном .из4обат .из4оброн .из4одим .из4один .из4одоз .из4офон .из4офот .из4огам .из4огео .из4Ð¾Ð³Ð»Ð¾Ñ .из4огон .из4ограф .из4охал .из4охаз .из4охел .из4охиј .из4охим .из4Ð¾Ñ…Ð¸Ð¿Ñ .из4охит .из4охор .из4охро .из4оклин .из4околон .из4Ð¾Ð»ÐµÐºÑ .из4Ð¾Ð»ÑƒÐºÑ .из4омер .из4ометр .из4оморф .из4онеф .из4оном .из4опат .из4опер .из4опле .из4опол .из4опÑеф .из4орах .из4оÑеи .из4оÑинт .из4оÑиÑÑ‚ .из4оÑкел .из4оÑкоп .из4оÑтаз .из4оÑте .из4отах .из4отал .из4отер .из4отон .из4отоп .из4отро .иза3г2н .иза3Ñ‚2к .изд2на .изд2но .изд2ну .изр2к .јури2Ñ3к .јури2Ñ3п .к2 .Ñ™2 .м2 .н2 .на2д3жањ .на2д3ждр .на2д3же .на2д3жир .на2д3жив .на2д3жњ .на2д3жуп .на2д3игр .на2д3инж .на2д3ина .на2д3иÑк .на2д3јач .на2д3јах .на2д3јеч .на2д3јек .на2д3јез .на2д3јун .на2д3л .на2д3Ñ™ .на2д3оч .на2д3офи .на2д3ора .на2д3оÑе .на2д3оÑје .на2д3оÑо .на2д3рач .на2д3рашћ .на2д3ран .на2д3раÑÑ‚ .на2д3реал .на2д3реп .на2д3руч .на2д3руг .на2д3рук .на2д3уч .на2д3удар .на2д3ум .на2д3в .на2ј3а .на2ј3е .на2ј3и .на2ј3о .на2ј3у .на3д2нев .на3д2нич .на3д2ниц .на3д4лан .на3д4леш .на3д4леж .на3д4вал .на3д4веÑи .на3д4веÑÑ‚ .на3д4виј .на3д4вит .на3д4вла .на3д4воје .на3д4вор .на3ј4аш .на3ј4ажи .на3ј4ада .на3ј4аде .на3ј4ади .на3ј4ах .на3ј4ака .на3ј4ако .на3ј4ало .на3ј4ами .на3ј4амл .на3ј4амн .на3ј4арц .на3ј4ари .на3ј4арм .на3ј4ати .на3ј4аук .на3ј4ава .на3ј4аве .на3ј4ави .на3ј4ављ .на3ј4ази .на3ј4еже .на3ј4ежи .на3ј4ежу .на3ј4ец .на3ј4еда .на3ј4еди .на3ј4едн .на3ј4едр .на3ј4еÑти .на3ј4етк .на3ј4езд .на3ј4езн .на3ј4урен .на3ј4ури .на3Ñ‚2ках .на3Ñ‚2кам .на3Ñ‚2каÑм .на3Ñ‚2каÑÑ‚ .наг2н .наг2Ñš .ну2Ñ3Ð¿Ð¾Ñ .ну2Ñ3про .ну2з3бел .ну2з3биљ .ну2з3љуб .ну2з3ре .ну2з3рје .ну2з3уж .о2б3игр .о2б3иÑтин .о2б3иÑтињ .о2б3ј .о2б3лај .о2б3лакш .о2б3лам .о2б3лаÑк .о2б3лећ .о2б3леж .о2б3леден .о2б3лег .о2б3леп .о2б3лет .о2б3лијеж .о2б3лијег .о2б3лијеп .о2б3лијет .о2б3лиÑÑ‚ .о2б3лив .о2б3лизат .о2б3лизав .о2б3лизи .о2б3Ñ™ .о2б3лока .о2б3луч .о2б3лук .о2б3ору .о2б3Ñ€ .о2б3уж .о2б3убож .о2б3уд .о2б3уме .о2б3уми .о2б3умр .о2б3уз .о2д3жал .о2д3жаљ .о2д3же .о2д3жи .о2д3жв .о2д3арг .о2д3и2з3д .о2д3и2з3в .о2д3игр .о2д3иÑк .о2д3иÑти .о2д3ј .о2д3л .о2д3Ñ™ .о2д3ок .о2д3онд .о2д3онл .о2д3оно .о2д3ону .о2д3озд .о2д3озг .о2д3Ñ€ .о2д3уч .о2д3ук .о2д3ул .о2д3ум .о2д3уве .о2д3уви .о2д3узд .о2д3узе .о2д3узи .о2д3узл .о2д3в .о3б4јеш .о3б4јек .о3б4јер .о3б4јеÑи .о3б4јет .о3б4љан .о3б4љут .о3б4љуз .о3б4рђ .о3б4рч .о3б4рш .о3б4раћ .о3б4рашч .о3б4рашн .о3б4раже .о3б4рамб .о3б4ран .о3б4рањ .о3б4рат .о3б4раза .о3б4разд .о3б4рази .о3б4разн .о3б4разо .о3б4разу .о3б4ређ .о3б4реч .о3б4реж .о3б4рец .о3б4ред .о3б4рем .о3б4Ñ€ÐµÑ .о3б4рети .о3б4ретн .о3б4рич .о3б4риц .о3б4риј .о3б4Ñ€Ð¸Ñ .о3б4рит .о3б4рив .о3б4рк .о3б4рл .о3б4рљ .о3б4рн .о3б4роћ .о3б4роч .о3б4рок .о3б4рон .о3б4рова .о3б4ровц .о3б4Ñ€Ñ .о3б4руч .о3б4руш .о3б4рук .о3б4рун .о3б4Ñ€ÑƒÑ .о3д4јећ .о3д4јен .о3д4јев .о3д4лаж .о3д4лаг .о3д4лака .о3д4лаз .о3д4луч .о3д4лук .о3д4рж .о3д4раћ .о3д4рачи .о3д4раж .о3д4рана .о3д4ране .о3д4рани .о3д4рано .о3д4рану .о3д4рапа .о3д4рапи .о3д4рапљ .о3д4раз .о3д4ређ 
.о3д4ред .о3д4рем .о3д4рен .о3д4рич .о3д4риш .о3д4риб .о3д4риц .о3д4рин .о3д4рињ .о3д4рл .о3д4рљ .о3д4рн .о3д4рон .о3д4роњ .о3д4рп .о3д4рт .о3д4руж .о3д4руг .о3д4рвеч .о3д4рвен .о3д4рвењ .о3д4важ .о3д4вај .о3д4веÑа .о3д4веÑн .о3д4веÑÑ‚ .о3д4вић .о3д4викав .о3д4викн .о3д4Ð²Ð¸Ñ .о3д4вој .о3Ñ‚2ках .о3Ñ‚2кам .о3Ñ‚2каÑм .о3Ñ‚2каÑÑ‚ .обе2ш3Ñ› .обе2ш3ч .обе2Ñ3ц .обе2Ñ3к .обе2Ñ3п .обе2Ñ3Ñ‚ .обе2з3б .обе2з3д .обе2з3г .обе2з3ј .обе2з3л .обе2з3Ñ™ .обе2з3м .обе2з3н .обе2з3о .обе2з3Ñ€ .обе2з3у .обе2з3в .обе3Ñ4тан .обе3Ñ4тиј .обе3Ñ4тран .обе3з4нач .обе3з4нан .обе3з4нањ .обе3з4уб .обе3з4виј .од3г2н .од3м2н .ода3д2н .ода3г2н .по2д3адм .по2д3игр .по2д3изв .по2д3ј .по2д3лакат .по2д3лакт .по2д3лећ .по2д3леж .по2д3лег .по2д3леп .по2д3лет .по2д3лијећ .по2д3лијеж .по2д3лијег .по2д3лијеп .по2д3лијет .по2д3лиÑÑ‚ .по2д3лиз .по2д3љут .по2д3лок .по2д3лом .по2д3луч .по2д3луж .по2д3луп .по2д3оч .по2д3ош .по2д3оф .по2д3окн .по2д3ра2з3д .по2д3рад .по2д3рам .по2д3ран .по2д3рањ .по2д3Ñ€Ð°Ñ .по2д3равн .по2д3равњ .по2д3разр .по2д3разу .по2д3реп .по2д3Ñ€ÐµÑ .по2д3рез .по2д3рик .по2д3рит .по2д3рож .по2д3рон .по2д3ров .по2д3руча .по2д3ручи .по2д3ручн .по2д3руб .по2д3рук .по2д3упла .по2д3уÑм .по2д3уÑн .по2д3вариј .по2д3веч .по2д3веж .по2д3вез .по2д3вик .по2д3вил .по2д3вир .по2д3влаш .по2д3Ð²Ð»Ð°Ñ .по2д3вођ .по2д3вож .по2д3вод .по2д3воз .по2д3врћ .по2д3врж .по2д3враћ .по2д3врат .по2д3врг .по2д3Ð²Ñ€Ð¸Ñ .по2д3Ð²Ñ€Ñ .по2д3вућ .по2ÑÑ‚3егз .по2ÑÑ‚3инду .по2ÑÑ‚3лим .по2ÑÑ‚3онк .по2ÑÑ‚3опер .по3д2нев .по3д4јеч .по3д4јен .по3г2н .по3м2н .по3м2Ñš .по3Ñ€2Ñ’ .по3Ñ‚2ках .по3Ñ‚2кам .по3Ñ‚2кат .по3Ñ‚2кав .пре2д3же .пре2д3жи .пре2д3ид .пре2д3игр .пре2д3иÑпи .пре2д3иÑто .пре2д3иÑÑ‚Ñ€ .пре2д3изб .пре2д3ј .пре2д3обј .пре2д3одре .пре2д3Ð¾ÐºÑƒÑ .пре2д3оÑе .пре2д3оÑје .пре2д3оÑв .пре2д3рач .пре2д3рад .пре2д3рат .пре2д3руч .пре2д3убеђ .пре2д3убијеђ .пре2д3убјеђ .пре2д3удар .пре2д3угов .пре2д3ÑƒÐ¿Ð¸Ñ .пре2д3уÑло .пре2д3увер .пре2д3увјер .пре2д3увјет .пре2д3в .пре3д4јен .пре3д4вај .пре3д4вар .пре3д4веÑÑ‚ .пре3д4воја .пре3д4воје .пре3д4воји .пре3д4вор .пре3д4Ð²Ð¾Ñ .пре3Ñ‚2ках .пре3Ñ‚2кам .пре3Ñ‚2кат .пред3м2н .пред3м2Ñš .про3г2н .про3Ñ‚2ка .про3Ñ‚2ки .проти2в3акц .проти2в3оф .проти2в3отр .проти2в3Ñ€ .проти2в3уд .проти2в3ÑƒÑ .Ñ€2 .ра2ш3Ñ› .ра2ш3ч .ра2ж3Ñ’ .ра2Ñ3ц .ра2Ñ3к .ра2Ñ3п .ра2Ñ3Ñ‚ .ра2з3анал .ра2з3б .ра2з3д .ра2з3е .ра2з3г .ра2з3и .ра2з3л .ра2з3Ñ™ .ра2з3м .ра2з3н .ра2з3Ñš .ра2з3оба .ра2з3обл .ра2з3обр .ра2з3од .ра2з3орат .ра2з3орав .ра2з3орт .ра2з3ору .ра2з3от .ра2з3Ñ€ .ра2з3уда .ра2з3удб .ра2з3уди .ра2з3улар .ра2з3умр .ра2з3уве .ра2з3узд .ра2з3узе .ра2з3узи .ра2з3в .ра3ш4ћењ .ра3ш4чић .ра3Ñ4как .ра3Ñ4канд .ра3Ñ4кин .ра3Ñ4клад .ра3Ñ4клањ .ра3Ñ4клап .ра3Ñ4клон .ра3Ñ4клопа .ра3Ñ4клопи .ра3Ñ4клопљ .ра3Ñ4кош .ра3Ñ4кроп .ра3Ñ4пај .ра3Ñ4пав .ра3Ñ4пета .ра3Ñ4пете .ра3Ñ4пети .ра3Ñ4пето .ра3Ñ4пику .ра3Ñ4пињ .ра3Ñ4плин .ра3Ñ4плињ .ра3Ñ4пн .ра3Ñ4полож .ра3Ñ4пон .ра3Ñ4пор .ра3Ñ4прав .ра3Ñ4прем .ра3Ñ4рђ .ра3Ñ4рд .ра3Ñ4ре .ра3Ñ4тај .ра3Ñ4тан .ра3Ñ4тат .ра3Ñ4тав .ра3Ñ4тењ .ра3Ñ4тил .ра3Ñ4тињ .ра3Ñ4тир .ра3Ñ4Ñ‚Ð¸Ñ .ра3Ñ4тит .ра3Ñ4тој .ра3Ñ4трел .ра3Ñ4трет .ра3Ñ4трој .ра3Ñ4трт .ра3Ñ4тућ .ра3Ñ4туп .ра3Ñ4тур .ра3з4башур .ра3з4бад .ра3з4бан .ра3з4бар .ра3з4бау .ра3з4бој .ра3з4бор .ра3з4ев .ра3з4гађ .ра3з4грт .ра3з4иђ .ра3з4ић .ра3з4ид .ра3з4иј .ра3з4ин .ра3з4ир .ра3з4из .ра3з4лаг .ра3з4лаз .ра3з4лич .ра3з4лик .ра3з4лож .ра3з4лог .ра3з4лоз .ра3з4мећ .ра3з4мет .ра3з4мрÑк .ра3з4нат .ра3з4ред .ра3з4роч .ра3з4рок .ра3з4вал .ра3з4веде .ра3з4веÑÑ‚ .ра3з4вић .ра3з4виго .ра3з4вија .ра3з4вије .ра3з4вију .ра3з4вит .ра3з4вој .ра3з4вон .ра3з4врћ .ра3з4враћ .ра3з4врат .ра3з4врт 
.ра4Ñ5турч .раз3д2ни .раз3г2н .раза3г2н .раза3Ñ‚2ка .Ñ2 .Ñу2б3а .Ñу2б3инв .Ñу2б3јунк .Ñу2б3л .Ñу2б3Ð¾ÐºÑ .Ñу2б3орд .Ñу2б3реп .Ñу2б3рог .Ñу3б4аш .Ñупе2Ñ€3а .Ñупе2Ñ€3е .Ñупе2Ñ€3и .Ñупе2Ñ€3о .Ñупе2Ñ€3у .Ñупе3Ñ€4иор .тран2Ñ3а .тран2Ñ3ц .тран2Ñ3е .тран2Ñ3к .тран2Ñ3л .тран2Ñ3м .тран2Ñ3н .тран2Ñ3о .тран2Ñ3п .тран2Ñ3Ñ‚ .тран2Ñ3у .тран2Ñ3в .тран3Ñ4еп .тран3Ñ4кри .тран3Ñ4уд .тран3Ñ4ум .у2ш3Ñ› .у2ш3ч .у2Ñ3ц .у2Ñ3к .у2Ñ3п .у2Ñ3Ñ‚Ð°Ð»Ð°Ñ .у2Ñ3тара .у2Ñ3тећ .у2Ñ3тег .у2Ñ3тер .у2Ñ3тов .у2Ñ3трћ .у2Ñ3трч .у2Ñ3трај .у2Ñ3трал .у2Ñ3треб .у2Ñ3треп .у2Ñ3Ñ‚Ñ€ÐµÑ .у2Ñ3трг .у2Ñ3трк .у2Ñ3трн .у2Ñ3трп .у2Ñ3тућ .у2Ñ3тум .у2Ñ3тур .у2Ñ3тврђ .у2Ñ3тврд .у2з3б .у2з3д .у2з3г .у2з3и .у2з3игр .у2з3инат .у2з3иÑкр .у2з3л .у2з3Ñ™ .у2з3м .у2з3н .у2з3Ñš .у2з3обеÑÑ‚ .у2з3обијеÑÑ‚ .у2з3охо .у2з3орат .у2з3орав .у2з3Ñ€ .у2з3угар .у2з3в .у3г2ми .у3г2н .у3Ñ4как .у3Ñ4клађ .у3Ñ4клад .у3Ñ4ко .у3Ñ4куп .у3Ñ4пало .у3Ñ4пав .у3Ñ4пеш .у3Ñ4пех .у3Ñ4пел .у3Ñ4пем .у3Ñ4пент .у3Ñ4пет .у3Ñ4пев .у3Ñ4пија .у3Ñ4пије .у3Ñ4пијуш .у3Ñ4пикуш .у3Ñ4пјеш .у3Ñ4пјех .у3Ñ4пјел .у3Ñ4пјем .у3Ñ4пјет .у3Ñ4пјев .у3Ñ4пон .у3Ñ4пора .у3Ñ4пореч .у3Ñ4порен .у3Ñ4порењ .у3Ñ4пори .у3Ñ4поÑоб .у3Ñ4према .у3Ñ4преми .у3Ñ4рђ .у3Ñ4рж .у3Ñ4ра .у3Ñ4рд .у3Ñ4ре .у3Ñ4ријед .у3Ñ4рк .у3Ñ4рљ .у3Ñ4рн .у3Ñ4рп .у3з4бор .у3з4диц .у3з4лан .у3з4лат .у3з4лић .у3з4лиц .у3з4лим .у3з4лит .у3з4лов .у3з4лудоб .у3з4нач .у3з4нак .у3з4неве .у3з4невје .у3з4нич .у3з4ниц .у3з4ној .у3з4рет .у3з4рев .у3з4ријет .у3з4ријев .у3з4рн .у3з4рњ .у3з4роч .у3з4рока .у3з4роко .у3з4року .у3з4руј .у3з4ван .у3з4ват .у3з4виж .у3з4вија .у3з4вије .у3з4вијо .у3з4вију .у3з4вој .у4Ñ5кош .у4Ñ5коко .у4Ñ5коле .у4Ñ5колу .у4Ñ5ком .у4Ñ5копа .у4Ñ5кора .у4Ñ5коÑи .у4Ñ5котр .у4Ñ5ков .уза3Ñ‚2ка .ва2н3евр .ва2н3уÑтав .з2 .за3г2н .за3Ñ‚2ка 2Ñ’1Ñ’ 2Ñ’1Ñ› 2Ñ’1ч 2Ñ’1ш 2Ñ’1ж 2Ñ’1б 2Ñ’1ц 2Ñ’1д 2Ñ’1ÑŸ 2Ñ’1Ñ„ 2Ñ’1г 2Ñ’1Ñ… 2Ñ’1к 2Ñ’1Ñ™ 2Ñ’1м 2Ñ’1н 2Ñ’1Ñ€ 2Ñ’1Ñ 2Ñ’1з 2Ñ›1Ñ’ 2Ñ›1Ñ› 2Ñ›1ч 2Ñ›1ш 2Ñ›1ж 2Ñ›1б 2Ñ›1ц 2Ñ›1д 2Ñ›1ÑŸ 2Ñ›1Ñ„ 2Ñ›1г 2Ñ›1Ñ… 2Ñ›1к 2Ñ›1Ñ™ 2Ñ›1м 2Ñ›1н 2Ñ›1Ñ€ 2Ñ›1Ñ 2Ñ›1з 2ч1Ñ’ 2ч1Ñ› 2ч1ч 2ч1ш 2ч1ж 2ч1б 2ч1ц 2ч1д 2ч1ÑŸ 2ч1Ñ„ 2ч1г 2ч1Ñ… 2ч1ј 2ч1к 2ч1Ñ™ 2ч1м 2ч1н 2ч1Ñ€ 2ч1Ñ 2ч1Ñ‚ 2ч1з 2ч3вј 2ч3вл 2ч3вљ 2чв. 2ш1Ñ’ 2ш1ш 2ш1ж 2ш1б 2ш1д 2ш1ÑŸ 2ш1Ñ„ 2ш1г 2ш1Ñ… 2ш1ј 2ш1Ñ€ 2ш1Ñ 2ш1з 2ш3чв 2ш3цј 2ш3цв 2ш3мј 2ш3мл 2ш3мљ 2ш3пј 2ш3пл 2ш3пљ 2ш3тј 2ш3тл 2ш3Ñ‚Ñ™ 2ш3вл 2ш3вљ 2шћ. 2шч. 2шчћ 2шчч 2шчш 2шчж 2шчб 2шчц 2шчд 2шчџ 2шчф 2шчг 2шчх 2шчј 2шчк 2шчљ 2шчм 2шчн 2шчр 2ÑˆÑ‡Ñ 2шчт 2шчз 2шц. 2шцћ 2шцч 2шцш 2шцж 2шцб 2шцц 2шцд 2шцџ 2шцф 2шцг 2шцх 2шцк 2шцљ 2шцм 2шцн 2шцр 2ÑˆÑ†Ñ 2шцт 2шцз 2шк. 2шкђ 2шкћ 2шкч 2шкш 2шкж 2шкб 2шкц 2шкд 2шкџ 2шкф 2шкг 2шкх 2шкк 2шкм 2шкн 2ÑˆÐºÑ 2шкт 2шкз 2шљ. 2шм. 2шмђ 2шмч 2шмш 2шмж 2шмб 2шмц 2шмд 2шмџ 2шмф 2шмг 2шмх 2шмк 2шмм 2шмн 2ÑˆÐ¼Ñ 2шмт 2шмз 2шн. 2шп. 2шпђ 2шпч 2шпш 2шпж 2шпб 2шпц 2шпд 2шпџ 2шпф 2шпг 2шпх 2шпк 2шпм 2шпн 2ÑˆÐ¿Ñ 2шпт 2шпз 2шт. 2штђ 2штч 2штш 2штж 2штб 2штц 2штд 2штџ 2штф 2штг 2штх 2штк 2штм 2штн 2ÑˆÑ‚Ñ 2штт 2штз 2шв. 2швђ 2швч 2швш 2швж 2швб 2швц 2швд 2швџ 2швф 2швг 2швх 2швк 2швм 2швн 2ÑˆÐ²Ñ 2швт 2швз 2ж1Ñ› 2ж1ч 2ж1ш 2ж1ж 2ж1ц 2ж1ÑŸ 2ж1Ñ„ 2ж1Ñ… 2ж1ј 2ж1к 2ж1Ñ€ 2ж1Ñ 2ж1Ñ‚ 2ж1з 2ж3бј 2ж3бл 2ж3бљ 2ж3бр 2ж3дј 2ж3дл 2ж3дљ 2ж3дв 2ж3гј 2ж3гл 2ж3гљ 2ж3гр 2ж3гв 2ж3мј 2ж3мл 2ж3мљ 2ж3мр 2ж3вл 2ж3вљ 2жђ. 2жб. 2жбђ 2жбч 2жбш 2жбж 2жбб 2жбц 2жбд 2жбџ 2жбф 2жбг 2жбх 2жбк 2жбм 2жбн 2Ð¶Ð±Ñ 2жбт 2жбз 2жд. 2ждч 2ждш 2ждб 2ждц 2ждд 2ждџ 2ждф 2ждг 2ждх 2ждк 2ждм 2ждн 2Ð¶Ð´Ñ 2ждт 2ждз 2жг. 2жгч 2жгш 2жгж 2жгб 2жгц 2жгд 2жгџ 2жгф 2жгг 2жгх 2жгк 2жгм 2жгн 2Ð¶Ð³Ñ 2жгт 2жгз 2жл. 2жљ. 2жм. 2жмђ 2жмч 2жмш 2жмж 2жмб 2жмц 2жмд 2жмџ 2жмф 2жмг 2жмх 2жмк 2жмм 2жмн 2Ð¶Ð¼Ñ 2жмт 2жмз 2жн. 2жњ. 
2жвђ 2жвч 2жвш 2жвж 2жвб 2жвц 2жвд 2жвџ 2жвф 2жвг 2жвх 2жвк 2жвм 2жвн 2Ð¶Ð²Ñ 2жвт 2жвз 2а1 2б1Ñ’ 2б1Ñ› 2б1ч 2б1ш 2б1ж 2б1б 2б1ц 2б1д 2б1ÑŸ 2б1Ñ„ 2б1г 2б1Ñ… 2б1к 2б1м 2б1н 2б1Ñ 2б1Ñ‚ 2б1з 2бј. 2бл. 2бљ. 2бр. 2ц1Ñ’ 2ц1Ñ› 2ц1ч 2ц1ш 2ц1ж 2ц1б 2ц1ц 2ц1д 2ц1ÑŸ 2ц1Ñ„ 2ц1г 2ц1Ñ… 2ц1к 2ц1Ñ™ 2ц1м 2ц1н 2ц1Ñ 2ц1Ñ‚ 2ц1з 2ц3вл 2ц3вљ 2цј. 2цр. 2цв. 2д1Ñ’ 2д1Ñ› 2д1ч 2д1ш 2д1б 2д1ц 2д1д 2д1ÑŸ 2д1Ñ„ 2д1г 2д1Ñ… 2д1к 2д1м 2д1н 2д1Ñ 2д1Ñ‚ 2д1з 2д3вл 2д3вљ 2д3вр 2ÑŸ1Ñ’ 2ÑŸ1Ñ› 2ÑŸ1ч 2ÑŸ1ш 2ÑŸ1ж 2ÑŸ1б 2ÑŸ1ц 2ÑŸ1д 2ÑŸ1ÑŸ 2ÑŸ1Ñ„ 2ÑŸ1г 2ÑŸ1Ñ… 2ÑŸ1ј 2ÑŸ1к 2ÑŸ1Ñ™ 2ÑŸ1м 2ÑŸ1н 2ÑŸ1Ñ€ 2ÑŸ1Ñ 2ÑŸ1Ñ‚ 2ÑŸ1з 2ÑŸ. 2дј. 2дл. 2дљ. 2др. 2дв. 2е1 2Ñ„1Ñ’ 2Ñ„1Ñ› 2Ñ„1ч 2Ñ„1ш 2Ñ„1ж 2Ñ„1б 2Ñ„1ц 2Ñ„1д 2Ñ„1ÑŸ 2Ñ„1Ñ„ 2Ñ„1г 2Ñ„1Ñ… 2Ñ„1к 2Ñ„1м 2Ñ„1н 2Ñ„1Ñ 2Ñ„1Ñ‚ 2Ñ„1з 2фј. 2фл. 2Ñ„Ñ™. 2Ñ„Ñ€. 2г1Ñ’ 2г1Ñ› 2г1ч 2г1ш 2г1ж 2г1б 2г1ц 2г1д 2г1ÑŸ 2г1Ñ„ 2г1г 2г1Ñ… 2г1к 2г1м 2г1н 2г1Ñ 2г1Ñ‚ 2г1з 2г3вј 2г3вл 2г3вљ 2г3вр 2гј. 2гл. 2гљ. 2гр. 2гв. 2Ñ…1Ñ’ 2Ñ…1Ñ› 2Ñ…1ч 2Ñ…1ш 2Ñ…1ж 2Ñ…1б 2Ñ…1ц 2Ñ…1д 2Ñ…1ÑŸ 2Ñ…1Ñ„ 2Ñ…1г 2Ñ…1Ñ… 2Ñ…1к 2Ñ…1м 2Ñ…1н 2Ñ…1Ñ 2Ñ…1Ñ‚ 2Ñ…1з 2Ñ…3вј 2Ñ…3вл 2Ñ…3вљ 2Ñ…3вр 2хј. 2хл. 2Ñ…Ñ™. 2Ñ…Ñ€. 2хв. 2хвђ 2хвч 2хвш 2хвж 2хвб 2хвц 2хвд 2хвџ 2хвф 2хвг 2хвх 2хвк 2хвм 2хвн 2Ñ…Ð²Ñ 2хвт 2хвз 2и1 2ј1Ñ’ 2ј1Ñ› 2ј1ч 2ј1ш 2ј1ж 2ј1б 2ј1ц 2ј1д 2ј1ÑŸ 2ј1Ñ„ 2ј1г 2ј1Ñ… 2ј1ј 2ј1к 2ј1Ñ™ 2ј1м 2ј1н 2ј1Ñ€ 2ј1Ñ 2ј1Ñ‚ 2ј1з 2к1Ñ’ 2к1Ñ› 2к1ч 2к1ш 2к1ж 2к1б 2к1ц 2к1д 2к1ÑŸ 2к1Ñ„ 2к1г 2к1Ñ… 2к1к 2к1м 2к1н 2к1Ñ 2к1Ñ‚ 2к1з 2к3вј 2к3вл 2к3вљ 2кј. 2кл. 2кљ. 2кр. 2кв. 2Ñ™1Ñ’ 2Ñ™1Ñ› 2Ñ™1ч 2Ñ™1ш 2Ñ™1ж 2Ñ™1б 2Ñ™1ц 2Ñ™1д 2Ñ™1ÑŸ 2Ñ™1Ñ„ 2Ñ™1г 2Ñ™1Ñ… 2Ñ™1ј 2Ñ™1к 2Ñ™1Ñ™ 2Ñ™1м 2Ñ™1н 2Ñ™1Ñ€ 2Ñ™1Ñ 2Ñ™1Ñ‚ 2Ñ™1з 2м1Ñ’ 2м1Ñ› 2м1ч 2м1ш 2м1ж 2м1б 2м1ц 2м1д 2м1ÑŸ 2м1Ñ„ 2м1г 2м1Ñ… 2м1к 2м1м 2м1н 2м1Ñ 2м1Ñ‚ 2м1з 2мј. 2мл. 2мљ. 2мр. 2н1Ñ’ 2н1Ñ› 2н1ч 2н1ш 2н1ж 2н1б 2н1ц 2н1д 2н1ÑŸ 2н1Ñ„ 2н1г 2н1Ñ… 2н1к 2н1Ñ™ 2н1м 2н1н 2н1Ñ€ 2н1Ñ 2н1Ñ‚ 2н1з 2о1 2пј. 2пл. 2пљ. 2пр. 2Ñ€1Ñ’ 2Ñ€1Ñ› 2Ñ€1ч 2Ñ€1ш 2Ñ€1ж 2Ñ€1б 2Ñ€1ц 2Ñ€1д 2Ñ€1ÑŸ 2Ñ€1Ñ„ 2Ñ€1г 2Ñ€1Ñ… 2Ñ€1ј 2Ñ€1к 2Ñ€1Ñ™ 2Ñ€1м 2Ñ€1н 2Ñ€1Ñ€ 2Ñ€1Ñ 2Ñ€1Ñ‚ 2Ñ€1з 2Ñ1Ñ’ 2Ñ1Ñ› 2Ñ1ч 2Ñ1ш 2Ñ1ж 2Ñ1б 2Ñ1д 2Ñ1ÑŸ 2Ñ1Ñ„ 2Ñ1г 2Ñ1Ñ… 2Ñ1Ñ 2Ñ1з 2Ñ3кј 2Ñ3кљ 2Ñ3тл 2Ñ3вљ 2Ñц. 2Ñцђ 2Ñцћ 2Ñцч 2Ñцш 2Ñцж 2Ñцб 2Ñцц 2Ñцд 2Ñцџ 2Ñцф 2Ñцг 2Ñцх 2Ñцк 2Ñцљ 2Ñцм 2Ñцн 2Ñцр 2ÑÑ†Ñ 2Ñцт 2Ñцз 2Ñј. 2Ñк. 2Ñкђ 2Ñкћ 2Ñкч 2Ñкш 2Ñкж 2Ñкб 2Ñкц 2Ñкд 2Ñкџ 2Ñкф 2Ñкг 2Ñкх 2Ñкк 2Ñкм 2Ñкн 2ÑÐºÑ 2Ñкт 2Ñкз 2Ñл. 2ÑÑ™. 2Ñм. 2Ñмђ 2Ñмћ 2Ñмч 2Ñмш 2Ñмж 2Ñмб 2Ñмц 2Ñмд 2Ñмџ 2Ñмф 2Ñмг 2Ñмх 2Ñмк 2Ñмм 2Ñмн 2ÑÐ¼Ñ 2Ñмт 2Ñмз 2Ñн. 2ÑÑš. 2Ñп. 2Ñпђ 2Ñпћ 2Ñпч 2Ñпш 2Ñпж 2Ñпб 2Ñпц 2Ñпд 2Ñпџ 2Ñпф 2Ñпг 2Ñпх 2Ñпк 2Ñпм 2Ñпн 2Ñпп 2ÑÐ¿Ñ 2Ñпт 2Ñпв 2Ñпз 2ÑÑ€. 2ÑÑ‚. 2ÑÑ‚Ñ’ 2ÑÑ‚Ñ› 2Ñтч 2Ñтш 2Ñтж 2Ñтб 2Ñтц 2Ñтд 2ÑÑ‚ÑŸ 2ÑÑ‚Ñ„ 2Ñтг 2ÑÑ‚Ñ… 2Ñтк 2Ñтм 2Ñтн 2Ñтп 2ÑÑ‚Ñ 2ÑÑ‚Ñ‚ 2Ñтз 2Ñв. 2Ñвђ 2Ñвћ 2Ñвч 2Ñвш 2Ñвж 2Ñвб 2Ñвц 2Ñвд 2Ñвџ 2Ñвф 2Ñвг 2Ñвх 2Ñвк 2Ñвм 2Ñвн 2Ñвп 2ÑÐ²Ñ 2Ñвт 2Ñвв 2Ñвз 2Ñ‚1Ñ’ 2Ñ‚1Ñ› 2Ñ‚1ч 2Ñ‚1ш 2Ñ‚1ж 2Ñ‚1б 2Ñ‚1ц 2Ñ‚1д 2Ñ‚1ÑŸ 2Ñ‚1Ñ„ 2Ñ‚1г 2Ñ‚1Ñ… 2Ñ‚1к 2Ñ‚1м 2Ñ‚1н 2Ñ‚1п 2Ñ‚1Ñ 2Ñ‚1Ñ‚ 2Ñ‚1з 2Ñ‚3вј 2Ñ‚3вл 2Ñ‚3вљ 2тј. 2тл. 2Ñ‚Ñ™. 2Ñ‚Ñ€. 2тв. 2у1 2в1Ñ’ 2в1Ñ› 2в1ч 2в1ш 2в1ж 2в1б 2в1ц 2в1д 2в1ÑŸ 2в1Ñ„ 2в1г 2в1Ñ… 2в1к 2в1м 2в1н 2в1п 2в1Ñ 2в1Ñ‚ 2в1в 2в1з 2вј. 2вл. 2вљ. 2вр. 2з1Ñ’ 2з1Ñ› 2з1ч 2з1ш 2з1ж 2з1ц 2з1ÑŸ 2з1Ñ„ 2з1Ñ… 2з1к 2з1п 2з1Ñ 2з1з 2з3бљ 2з3дл 2з3дљ 2з3дв 2з3гј 2з3гљ 2з3мл 2з3мр 2з3вл 2з3вљ 2зб. 2збђ 2збћ 2збч 2збш 2збж 2збб 2збц 2збд 2збџ 2збф 2збг 2збх 2збк 2збм 2збн 2збп 2Ð·Ð±Ñ 2збв 2збз 2зд. 2здђ 2здћ 2здч 2здш 2здб 2здц 2здд 2здџ 2здф 2здг 2здх 2здк 2здм 2здн 2здп 2Ð·Ð´Ñ 2здз 2зг. 2згђ 2згћ 2згч 2згж 2згб 2згц 2згд 2згџ 2згф 2згг 2згх 2згк 2згм 2згн 2згп 2Ð·Ð³Ñ 2згз 2зј. 2зл. 2зљ. 2зм. 2змђ 2змћ 2змч 2змш 2змж 2змб 2змц 2змд 2змџ 2змф 2змг 2змх 2змк 2змм 2змн 2змп 2Ð·Ð¼Ñ 2змв 2змз 2зн. 2зњ. 2зр. 2зв. 
2звђ 2звћ 2звч 2звш 2звж 2звб 2звц 2звд 2звџ 2звф 2звг 2звх 2звк 2звм 2звн 2звп 2Ð·Ð²Ñ 2звв 2звз 3ч2лан 3ч2лањ 3б2дењ 3б2дет 3б2дјењ 3б2дјет 3ц2мач 3ц2мак 3ц2миз 3ц2мок 3г2дегод. 3г2декад 3г2декак 3г2дјегод. 3г2дјекад 3г2дјекак 3г2мил 3г2миљ 3г2миз 3г2нај 3г2нежђ 3г2нев 3г2незд 3г2нијежђ 3г2нијезд 3г2њав 3г2њеч 3г2ÑšÐµÑ 3г2њет 3г2њев 3г2њил 3г2њиљ 3г2њио 3г2њит 3г2њур 3г2ној 3г2Ð½Ð¾Ñ 3г2ноз 3Ñ…2тел 3Ñ…2тењ 3Ñ…2тети 3Ñ…2тев 3Ñ…2тјел 3Ñ…2тјењ 3Ñ…2тјети 3Ñ…2тјев 3к2неж 3к2нез 3к2њиж 3к2њиг 3м2наж 3м2нож 3м2ног 3п2Ñич 3п2Ñик 3п2Ñов 3п2Ñуј 3Ñ€2ђа 3Ñ2фер 3Ñ‚2маÑÑ‚ 3Ñ‚2мул 3Ñ‚2муљ 3Ñ‚2муо 3Ñ‚2мур 4Ñ€3је. 4Ñ€3јем ч2в ш2ч ш2л ш2Ñ™ ш2м ш2н ш2п ш2Ñ‚ ш2в ж2Ñ’ ж2б ж2д ж2г ж2л ж2Ñ™ ж2м ж2н ж2в а3а а3е а3и а3о а3у б2ј б2л б2Ñ™ б2Ñ€ ц2ј ц2Ñ€ ц2в д2ж д2ј д2л д2Ñ™ д2Ñ€ д2в е3а е3е е3и е3о е3у Ñ„2ј Ñ„2л Ñ„2Ñ™ Ñ„2Ñ€ г2л г2Ñ™ г2Ñ€ г2в Ñ…2л Ñ…2Ñ™ Ñ…2Ñ€ Ñ…2в и3а и3е и3и и3о и3у к2л к2Ñ™ к2Ñ€ к2в л2ј м2л м2Ñ™ м2Ñ€ не3г2де. не3г2дје. ни3г2де. ни3г2дје. о3а о3е о3и о3о о3у п2ј п2л п2Ñ™ п2Ñ€ Ñ2ц Ñ2к Ñ2л Ñ2Ñ™ Ñ2м Ñ2н Ñ2п Ñ2Ñ€ Ñ2в Ñ‚2ј Ñ‚2л Ñ‚2в у3а у3е у3и у3о у3у в2л в2Ñ™ в2Ñ€ з2б з2д з2г з2ј з2л з2Ñ™ з2м з2н з2Ñ€ з2в", + ["compression"]="zlib", + ["data"]="xÚm]Q–Û,³ÜJVðóOr÷gÉ–ìç<ÙñÃla\6I‘ŽŒ¤-Ø;º¢«€nœ‡\24ªAP4\13´Úxòßë÷Çÿ^÷ðq\14\31—ýãù7||}<¿\127>¿žçôº*¼<¿^mÂÃëð†§×!ã§ÛŸ™\19ž¤¼Žøu}\29SÙãù¥òó3hShU->T\11kh=?·£A÷¾ãU•\30Ÿãëøª€ûb,ýÏ8ê^˜EÎ=Z]\20Jmî5´ò\13¼ìúúõ\28>B,y²F¥>Kž®ÿœ\3S…\31¦îcoïË”v\ +ùÀ'r_0\127}œ\31`Sº½jÃõðj\18Úg$´¦ðü|h´óô\ +‡Y›\21^Š§·ç”Ñ®ƒÎ Iñ\26^ÕÎkHx2c\16›R:™žcž“„½Æû¼§ç_×½tI¥ë>\31¹çÝÒ¢…\8Ú9klX¯F[k°üÔË®Úï ^\3S?,\0295œÒ<\8\\“ªwØïÚó\26N;1S¾ªÆ‚X8åµ\6AV\0 æÝes\7\\4³.,šülcž}5Êôƒ%\7û´Ð\27¨52ïÓ~ÒPs\0228i¸¦5\5h:’\21¶\24AÜ6\22Y3ƒžš‡šdÂÚ@=S>Øu†;Ð#Þ,ë\0\7\13—\18êg­òÂÒÌ#V«z\7•1µ\29\14ºjµ·l, ¬¦/\11Ó¶\25\5«š\26)7pû$¡)\13;¤…“…iTC4—ŸÏ.à\21z\8œÕ\22ú\29N¦ï€/?_w•?Ç|õS˜1/ûOÌoªNÍüsÄž\25짡Ñ$9\6¶«oVÒo•ïU¾SùIåU‹¯›’{•_T?J¾*ùA媎Cþç«ú\5³Þ\13å\24{\21é\22v±°>dÎߥ‹•rA[‰ª#ÆÐ[lÛ\22É­xâ\17YC\18z\9öPÊÄ\0òsq\15\13’çø+¤ûîöõ&Y\ +É#Î\1$;¿·ç(³O&\11\16‚^K¸Wh<\25|Ô½ì-8ž!QòAD$\30 è0J¶`AÜÁ³\12§Æ£Ær^ljtµ}‚ö»öu\ +jéäƒ\26}5\1MišÒ\0145}<ÿþ|ýFNü­o¬X%éé5MÉ[4x×H+\26Ë\24\26š’Ç·\26¼ìco\21Þä\0Ó-nÁ¢U\7ƒŽû\17oz\8Hlj¢\31×ç|Á\22>ü”üÃJ#Ù¹¦ä+N¦î¦8‡wˆ)÷\"ëvÎH³\19¤G\19VÙ##ñ½[U:\27í\7ÿnPc]±ûi\28VIêm÷wÃ\12*¼p—É83=b\31$\14.*·\17Â9õ\21O±)aS:Á¦t~M<½¾c#Ö¥F\11WË”Î.æÒžpXY\9÷9Ç=iJ§Ü”θ)pS:ߦtºMél›ÒÉ6¥smJ§Ú”δ)hÌÉ\14ý]à¡ÀS¥Gœ‡Ü‡\19’7…\21ÚK2&óôD?)â\21«8¢}w«tû²'x‹uëéÍQah%áI÷ øRà¶À¶}xÁ\9\7Ç°Àºÿà+ÜÌ\8\29Oà„÷g2£G°6ƒ¼B\3v¶ˆ`CDÁÚlù„óWK”.\31z&vŽêÙ€Ìs+,\6h_e\23ž\127\19ò°\28æ¥ÿŒ\13\ +gÆ-¬\8-1hŠ{ q\24ϽÀµÂkzCÉ\0183Æt&MÚ\15Ó}\2ë^)™Þ$s)ÁÊ4uT_b\7g±.EÛ\7­í\29¥¶ÃÚÈc\22?iVsVkëº\7ƒ*SS\00283\22o²5èDŸ,I,2=­ô€4^,ÎkPÎùÙ”\03043Ñ]_à¥ÀÎàIëHöŸ¿Ey«q8o\21æé$HYö.Ÿ•¼RyX\29¼ÙÈ\5hß[dGŒ6\4iX·{Ëi\28ôƒ“\15«%\25÷j¤\1wáMio)÷Þ)ˉxÑHÕ¸º%¯ž™Â:Hö‹¿´‹èCÿ5¨3hKö\25%cB\11_h#Šk3 Uâ}mkƒ£E¼f]ÐGŸ4\14\30ŽÁS\30Gð\18T>®DñÖåÝô«ÀC'þeËq²\18ïÖ§ô\27ð\20Î05\26—=\3…\0075w.­·XþÇ ÛVMOš\18µë£†XšÒ¶H\ +,-†w\9‰\4ü\4;…‡\2O\22ǽ]ÕX‹\26ÇŒw§ä—XÚ盬øµ,ž°…lø‡lú‡ì_í­ï²7~“\29“È‚CI;\13>ð\7–+¼;Æ\19¢·ª„\12ÚŠ(D]bÞ¥8­‹Ï„.\16áòV°jA\12lF˜bo\20ôqéehJ¹L\5¦Ðd†ºí.ž1\17\14T\30áƒ\1¥$0D$t)!F¶ÞDg9ÃQÃÁ–â=3CEEà¦ú–굆«ÒjŠ“’Ùœ}ƒ(À&£DüFEA8\20´ÀÇw™\12k+\8\17ë<~\9Ð\27hæu³óºÙÖcd3ÁU)\11¯MZÕAóM†1`™àÄÈA\22Ä׺(˜ítT¢£M\9Ä\24­ 3\20\31¤1P7W›Ñ¥×\127\5UWuž½\24»LH­ÃgÿÁˆW\2«\2\\ããë OàÅ6¿\2\26\28:ŸCPæ\22B\13>|,òÁ¯¡þîƒûc0¾0Éx0h*Jáú&lÛ¢S\18ñ\20\22’Á‹\28Ë\26\127iÌ—¦%~ÍÆà™ÆÁCQ>\20Ï\0152Í\9ó@ý=TÙMÉWÝ*B\3\6\31\12ë•›ºÂÜ‚uUõu(Æ\21ðE\28\18%ÑL\17þ\20+_ÔW_f\4\7~U‘p\8èY,nÆZ[¶öQ^ï\15FâÕ˜¢\21¼®?“6B~PùIåל\23s^Âzè\17éw\6O‘GÂmÂxµ\\\12\30ÄùU8Ú˜`—µD\28\15\\#‰º¥\4\27¸Æ¶ü\17ÇL¼æ\25O\18ên7»_a~5\ +ëÈà>µ\23ñP`S?®\2–ÎÅÓsÔ6ñ£À¾èÝ'\13E¼˜Þ\14i\6ˆ\0116‡h\25ÄuQžìŒí»‚¯+Fë\ 
[binary zlib-compressed pattern data continues; non-text bytes not reproducible here]
z\2Õ\19&ù$,wtC\",;Ìu‡¹î0×\29æºÃŠé0å\29¦¼Ã”w˜ò\14SÞaÊ;Ly‡)ï0å\29¦¼Ã”w˜ò\14SÞñÍE22÷]œûŽs\31ÎA!\ +\29wÐqGöð/\26¨·z\27¨·z\27¨·z\27¨·z\27¨·z\27¨·z\27¨·z\27¨·z\27¨·z›øúÕÄåÕÄåÕD\3n0\5\13¦ Á\0204Xu\13\7A'©¡“ÔÐIjè$5t’\26:I\13¤†NRC'©¡“ÔÐIjè$5t’\26:I\13¤†NRÃt\ +\6{…F¯Ðè\21\26½B£Whô\ +^¡Ñ+4z…F¯Ðè\21\26½B£W,¸+\20{Å‚»B¿Wè÷Š\5w…š¯Pó\21¶2ÃRgXê\12Ka©3,u†¥Î°Ô\25–:ÃRgXê\12Ka©3,u†¥Î°Ô\25–:ÃRgö\30-uŽ–:GKa 3\12t†Î0Ð\25s{ƒ:oPç\13ê¼A7¨ó\6uÞ Î\27Ôyƒ:oPç\13ê¼A7¨ó\6uÞ Î\27Ôyƒ:oPç\13ê¼A7\12ÈC\30êôP§‡:=Ôé¡N\15uz¨ÓC\30êôP§‡:=Ôé¡N\15uz¨ÓCž½Cg\30:óЙ‡Î\0220[Àl\1³\5Ì\0220[Àl\1³\5Ì\0220[Àl\1³\5Ì\0220[DK\1y$\11„\7$\21’\26eç\26¼Š\13d7Ý@vÃâ=`f\15˜Ù\3fö€™=`f\15˜Ù\3fö€™=`f\15˜Ù\3fö€™=`f\15˜Ù\3fö€™=`f\15˜Ù\3fö€™=`?ª@©\2¥\ +”*Pª@©\2¥\ +\\*p©À¥\2—\ +\\*ôP±éðwC¯1sûÁ/±\30`Ï«\16äØ\19Ð`°£b°£b°£b°£b°£b°£b°£b°£b°£b°£b°£b°£b°£b°£b°£b°£b°£b°£b°£ÂŽ\\!æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQ1æQá ¨pPT\8}T\12}„ôÎôÌ\20$=Iz’ô$éIÒ“¤'IO’ž$=Iz’ô$éIÒ“¤'É\5$ÿ€ÝÆ„$7’ÜHr#É$7’ÜHr#É$7’ÜHr#É$7’ÜHr{n¬W1­)wL1ï\7®™ÐJkZiM+­i¥5­´¦•Ö´ÒšVZÓJkZiM+­i¥5­´¦uÖ´Î\26l\0258©\0248©\0248©pnTô\9Bzgzf\ +e:*ÓQ™ŽÊtT¦£2\29•é¨LGe:*ÓQ™ŽÊtT¦£2\29•é¨Lñ\21^5¶š\26[M­¦ÆVSc«©±ÕÔØýjì85vœ\26;N\29§ÆŽScÛ«±ßÕØïödC•\ +I\13¡ˆþW\29ý¯:îE5\22yÕTc5Õ°€\26*>†\29ÞáÈq8r\28Ž\28‡#ÇáÈq8r\28Ž\28‡#ÇáÈq8r\28Ž\28‡#ÇáÈq8k\28Î\26'ã\8U*$5„\14I8r\28N\27‡ÓÆá´q8\26Gð\28Ás\4Ï\17ãåFqÌîÛA’Ž?ø\127Q„\27\\̆?5ô;\3—³cìa‰¿ Í\21!ˆ5^\127ä\27mfÃ5•”­r¶NÙ\\72\13Y2\21隥ñ1¹Ô•þgë\15õŸl¨ÿaãç«ù«n”\13S\22³áÎzªâ˜åÕ\12Ò\19˜äô\12ïXì~85—³£d…ûßœí~à§4òóvfW\13\21¶\15ù‹€ç”•‰”ì*½…¬\\¼Àïßž_{¦\ +±¥p…O.\12xþö\28@þ°JÌ\6­F隥‡\31¿ø·DÿK¹}ñ„…à~¼.\31;ýso'äo’÷ò¹Èç&òZòîÇóoøYÞþ¹/ßðÙËg'Ÿ\15)½IÞËç\"ŸûS_áêgø\28äs’Ï5|¾Ž?öE¶8,µ‡äoòyø±/§]\30>%\31Úé?v•îŸ¡~/õ{©ß‡:A²×\25¤¯Aú\26¤¯Aú\26B_¯“´y’ñžd¼'y¶“Ö:i­‹’]?Ôl¤f#L\26ée’^&ée’^&ée’\17ÍÒÚ,­ÍÒÚ,O=„¹—R/¥^J—À\17{Â\127\ +aÒÂ\"4¥“)]…É*LVa²\ +“U˜lÒã&=nÒã&£Ø­ª\13Ÿ»\5†Ï‡Hn’÷ò¹Èç&r©\31´Q‹öj©_‹ä\24z\15Ÿƒ|Nò¹†Ïл“~ôëd¤£XÎ(–3ŠåŒÂp”š£Ô\28Å~F±Ÿ1>åþ\31@yCÛ", + ["length"]=28148, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=2425, diff --git a/tex/context/patterns/lang-sv.lua b/tex/context/patterns/lang-sv.lua index 698ccd105..5cb331a91 100644 --- a/tex/context/patterns/lang-sv.lua +++ b/tex/context/patterns/lang-sv.lua @@ -124,7 +124,62 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijklmnopqrstuvwxyzäåéö", - ["data"]=".a4b .ab5i .ab5ol .ab3r .ac3 .a4d .a3dr .ad3s .a5g4ra .a5gre .a5kl .a5le .al4pr .a3lu .am4br .amp3l .a5mu .and4rar .a2n5es .ang4er .an5go .an5s .ap1p .as2k .a3sket .as4t .a5sten .a3sti .a5ta .at3t .au3st .a4val .av3s4 .b4 .bak5s .ben5s .bild3s .bo2k .bort1 .cis4 .cy5klop .d4 .där3 .ek1v .e3l4a .e2l5in .en5st .e4nä .e2r3i .e2s .e5skad .es3kal .es5kap .es4t .e5strad .e3tr .evan5 .ex3 .f4 .feb3r .fram3 .fres5 .fÃ¥gel3 .för1a .för1en .g2 .gu4lä .gus3 .he2m .hu5sa .ib4 .ik4 .im3p .i2n1 .i4na .in3d .in4ger .ink2 .in3s2 .in3t .is5ka .i3so .k4 .kans4k .ko5li .kort5s .kring3 .krings2 .köp5s .l2 .lak5r .lek5tr .lu2st .m2 .mas2ke .ma5skeri .me4re .minis4 .mjölk5s .mon2s .mÃ¥n3s .mÃ¥4st .män5sko .mörk5r .n4 .ner1 .no4n .nöd5r .oc1ku .ok3t .o3kv .o2ma .o2mo .om3s4 .o3mu .on4k .o3o .ord3s .o5sc .o1s4k .o3sl .o3stra .o3sv .o3tr .o1u .p4 .papp5s .pa3ste .pa5sti .pi5sti .pres2s .pub3lika .r2 .re4gr .re2ste .runs4 .rym2d .röve5 .s4 .sa2k .seg3r .si5o .sjö1 .sk4 .skott3s .slut3s .st4 .sta2m .sten3s .string4 .sup3p .t4 .ta3bl .ta4k .tak5l .tes3ta .tig3r .til4l .ti3o .topp5s .tred2s .tre3s .trä5k .u3k .ult5r .ung2e .up2 .u4ra .ur3s .u2t1 .u4ta .u5trer .ut5s .v2 .var4t .vatten3 .ved5s .vä2g .väg3s .x2 .y2a .y4e .Ã¥ng3 .Ã¥r4s5 .Ã¥3st .Ã¥ter1 .ä3ro .ö3ro a2b ab4bu a5be abel4s abe2s ab1l ab3la ab3ort ab5ric ab3rio ab4sc 
ab4sk a5bu ac4kes ac4kis ack3sk ack3u4p a5dag a5dek a5del ad5ep ad3j ad3op a5dran a3dre 1adres ad3ril ad3ru ad2s a5ed af4fo 3affä 1af3ri af4tor a1ga aga4ra a1ge a2ge. ag1gr ag1l ag5ord ag3ro a4gur a4hj aib4 a3iv a1j a3ka a4kart a5ke a1ki ak3n a1ko ak5ram akri5s ak3rob ak4sta 1aktig ak3tri a1ku a5kvari ak3ve a5kÃ¥r ak5Ã¥t 4akö a1la al5adm ali2br a2lin a5lin. a3line al3ins ali5stik a4lj alk3ak al2kv al4kä all3st al3lÃ¥ alms4k a1lo al5ort als5pa al3tr al4tu al4tä a1lu alu5s alv3s a1ly a4maf am4i am4pr am1s am3Ã¥t a3mö ana4bo an3alf an3ark an3c anci5 an5dak andel2s an4dun an4dän a4nef ang4es an3gi an1gr aniu4 ank3r ano2i a4nok a4nop an5sce ansis3t an4sj ans5ku ans3li ans3par an1st an4sto an4sty 1ansvar an4tj an4tre a1nu a5ny a3nö a1o a1pe a2pe. ape4n3 a1pi ap4lan apo3str 1appara apps4k ap3ric ap3rif a5pris ap2s ap3se aps5l aps3p apu5s a5py a5pä 2ara a4rann a4rarv 1arb 4arbi 2arbo 4arbr ar3dr ard5st a4rend arg5si 2arh a1ri a4rigen ar3ka ark3lan ar5kr 4arl 4arn. ar4nal a1ro a2rob 4arp ar2sa ar5skal arsk5l ar2sv ar4tro arts5p ar4tur 4aru a4rur a5rus ar4väg a3ry a3rä 2asa asbe4 a1sc as2h asis5t as3kis a2sko a4skr as3ku as5l as3pa as3pig as2sk as2s5op as2sp as2st ass5up as3ta a5stard as5ter as5tiker asti5o as3to as4tr ast5rak a5stral ast3rol as5tör a3su a4sul a4sund as2ut as3v a1sy a2s5Ã¥ a2sö a1t ata5ra a5te ati5ö a4tj a2tr a3tral 4atrar a4t3re at3ria a3tric at3rie a5trik a3tris a3t4ro a4tro. at4ska 1attac at2tak at4tj at4tos att3s a4tung 2au au5b au2t5a 3autom aut5s 2a1va a4vart 1avg 2a1vi av3r 4a3vä a5Ã¥ 1b2 3ba ba4di ba4do bad3s4 bak5l ba4ko ba4ku bank5l bas4ta ba5stu 4bb b4bak b4batt bbb4 bb3l bb4ler b4b3r bb4so 4b3d 3be be3d4r be5e be1k 4beld be5lu be3ly be3lÃ¥ be5lö beng4 be3nÃ¥ be1rö be1s be3sl bes5s be4sta be4ste be5su be3tr be3tv be3u 4bex 2b3f 2b5h 3bi bi3d4 4binv bis3ko bi5skv b3je b3k b5lar b5lat ble4mo b5len 5blera 3bles 5blid 3blikr 3bliks 4b3m 2b3n 3bo bo4gr bo2kl bo1mu 5bon bors5te bor4ti bort3r borts2 bort3sl bo1s bo4sc boy5 4b3p 2b5raf 4brar 2b5rati 3brik. b3rika 3brike 3briks b5rikö bru4st 3bry 3brö 4b3s b5sce bs3ch b4slan b4sof b4sp bst4 b4stj 4b3t 3bu bund4s bus2st b3v 3by by5r 3bÃ¥ bÃ¥ng3 bÃ¥t2s 3bä 3bö bör2s c2 5cap c3c 1c4e cens3t 3centr ceu4s 4ch. 3chau 3chef 5choc 4cht chäs3 chör4 1ci ci4lu cim2 cipp4 4ck c3ka c3ke c3ki ck5j ck1l ck5lis ck3n c3ko c4kordn ck3org c4kort ck3r ck4re ck3sla ckus2 ck3va ck3ve ck3vä ck5ä ck3ö cle2a co2a co4m 4cr cros2 4cs 1cy 1d 3da 5da. 4dadr dags3 2dak 5dako da3li 5dam da3mÃ¥ 4dand. 
4d1ap 4darb 4dart da4tr dat5t 4dax 2db 4dc dcen3 2dd ddd4 ddi4s d3dj d4dos dd3ra dd3re dd3ri d3drä dd2s dds3v 3d2e de1k4 4deko 4deld del2sa dels5ti de5lut d4en denti5ö den2to de3pr 5der der1k de2ro de5rol der5sti de4ru de2s de3se de3sp des3ti d4et de3tr 4dex 2d1f dfö3ra 2d1g d3gl 2d5h 3di dias4 di5el di2gr di3ka di5ku 4dinf din3g4o 4dinr 4dins 2dinsp 4dint di1o di4od di3sc di4sj dis3ko dis1kr dis1p dis5to dis3tra di4tre 2dj d3jor djup5p 3djur 2d3k2 4d5l 2d1m 2d1n 3do d2ol do5lo 4domr dom2sk 5don do4pak 4d5ord 4dori 4dort d5ost do3y 2d1p 2d2r2 d3rad 3d4rag d3rand d5rarb d5rassera d5ratu 3drej d3ren 5dres d3ret d4ric 3drif d3rig 4d5rik d3rin 3d4riv d5roc 3dropp d3ror 4drot drotts3 d3räkn 3dräkt 5drän d3rät d5röd 4ds d2s1an d2se ds5enh d4sf d2si ds3ins d2sj dsk2 d3skef ds4ken d3ski ds3kl ds5kn ds1l ds4lot ds4mo d4smÃ¥ ds5nÃ¥ d2so ds3pl ds3s4 ds3tal d5stat ds4te dste4a d5stig ds3tin ds5tro d2su ds1v d2sö 2d3t 3du dub3ble 4dup du1s du2sc du4ste du5sö 4dut du4vu 2d1v d3vr 2d3w 3dy dy4kan dy4ro 4dz 5dÃ¥g 2dÃ¥s 4dÃ¥t 4däg dä2r 3dö döds1 4dög 4döp d5öst dé4 e1a e2ake e4am 4eb e2br eb3ril 4ec e3ch echiff5 ecis4 e3co e2d e4dans edd4r edi4u ed3j e5dral ed1sk ed2sko ed3s2l edso4 e3dÃ¥ e1e e2ed e4ei ee2k5 e4en. e4ene e1f ef4s 3efte e1g e3ga e3ge ege2l eg1l eg2ler e3glera e5gleri e4gran eg5rat eg3rin e5gru egs3 e5gÃ¥ eig2 ei5gn e3ik e1in ei5sh e1isk e1jo e3ju e3jä e5jö e3ka e1ki e1kl ek3lat ek4le ek3n e1ko ekord5s ek3orr ek4ret. ek5ro e1ku e1kve ek5vis e1ky e1kä e1la el1akt el4arb 3eld. eleb3r elekt3ri el4fra eli5ku el3k4 el3li ell3s el3lä e1lo e4lob el3p el2si el5ug e5luv 2e1lä e1m e5mat e5mis emon1s em5ort emp5le en5art e2nav en4ce e4ned e4nek ene3rö 2enj en5klo en3kn en5kr en5kä enning5 ennings2 eno2m en3si ens5ke ens2m en2sp ens4te ens4vin en4sÃ¥ ent4ha en2t1r ent4rat. ent3rati ent3ri ent5ru e5nus 2eny 2e1nä e1o e2og eo4i e5or 2ep e1pe e1pi e3pla ep5le epp2s3 epps5t e1pr ep3s ep4tr epu3b e3pÃ¥ er1ak 4eras er3d4 erg4l er4gu er4gÃ¥s e1ri e5rib e4rinf erings3 eri5stik erk4lin erlä4 er5na e1ro e3rob e2rom erp4 er3ra er5sc ers4ken er3sl ers4le er4sta er2ste er3str er3sv e1ru e5rum e3ry e5rÃ¥d e1rä e2sal es5all es3arm e1sc 2ese es4hi esi4u es2k e4skan es5kar e4s3ken es3ker es5kul e1sl e5slag es2mi e1sp es3pl es2sk ess5lä es2st e3stal es5ten. esti2ge es3tin es5tor. es4tr est5rer e3stru est4rö e3stÃ¥ es2u e1sy eta3b e5ti eti3ö e1to e5tri. et3ris e5trä et2s ets2ad ets3kr ets1l ets3m ets5pa et4sv ett3r e1tu etu4ri et4va et5vu e1ty 2etz e1tä etäc4 euk4 e5um. e5up4 4eur eu4se. eu5tro e1v e4varm e4vj ev3r 3exp ext4r 4eä f2 3fa fac4 fac5ke 4fans 4farb fa3sh fa4st fa4tö 4fav 4f3b f3d 3fe 4fef fe2l fes5ta fe3sto 4fex 2f1f fff4 ff3l ff3n f3fo ff3r ffs4 f3fä ffö5re f3g2 f5h 3fi fi2br fib5rig fi3li fin5sm fi3skal fisk3r fi2ti 2f3k 1fl flo4da 4f3m fma4 1fo 4fof fol2 folk1 2f5om fo2na for4mo fost3r 4f3p fra2m fram5p f4rer 5freri fre4s f4ri. 
fri5sp 5frit fros5ta fru5str frÃ¥n5 2f3s fs2k f4sl f4sm f4sn f4sp f4st f4sv 2ft f3ta f4taf f4tak f4tap f4tarm fte4r f4tex f3ti f4tin f3to f4t3r ft2sa ft4set ft2s5i ft4sj fts4t fts5vä ft5t ft1v 3fu furs5te fu5ru fu3tu 4fv 5fy fy4ma fÃ¥3tö 1fä fäs5ti 3fö fö2ra fö2ren fö2ri för3k för3sm för3su fört4 för1ö ga5br 3g2ag 4gakt 3g2al gall3s ga5lä ga4no 2garb 4garm ga2ro 4gart ga4st ga4su 5g2ati gaus4 g4av g5avsn 4gax 2gb 2gd g3d4r ge2a ge5b4 2gef 2ge4j g2eli 3gelis gel5st gel5y 3gelä gel5än g4em ge4nap gen5g 3g2eni 3genj 4genm genom5 gen4sa g4ense 1g2ent 4genv ge5ny 3genä ge2o 1g2era 4gerarb 3g2eri gers5n 5gese ge4to get5s 5g2ett 2g1f 2gg g1ga g4gap g1ge gg5g gg1l g4gos ggs4la ggs4m gg3s4t gg3s4v g4gu 2gh gh4te 1g2i gi1o gi5sn gi4ste gis4tr gi5stral gi5st4rat 3giv gi2ö g2jo 3gjor g3jä 2g3k2 2gl g4lans g1lar g2las 5glase glas5k 5glasö g4lid 4glj g4lög 5glöm 2g1m 2g1n g4nag g2no 1g2o 3go. 3gol gon3s4 4gont 2gord 4gorm 4gort go3sl 2g1p g2r4 3graf 5gral gra2m5 5grans 4gras 5grec 5grett g3rig 4g5rik 5grip 3gris g5roi gro2v 4grum grus5t g4rÃ¥ 5grÃ¥. grä4n 5gräns 2g2s gs1an g5satt g3sel g4sf gsi4d g3sju g5skaf gs4ki gs3kn gs4kot g3sky gs1l gs1m g4sme gs3n gs4ni gs4nö gs1or gs3pl gs3po gs4por gs5pre gs3pu gs3s gs3tak gs3tal g3stark gs4ten g3stif gs3till gs3tj g3stol gs3tra gst4re g3stäm g4sug gs1v g4s3ve gs3vi gs3vÃ¥ gs3yt gs1ä 2g1t g3tr 1g2u 4gug guld3 gul4da 4gulä gu2ma 4gup gu5ru gus4k 2gut g3utb 2g1v 4gw 3gy gytt3j 1g2Ã¥ gÃ¥rds5 2g5Ã¥ri g4äl g2är gä4s 1g2ö 4gög gö5ro 2g5ört 1h ha3bl ha5ge ha4li hal4so halv3Ã¥ ham4st handels3 hands4l han5g2a ha5ra ha4sc ha4sp hasp5l has3t hav2 havs3 h5c 4hd he4at he4fr he4lä hets1 hets3t hets3v h3g h2i 4hir his2sk hi4t hjäl3s h1k 2hl h4le 2hm 4hn h2na h2nit ho5nu hop5plo hop3s hos3p hos5ti 4how h3p h5ru h1s 2ht hu2s hust5r hyg5r hys4t hys5ta hy3ster hÃ¥rd5s4 häll2 hälls1 hälso3 hä4ri hä4s hä4var h2ö hö2g hö5gen hög5r hörn5s hö4s höst5r i1a ia3fr ia3g ia4lu ia4sk ia3tr i2b3l i5bril i3ca i4ce. i5cha ic4kord ick3u4 i5co i2d iden3s id4ge i4dom id1r id3ro id2s ids3v i4dun i3dÃ¥ i4dö 2i1e ifes4 i5fn i1fr 3ifrÃ¥n i1g 4igan i2geb ig5ej ig1l ig3no i3i i4kart i1ki i3klo ik5län ik3n i1ko ik3re i5krob ik5rof ik5ros ik5s2h ik5skor i3kul i3kum ik5u4t ik1v i3ky i3kÃ¥ i3kö i1la il4dan i2lin il1jö il5k il5lak il4lik ill3s2 3illu il5lär il2min i1lo il2tj i3lu ilufts5 i4lup i5lä im2b3r im5sm im4so i1mu i5mÃ¥ i3mä i5mö i4nau ind5skä ind5sti 1indu in4ga in4ge. ing4es. ing5is in5glas ings5te i3ni i4nif in5j in5kve 1inneh 5inre 1inri 3inrä in4sem in3skrä in3sl ins4m in3sn 1inspe 5inspeln in5spr 3instink 3instru in4stÃ¥ in5te 1intr in4tra int3s i1nu i4nun in3ym i1nä i5oc i1og i3ok io4kr i1ol io5li i5om ion2 i3ono ions3 i1op i1or i1os i1ot i1pe i1pi ipos4 ip5pi i3ra i4res i1ri irk5l i1ro iro3p i1ru i5sce isel4 is2h i2sk is5kep isk5na is3kopa is3ku is4kun is3ky i5slam is3län is3m is3n i2s3p is4pri is3sa is3se iss5n is4s3tr iss3tä i1stal i1stans ist5att is5ten. i1stent is4tes is3tig is5ting is5tor. is5tore ist5ro istÃ¥4 is5v i3sy i4sÃ¥ i1t it5c i4tei i4tex i4tj it5ran i5trin i3tris it2t5op it4t3r it4tu i2t5Ã¥ 4i1u i1va i2vak i1vi i4vin iv3r iv2s i1vÃ¥ ix2t ix5tu i1ö 1ja 3jakt. 4jarb jas5p 2jb 2jd jd3r jd4sty j4du 1je je2a 5jef je5sta 2j1f 4j3g 4jh 1ji 4jin 4jk j4kl j3ko jk3v 2j1l 2jm 2j1n j2o 3job jo4kr 4jolj jo5lö jor4din jord3s4 3jou 4jp j5pl 2j3r 2j1s j5sa j4sk js4me js4te 2jt jts4 2j2u ju4kos juk3s jul3k 4jur jus5kr juss4 jus4t jus5ta jut4sta jä5lo jäl4p5r jäl4sa järn3sk jär5s jör2s jös4t 5jé 1k2a 3ka. 3kad. 3kade. 
ka4dr 2kaf 5kafä ka3i ka5ju 2kak k3akti 4kalf 4kalg kal4lo kall3s 3kamp 3kamr 3kan. 4kand. 5kano 2kap 3kapi ka5pla kap4pr kaps5t 5kapten 3kar. ka3ra 4karb k5arbet ka5ri 4kark 3karna 4karp karp5s 4kart. 4karte 4karv 3kas ka4sk kas3ti 3kat. 3kats. 4kau 2kb 4kc 2k3d4 kdom4 1k2e 3ke. 2ked. 2keda ke3dr ked4s ke4er 2kefu 4keld kels4 4kense ke5nÃ¥ 2kep 3kern ke2s kes3s 4kex 2k1f kfö2 kfö3ri 2k5g4 2kh4 kid3s 4kif 1kig kik4s kilt4 5kimÃ¥ king3r 4kinne 4kins 2kint ki4nu ki4tr kiv3s 4kj 5kjol k3jä 2k3k kl2 1klag k2lama kla4mi 3klang. 3klass 2klat 5klav 2kle k2lej 2klig k2lim 3klip k2lis 5klist3r k5lock. 5klocka 3klos 1klub 4kluk 1kläd 2k3läg 2k1m 2k2n k4nal 3k4nap 5knip 3k4niv 3k4nu k4ny k5nyk k2o 4koc ko5de k5odl kog3n ko4gr kog4s3 4kola ko2lin 4kolj kol5tr 5kolv. 1kom 3komm 5komp 2k3omr kom4s 1kon 3konf 3konst 3kont ko3nu 1kor 3korg ko3ri 2korr 3korres 5kortera ko5s4k ko3sl 3kost ko4str 4k3ou 2k1p k2r4 3kraf 5kra3ge 4krang 5krera k4reten krid5s2 1krig krigs3 krings2k 4kriv 3kropp kropps5 kru5stad k3ryg krÃ¥k5s krÃ¥4pa k5rädd. kräk5l 4kräl k3rät 2ks ksaks5 k2s5as ks3ch k4ser ks2k4 ks3kl ks5kra ks5kv k3skä k3skö k5slag. ks2li k5sly k2so ks3pl k1s4t kstavs3 ks5tid k2su 4k1t k4tex kti5ge k4tinn k2tins k2tod k2tom k2tr kt3re kt3rin k5trod kt5rog kt3rol kt5rät kt2st kt5t4 k4tug k2tut k4täl 4kug k5ugn ku5la 4kuld 3kulö kum5pl kungs5 5kunn ku4pen ku4ro 3kurs 3kus kust3a kv4 3kvali k5vare 3kvarn kvar3s 3kvart k4vato k2ve 2kvente 1kvinn 5kvire k4vo k1vÃ¥ 3kväll k1vär kydds3 ky4lin 3kyrk käl4m 5kämp 5känn 3käns 3kärl 4kög köks5t 5köp. kör4l kör4sl 3la. 1lade. 2ladm 4ladr 2laf 3lagd. la4gin 5lagm lag3r 2lak 5lakan. 5laki 3laktis la5lo 3lande. lan4di 2lappara 2larb 1larn lar5s 4lart las3h 4lask la4st 5laste. 1lat. la5tr lat4tis 2lau 2lav la5vu 2lb4 4l1c 2l2d lder4s l3dj ld3ra l5dry lds4an 1le 3le. le4ge. le5ig le2kl le4kv lem4sö 2l5enl 3ler. ler5k 3lern ler3ste le5s2l le5tÃ¥ le3um le4vu 2lex 2l1f 2l1g l2gj l3g2l lgs4 lg5st 2lh 1li li5ch 3lif 3lig li4go lig3s lik2l li5kli lik3s 5limer 2lind 2linga. ling5o 4lingr lings5t 2lini 5linj 2lint li1o 2lip lis3c li4sta li3strö li4vo livs1 l2jak 4l1jo 1lju l5jÃ¥ l1jä l3jör 2l1k l3ke l5kju l2kl lk5lag l5klä l2kr l3k4ra lk3t l1la lld4 ll3dr lle5b ll3k ll1l l1lo llok5v ll3p ll4san ll2se ll3ska ll2so ll4sva ll4tig ll3tr l1lu ll5un llust3ra ll5v l5ly lläggs5 l5löd llör4 ll5ört 4l1m l4mol lm3st l1n lo2af loc4ku 4lodl lo4do lod3st lo2ge. 2lolj 2lom 4lord 2lorg lor4s lo4vo l4pak l1pe l1pi l5pla lp5lö lp4st 4l3r 2l1s l2sc l4sjo l4sjä l2sk l4skensv l3ski lsk3n l5skot l3skrä l3sky l3skÃ¥ lskÃ¥4p l3skä l3slu l4sm ls4mo ls5nyt l2sp l3spe ls3pl ls3pol ls5s l2st l3sta l4stak ls4te ls5ter l3sto l3sty l4styg l3stÃ¥ l3stä l5stö l2su l5sur l2sv l4svi ls5vid l4sÃ¥ 4l1t lta2tu l4tef l4tif l4tih l4tos lt5rati l4tret l4trö lt5sk ltu4 lu5i luk4to 4lull. 2lun lung3 2lupp lu4pu lus2s5p 5lust. 4lutb 4luts 2lv l1va l4varm lvers4 l1vi l4vos lv3ri lv3sp l1vä lväv4 lycks5t ly4gat lyg3r lyg3s2 3lyste 5lystn ly4str 2lÃ¥. lÃ¥g3s 1lÃ¥ng lÃ¥ng3s lÃ¥4sk lÃ¥s5te lÃ¥4stÃ¥ 4läc läg5r 1länds 5längder lä4san lä4sp lätt3s 4löl 4löm 3lön 3lörer 1lös lö4vä 3lé 1ma ma5fr mag5n mag5s ma5ju mak3r ma3li mand4 mang2a man5g4o ma5ni mani1k 5ma3ri mash5 mas3ko mask3ro ma5skö mas3ti mas4v 2mb mb4sk 2mc 2md m4dat m4di m4do m3d4r 1me 2meds me4du me4kl me4ko 4meld melo5 me5lu men5k me5nu me5ny mer2sko me4so mes4t me3sti 2meta me5trin met3ro meu4 2mex 2m1f m4fes m4fn 2m1g4 2mh 1mi mid3s mi4lu 2mind ming4o 4mink min4kr 4minv mi3nö mis2 mi5sf mi4sp miss3t mi4te. 
mi4tr mitt3s 2m1k 2m3l 2m1m2 mme5d mm3s4 m4mul 2m1n m2nam mnas3t m4nav mn5dr mn3g4 mn5st mn5tu m2n3Ã¥ 1mo m4od mo4i 2momr mo3na mos3k mo2ta mo4tin mo4tu mot3v 2m1p m2pak m4part m2pl mp3lad m5plane mp3lat mp3lin mpos4 mp5p4 mps4k mp5sp m4pÃ¥ 2m1r 4ms m4sal m4ske m3slag ms3lä ms2m mste2 m1sto m2str mst3rin ms5äp 2m1t 4mud mulls3 mult5r 5mum 4mun3g4 mun4ko 3mur 3musi mu3sta mut4sl 2m3v 1myn mys4te mÃ¥g4 1mÃ¥l. 5mÃ¥let. 5mÃ¥n. 4mÃ¥r mÃ¥1s 4mäg mäk3 1män mäns4 3märk 1mäs mäs5ta 1mät mö4bl mö4gen. 3möj mör4kl 3mös 4möv 1na 3na. 3nad nads3 2naf na5gr 2nak 3nako 3nakr na3kro n1akt 2nalf 5nalfl 4nalg nal3s na2lu n5amb 5namn 4nand. 4nanv na4rap 2narb 2nark 4narm 2nart nast3r 2nb4 2n1c n2ch n3cha n3che n3chi ncis4 ncyk3l 2nd n4dak n4dav nd3d4 n5de nde3s n4dil nd5rak nd5ras nd3rat nd3ri n5dril n3drop nd5ros nd5skal nd3sn nds3or nds5vä nd5Ã¥s 1ne 3ne. ne4di 5nedl ne4d3r ned3s ne4dö ne2gr ne5gres 4nek. ne5ly 4nenl ner5sm nes3s4 ne4sta ne5s4ti ne3tre ne1ut 2nex 2n1f4 nfalls5 nfis3 2ng1 n4gar n4gen. n4gend n4gens n4genti n4germ n4get n2gi ng3ig ngi4s ng4ly n2go ng5om ng3or ng3rad n4grö ng4ser ngs1k ngs3pa ngs5tim ngs3val n4göd 2nh 1n2i 4nid ni5ec ni4ki ni5li 3nin nings1 nings3k nings5v ni1o 4nip nip4pr ni5steri nist3ra ni3t4r niv5sk niv5st 2n1j n4jar n3jun nju4s n3jä 2nk n4kart n1ki n4kis. n3kny n1ko nkrafts5 nk3ri n1kro nkrus4 nk5sl nk3sp nk4tin n1ku n1kö 2n1l 2n1m 2n1n nn3d n3ne nnis4 nn3k nn3s4t 1no 2nodl no4kl 2nolj 2nomr nom3s4 2nord 2norg no5sa no5sc no4tu 2n1p 2n1r 4ns ns2i n4sint n4sis. n4sise ns2k ns3kan n1ski ns3kor nslags5 ns5las ns5mit n4soc n1spi ns3pl ns3po ns3s4 n3stans n3stap ns4tel n3stif ns3tig ns4tra n2strik nst5up nst5vil n3s4ty n1sva ns3vi ns3vär 2n1t n4tark nter5s4 n4tinf n2t5omb nt3rad n3trah n3trak n5trala nt3rali n5tram nt3rep n3trer nt3ria nt3rin nt3ris n4tropin n4tror n4trö nts3c nt4se nts5kor nt4str n4tut n3tvÃ¥ nufts4 4nug n5ugn 3nui 3num nums5 2nup n3upp 2nutb 2n1v ny5gr n5z 4nÃ¥r 4nä. 4näc 3näm 3nät 4nög4 3nöj nö2ja nö5kr 4nöl nös4 nös5ke o1a o2ard o2b 5o4bj o4bli oby4 oc4k5r ock3sk oc3ku o2d ode4k odi4a 1odli o5dral o3dro ods4k od2st ods4ti od5stu o3dä o1e offs5t o4fl o3fr oförmÃ¥4 o1g o4gav og3gr o4gj o5glo o5gly ognos4 ogno5st o4gri o4grö og3se og4s3t o4gä o1i o4il o1j o1k o4kli ok3n ok3sl ok4su o2kv o1la o5lak ol5au olfö4 1olj ol3ka olk3r ol4ku ol4kä oll4si oll5slä ol3lä olm4s oln3s o1lo olo5kv ol4sa ol4tÃ¥ o1lu o4lug o4lur o1ly ol5Ã¥r o1lä om4brä o3men o4mord om5pa om3pl 1omr 4omra om1sk om4ste 3omsät om4tr om3tv on3c on5gi on1gr ongs4l o4nins on3j on1k4 ons3c onsi3s ons3m on5stel ons4ter on3tras on4tre ont4s o1ny on5Ã¥ o1nä o3nö oo4d oom5s o3or o1pe o1pi o5pline op4pl opp3le op4pr op4pu o3pri op4st o3pÃ¥ o5q 4ora o3rak oran3g4 o2rap 1ordn or4d5ä o4reh 1orga 5organi or4gr or4gÃ¥ o1ri 3orient 4ork or4mö or4nu or4nä o1ro or4pl or5pr or4spa ors5tig or5te or2tr ort3re ort3ro o1ru o3ry o1rä o1rö o3s2fä osk4l o1skop o3som os5pig os4sk os4s4t os3tig os5tiker o5still os4tr ost5ron ost5rö os3tul ota2lan 4oti. 4otie 4otin o1to o5tro ot5run ot3sv ot5ti ot4trä ott2s o1tu o5tun otvin4 o1ty o5tÃ¥ o3tä oun4 oup4 4our ou3rö ou4s o3ut3t o1va ova4n o1vi ov3r ov4si ov3sl ovs4me o1vä o3we ox5 oy2 o3Ã¥ o3än o3ö 1pa 4paf pag4 paki3 pakis4 pa5la pals5 pa5lä 4pand. pan4tr 3pap 2parb 4parm par3s 2pask pa5ski pa2st 3patr pa3u 2pb4 2pc 2p3d4 pek5tri pekt3ro 4peld pel3s4i 4pem 5peng 3penn pent5r per4bl 3perio 3pers per4sl pe5tro 4pex 2p1f 4p3g 2ph pi4el 1pig pi1o 3pip pi5so pi5sta pi5sto p2j 3pjäs 4p3k2 p2l p4lac 5plan. 
p4lane p3larn p3lev 3plex 3plic 1plik 4plit p3lj 1plom p3lop 2p1m 4p1n p3ni 1po 5poa 2poc 2pof po2i 3polit 4polj poly3 2porg 3pos pos4ter 4pov po4vä 2pp p4part pp5ask p4pax p3pe p1pi p4pins pp3j pp1l pp3la pp3lin pp5lis pp5lu pp3ly pp3lÃ¥n pp3lÃ¥t pp3lä pp3lö pp5oc pp3of pp3p4 pp1r pp3ra pp3ri pp3ru pp3ry pp3rä pp3tr p2pu p5py pp3Ã¥ p2r2 2pra 5prax 1pres pres4t pre3sta pres5to p3rig p3rik 5pril 3princ pring3 p5riol 3pro pro3g p3ror 4prÃ¥ 3präs 3pröv 2ps p2sal 3psalm p5s2ho ps4ken ps2li p3sna 4pso p3sod p1s4t p4stak p4stäv p2sö 2p1t p3tri 1pu 4pug pul2l5ov pul5tr 5pung 3punk pus3t 2p1v pÃ¥3dr 3päl pä5ro 4pör 3pé qu4 3que 1ra 3ra. raci4t 3rade. 4radr ra4du 5ra1e 2raffä ra3fr ra5is 2rak ra2lo r4ande 3rande. 4ran4d3r rand3s 2ransv ra3pl 3rar r4ar. 4rarb r4are 4rarg r4ark 4rarm r4arn r4ars 4rart r3arta ra5rö r4as ras3h ra2st 3raste. 3rativ ra3tri 2rav ra5yo 2rb 2r1c 2r2d r4daf rda5gr r3dj r4dos rd3ran rd3rat r4dul r3dÃ¥ r3dä r4dös 1re 3re. 4reaus re3b 4rec 5reco re3d4r re5du 4reft 4regg 3regn. re1kr rek5tri 4reld re3lu rem5p 3rems r4en. 2reni 2renk 2renl re3nö re3o 3rer. 3rern 3reso ress5k re1sti 3ret. 4retet ret3ro 4rety re5tÃ¥ 2revig 4rex 2r1f rfö3ri 2r1g rg3g2 rgs5top 2rh rhands5 3rial 4rib 3rifi 2rifr r3ifrÃ¥ 3rifu 3rigt rik2s 3riktn ri4mo 2rind rind3s 5ringen. ring3r 2rinr 2rins 2rint ri1o 3riot ri5ple ri2stä ri4tut ri4vis riv3s 4rj r4jis r3jo r5ju r5jö 2rk rk3akt r4kek rkes3 r1ki r3klas rk2le r4klö rk3n rk4ne r1ko r4kod rk3tr r1ku r4kup r1kä r5kör 2r1l r5laka r5lav rld2 rlds3 rl5sp 2r1m r4marb r4mil rm2s5j rm5tr 2r1n rnal4 rn3g4 rn1k r2nom rns4k rns4t rn3t ro3b ro4gro ro2kr 2rolj rol4li rom4a 5roman 5ronau 5rond. ron4v ro3pl ropp2s ro4ra 2rord 2rorg 2rorie 3rorn ro4sin ro4sn ros3v ro5te 2r1p r4plö r4pö 4r1r rra4n rrd4 rreligi5 rres4 r5rib rr5k4 r4rob r4rom rr1s rrs2k r4rur 2rs r4seld r4sex r2sin r1ski r4skid rsk3na rs5koll rs4kos rskotts3 r2sku r3skö rslags4v r4sle r4slo r4s5lö rs4mo rs5nat rs5nä r1sp r2spl r2spo rs3s4 rs5tak rs4te r5stek rs5tend r5steni rs5till r1sto r4ston rst4r r3strö r3stu r1sv rs4vag r2svä r1sy 2r1t r2taf r2takti rt4an r4tins r4tom r5trit r3trä rt3t r4tut rubb5l ru3br ru4dan ruks1 ruks3v 5rullera 3rum. runn2 runns5 4rupp rus2h ru5sha 2rut 5rutig rut4ra ru4vi 5ruö 2r1v rv4sj rv2s5kä r3w rydd5s ry5o rÃ¥ge5l 4rÃ¥l rÃ¥ng3s rÃ¥5ra rÃ¥3st räck5s 4räkt 4räm räng3s räns5t 4räs rä4san räs3s rä5sti räv5s röd5el röd5r röd3s 2rög r3öi rök3s röns4t 4röp 3rör rör4s rö4st röst3r r1övr 1sa 3sa. 3sad. 3sade 4sadj 2sa3dr sad5s 2saf sa3i sak5ri 2s1akt sa5lo 3s2am sa2ma samman3 sa2mor sand3s 4sang 2sanl s3anlä san3sla 2sap 3s4ar. 2sarb 2sarm s5arm. 3sarn 2sart 4sarv 4sass 5sat. sa4tu 2sau s3auk 2s1av 4sb s2c 2sch. 1scha 2schau 4schb 1schen 1scher 1schet 1schi 4schk 4schm 4schp 3schy 3schö sci3p 4s3d 1se se4at. se2g 2s3egg 3segl seg3ra sek5le sek3r sek5tr 3sel. se5ly sem2 3sen. s5ersä 3set. 2sexp 2s1f s4fär. sfö2 4s3g2 2sh 5s2haw shi1s s5hö 1si sid5s 5sie si4eri si4esk si2ett 3s2ig 3sik sikts3 5sill. silver3 silv3r 2s1ind 2s1inf sinne2s3 3sinni 4sinr 2sin1s s1inst 5sint. 2sintr 3sio sis4t siu4 1s2j 2sjak s3jakt 4sjn 4sjt s4ju 5sjuk 4sjur själs3 3sjö 4sk. 2ska. 3s2kada s2kado 3skaffn 1skaft s4kag s2kal 3skal. 1skap 5skap. 5skapet 4skapi skaps1 4skar s4kara 5skarv 4skas s2kat s4kav 4ske. 3sked. s4kene 3skepp 4skh sk4i 3skif 5skin 4skis. 5skiv 5skjor 3skju 4skl sk5lap s3klas 4skn 3s4ko. 1s4kog 4skogsg 1skol 3skola s4kolo s4korp skor1st 1skot s5kran. 
3skrat sk4ret 3skrev 1skri 3skrif s3krig 5skrin 3skrip s5kris 3skriv s5kron s4kru 5skrub 3skruv 5skräc sk3s 2skt 3skulp s3kup 2skv s4kve 1s2ky s4kyn 2skyrk 1skÃ¥ s4kÃ¥l 5skÃ¥p. 4skÃ¥r 5skänk 3skärv 2sl2 4sla. s5lad. s3land 3s2lang s4lant s3lar. 4slas s1lat s2lev 3slev. s4lic slins3 4slis s2lit s5lor slotts3 s2lu s3luc s3luf 4slus s3lust 3slut slu4to 3slÃ¥. 5s4lÃ¥r s4läk s5läm s5länn 3s4läp 4s3lär s2lät 3s2löj 2sm s2mak 3smak. s3makt s2mal s2met. s2mid s2mit 3smitta s3mj 5smug 5smyg smÃ¥5g smÃ¥3k smÃ¥3s 3smäd 3smäl 4smäs 3smör 2s2n4 3snab 3s4nac s3nam s5nare s3nast s5ner 3snib 3snil 3snit 1snitt s3niv 3snut s4nÃ¥ 5snÃ¥r 5snäc s4när 3snö. snö5g 3snör snö3s 1so 3soc 5sock 2sod 5soi 2solj sol3s2 2som 5somm 3son son4st so5pra so4pu 3sor. 2sord s5ord. 2sorg 3sorn 3sot 4sott s2p2 5spann. s4park 5sparv 4spas s3pass spa5tr 1spe 4sped 3s4pek 3s4pel 4spelsl 2spen 2sper 5spets 3spill 3spir 4spl s1pla s3plan s3plats spli4 s4plin 5split s5plä 4spre s3pres 4s3pris 3sprit 2spro s3pry 3sprÃ¥ 5sprän s3ps 1s4pÃ¥ 3spÃ¥n 3spÃ¥r 5spän 3spö 4s1r 4s1s s5sad sse4lin s5sil ss2k ss5kl ss3kun ss1l ss2lag. ss2lä ss2lö ss3na sss4 ss3unn s2sv ss3vi s2t 2st. 4sta. 5stac 3stadi s4taf 5stalgis 3stalla 2stalli 5stam. 5stamm 1stant 5stark. 5startad 1state 3statl 1stau st3c 2s5te. 4stea 5steg. s4tek. 2stekn 5stekt s4tell 3stem. 3steme 5stenar 3s4tene 3stense 5stensm 1stera 1stering s4teriu 3sterne 5stetis 2stia 2stib 3stick 2stid s4tiken 2stil 3stil. 3stink 3stisc 1stit 2stj s5tju 3stjäl 3stjär 2stm 5stoc 1stol 4stolk 4stom stori4eu 5storis stor3s 3straff 4strativ 3strato 3strec 3strej st3ren 1strer 2stria 1strid 5stride 2striel st4rif 1strikt st5risk 1stru 3struk 2strumm s3tryc 5stryk 5strÃ¥k 3strÃ¥l 3sträc 4sträd 3sträng 5sträv 3ström 2st3s4 st3t 4stv s3tvis 1sty 2styp 1stÃ¥ 4stÃ¥g 5stÃ¥l 1stä 3stäl 1stö 1su su4b 3sug su3i 3sum 2sun 5sun. s1under 5sune s5ung 2sup 5supa su2pu 5sus 2s1ut su4to su4tr s2v2 5svag. s3vagn 4s3vak 5svam 4svap svars3 3svart 4svas s3vat 4svec 3sven 5svep 4s3ver s5ves 4s3vil s4vine 4svis s5vitt s5vÃ¥d 3svÃ¥ri 3sväng 5svärm. s3väs s3vät 4syk 5syl 3syn syn3k s3yrk 3sys sys4t sys5ter syt2 sy5th 1sÃ¥ 5sÃ¥g 4sÃ¥k 2sÃ¥lde sÃ¥ng3 1sä s4äd 2s5ägg s4äl 2säp 5säs 3sät 4säta 1sö 4söd 2sög s5öga sö4ko 4söl 4söp sör2s 2s3ört 1ta 3ta. ta1ch 3tade. 4tadi 4tads5 2taff 3taga 5tak. ta5kre 2taktig tak4to 4talf 5tallise tall5s 4talv 3tame 3tami 3tan. ta4nab 3tande. 2t3anfa 4tanl t4ap3l 2tappar 3tar. 4tarb tar4mi 3tarn tars4 4tart 5tartavl 4tarv 4task 3tast ta1str tat2 ta4tan tats3 2tatt 2tav 4tave 5tavla. 3tavlan 3tavlo tav2s 3tax 2tb4 2tc t3cha t3che 2t3d4 3t2e te4as te3b4 5tec 4teg te2g1r te3gre te3i te4int 4tej tej2s te4kl 5teknik 5teknis 4teld 5te5lö 5tema 4temo te4mu ten3g4 5tensi ten3tr te4nä te5nör 5ter. 5teriö ter3k4 5term 5terna 5ters ter3t te4ru 5tes. 5test tes4te te5stik te5stu 5tetik tets3 4texa 2texp 2t1f4 2t3g4 2th t4hen 1ti 3tial 5tib 5tici 3tid 5tide ti4du 4tidö ti4ed tifts5 ti2gel 3tigh ti4go ti2gr 3tigt tik3l 3tiks 5tikul t2il 5tilj 3tillst 3tillv 3tillä 5time 2tind 2tinr 2tint ti4od 3tion ti2os 3tis 4tisc 5tisk 3tiva ti4van 5tivite ti2ö t2j 4tje 4tjob 2tjou 4tjäl 4tjäm 3tjän 2t3k2 2t3l 2t1m 2t5n4 tne4r 4todl 3tok 4tol. 4tolj 2tomr 4toms t2op 5torap t5ord. 5toriett 4torm torm3s 3torn tor1st 4tort. tos4k t5ost. t4ov 2t1p t2r4 2tra t4raf 3trafi 3t4ral. t4rala 3t4rale 5tralo 3trals t4ralt 3trans tran2s5a 4trar t3ras. t3rat. t4rato 4treg 4tren 4trer. 4trern t3rets. 2tri 3tribu 5trick trids3 t5riel t1ring t3ring. 2troc t3rock t4rog t5ronik t3rono 4tropi. 
5tross 5trotn t4rump t4rup 3trupp trus5ta 1tryc 5tryck. 5tryggh 4trÃ¥k 5trä. 3träd träds4 3träf 3träg 4träk t3räkn t4rän 5träni 5tröja t4röt 5tré 2ts t5s4and ts5art t3s4at t3se t4seg ts4en t4sex ts2k t5skall t3skatt t1ski ts3kl tskotts5 t5slot ts5läk ts3nä t3snö t2so ts3ord ts3pl tss4 t1st ts4te ts5ter ts5tillf ts3tj t3stol t4ston t2stra t4stry t4stur t5styr t2su t3sud t5sy 2tt t3tac t4tau t4ted tte5g4 t4tem tte2n ttes4 t4tex t4tins t4tip tt3ja t1to tt3rad tt3rand tt3rat tt3re tt3ri tt4ry tt4se tt2si tt4sta t3tu t4tug tt1v tt4vÃ¥ t3ty t3tä t3tör 4t5ugn 2tund 3tunga tung3s 5tunn 2tupp tu5re 2tutb t3utv t3utö tu4vu 5tuö 2tv t1va 4tve t3vig 3tving t3vit 3tviv t3vÃ¥g 3tvÃ¥n t3vän tvär3s 3tvätt ty5da 5tyg. 3tyngd 3typ ty3pi 5tys 2tz 3tÃ¥g tÃ¥s4 4tÃ¥t täc4ko 4t5äg 4täm 4tärm 3tävl 4tö4d tö5de 4tög 4töp tö4pi 3törer törs3t tö4vas 5té u1a u2b ub5al ubb4le ub3lic u4bo u3cha u5cl u2d u4dak u5de ud3r ud4ret uds4a u4du u4dy u1e u2es uf4fä uf4tan uf4to 4u1ga u1ge ugg3s ugn4 ugns5 ug3s4 u5ie u1in u3is u3itet u3j u2keb u5ki u4kl uk5la uk3n u1ko ukos4 uk2s uks5ko uk3tris ukt5s uk4tä u3ku uk3v u1la ul4di ulds2m ul4du ul4dö ull3ste ull3än u1lo uls5ti ul2tr u3lu u1lä u1lö um4fä um4so ums4t u1mu u3mör 5underl 1undersö 1underv un4dom und3r un4dÃ¥ un5g2ef un3gersk ung5it ung3r ungs4p 3unif unk3l unk3n un4kr un1sk un4tr un5trati u5nu u1o u1pe u4pern u1pi u2pl u3plet up3lik 3uppfa 1uppg up4pin 1uppla 5upplä up4p3r upp3s upp5sp up5ut ur5ak ur5arv u3re u1ri u1ro u4rob u4rom urs5tin ur4stä u5ry u2sak us5anl u3scha u3se usen3 u2s1k us3ka us4kla us4kr u5sky us4kÃ¥ us5lä us3n u2sp us3pen us5tat us3tig u3stik us5tin ust5ro u4stÃ¥ u4stä us3v u4sÃ¥ u4sä u2sö u4tak 1utb u4tef ute3s utik2 u5til uti3ö ut3j 3utjäm utlands3 u1to u3top uto5s ut3r ut4rer ut4ro ut5rop 1utru 2utsid ut3sl 3utslä 2utt utt4j ut1v 3utvec u5ty ut3öv u5u 2u1v u2vak u4vj u4vä u5Ã¥ u3ö va5dro 1vagn 2v1akti val3k val4li val4st 5valv 5vama 4vand. 4vanp van4st van5tr 5vap 2varb va4res va4ri. 4vark var2s vart5r va1ru vas5ti 5vattn 4vau 4vav 5vavi 2vb4 2v1c 2v3d4 1ve 5vec ve2k ve3ke 4veld vensk3ä 5ventera ve3ny ve5nö 4vep ver5g 3verk ves4 ve2s5p ve1st 3veta 3vete vet5sa vett5s 2v1f 2v1g 2vh v4i vi4c vid3s vild3s vil4t 3vind. ving3s4 3vinkl vi2no 5vinst. 5vinste vi5ny 3vis. vi5sa vis5h vis5ko vi4st vis3ta vi2tr vi4var 4vjo 2v3k2 2v1l 2v1m vmörk4 2v1n4 1vo 4vok. 2vom 4vord 2vorg vos4 2v1p 2v2r 5vrak 3vrera v3ru 2vs v4sc v1s2k v2skri vs4mi v3sni v2so v1st vs4te vs5trÃ¥ v5styc vs3vÃ¥ v2sö 2v1t vu4d1 v1und 4v5up 4vut 2v1v 3vy 5vÃ¥ld vÃ¥ngs3 3vÃ¥rd 4vÃ¥ri vÃ¥3ru 3väg vägg5s vä4l väll4s3 3vänl 3värde vä4ril 4värj 5värk 3värld 2vät 3väx 4vög 4vöp 3vör 1wa we2 w2h whi2 wi2e w4na x1 xan5d4 xem3pla xis4 xk2 xli4 xs4 xti2 x4tÃ¥ 2y y1a y4bris yb4s y2d y4da y5dan y4do yd3r yds4 y4du y4dö y1e y1ga y1ge ygg3r yg4gÃ¥ ygs4p y1i y1ki y5klist yk5lon yk3n y1ko y1la yl4gj y3li yl5k yl5lä y1lo yl4tr ym2fl ym4for y3mÃ¥ yng3r ynk5l yn4sa yns4t y3or y5ou y1pe y5po yp3ri yre4s y1ri yr4ku yrk5v y1ro yrs4k yr5st yr5tu y1rÃ¥3 y5scho ys2st ys3ta ys3ti ys4tik. yst3ra y2tak y4te. 
y4tea y1to ytt3r yt5v y3va y3vi y3vä y5w y5Ã¥ 1za 1ze ze4ro 1zi 1zo zo4nal 4zp z5s 3zu z4zin Ã¥1a Ã¥3dj Ã¥ds4l Ã¥1e Ã¥1f Ã¥1ga Ã¥1ge Ã¥ge2l Ã¥g3l Ã¥g3s4k Ã¥g3st Ã¥gÃ¥4 Ã¥3i Ã¥1ki 5Ã¥klag Ã¥k4strä Ã¥1la 1Ã¥lder Ã¥2lin Ã¥l3k Ã¥ll4sp Ã¥l2s5e Ã¥l3st Ã¥1lä Ã¥1m Ã¥man4s Ã¥nd4r Ã¥n4du Ã¥ns4t Ã¥ns4v Ã¥3o Ã¥1p Ã¥2pl Ã¥5pla Ã¥4pö Ã¥r4do Ã¥rd4ra Ã¥rd2s Ã¥rd4s3t Ã¥4rel Ã¥1ri Ã¥5ror 5Ã¥rsav Ã¥r5s2li Ã¥r2sv Ã¥r5ö Ã¥s4ke Ã¥s3n Ã¥ss4 Ã¥s4skr Ã¥s4t Ã¥te2 Ã¥t3ri Ã¥3trÃ¥ Ã¥t2sj Ã¥tt5s Ã¥1v ä1a ä2b 2äc äck5v ä2d ädd3s äd4du äde4s äd3r äd5se äd3st ä3e ä1ga ä1ge äg4go äg1l äg3r äg4re äg3se ä3i ä5jo 4äk ä1ki äk3n äk3r ä1la äl4pap äl4seg äls5kog äl4slu äl2t3r äl2tu äl4vin ämp3l 4ändligh änd3r änd1st äng5r änni3s änn3s ä4no äns1l än4st äns5te än4sv än2t3r ä3pe äpp3l ä4pr äp4st ä4rap är2bre ärg5l är4gr ä1ri ärib4 är4kä är4nis ärn3st är2nÃ¥ är4nö är5ob ä5rol ä3rop ä5ror ä5ros är2si är4sko är2so är4sp är2sv är4tand är2tr ärt3s 4äs äs3pa äs5pi äs4sk äs4sp äs3ta äst3r ä4stä ä4sÃ¥ 2ät ä3to ä5tre ät4s3k ät5te ät4top ätt3r ät4tu ät4tv ä1va ä2vak ä3vi ä5vu ö1a ö2d ö4dak ö4dal ö4darv öde4s5 ö4dis öd3ra öd2s öd3se ö4du ö4dö ö1e ö1ga ög5ak ö5gar 1ögd ö1ge ö5ger ögg4 ög1l ög2n ögn3e 1ögo ög3si ög3sk ö1i ö3jo öj4sv ö4karm ö1ki ök3n ök2s ök3sl ö1la öl4kv öl4kö öl2p ö5lä öman4 öm2kl ö4nal ö2nom öns3ke ön4so önst3r ö3pe ö4pel ö3pi öp5li ö5plo 1öppn ö4pr ö3rande ö3ras ö4rask örb4 ör3d4r ör1eni ö3res ö4restr ö3ret ör5evig ör3g ö1ri ö5rig ö3ring ör3int ör5ir ör5iv ör4kal ör1k2l ör5kli ör4nis ör3ol ör1or ör2p5la ör1s2k ör3sl ör4slä ör5te ört5s ör1u ör3vr ör3y ör1ä örö4d ö2sak ös3n ös4sj ös2sk ös4sp ös3ta öst3v ö2tak öts5ko öt4st ö1v öve4 över1 5övere ö2vj öv3ra öv3ri öv4sk é3e", + ["compression"]="zlib", + ["data"]="xÚ5Y–«8Óh§Â\8r-Ó\12\8§1‰\1Á/š:®ñäyø&\19ȉݽƒº\15ŽPß+\26)„?Úú^|´÷f\8¸L¢*\3?+@ý\0T\15ýj\0036}ÛÀ¹\19fh&ÝS½š®š\14à\\ßõÌk\21\9fÃÒƒ¼†–©é,,õu§?5ý\18ÈÀõ¶\2·r´¬mìv}µ°Ùö.Eènk›Ý†ì•Q\7a6÷l­î¬¶ºø¸ûkG\11½wQö}˜¢\23÷ÅÒïKÞoÅÇç`âÏ7]Y¨ùçñûé}7ÞN`5ÕTÔ•S3P»%QUW§ßoCs5ˆ(µk¶±e¼º­\26mF·5c»Šm=±{Žèj§ÏÝIwA\127¨çI•Ï.Fý™ÛÙLfÐïß¾›ôÿþä[û\31v\12ú’ßQO¶¡?6’|uå\12<štƒ}\31FÁ\\Ñ„¡Lôt¨“q©z\8ëÞ±\31ÒXFØv!Z:ØnPµ1)–1¶i«\25°qi¦A”wGsÌCê«ÿ°ÙÇߟՈ\9÷ÄÀSüÔÝŽÒQ›‰™ÛNÌxu™\2ç®v1ÍCŠ©˜_¿?SLÛ¼$\7vþý›ª\11×QÊï7“0.º~rÔ“È—ºL'ÓR3>é÷çaøòy\27YzËhÇ–jd>—rn\3’\127™c¥,•ësIvr©\12ϱN–fû\4Þ¶+b›\2în\0°eÅ\\.7r¯”³¶k\12Àê\18íÄM,Õuø\0153­vh=îÕ48È™\1É]ÝgQ\25™ò‘lS~Ï%ó”\127\127ÎŽ¥`ÐÖºl·®w¥lCCC7ÆŠNoN\19#²ï6{›Ž\11ï†î­\11ís…9[†\31«+Ã${[Ý'‘ÝÜ™8=¬b÷×>Dmû0Õ†\14ŽÎ¾\\ÝÜs÷(/\\\5úýn(á¨\4Óîø\31©/éÓ±ÒÏ#(Ç‘M{”n½£¶Šƒ\21âRëçRTíóɶ¿µO\18\25¶/¹ho=\13î[‡\0247*ûîƒ\27K\11Ha}ê\6Å\8ÔýAxýE+¤\21m5œäÃ\7é\"|líxÃ6mo#•ŒUÒEFv[;ƒ2KÚð¼0–#Û²¥E#«ÅÀ݆¹ñ(‚¹Œ\2ÎN\31Óc\25LKQ·Ð\11’9ÒSÓ>(u\26J¸F[N\16Z˜Êè\1\27%‘uª†´™ÂÄ`Ö\19ƪÅ=•ìkØÏÈ°´ÓäÄ“~úý\11šÝ»T²XILç´5«UºuÉ´\31\1Íz›t\31vl‚…\24𦦹}\22ð³A\0‡kç\27Qse\31Új¶\15\9&J\5©j§g <ŠX'ésh€,\3CX\0Îeª\31G\ +\4!£‚Ô™\11^h\\ÕSQŠiKÃÁÔ$\6\25¸”ƒI—1àj¡Û'\3“¶jiÛK_ã°§a\11´¶f¾mWŠ}¹Ð›ÉJÛ\25qõþ\ +˜ìäœ%z]¥˜\28ÒßVWÓêjZáz•!”½Ö“Ëv\13‚Hq+51¬ë\26\3¾V±gDô­Y³{cµókµQÞºAd€ v\17oÖ·€y(£ VrJòIñù΂É÷ÁXÆZ7ÏŠ&m~È’IÚ%\22xî›-’}ÑR×^‡\30žIb×v\30«hxnÆl1“À…–᎓Y$\16.kÂi].7sÉۉ͛ÔÑÀÓ\28»‰óΊ\ +ß\17%\30Véöjò±\25.ÁbD³Ãš£ƒ\22¹Ý;f÷&5ÙJÚÊ<6ô\2áÁ±*åo-T&G\16eƈ9¡\1Ýhð’1`ãz\0_ÐB¶æÐ#\9—\7µî|Šïr a\12Ç.\0111‘°Þ#ˆí=F–lw·\13>E\0308-ÍßìÜvL\1\29í­<¢Ñ’ím»\27÷]¹Åò!jg\24l\4¬­¥BƒcÅ•ÖWE5u»+\23\18^¹\8C\27q.!}’\14|ã\21¸‰ê bŒ?3·3L’Ÿ}oÍPîvaZ€\11éƒ/’\28vÄèÓ‡£lÁ„š\22zzì\11Ä'ØOÙÞN\23ß)\1¼µg\31!,¢³rn«S:a\15o÷²¨îmqGD\30\2.À‡âÄ=8(Aã\18ð\0¦+h“áÝ\29Þ£¨ï÷\2&D[…û^ÜïPâ;¬\7POÌ\16áÔŠ\7A\12çƒ\ +»âÞUˆÑ F÷m$¦›\30ú¡^DB²„4Ñ †\27Ù\23~OXŠ°[Ž0h\24AH4w$MÝAÂ\3Yn³Ea{\14x\ +mq÷§(ïÕ\19Ð|Ñ\26:>Ð\26\"†D\26\8‘]†DCïÕ‹‚\16\5îÍÄä\ +éâÔÕÈ]xØ\13>–\6²\7„\15Ï`\7\17‹ò…6û<[_\"€L‹b’¢;^nHlÍ}I8³\11[\\#c)Ô;h 
[… remainder of the zlib-compressed pattern data (binary) elided: not representable as readable text …]
\11\13Ð]²ï5î}\31ñ¾f¨\27bO$[’’A\9ž\"'/!›©Ÿ1ÁÝí\18–d¡q¢Á%˜$\11“šÑ%\7·\17êÃÿç*ý;\13\19RÅ8–¢\22Ž\11å\4Y^)™\13ñI_qR?@_ڲˎP8l™\15\31@F>ë`NòÛ6‡8AO$¯ÏÝk°\24áó@Q)Röõ4Ì{\2]DôIi©BÏ;‰š\0\22\23C¸¬\9[kû—’…ÀÍ%ld;ïÖ ¢V*kä\29&D_ðÍŽS1k\23Ú\2'n˜^‰:ŠìÝ—œÙŽÂl-\9í7\13M;õ¯’9³\16·(\16T;Ì\25WD\16BZ\21Y¨Š»ÀDóú\8ù¸“æ¾\19™\6ô†ò¯0÷‡yÒÏÜ\23ÂF¹X\20Ó¡\30H¼ƒýù\7¤ËäBÇ5?å\24ɲ‹\6úo(Ió\"¶«›oh¥½÷bÑ‚&¢5%\18i\28ɱoTñM0Hœm¨îï°äg¾>UÐxºŠh—ó7—ó7—³Ï嬹œ5—³Ïåìs9û\\ΚËYs9û\\Î>—³Ï嬹œ5—³Ïåìs9û\\ΚËYs9û\\Î>—³Ï嬹œ5—³Ïåìs9û\\ΚËYs9û\\Î>—³Ï嬹œ5—³Ïåìs9û\\ΚËYs9û\\Î>—³Ï嬹œ5—³Ïåìs9û\\ΚËYs9û\\Î>—³Ï嬹œ5—³Ïåìs9û\\ΚËYs9û\\Î>—³Ï嬹œ5—³Ïåìs9û\\ΚËYs9û\\Î>—ó7—³Ïåìs9\127s9\127s9û\\ΚËYs9û\\Î>—ó7—³Ïåìs9\127s9\127s9\127s9û\\Î>—ó7—ó7—ó7—³Ïåìs9\127s9\127i.žæ¢4\23¥¹xš‹§¹|i.žæâi._šË—æâi.JsQš‹§¹xš‹§¹(ÍEi.žæâi.žæ¢4\23¥¹xš‹§¹xš‹Ò\\”æâi.žæâi.JsQš‹§¹xš‹§¹(ÍEi.žæâi.žæ¢4\23¥¹xš‹§¹xš‹Ò\\”æâi.žæâi.JsQš‹§¹xš‹§¹(ÍEi.žæâi.žæ¢4\23¥¹xš‹§¹xš‹Ò\\”æâi.žæâi.JsQš‹§¹xš‹§¹(ÍEi.žæâi._š‹§¹xšË—æò¥¹xš‹Ò\\”æâi.žæò¥¹xš‹§¹|i._šË—æâi.žæò¥¹|i._š‹§¹xšË—æâiþñçÏ\31\\N.‰Ën—_\127ÿÐõÔ5éÊzÅrÅjÅbÅZÍZÍZÍZÍZÃZÃZÃZÃZËZËZËZËZÇZÇZÇZÇZÏZÏZÏZÏÚÀÚÀÚÀÚÀÚÈÚÈÚÈÚÈÚÄÚÄÚÄÚÄÚÌÚÌÚÌÚÌÚÂÚÂÚÂÚÂÚÊÚÊÚÊÚÊÚÆÚÆÚÆÚÆÚÁÚÁÚÁÚ¡ÿ×£ÿ×£ÿ×£ÿ×ÃúÅòÅêÅâÅÚ?\127ýÐõÔ5éªõWë¯Ö_­¿Z¿µ~kýÖú½ÿqâÛ‰o'¾øvÊ·S¾òí”o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o'¾øvâÛ‰o§|;åÛ)ßNùvâÛ‰o'¾øvÊ·S¾òí”o§|;åÛ)ßNùvÊ·S¾òí”o\9ß\18¾%|Kø–ä[’oI¾%ù–ð-á[·„o\9ß\18¾%|Kø–ð-á[·„o\9ß\18¾%|Kø–ð-á[·„o\9ß\18¾%|Kø–ð-á[·„o\9ß\18¾%|Kø–ð-á[·„o\9ß\18¾%|Kø–ð-á[·„o\9ß\18¾%|Kø–ð-á[·„o\9ß\18¾%|Kø–ä[’oI¾%ù–ð-á[·„oI¾%ù–ä[’oI¾%ù–ä[’oI¾%ù–ä[’o;¾íø¶ãÛŽo»|ÛåÛ.ßvù¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶ãÛŽo;¾íø¶Ë·]¾íòm—o;¾íø¶ãÛŽo»|ÛåÛ.ßvù¶Ë·]¾íòm—o»|ÛåÛ.ßvùöƒóÙ\15Žj§Žm'ŸO>'ªDµSíßoà\31\\\18ïY~pIzwè×\31ú½â“1ê\26ô„U[–N-%ÕIõ®zÿ÷'ô\15]“^ÓüÐ5ù{ÇïöÃ\127íhÌ\"Q¡PI ¢\127EûŠî\21Í+zûÏo:W4®è[©ßwUSîšž5=kõ¬éYÓ³¦gMÏšžþ³ž5=kzÖêö]ÕS£¿¡gCÏF=\27z6ôlèÙг¡§ÿܧgCφžº}WõÔÑ¡¥gKÏV=[z¶ôléÙÒ³¥§¿& gKÏ–ž­º}WõÔÑ££gGÏN=;zvôìèÙѳ£§¿^ gGÏŽžº}WõÔÑ¥§gOÏ^={zöôìéÙÓ³§§¿– gOÏžž½º}WõÔÑg ç@ÏA=\7z\14ô\28è9Ðs §¿Î ç@Ïžƒº}WõÔÑi¤çHÏQ=GzŽô\28é9Òs¤§¿\6¡çHÏ‘ž£º}WõÔÑk¢çDÏI='zNôœè9Ñs¢§¿>¡çDωž“º}WõÔÑm¦çLÏY=gzÎôœé9Ós¦§¿v¡çLÏ™ž³º}WõÔÑo¡çBÏE=\23z.ô\\è¹Ðs¡§¿®¡çBÏ…ž‹º}WõÔÑq¥çJÏU=Wz®ô\\é¹Òs¥§¿æ¡çJÏ•ž«º}WõÔÑs£çFÏM=7znôÜè¹Ñs£§¿\30¢çFÏž›º}WõÔÑõ çAÏC=\15z\30ô<èyÐ󠧿V¢çAσž‡º}Wõ<ü”«gߣgßãϾGϾGϾGϾGϾGϾﭔž}ž}ž}?ô~ßüÙ§“ô…Ä…Â%‹þ\23í/º_4¿èío´è|Ñø¢ï¥~ßUM/?qG]ƒNnjËÒ©¥¤:©ÞUïÿ¾\16û¡kÒK×\31º&\127áÿÝ~øk\6?½Kã•Æë\26¯4^i¼Òx¥ñJã{§&W\26¯4^oþûæ\26þ\11á–Æ-Û5niÜÒ¸¥qKã–Æ÷ZN\26·4niÜÞü÷Í5ìöóÏŸ\127üïÏÿü´â×ßTvýùgEUYQSÔV4\20\21-EkEGÑYÑSôV\12\20ƒ\21#ÅhÅD1Y1SÌV,\20‹\21+ÅjÅF±YqP\28|‘G_ä±ò¢º¬øç/*»Zùª|)o•÷\31¿þ†ã×ß\6b%$VÛÍ>Tª+ÊZeMÙ¨l([•-e§²£ìUö”ƒÊrT9RN*'ÊYåL¹¨\\(W•+å¦r£þ|ü!ùøCòñ‡äã\15ÉÇ\31’?$\31ÿ\"—\127Ë¿Æ¥oqù—¸ô\29.}…KßàÒ\23¸¤\127Iþ’ú%ñKÚ—¤/)_\18¾¤{IörUŸPÓw×ÛÕåßeýÚ\28~ß´Úømò\27òºiKÿ{QïÓVÿ4ø–÷ð-´eõ¿¾“gùðçª÷4þ[Œ\0317Ó\127x£iOp~¯Ùžãÿ\1ËÕé\19", + ["length"]=13245, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=2372, diff --git a/tex/context/patterns/lang-tr.lua b/tex/context/patterns/lang-tr.lua index f6052525e..97aca0e3e 100644 --- a/tex/context/patterns/lang-tr.lua +++ b/tex/context/patterns/lang-tr.lua @@ -48,7 +48,17 @@ return { }, ["patterns"]={ ["characters"]="abcdefghijklmnoprstuvyzâçîöûüğış", - ["data"]="2a1 2â1 2e1 2ı1 2i1 2î1 2o1 2ö1 2u1 2ü1 2û1 1b1 1c1 1ç1 1d1 1f1 1g1 1ÄŸ1 1h1 1j1 1k1 1l1 1m1 1n1 1p1 1r1 1s1 1ÅŸ1 1t1 1v1 1y1 1z1 2e2cek. 
2bb 2bc 2bç 2bd 2bf 2bg 2bÄŸ 2bh 2bj 2bk 2bl 2bm 2bn 2bp 2br 2bs 2bÅŸ 2bt 2bv 2by 2bz 2cb 2cc 2cç 2cd 2cf 2cg 2cÄŸ 2ch 2cj 2ck 2cl 2cm 2cn 2cp 2cr 2cs 2cÅŸ 2ct 2cv 2cy 2cz 2çb 2çc 2çç 2çd 2çf 2çg 2çğ 2çh 2çj 2çk 2çl 2çm 2çn 2çp 2çr 2çs 2çş 2çt 2çv 2çy 2çz 2db 2dc 2dç 2dd 2df 2dg 2dÄŸ 2dh 2dj 2dk 2dl 2dm 2dn 2dp 2dr 2ds 2dÅŸ 2dt 2dv 2dy 2dz 2fb 2fc 2fç 2fd 2ff 2fg 2fÄŸ 2fh 2fj 2fk 2fl 2fm 2fn 2fp 2fr 2fs 2fÅŸ 2ft 2fv 2fy 2fz 2gb 2gc 2gç 2gd 2gf 2gg 2gÄŸ 2gh 2gj 2gk 2gl 2gm 2gn 2gp 2gr 2gs 2gÅŸ 2gt 2gv 2gy 2gz 2ÄŸb 2ÄŸc 2ğç 2ÄŸd 2ÄŸf 2ÄŸg 2ÄŸÄŸ 2ÄŸh 2ÄŸj 2ÄŸk 2ÄŸl 2ÄŸm 2ÄŸn 2ÄŸp 2ÄŸr 2ÄŸs 2ÄŸÅŸ 2ÄŸt 2ÄŸv 2ÄŸy 2ÄŸz 2hb 2hc 2hç 2hd 2hf 2hg 2hÄŸ 2hh 2hj 2hk 2hl 2hm 2hn 2hp 2hr 2hs 2hÅŸ 2ht 2hv 2hy 2hz 2jb 2jc 2jç 2jd 2jf 2jg 2jÄŸ 2jh 2jj 2jk 2jl 2jm 2jn 2jp 2jr 2js 2jÅŸ 2jt 2jv 2jy 2jz 2kb 2kc 2kç 2kd 2kf 2kg 2kÄŸ 2kh 2kj 2kk 2kl 2km 2kn 2kp 2kr 2ks 2kÅŸ 2kt 2kv 2ky 2kz 2lb 2lc 2lç 2ld 2lf 2lg 2lÄŸ 2lh 2lj 2lk 2ll 2lm 2ln 2lp 2lr 2ls 2lÅŸ 2lt 2lv 2ly 2lz 2mb 2mc 2mç 2md 2mf 2mg 2mÄŸ 2mh 2mj 2mk 2ml 2mm 2mn 2mp 2mr 2ms 2mÅŸ 2mt 2mv 2my 2mz 2nb 2nc 2nç 2nd 2nf 2ng 2nÄŸ 2nh 2nj 2nk 2nl 2nm 2nn 2np 2nr 2ns 2nÅŸ 2nt 2nv 2ny 2nz 2pb 2pc 2pç 2pd 2pf 2pg 2pÄŸ 2ph 2pj 2pk 2pl 2pm 2pn 2pp 2pr 2ps 2pÅŸ 2pt 2pv 2py 2pz 2rb 2rc 2rç 2rd 2rf 2rg 2rÄŸ 2rh 2rj 2rk 2rl 2rm 2rn 2rp 2rr 2rs 2rÅŸ 2rt 2rv 2ry 2rz 2sb 2sc 2sç 2sd 2sf 2sg 2sÄŸ 2sh 2sj 2sk 2sl 2sm 2sn 2sp 2sr 2ss 2sÅŸ 2st 2sv 2sy 2sz 2ÅŸb 2ÅŸc 2şç 2ÅŸd 2ÅŸf 2ÅŸg 2ÅŸÄŸ 2ÅŸh 2ÅŸj 2ÅŸk 2ÅŸl 2ÅŸm 2ÅŸn 2ÅŸp 2ÅŸr 2ÅŸs 2ÅŸÅŸ 2ÅŸt 2ÅŸv 2ÅŸy 2ÅŸz 2tb 2tc 2tç 2td 2tf 2tg 2tÄŸ 2th 2tj 2tk 2tl 2tm 2tn 2tp 2tr 2ts 2tÅŸ 2tt 2tv 2ty 2tz 2vb 2vc 2vç 2vd 2vf 2vg 2vÄŸ 2vh 2vj 2vk 2vl 2vm 2vn 2vp 2vr 2vs 2vÅŸ 2vt 2vv 2vy 2vz 2yb 2yc 2yç 2yd 2yf 2yg 2yÄŸ 2yh 2yj 2yk 2yl 2ym 2yn 2yp 2yr 2ys 2yÅŸ 2yt 2yv 2yy 2yz 2zb 2zc 2zç 2zd 2zf 2zg 2zÄŸ 2zh 2zj 2zk 2zl 2zm 2zn 2zp 2zr 2zs 2zÅŸ 2zt 2zv 2zy 2zz a3a2 a3â2 a3e2 a3ı2 a3i2 a3î2 a3o2 a3ö2 a3u2 a3ü2 a3û2 â3a2 â3â2 â3e2 â3ı2 â3i2 â3î2 â3o2 â3ö2 â3u2 â3ü2 â3û2 e3a2 e3â2 e3e2 e3ı2 e3i2 e3î2 e3o2 e3ö2 e3u2 e3ü2 e3û2 ı3a2 ı3â2 ı3e2 ı3ı2 ı3i2 ı3î2 ı3o2 ı3ö2 ı3u2 ı3ü2 ı3û2 i3a2 i3â2 i3e2 i3ı2 i3i2 i3î2 i3o2 i3ö2 i3u2 i3ü2 i3û2 î3a2 î3â2 î3e2 î3ı2 î3i2 î3î2 î3o2 î3ö2 î3u2 î3ü2 î3û2 o3a2 o3â2 o3e2 o3ı2 o3i2 o3î2 o3o2 o3ö2 o3u2 o3ü2 o3û2 ö3a2 ö3â2 ö3e2 ö3ı2 ö3i2 ö3î2 ö3o2 ö3ö2 ö3u2 ö3ü2 ö3û2 u3a2 u3â2 u3e2 u3ı2 u3i2 u3î2 u3o2 u3ö2 u3u2 u3ü2 u3û2 ü3a2 ü3â2 ü3e2 ü3ı2 ü3i2 ü3î2 ü3o2 ü3ö2 ü3u2 ü3ü2 ü3û2 û3a2 û3â2 û3e2 û3ı2 û3i2 û3î2 û3o2 û3ö2 û3u2 û3ü2 û3û2 tu4r4k m1t4rak", + ["compression"]="zlib", + ["data"]="xÚ%ÓKnã:\16…á­x\5\23ˆoo(¡ÞÔ\11\20%@ÚL€žd& \19M¤™­}õ\127Šƒ:´\19ó«\2z~~<ž¯¿DN½\127‰Z\127Ù‰A\31\14bÖ‡Kq~<>¾(G½~ˆŒ*¨’z\127\19\21ÕPžj©Žê©‘\ +ÔDÝúa¤\22j¥6õ\127ºÜÿ÷x~}QŽzý\16\25UP%õþ&*ª¡<ÕR\29ÕS#\21¨‰ºõÃH-ÔJm§ƒu°N¬ƒu°\14Ö‰u°\14ÖÁ:X\7ë`\29¬ƒu°N¬ƒu°\14ÖÁ¾~¾\20N!ùõ“)\ +E©þú©\20Â+ZE§è\21£\"(&ÅmW¢bQ¬\ +ze´Ê蔩QFŸŒ6\25]25Éè‘Ñ\"£CFƒ\12?ƒÏÐ3ð\12;\19!gÀ\25n\6[À\22°…Ø\2¶€-`\11±\5l\1[À\22°\5l\1[À\22°\5l!¶€-`\11Ø\2¶„-aK±%l\9[–bKØ\18¶„-aKØ\18¶„-aKØRl\9[–°%ìûûKá\20’ßß™¢P”\ +éïïJÑ(¼¢UtŠ^1*‚bRÜv%*\22Ū WE«ŠN•\26Uô©hSÑ¥R“Š\30\21-*:T4¨ð+ø\ 
+½\2¯°+Ñ\21r\5\\áV°\13l\3Ûˆm`\27Ø\6¶\17ÛÀ6°\13l\3ÛÀ6°\13l\3ÛÀ6b\27Ø\6¶m`=¬‡õb=¬‡õ°^¬‡õ°\30ÖÃzX\15ëa=¬‡õb=¬‡õ°\30¶…ma[±-l\11Û¶b[Ø\22¶…ma[Ø\22¶…ma[ØVl\11Û¶°-l\7ÛÁvb;Ø\14¶ƒíÄv°\29l\7ÛÁv°\29l\7ÛÁv°Ø\14¶ƒí`;Ø\30¶‡íÅö°=l\15Û‹ía{Ø\30¶‡ía{Ø\30¶‡ía{±=l\15ÛÃö°#ì\8;Š\29aGØ\17v\20;ÂŽ°#ì\8;ÂŽ°#ì\8;ÂŽbGØ\17v„\29a\3l€\13b\3l€\13°Al€\13°\0016À\6Ø\0\27`\3l€\13b\3l€\13°\1v‚`'±\19ì\4;ÁNb'Ø\9v‚`'Ø\9v‚`'ØIì\4;ÁN°\19ì­Õ»µz·­Þ­Õ»µz·Vï¶Õ»µz·VïÖêÝZ½[«wkõn­Þ­Õ»µz·­Þ­Õ»µz·VïÖêEZE:E5Šô‰´‰t‰j\18é\17i\17é\16i\16ñ#|Dà\17;ŠŽÈ\0178âFØ\5v]Ä.°\11ì\2»ˆ]`\23Ø\5v]`\23Ø\5v]`\23±\11ì\2»À.°+ì\ +»Š]aWØ\21v\21»Â®°+ì\ +»Â®°+ì\ +»Â®bWØ\21v…]a7Ø\13v\19»Án°\27ì&vƒÝ`7Ø\13vƒÝ`7Ø\13vƒÝÄn°\27ì\6»mÏÿ?ŸÄë¯2W¼\127•µýuW\14öñPÎöñ²<Ÿ×_Ý&u#·\20ÀQ§ÿìv\12éËaÇœ¾\\éÀÉÅä¦äBr3r\17¹\9¹€Üî纞ÛíÜ.¿\127u›ÔuŽÜR\0Gþ³Û1¤/‡\29súr¥\3§\22S›R\11©Í¨EÔ&Ô\2j»_ëzm·ëô\18»½Äž^b·—ØÓKìö\18{z‰Ý^bO/±ÛKìé%vs\0061ƒ)ƒÁŒAÄ` `°ûƒ®\15v{HC\0286Ä‘†8lˆ#\13qØ\16G\26â°!Ž4ÄaC\28iˆÃœYÌlÊ,d6c\0221›0\11˜íþ¬ë³ÝžÓ\16—\13q¥!.\27âJC\\6Ä•†¸lˆ+\13qÙ\16W\26âJÎiΙœÓœ39§9grNsÎäœæœÉ9͉óŸðÇ?ºø'|ú\127Ò¿Úß", + ["length"]=2786, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=597, diff --git a/tex/context/patterns/lang-uk.lua b/tex/context/patterns/lang-uk.lua index 058cf73e1..3fba7a15d 100644 --- a/tex/context/patterns/lang-uk.lua +++ b/tex/context/patterns/lang-uk.lua @@ -82,7 +82,71 @@ return { }, ["patterns"]={ ["characters"]="'-абвгдежзийклмнопрÑтуфхцчшщьюÑєіїґ’", - ["data"]="2а1 а3а а3е а3Ñ– а3о а3у а3ÑŽ а3Ñ Ð°3Ñ” а3Ñ— 2е1 е3а е3е е3Ñ– е3о е3у е3ÑŽ е3Ñ Ðµ3Ñ” е3Ñ— 2и1 и3а и3е и3Ñ– и3о и3у и3ÑŽ и3Ñ Ð¸3Ñ” и3Ñ— 2Ñ–1 Ñ–3а Ñ–3е Ñ–3и Ñ–3о Ñ–3у Ñ–3ÑŽ Ñ–3Ñ Ñ–3Ñ” Ñ–3Ñ— 2о1 о3а о3е о3Ñ– о3о о3у о3ÑŽ о3Ñ Ð¾3Ñ” о3Ñ— 2у1 у3а у3е у3Ñ– у3о у3у у3ÑŽ у3Ñ Ñƒ3Ñ” у3Ñ— 2ÑŽ1 ÑŽ3а ÑŽ3е ÑŽ3Ñ– ÑŽ3о ÑŽ3у ÑŽ3ÑŽ ÑŽ3Ñ ÑŽ3Ñ” ÑŽ3Ñ— 2Ñ1 Ñ3а Ñ3е Ñ3о Ñ3у Ñ3ÑŽ Ñ3Ñ Ñ3Ñ” Ñ3Ñ— 2Ñ”1 Ñ”3у Ñ”3ÑŽ Ñ”3Ñ” Ñ”3Ñ— 2Ñ—1 Ñ—3е Ñ—3о Ñ—3ÑŽ 2б1к 2б1п 2б1Ñ 2б1Ñ‚ 2б1Ñ„ 2б1Ñ… 2б1ц 2б1ч 2б1ш 2б1щ 2в1б 2в1г 2в1д 2в1ж 2в1з 2в1к 2в1л 2в1м 2в1н 2в1п 2в1Ñ€ 2в1Ñ 2в1Ñ‚ 2в1Ñ„ 2в1Ñ… 2в1ц 2в1ч 2в1ш 2в1щ 2в1й 2в'3 2в’3 2г1к 2г1п 2г1Ñ 2г1Ñ‚ 2г1Ñ„ 2г1ц 2г1ч 2г1ш 2д1к 2д1п 2д1Ñ 2д1Ñ‚ 2д1Ñ„ 2д1Ñ… 2д1ц 2д1ч 2д1ш 2д1щ 2ж1к 2ж1п 2ж1Ñ 2ж1Ñ‚ 2ж1Ñ„ 2ж1Ñ… 2ж1ц 2ж1ч 2ж1ш 2з1к 2з1п 2з1Ñ 2з1Ñ‚ 2з1Ñ„ 2з1Ñ… 2з1ц 2з1ч 2з1ш 2з1щ 2к1б 2к1г 2к1д 2к1з 2л1б 2л1в 2л1г 2л1Ò‘ 2л1д 2л1ж 2л1з 2л1к 2л1м 2л1н 2л1п 2л1Ñ€ 2л1Ñ 2л1Ñ‚ 2л1Ñ„ 2л1Ñ… 2л1ц 2л1ч 2м1б 2м1в 2м1г 2м1д 2м1ж 2м1з 2м1к 2м1л 2м1н 2м1п 2м1Ñ€ 2м1Ñ 2м1Ñ‚ 2м1Ñ„ 2м1Ñ… 2м1ц 2м1ч 2м1ш 2м1щ 2м'3 2м’3 2н1б 2н1в 2н1г 2н1д 2н1ж 2н1з 2н1к 2н1л 2н1м 2н1п 2н1Ñ€ 2н1Ñ 2н1Ñ‚ 2н1Ñ„ 2н1Ñ… 2н1ц 2н1ч 2н1ш 2н1щ 2н'3 2н’3 2п1б 2п1д 2п1з 2Ñ€1б 2Ñ€1в 2Ñ€1г 2Ñ€1Ò‘ 2Ñ€1д 2Ñ€1ж 2Ñ€1з 2Ñ€1к 2Ñ€1л 2Ñ€1м 2Ñ€1н 2Ñ€1п 2Ñ€1Ñ 2Ñ€1Ñ‚ 2Ñ€1Ñ„ 2Ñ€1Ñ… 2Ñ€1ц 2Ñ€1ч 2Ñ€1ш 2Ñ€1щ 2Ñ€1й 2Ñ€'3 2р’3 2Ñ1б 2Ñ1г 2Ñ1д 2Ñ‚1б 2Ñ‚1г 2Ñ‚1д 2Ñ‚1ж 2Ñ‚1з 2Ñ„1б 2Ñ„1г 2Ñ„1з 2Ñ…1г 2Ñ…1д 2ц1б 2ц1г 2ц1д 2ц1з 2ч1б 2ч1д 2ч1ж 2ш1б 2ш1г 2й1б 2й1в 2й1г 2й1д 2й1ж 2й1з 2й1к 2й1л 2й1м 2й1н 2й1п 2й1Ñ€ 2й1Ñ 2й1Ñ‚ 2й1Ñ„ 2й1Ñ… 2й1ц 2й1ч 2й1ш 2й1щ 2дь1к 2дь1Ñ 2дь1Ñ‚ 2дь1ц 2зь1к 2зь1Ñ 2зь1Ñ‚ 2ль1б 2ль1в 2ль1г 2ль1д 2ль1ж 2ль1з 2ль1к 2ль1м 2ль1н 2ль1п 2ль1Ñ€ 2ль1Ñ 2ль1Ñ‚ 2ль1Ñ„ 2ль1Ñ… 2ль1ц 2ль1ч 2ль1ш 2ль1щ 2ль1й 2нь1б 2нь1г 2нь1з 2нь1к 2нь1л 2нь1м 2нь1Ñ 2нь1Ñ‚ 2нь1Ñ… 2нь1ц 2нь1ч 2нь1ш 2нь1й 2рь1к 2рь1ц 2ÑÑŒ1б 2ÑÑŒ1д 2Ñ‚ÑŒ1б 2к1Ñп 2к1ÑÑ‚ 2к1Ñьк 2п1Ñп 2п1ÑÑ‚ 2п1Ñьк 2Ñ1пк 2Ñ1пп 2Ñ1Ð¿Ñ 2Ñ1пт 2Ñ1пх 2Ñ1пч 2Ñ1Ñьк 2Ñ1тк 2Ñ1тп 2Ñ1Ñ‚Ñ 2Ñ1Ñ‚ÑÑŒ 2Ñ1Ñ‚Ñ‚ 2Ñ1Ñ‚Ñ„ 2Ñ1тц 2Ñ1шт 2ÑÑŒ1кк 2ÑÑŒ1ÐºÑ 2ÑÑŒ1кт 2Ñ‚1Ñк 2Ñ‚1Ñп 2Ñ‚1ÑÑ‚ 2Ñ‚1Ñьк 2Ñ‚1шк 2Ñ„1Ñп 2Ñ„1ÑÑ‚ 2Ñ„1Ñьк 2Ñ„1шт 2Ñ…1ÑÑ‚ 2Ñ…1Ñьк 2ц1ÑÑ‚ 2ц1шк 2ш1тк 2ш1Ñ‚ÑÑŒ 2б1б 2в1в 2г1г 2Ò‘1Ò‘ 2д1д 2ж1ж 2з1з 2к1к 2л1л 2м1м 2н1н 2п1п 2Ñ€1Ñ€ 2Ñ1Ñ 2Ñ‚1Ñ‚ 2Ñ„1Ñ„ 2Ñ…1Ñ… 2ц1ц 2ч1ч 2ш1ш 2щ1щ 
2й1й 3Ð½Ð½Ñ 3Ñ‚Ñ‚Ñ 3Ñ‚Ñ‚ÑŽ 3Ð»Ð»Ñ 3ллє 3ллю 3Ð´Ð´Ñ Ð´4ж д4з а2й е2й и2й Ñ–2й о2й у2й ÑŽ2й Ñ2й Ñ”2й Ñ—2й 3й6о ÑŒ6о 6' 6’ 6ÑŒ .б'8 .б’8 .в'8 .в’8 .д'8 .д’8 .з'8 .з’8 .м'8 .м’8 .н'8 .н’8 .п'8 .п’8 .Ñ€'8 .р’8 .Ñ‚'8 .т’8 .Ñ„'8 .ф’8 .ш'8 .ш’8 .бд6 .бр6 .вб6 .вб6'6 .вб6’6 .вв6'6 .вв6’6 .вг6 .вд6 .вж6 .вз6 .вз6д6 .вк6 .вл6 .вм6 .вм6'6 .вм6’6 .вп6 .вп6'6 .вп6’6 .вп6Ñ…6 .вÑ6 .вÑ6Ñ‚6 .вт6 .вш6к6 .дж6 .дз6 .дл6 .дÑ6 .зб6 .зв6'6 .зв6’6 .зг6 .зд6 .зд6з6 .зл6 .зм6'6 .зм6’6 .зÑ6 .зÑ6к6 .зÑ6Ñ‚6 .зч6 .зш6 .зш6к6 .зґ6 .йш6 .кл6 .кп6 .кÑ6 .кх6 .кш6 .лк6Ñ6 .лÑ6 .ль6 .мÑ6 .мф6 .нб6 .пр6 .пÑ6 .пх6 .рт6 .Ñк6 .Ñк6л6 .Ñл6 .Ñп6 .Ñп6'6 .Ñп6’6 .Ñп6л6 .Ñп6Ñ…6 .ÑÑ‚6 .ÑÑ…6 .ÑÑ…6л6 .тк6 .Ñ‚Ñ€6 .Ñ‚Ñ…6 .Ñ‚ÑŒ6 .фл6 .хл6 .ць6 .чх6 .шк6 .шл6 .шп6 .шт6 6бв. 6бз. 6бй. 6бл. 6б6ль. 6бн. 6бр. 6бÑ. 6б6ÑÑ‚. 6б6Ñ6тв. 6б6Ñ6Ñ‚Ñ€. 6б6Ñ6ьк. 6б6ць. 6вб. 6вв. 6вд. 6в6др. 6в6дь. 6вж. 6вз. 6в6зь. 6вй. 6вк. 6вл. 6в6ль. 6вм. 6вн. 6вп. 6вр. 6вÑ. 6в6Ñ6тв. 6в6Ñ6Ñ‚ÑŒ. 6в6ÑÑŒ. 6в6Ñ6ьк. 6вт. 6вх. 6в6ць. 6вч. 6вш. 6вщ. 6гв. 6гг. 6гд. 6гл. 6г6ль. 6гм. 6гн. 6гр. 6гÑ. 6г6Ñ6тв. 6г6ÑÑŒ. 6гт. 6дж. 6дз. 6д6зь. 6дл. 6дм. 6дн. 6др. 6д6Ñ6тв. 6д6Ñ6ьк. 6дт. 6дь. 6д6ÑŒ6ÑÑŒ. 6жб. 6жв. 6ж6дь. 6ж6ÑÑŒ. 6зв. 6зг. 6зд. 6з6дв. 6з6дн. 6з6дь. 6зк. 6зл. 6зм. 6зн. 6з6нь. 6зр. 6з6ÑÑŒ. 6зь. 6з6ьб. 6з6ьк. 6йб. 6йв. 6йг. 6йд. 6йз. 6йк. 6й6кл. 6йл. 6й6ль. 6йм. 6й6мÑ. 6йн. 6й6нÑ. 6йп. 6йр. 6йÑ. 6й6ÑÑ‚. 6й6Ñ6тв. 6й6Ñ6Ñ‚Ñ€. 6й6ÑÑŒ. 6й6Ñ6ьк. 6йт. 6й6Ñ‚Ñ. 6йф. 6йх. 6йц. 6йч. 6йш. 6кв. 6кк. 6кл. 6к6ль. 6кр. 6кÑ. 6к6ÑÑ‚. 6к6ÑÑŒ. 6кт. 6к6Ñ‚Ñ€. 6кх. 6кш. 6лб. 6лг. 6лд. 6лк. 6лл. 6л6ль. 6лм. 6л6мÑ. 6лн. 6лп. 6лÑ. 6лт. 6л6хв. 6ль. 6л6ьб. 6л6ьв. 6л6ьг. 6л6ьд. 6л6ÑŒ6дÑ. 6л6ьз. 6л6ьк. 6л6ьм. 6л6ьн. 6л6ьп. 6л6ÑŒÑ. 6л6ÑŒ6Ñтв. 6л6ÑŒ6ÑÑŒ. 6л6ÑŒ6Ñьк. 6л6ьт. 6л6ÑŒ6Ñ‚Ñ€. 6л6ьф. 6л6ьх. 6л6ьц. 6л6ьч. 6л6ьш. 6л6ьщ. 6мб. 6м6б6ль. 6м6бр. 6мг. 6мж. 6мк. 6мл. 6м6ль. 6мм. 6мн. 6мп. 6мр. 6мÑ. 6м6Ñ6тв. 6м6ÑÑŒ. 6м6Ñ6ьк. 6мт. 6мф. 6мх. 6мш. 6нв. 6нг. 6н6гл. 6н6г6ль. 6н6гр. 6н6гÑ. 6нд. 6н6дж. 6н6дз. 6н6дп. 6н6др. 6нж. 6нз. 6нк. 6н6кÑ. 6н6кт. 6нм. 6нн. 6нр. 6нÑ. 6н6Ñк. 6н6ÑÑ‚. 6н6Ñ6тв. 6н6Ñ6Ñ‚Ñ€. 6н6Ñ6ьк. 6н6Ñ6ькй. 6нт. 6н6тк. 6н6Ñ‚Ñ€. 6н6Ñ‚6Ñтв. 6н6Ñ‚ÑŒ. 6нф. 6нх. 6нц. 6н6ць. 6нч. 6нш. 6нь. 6н6ьб. 6н6ьг. 6н6ьк. 6н6ÑŒ6ÑÑŒ. 6пд. 6пл. 6пр. 6пÑ. 6п6Ñ6тв. 6п6ÑÑŒ. 6пт. 6п6Ñ‚Ñ€. 6пф. 6пц. 6рб. 6рв. 6рг. 6рд. 6Ñ€6дв. 6Ñ€6дж. 6Ñ€6дь. 6рж. 6рз. 6Ñ€6зн. 6Ñ€6зь. 6рк. 6Ñ€6кÑ. 6Ñ€6кт. 6рл. 6Ñ€6ль. 6Ñ€6л6ьз. 6рм. 6рн. 6Ñ€6нÑ. 6Ñ€6н6ÑÑ‚. 6Ñ€6нь. 6рп. 6рр. 6Ñ€Ñ. 6Ñ€6ÑÑ€. 6Ñ€6ÑÑ‚. 6Ñ€6Ñ6тв. 6Ñ€6Ñ6Ñ‚ÑŒ. 6Ñ€6ÑÑŒ. 6Ñ€6Ñ6ьк. 6рт. 6Ñ€6тв. 6Ñ€6Ñ‚Ñ€. 6Ñ€6Ñ‚ÑŒ. 6рф. 6рх. 6рц. 6Ñ€6ць. 6рч. 6рш. 6рщ. 6Ñ€6щ6ÑÑŒ. 6рь. 6Ñ6дп. 6Ñ6д6рп. 6Ñк. 6Ñл. 6Ñ6ль. 6Ñм. 6Ñн. 6Ñп. 6ÑÑ. 6ÑÑ‚. 6Ñ6тв. 6Ñ6тй. 6Ñ6тм. 6Ñ6Ñ‚Ñ€. 6Ñ6Ñ‚6рь. 6Ñ6Ñ‚ÑŒ. 6Ñ6ць. 6ÑÑŒ. 6Ñ6ьб. 6Ñ6ьк. 6Ñ6ьм. 6тв. 6Ñ‚6вт. 6Ñ‚6зт. 6тл. 6Ñ‚6ль. 6тм. 6Ñ‚6мр. 6Ñ‚Ñ€. 6Ñ‚Ñ. 6Ñ‚6Ñ6тв. 6Ñ‚6Ñ6ьк. 6Ñ‚Ñ‚. 6тц. 6тч. 6Ñ‚ÑŒ. 6Ñ‚6ÑŒ6ÑÑŒ. 6фм. 6Ñ„Ñ€. 6Ñ„6Ñ6тв. 6Ñ„Ñ‚. 6Ñ„6Ñ‚ÑŒ. 6Ñ„Ñ„. 6Ñ„ÑŒ. 6хв. 6хм. 6хн. 6Ñ…Ñ€. 6Ñ…Ñ‚. 6хш. 6ц6тв. 6ць. 6ц6ьк. 6чб. 6чм. 6чн. 6чт. 6шв. 6ш6ль. 6шм. 6шн. 6ш6нл. 6ш6ÑÑŒ. 6шт. 6ш6тв. 6щ6ÑÑŒ. 
.бе4з'3 .бе4з’3 .бе4з3 .безу4 .бе5з4о3д .без5о4Ñоб .безві4д3 .без3ро4з3 виї4 .ві5д4ом .ві5д4озв .ві5д4ун віду4ч .ві5д4а .ві5д4ер .ві5д4Ñ– .ві4д3 .від'3 .від’3 .мі4ж3 безві4д3 ові4д3 ді4єві4д3 за4вві4д3 неві4д'3 неві4д’3 неві4д3 про4Ñ„3ві4д3 Ñпе4ц3ві4д3 Ñпівві4д3 те4Ñ…3ві4д3 .пере4д3бач .пере4д3виб .пере4д3г .пере4д3д .пере4д3м .пере4д3оÑÑ‚ .пере4д3пла .пере4д3пок .пере4д3Ñ€ .пере4д3Ñв .пере4д3умов .пере4д3уÑÑ– .пере4д3Ñ„Ñ€ .пере4д3ч .пере4д'3 .пере4д’3 .пере3 .пона4д3 .пона5д4Ñ– .пона5д4и .пона5д4Ñ 3п4ре 3п4ри приї4 3п4ро 3п4рі .пі5д4о .пі5д4Ñ– .під'3 .під’3 .пі4д3 .пі5д4е .пі5д4и .пі5д4у .пі4в3 .Ñпі4в3 .напі4в3 .ро5з4Ñ– .ро5з4е ро5з4йом .ро5з4а .ро4з'3 .ро4з’3 .ро4з3 .чере4з'3 .чере4з’3 .чере4з3 оо4б ооб'3 ооб’3 ооб3м ооб3ро об'3 об’3 од'3 од’3 на4д'3 на4д’3 за5о4Ñ€ до5о4Ñ€ по5о4Ñ€ пере5о4Ñ€ пі6д5о4Ñ€ бе4з5Ñ–4дей до3в'4Ñ” до3в’4Ñ” за3в'4Ñ” за3в’4Ñ” зі3в'4Ñ” зі3в’4Ñ” обі3в'4Ñ” обі3в’4Ñ” по3в'4Ñ” по3в’4Ñ” уі3в'4Ñ” уі3в’4Ñ” з3в'4Ñ Ð·3в’4Ñ Ð¿Ð¾3в'4Ñ Ð¿Ð¾3в’4Ñ Ð²Ñ–Ð´3в'4Ñ Ð²Ñ–Ð´3в’4Ñ Ð·Ð°3в'4Ñ Ð·Ð°3в’4Ñ Ð·Ñ–3в'4Ñ Ð·Ñ–3в’4Ñ Ð·Ð°3ÑŽ4ш на3в'4Ñ Ð½Ð°3в’4Ñ Ð½ÐµÐ¿Ð¾3в'4Ñ Ð½ÐµÐ¿Ð¾3в’4Ñ Ð¾Ð±3в'4Ñ Ð¾Ð±3в’4Ñ Ð¿Ñ€Ð¸3в'4Ñ Ð¿Ñ€Ð¸3в’4Ñ Ð¿Ñ–Ð´3в'4Ñ Ð¿Ñ–Ð´3в’4Ñ Ñƒ3в'4Ñ Ñƒ3в’4Ñ Ð·3м'4Ñ Ð·3м’4Ñ Ð·Ñ–3м'4Ñ Ð·Ñ–3м’4Ñ Ñƒ3м'4Ñ Ñƒ3м’4Ñ Ð²3м'4Ñ Ð²3м’4Ñ Ð½Ð¾3м'4Ñ Ð½Ð¾3м’4Ñ Ð·Ð°3м'4Ñ Ð·Ð°3м’4Ñ Ð½Ð°3м'4Ñ Ð½Ð°3м’4Ñ Ð¾Ð±3м'4Ñ Ð¾Ð±3м’4Ñ Ð¿ÐµÑ€Ðµ3м'4Ñ Ð¿ÐµÑ€Ðµ3м’4Ñ Ð¿Ð¾3м'4Ñ Ð¿Ð¾3м’4Ñ Ð¿Ñ€Ð¸3м'4Ñ Ð¿Ñ€Ð¸3м’4Ñ Ð¿Ñ–Ð´Ñ–3м'4Ñ Ð¿Ñ–Ð´Ñ–3м’4Ñ Ñу3м'4Ñ Ñу3м’4Ñ Ð´Ð¾3в'4ÑŽ до3в’4ÑŽ за3в'4ÑŽ за3в’4ÑŽ зі3в'4ÑŽ зі3в’4ÑŽ на3в'4ÑŽ на3в’4ÑŽ по3в'4ÑŽ по3в’4ÑŽ уі3в'4ÑŽ уі3в’4ÑŽ інтер3в'4ÑŽ інтер3в’4ÑŽ за3Ñ4ло коу4роч зу4роч наду4роч позау4роч поу4роч приу4роч на4й3у4бог нао4Ñ€ прио4Ñ€ неу4к 3в4б4лаг 3в4к4лад 3в4п4лив 3в4п4равн 3в4Ñ€4одлив 3в4Ñ‚4рут 3в4Ñ‚4руч 3з4б4рой 3з4б4рою 3з4б4роє 3з4в4'Ñз 3з4в4’Ñз 3й4ш4л 3м4к4не 3м4к4ну 3м4к4ні 3п4Ñ4ков 3Ñ4к4лад 3Ñ4к4ле 3Ñ4к4лит 3Ñ4к4ло 3Ñ4к4рипт 3Ñ4п4лав 3Ñ4п4лат 3Ñ4п4лач 3Ñ4п4рав 3Ñ4п4ритн 3Ñ4п4риÑÑ‚ 3Ñ4п4ромо 3Ñ4Ñ‚4вор 3Ñ4Ñ‚4ражд 3Ñ4Ñ‚4рах 3Ñ4Ñ‚4риб 3Ñ4Ñ‚4риж 3Ñ4Ñ‚4рой 3Ñ4Ñ‚4рок 3Ñ4Ñ‚4ром 3Ñ4Ñ‚4роф 3Ñ4Ñ‚4роч 3Ñ4Ñ‚4рою 3Ñ4Ñ‚4Ñ€Ð¾Ñ 3Ñ4Ñ‚4роє 3Ñ4Ñ‚4рої 3Ñ4Ñ‚4рукт 3Ñ4Ñ‚4рукц 3Ñ4Ñ‚4рій 3Ñ4Ñ‚4ріл 3Ñ4Ñ‚4річ 3Ñ‚4к4нен 3Ñ‚4ÑŒ4мар 3Ñ‚4ÑŒ4мÑні 3у4п4рав 3блаж 3ближ 3близ 3блиÑк 3блок 3блоці 3бран 3брати 3бреÑÑ‚ 3бризк 3британ 3бруд 3в4бив 3в4веден 3в4дал 3в4довз 3в4довол 3в4живан 3в4лад 3в4лаÑн 3в4лашт 3в4лов 3в4певн 3в4порÑд 3в4разлив 3в4рожай 3в4Ñюд 3в4тіл 3глад 3глиб 3глин 3глоб 3глуз 3глуш 3глÑд 3глÑн 3гнан 3гнил 3гноз 3гнучк 3грав 3град 3грай 3грам 3гран 3грати 3граф 3граш 3граю 3грає 3Ð³Ñ€ÐµÑ 3грець 3гроб 3грож 3гроз 3громад 3груван 3грунт 3груп 3грів 3гріт 3гріш 3г4ідро 3д4ан 3д4бав 3д4бал 3д4бан 3д4бат 3д4бає 3двиг 3двою 3двоє 3двій 3двір 3двічі 3драж 3дром 3друж 3друк 3дрÑп 3дріб 3дріма 3жвав 3жміть 3жріть 3з4бага 3з4Ð±Ð°Ð»Ð°Ð½Ñ 3з4був 3з4бут 3зваж 3зван 3звед 3звел 3звеÑÑ‚ 3звиÑÑŒ 3звич 3звищ 3зворуш 3звук 3звуч 3звіт 3з4год 3з4дат 3з4довж 3з4доров 3з4дійÑн 3змін 3зйом 3зміш 3знав 3знай 3знак 3знал 3знан 3знат 3знаход 3знач 3знаю 3знає 3зниж 3знім 3зрозум 3зрюв 3зрів 3зріл 3зрін 3з4чеп 3й4ма 3й4менн 3й4мищ 3й4мовірн 3й4му. 
3й4муть 3й4міть 3й4шов 3м4нож 3м4ріть 3м4щен 3п4Ñов 3п4Ñон 3п4Ñув 3Ñ€4вав 3Ñ€4вати 3Ñ€4віть 3Ñ4кид 3Ñ4кок 3Ñ4коп 3Ñ4кор 3Ñ4короч 3Ñ4коч 3Ñ4кіль 3Ñ4кіпл 3Ñ4пад 3Ñ4пект 3Ñ4перм 3Ñ4пин 3Ñ4повід 3Ñ4пожив 3Ñ4поÑтер 3Ñ4пі 3Ñ4піть 3Ñ4піш 3Ñ4табіл 3Ñ4тав 3Ñ4тад 3Ñ4таз 3Ñ4тайн 3Ñ4тал 3Ñ4тан 3Ñ4тар 3Ñ4тара 3Ñ4тат 3Ñ4тач 3Ñ4тає 3Ñ4теп 3Ñ4тереж 3Ñ4теріг 3Ñ4тиг 3Ñ4тиж 3Ñ4тиÑл 3Ñ4титу 3Ñ4товб 3Ñ4той 3Ñ4торон 3Ñ4торін 3Ñ4тоÑо 3Ñ4тоÑу 3Ñ4тою 3Ñ4тоÑн 3Ñ4туп 3Ñ4Ñ‚Ñг 3Ñ4тіб 3Ñ4тій 3Ñ4тіль 3Ñ4Ñ‚Ñ–Ñ€ 3Ñ4фер 3Ñ4хил 3Ñ4хов 3Ñ4хід 3Ñ‚4кан 3Ñ…4то 3ш4код 3ш4кол 3ш4кідл 3ш4кіл 3ш4кір 3ш4таб 3ш4туч 3ґрунт 3а4вторит 3а4гент 3а4Ð³Ñ€ÐµÑ 3а4декват 3а4дитив 3а4зарт 3а4ктив 3а4ктуал 3а4курат 3а4куÑÑ‚ 3а4кцепт 3а4кциз 3а4лергі 3а4матор 3а4наліз 3а4натом 3а4парат 3а4пелÑц 3а4поÑтол 3а4птеч 3а4ргумен 3а4ромат 3а4Ñоці 3а4Ñпект 3а4тлет 3а4халі 3е4колог 3е4коном 3е4легант 3е4лектр 3е4лемент 3е4моці 3е4мігр 3е4нерг 3е4Ñтакад 3е4Ñтет 3е4тап 3о4б'єдн 3о4б’єдн 3о4б'єкт 3о4б’єкт 3о4береж 3о4бира 3о4борон 3о4бід 3о4біц 3о4даль 3о4дÑг 3о4збро 3о4крем 3о4перат 3о4плат 3о4птим 3о4пуÑÑ‚ 3о4пуше 3о4пуще 3о4рдинац 3о4ренд 3о4Ñоб 3о4ÑÑжн 3о4холо 3о4хорон 3о4хоч 3о4чиÑн 3о4чищ 3у4ваг 3у4важ 3у4гав 3у4згод 3у4клад 3у4компл 3у4крупн 3у4люблен 3у4мит 3у4міл 3у4перед 3у4разлив 3у4рбан 3у4рочиÑÑ‚ 3у4Ñ€Ñд 3у4Ñ€Ñдов 3у4Ñпіш 3у4Ñтанов 3у4Ñтпіш 3у4Ñувати 3у4твор 3у4тробн 3Ñ4дерн 3Ñ4зик 3Ñ4ÐºÑ–Ñ 3Ñ4Ñ€ÑƒÑ 3Ñ4Ñкрав 3Ñ”4д3н 3Ñ”4дин 3Ñ”4пиÑк 3Ñ”4рей 3Ñ–4зотоп 3Ñ–4люÑÑ‚Ñ€ 3Ñ–4мовір 3Ñ–4Ð½Ñ‚ÐµÐ½Ñ 3Ñ–4нформ 3Ñ–4ніціат 3Ñ–4Ñнув 3Ñ—4ждж 3Ñ—4зд 3Ñ—4ÑÑ‚ 3Ñ—4хав 3Ñ—4хат .заї4к .заї4ц .заї4ч .наї4д 'Ñ—4в ’ї4в 'Ñ—4з ’ї4з 'Ñ—4д ’ї4д 'Ñ—4ж ’ї4ж 'Ñ—4л ’ї4л 'Ñ—4м ’ї4м 'Ñ—4Ñ â€™Ñ—4Ñ 'Ñ—4Ñ… ’ї4Ñ… аві4а авої4д ае4тил альбі5он ахої4д ауді4о ай4Ñ3берг бактері4о ба4Ñ3енер ба4Ñ3антра .бе5зе. бей4Ñбол бе5кон б'4єть б’4єть бйор4Ð½Ñ Ð±Ñ–4о3 бо4г3дан бран4д брі4дж3порт без5Ñ–4мен бо4Ñ”3гол бо4Ñ”3гот бо4Ñ”3зап бо4Ñ”3здат бо4Ñ”3комп бо4Ñ”3поÑÑ‚ бо4Ñ”3прип бори4Ñ5п 4в3антрац вер4Ñ…3н ви3й4д вина3й4д ви3й4Ñ‚ вина3й4Ñ‚ від7зна ві5д4ен ві5д4е4о ві5д4ом від5о4браж від5о4браз во4Ñ5ко водо5Ñ4ток водо5з4бір воль4Ñ‚3метр воль4Ñ‚3ампер 3в'4Ñз 3в’4Ñз ге2Ñ‚ÑŒ3ман ге4о го4Ñ4п5роз гі4д5ро5мет 4д7зем дер4ж5а4том дер4ж5а4дм дер4ж5бюдж дер4ж5вид дер4ж5дум дер4ж5замов дер4ж5ком дер4ж5нафт дер4ж5Ñ€ÐµÑ”Ñ Ð´ÐµÑ€4ж3без дер4ж3резерв дер4ж5ÑÑ‚Ñ€ дер4ж5Ñлуж двох4а5том джен4тль диÑÐ±Ð°Ð»Ð°Ð½Ñ Ð´Ð¸4Ñ3гарм ди4Ñ3квал ди4Ñ3комф ди4Ñ3конт ди4Ñ3кред ди4Ñ3крет ди4Ñ3крец ди4Ñ3крим ди4Ñ3куÑÑ– ди4Ñ3куту ди4Ñ3лок ди4Ñ3парит ди4Ñ3Ð¿ÐµÑ€Ñ Ð´Ð¸4Ñ3петч ди4Ñ3пле ди4Ñ3плей ди4Ñ3пози ди4Ñ3проп ди4Ñ3пут ди4Ñ3тил ди4Ñ3триб ди4Ñ3троф ди4Ñ3функц ді3й4Ñ‚ ді3й4д д4ні3п4Ñ€ .дої4в .дої4л .дої5ль дої4д дої4м дої4Ñ… дої4ж дої4ÑÑ‚ до3з4вол до3з4віл дорого5в4каз еу4ÑÑ‚Ñ€ ео4Ñві енерго3з4береж енерго3з4беріг ек2Ñ1к ек2Ñ1п ек2Ñ1Ñ‚ ек2Ñ1ц єв4Ñ€3атом єпі4Ñ5коп єпи4Ñ5коп за4п3чаÑÑ‚ заї4д заї4ж заї4з заї4л заї4м заї4Ñ… зе4кономити 3з'4ÑÑо 3з’4ÑÑо зна3й4д зна3й4Ñ‚ зо4ка зо4ке зо4ки зо4ку зо4кі игої4д ий4ти іе4тил Ñ–4л3е4тил ій4ти інфор4м3аген йо4Ñві каза4Ñ…3Ñтан квої4д корої4д квар4Ñ‚3плат киї4венер кон4трре кон4Ñ‚Ñ€3арг жко4м5а4том кому4ненерг мі4н5е4ко мі4н5е4нер мо4к5рий 3м'4Ñкш 3м’4Ñкш 3м'4ÑÑ‚ 3м’4ÑÑ‚ на3б4лиз на3в4Ñ€Ñд на3в4ча на3з4в на4д7з4в наї4Ð²Ñ Ð½Ð°Ñ—4вш наї4ж наї4з наї4л наї4м наї4Ñ Ð½Ð°Ñ—4Ñ… на4й3а на4й3е на4й3обереж на4й7о4бер на4й7о4гид на4й7о4гол на4й7о4грÑд на4й7о4пук на4й7о4хай на4й3маÑл на4й3Ñпри на4й3ÑÐºÑ–Ñ Ð½Ð°3в4чен на3в4чіть не3в4том не3д4бан на3д4бан не3з4вич не3з4важ нео4пал недої4 неї4ÑÑ‚ на5п4лив ні4Ñ‚5рат оної4д оо4пал ео4пал обі3д4ран обі3й4д обі3й4Ñ‚ об5у4мов онаї4д оо4Ñві оо4к оу4ÑÑ‚Ñ€ оа4том об4лдер4ж об4л3а4дмін 
переї4д переї4ж переї4з переї4л переї4Ñ Ð¿ÐµÑ€ÐµÑ—4Ñ… пере5п4лив пере3й4д пре4й4Ñ Ð¿ÐµÑ€Ðµ3й4Ñ‚ перег4ній перед5о4бід пере3в4том пере4д5Ñм перед5у4мов під5о4дин пів5о4Ñ4Ñ‚Ñ€ пі5в4ень по3б4лизу по3в4тор поч4не поч4ни поч4ну поÑ4в по3в4чен по3в4чіть по3д4роб по3д4раз по3д4во по5ж4ніть по5з4бав .пої4 пої4д прої4 пої4зд по4Ñ4Ñ‚5радÑн по4Ñ4Ñ‚5кому по4Ñ4Ñ‚3декр по4Ñ4Ñ‚3контра по4Ñ4Ñ‚3менопауз по4Ñ4тприват по4Ñ4Ñ‚3раді по4Ñ4Ñ‚5Ñоці поÑ4Ñ‚3кап поÑ4Ñ‚3ком поÑ4Ñ‚3нат поÑ4Ñ‚3проц поÑ4Ñ‚3Ñоц поÑ4Ñ‚3Ñ„Ñ–ÐºÑ Ð¿Ñ€Ð¸3й4Ñ‚ про3Ñ4тирад про4Ñ„3Ñ Ð¿Ð¾Ð»Ñ–4Ñ‚5екон пор4Ñ‚3н пор4Ñ‚3рет пор4Ñ‚3фел при3й4д при4нцип про4ект3н про3б4лем про4м3май пр4о5плат раді4о рай3в4но ро4з5д4во ро4з5мінний роз5у4чен роз5Ñ–4мен роз5вант роз5вин роз5вит ро4з5діл ро4з5гор ро4з5вер ро4з5чеп ро4з'5єдн ро4з’5єдн з'4єдн з’4єдн руко5Ñ4тиÑк ро5з4ум ро4з3гром ро4з3лив рмої4д Ñан4к4Ñ‚3 Ñеї4д Ñерцеї4д Ñпе4ц3кур Ñпе4ц3мон Ñпе4цпр Ñпе4ц3Ñ Ñпор4Ñ‚3вир Ñпор4Ñ‚3зал Ñпор4Ñ‚3ком Ñпор4Ñ‚3клуб Ñпор4Ñ‚3май Ñпор4Ñ‚4Ñ3ме Ñор4тн 3Ñ4промож ÑÑŒ4квуг Ñтат5упр тор4г3пред тран4Ñ3 тур4к3мен цук3ро у4к4Ñ€ укр3а4вт укр3а4гр укр3е4ÐºÑ ÑƒÐºÑ€3Ñ–4н4банк убої4д чорно3б4рив цен4Ñ‚4Ñ€3енерг ÑÑої4д ви3у4ч за3у4ч на3у4ч недо3у4ч не3у4ч під3у4ч пед3у4чи пере3у4ч Ñамо3у4ч виÑ4в з'Ñ4в з’Ñ4в заÑ4в наÑ4в уÑ4в во4євод во4єнач Ñво4Ñ”Ñ‡Ð°Ñ Ñво4Ñ”ÐºÐ¾Ñ€Ð¸Ñ Ñво4єрід хво4Ñ”3г4риз гелі4о ді4о еті4о мі4о Ñ–4он полі4о Ñоці4о фізі4о хімі4о гоме4о ді4алог ді4оген деÑ4к оо4динок ао4пік ао4ха ео4ха зо6о ка5нал оі4зол міжу4Ñоб мете4о абиÑ4к ніÑ4к виÑ4Ñн най3Ñ4Ñн неÑ4Ñн поÑ4Ñн проÑ4Ñн роз'Ñ4Ñн роз’Ñ4Ñн ро5з4ора. ро5з4о5рам ро5з4орах ро5з4ори ро5з4оро ро5з4ору ро5з4Ð¾Ñ€Ñ Ñ€Ð¾5з4орю ро5з4орі ро6з5о4ри. розо4ра розо4ре розо4реш розо4рн напоу4м неа4би ео4цін оо4цін доу4к доу4м ео4бур ео4голош ео4зор бальне4о не4оліт не4омальт не4Ð¾ÐºÐ»Ð°Ñ Ð½Ðµ4окомун не4оландш не4олог не4олібер не4онац не4офіт нею4н неÑ4к неÑ4рок но4к3а4ут пі5в4оні піво4Ñ Ð¿Ð°Ð»Ðµ4о па4н3о4тець .пе4ом. 
д3у4Ñім п4о5бере ао4хот ое4ко ео4хот ео4щад ао4щад оо4чищ поÑ4Ñ Ñ€Ð¾Ð·'Ñ4Ñ€ роз’Ñ4Ñ€ те4одоліт те4олог те4оÑоф оо4біг оу4Ñун оу4ком пів3о4вал а3у4дар о3у4дар з3у4дар в3у4дар контр3у4дар о3о4киÑл и3о4киÑл ень7о4киÑл е3о4киÑл Ñ…3о4киÑл и3Ñ–4Ñтор о3Ñ–4Ñтор Ñ–3Ñ–4Ñтор а3Ñ–4Ñтор Ñ3Ñ–4Ñтор е3Ñ–4Ñтор наді4Ñтор най3Ñ–4Ñтор пів3Ñ–4Ñтор перед3Ñ–4Ñтор поÑÑ‚3Ñ–4Ñтор ар4Ñ‚3афіш ар4Ñ‚3взвод ар4Ñ‚3деÑант ар4Ñ‚3кафе ар4Ñ‚3Ð¼Ð°Ð¹Ñ Ð°Ñ€4Ñ‚3медіа ар4Ñ‚3Ð¼ÐµÐ¹Ñ Ð°Ñ€4Ñ‚3мін ар4Ñ‚3о4бÑÑ‚Ñ€ ар4Ñ‚3о4дин ар4Ñ‚3о4збр ар4Ñ‚3під ар4Ñ‚3рин ар4Ñ‚3у4Ñтан ар4Ñ‚3факт ар4Ñ‚3хім ар4Ñ‚3центр наді4Ñтот найі4Ñтот еі4Ñтот оі4Ñтот ау4Ñ‚3екол оо4чиÑÑ‚ з3а4кт оа4кт еа4кт гіпер3а4кт найа4кт піва4кт ао4браз ео4браз оо4браз граф3о4браз най3о4браз Ñупер3о4браз ар4Ñ‚3мейÑÑ‚ баге4Ñ€3мейÑÑ‚ бале4Ñ‚3мейÑÑ‚ бран4д3мейÑÑ‚ ва4ль4д3мейÑÑ‚ ве4ль4Ñ‚3мейÑÑ‚ го4Ñ„3мейÑÑ‚ гро4Ñ3мейÑÑ‚ декре4Ñ‚3мейÑÑ‚ до4к3мейÑÑ‚ капе4ль3мейÑÑ‚ кварти4Ñ€3мейÑÑ‚ конце4Ñ€4Ñ‚3мейÑÑ‚ кра4н3мейÑÑ‚ полі4ц3мейÑÑ‚ по4ш4Ñ‚3мейÑÑ‚ фо4Ñ€4Ñ4Ñ‚3мейÑÑ‚ хо4Ñ€3мейÑÑ‚ шапі4Ñ‚3мейÑÑ‚ шта4л3мейÑÑ‚ єге4Ñ€3мейÑÑ‚ иа4варі Ñа4варі оа4варі еа4варі беза4варі між3а4варі над3а4варі поÑÑ‚3а4варі напів3а4варі перед3а4варі Ñупер3а4варі аа4Ð´Ñ€ÐµÑ ÐµÐ°4Ð´Ñ€ÐµÑ Ð¾Ð°4Ð´Ñ€ÐµÑ Ñ–Ð°4Ð´Ñ€ÐµÑ Ð±ÐµÐ·3а4Ð´Ñ€ÐµÑ Ð°Ðµ4фект ее4фект ое4фект най3е4фект Ñупер3е4фект ое4міÑÑ– ие4міÑÑ– Ñе4міÑÑ– ее4міÑÑ– безе4міÑÑ– гіпер3е4міÑÑ– еу4бог й3у4бог ий4Ð½Ñ Ð·Ð°Ð¹4Ð½Ñ Ð·Ð´Ñ–Ð¹4Ð½Ñ Ð½Ð°Ð¹4Ð½Ñ Ð¾Ð±Ñ–Ð¹4Ð½Ñ Ð¿ÐµÑ€ÐµÐ¹4Ð½Ñ Ð¿Ñ–Ð´Ñ–Ð¹4Ð½Ñ Ð¿Ñ€Ð¸Ð¹4м пій4м дій4ма вий4м най4ма зай4м д4о3й4м обой4м прой4м обій4м перей4м безу4гл безу4пин бло4к3поÑÑ‚ .блі4ц3ана .блі4ц3криг .блі4ц3опит .блі4ц3торг .блі4ц3тур .блі4ц3Ñ–4Ñпит о3а4наліз бак3а4наліз ц3а4наліз ген3а4наліз з3а4наліз м3а4наліз нт3а4наліз між3а4наліз полі3а4наліз ре3а4наліз оу4год ау4год еу4год пів3у4год роз3у4год гоÑ4п3у4год ео4Ð¿Ð¸Ñ Ð¾Ð¾4Ð¿Ð¸Ñ Ð°Ð¾4Ð¿Ð¸Ñ Ð±Ð¾4Ñ€4Ñ‚3мех бо4Ñ€4Ñ‚3о4пер бо4Ñ€4Ñ‚3про бо4Ñ€4Ñ‚3рад бо4Ñ€4Ñ‚3Ñ–4нж оа4каці оу4Ñ Ð¾Ð¾4держ оа4на біблі4о .на3в4ч .ви3в4ч .до3в4ч .за3в4ч .по3в4ч .при3в4ч ана3в4ч ена3в4ч мона3в4ч жона3в4ч іона3в4ч ови3в4ч еви3в4ч едо3в4ч оза3в4ч по3в4ча .ом4рі е3м4рій .ви3м4Ñ€ .віді3м4Ñ€ .зав3м4Ñ€ .за3м4Ñ€ .зі3м4Ñ€ .на3м4Ñ€ .пере3м4Ñ€ .по3м4Ñ€ .при3м4Ñ€ .роз3м4Ñ€ .ум4ри .ум4рі .ум4ру .ум4ре во4Ñтаннє най3о4Ñтанн перед3о4Ñтанн ие4Ñтет ое4Ñтет ее4Ñтет й3е4Ñтет пан3е4Ñтет пар3е4Ñтет оо4ктан іо4ктан оо4плачув ео4плачув перед3о4пла виу4ди о3в4каз е3в4каз 8-7 --8 .а-8 .б-8 .в-8 .г-8 .Ò‘-8 .д-8 .е-8 .Ñ”-8 .ж-8 .з-8 .и-8 .Ñ–-8 .Ñ—-8 .й-8 .к-8 .л-8 .м-8 .н-8 .о-8 .п-8 .Ñ€-8 .Ñ-8 .Ñ‚-8 .у-8 .Ñ„-8 .Ñ…-8 .ц-8 .ч-8 .ш-8 .щ-8 .ÑŒ-8 .ÑŽ-8 .Ñ-8 .'-8 .’-8 -а8а8 8а8а- -а8б8 8а8б- -а8в8 8а8в- -а8г8 8а8г- -а8Ò‘8 8а8Ò‘- -а8д8 8а8д- -а8е8 8а8е- -а8Ñ”8 8а8Ñ”- -а8ж8 8а8ж- -а8з8 8а8з- -а8и8 8а8и- -а8Ñ–8 8а8Ñ–- -а8Ñ—8 8а8Ñ—- -а8й8 8а8й- -а8к8 8а8к- -а8л8 8а8л- -а8м8 8а8м- -а8н8 8а8н- -а8о8 8а8о- -а8п8 8а8п- -а8Ñ€8 8а8Ñ€- -а8Ñ8 8а8Ñ- -а8Ñ‚8 8а8Ñ‚- -а8у8 8а8у- -а8Ñ„8 8а8Ñ„- -а8Ñ…8 8а8Ñ…- -а8ц8 8а8ц- -а8ч8 8а8ч- -а8ш8 8а8ш- -а8щ8 8а8щ- -а8ÑŒ8 8а8ÑŒ- -а8ÑŽ8 8а8ÑŽ- -а8Ñ8 8а8Ñ- -а8'8 -а8’8 8а8'- 8а8’- -б8а8 8б8а- -б8б8 8б8б- -б8в8 8б8в- -б8г8 8б8г- -б8Ò‘8 8б8Ò‘- -б8д8 8б8д- -б8е8 8б8е- -б8Ñ”8 8б8Ñ”- -б8ж8 8б8ж- -б8з8 8б8з- -б8и8 8б8и- -б8Ñ–8 8б8Ñ–- -б8Ñ—8 8б8Ñ—- -б8й8 8б8й- -б8к8 8б8к- -б8л8 8б8л- -б8м8 8б8м- -б8н8 8б8н- -б8о8 8б8о- -б8п8 8б8п- -б8Ñ€8 8б8Ñ€- -б8Ñ8 8б8Ñ- -б8Ñ‚8 8б8Ñ‚- -б8у8 8б8у- -б8Ñ„8 8б8Ñ„- -б8Ñ…8 8б8Ñ…- -б8ц8 8б8ц- -б8ч8 8б8ч- -б8ш8 8б8ш- -б8щ8 8б8щ- -б8ÑŒ8 8б8ÑŒ- -б8ÑŽ8 8б8ÑŽ- -б8Ñ8 8б8Ñ- -б8'8 -б8’8 8б8'- 8б8’- -в8а8 8в8а- -в8б8 8в8б- -в8в8 8в8в- -в8г8 8в8г- -в8Ò‘8 8в8Ò‘- -в8д8 8в8д- -в8е8 8в8е- -в8Ñ”8 8в8Ñ”- -в8ж8 8в8ж- -в8з8 8в8з- -в8и8 8в8и- 
-в8Ñ–8 8в8Ñ–- -в8Ñ—8 8в8Ñ—- -в8й8 8в8й- -в8к8 8в8к- -в8л8 8в8л- -в8м8 8в8м- -в8н8 8в8н- -в8о8 8в8о- -в8п8 8в8п- -в8Ñ€8 8в8Ñ€- -в8Ñ8 8в8Ñ- -в8Ñ‚8 8в8Ñ‚- -в8у8 8в8у- -в8Ñ„8 8в8Ñ„- -в8Ñ…8 8в8Ñ…- -в8ц8 8в8ц- -в8ч8 8в8ч- -в8ш8 8в8ш- -в8щ8 8в8щ- -в8ÑŒ8 8в8ÑŒ- -в8ÑŽ8 8в8ÑŽ- -в8Ñ8 8в8Ñ- -в8'8 -в8’8 8в8'- 8в8’- -г8а8 8г8а- -г8б8 8г8б- -г8в8 8г8в- -г8г8 8г8г- -г8Ò‘8 8г8Ò‘- -г8д8 8г8д- -г8е8 8г8е- -г8Ñ”8 8г8Ñ”- -г8ж8 8г8ж- -г8з8 8г8з- -г8и8 8г8и- -г8Ñ–8 8г8Ñ–- -г8Ñ—8 8г8Ñ—- -г8й8 8г8й- -г8к8 8г8к- -г8л8 8г8л- -г8м8 8г8м- -г8н8 8г8н- -г8о8 8г8о- -г8п8 8г8п- -г8Ñ€8 8г8Ñ€- -г8Ñ8 8г8Ñ- -г8Ñ‚8 8г8Ñ‚- -г8у8 8г8у- -г8Ñ„8 8г8Ñ„- -г8Ñ…8 8г8Ñ…- -г8ц8 8г8ц- -г8ч8 8г8ч- -г8ш8 8г8ш- -г8щ8 8г8щ- -г8ÑŒ8 8г8ÑŒ- -г8ÑŽ8 8г8ÑŽ- -г8Ñ8 8г8Ñ- -г8'8 -г8’8 8г8'- 8г8’- -Ò‘8а8 8Ò‘8а- -Ò‘8б8 8Ò‘8б- -Ò‘8в8 8Ò‘8в- -Ò‘8г8 8Ò‘8г- -Ò‘8Ò‘8 8Ò‘8Ò‘- -Ò‘8д8 8Ò‘8д- -Ò‘8е8 8Ò‘8е- -Ò‘8Ñ”8 8Ò‘8Ñ”- -Ò‘8ж8 8Ò‘8ж- -Ò‘8з8 8Ò‘8з- -Ò‘8и8 8Ò‘8и- -Ò‘8Ñ–8 8Ò‘8Ñ–- -Ò‘8Ñ—8 8Ò‘8Ñ—- -Ò‘8й8 8Ò‘8й- -Ò‘8к8 8Ò‘8к- -Ò‘8л8 8Ò‘8л- -Ò‘8м8 8Ò‘8м- -Ò‘8н8 8Ò‘8н- -Ò‘8о8 8Ò‘8о- -Ò‘8п8 8Ò‘8п- -Ò‘8Ñ€8 8Ò‘8Ñ€- -Ò‘8Ñ8 8Ò‘8Ñ- -Ò‘8Ñ‚8 8Ò‘8Ñ‚- -Ò‘8у8 8Ò‘8у- -Ò‘8Ñ„8 8Ò‘8Ñ„- -Ò‘8Ñ…8 8Ò‘8Ñ…- -Ò‘8ц8 8Ò‘8ц- -Ò‘8ч8 8Ò‘8ч- -Ò‘8ш8 8Ò‘8ш- -Ò‘8щ8 8Ò‘8щ- -Ò‘8ÑŒ8 8Ò‘8ÑŒ- -Ò‘8ÑŽ8 8Ò‘8ÑŽ- -Ò‘8Ñ8 8Ò‘8Ñ- -Ò‘8'8 -Ò‘8’8 8Ò‘8'- 8Ò‘8’- -д8а8 8д8а- -д8б8 8д8б- -д8в8 8д8в- -д8г8 8д8г- -д8Ò‘8 8д8Ò‘- -д8д8 8д8д- -д8е8 8д8е- -д8Ñ”8 8д8Ñ”- -д8ж8 8д8ж- -д8з8 8д8з- -д8и8 8д8и- -д8Ñ–8 8д8Ñ–- -д8Ñ—8 8д8Ñ—- -д8й8 8д8й- -д8к8 8д8к- -д8л8 8д8л- -д8м8 8д8м- -д8н8 8д8н- -д8о8 8д8о- -д8п8 8д8п- -д8Ñ€8 8д8Ñ€- -д8Ñ8 8д8Ñ- -д8Ñ‚8 8д8Ñ‚- -д8у8 8д8у- -д8Ñ„8 8д8Ñ„- -д8Ñ…8 8д8Ñ…- -д8ц8 8д8ц- -д8ч8 8д8ч- -д8ш8 8д8ш- -д8щ8 8д8щ- -д8ÑŒ8 8д8ÑŒ- -д8ÑŽ8 8д8ÑŽ- -д8Ñ8 8д8Ñ- -д8'8 -д8’8 8д8'- 8д8’- -е8а8 8е8а- -е8б8 8е8б- -е8в8 8е8в- -е8г8 8е8г- -е8Ò‘8 8е8Ò‘- -е8д8 8е8д- -е8е8 8е8е- -е8Ñ”8 8е8Ñ”- -е8ж8 8е8ж- -е8з8 8е8з- -е8и8 8е8и- -е8Ñ–8 8е8Ñ–- -е8Ñ—8 8е8Ñ—- -е8й8 8е8й- -е8к8 8е8к- -е8л8 8е8л- -е8м8 8е8м- -е8н8 8е8н- -е8о8 8е8о- -е8п8 8е8п- -е8Ñ€8 8е8Ñ€- -е8Ñ8 8е8Ñ- -е8Ñ‚8 8е8Ñ‚- -е8у8 8е8у- -е8Ñ„8 8е8Ñ„- -е8Ñ…8 8е8Ñ…- -е8ц8 8е8ц- -е8ч8 8е8ч- -е8ш8 8е8ш- -е8щ8 8е8щ- -е8ÑŒ8 8е8ÑŒ- -е8ÑŽ8 8е8ÑŽ- -е8Ñ8 8е8Ñ- -е8'8 -е8’8 8е8'- 8е8’- -Ñ”8а8 8Ñ”8а- -Ñ”8б8 8Ñ”8б- -Ñ”8в8 8Ñ”8в- -Ñ”8г8 8Ñ”8г- -Ñ”8Ò‘8 8Ñ”8Ò‘- -Ñ”8д8 8Ñ”8д- -Ñ”8е8 8Ñ”8е- -Ñ”8Ñ”8 8Ñ”8Ñ”- -Ñ”8ж8 8Ñ”8ж- -Ñ”8з8 8Ñ”8з- -Ñ”8и8 8Ñ”8и- -Ñ”8Ñ–8 8Ñ”8Ñ–- -Ñ”8Ñ—8 8Ñ”8Ñ—- -Ñ”8й8 8Ñ”8й- -Ñ”8к8 8Ñ”8к- -Ñ”8л8 8Ñ”8л- -Ñ”8м8 8Ñ”8м- -Ñ”8н8 8Ñ”8н- -Ñ”8о8 8Ñ”8о- -Ñ”8п8 8Ñ”8п- -Ñ”8Ñ€8 8Ñ”8Ñ€- -Ñ”8Ñ8 8Ñ”8Ñ- -Ñ”8Ñ‚8 8Ñ”8Ñ‚- -Ñ”8у8 8Ñ”8у- -Ñ”8Ñ„8 8Ñ”8Ñ„- -Ñ”8Ñ…8 8Ñ”8Ñ…- -Ñ”8ц8 8Ñ”8ц- -Ñ”8ч8 8Ñ”8ч- -Ñ”8ш8 8Ñ”8ш- -Ñ”8щ8 8Ñ”8щ- -Ñ”8ÑŒ8 8Ñ”8ÑŒ- -Ñ”8ÑŽ8 8Ñ”8ÑŽ- -Ñ”8Ñ8 8Ñ”8Ñ- -Ñ”8'8 -Ñ”8’8 8Ñ”8'- 8Ñ”8’- -ж8а8 8ж8а- -ж8б8 8ж8б- -ж8в8 8ж8в- -ж8г8 8ж8г- -ж8Ò‘8 8ж8Ò‘- -ж8д8 8ж8д- -ж8е8 8ж8е- -ж8Ñ”8 8ж8Ñ”- -ж8ж8 8ж8ж- -ж8з8 8ж8з- -ж8и8 8ж8и- -ж8Ñ–8 8ж8Ñ–- -ж8Ñ—8 8ж8Ñ—- -ж8й8 8ж8й- -ж8к8 8ж8к- -ж8л8 8ж8л- -ж8м8 8ж8м- -ж8н8 8ж8н- -ж8о8 8ж8о- -ж8п8 8ж8п- -ж8Ñ€8 8ж8Ñ€- -ж8Ñ8 8ж8Ñ- -ж8Ñ‚8 8ж8Ñ‚- -ж8у8 8ж8у- -ж8Ñ„8 8ж8Ñ„- -ж8Ñ…8 8ж8Ñ…- -ж8ц8 8ж8ц- -ж8ч8 8ж8ч- -ж8ш8 8ж8ш- -ж8щ8 8ж8щ- -ж8ÑŒ8 8ж8ÑŒ- -ж8ÑŽ8 8ж8ÑŽ- -ж8Ñ8 8ж8Ñ- -ж8'8 -ж8’8 8ж8'- 8ж8’- -з8а8 8з8а- -з8б8 8з8б- -з8в8 8з8в- -з8г8 8з8г- -з8Ò‘8 8з8Ò‘- -з8д8 8з8д- -з8е8 8з8е- -з8Ñ”8 8з8Ñ”- -з8ж8 8з8ж- -з8з8 8з8з- -з8и8 8з8и- -з8Ñ–8 8з8Ñ–- -з8Ñ—8 8з8Ñ—- -з8й8 8з8й- -з8к8 8з8к- -з8л8 8з8л- -з8м8 8з8м- -з8н8 8з8н- -з8о8 8з8о- -з8п8 8з8п- -з8Ñ€8 8з8Ñ€- -з8Ñ8 8з8Ñ- -з8Ñ‚8 8з8Ñ‚- -з8у8 8з8у- -з8Ñ„8 8з8Ñ„- -з8Ñ…8 8з8Ñ…- -з8ц8 8з8ц- -з8ч8 8з8ч- -з8ш8 8з8ш- -з8щ8 8з8щ- -з8ÑŒ8 8з8ÑŒ- -з8ÑŽ8 8з8ÑŽ- -з8Ñ8 8з8Ñ- -з8'8 -з8’8 8з8'- 8з8’- -и8а8 8и8а- -и8б8 8и8б- -и8в8 
8и8в- -и8г8 8и8г- -и8Ò‘8 8и8Ò‘- -и8д8 8и8д- -и8е8 8и8е- -и8Ñ”8 8и8Ñ”- -и8ж8 8и8ж- -и8з8 8и8з- -и8и8 8и8и- -и8Ñ–8 8и8Ñ–- -и8Ñ—8 8и8Ñ—- -и8й8 8и8й- -и8к8 8и8к- -и8л8 8и8л- -и8м8 8и8м- -и8н8 8и8н- -и8о8 8и8о- -и8п8 8и8п- -и8Ñ€8 8и8Ñ€- -и8Ñ8 8и8Ñ- -и8Ñ‚8 8и8Ñ‚- -и8у8 8и8у- -и8Ñ„8 8и8Ñ„- -и8Ñ…8 8и8Ñ…- -и8ц8 8и8ц- -и8ч8 8и8ч- -и8ш8 8и8ш- -и8щ8 8и8щ- -и8ÑŒ8 8и8ÑŒ- -и8ÑŽ8 8и8ÑŽ- -и8Ñ8 8и8Ñ- -и8'8 -и8’8 8и8'- 8и8’- -Ñ–8а8 8Ñ–8а- -Ñ–8б8 8Ñ–8б- -Ñ–8в8 8Ñ–8в- -Ñ–8г8 8Ñ–8г- -Ñ–8Ò‘8 8Ñ–8Ò‘- -Ñ–8д8 8Ñ–8д- -Ñ–8е8 8Ñ–8е- -Ñ–8Ñ”8 8Ñ–8Ñ”- -Ñ–8ж8 8Ñ–8ж- -Ñ–8з8 8Ñ–8з- -Ñ–8и8 8Ñ–8и- -Ñ–8Ñ–8 8Ñ–8Ñ–- -Ñ–8Ñ—8 8Ñ–8Ñ—- -Ñ–8й8 8Ñ–8й- -Ñ–8к8 8Ñ–8к- -Ñ–8л8 8Ñ–8л- -Ñ–8м8 8Ñ–8м- -Ñ–8н8 8Ñ–8н- -Ñ–8о8 8Ñ–8о- -Ñ–8п8 8Ñ–8п- -Ñ–8Ñ€8 8Ñ–8Ñ€- -Ñ–8Ñ8 8Ñ–8Ñ- -Ñ–8Ñ‚8 8Ñ–8Ñ‚- -Ñ–8у8 8Ñ–8у- -Ñ–8Ñ„8 8Ñ–8Ñ„- -Ñ–8Ñ…8 8Ñ–8Ñ…- -Ñ–8ц8 8Ñ–8ц- -Ñ–8ч8 8Ñ–8ч- -Ñ–8ш8 8Ñ–8ш- -Ñ–8щ8 8Ñ–8щ- -Ñ–8ÑŒ8 8Ñ–8ÑŒ- -Ñ–8ÑŽ8 8Ñ–8ÑŽ- -Ñ–8Ñ8 8Ñ–8Ñ- -Ñ–8'8 -Ñ–8’8 8Ñ–8'- 8Ñ–8’- -Ñ—8а8 8Ñ—8а- -Ñ—8б8 8Ñ—8б- -Ñ—8в8 8Ñ—8в- -Ñ—8г8 8Ñ—8г- -Ñ—8Ò‘8 8Ñ—8Ò‘- -Ñ—8д8 8Ñ—8д- -Ñ—8е8 8Ñ—8е- -Ñ—8Ñ”8 8Ñ—8Ñ”- -Ñ—8ж8 8Ñ—8ж- -Ñ—8з8 8Ñ—8з- -Ñ—8и8 8Ñ—8и- -Ñ—8Ñ–8 8Ñ—8Ñ–- -Ñ—8Ñ—8 8Ñ—8Ñ—- -Ñ—8й8 8Ñ—8й- -Ñ—8к8 8Ñ—8к- -Ñ—8л8 8Ñ—8л- -Ñ—8м8 8Ñ—8м- -Ñ—8н8 8Ñ—8н- -Ñ—8о8 8Ñ—8о- -Ñ—8п8 8Ñ—8п- -Ñ—8Ñ€8 8Ñ—8Ñ€- -Ñ—8Ñ8 8Ñ—8Ñ- -Ñ—8Ñ‚8 8Ñ—8Ñ‚- -Ñ—8у8 8Ñ—8у- -Ñ—8Ñ„8 8Ñ—8Ñ„- -Ñ—8Ñ…8 8Ñ—8Ñ…- -Ñ—8ц8 8Ñ—8ц- -Ñ—8ч8 8Ñ—8ч- -Ñ—8ш8 8Ñ—8ш- -Ñ—8щ8 8Ñ—8щ- -Ñ—8ÑŒ8 8Ñ—8ÑŒ- -Ñ—8ÑŽ8 8Ñ—8ÑŽ- -Ñ—8Ñ8 8Ñ—8Ñ- -Ñ—8'8 -Ñ—8’8 8Ñ—8'- 8Ñ—8’- -й8а8 8й8а- -й8б8 8й8б- -й8в8 8й8в- -й8г8 8й8г- -й8Ò‘8 8й8Ò‘- -й8д8 8й8д- -й8е8 8й8е- -й8Ñ”8 8й8Ñ”- -й8ж8 8й8ж- -й8з8 8й8з- -й8и8 8й8и- -й8Ñ–8 8й8Ñ–- -й8Ñ—8 8й8Ñ—- -й8й8 8й8й- -й8к8 8й8к- -й8л8 8й8л- -й8м8 8й8м- -й8н8 8й8н- -й8о8 8й8о- -й8п8 8й8п- -й8Ñ€8 8й8Ñ€- -й8Ñ8 8й8Ñ- -й8Ñ‚8 8й8Ñ‚- -й8у8 8й8у- -й8Ñ„8 8й8Ñ„- -й8Ñ…8 8й8Ñ…- -й8ц8 8й8ц- -й8ч8 8й8ч- -й8ш8 8й8ш- -й8щ8 8й8щ- -й8ÑŒ8 8й8ÑŒ- -й8ÑŽ8 8й8ÑŽ- -й8Ñ8 8й8Ñ- -й8'8 -й8’8 8й8'- 8й8’- -к8а8 8к8а- -к8б8 8к8б- -к8в8 8к8в- -к8г8 8к8г- -к8Ò‘8 8к8Ò‘- -к8д8 8к8д- -к8е8 8к8е- -к8Ñ”8 8к8Ñ”- -к8ж8 8к8ж- -к8з8 8к8з- -к8и8 8к8и- -к8Ñ–8 8к8Ñ–- -к8Ñ—8 8к8Ñ—- -к8й8 8к8й- -к8к8 8к8к- -к8л8 8к8л- -к8м8 8к8м- -к8н8 8к8н- -к8о8 8к8о- -к8п8 8к8п- -к8Ñ€8 8к8Ñ€- -к8Ñ8 8к8Ñ- -к8Ñ‚8 8к8Ñ‚- -к8у8 8к8у- -к8Ñ„8 8к8Ñ„- -к8Ñ…8 8к8Ñ…- -к8ц8 8к8ц- -к8ч8 8к8ч- -к8ш8 8к8ш- -к8щ8 8к8щ- -к8ÑŒ8 8к8ÑŒ- -к8ÑŽ8 8к8ÑŽ- -к8Ñ8 8к8Ñ- -к8'8 -к8’8 8к8'- 8к8’- -л8а8 8л8а- -л8б8 8л8б- -л8в8 8л8в- -л8г8 8л8г- -л8Ò‘8 8л8Ò‘- -л8д8 8л8д- -л8е8 8л8е- -л8Ñ”8 8л8Ñ”- -л8ж8 8л8ж- -л8з8 8л8з- -л8и8 8л8и- -л8Ñ–8 8л8Ñ–- -л8Ñ—8 8л8Ñ—- -л8й8 8л8й- -л8к8 8л8к- -л8л8 8л8л- -л8м8 8л8м- -л8н8 8л8н- -л8о8 8л8о- -л8п8 8л8п- -л8Ñ€8 8л8Ñ€- -л8Ñ8 8л8Ñ- -л8Ñ‚8 8л8Ñ‚- -л8у8 8л8у- -л8Ñ„8 8л8Ñ„- -л8Ñ…8 8л8Ñ…- -л8ц8 8л8ц- -л8ч8 8л8ч- -л8ш8 8л8ш- -л8щ8 8л8щ- -л8ÑŒ8 8л8ÑŒ- -л8ÑŽ8 8л8ÑŽ- -л8Ñ8 8л8Ñ- -л8'8 -л8’8 8л8'- 8л8’- -м8а8 8м8а- -м8б8 8м8б- -м8в8 8м8в- -м8г8 8м8г- -м8Ò‘8 8м8Ò‘- -м8д8 8м8д- -м8е8 8м8е- -м8Ñ”8 8м8Ñ”- -м8ж8 8м8ж- -м8з8 8м8з- -м8и8 8м8и- -м8Ñ–8 8м8Ñ–- -м8Ñ—8 8м8Ñ—- -м8й8 8м8й- -м8к8 8м8к- -м8л8 8м8л- -м8м8 8м8м- -м8н8 8м8н- -м8о8 8м8о- -м8п8 8м8п- -м8Ñ€8 8м8Ñ€- -м8Ñ8 8м8Ñ- -м8Ñ‚8 8м8Ñ‚- -м8у8 8м8у- -м8Ñ„8 8м8Ñ„- -м8Ñ…8 8м8Ñ…- -м8ц8 8м8ц- -м8ч8 8м8ч- -м8ш8 8м8ш- -м8щ8 8м8щ- -м8ÑŒ8 8м8ÑŒ- -м8ÑŽ8 8м8ÑŽ- -м8Ñ8 8м8Ñ- -м8'8 -м8’8 8м8'- 8м8’- -н8а8 8н8а- -н8б8 8н8б- -н8в8 8н8в- -н8г8 8н8г- -н8Ò‘8 8н8Ò‘- -н8д8 8н8д- -н8е8 8н8е- -н8Ñ”8 8н8Ñ”- -н8ж8 8н8ж- -н8з8 8н8з- -н8и8 8н8и- -н8Ñ–8 8н8Ñ–- -н8Ñ—8 8н8Ñ—- -н8й8 8н8й- -н8к8 8н8к- -н8л8 8н8л- -н8м8 8н8м- -н8н8 8н8н- -н8о8 8н8о- -н8п8 8н8п- -н8Ñ€8 8н8Ñ€- -н8Ñ8 8н8Ñ- -н8Ñ‚8 8н8Ñ‚- -н8у8 8н8у- -н8Ñ„8 8н8Ñ„- -н8Ñ…8 8н8Ñ…- -н8ц8 8н8ц- -н8ч8 8н8ч- -н8ш8 8н8ш- 
-н8щ8 8н8щ- -н8ÑŒ8 8н8ÑŒ- -н8ÑŽ8 8н8ÑŽ- -н8Ñ8 8н8Ñ- -н8'8 -н8’8 8н8'- 8н8’- -о8а8 8о8а- -о8б8 8о8б- -о8в8 8о8в- -о8г8 8о8г- -о8Ò‘8 8о8Ò‘- -о8д8 8о8д- -о8е8 8о8е- -о8Ñ”8 8о8Ñ”- -о8ж8 8о8ж- -о8з8 8о8з- -о8и8 8о8и- -о8Ñ–8 8о8Ñ–- -о8Ñ—8 8о8Ñ—- -о8й8 8о8й- -о8к8 8о8к- -о8л8 8о8л- -о8м8 8о8м- -о8н8 8о8н- -о8о8 8о8о- -о8п8 8о8п- -о8Ñ€8 8о8Ñ€- -о8Ñ8 8о8Ñ- -о8Ñ‚8 8о8Ñ‚- -о8у8 8о8у- -о8Ñ„8 8о8Ñ„- -о8Ñ…8 8о8Ñ…- -о8ц8 8о8ц- -о8ч8 8о8ч- -о8ш8 8о8ш- -о8щ8 8о8щ- -о8ÑŒ8 8о8ÑŒ- -о8ÑŽ8 8о8ÑŽ- -о8Ñ8 8о8Ñ- -о8'8 -о8’8 8о8'- 8о8’- -п8а8 8п8а- -п8б8 8п8б- -п8в8 8п8в- -п8г8 8п8г- -п8Ò‘8 8п8Ò‘- -п8д8 8п8д- -п8е8 8п8е- -п8Ñ”8 8п8Ñ”- -п8ж8 8п8ж- -п8з8 8п8з- -п8и8 8п8и- -п8Ñ–8 8п8Ñ–- -п8Ñ—8 8п8Ñ—- -п8й8 8п8й- -п8к8 8п8к- -п8л8 8п8л- -п8м8 8п8м- -п8н8 8п8н- -п8о8 8п8о- -п8п8 8п8п- -п8Ñ€8 8п8Ñ€- -п8Ñ8 8п8Ñ- -п8Ñ‚8 8п8Ñ‚- -п8у8 8п8у- -п8Ñ„8 8п8Ñ„- -п8Ñ…8 8п8Ñ…- -п8ц8 8п8ц- -п8ч8 8п8ч- -п8ш8 8п8ш- -п8щ8 8п8щ- -п8ÑŒ8 8п8ÑŒ- -п8ÑŽ8 8п8ÑŽ- -п8Ñ8 8п8Ñ- -п8'8 -п8’8 8п8'- 8п8’- -Ñ€8а8 8Ñ€8а- -Ñ€8б8 8Ñ€8б- -Ñ€8в8 8Ñ€8в- -Ñ€8г8 8Ñ€8г- -Ñ€8Ò‘8 8Ñ€8Ò‘- -Ñ€8д8 8Ñ€8д- -Ñ€8е8 8Ñ€8е- -Ñ€8Ñ”8 8Ñ€8Ñ”- -Ñ€8ж8 8Ñ€8ж- -Ñ€8з8 8Ñ€8з- -Ñ€8и8 8Ñ€8и- -Ñ€8Ñ–8 8Ñ€8Ñ–- -Ñ€8Ñ—8 8Ñ€8Ñ—- -Ñ€8й8 8Ñ€8й- -Ñ€8к8 8Ñ€8к- -Ñ€8л8 8Ñ€8л- -Ñ€8м8 8Ñ€8м- -Ñ€8н8 8Ñ€8н- -Ñ€8о8 8Ñ€8о- -Ñ€8п8 8Ñ€8п- -Ñ€8Ñ€8 8Ñ€8Ñ€- -Ñ€8Ñ8 8Ñ€8Ñ- -Ñ€8Ñ‚8 8Ñ€8Ñ‚- -Ñ€8у8 8Ñ€8у- -Ñ€8Ñ„8 8Ñ€8Ñ„- -Ñ€8Ñ…8 8Ñ€8Ñ…- -Ñ€8ц8 8Ñ€8ц- -Ñ€8ч8 8Ñ€8ч- -Ñ€8ш8 8Ñ€8ш- -Ñ€8щ8 8Ñ€8щ- -Ñ€8ÑŒ8 8Ñ€8ÑŒ- -Ñ€8ÑŽ8 8Ñ€8ÑŽ- -Ñ€8Ñ8 8Ñ€8Ñ- -Ñ€8'8 -Ñ€8’8 8Ñ€8'- 8Ñ€8’- -Ñ8а8 8Ñ8а- -Ñ8б8 8Ñ8б- -Ñ8в8 8Ñ8в- -Ñ8г8 8Ñ8г- -Ñ8Ò‘8 8Ñ8Ò‘- -Ñ8д8 8Ñ8д- -Ñ8е8 8Ñ8е- -Ñ8Ñ”8 8Ñ8Ñ”- -Ñ8ж8 8Ñ8ж- -Ñ8з8 8Ñ8з- -Ñ8и8 8Ñ8и- -Ñ8Ñ–8 8Ñ8Ñ–- -Ñ8Ñ—8 8Ñ8Ñ—- -Ñ8й8 8Ñ8й- -Ñ8к8 8Ñ8к- -Ñ8л8 8Ñ8л- -Ñ8м8 8Ñ8м- -Ñ8н8 8Ñ8н- -Ñ8о8 8Ñ8о- -Ñ8п8 8Ñ8п- -Ñ8Ñ€8 8Ñ8Ñ€- -Ñ8Ñ8 8Ñ8Ñ- -Ñ8Ñ‚8 8Ñ8Ñ‚- -Ñ8у8 8Ñ8у- -Ñ8Ñ„8 8Ñ8Ñ„- -Ñ8Ñ…8 8Ñ8Ñ…- -Ñ8ц8 8Ñ8ц- -Ñ8ч8 8Ñ8ч- -Ñ8ш8 8Ñ8ш- -Ñ8щ8 8Ñ8щ- -Ñ8ÑŒ8 8Ñ8ÑŒ- -Ñ8ÑŽ8 8Ñ8ÑŽ- -Ñ8Ñ8 8Ñ8Ñ- -Ñ8'8 -Ñ8’8 8Ñ8'- 8Ñ8’- -Ñ‚8а8 8Ñ‚8а- -Ñ‚8б8 8Ñ‚8б- -Ñ‚8в8 8Ñ‚8в- -Ñ‚8г8 8Ñ‚8г- -Ñ‚8Ò‘8 8Ñ‚8Ò‘- -Ñ‚8д8 8Ñ‚8д- -Ñ‚8е8 8Ñ‚8е- -Ñ‚8Ñ”8 8Ñ‚8Ñ”- -Ñ‚8ж8 8Ñ‚8ж- -Ñ‚8з8 8Ñ‚8з- -Ñ‚8и8 8Ñ‚8и- -Ñ‚8Ñ–8 8Ñ‚8Ñ–- -Ñ‚8Ñ—8 8Ñ‚8Ñ—- -Ñ‚8й8 8Ñ‚8й- -Ñ‚8к8 8Ñ‚8к- -Ñ‚8л8 8Ñ‚8л- -Ñ‚8м8 8Ñ‚8м- -Ñ‚8н8 8Ñ‚8н- -Ñ‚8о8 8Ñ‚8о- -Ñ‚8п8 8Ñ‚8п- -Ñ‚8Ñ€8 8Ñ‚8Ñ€- -Ñ‚8Ñ8 8Ñ‚8Ñ- -Ñ‚8Ñ‚8 8Ñ‚8Ñ‚- -Ñ‚8у8 8Ñ‚8у- -Ñ‚8Ñ„8 8Ñ‚8Ñ„- -Ñ‚8Ñ…8 8Ñ‚8Ñ…- -Ñ‚8ц8 8Ñ‚8ц- -Ñ‚8ч8 8Ñ‚8ч- -Ñ‚8ш8 8Ñ‚8ш- -Ñ‚8щ8 8Ñ‚8щ- -Ñ‚8ÑŒ8 8Ñ‚8ÑŒ- -Ñ‚8ÑŽ8 8Ñ‚8ÑŽ- -Ñ‚8Ñ8 8Ñ‚8Ñ- -Ñ‚8'8 -Ñ‚8’8 8Ñ‚8'- 8Ñ‚8’- -у8а8 8у8а- -у8б8 8у8б- -у8в8 8у8в- -у8г8 8у8г- -у8Ò‘8 8у8Ò‘- -у8д8 8у8д- -у8е8 8у8е- -у8Ñ”8 8у8Ñ”- -у8ж8 8у8ж- -у8з8 8у8з- -у8и8 8у8и- -у8Ñ–8 8у8Ñ–- -у8Ñ—8 8у8Ñ—- -у8й8 8у8й- -у8к8 8у8к- -у8л8 8у8л- -у8м8 8у8м- -у8н8 8у8н- -у8о8 8у8о- -у8п8 8у8п- -у8Ñ€8 8у8Ñ€- -у8Ñ8 8у8Ñ- -у8Ñ‚8 8у8Ñ‚- -у8у8 8у8у- -у8Ñ„8 8у8Ñ„- -у8Ñ…8 8у8Ñ…- -у8ц8 8у8ц- -у8ч8 8у8ч- -у8ш8 8у8ш- -у8щ8 8у8щ- -у8ÑŒ8 8у8ÑŒ- -у8ÑŽ8 8у8ÑŽ- -у8Ñ8 8у8Ñ- -у8'8 -у8’8 8у8'- 8у8’- -Ñ„8а8 8Ñ„8а- -Ñ„8б8 8Ñ„8б- -Ñ„8в8 8Ñ„8в- -Ñ„8г8 8Ñ„8г- -Ñ„8Ò‘8 8Ñ„8Ò‘- -Ñ„8д8 8Ñ„8д- -Ñ„8е8 8Ñ„8е- -Ñ„8Ñ”8 8Ñ„8Ñ”- -Ñ„8ж8 8Ñ„8ж- -Ñ„8з8 8Ñ„8з- -Ñ„8и8 8Ñ„8и- -Ñ„8Ñ–8 8Ñ„8Ñ–- -Ñ„8Ñ—8 8Ñ„8Ñ—- -Ñ„8й8 8Ñ„8й- -Ñ„8к8 8Ñ„8к- -Ñ„8л8 8Ñ„8л- -Ñ„8м8 8Ñ„8м- -Ñ„8н8 8Ñ„8н- -Ñ„8о8 8Ñ„8о- -Ñ„8п8 8Ñ„8п- -Ñ„8Ñ€8 8Ñ„8Ñ€- -Ñ„8Ñ8 8Ñ„8Ñ- -Ñ„8Ñ‚8 8Ñ„8Ñ‚- -Ñ„8у8 8Ñ„8у- -Ñ„8Ñ„8 8Ñ„8Ñ„- -Ñ„8Ñ…8 8Ñ„8Ñ…- -Ñ„8ц8 8Ñ„8ц- -Ñ„8ч8 8Ñ„8ч- -Ñ„8ш8 8Ñ„8ш- -Ñ„8щ8 8Ñ„8щ- -Ñ„8ÑŒ8 8Ñ„8ÑŒ- -Ñ„8ÑŽ8 8Ñ„8ÑŽ- -Ñ„8Ñ8 8Ñ„8Ñ- -Ñ„8'8 -Ñ„8’8 8Ñ„8'- 8Ñ„8’- -Ñ…8а8 8Ñ…8а- -Ñ…8б8 8Ñ…8б- -Ñ…8в8 8Ñ…8в- -Ñ…8г8 8Ñ…8г- -Ñ…8Ò‘8 8Ñ…8Ò‘- -Ñ…8д8 8Ñ…8д- -Ñ…8е8 8Ñ…8е- -Ñ…8Ñ”8 8Ñ…8Ñ”- -Ñ…8ж8 8Ñ…8ж- -Ñ…8з8 8Ñ…8з- -Ñ…8и8 8Ñ…8и- -Ñ…8Ñ–8 8Ñ…8Ñ–- -Ñ…8Ñ—8 
8Ñ…8Ñ—- -Ñ…8й8 8Ñ…8й- -Ñ…8к8 8Ñ…8к- -Ñ…8л8 8Ñ…8л- -Ñ…8м8 8Ñ…8м- -Ñ…8н8 8Ñ…8н- -Ñ…8о8 8Ñ…8о- -Ñ…8п8 8Ñ…8п- -Ñ…8Ñ€8 8Ñ…8Ñ€- -Ñ…8Ñ8 8Ñ…8Ñ- -Ñ…8Ñ‚8 8Ñ…8Ñ‚- -Ñ…8у8 8Ñ…8у- -Ñ…8Ñ„8 8Ñ…8Ñ„- -Ñ…8Ñ…8 8Ñ…8Ñ…- -Ñ…8ц8 8Ñ…8ц- -Ñ…8ч8 8Ñ…8ч- -Ñ…8ш8 8Ñ…8ш- -Ñ…8щ8 8Ñ…8щ- -Ñ…8ÑŒ8 8Ñ…8ÑŒ- -Ñ…8ÑŽ8 8Ñ…8ÑŽ- -Ñ…8Ñ8 8Ñ…8Ñ- -Ñ…8'8 -Ñ…8’8 8Ñ…8'- 8Ñ…8’- -ц8а8 8ц8а- -ц8б8 8ц8б- -ц8в8 8ц8в- -ц8г8 8ц8г- -ц8Ò‘8 8ц8Ò‘- -ц8д8 8ц8д- -ц8е8 8ц8е- -ц8Ñ”8 8ц8Ñ”- -ц8ж8 8ц8ж- -ц8з8 8ц8з- -ц8и8 8ц8и- -ц8Ñ–8 8ц8Ñ–- -ц8Ñ—8 8ц8Ñ—- -ц8й8 8ц8й- -ц8к8 8ц8к- -ц8л8 8ц8л- -ц8м8 8ц8м- -ц8н8 8ц8н- -ц8о8 8ц8о- -ц8п8 8ц8п- -ц8Ñ€8 8ц8Ñ€- -ц8Ñ8 8ц8Ñ- -ц8Ñ‚8 8ц8Ñ‚- -ц8у8 8ц8у- -ц8Ñ„8 8ц8Ñ„- -ц8Ñ…8 8ц8Ñ…- -ц8ц8 8ц8ц- -ц8ч8 8ц8ч- -ц8ш8 8ц8ш- -ц8щ8 8ц8щ- -ц8ÑŒ8 8ц8ÑŒ- -ц8ÑŽ8 8ц8ÑŽ- -ц8Ñ8 8ц8Ñ- -ц8'8 -ц8’8 8ц8'- 8ц8’- -ч8а8 8ч8а- -ч8б8 8ч8б- -ч8в8 8ч8в- -ч8г8 8ч8г- -ч8Ò‘8 8ч8Ò‘- -ч8д8 8ч8д- -ч8е8 8ч8е- -ч8Ñ”8 8ч8Ñ”- -ч8ж8 8ч8ж- -ч8з8 8ч8з- -ч8и8 8ч8и- -ч8Ñ–8 8ч8Ñ–- -ч8Ñ—8 8ч8Ñ—- -ч8й8 8ч8й- -ч8к8 8ч8к- -ч8л8 8ч8л- -ч8м8 8ч8м- -ч8н8 8ч8н- -ч8о8 8ч8о- -ч8п8 8ч8п- -ч8Ñ€8 8ч8Ñ€- -ч8Ñ8 8ч8Ñ- -ч8Ñ‚8 8ч8Ñ‚- -ч8у8 8ч8у- -ч8Ñ„8 8ч8Ñ„- -ч8Ñ…8 8ч8Ñ…- -ч8ц8 8ч8ц- -ч8ч8 8ч8ч- -ч8ш8 8ч8ш- -ч8щ8 8ч8щ- -ч8ÑŒ8 8ч8ÑŒ- -ч8ÑŽ8 8ч8ÑŽ- -ч8Ñ8 8ч8Ñ- -ч8'8 -ч8’8 8ч8'- 8ч8’- -ш8а8 8ш8а- -ш8б8 8ш8б- -ш8в8 8ш8в- -ш8г8 8ш8г- -ш8Ò‘8 8ш8Ò‘- -ш8д8 8ш8д- -ш8е8 8ш8е- -ш8Ñ”8 8ш8Ñ”- -ш8ж8 8ш8ж- -ш8з8 8ш8з- -ш8и8 8ш8и- -ш8Ñ–8 8ш8Ñ–- -ш8Ñ—8 8ш8Ñ—- -ш8й8 8ш8й- -ш8к8 8ш8к- -ш8л8 8ш8л- -ш8м8 8ш8м- -ш8н8 8ш8н- -ш8о8 8ш8о- -ш8п8 8ш8п- -ш8Ñ€8 8ш8Ñ€- -ш8Ñ8 8ш8Ñ- -ш8Ñ‚8 8ш8Ñ‚- -ш8у8 8ш8у- -ш8Ñ„8 8ш8Ñ„- -ш8Ñ…8 8ш8Ñ…- -ш8ц8 8ш8ц- -ш8ч8 8ш8ч- -ш8ш8 8ш8ш- -ш8щ8 8ш8щ- -ш8ÑŒ8 8ш8ÑŒ- -ш8ÑŽ8 8ш8ÑŽ- -ш8Ñ8 8ш8Ñ- -ш8'8 -ш8’8 8ш8'- 8ш8’- -щ8а8 8щ8а- -щ8б8 8щ8б- -щ8в8 8щ8в- -щ8г8 8щ8г- -щ8Ò‘8 8щ8Ò‘- -щ8д8 8щ8д- -щ8е8 8щ8е- -щ8Ñ”8 8щ8Ñ”- -щ8ж8 8щ8ж- -щ8з8 8щ8з- -щ8и8 8щ8и- -щ8Ñ–8 8щ8Ñ–- -щ8Ñ—8 8щ8Ñ—- -щ8й8 8щ8й- -щ8к8 8щ8к- -щ8л8 8щ8л- -щ8м8 8щ8м- -щ8н8 8щ8н- -щ8о8 8щ8о- -щ8п8 8щ8п- -щ8Ñ€8 8щ8Ñ€- -щ8Ñ8 8щ8Ñ- -щ8Ñ‚8 8щ8Ñ‚- -щ8у8 8щ8у- -щ8Ñ„8 8щ8Ñ„- -щ8Ñ…8 8щ8Ñ…- -щ8ц8 8щ8ц- -щ8ч8 8щ8ч- -щ8ш8 8щ8ш- -щ8щ8 8щ8щ- -щ8ÑŒ8 8щ8ÑŒ- -щ8ÑŽ8 8щ8ÑŽ- -щ8Ñ8 8щ8Ñ- -щ8'8 -щ8’8 8щ8'- 8щ8’- -ÑŒ8а8 8ÑŒ8а- -ÑŒ8б8 8ÑŒ8б- -ÑŒ8в8 8ÑŒ8в- -ÑŒ8г8 8ÑŒ8г- -ÑŒ8Ò‘8 8ÑŒ8Ò‘- -ÑŒ8д8 8ÑŒ8д- -ÑŒ8е8 8ÑŒ8е- -ÑŒ8Ñ”8 8ÑŒ8Ñ”- -ÑŒ8ж8 8ÑŒ8ж- -ÑŒ8з8 8ÑŒ8з- -ÑŒ8и8 8ÑŒ8и- -ÑŒ8Ñ–8 8ÑŒ8Ñ–- -ÑŒ8Ñ—8 8ÑŒ8Ñ—- -ÑŒ8й8 8ÑŒ8й- -ÑŒ8к8 8ÑŒ8к- -ÑŒ8л8 8ÑŒ8л- -ÑŒ8м8 8ÑŒ8м- -ÑŒ8н8 8ÑŒ8н- -ÑŒ8о8 8ÑŒ8о- -ÑŒ8п8 8ÑŒ8п- -ÑŒ8Ñ€8 8ÑŒ8Ñ€- -ÑŒ8Ñ8 8ÑŒ8Ñ- -ÑŒ8Ñ‚8 8ÑŒ8Ñ‚- -ÑŒ8у8 8ÑŒ8у- -ÑŒ8Ñ„8 8ÑŒ8Ñ„- -ÑŒ8Ñ…8 8ÑŒ8Ñ…- -ÑŒ8ц8 8ÑŒ8ц- -ÑŒ8ч8 8ÑŒ8ч- -ÑŒ8ш8 8ÑŒ8ш- -ÑŒ8щ8 8ÑŒ8щ- -ÑŒ8ÑŒ8 8ÑŒ8ÑŒ- -ÑŒ8ÑŽ8 8ÑŒ8ÑŽ- -ÑŒ8Ñ8 8ÑŒ8Ñ- -ÑŒ8'8 -ÑŒ8’8 8ÑŒ8'- 8ÑŒ8’- -ÑŽ8а8 8ÑŽ8а- -ÑŽ8б8 8ÑŽ8б- -ÑŽ8в8 8ÑŽ8в- -ÑŽ8г8 8ÑŽ8г- -ÑŽ8Ò‘8 8ÑŽ8Ò‘- -ÑŽ8д8 8ÑŽ8д- -ÑŽ8е8 8ÑŽ8е- -ÑŽ8Ñ”8 8ÑŽ8Ñ”- -ÑŽ8ж8 8ÑŽ8ж- -ÑŽ8з8 8ÑŽ8з- -ÑŽ8и8 8ÑŽ8и- -ÑŽ8Ñ–8 8ÑŽ8Ñ–- -ÑŽ8Ñ—8 8ÑŽ8Ñ—- -ÑŽ8й8 8ÑŽ8й- -ÑŽ8к8 8ÑŽ8к- -ÑŽ8л8 8ÑŽ8л- -ÑŽ8м8 8ÑŽ8м- -ÑŽ8н8 8ÑŽ8н- -ÑŽ8о8 8ÑŽ8о- -ÑŽ8п8 8ÑŽ8п- -ÑŽ8Ñ€8 8ÑŽ8Ñ€- -ÑŽ8Ñ8 8ÑŽ8Ñ- -ÑŽ8Ñ‚8 8ÑŽ8Ñ‚- -ÑŽ8у8 8ÑŽ8у- -ÑŽ8Ñ„8 8ÑŽ8Ñ„- -ÑŽ8Ñ…8 8ÑŽ8Ñ…- -ÑŽ8ц8 8ÑŽ8ц- -ÑŽ8ч8 8ÑŽ8ч- -ÑŽ8ш8 8ÑŽ8ш- -ÑŽ8щ8 8ÑŽ8щ- -ÑŽ8ÑŒ8 8ÑŽ8ÑŒ- -ÑŽ8ÑŽ8 8ÑŽ8ÑŽ- -ÑŽ8Ñ8 8ÑŽ8Ñ- -ÑŽ8'8 -ÑŽ8’8 8ÑŽ8'- 8ÑŽ8’- -Ñ8а8 8Ñ8а- -Ñ8б8 8Ñ8б- -Ñ8в8 8Ñ8в- -Ñ8г8 8Ñ8г- -Ñ8Ò‘8 8Ñ8Ò‘- -Ñ8д8 8Ñ8д- -Ñ8е8 8Ñ8е- -Ñ8Ñ”8 8Ñ8Ñ”- -Ñ8ж8 8Ñ8ж- -Ñ8з8 8Ñ8з- -Ñ8и8 8Ñ8и- -Ñ8Ñ–8 8Ñ8Ñ–- -Ñ8Ñ—8 8Ñ8Ñ—- -Ñ8й8 8Ñ8й- -Ñ8к8 8Ñ8к- -Ñ8л8 8Ñ8л- -Ñ8м8 8Ñ8м- -Ñ8н8 8Ñ8н- -Ñ8о8 8Ñ8о- -Ñ8п8 8Ñ8п- -Ñ8Ñ€8 8Ñ8Ñ€- -Ñ8Ñ8 8Ñ8Ñ- -Ñ8Ñ‚8 8Ñ8Ñ‚- -Ñ8у8 8Ñ8у- -Ñ8Ñ„8 8Ñ8Ñ„- -Ñ8Ñ…8 8Ñ8Ñ…- -Ñ8ц8 8Ñ8ц- -Ñ8ч8 8Ñ8ч- -Ñ8ш8 8Ñ8ш- -Ñ8щ8 8Ñ8щ- -Ñ8ÑŒ8 8Ñ8ÑŒ- -Ñ8ÑŽ8 8Ñ8ÑŽ- -Ñ8Ñ8 8Ñ8Ñ- -Ñ8'8 -Ñ8’8 8Ñ8'- 8Ñ8’- -'8а8 
-’8а8 8'8а- 8’8а- -'8б8 -’8б8 8'8б- 8’8б- -'8в8 -’8в8 8'8в- 8’8в- -'8г8 -’8г8 8'8г- 8’8г- -'8Ò‘8 -’8Ò‘8 8'8Ò‘- 8’8Ò‘- -'8д8 -’8д8 8'8д- 8’8д- -'8е8 -’8е8 8'8е- 8’8е- -'8Ñ”8 -’8Ñ”8 8'8Ñ”- 8’8Ñ”- -'8ж8 -’8ж8 8'8ж- 8’8ж- -'8з8 -’8з8 8'8з- 8’8з- -'8и8 -’8и8 8'8и- 8’8и- -'8Ñ–8 -’8Ñ–8 8'8Ñ–- 8’8Ñ–- -'8Ñ—8 -’8Ñ—8 8'8Ñ—- 8’8Ñ—- -'8й8 -’8й8 8'8й- 8’8й- -'8к8 -’8к8 8'8к- 8’8к- -'8л8 -’8л8 8'8л- 8’8л- -'8м8 -’8м8 8'8м- 8’8м- -'8н8 -’8н8 8'8н- 8’8н- -'8о8 -’8о8 8'8о- 8’8о- -'8п8 -’8п8 8'8п- 8’8п- -'8Ñ€8 -’8Ñ€8 8'8Ñ€- 8’8Ñ€- -'8Ñ8 -’8Ñ8 8'8Ñ- 8’8Ñ- -'8Ñ‚8 -’8Ñ‚8 8'8Ñ‚- 8’8Ñ‚- -'8у8 -’8у8 8'8у- 8’8у- -'8Ñ„8 -’8Ñ„8 8'8Ñ„- 8’8Ñ„- -'8Ñ…8 -’8Ñ…8 8'8Ñ…- 8’8Ñ…- -'8ц8 -’8ц8 8'8ц- 8’8ц- -'8ч8 -’8ч8 8'8ч- 8’8ч- -'8ш8 -’8ш8 8'8ш- 8’8ш- -'8щ8 -’8щ8 8'8щ- 8’8щ- -'8ÑŒ8 -’8ÑŒ8 8'8ÑŒ- 8’8ÑŒ- -'8ÑŽ8 -’8ÑŽ8 8'8ÑŽ- 8’8ÑŽ- -'8Ñ8 -’8Ñ8 8'8Ñ- 8’8Ñ- -'8'8 -’8’8 8'8'- 8’8’-", + ["compression"]="zlib", + ["data"]="xÚm¼YŽä8Òv½•¸‹«* ;\"«ky®É%\7\28\18\26H¸ãßE¸æY[Èè%|+ùE³CŠÊz„Ó\30R\28ud\28¤È\127ÿúú×Û¯¯__òÛ˜ßï‡Ø«Ø‘üæò[ÈïOù}¾ýûW³gm$k#Y\27ÉÚHÖF²6’µ‘¬dm4k¿gí%k/Y{ÉÚKÖ^²ö’µ—¬½dí%ë÷ã_oß\15“Õü6òÛËïj~÷¬æ7—ßB~\127ʯ©uÝk]¥ÖUj]¥ÖUj]¥ÖUj]¥ÖUj]µÖh¯5’Z#©52Y½Š\29Éo.¿…üþ”_“5ß³æ’5—¬¹dÍ%k.YsÉšKÖ\\²æšµØ³\22’µ¬…d*$S!™\ +ÉTH¦B3ýÜ3ý”K~Ê%?%ñ§&>÷ħ”õ”²žæ’\127ÿzýëרÁ&Áw A¨A¬A¢ÁUƒTƒLƒÛ\30”ÿúõÒ Ò Ö Õ Ó`Ô`Ò`Ö`Ñ`“àû¢A A¨A¬A¢ÁUƒTƒL\3\0261˜àýÃüþ¿ËÿgÂJ«­´†J‹®´èJ‹®´ÌJˬ´ÌZóÕš¯Ö|µæ«5_­Mª5{­Ùk›]šÔj)­–Òj)­–Òj)­–Òj)­–Òj)fï4{§Ù;ÍÞiöN³wš½ÓìÍ.\24õæŒzsF½9£Þ•IÓö Ô ’à\127ÿUUkÐj@†QƒYƒEƒM\2¹\127“¶sÒvNÚÎIÛ9i;'m笵ÏZû¬µÏZí¬ÕÎZí¬ÕÎJϬÕÎZí¬ÕÎZí¬ÕÎZí¬ÕÎZíl«•á™uxfáe†—E›´h“\22mÒ¢MZ´I‹6iÑ&-Ú¤E\7dÑ&-Ú¤E›´h“\22mÒ¢MZ´I‹6iÑ&-Ú¤Eš´Ð¤M›´i#6©ýû\"‘&(5¨$07ΨZƒV\0032Œ\26L\26Ì\26,\26l\18˜æš Ô Ö ÑàªAªA¦ÁM³\15&0­þ¾h«¿\3m` -\11´I¡F†\26\25ºÈV\3ÓÎX/‰õ’X#\19U‰f¸ê%W¼ºHseªi©F¦Zt¦‘™ÞÍAÇsÐ[<¸ÈZƒVƒNƒQƒIƒYƒEƒM\2¹ÓƒÞéAïô wzÐ;=è\30ôN\15z§\7½Óõ÷\29G³\27êcŒ\17ZC\30j{Mg¯éì5“IzY£´FeÚ\26­5:kŒÖ˜­±XcÃЧÙV:ù•ê3mŒÄ\26Wk¤ÖȬq³%\27ÿ¼Ø6/¶©‹mØb\27&Æd\25C\31&ÛŒÅÖ¾ØÚ\23[ûbk_¨ôûBÉb\8É\1ÍPC8´\13\27÷šd\16Œ!u‰q—¦m6q³‰›K4˜o‡±aÈS%FhÄ\26©\24^\1ßáal\24\20`Œï»5CkÄÖ¸ª‘i’ôXË\18S\11Q3”çmïÈh\13Ã%Ñ ÝÌĈíU±½*>®Šm­‰MLŽÄ«»Ú²2ÛËÌõɬ@XB”:_ïdüï¿:\19ÕJr«\28w\ +Ëèf\"¦\4üï¢N\18‡va|µ+4;Ö\6&Ú¦«8\ +¹\19™úµ›\";\24v>~-;FÅÛ‡\25r\23æ{ü´3]\16þ$4ñõþèî«Èúso­ùíöuó^ؾ\4–ßÞü~?Ä^ÅŽä7—ßB~\127ÊïóßRÿð—Y¯ÝÍï_ïo\127íþõí¯}ÈþüõzÿÛüî\17&,E•¨ZTêDu¨YÔŒZD-¨MÔ¦jwéò‹\ +E…¨XTŒÊDe”òúUÿe‚ï‹\9Ê_/\27¼[c¿PÍÒÆ•G\\¥A­A«Ag\3\27?j0i0ÛàÝ\26®¸Í\6ïÖð’¾\19±¾\3\27|‡j\17d\127iEµ¶£ÖvÔZm­Ù:í`g;Ó\29é´36Ú\4Z@§\5t¶ÁÝÑàŽB÷@«î\\«ºïTƒÌ\6\\ñ¿ÿš`ÐøQË\30µã£\0227jGG½dÚóiüdƒ»\9fUówl‚EûµémÜ4m“‚vGjZd\\ˆ\13¤ÖÝ\"Ølðn\13ퟘÇEZZ@iV%\\\17jñ¡´`\15\18\13L[¿c½$Ñઑ©^’i¾LÓ2mLf*ùk‡³üS‚NƒAƒI‚¿ÌHhÄ\"Á÷Eƒ@“MK­µß\20ŠRÁ¥\"vׇ¸jy;ô\26”\26Ô\18ì0k.±¸°Õ #¾³ñƒ\6£\6\19É“Mž5X4Ø$ pm}é·\25AfÓ¯û\17MëKíkùdûòjip3A¥åV¿*\13j\13¤™•kf¥Í¬´™•¶¯ÒöU~û*פJ[Që¸Ô:.µ\27—Z«¨µàZ\11fTk¿ÄÚïZM¡”±Ç»úZ½S­æjÝE­»¢Ó¤N»ÚiWc\"Þ5£s™;­¶ÓÆvÚØÎ]µp•6»;j\"0í~9KŠ\0264bÐ:\7mË m\25t\6®ü˸\4˜ˆàv\12Ú=bÖ{0h“\6Ó$\"„¤A[6\16鞃Á\31áÁ\127\14\6׉Á\31øÁå\11)ë;Ö ÑàªAª\0166jñ£–@_F׉Q+\28µ¼Ñµmt-\24]Dh¯M4ò'\29ÊIÇpÒ1œ´¶Ik›\\m“\14Ùä†lÒ!›t &\"µÆi÷oÚx›ÛÝI±JgUΪ­eà\9\\tç¬ÑY³³\22gmÖ:òʈ\28u¹aqâ(Ò5ûîÆJ¢cg%κ:+uVæ,q\ +³öuö=ëì¼ê¬½žõÉžµ\21³\14ø|\\¯Ýœµ³vp¦\4íäìS8»þÍ>x³vmÖ®ÌÚY\27¼hÎEÛ³8¿µø®kqþjq.kÑ»µ8÷´8\15%Öf-2r\17WhÃ\22‡îâH]´Ï‹öy!·½ÊL·Öâz\127\0\22ÿ1\\ü1pBç’Åe\14]ú‘-ô°YÜT±èø-:~‹B°¸©aQ\22\22ÆÕŽœ…~q¨/~£\14$7\29ÐMÇ\127ÓÆlÚñÍïävdÑ^l®í›6q“¶}_¤æ=(5¨4¨%°>[­ÖZRð~‘FÈÝ2ñê¯Å²WŒDè\29T+Ô¤‰ˆ‰k½çxO5°\5..¿»©\26M=›\4Ò¿=àÚý‹³BkÙ!²ÂVÏx\17}§é.ß‘)t¥Ú¼2 {hp%ùj“S\0132\13n$ߎ*5\8ì#!–ë–ÒlVŽ$1b\14R ƒ\20p±vžþ\30\21kpÖl-í‹, fØŽ\5®\15‹PR½!\ +¬§µU…ved¬\14K\27\31ºÆӄк*Û\18õäáò\ 
+ôª\11)PG9Ô±¥½¡÷¨ìë_©!Örc¿ÀXˈ]ÆXï`¬J§¤=˜5ñM´œD³&z3¯®HÆéêÚ™ê8¥ZHª…¤š;Ó<™\27L¯ÊôªÌx¶I-Û›Œœ®BKÙM6û.úýÙrÄh\5Ö~#¢O5\127üê>ÍËÚ¦üøµ~îü¬¿^6f¿Ï_µËû±³¸JaûÒ¹ÿ~~ÊÞïñÃìß×_óIí™\15ý\29ýZÞŒØFôùzW~yvó}ñò<Ô¦\1’ùÝYtn6éíÞžs{÷úYïÆ÷ÏCw¿¾>Í\6ÃêeÏ©âý¤¤\6Oïb“þ\127Ç\31.Ê\21§>\21^ŸŠSŸ¸î;ÿüÎäF¸‹–SvãïüF\29Ú^!\8ª¶ëŒy²Žî ŽT¿CÛ¹CæS\1M\17Ë5y‡Í\13Ú|êâìuqöJ™])îúÒ]]z±‹éÕüîÙ§Á½Áõò|yyü\20}0¼Q™[¬žÌ¦\30ú\4ÁìA0ÿ6¢óiDçÓˆzcqh;\307\"ÁiLÜã’Ÿ\30—ÜC+?¡•{hå'´r§üÄSî\1žŸ\0ϽG%?=*æÓ\20³å2£äÒý\24¿ßÅç>£í>hܧ»H¼ù>»È\"ÇÚÆ¿ÖžÞdmòuŽñ•L\15§ü»ßݧ¶Ý+îY+-Ñú¤ýZìýA1׌ûlRî—šv}íW‹\26UÕª6£ú}îDí…|™\19IÑß\23³xª½+¾C39Eû\\î«tW©Ç´S^€8%¯W\14õSUùùþ]üê¬ØG\0179ì\30aoÑnÍÒÒEfKì}â:ì‡Î”Á§\25nÓ:1]ßœj<»7ívjµ¶\12ÜfÓ6-£ôÕ9ÍôW•Ž–§ö\26ÌØyº8òÊèÌÔ»Ý>Îë~·PRVKÛÑ߉§Ì*ÌW­§dÔ=5žÔì«ïø¤Ò“ÊOª8©Ÿ'õ<Ô¾þ\26é塯ž~ø­ÛÕä+iAhﶌޞpÿÜGêKF\7µ\19\"wÝ€}Œ¼y\11`Æ\13«÷¬ÎZòÒTíõ°öí‘aè%%-ÖÚï_ÝÈšUíÞœ\6[{¿ÆåØûÊ“ô:\30£rïG­}1²Þ/Ÿ¬¹š×\4¾XmZk\ + äòô\26æ\2/^ÞÜ\"V÷ìšÍ½È¸‘Ë>b5Ïñ^Vç?ƆŠv\28T\6ß¹½4ÔûcŽÓh@%\25_ÎZ°V\27·Ag­}BW«°yÍ}\19k¡oÆê©c1\14P-ãGdŒ+woÕª58kvÖb-îœÚ±³2gåÎú‰µßagí›elú¥cä¬ÎYóѦ½çåÑ\6³±”\27£ö†õ°}ÙY\15%íú”IR—ýfû)%Õ†%\29\0Ìé0\11¤,ÌŸò\14ÛlÔ*¬•÷Ú%N·–UÛ`-ó`a¥ò\24Ôê~°Äe\24kïFë¬\17«®ÕÒµ—³öaÙíV\6¤\20k6\29–amµóbwÚ‘J.G\24Ô\22¹\25&ÂŒª3¥›”Ú:kÁjäF¨5Y‹Ç¶“c@ë\20;uÖ\13k•é+SE\15Ų×rËö¦Tf&T³fð;û\8·NÈØÑøÚ\0128Ïmg\6\3k`x%ŽÊ\23\6M­ÁY£³&g-Ö¢›ÆJlëD¥ÎÊõ\19«g\24\23sËŒ¥t›=´ªÝ\21”X\15Ïšœ%õ\127ššp0|rëÅhÌ\1¸\21:Ðb®JKûŽþt\22`ˆx8ñ1”󧸉VL\15¤]Üp²›ž\21•‡ic•%³KLõ\22\"lº–èÝêÁM£bnÎdÊVÓÍ \"­iÆË\21¹‹)o7ÜêÄøëÑ-\14d\29>[Ñ»E„\14Ü‘gÕiÂI9ïol“6™ Õ8:µ)f2åîõ¿¼\25Ø-_Ĭ\15³;Ìƈð2\30±n\17cLÂ\21áa¦‡i\23\18\ +\16¦qÂnIcäÞÊÊÊÞ7ÝEfbŸ\14\17šõ¡\ +óP¾\01418S`_<©<[\25¸Å™ŒìQž[\24™U‘Ë¡N^ÍÂ5Q½¢5‡Ã´H\24a‡,v7o\127†{ÛÄ-i¿\19î¾Y!1艴c72E³>Ì\9ÓdòÅaJm\25$`ª¿ûß\127½)Ì\28K2`½¨ÌÓæ„›?¿ôØcÔÇÊFôr¯J•fs±i£Ÿ2Ê(ª{\19\25é\\~Èà\16WCŒ/u‘÷%ë|óžH¦²/]:†ú¤\26)þs\31…ÎIÁbV¹)µ¶\\ó\\î‹\22³€U©ÏØj›¸\9ž©\8©3R·g#V­]eàÖ™\"Ü3od(­F$ÚÄ]4ÜIÙß9¹ÐÞFújfOn\5\17f\28/‡œÝ­jÄûÒˆF\\¬¹w*\22\0296\17ÒË/¬>\"\26\ +‘4ãîÍÜû÷Oó¹\4ÊìÞ|mRµ“Gê¡']e¯\30C…{>E*ù˜r?V\22ÓâÿÐÇN\14\23em¾ª\24¥ŽY…úÖ/Û‚ÍnèD\8‹öBG\27\"“Ý'â†0/\"ƒÞ\11¹Ú\0083ëiSyg¡æÞº–ÞÈ#=Ñ>#¼žŠLÕLÅ«-‡¸éŽ§Ô\13¼5[5+üwdúÏ\ +E6þn\27\28)>3SP$ccü–x’]Nû|ÿ\18fˆ˜Ù&GÊ\ +¹8(¢Ìó†B#XºÓ\ +Y|…DèvÀ™¸·èSOŸevŠ,‹—j¾¨õÓ#oîŽ\12”v\11m„®Ý¥\17…ø$]o\24Ñí-\29Õ4>0\16S\22\127˜0££ùS^E,XLÆ?e^Ö­ä.d0Œo\127˜ÂWq\16›Ê}DMÃ¥Y\15oí£RÎŽt±«2\22\22f+\31æQeê|˜v-,ažf‡XË\127š:k1\24ã§zÒ™ò¶Å¸Ý秼PQs'Ö™©ž÷›\11ê·w\19”oæa\21Ctgu§º¶šë[«[Õ“Õ“êÙêYôÞeÕ»!:±:y3m7ýÿ\18Ë\28/˜Zv»Ñ9\127zÓÇÞx‚\31òàè¢×^g¶á&¿ùC8³v\12>ð4Õ›€9Ú5…\\ò\18oü!m£'ø\26!NU\28’}+ù«ùSßç™BåÐmzÓ¤Q›ñ2çë²Ö\18_w\8³Î7ëKs«M³ÍûM\19½»“êCܘɭ{XéÄKÛ·ßâ\15öï!¯\18\127(Gä0Žáç‡<íÓIêå*;ñÖžÔ=‹‹À'\28\17Ls^„žŠ½á•û½û?vi^\13\29ôó$;®‹¼R\\äM¬,äk1õøÕIYÕ‡§\20‘ÆÏÿG·)oÇëW^ÒªÛæ¿ÝÕ\\?trЭë?¢:SÕ*\13\31%¿\28f®?ì²p<¢t³i\30R=‘ù¾ï—|˜1—\7ù\20ù%ÞT–múš@Ïsô0ÛTº¯“äO\19>ä¨`‘\8é@%­Ù\29É\15Ž\20ÌŠe\31\31ú\14MjÛGX\6ÃL_êÃöç뇮\23ÖßãÌ'”~ÌËœãÈ7ì.ª”\29\23QËVÏ‹è¤GÆçz‘ãï•\25W\17›Ûå¢d\1øÓÐm£>\20W/BÜd'O¢_<þÑc$i¹yü“Ý\21ü8zÜ\26\28dÅd\30­Zü°\127n`¢ä\1®d-7\31\17²*5\15Ê\17±š\15ÊÎ\17‹vŒ\8&ºsÄ?®0ìû\17ý©ZÞ>Ÿ\"Ì\6ÅEèY¤“º\8íýZ„1¿s›Ð˜ú\17æ|û,õõ£0¯\23z/B¸ÛüˆÈ«ÒºZ'9u>E¬ÞàíDDæ›8\25\11óÚDŸhL\25C™Óô¥¶|„¿ê\4ãÌÉš?ì͵N\29k¶–™&ˆk]\\ Í_õM‚úD'eùâŽe*󌕺Ú\25mX`\24\14\27]¹•ò7ÑvUL)ÇŠõÿN’}ªYËŸ\7‡¹9Ó´Ñš×7óýǧy{dw!{Ä&sý\15N\26L\4\30W#ô;‘íc_U}i—;;s[«uVç¬ÉY³µÌ v§M…îÔÌ™Žñeº\5–·ñV©g¶7´ó]w'‹m㶱\26gõÖ2Ðc™?\27—™J[ÞK)ò\23áÇÝOQ£lÅü\8`°\17‹+š\8\29ƒ«\23¥\5û\17ñ\14Å(tê‡8°irº\19q}{z|)ª0›Œx\0079´]\25HfA_Øeï\17\17Ë{¶£J[zonñU÷‹Z›\30IjyÒ&åSv5\\1ëþhˆO\3†‚íКM“¾ø•—Ú‹JsØ÷Ã\"æ´¸\11y¿5¼éîê‡|Ý+\16Û\8·&B_&jèþ”Ú“¦UÊzÒÉJ\30.'e\31ì$¯Çì\7‘?8!=¾Š´1²ì²¦\30 
p¥™®ÜnUOžÜ'™f\11g?ª´¯¥\8õif#d\31«=·\0283Œæ6\26Õ¸x³„½zÚ~B,Gñ~Ä,¬¸\8s×¼ä-\17\22$3v—sT'\19Í)JŸ…ߢÌNðuŽTP¼(Ù½ÌæsÔ€\24÷ÊÌ~f³Ï\16æõ¯Yf™w»Õ›®\8Íc\26iãC]LVúÄ™=Ÿz\6³¸\11Ì·Òf\8öü\31–š«¹'ú¥©\28¥šM|ír±oL|Yy©fµg†H¥,ù˜ÿÍmä”Eï@*+ÔE\31\25u=oòæcÑ/f>¼Å¤®Ðñœ¥üoLŸòÍ™üwPŸ|2v˜²vð¤5õ\3H+Ìe*t’ÐyLS\5¤ù(ÄÜf6ºwkÈç\\jîÃ]¸\5¥XûjŒ\28ÍüÔ\19\24§ôíµY@ˆÖýΡY¾÷~œìÀöqKÐòYÅE×¾•¼ËÑC?{>höÐjÍx\24ó‹ã³W[G.¶qµ\29¶y\29G>YvÎ\28íHé_¼µ¡.»A1\11œBWBnV×Ýÿ—®K\31Ö6+Rv£\9»ª¿ô‹Â¯\31|\5°·Kœmœ^«Çè2\23Ë\9\18-ú’—+Z¯™\127ÕÒû%¯\24ôSƒ\15O6ÎÖÕ\0¶~\11¦J]ãûYê\13w\17úÇ\28æAúÓÓ?øpç|ž\28;Eôg¹ž¤ù\127²|Yœe~–\15‘\127ñ·${Ñ\127Ò\\•_'՜վ×ñ5ƒ¥\31g² 7OøË< r»®ºZ]=»Ö«GgÍ,¸_âYÕ®ôµÙ[5ú\26kå@ZŽ»\23n¦††N]ì‹šõ\"/fÔOÅ<-«\"î®” .§Ö½\0281|Hjk8¶\\\18£/ºPæY ¾ïüó F7VÂ~~h\8\23Ϲ/÷#]™è’u•¯øø«”•õµyp´£›¼ •·_!_eÉß_H\127ÿ|Ã1\5òñŠY¾ï·›}£{‚VÝ¢ØýuãÅŠ}ÓUÑ—g¯ÇÛ6‡¾úå\12úå§LÞdéM±1:œ(q#±ý;\6=)Z훬å\13@X\15š\17‘·œXŠß–ƒû‹þ—wNt¾(}á–·ç¼\28Éèî¶?KÙCüç\28uºÂ\28íŸóËa•Ý\\¬'iþû>?õëœZœS›³\\Xöc†óE:Nç(»ú=ZÞiüÖ V\20_‚rvD”úE˜¾NºØ\13„.œ\12;_n}òeÖÁG„¬M\12ÇGD#kůsÔù\26õ\17V\ +\30l‘½86€^Œ¼æ>b6Ýk~¹\5»\127½÷Jõˆ‹õmØ\17‘è³dåU¿\28ø?nGÈí8Å4'µž”|×\30~°Í˜¼ÇL0í\7 z* Vã¬J¾›jXK®z§Ô…XõuzïÓœÔzR|!úqŠTÐü(yFi€\127åo·Tß™Éa¤,Î~‹žä$ó\28íÞù¢¥+æ\29Ó?â›Oÿ…”¯øë??J7zÁ)’]ð?ÛQ㡽(Ù«Rßo\9¥~ÂcŽ¥/¿%­²ù3Ýÿ½\ +y§.þÜ‹t;ÐëïÑŸò5’\31kÎ{¥Üà÷„D\18NQ\25\127½öÛ•™y\4ä\\É‹üù\127Ý°^ÿ\26óK\23\15…¯\20Q§š“Ò¿÷ôcdaöqŠ’§é\28e\29Ô?®c.8_k}Ü©‘\30¤þÕ_r†Æ'ZÚZ§V_\0257å¥é_Öž¢ä\29|̇K{à«õ¤x†¼(¿y¿ç“=\3}'à©}Øý´æ¤t ý\24ÏKœ²y\127\"sú{\25óÎAþs0Ù\30¶|¬‹Z¼\0209÷tÊÞ\4§õœ<}Ñ\26˜ÏÕâŠY^o»tj™uÿe¯•?ƒV{•FÛÒ.ÎvmšO-šßÜ\31U›ïî=¥\31”êŸ=È#¿¹?È}¹GñKÞ¿û1úV³:Å­ò\21Ì)'Ÿ\9Vç8Yãú1\15ý\0N^q®¿}–§ßhü\22):]%û©ß#»ß#æß#\22\0301ÿ\26÷|z‘8¦ß›!‡¸ç+W\29c\22\11‡Ýx6ë\20\23¡+H/B\14\2y­ç• 'ò½>¨‡ýåÙ/|\"\31($§\24ûÁÛ)rÓ?Ž=bìÑ \23cŽ$ô\16þKÞéé9¦®VÝÎÕÀf¯‘Ï5^²_`×üçñ\"Dþ«¶ÞÙµ=DÖyñöo4å\\áË+J\14<œ˜uObe{’{kN©«×\ +ùû\24O\28mҿ³â8ÿ6ÃÊGç²*ží_3Ñ»ùÓþ'\1ü­£jó8—¾:lïªÅ‹?þ\14Óêõ°ùKKQPdU$†ùënLùcmÌ踠±ŸÂð…Ýbþ\ +Ø­yŽèÓÛôYÓ\23›¾húbÓ\23M_múªé«M_5}³é›¦o6}Óþ_lÿ/Úÿ‹íÿEÓ\3›\30hz`Ó\3M\15mz¨é¡M\0155=²é‘¦G6=Òôئǚ\30ÛôXÓ\19›žhzbÓ\19M¿Úô«¦_múUÓS›žjzjÓSMÏlz¦é™MÏ4ýfÓoš~³é7M¿Ûô»¦ßmú]Ós›žkznÓsM/lz¡é…M/$ýdõ\31ïâ>Ð\127üÿèuìV", + ["length"]=43681, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=4565, diff --git a/tex/context/patterns/lang-us.lua b/tex/context/patterns/lang-us.lua index 2b6939f02..19b2bda6b 100644 --- a/tex/context/patterns/lang-us.lua +++ b/tex/context/patterns/lang-us.lua @@ -2,7 +2,9 @@ return { ["comment"]="% generated by mtxrun --script pattern --convert", ["exceptions"]={ ["characters"]="abcdefghijlmnoprstuyz", - ["data"]="as-so-ciate as-so-ciates dec-li-na-tion oblig-a-tory phil-an-thropic present presents project projects reci-procity re-cog-ni-zance ref-or-ma-tion ret-ri-bu-tion ta-ble", + ["compression"]="zlib", + ["data"]="xÚEÍA\14\2A\8DÑ«p:\20ƒ8ƒi¡\3¸\24Oo':qõë­Š\11\21\16ãVâÿ.º©`\24œÑ\22N±\13Û±\16yÒÖäH£úTg²÷‰ü}1è­\13.HRöùRµ½£ÒWy¦•M\127Ø°¾‰1ê?Ôùh\5Ôò0_õ ÅàIÛØ\29zä\12ÍHÂz°V\16e8\29ÓA+ø(þ]Úg¡oŒÙ\25«–1‚fdpÊÖ^Coç\8O°1*\24“eŒ´¹\29ë¡¥¢1z;\18ó2ç8Üt&ÓæþˬŸ/C>Û\5‚³ãŽŸNŽ\31Ñ`¢ÍS´a¢ßÀV43hc`뉵ºé\6‰ 8E,½ôÉ\6Ú×)0a¢Ç$+'q\7bD\8Ô…æNKô›U\9cfè©\127nÍWÍb\29a+%⯇\8Û\22ûÎr©7\19t¶c.:Fœ\5!6Àø’‰-\24z¿í_ñu0x3=™ûF\20‹Þ‰›\11›\\ÍCŠÒ†ên9Ô,g˜†pæ\20q9\18.áß\30áì\17t˜ç9\14Œá<³ŠÚ9×Öw½’ÿv.·ÖTÛ-‡3GemÕr\6í\1\15»½3~)•Ð_‡x.÷5‚ÞV|´\1]áó!rba”€Ê?Àä:­\22x\"±ËƒŽÔ\11BCkœ.…-•s\3ÜEÐeØéG¹Pæ’oér$?gjè[ðÖ8RÕBÚŸ\3\30^¶%E­\21\11ü\"Ž@“`KmL\11¡c\4Ni7˜æ÷:\21ý^ëöÌ°;®²šá\21ßG|½j\19¾…×\24Œµ¾=\\ä+ËßùXëÎz\27$\19š³:å+Ë\"ÊL›•×÷! 
MX]\26tÃæ¬Õ`9H@P-\17kŠ¨Ùæ¬1Éké\28®å\20™2<…2cÑÃ>#ÞöW?€5EӜ۵\4'êv\3Û¶Ú…³Ë”¹=\6Ò4[¬Ëj\27®\1o®»MæÞnOD+òˆŽ[\6\27·fy\24 Ž1èâñV8û ‡Ä»t¾öª…K!c´·ø\24s|в=5\0065éö0&õ9œù+\28שÂ,…*)ÌQT\18ùÈ!±ØÍõh’Äb\127Xù^KÊ\8Ë“ñƒÜe—¨AÒ\17 Ú=ÖÐ^¹ì™Møä\26ŸâS½K3Käf“çÉ‘%l\9È,E¿\16_\13Ùëí)Ü%8»\"„\31\15³\31\6\28Qåap >pá€^\28ˆ†íÑ#‹´GÕ\31\6ŒÐ^Xhg\8•À|\29û㤴G\17\5¿Èÿ\ +\2׼ĊW\ +Bõ:ÇéÅ*šO‡*^\"ÑëäG¯jèpºŠW^¡[ï–4o§ò]KÁÞ\27Óþ[;4@ZñQ6jåÐíǸ¿³¨¿?–\23|úër…aõ\4©L$àˆ|\0ÿn:Ú‡[\27\6Í\1®pq¡6\5˜2\\Š+\8LοKy-®—ky%yuµ¸+Ô·½ÔW\4¸úš¨'µÏZˆÌF\0l\24í¤k\3¦€ƒ°\23Âæ(:x‹1‚A42¡Q\1fFAç0h³Ì\ +š\22J\14Creñk˜s‚\8!\26ò\5\0204ŸLSïä¬\30—ëPÒæ\1Á­ºBóè\30“t\1*¯\"Vq\29hÃP)¤\\Y¬\13´\14—\12¤­\8q¹_ýB2¹*dRR%qѱ5C`’\15EÀi¯\14B\9–\\O\17ý\ +?epB8G\2Lö„ú6#(e<”R\25ÅÉ&Óº™Á¡ØL\19rç´dg\0085‚)¿æ©¾\26¤\24xuIØ\30ˆGsÍ\8:”’W# f\17”\2Š\7Ùa„s\24\27­B)£öŒ€èl-ü}/×5\4ïòÚ \28_·Ìp”W‚wÈÈ5rØéëq¯ï¢À¼\3¬œÂ\3nl\8]8Љ\13~\0027\25#ŽÚkƒkoÑ\17hø\17ãˆÔ\ +, \18×æM\ +Dˆ«˜\\ ¨Ý\22á&·¶€*ÝB?¿!©1‹È\8Æ%’ムJqó\6E\31Í£\18\27)\24`•ßNHíBFÑ Æÿ&ÖÿE K\29ç\19…/t,Tä\9g\11e\11\23ÊÔ[âfJ©\7~YËÕÊú»¡ËÜà$\13ÐÕs»åi‰/¦JxÀåÊ\27ˆPßRwB:ŒÎäÜÒø¥\23i±¸¡óÍÔ¶™”°\8ƒvár¹)™O†Ã\30\8ÙX¥@È\16DS\15\3‘Ð8nM‚äá\127;L6øa½\15\8\27SÀôa‹»Þ(¹Sס,dÓN¡@ÿçÒÁm;\22CWt¬ßŽõÕ¥öN1IVƒSCÿq$_~±4p@\"ÖFW8s]ŠLªM]\ +Í\13\7ÑêÇ@P­«Y.ÐÒ\9â\3dæK®s‹Äâj”ªÍ`8\ +}NVX«Bu©bau*ŠÍ=¾\14`±è\0053»´\12¶ªDžïªôc]ëÃÊÖ‰àu–y»3aá FW¦Íj‹ÍF”[cÙȨƒåÉôUË¢\6{ŽØ§÷\5@`\26íé\7PÐ\16ùQW¡r“Ne»+{‡òQR7\2&c9T¤\24¤8Ý\0\13§9°\4\20D·+…)+{Z8p\3…™rt’:É]Ñ\5Ñé‚æ©5P\11²\2\29*ÜMà\7ÿ&\7+¡\27Vú6\20«E¬n™u\9šM½ª>]éÒ!«ýA|èšgIg\26É]§ä ì\0022DµÎ&Ò)/‘\ +õ³\27íâä\8Ìb&4·ËŒ&\26†¸”\19Išìà@}º,!íòèüdЀ±…Ö8\29YE\24gŽ¨\\uÀ¥^Ì·Zäf…ù0\9¤€€_ª]¨\"Ö¶o@G”§–ž©¾\0$è\13.c¹ºÇ\ +\28p÷Š”(ÕÄ\30\11ë—¯h+*lQåö\22б\18^[tÌ}q ·vGé’8nzo’È\26wÿ:\29°éh\16*;ä\18Ðà\24\25÷\3‰I§¶uÇ¢\0156ÐÅhSG\9\9é>´ðSo\4\127P’.©pE§Ð=SÛ\21\2e˜\20\0024ÝRšæc\4ŒMø!Ú\7Îä~pjÜJHª©Âú&T½õc¸Î\24Oai-+Ú’E#\29Rñfâ\0139:µ\17\27Ê\5Î\3’˜Ôiï—T·Õnå\11ðª\"Á\11\20\19\"\2'è\0074BÑ\"œ-œY\8ùHÕ•†‚ÿ4ô<\30H7d\31ºÎZH\12ì†p‘nå`\"ô\27‚äÆàÇéK§ûÁ©àÆé¦Ê\9õ\25 ãI>9…\3QM0½ÑBj/uê@Õ\4ö;ö±\127C\16\13\13?\"mþ¹í§C©|ž$ÍŸ)³ym\"˜_ê_ítg5]s°Ì\18Í\4(‹ãeäÓ\24^Õ1\2&½P\4àR\24\0CLÉ=_BàP©I¿4Þ\25»ƒ\30éR¤{|¨¬' eÝ]\24©º»\5‘îä„\30W„ÓPÏ1Äžú~l\ +©GàL½Ç%©oì]\31hÕ+óP]Fâ—sÝ節dï\11»„âÅ¢LPÅTÀ°Ò#Š«Ã'(c@VNC_êëIÎx¹\18†™\22\23gñÃŒ\4Ÿ†µ6e`-ŠÐÃ\8;\11\5Kõ¿ƒ¸\127Íá,<Ý•LO©\14ª\30\0ßW@ª\27Ý\\Ã9·i\12|\27øÛ:\26cjJWƒUn&H&¯\24\2*\28\3ǘ÷btˆF§g\20kQI®âÙ\24\12Í\25sþ\00951½ãÖŒ¢CM¤=#’É.UÍL±=\"è…@œÜàJ*\23d\26Ç\\›5Û~‘(Š­vŠ6pëë?—\0186é~¹9{[ssáV›Ì‰ÈSºr7'ykÒ\25¡âN~jûúÏÝN÷%£\ +’²=,ï\17\28½pí”nÛ\ +;K\24”²u{ç\0QX\18µÅÙ\14+m\11N¸\0135\13h¶§s·©ßPDÞÀó#>¢Œl\17…\"ÅV¹­AÐ2HN6±‘At\14ÝæSÜA\19F|\12©»7%\ +˜MVØV\6§DÒ»ºÝ…˦_‘À¬ìˆ’U§fÍ„‚—ö\20´\29¥ŽÄû#wH3±¥ëñ-’)\11Çúö8Ã1l\13\24‹\9Q9T£´Ï~D?ª}=¡YÁ©›1«’\16_’a·{7¿$\9{ó™„R\21Çêpö\16ÿÒáYu:\\ñ‡'°À¡aè\14w\18Ó«-\23ûôj)éÕ´bwó’ö!a${Ií#L–ái)ƒÿ\ +\22D˜+¯x\25R9›¯A\25±~9M¯@ïæ%®6/ñç-ʽƒ+¼¥«Õ\27ôðÀ\21ø\31Gzƒ÷Eú]@ÔÒÂGôŒ½±{{¹·\21küÞ^«\21X»Tï”vo\29×»›`\17\2ãáK&qiî­I×\6~Mî=Ê€šŒi¾·hϨ¨wÈP}Gƒ¼SÝ\29A´\22º/xOR»\4­\1î—ò\14Q»Ë'ƒxÜÓ\\\0262wÔ~g%š\16̸'ÔÛúŽ\24äÖá&„-Ü\27D\30ê(ï60D*>ÐæîHýw\ +U]+ïqÌYë@»ÂQ„<šâ“%CÊ\27³@ 3Ò(Vµ§“\"å\22\9\14 Z\17e\12ýƒ®Þãxû>ü·ax÷l±¼»§uWÉ\3 :ᔬ;,\24ež°ÚÚ67±îž\6Ü›ÐÉîcŒû\24œå^Ž\31G’%tGe¼£àßYAü<]×Ù%íÍžU]”ñPúqX\3:t\2\8·ÃÙbN–Ë}Õbç\14C!t•\12á ºÝC¯ºÃÊ\26Ç\ +õðN29ÙÝCŒêN·C/B‚¤\3nˆÌ†Ø©C“\29\4K˜$PÙù.ã)˜ƒ\2!«oï\16r\4NôPà@€šgÙ#à÷m£ÀÒ‡ÜП{ª½ç–\127~!]Còð\24·6\12MZ#\20‘ˆˆ\5Þ·$®®—ÞM\14‰/ »ô)ý‘\18YŠ†öŠiÛé°š\9µiɈZ…µO¡|\19AUˆ¦V˜>¬=dAcƒÒöÉo’ìÆ*›÷njÕ}ÀÛS3MQµëûñh€ÈyýCþØ?B\"è\31Ž)òõ\0235¸ËÛÇže/Q#TŒS\18§p\24^c*{ïäô\8Òîì\18?\0274Ó\"\18ðµ™\14•½‡þñƒ`v\3Jã‰z\25Å hý\8W\5h¡„t\14‰*zTÜ~”~÷¬Áž´ÌN/\14Ò,{d×»‡Ø3VT<ééHôØ—³]«\21É\8ÈzAÑB“\4Õ†>_íCN¶Zq¬Ï•ó –T÷•»\5~t¹1ÆýÞ>S\8ølsÁçlû³#à>d_-”\13'Fžíl;\5ø)­ÁY\30¤ktÝú\0007hÃó\9—Ž 
{ÌúYn_ºPGÊ¡Òõ\0Mû!‚91\"¢X\127Ðíª?40)ûÃá\14£1‚œ\8TÓºW\6è«7\13cÝ÷Í\7a¸ÿh‘à.9Sñˆ]½Gë\6¯p¿bçÿáÆ~2\21%ÅÚy\12²wýg\8\13\02868·\14Äí1x\4m`ɺ\127<ù\21¬Î‡zèåQÚ[×Î`8s:)×–Â\14äÎ9á\20WÃ#C<€£ÇÒ\21®\13É2ÛGžR\5tÙ=r\3=\":Ó¿ì1Ä#ìߌX\29é\28\27?~1<±aŽ\127sn³†¢Å#\31¦Bó·ÈWPñ\7\3W?\28…5%\6fÍ'B­Y›\15šÎBx¨S@¯œö]Ó\31ØÃ.Õ{¸=ö8ÙÀC›°¤ãD\28›æ‘\15—Å£x“úÝ®x?H\1)òSÂ_\6T¡„\23\15-rú ¡\14HL\8ËsPK\26âDY?E\13\30ª\1%ÿC\29„\31ý`W„óó¸ ª\19sÓñAƒ²]gðCò«\31y:\28Êñ(pð\8õ‚¾”ý^©úÊx\26pØ\4÷uÑ>Û%üDkmöu:”êy^||ðßÀ:·€³ðAßn–sC~\27\5£¹gé\7ò†Š_\13B\1·øpl‘‡4ƒ¶!§Y\18A¬} ‡îTrXÇ\17†g»‰0 ¬\12A‡\\\"a\12\5ªð\0)\6Ìžd\6—pسDiÈÚA\13yÕ_£1\18`Ñ`•‘Šx\4ÇòdÕ/\22»¬ò£\6GtXµ\28-\7\4¤aóà\25öçLo’<‚‹›!š\3\12\14òææ\8bÝÝÔ\26Â\17ò\16Í6XÝâ—è¿iA34›MØþSÍ᧬°-8'\31“´jcè¶\28F÷ƒÜkØÊ….lõ\"\ +xåA.,qÛ6ÍaüJ\17¸·¶·v\2ͳ+¸àVÓ—ÛaÕ1ï‘Ö=\6çO£.ƒ\26f»ò°@(\"YÔi—6(#‘\22!CÖå\\[\5¹\6±¾ôÌÛC\127•ZÒºgå'C·kÒx1ÅdÆ@6™\7\15›'#\26zk C\6G“´ì®p‚vK9D¿f?ºø\14î\127´¾p\25±\\ÏM\5Xɯ°®ÀM«ó÷¢úÓa\\_Aâ›—F=d\8“{sdq¬Êÿjf‘,Ÿ%2[ñ„œ<\19ø÷T?#uìú<¥¨OM·žUXÊ>CÝzV÷Ëóav´…'\4¤yÂn/ÏÚa~\14#’èSÃÉghIÏ!”åç\127V%~R²Þ3\26\18ÛÞϧÅ1°ÏÑCà'HóDt{JÙh]\9y*§>ófÅ\30Ó=•—Hª˜ô¬aiO$Ÿž6>\17¿Ÿ\8>£fG·ËX·\8\24c{c,ñÓ\24\4þî\3DW\2N\21ºÒè\22l\28dŒ:HBñµ›x¥Å£æŸ@e;ØŽ8êiròØs<µà\17Òn÷B`,G†vÔ\4àâV^Æ\27tfô®H9ROi#.qÈ:jÖNg—Ñð@ÕÕþº\2¤(c\9sÅ'm\29Iè©\21°Q×F¹Üèî¸0š\31çú#³ê~ѸKÝF-˜GÕ\8€–\4ãÑ\8<·\31\15w%ð3˜@ĵÑý\15R xŒ‡:h3\30“E\30Óü\21\31\ +j¤eŒ\0147‡êñ {Ú;\9½»XŒ1«/K\3î1\127o‚]—ŸVÀP|˜l`\28ÁŒnD͇®\22TYN0Š©-AÑ©=m\2ñ .]¼fæž*îl²\7!,³\11\13…¤Mmì\\‘\7dÃq\11\"\28­»¦Ö†Oçæât\26l’m1ù\26\0228áùÃY\21\24p¹\9ÛnÖ.Í´r·¤]¿;CÔ(\23Û&nRNÜéÊŠi.S£A0+8°yººi_OÚ$NnNNÚUM\26p2\14š\23LZ\25Pn“\26ø”J2y*<ë€]“ö\1Sš\ +hVá\14 ¿Úl³\6öÓyÙŠ\18\29«8K\0002ªÂ£¹Tê\9\17aÛS\20\11Ø\28l(!ÑÚ-NaçÉ\"TmœRq¦ÝÁÖ©ö8Öõ©®m Ëcò\16ÅÆFߊ;…=H\4;žÜmŸ†®>!}\25 ÀÕ„ÆC\8tg4{pk\4O·×&UŒ8•Žˆ¹\24µ†\8í\18E÷ˆ<ðg\16k*L°\\N€ÛŒñµGžÍ¶!×\"¦MeÌöÓV’\4¶>ú\19ߌD\12dí&&ÞÌð\"ÿLY…\\èÜ^¦\\vÅÅSð\12D\28\0216›CžŸøåM´-3’ÀàÝ:}|Å`H†\16õN7’ÎQîœ\17ƒüú|YËj‰k«?*¡ŽÑ\27˜\26\22Õ\19\20\9ª7¹Cú\24yâ–\22¦ªƒ:zOôÙ©Zl\8TGTP„ž\22·›'ˆ\15Ó\14/Ñ\14Íˤk„\17´Ä®\21Î\22{$$‰Š\22¯¢N\11’©å8p®ýI:5¹A4!Óó½3xLÎ1†ý{ƒg÷k\31*Z|ÐûÃ=ß\9’1\0295ºæ\4]€/”sÑ–×\11óp%pnQùgïõ]Dî\29\127\31×ïf¯\5\16\1NÌ.òÖ´£ñq\17sv×hng…¬Y\3mcæÈâ\13QÍ\13SøÍî\30Ÿ\23A&kÝ\8ðªñ\28æÙ\22dVä\127ÒÀ\14\26o…\16xðu¨RV¸¦xiV=\23WªÐЛ¢n\18^”æ‡m¿=R\14\7J`\24ã;ÇÍÃù\22»åPLg÷~f’æØÃñB\5i\ +\22´‡_J½sçÎ<\17åp7ÆÜ•ûˆ\4\29´»Ó¸uîÊ7R´L‘€€ä ¦kuèE\"›\19*\9\9:¾=8ÁéO‹\25¢ÀÕ9®pÏ\26­P´† \6±\28\ +/«Õ³Ô\5½?\27º\8Ë\31Z“¬)D:µcëÕB >\26\11pó´PŠ¦f·Ûgo.èßÃÿ¶H…ÇY:ÂÒyECÞt‚¾Æ=ȹê\17/fÏi\24q—•å{ÄBù}ã`ǦÏÜÇèj¸dNûÒçWm\26pt.z‡¸\7\15g­9™9¸ßüh\3\\+ \18r\5â5KI,–EÿÕ\16¥±Y\5vvçL¯=\31Ê»þæ4j›5³áS¬+P@È\27È\19æísð™9Ô\15rgCD—P[iS´ZzU{\30nydKóªÄ\\ü£ÅÏòâ6ëj䓵=?Ýàš‹çhwDiê„ \11 „±àæÙ\0»4ê8ÊÙC‰9W×ÑñÌ7³WÐ8ªÎâ4Òã\28;!sVQÇiÜ×o Vˆ[›É´ó›ÿ\22G\22K(oúè§EnZ[Ælfø\4x΋%,q——¸Õ-¯Y‘KR6\0~fêê2*\28Ña®{ºtª†Bä&E\11™ˆ\28²\12Þ\23ÙX¡ù\30ü,ß\7Ï-sé{\5¤úüe©!\21\20\16Áì¾åéd#,\12ê‚lJ4¨Ÿc7™\8ëë\17^/UîÅo¯c#v’‹ìA €qÓh?bú}¸7Ö\4‰Èó\11\0170gp,»-,$I®A‘Ðts•Wš˜Á\"º³P䂶\1”…£\5»R‡ÓÓ›Åi]ÜÊÕ\17q–sþCe%Ì\17B\12@ðËÅbtAw–\15eCˆ³[Õš¤0êõê\13V׸†\23º\30\16ä8ÖÍM˜\17²¤Ó\16AbÓzJ=|BÒòêøÕ9®×¸Ù\0303¸ŠkÌ`X\0\26±;Ý«ƒ\26w£óŠ bÒ|7:\31Ñ\6¨:ðZ‹†úyÝbI­ÚYûzÈc8\9,±Y\29ÛB\\Úä Yk>Ø”«¤Únð´:ë¯Ñª/kcGÜ\127ûÛd\26$-ÛÙ¾°¸Ëî\19fw¹ñ9\15›â\20\5K²â\25‘Áü»}Üâ’\28öëyO£[¤yýÝ,‰ôí\16”\26íàâL\13TÔ\20ÄýDû½‰Aƒ+E£µúÏ¡¹f„[à\1Ê\28Ú€åûw8*0ù°•Ç\28·#p¥#GC\16{iËK«Ê\20®Øêö›Ôëe±¯A†››W)â}’º·Ççù}.ŠwA5o¥ƒ\\¿éû‡•U0\20KÈýKX¡-í\13Z½Ômwi–¶g\0\22\16© Xw–ðŽxUá€Ö°œ×0—\22•\ +ˆrDiËq\9\13éjü\26¦¿K\\Õ¿T‹j\31æ­[ß\1ò¥\2º±´Þ&[â\30º-ص$ñóaÁç¹óâm+Ê@€YÐþ—\14ÎëÞ\18š—MJ¡h,Jßåâ>7áÈõBªQó¯u<\9ZjTß%%d%=\13V\24_Î\11WK˜S›f&eŠ3¬%ôøE™û벤ÿzWÇò]b?•@8Ç®ë\22‰.º{DÎ~ÀS…»)µ\27à£xÙZ³4^ì'r\16Ý–;¿žßƒª\30¡\\B\19ܽY¼[¸\25\21&LË\3V´‡t3ÄÖÇ®êâ×HK}¶h‰\19e˜»:üÙ\12…‘î4úÖ\4\3;0Å–ê½þr©\16º‹Å‹ØÕ\"c\5(ž.c4l\28âòÐâÖ\19 
6^KñÅý3ᤥãRL–D¤'•ÐN˜ë’}Ä\0…;¹«¾xö¶d­ó\27\29*ÈãÇz²‡¶‹§)H¼è\3x!F\5”\23¯ga‹\22çKl?’U»…E¬½€ËRBñ\22pÒÁÒâ|Ñâ<à\26¬´d“\127/kɯýx ¯Ñ9ÕžæÝ>‡d[×tw»Iy[Ü$Ô2Ö¤N¼Ä³\22f\8Ä!îe)½«[7•nm’a\19óª5»Ô×\16Ít«Qˆh®ƒ¬].Êð‹\18û²ÕÌKí\11\9„’zK—K,Pov-;š\22PExñ–çr Ñ-\7*ÌâžærŒ\21\1399ăCµ´Y\0167ø\22ß\14‹:ÂÈ{\9©zñõ3‡-^AsÈÞ—\31HåÏÑB\5ËŸÃeþã+;BšëÎËŠ\4=\0½õpРY­­Ö©:P³µlõzc\127õ–W„L\6Í>’\19O±ngÃЖ€Kœ­ž\6lèJÍMHÇô'\22&.œ‚r¶KÜ'žÒôfõÕ\8ËQoòëÕ³B¦4¬:a\127Y¯H¥\0ä¼5\4ýµ¹ª½¬×óêózES]‘sÖ\18š¼z™ü²Æ\14Çz^íXY”ÌÅ\26›\26ëí˜*‚:ÈÍÚ!V®\26›\0044i'ÍFý\8;\2¯\17¬©0eRÝV X™\20$‚\23\31uû¦-ÞÚ¡Øtk'O׋_kÜì¢\12‰*…t\6Åc;kêÄ'\18ÜAÕ5•w£4\30¶¶Ê™ Š‘LÚG\26\21©}=bõº\13—¾Òæs»”¨lŒ/®­é\20x\8ƒ£¬…\22dkÚÕ{\3&[£€Íºj•GW…w«|%ÌzdïÑaMàI#oÑ\25ê?C÷(\11,#å±ë\127é¯_‘éœÇ&½š³7ZŠoÿ¹QøkˆdÖûäz?h$óz?l§î)M›\25¦°\23ù*\15OÖ^ºµö1-š\12l\1‰ö\17¢µW£_}µdõjòêõÿõaƒ¤€^Ñ {q„\6ÛÔ{à[#BÆ¥X˜*Ĥ\17v&•Š¯aẞÇd«ôúv:T«\16P)dõ͹ټ=£¡\1ÍŸ\31^\127Z5uaa\12“åÄ\19…ké9“;úðÄ5Þß\16¦€½\5gc‘m\5á¾(ć‡{¶éÇ2¢ÝÎáàžÁ\26:Î\26çh\26N¨$¯ª“)ös.†S\24[Ã\0Àbá\29ñ\8œ\29\9s»Õc¶=\28zRýãç\22ÎúÔ\18S(‚\21¤\27]Û¥7Ô\16\30†> %ŸWÉÖÊkc+üdõt„„¨@«\27ü\4œM8OЦÃõ‹›Má|jÆ»ÆÅë\21\29eÔïHÇ\11%†|ôÛþÂ])ƒ†ˆ@]}4Šôù0\0¯•T'“\26ýŸR|_\4ÂízËP<\"3¬\19ÿSùiÍ^B^ói⇋þ\"dLU©Æp\"«;äñåöÏšódfXœf‰¶ù|­`\13Û5îzúÀžüÌ—\2ÌZ{\23Aç#|§Åãš\127\27úF• \11¡M\\¾Ò\6Ö)\12f¸.5H¾žºÎª\29äZ¸Êc‹IÈrª×”Ãlµ®îä­qéh]ƒöÚ\ + §Zõ\ +\7[‹º6\31%±\24Yš×Šðn\18²‹ZˆMˆ})šWdcº\27¯ù¬ÞïYÏû5ë†ògÈb$$r\15Ûâ5,X\16æ\18IÎ+38²\20mT¼3n¸Û—ñ˜¨\8àý\28‚¼„ªS\0175~±kµ\ +ÌzÒ¤a·šÝíþõp…\31•[Dk\28\28®‡\27\"øEbX€‹J>»\30ó³\17Æ\\4ÇF—½(CŒ»çëKÛ|êð¸pý¬Aͬ°Ð¹ŽK/øí\14]þ(D­\31ÕÈõƒ´¸µå¥Ü\ +\31$ÙÚÛ“ï›Ç\24[ܼ&\8o«\17…\14\18Àæ3sáß/Õ¦ÁäÖÆ\1\26E,Æ*Imm¼Î³ùŒ\19ð0³V3Ƽ/õÖ\\/[<ùÒè‚Æ›O¸\\T\12;¢}EÿFø£Ôû@ÖÜ‚ô¡'\6iÚncŠ\28ð\30THÄ\20ß_[>~ÄÑÀæ©\0UY ‹gS§Øâ\30\13°y\3Ë›¦#›WhêMãgÒh\9>u£ñ= ¯lµ\13òz«Þñ\30ðe¹ÐÍ]~“‡A6a\14IàÚq”:7}|x½…¯½;\0293¡•U|…m§gq†­\26\17y]\17,4@ÝBµßâY\25ªvÃ\2çEH\\9ßPõ\7C~ÉVݙ͊úÊ\7\30ØÍö!P€\25â­˜í¡­`µ=\6\6þ18ðh\16\23­ÍÆN?k\4è”k\26ÝPêƒñ\26\16~`Ç·\27Y\7ßÂht¶p\24\0307öÑv¶¸3·\13šbóá]ž-*=¹Ë\22ûø[P\8\22f6\4Ml‹§ÅpÊ\21àô\21\26ºQÑ~¦9\12xéû»l>ΰ=\25ú`\2[\24'\14ý.P6¶xŠhÓàb\02757·\17àn5Q\7ÚÜXéØMOˆé•, ÙâÀsc6\0274œ-\30rؘƒ¬B½å;Rø–ãþñ–Ǫ£šØñ#c\6\27ª\13b\11’j\0\8™A9ÝܾbVݺ\15»\7\16£9í\31Â\5W³\18é\22OEÕ[f‚Ðû\7\26»8T^x\5 @lK ”ÖúPO¢™Ñ­D‹¤ÂØ_\2f¦qñšËvšG(AÑx´«ÐA^œSÂá\7\27’¸TkÓf\2Ýc“În[»ÑÇÒKM\27\11ƒhÍàíæXæ×nR—§v\21Û\22¢©RZÔ²‘cóŒqó\21ÄÍf;’JoÛiIJÒ7\21î\20VJt…dÚÛ)Px׎Ë#\29íßÜÌqU¨HøH³;\30›\27\ +ÙÏò-ôE-®Ïm\30(x€m;µ*Ô†ÑÉ\9cÂ-\30’Ùbÿf;\22ŒYM\21ËzϦÚóTµñ%FÅcv^»\\-\30gÓ¼°3~Õd>>?á\12N`œšSúÛn¡fn$9”Ñ·£F£ÚŽ²\23zX¼\29ƒèT\31F– æ¡©Ò&Ù\7 \20o Ã[<\127;‰–ûqá}HÿñYÑí£¤v)|ÃKcLÆñz‰³\22—$\0319njÁà\6ƒ»¬\31*¹ûPiÆ)GÃQEwßϘu4\3Çyê\15¹i7Eu&³\4÷­ hÖ~>Ò¥cAh]æ]o\1MUi\17²ÇI\9‰¶\17N\8s»\31fô¢Ìî\22\16M\127ãûUt)w¨L½“\12¢\127Ù}ò\ +¢ºû¼&Í{j»ÚV&Ö÷ž4Ë\3!¼\3¹ûÜ\14L\27RTì‰:S_ûí\25„\14Âîîýjmþ\"ÓH÷R9Áýö4i¡ìkŒHÆ:·€Qg˜0îŠ:{¨6½_\12\6ÚŒå.Öä\17¹xYuFÂ<}„Q+\1Æpâ&^<Ê8ì\17ý²ptwU\127ŸiÜlœ›V~\8~ýØ·»c€Ú·CÐw¯+\1K\8,ˆÏ\4>*_ÊÄ/\7±Nׄ\31ñôB8NÇéZ¥h\9¾?F#²Û?^\13Ö\6\17—¾y=Æf>Î{dÔê\24°¤Ä²Ó°¯Ü}دŽåµ‡B\",Ž‹W‹\29noœ³Þ\24ࡹÛ\13d&Ÿp;\0088/{\22q¿\25ÀX\14S\24Mï2‹=˜Eœ\4~…ã¤BÍ(\8‘9Þ¡L¶\26c\31ª-’\"\22j´5™hËá_L²…Á\30Éà·»;È…’\25?0¶pƒŽ¨¿HðG{KSŪ\8£j2E\11âõŸ=^ÿÙ=j\ +Ç%\5WÙ=3¦ÝsIyTœQqö\28\15y‘ wŽT¾\19¤]<\17®ìÛX»\0071¦G±ˆG¥!œûù\0\16ù¼H\0094ª\\Mì\21þ‹Ç}¶Y\11—f×BžDÇnôÛ÷\7vï+yI{w7EظŽÖ0ŸÕq£!\\\20‰=,êM£\8‰‹\16¹5xPù…\14æêÓ\22\23–,shnA^Ö¸h\25Û¸®Y4H#‘3\17u5}À‘Z׺Ÿ\8”YájŒ£cJ\15û€Šú»»U»V{»&»ïLÔ{µ\27è\14ýÞ8²;‚d\1ÝÝ=ùÝ\15mê\9ºRÊ\0017†¬¸\"ܱjˆ\20\3\17”H0\7]©ŽÅ™ô|”h\23éq.Nø\18e\31Ú4\17½Q\4{G£ÞTó\30¢áÂK\20û§¥0ÈðþY\18dðãE†zgJþ”x\14ÖÇÁè\2|îüЖ\6è¹Í®[vÂ*Â\16¾éˆÁ\16ÁãJöÃG‚.G<í©\31Nw\20WK¨Ï\19dO\26@È£\2¥Ø::bF\15¤áÊàUpþö¹\28Þd::w,Ž0†Áñ~ÍQ¸Ö‰\24Rw:›\9\24‹£Ñ<æ¨;¤…£óéQü–æ©î\17Vqǹ³s¤x?©:î4ÄËE¾Ï\ 
+\18\29aeX\30Cy9†°…9Ü\0319âÝšÃË\18@_^?|Ô\5\16ögGó¡{’Œ•§\5\20ýòñrê\31UÐ\26\29*\29ã4äP¶<4ŽÖÛ›\8Ö\127„UÓq\30´\30§Â\127Œñ¾¶NŒÕÉÕ`ºÕd\1ÊÙ\7|€Ÿ÷òŽ±ˆ\26]:F€iG5\30\22\6e\2¾.G¼›}Ä\3 B¤ž£˜\28¦sãàð‘6&m*—ËÁB¦Ÿ^[?4è!%tìð\5jC\25¦c>[9\15¬‡c®\28Þxbå˜}\2Ø=†gŽEṗ\9š€­óê”e€/n\28\5ÌèXâRÁÑ,\14Æ©‰\31j”øiÒ²„@\127,¡Ó\30îù’ϳËÓ£ö\0DH\17«ov\29«¸ìÿ\3\28k°\23‚}uTG\\óí\7\22S<¤tÄ™¢a¢­æ\9®0\26º\"k\28îÁ¸§r¸\15\2TÑ?ÜK\8ÈÀ„\30ÓD_×\16ê\14Õn\19(½Qò\1jÙ»\13)LHu›vFèÆ Ü¦1î±y6ê‹Àb¦Ïz\28[ì\15\29Þ†9aGá®\29Ô\17o/\31^l9O©)LTŽ¸¾R\31J\28Â1 %ìa\30Z\31…²f¼Ya÷N‹•ã”*ó•äË\17o!6Gœ\1ñQPëî‹8ÇîÛtÂ)ä!R¸Ú$ã„:(è̇›í‡¦)ÇïÁ:‘¶\20/$ˆ—¢Þ+ÞF{µ7Än\0292¼|ˆõ\21o£½Ú0\17}ÅÿI\0\11c\21à^çg\28†ûåA”\\íeW_öôÅç\11…÷âVé×ÅW6àH¯ä†;ë\20\\\127ù\14ÐtMÌÐZ×.Âð+Þ´ñƒÒ\26ž‘z\15ל¡›Ÿ•MJ«¯ÅSgX\27\24ŠÂEšr³öÍ:ãq•W:aœ(¼âÚÊkð\8œÚ¼…\9\12\5T\7)͇>Ò\127¡[84Û—…ÉÔÓ\24MÊJ·wÝã\5-‰‹Ç†_âkýŠ'”_¥¯€¾¼©g\0163ù\26-òN\29ÓóNþ½\ +\16‚÷ö\22:IMŸ|\12ï­YÐÛwÏßÐø·×ÁßÿIsïó®Ð;¶‹ßÐúKõV,{\15QöðG»êçå=z\7ì-¡æ\27ZP¼3…ä©\0„$òn–Ë[£ü·£\0a¼+HåÛÓÿwØ—½\27{T¿éÕ§¾ü\"\17ü² Òå·nûŒ\127ªô.ø\16i~+èÃoA¬×!Küzöñ+\19\3:Ÿ¿Åãòû K€‘\12\15(ã/|ú×ÿÔøõEÜß0]ü?\16ø\29Âvï·\"c¯x¨h‰‡Ô~\23ô\17”q\\ô_H\5±àÅ/ëð×½¿ßßúòñ(°ú¸­ö‘ª~˜‹Oq»|<'ú„ÕÀÇ\11‚Ÿ8\11ûÜ\20…ð“\15¤ø£ôûqN>¬¯ûåã%¯\23G>*:õ§\"g1P\4˜ö\25Ç8ÝþxŽûñæ3]}¥ä£Üú™\22˜ÝG\19[J\19ù?îV}|7ìÒ|æ_\ +Ê$ó¥üO-åúD[rl{\0192oBǤ\31Ù\15°²ò%\27ŠÀþY´Î@R¢îÕ[‚\31ßë'\5ãþñ,ñãÎî§ò1§Ï\22V*:™\17r\23èS³Ä>\26d}¼E@”¥\5½þht2_>qpð)Þ—¿¶¸ü5ZDÿ!l]ê¿ë¥üK—?/×ü©³ý9l\127ɪÿâ‚Å\31\3õ§,ó§þÙüAìÿ¦KñG¼×Wÿ²ÿÓôç+Eõ_ñW’ìïãŸ01Lñ\31LM<–õÕ!\27¿üï¤ýQ|¾ÇÂÿBªGÿ®ÇS(€|ٷžú¶Þ\ +±ýË\127%\1™âŸö¿5ª¦|ºËw\27îš“ÿIÔxÎúõdµyUé뙿WV²\127.”¾\7Eò¯I«d˜Ì—Æ«­ÿ&5yÄSÞ⯂Š-nx}Íé;WwÜœ|$\31w.¼\29ñå¹ÿ†µäm/Ú)þÅ'ÜP\ +æ\"û×:Ó\16ÿ³3ir\17ŸÝ÷é.õ鮧û­È\28\1ŽÈ¶<¾ãß–¶eˆ‚bë 8ï×î\11ƒŽãž¿\23ÿ\13\ +eç'þ\23Ç?\9ˆ¿ÉI«ÿó¢'Êû_ïT^í&ÍóT\1ÔÎîÁyåÅÿL¸\127ßãÿ;¦Â\127™RôOG¾?þ\3H\6/üÇ‘Í™óÏ9^þ\29ÇPø—#¬P\21¸øF0hÁf\9lüó„¯pû\7\18¨aîšCOX5»\127\0¢æ½\22™k÷ïPçýŸ\2ÿ;©U\28E‹òß\4|´¾½=KE¹kËÌe_ã»B‹\31ŸxÁߧôáOñ6~˜©_™h÷\25¯ƒW[¯ðž5>Šõq¹\22>}ÿJ>m?{³â:\14ÿ¹\25·ôåù‚¾â¢”\"å^®êX?¾þþ=”ñb|x¶ó=xYÝõØKOä®ÿ\29ÛßüӞ·×A\6U³úöØ|\\|«\30‡ïˆû†Öå6ôU[ù^ø̤ù\2·/s3‘â|ÿúØÊx‘Ú\6û\8u¾1VžÆWG}qzõÚÇ÷Í·ŸËµh}ëÎÇœG÷ߺvo}€¹kâ¥à²Sfí¨eŒwŠãå杻oç\21¹.\30stx±¸œr¼k\\Å“Êq¿\24­§h‡\14÷ÞÜ×x>¸`µ0\9ÝPÆ\11¹:«oàŽ>Ø¢Ë8ùÊnóÖüÏgt\11í)Yìw\6—*\17s‘®Sßø¤îTi\19ØUnÅuG)\11î\16Í\23zíS³õ\28ªVÒ\0\0§ë\13íŠ#n÷'ªÚµmLŘ}N\17%í×'\18Ã\ +§Hß‹ïM&Æö’J¤©º}Hp[ÚÛ÷pù\31ÕQOkÒQÞFͳïéZ¬Gë\19`“§·\12\15<å>øÿ\27Ìö}l‹þÛ‡¸Æü\14ìD‘;òzéÓ\\Þ>_ñz\19óÕ?Єӥ?G¥O\8¿¾\4Æ·K­éǬÕÃ¥èË\25®æóF…–·=Hè£T%‹ž²qó\26×\23z8\ +CQüµ—xZ(N®zÄ\16_(*Ž=ÞÝ\25|?ðÑ–>\23é\0197í~£ëô[¯\30\3„÷ú\30Õ aNá‹8^0ù»<$¶ïá– ïÝ|–\\h“4tõàK\3wÿÛj¿\12}\\sÁ\25öx{¢þ÷ôQ\7ØXå{\ +`ˆ˜\127>¦à_*\13ëš¾_Uöù\13Öú0‘Ê5”›Á¿\6ƒÀƒÿÿÚY&ÿ¯E\26(\20lÿ%\8\ +k©x¦ï˜éâ\25ÿj\1B\22Ï\28âì3Þ#ô\31æèŽ\23”‡ïÙ\13A¯-7·Tx/\20œ\25Â\25Æo¦Ö©ø>/ñÊN¼Òšn Þ·—ƒ+f\21Rà3·#¹³7\18ij!û\30Ç\2m\29ò\18\7ócãõ)¸Â„`?c.û¸\3™Û®\22OôœuU»—WÏN\127.ãácãÅxÌOåt<ÞÜk½Äüé¡\27^~ü4ÛþY/\19¬Is©\20m=z9|f|b\1÷ò\6/ò•ÞŽëÀrÿUÀ€ü}3Æ\0210WdûÑÓPu’ƒ¸\16ç•;IeÑzMÎ\27\2^\18˜\28>M\16&iú\17³ûÿ^ïU“t\5ý‹Õ´Òc瑱\4\23^-ƒ2ù¨×”YL\8}Þ9K\31o¾åYK}ï±ÍÙ·|Ðôó÷y%’¨bûŽKf5«ã2¡+€ž^\15cb¡ü—é`:²÷¹ DÍqñϯNšªäUÊ\13/sªd\19^ï)¶ç\16w•¼t4CP\16Îæ{t+ìU©6Öÿì»kÞÆè‹G\22nËê½–õX.óù\"^3ç[0)R!t´ßÞ]™\26Ñ‚\26ráß\8^B\12ø\9gPØò¦I‘EzzöJ“÷@@D”™y«§œ½áÜgx\15Ë9î00h¹;UÆÜ\13ñÈdFæ`8ò“\14í§ñüi}?\14U\31dQ£{\16\0161HkóD)ß—l;Oƒï¼Nm¸¬eYŠ^\13¥¥/_adüÐ,Ü[\9+ø½j\127ÜÈ\28µ\127¯·ó/²²\27\ +\4᫼\23¤‘o\ +Æ­ñn[ixÎÚ>G5ÿ\14,±0P\29¿½£¯ùkÿ­B¢ukë9°AS|·ÞEY\18\12¼¸,(\16cü5\20>7°Š¤ažR\24uhã¨ï\26–\26’h¬èV^±„G\8aI³Æñ#\19U7B\14›Ú*ì\6\11ÿèLSÁÒ\7n—5—·Ól\25†Å2[†õ…\6…\27L€\ +qª\14‘\15±àÿýwün:»;µ4Þ¬Ñد¹Æ[†?G\0117§Ýø¾ý3«Ê mX¿OwÓ…ØíþÙ‰Þê%ׇuŸrXµ¶WÙ]Öt\14¬ÐàÅÿöëù„Ýæï>Œö|†oõ/æ\16C=bp\0205Ži›¹|„-ž\15\4¯\21|Å{%«gõƒ–ò\26±íßþk…†_ÌøÕ\26\6Y-rx0ÚïõÉÇÓ\21±\22OÿtŒ\16\13’ü/½\31o‚Ò]Öÿ­u=ËÐ%ƒ¼Îû0‰;×Ú°„ 
§Y\9\4\2¤Ð$äaKIt^Ic?©ÙÖŠàÿ¹þ‹ÎÖVq™tÔ¾á;®\ +à\17—Ç0jðߊ6\20¯ìß\25°Œ(d€Hí\4Â\23cgt»}@òâ‚0\13E]´\2°‰\11Z\4¡Ó<=Ì\8vWéöȱÔ`!Ïy@ßœßI{\23\20\16Ð\7v°=?¶¸ÚÆüjaH\8C[™ƒh{*]ŽZt\18„t\1\19\9wˆcèøßÇ8s.tÅg»·°Æí²GÌ{ŒÁé!¤\18Iи7LÄ\8:ƒ\24\25çóÛ\127+Î?ÎéçÆ\19‹Â\19áiñ\4\21qp‡€\14\30\15¯Õ>†Ç-ÚmoLÈBv1Åa,-ïõÿ¿I\2¯\19lûQ,C\13»„\9E\28»–\26j§o²\127æ›Psÿò²½‡iúŽƒS‘Hùeo!R4Î\19Q¤¾Ë\30,m¾€~hghO$F'ˆSJTw`\21¶w»wRa”aá\25\7ŽZ’è\20ëwö\\\15¦—â0\4©8ûS\28GH©G•.©š[ð'ËF\20\28\12*Ô ð|Ld$\ +ó̓s\12÷\1Ý0{\30\21R«GMrrÏB;f”œ\7$f|E\24?zâƒ['E¶¸ð¦ŽåT±\2\20\31~ò\6ß)=\11w¸\\G¤Êuü•ç[\3cÿ5Õ=ª\16‘~‹ŸË/\24¿U——˜¡ú\31\31&Ð4€%ç£ÍŸ\20»ÏÅçòçß\21~á0Vpø¿ò¯zÿ\31®D°»", + ["length"]=31488, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=4938, diff --git a/tex/context/patterns/lang-zh.lua b/tex/context/patterns/lang-zh.lua index 6232c9c75..78df4bc5b 100644 --- a/tex/context/patterns/lang-zh.lua +++ b/tex/context/patterns/lang-zh.lua @@ -100,7 +100,11 @@ return { }, ["patterns"]={ ["characters"]="'abcdefghijklmnopqrstuwxyzü’", - ["data"]="a1b a1c a1d a1f a1g a1h a1j a1k a1l a1m a1p a1q a1r a1s a1t a1w a1x a1y a1z e1b e1c e1d e1f e1g e1h e1j e1k e1l e1m e1p e1q e1s e1t e1w e1x e1y e1z 1ga g1b g1c g1d 1ge g1f g1g g1h g1j g1k g1l g1m g1n 1go g1p g1q g1r g1s g1t 1gu g1w g1x g1y g1z i1b i1c i1d i1f i1g i1h i1j i1k i1l i1m i1p i1q i1r i1s i1t i1w i1x i1y i1z 1na n1b n1c n1d 1ne n1f n1h 1ni n1j n1k n1l n1m n1n 1no n1p n1q n1r n1s n1t 1nu 1nü n1w n1x n1y n1z o1b o1c o1d o1f o1g o1h o1j o1k o1l o1m o1p o1q o1r o1s o1t o1w o1x o1y o1z 1ra r1b r1c r1d 1re r1f r1g r1h 1ri r1j r1k r1l r1m r1n 1ro r1p r1q r1r r1s r1t 1ru r1w r1x r1y r1z u1b u1c u1d u1f u1g u1h u1j u1k u1l u1m u1p u1q u1r u1s u1t u1w u1x u1y u1z ü1b ü1c ü1d ü1f ü1g ü1h ü1j ü1k ü1l ü1m ü1n ü1p ü1q ü1r ü1s ü1t ü1w ü1x ü1y ü1z '1a ’1a '1e ’1e '1o ’1o", + ["compression"]="zlib", + ["data"]="xÚ\29ÎÉqã@\12@ÑTxó™!qlˆj-„Ôr—–Ó¤1¹øæL\28É<øð–Ëõˆeþ3-ó»>´Óª½\14:ꤳ.ºªë¦OÝõÐS¯)XÁ\ +V°‚\21¬`\5+XÁ\ +V°‚\19œà\4'8Á™×eZY+keÍkØ;­Úë £N:kó?i_tU×MŸþ>ì»\30zê55vc7vã6nã6nã6nã6^ã5^ã5^c5Vcµºu[¦·ñ¶ºu\11{§½w³\15:ꤳܺ¥}ÑU]7¹u\27úþò㮇žzMé\3é\3é\3\9O\7§\15$<á\9OxB\19šÐ„&4YÉJVÖÁ}™:¯óz\29ÜÃÞi•£{³\15:ꤳ\28ÝӾ誮›\28݇}×CO½¦Á\31üÁ\31ìÁ\30ìÁ\29ÜÁ\29ÜÁ\27¼Á\27¼Á\27¬Á\26¬Áúþ‚\25ï5>jìj¬5ö5\0145Ž5N5Î5¶\26—\26×\26½Æ­Æg{Gg×ô6/ÓÏß\127æÛ\28¿¯ðÊßWþ\0073™ü!", + ["length"]=802, ["minhyphenmax"]=1, ["minhyphenmin"]=1, ["n"]=194, diff --git a/tex/context/patterns/word-th.lua b/tex/context/patterns/word-th.lua new file mode 100644 index 000000000..a32da5a43 --- /dev/null +++ b/tex/context/patterns/word-th.lua @@ -0,0 +1,482 @@ +return { + ["comment"]="The data is taken from http://thailinux.gits.net.th/websvn/wsvn/software.swath by Phaisarn Charoenpornsawat and Theppitak Karoonboonyanan.", + ["copyright"]="gnu general public license", + ["language"]="th", + ["lists"]={ + { + ["compression"]="zlib", + ["data"]="xÚuXn£H\12~\21\30µTHICW·§\5e!ÒV\20¡œÈQ¥,ÑÁÛð(ÇøógLš•ª”ñ7ãñxü;ó\24Ïc=·y,æq˜ÇC4\7R3ŸBÊñ\1êòµ\19êòQ\25©áG.\127½\1gÁzr«æéi\30OóØ…-§W›÷OÀ‚\8+uYô]¨ËÇyü{\30ÇH–Ç2ñ=0Ÿž—ý\"ù\23SŠÞDHdÿA\22•\11\6R\23Ä\15$\28í(gN }įÍ\25\19aºp˜–=lÜ\ +“T–+u\\”'ºì×(Œ.reâ\14GHœbcœÈ\3LJ@%2ÔÂ'›Ç½hjŠ\5;ȤŠš©=Vˆ”\11ÃÖ‘T'z\15\16špPçBMD£…\3ÂxÙüm]§z\12»â\26\18Þ0¶øà\21„WKX†¥-z\22@T\23¾³HöO„Ô\6Uéf\7‘\0002\0318I\4Ž‚J‚æa\3˜¢–°\0¿Eª^L\13ÊÍÄ6^\1\15´Ý«`]Õ^Ž’à\4\28‡\3Ÿ¡\24!M/a‰Â¸‚eùø\"†$\"O{ãø\2KݦHðžö°‹xÄ„ªˆ#\30|#ÇÁ]³,/iŠá²\14²ƒyvc·ø\5\8’\5\29ÁFNÂíd*M)\\¥Z]\22É~†•ò{&SÎêEÈw™{Þ\2AsŸpǔƬ6dL\6\26ÐFŸ©\12:ˆÄñ„ëÊeúÑÜ1\21w…·Äwã$RßUE6âͯ¤ª¢3\27ßDIA\8\1\22£~á‚Ûw€\20W\0\16V]!èÀZS˜‚üzjÐãMØÕd\23\20ƒàÚÐ\127\3©§'©qg¼¢Oh)“õGaÔ‰Ø9ôš\5\21„ÝJœ,£’Ï. 
+ Q;¨ö|\7à6,€p\24”0áÜÙWžB]ô&F˜ÈàMbcŒ¨n¤‰¤p¸š¡8æ\9'\3\6µÓ è\13Ô\9>)¤\9òæ\18z¯4<\13Bù]\26!U\21V0­\4\29\"lÿ‹8(YhŠ¨Ü{\25Ž.Ùä\17í1öáWHÓî\17uÍJ-ŒÎ€†6\127\22‘ßàÈG¦ êk\20>~9KPñ\17±y‘ô§\5ʈ'ÛÉoJ`0 fp¼F¬\16ÎtMK\31©÷_?)3í\26Tnëâ?\1‰ç\19‹íT\18—,\13l2Ý\19#`­¹Y\15\7 0(™J<\16.\15\1¨“^=p\3`$\24W\6¥•4O+\3,ˆ¬b¤4ˆÆ\\\0269·¢™¼ÛŠNó\28k­rkü¯™\12K\30b\19³Ç“OŸ†eÄN\14Syl¿Õ\0K9ü\3j-\30œÛ¸¡[Õ\22;}…Ú¡ˆx\27±|d2WKÂwŠ•ƒtåªÖÆ\23\11s'W$Y~Uê7Ö\2¥‘þb)£e\0¢¬2¾ùøy¹x‰`ônVs\18àj\3MO˜\2\14¿4cêŒo Õr\0”\24%KŒ\0\\\17Ð9\8—2 òüå²æ\15\1n \14¢\9=\6DùÅJo\16­yËk`ø2\9qz3)\4ášá6…Ù¼­&¦ÞÒƒ:0OÐÑ7ÙÐ…x\0225\21¢ÈC ß\2A%|ë\ +¤†i\11\4\19‚Äb¹\26\29Þ\\‘\24L©R÷ÒrJÄ`xAe¤¤F.#Ö‚6U‰i \7¹ÒŸÕs\29-ðD¬”2SWÄ,‚B0f=¬ÔB%Ñâ!7y\0063)„ãjU¸[~æAb#µ®Ú¹:ÊyÖÔ_Ñs®6Î(Mo¤Â¾\26º³éuÅzgוçØS§¿©4í€bp\23ßš¶5a\26å[æñÎw5XýáZÎO\11-\21ssÌþ%óëjÖŒ\15°–\9ð@¹p!Z\28&´¤\30­V$\23t\21ày±\22\25O{Þ\25Ìø\0056ô.\23rôµ2H\5ó¬*TrZ\0RzV#±àOX(&*dýw6Â\31\0201XRí,)‹\\\9ßÑo÷k¨Ð²\31\29`PŽ4Ì÷êÒpP0s\22FjYŽïX\"¬\1\28–‹è›Ú¸eµ\24[ÛЊKÏ}d~H]ÕÙG,²S_á5®q) 2êуë½4Ñž·eL\23é«A\16€Ï\5lI\"\ +[ðT:Þt\16žãž%À&\12û\25\21÷È=\3Ë*{>Ã\28y”ÍŒÂwè\16,¡ñnJð„ï%{‹Ðz\8íÓý%\11\22Ìëp®‘8°ý\16c|EÞjÙ²ìäoÓ\\´Ò\1ŸM›-R1ˆ~ÿ\2¸\16œ~\17¾-,|\29\8ß\12®y1Ç’Ã_à \23&ÇËÉ¥eî\11uwBË÷€„Ó\11Õí\1‘/ô*[þˆjª\16`ÃDmÔ\30ÞzšmU‹juu\3É2tl~Ó¹Ì×`|åUnØ}º÷ÅEÿƒ#túV£Ñ¼ð¤ûÈÌvµ²I­ü­¤~SJ÷h€Vý’”/ö\13b[«[ ýç#€¦B½mŽ5\30g¨-?¶oVµ‘ÐðéªIqS°~È­ûÆ\13¯žš\6\22\2Cô?Õý˜:", + ["filename"]="tdict-city.txt", + ["length"]=5991, + }, + { + ["compression"]="zlib", + ["data"]="xÚmRíjÂ@\16|•<ª'¦*þŠm“\31Ú˜\6[E¨¢°÷6û(îÍô>ÀBÈîÌÍ~Þ©\12*NeT¹VðD\3õIlÞ]¥\3ð3à\31žlU¾\0²ò¦ò†ø.%\27ñ_\19\31!yQ9¥ó\11Ä{â\13‚§2ÿ·Jƒ(Gö#†\28RKg£{kÓÌ’¦¥9Ã@¢²«)\19\11ójD\7¢¡©Í8\28OQè­ß¡\49ËnY¨F‡\7–¼†¡ý*Õ9\26ýNé+´þ×å%t\30t=2L\5\27¾)æÍÅ]\0242g÷‹‚ÂN¬\ +g°]´y\26[\0\23V`ߤ\29öX~\9ükÒ;\\îð„‹ø\22)7O8f™£Y¾\31÷\31\21…IòË÷qK ¼)Du¼¸\1Àœ±z\0\23\27ç\11", + ["filename"]="tdict-collection.txt", + ["length"]=708, + }, + { + ["compression"]="zlib", + ["data"]="xÚ]’ÚH’~\21=ªa\21\11\13öíÝ\25Œ\5s`¡UÈ#B\29Ý\12xàmúQ¶•™ßWY%á¾\8G\15™YÿªÊÿªy»MÞn•ü+ßnç·ÛâíVdoŠ½¿\3ÍûoÀ·îí¶•\31÷·ÛŽØ—·Ûêív•â\14Óƒg_hóvŸK\0313}oðõý/Qgé³\22>\19{uúݤm\27À¿ßn\127¾Ý¾²p-M´\2Ø\4¬æŸÒÓZš¸¼ÝŽ\28Þ\8áíö]~ë€\22ÚÚ\\Và}h§þïý‰\13¤Óx\21x%?&D•òïUZxïì…C<£Ô}©@_ªïÍ\6þŽúòv›K§Ÿ\7¨þ÷~°÷O2•\9Kè˜Vø~¿V¤„|áìß\127\29äk\127&\\ÊX/\ +_å£XÛ¿tÛ\16x_£\0027\25Å;ü÷û²\19UÊdß{ÝcœßÙÑý½:~É.Ídz\19tn€Ì¢¯cðÑ\3ïÕV\30ˆˆm?óûg\1ŸuŸ9r?‘\23\2ü”ºòÖ‚l©÷\22nõ;j\"¨ÙÛ-×V&è”»ßPµT¯eûU²t\15\8ò!ö>´©¬ß³k×`?ÒqòTÈŸdž3ifé\8}O+\15¿°ŠÍ=\23´\29Ý©ìã'ì¦N7ô\20l ÐU™êv‚Ge\14y\127 mŸ¥îE±º9\27\25wDz)V¿\18h}o\127¡Ü{¡/¬t•þßklFP²O\15$Ü…ÐÊîèQ5Ï{Nž#ÿ.º›s\28Š5>ÎE¹\18\9;\5\26œ¬\19Xð‘„ÐVC\22˜Û\28Ã\22m‰-äû.lõ_fíhýòè4J\27\19\22I«\ +'¸íüê9B¿\29FhS®H€Áôõ\ +\25÷ûg-“\0067øQŒŽe\"%ò\1ª\31Ežô·‘U¨“\14\14î°…Îw¶¹\7}–=G‰à¾à^\14†?\8¡x‰em‡õJ!l\30\17¢EÔ!îR¸ÿÝרäƒ\6yà\11éÆßY‰\15Èi\3\15«þ®RßlØo`?¶B#5jH¾ÂOÙaåXø\21\17Il‡ðœ\16\ +ùqæ©PÔ7\17Ö7U(<á;\4Z„=’›¼Z×ZªŸšmå³\9þ~wu8iÖÈUjL(Èz_²\127ÉtT>}õ´\ +ºUXê³Ô¸\11ö\"=\5‚Êç<‘Ï„\15\14v»øìÎýDŽŸ‹î¾š\13)ó˜Œ\30\8õÃþ®D/»>hàQ‰~\5·‚=\18Õ&‹¡jÊ;|#¬µ\2k¼§b\16\0189wZ™Á•\0284\3ja\12\1PF\21Õ¯{í#è}GUúdÀV¢å/ž\15å<\ +Û¼ÿÀ}‰EÒÒ62\31“¿ÂÚŒ *€1Ñ•ðñ@XC%S5<\16Ž\16²\1u'\16J–Â\7?Ç°œ\6‡ê;Þ‰´4T4n_t\22œý–l\"×`ž~\15÷+8\27 úÕŸ©Ê‘\0110ñÀÂ\3\27\2SY\23Õf\7(Õͦ\24X!*½Á(öµª+\30kúð\0…‚¹í»*›A‰Š\9±\\™QY\18Àt¤F)A‡œI5[\4)f:T;{­h.Ó»‘¡\8ªWÒ:é“jéM÷öIz1Si.{aæùÈÜ\24vÚ\2e¦ËVégÛ\20°&=vÁ†/T÷\3€Ž–1Ö´¹F—g\14ej\15‘ñª|\3„~\25~¨*7—£&FO_üª¨_¾k5!­þ]\ +‹%ƒ¾î\28ñ=Öc\2{è\23€¿rš\13sù@ëPÆ\1¢õ\127rk÷\"[aîM‡9Lé#yÈ<:Phž\21Ç\9º¹ö²†Clÿ£\27´3(¾xP\\\24'‹´º\27çú\9Ÿd/ÞÈ$Ÿä“j\29Ù\0092íOPƒ\127*ö˜˜\11ô™{`í]\4È?²\8O0N\22¤ÏBF0U&›A.¯‡bj\1Ã\"ÀT\21æ\9¼N`\29]?Ã\2rè‹êæ$\28䟙E*\31ZþRîsâvÓ\9M<03¡\14¶¨RæÀ_µL²Na5\18\26ìëE¬A\6›’rìo/f\22ƒDHƒ3©*\24I\25Àæ_é₱¬ñ(š……og¯rm*°¹\ 
+   ... [zlib-compressed binary payload: remainder of the "data" string is not reproducible in plain text] ...
+   ["filename"]="tdict-common.txt",
+   ["length"]=27272,
+  },
+  {
+   ["compression"]="zlib",
+   ["data"]= ... [zlib-compressed binary payload omitted] ...,
+   ["filename"]="tdict-country.txt",
+   ["length"]=5128,
+  },
+  {
+   ["compression"]="zlib",
+   ["data"]= ... [zlib-compressed binary payload omitted] ...,
+   ["filename"]="tdict-district.txt",
+   ["length"]=2762,
+  },
+  {
+   ["compression"]="zlib",
+   ["data"]= ... [zlib-compressed binary payload omitted] ...,
+   ["filename"]="tdict-geo.txt",
+   ["length"]=1324,
+  },
+  {
+   ["compression"]="zlib",
+   ["data"]= ... [zlib-compressed binary payload omitted] ...,
+   ["filename"]="tdict-ict.txt",
+   ["length"]=6299,
+  },
+  {
+   ["compression"]="zlib",
+   ["data"]= ... [zlib-compressed binary payload omitted] ...,
+   ["filename"]="tdict-lang-ethnic.txt",
+   ["length"]=969,
+  },
+  {
+   ["compression"]="zlib",
+   ["data"]= ... [zlib-compressed binary payload omitted] ...,
+   ["filename"]="tdict-science.txt",
+   ["length"]=5492,
+  },
+  {
+   ["compression"]="zlib",
+   ["data"]= ... [zlib-compressed binary payload omitted] ...,
+   ["filename"]="tdict-spell.txt",
+   ["length"]=1787,
+  },
+  {
+   ["compression"]="zlib",
+   ["data"]= ... [zlib-compressed binary payload omitted] ...,
+   ["filename"]="tdict-std-compound.txt",
+   ["length"]=125327,
+  },
+  {
+   ["compression"]="zlib",
+   ["data"]= ... [zlib-compressed binary payload continues past this point; filename and length of this entry appear further on in the patch] ...
*ûX\17Åiâ™k\12ŒwW'\15H‚Ó\4Ü)™0Uú\19>VÄ$8VD¼Ò'9‰+n¡Ç¼\17×DkK\13\0237\0095¨Ø±X$f’Q¸¸˜6\4ãÕßÞñKKª„+\24¬Ô\6C.q—\29ü\6ÙÊt#£…\0u½Xs\11‡wŒ¹ë&µtߤ¶é\27krž§\9k&\25ýdh\14\28ø\13Új•!³\2¸\\\12F\31È\21\\\19lØÞ|ãÔ¾7äcÙà……Ùä\11|æ†òæé\27kr®ïÑjnô‰îYµ\7\29¸KX`cóÆ\\Œ\26ú†¿UkÁTAÇ-l*£³S\18׌ýF¸çœÕò}§ñ¾\0174ÉPþCš0s\9דD¦\27±ïQ^»w\19ÿÀÆw\3%ß\13\27\25oìUM$AXÀxDL\27Ù\25gœ)\25\26Æ\1Ì(Ê°¾¾Ô'hÝoØHzã좣Ž\0263ï\24Wœ²B°Í\16=‡\24¤|\15e\7e~“ÿ\23<áLI/„\0…I‹ÔZëŸË¶s\21»\19)LÄQ¯~3‘1>Õ'\18\29GÒLŸH¶\31Iaµ„òf¤þ\6y\18«îo`óº‰\30£O˜@Š\22¹µ\29Î'äŒ]ž†½\13\1~7^&Fì7Ò3VP\3uí¾•ž†«Ž{\24¹ƒª\5*¹âJ®•RÔ\15‚øT\2¯º‹wýé°Þ˜\24§†}dcì\13[PhO¼!-\ +ƒï,¦¯\9Óü7žæo¸ëÝÈTº™H4Ê\16„R´ö7V\0\26IÑ}\1\0«7Gª“^FjpŒ\27ÁZ\\ä˜$¬Àª\0011å‰\11VŽ–­tÅ+?+aùY!KWÎò³¢–ö|¯ÉÃ9Pñ\27ƒÎ•Î]ñ(\3%\24yK”\"à\24\127Ué\19×T˜qo¸”;6#ßÐI:ò©OÐÑ5b=~¾„ÓÊ\11t±^Tõ4îó\26PÏá)lÌ\15Ñ\28_<¸½@\21æ\5ª0/P…y&Ã\11+å¾°^Ì\11äI/8¦¼°\5ë\11û¿\127!—ö/á€ü¢î_¢\\¬’ Þ·ƒzmt•?š®“l¯¼F\19‰50®5ý\18ŽÞ/¸Øy\17Qê‹\28½2Pgú\20üJÊ…ƒ&Yà\0094\17ؼ°ãÞ\23Ü\9¾àÒ÷\5×·/|—û‚;Î\23¹ÙŒ*!/80¿°öÚ\11® _È\5‚r\26Y4\26èK½@Üð\2qÄ\11Tn^¢ô‰²ébÜ\7úu£\23]B‚\23\8>^DµÅ|Ô\7*èƒkü\3oë'Ô\0303\1RÊ\30¿èQÞ\30\"\23õ$\23\27\28~âO*q^1\21ï\26WÆ%þÁ'\\ÐJ\23”ü‚Ýã\11oë_° ~“q¶\24j¯aߪœö€´ìöÈ\31^Ø£W\4Ã’èÉáŽKä\15:/©kŸ\0236G{qΑ^Üyç%õ·õ’úzq†š/Î\9Õ‹óÏ\21&„¹<É.ð%Ž;Úœ¾ðÓ\26?½Ã\14î%Ê…“\13ÏTZh:¸™úIK€¸D¿Èl\19fàk¤£­øtŒ* ÀQ»—±j(25gÜ\26\16ºöV¤/ñ\9\ +@\21§Wµ‰-n@¶\"`\\ å&ü‰BÕø„Y\19ø\12 .·¢¼AÓ,\12¤-Ý*Æ»°-®\17¶¬r´…}Í–ì_J‡[Æ\5ã#—\\1gÞº,#æÈ°Ìм;~ׯ ?ßBÕ'<-Éý}\11jÆINlaeIt§½•…F»\13TøŽ\14wŒ_‘±\9²a0o\12\2ãVŸ ÎÞRÁŽ\14k‘æ“ô\14/íÌ]ò8\3\23ûŽ]Ž~S`g\\¦@º\0k\5\"d€/\3\24û”¡`Žô¤nŠ$Êä\22º:[\\\"lIÇ«e¬Ê— 圹Ɗß\"Y‹¦\9ßP &\4«i¬\9º\20fØ\11IårJ5IÔ\19ç–\28ò\27¬é¿ë\19»ÄÞ’c÷#ã3ƒ\30`Á?[p\21,¹\25Zø\6è\25tî…K¾W4c¼\7IªÒ\28OZ7Z(Sˆ’ÇfËóV\11WXŒ\11é>§ôµf\26k¹Ÿ´2\24¬¥xÕ'\\(\3·\12:.ß+\23®Ñ'\30ü­¸!*\25·\12z®\19Õê1+þ0£÷˜ûj禯Ž¦¯‘ìFæ50z¹\27Â\19ºùáð ?\0/yd\25÷º„\19%Ö­ˆñoС:‘2\20üs%Q½¶n¥@\0Ì{`ã¾\21\15SG€œA‰jèÙe\127À78“l\17At+®ŸM\25zô¢áJŠÞÑËRk¢Öaæ\5¤m\29î¸\20fåæ\27°¸òÆ\9?îÁ·\8Ó¾å(o[8\0066¾ß\1Î\0%sJæ„Ý[Å8´ô\15\"‰î\15>2PÍL{á\14}¡a)Z(Ø‘oi½ˆx¥OémÍ\22’ˆ\0pÉ­Q;ä\9ÛUÝ\0232>\11¸\4·N…Ã;\ +\ +[@Ü·å—Ý{\14$f[ZJ\12n=¦±l\24ê<ô–6f\25ã3À«˜\8ÜʘêS^\28ˆ9ôÿs2\26ŠÚq¹hSþ+€ÛpM›\15w\127ª\6\30ð\17OíDü\11Ì\2mæò‘X\"ÑÃaL\18Õ4rŠÑ Ã'ç\22b\9O\25d(CÀO\19\23u\24 5 ¸Æd’jû\4ƒÞo\3U5 Õt7\15§Þ\28sPxšòž,ǨDà¦#@|Šj~9kù;¬\6å †\27«=*å‘×&ÆÆ×HŽVËŒxü‡ä\16Ðä|>Ëõã±\20LW\8\1cºO¹œ:W®Z3\18™´Lb*\5ˆ%õõwD^ŽÝ\4H\21ã>¬z\0¯.\11xŒ7–ÊŽ÷ȶ\1>«´\12àÝ  ¯ËpÄbÀ¥\8\19^žš» ]ã*ò¢O–y-F¦Oa9T\0¥bJVb\7µ\17Ýñ \12WÆ~¨„\0185[]²XÂBº¬\9ˆÃŒ£Ã­Ãg´ræbB\23ó\11'š%À-r*yöP…ÅœÃ$\0L]›\7êÚ\21>P·ÜÿAj1ù”rË\18At®‘³3\13ÆK—_)'£\21“^\1\26¸\22ŶcÎÀÿø\28ŽæÀ=êë\23Jö‹\15hÀ;7‡:*\28ý‚\23\5üf.øE'¹\21\15€_¢ÔŸ2¨\2\127qi—¡õW˜oãÂP¡¬ê04W…Ç(xdOò\1Sಜ\13¤\21൵äJÖ¾9T’sÖOÎ9‚²‚a\"f†˜\30\"I\1rü®}ÚE÷Òù6ŒGH\13c\4±4\26«à-\25¬Ø\20\14T³Ž0)\\ļ2cÃr\0Pw©S–\28B§\\öNÓ\16¡\9u´§ãÉßLº±&€GæÍÙR\20Ô{—é=@Ì\2±\23;¢{Y)Mm\13‡þëÞlåòYÈåü\12q/Y綰ןÿÙ6’ÂÚº\25!a†djm©Q­o‰Q²'¿·Sîą̈˜´vV\29Ì8ÑFËTÅÆÕ¯ñÍ\3*\11\27ŽŽKîWxMã\25\28é.öÍq|ÿ“S©Ó'&õÜ'[öB\2’qs\2j%ŽkWy`øZ­Ùé\28SÍÚ\0ê¸e\ +RìУÓ\18«ž3HŸº\17à\20‹\1nG\25oŽ”CíÓ1Zš\26#uÏÆÄL…Ø¢w¼ƒØ¢µXÔÀèGI‹”ôDŠýå\23l­;\29ä\29NËÀœK‡-\5>Ã|_ÇZ³ ™íô\30&\17Œwî-j§\6Ò«\1<Ôß)¿hðW°ì\3Ô\1ô\ +%Arß¹ß6.`63:‡ÏŒ[V\29\15\27I·2))øƒxÇ\26\11Æ™±Éî–?Æ-*‡ø\7Ë4…À\20G1¸q·lŠŸG¼`„(J\4ú€§\5v\0\7Yä‘ÌG\7\24£\14\31ó\0275›è\17ä¬aÃGvŽûòœ¼áäB\"×79i\19¼@\29B¨Ñbè‚Õç0¬YªZ¼*ÑD\18JˆéN[¢ÃpF¸£’Eê¿å!–7ÎùG™ž.\24ÌÇØ<â/\11¤\3&˜\1“Ž\6’\28Ek%Ã\0009â’/\23m±)\27Ö3õ\28Bªq¦7n§74\28†’Zý´!»\27÷†©ä•áW·²1\13CnF\12q\2„„oC„\26•þp«Z€=cÏB ©ìS\28³ìù\4\22‚œœ¸¦\127ãÍqP\";ñW,¢\0ƒö\31z¡\\¢z¢K\21©\31<¤Ž\24;üÒåðÿšûÍ\18š\22Œ÷â\20åÌŒw–d\5Ò‡\4¤QÒ…›0\\Pê\7Ü`³\1×xZ±OìŽI\29“~ºïÜP¼±š©êP\ +$\30\31ªËj0´Kµr_ÜYçH:\5\17ÇËþ<Þ8'½=g/ïB\18\5\24à\15×µs^ž²5;8|¦þ\26I\31nÞ\ 
+%F³ØÁY0§äÎQº\1ZB\25''':¹ÇÔ\0254\30#Š‡ØuL2ê!ÚWJ.'Í›ñƒ*\30a\21f8&u\12|ý†ù+çMÚQ<\31T<¦â¾GK£VeøŒÆ~Y\28§\11ÆGqmxrŒÖáÎŽµ¸\13˜ñ\7\31h‘n%\15b'Øòä­\19ÑIø#S<ñ(¾FÊkxæ’ÝŒtšè\24.ž(¯56u\9;^INØ»#ŠÄÑá\30fâ\8Öœ«¢B<ŠèKþÉŠ19;£c)›Ç¥Ã=ã[–‰@{cÎ\31!;qiÃ@ZÚ‚„n\\õJ¾¬göÑam\29õJÁ¸g<\"”`ÆÙ‘|\18ý ûáß‘ñ§å¾—£~é~ñ;)\3ÁÉEÎç1¤ë0I\24\18\23)8v2øž-H\29£÷\24·×L½$Ôh² ‘®\3Ãc\\´x75Ž­Yk×Xòy\0298”é\1B2URqØ Ž\7fH•\6\21¦¢\13Á¸l\7\1©u¤2MU²@)H¸VÐI\0)sؼ.x»úÓŽG^‰€O˜§ŒÁ\31\0001Ì׌3W‹ÙP9GÆ+'¥=}}c£ÛŠ\19¯o'Yût]7¿©ÜDRQð#-^­O²¬ùÔ¿smÖ°œ\ +˜£œ)c¤ž\26vd\14RÉgš“˜suî‡\13ÞÛNØ¥Q\14õå\\®S\27W×Á\9ùš\3þ‚zÔÌ0i·n¦lY\\\11\29$S«º¸ªfQ.r=3>Z–Ò\25/ãÀ¿\24\24\5¹œ•“\20$Áí_³¿úu‘~™Qyr$“ª7ÀÍù –£5â/¼rïíù:Ñx¯Ñòo‹±\15EÀ\17ªKÞ».ôˆ\0BLº¸T*\7hùFAç¡PÊ\13ƒ#\3•X·î2¶e¯1yú½ñ®Òc‰‰m<ôä\")w×âV#‡ð—?!ç+Œó¤Ú)Éq±‘åµ,RmY\5—q˸\24Á¸>\11¤\29o@ZöRŸ“\18è’qϸb‘”Ÿ[×é*ézgK\"\3ú‰³\8qi\0130\5Ñ\1 \13ßp¯Ý³â\0c\21Ü6Ä;û܆pŽÉÿFÆq»Õ¡e:™L±o8\11ƒv¼\27æ:»›ûˆŽ·tݨœ-ø›*8§\0310®0˜ýrç$Î=NÈ™¿™X;·HwqÑ}ûÀ\31–\3Ü1¸E›b“šq\23èìr[B³ÞñÖd\127áxñlmj\13¿«Ò\26­Òã‘£^@mô‰\29ä1Éh\25ub§\19k,&ÖÇ\25\13\13kÆ\12\28zrJN¡\0156²±‰=ñ\29ýé\29?þÀ\11>ˆF;\ +•Û}p+ÆU¨GÏîEXÞzÌêœ`˜‰?z@ŠOQ\18\22}L€zí‹1*2û6\26ØW|\3½\19žï{w íQ\7=\15°žÅ\2~Qí%\30Ñ‘q•Þgôê·–Ò>ð!¬\15w¹\0Qí‹ù¤x§ ˆÕ9’úí\3iÉ¿7Çžu`Mbå¬à\127-'/(æcVpµ™\127í`%wNR\24›]N/vÅ\21÷2_Ž‘ŸtnË\3êÒ%4¹ö–O=?L‚\1©ŠFÏ«N…=(væ°®ËYÆfÄiá\5Ò”®F˯k´áÍ[O\27ûœI-ƒŽ\127\31ÄQ¦O\13ž;í\6¤\23ÏšÚÍþM'%=\23«œû\"VX\7pt^\13\14jV|(¹ â/’­^#\\äz¼c¬õtùºž.\16]¸º.¼á¸ð1ŽÅI¡¬\21OË\3)ª3hñ,ñâªúBŠ€Ç”´²\26%Ã\"\21ÿ°»ž\\Ä~߆\6è¤Z~Ó_\13s¥º\21\9J\0òdÈ©-4\24g~÷ŒÁbB+è’ó[\14Ë÷\18a¾y%À>=\12INb\25xØ™D¼BAn\8DAÅ™q—’‚Òñ\2.[rqŸ\19œNVîç•ø\26¨ä¾íßr\25¨5õ¨O\\\19ð!uf’\2|\17Å'ÍÙš\20ê¤\11éx!ô›±\19…>*—^Œ!¿‘\26\127Á¸³Þ12æõ\19Õ\11£¯¬\\ÃWÎ,4\15†ìñ‰ö‘´çÌà\22X±\2}j¹ŠZþ:­æ–•åyϯøo‡õ\5\29\23¿ãâvÜ\25Î\\}ÆSLžºÎÉSŸ09Û×æξ6w\14ar\23\0142O+úCn¼G®ˆ.ñ2\26Owò$>YrÙ\29GMds\24ßBÇýhc!\25^K‘&F\24ç`@;%\13øÖc½7’Ï\4ûž\4FJ]¸L ÜxÁMü’îsk¤Í8‰‚;.}Å/«`bœS>ä\23&‡ùCt-’ÉH\14ß\0319l&Ø­';ôD&ÃÄkü§ÊçÖ\25æ·çÄ¡»çiò™\22™\0Ô:Ôm]d¬Ø¼CT /Áš:4ú­hî•¢F\18“ªó°\27Q…É\13àu\15T¤\23ë™\27²$Òdeèd7Òd\29BB±ï °Ío+ò­­Ô†Z,â†+kÏ-¸ç:Ý[•u¥²É´Öß>Ø°äð\28\18{Í­ô\22à–\29ÿÜJ\6·¢ôc°ç9Ÿ\14Ž\29mÝGò\28\21\31ø_›õÝeP9G1îwõpÏ\30?nÉÖ>A«\29®g#),GrEXÈ5f)Š\16ª›^ðù/Þ\23¢ÊT„ÝZ!7I(Ì*Pãþ²\16ÑëÖ™y\23â¹Øh ƒ\ +ƒµ\7ØÓ\0228P˜¤…³;)SŽOÌÅŸx\1N¼ˆ* ç]@}¨ …ùøÍ1A\13B¼Å. 
o\29žÖ¡œªo]ˆ\2YÁ’ô\24C5˜w4\19\9š\30?ñ0‘h@¡ø—Y¼Æ¾L\5N‘d¦?\3¸# \ +d\5\20½\ +v%S@ëKã\15\21¬RU°ÖUÁJ\20\5G~,È%M\17\\—\20R›\18»¨ 8GkÆ'à\19>Í(\5\22ö¶<\30\18\11g\12_8\3\127Æ\26Š“©Šgò$²‹BÃq_Ÿ?P]Ãmì`\9;ÈPÂNr\9ÞÄ¢Ñ>àÃOh…Ó\23#âDo¡Î~rýýÄf•Ö¿Dš$\29a'qX¹\20#­‡5'©¹R,ÐX°âlµ\11º‘ήá_\2x\29nãüТ“ÄFkÑ\9Âdß3¸\0hÏ\7ˆ7a&êG!*ûS\6\0251¯\19ã\29*»EKé͘ÿ\30s\13¦ªà\5ߣ( x!{4B`¨\0Ý\22ŒPÏ\19\14\9\\°÷\"\19?¸ èŠS6Ÿ\2C‹¬ã\22 ž7ôm5§©•,Yk\20>0B<ˆÚ1‚gè5\23~Oͯ8¾û\13)ßP„·Èrˆ\11~U‡¾ Rÿ@V}…\2·µ\26%\13ÁÑL)‚Sòß \26g\18’ˆ\0ª`kB\23ý°\16ß×5¾8þþŒ…ôŒyó\28u†hPœEL2uø†?3Po‚\25v\1É6sžÜ[Ô\\¼\16«ø6¨î\21$üž:aš\127É\3øu`™˜ì\5·±z(S@‹ÕZÓs•là“D0]Û#Ik\0z\1°\6‰rÔŽÒ\6ŸiÉä¬Bé‚“Ã&Õ/.—Qj/skÆØ|ZÍ×Ö µ\6à÷—\9\"¼O8 d!ñI\23 ÿÀ\16\13@ï\11\ +òÌ=Å\17Qâ„Í­P“èžx]iÝüF¹[”\23\21pó¬\2¢b˜„/H bµ\0002\3¢æÈpL³Œx4\27á­õåT\18\0170\21,m*Ää!g±k Ö&½,0ÆårÁnê\"ˆ[i¢¶\\žVÉÃvìw‹Å‘(“œÚ€áá `¨èNæU\24?&mEûêïé\0116I‘M¬º\17†)¾ÖBǵÖq²ŽeÀŠ©Ò\14懶5ãFÀ\4JEˆTÍQeÂ\5K4‹T‚iHÑ(Hw«”ƒD\15m\29uA'\7ÃÈ\28^§/Ì]’RV©uÊ(Ò\2ÕiŽµObû$‰f\11'Š-œ¨¶p¢Ù‰f‹Ô5b‘ºF,œkDN\18Å\5\4¸\5\4¸\5$œ\5$”\5ûÖ.à”x*â\14—•’4Ïõ„Nç\0\"\12.رb1üßLx€ß@HW@ÜY@xVˆå˜!ÝÊ\0264‡K\24Pë„\26-‘à'E“S\17Ø\21Ná\\á\20ì\ +§€ ³`©g\0044ßÍ4=‰\"é\30.€\9‹€² µF‹´\"ëHz”•m\5×A`˜Tæ[è‰+5“ A\13“ô«s|hLóÐr·ú\0072ÈBÖçH\14Nª£gn1ä0Ê…\2ÔqÝ°ç\12Œ\18r’\15±\3ˆ³ò\13㊰蒕bl~\4ˆ7ó%G\18.¡›\16lÿ}1\23\19v÷VFÿVòƒ%?ż\127„?ÙD6øáiØ·•¢eô0\17—41Û\28Éó‰Zlƒ+ÑM\11\20ŒH6:šq1VB4\0306‚%I\15ô#k´T-ÕNfL%¤€%¤€ñ)´\6ÿ@™k<åœ \7¹ÀS§Oì±[\25\3BI7h&b\1K4ñíqË.`J9¥\21RÜ(º-ù®½„Ó\6\6á-Qì.O¬½^†ó’Z´–fr\23&¡eÉBËR,f¿\13ÿÏ|úaâD<\4ŠÅ\6¹¾&Î8ó\12iÖú4TŒ!åú4Œ\2OòßUð\11\11~!s¤Ð\19œya!k@ËàM@œ¾£¿ºÅ§\29e\1˜9\\ £)T¸\2ÿŸ\127–rÙ—38¡jÂgA~\0ݱpcµÇQ(üðž»r0ÛÌ\25\28\25ð+Dä&€Œ\4Á¿p­,x29Ú\27ù\25\23|,–”ÅuÕ˜2X0È\25˜\15Zq1Õ¸\28o[ñ7f\16+2^;œ;\\ó7eî\27³Oò6'¬\5—(sõ‘q\9×\\œ5—eÍ£\3ÂŽ…Æß:>÷¿+~c\16}ƒ\0\127`h\5Üó—­ù\27\6¬Óƒ&é\\}¬ÓþÀÔl”j:½©ž\\\"Û\7X»\1”§\13’³÷¢2^{Ð$rt—.B¢\0062—\19¥»9A\22wÜ+\11×Ë\ +®yœqFóÒ=žÅ6k-JéF‡Ù©\28“ù8&9ñ¸.yùÀOZ—kÇ£µ„ô\20øÝ•êÃÎ\14‘tç\26½´5\27÷R]J\26éF¥³-FŸ)¹(\21o\27Ôr—1Ï I§ª]…×®jjWÜ©}_ÄŸõÙëÖT[¢\9bw…öU{•|FM}3ó﹘{×/\6LW6 \30´s]¦i&fÆÙ»\21m(„¨8s¹\\\18ýyæ\24#$?‡ÀåŠVÂä“\2Çý~õGåAÜÁŒóâtwBþæ\"´´\23¡K&é¦;à5n\20\16Õä\27oANr¥\\ÚTrZºëÔ’ûŒ™‹à\5m*#dÎŒ)\127‹Ù`Ÿè¼£ƒ\31¿ºåTfJ\9¤š;\6̃;‡Ïžo­ {Wš†ílKg\ +Œ\0009ò$&³O|×\6ž3˜\30Q¨ålzÆÿú:ùâkvÆ}Áœê`6\\s}ÿ!Û÷­\\pµâ«%sKø)\4‚OjG\0‡”Ÿýh$i´\22ö~,K×®ÚÍ—âj>—˜AÊ0'Ã@ºåo\\Šã6qLMï\26±7fF7IÝ8\19¦º\14ü\0271íöJîÿ)Ñ`Òï27N㘱$¥TÃX“-­ÆÞq‰ Þ5‘·9E#v\0{ç\9\19µ\30CÝÛQ?}\13ÄÇgךšõ\15}Âq\25\13©£)\15û!âˆ0\28»N¬ x\ +Óþ‘ñö\31¸æê]ãé+1Ã):>£b\26#h\ +®\12}KòÍ\25Ü\17¸ö“œ¦²å(Ã\20i\7«¨Rt0þ•6—\9pGT±.f¬ÇØv·¶%ïC[tèV\22“†qûå–Ϧ­Äÿ(y­káê´äx\14ŒÏ\19öšSÚý\6YNßü\28\23 ݧ©ÂjSMø–­\20·ŽS1,œ«\30G®îÂù8HZU%[§—|ÀmcHh*K\8\17\13å#}M®Ovß\26%ÖœI>òshú•¬UUÒŽÇœ]IŪ$É\14iU•då\18ÁŽA˜¬K¹œ¯ií½~V­{l6\3ˆû7\20=Ê\9ÞD|ø\0l$‚on'ùæäøoð=ÎX«\"`=³¿‰\18\0øtiòÆNÒFHè^oê|Ke,z¤­¹Þ’/\30Ôî‘‘9—¿¹³è›•ˆË\6Me\20á ×ˆµ”žÈuÉêx\23Ò¡fƒlf=Âwb\15(­yê\19Ú8€\11@Îo®A~å­Uì+šcPôÔ=÷\25µp–uhê&þ3†ù9Uü\8ü5ž”†ÛƒøNS£ÚWÞù\4jÄ-¡;T²VQÉZEA\127BÉY¼•%\11Ê\18êB%ÛI–Ð…‰º=%ôiJD´B4\16\2²ñzœ\23Q‰Ä=IcrߌŽÖ\27\ +ã\23Á\ +Ošµ&UuÈ\27²sS3¸\27X¸ƒ\15‹\"÷ïü¶=¦¬[ØÃi`õ\18F\127z*1‡á_ª 8Ì«\8/\18•\31~á\16ú\11\18Ù_Áúì\23N†¿¨RZàË\4QE~Q„Å\0228æ¥N>\0:\20n…\19ñ¯¡˜[3ž³Á0¨O\8Uy¨8IÃ=Uý€;¬/\26nâ©üáf~Ê•ø0è\30ŒÌ‡ÿ)»\1\1î\11µH\11&«ð†0å”qýiߊFS\30sÎY¸²1¿‡F%’Ü8Ò\26O\27W²+ÙVŸhc\21qÁ¿\28ÖyÂq¸¶X•uO\27‹ßbŒ¶¼Õiy?\8\16/\7Í\20Ú¦¡Å\22VïÊ*¾õª 7¯Üï\22˜¶ùíK*±ºM]²Š\26¯Qt…Rñ\21J…+”JN‰\7æl'v\31K´Jî>´²\ +ξ˜P˜Ã†7,-}\14­€8+|ç&laÅX\5)½Fð©`\21^ÉÝÁÍ°\11z\0i\26L À¿uüoA„\7\127(•Ä¾ÑÍü\27ë(ãW:ϼ¹í\25.A4¿'*嵊Mæ\27÷ã\13O]\1_øÍ›ô\0196T\24\ 
+â\18ðÍ€‘ßp£‚½ì[\26ï¼¢\27\20³KzcŸ#È«\0ÓŸ\13Þ¸™ß\\_3w+ø\16³9y£Í‰Oezù›õéf¨‡¤u\0135¦\19KÇ2{ÁÑéÒ\18=2¤ºLÈ\9î\20œ\27žŽ\2n8€\27çrçP>ŒóÍŠ×k“z\9gó\19çl\23ÅZ¹CIÇ\19yÇ\7ðp\27=§2\4ÁT\24\29Ò[:œk\0257¸\13ÒW\12.þ>øHÚ‘‡3»•\13÷S\11ÎõɹO\0U\27êÅ6I\12òÞr)rþ\22€k­=qå\14ضš¯ëš³\5\16G\8\21Öt­ª36±gxî¬pç\21ž´\"ÏÒõwÀ\127#x©\25'ï®s| æ?¸q?¸q?ðÒ\15ÔƒÎÆ\31$B°M\22y=^®Ò”ž\5?=¿¹ç7÷n%ï¹]z9„Ÿ\29¾\0\23ü³‚_\16Á\5\21~A)/äš!çZ½È&ø \22⑪ûk¾»¹w£ç\0021z\5¡uÅ\30E+ܹT|çRÉË£^nx\25‡rç¼g\19+i\19W©C?ø·“©Wp\20Ë÷.Õà\5ù¿poS‰cdƒüÖ\0058\25“3~\127¦i$^¥¦ÔšZS±ÅÃ\3ãŒ\127ùm¸\3S°á»³½5LþcßÞq;œ¹-Í=W•š×Wt\0095ó¿b{z¦þ¤Í©ç\25×\13•»Hã\2$I$Ø\23‘ø\31æ.I~‰’Ìݘw\19[¥wc•»ïªÜ}\23ÿd„”§¤‚Iw)i.‘Nµ§Mƒ\3HˆKãÓÝ„äË‘Æwg:[1\15Bãogš3ƒ;\2r}V±kÒJΞ-û±œ²[ÒŠ/Ù*\\²U¸]«`–_Á\\¼’/¸\17·äJ+8Ã|tÎCAEì\25ãd¿\29Ôœ•´\22—3J*Ü\27N¸:«(\30\2\2•\25FØ^Üσ…ùœb¤µ.\5;–S†I…•m+ø”Ç:œ†‰o\26¸P2ì­\24?³ò½Ip\17†Zˆ^´¡Ö²\15\13›\2åuì\7¡R@û6x¾\24)ë™âÖ4iÛ@U<\4hPF/êyÇQêx/\2{+.Þ;°gÚw\25ÜyÎð<ÿŒ±tŒ»O\24ªd\20ñw\6\11Î8hVÞã>¸’à§1ñ\ +O\25¿\22à*Œ Å*>¹µN¨Tˆœ9a_ù„\23|£16üo\0ú=ç¹—f\8šþgf\4\11!Åzc]᪺ÂUu\5Ï\17\21–ò˜Ô- ÿ»\11ýïpëûßÃ]n”\28ý·»Øâô(e¯ÙþCÀp²1\17ä\5ˆ¤\17¸\5xEJÕ+«¡\1U‹Rs\9\14ð\0199ú1Œ½NB’÷=©t×Ac,0þ@¾\1278is-ªL÷\6 ‚˜TàED²©6¨¿\5J¼â/^‘¥†¼pŒK =b[\15\0289ÙD.…G, j9XÃ_‹’N)õ ªwªÂ]³\7.\0054IGj«O\8kÅøÀùµ,\ +\1I\5«L:3®aT\0Ò\14\14>jr™è?­ýì›;ˆ\12€ÑrQ@7—¯‰&i¾‚\ +¹4ø'2.È'ñwPƒ.[†¶+±\0†wêZ<=jï4ÎSjÑ›;NH‡þÖ\0î§to@Ê÷ù}wn_™qË=\0Tï{0å-\29ÕÇùdžv,6ÐŒqŒw\12¸R–¼©ûœG\30\127ôj\0¿n¼\5üÈ,Õß‹ÜC9jæ¨{þÌ\22:¢µèŒ~Ÿ\1Ï\2õþ\11ŸWÁz´¦»œ{dK$¹Þ©ÝèªäÍšelÈ\29:Ñ\14åÚAàV‹¹å\17`Ë bPËM‘v“†¶=K¦ê¯Nnn Òà\11\19ú%Ž§úSŽQ‹Zºî\15‘¨Å“\22¥×'(Y\1g²•2Ôœ¿QÛó\16ÿ ¼P‘ÁâªÎh¢í|\0135–š&MRU©Eÿf4¹ä™ƒTAª1H\28Ö\18’$°‡ä\19è\12Õ¸P®9\"5p¸/=ót¡Q¤\25ÿ\6W±ê\25 vÆ£Àº¬â\22´v?Ùq)fÜQLÔfà°—ú\23š\2FוK˜qYî'É™Gñf8ÿš$¿›\31óV\5ØoUÀ0\11\7¨\7g\25˼O\25Þ(˜yüZu¤éÓþ¡O¼1\ +x.]¡dêÊ\25;€ÑÐe¥ùÅÔåû\7fîÿÅ\1279~ÀgÆ™snËŒ£ÃÚŸ¾ë\19©µêÅà\31\\üï¼ðÜÂsÂhÔ™ãDúòfBk'ÆéšåÿH\13˜\29Ïè\21p=ª\11·ÚÙ‹ó)RAÃ{0Lzm„•ë^Ú;3÷‘\25E\29IHi©3ŠòT¤ù\20£;÷“Ýc™Í;xþýú1k}’íâ\8)'Rœ;×)IS=\13ÿ´þr\23=·æÅõ$2ß\25›¹²r\21RÉÁÔ`\29\0075ž\26×’ÁQB\3\7THµà÷ùSÁ)T\26×¥\0268àþŒjVÜ\22ëT|º3O¢\24U‡‹–\0267Û—·âo»!\7\13\28ÃÅ$:Nاñ\9S³‚EÍÞû\1Z\3†%øÉe;‡Çæ:ªYDí'“Êœ\27[:\20NGRqÓ›E¢ÅLÓb¦iá\5`Ë}-ÚÔÂsåø!\16<¿Ñ\14\12SJsFmãH´é\11^\23Øc¦Á\25cýD%gÆáƒ÷8ž_\0260ºæµñ˜–˜B™ÁßÂîÉL\0­\4¢\" ÓÎ’g3\13P“„„ÌskÖW©Evˆ\31D׬ÊI2Äpmé\16uᾧE)ù—¥[v\13é‹\30QbÚlyÏ\0mí{Ãt¸ã~WÂ\5=“p”!ÌoÑ\15«DëB]Ì\8ãºy­9aÇb\16D‘6¤šÛ¦æ¶ñ\27é;]bäê¹&Ÿ2G‡[Æ\27\6&eçr:3è9\ +±£:R¼žÓ\23í]©ö\14t®úþž €†hæ¨õÿ\24ƒ²ïäVâ&˜\19Ö\20à9wX§#¼„'0~a–PŒ^\24H\21ƒ†'–\14·\13\30»,\16\26\7¶\7SÇ~rØ”^G\18på±xK0'ì\0168|OU\24TÅÄ\22þÝ -\ +ò¥Ý\8)È\3ˆAkB‡5M\13x\21È\2Åwò+{¨Ô\24æf#\19¨\21\3Ò(¦\15˜;O_\9u,yÁØTõ\\ZËÿJ¨\20Ä\11¼Î\0r ‚í‡ò–\30»·w®bz÷®`Ü«$#\26îØý–Á,`dêr”Z¥É\31¸iî¹f¾ìØ\21\ +°É)ŒÊÌåÇUc\20V´v‚ÈË8²b†i\21%I\24ÝRto\11nY#Fë\6E’\11›ÜÕPYƒAÓ”Á\15×\127–t§Uôƒë'(Ï\29\25\23ð8œ2Z\17Ð=¥š¢€Q¹\6YñB¯˜ûæ\ +[m&á\12Ø9êÎÍ@«ÑQ´r\3`Å\21¿‚_’š4þv\22_'¼\29»c\11¿}Ô':\6DÜrCÁ\17\4½:Nöšv®gk>\28\5Üóì¼–YÃŒš5o\29\21»TOéz÷\20\0044nKÙ‰ƒ \2b”Nýð}Â0uó$2ç†IuJ\26YQžèä9\29¥fÜñ<ƒÂŸqåGKfÿ²ô?\26^”ù†Å\17!ú^m€x\0ü\14½Ëz4L\31Sï\29ö“ìÖeÔreoa\21Àx‘þ¤æq°…Ã.Æ\13ã‹\1l³ã¨mJ2mŸó°Í¹[ælÌT“#º\25KçÕ=\29WJîæ—\\t\"\12^zls\25\9Ÿ\29\25.<°RMO73˜®\0êé\11ƽÃKù›o;&匃\18JhÑMÊ0¿59à¿Ý>•¨ÔNêhÄà±\1ü‰\0º#Ç\1\22Û%-RÿrX‹-v@¶Óû¥·b\1KÇŠÒŒŸ\28ö–$ÌkÀs¦Sϳø³yóE£‚wLI>•º\13tÔV‚‘enMð{9È\6Wé\11+‡›4É\30ÇlftÁ¡3p/Ã\20Î\15Ïpqȉ|‡l>éÑ}âÒT\26ž6ö¼6ïÑéˆ>G'b3·ßˆ—\21f\11ò·ûáßä•ŠöVg\22Kx‘æ™uD‚€f\1N™vÈ3z5ã5üŒa|žEæ»dÙâY\\„œ(‡«>`¸Þªñ[md5È\9\23o\127â[z,Áý0±+y1Ie¨j¾S#øfÍF7u<·\\ËòÂß|ÁȾˆ\5¥®Û\23ät‘ok¡t^#¼c\29œ\24Å'ŽïW#üX\0s<ÝëS\18-;2þd+“¨èg\0\20íköoV³Ê1pÉ ªhÃ…¢ÚŒ\11Ô°ªr*HÕiv&ÂÓ8â$ìH]Q³^ÂßñN¶#\13Ï\24L:»2o†«&Å\5îáv\8›áE&„í\1Š\19NcÞc?\26\17ÎìH‹®d¼\28Á_ç°ä9ÌX¹q'K”Nà-·zëÚ\19xƒã²cänh\7Åè)Àw<™¥\6\27û\14ø\9O%fGÈ\15.h\20Ì\"S|nËÌŠÉ;\22ì 
ú\22œÄWg›Vgk«\19º\2…ûQæ~‘±\4ƒ©s7RÀxt\29\11Œµ\4Žåo\13¼\13_|2Ã+õþ‡@VÌ6Q‚\28#w7ˆÌîı¾ÿÝy´º’ˆ!ÔsTst'ù¾‚oth0ïZÀ“4HçÉ\23ótçV· ²1\25ÏFóÖ᤺\0;ºè+™ï\1\2mÑ\127ºÑpW;º—ð\1¾ˆçÅ\8”¡Ë=g‡µ.÷¨Ô¨Úœ“Tµ“0}\18;ÎHHÜ»™¡£\4Ô‘îË<£?Á¼ñ®ÍìwŽsÄ^‡ÍY¬ÜŽÀ‘óŽ\1ì›@qâyƳ3V\31\6üNe\15\30Û»(ÇÐw€zd\\\25Þ=î8X\5_s\28\29V~ÆÌŒ9[\0307\3þD·\15É3Ù.vh‘ÀȆ>3Z·À1£aÒ6Â\\*\27|~\7/FH\2\31\6ãÔr´\18+\12¤^®÷Kd¿bqÇ\20L×{;QD92PùìN¤wgƯÃÝ‚ÅŽ\17Iç+Ï\24—pr\ +~!‰šÂ¡ã\21 †\4rç$¹;Òþ\8ó·2ô÷-\23¯u…‘Ý‹HÀv,çÚ±øéFÄ\7ò\4ç\4;iÊ…¨u\25jÅ>%v¢v\28Á\28\14 vd,\27ñ\22’ŸX}*h\25tpÇ\0œ9l\28ìØùÁ\0141?\24/ùK¢ï” ¸®ð\20'õW\\3½âª:<ý\23ž¾ãéUŸ†npt8–êu8\15îp•\18U‘\26ñ‹¢zf\20­é\26Qlé*¬\27Ü\ +U`wÿ1EÊÎÙâû‹lFÒ¹Dç/2Pžšß4â\20¸ä*($¦¡ezž’r:ÆBƒI½‘\25=§˜¡á ùOR.ûΪ|¨r/\0004Óh\6Ð@ɾ\17\17Úw\3ø7j­Ð°|®Iås\13‡êkØ)~\19‚\22Z§4¸îÞÿ\26èr7|¦VàÎj\13; nxÖ\23@¿¿eE¡F´ÄG–¯&V35žj©ƒè6®á½\0\12@ã\6È\27„\26Ìj$Dƒ[ ls|ÕŸ¨#=\0154Hº`ƒ††\29¬* \13îwNø†§\30ÖÛ\13\14\12á)ÃÓV® ï™\31\3\19¦nþÀîdôtœ[ɯ;\13\23 /(ÛÒ\25j4rChZ8\30ñàÞòÉ\6`Öwé‹.ú\20N\13Mtö{u¦\19\7Ñ\15öì\17p…§šw³æ ªÀ39'D0?yÂùÜ‘:.U+Ÿ¸L\5=ÿ!ÅèÁcì7&ÌÕ§)â¶\31ì\13ú÷\15ˆ²;™\9\4|&qâ\20\29cÍëL;c-AÏ-„†\26ß3€w\6=êí‘›¬\21Åì\6¤8ÞïÐfÆ\4¸¡°ŸeP¦ó$œu\27\25(+Jè= ª´¥à7<¢N2.H>!Š’\15øÔ\12Ž\18\0ô7G<éÜ“±Â\8˜k\3¤:=µ‘#šþ¶u ŸX­ä~bÅ3GÆ+\6\0124›\30CD\28\127zWxÊ»a°$@ß\27¿i\26\\‡¨Ù\27¹j\6ÐD}¦Á¬·\25¶b1“'Š{\22±)Õ“ØnNÙ\29H#öKSºóÈ-#ºô+ÅúyŠéì–^‹À屺6\164è4,FoHSzêÌ(˜\23[)f›»E0'9ä\25¤\5\15Ôœc<6p+\0288\15(YŽû €µ|Ü{å÷Ñ\27\6ð°ìê\4‘Ç=ÉõÂxŽ´?ø…?RÕy0>UÂoÈŠö–7Y^:\9R/¶~¹ëe¹èÊ™v°Ô\24\"¸õþ±“¨Óxh\14\"½\5·Î\19\7æ\"\18\5ÅŠÍ4HØ4²\15Òž¸zžÃ$\ +¼Á\24ˆ\0þÒ@ª\24ÀaCÉÔ–²ŒÞÚïo$¶AF\25+Uë{‹¾žK—.9ý\22G\5à{îµK\0038çœo$AÒÐ̨Á\28n£ñ–\28Ó]ÎJö!\23­…\18\0195Î5?<湩fœ\7†¶oý_îCŒA7Z ‘è}:~\14úÄ®å9ñη\30=\7ÕXUïx\14¨QzmÇšWÒ:5çkÄ\12ä×I±Æ\7az‰/ÍA.8Ÿ‚Ó\20š†\127`n4êtzg\15¸Ë”dî陧ã¶æù æ'\\Ývü:¢Zi€ãùÆaž±ktì³#é¾\2nÁÆ’°P©ækÅ#AΤ[^Çk\30\21µ›\\k\30%µ\\7š\"­Ü/ÑA(l\\Ã^º\26r´Õ9|ö|–,×<\ +kÒ%3M¤\6¢\13{ÑiþïnK\26ò4r\15’¹áhد\5ãXÔ}z\21±w=f\15‡Ó\1Üß„Á¨£½SNó$>¯íI\0r°.7ïo/¡Ú#ØÞõê}ªEÒ?{\5¹»jØCeX\1‹¢Az”y¯uŒLtF3.ùId‡\25‰\"ôG%9\0288;ž¾@;Ø\1³UÜà\28øþNÁ\29\31&tù;ð\29š.„*¦>\4y\16^tÇbk],c¥„­Ö‚Á\14Ù#-\24æÇû}ðˆÞH€ ½a\20\28\3;EÒ\20½+\6¤Fè/\"ÑÖk Ù9 d<“«Hä;ã’êa÷(vSÆ\29\31`\3éÝá\15.ׂ\29‰DŸ\6ù¨#Ù\ +Yði&Äá>0€Õú‰>øÊxgðá~Õó+Â\21\29^\17âgÐç¯,°[mZK¢=\9BAu®MV\28ß©\31ö\13 &¯^sÕ¬y_§˜à[yÍ;?Å\4}£\15)LyE¢\28ú\23QÅA)R]¸K¯)×xï =St*›ÏŽ0 ¾Ë\21½Ù•2{®ù\24ª©0\13êÉxa±ýþkø\13OfG9U–|é~\20ɱé\30[æÿc¸ž¾ãô9\23¬”~|´¤kÝ\14Òx_¤!Íg\14¯\25¿‹¨çÈQ\7\27\9\13\30M©•\18»»VLÃýŠ·\17·®½÷\20c,¦:¸b\30Ü+\14é ;¤=ôL°\00723ÔD<8¯ø#\29†\7›uÌ«vøà&Ž\3UÔ\127~‹Õ.gœÁ7ά„ùÆ\31;3ZñˆÐÁ˜Ü±Ë\17’Ëç\6Kó\9\ +WŒ÷îÜvÂéÀDûnÜ@=9E›\19éÚ|çW‚:âòŠS\28å°8K\25õ\23\12Sr0š´\8§4¡ñNݸÀäMêPÜ‘vŽ\4\1273\0131ŒNSL{ƒ=–à¤&Ã2åw§Ì;:RáŠ}Ã{G¨iò6-ƒÙ,â{np\19zr28\8c\30Üæ\24ŒÜá½û’Û´­oe(d®Ì†YI¯…\\µßŠbwç¾ç–w*§¯U¼ L7\29m›\127Ò8<[Æ5Ï\31n€†_˜[ P\24ߨñµ©Þ=§Øòµ\22¿\127‹Ã%Ò\26‹=ÇÈœÊ\21xjÈÆT£\28\0j8\23žÒÞ\16x\127ŒRW䤤IÙ©¸€Ù#—\8Ì®ùê+0\ +¹Í(gîP#ÓV$Ýó¡é$Ú~¥Ãˆ§áÛ?ð\30ÝU\6x­k˜´ìê›ã˜RïGHj»”h¢qº\"%™ÅöDŠŒ¿\\okF\27gD\7êD\ +‘)/2¼êô…>$xþæ\17ŒÞÝR&\12û‹½ûˆ½\19B€Ú¦\9a×—\127&\21\25Mzt¼Žkw®OræôÊÀ#;Š9;›\5id}™Ë\16(\28•\3¨¯§ßù%\1÷\19òú4çJy\24úÑ\\ÆYË\9!MñÅ|H%-ú»Ëáa\4»Œ\30x&{\24\6ðC:€\31F«à\4@~ý\127\0245¯\14<³sûîn­AêÜ÷\25•]ñ‰IX\23«ÅP£\7^3µ³,ÝVaIÇj«zþy¢6ÝH,I³£dj\16\30N]VkÙ!ݧ\12µarŒžMµ˜waüúm1ŒÖáÎáÞãÔì\21ìñ’úÒ\21ì¼€©ÞÌ\1Œöë­†$Š\11ÞkÚ}¨\127¢»\ +›U=:”\2cí–šðsoûÍ¿\24¹³\3Oñjt–Qß\16\6S ÜeÊ{tbù\19©¬AqöìÊŠ\0203çÆÌui¤û\15‰Æ¿zõÙ5åIˆ\27“³Æ¶\21i‘\26\17„ºæ\28%=\17é:•-‚IN“j.! 
›iøè…Jî3žøƒÀ«SªÏ\1Ú8gfü\16GTyº‰\0206µd&cyéHfŠÉ8È\ +“<6S$HÍèo}íꉛÛ\30Œ†«=\8ïö.ÓÇ4ÓL\12Ú»”ºæ{\ +føÍ\30\24#\19A6º\23[ë“x’\30§ê>¨GÖ\26\12gå[÷ó'é‹fÖ\"*ÕK fŒ—ðä\3ÒÊ%yrYã…9~$Å)‚Æð‰œ¤?}ÅsõôÌ×f'§O\12¼\23;Þv’„Ö‘îNW![##:ÉÌð\0290\31½DL\24vÃêxæN1eç\28Ácô­ã“6T©÷®\26\31•\17ªMžhVÃMFíòÝ’ë¼³K{r}v;ºÝU=íQêk2¬D;•\0äÐÍw¤ÐÁ\30ÓÁ•Û´¢«M§ï©Û¦\6íéñ‘E\12;g\22®3TNWÍ+B@ÅaD°l.{‡ã|Öâ.¶…\13ÔTŸR\19Ö–ÍT[’}ß™,œ\"P;>¦Dó—‰söÛˆ2FI Ú¦í˜\127™Ø»\11«É\20n÷µ¤ÿâWýÁ\27ƒ–ÅÝ*‹\14\17ñ~ \3µ¬ÜºDÀ=~y\3!]ØôN\25\20ÎäÌHZºáÿî\18‚z~ê\24NBoŽÅ.é–•€\28Ϩ88ž—7¥ì\17S—4‘šd9ÞÅEæø$…µÖ°)ì\23ëI$Zø\1¨*‚ùœÌ¤áŽ6á\27»V¬á.¸óoe{¨¯xÒ'òü*zµ\19Ò°½g\5âVl/¾sÝ(‰»qî\14\16­\21\7«ì70ŠÏÄ[êÈ\16`ÏFå-Ë»Z²ò¸u¥9q…ä|¨wC‚œyÛBŒ¸øfžÚÃxåä\17µ)¶£†cFñ*\2Š²e\14Ã-+ñµ¬Äg†u\24\23Æ^*„h-†¤-]rÜy\30Óò”Î\29\0133ÖÜWËT.\2ªi°R:ñ\24)Ø\1273ã?LDå¨E,ܪõîV¤\0213&óŽ\21\3£\11À¤Ì‘¼Îy+\26ôºá\2ɼÎÄ\26nÜž¤¥\27þÌ’†—:R\1\11|jwçÔÒfýÈ[‘–¼lýø¿Ô4’Žh¦¹\20GG5{[Pu~+ÓÉwÚ\19•ŸñÆûj›\ +1[òùÐsèņ‚rÂÅ€×J÷æž­U\20\2Ñ£\1¬È\27\5¦\6ð…+HfB©<€žGÀ\23.Låî\5ÌŒZó”_§/¯Ý\1À¸\20\13\127RãúF3\4\24u|zKÃ5¾'#ƒÂRÅDtž\">ÑF)\30œ¹­¢ZŒºT‰cã-Ñ\20\28LÜÀ¼å9êíê“Üêé¾\0139/\8G!Ò\26ØXÏ¿9\3ú7Qªœ2n]úwÇ\127ç÷\127ã*\5†Gîø+£êõ&’\27hA*i(éÌ0\18G!EIÓ™_½¥¢\12«6ë…\9éÚ93—Ê\20%\23ýšJ<\24|ãù\9?Ú3©äï+%ú€Á¹Ã…Ã\21ã\3×zéZ­t­\\òÉæÍ©d¾É<²àª2¦­B²\13iìûßd†ÑRT®ïT®ïTÜ:ƼáMÆC͵¶O?l磩c”˜ÞD5ª`¬ã&Î{[…;lÙ\17:\23›²Ö1\20ß`’2Î.\5Û“]—þ8ä_1I\127\127ËW\30Ú\24\29ë9\25o™À·üYÑòÀé«1ãìHm’+\8ï\127;Q5X2\127ïJ\0³Ï•£®é\18µt¼’kKIXè:ºœVÕ\29¦šE\1\12ý’\7wiܱ€¦#ï<;x«nÄßhÇ\25™½\4Ä~ëѹ›¥ÎÞ,y“\31fëþß+\14Œ\8;ÙY”nŠ\ +.É£Ã\25º×\29\1ÝǶD\26Ü ;Ü{ÌÅÉ\\`P¦^Ü»\30% ·ù¹\ +/\16yÓWzF2.F%•+’ö˜=˜tLI­%\5W6T\13k<™{€\14‚Z\30#üÝq2]P—¤¥Ä\6Ñ_u\0124¿ŠKRóÌP+™ËT§ÖQ]ºýŠ/ÐÍÞ»œM:ç9mpTj…Tg4ØyøÖ\3\3Íf9\20ª”¸Z9•ázÀ\15›IÕ‚?‹Š<“âÎÇ\\³ŸG}$œy1?ËÞØhðŸGj…ÌC(¨\18€IÖsY¿\9£àÚØsyüÔxþl^<§3`Èë\22¦æãT7\15ž¹Ãë\6O·v\31èÛ\31|§\20\7’~Ÿ?Éö|<ò皀ß\16m\2¤3EñÐÞ¦\"´ž—Í^–Í\17·\3=©„æ,*\3ÃX\21\7ªN|=/¿\1ô\\ú\0271Ûä7^;£–S\18{1¹ë\24g¸ƒPÌ¥\20R¢\6Á<õnÒË)a$ŸÓhò\25ë'ô¤+lÚmæv\28=i å¼=èS…ˆ@’­šÌ\"ê­Wû‰9òõ,\6îå*Ñ›•õì=¥—Z[0éAŸDãÊ(G\5ÆÒü$U¯Rß\1½D\3kø'\11}b²~ß’t7Ímzï.…€¡½\16œ_ÿ‹\7Ka~ªu¼âζr=d¥Ë,]á{\13Ãþ3wo}\26\0091P\31ùû\31E\28°u¢›žv5#ù¥œ\25†¤ ãÛ‹ž•mFH.ÀÆ({\\W&MwdƉg‚,õ‚\19&‹Œ\27Ò÷f/­ì£%\20„'y½‘è2£pTs\5ÔÓíÿ’Iþ½J’µèQÞVn£Lër\ +³`9ÞÎ5\19ó~ð¢ìx´¯¥·n¹\19lÙV¤gñ@Ÿ¶?î[ͬ„Ñ\127]\4u*È£Å\30q°\21pßÈ¥;4Ž„ò\4ïYf³0ã´&/#‹ëE>ñèpÉëué–¦RÜšÂW³&ô}²Lk«DˆbN5¾›éÉ͈ÿ²Ò\29V{ã0ÃR·®L'7Û{\14éù\16Ù\27Ÿ\27Wéàœ¤\29#…ƒ{Žì“<⧎\20wÄß:óÌ6¼\31\13Ø3ƈ\18Ç‹c›™\8¤iºûuìö?²ËÏ\24UʸM[\4ŒÒ­2àyë\14æÕÎ\30óf)©µ¤k³\22Ÿ×idï\28õ&m•‚Ç6H§QjOªB·®H~j.ef)h \24\17tPëYºKÜ^Ì)ü®¯NGjNìµ¼/씡ç3cÏ1½€;Øâ;jÎmøêμ½½•VG`f\25Ú;wVý¨`>P\127ñ\12³Gt7L{gø\26Ç\12¼™J0Úp™=ÓnS/²-]»»1½ü¾`ëwÁw\\\\Ò%g¸\28½c\5#\19÷EÆ8\6ì‘‚\12Œ15hüh|/sAu…0\15#EZ}ñÛ5žTîqár•®,ågsÎ……\28\23Ùå\13\11ÆDÖ©>I”m\14ÌdxGàO\19rª«\28|\6pÇ`f\12íLÂ뼄ø 9¨\127p´‰†c]4\8IÚcÖˆ—ˆ—\ +gÐ\26‰´\17õáXª\27\4\17n\16Y£á€»ážkF@\2}2Þ#6ECâ.—Ä$Z+t”\21˜d©õ›áa{aÞås\8²¥\1279ª‰sl©j\23ç_˜Ù·éUàÑQ\0136?9sÄ_\"¹ŸpäÙà;¹``\"Õ6!zè7q_ÌùG»}í°[® \\ŒAÕ\23æžK’sKåö—6èm\3\127­LÒ2—®Ÿâ\"Éàœ±V[Å)+:Úl\16e¥áðË\13.•]5URÄ©ÀŒ\7ƒÖRÍ\19\5|hÁO­iÁÚõ€Úõãšë¡Në}ìy’–¨åá©™›Ð0¬‘Q¤Ô3c·ì\27Æ£\27m-ö\3#¤!Ÿ\"Í\4Û\7C\29/\\-§wCJÍÌ\12û;\"È0Õ4E›¶\6Hç´\24Ø\ +(Éä\21´V\26¦v¡þ¹kƒô2áÃÊ$9¹\0'ýÔ\15ÿà¸Âôš³x,58OsÍÓ\\r÷«:ýUþJI\31<-|pøí\6Î]Lb\30M\31¢¨\16ÃO7ñOˆ.\29žîäiøÝT‚ŸÄ§…>1y)\ +ý¿ƒ”é\19Z›°\ ++=5³'ûÈ^ã)ÇSÁe,¸è\19eCžàÀœ‰’×Ì1\22\0122—EÎÌÂ1EË\6`Í)[~aë²iQW³ X\22ž:ΰã\12:.Ô;'ëEuÂÿ$\\y€±`ÆšÀ°D\5ü\0ÿ\\\ +þ§ÿIζ2Ç‹6\4¬å5°©(\15¸‘ߣ\20ñjdÏÞÓ÷È=ƒ\"â^N!5²Ë ÄÌÂc±ûXâW\7NrP2½‰ºOÆ\29#\2Q:)\6b\1å.Í•\11\18{OPÍ\13²=+ö\1Ì\12йP\2D9Þ\7—Þ´Jƽ0Ckdܪ™ëœª¾±g-Cã!ž\127V0³\6xÓ'1\"ÂNG\11tžÐýã\29\1JóÎ\31ý\14™\19˜šòƒËÝë\19\127AÏ?ÐÛ\2ÆúE—á‰Òä(KŽwå\\ã9÷Ôœ{f\14\5Ï=‚\"à×\ +î\8PW̹+ænf˹k欨\ +æÚ1sf\22æíÃ'ß¹T'.\9¼}å.Ébâœ,\ +°5¤j¢HŒòI·Ì!\16ÜËÁpjÀÿôÿ\30ÊØ0uÆ`Á w­ËÔ;G5õÑ:fi\11&ŠÀH\15\16U|ô3Ïú„ÑŸsÿ\14'üŒ€Í 
çoê\29gmÀhÕ2£³\12[3ý0LoxHç2\26BÌ$*ÿET]:\30›\5¾ªpä\5žjJ#.ª÷lº¬@ÜèuøêB&Ô©…\5ÿnæÒθHc|Ùˆ\30¨œ\23·s(DÀ¨ïY0ÈÜK<Î]ƹÍس\11÷sýÀ\19×Ò0(/AV0wÔoXŸ˜1#`›\8Þ‚Ò„ô©'ú4êhȯ–ÞGZ÷ZVJX°\ +\19óü7µ\\ˆ–ßÛRíQß-d \3ÇEéÉa4á‘\25Ò8Dí¸æ}¿ï¸H\29\23é¬OüA½vb›sÏ_ÚólPð˜Gì‘‘W^ÄUd¨ñhœ³@’’\3nìÙÚs\8Àá¶\8ø\12Pâ”°´m‰–,y@—I·/¹Û—ÜÇK.‰ö°’ßÑMFç\31\4§=2^\26€@´{9`¦$i¦\18J’\1|ð×õ\\4è†\\øǘ{S\6í/J̲%M¬ñc\127¹Íñ/¾FÚk\0\9å{~6Û/\9\28\20¿­B\21VÃœ÷\15¨a\11ézÕö¿Ÿp\15’þ2›ÏV\0©œŠ?µâ=|\5»‹=l<Œï×½\4©˜2X0ÐwÏ\28ž[Lsp…þQ‘Y‹)ʉßyâ\18›Y³âɱr{žŠ{¯z!ÞC5x\8\24\19r£:e1X3È\25\20\12ô%+k‡ÓtÅ‹jųG\5}eÎCñYŸ¸¬Aë—1\21÷xŸ[A‰u/ñ6¦ôþ)3\22\0122\6æ÷¹\1Ã=Æ!%iùu@¢Kš3&\5èØóž\30\27ú˜ \3¹`r\1òAŸ8“#žNü–\19§1\29ª\22\1d!\30¦jôšûW\4ìq†¨bLÄ%@.´L×Ü7k;=Ö¼<×ÜqM\4\ +0s\6ZOoxê8·Ž¿ë¬OÜ|gþÁ\7\1272ä\31>Û\11À\1m\127@\3Æ\12tî9p·Ð\13¦\24;ßèá\11ÓŽ=]\29¸·èÚ\23[äÈ\27‡#\7AØóÜ$n\25eV‡›Æó„£>ì)<õ\17x§¥>]¥«”[¶\26Õ\3Lj*HŽ\26#71u\13\5#¦zW5Ì\11:äêRaom‰½Þƒc\7\11Œ§Qž1Cv¯ôV>{ë* ãÊ]Â*|Ï\27c3hͱ÷Ä.\16@ZãI[¸Ö'7ŒÇÇð‰ë\22am2G:3^ò\0076,kÑ£[ôÑX\\\4Zž4[\30\29:1D3Ç=Í\15v>ÐU\13ÓBLsvùÄ mü\7ºYËq\7÷Ü-[·qÃ\2iO\7-lFö2\17ѹ^\13óöd_×3nPE°¯3#»\19?µ_0è\3uªÒ\13,ÌÝ2Ñ©êD\1}Ü\6\127Ï #\7Lc$î\16H®Nüòœ– ‘´-ã#\3£O¹\23ͳ\19:‡b‘\19\27Ýa—B•™ñG¬¥&¶K{6ÝÚó\ +£\1\31÷HªkË™/3AŠ\29ø=þA?1RËw×ITj©Â\31#ùÑ\30köM=rÔ½9#ö<ßô¢:tbÒšSæ \23Ã.-–/\30HttÁÀ¿à\18}\31ÿ„{¤or±#O¬×´\23[oeÞYlï¼…D×Ü{ÖvÙ³’Í^”øŒu¿&Ë&jžlË\20B“ÀQú\6n\31ø\23³A#ì›\ +œU&c\18­Ýk\14¦\12\0183\"ç—\31 ö Xž™a+˨Cí­M?“¬\ +„£féÏó”TþÖP?¸ÑFëÇVŽ¾%玓³\6\31¤¬3ÇÏ\8«&¹ùÉ“˜\27û„\26cO_˜×\26@2/C=2öU•V\21ËÄQ¦Ã×Þò\7\6\16*èz\2R¸pí»\20ã›\21NÄ£¼3Qu\31Ð'¿°n¶\3•|¤\15Gʃ\15va©¥DR2¦n\7\27\"\12&s&|Äh¢#\24Áßg\21°Úï\29ÄåÏã\0®ÍûÏ¡¼ñ—ö”Q\31\5P\\[|°&0¼•ä<\14\5—ã\27¦ö¸%8°u \0©™™8}š\3ë1ö0\12³\ +‰ç\"z[&$r_\ +j)õ\16·D•t\31űžîù©ÃËïYÔ\7Œl¯¢Ô\26Ìïh’ïÒ…®íñïí¢Üÿ r¹\31øõ‚üdn\28©D·_ˆ¡Xép’蕈`vøª•\1Ü“µg\5\13\\ÓE–$ÉÉP\13K©–h=.$[ö%¼1 ¬?ÔÔDÆëDúç¼\21·ÀJZ(\19ïÇ-ZÏ8y<À[ÂÌžr\8£™ª¢f¦Ö\\rPa!ºâŽã£ŠtkJ#ÂøÖýP-»™Ô¢A\3éÉ\0007\2\30©¥–®\0169ù¦à\26I\"Áá\23¦m\31]D\9Oâ®\0017ãgô¹‘\0004úŽáy\7ræ¦-Ár·ÏI¢³rL\13Á«Ši„Œe–\7‘âiF¡¿üÓá\31À\21žjܪ\28DÓòÁ’'©ÌIK-ÉXl›\30\27\18)há\29€1|\8?s\1\29uüGRœ™:wUÖr\21´p5\127 Qg\18÷ÈŒ<.{’úp\2µ„\12;¥>¦õ\4Þ‘I•{Û\27ž:¬8\17\12ÿâ\5¬£¶Lˆ5¾‹\14äŠfépθb\0}ýå8\21\2üƒHâ¦üg„f;°®ìÁéÊ\30 \17{`?¶\30sAûdGÓŽþ\"hó¯Ùo\22§@lû\17†ÏíÂuÓÛÎ;%Æ'Q®ð»¹ËúÞ\0îNê”È`n^Ö|CzHS=ˆ\3¥\0042ªÈ…‘)ÐÒb\27Ì\19©Ìr’‰\6ÑÊáÖUÚHh\\Çë8Â\29ó8PÌɽù‰b\30™ßmÜÖ\14Tã´Ù1²„!‘<8íVú_áÞ¹M·×lPv›’^™”³G\18¦¶|!ë\24™#¹Îj\"r´ŽáÛad\310Æ&Þ-ÃLz\28!¹\14\5jë¨\23ì$Päʽ·\30mæOç©ž¯\12=éÓ´n˜Ô£íT§C\15‡íé(Õ¿Í\4æK¯ù\15Ix…?\28£IKÕÈfÃSK݂ѧQ'\29£p$ØóžÓ\"³ûì3\14:\25Ÿz2±ÃŠ:ŸL-Žýè,\18ÓÍ“MAô•…©4â\5·Ô:þ\17oF…`ÚÔ>Ic\20ä\16OW%Ç6ûˆ§\16ø^¼GDRt\26s \15\ +çÛ‚©árø»óä\7vk€ø·+Rª.\18O\20€µ•jÖi&²çA‚ˆTQÞ T\9õˆÚ\15!Åbé4RØÁ\29¿·ˆ)vàð*\1tú$1Œ—LºG4T&e\30\127±ÍC¢SJ\26y[çHf \24\18jr+NqMÿ\0µµå½–îÑ\21Åû|\27c¨\127\11]À·Î«\14“:Ö7ÁËàyÅ\14‹\27Þ;Æ\4\9\29ó>­\12oÆTDC9{ø¹KaÎì[:\9Þ0©ä³'HK×uJ²\\5T¿´méü˜;RïÚ¾„ú&“t}ÚŠ6×I|ÈøÞãybbëÚ+\30\21ŸxÀmeï=Þ3x0ZR’É5÷W,G[\17{ø€e–—t©FÚlžöÀñ\8h_±G‡P“Vàˆ—êƒxŸZ¢óÄ&\ +:CG:Y‰\"ÉAâ¾ùQšK\12Ü\21ã#@æÂõ\6j©O\24\14¹ôªš;l>ê¡í NûÊ/\24;äð\11•’»­‰óQ¢$-çH½#µ.aGÙ«CÀ\14^\6&b6ªçPµ4U\14Nz>\26å!Ú\\âl\16?¤„»b€\30o3S€\25ü¥\19f–Ãôº\2Ó\28¹‚\11™Î\13'Ps`•J\24ùK\4\\Ù\18\8Ž†\28\\¤m\24ÿ0€\3äPGwHÔŽŠà\2û\17 á¹«„\14/€J\22JV·5˜%8LÍÓZÚsï(N)}\0kššÅ¾ds²\3ká\29ÆÔ%\12Õ\8þ<•»QÆ;ÈpåžE*ž\7èá\29>\27Â%\31‰Yfvt¤‘\8bÌv¾Æ\12ï9%Ô~'®\0OXeKwgSÒ\22;\26úŒ\\\6\24\11 ƒ[î*±H\\:lN\17\21ù†.AªQQ•ˆÏg`ª-§šËýË\29ÛG\30tQF˜\21[ê=R†°¹G{W˜ªä\29-¿£s¢ÞJ”%\29)jÉÞY\17—Æ2:b%ªÑykv&\29°Ê‰j·]­Ý^µ†üª\22Å[ýeÆoË@^ãÉ\31Lj~O\1\3\3§o\25ûÙÜRé\0035ÿ†[¶vûÒZb}š\23ŸÈÅL$…iñ\12Ü¥øž%hµÛ¬\15U>>\0209é…Iãß‹ \11<ñ'\12úœ.UÈ>ˆ}È\\¦™8.ðQ×¼v¨ÑÁwàuu»AƒñÉZ\5bgÃd¯á\7ñ\24s\4ðÂÔ\6\1^y›ÑÐ2ù\ +Ò‰´ÃqÀ\28¯àÏR\31™ý^8°0âlI1†•š\9\7\11Ž_nï8¯_%\2Û—>ÇájœšJ\31Ó\20uš¢Ó'òmÞ2uI 
\30\21\26®»s,¯ªàoD=ôAÎÇ>µ¾¶×'êŒê|¤\11]%™\12JŽ*|pþ“\25Ÿ\9Ûm®º\1=|é²Ó³Lú*9ÒÊîÇõ|\21eŒŒÔ=\29°tGc©Ñ5—Η^»þÀjà2\27i âƒhÃh§8`2O¿a•SÕ“ ñ›t\14UUpŒ…,+̾N(¿\13jä¿ópÊ7+ïðz€;òCp€\29ÊA¾ó7X\1\29HÏàê4&\1’Å=\24j³†\23i©³!¤Ýï\22§é©`úÛ—á_‡n‡Ã#yC8°IÜA\14F÷)é·‘ŸÐ[MÆ\5§*¸:-Gœ>Ž0\14l8{ˆ£&žÑÎ$œ\15¿r ¦cS\28Å-­š\25\29äâì–:\14¢è;ç$¿I¯\13j¹\0252]ˆ\0208ã8ÌîQòã0\6´:ò\13kŒ¸£9}Æ7óW^\127„¯\24Ù½\7éj‰´\21¶¶´ùÓÍ\15RÜ}â\"ÎT¾Yû\4Õî5ï‡JY\\\127¦òt}‚{\2ý”\15\17'™W¦|…\2¡R)Û‹GÞ\0088ÞÚ]]p\ +/ìaÞÈËÚÑ\27Ú“µÐ7\30[‡uŽg<×i¾¯hF`óž Þje>öY\127\19oŽšÃAš6\0035¬\24G‡;‡ÏŒ3ÇÏ\28¿4@6ñzK\8F.\11Cãs yþ\ +®Â™Ýˆ#|Ïžé\19ëÜaáËøc\3é\0236w ɨðÙÑ'`\29];Rïú\16¨;GºpUym\26&µŽÔ¦?TI\0Hg\4‹aj\31\2¹;ÒRcDÛ\29MHdd\22'Yü6|A\25FöŒÝª\0165œ\ +i>ˆ\1\2=æé\0¤G÷¹»4®Ç\28±…ÊþÃQ—bŽ’;Fæê·‘Ûj߈MŠõ‡óá½êa\2Ô9\15–¹hDæî·si…#Sïx/ɤOå'i\"o0Ê\30¿\ +ûOéÆŠi¤.§(\28¤Â‡\27-vw:u<ŸÖËÜ™±ss7\24c<È\30ð,–§?Üm8-än\2}\8\7Nî/þL\1Rå~h¶› õIÂx\9ÛAâ‡3ŸyÃB¤Ó£%ÿ¿}žÜ¼ZñKà\25a׉‡NбZ³\24Ã8.`Œé\4\26Fˆ\18Ò™\30¡óПâ\12\21øsí¢|śٓ\20³¤(±¥ã™Oø“ÛõOÖ¼\11xǽóO\30Â\127‰~š—Š\6žÑ}\9¤ÜÍÎ\127Q4\30³7\9nŠÃ–§±TRŒ\6Éàï<Æ\23úÄ\27Ù\19OáË \17\28v\3O\20tÞo˜–îž5ü¬²¤H'\26°-•k„¤·®”K‰[™ñ•”WÞTüÙ•·q˜qpÁd\5Ç÷䣟Zºš^º\25xÉ.†\14¢^µâ$Fhvâðµ\6so\4©à%ñQD\27K±­í¢«)Ê\13‰Š”äçÈŒ×d\5¿qQ4 (c³¨eҲǔÔòJ”¹\21-“ÆW<«Þú¡\3Fí\20‹˜ç«2KÛLI¦2˜*“ÖcÚ]3\ +AsL©¹»~p<þÅuJ[2¾qø›8@|¤iÊ®)äŸa\15wh$À;V®M¼.6¨›´ý\30J0JRó\20¥}c-ñó´\16\26éWp¢C\19R¹0\11Êð’û“Ó*:Ñ\12fT}NŸi\28>S$:¥Q\1Aýt«ôÄ*f'Q\9êy\"Ûˆ\14\0˧ü‹íèðÛHµd)©q¤ËèE¿H¤£\14Í/îsùgÒb8!ñik·³\1µu¤ÒçåpÇV? ö\30ã\14Ûœ5\16ó´Ê\11×o‹´Û\23ð3\8l¶°\5M\13šQE§¼ƒkœJg•k«‡f?ˆð\26µ)\30Ü\14â\7e®J¶Ô\12;\17lŽtÈË¡æ°†DŠ\8÷ŽÚ:Eu0:þœ\29-ëZMÁáȨ\28”4?aü‹\"\15}ºCDºPèÇ”Q;FãÎe\27@\8\19ñÝ%™yÌ&\29'\0170Œ(lá¥þ\ +ª\25­ÕÿßÙÕ-'Î#ÑWÉ£\14ÙÔ†ÙÚ¯v`\24`+3@Q\16'V\17a\18óv»XêÓ§[J.¾\27pw˲,ë§Õ¿\13™ƒú\26òpW‰s„\0283BR\26ብÎ\2QÞêžýl€]1\28Ý]ÊýGgï\31‡!4¢³„.sQŽCº(0j\1ÔH\0087|¯º[Eáë\17$J]‘ò0CWpü‚ö‡™Új\9oÓ\28\0%Zá†Q†Sè¬VFï-rh™\"\8ÎÇÅÇzů?¶\9\27}ó\0289²/Þ˜8üñ¸n>ÃØ3£:̉(Ë\12B\29äRÆì1Á{þ \19GL‹Þ\2»i”3Æ“ƒ÷eÔ7G>–¨\17P¦Qÿ–i§¨\31Ž·,E3\0298e)\\¬úa¢”æs7)\24À©›Ã\17°oܳ3\22\0019'Ëï5¬ŠsnóœßsNîR{7:àŸ½qu\9Öæ\22-É•\ +“Þã©D™o8çE1Š{íÁÁûŒž•1(~…_n\13úåڹ๭Å\22\30`ó¸XµNŽ…iòÑ\17Ö\12Ga‡}A&`;0\3v!f š‡¥­%mª`M×/õªœ\21K7,Wb.rt(\3\3¸27+¦ø7­8 Ū÷‘Å\22•¨gM”\\\127º‡ý)·*cz\15¸a™nDb\20\5ÀS'‡·ë\ +£/‚OܳƒµŠ\13o\17zÛF„*G‡2ð‚7^³6nÓÛ¸¸‘\21Þ?\"–¨Þ\0î`ïù0ÀÛR°C´\"·ª#k¦Ö*a^>¬\18\\”Kè\23H\25éF¬7ÎŒ6:KZÜ5cúœe\8(²æ\"k7漟,£bñÄš÷,\8+òéäuÒzÖ\2~sÏ4t¨(c‰}víWÓÓX5\28ŸÙ¤Úâv\8mËeg«öAEÕiÿ4fÂŽ¶ä1Â\4óÌè¬Ô,ÖV\29Ý'ðþ®ž{2þ\17Q]\24¨\5;‘§\26v£.BðþÑ‘£“·NÍ\0219“|K¦\12”¿ªuF\14ðU3ÏÜ‹âdQ=*$òŽa³@V\4QÀ¾”§@¦\29\25ÕóÔÛ»õf/\6»\13/ÿ{œ\25“”ÓL¡ƒÌN3N\14r\0067ìæÁuûAŒ\31Ÿœq»£E[u&ìKBÏÍ>Ðɪ|@¥\4×æÉþaÉ\18à?€\27^\20\26÷.\27­\9>ÈP‹L0‹BÅH0²&4Êi÷•U6\22{ý0\23wx`²iV2O\29»ª:~¹\23þú&I\5Pf`¿\"\25^\11w™1;Îäà\4'1µ¼¿!\127.cÚ\127*ƒUœ„ûÝÜT½ÀN.’ÃIX½¿\28l‚\29H\2©ÍyB7œ$ÿ±\2ì\14”QoTþzR{\2\12PZ\20V_Ÿ“xâÜ\13óp‚©wr.:'ñ~ë\28Ê;êXìçÚœ“œ9ô\29·l“qâ±z‚™·~Æ\1õÎ2q]ª:Æ\25ÛöNb!m\1'\29w\4üÀ]™Ì¯7Lÿ44…~ædþ¨^\14\29¶‰ŽÃ˜\11œ9­3¼bZ›¹\20\19\8‚Ëe›MDœÑ]Z=Ín8½S+¢¿G9ÿ:\2©žëXQLL¹·]‰5?iÎ]•ÎX·\28³¦£“WWbÿ+sÄUJ\\#ç2ý…ôrŽàÍ•\29­\19Í=òÎûB\7Ùè§ì¾Ç%\"\11¾\28¡â½èJ\24¾Ü¤g­ xò\21\0045Qß—or®b{†\127‹¸;Y5O™öFŸœk:9J휩DGV•ñá£M\24Ô0è\1272¡—UOØ'ÅŒür/±æ¶|Vpä9È\26dJû‘‹f5›¾Æ{íàzÑÄ2\16˜¾\\£W.\0\\G;°ÇnP s\7ÞŽ2ÐoX}æ\8õâ•sv‡|ôü‘\26f\7:6êîØί³ÞOkÆîÔ\\1÷óKéí”ëQS³³Øü^¯&¸zÔ+gI~Æä\26âùùo‚ÙJYÒéðfr–\20LôXL€sÖךZS\\\26-?L&°*g·’£ÔB²ÉÎäÂÌ­³äo^ðs\20\24±Ð&\17Wtsž!\17•­±$ž™µ>³¡°C¥P·\0159jýõ‰\15Ô¹™óî\\ç\29ôŠ'È\25\19àì<¨ÎâP1’ŠÿÁe\17pzÉ(\30LçO\7\19'Sb¸—Ó³»~ã±óuQÍêH-È‘ÓM£\16³Þa5\ 
+´\127‡$!¸•Óæ-ëT«åŽl¬|¶Ñb]SƮЎL™wb_lN¢g1½WOŒ÷ü‡2ïlÿýÎößï²ä¾;”±\31\127Ç\0002aµ\12“÷ŽIø>l#³á½WÈà€òÓü¼kEzGnú\7p\31üF\31l<£\21\26ï\15ñ]\8\\$ïšjâÚ‹Å{ô°s<êa¥Ì\27G/\22§•âwzÅ[@‚_dC[3¯Þ#h)`íÒ\30Ç—\30±Ø\\ʼnGî\9¾\14\7l½\6Ux\0å\26TX’ÔüZ\29Xj#¦è\7&êEÌ\31Ö\\Ý\15>L\2öçA\16tßíY[”\1\23©çvNÅ4'\18ŠK²·!o  ™8²ÀV4d=k·‘HkÁ1\3\18{o\0260\19}Ó‘Q^Q\5lt\7\"Ô¸exKV\16ÉU4yu®Üc·¥Œú+{sÐ\18+\127\7g¼²Ú¯JlEæŸx¸%\15ÃÙß(1wG|ºÝÂd\31UÐÑÔÒ–\9FŽãz±\"%é-¹òᡨòµŸøàÙ;Ó9À‡)BHSŠ±{1“ì]Õp{/®‰ÌËQµªÆÉ\13{I(C@\25©­B¨\26Q¦Á;2€S!z¬¿×…w\2öó§}¦Ó€;¹_(\18ö×á¹3¾èY/ÜËÀ6L}ÂîX|ÝK$„‰ƒÍËø§ø¾‰\26aÜ6Z´{awXï”\28=)ƒ;a3Æz\28¤Ú]\127Ð&­Ó…ê°iÉÄê…ßëAFÊVDzž\16\24;á¦\29%\0032`Ja§pZ\21…Ëí8­fË/\23é¨U ÜÄŒâì¯ðkyïQö\24ý6\29Ï{œ¸Fä£\4šd„CÙ%\0“4¯¥Œx÷®È”‹L\ +\20åÕkËÔ¨m™'‘+Z—_g×úËÄU\"ä+æw™Á¯-3øñ}Šú°£ä\ +OÜ\18ûA\25ê¢\28£­K0Ê·,\\‘uYÄ üÌû ˜:ƒtþ†âãä«G¹’´u­ÆhòÑ^TÂ\127\127Ã\1†ë5§\30ÔÔ†©ˆ\2\19\6ÆHת礆sr„ëdn‘cÖѾÖïæçå˜è¸{‰«õ\13a$hQ å=C°\26\13gÆ[\ +nº•Ñµ\16\3‰LÛr\6ÄV¸Ã¤-HÑw¾3íwZ\17FX25Œ\0117x?\\?ZÂu\11³Y>\17\6û\31Šå©wüVO6jÊ\11i\0293\ +Îæ²{$_·1®\30põ(W’\22¯\21E\16\1d¸¨„\9\1y\127o\0ÿ3…_`¸Al\14ä#L„£6\16ùøZ\27\14\13¨¬çšrÁ\9U‘×r}·Ãž¼\0ñ/\\Mù5f@Ïäð\1ßØL€KA†µÎ%®ª²†LûCÍR‹ö\12¯qµáf\1\13äV†Þ|Q6Ö[\ +\15º–Ôxúe`Ž˜aX¯1lû0¡p¼ÌØ$óÊÀ‹˜]\3–\17x\11^€¿Ü«õ®õØw;âˆA†—f›\3ÇiO\\\11þÿ\"¤4ˆmþC^DùþÙ\4d%ØAÀ!\19mV.¤w\18Ý,§´\20X£\3^îˆyzJãç®ÊO•X-{ÍüÌÏ=vB3B¶\18[‡Cë®­ÄKË]Ê»[ŒZô\12ÒÞ]Ï|ø\11Σ{%LÓ_2½zdé@€\21`@€W±1¾€¾Lô¬Ô\15PÈ\6Y9F°\17\9ˆÅ\0238þ\29€Í «œðý‰L²Å\13-ÖI\7»@(Á@Qw\23ˆä\26Ò¡4P¦¦†á-¬è\2ÔÝAB-}Kçö bˆ\127áI\2_§j(±gE6\30$úÃ>\1\17=\21ñþQâ•î\16­8ˆG‰>9J—Áô+gÞÍÄ\23a¿M¬¸Í)ó+›Ð.A¬Æ¦øªˆ\12ñû¼é\26\20jò\6Wô$lOL\19Þó\31ºù\29£/Y\12\4xÜ\4±\15Xƒ¾at\6>䶈Û>0z>hP’¡@ îì\1†L\1j³\0=F\16»šô.\1\2µ\0ÉÁp%¯s\1S\24näÄ\18p°\8Â\26E>\7\0071ï@Ж7­EiÙÇ|†s\5ß·£(ú™v/Wt‚\14Â\28ÝŠ\5Ã’ós\7ð¯ÏŸÓt\7‰8Ú‚¯Iwî$¹cKÃ¥µ/:ÈÐøŽ£EI6gsÔ\11|Â\12Ìô\7\13;ž\5\12Ú\28äìxäŽö\25Ä\25µ,QÚ\22“ë=ð±a”¾õ(5T®„¡O\31‰ÅÍ\2ô¼l\9_½\3áw•ó\15|•7òÿ°µ\6f°\2çQ\14b\19w\15ž-H\\t\14å™\9Ç´\4lµA†Ìö3îû.í$¼©¦›’õ‚…á\9N=°(\ +ÑYÄÝýE\19®¿¿ùŽ\29\26wý²7ÿ\3\21L\27Ë", + ["filename"]="tdict-std.txt", + ["length"]=268756, + }, + }, + ["timestamp"]="2013-05-19 19:08:41", + ["version"]="1.00", +} \ No newline at end of file diff --git a/tex/generic/context/luatex/luatex-basics-gen.lua b/tex/generic/context/luatex/luatex-basics-gen.lua index 0561778b4..4c504a24e 100644 --- a/tex/generic/context/luatex/luatex-basics-gen.lua +++ b/tex/generic/context/luatex/luatex-basics-gen.lua @@ -1,327 +1,327 @@ -if not modules then modules = { } end modules ['luat-basics-gen'] = { - version = 1.100, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local dummyfunction = function() -end - -local dummyreporter = function(c) - return function(...) - (texio.reporter or texio.write_nl)(c .. " : " .. 
string.formatters(...)) - end -end - -statistics = { - register = dummyfunction, - starttiming = dummyfunction, - stoptiming = dummyfunction, - elapsedtime = nil, -} - -directives = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -trackers = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -experiments = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -storage = { -- probably no longer needed - register = dummyfunction, - shared = { }, -} - -logs = { - new = dummyreporter, - reporter = dummyreporter, - messenger = dummyreporter, - report = dummyfunction, -} - -callbacks = { - register = function(n,f) return callback.register(n,f) end, - -} - -utilities = { - storage = { - allocate = function(t) return t or { } end, - mark = function(t) return t or { } end, - }, -} - -characters = characters or { - data = { } -} - --- we need to cheat a bit here - -texconfig.kpse_init = true - -resolvers = resolvers or { } -- no fancy file helpers used - -local remapper = { - otf = "opentype fonts", - ttf = "truetype fonts", - ttc = "truetype fonts", - dfont = "truetype fonts", -- "truetype dictionary", - cid = "cid maps", - cidmap = "cid maps", - fea = "font feature files", - pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! - pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! -} - -function resolvers.findfile(name,fileformat) - name = string.gsub(name,"\\","/") - if not fileformat or fileformat == "" then - fileformat = file.suffix(name) - if fileformat == "" then - fileformat = "tex" - end - end - fileformat = string.lower(fileformat) - fileformat = remapper[fileformat] or fileformat - local found = kpse.find_file(name,fileformat) - if not found or found == "" then - found = kpse.find_file(name,"other text files") - end - return found -end - --- function resolvers.findbinfile(name,fileformat) --- if not fileformat or fileformat == "" then --- fileformat = file.suffix(name) --- end --- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat) --- end - -resolvers.findbinfile = resolvers.findfile - -function resolvers.resolve(s) - return s -end - -function resolvers.unresolve(s) - return s -end - --- Caches ... I will make a real stupid version some day when I'm in the --- mood. After all, the generic code does not need the more advanced --- ConTeXt features. Cached data is not shared between ConTeXt and other --- usage as I don't want any dependency at all. Also, ConTeXt might have --- different needs and tricks added. - ---~ containers.usecache = true - -caches = { } - -local writable = nil -local readables = { } -local usingjit = jit - -if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then - caches.namespace = 'generic' -end - -do - - -- standard context tree setup - - local cachepaths = kpse.expand_path('$TEXMFCACHE') or "" - - -- quite like tex live or so - - if cachepaths == "" then - cachepaths = kpse.expand_path('$TEXMFVAR') - end - - -- this also happened to be used - - if cachepaths == "" then - cachepaths = kpse.expand_path('$VARTEXMF') - end - - -- and this is a last resort - - if cachepaths == "" then - cachepaths = "." 
- end - - cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":") - - for i=1,#cachepaths do - local cachepath = cachepaths[i] - if not lfs.isdir(cachepath) then - lfs.mkdirs(cachepath) -- needed for texlive and latex - if lfs.isdir(cachepath) then - texio.write(string.format("(created cache path: %s)",cachepath)) - end - end - if file.is_writable(cachepath) then - writable = file.join(cachepath,"luatex-cache") - lfs.mkdir(writable) - writable = file.join(writable,caches.namespace) - lfs.mkdir(writable) - break - end - end - - for i=1,#cachepaths do - if file.is_readable(cachepaths[i]) then - readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace) - end - end - - if not writable then - texio.write_nl("quiting: fix your writable cache path") - os.exit() - elseif #readables == 0 then - texio.write_nl("quiting: fix your readable cache path") - os.exit() - elseif #readables == 1 and readables[1] == writable then - texio.write(string.format("(using cache: %s)",writable)) - else - texio.write(string.format("(using write cache: %s)",writable)) - texio.write(string.format("(using read cache: %s)",table.concat(readables, " "))) - end - -end - -function caches.getwritablepath(category,subcategory) - local path = file.join(writable,category) - lfs.mkdir(path) - path = file.join(path,subcategory) - lfs.mkdir(path) - return path -end - -function caches.getreadablepaths(category,subcategory) - local t = { } - for i=1,#readables do - t[i] = file.join(readables[i],category,subcategory) - end - return t -end - -local function makefullname(path,name) - if path and path ~= "" then - return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") - end -end - -function caches.is_writable(path,name) - local fullname = makefullname(path,name) - return fullname and file.is_writable(fullname) -end - -function caches.loaddata(paths,name) - for i=1,#paths do - local data = false - local luaname, lucname = makefullname(paths[i],name) - if lucname and lfs.isfile(lucname) then -- maybe also check for size - texio.write(string.format("(load luc: %s)",lucname)) - data = loadfile(lucname) - if data then - data = data() - end - if data then - return data - else - texio.write(string.format("(loading failed: %s)",lucname)) - end - end - if luaname and lfs.isfile(luaname) then - texio.write(string.format("(load lua: %s)",luaname)) - data = loadfile(luaname) - if data then - data = data() - end - if data then - return data - end - end - end -end - -function caches.savedata(path,name,data) - local luaname, lucname = makefullname(path,name) - if luaname then - texio.write(string.format("(save: %s)",luaname)) - table.tofile(luaname,data,true,{ reduce = true }) - if lucname and type(caches.compile) == "function" then - os.remove(lucname) -- better be safe - texio.write(string.format("(save: %s)",lucname)) - caches.compile(data,luaname,lucname) - end - end -end - --- According to KH os.execute is not permitted in plain/latex so there is --- no reason to use the normal context way. So the method here is slightly --- different from the one we have in context. We also use different suffixes --- as we don't want any clashes (sharing cache files is not that handy as --- context moves on faster.) --- --- Beware: serialization might fail on large files (so maybe we should pcall --- this) in which case one should limit the method to luac and enable support --- for execution. 
- --- function caches.compile(data,luaname,lucname) --- local d = io.loaddata(luaname) --- if not d or d == "" then --- d = table.serialize(data,true) -- slow --- end --- if d and d ~= "" then --- local f = io.open(lucname,'w') --- if f then --- local s = loadstring(d) --- if s then --- f:write(string.dump(s,true)) --- end --- f:close() --- end --- end --- end - -function caches.compile(data,luaname,lucname) - local d = io.loaddata(luaname) - if not d or d == "" then - d = table.serialize(data,true) -- slow - end - if d and d ~= "" then - local f = io.open(lucname,'wb') - if f then - local s = loadstring(d) - if s then - f:write(string.dump(s,true)) - end - f:close() - end - end -end - --- - -function table.setmetatableindex(t,f) - setmetatable(t,{ __index = f }) -end +if not modules then modules = { } end modules ['luat-basics-gen'] = { + version = 1.100, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local dummyfunction = function() +end + +local dummyreporter = function(c) + return function(...) + (texio.reporter or texio.write_nl)(c .. " : " .. string.formatters(...)) + end +end + +statistics = { + register = dummyfunction, + starttiming = dummyfunction, + stoptiming = dummyfunction, + elapsedtime = nil, +} + +directives = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +trackers = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +experiments = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +storage = { -- probably no longer needed + register = dummyfunction, + shared = { }, +} + +logs = { + new = dummyreporter, + reporter = dummyreporter, + messenger = dummyreporter, + report = dummyfunction, +} + +callbacks = { + register = function(n,f) return callback.register(n,f) end, + +} + +utilities = { + storage = { + allocate = function(t) return t or { } end, + mark = function(t) return t or { } end, + }, +} + +characters = characters or { + data = { } +} + +-- we need to cheat a bit here + +texconfig.kpse_init = true + +resolvers = resolvers or { } -- no fancy file helpers used + +local remapper = { + otf = "opentype fonts", + ttf = "truetype fonts", + ttc = "truetype fonts", + dfont = "truetype fonts", -- "truetype dictionary", + cid = "cid maps", + cidmap = "cid maps", + fea = "font feature files", + pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! + pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! 
+} + +function resolvers.findfile(name,fileformat) + name = string.gsub(name,"\\","/") + if not fileformat or fileformat == "" then + fileformat = file.suffix(name) + if fileformat == "" then + fileformat = "tex" + end + end + fileformat = string.lower(fileformat) + fileformat = remapper[fileformat] or fileformat + local found = kpse.find_file(name,fileformat) + if not found or found == "" then + found = kpse.find_file(name,"other text files") + end + return found +end + +-- function resolvers.findbinfile(name,fileformat) +-- if not fileformat or fileformat == "" then +-- fileformat = file.suffix(name) +-- end +-- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat) +-- end + +resolvers.findbinfile = resolvers.findfile + +function resolvers.resolve(s) + return s +end + +function resolvers.unresolve(s) + return s +end + +-- Caches ... I will make a real stupid version some day when I'm in the +-- mood. After all, the generic code does not need the more advanced +-- ConTeXt features. Cached data is not shared between ConTeXt and other +-- usage as I don't want any dependency at all. Also, ConTeXt might have +-- different needs and tricks added. + +--~ containers.usecache = true + +caches = { } + +local writable = nil +local readables = { } +local usingjit = jit + +if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then + caches.namespace = 'generic' +end + +do + + -- standard context tree setup + + local cachepaths = kpse.expand_path('$TEXMFCACHE') or "" + + -- quite like tex live or so + + if cachepaths == "" then + cachepaths = kpse.expand_path('$TEXMFVAR') + end + + -- this also happened to be used + + if cachepaths == "" then + cachepaths = kpse.expand_path('$VARTEXMF') + end + + -- and this is a last resort + + if cachepaths == "" then + cachepaths = "." 
+ end + + cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":") + + for i=1,#cachepaths do + local cachepath = cachepaths[i] + if not lfs.isdir(cachepath) then + lfs.mkdirs(cachepath) -- needed for texlive and latex + if lfs.isdir(cachepath) then + texio.write(string.format("(created cache path: %s)",cachepath)) + end + end + if file.is_writable(cachepath) then + writable = file.join(cachepath,"luatex-cache") + lfs.mkdir(writable) + writable = file.join(writable,caches.namespace) + lfs.mkdir(writable) + break + end + end + + for i=1,#cachepaths do + if file.is_readable(cachepaths[i]) then + readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace) + end + end + + if not writable then + texio.write_nl("quiting: fix your writable cache path") + os.exit() + elseif #readables == 0 then + texio.write_nl("quiting: fix your readable cache path") + os.exit() + elseif #readables == 1 and readables[1] == writable then + texio.write(string.format("(using cache: %s)",writable)) + else + texio.write(string.format("(using write cache: %s)",writable)) + texio.write(string.format("(using read cache: %s)",table.concat(readables, " "))) + end + +end + +function caches.getwritablepath(category,subcategory) + local path = file.join(writable,category) + lfs.mkdir(path) + path = file.join(path,subcategory) + lfs.mkdir(path) + return path +end + +function caches.getreadablepaths(category,subcategory) + local t = { } + for i=1,#readables do + t[i] = file.join(readables[i],category,subcategory) + end + return t +end + +local function makefullname(path,name) + if path and path ~= "" then + return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") + end +end + +function caches.is_writable(path,name) + local fullname = makefullname(path,name) + return fullname and file.is_writable(fullname) +end + +function caches.loaddata(paths,name) + for i=1,#paths do + local data = false + local luaname, lucname = makefullname(paths[i],name) + if lucname and lfs.isfile(lucname) then -- maybe also check for size + texio.write(string.format("(load luc: %s)",lucname)) + data = loadfile(lucname) + if data then + data = data() + end + if data then + return data + else + texio.write(string.format("(loading failed: %s)",lucname)) + end + end + if luaname and lfs.isfile(luaname) then + texio.write(string.format("(load lua: %s)",luaname)) + data = loadfile(luaname) + if data then + data = data() + end + if data then + return data + end + end + end +end + +function caches.savedata(path,name,data) + local luaname, lucname = makefullname(path,name) + if luaname then + texio.write(string.format("(save: %s)",luaname)) + table.tofile(luaname,data,true,{ reduce = true }) + if lucname and type(caches.compile) == "function" then + os.remove(lucname) -- better be safe + texio.write(string.format("(save: %s)",lucname)) + caches.compile(data,luaname,lucname) + end + end +end + +-- According to KH os.execute is not permitted in plain/latex so there is +-- no reason to use the normal context way. So the method here is slightly +-- different from the one we have in context. We also use different suffixes +-- as we don't want any clashes (sharing cache files is not that handy as +-- context moves on faster.) +-- +-- Beware: serialization might fail on large files (so maybe we should pcall +-- this) in which case one should limit the method to luac and enable support +-- for execution. 
+ +-- function caches.compile(data,luaname,lucname) +-- local d = io.loaddata(luaname) +-- if not d or d == "" then +-- d = table.serialize(data,true) -- slow +-- end +-- if d and d ~= "" then +-- local f = io.open(lucname,'w') +-- if f then +-- local s = loadstring(d) +-- if s then +-- f:write(string.dump(s,true)) +-- end +-- f:close() +-- end +-- end +-- end + +function caches.compile(data,luaname,lucname) + local d = io.loaddata(luaname) + if not d or d == "" then + d = table.serialize(data,true) -- slow + end + if d and d ~= "" then + local f = io.open(lucname,'wb') + if f then + local s = loadstring(d) + if s then + f:write(string.dump(s,true)) + end + f:close() + end + end +end + +-- + +function table.setmetatableindex(t,f) + setmetatable(t,{ __index = f }) +end diff --git a/tex/generic/context/luatex/luatex-basics-nod.lua b/tex/generic/context/luatex/luatex-basics-nod.lua index 5ab9df7f9..38ccd8fc7 100644 --- a/tex/generic/context/luatex/luatex-basics-nod.lua +++ b/tex/generic/context/luatex/luatex-basics-nod.lua @@ -1,104 +1,104 @@ -if not modules then modules = { } end modules ['luatex-fonts-nod'] = { - version = 1.001, - comment = "companion to luatex-fonts.lua", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - --- Don't depend on code here as it is only needed to complement the --- font handler code. - --- Attributes: - -if tex.attribute[0] ~= 0 then - - texio.write_nl("log","!") - texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") - texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") - texio.write_nl("log","! 
purposes so setting them at the TeX end might break the font handler.") - texio.write_nl("log","!") - - tex.attribute[0] = 0 -- else no features - -end - -attributes = attributes or { } -attributes.unsetvalue = -0x7FFFFFFF - -local numbers, last = { }, 127 - -attributes.private = attributes.private or function(name) - local number = numbers[name] - if not number then - if last < 255 then - last = last + 1 - end - number = last - numbers[name] = number - end - return number -end - --- Nodes: - -nodes = { } -nodes.pool = { } -nodes.handlers = { } - -local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end -local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end -local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" } - -nodes.nodecodes = nodecodes -nodes.whatcodes = whatcodes -nodes.whatsitcodes = whatcodes -nodes.glyphcodes = glyphcodes - -local free_node = node.free -local remove_node = node.remove -local new_node = node.new -local traverse_id = node.traverse_id - -local math_code = nodecodes.math - -nodes.handlers.protectglyphs = node.protect_glyphs -nodes.handlers.unprotectglyphs = node.unprotect_glyphs - -function nodes.remove(head, current, free_too) - local t = current - head, current = remove_node(head,current) - if t then - if free_too then - free_node(t) - t = nil - else - t.next, t.prev = nil, nil - end - end - return head, current, t -end - -function nodes.delete(head,current) - return nodes.remove(head,current,true) -end - -nodes.before = node.insert_before -nodes.after = node.insert_after - -function nodes.pool.kern(k) - local n = new_node("kern",1) - n.kern = k - return n -end - -function nodes.endofmath(n) - for n in traverse_id(math_code,n.next) do - return n - end -end +if not modules then modules = { } end modules ['luatex-fonts-nod'] = { + version = 1.001, + comment = "companion to luatex-fonts.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +-- Don't depend on code here as it is only needed to complement the +-- font handler code. + +-- Attributes: + +if tex.attribute[0] ~= 0 then + + texio.write_nl("log","!") + texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") + texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") + texio.write_nl("log","! 
purposes so setting them at the TeX end might break the font handler.") + texio.write_nl("log","!") + + tex.attribute[0] = 0 -- else no features + +end + +attributes = attributes or { } +attributes.unsetvalue = -0x7FFFFFFF + +local numbers, last = { }, 127 + +attributes.private = attributes.private or function(name) + local number = numbers[name] + if not number then + if last < 255 then + last = last + 1 + end + number = last + numbers[name] = number + end + return number +end + +-- Nodes: + +nodes = { } +nodes.pool = { } +nodes.handlers = { } + +local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end +local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end +local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" } + +nodes.nodecodes = nodecodes +nodes.whatcodes = whatcodes +nodes.whatsitcodes = whatcodes +nodes.glyphcodes = glyphcodes + +local free_node = node.free +local remove_node = node.remove +local new_node = node.new +local traverse_id = node.traverse_id + +local math_code = nodecodes.math + +nodes.handlers.protectglyphs = node.protect_glyphs +nodes.handlers.unprotectglyphs = node.unprotect_glyphs + +function nodes.remove(head, current, free_too) + local t = current + head, current = remove_node(head,current) + if t then + if free_too then + free_node(t) + t = nil + else + t.next, t.prev = nil, nil + end + end + return head, current, t +end + +function nodes.delete(head,current) + return nodes.remove(head,current,true) +end + +nodes.before = node.insert_before +nodes.after = node.insert_after + +function nodes.pool.kern(k) + local n = new_node("kern",1) + n.kern = k + return n +end + +function nodes.endofmath(n) + for n in traverse_id(math_code,n.next) do + return n + end +end diff --git a/tex/generic/context/luatex/luatex-fonts-cbk.lua b/tex/generic/context/luatex/luatex-fonts-cbk.lua index 9db94f65e..b4f4c8c91 100644 --- a/tex/generic/context/luatex/luatex-fonts-cbk.lua +++ b/tex/generic/context/luatex/luatex-fonts-cbk.lua @@ -1,68 +1,68 @@ -if not modules then modules = { } end modules ['luatex-fonts-cbk'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local nodes = nodes - --- Fonts: (might move to node-gef.lua) - -local traverse_id = node.traverse_id -local glyph_code = nodes.nodecodes.glyph - -function nodes.handlers.characters(head) - local fontdata = fonts.hashes.identifiers - if fontdata then - local usedfonts, done, prevfont = { }, false, nil - for n in traverse_id(glyph_code,head) do - local font = n.font - if font ~= prevfont then - prevfont = font - local used = usedfonts[font] - if not used then - local tfmdata = fontdata[font] -- - if tfmdata then - local shared = tfmdata.shared -- we need to check shared, only when same features - if shared then - local processors = shared.processes - if processors and #processors > 0 then - usedfonts[font] = processors - done = true - end - end - end - end - end - end - if done then - for font, processors in next, usedfonts do - for i=1,#processors do - local h, d = processors[i](head,font,0) - head, done = h or head, done or d - end - end - end - return head, true - else - return head, false - end -end - -function 
nodes.simple_font_handler(head) --- lang.hyphenate(head) - head = nodes.handlers.characters(head) - nodes.injections.handler(head) - nodes.handlers.protectglyphs(head) - head = node.ligaturing(head) - head = node.kerning(head) - return head -end +if not modules then modules = { } end modules ['luatex-fonts-cbk'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local nodes = nodes + +-- Fonts: (might move to node-gef.lua) + +local traverse_id = node.traverse_id +local glyph_code = nodes.nodecodes.glyph + +function nodes.handlers.characters(head) + local fontdata = fonts.hashes.identifiers + if fontdata then + local usedfonts, done, prevfont = { }, false, nil + for n in traverse_id(glyph_code,head) do + local font = n.font + if font ~= prevfont then + prevfont = font + local used = usedfonts[font] + if not used then + local tfmdata = fontdata[font] -- + if tfmdata then + local shared = tfmdata.shared -- we need to check shared, only when same features + if shared then + local processors = shared.processes + if processors and #processors > 0 then + usedfonts[font] = processors + done = true + end + end + end + end + end + end + if done then + for font, processors in next, usedfonts do + for i=1,#processors do + local h, d = processors[i](head,font,0) + head, done = h or head, done or d + end + end + end + return head, true + else + return head, false + end +end + +function nodes.simple_font_handler(head) +-- lang.hyphenate(head) + head = nodes.handlers.characters(head) + nodes.injections.handler(head) + nodes.handlers.protectglyphs(head) + head = node.ligaturing(head) + head = node.kerning(head) + return head +end diff --git a/tex/generic/context/luatex/luatex-fonts-def.lua b/tex/generic/context/luatex/luatex-fonts-def.lua index 0c2f0dbd5..affe763c2 100644 --- a/tex/generic/context/luatex/luatex-fonts-def.lua +++ b/tex/generic/context/luatex/luatex-fonts-def.lua @@ -1,97 +1,97 @@ -if not modules then modules = { } end modules ['luatex-font-def'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts - --- A bit of tuning for definitions. - -fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload - --- tricky: we sort of bypass the parser and directly feed all into --- the sub parser - -function fonts.definers.getspecification(str) - return "", str, "", ":", str -end - --- the generic name parser (different from context!) - -local list = { } - -local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!) 
-local function isfile () list.lookup = 'file' end -local function isname () list.lookup = 'name' end -local function thename(s) list.name = s end -local function issub (v) list.sub = v end -local function iscrap (s) list.crap = string.lower(s) end -local function iskey (k,v) list[k] = v end -local function istrue (s) list[s] = true end -local function isfalse(s) list[s] = false end - -local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C - -local spaces = P(" ")^0 -local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0 -local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces -local filename_1 = P("file:")/isfile * (namespec/thename) -local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]") -local fontname_1 = P("name:")/isname * (namespec/thename) -local fontname_2 = P(true)/issome * (namespec/thename) -local sometext = (R("az","AZ","09") + S("+-."))^1 -local truevalue = P("+") * spaces * (sometext/istrue) -local falsevalue = P("-") * spaces * (sometext/isfalse) -local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey -local somevalue = sometext/istrue -local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim -local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces -local options = P(":") * spaces * (P(";")^0 * option)^0 - -local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0 - -local function colonized(specification) -- xetex mode - list = { } - lpeg.match(pattern,specification.specification) - list.crap = nil -- style not supported, maybe some day - if list.name then - specification.name = list.name - list.name = nil - end - if list.lookup then - specification.lookup = list.lookup - list.lookup = nil - end - if list.sub then - specification.sub = list.sub - list.sub = nil - end - specification.features.normal = fonts.handlers.otf.features.normalize(list) - return specification -end - -fonts.definers.registersplit(":",colonized,"cryptic") -fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names] - -function fonts.definers.applypostprocessors(tfmdata) - local postprocessors = tfmdata.postprocessors - if postprocessors then - for i=1,#postprocessors do - local extrahash = postprocessors[i](tfmdata) -- after scaling etc - if type(extrahash) == "string" and extrahash ~= "" then - -- e.g. a reencoding needs this - extrahash = string.gsub(lower(extrahash),"[^a-z]","-") - tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash) - end - end - end - return tfmdata -end +if not modules then modules = { } end modules ['luatex-font-def'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts + +-- A bit of tuning for definitions. + +fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload + +-- tricky: we sort of bypass the parser and directly feed all into +-- the sub parser + +function fonts.definers.getspecification(str) + return "", str, "", ":", str +end + +-- the generic name parser (different from context!) + +local list = { } + +local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!) 
+local function isfile () list.lookup = 'file' end +local function isname () list.lookup = 'name' end +local function thename(s) list.name = s end +local function issub (v) list.sub = v end +local function iscrap (s) list.crap = string.lower(s) end +local function iskey (k,v) list[k] = v end +local function istrue (s) list[s] = true end +local function isfalse(s) list[s] = false end + +local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C + +local spaces = P(" ")^0 +local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0 +local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces +local filename_1 = P("file:")/isfile * (namespec/thename) +local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]") +local fontname_1 = P("name:")/isname * (namespec/thename) +local fontname_2 = P(true)/issome * (namespec/thename) +local sometext = (R("az","AZ","09") + S("+-."))^1 +local truevalue = P("+") * spaces * (sometext/istrue) +local falsevalue = P("-") * spaces * (sometext/isfalse) +local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey +local somevalue = sometext/istrue +local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim +local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces +local options = P(":") * spaces * (P(";")^0 * option)^0 + +local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0 + +local function colonized(specification) -- xetex mode + list = { } + lpeg.match(pattern,specification.specification) + list.crap = nil -- style not supported, maybe some day + if list.name then + specification.name = list.name + list.name = nil + end + if list.lookup then + specification.lookup = list.lookup + list.lookup = nil + end + if list.sub then + specification.sub = list.sub + list.sub = nil + end + specification.features.normal = fonts.handlers.otf.features.normalize(list) + return specification +end + +fonts.definers.registersplit(":",colonized,"cryptic") +fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names] + +function fonts.definers.applypostprocessors(tfmdata) + local postprocessors = tfmdata.postprocessors + if postprocessors then + for i=1,#postprocessors do + local extrahash = postprocessors[i](tfmdata) -- after scaling etc + if type(extrahash) == "string" and extrahash ~= "" then + -- e.g. 
a reencoding needs this + extrahash = string.gsub(lower(extrahash),"[^a-z]","-") + tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash) + end + end + end + return tfmdata +end diff --git a/tex/generic/context/luatex/luatex-fonts-demo-vf-1.lua b/tex/generic/context/luatex/luatex-fonts-demo-vf-1.lua index 3878ae648..6b622aa05 100644 --- a/tex/generic/context/luatex/luatex-fonts-demo-vf-1.lua +++ b/tex/generic/context/luatex/luatex-fonts-demo-vf-1.lua @@ -1,38 +1,38 @@ -local identifiers = fonts.hashes.identifiers - -return function(specification) - local f1, id1 = fonts.constructors.readanddefine('lmroman10-regular', specification.size) - local f2, id2 = fonts.constructors.readanddefine('lmsans10-regular', specification.size) - local f3, id3 = fonts.constructors.readanddefine('lmtypewriter10-regular',specification.size) - if f1 and f2 and f3 then - f1.properties.name = specification.name - f1.properties.virtualized = true - f1.fonts = { - { id = id1 }, - { id = id2 }, - { id = id3 }, - } - local color = { [0] = - { "special", "pdf:0 g" }, - { "special", "pdf:1 0 0 rg" }, - { "special", "pdf:0 1 0 rg" }, - { "special", "pdf:0 0 1 rg" }, - } - local chars = { - identifiers[id1].characters, - identifiers[id2].characters, - identifiers[id3].characters, - } - for u, v in next, f1.characters do - local n = math.floor(math.random(1,3)+0.5) - local c = chars[n][u] or v - v.commands = { color[n], { 'slot', n, u }, color[0] } - v.kerns = nil - v.width = c.width - v.height = c.height - v.depth = c.depth - v.italic = nil - end - end - return f1 -end +local identifiers = fonts.hashes.identifiers + +return function(specification) + local f1, id1 = fonts.constructors.readanddefine('lmroman10-regular', specification.size) + local f2, id2 = fonts.constructors.readanddefine('lmsans10-regular', specification.size) + local f3, id3 = fonts.constructors.readanddefine('lmtypewriter10-regular',specification.size) + if f1 and f2 and f3 then + f1.properties.name = specification.name + f1.properties.virtualized = true + f1.fonts = { + { id = id1 }, + { id = id2 }, + { id = id3 }, + } + local color = { [0] = + { "special", "pdf:0 g" }, + { "special", "pdf:1 0 0 rg" }, + { "special", "pdf:0 1 0 rg" }, + { "special", "pdf:0 0 1 rg" }, + } + local chars = { + identifiers[id1].characters, + identifiers[id2].characters, + identifiers[id3].characters, + } + for u, v in next, f1.characters do + local n = math.floor(math.random(1,3)+0.5) + local c = chars[n][u] or v + v.commands = { color[n], { 'slot', n, u }, color[0] } + v.kerns = nil + v.width = c.width + v.height = c.height + v.depth = c.depth + v.italic = nil + end + end + return f1 +end diff --git a/tex/generic/context/luatex/luatex-fonts-enc.lua b/tex/generic/context/luatex/luatex-fonts-enc.lua index e20c3a03b..03f68118b 100644 --- a/tex/generic/context/luatex/luatex-fonts-enc.lua +++ b/tex/generic/context/luatex/luatex-fonts-enc.lua @@ -1,28 +1,28 @@ -if not modules then modules = { } end modules ['luatex-font-enc'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -fonts.encodings = { } -fonts.encodings.agl = { } - -setmetatable(fonts.encodings.agl, { __index = function(t,k) - if k == "unicodes" then - texio.write(" ") - local unicodes = 
dofile(resolvers.findfile("font-age.lua")) - fonts.encodings.agl = { unicodes = unicodes } - return unicodes - else - return nil - end -end }) - +if not modules then modules = { } end modules ['luatex-font-enc'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +fonts.encodings = { } +fonts.encodings.agl = { } + +setmetatable(fonts.encodings.agl, { __index = function(t,k) + if k == "unicodes" then + texio.write(" ") + local unicodes = dofile(resolvers.findfile("font-age.lua")) + fonts.encodings.agl = { unicodes = unicodes } + return unicodes + else + return nil + end +end }) + diff --git a/tex/generic/context/luatex/luatex-fonts-ext.lua b/tex/generic/context/luatex/luatex-fonts-ext.lua index b60d04512..63927c035 100644 --- a/tex/generic/context/luatex/luatex-fonts-ext.lua +++ b/tex/generic/context/luatex/luatex-fonts-ext.lua @@ -1,272 +1,272 @@ -if not modules then modules = { } end modules ['luatex-fonts-ext'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local otffeatures = fonts.constructors.newfeatures("otf") - --- A few generic extensions. - -local function initializeitlc(tfmdata,value) - if value then - -- the magic 40 and it formula come from Dohyun Kim but we might need another guess - local parameters = tfmdata.parameters - local italicangle = parameters.italicangle - if italicangle and italicangle ~= 0 then - local properties = tfmdata.properties - local factor = tonumber(value) or 1 - properties.hasitalics = true - properties.autoitalicamount = factor * (parameters.uwidth or 40)/2 - end - end -end - -otffeatures.register { - name = "itlc", - description = "italic correction", - initializers = { - base = initializeitlc, - node = initializeitlc, - } -} - --- slant and extend - -local function initializeslant(tfmdata,value) - value = tonumber(value) - if not value then - value = 0 - elseif value > 1 then - value = 1 - elseif value < -1 then - value = -1 - end - tfmdata.parameters.slantfactor = value -end - -otffeatures.register { - name = "slant", - description = "slant glyphs", - initializers = { - base = initializeslant, - node = initializeslant, - } -} - -local function initializeextend(tfmdata,value) - value = tonumber(value) - if not value then - value = 0 - elseif value > 10 then - value = 10 - elseif value < -10 then - value = -10 - end - tfmdata.parameters.extendfactor = value -end - -otffeatures.register { - name = "extend", - description = "scale glyphs horizontally", - initializers = { - base = initializeextend, - node = initializeextend, - } -} - --- expansion and protrusion - -fonts.protrusions = fonts.protrusions or { } -fonts.protrusions.setups = fonts.protrusions.setups or { } - -local setups = fonts.protrusions.setups - -local function initializeprotrusion(tfmdata,value) - if value then - local setup = setups[value] - if setup then - local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1 - local emwidth = tfmdata.parameters.quad - tfmdata.parameters.protrusion = { 
- auto = true, - } - for i, chr in next, tfmdata.characters do - local v, pl, pr = setup[i], nil, nil - if v then - pl, pr = v[1], v[2] - end - if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end - if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end - end - end - end -end - -otffeatures.register { - name = "protrusion", - description = "shift characters into the left and or right margin", - initializers = { - base = initializeprotrusion, - node = initializeprotrusion, - } -} - -fonts.expansions = fonts.expansions or { } -fonts.expansions.setups = fonts.expansions.setups or { } - -local setups = fonts.expansions.setups - -local function initializeexpansion(tfmdata,value) - if value then - local setup = setups[value] - if setup then - local factor = setup.factor or 1 - tfmdata.parameters.expansion = { - stretch = 10 * (setup.stretch or 0), - shrink = 10 * (setup.shrink or 0), - step = 10 * (setup.step or 0), - auto = true, - } - for i, chr in next, tfmdata.characters do - local v = setup[i] - if v and v ~= 0 then - chr.expansion_factor = v*factor - else -- can be option - chr.expansion_factor = factor - end - end - end - end -end - -otffeatures.register { - name = "expansion", - description = "apply hz optimization", - initializers = { - base = initializeexpansion, - node = initializeexpansion, - } -} - --- left over - -function fonts.loggers.onetimemessage() end - --- example vectors - -local byte = string.byte - -fonts.expansions.setups['default'] = { - - stretch = 2, shrink = 2, step = .5, factor = 1, - - [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7, - [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7, - [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7, - [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7, - [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7, - [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7, - [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7, - [byte('w')] = 0.7, [byte('z')] = 0.7, - [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7, -} - -fonts.protrusions.setups['default'] = { - - factor = 1, left = 1, right = 1, - - [0x002C] = { 0, 1 }, -- comma - [0x002E] = { 0, 1 }, -- period - [0x003A] = { 0, 1 }, -- colon - [0x003B] = { 0, 1 }, -- semicolon - [0x002D] = { 0, 1 }, -- hyphen - [0x2013] = { 0, 0.50 }, -- endash - [0x2014] = { 0, 0.33 }, -- emdash - [0x3001] = { 0, 1 }, -- ideographic comma 、 - [0x3002] = { 0, 1 }, -- ideographic full stop 。 - [0x060C] = { 0, 1 }, -- arabic comma ، - [0x061B] = { 0, 1 }, -- arabic semicolon ؛ - [0x06D4] = { 0, 1 }, -- arabic full stop ۔ - -} - --- normalizer - -fonts.handlers.otf.features.normalize = function(t) - if t.rand then - t.rand = "random" - end - return t -end - --- bonus - -function fonts.helpers.nametoslot(name) - local t = type(name) - if t == "string" then - local tfmdata = fonts.hashes.identifiers[currentfont()] - local shared = tfmdata and tfmdata.shared - local fntdata = shared and shared.rawdata - return fntdata and fntdata.resources.unicodes[name] - elseif t == "number" then - return n - end -end - --- \font\test=file:somefont:reencode=mymessup --- --- fonts.encodings.reencodings.mymessup = { --- [109] = 110, -- m --- [110] = 109, -- n --- } -
-fonts.encodings = fonts.encodings or { } -local reencodings = { } -fonts.encodings.reencodings = reencodings - -local function specialreencode(tfmdata,value) - -- we forget about kerns as we assume symbols and we - -- could issue a message if ther are kerns but it's - -- a hack anyway so we odn't care too much here - local encoding = value and reencodings[value] - if encoding then - local temp = { } - local char = tfmdata.characters - for k, v in next, encoding do - temp[k] = char[v] - end - for k, v in next, temp do - char[k] = temp[k] - end - -- if we use the font otherwise luatex gets confused so - -- we return an additional hash component for fullname - return string.format("reencoded:%s",value) - end -end - -local function reencode(tfmdata,value) - tfmdata.postprocessors = tfmdata.postprocessors or { } - table.insert(tfmdata.postprocessors, - function(tfmdata) - return specialreencode(tfmdata,value) - end - ) -end - -otffeatures.register { - name = "reencode", - description = "reencode characters", - manipulators = { - base = reencode, - node = reencode, - } -} +if not modules then modules = { } end modules ['luatex-fonts-ext'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local otffeatures = fonts.constructors.newfeatures("otf") + +-- A few generic extensions. + +local function initializeitlc(tfmdata,value) + if value then + -- the magic 40 and it formula come from Dohyun Kim but we might need another guess + local parameters = tfmdata.parameters + local italicangle = parameters.italicangle + if italicangle and italicangle ~= 0 then + local properties = tfmdata.properties + local factor = tonumber(value) or 1 + properties.hasitalics = true + properties.autoitalicamount = factor * (parameters.uwidth or 40)/2 + end + end +end + +otffeatures.register { + name = "itlc", + description = "italic correction", + initializers = { + base = initializeitlc, + node = initializeitlc, + } +} + +-- slant and extend + +local function initializeslant(tfmdata,value) + value = tonumber(value) + if not value then + value = 0 + elseif value > 1 then + value = 1 + elseif value < -1 then + value = -1 + end + tfmdata.parameters.slantfactor = value +end + +otffeatures.register { + name = "slant", + description = "slant glyphs", + initializers = { + base = initializeslant, + node = initializeslant, + } +} + +local function initializeextend(tfmdata,value) + value = tonumber(value) + if not value then + value = 0 + elseif value > 10 then + value = 10 + elseif value < -10 then + value = -10 + end + tfmdata.parameters.extendfactor = value +end + +otffeatures.register { + name = "extend", + description = "scale glyphs horizontally", + initializers = { + base = initializeextend, + node = initializeextend, + } +} + +-- expansion and protrusion + +fonts.protrusions = fonts.protrusions or { } +fonts.protrusions.setups = fonts.protrusions.setups or { } + +local setups = fonts.protrusions.setups + +local function initializeprotrusion(tfmdata,value) + if value then + local setup = setups[value] + if setup then + local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1 + local emwidth = tfmdata.parameters.quad + tfmdata.parameters.protrusion = { + auto = true, + } + for i, chr in next, tfmdata.characters do 
+ local v, pl, pr = setup[i], nil, nil + if v then + pl, pr = v[1], v[2] + end + if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end + if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end + end + end + end +end + +otffeatures.register { + name = "protrusion", + description = "shift characters into the left and or right margin", + initializers = { + base = initializeprotrusion, + node = initializeprotrusion, + } +} + +fonts.expansions = fonts.expansions or { } +fonts.expansions.setups = fonts.expansions.setups or { } + +local setups = fonts.expansions.setups + +local function initializeexpansion(tfmdata,value) + if value then + local setup = setups[value] + if setup then + local factor = setup.factor or 1 + tfmdata.parameters.expansion = { + stretch = 10 * (setup.stretch or 0), + shrink = 10 * (setup.shrink or 0), + step = 10 * (setup.step or 0), + auto = true, + } + for i, chr in next, tfmdata.characters do + local v = setup[i] + if v and v ~= 0 then + chr.expansion_factor = v*factor + else -- can be option + chr.expansion_factor = factor + end + end + end + end +end + +otffeatures.register { + name = "expansion", + description = "apply hz optimization", + initializers = { + base = initializeexpansion, + node = initializeexpansion, + } +} + +-- left over + +function fonts.loggers.onetimemessage() end + +-- example vectors + +local byte = string.byte + +fonts.expansions.setups['default'] = { + + stretch = 2, shrink = 2, step = .5, factor = 1, + + [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7, + [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7, + [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7, + [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7, + [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7, + [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7, + [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7, + [byte('w')] = 0.7, [byte('z')] = 0.7, + [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7, +} + +fonts.protrusions.setups['default'] = { + + factor = 1, left = 1, right = 1, + + [0x002C] = { 0, 1 }, -- comma + [0x002E] = { 0, 1 }, -- period + [0x003A] = { 0, 1 }, -- colon + [0x003B] = { 0, 1 }, -- semicolon + [0x002D] = { 0, 1 }, -- hyphen + [0x2013] = { 0, 0.50 }, -- endash + [0x2014] = { 0, 0.33 }, -- emdash + [0x3001] = { 0, 1 }, -- ideographic comma 、 + [0x3002] = { 0, 1 }, -- ideographic full stop 。 + [0x060C] = { 0, 1 }, -- arabic comma ، + [0x061B] = { 0, 1 }, -- arabic semicolon ؛ + [0x06D4] = { 0, 1 }, -- arabic full stop ۔ + +} + +-- normalizer + +fonts.handlers.otf.features.normalize = function(t) + if t.rand then + t.rand = "random" + end + return t +end + +-- bonus + +function fonts.helpers.nametoslot(name) + local t = type(name) + if t == "string" then + local tfmdata = fonts.hashes.identifiers[currentfont()] + local shared = tfmdata and tfmdata.shared + local fntdata = shared and shared.rawdata + return fntdata and fntdata.resources.unicodes[name] + elseif t == "number" then + return n + end +end + +-- \font\test=file:somefont:reencode=mymessup +-- +-- fonts.encodings.reencodings.mymessup = { +-- [109] = 110, -- m +-- [110] = 109, -- n +-- } + +fonts.encodings = fonts.encodings or { } +local reencodings = { }
+fonts.encodings.reencodings = reencodings + +local function specialreencode(tfmdata,value) + -- we forget about kerns as we assume symbols and we + -- could issue a message if ther are kerns but it's + -- a hack anyway so we odn't care too much here + local encoding = value and reencodings[value] + if encoding then + local temp = { } + local char = tfmdata.characters + for k, v in next, encoding do + temp[k] = char[v] + end + for k, v in next, temp do + char[k] = temp[k] + end + -- if we use the font otherwise luatex gets confused so + -- we return an additional hash component for fullname + return string.format("reencoded:%s",value) + end +end + +local function reencode(tfmdata,value) + tfmdata.postprocessors = tfmdata.postprocessors or { } + table.insert(tfmdata.postprocessors, + function(tfmdata) + return specialreencode(tfmdata,value) + end + ) +end + +otffeatures.register { + name = "reencode", + description = "reencode characters", + manipulators = { + base = reencode, + node = reencode, + } +} diff --git a/tex/generic/context/luatex/luatex-fonts-lua.lua b/tex/generic/context/luatex/luatex-fonts-lua.lua index ec3fe38be..9aa45fd22 100644 --- a/tex/generic/context/luatex/luatex-fonts-lua.lua +++ b/tex/generic/context/luatex/luatex-fonts-lua.lua @@ -1,33 +1,33 @@ -if not modules then modules = { } end modules ['luatex-fonts-lua'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -fonts.formats.lua = "lua" - -function fonts.readers.lua(specification) - local fullname = specification.filename or "" - if fullname == "" then - local forced = specification.forced or "" - if forced ~= "" then - fullname = specification.name .. "." .. forced - else - fullname = specification.name - end - end - local fullname = resolvers.findfile(fullname) or "" - if fullname ~= "" then - local loader = loadfile(fullname) - loader = loader and loader() - return loader and loader(specification) - end -end +if not modules then modules = { } end modules ['luatex-fonts-lua'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +fonts.formats.lua = "lua" + +function fonts.readers.lua(specification) + local fullname = specification.filename or "" + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + fullname = specification.name .. "." .. 
forced + else + fullname = specification.name + end + end + local fullname = resolvers.findfile(fullname) or "" + if fullname ~= "" then + local loader = loadfile(fullname) + loader = loader and loader() + return loader and loader(specification) + end +end diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua index e0fe66cb5..c8923a436 100644 --- a/tex/generic/context/luatex/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex/luatex-fonts-merged.lua @@ -1,49 +1,49 @@ -- merged file : luatex-fonts-merged.lua -- parent file : luatex-fonts.lua --- merge date : 05/18/13 12:41:50 +-- merge date : 05/19/13 19:27:29 do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['l-lua']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['l-lua']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$") -_MAJORVERSION=tonumber(major) or 5 -_MINORVERSION=tonumber(minor) or 1 +_MAJORVERSION=tonumber(major) or 5 +_MINORVERSION=tonumber(minor) or 1 _LUAVERSION=_MAJORVERSION+_MINORVERSION/10 -if not lpeg then - lpeg=require("lpeg") +if not lpeg then + lpeg=require("lpeg") end if loadstring then local loadnormal=load - function load(first,...) - if type(first)=="string" then - return loadstring(first,...) - else - return loadnormal(first,...) - end + function load(first,...) + if type(first)=="string" then + return loadstring(first,...) + else + return loadnormal(first,...) + end end else loadstring=load end if not ipairs then - local function iterate(a,i) - i=i+1 - local v=a[i] - if v~=nil then + local function iterate(a,i) + i=i+1 + local v=a[i] + if v~=nil then return i,v - end + end end - function ipairs(a) - return iterate,a,0 + function ipairs(a) + return iterate,a,0 end end if not pairs then - function pairs(t) + function pairs(t) return next,t end end @@ -58,602 +58,602 @@ end local print,select,tostring=print,select,tostring local inspectors={} function setinspector(inspector) - inspectors[#inspectors+1]=inspector + inspectors[#inspectors+1]=inspector end function inspect(...) - for s=1,select("#",...) do - local value=select(s,...) - local done=false - for i=1,#inspectors do - done=inspectors[i](value) - if done then - break - end - end - if not done then - print(tostring(value)) - end - end + for s=1,select("#",...) do + local value=select(s,...) + local done=false + for i=1,#inspectors do + done=inspectors[i](value) + if done then + break + end + end + if not done then + print(tostring(value)) + end + end end local dummy=function() end -function optionalrequire(...) - local ok,result=xpcall(require,dummy,...) - if ok then - return result - end -end +function optionalrequire(...) + local ok,result=xpcall(require,dummy,...) 
+ if ok then + return result + end +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['l-lpeg']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['l-lpeg']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } lpeg=require("lpeg") -local type,next,tostring=type,next,tostring +local type,next,tostring=type,next,tostring local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format local floor=math.floor -local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt +local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) lpeg.patterns=lpeg.patterns or {} local patterns=lpeg.patterns -local anything=P(1) -local endofstring=P(-1) +local anything=P(1) +local endofstring=P(-1) local alwaysmatched=P(true) -patterns.anything=anything -patterns.endofstring=endofstring -patterns.beginofstring=alwaysmatched +patterns.anything=anything +patterns.endofstring=endofstring +patterns.beginofstring=alwaysmatched patterns.alwaysmatched=alwaysmatched -local digit,sign=R('09'),S('+-') -local cr,lf,crlf=P("\r"),P("\n"),P("\r\n") +local digit,sign=R('09'),S('+-') +local cr,lf,crlf=P("\r"),P("\n"),P("\r\n") local newline=crlf+S("\r\n") -local escaped=P("\\")*anything -local squote=P("'") -local dquote=P('"') +local escaped=P("\\")*anything +local squote=P("'") +local dquote=P('"') local space=P(" ") -local utfbom_32_be=P('\000\000\254\255') -local utfbom_32_le=P('\255\254\000\000') -local utfbom_16_be=P('\255\254') -local utfbom_16_le=P('\254\255') -local utfbom_8=P('\239\187\191') -local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8 +local utfbom_32_be=P('\000\000\254\255') +local utfbom_32_le=P('\255\254\000\000') +local utfbom_16_be=P('\255\254') +local utfbom_16_le=P('\254\255') +local utfbom_8=P('\239\187\191') +local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8 local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8") local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0) local utf8next=R("\128\191") -patterns.utf8one=R("\000\127") -patterns.utf8two=R("\194\223")*utf8next -patterns.utf8three=R("\224\239")*utf8next*utf8next -patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next -patterns.utfbom=utfbom -patterns.utftype=utftype +patterns.utf8one=R("\000\127") +patterns.utf8two=R("\194\223")*utf8next +patterns.utf8three=R("\224\239")*utf8next*utf8next +patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next +patterns.utfbom=utfbom +patterns.utftype=utftype patterns.utfoffset=utfoffset -local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four +local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four local 
validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false) local utf8character=P(1)*R("\128\191")^0 -patterns.utf8=utf8char -patterns.utf8char=utf8char +patterns.utf8=utf8char +patterns.utf8char=utf8char patterns.utf8character=utf8character -patterns.validutf8=validutf8char +patterns.validutf8=validutf8char patterns.validutf8char=validutf8char -local eol=S("\n\r") +local eol=S("\n\r") local spacer=S(" \t\f\v") -local whitespace=eol+spacer -local nonspacer=1-spacer +local whitespace=eol+spacer +local nonspacer=1-spacer local nonwhitespace=1-whitespace -patterns.eol=eol -patterns.spacer=spacer -patterns.whitespace=whitespace -patterns.nonspacer=nonspacer +patterns.eol=eol +patterns.spacer=spacer +patterns.whitespace=whitespace +patterns.nonspacer=nonspacer patterns.nonwhitespace=nonwhitespace local stripper=spacer^0*C((spacer^0*nonspacer^1)^0) local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0)) -patterns.stripper=stripper +patterns.stripper=stripper patterns.collapser=collapser -patterns.digit=digit -patterns.sign=sign -patterns.cardinal=sign^0*digit^1 -patterns.integer=sign^0*digit^1 -patterns.unsigned=digit^0*P('.')*digit^1 -patterns.float=sign^0*patterns.unsigned -patterns.cunsigned=digit^0*P(',')*digit^1 -patterns.cfloat=sign^0*patterns.cunsigned -patterns.number=patterns.float+patterns.integer -patterns.cnumber=patterns.cfloat+patterns.integer -patterns.oct=P("0")*R("07")^1 -patterns.octal=patterns.oct -patterns.HEX=P("0x")*R("09","AF")^1 -patterns.hex=P("0x")*R("09","af")^1 -patterns.hexadecimal=P("0x")*R("09","AF","af")^1 -patterns.lowercase=R("az") -patterns.uppercase=R("AZ") -patterns.letter=patterns.lowercase+patterns.uppercase -patterns.space=space -patterns.tab=P("\t") -patterns.spaceortab=patterns.space+patterns.tab -patterns.newline=newline -patterns.emptyline=newline^1 -patterns.equal=P("=") -patterns.comma=P(",") -patterns.commaspacer=P(",")*spacer^0 -patterns.period=P(".") -patterns.colon=P(":") -patterns.semicolon=P(";") -patterns.underscore=P("_") -patterns.escaped=escaped -patterns.squote=squote -patterns.dquote=dquote -patterns.nosquote=(escaped+(1-squote))^0 -patterns.nodquote=(escaped+(1-dquote))^0 +patterns.digit=digit +patterns.sign=sign +patterns.cardinal=sign^0*digit^1 +patterns.integer=sign^0*digit^1 +patterns.unsigned=digit^0*P('.')*digit^1 +patterns.float=sign^0*patterns.unsigned +patterns.cunsigned=digit^0*P(',')*digit^1 +patterns.cfloat=sign^0*patterns.cunsigned +patterns.number=patterns.float+patterns.integer +patterns.cnumber=patterns.cfloat+patterns.integer +patterns.oct=P("0")*R("07")^1 +patterns.octal=patterns.oct +patterns.HEX=P("0x")*R("09","AF")^1 +patterns.hex=P("0x")*R("09","af")^1 +patterns.hexadecimal=P("0x")*R("09","AF","af")^1 +patterns.lowercase=R("az") +patterns.uppercase=R("AZ") +patterns.letter=patterns.lowercase+patterns.uppercase +patterns.space=space +patterns.tab=P("\t") +patterns.spaceortab=patterns.space+patterns.tab +patterns.newline=newline +patterns.emptyline=newline^1 +patterns.equal=P("=") +patterns.comma=P(",") +patterns.commaspacer=P(",")*spacer^0 +patterns.period=P(".") +patterns.colon=P(":") +patterns.semicolon=P(";") +patterns.underscore=P("_") +patterns.escaped=escaped +patterns.squote=squote +patterns.dquote=dquote +patterns.nosquote=(escaped+(1-squote))^0 +patterns.nodquote=(escaped+(1-dquote))^0 patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"") patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"") patterns.unquoted=patterns.undouble+patterns.unsingle 
patterns.unspacer=((patterns.spacer^1)/"")^0 -patterns.singlequoted=squote*patterns.nosquote*squote -patterns.doublequoted=dquote*patterns.nodquote*dquote +patterns.singlequoted=squote*patterns.nosquote*squote +patterns.doublequoted=dquote*patterns.nodquote*dquote patterns.quoted=patterns.doublequoted+patterns.singlequoted patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1) patterns.somecontent=(anything-newline-space)^1 patterns.beginline=#(1-newline) patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0)) local function anywhere(pattern) - return P { P(pattern)+1*V(1) } + return P { P(pattern)+1*V(1) } end lpeg.anywhere=anywhere -function lpeg.instringchecker(p) - p=anywhere(p) - return function(str) - return lpegmatch(p,str) and true or false - end +function lpeg.instringchecker(p) + p=anywhere(p) + return function(str) + return lpegmatch(p,str) and true or false + end end -function lpeg.splitter(pattern,action) - return (((1-P(pattern))^1)/action+1)^0 +function lpeg.splitter(pattern,action) + return (((1-P(pattern))^1)/action+1)^0 end -function lpeg.tsplitter(pattern,action) - return Ct((((1-P(pattern))^1)/action+1)^0) +function lpeg.tsplitter(pattern,action) + return Ct((((1-P(pattern))^1)/action+1)^0) end local splitters_s,splitters_m,splitters_t={},{},{} -local function splitat(separator,single) - local splitter=(single and splitters_s[separator]) or splitters_m[separator] - if not splitter then - separator=P(separator) - local other=C((1-separator)^0) - if single then - local any=anything +local function splitat(separator,single) + local splitter=(single and splitters_s[separator]) or splitters_m[separator] + if not splitter then + separator=P(separator) + local other=C((1-separator)^0) + if single then + local any=anything splitter=other*(separator*C(any^0)+"") - splitters_s[separator]=splitter - else - splitter=other*(separator*other)^0 - splitters_m[separator]=splitter - end - end - return splitter -end -local function tsplitat(separator) - local splitter=splitters_t[separator] - if not splitter then - splitter=Ct(splitat(separator)) - splitters_t[separator]=splitter - end - return splitter -end -lpeg.splitat=splitat + splitters_s[separator]=splitter + else + splitter=other*(separator*other)^0 + splitters_m[separator]=splitter + end + end + return splitter +end +local function tsplitat(separator) + local splitter=splitters_t[separator] + if not splitter then + splitter=Ct(splitat(separator)) + splitters_t[separator]=splitter + end + return splitter +end +lpeg.splitat=splitat lpeg.tsplitat=tsplitat -function string.splitup(str,separator) - if not separator then - separator="," - end - return lpegmatch(splitters_m[separator] or splitat(separator),str) +function string.splitup(str,separator) + if not separator then + separator="," + end + return lpegmatch(splitters_m[separator] or splitat(separator),str) end local cache={} -function lpeg.split(separator,str) - local c=cache[separator] - if not c then - c=tsplitat(separator) - cache[separator]=c - end - return lpegmatch(c,str) -end -function string.split(str,separator) - if separator then - local c=cache[separator] - if not c then - c=tsplitat(separator) - cache[separator]=c - end - return lpegmatch(c,str) - else - return { str } - end +function lpeg.split(separator,str) + local c=cache[separator] + if not c then + c=tsplitat(separator) + cache[separator]=c + end + return lpegmatch(c,str) +end +function string.split(str,separator) + if separator then + local 
c=cache[separator] + if not c then + c=tsplitat(separator) + cache[separator]=c + end + return lpegmatch(c,str) + else + return { str } + end end local spacing=patterns.spacer^0*newline -local empty=spacing*Cc("") -local nonempty=Cs((1-spacing)^1)*spacing^-1 +local empty=spacing*Cc("") +local nonempty=Cs((1-spacing)^1)*spacing^-1 local content=(empty+nonempty)^1 patterns.textline=content local linesplitter=tsplitat(newline) patterns.linesplitter=linesplitter -function string.splitlines(str) - return lpegmatch(linesplitter,str) +function string.splitlines(str) + return lpegmatch(linesplitter,str) end local cache={} -function lpeg.checkedsplit(separator,str) - local c=cache[separator] - if not c then - separator=P(separator) - local other=C((1-separator)^1) - c=Ct(separator^0*other*(separator^1*other)^0) - cache[separator]=c - end - return lpegmatch(c,str) -end -function string.checkedsplit(str,separator) - local c=cache[separator] - if not c then - separator=P(separator) - local other=C((1-separator)^1) - c=Ct(separator^0*other*(separator^1*other)^0) - cache[separator]=c - end - return lpegmatch(c,str) -end -local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end -local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end +function lpeg.checkedsplit(separator,str) + local c=cache[separator] + if not c then + separator=P(separator) + local other=C((1-separator)^1) + c=Ct(separator^0*other*(separator^1*other)^0) + cache[separator]=c + end + return lpegmatch(c,str) +end +function string.checkedsplit(str,separator) + local c=cache[separator] + if not c then + separator=P(separator) + local other=C((1-separator)^1) + c=Ct(separator^0*other*(separator^1*other)^0) + cache[separator]=c + end + return lpegmatch(c,str) +end +local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end +local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4 patterns.utf8byte=utf8byte local cache={} -function lpeg.stripper(str) - if type(str)=="string" then - local s=cache[str] - if not s then - s=Cs(((S(str)^1)/""+1)^0) - cache[str]=s - end - return s - else - return Cs(((str^1)/""+1)^0) - end +function lpeg.stripper(str) + if type(str)=="string" then + local s=cache[str] + if not s then + s=Cs(((S(str)^1)/""+1)^0) + cache[str]=s + end + return s + else + return Cs(((str^1)/""+1)^0) + end end local cache={} -function lpeg.keeper(str) - if type(str)=="string" then - local s=cache[str] - if not s then - s=Cs((((1-S(str))^1)/""+1)^0) - cache[str]=s - end - return s - else - return Cs((((1-str)^1)/""+1)^0) - end +function lpeg.keeper(str) + if type(str)=="string" then + local s=cache[str] + if not s then + s=Cs((((1-S(str))^1)/""+1)^0) + cache[str]=s + end + return s + else + return Cs((((1-str)^1)/""+1)^0) + end end function lpeg.frontstripper(str) - return (P(str)+P(true))*Cs(anything^0) + return (P(str)+P(true))*Cs(anything^0) end function lpeg.endstripper(str) - return Cs((1-P(str)*endofstring)^0) + return Cs((1-P(str)*endofstring)^0) end function lpeg.replacer(one,two,makefunction,isutf) - local pattern - local u=isutf and utf8char or 1 - if type(one)=="table" then - local no=#one - local p=P(false) - if no==0 then - for k,v in next,one do - p=p+P(k)/v - end - pattern=Cs((p+u)^0) - elseif no==1 then - local o=one[1] + local pattern + local u=isutf 
and utf8char or 1 + if type(one)=="table" then + local no=#one + local p=P(false) + if no==0 then + for k,v in next,one do + p=p+P(k)/v + end + pattern=Cs((p+u)^0) + elseif no==1 then + local o=one[1] one,two=P(o[1]),o[2] - pattern=Cs((one/two+u)^0) - else - for i=1,no do - local o=one[i] - p=p+P(o[1])/o[2] - end - pattern=Cs((p+u)^0) - end - else - pattern=Cs((P(one)/(two or "")+u)^0) - end - if makefunction then - return function(str) - return lpegmatch(pattern,str) - end - else - return pattern - end -end -function lpeg.finder(lst,makefunction) - local pattern - if type(lst)=="table" then - pattern=P(false) - if #lst==0 then - for k,v in next,lst do + pattern=Cs((one/two+u)^0) + else + for i=1,no do + local o=one[i] + p=p+P(o[1])/o[2] + end + pattern=Cs((p+u)^0) + end + else + pattern=Cs((P(one)/(two or "")+u)^0) + end + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end +end +function lpeg.finder(lst,makefunction) + local pattern + if type(lst)=="table" then + pattern=P(false) + if #lst==0 then + for k,v in next,lst do pattern=pattern+P(k) - end - else - for i=1,#lst do - pattern=pattern+P(lst[i]) - end - end - else - pattern=P(lst) - end - pattern=(1-pattern)^0*pattern - if makefunction then - return function(str) - return lpegmatch(pattern,str) - end - else - return pattern - end + end + else + for i=1,#lst do + pattern=pattern+P(lst[i]) + end + end + else + pattern=P(lst) + end + pattern=(1-pattern)^0*pattern + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end end local splitters_f,splitters_s={},{} function lpeg.firstofsplit(separator) - local splitter=splitters_f[separator] - if not splitter then - separator=P(separator) - splitter=C((1-separator)^0) - splitters_f[separator]=splitter - end - return splitter + local splitter=splitters_f[separator] + if not splitter then + separator=P(separator) + splitter=C((1-separator)^0) + splitters_f[separator]=splitter + end + return splitter end function lpeg.secondofsplit(separator) - local splitter=splitters_s[separator] - if not splitter then - separator=P(separator) - splitter=(1-separator)^0*separator*C(anything^0) - splitters_s[separator]=splitter - end - return splitter -end -function lpeg.balancer(left,right) - left,right=P(left),P(right) - return P { left*((1-left-right)+V(1))^0*right } + local splitter=splitters_s[separator] + if not splitter then + separator=P(separator) + splitter=(1-separator)^0*separator*C(anything^0) + splitters_s[separator]=splitter + end + return splitter +end +function lpeg.balancer(left,right) + left,right=P(left),P(right) + return P { left*((1-left-right)+V(1))^0*right } end local nany=utf8char/"" -function lpeg.counter(pattern) - pattern=Cs((P(pattern)/" "+nany)^0) - return function(str) - return #lpegmatch(pattern,str) - end +function lpeg.counter(pattern) + pattern=Cs((P(pattern)/" "+nany)^0) + return function(str) + return #lpegmatch(pattern,str) + end end utf=utf or (unicode and unicode.utf8) or {} -local utfcharacters=utf and utf.characters or string.utfcharacters -local utfgmatch=utf and utf.gmatch +local utfcharacters=utf and utf.characters or string.utfcharacters +local utfgmatch=utf and utf.gmatch local utfchar=utf and utf.char lpeg.UP=lpeg.P if utfcharacters then - function lpeg.US(str) - local p=P(false) - for uc in utfcharacters(str) do - p=p+P(uc) - end - return p + function lpeg.US(str) + local p=P(false) + for uc in utfcharacters(str) do + p=p+P(uc) + end + return p end 
elseif utfgmatch then - function lpeg.US(str) - local p=P(false) - for uc in utfgmatch(str,".") do - p=p+P(uc) - end - return p + function lpeg.US(str) + local p=P(false) + for uc in utfgmatch(str,".") do + p=p+P(uc) + end + return p end else - function lpeg.US(str) - local p=P(false) - local f=function(uc) - p=p+P(uc) - end - lpegmatch((utf8char/f)^0,str) - return p + function lpeg.US(str) + local p=P(false) + local f=function(uc) + p=p+P(uc) + end + lpegmatch((utf8char/f)^0,str) + return p end end local range=utf8byte*utf8byte+Cc(false) -function lpeg.UR(str,more) - local first,last - if type(str)=="number" then - first=str - last=more or first - else - first,last=lpegmatch(range,str) - if not last then - return P(str) - end - end - if first==last then - return P(str) +function lpeg.UR(str,more) + local first,last + if type(str)=="number" then + first=str + last=more or first + else + first,last=lpegmatch(range,str) + if not last then + return P(str) + end + end + if first==last then + return P(str) elseif utfchar and (last-first<8) then - local p=P(false) - for i=first,last do - p=p+P(utfchar(i)) - end + local p=P(false) + for i=first,last do + p=p+P(utfchar(i)) + end return p - else - local f=function(b) - return b>=first and b<=last + else + local f=function(b) + return b>=first and b<=last end return utf8byte/f - end + end end -function lpeg.is_lpeg(p) - return p and lpegtype(p)=="pattern" +function lpeg.is_lpeg(p) + return p and lpegtype(p)=="pattern" end function lpeg.oneof(list,...) - if type(list)~="table" then - list={ list,... } + if type(list)~="table" then + list={ list,... } end - local p=P(list[1]) - for l=2,#list do - p=p+P(list[l]) - end - return p + local p=P(list[1]) + for l=2,#list do + p=p+P(list[l]) + end + return p end local sort=table.sort -local function copyindexed(old) - local new={} - for i=1,#old do - new[i]=old - end - return new -end -local function sortedkeys(tab) - local keys,s={},0 - for key,_ in next,tab do - s=s+1 - keys[s]=key - end - sort(keys) - return keys -end -function lpeg.append(list,pp,delayed,checked) - local p=pp - if #list>0 then - local keys=copyindexed(list) - sort(keys) - for i=#keys,1,-1 do - local k=keys[i] - if p then - p=P(k)+p - else - p=P(k) - end - end +local function copyindexed(old) + local new={} + for i=1,#old do + new[i]=old + end + return new +end +local function sortedkeys(tab) + local keys,s={},0 + for key,_ in next,tab do + s=s+1 + keys[s]=key + end + sort(keys) + return keys +end +function lpeg.append(list,pp,delayed,checked) + local p=pp + if #list>0 then + local keys=copyindexed(list) + sort(keys) + for i=#keys,1,-1 do + local k=keys[i] + if p then + p=P(k)+p + else + p=P(k) + end + end elseif delayed then - local keys=sortedkeys(list) - if p then - for i=1,#keys,1 do - local k=keys[i] - local v=list[k] - p=P(k)/list+p - end - else - for i=1,#keys do - local k=keys[i] - local v=list[k] - if p then - p=P(k)+p - else - p=P(k) - end - end - if p then - p=p/list - end - end + local keys=sortedkeys(list) + if p then + for i=1,#keys,1 do + local k=keys[i] + local v=list[k] + p=P(k)/list+p + end + else + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + p=P(k)+p + else + p=P(k) + end + end + if p then + p=p/list + end + end elseif checked then - local keys=sortedkeys(list) - for i=1,#keys do - local k=keys[i] - local v=list[k] - if p then - if k==v then - p=P(k)+p - else - p=P(k)/v+p - end - else - if k==v then - p=P(k) - else - p=P(k)/v - end - end - end - else - local keys=sortedkeys(list) - for i=1,#keys do 
- local k=keys[i] - local v=list[k] - if p then - p=P(k)/v+p - else - p=P(k)/v - end - end - end - return p -end -local function make(t) - local p - local keys=sortedkeys(t) - for i=1,#keys do - local k=keys[i] - local v=t[k] - if not p then - if next(v) then - p=P(k)*make(v) - else - p=P(k) - end - else - if next(v) then - p=p+P(k)*make(v) - else - p=p+P(k) - end - end - end - return p + local keys=sortedkeys(list) + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + if k==v then + p=P(k)+p + else + p=P(k)/v+p + end + else + if k==v then + p=P(k) + else + p=P(k)/v + end + end + end + else + local keys=sortedkeys(list) + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + p=P(k)/v+p + else + p=P(k)/v + end + end + end + return p +end +local function make(t) + local p + local keys=sortedkeys(t) + for i=1,#keys do + local k=keys[i] + local v=t[k] + if not p then + if next(v) then + p=P(k)*make(v) + else + p=P(k) + end + else + if next(v) then + p=p+P(k)*make(v) + else + p=p+P(k) + end + end + end + return p end function lpeg.utfchartabletopattern(list) - local tree={} - for i=1,#list do - local t=tree - for c in gmatch(list[i],".") do - if not t[c] then - t[c]={} - end - t=t[c] - end - end - return make(tree) + local tree={} + for i=1,#list do + local t=tree + for c in gmatch(list[i],".") do + if not t[c] then + t[c]={} + end + t=t[c] + end + end + return make(tree) end patterns.containseol=lpeg.finder(eol) -local function nextstep(n,step,result) +local function nextstep(n,step,result) local m=n%step local d=floor(n/step) - if d>0 then - local v=V(tostring(step)) - local s=result.start - for i=1,d do - if s then - s=v*s - else - s=v - end - end - result.start=s - end - if step>1 and result.start then - local v=V(tostring(step/2)) - result[tostring(step)]=v*v - end - if step>0 then - return nextstep(m,step/2,result) - else - return result - end -end -function lpeg.times(pattern,n) - return P(nextstep(n,2^16,{ "start",["1"]=pattern })) -end -local digit=R("09") -local period=P(".") -local zero=P("0") + if d>0 then + local v=V(tostring(step)) + local s=result.start + for i=1,d do + if s then + s=v*s + else + s=v + end + end + result.start=s + end + if step>1 and result.start then + local v=V(tostring(step/2)) + result[tostring(step)]=v*v + end + if step>0 then + return nextstep(m,step/2,result) + else + return result + end +end +function lpeg.times(pattern,n) + return P(nextstep(n,2^16,{ "start",["1"]=pattern })) +end +local digit=R("09") +local period=P(".") +local zero=P("0") local trailingzeros=zero^0*-digit -local case_1=period*trailingzeros/"" -local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"") -local number=digit^1*(case_1+case_2) +local case_1=period*trailingzeros/"" +local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"") +local number=digit^1*(case_1+case_2) local stripper=Cs((number+1)^0) lpeg.patterns.stripzeros=stripper @@ -661,1281 +661,1281 @@ end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['l-functions']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['l-functions']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme 
files" } functions=functions or {} -function functions.dummy() end +function functions.dummy() end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['l-string']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['l-string']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -local string=string -local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower -local lpegmatch,patterns=lpeg.match,lpeg.patterns +local string=string +local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower +local lpegmatch,patterns=lpeg.match,lpeg.patterns local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote -function string.unquoted(str) - return lpegmatch(unquoted,str) or str +function string.unquoted(str) + return lpegmatch(unquoted,str) or str end -function string.quoted(str) +function string.quoted(str) return format("%q",str) end function string.count(str,pattern) - local n=0 + local n=0 for _ in gmatch(str,pattern) do - n=n+1 - end - return n + n=n+1 + end + return n end function string.limit(str,n,sentinel) - if #str>n then - sentinel=sentinel or "..." - return sub(str,1,(n-#sentinel))..sentinel - else - return str - end -end -local stripper=patterns.stripper -local collapser=patterns.collapser + if #str>n then + sentinel=sentinel or "..." 
+ return sub(str,1,(n-#sentinel))..sentinel + else + return str + end +end +local stripper=patterns.stripper +local collapser=patterns.collapser local longtostring=patterns.longtostring -function string.strip(str) - return lpegmatch(stripper,str) or "" +function string.strip(str) + return lpegmatch(stripper,str) or "" end -function string.collapsespaces(str) - return lpegmatch(collapser,str) or "" +function string.collapsespaces(str) + return lpegmatch(collapser,str) or "" end -function string.longtostring(str) - return lpegmatch(longtostring,str) or "" +function string.longtostring(str) + return lpegmatch(longtostring,str) or "" end local pattern=P(" ")^0*P(-1) -function string.is_empty(str) - if str=="" then - return true - else - return lpegmatch(pattern,str) and true or false - end -end -local anything=patterns.anything +function string.is_empty(str) + if str=="" then + return true + else + return lpegmatch(pattern,str) and true or false + end +end +local anything=patterns.anything local allescapes=Cc("%")*S(".-+%?()[]*") local someescapes=Cc("%")*S(".-+%()[]") local matchescapes=Cc(".")*S("*?") -local pattern_a=Cs ((allescapes+anything )^0 ) -local pattern_b=Cs ((someescapes+matchescapes+anything )^0 ) +local pattern_a=Cs ((allescapes+anything )^0 ) +local pattern_b=Cs ((someescapes+matchescapes+anything )^0 ) local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") ) -function string.escapedpattern(str,simple) - return lpegmatch(simple and pattern_b or pattern_a,str) -end -function string.topattern(str,lowercase,strict) - if str=="" or type(str)~="string" then - return ".*" - elseif strict then - str=lpegmatch(pattern_c,str) - else - str=lpegmatch(pattern_b,str) - end - if lowercase then - return lower(str) - else - return str - end -end -function string.valid(str,default) - return (type(str)=="string" and str~="" and str) or default or nil +function string.escapedpattern(str,simple) + return lpegmatch(simple and pattern_b or pattern_a,str) +end +function string.topattern(str,lowercase,strict) + if str=="" or type(str)~="string" then + return ".*" + elseif strict then + str=lpegmatch(pattern_c,str) + else + str=lpegmatch(pattern_b,str) + end + if lowercase then + return lower(str) + else + return str + end +end +function string.valid(str,default) + return (type(str)=="string" and str~="" and str) or default or nil end string.itself=function(s) return s end local pattern=Ct(C(1)^0) -function string.totable(str) - return lpegmatch(pattern,str) +function string.totable(str) + return lpegmatch(pattern,str) end local replacer=lpeg.replacer("@","%%") -function string.tformat(fmt,...) - return format(lpegmatch(replacer,fmt),...) +function string.tformat(fmt,...) + return format(lpegmatch(replacer,fmt),...) 
end -string.quote=string.quoted -string.unquote=string.unquoted +string.quote=string.quoted +string.unquote=string.unquoted end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['l-table']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['l-table']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select -local table,string=table,string -local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove -local format,lower,dump=string.format,string.lower,string.dump -local getmetatable,setmetatable=getmetatable,setmetatable -local getinfo=debug.getinfo -local lpegmatch,patterns=lpeg.match,lpeg.patterns +local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select +local table,string=table,string +local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove +local format,lower,dump=string.format,string.lower,string.dump +local getmetatable,setmetatable=getmetatable,setmetatable +local getinfo=debug.getinfo +local lpegmatch,patterns=lpeg.match,lpeg.patterns local floor=math.floor local stripper=patterns.stripper -function table.strip(tab) - local lst,l={},0 - for i=1,#tab do - local s=lpegmatch(stripper,tab[i]) or "" +function table.strip(tab) + local lst,l={},0 + for i=1,#tab do + local s=lpegmatch(stripper,tab[i]) or "" if s=="" then - else - l=l+1 - lst[l]=s - end - end - return lst -end -function table.keys(t) - if t then - local keys,k={},0 - for key,_ in next,t do - k=k+1 - keys[k]=key - end - return keys - else - return {} - end -end -local function compare(a,b) + else + l=l+1 + lst[l]=s + end + end + return lst +end +function table.keys(t) + if t then + local keys,k={},0 + for key,_ in next,t do + k=k+1 + keys[k]=key + end + return keys + else + return {} + end +end +local function compare(a,b) local ta,tb=type(a),type(b) - if ta==tb then - return a0 then - local n=0 - for _,v in next,t do - n=n+1 - end - if n==#t then - local tt,nt={},0 - for i=1,#t do - local v=t[i] - local tv=type(v) - if tv=="number" then - nt=nt+1 - if hexify then - tt[nt]=format("0x%04X",v) - else +local function simple_table(t) + if #t>0 then + local n=0 + for _,v in next,t do + n=n+1 + end + if n==#t then + local tt,nt={},0 + for i=1,#t do + local v=t[i] + local tv=type(v) + if tv=="number" then + nt=nt+1 + if hexify then + tt[nt]=format("0x%04X",v) + else tt[nt]=tostring(v) - end - elseif tv=="boolean" then - nt=nt+1 - tt[nt]=tostring(v) - elseif tv=="string" then - nt=nt+1 - tt[nt]=format("%q",v) - else - tt=nil - break - end - end - return tt - end - end - return nil + end + elseif tv=="boolean" then + nt=nt+1 + tt[nt]=tostring(v) + elseif tv=="string" then + nt=nt+1 + tt[nt]=format("%q",v) + else + tt=nil + break + end + end + return tt + end + end + return nil end local propername=patterns.propername local function dummy() end -local function do_serialize(root,name,depth,level,indexed) - if level>0 then - depth=depth.." 
" - if indexed then - handle(format("%s{",depth)) - else - local tn=type(name) - if tn=="number" then - if hexify then - handle(format("%s[0x%04X]={",depth,name)) - else - handle(format("%s[%s]={",depth,name)) - end - elseif tn=="string" then - if noquotes and not reserved[name] and lpegmatch(propername,name) then - handle(format("%s%s={",depth,name)) - else - handle(format("%s[%q]={",depth,name)) - end - elseif tn=="boolean" then - handle(format("%s[%s]={",depth,tostring(name))) - else - handle(format("%s{",depth)) - end - end +local function do_serialize(root,name,depth,level,indexed) + if level>0 then + depth=depth.." " + if indexed then + handle(format("%s{",depth)) + else + local tn=type(name) + if tn=="number" then + if hexify then + handle(format("%s[0x%04X]={",depth,name)) + else + handle(format("%s[%s]={",depth,name)) + end + elseif tn=="string" then + if noquotes and not reserved[name] and lpegmatch(propername,name) then + handle(format("%s%s={",depth,name)) + else + handle(format("%s[%q]={",depth,name)) + end + elseif tn=="boolean" then + handle(format("%s[%s]={",depth,tostring(name))) + else + handle(format("%s{",depth)) + end + end end if root and next(root) then - local first,last=nil,0 - if compact then - last=#root - for k=1,last do - if root[k]==nil then - last=k-1 - break - end - end - if last>0 then - first=1 - end - end - local sk=sortedkeys(root) - for i=1,#sk do - local k=sk[i] + local first,last=nil,0 + if compact then + last=#root + for k=1,last do + if root[k]==nil then + last=k-1 + break + end + end + if last>0 then + first=1 + end + end + local sk=sortedkeys(root) + for i=1,#sk do + local k=sk[i] local v=root[k] - local t,tk=type(v),type(k) - if compact and first and tk=="number" and k>=first and k<=last then - if t=="number" then - if hexify then - handle(format("%s 0x%04X,",depth,v)) - else + local t,tk=type(v),type(k) + if compact and first and tk=="number" and k>=first and k<=last then + if t=="number" then + if hexify then + handle(format("%s 0x%04X,",depth,v)) + else handle(format("%s %s,",depth,v)) - end - elseif t=="string" then - if reduce and tonumber(v) then - handle(format("%s %s,",depth,v)) - else - handle(format("%s %q,",depth,v)) - end - elseif t=="table" then - if not next(v) then - handle(format("%s {},",depth)) + end + elseif t=="string" then + if reduce and tonumber(v) then + handle(format("%s %s,",depth,v)) + else + handle(format("%s %q,",depth,v)) + end + elseif t=="table" then + if not next(v) then + handle(format("%s {},",depth)) elseif inline then - local st=simple_table(v) - if st then - handle(format("%s { %s },",depth,concat(st,", "))) - else - do_serialize(v,k,depth,level+1,true) - end - else - do_serialize(v,k,depth,level+1,true) - end - elseif t=="boolean" then - handle(format("%s %s,",depth,tostring(v))) - elseif t=="function" then - if functions then - handle(format('%s load(%q),',depth,dump(v))) - else - handle(format('%s "function",',depth)) - end - else - handle(format("%s %q,",depth,tostring(v))) - end + local st=simple_table(v) + if st then + handle(format("%s { %s },",depth,concat(st,", "))) + else + do_serialize(v,k,depth,level+1,true) + end + else + do_serialize(v,k,depth,level+1,true) + end + elseif t=="boolean" then + handle(format("%s %s,",depth,tostring(v))) + elseif t=="function" then + if functions then + handle(format('%s load(%q),',depth,dump(v))) + else + handle(format('%s "function",',depth)) + end + else + handle(format("%s %q,",depth,tostring(v))) + end elseif k=="__p__" then - if false then - handle(format("%s 
__p__=nil,",depth)) - end - elseif t=="number" then - if tk=="number" then - if hexify then - handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) - else + if false then + handle(format("%s __p__=nil,",depth)) + end + elseif t=="number" then + if tk=="number" then + if hexify then + handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) + else handle(format("%s [%s]=%s,",depth,k,v)) - end - elseif tk=="boolean" then - if hexify then - handle(format("%s [%s]=0x%04X,",depth,tostring(k),v)) - else + end + elseif tk=="boolean" then + if hexify then + handle(format("%s [%s]=0x%04X,",depth,tostring(k),v)) + else handle(format("%s [%s]=%s,",depth,tostring(k),v)) - end - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - if hexify then - handle(format("%s %s=0x%04X,",depth,k,v)) - else + end + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + if hexify then + handle(format("%s %s=0x%04X,",depth,k,v)) + else handle(format("%s %s=%s,",depth,k,v)) - end - else - if hexify then - handle(format("%s [%q]=0x%04X,",depth,k,v)) - else + end + else + if hexify then + handle(format("%s [%q]=0x%04X,",depth,k,v)) + else handle(format("%s [%q]=%s,",depth,k,v)) - end - end - elseif t=="string" then - if reduce and tonumber(v) then - if tk=="number" then - if hexify then - handle(format("%s [0x%04X]=%s,",depth,k,v)) - else - handle(format("%s [%s]=%s,",depth,k,v)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%s,",depth,tostring(k),v)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,v)) - else - handle(format("%s [%q]=%s,",depth,k,v)) - end - else - if tk=="number" then - if hexify then - handle(format("%s [0x%04X]=%q,",depth,k,v)) - else - handle(format("%s [%s]=%q,",depth,k,v)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%q,",depth,tostring(k),v)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%q,",depth,k,v)) - else - handle(format("%s [%q]=%q,",depth,k,v)) - end - end - elseif t=="table" then - if not next(v) then - if tk=="number" then - if hexify then - handle(format("%s [0x%04X]={},",depth,k)) - else - handle(format("%s [%s]={},",depth,k)) - end - elseif tk=="boolean" then - handle(format("%s [%s]={},",depth,tostring(k))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s={},",depth,k)) - else - handle(format("%s [%q]={},",depth,k)) - end - elseif inline then - local st=simple_table(v) - if st then - if tk=="number" then - if hexify then - handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", "))) - else - handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) - end - elseif tk=="boolean" then - handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", "))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s={ %s },",depth,k,concat(st,", "))) - else - handle(format("%s [%q]={ %s },",depth,k,concat(st,", "))) - end - else - do_serialize(v,k,depth,level+1) - end - else - do_serialize(v,k,depth,level+1) - end - elseif t=="boolean" then - if tk=="number" then - if hexify then - handle(format("%s [0x%04X]=%s,",depth,k,tostring(v))) - else - handle(format("%s [%s]=%s,",depth,k,tostring(v))) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,tostring(v))) - else - handle(format("%s 
[%q]=%s,",depth,k,tostring(v))) - end - elseif t=="function" then - if functions then + end + end + elseif t=="string" then + if reduce and tonumber(v) then + if tk=="number" then + if hexify then + handle(format("%s [0x%04X]=%s,",depth,k,v)) + else + handle(format("%s [%s]=%s,",depth,k,v)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%s,",depth,tostring(k),v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,v)) + else + handle(format("%s [%q]=%s,",depth,k,v)) + end + else + if tk=="number" then + if hexify then + handle(format("%s [0x%04X]=%q,",depth,k,v)) + else + handle(format("%s [%s]=%q,",depth,k,v)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%q,",depth,tostring(k),v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,v)) + else + handle(format("%s [%q]=%q,",depth,k,v)) + end + end + elseif t=="table" then + if not next(v) then + if tk=="number" then + if hexify then + handle(format("%s [0x%04X]={},",depth,k)) + else + handle(format("%s [%s]={},",depth,k)) + end + elseif tk=="boolean" then + handle(format("%s [%s]={},",depth,tostring(k))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={},",depth,k)) + else + handle(format("%s [%q]={},",depth,k)) + end + elseif inline then + local st=simple_table(v) + if st then + if tk=="number" then + if hexify then + handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) + end + elseif tk=="boolean" then + handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", "))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%q]={ %s },",depth,k,concat(st,", "))) + end + else + do_serialize(v,k,depth,level+1) + end + else + do_serialize(v,k,depth,level+1) + end + elseif t=="boolean" then + if tk=="number" then + if hexify then + handle(format("%s [0x%04X]=%s,",depth,k,tostring(v))) + else + handle(format("%s [%s]=%s,",depth,k,tostring(v))) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,tostring(v))) + else + handle(format("%s [%q]=%s,",depth,k,tostring(v))) + end + elseif t=="function" then + if functions then local f=getinfo(v).what=="C" and dump(dummy) or dump(v) - if tk=="number" then - if hexify then - handle(format("%s [0x%04X]=load(%q),",depth,k,f)) - else - handle(format("%s [%s]=load(%q),",depth,k,f)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=load(%q),",depth,tostring(k),f)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=load(%q),",depth,k,f)) - else - handle(format("%s [%q]=load(%q),",depth,k,f)) - end - end - else - if tk=="number" then - if hexify then - handle(format("%s [0x%04X]=%q,",depth,k,tostring(v))) - else - handle(format("%s [%s]=%q,",depth,k,tostring(v))) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%q,",depth,k,tostring(v))) - else - handle(format("%s [%q]=%q,",depth,k,tostring(v))) - end - end - end - end - if level>0 then - handle(format("%s},",depth)) - end + if tk=="number" then + if hexify then + 
handle(format("%s [0x%04X]=load(%q),",depth,k,f)) + else + handle(format("%s [%s]=load(%q),",depth,k,f)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=load(%q),",depth,tostring(k),f)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=load(%q),",depth,k,f)) + else + handle(format("%s [%q]=load(%q),",depth,k,f)) + end + end + else + if tk=="number" then + if hexify then + handle(format("%s [0x%04X]=%q,",depth,k,tostring(v))) + else + handle(format("%s [%s]=%q,",depth,k,tostring(v))) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,tostring(v))) + else + handle(format("%s [%q]=%q,",depth,k,tostring(v))) + end + end + end + end + if level>0 then + handle(format("%s},",depth)) + end end local function serialize(_handle,root,name,specification) - local tname=type(name) - if type(specification)=="table" then - noquotes=specification.noquotes - hexify=specification.hexify - handle=_handle or specification.handle or print - reduce=specification.reduce or false - functions=specification.functions - compact=specification.compact - inline=specification.inline and compact - if functions==nil then - functions=true - end - if compact==nil then - compact=true - end - if inline==nil then - inline=compact - end - else - noquotes=false - hexify=false - handle=_handle or print - reduce=false - compact=true - inline=true - functions=true - end - if tname=="string" then - if name=="return" then - handle("return {") - else - handle(name.."={") - end - elseif tname=="number" then - if hexify then - handle(format("[0x%04X]={",name)) - else - handle("["..name.."]={") - end - elseif tname=="boolean" then - if name then - handle("return {") - else - handle("{") - end - else - handle("t={") - end + local tname=type(name) + if type(specification)=="table" then + noquotes=specification.noquotes + hexify=specification.hexify + handle=_handle or specification.handle or print + reduce=specification.reduce or false + functions=specification.functions + compact=specification.compact + inline=specification.inline and compact + if functions==nil then + functions=true + end + if compact==nil then + compact=true + end + if inline==nil then + inline=compact + end + else + noquotes=false + hexify=false + handle=_handle or print + reduce=false + compact=true + inline=true + functions=true + end + if tname=="string" then + if name=="return" then + handle("return {") + else + handle(name.."={") + end + elseif tname=="number" then + if hexify then + handle(format("[0x%04X]={",name)) + else + handle("["..name.."]={") + end + elseif tname=="boolean" then + if name then + handle("return {") + else + handle("{") + end + else + handle("t={") + end if root then if getmetatable(root) then - local dummy=root._w_h_a_t_e_v_e_r_ - root._w_h_a_t_e_v_e_r_=nil - end - if next(root) then - do_serialize(root,name,"",0) - end - end - handle("}") -end -function table.serialize(root,name,specification) - local t,n={},0 - local function flush(s) - n=n+1 - t[n]=s - end - serialize(flush,root,name,specification) - return concat(t,"\n") + local dummy=root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_=nil + end + if next(root) then + do_serialize(root,name,"",0) + end + end + handle("}") +end +function table.serialize(root,name,specification) + local t,n={},0 + local function flush(s) + n=n+1 + t[n]=s + end + serialize(flush,root,name,specification) + 
return concat(t,"\n") end table.tohandle=serialize local maxtab=2*1024 -function table.tofile(filename,root,name,specification) - local f=io.open(filename,'w') - if f then - if maxtab>1 then - local t,n={},0 - local function flush(s) - n=n+1 - t[n]=s - if n>maxtab then +function table.tofile(filename,root,name,specification) + local f=io.open(filename,'w') + if f then + if maxtab>1 then + local t,n={},0 + local function flush(s) + n=n+1 + t[n]=s + if n>maxtab then f:write(concat(t,"\n"),"\n") t,n={},0 - end - end - serialize(flush,root,name,specification) - f:write(concat(t,"\n"),"\n") - else - local function flush(s) - f:write(s,"\n") - end - serialize(flush,root,name,specification) - end - f:close() - io.flush() - end + end + end + serialize(flush,root,name,specification) + f:write(concat(t,"\n"),"\n") + else + local function flush(s) + f:write(s,"\n") + end + serialize(flush,root,name,specification) + end + f:close() + io.flush() + end end local function flattened(t,f,depth) - if f==nil then - f={} - depth=0xFFFF + if f==nil then + f={} + depth=0xFFFF elseif tonumber(f) then - depth=f - f={} - elseif not depth then - depth=0xFFFF - end - for k,v in next,t do - if type(k)~="number" then - if depth>0 and type(v)=="table" then - flattened(v,f,depth-1) - else - f[#f+1]=v - end - end - end - for k=1,#t do - local v=t[k] - if depth>0 and type(v)=="table" then - flattened(v,f,depth-1) - else - f[#f+1]=v - end - end - return f + depth=f + f={} + elseif not depth then + depth=0xFFFF + end + for k,v in next,t do + if type(k)~="number" then + if depth>0 and type(v)=="table" then + flattened(v,f,depth-1) + else + f[#f+1]=v + end + end + end + for k=1,#t do + local v=t[k] + if depth>0 and type(v)=="table" then + flattened(v,f,depth-1) + else + f[#f+1]=v + end + end + return f end table.flattened=flattened local function unnest(t,f) if not f then f={} - end - for i=1,#t do - local v=t[i] - if type(v)=="table" then - if type(v[1])=="table" then - unnest(v,f) - else - f[#f+1]=v - end - else - f[#f+1]=v - end - end - return f + end + for i=1,#t do + local v=t[i] + if type(v)=="table" then + if type(v[1])=="table" then + unnest(v,f) + else + f[#f+1]=v + end + else + f[#f+1]=v + end + end + return f end function table.unnest(t) - return unnest(t) + return unnest(t) end local function are_equal(a,b,n,m) - if a and b and #a==#b then - n=n or 1 - m=m or #a - for i=n,m do - local ai,bi=a[i],b[i] + if a and b and #a==#b then + n=n or 1 + m=m or #a + for i=n,m do + local ai,bi=a[i],b[i] if ai==bi then - elseif type(ai)=="table" and type(bi)=="table" then - if not are_equal(ai,bi) then - return false - end - else - return false - end - end - return true - else - return false - end + elseif type(ai)=="table" and type(bi)=="table" then + if not are_equal(ai,bi) then + return false + end + else + return false + end + end + return true + else + return false + end end local function identical(a,b) - for ka,va in next,a do - local vb=b[ka] + for ka,va in next,a do + local vb=b[ka] if va==vb then - elseif type(va)=="table" and type(vb)=="table" then - if not identical(va,vb) then - return false - end - else - return false - end - end - return true -end -table.identical=identical + elseif type(va)=="table" and type(vb)=="table" then + if not identical(va,vb) then + return false + end + else + return false + end + end + return true +end +table.identical=identical table.are_equal=are_equal function table.compact(t) - if t then - for k,v in next,t do + if t then + for k,v in next,t do if not next(v) then - t[k]=nil - end - 
end - end -end -function table.contains(t,v) - if t then - for i=1,#t do - if t[i]==v then - return i - end - end - end - return false -end -function table.count(t) - local n=0 - for k,v in next,t do - n=n+1 - end - return n + t[k]=nil + end + end + end +end +function table.contains(t,v) + if t then + for i=1,#t do + if t[i]==v then + return i + end + end + end + return false +end +function table.count(t) + local n=0 + for k,v in next,t do + n=n+1 + end + return n end function table.swapped(t,s) - local n={} - if s then - for k,v in next,s do - n[k]=v - end - end - for k,v in next,t do - n[v]=k - end - return n + local n={} + if s then + for k,v in next,s do + n[k]=v + end + end + for k,v in next,t do + n[v]=k + end + return n end function table.mirrored(t) - local n={} - for k,v in next,t do - n[v]=k - n[k]=v - end - return n -end -function table.reversed(t) - if t then - local tt,tn={},#t - if tn>0 then - local ttn=0 - for i=tn,1,-1 do - ttn=ttn+1 - tt[ttn]=t[i] - end - end - return tt - end -end -function table.reverse(t) - if t then - local n=#t - for i=1,floor(n/2) do - local j=n-i+1 - t[i],t[j]=t[j],t[i] - end - return t - end + local n={} + for k,v in next,t do + n[v]=k + n[k]=v + end + return n +end +function table.reversed(t) + if t then + local tt,tn={},#t + if tn>0 then + local ttn=0 + for i=tn,1,-1 do + ttn=ttn+1 + tt[ttn]=t[i] + end + end + return tt + end +end +function table.reverse(t) + if t then + local n=#t + for i=1,floor(n/2) do + local j=n-i+1 + t[i],t[j]=t[j],t[i] + end + return t + end end function table.sequenced(t,sep,simple) - if not t then - return "" - end - local n=#t - local s={} + if not t then + return "" + end + local n=#t + local s={} if n>0 then - for i=1,n do - s[i]=tostring(t[i]) - end - else - n=0 - for k,v in sortedhash(t) do - if simple then - if v==true then - n=n+1 - s[n]=k - elseif v and v~="" then - n=n+1 - s[n]=k.."="..tostring(v) - end - else - n=n+1 - s[n]=k.."="..tostring(v) - end - end - end - return concat(s,sep or " | ") -end -function table.print(t,...) - if type(t)~="table" then - print(tostring(t)) + for i=1,n do + s[i]=tostring(t[i]) + end else - serialize(print,t,...) - end + n=0 + for k,v in sortedhash(t) do + if simple then + if v==true then + n=n+1 + s[n]=k + elseif v and v~="" then + n=n+1 + s[n]=k.."="..tostring(v) + end + else + n=n+1 + s[n]=k.."="..tostring(v) + end + end + end + return concat(s,sep or " | ") +end +function table.print(t,...) + if type(t)~="table" then + print(tostring(t)) + else + serialize(print,t,...) + end end setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end) -function table.sub(t,i,j) - return { unpack(t,i,j) } +function table.sub(t,i,j) + return { unpack(t,i,j) } end -function table.is_empty(t) - return not t or not next(t) +function table.is_empty(t) + return not t or not next(t) end -function table.has_one_entry(t) - return t and not next(t,next(t)) +function table.has_one_entry(t) + return t and not next(t,next(t)) end function table.loweredkeys(t) - local l={} - for k,v in next,t do - l[lower(k)]=v - end - return l -end -function table.unique(old) - local hash={} - local new={} - local n=0 - for i=1,#old do - local oi=old[i] - if not hash[oi] then - n=n+1 - new[n]=oi - hash[oi]=true - end - end - return new -end -function table.sorted(t,...) - sort(t,...) 
+ local l={} + for k,v in next,t do + l[lower(k)]=v + end + return l +end +function table.unique(old) + local hash={} + local new={} + local n=0 + for i=1,#old do + local oi=old[i] + if not hash[oi] then + n=n+1 + new[n]=oi + hash[oi]=true + end + end + return new +end +function table.sorted(t,...) + sort(t,...) return t -end +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['l-io']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['l-io']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -local io=io -local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format -local concat=table.concat -local floor=math.floor +local io=io +local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format +local concat=table.concat +local floor=math.floor local type=type -if string.find(os.getenv("PATH"),";") then - io.fileseparator,io.pathseparator="\\",";" -else - io.fileseparator,io.pathseparator="/",":" -end -local function readall(f) - return f:read("*all") -end -local function readall(f) - local size=f:seek("end") - if size==0 then - return "" - elseif size<1024*1024 then - f:seek("set",0) - return f:read('*all') - else - local done=f:seek("set",0) - if size<1024*1024 then - step=1024*1024 - elseif size>16*1024*1024 then - step=16*1024*1024 - else - step=floor(size/(1024*1024))*1024*1024/8 - end - local data={} - while true do - local r=f:read(step) - if not r then - return concat(data) - else - data[#data+1]=r - end - end - end +if string.find(os.getenv("PATH"),";") then + io.fileseparator,io.pathseparator="\\",";" +else + io.fileseparator,io.pathseparator="/",":" +end +local function readall(f) + return f:read("*all") +end +local function readall(f) + local size=f:seek("end") + if size==0 then + return "" + elseif size<1024*1024 then + f:seek("set",0) + return f:read('*all') + else + local done=f:seek("set",0) + if size<1024*1024 then + step=1024*1024 + elseif size>16*1024*1024 then + step=16*1024*1024 + else + step=floor(size/(1024*1024))*1024*1024/8 + end + local data={} + while true do + local r=f:read(step) + if not r then + return concat(data) + else + data[#data+1]=r + end + end + end end io.readall=readall function io.loaddata(filename,textmode) - local f=io.open(filename,(textmode and 'r') or 'rb') - if f then - local data=readall(f) - f:close() - if #data>0 then - return data - end - end -end -function io.savedata(filename,data,joiner) - local f=io.open(filename,"wb") + local f=io.open(filename,(textmode and 'r') or 'rb') if f then - if type(data)=="table" then - f:write(concat(data,joiner or "")) - elseif type(data)=="function" then - data(f) - else - f:write(data or "") - end - f:close() - io.flush() - return true - else - return false - end + local data=readall(f) + f:close() + if #data>0 then + return data + end + end +end +function io.savedata(filename,data,joiner) + local f=io.open(filename,"wb") + if f then + if type(data)=="table" then + f:write(concat(data,joiner or "")) + elseif type(data)=="function" then + data(f) + else + f:write(data or "") + end + f:close() + io.flush() + return true + else + return false + end end 
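-- Editorial note, not part of the patch: a minimal usage sketch of the
-- l-table and l-io helpers whose definitions appear in the hunks above
-- (table.serialize, io.savedata, io.loaddata), assuming the merged library
-- has been loaded into a plain Lua interpreter; the file name
-- "demo-table.lua" is a hypothetical placeholder.
local t = { 10, 20, colors = { "red", "green" } }
local s = table.serialize(t, "return")        -- serialize with a "return {" header
io.savedata("demo-table.lua", s)              -- write the serialized table to disk
local back = io.loaddata("demo-table.lua")    -- read it back as one string
assert(type(back) == "string" and #back > 0)  -- loaddata returns nil for empty files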
function io.loadlines(filename,n) - local f=io.open(filename,'r') + local f=io.open(filename,'r') if not f then - elseif n then - local lines={} - for i=1,n do - local line=f:read("*lines") - if line then - lines[#lines+1]=line - else - break - end - end - f:close() - lines=concat(lines,"\n") - if #lines>0 then - return lines - end - else - local line=f:read("*line") or "" - f:close() - if #line>0 then - return line - end - end -end -function io.loadchunk(filename,n) - local f=io.open(filename,'rb') - if f then - local data=f:read(n or 1024) - f:close() - if #data>0 then - return data - end - end -end -function io.exists(filename) - local f=io.open(filename) - if f==nil then - return false - else - f:close() - return true - end -end -function io.size(filename) - local f=io.open(filename) - if f==nil then - return 0 - else - local s=f:seek("end") - f:close() - return s - end -end -function io.noflines(f) - if type(f)=="string" then - local f=io.open(filename) - if f then - local n=f and io.noflines(f) or 0 - f:close() - return n - else - return 0 - end - else - local n=0 - for _ in f:lines() do - n=n+1 - end - f:seek('set',0) - return n - end -end -local nextchar={ - [ 4]=function(f) - return f:read(1,1,1,1) - end, - [ 2]=function(f) - return f:read(1,1) - end, - [ 1]=function(f) - return f:read(1) - end, - [-2]=function(f) - local a,b=f:read(1,1) - return b,a - end, - [-4]=function(f) - local a,b,c,d=f:read(1,1,1,1) - return d,c,b,a - end + elseif n then + local lines={} + for i=1,n do + local line=f:read("*lines") + if line then + lines[#lines+1]=line + else + break + end + end + f:close() + lines=concat(lines,"\n") + if #lines>0 then + return lines + end + else + local line=f:read("*line") or "" + f:close() + if #line>0 then + return line + end + end +end +function io.loadchunk(filename,n) + local f=io.open(filename,'rb') + if f then + local data=f:read(n or 1024) + f:close() + if #data>0 then + return data + end + end +end +function io.exists(filename) + local f=io.open(filename) + if f==nil then + return false + else + f:close() + return true + end +end +function io.size(filename) + local f=io.open(filename) + if f==nil then + return 0 + else + local s=f:seek("end") + f:close() + return s + end +end +function io.noflines(f) + if type(f)=="string" then + local f=io.open(filename) + if f then + local n=f and io.noflines(f) or 0 + f:close() + return n + else + return 0 + end + else + local n=0 + for _ in f:lines() do + n=n+1 + end + f:seek('set',0) + return n + end +end +local nextchar={ + [ 4]=function(f) + return f:read(1,1,1,1) + end, + [ 2]=function(f) + return f:read(1,1) + end, + [ 1]=function(f) + return f:read(1) + end, + [-2]=function(f) + local a,b=f:read(1,1) + return b,a + end, + [-4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + return d,c,b,a + end } -function io.characters(f,n) - if f then - return nextchar[n or 1],f - end -end -local nextbyte={ - [4]=function(f) - local a,b,c,d=f:read(1,1,1,1) - if d then - return byte(a),byte(b),byte(c),byte(d) - end - end, - [3]=function(f) - local a,b,c=f:read(1,1,1) - if b then - return byte(a),byte(b),byte(c) - end - end, - [2]=function(f) - local a,b=f:read(1,1) - if b then - return byte(a),byte(b) - end - end, - [1]=function (f) - local a=f:read(1) - if a then - return byte(a) - end - end, - [-2]=function (f) - local a,b=f:read(1,1) - if b then - return byte(b),byte(a) - end - end, - [-3]=function(f) - local a,b,c=f:read(1,1,1) - if b then - return byte(c),byte(b),byte(a) - end - end, - [-4]=function(f) - local 
a,b,c,d=f:read(1,1,1,1) - if d then - return byte(d),byte(c),byte(b),byte(a) - end - end +function io.characters(f,n) + if f then + return nextchar[n or 1],f + end +end +local nextbyte={ + [4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + if d then + return byte(a),byte(b),byte(c),byte(d) + end + end, + [3]=function(f) + local a,b,c=f:read(1,1,1) + if b then + return byte(a),byte(b),byte(c) + end + end, + [2]=function(f) + local a,b=f:read(1,1) + if b then + return byte(a),byte(b) + end + end, + [1]=function (f) + local a=f:read(1) + if a then + return byte(a) + end + end, + [-2]=function (f) + local a,b=f:read(1,1) + if b then + return byte(b),byte(a) + end + end, + [-3]=function(f) + local a,b,c=f:read(1,1,1) + if b then + return byte(c),byte(b),byte(a) + end + end, + [-4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + if d then + return byte(d),byte(c),byte(b),byte(a) + end + end } -function io.bytes(f,n) - if f then - return nextbyte[n or 1],f - else - return nil,nil - end -end -function io.ask(question,default,options) - while true do - io.write(question) - if options then - io.write(format(" [%s]",concat(options,"|"))) - end - if default then - io.write(format(" [%s]",default)) - end - io.write(format(" ")) - io.flush() - local answer=io.read() - answer=gsub(answer,"^%s*(.*)%s*$","%1") - if answer=="" and default then - return default - elseif not options then - return answer - else - for k=1,#options do - if options[k]==answer then - return answer - end - end - local pattern="^"..answer - for k=1,#options do - local v=options[k] - if find(v,pattern) then - return v - end - end - end - end -end -local function readnumber(f,n,m) - if m then - f:seek("set",n) - n=m - end - if n==1 then - return byte(f:read(1)) - elseif n==2 then - local a,b=byte(f:read(2),1,2) - return 256*a+b - elseif n==3 then - local a,b,c=byte(f:read(3),1,3) - return 256*256*a+256*b+c - elseif n==4 then - local a,b,c,d=byte(f:read(4),1,4) - return 256*256*256*a+256*256*b+256*c+d - elseif n==8 then - local a,b=readnumber(f,4),readnumber(f,4) - return 256*a+b - elseif n==12 then - local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4) - return 256*256*a+256*b+c - elseif n==-2 then - local b,a=byte(f:read(2),1,2) - return 256*a+b - elseif n==-3 then - local c,b,a=byte(f:read(3),1,3) - return 256*256*a+256*b+c - elseif n==-4 then - local d,c,b,a=byte(f:read(4),1,4) - return 256*256*256*a+256*256*b+256*c+d - elseif n==-8 then - local h,g,f,e,d,c,b,a=byte(f:read(8),1,8) - return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h - else - return 0 - end +function io.bytes(f,n) + if f then + return nextbyte[n or 1],f + else + return nil,nil + end +end +function io.ask(question,default,options) + while true do + io.write(question) + if options then + io.write(format(" [%s]",concat(options,"|"))) + end + if default then + io.write(format(" [%s]",default)) + end + io.write(format(" ")) + io.flush() + local answer=io.read() + answer=gsub(answer,"^%s*(.*)%s*$","%1") + if answer=="" and default then + return default + elseif not options then + return answer + else + for k=1,#options do + if options[k]==answer then + return answer + end + end + local pattern="^"..answer + for k=1,#options do + local v=options[k] + if find(v,pattern) then + return v + end + end + end + end +end +local function readnumber(f,n,m) + if m then + f:seek("set",n) + n=m + end + if n==1 then + return byte(f:read(1)) + elseif n==2 then + local a,b=byte(f:read(2),1,2) + return 
256*a+b + elseif n==3 then + local a,b,c=byte(f:read(3),1,3) + return 256*256*a+256*b+c + elseif n==4 then + local a,b,c,d=byte(f:read(4),1,4) + return 256*256*256*a+256*256*b+256*c+d + elseif n==8 then + local a,b=readnumber(f,4),readnumber(f,4) + return 256*a+b + elseif n==12 then + local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4) + return 256*256*a+256*b+c + elseif n==-2 then + local b,a=byte(f:read(2),1,2) + return 256*a+b + elseif n==-3 then + local c,b,a=byte(f:read(3),1,3) + return 256*256*a+256*b+c + elseif n==-4 then + local d,c,b,a=byte(f:read(4),1,4) + return 256*256*256*a+256*256*b+256*c+d + elseif n==-8 then + local h,g,f,e,d,c,b,a=byte(f:read(8),1,8) + return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h + else + return 0 + end end io.readnumber=readnumber -function io.readstring(f,n,m) - if m then - f:seek("set",n) - n=m - end - local str=gsub(f:read(n),"\000","") - return str +function io.readstring(f,n,m) + if m then + f:seek("set",n) + n=m + end + local str=gsub(f:read(n),"\000","") + return str end if not io.i_limiter then function io.i_limiter() end end if not io.o_limiter then function io.o_limiter() end end @@ -1944,862 +1944,862 @@ end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['l-file']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['l-file']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -file=file or {} +file=file or {} local file=file -if not lfs then - lfs=optionalrequire("lfs") +if not lfs then + lfs=optionalrequire("lfs") end if not lfs then - lfs={ - getcurrentdir=function() - return "." - end, - attributes=function() - return nil - end, - isfile=function(name) - local f=io.open(name,'rb') - if f then - f:close() - return true - end - end, - isdir=function(name) - print("you need to load lfs") - return false - end + lfs={ + getcurrentdir=function() + return "." 
+ end, + attributes=function() + return nil + end, + isfile=function(name) + local f=io.open(name,'rb') + if f then + f:close() + return true + end + end, + isdir=function(name) + print("you need to load lfs") + return false + end } elseif not lfs.isfile then local attributes=lfs.attributes - function lfs.isdir(name) - return attributes(name,"mode")=="directory" + function lfs.isdir(name) + return attributes(name,"mode")=="directory" end - function lfs.isfile(name) - return attributes(name,"mode")=="file" + function lfs.isfile(name) + return attributes(name,"mode")=="file" end end -local insert,concat=table.insert,table.concat -local match,find,gmatch=string.match,string.find,string.gmatch -local lpegmatch=lpeg.match -local getcurrentdir,attributes=lfs.currentdir,lfs.attributes +local insert,concat=table.insert,table.concat +local match,find,gmatch=string.match,string.find,string.gmatch +local lpegmatch=lpeg.match +local getcurrentdir,attributes=lfs.currentdir,lfs.attributes local checkedsplit=string.checkedsplit local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct -local colon=P(":") -local period=P(".") -local periods=P("..") -local fwslash=P("/") -local bwslash=P("\\") -local slashes=S("\\/") -local noperiod=1-period -local noslashes=1-slashes -local name=noperiod^1 +local colon=P(":") +local period=P(".") +local periods=P("..") +local fwslash=P("/") +local bwslash=P("\\") +local slashes=S("\\/") +local noperiod=1-period +local noslashes=1-slashes +local name=noperiod^1 local suffix=period/""*(1-period-slashes)^1*-1 local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1) -local function pathpart(name,default) - return name and lpegmatch(pattern,name) or default or "" +local function pathpart(name,default) + return name and lpegmatch(pattern,name) or default or "" end local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1 -local function basename(name) - return name and lpegmatch(pattern,name) or name +local function basename(name) + return name and lpegmatch(pattern,name) or name end local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0 -local function nameonly(name) - return name and lpegmatch(pattern,name) or name +local function nameonly(name) + return name and lpegmatch(pattern,name) or name end local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1 -local function suffixonly(name) - return name and lpegmatch(pattern,name) or "" +local function suffixonly(name) + return name and lpegmatch(pattern,name) or "" end -file.pathpart=pathpart -file.basename=basename -file.nameonly=nameonly -file.suffixonly=suffixonly +file.pathpart=pathpart +file.basename=basename +file.nameonly=nameonly +file.suffixonly=suffixonly file.suffix=suffixonly file.dirname=pathpart file.extname=suffixonly -local drive=C(R("az","AZ"))*colon -local path=C((noslashes^0*slashes)^0) -local suffix=period*C(P(1-period)^0*P(-1)) -local base=C((1-suffix)^0) +local drive=C(R("az","AZ"))*colon +local path=C((noslashes^0*slashes)^0) +local suffix=period*C(P(1-period)^0*P(-1)) +local base=C((1-suffix)^0) local rest=C(P(1)^0) -drive=drive+Cc("") -path=path+Cc("") -base=base+Cc("") +drive=drive+Cc("") +path=path+Cc("") +base=base+Cc("") suffix=suffix+Cc("") -local pattern_a=drive*path*base*suffix -local pattern_b=path*base*suffix +local pattern_a=drive*path*base*suffix +local pattern_b=path*base*suffix local pattern_c=C(drive*path)*C(base*suffix) local pattern_d=path*rest -function file.splitname(str,splitdrive) +function file.splitname(str,splitdrive) if not 
str then - elseif splitdrive then + elseif splitdrive then return lpegmatch(pattern_a,str) - else + else return lpegmatch(pattern_b,str) - end + end end -function file.splitbase(str) - if str then +function file.splitbase(str) + if str then return lpegmatch(pattern_d,str) - else + else return "",str - end + end end -function file.nametotable(str,splitdrive) - if str then +function file.nametotable(str,splitdrive) + if str then local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str) - if splitdrive then - return { - path=path, - drive=drive, - subpath=subpath, - name=name, - base=base, - suffix=suffix, - } - else - return { - path=path, - name=name, - base=base, - suffix=suffix, - } - end - end + if splitdrive then + return { + path=path, + drive=drive, + subpath=subpath, + name=name, + base=base, + suffix=suffix, + } + else + return { + path=path, + name=name, + base=base, + suffix=suffix, + } + end + end end local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1) -function file.removesuffix(name) - return name and lpegmatch(pattern,name) +function file.removesuffix(name) + return name and lpegmatch(pattern,name) end -local suffix=period/""*(1-period-slashes)^1*-1 +local suffix=period/""*(1-period-slashes)^1*-1 local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix) -function file.addsuffix(filename,suffix,criterium) - if not filename or not suffix or suffix=="" then - return filename - elseif criterium==true then - return filename.."."..suffix - elseif not criterium then - local n,s=lpegmatch(pattern,filename) - if not s or s=="" then - return filename.."."..suffix - else - return filename - end - else - local n,s=lpegmatch(pattern,filename) - if s and s~="" then - local t=type(criterium) +function file.addsuffix(filename,suffix,criterium) + if not filename or not suffix or suffix=="" then + return filename + elseif criterium==true then + return filename.."."..suffix + elseif not criterium then + local n,s=lpegmatch(pattern,filename) + if not s or s=="" then + return filename.."."..suffix + else + return filename + end + else + local n,s=lpegmatch(pattern,filename) + if s and s~="" then + local t=type(criterium) if t=="table" then - for i=1,#criterium do - if s==criterium[i] then - return filename - end - end + for i=1,#criterium do + if s==criterium[i] then + return filename + end + end elseif t=="string" then - if s==criterium then - return filename - end - end - end - return (n or filename).."."..suffix - end -end -local suffix=period*(1-period-slashes)^1*-1 + if s==criterium then + return filename + end + end + end + return (n or filename).."."..suffix + end +end +local suffix=period*(1-period-slashes)^1*-1 local pattern=Cs((1-suffix)^0) -function file.replacesuffix(name,suffix) - if name and suffix and suffix~="" then - return lpegmatch(pattern,name).."."..suffix - else - return name - end +function file.replacesuffix(name,suffix) + if name and suffix and suffix~="" then + return lpegmatch(pattern,name).."."..suffix + else + return name + end end local reslasher=lpeg.replacer(P("\\"),"/") -function file.reslash(str) - return str and lpegmatch(reslasher,str) +function file.reslash(str) + return str and lpegmatch(reslasher,str) end -function file.is_writable(name) +function file.is_writable(name) if not name then - elseif lfs.isdir(name) then - name=name.."/m_t_x_t_e_s_t.tmp" - local f=io.open(name,"wb") - if f then - f:close() - os.remove(name) - return true - end - elseif lfs.isfile(name) then - local f=io.open(name,"ab") - if f then - f:close() - return 
true - end - else - local f=io.open(name,"ab") - if f then - f:close() - os.remove(name) - return true - end - end - return false + elseif lfs.isdir(name) then + name=name.."/m_t_x_t_e_s_t.tmp" + local f=io.open(name,"wb") + if f then + f:close() + os.remove(name) + return true + end + elseif lfs.isfile(name) then + local f=io.open(name,"ab") + if f then + f:close() + return true + end + else + local f=io.open(name,"ab") + if f then + f:close() + os.remove(name) + return true + end + end + return false end local readable=P("r")*Cc(true) -function file.is_readable(name) - if name then - local a=attributes(name) - return a and lpegmatch(readable,a.permissions) or false - else - return false - end +function file.is_readable(name) + if name then + local a=attributes(name) + return a and lpegmatch(readable,a.permissions) or false + else + return false + end end file.isreadable=file.is_readable file.iswritable=file.is_writable -function file.size(name) - if name then - local a=attributes(name) - return a and a.size or 0 - else - return 0 - end +function file.size(name) + if name then + local a=attributes(name) + return a and a.size or 0 + else + return 0 + end end function file.splitpath(str,separator) - return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator) + return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator) end function file.joinpath(tab,separator) return tab and concat(tab,separator or io.pathseparator) end -local stripper=Cs(P(fwslash)^0/""*reslasher) -local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon -local isroot=fwslash^1*-1 +local stripper=Cs(P(fwslash)^0/""*reslasher) +local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon +local isroot=fwslash^1*-1 local hasroot=fwslash^1 local deslasher=lpeg.replacer(S("\\/")^1,"/") -function file.join(...) - local lst={... } - local one=lst[1] - if lpegmatch(isnetwork,one) then - local two=lpegmatch(deslasher,concat(lst,"/",2)) - return one.."/"..two - elseif lpegmatch(isroot,one) then - local two=lpegmatch(deslasher,concat(lst,"/",2)) - if lpegmatch(hasroot,two) then - return two - else - return "/"..two - end - elseif one=="" then - return lpegmatch(stripper,concat(lst,"/",2)) - else - return lpegmatch(deslasher,concat(lst,"/")) - end -end -local drivespec=R("az","AZ")^1*colon -local anchors=fwslash+drivespec -local untouched=periods+(1-period)^1*P(-1) -local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1)) +function file.join(...) + local lst={... } + local one=lst[1] + if lpegmatch(isnetwork,one) then + local two=lpegmatch(deslasher,concat(lst,"/",2)) + return one.."/"..two + elseif lpegmatch(isroot,one) then + local two=lpegmatch(deslasher,concat(lst,"/",2)) + if lpegmatch(hasroot,two) then + return two + else + return "/"..two + end + elseif one=="" then + return lpegmatch(stripper,concat(lst,"/",2)) + else + return lpegmatch(deslasher,concat(lst,"/")) + end +end +local drivespec=R("az","AZ")^1*colon +local anchors=fwslash+drivespec +local untouched=periods+(1-period)^1*P(-1) +local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1)) local absolute=fwslash function file.collapsepath(str,anchor) - if not str then - return - end - if anchor==true and not lpegmatch(anchors,str) then - str=getcurrentdir().."/"..str - end - if str=="" or str=="." then - return "." 
- elseif lpegmatch(untouched,str) then - return lpegmatch(reslasher,str) - end - local starter,oldelements=lpegmatch(splitstarter,str) - local newelements={} - local i=#oldelements - while i>0 do - local element=oldelements[i] + if not str then + return + end + if anchor==true and not lpegmatch(anchors,str) then + str=getcurrentdir().."/"..str + end + if str=="" or str=="." then + return "." + elseif lpegmatch(untouched,str) then + return lpegmatch(reslasher,str) + end + local starter,oldelements=lpegmatch(splitstarter,str) + local newelements={} + local i=#oldelements + while i>0 do + local element=oldelements[i] if element=='.' then - elseif element=='..' then - local n=i-1 - while n>0 do - local element=oldelements[n] - if element~='..' and element~='.' then - oldelements[n]='.' - break - else - n=n-1 - end - end - if n<1 then - insert(newelements,1,'..') - end - elseif element~="" then - insert(newelements,1,element) - end - i=i-1 - end - if #newelements==0 then - return starter or "." - elseif starter then - return starter..concat(newelements,'/') - elseif lpegmatch(absolute,str) then - return "/"..concat(newelements,'/') - else - newelements=concat(newelements,'/') - if anchor=="." and find(str,"^%./") then - return "./"..newelements - else - return newelements - end - end -end -local validchars=R("az","09","AZ","--","..") -local pattern_a=lpeg.replacer(1-validchars) -local pattern_a=Cs((validchars+P(1)/"-")^1) -local whatever=P("-")^0/"" + elseif element=='..' then + local n=i-1 + while n>0 do + local element=oldelements[n] + if element~='..' and element~='.' then + oldelements[n]='.' + break + else + n=n-1 + end + end + if n<1 then + insert(newelements,1,'..') + end + elseif element~="" then + insert(newelements,1,element) + end + i=i-1 + end + if #newelements==0 then + return starter or "." + elseif starter then + return starter..concat(newelements,'/') + elseif lpegmatch(absolute,str) then + return "/"..concat(newelements,'/') + else + newelements=concat(newelements,'/') + if anchor=="." 
and find(str,"^%./") then + return "./"..newelements + else + return newelements + end + end +end +local validchars=R("az","09","AZ","--","..") +local pattern_a=lpeg.replacer(1-validchars) +local pattern_a=Cs((validchars+P(1)/"-")^1) +local whatever=P("-")^0/"" local pattern_b=Cs(whatever*(1-whatever*-1)^1) -function file.robustname(str,strict) - if str then - str=lpegmatch(pattern_a,str) or str - if strict then +function file.robustname(str,strict) + if str then + str=lpegmatch(pattern_a,str) or str + if strict then return lpegmatch(pattern_b,str) or str - else - return str - end - end + else + return str + end + end end -file.readdata=io.loaddata +file.readdata=io.loaddata file.savedata=io.savedata -function file.copy(oldname,newname) - if oldname and newname then - local data=io.loaddata(oldname) - if data and data~="" then - file.savedata(newname,data) - end - end -end -local letter=R("az","AZ")+S("_-+") +function file.copy(oldname,newname) + if oldname and newname then + local data=io.loaddata(oldname) + if data and data~="" then + file.savedata(newname,data) + end + end +end +local letter=R("az","AZ")+S("_-+") local separator=P("://") -local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash +local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash local rootbased=fwslash+letter*colon -lpeg.patterns.qualified=qualified +lpeg.patterns.qualified=qualified lpeg.patterns.rootbased=rootbased -function file.is_qualified_path(filename) - return filename and lpegmatch(qualified,filename)~=nil -end -function file.is_rootbased_path(filename) - return filename and lpegmatch(rootbased,filename)~=nil -end -function file.strip(name,dir) - if name then - local b,a=match(name,"^(.-)"..dir.."(.*)$") - return a~="" and a or name - end -end -function lfs.mkdirs(path) - local full - for sub in gmatch(path,"([^\\/]+)") do - if full then - full=full.."/"..sub - else - full=sub - end - if not lfs.isdir(full) then - lfs.mkdir(full) - end - end -end +function file.is_qualified_path(filename) + return filename and lpegmatch(qualified,filename)~=nil +end +function file.is_rootbased_path(filename) + return filename and lpegmatch(rootbased,filename)~=nil +end +function file.strip(name,dir) + if name then + local b,a=match(name,"^(.-)"..dir.."(.*)$") + return a~="" and a or name + end +end +function lfs.mkdirs(path) + local full + for sub in gmatch(path,"([^\\/]+)") do + if full then + full=full.."/"..sub + else + full=sub + end + if not lfs.isdir(full) then + lfs.mkdir(full) + end + end +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['l-boolean']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['l-boolean']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } local type,tonumber=type,tonumber -boolean=boolean or {} +boolean=boolean or {} local boolean=boolean -function boolean.tonumber(b) +function boolean.tonumber(b) if b then return 1 else return 0 end end function toboolean(str,tolerant) - if str==nil then - return false - elseif str==false then - return false - elseif str==true then - return true - elseif str=="true" then - return true 
- elseif str=="false" then - return false - elseif not tolerant then - return false - elseif str==0 then - return false - elseif (tonumber(str) or 0)>0 then - return true - else - return str=="yes" or str=="on" or str=="t" - end + if str==nil then + return false + elseif str==false then + return false + elseif str==true then + return true + elseif str=="true" then + return true + elseif str=="false" then + return false + elseif not tolerant then + return false + elseif str==0 then + return false + elseif (tonumber(str) or 0)>0 then + return true + else + return str=="yes" or str=="on" or str=="t" + end end string.toboolean=toboolean -function string.booleanstring(str) - if str=="0" then - return false - elseif str=="1" then - return true - elseif str=="" then - return false - elseif str=="false" then - return false - elseif str=="true" then - return true - elseif (tonumber(str) or 0)>0 then - return true - else - return str=="yes" or str=="on" or str=="t" - end -end -function string.is_boolean(str,default) - if type(str)=="string" then - if str=="true" or str=="yes" or str=="on" or str=="t" then - return true - elseif str=="false" or str=="no" or str=="off" or str=="f" then - return false - end - end - return default -end +function string.booleanstring(str) + if str=="0" then + return false + elseif str=="1" then + return true + elseif str=="" then + return false + elseif str=="false" then + return false + elseif str=="true" then + return true + elseif (tonumber(str) or 0)>0 then + return true + else + return str=="yes" or str=="on" or str=="t" + end +end +function string.is_boolean(str,default) + if type(str)=="string" then + if str=="true" or str=="yes" or str=="on" or str=="t" then + return true + elseif str=="false" or str=="no" or str=="off" or str=="f" then + return false + end + end + return default +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['l-math']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['l-math']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan -if not math.round then - function math.round(x) return floor(x+0.5) end +if not math.round then + function math.round(x) return floor(x+0.5) end end -if not math.div then - function math.div(n,m) return floor(n/m) end +if not math.div then + function math.div(n,m) return floor(n/m) end end -if not math.mod then - function math.mod(n,m) return n%m end +if not math.mod then + function math.mod(n,m) return n%m end end local pipi=2*math.pi/360 -if not math.sind then - function math.sind(d) return sin(d*pipi) end - function math.cosd(d) return cos(d*pipi) end - function math.tand(d) return tan(d*pipi) end -end -if not math.odd then - function math.odd (n) return n%2~=0 end - function math.even(n) return n%2==0 end -end +if not math.sind then + function math.sind(d) return sin(d*pipi) end + function math.cosd(d) return cos(d*pipi) end + function math.tand(d) return tan(d*pipi) end +end +if not math.odd then + function math.odd (n) return n%2~=0 end + function math.even(n) return n%2==0 end +end end -- closure do -- begin closure to overcome 
local limits and interference -if not modules then modules={} end modules ['util-str']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['util-str']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -utilities=utilities or {} -utilities.strings=utilities.strings or {} +utilities=utilities or {} +utilities.strings=utilities.strings or {} local strings=utilities.strings -local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub -local load,dump=load,string.dump -local tonumber,type,tostring=tonumber,type,tostring -local unpack,concat=table.unpack,table.concat -local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc -local patterns,lpegmatch=lpeg.patterns,lpeg.match +local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub +local load,dump=load,string.dump +local tonumber,type,tostring=tonumber,type,tostring +local unpack,concat=table.unpack,table.concat +local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc +local patterns,lpegmatch=lpeg.patterns,lpeg.match local utfchar,utfbyte=utf.char,utf.byte -local loadstripped=_LUAVERSION<5.2 and load or function(str) +local loadstripped=_LUAVERSION<5.2 and load or function(str) return load(dump(load(str),true)) end if not number then number={} end local stripper=patterns.stripzeros -local function points(n) - return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536)) +local function points(n) + return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536)) end -local function basepoints(n) - return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536)) +local function basepoints(n) + return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536)) end -number.points=points +number.points=points number.basepoints=basepoints -local rubish=patterns.spaceortab^0*patterns.newline -local anyrubish=patterns.spaceortab+patterns.newline -local anything=patterns.anything -local stripped=(patterns.spaceortab^1/"")*patterns.newline -local leading=rubish^0/"" -local trailing=(anyrubish^1*patterns.endofstring)/"" +local rubish=patterns.spaceortab^0*patterns.newline +local anyrubish=patterns.spaceortab+patterns.newline +local anything=patterns.anything +local stripped=(patterns.spaceortab^1/"")*patterns.newline +local leading=rubish^0/"" +local trailing=(anyrubish^1*patterns.endofstring)/"" local redundant=rubish^3/"\n" local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0) -function strings.collapsecrlf(str) - return lpegmatch(pattern,str) +function strings.collapsecrlf(str) + return lpegmatch(pattern,str) end local repeaters={} -function strings.newrepeater(str,offset) - offset=offset or 0 - local s=repeaters[str] - if not s then - s={} - repeaters[str]=s - end - local t=s[offset] - if t then - return t - end - t={} - setmetatable(t,{ __index=function(t,k) - if not k then - return "" - end - local n=k+offset - local s=n>0 and rep(str,n) or "" - t[k]=s - return s - end }) - s[offset]=t - return t +function strings.newrepeater(str,offset) + offset=offset or 0 + local 
s=repeaters[str] + if not s then + s={} + repeaters[str]=s + end + local t=s[offset] + if t then + return t + end + t={} + setmetatable(t,{ __index=function(t,k) + if not k then + return "" + end + local n=k+offset + local s=n>0 and rep(str,n) or "" + t[k]=s + return s + end }) + s[offset]=t + return t end local extra,tab,start=0,0,4,0 local nspaces=strings.newrepeater(" ") string.nspaces=nspaces -local pattern=Carg(1)/function(t) - extra,tab,start=0,t or 7,1 - end*Cs(( - Cp()*patterns.tab/function(position) - local current=(position-start+1)+extra - local spaces=tab-(current-1)%tab - if spaces>0 then - extra=extra+spaces-1 +local pattern=Carg(1)/function(t) + extra,tab,start=0,t or 7,1 + end*Cs(( + Cp()*patterns.tab/function(position) + local current=(position-start+1)+extra + local spaces=tab-(current-1)%tab + if spaces>0 then + extra=extra+spaces-1 return nspaces[spaces] - else - return "" - end - end+patterns.newline*Cp()/function(position) - extra,start=0,position - end+patterns.anything + else + return "" + end + end+patterns.newline*Cp()/function(position) + extra,start=0,position + end+patterns.anything )^1) -function strings.tabtospace(str,tab) - return lpegmatch(pattern,str,1,tab or 7) +function strings.tabtospace(str,tab) + return lpegmatch(pattern,str,1,tab or 7) end function strings.striplong(str) - str=gsub(str,"^%s*","") - str=gsub(str,"[\n\r]+ *","\n") - return str + str=gsub(str,"^%s*","") + str=gsub(str,"[\n\r]+ *","\n") + return str end -function strings.nice(str) +function strings.nice(str) str=gsub(str,"[:%-+_]+"," ") - return str + return str end local n=0 local sequenced=table.sequenced -function string.autodouble(s,sep) - if s==nil then - return '""' - end - local t=type(s) - if t=="number" then +function string.autodouble(s,sep) + if s==nil then + return '""' + end + local t=type(s) + if t=="number" then return tostring(s) - end - if t=="table" then - return ('"'..sequenced(s,sep or ",")..'"') - end - return ('"'..tostring(s)..'"') -end -function string.autosingle(s,sep) - if s==nil then - return "''" - end - local t=type(s) - if t=="number" then + end + if t=="table" then + return ('"'..sequenced(s,sep or ",")..'"') + end + return ('"'..tostring(s)..'"') +end +function string.autosingle(s,sep) + if s==nil then + return "''" + end + local t=type(s) + if t=="number" then return tostring(s) - end - if t=="table" then - return ("'"..sequenced(s,sep or ",").."'") - end - return ("'"..tostring(s).."'") -end -local tracedchars={} -string.tracedchars=tracedchars + end + if t=="table" then + return ("'"..sequenced(s,sep or ",").."'") + end + return ("'"..tostring(s).."'") +end +local tracedchars={} +string.tracedchars=tracedchars strings.tracers=tracedchars function string.tracedchar(b) - if type(b)=="number" then - return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")") - else - local c=utfbyte(b) - return tracedchars[c] or (b.." 
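The repeater cache and the tab expander defined above back much of the pretty-printing and verbatim code. A small sketch of the intended behaviour, assuming the util-str closure above is active:

    local nspaces = string.nspaces            -- lazy table: nspaces[n] is n spaces
    print("[" .. nspaces[4] .. "]")           -- prints [    ]

    -- tabs are padded out to the next multiple of the given width (default 7)
    print(utilities.strings.tabtospace("a\tb", 4))  -- "a", three spaces, then "b"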
(U+"..format('%05X',c)..")") - end -end -function number.signed(i) - if i>0 then - return "+",i - else - return "-",-i - end -end -local preamble=[[ -local type = type -local tostring = tostring -local tonumber = tonumber -local format = string.format -local concat = table.concat -local signed = number.signed -local points = number.points -local basepoints = number.basepoints -local utfchar = utf.char -local utfbyte = utf.byte -local lpegmatch = lpeg.match -local nspaces = string.nspaces -local tracedchar = string.tracedchar -local autosingle = string.autosingle -local autodouble = string.autodouble -local sequenced = table.sequenced + if type(b)=="number" then + return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")") + else + local c=utfbyte(b) + return tracedchars[c] or (b.." (U+"..format('%05X',c)..")") + end +end +function number.signed(i) + if i>0 then + return "+",i + else + return "-",-i + end +end +local preamble=[[ +local type = type +local tostring = tostring +local tonumber = tonumber +local format = string.format +local concat = table.concat +local signed = number.signed +local points = number.points +local basepoints = number.basepoints +local utfchar = utf.char +local utfbyte = utf.byte +local lpegmatch = lpeg.match +local nspaces = string.nspaces +local tracedchar = string.tracedchar +local autosingle = string.autosingle +local autodouble = string.autodouble +local sequenced = table.sequenced ]] -local template=[[ -%s -%s -return function(%s) return %s end +local template=[[ +%s +%s +return function(%s) return %s end ]] local arguments={ "a1" } -setmetatable(arguments,{ __index=function(t,k) - local v=t[k-1]..",a"..k - t[k]=v - return v - end +setmetatable(arguments,{ __index=function(t,k) + local v=t[k-1]..",a"..k + t[k]=v + return v + end }) -local prefix_any=C((S("+- .")+R("09"))^0) +local prefix_any=C((S("+- .")+R("09"))^0) local prefix_tab=C((1-R("az","AZ","09","%%"))^0) -local format_s=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%ss',a%s)",f,n) +local format_s=function(f) + n=n+1 + if f and f~="" then + return format("format('%%%ss',a%s)",f,n) else return format("(a%s or '')",n) - end + end end local format_S=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%ss',tostring(a%s))",f,n) - else - return format("tostring(a%s)",n) - end -end -local format_q=function() - n=n+1 + n=n+1 + if f and f~="" then + return format("format('%%%ss',tostring(a%s))",f,n) + else + return format("tostring(a%s)",n) + end +end +local format_q=function() + n=n+1 return format("(a%s and format('%%q',a%s) or '')",n,n) end local format_Q=function() - n=n+1 - return format("format('%%q',tostring(a%s))",n) + n=n+1 + return format("format('%%q',tostring(a%s))",n) end -local format_i=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%si',a%s)",f,n) - else - return format("a%s",n) - end +local format_i=function(f) + n=n+1 + if f and f~="" then + return format("format('%%%si',a%s)",f,n) + else + return format("a%s",n) + end end local format_d=format_i -local format_I=function(f) - n=n+1 - return format("format('%%s%%%si',signed(a%s))",f,n) -end -local format_f=function(f) - n=n+1 - return format("format('%%%sf',a%s)",f,n) -end -local format_g=function(f) - n=n+1 - return format("format('%%%sg',a%s)",f,n) -end -local format_G=function(f) - n=n+1 - return format("format('%%%sG',a%s)",f,n) -end -local format_e=function(f) - n=n+1 - return format("format('%%%se',a%s)",f,n) -end -local format_E=function(f) - n=n+1 - return 
format("format('%%%sE',a%s)",f,n) -end -local format_x=function(f) - n=n+1 - return format("format('%%%sx',a%s)",f,n) -end -local format_X=function(f) - n=n+1 - return format("format('%%%sX',a%s)",f,n) -end -local format_o=function(f) - n=n+1 - return format("format('%%%so',a%s)",f,n) -end -local format_c=function() - n=n+1 - return format("utfchar(a%s)",n) -end -local format_C=function() - n=n+1 - return format("tracedchar(a%s)",n) -end -local format_r=function(f) - n=n+1 - return format("format('%%%s.0f',a%s)",f,n) -end -local format_h=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_H=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_u=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_U=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_p=function() - n=n+1 - return format("points(a%s)",n) -end -local format_b=function() - n=n+1 - return format("basepoints(a%s)",n) -end -local format_t=function(f) - n=n+1 - if f and f~="" then - return format("concat(a%s,%q)",n,f) - else - return format("concat(a%s)",n) - end -end -local format_T=function(f) - n=n+1 - if f and f~="" then - return format("sequenced(a%s,%q)",n,f) - else - return format("sequenced(a%s)",n) - end -end -local format_l=function() - n=n+1 - return format("(a%s and 'true' or 'false')",n) -end -local format_L=function() - n=n+1 - return format("(a%s and 'TRUE' or 'FALSE')",n) +local format_I=function(f) + n=n+1 + return format("format('%%s%%%si',signed(a%s))",f,n) +end +local format_f=function(f) + n=n+1 + return format("format('%%%sf',a%s)",f,n) +end +local format_g=function(f) + n=n+1 + return format("format('%%%sg',a%s)",f,n) +end +local format_G=function(f) + n=n+1 + return format("format('%%%sG',a%s)",f,n) +end +local format_e=function(f) + n=n+1 + return format("format('%%%se',a%s)",f,n) +end +local format_E=function(f) + n=n+1 + return format("format('%%%sE',a%s)",f,n) +end +local format_x=function(f) + n=n+1 + return format("format('%%%sx',a%s)",f,n) +end +local format_X=function(f) + n=n+1 + return format("format('%%%sX',a%s)",f,n) +end +local format_o=function(f) + n=n+1 + return format("format('%%%so',a%s)",f,n) +end +local format_c=function() + n=n+1 + return format("utfchar(a%s)",n) +end +local format_C=function() + n=n+1 + return format("tracedchar(a%s)",n) +end +local format_r=function(f) + n=n+1 + return format("format('%%%s.0f',a%s)",f,n) +end +local format_h=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return 
format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_H=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_u=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_U=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_p=function() + n=n+1 + return format("points(a%s)",n) +end +local format_b=function() + n=n+1 + return format("basepoints(a%s)",n) +end +local format_t=function(f) + n=n+1 + if f and f~="" then + return format("concat(a%s,%q)",n,f) + else + return format("concat(a%s)",n) + end +end +local format_T=function(f) + n=n+1 + if f and f~="" then + return format("sequenced(a%s,%q)",n,f) + else + return format("sequenced(a%s)",n) + end +end +local format_l=function() + n=n+1 + return format("(a%s and 'true' or 'false')",n) +end +local format_L=function() + n=n+1 + return format("(a%s and 'TRUE' or 'FALSE')",n) end local format_N=function() - n=n+1 - return format("tostring(tonumber(a%s) or a%s)",n,n) -end -local format_a=function(f) - n=n+1 - if f and f~="" then - return format("autosingle(a%s,%q)",n,f) - else - return format("autosingle(a%s)",n) - end -end -local format_A=function(f) - n=n+1 - if f and f~="" then - return format("autodouble(a%s,%q)",n,f) - else - return format("autodouble(a%s)",n) - end + n=n+1 + return format("tostring(tonumber(a%s) or a%s)",n,n) +end +local format_a=function(f) + n=n+1 + if f and f~="" then + return format("autosingle(a%s,%q)",n,f) + else + return format("autosingle(a%s)",n) + end +end +local format_A=function(f) + n=n+1 + if f and f~="" then + return format("autodouble(a%s,%q)",n,f) + else + return format("autodouble(a%s)",n) + end end local format_w=function(f) - n=n+1 - f=tonumber(f) + n=n+1 + f=tonumber(f) if f then return format("nspaces[%s+a%s]",f,n) - else + else return format("nspaces[a%s]",n) - end + end end local format_W=function(f) - return format("nspaces[%s]",tonumber(f) or 0) + return format("nspaces[%s]",tonumber(f) or 0) end -local format_rest=function(s) +local format_rest=function(s) return format("%q",s) end -local format_extension=function(extensions,f,name) - local extension=extensions[name] or "tostring(%s)" - local f=tonumber(f) or 1 - if f==0 then - return extension - elseif f==1 then - n=n+1 - local a="a"..n +local format_extension=function(extensions,f,name) + local extension=extensions[name] or "tostring(%s)" + local f=tonumber(f) or 1 + if f==0 then + return extension + elseif f==1 then + n=n+1 + local a="a"..n return format(extension,a,a) - elseif f<0 then - local a="a"..(n+f+1) - return format(extension,a,a) - else - local t={} - for i=1,f do - n=n+1 - t[#t+1]="a"..n - end - return format(extension,unpack(t)) - end -end -local builder=Cs { "start", - start=( - ( - P("%")/""*( + elseif f<0 then + local a="a"..(n+f+1) + return 
format(extension,a,a) + else + local t={} + for i=1,f do + n=n+1 + t[#t+1]="a"..n + end + return format(extension,unpack(t)) + end +end +local builder=Cs { "start", + start=( + ( + P("%")/""*( V("!") +V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o") +V("c")+V("C")+V("S") @@ -2811,8 +2811,8 @@ local builder=Cs { "start", +V("a") +V("A") +V("*") - )+V("*") - )*(P(-1)+Carg(1)) + )+V("*") + )*(P(-1)+Carg(1)) )^0, ["s"]=(prefix_any*P("s"))/format_s, ["q"]=(prefix_any*P("q"))/format_q, @@ -2848,796 +2848,796 @@ local builder=Cs { "start", ["a"]=(prefix_any*P("a"))/format_a, ["A"]=(prefix_any*P("A"))/format_A, ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%%%")^1)/format_rest, - ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension, + ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension, } -local direct=Cs ( - P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1) +local direct=Cs ( + P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1) ) -local function make(t,str) - local f - local p - local p=lpegmatch(direct,str) - if p then - f=loadstripped(p)() - else - n=0 +local function make(t,str) + local f + local p + local p=lpegmatch(direct,str) + if p then + f=loadstripped(p)() + else + n=0 p=lpegmatch(builder,str,1,"..",t._extensions_) - if n>0 then + if n>0 then p=format(template,preamble,t._preamble_,arguments[n],p) - f=loadstripped(p)() - else - f=function() return str end - end - end - t[str]=f - return f + f=loadstripped(p)() + else + f=function() return str end + end + end + t[str]=f + return f end -local function use(t,fmt,...) - return t[fmt](...) +local function use(t,fmt,...) + return t[fmt](...) end strings.formatters={} -function strings.formatters.new() - local t={ _extensions_={},_preamble_="",_type_="formatter" } - setmetatable(t,{ __index=make,__call=use }) - return t +function strings.formatters.new() + local t={ _extensions_={},_preamble_="",_type_="formatter" } + setmetatable(t,{ __index=make,__call=use }) + return t end local formatters=strings.formatters.new() string.formatters=formatters string.formatter=function(str,...) return formatters[str](...) 
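The builder above compiles a format template into a specialized Lua function on first use and caches it in the formatters table; besides the usual printf directives it adds ConTeXt-specific ones such as %p (scaled points), %t (table concat) and %U (unicode slot). A hedged usage sketch, assuming the util-str closure is loaded:

    local f = string.formatters
    print(f["%s is %p wide"]("x", 65536))         -- x is 1pt wide
    print(f["%5.2f %t"](math.pi, { "a", "b" }))   --  3.14 ab
    print(string.formatter("%U", 65))             -- U+00041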
end -local function add(t,name,template,preamble) - if type(t)=="table" and t._type_=="formatter" then - t._extensions_[name]=template or "%s" - if preamble then +local function add(t,name,template,preamble) + if type(t)=="table" and t._type_=="formatter" then + t._extensions_[name]=template or "%s" + if preamble then t._preamble_=preamble.."\n"..t._preamble_ - end - end + end + end end strings.formatters.add=add -lpeg.patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0) +lpeg.patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0) lpeg.patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0) -add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]]) +add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]]) add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]]) end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['luat-basics-gen']={ - version=1.100, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['luat-basics-gen']={ + version=1.100, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local dummyfunction=function() -end -local dummyreporter=function(c) - return function(...) - (texio.reporter or texio.write_nl)(c.."
: "..string.formatters(...)) + end +end +statistics={ + register=dummyfunction, + starttiming=dummyfunction, + stoptiming=dummyfunction, + elapsedtime=nil, } -directives={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, +directives={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, } -trackers={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, +trackers={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, } -experiments={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, +experiments={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, } storage={ - register=dummyfunction, - shared={}, + register=dummyfunction, + shared={}, } -logs={ - new=dummyreporter, - reporter=dummyreporter, - messenger=dummyreporter, - report=dummyfunction, +logs={ + new=dummyreporter, + reporter=dummyreporter, + messenger=dummyreporter, + report=dummyfunction, } -callbacks={ +callbacks={ register=function(n,f) return callback.register(n,f) end, } -utilities={ - storage={ - allocate=function(t) return t or {} end, - mark=function(t) return t or {} end, - }, +utilities={ + storage={ + allocate=function(t) return t or {} end, + mark=function(t) return t or {} end, + }, } -characters=characters or { - data={} +characters=characters or { + data={} } texconfig.kpse_init=true resolvers=resolvers or {} -local remapper={ - otf="opentype fonts", - ttf="truetype fonts", - ttc="truetype fonts", +local remapper={ + otf="opentype fonts", + ttf="truetype fonts", + ttc="truetype fonts", dfont="truetype fonts", - cid="cid maps", - cidmap="cid maps", - fea="font feature files", + cid="cid maps", + cidmap="cid maps", + fea="font feature files", pfa="type1 fonts", pfb="type1 fonts", } -function resolvers.findfile(name,fileformat) - name=string.gsub(name,"\\","/") - if not fileformat or fileformat=="" then - fileformat=file.suffix(name) - if fileformat=="" then - fileformat="tex" - end - end - fileformat=string.lower(fileformat) - fileformat=remapper[fileformat] or fileformat - local found=kpse.find_file(name,fileformat) - if not found or found=="" then - found=kpse.find_file(name,"other text files") - end - return found +function resolvers.findfile(name,fileformat) + name=string.gsub(name,"\\","/") + if not fileformat or fileformat=="" then + fileformat=file.suffix(name) + if fileformat=="" then + fileformat="tex" + end + end + fileformat=string.lower(fileformat) + fileformat=remapper[fileformat] or fileformat + local found=kpse.find_file(name,fileformat) + if not found or found=="" then + found=kpse.find_file(name,"other text files") + end + return found end resolvers.findbinfile=resolvers.findfile -function resolvers.resolve(s) - return s +function resolvers.resolve(s) + return s end -function resolvers.unresolve(s) - return s +function resolvers.unresolve(s) + return s end caches={} -local writable=nil -local readables={} +local writable=nil +local readables={} local usingjit=jit -if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then - caches.namespace='generic' +if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then + caches.namespace='generic' end do local cachepaths=kpse.expand_path('$TEXMFCACHE') or "" - if cachepaths=="" then - cachepaths=kpse.expand_path('$TEXMFVAR') + if cachepaths=="" then + cachepaths=kpse.expand_path('$TEXMFVAR') end - if cachepaths=="" then - cachepaths=kpse.expand_path('$VARTEXMF') + if 
cachepaths=="" then + cachepaths=kpse.expand_path('$VARTEXMF') end - if cachepaths=="" then - cachepaths="." + if cachepaths=="" then + cachepaths="." end cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":") - for i=1,#cachepaths do - local cachepath=cachepaths[i] - if not lfs.isdir(cachepath) then + for i=1,#cachepaths do + local cachepath=cachepaths[i] + if not lfs.isdir(cachepath) then lfs.mkdirs(cachepath) - if lfs.isdir(cachepath) then - texio.write(string.format("(created cache path: %s)",cachepath)) - end - end - if file.is_writable(cachepath) then - writable=file.join(cachepath,"luatex-cache") - lfs.mkdir(writable) - writable=file.join(writable,caches.namespace) - lfs.mkdir(writable) - break - end - end - for i=1,#cachepaths do - if file.is_readable(cachepaths[i]) then - readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace) - end - end - if not writable then - texio.write_nl("quiting: fix your writable cache path") - os.exit() - elseif #readables==0 then - texio.write_nl("quiting: fix your readable cache path") - os.exit() - elseif #readables==1 and readables[1]==writable then - texio.write(string.format("(using cache: %s)",writable)) - else - texio.write(string.format("(using write cache: %s)",writable)) - texio.write(string.format("(using read cache: %s)",table.concat(readables," "))) - end -end -function caches.getwritablepath(category,subcategory) - local path=file.join(writable,category) - lfs.mkdir(path) - path=file.join(path,subcategory) - lfs.mkdir(path) - return path -end -function caches.getreadablepaths(category,subcategory) - local t={} - for i=1,#readables do - t[i]=file.join(readables[i],category,subcategory) - end - return t -end -local function makefullname(path,name) - if path and path~="" then - return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") - end -end -function caches.is_writable(path,name) - local fullname=makefullname(path,name) - return fullname and file.is_writable(fullname) -end -function caches.loaddata(paths,name) - for i=1,#paths do - local data=false - local luaname,lucname=makefullname(paths[i],name) + if lfs.isdir(cachepath) then + texio.write(string.format("(created cache path: %s)",cachepath)) + end + end + if file.is_writable(cachepath) then + writable=file.join(cachepath,"luatex-cache") + lfs.mkdir(writable) + writable=file.join(writable,caches.namespace) + lfs.mkdir(writable) + break + end + end + for i=1,#cachepaths do + if file.is_readable(cachepaths[i]) then + readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace) + end + end + if not writable then + texio.write_nl("quiting: fix your writable cache path") + os.exit() + elseif #readables==0 then + texio.write_nl("quiting: fix your readable cache path") + os.exit() + elseif #readables==1 and readables[1]==writable then + texio.write(string.format("(using cache: %s)",writable)) + else + texio.write(string.format("(using write cache: %s)",writable)) + texio.write(string.format("(using read cache: %s)",table.concat(readables," "))) + end +end +function caches.getwritablepath(category,subcategory) + local path=file.join(writable,category) + lfs.mkdir(path) + path=file.join(path,subcategory) + lfs.mkdir(path) + return path +end +function caches.getreadablepaths(category,subcategory) + local t={} + for i=1,#readables do + t[i]=file.join(readables[i],category,subcategory) + end + return t +end +local function makefullname(path,name) + if path and path~="" then + 
return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") + end +end +function caches.is_writable(path,name) + local fullname=makefullname(path,name) + return fullname and file.is_writable(fullname) +end +function caches.loaddata(paths,name) + for i=1,#paths do + local data=false + local luaname,lucname=makefullname(paths[i],name) if lucname and lfs.isfile(lucname) then - texio.write(string.format("(load luc: %s)",lucname)) - data=loadfile(lucname) - if data then - data=data() - end - if data then - return data - else - texio.write(string.format("(loading failed: %s)",lucname)) - end - end - if luaname and lfs.isfile(luaname) then - texio.write(string.format("(load lua: %s)",luaname)) - data=loadfile(luaname) - if data then - data=data() - end - if data then - return data - end - end - end -end -function caches.savedata(path,name,data) - local luaname,lucname=makefullname(path,name) - if luaname then - texio.write(string.format("(save: %s)",luaname)) - table.tofile(luaname,data,true,{ reduce=true }) - if lucname and type(caches.compile)=="function" then + texio.write(string.format("(load luc: %s)",lucname)) + data=loadfile(lucname) + if data then + data=data() + end + if data then + return data + else + texio.write(string.format("(loading failed: %s)",lucname)) + end + end + if luaname and lfs.isfile(luaname) then + texio.write(string.format("(load lua: %s)",luaname)) + data=loadfile(luaname) + if data then + data=data() + end + if data then + return data + end + end + end +end +function caches.savedata(path,name,data) + local luaname,lucname=makefullname(path,name) + if luaname then + texio.write(string.format("(save: %s)",luaname)) + table.tofile(luaname,data,true,{ reduce=true }) + if lucname and type(caches.compile)=="function" then os.remove(lucname) - texio.write(string.format("(save: %s)",lucname)) - caches.compile(data,luaname,lucname) - end - end -end -function caches.compile(data,luaname,lucname) - local d=io.loaddata(luaname) - if not d or d=="" then + texio.write(string.format("(save: %s)",lucname)) + caches.compile(data,luaname,lucname) + end + end +end +function caches.compile(data,luaname,lucname) + local d=io.loaddata(luaname) + if not d or d=="" then d=table.serialize(data,true) - end - if d and d~="" then - local f=io.open(lucname,'wb') - if f then - local s=loadstring(d) - if s then - f:write(string.dump(s,true)) - end - f:close() - end - end -end -function table.setmetatableindex(t,f) - setmetatable(t,{ __index=f }) -end + end + if d and d~="" then + local f=io.open(lucname,'wb') + if f then + local s=loadstring(d) + if s then + f:write(string.dump(s,true)) + end + f:close() + end + end +end +function table.setmetatableindex(t,f) + setmetatable(t,{ __index=f }) +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['data-con']={ - version=1.100, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['data-con']={ + version=1.100, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } local format,lower,gsub=string.format,string.lower,string.gsub -local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end) -local 
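In this generic (non-ConTeXt) branch the cache root is derived from $TEXMFCACHE or $TEXMFVAR, and an entry is written as a .lua file with a compiled .luc (or .lub under LuaJIT) sibling. A sketch of how the pair of calls above is meant to be used; the "demo" category and entry names are made up for illustration:

    local wpath = caches.getwritablepath("fonts", "demo")   -- .../luatex-cache/generic/fonts/demo
    caches.savedata(wpath, "demo-entry", { note = "hello" })

    local rpaths = caches.getreadablepaths("fonts", "demo")
    local data   = caches.loaddata(rpaths, "demo-entry")
    print(data and data.note)                                -- hello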
trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end) +local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end) +local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end) local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end) -containers=containers or {} -local containers=containers +containers=containers or {} +local containers=containers containers.usecache=true local report_containers=logs.reporter("resolvers","containers") local allocated={} -local mt={ - __index=function(t,k) - if k=="writable" then - local writable=caches.getwritablepath(t.category,t.subcategory) or { "." } - t.writable=writable - return writable - elseif k=="readables" then - local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." } - t.readables=readables - return readables - end - end, - __storage__=true +local mt={ + __index=function(t,k) + if k=="writable" then + local writable=caches.getwritablepath(t.category,t.subcategory) or { "." } + t.writable=writable + return writable + elseif k=="readables" then + local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." } + t.readables=readables + return readables + end + end, + __storage__=true } -function containers.define(category,subcategory,version,enabled) - if category and subcategory then - local c=allocated[category] - if not c then - c={} - allocated[category]=c - end - local s=c[subcategory] - if not s then - s={ - category=category, - subcategory=subcategory, - storage={}, - enabled=enabled, +function containers.define(category,subcategory,version,enabled) + if category and subcategory then + local c=allocated[category] + if not c then + c={} + allocated[category]=c + end + local s=c[subcategory] + if not s then + s={ + category=category, + subcategory=subcategory, + storage={}, + enabled=enabled, version=version or math.pi, trace=false, - } - setmetatable(s,mt) - c[subcategory]=s - end - return s - end -end -function containers.is_usable(container,name) - return container.enabled and caches and caches.is_writable(container.writable,name) -end -function containers.is_valid(container,name) - if name and name~="" then - local storage=container.storage[name] - return storage and storage.cache_version==container.version - else - return false - end -end -function containers.read(container,name) - local storage=container.storage - local stored=storage[name] - if not stored and container.enabled and caches and containers.usecache then - stored=caches.loaddata(container.readables,name) - if stored and stored.cache_version==container.version then - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","load",container.subcategory,name) - end - else - stored=nil - end - storage[name]=stored - elseif stored then - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","reuse",container.subcategory,name) - end - end - return stored -end -function containers.write(container,name,data) - if data then - data.cache_version=container.version - if container.enabled and caches then - local unique,shared=data.unique,data.shared - data.unique,data.shared=nil,nil - caches.savedata(container.writable,name,data) - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","save",container.subcategory,name) - end - data.unique,data.shared=unique,shared - end - if 
trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","store",container.subcategory,name) - end - container.storage[name]=data - end - return data -end -function containers.content(container,name) - return container.storage[name] + } + setmetatable(s,mt) + c[subcategory]=s + end + return s + end +end +function containers.is_usable(container,name) + return container.enabled and caches and caches.is_writable(container.writable,name) +end +function containers.is_valid(container,name) + if name and name~="" then + local storage=container.storage[name] + return storage and storage.cache_version==container.version + else + return false + end +end +function containers.read(container,name) + local storage=container.storage + local stored=storage[name] + if not stored and container.enabled and caches and containers.usecache then + stored=caches.loaddata(container.readables,name) + if stored and stored.cache_version==container.version then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","load",container.subcategory,name) + end + else + stored=nil + end + storage[name]=stored + elseif stored then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","reuse",container.subcategory,name) + end + end + return stored +end +function containers.write(container,name,data) + if data then + data.cache_version=container.version + if container.enabled and caches then + local unique,shared=data.unique,data.shared + data.unique,data.shared=nil,nil + caches.savedata(container.writable,name,data) + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","save",container.subcategory,name) + end + data.unique,data.shared=unique,shared + end + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","store",container.subcategory,name) + end + container.storage[name]=data + end + return data +end +function containers.content(container,name) + return container.storage[name] end function containers.cleanname(name) return (gsub(lower(name),"[^%w\128-\255]+","-")) -end +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['luatex-fonts-nod']={ - version=1.001, - comment="companion to luatex-fonts.lua", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['luatex-fonts-nod']={ + version=1.001, + comment="companion to luatex-fonts.lua", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() end if tex.attribute[0]~=0 then - texio.write_nl("log","!") - texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") - texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") - texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") + texio.write_nl("log","!") + texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") + texio.write_nl("log","! set to zero. 
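The containers layer couples that cache with a version check: containers.read only returns stored data whose cache_version matches the version passed to containers.define, so stale entries are silently recomputed and rewritten. A hedged sketch; the names below are illustrative, real callers use something like ("fonts","otf",version,true):

    local cache = containers.define("fonts", "demo", 1.001, true)
    local data  = containers.read(cache, "somefont")
    if not data then
        data = { glyphs = 123 }                   -- the expensive analysis would go here
        data = containers.write(cache, "somefont", data)
    end
    print(data.cache_version)                     -- 1.001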
Also, some attributes in the range 1-255 are used for special") + texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") texio.write_nl("log","!") tex.attribute[0]=0 end -attributes=attributes or {} +attributes=attributes or {} attributes.unsetvalue=-0x7FFFFFFF local numbers,last={},127 -attributes.private=attributes.private or function(name) - local number=numbers[name] - if not number then - if last<255 then - last=last+1 - end - number=last - numbers[name]=number - end - return number -end -nodes={} -nodes.pool={} +attributes.private=attributes.private or function(name) + local number=numbers[name] + if not number then + if last<255 then + last=last+1 + end + number=last + numbers[name]=number + end + return number +end +nodes={} +nodes.pool={} nodes.handlers={} -local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end -local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end +local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end +local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" } -nodes.nodecodes=nodecodes -nodes.whatcodes=whatcodes -nodes.whatsitcodes=whatcodes +nodes.nodecodes=nodecodes +nodes.whatcodes=whatcodes +nodes.whatsitcodes=whatcodes nodes.glyphcodes=glyphcodes -local free_node=node.free -local remove_node=node.remove -local new_node=node.new +local free_node=node.free +local remove_node=node.remove +local new_node=node.new local traverse_id=node.traverse_id local math_code=nodecodes.math -nodes.handlers.protectglyphs=node.protect_glyphs +nodes.handlers.protectglyphs=node.protect_glyphs nodes.handlers.unprotectglyphs=node.unprotect_glyphs -function nodes.remove(head,current,free_too) - local t=current - head,current=remove_node(head,current) - if t then - if free_too then - free_node(t) - t=nil - else - t.next,t.prev=nil,nil - end - end - return head,current,t -end -function nodes.delete(head,current) - return nodes.remove(head,current,true) -end -nodes.before=node.insert_before +function nodes.remove(head,current,free_too) + local t=current + head,current=remove_node(head,current) + if t then + if free_too then + free_node(t) + t=nil + else + t.next,t.prev=nil,nil + end + end + return head,current,t +end +function nodes.delete(head,current) + return nodes.remove(head,current,true) +end +nodes.before=node.insert_before nodes.after=node.insert_after -function nodes.pool.kern(k) - local n=new_node("kern",1) - n.kern=k - return n -end -function nodes.endofmath(n) - for n in traverse_id(math_code,n.next) do - return n - end -end +function nodes.pool.kern(k) + local n=new_node("kern",1) + n.kern=k + return n +end +function nodes.endofmath(n) + for n in traverse_id(math_code,n.next) do + return n + end +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['font-ini']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['font-ini']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } local 
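attributes.private hands out one attribute number per name, starting just above the reserved range, and the small node helpers wrap the corresponding node library calls. A sketch that only makes sense inside a LuaTeX run (it needs the node and tex libraries); "myfeature" is a made-up name:

    local a1 = attributes.private("myfeature")
    local a2 = attributes.private("myfeature")
    print(a1 == a2, a1 >= 128)         -- true  true (the same number is reused)

    local k = nodes.pool.kern(65536)   -- a kern node of one point
    print(k.kern)                      -- 65536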
allocate=utilities.storage.allocate local report_defining=logs.reporter("fonts","defining") -fonts=fonts or {} +fonts=fonts or {} local fonts=fonts fonts.hashes={ identifiers=allocate() } -fonts.tables=fonts.tables or {} -fonts.helpers=fonts.helpers or {} +fonts.tables=fonts.tables or {} +fonts.helpers=fonts.helpers or {} fonts.tracers=fonts.tracers or {} fonts.specifiers=fonts.specifiers or {} fonts.analyzers={} -fonts.readers={} -fonts.definers={ methods={} } +fonts.readers={} +fonts.definers={ methods={} } fonts.loggers={ register=function() end } -fontloader.totable=fontloader.to_table +fontloader.totable=fontloader.to_table end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['font-con']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['font-con']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -local next,tostring,rawget=next,tostring,rawget -local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub -local utfbyte=utf.byte -local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy +local next,tostring,rawget=next,tostring,rawget +local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub +local utfbyte=utf.byte +local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy local derivetable=table.derive -local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end) local report_defining=logs.reporter("fonts","defining") -local fonts=fonts -local constructors=fonts.constructors or {} -fonts.constructors=constructors +local fonts=fonts +local constructors=fonts.constructors or {} +fonts.constructors=constructors local handlers=fonts.handlers or {} fonts.handlers=handlers -local allocate=utilities.storage.allocate +local allocate=utilities.storage.allocate local setmetatableindex=table.setmetatableindex -constructors.dontembed=allocate() -constructors.autocleanup=true +constructors.dontembed=allocate() +constructors.autocleanup=true constructors.namemode="fullpath" -constructors.version=1.01 +constructors.version=1.01 constructors.cache=containers.define("fonts","constructors",constructors.version,false) constructors.privateoffset=0xF0000 -constructors.keys={ - properties={ - encodingbytes="number", - embedding="number", - cidinfo={}, - format="string", - fontname="string", - fullname="string", - filename="filename", - psname="string", - name="string", - virtualized="boolean", - hasitalics="boolean", - autoitalicamount="basepoints", - nostackmath="boolean", - noglyphnames="boolean", - mode="string", - hasmath="boolean", - mathitalics="boolean", - textitalics="boolean", - finalized="boolean", - }, - parameters={ - mathsize="number", - scriptpercentage="float", - scriptscriptpercentage="float", - units="cardinal", - designsize="scaledpoints", - 
expansion={ +constructors.keys={ + properties={ + encodingbytes="number", + embedding="number", + cidinfo={}, + format="string", + fontname="string", + fullname="string", + filename="filename", + psname="string", + name="string", + virtualized="boolean", + hasitalics="boolean", + autoitalicamount="basepoints", + nostackmath="boolean", + noglyphnames="boolean", + mode="string", + hasmath="boolean", + mathitalics="boolean", + textitalics="boolean", + finalized="boolean", + }, + parameters={ + mathsize="number", + scriptpercentage="float", + scriptscriptpercentage="float", + units="cardinal", + designsize="scaledpoints", + expansion={ stretch="integerscale", shrink="integerscale", step="integerscale", - auto="boolean", - }, - protrusion={ - auto="boolean", - }, - slantfactor="float", - extendfactor="float", - factor="float", - hfactor="float", - vfactor="float", - size="scaledpoints", - units="scaledpoints", - scaledpoints="scaledpoints", - slantperpoint="scaledpoints", - spacing={ - width="scaledpoints", - stretch="scaledpoints", - shrink="scaledpoints", - extra="scaledpoints", - }, - xheight="scaledpoints", - quad="scaledpoints", - ascender="scaledpoints", - descender="scaledpoints", - synonyms={ - space="spacing.width", - spacestretch="spacing.stretch", - spaceshrink="spacing.shrink", - extraspace="spacing.extra", - x_height="xheight", - space_stretch="spacing.stretch", - space_shrink="spacing.shrink", - extra_space="spacing.extra", - em="quad", - ex="xheight", - slant="slantperpoint", - }, - }, - description={ - width="basepoints", - height="basepoints", - depth="basepoints", - boundingbox={}, - }, - character={ - width="scaledpoints", - height="scaledpoints", - depth="scaledpoints", - italic="scaledpoints", - }, + auto="boolean", + }, + protrusion={ + auto="boolean", + }, + slantfactor="float", + extendfactor="float", + factor="float", + hfactor="float", + vfactor="float", + size="scaledpoints", + units="scaledpoints", + scaledpoints="scaledpoints", + slantperpoint="scaledpoints", + spacing={ + width="scaledpoints", + stretch="scaledpoints", + shrink="scaledpoints", + extra="scaledpoints", + }, + xheight="scaledpoints", + quad="scaledpoints", + ascender="scaledpoints", + descender="scaledpoints", + synonyms={ + space="spacing.width", + spacestretch="spacing.stretch", + spaceshrink="spacing.shrink", + extraspace="spacing.extra", + x_height="xheight", + space_stretch="spacing.stretch", + space_shrink="spacing.shrink", + extra_space="spacing.extra", + em="quad", + ex="xheight", + slant="slantperpoint", + }, + }, + description={ + width="basepoints", + height="basepoints", + depth="basepoints", + boundingbox={}, + }, + character={ + width="scaledpoints", + height="scaledpoints", + depth="scaledpoints", + italic="scaledpoints", + }, } -local designsizes=allocate() -constructors.designsizes=designsizes -local loadedfonts=allocate() +local designsizes=allocate() +constructors.designsizes=designsizes +local loadedfonts=allocate() constructors.loadedfonts=loadedfonts -local factors={ - pt=65536.0, - bp=65781.8, +local factors={ + pt=65536.0, + bp=65781.8, } -function constructors.setfactor(f) - constructors.factor=factors[f or 'pt'] or factors.pt +function constructors.setfactor(f) + constructors.factor=factors[f or 'pt'] or factors.pt end constructors.setfactor() function constructors.scaled(scaledpoints,designsize) - if scaledpoints<0 then - if designsize then - local factor=constructors.factor + if scaledpoints<0 then + if designsize then + local factor=constructors.factor if designsize>factor 
then return (- scaledpoints/1000)*designsize - else - return (- scaledpoints/1000)*designsize*factor - end - else - return (- scaledpoints/1000)*10*factor - end - else - return scaledpoints - end -end -function constructors.cleanuptable(tfmdata) - if constructors.autocleanup and tfmdata.properties.virtualized then - for k,v in next,tfmdata.characters do + else + return (- scaledpoints/1000)*designsize*factor + end + else + return (- scaledpoints/1000)*10*factor + end + else + return scaledpoints + end +end +function constructors.cleanuptable(tfmdata) + if constructors.autocleanup and tfmdata.properties.virtualized then + for k,v in next,tfmdata.characters do if v.commands then v.commands=nil end - end - end + end + end end -function constructors.calculatescale(tfmdata,scaledpoints) - local parameters=tfmdata.parameters - if scaledpoints<0 then +function constructors.calculatescale(tfmdata,scaledpoints) + local parameters=tfmdata.parameters + if scaledpoints<0 then scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize) - end + end return scaledpoints,scaledpoints/(parameters.units or 1000) end -local unscaled={ - ScriptPercentScaleDown=true, - ScriptScriptPercentScaleDown=true, - RadicalDegreeBottomRaisePercent=true +local unscaled={ + ScriptPercentScaleDown=true, + ScriptScriptPercentScaleDown=true, + RadicalDegreeBottomRaisePercent=true } function constructors.assignmathparameters(target,original) - local mathparameters=original.mathparameters - if mathparameters and next(mathparameters) then - local targetparameters=target.parameters - local targetproperties=target.properties - local targetmathparameters={} - local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor - for name,value in next,mathparameters do - if unscaled[name] then - targetmathparameters[name]=value - else - targetmathparameters[name]=value*factor - end - end - if not targetmathparameters.FractionDelimiterSize then - targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size - end - if not mathparameters.FractionDelimiterDisplayStyleSize then - targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size - end - target.mathparameters=targetmathparameters - end + local mathparameters=original.mathparameters + if mathparameters and next(mathparameters) then + local targetparameters=target.parameters + local targetproperties=target.properties + local targetmathparameters={} + local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor + for name,value in next,mathparameters do + if unscaled[name] then + targetmathparameters[name]=value + else + targetmathparameters[name]=value*factor + end + end + if not targetmathparameters.FractionDelimiterSize then + targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size + end + if not mathparameters.FractionDelimiterDisplayStyleSize then + targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size + end + target.mathparameters=targetmathparameters + end end function constructors.beforecopyingcharacters(target,original) end function constructors.aftercopyingcharacters(target,original) end -function constructors.enhanceparameters(parameters) - local xheight=parameters.x_height - local quad=parameters.quad - local space=parameters.space - local stretch=parameters.space_stretch - local shrink=parameters.space_shrink - local extra=parameters.extra_space - local slant=parameters.slant - parameters.xheight=xheight - parameters.spacestretch=stretch - 
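Negative sizes follow the TeX "scaled n" convention: they are read as n/1000 of the design size, with a fallback factor when the design size is missing or implausibly small. A short numeric sketch of constructors.scaled as defined above (sizes in scaled points, 65536sp = 1pt):

    -- design size 10pt (655360sp), asked size "scaled 1200" -> 12pt
    print(constructors.scaled(-1200, 655360))   -- 786432
    -- positive sizes pass through untouched
    print(constructors.scaled(655360))          -- 655360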
parameters.spaceshrink=shrink - parameters.extraspace=extra - parameters.em=quad - parameters.ex=xheight - parameters.slantperpoint=slant - parameters.spacing={ - width=space, - stretch=stretch, - shrink=shrink, - extra=extra, - } -end -function constructors.scale(tfmdata,specification) +function constructors.enhanceparameters(parameters) + local xheight=parameters.x_height + local quad=parameters.quad + local space=parameters.space + local stretch=parameters.space_stretch + local shrink=parameters.space_shrink + local extra=parameters.extra_space + local slant=parameters.slant + parameters.xheight=xheight + parameters.spacestretch=stretch + parameters.spaceshrink=shrink + parameters.extraspace=extra + parameters.em=quad + parameters.ex=xheight + parameters.slantperpoint=slant + parameters.spacing={ + width=space, + stretch=stretch, + shrink=shrink, + extra=extra, + } +end +function constructors.scale(tfmdata,specification) local target={} - if tonumber(specification) then - specification={ size=specification } + if tonumber(specification) then + specification={ size=specification } end - local scaledpoints=specification.size + local scaledpoints=specification.size local relativeid=specification.relativeid - local properties=tfmdata.properties or {} - local goodies=tfmdata.goodies or {} - local resources=tfmdata.resources or {} + local properties=tfmdata.properties or {} + local goodies=tfmdata.goodies or {} + local resources=tfmdata.resources or {} local descriptions=tfmdata.descriptions or {} local characters=tfmdata.characters or {} local changed=tfmdata.changed or {} - local shared=tfmdata.shared or {} - local parameters=tfmdata.parameters or {} + local shared=tfmdata.shared or {} + local parameters=tfmdata.parameters or {} local mathparameters=tfmdata.mathparameters or {} - local targetcharacters={} - local targetdescriptions=derivetable(descriptions) - local targetparameters=derivetable(parameters) - local targetproperties=derivetable(properties) + local targetcharacters={} + local targetdescriptions=derivetable(descriptions) + local targetparameters=derivetable(parameters) + local targetproperties=derivetable(properties) local targetgoodies=goodies - target.characters=targetcharacters - target.descriptions=targetdescriptions + target.characters=targetcharacters + target.descriptions=targetdescriptions target.parameters=targetparameters - target.properties=targetproperties - target.goodies=targetgoodies - target.shared=shared - target.resources=resources + target.properties=targetproperties + target.goodies=targetgoodies + target.shared=shared + target.resources=resources target.unscaled=tfmdata - local mathsize=tonumber(specification.mathsize) or 0 - local textsize=tonumber(specification.textsize) or scaledpoints - local forcedsize=tonumber(parameters.mathsize ) or 0 - local extrafactor=tonumber(specification.factor ) or 1 - if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then - scaledpoints=parameters.scriptpercentage*textsize/100 - elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then - scaledpoints=parameters.scriptscriptpercentage*textsize/100 + local mathsize=tonumber(specification.mathsize) or 0 + local textsize=tonumber(specification.textsize) or scaledpoints + local forcedsize=tonumber(parameters.mathsize ) or 0 + local extrafactor=tonumber(specification.factor ) or 1 + if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then + scaledpoints=parameters.scriptpercentage*textsize/100 + elseif (mathsize==3 or 
forcedsize==3) and parameters.scriptscriptpercentage then + scaledpoints=parameters.scriptscriptpercentage*textsize/100 elseif forcedsize>1000 then - scaledpoints=forcedsize - end + scaledpoints=forcedsize + end targetparameters.mathsize=mathsize targetparameters.textsize=textsize targetparameters.forcedsize=forcedsize targetparameters.extrafactor=extrafactor - local tounicode=resources.tounicode - local defaultwidth=resources.defaultwidth or 0 - local defaultheight=resources.defaultheight or 0 - local defaultdepth=resources.defaultdepth or 0 + local tounicode=resources.tounicode + local defaultwidth=resources.defaultwidth or 0 + local defaultheight=resources.defaultheight or 0 + local defaultdepth=resources.defaultdepth or 0 local units=parameters.units or 1000 - if target.fonts then + if target.fonts then target.fonts=fastcopy(target.fonts) end targetproperties.language=properties.language or "dflt" targetproperties.script=properties.script or "dflt" targetproperties.mode=properties.mode or "base" - local askedscaledpoints=scaledpoints + local askedscaledpoints=scaledpoints local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints) - local hdelta=delta + local hdelta=delta local vdelta=delta target.designsize=parameters.designsize target.units_per_em=units local direction=properties.direction or tfmdata.direction or 0 - target.direction=direction + target.direction=direction properties.direction=direction target.size=scaledpoints - target.encodingbytes=properties.encodingbytes or 1 - target.embedding=properties.embedding or "subset" - target.tounicode=1 - target.cidinfo=properties.cidinfo + target.encodingbytes=properties.encodingbytes or 1 + target.embedding=properties.embedding or "subset" + target.tounicode=1 + target.cidinfo=properties.cidinfo target.format=properties.format local fontname=properties.fontname or tfmdata.fontname local fullname=properties.fullname or tfmdata.fullname @@ -3645,4275 +3645,4275 @@ function constructors.scale(tfmdata,specification) local psname=properties.psname or tfmdata.psname local name=properties.name or tfmdata.name if not psname or psname=="" then - psname=fontname or (fullname and fonts.names.cleanname(fullname)) - end - target.fontname=fontname - target.fullname=fullname - target.filename=filename - target.psname=psname + psname=fontname or (fullname and fonts.names.cleanname(fullname)) + end + target.fontname=fontname + target.fullname=fullname + target.filename=filename + target.psname=psname target.name=name - properties.fontname=fontname - properties.fullname=fullname - properties.filename=filename - properties.psname=psname + properties.fontname=fontname + properties.fullname=fullname + properties.filename=filename + properties.psname=psname properties.name=name - local expansion=parameters.expansion - if expansion then - target.stretch=expansion.stretch - target.shrink=expansion.shrink - target.step=expansion.step - target.auto_expand=expansion.auto - end - local protrusion=parameters.protrusion - if protrusion then - target.auto_protrude=protrusion.auto - end - local extendfactor=parameters.extendfactor or 0 - if extendfactor~=0 and extendfactor~=1 then - hdelta=hdelta*extendfactor + local expansion=parameters.expansion + if expansion then + target.stretch=expansion.stretch + target.shrink=expansion.shrink + target.step=expansion.step + target.auto_expand=expansion.auto + end + local protrusion=parameters.protrusion + if protrusion then + target.auto_protrude=protrusion.auto + end + local 
extendfactor=parameters.extendfactor or 0 + if extendfactor~=0 and extendfactor~=1 then + hdelta=hdelta*extendfactor target.extend=extendfactor*1000 - else + else target.extend=1000 end - local slantfactor=parameters.slantfactor or 0 - if slantfactor~=0 then - target.slant=slantfactor*1000 - else - target.slant=0 - end - targetparameters.factor=delta - targetparameters.hfactor=hdelta - targetparameters.vfactor=vdelta - targetparameters.size=scaledpoints - targetparameters.units=units + local slantfactor=parameters.slantfactor or 0 + if slantfactor~=0 then + target.slant=slantfactor*1000 + else + target.slant=0 + end + targetparameters.factor=delta + targetparameters.hfactor=hdelta + targetparameters.vfactor=vdelta + targetparameters.size=scaledpoints + targetparameters.units=units targetparameters.scaledpoints=askedscaledpoints - local isvirtual=properties.virtualized or tfmdata.type=="virtual" - local hasquality=target.auto_expand or target.auto_protrude - local hasitalics=properties.hasitalics - local autoitalicamount=properties.autoitalicamount - local stackmath=not properties.nostackmath - local nonames=properties.noglyphnames + local isvirtual=properties.virtualized or tfmdata.type=="virtual" + local hasquality=target.auto_expand or target.auto_protrude + local hasitalics=properties.hasitalics + local autoitalicamount=properties.autoitalicamount + local stackmath=not properties.nostackmath + local nonames=properties.noglyphnames local nodemode=properties.mode=="node" - if changed and not next(changed) then - changed=false + if changed and not next(changed) then + changed=false end target.type=isvirtual and "virtual" or "real" target.postprocessors=tfmdata.postprocessors - local targetslant=(parameters.slant or parameters[1] or 0) - local targetspace=(parameters.space or parameters[2] or 0)*hdelta - local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta - local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta - local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta - local targetquad=(parameters.quad or parameters[6] or 0)*hdelta + local targetslant=(parameters.slant or parameters[1] or 0) + local targetspace=(parameters.space or parameters[2] or 0)*hdelta + local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta + local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta + local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta + local targetquad=(parameters.quad or parameters[6] or 0)*hdelta local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta targetparameters.slant=targetslant - targetparameters.space=targetspace - targetparameters.space_stretch=targetspace_stretch - targetparameters.space_shrink=targetspace_shrink - targetparameters.x_height=targetx_height - targetparameters.quad=targetquad + targetparameters.space=targetspace + targetparameters.space_stretch=targetspace_stretch + targetparameters.space_shrink=targetspace_shrink + targetparameters.x_height=targetx_height + targetparameters.quad=targetquad targetparameters.extra_space=targetextra_space - local ascender=parameters.ascender - if ascender then - targetparameters.ascender=delta*ascender - end - local descender=parameters.descender - if descender then - targetparameters.descender=delta*descender + local ascender=parameters.ascender + if ascender then + targetparameters.ascender=delta*ascender + end + local descender=parameters.descender + if descender then + 
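-- Illustrative aside, not part of the original patch: delta (and the hdelta and
-- vdelta derived from it above) is "scaled points per design unit", so metrics
-- stored in design units become TeX dimensions by plain multiplication. For a
-- hypothetical 1000-unit font requested at 10pt:
local delta    = 655360 / 1000   -- 655.36 sp per design unit
local width    = 500 * delta     -- 327680 sp = 5pt for a 500-unit wide glyph
local ascender = 800 * delta     -- 524288 sp = 8pt, as in the ascender case above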
targetparameters.descender=delta*descender end constructors.enhanceparameters(targetparameters) - local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0 - local scaledwidth=defaultwidth*hdelta - local scaledheight=defaultheight*vdelta + local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0 + local scaledwidth=defaultwidth*hdelta + local scaledheight=defaultheight*vdelta local scaleddepth=defaultdepth*vdelta local hasmath=(properties.hasmath or next(mathparameters)) and true - if hasmath then + if hasmath then constructors.assignmathparameters(target,tfmdata) - properties.hasmath=true - target.nomath=false - target.MathConstants=target.mathparameters - else - properties.hasmath=false - target.nomath=true + properties.hasmath=true + target.nomath=false + target.MathConstants=target.mathparameters + else + properties.hasmath=false + target.nomath=true target.mathparameters=nil end - local italickey="italic" + local italickey="italic" local useitalics=true if hasmath then autoitalicamount=false - elseif properties.textitalics then - italickey="italic_correction" - useitalics=false - if properties.delaytextitalics then - autoitalicamount=false - end - end - if trace_defining then - report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", - name,fullname,filename,hdelta,vdelta, - hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled") + elseif properties.textitalics then + italickey="italic_correction" + useitalics=false + if properties.delaytextitalics then + autoitalicamount=false + end + end + if trace_defining then + report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", + name,fullname,filename,hdelta,vdelta, + hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled") end constructors.beforecopyingcharacters(target,tfmdata) local sharedkerns={} - for unicode,character in next,characters do - local chr,description,index,touni + for unicode,character in next,characters do + local chr,description,index,touni if changed then - local c=changed[unicode] - if c then - description=descriptions[c] or descriptions[unicode] or character - character=characters[c] or character - index=description.index or c - if tounicode then + local c=changed[unicode] + if c then + description=descriptions[c] or descriptions[unicode] or character + character=characters[c] or character + index=description.index or c + if tounicode then touni=tounicode[index] if not touni then - local d=descriptions[unicode] or characters[unicode] - local i=d.index or unicode + local d=descriptions[unicode] or characters[unicode] + local i=d.index or unicode touni=tounicode[i] - end - end - else - description=descriptions[unicode] or character - index=description.index or unicode - if tounicode then + end + end + else + description=descriptions[unicode] or character + index=description.index or unicode + if tounicode then touni=tounicode[index] - end - end - else - description=descriptions[unicode] or character - index=description.index or unicode - if tounicode then + end + end + else + description=descriptions[unicode] or character + index=description.index or unicode + if tounicode then touni=tounicode[index] - end - end - local width=description.width - local height=description.height - local depth=description.depth - if width then width=hdelta*width else width=scaledwidth end + end + end + local width=description.width + local height=description.height + local 
depth=description.depth + if width then width=hdelta*width else width=scaledwidth end if height then height=vdelta*height else height=scaledheight end - if depth and depth~=0 then - depth=delta*depth - if nonames then - chr={ - index=index, - height=height, - depth=depth, - width=width, - } - else - chr={ - name=description.name, - index=index, - height=height, - depth=depth, - width=width, - } - end + if depth and depth~=0 then + depth=delta*depth + if nonames then + chr={ + index=index, + height=height, + depth=depth, + width=width, + } + else + chr={ + name=description.name, + index=index, + height=height, + depth=depth, + width=width, + } + end else - if nonames then - chr={ - index=index, - height=height, - width=width, - } - else - chr={ - name=description.name, - index=index, - height=height, - width=width, - } - end - end - if touni then - chr.tounicode=touni - end + if nonames then + chr={ + index=index, + height=height, + width=width, + } + else + chr={ + name=description.name, + index=index, + height=height, + width=width, + } + end + end + if touni then + chr.tounicode=touni + end if hasquality then - local ve=character.expansion_factor - if ve then + local ve=character.expansion_factor + if ve then chr.expansion_factor=ve*1000 - end - local vl=character.left_protruding - if vl then - chr.left_protruding=protrusionfactor*width*vl - end - local vr=character.right_protruding - if vr then - chr.right_protruding=protrusionfactor*width*vr - end - end - if autoitalicamount then - local vi=description.italic - if not vi then - local vi=description.boundingbox[3]-description.width+autoitalicamount + end + local vl=character.left_protruding + if vl then + chr.left_protruding=protrusionfactor*width*vl + end + local vr=character.right_protruding + if vr then + chr.right_protruding=protrusionfactor*width*vr + end + end + if autoitalicamount then + local vi=description.italic + if not vi then + local vi=description.boundingbox[3]-description.width+autoitalicamount if vi>0 then - chr[italickey]=vi*hdelta - end - elseif vi~=0 then - chr[italickey]=vi*hdelta - end - elseif hasitalics then - local vi=description.italic - if vi and vi~=0 then - chr[italickey]=vi*hdelta - end + chr[italickey]=vi*hdelta + end + elseif vi~=0 then + chr[italickey]=vi*hdelta + end + elseif hasitalics then + local vi=description.italic + if vi and vi~=0 then + chr[italickey]=vi*hdelta + end end if hasmath then - local vn=character.next - if vn then - chr.next=vn - else - local vv=character.vert_variants - if vv then - local t={} - for i=1,#vv do - local vvi=vv[i] - t[i]={ - ["start"]=(vvi["start"] or 0)*vdelta, - ["end"]=(vvi["end"] or 0)*vdelta, - ["advance"]=(vvi["advance"] or 0)*vdelta, - ["extender"]=vvi["extender"], - ["glyph"]=vvi["glyph"], - } - end - chr.vert_variants=t - else - local hv=character.horiz_variants - if hv then - local t={} - for i=1,#hv do - local hvi=hv[i] - t[i]={ - ["start"]=(hvi["start"] or 0)*hdelta, - ["end"]=(hvi["end"] or 0)*hdelta, - ["advance"]=(hvi["advance"] or 0)*hdelta, - ["extender"]=hvi["extender"], - ["glyph"]=hvi["glyph"], - } - end - chr.horiz_variants=t - end - end - end - local va=character.top_accent - if va then - chr.top_accent=vdelta*va - end - if stackmath then + local vn=character.next + if vn then + chr.next=vn + else + local vv=character.vert_variants + if vv then + local t={} + for i=1,#vv do + local vvi=vv[i] + t[i]={ + ["start"]=(vvi["start"] or 0)*vdelta, + ["end"]=(vvi["end"] or 0)*vdelta, + ["advance"]=(vvi["advance"] or 0)*vdelta, + 
["extender"]=vvi["extender"], + ["glyph"]=vvi["glyph"], + } + end + chr.vert_variants=t + else + local hv=character.horiz_variants + if hv then + local t={} + for i=1,#hv do + local hvi=hv[i] + t[i]={ + ["start"]=(hvi["start"] or 0)*hdelta, + ["end"]=(hvi["end"] or 0)*hdelta, + ["advance"]=(hvi["advance"] or 0)*hdelta, + ["extender"]=hvi["extender"], + ["glyph"]=hvi["glyph"], + } + end + chr.horiz_variants=t + end + end + end + local va=character.top_accent + if va then + chr.top_accent=vdelta*va + end + if stackmath then local mk=character.mathkerns - if mk then - local kerns={} - local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.top_right=k end - local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.top_left=k end - local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.bottom_left=k end - local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.bottom_right=k end + if mk then + local kerns={} + local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.top_right=k end + local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.top_left=k end + local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.bottom_left=k end + local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.bottom_right=k end chr.mathkern=kerns - end - end - end - if not nodemode then - local vk=character.kerns - if vk then - local s=sharedkerns[vk] - if not s then - s={} - for k,v in next,vk do s[k]=v*hdelta end - sharedkerns[vk]=s - end - chr.kerns=s - end - local vl=character.ligatures - if vl then - if true then + end + end + end + if not nodemode then + local vk=character.kerns + if vk then + local s=sharedkerns[vk] + if not s then + s={} + for k,v in next,vk do s[k]=v*hdelta end + sharedkerns[vk]=s + end + chr.kerns=s + end + local vl=character.ligatures + if vl then + if true then chr.ligatures=vl - else - local tt={} - for i,l in next,vl do - tt[i]=l - end - chr.ligatures=tt - end - end - end - if isvirtual then - local vc=character.commands + else + local tt={} + for i,l in next,vl do + tt[i]=l + end + chr.ligatures=tt + end + end + end + if isvirtual then + local vc=character.commands if vc then - local ok=false - for i=1,#vc do - local key=vc[i][1] - if key=="right" or key=="down" then - ok=true - break - end - end - if ok then - local tt={} - for i=1,#vc do - local ivc=vc[i] - local key=ivc[1] - if key=="right" then - tt[i]={ key,ivc[2]*hdelta } - elseif key=="down" then - tt[i]={ key,ivc[2]*vdelta } - elseif key=="rule" then - tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta } + local ok=false + for i=1,#vc do + local key=vc[i][1] + if key=="right" or key=="down" then + ok=true + break + end + end + if ok then + local tt={} + for i=1,#vc do + local ivc=vc[i] + local key=ivc[1] + if key=="right" then + tt[i]={ key,ivc[2]*hdelta } + elseif key=="down" then + tt[i]={ key,ivc[2]*vdelta } + elseif key=="rule" then + tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta } else tt[i]=ivc - 
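-- Illustrative aside, not part of the original patch: the virtual-font branch
-- here rescales positional commands, "right" by hdelta, "down" by vdelta, and a
-- "rule" (height, width) by vdelta and hdelta; anything else is copied as is.
-- Assuming offsets given in design units and hdelta = vdelta = 655.36:
local vc = { { "right", 100 }, { "down", -50 }, { "rule", 40, 400 }, { "char", 65 } }
-- scaled result: { { "right", 65536 }, { "down", -32768 },
--                  { "rule", 26214.4, 262144 }, { "char", 65 } }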
end - end - chr.commands=tt - else - chr.commands=vc - end - chr.index=nil - end - end - targetcharacters[unicode]=chr + end + end + chr.commands=tt + else + chr.commands=vc + end + chr.index=nil + end + end + targetcharacters[unicode]=chr end constructors.aftercopyingcharacters(target,tfmdata) - return target + return target end -function constructors.finalize(tfmdata) - if tfmdata.properties and tfmdata.properties.finalized then - return +function constructors.finalize(tfmdata) + if tfmdata.properties and tfmdata.properties.finalized then + return end - if not tfmdata.characters then - return nil + if not tfmdata.characters then + return nil end - if not tfmdata.goodies then + if not tfmdata.goodies then tfmdata.goodies={} end - local parameters=tfmdata.parameters - if not parameters then - return nil + local parameters=tfmdata.parameters + if not parameters then + return nil end - if not parameters.expansion then - parameters.expansion={ - stretch=tfmdata.stretch or 0, - shrink=tfmdata.shrink or 0, - step=tfmdata.step or 0, - auto=tfmdata.auto_expand or false, - } + if not parameters.expansion then + parameters.expansion={ + stretch=tfmdata.stretch or 0, + shrink=tfmdata.shrink or 0, + step=tfmdata.step or 0, + auto=tfmdata.auto_expand or false, + } end - if not parameters.protrusion then - parameters.protrusion={ - auto=auto_protrude - } + if not parameters.protrusion then + parameters.protrusion={ + auto=auto_protrude + } end - if not parameters.size then - parameters.size=tfmdata.size + if not parameters.size then + parameters.size=tfmdata.size end - if not parameters.extendfactor then - parameters.extendfactor=tfmdata.extend or 0 + if not parameters.extendfactor then + parameters.extendfactor=tfmdata.extend or 0 end - if not parameters.slantfactor then - parameters.slantfactor=tfmdata.slant or 0 + if not parameters.slantfactor then + parameters.slantfactor=tfmdata.slant or 0 end - if not parameters.designsize then - parameters.designsize=tfmdata.designsize or 655360 + if not parameters.designsize then + parameters.designsize=tfmdata.designsize or 655360 end - if not parameters.units then - parameters.units=tfmdata.units_per_em or 1000 + if not parameters.units then + parameters.units=tfmdata.units_per_em or 1000 end - if not tfmdata.descriptions then + if not tfmdata.descriptions then local descriptions={} - setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end) - tfmdata.descriptions=descriptions - end - local properties=tfmdata.properties - if not properties then - properties={} - tfmdata.properties=properties - end - if not properties.virtualized then - properties.virtualized=tfmdata.type=="virtual" - end - if not tfmdata.properties then - tfmdata.properties={ - fontname=tfmdata.fontname, - filename=tfmdata.filename, - fullname=tfmdata.fullname, - name=tfmdata.name, + setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end) + tfmdata.descriptions=descriptions + end + local properties=tfmdata.properties + if not properties then + properties={} + tfmdata.properties=properties + end + if not properties.virtualized then + properties.virtualized=tfmdata.type=="virtual" + end + if not tfmdata.properties then + tfmdata.properties={ + fontname=tfmdata.fontname, + filename=tfmdata.filename, + fullname=tfmdata.fullname, + name=tfmdata.name, psname=tfmdata.psname, - encodingbytes=tfmdata.encodingbytes or 1, - embedding=tfmdata.embedding or "subset", - tounicode=tfmdata.tounicode or 1, - cidinfo=tfmdata.cidinfo or nil, - format=tfmdata.format or 
"type1", - direction=tfmdata.direction or 0, - } - end - if not tfmdata.resources then - tfmdata.resources={} - end - if not tfmdata.shared then - tfmdata.shared={} - end - if not properties.hasmath then - properties.hasmath=not tfmdata.nomath - end - tfmdata.MathConstants=nil + encodingbytes=tfmdata.encodingbytes or 1, + embedding=tfmdata.embedding or "subset", + tounicode=tfmdata.tounicode or 1, + cidinfo=tfmdata.cidinfo or nil, + format=tfmdata.format or "type1", + direction=tfmdata.direction or 0, + } + end + if not tfmdata.resources then + tfmdata.resources={} + end + if not tfmdata.shared then + tfmdata.shared={} + end + if not properties.hasmath then + properties.hasmath=not tfmdata.nomath + end + tfmdata.MathConstants=nil tfmdata.postprocessors=nil - tfmdata.fontname=nil - tfmdata.filename=nil - tfmdata.fullname=nil + tfmdata.fontname=nil + tfmdata.filename=nil + tfmdata.fullname=nil tfmdata.name=nil tfmdata.psname=nil - tfmdata.encodingbytes=nil - tfmdata.embedding=nil - tfmdata.tounicode=nil - tfmdata.cidinfo=nil - tfmdata.format=nil - tfmdata.direction=nil - tfmdata.type=nil - tfmdata.nomath=nil + tfmdata.encodingbytes=nil + tfmdata.embedding=nil + tfmdata.tounicode=nil + tfmdata.cidinfo=nil + tfmdata.format=nil + tfmdata.direction=nil + tfmdata.type=nil + tfmdata.nomath=nil tfmdata.designsize=nil - tfmdata.size=nil - tfmdata.stretch=nil - tfmdata.shrink=nil - tfmdata.step=nil - tfmdata.auto_expand=nil - tfmdata.auto_protrude=nil - tfmdata.extend=nil - tfmdata.slant=nil + tfmdata.size=nil + tfmdata.stretch=nil + tfmdata.shrink=nil + tfmdata.step=nil + tfmdata.auto_expand=nil + tfmdata.auto_protrude=nil + tfmdata.extend=nil + tfmdata.slant=nil tfmdata.units_per_em=nil properties.finalized=true - return tfmdata + return tfmdata end -local hashmethods={} +local hashmethods={} constructors.hashmethods=hashmethods function constructors.hashfeatures(specification) - local features=specification.features - if features then - local t,tn={},0 - for category,list in next,features do - if next(list) then - local hasher=hashmethods[category] - if hasher then - local hash=hasher(list) - if hash then - tn=tn+1 - t[tn]=category..":"..hash - end - end - end - end - if tn>0 then - return concat(t," & ") - end - end - return "unknown" -end -hashmethods.normal=function(list) - local s={} - local n=0 - for k,v in next,list do + local features=specification.features + if features then + local t,tn={},0 + for category,list in next,features do + if next(list) then + local hasher=hashmethods[category] + if hasher then + local hash=hasher(list) + if hash then + tn=tn+1 + t[tn]=category..":"..hash + end + end + end + end + if tn>0 then + return concat(t," & ") + end + end + return "unknown" +end +hashmethods.normal=function(list) + local s={} + local n=0 + for k,v in next,list do if not k then elseif k=="number" or k=="features" then - else - n=n+1 - s[n]=k - end - end - if n>0 then - sort(s) - for i=1,n do - local k=s[i] - s[i]=k..'='..tostring(list[k]) - end - return concat(s,"+") - end -end -function constructors.hashinstance(specification,force) - local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks - if force or not hash then - hash=constructors.hashfeatures(specification) - specification.hash=hash - end - if size<1000 and designsizes[hash] then - size=math.round(constructors.scaled(size,designsizes[hash])) - specification.size=size - end - if fallbacks then - return hash..' @ '..tostring(size)..' @ '..fallbacks - else - return hash..' 
@ '..tostring(size) + else + n=n+1 + s[n]=k + end + end + if n>0 then + sort(s) + for i=1,n do + local k=s[i] + s[i]=k..'='..tostring(list[k]) + end + return concat(s,"+") + end +end +function constructors.hashinstance(specification,force) + local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks + if force or not hash then + hash=constructors.hashfeatures(specification) + specification.hash=hash + end + if size<1000 and designsizes[hash] then + size=math.round(constructors.scaled(size,designsizes[hash])) + specification.size=size + end + if fallbacks then + return hash..' @ '..tostring(size)..' @ '..fallbacks + else + return hash..' @ '..tostring(size) end end function constructors.setname(tfmdata,specification) if constructors.namemode=="specification" then - local specname=specification.specification - if specname then - tfmdata.properties.name=specname - if trace_defining then - report_otf("overloaded fontname %a",specname) - end - end - end -end -function constructors.checkedfilename(data) - local foundfilename=data.foundfilename - if not foundfilename then - local askedfilename=data.filename or "" - if askedfilename~="" then + local specname=specification.specification + if specname then + tfmdata.properties.name=specname + if trace_defining then + report_otf("overloaded fontname %a",specname) + end + end + end +end +function constructors.checkedfilename(data) + local foundfilename=data.foundfilename + if not foundfilename then + local askedfilename=data.filename or "" + if askedfilename~="" then askedfilename=resolvers.resolve(askedfilename) - foundfilename=resolvers.findbinfile(askedfilename,"") or "" - if foundfilename=="" then - report_defining("source file %a is not found",askedfilename) - foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or "" - if foundfilename~="" then - report_defining("using source file %a due to cache mismatch",foundfilename) - end - end - end - data.foundfilename=foundfilename - end - return foundfilename -end -local formats=allocate() + foundfilename=resolvers.findbinfile(askedfilename,"") or "" + if foundfilename=="" then + report_defining("source file %a is not found",askedfilename) + foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or "" + if foundfilename~="" then + report_defining("using source file %a due to cache mismatch",foundfilename) + end + end + end + data.foundfilename=foundfilename + end + return foundfilename +end +local formats=allocate() fonts.formats=formats -setmetatableindex(formats,function(t,k) - local l=lower(k) - if rawget(t,k) then - t[k]=l - return l - end - return rawget(t,file.suffix(l)) +setmetatableindex(formats,function(t,k) + local l=lower(k) + if rawget(t,k) then + t[k]=l + return l + end + return rawget(t,file.suffix(l)) end) local locations={} -local function setindeed(mode,target,group,name,action,position) - local t=target[mode] - if not t then - report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode) - os.exit() +local function setindeed(mode,target,group,name,action,position) + local t=target[mode] + if not t then + report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode) + os.exit() elseif position then - insert(t,position,{ name=name,action=action }) - else - for i=1,#t do - local ti=t[i] - if ti.name==name then - ti.action=action - return - end - end - insert(t,{ name=name,action=action }) - end -end -local function set(group,name,target,source) - target=target[group] - if not 
target then - report_defining("fatal target error in setting feature %a, group %a",name,group) - os.exit() - end - local source=source[group] - if not source then - report_defining("fatal source error in setting feature %a, group %a",name,group) - os.exit() - end - local node=source.node - local base=source.base - local position=source.position - if node then - setindeed("node",target,group,name,node,position) - end - if base then - setindeed("base",target,group,name,base,position) - end -end -local function register(where,specification) - local name=specification.name - if name and name~="" then - local default=specification.default - local description=specification.description - local initializers=specification.initializers - local processors=specification.processors - local manipulators=specification.manipulators - local modechecker=specification.modechecker - if default then - where.defaults[name]=default - end - if description and description~="" then - where.descriptions[name]=description - end - if initializers then - set('initializers',name,where,specification) - end - if processors then - set('processors',name,where,specification) - end - if manipulators then - set('manipulators',name,where,specification) - end - if modechecker then - where.modechecker=modechecker - end - end + insert(t,position,{ name=name,action=action }) + else + for i=1,#t do + local ti=t[i] + if ti.name==name then + ti.action=action + return + end + end + insert(t,{ name=name,action=action }) + end +end +local function set(group,name,target,source) + target=target[group] + if not target then + report_defining("fatal target error in setting feature %a, group %a",name,group) + os.exit() + end + local source=source[group] + if not source then + report_defining("fatal source error in setting feature %a, group %a",name,group) + os.exit() + end + local node=source.node + local base=source.base + local position=source.position + if node then + setindeed("node",target,group,name,node,position) + end + if base then + setindeed("base",target,group,name,base,position) + end +end +local function register(where,specification) + local name=specification.name + if name and name~="" then + local default=specification.default + local description=specification.description + local initializers=specification.initializers + local processors=specification.processors + local manipulators=specification.manipulators + local modechecker=specification.modechecker + if default then + where.defaults[name]=default + end + if description and description~="" then + where.descriptions[name]=description + end + if initializers then + set('initializers',name,where,specification) + end + if processors then + set('processors',name,where,specification) + end + if manipulators then + set('manipulators',name,where,specification) + end + if modechecker then + where.modechecker=modechecker + end + end end constructors.registerfeature=register -function constructors.getfeatureaction(what,where,mode,name) - what=handlers[what].features - if what then - where=what[where] - if where then - mode=where[mode] - if mode then - for i=1,#mode do - local m=mode[i] - if m.name==name then - return m.action - end - end - end - end - end +function constructors.getfeatureaction(what,where,mode,name) + what=handlers[what].features + if what then + where=what[where] + if where then + mode=where[mode] + if mode then + for i=1,#mode do + local m=mode[i] + if m.name==name then + return m.action + end + end + end + end + end end function constructors.newhandler(what) - 
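-- Illustrative aside, not part of the original patch: register above is normally
-- reached through the per-handler features table built by constructors.newfeatures
-- (defined a little further down). A hypothetical "demo" feature for the otf
-- handler would be hooked in like this:
local otffeatures = fonts.constructors.newfeatures("otf")
otffeatures.register {
  name         = "demo",
  description  = "hypothetical demo feature",
  default      = false,
  initializers = {
    base = function(tfmdata, value, features) end, -- adjust tfmdata in base mode
    node = function(tfmdata, value, features) end, -- adjust tfmdata in node mode
  },
}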
local handler=handlers[what] - if not handler then - handler={} - handlers[what]=handler - end - return handler + local handler=handlers[what] + if not handler then + handler={} + handlers[what]=handler + end + return handler end function constructors.newfeatures(what) - local handler=handlers[what] - local features=handler.features - if not features then + local handler=handlers[what] + local features=handler.features + if not features then local tables=handler.tables local statistics=handler.statistics - features=allocate { - defaults={}, - descriptions=tables and tables.features or {}, - used=statistics and statistics.usedfeatures or {}, - initializers={ base={},node={} }, - processors={ base={},node={} }, - manipulators={ base={},node={} }, - } - features.register=function(specification) return register(features,specification) end + features=allocate { + defaults={}, + descriptions=tables and tables.features or {}, + used=statistics and statistics.usedfeatures or {}, + initializers={ base={},node={} }, + processors={ base={},node={} }, + manipulators={ base={},node={} }, + } + features.register=function(specification) return register(features,specification) end handler.features=features - end - return features + end + return features end -function constructors.checkedfeatures(what,features) - local defaults=handlers[what].features.defaults - if features and next(features) then +function constructors.checkedfeatures(what,features) + local defaults=handlers[what].features.defaults + if features and next(features) then features=fastcopy(features) - for key,value in next,defaults do - if features[key]==nil then - features[key]=value - end - end - return features - else + for key,value in next,defaults do + if features[key]==nil then + features[key]=value + end + end + return features + else return fastcopy(defaults) - end + end end -function constructors.initializefeatures(what,tfmdata,features,trace,report) - if features and next(features) then +function constructors.initializefeatures(what,tfmdata,features,trace,report) + if features and next(features) then local properties=tfmdata.properties or {} - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatinitializers=whatfeatures.initializers + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatinitializers=whatfeatures.initializers local whatmodechecker=whatfeatures.modechecker - local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base" + local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base" properties.mode=mode features.mode=mode - local done={} - while true do - local redo=false - local initializers=whatfeatures.initializers[mode] - if initializers then - for i=1,#initializers do - local step=initializers[i] + local done={} + while true do + local redo=false + local initializers=whatfeatures.initializers[mode] + if initializers then + for i=1,#initializers do + local step=initializers[i] local feature=step.name - local value=features[feature] + local value=features[feature] if not value then elseif done[feature] then - else - local action=step.action - if trace then - report("initializing feature %a to %a for mode %a for font %a",feature, - value,mode,tfmdata.properties.fullname) - end + else + local action=step.action + if trace then + report("initializing feature %a to %a for mode %a for font %a",feature, + 
value,mode,tfmdata.properties.fullname) + end action(tfmdata,value,features) - if mode~=properties.mode or mode~=features.mode then - if whatmodechecker then + if mode~=properties.mode or mode~=features.mode then + if whatmodechecker then properties.mode=whatmodechecker(tfmdata,features,properties.mode) - features.mode=properties.mode - end - if mode~=properties.mode then - mode=properties.mode - redo=true - end - end - done[feature]=true - end - if redo then - break - end - end - if not redo then - break - end - else - break - end - end + features.mode=properties.mode + end + if mode~=properties.mode then + mode=properties.mode + redo=true + end + end + done[feature]=true + end + if redo then + break + end + end + if not redo then + break + end + else + break + end + end properties.mode=mode - return true - else - return false - end -end -function constructors.collectprocessors(what,tfmdata,features,trace,report) - local processes,nofprocesses={},0 - if features and next(features) then - local properties=tfmdata.properties - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatprocessors=whatfeatures.processors - local processors=whatprocessors[properties.mode] - if processors then - for i=1,#processors do - local step=processors[i] - local feature=step.name - if features[feature] then - local action=step.action - if trace then - report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) - end - if action then - nofprocesses=nofprocesses+1 - processes[nofprocesses]=action - end - end - end - elseif trace then - report("no feature processors for mode %a for font %a",mode,tfmdata.properties.fullname) - end - end - return processes -end -function constructors.applymanipulators(what,tfmdata,features,trace,report) - if features and next(features) then - local properties=tfmdata.properties - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatmanipulators=whatfeatures.manipulators - local manipulators=whatmanipulators[properties.mode] - if manipulators then - for i=1,#manipulators do - local step=manipulators[i] - local feature=step.name - local value=features[feature] - if value then - local action=step.action - if trace then - report("applying feature manipulator %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) - end - if action then - action(tfmdata,feature,value) - end - end - end - end - end -end + return true + else + return false + end +end +function constructors.collectprocessors(what,tfmdata,features,trace,report) + local processes,nofprocesses={},0 + if features and next(features) then + local properties=tfmdata.properties + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatprocessors=whatfeatures.processors + local processors=whatprocessors[properties.mode] + if processors then + for i=1,#processors do + local step=processors[i] + local feature=step.name + if features[feature] then + local action=step.action + if trace then + report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) + end + if action then + nofprocesses=nofprocesses+1 + processes[nofprocesses]=action + end + end + end + elseif trace then + report("no feature processors for mode %a for font %a",mode,tfmdata.properties.fullname) + end + end + return processes +end +function constructors.applymanipulators(what,tfmdata,features,trace,report) + if features and next(features) then + local 
properties=tfmdata.properties + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatmanipulators=whatfeatures.manipulators + local manipulators=whatmanipulators[properties.mode] + if manipulators then + for i=1,#manipulators do + local step=manipulators[i] + local feature=step.name + local value=features[feature] + if value then + local action=step.action + if trace then + report("applying feature manipulator %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) + end + if action then + action(tfmdata,feature,value) + end + end + end + end + end +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['luatex-font-enc']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['luatex-font-enc']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() end -local fonts=fonts -fonts.encodings={} +local fonts=fonts +fonts.encodings={} fonts.encodings.agl={} -setmetatable(fonts.encodings.agl,{ __index=function(t,k) - if k=="unicodes" then - texio.write(" ") - local unicodes=dofile(resolvers.findfile("font-age.lua")) - fonts.encodings.agl={ unicodes=unicodes } - return unicodes - else - return nil - end +setmetatable(fonts.encodings.agl,{ __index=function(t,k) + if k=="unicodes" then + texio.write(" ") + local unicodes=dofile(resolvers.findfile("font-age.lua")) + fonts.encodings.agl={ unicodes=unicodes } + return unicodes + else + return nil + end end }) end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['font-cid']={ - version=1.001, - comment="companion to font-otf.lua (cidmaps)", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['font-cid']={ + version=1.001, + comment="companion to font-otf.lua (cidmaps)", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -local format,match,lower=string.format,string.match,string.lower -local tonumber=tonumber +local format,match,lower=string.format,string.match,string.lower +local tonumber=tonumber local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match local fonts,logs,trackers=fonts,logs,trackers local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) local report_otf=logs.reporter("fonts","otf loading") -local cid={} +local cid={} fonts.cid=cid -local cidmap={} +local cidmap={} local cidmax=10 -local number=C(R("09","af","AF")^1) -local space=S(" \n\r\t") -local spaces=space^0 -local period=P(".") -local periods=period*period +local number=C(R("09","af","AF")^1) +local space=S(" \n\r\t") +local spaces=space^0 +local period=P(".") +local periods=period*period local name=P("/")*C((1-space)^1) local unicodes,names={},{} -local function do_one(a,b) - 
unicodes[tonumber(a)]=tonumber(b,16) -end -local function do_range(a,b,c) - c=tonumber(c,16) - for i=tonumber(a),tonumber(b) do - unicodes[i]=c - c=c+1 - end -end -local function do_name(a,b) - names[tonumber(a)]=b -end -local grammar=P { "start", - start=number*spaces*number*V("series"), - series=(spaces*(V("one")+V("range")+V("named")))^1, - one=(number*spaces*number)/do_one, - range=(number*periods*number*spaces*number)/do_range, - named=(number*spaces*name)/do_name +local function do_one(a,b) + unicodes[tonumber(a)]=tonumber(b,16) +end +local function do_range(a,b,c) + c=tonumber(c,16) + for i=tonumber(a),tonumber(b) do + unicodes[i]=c + c=c+1 + end +end +local function do_name(a,b) + names[tonumber(a)]=b +end +local grammar=P { "start", + start=number*spaces*number*V("series"), + series=(spaces*(V("one")+V("range")+V("named")))^1, + one=(number*spaces*number)/do_one, + range=(number*periods*number*spaces*number)/do_range, + named=(number*spaces*name)/do_name } -local function loadcidfile(filename) - local data=io.loaddata(filename) - if data then - unicodes,names={},{} - lpegmatch(grammar,data) - local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$") - return { - supplement=supplement, - registry=registry, - ordering=ordering, - filename=filename, - unicodes=unicodes, - names=names - } - end +local function loadcidfile(filename) + local data=io.loaddata(filename) + if data then + unicodes,names={},{} + lpegmatch(grammar,data) + local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$") + return { + supplement=supplement, + registry=registry, + ordering=ordering, + filename=filename, + unicodes=unicodes, + names=names + } + end end cid.loadfile=loadcidfile local template="%s-%s-%s.cidmap" -local function locate(registry,ordering,supplement) - local filename=format(template,registry,ordering,supplement) - local hashname=lower(filename) - local found=cidmap[hashname] - if not found then - if trace_loading then - report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename) - end - local fullname=resolvers.findfile(filename,'cid') or "" - if fullname~="" then - found=loadcidfile(fullname) - if found then - if trace_loading then - report_otf("using cidmap file %a",filename) - end - cidmap[hashname]=found - found.usedname=file.basename(filename) - end - end - end - return found -end -function cid.getmap(specification) - if not specification then - report_otf("invalid cidinfo specification, table expected") - return - end - local registry=specification.registry - local ordering=specification.ordering +local function locate(registry,ordering,supplement) + local filename=format(template,registry,ordering,supplement) + local hashname=lower(filename) + local found=cidmap[hashname] + if not found then + if trace_loading then + report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename) + end + local fullname=resolvers.findfile(filename,'cid') or "" + if fullname~="" then + found=loadcidfile(fullname) + if found then + if trace_loading then + report_otf("using cidmap file %a",filename) + end + cidmap[hashname]=found + found.usedname=file.basename(filename) + end + end + end + return found +end +function cid.getmap(specification) + if not specification then + report_otf("invalid cidinfo specification, table expected") + return + end + local registry=specification.registry + local ordering=specification.ordering local 
supplement=specification.supplement - local filename=format(registry,ordering,supplement) - local found=cidmap[lower(filename)] - if found then - return found - end - if trace_loading then - report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement) - end - found=locate(registry,ordering,supplement) - if not found then - local supnum=tonumber(supplement) + local filename=format(registry,ordering,supplement) + local found=cidmap[lower(filename)] + if found then + return found + end + if trace_loading then + report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement) + end + found=locate(registry,ordering,supplement) + if not found then + local supnum=tonumber(supplement) local cidnum=nil - if supnum0 then - for s=supnum-1,0,-1 do - local c=locate(registry,ordering,s) - if c then - found,cidnum=c,s - break - end - end - end - registry=lower(registry) - ordering=lower(ordering) - if found and cidnum>0 then - for s=0,cidnum-1 do - local filename=format(template,registry,ordering,s) - if not cidmap[filename] then - cidmap[filename]=found - end - end - end - end - return found -end + if supnum0 then + for s=supnum-1,0,-1 do + local c=locate(registry,ordering,s) + if c then + found,cidnum=c,s + break + end + end + end + registry=lower(registry) + ordering=lower(ordering) + if found and cidnum>0 then + for s=0,cidnum-1 do + local filename=format(template,registry,ordering,s) + if not cidmap[filename] then + cidmap[filename]=found + end + end + end + end + return found +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['font-map']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['font-map']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } local tonumber=tonumber -local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower -local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match -local utfbyte=utf.byte +local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower +local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match +local utfbyte=utf.byte local floor=math.floor -local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end) +local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end) local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_unimapping=v end) local report_fonts=logs.reporter("fonts","loading") -local fonts=fonts or {} -local mappings=fonts.mappings or {} +local fonts=fonts or {} +local mappings=fonts.mappings or {} fonts.mappings=mappings local function loadlumtable(filename) - local lumname=file.replacesuffix(file.basename(filename),"lum") - local lumfile=resolvers.findfile(lumname,"map") or "" - if lumfile~="" and lfs.isfile(lumfile) then - if trace_loading or trace_mapping then - report_fonts("loading map table %a",lumfile) - end - lumunic=dofile(lumfile) - return lumunic,lumfile - end -end -local hex=R("AF","09") -local 
hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end -local hexsix=(hex*hex*hex*hex*hex*hex)/function(s) return tonumber(s,16) end -local dec=(R("09")^1)/tonumber -local period=P(".") -local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true)) -local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true)) + local lumname=file.replacesuffix(file.basename(filename),"lum") + local lumfile=resolvers.findfile(lumname,"map") or "" + if lumfile~="" and lfs.isfile(lumfile) then + if trace_loading or trace_mapping then + report_fonts("loading map table %a",lumfile) + end + lumunic=dofile(lumfile) + return lumunic,lumfile + end +end +local hex=R("AF","09") +local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end +local hexsix=(hex*hex*hex*hex*hex*hex)/function(s) return tonumber(s,16) end +local dec=(R("09")^1)/tonumber +local period=P(".") +local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true)) +local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true)) local index=P("index")*dec*Cc(false) local parser=unicode+ucode+index local parsers={} -local function makenameparser(str) - if not str or str=="" then - return parser - else - local p=parsers[str] - if not p then - p=P(str)*period*dec*Cc(false) - parsers[str]=p - end - return p - end -end -local function tounicode16(unicode,name) - if unicode<0x10000 then - return format("%04X",unicode) - elseif unicode<0x1FFFFFFFFF then - return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) - else - report_fonts("can't convert %a in %a into tounicode",unicode,name) - end -end -local function tounicode16sequence(unicodes,name) - local t={} - for l=1,#unicodes do - local unicode=unicodes[l] - if unicode<0x10000 then - t[l]=format("%04X",unicode) - elseif unicode<0x1FFFFFFFFF then - t[l]=format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) - else - report_fonts ("can't convert %a in %a into tounicode",unicode,name) - end - end - return concat(t) -end -local function fromunicode16(str) - if #str==4 then - return tonumber(str,16) - else - local l,r=match(str,"(....)(....)") - return (tonumber(l,16))*0x400+tonumber(r,16)-0xDC00 - end -end -mappings.loadlumtable=loadlumtable -mappings.makenameparser=makenameparser -mappings.tounicode16=tounicode16 -mappings.tounicode16sequence=tounicode16sequence +local function makenameparser(str) + if not str or str=="" then + return parser + else + local p=parsers[str] + if not p then + p=P(str)*period*dec*Cc(false) + parsers[str]=p + end + return p + end +end +local function tounicode16(unicode,name) + if unicode<0x10000 then + return format("%04X",unicode) + elseif unicode<0x1FFFFFFFFF then + return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end +end +local function tounicode16sequence(unicodes,name) + local t={} + for l=1,#unicodes do + local unicode=unicodes[l] + if unicode<0x10000 then + t[l]=format("%04X",unicode) + elseif unicode<0x1FFFFFFFFF then + t[l]=format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",unicode,name) + end + end + return concat(t) +end +local function fromunicode16(str) + if #str==4 then + return tonumber(str,16) + else + local l,r=match(str,"(....)(....)") + return (tonumber(l,16))*0x400+tonumber(r,16)-0xDC00 + end +end +mappings.loadlumtable=loadlumtable +mappings.makenameparser=makenameparser +mappings.tounicode16=tounicode16 
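-- Illustrative aside, not part of the original patch: tounicode16 builds the hex
-- strings stored in the tounicode tables; BMP codepoints give four hex digits,
-- larger ones are split with the divide-by-1024 scheme above, which
-- fromunicode16 inverts. Worked values from those formulas:
local a = tounicode16(0x0041)        -- "0041"
local b = tounicode16(0x1D49C)       -- "0076DC9C" (0x1D49C//1024 = 0x76, 0x1D49C%1024 + 0xDC00 = 0xDC9C)
local c = fromunicode16("0076DC9C")  -- 0x1D49C again (0x76*0x400 + 0xDC9C - 0xDC00)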
+mappings.tounicode16sequence=tounicode16sequence mappings.fromunicode16=fromunicode16 -local separator=S("_.") -local other=C((1-separator)^1) +local separator=S("_.") +local other=C((1-separator)^1) local ligsplitter=Ct(other*(separator*other)^0) -function mappings.addtounicode(data,filename) - local resources=data.resources - local properties=data.properties - local descriptions=data.descriptions - local unicodes=resources.unicodes - if not unicodes then - return - end - unicodes['space']=unicodes['space'] or 32 - unicodes['hyphen']=unicodes['hyphen'] or 45 - unicodes['zwj']=unicodes['zwj'] or 0x200D +function mappings.addtounicode(data,filename) + local resources=data.resources + local properties=data.properties + local descriptions=data.descriptions + local unicodes=resources.unicodes + if not unicodes then + return + end + unicodes['space']=unicodes['space'] or 32 + unicodes['hyphen']=unicodes['hyphen'] or 45 + unicodes['zwj']=unicodes['zwj'] or 0x200D unicodes['zwnj']=unicodes['zwnj'] or 0x200C - local private=fonts.constructors.privateoffset - local unknown=format("%04X",utfbyte("?")) + local private=fonts.constructors.privateoffset + local unknown=format("%04X",utfbyte("?")) local unicodevector=fonts.encodings.agl.unicodes - local tounicode={} - local originals={} - resources.tounicode=tounicode - resources.originals=originals - local lumunic,uparser,oparser - local cidinfo,cidnames,cidcodes,usedmap + local tounicode={} + local originals={} + resources.tounicode=tounicode + resources.originals=originals + local lumunic,uparser,oparser + local cidinfo,cidnames,cidcodes,usedmap if false then - lumunic=loadlumtable(filename) - lumunic=lumunic and lumunic.tounicode + lumunic=loadlumtable(filename) + lumunic=lumunic and lumunic.tounicode end - cidinfo=properties.cidinfo + cidinfo=properties.cidinfo usedmap=cidinfo and fonts.cid.getmap(cidinfo) - if usedmap then - oparser=usedmap and makenameparser(cidinfo.ordering) - cidnames=usedmap.names - cidcodes=usedmap.unicodes - end - uparser=makenameparser() - local ns,nl=0,0 - for unic,glyph in next,descriptions do - local index=glyph.index - local name=glyph.name - if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then - local unicode=lumunic and lumunic[name] or unicodevector[name] - if unicode then - originals[index]=unicode - tounicode[index]=tounicode16(unicode,name) - ns=ns+1 - end - if (not unicode) and usedmap then - local foundindex=lpegmatch(oparser,name) - if foundindex then + if usedmap then + oparser=usedmap and makenameparser(cidinfo.ordering) + cidnames=usedmap.names + cidcodes=usedmap.unicodes + end + uparser=makenameparser() + local ns,nl=0,0 + for unic,glyph in next,descriptions do + local index=glyph.index + local name=glyph.name + if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then + local unicode=lumunic and lumunic[name] or unicodevector[name] + if unicode then + originals[index]=unicode + tounicode[index]=tounicode16(unicode,name) + ns=ns+1 + end + if (not unicode) and usedmap then + local foundindex=lpegmatch(oparser,name) + if foundindex then unicode=cidcodes[foundindex] - if unicode then - originals[index]=unicode - tounicode[index]=tounicode16(unicode,name) - ns=ns+1 - else + if unicode then + originals[index]=unicode + tounicode[index]=tounicode16(unicode,name) + ns=ns+1 + else local reference=cidnames[foundindex] - if reference then - local foundindex=lpegmatch(oparser,reference) - if foundindex then - 
unicode=cidcodes[foundindex] - if unicode then - originals[index]=unicode - tounicode[index]=tounicode16(unicode,name) - ns=ns+1 - end - end - if not unicode or unicode=="" then - local foundcodes,multiple=lpegmatch(uparser,reference) - if foundcodes then - originals[index]=foundcodes - if multiple then - tounicode[index]=tounicode16sequence(foundcodes) - nl=nl+1 - unicode=true - else - tounicode[index]=tounicode16(foundcodes,name) - ns=ns+1 - unicode=foundcodes - end - end - end - end - end - end - end - if not unicode or unicode=="" then - local split=lpegmatch(ligsplitter,name) - local nplit=split and #split or 0 - if nplit>=2 then - local t,n={},0 - for l=1,nplit do - local base=split[l] - local u=unicodes[base] or unicodevector[base] - if not u then - break - elseif type(u)=="table" then - n=n+1 - t[n]=u[1] - else - n=n+1 - t[n]=u - end - end + if reference then + local foundindex=lpegmatch(oparser,reference) + if foundindex then + unicode=cidcodes[foundindex] + if unicode then + originals[index]=unicode + tounicode[index]=tounicode16(unicode,name) + ns=ns+1 + end + end + if not unicode or unicode=="" then + local foundcodes,multiple=lpegmatch(uparser,reference) + if foundcodes then + originals[index]=foundcodes + if multiple then + tounicode[index]=tounicode16sequence(foundcodes) + nl=nl+1 + unicode=true + else + tounicode[index]=tounicode16(foundcodes,name) + ns=ns+1 + unicode=foundcodes + end + end + end + end + end + end + end + if not unicode or unicode=="" then + local split=lpegmatch(ligsplitter,name) + local nplit=split and #split or 0 + if nplit>=2 then + local t,n={},0 + for l=1,nplit do + local base=split[l] + local u=unicodes[base] or unicodevector[base] + if not u then + break + elseif type(u)=="table" then + n=n+1 + t[n]=u[1] + else + n=n+1 + t[n]=u + end + end if n==0 then - elseif n==1 then - originals[index]=t[1] - tounicode[index]=tounicode16(t[1],name) - else - originals[index]=t - tounicode[index]=tounicode16sequence(t) - end - nl=nl+1 - unicode=true + elseif n==1 then + originals[index]=t[1] + tounicode[index]=tounicode16(t[1],name) + else + originals[index]=t + tounicode[index]=tounicode16sequence(t) + end + nl=nl+1 + unicode=true else - end - end - if not unicode or unicode=="" then - local foundcodes,multiple=lpegmatch(uparser,name) - if foundcodes then - if multiple then - originals[index]=foundcodes - tounicode[index]=tounicode16sequence(foundcodes,name) - nl=nl+1 - unicode=true - else - originals[index]=foundcodes - tounicode[index]=tounicode16(foundcodes,name) - ns=ns+1 - unicode=foundcodes - end - end - end - end - end - if trace_mapping then - for unic,glyph in table.sortedhash(descriptions) do - local name=glyph.name - local index=glyph.index - local toun=tounicode[index] - if toun then - report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun) - else - report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) - end - end - end - if trace_loading and (ns>0 or nl>0) then - report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) - end -end + end + end + if not unicode or unicode=="" then + local foundcodes,multiple=lpegmatch(uparser,name) + if foundcodes then + if multiple then + originals[index]=foundcodes + tounicode[index]=tounicode16sequence(foundcodes,name) + nl=nl+1 + unicode=true + else + originals[index]=foundcodes + tounicode[index]=tounicode16(foundcodes,name) + ns=ns+1 + unicode=foundcodes + end + end + end + end + end + if trace_mapping then + for unic,glyph in table.sortedhash(descriptions) 
do + local name=glyph.name + local index=glyph.index + local toun=tounicode[index] + if toun then + report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun) + else + report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) + end + end + end + if trace_loading and (ns>0 or nl>0) then + report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) + end +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['luatex-fonts-syn']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['luatex-fonts-syn']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() end -local fonts=fonts +local fonts=fonts fonts.names=fonts.names or {} fonts.names.version=1.001 -fonts.names.basename="luatex-fonts-names" -fonts.names.new_to_old={} -fonts.names.old_to_new={} +fonts.names.basename="luatex-fonts-names" +fonts.names.new_to_old={} +fonts.names.old_to_new={} fonts.names.cache=containers.define("fonts","data",fonts.names.version,true) local data,loaded=nil,false local fileformats={ "lua","tex","other text files" } -function fonts.names.reportmissingbase() - texio.write("") - fonts.names.reportmissingbase=nil -end -function fonts.names.reportmissingname() - texio.write("") - fonts.names.reportmissingname=nil -end -function fonts.names.resolve(name,sub) - if not loaded then - local basename=fonts.names.basename - if basename and basename~="" then - data=containers.read(fonts.names.cache,basename) - if not data then - basename=file.addsuffix(basename,"lua") - for i=1,#fileformats do - local format=fileformats[i] - local foundname=resolvers.findfile(basename,format) or "" - if foundname~="" then - data=dofile(foundname) - texio.write("") - break - end - end - end - end - loaded=true - end - if type(data)=="table" and data.version==fonts.names.version then - local condensed=string.gsub(string.lower(name),"[^%a%d]","") - local found=data.mappings and data.mappings[condensed] - if found then - local fontname,filename,subfont=found[1],found[2],found[3] - if subfont then - return filename,fontname - else - return filename,false - end - elseif fonts.names.reportmissingname then - fonts.names.reportmissingname() +function fonts.names.reportmissingbase() + texio.write("") + fonts.names.reportmissingbase=nil +end +function fonts.names.reportmissingname() + texio.write("") + fonts.names.reportmissingname=nil +end +function fonts.names.resolve(name,sub) + if not loaded then + local basename=fonts.names.basename + if basename and basename~="" then + data=containers.read(fonts.names.cache,basename) + if not data then + basename=file.addsuffix(basename,"lua") + for i=1,#fileformats do + local format=fileformats[i] + local foundname=resolvers.findfile(basename,format) or "" + if foundname~="" then + data=dofile(foundname) + texio.write("") + break + end + end + end + end + loaded=true + end + if type(data)=="table" and data.version==fonts.names.version then + local 
condensed=string.gsub(string.lower(name),"[^%a%d]","") + local found=data.mappings and data.mappings[condensed] + if found then + local fontname,filename,subfont=found[1],found[2],found[3] + if subfont then + return filename,fontname + else + return filename,false + end + elseif fonts.names.reportmissingname then + fonts.names.reportmissingname() return name,false - end - elseif fonts.names.reportmissingbase then - fonts.names.reportmissingbase() - end + end + elseif fonts.names.reportmissingbase then + fonts.names.reportmissingbase() + end end fonts.names.resolvespec=fonts.names.resolve function fonts.names.getfilename(askedname,suffix) - return "" -end + return "" +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['luatex-fonts-tfm']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['luatex-fonts-tfm']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() end -local fonts=fonts -local tfm={} -fonts.handlers.tfm=tfm +local fonts=fonts +local tfm={} +fonts.handlers.tfm=tfm fonts.formats.tfm="type1" -function fonts.readers.tfm(specification) - local fullname=specification.filename or "" - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - fullname=specification.name.."."..forced - else - fullname=specification.name - end - end - local foundname=resolvers.findbinfile(fullname,'tfm') or "" - if foundname=="" then - foundname=resolvers.findbinfile(fullname,'ofm') or "" - end - if foundname~="" then - specification.filename=foundname - specification.format="ofm" - return font.read_tfm(specification.filename,specification.size) - end -end +function fonts.readers.tfm(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + local foundname=resolvers.findbinfile(fullname,'tfm') or "" + if foundname=="" then + foundname=resolvers.findbinfile(fullname,'ofm') or "" + end + if foundname~="" then + specification.filename=foundname + specification.format="ofm" + return font.read_tfm(specification.filename,specification.size) + end +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['font-oti']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['font-oti']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } local lower=string.lower -local fonts=fonts +local fonts=fonts local constructors=fonts.constructors -local otf=constructors.newhandler("otf") -local 
otffeatures=constructors.newfeatures("otf") -local otftables=otf.tables +local otf=constructors.newhandler("otf") +local otffeatures=constructors.newfeatures("otf") +local otftables=otf.tables local registerotffeature=otffeatures.register local allocate=utilities.storage.allocate -registerotffeature { - name="features", - description="initialization of feature handler", - default=true, +registerotffeature { + name="features", + description="initialization of feature handler", + default=true, } -local function setmode(tfmdata,value) - if value then - tfmdata.properties.mode=lower(value) - end -end -local function setlanguage(tfmdata,value) - if value then - local cleanvalue=lower(value) - local languages=otftables and otftables.languages - local properties=tfmdata.properties - if not languages then - properties.language=cleanvalue - elseif languages[value] then - properties.language=cleanvalue - else - properties.language="dflt" - end - end -end -local function setscript(tfmdata,value) - if value then - local cleanvalue=lower(value) - local scripts=otftables and otftables.scripts - local properties=tfmdata.properties - if not scripts then - properties.script=cleanvalue - elseif scripts[value] then - properties.script=cleanvalue - else - properties.script="dflt" - end - end -end -registerotffeature { - name="mode", - description="mode", - initializers={ - base=setmode, - node=setmode, - } +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode=lower(value) + end +end +local function setlanguage(tfmdata,value) + if value then + local cleanvalue=lower(value) + local languages=otftables and otftables.languages + local properties=tfmdata.properties + if not languages then + properties.language=cleanvalue + elseif languages[value] then + properties.language=cleanvalue + else + properties.language="dflt" + end + end +end +local function setscript(tfmdata,value) + if value then + local cleanvalue=lower(value) + local scripts=otftables and otftables.scripts + local properties=tfmdata.properties + if not scripts then + properties.script=cleanvalue + elseif scripts[value] then + properties.script=cleanvalue + else + properties.script="dflt" + end + end +end +registerotffeature { + name="mode", + description="mode", + initializers={ + base=setmode, + node=setmode, + } } -registerotffeature { - name="language", - description="language", - initializers={ - base=setlanguage, - node=setlanguage, - } +registerotffeature { + name="language", + description="language", + initializers={ + base=setlanguage, + node=setlanguage, + } } -registerotffeature { - name="script", - description="script", - initializers={ - base=setscript, - node=setscript, - } +registerotffeature { + name="script", + description="script", + initializers={ + base=setscript, + node=setscript, + } } end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['font-otf']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['font-otf']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -local utfbyte=utf.byte -local 
format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring=type,next,tonumber,tostring -local abs=math.abs -local getn=table.getn -local lpegmatch=lpeg.match -local reversed,concat,remove=table.reversed,table.concat,table.remove -local ioflush=io.flush -local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive +local utfbyte=utf.byte +local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip +local type,next,tonumber,tostring=type,next,tonumber,tostring +local abs=math.abs +local getn=table.getn +local lpegmatch=lpeg.match +local reversed,concat,remove=table.reversed,table.concat,table.remove +local ioflush=io.flush +local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive local formatters=string.formatters -local allocate=utilities.storage.allocate -local registertracker=trackers.register -local registerdirective=directives.register -local starttiming=statistics.starttiming -local stoptiming=statistics.stoptiming -local elapsedtime=statistics.elapsedtime +local allocate=utilities.storage.allocate +local registertracker=trackers.register +local registerdirective=directives.register +local starttiming=statistics.starttiming +local stoptiming=statistics.stoptiming +local elapsedtime=statistics.elapsedtime local findbinfile=resolvers.findbinfile -local trace_private=false registertracker("otf.private",function(v) trace_private=v end) -local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end) -local trace_features=false registertracker("otf.features",function(v) trace_features=v end) -local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end) -local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end) -local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end) +local trace_private=false registertracker("otf.private",function(v) trace_private=v end) +local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end) +local trace_features=false registertracker("otf.features",function(v) trace_features=v end) +local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end) +local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end) +local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end) local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end) local report_otf=logs.reporter("fonts","otf loading") -local fonts=fonts +local fonts=fonts local otf=fonts.handlers.otf otf.glists={ "gsub","gpos" } otf.version=2.743 otf.cache=containers.define("fonts","otf",otf.version,true) -local fontdata=fonts.hashes.identifiers +local fontdata=fonts.hashes.identifiers local chardata=characters and characters.data -local otffeatures=fonts.constructors.newfeatures("otf") +local otffeatures=fonts.constructors.newfeatures("otf") local registerotffeature=otffeatures.register -local enhancers=allocate() -otf.enhancers=enhancers -local patches={} +local enhancers=allocate() +otf.enhancers=enhancers +local patches={} enhancers.patches=patches -local definers=fonts.definers -local readers=fonts.readers +local definers=fonts.definers +local readers=fonts.readers local constructors=fonts.constructors -local 
forceload=false +local forceload=false local cleanup=0 local usemetatables=false -local packdata=true -local syncspace=true -local forcenotdef=false +local packdata=true +local syncspace=true +local forcenotdef=false local includesubfonts=false -local wildcard="*" +local wildcard="*" local default="dflt" -local fontloaderfields=fontloader.fields -local mainfields=nil +local fontloaderfields=fontloader.fields +local mainfields=nil local glyphfields=nil -registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end) -registerdirective("fonts.otf.loader.force",function(v) forceload=v end) -registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end) -registerdirective("fonts.otf.loader.pack",function(v) packdata=v end) -registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end) +registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end) +registerdirective("fonts.otf.loader.force",function(v) forceload=v end) +registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end) +registerdirective("fonts.otf.loader.pack",function(v) packdata=v end) +registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end) registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end) -local function load_featurefile(raw,featurefile) - if featurefile and featurefile~="" then - if trace_loading then - report_otf("using featurefile %a",featurefile) - end - fontloader.apply_featurefile(raw,featurefile) - end -end -local function showfeatureorder(rawdata,filename) - local sequences=rawdata.resources.sequences - if sequences and #sequences>0 then - if trace_loading then - report_otf("font %a has %s sequences",filename,#sequences) - report_otf(" ") - end - for nos=1,#sequences do - local sequence=sequences[nos] - local typ=sequence.type or "no-type" - local name=sequence.name or "no-name" - local subtables=sequence.subtables or { "no-subtables" } - local features=sequence.features - if trace_loading then - report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables) - end - if features then - for feature,scripts in next,features do - local tt={} - if type(scripts)=="table" then - for script,languages in next,scripts do - local ttt={} - for language,_ in next,languages do - ttt[#ttt+1]=language - end - tt[#tt+1]=formatters["[%s: % t]"](script,ttt) - end - if trace_loading then - report_otf(" %s: % t",feature,tt) - end - else - if trace_loading then - report_otf(" %s: %S",feature,scripts) - end - end - end - end - end - if trace_loading then - report_otf("\n") - end - elseif trace_loading then - report_otf("font %a has no sequences",filename) - end +local function load_featurefile(raw,featurefile) + if featurefile and featurefile~="" then + if trace_loading then + report_otf("using featurefile %a",featurefile) + end + fontloader.apply_featurefile(raw,featurefile) + end +end +local function showfeatureorder(rawdata,filename) + local sequences=rawdata.resources.sequences + if sequences and #sequences>0 then + if trace_loading then + report_otf("font %a has %s sequences",filename,#sequences) + report_otf(" ") + end + for nos=1,#sequences do + local sequence=sequences[nos] + local typ=sequence.type or "no-type" + local name=sequence.name or "no-name" + local subtables=sequence.subtables or { "no-subtables" } + local features=sequence.features + if trace_loading then + report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables) + end + if features then + for 
feature,scripts in next,features do + local tt={} + if type(scripts)=="table" then + for script,languages in next,scripts do + local ttt={} + for language,_ in next,languages do + ttt[#ttt+1]=language + end + tt[#tt+1]=formatters["[%s: % t]"](script,ttt) + end + if trace_loading then + report_otf(" %s: % t",feature,tt) + end + else + if trace_loading then + report_otf(" %s: %S",feature,scripts) + end + end + end + end + end + if trace_loading then + report_otf("\n") + end + elseif trace_loading then + report_otf("font %a has no sequences",filename) + end end local valid_fields=table.tohash { "ascent", - "cidinfo", + "cidinfo", "copyright", - "descent", - "design_range_bottom", - "design_range_top", - "design_size", - "encodingchanged", - "extrema_bound", - "familyname", - "fontname", - "fontname", - "fontstyle_id", - "fontstyle_name", + "descent", + "design_range_bottom", + "design_range_top", + "design_size", + "encodingchanged", + "extrema_bound", + "familyname", + "fontname", + "fontname", + "fontstyle_id", + "fontstyle_name", "fullname", "hasvmetrics", - "horiz_base", - "issans", - "isserif", + "horiz_base", + "issans", + "isserif", "italicangle", "macstyle", - "onlybitmaps", - "origname", - "os2_version", + "onlybitmaps", + "origname", + "os2_version", "pfminfo", - "serifcheck", + "serifcheck", "sfd_version", - "strokedfont", + "strokedfont", "strokewidth", "table_version", - "ttf_tables", - "uni_interp", - "uniqueid", - "units_per_em", - "upos", - "use_typo_metrics", + "ttf_tables", + "uni_interp", + "uniqueid", + "units_per_em", + "upos", + "use_typo_metrics", "uwidth", - "version", - "vert_base", - "weight", + "version", + "vert_base", + "weight", "weight_width_slope_only", } -local ordered_enhancers={ +local ordered_enhancers={ "prepare tables", - "prepare glyphs", + "prepare glyphs", "prepare lookups", - "analyze glyphs", + "analyze glyphs", "analyze math", "prepare tounicode", - "reorganize lookups", - "reorganize mark classes", + "reorganize lookups", + "reorganize mark classes", "reorganize anchor classes", - "reorganize glyph kerns", - "reorganize glyph lookups", + "reorganize glyph kerns", + "reorganize glyph lookups", "reorganize glyph anchors", "merge kern classes", - "reorganize features", + "reorganize features", "reorganize subtables", - "check glyphs", - "check metadata", + "check glyphs", + "check metadata", "check extra features", "check encoding", "add duplicates", - "cleanup tables", + "cleanup tables", } -local actions=allocate() -local before=allocate() +local actions=allocate() +local before=allocate() local after=allocate() -patches.before=before +patches.before=before patches.after=after -local function enhance(name,data,filename,raw) - local enhancer=actions[name] - if enhancer then - if trace_loading then - report_otf("apply enhancement %a to file %a",name,filename) - ioflush() - end - enhancer(data,filename,raw) +local function enhance(name,data,filename,raw) + local enhancer=actions[name] + if enhancer then + if trace_loading then + report_otf("apply enhancement %a to file %a",name,filename) + ioflush() + end + enhancer(data,filename,raw) else - end + end end -function enhancers.apply(data,filename,raw) - local basename=file.basename(lower(filename)) - if trace_loading then - report_otf("%s enhancing file %a","start",filename) - end +function enhancers.apply(data,filename,raw) + local basename=file.basename(lower(filename)) + if trace_loading then + report_otf("%s enhancing file %a","start",filename) + end ioflush() - for e=1,#ordered_enhancers do - local 
enhancer=ordered_enhancers[e] - local b=before[enhancer] - if b then - for pattern,action in next,b do - if find(basename,pattern) then - action(data,filename,raw) - end - end - end - enhance(enhancer,data,filename,raw) - local a=after[enhancer] - if a then - for pattern,action in next,a do - if find(basename,pattern) then - action(data,filename,raw) - end - end - end + for e=1,#ordered_enhancers do + local enhancer=ordered_enhancers[e] + local b=before[enhancer] + if b then + for pattern,action in next,b do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end + enhance(enhancer,data,filename,raw) + local a=after[enhancer] + if a then + for pattern,action in next,a do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end ioflush() - end - if trace_loading then - report_otf("%s enhancing file %a","stop",filename) - end + end + if trace_loading then + report_otf("%s enhancing file %a","stop",filename) + end ioflush() end -function patches.register(what,where,pattern,action) - local pw=patches[what] - if pw then - local ww=pw[where] - if ww then - ww[pattern]=action - else - pw[where]={ [pattern]=action} - end - end -end -function patches.report(fmt,...) - if trace_loading then - report_otf("patching: %s",formatters[fmt](...)) - end +function patches.register(what,where,pattern,action) + local pw=patches[what] + if pw then + local ww=pw[where] + if ww then + ww[pattern]=action + else + pw[where]={ [pattern]=action} + end + end +end +function patches.report(fmt,...) + if trace_loading then + report_otf("patching: %s",formatters[fmt](...)) + end end function enhancers.register(what,action) - actions[what]=action -end -function otf.load(filename,format,sub,featurefile) - local base=file.basename(file.removesuffix(filename)) - local name=file.removesuffix(base) - local attr=lfs.attributes(filename) - local size=attr and attr.size or 0 - local time=attr and attr.modification or 0 - if featurefile then - name=name.."@"..file.removesuffix(file.basename(featurefile)) - end - if sub=="" then - sub=false - end - local hash=name - if sub then - hash=hash.."-"..sub - end - hash=containers.cleanname(hash) - local featurefiles - if featurefile then - featurefiles={} - for s in gmatch(featurefile,"[^,]+") do - local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" - if name=="" then - report_otf("loading error, no featurefile %a",s) - else - local attr=lfs.attributes(name) - featurefiles[#featurefiles+1]={ - name=name, - size=attr and attr.size or 0, - time=attr and attr.modification or 0, - } - end - end - if #featurefiles==0 then - featurefiles=nil - end - end - local data=containers.read(otf.cache,hash) - local reload=not data or data.size~=size or data.time~=time - if forceload then - report_otf("forced reload of %a due to hard coded flag",filename) - reload=true - end - if not reload then - local featuredata=data.featuredata - if featurefiles then - if not featuredata or #featuredata~=#featurefiles then - reload=true - else - for i=1,#featurefiles do - local fi,fd=featurefiles[i],featuredata[i] - if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then - reload=true - break - end - end - end - elseif featuredata then - reload=true - end - if reload then - report_otf("loading: forced reload due to changed featurefile specification %a",featurefile) - end - end - if reload then - report_otf("loading %a, hash %a",filename,hash) - local fontdata,messages - if sub then - fontdata,messages=fontloader.open(filename,sub) - else - 
fontdata,messages=fontloader.open(filename) - end - if fontdata then - mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata)) - end - if trace_loading and messages and #messages>0 then - if type(messages)=="string" then - report_otf("warning: %s",messages) - else - for m=1,#messages do - report_otf("warning: %S",messages[m]) - end - end - else - report_otf("loading done") - end - if fontdata then - if featurefiles then - for i=1,#featurefiles do - load_featurefile(fontdata,featurefiles[i].name) - end - end + actions[what]=action +end +function otf.load(filename,format,sub,featurefile) + local base=file.basename(file.removesuffix(filename)) + local name=file.removesuffix(base) + local attr=lfs.attributes(filename) + local size=attr and attr.size or 0 + local time=attr and attr.modification or 0 + if featurefile then + name=name.."@"..file.removesuffix(file.basename(featurefile)) + end + if sub=="" then + sub=false + end + local hash=name + if sub then + hash=hash.."-"..sub + end + hash=containers.cleanname(hash) + local featurefiles + if featurefile then + featurefiles={} + for s in gmatch(featurefile,"[^,]+") do + local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" + if name=="" then + report_otf("loading error, no featurefile %a",s) + else + local attr=lfs.attributes(name) + featurefiles[#featurefiles+1]={ + name=name, + size=attr and attr.size or 0, + time=attr and attr.modification or 0, + } + end + end + if #featurefiles==0 then + featurefiles=nil + end + end + local data=containers.read(otf.cache,hash) + local reload=not data or data.size~=size or data.time~=time + if forceload then + report_otf("forced reload of %a due to hard coded flag",filename) + reload=true + end + if not reload then + local featuredata=data.featuredata + if featurefiles then + if not featuredata or #featuredata~=#featurefiles then + reload=true + else + for i=1,#featurefiles do + local fi,fd=featurefiles[i],featuredata[i] + if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then + reload=true + break + end + end + end + elseif featuredata then + reload=true + end + if reload then + report_otf("loading: forced reload due to changed featurefile specification %a",featurefile) + end + end + if reload then + report_otf("loading %a, hash %a",filename,hash) + local fontdata,messages + if sub then + fontdata,messages=fontloader.open(filename,sub) + else + fontdata,messages=fontloader.open(filename) + end + if fontdata then + mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata)) + end + if trace_loading and messages and #messages>0 then + if type(messages)=="string" then + report_otf("warning: %s",messages) + else + for m=1,#messages do + report_otf("warning: %S",messages[m]) + end + end + else + report_otf("loading done") + end + if fontdata then + if featurefiles then + for i=1,#featurefiles do + load_featurefile(fontdata,featurefiles[i].name) + end + end local unicodes={ - } - local splitter=lpeg.splitter(" ",unicodes) - data={ - size=size, - time=time, - format=format, - featuredata=featurefiles, - resources={ + } + local splitter=lpeg.splitter(" ",unicodes) + data={ + size=size, + time=time, + format=format, + featuredata=featurefiles, + resources={ filename=resolvers.unresolve(filename), - version=otf.version, - creator="context mkiv", - unicodes=unicodes, + version=otf.version, + creator="context mkiv", + unicodes=unicodes, indices={ - }, + }, duplicates={ - }, + }, variants={ - }, - lookuptypes={}, - }, + }, + lookuptypes={}, + }, metadata={ - }, + 
}, properties={ - }, - descriptions={}, - goodies={}, - helpers={ - tounicodelist=splitter, - tounicodetable=lpeg.Ct(splitter), - }, - } - starttiming(data) - report_otf("file size: %s",size) - enhancers.apply(data,filename,fontdata) - local packtime={} - if packdata then - if cleanup>0 then - collectgarbage("collect") - end - starttiming(packtime) - enhance("pack",data,filename,nil) - stoptiming(packtime) - end - report_otf("saving %a in cache",filename) - data=containers.write(otf.cache,hash,data) - if cleanup>1 then - collectgarbage("collect") - end - stoptiming(data) + }, + descriptions={}, + goodies={}, + helpers={ + tounicodelist=splitter, + tounicodetable=lpeg.Ct(splitter), + }, + } + starttiming(data) + report_otf("file size: %s",size) + enhancers.apply(data,filename,fontdata) + local packtime={} + if packdata then + if cleanup>0 then + collectgarbage("collect") + end + starttiming(packtime) + enhance("pack",data,filename,nil) + stoptiming(packtime) + end + report_otf("saving %a in cache",filename) + data=containers.write(otf.cache,hash,data) + if cleanup>1 then + collectgarbage("collect") + end + stoptiming(data) if elapsedtime then - report_otf("preprocessing and caching time %s, packtime %s", - elapsedtime(data),packdata and elapsedtime(packtime) or 0) - end + report_otf("preprocessing and caching time %s, packtime %s", + elapsedtime(data),packdata and elapsedtime(packtime) or 0) + end fontloader.close(fontdata) - if cleanup>3 then - collectgarbage("collect") - end + if cleanup>3 then + collectgarbage("collect") + end data=containers.read(otf.cache,hash) - if cleanup>2 then - collectgarbage("collect") - end - else - data=nil - report_otf("loading failed due to read error") - end - end - if data then - if trace_defining then - report_otf("loading from cache using hash %a",hash) - end - enhance("unpack",data,filename,nil,false) - enhance("add dimensions",data,filename,nil,false) - if trace_sequences then - showfeatureorder(data,filename) - end - end - return data -end -local mt={ + if cleanup>2 then + collectgarbage("collect") + end + else + data=nil + report_otf("loading failed due to read error") + end + end + if data then + if trace_defining then + report_otf("loading from cache using hash %a",hash) + end + enhance("unpack",data,filename,nil,false) + enhance("add dimensions",data,filename,nil,false) + if trace_sequences then + showfeatureorder(data,filename) + end + end + return data +end +local mt={ __index=function(t,k) - if k=="height" then - local ht=t.boundingbox[4] - return ht<0 and 0 or ht - elseif k=="depth" then - local dp=-t.boundingbox[2] - return dp<0 and 0 or dp - elseif k=="width" then - return 0 + if k=="height" then + local ht=t.boundingbox[4] + return ht<0 and 0 or ht + elseif k=="depth" then + local dp=-t.boundingbox[2] + return dp<0 and 0 or dp + elseif k=="width" then + return 0 elseif k=="name" then - return forcenotdef and ".notdef" - end - end + return forcenotdef and ".notdef" + end + end } -actions["prepare tables"]=function(data,filename,raw) - data.properties.hasitalics=false +actions["prepare tables"]=function(data,filename,raw) + data.properties.hasitalics=false end actions["add dimensions"]=function(data,filename) - if data then - local descriptions=data.descriptions - local resources=data.resources - local defaultwidth=resources.defaultwidth or 0 - local defaultheight=resources.defaultheight or 0 - local defaultdepth=resources.defaultdepth or 0 - local basename=trace_markwidth and file.basename(filename) - if usemetatables then - for _,d in 
next,descriptions do - local wd=d.width - if not wd then - d.width=defaultwidth - elseif trace_markwidth and wd~=0 and d.class=="mark" then + if data then + local descriptions=data.descriptions + local resources=data.resources + local defaultwidth=resources.defaultwidth or 0 + local defaultheight=resources.defaultheight or 0 + local defaultdepth=resources.defaultdepth or 0 + local basename=trace_markwidth and file.basename(filename) + if usemetatables then + for _,d in next,descriptions do + local wd=d.width + if not wd then + d.width=defaultwidth + elseif trace_markwidth and wd~=0 and d.class=="mark" then report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) - end - setmetatable(d,mt) - end - else - for _,d in next,descriptions do - local bb,wd=d.boundingbox,d.width - if not wd then - d.width=defaultwidth - elseif trace_markwidth and wd~=0 and d.class=="mark" then + end + setmetatable(d,mt) + end + else + for _,d in next,descriptions do + local bb,wd=d.boundingbox,d.width + if not wd then + d.width=defaultwidth + elseif trace_markwidth and wd~=0 and d.class=="mark" then report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) end - if bb then - local ht,dp=bb[4],-bb[2] + if bb then + local ht,dp=bb[4],-bb[2] if ht==0 or ht<0 then - else - d.height=ht - end + else + d.height=ht + end if dp==0 or dp<0 then - else - d.depth=dp - end - end - end - end - end + else + d.depth=dp + end + end + end + end + end end local function somecopy(old) - if old then - local new={} - if type(old)=="table" then - for k,v in next,old do + if old then + local new={} + if type(old)=="table" then + for k,v in next,old do if k=="glyphs" then - elseif type(v)=="table" then - new[k]=somecopy(v) - else - new[k]=v - end - end - else - for i=1,#mainfields do - local k=mainfields[i] - local v=old[k] + elseif type(v)=="table" then + new[k]=somecopy(v) + else + new[k]=v + end + end + else + for i=1,#mainfields do + local k=mainfields[i] + local v=old[k] if k=="glyphs" then - elseif type(v)=="table" then - new[k]=somecopy(v) - else - new[k]=v - end - end - end - return new - else - return {} - end -end -actions["prepare glyphs"]=function(data,filename,raw) - local rawglyphs=raw.glyphs - local rawsubfonts=raw.subfonts - local rawcidinfo=raw.cidinfo - local criterium=constructors.privateoffset - local private=criterium - local resources=data.resources - local metadata=data.metadata - local properties=data.properties - local descriptions=data.descriptions + elseif type(v)=="table" then + new[k]=somecopy(v) + else + new[k]=v + end + end + end + return new + else + return {} + end +end +actions["prepare glyphs"]=function(data,filename,raw) + local rawglyphs=raw.glyphs + local rawsubfonts=raw.subfonts + local rawcidinfo=raw.cidinfo + local criterium=constructors.privateoffset + local private=criterium + local resources=data.resources + local metadata=data.metadata + local properties=data.properties + local descriptions=data.descriptions local unicodes=resources.unicodes local indices=resources.indices - local duplicates=resources.duplicates + local duplicates=resources.duplicates local variants=resources.variants if rawsubfonts then - metadata.subfonts=includesubfonts and {} + metadata.subfonts=includesubfonts and {} properties.cidinfo=rawcidinfo - if rawcidinfo.registry then - local cidmap=fonts.cid.getmap(rawcidinfo) - if cidmap then - rawcidinfo.usedname=cidmap.usedname - local nofnames,nofunicodes=0,0 - local cidunicodes,cidnames=cidmap.unicodes,cidmap.names - for cidindex=1,#rawsubfonts 
do - local subfont=rawsubfonts[cidindex] - local cidglyphs=subfont.glyphs - if includesubfonts then - metadata.subfonts[cidindex]=somecopy(subfont) - end + if rawcidinfo.registry then + local cidmap=fonts.cid.getmap(rawcidinfo) + if cidmap then + rawcidinfo.usedname=cidmap.usedname + local nofnames,nofunicodes=0,0 + local cidunicodes,cidnames=cidmap.unicodes,cidmap.names + for cidindex=1,#rawsubfonts do + local subfont=rawsubfonts[cidindex] + local cidglyphs=subfont.glyphs + if includesubfonts then + metadata.subfonts[cidindex]=somecopy(subfont) + end for index=0,subfont.glyphcnt-1 do - local glyph=cidglyphs[index] - if glyph then - local unicode=glyph.unicode - local name=glyph.name or cidnames[index] - if not unicode or unicode==-1 or unicode>=criterium then - unicode=cidunicodes[index] - end - if unicode and descriptions[unicode] then - report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode) - unicode=-1 - end - if not unicode or unicode==-1 or unicode>=criterium then - if not name then - name=format("u%06X",private) - end - unicode=private - unicodes[name]=private - if trace_private then - report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) - end - private=private+1 - nofnames=nofnames+1 - else - if not name then - name=format("u%06X",unicode) - end - unicodes[name]=unicode - nofunicodes=nofunicodes+1 - end + local glyph=cidglyphs[index] + if glyph then + local unicode=glyph.unicode + local name=glyph.name or cidnames[index] + if not unicode or unicode==-1 or unicode>=criterium then + unicode=cidunicodes[index] + end + if unicode and descriptions[unicode] then + report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode) + unicode=-1 + end + if not unicode or unicode==-1 or unicode>=criterium then + if not name then + name=format("u%06X",private) + end + unicode=private + unicodes[name]=private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private=private+1 + nofnames=nofnames+1 + else + if not name then + name=format("u%06X",unicode) + end + unicodes[name]=unicode + nofunicodes=nofunicodes+1 + end indices[index]=unicode local description={ - boundingbox=glyph.boundingbox, + boundingbox=glyph.boundingbox, name=glyph.name or name or "unknown", - cidindex=cidindex, - index=index, - glyph=glyph, + cidindex=cidindex, + index=index, + glyph=glyph, } - descriptions[unicode]=description + descriptions[unicode]=description else - end - end - end - if trace_loading then - report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames) - end - elseif trace_loading then - report_otf("unable to remap cid font, missing cid file for %a",filename) - end - elseif trace_loading then - report_otf("font %a has no glyphs",filename) + end + end + end + if trace_loading then + report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames) + end + elseif trace_loading then + report_otf("unable to remap cid font, missing cid file for %a",filename) + end + elseif trace_loading then + report_otf("font %a has no glyphs",filename) end else for index=0,raw.glyphcnt-1 do - local glyph=rawglyphs[index] - if glyph then - local unicode=glyph.unicode - local name=glyph.name - if not unicode or unicode==-1 or unicode>=criterium then - unicode=private - unicodes[name]=private - if trace_private then - 
report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) - end - private=private+1 - else - unicodes[name]=unicode - end - indices[index]=unicode - if not name then - name=format("u%06X",unicode) - end + local glyph=rawglyphs[index] + if glyph then + local unicode=glyph.unicode + local name=glyph.name + if not unicode or unicode==-1 or unicode>=criterium then + unicode=private + unicodes[name]=private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private=private+1 + else + unicodes[name]=unicode + end + indices[index]=unicode + if not name then + name=format("u%06X",unicode) + end descriptions[unicode]={ - boundingbox=glyph.boundingbox, - name=name, - index=index, - glyph=glyph, - } - local altuni=glyph.altuni - if altuni then - local d - for i=1,#altuni do - local a=altuni[i] - local u=a.unicode - local v=a.variant + boundingbox=glyph.boundingbox, + name=name, + index=index, + glyph=glyph, + } + local altuni=glyph.altuni + if altuni then + local d + for i=1,#altuni do + local a=altuni[i] + local u=a.unicode + local v=a.variant if v then - local vv=variants[v] - if vv then - vv[u]=unicode + local vv=variants[v] + if vv then + vv[u]=unicode else - vv={ [u]=unicode } - variants[v]=vv - end - elseif d then - d[#d+1]=u - else - d={ u } - end - end - if d then - duplicates[unicode]=d - end - end - else - report_otf("potential problem: glyph %U is used but empty",index) - end + vv={ [u]=unicode } + variants[v]=vv + end + elseif d then + d[#d+1]=u + else + d={ u } + end + end + if d then + duplicates[unicode]=d + end + end + else + report_otf("potential problem: glyph %U is used but empty",index) + end end end resources.private=private end -actions["check encoding"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local properties=data.properties +actions["check encoding"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local properties=data.properties local unicodes=resources.unicodes local indices=resources.indices local duplicates=resources.duplicates - local mapdata=raw.map or {} + local mapdata=raw.map or {} local unicodetoindex=mapdata and mapdata.map or {} - local encname=lower(data.enc_name or mapdata.enc_name or "") + local encname=lower(data.enc_name or mapdata.enc_name or "") local criterium=0xFFFF if find(encname,"unicode") then - if trace_loading then - report_otf("checking embedded unicode map %a",encname) - end + if trace_loading then + report_otf("checking embedded unicode map %a",encname) + end for unicode,index in next,unicodetoindex do - if unicode<=criterium and not descriptions[unicode] then + if unicode<=criterium and not descriptions[unicode] then local parent=indices[index] - if not parent then - report_otf("weird, unicode %U points to nowhere with index %H",unicode,index) - else - local parentdescription=descriptions[parent] - if parentdescription then - local altuni=parentdescription.altuni - if not altuni then - altuni={ { unicode=parent } } - parentdescription.altuni=altuni - duplicates[parent]={ unicode } - else - local done=false - for i=1,#altuni do - if altuni[i].unicode==parent then - done=true - break - end - end + if not parent then + report_otf("weird, unicode %U points to nowhere with index %H",unicode,index) + else + local parentdescription=descriptions[parent] + if parentdescription then + local altuni=parentdescription.altuni + if not altuni 
then + altuni={ { unicode=parent } } + parentdescription.altuni=altuni + duplicates[parent]={ unicode } + else + local done=false + for i=1,#altuni do + if altuni[i].unicode==parent then + done=true + break + end + end if not done then - altuni[#altuni+1]={ unicode=parent } - table.insert(duplicates[parent],unicode) - end - end - if trace_loading then - report_otf("weird, unicode %U points to nowhere with index %H",unicode,index) - end - else - report_otf("weird, unicode %U points to %U with index %H",unicode,index) - end - end - end - end - elseif properties.cidinfo then - report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname) - else - report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever") - end - if mapdata then + altuni[#altuni+1]={ unicode=parent } + table.insert(duplicates[parent],unicode) + end + end + if trace_loading then + report_otf("weird, unicode %U points to nowhere with index %H",unicode,index) + end + else + report_otf("weird, unicode %U points to %U with index %H",unicode,index) + end + end + end + end + elseif properties.cidinfo then + report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname) + else + report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever") + end + if mapdata then mapdata.map={} - end + end end -actions["add duplicates"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local properties=data.properties +actions["add duplicates"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local properties=data.properties local unicodes=resources.unicodes local indices=resources.indices local duplicates=resources.duplicates - for unicode,d in next,duplicates do - for i=1,#d do - local u=d[i] - if not descriptions[u] then - local description=descriptions[unicode] + for unicode,d in next,duplicates do + for i=1,#d do + local u=d[i] + if not descriptions[u] then + local description=descriptions[unicode] local duplicate=table.copy(description) - duplicate.comment=format("copy of U+%05X",unicode) - descriptions[u]=duplicate - local n=0 - for _,description in next,descriptions do - if kerns then - local kerns=description.kerns - for _,k in next,kerns do - local ku=k[unicode] - if ku then - k[u]=ku - n=n+1 - end - end - end - end - if trace_loading then - report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n) - end - end - end - end + duplicate.comment=format("copy of U+%05X",unicode) + descriptions[u]=duplicate + local n=0 + for _,description in next,descriptions do + if kerns then + local kerns=description.kerns + for _,k in next,kerns do + local ku=k[unicode] + if ku then + k[u]=ku + n=n+1 + end + end + end + end + if trace_loading then + report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n) + end + end + end + end end actions["analyze glyphs"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local metadata=data.metadata - local properties=data.properties - local hasitalics=false - local widths={} + local descriptions=data.descriptions + local resources=data.resources + local metadata=data.metadata + local properties=data.properties + local hasitalics=false + local widths={} local marks={} - for unicode,description in next,descriptions do - local glyph=description.glyph - local italic=glyph.italic_correction + for 
unicode,description in next,descriptions do + local glyph=description.glyph + local italic=glyph.italic_correction if not italic then elseif italic==0 then - else - description.italic=italic - hasitalics=true - end - local width=glyph.width - widths[width]=(widths[width] or 0)+1 - local class=glyph.class - if class then - if class=="mark" then - marks[unicode]=true - end - description.class=class - end + else + description.italic=italic + hasitalics=true + end + local width=glyph.width + widths[width]=(widths[width] or 0)+1 + local class=glyph.class + if class then + if class=="mark" then + marks[unicode]=true + end + description.class=class + end end properties.hasitalics=hasitalics resources.marks=marks - local wd,most=0,1 - for k,v in next,widths do - if v>most then - wd,most=k,v - end - end + local wd,most=0,1 + for k,v in next,widths do + if v>most then + wd,most=k,v + end + end if most>1000 then - if trace_loading then - report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most) - end - for unicode,description in next,descriptions do + if trace_loading then + report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most) + end + for unicode,description in next,descriptions do if description.width==wd then - else - description.width=description.glyph.width - end - end - resources.defaultwidth=wd - else - for unicode,description in next,descriptions do - description.width=description.glyph.width - end - end -end -actions["reorganize mark classes"]=function(data,filename,raw) - local mark_classes=raw.mark_classes - if mark_classes then - local resources=data.resources - local unicodes=resources.unicodes - local markclasses={} + else + description.width=description.glyph.width + end + end + resources.defaultwidth=wd + else + for unicode,description in next,descriptions do + description.width=description.glyph.width + end + end +end +actions["reorganize mark classes"]=function(data,filename,raw) + local mark_classes=raw.mark_classes + if mark_classes then + local resources=data.resources + local unicodes=resources.unicodes + local markclasses={} resources.markclasses=markclasses - for name,class in next,mark_classes do - local t={} - for s in gmatch(class,"[^ ]+") do - t[unicodes[s]]=true - end - markclasses[name]=t - end - end + for name,class in next,mark_classes do + local t={} + for s in gmatch(class,"[^ ]+") do + t[unicodes[s]]=true + end + markclasses[name]=t + end + end end actions["reorganize features"]=function(data,filename,raw) - local features={} - data.resources.features=features - for k,what in next,otf.glists do - local dw=raw[what] - if dw then - local f={} - features[what]=f - for i=1,#dw do - local d=dw[i] - local dfeatures=d.features - if dfeatures then - for i=1,#dfeatures do - local df=dfeatures[i] - local tag=strip(lower(df.tag)) - local ft=f[tag] - if not ft then - ft={} - f[tag]=ft - end - local dscripts=df.scripts - for i=1,#dscripts do - local d=dscripts[i] - local languages=d.langs - local script=strip(lower(d.script)) - local fts=ft[script] if not fts then fts={} ft[script]=fts end - for i=1,#languages do - fts[strip(lower(languages[i]))]=true - end - end - end - end - end - end - end -end -actions["reorganize anchor classes"]=function(data,filename,raw) - local resources=data.resources - local anchor_to_lookup={} - local lookup_to_anchor={} - resources.anchor_to_lookup=anchor_to_lookup - resources.lookup_to_anchor=lookup_to_anchor + local features={} + data.resources.features=features + for k,what in next,otf.glists do + local 
dw=raw[what] + if dw then + local f={} + features[what]=f + for i=1,#dw do + local d=dw[i] + local dfeatures=d.features + if dfeatures then + for i=1,#dfeatures do + local df=dfeatures[i] + local tag=strip(lower(df.tag)) + local ft=f[tag] + if not ft then + ft={} + f[tag]=ft + end + local dscripts=df.scripts + for i=1,#dscripts do + local d=dscripts[i] + local languages=d.langs + local script=strip(lower(d.script)) + local fts=ft[script] if not fts then fts={} ft[script]=fts end + for i=1,#languages do + fts[strip(lower(languages[i]))]=true + end + end + end + end + end + end + end +end +actions["reorganize anchor classes"]=function(data,filename,raw) + local resources=data.resources + local anchor_to_lookup={} + local lookup_to_anchor={} + resources.anchor_to_lookup=anchor_to_lookup + resources.lookup_to_anchor=lookup_to_anchor local classes=raw.anchor_classes - if classes then - for c=1,#classes do - local class=classes[c] - local anchor=class.name - local lookups=class.lookup - if type(lookups)~="table" then - lookups={ lookups } - end - local a=anchor_to_lookup[anchor] - if not a then - a={} - anchor_to_lookup[anchor]=a - end - for l=1,#lookups do - local lookup=lookups[l] - local l=lookup_to_anchor[lookup] - if l then - l[anchor]=true - else - l={ [anchor]=true } - lookup_to_anchor[lookup]=l - end - a[lookup]=true - end - end - end -end -actions["prepare tounicode"]=function(data,filename,raw) - fonts.mappings.addtounicode(data,filename) -end -local g_directions={ - gsub_contextchain=1, + if classes then + for c=1,#classes do + local class=classes[c] + local anchor=class.name + local lookups=class.lookup + if type(lookups)~="table" then + lookups={ lookups } + end + local a=anchor_to_lookup[anchor] + if not a then + a={} + anchor_to_lookup[anchor]=a + end + for l=1,#lookups do + local lookup=lookups[l] + local l=lookup_to_anchor[lookup] + if l then + l[anchor]=true + else + l={ [anchor]=true } + lookup_to_anchor[lookup]=l + end + a[lookup]=true + end + end + end +end +actions["prepare tounicode"]=function(data,filename,raw) + fonts.mappings.addtounicode(data,filename) +end +local g_directions={ + gsub_contextchain=1, gpos_contextchain=1, - gsub_reversecontextchain=-1, - gpos_reversecontextchain=-1, + gsub_reversecontextchain=-1, + gpos_reversecontextchain=-1, } -local function supported(features) - for i=1,#features do - if features[i].ismac then - return false - end - end - return true -end -actions["reorganize subtables"]=function(data,filename,raw) - local resources=data.resources - local sequences={} - local lookups={} - local chainedfeatures={} - resources.sequences=sequences - resources.lookups=lookups - for _,what in next,otf.glists do - local dw=raw[what] - if dw then - for k=1,#dw do - local gk=dw[k] +local function supported(features) + for i=1,#features do + if features[i].ismac then + return false + end + end + return true +end +actions["reorganize subtables"]=function(data,filename,raw) + local resources=data.resources + local sequences={} + local lookups={} + local chainedfeatures={} + resources.sequences=sequences + resources.lookups=lookups + for _,what in next,otf.glists do + local dw=raw[what] + if dw then + for k=1,#dw do + local gk=dw[k] local features=gk.features if not features or supported(features) then - local typ=gk.type - local chain=g_directions[typ] or 0 - local subtables=gk.subtables - if subtables then - local t={} - for s=1,#subtables do - t[s]=subtables[s].name - end - subtables=t - end - local flags,markclass=gk.flags,nil - if flags then + local 
typ=gk.type + local chain=g_directions[typ] or 0 + local subtables=gk.subtables + if subtables then + local t={} + for s=1,#subtables do + t[s]=subtables[s].name + end + subtables=t + end + local flags,markclass=gk.flags,nil + if flags then local t={ - (flags.ignorecombiningmarks and "mark") or false, - (flags.ignoreligatures and "ligature") or false, - (flags.ignorebaseglyphs and "base") or false, - flags.r2l or false, - } - markclass=flags.mark_class - if markclass then - markclass=resources.markclasses[markclass] - end - flags=t + (flags.ignorecombiningmarks and "mark") or false, + (flags.ignoreligatures and "ligature") or false, + (flags.ignorebaseglyphs and "base") or false, + flags.r2l or false, + } + markclass=flags.mark_class + if markclass then + markclass=resources.markclasses[markclass] + end + flags=t end local name=gk.name if not name then - report_otf("skipping weird lookup number %s",k) + report_otf("skipping weird lookup number %s",k) elseif features then - local f={} - for i=1,#features do - local df=features[i] - local tag=strip(lower(df.tag)) - local ft=f[tag] if not ft then ft={} f[tag]=ft end - local dscripts=df.scripts - for i=1,#dscripts do - local d=dscripts[i] - local languages=d.langs - local script=strip(lower(d.script)) - local fts=ft[script] if not fts then fts={} ft[script]=fts end - for i=1,#languages do - fts[strip(lower(languages[i]))]=true - end - end - end - sequences[#sequences+1]={ - type=typ, - chain=chain, - flags=flags, - name=name, - subtables=subtables, - markclass=markclass, - features=f, - } - else - lookups[name]={ - type=typ, - chain=chain, - flags=flags, - subtables=subtables, - markclass=markclass, - } - end - end - end - end - end -end -actions["prepare lookups"]=function(data,filename,raw) - local lookups=raw.lookups - if lookups then - data.lookups=lookups - end -end -local function t_uncover(splitter,cache,covers) - local result={} - for n=1,#covers do - local cover=covers[n] - local uncovered=cache[cover] - if not uncovered then - uncovered=lpegmatch(splitter,cover) - cache[cover]=uncovered - end - result[n]=uncovered - end - return result -end -local function s_uncover(splitter,cache,cover) - if cover=="" then - return nil - else - local uncovered=cache[cover] - if not uncovered then + local f={} + for i=1,#features do + local df=features[i] + local tag=strip(lower(df.tag)) + local ft=f[tag] if not ft then ft={} f[tag]=ft end + local dscripts=df.scripts + for i=1,#dscripts do + local d=dscripts[i] + local languages=d.langs + local script=strip(lower(d.script)) + local fts=ft[script] if not fts then fts={} ft[script]=fts end + for i=1,#languages do + fts[strip(lower(languages[i]))]=true + end + end + end + sequences[#sequences+1]={ + type=typ, + chain=chain, + flags=flags, + name=name, + subtables=subtables, + markclass=markclass, + features=f, + } + else + lookups[name]={ + type=typ, + chain=chain, + flags=flags, + subtables=subtables, + markclass=markclass, + } + end + end + end + end + end +end +actions["prepare lookups"]=function(data,filename,raw) + local lookups=raw.lookups + if lookups then + data.lookups=lookups + end +end +local function t_uncover(splitter,cache,covers) + local result={} + for n=1,#covers do + local cover=covers[n] + local uncovered=cache[cover] + if not uncovered then + uncovered=lpegmatch(splitter,cover) + cache[cover]=uncovered + end + result[n]=uncovered + end + return result +end +local function s_uncover(splitter,cache,cover) + if cover=="" then + return nil + else + local uncovered=cache[cover] + if not 
uncovered then uncovered=lpegmatch(splitter,cover) - cache[cover]=uncovered - end - return { uncovered } - end -end -local function t_hashed(t,cache) - if t then - local ht={} - for i=1,#t do - local ti=t[i] - local tih=cache[ti] - if not tih then - tih={} - for i=1,#ti do - tih[ti[i]]=true - end - cache[ti]=tih - end - ht[i]=tih - end - return ht - else - return nil - end -end -local function s_hashed(t,cache) - if t then - local ht={} - local tf=t[1] - for i=1,#tf do - ht[i]={ [tf[i]]=true } - end - return ht - else - return nil - end -end -local function r_uncover(splitter,cache,cover,replacements) - if cover=="" then - return nil + cache[cover]=uncovered + end + return { uncovered } + end +end +local function t_hashed(t,cache) + if t then + local ht={} + for i=1,#t do + local ti=t[i] + local tih=cache[ti] + if not tih then + tih={} + for i=1,#ti do + tih[ti[i]]=true + end + cache[ti]=tih + end + ht[i]=tih + end + return ht + else + return nil + end +end +local function s_hashed(t,cache) + if t then + local ht={} + local tf=t[1] + for i=1,#tf do + ht[i]={ [tf[i]]=true } + end + return ht + else + return nil + end +end +local function r_uncover(splitter,cache,cover,replacements) + if cover=="" then + return nil else - local uncovered=cover[1] - local replaced=cache[replacements] - if not replaced then - replaced=lpegmatch(splitter,replacements) - cache[replacements]=replaced - end - local nu,nr=#uncovered,#replaced - local r={} - if nu==nr then - for i=1,nu do - r[uncovered[i]]=replaced[i] - end - end - return r - end + local uncovered=cover[1] + local replaced=cache[replacements] + if not replaced then + replaced=lpegmatch(splitter,replacements) + cache[replacements]=replaced + end + local nu,nr=#uncovered,#replaced + local r={} + if nu==nr then + for i=1,nu do + r[uncovered[i]]=replaced[i] + end + end + return r + end end actions["reorganize lookups"]=function(data,filename,raw) - if data.lookups then - local splitter=data.helpers.tounicodetable - local t_u_cache={} + if data.lookups then + local splitter=data.helpers.tounicodetable + local t_u_cache={} local s_u_cache=t_u_cache - local t_h_cache={} + local t_h_cache={} local s_h_cache=t_h_cache local r_u_cache={} - for _,lookup in next,data.lookups do - local rules=lookup.rules - if rules then - local format=lookup.format - if format=="class" then - local before_class=lookup.before_class - if before_class then - before_class=t_uncover(splitter,t_u_cache,reversed(before_class)) - end - local current_class=lookup.current_class - if current_class then - current_class=t_uncover(splitter,t_u_cache,current_class) - end - local after_class=lookup.after_class - if after_class then - after_class=t_uncover(splitter,t_u_cache,after_class) - end - for i=1,#rules do - local rule=rules[i] - local class=rule.class - local before=class.before - if before then - for i=1,#before do - before[i]=before_class[before[i]] or {} - end - rule.before=t_hashed(before,t_h_cache) - end - local current=class.current - local lookups=rule.lookups - if current then - for i=1,#current do + for _,lookup in next,data.lookups do + local rules=lookup.rules + if rules then + local format=lookup.format + if format=="class" then + local before_class=lookup.before_class + if before_class then + before_class=t_uncover(splitter,t_u_cache,reversed(before_class)) + end + local current_class=lookup.current_class + if current_class then + current_class=t_uncover(splitter,t_u_cache,current_class) + end + local after_class=lookup.after_class + if after_class then + 
after_class=t_uncover(splitter,t_u_cache,after_class) + end + for i=1,#rules do + local rule=rules[i] + local class=rule.class + local before=class.before + if before then + for i=1,#before do + before[i]=before_class[before[i]] or {} + end + rule.before=t_hashed(before,t_h_cache) + end + local current=class.current + local lookups=rule.lookups + if current then + for i=1,#current do current[i]=current_class[current[i]] or {} - if lookups and not lookups[i] then + if lookups and not lookups[i] then lookups[i]="" end - end - rule.current=t_hashed(current,t_h_cache) - end - local after=class.after - if after then - for i=1,#after do - after[i]=after_class[after[i]] or {} - end - rule.after=t_hashed(after,t_h_cache) - end - rule.class=nil - end - lookup.before_class=nil - lookup.current_class=nil - lookup.after_class=nil - lookup.format="coverage" - elseif format=="coverage" then - for i=1,#rules do - local rule=rules[i] - local coverage=rule.coverage - if coverage then - local before=coverage.before - if before then - before=t_uncover(splitter,t_u_cache,reversed(before)) - rule.before=t_hashed(before,t_h_cache) - end - local current=coverage.current - if current then + end + rule.current=t_hashed(current,t_h_cache) + end + local after=class.after + if after then + for i=1,#after do + after[i]=after_class[after[i]] or {} + end + rule.after=t_hashed(after,t_h_cache) + end + rule.class=nil + end + lookup.before_class=nil + lookup.current_class=nil + lookup.after_class=nil + lookup.format="coverage" + elseif format=="coverage" then + for i=1,#rules do + local rule=rules[i] + local coverage=rule.coverage + if coverage then + local before=coverage.before + if before then + before=t_uncover(splitter,t_u_cache,reversed(before)) + rule.before=t_hashed(before,t_h_cache) + end + local current=coverage.current + if current then current=t_uncover(splitter,t_u_cache,current) - local lookups=rule.lookups - if lookups then - for i=1,#current do - if not lookups[i] then + local lookups=rule.lookups + if lookups then + for i=1,#current do + if not lookups[i] then lookups[i]="" - end - end + end + end end - rule.current=t_hashed(current,t_h_cache) - end - local after=coverage.after - if after then - after=t_uncover(splitter,t_u_cache,after) - rule.after=t_hashed(after,t_h_cache) - end - rule.coverage=nil - end - end + rule.current=t_hashed(current,t_h_cache) + end + local after=coverage.after + if after then + after=t_uncover(splitter,t_u_cache,after) + rule.after=t_hashed(after,t_h_cache) + end + rule.coverage=nil + end + end elseif format=="reversecoverage" then - for i=1,#rules do - local rule=rules[i] - local reversecoverage=rule.reversecoverage - if reversecoverage then - local before=reversecoverage.before - if before then - before=t_uncover(splitter,t_u_cache,reversed(before)) - rule.before=t_hashed(before,t_h_cache) - end - local current=reversecoverage.current - if current then - current=t_uncover(splitter,t_u_cache,current) - rule.current=t_hashed(current,t_h_cache) - end - local after=reversecoverage.after - if after then - after=t_uncover(splitter,t_u_cache,after) - rule.after=t_hashed(after,t_h_cache) - end - local replacements=reversecoverage.replacements - if replacements then - rule.replacements=r_uncover(splitter,r_u_cache,current,replacements) - end - rule.reversecoverage=nil - end - end + for i=1,#rules do + local rule=rules[i] + local reversecoverage=rule.reversecoverage + if reversecoverage then + local before=reversecoverage.before + if before then + 
before=t_uncover(splitter,t_u_cache,reversed(before)) + rule.before=t_hashed(before,t_h_cache) + end + local current=reversecoverage.current + if current then + current=t_uncover(splitter,t_u_cache,current) + rule.current=t_hashed(current,t_h_cache) + end + local after=reversecoverage.after + if after then + after=t_uncover(splitter,t_u_cache,after) + rule.after=t_hashed(after,t_h_cache) + end + local replacements=reversecoverage.replacements + if replacements then + rule.replacements=r_uncover(splitter,r_u_cache,current,replacements) + end + rule.reversecoverage=nil + end + end elseif format=="glyphs" then - for i=1,#rules do - local rule=rules[i] - local glyphs=rule.glyphs - if glyphs then - local fore=glyphs.fore - if fore and fore~="" then - fore=s_uncover(splitter,s_u_cache,fore) - rule.before=s_hashed(fore,s_h_cache) - end - local back=glyphs.back - if back then - back=s_uncover(splitter,s_u_cache,back) - rule.after=s_hashed(back,s_h_cache) - end - local names=glyphs.names - if names then - names=s_uncover(splitter,s_u_cache,names) - rule.current=s_hashed(names,s_h_cache) - end - rule.glyphs=nil - end - end - end - end - end - end -end -local function check_variants(unicode,the_variants,splitter,unicodes) - local variants=the_variants.variants + for i=1,#rules do + local rule=rules[i] + local glyphs=rule.glyphs + if glyphs then + local fore=glyphs.fore + if fore and fore~="" then + fore=s_uncover(splitter,s_u_cache,fore) + rule.before=s_hashed(fore,s_h_cache) + end + local back=glyphs.back + if back then + back=s_uncover(splitter,s_u_cache,back) + rule.after=s_hashed(back,s_h_cache) + end + local names=glyphs.names + if names then + names=s_uncover(splitter,s_u_cache,names) + rule.current=s_hashed(names,s_h_cache) + end + rule.glyphs=nil + end + end + end + end + end + end +end +local function check_variants(unicode,the_variants,splitter,unicodes) + local variants=the_variants.variants if variants then - local glyphs=lpegmatch(splitter,variants) - local done={ [unicode]=true } - local n=0 - for i=1,#glyphs do - local g=glyphs[i] - if done[g] then - report_otf("skipping cyclic reference %U in math variant %U",g,unicode) - else - if n==0 then - n=1 - variants={ g } - else - n=n+1 - variants[n]=g - end - done[g]=true - end - end - if n==0 then - variants=nil - end - end - local parts=the_variants.parts - if parts then - local p=#parts - if p>0 then - for i=1,p do - local pi=parts[i] - pi.glyph=unicodes[pi.component] or 0 - pi.component=nil - end - else - parts=nil - end - end - local italic_correction=the_variants.italic_correction - if italic_correction and italic_correction==0 then - italic_correction=nil - end - return variants,parts,italic_correction -end -actions["analyze math"]=function(data,filename,raw) - if raw.math then - data.metadata.math=raw.math - local unicodes=data.resources.unicodes - local splitter=data.helpers.tounicodetable - for unicode,description in next,data.descriptions do - local glyph=description.glyph + local glyphs=lpegmatch(splitter,variants) + local done={ [unicode]=true } + local n=0 + for i=1,#glyphs do + local g=glyphs[i] + if done[g] then + report_otf("skipping cyclic reference %U in math variant %U",g,unicode) + else + if n==0 then + n=1 + variants={ g } + else + n=n+1 + variants[n]=g + end + done[g]=true + end + end + if n==0 then + variants=nil + end + end + local parts=the_variants.parts + if parts then + local p=#parts + if p>0 then + for i=1,p do + local pi=parts[i] + pi.glyph=unicodes[pi.component] or 0 + pi.component=nil + end + else + 
parts=nil + end + end + local italic_correction=the_variants.italic_correction + if italic_correction and italic_correction==0 then + italic_correction=nil + end + return variants,parts,italic_correction +end +actions["analyze math"]=function(data,filename,raw) + if raw.math then + data.metadata.math=raw.math + local unicodes=data.resources.unicodes + local splitter=data.helpers.tounicodetable + for unicode,description in next,data.descriptions do + local glyph=description.glyph local mathkerns=glyph.mathkern - local horiz_variants=glyph.horiz_variants - local vert_variants=glyph.vert_variants - local top_accent=glyph.top_accent - if mathkerns or horiz_variants or vert_variants or top_accent then - local math={} - if top_accent then - math.top_accent=top_accent - end - if mathkerns then - for k,v in next,mathkerns do - if not next(v) then - mathkerns[k]=nil - else - for k,v in next,v do - if v==0 then + local horiz_variants=glyph.horiz_variants + local vert_variants=glyph.vert_variants + local top_accent=glyph.top_accent + if mathkerns or horiz_variants or vert_variants or top_accent then + local math={} + if top_accent then + math.top_accent=top_accent + end + if mathkerns then + for k,v in next,mathkerns do + if not next(v) then + mathkerns[k]=nil + else + for k,v in next,v do + if v==0 then k[v]=nil - end - end - end - end - math.kerns=mathkerns - end - if horiz_variants then - math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes) - end - if vert_variants then - math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes) - end - local italic_correction=description.italic - if italic_correction and italic_correction~=0 then - math.italic_correction=italic_correction - end - description.math=math - end - end - end -end -actions["reorganize glyph kerns"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local unicodes=resources.unicodes - for unicode,description in next,descriptions do - local kerns=description.glyph.kerns - if kerns then - local newkerns={} - for k,kern in next,kerns do - local name=kern.char - local offset=kern.off - local lookup=kern.lookup - if name and offset and lookup then - local unicode=unicodes[name] - if unicode then - if type(lookup)=="table" then - for l=1,#lookup do - local lookup=lookup[l] - local lookupkerns=newkerns[lookup] - if lookupkerns then - lookupkerns[unicode]=offset - else - newkerns[lookup]={ [unicode]=offset } - end - end - else - local lookupkerns=newkerns[lookup] - if lookupkerns then - lookupkerns[unicode]=offset - else - newkerns[lookup]={ [unicode]=offset } - end - end - elseif trace_loading then - report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode) - end - end - end - description.kerns=newkerns - end - end -end -actions["merge kern classes"]=function(data,filename,raw) - local gposlist=raw.gpos - if gposlist then - local descriptions=data.descriptions - local resources=data.resources - local unicodes=resources.unicodes - local splitter=data.helpers.tounicodetable - for gp=1,#gposlist do - local gpos=gposlist[gp] - local subtables=gpos.subtables - if subtables then - for s=1,#subtables do - local subtable=subtables[s] + end + end + end + end + math.kerns=mathkerns + end + if horiz_variants then + math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes) + end + if 
vert_variants then + math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes) + end + local italic_correction=description.italic + if italic_correction and italic_correction~=0 then + math.italic_correction=italic_correction + end + description.math=math + end + end + end +end +actions["reorganize glyph kerns"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local unicodes=resources.unicodes + for unicode,description in next,descriptions do + local kerns=description.glyph.kerns + if kerns then + local newkerns={} + for k,kern in next,kerns do + local name=kern.char + local offset=kern.off + local lookup=kern.lookup + if name and offset and lookup then + local unicode=unicodes[name] + if unicode then + if type(lookup)=="table" then + for l=1,#lookup do + local lookup=lookup[l] + local lookupkerns=newkerns[lookup] + if lookupkerns then + lookupkerns[unicode]=offset + else + newkerns[lookup]={ [unicode]=offset } + end + end + else + local lookupkerns=newkerns[lookup] + if lookupkerns then + lookupkerns[unicode]=offset + else + newkerns[lookup]={ [unicode]=offset } + end + end + elseif trace_loading then + report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode) + end + end + end + description.kerns=newkerns + end + end +end +actions["merge kern classes"]=function(data,filename,raw) + local gposlist=raw.gpos + if gposlist then + local descriptions=data.descriptions + local resources=data.resources + local unicodes=resources.unicodes + local splitter=data.helpers.tounicodetable + for gp=1,#gposlist do + local gpos=gposlist[gp] + local subtables=gpos.subtables + if subtables then + for s=1,#subtables do + local subtable=subtables[s] local kernclass=subtable.kernclass if kernclass then local split={} - for k=1,#kernclass do - local kcl=kernclass[k] - local firsts=kcl.firsts - local seconds=kcl.seconds - local offsets=kcl.offsets + for k=1,#kernclass do + local kcl=kernclass[k] + local firsts=kcl.firsts + local seconds=kcl.seconds + local offsets=kcl.offsets local lookups=kcl.lookup - if type(lookups)~="table" then - lookups={ lookups } - end - for n,s in next,firsts do - split[s]=split[s] or lpegmatch(splitter,s) - end - local maxseconds=0 - for n,s in next,seconds do - if n>maxseconds then - maxseconds=n - end - split[s]=split[s] or lpegmatch(splitter,s) + if type(lookups)~="table" then + lookups={ lookups } end - for l=1,#lookups do - local lookup=lookups[l] + for n,s in next,firsts do + split[s]=split[s] or lpegmatch(splitter,s) + end + local maxseconds=0 + for n,s in next,seconds do + if n>maxseconds then + maxseconds=n + end + split[s]=split[s] or lpegmatch(splitter,s) + end + for l=1,#lookups do + local lookup=lookups[l] for fk=1,#firsts do - local fv=firsts[fk] - local splt=split[fv] - if splt then - local extrakerns={} - local baseoffset=(fk-1)*maxseconds + local fv=firsts[fk] + local splt=split[fv] + if splt then + local extrakerns={} + local baseoffset=(fk-1)*maxseconds for sk=2,maxseconds do local sv=seconds[sk] - local splt=split[sv] + local splt=split[sv] if splt then - local offset=offsets[baseoffset+sk] - if offset then - for i=1,#splt do - extrakerns[splt[i]]=offset - end - end - end - end - for i=1,#splt do - local first_unicode=splt[i] - local description=descriptions[first_unicode] - if description then - local kerns=description.kerns - if not kerns then + local offset=offsets[baseoffset+sk] + if offset then + for i=1,#splt do + 
extrakerns[splt[i]]=offset + end + end + end + end + for i=1,#splt do + local first_unicode=splt[i] + local description=descriptions[first_unicode] + if description then + local kerns=description.kerns + if not kerns then kerns={} - description.kerns=kerns - end - local lookupkerns=kerns[lookup] - if not lookupkerns then - lookupkerns={} - kerns[lookup]=lookupkerns - end - for second_unicode,kern in next,extrakerns do - lookupkerns[second_unicode]=kern - end - elseif trace_loading then - report_otf("no glyph data for %U",first_unicode) - end - end - end - end - end - end - subtable.kernclass={} - end - end - end - end - end -end -actions["check glyphs"]=function(data,filename,raw) - for unicode,description in next,data.descriptions do - description.glyph=nil - end -end -actions["check metadata"]=function(data,filename,raw) - local metadata=data.metadata - for _,k in next,mainfields do - if valid_fields[k] then - local v=raw[k] - if not metadata[k] then - metadata[k]=v - end - end - end - local ttftables=metadata.ttf_tables - if ttftables then - for i=1,#ttftables do - ttftables[i].data="deleted" - end - end -end -actions["cleanup tables"]=function(data,filename,raw) + description.kerns=kerns + end + local lookupkerns=kerns[lookup] + if not lookupkerns then + lookupkerns={} + kerns[lookup]=lookupkerns + end + for second_unicode,kern in next,extrakerns do + lookupkerns[second_unicode]=kern + end + elseif trace_loading then + report_otf("no glyph data for %U",first_unicode) + end + end + end + end + end + end + subtable.kernclass={} + end + end + end + end + end +end +actions["check glyphs"]=function(data,filename,raw) + for unicode,description in next,data.descriptions do + description.glyph=nil + end +end +actions["check metadata"]=function(data,filename,raw) + local metadata=data.metadata + for _,k in next,mainfields do + if valid_fields[k] then + local v=raw[k] + if not metadata[k] then + metadata[k]=v + end + end + end + local ttftables=metadata.ttf_tables + if ttftables then + for i=1,#ttftables do + ttftables[i].data="deleted" + end + end +end +actions["cleanup tables"]=function(data,filename,raw) data.resources.indices=nil - data.helpers=nil + data.helpers=nil end -actions["reorganize glyph lookups"]=function(data,filename,raw) - local resources=data.resources - local unicodes=resources.unicodes - local descriptions=data.descriptions +actions["reorganize glyph lookups"]=function(data,filename,raw) + local resources=data.resources + local unicodes=resources.unicodes + local descriptions=data.descriptions local splitter=data.helpers.tounicodelist local lookuptypes=resources.lookuptypes - for unicode,description in next,descriptions do - local lookups=description.glyph.lookups - if lookups then - for tag,lookuplist in next,lookups do - for l=1,#lookuplist do - local lookup=lookuplist[l] - local specification=lookup.specification - local lookuptype=lookup.type - local lt=lookuptypes[tag] - if not lt then - lookuptypes[tag]=lookuptype - elseif lt~=lookuptype then - report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype) - end - if lookuptype=="ligature" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="alternate" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="substitution" then - lookuplist[l]=unicodes[specification.variant] - elseif lookuptype=="multiple" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="position" then - lookuplist[l]={ - 
specification.x or 0, - specification.y or 0, - specification.h or 0, - specification.v or 0 - } - elseif lookuptype=="pair" then - local one=specification.offsets[1] - local two=specification.offsets[2] - local paired=unicodes[specification.paired] - if one then - if two then - lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } } - else - lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } } - end - else - if two then + for unicode,description in next,descriptions do + local lookups=description.glyph.lookups + if lookups then + for tag,lookuplist in next,lookups do + for l=1,#lookuplist do + local lookup=lookuplist[l] + local specification=lookup.specification + local lookuptype=lookup.type + local lt=lookuptypes[tag] + if not lt then + lookuptypes[tag]=lookuptype + elseif lt~=lookuptype then + report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype) + end + if lookuptype=="ligature" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="alternate" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="substitution" then + lookuplist[l]=unicodes[specification.variant] + elseif lookuptype=="multiple" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="position" then + lookuplist[l]={ + specification.x or 0, + specification.y or 0, + specification.h or 0, + specification.v or 0 + } + elseif lookuptype=="pair" then + local one=specification.offsets[1] + local two=specification.offsets[2] + local paired=unicodes[specification.paired] + if one then + if two then + lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } } + else + lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } } + end + else + if two then lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} } - else - lookuplist[l]={ paired } - end - end - end - end - end - local slookups,mlookups - for tag,lookuplist in next,lookups do - if #lookuplist==1 then - if slookups then - slookups[tag]=lookuplist[1] - else - slookups={ [tag]=lookuplist[1] } - end - else - if mlookups then - mlookups[tag]=lookuplist - else - mlookups={ [tag]=lookuplist } - end - end - end - if slookups then - description.slookups=slookups - end - if mlookups then - description.mlookups=mlookups - end - end + else + lookuplist[l]={ paired } + end + end + end + end + end + local slookups,mlookups + for tag,lookuplist in next,lookups do + if #lookuplist==1 then + if slookups then + slookups[tag]=lookuplist[1] + else + slookups={ [tag]=lookuplist[1] } + end + else + if mlookups then + mlookups[tag]=lookuplist + else + mlookups={ [tag]=lookuplist } + end + end + end + if slookups then + description.slookups=slookups + end + if mlookups then + description.mlookups=mlookups + end + end end end actions["reorganize glyph anchors"]=function(data,filename,raw) - local descriptions=data.descriptions - for unicode,description in next,descriptions do - local anchors=description.glyph.anchors - if anchors then - for class,data in next,anchors do - if class=="baselig" then - for tag,specification in next,data do - for i=1,#specification do - local si=specification[i] - specification[i]={ si.x or 0,si.y or 0 } - end - end - else - for tag,specification in next,data do - data[tag]={ specification.x or 0,specification.y or 0 } - end - end - end - 
description.anchors=anchors - end - end -end -function otf.setfeatures(tfmdata,features) - local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) - if okay then - return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf) - else + local descriptions=data.descriptions + for unicode,description in next,descriptions do + local anchors=description.glyph.anchors + if anchors then + for class,data in next,anchors do + if class=="baselig" then + for tag,specification in next,data do + for i=1,#specification do + local si=specification[i] + specification[i]={ si.x or 0,si.y or 0 } + end + end + else + for tag,specification in next,data do + data[tag]={ specification.x or 0,specification.y or 0 } + end + end + end + description.anchors=anchors + end + end +end +function otf.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) + if okay then + return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf) + else return {} - end -end -local function copytotfm(data,cache_id) - if data then - local metadata=data.metadata - local resources=data.resources - local properties=derivetable(data.properties) - local descriptions=derivetable(data.descriptions) - local goodies=derivetable(data.goodies) - local characters={} - local parameters={} + end +end +local function copytotfm(data,cache_id) + if data then + local metadata=data.metadata + local resources=data.resources + local properties=derivetable(data.properties) + local descriptions=derivetable(data.descriptions) + local goodies=derivetable(data.goodies) + local characters={} + local parameters={} local mathparameters={} - local pfminfo=metadata.pfminfo or {} - local resources=data.resources + local pfminfo=metadata.pfminfo or {} + local resources=data.resources local unicodes=resources.unicodes - local spaceunits=500 - local spacer="space" - local designsize=metadata.designsize or metadata.design_size or 100 + local spaceunits=500 + local spacer="space" + local designsize=metadata.designsize or metadata.design_size or 100 local mathspecs=metadata.math - if designsize==0 then - designsize=100 - end - if mathspecs then - for name,value in next,mathspecs do - mathparameters[name]=value - end - end + if designsize==0 then + designsize=100 + end + if mathspecs then + for name,value in next,mathspecs do + mathparameters[name]=value + end + end for unicode,_ in next,data.descriptions do - characters[unicode]={} - end + characters[unicode]={} + end if mathspecs then - for unicode,character in next,characters do - local d=descriptions[unicode] - local m=d.math + for unicode,character in next,characters do + local d=descriptions[unicode] + local m=d.math if m then - local variants=m.horiz_variants + local variants=m.horiz_variants local parts=m.horiz_parts - if variants then - local c=character - for i=1,#variants do + if variants then + local c=character + for i=1,#variants do local un=variants[i] - c.next=un + c.next=un c=characters[un] end - c.horiz_variants=parts - elseif parts then - character.horiz_variants=parts - end - local variants=m.vert_variants + c.horiz_variants=parts + elseif parts then + character.horiz_variants=parts + end + local variants=m.vert_variants local parts=m.vert_parts - if variants then - local c=character - for i=1,#variants do + if variants then + local c=character + for i=1,#variants do local un=variants[i] - c.next=un + c.next=un c=characters[un] end - 
c.vert_variants=parts - elseif parts then - character.vert_variants=parts - end - local italic_correction=m.vert_italic_correction - if italic_correction then + c.vert_variants=parts + elseif parts then + character.vert_variants=parts + end + local italic_correction=m.vert_italic_correction + if italic_correction then character.vert_italic_correction=italic_correction - end - local top_accent=m.top_accent - if top_accent then - character.top_accent=top_accent - end - local kerns=m.kerns - if kerns then - character.mathkerns=kerns - end - end - end - end - local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced") + end + local top_accent=m.top_accent + if top_accent then + character.top_accent=top_accent + end + local kerns=m.kerns + if kerns then + character.mathkerns=kerns + end + end + end + end + local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced") local charwidth=pfminfo.avgwidth - local italicangle=metadata.italicangle - local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight - properties.monospaced=monospaced - parameters.italicangle=italicangle - parameters.charwidth=charwidth + local italicangle=metadata.italicangle + local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight + properties.monospaced=monospaced + parameters.italicangle=italicangle + parameters.charwidth=charwidth parameters.charxheight=charxheight local space=0x0020 local emdash=0x2014 - if monospaced then - if descriptions[space] then - spaceunits,spacer=descriptions[space].width,"space" - end - if not spaceunits and descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width,"emdash" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - else - if descriptions[space] then - spaceunits,spacer=descriptions[space].width,"space" - end - if not spaceunits and descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width/2,"emdash/2" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - end + if monospaced then + if descriptions[space] then + spaceunits,spacer=descriptions[space].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width,"emdash" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + else + if descriptions[space] then + spaceunits,spacer=descriptions[space].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width/2,"emdash/2" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + end spaceunits=tonumber(spaceunits) or 500 - local filename=constructors.checkedfilename(resources) - local fontname=metadata.fontname - local fullname=metadata.fullname or fontname + local filename=constructors.checkedfilename(resources) + local fontname=metadata.fontname + local fullname=metadata.fullname or fontname local units=metadata.units_per_em or 1000 if units==0 then - units=1000 - metadata.units_per_em=1000 + units=1000 + metadata.units_per_em=1000 end - parameters.slant=0 + parameters.slant=0 parameters.space=spaceunits parameters.space_stretch=units/2 parameters.space_shrink=1*units/3 parameters.x_height=2*units/5 parameters.quad=units if spaceunits<2*units/5 then - end - if italicangle then - parameters.italicangle=italicangle - 
parameters.italicfactor=math.cos(math.rad(90+italicangle)) - parameters.slant=- math.round(math.tan(italicangle*math.pi/180)) - end - if monospaced then - parameters.space_stretch=0 - parameters.space_shrink=0 + end + if italicangle then + parameters.italicangle=italicangle + parameters.italicfactor=math.cos(math.rad(90+italicangle)) + parameters.slant=- math.round(math.tan(italicangle*math.pi/180)) + end + if monospaced then + parameters.space_stretch=0 + parameters.space_shrink=0 elseif syncspace then - parameters.space_stretch=spaceunits/2 - parameters.space_shrink=spaceunits/3 - end + parameters.space_stretch=spaceunits/2 + parameters.space_shrink=spaceunits/3 + end parameters.extra_space=parameters.space_shrink - if charxheight then - parameters.x_height=charxheight - else + if charxheight then + parameters.x_height=charxheight + else local x=0x78 - if x then - local x=descriptions[x] - if x then - parameters.x_height=x.height - end - end - end - parameters.designsize=(designsize/10)*65536 - parameters.ascender=abs(metadata.ascent or 0) - parameters.descender=abs(metadata.descent or 0) + if x then + local x=descriptions[x] + if x then + parameters.x_height=x.height + end + end + end + parameters.designsize=(designsize/10)*65536 + parameters.ascender=abs(metadata.ascent or 0) + parameters.descender=abs(metadata.descent or 0) parameters.units=units - properties.space=spacer - properties.encodingbytes=2 - properties.format=data.format or fonts.formats[filename] or "opentype" - properties.noglyphnames=true - properties.filename=filename - properties.fontname=fontname - properties.fullname=fullname - properties.psname=fontname or fullname + properties.space=spacer + properties.encodingbytes=2 + properties.format=data.format or fonts.formats[filename] or "opentype" + properties.noglyphnames=true + properties.filename=filename + properties.fontname=fontname + properties.fullname=fullname + properties.psname=fontname or fullname properties.name=filename or fullname - return { - characters=characters, - descriptions=descriptions, - parameters=parameters, - mathparameters=mathparameters, - resources=resources, - properties=properties, - goodies=goodies, - } - end -end -local function otftotfm(specification) - local cache_id=specification.hash - local tfmdata=containers.read(constructors.cache,cache_id) - if not tfmdata then - local name=specification.name - local sub=specification.sub - local filename=specification.filename - local format=specification.format - local features=specification.features.normal - local rawdata=otf.load(filename,format,sub,features and features.featurefile) - if rawdata and next(rawdata) then - rawdata.lookuphash={} - tfmdata=copytotfm(rawdata,cache_id) + return { + characters=characters, + descriptions=descriptions, + parameters=parameters, + mathparameters=mathparameters, + resources=resources, + properties=properties, + goodies=goodies, + } + end +end +local function otftotfm(specification) + local cache_id=specification.hash + local tfmdata=containers.read(constructors.cache,cache_id) + if not tfmdata then + local name=specification.name + local sub=specification.sub + local filename=specification.filename + local format=specification.format + local features=specification.features.normal + local rawdata=otf.load(filename,format,sub,features and features.featurefile) + if rawdata and next(rawdata) then + rawdata.lookuphash={} + tfmdata=copytotfm(rawdata,cache_id) if tfmdata and next(tfmdata) then - local features=constructors.checkedfeatures("otf",features) - local 
shared=tfmdata.shared - if not shared then - shared={} - tfmdata.shared=shared - end + local features=constructors.checkedfeatures("otf",features) + local shared=tfmdata.shared + if not shared then + shared={} + tfmdata.shared=shared + end shared.rawdata=rawdata shared.dynamics={} - tfmdata.changed={} - shared.features=features - shared.processes=otf.setfeatures(tfmdata,features) - end - end - containers.write(constructors.cache,cache_id,tfmdata) - end - return tfmdata -end -local function read_from_otf(specification) - local tfmdata=otftotfm(specification) + tfmdata.changed={} + shared.features=features + shared.processes=otf.setfeatures(tfmdata,features) + end + end + containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata +end +local function read_from_otf(specification) + local tfmdata=otftotfm(specification) if tfmdata then - tfmdata.properties.name=specification.name + tfmdata.properties.name=specification.name tfmdata.properties.sub=specification.sub - tfmdata=constructors.scale(tfmdata,specification) - local allfeatures=tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) + tfmdata=constructors.scale(tfmdata,specification) + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) constructors.setname(tfmdata,specification) - fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) - end - return tfmdata + fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) + end + return tfmdata end -local function checkmathsize(tfmdata,mathsize) - local mathdata=tfmdata.shared.rawdata.metadata.math - local mathsize=tonumber(mathsize) +local function checkmathsize(tfmdata,mathsize) + local mathdata=tfmdata.shared.rawdata.metadata.math + local mathsize=tonumber(mathsize) if mathdata then - local parameters=tfmdata.parameters - parameters.scriptpercentage=mathdata.ScriptPercentScaleDown - parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown - parameters.mathsize=mathsize - end -end -registerotffeature { - name="mathsize", - description="apply mathsize specified in the font", - initializers={ - base=checkmathsize, - node=checkmathsize, - } + local parameters=tfmdata.parameters + parameters.scriptpercentage=mathdata.ScriptPercentScaleDown + parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown + parameters.mathsize=mathsize + end +end +registerotffeature { + name="mathsize", + description="apply mathsize specified in the font", + initializers={ + base=checkmathsize, + node=checkmathsize, + } } -function otf.collectlookups(rawdata,kind,script,language) - local sequences=rawdata.resources.sequences - if sequences then - local featuremap,featurelist={},{} - for s=1,#sequences do - local sequence=sequences[s] - local features=sequence.features - features=features and features[kind] - features=features and (features[script] or features[default] or features[wildcard]) - features=features and (features[language] or features[default] or features[wildcard]) - if features then - local subtables=sequence.subtables - if subtables then - for s=1,#subtables do - local ss=subtables[s] - if not featuremap[s] then - featuremap[ss]=true - featurelist[#featurelist+1]=ss - end - end - end - end - end - if #featurelist>0 then - return featuremap,featurelist - end - end - return nil,nil -end -local function 
check_otf(forced,specification,suffix,what) - local name=specification.name - if forced then - name=file.addsuffix(name,suffix,true) - end - local fullname=findbinfile(name,suffix) or "" - if fullname=="" then - fullname=fonts.names.getfilename(name,suffix) or "" - end - if fullname~="" then - specification.filename=fullname - specification.format=what - return read_from_otf(specification) - end -end -local function opentypereader(specification,suffix,what) - local forced=specification.forced or "" - if forced=="otf" then - return check_otf(true,specification,forced,"opentype") - elseif forced=="ttf" or forced=="ttc" or forced=="dfont" then - return check_otf(true,specification,forced,"truetype") - else - return check_otf(false,specification,suffix,what) - end +function otf.collectlookups(rawdata,kind,script,language) + local sequences=rawdata.resources.sequences + if sequences then + local featuremap,featurelist={},{} + for s=1,#sequences do + local sequence=sequences[s] + local features=sequence.features + features=features and features[kind] + features=features and (features[script] or features[default] or features[wildcard]) + features=features and (features[language] or features[default] or features[wildcard]) + if features then + local subtables=sequence.subtables + if subtables then + for s=1,#subtables do + local ss=subtables[s] + if not featuremap[s] then + featuremap[ss]=true + featurelist[#featurelist+1]=ss + end + end + end + end + end + if #featurelist>0 then + return featuremap,featurelist + end + end + return nil,nil +end +local function check_otf(forced,specification,suffix,what) + local name=specification.name + if forced then + name=file.addsuffix(name,suffix,true) + end + local fullname=findbinfile(name,suffix) or "" + if fullname=="" then + fullname=fonts.names.getfilename(name,suffix) or "" + end + if fullname~="" then + specification.filename=fullname + specification.format=what + return read_from_otf(specification) + end +end +local function opentypereader(specification,suffix,what) + local forced=specification.forced or "" + if forced=="otf" then + return check_otf(true,specification,forced,"opentype") + elseif forced=="ttf" or forced=="ttc" or forced=="dfont" then + return check_otf(true,specification,forced,"truetype") + else + return check_otf(false,specification,suffix,what) + end end readers.opentype=opentypereader local formats=fonts.formats -formats.otf="opentype" -formats.ttf="truetype" -formats.ttc="truetype" +formats.otf="opentype" +formats.ttf="truetype" +formats.ttc="truetype" formats.dfont="truetype" -function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end -function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end -function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end +function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end +function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end +function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end -function otf.scriptandlanguage(tfmdata,attr) - local properties=tfmdata.properties - return properties.script or "dflt",properties.language or "dflt" -end +function otf.scriptandlanguage(tfmdata,attr) + local properties=tfmdata.properties + return properties.script or 
"dflt",properties.language or "dflt" +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['font-otb']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local concat=table.concat -local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring=type,next,tonumber,tostring -local lpegmatch=lpeg.match +if not modules then modules={} end modules ['font-otb']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local concat=table.concat +local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip +local type,next,tonumber,tostring=type,next,tonumber,tostring +local lpegmatch=lpeg.match local utfchar=utf.char -local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end) -local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end) -local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end) -local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end) -local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end) -local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end) -local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end) +local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end) +local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end) +local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end) +local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end) +local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end) +local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end) +local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end) local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end) local report_prepare=logs.reporter("fonts","otf prepare") -local fonts=fonts +local fonts=fonts local otf=fonts.handlers.otf -local otffeatures=otf.features +local otffeatures=otf.features local registerotffeature=otffeatures.register otf.defaultbasealternate="none" -local wildcard="*" +local wildcard="*" local default="dflt" -local formatters=string.formatters -local f_unicode=formatters["%U"] -local f_uniname=formatters["%U (%s)"] +local formatters=string.formatters +local f_unicode=formatters["%U"] +local f_uniname=formatters["%U (%s)"] local f_unilist=formatters["% t (% t)"] -local function gref(descriptions,n) - if type(n)=="number" then - local name=descriptions[n].name - if name then - return f_uniname(n,name) - else - return f_unicode(n) - end - elseif n then - local num,nam={},{} - for i=2,#n do - local ni=n[i] +local function gref(descriptions,n) + if 
type(n)=="number" then + local name=descriptions[n].name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num,nam={},{} + for i=2,#n do + local ni=n[i] if tonumber(ni) then - local di=descriptions[ni] - num[i]=f_unicode(ni) - nam[i]=di and di.name or "-" - end - end - return f_unilist(num,nam) - else - return "" - end -end -local function cref(feature,lookupname) - if lookupname then - return formatters["feature %a, lookup %a"](feature,lookupname) - else - return formatters["feature %a"](feature) - end -end -local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment) - report_prepare("%s: base alternate %s => %s (%S => %S)", - cref(feature,lookupname), - gref(descriptions,unicode), - replacement and gref(descriptions,replacement), - value, - comment) -end -local function report_substitution(feature,lookupname,descriptions,unicode,substitution) - report_prepare("%s: base substitution %s => %S", - cref(feature,lookupname), - gref(descriptions,unicode), - gref(descriptions,substitution)) -end -local function report_ligature(feature,lookupname,descriptions,unicode,ligature) - report_prepare("%s: base ligature %s => %S", - cref(feature,lookupname), - gref(descriptions,ligature), - gref(descriptions,unicode)) -end -local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value) - report_prepare("%s: base kern %s + %s => %S", - cref(feature,lookupname), - gref(descriptions,unicode), - gref(descriptions,otherunicode), - value) -end -local basemethods={} + local di=descriptions[ni] + num[i]=f_unicode(ni) + nam[i]=di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end +local function cref(feature,lookupname) + if lookupname then + return formatters["feature %a, lookup %a"](feature,lookupname) + else + return formatters["feature %a"](feature) + end +end +local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment) + report_prepare("%s: base alternate %s => %s (%S => %S)", + cref(feature,lookupname), + gref(descriptions,unicode), + replacement and gref(descriptions,replacement), + value, + comment) +end +local function report_substitution(feature,lookupname,descriptions,unicode,substitution) + report_prepare("%s: base substitution %s => %S", + cref(feature,lookupname), + gref(descriptions,unicode), + gref(descriptions,substitution)) +end +local function report_ligature(feature,lookupname,descriptions,unicode,ligature) + report_prepare("%s: base ligature %s => %S", + cref(feature,lookupname), + gref(descriptions,ligature), + gref(descriptions,unicode)) +end +local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value) + report_prepare("%s: base kern %s + %s => %S", + cref(feature,lookupname), + gref(descriptions,unicode), + gref(descriptions,otherunicode), + value) +end +local basemethods={} local basemethod="" -local function applybasemethod(what,...) - local m=basemethods[basemethod][what] - if m then - return m(...) - end +local function applybasemethod(what,...) + local m=basemethods[basemethod][what] + if m then + return m(...) 
+ end end local basehash,basehashes,applied={},1,{} -local function registerbasehash(tfmdata) - local properties=tfmdata.properties - local hash=concat(applied," ") - local base=basehash[hash] - if not base then - basehashes=basehashes+1 - base=basehashes - basehash[hash]=base - end - properties.basehash=base +local function registerbasehash(tfmdata) + local properties=tfmdata.properties + local hash=concat(applied," ") + local base=basehash[hash] + if not base then + basehashes=basehashes+1 + base=basehashes + basehash[hash]=base + end + properties.basehash=base properties.fullname=properties.fullname.."-"..base - applied={} + applied={} end -local function registerbasefeature(feature,value) - applied[#applied+1]=feature.."="..tostring(value) +local function registerbasefeature(feature,value) + applied[#applied+1]=feature.."="..tostring(value) end local trace=false -local function finalize_ligatures(tfmdata,ligatures) - local nofligatures=#ligatures - if nofligatures>0 then - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local unicodes=resources.unicodes - local private=resources.private - local alldone=false - while not alldone do - local done=0 - for i=1,nofligatures do - local ligature=ligatures[i] - if ligature then - local unicode,lookupdata=ligature[1],ligature[2] - if trace then - trace_ligatures_detail("building % a into %a",lookupdata,unicode) - end - local size=#lookupdata +local function finalize_ligatures(tfmdata,ligatures) + local nofligatures=#ligatures + if nofligatures>0 then + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local unicodes=resources.unicodes + local private=resources.private + local alldone=false + while not alldone do + local done=0 + for i=1,nofligatures do + local ligature=ligatures[i] + if ligature then + local unicode,lookupdata=ligature[1],ligature[2] + if trace then + trace_ligatures_detail("building % a into %a",lookupdata,unicode) + end + local size=#lookupdata local firstcode=lookupdata[1] - local firstdata=characters[firstcode] - local okay=false - if firstdata then - local firstname="ctx_"..firstcode + local firstdata=characters[firstcode] + local okay=false + if firstdata then + local firstname="ctx_"..firstcode for i=1,size-1 do - local firstdata=characters[firstcode] - if not firstdata then - firstcode=private - if trace then - trace_ligatures_detail("defining %a as %a",firstname,firstcode) - end - unicodes[firstname]=firstcode - firstdata={ intermediate=true,ligatures={} } - characters[firstcode]=firstdata - descriptions[firstcode]={ name=firstname } - private=private+1 - end - local target - local secondcode=lookupdata[i+1] - local secondname=firstname.."_"..secondcode - if i==size-1 then - target=unicode - if not unicodes[secondname] then + local firstdata=characters[firstcode] + if not firstdata then + firstcode=private + if trace then + trace_ligatures_detail("defining %a as %a",firstname,firstcode) + end + unicodes[firstname]=firstcode + firstdata={ intermediate=true,ligatures={} } + characters[firstcode]=firstdata + descriptions[firstcode]={ name=firstname } + private=private+1 + end + local target + local secondcode=lookupdata[i+1] + local secondname=firstname.."_"..secondcode + if i==size-1 then + target=unicode + if not unicodes[secondname] then unicodes[secondname]=unicode - end - okay=true - else - target=unicodes[secondname] - if not target then - break - end - end - if trace then - 
trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target) - end - local firstligs=firstdata.ligatures - if firstligs then - firstligs[secondcode]={ char=target } - else - firstdata.ligatures={ [secondcode]={ char=target } } - end - firstcode=target - firstname=secondname - end - end - if okay then - ligatures[i]=false - done=done+1 - end - end - end - alldone=done==0 - end - if trace then - for k,v in next,characters do - if v.ligatures then table.print(v,k) end - end - end - tfmdata.resources.private=private - end -end -local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local changed=tfmdata.changed - local unicodes=resources.unicodes - local lookuphash=resources.lookuphash + end + okay=true + else + target=unicodes[secondname] + if not target then + break + end + end + if trace then + trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target) + end + local firstligs=firstdata.ligatures + if firstligs then + firstligs[secondcode]={ char=target } + else + firstdata.ligatures={ [secondcode]={ char=target } } + end + firstcode=target + firstname=secondname + end + end + if okay then + ligatures[i]=false + done=done+1 + end + end + end + alldone=done==0 + end + if trace then + for k,v in next,characters do + if v.ligatures then table.print(v,k) end + end + end + tfmdata.resources.private=private + end +end +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local changed=tfmdata.changed + local unicodes=resources.unicodes + local lookuphash=resources.lookuphash local lookuptypes=resources.lookuptypes - local ligatures={} - local alternate=tonumber(value) + local ligatures={} + local alternate=tonumber(value) local defaultalt=otf.defaultbasealternate - local trace_singles=trace_baseinit and trace_singles - local trace_alternatives=trace_baseinit and trace_alternatives + local trace_singles=trace_baseinit and trace_singles + local trace_alternatives=trace_baseinit and trace_alternatives local trace_ligatures=trace_baseinit and trace_ligatures - local actions={ - substitution=function(lookupdata,lookupname,description,unicode) - if trace_singles then - report_substitution(feature,lookupname,descriptions,unicode,lookupdata) - end - changed[unicode]=lookupdata - end, - alternate=function(lookupdata,lookupname,description,unicode) - local replacement=lookupdata[alternate] - if replacement then - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") - end - elseif defaultalt=="first" then - replacement=lookupdata[1] - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - elseif defaultalt=="last" then - replacement=lookupdata[#data] - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - else - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") - end - end - end, - ligature=function(lookupdata,lookupname,description,unicode) - if trace_ligatures then - 
report_ligature(feature,lookupname,descriptions,unicode,lookupdata) - end - ligatures[#ligatures+1]={ unicode,lookupdata } - end, + local actions={ + substitution=function(lookupdata,lookupname,description,unicode) + if trace_singles then + report_substitution(feature,lookupname,descriptions,unicode,lookupdata) + end + changed[unicode]=lookupdata + end, + alternate=function(lookupdata,lookupname,description,unicode) + local replacement=lookupdata[alternate] + if replacement then + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt=="first" then + replacement=lookupdata[1] + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt=="last" then + replacement=lookupdata[#data] + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + end, + ligature=function(lookupdata,lookupname,description,unicode) + if trace_ligatures then + report_ligature(feature,lookupname,descriptions,unicode,lookupdata) + end + ligatures[#ligatures+1]={ unicode,lookupdata } + end, } - for unicode,character in next,characters do - local description=descriptions[unicode] - local lookups=description.slookups - if lookups then - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookups[lookupname] - if lookupdata then - local lookuptype=lookuptypes[lookupname] - local action=actions[lookuptype] - if action then - action(lookupdata,lookupname,description,unicode) - end - end - end - end - local lookups=description.mlookups - if lookups then - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookuplist=lookups[lookupname] - if lookuplist then - local lookuptype=lookuptypes[lookupname] - local action=actions[lookuptype] - if action then - for i=1,#lookuplist do - action(lookuplist[i],lookupname,description,unicode) - end - end - end - end - end - end - finalize_ligatures(tfmdata,ligatures) + for unicode,character in next,characters do + local description=descriptions[unicode] + local lookups=description.slookups + if lookups then + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookups[lookupname] + if lookupdata then + local lookuptype=lookuptypes[lookupname] + local action=actions[lookuptype] + if action then + action(lookupdata,lookupname,description,unicode) + end + end + end + end + local lookups=description.mlookups + if lookups then + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookuplist=lookups[lookupname] + if lookuplist then + local lookuptype=lookuptypes[lookupname] + local action=actions[lookuptype] + if action then + for i=1,#lookuplist do + action(lookuplist[i],lookupname,description,unicode) + end + end + end + end + end + end + finalize_ligatures(tfmdata,ligatures) end local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local unicodes=resources.unicodes - local sharedkerns={} - local traceindeed=trace_baseinit and trace_kerns - for unicode,character in next,characters do - local description=descriptions[unicode] + local 
characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local unicodes=resources.unicodes + local sharedkerns={} + local traceindeed=trace_baseinit and trace_kerns + for unicode,character in next,characters do + local description=descriptions[unicode] local rawkerns=description.kerns - if rawkerns then - local s=sharedkerns[rawkerns] + if rawkerns then + local s=sharedkerns[rawkerns] if s==false then - elseif s then - character.kerns=s - else - local newkerns=character.kerns - local done=false - for l=1,#lookuplist do - local lookup=lookuplist[l] - local kerns=rawkerns[lookup] - if kerns then - for otherunicode,value in next,kerns do + elseif s then + character.kerns=s + else + local newkerns=character.kerns + local done=false + for l=1,#lookuplist do + local lookup=lookuplist[l] + local kerns=rawkerns[lookup] + if kerns then + for otherunicode,value in next,kerns do if value==0 then - elseif not newkerns then - newkerns={ [otherunicode]=value } - done=true - if traceindeed then - report_kern(feature,lookup,descriptions,unicode,otherunicode,value) - end + elseif not newkerns then + newkerns={ [otherunicode]=value } + done=true + if traceindeed then + report_kern(feature,lookup,descriptions,unicode,otherunicode,value) + end elseif not newkerns[otherunicode] then - newkerns[otherunicode]=value - done=true - if traceindeed then - report_kern(feature,lookup,descriptions,unicode,otherunicode,value) - end - end - end - end - end - if done then - sharedkerns[rawkerns]=newkerns + newkerns[otherunicode]=value + done=true + if traceindeed then + report_kern(feature,lookup,descriptions,unicode,otherunicode,value) + end + end + end + end + end + if done then + sharedkerns[rawkerns]=newkerns character.kerns=newkerns - else - sharedkerns[rawkerns]=false - end - end - end - end -end -basemethods.independent={ - preparesubstitutions=preparesubstitutions, - preparepositionings=preparepositionings, + else + sharedkerns[rawkerns]=false + end + end + end + end +end +basemethods.independent={ + preparesubstitutions=preparesubstitutions, + preparepositionings=preparepositionings, } -local function makefake(tfmdata,name,present) - local resources=tfmdata.resources - local private=resources.private - local character={ intermediate=true,ligatures={} } - resources.unicodes[name]=private - tfmdata.characters[private]=character - tfmdata.descriptions[private]={ name=name } - resources.private=private+1 - present[name]=private - return character -end -local function make_1(present,tree,name) - for k,v in next,tree do - if k=="ligature" then - present[name]=v - else - make_1(present,v,name.."_"..k) - end - end -end -local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname) - for k,v in next,tree do - if k=="ligature" then - local character=characters[preceding] - if not character then - if trace_baseinit then - report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding) - end - character=makefake(tfmdata,name,present) - end - local ligatures=character.ligatures - if ligatures then - ligatures[unicode]={ char=v } - else - character.ligatures={ [unicode]={ char=v } } - end - if done then - local d=done[lookupname] - if not d then - done[lookupname]={ "dummy",v } - else - d[#d+1]=v - end - end - else - local code=present[name] or unicode - local name=name.."_"..k - make_2(present,tfmdata,characters,v,name,code,k,done,lookupname) - end - end -end -local function 
preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local changed=tfmdata.changed - local lookuphash=resources.lookuphash +local function makefake(tfmdata,name,present) + local resources=tfmdata.resources + local private=resources.private + local character={ intermediate=true,ligatures={} } + resources.unicodes[name]=private + tfmdata.characters[private]=character + tfmdata.descriptions[private]={ name=name } + resources.private=private+1 + present[name]=private + return character +end +local function make_1(present,tree,name) + for k,v in next,tree do + if k=="ligature" then + present[name]=v + else + make_1(present,v,name.."_"..k) + end + end +end +local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname) + for k,v in next,tree do + if k=="ligature" then + local character=characters[preceding] + if not character then + if trace_baseinit then + report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding) + end + character=makefake(tfmdata,name,present) + end + local ligatures=character.ligatures + if ligatures then + ligatures[unicode]={ char=v } + else + character.ligatures={ [unicode]={ char=v } } + end + if done then + local d=done[lookupname] + if not d then + done[lookupname]={ "dummy",v } + else + d[#d+1]=v + end + end + else + local code=present[name] or unicode + local name=name.."_"..k + make_2(present,tfmdata,characters,v,name,code,k,done,lookupname) + end + end +end +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local changed=tfmdata.changed + local lookuphash=resources.lookuphash local lookuptypes=resources.lookuptypes - local ligatures={} - local alternate=tonumber(value) + local ligatures={} + local alternate=tonumber(value) local defaultalt=otf.defaultbasealternate - local trace_singles=trace_baseinit and trace_singles - local trace_alternatives=trace_baseinit and trace_alternatives + local trace_singles=trace_baseinit and trace_singles + local trace_alternatives=trace_baseinit and trace_alternatives local trace_ligatures=trace_baseinit and trace_ligatures - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookuphash[lookupname] - local lookuptype=lookuptypes[lookupname] - for unicode,data in next,lookupdata do - if lookuptype=="substitution" then - if trace_singles then - report_substitution(feature,lookupname,descriptions,unicode,data) - end - changed[unicode]=data - elseif lookuptype=="alternate" then - local replacement=data[alternate] - if replacement then - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") - end - elseif defaultalt=="first" then - replacement=data[1] - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - elseif defaultalt=="last" then - replacement=data[#data] - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - else - if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") - end - end - elseif lookuptype=="ligature" then - ligatures[#ligatures+1]={ 
unicode,data,lookupname } - if trace_ligatures then - report_ligature(feature,lookupname,descriptions,unicode,data) - end - end - end + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookuphash[lookupname] + local lookuptype=lookuptypes[lookupname] + for unicode,data in next,lookupdata do + if lookuptype=="substitution" then + if trace_singles then + report_substitution(feature,lookupname,descriptions,unicode,data) + end + changed[unicode]=data + elseif lookuptype=="alternate" then + local replacement=data[alternate] + if replacement then + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt=="first" then + replacement=data[1] + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt=="last" then + replacement=data[#data] + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + elseif lookuptype=="ligature" then + ligatures[#ligatures+1]={ unicode,data,lookupname } + if trace_ligatures then + report_ligature(feature,lookupname,descriptions,unicode,data) + end + end + end end local nofligatures=#ligatures if nofligatures>0 then - local characters=tfmdata.characters - local present={} + local characters=tfmdata.characters + local present={} local done=trace_baseinit and trace_ligatures and {} - for i=1,nofligatures do - local ligature=ligatures[i] - local unicode,tree=ligature[1],ligature[2] - make_1(present,tree,"ctx_"..unicode) + for i=1,nofligatures do + local ligature=ligatures[i] + local unicode,tree=ligature[1],ligature[2] + make_1(present,tree,"ctx_"..unicode) end - for i=1,nofligatures do - local ligature=ligatures[i] - local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3] - make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname) + for i=1,nofligatures do + local ligature=ligatures[i] + local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3] + make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname) end end end -local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local lookuphash=resources.lookuphash +local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local lookuphash=resources.lookuphash local traceindeed=trace_baseinit and trace_kerns - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookuphash[lookupname] - for unicode,data in next,lookupdata do - local character=characters[unicode] - local kerns=character.kerns - if not kerns then - kerns={} - character.kerns=kerns - end - if traceindeed then - for otherunicode,kern in next,data do - if not kerns[otherunicode] and kern~=0 then - kerns[otherunicode]=kern - report_kern(feature,lookup,descriptions,unicode,otherunicode,kern) - end - end - else - for otherunicode,kern in next,data do - if not kerns[otherunicode] and kern~=0 then - kerns[otherunicode]=kern - end - end - end 
- end - end -end -local function initializehashes(tfmdata) - nodeinitializers.features(tfmdata) -end -basemethods.shared={ - initializehashes=initializehashes, - preparesubstitutions=preparesubstitutions, - preparepositionings=preparepositionings, + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookuphash[lookupname] + for unicode,data in next,lookupdata do + local character=characters[unicode] + local kerns=character.kerns + if not kerns then + kerns={} + character.kerns=kerns + end + if traceindeed then + for otherunicode,kern in next,data do + if not kerns[otherunicode] and kern~=0 then + kerns[otherunicode]=kern + report_kern(feature,lookup,descriptions,unicode,otherunicode,kern) + end + end + else + for otherunicode,kern in next,data do + if not kerns[otherunicode] and kern~=0 then + kerns[otherunicode]=kern + end + end + end + end + end +end +local function initializehashes(tfmdata) + nodeinitializers.features(tfmdata) +end +basemethods.shared={ + initializehashes=initializehashes, + preparesubstitutions=preparesubstitutions, + preparepositionings=preparepositionings, } basemethod="independent" -local function featuresinitializer(tfmdata,value) +local function featuresinitializer(tfmdata,value) if true then - local t=trace_preparing and os.clock() - local features=tfmdata.shared.features - if features then - applybasemethod("initializehashes",tfmdata) - local collectlookups=otf.collectlookups - local rawdata=tfmdata.shared.rawdata - local properties=tfmdata.properties - local script=properties.script - local language=properties.language - local basesubstitutions=rawdata.resources.features.gsub - local basepositionings=rawdata.resources.features.gpos - if basesubstitutions then - for feature,data in next,basesubstitutions do - local value=features[feature] - if value then - local validlookups,lookuplist=collectlookups(rawdata,feature,script,language) - if validlookups then - applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) - registerbasefeature(feature,value) - end - end - end - end - if basepositionings then - for feature,data in next,basepositionings do - local value=features[feature] - if value then - local validlookups,lookuplist=collectlookups(rawdata,feature,script,language) - if validlookups then - applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist) - registerbasefeature(feature,value) - end - end - end - end - registerbasehash(tfmdata) - end - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname) - end - end -end -registerotffeature { - name="features", - description="features", - default=true, + local t=trace_preparing and os.clock() + local features=tfmdata.shared.features + if features then + applybasemethod("initializehashes",tfmdata) + local collectlookups=otf.collectlookups + local rawdata=tfmdata.shared.rawdata + local properties=tfmdata.properties + local script=properties.script + local language=properties.language + local basesubstitutions=rawdata.resources.features.gsub + local basepositionings=rawdata.resources.features.gpos + if basesubstitutions then + for feature,data in next,basesubstitutions do + local value=features[feature] + if value then + local validlookups,lookuplist=collectlookups(rawdata,feature,script,language) + if validlookups then + applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) + registerbasefeature(feature,value) + end + end + 
end + end + if basepositionings then + for feature,data in next,basepositionings do + local value=features[feature] + if value then + local validlookups,lookuplist=collectlookups(rawdata,feature,script,language) + if validlookups then + applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist) + registerbasefeature(feature,value) + end + end + end + end + registerbasehash(tfmdata) + end + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname) + end + end +end +registerotffeature { + name="features", + description="features", + default=true, initializers={ - base=featuresinitializer, - } + base=featuresinitializer, + } } -directives.register("fonts.otf.loader.basemethod",function(v) - if basemethods[v] then - basemethod=v - end -end) +directives.register("fonts.otf.loader.basemethod",function(v) + if basemethods[v] then + basemethod=v + end +end) end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['node-inj']={ - version=1.001, - comment="companion to node-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files", +if not modules then modules={} end modules ['node-inj']={ + version=1.001, + comment="companion to node-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", } -local next=next +local next=next local utfchar=utf.char local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end) local report_injections=logs.reporter("nodes","injections") local attributes,nodes,node=attributes,nodes,node -fonts=fonts +fonts=fonts local fontdata=fonts.hashes.identifiers -nodes.injections=nodes.injections or {} +nodes.injections=nodes.injections or {} local injections=nodes.injections -local nodecodes=nodes.nodecodes -local glyph_code=nodecodes.glyph -local kern_code=nodecodes.kern -local nodepool=nodes.pool +local nodecodes=nodes.nodecodes +local glyph_code=nodecodes.glyph +local kern_code=nodecodes.kern +local nodepool=nodes.pool local newkern=nodepool.kern -local traverse_id=node.traverse_id -local insert_node_before=node.insert_before +local traverse_id=node.traverse_id +local insert_node_before=node.insert_before local insert_node_after=node.insert_after -local a_kernpair=attributes.private('kernpair') -local a_ligacomp=attributes.private('ligacomp') -local a_markbase=attributes.private('markbase') -local a_markmark=attributes.private('markmark') -local a_markdone=attributes.private('markdone') -local a_cursbase=attributes.private('cursbase') -local a_curscurs=attributes.private('curscurs') +local a_kernpair=attributes.private('kernpair') +local a_ligacomp=attributes.private('ligacomp') +local a_markbase=attributes.private('markbase') +local a_markmark=attributes.private('markmark') +local a_markdone=attributes.private('markdone') +local a_cursbase=attributes.private('cursbase') +local a_curscurs=attributes.private('curscurs') local a_cursdone=attributes.private('cursdone') -function injections.installnewkern(nk) - newkern=nk or newkern +function injections.installnewkern(nk) + newkern=nk or newkern end -local cursives={} -local marks={} +local cursives={} +local marks={} local kerns={} -function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) - local 
dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2]) - local ws,wn=tfmstart.width,tfmnext.width - local bound=#cursives+1 - start[a_cursbase]=bound - nxt[a_curscurs]=bound - cursives[bound]={ rlmode,dx,dy,ws,wn } - return dx,dy,bound -end -function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) +function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) + local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2]) + local ws,wn=tfmstart.width,tfmnext.width + local bound=#cursives+1 + start[a_cursbase]=bound + nxt[a_curscurs]=bound + cursives[bound]={ rlmode,dx,dy,ws,wn } + return dx,dy,bound +end +function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4] - if x~=0 or w~=0 or y~=0 or h~=0 then - local bound=current[a_kernpair] - if bound then + if x~=0 or w~=0 or y~=0 or h~=0 then + local bound=current[a_kernpair] + if bound then local kb=kerns[bound] - kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h - else - bound=#kerns+1 - current[a_kernpair]=bound - kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width } - end - return x,y,w,h,bound - end + kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h + else + bound=#kerns+1 + current[a_kernpair]=bound + kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width } + end + return x,y,w,h,bound + end return x,y,w,h end -function injections.setkern(current,factor,rlmode,x,tfmchr) - local dx=factor*x - if dx~=0 then - local bound=#kerns+1 - current[a_kernpair]=bound - kerns[bound]={ rlmode,dx } - return dx,bound - else - return 0,0 - end +function injections.setkern(current,factor,rlmode,x,tfmchr) + local dx=factor*x + if dx~=0 then + local bound=#kerns+1 + current[a_kernpair]=bound + kerns[bound]={ rlmode,dx } + return dx,bound + else + return 0,0 + end end function injections.setmark(start,base,factor,rlmode,ba,ma,index) local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2]) local bound=base[a_markbase] - local index=1 - if bound then - local mb=marks[bound] + local index=1 + if bound then + local mb=marks[bound] if mb then - index=#mb+1 - mb[index]={ dx,dy,rlmode } - start[a_markmark]=bound - start[a_markdone]=index - return dx,dy,bound - else - report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound) - end - end - index=index or 1 - bound=#marks+1 - base[a_markbase]=bound - start[a_markmark]=bound - start[a_markdone]=index - marks[bound]={ [index]={ dx,dy,rlmode } } - return dx,dy,bound -end -local function dir(n) - return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" -end -local function trace(head) - report_injections("begin run") - for n in traverse_id(glyph_code,head) do - if n.subtype<256 then - local kp=n[a_kernpair] - local mb=n[a_markbase] - local mm=n[a_markmark] - local md=n[a_markdone] - local cb=n[a_cursbase] - local cc=n[a_curscurs] - local char=n.char - report_injections("font %s, char %U, glyph %c",n.font,char,char) - if kp then - local k=kerns[kp] - if k[3] then - report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) - else - report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) - end - end - if mb then - report_injections(" markbase: bound %a",mb) - end - if mm then - local m=marks[mm] - if mb then - local m=m[mb] - if m then - report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) - else - report_injections(" markmark: 
bound %a, missing index",mm) - end - else - m=m[1] - report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) - end - end - if cb then - report_injections(" cursbase: bound %a",cb) - end - if cc then - local c=cursives[cc] - report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) - end - end - end - report_injections("end run") -end -local function show_result(head) - local current=head - local skipping=false - while current do - local id=current.id - if id==glyph_code then - report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset) - skipping=false - elseif id==kern_code then - report_injections("kern: %p",current.kern) - skipping=false - elseif not skipping then - report_injections() - skipping=true - end - current=current.next - end -end -function injections.handler(head,where,keep) - local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns) - if has_marks or has_cursives then - if trace_injections then - trace(head) - end - local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0 + index=#mb+1 + mb[index]={ dx,dy,rlmode } + start[a_markmark]=bound + start[a_markdone]=index + return dx,dy,bound + else + report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound) + end + end + index=index or 1 + bound=#marks+1 + base[a_markbase]=bound + start[a_markmark]=bound + start[a_markdone]=index + marks[bound]={ [index]={ dx,dy,rlmode } } + return dx,dy,bound +end +local function dir(n) + return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" +end +local function trace(head) + report_injections("begin run") + for n in traverse_id(glyph_code,head) do + if n.subtype<256 then + local kp=n[a_kernpair] + local mb=n[a_markbase] + local mm=n[a_markmark] + local md=n[a_markdone] + local cb=n[a_cursbase] + local cc=n[a_curscurs] + local char=n.char + report_injections("font %s, char %U, glyph %c",n.font,char,char) + if kp then + local k=kerns[kp] + if k[3] then + report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) + else + report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) + end + end + if mb then + report_injections(" markbase: bound %a",mb) + end + if mm then + local m=marks[mm] + if mb then + local m=m[mb] + if m then + report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) + else + report_injections(" markmark: bound %a, missing index",mm) + end + else + m=m[1] + report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) + end + end + if cb then + report_injections(" cursbase: bound %a",cb) + end + if cc then + local c=cursives[cc] + report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) + end + end + end + report_injections("end run") +end +local function show_result(head) + local current=head + local skipping=false + while current do + local id=current.id + if id==glyph_code then + report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset) + skipping=false + elseif id==kern_code then + report_injections("kern: %p",current.kern) + skipping=false + elseif not skipping then + report_injections() + skipping=true + end + current=current.next + end +end +function injections.handler(head,where,keep) + local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns) + if has_marks or has_cursives then + if 
trace_injections then + trace(head) + end + local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0 if has_kerns then - local nf,tm=nil,nil + local nf,tm=nil,nil for n in traverse_id(glyph_code,head) do - if n.subtype<256 then - nofvalid=nofvalid+1 - valid[nofvalid]=n - if n.font~=nf then - nf=n.font - tm=fontdata[nf].resources.marks - end - if tm then - mk[n]=tm[n.char] - end - local k=n[a_kernpair] - if k then - local kk=kerns[k] - if kk then - local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0 - local dy=y-h - if dy~=0 then - ky[n]=dy - end - if w~=0 or x~=0 then - wx[n]=kk - end + if n.subtype<256 then + nofvalid=nofvalid+1 + valid[nofvalid]=n + if n.font~=nf then + nf=n.font + tm=fontdata[nf].resources.marks + end + if tm then + mk[n]=tm[n.char] + end + local k=n[a_kernpair] + if k then + local kk=kerns[k] + if kk then + local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0 + local dy=y-h + if dy~=0 then + ky[n]=dy + end + if w~=0 or x~=0 then + wx[n]=kk + end rl[n]=kk[1] - end - end - end - end - else - local nf,tm=nil,nil - for n in traverse_id(glyph_code,head) do - if n.subtype<256 then - nofvalid=nofvalid+1 - valid[nofvalid]=n - if n.font~=nf then - nf=n.font - tm=fontdata[nf].resources.marks - end - if tm then - mk[n]=tm[n.char] - end - end - end - end + end + end + end + end + else + local nf,tm=nil,nil + for n in traverse_id(glyph_code,head) do + if n.subtype<256 then + nofvalid=nofvalid+1 + valid[nofvalid]=n + if n.font~=nf then + nf=n.font + tm=fontdata[nf].resources.marks + end + if tm then + mk[n]=tm[n.char] + end + end + end + end if nofvalid>0 then - local cx={} - if has_kerns and next(ky) then - for n,k in next,ky do - n.yoffset=k - end + local cx={} + if has_kerns and next(ky) then + for n,k in next,ky do + n.yoffset=k + end end - if has_cursives then + if has_cursives then local p_cursbase,p=nil,nil - local t,d,maxt={},{},0 + local t,d,maxt={},{},0 for i=1,nofvalid do - local n=valid[i] - if not mk[n] then - local n_cursbase=n[a_cursbase] - if p_cursbase then - local n_curscurs=n[a_curscurs] - if p_cursbase==n_curscurs then - local c=cursives[n_curscurs] - if c then - local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5] - if rlmode>=0 then - dx=dx-ws - else - dx=dx+wn - end - if dx~=0 then - cx[n]=dx - rl[n]=rlmode + local n=valid[i] + if not mk[n] then + local n_cursbase=n[a_cursbase] + if p_cursbase then + local n_curscurs=n[a_curscurs] + if p_cursbase==n_curscurs then + local c=cursives[n_curscurs] + if c then + local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5] + if rlmode>=0 then + dx=dx-ws + else + dx=dx+wn + end + if dx~=0 then + cx[n]=dx + rl[n]=rlmode end dy=-dy - maxt=maxt+1 - t[maxt]=p - d[maxt]=dy - else - maxt=0 - end - end - elseif maxt>0 then - local ny=n.yoffset - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - ti.yoffset=ti.yoffset+ny - end - maxt=0 - end - if not n_cursbase and maxt>0 then - local ny=n.yoffset - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - ti.yoffset=ny - end - maxt=0 - end - p_cursbase,p=n_cursbase,n - end - end - if maxt>0 then - local ny=n.yoffset - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - ti.yoffset=ny - end - maxt=0 - end - if not keep then - cursives={} - end - end - if has_marks then - for i=1,nofvalid do - local p=valid[i] - local p_markbase=p[a_markbase] - if p_markbase then - local mrks=marks[p_markbase] - local nofmarks=#mrks - for n in traverse_id(glyph_code,p.next) do - local n_markmark=n[a_markmark] - if p_markbase==n_markmark then - local index=n[a_markdone] or 1 - local d=mrks[index] - 
if d then + maxt=maxt+1 + t[maxt]=p + d[maxt]=dy + else + maxt=0 + end + end + elseif maxt>0 then + local ny=n.yoffset + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + ti.yoffset=ti.yoffset+ny + end + maxt=0 + end + if not n_cursbase and maxt>0 then + local ny=n.yoffset + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + ti.yoffset=ny + end + maxt=0 + end + p_cursbase,p=n_cursbase,n + end + end + if maxt>0 then + local ny=n.yoffset + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + ti.yoffset=ny + end + maxt=0 + end + if not keep then + cursives={} + end + end + if has_marks then + for i=1,nofvalid do + local p=valid[i] + local p_markbase=p[a_markbase] + if p_markbase then + local mrks=marks[p_markbase] + local nofmarks=#mrks + for n in traverse_id(glyph_code,p.next) do + local n_markmark=n[a_markmark] + if p_markbase==n_markmark then + local index=n[a_markdone] or 1 + local d=mrks[index] + if d then local rlmode=d[3] - local k=wx[p] - if k then - local x=k[2] - local w=k[4] - if w then + local k=wx[p] + if k then + local x=k[2] + local w=k[4] + if w then if rlmode and rlmode>=0 then - n.xoffset=p.xoffset-p.width+d[1]-(w-x) + n.xoffset=p.xoffset-p.width+d[1]-(w-x) else - n.xoffset=p.xoffset-d[1]-x - end - else + n.xoffset=p.xoffset-d[1]-x + end + else if rlmode and rlmode>=0 then - n.xoffset=p.xoffset-p.width+d[1] + n.xoffset=p.xoffset-p.width+d[1] else - n.xoffset=p.xoffset-d[1]-x - end - end - else - if rlmode and rlmode>=0 then - n.xoffset=p.xoffset-p.width+d[1] - else - n.xoffset=p.xoffset-d[1] - end - end - if mk[p] then - n.yoffset=p.yoffset+d[2] - else - n.yoffset=n.yoffset+p.yoffset+d[2] + n.xoffset=p.xoffset-d[1]-x + end + end + else + if rlmode and rlmode>=0 then + n.xoffset=p.xoffset-p.width+d[1] + else + n.xoffset=p.xoffset-d[1] + end end - if nofmarks==1 then - break - else - nofmarks=nofmarks-1 + if mk[p] then + n.yoffset=p.yoffset+d[2] + else + n.yoffset=n.yoffset+p.yoffset+d[2] end - end + if nofmarks==1 then + break + else + nofmarks=nofmarks-1 + end + end else - end - end - end - end - if not keep then - marks={} - end - end - if next(wx) then + end + end + end + end + if not keep then + marks={} + end + end + if next(wx) then for n,k in next,wx do - local x=k[2] - local w=k[4] - if w then + local x=k[2] + local w=k[4] + if w then local rl=k[1] - local wx=w-x + local wx=w-x if rl<0 then - if wx~=0 then + if wx~=0 then insert_node_before(head,n,newkern(wx)) - end - if x~=0 then + end + if x~=0 then insert_node_after (head,n,newkern(x)) - end - else - if x~=0 then + end + else + if x~=0 then insert_node_before(head,n,newkern(x)) - end - if wx~=0 then + end + if wx~=0 then insert_node_after (head,n,newkern(wx)) - end - end + end + end elseif x~=0 then insert_node_before(head,n,newkern(x)) - end - end - end - if next(cx) then - for n,k in next,cx do - if k~=0 then - local rln=rl[n] - if rln and rln<0 then + end + end + end + if next(cx) then + for n,k in next,cx do + if k~=0 then + local rln=rl[n] + if rln and rln<0 then insert_node_before(head,n,newkern(-k)) - else + else insert_node_before(head,n,newkern(k)) - end - end - end - end - if not keep then - kerns={} - end - return head,true - elseif not keep then - kerns,cursives,marks={},{},{} - end - elseif has_kerns then - if trace_injections then - trace(head) - end - for n in traverse_id(glyph_code,head) do - if n.subtype<256 then - local k=n[a_kernpair] - if k then - local kk=kerns[k] - if kk then - local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4] - if y and y~=0 then + end + end + end + end + if not keep then + kerns={} + 
end + return head,true + elseif not keep then + kerns,cursives,marks={},{},{} + end + elseif has_kerns then + if trace_injections then + trace(head) + end + for n in traverse_id(glyph_code,head) do + if n.subtype<256 then + local k=n[a_kernpair] + if k then + local kk=kerns[k] + if kk then + local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4] + if y and y~=0 then n.yoffset=y - end + end if w then - local wx=w-x + local wx=w-x if rl<0 then - if wx~=0 then - insert_node_before(head,n,newkern(wx)) - end - if x~=0 then - insert_node_after (head,n,newkern(x)) - end - else - if x~=0 then - insert_node_before(head,n,newkern(x)) - end - if wx~=0 then - insert_node_after(head,n,newkern(wx)) - end - end + if wx~=0 then + insert_node_before(head,n,newkern(wx)) + end + if x~=0 then + insert_node_after (head,n,newkern(x)) + end + else + if x~=0 then + insert_node_before(head,n,newkern(x)) + end + if wx~=0 then + insert_node_after(head,n,newkern(wx)) + end + end else - if x~=0 then - insert_node_before(head,n,newkern(x)) - end - end - end - end - end - end - if not keep then - kerns={} - end - return head,true + if x~=0 then + insert_node_before(head,n,newkern(x)) + end + end + end + end + end + end + if not keep then + kerns={} + end + return head,true else - end - return head,false -end + end + return head,false +end end -- closure @@ -10374,890 +10374,890 @@ local function prepare_contextchains(tfmdata) nofsequences=nofsequences+1 sequence[nofsequences]=after[n] end - end - if sequence[1] then - nt=nt+1 - t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements } - for unic,_ in next,sequence[start] do - local cu=contexts[unic] - if not cu then - contexts[unic]=t - end - end - end - end - end - else - end - else - report_prepare("missing lookuptype for lookupname %a",lookupname) - end - end - end -end -local function featuresinitializer(tfmdata,value) - if true then - local rawdata=tfmdata.shared.rawdata - local properties=rawdata.properties - if not properties.initialized then - local starttime=trace_preparing and os.clock() - local resources=rawdata.resources - resources.lookuphash=resources.lookuphash or {} - prepare_contextchains(tfmdata) - prepare_lookups(tfmdata) - properties.initialized=true - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) - end - end - end -end -registerotffeature { - name="features", - description="features", - default=true, - initializers={ - position=1, - node=featuresinitializer, - }, - processors={ - node=featuresprocessor, - } -} -otf.handlers=handlers - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otp']={ - version=1.001, - comment="companion to font-otf.lua (packing)", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local next,type=next,type -local sort,concat=table.sort,table.concat -local sortedhash=table.sortedhash -local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end) -local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) -local report_otf=logs.reporter("fonts","otf loading") -fonts=fonts or {} -local handlers=fonts.handlers or {} -fonts.handlers=handlers -local otf=handlers.otf or {} -handlers.otf=otf -local enhancers=otf.enhancers or {} -otf.enhancers=enhancers -local glists=otf.glists or { "gsub","gpos" } 
-otf.glists=glists -local criterium=1 -local threshold=0 -local function tabstr_normal(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - if type(v)=="table" then - s[n]=k..">"..tabstr_normal(v) - elseif v==true then - s[n]=k.."+" - elseif v then - s[n]=k.."="..v - else - s[n]=k.."-" - end - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function tabstr_flat(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - s[n]=k.."="..v - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function tabstr_mixed(t) - local s={} - local n=#t - if n==0 then - return "" - elseif n==1 then - local k=t[1] - if k==true then - return "++" - elseif k==false then - return "--" - else - return tostring(k) - end - else - for i=1,n do - local k=t[i] - if k==true then - s[i]="++" - elseif k==false then - s[i]="--" - else - s[i]=k - end - end - return concat(s,",") - end -end -local function tabstr_boolean(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - if v then - s[n]=k.."+" - else - s[n]=k.."-" - end - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function packdata(data) - if data then - local h,t,c={},{},{} - local hh,tt,cc={},{},{} - local nt,ntt=0,0 - local function pack_normal(v) - local tag=tabstr_normal(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_flat(v) - local tag=tabstr_flat(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_boolean(v) - local tag=tabstr_boolean(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_indexed(v) - local tag=concat(v," ") - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_mixed(v) - local tag=tabstr_mixed(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_final(v) - if c[v]<=criterium then - return t[v] - else - local hv=hh[v] - if hv then - return hv - else - ntt=ntt+1 - tt[ntt]=t[v] - hh[v]=ntt - cc[ntt]=c[v] - return ntt - end - end - end - local function success(stage,pass) - if nt==0 then - if trace_loading or trace_packing then - report_otf("pack quality: nothing to pack") - end - return false - elseif nt>=threshold then - local one,two,rest=0,0,0 - if pass==1 then - for k,v in next,c do - if v==1 then - one=one+1 - elseif v==2 then - two=two+1 - else - rest=rest+1 - end - end - else - for k,v in next,cc do - if v>20 then - rest=rest+1 - elseif v>10 then - two=two+1 - else - one=one+1 - end - end - data.tables=tt - end - if trace_loading or trace_packing then - report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium) - end - return true - else - if trace_loading or trace_packing then - report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold) - end - return false - end - end - local function packers(pass) - if pass==1 then - return 
pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed - else - return pack_final,pack_final,pack_final,pack_final,pack_final - end - end - local resources=data.resources - local lookuptypes=resources.lookuptypes - for pass=1,2 do - if trace_packing then - report_otf("start packing: stage 1, pass %s",pass) - end - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local boundingbox=description.boundingbox - if boundingbox then - description.boundingbox=pack_indexed(boundingbox) - end - local slookups=description.slookups - if slookups then - for tag,slookup in next,slookups do - local what=lookuptypes[tag] - if what=="pair" then - local t=slookup[2] if t then slookup[2]=pack_indexed(t) end - local t=slookup[3] if t then slookup[3]=pack_indexed(t) end - elseif what~="substitution" then - slookups[tag]=pack_indexed(slookup) - end - end - end - local mlookups=description.mlookups - if mlookups then - for tag,mlookup in next,mlookups do - local what=lookuptypes[tag] - if what=="pair" then - for i=1,#mlookup do - local lookup=mlookup[i] - local t=lookup[2] if t then lookup[2]=pack_indexed(t) end - local t=lookup[3] if t then lookup[3]=pack_indexed(t) end - end - elseif what~="substitution" then - for i=1,#mlookup do - mlookup[i]=pack_indexed(mlookup[i]) - end - end - end - end - local kerns=description.kerns - if kerns then - for tag,kern in next,kerns do - kerns[tag]=pack_flat(kern) - end - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - for tag,kern in next,kerns do - kerns[tag]=pack_normal(kern) - end - end - end - local anchors=description.anchors - if anchors then - for what,anchor in next,anchors do - if what=="baselig" then - for _,a in next,anchor do - for k=1,#a do - a[k]=pack_indexed(a[k]) - end - end - else - for k,v in next,anchor do - anchor[k]=pack_indexed(v) - end - end - end - end - local altuni=description.altuni - if altuni then - for i=1,#altuni do - altuni[i]=pack_flat(altuni[i]) - end - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.replacements if r then rule.replacements=pack_flat (r) end - local r=rule.lookups if r then rule.lookups=pack_indexed(r) end - end - end - end - end - local anchor_to_lookup=resources.anchor_to_lookup - if anchor_to_lookup then - for anchor,lookup in next,anchor_to_lookup do - anchor_to_lookup[anchor]=pack_normal(lookup) - end - end - local lookup_to_anchor=resources.lookup_to_anchor - if lookup_to_anchor then - for lookup,anchor in next,lookup_to_anchor do - lookup_to_anchor[lookup]=pack_normal(anchor) - end - end - local sequences=resources.sequences - if sequences then - for feature,sequence in next,sequences do - local flags=sequence.flags - if flags then - sequence.flags=pack_normal(flags) - end - local subtables=sequence.subtables - if subtables then - sequence.subtables=pack_normal(subtables) - end - local features=sequence.features - if features then - for script,feature in next,features do - features[script]=pack_normal(feature) - end - end - end - end - local lookups=resources.lookups - if lookups then - for name,lookup in 
next,lookups do - local flags=lookup.flags - if flags then - lookup.flags=pack_normal(flags) - end - local subtables=lookup.subtables - if subtables then - lookup.subtables=pack_normal(subtables) - end - end - end - local features=resources.features - if features then - for _,what in next,glists do - local list=features[what] - if list then - for feature,spec in next,list do - list[feature]=pack_normal(spec) - end - end - end - end - if not success(1,pass) then - return - end - end - if nt>0 then - for pass=1,2 do - if trace_packing then - report_otf("start packing: stage 2, pass %s",pass) - end - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local kerns=description.kerns - if kerns then - description.kerns=pack_normal(kerns) - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - math.kerns=pack_normal(kerns) - end - end - local anchors=description.anchors - if anchors then - description.anchors=pack_normal(anchors) - end - local mlookups=description.mlookups - if mlookups then - for tag,mlookup in next,mlookups do - mlookups[tag]=pack_normal(mlookup) - end - end - local altuni=description.altuni - if altuni then - description.altuni=pack_normal(altuni) - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local r=rule.before if r then rule.before=pack_normal(r) end - local r=rule.after if r then rule.after=pack_normal(r) end - local r=rule.current if r then rule.current=pack_normal(r) end - end - end - end - end - local sequences=resources.sequences - if sequences then - for feature,sequence in next,sequences do - sequence.features=pack_normal(sequence.features) - end - end - if not success(2,pass) then - end - end - for pass=1,2 do - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local slookups=description.slookups - if slookups then - description.slookups=pack_normal(slookups) - end - local mlookups=description.mlookups - if mlookups then - description.mlookups=pack_normal(mlookups) - end - end - end - end - end -end -local unpacked_mt={ - __index=function(t,k) - t[k]=false - return k - end -} -local function unpackdata(data) - if data then - local tables=data.tables - if tables then - local resources=data.resources - local lookuptypes=resources.lookuptypes - local unpacked={} - setmetatable(unpacked,unpacked_mt) - for unicode,description in next,data.descriptions do - local tv=tables[description.boundingbox] - if tv then - description.boundingbox=tv - end - local slookups=description.slookups - if slookups then - local tv=tables[slookups] - if tv then - description.slookups=tv - slookups=unpacked[tv] - end - if slookups then - for tag,lookup in next,slookups do - local what=lookuptypes[tag] - if what=="pair" then - local tv=tables[lookup[2]] - if tv then - lookup[2]=tv - end - local tv=tables[lookup[3]] - if tv then - lookup[3]=tv - end - elseif what~="substitution" then - local tv=tables[lookup] - if tv then - slookups[tag]=tv - end - end - end - end - end - local mlookups=description.mlookups - if mlookups then - local tv=tables[mlookups] - if tv then - description.mlookups=tv - mlookups=unpacked[tv] - end - if mlookups then - for tag,list in next,mlookups do - local tv=tables[list] - if tv then - mlookups[tag]=tv - list=unpacked[tv] - end - if 
list then - local what=lookuptypes[tag] - if what=="pair" then - for i=1,#list do - local lookup=list[i] - local tv=tables[lookup[2]] - if tv then - lookup[2]=tv - end - local tv=tables[lookup[3]] - if tv then - lookup[3]=tv - end - end - elseif what~="substitution" then - for i=1,#list do - local tv=tables[list[i]] - if tv then - list[i]=tv - end - end - end - end - end - end - end - local kerns=description.kerns - if kerns then - local tm=tables[kerns] - if tm then - description.kerns=tm - kerns=unpacked[tm] - end - if kerns then - for k,kern in next,kerns do - local tv=tables[kern] - if tv then - kerns[k]=tv - end - end - end - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - local tm=tables[kerns] - if tm then - math.kerns=tm - kerns=unpacked[tm] - end - if kerns then - for k,kern in next,kerns do - local tv=tables[kern] - if tv then - kerns[k]=tv - end - end - end - end - end - local anchors=description.anchors - if anchors then - local ta=tables[anchors] - if ta then - description.anchors=ta - anchors=unpacked[ta] - end - if anchors then - for tag,anchor in next,anchors do - if tag=="baselig" then - for _,list in next,anchor do - for i=1,#list do - local tv=tables[list[i]] - if tv then - list[i]=tv - end - end - end - else - for a,data in next,anchor do - local tv=tables[data] - if tv then - anchor[a]=tv - end - end - end - end - end - end - local altuni=description.altuni - if altuni then - local altuni=tables[altuni] - if altuni then - description.altuni=altuni - for i=1,#altuni do - local tv=tables[altuni[i]] - if tv then - altuni[i]=tv - end - end - end - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local before=rule.before - if before then - local tv=tables[before] - if tv then - rule.before=tv - before=unpacked[tv] - end - if before then - for i=1,#before do - local tv=tables[before[i]] - if tv then - before[i]=tv - end - end - end - end - local after=rule.after - if after then - local tv=tables[after] - if tv then - rule.after=tv - after=unpacked[tv] - end - if after then - for i=1,#after do - local tv=tables[after[i]] - if tv then - after[i]=tv - end - end - end - end - local current=rule.current - if current then - local tv=tables[current] - if tv then - rule.current=tv - current=unpacked[tv] - end - if current then - for i=1,#current do - local tv=tables[current[i]] - if tv then - current[i]=tv - end - end - end - end - local replacements=rule.replacements - if replacements then - local tv=tables[replacements] - if tv then - rule.replacements=tv - end - end - local fore=rule.fore - if fore then - local tv=tables[fore] - if tv then - rule.fore=tv - end - end - local back=rule.back - if back then - local tv=tables[back] - if tv then - rule.back=tv - end - end - local names=rule.names - if names then - local tv=tables[names] - if tv then - rule.names=tv - end - end - local lookups=rule.lookups - if lookups then - local tv=tables[lookups] - if tv then - rule.lookups=tv - end - end - end - end - end - end - local anchor_to_lookup=resources.anchor_to_lookup - if anchor_to_lookup then - for anchor,lookup in next,anchor_to_lookup do - local tv=tables[lookup] - if tv then - anchor_to_lookup[anchor]=tv - end - end - end - local lookup_to_anchor=resources.lookup_to_anchor - if lookup_to_anchor then - for lookup,anchor in next,lookup_to_anchor do - local tv=tables[anchor] - if tv then - 
lookup_to_anchor[lookup]=tv - end - end - end - local ls=resources.sequences - if ls then - for _,feature in next,ls do - local flags=feature.flags - if flags then - local tv=tables[flags] - if tv then - feature.flags=tv - end - end - local subtables=feature.subtables - if subtables then - local tv=tables[subtables] - if tv then - feature.subtables=tv - end - end - local features=feature.features - if features then - local tv=tables[features] - if tv then - feature.features=tv - features=unpacked[tv] - end - if features then - for script,data in next,features do - local tv=tables[data] - if tv then - features[script]=tv + end + if sequence[1] then + nt=nt+1 + t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements } + for unic,_ in next,sequence[start] do + local cu=contexts[unic] + if not cu then + contexts[unic]=t + end end end end end + else end + else + report_prepare("missing lookuptype for lookupname %a",lookupname) end - local lookups=resources.lookups - if lookups then - for _,lookup in next,lookups do - local flags=lookup.flags - if flags then - local tv=tables[flags] - if tv then - lookup.flags=tv - end - end - local subtables=lookup.subtables - if subtables then - local tv=tables[subtables] - if tv then - lookup.subtables=tv - end - end - end - end - local features=resources.features - if features then - for _,what in next,glists do - local feature=features[what] - if feature then - for tag,spec in next,feature do - local tv=tables[spec] - if tv then - feature[tag]=tv - end - end - end - end + end + end +end +local function featuresinitializer(tfmdata,value) + if true then + local rawdata=tfmdata.shared.rawdata + local properties=rawdata.properties + if not properties.initialized then + local starttime=trace_preparing and os.clock() + local resources=rawdata.resources + resources.lookuphash=resources.lookuphash or {} + prepare_contextchains(tfmdata) + prepare_lookups(tfmdata) + properties.initialized=true + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) end - data.tables=nil end end end +registerotffeature { + name="features", + description="features", + default=true, + initializers={ + position=1, + node=featuresinitializer, + }, + processors={ + node=featuresprocessor, + } +} +otf.handlers=handlers + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otp']={ + version=1.001, + comment="companion to font-otf.lua (packing)", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local next,type=next,type +local sort,concat=table.sort,table.concat +local sortedhash=table.sortedhash +local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end) +local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) +local report_otf=logs.reporter("fonts","otf loading") +fonts=fonts or {} +local handlers=fonts.handlers or {} +fonts.handlers=handlers +local otf=handlers.otf or {} +handlers.otf=otf +local enhancers=otf.enhancers or {} +otf.enhancers=enhancers +local glists=otf.glists or { "gsub","gpos" } +otf.glists=glists +local criterium=1 +local threshold=0 +local function tabstr_normal(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + if type(v)=="table" then + s[n]=k..">"..tabstr_normal(v) + elseif v==true then + s[n]=k.."+" + elseif v then + 
s[n]=k.."="..v + else + s[n]=k.."-" + end + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function tabstr_flat(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + s[n]=k.."="..v + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function tabstr_mixed(t) + local s={} + local n=#t + if n==0 then + return "" + elseif n==1 then + local k=t[1] + if k==true then + return "++" + elseif k==false then + return "--" + else + return tostring(k) + end + else + for i=1,n do + local k=t[i] + if k==true then + s[i]="++" + elseif k==false then + s[i]="--" + else + s[i]=k + end + end + return concat(s,",") + end +end +local function tabstr_boolean(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + if v then + s[n]=k.."+" + else + s[n]=k.."-" + end + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function packdata(data) + if data then + local h,t,c={},{},{} + local hh,tt,cc={},{},{} + local nt,ntt=0,0 + local function pack_normal(v) + local tag=tabstr_normal(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_flat(v) + local tag=tabstr_flat(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_boolean(v) + local tag=tabstr_boolean(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_indexed(v) + local tag=concat(v," ") + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_mixed(v) + local tag=tabstr_mixed(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_final(v) + if c[v]<=criterium then + return t[v] + else + local hv=hh[v] + if hv then + return hv + else + ntt=ntt+1 + tt[ntt]=t[v] + hh[v]=ntt + cc[ntt]=c[v] + return ntt + end + end + end + local function success(stage,pass) + if nt==0 then + if trace_loading or trace_packing then + report_otf("pack quality: nothing to pack") + end + return false + elseif nt>=threshold then + local one,two,rest=0,0,0 + if pass==1 then + for k,v in next,c do + if v==1 then + one=one+1 + elseif v==2 then + two=two+1 + else + rest=rest+1 + end + end + else + for k,v in next,cc do + if v>20 then + rest=rest+1 + elseif v>10 then + two=two+1 + else + one=one+1 + end + end + data.tables=tt + end + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium) + end + return true + else + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold) + end + return false + end + end + local function packers(pass) + if pass==1 then + return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed + else + return pack_final,pack_final,pack_final,pack_final,pack_final + end + end + local resources=data.resources + local lookuptypes=resources.lookuptypes + for pass=1,2 do + if trace_packing then + report_otf("start 
packing: stage 1, pass %s",pass) + end + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local boundingbox=description.boundingbox + if boundingbox then + description.boundingbox=pack_indexed(boundingbox) + end + local slookups=description.slookups + if slookups then + for tag,slookup in next,slookups do + local what=lookuptypes[tag] + if what=="pair" then + local t=slookup[2] if t then slookup[2]=pack_indexed(t) end + local t=slookup[3] if t then slookup[3]=pack_indexed(t) end + elseif what~="substitution" then + slookups[tag]=pack_indexed(slookup) + end + end + end + local mlookups=description.mlookups + if mlookups then + for tag,mlookup in next,mlookups do + local what=lookuptypes[tag] + if what=="pair" then + for i=1,#mlookup do + local lookup=mlookup[i] + local t=lookup[2] if t then lookup[2]=pack_indexed(t) end + local t=lookup[3] if t then lookup[3]=pack_indexed(t) end + end + elseif what~="substitution" then + for i=1,#mlookup do + mlookup[i]=pack_indexed(mlookup[i]) + end + end + end + end + local kerns=description.kerns + if kerns then + for tag,kern in next,kerns do + kerns[tag]=pack_flat(kern) + end + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + for tag,kern in next,kerns do + kerns[tag]=pack_normal(kern) + end + end + end + local anchors=description.anchors + if anchors then + for what,anchor in next,anchors do + if what=="baselig" then + for _,a in next,anchor do + for k=1,#a do + a[k]=pack_indexed(a[k]) + end + end + else + for k,v in next,anchor do + anchor[k]=pack_indexed(v) + end + end + end + end + local altuni=description.altuni + if altuni then + for i=1,#altuni do + altuni[i]=pack_flat(altuni[i]) + end + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.replacements if r then rule.replacements=pack_flat (r) end + local r=rule.lookups if r then rule.lookups=pack_indexed(r) end + end + end + end + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookup in next,anchor_to_lookup do + anchor_to_lookup[anchor]=pack_normal(lookup) + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + for lookup,anchor in next,lookup_to_anchor do + lookup_to_anchor[lookup]=pack_normal(anchor) + end + end + local sequences=resources.sequences + if sequences then + for feature,sequence in next,sequences do + local flags=sequence.flags + if flags then + sequence.flags=pack_normal(flags) + end + local subtables=sequence.subtables + if subtables then + sequence.subtables=pack_normal(subtables) + end + local features=sequence.features + if features then + for script,feature in next,features do + features[script]=pack_normal(feature) + end + end + end + end + local lookups=resources.lookups + if lookups then + for name,lookup in next,lookups do + local flags=lookup.flags + if flags then + lookup.flags=pack_normal(flags) + end + local subtables=lookup.subtables + if subtables then + lookup.subtables=pack_normal(subtables) + end + end + end + local features=resources.features + if features then + for _,what in 
next,glists do + local list=features[what] + if list then + for feature,spec in next,list do + list[feature]=pack_normal(spec) + end + end + end + end + if not success(1,pass) then + return + end + end + if nt>0 then + for pass=1,2 do + if trace_packing then + report_otf("start packing: stage 2, pass %s",pass) + end + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local kerns=description.kerns + if kerns then + description.kerns=pack_normal(kerns) + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + math.kerns=pack_normal(kerns) + end + end + local anchors=description.anchors + if anchors then + description.anchors=pack_normal(anchors) + end + local mlookups=description.mlookups + if mlookups then + for tag,mlookup in next,mlookups do + mlookups[tag]=pack_normal(mlookup) + end + end + local altuni=description.altuni + if altuni then + description.altuni=pack_normal(altuni) + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local r=rule.before if r then rule.before=pack_normal(r) end + local r=rule.after if r then rule.after=pack_normal(r) end + local r=rule.current if r then rule.current=pack_normal(r) end + end + end + end + end + local sequences=resources.sequences + if sequences then + for feature,sequence in next,sequences do + sequence.features=pack_normal(sequence.features) + end + end + if not success(2,pass) then + end + end + for pass=1,2 do + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local slookups=description.slookups + if slookups then + description.slookups=pack_normal(slookups) + end + local mlookups=description.mlookups + if mlookups then + description.mlookups=pack_normal(mlookups) + end + end + end + end + end +end +local unpacked_mt={ + __index=function(t,k) + t[k]=false + return k + end +} +local function unpackdata(data) + if data then + local tables=data.tables + if tables then + local resources=data.resources + local lookuptypes=resources.lookuptypes + local unpacked={} + setmetatable(unpacked,unpacked_mt) + for unicode,description in next,data.descriptions do + local tv=tables[description.boundingbox] + if tv then + description.boundingbox=tv + end + local slookups=description.slookups + if slookups then + local tv=tables[slookups] + if tv then + description.slookups=tv + slookups=unpacked[tv] + end + if slookups then + for tag,lookup in next,slookups do + local what=lookuptypes[tag] + if what=="pair" then + local tv=tables[lookup[2]] + if tv then + lookup[2]=tv + end + local tv=tables[lookup[3]] + if tv then + lookup[3]=tv + end + elseif what~="substitution" then + local tv=tables[lookup] + if tv then + slookups[tag]=tv + end + end + end + end + end + local mlookups=description.mlookups + if mlookups then + local tv=tables[mlookups] + if tv then + description.mlookups=tv + mlookups=unpacked[tv] + end + if mlookups then + for tag,list in next,mlookups do + local tv=tables[list] + if tv then + mlookups[tag]=tv + list=unpacked[tv] + end + if list then + local what=lookuptypes[tag] + if what=="pair" then + for i=1,#list do + local lookup=list[i] + local tv=tables[lookup[2]] + if tv then + lookup[2]=tv + end + local tv=tables[lookup[3]] + if tv then + lookup[3]=tv + end + end + elseif what~="substitution" then + for 
i=1,#list do + local tv=tables[list[i]] + if tv then + list[i]=tv + end + end + end + end + end + end + end + local kerns=description.kerns + if kerns then + local tm=tables[kerns] + if tm then + description.kerns=tm + kerns=unpacked[tm] + end + if kerns then + for k,kern in next,kerns do + local tv=tables[kern] + if tv then + kerns[k]=tv + end + end + end + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + local tm=tables[kerns] + if tm then + math.kerns=tm + kerns=unpacked[tm] + end + if kerns then + for k,kern in next,kerns do + local tv=tables[kern] + if tv then + kerns[k]=tv + end + end + end + end + end + local anchors=description.anchors + if anchors then + local ta=tables[anchors] + if ta then + description.anchors=ta + anchors=unpacked[ta] + end + if anchors then + for tag,anchor in next,anchors do + if tag=="baselig" then + for _,list in next,anchor do + for i=1,#list do + local tv=tables[list[i]] + if tv then + list[i]=tv + end + end + end + else + for a,data in next,anchor do + local tv=tables[data] + if tv then + anchor[a]=tv + end + end + end + end + end + end + local altuni=description.altuni + if altuni then + local altuni=tables[altuni] + if altuni then + description.altuni=altuni + for i=1,#altuni do + local tv=tables[altuni[i]] + if tv then + altuni[i]=tv + end + end + end + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local before=rule.before + if before then + local tv=tables[before] + if tv then + rule.before=tv + before=unpacked[tv] + end + if before then + for i=1,#before do + local tv=tables[before[i]] + if tv then + before[i]=tv + end + end + end + end + local after=rule.after + if after then + local tv=tables[after] + if tv then + rule.after=tv + after=unpacked[tv] + end + if after then + for i=1,#after do + local tv=tables[after[i]] + if tv then + after[i]=tv + end + end + end + end + local current=rule.current + if current then + local tv=tables[current] + if tv then + rule.current=tv + current=unpacked[tv] + end + if current then + for i=1,#current do + local tv=tables[current[i]] + if tv then + current[i]=tv + end + end + end + end + local replacements=rule.replacements + if replacements then + local tv=tables[replacements] + if tv then + rule.replacements=tv + end + end + local fore=rule.fore + if fore then + local tv=tables[fore] + if tv then + rule.fore=tv + end + end + local back=rule.back + if back then + local tv=tables[back] + if tv then + rule.back=tv + end + end + local names=rule.names + if names then + local tv=tables[names] + if tv then + rule.names=tv + end + end + local lookups=rule.lookups + if lookups then + local tv=tables[lookups] + if tv then + rule.lookups=tv + end + end + end + end + end + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookup in next,anchor_to_lookup do + local tv=tables[lookup] + if tv then + anchor_to_lookup[anchor]=tv + end + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + for lookup,anchor in next,lookup_to_anchor do + local tv=tables[anchor] + if tv then + lookup_to_anchor[lookup]=tv + end + end + end + local ls=resources.sequences + if ls then + for _,feature in next,ls do + local flags=feature.flags + if flags then + local tv=tables[flags] + if tv then + feature.flags=tv + end + end + local subtables=feature.subtables + if subtables then + local 
tv=tables[subtables] + if tv then + feature.subtables=tv + end + end + local features=feature.features + if features then + local tv=tables[features] + if tv then + feature.features=tv + features=unpacked[tv] + end + if features then + for script,data in next,features do + local tv=tables[data] + if tv then + features[script]=tv + end + end + end + end + end + end + local lookups=resources.lookups + if lookups then + for _,lookup in next,lookups do + local flags=lookup.flags + if flags then + local tv=tables[flags] + if tv then + lookup.flags=tv + end + end + local subtables=lookup.subtables + if subtables then + local tv=tables[subtables] + if tv then + lookup.subtables=tv + end + end + end + end + local features=resources.features + if features then + for _,what in next,glists do + local feature=features[what] + if feature then + for tag,spec in next,feature do + local tv=tables[spec] + if tv then + feature[tag]=tv + end + end + end + end + end + data.tables=nil + end + end +end if otf.enhancers.register then - otf.enhancers.register("pack",packdata) + otf.enhancers.register("pack",packdata) otf.enhancers.register("unpack",unpackdata) end otf.enhancers.unpack=unpackdata @@ -11266,118 +11266,118 @@ end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['luatex-fonts-lua']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['luatex-fonts-lua']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() end -local fonts=fonts +local fonts=fonts fonts.formats.lua="lua" -function fonts.readers.lua(specification) - local fullname=specification.filename or "" - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - fullname=specification.name.."."..forced - else - fullname=specification.name - end - end - local fullname=resolvers.findfile(fullname) or "" - if fullname~="" then - local loader=loadfile(fullname) - loader=loader and loader() - return loader and loader(specification) - end -end +function fonts.readers.lua(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + local fullname=resolvers.findfile(fullname) or "" + if fullname~="" then + local loader=loadfile(fullname) + loader=loader and loader() + return loader and loader(specification) + end +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['font-def']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['font-def']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development 
Team", + license="see context related readme files" } -local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub -local tostring,next=tostring,next +local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub +local tostring,next=tostring,next local lpegmatch=lpeg.match local allocate=utilities.storage.allocate -local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end) +local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end) local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end) -trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading") +trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading") trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*") local report_defining=logs.reporter("fonts","defining") -local fonts=fonts -local fontdata=fonts.hashes.identifiers -local readers=fonts.readers -local definers=fonts.definers -local specifiers=fonts.specifiers -local constructors=fonts.constructors +local fonts=fonts +local fontdata=fonts.hashes.identifiers +local readers=fonts.readers +local definers=fonts.definers +local specifiers=fonts.specifiers +local constructors=fonts.constructors local fontgoodies=fonts.goodies readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' } -local variants=allocate() +local variants=allocate() specifiers.variants=variants definers.methods=definers.methods or {} local internalized=allocate() local lastdefined=nil -local loadedfonts=constructors.loadedfonts +local loadedfonts=constructors.loadedfonts local designsizes=constructors.designsizes local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end local splitter,splitspecifiers=nil,"" local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc -local left=P("(") -local right=P(")") -local colon=P(":") +local left=P("(") +local right=P(")") +local colon=P(":") local space=P(" ") definers.defaultlookup="file" local prefixpattern=P(false) -local function addspecifier(symbol) - splitspecifiers=splitspecifiers..symbol - local method=S(splitspecifiers) - local lookup=C(prefixpattern)*colon - local sub=left*C(P(1-left-right-method)^1)*right - local specification=C(method)*C(P(1)^1) - local name=C((1-sub-specification)^1) - splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc(""))) -end -local function addlookup(str,default) - prefixpattern=prefixpattern+P(str) +local function addspecifier(symbol) + splitspecifiers=splitspecifiers..symbol + local method=S(splitspecifiers) + local lookup=C(prefixpattern)*colon + local sub=left*C(P(1-left-right-method)^1)*right + local specification=C(method)*C(P(1)^1) + local name=C((1-sub-specification)^1) + splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc(""))) +end +local function addlookup(str,default) + prefixpattern=prefixpattern+P(str) end definers.addlookup=addlookup -addlookup("file") -addlookup("name") +addlookup("file") +addlookup("name") addlookup("spec") -local function getspecification(str) - return lpegmatch(splitter,str) +local function getspecification(str) + return lpegmatch(splitter,str) end definers.getspecification=getspecification -function definers.registersplit(symbol,action,verbosename) - addspecifier(symbol) - variants[symbol]=action - if verbosename then - 
variants[verbosename]=action - end -end -local function makespecification(specification,lookup,name,sub,method,detail,size) - size=size or 655360 - if not lookup or lookup=="" then - lookup=definers.defaultlookup - end - if trace_defining then - report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a", - specification,lookup,name,sub,method,detail) - end - local t={ +function definers.registersplit(symbol,action,verbosename) + addspecifier(symbol) + variants[symbol]=action + if verbosename then + variants[verbosename]=action + end +end +local function makespecification(specification,lookup,name,sub,method,detail,size) + size=size or 655360 + if not lookup or lookup=="" then + lookup=definers.defaultlookup + end + if trace_defining then + report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a", + specification,lookup,name,sub,method,detail) + end + local t={ lookup=lookup, specification=specification, size=size, @@ -11388,477 +11388,477 @@ local function makespecification(specification,lookup,name,sub,method,detail,siz resolved="", forced="", features={}, - } - return t + } + return t end definers.makespecification=makespecification function definers.analyze(specification,size) - local lookup,name,sub,method,detail=getspecification(specification or "") - return makespecification(specification,lookup,name,sub,method,detail,size) + local lookup,name,sub,method,detail=getspecification(specification or "") + return makespecification(specification,lookup,name,sub,method,detail,size) end -definers.resolvers=definers.resolvers or {} +definers.resolvers=definers.resolvers or {} local resolvers=definers.resolvers -function resolvers.file(specification) +function resolvers.file(specification) local name=resolvefile(specification.name) - local suffix=file.suffix(name) - if fonts.formats[suffix] then - specification.forced=suffix - specification.name=file.removesuffix(name) - else + local suffix=file.suffix(name) + if fonts.formats[suffix] then + specification.forced=suffix + specification.name=file.removesuffix(name) + else specification.name=name - end + end end -function resolvers.name(specification) - local resolve=fonts.names.resolve - if resolve then +function resolvers.name(specification) + local resolve=fonts.names.resolve + if resolve then local resolved,sub=resolve(specification.name,specification.sub,specification) - if resolved then - specification.resolved=resolved - specification.sub=sub - local suffix=file.suffix(resolved) - if fonts.formats[suffix] then - specification.forced=suffix - specification.name=file.removesuffix(resolved) - else - specification.name=resolved - end - end - else - resolvers.file(specification) - end -end -function resolvers.spec(specification) - local resolvespec=fonts.names.resolvespec - if resolvespec then + if resolved then + specification.resolved=resolved + specification.sub=sub + local suffix=file.suffix(resolved) + if fonts.formats[suffix] then + specification.forced=suffix + specification.name=file.removesuffix(resolved) + else + specification.name=resolved + end + end + else + resolvers.file(specification) + end +end +function resolvers.spec(specification) + local resolvespec=fonts.names.resolvespec + if resolvespec then local resolved,sub=resolvespec(specification.name,specification.sub,specification) - if resolved then - specification.resolved=resolved - specification.sub=sub - specification.forced=file.suffix(resolved) - specification.name=file.removesuffix(resolved) - end - else - 
resolvers.name(specification) - end -end -function definers.resolve(specification) + if resolved then + specification.resolved=resolved + specification.sub=sub + specification.forced=file.suffix(resolved) + specification.name=file.removesuffix(resolved) + end + else + resolvers.name(specification) + end +end +function definers.resolve(specification) if not specification.resolved or specification.resolved=="" then - local r=resolvers[specification.lookup] - if r then - r(specification) - end - end - if specification.forced=="" then - specification.forced=nil - else - specification.forced=specification.forced - end - specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification)) - if specification.sub and specification.sub~="" then - specification.hash=specification.sub..' @ '..specification.hash - end - return specification -end -function definers.applypostprocessors(tfmdata) - local postprocessors=tfmdata.postprocessors - if postprocessors then - local properties=tfmdata.properties - for i=1,#postprocessors do + local r=resolvers[specification.lookup] + if r then + r(specification) + end + end + if specification.forced=="" then + specification.forced=nil + else + specification.forced=specification.forced + end + specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification)) + if specification.sub and specification.sub~="" then + specification.hash=specification.sub..' @ '..specification.hash + end + return specification +end +function definers.applypostprocessors(tfmdata) + local postprocessors=tfmdata.postprocessors + if postprocessors then + local properties=tfmdata.properties + for i=1,#postprocessors do local extrahash=postprocessors[i](tfmdata) if type(extrahash)=="string" and extrahash~="" then - extrahash=gsub(lower(extrahash),"[^a-z]","-") - properties.fullname=format("%s-%s",properties.fullname,extrahash) - end - end - end - return tfmdata -end -local function checkembedding(tfmdata) - local properties=tfmdata.properties - local embedding - if directive_embedall then - embedding="full" - elseif properties and properties.filename and constructors.dontembed[properties.filename] then - embedding="no" - else - embedding="subset" - end - if properties then - properties.embedding=embedding - else - tfmdata.properties={ embedding=embedding } - end - tfmdata.embedding=embedding -end -function definers.loadfont(specification) - local hash=constructors.hashinstance(specification) + extrahash=gsub(lower(extrahash),"[^a-z]","-") + properties.fullname=format("%s-%s",properties.fullname,extrahash) + end + end + end + return tfmdata +end +local function checkembedding(tfmdata) + local properties=tfmdata.properties + local embedding + if directive_embedall then + embedding="full" + elseif properties and properties.filename and constructors.dontembed[properties.filename] then + embedding="no" + else + embedding="subset" + end + if properties then + properties.embedding=embedding + else + tfmdata.properties={ embedding=embedding } + end + tfmdata.embedding=embedding +end +function definers.loadfont(specification) + local hash=constructors.hashinstance(specification) local tfmdata=loadedfonts[hash] - if not tfmdata then - local forced=specification.forced or "" - if forced~="" then - local reader=readers[lower(forced)] - tfmdata=reader and reader(specification) - if not tfmdata then - report_defining("forced type %a of %a not found",forced,specification.name) - end - else + if not tfmdata then + local forced=specification.forced or "" + 
if forced~="" then + local reader=readers[lower(forced)] + tfmdata=reader and reader(specification) + if not tfmdata then + report_defining("forced type %a of %a not found",forced,specification.name) + end + else local sequence=readers.sequence - for s=1,#sequence do - local reader=sequence[s] + for s=1,#sequence do + local reader=sequence[s] if readers[reader] then - if trace_defining then - report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename) - end - tfmdata=readers[reader](specification) - if tfmdata then - break - else - specification.filename=nil - end - end - end - end - if tfmdata then - tfmdata=definers.applypostprocessors(tfmdata) + if trace_defining then + report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename) + end + tfmdata=readers[reader](specification) + if tfmdata then + break + else + specification.filename=nil + end + end + end + end + if tfmdata then + tfmdata=definers.applypostprocessors(tfmdata) checkembedding(tfmdata) - loadedfonts[hash]=tfmdata - designsizes[specification.hash]=tfmdata.parameters.designsize - end - end - if not tfmdata then - report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup) - end - return tfmdata + loadedfonts[hash]=tfmdata + designsizes[specification.hash]=tfmdata.parameters.designsize + end + end + if not tfmdata then + report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup) + end + return tfmdata end function constructors.checkvirtualids() end function constructors.readanddefine(name,size) - local specification=definers.analyze(name,size) - local method=specification.method - if method and variants[method] then - specification=variants[method](specification) - end - specification=definers.resolve(specification) - local hash=constructors.hashinstance(specification) - local id=definers.registered(hash) - if not id then - local tfmdata=definers.loadfont(specification) - if tfmdata then - tfmdata.properties.hash=hash + local specification=definers.analyze(name,size) + local method=specification.method + if method and variants[method] then + specification=variants[method](specification) + end + specification=definers.resolve(specification) + local hash=constructors.hashinstance(specification) + local id=definers.registered(hash) + if not id then + local tfmdata=definers.loadfont(specification) + if tfmdata then + tfmdata.properties.hash=hash constructors.checkvirtualids(tfmdata) - id=font.define(tfmdata) - definers.register(tfmdata,id) - else + id=font.define(tfmdata) + definers.register(tfmdata,id) + else id=0 - end - end - return fontdata[id],id + end + end + return fontdata[id],id end function definers.current() - return lastdefined -end -function definers.registered(hash) - local id=internalized[hash] - return id,id and fontdata[id] -end -function definers.register(tfmdata,id) - if tfmdata and id then - local hash=tfmdata.properties.hash - if not hash then - report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?") - elseif not internalized[hash] then - internalized[hash]=id - if trace_defining then - report_defining("registering font, id %s, hash %a",id,hash) - end - fontdata[id]=tfmdata - end - end + return lastdefined +end +function definers.registered(hash) + local id=internalized[hash] + return id,id and fontdata[id] +end +function 
definers.register(tfmdata,id) + if tfmdata and id then + local hash=tfmdata.properties.hash + if not hash then + report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?") + elseif not internalized[hash] then + internalized[hash]=id + if trace_defining then + report_defining("registering font, id %s, hash %a",id,hash) + end + fontdata[id]=tfmdata + end + end end function definers.read(specification,size,id) - statistics.starttiming(fonts) - if type(specification)=="string" then - specification=definers.analyze(specification,size) - end - local method=specification.method - if method and variants[method] then - specification=variants[method](specification) - end - specification=definers.resolve(specification) - local hash=constructors.hashinstance(specification) + statistics.starttiming(fonts) + if type(specification)=="string" then + specification=definers.analyze(specification,size) + end + local method=specification.method + if method and variants[method] then + specification=variants[method](specification) + end + specification=definers.resolve(specification) + local hash=constructors.hashinstance(specification) local tfmdata=definers.registered(hash) - if tfmdata then - if trace_defining then - report_defining("already hashed: %s",hash) - end - else + if tfmdata then + if trace_defining then + report_defining("already hashed: %s",hash) + end + else tfmdata=definers.loadfont(specification) - if tfmdata then - if trace_defining then - report_defining("loaded and hashed: %s",hash) - end - tfmdata.properties.hash=hash - if id then - definers.register(tfmdata,id) - end - else - if trace_defining then - report_defining("not loaded and hashed: %s",hash) - end - end - end + if tfmdata then + if trace_defining then + report_defining("loaded and hashed: %s",hash) + end + tfmdata.properties.hash=hash + if id then + definers.register(tfmdata,id) + end + else + if trace_defining then + report_defining("not loaded and hashed: %s",hash) + end + end + end lastdefined=tfmdata or id if not tfmdata then - report_defining("unknown font %a, loading aborted",specification.name) - elseif trace_defining and type(tfmdata)=="table" then - local properties=tfmdata.properties or {} - local parameters=tfmdata.parameters or {} - report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a", - properties.format,id,properties.name,parameters.size,properties.encodingbytes, - properties.encodingname,properties.fullname,file.basename(properties.filename)) - end - statistics.stoptiming(fonts) - return tfmdata -end -function font.getfont(id) + report_defining("unknown font %a, loading aborted",specification.name) + elseif trace_defining and type(tfmdata)=="table" then + local properties=tfmdata.properties or {} + local parameters=tfmdata.parameters or {} + report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a", + properties.format,id,properties.name,parameters.size,properties.encodingbytes, + properties.encodingname,properties.fullname,file.basename(properties.filename)) + end + statistics.stoptiming(fonts) + return tfmdata +end +function font.getfont(id) return fontdata[id] end -callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)") +callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)") end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end 
modules ['luatex-font-def']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['luatex-font-def']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() end local fonts=fonts fonts.constructors.namemode="specification" -function fonts.definers.getspecification(str) - return "",str,"",":",str +function fonts.definers.getspecification(str) + return "",str,"",":",str end local list={} local function issome () list.lookup='name' end -local function isfile () list.lookup='file' end -local function isname () list.lookup='name' end -local function thename(s) list.name=s end -local function issub (v) list.sub=v end -local function iscrap (s) list.crap=string.lower(s) end -local function iskey (k,v) list[k]=v end -local function istrue (s) list[s]=true end +local function isfile () list.lookup='file' end +local function isname () list.lookup='name' end +local function thename(s) list.name=s end +local function issub (v) list.sub=v end +local function iscrap (s) list.crap=string.lower(s) end +local function iskey (k,v) list[k]=v end +local function istrue (s) list[s]=true end local function isfalse(s) list[s]=false end local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C -local spaces=P(" ")^0 +local spaces=P(" ")^0 local namespec=(1-S("/:("))^0 -local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces -local filename_1=P("file:")/isfile*(namespec/thename) -local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]") -local fontname_1=P("name:")/isname*(namespec/thename) -local fontname_2=P(true)/issome*(namespec/thename) -local sometext=(R("az","AZ","09")+S("+-."))^1 -local truevalue=P("+")*spaces*(sometext/istrue) -local falsevalue=P("-")*spaces*(sometext/isfalse) -local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey -local somevalue=sometext/istrue +local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces +local filename_1=P("file:")/isfile*(namespec/thename) +local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]") +local fontname_1=P("name:")/isname*(namespec/thename) +local fontname_2=P(true)/issome*(namespec/thename) +local sometext=(R("az","AZ","09")+S("+-."))^1 +local truevalue=P("+")*spaces*(sometext/istrue) +local falsevalue=P("-")*spaces*(sometext/isfalse) +local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey +local somevalue=sometext/istrue local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")") -local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces +local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces local options=P(":")*spaces*(P(";")^0*option)^0 local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0 local function colonized(specification) - list={} - lpeg.match(pattern,specification.specification) + list={} + lpeg.match(pattern,specification.specification) list.crap=nil - if list.name then - specification.name=list.name - list.name=nil - end - if list.lookup then - specification.lookup=list.lookup - list.lookup=nil - end - if list.sub then - 
specification.sub=list.sub - list.sub=nil - end - specification.features.normal=fonts.handlers.otf.features.normalize(list) - return specification -end -fonts.definers.registersplit(":",colonized,"cryptic") + if list.name then + specification.name=list.name + list.name=nil + end + if list.lookup then + specification.lookup=list.lookup + list.lookup=nil + end + if list.sub then + specification.sub=list.sub + list.sub=nil + end + specification.features.normal=fonts.handlers.otf.features.normalize(list) + return specification +end +fonts.definers.registersplit(":",colonized,"cryptic") fonts.definers.registersplit("",colonized,"more cryptic") -function fonts.definers.applypostprocessors(tfmdata) - local postprocessors=tfmdata.postprocessors - if postprocessors then - for i=1,#postprocessors do +function fonts.definers.applypostprocessors(tfmdata) + local postprocessors=tfmdata.postprocessors + if postprocessors then + for i=1,#postprocessors do local extrahash=postprocessors[i](tfmdata) if type(extrahash)=="string" and extrahash~="" then - extrahash=string.gsub(lower(extrahash),"[^a-z]","-") - tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash) - end - end - end - return tfmdata -end + extrahash=string.gsub(lower(extrahash),"[^a-z]","-") + tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash) + end + end + end + return tfmdata +end end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['luatex-fonts-ext']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['luatex-fonts-ext']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() end -local fonts=fonts +local fonts=fonts local otffeatures=fonts.constructors.newfeatures("otf") -local function initializeitlc(tfmdata,value) +local function initializeitlc(tfmdata,value) if value then - local parameters=tfmdata.parameters - local italicangle=parameters.italicangle - if italicangle and italicangle~=0 then - local properties=tfmdata.properties - local factor=tonumber(value) or 1 - properties.hasitalics=true - properties.autoitalicamount=factor*(parameters.uwidth or 40)/2 - end - end -end -otffeatures.register { - name="itlc", - description="italic correction", - initializers={ - base=initializeitlc, - node=initializeitlc, - } + local parameters=tfmdata.parameters + local italicangle=parameters.italicangle + if italicangle and italicangle~=0 then + local properties=tfmdata.properties + local factor=tonumber(value) or 1 + properties.hasitalics=true + properties.autoitalicamount=factor*(parameters.uwidth or 40)/2 + end + end +end +otffeatures.register { + name="itlc", + description="italic correction", + initializers={ + base=initializeitlc, + node=initializeitlc, + } } -local function initializeslant(tfmdata,value) - value=tonumber(value) - if not value then - value=0 - elseif value>1 then - value=1 - elseif value<-1 then - value=-1 - end - tfmdata.parameters.slantfactor=value -end -otffeatures.register { - 
name="slant", - description="slant glyphs", - initializers={ - base=initializeslant, - node=initializeslant, - } +local function initializeslant(tfmdata,value) + value=tonumber(value) + if not value then + value=0 + elseif value>1 then + value=1 + elseif value<-1 then + value=-1 + end + tfmdata.parameters.slantfactor=value +end +otffeatures.register { + name="slant", + description="slant glyphs", + initializers={ + base=initializeslant, + node=initializeslant, + } } -local function initializeextend(tfmdata,value) - value=tonumber(value) - if not value then - value=0 - elseif value>10 then - value=10 - elseif value<-10 then - value=-10 - end - tfmdata.parameters.extendfactor=value -end -otffeatures.register { - name="extend", - description="scale glyphs horizontally", - initializers={ - base=initializeextend, - node=initializeextend, - } +local function initializeextend(tfmdata,value) + value=tonumber(value) + if not value then + value=0 + elseif value>10 then + value=10 + elseif value<-10 then + value=-10 + end + tfmdata.parameters.extendfactor=value +end +otffeatures.register { + name="extend", + description="scale glyphs horizontally", + initializers={ + base=initializeextend, + node=initializeextend, + } } -fonts.protrusions=fonts.protrusions or {} +fonts.protrusions=fonts.protrusions or {} fonts.protrusions.setups=fonts.protrusions.setups or {} local setups=fonts.protrusions.setups -local function initializeprotrusion(tfmdata,value) - if value then - local setup=setups[value] - if setup then - local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1 - local emwidth=tfmdata.parameters.quad - tfmdata.parameters.protrusion={ - auto=true, - } - for i,chr in next,tfmdata.characters do - local v,pl,pr=setup[i],nil,nil - if v then - pl,pr=v[1],v[2] - end - if pl and pl~=0 then chr.left_protruding=left*pl*factor end - if pr and pr~=0 then chr.right_protruding=right*pr*factor end - end - end - end -end -otffeatures.register { - name="protrusion", - description="shift characters into the left and or right margin", - initializers={ - base=initializeprotrusion, - node=initializeprotrusion, - } +local function initializeprotrusion(tfmdata,value) + if value then + local setup=setups[value] + if setup then + local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1 + local emwidth=tfmdata.parameters.quad + tfmdata.parameters.protrusion={ + auto=true, + } + for i,chr in next,tfmdata.characters do + local v,pl,pr=setup[i],nil,nil + if v then + pl,pr=v[1],v[2] + end + if pl and pl~=0 then chr.left_protruding=left*pl*factor end + if pr and pr~=0 then chr.right_protruding=right*pr*factor end + end + end + end +end +otffeatures.register { + name="protrusion", + description="shift characters into the left and or right margin", + initializers={ + base=initializeprotrusion, + node=initializeprotrusion, + } } -fonts.expansions=fonts.expansions or {} +fonts.expansions=fonts.expansions or {} fonts.expansions.setups=fonts.expansions.setups or {} local setups=fonts.expansions.setups -local function initializeexpansion(tfmdata,value) - if value then - local setup=setups[value] - if setup then - local factor=setup.factor or 1 - tfmdata.parameters.expansion={ - stretch=10*(setup.stretch or 0), - shrink=10*(setup.shrink or 0), - step=10*(setup.step or 0), - auto=true, - } - for i,chr in next,tfmdata.characters do - local v=setup[i] - if v and v~=0 then - chr.expansion_factor=v*factor +local function initializeexpansion(tfmdata,value) + if value then + local setup=setups[value] + if 
setup then + local factor=setup.factor or 1 + tfmdata.parameters.expansion={ + stretch=10*(setup.stretch or 0), + shrink=10*(setup.shrink or 0), + step=10*(setup.step or 0), + auto=true, + } + for i,chr in next,tfmdata.characters do + local v=setup[i] + if v and v~=0 then + chr.expansion_factor=v*factor else - chr.expansion_factor=factor - end - end - end - end -end -otffeatures.register { - name="expansion", - description="apply hz optimization", - initializers={ - base=initializeexpansion, - node=initializeexpansion, - } + chr.expansion_factor=factor + end + end + end + end +end +otffeatures.register { + name="expansion", + description="apply hz optimization", + initializers={ + base=initializeexpansion, + node=initializeexpansion, + } } function fonts.loggers.onetimemessage() end local byte=string.byte fonts.expansions.setups['default']={ stretch=2,shrink=2,step=.5,factor=1, - [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7, - [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7, - [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7, - [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7, - [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7, - [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7, - [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7, - [byte('w')]=0.7,[byte('z')]=0.7, - [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7, + [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7, + [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7, + [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7, + [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7, + [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7, + [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7, + [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7, + [byte('w')]=0.7,[byte('z')]=0.7, + [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7, } fonts.protrusions.setups['default']={ factor=1,left=1,right=1, @@ -11875,120 +11875,120 @@ fonts.protrusions.setups['default']={ [0x061B]={ 0,1 }, [0x06D4]={ 0,1 }, } -fonts.handlers.otf.features.normalize=function(t) - if t.rand then - t.rand="random" - end - return t -end -function fonts.helpers.nametoslot(name) - local t=type(name) - if t=="string" then - local tfmdata=fonts.hashes.identifiers[currentfont()] - local shared=tfmdata and tfmdata.shared - local fntdata=shared and shared.rawdata - return fntdata and fntdata.resources.unicodes[name] - elseif t=="number" then - return n - end -end -fonts.encodings=fonts.encodings or {} -local reencodings={} +fonts.handlers.otf.features.normalize=function(t) + if t.rand then + t.rand="random" + end + return t +end +function fonts.helpers.nametoslot(name) + local t=type(name) + if t=="string" then + local tfmdata=fonts.hashes.identifiers[currentfont()] + local shared=tfmdata and tfmdata.shared + local fntdata=shared and shared.rawdata + return fntdata and fntdata.resources.unicodes[name] + elseif t=="number" then + return n + end +end +fonts.encodings=fonts.encodings or {} +local reencodings={} fonts.encodings.reencodings=reencodings local function specialreencode(tfmdata,value) - local encoding=value and reencodings[value] - if 
encoding then - local temp={} - local char=tfmdata.characters - for k,v in next,encoding do - temp[k]=char[v] - end - for k,v in next,temp do - char[k]=temp[k] - end - return string.format("reencoded:%s",value) - end -end -local function reencode(tfmdata,value) - tfmdata.postprocessors=tfmdata.postprocessors or {} - table.insert(tfmdata.postprocessors, - function(tfmdata) - return specialreencode(tfmdata,value) - end - ) -end -otffeatures.register { - name="reencode", - description="reencode characters", - manipulators={ - base=reencode, - node=reencode, - } -} + local encoding=value and reencodings[value] + if encoding then + local temp={} + local char=tfmdata.characters + for k,v in next,encoding do + temp[k]=char[v] + end + for k,v in next,temp do + char[k]=temp[k] + end + return string.format("reencoded:%s",value) + end +end +local function reencode(tfmdata,value) + tfmdata.postprocessors=tfmdata.postprocessors or {} + table.insert(tfmdata.postprocessors, + function(tfmdata) + return specialreencode(tfmdata,value) + end + ) +end +otffeatures.register { + name="reencode", + description="reencode characters", + manipulators={ + base=reencode, + node=reencode, + } +} end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['luatex-fonts-cbk']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" +if not modules then modules={} end modules ['luatex-fonts-cbk']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" } -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() end -local fonts=fonts +local fonts=fonts local nodes=nodes -local traverse_id=node.traverse_id +local traverse_id=node.traverse_id local glyph_code=nodes.nodecodes.glyph -function nodes.handlers.characters(head) - local fontdata=fonts.hashes.identifiers - if fontdata then - local usedfonts,done,prevfont={},false,nil - for n in traverse_id(glyph_code,head) do - local font=n.font - if font~=prevfont then - prevfont=font - local used=usedfonts[font] - if not used then +function nodes.handlers.characters(head) + local fontdata=fonts.hashes.identifiers + if fontdata then + local usedfonts,done,prevfont={},false,nil + for n in traverse_id(glyph_code,head) do + local font=n.font + if font~=prevfont then + prevfont=font + local used=usedfonts[font] + if not used then local tfmdata=fontdata[font] - if tfmdata then + if tfmdata then local shared=tfmdata.shared - if shared then - local processors=shared.processes - if processors and #processors>0 then - usedfonts[font]=processors - done=true - end - end - end - end - end - end - if done then - for font,processors in next,usedfonts do - for i=1,#processors do - local h,d=processors[i](head,font,0) - head,done=h or head,done or d - end - end - end - return head,true - else - return head,false - end + if shared then + local processors=shared.processes + if processors and #processors>0 then + usedfonts[font]=processors + done=true + end + end + end + end + end + end + if done then + for font,processors in next,usedfonts do + for i=1,#processors do + local h,d=processors[i](head,font,0) + head,done=h or 
head,done or d + end + end + end + return head,true + else + return head,false + end end function nodes.simple_font_handler(head) - head=nodes.handlers.characters(head) - nodes.injections.handler(head) - nodes.handlers.protectglyphs(head) - head=node.ligaturing(head) - head=node.kerning(head) - return head -end + head=nodes.handlers.characters(head) + nodes.injections.handler(head) + nodes.handlers.protectglyphs(head) + head=node.ligaturing(head) + head=node.kerning(head) + return head +end end -- closure diff --git a/tex/generic/context/luatex/luatex-fonts-syn.lua b/tex/generic/context/luatex/luatex-fonts-syn.lua index ea6e3cab5..50c43bce7 100644 --- a/tex/generic/context/luatex/luatex-fonts-syn.lua +++ b/tex/generic/context/luatex/luatex-fonts-syn.lua @@ -1,102 +1,102 @@ -if not modules then modules = { } end modules ['luatex-fonts-syn'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - --- Generic font names support. --- --- Watch out, the version number is the same as the one used in --- the mtx-fonts.lua function scripts.fonts.names as we use a --- simplified font database in the plain solution and by using --- a different number we're less dependent on context. --- --- mtxrun --script font --reload --simple --- --- The format of the file is as follows: --- --- return { --- ["version"] = 1.001, --- ["cache_version"] = 1.001, --- ["mappings"] = { --- ["somettcfontone"] = { "Some TTC Font One", "SomeFontA.ttc", 1 }, --- ["somettcfonttwo"] = { "Some TTC Font Two", "SomeFontA.ttc", 2 }, --- ["somettffont"] = { "Some TTF Font", "SomeFontB.ttf" }, --- ["someotffont"] = { "Some OTF Font", "SomeFontC.otf" }, --- }, --- } - -local fonts = fonts -fonts.names = fonts.names or { } - -fonts.names.version = 1.001 -- not the same as in context but matches mtx-fonts --simple -fonts.names.basename = "luatex-fonts-names" -fonts.names.new_to_old = { } -fonts.names.old_to_new = { } -fonts.names.cache = containers.define("fonts","data",fonts.names.version,true) - -local data, loaded = nil, false - -local fileformats = { "lua", "tex", "other text files" } - -function fonts.names.reportmissingbase() - texio.write("") - fonts.names.reportmissingbase = nil -end - -function fonts.names.reportmissingname() - texio.write("") - fonts.names.reportmissingname = nil -end - -function fonts.names.resolve(name,sub) - if not loaded then - local basename = fonts.names.basename - if basename and basename ~= "" then - data = containers.read(fonts.names.cache,basename) - if not data then - basename = file.addsuffix(basename,"lua") - for i=1,#fileformats do - local format = fileformats[i] - local foundname = resolvers.findfile(basename,format) or "" - if foundname ~= "" then - data = dofile(foundname) - texio.write("") - break - end - end - end - end - loaded = true - end - if type(data) == "table" and data.version == fonts.names.version then - local condensed = string.gsub(string.lower(name),"[^%a%d]","") - local found = data.mappings and data.mappings[condensed] - if found then - local fontname, filename, subfont = found[1], found[2], found[3] - if subfont then - return filename, fontname - else - return filename, false - end - elseif fonts.names.reportmissingname then - fonts.names.reportmissingname() - return name, false -- fallback to filename - end - 
elseif fonts.names.reportmissingbase then - fonts.names.reportmissingbase() - end -end - -fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv - -function fonts.names.getfilename(askedname,suffix) -- only supported in mkiv - return "" -end +if not modules then modules = { } end modules ['luatex-fonts-syn'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +-- Generic font names support. +-- +-- Watch out, the version number is the same as the one used in +-- the mtx-fonts.lua function scripts.fonts.names as we use a +-- simplified font database in the plain solution and by using +-- a different number we're less dependent on context. +-- +-- mtxrun --script font --reload --simple +-- +-- The format of the file is as follows: +-- +-- return { +-- ["version"] = 1.001, +-- ["cache_version"] = 1.001, +-- ["mappings"] = { +-- ["somettcfontone"] = { "Some TTC Font One", "SomeFontA.ttc", 1 }, +-- ["somettcfonttwo"] = { "Some TTC Font Two", "SomeFontA.ttc", 2 }, +-- ["somettffont"] = { "Some TTF Font", "SomeFontB.ttf" }, +-- ["someotffont"] = { "Some OTF Font", "SomeFontC.otf" }, +-- }, +-- } + +local fonts = fonts +fonts.names = fonts.names or { } + +fonts.names.version = 1.001 -- not the same as in context but matches mtx-fonts --simple +fonts.names.basename = "luatex-fonts-names" +fonts.names.new_to_old = { } +fonts.names.old_to_new = { } +fonts.names.cache = containers.define("fonts","data",fonts.names.version,true) + +local data, loaded = nil, false + +local fileformats = { "lua", "tex", "other text files" } + +function fonts.names.reportmissingbase() + texio.write("") + fonts.names.reportmissingbase = nil +end + +function fonts.names.reportmissingname() + texio.write("") + fonts.names.reportmissingname = nil +end + +function fonts.names.resolve(name,sub) + if not loaded then + local basename = fonts.names.basename + if basename and basename ~= "" then + data = containers.read(fonts.names.cache,basename) + if not data then + basename = file.addsuffix(basename,"lua") + for i=1,#fileformats do + local format = fileformats[i] + local foundname = resolvers.findfile(basename,format) or "" + if foundname ~= "" then + data = dofile(foundname) + texio.write("") + break + end + end + end + end + loaded = true + end + if type(data) == "table" and data.version == fonts.names.version then + local condensed = string.gsub(string.lower(name),"[^%a%d]","") + local found = data.mappings and data.mappings[condensed] + if found then + local fontname, filename, subfont = found[1], found[2], found[3] + if subfont then + return filename, fontname + else + return filename, false + end + elseif fonts.names.reportmissingname then + fonts.names.reportmissingname() + return name, false -- fallback to filename + end + elseif fonts.names.reportmissingbase then + fonts.names.reportmissingbase() + end +end + +fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv + +function fonts.names.getfilename(askedname,suffix) -- only supported in mkiv + return "" +end diff --git a/tex/generic/context/luatex/luatex-fonts-tfm.lua b/tex/generic/context/luatex/luatex-fonts-tfm.lua index b9bb1bd0f..3939d8000 100644 --- a/tex/generic/context/luatex/luatex-fonts-tfm.lua +++ b/tex/generic/context/luatex/luatex-fonts-tfm.lua @@ -1,38 +1,38 
@@ -if not modules then modules = { } end modules ['luatex-fonts-tfm'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local tfm = { } -fonts.handlers.tfm = tfm -fonts.formats.tfm = "type1" -- we need to have at least a value here - -function fonts.readers.tfm(specification) - local fullname = specification.filename or "" - if fullname == "" then - local forced = specification.forced or "" - if forced ~= "" then - fullname = specification.name .. "." .. forced - else - fullname = specification.name - end - end - local foundname = resolvers.findbinfile(fullname, 'tfm') or "" - if foundname == "" then - foundname = resolvers.findbinfile(fullname, 'ofm') or "" - end - if foundname ~= "" then - specification.filename = foundname - specification.format = "ofm" - return font.read_tfm(specification.filename,specification.size) - end -end +if not modules then modules = { } end modules ['luatex-fonts-tfm'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local tfm = { } +fonts.handlers.tfm = tfm +fonts.formats.tfm = "type1" -- we need to have at least a value here + +function fonts.readers.tfm(specification) + local fullname = specification.filename or "" + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + fullname = specification.name .. "." .. forced + else + fullname = specification.name + end + end + local foundname = resolvers.findbinfile(fullname, 'tfm') or "" + if foundname == "" then + foundname = resolvers.findbinfile(fullname, 'ofm') or "" + end + if foundname ~= "" then + specification.filename = foundname + specification.format = "ofm" + return font.read_tfm(specification.filename,specification.size) + end +end diff --git a/tex/generic/context/luatex/luatex-languages.lua b/tex/generic/context/luatex/luatex-languages.lua index 1ea8c1fd1..f7be83c84 100644 --- a/tex/generic/context/luatex/luatex-languages.lua +++ b/tex/generic/context/luatex/luatex-languages.lua @@ -1,45 +1,45 @@ -if not modules then modules = { } end modules ['luatex-languages'] = { - version = 1.001, - comment = "companion to luatex-languages.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- We borrow from ConTeXt. - -languages = languages or { } - -local loaded = { } - -function languages.loadpatterns(tag) - if not loaded[tag] then - loaded[tag] = 0 - local filename = kpse.find_file("lang-" .. tag .. 
".lua") - if filename and filename == "" then - print("") - else - local whatever = loadfile(filename) - if type(whatever) == "function" then - whatever = whatever() - if type(whatever) == "table" then - local characters = whatever.patterns.characters or "" - local patterns = whatever.patterns.data or "" - local exceptions = whatever.exceptions.data or "" - local language = lang.new() - for b in string.utfvalues(characters) do - tex.setlccode(b,b) - end - lang.patterns(language, patterns) - lang.hyphenation(language, exceptions) - loaded[tag] = lang.id(language) - else - print("") - end - else - print("") - end - end - end - return loaded[tag] -end +if not modules then modules = { } end modules ['luatex-languages'] = { + version = 1.001, + comment = "companion to luatex-languages.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- We borrow from ConTeXt. + +languages = languages or { } + +local loaded = { } + +function languages.loadpatterns(tag) + if not loaded[tag] then + loaded[tag] = 0 + local filename = kpse.find_file("lang-" .. tag .. ".lua") + if filename and filename == "" then + print("") + else + local whatever = loadfile(filename) + if type(whatever) == "function" then + whatever = whatever() + if type(whatever) == "table" then + local characters = whatever.patterns.characters or "" + local patterns = whatever.patterns.data or "" + local exceptions = whatever.exceptions.data or "" + local language = lang.new() + for b in string.utfvalues(characters) do + tex.setlccode(b,b) + end + lang.patterns(language, patterns) + lang.hyphenation(language, exceptions) + loaded[tag] = lang.id(language) + else + print("") + end + else + print("") + end + end + end + return loaded[tag] +end diff --git a/tex/generic/context/luatex/luatex-math.lua b/tex/generic/context/luatex/luatex-math.lua index c316182ba..66d712d64 100644 --- a/tex/generic/context/luatex/luatex-math.lua +++ b/tex/generic/context/luatex/luatex-math.lua @@ -1,53 +1,53 @@ -if not modules then modules = { } end modules ['luatex-math'] = { - version = 1.001, - comment = "companion to luatex-math.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local gaps = { - [0x1D455] = 0x0210E, - [0x1D49D] = 0x0212C, - [0x1D4A0] = 0x02130, - [0x1D4A1] = 0x02131, - [0x1D4A3] = 0x0210B, - [0x1D4A4] = 0x02110, - [0x1D4A7] = 0x02112, - [0x1D4A8] = 0x02133, - [0x1D4AD] = 0x0211B, - [0x1D4BA] = 0x0212F, - [0x1D4BC] = 0x0210A, - [0x1D4C4] = 0x02134, - [0x1D506] = 0x0212D, - [0x1D50B] = 0x0210C, - [0x1D50C] = 0x02111, - [0x1D515] = 0x0211C, - [0x1D51D] = 0x02128, - [0x1D53A] = 0x02102, - [0x1D53F] = 0x0210D, - [0x1D545] = 0x02115, - [0x1D547] = 0x02119, - [0x1D548] = 0x0211A, - [0x1D549] = 0x0211D, - [0x1D551] = 0x02124, -} - - -local function fixmath(tfmdata,key,value) - if value then - local characters = tfmdata.characters - for gap, mess in pairs(gaps) do - characters[gap] = characters[mess] - end - end -end - -fonts.handlers.otf.features.register { - name = "fixmath", - description = "math font fixing", - manipulators = { - base = fixmath, - node = fixmath, - } -} +if not modules then modules = { } end modules ['luatex-math'] = { + version = 1.001, + comment = "companion to luatex-math.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related 
readme files" +} + +local gaps = { + [0x1D455] = 0x0210E, + [0x1D49D] = 0x0212C, + [0x1D4A0] = 0x02130, + [0x1D4A1] = 0x02131, + [0x1D4A3] = 0x0210B, + [0x1D4A4] = 0x02110, + [0x1D4A7] = 0x02112, + [0x1D4A8] = 0x02133, + [0x1D4AD] = 0x0211B, + [0x1D4BA] = 0x0212F, + [0x1D4BC] = 0x0210A, + [0x1D4C4] = 0x02134, + [0x1D506] = 0x0212D, + [0x1D50B] = 0x0210C, + [0x1D50C] = 0x02111, + [0x1D515] = 0x0211C, + [0x1D51D] = 0x02128, + [0x1D53A] = 0x02102, + [0x1D53F] = 0x0210D, + [0x1D545] = 0x02115, + [0x1D547] = 0x02119, + [0x1D548] = 0x0211A, + [0x1D549] = 0x0211D, + [0x1D551] = 0x02124, +} + + +local function fixmath(tfmdata,key,value) + if value then + local characters = tfmdata.characters + for gap, mess in pairs(gaps) do + characters[gap] = characters[mess] + end + end +end + +fonts.handlers.otf.features.register { + name = "fixmath", + description = "math font fixing", + manipulators = { + base = fixmath, + node = fixmath, + } +} diff --git a/tex/generic/context/luatex/luatex-mplib.lua b/tex/generic/context/luatex/luatex-mplib.lua index c6628acb3..74f7dbced 100644 --- a/tex/generic/context/luatex/luatex-mplib.lua +++ b/tex/generic/context/luatex/luatex-mplib.lua @@ -1,491 +1,491 @@ -if not modules then modules = { } end modules ['luatex-mplib'] = { - version = 1.001, - comment = "companion to luatex-mplib.tex", - author = "Hans Hagen & Taco Hoekwater", - copyright = "ConTeXt Development Team", - license = "public domain", -} - ---[[ldx-- -

    This module is a stripped down version of libraries that are used -by ConTeXt. It can be used in other macro packages and/or -serve as an example. Embedding in a macro package is up to others and -normally boils down to inputting supp-mpl.tex.

    ---ldx]]-- - -if metapost and metapost.version then - - --[[ldx-- -

    Let's silently quit and make sure that no one loads it - manually in ConTeXt.

    - --ldx]]-- - -else - - local format, concat, abs, match = string.format, table.concat, math.abs, string.match - - local mplib = require ('mplib') - local kpse = require ('kpse') - - --[[ldx-- -

    We create a namespace and add some variables to it. If a namespace is - already defined it will not be initialized. This permits hooking - in code beforehand.

    - -

    We don't make a format automatically. After all, distributions - might have their own preferences and normally a format (mem) file will - have some special place in the TeX tree. Also, there can already - be format files, different memory settings and other nasty pitfalls that - we don't want to interfere with. If you want, you can define a function - metapost.make(name,mem_name) that does the job.

    - --ldx]]-- - - metapost = metapost or { } - metapost.version = 1.00 - metapost.showlog = metapost.showlog or false - metapost.lastlog = "" - - --[[ldx-- -

    A few helpers, taken from l-file.lua.

    - --ldx]]-- - - local file = file or { } - - function file.replacesuffix(filename, suffix) - return (string.gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix - end - - function file.stripsuffix(filename) - return (string.gsub(filename,"%.[%a%d]+$","")) - end - - --[[ldx-- -

    We use the kpse library unless a finder is already - defined.

    - --ldx]]-- - - local mpkpse = kpse.new("luatex","mpost") - - metapost.finder = metapost.finder or function(name, mode, ftype) - if mode == "w" then - return name - else - return mpkpse:find_file(name,ftype) - end - end - - --[[ldx-- -

    You can use your own reporter if needed, as long as it handles multiple - arguments and formatted strings.

    - --ldx]]-- - - metapost.report = metapost.report or function(...) - texio.write(format("",format(...))) - end - - --[[ldx-- -

    The rest of this module is not documented. More info can be found in the - MetaFun manual, articles in user group journals and the files that - ship with ConTeXt.

    - --ldx]]-- - - function metapost.resetlastlog() - metapost.lastlog = "" - end - - local mplibone = tonumber(mplib.version()) <= 1.50 - - if mplibone then - - metapost.make = metapost.make or function(name,mem_name,dump) - local t = os.clock() - local mpx = mplib.new { - ini_version = true, - find_file = metapost.finder, - job_name = file.stripsuffix(name) - } - mpx:execute(string.format("input %s ;",name)) - if dump then - mpx:execute("dump ;") - metapost.report("format %s made and dumped for %s in %0.3f seconds",mem_name,name,os.clock()-t) - else - metapost.report("%s read in %0.3f seconds",name,os.clock()-t) - end - return mpx - end - - function metapost.load(name) - local mem_name = file.replacesuffix(name,"mem") - local mpx = mplib.new { - ini_version = false, - mem_name = mem_name, - find_file = metapost.finder - } - if not mpx and type(metapost.make) == "function" then - -- when i have time i'll locate the format and dump - mpx = metapost.make(name,mem_name) - end - if mpx then - metapost.report("using format %s",mem_name,false) - return mpx, nil - else - return nil, { status = 99, error = "out of memory or invalid format" } - end - end - - else - - local preamble = [[ - boolean mplib ; mplib := true ; - let dump = endinput ; - input %s ; - ]] - - metapost.make = metapost.make or function() - end - - function metapost.load(name) - local mpx = mplib.new { - ini_version = true, - find_file = metapost.finder, - } - local result - if not mpx then - result = { status = 99, error = "out of memory"} - else - result = mpx:execute(format(preamble, file.replacesuffix(name,"mp"))) - end - metapost.reporterror(result) - return mpx, result - end - - end - - function metapost.unload(mpx) - if mpx then - mpx:finish() - end - end - - function metapost.reporterror(result) - if not result then - metapost.report("mp error: no result object returned") - elseif result.status > 0 then - local t, e, l = result.term, result.error, result.log - if t then - metapost.report("mp terminal: %s",t) - end - if e then - metapost.report("mp error: %s", e) - end - if not t and not e and l then - metapost.lastlog = metapost.lastlog .. "\n " .. l - metapost.report("mp log: %s",l) - else - metapost.report("mp error: unknown, no error, terminal or log messages") - end - else - return false - end - return true - end - - function metapost.process(mpx, data) - local converted, result = false, {} - mpx = metapost.load(mpx) - if mpx and data then - local result = mpx:execute(data) - if not result then - metapost.report("mp error: no result object returned") - elseif result.status > 0 then - metapost.report("mp error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error")) - elseif metapost.showlog then - metapost.lastlog = metapost.lastlog .. "\n" .. result.term - metapost.report("mp info: %s",result.term or "no-term") - elseif result.fig then - converted = metapost.convert(result) - else - metapost.report("mp error: unknown error, maybe no beginfig/endfig") - end - else - metapost.report("mp error: mem file not found") - end - return converted, result - end - - local function getobjects(result,figure,f) - return figure:objects() - end - - function metapost.convert(result, flusher) - metapost.flush(result, flusher) - return true -- done - end - - --[[ldx-- -

    We removed some messaging and tracing code. We might even remove the flusher.

    - --ldx]]-- - - local function pdf_startfigure(n,llx,lly,urx,ury) - tex.sprint(format("\\startMPLIBtoPDF{%s}{%s}{%s}{%s}",llx,lly,urx,ury)) - end - - local function pdf_stopfigure() - tex.sprint("\\stopMPLIBtoPDF") - end - - function pdf_literalcode(fmt,...) -- table - tex.sprint(format("\\MPLIBtoPDF{%s}",format(fmt,...))) - end - - function pdf_textfigure(font,size,text,width,height,depth) - text = text:gsub(".","\\hbox{%1}") -- kerning happens in metapost - tex.sprint(format("\\MPLIBtextext{%s}{%s}{%s}{%s}{%s}",font,size,text,0,-( 7200/ 7227)/65536*depth)) - end - - local bend_tolerance = 131/65536 - - local rx, sx, sy, ry, tx, ty, divider = 1, 0, 0, 1, 0, 0, 1 - - local function pen_characteristics(object) - local t = mplib.pen_info(object) - rx, ry, sx, sy, tx, ty = t.rx, t.ry, t.sx, t.sy, t.tx, t.ty - divider = sx*sy - rx*ry - return not (sx==1 and rx==0 and ry==0 and sy==1 and tx==0 and ty==0), t.width - end - - local function concat(px, py) -- no tx, ty here - return (sy*px-ry*py)/divider,(sx*py-rx*px)/divider - end - - local function curved(ith,pth) - local d = pth.left_x - ith.right_x - if abs(ith.right_x - ith.x_coord - d) <= bend_tolerance and abs(pth.x_coord - pth.left_x - d) <= bend_tolerance then - d = pth.left_y - ith.right_y - if abs(ith.right_y - ith.y_coord - d) <= bend_tolerance and abs(pth.y_coord - pth.left_y - d) <= bend_tolerance then - return false - end - end - return true - end - - local function flushnormalpath(path,open) - local pth, ith - for i=1,#path do - pth = path[i] - if not ith then - pdf_literalcode("%f %f m",pth.x_coord,pth.y_coord) - elseif curved(ith,pth) then - pdf_literalcode("%f %f %f %f %f %f c",ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord) - else - pdf_literalcode("%f %f l",pth.x_coord,pth.y_coord) - end - ith = pth - end - if not open then - local one = path[1] - if curved(pth,one) then - pdf_literalcode("%f %f %f %f %f %f c",pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord ) - else - pdf_literalcode("%f %f l",one.x_coord,one.y_coord) - end - elseif #path == 1 then - -- special case .. draw point - local one = path[1] - pdf_literalcode("%f %f l",one.x_coord,one.y_coord) - end - return t - end - - local function flushconcatpath(path,open) - pdf_literalcode("%f %f %f %f %f %f cm", sx, rx, ry, sy, tx ,ty) - local pth, ith - for i=1,#path do - pth = path[i] - if not ith then - pdf_literalcode("%f %f m",concat(pth.x_coord,pth.y_coord)) - elseif curved(ith,pth) then - local a, b = concat(ith.right_x,ith.right_y) - local c, d = concat(pth.left_x,pth.left_y) - pdf_literalcode("%f %f %f %f %f %f c",a,b,c,d,concat(pth.x_coord, pth.y_coord)) - else - pdf_literalcode("%f %f l",concat(pth.x_coord, pth.y_coord)) - end - ith = pth - end - if not open then - local one = path[1] - if curved(pth,one) then - local a, b = concat(pth.right_x,pth.right_y) - local c, d = concat(one.left_x,one.left_y) - pdf_literalcode("%f %f %f %f %f %f c",a,b,c,d,concat(one.x_coord, one.y_coord)) - else - pdf_literalcode("%f %f l",concat(one.x_coord,one.y_coord)) - end - elseif #path == 1 then - -- special case .. draw point - local one = path[1] - pdf_literalcode("%f %f l",concat(one.x_coord,one.y_coord)) - end - return t - end - - --[[ldx-- -

    Support for specials has been removed.

    - --ldx]]-- - - function metapost.flush(result,flusher) - if result then - local figures = result.fig - if figures then - for f=1, #figures do - metapost.report("flushing figure %s",f) - local figure = figures[f] - local objects = getobjects(result,figure,f) - local fignum = tonumber(match(figure:filename(),"([%d]+)$") or figure:charcode() or 0) - local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false - local bbox = figure:boundingbox() - local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4] -- faster than unpack - if urx < llx then - -- invalid - pdf_startfigure(fignum,0,0,0,0) - pdf_stopfigure() - else - pdf_startfigure(fignum,llx,lly,urx,ury) - pdf_literalcode("q") - if objects then - for o=1,#objects do - local object = objects[o] - local objecttype = object.type - if objecttype == "start_bounds" or objecttype == "stop_bounds" then - -- skip - elseif objecttype == "start_clip" then - pdf_literalcode("q") - flushnormalpath(object.path,t,false) - pdf_literalcode("W n") - elseif objecttype == "stop_clip" then - pdf_literalcode("Q") - miterlimit, linecap, linejoin, dashed = -1, -1, -1, false - elseif objecttype == "special" then - -- not supported - elseif objecttype == "text" then - local ot = object.transform -- 3,4,5,6,1,2 - pdf_literalcode("q %f %f %f %f %f %f cm",ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) - pdf_textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth) - pdf_literalcode("Q") - else - local cs = object.color - if cs and #cs > 0 then - pdf_literalcode(metapost.colorconverter(cs)) - end - local ml = object.miterlimit - if ml and ml ~= miterlimit then - miterlimit = ml - pdf_literalcode("%f M",ml) - end - local lj = object.linejoin - if lj and lj ~= linejoin then - linejoin = lj - pdf_literalcode("%i j",lj) - end - local lc = object.linecap - if lc and lc ~= linecap then - linecap = lc - pdf_literalcode("%i J",lc) - end - local dl = object.dash - if dl then - local d = format("[%s] %i d",concat(dl.dashes or {}," "),dl.offset) - if d ~= dashed then - dashed = d - pdf_literalcode(dashed) - end - elseif dashed then - pdf_literalcode("[] 0 d") - dashed = false - end - local path = object.path - local transformed, penwidth = false, 1 - local open = path and path[1].left_type and path[#path].right_type - local pen = object.pen - if pen then - if pen.type == 'elliptical' then - transformed, penwidth = pen_characteristics(object) -- boolean, value - pdf_literalcode("%f w",penwidth) - if objecttype == 'fill' then - objecttype = 'both' - end - else -- calculated by mplib itself - objecttype = 'fill' - end - end - if transformed then - pdf_literalcode("q") - end - if path then - if transformed then - flushconcatpath(path,open) - else - flushnormalpath(path,open) - end - if objecttype == "fill" then - pdf_literalcode("h f") - elseif objecttype == "outline" then - pdf_literalcode((open and "S") or "h S") - elseif objecttype == "both" then - pdf_literalcode("h B") - end - end - if transformed then - pdf_literalcode("Q") - end - local path = object.htap - if path then - if transformed then - pdf_literalcode("q") - end - if transformed then - flushconcatpath(path,open) - else - flushnormalpath(path,open) - end - if objecttype == "fill" then - pdf_literalcode("h f") - elseif objecttype == "outline" then - pdf_literalcode((open and "S") or "h S") - elseif objecttype == "both" then - pdf_literalcode("h B") - end - if transformed then - pdf_literalcode("Q") - end - end - if cr then - pdf_literalcode(cr) - end - end - end - end - 
pdf_literalcode("Q") - pdf_stopfigure() - end - end - end - end - end - - function metapost.colorconverter(cr) - local n = #cr - if n == 4 then - local c, m, y, k = cr[1], cr[2], cr[3], cr[4] - return format("%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K",c,m,y,k,c,m,y,k), "0 g 0 G" - elseif n == 3 then - local r, g, b = cr[1], cr[2], cr[3] - return format("%.3f %.3f %.3f rg %.3f %.3f %.3f RG",r,g,b,r,g,b), "0 g 0 G" - else - local s = cr[1] - return format("%.3f g %.3f G",s,s), "0 g 0 G" - end - end - -end +if not modules then modules = { } end modules ['luatex-mplib'] = { + version = 1.001, + comment = "companion to luatex-mplib.tex", + author = "Hans Hagen & Taco Hoekwater", + copyright = "ConTeXt Development Team", + license = "public domain", +} + +--[[ldx-- +

    This module is a stripped down version of libraries that are used +by ConTeXt. It can be used in other macro packages and/or +serve as an example. Embedding in a macro package is up to others and +normally boils down to inputting supp-mpl.tex.

    +--ldx]]-- + +if metapost and metapost.version then + + --[[ldx-- +

    Let's silently quit and make sure that no one loads it + manually in ConTeXt.

    + --ldx]]-- + +else + + local format, concat, abs, match = string.format, table.concat, math.abs, string.match + + local mplib = require ('mplib') + local kpse = require ('kpse') + + --[[ldx-- +

    We create a namespace and add some variables to it. If a namespace is + already defined it will not be initialized. This permits hooking + in code beforehand.
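    For instance (a minimal sketch, not taken from this patch; the flag value is only an example), code hooked in before this file is loaded will be kept, because the module only does "metapost = metapost or { }":

        metapost = metapost or { }
        metapost.showlog = true -- echo the MetaPost log in the TeX log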

    + +

    We don't make a format automatically. After all, distributions + might have their own preferences and normally a format (mem) file will + have some special place in the TeX tree. Also, there can already + be format files, different memory settings and other nasty pitfalls that + we don't want to interfere with. If you want, you can define a function + metapost.make(name,mem_name) that does the job.

    + --ldx]]-- + + metapost = metapost or { } + metapost.version = 1.00 + metapost.showlog = metapost.showlog or false + metapost.lastlog = "" + + --[[ldx-- +

    A few helpers, taken from l-file.lua.
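    For example (illustrative file names only), the two helpers defined below behave like this:

        print(file.replacesuffix("figure.mp", "mem")) -- figure.mem
        print(file.stripsuffix("figure.mp"))          -- figure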

    + --ldx]]-- + + local file = file or { } + + function file.replacesuffix(filename, suffix) + return (string.gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix + end + + function file.stripsuffix(filename) + return (string.gsub(filename,"%.[%a%d]+$","")) + end + + --[[ldx-- +

    We use the kpse library unless a finder is already + defined.
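    A hedged sketch of such a predefined finder (to be set before this module is loaded; the "./mp/" project directory is purely hypothetical):

        local projectkpse = kpse.new("luatex", "mpost")
        metapost = metapost or { }
        metapost.finder = function(name, mode, ftype)
            if mode == "w" then
                return name -- files to be written keep their name
            end
            local localname = "./mp/" .. name -- hypothetical project directory
            if lfs.isfile(localname) then
                return localname
            end
            return projectkpse:find_file(name, ftype)
        end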

    + --ldx]]-- + + local mpkpse = kpse.new("luatex","mpost") + + metapost.finder = metapost.finder or function(name, mode, ftype) + if mode == "w" then + return name + else + return mpkpse:find_file(name,ftype) + end + end + + --[[ldx-- +

    You can use your own reporter if needed, as long as it handles multiple + arguments and formatted strings.
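    A minimal sketch of such a reporter (the "mplib:" prefix is arbitrary):

        metapost = metapost or { }
        metapost.report = function(fmt, ...)
            texio.write_nl("mplib: " .. string.format(fmt, ...))
        end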

    + --ldx]]-- + + metapost.report = metapost.report or function(...) + texio.write(format("",format(...))) + end + + --[[ldx-- +

    The rest of this module is not documented. More info can be found in the + MetaFun manual, articles in user group journals and the files that + ship with ConTeXt.
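    As a rough usage sketch (assuming the companion TeX macros such as \startMPLIBtoPDF are available and that plain.mp can be found), the typical flow built from the functions below is:

        local converted, result = metapost.process("plain", [[
            beginfig(1) ;
                draw fullcircle scaled 3cm withpen pencircle scaled 1pt ;
            endfig ;
        ]])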

    + --ldx]]-- + + function metapost.resetlastlog() + metapost.lastlog = "" + end + + local mplibone = tonumber(mplib.version()) <= 1.50 + + if mplibone then + + metapost.make = metapost.make or function(name,mem_name,dump) + local t = os.clock() + local mpx = mplib.new { + ini_version = true, + find_file = metapost.finder, + job_name = file.stripsuffix(name) + } + mpx:execute(string.format("input %s ;",name)) + if dump then + mpx:execute("dump ;") + metapost.report("format %s made and dumped for %s in %0.3f seconds",mem_name,name,os.clock()-t) + else + metapost.report("%s read in %0.3f seconds",name,os.clock()-t) + end + return mpx + end + + function metapost.load(name) + local mem_name = file.replacesuffix(name,"mem") + local mpx = mplib.new { + ini_version = false, + mem_name = mem_name, + find_file = metapost.finder + } + if not mpx and type(metapost.make) == "function" then + -- when i have time i'll locate the format and dump + mpx = metapost.make(name,mem_name) + end + if mpx then + metapost.report("using format %s",mem_name,false) + return mpx, nil + else + return nil, { status = 99, error = "out of memory or invalid format" } + end + end + + else + + local preamble = [[ + boolean mplib ; mplib := true ; + let dump = endinput ; + input %s ; + ]] + + metapost.make = metapost.make or function() + end + + function metapost.load(name) + local mpx = mplib.new { + ini_version = true, + find_file = metapost.finder, + } + local result + if not mpx then + result = { status = 99, error = "out of memory"} + else + result = mpx:execute(format(preamble, file.replacesuffix(name,"mp"))) + end + metapost.reporterror(result) + return mpx, result + end + + end + + function metapost.unload(mpx) + if mpx then + mpx:finish() + end + end + + function metapost.reporterror(result) + if not result then + metapost.report("mp error: no result object returned") + elseif result.status > 0 then + local t, e, l = result.term, result.error, result.log + if t then + metapost.report("mp terminal: %s",t) + end + if e then + metapost.report("mp error: %s", e) + end + if not t and not e and l then + metapost.lastlog = metapost.lastlog .. "\n " .. l + metapost.report("mp log: %s",l) + else + metapost.report("mp error: unknown, no error, terminal or log messages") + end + else + return false + end + return true + end + + function metapost.process(mpx, data) + local converted, result = false, {} + mpx = metapost.load(mpx) + if mpx and data then + local result = mpx:execute(data) + if not result then + metapost.report("mp error: no result object returned") + elseif result.status > 0 then + metapost.report("mp error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error")) + elseif metapost.showlog then + metapost.lastlog = metapost.lastlog .. "\n" .. result.term + metapost.report("mp info: %s",result.term or "no-term") + elseif result.fig then + converted = metapost.convert(result) + else + metapost.report("mp error: unknown error, maybe no beginfig/endfig") + end + else + metapost.report("mp error: mem file not found") + end + return converted, result + end + + local function getobjects(result,figure,f) + return figure:objects() + end + + function metapost.convert(result, flusher) + metapost.flush(result, flusher) + return true -- done + end + + --[[ldx-- +

    We removed some messaging and tracing code. We might even remove the flusher.

    + --ldx]]-- + + local function pdf_startfigure(n,llx,lly,urx,ury) + tex.sprint(format("\\startMPLIBtoPDF{%s}{%s}{%s}{%s}",llx,lly,urx,ury)) + end + + local function pdf_stopfigure() + tex.sprint("\\stopMPLIBtoPDF") + end + + function pdf_literalcode(fmt,...) -- table + tex.sprint(format("\\MPLIBtoPDF{%s}",format(fmt,...))) + end + + function pdf_textfigure(font,size,text,width,height,depth) + text = text:gsub(".","\\hbox{%1}") -- kerning happens in metapost + tex.sprint(format("\\MPLIBtextext{%s}{%s}{%s}{%s}{%s}",font,size,text,0,-( 7200/ 7227)/65536*depth)) + end + + local bend_tolerance = 131/65536 + + local rx, sx, sy, ry, tx, ty, divider = 1, 0, 0, 1, 0, 0, 1 + + local function pen_characteristics(object) + local t = mplib.pen_info(object) + rx, ry, sx, sy, tx, ty = t.rx, t.ry, t.sx, t.sy, t.tx, t.ty + divider = sx*sy - rx*ry + return not (sx==1 and rx==0 and ry==0 and sy==1 and tx==0 and ty==0), t.width + end + + local function concat(px, py) -- no tx, ty here + return (sy*px-ry*py)/divider,(sx*py-rx*px)/divider + end + + local function curved(ith,pth) + local d = pth.left_x - ith.right_x + if abs(ith.right_x - ith.x_coord - d) <= bend_tolerance and abs(pth.x_coord - pth.left_x - d) <= bend_tolerance then + d = pth.left_y - ith.right_y + if abs(ith.right_y - ith.y_coord - d) <= bend_tolerance and abs(pth.y_coord - pth.left_y - d) <= bend_tolerance then + return false + end + end + return true + end + + local function flushnormalpath(path,open) + local pth, ith + for i=1,#path do + pth = path[i] + if not ith then + pdf_literalcode("%f %f m",pth.x_coord,pth.y_coord) + elseif curved(ith,pth) then + pdf_literalcode("%f %f %f %f %f %f c",ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord) + else + pdf_literalcode("%f %f l",pth.x_coord,pth.y_coord) + end + ith = pth + end + if not open then + local one = path[1] + if curved(pth,one) then + pdf_literalcode("%f %f %f %f %f %f c",pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord ) + else + pdf_literalcode("%f %f l",one.x_coord,one.y_coord) + end + elseif #path == 1 then + -- special case .. draw point + local one = path[1] + pdf_literalcode("%f %f l",one.x_coord,one.y_coord) + end + return t + end + + local function flushconcatpath(path,open) + pdf_literalcode("%f %f %f %f %f %f cm", sx, rx, ry, sy, tx ,ty) + local pth, ith + for i=1,#path do + pth = path[i] + if not ith then + pdf_literalcode("%f %f m",concat(pth.x_coord,pth.y_coord)) + elseif curved(ith,pth) then + local a, b = concat(ith.right_x,ith.right_y) + local c, d = concat(pth.left_x,pth.left_y) + pdf_literalcode("%f %f %f %f %f %f c",a,b,c,d,concat(pth.x_coord, pth.y_coord)) + else + pdf_literalcode("%f %f l",concat(pth.x_coord, pth.y_coord)) + end + ith = pth + end + if not open then + local one = path[1] + if curved(pth,one) then + local a, b = concat(pth.right_x,pth.right_y) + local c, d = concat(one.left_x,one.left_y) + pdf_literalcode("%f %f %f %f %f %f c",a,b,c,d,concat(one.x_coord, one.y_coord)) + else + pdf_literalcode("%f %f l",concat(one.x_coord,one.y_coord)) + end + elseif #path == 1 then + -- special case .. draw point + local one = path[1] + pdf_literalcode("%f %f l",concat(one.x_coord,one.y_coord)) + end + return t + end + + --[[ldx-- +

    Support for specials has been removed.

    + --ldx]]-- + + function metapost.flush(result,flusher) + if result then + local figures = result.fig + if figures then + for f=1, #figures do + metapost.report("flushing figure %s",f) + local figure = figures[f] + local objects = getobjects(result,figure,f) + local fignum = tonumber(match(figure:filename(),"([%d]+)$") or figure:charcode() or 0) + local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false + local bbox = figure:boundingbox() + local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4] -- faster than unpack + if urx < llx then + -- invalid + pdf_startfigure(fignum,0,0,0,0) + pdf_stopfigure() + else + pdf_startfigure(fignum,llx,lly,urx,ury) + pdf_literalcode("q") + if objects then + for o=1,#objects do + local object = objects[o] + local objecttype = object.type + if objecttype == "start_bounds" or objecttype == "stop_bounds" then + -- skip + elseif objecttype == "start_clip" then + pdf_literalcode("q") + flushnormalpath(object.path,t,false) + pdf_literalcode("W n") + elseif objecttype == "stop_clip" then + pdf_literalcode("Q") + miterlimit, linecap, linejoin, dashed = -1, -1, -1, false + elseif objecttype == "special" then + -- not supported + elseif objecttype == "text" then + local ot = object.transform -- 3,4,5,6,1,2 + pdf_literalcode("q %f %f %f %f %f %f cm",ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) + pdf_textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth) + pdf_literalcode("Q") + else + local cs = object.color + if cs and #cs > 0 then + pdf_literalcode(metapost.colorconverter(cs)) + end + local ml = object.miterlimit + if ml and ml ~= miterlimit then + miterlimit = ml + pdf_literalcode("%f M",ml) + end + local lj = object.linejoin + if lj and lj ~= linejoin then + linejoin = lj + pdf_literalcode("%i j",lj) + end + local lc = object.linecap + if lc and lc ~= linecap then + linecap = lc + pdf_literalcode("%i J",lc) + end + local dl = object.dash + if dl then + local d = format("[%s] %i d",concat(dl.dashes or {}," "),dl.offset) + if d ~= dashed then + dashed = d + pdf_literalcode(dashed) + end + elseif dashed then + pdf_literalcode("[] 0 d") + dashed = false + end + local path = object.path + local transformed, penwidth = false, 1 + local open = path and path[1].left_type and path[#path].right_type + local pen = object.pen + if pen then + if pen.type == 'elliptical' then + transformed, penwidth = pen_characteristics(object) -- boolean, value + pdf_literalcode("%f w",penwidth) + if objecttype == 'fill' then + objecttype = 'both' + end + else -- calculated by mplib itself + objecttype = 'fill' + end + end + if transformed then + pdf_literalcode("q") + end + if path then + if transformed then + flushconcatpath(path,open) + else + flushnormalpath(path,open) + end + if objecttype == "fill" then + pdf_literalcode("h f") + elseif objecttype == "outline" then + pdf_literalcode((open and "S") or "h S") + elseif objecttype == "both" then + pdf_literalcode("h B") + end + end + if transformed then + pdf_literalcode("Q") + end + local path = object.htap + if path then + if transformed then + pdf_literalcode("q") + end + if transformed then + flushconcatpath(path,open) + else + flushnormalpath(path,open) + end + if objecttype == "fill" then + pdf_literalcode("h f") + elseif objecttype == "outline" then + pdf_literalcode((open and "S") or "h S") + elseif objecttype == "both" then + pdf_literalcode("h B") + end + if transformed then + pdf_literalcode("Q") + end + end + if cr then + pdf_literalcode(cr) + end + end + end + end + 
pdf_literalcode("Q") + pdf_stopfigure() + end + end + end + end + end + + function metapost.colorconverter(cr) + local n = #cr + if n == 4 then + local c, m, y, k = cr[1], cr[2], cr[3], cr[4] + return format("%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K",c,m,y,k,c,m,y,k), "0 g 0 G" + elseif n == 3 then + local r, g, b = cr[1], cr[2], cr[3] + return format("%.3f %.3f %.3f rg %.3f %.3f %.3f RG",r,g,b,r,g,b), "0 g 0 G" + else + local s = cr[1] + return format("%.3f g %.3f G",s,s), "0 g 0 G" + end + end + +end diff --git a/tex/generic/context/luatex/luatex-preprocessor.lua b/tex/generic/context/luatex/luatex-preprocessor.lua index 8faa0b47e..610fe4b4d 100644 --- a/tex/generic/context/luatex/luatex-preprocessor.lua +++ b/tex/generic/context/luatex/luatex-preprocessor.lua @@ -1,163 +1,163 @@ -if not modules then modules = { } end modules ['luatex-preprocessor'] = { - version = 1.001, - comment = "companion to luatex-preprocessor.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - ---[[ldx -

    This is a stripped down version of the preprocessor. In - ConTeXt we have a bit more, use a different logger, and -use a few optimizations. A few examples are shown at the end.

    ---ldx]] - -local rep, sub, gmatch = string.rep, string.sub, string.gmatch -local insert, remove = table.insert, table.remove -local setmetatable = setmetatable - -local stack, top, n, hashes = { }, nil, 0, { } - -local function set(s) - if top then - n = n + 1 - if n > 9 then - texio.write_nl("number of arguments > 9, ignoring: " .. s) - else - local ns = #stack - local h = hashes[ns] - if not h then - h = rep("#",ns) - hashes[ns] = h - end - m = h .. n - top[s] = m - return m - end - end -end - -local function get(s) - local m = top and top[s] or s - return m -end - -local function push() - top = { } - n = 0 - local s = stack[#stack] - if s then - setmetatable(top,{ __index = s }) - end - insert(stack,top) -end - -local function pop() - top = remove(stack) -end - -local leftbrace = lpeg.P("{") -local rightbrace = lpeg.P("}") -local escape = lpeg.P("\\") - -local space = lpeg.P(" ") -local spaces = space^1 -local newline = lpeg.S("\r\n") -local nobrace = 1 - leftbrace - rightbrace - -local name = lpeg.R("AZ","az")^1 -local longname = (leftbrace/"") * (nobrace^1) * (rightbrace/"") -local variable = lpeg.P("#") * lpeg.Cs(name + longname) -local escapedname = escape * name -local definer = escape * (lpeg.P("def") + lpeg.P("egdx") * lpeg.P("def")) -local anything = lpeg.P(1) -local always = lpeg.P(true) - -local pushlocal = always / push -local poplocal = always / pop -local declaration = variable / set -local identifier = variable / get - -local function matcherror(str,pos) - texio.write_nl("runaway definition at: " .. sub(str,pos-30,pos)) -end - -local parser = lpeg.Cs { "converter", - definition = pushlocal - * definer - * escapedname - * (declaration + (1-leftbrace))^0 - * lpeg.V("braced") - * poplocal, - braced = leftbrace - * ( lpeg.V("definition") - + identifier - + lpeg.V("braced") - + nobrace - )^0 - * (rightbrace + lpeg.Cmt(always,matcherror)), - converter = (lpeg.V("definition") + anything)^1, -} - ---[[ldx -

    We provide a few commands.

    ---ldx]] - --- local texkpse - -local function find_file(...) - -- texkpse = texkpse or kpse.new("luatex","tex") - -- return texkpse:find_file(...) or "" - return kpse.find_file(...) or "" -end - -commands = commands or { } - -function commands.preprocessed(str) - return lpeg.match(parser,str) -end - -function commands.inputpreprocessed(name) - local name = find_file(name) or "" - if name ~= "" then - -- we could use io.loaddata as it's loaded in luatex-plain - local f = io.open(name,'rb') - if f then - texio.write("("..name) - local d = commands.preprocessed(f:read("*a")) - if d and d ~= "" then - texio.write("processed: " .. name) - for s in gmatch(d,"[^\n\r]+") do - tex.print(s) -- we do a dumb feedback - end - end - f:close() - texio.write(")") - else - tex.error("preprocessor error, invalid file: " .. name) - end - else - tex.error("preprocessor error, unknown file: " .. name) - end -end - -function commands.preprocessfile(oldfile,newfile) -- no checking - if oldfile and oldfile ~= newfile then - local f = io.open(oldfile,'rb') - if f then - local g = io.open(newfile,'wb') - if g then - g:write(lpeg.match(parser,f:read("*a") or "")) - g:close() - end - f:close() - end - end -end - ---~ print(preprocessed([[\def\test#oeps{test:#oeps}]])) ---~ print(preprocessed([[\def\test#oeps{test:#{oeps}}]])) ---~ print(preprocessed([[\def\test#{oeps:1}{test:#{oeps:1}}]])) ---~ print(preprocessed([[\def\test#{oeps}{test:#oeps}]])) ---~ preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}]]) ---~ print(preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}}]])) +if not modules then modules = { } end modules ['luatex-preprocessor'] = { + version = 1.001, + comment = "companion to luatex-preprocessor.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx +

    This is a stripped down version of the preprocessor. In + ConTeXt we have a bit more, use a different logger, and +use a few optimizations. A few examples are shown at the end.

    +--ldx]] + +local rep, sub, gmatch = string.rep, string.sub, string.gmatch +local insert, remove = table.insert, table.remove +local setmetatable = setmetatable + +local stack, top, n, hashes = { }, nil, 0, { } + +local function set(s) + if top then + n = n + 1 + if n > 9 then + texio.write_nl("number of arguments > 9, ignoring: " .. s) + else + local ns = #stack + local h = hashes[ns] + if not h then + h = rep("#",ns) + hashes[ns] = h + end + m = h .. n + top[s] = m + return m + end + end +end + +local function get(s) + local m = top and top[s] or s + return m +end + +local function push() + top = { } + n = 0 + local s = stack[#stack] + if s then + setmetatable(top,{ __index = s }) + end + insert(stack,top) +end + +local function pop() + top = remove(stack) +end + +local leftbrace = lpeg.P("{") +local rightbrace = lpeg.P("}") +local escape = lpeg.P("\\") + +local space = lpeg.P(" ") +local spaces = space^1 +local newline = lpeg.S("\r\n") +local nobrace = 1 - leftbrace - rightbrace + +local name = lpeg.R("AZ","az")^1 +local longname = (leftbrace/"") * (nobrace^1) * (rightbrace/"") +local variable = lpeg.P("#") * lpeg.Cs(name + longname) +local escapedname = escape * name +local definer = escape * (lpeg.P("def") + lpeg.P("egdx") * lpeg.P("def")) +local anything = lpeg.P(1) +local always = lpeg.P(true) + +local pushlocal = always / push +local poplocal = always / pop +local declaration = variable / set +local identifier = variable / get + +local function matcherror(str,pos) + texio.write_nl("runaway definition at: " .. sub(str,pos-30,pos)) +end + +local parser = lpeg.Cs { "converter", + definition = pushlocal + * definer + * escapedname + * (declaration + (1-leftbrace))^0 + * lpeg.V("braced") + * poplocal, + braced = leftbrace + * ( lpeg.V("definition") + + identifier + + lpeg.V("braced") + + nobrace + )^0 + * (rightbrace + lpeg.Cmt(always,matcherror)), + converter = (lpeg.V("definition") + anything)^1, +} + +--[[ldx +

    We provide a few commands.
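    A small usage sketch (the macro name and argument are arbitrary); named macro arguments are rewritten to the usual numbered ones:

        local result = commands.preprocessed([[\def\test#oeps{test:#oeps}]])
        -- result is now the string: \def\test#1{test:#1}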

    +--ldx]] + +-- local texkpse + +local function find_file(...) + -- texkpse = texkpse or kpse.new("luatex","tex") + -- return texkpse:find_file(...) or "" + return kpse.find_file(...) or "" +end + +commands = commands or { } + +function commands.preprocessed(str) + return lpeg.match(parser,str) +end + +function commands.inputpreprocessed(name) + local name = find_file(name) or "" + if name ~= "" then + -- we could use io.loaddata as it's loaded in luatex-plain + local f = io.open(name,'rb') + if f then + texio.write("("..name) + local d = commands.preprocessed(f:read("*a")) + if d and d ~= "" then + texio.write("processed: " .. name) + for s in gmatch(d,"[^\n\r]+") do + tex.print(s) -- we do a dumb feedback + end + end + f:close() + texio.write(")") + else + tex.error("preprocessor error, invalid file: " .. name) + end + else + tex.error("preprocessor error, unknown file: " .. name) + end +end + +function commands.preprocessfile(oldfile,newfile) -- no checking + if oldfile and oldfile ~= newfile then + local f = io.open(oldfile,'rb') + if f then + local g = io.open(newfile,'wb') + if g then + g:write(lpeg.match(parser,f:read("*a") or "")) + g:close() + end + f:close() + end + end +end + +--~ print(preprocessed([[\def\test#oeps{test:#oeps}]])) +--~ print(preprocessed([[\def\test#oeps{test:#{oeps}}]])) +--~ print(preprocessed([[\def\test#{oeps:1}{test:#{oeps:1}}]])) +--~ print(preprocessed([[\def\test#{oeps}{test:#oeps}]])) +--~ preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}]]) +--~ print(preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}}]])) diff --git a/tex/generic/context/luatex/luatex-swiglib-test.lua b/tex/generic/context/luatex/luatex-swiglib-test.lua index db6a72909..2f4cc57f6 100644 --- a/tex/generic/context/luatex/luatex-swiglib-test.lua +++ b/tex/generic/context/luatex/luatex-swiglib-test.lua @@ -1,25 +1,25 @@ -local gm = swiglib("gmwand.core") - -gm.InitializeMagick(".") - -local magick_wand = gm.NewMagickWand() -local drawing_wand = gm.NewDrawingWand() - -gm.MagickSetSize(magick_wand,800,600) -gm.MagickReadImage(magick_wand,"xc:red") - -gm.DrawPushGraphicContext(drawing_wand) - -gm.DrawSetFillColor(drawing_wand,gm.NewPixelWand()) - --- gm.DrawSetFont(drawing_wand, kpse.findfile("DejaVuSerifBold.ttf")) --- gm.DrawSetFontSize(drawing_wand, 96) --- gm.DrawAnnotation(drawing_wand,300,200, "LuaTeX") - -gm.DrawPopGraphicContext(drawing_wand) -gm.MagickDrawImage(magick_wand,drawing_wand) - -gm.MagickWriteImages(magick_wand,"./luatex-swiglib-test.jpg",1) - -gm.DestroyDrawingWand(drawing_wand) -gm.DestroyMagickWand(magick_wand) +local gm = swiglib("gmwand.core") + +gm.InitializeMagick(".") + +local magick_wand = gm.NewMagickWand() +local drawing_wand = gm.NewDrawingWand() + +gm.MagickSetSize(magick_wand,800,600) +gm.MagickReadImage(magick_wand,"xc:red") + +gm.DrawPushGraphicContext(drawing_wand) + +gm.DrawSetFillColor(drawing_wand,gm.NewPixelWand()) + +-- gm.DrawSetFont(drawing_wand, kpse.findfile("DejaVuSerifBold.ttf")) +-- gm.DrawSetFontSize(drawing_wand, 96) +-- gm.DrawAnnotation(drawing_wand,300,200, "LuaTeX") + +gm.DrawPopGraphicContext(drawing_wand) +gm.MagickDrawImage(magick_wand,drawing_wand) + +gm.MagickWriteImages(magick_wand,"./luatex-swiglib-test.jpg",1) + +gm.DestroyDrawingWand(drawing_wand) +gm.DestroyMagickWand(magick_wand) diff --git a/tex/generic/context/luatex/luatex-swiglib.lua b/tex/generic/context/luatex/luatex-swiglib.lua index 7ffcdc375..45416eff4 100644 --- a/tex/generic/context/luatex/luatex-swiglib.lua +++ 
b/tex/generic/context/luatex/luatex-swiglib.lua @@ -1,62 +1,62 @@ -if not modules then modules = { } end modules ['luatex-swiglib'] = { - version = 1.001, - comment = "companion to luatex-swiglib.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local savedrequire = require - -local libsuffix = os.type == "windows" and ".dll" or ".so" - -function requireswiglib(required,version) - local library = package.loaded[required] - if library then - return library - else - local name = string.gsub(required,"%.","/") .. libsuffix - local list = kpse.show_path("clua") - for root in string.gmatch(list,"([^;]+)") do - local full = false - if type(version) == "string" and version ~= "" then - full = root .. "/" .. version .. "/" .. name - full = lfs.isfile(full) and full - end - if not full then - full = root .. "/" .. name - full = lfs.isfile(full) and full - end - if full then - local path, base = string.match(full,"^(.-)([^\\/]+)" .. libsuffix .."$") - local savedlibrary = package.loaded[base] - package.loaded[base] = nil - local savedpath = lfs.currentdir() - lfs.chdir(path) - library = package.loadlib(full,"luaopen_" .. base) - if type(library) == "function" then - library = library() - texio.write("") - end - lfs.chdir(savedpath) - package.loaded[base] = savedlibrary - package.loaded[required] = library - return library - end - end - texio.write("") -end - -function require(name) - if string.find(name,"^swiglib%.") then - return requireswiglib(name) - else - return savedrequire(name) - end -end - -function swiglib(name,version) - return requireswiglib("swiglib." .. name,version) -end +if not modules then modules = { } end modules ['luatex-swiglib'] = { + version = 1.001, + comment = "companion to luatex-swiglib.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local savedrequire = require + +local libsuffix = os.type == "windows" and ".dll" or ".so" + +function requireswiglib(required,version) + local library = package.loaded[required] + if library then + return library + else + local name = string.gsub(required,"%.","/") .. libsuffix + local list = kpse.show_path("clua") + for root in string.gmatch(list,"([^;]+)") do + local full = false + if type(version) == "string" and version ~= "" then + full = root .. "/" .. version .. "/" .. name + full = lfs.isfile(full) and full + end + if not full then + full = root .. "/" .. name + full = lfs.isfile(full) and full + end + if full then + local path, base = string.match(full,"^(.-)([^\\/]+)" .. libsuffix .."$") + local savedlibrary = package.loaded[base] + package.loaded[base] = nil + local savedpath = lfs.currentdir() + lfs.chdir(path) + library = package.loadlib(full,"luaopen_" .. base) + if type(library) == "function" then + library = library() + texio.write("") + end + lfs.chdir(savedpath) + package.loaded[base] = savedlibrary + package.loaded[required] = library + return library + end + end + texio.write("") +end + +function require(name) + if string.find(name,"^swiglib%.") then + return requireswiglib(name) + else + return savedrequire(name) + end +end + +function swiglib(name,version) + return requireswiglib("swiglib." .. name,version) +end -- cgit v1.2.3
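A short usage sketch of the loader above ("gmwand.core" is the module that the accompanying luatex-swiglib-test.lua loads; any versioned lookup is hypothetical):

    local gm = swiglib("gmwand.core")        -- resolves swiglib/gmwand/core.dll or .so on the clua path
    local gm2 = require("swiglib.gmwand.core") -- the patched require reaches the same loader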